Columns:
  id: int32 (0 to 252k)
  repo: string (length 7 to 55)
  path: string (length 4 to 127)
  func_name: string (length 1 to 88)
  original_string: string (length 75 to 19.8k)
  language: string (1 value)
  code: string (length 75 to 19.8k)
  code_tokens: list
  docstring: string (length 3 to 17.3k)
  docstring_tokens: list
  sha: string (length 40)
  url: string (length 87 to 242)
13,400
buriburisuri/sugartensor
sugartensor/sg_main.py
sg_get_context
python
def sg_get_context():
    r"""Get current context information

    Returns:
      tf.sg_opt class object which contains all context information
    """
    global _context
    # merge current context
    res = tf.sg_opt()
    for c in _context:
        res += c
    return res
[ "def", "sg_get_context", "(", ")", ":", "global", "_context", "# merge current context", "res", "=", "tf", ".", "sg_opt", "(", ")", "for", "c", "in", "_context", ":", "res", "+=", "c", "return", "res" ]
r"""Get current context information Returns: tf.sg_opt class object which contains all context information
[ "r", "Get", "current", "context", "information" ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_main.py#L135-L149
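A minimal usage sketch, assuming sugartensor is imported as `tf` (the library's convention) and that `tf.sg_context` is the companion context manager that pushes options onto the `_context` stack:

```
import sugartensor as tf

# push options onto the context stack, then read the merged context back
with tf.sg_context(act='relu', bn=True):
    ctx = tf.sg_get_context()
    print(ctx.act, ctx.bn)   # 'relu' True

# outside the block the context stack is empty again
print(tf.sg_get_context())   # empty tf.sg_opt
```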
13,401
buriburisuri/sugartensor
sugartensor/sg_main.py
sg_sugar_func
python
def sg_sugar_func(func):
    r"""Decorates a function `func` so that it can be a sugar function.
    Sugar functions can be used in a chainable manner.

    Args:
      func: function to decorate

    Returns:
      A sugar function.
    """
    @wraps(func)
    def wrapper(tensor, **kwargs):
        # call sugar function
        out = func(tensor, tf.sg_opt(kwargs))
        # save node info for reuse
        out._sugar = tf.sg_opt(func=func, arg=tf.sg_opt(kwargs) + sg_get_context(), prev=tensor)
        # inject reuse function
        out.sg_reuse = types.MethodType(sg_reuse, out)
        return out

    return wrapper
[ "def", "sg_sugar_func", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "tensor", ",", "*", "*", "kwargs", ")", ":", "# call sugar function", "out", "=", "func", "(", "tensor", ",", "tf", ".", "sg_opt", "(", "kwargs", ")", ")", "# save node info for reuse", "out", ".", "_sugar", "=", "tf", ".", "sg_opt", "(", "func", "=", "func", ",", "arg", "=", "tf", ".", "sg_opt", "(", "kwargs", ")", "+", "sg_get_context", "(", ")", ",", "prev", "=", "tensor", ")", "# inject reuse function", "out", ".", "sg_reuse", "=", "types", ".", "MethodType", "(", "sg_reuse", ",", "out", ")", "return", "out", "return", "wrapper" ]
r""" Decorates a function `func` so that it can be a sugar function. Sugar function can be used in a chainable manner. Args: func: function to decorate Returns: A sugar function.
[ "r", "Decorates", "a", "function", "func", "so", "that", "it", "can", "be", "a", "sugar", "function", ".", "Sugar", "function", "can", "be", "used", "in", "a", "chainable", "manner", "." ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_main.py#L156-L177
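A hedged sketch of applying the decorator to a user function (`sg_double` is a hypothetical name; a sugar function receives the tensor plus a `tf.sg_opt` built from the keyword arguments):

```
import sugartensor as tf

@tf.sg_sugar_func
def sg_double(tensor, opt):
    # opt is a tf.sg_opt built from the caller's keyword arguments
    return tensor * 2

x = tf.constant([1., 2., 3.])
y = sg_double(x)   # the wrapper also records func/args on y._sugar for sg_reuse
```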
13,402
buriburisuri/sugartensor
sugartensor/sg_main.py
sg_reuse
python
def sg_reuse(tensor, **opt):
    r"""Reconstructs the computational graph of `tensor` so that all of its
    parameters can be reused, replacing its input tensor with `opt.input`.

    Args:
      tensor: A `Tensor` (automatically given by chaining).
      **opt:
        input: A `Tensor` that will replace the original input tensor.

    Returns:
      Reconstructed tensor nodes.
    """
    opt = tf.sg_opt(opt)
    assert hasattr(tensor, '_sugar'), 'cannot reuse this node.'
    assert opt.input is not None, 'input is mandatory.'

    # get all nodes in this graph
    nodes, prev = [tensor], tensor._sugar.prev
    while prev is not None:
        nodes = [prev] + nodes
        prev = prev._sugar.prev if hasattr(prev, '_sugar') else None

    # create graph again for this input
    out = opt.input
    for node in nodes[1:]:  # exclude head node
        if node._sugar.is_layer:
            fn = tf.sg_layer_func(node._sugar.func)
            if node._sugar.arg.scope_name:
                with tf.variable_scope(node._sugar.arg.scope_name):
                    out = fn(out, **(node._sugar.arg + tf.sg_opt(name=node._sugar.name, reuse=True)))
            else:
                out = fn(out, **(node._sugar.arg + tf.sg_opt(name=node._sugar.name, reuse=True)))
        else:
            out = node._sugar.func(out, node._sugar.arg)

    return out
[ "def", "sg_reuse", "(", "tensor", ",", "*", "*", "opt", ")", ":", "opt", "=", "tf", ".", "sg_opt", "(", "opt", ")", "assert", "hasattr", "(", "tensor", ",", "'_sugar'", ")", ",", "'cannot reuse this node.'", "assert", "opt", ".", "input", "is", "not", "None", ",", "'input is mandatory.'", "# get all nodes in this graph", "nodes", ",", "prev", "=", "[", "tensor", "]", ",", "tensor", ".", "_sugar", ".", "prev", "while", "prev", "is", "not", "None", ":", "nodes", "=", "[", "prev", "]", "+", "nodes", "prev", "=", "prev", ".", "_sugar", ".", "prev", "if", "hasattr", "(", "prev", ",", "'_sugar'", ")", "else", "None", "# create graph again for this input", "out", "=", "opt", ".", "input", "for", "node", "in", "nodes", "[", "1", ":", "]", ":", "# exclude head node", "if", "node", ".", "_sugar", ".", "is_layer", ":", "fn", "=", "tf", ".", "sg_layer_func", "(", "node", ".", "_sugar", ".", "func", ")", "if", "node", ".", "_sugar", ".", "arg", ".", "scope_name", ":", "with", "tf", ".", "variable_scope", "(", "node", ".", "_sugar", ".", "arg", ".", "scope_name", ")", ":", "out", "=", "fn", "(", "out", ",", "*", "*", "(", "node", ".", "_sugar", ".", "arg", "+", "tf", ".", "sg_opt", "(", "name", "=", "node", ".", "_sugar", ".", "name", ",", "reuse", "=", "True", ")", ")", ")", "else", ":", "out", "=", "fn", "(", "out", ",", "*", "*", "(", "node", ".", "_sugar", ".", "arg", "+", "tf", ".", "sg_opt", "(", "name", "=", "node", ".", "_sugar", ".", "name", ",", "reuse", "=", "True", ")", ")", ")", "else", ":", "out", "=", "node", ".", "_sugar", ".", "func", "(", "out", ",", "node", ".", "_sugar", ".", "arg", ")", "return", "out" ]
r""" Reconstruct computational graph of `tensor` so all the parameters can be reused and replace its input tensor with `opt.input`. Args: tensor: A `Tensor` (automatically given by chaining). **opt: input: A `Tensor` that will replace the original input tensor. Returns: Reconstructed tensor nodes.
[ "r", "Reconstruct", "computational", "graph", "of", "tensor", "so", "all", "the", "parameters", "can", "be", "reused", "and", "replace", "its", "input", "tensor", "with", "opt", ".", "input", "." ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_main.py#L452-L487
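As a sketch (assuming the library's `sg_dense` layer and its tensor-method chaining), rebuilding a chain on a second input so that both share parameters might look like:

```
import sugartensor as tf

x = tf.sg_input(28 * 28)
logit = (x.sg_dense(dim=100, name='fc1')
          .sg_dense(dim=10, name='fc2'))

# reconstruct the same graph on another input, reusing all variables
x2 = tf.sg_input(28 * 28)
logit2 = logit.sg_reuse(input=x2)
```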
13,403
buriburisuri/sugartensor
sugartensor/sg_main.py
sg_input
python
def sg_input(shape=None, dtype=sg_floatx, name=None):
    r"""Creates a placeholder.

    Args:
      shape: A tuple/list of integers. If an integer is given, it will be
        converted to a list.
      dtype: A data type. Default is float32.
      name: A name for the placeholder.

    Returns:
      A wrapped placeholder `Tensor`.
    """
    if shape is None:
        return tf.placeholder(dtype, shape=None, name=name)
    else:
        if not isinstance(shape, (list, tuple)):
            shape = [shape]
        return tf.placeholder(dtype, shape=[None] + list(shape), name=name)
[ "def", "sg_input", "(", "shape", "=", "None", ",", "dtype", "=", "sg_floatx", ",", "name", "=", "None", ")", ":", "if", "shape", "is", "None", ":", "return", "tf", ".", "placeholder", "(", "dtype", ",", "shape", "=", "None", ",", "name", "=", "name", ")", "else", ":", "if", "not", "isinstance", "(", "shape", ",", "(", "list", ",", "tuple", ")", ")", ":", "shape", "=", "[", "shape", "]", "return", "tf", ".", "placeholder", "(", "dtype", ",", "shape", "=", "[", "None", "]", "+", "list", "(", "shape", ")", ",", "name", "=", "name", ")" ]
r"""Creates a placeholder. Args: shape: A tuple/list of integers. If an integers is given, it will turn to a list. dtype: A data type. Default is float32. name: A name for the placeholder. Returns: A wrapped placeholder `Tensor`.
[ "r", "Creates", "a", "placeholder", "." ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_main.py#L494-L510
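The shape handling can be read straight off the code: `None` gives an unconstrained placeholder, a bare integer is wrapped in a list, and a leading batch dimension is always prepended. A quick sketch:

```
import sugartensor as tf

ph_any = tf.sg_input()             # unconstrained shape
ph_vec = tf.sg_input(28)           # shape (None, 28): the int is wrapped in a list
ph_img = tf.sg_input((28, 28, 1))  # shape (None, 28, 28, 1): batch dim prepended
```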
13,404
buriburisuri/sugartensor
sugartensor/sg_main.py
sg_inject
python
def sg_inject(path, mod_name):
    r"""Converts all functions in the given Python module to sugar functions
    so that they can be used in a chainable manner.

    Args:
      path: A string. Path to the Python module.
      mod_name: A string. The name of the Python module to inject.

    Returns:
      None
    """
    # import module
    import sys
    if path not in list(sys.path):
        sys.path.append(path)
    globals()[mod_name] = importlib.import_module(mod_name)
    # find functions
    for func_name in dir(globals()[mod_name]):
        if isinstance(globals()[mod_name].__dict__.get(func_name), types.FunctionType):
            if not func_name.startswith('_'):
                # inject to tf.Variable type
                exec('tf.Variable.%s = %s.%s' % (func_name, mod_name, func_name))
                # inject to tf.Tensor type
                exec('tf.Tensor.%s = %s.%s' % (func_name, mod_name, func_name))
[ "def", "sg_inject", "(", "path", ",", "mod_name", ")", ":", "# import module", "import", "sys", "if", "path", "not", "in", "list", "(", "sys", ".", "path", ")", ":", "sys", ".", "path", ".", "append", "(", "path", ")", "globals", "(", ")", "[", "mod_name", "]", "=", "importlib", ".", "import_module", "(", "mod_name", ")", "# find functions", "for", "func_name", "in", "dir", "(", "globals", "(", ")", "[", "mod_name", "]", ")", ":", "if", "isinstance", "(", "globals", "(", ")", "[", "mod_name", "]", ".", "__dict__", ".", "get", "(", "func_name", ")", ",", "types", ".", "FunctionType", ")", ":", "if", "not", "func_name", ".", "startswith", "(", "'_'", ")", ":", "# inject to tf.Variable type", "exec", "(", "'tf.Variable.%s = %s.%s'", "%", "(", "func_name", ",", "mod_name", ",", "func_name", ")", ")", "# inject to tf.Tensor type", "exec", "(", "'tf.Tensor.%s = %s.%s'", "%", "(", "func_name", ",", "mod_name", ",", "func_name", ")", ")" ]
r"""Converts all functions in the given Python module to sugar functions so that they can be used in a chainable manner. Args: path: A string. Path to the Python module mod_name: A string. The name of the Python module to inject. Returns: None
[ "r", "Converts", "all", "functions", "in", "the", "given", "Python", "module", "to", "sugar", "functions", "so", "that", "they", "can", "be", "used", "in", "a", "chainable", "manner", "." ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_main.py#L517-L540
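A hedged sketch of the injection flow (the module path, `my_layers`, and `sg_scale` are all hypothetical; every public function in the module becomes a method on `tf.Tensor` and `tf.Variable`):

```
import sugartensor as tf

# my_layers.py would define sugar functions, e.g.:
#   @tf.sg_sugar_func
#   def sg_scale(tensor, opt):
#       return tensor * opt.factor
tf.sg_inject('/path/to/modules', 'my_layers')

x = tf.sg_input(10)
y = x.sg_scale(factor=3.0)   # now callable as a tensor method
```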
13,405
buriburisuri/sugartensor
sugartensor/sg_main.py
sg_queue_context
python
def sg_queue_context(sess=None):
    r"""Context helper for queue routines.

    Args:
      sess: A session to open queues. If not specified, the default session is used.

    Returns:
      None
    """
    # default session
    sess = tf.get_default_session() if sess is None else sess
    # thread coordinator
    coord = tf.train.Coordinator()
    try:
        # start queue threads
        threads = tf.train.start_queue_runners(sess, coord)
        yield
    finally:
        # stop queue threads
        coord.request_stop()
        # wait for threads to exit
        coord.join(threads)
[ "def", "sg_queue_context", "(", "sess", "=", "None", ")", ":", "# default session", "sess", "=", "tf", ".", "get_default_session", "(", ")", "if", "sess", "is", "None", "else", "sess", "# thread coordinator", "coord", "=", "tf", ".", "train", ".", "Coordinator", "(", ")", "try", ":", "# start queue thread", "threads", "=", "tf", ".", "train", ".", "start_queue_runners", "(", "sess", ",", "coord", ")", "yield", "finally", ":", "# stop queue thread", "coord", ".", "request_stop", "(", ")", "# wait thread to exit.", "coord", ".", "join", "(", "threads", ")" ]
r"""Context helper for queue routines. Args: sess: A session to open queues. If not specified, a new session is created. Returns: None
[ "r", "Context", "helper", "for", "queue", "routines", "." ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_main.py#L565-L588
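Note that the body is a generator (it yields inside try/finally), so in the published library it is presumably wrapped with `contextlib.contextmanager`. Under that assumption, a queue-fed loop would look like:

```
import sugartensor as tf

train_op = tf.no_op()   # stand-in for a real training op

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # starts queue-runner threads on entry, stops and joins them on exit
    with tf.sg_queue_context(sess):
        for _ in range(100):
            sess.run(train_op)
```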
13,406
buriburisuri/sugartensor
sugartensor/sg_main.py
sg_arg
python
def sg_arg():
    r"""Gets current command line options.

    Returns:
      tf.sg_opt instance that is updated with current command line options.
    """
    if not tf.app.flags.FLAGS.__dict__['__parsed']:
        tf.app.flags.FLAGS._parse_flags()
    return tf.sg_opt(tf.app.flags.FLAGS.__dict__['__flags'])
[ "def", "sg_arg", "(", ")", ":", "if", "not", "tf", ".", "app", ".", "flags", ".", "FLAGS", ".", "__dict__", "[", "'__parsed'", "]", ":", "tf", ".", "app", ".", "flags", ".", "FLAGS", ".", "_parse_flags", "(", ")", "return", "tf", ".", "sg_opt", "(", "tf", ".", "app", ".", "flags", ".", "FLAGS", ".", "__dict__", "[", "'__flags'", "]", ")" ]
r"""Gets current command line options Returns: tf.sg_opt instance that is updated with current commandd line options.
[ "r", "Gets", "current", "command", "line", "options" ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_main.py#L632-L640
13,407
buriburisuri/sugartensor
sugartensor/sg_main.py
sg_arg_def
python
def sg_arg_def(**kwargs):
    r"""Defines command line options.

    Args:
      **kwargs:
        key: A name for the option.
        value: Default value or a tuple of (default value, description).

    Returns:
      None

    For example,

    ```
    # Either of the following two lines will define a `--n_epoch` command line
    # argument and set its default value to 1.
    tf.sg_arg_def(n_epoch=1)
    tf.sg_arg_def(n_epoch=(1, 'total number of epochs'))
    ```
    """
    for k, v in kwargs.items():
        if type(v) is tuple or type(v) is list:
            v, c = v[0], v[1]
        else:
            c = k
        if type(v) is str:
            tf.app.flags.DEFINE_string(k, v, c)
        elif type(v) is int:
            tf.app.flags.DEFINE_integer(k, v, c)
        elif type(v) is float:
            tf.app.flags.DEFINE_float(k, v, c)
        elif type(v) is bool:
            tf.app.flags.DEFINE_bool(k, v, c)
[ "def", "sg_arg_def", "(", "*", "*", "kwargs", ")", ":", "for", "k", ",", "v", "in", "kwargs", ".", "items", "(", ")", ":", "if", "type", "(", "v", ")", "is", "tuple", "or", "type", "(", "v", ")", "is", "list", ":", "v", ",", "c", "=", "v", "[", "0", "]", ",", "v", "[", "1", "]", "else", ":", "c", "=", "k", "if", "type", "(", "v", ")", "is", "str", ":", "tf", ".", "app", ".", "flags", ".", "DEFINE_string", "(", "k", ",", "v", ",", "c", ")", "elif", "type", "(", "v", ")", "is", "int", ":", "tf", ".", "app", ".", "flags", ".", "DEFINE_integer", "(", "k", ",", "v", ",", "c", ")", "elif", "type", "(", "v", ")", "is", "float", ":", "tf", ".", "app", ".", "flags", ".", "DEFINE_float", "(", "k", ",", "v", ",", "c", ")", "elif", "type", "(", "v", ")", "is", "bool", ":", "tf", ".", "app", ".", "flags", ".", "DEFINE_bool", "(", "k", ",", "v", ",", "c", ")" ]
r"""Defines command line options Args: **kwargs: key: A name for the option. value : Default value or a tuple of (default value, description). Returns: None For example, ``` # Either of the following two lines will define `--n_epoch` command line argument and set its default value as 1. tf.sg_arg_def(n_epoch=1) tf.sg_arg_def(n_epoch=(1, 'total number of epochs')) ```
[ "r", "Defines", "command", "line", "options" ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_main.py#L643-L675
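Pairing this with `sg_arg` above, a script might declare and read its flags as follows (a sketch; the flag names are illustrative):

```
import sugartensor as tf

tf.sg_arg_def(n_epoch=(1, 'total number of epochs'),
              lr=(0.001, 'learning rate'),
              log=(True, 'enable logging'))

opt = tf.sg_arg()   # parsed command line options as a tf.sg_opt
print(opt.n_epoch, opt.lr, opt.log)
# run as: python train.py --n_epoch 5 --lr 0.01
```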
13,408
buriburisuri/sugartensor
sugartensor/sg_logging.py
sg_summary_loss
python
def sg_summary_loss(tensor, prefix='losses', name=None):
    r"""Registers `tensor` to the summary report as `loss`.

    Args:
      tensor: A `Tensor` to log as loss
      prefix: A `string`. A prefix to display in the TensorBoard web UI.
      name: A `string`. A name to display in the TensorBoard web UI.

    Returns:
      None
    """
    # defaults
    prefix = '' if prefix is None else prefix + '/'
    # summary name
    name = prefix + _pretty_name(tensor) if name is None else prefix + name
    # summary statistics
    _scalar(name, tf.reduce_mean(tensor))
    _histogram(name + '-h', tensor)
[ "def", "sg_summary_loss", "(", "tensor", ",", "prefix", "=", "'losses'", ",", "name", "=", "None", ")", ":", "# defaults", "prefix", "=", "''", "if", "prefix", "is", "None", "else", "prefix", "+", "'/'", "# summary name", "name", "=", "prefix", "+", "_pretty_name", "(", "tensor", ")", "if", "name", "is", "None", "else", "prefix", "+", "name", "# summary statistics", "_scalar", "(", "name", ",", "tf", ".", "reduce_mean", "(", "tensor", ")", ")", "_histogram", "(", "name", "+", "'-h'", ",", "tensor", ")" ]
r"""Register `tensor` to summary report as `loss` Args: tensor: A `Tensor` to log as loss prefix: A `string`. A prefix to display in the tensor board web UI. name: A `string`. A name to display in the tensor board web UI. Returns: None
[ "r", "Register", "tensor", "to", "summary", "report", "as", "loss" ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_logging.py#L19-L36
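The summary helpers that follow (`sg_summary_gradient`, `sg_summary_activation`, `sg_summary_param`, `sg_summary_image`, `sg_summary_audio`) all share this prefix/name pattern, so one sketch covers the idiom (assuming the library's `sg_dense` layer exists as in the other records):

```
import sugartensor as tf

x = tf.sg_input(28 * 28)
y = tf.sg_input(10)                     # one-hot targets
logit = x.sg_dense(dim=10, name='fc')   # assumed sg_dense layer
loss = tf.square(logit - y)             # any per-element loss tensor works

# writes a 'losses/<name>' scalar and a 'losses/<name>-h' histogram summary
tf.sg_summary_loss(loss)
```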
13,409
buriburisuri/sugartensor
sugartensor/sg_logging.py
sg_summary_gradient
python
def sg_summary_gradient(tensor, gradient, prefix=None, name=None):
    r"""Registers `tensor` to the summary report as `gradient`.

    Args:
      tensor: A `Tensor` to log as gradient
      gradient: A 0-D `Tensor`. A gradient to log
      prefix: A `string`. A prefix to display in the TensorBoard web UI.
      name: A `string`. A name to display in the TensorBoard web UI.

    Returns:
      None
    """
    # defaults
    prefix = '' if prefix is None else prefix + '/'
    # summary name
    name = prefix + _pretty_name(tensor) if name is None else prefix + name
    # summary statistics
    # noinspection PyBroadException
    _scalar(name + '/grad', tf.reduce_mean(tf.abs(gradient)))
    _histogram(name + '/grad-h', tf.abs(gradient))
[ "def", "sg_summary_gradient", "(", "tensor", ",", "gradient", ",", "prefix", "=", "None", ",", "name", "=", "None", ")", ":", "# defaults", "prefix", "=", "''", "if", "prefix", "is", "None", "else", "prefix", "+", "'/'", "# summary name", "name", "=", "prefix", "+", "_pretty_name", "(", "tensor", ")", "if", "name", "is", "None", "else", "prefix", "+", "name", "# summary statistics", "# noinspection PyBroadException", "_scalar", "(", "name", "+", "'/grad'", ",", "tf", ".", "reduce_mean", "(", "tf", ".", "abs", "(", "gradient", ")", ")", ")", "_histogram", "(", "name", "+", "'/grad-h'", ",", "tf", ".", "abs", "(", "gradient", ")", ")" ]
r"""Register `tensor` to summary report as `gradient` Args: tensor: A `Tensor` to log as gradient gradient: A 0-D `Tensor`. A gradient to log prefix: A `string`. A prefix to display in the tensor board web UI. name: A `string`. A name to display in the tensor board web UI. Returns: None
[ "r", "Register", "tensor", "to", "summary", "report", "as", "gradient" ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_logging.py#L59-L78
13,410
buriburisuri/sugartensor
sugartensor/sg_logging.py
sg_summary_activation
python
def sg_summary_activation(tensor, prefix=None, name=None):
    r"""Registers `tensor` to the summary report as `activation`.

    Args:
      tensor: A `Tensor` to log as activation
      prefix: A `string`. A prefix to display in the TensorBoard web UI.
      name: A `string`. A name to display in the TensorBoard web UI.

    Returns:
      None
    """
    # defaults
    prefix = '' if prefix is None else prefix + '/'
    # summary name
    name = prefix + _pretty_name(tensor) if name is None else prefix + name
    # summary statistics
    _scalar(name + '/ratio',
            tf.reduce_mean(tf.cast(tf.greater(tensor, 0), tf.sg_floatx)))
    _histogram(name + '/ratio-h', tensor)
[ "def", "sg_summary_activation", "(", "tensor", ",", "prefix", "=", "None", ",", "name", "=", "None", ")", ":", "# defaults", "prefix", "=", "''", "if", "prefix", "is", "None", "else", "prefix", "+", "'/'", "# summary name", "name", "=", "prefix", "+", "_pretty_name", "(", "tensor", ")", "if", "name", "is", "None", "else", "prefix", "+", "name", "# summary statistics", "_scalar", "(", "name", "+", "'/ratio'", ",", "tf", ".", "reduce_mean", "(", "tf", ".", "cast", "(", "tf", ".", "greater", "(", "tensor", ",", "0", ")", ",", "tf", ".", "sg_floatx", ")", ")", ")", "_histogram", "(", "name", "+", "'/ratio-h'", ",", "tensor", ")" ]
r"""Register `tensor` to summary report as `activation` Args: tensor: A `Tensor` to log as activation prefix: A `string`. A prefix to display in the tensor board web UI. name: A `string`. A name to display in the tensor board web UI. Returns: None
[ "r", "Register", "tensor", "to", "summary", "report", "as", "activation" ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_logging.py#L81-L99
13,411
buriburisuri/sugartensor
sugartensor/sg_logging.py
sg_summary_param
python
def sg_summary_param(tensor, prefix=None, name=None):
    r"""Registers `tensor` to the summary report as `parameters`.

    Args:
      tensor: A `Tensor` to log as parameters
      prefix: A `string`. A prefix to display in the TensorBoard web UI.
      name: A `string`. A name to display in the TensorBoard web UI.

    Returns:
      None
    """
    # defaults
    prefix = '' if prefix is None else prefix + '/'
    # summary name
    name = prefix + _pretty_name(tensor) if name is None else prefix + name
    # summary statistics
    _scalar(name + '/abs', tf.reduce_mean(tf.abs(tensor)))
    _histogram(name + '/abs-h', tf.abs(tensor))
[ "def", "sg_summary_param", "(", "tensor", ",", "prefix", "=", "None", ",", "name", "=", "None", ")", ":", "# defaults", "prefix", "=", "''", "if", "prefix", "is", "None", "else", "prefix", "+", "'/'", "# summary name", "name", "=", "prefix", "+", "_pretty_name", "(", "tensor", ")", "if", "name", "is", "None", "else", "prefix", "+", "name", "# summary statistics", "_scalar", "(", "name", "+", "'/abs'", ",", "tf", ".", "reduce_mean", "(", "tf", ".", "abs", "(", "tensor", ")", ")", ")", "_histogram", "(", "name", "+", "'/abs-h'", ",", "tf", ".", "abs", "(", "tensor", ")", ")" ]
r"""Register `tensor` to summary report as `parameters` Args: tensor: A `Tensor` to log as parameters prefix: A `string`. A prefix to display in the tensor board web UI. name: A `string`. A name to display in the tensor board web UI. Returns: None
[ "r", "Register", "tensor", "to", "summary", "report", "as", "parameters" ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_logging.py#L102-L119
13,412
buriburisuri/sugartensor
sugartensor/sg_logging.py
sg_summary_image
python
def sg_summary_image(tensor, prefix=None, name=None):
    r"""Registers `tensor` to the summary report as `image`.

    Args:
      tensor: A tensor to log as image
      prefix: A `string`. A prefix to display in the TensorBoard web UI.
      name: A `string`. A name to display in the TensorBoard web UI.

    Returns:
      None
    """
    # defaults
    prefix = '' if prefix is None else prefix + '/'
    # summary name
    name = prefix + _pretty_name(tensor) if name is None else prefix + name
    # summary statistics
    if not tf.get_variable_scope().reuse:
        tf.summary.image(name + '-im', tensor)
[ "def", "sg_summary_image", "(", "tensor", ",", "prefix", "=", "None", ",", "name", "=", "None", ")", ":", "# defaults", "prefix", "=", "''", "if", "prefix", "is", "None", "else", "prefix", "+", "'/'", "# summary name", "name", "=", "prefix", "+", "_pretty_name", "(", "tensor", ")", "if", "name", "is", "None", "else", "prefix", "+", "name", "# summary statistics", "if", "not", "tf", ".", "get_variable_scope", "(", ")", ".", "reuse", ":", "tf", ".", "summary", ".", "image", "(", "name", "+", "'-im'", ",", "tensor", ")" ]
r"""Register `tensor` to summary report as `image` Args: tensor: A tensor to log as image prefix: A `string`. A prefix to display in the tensor board web UI. name: A `string`. A name to display in the tensor board web UI. Returns: None
[ "r", "Register", "tensor", "to", "summary", "report", "as", "image" ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_logging.py#L122-L139
13,413
buriburisuri/sugartensor
sugartensor/sg_logging.py
sg_summary_audio
python
def sg_summary_audio(tensor, sample_rate=16000, prefix=None, name=None):
    r"""Registers `tensor` to the summary report as audio.

    Args:
      tensor: A `Tensor` to log as audio
      sample_rate: An int. Sample rate to report. Default is 16000.
      prefix: A `string`. A prefix to display in the TensorBoard web UI.
      name: A `string`. A name to display in the TensorBoard web UI.

    Returns:
      None
    """
    # defaults
    prefix = '' if prefix is None else prefix + '/'
    # summary name
    name = prefix + _pretty_name(tensor) if name is None else prefix + name
    # summary statistics
    if not tf.get_variable_scope().reuse:
        tf.summary.audio(name + '-au', tensor, sample_rate)
[ "def", "sg_summary_audio", "(", "tensor", ",", "sample_rate", "=", "16000", ",", "prefix", "=", "None", ",", "name", "=", "None", ")", ":", "# defaults", "prefix", "=", "''", "if", "prefix", "is", "None", "else", "prefix", "+", "'/'", "# summary name", "name", "=", "prefix", "+", "_pretty_name", "(", "tensor", ")", "if", "name", "is", "None", "else", "prefix", "+", "name", "# summary statistics", "if", "not", "tf", ".", "get_variable_scope", "(", ")", ".", "reuse", ":", "tf", ".", "summary", ".", "audio", "(", "name", "+", "'-au'", ",", "tensor", ",", "sample_rate", ")" ]
r"""Register `tensor` to summary report as audio Args: tensor: A `Tensor` to log as audio sample_rate : An int. Sample rate to report. Default is 16000. prefix: A `string`. A prefix to display in the tensor board web UI. name: A `string`. A name to display in the tensor board web UI. Returns: None
[ "r", "Register", "tensor", "to", "summary", "report", "as", "audio" ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_logging.py#L142-L160
13,414
buriburisuri/sugartensor
sugartensor/sg_train.py
sg_train
python
def sg_train(**kwargs):
    r"""Trains the model.

    Args:
      **kwargs:
        optim: A name for the optimizer. 'MaxProp' (default), 'AdaMax', 'Adam', 'RMSProp' or 'sgd'.
        loss: A 0-D `Tensor` containing the value to minimize.
        lr: A Python Scalar (optional). Learning rate. Default is .001.
        beta1: A Python Scalar (optional). Default is .9.
        beta2: A Python Scalar (optional). Default is .99.
        save_dir: A string. The root path to which checkpoint and log files are saved.
          Default is `asset/train`.
        max_ep: A positive integer. Maximum number of epochs. Default is 1000.
        ep_size: A positive integer. Total number of batches in an epoch,
          for proper display of the log. Default is 1e5.
        save_interval: A Python scalar. The interval of saving checkpoint files.
          By default, a checkpoint file is written every 600 seconds.
        log_interval: A Python scalar. The interval of recording logs.
          By default, logging is executed every 60 seconds.
        max_keep: A positive integer. Maximum number of recent checkpoints to keep. Default is 5.
        keep_interval: A Python scalar. How often to keep checkpoints. Default is 1 hour.
        category: Scope name or list to train.
        eval_metric: A list of tensors containing the values to evaluate. Default is [].
        tqdm: Boolean. If True (default), progress bars are shown.
          If False, a series of loss values is printed to the console.
    """
    opt = tf.sg_opt(kwargs)
    assert opt.loss is not None, 'loss is mandatory.'

    # default training options
    opt += tf.sg_opt(optim='MaxProp', lr=0.001, beta1=0.9, beta2=0.99, category='', ep_size=100000)

    # get optimizer
    train_op = sg_optim(opt.loss, optim=opt.optim, lr=opt.lr,
                        beta1=opt.beta1, beta2=opt.beta2, category=opt.category)

    # for console logging
    loss_ = opt.loss

    # use only the first loss in the multiple-GPU case
    if isinstance(opt.loss, (tuple, list)):
        loss_ = opt.loss[0]

    # define train function
    # noinspection PyUnusedLocal
    @sg_train_func
    def train_func(sess, arg):
        return sess.run([loss_, train_op])[0]

    # run train function
    train_func(**opt)
[ "def", "sg_train", "(", "*", "*", "kwargs", ")", ":", "opt", "=", "tf", ".", "sg_opt", "(", "kwargs", ")", "assert", "opt", ".", "loss", "is", "not", "None", ",", "'loss is mandatory.'", "# default training options", "opt", "+=", "tf", ".", "sg_opt", "(", "optim", "=", "'MaxProp'", ",", "lr", "=", "0.001", ",", "beta1", "=", "0.9", ",", "beta2", "=", "0.99", ",", "category", "=", "''", ",", "ep_size", "=", "100000", ")", "# get optimizer", "train_op", "=", "sg_optim", "(", "opt", ".", "loss", ",", "optim", "=", "opt", ".", "optim", ",", "lr", "=", "0.001", ",", "beta1", "=", "opt", ".", "beta1", ",", "beta2", "=", "opt", ".", "beta2", ",", "category", "=", "opt", ".", "category", ")", "# for console logging", "loss_", "=", "opt", ".", "loss", "# use only first loss when multiple GPU case", "if", "isinstance", "(", "opt", ".", "loss", ",", "(", "tuple", ",", "list", ")", ")", ":", "loss_", "=", "opt", ".", "loss", "[", "0", "]", "# define train function", "# noinspection PyUnusedLocal", "@", "sg_train_func", "def", "train_func", "(", "sess", ",", "arg", ")", ":", "return", "sess", ".", "run", "(", "[", "loss_", ",", "train_op", "]", ")", "[", "0", "]", "# run train function", "train_func", "(", "*", "*", "opt", ")" ]
r"""Trains the model. Args: **kwargs: optim: A name for optimizer. 'MaxProp' (default), 'AdaMax', 'Adam', 'RMSProp' or 'sgd'. loss: A 0-D `Tensor` containing the value to minimize. lr: A Python Scalar (optional). Learning rate. Default is .001. beta1: A Python Scalar (optional). Default is .9. beta2: A Python Scalar (optional). Default is .99. save_dir: A string. The root path to which checkpoint and log files are saved. Default is `asset/train`. max_ep: A positive integer. Maximum number of epochs. Default is 1000. ep_size: A positive integer. Number of Total batches in an epoch. For proper display of log. Default is 1e5. save_interval: A Python scalar. The interval of saving checkpoint files. By default, for every 600 seconds, a checkpoint file is written. log_interval: A Python scalar. The interval of recoding logs. By default, for every 60 seconds, logging is executed. max_keep: A positive integer. Maximum number of recent checkpoints to keep. Default is 5. keep_interval: A Python scalar. How often to keep checkpoints. Default is 1 hour. category: Scope name or list to train eval_metric: A list of tensors containing the value to evaluate. Default is []. tqdm: Boolean. If True (Default), progress bars are shown. If False, a series of loss will be shown on the console.
[ "r", "Trains", "the", "model", "." ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_train.py#L13-L69
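A compact end-to-end sketch under stated assumptions: the bundled `tf.sg_data.Mnist` feeder and the `sg_flatten`/`sg_dense`/`sg_ce` helpers follow the library's published examples, but their names are not verified against this exact revision.

```
import sugartensor as tf

data = tf.sg_data.Mnist(batch_size=128)   # assumed bundled MNIST feeder
logit = (data.train.image
         .sg_flatten()
         .sg_dense(dim=10, name='fc'))
loss = logit.sg_ce(target=data.train.label)

tf.sg_train(loss=loss, lr=0.001, max_ep=10, ep_size=data.train.num_batch)
```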
13,415
buriburisuri/sugartensor
sugartensor/sg_train.py
sg_restore
python
def sg_restore(sess, save_path, category=''):
    r"""Restores previously saved variables.

    Args:
      sess: A `Session` to use to restore the parameters.
      save_path: Path where parameters were previously saved.
      category: A `String` used to filter variables whose names start with the given category.

    Returns:
      None
    """
    # to list
    if not isinstance(category, (tuple, list)):
        category = [category]

    # make variable list to load
    var_list = {}
    for cat in category:
        for t in tf.global_variables():
            if t.name.startswith(cat):
                var_list[t.name[:-2]] = t

    # restore parameters
    saver = tf.train.Saver(var_list)
    saver.restore(sess, save_path)
[ "def", "sg_restore", "(", "sess", ",", "save_path", ",", "category", "=", "''", ")", ":", "# to list", "if", "not", "isinstance", "(", "category", ",", "(", "tuple", ",", "list", ")", ")", ":", "category", "=", "[", "category", "]", "# make variable list to load", "var_list", "=", "{", "}", "for", "cat", "in", "category", ":", "for", "t", "in", "tf", ".", "global_variables", "(", ")", ":", "if", "t", ".", "name", ".", "startswith", "(", "cat", ")", ":", "var_list", "[", "t", ".", "name", "[", ":", "-", "2", "]", "]", "=", "t", "# restore parameters", "saver", "=", "tf", ".", "train", ".", "Saver", "(", "var_list", ")", "saver", ".", "restore", "(", "sess", ",", "save_path", ")" ]
r""" Restores previously saved variables. Args: sess: A `Session` to use to restore the parameters. save_path: Path where parameters were previously saved. category: A `String` to filter variables starts with given category. Returns:
[ "r", "Restores", "previously", "saved", "variables", "." ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_train.py#L124-L148
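For instance, loading only a subgraph's parameters from a checkpoint (the path and scope name here are illustrative):

```
import sugartensor as tf

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # restore only variables whose names start with 'generator'
    tf.sg_restore(sess, save_path='asset/train/model.ckpt', category='generator')
```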
13,416
buriburisuri/sugartensor
sugartensor/sg_train.py
sg_regularizer_loss
python
def sg_regularizer_loss(scale=1.0):
    r"""Gets the regularizer loss.

    Args:
      scale: A scalar. A weight applied to the regularizer loss.
    """
    return scale * tf.reduce_mean(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))
[ "def", "sg_regularizer_loss", "(", "scale", "=", "1.0", ")", ":", "return", "scale", "*", "tf", ".", "reduce_mean", "(", "tf", ".", "get_collection", "(", "tf", ".", "GraphKeys", ".", "REGULARIZATION_LOSSES", ")", ")" ]
r""" Get regularizer losss Args: scale: A scalar. A weight applied to regularizer loss
[ "r", "Get", "regularizer", "losss" ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_train.py#L376-L382
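Note that `tf.reduce_mean` over the collection only makes sense when `tf.GraphKeys.REGULARIZATION_LOSSES` is non-empty, i.e. when layers were built with a regularizer. A sketch of the usual composition:

```
import sugartensor as tf

# data_loss computed elsewhere, from layers built with a regularizer
total_loss = data_loss + tf.sg_regularizer_loss(scale=1e-4)
```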
13,417
buriburisuri/sugartensor
sugartensor/sg_net.py
sg_densenet_layer
python
def sg_densenet_layer(x, opt):
    r"""Applies the basic architecture of a densenet layer.

    Note that the fc layers in the original architecture will be replaced
    with fully convolutional layers. For convenience, we still call them
    fc layers, though.

    Args:
      x: A `Tensor`.
      opt:
        dim: An integer. Dimension for this densenet layer.
        num: Number of times to repeat.
        act: String. 'relu' (default). The activation function name.
        trans: Boolean. If True (default), the transition layer will be applied.
        reuse: Boolean (optional). If True, all variables will be loaded from the previous network.
        name: String (optional). Used as a convolution layer prefix.

    Returns:
      A `Tensor`.
    """
    assert opt.dim is not None, 'dim is mandatory.'
    assert opt.num is not None, 'num is mandatory.'

    # default stride
    opt += tf.sg_opt(stride=1, act='relu', trans=True)

    # format convolutional layer name
    def cname(index):
        return opt.name if opt.name is None else opt.name + '_%d' % index

    # dense layer
    with tf.sg_context(bias=False, reuse=opt.reuse):
        out = x
        for i in range(opt.num):
            # dense block
            out_new = (out
                       .sg_bypass(act=opt.act, bn=True, name=cname(3 * i + 1))
                       .sg_conv(dim=opt.dim // 4, size=1, act=opt.act, bn=True, name=cname(3 * i + 2))
                       .sg_conv(dim=opt.dim, size=3, name=cname(3 * i + 3)))
            out = tf.concat([out_new, out], 3)

        # transition layer
        if opt.trans:
            out = (out
                   .sg_bypass(act=opt.act, bn=True, name=cname(3 * i + 4))
                   .sg_conv(size=1, name=cname(3 * i + 5))
                   .sg_pool(avg=True))

    return out
[ "def", "sg_densenet_layer", "(", "x", ",", "opt", ")", ":", "assert", "opt", ".", "dim", "is", "not", "None", ",", "'dim is mandatory.'", "assert", "opt", ".", "num", "is", "not", "None", ",", "'num is mandatory.'", "# default stride", "opt", "+=", "tf", ".", "sg_opt", "(", "stride", "=", "1", ",", "act", "=", "'relu'", ",", "trans", "=", "True", ")", "# format convolutional layer name", "def", "cname", "(", "index", ")", ":", "return", "opt", ".", "name", "if", "opt", ".", "name", "is", "None", "else", "opt", ".", "name", "+", "'_%d'", "%", "index", "# dense layer", "with", "tf", ".", "sg_context", "(", "bias", "=", "False", ",", "reuse", "=", "opt", ".", "reuse", ")", ":", "out", "=", "x", "for", "i", "in", "range", "(", "opt", ".", "num", ")", ":", "# dense block", "out_new", "=", "(", "out", ".", "sg_bypass", "(", "act", "=", "opt", ".", "act", ",", "bn", "=", "True", ",", "name", "=", "cname", "(", "3", "*", "i", "+", "1", ")", ")", ".", "sg_conv", "(", "dim", "=", "opt", ".", "dim", "//", "4", ",", "size", "=", "1", ",", "act", "=", "opt", ".", "act", ",", "bn", "=", "True", ",", "name", "=", "cname", "(", "3", "*", "i", "+", "2", ")", ")", ".", "sg_conv", "(", "dim", "=", "opt", ".", "dim", ",", "size", "=", "3", ",", "name", "=", "cname", "(", "3", "*", "i", "+", "3", ")", ")", ")", "out", "=", "tf", ".", "concat", "(", "[", "out_new", ",", "out", "]", ",", "3", ")", "# transition layer", "if", "opt", ".", "trans", ":", "out", "=", "(", "out", ".", "sg_bypass", "(", "act", "=", "opt", ".", "act", ",", "bn", "=", "True", ",", "name", "=", "cname", "(", "3", "*", "i", "+", "4", ")", ")", ".", "sg_conv", "(", "size", "=", "1", ",", "name", "=", "cname", "(", "3", "*", "i", "+", "5", ")", ")", ".", "sg_pool", "(", "avg", "=", "True", ")", ")", "return", "out" ]
r"""Applies basic architecture of densenet layer. Note that the fc layers in the original architecture will be replaced with fully convolutional layers. For convenience, We still call them fc layers, though. Args: x: A `Tensor`. opt: dim: An integer. Dimension for this resnet layer num: Number of times to repeat act: String. 'relu' (default). the activation function name trans: Boolean. If True(default), transition layer will be applied. reuse: Boolean(Optional). If True, all variables will be loaded from previous network. name: String. (optional) Used as convolution layer prefix Returns: A `Tensor`.
[ "r", "Applies", "basic", "architecture", "of", "densenet", "layer", "." ]
d2c039954777c7fbe3eb0c2ae40c45c9854deb40
https://github.com/buriburisuri/sugartensor/blob/d2c039954777c7fbe3eb0c2ae40c45c9854deb40/sugartensor/sg_net.py#L432-L480
13,418
thanethomson/statik
statik/utils.py
deep_merge_dict
python
def deep_merge_dict(a, b):
    """Deep merges dictionary b into dictionary a."""
    if not isinstance(a, dict):
        raise TypeError("a must be a dict, but found %s" % a.__class__.__name__)
    if not isinstance(b, dict):
        raise TypeError("b must be a dict, but found %s" % b.__class__.__name__)

    _a = copy(a)
    _b = copy(b)
    for key_b, val_b in iteritems(_b):
        # if it's a sub-dictionary
        if isinstance(val_b, dict):
            if key_b not in _a or not isinstance(_a[key_b], dict):
                _a[key_b] = {}

            # perform the deep merge recursively
            _a[key_b] = deep_merge_dict(_a[key_b], val_b)
        else:
            _a[key_b] = val_b

    # b should now be deep-merged into a
    return _a
[ "def", "deep_merge_dict", "(", "a", ",", "b", ")", ":", "if", "not", "isinstance", "(", "a", ",", "dict", ")", ":", "raise", "TypeError", "(", "\"a must be a dict, but found %s\"", "%", "a", ".", "__class__", ".", "__name__", ")", "if", "not", "isinstance", "(", "b", ",", "dict", ")", ":", "raise", "TypeError", "(", "\"b must be a dict, but found %s\"", "%", "b", ".", "__class__", ".", "__name__", ")", "_a", "=", "copy", "(", "a", ")", "_b", "=", "copy", "(", "b", ")", "for", "key_b", ",", "val_b", "in", "iteritems", "(", "_b", ")", ":", "# if it's a sub-dictionary", "if", "isinstance", "(", "val_b", ",", "dict", ")", ":", "if", "key_b", "not", "in", "_a", "or", "not", "isinstance", "(", "_a", "[", "key_b", "]", ",", "dict", ")", ":", "_a", "[", "key_b", "]", "=", "{", "}", "# perform the deep merge recursively", "_a", "[", "key_b", "]", "=", "deep_merge_dict", "(", "_a", "[", "key_b", "]", ",", "val_b", ")", "else", ":", "_a", "[", "key_b", "]", "=", "val_b", "# b should now be deep-merged into a", "return", "_a" ]
Deep merges dictionary b into dictionary a.
[ "Deep", "merges", "dictionary", "b", "into", "dictionary", "a", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/utils.py#L102-L124
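A quick worked example of the merge semantics: nested dicts merge key by key, scalar values from `b` win, and the inputs are left untouched because the function copies before merging.

```
a = {'db': {'host': 'localhost', 'port': 5432}, 'debug': False}
b = {'db': {'port': 6432}, 'debug': True}

merged = deep_merge_dict(a, b)
# {'db': {'host': 'localhost', 'port': 6432}, 'debug': True}
assert a == {'db': {'host': 'localhost', 'port': 5432}, 'debug': False}
```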
13,419
thanethomson/statik
statik/utils.py
copy_file_if_modified
python
def copy_file_if_modified(src_path, dest_path):
    """Only copies the file from the source path to the destination path if it doesn't exist yet or it
    has been modified. Intended to provide something of an optimisation when a project has large trees
    of assets."""

    # if the destination path is a directory, delete it completely - we assume here we are
    # writing a file to the filesystem
    if os.path.isdir(dest_path):
        shutil.rmtree(dest_path)

    must_copy = False
    if not os.path.exists(dest_path):
        must_copy = True
    else:
        src_stat = os.stat(src_path)
        dest_stat = os.stat(dest_path)

        # if the size or last modified timestamp are different
        if ((src_stat[stat.ST_SIZE] != dest_stat[stat.ST_SIZE]) or
                (src_stat[stat.ST_MTIME] != dest_stat[stat.ST_MTIME])):
            must_copy = True

    if must_copy:
        shutil.copy2(src_path, dest_path)
[ "def", "copy_file_if_modified", "(", "src_path", ",", "dest_path", ")", ":", "# if the destination path is a directory, delete it completely - we assume here we are", "# writing a file to the filesystem", "if", "os", ".", "path", ".", "isdir", "(", "dest_path", ")", ":", "shutil", ".", "rmtree", "(", "dest_path", ")", "must_copy", "=", "False", "if", "not", "os", ".", "path", ".", "exists", "(", "dest_path", ")", ":", "must_copy", "=", "True", "else", ":", "src_stat", "=", "os", ".", "stat", "(", "src_path", ")", "dest_stat", "=", "os", ".", "stat", "(", "dest_path", ")", "# if the size or last modified timestamp are different", "if", "(", "(", "src_stat", "[", "stat", ".", "ST_SIZE", "]", "!=", "dest_stat", "[", "stat", ".", "ST_SIZE", "]", ")", "or", "(", "src_stat", "[", "stat", ".", "ST_MTIME", "]", "!=", "dest_stat", "[", "stat", ".", "ST_MTIME", "]", ")", ")", ":", "must_copy", "=", "True", "if", "must_copy", ":", "shutil", ".", "copy2", "(", "src_path", ",", "dest_path", ")" ]
Only copies the file from the source path to the destination path if it doesn't exist yet or it has been modified. Intended to provide something of an optimisation when a project has large trees of assets.
[ "Only", "copies", "the", "file", "from", "the", "source", "path", "to", "the", "destination", "path", "if", "it", "doesn", "t", "exist", "yet", "or", "it", "has", "been", "modified", ".", "Intended", "to", "provide", "something", "of", "an", "optimisation", "when", "a", "project", "has", "large", "trees", "of", "assets", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/utils.py#L141-L163
13,420
thanethomson/statik
statik/utils.py
get_url_file_ext
python
def get_url_file_ext(url):
    """Attempts to extract the file extension from the given URL."""
    # get the last part of the path component
    filename = url.split('/')[-1]
    name, ext = os.path.splitext(filename)
    # handle case of files with leading dot
    if not ext and name and name[0] == '.':
        ext = name
    return ext
[ "def", "get_url_file_ext", "(", "url", ")", ":", "# get the last part of the path component", "filename", "=", "url", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", "name", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "# handle case of files with leading dot", "if", "not", "ext", "and", "name", "and", "name", "[", "0", "]", "==", "'.'", ":", "ext", "=", "name", "return", "ext" ]
Attempts to extract the file extension from the given URL.
[ "Attempts", "to", "extract", "the", "file", "extension", "from", "the", "given", "URL", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/utils.py#L195-L205
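The leading-dot branch is the interesting case; a few illustrative calls (the URLs are hypothetical):

```
get_url_file_ext('https://example.com/assets/logo.png')   # '.png'
get_url_file_ext('https://example.com/files/.htaccess')   # '.htaccess'
get_url_file_ext('https://example.com/about')             # ''
```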
13,421
thanethomson/statik
statik/utils.py
generate_quickstart
def generate_quickstart(project_path): """Generates all of the basic paths for a Statik project within the given project path. If the project path doesn't exist, it will be created.""" ensure_path_exists(project_path) ensure_file_exists(os.path.join(project_path, "config.yml"), DEFAULT_CONFIG_CONTENT) ensure_path_exists(os.path.join(project_path, 'models')) ensure_path_exists(os.path.join(project_path, 'data')) ensure_path_exists(os.path.join(project_path, 'themes')) ensure_path_exists(os.path.join(project_path, 'templates')) ensure_path_exists(os.path.join(project_path, 'templatetags')) ensure_path_exists(os.path.join(project_path, 'views')) ensure_path_exists(os.path.join(project_path, 'assets'))
python
def generate_quickstart(project_path): """Generates all of the basic paths for a Statik project within the given project path. If the project path doesn't exist, it will be created.""" ensure_path_exists(project_path) ensure_file_exists(os.path.join(project_path, "config.yml"), DEFAULT_CONFIG_CONTENT) ensure_path_exists(os.path.join(project_path, 'models')) ensure_path_exists(os.path.join(project_path, 'data')) ensure_path_exists(os.path.join(project_path, 'themes')) ensure_path_exists(os.path.join(project_path, 'templates')) ensure_path_exists(os.path.join(project_path, 'templatetags')) ensure_path_exists(os.path.join(project_path, 'views')) ensure_path_exists(os.path.join(project_path, 'assets'))
[ "def", "generate_quickstart", "(", "project_path", ")", ":", "ensure_path_exists", "(", "project_path", ")", "ensure_file_exists", "(", "os", ".", "path", ".", "join", "(", "project_path", ",", "\"config.yml\"", ")", ",", "DEFAULT_CONFIG_CONTENT", ")", "ensure_path_exists", "(", "os", ".", "path", ".", "join", "(", "project_path", ",", "'models'", ")", ")", "ensure_path_exists", "(", "os", ".", "path", ".", "join", "(", "project_path", ",", "'data'", ")", ")", "ensure_path_exists", "(", "os", ".", "path", ".", "join", "(", "project_path", ",", "'themes'", ")", ")", "ensure_path_exists", "(", "os", ".", "path", ".", "join", "(", "project_path", ",", "'templates'", ")", ")", "ensure_path_exists", "(", "os", ".", "path", ".", "join", "(", "project_path", ",", "'templatetags'", ")", ")", "ensure_path_exists", "(", "os", ".", "path", ".", "join", "(", "project_path", ",", "'views'", ")", ")", "ensure_path_exists", "(", "os", ".", "path", ".", "join", "(", "project_path", ",", "'assets'", ")", ")" ]
Generates all of the basic paths for a Statik project within the given project path. If the project path doesn't exist, it will be created.
[ "Generates", "all", "of", "the", "basic", "paths", "for", "a", "Statik", "project", "within", "the", "given", "project", "path", ".", "If", "the", "project", "path", "doesn", "t", "exist", "it", "will", "be", "created", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/utils.py#L208-L219
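Usage is a single call, assuming the helper is importable from statik.utils; the project name is illustrative:

from statik.utils import generate_quickstart

generate_quickstart("my-site")
# creates my-site/config.yml (with default contents) plus empty models/,
# data/, themes/, templates/, templatetags/, views/ and assets/ directories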
13,422
thanethomson/statik
statik/utils.py
get_project_config_file
def get_project_config_file(path, default_config_file_name): """Attempts to extract the project config file's absolute path from the given path. If the path is a directory, it automatically assumes a "config.yml" file will be in that directory. If the path is to a .yml file, it assumes that that is the root configuration file for the project.""" _path, _config_file_path = None, None path = os.path.abspath(path) if os.path.isdir(path): _path = path # use the default config file _config_file_path = os.path.join(_path, default_config_file_name) logger.debug("Using default project configuration file path: %s", _config_file_path) elif path.endswith(".yml"): _path = os.path.dirname(path) _config_file_path = path logger.debug("Using custom project configuration file path: %s", _config_file_path) return _path, _config_file_path
python
def get_project_config_file(path, default_config_file_name): """Attempts to extract the project config file's absolute path from the given path. If the path is a directory, it automatically assumes a "config.yml" file will be in that directory. If the path is to a .yml file, it assumes that that is the root configuration file for the project.""" _path, _config_file_path = None, None path = os.path.abspath(path) if os.path.isdir(path): _path = path # use the default config file _config_file_path = os.path.join(_path, default_config_file_name) logger.debug("Using default project configuration file path: %s", _config_file_path) elif path.endswith(".yml"): _path = os.path.dirname(path) _config_file_path = path logger.debug("Using custom project configuration file path: %s", _config_file_path) return _path, _config_file_path
[ "def", "get_project_config_file", "(", "path", ",", "default_config_file_name", ")", ":", "_path", ",", "_config_file_path", "=", "None", ",", "None", "path", "=", "os", ".", "path", ".", "abspath", "(", "path", ")", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "_path", "=", "path", "# use the default config file", "_config_file_path", "=", "os", ".", "path", ".", "join", "(", "_path", ",", "default_config_file_name", ")", "logger", ".", "debug", "(", "\"Using default project configuration file path: %s\"", ",", "_config_file_path", ")", "elif", "path", ".", "endswith", "(", "\".yml\"", ")", ":", "_path", "=", "os", ".", "path", ".", "dirname", "(", "path", ")", "_config_file_path", "=", "path", "logger", ".", "debug", "(", "\"Using custom project configuration file path: %s\"", ",", "_config_file_path", ")", "return", "_path", ",", "_config_file_path" ]
Attempts to extract the project config file's absolute path from the given path. If the path is a directory, it automatically assumes a "config.yml" file will be in that directory. If the path is to a .yml file, it assumes that that is the root configuration file for the project.
[ "Attempts", "to", "extract", "the", "project", "config", "file", "s", "absolute", "path", "from", "the", "given", "path", ".", "If", "the", "path", "is", "a", "directory", "it", "automatically", "assumes", "a", "config", ".", "yml", "file", "will", "be", "in", "that", "directory", ".", "If", "the", "path", "is", "to", "a", ".", "yml", "file", "it", "assumes", "that", "that", "is", "the", "root", "configuration", "file", "for", "the", "project", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/utils.py#L259-L276
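A sketch of the two input forms, assuming the function is importable from statik.utils; the temporary directory is created only so that the os.path.isdir check succeeds:

import os
import tempfile

from statik.utils import get_project_config_file

project_dir = tempfile.mkdtemp()

# directory input: the default config file name is appended
path, config = get_project_config_file(project_dir, "config.yml")
assert config == os.path.join(project_dir, "config.yml")

# explicit .yml input: that file itself becomes the project config
path, config = get_project_config_file(
    os.path.join(project_dir, "custom.yml"), "config.yml")
assert config.endswith("custom.yml")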
13,423
thanethomson/statik
statik/utils.py
strip_el_text
def strip_el_text(el, max_depth=0, cur_depth=0): """Recursively strips the plain text out of the given XML etree element up to the desired depth. Args: el: The etree element to scan. max_depth: The depth to which to recursively strip text (default: 0). cur_depth: The current recursive depth to which we've scanned so far. Returns: The stripped, plain text from within the element. """ # text in front of any child elements el_text = strip_str(el.text if el.text is not None else "") if cur_depth < max_depth: for child in el: el_text += " "+strip_el_text(child, max_depth=max_depth, cur_depth=cur_depth+1) else: # add the last child's tail text, if any children = list(el) if children is not None and len(children) > 0: if children[-1].tail is not None: el_text += " "+strip_str(children[-1].tail) # we skip the root element if cur_depth > 0: # in case there's any text at the end of the element if el.tail is not None: el_text += " "+strip_str(el.tail) return strip_str(el_text)
python
def strip_el_text(el, max_depth=0, cur_depth=0): """Recursively strips the plain text out of the given XML etree element up to the desired depth. Args: el: The etree element to scan. max_depth: The depth to which to recursively strip text (default: 0). cur_depth: The current recursive depth to which we've scanned so far. Returns: The stripped, plain text from within the element. """ # text in front of any child elements el_text = strip_str(el.text if el.text is not None else "") if cur_depth < max_depth: for child in el: el_text += " "+strip_el_text(child, max_depth=max_depth, cur_depth=cur_depth+1) else: # add the last child's tail text, if any children = list(el) if children is not None and len(children) > 0: if children[-1].tail is not None: el_text += " "+strip_str(children[-1].tail) # we skip the root element if cur_depth > 0: # in case there's any text at the end of the element if el.tail is not None: el_text += " "+strip_str(el.tail) return strip_str(el_text)
[ "def", "strip_el_text", "(", "el", ",", "max_depth", "=", "0", ",", "cur_depth", "=", "0", ")", ":", "# text in front of any child elements", "el_text", "=", "strip_str", "(", "el", ".", "text", "if", "el", ".", "text", "is", "not", "None", "else", "\"\"", ")", "if", "cur_depth", "<", "max_depth", ":", "for", "child", "in", "el", ":", "el_text", "+=", "\" \"", "+", "strip_el_text", "(", "child", ",", "max_depth", "=", "max_depth", ",", "cur_depth", "=", "cur_depth", "+", "1", ")", "else", ":", "# add the last child's tail text, if any", "children", "=", "list", "(", "el", ")", "if", "children", "is", "not", "None", "and", "len", "(", "children", ")", ">", "0", ":", "if", "children", "[", "-", "1", "]", ".", "tail", "is", "not", "None", ":", "el_text", "+=", "\" \"", "+", "strip_str", "(", "children", "[", "-", "1", "]", ".", "tail", ")", "# we skip the root element", "if", "cur_depth", ">", "0", ":", "# in case there's any text at the end of the element", "if", "el", ".", "tail", "is", "not", "None", ":", "el_text", "+=", "\" \"", "+", "strip_str", "(", "el", ".", "tail", ")", "return", "strip_str", "(", "el_text", ")" ]
Recursively strips the plain text out of the given XML etree element up to the desired depth. Args: el: The etree element to scan. max_depth: The depth to which to recursively strip text (default: 0). cur_depth: The current recursive depth to which we've scanned so far. Returns: The stripped, plain text from within the element.
[ "Recursively", "strips", "the", "plain", "text", "out", "of", "the", "given", "XML", "etree", "element", "up", "to", "the", "desired", "depth", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/utils.py#L303-L333
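A short sketch of how max_depth changes the result, assuming strip_el_text is importable from statik.utils and that the strip_str helper it calls normalises surrounding whitespace (which its use above implies):

from xml.etree import ElementTree

from statik.utils import strip_el_text

el = ElementTree.fromstring("<p>Hello <b>bold</b> world</p>")

# max_depth=0: child element text is skipped, but the last child's
# tail text ("world") is still appended
print(strip_el_text(el))               # "Hello world"

# max_depth=1: recurses one level, so the <b> element's text is included
print(strip_el_text(el, max_depth=1))  # "Hello bold world"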
13,424
thanethomson/statik
statik/utils.py
find_first_file_with_ext
def find_first_file_with_ext(base_paths, prefix, exts): """Runs through the given list of file extensions and returns the first file with the given base path and extension combination that actually exists. Args: base_paths: The base paths in which to search for files. prefix: The filename prefix of the file for which to search. exts: An ordered list of file extensions for which to search. Returns: On success, a 2-tuple containing the base path in which the file was found, and the extension of the file. On failure, returns (None, None). """ for base_path in base_paths: for ext in exts: filename = os.path.join(base_path, "%s%s" % (prefix, ext)) if os.path.exists(filename) and os.path.isfile(filename): logger.debug("Found first file with relevant extension: %s", filename) return base_path, ext logger.debug("No files found for prefix %s, extensions %s", prefix, ", ".join(exts)) return None, None
python
def find_first_file_with_ext(base_paths, prefix, exts): """Runs through the given list of file extensions and returns the first file with the given base path and extension combination that actually exists. Args: base_paths: The base paths in which to search for files. prefix: The filename prefix of the file for which to search. exts: An ordered list of file extensions for which to search. Returns: On success, a 2-tuple containing the base path in which the file was found, and the extension of the file. On failure, returns (None, None). """ for base_path in base_paths: for ext in exts: filename = os.path.join(base_path, "%s%s" % (prefix, ext)) if os.path.exists(filename) and os.path.isfile(filename): logger.debug("Found first file with relevant extension: %s", filename) return base_path, ext logger.debug("No files found for prefix %s, extensions %s", prefix, ", ".join(exts)) return None, None
[ "def", "find_first_file_with_ext", "(", "base_paths", ",", "prefix", ",", "exts", ")", ":", "for", "base_path", "in", "base_paths", ":", "for", "ext", "in", "exts", ":", "filename", "=", "os", ".", "path", ".", "join", "(", "base_path", ",", "\"%s%s\"", "%", "(", "prefix", ",", "ext", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "filename", ")", "and", "os", ".", "path", ".", "isfile", "(", "filename", ")", ":", "logger", ".", "debug", "(", "\"Found first file with relevant extension: %s\"", ",", "filename", ")", "return", "base_path", ",", "ext", "logger", ".", "debug", "(", "\"No files found for prefix %s, extensions %s\"", ",", "prefix", ",", "\", \"", ".", "join", "(", "exts", ")", ")", "return", "None", ",", "None" ]
Runs through the given list of file extensions and returns the first file with the given base path and extension combination that actually exists. Args: base_paths: The base paths in which to search for files. prefix: The filename prefix of the file for which to search. exts: An ordered list of file extensions for which to search. Returns: On success, a 2-tuple containing the base path in which the file was found, and the extension of the file. On failure, returns (None, None).
[ "Runs", "through", "the", "given", "list", "of", "file", "extensions", "and", "returns", "the", "first", "file", "with", "the", "given", "base", "path", "and", "extension", "combination", "that", "actually", "exists", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/utils.py#L336-L357
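A usage sketch, assuming the helper is importable from statik.utils; the extensions must carry their leading dot, since the function simply concatenates prefix and extension. Paths are illustrative:

from statik.utils import find_first_file_with_ext

base_path, ext = find_first_file_with_ext(
    ["themes/mytheme/templates", "templates"],  # directories searched in this order
    "home",
    [".jinja2", ".html"],  # extensions tried in this order within each directory
)
if base_path is None:
    print("no matching template found")
else:
    print("found home%s in %s" % (ext, base_path))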
13,425
thanethomson/statik
statik/utils.py
find_duplicates_in_array
def find_duplicates_in_array(array): """Runs through the array and returns the elements that appear more than once Args: array: The array to check for duplicates. Returns: Array of the elements that are duplicates. Returns an empty list if there are no duplicates. """ duplicates = [] non_duplicates = [] if len(array) != len(set(array)): for item in array: if item not in non_duplicates: non_duplicates.append(item) elif item in non_duplicates and item not in duplicates: duplicates.append(item) return duplicates
python
def find_duplicates_in_array(array): """Runs through the array and returns the elements that appear more than once Args: array: The array to check for duplicates. Returns: Array of the elements that are duplicates. Returns an empty list if there are no duplicates. """ duplicates = [] non_duplicates = [] if len(array) != len(set(array)): for item in array: if item not in non_duplicates: non_duplicates.append(item) elif item in non_duplicates and item not in duplicates: duplicates.append(item) return duplicates
[ "def", "find_duplicates_in_array", "(", "array", ")", ":", "duplicates", "=", "[", "]", "non_duplicates", "=", "[", "]", "if", "len", "(", "array", ")", "!=", "len", "(", "set", "(", "array", ")", ")", ":", "for", "item", "in", "array", ":", "if", "item", "not", "in", "non_duplicates", ":", "non_duplicates", ".", "append", "(", "item", ")", "elif", "item", "in", "non_duplicates", "and", "item", "not", "in", "duplicates", ":", "duplicates", ".", "append", "(", "item", ")", "return", "duplicates" ]
Runs through the array and returns the elements that appear more than once Args: array: The array to check for duplicates. Returns: Array of the elements that are duplicates. Returns an empty list if there are no duplicates.
[ "Runs", "through", "the", "array", "and", "returns", "the", "elements", "that", "appear", "more", "than", "once" ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/utils.py#L365-L386
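A couple of illustrative calls, assuming the helper is importable from statik.utils; each duplicated element is reported once, in order of its first repetition:

from statik.utils import find_duplicates_in_array

print(find_duplicates_in_array(["a", "b", "a", "c", "b", "a"]))  # ['a', 'b']
print(find_duplicates_in_array([1, 2, 3]))                       # []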
13,426
thanethomson/statik
setup.py
read_requirements
def read_requirements(filename): """ Parse a requirements file. Accepts vcs+ links, and places the URL into `DEPENDENCY_LINKS`. :return: list of str for each package """ data = [] for line in read_file(filename): line = line.strip() if not line or line.startswith('#'): continue if '+' in line[:4]: repo_link, egg_name = line.split('#egg=') if not egg_name: raise ValueError('Unknown requirement: {0}' .format(line)) DEPENDENCY_LINKS.append(line) line = egg_name data.append(line) return data
python
def read_requirements(filename): """ Parse a requirements file. Accepts vcs+ links, and places the URL into `DEPENDENCY_LINKS`. :return: list of str for each package """ data = [] for line in read_file(filename): line = line.strip() if not line or line.startswith('#'): continue if '+' in line[:4]: repo_link, egg_name = line.split('#egg=') if not egg_name: raise ValueError('Unknown requirement: {0}' .format(line)) DEPENDENCY_LINKS.append(line) line = egg_name data.append(line) return data
[ "def", "read_requirements", "(", "filename", ")", ":", "data", "=", "[", "]", "for", "line", "in", "read_file", "(", "filename", ")", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "not", "line", "or", "line", ".", "startswith", "(", "'#'", ")", ":", "continue", "if", "'+'", "in", "line", "[", ":", "4", "]", ":", "repo_link", ",", "egg_name", "=", "line", ".", "split", "(", "'#egg='", ")", "if", "not", "egg_name", ":", "raise", "ValueError", "(", "'Unknown requirement: {0}'", ".", "format", "(", "line", ")", ")", "DEPENDENCY_LINKS", ".", "append", "(", "line", ")", "line", "=", "egg_name", "data", ".", "append", "(", "line", ")", "return", "data" ]
Parse a requirements file. Accepts vcs+ links, and places the URL into `DEPENDENCY_LINKS`. :return: list of str for each package
[ "Parse", "a", "requirements", "file", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/setup.py#L22-L49
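read_file and DEPENDENCY_LINKS are module-level names in the setup.py above, so the function is not directly importable in isolation; the sketch below re-implements the same line-parsing rules on an in-memory list purely for illustration. One caveat visible in the logic as written: a VCS line without "#egg=" makes the two-way unpacking itself raise a ValueError before the explicit check is ever reached.

DEPENDENCY_LINKS = []

def parse_requirement_lines(lines):
    data = []
    for line in lines:
        line = line.strip()
        if not line or line.startswith('#'):
            continue                       # skip blanks and comments
        if '+' in line[:4]:                # VCS prefixes such as "git+" or "hg+"
            repo_link, egg_name = line.split('#egg=')
            if not egg_name:
                raise ValueError('Unknown requirement: {0}'.format(line))
            DEPENDENCY_LINKS.append(line)  # keep the full VCS URL as a dependency link
            line = egg_name                # install_requires only sees the egg name
        data.append(line)
    return data

print(parse_requirement_lines([
    "# a comment",
    "PyYAML>=3.12",
    "git+https://github.com/user/pkg.git#egg=pkg",
]))
# -> ['PyYAML>=3.12', 'pkg'], and the git URL lands in DEPENDENCY_LINKS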
13,427
thanethomson/statik
statik/models.py
StatikModel.find_additional_rels
def find_additional_rels(self, all_models): """Attempts to scan for additional relationship fields for this model based on all of the other models' structures and relationships. """ for model_name, model in iteritems(all_models): if model_name != self.name: for field_name in model.field_names: field = model.fields[field_name] # if this field type references the current model if field.field_type == self.name and field.back_populates is not None and \ (isinstance(field, StatikForeignKeyField) or isinstance(field, StatikManyToManyField)): self.additional_rels[field.back_populates] = { 'to_model': model_name, 'back_populates': field_name, 'secondary': (model_name, field.field_type) if isinstance(field, StatikManyToManyField) else None } logger.debug( 'Additional relationship %s.%s -> %s (%s)', self.name, field.back_populates, model_name, self.additional_rels[field.back_populates] )
python
def find_additional_rels(self, all_models): """Attempts to scan for additional relationship fields for this model based on all of the other models' structures and relationships. """ for model_name, model in iteritems(all_models): if model_name != self.name: for field_name in model.field_names: field = model.fields[field_name] # if this field type references the current model if field.field_type == self.name and field.back_populates is not None and \ (isinstance(field, StatikForeignKeyField) or isinstance(field, StatikManyToManyField)): self.additional_rels[field.back_populates] = { 'to_model': model_name, 'back_populates': field_name, 'secondary': (model_name, field.field_type) if isinstance(field, StatikManyToManyField) else None } logger.debug( 'Additional relationship %s.%s -> %s (%s)', self.name, field.back_populates, model_name, self.additional_rels[field.back_populates] )
[ "def", "find_additional_rels", "(", "self", ",", "all_models", ")", ":", "for", "model_name", ",", "model", "in", "iteritems", "(", "all_models", ")", ":", "if", "model_name", "!=", "self", ".", "name", ":", "for", "field_name", "in", "model", ".", "field_names", ":", "field", "=", "model", ".", "fields", "[", "field_name", "]", "# if this field type references the current model", "if", "field", ".", "field_type", "==", "self", ".", "name", "and", "field", ".", "back_populates", "is", "not", "None", "and", "(", "isinstance", "(", "field", ",", "StatikForeignKeyField", ")", "or", "isinstance", "(", "field", ",", "StatikManyToManyField", ")", ")", ":", "self", ".", "additional_rels", "[", "field", ".", "back_populates", "]", "=", "{", "'to_model'", ":", "model_name", ",", "'back_populates'", ":", "field_name", ",", "'secondary'", ":", "(", "model_name", ",", "field", ".", "field_type", ")", "if", "isinstance", "(", "field", ",", "StatikManyToManyField", ")", "else", "None", "}", "logger", ".", "debug", "(", "'Additional relationship %s.%s -> %s (%s)'", ",", "self", ".", "name", ",", "field", ".", "back_populates", ",", "model_name", ",", "self", ".", "additional_rels", "[", "field", ".", "back_populates", "]", ")" ]
Attempts to scan for additional relationship fields for this model based on all of the other models' structures and relationships.
[ "Attempts", "to", "scan", "for", "additional", "relationship", "fields", "for", "this", "model", "based", "on", "all", "of", "the", "other", "models", "structures", "and", "relationships", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/models.py#L73-L96
13,428
thanethomson/statik
statik/database.py
StatikDatabase.create_db
def create_db(self, models): """Creates the in-memory SQLite database from the model configuration.""" # first create the table definitions self.tables = dict( [ (model_name, self.create_model_table(model)) for model_name, model in iteritems(models) ] ) # now create the tables in memory logger.debug("Creating %d database table(s)...", len(self.tables)) try: self.Base.metadata.create_all(self.engine) except Exception as exc: raise StatikError( message="Failed to create in-memory data model.", orig_exc=exc ) self.load_all_model_data(models)
python
def create_db(self, models): """Creates the in-memory SQLite database from the model configuration.""" # first create the table definitions self.tables = dict( [ (model_name, self.create_model_table(model)) for model_name, model in iteritems(models) ] ) # now create the tables in memory logger.debug("Creating %d database table(s)...", len(self.tables)) try: self.Base.metadata.create_all(self.engine) except Exception as exc: raise StatikError( message="Failed to create in-memory data model.", orig_exc=exc ) self.load_all_model_data(models)
[ "def", "create_db", "(", "self", ",", "models", ")", ":", "# first create the table definitions", "self", ".", "tables", "=", "dict", "(", "[", "(", "model_name", ",", "self", ".", "create_model_table", "(", "model", ")", ")", "for", "model_name", ",", "model", "in", "iteritems", "(", "models", ")", "]", ")", "# now create the tables in memory", "logger", ".", "debug", "(", "\"Creating %d database table(s)...\"", ",", "len", "(", "self", ".", "tables", ")", ")", "try", ":", "self", ".", "Base", ".", "metadata", ".", "create_all", "(", "self", ".", "engine", ")", "except", "Exception", "as", "exc", ":", "raise", "StatikError", "(", "message", "=", "\"Failed to create in-memory data model.\"", ",", "orig_exc", "=", "exc", ")", "self", ".", "load_all_model_data", "(", "models", ")" ]
Creates the in-memory SQLite database from the model configuration.
[ "Creates", "the", "in", "-", "memory", "SQLite", "database", "from", "the", "model", "configuration", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/database.py#L106-L125
13,429
thanethomson/statik
statik/database.py
StatikDatabase.sort_models
def sort_models(self): """Sorts the database models appropriately based on their relationships so that we load our data in the appropriate order. Returns: A sorted list containing the names of the models. """ model_names = [ table.name for table in self.Base.metadata.sorted_tables if table.name in self.models ] logger.debug("Unsorted models: %s", model_names) model_count = len(model_names) swapped = True sort_round = 0 while swapped: sort_round += 1 logger.debug('Sorting round: %d (%s)', sort_round, model_names) sorted_models = [] for i in range(model_count): model = self.models[model_names[i]] # check if this model has any dependencies which haven't been taken care of in this round for foreign_model_name in model.foreign_models: if foreign_model_name not in sorted_models: sorted_models.append(foreign_model_name) if model.name not in sorted_models: sorted_models.append(model.name) # we're done here (no changes after this sorting round) if model_names == sorted_models: swapped = False model_names = sorted_models logger.debug("Sorted models: %s (%d rounds)", model_names, sort_round) return model_names
python
def sort_models(self): """Sorts the database models appropriately based on their relationships so that we load our data in the appropriate order. Returns: A sorted list containing the names of the models. """ model_names = [ table.name for table in self.Base.metadata.sorted_tables if table.name in self.models ] logger.debug("Unsorted models: %s", model_names) model_count = len(model_names) swapped = True sort_round = 0 while swapped: sort_round += 1 logger.debug('Sorting round: %d (%s)', sort_round, model_names) sorted_models = [] for i in range(model_count): model = self.models[model_names[i]] # check if this model has any dependencies which haven't been taken care of in this round for foreign_model_name in model.foreign_models: if foreign_model_name not in sorted_models: sorted_models.append(foreign_model_name) if model.name not in sorted_models: sorted_models.append(model.name) # we're done here (no changes after this sorting round) if model_names == sorted_models: swapped = False model_names = sorted_models logger.debug("Sorted models: %s (%d rounds)", model_names, sort_round) return model_names
[ "def", "sort_models", "(", "self", ")", ":", "model_names", "=", "[", "table", ".", "name", "for", "table", "in", "self", ".", "Base", ".", "metadata", ".", "sorted_tables", "if", "table", ".", "name", "in", "self", ".", "models", "]", "logger", ".", "debug", "(", "\"Unsorted models: %s\"", ",", "model_names", ")", "model_count", "=", "len", "(", "model_names", ")", "swapped", "=", "True", "sort_round", "=", "0", "while", "swapped", ":", "sort_round", "+=", "1", "logger", ".", "debug", "(", "'Sorting round: %d (%s)'", ",", "sort_round", ",", "model_names", ")", "sorted_models", "=", "[", "]", "for", "i", "in", "range", "(", "model_count", ")", ":", "model", "=", "self", ".", "models", "[", "model_names", "[", "i", "]", "]", "# check if this model has any dependencies which haven't been taken care of in this round", "for", "foreign_model_name", "in", "model", ".", "foreign_models", ":", "if", "foreign_model_name", "not", "in", "sorted_models", ":", "sorted_models", ".", "append", "(", "foreign_model_name", ")", "if", "model", ".", "name", "not", "in", "sorted_models", ":", "sorted_models", ".", "append", "(", "model", ".", "name", ")", "# we're done here (no changes after this sorting round)", "if", "model_names", "==", "sorted_models", ":", "swapped", "=", "False", "model_names", "=", "sorted_models", "logger", ".", "debug", "(", "\"Sorted models: %s (%d rounds)\"", ",", "model_names", ",", "sort_round", ")", "return", "model_names" ]
Sorts the database models appropriately based on their relationships so that we load our data in the appropriate order. Returns: A sorted list containing the names of the models.
[ "Sorts", "the", "database", "models", "appropriately", "based", "on", "their", "relationships", "so", "that", "we", "load", "our", "data", "in", "the", "appropriate", "order", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/database.py#L141-L178
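A standalone sketch of the reordering idea above using plain data (the model names are hypothetical): each pass inserts a model's dependencies ahead of it, repeating until a pass changes nothing.

foreign_models = {
    "Post": ["Author", "Tag"],  # Post references Author and Tag
    "Author": [],
    "Tag": [],
}

model_names = ["Post", "Author", "Tag"]
swapped = True
while swapped:
    sorted_models = []
    for name in model_names:
        for dep in foreign_models[name]:
            if dep not in sorted_models:
                sorted_models.append(dep)   # dependencies go in first
        if name not in sorted_models:
            sorted_models.append(name)
    swapped = model_names != sorted_models  # stop once a round is stable
    model_names = sorted_models

print(model_names)  # ['Author', 'Tag', 'Post']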
13,430
thanethomson/statik
statik/database.py
StatikDatabase.create_model_table
def create_model_table(self, model): """Creates the table for the given model. Args: model: A StatikModel instance. Returns: A SQLAlchemy model instance for the table corresponding to this particular model. """ try: return db_model_factory(self.Base, model, self.models) except Exception as exc: raise ModelError( model.name, message="failed to create in-memory table.", orig_exc=exc, context=self.error_context )
python
def create_model_table(self, model): """Creates the table for the given model. Args: model: A StatikModel instance. Returns: A SQLAlchemy model instance for the table corresponding to this particular model. """ try: return db_model_factory(self.Base, model, self.models) except Exception as exc: raise ModelError( model.name, message="failed to create in-memory table.", orig_exc=exc, context=self.error_context )
[ "def", "create_model_table", "(", "self", ",", "model", ")", ":", "try", ":", "return", "db_model_factory", "(", "self", ".", "Base", ",", "model", ",", "self", ".", "models", ")", "except", "Exception", "as", "exc", ":", "raise", "ModelError", "(", "model", ".", "name", ",", "message", "=", "\"failed to create in-memory table.\"", ",", "orig_exc", "=", "exc", ",", "context", "=", "self", ".", "error_context", ")" ]
Creates the table for the given model. Args: model: A StatikModel instance. Returns: A SQLAlchemy model instance for the table corresponding to this particular model.
[ "Creates", "the", "table", "for", "the", "given", "model", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/database.py#L180-L198
13,431
thanethomson/statik
statik/database.py
StatikDatabase.load_model_data
def load_model_data(self, path, model): """Loads the data for the specified model from the given path. """ if os.path.isdir(path): # try find a model data collection if os.path.isfile(os.path.join(path, '_all.yml')): self.load_model_data_collection(path, model) self.load_model_data_from_files(path, model) self.session.commit()
python
def load_model_data(self, path, model): """Loads the data for the specified model from the given path. """ if os.path.isdir(path): # try find a model data collection if os.path.isfile(os.path.join(path, '_all.yml')): self.load_model_data_collection(path, model) self.load_model_data_from_files(path, model) self.session.commit()
[ "def", "load_model_data", "(", "self", ",", "path", ",", "model", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "# try find a model data collection", "if", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "path", ",", "'_all.yml'", ")", ")", ":", "self", ".", "load_model_data_collection", "(", "path", ",", "model", ")", "self", ".", "load_model_data_from_files", "(", "path", ",", "model", ")", "self", ".", "session", ".", "commit", "(", ")" ]
Loads the data for the specified model from the given path.
[ "Loads", "the", "data", "for", "the", "specified", "model", "from", "the", "given", "path", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/database.py#L200-L208
13,432
thanethomson/statik
statik/database.py
StatikDatabase.query
def query(self, query, additional_locals=None, safe_mode=False): """Executes the given SQLAlchemy query string. Args: query: The SQLAlchemy ORM query (or Python code) to be executed. additional_locals: Any additional local variables to inject into the execution context when executing the query. safe_mode: Boolean value indicating whether or not to execute queries in safe mode only. If True, this only allows MLAlchemy-style queries. If False, this allows both exec() and MLAlchemy-style queries. Default: False. Returns: The result of executing the query. """ logger.debug("Attempting to execute database query: %s", query) if safe_mode and not isinstance(query, dict): raise SafetyViolationError( context=self.error_context ) if isinstance(query, dict): logger.debug("Executing query in safe mode (MLAlchemy)") return mlalchemy.parse_query(query).to_sqlalchemy(self.session, self.tables).all() else: logger.debug("Executing unsafe query (Python exec())") if additional_locals is not None: for k, v in iteritems(additional_locals): locals()[k] = v exec( compile( 'result = %s' % query.strip(), '<string>', 'exec' ), globals(), locals() ) return locals()['result']
python
def query(self, query, additional_locals=None, safe_mode=False): """Executes the given SQLAlchemy query string. Args: query: The SQLAlchemy ORM query (or Python code) to be executed. additional_locals: Any additional local variables to inject into the execution context when executing the query. safe_mode: Boolean value indicating whether or not to execute queries in safe mode only. If True, this only allows MLAlchemy-style queries. If False, this allows both exec() and MLAlchemy-style queries. Default: False. Returns: The result of executing the query. """ logger.debug("Attempting to execute database query: %s", query) if safe_mode and not isinstance(query, dict): raise SafetyViolationError( context=self.error_context ) if isinstance(query, dict): logger.debug("Executing query in safe mode (MLAlchemy)") return mlalchemy.parse_query(query).to_sqlalchemy(self.session, self.tables).all() else: logger.debug("Executing unsafe query (Python exec())") if additional_locals is not None: for k, v in iteritems(additional_locals): locals()[k] = v exec( compile( 'result = %s' % query.strip(), '<string>', 'exec' ), globals(), locals() ) return locals()['result']
[ "def", "query", "(", "self", ",", "query", ",", "additional_locals", "=", "None", ",", "safe_mode", "=", "False", ")", ":", "logger", ".", "debug", "(", "\"Attempting to execute database query: %s\"", ",", "query", ")", "if", "safe_mode", "and", "not", "isinstance", "(", "query", ",", "dict", ")", ":", "raise", "SafetyViolationError", "(", "context", "=", "self", ".", "error_context", ")", "if", "isinstance", "(", "query", ",", "dict", ")", ":", "logger", ".", "debug", "(", "\"Executing query in safe mode (MLAlchemy)\"", ")", "return", "mlalchemy", ".", "parse_query", "(", "query", ")", ".", "to_sqlalchemy", "(", "self", ".", "session", ",", "self", ".", "tables", ")", ".", "all", "(", ")", "else", ":", "logger", ".", "debug", "(", "\"Executing unsafe query (Python exec())\"", ")", "if", "additional_locals", "is", "not", "None", ":", "for", "k", ",", "v", "in", "iteritems", "(", "additional_locals", ")", ":", "locals", "(", ")", "[", "k", "]", "=", "v", "exec", "(", "compile", "(", "'result = %s'", "%", "query", ".", "strip", "(", ")", ",", "'<string>'", ",", "'exec'", ")", ",", "globals", "(", ")", ",", "locals", "(", ")", ")", "return", "locals", "(", ")", "[", "'result'", "]" ]
Executes the given SQLAlchemy query string. Args: query: The SQLAlchemy ORM query (or Python code) to be executed. additional_locals: Any additional local variables to inject into the execution context when executing the query. safe_mode: Boolean value indicating whether or not to execute queries in safe mode only. If True, this only allows MLAlchemy-style queries. If False, this allows both exec() and MLAlchemy-style queries. Default: False. Returns: The result of executing the query.
[ "Executes", "the", "given", "SQLAlchemy", "query", "string", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/database.py#L328-L367
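A usage sketch for the two query styles, wrapped in a function because it needs a live StatikDatabase; db and Post are placeholders, and the dict's key names are assumed to follow MLAlchemy's query format (worth checking against the mlalchemy docs):

def run_example_queries(db, Post):
    # safe mode: only MLAlchemy-style dict queries are accepted
    safe = db.query({"from": "Post"}, safe_mode=True)
    # unsafe mode: the string goes through exec(); extra names can be
    # supplied via additional_locals
    unsafe = db.query(
        "session.query(Post).all()",
        additional_locals={"session": db.session, "Post": Post},
    )
    return safe, unsafe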
13,433
thanethomson/statik
statik/generator.py
generate
def generate(input_path, output_path=None, in_memory=False, safe_mode=False, error_context=None): """Executes the Statik site generator using the given parameters. """ project = StatikProject(input_path, safe_mode=safe_mode, error_context=error_context) return project.generate(output_path=output_path, in_memory=in_memory)
python
def generate(input_path, output_path=None, in_memory=False, safe_mode=False, error_context=None): """Executes the Statik site generator using the given parameters. """ project = StatikProject(input_path, safe_mode=safe_mode, error_context=error_context) return project.generate(output_path=output_path, in_memory=in_memory)
[ "def", "generate", "(", "input_path", ",", "output_path", "=", "None", ",", "in_memory", "=", "False", ",", "safe_mode", "=", "False", ",", "error_context", "=", "None", ")", ":", "project", "=", "StatikProject", "(", "input_path", ",", "safe_mode", "=", "safe_mode", ",", "error_context", "=", "error_context", ")", "return", "project", ".", "generate", "(", "output_path", "=", "output_path", ",", "in_memory", "=", "in_memory", ")" ]
Executes the Statik site generator using the given parameters.
[ "Executes", "the", "Statik", "site", "generator", "using", "the", "given", "parameters", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/generator.py#L11-L15
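Typical top-level usage, assuming statik is installed; the paths are illustrative. The return value follows StatikProject.generate: a file count when writing to disk, or a nested dict when in_memory=True:

from statik.generator import generate

file_count = generate("mysite", output_path="mysite/public")
print("wrote %d file(s)" % file_count)

pages = generate("mysite", in_memory=True)  # nested dict of rendered output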
13,434
thanethomson/statik
statik/project.py
StatikProject.generate
def generate(self, output_path=None, in_memory=False): """Executes the Statik project generator. Args: output_path: The path to which to write output files. in_memory: Whether or not to generate the results in memory. If True, this will generate the output result as a dictionary. If False, this will write the output to files in the output_path. Returns: If in_memory is True, this returns a dictionary containing the actual generated static content. If in_memory is False, this returns an integer indicating the number of files generated in the output path. """ result = dict() if in_memory else 0 logger.info("Generating Statik build...") try: if output_path is None and not in_memory: raise InternalError( "If project is not to be generated in-memory, an output path must be specified" ) self.error_context.update(filename=self.config_file_path) self.config = self.config or StatikConfig(self.config_file_path) if self.config.encoding is not None: logger.debug("Using encoding: %s", self.config.encoding) else: logger.debug("Using encoding: %s", self.config.encoding) self.error_context.clear() self.models = self.load_models() self.template_engine = StatikTemplateEngine(self) if self.config.external_database is not None: self.config.external_database.write_files(output_path, self.models) self.views = self.load_views() if not self.views: raise NoViewsError() self.db = self.load_db_data(self.models) self.project_context = self.load_project_context() in_memory_result = self.process_views() if in_memory: result = in_memory_result else: # dump the in-memory output to files file_count = self.dump_in_memory_result(in_memory_result, output_path) logger.info('Wrote %d output file(s) to folder: %s', file_count, output_path) # copy any assets across, recursively self.copy_assets(output_path) result = file_count logger.info("Success!") except StatikError as exc: logger.debug(traceback.format_exc()) logger.error(exc.render()) # re-raise the error to stop execution raise exc except Exception as exc: logger.debug(traceback.format_exc()) _exc = StatikError( message="Failed to build project. Run Statik in verbose mode (-v) to see " + "additional traceback information about this error.", orig_exc=exc, context=self.error_context ) logger.error(_exc.render()) raise _exc finally: try: # make sure to destroy the database engine (to provide for the possibility of # database engine reloads when watching for changes) if self.db is not None: self.db.shutdown() except Exception as e: logger.exception("Unable to clean up properly: %s", e) return result
python
def generate(self, output_path=None, in_memory=False): """Executes the Statik project generator. Args: output_path: The path to which to write output files. in_memory: Whether or not to generate the results in memory. If True, this will generate the output result as a dictionary. If False, this will write the output to files in the output_path. Returns: If in_memory is True, this returns a dictionary containing the actual generated static content. If in_memory is False, this returns an integer indicating the number of files generated in the output path. """ result = dict() if in_memory else 0 logger.info("Generating Statik build...") try: if output_path is None and not in_memory: raise InternalError( "If project is not to be generated in-memory, an output path must be specified" ) self.error_context.update(filename=self.config_file_path) self.config = self.config or StatikConfig(self.config_file_path) if self.config.encoding is not None: logger.debug("Using encoding: %s", self.config.encoding) else: logger.debug("Using encoding: %s", self.config.encoding) self.error_context.clear() self.models = self.load_models() self.template_engine = StatikTemplateEngine(self) if self.config.external_database is not None: self.config.external_database.write_files(output_path, self.models) self.views = self.load_views() if not self.views: raise NoViewsError() self.db = self.load_db_data(self.models) self.project_context = self.load_project_context() in_memory_result = self.process_views() if in_memory: result = in_memory_result else: # dump the in-memory output to files file_count = self.dump_in_memory_result(in_memory_result, output_path) logger.info('Wrote %d output file(s) to folder: %s', file_count, output_path) # copy any assets across, recursively self.copy_assets(output_path) result = file_count logger.info("Success!") except StatikError as exc: logger.debug(traceback.format_exc()) logger.error(exc.render()) # re-raise the error to stop execution raise exc except Exception as exc: logger.debug(traceback.format_exc()) _exc = StatikError( message="Failed to build project. Run Statik in verbose mode (-v) to see " + "additional traceback information about this error.", orig_exc=exc, context=self.error_context ) logger.error(_exc.render()) raise _exc finally: try: # make sure to destroy the database engine (to provide for the possibility of # database engine reloads when watching for changes) if self.db is not None: self.db.shutdown() except Exception as e: logger.exception("Unable to clean up properly: %s", e) return result
[ "def", "generate", "(", "self", ",", "output_path", "=", "None", ",", "in_memory", "=", "False", ")", ":", "result", "=", "dict", "(", ")", "if", "in_memory", "else", "0", "logger", ".", "info", "(", "\"Generating Statik build...\"", ")", "try", ":", "if", "output_path", "is", "None", "and", "not", "in_memory", ":", "raise", "InternalError", "(", "\"If project is not to be generated in-memory, an output path must be specified\"", ")", "self", ".", "error_context", ".", "update", "(", "filename", "=", "self", ".", "config_file_path", ")", "self", ".", "config", "=", "self", ".", "config", "or", "StatikConfig", "(", "self", ".", "config_file_path", ")", "if", "self", ".", "config", ".", "encoding", "is", "not", "None", ":", "logger", ".", "debug", "(", "\"Using encoding: %s\"", ",", "self", ".", "config", ".", "encoding", ")", "else", ":", "logger", ".", "debug", "(", "\"Using encoding: %s\"", ",", "self", ".", "config", ".", "encoding", ")", "self", ".", "error_context", ".", "clear", "(", ")", "self", ".", "models", "=", "self", ".", "load_models", "(", ")", "self", ".", "template_engine", "=", "StatikTemplateEngine", "(", "self", ")", "if", "self", ".", "config", ".", "external_database", "is", "not", "None", ":", "self", ".", "config", ".", "external_database", ".", "write_files", "(", "output_path", ",", "self", ".", "models", ")", "self", ".", "views", "=", "self", ".", "load_views", "(", ")", "if", "not", "self", ".", "views", ":", "raise", "NoViewsError", "(", ")", "self", ".", "db", "=", "self", ".", "load_db_data", "(", "self", ".", "models", ")", "self", ".", "project_context", "=", "self", ".", "load_project_context", "(", ")", "in_memory_result", "=", "self", ".", "process_views", "(", ")", "if", "in_memory", ":", "result", "=", "in_memory_result", "else", ":", "# dump the in-memory output to files", "file_count", "=", "self", ".", "dump_in_memory_result", "(", "in_memory_result", ",", "output_path", ")", "logger", ".", "info", "(", "'Wrote %d output file(s) to folder: %s'", ",", "file_count", ",", "output_path", ")", "# copy any assets across, recursively", "self", ".", "copy_assets", "(", "output_path", ")", "result", "=", "file_count", "logger", ".", "info", "(", "\"Success!\"", ")", "except", "StatikError", "as", "exc", ":", "logger", ".", "debug", "(", "traceback", ".", "format_exc", "(", ")", ")", "logger", ".", "error", "(", "exc", ".", "render", "(", ")", ")", "# re-raise the error to stop execution", "raise", "exc", "except", "Exception", "as", "exc", ":", "logger", ".", "debug", "(", "traceback", ".", "format_exc", "(", ")", ")", "_exc", "=", "StatikError", "(", "message", "=", "\"Failed to build project. Run Statik in verbose mode (-v) to see \"", "+", "\"additional traceback information about this error.\"", ",", "orig_exc", "=", "exc", ",", "context", "=", "self", ".", "error_context", ")", "logger", ".", "error", "(", "_exc", ".", "render", "(", ")", ")", "raise", "_exc", "finally", ":", "try", ":", "# make sure to destroy the database engine (to provide for the possibility of", "# database engine reloads when watching for changes)", "if", "self", ".", "db", "is", "not", "None", ":", "self", ".", "db", ".", "shutdown", "(", ")", "except", "Exception", "as", "e", ":", "logger", ".", "exception", "(", "\"Unable to clean up properly: %s\"", ",", "e", ")", "return", "result" ]
Executes the Statik project generator. Args: output_path: The path to which to write output files. in_memory: Whether or not to generate the results in memory. If True, this will generate the output result as a dictionary. If False, this will write the output to files in the output_path. Returns: If in_memory is True, this returns a dictionary containing the actual generated static content. If in_memory is False, this returns an integer indicating the number of files generated in the output path.
[ "Executes", "the", "Statik", "project", "generator", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/project.py#L71-L156
13,435
thanethomson/statik
statik/project.py
StatikProject.load_views
def load_views(self): """Loads the views for this project from the project directory structure.""" view_path = os.path.join(self.path, StatikProject.VIEWS_DIR) logger.debug("Loading views from: %s", view_path) if not os.path.isdir(view_path): raise MissingProjectFolderError(StatikProject.VIEWS_DIR) view_files = list_files(view_path, ['yml', 'yaml']) logger.debug("Found %d view(s) in project", len(view_files)) views = {} for view_file in view_files: view_name = extract_filename(view_file) views[view_name] = StatikView( filename=os.path.join(view_path, view_file), encoding=self.config.encoding, name=view_name, models=self.models, template_engine=self.template_engine, error_context=self.error_context ) return views
python
def load_views(self): """Loads the views for this project from the project directory structure.""" view_path = os.path.join(self.path, StatikProject.VIEWS_DIR) logger.debug("Loading views from: %s", view_path) if not os.path.isdir(view_path): raise MissingProjectFolderError(StatikProject.VIEWS_DIR) view_files = list_files(view_path, ['yml', 'yaml']) logger.debug("Found %d view(s) in project", len(view_files)) views = {} for view_file in view_files: view_name = extract_filename(view_file) views[view_name] = StatikView( filename=os.path.join(view_path, view_file), encoding=self.config.encoding, name=view_name, models=self.models, template_engine=self.template_engine, error_context=self.error_context ) return views
[ "def", "load_views", "(", "self", ")", ":", "view_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "path", ",", "StatikProject", ".", "VIEWS_DIR", ")", "logger", ".", "debug", "(", "\"Loading views from: %s\"", ",", "view_path", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "view_path", ")", ":", "raise", "MissingProjectFolderError", "(", "StatikProject", ".", "VIEWS_DIR", ")", "view_files", "=", "list_files", "(", "view_path", ",", "[", "'yml'", ",", "'yaml'", "]", ")", "logger", ".", "debug", "(", "\"Found %d view(s) in project\"", ",", "len", "(", "view_files", ")", ")", "views", "=", "{", "}", "for", "view_file", "in", "view_files", ":", "view_name", "=", "extract_filename", "(", "view_file", ")", "views", "[", "view_name", "]", "=", "StatikView", "(", "filename", "=", "os", ".", "path", ".", "join", "(", "view_path", ",", "view_file", ")", ",", "encoding", "=", "self", ".", "config", ".", "encoding", ",", "name", "=", "view_name", ",", "models", "=", "self", ".", "models", ",", "template_engine", "=", "self", ".", "template_engine", ",", "error_context", "=", "self", ".", "error_context", ")", "return", "views" ]
Loads the views for this project from the project directory structure.
[ "Loads", "the", "views", "for", "this", "project", "from", "the", "project", "directory", "structure", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/project.py#L181-L203
13,436
thanethomson/statik
statik/project.py
StatikProject.process_views
def process_views(self): """Processes the loaded views to generate the required output data.""" output = {} logger.debug("Processing %d view(s)...", len(self.views)) for view_name, view in iteritems(self.views): try: output = deep_merge_dict( output, view.process( self.db, safe_mode=self.safe_mode, extra_context=self.project_context ) ) except StatikError as exc: # just re-raise it raise exc except Exception as exc: # for unhandled view-related exceptions, raise our own exception raise ViewError( message="Failed to render view \"%s\"." % view_name, orig_exc=exc ) return output
python
def process_views(self): """Processes the loaded views to generate the required output data.""" output = {} logger.debug("Processing %d view(s)...", len(self.views)) for view_name, view in iteritems(self.views): try: output = deep_merge_dict( output, view.process( self.db, safe_mode=self.safe_mode, extra_context=self.project_context ) ) except StatikError as exc: # just re-raise it raise exc except Exception as exc: # for unhandled view-related exceptions, raise our own exception raise ViewError( message="Failed to render view \"%s\"." % view_name, orig_exc=exc ) return output
[ "def", "process_views", "(", "self", ")", ":", "output", "=", "{", "}", "logger", ".", "debug", "(", "\"Processing %d view(s)...\"", ",", "len", "(", "self", ".", "views", ")", ")", "for", "view_name", ",", "view", "in", "iteritems", "(", "self", ".", "views", ")", ":", "try", ":", "output", "=", "deep_merge_dict", "(", "output", ",", "view", ".", "process", "(", "self", ".", "db", ",", "safe_mode", "=", "self", ".", "safe_mode", ",", "extra_context", "=", "self", ".", "project_context", ")", ")", "except", "StatikError", "as", "exc", ":", "# just re-raise it", "raise", "exc", "except", "Exception", "as", "exc", ":", "# for unhandled view-related exceptions, raise our own exception", "raise", "ViewError", "(", "message", "=", "\"Failed to render view \\\"%s\\\".\"", "%", "view_name", ",", "orig_exc", "=", "exc", ")", "return", "output" ]
Processes the loaded views to generate the required output data.
[ "Processes", "the", "loaded", "views", "to", "generate", "the", "required", "output", "data", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/project.py#L243-L268
13,437
thanethomson/statik
statik/project.py
StatikProject.dump_in_memory_result
def dump_in_memory_result(self, result, output_path): """Recursively dumps the result of our processing into files within the given output path. Args: result: The in-memory result of our processing. output_path: Full path to the folder into which to dump the files. Returns: The number of files generated (integer). """ file_count = 0 logger.debug("Dumping in-memory processing results to output folder: %s", output_path) for k, v in iteritems(result): cur_output_path = os.path.join(output_path, k) if isinstance(v, dict): file_count += self.dump_in_memory_result(v, cur_output_path) else: if not os.path.isdir(output_path): os.makedirs(output_path) filename = os.path.join(output_path, k) logger.debug("Writing output file: %s", filename) # dump the contents of the file with open(filename, 'wt', encoding=self.config.encoding) as f: f.write(v) file_count += 1 return file_count
python
def dump_in_memory_result(self, result, output_path): """Recursively dumps the result of our processing into files within the given output path. Args: result: The in-memory result of our processing. output_path: Full path to the folder into which to dump the files. Returns: The number of files generated (integer). """ file_count = 0 logger.debug("Dumping in-memory processing results to output folder: %s", output_path) for k, v in iteritems(result): cur_output_path = os.path.join(output_path, k) if isinstance(v, dict): file_count += self.dump_in_memory_result(v, cur_output_path) else: if not os.path.isdir(output_path): os.makedirs(output_path) filename = os.path.join(output_path, k) logger.debug("Writing output file: %s", filename) # dump the contents of the file with open(filename, 'wt', encoding=self.config.encoding) as f: f.write(v) file_count += 1 return file_count
[ "def", "dump_in_memory_result", "(", "self", ",", "result", ",", "output_path", ")", ":", "file_count", "=", "0", "logger", ".", "debug", "(", "\"Dumping in-memory processing results to output folder: %s\"", ",", "output_path", ")", "for", "k", ",", "v", "in", "iteritems", "(", "result", ")", ":", "cur_output_path", "=", "os", ".", "path", ".", "join", "(", "output_path", ",", "k", ")", "if", "isinstance", "(", "v", ",", "dict", ")", ":", "file_count", "+=", "self", ".", "dump_in_memory_result", "(", "v", ",", "cur_output_path", ")", "else", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "output_path", ")", ":", "os", ".", "makedirs", "(", "output_path", ")", "filename", "=", "os", ".", "path", ".", "join", "(", "output_path", ",", "k", ")", "logger", ".", "debug", "(", "\"Writing output file: %s\"", ",", "filename", ")", "# dump the contents of the file", "with", "open", "(", "filename", ",", "'wt'", ",", "encoding", "=", "self", ".", "config", ".", "encoding", ")", "as", "f", ":", "f", ".", "write", "(", "v", ")", "file_count", "+=", "1", "return", "file_count" ]
Recursively dumps the result of our processing into files within the given output path. Args: result: The in-memory result of our processing. output_path: Full path to the folder into which to dump the files. Returns: The number of files generated (integer).
[ "Recursively", "dumps", "the", "result", "of", "our", "processing", "into", "files", "within", "the", "given", "output", "path", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/project.py#L270-L300
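For reference, a sketch of the nested structure this method consumes (names illustrative): dict keys become path components and string leaves become file contents.

result = {
    "index.html": "<html>home</html>",
    "posts": {
        "first-post": {
            "index.html": "<html>post</html>",
        },
    },
}
# dump_in_memory_result(result, "public") would write
#   public/index.html
#   public/posts/first-post/index.html
# and return 2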
13,438
thanethomson/statik
statik/project.py
StatikProject.copy_assets
def copy_assets(self, output_path): """Copies all asset files from the source path to the destination path. If no such source path exists, no asset copying will be performed. """ src_paths = [] # if we have a theme if self.config.theme is not None: # assume it's in the folder: "themes/theme_name/assets" src_paths.append(os.path.join( self.path, StatikProject.THEMES_DIR, self.config.theme, StatikProject.ASSETS_DIR )) # NOTE: Adding the theme's assets directory *before* the project's internal assets # directory always ensures that the project's own assets are copied *after* the # theme's, thereby ensuring that the project's assets folder takes precedence # over the theme's. # always attempt to copy from our base assets folder if os.path.isabs(self.config.assets_src_path): src_paths.append(self.config.assets_src_path) else: src_paths.append(os.path.join(self.path, self.config.assets_src_path)) for src_path in src_paths: if os.path.exists(src_path) and os.path.isdir(src_path): dest_path = self.config.assets_dest_path if not os.path.isabs(dest_path): dest_path = os.path.join(output_path, dest_path) asset_count = copy_tree(src_path, dest_path) logger.info("Copied %s asset(s) from %s to %s", asset_count, src_path, dest_path) else: logger.info( "Missing assets source path - skipping copying of assets: %s", src_path )
python
def copy_assets(self, output_path): """Copies all asset files from the source path to the destination path. If no such source path exists, no asset copying will be performed. """ src_paths = [] # if we have a theme if self.config.theme is not None: # assume it's in the folder: "themes/theme_name/assets" src_paths.append(os.path.join( self.path, StatikProject.THEMES_DIR, self.config.theme, StatikProject.ASSETS_DIR )) # NOTE: Adding the theme's assets directory *before* the project's internal assets # directory always ensures that the project's own assets are copied *after* the # theme's, thereby ensuring that the project's assets folder takes precedence # over the theme's. # always attempt to copy from our base assets folder if os.path.isabs(self.config.assets_src_path): src_paths.append(self.config.assets_src_path) else: src_paths.append(os.path.join(self.path, self.config.assets_src_path)) for src_path in src_paths: if os.path.exists(src_path) and os.path.isdir(src_path): dest_path = self.config.assets_dest_path if not os.path.isabs(dest_path): dest_path = os.path.join(output_path, dest_path) asset_count = copy_tree(src_path, dest_path) logger.info("Copied %s asset(s) from %s to %s", asset_count, src_path, dest_path) else: logger.info( "Missing assets source path - skipping copying of assets: %s", src_path )
[ "def", "copy_assets", "(", "self", ",", "output_path", ")", ":", "src_paths", "=", "[", "]", "# if we have a theme", "if", "self", ".", "config", ".", "theme", "is", "not", "None", ":", "# assume it's in the folder: \"themes/theme_name/assets\"", "src_paths", ".", "append", "(", "os", ".", "path", ".", "join", "(", "self", ".", "path", ",", "StatikProject", ".", "THEMES_DIR", ",", "self", ".", "config", ".", "theme", ",", "StatikProject", ".", "ASSETS_DIR", ")", ")", "# NOTE: Adding the theme's assets directory *before* the project's internal assets", "# directory always ensures that the project's own assets are copied *after* the", "# theme's, thereby ensuring that the project's assets folder takes precedence", "# over the theme's.", "# always attempt to copy from our base assets folder", "if", "os", ".", "path", ".", "isabs", "(", "self", ".", "config", ".", "assets_src_path", ")", ":", "src_paths", ".", "append", "(", "self", ".", "config", ".", "assets_src_path", ")", "else", ":", "src_paths", ".", "append", "(", "os", ".", "path", ".", "join", "(", "self", ".", "path", ",", "self", ".", "config", ".", "assets_src_path", ")", ")", "for", "src_path", "in", "src_paths", ":", "if", "os", ".", "path", ".", "exists", "(", "src_path", ")", "and", "os", ".", "path", ".", "isdir", "(", "src_path", ")", ":", "dest_path", "=", "self", ".", "config", ".", "assets_dest_path", "if", "not", "os", ".", "path", ".", "isabs", "(", "dest_path", ")", ":", "dest_path", "=", "os", ".", "path", ".", "join", "(", "output_path", ",", "dest_path", ")", "asset_count", "=", "copy_tree", "(", "src_path", ",", "dest_path", ")", "logger", ".", "info", "(", "\"Copied %s asset(s) from %s to %s\"", ",", "asset_count", ",", "src_path", ",", "dest_path", ")", "else", ":", "logger", ".", "info", "(", "\"Missing assets source path - skipping copying of assets: %s\"", ",", "src_path", ")" ]
Copies all asset files from the source path to the destination path. If no such source path exists, no asset copying will be performed.
[ "Copies", "all", "asset", "files", "from", "the", "source", "path", "to", "the", "destination", "path", ".", "If", "no", "such", "source", "path", "exists", "no", "asset", "copying", "will", "be", "performed", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/project.py#L302-L340
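A note on the precedence mechanism in copy_assets above: it relies purely on copy order, since copy_tree overwrites existing files. A sketch of the resulting source list (paths hypothetical, assuming a theme named mytheme):

src_paths = [
    'myproject/themes/mytheme/assets',  # theme assets copied first
    'myproject/assets',                 # project assets copied second, so they
]                                       # overwrite the theme's on any collision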
13,439
thanethomson/statik
statik/autogen.py
autogen
def autogen(project_path): """Autogenerates views and templates for all the models in the project.""" generate_quickstart(project_path) project = StatikProject(project_path) project.config = StatikConfig(project.config_file_path) models = list(project.load_models().values()) logger.info('Creating view and template for home page (index.html).') generate_yaml_file(os.path.join(project_path, StatikProject.VIEWS_DIR, 'index.yaml'), { 'path': '/', 'template': 'index' } ) generate_index_file(os.path.join(project_path, StatikProject.TEMPLATES_DIR, 'index.jinja2')) for model in models: logger.info('Creating view and template for model: %s' % model.name) generate_yaml_file(os.path.join(project_path, StatikProject.VIEWS_DIR, '%s.yaml' % model.name), { 'path': { 'template': '/%s/{{ %s.pk }}' % (model.name, model.name), 'for-each': { '%s' % model.name: 'session.query(%s).all()' % model.name } }, 'template': ('%s' % model.name), } ) generate_model_file(os.path.join(project_path, StatikProject.TEMPLATES_DIR, '%s.jinja2' % model.name), project, model, model.fields.values())
python
def autogen(project_path): """Autogenerates views and templates for all the models in the project.""" generate_quickstart(project_path) project = StatikProject(project_path) project.config = StatikConfig(project.config_file_path) models = list(project.load_models().values()) logger.info('Creating view and template for home page (index.html).') generate_yaml_file(os.path.join(project_path, StatikProject.VIEWS_DIR, 'index.yaml'), { 'path': '/', 'template': 'index' } ) generate_index_file(os.path.join(project_path, StatikProject.TEMPLATES_DIR, 'index.jinja2')) for model in models: logger.info('Creating view and template for model: %s' % model.name) generate_yaml_file(os.path.join(project_path, StatikProject.VIEWS_DIR, '%s.yaml' % model.name), { 'path': { 'template': '/%s/{{ %s.pk }}' % (model.name, model.name), 'for-each': { '%s' % model.name: 'session.query(%s).all()' % model.name } }, 'template': ('%s' % model.name), } ) generate_model_file(os.path.join(project_path, StatikProject.TEMPLATES_DIR, '%s.jinja2' % model.name), project, model, model.fields.values())
[ "def", "autogen", "(", "project_path", ")", ":", "generate_quickstart", "(", "project_path", ")", "project", "=", "StatikProject", "(", "project_path", ")", "project", ".", "config", "=", "StatikConfig", "(", "project", ".", "config_file_path", ")", "models", "=", "list", "(", "project", ".", "load_models", "(", ")", ".", "values", "(", ")", ")", "logger", ".", "info", "(", "'Creating view and template for home page (index.html).'", ")", "generate_yaml_file", "(", "os", ".", "path", ".", "join", "(", "project_path", ",", "StatikProject", ".", "VIEWS_DIR", ",", "'index.yaml'", ")", ",", "{", "'path'", ":", "'/'", ",", "'template'", ":", "'index'", "}", ")", "generate_index_file", "(", "os", ".", "path", ".", "join", "(", "project_path", ",", "StatikProject", ".", "TEMPLATES_DIR", ",", "'index.jinja2'", ")", ")", "for", "model", "in", "models", ":", "logger", ".", "info", "(", "'Creating view and template for model: %s'", "%", "model", ".", "name", ")", "generate_yaml_file", "(", "os", ".", "path", ".", "join", "(", "project_path", ",", "StatikProject", ".", "VIEWS_DIR", ",", "'%s.yaml'", "%", "model", ".", "name", ")", ",", "{", "'path'", ":", "{", "'template'", ":", "'/%s/{{ %s.pk }}'", "%", "(", "model", ".", "name", ",", "model", ".", "name", ")", ",", "'for-each'", ":", "{", "'%s'", "%", "model", ".", "name", ":", "'session.query(%s).all()'", "%", "model", ".", "name", "}", "}", ",", "'template'", ":", "(", "'%s'", "%", "model", ".", "name", ")", ",", "}", ")", "generate_model_file", "(", "os", ".", "path", ".", "join", "(", "project_path", ",", "StatikProject", ".", "TEMPLATES_DIR", ",", "'%s.jinja2'", "%", "model", ".", "name", ")", ",", "project", ",", "model", ",", "model", ".", "fields", ".", "values", "(", ")", ")" ]
Autogenerates views and templates for all the models in the project.
[ "Autogenerates", "views", "and", "templates", "for", "all", "the", "models", "in", "the", "project", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/autogen.py#L17-L51
13,440
thanethomson/statik
statik/autogen.py
generate_yaml_file
def generate_yaml_file(filename, contents): """Creates a yaml file with the given content.""" with open(filename, 'w') as file: file.write(yaml.dump(contents, default_flow_style=False))
python
def generate_yaml_file(filename, contents): """Creates a yaml file with the given content.""" with open(filename, 'w') as file: file.write(yaml.dump(contents, default_flow_style=False))
[ "def", "generate_yaml_file", "(", "filename", ",", "contents", ")", ":", "with", "open", "(", "filename", ",", "'w'", ")", "as", "file", ":", "file", ".", "write", "(", "yaml", ".", "dump", "(", "contents", ",", "default_flow_style", "=", "False", ")", ")" ]
Creates a yaml file with the given content.
[ "Creates", "a", "yaml", "file", "with", "the", "given", "content", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/autogen.py#L54-L57
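A minimal usage sketch for generate_yaml_file, shaped like the per-model view that autogen writes; it assumes PyYAML is installed, and the model name Post is hypothetical.

import yaml  # PyYAML, the dependency generate_yaml_file relies on

def generate_yaml_file(filename, contents):
    # same body as the record above
    with open(filename, 'w') as file:
        file.write(yaml.dump(contents, default_flow_style=False))

# Hypothetical per-model view definition, mirroring what autogen emits
generate_yaml_file('Post.yaml', {
    'path': {
        'template': '/Post/{{ Post.pk }}',
        'for-each': {'Post': 'session.query(Post).all()'},
    },
    'template': 'Post',
})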
13,441
thanethomson/statik
statik/autogen.py
generate_index_file
def generate_index_file(filename): """Constructs a default home page for the project.""" with open(filename, 'w') as file: content = open(os.path.join(os.path.dirname(__file__), 'templates/index_page.html'), 'r').read() file.write(content)
python
def generate_index_file(filename): """Constructs a default home page for the project.""" with open(filename, 'w') as file: content = open(os.path.join(os.path.dirname(__file__), 'templates/index_page.html'), 'r').read() file.write(content)
[ "def", "generate_index_file", "(", "filename", ")", ":", "with", "open", "(", "filename", ",", "'w'", ")", "as", "file", ":", "content", "=", "open", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "'templates/index_page.html'", ")", ",", "'r'", ")", ".", "read", "(", ")", "file", ".", "write", "(", "content", ")" ]
Constructs a default home page for the project.
[ "Constructs", "a", "default", "home", "page", "for", "the", "project", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/autogen.py#L60-L64
13,442
thanethomson/statik
statik/autogen.py
generate_model_file
def generate_model_file(filename, project, model, fields): """Creates a webpage for a given instance of a model.""" for field in fields: field.type = field.__class__.__name__ content = open(os.path.join(os.path.dirname(__file__), 'templates/model_page.html'), 'r').read() engine = StatikTemplateEngine(project) template = engine.create_template(content) # create context and update from project.config context = {'model': model, 'fields': fields} context.update(dict(project.config.context_static)) string = template.render(context) with open(filename, 'w') as file: file.write(string)
python
def generate_model_file(filename, project, model, fields): """Creates a webpage for a given instance of a model.""" for field in fields: field.type = field.__class__.__name__ content = open(os.path.join(os.path.dirname(__file__), 'templates/model_page.html'), 'r').read() engine = StatikTemplateEngine(project) template = engine.create_template(content) # create context and update from project.config context = {'model': model, 'fields': fields} context.update(dict(project.config.context_static)) string = template.render(context) with open(filename, 'w') as file: file.write(string)
[ "def", "generate_model_file", "(", "filename", ",", "project", ",", "model", ",", "fields", ")", ":", "for", "field", "in", "fields", ":", "field", ".", "type", "=", "field", ".", "__class__", ".", "__name__", "content", "=", "open", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "'templates/model_page.html'", ")", ",", "'r'", ")", ".", "read", "(", ")", "engine", "=", "StatikTemplateEngine", "(", "project", ")", "template", "=", "engine", ".", "create_template", "(", "content", ")", "# create context and update from project.config", "context", "=", "{", "'model'", ":", "model", ",", "'fields'", ":", "fields", "}", "context", ".", "update", "(", "dict", "(", "project", ".", "config", ".", "context_static", ")", ")", "string", "=", "template", ".", "render", "(", "context", ")", "with", "open", "(", "filename", ",", "'w'", ")", "as", "file", ":", "file", ".", "write", "(", "string", ")" ]
Creates a webpage for a given instance of a model.
[ "Creates", "a", "webpage", "for", "a", "given", "instance", "of", "a", "model", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/autogen.py#L67-L84
13,443
thanethomson/statik
statik/context.py
StatikContext.build_dynamic
def build_dynamic(self, db, extra=None, safe_mode=False): """Builds the dynamic context based on our current dynamic context entity and the given database.""" result = dict() for var, query in iteritems(self.dynamic): result[var] = db.query(query, safe_mode=safe_mode, additional_locals=extra) return result
python
def build_dynamic(self, db, extra=None, safe_mode=False): """Builds the dynamic context based on our current dynamic context entity and the given database.""" result = dict() for var, query in iteritems(self.dynamic): result[var] = db.query(query, safe_mode=safe_mode, additional_locals=extra) return result
[ "def", "build_dynamic", "(", "self", ",", "db", ",", "extra", "=", "None", ",", "safe_mode", "=", "False", ")", ":", "result", "=", "dict", "(", ")", "for", "var", ",", "query", "in", "iteritems", "(", "self", ".", "dynamic", ")", ":", "result", "[", "var", "]", "=", "db", ".", "query", "(", "query", ",", "safe_mode", "=", "safe_mode", ",", "additional_locals", "=", "extra", ")", "return", "result" ]
Builds the dynamic context based on our current dynamic context entity and the given database.
[ "Builds", "the", "dynamic", "context", "based", "on", "our", "current", "dynamic", "context", "entity", "and", "the", "given", "database", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/context.py#L47-L53
13,444
thanethomson/statik
statik/context.py
StatikContext.build_for_each
def build_for_each(self, db, safe_mode=False, extra=None): """Builds the for-each context.""" result = dict() for var, query in iteritems(self.for_each): result[var] = db.query( query, additional_locals=extra, safe_mode=safe_mode ) return result
python
def build_for_each(self, db, safe_mode=False, extra=None): """Builds the for-each context.""" result = dict() for var, query in iteritems(self.for_each): result[var] = db.query( query, additional_locals=extra, safe_mode=safe_mode ) return result
[ "def", "build_for_each", "(", "self", ",", "db", ",", "safe_mode", "=", "False", ",", "extra", "=", "None", ")", ":", "result", "=", "dict", "(", ")", "for", "var", ",", "query", "in", "iteritems", "(", "self", ".", "for_each", ")", ":", "result", "[", "var", "]", "=", "db", ".", "query", "(", "query", ",", "additional_locals", "=", "extra", ",", "safe_mode", "=", "safe_mode", ")", "return", "result" ]
Builds the for-each context.
[ "Builds", "the", "for", "-", "each", "context", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/context.py#L55-L64
13,445
thanethomson/statik
statik/context.py
StatikContext.build
def build(self, db=None, safe_mode=False, for_each_inst=None, extra=None): """Builds a dictionary that can be used as context for template rendering.""" result = copy(self.initial) result.update(self.static) if self.dynamic: result.update(self.build_dynamic(db, extra=extra, safe_mode=safe_mode)) if self.for_each and for_each_inst: result.update(self.build_for_each(db, safe_mode=safe_mode, extra=extra)) if isinstance(extra, dict): result.update(extra) return result
python
def build(self, db=None, safe_mode=False, for_each_inst=None, extra=None): """Builds a dictionary that can be used as context for template rendering.""" result = copy(self.initial) result.update(self.static) if self.dynamic: result.update(self.build_dynamic(db, extra=extra, safe_mode=safe_mode)) if self.for_each and for_each_inst: result.update(self.build_for_each(db, safe_mode=safe_mode, extra=extra)) if isinstance(extra, dict): result.update(extra) return result
[ "def", "build", "(", "self", ",", "db", "=", "None", ",", "safe_mode", "=", "False", ",", "for_each_inst", "=", "None", ",", "extra", "=", "None", ")", ":", "result", "=", "copy", "(", "self", ".", "initial", ")", "result", ".", "update", "(", "self", ".", "static", ")", "if", "self", ".", "dynamic", ":", "result", ".", "update", "(", "self", ".", "build_dynamic", "(", "db", ",", "extra", "=", "extra", ",", "safe_mode", "=", "safe_mode", ")", ")", "if", "self", ".", "for_each", "and", "for_each_inst", ":", "result", ".", "update", "(", "self", ".", "build_for_each", "(", "db", ",", "safe_mode", "=", "safe_mode", ",", "extra", "=", "extra", ")", ")", "if", "isinstance", "(", "extra", ",", "dict", ")", ":", "result", ".", "update", "(", "extra", ")", "return", "result" ]
Builds a dictionary that can be used as context for template rendering.
[ "Builds", "a", "dictionary", "that", "can", "be", "used", "as", "context", "for", "template", "rendering", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/context.py#L66-L76
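The update order in StatikContext.build means later sources win on key collisions: static overrides initial, dynamic overrides static, for-each results override dynamic, and extra overrides everything. A standalone sketch of that precedence, with plain dicts standing in for the real context sources:

from copy import copy

initial = {'title': 'initial', 'site': 'My Site'}
static = {'title': 'static'}
dynamic = {'title': 'dynamic'}  # would come from db.query(...) in statik
extra = {'title': 'extra'}      # per-render extras

result = copy(initial)
result.update(static)   # static context overrides initial values
result.update(dynamic)  # dynamic (db-driven) context overrides static
result.update(extra)    # per-render extras always win
assert result == {'title': 'extra', 'site': 'My Site'}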
13,446
thanethomson/statik
statik/templating.py
template_exception_handler
def template_exception_handler(fn, error_context, filename=None): """Calls the given function, attempting to catch any template-related errors, and converts the error to a Statik TemplateError instance. Returns the result returned by the function itself.""" error_message = None if filename: error_context.update(filename=filename) try: return fn() except jinja2.TemplateSyntaxError as exc: error_context.update(filename=exc.filename, line_no=exc.lineno) error_message = exc.message except jinja2.TemplateError as exc: error_message = exc.message except Exception as exc: error_message = "%s" % exc raise TemplateError(message=error_message, context=error_context)
python
def template_exception_handler(fn, error_context, filename=None): """Calls the given function, attempting to catch any template-related errors, and converts the error to a Statik TemplateError instance. Returns the result returned by the function itself.""" error_message = None if filename: error_context.update(filename=filename) try: return fn() except jinja2.TemplateSyntaxError as exc: error_context.update(filename=exc.filename, line_no=exc.lineno) error_message = exc.message except jinja2.TemplateError as exc: error_message = exc.message except Exception as exc: error_message = "%s" % exc raise TemplateError(message=error_message, context=error_context)
[ "def", "template_exception_handler", "(", "fn", ",", "error_context", ",", "filename", "=", "None", ")", ":", "error_message", "=", "None", "if", "filename", ":", "error_context", ".", "update", "(", "filename", "=", "filename", ")", "try", ":", "return", "fn", "(", ")", "except", "jinja2", ".", "TemplateSyntaxError", "as", "exc", ":", "error_context", ".", "update", "(", "filename", "=", "exc", ".", "filename", ",", "line_no", "=", "exc", ".", "lineno", ")", "error_message", "=", "exc", ".", "message", "except", "jinja2", ".", "TemplateError", "as", "exc", ":", "error_message", "=", "exc", ".", "message", "except", "Exception", "as", "exc", ":", "error_message", "=", "\"%s\"", "%", "exc", "raise", "TemplateError", "(", "message", "=", "error_message", ",", "context", "=", "error_context", ")" ]
Calls the given function, attempting to catch any template-related errors, and converts the error to a Statik TemplateError instance. Returns the result returned by the function itself.
[ "Calls", "the", "given", "function", "attempting", "to", "catch", "any", "template", "-", "related", "errors", "and", "converts", "the", "error", "to", "a", "Statik", "TemplateError", "instance", ".", "Returns", "the", "result", "returned", "by", "the", "function", "itself", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/templating.py#L53-L70
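A usage sketch for template_exception_handler, assuming the function from the record above is in scope; the _Ctx class below is a hypothetical stand-in for statik's StatikErrorContext, of which the handler only uses the update(**kwargs) method.

import jinja2

class _Ctx:
    # hypothetical stand-in for statik's StatikErrorContext
    def update(self, **kwargs):
        self.__dict__.update(kwargs)

template = jinja2.Template("Hello {{ name }}")

# Pass the risky call as a zero-argument callable; any jinja2 error is
# converted into a statik TemplateError carrying filename/line context.
result = template_exception_handler(lambda: template.render(name="world"),
                                    _Ctx(), filename="greeting.jinja2")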
13,447
thanethomson/statik
statik/templating.py
StatikTemplateEngine.create_template
def create_template(self, s, provider_name=None): """Creates a template from the given string based on the specified provider or the provider with highest precedence. Args: s: The string to convert to a template. provider_name: The name of the provider to use to create the template. """ if provider_name is None: provider_name = self.supported_providers[0] return template_exception_handler( lambda: self.get_provider(provider_name).create_template(s), self.error_context )
python
def create_template(self, s, provider_name=None): """Creates a template from the given string based on the specified provider or the provider with highest precedence. Args: s: The string to convert to a template. provider_name: The name of the provider to use to create the template. """ if provider_name is None: provider_name = self.supported_providers[0] return template_exception_handler( lambda: self.get_provider(provider_name).create_template(s), self.error_context )
[ "def", "create_template", "(", "self", ",", "s", ",", "provider_name", "=", "None", ")", ":", "if", "provider_name", "is", "None", ":", "provider_name", "=", "self", ".", "supported_providers", "[", "0", "]", "return", "template_exception_handler", "(", "lambda", ":", "self", ".", "get_provider", "(", "provider_name", ")", ".", "create_template", "(", "s", ")", ",", "self", ".", "error_context", ")" ]
Creates a template from the given string based on the specified provider or the provider with highest precedence. Args: s: The string to convert to a template. provider_name: The name of the provider to use to create the template.
[ "Creates", "a", "template", "from", "the", "given", "string", "based", "on", "the", "specified", "provider", "or", "the", "provider", "with", "highest", "precedence", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/templating.py#L197-L210
13,448
thanethomson/statik
statik/fields.py
construct_field
def construct_field(model_name, field_name, field_type, all_models, **kwargs): """Helper function to build a field from the given field name and type. Args: model_name: The name of the model for which we're building this field. field_name: The name of the field to build. field_type: A string indicator as to which field type must be built. all_models: A list containing the names of all of the models, which will help us when building foreign key lookups. """ field_type_parts = field_type.split('->') _field_type = field_type_parts[0].strip().split('[]')[0].strip() back_populates = field_type_parts[1].strip() if len(field_type_parts) > 1 else None error_context = kwargs.pop('error_context', StatikErrorContext()) _kwargs = copy(kwargs) _kwargs['back_populates'] = back_populates if _field_type not in FIELD_TYPES and _field_type not in all_models: raise InvalidFieldTypeError( model_name, field_name, context=error_context ) if _field_type in FIELD_TYPES: return FIELD_TYPES[_field_type](field_name, **_kwargs) if field_type_parts[0].strip().endswith('[]'): return StatikManyToManyField(field_name, _field_type, **_kwargs) return StatikForeignKeyField(field_name, _field_type, **_kwargs)
python
def construct_field(model_name, field_name, field_type, all_models, **kwargs): """Helper function to build a field from the given field name and type. Args: model_name: The name of the model for which we're building this field. field_name: The name of the field to build. field_type: A string indicator as to which field type must be built. all_models: A list containing the names of all of the models, which will help us when building foreign key lookups. """ field_type_parts = field_type.split('->') _field_type = field_type_parts[0].strip().split('[]')[0].strip() back_populates = field_type_parts[1].strip() if len(field_type_parts) > 1 else None error_context = kwargs.pop('error_context', StatikErrorContext()) _kwargs = copy(kwargs) _kwargs['back_populates'] = back_populates if _field_type not in FIELD_TYPES and _field_type not in all_models: raise InvalidFieldTypeError( model_name, field_name, context=error_context ) if _field_type in FIELD_TYPES: return FIELD_TYPES[_field_type](field_name, **_kwargs) if field_type_parts[0].strip().endswith('[]'): return StatikManyToManyField(field_name, _field_type, **_kwargs) return StatikForeignKeyField(field_name, _field_type, **_kwargs)
[ "def", "construct_field", "(", "model_name", ",", "field_name", ",", "field_type", ",", "all_models", ",", "*", "*", "kwargs", ")", ":", "field_type_parts", "=", "field_type", ".", "split", "(", "'->'", ")", "_field_type", "=", "field_type_parts", "[", "0", "]", ".", "strip", "(", ")", ".", "split", "(", "'[]'", ")", "[", "0", "]", ".", "strip", "(", ")", "back_populates", "=", "field_type_parts", "[", "1", "]", ".", "strip", "(", ")", "if", "len", "(", "field_type_parts", ")", ">", "1", "else", "None", "error_context", "=", "kwargs", ".", "pop", "(", "'error_context'", ",", "StatikErrorContext", "(", ")", ")", "_kwargs", "=", "copy", "(", "kwargs", ")", "_kwargs", "[", "'back_populates'", "]", "=", "back_populates", "if", "_field_type", "not", "in", "FIELD_TYPES", "and", "_field_type", "not", "in", "all_models", ":", "raise", "InvalidFieldTypeError", "(", "model_name", ",", "field_name", ",", "context", "=", "error_context", ")", "if", "_field_type", "in", "FIELD_TYPES", ":", "return", "FIELD_TYPES", "[", "_field_type", "]", "(", "field_name", ",", "*", "*", "_kwargs", ")", "if", "field_type_parts", "[", "0", "]", ".", "strip", "(", ")", ".", "endswith", "(", "'[]'", ")", ":", "return", "StatikManyToManyField", "(", "field_name", ",", "_field_type", ",", "*", "*", "_kwargs", ")", "return", "StatikForeignKeyField", "(", "field_name", ",", "_field_type", ",", "*", "*", "_kwargs", ")" ]
Helper function to build a field from the given field name and type. Args: model_name: The name of the model for which we're building this field. field_name: The name of the field to build. field_type: A string indicator as to which field type must be built. all_models: A list containing the names of all of the models, which will help us when building foreign key lookups.
[ "Helper", "function", "to", "build", "a", "field", "from", "the", "given", "field", "name", "and", "type", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/fields.py#L90-L121
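The field-type grammar that construct_field parses: an optional [] suffix marks a many-to-many relation, and an optional -> name suffix names the back-populated attribute on the related model. A standalone sketch of just the parsing step, using the same string operations as the record above:

def parse_field_type(field_type):
    parts = field_type.split('->')
    base = parts[0].strip().split('[]')[0].strip()
    back_populates = parts[1].strip() if len(parts) > 1 else None
    many = parts[0].strip().endswith('[]')
    return base, many, back_populates

assert parse_field_type('String') == ('String', False, None)
assert parse_field_type('Post -> author') == ('Post', False, 'author')
assert parse_field_type('Tag[] -> posts') == ('Tag', True, 'posts')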
13,449
thanethomson/statik
statik/pagination.py
paginate
def paginate(db_query, items_per_page, offset=0, start_page=1): """Instantiates a Paginator instance for database queries. Args: db_query: The SQLAlchemy database query to paginate. items_per_page: The desired number of items per page. offset: The number of items to skip when paginating. start_page: The number of the first page when reporting on page numbers. """ return Paginator(db_query, items_per_page, offset=offset, start_page=start_page)
python
def paginate(db_query, items_per_page, offset=0, start_page=1): """Instantiates a Paginator instance for database queries. Args: db_query: The SQLAlchemy database query to paginate. items_per_page: The desired number of items per page. offset: The number of items to skip when paginating. start_page: The number of the first page when reporting on page numbers. """ return Paginator(db_query, items_per_page, offset=offset, start_page=start_page)
[ "def", "paginate", "(", "db_query", ",", "items_per_page", ",", "offset", "=", "0", ",", "start_page", "=", "1", ")", ":", "return", "Paginator", "(", "db_query", ",", "items_per_page", ",", "offset", "=", "offset", ",", "start_page", "=", "start_page", ")" ]
Instantiates a Paginator instance for database queries. Args: db_query: The SQLAlchemy database query to paginate. items_per_page: The desired number of items per page. offset: The number of items to skip when paginating. start_page: The number of the first page when reporting on page numbers.
[ "Instantiates", "a", "Paginator", "instance", "for", "database", "queries", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/pagination.py#L132-L141
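A quick arithmetic sketch of the parameters, going only by the docstring: offset items are skipped before paging starts, and pages are numbered from start_page. The Paginator's own API beyond the constructor is not shown in the record, so this is an assumption about its semantics.

import math

total_items, items_per_page, offset, start_page = 95, 10, 5, 1
pages = math.ceil((total_items - offset) / items_per_page)   # 9 pages
page_numbers = list(range(start_page, start_page + pages))   # [1, ..., 9]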
13,450
thanethomson/statik
statik/views.py
StatikViewPath.render_reverse
def render_reverse(self, inst=None, context=None): """Renders the reverse URL for this path.""" rendered = self.render(inst=inst, context=context) parts = rendered.split('/') # we only prettify URLs for these files if parts[-1] in ['index.html', 'index.htm']: return ('/'.join(parts[:-1])) + '/' return rendered
python
def render_reverse(self, inst=None, context=None): """Renders the reverse URL for this path.""" rendered = self.render(inst=inst, context=context) parts = rendered.split('/') # we only prettify URLs for these files if parts[-1] in ['index.html', 'index.htm']: return ('/'.join(parts[:-1])) + '/' return rendered
[ "def", "render_reverse", "(", "self", ",", "inst", "=", "None", ",", "context", "=", "None", ")", ":", "rendered", "=", "self", ".", "render", "(", "inst", "=", "inst", ",", "context", "=", "context", ")", "parts", "=", "rendered", ".", "split", "(", "'/'", ")", "# we only prettify URLs for these files", "if", "parts", "[", "-", "1", "]", "in", "[", "'index.html'", ",", "'index.htm'", "]", ":", "return", "(", "'/'", ".", "join", "(", "parts", "[", ":", "-", "1", "]", ")", ")", "+", "'/'", "return", "rendered" ]
Renders the reverse URL for this path.
[ "Renders", "the", "reverse", "URL", "for", "this", "path", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/views.py#L61-L68
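The prettifying step of render_reverse in isolation: only index.html/index.htm leaves are collapsed to a trailing slash, every other path is returned untouched.

def prettify(rendered):
    parts = rendered.split('/')
    if parts[-1] in ['index.html', 'index.htm']:
        return '/'.join(parts[:-1]) + '/'
    return rendered

assert prettify('posts/2019/index.html') == 'posts/2019/'
assert prettify('posts/2019/about.html') == 'posts/2019/about.html'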
13,451
thanethomson/statik
statik/views.py
StatikViewPath.create
def create( cls, path, template_engine=None, output_filename=None, output_ext=None, view_name=None ): """Create the relevant subclass of StatikView based on the given path variable and parameters.""" # if it's a complex view if isinstance(path, dict): return StatikViewComplexPath( path, template_engine, output_filename=output_filename, output_ext=output_ext, view_name=view_name ) elif isinstance(path, basestring): return StatikViewSimplePath( path, output_filename=output_filename, output_ext=output_ext, view_name=view_name ) else: raise ValueError( "Unrecognised structure for \"path\" configuration in view: %s" % view_name )
python
def create( cls, path, template_engine=None, output_filename=None, output_ext=None, view_name=None ): """Create the relevant subclass of StatikView based on the given path variable and parameters.""" # if it's a complex view if isinstance(path, dict): return StatikViewComplexPath( path, template_engine, output_filename=output_filename, output_ext=output_ext, view_name=view_name ) elif isinstance(path, basestring): return StatikViewSimplePath( path, output_filename=output_filename, output_ext=output_ext, view_name=view_name ) else: raise ValueError( "Unrecognised structure for \"path\" configuration in view: %s" % view_name )
[ "def", "create", "(", "cls", ",", "path", ",", "template_engine", "=", "None", ",", "output_filename", "=", "None", ",", "output_ext", "=", "None", ",", "view_name", "=", "None", ")", ":", "# if it's a complex view", "if", "isinstance", "(", "path", ",", "dict", ")", ":", "return", "StatikViewComplexPath", "(", "path", ",", "template_engine", ",", "output_filename", "=", "output_filename", ",", "output_ext", "=", "output_ext", ",", "view_name", "=", "view_name", ")", "elif", "isinstance", "(", "path", ",", "basestring", ")", ":", "return", "StatikViewSimplePath", "(", "path", ",", "output_filename", "=", "output_filename", ",", "output_ext", "=", "output_ext", ",", "view_name", "=", "view_name", ")", "else", ":", "raise", "ValueError", "(", "\"Unrecognised structure for \\\"path\\\" configuration in view: %s\"", "%", "view_name", ")" ]
Create the relevant subclass of StatikView based on the given path variable and parameters.
[ "Create", "the", "relevant", "subclass", "of", "StatikView", "based", "on", "the", "given", "path", "variable", "and", "parameters", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/views.py#L71-L100
13,452
thanethomson/statik
statik/views.py
StatikComplexViewRenderer.render
def render(self, context, db=None, safe_mode=False, extra_context=None): """Renders the given context using the specified database, returning a dictionary containing path segments and rendered view contents.""" if not db: raise MissingParameterError( "db", context=self.error_context ) rendered_views = dict() path_instances = db.query(self.path.query, safe_mode=safe_mode) extra_ctx = copy(extra_context) if extra_context else dict() for inst in path_instances: extra_ctx.update({ self.path.variable: inst }) ctx = context.build( db=db, safe_mode=safe_mode, for_each_inst=inst, extra=extra_ctx ) inst_path = self.path.render(inst=inst, context=ctx) rendered_view = self.template.render(ctx) rendered_views = deep_merge_dict( rendered_views, dict_from_path(inst_path, final_value=rendered_view) ) return rendered_views
python
def render(self, context, db=None, safe_mode=False, extra_context=None): """Renders the given context using the specified database, returning a dictionary containing path segments and rendered view contents.""" if not db: raise MissingParameterError( "db", context=self.error_context ) rendered_views = dict() path_instances = db.query(self.path.query, safe_mode=safe_mode) extra_ctx = copy(extra_context) if extra_context else dict() for inst in path_instances: extra_ctx.update({ self.path.variable: inst }) ctx = context.build( db=db, safe_mode=safe_mode, for_each_inst=inst, extra=extra_ctx ) inst_path = self.path.render(inst=inst, context=ctx) rendered_view = self.template.render(ctx) rendered_views = deep_merge_dict( rendered_views, dict_from_path(inst_path, final_value=rendered_view) ) return rendered_views
[ "def", "render", "(", "self", ",", "context", ",", "db", "=", "None", ",", "safe_mode", "=", "False", ",", "extra_context", "=", "None", ")", ":", "if", "not", "db", ":", "raise", "MissingParameterError", "(", "\"db\"", ",", "context", "=", "self", ".", "error_context", ")", "rendered_views", "=", "dict", "(", ")", "path_instances", "=", "db", ".", "query", "(", "self", ".", "path", ".", "query", ",", "safe_mode", "=", "safe_mode", ")", "extra_ctx", "=", "copy", "(", "extra_context", ")", "if", "extra_context", "else", "dict", "(", ")", "for", "inst", "in", "path_instances", ":", "extra_ctx", ".", "update", "(", "{", "self", ".", "path", ".", "variable", ":", "inst", "}", ")", "ctx", "=", "context", ".", "build", "(", "db", "=", "db", ",", "safe_mode", "=", "safe_mode", ",", "for_each_inst", "=", "inst", ",", "extra", "=", "extra_ctx", ")", "inst_path", "=", "self", ".", "path", ".", "render", "(", "inst", "=", "inst", ",", "context", "=", "ctx", ")", "rendered_view", "=", "self", ".", "template", ".", "render", "(", "ctx", ")", "rendered_views", "=", "deep_merge_dict", "(", "rendered_views", ",", "dict_from_path", "(", "inst_path", ",", "final_value", "=", "rendered_view", ")", ")", "return", "rendered_views" ]
Renders the given context using the specified database, returning a dictionary containing path segments and rendered view contents.
[ "Renders", "the", "given", "context", "using", "the", "specified", "database", "returning", "a", "dictionary", "containing", "path", "segments", "and", "rendered", "view", "contents", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/views.py#L282-L310
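What the accumulation loop at the end of StatikComplexViewRenderer.render builds: each instance path becomes a nested dict keyed by path segment, deep-merged into one tree that dump_in_memory_result can later write to disk. The two helpers below are hypothetical minimal versions of statik's dict_from_path and deep_merge_dict, consistent with how render uses them:

def dict_from_path(path, final_value):
    # wrap the value in one dict per path segment, innermost first
    result = final_value
    for segment in reversed(path.strip('/').split('/')):
        result = {segment: result}
    return result

def deep_merge_dict(a, b):
    # recursively merge b into a; leaf values from b win
    for k, v in b.items():
        if isinstance(v, dict) and isinstance(a.get(k), dict):
            deep_merge_dict(a[k], v)
        else:
            a[k] = v
    return a

tree = {}
tree = deep_merge_dict(tree, dict_from_path('posts/1/index.html', '<html>post 1</html>'))
tree = deep_merge_dict(tree, dict_from_path('posts/2/index.html', '<html>post 2</html>'))
assert tree == {'posts': {'1': {'index.html': '<html>post 1</html>'},
                          '2': {'index.html': '<html>post 2</html>'}}}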
13,453
thanethomson/statik
statik/views.py
StatikView.render
def render(self, db, safe_mode=False, extra_context=None): """Renders this view, given the specified StatikDatabase instance.""" return self.renderer.render( self.context, db, safe_mode=safe_mode, extra_context=extra_context )
python
def render(self, db, safe_mode=False, extra_context=None): """Renders this view, given the specified StatikDatabase instance.""" return self.renderer.render( self.context, db, safe_mode=safe_mode, extra_context=extra_context )
[ "def", "render", "(", "self", ",", "db", ",", "safe_mode", "=", "False", ",", "extra_context", "=", "None", ")", ":", "return", "self", ".", "renderer", ".", "render", "(", "self", ".", "context", ",", "db", ",", "safe_mode", "=", "safe_mode", ",", "extra_context", "=", "extra_context", ")" ]
Renders this view, given the specified StatikDatabase instance.
[ "Renders", "this", "view", "given", "the", "specified", "StatikDatabase", "instance", "." ]
56b1b5a2cb05a97afa81f428bfcefc833e935b8d
https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/views.py#L399-L406
13,454
goldmann/docker-squash
docker_squash/image.py
Image._validate_number_of_layers
def _validate_number_of_layers(self, number_of_layers): """ Makes sure that the specified number of layers to squash is a valid number """ # Only positive numbers are correct if number_of_layers <= 0: raise SquashError( "Number of layers to squash cannot be less or equal 0, provided: %s" % number_of_layers) # Do not squash if provided number of layer to squash is bigger # than number of actual layers in the image if number_of_layers > len(self.old_image_layers): raise SquashError( "Cannot squash %s layers, the %s image contains only %s layers" % (number_of_layers, self.image, len(self.old_image_layers)))
python
def _validate_number_of_layers(self, number_of_layers): """ Makes sure that the specified number of layers to squash is a valid number """ # Only positive numbers are correct if number_of_layers <= 0: raise SquashError( "Number of layers to squash cannot be less or equal 0, provided: %s" % number_of_layers) # Do not squash if provided number of layer to squash is bigger # than number of actual layers in the image if number_of_layers > len(self.old_image_layers): raise SquashError( "Cannot squash %s layers, the %s image contains only %s layers" % (number_of_layers, self.image, len(self.old_image_layers)))
[ "def", "_validate_number_of_layers", "(", "self", ",", "number_of_layers", ")", ":", "# Only positive numbers are correct", "if", "number_of_layers", "<=", "0", ":", "raise", "SquashError", "(", "\"Number of layers to squash cannot be less or equal 0, provided: %s\"", "%", "number_of_layers", ")", "# Do not squash if provided number of layer to squash is bigger", "# than number of actual layers in the image", "if", "number_of_layers", ">", "len", "(", "self", ".", "old_image_layers", ")", ":", "raise", "SquashError", "(", "\"Cannot squash %s layers, the %s image contains only %s layers\"", "%", "(", "number_of_layers", ",", "self", ".", "image", ",", "len", "(", "self", ".", "old_image_layers", ")", ")", ")" ]
Makes sure that the specified number of layers to squash is a valid number
[ "Makes", "sure", "that", "the", "specified", "number", "of", "layers", "to", "squash", "is", "a", "valid", "number" ]
89e0297942be268791aff2098b7ebfa50d82f8e8
https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L125-L140
13,455
goldmann/docker-squash
docker_squash/image.py
Image._files_in_layers
def _files_in_layers(self, layers, directory): """ Prepare a list of files in all layers """ files = {} for layer in layers: self.log.debug("Generating list of files in layer '%s'..." % layer) tar_file = os.path.join(directory, layer, "layer.tar") with tarfile.open(tar_file, 'r', format=tarfile.PAX_FORMAT) as tar: files[layer] = [self._normalize_path( x) for x in tar.getnames()] self.log.debug("Done, found %s files" % len(files[layer])) return files
python
def _files_in_layers(self, layers, directory): """ Prepare a list of files in all layers """ files = {} for layer in layers: self.log.debug("Generating list of files in layer '%s'..." % layer) tar_file = os.path.join(directory, layer, "layer.tar") with tarfile.open(tar_file, 'r', format=tarfile.PAX_FORMAT) as tar: files[layer] = [self._normalize_path( x) for x in tar.getnames()] self.log.debug("Done, found %s files" % len(files[layer])) return files
[ "def", "_files_in_layers", "(", "self", ",", "layers", ",", "directory", ")", ":", "files", "=", "{", "}", "for", "layer", "in", "layers", ":", "self", ".", "log", ".", "debug", "(", "\"Generating list of files in layer '%s'...\"", "%", "layer", ")", "tar_file", "=", "os", ".", "path", ".", "join", "(", "directory", ",", "layer", ",", "\"layer.tar\"", ")", "with", "tarfile", ".", "open", "(", "tar_file", ",", "'r'", ",", "format", "=", "tarfile", ".", "PAX_FORMAT", ")", "as", "tar", ":", "files", "[", "layer", "]", "=", "[", "self", ".", "_normalize_path", "(", "x", ")", "for", "x", "in", "tar", ".", "getnames", "(", ")", "]", "self", ".", "log", ".", "debug", "(", "\"Done, found %s files\"", "%", "len", "(", "files", "[", "layer", "]", ")", ")", "return", "files" ]
Prepare a list of files in all layers
[ "Prepare", "a", "list", "of", "files", "in", "all", "layers" ]
89e0297942be268791aff2098b7ebfa50d82f8e8
https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L260-L274
13,456
goldmann/docker-squash
docker_squash/image.py
Image._prepare_tmp_directory
def _prepare_tmp_directory(self, tmp_dir): """ Creates temporary directory that is used to work on layers """ if tmp_dir: if os.path.exists(tmp_dir): raise SquashError( "The '%s' directory already exists, please remove it before you proceed" % tmp_dir) os.makedirs(tmp_dir) else: tmp_dir = tempfile.mkdtemp(prefix="docker-squash-") self.log.debug("Using %s as the temporary directory" % tmp_dir) return tmp_dir
python
def _prepare_tmp_directory(self, tmp_dir): """ Creates temporary directory that is used to work on layers """ if tmp_dir: if os.path.exists(tmp_dir): raise SquashError( "The '%s' directory already exists, please remove it before you proceed" % tmp_dir) os.makedirs(tmp_dir) else: tmp_dir = tempfile.mkdtemp(prefix="docker-squash-") self.log.debug("Using %s as the temporary directory" % tmp_dir) return tmp_dir
[ "def", "_prepare_tmp_directory", "(", "self", ",", "tmp_dir", ")", ":", "if", "tmp_dir", ":", "if", "os", ".", "path", ".", "exists", "(", "tmp_dir", ")", ":", "raise", "SquashError", "(", "\"The '%s' directory already exists, please remove it before you proceed\"", "%", "tmp_dir", ")", "os", ".", "makedirs", "(", "tmp_dir", ")", "else", ":", "tmp_dir", "=", "tempfile", ".", "mkdtemp", "(", "prefix", "=", "\"docker-squash-\"", ")", "self", ".", "log", ".", "debug", "(", "\"Using %s as the temporary directory\"", "%", "tmp_dir", ")", "return", "tmp_dir" ]
Creates temporary directory that is used to work on layers
[ "Creates", "temporary", "directory", "that", "is", "used", "to", "work", "on", "layers" ]
89e0297942be268791aff2098b7ebfa50d82f8e8
https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L276-L289
13,457
goldmann/docker-squash
docker_squash/image.py
Image._layers_to_squash
def _layers_to_squash(self, layers, from_layer): """ Prepares a list of layer IDs that should be squashed """ to_squash = [] to_leave = [] should_squash = True for l in reversed(layers): if l == from_layer: should_squash = False if should_squash: to_squash.append(l) else: to_leave.append(l) to_squash.reverse() to_leave.reverse() return to_squash, to_leave
python
def _layers_to_squash(self, layers, from_layer): """ Prepares a list of layer IDs that should be squashed """ to_squash = [] to_leave = [] should_squash = True for l in reversed(layers): if l == from_layer: should_squash = False if should_squash: to_squash.append(l) else: to_leave.append(l) to_squash.reverse() to_leave.reverse() return to_squash, to_leave
[ "def", "_layers_to_squash", "(", "self", ",", "layers", ",", "from_layer", ")", ":", "to_squash", "=", "[", "]", "to_leave", "=", "[", "]", "should_squash", "=", "True", "for", "l", "in", "reversed", "(", "layers", ")", ":", "if", "l", "==", "from_layer", ":", "should_squash", "=", "False", "if", "should_squash", ":", "to_squash", ".", "append", "(", "l", ")", "else", ":", "to_leave", ".", "append", "(", "l", ")", "to_squash", ".", "reverse", "(", ")", "to_leave", ".", "reverse", "(", ")", "return", "to_squash", ",", "to_leave" ]
Prepares a list of layer IDs that should be squashed
[ "Prepares", "a", "list", "of", "layer", "IDs", "that", "should", "be", "squashed" ]
89e0297942be268791aff2098b7ebfa50d82f8e8
https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L319-L337
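A worked example of the split _layers_to_squash performs, as a standalone copy of its logic: everything above from_layer (towards the newest layer) is squashed, while from_layer itself and everything below it is left untouched.

def layers_to_squash(layers, from_layer):
    to_squash, to_leave, should_squash = [], [], True
    for l in reversed(layers):
        if l == from_layer:
            should_squash = False
        if should_squash:
            to_squash.append(l)
        else:
            to_leave.append(l)
    return list(reversed(to_squash)), list(reversed(to_leave))

# layer IDs ordered oldest -> newest
assert layers_to_squash(['base', 'install', 'config', 'app'], 'install') == \
    (['config', 'app'], ['base', 'install'])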
13,458
goldmann/docker-squash
docker_squash/image.py
Image._save_image
def _save_image(self, image_id, directory):
    """ Saves the image as a tar archive under specified name """

    for x in [0, 1, 2]:
        self.log.info("Saving image %s to %s directory..." % (image_id, directory))
        self.log.debug("Try #%s..." % (x + 1))

        try:
            image = self.docker.get_image(image_id)

            if docker.version_info[0] < 3:
                # Docker library prior to 3.0.0 returned the requests
                # object directly which could be used to read from
                self.log.debug("Extracting image using HTTPResponse object directly")
                self._extract_tar(image, directory)
            else:
                # Docker library >=3.0.0 returns iterator over raw data
                self.log.debug("Extracting image using iterator over raw data")

                fd_r, fd_w = os.pipe()

                r = os.fdopen(fd_r, 'rb')
                w = os.fdopen(fd_w, 'wb')

                extracter = threading.Thread(target=self._extract_tar, args=(r, directory))
                extracter.start()

                for chunk in image:
                    w.write(chunk)
                    w.flush()

                w.close()

                extracter.join()
                r.close()

            self.log.info("Image saved!")
            return True
        except Exception as e:
            self.log.exception(e)
            self.log.warn(
                "An error occurred while saving the %s image, retrying..." % image_id)

    raise SquashError("Couldn't save %s image!" % image_id)
python
def _save_image(self, image_id, directory):
    """ Saves the image as a tar archive under specified name """

    for x in [0, 1, 2]:
        self.log.info("Saving image %s to %s directory..." % (image_id, directory))
        self.log.debug("Try #%s..." % (x + 1))

        try:
            image = self.docker.get_image(image_id)

            if docker.version_info[0] < 3:
                # Docker library prior to 3.0.0 returned the requests
                # object directly which could be used to read from
                self.log.debug("Extracting image using HTTPResponse object directly")
                self._extract_tar(image, directory)
            else:
                # Docker library >=3.0.0 returns iterator over raw data
                self.log.debug("Extracting image using iterator over raw data")

                fd_r, fd_w = os.pipe()

                r = os.fdopen(fd_r, 'rb')
                w = os.fdopen(fd_w, 'wb')

                extracter = threading.Thread(target=self._extract_tar, args=(r, directory))
                extracter.start()

                for chunk in image:
                    w.write(chunk)
                    w.flush()

                w.close()

                extracter.join()
                r.close()

            self.log.info("Image saved!")
            return True
        except Exception as e:
            self.log.exception(e)
            self.log.warn(
                "An error occurred while saving the %s image, retrying..." % image_id)

    raise SquashError("Couldn't save %s image!" % image_id)
[ "def", "_save_image", "(", "self", ",", "image_id", ",", "directory", ")", ":", "for", "x", "in", "[", "0", ",", "1", ",", "2", "]", ":", "self", ".", "log", ".", "info", "(", "\"Saving image %s to %s directory...\"", "%", "(", "image_id", ",", "directory", ")", ")", "self", ".", "log", ".", "debug", "(", "\"Try #%s...\"", "%", "(", "x", "+", "1", ")", ")", "try", ":", "image", "=", "self", ".", "docker", ".", "get_image", "(", "image_id", ")", "if", "docker", ".", "version_info", "[", "0", "]", "<", "3", ":", "# Docker library prior to 3.0.0 returned the requests", "# object directly which could be used to read from", "self", ".", "log", ".", "debug", "(", "\"Extracting image using HTTPResponse object directly\"", ")", "self", ".", "_extract_tar", "(", "image", ",", "directory", ")", "else", ":", "# Docker library >=3.0.0 returns iterator over raw data", "self", ".", "log", ".", "debug", "(", "\"Extracting image using iterator over raw data\"", ")", "fd_r", ",", "fd_w", "=", "os", ".", "pipe", "(", ")", "r", "=", "os", ".", "fdopen", "(", "fd_r", ",", "'rb'", ")", "w", "=", "os", ".", "fdopen", "(", "fd_w", ",", "'wb'", ")", "extracter", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_extract_tar", ",", "args", "=", "(", "r", ",", "directory", ")", ")", "extracter", ".", "start", "(", ")", "for", "chunk", "in", "image", ":", "w", ".", "write", "(", "chunk", ")", "w", ".", "flush", "(", ")", "w", ".", "close", "(", ")", "extracter", ".", "join", "(", ")", "r", ".", "close", "(", ")", "self", ".", "log", ".", "info", "(", "\"Image saved!\"", ")", "return", "True", "except", "Exception", "as", "e", ":", "self", ".", "log", ".", "exception", "(", "e", ")", "self", ".", "log", ".", "warn", "(", "\"An error occurred while saving the %s image, retrying...\"", "%", "image_id", ")", "raise", "SquashError", "(", "\"Couldn't save %s image!\"", "%", "image_id", ")" ]
Saves the image as a tar archive under specified name
[ "Saves", "the", "image", "as", "a", "tar", "archive", "under", "specified", "name" ]
89e0297942be268791aff2098b7ebfa50d82f8e8
https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L343-L386
13,459
goldmann/docker-squash
docker_squash/image.py
Image._unpack
def _unpack(self, tar_file, directory): """ Unpacks tar archive to selected directory """ self.log.info("Unpacking %s tar file to %s directory" % (tar_file, directory)) with tarfile.open(tar_file, 'r') as tar: tar.extractall(path=directory) self.log.info("Archive unpacked!")
python
def _unpack(self, tar_file, directory): """ Unpacks tar archive to selected directory """ self.log.info("Unpacking %s tar file to %s directory" % (tar_file, directory)) with tarfile.open(tar_file, 'r') as tar: tar.extractall(path=directory) self.log.info("Archive unpacked!")
[ "def", "_unpack", "(", "self", ",", "tar_file", ",", "directory", ")", ":", "self", ".", "log", ".", "info", "(", "\"Unpacking %s tar file to %s directory\"", "%", "(", "tar_file", ",", "directory", ")", ")", "with", "tarfile", ".", "open", "(", "tar_file", ",", "'r'", ")", "as", "tar", ":", "tar", ".", "extractall", "(", "path", "=", "directory", ")", "self", ".", "log", ".", "info", "(", "\"Archive unpacked!\"", ")" ]
Unpacks tar archive to selected directory
[ "Unpacks", "tar", "archive", "to", "selected", "directory" ]
89e0297942be268791aff2098b7ebfa50d82f8e8
https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L388-L397
13,460
goldmann/docker-squash
docker_squash/image.py
Image._parse_image_name
def _parse_image_name(self, image): """ Parses the provided image name and splits it in the name and tag part, if possible. If no tag is provided 'latest' is used. """ if ':' in image and '/' not in image.split(':')[-1]: image_tag = image.split(':')[-1] image_name = image[:-(len(image_tag) + 1)] else: image_tag = "latest" image_name = image return (image_name, image_tag)
python
def _parse_image_name(self, image): """ Parses the provided image name and splits it in the name and tag part, if possible. If no tag is provided 'latest' is used. """ if ':' in image and '/' not in image.split(':')[-1]: image_tag = image.split(':')[-1] image_name = image[:-(len(image_tag) + 1)] else: image_tag = "latest" image_name = image return (image_name, image_tag)
[ "def", "_parse_image_name", "(", "self", ",", "image", ")", ":", "if", "':'", "in", "image", "and", "'/'", "not", "in", "image", ".", "split", "(", "':'", ")", "[", "-", "1", "]", ":", "image_tag", "=", "image", ".", "split", "(", "':'", ")", "[", "-", "1", "]", "image_name", "=", "image", "[", ":", "-", "(", "len", "(", "image_tag", ")", "+", "1", ")", "]", "else", ":", "image_tag", "=", "\"latest\"", "image_name", "=", "image", "return", "(", "image_name", ",", "image_tag", ")" ]
Parses the provided image name and splits it in the name and tag part, if possible. If no tag is provided 'latest' is used.
[ "Parses", "the", "provided", "image", "name", "and", "splits", "it", "in", "the", "name", "and", "tag", "part", "if", "possible", ".", "If", "no", "tag", "is", "provided", "latest", "is", "used", "." ]
89e0297942be268791aff2098b7ebfa50d82f8e8
https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L405-L418
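Worked examples of the split performed by _parse_image_name, as a standalone copy of its logic; the '/' check is what keeps a registry port from being mistaken for a tag.

def parse_image_name(image):
    if ':' in image and '/' not in image.split(':')[-1]:
        tag = image.split(':')[-1]
        return image[:-(len(tag) + 1)], tag
    return image, 'latest'

assert parse_image_name('busybox') == ('busybox', 'latest')
assert parse_image_name('busybox:1.36') == ('busybox', '1.36')
# the part after the last ':' contains '/', so no tag is parsed:
assert parse_image_name('localhost:5000/busybox') == ('localhost:5000/busybox', 'latest')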
13,461
goldmann/docker-squash
docker_squash/image.py
Image._dump_json
def _dump_json(self, data, new_line=False): """ Helper function to marshal object into JSON string. Additionally a sha256sum of the created JSON string is generated. """ # We do not want any spaces between keys and values in JSON json_data = json.dumps(data, separators=(',', ':')) if new_line: json_data = "%s\n" % json_data # Generate sha256sum of the JSON data, may be handy sha = hashlib.sha256(json_data.encode('utf-8')).hexdigest() return json_data, sha
python
def _dump_json(self, data, new_line=False): """ Helper function to marshal object into JSON string. Additionally a sha256sum of the created JSON string is generated. """ # We do not want any spaces between keys and values in JSON json_data = json.dumps(data, separators=(',', ':')) if new_line: json_data = "%s\n" % json_data # Generate sha256sum of the JSON data, may be handy sha = hashlib.sha256(json_data.encode('utf-8')).hexdigest() return json_data, sha
[ "def", "_dump_json", "(", "self", ",", "data", ",", "new_line", "=", "False", ")", ":", "# We do not want any spaces between keys and values in JSON", "json_data", "=", "json", ".", "dumps", "(", "data", ",", "separators", "=", "(", "','", ",", "':'", ")", ")", "if", "new_line", ":", "json_data", "=", "\"%s\\n\"", "%", "json_data", "# Generate sha256sum of the JSON data, may be handy", "sha", "=", "hashlib", ".", "sha256", "(", "json_data", ".", "encode", "(", "'utf-8'", ")", ")", ".", "hexdigest", "(", ")", "return", "json_data", ",", "sha" ]
Helper function to marshal object into JSON string. Additionally a sha256sum of the created JSON string is generated.
[ "Helper", "function", "to", "marshal", "object", "into", "JSON", "string", ".", "Additionally", "a", "sha256sum", "of", "the", "created", "JSON", "string", "is", "generated", "." ]
89e0297942be268791aff2098b7ebfa50d82f8e8
https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L420-L435
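The two properties _dump_json guarantees, shown standalone: the compact separators produce JSON with no spaces between keys and values, and the sha256 is computed over exactly the string that gets written.

import hashlib
import json

data = {'architecture': 'amd64', 'os': 'linux'}
json_data = json.dumps(data, separators=(',', ':'))  # no spaces, as in _dump_json
sha = hashlib.sha256(json_data.encode('utf-8')).hexdigest()
assert json_data == '{"architecture":"amd64","os":"linux"}'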
13,462
goldmann/docker-squash
docker_squash/image.py
Image._move_layers
def _move_layers(self, layers, src, dest): """ This moves all the layers that should be copied as-is. In other words - all layers that are not meant to be squashed will be moved from the old image to the new image untouched. """ for layer in layers: layer_id = layer.replace('sha256:', '') self.log.debug("Moving unmodified layer '%s'..." % layer_id) shutil.move(os.path.join(src, layer_id), dest)
python
def _move_layers(self, layers, src, dest): """ This moves all the layers that should be copied as-is. In other words - all layers that are not meant to be squashed will be moved from the old image to the new image untouched. """ for layer in layers: layer_id = layer.replace('sha256:', '') self.log.debug("Moving unmodified layer '%s'..." % layer_id) shutil.move(os.path.join(src, layer_id), dest)
[ "def", "_move_layers", "(", "self", ",", "layers", ",", "src", ",", "dest", ")", ":", "for", "layer", "in", "layers", ":", "layer_id", "=", "layer", ".", "replace", "(", "'sha256:'", ",", "''", ")", "self", ".", "log", ".", "debug", "(", "\"Moving unmodified layer '%s'...\"", "%", "layer_id", ")", "shutil", ".", "move", "(", "os", ".", "path", ".", "join", "(", "src", ",", "layer_id", ")", ",", "dest", ")" ]
This moves all the layers that should be copied as-is. In other words - all layers that are not meant to be squashed will be moved from the old image to the new image untouched.
[ "This", "moves", "all", "the", "layers", "that", "should", "be", "copied", "as", "-", "is", ".", "In", "other", "words", "-", "all", "layers", "that", "are", "not", "meant", "to", "be", "squashed", "will", "be", "moved", "from", "the", "old", "image", "to", "the", "new", "image", "untouched", "." ]
89e0297942be268791aff2098b7ebfa50d82f8e8
https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L475-L485
13,463
goldmann/docker-squash
docker_squash/image.py
Image._marker_files
def _marker_files(self, tar, members):
    """
    Searches for marker files in the specified archive.

    Docker marker files are files that have the .wh. prefix in the name.
    These files mark the corresponding file to be removed (hidden) when
    we start a container from the image.
    """
    marker_files = {}

    self.log.debug(
        "Searching for marker files in '%s' archive..." % tar.name)

    for member in members:
        if '.wh.' in member.name:
            self.log.debug("Found '%s' marker file" % member.name)
            marker_files[member] = tar.extractfile(member)

    self.log.debug("Done, found %s files" % len(marker_files))

    return marker_files
python
def _marker_files(self, tar, members):
    """
    Searches for marker files in the specified archive.

    Docker marker files are files that have the .wh. prefix in the name.
    These files mark the corresponding file to be removed (hidden) when
    we start a container from the image.
    """
    marker_files = {}

    self.log.debug(
        "Searching for marker files in '%s' archive..." % tar.name)

    for member in members:
        if '.wh.' in member.name:
            self.log.debug("Found '%s' marker file" % member.name)
            marker_files[member] = tar.extractfile(member)

    self.log.debug("Done, found %s files" % len(marker_files))

    return marker_files
[ "def", "_marker_files", "(", "self", ",", "tar", ",", "members", ")", ":", "marker_files", "=", "{", "}", "self", ".", "log", ".", "debug", "(", "\"Searching for marker files in '%s' archive...\"", "%", "tar", ".", "name", ")", "for", "member", "in", "members", ":", "if", "'.wh.'", "in", "member", ".", "name", ":", "self", ".", "log", ".", "debug", "(", "\"Found '%s' marker file\"", "%", "member", ".", "name", ")", "marker_files", "[", "member", "]", "=", "tar", ".", "extractfile", "(", "member", ")", "self", ".", "log", ".", "debug", "(", "\"Done, found %s files\"", "%", "len", "(", "marker_files", ")", ")", "return", "marker_files" ]
Searches for marker files in the specified archive. Docker marker files are files that have the .wh. prefix in the name. These files mark the corresponding file to be removed (hidden) when we start a container from the image.
[ "Searches", "for", "marker", "files", "in", "the", "specified", "archive", "." ]
89e0297942be268791aff2098b7ebfa50d82f8e8
https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L501-L521
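The detection rule is just a substring test on member names. A standalone sketch, building a throwaway in-memory archive with made-up file names so the scan has something to find:

```python
import io
import tarfile

# Build a small in-memory archive containing one regular file and one
# AUFS whiteout ("marker") entry -- the file names are made up.
buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tar:
    for name, payload in [("etc/app.conf", b"keep"), ("etc/.wh.old.conf", b"")]:
        info = tarfile.TarInfo(name=name)
        info.size = len(payload)
        tar.addfile(info, io.BytesIO(payload))

buf.seek(0)
tar = tarfile.open(fileobj=buf, mode="r")

# Same detection rule as _marker_files: the '.wh.' infix marks a deletion.
markers = {m: tar.extractfile(m) for m in tar.getmembers() if ".wh." in m.name}
print([m.name for m in markers])  # ['etc/.wh.old.conf']
```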
13,464
goldmann/docker-squash
docker_squash/image.py
Image._add_markers
def _add_markers(self, markers, tar, files_in_layers, added_symlinks):
    """
    This method is responsible for adding back all markers that were not
    added to the squashed layer AND whose files can be found in layers
    we do not squash.
    """

    if markers:
        self.log.debug("Marker files to add: %s" %
                       [o.name for o in markers.keys()])
    else:
        # No marker files to add
        return

    # https://github.com/goldmann/docker-squash/issues/108
    # Some tar archives do have the filenames prefixed with './'
    # which does not have any effect when we unpack the tar archive,
    # but when processing tar content - we see this.
    tar_files = [self._normalize_path(x) for x in tar.getnames()]

    for marker, marker_file in six.iteritems(markers):
        actual_file = marker.name.replace('.wh.', '')
        normalized_file = self._normalize_path(actual_file)

        should_be_added_back = False

        if self._file_should_be_skipped(normalized_file, added_symlinks):
            self.log.debug(
                "Skipping '%s' marker file, this file is on a symlink path" % normalized_file)
            continue

        if normalized_file in tar_files:
            self.log.debug(
                "Skipping '%s' marker file, this file was added earlier for some reason..." % normalized_file)
            continue

        if files_in_layers:
            for files in files_in_layers.values():
                if normalized_file in files:
                    should_be_added_back = True
                    break
        else:
            # There are no previous layers, so we need to add it back
            # In fact this shouldn't happen since having a marker file
            # where there is no previous layer does not make sense.
            should_be_added_back = True

        if should_be_added_back:
            self.log.debug(
                "Adding '%s' marker file back..." % marker.name)
            # Marker files on AUFS are hardlinks, we need to create
            # regular files, therefore we need to recreate the tarinfo
            # object
            tar.addfile(tarfile.TarInfo(name=marker.name), marker_file)
            # Add the file name to the list too to avoid re-reading all files
            # in tar archive
            tar_files.append(normalized_file)
        else:
            self.log.debug(
                "Skipping '%s' marker file..." % marker.name)
python
def _add_markers(self, markers, tar, files_in_layers, added_symlinks):
    """
    This method is responsible for adding back all markers that were not
    added to the squashed layer AND whose files can be found in layers
    we do not squash.
    """

    if markers:
        self.log.debug("Marker files to add: %s" %
                       [o.name for o in markers.keys()])
    else:
        # No marker files to add
        return

    # https://github.com/goldmann/docker-squash/issues/108
    # Some tar archives do have the filenames prefixed with './'
    # which does not have any effect when we unpack the tar archive,
    # but when processing tar content - we see this.
    tar_files = [self._normalize_path(x) for x in tar.getnames()]

    for marker, marker_file in six.iteritems(markers):
        actual_file = marker.name.replace('.wh.', '')
        normalized_file = self._normalize_path(actual_file)

        should_be_added_back = False

        if self._file_should_be_skipped(normalized_file, added_symlinks):
            self.log.debug(
                "Skipping '%s' marker file, this file is on a symlink path" % normalized_file)
            continue

        if normalized_file in tar_files:
            self.log.debug(
                "Skipping '%s' marker file, this file was added earlier for some reason..." % normalized_file)
            continue

        if files_in_layers:
            for files in files_in_layers.values():
                if normalized_file in files:
                    should_be_added_back = True
                    break
        else:
            # There are no previous layers, so we need to add it back
            # In fact this shouldn't happen since having a marker file
            # where there is no previous layer does not make sense.
            should_be_added_back = True

        if should_be_added_back:
            self.log.debug(
                "Adding '%s' marker file back..." % marker.name)
            # Marker files on AUFS are hardlinks, we need to create
            # regular files, therefore we need to recreate the tarinfo
            # object
            tar.addfile(tarfile.TarInfo(name=marker.name), marker_file)
            # Add the file name to the list too to avoid re-reading all files
            # in tar archive
            tar_files.append(normalized_file)
        else:
            self.log.debug(
                "Skipping '%s' marker file..." % marker.name)
[ "def", "_add_markers", "(", "self", ",", "markers", ",", "tar", ",", "files_in_layers", ",", "added_symlinks", ")", ":", "if", "markers", ":", "self", ".", "log", ".", "debug", "(", "\"Marker files to add: %s\"", "%", "[", "o", ".", "name", "for", "o", "in", "markers", ".", "keys", "(", ")", "]", ")", "else", ":", "# No marker files to add", "return", "# https://github.com/goldmann/docker-squash/issues/108", "# Some tar archives do have the filenames prefixed with './'", "# which does not have any effect when we unpack the tar achive,", "# but when processing tar content - we see this.", "tar_files", "=", "[", "self", ".", "_normalize_path", "(", "x", ")", "for", "x", "in", "tar", ".", "getnames", "(", ")", "]", "for", "marker", ",", "marker_file", "in", "six", ".", "iteritems", "(", "markers", ")", ":", "actual_file", "=", "marker", ".", "name", ".", "replace", "(", "'.wh.'", ",", "''", ")", "normalized_file", "=", "self", ".", "_normalize_path", "(", "actual_file", ")", "should_be_added_back", "=", "False", "if", "self", ".", "_file_should_be_skipped", "(", "normalized_file", ",", "added_symlinks", ")", ":", "self", ".", "log", ".", "debug", "(", "\"Skipping '%s' marker file, this file is on a symlink path\"", "%", "normalized_file", ")", "continue", "if", "normalized_file", "in", "tar_files", ":", "self", ".", "log", ".", "debug", "(", "\"Skipping '%s' marker file, this file was added earlier for some reason...\"", "%", "normalized_file", ")", "continue", "if", "files_in_layers", ":", "for", "files", "in", "files_in_layers", ".", "values", "(", ")", ":", "if", "normalized_file", "in", "files", ":", "should_be_added_back", "=", "True", "break", "else", ":", "# There are no previous layers, so we need to add it back", "# In fact this shouldn't happen since having a marker file", "# where there is no previous layer does not make sense.", "should_be_added_back", "=", "True", "if", "should_be_added_back", ":", "self", ".", "log", ".", "debug", "(", "\"Adding '%s' marker file back...\"", "%", "marker", ".", "name", ")", "# Marker files on AUFS are hardlinks, we need to create", "# regular files, therefore we need to recreate the tarinfo", "# object", "tar", ".", "addfile", "(", "tarfile", ".", "TarInfo", "(", "name", "=", "marker", ".", "name", ")", ",", "marker_file", ")", "# Add the file name to the list too to avoid re-reading all files", "# in tar archive", "tar_files", ".", "append", "(", "normalized_file", ")", "else", ":", "self", ".", "log", ".", "debug", "(", "\"Skipping '%s' marker file...\"", "%", "marker", ".", "name", ")" ]
This method is responsible for adding back all markers that were not added to the squashed layer AND whose files can be found in layers we do not squash.
[ "This", "method", "is", "responsible", "for", "adding", "back", "all", "markers", "that", "were", "not", "added", "to", "the", "squashed", "layer", "AND", "files", "they", "refer", "to", "can", "be", "found", "in", "layers", "we", "do", "not", "squash", "." ]
89e0297942be268791aff2098b7ebfa50d82f8e8
https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L523-L582
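The core decision in `_add_markers` can be distilled to a small predicate. The sketch below is a simplification under the assumption that `_normalize_path` prefixes paths with '/'; all inputs are made up:

```python
# Distilled decision rule from _add_markers, with made-up inputs:
# a marker is re-added only when the path it hides still exists in
# one of the layers that were moved unchanged.
def should_add_back(marker_name, files_in_layers, tar_files):
    normalized = "/" + marker_name.replace(".wh.", "").lstrip("/")
    if normalized in tar_files:          # already present in squashed tar
        return False
    if not files_in_layers:              # no previous layers at all
        return True
    return any(normalized in files for files in files_in_layers.values())

files_in_layers = {"layer1": ["/etc/old.conf"]}
print(should_add_back("etc/.wh.old.conf", files_in_layers, []))   # True
print(should_add_back("etc/.wh.gone.conf", files_in_layers, []))  # False
```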
13,465
goldmann/docker-squash
docker_squash/lib/xtarfile.py
_proc_pax
def _proc_pax(self, filetar): """Process an extended or global header as described in POSIX.1-2001.""" # Read the header information. buf = filetar.fileobj.read(self._block(self.size)) # A pax header stores supplemental information for either # the following file (extended) or all following files # (global). if self.type == tarfile.XGLTYPE: pax_headers = filetar.pax_headers else: pax_headers = filetar.pax_headers.copy() # Parse pax header information. A record looks like that: # "%d %s=%s\n" % (length, keyword, value). length is the size # of the complete record including the length field itself and # the newline. keyword and value are both UTF-8 encoded strings. regex = re.compile(r"(\d+) ([^=]+)=", re.U) pos = 0 while True: match = regex.match(buf, pos) if not match: break length, keyword = match.groups() length = int(length) value = buf[match.end(2) + 1:match.start(1) + length - 1] try: keyword = keyword.decode("utf8") except Exception: pass try: value = value.decode("utf8") except Exception: pass pax_headers[keyword] = value pos += length # Fetch the next header. try: next = self.fromtarfile(filetar) except tarfile.HeaderError: raise tarfile.SubsequentHeaderError("missing or bad subsequent header") if self.type in (tarfile.XHDTYPE, tarfile.SOLARIS_XHDTYPE): # Patch the TarInfo object with the extended header info. next._apply_pax_info(pax_headers, filetar.encoding, filetar.errors) next.offset = self.offset if "size" in pax_headers: # If the extended header replaces the size field, # we need to recalculate the offset where the next # header starts. offset = next.offset_data if next.isreg() or next.type not in tarfile.SUPPORTED_TYPES: offset += next._block(next.size) filetar.offset = offset return next
python
def _proc_pax(self, filetar): """Process an extended or global header as described in POSIX.1-2001.""" # Read the header information. buf = filetar.fileobj.read(self._block(self.size)) # A pax header stores supplemental information for either # the following file (extended) or all following files # (global). if self.type == tarfile.XGLTYPE: pax_headers = filetar.pax_headers else: pax_headers = filetar.pax_headers.copy() # Parse pax header information. A record looks like that: # "%d %s=%s\n" % (length, keyword, value). length is the size # of the complete record including the length field itself and # the newline. keyword and value are both UTF-8 encoded strings. regex = re.compile(r"(\d+) ([^=]+)=", re.U) pos = 0 while True: match = regex.match(buf, pos) if not match: break length, keyword = match.groups() length = int(length) value = buf[match.end(2) + 1:match.start(1) + length - 1] try: keyword = keyword.decode("utf8") except Exception: pass try: value = value.decode("utf8") except Exception: pass pax_headers[keyword] = value pos += length # Fetch the next header. try: next = self.fromtarfile(filetar) except tarfile.HeaderError: raise tarfile.SubsequentHeaderError("missing or bad subsequent header") if self.type in (tarfile.XHDTYPE, tarfile.SOLARIS_XHDTYPE): # Patch the TarInfo object with the extended header info. next._apply_pax_info(pax_headers, filetar.encoding, filetar.errors) next.offset = self.offset if "size" in pax_headers: # If the extended header replaces the size field, # we need to recalculate the offset where the next # header starts. offset = next.offset_data if next.isreg() or next.type not in tarfile.SUPPORTED_TYPES: offset += next._block(next.size) filetar.offset = offset return next
[ "def", "_proc_pax", "(", "self", ",", "filetar", ")", ":", "# Read the header information.", "buf", "=", "filetar", ".", "fileobj", ".", "read", "(", "self", ".", "_block", "(", "self", ".", "size", ")", ")", "# A pax header stores supplemental information for either", "# the following file (extended) or all following files", "# (global).", "if", "self", ".", "type", "==", "tarfile", ".", "XGLTYPE", ":", "pax_headers", "=", "filetar", ".", "pax_headers", "else", ":", "pax_headers", "=", "filetar", ".", "pax_headers", ".", "copy", "(", ")", "# Parse pax header information. A record looks like that:", "# \"%d %s=%s\\n\" % (length, keyword, value). length is the size", "# of the complete record including the length field itself and", "# the newline. keyword and value are both UTF-8 encoded strings.", "regex", "=", "re", ".", "compile", "(", "r\"(\\d+) ([^=]+)=\"", ",", "re", ".", "U", ")", "pos", "=", "0", "while", "True", ":", "match", "=", "regex", ".", "match", "(", "buf", ",", "pos", ")", "if", "not", "match", ":", "break", "length", ",", "keyword", "=", "match", ".", "groups", "(", ")", "length", "=", "int", "(", "length", ")", "value", "=", "buf", "[", "match", ".", "end", "(", "2", ")", "+", "1", ":", "match", ".", "start", "(", "1", ")", "+", "length", "-", "1", "]", "try", ":", "keyword", "=", "keyword", ".", "decode", "(", "\"utf8\"", ")", "except", "Exception", ":", "pass", "try", ":", "value", "=", "value", ".", "decode", "(", "\"utf8\"", ")", "except", "Exception", ":", "pass", "pax_headers", "[", "keyword", "]", "=", "value", "pos", "+=", "length", "# Fetch the next header.", "try", ":", "next", "=", "self", ".", "fromtarfile", "(", "filetar", ")", "except", "tarfile", ".", "HeaderError", ":", "raise", "tarfile", ".", "SubsequentHeaderError", "(", "\"missing or bad subsequent header\"", ")", "if", "self", ".", "type", "in", "(", "tarfile", ".", "XHDTYPE", ",", "tarfile", ".", "SOLARIS_XHDTYPE", ")", ":", "# Patch the TarInfo object with the extended header info.", "next", ".", "_apply_pax_info", "(", "pax_headers", ",", "filetar", ".", "encoding", ",", "filetar", ".", "errors", ")", "next", ".", "offset", "=", "self", ".", "offset", "if", "\"size\"", "in", "pax_headers", ":", "# If the extended header replaces the size field,", "# we need to recalculate the offset where the next", "# header starts.", "offset", "=", "next", ".", "offset_data", "if", "next", ".", "isreg", "(", ")", "or", "next", ".", "type", "not", "in", "tarfile", ".", "SUPPORTED_TYPES", ":", "offset", "+=", "next", ".", "_block", "(", "next", ".", "size", ")", "filetar", ".", "offset", "=", "offset", "return", "next" ]
Process an extended or global header as described in POSIX.1-2001.
[ "Process", "an", "extended", "or", "global", "header", "as", "described", "in", "POSIX", ".", "1", "-", "2001", "." ]
89e0297942be268791aff2098b7ebfa50d82f8e8
https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/lib/xtarfile.py#L20-L81
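The pax record wire format is `"%d %s=%s\n"`, where the leading number counts the entire record including its own digits and the trailing newline. A self-contained sketch of the same parsing loop over two hand-built records:

```python
import re

# Two pax records in the "%d %s=%s\n" wire format; the leading number
# is the length of the whole record, including itself and the newline.
buf = b"18 path=long/name\n16 size=1048576\n"

regex = re.compile(br"(\d+) ([^=]+)=")
pax_headers, pos = {}, 0
while True:
    match = regex.match(buf, pos)
    if not match:
        break
    length = int(match.group(1))
    keyword = match.group(2).decode("utf8")
    # Value runs from just past '=' to the end of the record, minus '\n'.
    value = buf[match.end(2) + 1:pos + length - 1].decode("utf8")
    pax_headers[keyword] = value
    pos += length

print(pax_headers)  # {'path': 'long/name', 'size': '1048576'}
```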
13,466
goldmann/docker-squash
docker_squash/lib/xtarfile.py
_create_pax_generic_header
def _create_pax_generic_header(cls, pax_headers, type=tarfile.XHDTYPE): """Return a POSIX.1-2001 extended or global header sequence that contains a list of keyword, value pairs. The values must be unicode objects. """ records = [] for keyword, value in pax_headers.iteritems(): try: keyword = keyword.encode("utf8") except Exception: pass try: value = value.encode("utf8") except Exception: pass l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n' n = p = 0 while True: n = l + len(str(p)) if n == p: break p = n records.append("%d %s=%s\n" % (p, keyword, value)) records = "".join(records) # We use a hardcoded "././@PaxHeader" name like star does # instead of the one that POSIX recommends. info = {} info["name"] = "././@PaxHeader" info["type"] = type info["size"] = len(records) info["magic"] = tarfile.POSIX_MAGIC # Create pax header + record blocks. return cls._create_header(info, tarfile.USTAR_FORMAT) + \ cls._create_payload(records)
python
def _create_pax_generic_header(cls, pax_headers, type=tarfile.XHDTYPE): """Return a POSIX.1-2001 extended or global header sequence that contains a list of keyword, value pairs. The values must be unicode objects. """ records = [] for keyword, value in pax_headers.iteritems(): try: keyword = keyword.encode("utf8") except Exception: pass try: value = value.encode("utf8") except Exception: pass l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n' n = p = 0 while True: n = l + len(str(p)) if n == p: break p = n records.append("%d %s=%s\n" % (p, keyword, value)) records = "".join(records) # We use a hardcoded "././@PaxHeader" name like star does # instead of the one that POSIX recommends. info = {} info["name"] = "././@PaxHeader" info["type"] = type info["size"] = len(records) info["magic"] = tarfile.POSIX_MAGIC # Create pax header + record blocks. return cls._create_header(info, tarfile.USTAR_FORMAT) + \ cls._create_payload(records)
[ "def", "_create_pax_generic_header", "(", "cls", ",", "pax_headers", ",", "type", "=", "tarfile", ".", "XHDTYPE", ")", ":", "records", "=", "[", "]", "for", "keyword", ",", "value", "in", "pax_headers", ".", "iteritems", "(", ")", ":", "try", ":", "keyword", "=", "keyword", ".", "encode", "(", "\"utf8\"", ")", "except", "Exception", ":", "pass", "try", ":", "value", "=", "value", ".", "encode", "(", "\"utf8\"", ")", "except", "Exception", ":", "pass", "l", "=", "len", "(", "keyword", ")", "+", "len", "(", "value", ")", "+", "3", "# ' ' + '=' + '\\n'", "n", "=", "p", "=", "0", "while", "True", ":", "n", "=", "l", "+", "len", "(", "str", "(", "p", ")", ")", "if", "n", "==", "p", ":", "break", "p", "=", "n", "records", ".", "append", "(", "\"%d %s=%s\\n\"", "%", "(", "p", ",", "keyword", ",", "value", ")", ")", "records", "=", "\"\"", ".", "join", "(", "records", ")", "# We use a hardcoded \"././@PaxHeader\" name like star does", "# instead of the one that POSIX recommends.", "info", "=", "{", "}", "info", "[", "\"name\"", "]", "=", "\"././@PaxHeader\"", "info", "[", "\"type\"", "]", "=", "type", "info", "[", "\"size\"", "]", "=", "len", "(", "records", ")", "info", "[", "\"magic\"", "]", "=", "tarfile", ".", "POSIX_MAGIC", "# Create pax header + record blocks.", "return", "cls", ".", "_create_header", "(", "info", ",", "tarfile", ".", "USTAR_FORMAT", ")", "+", "cls", ".", "_create_payload", "(", "records", ")" ]
Return a POSIX.1-2001 extended or global header sequence that contains a list of keyword, value pairs. The values must be unicode objects.
[ "Return", "a", "POSIX", ".", "1", "-", "2001", "extended", "or", "global", "header", "sequence", "that", "contains", "a", "list", "of", "keyword", "value", "pairs", ".", "The", "values", "must", "be", "unicode", "objects", "." ]
89e0297942be268791aff2098b7ebfa50d82f8e8
https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/lib/xtarfile.py#L84-L122
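The `n = p = 0; while True` loop above exists because the record length field counts its own decimal digits, so the writer has to solve a small fixpoint. A sketch isolating just that step:

```python
# The record length includes its own decimal digits, so the loop below
# searches for a fixpoint: a value p whose digit count keeps p stable.
def pax_record(keyword, value):
    base = len(keyword) + len(value) + 3  # ' ' + '=' + '\n'
    n = p = 0
    while True:
        n = base + len(str(p))
        if n == p:
            break
        p = n
    return "%d %s=%s\n" % (p, keyword, value)

record = pax_record("path", "long/name")
print(record)             # '18 path=long/name\n'
print(len(record) == 18)  # True: the length field counts the whole record
```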
13,467
goldmann/docker-squash
docker_squash/v2_image.py
V2Image._read_json_file
def _read_json_file(self, json_file): """ Helper function to read JSON file as OrderedDict """ self.log.debug("Reading '%s' JSON file..." % json_file) with open(json_file, 'r') as f: return json.load(f, object_pairs_hook=OrderedDict)
python
def _read_json_file(self, json_file): """ Helper function to read JSON file as OrderedDict """ self.log.debug("Reading '%s' JSON file..." % json_file) with open(json_file, 'r') as f: return json.load(f, object_pairs_hook=OrderedDict)
[ "def", "_read_json_file", "(", "self", ",", "json_file", ")", ":", "self", ".", "log", ".", "debug", "(", "\"Reading '%s' JSON file...\"", "%", "json_file", ")", "with", "open", "(", "json_file", ",", "'r'", ")", "as", "f", ":", "return", "json", ".", "load", "(", "f", ",", "object_pairs_hook", "=", "OrderedDict", ")" ]
Helper function to read JSON file as OrderedDict
[ "Helper", "function", "to", "read", "JSON", "file", "as", "OrderedDict" ]
89e0297942be268791aff2098b7ebfa50d82f8e8
https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/v2_image.py#L122-L128
13,468
goldmann/docker-squash
docker_squash/v2_image.py
V2Image._read_layer_paths
def _read_layer_paths(self, old_image_config, old_image_manifest, layers_to_move):
    """
    In case of v2 format, layer IDs are not the same as the IDs used in
    the exported tar archive to name directories for layers. These IDs
    can be found in the configuration files saved with the image - we
    need to read them.
    """

    # In manifest.json we do not have listed all layers
    # but only layers that do contain some data.
    current_manifest_layer = 0

    layer_paths_to_move = []
    layer_paths_to_squash = []

    # Iterate over image history, from base image to top layer
    for i, layer in enumerate(old_image_config['history']):

        # If it's not an empty layer get the id
        # (directory name) where the layer's data is
        # stored
        if not layer.get('empty_layer', False):
            layer_id = old_image_manifest['Layers'][
                current_manifest_layer].rsplit('/')[0]

            # Check if this layer should be moved or squashed
            if len(layers_to_move) > i:
                layer_paths_to_move.append(layer_id)
            else:
                layer_paths_to_squash.append(layer_id)

            current_manifest_layer += 1

    return layer_paths_to_squash, layer_paths_to_move
python
def _read_layer_paths(self, old_image_config, old_image_manifest, layers_to_move):
    """
    In case of v2 format, layer IDs are not the same as the IDs used in
    the exported tar archive to name directories for layers. These IDs
    can be found in the configuration files saved with the image - we
    need to read them.
    """

    # In manifest.json we do not have listed all layers
    # but only layers that do contain some data.
    current_manifest_layer = 0

    layer_paths_to_move = []
    layer_paths_to_squash = []

    # Iterate over image history, from base image to top layer
    for i, layer in enumerate(old_image_config['history']):

        # If it's not an empty layer get the id
        # (directory name) where the layer's data is
        # stored
        if not layer.get('empty_layer', False):
            layer_id = old_image_manifest['Layers'][
                current_manifest_layer].rsplit('/')[0]

            # Check if this layer should be moved or squashed
            if len(layers_to_move) > i:
                layer_paths_to_move.append(layer_id)
            else:
                layer_paths_to_squash.append(layer_id)

            current_manifest_layer += 1

    return layer_paths_to_squash, layer_paths_to_move
[ "def", "_read_layer_paths", "(", "self", ",", "old_image_config", ",", "old_image_manifest", ",", "layers_to_move", ")", ":", "# In manifest.json we do not have listed all layers", "# but only layers that do contain some data.", "current_manifest_layer", "=", "0", "layer_paths_to_move", "=", "[", "]", "layer_paths_to_squash", "=", "[", "]", "# Iterate over image history, from base image to top layer", "for", "i", ",", "layer", "in", "enumerate", "(", "old_image_config", "[", "'history'", "]", ")", ":", "# If it's not an empty layer get the id", "# (directory name) where the layer's data is", "# stored", "if", "not", "layer", ".", "get", "(", "'empty_layer'", ",", "False", ")", ":", "layer_id", "=", "old_image_manifest", "[", "'Layers'", "]", "[", "current_manifest_layer", "]", ".", "rsplit", "(", "'/'", ")", "[", "0", "]", "# Check if this layer should be moved or squashed", "if", "len", "(", "layers_to_move", ")", ">", "i", ":", "layer_paths_to_move", ".", "append", "(", "layer_id", ")", "else", ":", "layer_paths_to_squash", ".", "append", "(", "layer_id", ")", "current_manifest_layer", "+=", "1", "return", "layer_paths_to_squash", ",", "layer_paths_to_move" ]
In case of v2 format, layer IDs are not the same as the IDs used in the exported tar archive to name directories for layers. These IDs can be found in the configuration files saved with the image - we need to read them.
[ "In", "case", "of", "v2", "format", "layer", "id", "s", "are", "not", "the", "same", "as", "the", "id", "s", "used", "in", "the", "exported", "tar", "archive", "to", "name", "directories", "for", "layers", ".", "These", "id", "s", "can", "be", "found", "in", "the", "configuration", "files", "saved", "with", "the", "image", "-", "we", "need", "to", "read", "them", "." ]
89e0297942be268791aff2098b7ebfa50d82f8e8
https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/v2_image.py#L130-L163
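A standalone sketch of the same walk over made-up v2 metadata: `history` lists every build step, while the manifest's `Layers` only lists steps that produced filesystem data, so the counter advances only on non-empty layers:

```python
# Made-up v2 metadata: 'history' lists every build step, while the
# manifest's 'Layers' only lists steps that produced filesystem data.
config = {"history": [
    {},                        # base layer, has data
    {"empty_layer": True},     # e.g. an ENV instruction, no data
    {},                        # top layer, has data
]}
manifest = {"Layers": ["abc123/layer.tar", "def456/layer.tar"]}
layers_to_move = ["sha256:base"]  # only the first history entry is kept

current, to_move, to_squash = 0, [], []
for i, layer in enumerate(config["history"]):
    if not layer.get("empty_layer", False):
        layer_id = manifest["Layers"][current].rsplit("/")[0]
        (to_move if len(layers_to_move) > i else to_squash).append(layer_id)
        current += 1

print(to_move, to_squash)  # ['abc123'] ['def456']
```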
13,469
goldmann/docker-squash
docker_squash/v2_image.py
V2Image._generate_squashed_layer_path_id
def _generate_squashed_layer_path_id(self):
    """
    This function generates the id used to name the directory to
    store the squashed layer content in the archive.

    This mimics what Docker does here:
    https://github.com/docker/docker/blob/v1.10.0-rc1/image/v1/imagev1.go#L42

    To make it simpler we reuse the old image metadata and modify it to
    look exactly like what Docker generates, see
    https://github.com/docker/docker/blob/v1.10.0-rc1/image/v1/imagev1.go#L64
    """

    # Using OrderedDict, because order of JSON elements is important
    v1_metadata = OrderedDict(self.old_image_config)

    # Update image creation date
    v1_metadata['created'] = self.date

    # Remove unnecessary elements
    # Do not fail if key is not found
    for key in 'history', 'rootfs', 'container':
        v1_metadata.pop(key, None)

    # Docker internally changes the order of keys between
    # exported metadata (why oh why?!). We need to add 'os'
    # element after 'layer_id'
    operating_system = v1_metadata.pop('os', None)

    # The 'layer_id' element is the chain_id of the
    # squashed layer
    v1_metadata['layer_id'] = "sha256:%s" % self.chain_ids[-1]

    # Add back 'os' element
    if operating_system:
        v1_metadata['os'] = operating_system

    # The 'parent' element is the name of the directory (inside the
    # exported tar archive) of the last layer that we move
    # (layer below squashed layer)
    if self.layer_paths_to_move:
        if self.layer_paths_to_squash:
            parent = self.layer_paths_to_move[-1]
        else:
            parent = self.layer_paths_to_move[0]

        v1_metadata['parent'] = "sha256:%s" % parent

    # The 'Image' element is the id of the layer from which we squash
    if self.squash_id:
        # Update image id, should be one layer below squashed layer
        v1_metadata['config']['Image'] = self.squash_id
    else:
        v1_metadata['config']['Image'] = ""

    # Get the sha256sum of the JSON exported metadata,
    # we do not care about the metadata anymore
    sha = self._dump_json(v1_metadata)[1]

    return sha
python
def _generate_squashed_layer_path_id(self):
    """
    This function generates the id used to name the directory to
    store the squashed layer content in the archive.

    This mimics what Docker does here:
    https://github.com/docker/docker/blob/v1.10.0-rc1/image/v1/imagev1.go#L42

    To make it simpler we reuse the old image metadata and modify it to
    look exactly like what Docker generates, see
    https://github.com/docker/docker/blob/v1.10.0-rc1/image/v1/imagev1.go#L64
    """

    # Using OrderedDict, because order of JSON elements is important
    v1_metadata = OrderedDict(self.old_image_config)

    # Update image creation date
    v1_metadata['created'] = self.date

    # Remove unnecessary elements
    # Do not fail if key is not found
    for key in 'history', 'rootfs', 'container':
        v1_metadata.pop(key, None)

    # Docker internally changes the order of keys between
    # exported metadata (why oh why?!). We need to add 'os'
    # element after 'layer_id'
    operating_system = v1_metadata.pop('os', None)

    # The 'layer_id' element is the chain_id of the
    # squashed layer
    v1_metadata['layer_id'] = "sha256:%s" % self.chain_ids[-1]

    # Add back 'os' element
    if operating_system:
        v1_metadata['os'] = operating_system

    # The 'parent' element is the name of the directory (inside the
    # exported tar archive) of the last layer that we move
    # (layer below squashed layer)
    if self.layer_paths_to_move:
        if self.layer_paths_to_squash:
            parent = self.layer_paths_to_move[-1]
        else:
            parent = self.layer_paths_to_move[0]

        v1_metadata['parent'] = "sha256:%s" % parent

    # The 'Image' element is the id of the layer from which we squash
    if self.squash_id:
        # Update image id, should be one layer below squashed layer
        v1_metadata['config']['Image'] = self.squash_id
    else:
        v1_metadata['config']['Image'] = ""

    # Get the sha256sum of the JSON exported metadata,
    # we do not care about the metadata anymore
    sha = self._dump_json(v1_metadata)[1]

    return sha
[ "def", "_generate_squashed_layer_path_id", "(", "self", ")", ":", "# Using OrderedDict, because order of JSON elements is important", "v1_metadata", "=", "OrderedDict", "(", "self", ".", "old_image_config", ")", "# Update image creation date", "v1_metadata", "[", "'created'", "]", "=", "self", ".", "date", "# Remove unnecessary elements", "# Do not fail if key is not found", "for", "key", "in", "'history'", ",", "'rootfs'", ",", "'container'", ":", "v1_metadata", ".", "pop", "(", "key", ",", "None", ")", "# Docker internally changes the order of keys between", "# exported metadata (why oh why?!). We need to add 'os'", "# element after 'layer_id'", "operating_system", "=", "v1_metadata", ".", "pop", "(", "'os'", ",", "None", ")", "# The 'layer_id' element is the chain_id of the", "# squashed layer", "v1_metadata", "[", "'layer_id'", "]", "=", "\"sha256:%s\"", "%", "self", ".", "chain_ids", "[", "-", "1", "]", "# Add back 'os' element", "if", "operating_system", ":", "v1_metadata", "[", "'os'", "]", "=", "operating_system", "# The 'parent' element is the name of the directory (inside the", "# exported tar archive) of the last layer that we move", "# (layer below squashed layer)", "if", "self", ".", "layer_paths_to_move", ":", "if", "self", ".", "layer_paths_to_squash", ":", "parent", "=", "self", ".", "layer_paths_to_move", "[", "-", "1", "]", "else", ":", "parent", "=", "self", ".", "layer_paths_to_move", "[", "0", "]", "v1_metadata", "[", "'parent'", "]", "=", "\"sha256:%s\"", "%", "parent", "# The 'Image' element is the id of the layer from which we squash", "if", "self", ".", "squash_id", ":", "# Update image id, should be one layer below squashed layer", "v1_metadata", "[", "'config'", "]", "[", "'Image'", "]", "=", "self", ".", "squash_id", "else", ":", "v1_metadata", "[", "'config'", "]", "[", "'Image'", "]", "=", "\"\"", "# Get the sha256sum of the JSON exported metadata,", "# we do not care about the metadata anymore", "sha", "=", "self", ".", "_dump_json", "(", "v1_metadata", ")", "[", "1", "]", "return", "sha" ]
This function generates the id used to name the directory to store the squashed layer content in the archive. This mimics what Docker does here: https://github.com/docker/docker/blob/v1.10.0-rc1/image/v1/imagev1.go#L42 To make it simpler we reuse the old image metadata and modify it to look exactly like what Docker generates, see https://github.com/docker/docker/blob/v1.10.0-rc1/image/v1/imagev1.go#L64
[ "This", "function", "generates", "the", "id", "used", "to", "name", "the", "directory", "to", "store", "the", "squashed", "layer", "content", "in", "the", "archive", "." ]
89e0297942be268791aff2098b7ebfa50d82f8e8
https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/v2_image.py#L215-L273
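The returned id is just the sha256 of the serialized v1 metadata. Assuming `_dump_json` returns a `(json_text, sha256_hexdigest)` pair (the helper below is a guess at that contract), the hashing step looks like this:

```python
import hashlib
import json
from collections import OrderedDict

# Assumed contract for _dump_json: serialize the metadata and return
# both the JSON text and its sha256 hex digest.
def dump_json(data):
    text = json.dumps(data, separators=(",", ":"))
    return text, hashlib.sha256(text.encode("utf8")).hexdigest()

v1_metadata = OrderedDict([
    ("created", "1970-01-01T00:00:00Z"),
    ("layer_id", "sha256:deadbeef"),  # chain id of the squashed layer
    ("parent", "sha256:cafebabe"),    # last moved layer's directory
])
_, layer_path_id = dump_json(v1_metadata)
print(layer_path_id)  # hex digest used as the layer directory name
```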
13,470
django-dbbackup/django-dbbackup
dbbackup/management/commands/_base.py
BaseDbBackupCommand.write_local_file
def write_local_file(self, outputfile, path): """Write file to the desired path.""" self.logger.info("Writing file to %s", path) outputfile.seek(0) with open(path, 'wb') as fd: copyfileobj(outputfile, fd)
python
def write_local_file(self, outputfile, path): """Write file to the desired path.""" self.logger.info("Writing file to %s", path) outputfile.seek(0) with open(path, 'wb') as fd: copyfileobj(outputfile, fd)
[ "def", "write_local_file", "(", "self", ",", "outputfile", ",", "path", ")", ":", "self", ".", "logger", ".", "info", "(", "\"Writing file to %s\"", ",", "path", ")", "outputfile", ".", "seek", "(", "0", ")", "with", "open", "(", "path", ",", "'wb'", ")", "as", "fd", ":", "copyfileobj", "(", "outputfile", ",", "fd", ")" ]
Write file to the desired path.
[ "Write", "file", "to", "the", "desired", "path", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/management/commands/_base.py#L94-L99
13,471
django-dbbackup/django-dbbackup
dbbackup/management/commands/_base.py
BaseDbBackupCommand._cleanup_old_backups
def _cleanup_old_backups(self, database=None, servername=None):
    """
    Cleanup old backups, keeping the number of backups specified by
    DBBACKUP_CLEANUP_KEEP and any backups that occur on the first of
    the month.
    """
    self.storage.clean_old_backups(encrypted=self.encrypt,
                                   compressed=self.compress,
                                   content_type=self.content_type,
                                   database=database,
                                   servername=servername)
python
def _cleanup_old_backups(self, database=None, servername=None):
    """
    Cleanup old backups, keeping the number of backups specified by
    DBBACKUP_CLEANUP_KEEP and any backups that occur on the first of
    the month.
    """
    self.storage.clean_old_backups(encrypted=self.encrypt,
                                   compressed=self.compress,
                                   content_type=self.content_type,
                                   database=database,
                                   servername=servername)
[ "def", "_cleanup_old_backups", "(", "self", ",", "database", "=", "None", ",", "servername", "=", "None", ")", ":", "self", ".", "storage", ".", "clean_old_backups", "(", "encrypted", "=", "self", ".", "encrypt", ",", "compressed", "=", "self", ".", "compress", ",", "content_type", "=", "self", ".", "content_type", ",", "database", "=", "database", ",", "servername", "=", "servername", ")" ]
Cleanup old backups, keeping the number of backups specified by DBBACKUP_CLEANUP_KEEP and any backups that occur on the first of the month.
[ "Cleanup", "old", "backups", "keeping", "the", "number", "of", "backups", "specified", "by", "DBBACKUP_CLEANUP_KEEP", "and", "any", "backups", "that", "occur", "on", "first", "of", "the", "month", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/management/commands/_base.py#L123-L132
13,472
django-dbbackup/django-dbbackup
dbbackup/management/commands/dbbackup.py
Command._save_new_backup
def _save_new_backup(self, database): """ Save a new backup file. """ self.logger.info("Backing Up Database: %s", database['NAME']) # Get backup and name filename = self.connector.generate_filename(self.servername) outputfile = self.connector.create_dump() # Apply trans if self.compress: compressed_file, filename = utils.compress_file(outputfile, filename) outputfile = compressed_file if self.encrypt: encrypted_file, filename = utils.encrypt_file(outputfile, filename) outputfile = encrypted_file # Set file name filename = self.filename if self.filename else filename self.logger.debug("Backup size: %s", utils.handle_size(outputfile)) # Store backup outputfile.seek(0) if self.path is None: self.write_to_storage(outputfile, filename) else: self.write_local_file(outputfile, self.path)
python
def _save_new_backup(self, database): """ Save a new backup file. """ self.logger.info("Backing Up Database: %s", database['NAME']) # Get backup and name filename = self.connector.generate_filename(self.servername) outputfile = self.connector.create_dump() # Apply trans if self.compress: compressed_file, filename = utils.compress_file(outputfile, filename) outputfile = compressed_file if self.encrypt: encrypted_file, filename = utils.encrypt_file(outputfile, filename) outputfile = encrypted_file # Set file name filename = self.filename if self.filename else filename self.logger.debug("Backup size: %s", utils.handle_size(outputfile)) # Store backup outputfile.seek(0) if self.path is None: self.write_to_storage(outputfile, filename) else: self.write_local_file(outputfile, self.path)
[ "def", "_save_new_backup", "(", "self", ",", "database", ")", ":", "self", ".", "logger", ".", "info", "(", "\"Backing Up Database: %s\"", ",", "database", "[", "'NAME'", "]", ")", "# Get backup and name", "filename", "=", "self", ".", "connector", ".", "generate_filename", "(", "self", ".", "servername", ")", "outputfile", "=", "self", ".", "connector", ".", "create_dump", "(", ")", "# Apply trans", "if", "self", ".", "compress", ":", "compressed_file", ",", "filename", "=", "utils", ".", "compress_file", "(", "outputfile", ",", "filename", ")", "outputfile", "=", "compressed_file", "if", "self", ".", "encrypt", ":", "encrypted_file", ",", "filename", "=", "utils", ".", "encrypt_file", "(", "outputfile", ",", "filename", ")", "outputfile", "=", "encrypted_file", "# Set file name", "filename", "=", "self", ".", "filename", "if", "self", ".", "filename", "else", "filename", "self", ".", "logger", ".", "debug", "(", "\"Backup size: %s\"", ",", "utils", ".", "handle_size", "(", "outputfile", ")", ")", "# Store backup", "outputfile", ".", "seek", "(", "0", ")", "if", "self", ".", "path", "is", "None", ":", "self", ".", "write_to_storage", "(", "outputfile", ",", "filename", ")", "else", ":", "self", ".", "write_local_file", "(", "outputfile", ",", "self", ".", "path", ")" ]
Save a new backup file.
[ "Save", "a", "new", "backup", "file", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/management/commands/dbbackup.py#L67-L90
13,473
django-dbbackup/django-dbbackup
dbbackup/management/commands/mediabackup.py
Command._explore_storage
def _explore_storage(self): """Generator of all files contained in media storage.""" path = '' dirs = [path] while dirs: path = dirs.pop() subdirs, files = self.media_storage.listdir(path) for media_filename in files: yield os.path.join(path, media_filename) dirs.extend([os.path.join(path, subdir) for subdir in subdirs])
python
def _explore_storage(self): """Generator of all files contained in media storage.""" path = '' dirs = [path] while dirs: path = dirs.pop() subdirs, files = self.media_storage.listdir(path) for media_filename in files: yield os.path.join(path, media_filename) dirs.extend([os.path.join(path, subdir) for subdir in subdirs])
[ "def", "_explore_storage", "(", "self", ")", ":", "path", "=", "''", "dirs", "=", "[", "path", "]", "while", "dirs", ":", "path", "=", "dirs", ".", "pop", "(", ")", "subdirs", ",", "files", "=", "self", ".", "media_storage", ".", "listdir", "(", "path", ")", "for", "media_filename", "in", "files", ":", "yield", "os", ".", "path", ".", "join", "(", "path", ",", "media_filename", ")", "dirs", ".", "extend", "(", "[", "os", ".", "path", ".", "join", "(", "path", ",", "subdir", ")", "for", "subdir", "in", "subdirs", "]", ")" ]
Generator of all files contained in media storage.
[ "Generator", "of", "all", "files", "contained", "in", "media", "storage", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/management/commands/mediabackup.py#L59-L68
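This is an iterative depth-first walk over Django's `Storage.listdir()` contract, which returns a `(subdirs, files)` pair. A sketch against a dict-backed fake storage:

```python
import os

# Fake Django storage: listdir(path) -> (subdirs, files); the tree
# below is made up for illustration.
TREE = {
    "": (["img"], ["a.txt"]),
    "img": ([], ["b.png", "c.png"]),
}

def listdir(path):
    return TREE[path]

def explore_storage():
    dirs = [""]
    while dirs:                      # iterative depth-first walk
        path = dirs.pop()
        subdirs, files = listdir(path)
        for name in files:
            yield os.path.join(path, name)
        dirs.extend(os.path.join(path, d) for d in subdirs)

print(list(explore_storage()))  # ['a.txt', 'img/b.png', 'img/c.png']
```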
13,474
django-dbbackup/django-dbbackup
dbbackup/management/commands/mediabackup.py
Command._create_tar
def _create_tar(self, name): """Create TAR file.""" fileobj = utils.create_spooled_temporary_file() mode = 'w:gz' if self.compress else 'w' tar_file = tarfile.open(name=name, fileobj=fileobj, mode=mode) for media_filename in self._explore_storage(): tarinfo = tarfile.TarInfo(media_filename) media_file = self.media_storage.open(media_filename) tarinfo.size = len(media_file) tar_file.addfile(tarinfo, media_file) # Close the TAR for writing tar_file.close() return fileobj
python
def _create_tar(self, name): """Create TAR file.""" fileobj = utils.create_spooled_temporary_file() mode = 'w:gz' if self.compress else 'w' tar_file = tarfile.open(name=name, fileobj=fileobj, mode=mode) for media_filename in self._explore_storage(): tarinfo = tarfile.TarInfo(media_filename) media_file = self.media_storage.open(media_filename) tarinfo.size = len(media_file) tar_file.addfile(tarinfo, media_file) # Close the TAR for writing tar_file.close() return fileobj
[ "def", "_create_tar", "(", "self", ",", "name", ")", ":", "fileobj", "=", "utils", ".", "create_spooled_temporary_file", "(", ")", "mode", "=", "'w:gz'", "if", "self", ".", "compress", "else", "'w'", "tar_file", "=", "tarfile", ".", "open", "(", "name", "=", "name", ",", "fileobj", "=", "fileobj", ",", "mode", "=", "mode", ")", "for", "media_filename", "in", "self", ".", "_explore_storage", "(", ")", ":", "tarinfo", "=", "tarfile", ".", "TarInfo", "(", "media_filename", ")", "media_file", "=", "self", ".", "media_storage", ".", "open", "(", "media_filename", ")", "tarinfo", ".", "size", "=", "len", "(", "media_file", ")", "tar_file", ".", "addfile", "(", "tarinfo", ",", "media_file", ")", "# Close the TAR for writing", "tar_file", ".", "close", "(", ")", "return", "fileobj" ]
Create TAR file.
[ "Create", "TAR", "file", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/management/commands/mediabackup.py#L70-L82
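When adding file-like objects with `tarfile.addfile`, `TarInfo.size` must be set before the call, or the member's payload is written with the wrong length. A self-contained in-memory version of the same pattern:

```python
import io
import tarfile

payloads = {"media/a.txt": b"hello", "media/b.txt": b"world!"}

fileobj = io.BytesIO()                 # stand-in for the spooled temp file
with tarfile.open(fileobj=fileobj, mode="w:gz") as tar:
    for name, data in payloads.items():
        info = tarfile.TarInfo(name)
        info.size = len(data)          # size must be set before addfile()
        tar.addfile(info, io.BytesIO(data))

fileobj.seek(0)
with tarfile.open(fileobj=fileobj, mode="r:gz") as tar:
    print(tar.getnames())  # ['media/a.txt', 'media/b.txt']
```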
13,475
django-dbbackup/django-dbbackup
dbbackup/management/commands/mediabackup.py
Command.backup_mediafiles
def backup_mediafiles(self): """ Create backup file and write it to storage. """ # Create file name extension = "tar%s" % ('.gz' if self.compress else '') filename = utils.filename_generate(extension, servername=self.servername, content_type=self.content_type) tarball = self._create_tar(filename) # Apply trans if self.encrypt: encrypted_file = utils.encrypt_file(tarball, filename) tarball, filename = encrypted_file self.logger.debug("Backup size: %s", utils.handle_size(tarball)) # Store backup tarball.seek(0) if self.path is None: self.write_to_storage(tarball, filename) else: self.write_local_file(tarball, self.path)
python
def backup_mediafiles(self): """ Create backup file and write it to storage. """ # Create file name extension = "tar%s" % ('.gz' if self.compress else '') filename = utils.filename_generate(extension, servername=self.servername, content_type=self.content_type) tarball = self._create_tar(filename) # Apply trans if self.encrypt: encrypted_file = utils.encrypt_file(tarball, filename) tarball, filename = encrypted_file self.logger.debug("Backup size: %s", utils.handle_size(tarball)) # Store backup tarball.seek(0) if self.path is None: self.write_to_storage(tarball, filename) else: self.write_local_file(tarball, self.path)
[ "def", "backup_mediafiles", "(", "self", ")", ":", "# Create file name", "extension", "=", "\"tar%s\"", "%", "(", "'.gz'", "if", "self", ".", "compress", "else", "''", ")", "filename", "=", "utils", ".", "filename_generate", "(", "extension", ",", "servername", "=", "self", ".", "servername", ",", "content_type", "=", "self", ".", "content_type", ")", "tarball", "=", "self", ".", "_create_tar", "(", "filename", ")", "# Apply trans", "if", "self", ".", "encrypt", ":", "encrypted_file", "=", "utils", ".", "encrypt_file", "(", "tarball", ",", "filename", ")", "tarball", ",", "filename", "=", "encrypted_file", "self", ".", "logger", ".", "debug", "(", "\"Backup size: %s\"", ",", "utils", ".", "handle_size", "(", "tarball", ")", ")", "# Store backup", "tarball", ".", "seek", "(", "0", ")", "if", "self", ".", "path", "is", "None", ":", "self", ".", "write_to_storage", "(", "tarball", ",", "filename", ")", "else", ":", "self", ".", "write_local_file", "(", "tarball", ",", "self", ".", "path", ")" ]
Create backup file and write it to storage.
[ "Create", "backup", "file", "and", "write", "it", "to", "storage", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/management/commands/mediabackup.py#L84-L105
13,476
django-dbbackup/django-dbbackup
dbbackup/utils.py
bytes_to_str
def bytes_to_str(byteVal, decimals=1):
    """
    Convert bytes to a human readable string.

    :param byteVal: Value to convert in bytes
    :type byteVal: int or float

    :param decimals: Number of decimals to display
    :type decimals: int

    :returns: Number of bytes with the best unit of measure
    :rtype: str
    """
    for unit, byte in BYTES:
        if (byteVal >= byte):
            if decimals == 0:
                return '%s %s' % (int(round(byteVal / byte, 0)), unit)
            return '%s %s' % (round(byteVal / byte, decimals), unit)
    return '%s B' % byteVal
python
def bytes_to_str(byteVal, decimals=1):
    """
    Convert bytes to a human readable string.

    :param byteVal: Value to convert in bytes
    :type byteVal: int or float

    :param decimals: Number of decimals to display
    :type decimals: int

    :returns: Number of bytes with the best unit of measure
    :rtype: str
    """
    for unit, byte in BYTES:
        if (byteVal >= byte):
            if decimals == 0:
                return '%s %s' % (int(round(byteVal / byte, 0)), unit)
            return '%s %s' % (round(byteVal / byte, decimals), unit)
    return '%s B' % byteVal
[ "def", "bytes_to_str", "(", "byteVal", ",", "decimals", "=", "1", ")", ":", "for", "unit", ",", "byte", "in", "BYTES", ":", "if", "(", "byteVal", ">=", "byte", ")", ":", "if", "decimals", "==", "0", ":", "return", "'%s %s'", "%", "(", "int", "(", "round", "(", "byteVal", "/", "byte", ",", "0", ")", ")", ",", "unit", ")", "return", "'%s %s'", "%", "(", "round", "(", "byteVal", "/", "byte", ",", "decimals", ")", ",", "unit", ")", "return", "'%s B'", "%", "byteVal" ]
Convert bytes to a human readable string. :param byteVal: Value to convert in bytes :type byteVal: int or float :param decimals: Number of decimals to display :type decimals: int :returns: Number of bytes with the best unit of measure :rtype: str
[ "Convert", "bytes", "to", "a", "human", "readable", "string", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L58-L76
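The function depends on a module-level `BYTES` table of `(unit, size)` pairs ordered largest-first, so the first matching unit wins. The table below is only a plausible stand-in, not the module's actual values:

```python
# A plausible BYTES table (largest unit first); illustrative only.
BYTES = (
    ("PiB", 1125899906842624.0),
    ("TiB", 1099511627776.0),
    ("GiB", 1073741824.0),
    ("MiB", 1048576.0),
    ("KiB", 1024.0),
    ("B", 1.0),
)

def bytes_to_str(byte_val, decimals=1):
    for unit, byte in BYTES:
        if byte_val >= byte:
            if decimals == 0:
                return "%s %s" % (int(round(byte_val / byte, 0)), unit)
            return "%s %s" % (round(byte_val / byte, decimals), unit)
    return "%s B" % byte_val

print(bytes_to_str(1536))     # '1.5 KiB'
print(bytes_to_str(1536, 0))  # '2 KiB'
print(bytes_to_str(0.5))      # '0.5 B' (falls through the loop)
```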
13,477
django-dbbackup/django-dbbackup
dbbackup/utils.py
mail_admins
def mail_admins(subject, message, fail_silently=False, connection=None, html_message=None): """Sends a message to the admins, as defined by the DBBACKUP_ADMINS setting.""" if not settings.ADMINS: return mail = EmailMultiAlternatives('%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject), message, settings.SERVER_EMAIL, [a[1] for a in settings.ADMINS], connection=connection) if html_message: mail.attach_alternative(html_message, 'text/html') mail.send(fail_silently=fail_silently)
python
def mail_admins(subject, message, fail_silently=False, connection=None, html_message=None): """Sends a message to the admins, as defined by the DBBACKUP_ADMINS setting.""" if not settings.ADMINS: return mail = EmailMultiAlternatives('%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject), message, settings.SERVER_EMAIL, [a[1] for a in settings.ADMINS], connection=connection) if html_message: mail.attach_alternative(html_message, 'text/html') mail.send(fail_silently=fail_silently)
[ "def", "mail_admins", "(", "subject", ",", "message", ",", "fail_silently", "=", "False", ",", "connection", "=", "None", ",", "html_message", "=", "None", ")", ":", "if", "not", "settings", ".", "ADMINS", ":", "return", "mail", "=", "EmailMultiAlternatives", "(", "'%s%s'", "%", "(", "settings", ".", "EMAIL_SUBJECT_PREFIX", ",", "subject", ")", ",", "message", ",", "settings", ".", "SERVER_EMAIL", ",", "[", "a", "[", "1", "]", "for", "a", "in", "settings", ".", "ADMINS", "]", ",", "connection", "=", "connection", ")", "if", "html_message", ":", "mail", ".", "attach_alternative", "(", "html_message", ",", "'text/html'", ")", "mail", ".", "send", "(", "fail_silently", "=", "fail_silently", ")" ]
Sends a message to the admins, as defined by the DBBACKUP_ADMINS setting.
[ "Sends", "a", "message", "to", "the", "admins", "as", "defined", "by", "the", "DBBACKUP_ADMINS", "setting", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L93-L103
13,478
django-dbbackup/django-dbbackup
dbbackup/utils.py
create_spooled_temporary_file
def create_spooled_temporary_file(filepath=None, fileobj=None):
    """
    Create a spooled temporary file. If ``filepath`` or ``fileobj`` is
    defined, its content will be copied into the temporary file.

    :param filepath: Path of input file
    :type filepath: str

    :param fileobj: Input file object
    :type fileobj: file

    :returns: Spooled temporary file
    :rtype: :class:`tempfile.SpooledTemporaryFile`
    """
    spooled_file = tempfile.SpooledTemporaryFile(
        max_size=settings.TMP_FILE_MAX_SIZE,
        dir=settings.TMP_DIR)
    if filepath:
        fileobj = open(filepath, 'r+b')
    if fileobj is not None:
        fileobj.seek(0)
        copyfileobj(fileobj, spooled_file, settings.TMP_FILE_READ_SIZE)
    return spooled_file
python
def create_spooled_temporary_file(filepath=None, fileobj=None):
    """
    Create a spooled temporary file. If ``filepath`` or ``fileobj`` is
    defined, its content will be copied into the temporary file.

    :param filepath: Path of input file
    :type filepath: str

    :param fileobj: Input file object
    :type fileobj: file

    :returns: Spooled temporary file
    :rtype: :class:`tempfile.SpooledTemporaryFile`
    """
    spooled_file = tempfile.SpooledTemporaryFile(
        max_size=settings.TMP_FILE_MAX_SIZE,
        dir=settings.TMP_DIR)
    if filepath:
        fileobj = open(filepath, 'r+b')
    if fileobj is not None:
        fileobj.seek(0)
        copyfileobj(fileobj, spooled_file, settings.TMP_FILE_READ_SIZE)
    return spooled_file
[ "def", "create_spooled_temporary_file", "(", "filepath", "=", "None", ",", "fileobj", "=", "None", ")", ":", "spooled_file", "=", "tempfile", ".", "SpooledTemporaryFile", "(", "max_size", "=", "settings", ".", "TMP_FILE_MAX_SIZE", ",", "dir", "=", "settings", ".", "TMP_DIR", ")", "if", "filepath", ":", "fileobj", "=", "open", "(", "filepath", ",", "'r+b'", ")", "if", "fileobj", "is", "not", "None", ":", "fileobj", ".", "seek", "(", "0", ")", "copyfileobj", "(", "fileobj", ",", "spooled_file", ",", "settings", ".", "TMP_FILE_READ_SIZE", ")", "return", "spooled_file" ]
Create a spooled temporary file. If ``filepath`` or ``fileobj`` is defined, its content will be copied into the temporary file. :param filepath: Path of input file :type filepath: str :param fileobj: Input file object :type fileobj: file :returns: Spooled temporary file :rtype: :class:`tempfile.SpooledTemporaryFile`
[ "Create", "a", "spooled", "temporary", "file", ".", "if", "filepath", "or", "fileobj", "is", "defined", "its", "content", "will", "be", "copied", "into", "temporary", "file", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L129-L151
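A `SpooledTemporaryFile` buffers in memory until `max_size` bytes have been written and then rolls over to a real file on disk. A small demo; the private `_rolled` flag is a CPython implementation detail, used here only to make the rollover visible:

```python
import tempfile

# Lives in memory until max_size bytes are written, then transparently
# rolls over to a real file on disk.
spooled = tempfile.SpooledTemporaryFile(max_size=10)
spooled.write(b"tiny")
print(spooled._rolled)   # False -- still an in-memory buffer

spooled.write(b" but now too big")
print(spooled._rolled)   # True -- silently moved to disk

spooled.seek(0)
print(spooled.read())    # b'tiny but now too big'
```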
13,479
django-dbbackup/django-dbbackup
dbbackup/utils.py
compress_file
def compress_file(inputfile, filename): """ Compress input file using gzip and change its name. :param inputfile: File to compress :type inputfile: ``file`` like object :param filename: File's name :type filename: ``str`` :returns: Tuple with compressed file and new file's name :rtype: :class:`tempfile.SpooledTemporaryFile`, ``str`` """ outputfile = create_spooled_temporary_file() new_filename = filename + '.gz' zipfile = gzip.GzipFile(filename=filename, fileobj=outputfile, mode="wb") try: inputfile.seek(0) copyfileobj(inputfile, zipfile, settings.TMP_FILE_READ_SIZE) finally: zipfile.close() return outputfile, new_filename
python
def compress_file(inputfile, filename): """ Compress input file using gzip and change its name. :param inputfile: File to compress :type inputfile: ``file`` like object :param filename: File's name :type filename: ``str`` :returns: Tuple with compressed file and new file's name :rtype: :class:`tempfile.SpooledTemporaryFile`, ``str`` """ outputfile = create_spooled_temporary_file() new_filename = filename + '.gz' zipfile = gzip.GzipFile(filename=filename, fileobj=outputfile, mode="wb") try: inputfile.seek(0) copyfileobj(inputfile, zipfile, settings.TMP_FILE_READ_SIZE) finally: zipfile.close() return outputfile, new_filename
[ "def", "compress_file", "(", "inputfile", ",", "filename", ")", ":", "outputfile", "=", "create_spooled_temporary_file", "(", ")", "new_filename", "=", "filename", "+", "'.gz'", "zipfile", "=", "gzip", ".", "GzipFile", "(", "filename", "=", "filename", ",", "fileobj", "=", "outputfile", ",", "mode", "=", "\"wb\"", ")", "try", ":", "inputfile", ".", "seek", "(", "0", ")", "copyfileobj", "(", "inputfile", ",", "zipfile", ",", "settings", ".", "TMP_FILE_READ_SIZE", ")", "finally", ":", "zipfile", ".", "close", "(", ")", "return", "outputfile", ",", "new_filename" ]
Compress input file using gzip and change its name. :param inputfile: File to compress :type inputfile: ``file`` like object :param filename: File's name :type filename: ``str`` :returns: Tuple with compressed file and new file's name :rtype: :class:`tempfile.SpooledTemporaryFile`, ``str``
[ "Compress", "input", "file", "using", "gzip", "and", "change", "its", "name", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L234-L255
13,480
django-dbbackup/django-dbbackup
dbbackup/utils.py
uncompress_file
def uncompress_file(inputfile, filename):
    """
    Uncompress this file using gzip and change its name.

    :param inputfile: File to uncompress
    :type inputfile: ``file`` like object

    :param filename: File's name
    :type filename: ``str``

    :returns: Tuple with file and new file's name
    :rtype: :class:`tempfile.SpooledTemporaryFile`, ``str``
    """
    zipfile = gzip.GzipFile(fileobj=inputfile, mode="rb")
    try:
        outputfile = create_spooled_temporary_file(fileobj=zipfile)
    finally:
        zipfile.close()
    new_basename = os.path.basename(filename).replace('.gz', '')
    return outputfile, new_basename
python
def uncompress_file(inputfile, filename):
    """
    Uncompress this file using gzip and change its name.

    :param inputfile: File to uncompress
    :type inputfile: ``file`` like object

    :param filename: File's name
    :type filename: ``str``

    :returns: Tuple with file and new file's name
    :rtype: :class:`tempfile.SpooledTemporaryFile`, ``str``
    """
    zipfile = gzip.GzipFile(fileobj=inputfile, mode="rb")
    try:
        outputfile = create_spooled_temporary_file(fileobj=zipfile)
    finally:
        zipfile.close()
    new_basename = os.path.basename(filename).replace('.gz', '')
    return outputfile, new_basename
[ "def", "uncompress_file", "(", "inputfile", ",", "filename", ")", ":", "zipfile", "=", "gzip", ".", "GzipFile", "(", "fileobj", "=", "inputfile", ",", "mode", "=", "\"rb\"", ")", "try", ":", "outputfile", "=", "create_spooled_temporary_file", "(", "fileobj", "=", "zipfile", ")", "finally", ":", "zipfile", ".", "close", "(", ")", "new_basename", "=", "os", ".", "path", ".", "basename", "(", "filename", ")", ".", "replace", "(", "'.gz'", ",", "''", ")", "return", "outputfile", ",", "new_basename" ]
Uncompress this file using gzip and change its name. :param inputfile: File to uncompress :type inputfile: ``file`` like object :param filename: File's name :type filename: ``str`` :returns: Tuple with file and new file's name :rtype: :class:`tempfile.SpooledTemporaryFile`, ``str``
[ "Uncompress", "this", "file", "using", "gzip", "and", "change", "its", "name", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L258-L277
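`compress_file` and `uncompress_file` are mirror images built on `gzip.GzipFile` wrapped around file-like objects. A round trip of the same mechanics, with the settings-dependent pieces replaced by plain in-memory buffers:

```python
import gzip
import io
from shutil import copyfileobj

original = io.BytesIO(b"database dump contents")

# Compress: wrap the output buffer in a write-mode GzipFile.
compressed = io.BytesIO()
zf = gzip.GzipFile(filename="backup.dump", fileobj=compressed, mode="wb")
try:
    original.seek(0)
    copyfileobj(original, zf)
finally:
    zf.close()  # must close before reading: flushes the gzip trailer

# Uncompress: wrap the same buffer in a read-mode GzipFile.
compressed.seek(0)
restored = io.BytesIO()
with gzip.GzipFile(fileobj=compressed, mode="rb") as zf:
    copyfileobj(zf, restored)

print(restored.getvalue())                  # b'database dump contents'
print("backup.dump.gz".replace(".gz", ""))  # name change mirrors the helpers
```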
13,481
django-dbbackup/django-dbbackup
dbbackup/utils.py
timestamp
def timestamp(value): """ Return the timestamp of a datetime.datetime object. :param value: a datetime object :type value: datetime.datetime :return: the timestamp :rtype: str """ value = value if timezone.is_naive(value) else timezone.localtime(value) return value.strftime(settings.DATE_FORMAT)
python
def timestamp(value): """ Return the timestamp of a datetime.datetime object. :param value: a datetime object :type value: datetime.datetime :return: the timestamp :rtype: str """ value = value if timezone.is_naive(value) else timezone.localtime(value) return value.strftime(settings.DATE_FORMAT)
[ "def", "timestamp", "(", "value", ")", ":", "value", "=", "value", "if", "timezone", ".", "is_naive", "(", "value", ")", "else", "timezone", ".", "localtime", "(", "value", ")", "return", "value", ".", "strftime", "(", "settings", ".", "DATE_FORMAT", ")" ]
Return the timestamp of a datetime.datetime object. :param value: a datetime object :type value: datetime.datetime :return: the timestamp :rtype: str
[ "Return", "the", "timestamp", "of", "a", "datetime", ".", "datetime", "object", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L280-L291
13,482
django-dbbackup/django-dbbackup
dbbackup/utils.py
datefmt_to_regex
def datefmt_to_regex(datefmt):
    """
    Convert a strftime format string to a regex.

    :param datefmt: strftime format string
    :type datefmt: ``str``

    :returns: Equivalent regex
    :rtype: compiled regex
    """
    new_string = datefmt
    for pat, reg in PATTERN_MATCHNG:
        new_string = new_string.replace(pat, reg)
    return re.compile(r'(%s)' % new_string)
python
def datefmt_to_regex(datefmt):
    """
    Convert a strftime format string to a regex.

    :param datefmt: strftime format string
    :type datefmt: ``str``

    :returns: Equivalent regex
    :rtype: compiled regex
    """
    new_string = datefmt
    for pat, reg in PATTERN_MATCHNG:
        new_string = new_string.replace(pat, reg)
    return re.compile(r'(%s)' % new_string)
[ "def", "datefmt_to_regex", "(", "datefmt", ")", ":", "new_string", "=", "datefmt", "for", "pat", ",", "reg", "in", "PATTERN_MATCHNG", ":", "new_string", "=", "new_string", ".", "replace", "(", "pat", ",", "reg", ")", "return", "re", ".", "compile", "(", "r'(%s)'", "%", "new_string", ")" ]
Convert a strftime format string to a regex. :param datefmt: strftime format string :type datefmt: ``str`` :returns: Equivalent regex :rtype: compiled regex
[ "Convert", "a", "strftime", "format", "string", "to", "a", "regex", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L327-L340
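The `PATTERN_MATCHNG` table maps strftime directives to regex fragments able to match their output; the mapping below is illustrative only, not the module's actual table:

```python
import re

# Illustrative stand-in for PATTERN_MATCHNG: each strftime directive is
# replaced by a regex fragment that matches its rendered output.
PATTERN_MATCHING = (
    ("%Y", r"[0-9]{4}"),
    ("%m", r"[0-9]{2}"),
    ("%d", r"[0-9]{2}"),
    ("%H", r"[0-9]{2}"),
    ("%M", r"[0-9]{2}"),
    ("%S", r"[0-9]{2}"),
)

def datefmt_to_regex(datefmt):
    for pat, reg in PATTERN_MATCHING:
        datefmt = datefmt.replace(pat, reg)
    return re.compile(r"(%s)" % datefmt)

regex = datefmt_to_regex("%Y-%m-%d-%H%M%S")
print(regex.pattern)  # ([0-9]{4}-[0-9]{2}-[0-9]{2}-[0-9]{2}[0-9]{2}[0-9]{2})
print(regex.search("db-2024-01-31-235959.gz").group(1))  # 2024-01-31-235959
```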
13,483
django-dbbackup/django-dbbackup
dbbackup/utils.py
filename_to_date
def filename_to_date(filename, datefmt=None): """ Return a datetime from a file name. :param datefmt: strftime format string, ``settings.DATE_FORMAT`` is used if it is ``None`` :type datefmt: ``str`` or ``NoneType`` :returns: Date guessed or nothing if no date found :rtype: ``datetime.datetime`` or ``NoneType`` """ datefmt = datefmt or settings.DATE_FORMAT datestring = filename_to_datestring(filename, datefmt) if datestring is not None: return datetime.strptime(datestring, datefmt)
python
def filename_to_date(filename, datefmt=None): """ Return a datetime from a file name. :param datefmt: strftime format string, ``settings.DATE_FORMAT`` is used if it is ``None`` :type datefmt: ``str`` or ``NoneType`` :returns: Date guessed or nothing if no date found :rtype: ``datetime.datetime`` or ``NoneType`` """ datefmt = datefmt or settings.DATE_FORMAT datestring = filename_to_datestring(filename, datefmt) if datestring is not None: return datetime.strptime(datestring, datefmt)
[ "def", "filename_to_date", "(", "filename", ",", "datefmt", "=", "None", ")", ":", "datefmt", "=", "datefmt", "or", "settings", ".", "DATE_FORMAT", "datestring", "=", "filename_to_datestring", "(", "filename", ",", "datefmt", ")", "if", "datestring", "is", "not", "None", ":", "return", "datetime", ".", "strptime", "(", "datestring", ",", "datefmt", ")" ]
Return a datetime from a file name. :param datefmt: strftime format string, ``settings.DATE_FORMAT`` is used if it is ``None`` :type datefmt: ``str`` or ``NoneType`` :returns: Date guessed or nothing if no date found :rtype: ``datetime.datetime`` or ``NoneType``
[ "Return", "a", "datetime", "from", "a", "file", "name", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L361-L375
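filename_to_date delegates extraction to filename_to_datestring (defined elsewhere in utils.py) and then parses the result. A sketch of the parsing step, with an assumed date format and a hand-extracted date string standing in for that helper's output:

    from datetime import datetime

    DATE_FORMAT = '%Y-%m-%d-%H%M%S'   # assumed settings.DATE_FORMAT
    datestring = '2019-01-02-030405'  # what filename_to_datestring would return
    if datestring is not None:
        print(datetime.strptime(datestring, DATE_FORMAT))  # 2019-01-02 03:04:05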
13,484
django-dbbackup/django-dbbackup
dbbackup/utils.py
filename_generate
def filename_generate(extension, database_name='', servername=None, content_type='db', wildcard=None): """ Create a new backup filename. :param extension: Extension of backup file :type extension: ``str`` :param database_name: If it is a database backup, specify its name :type database_name: ``str`` :param servername: Specify server name or by default ``settings.DBBACKUP_HOSTNAME`` :type servername: ``str`` :param content_type: Content type to backup, ``'media'`` or ``'db'`` :type content_type: ``str`` :param wildcard: Replace datetime with this wildcard regex :type wildcard: ``str`` :returns: Computed file name :rtype: ``str`` """ if content_type == 'db': if '/' in database_name: database_name = os.path.basename(database_name) if '.' in database_name: database_name = database_name.split('.')[0] template = settings.FILENAME_TEMPLATE elif content_type == 'media': template = settings.MEDIA_FILENAME_TEMPLATE else: template = settings.FILENAME_TEMPLATE params = { 'servername': servername or settings.HOSTNAME, 'datetime': wildcard or datetime.now().strftime(settings.DATE_FORMAT), 'databasename': database_name, 'extension': extension, 'content_type': content_type } if callable(template): filename = template(**params) else: filename = template.format(**params) filename = REG_FILENAME_CLEAN.sub('-', filename) filename = filename[1:] if filename.startswith('-') else filename return filename
python
def filename_generate(extension, database_name='', servername=None, content_type='db', wildcard=None): """ Create a new backup filename. :param extension: Extension of backup file :type extension: ``str`` :param database_name: If it is a database backup, specify its name :type database_name: ``str`` :param servername: Specify server name or by default ``settings.DBBACKUP_HOSTNAME`` :type servername: ``str`` :param content_type: Content type to backup, ``'media'`` or ``'db'`` :type content_type: ``str`` :param wildcard: Replace datetime with this wildcard regex :type wildcard: ``str`` :returns: Computed file name :rtype: ``str`` """ if content_type == 'db': if '/' in database_name: database_name = os.path.basename(database_name) if '.' in database_name: database_name = database_name.split('.')[0] template = settings.FILENAME_TEMPLATE elif content_type == 'media': template = settings.MEDIA_FILENAME_TEMPLATE else: template = settings.FILENAME_TEMPLATE params = { 'servername': servername or settings.HOSTNAME, 'datetime': wildcard or datetime.now().strftime(settings.DATE_FORMAT), 'databasename': database_name, 'extension': extension, 'content_type': content_type } if callable(template): filename = template(**params) else: filename = template.format(**params) filename = REG_FILENAME_CLEAN.sub('-', filename) filename = filename[1:] if filename.startswith('-') else filename return filename
[ "def", "filename_generate", "(", "extension", ",", "database_name", "=", "''", ",", "servername", "=", "None", ",", "content_type", "=", "'db'", ",", "wildcard", "=", "None", ")", ":", "if", "content_type", "==", "'db'", ":", "if", "'/'", "in", "database_name", ":", "database_name", "=", "os", ".", "path", ".", "basename", "(", "database_name", ")", "if", "'.'", "in", "database_name", ":", "database_name", "=", "database_name", ".", "split", "(", "'.'", ")", "[", "0", "]", "template", "=", "settings", ".", "FILENAME_TEMPLATE", "elif", "content_type", "==", "'media'", ":", "template", "=", "settings", ".", "MEDIA_FILENAME_TEMPLATE", "else", ":", "template", "=", "settings", ".", "FILENAME_TEMPLATE", "params", "=", "{", "'servername'", ":", "servername", "or", "settings", ".", "HOSTNAME", ",", "'datetime'", ":", "wildcard", "or", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "settings", ".", "DATE_FORMAT", ")", ",", "'databasename'", ":", "database_name", ",", "'extension'", ":", "extension", ",", "'content_type'", ":", "content_type", "}", "if", "callable", "(", "template", ")", ":", "filename", "=", "template", "(", "*", "*", "params", ")", "else", ":", "filename", "=", "template", ".", "format", "(", "*", "*", "params", ")", "filename", "=", "REG_FILENAME_CLEAN", ".", "sub", "(", "'-'", ",", "filename", ")", "filename", "=", "filename", "[", "1", ":", "]", "if", "filename", ".", "startswith", "(", "'-'", ")", "else", "filename", "return", "filename" ]
Create a new backup filename. :param extension: Extension of backup file :type extension: ``str`` :param database_name: If it is a database backup, specify its name :type database_name: ``str`` :param servername: Specify server name or by default ``settings.DBBACKUP_HOSTNAME`` :type servername: ``str`` :param content_type: Content type to backup, ``'media'`` or ``'db'`` :type content_type: ``str`` :param wildcard: Replace datetime with this wildcard regex :type wildcard: ``str`` :returns: Computed file name :rtype: ``str``
[ "Create", "a", "new", "backup", "filename", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L378-L424
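A sketch of the template step at the heart of filename_generate. The template string and cleanup regex below are assumptions (the shipped FILENAME_TEMPLATE and REG_FILENAME_CLEAN may differ), but they show both the str.format path and the trimming of a leading dash:

    import re

    REG_FILENAME_CLEAN = re.compile(r'-+')  # assumed cleanup regex
    TEMPLATE = '{databasename}-{servername}-{datetime}.{extension}'  # assumed

    params = {
        'servername': 'web01',
        'datetime': '2019-01-02-030405',
        'databasename': 'default',
        'extension': 'dump',
        'content_type': 'db',
    }
    filename = TEMPLATE.format(**params)
    filename = REG_FILENAME_CLEAN.sub('-', filename)            # collapse dash runs
    filename = filename[1:] if filename.startswith('-') else filename
    print(filename)  # default-web01-2019-01-02-030405.dump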
13,485
django-dbbackup/django-dbbackup
dbbackup/storage.py
get_storage
def get_storage(path=None, options=None): """ Get the specified storage configured with options. :param path: Path in Python dot style to module containing the storage class. If empty settings.DBBACKUP_STORAGE will be used. :type path: ``str`` :param options: Parameters to configure the storage, if empty settings.DBBACKUP_STORAGE_OPTIONS will be used. :type options: ``dict`` :return: Storage configured :rtype: :class:`.Storage` """ path = path or settings.STORAGE options = options or settings.STORAGE_OPTIONS if not path: raise ImproperlyConfigured('You must specify a storage class using ' 'DBBACKUP_STORAGE settings.') return Storage(path, **options)
python
def get_storage(path=None, options=None): """ Get the specified storage configured with options. :param path: Path in Python dot style to module containing the storage class. If empty settings.DBBACKUP_STORAGE will be used. :type path: ``str`` :param options: Parameters to configure the storage, if empty settings.DBBACKUP_STORAGE_OPTIONS will be used. :type options: ``dict`` :return: Storage configured :rtype: :class:`.Storage` """ path = path or settings.STORAGE options = options or settings.STORAGE_OPTIONS if not path: raise ImproperlyConfigured('You must specify a storage class using ' 'DBBACKUP_STORAGE settings.') return Storage(path, **options)
[ "def", "get_storage", "(", "path", "=", "None", ",", "options", "=", "None", ")", ":", "path", "=", "path", "or", "settings", ".", "STORAGE", "options", "=", "options", "or", "settings", ".", "STORAGE_OPTIONS", "if", "not", "path", ":", "raise", "ImproperlyConfigured", "(", "'You must specify a storage class using '", "'DBBACKUP_STORAGE settings.'", ")", "return", "Storage", "(", "path", ",", "*", "*", "options", ")" ]
Get the specified storage configured with options. :param path: Path in Python dot style to module containing the storage class. If empty settings.DBBACKUP_STORAGE will be used. :type path: ``str`` :param options: Parameters to configure the storage, if empty settings.DBBACKUP_STORAGE_OPTIONS will be used. :type options: ``dict`` :return: Storage configured :rtype: :class:`.Storage`
[ "Get", "the", "specified", "storage", "configured", "with", "options", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/storage.py#L10-L30
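A hypothetical call, assuming a configured Django project with dbbackup installed; the dotted path and options here are illustrative stand-ins, not the project defaults:

    from dbbackup.storage import get_storage

    storage = get_storage(
        'django.core.files.storage.FileSystemStorage',  # assumed class path
        {'location': '/var/backups'},                   # assumed options dict
    )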
13,486
django-dbbackup/django-dbbackup
dbbackup/storage.py
Storage.list_backups
def list_backups(self, encrypted=None, compressed=None, content_type=None, database=None, servername=None): """ List stored files matching the given filters. If a filter is None, it won't be applied. ``content_type`` must be ``'db'`` for database backups or ``'media'`` for media backups. :param encrypted: Filter by encrypted or not :type encrypted: ``bool`` or ``None`` :param compressed: Filter by compressed or not :type compressed: ``bool`` or ``None`` :param content_type: Filter by media or database backup, must be ``'db'`` or ``'media'`` :type content_type: ``str`` or ``None`` :param database: Filter by source database's name :type database: ``str`` or ``None`` :param servername: Filter by source server's name :type servername: ``str`` or ``None`` :returns: List of files :rtype: ``list`` of ``str`` """ if content_type not in ('db', 'media', None): msg = "Bad content_type %s, must be 'db', 'media', or None" % ( content_type) raise TypeError(msg) # TODO: Make better filter for include only backups files = [f for f in self.list_directory() if utils.filename_to_datestring(f)] if encrypted is not None: files = [f for f in files if ('.gpg' in f) == encrypted] if compressed is not None: files = [f for f in files if ('.gz' in f) == compressed] if content_type == 'media': files = [f for f in files if '.tar' in f] elif content_type == 'db': files = [f for f in files if '.tar' not in f] if database: files = [f for f in files if database in f] if servername: files = [f for f in files if servername in f] return files
python
def list_backups(self, encrypted=None, compressed=None, content_type=None, database=None, servername=None): """ List stored files matching the given filters. If a filter is None, it won't be applied. ``content_type`` must be ``'db'`` for database backups or ``'media'`` for media backups. :param encrypted: Filter by encrypted or not :type encrypted: ``bool`` or ``None`` :param compressed: Filter by compressed or not :type compressed: ``bool`` or ``None`` :param content_type: Filter by media or database backup, must be ``'db'`` or ``'media'`` :type content_type: ``str`` or ``None`` :param database: Filter by source database's name :type database: ``str`` or ``None`` :param servername: Filter by source server's name :type servername: ``str`` or ``None`` :returns: List of files :rtype: ``list`` of ``str`` """ if content_type not in ('db', 'media', None): msg = "Bad content_type %s, must be 'db', 'media', or None" % ( content_type) raise TypeError(msg) # TODO: Make better filter for include only backups files = [f for f in self.list_directory() if utils.filename_to_datestring(f)] if encrypted is not None: files = [f for f in files if ('.gpg' in f) == encrypted] if compressed is not None: files = [f for f in files if ('.gz' in f) == compressed] if content_type == 'media': files = [f for f in files if '.tar' in f] elif content_type == 'db': files = [f for f in files if '.tar' not in f] if database: files = [f for f in files if database in f] if servername: files = [f for f in files if servername in f] return files
[ "def", "list_backups", "(", "self", ",", "encrypted", "=", "None", ",", "compressed", "=", "None", ",", "content_type", "=", "None", ",", "database", "=", "None", ",", "servername", "=", "None", ")", ":", "if", "content_type", "not", "in", "(", "'db'", ",", "'media'", ",", "None", ")", ":", "msg", "=", "\"Bad content_type %s, must be 'db', 'media', or None\"", "%", "(", "content_type", ")", "raise", "TypeError", "(", "msg", ")", "# TODO: Make better filter for include only backups", "files", "=", "[", "f", "for", "f", "in", "self", ".", "list_directory", "(", ")", "if", "utils", ".", "filename_to_datestring", "(", "f", ")", "]", "if", "encrypted", "is", "not", "None", ":", "files", "=", "[", "f", "for", "f", "in", "files", "if", "(", "'.gpg'", "in", "f", ")", "==", "encrypted", "]", "if", "compressed", "is", "not", "None", ":", "files", "=", "[", "f", "for", "f", "in", "files", "if", "(", "'.gz'", "in", "f", ")", "==", "compressed", "]", "if", "content_type", "==", "'media'", ":", "files", "=", "[", "f", "for", "f", "in", "files", "if", "'.tar'", "in", "f", "]", "elif", "content_type", "==", "'db'", ":", "files", "=", "[", "f", "for", "f", "in", "files", "if", "'.tar'", "not", "in", "f", "]", "if", "database", ":", "files", "=", "[", "f", "for", "f", "in", "files", "if", "database", "in", "f", "]", "if", "servername", ":", "files", "=", "[", "f", "for", "f", "in", "files", "if", "servername", "in", "f", "]", "return", "files" ]
List stored files matching the given filters. If a filter is None, it won't be applied. ``content_type`` must be ``'db'`` for database backups or ``'media'`` for media backups. :param encrypted: Filter by encrypted or not :type encrypted: ``bool`` or ``None`` :param compressed: Filter by compressed or not :type compressed: ``bool`` or ``None`` :param content_type: Filter by media or database backup, must be ``'db'`` or ``'media'`` :type content_type: ``str`` or ``None`` :param database: Filter by source database's name :type database: ``str`` or ``None`` :param servername: Filter by source server's name :type servername: ``str`` or ``None`` :returns: List of files :rtype: ``list`` of ``str``
[ "List", "stored", "files", "except", "given", "filter", ".", "If", "filter", "is", "None", "it", "won", "t", "be", "used", ".", "content_type", "must", "be", "db", "for", "database", "backups", "or", "media", "for", "media", "backups", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/storage.py#L91-L136
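Typical filter combinations, assuming the storage object from the previous sketch; any keyword left as None is simply skipped by the method:

    # Compressed database dumps only
    db_files = storage.list_backups(content_type='db', compressed=True)

    # Unencrypted media archives from one server (the name is an assumption)
    media_files = storage.list_backups(content_type='media',
                                       encrypted=False,
                                       servername='web01')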
13,487
django-dbbackup/django-dbbackup
dbbackup/storage.py
Storage.get_older_backup
def get_older_backup(self, encrypted=None, compressed=None, content_type=None, database=None, servername=None): """ Return the oldest backup's file name. :param encrypted: Filter by encrypted or not :type encrypted: ``bool`` or ``None`` :param compressed: Filter by compressed or not :type compressed: ``bool`` or ``None`` :param content_type: Filter by media or database backup, must be ``'db'`` or ``'media'`` :type content_type: ``str`` or ``None`` :param database: Filter by source database's name :type database: ``str`` or ``None`` :param servername: Filter by source server's name :type servername: ``str`` or ``None`` :returns: Oldest file :rtype: ``str`` :raises: FileNotFound: If no backup file is found """ files = self.list_backups(encrypted=encrypted, compressed=compressed, content_type=content_type, database=database, servername=servername) if not files: raise FileNotFound("There's no backup file available.") return min(files, key=utils.filename_to_date)
python
def get_older_backup(self, encrypted=None, compressed=None, content_type=None, database=None, servername=None): """ Return the oldest backup's file name. :param encrypted: Filter by encrypted or not :type encrypted: ``bool`` or ``None`` :param compressed: Filter by compressed or not :type compressed: ``bool`` or ``None`` :param content_type: Filter by media or database backup, must be ``'db'`` or ``'media'`` :type content_type: ``str`` or ``None`` :param database: Filter by source database's name :type database: ``str`` or ``None`` :param servername: Filter by source server's name :type servername: ``str`` or ``None`` :returns: Oldest file :rtype: ``str`` :raises: FileNotFound: If no backup file is found """ files = self.list_backups(encrypted=encrypted, compressed=compressed, content_type=content_type, database=database, servername=servername) if not files: raise FileNotFound("There's no backup file available.") return min(files, key=utils.filename_to_date)
[ "def", "get_older_backup", "(", "self", ",", "encrypted", "=", "None", ",", "compressed", "=", "None", ",", "content_type", "=", "None", ",", "database", "=", "None", ",", "servername", "=", "None", ")", ":", "files", "=", "self", ".", "list_backups", "(", "encrypted", "=", "encrypted", ",", "compressed", "=", "compressed", ",", "content_type", "=", "content_type", ",", "database", "=", "database", ",", "servername", "=", "servername", ")", "if", "not", "files", ":", "raise", "FileNotFound", "(", "\"There's no backup file available.\"", ")", "return", "min", "(", "files", ",", "key", "=", "utils", ".", "filename_to_date", ")" ]
Return the oldest backup's file name. :param encrypted: Filter by encrypted or not :type encrypted: ``bool`` or ``None`` :param compressed: Filter by compressed or not :type compressed: ``bool`` or ``None`` :param content_type: Filter by media or database backup, must be ``'db'`` or ``'media'`` :type content_type: ``str`` or ``None`` :param database: Filter by source database's name :type database: ``str`` or ``None`` :param servername: Filter by source server's name :type servername: ``str`` or ``None`` :returns: Oldest file :rtype: ``str`` :raises: FileNotFound: If no backup file is found
[ "Return", "the", "older", "backup", "s", "file", "name", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/storage.py#L172-L204
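The selection itself is a one-liner: min() with filename_to_date as the key picks the earliest-dated name. A standalone sketch with a simplified key function standing in for utils.filename_to_date:

    from datetime import datetime

    files = ['default-2019-01-02.dump', 'default-2018-12-31.dump']

    def filename_to_date(name):
        # Simplified stand-in for utils.filename_to_date.
        return datetime.strptime(name.split('-', 1)[1].rsplit('.', 1)[0],
                                 '%Y-%m-%d')

    print(min(files, key=filename_to_date))  # default-2018-12-31.dump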
13,488
django-dbbackup/django-dbbackup
dbbackup/storage.py
Storage.clean_old_backups
def clean_old_backups(self, encrypted=None, compressed=None, content_type=None, database=None, servername=None, keep_number=None): """ Delete older backups, keeping only the defined number of files. :param encrypted: Filter by encrypted or not :type encrypted: ``bool`` or ``None`` :param compressed: Filter by compressed or not :type compressed: ``bool`` or ``None`` :param content_type: Filter by media or database backup, must be ``'db'`` or ``'media'`` :type content_type: ``str`` or ``None`` :param database: Filter by source database's name :type database: ``str`` or ``None`` :param servername: Filter by source server's name :type servername: ``str`` or ``None`` :param keep_number: Number of files to keep, others will be deleted :type keep_number: ``int`` or ``None`` """ if keep_number is None: keep_number = settings.CLEANUP_KEEP if content_type == 'db' \ else settings.CLEANUP_KEEP_MEDIA keep_filter = settings.CLEANUP_KEEP_FILTER files = self.list_backups(encrypted=encrypted, compressed=compressed, content_type=content_type, database=database, servername=servername) files = sorted(files, key=utils.filename_to_date, reverse=True) files_to_delete = [fi for i, fi in enumerate(files) if i >= keep_number] for filename in files_to_delete: if keep_filter(filename): continue self.delete_file(filename)
python
def clean_old_backups(self, encrypted=None, compressed=None, content_type=None, database=None, servername=None, keep_number=None): """ Delete older backups, keeping only the defined number of files. :param encrypted: Filter by encrypted or not :type encrypted: ``bool`` or ``None`` :param compressed: Filter by compressed or not :type compressed: ``bool`` or ``None`` :param content_type: Filter by media or database backup, must be ``'db'`` or ``'media'`` :type content_type: ``str`` or ``None`` :param database: Filter by source database's name :type database: ``str`` or ``None`` :param servername: Filter by source server's name :type servername: ``str`` or ``None`` :param keep_number: Number of files to keep, others will be deleted :type keep_number: ``int`` or ``None`` """ if keep_number is None: keep_number = settings.CLEANUP_KEEP if content_type == 'db' \ else settings.CLEANUP_KEEP_MEDIA keep_filter = settings.CLEANUP_KEEP_FILTER files = self.list_backups(encrypted=encrypted, compressed=compressed, content_type=content_type, database=database, servername=servername) files = sorted(files, key=utils.filename_to_date, reverse=True) files_to_delete = [fi for i, fi in enumerate(files) if i >= keep_number] for filename in files_to_delete: if keep_filter(filename): continue self.delete_file(filename)
[ "def", "clean_old_backups", "(", "self", ",", "encrypted", "=", "None", ",", "compressed", "=", "None", ",", "content_type", "=", "None", ",", "database", "=", "None", ",", "servername", "=", "None", ",", "keep_number", "=", "None", ")", ":", "if", "keep_number", "is", "None", ":", "keep_number", "=", "settings", ".", "CLEANUP_KEEP", "if", "content_type", "==", "'db'", "else", "settings", ".", "CLEANUP_KEEP_MEDIA", "keep_filter", "=", "settings", ".", "CLEANUP_KEEP_FILTER", "files", "=", "self", ".", "list_backups", "(", "encrypted", "=", "encrypted", ",", "compressed", "=", "compressed", ",", "content_type", "=", "content_type", ",", "database", "=", "database", ",", "servername", "=", "servername", ")", "files", "=", "sorted", "(", "files", ",", "key", "=", "utils", ".", "filename_to_date", ",", "reverse", "=", "True", ")", "files_to_delete", "=", "[", "fi", "for", "i", ",", "fi", "in", "enumerate", "(", "files", ")", "if", "i", ">=", "keep_number", "]", "for", "filename", "in", "files_to_delete", ":", "if", "keep_filter", "(", "filename", ")", ":", "continue", "self", ".", "delete_file", "(", "filename", ")" ]
Delete older backups, keeping only the defined number of files. :param encrypted: Filter by encrypted or not :type encrypted: ``bool`` or ``None`` :param compressed: Filter by compressed or not :type compressed: ``bool`` or ``None`` :param content_type: Filter by media or database backup, must be ``'db'`` or ``'media'`` :type content_type: ``str`` or ``None`` :param database: Filter by source database's name :type database: ``str`` or ``None`` :param servername: Filter by source server's name :type servername: ``str`` or ``None`` :param keep_number: Number of files to keep, others will be deleted :type keep_number: ``int`` or ``None``
[ "Delete", "olders", "backups", "and", "hold", "the", "number", "defined", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/storage.py#L206-L244
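The retention logic reduces to sorting newest-first and enumerating past keep_number, with CLEANUP_KEEP_FILTER as a veto. A sketch with an assumed filter callable; nothing is printed because the filter claims the only deletion candidate:

    files = ['2019-01-03.dump', '2019-01-02.dump', '2019-01-01.dump']  # newest first
    keep_number = 2

    def keep_filter(name):
        # Hypothetical stand-in for settings.CLEANUP_KEEP_FILTER.
        return name.endswith('01.dump')

    for i, filename in enumerate(files):
        if i < keep_number or keep_filter(filename):
            continue
        print('would delete', filename)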
13,489
django-dbbackup/django-dbbackup
dbbackup/management/commands/dbrestore.py
Command._get_database
def _get_database(self, options): """Get the database to restore.""" database_name = options.get('database') if not database_name: if len(settings.DATABASES) > 1: errmsg = "Because this project contains more than one database, you"\ " must specify the --database option." raise CommandError(errmsg) database_name = list(settings.DATABASES.keys())[0] if database_name not in settings.DATABASES: raise CommandError("Database %s does not exist." % database_name) return database_name, settings.DATABASES[database_name]
python
def _get_database(self, options): """Get the database to restore.""" database_name = options.get('database') if not database_name: if len(settings.DATABASES) > 1: errmsg = "Because this project contains more than one database, you"\ " must specify the --database option." raise CommandError(errmsg) database_name = list(settings.DATABASES.keys())[0] if database_name not in settings.DATABASES: raise CommandError("Database %s does not exist." % database_name) return database_name, settings.DATABASES[database_name]
[ "def", "_get_database", "(", "self", ",", "options", ")", ":", "database_name", "=", "options", ".", "get", "(", "'database'", ")", "if", "not", "database_name", ":", "if", "len", "(", "settings", ".", "DATABASES", ")", ">", "1", ":", "errmsg", "=", "\"Because this project contains more than one database, you\"", "\" must specify the --database option.\"", "raise", "CommandError", "(", "errmsg", ")", "database_name", "=", "list", "(", "settings", ".", "DATABASES", ".", "keys", "(", ")", ")", "[", "0", "]", "if", "database_name", "not", "in", "settings", ".", "DATABASES", ":", "raise", "CommandError", "(", "\"Database %s does not exist.\"", "%", "database_name", ")", "return", "database_name", ",", "settings", ".", "DATABASES", "[", "database_name", "]" ]
Get the database to restore.
[ "Get", "the", "database", "to", "restore", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/management/commands/dbrestore.py#L57-L68
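The alias-resolution rule above in miniature: fall back to the single configured alias, refuse to guess among several. A plain dict stands in for settings.DATABASES:

    DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}  # assumed
    database_name = None  # i.e. no --database option was given

    if not database_name:
        if len(DATABASES) > 1:
            raise SystemExit('you must specify the --database option')
        database_name = list(DATABASES.keys())[0]
    print(database_name)  # default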
13,490
django-dbbackup/django-dbbackup
dbbackup/management/commands/dbrestore.py
Command._restore_backup
def _restore_backup(self): """Restore the specified database.""" input_filename, input_file = self._get_backup_file(database=self.database_name, servername=self.servername) self.logger.info("Restoring backup for database '%s' and server '%s'", self.database_name, self.servername) self.logger.info("Restoring: %s" % input_filename) if self.decrypt: unencrypted_file, input_filename = utils.unencrypt_file(input_file, input_filename, self.passphrase) input_file.close() input_file = unencrypted_file if self.uncompress: uncompressed_file, input_filename = utils.uncompress_file(input_file, input_filename) input_file.close() input_file = uncompressed_file self.logger.info("Restore tempfile created: %s", utils.handle_size(input_file)) if self.interactive: self._ask_confirmation() input_file.seek(0) self.connector = get_connector(self.database_name) self.connector.restore_dump(input_file)
python
def _restore_backup(self): """Restore the specified database.""" input_filename, input_file = self._get_backup_file(database=self.database_name, servername=self.servername) self.logger.info("Restoring backup for database '%s' and server '%s'", self.database_name, self.servername) self.logger.info("Restoring: %s" % input_filename) if self.decrypt: unencrypted_file, input_filename = utils.unencrypt_file(input_file, input_filename, self.passphrase) input_file.close() input_file = unencrypted_file if self.uncompress: uncompressed_file, input_filename = utils.uncompress_file(input_file, input_filename) input_file.close() input_file = uncompressed_file self.logger.info("Restore tempfile created: %s", utils.handle_size(input_file)) if self.interactive: self._ask_confirmation() input_file.seek(0) self.connector = get_connector(self.database_name) self.connector.restore_dump(input_file)
[ "def", "_restore_backup", "(", "self", ")", ":", "input_filename", ",", "input_file", "=", "self", ".", "_get_backup_file", "(", "database", "=", "self", ".", "database_name", ",", "servername", "=", "self", ".", "servername", ")", "self", ".", "logger", ".", "info", "(", "\"Restoring backup for database '%s' and server '%s'\"", ",", "self", ".", "database_name", ",", "self", ".", "servername", ")", "self", ".", "logger", ".", "info", "(", "\"Restoring: %s\"", "%", "input_filename", ")", "if", "self", ".", "decrypt", ":", "unencrypted_file", ",", "input_filename", "=", "utils", ".", "unencrypt_file", "(", "input_file", ",", "input_filename", ",", "self", ".", "passphrase", ")", "input_file", ".", "close", "(", ")", "input_file", "=", "unencrypted_file", "if", "self", ".", "uncompress", ":", "uncompressed_file", ",", "input_filename", "=", "utils", ".", "uncompress_file", "(", "input_file", ",", "input_filename", ")", "input_file", ".", "close", "(", ")", "input_file", "=", "uncompressed_file", "self", ".", "logger", ".", "info", "(", "\"Restore tempfile created: %s\"", ",", "utils", ".", "handle_size", "(", "input_file", ")", ")", "if", "self", ".", "interactive", ":", "self", ".", "_ask_confirmation", "(", ")", "input_file", ".", "seek", "(", "0", ")", "self", ".", "connector", "=", "get_connector", "(", "self", ".", "database_name", ")", "self", ".", "connector", ".", "restore_dump", "(", "input_file", ")" ]
Restore the specified database.
[ "Restore", "the", "specified", "database", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/management/commands/dbrestore.py#L70-L94
13,491
django-dbbackup/django-dbbackup
dbbackup/db/base.py
get_connector
def get_connector(database_name=None): """ Get a connector from its database key in settings. """ from django.db import connections, DEFAULT_DB_ALIAS # Get DB database_name = database_name or DEFAULT_DB_ALIAS connection = connections[database_name] engine = connection.settings_dict['ENGINE'] connector_settings = settings.CONNECTORS.get(database_name, {}) connector_path = connector_settings.get('CONNECTOR', CONNECTOR_MAPPING[engine]) connector_module_path = '.'.join(connector_path.split('.')[:-1]) module = import_module(connector_module_path) connector_name = connector_path.split('.')[-1] connector = getattr(module, connector_name) return connector(database_name, **connector_settings)
python
def get_connector(database_name=None): """ Get a connector from its database key in settings. """ from django.db import connections, DEFAULT_DB_ALIAS # Get DB database_name = database_name or DEFAULT_DB_ALIAS connection = connections[database_name] engine = connection.settings_dict['ENGINE'] connector_settings = settings.CONNECTORS.get(database_name, {}) connector_path = connector_settings.get('CONNECTOR', CONNECTOR_MAPPING[engine]) connector_module_path = '.'.join(connector_path.split('.')[:-1]) module = import_module(connector_module_path) connector_name = connector_path.split('.')[-1] connector = getattr(module, connector_name) return connector(database_name, **connector_settings)
[ "def", "get_connector", "(", "database_name", "=", "None", ")", ":", "from", "django", ".", "db", "import", "connections", ",", "DEFAULT_DB_ALIAS", "# Get DB", "database_name", "=", "database_name", "or", "DEFAULT_DB_ALIAS", "connection", "=", "connections", "[", "database_name", "]", "engine", "=", "connection", ".", "settings_dict", "[", "'ENGINE'", "]", "connector_settings", "=", "settings", ".", "CONNECTORS", ".", "get", "(", "database_name", ",", "{", "}", ")", "connector_path", "=", "connector_settings", ".", "get", "(", "'CONNECTOR'", ",", "CONNECTOR_MAPPING", "[", "engine", "]", ")", "connector_module_path", "=", "'.'", ".", "join", "(", "connector_path", ".", "split", "(", "'.'", ")", "[", ":", "-", "1", "]", ")", "module", "=", "import_module", "(", "connector_module_path", ")", "connector_name", "=", "connector_path", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", "connector", "=", "getattr", "(", "module", ",", "connector_name", ")", "return", "connector", "(", "database_name", ",", "*", "*", "connector_settings", ")" ]
Get a connector from its database key in settings.
[ "Get", "a", "connector", "from", "its", "database", "key", "in", "setttings", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/db/base.py#L31-L46
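The dotted-path resolution is the interesting part: split off the class name, import the module, getattr the class. The same three lines against a standard-library target, so the sketch runs without Django:

    from importlib import import_module

    connector_path = 'collections.OrderedDict'  # stand-in for a connector path
    module = import_module('.'.join(connector_path.split('.')[:-1]))
    connector = getattr(module, connector_path.split('.')[-1])
    print(connector)  # <class 'collections.OrderedDict'>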
13,492
django-dbbackup/django-dbbackup
dbbackup/db/base.py
BaseDBConnector.settings
def settings(self): """Mix of database and connector settings.""" if not hasattr(self, '_settings'): sett = self.connection.settings_dict.copy() sett.update(settings.CONNECTORS.get(self.database_name, {})) self._settings = sett return self._settings
python
def settings(self): """Mix of database and connector settings.""" if not hasattr(self, '_settings'): sett = self.connection.settings_dict.copy() sett.update(settings.CONNECTORS.get(self.database_name, {})) self._settings = sett return self._settings
[ "def", "settings", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'_settings'", ")", ":", "sett", "=", "self", ".", "connection", ".", "settings_dict", ".", "copy", "(", ")", "sett", ".", "update", "(", "settings", ".", "CONNECTORS", ".", "get", "(", "self", ".", "database_name", ",", "{", "}", ")", ")", "self", ".", "_settings", "=", "sett", "return", "self", ".", "_settings" ]
Mix of database and connector settings.
[ "Mix", "of", "database", "and", "connector", "settings", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/db/base.py#L65-L71
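The merge order in the settings property matters: connector-level settings win over the database's settings_dict. A dict-only sketch of that precedence, with assumed values:

    settings_dict = {'NAME': 'prod', 'USER': 'app'}  # from the connection
    connector_overrides = {'USER': 'backup_role'}    # assumed CONNECTORS entry

    sett = settings_dict.copy()
    sett.update(connector_overrides)
    print(sett)  # {'NAME': 'prod', 'USER': 'backup_role'}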
13,493
django-dbbackup/django-dbbackup
dbbackup/db/base.py
BaseCommandDBConnector.run_command
def run_command(self, command, stdin=None, env=None): """ Launch a shell command line. :param command: Command line to launch :type command: str :param stdin: Standard input of command :type stdin: file :param env: Environment variables used in command :type env: dict :return: Standard output and standard error of command :rtype: tuple of file """ cmd = shlex.split(command) stdout = SpooledTemporaryFile(max_size=settings.TMP_FILE_MAX_SIZE, dir=settings.TMP_DIR) stderr = SpooledTemporaryFile(max_size=settings.TMP_FILE_MAX_SIZE, dir=settings.TMP_DIR) full_env = os.environ.copy() if self.use_parent_env else {} full_env.update(self.env) full_env.update(env or {}) try: if isinstance(stdin, (ContentFile, SFTPStorageFile)): process = Popen(cmd, stdin=PIPE, stdout=stdout, stderr=stderr, env=full_env) process.communicate(input=stdin.read()) else: process = Popen(cmd, stdin=stdin, stdout=stdout, stderr=stderr, env=full_env) process.wait() if process.poll(): stderr.seek(0) raise exceptions.CommandConnectorError( "Error running: {}\n{}".format(command, stderr.read().decode('utf-8'))) stdout.seek(0) stderr.seek(0) return stdout, stderr except OSError as err: raise exceptions.CommandConnectorError( "Error running: {}\n{}".format(command, str(err)))
python
def run_command(self, command, stdin=None, env=None): """ Launch a shell command line. :param command: Command line to launch :type command: str :param stdin: Standard input of command :type stdin: file :param env: Environment variables used in command :type env: dict :return: Standard output and standard error of command :rtype: tuple of file """ cmd = shlex.split(command) stdout = SpooledTemporaryFile(max_size=settings.TMP_FILE_MAX_SIZE, dir=settings.TMP_DIR) stderr = SpooledTemporaryFile(max_size=settings.TMP_FILE_MAX_SIZE, dir=settings.TMP_DIR) full_env = os.environ.copy() if self.use_parent_env else {} full_env.update(self.env) full_env.update(env or {}) try: if isinstance(stdin, (ContentFile, SFTPStorageFile)): process = Popen(cmd, stdin=PIPE, stdout=stdout, stderr=stderr, env=full_env) process.communicate(input=stdin.read()) else: process = Popen(cmd, stdin=stdin, stdout=stdout, stderr=stderr, env=full_env) process.wait() if process.poll(): stderr.seek(0) raise exceptions.CommandConnectorError( "Error running: {}\n{}".format(command, stderr.read().decode('utf-8'))) stdout.seek(0) stderr.seek(0) return stdout, stderr except OSError as err: raise exceptions.CommandConnectorError( "Error running: {}\n{}".format(command, str(err)))
[ "def", "run_command", "(", "self", ",", "command", ",", "stdin", "=", "None", ",", "env", "=", "None", ")", ":", "cmd", "=", "shlex", ".", "split", "(", "command", ")", "stdout", "=", "SpooledTemporaryFile", "(", "max_size", "=", "settings", ".", "TMP_FILE_MAX_SIZE", ",", "dir", "=", "settings", ".", "TMP_DIR", ")", "stderr", "=", "SpooledTemporaryFile", "(", "max_size", "=", "settings", ".", "TMP_FILE_MAX_SIZE", ",", "dir", "=", "settings", ".", "TMP_DIR", ")", "full_env", "=", "os", ".", "environ", ".", "copy", "(", ")", "if", "self", ".", "use_parent_env", "else", "{", "}", "full_env", ".", "update", "(", "self", ".", "env", ")", "full_env", ".", "update", "(", "env", "or", "{", "}", ")", "try", ":", "if", "isinstance", "(", "stdin", ",", "(", "ContentFile", ",", "SFTPStorageFile", ")", ")", ":", "process", "=", "Popen", "(", "cmd", ",", "stdin", "=", "PIPE", ",", "stdout", "=", "stdout", ",", "stderr", "=", "stderr", ",", "env", "=", "full_env", ")", "process", ".", "communicate", "(", "input", "=", "stdin", ".", "read", "(", ")", ")", "else", ":", "process", "=", "Popen", "(", "cmd", ",", "stdin", "=", "stdin", ",", "stdout", "=", "stdout", ",", "stderr", "=", "stderr", ",", "env", "=", "full_env", ")", "process", ".", "wait", "(", ")", "if", "process", ".", "poll", "(", ")", ":", "stderr", ".", "seek", "(", "0", ")", "raise", "exceptions", ".", "CommandConnectorError", "(", "\"Error running: {}\\n{}\"", ".", "format", "(", "command", ",", "stderr", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", ")", ")", "stdout", ".", "seek", "(", "0", ")", "stderr", ".", "seek", "(", "0", ")", "return", "stdout", ",", "stderr", "except", "OSError", "as", "err", ":", "raise", "exceptions", ".", "CommandConnectorError", "(", "\"Error running: {}\\n{}\"", ".", "format", "(", "command", ",", "str", "(", "err", ")", ")", ")" ]
Launch a shell command line. :param command: Command line to launch :type command: str :param stdin: Standard input of command :type stdin: file :param env: Environment variables used in command :type env: dict :return: Standard output and standard error of command :rtype: tuple of file
[ "Launch", "a", "shell", "command", "line", "." ]
77de209e2d5317e51510d0f888e085ee0c400d66
https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/db/base.py#L118-L155
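The spooled-tempfile pattern is what lets large dumps flow through without exhausting memory: output spills to disk once it outgrows max_size. A trimmed, runnable echo of it (a POSIX 'echo' binary is assumed; error handling omitted):

    import os
    import shlex
    from subprocess import Popen
    from tempfile import SpooledTemporaryFile

    stdout = SpooledTemporaryFile(max_size=10 * 1024 * 1024)
    stderr = SpooledTemporaryFile(max_size=10 * 1024 * 1024)
    process = Popen(shlex.split('echo hello'), stdout=stdout, stderr=stderr,
                    env=os.environ.copy())
    process.wait()
    stdout.seek(0)
    print(stdout.read())  # b'hello\n'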
13,494
tchellomello/raincloudy
raincloudy/faucet.py
RainCloudyFaucetCore._assign_zones
def _assign_zones(self): """Assign all RainCloudyFaucetZone managed by faucet.""" for zone_id in range(1, 5): zone = \ RainCloudyFaucetZone( parent=self._parent, controller=self._controller, faucet=self, zone_id=zone_id) if zone not in self.zones: self.zones.append(zone)
python
def _assign_zones(self): """Assign all RainCloudyFaucetZone managed by faucet.""" for zone_id in range(1, 5): zone = \ RainCloudyFaucetZone( parent=self._parent, controller=self._controller, faucet=self, zone_id=zone_id) if zone not in self.zones: self.zones.append(zone)
[ "def", "_assign_zones", "(", "self", ")", ":", "for", "zone_id", "in", "range", "(", "1", ",", "5", ")", ":", "zone", "=", "RainCloudyFaucetZone", "(", "parent", "=", "self", ".", "_parent", ",", "controller", "=", "self", ".", "_controller", ",", "faucet", "=", "self", ",", "zone_id", "=", "zone_id", ")", "if", "zone", "not", "in", "self", ".", "zones", ":", "self", ".", "zones", ".", "append", "(", "zone", ")" ]
Assign all RainCloudyFaucetZone managed by faucet.
[ "Assign", "all", "RainCloudyFaucetZone", "managed", "by", "faucet", "." ]
1847fa913e5ba79645d51bf23637860d68c67dbf
https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/faucet.py#L38-L49
13,495
tchellomello/raincloudy
raincloudy/faucet.py
RainCloudyFaucetCore._find_zone_by_id
def _find_zone_by_id(self, zone_id): """Return zone by id.""" if not self.zones: return None zone = list(filter( lambda zone: zone.id == zone_id, self.zones)) return zone[0] if zone else None
python
def _find_zone_by_id(self, zone_id): """Return zone by id.""" if not self.zones: return None zone = list(filter( lambda zone: zone.id == zone_id, self.zones)) return zone[0] if zone else None
[ "def", "_find_zone_by_id", "(", "self", ",", "zone_id", ")", ":", "if", "not", "self", ".", "zones", ":", "return", "None", "zone", "=", "list", "(", "filter", "(", "lambda", "zone", ":", "zone", ".", "id", "==", "zone_id", ",", "self", ".", "zones", ")", ")", "return", "zone", "[", "0", "]", "if", "zone", "else", "None" ]
Return zone by id.
[ "Return", "zone", "by", "id", "." ]
1847fa913e5ba79645d51bf23637860d68c67dbf
https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/faucet.py#L114-L122
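The filter-plus-index pattern above works; a generator with next() is a common alternative that stops at the first hit and skips the intermediate list. A sketch with plain dicts standing in for zone objects:

    zones = [{'id': 1}, {'id': 2}, {'id': 3}]
    zone_id = 2

    # Equivalent lookup; returns None when nothing matches.
    zone = next((z for z in zones if z['id'] == zone_id), None)
    print(zone)  # {'id': 2}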
13,496
tchellomello/raincloudy
raincloudy/faucet.py
RainCloudyFaucetZone._set_zone_name
def _set_zone_name(self, zoneid, name): """Private method to override zone name.""" # zone starts with index 0 zoneid -= 1 data = { '_set_zone_name': 'Set Name', 'select_zone': str(zoneid), 'zone_name': name, } self._controller.post(data)
python
def _set_zone_name(self, zoneid, name): """Private method to override zone name.""" # zone starts with index 0 zoneid -= 1 data = { '_set_zone_name': 'Set Name', 'select_zone': str(zoneid), 'zone_name': name, } self._controller.post(data)
[ "def", "_set_zone_name", "(", "self", ",", "zoneid", ",", "name", ")", ":", "# zone starts with index 0", "zoneid", "-=", "1", "data", "=", "{", "'_set_zone_name'", ":", "'Set Name'", ",", "'select_zone'", ":", "str", "(", "zoneid", ")", ",", "'zone_name'", ":", "name", ",", "}", "self", ".", "_controller", ".", "post", "(", "data", ")" ]
Private method to override zone name.
[ "Private", "method", "to", "override", "zone", "name", "." ]
1847fa913e5ba79645d51bf23637860d68c67dbf
https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/faucet.py#L181-L190
13,497
tchellomello/raincloudy
raincloudy/faucet.py
RainCloudyFaucetZone._set_watering_time
def _set_watering_time(self, zoneid, value): """Private method to set watering_time per zone.""" if value not in MANUAL_WATERING_ALLOWED: raise ValueError( 'Valid options are: {}'.format( ', '.join(map(str, MANUAL_WATERING_ALLOWED))) ) if isinstance(value, int) and value == 0: value = 'OFF' elif isinstance(value, str): value = value.upper() if value == 'ON': value = MAX_WATERING_MINUTES ddata = self.preupdate() attr = 'zone{}_select_manual_mode'.format(zoneid) ddata[attr] = value self.submit_action(ddata)
python
def _set_watering_time(self, zoneid, value): """Private method to set watering_time per zone.""" if value not in MANUAL_WATERING_ALLOWED: raise ValueError( 'Valid options are: {}'.format( ', '.join(map(str, MANUAL_WATERING_ALLOWED))) ) if isinstance(value, int) and value == 0: value = 'OFF' elif isinstance(value, str): value = value.upper() if value == 'ON': value = MAX_WATERING_MINUTES ddata = self.preupdate() attr = 'zone{}_select_manual_mode'.format(zoneid) ddata[attr] = value self.submit_action(ddata)
[ "def", "_set_watering_time", "(", "self", ",", "zoneid", ",", "value", ")", ":", "if", "value", "not", "in", "MANUAL_WATERING_ALLOWED", ":", "raise", "ValueError", "(", "'Valid options are: {}'", ".", "format", "(", "', '", ".", "join", "(", "map", "(", "str", ",", "MANUAL_WATERING_ALLOWED", ")", ")", ")", ")", "if", "isinstance", "(", "value", ",", "int", ")", "and", "value", "==", "0", ":", "value", "=", "'OFF'", "elif", "isinstance", "(", "value", ",", "str", ")", ":", "value", "=", "value", ".", "upper", "(", ")", "if", "value", "==", "'ON'", ":", "value", "=", "MAX_WATERING_MINUTES", "ddata", "=", "self", ".", "preupdate", "(", ")", "attr", "=", "'zone{}_select_manual_mode'", ".", "format", "(", "zoneid", ")", "ddata", "[", "attr", "]", "=", "value", "self", ".", "submit_action", "(", "ddata", ")" ]
Private method to set watering_time per zone.
[ "Private", "method", "to", "set", "watering_time", "per", "zone", "." ]
1847fa913e5ba79645d51bf23637860d68c67dbf
https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/faucet.py#L202-L220
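The normalization rules in _set_watering_time, isolated: 0 becomes 'OFF', and the string 'on' is promoted to the maximum run time. The constants below are assumptions standing in for the raincloudy module's values:

    MAX_WATERING_MINUTES = 60                                # assumed constant
    MANUAL_WATERING_ALLOWED = [0, 15, 30, 60, 'on', 'off']   # assumed whitelist

    def watering_value(value):
        # Mirrors the normalization branch of _set_watering_time.
        if value not in MANUAL_WATERING_ALLOWED:
            raise ValueError('Valid options are: {}'.format(
                ', '.join(map(str, MANUAL_WATERING_ALLOWED))))
        if isinstance(value, int) and value == 0:
            return 'OFF'
        if isinstance(value, str):
            value = value.upper()
            if value == 'ON':
                return MAX_WATERING_MINUTES
        return value

    print([watering_value(v) for v in (0, 15, 'on', 'off')])
    # ['OFF', 15, 60, 'OFF']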
13,498
tchellomello/raincloudy
raincloudy/faucet.py
RainCloudyFaucetZone.watering_time
def watering_time(self): """Return watering_time from zone.""" # zone starts with index 0 index = self.id - 1 auto_watering_time =\ self._attributes['rain_delay_mode'][index]['auto_watering_time'] manual_watering_time =\ self._attributes['rain_delay_mode'][index]['manual_watering_time'] if auto_watering_time > manual_watering_time: watering_time = auto_watering_time else: watering_time = manual_watering_time return watering_time
python
def watering_time(self): """Return watering_time from zone.""" # zone starts with index 0 index = self.id - 1 auto_watering_time =\ self._attributes['rain_delay_mode'][index]['auto_watering_time'] manual_watering_time =\ self._attributes['rain_delay_mode'][index]['manual_watering_time'] if auto_watering_time > manual_watering_time: watering_time = auto_watering_time else: watering_time = manual_watering_time return watering_time
[ "def", "watering_time", "(", "self", ")", ":", "# zone starts with index 0", "index", "=", "self", ".", "id", "-", "1", "auto_watering_time", "=", "self", ".", "_attributes", "[", "'rain_delay_mode'", "]", "[", "index", "]", "[", "'auto_watering_time'", "]", "manual_watering_time", "=", "self", ".", "_attributes", "[", "'rain_delay_mode'", "]", "[", "index", "]", "[", "'manual_watering_time'", "]", "if", "auto_watering_time", ">", "manual_watering_time", ":", "watering_time", "=", "auto_watering_time", "else", ":", "watering_time", "=", "manual_watering_time", "return", "watering_time" ]
Return watering_time from zone.
[ "Return", "watering_time", "from", "zone", "." ]
1847fa913e5ba79645d51bf23637860d68c67dbf
https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/faucet.py#L224-L239
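The if/else comparison above is equivalent to taking max() over the two values, shown here with assumed sample readings rather than real device data:

    auto_watering_time = 15    # sample value
    manual_watering_time = 10  # sample value
    watering_time = max(auto_watering_time, manual_watering_time)
    print(watering_time)  # 15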
13,499
tchellomello/raincloudy
raincloudy/faucet.py
RainCloudyFaucetZone._set_rain_delay
def _set_rain_delay(self, zoneid, value): """Generic method to set the rain delay per zone.""" # current index for rain_delay starts in 0 zoneid -= 1 if isinstance(value, int): if value > MAX_RAIN_DELAY_DAYS or value < 0: return None elif value == 0: value = 'off' elif value == 1: value = '1day' elif value >= 2: value = str(value) + 'days' elif isinstance(value, str): if value.lower() != 'off': return None ddata = self.preupdate() attr = 'zone{}_rain_delay_select'.format(zoneid) ddata[attr] = value self.submit_action(ddata) return True
python
def _set_rain_delay(self, zoneid, value): """Generic method to set the rain delay per zone.""" # current index for rain_delay starts in 0 zoneid -= 1 if isinstance(value, int): if value > MAX_RAIN_DELAY_DAYS or value < 0: return None elif value == 0: value = 'off' elif value == 1: value = '1day' elif value >= 2: value = str(value) + 'days' elif isinstance(value, str): if value.lower() != 'off': return None ddata = self.preupdate() attr = 'zone{}_rain_delay_select'.format(zoneid) ddata[attr] = value self.submit_action(ddata) return True
[ "def", "_set_rain_delay", "(", "self", ",", "zoneid", ",", "value", ")", ":", "# current index for rain_delay starts in 0", "zoneid", "-=", "1", "if", "isinstance", "(", "value", ",", "int", ")", ":", "if", "value", ">", "MAX_RAIN_DELAY_DAYS", "or", "value", "<", "0", ":", "return", "None", "elif", "value", "==", "0", ":", "value", "=", "'off'", "elif", "value", "==", "1", ":", "value", "=", "'1day'", "elif", "value", ">=", "2", ":", "value", "=", "str", "(", "value", ")", "+", "'days'", "elif", "isinstance", "(", "value", ",", "str", ")", ":", "if", "value", ".", "lower", "(", ")", "!=", "'off'", ":", "return", "None", "ddata", "=", "self", ".", "preupdate", "(", ")", "attr", "=", "'zone{}_rain_delay_select'", ".", "format", "(", "zoneid", ")", "ddata", "[", "attr", "]", "=", "value", "self", ".", "submit_action", "(", "ddata", ")", "return", "True" ]
Generic method to set the rain delay per zone.
[ "Generic", "method", "to", "set", "auto_watering", "program", "." ]
1847fa913e5ba79645d51bf23637860d68c67dbf
https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/faucet.py#L250-L272
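The value normalization in _set_rain_delay is worth isolating: integers map to 'off', '1day', or '<n>days', and only the string 'off' (any case) is accepted otherwise. A standalone sketch with an assumed MAX_RAIN_DELAY_DAYS:

    MAX_RAIN_DELAY_DAYS = 7  # assumed constant

    def rain_delay_value(value):
        # Mirrors the mapping above; returns None for anything rejected.
        if isinstance(value, int):
            if value > MAX_RAIN_DELAY_DAYS or value < 0:
                return None
            if value == 0:
                return 'off'
            if value == 1:
                return '1day'
            return '{}days'.format(value)
        if isinstance(value, str) and value.lower() == 'off':
            return value  # passed through unchanged, as in the original
        return None

    print([rain_delay_value(v) for v in (0, 1, 3, 'OFF', 9)])
    # ['off', '1day', '3days', 'OFF', None]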