Dataset columns (name, viewer type, value/length range):

    id                int32          0 - 252k
    repo              stringlengths  7 - 55
    path              stringlengths  4 - 127
    func_name         stringlengths  1 - 88
    original_string   stringlengths  75 - 19.8k
    language          stringclasses  1 value
    code              stringlengths  75 - 19.8k
    code_tokens       list
    docstring         stringlengths  3 - 17.3k
    docstring_tokens  list
    sha               stringlengths  40 - 40
    url               stringlengths  87 - 242
241,500
diffeo/yakonfig
yakonfig/toplevel.py
_recurse_config
def _recurse_config(parent_config, modules, f, prefix=''): '''Walk through the module tree. This is a helper function for :func:`create_config_tree` and :func:`_walk_config`. It calls `f` once for each module in the configuration tree with parameters `parent_config`, `config_name`, `prefix`, and `module`. `parent_config[config_name]` may or may not exist (but could be populated, as :func:`create_config_tree`). If even the parent configuration doesn't exist, `parent_config` could be :const:`None`. :param dict parent_config: configuration dictionary holding configuration for `modules`, or maybe :const:`None` :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :param f: callable to call on each module :param str prefix: prefix name of `parent_config` :return: `parent_config` ''' for module in modules: config_name = getattr(module, 'config_name', None) if config_name is None: raise ProgrammerError('{0!r} must provide a config_name' .format(module)) new_name = prefix + config_name f(parent_config, config_name, new_name, module) try: _recurse_config((parent_config or {}).get(config_name, None), getattr(module, 'sub_modules', []), f, new_name + '.') except: # achieve a sort of stack trace on the way out logger.error('exception in _recurse_config of %s', module) raise return parent_config
python
def _recurse_config(parent_config, modules, f, prefix=''): '''Walk through the module tree. This is a helper function for :func:`create_config_tree` and :func:`_walk_config`. It calls `f` once for each module in the configuration tree with parameters `parent_config`, `config_name`, `prefix`, and `module`. `parent_config[config_name]` may or may not exist (but could be populated, as :func:`create_config_tree`). If even the parent configuration doesn't exist, `parent_config` could be :const:`None`. :param dict parent_config: configuration dictionary holding configuration for `modules`, or maybe :const:`None` :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :param f: callable to call on each module :param str prefix: prefix name of `parent_config` :return: `parent_config` ''' for module in modules: config_name = getattr(module, 'config_name', None) if config_name is None: raise ProgrammerError('{0!r} must provide a config_name' .format(module)) new_name = prefix + config_name f(parent_config, config_name, new_name, module) try: _recurse_config((parent_config or {}).get(config_name, None), getattr(module, 'sub_modules', []), f, new_name + '.') except: # achieve a sort of stack trace on the way out logger.error('exception in _recurse_config of %s', module) raise return parent_config
[ "def", "_recurse_config", "(", "parent_config", ",", "modules", ",", "f", ",", "prefix", "=", "''", ")", ":", "for", "module", "in", "modules", ":", "config_name", "=", "getattr", "(", "module", ",", "'config_name'", ",", "None", ")", "if", "config_name", "is", "None", ":", "raise", "ProgrammerError", "(", "'{0!r} must provide a config_name'", ".", "format", "(", "module", ")", ")", "new_name", "=", "prefix", "+", "config_name", "f", "(", "parent_config", ",", "config_name", ",", "new_name", ",", "module", ")", "try", ":", "_recurse_config", "(", "(", "parent_config", "or", "{", "}", ")", ".", "get", "(", "config_name", ",", "None", ")", ",", "getattr", "(", "module", ",", "'sub_modules'", ",", "[", "]", ")", ",", "f", ",", "new_name", "+", "'.'", ")", "except", ":", "# achieve a sort of stack trace on the way out", "logger", ".", "error", "(", "'exception in _recurse_config of %s'", ",", "module", ")", "raise", "return", "parent_config" ]
Walk through the module tree. This is a helper function for :func:`create_config_tree` and :func:`_walk_config`. It calls `f` once for each module in the configuration tree with parameters `parent_config`, `config_name`, `prefix`, and `module`. `parent_config[config_name]` may or may not exist (but could be populated, as :func:`create_config_tree`). If even the parent configuration doesn't exist, `parent_config` could be :const:`None`. :param dict parent_config: configuration dictionary holding configuration for `modules`, or maybe :const:`None` :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :param f: callable to call on each module :param str prefix: prefix name of `parent_config` :return: `parent_config`
[ "Walk", "through", "the", "module", "tree", "." ]
412e195da29b4f4fc7b72967c192714a6f5eaeb5
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/toplevel.py#L262-L300
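To make the record above easier to skim, here is a minimal sketch of how `_recurse_config` walks a module tree. The `_Fake` class and the `show` callback are hypothetical stand-ins invented for illustration (yakonfig's real Configurable objects carry more attributes); the sketch only assumes the `_recurse_config` definition shown in this record is in scope.

# Hypothetical module objects exposing the two attributes _recurse_config
# reads: config_name (required) and sub_modules (optional).
class _Fake(object):
    def __init__(self, config_name, sub_modules=()):
        self.config_name = config_name
        self.sub_modules = list(sub_modules)

parent = _Fake('parent', sub_modules=[_Fake('child')])

def show(parent_config, config_name, prefix, module):
    # Called once per module with the dotted path assembled so far.
    print(prefix)

_recurse_config({'parent': {'child': {}}}, [parent], show)
# prints 'parent', then 'parent.child'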
241,501
diffeo/yakonfig
yakonfig/toplevel.py
create_config_tree
def create_config_tree(config, modules, prefix=''): '''Cause every possible configuration sub-dictionary to exist. This is intended to be called very early in the configuration sequence. For each module, it checks that the corresponding configuration item exists in `config` and creates it as an empty dictionary if required, and then recurses into child configs/modules. :param dict config: configuration to populate :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :param str prefix: prefix name of the config :return: `config` :raises yakonfig.ConfigurationError: if an expected name is present in the provided config, but that name is not a dictionary ''' def work_in(parent_config, config_name, prefix, module): if config_name not in parent_config: # this is the usual, expected case parent_config[config_name] = {} elif not isinstance(parent_config[config_name], collections.Mapping): raise ConfigurationError( '{0} must be an object configuration'.format(prefix)) else: # config_name is a pre-existing dictionary in parent_config pass _recurse_config(config, modules, work_in)
python
def create_config_tree(config, modules, prefix=''): '''Cause every possible configuration sub-dictionary to exist. This is intended to be called very early in the configuration sequence. For each module, it checks that the corresponding configuration item exists in `config` and creates it as an empty dictionary if required, and then recurses into child configs/modules. :param dict config: configuration to populate :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :param str prefix: prefix name of the config :return: `config` :raises yakonfig.ConfigurationError: if an expected name is present in the provided config, but that name is not a dictionary ''' def work_in(parent_config, config_name, prefix, module): if config_name not in parent_config: # this is the usual, expected case parent_config[config_name] = {} elif not isinstance(parent_config[config_name], collections.Mapping): raise ConfigurationError( '{0} must be an object configuration'.format(prefix)) else: # config_name is a pre-existing dictionary in parent_config pass _recurse_config(config, modules, work_in)
[ "def", "create_config_tree", "(", "config", ",", "modules", ",", "prefix", "=", "''", ")", ":", "def", "work_in", "(", "parent_config", ",", "config_name", ",", "prefix", ",", "module", ")", ":", "if", "config_name", "not", "in", "parent_config", ":", "# this is the usual, expected case", "parent_config", "[", "config_name", "]", "=", "{", "}", "elif", "not", "isinstance", "(", "parent_config", "[", "config_name", "]", ",", "collections", ".", "Mapping", ")", ":", "raise", "ConfigurationError", "(", "'{0} must be an object configuration'", ".", "format", "(", "prefix", ")", ")", "else", ":", "# config_name is a pre-existing dictionary in parent_config", "pass", "_recurse_config", "(", "config", ",", "modules", ",", "work_in", ")" ]
Cause every possible configuration sub-dictionary to exist. This is intended to be called very early in the configuration sequence. For each module, it checks that the corresponding configuration item exists in `config` and creates it as an empty dictionary if required, and then recurses into child configs/modules. :param dict config: configuration to populate :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :param str prefix: prefix name of the config :return: `config` :raises yakonfig.ConfigurationError: if an expected name is present in the provided config, but that name is not a dictionary
[ "Cause", "every", "possible", "configuration", "sub", "-", "dictionary", "to", "exist", "." ]
412e195da29b4f4fc7b72967c192714a6f5eaeb5
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/toplevel.py#L303-L332
241,502
diffeo/yakonfig
yakonfig/toplevel.py
_walk_config
def _walk_config(config, modules, f, prefix=''): """Recursively walk through a module list. For every module, calls ``f(config, module, name)`` where `config` is the configuration scoped to that module, `module` is the Configurable-like object, and `name` is the complete path (ending in the module name). :param dict config: configuration to walk and possibly update :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :param f: callback function for each module :param str prefix: prefix name of the config :return: config """ def work_in(parent_config, config_name, prefix, module): # create_config_tree() needs to have been called by now # and you should never hit either of these asserts if config_name not in parent_config: raise ProgrammerError('{0} not present in configuration' .format(prefix)) if not isinstance(parent_config[config_name], collections.Mapping): raise ConfigurationError( '{0} must be an object configuration'.format(prefix)) # do the work! f(parent_config[config_name], module, prefix) return _recurse_config(config, modules, work_in)
python
def _walk_config(config, modules, f, prefix=''): """Recursively walk through a module list. For every module, calls ``f(config, module, name)`` where `config` is the configuration scoped to that module, `module` is the Configurable-like object, and `name` is the complete path (ending in the module name). :param dict config: configuration to walk and possibly update :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :param f: callback function for each module :param str prefix: prefix name of the config :return: config """ def work_in(parent_config, config_name, prefix, module): # create_config_tree() needs to have been called by now # and you should never hit either of these asserts if config_name not in parent_config: raise ProgrammerError('{0} not present in configuration' .format(prefix)) if not isinstance(parent_config[config_name], collections.Mapping): raise ConfigurationError( '{0} must be an object configuration'.format(prefix)) # do the work! f(parent_config[config_name], module, prefix) return _recurse_config(config, modules, work_in)
[ "def", "_walk_config", "(", "config", ",", "modules", ",", "f", ",", "prefix", "=", "''", ")", ":", "def", "work_in", "(", "parent_config", ",", "config_name", ",", "prefix", ",", "module", ")", ":", "# create_config_tree() needs to have been called by now", "# and you should never hit either of these asserts", "if", "config_name", "not", "in", "parent_config", ":", "raise", "ProgrammerError", "(", "'{0} not present in configuration'", ".", "format", "(", "prefix", ")", ")", "if", "not", "isinstance", "(", "parent_config", "[", "config_name", "]", ",", "collections", ".", "Mapping", ")", ":", "raise", "ConfigurationError", "(", "'{0} must be an object configuration'", ".", "format", "(", "prefix", ")", ")", "# do the work!", "f", "(", "parent_config", "[", "config_name", "]", ",", "module", ",", "prefix", ")", "return", "_recurse_config", "(", "config", ",", "modules", ",", "work_in", ")" ]
Recursively walk through a module list. For every module, calls ``f(config, module, name)`` where `config` is the configuration scoped to that module, `module` is the Configurable-like object, and `name` is the complete path (ending in the module name). :param dict config: configuration to walk and possibly update :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :param f: callback function for each module :param str prefix: prefix name of the config :return: config
[ "Recursively", "walk", "through", "a", "module", "list", "." ]
412e195da29b4f4fc7b72967c192714a6f5eaeb5
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/toplevel.py#L335-L364
241,503
diffeo/yakonfig
yakonfig/toplevel.py
collect_add_argparse
def collect_add_argparse(parser, modules): """Add all command-line options. `modules` is an iterable of :class:`yakonfig.configurable.Configurable` objects, or anything equivalently typed. This calls :meth:`~yakonfig.configurable.Configurable.add_arguments` (if present) on all of them to set the global command-line arguments. :param argparse.ArgumentParser parser: argparse parser :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` """ def work_in(parent_config, config_name, prefix, module): f = getattr(module, 'add_arguments', None) if f is not None: f(parser) _recurse_config(dict(), modules, work_in) return parser
python
def collect_add_argparse(parser, modules): """Add all command-line options. `modules` is an iterable of :class:`yakonfig.configurable.Configurable` objects, or anything equivalently typed. This calls :meth:`~yakonfig.configurable.Configurable.add_arguments` (if present) on all of them to set the global command-line arguments. :param argparse.ArgumentParser parser: argparse parser :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` """ def work_in(parent_config, config_name, prefix, module): f = getattr(module, 'add_arguments', None) if f is not None: f(parser) _recurse_config(dict(), modules, work_in) return parser
[ "def", "collect_add_argparse", "(", "parser", ",", "modules", ")", ":", "def", "work_in", "(", "parent_config", ",", "config_name", ",", "prefix", ",", "module", ")", ":", "f", "=", "getattr", "(", "module", ",", "'add_arguments'", ",", "None", ")", "if", "f", "is", "not", "None", ":", "f", "(", "parser", ")", "_recurse_config", "(", "dict", "(", ")", ",", "modules", ",", "work_in", ")", "return", "parser" ]
Add all command-line options. `modules` is an iterable of :class:`yakonfig.configurable.Configurable` objects, or anything equivalently typed. This calls :meth:`~yakonfig.configurable.Configurable.add_arguments` (if present) on all of them to set the global command-line arguments. :param argparse.ArgumentParser parser: argparse parser :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable`
[ "Add", "all", "command", "-", "line", "options", "." ]
412e195da29b4f4fc7b72967c192714a6f5eaeb5
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/toplevel.py#L367-L386
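As a quick, hypothetical illustration of the record above (not yakonfig's own example code), a module only needs a `config_name` and an `add_arguments` hook for `collect_add_argparse` to register its options:

import argparse

class _CliModule(object):
    # Invented Configurable-like module: config_name is required by
    # _recurse_config, add_arguments is the hook collect_add_argparse calls.
    config_name = 'cli'

    @staticmethod
    def add_arguments(parser):
        parser.add_argument('--db-host', help='database host name')

parser = collect_add_argparse(argparse.ArgumentParser(), [_CliModule()])
args = parser.parse_args(['--db-host', 'db.example.org'])
# args.db_host == 'db.example.org'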
241,504
diffeo/yakonfig
yakonfig/toplevel.py
assemble_default_config
def assemble_default_config(modules): """Build the default configuration from a set of modules. `modules` is an iterable of :class:`yakonfig.configurable.Configurable` objects, or anything equivalently typed. This produces the default configuration from that list of modules. :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :return: configuration dictionary """ def work_in(parent_config, config_name, prefix, module): my_config = dict(getattr(module, 'default_config', {})) if config_name in parent_config: extra_config = parent_config[config_name] raise ProgrammerError( 'config for {0} already present when about to fetch {3}.default_config (had {1!r} would have set {2!r})'.format( prefix, extra_config, my_config, module)) parent_config[config_name] = my_config return _recurse_config(dict(), modules, work_in)
python
def assemble_default_config(modules): """Build the default configuration from a set of modules. `modules` is an iterable of :class:`yakonfig.configurable.Configurable` objects, or anything equivalently typed. This produces the default configuration from that list of modules. :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :return: configuration dictionary """ def work_in(parent_config, config_name, prefix, module): my_config = dict(getattr(module, 'default_config', {})) if config_name in parent_config: extra_config = parent_config[config_name] raise ProgrammerError( 'config for {0} already present when about to fetch {3}.default_config (had {1!r} would have set {2!r})'.format( prefix, extra_config, my_config, module)) parent_config[config_name] = my_config return _recurse_config(dict(), modules, work_in)
[ "def", "assemble_default_config", "(", "modules", ")", ":", "def", "work_in", "(", "parent_config", ",", "config_name", ",", "prefix", ",", "module", ")", ":", "my_config", "=", "dict", "(", "getattr", "(", "module", ",", "'default_config'", ",", "{", "}", ")", ")", "if", "config_name", "in", "parent_config", ":", "extra_config", "=", "parent_config", "[", "config_name", "]", "raise", "ProgrammerError", "(", "'config for {0} already present when about to fetch {3}.default_config (had {1!r} would have set {2!r})'", ".", "format", "(", "prefix", ",", "extra_config", ",", "my_config", ",", "module", ")", ")", "parent_config", "[", "config_name", "]", "=", "my_config", "return", "_recurse_config", "(", "dict", "(", ")", ",", "modules", ",", "work_in", ")" ]
Build the default configuration from a set of modules. `modules` is an iterable of :class:`yakonfig.configurable.Configurable` objects, or anything equivalently typed. This produces the default configuration from that list of modules. :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :return: configuration dictionary
[ "Build", "the", "default", "configuration", "from", "a", "set", "of", "modules", "." ]
412e195da29b4f4fc7b72967c192714a6f5eaeb5
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/toplevel.py#L389-L411
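A minimal sketch of the default-assembly step described in this record, using an invented module whose attribute names follow the docstring rather than any real yakonfig module:

class _Database(object):
    config_name = 'database'
    default_config = {'host': 'localhost', 'port': 5432}

defaults = assemble_default_config([_Database()])
# defaults == {'database': {'host': 'localhost', 'port': 5432}}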
241,505
diffeo/yakonfig
yakonfig/toplevel.py
fill_in_arguments
def fill_in_arguments(config, modules, args): """Fill in configuration fields from command-line arguments. `config` is a dictionary holding the initial configuration, probably the result of :func:`assemble_default_config`. It reads through `modules`, and for each, fills in any configuration values that are provided in `args`. `config` is modified in place. `args` may be either a dictionary or an object (as the result of :mod:`argparse`). :param dict config: configuration tree to update :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :param args: command-line objects :paramtype args: dict or object :return: config """ def work_in(config, module, name): rkeys = getattr(module, 'runtime_keys', {}) for (attr, cname) in iteritems(rkeys): v = args.get(attr, None) if v is not None: config[cname] = v if not isinstance(args, collections.Mapping): args = vars(args) return _walk_config(config, modules, work_in)
python
def fill_in_arguments(config, modules, args): """Fill in configuration fields from command-line arguments. `config` is a dictionary holding the initial configuration, probably the result of :func:`assemble_default_config`. It reads through `modules`, and for each, fills in any configuration values that are provided in `args`. `config` is modified in place. `args` may be either a dictionary or an object (as the result of :mod:`argparse`). :param dict config: configuration tree to update :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :param args: command-line objects :paramtype args: dict or object :return: config """ def work_in(config, module, name): rkeys = getattr(module, 'runtime_keys', {}) for (attr, cname) in iteritems(rkeys): v = args.get(attr, None) if v is not None: config[cname] = v if not isinstance(args, collections.Mapping): args = vars(args) return _walk_config(config, modules, work_in)
[ "def", "fill_in_arguments", "(", "config", ",", "modules", ",", "args", ")", ":", "def", "work_in", "(", "config", ",", "module", ",", "name", ")", ":", "rkeys", "=", "getattr", "(", "module", ",", "'runtime_keys'", ",", "{", "}", ")", "for", "(", "attr", ",", "cname", ")", "in", "iteritems", "(", "rkeys", ")", ":", "v", "=", "args", ".", "get", "(", "attr", ",", "None", ")", "if", "v", "is", "not", "None", ":", "config", "[", "cname", "]", "=", "v", "if", "not", "isinstance", "(", "args", ",", "collections", ".", "Mapping", ")", ":", "args", "=", "vars", "(", "args", ")", "return", "_walk_config", "(", "config", ",", "modules", ",", "work_in", ")" ]
Fill in configuration fields from command-line arguments. `config` is a dictionary holding the initial configuration, probably the result of :func:`assemble_default_config`. It reads through `modules`, and for each, fills in any configuration values that are provided in `args`. `config` is modified in place. `args` may be either a dictionary or an object (as the result of :mod:`argparse`). :param dict config: configuration tree to update :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :param args: command-line objects :paramtype args: dict or object :return: config
[ "Fill", "in", "configuration", "fields", "from", "command", "-", "line", "arguments", "." ]
412e195da29b4f4fc7b72967c192714a6f5eaeb5
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/toplevel.py#L414-L441
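Continuing the invented `_Database` module from the previous sketch, `runtime_keys` maps an argparse attribute name onto the key it should fill inside that module's config block; a plain dict works for `args` because the function only calls `vars()` on non-mapping objects:

class _Database(object):
    config_name = 'database'
    default_config = {'host': 'localhost', 'port': 5432}
    # argparse attribute name -> key inside this module's config block
    runtime_keys = {'db_host': 'host'}

config = assemble_default_config([_Database()])
fill_in_arguments(config, [_Database()], {'db_host': 'db.example.org'})
# config == {'database': {'host': 'db.example.org', 'port': 5432}}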
241,506
diffeo/yakonfig
yakonfig/toplevel.py
do_config_discovery
def do_config_discovery(config, modules): '''Let modules detect additional configuration values. `config` is the initial dictionary with command-line and file-derived values, but nothing else, filled in. This calls :meth:`yakonfig.configurable.Configurable.discover_config` on every configuration module. It is expected that this method will modify the passed-in configuration dictionaries in place. :param dict config: configuration tree to update :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :return: `config` ''' def work_in(config, module, name): f = getattr(module, 'discover_config', None) if f: f(config, name) return _walk_config(config, modules, work_in)
python
def do_config_discovery(config, modules): '''Let modules detect additional configuration values. `config` is the initial dictionary with command-line and file-derived values, but nothing else, filled in. This calls :meth:`yakonfig.configurable.Configurable.discover_config` on every configuration module. It is expected that this method will modify the passed-in configuration dictionaries in place. :param dict config: configuration tree to update :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :return: `config` ''' def work_in(config, module, name): f = getattr(module, 'discover_config', None) if f: f(config, name) return _walk_config(config, modules, work_in)
[ "def", "do_config_discovery", "(", "config", ",", "modules", ")", ":", "def", "work_in", "(", "config", ",", "module", ",", "name", ")", ":", "f", "=", "getattr", "(", "module", ",", "'discover_config'", ",", "None", ")", "if", "f", ":", "f", "(", "config", ",", "name", ")", "return", "_walk_config", "(", "config", ",", "modules", ",", "work_in", ")" ]
Let modules detect additional configuration values. `config` is the initial dictionary with command-line and file-derived values, but nothing else, filled in. This calls :meth:`yakonfig.configurable.Configurable.discover_config` on every configuration module. It is expected that this method will modify the passed-in configuration dictionaries in place. :param dict config: configuration tree to update :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :return: `config`
[ "Let", "modules", "detect", "additional", "configuration", "values", "." ]
412e195da29b4f4fc7b72967c192714a6f5eaeb5
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/toplevel.py#L444-L463
241,507
kejbaly2/idid
idid/cli.py
main
def main(arguments=None, config=None): """ Parse arguments for ``idid`` command. Pass optional parameter ``arguments`` as either command line string or list of options. This is mainly useful for testing. ``config`` can be passed in as a path or string to access user defined values for important variables manually. YAML only. Returns the saved logg string. """ # Parse options, initialize gathered stats options = LoggOptions(arguments=arguments).parse() # FIXME: pass in only config; set config.journal = options.journal if not config: config = options.config_file logg = Logg(config, options.journal) return logg.logg_record(options.logg, options.date)
python
def main(arguments=None, config=None): """ Parse arguments for ``idid`` command. Pass optional parameter ``arguments`` as either command line string or list of options. This is mainly useful for testing. ``config`` can be passed in as a path or string to access user defined values for important variables manually. YAML only. Returns the saved logg string. """ # Parse options, initialize gathered stats options = LoggOptions(arguments=arguments).parse() # FIXME: pass in only config; set config.journal = options.journal if not config: config = options.config_file logg = Logg(config, options.journal) return logg.logg_record(options.logg, options.date)
[ "def", "main", "(", "arguments", "=", "None", ",", "config", "=", "None", ")", ":", "# Parse options, initialize gathered stats", "options", "=", "LoggOptions", "(", "arguments", "=", "arguments", ")", ".", "parse", "(", ")", "# FIXME: pass in only config; set config.journal = options.journal", "if", "not", "config", ":", "config", "=", "options", ".", "config_file", "logg", "=", "Logg", "(", "config", ",", "options", ".", "journal", ")", "return", "logg", ".", "logg_record", "(", "options", ".", "logg", ",", "options", ".", "date", ")" ]
Parse arguments for ``idid`` command. Pass optional parameter ``arguments`` as either command line string or list of options. This is mainly useful for testing. ``config`` can be passed in as a path or string to access user defined values for important variables manually. YAML only. Returns the saved logg string.
[ "Parse", "arguments", "for", "idid", "command", "." ]
0f19e9ca9c8fa4a81e95c490dfbbc1b452c7451c
https://github.com/kejbaly2/idid/blob/0f19e9ca9c8fa4a81e95c490dfbbc1b452c7451c/idid/cli.py#L241-L263
241,508
kejbaly2/idid
idid/cli.py
LoggOptions._parse
def _parse(self, opts, args): """ Perform additional check for ``idid`` command arguments """ k_args = len(args) _dt = opts.date = None logg = opts.logg = None journal = opts.journal = None default_journal = self.config.get('default_journal') _journals = self.config.get('journals') or {} log.debug(' ... got {0} args [{1}]'.format(k_args, args)) if k_args == 0 and default_journal: # launch the editor to save a message into 'default' branch # FIXME: 'unsorted' should be configurable as 'default branch' log.warn('Target branch not set, using "{0}"'.format( default_journal)) journal, logg = default_journal, '--' elif k_args == 1: # NOTE: two different usage patterns can be expected here # 1) idid journal # launch EDITOR for logg in target journal # 2) idid 'logg record' # save under default branch 'unsorted' # if we have a value that's got more than one word in it, we # assume it's a logg (B), otherwise (A) arg = args.pop() k_words = len(arg.split()) # variant A); user wants to launch the editor # variany B); user wants to save record to 'master' branch # default to an unspecified, unsorted target journal since # journal not specified if k_words == 1: journal, logg = (arg, '--') elif default_journal: journal, logg = (default_journal, arg) else: raise RuntimeError('UNKNOWN ERROR') elif k_args == 2: # variants: # 1) idid [datetime] 'logg message' # 2) idid [datetime] journal # launch editor # 3) idid journal 'logg message' # 4) idid unquoted logg message _one = args[0].strip() _two = args[1].strip() # try to parse a date from the value _dt = self._parse_date(_one, DT_ISO_FMT) if _dt: if _two in _journals: # scenario 2) journal, logg = (_two, '--') else: # scenario 1) journal, logg = (_two, _one) elif _one in _journals: # senario 3) journal, logg = (_one, _two) elif default_journal: # senario 4) journal, logg = (default_journal, _two) else: raise RuntimeError("No journal specified.") elif k_args >= 3: # variants: # 1) idid [datetime] journal 'message' # 2) idid [datetime] unquoted logg # 3) idid journal unquoted logg _one = args[0] _two = args[1] _three = ' '.join(args[2:]) # try to parse a date from the value _dt = self._parse_date(_one, DT_ISO_FMT) if _dt: if _two in _journals: # scenario 1) journal, logg = (_two, _three) elif default_journal: # scenario 2) journal, logg = (_two, ' '.join(args[1:])) else: raise RuntimeError("No journal specified.") elif _one in _journals: # senario 3) journal, logg = (_one, ' '.join(args[1:])) elif default_journal: # senario 4) journal, logg = (default_journal, ' '.join(args[:])) else: raise RuntimeError("No journal specified.") else: raise RuntimeError("Ambiguous command line arguments!") opts.date = _dt or utils.Date('today', fmt=DT_ISO_FMT) opts.journal = journal opts.logg = logg log.debug(' Found Date: {0}'.format(_dt)) log.debug(' Found Target: {0}'.format(journal)) log.debug(' Found Logg: {0}'.format(logg)) return opts
python
def _parse(self, opts, args): """ Perform additional check for ``idid`` command arguments """ k_args = len(args) _dt = opts.date = None logg = opts.logg = None journal = opts.journal = None default_journal = self.config.get('default_journal') _journals = self.config.get('journals') or {} log.debug(' ... got {0} args [{1}]'.format(k_args, args)) if k_args == 0 and default_journal: # launch the editor to save a message into 'default' branch # FIXME: 'unsorted' should be configurable as 'default branch' log.warn('Target branch not set, using "{0}"'.format( default_journal)) journal, logg = default_journal, '--' elif k_args == 1: # NOTE: two different usage patterns can be expected here # 1) idid journal # launch EDITOR for logg in target journal # 2) idid 'logg record' # save under default branch 'unsorted' # if we have a value that's got more than one word in it, we # assume it's a logg (B), otherwise (A) arg = args.pop() k_words = len(arg.split()) # variant A); user wants to launch the editor # variany B); user wants to save record to 'master' branch # default to an unspecified, unsorted target journal since # journal not specified if k_words == 1: journal, logg = (arg, '--') elif default_journal: journal, logg = (default_journal, arg) else: raise RuntimeError('UNKNOWN ERROR') elif k_args == 2: # variants: # 1) idid [datetime] 'logg message' # 2) idid [datetime] journal # launch editor # 3) idid journal 'logg message' # 4) idid unquoted logg message _one = args[0].strip() _two = args[1].strip() # try to parse a date from the value _dt = self._parse_date(_one, DT_ISO_FMT) if _dt: if _two in _journals: # scenario 2) journal, logg = (_two, '--') else: # scenario 1) journal, logg = (_two, _one) elif _one in _journals: # senario 3) journal, logg = (_one, _two) elif default_journal: # senario 4) journal, logg = (default_journal, _two) else: raise RuntimeError("No journal specified.") elif k_args >= 3: # variants: # 1) idid [datetime] journal 'message' # 2) idid [datetime] unquoted logg # 3) idid journal unquoted logg _one = args[0] _two = args[1] _three = ' '.join(args[2:]) # try to parse a date from the value _dt = self._parse_date(_one, DT_ISO_FMT) if _dt: if _two in _journals: # scenario 1) journal, logg = (_two, _three) elif default_journal: # scenario 2) journal, logg = (_two, ' '.join(args[1:])) else: raise RuntimeError("No journal specified.") elif _one in _journals: # senario 3) journal, logg = (_one, ' '.join(args[1:])) elif default_journal: # senario 4) journal, logg = (default_journal, ' '.join(args[:])) else: raise RuntimeError("No journal specified.") else: raise RuntimeError("Ambiguous command line arguments!") opts.date = _dt or utils.Date('today', fmt=DT_ISO_FMT) opts.journal = journal opts.logg = logg log.debug(' Found Date: {0}'.format(_dt)) log.debug(' Found Target: {0}'.format(journal)) log.debug(' Found Logg: {0}'.format(logg)) return opts
[ "def", "_parse", "(", "self", ",", "opts", ",", "args", ")", ":", "k_args", "=", "len", "(", "args", ")", "_dt", "=", "opts", ".", "date", "=", "None", "logg", "=", "opts", ".", "logg", "=", "None", "journal", "=", "opts", ".", "journal", "=", "None", "default_journal", "=", "self", ".", "config", ".", "get", "(", "'default_journal'", ")", "_journals", "=", "self", ".", "config", ".", "get", "(", "'journals'", ")", "or", "{", "}", "log", ".", "debug", "(", "' ... got {0} args [{1}]'", ".", "format", "(", "k_args", ",", "args", ")", ")", "if", "k_args", "==", "0", "and", "default_journal", ":", "# launch the editor to save a message into 'default' branch", "# FIXME: 'unsorted' should be configurable as 'default branch'", "log", ".", "warn", "(", "'Target branch not set, using \"{0}\"'", ".", "format", "(", "default_journal", ")", ")", "journal", ",", "logg", "=", "default_journal", ",", "'--'", "elif", "k_args", "==", "1", ":", "# NOTE: two different usage patterns can be expected here", "# 1) idid journal # launch EDITOR for logg in target journal", "# 2) idid 'logg record' # save under default branch 'unsorted'", "# if we have a value that's got more than one word in it, we", "# assume it's a logg (B), otherwise (A)", "arg", "=", "args", ".", "pop", "(", ")", "k_words", "=", "len", "(", "arg", ".", "split", "(", ")", ")", "# variant A); user wants to launch the editor", "# variany B); user wants to save record to 'master' branch", "# default to an unspecified, unsorted target journal since", "# journal not specified", "if", "k_words", "==", "1", ":", "journal", ",", "logg", "=", "(", "arg", ",", "'--'", ")", "elif", "default_journal", ":", "journal", ",", "logg", "=", "(", "default_journal", ",", "arg", ")", "else", ":", "raise", "RuntimeError", "(", "'UNKNOWN ERROR'", ")", "elif", "k_args", "==", "2", ":", "# variants:", "# 1) idid [datetime] 'logg message'", "# 2) idid [datetime] journal # launch editor", "# 3) idid journal 'logg message'", "# 4) idid unquoted logg message", "_one", "=", "args", "[", "0", "]", ".", "strip", "(", ")", "_two", "=", "args", "[", "1", "]", ".", "strip", "(", ")", "# try to parse a date from the value", "_dt", "=", "self", ".", "_parse_date", "(", "_one", ",", "DT_ISO_FMT", ")", "if", "_dt", ":", "if", "_two", "in", "_journals", ":", "# scenario 2)", "journal", ",", "logg", "=", "(", "_two", ",", "'--'", ")", "else", ":", "# scenario 1)", "journal", ",", "logg", "=", "(", "_two", ",", "_one", ")", "elif", "_one", "in", "_journals", ":", "# senario 3)", "journal", ",", "logg", "=", "(", "_one", ",", "_two", ")", "elif", "default_journal", ":", "# senario 4)", "journal", ",", "logg", "=", "(", "default_journal", ",", "_two", ")", "else", ":", "raise", "RuntimeError", "(", "\"No journal specified.\"", ")", "elif", "k_args", ">=", "3", ":", "# variants:", "# 1) idid [datetime] journal 'message'", "# 2) idid [datetime] unquoted logg", "# 3) idid journal unquoted logg", "_one", "=", "args", "[", "0", "]", "_two", "=", "args", "[", "1", "]", "_three", "=", "' '", ".", "join", "(", "args", "[", "2", ":", "]", ")", "# try to parse a date from the value", "_dt", "=", "self", ".", "_parse_date", "(", "_one", ",", "DT_ISO_FMT", ")", "if", "_dt", ":", "if", "_two", "in", "_journals", ":", "# scenario 1)", "journal", ",", "logg", "=", "(", "_two", ",", "_three", ")", "elif", "default_journal", ":", "# scenario 2)", "journal", ",", "logg", "=", "(", "_two", ",", "' '", ".", "join", "(", "args", "[", "1", ":", "]", ")", ")", "else", ":", "raise", "RuntimeError", "(", 
"\"No journal specified.\"", ")", "elif", "_one", "in", "_journals", ":", "# senario 3)", "journal", ",", "logg", "=", "(", "_one", ",", "' '", ".", "join", "(", "args", "[", "1", ":", "]", ")", ")", "elif", "default_journal", ":", "# senario 4)", "journal", ",", "logg", "=", "(", "default_journal", ",", "' '", ".", "join", "(", "args", "[", ":", "]", ")", ")", "else", ":", "raise", "RuntimeError", "(", "\"No journal specified.\"", ")", "else", ":", "raise", "RuntimeError", "(", "\"Ambiguous command line arguments!\"", ")", "opts", ".", "date", "=", "_dt", "or", "utils", ".", "Date", "(", "'today'", ",", "fmt", "=", "DT_ISO_FMT", ")", "opts", ".", "journal", "=", "journal", "opts", ".", "logg", "=", "logg", "log", ".", "debug", "(", "' Found Date: {0}'", ".", "format", "(", "_dt", ")", ")", "log", ".", "debug", "(", "' Found Target: {0}'", ".", "format", "(", "journal", ")", ")", "log", ".", "debug", "(", "' Found Logg: {0}'", ".", "format", "(", "logg", ")", ")", "return", "opts" ]
Perform additional check for ``idid`` command arguments
[ "Perform", "additional", "check", "for", "idid", "command", "arguments" ]
0f19e9ca9c8fa4a81e95c490dfbbc1b452c7451c
https://github.com/kejbaly2/idid/blob/0f19e9ca9c8fa4a81e95c490dfbbc1b452c7451c/idid/cli.py#L129-L233
241,509
jonathansick/paperweight
paperweight/texutils.py
find_root_tex_document
def find_root_tex_document(base_dir="."): """Find the tex article in the current directory that can be considered a root. We do this by searching contents for ``'\documentclass'``. Parameters ---------- base_dir : str Directory to search for LaTeX documents, relative to the current working directory. Returns ------- tex_path : str Path to the root tex document relative to the current working directory. """ log = logging.getLogger(__name__) for tex_path in iter_tex_documents(base_dir=base_dir): with codecs.open(tex_path, 'r', encoding='utf-8') as f: text = f.read() if len(docclass_pattern.findall(text)) > 0: log.debug("Found root tex {0}".format(tex_path)) return tex_path log.warning("Could not find a root .tex file") raise RootNotFound
python
def find_root_tex_document(base_dir="."): """Find the tex article in the current directory that can be considered a root. We do this by searching contents for ``'\documentclass'``. Parameters ---------- base_dir : str Directory to search for LaTeX documents, relative to the current working directory. Returns ------- tex_path : str Path to the root tex document relative to the current working directory. """ log = logging.getLogger(__name__) for tex_path in iter_tex_documents(base_dir=base_dir): with codecs.open(tex_path, 'r', encoding='utf-8') as f: text = f.read() if len(docclass_pattern.findall(text)) > 0: log.debug("Found root tex {0}".format(tex_path)) return tex_path log.warning("Could not find a root .tex file") raise RootNotFound
[ "def", "find_root_tex_document", "(", "base_dir", "=", "\".\"", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "__name__", ")", "for", "tex_path", "in", "iter_tex_documents", "(", "base_dir", "=", "base_dir", ")", ":", "with", "codecs", ".", "open", "(", "tex_path", ",", "'r'", ",", "encoding", "=", "'utf-8'", ")", "as", "f", ":", "text", "=", "f", ".", "read", "(", ")", "if", "len", "(", "docclass_pattern", ".", "findall", "(", "text", ")", ")", ">", "0", ":", "log", ".", "debug", "(", "\"Found root tex {0}\"", ".", "format", "(", "tex_path", ")", ")", "return", "tex_path", "log", ".", "warning", "(", "\"Could not find a root .tex file\"", ")", "raise", "RootNotFound" ]
Find the tex article in the current directory that can be considered a root. We do this by searching contents for ``'\documentclass'``. Parameters ---------- base_dir : str Directory to search for LaTeX documents, relative to the current working directory. Returns ------- tex_path : str Path to the root tex document relative to the current working directory.
[ "Find", "the", "tex", "article", "in", "the", "current", "directory", "that", "can", "be", "considered", "a", "root", ".", "We", "do", "this", "by", "searching", "contents", "for", "\\", "documentclass", "." ]
803535b939a56d375967cefecd5fdca81323041e
https://github.com/jonathansick/paperweight/blob/803535b939a56d375967cefecd5fdca81323041e/paperweight/texutils.py#L30-L54
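A brief usage sketch for the record above; the directory name is made up, and the import path simply mirrors the file path recorded here (paperweight/texutils.py):

from paperweight.texutils import find_root_tex_document

# Scans every *.tex file under base_dir and returns the first one whose
# contents match the module-level docclass_pattern (a \documentclass match).
root_path = find_root_tex_document(base_dir='papers/my_article')
print(root_path)  # e.g. 'papers/my_article/ms.tex'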
241,510
jonathansick/paperweight
paperweight/texutils.py
iter_tex_documents
def iter_tex_documents(base_dir="."): """Iterate through all .tex documents in the current directory.""" for path, dirlist, filelist in os.walk(base_dir): for name in fnmatch.filter(filelist, "*.tex"): yield os.path.join(path, name)
python
def iter_tex_documents(base_dir="."): """Iterate through all .tex documents in the current directory.""" for path, dirlist, filelist in os.walk(base_dir): for name in fnmatch.filter(filelist, "*.tex"): yield os.path.join(path, name)
[ "def", "iter_tex_documents", "(", "base_dir", "=", "\".\"", ")", ":", "for", "path", ",", "dirlist", ",", "filelist", "in", "os", ".", "walk", "(", "base_dir", ")", ":", "for", "name", "in", "fnmatch", ".", "filter", "(", "filelist", ",", "\"*.tex\"", ")", ":", "yield", "os", ".", "path", ".", "join", "(", "path", ",", "name", ")" ]
Iterate through all .tex documents in the current directory.
[ "Iterate", "through", "all", ".", "tex", "documents", "in", "the", "current", "directory", "." ]
803535b939a56d375967cefecd5fdca81323041e
https://github.com/jonathansick/paperweight/blob/803535b939a56d375967cefecd5fdca81323041e/paperweight/texutils.py#L57-L61
241,511
jonathansick/paperweight
paperweight/texutils.py
inline
def inline(root_text, base_dir="", replacer=None, ifexists_replacer=None): """Inline all input latex files. The inlining is accomplished recursively. All files are opened as UTF-8 unicode files. Parameters ---------- root_txt : unicode Text to process (and include in-lined files). base_dir : str Base directory of file containing ``root_text``. Defaults to the current working directory. replacer : function Function called by :func:`re.sub` to replace ``\input`` expressions with a latex document. Changeable only for testing purposes. ifexists_replacer : function Function called by :func:`re.sub` to replace ``\InputIfExists`` expressions with a latex document. Changeable only for testing purposes. Returns ------- txt : unicode Text with referenced files included. """ def _sub_line(match): """Function to be used with re.sub to inline files for each match.""" fname = match.group(1) if not fname.endswith('.tex'): full_fname = ".".join((fname, 'tex')) else: full_fname = fname full_path = os.path.abspath(os.path.join(base_dir, full_fname)) try: with codecs.open(full_path, 'r', encoding='utf-8') as f: included_text = f.read() except IOError: # TODO actually do logging here print("Cannot open {0} for in-lining".format(full_path)) return u"" else: # Recursively inline files included_text = inline(included_text, base_dir=base_dir) return included_text def _sub_line_ifexists(match): """Function to be used with re.sub for the input_ifexists_pattern.""" fname = match.group(1) if not fname.endswith('.tex'): full_fname = ".".join((fname, 'tex')) else: full_fname = fname full_path = os.path.abspath(os.path.join(base_dir, full_fname)) if os.path.exists(full_path): with codecs.open(full_path, 'r', encoding='utf-8') as f: included_text = f.read() # Append extra info after input included_text = "\n".join((included_text, match.group(2))) else: # Use the fall-back clause in InputIfExists included_text = match.group(3) # Recursively inline files included_text = inline(included_text, base_dir=base_dir) return included_text # Text processing pipline result = remove_comments(root_text) result = input_pattern.sub(_sub_line, result) result = include_pattern.sub(_sub_line, result) result = input_ifexists_pattern.sub(_sub_line_ifexists, result) return result
python
def inline(root_text, base_dir="", replacer=None, ifexists_replacer=None): """Inline all input latex files. The inlining is accomplished recursively. All files are opened as UTF-8 unicode files. Parameters ---------- root_txt : unicode Text to process (and include in-lined files). base_dir : str Base directory of file containing ``root_text``. Defaults to the current working directory. replacer : function Function called by :func:`re.sub` to replace ``\input`` expressions with a latex document. Changeable only for testing purposes. ifexists_replacer : function Function called by :func:`re.sub` to replace ``\InputIfExists`` expressions with a latex document. Changeable only for testing purposes. Returns ------- txt : unicode Text with referenced files included. """ def _sub_line(match): """Function to be used with re.sub to inline files for each match.""" fname = match.group(1) if not fname.endswith('.tex'): full_fname = ".".join((fname, 'tex')) else: full_fname = fname full_path = os.path.abspath(os.path.join(base_dir, full_fname)) try: with codecs.open(full_path, 'r', encoding='utf-8') as f: included_text = f.read() except IOError: # TODO actually do logging here print("Cannot open {0} for in-lining".format(full_path)) return u"" else: # Recursively inline files included_text = inline(included_text, base_dir=base_dir) return included_text def _sub_line_ifexists(match): """Function to be used with re.sub for the input_ifexists_pattern.""" fname = match.group(1) if not fname.endswith('.tex'): full_fname = ".".join((fname, 'tex')) else: full_fname = fname full_path = os.path.abspath(os.path.join(base_dir, full_fname)) if os.path.exists(full_path): with codecs.open(full_path, 'r', encoding='utf-8') as f: included_text = f.read() # Append extra info after input included_text = "\n".join((included_text, match.group(2))) else: # Use the fall-back clause in InputIfExists included_text = match.group(3) # Recursively inline files included_text = inline(included_text, base_dir=base_dir) return included_text # Text processing pipline result = remove_comments(root_text) result = input_pattern.sub(_sub_line, result) result = include_pattern.sub(_sub_line, result) result = input_ifexists_pattern.sub(_sub_line_ifexists, result) return result
[ "def", "inline", "(", "root_text", ",", "base_dir", "=", "\"\"", ",", "replacer", "=", "None", ",", "ifexists_replacer", "=", "None", ")", ":", "def", "_sub_line", "(", "match", ")", ":", "\"\"\"Function to be used with re.sub to inline files for each match.\"\"\"", "fname", "=", "match", ".", "group", "(", "1", ")", "if", "not", "fname", ".", "endswith", "(", "'.tex'", ")", ":", "full_fname", "=", "\".\"", ".", "join", "(", "(", "fname", ",", "'tex'", ")", ")", "else", ":", "full_fname", "=", "fname", "full_path", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "base_dir", ",", "full_fname", ")", ")", "try", ":", "with", "codecs", ".", "open", "(", "full_path", ",", "'r'", ",", "encoding", "=", "'utf-8'", ")", "as", "f", ":", "included_text", "=", "f", ".", "read", "(", ")", "except", "IOError", ":", "# TODO actually do logging here", "print", "(", "\"Cannot open {0} for in-lining\"", ".", "format", "(", "full_path", ")", ")", "return", "u\"\"", "else", ":", "# Recursively inline files", "included_text", "=", "inline", "(", "included_text", ",", "base_dir", "=", "base_dir", ")", "return", "included_text", "def", "_sub_line_ifexists", "(", "match", ")", ":", "\"\"\"Function to be used with re.sub for the input_ifexists_pattern.\"\"\"", "fname", "=", "match", ".", "group", "(", "1", ")", "if", "not", "fname", ".", "endswith", "(", "'.tex'", ")", ":", "full_fname", "=", "\".\"", ".", "join", "(", "(", "fname", ",", "'tex'", ")", ")", "else", ":", "full_fname", "=", "fname", "full_path", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "base_dir", ",", "full_fname", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "full_path", ")", ":", "with", "codecs", ".", "open", "(", "full_path", ",", "'r'", ",", "encoding", "=", "'utf-8'", ")", "as", "f", ":", "included_text", "=", "f", ".", "read", "(", ")", "# Append extra info after input", "included_text", "=", "\"\\n\"", ".", "join", "(", "(", "included_text", ",", "match", ".", "group", "(", "2", ")", ")", ")", "else", ":", "# Use the fall-back clause in InputIfExists", "included_text", "=", "match", ".", "group", "(", "3", ")", "# Recursively inline files", "included_text", "=", "inline", "(", "included_text", ",", "base_dir", "=", "base_dir", ")", "return", "included_text", "# Text processing pipline", "result", "=", "remove_comments", "(", "root_text", ")", "result", "=", "input_pattern", ".", "sub", "(", "_sub_line", ",", "result", ")", "result", "=", "include_pattern", ".", "sub", "(", "_sub_line", ",", "result", ")", "result", "=", "input_ifexists_pattern", ".", "sub", "(", "_sub_line_ifexists", ",", "result", ")", "return", "result" ]
Inline all input latex files. The inlining is accomplished recursively. All files are opened as UTF-8 unicode files. Parameters ---------- root_txt : unicode Text to process (and include in-lined files). base_dir : str Base directory of file containing ``root_text``. Defaults to the current working directory. replacer : function Function called by :func:`re.sub` to replace ``\input`` expressions with a latex document. Changeable only for testing purposes. ifexists_replacer : function Function called by :func:`re.sub` to replace ``\InputIfExists`` expressions with a latex document. Changeable only for testing purposes. Returns ------- txt : unicode Text with referenced files included.
[ "Inline", "all", "input", "latex", "files", "." ]
803535b939a56d375967cefecd5fdca81323041e
https://github.com/jonathansick/paperweight/blob/803535b939a56d375967cefecd5fdca81323041e/paperweight/texutils.py#L89-L164
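A sketch of the inlining step from the record above, reusing find_root_tex_document from record 241,510; the import path and the paper directory on disk are assumptions:

import codecs
import os

from paperweight.texutils import find_root_tex_document, inline

root_path = find_root_tex_document(base_dir='papers/my_article')
with codecs.open(root_path, 'r', encoding='utf-8') as f:
    root_text = f.read()

# Recursively replaces \input, \include and \InputIfExists references with
# the contents of the referenced .tex files, resolved against base_dir.
flattened = inline(root_text, base_dir=os.path.dirname(root_path))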
241,512
jonathansick/paperweight
paperweight/texutils.py
inline_blob
def inline_blob(commit_ref, root_text, base_dir='.', repo_dir=""): """Inline all input latex files that exist as git blobs in a tree object. The inlining is accomplished recursively. All files are opened as UTF-8 unicode files. Parameters ---------- commit_ref : str String identifying a git commit/tag. root_text : unicode Text of tex document where referenced files will be inlined. base_dir : str Directory of the master tex document, relative to the repo_dir. repo_dir : str Directory of the containing git repository. Returns ------- txt : unicode Text with referenced files included. """ def _sub_blob(match): """Function to be used with re.sub to inline files for each match.""" fname = match.group(1) if not fname.endswith('.tex'): full_fname = ".".join((fname, 'tex')) else: full_fname = fname git_rel_path = os.path.relpath(full_fname, base_dir) included_text = read_git_blob(commit_ref, git_rel_path, repo_dir=repo_dir) if included_text is None: # perhaps file is not in VC # FIXME need to deal with possibility # it does not exist there either with codecs.open(full_fname, 'r', encoding='utf-8') as f: included_text = f.read() # Recursively inline files included_text = inline_blob(commit_ref, included_text, base_dir=base_dir, repo_dir=repo_dir) return included_text def _sub_blob_ifexists(match): """Function to be used with re.sub for the input_ifexists_pattern.""" fname = match.group(1) if not fname.endswith('.tex'): full_fname = ".".join((fname, 'tex')) else: full_fname = fname # full_fname is relative to the root_path # Make path relative to git repo root git_rel_path = os.path.relpath( os.path.join(repo_dir, base_dir, full_fname), repo_dir) included_text = read_git_blob(commit_ref, git_rel_path, repo_dir=repo_dir) if included_text is not None: # Append extra info after input included_text = "\n".join((included_text, match.group(2))) if included_text is None: # Use the fall-back clause in InputIfExists included_text = match.group(3) # Recursively inline files included_text = inline_blob(commit_ref, included_text, base_dir=base_dir, repo_dir=repo_dir) return included_text # Text processing pipline result = remove_comments(root_text) result = input_pattern.sub(_sub_blob, result) result = include_pattern.sub(_sub_blob, result) result = input_ifexists_pattern.sub(_sub_blob_ifexists, result) return result
python
def inline_blob(commit_ref, root_text, base_dir='.', repo_dir=""): """Inline all input latex files that exist as git blobs in a tree object. The inlining is accomplished recursively. All files are opened as UTF-8 unicode files. Parameters ---------- commit_ref : str String identifying a git commit/tag. root_text : unicode Text of tex document where referenced files will be inlined. base_dir : str Directory of the master tex document, relative to the repo_dir. repo_dir : str Directory of the containing git repository. Returns ------- txt : unicode Text with referenced files included. """ def _sub_blob(match): """Function to be used with re.sub to inline files for each match.""" fname = match.group(1) if not fname.endswith('.tex'): full_fname = ".".join((fname, 'tex')) else: full_fname = fname git_rel_path = os.path.relpath(full_fname, base_dir) included_text = read_git_blob(commit_ref, git_rel_path, repo_dir=repo_dir) if included_text is None: # perhaps file is not in VC # FIXME need to deal with possibility # it does not exist there either with codecs.open(full_fname, 'r', encoding='utf-8') as f: included_text = f.read() # Recursively inline files included_text = inline_blob(commit_ref, included_text, base_dir=base_dir, repo_dir=repo_dir) return included_text def _sub_blob_ifexists(match): """Function to be used with re.sub for the input_ifexists_pattern.""" fname = match.group(1) if not fname.endswith('.tex'): full_fname = ".".join((fname, 'tex')) else: full_fname = fname # full_fname is relative to the root_path # Make path relative to git repo root git_rel_path = os.path.relpath( os.path.join(repo_dir, base_dir, full_fname), repo_dir) included_text = read_git_blob(commit_ref, git_rel_path, repo_dir=repo_dir) if included_text is not None: # Append extra info after input included_text = "\n".join((included_text, match.group(2))) if included_text is None: # Use the fall-back clause in InputIfExists included_text = match.group(3) # Recursively inline files included_text = inline_blob(commit_ref, included_text, base_dir=base_dir, repo_dir=repo_dir) return included_text # Text processing pipline result = remove_comments(root_text) result = input_pattern.sub(_sub_blob, result) result = include_pattern.sub(_sub_blob, result) result = input_ifexists_pattern.sub(_sub_blob_ifexists, result) return result
[ "def", "inline_blob", "(", "commit_ref", ",", "root_text", ",", "base_dir", "=", "'.'", ",", "repo_dir", "=", "\"\"", ")", ":", "def", "_sub_blob", "(", "match", ")", ":", "\"\"\"Function to be used with re.sub to inline files for each match.\"\"\"", "fname", "=", "match", ".", "group", "(", "1", ")", "if", "not", "fname", ".", "endswith", "(", "'.tex'", ")", ":", "full_fname", "=", "\".\"", ".", "join", "(", "(", "fname", ",", "'tex'", ")", ")", "else", ":", "full_fname", "=", "fname", "git_rel_path", "=", "os", ".", "path", ".", "relpath", "(", "full_fname", ",", "base_dir", ")", "included_text", "=", "read_git_blob", "(", "commit_ref", ",", "git_rel_path", ",", "repo_dir", "=", "repo_dir", ")", "if", "included_text", "is", "None", ":", "# perhaps file is not in VC", "# FIXME need to deal with possibility", "# it does not exist there either", "with", "codecs", ".", "open", "(", "full_fname", ",", "'r'", ",", "encoding", "=", "'utf-8'", ")", "as", "f", ":", "included_text", "=", "f", ".", "read", "(", ")", "# Recursively inline files", "included_text", "=", "inline_blob", "(", "commit_ref", ",", "included_text", ",", "base_dir", "=", "base_dir", ",", "repo_dir", "=", "repo_dir", ")", "return", "included_text", "def", "_sub_blob_ifexists", "(", "match", ")", ":", "\"\"\"Function to be used with re.sub for the input_ifexists_pattern.\"\"\"", "fname", "=", "match", ".", "group", "(", "1", ")", "if", "not", "fname", ".", "endswith", "(", "'.tex'", ")", ":", "full_fname", "=", "\".\"", ".", "join", "(", "(", "fname", ",", "'tex'", ")", ")", "else", ":", "full_fname", "=", "fname", "# full_fname is relative to the root_path", "# Make path relative to git repo root", "git_rel_path", "=", "os", ".", "path", ".", "relpath", "(", "os", ".", "path", ".", "join", "(", "repo_dir", ",", "base_dir", ",", "full_fname", ")", ",", "repo_dir", ")", "included_text", "=", "read_git_blob", "(", "commit_ref", ",", "git_rel_path", ",", "repo_dir", "=", "repo_dir", ")", "if", "included_text", "is", "not", "None", ":", "# Append extra info after input", "included_text", "=", "\"\\n\"", ".", "join", "(", "(", "included_text", ",", "match", ".", "group", "(", "2", ")", ")", ")", "if", "included_text", "is", "None", ":", "# Use the fall-back clause in InputIfExists", "included_text", "=", "match", ".", "group", "(", "3", ")", "# Recursively inline files", "included_text", "=", "inline_blob", "(", "commit_ref", ",", "included_text", ",", "base_dir", "=", "base_dir", ",", "repo_dir", "=", "repo_dir", ")", "return", "included_text", "# Text processing pipline", "result", "=", "remove_comments", "(", "root_text", ")", "result", "=", "input_pattern", ".", "sub", "(", "_sub_blob", ",", "result", ")", "result", "=", "include_pattern", ".", "sub", "(", "_sub_blob", ",", "result", ")", "result", "=", "input_ifexists_pattern", ".", "sub", "(", "_sub_blob_ifexists", ",", "result", ")", "return", "result" ]
Inline all input latex files that exist as git blobs in a tree object. The inlining is accomplished recursively. All files are opened as UTF-8 unicode files. Parameters ---------- commit_ref : str String identifying a git commit/tag. root_text : unicode Text of tex document where referenced files will be inlined. base_dir : str Directory of the master tex document, relative to the repo_dir. repo_dir : str Directory of the containing git repository. Returns ------- txt : unicode Text with referenced files included.
[ "Inline", "all", "input", "latex", "files", "that", "exist", "as", "git", "blobs", "in", "a", "tree", "object", "." ]
803535b939a56d375967cefecd5fdca81323041e
https://github.com/jonathansick/paperweight/blob/803535b939a56d375967cefecd5fdca81323041e/paperweight/texutils.py#L167-L246
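A minimal usage sketch for the inline_blob record above. The root document name, commit ref and working directory are placeholders, and the import path is assumed from paperweight/texutils.py; this is an illustration, not documented API usage.

import codecs
from paperweight.texutils import inline_blob   # import path assumed from paperweight/texutils.py

# Read the root LaTeX document from the working tree, then splice in every
# \input/\include'd file as it exists at the chosen commit.
with codecs.open('paper.tex', 'r', encoding='utf-8') as f:   # hypothetical root document
    root_text = f.read()

flattened = inline_blob('HEAD', root_text, base_dir='.', repo_dir='.')
print(flattened[:200])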
241,513
codingjester/pycitibike
pycitibike/__init__.py
Citibike._get
def _get(self, uri, options): """ Quick and dirty wrapper around the requests object to do some simple data catching :params uri: a string, the uri you want to request :params options: a dict, the list of parameters you want to use """ url = "http://%s/%s" % (self.host, uri) r = requests.get(url, params=options) if r.status_code == 200: data = r.json() return data['results'] else: # Throws anything not 200 error r.raise_for_status()
python
def _get(self, uri, options): """ Quick and dirty wrapper around the requests object to do some simple data catching :params uri: a string, the uri you want to request :params options: a dict, the list of parameters you want to use """ url = "http://%s/%s" % (self.host, uri) r = requests.get(url, params=options) if r.status_code == 200: data = r.json() return data['results'] else: # Throws anything not 200 error r.raise_for_status()
[ "def", "_get", "(", "self", ",", "uri", ",", "options", ")", ":", "url", "=", "\"http://%s/%s\"", "%", "(", "self", ".", "host", ",", "uri", ")", "r", "=", "requests", ".", "get", "(", "url", ",", "params", "=", "options", ")", "if", "r", ".", "status_code", "==", "200", ":", "data", "=", "r", ".", "json", "(", ")", "return", "data", "[", "'results'", "]", "else", ":", "# Throws anything not 200 error", "r", ".", "raise_for_status", "(", ")" ]
Quick and dirty wrapper around the requests object to do some simple data catching :param uri: a string, the uri you want to request :param options: a dict, the list of parameters you want to use
[ "Quick", "and", "dirty", "wrapper", "around", "the", "requests", "object", "to", "do", "some", "simple", "data", "catching" ]
740a6f9da60a1e0ff97a9c61ae4469ec2e207443
https://github.com/codingjester/pycitibike/blob/740a6f9da60a1e0ff97a9c61ae4469ec2e207443/pycitibike/__init__.py#L39-L54
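The _get record above is a thin wrapper over requests; the standalone sketch below reproduces the same pattern outside the class so it can be run directly. The host and endpoint are placeholders, not documented Citibike endpoints.

import requests

def get_results(host, uri, options=None):
    # Same flow as Citibike._get: build the URL, attach query parameters,
    # return the "results" payload on HTTP 200 and raise for anything else.
    url = "http://%s/%s" % (host, uri)
    r = requests.get(url, params=options or {})
    if r.status_code == 200:
        return r.json()['results']
    r.raise_for_status()

# stations = get_results("api.example.com", "stations", {"limit": 5})   # hypothetical call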
241,514
shreyaspotnis/rampage
rampage/widgets/MainWindow.py
MainWindow.setWindowTitle
def setWindowTitle(self, newTitle=''): """Prepend Rampage to all window titles.""" title = 'Rampage - ' + newTitle super(MainWindow, self).setWindowTitle(title)
python
def setWindowTitle(self, newTitle=''): """Prepend Rampage to all window titles.""" title = 'Rampage - ' + newTitle super(MainWindow, self).setWindowTitle(title)
[ "def", "setWindowTitle", "(", "self", ",", "newTitle", "=", "''", ")", ":", "title", "=", "'Rampage - '", "+", "newTitle", "super", "(", "MainWindow", ",", "self", ")", ".", "setWindowTitle", "(", "title", ")" ]
Prepend Rampage to all window titles.
[ "Prepend", "Rampage", "to", "all", "window", "titles", "." ]
e2565aef7ee16ee06523de975e8aa41aca14e3b2
https://github.com/shreyaspotnis/rampage/blob/e2565aef7ee16ee06523de975e8aa41aca14e3b2/rampage/widgets/MainWindow.py#L105-L108
241,515
Amsterdam/authorization_django
authorization_django/jwks.py
load
def load(jwks): """Parse a JWKSet and return a dictionary that maps key IDs on keys.""" sign_keys = {} verify_keys = {} try: keyset = json.loads(jwks) for key in keyset['keys']: for op in key['key_ops']: if op == 'sign': k = sign_keys elif op == 'verify': k = verify_keys else: raise JWKError("Unsupported key operation: {}".format(op)) if key['kty'] == 'oct': k[key['kid']] = _Key(alg=key['alg'], key=base64.urlsafe_b64decode(key['k'])) elif key['kty'] == 'EC': alg, ec_key = _load_ecdsa(key, op == 'verify') k[key['kid']] = _Key(alg=alg, key=ec_key) else: raise JWKError("Unsupported key type: {}".format(key['kty'])) except (KeyError, json.JSONDecodeError) as e: raise JWKError() from e keys = _KeySet(signers=MappingProxyType(sign_keys), verifiers=MappingProxyType(verify_keys)) return keys
python
def load(jwks): """Parse a JWKSet and return a dictionary that maps key IDs on keys.""" sign_keys = {} verify_keys = {} try: keyset = json.loads(jwks) for key in keyset['keys']: for op in key['key_ops']: if op == 'sign': k = sign_keys elif op == 'verify': k = verify_keys else: raise JWKError("Unsupported key operation: {}".format(op)) if key['kty'] == 'oct': k[key['kid']] = _Key(alg=key['alg'], key=base64.urlsafe_b64decode(key['k'])) elif key['kty'] == 'EC': alg, ec_key = _load_ecdsa(key, op == 'verify') k[key['kid']] = _Key(alg=alg, key=ec_key) else: raise JWKError("Unsupported key type: {}".format(key['kty'])) except (KeyError, json.JSONDecodeError) as e: raise JWKError() from e keys = _KeySet(signers=MappingProxyType(sign_keys), verifiers=MappingProxyType(verify_keys)) return keys
[ "def", "load", "(", "jwks", ")", ":", "sign_keys", "=", "{", "}", "verify_keys", "=", "{", "}", "try", ":", "keyset", "=", "json", ".", "loads", "(", "jwks", ")", "for", "key", "in", "keyset", "[", "'keys'", "]", ":", "for", "op", "in", "key", "[", "'key_ops'", "]", ":", "if", "op", "==", "'sign'", ":", "k", "=", "sign_keys", "elif", "op", "==", "'verify'", ":", "k", "=", "verify_keys", "else", ":", "raise", "JWKError", "(", "\"Unsupported key operation: {}\"", ".", "format", "(", "op", ")", ")", "if", "key", "[", "'kty'", "]", "==", "'oct'", ":", "k", "[", "key", "[", "'kid'", "]", "]", "=", "_Key", "(", "alg", "=", "key", "[", "'alg'", "]", ",", "key", "=", "base64", ".", "urlsafe_b64decode", "(", "key", "[", "'k'", "]", ")", ")", "elif", "key", "[", "'kty'", "]", "==", "'EC'", ":", "alg", ",", "ec_key", "=", "_load_ecdsa", "(", "key", ",", "op", "==", "'verify'", ")", "k", "[", "key", "[", "'kid'", "]", "]", "=", "_Key", "(", "alg", "=", "alg", ",", "key", "=", "ec_key", ")", "else", ":", "raise", "JWKError", "(", "\"Unsupported key type: {}\"", ".", "format", "(", "key", "[", "'kty'", "]", ")", ")", "except", "(", "KeyError", ",", "json", ".", "JSONDecodeError", ")", "as", "e", ":", "raise", "JWKError", "(", ")", "from", "e", "keys", "=", "_KeySet", "(", "signers", "=", "MappingProxyType", "(", "sign_keys", ")", ",", "verifiers", "=", "MappingProxyType", "(", "verify_keys", ")", ")", "return", "keys" ]
Parse a JWKSet and return a dictionary that maps key IDs on keys.
[ "Parse", "a", "JWKSet", "and", "return", "a", "dictionary", "that", "maps", "key", "IDs", "on", "keys", "." ]
71da52b38a7f5a16a2bde8f8ea97b3c11ccb1be1
https://github.com/Amsterdam/authorization_django/blob/71da52b38a7f5a16a2bde8f8ea97b3c11ccb1be1/authorization_django/jwks.py#L21-L45
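A small sketch of feeding load() a JWK set, based only on the code above. The kid, alg and secret are made-up example values, and the import path is assumed from authorization_django/jwks.py.

import json
from authorization_django import jwks   # assumed import path

jwkset = json.dumps({
    "keys": [{
        "kty": "oct",                      # symmetric key branch of load()
        "kid": "example-key-1",
        "alg": "HS256",
        "key_ops": ["sign", "verify"],     # registered in both mappings
        "k": "c2VjcmV0",                   # urlsafe base64 of b"secret"
    }]
})

keyset = jwks.load(jwkset)
print(list(keyset.signers), list(keyset.verifiers))   # both contain 'example-key-1'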
241,516
vinu76jsr/pipsort
setup.py
version
def version(): """ Get the local package version. """ path = join("lib", _CONFIG["name"], "__version__.py") with open(path) as stream: exec(stream.read()) return __version__
python
def version(): """ Get the local package version. """ path = join("lib", _CONFIG["name"], "__version__.py") with open(path) as stream: exec(stream.read()) return __version__
[ "def", "version", "(", ")", ":", "path", "=", "join", "(", "\"lib\"", ",", "_CONFIG", "[", "\"name\"", "]", ",", "\"__version__.py\"", ")", "with", "open", "(", "path", ")", "as", "stream", ":", "exec", "(", "stream", ".", "read", "(", ")", ")", "return", "__version__" ]
Get the local package version.
[ "Get", "the", "local", "package", "version", "." ]
71ead1269de85ee0255741390bf1da85d81b7d16
https://github.com/vinu76jsr/pipsort/blob/71ead1269de85ee0255741390bf1da85d81b7d16/setup.py#L49-L56
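A self-contained variant of the exec-based version lookup above, with placeholder package name and version string. It executes the file into an explicit namespace dict, which is a more robust choice than relying on exec() mutating function locals.

from os.path import join

_CONFIG = {"name": "mypkg"}   # placeholder package name

# lib/mypkg/__version__.py is assumed to contain a single line such as:
#     __version__ = "1.2.3"

def version():
    """Get the local package version."""
    path = join("lib", _CONFIG["name"], "__version__.py")
    with open(path) as stream:
        namespace = {}
        exec(stream.read(), namespace)   # collect the assignment explicitly
    return namespace["__version__"]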
241,517
konture/CloeePy
cloeepy/config.py
Config._set_config_path
def _set_config_path(self): """ Reads config path from environment variable CLOEEPY_CONFIG_PATH and sets as instance attr """ self._path = os.getenv("CLOEEPY_CONFIG_PATH") if self._path is None: msg = "CLOEEPY_CONFIG_PATH is not set. Exiting..." sys.exit(msg)
python
def _set_config_path(self): """ Reads config path from environment variable CLOEEPY_CONFIG_PATH and sets as instance attr """ self._path = os.getenv("CLOEEPY_CONFIG_PATH") if self._path is None: msg = "CLOEEPY_CONFIG_PATH is not set. Exiting..." sys.exit(msg)
[ "def", "_set_config_path", "(", "self", ")", ":", "self", ".", "_path", "=", "os", ".", "getenv", "(", "\"CLOEEPY_CONFIG_PATH\"", ")", "if", "self", ".", "_path", "is", "None", ":", "msg", "=", "\"CLOEEPY_CONFIG_PATH is not set. Exiting...\"", "sys", ".", "exit", "(", "msg", ")" ]
Reads config path from environment variable CLOEEPY_CONFIG_PATH and sets as instance attr
[ "Reads", "config", "path", "from", "environment", "variable", "CLOEEPY_CONFIG_PATH", "and", "sets", "as", "instance", "attr" ]
dcb21284d2df405d92ac6868ea7215792c9323b9
https://github.com/konture/CloeePy/blob/dcb21284d2df405d92ac6868ea7215792c9323b9/cloeepy/config.py#L45-L53
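Since the loader only reads CLOEEPY_CONFIG_PATH, the variable has to be set before the configuration object is built. The path is a placeholder, the import is assumed from cloeepy/config.py, and a no-argument Config() constructor that runs _set_config_path()/_load_config() is assumed.

import os

os.environ["CLOEEPY_CONFIG_PATH"] = "/etc/myapp/config.yml"   # hypothetical path

from cloeepy.config import Config   # assumed import path
config = Config()                    # assumes construction triggers _set_config_path()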
241,518
konture/CloeePy
cloeepy/config.py
Config._load_config
def _load_config(self): """ Loads the YAML configuration file and sets python dictionary and raw contents as instance attrs. """ if not os.path.exists(self._path): sys.exit("Config path %s does not exist" % self._path) # create empty config object self._config_dict = {} # read file and marshal yaml with open(self._path, 'r') as f: self._raw = f.read() self._config_dict = yaml.load(self._raw)
python
def _load_config(self): """ Loads the YAML configuration file and sets python dictionary and raw contents as instance attrs. """ if not os.path.exists(self._path): sys.exit("Config path %s does not exist" % self._path) # create empty config object self._config_dict = {} # read file and marshal yaml with open(self._path, 'r') as f: self._raw = f.read() self._config_dict = yaml.load(self._raw)
[ "def", "_load_config", "(", "self", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "_path", ")", ":", "sys", ".", "exit", "(", "\"Config path %s does not exist\"", "%", "self", ".", "_path", ")", "# create empty config object", "self", ".", "_config_dict", "=", "{", "}", "# read file and marshal yaml", "with", "open", "(", "self", ".", "_path", ",", "'r'", ")", "as", "f", ":", "self", ".", "_raw", "=", "f", ".", "read", "(", ")", "self", ".", "_config_dict", "=", "yaml", ".", "load", "(", "self", ".", "_raw", ")" ]
Loads the YAML configuration file and sets python dictionary and raw contents as instance attrs.
[ "Loads", "the", "YAML", "configuration", "file", "and", "sets", "python", "dictionary", "and", "raw", "contents", "as", "instance", "attrs", "." ]
dcb21284d2df405d92ac6868ea7215792c9323b9
https://github.com/konture/CloeePy/blob/dcb21284d2df405d92ac6868ea7215792c9323b9/cloeepy/config.py#L56-L68
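A standalone sketch of what _load_config does to the file contents, using an inline YAML document instead of a file on disk; safe_load is used here as a conservative stand-in for yaml.load.

import yaml

raw = """
logging:
  level: INFO
mongo:
  host: localhost
  port: 27017
"""

config_dict = yaml.safe_load(raw)    # what self._config_dict ends up holding
print(config_dict["mongo"]["port"])  # -> 27017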
241,519
konture/CloeePy
cloeepy/config.py
Config._set_attributes
def _set_attributes(self): """ Recursively transforms config dictionaries into instance attrs to make for easy dot attribute access instead of dictionary access. """ # turn config dict into nested objects config = obj(self._config_dict) # set the attributes onto instance for k, v in self._config_dict.items(): setattr(self, k, getattr(config, k))
python
def _set_attributes(self): """ Recursively transforms config dictionaries into instance attrs to make for easy dot attribute access instead of dictionary access. """ # turn config dict into nested objects config = obj(self._config_dict) # set the attributes onto instance for k, v in self._config_dict.items(): setattr(self, k, getattr(config, k))
[ "def", "_set_attributes", "(", "self", ")", ":", "# turn config dict into nested objects", "config", "=", "obj", "(", "self", ".", "_config_dict", ")", "# set the attributes onto instance", "for", "k", ",", "v", "in", "self", ".", "_config_dict", ".", "items", "(", ")", ":", "setattr", "(", "self", ",", "k", ",", "getattr", "(", "config", ",", "k", ")", ")" ]
Recursively transforms config dictionaries into instance attrs to make for easy dot attribute access instead of dictionary access.
[ "Recursively", "transforms", "config", "dictionaries", "into", "instance", "attrs", "to", "make", "for", "easy", "dot", "attribute", "access", "instead", "of", "dictionary", "access", "." ]
dcb21284d2df405d92ac6868ea7215792c9323b9
https://github.com/konture/CloeePy/blob/dcb21284d2df405d92ac6868ea7215792c9323b9/cloeepy/config.py#L83-L92
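The obj() helper referenced above is not shown in this record; the sketch below is a stand-in that illustrates the same dict-to-attribute conversion idea.

from types import SimpleNamespace

def to_namespace(value):
    # Recursively turn nested dicts into attribute-accessible objects,
    # analogous to what obj() appears to do for Config.
    if isinstance(value, dict):
        return SimpleNamespace(**{k: to_namespace(v) for k, v in value.items()})
    return value

cfg = to_namespace({"mongo": {"host": "localhost", "port": 27017}})
print(cfg.mongo.port)   # dot access instead of cfg["mongo"]["port"]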
241,520
JNRowe/jnrbase
jnrbase/template.py
highlight
def highlight(__text: str, *, lexer: str = 'diff', formatter: str = 'terminal') -> str: """Highlight text highlighted using ``pygments``. Returns text untouched if colour output is not enabled. See also: :pypi:`Pygments` Args: __text: Text to highlight lexer: Jinja lexer to use formatter: Jinja formatter to use Returns: Syntax highlighted output, when possible """ if sys.stdout.isatty(): lexer = get_lexer_by_name(lexer) formatter = get_formatter_by_name(formatter) __text = pyg_highlight(__text, lexer, formatter) return __text
python
def highlight(__text: str, *, lexer: str = 'diff', formatter: str = 'terminal') -> str: """Highlight text highlighted using ``pygments``. Returns text untouched if colour output is not enabled. See also: :pypi:`Pygments` Args: __text: Text to highlight lexer: Jinja lexer to use formatter: Jinja formatter to use Returns: Syntax highlighted output, when possible """ if sys.stdout.isatty(): lexer = get_lexer_by_name(lexer) formatter = get_formatter_by_name(formatter) __text = pyg_highlight(__text, lexer, formatter) return __text
[ "def", "highlight", "(", "__text", ":", "str", ",", "*", ",", "lexer", ":", "str", "=", "'diff'", ",", "formatter", ":", "str", "=", "'terminal'", ")", "->", "str", ":", "if", "sys", ".", "stdout", ".", "isatty", "(", ")", ":", "lexer", "=", "get_lexer_by_name", "(", "lexer", ")", "formatter", "=", "get_formatter_by_name", "(", "formatter", ")", "__text", "=", "pyg_highlight", "(", "__text", ",", "lexer", ",", "formatter", ")", "return", "__text" ]
Highlight text using ``pygments``. Returns text untouched if colour output is not enabled. See also: :pypi:`Pygments` Args: __text: Text to highlight lexer: Pygments lexer to use formatter: Pygments formatter to use Returns: Syntax highlighted output, when possible
[ "Highlight", "text", "highlighted", "using", "pygments", "." ]
ae505ef69a9feb739b5f4e62c5a8e6533104d3ea
https://github.com/JNRowe/jnrbase/blob/ae505ef69a9feb739b5f4e62c5a8e6533104d3ea/jnrbase/template.py#L77-L96
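Usage sketch for highlight(); the import path is assumed from jnrbase/template.py and the diff text is a made-up snippet.

from jnrbase import template   # assumed import path

diff_text = """\
--- a/hello.py
+++ b/hello.py
-print("hi")
+print("hello")
"""

# Colour codes are only added when stdout is a terminal; otherwise the
# text comes back unchanged, so this is safe in pipelines as well.
print(template.highlight(diff_text, lexer='diff', formatter='terminal'))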
241,521
JNRowe/jnrbase
jnrbase/template.py
html2text
def html2text(__html: str, *, width: int = 80, ascii_replacements: bool = False) -> str: """HTML to plain text renderer. See also: :pypi:`html2text` Args: __html: Text to process width: Paragraph width ascii_replacements: Use pseudo-ASCII replacements for Unicode Returns: Rendered text """ html2.BODY_WIDTH = width html2.UNICODE_SNOB = ascii_replacements return html2.html2text(__html).strip()
python
def html2text(__html: str, *, width: int = 80, ascii_replacements: bool = False) -> str: """HTML to plain text renderer. See also: :pypi:`html2text` Args: __html: Text to process width: Paragraph width ascii_replacements: Use pseudo-ASCII replacements for Unicode Returns: Rendered text """ html2.BODY_WIDTH = width html2.UNICODE_SNOB = ascii_replacements return html2.html2text(__html).strip()
[ "def", "html2text", "(", "__html", ":", "str", ",", "*", ",", "width", ":", "int", "=", "80", ",", "ascii_replacements", ":", "bool", "=", "False", ")", "->", "str", ":", "html2", ".", "BODY_WIDTH", "=", "width", "html2", ".", "UNICODE_SNOB", "=", "ascii_replacements", "return", "html2", ".", "html2text", "(", "__html", ")", ".", "strip", "(", ")" ]
HTML to plain text renderer. See also: :pypi:`html2text` Args: __html: Text to process width: Paragraph width ascii_replacements: Use pseudo-ASCII replacements for Unicode Returns: Rendered text
[ "HTML", "to", "plain", "text", "renderer", "." ]
ae505ef69a9feb739b5f4e62c5a8e6533104d3ea
https://github.com/JNRowe/jnrbase/blob/ae505ef69a9feb739b5f4e62c5a8e6533104d3ea/jnrbase/template.py#L100-L115
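Usage sketch for html2text() with a made-up HTML fragment; the import path is assumed as above.

from jnrbase import template   # assumed import path

html = "<h1>Release notes</h1><p>Fixes a <em>rare</em> crash on start-up.</p>"
print(template.html2text(html, width=40))   # wrapped plain-text rendering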
241,522
JNRowe/jnrbase
jnrbase/template.py
regexp
def regexp(__string: str, __pattern: str, __repl: Union[Callable, str], *, count: int = 0, flags: int = 0) -> str: """Jinja filter for regexp replacements. See :func:`re.sub` for documentation. Returns: Text with substitutions applied """ return re.sub(__pattern, __repl, __string, count, flags)
python
def regexp(__string: str, __pattern: str, __repl: Union[Callable, str], *, count: int = 0, flags: int = 0) -> str: """Jinja filter for regexp replacements. See :func:`re.sub` for documentation. Returns: Text with substitutions applied """ return re.sub(__pattern, __repl, __string, count, flags)
[ "def", "regexp", "(", "__string", ":", "str", ",", "__pattern", ":", "str", ",", "__repl", ":", "Union", "[", "Callable", ",", "str", "]", ",", "*", ",", "count", ":", "int", "=", "0", ",", "flags", ":", "int", "=", "0", ")", "->", "str", ":", "return", "re", ".", "sub", "(", "__pattern", ",", "__repl", ",", "__string", ",", "count", ",", "flags", ")" ]
Jinja filter for regexp replacements. See :func:`re.sub` for documentation. Returns: Text with substitutions applied
[ "Jinja", "filter", "for", "regexp", "replacements", "." ]
ae505ef69a9feb739b5f4e62c5a8e6533104d3ea
https://github.com/JNRowe/jnrbase/blob/ae505ef69a9feb739b5f4e62c5a8e6533104d3ea/jnrbase/template.py#L119-L128
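The filter can also be called directly as a plain function, which is the easiest way to see what it does; the template usage line assumes the filter is registered under the name regexp via the module's FILTERS mapping.

from jnrbase import template   # assumed import path

print(template.regexp("2024-01-31", "-", "/"))   # -> 2024/01/31
# In a Jinja template the equivalent would be: {{ "2024-01-31" | regexp("-", "/") }}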
241,523
JNRowe/jnrbase
jnrbase/template.py
setup
def setup(__pkg: str) -> jinja2.Environment: """Configure a new Jinja environment with our filters. Args: __pkg: Package name to use as base for templates searches Returns: Configured Jinja environment """ dirs = [path.join(d, 'templates') for d in xdg_basedir.get_data_dirs(__pkg)] env = jinja2.Environment( autoescape=jinja2.select_autoescape(['html', 'xml']), loader=jinja2.ChoiceLoader([jinja2.FileSystemLoader(s) for s in dirs])) env.loader.loaders.append(jinja2.PackageLoader(__pkg, 'templates')) env.filters.update(FILTERS) return env
python
def setup(__pkg: str) -> jinja2.Environment: """Configure a new Jinja environment with our filters. Args: __pkg: Package name to use as base for templates searches Returns: Configured Jinja environment """ dirs = [path.join(d, 'templates') for d in xdg_basedir.get_data_dirs(__pkg)] env = jinja2.Environment( autoescape=jinja2.select_autoescape(['html', 'xml']), loader=jinja2.ChoiceLoader([jinja2.FileSystemLoader(s) for s in dirs])) env.loader.loaders.append(jinja2.PackageLoader(__pkg, 'templates')) env.filters.update(FILTERS) return env
[ "def", "setup", "(", "__pkg", ":", "str", ")", "->", "jinja2", ".", "Environment", ":", "dirs", "=", "[", "path", ".", "join", "(", "d", ",", "'templates'", ")", "for", "d", "in", "xdg_basedir", ".", "get_data_dirs", "(", "__pkg", ")", "]", "env", "=", "jinja2", ".", "Environment", "(", "autoescape", "=", "jinja2", ".", "select_autoescape", "(", "[", "'html'", ",", "'xml'", "]", ")", ",", "loader", "=", "jinja2", ".", "ChoiceLoader", "(", "[", "jinja2", ".", "FileSystemLoader", "(", "s", ")", "for", "s", "in", "dirs", "]", ")", ")", "env", ".", "loader", ".", "loaders", ".", "append", "(", "jinja2", ".", "PackageLoader", "(", "__pkg", ",", "'templates'", ")", ")", "env", ".", "filters", ".", "update", "(", "FILTERS", ")", "return", "env" ]
Configure a new Jinja environment with our filters. Args: __pkg: Package name to use as base for templates searches Returns: Configured Jinja environment
[ "Configure", "a", "new", "Jinja", "environment", "with", "our", "filters", "." ]
ae505ef69a9feb739b5f4e62c5a8e6533104d3ea
https://github.com/JNRowe/jnrbase/blob/ae505ef69a9feb739b5f4e62c5a8e6533104d3ea/jnrbase/template.py#L145-L162
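Usage sketch for setup(); the package name and template file are placeholders. Note the loader order above: templates found in the XDG data directories shadow the ones shipped inside the package.

from jnrbase import template   # assumed import path

env = template.setup('mypkg')                                   # 'mypkg' must ship a templates/ directory
text = env.get_template('greeting.txt').render(name='world')   # hypothetical template
print(text)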
241,524
ArabellaTech/aa-intercom
aa_intercom/utils.py
upload_intercom_user
def upload_intercom_user(obj_id): """Creates or updates single user account on intercom""" UserModel = get_user_model() intercom_user = False instance = UserModel.objects.get(pk=obj_id) data = instance.get_intercom_data() if not getattr(settings, "SKIP_INTERCOM", False): try: intercom_user = intercom.users.create(**data) except errors.ServiceUnavailableError: pass if intercom_user: UserModel.objects.filter(pk=obj_id).update( intercom_last_api_response=intercom_user.to_dict(), intercom_api_response_timestamp=make_aware(datetime.now(), pytz.UTC) )
python
def upload_intercom_user(obj_id): """Creates or updates single user account on intercom""" UserModel = get_user_model() intercom_user = False instance = UserModel.objects.get(pk=obj_id) data = instance.get_intercom_data() if not getattr(settings, "SKIP_INTERCOM", False): try: intercom_user = intercom.users.create(**data) except errors.ServiceUnavailableError: pass if intercom_user: UserModel.objects.filter(pk=obj_id).update( intercom_last_api_response=intercom_user.to_dict(), intercom_api_response_timestamp=make_aware(datetime.now(), pytz.UTC) )
[ "def", "upload_intercom_user", "(", "obj_id", ")", ":", "UserModel", "=", "get_user_model", "(", ")", "intercom_user", "=", "False", "instance", "=", "UserModel", ".", "objects", ".", "get", "(", "pk", "=", "obj_id", ")", "data", "=", "instance", ".", "get_intercom_data", "(", ")", "if", "not", "getattr", "(", "settings", ",", "\"SKIP_INTERCOM\"", ",", "False", ")", ":", "try", ":", "intercom_user", "=", "intercom", ".", "users", ".", "create", "(", "*", "*", "data", ")", "except", "errors", ".", "ServiceUnavailableError", ":", "pass", "if", "intercom_user", ":", "UserModel", ".", "objects", ".", "filter", "(", "pk", "=", "obj_id", ")", ".", "update", "(", "intercom_last_api_response", "=", "intercom_user", ".", "to_dict", "(", ")", ",", "intercom_api_response_timestamp", "=", "make_aware", "(", "datetime", ".", "now", "(", ")", ",", "pytz", ".", "UTC", ")", ")" ]
Creates or updates a single user account on Intercom
[ "Creates", "or", "updates", "single", "user", "account", "on", "intercom" ]
f7e2ab63967529660f9c2fe4f1d0bf3cec1502c2
https://github.com/ArabellaTech/aa-intercom/blob/f7e2ab63967529660f9c2fe4f1d0bf3cec1502c2/aa_intercom/utils.py#L14-L31
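A hedged usage sketch: the primary key is a placeholder, a configured Django settings module is assumed, and the import path is assumed from aa_intercom/utils.py. With settings.SKIP_INTERCOM set, the network call is skipped and no API response is stored.

from aa_intercom.utils import upload_intercom_user   # assumed import path

upload_intercom_user(obj_id=42)   # push (or refresh) the user with pk=42 on Intercom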
241,525
edwards-lab/libGWAS
libgwas/boundary.py
BoundaryCheck.LoadExclusions
def LoadExclusions(self, snps): """ Load locus exclusions. :param snps: Can either be a list of rsids or a file containing rsids. :return: None If snps is a file, the file must only contain RSIDs separated by whitespace (tabs, spaces and return characters). """ snp_names = [] if len(snps) == 1 and os.path.isfile(snps[0]): snp_names = open(snps).read().strip().split() else: snp_names = snps for snp in snp_names: if len(snp.strip()) > 0: self.ignored_rs.append(snp)
python
def LoadExclusions(self, snps): """ Load locus exclusions. :param snps: Can either be a list of rsids or a file containing rsids. :return: None If snps is a file, the file must only contain RSIDs separated by whitespace (tabs, spaces and return characters). """ snp_names = [] if len(snps) == 1 and os.path.isfile(snps[0]): snp_names = open(snps).read().strip().split() else: snp_names = snps for snp in snp_names: if len(snp.strip()) > 0: self.ignored_rs.append(snp)
[ "def", "LoadExclusions", "(", "self", ",", "snps", ")", ":", "snp_names", "=", "[", "]", "if", "len", "(", "snps", ")", "==", "1", "and", "os", ".", "path", ".", "isfile", "(", "snps", "[", "0", "]", ")", ":", "snp_names", "=", "open", "(", "snps", ")", ".", "read", "(", ")", ".", "strip", "(", ")", ".", "split", "(", ")", "else", ":", "snp_names", "=", "snps", "for", "snp", "in", "snp_names", ":", "if", "len", "(", "snp", ".", "strip", "(", ")", ")", ">", "0", ":", "self", ".", "ignored_rs", ".", "append", "(", "snp", ")" ]
Load locus exclusions. :param snps: Can either be a list of rsids or a file containing rsids. :return: None If snps is a file, the file must only contain RSIDs separated by whitespace (tabs, spaces and return characters).
[ "Load", "locus", "exclusions", "." ]
d68c9a083d443dfa5d7c5112de29010909cfe23f
https://github.com/edwards-lab/libGWAS/blob/d68c9a083d443dfa5d7c5112de29010909cfe23f/libgwas/boundary.py#L97-L114
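Usage sketch under stated assumptions: the import path mirrors libgwas/boundary.py and a no-argument BoundaryCheck constructor is assumed, which may not match the real class; the RSIDs are placeholders.

from libgwas.boundary import BoundaryCheck   # assumed import path

bc = BoundaryCheck()                          # assumed constructor
bc.LoadExclusions(["rs12345", "rs67890"])     # pass RSIDs directly as a list
print(bc.ignored_rs)                          # -> ['rs12345', 'rs67890']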
241,526
scivision/histutils
histutils/rawDMCreader.py
getserialnum
def getserialnum(flist): """ This function assumes the serial number of the camera is in a particular place in the filename. Yes, this is a little lame, but it's how the original 2011 image-writing program worked, and I've carried over the scheme rather than appending bits to dozens of TB of files. """ sn = [] for f in flist: tmp = search(r'(?<=CamSer)\d{3,6}', f) if tmp: ser = int(tmp.group()) else: ser = None sn.append(ser) return sn
python
def getserialnum(flist): """ This function assumes the serial number of the camera is in a particular place in the filename. Yes, this is a little lame, but it's how the original 2011 image-writing program worked, and I've carried over the scheme rather than appending bits to dozens of TB of files. """ sn = [] for f in flist: tmp = search(r'(?<=CamSer)\d{3,6}', f) if tmp: ser = int(tmp.group()) else: ser = None sn.append(ser) return sn
[ "def", "getserialnum", "(", "flist", ")", ":", "sn", "=", "[", "]", "for", "f", "in", "flist", ":", "tmp", "=", "search", "(", "r'(?<=CamSer)\\d{3,6}'", ",", "f", ")", "if", "tmp", ":", "ser", "=", "int", "(", "tmp", ".", "group", "(", ")", ")", "else", ":", "ser", "=", "None", "sn", ".", "append", "(", "ser", ")", "return", "sn" ]
This function assumes the serial number of the camera is in a particular place in the filename. Yes, this is a little lame, but it's how the original 2011 image-writing program worked, and I've carried over the scheme rather than appending bits to dozens of TB of files.
[ "This", "function", "assumes", "the", "serial", "number", "of", "the", "camera", "is", "in", "a", "particular", "place", "in", "the", "filename", ".", "Yes", "this", "is", "a", "little", "lame", "but", "it", "s", "how", "the", "original", "2011", "image", "-", "writing", "program", "worked", "and", "I", "ve", "carried", "over", "the", "scheme", "rather", "than", "appending", "bits", "to", "dozens", "of", "TB", "of", "files", "." ]
859a91d3894cb57faed34881c6ea16130b90571e
https://github.com/scivision/histutils/blob/859a91d3894cb57faed34881c6ea16130b90571e/histutils/rawDMCreader.py#L80-L94
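Usage sketch for getserialnum() with made-up filenames following the CamSer naming scheme described above; the import path is assumed from histutils/rawDMCreader.py.

from histutils.rawDMCreader import getserialnum   # assumed import path

files = ['2013-04-14T12-00-00_CamSer7196_frames.DMCdata',   # hypothetical filenames
         'notes.txt']
print(getserialnum(files))   # -> [7196, None]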
241,527
scivision/histutils
histutils/rawDMCreader.py
getDMCparam
def getDMCparam(fn: Path, xyPix, xyBin, FrameIndReq=None, ut1req=None, kineticsec=None, startUTC=None, nHeadBytes=4, verbose=0): """ nHeadBytes=4 for 2013-2016 data nHeadBytes=0 for 2011 data """ Nmetadata = nHeadBytes // 2 # FIXME for DMCdata version 1 only if not fn.is_file(): # leave this here, getsize() doesn't fail on directory raise ValueError(f'{fn} is not a file!') print(f'reading {fn}') # int() in case we are fed a float or int SuperX = int(xyPix[0] // xyBin[0]) SuperY = int(xyPix[1] // xyBin[1]) PixelsPerImage, BytesPerImage, BytesPerFrame = howbig( SuperX, SuperY, nHeadBytes) (firstRawInd, lastRawInd) = getRawInd( fn, BytesPerImage, nHeadBytes, Nmetadata) FrameIndRel = whichframes(fn, FrameIndReq, kineticsec, ut1req, startUTC, firstRawInd, lastRawInd, BytesPerImage, BytesPerFrame, verbose) return {'superx': SuperX, 'supery': SuperY, 'nmetadata': Nmetadata, 'bytesperframe': BytesPerFrame, 'pixelsperimage': PixelsPerImage, 'nframeextract': FrameIndRel.size, 'frameindrel': FrameIndRel}
python
def getDMCparam(fn: Path, xyPix, xyBin, FrameIndReq=None, ut1req=None, kineticsec=None, startUTC=None, nHeadBytes=4, verbose=0): """ nHeadBytes=4 for 2013-2016 data nHeadBytes=0 for 2011 data """ Nmetadata = nHeadBytes // 2 # FIXME for DMCdata version 1 only if not fn.is_file(): # leave this here, getsize() doesn't fail on directory raise ValueError(f'{fn} is not a file!') print(f'reading {fn}') # int() in case we are fed a float or int SuperX = int(xyPix[0] // xyBin[0]) SuperY = int(xyPix[1] // xyBin[1]) PixelsPerImage, BytesPerImage, BytesPerFrame = howbig( SuperX, SuperY, nHeadBytes) (firstRawInd, lastRawInd) = getRawInd( fn, BytesPerImage, nHeadBytes, Nmetadata) FrameIndRel = whichframes(fn, FrameIndReq, kineticsec, ut1req, startUTC, firstRawInd, lastRawInd, BytesPerImage, BytesPerFrame, verbose) return {'superx': SuperX, 'supery': SuperY, 'nmetadata': Nmetadata, 'bytesperframe': BytesPerFrame, 'pixelsperimage': PixelsPerImage, 'nframeextract': FrameIndRel.size, 'frameindrel': FrameIndRel}
[ "def", "getDMCparam", "(", "fn", ":", "Path", ",", "xyPix", ",", "xyBin", ",", "FrameIndReq", "=", "None", ",", "ut1req", "=", "None", ",", "kineticsec", "=", "None", ",", "startUTC", "=", "None", ",", "nHeadBytes", "=", "4", ",", "verbose", "=", "0", ")", ":", "Nmetadata", "=", "nHeadBytes", "//", "2", "# FIXME for DMCdata version 1 only", "if", "not", "fn", ".", "is_file", "(", ")", ":", "# leave this here, getsize() doesn't fail on directory", "raise", "ValueError", "(", "f'{fn} is not a file!'", ")", "print", "(", "f'reading {fn}'", ")", "# int() in case we are fed a float or int", "SuperX", "=", "int", "(", "xyPix", "[", "0", "]", "//", "xyBin", "[", "0", "]", ")", "SuperY", "=", "int", "(", "xyPix", "[", "1", "]", "//", "xyBin", "[", "1", "]", ")", "PixelsPerImage", ",", "BytesPerImage", ",", "BytesPerFrame", "=", "howbig", "(", "SuperX", ",", "SuperY", ",", "nHeadBytes", ")", "(", "firstRawInd", ",", "lastRawInd", ")", "=", "getRawInd", "(", "fn", ",", "BytesPerImage", ",", "nHeadBytes", ",", "Nmetadata", ")", "FrameIndRel", "=", "whichframes", "(", "fn", ",", "FrameIndReq", ",", "kineticsec", ",", "ut1req", ",", "startUTC", ",", "firstRawInd", ",", "lastRawInd", ",", "BytesPerImage", ",", "BytesPerFrame", ",", "verbose", ")", "return", "{", "'superx'", ":", "SuperX", ",", "'supery'", ":", "SuperY", ",", "'nmetadata'", ":", "Nmetadata", ",", "'bytesperframe'", ":", "BytesPerFrame", ",", "'pixelsperimage'", ":", "PixelsPerImage", ",", "'nframeextract'", ":", "FrameIndRel", ".", "size", ",", "'frameindrel'", ":", "FrameIndRel", "}" ]
nHeadBytes=4 for 2013-2016 data nHeadBytes=0 for 2011 data
[ "nHeadBytes", "=", "4", "for", "2013", "-", "2016", "data", "nHeadBytes", "=", "0", "for", "2011", "data" ]
859a91d3894cb57faed34881c6ea16130b90571e
https://github.com/scivision/histutils/blob/859a91d3894cb57faed34881c6ea16130b90571e/histutils/rawDMCreader.py#L97-L126
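A hedged call sketch for getDMCparam(): the file name is a placeholder, a 512x512 sensor with no binning is assumed, and nHeadBytes=4 matches the 2013-2016 format noted in the docstring.

from pathlib import Path
from histutils.rawDMCreader import getDMCparam   # assumed import path

finf = getDMCparam(Path('night.DMCdata'), xyPix=(512, 512), xyBin=(1, 1), nHeadBytes=4)
print(finf['superx'], finf['supery'], finf['nframeextract'])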
241,528
scivision/histutils
histutils/rawDMCreader.py
whichframes
def whichframes(fn, FrameIndReq, kineticsec, ut1req, startUTC, firstRawInd, lastRawInd, BytesPerImage, BytesPerFrame, verbose): ext = Path(fn).suffix # %% get file size fileSizeBytes = fn.stat().st_size if fileSizeBytes < BytesPerImage: raise ValueError( f'File size {fileSizeBytes} is smaller than a single image frame!') if ext == '.DMCdata' and fileSizeBytes % BytesPerFrame: logging.error( f"Looks like I am not reading this file correctly, with BPF: {BytesPerFrame:d}") if ext == '.DMCdata': nFrame = fileSizeBytes // BytesPerFrame print(f'{nFrame} frames, Bytes: {fileSizeBytes} in file {fn}') nFrameRaw = (lastRawInd - firstRawInd + 1) if nFrameRaw != nFrame: logging.warning( f'there may be missed frames: nFrameRaw {nFrameRaw} nFrame {nFrame}') else: nFrame = lastRawInd - firstRawInd + 1 allrawframe = arange(firstRawInd, lastRawInd + 1, 1, dtype=int64) print(f"first / last raw frame #'s: {firstRawInd} / {lastRawInd} ") # %% absolute time estimate ut1_unix_all = frame2ut1(startUTC, kineticsec, allrawframe) # %% setup frame indices """ if no requested frames were specified, read all frames. Otherwise, just return the requested frames Assignments have to be "int64", not just python "int". Windows python 2.7 64-bit on files >2.1GB, the bytes will wrap """ FrameIndRel = ut12frame(ut1req, arange(0, nFrame, 1, dtype=int64), ut1_unix_all) # NOTE: no ut1req or problems with ut1req, canNOT use else, need to test len() in case index is [0] validly if FrameIndRel is None or len(FrameIndRel) == 0: FrameIndRel = req2frame(FrameIndReq, nFrame) badReqInd = (FrameIndRel > nFrame) | (FrameIndRel < 0) # check if we requested frames beyond what the BigFN contains if badReqInd.any(): # don't include frames in case of None raise ValueError(f'frames requested outside the times covered in {fn}') nFrameExtract = FrameIndRel.size # to preallocate properly nBytesExtract = nFrameExtract * BytesPerFrame print( f'Extracted {nFrameExtract} frames from {fn} totaling {nBytesExtract/1e9:.2f} GB.') if nBytesExtract > 4e9: print(f'This will require {nBytesExtract/1e9:.2f} GB of RAM.') return FrameIndRel
python
def whichframes(fn, FrameIndReq, kineticsec, ut1req, startUTC, firstRawInd, lastRawInd, BytesPerImage, BytesPerFrame, verbose): ext = Path(fn).suffix # %% get file size fileSizeBytes = fn.stat().st_size if fileSizeBytes < BytesPerImage: raise ValueError( f'File size {fileSizeBytes} is smaller than a single image frame!') if ext == '.DMCdata' and fileSizeBytes % BytesPerFrame: logging.error( f"Looks like I am not reading this file correctly, with BPF: {BytesPerFrame:d}") if ext == '.DMCdata': nFrame = fileSizeBytes // BytesPerFrame print(f'{nFrame} frames, Bytes: {fileSizeBytes} in file {fn}') nFrameRaw = (lastRawInd - firstRawInd + 1) if nFrameRaw != nFrame: logging.warning( f'there may be missed frames: nFrameRaw {nFrameRaw} nFrame {nFrame}') else: nFrame = lastRawInd - firstRawInd + 1 allrawframe = arange(firstRawInd, lastRawInd + 1, 1, dtype=int64) print(f"first / last raw frame #'s: {firstRawInd} / {lastRawInd} ") # %% absolute time estimate ut1_unix_all = frame2ut1(startUTC, kineticsec, allrawframe) # %% setup frame indices """ if no requested frames were specified, read all frames. Otherwise, just return the requested frames Assignments have to be "int64", not just python "int". Windows python 2.7 64-bit on files >2.1GB, the bytes will wrap """ FrameIndRel = ut12frame(ut1req, arange(0, nFrame, 1, dtype=int64), ut1_unix_all) # NOTE: no ut1req or problems with ut1req, canNOT use else, need to test len() in case index is [0] validly if FrameIndRel is None or len(FrameIndRel) == 0: FrameIndRel = req2frame(FrameIndReq, nFrame) badReqInd = (FrameIndRel > nFrame) | (FrameIndRel < 0) # check if we requested frames beyond what the BigFN contains if badReqInd.any(): # don't include frames in case of None raise ValueError(f'frames requested outside the times covered in {fn}') nFrameExtract = FrameIndRel.size # to preallocate properly nBytesExtract = nFrameExtract * BytesPerFrame print( f'Extracted {nFrameExtract} frames from {fn} totaling {nBytesExtract/1e9:.2f} GB.') if nBytesExtract > 4e9: print(f'This will require {nBytesExtract/1e9:.2f} GB of RAM.') return FrameIndRel
[ "def", "whichframes", "(", "fn", ",", "FrameIndReq", ",", "kineticsec", ",", "ut1req", ",", "startUTC", ",", "firstRawInd", ",", "lastRawInd", ",", "BytesPerImage", ",", "BytesPerFrame", ",", "verbose", ")", ":", "ext", "=", "Path", "(", "fn", ")", ".", "suffix", "# %% get file size", "fileSizeBytes", "=", "fn", ".", "stat", "(", ")", ".", "st_size", "if", "fileSizeBytes", "<", "BytesPerImage", ":", "raise", "ValueError", "(", "f'File size {fileSizeBytes} is smaller than a single image frame!'", ")", "if", "ext", "==", "'.DMCdata'", "and", "fileSizeBytes", "%", "BytesPerFrame", ":", "logging", ".", "error", "(", "f\"Looks like I am not reading this file correctly, with BPF: {BytesPerFrame:d}\"", ")", "if", "ext", "==", "'.DMCdata'", ":", "nFrame", "=", "fileSizeBytes", "//", "BytesPerFrame", "print", "(", "f'{nFrame} frames, Bytes: {fileSizeBytes} in file {fn}'", ")", "nFrameRaw", "=", "(", "lastRawInd", "-", "firstRawInd", "+", "1", ")", "if", "nFrameRaw", "!=", "nFrame", ":", "logging", ".", "warning", "(", "f'there may be missed frames: nFrameRaw {nFrameRaw} nFrame {nFrame}'", ")", "else", ":", "nFrame", "=", "lastRawInd", "-", "firstRawInd", "+", "1", "allrawframe", "=", "arange", "(", "firstRawInd", ",", "lastRawInd", "+", "1", ",", "1", ",", "dtype", "=", "int64", ")", "print", "(", "f\"first / last raw frame #'s: {firstRawInd} / {lastRawInd} \"", ")", "# %% absolute time estimate", "ut1_unix_all", "=", "frame2ut1", "(", "startUTC", ",", "kineticsec", ",", "allrawframe", ")", "# %% setup frame indices", "FrameIndRel", "=", "ut12frame", "(", "ut1req", ",", "arange", "(", "0", ",", "nFrame", ",", "1", ",", "dtype", "=", "int64", ")", ",", "ut1_unix_all", ")", "# NOTE: no ut1req or problems with ut1req, canNOT use else, need to test len() in case index is [0] validly", "if", "FrameIndRel", "is", "None", "or", "len", "(", "FrameIndRel", ")", "==", "0", ":", "FrameIndRel", "=", "req2frame", "(", "FrameIndReq", ",", "nFrame", ")", "badReqInd", "=", "(", "FrameIndRel", ">", "nFrame", ")", "|", "(", "FrameIndRel", "<", "0", ")", "# check if we requested frames beyond what the BigFN contains", "if", "badReqInd", ".", "any", "(", ")", ":", "# don't include frames in case of None", "raise", "ValueError", "(", "f'frames requested outside the times covered in {fn}'", ")", "nFrameExtract", "=", "FrameIndRel", ".", "size", "# to preallocate properly", "nBytesExtract", "=", "nFrameExtract", "*", "BytesPerFrame", "print", "(", "f'Extracted {nFrameExtract} frames from {fn} totaling {nBytesExtract/1e9:.2f} GB.'", ")", "if", "nBytesExtract", ">", "4e9", ":", "print", "(", "f'This will require {nBytesExtract/1e9:.2f} GB of RAM.'", ")", "return", "FrameIndRel" ]
if no requested frames were specified, read all frames. Otherwise, just return the requested frames Assignments have to be "int64", not just python "int". Windows python 2.7 64-bit on files >2.1GB, the bytes will wrap
[ "if", "no", "requested", "frames", "were", "specified", "read", "all", "frames", ".", "Otherwise", "just", "return", "the", "requested", "frames", "Assignments", "have", "to", "be", "int64", "not", "just", "python", "int", ".", "Windows", "python", "2", ".", "7", "64", "-", "bit", "on", "files", ">", "2", ".", "1GB", "the", "bytes", "will", "wrap" ]
859a91d3894cb57faed34881c6ea16130b90571e
https://github.com/scivision/histutils/blob/859a91d3894cb57faed34881c6ea16130b90571e/histutils/rawDMCreader.py#L208-L266
241,529
scivision/histutils
histutils/rawDMCreader.py
getDMCframe
def getDMCframe(f, iFrm: int, finf: dict, verbose: bool=False): """ f is open file handle """ # on windows, "int" is int32 and overflows at 2.1GB! We need np.int64 currByte = iFrm * finf['bytesperframe'] # %% advance to start of frame in bytes if verbose: print(f'seeking to byte {currByte}') assert isinstance(iFrm, (int, int64)), 'int32 will fail on files > 2GB' try: f.seek(currByte, 0) except IOError as e: raise IOError(f'I couldnt seek to byte {currByte:d}. try using a 64-bit integer for iFrm \n' 'is {f.name} a DMCdata file? {e}') # %% read data ***LABVIEW USES ROW-MAJOR C ORDERING!! try: currFrame = fromfile(f, uint16, finf['pixelsperimage']).reshape((finf['supery'], finf['superx']), order='C') except ValueError as e: raise ValueError(f'read past end of file? {e}') rawFrameInd = meta2rawInd(f, finf['nmetadata']) if rawFrameInd is None: # 2011 no metadata file rawFrameInd = iFrm + 1 # fallback return currFrame, rawFrameInd
python
def getDMCframe(f, iFrm: int, finf: dict, verbose: bool=False): """ f is open file handle """ # on windows, "int" is int32 and overflows at 2.1GB! We need np.int64 currByte = iFrm * finf['bytesperframe'] # %% advance to start of frame in bytes if verbose: print(f'seeking to byte {currByte}') assert isinstance(iFrm, (int, int64)), 'int32 will fail on files > 2GB' try: f.seek(currByte, 0) except IOError as e: raise IOError(f'I couldnt seek to byte {currByte:d}. try using a 64-bit integer for iFrm \n' 'is {f.name} a DMCdata file? {e}') # %% read data ***LABVIEW USES ROW-MAJOR C ORDERING!! try: currFrame = fromfile(f, uint16, finf['pixelsperimage']).reshape((finf['supery'], finf['superx']), order='C') except ValueError as e: raise ValueError(f'read past end of file? {e}') rawFrameInd = meta2rawInd(f, finf['nmetadata']) if rawFrameInd is None: # 2011 no metadata file rawFrameInd = iFrm + 1 # fallback return currFrame, rawFrameInd
[ "def", "getDMCframe", "(", "f", ",", "iFrm", ":", "int", ",", "finf", ":", "dict", ",", "verbose", ":", "bool", "=", "False", ")", ":", "# on windows, \"int\" is int32 and overflows at 2.1GB! We need np.int64", "currByte", "=", "iFrm", "*", "finf", "[", "'bytesperframe'", "]", "# %% advance to start of frame in bytes", "if", "verbose", ":", "print", "(", "f'seeking to byte {currByte}'", ")", "assert", "isinstance", "(", "iFrm", ",", "(", "int", ",", "int64", ")", ")", ",", "'int32 will fail on files > 2GB'", "try", ":", "f", ".", "seek", "(", "currByte", ",", "0", ")", "except", "IOError", "as", "e", ":", "raise", "IOError", "(", "f'I couldnt seek to byte {currByte:d}. try using a 64-bit integer for iFrm \\n'", "'is {f.name} a DMCdata file? {e}'", ")", "# %% read data ***LABVIEW USES ROW-MAJOR C ORDERING!!", "try", ":", "currFrame", "=", "fromfile", "(", "f", ",", "uint16", ",", "finf", "[", "'pixelsperimage'", "]", ")", ".", "reshape", "(", "(", "finf", "[", "'supery'", "]", ",", "finf", "[", "'superx'", "]", ")", ",", "order", "=", "'C'", ")", "except", "ValueError", "as", "e", ":", "raise", "ValueError", "(", "f'read past end of file? {e}'", ")", "rawFrameInd", "=", "meta2rawInd", "(", "f", ",", "finf", "[", "'nmetadata'", "]", ")", "if", "rawFrameInd", "is", "None", ":", "# 2011 no metadata file", "rawFrameInd", "=", "iFrm", "+", "1", "# fallback", "return", "currFrame", ",", "rawFrameInd" ]
f is open file handle
[ "f", "is", "open", "file", "handle" ]
859a91d3894cb57faed34881c6ea16130b90571e
https://github.com/scivision/histutils/blob/859a91d3894cb57faed34881c6ea16130b90571e/histutils/rawDMCreader.py#L269-L299
241,530
a-tal/kezmenu3
kezmenu3/kezmenu.py
KezMenu._fixSize
def _fixSize(self): """Fix the menu size. Commonly called when the font is changed""" self.height = 0 for o in self.options: text = o['label'] font = o['font'] ren = font.render(text, 1, (0, 0, 0)) if ren.get_width() > self.width: self.width = ren.get_width() self.height += font.get_height()
python
def _fixSize(self): """Fix the menu size. Commonly called when the font is changed""" self.height = 0 for o in self.options: text = o['label'] font = o['font'] ren = font.render(text, 1, (0, 0, 0)) if ren.get_width() > self.width: self.width = ren.get_width() self.height += font.get_height()
[ "def", "_fixSize", "(", "self", ")", ":", "self", ".", "height", "=", "0", "for", "o", "in", "self", ".", "options", ":", "text", "=", "o", "[", "'label'", "]", "font", "=", "o", "[", "'font'", "]", "ren", "=", "font", ".", "render", "(", "text", ",", "1", ",", "(", "0", ",", "0", ",", "0", ")", ")", "if", "ren", ".", "get_width", "(", ")", ">", "self", ".", "width", ":", "self", ".", "width", "=", "ren", ".", "get_width", "(", ")", "self", ".", "height", "+=", "font", ".", "get_height", "(", ")" ]
Fix the menu size. Commonly called when the font is changed
[ "Fix", "the", "menu", "size", ".", "Commonly", "called", "when", "the", "font", "is", "changed" ]
3b06f9cb67fdc98a73928f877eea86692f832fa4
https://github.com/a-tal/kezmenu3/blob/3b06f9cb67fdc98a73928f877eea86692f832fa4/kezmenu3/kezmenu.py#L72-L81
241,531
a-tal/kezmenu3
kezmenu3/kezmenu.py
KezMenu.draw
def draw(self, surface): """Blit the menu to a surface.""" offset = 0 i = 0 ol, ot = self.screen_topleft_offset first = self.options and self.options[0] last = self.options and self.options[-1] for o in self.options: indent = o.get('padding_col', 0) # padding above the line if o != first and o.get('padding_line', 0): offset += o['padding_line'] font = o.get('font', self._font) if i == self.option and self.focus_color: clr = self.focus_color else: clr = self.color text = o['label'] ren = font.render(text, 1, clr) if ren.get_width() > self.width: self.width = ren.get_width() o['label_rect'] = pygame.Rect( (ol + self.x + indent, ot + self.y + offset), (ren.get_width(), ren.get_height()), ) surface.blit(ren, (self.x + indent, self.y + offset)) offset += font.get_height() # padding below the line if o != last and o.get('padding_line', 0): offset += o['padding_line'] i += 1
python
def draw(self, surface): """Blit the menu to a surface.""" offset = 0 i = 0 ol, ot = self.screen_topleft_offset first = self.options and self.options[0] last = self.options and self.options[-1] for o in self.options: indent = o.get('padding_col', 0) # padding above the line if o != first and o.get('padding_line', 0): offset += o['padding_line'] font = o.get('font', self._font) if i == self.option and self.focus_color: clr = self.focus_color else: clr = self.color text = o['label'] ren = font.render(text, 1, clr) if ren.get_width() > self.width: self.width = ren.get_width() o['label_rect'] = pygame.Rect( (ol + self.x + indent, ot + self.y + offset), (ren.get_width(), ren.get_height()), ) surface.blit(ren, (self.x + indent, self.y + offset)) offset += font.get_height() # padding below the line if o != last and o.get('padding_line', 0): offset += o['padding_line'] i += 1
[ "def", "draw", "(", "self", ",", "surface", ")", ":", "offset", "=", "0", "i", "=", "0", "ol", ",", "ot", "=", "self", ".", "screen_topleft_offset", "first", "=", "self", ".", "options", "and", "self", ".", "options", "[", "0", "]", "last", "=", "self", ".", "options", "and", "self", ".", "options", "[", "-", "1", "]", "for", "o", "in", "self", ".", "options", ":", "indent", "=", "o", ".", "get", "(", "'padding_col'", ",", "0", ")", "# padding above the line", "if", "o", "!=", "first", "and", "o", ".", "get", "(", "'padding_line'", ",", "0", ")", ":", "offset", "+=", "o", "[", "'padding_line'", "]", "font", "=", "o", ".", "get", "(", "'font'", ",", "self", ".", "_font", ")", "if", "i", "==", "self", ".", "option", "and", "self", ".", "focus_color", ":", "clr", "=", "self", ".", "focus_color", "else", ":", "clr", "=", "self", ".", "color", "text", "=", "o", "[", "'label'", "]", "ren", "=", "font", ".", "render", "(", "text", ",", "1", ",", "clr", ")", "if", "ren", ".", "get_width", "(", ")", ">", "self", ".", "width", ":", "self", ".", "width", "=", "ren", ".", "get_width", "(", ")", "o", "[", "'label_rect'", "]", "=", "pygame", ".", "Rect", "(", "(", "ol", "+", "self", ".", "x", "+", "indent", ",", "ot", "+", "self", ".", "y", "+", "offset", ")", ",", "(", "ren", ".", "get_width", "(", ")", ",", "ren", ".", "get_height", "(", ")", ")", ",", ")", "surface", ".", "blit", "(", "ren", ",", "(", "self", ".", "x", "+", "indent", ",", "self", ".", "y", "+", "offset", ")", ")", "offset", "+=", "font", ".", "get_height", "(", ")", "# padding below the line", "if", "o", "!=", "last", "and", "o", ".", "get", "(", "'padding_line'", ",", "0", ")", ":", "offset", "+=", "o", "[", "'padding_line'", "]", "i", "+=", "1" ]
Blit the menu to a surface.
[ "Blit", "the", "menu", "to", "a", "surface", "." ]
3b06f9cb67fdc98a73928f877eea86692f832fa4
https://github.com/a-tal/kezmenu3/blob/3b06f9cb67fdc98a73928f877eea86692f832fa4/kezmenu3/kezmenu.py#L83-L117
241,532
a-tal/kezmenu3
kezmenu3/kezmenu.py
KezMenu.update
def update(self, events, time_passed=None): """Update the menu and get input for the menu. @events: the pygame catched events @time_passed: delta time since the last call """ for e in events: if e.type == pygame.QUIT: raise SystemExit if e.type == pygame.KEYDOWN: if e.key == pygame.K_ESCAPE: raise SystemExit if e.key == pygame.K_DOWN: self.option += 1 if e.key == pygame.K_UP: self.option -= 1 if e.key == pygame.K_RETURN or e.key == pygame.K_SPACE: self.options[self.option]['callable']() # Mouse controls elif e.type == pygame.MOUSEBUTTONDOWN: lb, cb, rb = pygame.mouse.get_pressed() if lb: self.options[self.option]['callable']() # Menu limits if self.option > len(self.options) - 1: self.option = len(self.options) - 1 elif self.option < 0: self.option = 0 # Check for mouse position if self.mouse_enabled: self._checkMousePositionForFocus() if time_passed: self._updateEffects(time_passed)
python
def update(self, events, time_passed=None): """Update the menu and get input for the menu. @events: the pygame catched events @time_passed: delta time since the last call """ for e in events: if e.type == pygame.QUIT: raise SystemExit if e.type == pygame.KEYDOWN: if e.key == pygame.K_ESCAPE: raise SystemExit if e.key == pygame.K_DOWN: self.option += 1 if e.key == pygame.K_UP: self.option -= 1 if e.key == pygame.K_RETURN or e.key == pygame.K_SPACE: self.options[self.option]['callable']() # Mouse controls elif e.type == pygame.MOUSEBUTTONDOWN: lb, cb, rb = pygame.mouse.get_pressed() if lb: self.options[self.option]['callable']() # Menu limits if self.option > len(self.options) - 1: self.option = len(self.options) - 1 elif self.option < 0: self.option = 0 # Check for mouse position if self.mouse_enabled: self._checkMousePositionForFocus() if time_passed: self._updateEffects(time_passed)
[ "def", "update", "(", "self", ",", "events", ",", "time_passed", "=", "None", ")", ":", "for", "e", "in", "events", ":", "if", "e", ".", "type", "==", "pygame", ".", "QUIT", ":", "raise", "SystemExit", "if", "e", ".", "type", "==", "pygame", ".", "KEYDOWN", ":", "if", "e", ".", "key", "==", "pygame", ".", "K_ESCAPE", ":", "raise", "SystemExit", "if", "e", ".", "key", "==", "pygame", ".", "K_DOWN", ":", "self", ".", "option", "+=", "1", "if", "e", ".", "key", "==", "pygame", ".", "K_UP", ":", "self", ".", "option", "-=", "1", "if", "e", ".", "key", "==", "pygame", ".", "K_RETURN", "or", "e", ".", "key", "==", "pygame", ".", "K_SPACE", ":", "self", ".", "options", "[", "self", ".", "option", "]", "[", "'callable'", "]", "(", ")", "# Mouse controls", "elif", "e", ".", "type", "==", "pygame", ".", "MOUSEBUTTONDOWN", ":", "lb", ",", "cb", ",", "rb", "=", "pygame", ".", "mouse", ".", "get_pressed", "(", ")", "if", "lb", ":", "self", ".", "options", "[", "self", ".", "option", "]", "[", "'callable'", "]", "(", ")", "# Menu limits", "if", "self", ".", "option", ">", "len", "(", "self", ".", "options", ")", "-", "1", ":", "self", ".", "option", "=", "len", "(", "self", ".", "options", ")", "-", "1", "elif", "self", ".", "option", "<", "0", ":", "self", ".", "option", "=", "0", "# Check for mouse position", "if", "self", ".", "mouse_enabled", ":", "self", ".", "_checkMousePositionForFocus", "(", ")", "if", "time_passed", ":", "self", ".", "_updateEffects", "(", "time_passed", ")" ]
Update the menu and get input for the menu. @events: the events caught from pygame @time_passed: delta time since the last call
[ "Update", "the", "menu", "and", "get", "input", "for", "the", "menu", "." ]
3b06f9cb67fdc98a73928f877eea86692f832fa4
https://github.com/a-tal/kezmenu3/blob/3b06f9cb67fdc98a73928f877eea86692f832fa4/kezmenu3/kezmenu.py#L119-L152
241,533
a-tal/kezmenu3
kezmenu3/kezmenu.py
KezMenu._checkMousePositionForFocus
def _checkMousePositionForFocus(self): """Check the mouse position to know if move focus on a option""" i = 0 cur_pos = pygame.mouse.get_pos() ml, mt = self.position for o in self.options: rect = o.get('label_rect') if rect: if rect.collidepoint(cur_pos) and self.mouse_pos != cur_pos: self.option = i self.mouse_pos = cur_pos break i += 1
python
def _checkMousePositionForFocus(self): """Check the mouse position to know if move focus on a option""" i = 0 cur_pos = pygame.mouse.get_pos() ml, mt = self.position for o in self.options: rect = o.get('label_rect') if rect: if rect.collidepoint(cur_pos) and self.mouse_pos != cur_pos: self.option = i self.mouse_pos = cur_pos break i += 1
[ "def", "_checkMousePositionForFocus", "(", "self", ")", ":", "i", "=", "0", "cur_pos", "=", "pygame", ".", "mouse", ".", "get_pos", "(", ")", "ml", ",", "mt", "=", "self", ".", "position", "for", "o", "in", "self", ".", "options", ":", "rect", "=", "o", ".", "get", "(", "'label_rect'", ")", "if", "rect", ":", "if", "rect", ".", "collidepoint", "(", "cur_pos", ")", "and", "self", ".", "mouse_pos", "!=", "cur_pos", ":", "self", ".", "option", "=", "i", "self", ".", "mouse_pos", "=", "cur_pos", "break", "i", "+=", "1" ]
Check the mouse position to know whether to move focus onto an option
[ "Check", "the", "mouse", "position", "to", "know", "if", "move", "focus", "on", "a", "option" ]
3b06f9cb67fdc98a73928f877eea86692f832fa4
https://github.com/a-tal/kezmenu3/blob/3b06f9cb67fdc98a73928f877eea86692f832fa4/kezmenu3/kezmenu.py#L154-L166
241,534
a-tal/kezmenu3
kezmenu3/kezmenu.py
KezMenu.center_at
def center_at(self, x, y): """Center the menu at x, y""" self.x = x - (self.width / 2) self.y = y - (self.height / 2)
python
def center_at(self, x, y): """Center the menu at x, y""" self.x = x - (self.width / 2) self.y = y - (self.height / 2)
[ "def", "center_at", "(", "self", ",", "x", ",", "y", ")", ":", "self", ".", "x", "=", "x", "-", "(", "self", ".", "width", "/", "2", ")", "self", ".", "y", "=", "y", "-", "(", "self", ".", "height", "/", "2", ")" ]
Center the menu at x, y
[ "Center", "the", "menu", "at", "x", "y" ]
3b06f9cb67fdc98a73928f877eea86692f832fa4
https://github.com/a-tal/kezmenu3/blob/3b06f9cb67fdc98a73928f877eea86692f832fa4/kezmenu3/kezmenu.py#L189-L193
241,535
mdeous/fatbotslim
fatbotslim/irc/bot.py
run_bots
def run_bots(bots): """ Run many bots in parallel. :param bots: IRC bots to run. :type bots: list """ greenlets = [spawn(bot.run) for bot in bots] try: joinall(greenlets) except KeyboardInterrupt: for bot in bots: bot.disconnect() finally: killall(greenlets)
python
def run_bots(bots): """ Run many bots in parallel. :param bots: IRC bots to run. :type bots: list """ greenlets = [spawn(bot.run) for bot in bots] try: joinall(greenlets) except KeyboardInterrupt: for bot in bots: bot.disconnect() finally: killall(greenlets)
[ "def", "run_bots", "(", "bots", ")", ":", "greenlets", "=", "[", "spawn", "(", "bot", ".", "run", ")", "for", "bot", "in", "bots", "]", "try", ":", "joinall", "(", "greenlets", ")", "except", "KeyboardInterrupt", ":", "for", "bot", "in", "bots", ":", "bot", ".", "disconnect", "(", ")", "finally", ":", "killall", "(", "greenlets", ")" ]
Run many bots in parallel. :param bots: IRC bots to run. :type bots: list
[ "Run", "many", "bots", "in", "parallel", "." ]
341595d24454a79caee23750eac271f9d0626c88
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/irc/bot.py#L417-L431
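run_bots() only needs objects with run() and disconnect(); the real fatbotslim IRC constructor is not shown in this record, so the sketch below drives it with a tiny stand-in class purely for illustration (gevent must be installed, and the import path is assumed from fatbotslim/irc/bot.py).

from fatbotslim.irc.bot import run_bots   # assumed import path

class EchoBot(object):
    # Stand-in exposing the interface run_bots() relies on.
    def __init__(self, name):
        self.name = name
    def run(self):
        print('%s connected' % self.name)
    def disconnect(self):
        print('%s disconnected' % self.name)

run_bots([EchoBot('alpha'), EchoBot('beta')])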
241,536
mdeous/fatbotslim
fatbotslim/irc/bot.py
Message.parse
def parse(cls, data): """ Extracts message informations from `data`. :param data: received line. :type data: unicode :return: extracted informations (source, destination, command, args). :rtype: tuple(Source, str, str, list) :raise: :class:`fatbotslim.irc.NullMessage` if `data` is empty. """ src = u'' dst = None if data[0] == u':': src, data = data[1:].split(u' ', 1) if u' :' in data: data, trailing = data.split(u' :', 1) args = data.split() args.extend(trailing.split()) else: args = data.split() command = args.pop(0) if command in (PRIVMSG, NOTICE): dst = args.pop(0) if ctcp_re.match(args[0]): args = args[0].strip(u'\x01').split() command = u'CTCP_' + args.pop(0) return Source(src), dst, command, args
python
def parse(cls, data): """ Extracts message informations from `data`. :param data: received line. :type data: unicode :return: extracted informations (source, destination, command, args). :rtype: tuple(Source, str, str, list) :raise: :class:`fatbotslim.irc.NullMessage` if `data` is empty. """ src = u'' dst = None if data[0] == u':': src, data = data[1:].split(u' ', 1) if u' :' in data: data, trailing = data.split(u' :', 1) args = data.split() args.extend(trailing.split()) else: args = data.split() command = args.pop(0) if command in (PRIVMSG, NOTICE): dst = args.pop(0) if ctcp_re.match(args[0]): args = args[0].strip(u'\x01').split() command = u'CTCP_' + args.pop(0) return Source(src), dst, command, args
[ "def", "parse", "(", "cls", ",", "data", ")", ":", "src", "=", "u''", "dst", "=", "None", "if", "data", "[", "0", "]", "==", "u':'", ":", "src", ",", "data", "=", "data", "[", "1", ":", "]", ".", "split", "(", "u' '", ",", "1", ")", "if", "u' :'", "in", "data", ":", "data", ",", "trailing", "=", "data", ".", "split", "(", "u' :'", ",", "1", ")", "args", "=", "data", ".", "split", "(", ")", "args", ".", "extend", "(", "trailing", ".", "split", "(", ")", ")", "else", ":", "args", "=", "data", ".", "split", "(", ")", "command", "=", "args", ".", "pop", "(", "0", ")", "if", "command", "in", "(", "PRIVMSG", ",", "NOTICE", ")", ":", "dst", "=", "args", ".", "pop", "(", "0", ")", "if", "ctcp_re", ".", "match", "(", "args", "[", "0", "]", ")", ":", "args", "=", "args", "[", "0", "]", ".", "strip", "(", "u'\\x01'", ")", ".", "split", "(", ")", "command", "=", "u'CTCP_'", "+", "args", ".", "pop", "(", "0", ")", "return", "Source", "(", "src", ")", ",", "dst", ",", "command", ",", "args" ]
Extracts message information from `data`. :param data: received line. :type data: unicode :return: extracted information (source, destination, command, args). :rtype: tuple(Source, str, str, list) :raise: :class:`fatbotslim.irc.NullMessage` if `data` is empty.
[ "Extracts", "message", "informations", "from", "data", "." ]
341595d24454a79caee23750eac271f9d0626c88
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/irc/bot.py#L75-L101
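Message.parse in this record splits a raw IRC line into prefix, command, and arguments, folding the trailing parameter back into the argument list and rewriting CTCP requests as CTCP_* commands. A stripped-down, self-contained sketch of just the prefix/trailing handling (no Source wrapper, no CTCP), with an assumed sample line; it is an illustration of the line format, not the library's class:

def parse_irc_line(data):
    # Optional prefix: ':nick!user@host COMMAND ...'
    src = ''
    if data.startswith(':'):
        src, data = data[1:].split(' ', 1)
    # A trailing parameter after ' :' is split back into words, as in Message.parse.
    if ' :' in data:
        data, trailing = data.split(' :', 1)
        args = data.split()
        args.extend(trailing.split())
    else:
        args = data.split()
    command = args.pop(0)
    return src, command, args

print(parse_irc_line(':nick!user@host PRIVMSG #chan :hello world'))
# ('nick!user@host', 'PRIVMSG', ['#chan', 'hello', 'world'])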
241,537
mdeous/fatbotslim
fatbotslim/irc/bot.py
Source.parse
def parse(cls, prefix): """ Extracts informations from `prefix`. :param prefix: prefix with format ``<servername>|<nick>['!'<user>]['@'<host>]``. :type prefix: unicode :return: extracted informations (nickname or host, mode, username, host). :rtype: tuple(str, str, str, str) """ try: nick, rest = prefix.split(u'!') except ValueError: return prefix, None, None, None try: mode, rest = rest.split(u'=') except ValueError: mode, rest = None, rest try: user, host = rest.split(u'@') except ValueError: return nick, mode, rest, None return nick, mode, user, host
python
def parse(cls, prefix): """ Extracts informations from `prefix`. :param prefix: prefix with format ``<servername>|<nick>['!'<user>]['@'<host>]``. :type prefix: unicode :return: extracted informations (nickname or host, mode, username, host). :rtype: tuple(str, str, str, str) """ try: nick, rest = prefix.split(u'!') except ValueError: return prefix, None, None, None try: mode, rest = rest.split(u'=') except ValueError: mode, rest = None, rest try: user, host = rest.split(u'@') except ValueError: return nick, mode, rest, None return nick, mode, user, host
[ "def", "parse", "(", "cls", ",", "prefix", ")", ":", "try", ":", "nick", ",", "rest", "=", "prefix", ".", "split", "(", "u'!'", ")", "except", "ValueError", ":", "return", "prefix", ",", "None", ",", "None", ",", "None", "try", ":", "mode", ",", "rest", "=", "rest", ".", "split", "(", "u'='", ")", "except", "ValueError", ":", "mode", ",", "rest", "=", "None", ",", "rest", "try", ":", "user", ",", "host", "=", "rest", ".", "split", "(", "u'@'", ")", "except", "ValueError", ":", "return", "nick", ",", "mode", ",", "rest", ",", "None", "return", "nick", ",", "mode", ",", "user", ",", "host" ]
Extracts informations from `prefix`. :param prefix: prefix with format ``<servername>|<nick>['!'<user>]['@'<host>]``. :type prefix: unicode :return: extracted informations (nickname or host, mode, username, host). :rtype: tuple(str, str, str, str)
[ "Extracts", "informations", "from", "prefix", "." ]
341595d24454a79caee23750eac271f9d0626c88
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/irc/bot.py#L124-L145
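Source.parse in this record decomposes an IRC prefix into (nick-or-host, mode, user, host) by successively splitting on '!', '=' and '@', falling back to None for any missing part. The same splits, written as a small standalone sketch with two assumed example prefixes:

def parse_prefix(prefix):
    try:
        nick, rest = prefix.split('!')
    except ValueError:            # bare server name, nothing else to extract
        return prefix, None, None, None
    try:
        mode, rest = rest.split('=')
    except ValueError:
        mode = None
    try:
        user, host = rest.split('@')
    except ValueError:
        return nick, mode, rest, None
    return nick, mode, user, host

print(parse_prefix('nick!user@host'))    # ('nick', None, 'user', 'host')
print(parse_prefix('irc.example.org'))   # ('irc.example.org', None, None, None)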
241,538
mdeous/fatbotslim
fatbotslim/irc/bot.py
IRC._create_connection
def _create_connection(self): """ Creates a transport channel. :return: transport channel instance :rtype: :class:`fatbotslim.irc.tcp.TCP` or :class:`fatbotslim.irc.tcp.SSL` """ transport = SSL if self.ssl else TCP return transport(self.server, self.port)
python
def _create_connection(self): """ Creates a transport channel. :return: transport channel instance :rtype: :class:`fatbotslim.irc.tcp.TCP` or :class:`fatbotslim.irc.tcp.SSL` """ transport = SSL if self.ssl else TCP return transport(self.server, self.port)
[ "def", "_create_connection", "(", "self", ")", ":", "transport", "=", "SSL", "if", "self", ".", "ssl", "else", "TCP", "return", "transport", "(", "self", ".", "server", ",", "self", ".", "port", ")" ]
Creates a transport channel. :return: transport channel instance :rtype: :class:`fatbotslim.irc.tcp.TCP` or :class:`fatbotslim.irc.tcp.SSL`
[ "Creates", "a", "transport", "channel", "." ]
341595d24454a79caee23750eac271f9d0626c88
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/irc/bot.py#L188-L196
241,539
mdeous/fatbotslim
fatbotslim/irc/bot.py
IRC._connect
def _connect(self): """ Connects the bot to the server and identifies itself. """ self.conn = self._create_connection() spawn(self.conn.connect) self.set_nick(self.nick) self.cmd(u'USER', u'{0} 3 * {1}'.format(self.nick, self.realname))
python
def _connect(self): """ Connects the bot to the server and identifies itself. """ self.conn = self._create_connection() spawn(self.conn.connect) self.set_nick(self.nick) self.cmd(u'USER', u'{0} 3 * {1}'.format(self.nick, self.realname))
[ "def", "_connect", "(", "self", ")", ":", "self", ".", "conn", "=", "self", ".", "_create_connection", "(", ")", "spawn", "(", "self", ".", "conn", ".", "connect", ")", "self", ".", "set_nick", "(", "self", ".", "nick", ")", "self", ".", "cmd", "(", "u'USER'", ",", "u'{0} 3 * {1}'", ".", "format", "(", "self", ".", "nick", ",", "self", ".", "realname", ")", ")" ]
Connects the bot to the server and identifies itself.
[ "Connects", "the", "bot", "to", "the", "server", "and", "identifies", "itself", "." ]
341595d24454a79caee23750eac271f9d0626c88
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/irc/bot.py#L198-L205
241,540
mdeous/fatbotslim
fatbotslim/irc/bot.py
IRC._send
def _send(self, command): """ Sends a raw line to the server. :param command: line to send. :type command: unicode """ command = command.encode('utf-8') log.debug('>> ' + command) self.conn.oqueue.put(command)
python
def _send(self, command): """ Sends a raw line to the server. :param command: line to send. :type command: unicode """ command = command.encode('utf-8') log.debug('>> ' + command) self.conn.oqueue.put(command)
[ "def", "_send", "(", "self", ",", "command", ")", ":", "command", "=", "command", ".", "encode", "(", "'utf-8'", ")", "log", ".", "debug", "(", "'>> '", "+", "command", ")", "self", ".", "conn", ".", "oqueue", ".", "put", "(", "command", ")" ]
Sends a raw line to the server. :param command: line to send. :type command: unicode
[ "Sends", "a", "raw", "line", "to", "the", "server", "." ]
341595d24454a79caee23750eac271f9d0626c88
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/irc/bot.py#L207-L216
241,541
mdeous/fatbotslim
fatbotslim/irc/bot.py
IRC._handle
def _handle(self, msg): """ Pass a received message to the registered handlers. :param msg: received message :type msg: :class:`fatbotslim.irc.Message` """ def handler_yielder(): for handler in self.handlers: yield handler def handler_callback(_): if msg.propagate: try: h = hyielder.next() g = self._pool.spawn(handler_runner, h) g.link(handler_callback) except StopIteration: pass def handler_runner(h): for command in h.commands: if command == msg.command: method = getattr(h, h.commands[command]) method(msg) hyielder = handler_yielder() try: next_handler = hyielder.next() g = self._pool.spawn(handler_runner, next_handler) g.link(handler_callback) except StopIteration: pass
python
def _handle(self, msg): """ Pass a received message to the registered handlers. :param msg: received message :type msg: :class:`fatbotslim.irc.Message` """ def handler_yielder(): for handler in self.handlers: yield handler def handler_callback(_): if msg.propagate: try: h = hyielder.next() g = self._pool.spawn(handler_runner, h) g.link(handler_callback) except StopIteration: pass def handler_runner(h): for command in h.commands: if command == msg.command: method = getattr(h, h.commands[command]) method(msg) hyielder = handler_yielder() try: next_handler = hyielder.next() g = self._pool.spawn(handler_runner, next_handler) g.link(handler_callback) except StopIteration: pass
[ "def", "_handle", "(", "self", ",", "msg", ")", ":", "def", "handler_yielder", "(", ")", ":", "for", "handler", "in", "self", ".", "handlers", ":", "yield", "handler", "def", "handler_callback", "(", "_", ")", ":", "if", "msg", ".", "propagate", ":", "try", ":", "h", "=", "hyielder", ".", "next", "(", ")", "g", "=", "self", ".", "_pool", ".", "spawn", "(", "handler_runner", ",", "h", ")", "g", ".", "link", "(", "handler_callback", ")", "except", "StopIteration", ":", "pass", "def", "handler_runner", "(", "h", ")", ":", "for", "command", "in", "h", ".", "commands", ":", "if", "command", "==", "msg", ".", "command", ":", "method", "=", "getattr", "(", "h", ",", "h", ".", "commands", "[", "command", "]", ")", "method", "(", "msg", ")", "hyielder", "=", "handler_yielder", "(", ")", "try", ":", "next_handler", "=", "hyielder", ".", "next", "(", ")", "g", "=", "self", ".", "_pool", ".", "spawn", "(", "handler_runner", ",", "next_handler", ")", "g", ".", "link", "(", "handler_callback", ")", "except", "StopIteration", ":", "pass" ]
Pass a received message to the registered handlers. :param msg: received message :type msg: :class:`fatbotslim.irc.Message`
[ "Pass", "a", "received", "message", "to", "the", "registered", "handlers", "." ]
341595d24454a79caee23750eac271f9d0626c88
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/irc/bot.py#L243-L276
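IRC._handle in this record walks the registered handlers one at a time, spawning each in the greenlet pool and chaining the next via a link callback, so a handler can stop further dispatch by clearing msg.propagate. A synchronous sketch of just that dispatch/propagation idea, with made-up handler classes (the real method runs handlers through gevent, as shown above):

class Msg(object):
    def __init__(self, command):
        self.command = command
        self.propagate = True

class EchoHandler(object):
    commands = {'PRIVMSG': 'on_privmsg'}
    def on_privmsg(self, msg):
        print('echo saw', msg.command)

class StopHandler(object):
    commands = {'PRIVMSG': 'on_privmsg'}
    def on_privmsg(self, msg):
        msg.propagate = False  # handlers after this one are skipped

def handle(handlers, msg):
    for handler in handlers:
        if not msg.propagate:
            break
        for command, method_name in handler.commands.items():
            if command == msg.command:
                getattr(handler, method_name)(msg)

handle([EchoHandler(), StopHandler(), EchoHandler()], Msg('PRIVMSG'))
# prints "echo saw PRIVMSG" once: the second EchoHandler never runs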
241,542
mdeous/fatbotslim
fatbotslim/irc/bot.py
IRC.randomize_nick
def randomize_nick(cls, base, suffix_length=3): """ Generates a pseudo-random nickname. :param base: prefix to use for the generated nickname. :type base: unicode :param suffix_length: amount of digits to append to `base` :type suffix_length: int :return: generated nickname. :rtype: unicode """ suffix = u''.join(choice(u'0123456789') for _ in range(suffix_length)) return u'{0}{1}'.format(base, suffix)
python
def randomize_nick(cls, base, suffix_length=3): """ Generates a pseudo-random nickname. :param base: prefix to use for the generated nickname. :type base: unicode :param suffix_length: amount of digits to append to `base` :type suffix_length: int :return: generated nickname. :rtype: unicode """ suffix = u''.join(choice(u'0123456789') for _ in range(suffix_length)) return u'{0}{1}'.format(base, suffix)
[ "def", "randomize_nick", "(", "cls", ",", "base", ",", "suffix_length", "=", "3", ")", ":", "suffix", "=", "u''", ".", "join", "(", "choice", "(", "u'0123456789'", ")", "for", "_", "in", "range", "(", "suffix_length", ")", ")", "return", "u'{0}{1}'", ".", "format", "(", "base", ",", "suffix", ")" ]
Generates a pseudo-random nickname. :param base: prefix to use for the generated nickname. :type base: unicode :param suffix_length: amount of digits to append to `base` :type suffix_length: int :return: generated nickname. :rtype: unicode
[ "Generates", "a", "pseudo", "-", "random", "nickname", "." ]
341595d24454a79caee23750eac271f9d0626c88
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/irc/bot.py#L279-L291
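IRC.randomize_nick in this record just appends a few random digits to a base nick, which is handy when the preferred nick is already taken. An equivalent stdlib-only sketch, with an assumed base name:

import random

def randomize_nick(base, suffix_length=3):
    return base + ''.join(random.choice('0123456789') for _ in range(suffix_length))

print(randomize_nick('fatbot'))  # e.g. 'fatbot042'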
241,543
mdeous/fatbotslim
fatbotslim/irc/bot.py
IRC.add_handler
def add_handler(self, handler, args=None, kwargs=None): """ Registers a new handler. :param handler: handler to register. :type handler: :class:`fatbotslim.handlers.BaseHandler` :param args: positional arguments to pass to the handler's constructor. :type args: list :param kwargs: keyword arguments to pass to the handler's constructor. :type kwargs: dict """ args = [] if args is None else args kwargs = {} if kwargs is None else kwargs handler_instance = handler(self, *args, **kwargs) if isinstance(handler_instance, RightsHandler): self.rights = handler_instance if handler_instance not in self.handlers: self.handlers.append(handler_instance)
python
def add_handler(self, handler, args=None, kwargs=None): """ Registers a new handler. :param handler: handler to register. :type handler: :class:`fatbotslim.handlers.BaseHandler` :param args: positional arguments to pass to the handler's constructor. :type args: list :param kwargs: keyword arguments to pass to the handler's constructor. :type kwargs: dict """ args = [] if args is None else args kwargs = {} if kwargs is None else kwargs handler_instance = handler(self, *args, **kwargs) if isinstance(handler_instance, RightsHandler): self.rights = handler_instance if handler_instance not in self.handlers: self.handlers.append(handler_instance)
[ "def", "add_handler", "(", "self", ",", "handler", ",", "args", "=", "None", ",", "kwargs", "=", "None", ")", ":", "args", "=", "[", "]", "if", "args", "is", "None", "else", "args", "kwargs", "=", "{", "}", "if", "kwargs", "is", "None", "else", "kwargs", "handler_instance", "=", "handler", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "if", "isinstance", "(", "handler_instance", ",", "RightsHandler", ")", ":", "self", ".", "rights", "=", "handler_instance", "if", "handler_instance", "not", "in", "self", ".", "handlers", ":", "self", ".", "handlers", ".", "append", "(", "handler_instance", ")" ]
Registers a new handler. :param handler: handler to register. :type handler: :class:`fatbotslim.handlers.BaseHandler` :param args: positional arguments to pass to the handler's constructor. :type args: list :param kwargs: keyword arguments to pass to the handler's constructor. :type kwargs: dict
[ "Registers", "a", "new", "handler", "." ]
341595d24454a79caee23750eac271f9d0626c88
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/irc/bot.py#L311-L328
241,544
mdeous/fatbotslim
fatbotslim/irc/bot.py
IRC.cmd
def cmd(self, command, args, prefix=None): """ Sends a command to the server. :param command: IRC code to send. :type command: unicode :param args: arguments to pass with the command. :type args: basestring :param prefix: optional prefix to prepend to the command. :type prefix: str or None """ if prefix is None: prefix = u'' raw_cmd = u'{0} {1} {2}'.format(prefix, command, args).strip() self._send(raw_cmd)
python
def cmd(self, command, args, prefix=None): """ Sends a command to the server. :param command: IRC code to send. :type command: unicode :param args: arguments to pass with the command. :type args: basestring :param prefix: optional prefix to prepend to the command. :type prefix: str or None """ if prefix is None: prefix = u'' raw_cmd = u'{0} {1} {2}'.format(prefix, command, args).strip() self._send(raw_cmd)
[ "def", "cmd", "(", "self", ",", "command", ",", "args", ",", "prefix", "=", "None", ")", ":", "if", "prefix", "is", "None", ":", "prefix", "=", "u''", "raw_cmd", "=", "u'{0} {1} {2}'", ".", "format", "(", "prefix", ",", "command", ",", "args", ")", ".", "strip", "(", ")", "self", ".", "_send", "(", "raw_cmd", ")" ]
Sends a command to the server. :param command: IRC code to send. :type command: unicode :param args: arguments to pass with the command. :type args: basestring :param prefix: optional prefix to prepend to the command. :type prefix: str or None
[ "Sends", "a", "command", "to", "the", "server", "." ]
341595d24454a79caee23750eac271f9d0626c88
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/irc/bot.py#L330-L344
241,545
mdeous/fatbotslim
fatbotslim/irc/bot.py
IRC.ctcp_reply
def ctcp_reply(self, command, dst, message=None): """ Sends a reply to a CTCP request. :param command: CTCP command to use. :type command: str :param dst: sender of the initial request. :type dst: str :param message: data to attach to the reply. :type message: str """ if message is None: raw_cmd = u'\x01{0}\x01'.format(command) else: raw_cmd = u'\x01{0} {1}\x01'.format(command, message) self.notice(dst, raw_cmd)
python
def ctcp_reply(self, command, dst, message=None): """ Sends a reply to a CTCP request. :param command: CTCP command to use. :type command: str :param dst: sender of the initial request. :type dst: str :param message: data to attach to the reply. :type message: str """ if message is None: raw_cmd = u'\x01{0}\x01'.format(command) else: raw_cmd = u'\x01{0} {1}\x01'.format(command, message) self.notice(dst, raw_cmd)
[ "def", "ctcp_reply", "(", "self", ",", "command", ",", "dst", ",", "message", "=", "None", ")", ":", "if", "message", "is", "None", ":", "raw_cmd", "=", "u'\\x01{0}\\x01'", ".", "format", "(", "command", ")", "else", ":", "raw_cmd", "=", "u'\\x01{0} {1}\\x01'", ".", "format", "(", "command", ",", "message", ")", "self", ".", "notice", "(", "dst", ",", "raw_cmd", ")" ]
Sends a reply to a CTCP request. :param command: CTCP command to use. :type command: str :param dst: sender of the initial request. :type dst: str :param message: data to attach to the reply. :type message: str
[ "Sends", "a", "reply", "to", "a", "CTCP", "request", "." ]
341595d24454a79caee23750eac271f9d0626c88
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/irc/bot.py#L346-L361
241,546
mdeous/fatbotslim
fatbotslim/irc/bot.py
IRC.msg
def msg(self, target, msg): """ Sends a message to an user or channel. :param target: user or channel to send to. :type target: str :param msg: message to send. :type msg: str """ self.cmd(u'PRIVMSG', u'{0} :{1}'.format(target, msg))
python
def msg(self, target, msg): """ Sends a message to an user or channel. :param target: user or channel to send to. :type target: str :param msg: message to send. :type msg: str """ self.cmd(u'PRIVMSG', u'{0} :{1}'.format(target, msg))
[ "def", "msg", "(", "self", ",", "target", ",", "msg", ")", ":", "self", ".", "cmd", "(", "u'PRIVMSG'", ",", "u'{0} :{1}'", ".", "format", "(", "target", ",", "msg", ")", ")" ]
Sends a message to an user or channel. :param target: user or channel to send to. :type target: str :param msg: message to send. :type msg: str
[ "Sends", "a", "message", "to", "an", "user", "or", "channel", "." ]
341595d24454a79caee23750eac271f9d0626c88
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/irc/bot.py#L363-L372
241,547
mdeous/fatbotslim
fatbotslim/irc/bot.py
IRC.notice
def notice(self, target, msg): """ Sends a NOTICE to an user or channel. :param target: user or channel to send to. :type target: str :param msg: message to send. :type msg: basestring """ self.cmd(u'NOTICE', u'{0} :{1}'.format(target, msg))
python
def notice(self, target, msg): """ Sends a NOTICE to an user or channel. :param target: user or channel to send to. :type target: str :param msg: message to send. :type msg: basestring """ self.cmd(u'NOTICE', u'{0} :{1}'.format(target, msg))
[ "def", "notice", "(", "self", ",", "target", ",", "msg", ")", ":", "self", ".", "cmd", "(", "u'NOTICE'", ",", "u'{0} :{1}'", ".", "format", "(", "target", ",", "msg", ")", ")" ]
Sends a NOTICE to an user or channel. :param target: user or channel to send to. :type target: str :param msg: message to send. :type msg: basestring
[ "Sends", "a", "NOTICE", "to", "an", "user", "or", "channel", "." ]
341595d24454a79caee23750eac271f9d0626c88
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/irc/bot.py#L374-L383
241,548
csaez/wishlib
wishlib/si/utils.py
siget
def siget(fullname=""): """Returns a softimage object given its fullname.""" fullname = str(fullname) if not len(fullname): return None return sidict.GetObject(fullname, False)
python
def siget(fullname=""): """Returns a softimage object given its fullname.""" fullname = str(fullname) if not len(fullname): return None return sidict.GetObject(fullname, False)
[ "def", "siget", "(", "fullname", "=", "\"\"", ")", ":", "fullname", "=", "str", "(", "fullname", ")", "if", "not", "len", "(", "fullname", ")", ":", "return", "None", "return", "sidict", ".", "GetObject", "(", "fullname", ",", "False", ")" ]
Returns a softimage object given its fullname.
[ "Returns", "a", "softimage", "object", "given", "its", "fullname", "." ]
c212fa7875006a332a4cefbf69885ced9647bc2f
https://github.com/csaez/wishlib/blob/c212fa7875006a332a4cefbf69885ced9647bc2f/wishlib/si/utils.py#L30-L35
241,549
csaez/wishlib
wishlib/si/utils.py
cmd_wrapper
def cmd_wrapper(cmd_name, **kwds): """Wrap and execute a softimage command accepting named arguments""" cmd = si.Commands(cmd_name) if not cmd: raise Exception(cmd_name + " doesnt found!") for arg in cmd.Arguments: value = kwds.get(arg.Name) if value: arg.Value = value return cmd.Execute()
python
def cmd_wrapper(cmd_name, **kwds): """Wrap and execute a softimage command accepting named arguments""" cmd = si.Commands(cmd_name) if not cmd: raise Exception(cmd_name + " doesnt found!") for arg in cmd.Arguments: value = kwds.get(arg.Name) if value: arg.Value = value return cmd.Execute()
[ "def", "cmd_wrapper", "(", "cmd_name", ",", "*", "*", "kwds", ")", ":", "cmd", "=", "si", ".", "Commands", "(", "cmd_name", ")", "if", "not", "cmd", ":", "raise", "Exception", "(", "cmd_name", "+", "\" doesnt found!\"", ")", "for", "arg", "in", "cmd", ".", "Arguments", ":", "value", "=", "kwds", ".", "get", "(", "arg", ".", "Name", ")", "if", "value", ":", "arg", ".", "Value", "=", "value", "return", "cmd", ".", "Execute", "(", ")" ]
Wrap and execute a softimage command accepting named arguments
[ "Wrap", "and", "execute", "a", "softimage", "command", "accepting", "named", "arguments" ]
c212fa7875006a332a4cefbf69885ced9647bc2f
https://github.com/csaez/wishlib/blob/c212fa7875006a332a4cefbf69885ced9647bc2f/wishlib/si/utils.py#L38-L47
241,550
Amsterdam/authorization_django
authorization_django/middleware.py
_create_logger
def _create_logger(middleware_settings): """ Creates a logger using the given settings. """ if django_settings.DEBUG: level = logging.DEBUG formatter = logging.Formatter( middleware_settings['LOGGER_FORMAT_DEBUG']) else: level = middleware_settings['LOGGER_LEVEL'] formatter = logging.Formatter(middleware_settings['LOGGER_FORMAT']) handler = logging.StreamHandler(sys.stderr) handler.setLevel(level) handler.setFormatter(formatter) logger = logging.getLogger(middleware_settings['LOGGER_NAME']) # If in some strange way this logger already exists we make sure to delete # its existing handlers del logger.handlers[:] logger.addHandler(handler) # Disable propagation by default logger.propagate = False return logger
python
def _create_logger(middleware_settings): """ Creates a logger using the given settings. """ if django_settings.DEBUG: level = logging.DEBUG formatter = logging.Formatter( middleware_settings['LOGGER_FORMAT_DEBUG']) else: level = middleware_settings['LOGGER_LEVEL'] formatter = logging.Formatter(middleware_settings['LOGGER_FORMAT']) handler = logging.StreamHandler(sys.stderr) handler.setLevel(level) handler.setFormatter(formatter) logger = logging.getLogger(middleware_settings['LOGGER_NAME']) # If in some strange way this logger already exists we make sure to delete # its existing handlers del logger.handlers[:] logger.addHandler(handler) # Disable propagation by default logger.propagate = False return logger
[ "def", "_create_logger", "(", "middleware_settings", ")", ":", "if", "django_settings", ".", "DEBUG", ":", "level", "=", "logging", ".", "DEBUG", "formatter", "=", "logging", ".", "Formatter", "(", "middleware_settings", "[", "'LOGGER_FORMAT_DEBUG'", "]", ")", "else", ":", "level", "=", "middleware_settings", "[", "'LOGGER_LEVEL'", "]", "formatter", "=", "logging", ".", "Formatter", "(", "middleware_settings", "[", "'LOGGER_FORMAT'", "]", ")", "handler", "=", "logging", ".", "StreamHandler", "(", "sys", ".", "stderr", ")", "handler", ".", "setLevel", "(", "level", ")", "handler", ".", "setFormatter", "(", "formatter", ")", "logger", "=", "logging", ".", "getLogger", "(", "middleware_settings", "[", "'LOGGER_NAME'", "]", ")", "# If in some strange way this logger already exists we make sure to delete", "# its existing handlers", "del", "logger", ".", "handlers", "[", ":", "]", "logger", ".", "addHandler", "(", "handler", ")", "# Disable propagation by default", "logger", ".", "propagate", "=", "False", "return", "logger" ]
Creates a logger using the given settings.
[ "Creates", "a", "logger", "using", "the", "given", "settings", "." ]
71da52b38a7f5a16a2bde8f8ea97b3c11ccb1be1
https://github.com/Amsterdam/authorization_django/blob/71da52b38a7f5a16a2bde8f8ea97b3c11ccb1be1/authorization_django/middleware.py#L15-L41
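_create_logger in this record picks its level and format from Django's DEBUG flag and the middleware settings, clears any pre-existing handlers on the named logger, and disables propagation. A self-contained sketch of the same setup with hard-coded stand-ins for the Django settings (the names, format string, and defaults here are assumptions, not the package's own values):

import logging
import sys

def create_logger(name='my_middleware', debug=False, level=logging.INFO,
                  fmt='%(levelname)s %(name)s: %(message)s'):
    if debug:
        level = logging.DEBUG
    handler = logging.StreamHandler(sys.stderr)
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter(fmt))
    logger = logging.getLogger(name)
    del logger.handlers[:]      # drop handlers from any earlier configuration
    logger.addHandler(handler)
    logger.setLevel(level)      # added in this sketch so DEBUG records pass the logger itself
    logger.propagate = False    # keep records out of the root logger
    return logger

log = create_logger(debug=True)
log.debug('token accepted')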
241,551
EwilDawe/typy
typy/keyboard.py
press
def press(*keys): """ Simulates a key-press for all the keys passed to the function :param keys: list of keys to be pressed :return: None """ for key in keys: win32api.keybd_event(codes[key], 0, 0, 0) release(key)
python
def press(*keys): """ Simulates a key-press for all the keys passed to the function :param keys: list of keys to be pressed :return: None """ for key in keys: win32api.keybd_event(codes[key], 0, 0, 0) release(key)
[ "def", "press", "(", "*", "keys", ")", ":", "for", "key", "in", "keys", ":", "win32api", ".", "keybd_event", "(", "codes", "[", "key", "]", ",", "0", ",", "0", ",", "0", ")", "release", "(", "key", ")" ]
Simulates a key-press for all the keys passed to the function :param keys: list of keys to be pressed :return: None
[ "Simulates", "a", "key", "-", "press", "for", "all", "the", "keys", "passed", "to", "the", "function" ]
0349e7176567a4dbef318e75d9b3d6868950a1a9
https://github.com/EwilDawe/typy/blob/0349e7176567a4dbef318e75d9b3d6868950a1a9/typy/keyboard.py#L11-L21
241,552
EwilDawe/typy
typy/keyboard.py
hold
def hold(*keys, hold_time = 0, hold_while = None): """ Simulates the holding of all the keys passed to the function These keys are held down for a default period of 0 seconds before release :param keys: list of keys to be held :param hold_time: length of time to hold keys :param hold_while: hold keys while hold_while returns True :return: None """ for key in keys: win32api.keybd_event(codes[key], 0, 0, 0) if callable(hold_while): while hold_while(): pass else: time.sleep(hold_time) release(*keys)
python
def hold(*keys, hold_time = 0, hold_while = None): """ Simulates the holding of all the keys passed to the function These keys are held down for a default period of 0 seconds before release :param keys: list of keys to be held :param hold_time: length of time to hold keys :param hold_while: hold keys while hold_while returns True :return: None """ for key in keys: win32api.keybd_event(codes[key], 0, 0, 0) if callable(hold_while): while hold_while(): pass else: time.sleep(hold_time) release(*keys)
[ "def", "hold", "(", "*", "keys", ",", "hold_time", "=", "0", ",", "hold_while", "=", "None", ")", ":", "for", "key", "in", "keys", ":", "win32api", ".", "keybd_event", "(", "codes", "[", "key", "]", ",", "0", ",", "0", ",", "0", ")", "if", "callable", "(", "hold_while", ")", ":", "while", "hold_while", "(", ")", ":", "pass", "else", ":", "time", ".", "sleep", "(", "hold_time", ")", "release", "(", "*", "keys", ")" ]
Simulates the holding of all the keys passed to the function These keys are held down for a default period of 0 seconds before release :param keys: list of keys to be held :param hold_time: length of time to hold keys :param hold_while: hold keys while hold_while returns True :return: None
[ "Simulates", "the", "holding", "of", "all", "the", "keys", "passed", "to", "the", "function", "These", "keys", "are", "held", "down", "for", "a", "default", "period", "of", "0", "seconds", "before", "release" ]
0349e7176567a4dbef318e75d9b3d6868950a1a9
https://github.com/EwilDawe/typy/blob/0349e7176567a4dbef318e75d9b3d6868950a1a9/typy/keyboard.py#L24-L42
241,553
EwilDawe/typy
typy/keyboard.py
release
def release(*keys): """ Simulates the release of all the keys passed to this function :param keys: list of keys to be released :return: None """ for key in keys: win32api.keybd_event(codes[key], 0, win32con.KEYEVENTF_KEYUP, 0)
python
def release(*keys): """ Simulates the release of all the keys passed to this function :param keys: list of keys to be released :return: None """ for key in keys: win32api.keybd_event(codes[key], 0, win32con.KEYEVENTF_KEYUP, 0)
[ "def", "release", "(", "*", "keys", ")", ":", "for", "key", "in", "keys", ":", "win32api", ".", "keybd_event", "(", "codes", "[", "key", "]", ",", "0", ",", "win32con", ".", "KEYEVENTF_KEYUP", ",", "0", ")" ]
Simulates the release of all the keys passed to this function :param keys: list of keys to be released :return: None
[ "Simulates", "the", "release", "of", "all", "the", "keys", "passed", "to", "this", "function" ]
0349e7176567a4dbef318e75d9b3d6868950a1a9
https://github.com/EwilDawe/typy/blob/0349e7176567a4dbef318e75d9b3d6868950a1a9/typy/keyboard.py#L45-L54
241,554
fr33jc/bang
bang/providers/rs.py
normalize_input_value
def normalize_input_value(value): """ Returns an input value normalized for RightScale API 2.0. This typically means adjusting the *input type* prefix to be one of the valid values:: blank ignore inherit text: env: cred: key: array: This list comes from the table published here: http://reference.rightscale.com/api1.5/resources/ResourceInputs.html#multi_update If unspecified, value is assumed to be a of type ``text``. """ if value in ('blank', 'ignore', 'inherit'): return value # assume any unspecified or unknown types are text tokens = value.split(':') if (len(tokens) < 2 or tokens[0] not in ('text', 'env', 'cred', 'key', 'array')): return 'text:%s' % value return value
python
def normalize_input_value(value): """ Returns an input value normalized for RightScale API 2.0. This typically means adjusting the *input type* prefix to be one of the valid values:: blank ignore inherit text: env: cred: key: array: This list comes from the table published here: http://reference.rightscale.com/api1.5/resources/ResourceInputs.html#multi_update If unspecified, value is assumed to be a of type ``text``. """ if value in ('blank', 'ignore', 'inherit'): return value # assume any unspecified or unknown types are text tokens = value.split(':') if (len(tokens) < 2 or tokens[0] not in ('text', 'env', 'cred', 'key', 'array')): return 'text:%s' % value return value
[ "def", "normalize_input_value", "(", "value", ")", ":", "if", "value", "in", "(", "'blank'", ",", "'ignore'", ",", "'inherit'", ")", ":", "return", "value", "# assume any unspecified or unknown types are text", "tokens", "=", "value", ".", "split", "(", "':'", ")", "if", "(", "len", "(", "tokens", ")", "<", "2", "or", "tokens", "[", "0", "]", "not", "in", "(", "'text'", ",", "'env'", ",", "'cred'", ",", "'key'", ",", "'array'", ")", ")", ":", "return", "'text:%s'", "%", "value", "return", "value" ]
Returns an input value normalized for RightScale API 2.0. This typically means adjusting the *input type* prefix to be one of the valid values:: blank ignore inherit text: env: cred: key: array: This list comes from the table published here: http://reference.rightscale.com/api1.5/resources/ResourceInputs.html#multi_update If unspecified, value is assumed to be a of type ``text``.
[ "Returns", "an", "input", "value", "normalized", "for", "RightScale", "API", "2", ".", "0", "." ]
8f000713f88d2a9a8c1193b63ca10a6578560c16
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/providers/rs.py#L52-L84
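normalize_input_value in this record leaves the special values blank/ignore/inherit alone and otherwise ensures the value carries one of RightScale's input-type prefixes, defaulting to text:. The function has no external dependencies, so copying its logic and feeding it a few assumed inputs shows the behaviour directly:

def normalize_input_value(value):
    if value in ('blank', 'ignore', 'inherit'):
        return value
    tokens = value.split(':')
    if (len(tokens) < 2
            or tokens[0] not in ('text', 'env', 'cred', 'key', 'array')):
        return 'text:%s' % value
    return value

print(normalize_input_value('blank'))          # 'blank'
print(normalize_input_value('hello'))          # 'text:hello'
print(normalize_input_value('cred:DB_PASS'))   # 'cred:DB_PASS'
print(normalize_input_value('10.0.0.1:8080'))  # 'text:10.0.0.1:8080'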
241,555
fr33jc/bang
bang/providers/rs.py
Servers.create_stack
def create_stack(self, name): """ Creates stack if necessary. """ deployment = find_exact(self.api.deployments, name=name) if not deployment: try: # TODO: replace when python-rightscale handles non-json self.api.client.post( '/api/deployments', data={'deployment[name]': name}, ) except HTTPError as e: log.error( 'Failed to create stack %s. ' 'RightScale returned %d:\n%s' % (name, e.response.status_code, e.response.content) )
python
def create_stack(self, name): """ Creates stack if necessary. """ deployment = find_exact(self.api.deployments, name=name) if not deployment: try: # TODO: replace when python-rightscale handles non-json self.api.client.post( '/api/deployments', data={'deployment[name]': name}, ) except HTTPError as e: log.error( 'Failed to create stack %s. ' 'RightScale returned %d:\n%s' % (name, e.response.status_code, e.response.content) )
[ "def", "create_stack", "(", "self", ",", "name", ")", ":", "deployment", "=", "find_exact", "(", "self", ".", "api", ".", "deployments", ",", "name", "=", "name", ")", "if", "not", "deployment", ":", "try", ":", "# TODO: replace when python-rightscale handles non-json", "self", ".", "api", ".", "client", ".", "post", "(", "'/api/deployments'", ",", "data", "=", "{", "'deployment[name]'", ":", "name", "}", ",", ")", "except", "HTTPError", "as", "e", ":", "log", ".", "error", "(", "'Failed to create stack %s. '", "'RightScale returned %d:\\n%s'", "%", "(", "name", ",", "e", ".", "response", ".", "status_code", ",", "e", ".", "response", ".", "content", ")", ")" ]
Creates stack if necessary.
[ "Creates", "stack", "if", "necessary", "." ]
8f000713f88d2a9a8c1193b63ca10a6578560c16
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/providers/rs.py#L100-L117
241,556
GMadorell/abris
abris_transform/configuration/data_model.py
DataModel.set_features_types_from_dataframe
def set_features_types_from_dataframe(self, data_frame): """ Sets the features types from the given data_frame. All the calls except the first one are ignored. """ if self.__feature_types_set: return self.__feature_types_set = True dtypes = data_frame.dtypes for feature in self.__iter__(): name = feature.get_name() type_name = data_type_to_type_name(dtypes[name]) feature.set_type_name(type_name)
python
def set_features_types_from_dataframe(self, data_frame): """ Sets the features types from the given data_frame. All the calls except the first one are ignored. """ if self.__feature_types_set: return self.__feature_types_set = True dtypes = data_frame.dtypes for feature in self.__iter__(): name = feature.get_name() type_name = data_type_to_type_name(dtypes[name]) feature.set_type_name(type_name)
[ "def", "set_features_types_from_dataframe", "(", "self", ",", "data_frame", ")", ":", "if", "self", ".", "__feature_types_set", ":", "return", "self", ".", "__feature_types_set", "=", "True", "dtypes", "=", "data_frame", ".", "dtypes", "for", "feature", "in", "self", ".", "__iter__", "(", ")", ":", "name", "=", "feature", ".", "get_name", "(", ")", "type_name", "=", "data_type_to_type_name", "(", "dtypes", "[", "name", "]", ")", "feature", ".", "set_type_name", "(", "type_name", ")" ]
Sets the features types from the given data_frame. All the calls except the first one are ignored.
[ "Sets", "the", "features", "types", "from", "the", "given", "data_frame", ".", "All", "the", "calls", "except", "the", "first", "one", "are", "ignored", "." ]
0d8ab7ec506835a45fae6935d129f5d7e6937bb2
https://github.com/GMadorell/abris/blob/0d8ab7ec506835a45fae6935d129f5d7e6937bb2/abris_transform/configuration/data_model.py#L12-L25
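set_features_types_from_dataframe in this record reads the pandas dtypes of the frame once (subsequent calls are ignored) and stores a type name per feature via data_type_to_type_name. A small sketch of the dtype-to-name step with a hypothetical mapping function, since the project's own data_type_to_type_name is not shown in this record:

import pandas as pd

def data_type_to_type_name(dtype):
    # Hypothetical mapping; the real project defines its own type names.
    if pd.api.types.is_numeric_dtype(dtype):
        return 'number'
    return 'string'

df = pd.DataFrame({'age': [31, 42], 'city': ['Oslo', 'Lima']})
print({name: data_type_to_type_name(dtype) for name, dtype in df.dtypes.items()})
# {'age': 'number', 'city': 'string'}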
241,557
lextoumbourou/txstripe
txstripe/util.py
handle_api_error
def handle_api_error(resp): """Stolen straight from the Stripe Python source.""" content = yield resp.json() headers = HeaderWrapper(resp.headers) try: err = content['error'] except (KeyError, TypeError): raise error.APIError( "Invalid response object from API: %r (HTTP response code " "was %d)" % (content, resp.code), resp, resp.code, content, headers) if resp.code in [400, 404]: raise error.InvalidRequestError( err.get('message'), err.get('param'), resp, resp.code, content, headers) elif resp.code == 401: raise error.AuthenticationError( err.get('message'), resp, resp.code, content, headers) elif resp.code == 402: raise error.CardError( err.get('message'), err.get('param'), err.get('code'), content, resp.code, resp, headers) else: raise error.APIError( err.get('message'), content, resp.code, resp, headers)
python
def handle_api_error(resp): """Stolen straight from the Stripe Python source.""" content = yield resp.json() headers = HeaderWrapper(resp.headers) try: err = content['error'] except (KeyError, TypeError): raise error.APIError( "Invalid response object from API: %r (HTTP response code " "was %d)" % (content, resp.code), resp, resp.code, content, headers) if resp.code in [400, 404]: raise error.InvalidRequestError( err.get('message'), err.get('param'), resp, resp.code, content, headers) elif resp.code == 401: raise error.AuthenticationError( err.get('message'), resp, resp.code, content, headers) elif resp.code == 402: raise error.CardError( err.get('message'), err.get('param'), err.get('code'), content, resp.code, resp, headers) else: raise error.APIError( err.get('message'), content, resp.code, resp, headers)
[ "def", "handle_api_error", "(", "resp", ")", ":", "content", "=", "yield", "resp", ".", "json", "(", ")", "headers", "=", "HeaderWrapper", "(", "resp", ".", "headers", ")", "try", ":", "err", "=", "content", "[", "'error'", "]", "except", "(", "KeyError", ",", "TypeError", ")", ":", "raise", "error", ".", "APIError", "(", "\"Invalid response object from API: %r (HTTP response code \"", "\"was %d)\"", "%", "(", "content", ",", "resp", ".", "code", ")", ",", "resp", ",", "resp", ".", "code", ",", "content", ",", "headers", ")", "if", "resp", ".", "code", "in", "[", "400", ",", "404", "]", ":", "raise", "error", ".", "InvalidRequestError", "(", "err", ".", "get", "(", "'message'", ")", ",", "err", ".", "get", "(", "'param'", ")", ",", "resp", ",", "resp", ".", "code", ",", "content", ",", "headers", ")", "elif", "resp", ".", "code", "==", "401", ":", "raise", "error", ".", "AuthenticationError", "(", "err", ".", "get", "(", "'message'", ")", ",", "resp", ",", "resp", ".", "code", ",", "content", ",", "headers", ")", "elif", "resp", ".", "code", "==", "402", ":", "raise", "error", ".", "CardError", "(", "err", ".", "get", "(", "'message'", ")", ",", "err", ".", "get", "(", "'param'", ")", ",", "err", ".", "get", "(", "'code'", ")", ",", "content", ",", "resp", ".", "code", ",", "resp", ",", "headers", ")", "else", ":", "raise", "error", ".", "APIError", "(", "err", ".", "get", "(", "'message'", ")", ",", "content", ",", "resp", ".", "code", ",", "resp", ",", "headers", ")" ]
Stolen straight from the Stripe Python source.
[ "Stolen", "straight", "from", "the", "Stripe", "Python", "source", "." ]
a69e67f524258026fd1840655a0578311bba3b89
https://github.com/lextoumbourou/txstripe/blob/a69e67f524258026fd1840655a0578311bba3b89/txstripe/util.py#L11-L39
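handle_api_error in this record maps Stripe's HTTP status codes onto txstripe's exception hierarchy: 400/404 become InvalidRequestError, 401 AuthenticationError, 402 CardError, and anything else APIError. A tiny synchronous sketch of that status-to-exception mapping, with placeholder exception classes instead of txstripe.error (the real function is a Twisted generator that also unwraps the JSON body and headers):

class APIError(Exception): pass
class InvalidRequestError(APIError): pass
class AuthenticationError(APIError): pass
class CardError(APIError): pass

def exception_for_status(code, message):
    if code in (400, 404):
        return InvalidRequestError(message)
    if code == 401:
        return AuthenticationError(message)
    if code == 402:
        return CardError(message)
    return APIError(message)

print(type(exception_for_status(402, 'Your card was declined')).__name__)  # CardError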
241,558
DaveMcEwan/ndim
ndim_base.py
vectors_between_pts
def vectors_between_pts(pts=[]): '''Return vectors between points on N dimensions. Last vector is the path between the first and last point, creating a loop. ''' assert isinstance(pts, list) and len(pts) > 0 l_pts = len(pts) l_pt_prev = None for pt in pts: assert isinstance(pt, tuple) l_pt = len(pt) assert l_pt > 1 for i in pt: assert isinstance(i, float) if l_pt_prev is not None: assert l_pt == l_pt_prev l_pt_prev = l_pt return [tuple([pts[(i+1) % l_pts][j] - pts[i][j] for j in range(l_pt)]) \ for i in range(l_pts)]
python
def vectors_between_pts(pts=[]): '''Return vectors between points on N dimensions. Last vector is the path between the first and last point, creating a loop. ''' assert isinstance(pts, list) and len(pts) > 0 l_pts = len(pts) l_pt_prev = None for pt in pts: assert isinstance(pt, tuple) l_pt = len(pt) assert l_pt > 1 for i in pt: assert isinstance(i, float) if l_pt_prev is not None: assert l_pt == l_pt_prev l_pt_prev = l_pt return [tuple([pts[(i+1) % l_pts][j] - pts[i][j] for j in range(l_pt)]) \ for i in range(l_pts)]
[ "def", "vectors_between_pts", "(", "pts", "=", "[", "]", ")", ":", "assert", "isinstance", "(", "pts", ",", "list", ")", "and", "len", "(", "pts", ")", ">", "0", "l_pts", "=", "len", "(", "pts", ")", "l_pt_prev", "=", "None", "for", "pt", "in", "pts", ":", "assert", "isinstance", "(", "pt", ",", "tuple", ")", "l_pt", "=", "len", "(", "pt", ")", "assert", "l_pt", ">", "1", "for", "i", "in", "pt", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "if", "l_pt_prev", "is", "not", "None", ":", "assert", "l_pt", "==", "l_pt_prev", "l_pt_prev", "=", "l_pt", "return", "[", "tuple", "(", "[", "pts", "[", "(", "i", "+", "1", ")", "%", "l_pts", "]", "[", "j", "]", "-", "pts", "[", "i", "]", "[", "j", "]", "for", "j", "in", "range", "(", "l_pt", ")", "]", ")", "for", "i", "in", "range", "(", "l_pts", ")", "]" ]
Return vectors between points on N dimensions. Last vector is the path between the first and last point, creating a loop.
[ "Return", "vectors", "between", "points", "on", "N", "dimensions", ".", "Last", "vector", "is", "the", "path", "between", "the", "first", "and", "last", "point", "creating", "a", "loop", "." ]
f1ea023d3e597160fc1e9e11921de07af659f9d2
https://github.com/DaveMcEwan/ndim/blob/f1ea023d3e597160fc1e9e11921de07af659f9d2/ndim_base.py#L8-L26
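vectors_between_pts in this record turns a list of N-dimensional points into the edge vectors between consecutive points, with the final vector closing the loop back to the first point; as a consequence the vectors always sum to zero component-wise. A quick check on an assumed triangle, assuming ndim_base is importable and that points are tuples of floats:

from ndim_base import vectors_between_pts

tri = [(0.0, 0.0), (1.0, 0.0), (0.0, 1.0)]
vecs = vectors_between_pts(tri)
print(vecs)                          # [(1.0, 0.0), (-1.0, 1.0), (0.0, -1.0)]
print([sum(c) for c in zip(*vecs)])  # [0.0, 0.0] - the loop closes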
241,559
DaveMcEwan/ndim
ndim_base.py
pt_between_pts
def pt_between_pts(a=(0.0, 0.0), b=(0.0, 0.0), t=0.5): '''Return the point between two points on N dimensions. ''' assert isinstance(a, tuple) assert isinstance(b, tuple) l_pt = len(a) assert l_pt > 1 assert l_pt == len(b) for i in a: assert isinstance(i, float) for i in b: assert isinstance(i, float) assert isinstance(t, float) assert 0 <= t <= 1 return tuple([ ((b[i] - a[i]) * t) + a[i] for i in range(l_pt) ])
python
def pt_between_pts(a=(0.0, 0.0), b=(0.0, 0.0), t=0.5): '''Return the point between two points on N dimensions. ''' assert isinstance(a, tuple) assert isinstance(b, tuple) l_pt = len(a) assert l_pt > 1 assert l_pt == len(b) for i in a: assert isinstance(i, float) for i in b: assert isinstance(i, float) assert isinstance(t, float) assert 0 <= t <= 1 return tuple([ ((b[i] - a[i]) * t) + a[i] for i in range(l_pt) ])
[ "def", "pt_between_pts", "(", "a", "=", "(", "0.0", ",", "0.0", ")", ",", "b", "=", "(", "0.0", ",", "0.0", ")", ",", "t", "=", "0.5", ")", ":", "assert", "isinstance", "(", "a", ",", "tuple", ")", "assert", "isinstance", "(", "b", ",", "tuple", ")", "l_pt", "=", "len", "(", "a", ")", "assert", "l_pt", ">", "1", "assert", "l_pt", "==", "len", "(", "b", ")", "for", "i", "in", "a", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "for", "i", "in", "b", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "assert", "isinstance", "(", "t", ",", "float", ")", "assert", "0", "<=", "t", "<=", "1", "return", "tuple", "(", "[", "(", "(", "b", "[", "i", "]", "-", "a", "[", "i", "]", ")", "*", "t", ")", "+", "a", "[", "i", "]", "for", "i", "in", "range", "(", "l_pt", ")", "]", ")" ]
Return the point between two points on N dimensions.
[ "Return", "the", "point", "between", "two", "points", "on", "N", "dimensions", "." ]
f1ea023d3e597160fc1e9e11921de07af659f9d2
https://github.com/DaveMcEwan/ndim/blob/f1ea023d3e597160fc1e9e11921de07af659f9d2/ndim_base.py#L56-L71
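pt_between_pts in this record is plain linear interpolation: for each coordinate it returns a + (b - a) * t, so t=0.0 gives a, t=1.0 gives b, and the default t=0.5 gives the midpoint. For example (again assuming ndim_base is on the path):

from ndim_base import pt_between_pts

print(pt_between_pts((0.0, 0.0), (4.0, 2.0)))          # (2.0, 1.0) - midpoint
print(pt_between_pts((0.0, 0.0), (4.0, 2.0), t=0.25))  # (1.0, 0.5)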
241,560
DaveMcEwan/ndim
ndim_base.py
pt_rotate
def pt_rotate(pt=(0.0, 0.0), angle=[0.0], center=(0.0, 0.0)): '''Return given point rotated around a center point in N dimensions. Angle is list of rotation in radians for each pair of axis. ''' assert isinstance(pt, tuple) l_pt = len(pt) assert l_pt > 1 for i in pt: assert isinstance(i, float) assert isinstance(angle, list) l_angle = len(angle) assert l_angle == l_pt-1 for i in angle: assert isinstance(i, float) assert abs(i) <= 2*pi assert isinstance(center, tuple) assert len(center) == l_pt for i in center: assert isinstance(i, float) # Get vector from center to point and use to get relative polar coordinate. v_cart = [pt[i] - center[i] for i in range(l_pt)] # Length of vector needs to stay constant for new point. v_pol_l = [sqrt(v_cart[i]**2 + v_cart[i+1]**2) for i in range(l_angle)] v_pol_a = [(atan(v_cart[i+1] / v_cart[i]) if v_cart[i] != 0.0 else pi/2) + pi*int(pt[i] < center[i]) \ for i in range(l_angle)] # Add rotation angle then convert back to cartesian vector. n_pol_a = [v_pol_a[i] + angle[i] for i in range(l_angle)] n_cart = [v_pol_l[0] * cos(n_pol_a[0])] + [v_pol_l[i] * sin(n_pol_a[i])\ for i in range(l_angle)] # Add in the centre offset to get original offset from c. r = [n_cart[i] + center[i] for i in range(l_pt)] return tuple(r)
python
def pt_rotate(pt=(0.0, 0.0), angle=[0.0], center=(0.0, 0.0)): '''Return given point rotated around a center point in N dimensions. Angle is list of rotation in radians for each pair of axis. ''' assert isinstance(pt, tuple) l_pt = len(pt) assert l_pt > 1 for i in pt: assert isinstance(i, float) assert isinstance(angle, list) l_angle = len(angle) assert l_angle == l_pt-1 for i in angle: assert isinstance(i, float) assert abs(i) <= 2*pi assert isinstance(center, tuple) assert len(center) == l_pt for i in center: assert isinstance(i, float) # Get vector from center to point and use to get relative polar coordinate. v_cart = [pt[i] - center[i] for i in range(l_pt)] # Length of vector needs to stay constant for new point. v_pol_l = [sqrt(v_cart[i]**2 + v_cart[i+1]**2) for i in range(l_angle)] v_pol_a = [(atan(v_cart[i+1] / v_cart[i]) if v_cart[i] != 0.0 else pi/2) + pi*int(pt[i] < center[i]) \ for i in range(l_angle)] # Add rotation angle then convert back to cartesian vector. n_pol_a = [v_pol_a[i] + angle[i] for i in range(l_angle)] n_cart = [v_pol_l[0] * cos(n_pol_a[0])] + [v_pol_l[i] * sin(n_pol_a[i])\ for i in range(l_angle)] # Add in the centre offset to get original offset from c. r = [n_cart[i] + center[i] for i in range(l_pt)] return tuple(r)
[ "def", "pt_rotate", "(", "pt", "=", "(", "0.0", ",", "0.0", ")", ",", "angle", "=", "[", "0.0", "]", ",", "center", "=", "(", "0.0", ",", "0.0", ")", ")", ":", "assert", "isinstance", "(", "pt", ",", "tuple", ")", "l_pt", "=", "len", "(", "pt", ")", "assert", "l_pt", ">", "1", "for", "i", "in", "pt", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "assert", "isinstance", "(", "angle", ",", "list", ")", "l_angle", "=", "len", "(", "angle", ")", "assert", "l_angle", "==", "l_pt", "-", "1", "for", "i", "in", "angle", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "assert", "abs", "(", "i", ")", "<=", "2", "*", "pi", "assert", "isinstance", "(", "center", ",", "tuple", ")", "assert", "len", "(", "center", ")", "==", "l_pt", "for", "i", "in", "center", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "# Get vector from center to point and use to get relative polar coordinate.", "v_cart", "=", "[", "pt", "[", "i", "]", "-", "center", "[", "i", "]", "for", "i", "in", "range", "(", "l_pt", ")", "]", "# Length of vector needs to stay constant for new point.", "v_pol_l", "=", "[", "sqrt", "(", "v_cart", "[", "i", "]", "**", "2", "+", "v_cart", "[", "i", "+", "1", "]", "**", "2", ")", "for", "i", "in", "range", "(", "l_angle", ")", "]", "v_pol_a", "=", "[", "(", "atan", "(", "v_cart", "[", "i", "+", "1", "]", "/", "v_cart", "[", "i", "]", ")", "if", "v_cart", "[", "i", "]", "!=", "0.0", "else", "pi", "/", "2", ")", "+", "pi", "*", "int", "(", "pt", "[", "i", "]", "<", "center", "[", "i", "]", ")", "for", "i", "in", "range", "(", "l_angle", ")", "]", "# Add rotation angle then convert back to cartesian vector.", "n_pol_a", "=", "[", "v_pol_a", "[", "i", "]", "+", "angle", "[", "i", "]", "for", "i", "in", "range", "(", "l_angle", ")", "]", "n_cart", "=", "[", "v_pol_l", "[", "0", "]", "*", "cos", "(", "n_pol_a", "[", "0", "]", ")", "]", "+", "[", "v_pol_l", "[", "i", "]", "*", "sin", "(", "n_pol_a", "[", "i", "]", ")", "for", "i", "in", "range", "(", "l_angle", ")", "]", "# Add in the centre offset to get original offset from c.", "r", "=", "[", "n_cart", "[", "i", "]", "+", "center", "[", "i", "]", "for", "i", "in", "range", "(", "l_pt", ")", "]", "return", "tuple", "(", "r", ")" ]
Return given point rotated around a center point in N dimensions. Angle is list of rotation in radians for each pair of axis.
[ "Return", "given", "point", "rotated", "around", "a", "center", "point", "in", "N", "dimensions", ".", "Angle", "is", "list", "of", "rotation", "in", "radians", "for", "each", "pair", "of", "axis", "." ]
f1ea023d3e597160fc1e9e11921de07af659f9d2
https://github.com/DaveMcEwan/ndim/blob/f1ea023d3e597160fc1e9e11921de07af659f9d2/ndim_base.py#L145-L180
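pt_rotate in this record converts the vector from center to pt into per-axis-pair polar coordinates, adds the requested angles, and converts back, so a single angle rotates a 2-D point counter-clockwise about the center. Two assumed checks (results are exact only up to floating-point rounding):

from math import pi
from ndim_base import pt_rotate

print(pt_rotate((1.0, 0.0), [pi / 2]))                 # approximately (0.0, 1.0)
print(pt_rotate((2.0, 1.0), [pi], center=(1.0, 1.0)))  # approximately (0.0, 1.0)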
241,561
DaveMcEwan/ndim
ndim_base.py
pts_rotate
def pts_rotate(pts=[], angle=[0.0], center=(0.0, 0.0)): '''Return given points rotated around a center point in N dimensions. Angle is list of rotation in radians for each pair of axis. ''' assert isinstance(pts, list) and len(pts) > 0 l_pt_prev = None for pt in pts: assert isinstance(pt, tuple) l_pt = len(pt) assert l_pt > 1 for i in pt: assert isinstance(i, float) if l_pt_prev is not None: assert l_pt == l_pt_prev l_pt_prev = l_pt assert isinstance(angle, list) l_angle = len(angle) assert l_angle == l_pt-1 for i in angle: assert isinstance(i, float) assert abs(i) <= 2*pi assert isinstance(center, tuple) assert len(center) == l_pt for i in center: assert isinstance(i, float) return [pt_rotate(pt, angle, center) for pt in pts]
python
def pts_rotate(pts=[], angle=[0.0], center=(0.0, 0.0)): '''Return given points rotated around a center point in N dimensions. Angle is list of rotation in radians for each pair of axis. ''' assert isinstance(pts, list) and len(pts) > 0 l_pt_prev = None for pt in pts: assert isinstance(pt, tuple) l_pt = len(pt) assert l_pt > 1 for i in pt: assert isinstance(i, float) if l_pt_prev is not None: assert l_pt == l_pt_prev l_pt_prev = l_pt assert isinstance(angle, list) l_angle = len(angle) assert l_angle == l_pt-1 for i in angle: assert isinstance(i, float) assert abs(i) <= 2*pi assert isinstance(center, tuple) assert len(center) == l_pt for i in center: assert isinstance(i, float) return [pt_rotate(pt, angle, center) for pt in pts]
[ "def", "pts_rotate", "(", "pts", "=", "[", "]", ",", "angle", "=", "[", "0.0", "]", ",", "center", "=", "(", "0.0", ",", "0.0", ")", ")", ":", "assert", "isinstance", "(", "pts", ",", "list", ")", "and", "len", "(", "pts", ")", ">", "0", "l_pt_prev", "=", "None", "for", "pt", "in", "pts", ":", "assert", "isinstance", "(", "pt", ",", "tuple", ")", "l_pt", "=", "len", "(", "pt", ")", "assert", "l_pt", ">", "1", "for", "i", "in", "pt", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "if", "l_pt_prev", "is", "not", "None", ":", "assert", "l_pt", "==", "l_pt_prev", "l_pt_prev", "=", "l_pt", "assert", "isinstance", "(", "angle", ",", "list", ")", "l_angle", "=", "len", "(", "angle", ")", "assert", "l_angle", "==", "l_pt", "-", "1", "for", "i", "in", "angle", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "assert", "abs", "(", "i", ")", "<=", "2", "*", "pi", "assert", "isinstance", "(", "center", ",", "tuple", ")", "assert", "len", "(", "center", ")", "==", "l_pt", "for", "i", "in", "center", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "return", "[", "pt_rotate", "(", "pt", ",", "angle", ",", "center", ")", "for", "pt", "in", "pts", "]" ]
Return given points rotated around a center point in N dimensions. Angle is list of rotation in radians for each pair of axis.
[ "Return", "given", "points", "rotated", "around", "a", "center", "point", "in", "N", "dimensions", ".", "Angle", "is", "list", "of", "rotation", "in", "radians", "for", "each", "pair", "of", "axis", "." ]
f1ea023d3e597160fc1e9e11921de07af659f9d2
https://github.com/DaveMcEwan/ndim/blob/f1ea023d3e597160fc1e9e11921de07af659f9d2/ndim_base.py#L183-L209
241,562
DaveMcEwan/ndim
ndim_base.py
pt_shift
def pt_shift(pt=(0.0, 0.0), shift=[0.0, 0.0]): '''Return given point shifted in N dimensions. ''' assert isinstance(pt, tuple) l_pt = len(pt) assert l_pt > 1 for i in pt: assert isinstance(i, float) assert isinstance(shift, list) l_sh = len(shift) assert l_sh == l_pt for i in shift: assert isinstance(i, float) return tuple([pt[i] + shift[i] for i in range(l_pt)])
python
def pt_shift(pt=(0.0, 0.0), shift=[0.0, 0.0]): '''Return given point shifted in N dimensions. ''' assert isinstance(pt, tuple) l_pt = len(pt) assert l_pt > 1 for i in pt: assert isinstance(i, float) assert isinstance(shift, list) l_sh = len(shift) assert l_sh == l_pt for i in shift: assert isinstance(i, float) return tuple([pt[i] + shift[i] for i in range(l_pt)])
[ "def", "pt_shift", "(", "pt", "=", "(", "0.0", ",", "0.0", ")", ",", "shift", "=", "[", "0.0", ",", "0.0", "]", ")", ":", "assert", "isinstance", "(", "pt", ",", "tuple", ")", "l_pt", "=", "len", "(", "pt", ")", "assert", "l_pt", ">", "1", "for", "i", "in", "pt", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "assert", "isinstance", "(", "shift", ",", "list", ")", "l_sh", "=", "len", "(", "shift", ")", "assert", "l_sh", "==", "l_pt", "for", "i", "in", "shift", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "return", "tuple", "(", "[", "pt", "[", "i", "]", "+", "shift", "[", "i", "]", "for", "i", "in", "range", "(", "l_pt", ")", "]", ")" ]
Return given point shifted in N dimensions.
[ "Return", "given", "point", "shifted", "in", "N", "dimensions", "." ]
f1ea023d3e597160fc1e9e11921de07af659f9d2
https://github.com/DaveMcEwan/ndim/blob/f1ea023d3e597160fc1e9e11921de07af659f9d2/ndim_base.py#L212-L226
241,563
DaveMcEwan/ndim
ndim_base.py
pts_shift
def pts_shift(pts=[], shift=[0.0, 0.0]): '''Return given points shifted in N dimensions. ''' assert isinstance(pts, list) and len(pts) > 0 l_pt_prev = None for pt in pts: assert isinstance(pt, tuple) l_pt = len(pt) assert l_pt > 1 for i in pt: assert isinstance(i, float) if l_pt_prev is not None: assert l_pt == l_pt_prev l_pt_prev = l_pt assert isinstance(shift, list) l_sh = len(shift) assert l_sh == l_pt for i in shift: assert isinstance(i, float) return [pt_shift(pt, shift) for pt in pts]
python
def pts_shift(pts=[], shift=[0.0, 0.0]): '''Return given points shifted in N dimensions. ''' assert isinstance(pts, list) and len(pts) > 0 l_pt_prev = None for pt in pts: assert isinstance(pt, tuple) l_pt = len(pt) assert l_pt > 1 for i in pt: assert isinstance(i, float) if l_pt_prev is not None: assert l_pt == l_pt_prev l_pt_prev = l_pt assert isinstance(shift, list) l_sh = len(shift) assert l_sh == l_pt for i in shift: assert isinstance(i, float) return [pt_shift(pt, shift) for pt in pts]
[ "def", "pts_shift", "(", "pts", "=", "[", "]", ",", "shift", "=", "[", "0.0", ",", "0.0", "]", ")", ":", "assert", "isinstance", "(", "pts", ",", "list", ")", "and", "len", "(", "pts", ")", ">", "0", "l_pt_prev", "=", "None", "for", "pt", "in", "pts", ":", "assert", "isinstance", "(", "pt", ",", "tuple", ")", "l_pt", "=", "len", "(", "pt", ")", "assert", "l_pt", ">", "1", "for", "i", "in", "pt", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "if", "l_pt_prev", "is", "not", "None", ":", "assert", "l_pt", "==", "l_pt_prev", "l_pt_prev", "=", "l_pt", "assert", "isinstance", "(", "shift", ",", "list", ")", "l_sh", "=", "len", "(", "shift", ")", "assert", "l_sh", "==", "l_pt", "for", "i", "in", "shift", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "return", "[", "pt_shift", "(", "pt", ",", "shift", ")", "for", "pt", "in", "pts", "]" ]
Return given points shifted in N dimensions.
[ "Return", "given", "points", "shifted", "in", "N", "dimensions", "." ]
f1ea023d3e597160fc1e9e11921de07af659f9d2
https://github.com/DaveMcEwan/ndim/blob/f1ea023d3e597160fc1e9e11921de07af659f9d2/ndim_base.py#L229-L249
241,564
DaveMcEwan/ndim
ndim_base.py
pt_scale
def pt_scale(pt=(0.0, 0.0), f=1.0): '''Return given point scaled by factor f from origin. ''' assert isinstance(pt, tuple) l_pt = len(pt) assert l_pt > 1 for i in pt: assert isinstance(i, float) assert isinstance(f, float) return tuple([pt[i]*f for i in range(l_pt)])
python
def pt_scale(pt=(0.0, 0.0), f=1.0): '''Return given point scaled by factor f from origin. ''' assert isinstance(pt, tuple) l_pt = len(pt) assert l_pt > 1 for i in pt: assert isinstance(i, float) assert isinstance(f, float) return tuple([pt[i]*f for i in range(l_pt)])
[ "def", "pt_scale", "(", "pt", "=", "(", "0.0", ",", "0.0", ")", ",", "f", "=", "1.0", ")", ":", "assert", "isinstance", "(", "pt", ",", "tuple", ")", "l_pt", "=", "len", "(", "pt", ")", "assert", "l_pt", ">", "1", "for", "i", "in", "pt", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "assert", "isinstance", "(", "f", ",", "float", ")", "return", "tuple", "(", "[", "pt", "[", "i", "]", "*", "f", "for", "i", "in", "range", "(", "l_pt", ")", "]", ")" ]
Return given point scaled by factor f from origin.
[ "Return", "given", "point", "scaled", "by", "factor", "f", "from", "origin", "." ]
f1ea023d3e597160fc1e9e11921de07af659f9d2
https://github.com/DaveMcEwan/ndim/blob/f1ea023d3e597160fc1e9e11921de07af659f9d2/ndim_base.py#L348-L358
241,565
DaveMcEwan/ndim
ndim_base.py
pts_scale
def pts_scale(pts=[], f=1.0): '''Return given points scaled by factor f from origin. ''' assert isinstance(pts, list) and len(pts) > 0 l_pt_prev = None for pt in pts: assert isinstance(pt, tuple) l_pt = len(pt) assert l_pt > 1 for i in pt: assert isinstance(i, float) if l_pt_prev is not None: assert l_pt == l_pt_prev l_pt_prev = l_pt assert isinstance(f, float) return [pt_scale(pt, f) for pt in pts]
python
def pts_scale(pts=[], f=1.0): '''Return given points scaled by factor f from origin. ''' assert isinstance(pts, list) and len(pts) > 0 l_pt_prev = None for pt in pts: assert isinstance(pt, tuple) l_pt = len(pt) assert l_pt > 1 for i in pt: assert isinstance(i, float) if l_pt_prev is not None: assert l_pt == l_pt_prev l_pt_prev = l_pt assert isinstance(f, float) return [pt_scale(pt, f) for pt in pts]
[ "def", "pts_scale", "(", "pts", "=", "[", "]", ",", "f", "=", "1.0", ")", ":", "assert", "isinstance", "(", "pts", ",", "list", ")", "and", "len", "(", "pts", ")", ">", "0", "l_pt_prev", "=", "None", "for", "pt", "in", "pts", ":", "assert", "isinstance", "(", "pt", ",", "tuple", ")", "l_pt", "=", "len", "(", "pt", ")", "assert", "l_pt", ">", "1", "for", "i", "in", "pt", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "if", "l_pt_prev", "is", "not", "None", ":", "assert", "l_pt", "==", "l_pt_prev", "l_pt_prev", "=", "l_pt", "assert", "isinstance", "(", "f", ",", "float", ")", "return", "[", "pt_scale", "(", "pt", ",", "f", ")", "for", "pt", "in", "pts", "]" ]
Return given points scaled by factor f from origin.
[ "Return", "given", "points", "scaled", "by", "factor", "f", "from", "origin", "." ]
f1ea023d3e597160fc1e9e11921de07af659f9d2
https://github.com/DaveMcEwan/ndim/blob/f1ea023d3e597160fc1e9e11921de07af659f9d2/ndim_base.py#L361-L377
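pts_scale is the list-level wrapper: it re-checks that all points share one dimensionality, then maps pt_scale over them. Sketch:

from ndim_base import pts_scale  # assumed import path

tripled = pts_scale([(1.0, 0.0), (0.0, 1.0)], 3.0)
# -> [(3.0, 0.0), (0.0, 3.0)]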
241,566
DaveMcEwan/ndim
ndim_base.py
angle_diff
def angle_diff(start_a=[0.0], end_a=[0.0], direction=True): '''Return difference in angle from start_a to end_a. Direction follows the right-hand-rule so positive is counter-clockwise. ''' assert isinstance(start_a, list) assert isinstance(end_a, list) l_angle = len(start_a) assert l_angle > 0 assert l_angle == len(end_a) for i in start_a: assert isinstance(i, float) assert abs(i) <= 2*pi for i in end_a: assert isinstance(i, float) assert abs(i) <= 2*pi assert isinstance(direction, bool) # Convert True/False to 1/-1 inv = 2 * int(direction) - 1 diff = [end_a[i] - start_a[i] for i in range(l_angle)] diff = [(2*pi + d) if d < 0.0 else d for d in diff] return [d*inv for d in diff]
python
def angle_diff(start_a=[0.0], end_a=[0.0], direction=True): '''Return difference in angle from start_a to end_a. Direction follows the right-hand-rule so positive is counter-clockwise. ''' assert isinstance(start_a, list) assert isinstance(end_a, list) l_angle = len(start_a) assert l_angle > 0 assert l_angle == len(end_a) for i in start_a: assert isinstance(i, float) assert abs(i) <= 2*pi for i in end_a: assert isinstance(i, float) assert abs(i) <= 2*pi assert isinstance(direction, bool) # Convert True/False to 1/-1 inv = 2 * int(direction) - 1 diff = [end_a[i] - start_a[i] for i in range(l_angle)] diff = [(2*pi + d) if d < 0.0 else d for d in diff] return [d*inv for d in diff]
[ "def", "angle_diff", "(", "start_a", "=", "[", "0.0", "]", ",", "end_a", "=", "[", "0.0", "]", ",", "direction", "=", "True", ")", ":", "assert", "isinstance", "(", "start_a", ",", "list", ")", "assert", "isinstance", "(", "end_a", ",", "list", ")", "l_angle", "=", "len", "(", "start_a", ")", "assert", "l_angle", ">", "0", "assert", "l_angle", "==", "len", "(", "end_a", ")", "for", "i", "in", "start_a", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "assert", "abs", "(", "i", ")", "<=", "2", "*", "pi", "for", "i", "in", "end_a", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "assert", "abs", "(", "i", ")", "<=", "2", "*", "pi", "assert", "isinstance", "(", "direction", ",", "bool", ")", "# Convert True/False to 1/-1", "inv", "=", "2", "*", "int", "(", "direction", ")", "-", "1", "diff", "=", "[", "end_a", "[", "i", "]", "-", "start_a", "[", "i", "]", "for", "i", "in", "range", "(", "l_angle", ")", "]", "diff", "=", "[", "(", "2", "*", "pi", "+", "d", ")", "if", "d", "<", "0.0", "else", "d", "for", "d", "in", "diff", "]", "return", "[", "d", "*", "inv", "for", "d", "in", "diff", "]" ]
Return difference in angle from start_a to end_a. Direction follows the right-hand-rule so positive is counter-clockwise.
[ "Return", "difference", "in", "angle", "from", "start_a", "to", "end_a", ".", "Direction", "follows", "the", "right", "-", "hand", "-", "rule", "so", "positive", "is", "counter", "-", "clockwise", "." ]
f1ea023d3e597160fc1e9e11921de07af659f9d2
https://github.com/DaveMcEwan/ndim/blob/f1ea023d3e597160fc1e9e11921de07af659f9d2/ndim_base.py#L380-L402
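angle_diff operates on lists of angles in radians (each within plus or minus 2*pi) and adds 2*pi to any negative difference, so the result is always measured counter-clockwise unless direction=False flips the sign. A worked sketch:

from math import pi
from ndim_base import angle_diff  # assumed import path

# pi -> pi/2 measured counter-clockwise wraps the long way round: 3*pi/2
print(angle_diff([pi], [pi/2]))                   # [4.712...] == [3*pi/2]
# the same pair measured clockwise is simply negated
print(angle_diff([pi], [pi/2], direction=False))  # [-4.712...]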
241,567
DaveMcEwan/ndim
ndim_base.py
gen_polygon_pts
def gen_polygon_pts(n_pts=3, radius=[1.0]): '''Generate points for a polygon with a number of radiuses. This makes it easy to generate shapes with an arbitrary number of sides, regularly angled around the origin. A single radius will give a simple shape such as a square, hexagon, etc. Multiple radiuses will give complex shapes like stars, gear wheels, ratchet wheels, etc. ''' assert isinstance(n_pts, int) and n_pts > 0 assert isinstance(radius, list) l_rad = len(radius) assert l_rad > 0 for i in radius: assert isinstance(i, float) return [pt_rotate((radius[i % l_rad], 0.0), [i*2*pi/n_pts]) \ for i in range(n_pts)]
python
def gen_polygon_pts(n_pts=3, radius=[1.0]): '''Generate points for a polygon with a number of radiuses. This makes it easy to generate shapes with an arbitrary number of sides, regularly angled around the origin. A single radius will give a simple shape such as a square, hexagon, etc. Multiple radiuses will give complex shapes like stars, gear wheels, ratchet wheels, etc. ''' assert isinstance(n_pts, int) and n_pts > 0 assert isinstance(radius, list) l_rad = len(radius) assert l_rad > 0 for i in radius: assert isinstance(i, float) return [pt_rotate((radius[i % l_rad], 0.0), [i*2*pi/n_pts]) \ for i in range(n_pts)]
[ "def", "gen_polygon_pts", "(", "n_pts", "=", "3", ",", "radius", "=", "[", "1.0", "]", ")", ":", "assert", "isinstance", "(", "n_pts", ",", "int", ")", "and", "n_pts", ">", "0", "assert", "isinstance", "(", "radius", ",", "list", ")", "l_rad", "=", "len", "(", "radius", ")", "assert", "l_rad", ">", "0", "for", "i", "in", "radius", ":", "assert", "isinstance", "(", "i", ",", "float", ")", "return", "[", "pt_rotate", "(", "(", "radius", "[", "i", "%", "l_rad", "]", ",", "0.0", ")", ",", "[", "i", "*", "2", "*", "pi", "/", "n_pts", "]", ")", "for", "i", "in", "range", "(", "n_pts", ")", "]" ]
Generate points for a polygon with a number of radiuses. This makes it easy to generate shapes with an arbitrary number of sides, regularly angled around the origin. A single radius will give a simple shape such as a square, hexagon, etc. Multiple radiuses will give complex shapes like stars, gear wheels, ratchet wheels, etc.
[ "Generate", "points", "for", "a", "polygon", "with", "a", "number", "of", "radiuses", ".", "This", "makes", "it", "easy", "to", "generate", "shapes", "with", "an", "arbitrary", "number", "of", "sides", "regularly", "angled", "around", "the", "origin", ".", "A", "single", "radius", "will", "give", "a", "simple", "shape", "such", "as", "a", "square", "hexagon", "etc", ".", "Multiple", "radiuses", "will", "give", "complex", "shapes", "like", "stars", "gear", "wheels", "ratchet", "wheels", "etc", "." ]
f1ea023d3e597160fc1e9e11921de07af659f9d2
https://github.com/DaveMcEwan/ndim/blob/f1ea023d3e597160fc1e9e11921de07af659f9d2/ndim_base.py#L405-L421
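gen_polygon_pts rotates the point (radius[i % len(radius)], 0.0) by i*2*pi/n_pts for each vertex, so one radius gives a regular polygon and alternating radii give star-like outlines. Sketch (pt_rotate comes from the same module and is not shown in this excerpt; the claim about the output assumes it rotates about the origin):

from ndim_base import gen_polygon_pts  # assumed import path

hexagon = gen_polygon_pts(6, [1.0])      # 6 vertices evenly spaced on the unit circle
star = gen_polygon_pts(10, [1.0, 0.4])   # alternating outer/inner radii -> 5-pointed star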
241,568
redbridge/molnctrl
molnctrl/__init__.py
_add_params_docstring
def _add_params_docstring(params): """ Add params to doc string """ p_string = "\nAccepts the following paramters: \n" for param in params: p_string += "name: %s, required: %s, description: %s \n" % (param['name'], param['required'], param['description']) return p_string
python
def _add_params_docstring(params): """ Add params to doc string """ p_string = "\nAccepts the following paramters: \n" for param in params: p_string += "name: %s, required: %s, description: %s \n" % (param['name'], param['required'], param['description']) return p_string
[ "def", "_add_params_docstring", "(", "params", ")", ":", "p_string", "=", "\"\\nAccepts the following paramters: \\n\"", "for", "param", "in", "params", ":", "p_string", "+=", "\"name: %s, required: %s, description: %s \\n\"", "%", "(", "param", "[", "'name'", "]", ",", "param", "[", "'required'", "]", ",", "param", "[", "'description'", "]", ")", "return", "p_string" ]
Add params to doc string
[ "Add", "params", "to", "doc", "string" ]
9990ae7e522ce364bb61a735f774dc28de5f8e60
https://github.com/redbridge/molnctrl/blob/9990ae7e522ce364bb61a735f774dc28de5f8e60/molnctrl/__init__.py#L40-L46
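_add_params_docstring only expects each param dict to carry 'name', 'required' and 'description' keys, mirroring CloudStack's API listing. Sketch (the helper is module-private, so importing it directly is an assumption):

from molnctrl import _add_params_docstring  # module-private helper

params = [{'name': 'id', 'required': False, 'description': 'VM id'}]
print(_add_params_docstring(params))
# appends one "name: ..., required: ..., description: ..." line per param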
241,569
redbridge/molnctrl
molnctrl/__init__.py
_create_api_method
def _create_api_method(cls, name, api_method): """ Create dynamic class methods based on the Cloudmonkey precached_verbs """ def _api_method(self, **kwargs): # lookup the command command = api_method['name'] if kwargs: return self._make_request(command, kwargs) else: kwargs = {} return self._make_request(command, kwargs) _api_method.__doc__ = api_method['description'] _api_method.__doc__ += _add_params_docstring(api_method['params']) _api_method.__name__ = str(name) setattr(cls, _api_method.__name__, _api_method)
python
def _create_api_method(cls, name, api_method): """ Create dynamic class methods based on the Cloudmonkey precached_verbs """ def _api_method(self, **kwargs): # lookup the command command = api_method['name'] if kwargs: return self._make_request(command, kwargs) else: kwargs = {} return self._make_request(command, kwargs) _api_method.__doc__ = api_method['description'] _api_method.__doc__ += _add_params_docstring(api_method['params']) _api_method.__name__ = str(name) setattr(cls, _api_method.__name__, _api_method)
[ "def", "_create_api_method", "(", "cls", ",", "name", ",", "api_method", ")", ":", "def", "_api_method", "(", "self", ",", "*", "*", "kwargs", ")", ":", "# lookup the command", "command", "=", "api_method", "[", "'name'", "]", "if", "kwargs", ":", "return", "self", ".", "_make_request", "(", "command", ",", "kwargs", ")", "else", ":", "kwargs", "=", "{", "}", "return", "self", ".", "_make_request", "(", "command", ",", "kwargs", ")", "_api_method", ".", "__doc__", "=", "api_method", "[", "'description'", "]", "_api_method", ".", "__doc__", "+=", "_add_params_docstring", "(", "api_method", "[", "'params'", "]", ")", "_api_method", ".", "__name__", "=", "str", "(", "name", ")", "setattr", "(", "cls", ",", "_api_method", ".", "__name__", ",", "_api_method", ")" ]
Create dynamic class methods based on the Cloudmonkey precached_verbs
[ "Create", "dynamic", "class", "methods", "based", "on", "the", "Cloudmonkey", "precached_verbs" ]
9990ae7e522ce364bb61a735f774dc28de5f8e60
https://github.com/redbridge/molnctrl/blob/9990ae7e522ce364bb61a735f774dc28de5f8e60/molnctrl/__init__.py#L48-L62
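_create_api_method attaches a closure named after the verb to a class; the closure forwards keyword arguments to the instance's _make_request(command, kwargs), and its docstring is assembled from the CloudStack description plus the params listing above. A sketch with a stand-in class (the dict shape follows the keys the function reads; the concrete values are made up):

from molnctrl import _create_api_method  # module-private helper

class FakeConn(object):
    def _make_request(self, command, kwargs):
        return command, kwargs   # stand-in for the real HTTP call

api_method = {'name': 'listVirtualMachines',
              'description': 'Lists VMs',
              'params': [{'name': 'state', 'required': False, 'description': 'VM state'}]}
_create_api_method(FakeConn, 'listvirtualmachines', api_method)
print(FakeConn().listvirtualmachines(state='Running'))
# -> ('listVirtualMachines', {'state': 'Running'})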
241,570
deviantony/valigator
valigator/valigator.py
validate
def validate(backup): """Use this endpoint to start a backup validation. You must specify the backup type in the endpoint. Specify JSON data for backup archive info. { 'archive_path': '/path/to/archive' } Data must be valid, otherwise it will abort with a 400 code. First, it will try to search for an existing extension definition in the configuration file. If no matching extension is found, it will abort with a 404 code. It will then plan the backup validation by sending a message to the broker. """ data = request.json if not data: abort(400, 'No data received') try: archive_path = data['archive_path'] except KeyError: abort(400, 'Missing key \'archive_path\' in data') try: config['extension'][backup] except KeyError: abort(404, 'No extension configuration found for: {}'.format(backup)) workdir = ''.join([config['valigator']['tmp_dir'], '/', generate_uuid()]) backup_data = {'archive_path': archive_path, 'workdir': workdir, 'image': config['extension'][backup]['image'], 'command': config['extension'][backup]['command']} validate_backup.delay(config, backup_data)
python
def validate(backup): """Use this endpoint to start a backup validation. You must specify the backup type in the endpoint. Specify JSON data for backup archive info. { 'archive_path': '/path/to/archive' } Data must be valid, otherwise it will abort with a 400 code. First, it will try to search for an existing extension definition in the configuration file. If no matching extension is found, it will abort with a 404 code. It will then plan the backup validation by sending a message to the broker. """ data = request.json if not data: abort(400, 'No data received') try: archive_path = data['archive_path'] except KeyError: abort(400, 'Missing key \'archive_path\' in data') try: config['extension'][backup] except KeyError: abort(404, 'No extension configuration found for: {}'.format(backup)) workdir = ''.join([config['valigator']['tmp_dir'], '/', generate_uuid()]) backup_data = {'archive_path': archive_path, 'workdir': workdir, 'image': config['extension'][backup]['image'], 'command': config['extension'][backup]['command']} validate_backup.delay(config, backup_data)
[ "def", "validate", "(", "backup", ")", ":", "data", "=", "request", ".", "json", "if", "not", "data", ":", "abort", "(", "400", ",", "'No data received'", ")", "try", ":", "archive_path", "=", "data", "[", "'archive_path'", "]", "except", "KeyError", ":", "abort", "(", "400", ",", "'Missing key \\'archive_path\\' in data'", ")", "try", ":", "config", "[", "'extension'", "]", "[", "backup", "]", "except", "KeyError", ":", "abort", "(", "404", ",", "'No extension configuration found for: {}'", ".", "format", "(", "backup", ")", ")", "workdir", "=", "''", ".", "join", "(", "[", "config", "[", "'valigator'", "]", "[", "'tmp_dir'", "]", ",", "'/'", ",", "generate_uuid", "(", ")", "]", ")", "backup_data", "=", "{", "'archive_path'", ":", "archive_path", ",", "'workdir'", ":", "workdir", ",", "'image'", ":", "config", "[", "'extension'", "]", "[", "backup", "]", "[", "'image'", "]", ",", "'command'", ":", "config", "[", "'extension'", "]", "[", "backup", "]", "[", "'command'", "]", "}", "validate_backup", ".", "delay", "(", "config", ",", "backup_data", ")" ]
Use this endpoint to start a backup validation. You must specify the backup type in the endpoint. Specify JSON data for backup archive info. { 'archive_path': '/path/to/archive' } Data must be valid, otherwise it will abort with a 400 code. First, it will try to search for an existing extension definition in the configuration file. If no matching extension is found, it will abort with a 404 code. It will then plan the backup validation by sending a message to the broker.
[ "Use", "this", "endpoint", "to", "start", "a", "backup", "validation", ".", "You", "must", "specify", "the", "backup", "type", "in", "the", "endpoint", ".", "Specify", "JSON", "data", "for", "backup", "archive", "info", "." ]
0557029bc58ea1270e358c14ca382d3807ed5b6f
https://github.com/deviantony/valigator/blob/0557029bc58ea1270e358c14ca382d3807ed5b6f/valigator/valigator.py#L10-L47
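The valigator validate handler reads only a handful of keys, so a matching configuration needs config['valigator']['tmp_dir'] plus one config['extension'][<backup type>] entry with 'image' and 'command', and the request body must be JSON containing 'archive_path'. The URL route is not shown in this excerpt, so the path below is an assumption:

import requests

# hypothetical request against a running valigator instance
requests.post('http://localhost:8080/validate/mysql',   # route is assumed
              json={'archive_path': '/backups/db-2016-01-01.tar.gz'})
# 400 if the body or 'archive_path' is missing, 404 if config['extension']['mysql']
# does not exist, otherwise a validate_backup Celery task is queued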
241,571
deviantony/valigator
valigator/valigator.py
main
def main(conf): """Main function, entry point of the program.""" global config config = load_configuration(conf) app.conf.update(config['celery']) run(host=config['valigator']['bind'], port=config['valigator']['port'])
python
def main(conf): """Main function, entry point of the program.""" global config config = load_configuration(conf) app.conf.update(config['celery']) run(host=config['valigator']['bind'], port=config['valigator']['port'])
[ "def", "main", "(", "conf", ")", ":", "global", "config", "config", "=", "load_configuration", "(", "conf", ")", "app", ".", "conf", ".", "update", "(", "config", "[", "'celery'", "]", ")", "run", "(", "host", "=", "config", "[", "'valigator'", "]", "[", "'bind'", "]", ",", "port", "=", "config", "[", "'valigator'", "]", "[", "'port'", "]", ")" ]
Main function, entry point of the program.
[ "Main", "function", "entry", "point", "of", "the", "program", "." ]
0557029bc58ea1270e358c14ca382d3807ed5b6f
https://github.com/deviantony/valigator/blob/0557029bc58ea1270e358c14ca382d3807ed5b6f/valigator/valigator.py#L54-L59
241,572
cdeboever3/cdpybio
cdpybio/variants.py
record_variant_id
def record_variant_id(record): """Get variant ID from pyvcf.model._Record""" if record.ID: return record.ID else: return record.CHROM + ':' + str(record.POS)
python
def record_variant_id(record): """Get variant ID from pyvcf.model._Record""" if record.ID: return record.ID else: return record.CHROM + ':' + str(record.POS)
[ "def", "record_variant_id", "(", "record", ")", ":", "if", "record", ".", "ID", ":", "return", "record", ".", "ID", "else", ":", "return", "record", ".", "CHROM", "+", "':'", "+", "str", "(", "record", ".", "POS", ")" ]
Get variant ID from pyvcf.model._Record
[ "Get", "variant", "ID", "from", "pyvcf", ".", "model", ".", "_Record" ]
38efdf0e11d01bc00a135921cb91a19c03db5d5c
https://github.com/cdeboever3/cdpybio/blob/38efdf0e11d01bc00a135921cb91a19c03db5d5c/cdpybio/variants.py#L7-L12
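record_variant_id falls back to "CHROM:POS" whenever the record has no ID, so any object exposing ID, CHROM and POS works for a quick check. Sketch using a stand-in object instead of a real pyvcf record:

from collections import namedtuple
from cdpybio.variants import record_variant_id  # assumed import path

Rec = namedtuple('Rec', ['ID', 'CHROM', 'POS'])
print(record_variant_id(Rec('rs123', 'chr1', 1000)))   # 'rs123'
print(record_variant_id(Rec(None, 'chr1', 1000)))      # 'chr1:1000'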
241,573
cdeboever3/cdpybio
cdpybio/variants.py
wasp_snp_directory
def wasp_snp_directory(vcf, directory, sample_name=None): """ Convert VCF file into input for WASP. Only bi-allelic heterozygous sites are used. Parameters: ----------- vcf : str Path to VCF file. directory : str Output directory. This is the directory that will hold the files for WASP. sample_name : str If provided, use this sample name to get heterozygous SNPs from VCF file. """ chrom = [] pos = [] ref = [] alt = [] vcf_reader = pyvcf.Reader(open(vcf, 'r')) if sample_name: def condition(record, sample_name): return sample_name in [x.sample for x in record.get_hets()] else: def condition(record, sample_name): return len(record.get_hets()) > 0 for record in vcf_reader: if condition(record, sample_name): if len(record.ALT) == 1: chrom.append(record.CHROM) pos.append(record.POS) ref.append(record.REF) alt.append(record.ALT[0].sequence) df = pd.DataFrame([chrom, pos, ref, alt], index=['chrom', 'position', 'RefAllele', 'AltAllele']).T if not os.path.exists(directory): os.makedirs(directory) for c in set(df.chrom): tdf = df[df.chrom == c] if tdf.shape[0] > 0: f = gzip.open(os.path.join(directory, '{}.snps.txt.gz'.format(c)), 'wb') lines = (tdf.position.astype(str) + '\t' + tdf.RefAllele + '\t' + tdf.AltAllele) f.write('\n'.join(lines) + '\n') f.close()
python
def wasp_snp_directory(vcf, directory, sample_name=None): """ Convert VCF file into input for WASP. Only bi-allelic heterozygous sites are used. Parameters: ----------- vcf : str Path to VCF file. directory : str Output directory. This is the directory that will hold the files for WASP. sample_name : str If provided, use this sample name to get heterozygous SNPs from VCF file. """ chrom = [] pos = [] ref = [] alt = [] vcf_reader = pyvcf.Reader(open(vcf, 'r')) if sample_name: def condition(record, sample_name): return sample_name in [x.sample for x in record.get_hets()] else: def condition(record, sample_name): return len(record.get_hets()) > 0 for record in vcf_reader: if condition(record, sample_name): if len(record.ALT) == 1: chrom.append(record.CHROM) pos.append(record.POS) ref.append(record.REF) alt.append(record.ALT[0].sequence) df = pd.DataFrame([chrom, pos, ref, alt], index=['chrom', 'position', 'RefAllele', 'AltAllele']).T if not os.path.exists(directory): os.makedirs(directory) for c in set(df.chrom): tdf = df[df.chrom == c] if tdf.shape[0] > 0: f = gzip.open(os.path.join(directory, '{}.snps.txt.gz'.format(c)), 'wb') lines = (tdf.position.astype(str) + '\t' + tdf.RefAllele + '\t' + tdf.AltAllele) f.write('\n'.join(lines) + '\n') f.close()
[ "def", "wasp_snp_directory", "(", "vcf", ",", "directory", ",", "sample_name", "=", "None", ")", ":", "chrom", "=", "[", "]", "pos", "=", "[", "]", "ref", "=", "[", "]", "alt", "=", "[", "]", "vcf_reader", "=", "pyvcf", ".", "Reader", "(", "open", "(", "vcf", ",", "'r'", ")", ")", "if", "sample_name", ":", "def", "condition", "(", "record", ",", "sample_name", ")", ":", "return", "sample_name", "in", "[", "x", ".", "sample", "for", "x", "in", "record", ".", "get_hets", "(", ")", "]", "else", ":", "def", "condition", "(", "record", ",", "sample_name", ")", ":", "return", "len", "(", "record", ".", "get_hets", "(", ")", ")", ">", "0", "for", "record", "in", "vcf_reader", ":", "if", "condition", "(", "record", ",", "sample_name", ")", ":", "if", "len", "(", "record", ".", "ALT", ")", "==", "1", ":", "chrom", ".", "append", "(", "record", ".", "CHROM", ")", "pos", ".", "append", "(", "record", ".", "POS", ")", "ref", ".", "append", "(", "record", ".", "REF", ")", "alt", ".", "append", "(", "record", ".", "ALT", "[", "0", "]", ".", "sequence", ")", "df", "=", "pd", ".", "DataFrame", "(", "[", "chrom", ",", "pos", ",", "ref", ",", "alt", "]", ",", "index", "=", "[", "'chrom'", ",", "'position'", ",", "'RefAllele'", ",", "'AltAllele'", "]", ")", ".", "T", "if", "not", "os", ".", "path", ".", "exists", "(", "directory", ")", ":", "os", ".", "makedirs", "(", "directory", ")", "for", "c", "in", "set", "(", "df", ".", "chrom", ")", ":", "tdf", "=", "df", "[", "df", ".", "chrom", "==", "c", "]", "if", "tdf", ".", "shape", "[", "0", "]", ">", "0", ":", "f", "=", "gzip", ".", "open", "(", "os", ".", "path", ".", "join", "(", "directory", ",", "'{}.snps.txt.gz'", ".", "format", "(", "c", ")", ")", ",", "'wb'", ")", "lines", "=", "(", "tdf", ".", "position", ".", "astype", "(", "str", ")", "+", "'\\t'", "+", "tdf", ".", "RefAllele", "+", "'\\t'", "+", "tdf", ".", "AltAllele", ")", "f", ".", "write", "(", "'\\n'", ".", "join", "(", "lines", ")", "+", "'\\n'", ")", "f", ".", "close", "(", ")" ]
Convert VCF file into input for WASP. Only bi-allelic heterozygous sites are used. Parameters: ----------- vcf : str Path to VCF file. directory : str Output directory. This is the directory that will hold the files for WASP. sample_name : str If provided, use this sample name to get heterozygous SNPs from VCF file.
[ "Convert", "VCF", "file", "into", "input", "for", "WASP", ".", "Only", "bi", "-", "allelic", "heterozygous", "sites", "are", "used", "." ]
38efdf0e11d01bc00a135921cb91a19c03db5d5c
https://github.com/cdeboever3/cdpybio/blob/38efdf0e11d01bc00a135921cb91a19c03db5d5c/cdpybio/variants.py#L32-L81
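wasp_snp_directory walks the VCF once, keeps only bi-allelic heterozygous sites (optionally restricted to one sample), and writes a gzipped "<chrom>.snps.txt.gz" per chromosome with tab-separated position, ref and alt columns, the layout WASP expects. Sketch (paths and sample name are placeholders):

from cdpybio.variants import wasp_snp_directory  # assumed import path

wasp_snp_directory('cohort.vcf', 'wasp_snps/', sample_name='NA12878')
# wasp_snps/chr1.snps.txt.gz, wasp_snps/chr2.snps.txt.gz, ... one "pos\tref\talt" line per site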
241,574
cdeboever3/cdpybio
cdpybio/variants.py
vcf_as_df
def vcf_as_df(fn): """ Read VCF file into pandas DataFrame. Parameters: ----------- fn : str Path to VCF file. Returns ------- df : pandas.DataFrame The VCF file as a data frame. Note that all header information is thrown away. """ header_lines = 0 with open(fn, 'r') as f: line = f.readline().strip() header_lines += 1 while line[0] == '#': line = f.readline().strip() header_lines += 1 header_lines -= 2 df = pd.read_table(fn, skiprows=header_lines, header=0) df.columns = ['CHROM'] + list(df.columns[1:]) return df
python
def vcf_as_df(fn): """ Read VCF file into pandas DataFrame. Parameters: ----------- fn : str Path to VCF file. Returns ------- df : pandas.DataFrame The VCF file as a data frame. Note that all header information is thrown away. """ header_lines = 0 with open(fn, 'r') as f: line = f.readline().strip() header_lines += 1 while line[0] == '#': line = f.readline().strip() header_lines += 1 header_lines -= 2 df = pd.read_table(fn, skiprows=header_lines, header=0) df.columns = ['CHROM'] + list(df.columns[1:]) return df
[ "def", "vcf_as_df", "(", "fn", ")", ":", "header_lines", "=", "0", "with", "open", "(", "fn", ",", "'r'", ")", "as", "f", ":", "line", "=", "f", ".", "readline", "(", ")", ".", "strip", "(", ")", "header_lines", "+=", "1", "while", "line", "[", "0", "]", "==", "'#'", ":", "line", "=", "f", ".", "readline", "(", ")", ".", "strip", "(", ")", "header_lines", "+=", "1", "header_lines", "-=", "2", "df", "=", "pd", ".", "read_table", "(", "fn", ",", "skiprows", "=", "header_lines", ",", "header", "=", "0", ")", "df", ".", "columns", "=", "[", "'CHROM'", "]", "+", "list", "(", "df", ".", "columns", "[", "1", ":", "]", ")", "return", "df" ]
Read VCF file into pandas DataFrame. Parameters: ----------- fn : str Path to VCF file. Returns ------- df : pandas.DataFrame The VCF file as a data frame. Note that all header information is thrown away.
[ "Read", "VCF", "file", "into", "pandas", "DataFrame", "." ]
38efdf0e11d01bc00a135921cb91a19c03db5d5c
https://github.com/cdeboever3/cdpybio/blob/38efdf0e11d01bc00a135921cb91a19c03db5d5c/cdpybio/variants.py#L83-L110
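vcf_as_df counts the leading '#' lines to find the column header, hands the rest to pandas, discards all header metadata and renames the first column from '#CHROM' to 'CHROM'. Sketch:

from cdpybio.variants import vcf_as_df  # assumed import path

df = vcf_as_df('cohort.vcf')
print(df.columns[:5].tolist())   # ['CHROM', 'POS', 'ID', 'REF', 'ALT'] for a standard VCF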
241,575
cdeboever3/cdpybio
cdpybio/variants.py
make_het_matrix
def make_het_matrix(fn): """ Make boolean matrix of samples by variants. One indicates that the sample is heterozygous for that variant. Parameters: ----------- vcf : str Path to VCF file. """ # TODO: parallelize? vcf_df = vcf_as_df(fn) variant_ids = vcf_df.apply(lambda x: df_variant_id(x), axis=1) vcf_reader = pyvcf.Reader(open(fn, 'r')) record = vcf_reader.next() hets = pd.DataFrame(0, index=variant_ids, columns=[x.sample for x in record.samples]) vcf_reader = pyvcf.Reader(open(fn, 'r')) for record in vcf_reader: h = record.get_hets() i = record_variant_id(record) hets.ix[i, [x.sample for x in h]] = 1 return hets
python
def make_het_matrix(fn): """ Make boolean matrix of samples by variants. One indicates that the sample is heterozygous for that variant. Parameters: ----------- vcf : str Path to VCF file. """ # TODO: parallelize? vcf_df = vcf_as_df(fn) variant_ids = vcf_df.apply(lambda x: df_variant_id(x), axis=1) vcf_reader = pyvcf.Reader(open(fn, 'r')) record = vcf_reader.next() hets = pd.DataFrame(0, index=variant_ids, columns=[x.sample for x in record.samples]) vcf_reader = pyvcf.Reader(open(fn, 'r')) for record in vcf_reader: h = record.get_hets() i = record_variant_id(record) hets.ix[i, [x.sample for x in h]] = 1 return hets
[ "def", "make_het_matrix", "(", "fn", ")", ":", "# TODO: parallelize?", "vcf_df", "=", "vcf_as_df", "(", "fn", ")", "variant_ids", "=", "vcf_df", ".", "apply", "(", "lambda", "x", ":", "df_variant_id", "(", "x", ")", ",", "axis", "=", "1", ")", "vcf_reader", "=", "pyvcf", ".", "Reader", "(", "open", "(", "fn", ",", "'r'", ")", ")", "record", "=", "vcf_reader", ".", "next", "(", ")", "hets", "=", "pd", ".", "DataFrame", "(", "0", ",", "index", "=", "variant_ids", ",", "columns", "=", "[", "x", ".", "sample", "for", "x", "in", "record", ".", "samples", "]", ")", "vcf_reader", "=", "pyvcf", ".", "Reader", "(", "open", "(", "fn", ",", "'r'", ")", ")", "for", "record", "in", "vcf_reader", ":", "h", "=", "record", ".", "get_hets", "(", ")", "i", "=", "record_variant_id", "(", "record", ")", "hets", ".", "ix", "[", "i", ",", "[", "x", ".", "sample", "for", "x", "in", "h", "]", "]", "=", "1", "return", "hets" ]
Make boolean matrix of samples by variants. One indicates that the sample is heterozygous for that variant. Parameters: ----------- vcf : str Path to VCF file.
[ "Make", "boolean", "matrix", "of", "samples", "by", "variants", ".", "One", "indicates", "that", "the", "sample", "is", "heterozygous", "for", "that", "variant", "." ]
38efdf0e11d01bc00a135921cb91a19c03db5d5c
https://github.com/cdeboever3/cdpybio/blob/38efdf0e11d01bc00a135921cb91a19c03db5d5c/cdpybio/variants.py#L112-L137
241,576
frascoweb/frasco-users
frasco_users/__init__.py
UsersFeature.current
def current(self): """Returns the current user """ if not has_request_context(): return self.no_req_ctx_user_stack.top user_stack = getattr(_request_ctx_stack.top, 'user_stack', None) if user_stack and user_stack.top: return user_stack.top return _get_user()
python
def current(self): """Returns the current user """ if not has_request_context(): return self.no_req_ctx_user_stack.top user_stack = getattr(_request_ctx_stack.top, 'user_stack', None) if user_stack and user_stack.top: return user_stack.top return _get_user()
[ "def", "current", "(", "self", ")", ":", "if", "not", "has_request_context", "(", ")", ":", "return", "self", ".", "no_req_ctx_user_stack", ".", "top", "user_stack", "=", "getattr", "(", "_request_ctx_stack", ".", "top", ",", "'user_stack'", ",", "None", ")", "if", "user_stack", "and", "user_stack", ".", "top", ":", "return", "user_stack", ".", "top", "return", "_get_user", "(", ")" ]
Returns the current user
[ "Returns", "the", "current", "user" ]
16591ca466de5b7c80d7a2384327d9cf2d919c41
https://github.com/frascoweb/frasco-users/blob/16591ca466de5b7c80d7a2384327d9cf2d919c41/frasco_users/__init__.py#L233-L241
241,577
frascoweb/frasco-users
frasco_users/__init__.py
UsersFeature.generate_user_token
def generate_user_token(self, user, salt=None): """Generates a unique token associated to the user """ return self.token_serializer.dumps(str(user.id), salt=salt)
python
def generate_user_token(self, user, salt=None): """Generates a unique token associated to the user """ return self.token_serializer.dumps(str(user.id), salt=salt)
[ "def", "generate_user_token", "(", "self", ",", "user", ",", "salt", "=", "None", ")", ":", "return", "self", ".", "token_serializer", ".", "dumps", "(", "str", "(", "user", ".", "id", ")", ",", "salt", "=", "salt", ")" ]
Generates a unique token associated to the user
[ "Generates", "a", "unique", "token", "associated", "to", "the", "user" ]
16591ca466de5b7c80d7a2384327d9cf2d919c41
https://github.com/frascoweb/frasco-users/blob/16591ca466de5b7c80d7a2384327d9cf2d919c41/frasco_users/__init__.py#L285-L288
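generate_user_token just serialises the user's id with the feature's token serializer and an optional salt; the reset_password record further down resolves such tokens with find_by_token using the same salt. Sketch (`users` stands for the UsersFeature instance, which is an assumption about how the app exposes it):

token = users.generate_user_token(user, salt='password-reset')
# later: users.find_by_token(token, salt='password-reset',
#                            max_age=users.options['reset_password_ttl'])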
241,578
frascoweb/frasco-users
frasco_users/__init__.py
UsersFeature.update_password
def update_password(self, user, password, skip_validation=False): """Updates the password of a user """ pwcol = self.options["password_column"] pwhash = self.bcrypt.generate_password_hash(password) if not skip_validation: self.validate_password(user, password, pwhash) if self.options['prevent_password_reuse']: user.previous_passwords = [getattr(user, pwcol)] + (user.previous_passwords or []) if self.options['max_password_reuse_saved']: user.previous_passwords = user.previous_passwords[:self.options['max_password_reuse_saved']] setattr(user, pwcol, pwhash) user.last_password_change_at = datetime.datetime.utcnow() user.must_reset_password_at_login = False
python
def update_password(self, user, password, skip_validation=False): """Updates the password of a user """ pwcol = self.options["password_column"] pwhash = self.bcrypt.generate_password_hash(password) if not skip_validation: self.validate_password(user, password, pwhash) if self.options['prevent_password_reuse']: user.previous_passwords = [getattr(user, pwcol)] + (user.previous_passwords or []) if self.options['max_password_reuse_saved']: user.previous_passwords = user.previous_passwords[:self.options['max_password_reuse_saved']] setattr(user, pwcol, pwhash) user.last_password_change_at = datetime.datetime.utcnow() user.must_reset_password_at_login = False
[ "def", "update_password", "(", "self", ",", "user", ",", "password", ",", "skip_validation", "=", "False", ")", ":", "pwcol", "=", "self", ".", "options", "[", "\"password_column\"", "]", "pwhash", "=", "self", ".", "bcrypt", ".", "generate_password_hash", "(", "password", ")", "if", "not", "skip_validation", ":", "self", ".", "validate_password", "(", "user", ",", "password", ",", "pwhash", ")", "if", "self", ".", "options", "[", "'prevent_password_reuse'", "]", ":", "user", ".", "previous_passwords", "=", "[", "getattr", "(", "user", ",", "pwcol", ")", "]", "+", "(", "user", ".", "previous_passwords", "or", "[", "]", ")", "if", "self", ".", "options", "[", "'max_password_reuse_saved'", "]", ":", "user", ".", "previous_passwords", "=", "user", ".", "previous_passwords", "[", ":", "self", ".", "options", "[", "'max_password_reuse_saved'", "]", "]", "setattr", "(", "user", ",", "pwcol", ",", "pwhash", ")", "user", ".", "last_password_change_at", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", "user", ".", "must_reset_password_at_login", "=", "False" ]
Updates the password of a user
[ "Updates", "the", "password", "of", "a", "user" ]
16591ca466de5b7c80d7a2384327d9cf2d919c41
https://github.com/frascoweb/frasco-users/blob/16591ca466de5b7c80d7a2384327d9cf2d919c41/frasco_users/__init__.py#L339-L352
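update_password hashes with bcrypt, optionally records the previous hash (prevent_password_reuse / max_password_reuse_saved options) and clears must_reset_password_at_login; note that in this snippet it mutates the user but does not call save_model, unlike update_password_from_form below. Sketch (`users` and save_model are assumed to be in scope):

users.update_password(user, 'n3w-s3cret')   # validates unless skip_validation=True
save_model(user)                             # persistence is left to the caller here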
241,579
frascoweb/frasco-users
frasco_users/__init__.py
UsersFeature.login_required
def login_required(self, fresh=False, redirect_to=None): """Ensures that a user is authenticated """ if not self.logged_in() or (fresh and not self.login_manager.login_fresh()): if redirect_to: resp = redirect(redirect_to) else: resp = self.login_manager.unauthorized() current_context.exit(resp, trigger_action_group="missing_user")
python
def login_required(self, fresh=False, redirect_to=None): """Ensures that a user is authenticated """ if not self.logged_in() or (fresh and not self.login_manager.login_fresh()): if redirect_to: resp = redirect(redirect_to) else: resp = self.login_manager.unauthorized() current_context.exit(resp, trigger_action_group="missing_user")
[ "def", "login_required", "(", "self", ",", "fresh", "=", "False", ",", "redirect_to", "=", "None", ")", ":", "if", "not", "self", ".", "logged_in", "(", ")", "or", "(", "fresh", "and", "not", "self", ".", "login_manager", ".", "login_fresh", "(", ")", ")", ":", "if", "redirect_to", ":", "resp", "=", "redirect", "(", "redirect_to", ")", "else", ":", "resp", "=", "self", ".", "login_manager", ".", "unauthorized", "(", ")", "current_context", ".", "exit", "(", "resp", ",", "trigger_action_group", "=", "\"missing_user\"", ")" ]
Ensures that a user is authenticated
[ "Ensures", "that", "a", "user", "is", "authenticated" ]
16591ca466de5b7c80d7a2384327d9cf2d919c41
https://github.com/frascoweb/frasco-users/blob/16591ca466de5b7c80d7a2384327d9cf2d919c41/frasco_users/__init__.py#L364-L372
241,580
frascoweb/frasco-users
frasco_users/__init__.py
UsersFeature._login
def _login(self, user, provider=None, remember=False, force=False, **attrs): """Updates user attributes and login the user in flask-login """ user.last_login_at = datetime.datetime.now() user.last_login_provider = provider or self.options["default_auth_provider_name"] user.last_login_from = request.remote_addr populate_obj(user, attrs) save_model(user) flask_login.login_user(user, remember=remember, force=force)
python
def _login(self, user, provider=None, remember=False, force=False, **attrs): """Updates user attributes and login the user in flask-login """ user.last_login_at = datetime.datetime.now() user.last_login_provider = provider or self.options["default_auth_provider_name"] user.last_login_from = request.remote_addr populate_obj(user, attrs) save_model(user) flask_login.login_user(user, remember=remember, force=force)
[ "def", "_login", "(", "self", ",", "user", ",", "provider", "=", "None", ",", "remember", "=", "False", ",", "force", "=", "False", ",", "*", "*", "attrs", ")", ":", "user", ".", "last_login_at", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "user", ".", "last_login_provider", "=", "provider", "or", "self", ".", "options", "[", "\"default_auth_provider_name\"", "]", "user", ".", "last_login_from", "=", "request", ".", "remote_addr", "populate_obj", "(", "user", ",", "attrs", ")", "save_model", "(", "user", ")", "flask_login", ".", "login_user", "(", "user", ",", "remember", "=", "remember", ",", "force", "=", "force", ")" ]
Updates user attributes and login the user in flask-login
[ "Updates", "user", "attributes", "and", "login", "the", "user", "in", "flask", "-", "login" ]
16591ca466de5b7c80d7a2384327d9cf2d919c41
https://github.com/frascoweb/frasco-users/blob/16591ca466de5b7c80d7a2384327d9cf2d919c41/frasco_users/__init__.py#L431-L439
241,581
frascoweb/frasco-users
frasco_users/__init__.py
UsersFeature.check_password_confirm
def check_password_confirm(self, form, trigger_action_group=None): """Checks that the password and the confirm password match in the provided form. Won't do anything if any of the password fields are not in the form. """ pwcol = self.options['password_column'] pwconfirmfield = pwcol + "_confirm" if pwcol in form and pwconfirmfield in form and form[pwconfirmfield].data != form[pwcol].data: if self.options["password_confirm_failed_message"]: flash(self.options["password_confirm_failed_message"], "error") current_context.exit(trigger_action_group=trigger_action_group)
python
def check_password_confirm(self, form, trigger_action_group=None): """Checks that the password and the confirm password match in the provided form. Won't do anything if any of the password fields are not in the form. """ pwcol = self.options['password_column'] pwconfirmfield = pwcol + "_confirm" if pwcol in form and pwconfirmfield in form and form[pwconfirmfield].data != form[pwcol].data: if self.options["password_confirm_failed_message"]: flash(self.options["password_confirm_failed_message"], "error") current_context.exit(trigger_action_group=trigger_action_group)
[ "def", "check_password_confirm", "(", "self", ",", "form", ",", "trigger_action_group", "=", "None", ")", ":", "pwcol", "=", "self", ".", "options", "[", "'password_column'", "]", "pwconfirmfield", "=", "pwcol", "+", "\"_confirm\"", "if", "pwcol", "in", "form", "and", "pwconfirmfield", "in", "form", "and", "form", "[", "pwconfirmfield", "]", ".", "data", "!=", "form", "[", "pwcol", "]", ".", "data", ":", "if", "self", ".", "options", "[", "\"password_confirm_failed_message\"", "]", ":", "flash", "(", "self", ".", "options", "[", "\"password_confirm_failed_message\"", "]", ",", "\"error\"", ")", "current_context", ".", "exit", "(", "trigger_action_group", "=", "trigger_action_group", ")" ]
Checks that the password and the confirm password match in the provided form. Won't do anything if any of the password fields are not in the form.
[ "Checks", "that", "the", "password", "and", "the", "confirm", "password", "match", "in", "the", "provided", "form", ".", "Won", "t", "do", "anything", "if", "any", "of", "the", "password", "fields", "are", "not", "in", "the", "form", "." ]
16591ca466de5b7c80d7a2384327d9cf2d919c41
https://github.com/frascoweb/frasco-users/blob/16591ca466de5b7c80d7a2384327d9cf2d919c41/frasco_users/__init__.py#L528-L538
241,582
frascoweb/frasco-users
frasco_users/__init__.py
UsersFeature.reset_password
def reset_password(self, token=None, login_user=None): """Resets the password of the user identified by the token """ pwcol = self.options['password_column'] if not token: if "token" in request.view_args: token = request.view_args["token"] elif "token" in request.values: token = request.values["token"] else: raise OptionMissingError(("Missing 'token' option or 'token' view arg " "or 'token' GET paramater in 'reset_password' action")) user = self.find_by_token(token, salt="password-reset", max_age=self.options["reset_password_ttl"]) if user is None: if self.options["reset_password_error_message"]: flash(self.options["reset_password_error_message"], "error") current_context.exit(trigger_action_group="reset_password_failed") self.update_password_from_form(user) self.reset_password_signal.send(self, user=user) if (login_user is None and self.options["login_user_on_reset_password"]) or login_user: flask_login.login_user(user) return user
python
def reset_password(self, token=None, login_user=None): """Resets the password of the user identified by the token """ pwcol = self.options['password_column'] if not token: if "token" in request.view_args: token = request.view_args["token"] elif "token" in request.values: token = request.values["token"] else: raise OptionMissingError(("Missing 'token' option or 'token' view arg " "or 'token' GET paramater in 'reset_password' action")) user = self.find_by_token(token, salt="password-reset", max_age=self.options["reset_password_ttl"]) if user is None: if self.options["reset_password_error_message"]: flash(self.options["reset_password_error_message"], "error") current_context.exit(trigger_action_group="reset_password_failed") self.update_password_from_form(user) self.reset_password_signal.send(self, user=user) if (login_user is None and self.options["login_user_on_reset_password"]) or login_user: flask_login.login_user(user) return user
[ "def", "reset_password", "(", "self", ",", "token", "=", "None", ",", "login_user", "=", "None", ")", ":", "pwcol", "=", "self", ".", "options", "[", "'password_column'", "]", "if", "not", "token", ":", "if", "\"token\"", "in", "request", ".", "view_args", ":", "token", "=", "request", ".", "view_args", "[", "\"token\"", "]", "elif", "\"token\"", "in", "request", ".", "values", ":", "token", "=", "request", ".", "values", "[", "\"token\"", "]", "else", ":", "raise", "OptionMissingError", "(", "(", "\"Missing 'token' option or 'token' view arg \"", "\"or 'token' GET paramater in 'reset_password' action\"", ")", ")", "user", "=", "self", ".", "find_by_token", "(", "token", ",", "salt", "=", "\"password-reset\"", ",", "max_age", "=", "self", ".", "options", "[", "\"reset_password_ttl\"", "]", ")", "if", "user", "is", "None", ":", "if", "self", ".", "options", "[", "\"reset_password_error_message\"", "]", ":", "flash", "(", "self", ".", "options", "[", "\"reset_password_error_message\"", "]", ",", "\"error\"", ")", "current_context", ".", "exit", "(", "trigger_action_group", "=", "\"reset_password_failed\"", ")", "self", ".", "update_password_from_form", "(", "user", ")", "self", ".", "reset_password_signal", ".", "send", "(", "self", ",", "user", "=", "user", ")", "if", "(", "login_user", "is", "None", "and", "self", ".", "options", "[", "\"login_user_on_reset_password\"", "]", ")", "or", "login_user", ":", "flask_login", ".", "login_user", "(", "user", ")", "return", "user" ]
Resets the password of the user identified by the token
[ "Resets", "the", "password", "of", "the", "user", "identified", "by", "the", "token" ]
16591ca466de5b7c80d7a2384327d9cf2d919c41
https://github.com/frascoweb/frasco-users/blob/16591ca466de5b7c80d7a2384327d9cf2d919c41/frasco_users/__init__.py#L695-L718
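reset_password takes the token explicitly or pulls it from the view args / query string, resolves the user with salt "password-reset" and the reset_password_ttl option, updates the password from the submitted form, and optionally logs the user in. A view-level sketch (the route, `app` and `users` objects are assumptions; a bad or expired token flashes an error and exits the request context):

from flask import redirect, url_for

@app.route('/reset-password/<token>', methods=['POST'])
def reset_password_view(token):
    users.reset_password(token=token)
    return redirect(url_for('index'))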
241,583
frascoweb/frasco-users
frasco_users/__init__.py
UsersFeature.update_password_from_form
def update_password_from_form(self, user=None, form=None): """Updates the user password using a form """ user = user or self.current if not form and "form" in current_context.data and request.method == "POST": form = current_context.data.form elif not form: raise OptionMissingError("Missing a form in 'update_user_password' action") self._update_password_from_form(user, form) save_model(user) self.update_user_password_signal.send(self, user=user)
python
def update_password_from_form(self, user=None, form=None): """Updates the user password using a form """ user = user or self.current if not form and "form" in current_context.data and request.method == "POST": form = current_context.data.form elif not form: raise OptionMissingError("Missing a form in 'update_user_password' action") self._update_password_from_form(user, form) save_model(user) self.update_user_password_signal.send(self, user=user)
[ "def", "update_password_from_form", "(", "self", ",", "user", "=", "None", ",", "form", "=", "None", ")", ":", "user", "=", "user", "or", "self", ".", "current", "if", "not", "form", "and", "\"form\"", "in", "current_context", ".", "data", "and", "request", ".", "method", "==", "\"POST\"", ":", "form", "=", "current_context", ".", "data", ".", "form", "elif", "not", "form", ":", "raise", "OptionMissingError", "(", "\"Missing a form in 'update_user_password' action\"", ")", "self", ".", "_update_password_from_form", "(", "user", ",", "form", ")", "save_model", "(", "user", ")", "self", ".", "update_user_password_signal", ".", "send", "(", "self", ",", "user", "=", "user", ")" ]
Updates the user password using a form
[ "Updates", "the", "user", "password", "using", "a", "form" ]
16591ca466de5b7c80d7a2384327d9cf2d919c41
https://github.com/frascoweb/frasco-users/blob/16591ca466de5b7c80d7a2384327d9cf2d919c41/frasco_users/__init__.py#L731-L742
241,584
frascoweb/frasco-users
frasco_users/__init__.py
UsersFeature.check_user_password
def check_user_password(self, user, password=None, form=None): """Checks if the password matches the one of the user. If no password is provided, the current form will be used """ pwcol = self.options['password_column'] if password is None: if not form and "form" in current_context.data and request.method == "POST": form = current_context.data.form if form: password = form[pwcol].data else: raise OptionMissingError("Missing 'password' option or a form") current_pwd = getattr(user, pwcol) if not current_pwd or not self.bcrypt.check_password_hash(current_pwd, password): current_context.exit(trigger_action_group="password_mismatch")
python
def check_user_password(self, user, password=None, form=None): """Checks if the password matches the one of the user. If no password is provided, the current form will be used """ pwcol = self.options['password_column'] if password is None: if not form and "form" in current_context.data and request.method == "POST": form = current_context.data.form if form: password = form[pwcol].data else: raise OptionMissingError("Missing 'password' option or a form") current_pwd = getattr(user, pwcol) if not current_pwd or not self.bcrypt.check_password_hash(current_pwd, password): current_context.exit(trigger_action_group="password_mismatch")
[ "def", "check_user_password", "(", "self", ",", "user", ",", "password", "=", "None", ",", "form", "=", "None", ")", ":", "pwcol", "=", "self", ".", "options", "[", "'password_column'", "]", "if", "password", "is", "None", ":", "if", "not", "form", "and", "\"form\"", "in", "current_context", ".", "data", "and", "request", ".", "method", "==", "\"POST\"", ":", "form", "=", "current_context", ".", "data", ".", "form", "if", "form", ":", "password", "=", "form", "[", "pwcol", "]", ".", "data", "else", ":", "raise", "OptionMissingError", "(", "\"Missing 'password' option or a form\"", ")", "current_pwd", "=", "getattr", "(", "user", ",", "pwcol", ")", "if", "not", "current_pwd", "or", "not", "self", ".", "bcrypt", ".", "check_password_hash", "(", "current_pwd", ",", "password", ")", ":", "current_context", ".", "exit", "(", "trigger_action_group", "=", "\"password_mismatch\"", ")" ]
Checks if the password matches the one of the user. If no password is provided, the current form will be used
[ "Checks", "if", "the", "password", "matches", "the", "one", "of", "the", "user", ".", "If", "no", "password", "is", "provided", "the", "current", "form", "will", "be", "used" ]
16591ca466de5b7c80d7a2384327d9cf2d919c41
https://github.com/frascoweb/frasco-users/blob/16591ca466de5b7c80d7a2384327d9cf2d919c41/frasco_users/__init__.py#L762-L776
241,585
frascoweb/frasco-users
frasco_users/__init__.py
UsersFeature.check_unique_attr
def check_unique_attr(self, attrs, user=None, form=None, flash_msg=None): """Checks that an attribute of the current user is unique amongst all users. If no value is provided, the current form will be used. """ user = user or self.current ucol = self.options["username_column"] email = self.options["email_column"] if not isinstance(attrs, (list, tuple, dict)): attrs = [attrs] for name in attrs: if isinstance(attrs, dict): value = attrs[name] else: form = form or current_context.data.get("form") if not form: raise OptionMissingError("Missing 'value' option or form in 'check_user_unique_attr' action") value = form[name].data if name == ucol and not self.options["username_case_sensitive"]: filters = (ucol + '_lcase', value.strip().lower()) elif name == emailcol: filters = (emailcol, value.strip().lower()) else: filters = (name, value.strip()) if self.query.filter({"$and": [filters, ("id__ne", user.id)]}).count() > 0: if flash_msg is None: flash_msg = "The %s is already in use" % name if flash_msg: flash(flash_msg, "error") current_context.exit(trigger_action_group="user_attr_not_unique")
python
def check_unique_attr(self, attrs, user=None, form=None, flash_msg=None): """Checks that an attribute of the current user is unique amongst all users. If no value is provided, the current form will be used. """ user = user or self.current ucol = self.options["username_column"] email = self.options["email_column"] if not isinstance(attrs, (list, tuple, dict)): attrs = [attrs] for name in attrs: if isinstance(attrs, dict): value = attrs[name] else: form = form or current_context.data.get("form") if not form: raise OptionMissingError("Missing 'value' option or form in 'check_user_unique_attr' action") value = form[name].data if name == ucol and not self.options["username_case_sensitive"]: filters = (ucol + '_lcase', value.strip().lower()) elif name == emailcol: filters = (emailcol, value.strip().lower()) else: filters = (name, value.strip()) if self.query.filter({"$and": [filters, ("id__ne", user.id)]}).count() > 0: if flash_msg is None: flash_msg = "The %s is already in use" % name if flash_msg: flash(flash_msg, "error") current_context.exit(trigger_action_group="user_attr_not_unique")
[ "def", "check_unique_attr", "(", "self", ",", "attrs", ",", "user", "=", "None", ",", "form", "=", "None", ",", "flash_msg", "=", "None", ")", ":", "user", "=", "user", "or", "self", ".", "current", "ucol", "=", "self", ".", "options", "[", "\"username_column\"", "]", "email", "=", "self", ".", "options", "[", "\"email_column\"", "]", "if", "not", "isinstance", "(", "attrs", ",", "(", "list", ",", "tuple", ",", "dict", ")", ")", ":", "attrs", "=", "[", "attrs", "]", "for", "name", "in", "attrs", ":", "if", "isinstance", "(", "attrs", ",", "dict", ")", ":", "value", "=", "attrs", "[", "name", "]", "else", ":", "form", "=", "form", "or", "current_context", ".", "data", ".", "get", "(", "\"form\"", ")", "if", "not", "form", ":", "raise", "OptionMissingError", "(", "\"Missing 'value' option or form in 'check_user_unique_attr' action\"", ")", "value", "=", "form", "[", "name", "]", ".", "data", "if", "name", "==", "ucol", "and", "not", "self", ".", "options", "[", "\"username_case_sensitive\"", "]", ":", "filters", "=", "(", "ucol", "+", "'_lcase'", ",", "value", ".", "strip", "(", ")", ".", "lower", "(", ")", ")", "elif", "name", "==", "emailcol", ":", "filters", "=", "(", "emailcol", ",", "value", ".", "strip", "(", ")", ".", "lower", "(", ")", ")", "else", ":", "filters", "=", "(", "name", ",", "value", ".", "strip", "(", ")", ")", "if", "self", ".", "query", ".", "filter", "(", "{", "\"$and\"", ":", "[", "filters", ",", "(", "\"id__ne\"", ",", "user", ".", "id", ")", "]", "}", ")", ".", "count", "(", ")", ">", "0", ":", "if", "flash_msg", "is", "None", ":", "flash_msg", "=", "\"The %s is already in use\"", "%", "name", "if", "flash_msg", ":", "flash", "(", "flash_msg", ",", "\"error\"", ")", "current_context", ".", "exit", "(", "trigger_action_group", "=", "\"user_attr_not_unique\"", ")" ]
Checks that an attribute of the current user is unique amongst all users. If no value is provided, the current form will be used.
[ "Checks", "that", "an", "attribute", "of", "the", "current", "user", "is", "unique", "amongst", "all", "users", ".", "If", "no", "value", "is", "provided", "the", "current", "form", "will", "be", "used", "." ]
16591ca466de5b7c80d7a2384327d9cf2d919c41
https://github.com/frascoweb/frasco-users/blob/16591ca466de5b7c80d7a2384327d9cf2d919c41/frasco_users/__init__.py#L810-L841
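check_unique_attr accepts either a dict of attribute/value pairs or a list of field names to read from the current form; usernames are compared through the lower-cased <username_column>_lcase field unless username_case_sensitive is set. Note that the email branch in this snippet compares against emailcol, which is never assigned (the local is named email), so routing the email column through this code as shown would raise a NameError. Sketch of the dict form (`users` is assumed to be the UsersFeature instance):

users.check_unique_attr({'username': 'new_name'},
                        flash_msg='That username is already taken')
# exits the current context with action group "user_attr_not_unique" on a clash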
241,586
frascoweb/frasco-users
frasco_users/__init__.py
UsersFeature.oauth_signup
def oauth_signup(self, provider, attrs, defaults, redirect_url=None): """Start the signup process after having logged in via oauth """ session["oauth_user_defaults"] = defaults session["oauth_user_attrs"] = dict(provider=provider, **attrs) if not redirect_url: redirect_url = request.args.get("next") return redirect(url_for('users.oauth_signup', next=redirect_url))
python
def oauth_signup(self, provider, attrs, defaults, redirect_url=None): """Start the signup process after having logged in via oauth """ session["oauth_user_defaults"] = defaults session["oauth_user_attrs"] = dict(provider=provider, **attrs) if not redirect_url: redirect_url = request.args.get("next") return redirect(url_for('users.oauth_signup', next=redirect_url))
[ "def", "oauth_signup", "(", "self", ",", "provider", ",", "attrs", ",", "defaults", ",", "redirect_url", "=", "None", ")", ":", "session", "[", "\"oauth_user_defaults\"", "]", "=", "defaults", "session", "[", "\"oauth_user_attrs\"", "]", "=", "dict", "(", "provider", "=", "provider", ",", "*", "*", "attrs", ")", "if", "not", "redirect_url", ":", "redirect_url", "=", "request", ".", "args", ".", "get", "(", "\"next\"", ")", "return", "redirect", "(", "url_for", "(", "'users.oauth_signup'", ",", "next", "=", "redirect_url", ")", ")" ]
Start the signup process after having logged in via oauth
[ "Start", "the", "signup", "process", "after", "having", "logged", "in", "via", "oauth" ]
16591ca466de5b7c80d7a2384327d9cf2d919c41
https://github.com/frascoweb/frasco-users/blob/16591ca466de5b7c80d7a2384327d9cf2d919c41/frasco_users/__init__.py#L863-L870
241,587
AndresMWeber/Nomenclate
nomenclate/core/formatter.py
FormatString.get_valid_format_order
def get_valid_format_order(cls, format_target, format_order=None): """ Checks to see if the target format string follows the proper style """ format_order = format_order or cls.parse_format_order(format_target) cls.validate_no_token_duplicates(format_order) format_target = cls.remove_tokens(format_target, format_order) format_target = cls.remove_static_text(format_target) cls.validate_separator_characters(format_target) cls.validate_matched_parenthesis(format_target) return format_order
python
def get_valid_format_order(cls, format_target, format_order=None): """ Checks to see if the target format string follows the proper style """ format_order = format_order or cls.parse_format_order(format_target) cls.validate_no_token_duplicates(format_order) format_target = cls.remove_tokens(format_target, format_order) format_target = cls.remove_static_text(format_target) cls.validate_separator_characters(format_target) cls.validate_matched_parenthesis(format_target) return format_order
[ "def", "get_valid_format_order", "(", "cls", ",", "format_target", ",", "format_order", "=", "None", ")", ":", "format_order", "=", "format_order", "or", "cls", ".", "parse_format_order", "(", "format_target", ")", "cls", ".", "validate_no_token_duplicates", "(", "format_order", ")", "format_target", "=", "cls", ".", "remove_tokens", "(", "format_target", ",", "format_order", ")", "format_target", "=", "cls", ".", "remove_static_text", "(", "format_target", ")", "cls", ".", "validate_separator_characters", "(", "format_target", ")", "cls", ".", "validate_matched_parenthesis", "(", "format_target", ")", "return", "format_order" ]
Checks to see if the target format string follows the proper style
[ "Checks", "to", "see", "if", "the", "target", "format", "string", "follows", "the", "proper", "style" ]
e6d6fc28beac042bad588e56fbe77531d2de6b6f
https://github.com/AndresMWeber/Nomenclate/blob/e6d6fc28beac042bad588e56fbe77531d2de6b6f/nomenclate/core/formatter.py#L52-L61
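get_valid_format_order parses the token order out of a format string (or takes one already parsed), strips tokens and static text, and then checks for duplicate tokens, stray separator characters and unbalanced parentheses before returning the order. Sketch, assuming it is exposed as a classmethod on FormatString (as the cls parameter suggests); the format string is a made-up example, not Nomenclate's real default:

from nomenclate.core.formatter import FormatString  # assumed import path

order = FormatString.get_valid_format_order('prefix_name_suffix')
# raises a format validation error if a token repeats or separators/parentheses are malformed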
241,588
scieloorg/porteira
porteira/porteira.py
Schema._handle_errors
def _handle_errors(self, errors_list): """ Handles errors list Output Format: [(DOMIAN, LINE, COLUMN, LEVEL, TYPE_NAME, MESSAGE),] Ex.: [(PARSER, 3, 51, FATAL, ERR_TAG_NAME_MISMATCH, Opening and ending tag mismatch: statpage line 3 and startpage), (SCHEMASV, 2, 0, ERROR, SCHEMAV_CVC_ELT_1, Element 'wizard': No matching global declaration available for the validation root)] """ errors = [] for error in errors_list: errors.append((error.domain_name, error.line, error.column, error.level_name, error.type_name, error.message)) return errors
python
def _handle_errors(self, errors_list): """ Handles errors list Output Format: [(DOMIAN, LINE, COLUMN, LEVEL, TYPE_NAME, MESSAGE),] Ex.: [(PARSER, 3, 51, FATAL, ERR_TAG_NAME_MISMATCH, Opening and ending tag mismatch: statpage line 3 and startpage), (SCHEMASV, 2, 0, ERROR, SCHEMAV_CVC_ELT_1, Element 'wizard': No matching global declaration available for the validation root)] """ errors = [] for error in errors_list: errors.append((error.domain_name, error.line, error.column, error.level_name, error.type_name, error.message)) return errors
[ "def", "_handle_errors", "(", "self", ",", "errors_list", ")", ":", "errors", "=", "[", "]", "for", "error", "in", "errors_list", ":", "errors", ".", "append", "(", "(", "error", ".", "domain_name", ",", "error", ".", "line", ",", "error", ".", "column", ",", "error", ".", "level_name", ",", "error", ".", "type_name", ",", "error", ".", "message", ")", ")", "return", "errors" ]
Handles errors list Output Format: [(DOMIAN, LINE, COLUMN, LEVEL, TYPE_NAME, MESSAGE),] Ex.: [(PARSER, 3, 51, FATAL, ERR_TAG_NAME_MISMATCH, Opening and ending tag mismatch: statpage line 3 and startpage), (SCHEMASV, 2, 0, ERROR, SCHEMAV_CVC_ELT_1, Element 'wizard': No matching global declaration available for the validation root)]
[ "Handles", "errors", "list" ]
e61f7d248b16848e63b2f85f37125aa77aba0366
https://github.com/scieloorg/porteira/blob/e61f7d248b16848e63b2f85f37125aa77aba0366/porteira/porteira.py#L23-L39
241,589
scieloorg/porteira
porteira/porteira.py
Schema.get_validation_errors
def get_validation_errors(self, xml_input): """ This method returns a list of validation errors. If there are no errors an empty list is returned """ errors = [] try: parsed_xml = etree.parse(self._handle_xml(xml_input)) self.xmlschema.assertValid(parsed_xml) except (etree.DocumentInvalid, etree.XMLSyntaxError), e: errors = self._handle_errors(e.error_log) except AttributeError: raise CannotValidate('Set XSD to validate the XML') return errors
python
def get_validation_errors(self, xml_input): """ This method returns a list of validation errors. If there are no errors an empty list is returned """ errors = [] try: parsed_xml = etree.parse(self._handle_xml(xml_input)) self.xmlschema.assertValid(parsed_xml) except (etree.DocumentInvalid, etree.XMLSyntaxError), e: errors = self._handle_errors(e.error_log) except AttributeError: raise CannotValidate('Set XSD to validate the XML') return errors
[ "def", "get_validation_errors", "(", "self", ",", "xml_input", ")", ":", "errors", "=", "[", "]", "try", ":", "parsed_xml", "=", "etree", ".", "parse", "(", "self", ".", "_handle_xml", "(", "xml_input", ")", ")", "self", ".", "xmlschema", ".", "assertValid", "(", "parsed_xml", ")", "except", "(", "etree", ".", "DocumentInvalid", ",", "etree", ".", "XMLSyntaxError", ")", ",", "e", ":", "errors", "=", "self", ".", "_handle_errors", "(", "e", ".", "error_log", ")", "except", "AttributeError", ":", "raise", "CannotValidate", "(", "'Set XSD to validate the XML'", ")", "return", "errors" ]
This method returns a list of validation errors. If there are no errors an empty list is returned
[ "This", "method", "returns", "a", "list", "of", "validation", "errors", ".", "If", "there", "are", "no", "errors", "an", "empty", "list", "is", "returned" ]
e61f7d248b16848e63b2f85f37125aa77aba0366
https://github.com/scieloorg/porteira/blob/e61f7d248b16848e63b2f85f37125aa77aba0366/porteira/porteira.py#L50-L63
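The `except (etree.DocumentInvalid, etree.XMLSyntaxError), e:` form above is Python 2 only. As a point of reference, here is a minimal sketch of the same validation flow in Python 3 syntax, assuming only lxml; the function name and file paths are illustrative and not part of porteira:

```python
from lxml import etree

def get_validation_errors_py3(xsd_path, xml_path):
    """Return (domain, line, column, level, type, message) tuples, or [] if the XML is valid."""
    xmlschema = etree.XMLSchema(etree.parse(xsd_path))
    try:
        parsed_xml = etree.parse(xml_path)
        xmlschema.assertValid(parsed_xml)
    except (etree.DocumentInvalid, etree.XMLSyntaxError) as e:
        # Same tuple layout as Schema._handle_errors above.
        return [(err.domain_name, err.line, err.column,
                 err.level_name, err.type_name, err.message)
                for err in e.error_log]
    return []
```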
241,590
scieloorg/porteira
porteira/porteira.py
Schema.validate
def validate(self, xml_input): """ This method validates the parsing and schema, and returns a boolean """ parsed_xml = etree.parse(self._handle_xml(xml_input)) try: return self.xmlschema.validate(parsed_xml) except AttributeError: raise CannotValidate('Set XSD to validate the XML')
python
def validate(self, xml_input): """ This method validates the parsing and schema, and returns a boolean """ parsed_xml = etree.parse(self._handle_xml(xml_input)) try: return self.xmlschema.validate(parsed_xml) except AttributeError: raise CannotValidate('Set XSD to validate the XML')
[ "def", "validate", "(", "self", ",", "xml_input", ")", ":", "parsed_xml", "=", "etree", ".", "parse", "(", "self", ".", "_handle_xml", "(", "xml_input", ")", ")", "try", ":", "return", "self", ".", "xmlschema", ".", "validate", "(", "parsed_xml", ")", "except", "AttributeError", ":", "raise", "CannotValidate", "(", "'Set XSD to validate the XML'", ")" ]
This method validates the parsing and schema, and returns a boolean
[ "This", "method", "validate", "the", "parsing", "and", "schema", "return", "a", "boolean" ]
e61f7d248b16848e63b2f85f37125aa77aba0366
https://github.com/scieloorg/porteira/blob/e61f7d248b16848e63b2f85f37125aa77aba0366/porteira/porteira.py#L65-L73
241,591
scieloorg/porteira
porteira/porteira.py
Schema.deserialize
def deserialize(self, xml_input, *args, **kwargs): """ Convert XML to dict object """ return xmltodict.parse(xml_input, *args, **kwargs)
python
def deserialize(self, xml_input, *args, **kwargs): """ Convert XML to dict object """ return xmltodict.parse(xml_input, *args, **kwargs)
[ "def", "deserialize", "(", "self", ",", "xml_input", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "xmltodict", ".", "parse", "(", "xml_input", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Convert XML to dict object
[ "Convert", "XML", "to", "dict", "object" ]
e61f7d248b16848e63b2f85f37125aa77aba0366
https://github.com/scieloorg/porteira/blob/e61f7d248b16848e63b2f85f37125aa77aba0366/porteira/porteira.py#L75-L79
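`deserialize` is a thin pass-through to `xmltodict.parse`, so the result is a nested dict keyed by element names, with attributes prefixed by `@` and text content under `#text`. A quick illustration (the XML snippet is made up):

```python
import xmltodict

doc = xmltodict.parse("<article><title lang='en'>Hello</title></article>")
print(doc["article"]["title"]["#text"])  # Hello
print(doc["article"]["title"]["@lang"])  # en
```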
241,592
chamrc/to
to/__init__.py
_import_all_modules
def _import_all_modules(): """dynamically imports all modules in the package""" import traceback import os global results globals_, locals_ = globals(), locals() def load_module(modulename, package_module): try: names = [] module = __import__(package_module, globals_, locals_, [modulename]) for name in module.__dict__: if not name.startswith('_'): globals_[name] = module.__dict__[name] names.append(name) except Exception: traceback.print_exc() raise return module, names def load_dir(abs_dirpath, rel_dirpath=''): results = [] # dynamically import all the package modules for filename in os.listdir(abs_dirpath): rel_filepath = os.path.join(rel_dirpath, filename) abs_filepath = os.path.join(abs_dirpath, filename) if filename[0] != '_' and os.path.isfile(abs_filepath) and filename.split('.')[-1] in ('py', 'pyw'): modulename = '.'.join(os.path.normpath(os.path.splitext(rel_filepath)[0]).split(os.sep)) package_module = '.'.join([__name__, modulename]) module, names = load_module(modulename, package_module) results += names elif os.path.isdir(abs_filepath): results += load_dir(abs_filepath, rel_filepath) return results return load_dir(os.path.dirname(__file__))
python
def _import_all_modules(): """dynamically imports all modules in the package""" import traceback import os global results globals_, locals_ = globals(), locals() def load_module(modulename, package_module): try: names = [] module = __import__(package_module, globals_, locals_, [modulename]) for name in module.__dict__: if not name.startswith('_'): globals_[name] = module.__dict__[name] names.append(name) except Exception: traceback.print_exc() raise return module, names def load_dir(abs_dirpath, rel_dirpath=''): results = [] # dynamically import all the package modules for filename in os.listdir(abs_dirpath): rel_filepath = os.path.join(rel_dirpath, filename) abs_filepath = os.path.join(abs_dirpath, filename) if filename[0] != '_' and os.path.isfile(abs_filepath) and filename.split('.')[-1] in ('py', 'pyw'): modulename = '.'.join(os.path.normpath(os.path.splitext(rel_filepath)[0]).split(os.sep)) package_module = '.'.join([__name__, modulename]) module, names = load_module(modulename, package_module) results += names elif os.path.isdir(abs_filepath): results += load_dir(abs_filepath, rel_filepath) return results return load_dir(os.path.dirname(__file__))
[ "def", "_import_all_modules", "(", ")", ":", "import", "traceback", "import", "os", "global", "results", "globals_", ",", "locals_", "=", "globals", "(", ")", ",", "locals", "(", ")", "def", "load_module", "(", "modulename", ",", "package_module", ")", ":", "try", ":", "names", "=", "[", "]", "module", "=", "__import__", "(", "package_module", ",", "globals_", ",", "locals_", ",", "[", "modulename", "]", ")", "for", "name", "in", "module", ".", "__dict__", ":", "if", "not", "name", ".", "startswith", "(", "'_'", ")", ":", "globals_", "[", "name", "]", "=", "module", ".", "__dict__", "[", "name", "]", "names", ".", "append", "(", "name", ")", "except", "Exception", ":", "traceback", ".", "print_exc", "(", ")", "raise", "return", "module", ",", "names", "def", "load_dir", "(", "abs_dirpath", ",", "rel_dirpath", "=", "''", ")", ":", "results", "=", "[", "]", "# dynamically import all the package modules", "for", "filename", "in", "os", ".", "listdir", "(", "abs_dirpath", ")", ":", "rel_filepath", "=", "os", ".", "path", ".", "join", "(", "rel_dirpath", ",", "filename", ")", "abs_filepath", "=", "os", ".", "path", ".", "join", "(", "abs_dirpath", ",", "filename", ")", "if", "filename", "[", "0", "]", "!=", "'_'", "and", "os", ".", "path", ".", "isfile", "(", "abs_filepath", ")", "and", "filename", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", "in", "(", "'py'", ",", "'pyw'", ")", ":", "modulename", "=", "'.'", ".", "join", "(", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "splitext", "(", "rel_filepath", ")", "[", "0", "]", ")", ".", "split", "(", "os", ".", "sep", ")", ")", "package_module", "=", "'.'", ".", "join", "(", "[", "__name__", ",", "modulename", "]", ")", "module", ",", "names", "=", "load_module", "(", "modulename", ",", "package_module", ")", "results", "+=", "names", "elif", "os", ".", "path", ".", "isdir", "(", "abs_filepath", ")", ":", "results", "+=", "load_dir", "(", "abs_filepath", ",", "rel_filepath", ")", "return", "results", "return", "load_dir", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ")" ]
dynamically imports all modules in the package
[ "dynamically", "imports", "all", "modules", "in", "the", "package" ]
ea1122bef08615b6c19904dadf2608e10c20c960
https://github.com/chamrc/to/blob/ea1122bef08615b6c19904dadf2608e10c20c960/to/__init__.py#L1-L42
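`_import_all_modules` walks the package directory by hand and re-exports every public name it finds. The standard library offers `pkgutil`/`importlib` helpers for the same idea; the sketch below is only a rough equivalent under that assumption, not the package's actual code:

```python
import importlib
import pkgutil

def import_all_modules(package):
    """Import every submodule of `package` and hoist its public names onto the package."""
    names = []
    for info in pkgutil.walk_packages(package.__path__, prefix=package.__name__ + "."):
        module = importlib.import_module(info.name)
        for name, value in vars(module).items():
            if not name.startswith("_"):
                setattr(package, name, value)
                names.append(name)
    return names
```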
241,593
yograterol/zoort
zoort.py
compress_folder_dump
def compress_folder_dump(path, target): ''' Compress folder dump to tar.gz file ''' import tarfile if not path or not os.path.isdir(path): raise SystemExit(_error_codes.get(105)) name_out_file = (target + 'dump-' + datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')) tar = tarfile.open(name_out_file + '.tar.gz', 'w:gz') tar.add(path, arcname='dump') tar.close() return (name_out_file, name_out_file + '.tar.gz')
python
def compress_folder_dump(path, target): ''' Compress folder dump to tar.gz file ''' import tarfile if not path or not os.path.isdir(path): raise SystemExit(_error_codes.get(105)) name_out_file = (target + 'dump-' + datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')) tar = tarfile.open(name_out_file + '.tar.gz', 'w:gz') tar.add(path, arcname='dump') tar.close() return (name_out_file, name_out_file + '.tar.gz')
[ "def", "compress_folder_dump", "(", "path", ",", "target", ")", ":", "import", "tarfile", "if", "not", "path", "or", "not", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "raise", "SystemExit", "(", "_error_codes", ".", "get", "(", "105", ")", ")", "name_out_file", "=", "(", "target", "+", "'dump-'", "+", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "'%Y-%m-%d-%H-%M-%S'", ")", ")", "tar", "=", "tarfile", ".", "open", "(", "name_out_file", "+", "'.tar.gz'", ",", "'w:gz'", ")", "tar", ".", "add", "(", "path", ",", "arcname", "=", "'dump'", ")", "tar", ".", "close", "(", ")", "return", "(", "name_out_file", ",", "name_out_file", "+", "'.tar.gz'", ")" ]
Compress folder dump to tar.gz file
[ "Compress", "folder", "dump", "to", "tar", ".", "gz", "file" ]
ed6669ab945007c20a83f6d468856c4eb585c752
https://github.com/yograterol/zoort/blob/ed6669ab945007c20a83f6d468856c4eb585c752/zoort.py#L665-L677
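For example, with `target='/backups/'` and a dump created on 2024-01-31 at 10:20:30, the function above returns `('/backups/dump-2024-01-31-10-20-30', '/backups/dump-2024-01-31-10-20-30.tar.gz')`. A minimal sketch of the same archiving step using `tarfile` as a context manager (paths are illustrative):

```python
import datetime
import tarfile

def compress_folder(path, target):
    stamp = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')
    name_out_file = target + 'dump-' + stamp
    # Store the whole folder under the archive member name 'dump', as above.
    with tarfile.open(name_out_file + '.tar.gz', 'w:gz') as tar:
        tar.add(path, arcname='dump')
    return name_out_file, name_out_file + '.tar.gz'
```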
241,594
yograterol/zoort
zoort.py
encrypt_file
def encrypt_file(path, output, password=None): ''' Encrypt file with AES method and password. ''' if not password: password = PASSWORD_FILE query = 'openssl aes-128-cbc -salt -in {0} -out {1} -k {2}' with hide('output'): local(query.format(path, output, password)) os.remove(path)
python
def encrypt_file(path, output, password=None): ''' Encrypt file with AES method and password. ''' if not password: password = PASSWORD_FILE query = 'openssl aes-128-cbc -salt -in {0} -out {1} -k {2}' with hide('output'): local(query.format(path, output, password)) os.remove(path)
[ "def", "encrypt_file", "(", "path", ",", "output", ",", "password", "=", "None", ")", ":", "if", "not", "password", ":", "password", "=", "PASSWORD_FILE", "query", "=", "'openssl aes-128-cbc -salt -in {0} -out {1} -k {2}'", "with", "hide", "(", "'output'", ")", ":", "local", "(", "query", ".", "format", "(", "path", ",", "output", ",", "password", ")", ")", "os", ".", "remove", "(", "path", ")" ]
Encrypt file with AES method and password.
[ "Encrypt", "file", "with", "AES", "method", "and", "password", "." ]
ed6669ab945007c20a83f6d468856c4eb585c752
https://github.com/yograterol/zoort/blob/ed6669ab945007c20a83f6d468856c4eb585c752/zoort.py#L680-L689
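`encrypt_file` shells out to OpenSSL through Fabric's `local()`: salted AES-128-CBC with the key derived from the passphrase passed via `-k`, after which the plaintext archive is deleted. Roughly the same call expressed with `subprocess` (file names and passphrase are placeholders):

```python
import subprocess

subprocess.run(
    ['openssl', 'aes-128-cbc', '-salt',
     '-in', '/backups/dump-2024-01-31-10-20-30.tar.gz',   # the `path` argument
     '-out', '/backups/dump-2024-01-31-10-20-30',         # the `output` argument
     '-k', 'my-secret-passphrase'],
    check=True,
)
```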
241,595
yograterol/zoort
zoort.py
decrypt_file
def decrypt_file(path, password=None): ''' Decrypt file with AES method and password. ''' global PASSWORD_FILE if not password: password = PASSWORD_FILE if path and not os.path.isfile(path): raise SystemExit(_error_codes.get(106)) query = 'openssl aes-128-cbc -d -salt -in {0} -out {1} -k {2}' with hide('output'): local(query.format(path, path + '.tar.gz', PASSWORD_FILE))
python
def decrypt_file(path, password=None): ''' Decrypt file with AES method and password. ''' global PASSWORD_FILE if not password: password = PASSWORD_FILE if path and not os.path.isfile(path): raise SystemExit(_error_codes.get(106)) query = 'openssl aes-128-cbc -d -salt -in {0} -out {1} -k {2}' with hide('output'): local(query.format(path, path + '.tar.gz', PASSWORD_FILE))
[ "def", "decrypt_file", "(", "path", ",", "password", "=", "None", ")", ":", "global", "PASSWORD_FILE", "if", "not", "password", ":", "password", "=", "PASSWORD_FILE", "if", "path", "and", "not", "os", ".", "path", ".", "isfile", "(", "path", ")", ":", "raise", "SystemExit", "(", "_error_codes", ".", "get", "(", "106", ")", ")", "query", "=", "'openssl aes-128-cbc -d -salt -in {0} -out {1} -k {2}'", "with", "hide", "(", "'output'", ")", ":", "local", "(", "query", ".", "format", "(", "path", ",", "path", "+", "'.tar.gz'", ",", "PASSWORD_FILE", ")", ")" ]
Decrypt file with AES method and password.
[ "Decrypt", "file", "with", "AES", "method", "and", "password", "." ]
ed6669ab945007c20a83f6d468856c4eb585c752
https://github.com/yograterol/zoort/blob/ed6669ab945007c20a83f6d468856c4eb585c752/zoort.py#L692-L703
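Note that the OpenSSL call in `decrypt_file` always uses the module-level `PASSWORD_FILE`, so the `password` parameter only sets a local default and is otherwise ignored; the decrypted archive is written next to the input with a `.tar.gz` suffix. The reverse of the command above, again as a plain `subprocess` sketch with placeholder values:

```python
import subprocess

encrypted = '/backups/dump-2024-01-31-10-20-30'   # file produced by encrypt_file
subprocess.run(
    ['openssl', 'aes-128-cbc', '-d', '-salt',
     '-in', encrypted,
     '-out', encrypted + '.tar.gz',
     '-k', 'my-secret-passphrase'],
    check=True,
)
```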
241,596
yograterol/zoort
zoort.py
optional_actions
def optional_actions(encrypt, path, compress_file, **kwargs): ''' Optional actions: encrypt the dump file and upload it to AWS S3, Glacier, Dropbox or Swift. ''' yes = ('y', 'Y') file_to_upload = normalize_path(path) + compress_file[1] if encrypt in yes: encrypt_file(compress_file[1], compress_file[0]) file_to_upload = compress_file[0] if kwargs.get('s3') in yes: factory_uploader('S3', name_backup=file_to_upload, bucket_name=AWS_BUCKET_NAME, action='upload') if kwargs.get('glacier') in yes: factory_uploader('Glacier', name_backup=file_to_upload, vault_name=AWS_VAULT_NAME, path=os.path.join(os.path.expanduser('~'), '.zoort.db'), action='upload') if kwargs.get('dropbox') in yes: factory_uploader('Dropbox', name_backup=file_to_upload, action='upload') if kwargs.get('swift') in yes: factory_uploader('Swift', name_backup=file_to_upload, action='upload')
python
def optional_actions(encrypt, path, compress_file, **kwargs): ''' Optional actions: encrypt the dump file and upload it to AWS S3, Glacier, Dropbox or Swift. ''' yes = ('y', 'Y') file_to_upload = normalize_path(path) + compress_file[1] if encrypt in yes: encrypt_file(compress_file[1], compress_file[0]) file_to_upload = compress_file[0] if kwargs.get('s3') in yes: factory_uploader('S3', name_backup=file_to_upload, bucket_name=AWS_BUCKET_NAME, action='upload') if kwargs.get('glacier') in yes: factory_uploader('Glacier', name_backup=file_to_upload, vault_name=AWS_VAULT_NAME, path=os.path.join(os.path.expanduser('~'), '.zoort.db'), action='upload') if kwargs.get('dropbox') in yes: factory_uploader('Dropbox', name_backup=file_to_upload, action='upload') if kwargs.get('swift') in yes: factory_uploader('Swift', name_backup=file_to_upload, action='upload')
[ "def", "optional_actions", "(", "encrypt", ",", "path", ",", "compress_file", ",", "*", "*", "kwargs", ")", ":", "yes", "=", "(", "'y'", ",", "'Y'", ")", "file_to_upload", "=", "normalize_path", "(", "path", ")", "+", "compress_file", "[", "1", "]", "if", "encrypt", "in", "yes", ":", "encrypt_file", "(", "compress_file", "[", "1", "]", ",", "compress_file", "[", "0", "]", ")", "file_to_upload", "=", "compress_file", "[", "0", "]", "if", "kwargs", ".", "get", "(", "'s3'", ")", "in", "yes", ":", "factory_uploader", "(", "'S3'", ",", "name_backup", "=", "file_to_upload", ",", "bucket_name", "=", "AWS_BUCKET_NAME", ",", "action", "=", "'upload'", ")", "if", "kwargs", ".", "get", "(", "'glacier'", ")", "in", "yes", ":", "factory_uploader", "(", "'Glacier'", ",", "name_backup", "=", "file_to_upload", ",", "vault_name", "=", "AWS_VAULT_NAME", ",", "path", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", ",", "'.zoort.db'", ")", ",", "action", "=", "'upload'", ")", "if", "kwargs", ".", "get", "(", "'dropbox'", ")", "in", "yes", ":", "factory_uploader", "(", "'Dropbox'", ",", "name_backup", "=", "file_to_upload", ",", "action", "=", "'upload'", ")", "if", "kwargs", ".", "get", "(", "'swift'", ")", "in", "yes", ":", "factory_uploader", "(", "'Swift'", ",", "name_backup", "=", "file_to_upload", ",", "action", "=", "'upload'", ")" ]
Optional actions: encrypt the dump file and upload it to AWS S3, Glacier, Dropbox or Swift.
[ "Optional", "actions", "about", "of", "AWS", "S3", "and", "encrypt", "file", "." ]
ed6669ab945007c20a83f6d468856c4eb585c752
https://github.com/yograterol/zoort/blob/ed6669ab945007c20a83f6d468856c4eb585c752/zoort.py#L706-L731
241,597
yograterol/zoort
zoort.py
main
def main(): '''Main entry point for the mongo_backups CLI.''' args = docopt(__doc__, version=__version__) if args.get('backup'): backup_database(args) if args.get('backup_all'): backup_all(args) if args.get('decrypt'): decrypt_file(args.get('<path>')) if args.get('configure'): configure(service='all') if args.get('configure-aws'): configure(service='aws') if args.get('configure-dropbox'): configure(service='dropbox') if args.get('configure-swift'): configure(service='swift') if args.get('download_all'): download_all()
python
def main(): '''Main entry point for the mongo_backups CLI.''' args = docopt(__doc__, version=__version__) if args.get('backup'): backup_database(args) if args.get('backup_all'): backup_all(args) if args.get('decrypt'): decrypt_file(args.get('<path>')) if args.get('configure'): configure(service='all') if args.get('configure-aws'): configure(service='aws') if args.get('configure-dropbox'): configure(service='dropbox') if args.get('configure-swift'): configure(service='swift') if args.get('download_all'): download_all()
[ "def", "main", "(", ")", ":", "args", "=", "docopt", "(", "__doc__", ",", "version", "=", "__version__", ")", "if", "args", ".", "get", "(", "'backup'", ")", ":", "backup_database", "(", "args", ")", "if", "args", ".", "get", "(", "'backup_all'", ")", ":", "backup_all", "(", "args", ")", "if", "args", ".", "get", "(", "'decrypt'", ")", ":", "decrypt_file", "(", "args", ".", "get", "(", "'<path>'", ")", ")", "if", "args", ".", "get", "(", "'configure'", ")", ":", "configure", "(", "service", "=", "'all'", ")", "if", "args", ".", "get", "(", "'configure-aws'", ")", ":", "configure", "(", "service", "=", "'aws'", ")", "if", "args", ".", "get", "(", "'configure-dropbox'", ")", ":", "configure", "(", "service", "=", "'dropbox'", ")", "if", "args", ".", "get", "(", "'configure-swift'", ")", ":", "configure", "(", "service", "=", "'swift'", ")", "if", "args", ".", "get", "(", "'download_all'", ")", ":", "download_all", "(", ")" ]
Main entry point for the mongo_backups CLI.
[ "Main", "entry", "point", "for", "the", "mongo_backups", "CLI", "." ]
ed6669ab945007c20a83f6d468856c4eb585c752
https://github.com/yograterol/zoort/blob/ed6669ab945007c20a83f6d468856c4eb585c752/zoort.py#L735-L753
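`main` dispatches on the dict that docopt builds from the module docstring: commands become boolean keys and positional arguments appear under their `<name>` keys. The zoort usage string itself is not shown here, so the snippet below uses a simplified stand-in purely to illustrate the pattern:

```python
"""Usage:
  zoort backup <database> [<user>] [<password>]
  zoort decrypt <path>
"""
from docopt import docopt

args = docopt(__doc__, argv=['backup', 'reports'])
print(args['backup'])        # True  -> would take the backup_database(args) branch
print(args['<database>'])    # reports
print(args['decrypt'])       # False
```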
241,598
yograterol/zoort
zoort.py
backup_database
def backup_database(args): ''' Backup one database from CLI ''' username = args.get('<user>') password = args.get('<password>') database = args['<database>'] host = args.get('<host>') or '127.0.0.1' path = args.get('--path') or os.getcwd() s3 = args.get('--upload_s3') glacier = args.get('--upload_glacier') dropbox = args.get('--upload_dropbox') swift = args.get('--upload_swift') encrypt = args.get('--encrypt') or 'Y' if not database: raise SystemExit(_error_codes.get(101)) if path and not os.path.isdir(path): raise SystemExit(_error_codes.get(105)) query = 'mongodump -d {database} --host {host} ' if username: query += '-u {username} ' if password: query += '-p {password} ' if path: query += '-o {path}/dump' local(query.format(username=username, password=password, database=database, host=host, path=path)) compress_file = compress_folder_dump( normalize_path(path) + 'dump', normalize_path(path)) shutil.rmtree(normalize_path(path) + 'dump') optional_actions(encrypt, path, compress_file, s3=s3, glacier=glacier, dropbox=dropbox, swift=swift)
python
def backup_database(args): ''' Backup one database from CLI ''' username = args.get('<user>') password = args.get('<password>') database = args['<database>'] host = args.get('<host>') or '127.0.0.1' path = args.get('--path') or os.getcwd() s3 = args.get('--upload_s3') glacier = args.get('--upload_glacier') dropbox = args.get('--upload_dropbox') swift = args.get('--upload_swift') encrypt = args.get('--encrypt') or 'Y' if not database: raise SystemExit(_error_codes.get(101)) if path and not os.path.isdir(path): raise SystemExit(_error_codes.get(105)) query = 'mongodump -d {database} --host {host} ' if username: query += '-u {username} ' if password: query += '-p {password} ' if path: query += '-o {path}/dump' local(query.format(username=username, password=password, database=database, host=host, path=path)) compress_file = compress_folder_dump( normalize_path(path) + 'dump', normalize_path(path)) shutil.rmtree(normalize_path(path) + 'dump') optional_actions(encrypt, path, compress_file, s3=s3, glacier=glacier, dropbox=dropbox, swift=swift)
[ "def", "backup_database", "(", "args", ")", ":", "username", "=", "args", ".", "get", "(", "'<user>'", ")", "password", "=", "args", ".", "get", "(", "'<password>'", ")", "database", "=", "args", "[", "'<database>'", "]", "host", "=", "args", ".", "get", "(", "'<host>'", ")", "or", "'127.0.0.1'", "path", "=", "args", ".", "get", "(", "'--path'", ")", "or", "os", ".", "getcwd", "(", ")", "s3", "=", "args", ".", "get", "(", "'--upload_s3'", ")", "glacier", "=", "args", ".", "get", "(", "'--upload_glacier'", ")", "dropbox", "=", "args", ".", "get", "(", "'--upload_dropbox'", ")", "swift", "=", "args", ".", "get", "(", "'--upload_swift'", ")", "encrypt", "=", "args", ".", "get", "(", "'--encrypt'", ")", "or", "'Y'", "if", "not", "database", ":", "raise", "SystemExit", "(", "_error_codes", ".", "get", "(", "101", ")", ")", "if", "path", "and", "not", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "raise", "SystemExit", "(", "_error_codes", ".", "get", "(", "105", ")", ")", "query", "=", "'mongodump -d {database} --host {host} '", "if", "username", ":", "query", "+=", "'-u {username} '", "if", "password", ":", "query", "+=", "'-p {password} '", "if", "path", ":", "query", "+=", "'-o {path}/dump'", "local", "(", "query", ".", "format", "(", "username", "=", "username", ",", "password", "=", "password", ",", "database", "=", "database", ",", "host", "=", "host", ",", "path", "=", "path", ")", ")", "compress_file", "=", "compress_folder_dump", "(", "normalize_path", "(", "path", ")", "+", "'dump'", ",", "normalize_path", "(", "path", ")", ")", "shutil", ".", "rmtree", "(", "normalize_path", "(", "path", ")", "+", "'dump'", ")", "optional_actions", "(", "encrypt", ",", "path", ",", "compress_file", ",", "s3", "=", "s3", ",", "glacier", "=", "glacier", ",", "dropbox", "=", "dropbox", ",", "swift", "=", "swift", ")" ]
Backup one database from CLI
[ "Backup", "one", "database", "from", "CLI" ]
ed6669ab945007c20a83f6d468856c4eb585c752
https://github.com/yograterol/zoort/blob/ed6669ab945007c20a83f6d468856c4eb585c752/zoort.py#L760-L801
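The mongodump invocation in `backup_database` is assembled piece by piece as a format string; with every optional part present it expands to a single command. A quick check with made-up credentials and paths:

```python
query = 'mongodump -d {database} --host {host} '
query += '-u {username} '
query += '-p {password} '
query += '-o {path}/dump'
print(query.format(database='reports', host='127.0.0.1',
                   username='backup_user', password='s3cret',
                   path='/var/backups'))
# mongodump -d reports --host 127.0.0.1 -u backup_user -p s3cret -o /var/backups/dump
```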
241,599
yograterol/zoort
zoort.py
backup_all
def backup_all(args): ''' Backup all databases with access user. ''' username = None password = None auth = args.get('--auth') path = args.get('--path') s3 = args.get('--upload_s3') glacier = args.get('--upload_glacier') dropbox = args.get('--upload_dropbox') swift = args.get('--upload_swift') encrypt = args.get('--encrypt') or 'Y' if (ADMIN_USER and ADMIN_PASSWORD): username = ADMIN_USER password = ADMIN_PASSWORD if not path: path = os.getcwd() if path and not os.path.isdir(path): raise SystemExit(_error_codes.get(105)) if auth: query = 'mongodump -u {username} -p {password} ' else: query = 'mongodump ' if path: query += '-o {path}/dump' local(query.format(username=username, password=password, path=path)) compress_file = compress_folder_dump( normalize_path(path) + 'dump', normalize_path(path)) shutil.rmtree(normalize_path(path) + 'dump') optional_actions(encrypt, path, compress_file, s3=s3, glacier=glacier, dropbox=dropbox, swift=swift)
python
def backup_all(args): ''' Backup all databases with access user. ''' username = None password = None auth = args.get('--auth') path = args.get('--path') s3 = args.get('--upload_s3') glacier = args.get('--upload_glacier') dropbox = args.get('--upload_dropbox') swift = args.get('--upload_swift') encrypt = args.get('--encrypt') or 'Y' if (ADMIN_USER and ADMIN_PASSWORD): username = ADMIN_USER password = ADMIN_PASSWORD if not path: path = os.getcwd() if path and not os.path.isdir(path): raise SystemExit(_error_codes.get(105)) if auth: query = 'mongodump -u {username} -p {password} ' else: query = 'mongodump ' if path: query += '-o {path}/dump' local(query.format(username=username, password=password, path=path)) compress_file = compress_folder_dump( normalize_path(path) + 'dump', normalize_path(path)) shutil.rmtree(normalize_path(path) + 'dump') optional_actions(encrypt, path, compress_file, s3=s3, glacier=glacier, dropbox=dropbox, swift=swift)
[ "def", "backup_all", "(", "args", ")", ":", "username", "=", "None", "password", "=", "None", "auth", "=", "args", ".", "get", "(", "'--auth'", ")", "path", "=", "args", ".", "get", "(", "'--path'", ")", "s3", "=", "args", ".", "get", "(", "'--upload_s3'", ")", "glacier", "=", "args", ".", "get", "(", "'--upload_glacier'", ")", "dropbox", "=", "args", ".", "get", "(", "'--upload_dropbox'", ")", "swift", "=", "args", ".", "get", "(", "'--upload_swift'", ")", "encrypt", "=", "args", ".", "get", "(", "'--encrypt'", ")", "or", "'Y'", "if", "(", "ADMIN_USER", "and", "ADMIN_PASSWORD", ")", ":", "username", "=", "ADMIN_USER", "password", "=", "ADMIN_PASSWORD", "if", "not", "path", ":", "path", "=", "os", ".", "getcwd", "(", ")", "if", "path", "and", "not", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "raise", "SystemExit", "(", "_error_codes", ".", "get", "(", "105", ")", ")", "if", "auth", ":", "query", "=", "'mongodump -u {username} -p {password} '", "else", ":", "query", "=", "'mongodump '", "if", "path", ":", "query", "+=", "'-o {path}/dump'", "local", "(", "query", ".", "format", "(", "username", "=", "username", ",", "password", "=", "password", ",", "path", "=", "path", ")", ")", "compress_file", "=", "compress_folder_dump", "(", "normalize_path", "(", "path", ")", "+", "'dump'", ",", "normalize_path", "(", "path", ")", ")", "shutil", ".", "rmtree", "(", "normalize_path", "(", "path", ")", "+", "'dump'", ")", "optional_actions", "(", "encrypt", ",", "path", ",", "compress_file", ",", "s3", "=", "s3", ",", "glacier", "=", "glacier", ",", "dropbox", "=", "dropbox", ",", "swift", "=", "swift", ")" ]
Backup all databases with access user.
[ "Backup", "all", "databases", "with", "access", "user", "." ]
ed6669ab945007c20a83f6d468856c4eb585c752
https://github.com/yograterol/zoort/blob/ed6669ab945007c20a83f6d468856c4eb585c752/zoort.py#L804-L844