Dataset schema:

| column | dtype | range |
| --- | --- | --- |
| id | int32 | 0 – 252k |
| repo | stringlengths | 7 – 55 |
| path | stringlengths | 4 – 127 |
| func_name | stringlengths | 1 – 88 |
| original_string | stringlengths | 75 – 19.8k |
| language | stringclasses | 1 value |
| code | stringlengths | 75 – 19.8k |
| code_tokens | list | |
| docstring | stringlengths | 3 – 17.3k |
| docstring_tokens | list | |
| sha | stringlengths | 40 – 40 |
| url | stringlengths | 87 – 242 |
21,300
nvbn/thefuck
thefuck/rules/brew_unknown_command.py
_get_brew_commands
def _get_brew_commands(brew_path_prefix):
    """To get brew default commands on local environment"""
    brew_cmd_path = brew_path_prefix + BREW_CMD_PATH

    return [name[:-3] for name in os.listdir(brew_cmd_path)
            if name.endswith(('.rb', '.sh'))]
python
def _get_brew_commands(brew_path_prefix):
    """To get brew default commands on local environment"""
    brew_cmd_path = brew_path_prefix + BREW_CMD_PATH

    return [name[:-3] for name in os.listdir(brew_cmd_path)
            if name.endswith(('.rb', '.sh'))]
[ "def", "_get_brew_commands", "(", "brew_path_prefix", ")", ":", "brew_cmd_path", "=", "brew_path_prefix", "+", "BREW_CMD_PATH", "return", "[", "name", "[", ":", "-", "3", "]", "for", "name", "in", "os", ".", "listdir", "(", "brew_cmd_path", ")", "if", "name", ".", "endswith", "(", "(", "'.rb'", ",", "'.sh'", ")", ")", "]" ]
To get brew default commands on local environment
[ "To", "get", "brew", "default", "commands", "on", "local", "environment" ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/rules/brew_unknown_command.py#L13-L18
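A minimal sketch (not part of the dataset row) of the suffix-stripping idea above; the directory listing is hypothetical:

# '.rb'/'.sh' scripts become command names once the 3-char suffix is sliced off
names = ['install.rb', 'upgrade.sh', 'README.md']
commands = [name[:-3] for name in names if name.endswith(('.rb', '.sh'))]
assert commands == ['install', 'upgrade']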
21,301
nvbn/thefuck
thefuck/specific/git.py
git_support
def git_support(fn, command):
    """Resolves git aliases and supports testing for both git and hub."""
    # supports GitHub's `hub` command
    # which is recommended to be used with `alias git=hub`
    # but at this point, shell aliases have already been resolved
    if not is_app(command, 'git', 'hub'):
        return False

    # perform git aliases expansion
    if 'trace: alias expansion:' in command.output:
        search = re.search("trace: alias expansion: ([^ ]*) => ([^\n]*)",
                           command.output)
        alias = search.group(1)

        # by default git quotes everything, for example:
        # 'commit' '--amend'
        # which is surprising and does not allow to easily test for
        # eg. 'git commit'
        expansion = ' '.join(shell.quote(part)
                             for part in shell.split_command(search.group(2)))
        new_script = command.script.replace(alias, expansion)

        command = command.update(script=new_script)

    return fn(command)
python
def git_support(fn, command):
    """Resolves git aliases and supports testing for both git and hub."""
    # supports GitHub's `hub` command
    # which is recommended to be used with `alias git=hub`
    # but at this point, shell aliases have already been resolved
    if not is_app(command, 'git', 'hub'):
        return False

    # perform git aliases expansion
    if 'trace: alias expansion:' in command.output:
        search = re.search("trace: alias expansion: ([^ ]*) => ([^\n]*)",
                           command.output)
        alias = search.group(1)

        # by default git quotes everything, for example:
        # 'commit' '--amend'
        # which is surprising and does not allow to easily test for
        # eg. 'git commit'
        expansion = ' '.join(shell.quote(part)
                             for part in shell.split_command(search.group(2)))
        new_script = command.script.replace(alias, expansion)

        command = command.update(script=new_script)

    return fn(command)
[ "def", "git_support", "(", "fn", ",", "command", ")", ":", "# supports GitHub's `hub` command", "# which is recommended to be used with `alias git=hub`", "# but at this point, shell aliases have already been resolved", "if", "not", "is_app", "(", "command", ",", "'git'", ",", "'hub'", ")", ":", "return", "False", "# perform git aliases expansion", "if", "'trace: alias expansion:'", "in", "command", ".", "output", ":", "search", "=", "re", ".", "search", "(", "\"trace: alias expansion: ([^ ]*) => ([^\\n]*)\"", ",", "command", ".", "output", ")", "alias", "=", "search", ".", "group", "(", "1", ")", "# by default git quotes everything, for example:", "# 'commit' '--amend'", "# which is surprising and does not allow to easily test for", "# eg. 'git commit'", "expansion", "=", "' '", ".", "join", "(", "shell", ".", "quote", "(", "part", ")", "for", "part", "in", "shell", ".", "split_command", "(", "search", ".", "group", "(", "2", ")", ")", ")", "new_script", "=", "command", ".", "script", ".", "replace", "(", "alias", ",", "expansion", ")", "command", "=", "command", ".", "update", "(", "script", "=", "new_script", ")", "return", "fn", "(", "command", ")" ]
Resolves git aliases and supports testing for both git and hub.
[ "Resolves", "git", "aliases", "and", "supports", "testing", "for", "both", "git", "and", "hub", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/specific/git.py#L8-L32
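A quick check of the alias-expansion regex used above, run against a hypothetical `git` trace line:

import re

output = "trace: alias expansion: ci => commit '--amend'"
search = re.search("trace: alias expansion: ([^ ]*) => ([^\n]*)", output)
assert search.group(1) == 'ci'                # the alias
assert search.group(2) == "commit '--amend'"  # its quoted expansion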
21,302
nvbn/thefuck
thefuck/ui.py
read_actions
def read_actions():
    """Yields actions for pressed keys."""
    while True:
        key = get_key()

        # Handle arrows, j/k (qwerty), and n/e (colemak)
        if key in (const.KEY_UP, const.KEY_CTRL_N, 'k', 'e'):
            yield const.ACTION_PREVIOUS
        elif key in (const.KEY_DOWN, const.KEY_CTRL_P, 'j', 'n'):
            yield const.ACTION_NEXT
        elif key in (const.KEY_CTRL_C, 'q'):
            yield const.ACTION_ABORT
        elif key in ('\n', '\r'):
            yield const.ACTION_SELECT
python
def read_actions():
    """Yields actions for pressed keys."""
    while True:
        key = get_key()

        # Handle arrows, j/k (qwerty), and n/e (colemak)
        if key in (const.KEY_UP, const.KEY_CTRL_N, 'k', 'e'):
            yield const.ACTION_PREVIOUS
        elif key in (const.KEY_DOWN, const.KEY_CTRL_P, 'j', 'n'):
            yield const.ACTION_NEXT
        elif key in (const.KEY_CTRL_C, 'q'):
            yield const.ACTION_ABORT
        elif key in ('\n', '\r'):
            yield const.ACTION_SELECT
[ "def", "read_actions", "(", ")", ":", "while", "True", ":", "key", "=", "get_key", "(", ")", "# Handle arrows, j/k (qwerty), and n/e (colemak)", "if", "key", "in", "(", "const", ".", "KEY_UP", ",", "const", ".", "KEY_CTRL_N", ",", "'k'", ",", "'e'", ")", ":", "yield", "const", ".", "ACTION_PREVIOUS", "elif", "key", "in", "(", "const", ".", "KEY_DOWN", ",", "const", ".", "KEY_CTRL_P", ",", "'j'", ",", "'n'", ")", ":", "yield", "const", ".", "ACTION_NEXT", "elif", "key", "in", "(", "const", ".", "KEY_CTRL_C", ",", "'q'", ")", ":", "yield", "const", ".", "ACTION_ABORT", "elif", "key", "in", "(", "'\\n'", ",", "'\\r'", ")", ":", "yield", "const", ".", "ACTION_SELECT" ]
Yields actions for pressed keys.
[ "Yields", "actions", "for", "pressed", "keys", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/ui.py#L11-L24
21,303
nvbn/thefuck
thefuck/entrypoints/shell_logger.py
shell_logger
def shell_logger(output):
    """Logs shell output to the `output`.

    Works like unix script command with `-f` flag.

    """
    if not os.environ.get('SHELL'):
        logs.warn("Shell logger doesn't support your platform.")
        sys.exit(1)

    fd = os.open(output, os.O_CREAT | os.O_TRUNC | os.O_RDWR)
    os.write(fd, b'\x00' * const.LOG_SIZE_IN_BYTES)
    buffer = mmap.mmap(fd, const.LOG_SIZE_IN_BYTES, mmap.MAP_SHARED, mmap.PROT_WRITE)
    return_code = _spawn(os.environ['SHELL'], partial(_read, buffer))
    sys.exit(return_code)
python
def shell_logger(output):
    """Logs shell output to the `output`.

    Works like unix script command with `-f` flag.

    """
    if not os.environ.get('SHELL'):
        logs.warn("Shell logger doesn't support your platform.")
        sys.exit(1)

    fd = os.open(output, os.O_CREAT | os.O_TRUNC | os.O_RDWR)
    os.write(fd, b'\x00' * const.LOG_SIZE_IN_BYTES)
    buffer = mmap.mmap(fd, const.LOG_SIZE_IN_BYTES, mmap.MAP_SHARED, mmap.PROT_WRITE)
    return_code = _spawn(os.environ['SHELL'], partial(_read, buffer))
    sys.exit(return_code)
[ "def", "shell_logger", "(", "output", ")", ":", "if", "not", "os", ".", "environ", ".", "get", "(", "'SHELL'", ")", ":", "logs", ".", "warn", "(", "\"Shell logger doesn't support your platform.\"", ")", "sys", ".", "exit", "(", "1", ")", "fd", "=", "os", ".", "open", "(", "output", ",", "os", ".", "O_CREAT", "|", "os", ".", "O_TRUNC", "|", "os", ".", "O_RDWR", ")", "os", ".", "write", "(", "fd", ",", "b'\\x00'", "*", "const", ".", "LOG_SIZE_IN_BYTES", ")", "buffer", "=", "mmap", ".", "mmap", "(", "fd", ",", "const", ".", "LOG_SIZE_IN_BYTES", ",", "mmap", ".", "MAP_SHARED", ",", "mmap", ".", "PROT_WRITE", ")", "return_code", "=", "_spawn", "(", "os", ".", "environ", "[", "'SHELL'", "]", ",", "partial", "(", "_read", ",", "buffer", ")", ")", "sys", ".", "exit", "(", "return_code", ")" ]
Logs shell output to the `output`. Works like unix script command with `-f` flag.
[ "Logs", "shell", "output", "to", "the", "output", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/entrypoints/shell_logger.py#L64-L79
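A Unix-only sketch of the fd/mmap setup used above, with a hypothetical path and size standing in for `output` and `const.LOG_SIZE_IN_BYTES`:

import mmap
import os

SIZE = 4096  # hypothetical stand-in for const.LOG_SIZE_IN_BYTES
fd = os.open('/tmp/thefuck-example.log', os.O_CREAT | os.O_TRUNC | os.O_RDWR)
os.write(fd, b'\x00' * SIZE)  # pre-size the file so the whole region is mappable
buffer = mmap.mmap(fd, SIZE, mmap.MAP_SHARED, mmap.PROT_WRITE)
buffer[:5] = b'hello'         # MAP_SHARED: writes land in the backing file
buffer.close()
os.close(fd)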
21,304
nvbn/thefuck
thefuck/output_readers/__init__.py
get_output
def get_output(script, expanded):
    """Get output of the script.

    :param script: Console script.
    :type script: str
    :param expanded: Console script with expanded aliases.
    :type expanded: str
    :rtype: str

    """
    if shell_logger.is_available():
        return shell_logger.get_output(script)

    if settings.instant_mode:
        return read_log.get_output(script)
    else:
        return rerun.get_output(script, expanded)
python
def get_output(script, expanded):
    """Get output of the script.

    :param script: Console script.
    :type script: str
    :param expanded: Console script with expanded aliases.
    :type expanded: str
    :rtype: str

    """
    if shell_logger.is_available():
        return shell_logger.get_output(script)

    if settings.instant_mode:
        return read_log.get_output(script)
    else:
        return rerun.get_output(script, expanded)
[ "def", "get_output", "(", "script", ",", "expanded", ")", ":", "if", "shell_logger", ".", "is_available", "(", ")", ":", "return", "shell_logger", ".", "get_output", "(", "script", ")", "if", "settings", ".", "instant_mode", ":", "return", "read_log", ".", "get_output", "(", "script", ")", "else", ":", "return", "rerun", ".", "get_output", "(", "script", ",", "expanded", ")" ]
Get output of the script.

:param script: Console script.
:type script: str
:param expanded: Console script with expanded aliases.
:type expanded: str
:rtype: str
[ "Get", "output", "of", "the", "script", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/output_readers/__init__.py#L5-L20
21,305
nvbn/thefuck
thefuck/argument_parser.py
Parser._add_arguments
def _add_arguments(self):
    """Adds arguments to parser."""
    self._parser.add_argument(
        '-v', '--version',
        action='store_true',
        help="show program's version number and exit")
    self._parser.add_argument(
        '-a', '--alias',
        nargs='?',
        const=get_alias(),
        help='[custom-alias-name] prints alias for current shell')
    self._parser.add_argument(
        '-l', '--shell-logger',
        action='store',
        help='log shell output to the file')
    self._parser.add_argument(
        '--enable-experimental-instant-mode',
        action='store_true',
        help='enable experimental instant mode, use on your own risk')
    self._parser.add_argument(
        '-h', '--help',
        action='store_true',
        help='show this help message and exit')
    self._add_conflicting_arguments()
    self._parser.add_argument(
        '-d', '--debug',
        action='store_true',
        help='enable debug output')
    self._parser.add_argument(
        '--force-command',
        action='store',
        help=SUPPRESS)
    self._parser.add_argument(
        'command',
        nargs='*',
        help='command that should be fixed')
python
def _add_arguments(self):
    """Adds arguments to parser."""
    self._parser.add_argument(
        '-v', '--version',
        action='store_true',
        help="show program's version number and exit")
    self._parser.add_argument(
        '-a', '--alias',
        nargs='?',
        const=get_alias(),
        help='[custom-alias-name] prints alias for current shell')
    self._parser.add_argument(
        '-l', '--shell-logger',
        action='store',
        help='log shell output to the file')
    self._parser.add_argument(
        '--enable-experimental-instant-mode',
        action='store_true',
        help='enable experimental instant mode, use on your own risk')
    self._parser.add_argument(
        '-h', '--help',
        action='store_true',
        help='show this help message and exit')
    self._add_conflicting_arguments()
    self._parser.add_argument(
        '-d', '--debug',
        action='store_true',
        help='enable debug output')
    self._parser.add_argument(
        '--force-command',
        action='store',
        help=SUPPRESS)
    self._parser.add_argument(
        'command',
        nargs='*',
        help='command that should be fixed')
[ "def", "_add_arguments", "(", "self", ")", ":", "self", ".", "_parser", ".", "add_argument", "(", "'-v'", ",", "'--version'", ",", "action", "=", "'store_true'", ",", "help", "=", "\"show program's version number and exit\"", ")", "self", ".", "_parser", ".", "add_argument", "(", "'-a'", ",", "'--alias'", ",", "nargs", "=", "'?'", ",", "const", "=", "get_alias", "(", ")", ",", "help", "=", "'[custom-alias-name] prints alias for current shell'", ")", "self", ".", "_parser", ".", "add_argument", "(", "'-l'", ",", "'--shell-logger'", ",", "action", "=", "'store'", ",", "help", "=", "'log shell output to the file'", ")", "self", ".", "_parser", ".", "add_argument", "(", "'--enable-experimental-instant-mode'", ",", "action", "=", "'store_true'", ",", "help", "=", "'enable experimental instant mode, use on your own risk'", ")", "self", ".", "_parser", ".", "add_argument", "(", "'-h'", ",", "'--help'", ",", "action", "=", "'store_true'", ",", "help", "=", "'show this help message and exit'", ")", "self", ".", "_add_conflicting_arguments", "(", ")", "self", ".", "_parser", ".", "add_argument", "(", "'-d'", ",", "'--debug'", ",", "action", "=", "'store_true'", ",", "help", "=", "'enable debug output'", ")", "self", ".", "_parser", ".", "add_argument", "(", "'--force-command'", ",", "action", "=", "'store'", ",", "help", "=", "SUPPRESS", ")", "self", ".", "_parser", ".", "add_argument", "(", "'command'", ",", "nargs", "=", "'*'", ",", "help", "=", "'command that should be fixed'", ")" ]
Adds arguments to parser.
[ "Adds", "arguments", "to", "parser", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/argument_parser.py#L17-L52
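A standalone sketch of one detail from the parser above: `help=SUPPRESS` hides an option from `--help` output while it keeps working (the parsed value here is hypothetical):

from argparse import SUPPRESS, ArgumentParser

parser = ArgumentParser(add_help=False)  # add_help=False lets a parser define its own -h
parser.add_argument('--force-command', action='store', help=SUPPRESS)

args = parser.parse_args(['--force-command', 'ls'])
assert args.force_command == 'ls'  # works, but never shows up in the help text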
21,306
nvbn/thefuck
thefuck/argument_parser.py
Parser._add_conflicting_arguments
def _add_conflicting_arguments(self):
    """It's too dangerous to use `-y` and `-r` together."""
    group = self._parser.add_mutually_exclusive_group()
    group.add_argument(
        '-y', '--yes', '--yeah',
        action='store_true',
        help='execute fixed command without confirmation')
    group.add_argument(
        '-r', '--repeat',
        action='store_true',
        help='repeat on failure')
python
def _add_conflicting_arguments(self):
    """It's too dangerous to use `-y` and `-r` together."""
    group = self._parser.add_mutually_exclusive_group()
    group.add_argument(
        '-y', '--yes', '--yeah',
        action='store_true',
        help='execute fixed command without confirmation')
    group.add_argument(
        '-r', '--repeat',
        action='store_true',
        help='repeat on failure')
[ "def", "_add_conflicting_arguments", "(", "self", ")", ":", "group", "=", "self", ".", "_parser", ".", "add_mutually_exclusive_group", "(", ")", "group", ".", "add_argument", "(", "'-y'", ",", "'--yes'", ",", "'--yeah'", ",", "action", "=", "'store_true'", ",", "help", "=", "'execute fixed command without confirmation'", ")", "group", ".", "add_argument", "(", "'-r'", ",", "'--repeat'", ",", "action", "=", "'store_true'", ",", "help", "=", "'repeat on failure'", ")" ]
It's too dangerous to use `-y` and `-r` together.
[ "It", "s", "too", "dangerous", "to", "use", "-", "y", "and", "-", "r", "together", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/argument_parser.py#L54-L64
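A runnable sketch of the mutually-exclusive pattern above; passing both flags makes argparse exit with an error:

from argparse import ArgumentParser

parser = ArgumentParser()
group = parser.add_mutually_exclusive_group()
group.add_argument('-y', '--yes', action='store_true')
group.add_argument('-r', '--repeat', action='store_true')

assert parser.parse_args(['-y']).yes is True
# parser.parse_args(['-y', '-r']) prints a "not allowed with" error and exits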
21,307
nvbn/thefuck
thefuck/specific/npm.py
get_scripts
def get_scripts():
    """Get custom npm scripts."""
    proc = Popen(['npm', 'run-script'], stdout=PIPE)
    should_yeild = False
    for line in proc.stdout.readlines():
        line = line.decode()
        if 'available via `npm run-script`:' in line:
            should_yeild = True
            continue

        if should_yeild and re.match(r'^ [^ ]+', line):
            yield line.strip().split(' ')[0]
python
def get_scripts():
    """Get custom npm scripts."""
    proc = Popen(['npm', 'run-script'], stdout=PIPE)
    should_yeild = False
    for line in proc.stdout.readlines():
        line = line.decode()
        if 'available via `npm run-script`:' in line:
            should_yeild = True
            continue

        if should_yeild and re.match(r'^ [^ ]+', line):
            yield line.strip().split(' ')[0]
[ "def", "get_scripts", "(", ")", ":", "proc", "=", "Popen", "(", "[", "'npm'", ",", "'run-script'", "]", ",", "stdout", "=", "PIPE", ")", "should_yeild", "=", "False", "for", "line", "in", "proc", ".", "stdout", ".", "readlines", "(", ")", ":", "line", "=", "line", ".", "decode", "(", ")", "if", "'available via `npm run-script`:'", "in", "line", ":", "should_yeild", "=", "True", "continue", "if", "should_yeild", "and", "re", ".", "match", "(", "r'^ [^ ]+'", ",", "line", ")", ":", "yield", "line", ".", "strip", "(", ")", ".", "split", "(", "' '", ")", "[", "0", "]" ]
Get custom npm scripts.
[ "Get", "custom", "npm", "scripts", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/specific/npm.py#L10-L21
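A self-contained re-run of the scanning loop above on hypothetical `npm run-script` output; nothing is yielded until the marker line has been seen:

import re

lines = ['Scripts available in my-app via `npm run-script`:',
         ' build',
         '   webpack --mode production']  # continuation lines are indented deeper
should_yield = False
scripts = []
for line in lines:
    if 'available via `npm run-script`:' in line:
        should_yield = True
        continue
    if should_yield and re.match(r'^ [^ ]+', line):
        scripts.append(line.strip().split(' ')[0])
assert scripts == ['build']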
21,308
nvbn/thefuck
thefuck/conf.py
Settings.init
def init(self, args=None):
    """Fills `settings` with values from `settings.py` and env."""
    from .logs import exception

    self._setup_user_dir()
    self._init_settings_file()

    try:
        self.update(self._settings_from_file())
    except Exception:
        exception("Can't load settings from file", sys.exc_info())

    try:
        self.update(self._settings_from_env())
    except Exception:
        exception("Can't load settings from env", sys.exc_info())

    self.update(self._settings_from_args(args))
python
def init(self, args=None):
    """Fills `settings` with values from `settings.py` and env."""
    from .logs import exception

    self._setup_user_dir()
    self._init_settings_file()

    try:
        self.update(self._settings_from_file())
    except Exception:
        exception("Can't load settings from file", sys.exc_info())

    try:
        self.update(self._settings_from_env())
    except Exception:
        exception("Can't load settings from env", sys.exc_info())

    self.update(self._settings_from_args(args))
[ "def", "init", "(", "self", ",", "args", "=", "None", ")", ":", "from", ".", "logs", "import", "exception", "self", ".", "_setup_user_dir", "(", ")", "self", ".", "_init_settings_file", "(", ")", "try", ":", "self", ".", "update", "(", "self", ".", "_settings_from_file", "(", ")", ")", "except", "Exception", ":", "exception", "(", "\"Can't load settings from file\"", ",", "sys", ".", "exc_info", "(", ")", ")", "try", ":", "self", ".", "update", "(", "self", ".", "_settings_from_env", "(", ")", ")", "except", "Exception", ":", "exception", "(", "\"Can't load settings from env\"", ",", "sys", ".", "exc_info", "(", ")", ")", "self", ".", "update", "(", "self", ".", "_settings_from_args", "(", "args", ")", ")" ]
Fills `settings` with values from `settings.py` and env.
[ "Fills", "settings", "with", "values", "from", "settings", ".", "py", "and", "env", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L17-L34
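The layering above is plain `dict.update` precedence, so later sources win; a sketch with hypothetical values:

settings = {}
settings.update({'debug': False, 'require_confirmation': True})  # from settings.py
settings.update({'debug': True})                                 # from the environment
settings.update({})                                              # from CLI args (none set)
assert settings == {'debug': True, 'require_confirmation': True}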
21,309
nvbn/thefuck
thefuck/conf.py
Settings._get_user_dir_path
def _get_user_dir_path(self):
    """Returns Path object representing the user config resource"""
    xdg_config_home = os.environ.get('XDG_CONFIG_HOME', '~/.config')
    user_dir = Path(xdg_config_home, 'thefuck').expanduser()
    legacy_user_dir = Path('~', '.thefuck').expanduser()

    # For backward compatibility use legacy '~/.thefuck' if it exists:
    if legacy_user_dir.is_dir():
        warn(u'Config path {} is deprecated. Please move to {}'.format(
            legacy_user_dir, user_dir))
        return legacy_user_dir
    else:
        return user_dir
python
def _get_user_dir_path(self):
    """Returns Path object representing the user config resource"""
    xdg_config_home = os.environ.get('XDG_CONFIG_HOME', '~/.config')
    user_dir = Path(xdg_config_home, 'thefuck').expanduser()
    legacy_user_dir = Path('~', '.thefuck').expanduser()

    # For backward compatibility use legacy '~/.thefuck' if it exists:
    if legacy_user_dir.is_dir():
        warn(u'Config path {} is deprecated. Please move to {}'.format(
            legacy_user_dir, user_dir))
        return legacy_user_dir
    else:
        return user_dir
[ "def", "_get_user_dir_path", "(", "self", ")", ":", "xdg_config_home", "=", "os", ".", "environ", ".", "get", "(", "'XDG_CONFIG_HOME'", ",", "'~/.config'", ")", "user_dir", "=", "Path", "(", "xdg_config_home", ",", "'thefuck'", ")", ".", "expanduser", "(", ")", "legacy_user_dir", "=", "Path", "(", "'~'", ",", "'.thefuck'", ")", ".", "expanduser", "(", ")", "# For backward compatibility use legacy '~/.thefuck' if it exists:", "if", "legacy_user_dir", ".", "is_dir", "(", ")", ":", "warn", "(", "u'Config path {} is deprecated. Please move to {}'", ".", "format", "(", "legacy_user_dir", ",", "user_dir", ")", ")", "return", "legacy_user_dir", "else", ":", "return", "user_dir" ]
Returns Path object representing the user config resource
[ "Returns", "Path", "object", "representing", "the", "user", "config", "resource" ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L44-L56
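How the XDG fallback above resolves, shown for a hypothetical $HOME with XDG_CONFIG_HOME unset:

import os
from pathlib import Path

xdg_config_home = os.environ.get('XDG_CONFIG_HOME', '~/.config')
user_dir = Path(xdg_config_home, 'thefuck').expanduser()
# with HOME=/home/alice and no XDG_CONFIG_HOME:
#   user_dir == Path('/home/alice/.config/thefuck')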
21,310
nvbn/thefuck
thefuck/conf.py
Settings._setup_user_dir
def _setup_user_dir(self):
    """Returns user config dir, create it when it doesn't exist."""
    user_dir = self._get_user_dir_path()

    rules_dir = user_dir.joinpath('rules')
    if not rules_dir.is_dir():
        rules_dir.mkdir(parents=True)
    self.user_dir = user_dir
python
def _setup_user_dir(self):
    """Returns user config dir, create it when it doesn't exist."""
    user_dir = self._get_user_dir_path()

    rules_dir = user_dir.joinpath('rules')
    if not rules_dir.is_dir():
        rules_dir.mkdir(parents=True)
    self.user_dir = user_dir
[ "def", "_setup_user_dir", "(", "self", ")", ":", "user_dir", "=", "self", ".", "_get_user_dir_path", "(", ")", "rules_dir", "=", "user_dir", ".", "joinpath", "(", "'rules'", ")", "if", "not", "rules_dir", ".", "is_dir", "(", ")", ":", "rules_dir", ".", "mkdir", "(", "parents", "=", "True", ")", "self", ".", "user_dir", "=", "user_dir" ]
Returns user config dir, create it when it doesn't exist.
[ "Returns", "user", "config", "dir", "create", "it", "when", "it", "doesn", "t", "exist", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L58-L65
21,311
nvbn/thefuck
thefuck/conf.py
Settings._settings_from_file
def _settings_from_file(self):
    """Loads settings from file."""
    settings = load_source(
        'settings', text_type(self.user_dir.joinpath('settings.py')))
    return {key: getattr(settings, key)
            for key in const.DEFAULT_SETTINGS.keys()
            if hasattr(settings, key)}
python
def _settings_from_file(self):
    """Loads settings from file."""
    settings = load_source(
        'settings', text_type(self.user_dir.joinpath('settings.py')))
    return {key: getattr(settings, key)
            for key in const.DEFAULT_SETTINGS.keys()
            if hasattr(settings, key)}
[ "def", "_settings_from_file", "(", "self", ")", ":", "settings", "=", "load_source", "(", "'settings'", ",", "text_type", "(", "self", ".", "user_dir", ".", "joinpath", "(", "'settings.py'", ")", ")", ")", "return", "{", "key", ":", "getattr", "(", "settings", ",", "key", ")", "for", "key", "in", "const", ".", "DEFAULT_SETTINGS", ".", "keys", "(", ")", "if", "hasattr", "(", "settings", ",", "key", ")", "}" ]
Loads settings from file.
[ "Loads", "settings", "from", "file", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L67-L73
21,312
nvbn/thefuck
thefuck/conf.py
Settings._rules_from_env
def _rules_from_env(self, val):
    """Transforms rules list from env-string to python."""
    val = val.split(':')
    if 'DEFAULT_RULES' in val:
        val = const.DEFAULT_RULES + [rule for rule in val
                                     if rule != 'DEFAULT_RULES']
    return val
python
def _rules_from_env(self, val):
    """Transforms rules list from env-string to python."""
    val = val.split(':')
    if 'DEFAULT_RULES' in val:
        val = const.DEFAULT_RULES + [rule for rule in val
                                     if rule != 'DEFAULT_RULES']
    return val
[ "def", "_rules_from_env", "(", "self", ",", "val", ")", ":", "val", "=", "val", ".", "split", "(", "':'", ")", "if", "'DEFAULT_RULES'", "in", "val", ":", "val", "=", "const", ".", "DEFAULT_RULES", "+", "[", "rule", "for", "rule", "in", "val", "if", "rule", "!=", "'DEFAULT_RULES'", "]", "return", "val" ]
Transforms rules list from env-string to python.
[ "Transforms", "rules", "list", "from", "env", "-", "string", "to", "python", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L75-L80
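Tracing the rule-list expansion above with a hypothetical stand-in for `const.DEFAULT_RULES`:

DEFAULT_RULES = ['cd_correction', 'git_push']  # hypothetical built-in rule list

val = 'DEFAULT_RULES:my_rule'.split(':')
if 'DEFAULT_RULES' in val:
    # the placeholder expands to the built-ins, extras are appended
    val = DEFAULT_RULES + [rule for rule in val if rule != 'DEFAULT_RULES']
assert val == ['cd_correction', 'git_push', 'my_rule']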
21,313
nvbn/thefuck
thefuck/conf.py
Settings._priority_from_env
def _priority_from_env(self, val):
    """Gets priority pairs from env."""
    for part in val.split(':'):
        try:
            rule, priority = part.split('=')
            yield rule, int(priority)
        except ValueError:
            continue
python
def _priority_from_env(self, val):
    """Gets priority pairs from env."""
    for part in val.split(':'):
        try:
            rule, priority = part.split('=')
            yield rule, int(priority)
        except ValueError:
            continue
[ "def", "_priority_from_env", "(", "self", ",", "val", ")", ":", "for", "part", "in", "val", ".", "split", "(", "':'", ")", ":", "try", ":", "rule", ",", "priority", "=", "part", ".", "split", "(", "'='", ")", "yield", "rule", ",", "int", "(", "priority", ")", "except", "ValueError", ":", "continue" ]
Gets priority pairs from env.
[ "Gets", "priority", "pairs", "from", "env", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L82-L89
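The generator above silently skips malformed pairs; a self-contained copy shows both failure modes:

def priority_from_env(val):
    for part in val.split(':'):
        try:
            rule, priority = part.split('=')
            yield rule, int(priority)
        except ValueError:  # raised for a missing '=' or a non-integer priority
            continue

assert dict(priority_from_env('no_command=9999:broken:x=low')) == {'no_command': 9999}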
21,314
nvbn/thefuck
thefuck/conf.py
Settings._val_from_env
def _val_from_env(self, env, attr):
    """Transforms env-strings to python."""
    val = os.environ[env]
    if attr in ('rules', 'exclude_rules'):
        return self._rules_from_env(val)
    elif attr == 'priority':
        return dict(self._priority_from_env(val))
    elif attr in ('wait_command', 'history_limit', 'wait_slow_command',
                  'num_close_matches'):
        return int(val)
    elif attr in ('require_confirmation', 'no_colors', 'debug',
                  'alter_history', 'instant_mode'):
        return val.lower() == 'true'
    elif attr == 'slow_commands':
        return val.split(':')
    else:
        return val
python
def _val_from_env(self, env, attr):
    """Transforms env-strings to python."""
    val = os.environ[env]
    if attr in ('rules', 'exclude_rules'):
        return self._rules_from_env(val)
    elif attr == 'priority':
        return dict(self._priority_from_env(val))
    elif attr in ('wait_command', 'history_limit', 'wait_slow_command',
                  'num_close_matches'):
        return int(val)
    elif attr in ('require_confirmation', 'no_colors', 'debug',
                  'alter_history', 'instant_mode'):
        return val.lower() == 'true'
    elif attr == 'slow_commands':
        return val.split(':')
    else:
        return val
[ "def", "_val_from_env", "(", "self", ",", "env", ",", "attr", ")", ":", "val", "=", "os", ".", "environ", "[", "env", "]", "if", "attr", "in", "(", "'rules'", ",", "'exclude_rules'", ")", ":", "return", "self", ".", "_rules_from_env", "(", "val", ")", "elif", "attr", "==", "'priority'", ":", "return", "dict", "(", "self", ".", "_priority_from_env", "(", "val", ")", ")", "elif", "attr", "in", "(", "'wait_command'", ",", "'history_limit'", ",", "'wait_slow_command'", ",", "'num_close_matches'", ")", ":", "return", "int", "(", "val", ")", "elif", "attr", "in", "(", "'require_confirmation'", ",", "'no_colors'", ",", "'debug'", ",", "'alter_history'", ",", "'instant_mode'", ")", ":", "return", "val", ".", "lower", "(", ")", "==", "'true'", "elif", "attr", "==", "'slow_commands'", ":", "return", "val", ".", "split", "(", "':'", ")", "else", ":", "return", "val" ]
Transforms env-strings to python.
[ "Transforms", "env", "-", "strings", "to", "python", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L91-L107
21,315
nvbn/thefuck
thefuck/conf.py
Settings._settings_from_env
def _settings_from_env(self):
    """Loads settings from env."""
    return {attr: self._val_from_env(env, attr)
            for env, attr in const.ENV_TO_ATTR.items()
            if env in os.environ}
python
def _settings_from_env(self):
    """Loads settings from env."""
    return {attr: self._val_from_env(env, attr)
            for env, attr in const.ENV_TO_ATTR.items()
            if env in os.environ}
[ "def", "_settings_from_env", "(", "self", ")", ":", "return", "{", "attr", ":", "self", ".", "_val_from_env", "(", "env", ",", "attr", ")", "for", "env", ",", "attr", "in", "const", ".", "ENV_TO_ATTR", ".", "items", "(", ")", "if", "env", "in", "os", ".", "environ", "}" ]
Loads settings from env.
[ "Loads", "settings", "from", "env", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L109-L113
21,316
nvbn/thefuck
thefuck/conf.py
Settings._settings_from_args
def _settings_from_args(self, args):
    """Loads settings from args."""
    if not args:
        return {}

    from_args = {}
    if args.yes:
        from_args['require_confirmation'] = not args.yes
    if args.debug:
        from_args['debug'] = args.debug
    if args.repeat:
        from_args['repeat'] = args.repeat
    return from_args
python
def _settings_from_args(self, args):
    """Loads settings from args."""
    if not args:
        return {}

    from_args = {}
    if args.yes:
        from_args['require_confirmation'] = not args.yes
    if args.debug:
        from_args['debug'] = args.debug
    if args.repeat:
        from_args['repeat'] = args.repeat
    return from_args
[ "def", "_settings_from_args", "(", "self", ",", "args", ")", ":", "if", "not", "args", ":", "return", "{", "}", "from_args", "=", "{", "}", "if", "args", ".", "yes", ":", "from_args", "[", "'require_confirmation'", "]", "=", "not", "args", ".", "yes", "if", "args", ".", "debug", ":", "from_args", "[", "'debug'", "]", "=", "args", ".", "debug", "if", "args", ".", "repeat", ":", "from_args", "[", "'repeat'", "]", "=", "args", ".", "repeat", "return", "from_args" ]
Loads settings from args.
[ "Loads", "settings", "from", "args", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/conf.py#L115-L127
21,317
nvbn/thefuck
thefuck/rules/ln_s_order.py
_get_destination
def _get_destination(script_parts):
    """When arguments order is wrong first argument will be destination."""
    for part in script_parts:
        if part not in {'ln', '-s', '--symbolic'} and os.path.exists(part):
            return part
python
def _get_destination(script_parts):
    """When arguments order is wrong first argument will be destination."""
    for part in script_parts:
        if part not in {'ln', '-s', '--symbolic'} and os.path.exists(part):
            return part
[ "def", "_get_destination", "(", "script_parts", ")", ":", "for", "part", "in", "script_parts", ":", "if", "part", "not", "in", "{", "'ln'", ",", "'-s'", ",", "'--symbolic'", "}", "and", "os", ".", "path", ".", "exists", "(", "part", ")", ":", "return", "part" ]
When arguments order is wrong first argument will be destination.
[ "When", "arguments", "order", "is", "wrong", "first", "argument", "will", "be", "destination", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/rules/ln_s_order.py#L5-L9
21,318
nvbn/thefuck
thefuck/specific/sudo.py
sudo_support
def sudo_support(fn, command):
    """Removes sudo before calling fn and adds it after."""
    if not command.script.startswith('sudo '):
        return fn(command)

    result = fn(command.update(script=command.script[5:]))

    if result and isinstance(result, six.string_types):
        return u'sudo {}'.format(result)
    elif isinstance(result, list):
        return [u'sudo {}'.format(x) for x in result]
    else:
        return result
python
def sudo_support(fn, command):
    """Removes sudo before calling fn and adds it after."""
    if not command.script.startswith('sudo '):
        return fn(command)

    result = fn(command.update(script=command.script[5:]))

    if result and isinstance(result, six.string_types):
        return u'sudo {}'.format(result)
    elif isinstance(result, list):
        return [u'sudo {}'.format(x) for x in result]
    else:
        return result
[ "def", "sudo_support", "(", "fn", ",", "command", ")", ":", "if", "not", "command", ".", "script", ".", "startswith", "(", "'sudo '", ")", ":", "return", "fn", "(", "command", ")", "result", "=", "fn", "(", "command", ".", "update", "(", "script", "=", "command", ".", "script", "[", "5", ":", "]", ")", ")", "if", "result", "and", "isinstance", "(", "result", ",", "six", ".", "string_types", ")", ":", "return", "u'sudo {}'", ".", "format", "(", "result", ")", "elif", "isinstance", "(", "result", ",", "list", ")", ":", "return", "[", "u'sudo {}'", ".", "format", "(", "x", ")", "for", "x", "in", "result", "]", "else", ":", "return", "result" ]
Removes sudo before calling fn and adds it after.
[ "Removes", "sudo", "before", "calling", "fn", "and", "adds", "it", "after", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/specific/sudo.py#L6-L18
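The prefix arithmetic above in isolation; `'sudo '` is five characters, so `[5:]` strips exactly the prefix:

script = 'sudo apt update'
assert script.startswith('sudo ')
assert script[5:] == 'apt update'
assert u'sudo {}'.format(script[5:]) == script  # re-prefixing round-trips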
21,319
nvbn/thefuck
thefuck/output_readers/rerun.py
_kill_process
def _kill_process(proc):
    """Tries to kill the process otherwise just logs a debug message, the
    process will be killed when thefuck terminates.

    :type proc: Process

    """
    try:
        proc.kill()
    except AccessDenied:
        logs.debug(u'Rerun: process PID {} ({}) could not be terminated'.format(
            proc.pid, proc.exe()))
python
def _kill_process(proc):
    """Tries to kill the process otherwise just logs a debug message, the
    process will be killed when thefuck terminates.

    :type proc: Process

    """
    try:
        proc.kill()
    except AccessDenied:
        logs.debug(u'Rerun: process PID {} ({}) could not be terminated'.format(
            proc.pid, proc.exe()))
[ "def", "_kill_process", "(", "proc", ")", ":", "try", ":", "proc", ".", "kill", "(", ")", "except", "AccessDenied", ":", "logs", ".", "debug", "(", "u'Rerun: process PID {} ({}) could not be terminated'", ".", "format", "(", "proc", ".", "pid", ",", "proc", ".", "exe", "(", ")", ")", ")" ]
Tries to kill the process otherwise just logs a debug message, the
process will be killed when thefuck terminates.

:type proc: Process
[ "Tries", "to", "kill", "the", "process", "otherwise", "just", "logs", "a", "debug", "message", "the", "process", "will", "be", "killed", "when", "thefuck", "terminates", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/output_readers/rerun.py#L9-L20
21,320
nvbn/thefuck
thefuck/output_readers/rerun.py
_wait_output
def _wait_output(popen, is_slow):
    """Returns `True` if we can get output of the command in the
    `settings.wait_command` time.

    Command will be killed if it wasn't finished in the time.

    :type popen: Popen
    :rtype: bool

    """
    proc = Process(popen.pid)
    try:
        proc.wait(settings.wait_slow_command if is_slow
                  else settings.wait_command)
        return True
    except TimeoutExpired:
        for child in proc.children(recursive=True):
            _kill_process(child)
        _kill_process(proc)
        return False
python
def _wait_output(popen, is_slow):
    """Returns `True` if we can get output of the command in the
    `settings.wait_command` time.

    Command will be killed if it wasn't finished in the time.

    :type popen: Popen
    :rtype: bool

    """
    proc = Process(popen.pid)
    try:
        proc.wait(settings.wait_slow_command if is_slow
                  else settings.wait_command)
        return True
    except TimeoutExpired:
        for child in proc.children(recursive=True):
            _kill_process(child)
        _kill_process(proc)
        return False
[ "def", "_wait_output", "(", "popen", ",", "is_slow", ")", ":", "proc", "=", "Process", "(", "popen", ".", "pid", ")", "try", ":", "proc", ".", "wait", "(", "settings", ".", "wait_slow_command", "if", "is_slow", "else", "settings", ".", "wait_command", ")", "return", "True", "except", "TimeoutExpired", ":", "for", "child", "in", "proc", ".", "children", "(", "recursive", "=", "True", ")", ":", "_kill_process", "(", "child", ")", "_kill_process", "(", "proc", ")", "return", "False" ]
Returns `True` if we can get output of the command in the
`settings.wait_command` time.

Command will be killed if it wasn't finished in the time.

:type popen: Popen
:rtype: bool
[ "Returns", "True", "if", "we", "can", "get", "output", "of", "the", "command", "in", "the", "settings", ".", "wait_command", "time", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/output_readers/rerun.py#L23-L42
21,321
nvbn/thefuck
thefuck/output_readers/read_log.py
get_output
def get_output(script):
    """Reads script output from log.

    :type script: str
    :rtype: str | None

    """
    if six.PY2:
        logs.warn('Experimental instant mode is Python 3+ only')
        return None

    if 'THEFUCK_OUTPUT_LOG' not in os.environ:
        logs.warn("Output log isn't specified")
        return None

    if const.USER_COMMAND_MARK not in os.environ.get('PS1', ''):
        logs.warn(
            "PS1 doesn't contain user command mark, please ensure "
            "that PS1 is not changed after The Fuck alias initialization")
        return None

    try:
        with logs.debug_time(u'Read output from log'):
            fd = os.open(os.environ['THEFUCK_OUTPUT_LOG'], os.O_RDONLY)
            buffer = mmap.mmap(fd, const.LOG_SIZE_IN_BYTES, mmap.MAP_SHARED, mmap.PROT_READ)
            _skip_old_lines(buffer)
            lines = _get_output_lines(script, buffer)
            output = '\n'.join(lines).strip()
            logs.debug(u'Received output: {}'.format(output))
        return output
    except OSError:
        logs.warn("Can't read output log")
        return None
    except ScriptNotInLog:
        logs.warn("Script not found in output log")
        return None
python
def get_output(script):
    """Reads script output from log.

    :type script: str
    :rtype: str | None

    """
    if six.PY2:
        logs.warn('Experimental instant mode is Python 3+ only')
        return None

    if 'THEFUCK_OUTPUT_LOG' not in os.environ:
        logs.warn("Output log isn't specified")
        return None

    if const.USER_COMMAND_MARK not in os.environ.get('PS1', ''):
        logs.warn(
            "PS1 doesn't contain user command mark, please ensure "
            "that PS1 is not changed after The Fuck alias initialization")
        return None

    try:
        with logs.debug_time(u'Read output from log'):
            fd = os.open(os.environ['THEFUCK_OUTPUT_LOG'], os.O_RDONLY)
            buffer = mmap.mmap(fd, const.LOG_SIZE_IN_BYTES, mmap.MAP_SHARED, mmap.PROT_READ)
            _skip_old_lines(buffer)
            lines = _get_output_lines(script, buffer)
            output = '\n'.join(lines).strip()
            logs.debug(u'Received output: {}'.format(output))
        return output
    except OSError:
        logs.warn("Can't read output log")
        return None
    except ScriptNotInLog:
        logs.warn("Script not found in output log")
        return None
[ "def", "get_output", "(", "script", ")", ":", "if", "six", ".", "PY2", ":", "logs", ".", "warn", "(", "'Experimental instant mode is Python 3+ only'", ")", "return", "None", "if", "'THEFUCK_OUTPUT_LOG'", "not", "in", "os", ".", "environ", ":", "logs", ".", "warn", "(", "\"Output log isn't specified\"", ")", "return", "None", "if", "const", ".", "USER_COMMAND_MARK", "not", "in", "os", ".", "environ", ".", "get", "(", "'PS1'", ",", "''", ")", ":", "logs", ".", "warn", "(", "\"PS1 doesn't contain user command mark, please ensure \"", "\"that PS1 is not changed after The Fuck alias initialization\"", ")", "return", "None", "try", ":", "with", "logs", ".", "debug_time", "(", "u'Read output from log'", ")", ":", "fd", "=", "os", ".", "open", "(", "os", ".", "environ", "[", "'THEFUCK_OUTPUT_LOG'", "]", ",", "os", ".", "O_RDONLY", ")", "buffer", "=", "mmap", ".", "mmap", "(", "fd", ",", "const", ".", "LOG_SIZE_IN_BYTES", ",", "mmap", ".", "MAP_SHARED", ",", "mmap", ".", "PROT_READ", ")", "_skip_old_lines", "(", "buffer", ")", "lines", "=", "_get_output_lines", "(", "script", ",", "buffer", ")", "output", "=", "'\\n'", ".", "join", "(", "lines", ")", ".", "strip", "(", ")", "logs", ".", "debug", "(", "u'Received output: {}'", ".", "format", "(", "output", ")", ")", "return", "output", "except", "OSError", ":", "logs", ".", "warn", "(", "\"Can't read output log\"", ")", "return", "None", "except", "ScriptNotInLog", ":", "logs", ".", "warn", "(", "\"Script not found in output log\"", ")", "return", "None" ]
Reads script output from log.

:type script: str
:rtype: str | None
[ "Reads", "script", "output", "from", "log", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/output_readers/read_log.py#L70-L105
21,322
nvbn/thefuck
thefuck/specific/archlinux.py
get_pkgfile
def get_pkgfile(command):
    """ Gets the packages that provide the given command using `pkgfile`.

    If the command is of the form `sudo foo`, searches for the `foo` command
    instead.
    """
    try:
        command = command.strip()

        if command.startswith('sudo '):
            command = command[5:]

        command = command.split(" ")[0]

        packages = subprocess.check_output(
            ['pkgfile', '-b', '-v', command],
            universal_newlines=True, stderr=utils.DEVNULL
        ).splitlines()

        return [package.split()[0] for package in packages]
    except subprocess.CalledProcessError as err:
        if err.returncode == 1 and err.output == "":
            return []
        else:
            raise err
python
def get_pkgfile(command):
    """ Gets the packages that provide the given command using `pkgfile`.

    If the command is of the form `sudo foo`, searches for the `foo` command
    instead.
    """
    try:
        command = command.strip()

        if command.startswith('sudo '):
            command = command[5:]

        command = command.split(" ")[0]

        packages = subprocess.check_output(
            ['pkgfile', '-b', '-v', command],
            universal_newlines=True, stderr=utils.DEVNULL
        ).splitlines()

        return [package.split()[0] for package in packages]
    except subprocess.CalledProcessError as err:
        if err.returncode == 1 and err.output == "":
            return []
        else:
            raise err
[ "def", "get_pkgfile", "(", "command", ")", ":", "try", ":", "command", "=", "command", ".", "strip", "(", ")", "if", "command", ".", "startswith", "(", "'sudo '", ")", ":", "command", "=", "command", "[", "5", ":", "]", "command", "=", "command", ".", "split", "(", "\" \"", ")", "[", "0", "]", "packages", "=", "subprocess", ".", "check_output", "(", "[", "'pkgfile'", ",", "'-b'", ",", "'-v'", ",", "command", "]", ",", "universal_newlines", "=", "True", ",", "stderr", "=", "utils", ".", "DEVNULL", ")", ".", "splitlines", "(", ")", "return", "[", "package", ".", "split", "(", ")", "[", "0", "]", "for", "package", "in", "packages", "]", "except", "subprocess", ".", "CalledProcessError", "as", "err", ":", "if", "err", ".", "returncode", "==", "1", "and", "err", ".", "output", "==", "\"\"", ":", "return", "[", "]", "else", ":", "raise", "err" ]
Gets the packages that provide the given command using `pkgfile`. If the command is of the form `sudo foo`, searches for the `foo` command instead.
[ "Gets", "the", "packages", "that", "provide", "the", "given", "command", "using", "pkgfile", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/specific/archlinux.py#L7-L31
21,323
nvbn/thefuck
thefuck/rules/cd_correction.py
_get_sub_dirs
def _get_sub_dirs(parent):
    """Returns a list of the child directories of the given parent directory"""
    return [child for child in os.listdir(parent)
            if os.path.isdir(os.path.join(parent, child))]
python
def _get_sub_dirs(parent):
    """Returns a list of the child directories of the given parent directory"""
    return [child for child in os.listdir(parent)
            if os.path.isdir(os.path.join(parent, child))]
[ "def", "_get_sub_dirs", "(", "parent", ")", ":", "return", "[", "child", "for", "child", "in", "os", ".", "listdir", "(", "parent", ")", "if", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "parent", ",", "child", ")", ")", "]" ]
Returns a list of the child directories of the given parent directory
[ "Returns", "a", "list", "of", "the", "child", "directories", "of", "the", "given", "parent", "directory" ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/rules/cd_correction.py#L14-L16
21,324
nvbn/thefuck
thefuck/types.py
Command.update
def update(self, **kwargs):
    """Returns new command with replaced fields.

    :rtype: Command

    """
    kwargs.setdefault('script', self.script)
    kwargs.setdefault('output', self.output)
    return Command(**kwargs)
python
def update(self, **kwargs):
    """Returns new command with replaced fields.

    :rtype: Command

    """
    kwargs.setdefault('script', self.script)
    kwargs.setdefault('output', self.output)
    return Command(**kwargs)
[ "def", "update", "(", "self", ",", "*", "*", "kwargs", ")", ":", "kwargs", ".", "setdefault", "(", "'script'", ",", "self", ".", "script", ")", "kwargs", ".", "setdefault", "(", "'output'", ",", "self", ".", "output", ")", "return", "Command", "(", "*", "*", "kwargs", ")" ]
Returns new command with replaced fields.

:rtype: Command
[ "Returns", "new", "command", "with", "replaced", "fields", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/types.py#L58-L66
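`dict.setdefault` drives the copy-with-overrides pattern above: caller-supplied fields survive, missing ones are backfilled from the old command (values here are hypothetical):

kwargs = {'script': 'git push --force'}                    # caller override
kwargs.setdefault('script', 'git push')                    # already present: kept
kwargs.setdefault('output', 'rejected: non-fast-forward')  # missing: backfilled
assert kwargs['script'] == 'git push --force'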
21,325
nvbn/thefuck
thefuck/types.py
Command.from_raw_script
def from_raw_script(cls, raw_script):
    """Creates instance of `Command` from a list of script parts.

    :type raw_script: [basestring]
    :rtype: Command
    :raises: EmptyCommand

    """
    script = format_raw_script(raw_script)
    if not script:
        raise EmptyCommand

    expanded = shell.from_shell(script)
    output = get_output(script, expanded)
    return cls(expanded, output)
python
def from_raw_script(cls, raw_script):
    """Creates instance of `Command` from a list of script parts.

    :type raw_script: [basestring]
    :rtype: Command
    :raises: EmptyCommand

    """
    script = format_raw_script(raw_script)
    if not script:
        raise EmptyCommand

    expanded = shell.from_shell(script)
    output = get_output(script, expanded)
    return cls(expanded, output)
[ "def", "from_raw_script", "(", "cls", ",", "raw_script", ")", ":", "script", "=", "format_raw_script", "(", "raw_script", ")", "if", "not", "script", ":", "raise", "EmptyCommand", "expanded", "=", "shell", ".", "from_shell", "(", "script", ")", "output", "=", "get_output", "(", "script", ",", "expanded", ")", "return", "cls", "(", "expanded", ",", "output", ")" ]
Creates instance of `Command` from a list of script parts.

:type raw_script: [basestring]
:rtype: Command
:raises: EmptyCommand
[ "Creates", "instance", "of", "Command", "from", "a", "list", "of", "script", "parts", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/types.py#L69-L83
21,326
nvbn/thefuck
thefuck/types.py
Rule.from_path
def from_path(cls, path):
    """Creates rule instance from path.

    :type path: pathlib.Path
    :rtype: Rule

    """
    name = path.name[:-3]
    with logs.debug_time(u'Importing rule: {};'.format(name)):
        rule_module = load_source(name, str(path))
        priority = getattr(rule_module, 'priority', DEFAULT_PRIORITY)
    return cls(name, rule_module.match,
               rule_module.get_new_command,
               getattr(rule_module, 'enabled_by_default', True),
               getattr(rule_module, 'side_effect', None),
               settings.priority.get(name, priority),
               getattr(rule_module, 'requires_output', True))
python
def from_path(cls, path):
    """Creates rule instance from path.

    :type path: pathlib.Path
    :rtype: Rule

    """
    name = path.name[:-3]
    with logs.debug_time(u'Importing rule: {};'.format(name)):
        rule_module = load_source(name, str(path))
        priority = getattr(rule_module, 'priority', DEFAULT_PRIORITY)
    return cls(name, rule_module.match,
               rule_module.get_new_command,
               getattr(rule_module, 'enabled_by_default', True),
               getattr(rule_module, 'side_effect', None),
               settings.priority.get(name, priority),
               getattr(rule_module, 'requires_output', True))
[ "def", "from_path", "(", "cls", ",", "path", ")", ":", "name", "=", "path", ".", "name", "[", ":", "-", "3", "]", "with", "logs", ".", "debug_time", "(", "u'Importing rule: {};'", ".", "format", "(", "name", ")", ")", ":", "rule_module", "=", "load_source", "(", "name", ",", "str", "(", "path", ")", ")", "priority", "=", "getattr", "(", "rule_module", ",", "'priority'", ",", "DEFAULT_PRIORITY", ")", "return", "cls", "(", "name", ",", "rule_module", ".", "match", ",", "rule_module", ".", "get_new_command", ",", "getattr", "(", "rule_module", ",", "'enabled_by_default'", ",", "True", ")", ",", "getattr", "(", "rule_module", ",", "'side_effect'", ",", "None", ")", ",", "settings", ".", "priority", ".", "get", "(", "name", ",", "priority", ")", ",", "getattr", "(", "rule_module", ",", "'requires_output'", ",", "True", ")", ")" ]
Creates rule instance from path.

:type path: pathlib.Path
:rtype: Rule
[ "Creates", "rule", "instance", "from", "path", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/types.py#L131-L147
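The optional rule attributes above fall back through `getattr` defaults; a sketch with an empty, hypothetical rule module (1000 stands in for DEFAULT_PRIORITY):

import types

rule_module = types.ModuleType('noop_rule')  # hypothetical module defining nothing
priority = getattr(rule_module, 'priority', 1000)
enabled = getattr(rule_module, 'enabled_by_default', True)
assert (priority, enabled) == (1000, True)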
21,327
nvbn/thefuck
thefuck/types.py
Rule.is_enabled
def is_enabled(self):
    """Returns `True` when rule enabled.

    :rtype: bool

    """
    if self.name in settings.exclude_rules:
        return False
    elif self.name in settings.rules:
        return True
    elif self.enabled_by_default and ALL_ENABLED in settings.rules:
        return True
    else:
        return False
python
def is_enabled(self):
    """Returns `True` when rule enabled.

    :rtype: bool

    """
    if self.name in settings.exclude_rules:
        return False
    elif self.name in settings.rules:
        return True
    elif self.enabled_by_default and ALL_ENABLED in settings.rules:
        return True
    else:
        return False
[ "def", "is_enabled", "(", "self", ")", ":", "if", "self", ".", "name", "in", "settings", ".", "exclude_rules", ":", "return", "False", "elif", "self", ".", "name", "in", "settings", ".", "rules", ":", "return", "True", "elif", "self", ".", "enabled_by_default", "and", "ALL_ENABLED", "in", "settings", ".", "rules", ":", "return", "True", "else", ":", "return", "False" ]
Returns `True` when rule enabled.

:rtype: bool
[ "Returns", "True", "when", "rule", "enabled", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/types.py#L150-L163
21,328
nvbn/thefuck
thefuck/types.py
Rule.is_match
def is_match(self, command):
    """Returns `True` if rule matches the command.

    :type command: Command
    :rtype: bool

    """
    if command.output is None and self.requires_output:
        return False

    try:
        with logs.debug_time(u'Trying rule: {};'.format(self.name)):
            if self.match(command):
                return True
    except Exception:
        logs.rule_failed(self, sys.exc_info())
python
def is_match(self, command):
    """Returns `True` if rule matches the command.

    :type command: Command
    :rtype: bool

    """
    if command.output is None and self.requires_output:
        return False

    try:
        with logs.debug_time(u'Trying rule: {};'.format(self.name)):
            if self.match(command):
                return True
    except Exception:
        logs.rule_failed(self, sys.exc_info())
[ "def", "is_match", "(", "self", ",", "command", ")", ":", "if", "command", ".", "output", "is", "None", "and", "self", ".", "requires_output", ":", "return", "False", "try", ":", "with", "logs", ".", "debug_time", "(", "u'Trying rule: {};'", ".", "format", "(", "self", ".", "name", ")", ")", ":", "if", "self", ".", "match", "(", "command", ")", ":", "return", "True", "except", "Exception", ":", "logs", ".", "rule_failed", "(", "self", ",", "sys", ".", "exc_info", "(", ")", ")" ]
Returns `True` if rule matches the command.

:type command: Command
:rtype: bool
[ "Returns", "True", "if", "rule", "matches", "the", "command", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/types.py#L165-L180
21,329
nvbn/thefuck
thefuck/types.py
Rule.get_corrected_commands
def get_corrected_commands(self, command):
    """Returns generator with corrected commands.

    :type command: Command
    :rtype: Iterable[CorrectedCommand]

    """
    new_commands = self.get_new_command(command)
    if not isinstance(new_commands, list):
        new_commands = (new_commands,)
    for n, new_command in enumerate(new_commands):
        yield CorrectedCommand(script=new_command,
                               side_effect=self.side_effect,
                               priority=(n + 1) * self.priority)
python
def get_corrected_commands(self, command):
    """Returns generator with corrected commands.

    :type command: Command
    :rtype: Iterable[CorrectedCommand]

    """
    new_commands = self.get_new_command(command)
    if not isinstance(new_commands, list):
        new_commands = (new_commands,)
    for n, new_command in enumerate(new_commands):
        yield CorrectedCommand(script=new_command,
                               side_effect=self.side_effect,
                               priority=(n + 1) * self.priority)
[ "def", "get_corrected_commands", "(", "self", ",", "command", ")", ":", "new_commands", "=", "self", ".", "get_new_command", "(", "command", ")", "if", "not", "isinstance", "(", "new_commands", ",", "list", ")", ":", "new_commands", "=", "(", "new_commands", ",", ")", "for", "n", ",", "new_command", "in", "enumerate", "(", "new_commands", ")", ":", "yield", "CorrectedCommand", "(", "script", "=", "new_command", ",", "side_effect", "=", "self", ".", "side_effect", ",", "priority", "=", "(", "n", "+", "1", ")", "*", "self", ".", "priority", ")" ]
Returns generator with corrected commands.

:type command: Command
:rtype: Iterable[CorrectedCommand]
[ "Returns", "generator", "with", "corrected", "commands", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/types.py#L182-L195
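Tracing the `(n + 1) * priority` ranking above: later alternatives from the same rule get larger priority values (values here are hypothetical):

priority = 1000  # hypothetical rule priority
new_commands = ['git push', 'git push --force']
ranked = [(n + 1) * priority for n, _ in enumerate(new_commands)]
assert ranked == [1000, 2000]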
21,330
nvbn/thefuck
thefuck/types.py
CorrectedCommand._get_script
def _get_script(self):
    """Returns fixed commands script.

    If `settings.repeat` is `True`, appends command with second attempt
    of running fuck in case fixed command fails again.

    """
    if settings.repeat:
        repeat_fuck = '{} --repeat {}--force-command {}'.format(
            get_alias(),
            '--debug ' if settings.debug else '',
            shell.quote(self.script))
        return shell.or_(self.script, repeat_fuck)
    else:
        return self.script
python
def _get_script(self):
    """Returns fixed commands script.

    If `settings.repeat` is `True`, appends command with second attempt
    of running fuck in case fixed command fails again.

    """
    if settings.repeat:
        repeat_fuck = '{} --repeat {}--force-command {}'.format(
            get_alias(),
            '--debug ' if settings.debug else '',
            shell.quote(self.script))
        return shell.or_(self.script, repeat_fuck)
    else:
        return self.script
[ "def", "_get_script", "(", "self", ")", ":", "if", "settings", ".", "repeat", ":", "repeat_fuck", "=", "'{} --repeat {}--force-command {}'", ".", "format", "(", "get_alias", "(", ")", ",", "'--debug '", "if", "settings", ".", "debug", "else", "''", ",", "shell", ".", "quote", "(", "self", ".", "script", ")", ")", "return", "shell", ".", "or_", "(", "self", ".", "script", ",", "repeat_fuck", ")", "else", ":", "return", "self", ".", "script" ]
Returns fixed commands script. If `settings.repeat` is `True`, appends command with second attempt of running fuck in case fixed command fails again.
[ "Returns", "fixed", "commands", "script", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/types.py#L228-L242
21,331
nvbn/thefuck
thefuck/types.py
CorrectedCommand.run
def run(self, old_cmd):
    """Runs command from rule for passed command.

    :type old_cmd: Command

    """
    if self.side_effect:
        self.side_effect(old_cmd, self.script)
    if settings.alter_history:
        shell.put_to_history(self.script)
    # This depends on correct setting of PYTHONIOENCODING by the alias:
    logs.debug(u'PYTHONIOENCODING: {}'.format(
        os.environ.get('PYTHONIOENCODING', '!!not-set!!')))

    print(self._get_script())
python
def run(self, old_cmd):
    """Runs command from rule for passed command.

    :type old_cmd: Command

    """
    if self.side_effect:
        self.side_effect(old_cmd, self.script)
    if settings.alter_history:
        shell.put_to_history(self.script)
    # This depends on correct setting of PYTHONIOENCODING by the alias:
    logs.debug(u'PYTHONIOENCODING: {}'.format(
        os.environ.get('PYTHONIOENCODING', '!!not-set!!')))

    print(self._get_script())
[ "def", "run", "(", "self", ",", "old_cmd", ")", ":", "if", "self", ".", "side_effect", ":", "self", ".", "side_effect", "(", "old_cmd", ",", "self", ".", "script", ")", "if", "settings", ".", "alter_history", ":", "shell", ".", "put_to_history", "(", "self", ".", "script", ")", "# This depends on correct setting of PYTHONIOENCODING by the alias:", "logs", ".", "debug", "(", "u'PYTHONIOENCODING: {}'", ".", "format", "(", "os", ".", "environ", ".", "get", "(", "'PYTHONIOENCODING'", ",", "'!!not-set!!'", ")", ")", ")", "print", "(", "self", ".", "_get_script", "(", ")", ")" ]
Runs command from rule for passed command.

:type old_cmd: Command
[ "Runs", "command", "from", "rule", "for", "passed", "command", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/types.py#L244-L258
21,332
nvbn/thefuck
thefuck/entrypoints/not_configured.py
_get_shell_pid
def _get_shell_pid():
    """Returns parent process pid."""
    proc = Process(os.getpid())

    try:
        return proc.parent().pid
    except TypeError:
        return proc.parent.pid
python
def _get_shell_pid():
    """Returns parent process pid."""
    proc = Process(os.getpid())

    try:
        return proc.parent().pid
    except TypeError:
        return proc.parent.pid
[ "def", "_get_shell_pid", "(", ")", ":", "proc", "=", "Process", "(", "os", ".", "getpid", "(", ")", ")", "try", ":", "return", "proc", ".", "parent", "(", ")", ".", "pid", "except", "TypeError", ":", "return", "proc", ".", "parent", ".", "pid" ]
Returns parent process pid.
[ "Returns", "parent", "process", "pid", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/entrypoints/not_configured.py#L19-L26
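A note on the try/except in this record: psutil 2.0 turned `parent` from a property into a method, and the `TypeError` branch keeps compatibility with the older 1.x API. A minimal standalone sketch of the same pattern (requires psutil; the version difference is detected via the exception):

import os
from psutil import Process

proc = Process(os.getpid())
try:
    parent_pid = proc.parent().pid  # psutil >= 2.0: parent is a method
except TypeError:
    parent_pid = proc.parent.pid    # psutil 1.x: parent was a property
print(parent_pid)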
21,333
nvbn/thefuck
thefuck/entrypoints/not_configured.py
_record_first_run
def _record_first_run(): """Records shell pid to tracker file.""" info = {'pid': _get_shell_pid(), 'time': time.time()} mode = 'wb' if six.PY2 else 'w' with _get_not_configured_usage_tracker_path().open(mode) as tracker: json.dump(info, tracker)
python
def _record_first_run(): """Records shell pid to tracker file.""" info = {'pid': _get_shell_pid(), 'time': time.time()} mode = 'wb' if six.PY2 else 'w' with _get_not_configured_usage_tracker_path().open(mode) as tracker: json.dump(info, tracker)
[ "def", "_record_first_run", "(", ")", ":", "info", "=", "{", "'pid'", ":", "_get_shell_pid", "(", ")", ",", "'time'", ":", "time", ".", "time", "(", ")", "}", "mode", "=", "'wb'", "if", "six", ".", "PY2", "else", "'w'", "with", "_get_not_configured_usage_tracker_path", "(", ")", ".", "open", "(", "mode", ")", "as", "tracker", ":", "json", ".", "dump", "(", "info", ",", "tracker", ")" ]
Records shell pid to tracker file.
[ "Records", "shell", "pid", "to", "tracker", "file", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/entrypoints/not_configured.py#L36-L43
21,334
nvbn/thefuck
thefuck/entrypoints/not_configured.py
_is_second_run
def _is_second_run(): """Returns `True` when we know that `fuck` called second time.""" tracker_path = _get_not_configured_usage_tracker_path() if not tracker_path.exists(): return False current_pid = _get_shell_pid() with tracker_path.open('r') as tracker: try: info = json.load(tracker) except ValueError: return False if not (isinstance(info, dict) and info.get('pid') == current_pid): return False return (_get_previous_command() == 'fuck' or time.time() - info.get('time', 0) < const.CONFIGURATION_TIMEOUT)
python
def _is_second_run(): """Returns `True` when we know that `fuck` called second time.""" tracker_path = _get_not_configured_usage_tracker_path() if not tracker_path.exists(): return False current_pid = _get_shell_pid() with tracker_path.open('r') as tracker: try: info = json.load(tracker) except ValueError: return False if not (isinstance(info, dict) and info.get('pid') == current_pid): return False return (_get_previous_command() == 'fuck' or time.time() - info.get('time', 0) < const.CONFIGURATION_TIMEOUT)
[ "def", "_is_second_run", "(", ")", ":", "tracker_path", "=", "_get_not_configured_usage_tracker_path", "(", ")", "if", "not", "tracker_path", ".", "exists", "(", ")", ":", "return", "False", "current_pid", "=", "_get_shell_pid", "(", ")", "with", "tracker_path", ".", "open", "(", "'r'", ")", "as", "tracker", ":", "try", ":", "info", "=", "json", ".", "load", "(", "tracker", ")", "except", "ValueError", ":", "return", "False", "if", "not", "(", "isinstance", "(", "info", ",", "dict", ")", "and", "info", ".", "get", "(", "'pid'", ")", "==", "current_pid", ")", ":", "return", "False", "return", "(", "_get_previous_command", "(", ")", "==", "'fuck'", "or", "time", ".", "time", "(", ")", "-", "info", ".", "get", "(", "'time'", ",", "0", ")", "<", "const", ".", "CONFIGURATION_TIMEOUT", ")" ]
Returns `True` when we know that `fuck` was called a second time.
[ "Returns", "True", "when", "we", "know", "that", "fuck", "called", "second", "time", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/entrypoints/not_configured.py#L55-L72
21,335
nvbn/thefuck
thefuck/entrypoints/not_configured.py
_is_already_configured
def _is_already_configured(configuration_details): """Returns `True` when alias already in shell config.""" path = Path(configuration_details.path).expanduser() with path.open('r') as shell_config: return configuration_details.content in shell_config.read()
python
def _is_already_configured(configuration_details): """Returns `True` when alias already in shell config.""" path = Path(configuration_details.path).expanduser() with path.open('r') as shell_config: return configuration_details.content in shell_config.read()
[ "def", "_is_already_configured", "(", "configuration_details", ")", ":", "path", "=", "Path", "(", "configuration_details", ".", "path", ")", ".", "expanduser", "(", ")", "with", "path", ".", "open", "(", "'r'", ")", "as", "shell_config", ":", "return", "configuration_details", ".", "content", "in", "shell_config", ".", "read", "(", ")" ]
Returns `True` when the alias is already in the shell config.
[ "Returns", "True", "when", "alias", "already", "in", "shell", "config", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/entrypoints/not_configured.py#L75-L79
21,336
nvbn/thefuck
thefuck/entrypoints/not_configured.py
_configure
def _configure(configuration_details): """Adds alias to shell config.""" path = Path(configuration_details.path).expanduser() with path.open('a') as shell_config: shell_config.write(u'\n') shell_config.write(configuration_details.content) shell_config.write(u'\n')
python
def _configure(configuration_details): """Adds alias to shell config.""" path = Path(configuration_details.path).expanduser() with path.open('a') as shell_config: shell_config.write(u'\n') shell_config.write(configuration_details.content) shell_config.write(u'\n')
[ "def", "_configure", "(", "configuration_details", ")", ":", "path", "=", "Path", "(", "configuration_details", ".", "path", ")", ".", "expanduser", "(", ")", "with", "path", ".", "open", "(", "'a'", ")", "as", "shell_config", ":", "shell_config", ".", "write", "(", "u'\\n'", ")", "shell_config", ".", "write", "(", "configuration_details", ".", "content", ")", "shell_config", ".", "write", "(", "u'\\n'", ")" ]
Adds alias to shell config.
[ "Adds", "alias", "to", "shell", "config", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/entrypoints/not_configured.py#L82-L88
21,337
nvbn/thefuck
thefuck/entrypoints/not_configured.py
main
def main(): """Shows useful information about how-to configure alias on a first run and configure automatically on a second. It'll be only visible when user type fuck and when alias isn't configured. """ settings.init() configuration_details = shell.how_to_configure() if ( configuration_details and configuration_details.can_configure_automatically ): if _is_already_configured(configuration_details): logs.already_configured(configuration_details) return elif _is_second_run(): _configure(configuration_details) logs.configured_successfully(configuration_details) return else: _record_first_run() logs.how_to_configure_alias(configuration_details)
python
def main(): """Shows useful information about how-to configure alias on a first run and configure automatically on a second. It'll be only visible when user type fuck and when alias isn't configured. """ settings.init() configuration_details = shell.how_to_configure() if ( configuration_details and configuration_details.can_configure_automatically ): if _is_already_configured(configuration_details): logs.already_configured(configuration_details) return elif _is_second_run(): _configure(configuration_details) logs.configured_successfully(configuration_details) return else: _record_first_run() logs.how_to_configure_alias(configuration_details)
[ "def", "main", "(", ")", ":", "settings", ".", "init", "(", ")", "configuration_details", "=", "shell", ".", "how_to_configure", "(", ")", "if", "(", "configuration_details", "and", "configuration_details", ".", "can_configure_automatically", ")", ":", "if", "_is_already_configured", "(", "configuration_details", ")", ":", "logs", ".", "already_configured", "(", "configuration_details", ")", "return", "elif", "_is_second_run", "(", ")", ":", "_configure", "(", "configuration_details", ")", "logs", ".", "configured_successfully", "(", "configuration_details", ")", "return", "else", ":", "_record_first_run", "(", ")", "logs", ".", "how_to_configure_alias", "(", "configuration_details", ")" ]
Shows useful information about how to configure the alias on the first run and configures it automatically on the second. It's only visible when the user types `fuck` and the alias isn't configured.
[ "Shows", "useful", "information", "about", "how", "-", "to", "configure", "alias", "on", "a", "first", "run", "and", "configure", "automatically", "on", "a", "second", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/entrypoints/not_configured.py#L91-L114
21,338
nvbn/thefuck
thefuck/utils.py
memoize
def memoize(fn): """Caches previous calls to the function.""" memo = {} @wraps(fn) def wrapper(*args, **kwargs): if not memoize.disabled: key = pickle.dumps((args, kwargs)) if key not in memo: memo[key] = fn(*args, **kwargs) value = memo[key] else: # Memoize is disabled, call the function value = fn(*args, **kwargs) return value return wrapper
python
def memoize(fn): """Caches previous calls to the function.""" memo = {} @wraps(fn) def wrapper(*args, **kwargs): if not memoize.disabled: key = pickle.dumps((args, kwargs)) if key not in memo: memo[key] = fn(*args, **kwargs) value = memo[key] else: # Memoize is disabled, call the function value = fn(*args, **kwargs) return value return wrapper
[ "def", "memoize", "(", "fn", ")", ":", "memo", "=", "{", "}", "@", "wraps", "(", "fn", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "memoize", ".", "disabled", ":", "key", "=", "pickle", ".", "dumps", "(", "(", "args", ",", "kwargs", ")", ")", "if", "key", "not", "in", "memo", ":", "memo", "[", "key", "]", "=", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "value", "=", "memo", "[", "key", "]", "else", ":", "# Memoize is disabled, call the function", "value", "=", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "value", "return", "wrapper" ]
Caches previous calls to the function.
[ "Caches", "previous", "calls", "to", "the", "function", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L25-L42
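A quick usage sketch of the decorator above (the toy function and counter are illustrative, not part of the library; arguments are pickled to form the cache key, so they must be picklable, and `memoize.disabled` defaults to `False` in the module):

from thefuck.utils import memoize

calls = []

@memoize
def slow_add(a, b):
    calls.append((a, b))  # record how often the body actually runs
    return a + b

assert slow_add(1, 2) == 3
assert slow_add(1, 2) == 3  # second call is served from the in-memory cache
assert calls == [(1, 2)]    # the wrapped body ran only once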
21,339
nvbn/thefuck
thefuck/utils.py
default_settings
def default_settings(params): """Adds default values to settings if it not presented. Usage: @default_settings({'apt': '/usr/bin/apt'}) def match(command): print(settings.apt) """ def _default_settings(fn, command): for k, w in params.items(): settings.setdefault(k, w) return fn(command) return decorator(_default_settings)
python
def default_settings(params): """Adds default values to settings if it not presented. Usage: @default_settings({'apt': '/usr/bin/apt'}) def match(command): print(settings.apt) """ def _default_settings(fn, command): for k, w in params.items(): settings.setdefault(k, w) return fn(command) return decorator(_default_settings)
[ "def", "default_settings", "(", "params", ")", ":", "def", "_default_settings", "(", "fn", ",", "command", ")", ":", "for", "k", ",", "w", "in", "params", ".", "items", "(", ")", ":", "settings", ".", "setdefault", "(", "k", ",", "w", ")", "return", "fn", "(", "command", ")", "return", "decorator", "(", "_default_settings", ")" ]
Adds default values to settings if they are not present.

    Usage:

    @default_settings({'apt': '/usr/bin/apt'})
    def match(command):
        print(settings.apt)
[ "Adds", "default", "values", "to", "settings", "if", "it", "not", "presented", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L73-L87
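A sketch of `default_settings` in use (hedged: the two-argument `Command(script, output)` constructor matches the records in this dataset but should be treated as an assumption, and the 'apt' key is purely illustrative):

from thefuck.conf import settings
from thefuck.types import Command
from thefuck.utils import default_settings

@default_settings({'apt': '/usr/bin/apt'})
def match(command):
    # settings.apt resolves to the default unless the user configured it
    return settings.apt

assert match(Command('apt-get install vim', '')) == '/usr/bin/apt'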
21,340
nvbn/thefuck
thefuck/utils.py
get_closest
def get_closest(word, possibilities, cutoff=0.6, fallback_to_first=True): """Returns closest match or just first from possibilities.""" possibilities = list(possibilities) try: return difflib_get_close_matches(word, possibilities, 1, cutoff)[0] except IndexError: if fallback_to_first: return possibilities[0]
python
def get_closest(word, possibilities, cutoff=0.6, fallback_to_first=True): """Returns closest match or just first from possibilities.""" possibilities = list(possibilities) try: return difflib_get_close_matches(word, possibilities, 1, cutoff)[0] except IndexError: if fallback_to_first: return possibilities[0]
[ "def", "get_closest", "(", "word", ",", "possibilities", ",", "cutoff", "=", "0.6", ",", "fallback_to_first", "=", "True", ")", ":", "possibilities", "=", "list", "(", "possibilities", ")", "try", ":", "return", "difflib_get_close_matches", "(", "word", ",", "possibilities", ",", "1", ",", "cutoff", ")", "[", "0", "]", "except", "IndexError", ":", "if", "fallback_to_first", ":", "return", "possibilities", "[", "0", "]" ]
Returns closest match or just first from possibilities.
[ "Returns", "closest", "match", "or", "just", "first", "from", "possibilities", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L90-L97
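Usage sketch for `get_closest` (plain `difflib` under the hood, so the results are easy to verify by hand):

from thefuck.utils import get_closest

assert get_closest('brnch', ['branch', 'checkout']) == 'branch'
# nothing within the cutoff: fall back to the first possibility...
assert get_closest('qqqq', ['branch', 'checkout']) == 'branch'
# ...unless the fallback is disabled, in which case None comes back
assert get_closest('qqqq', ['branch', 'checkout'],
                   fallback_to_first=False) is None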
21,341
nvbn/thefuck
thefuck/utils.py
get_close_matches
def get_close_matches(word, possibilities, n=None, cutoff=0.6):
    """Overrides `difflib.get_close_matches` to control the argument `n`."""
    if n is None:
        n = settings.num_close_matches
    return difflib_get_close_matches(word, possibilities, n, cutoff)
python
def get_close_matches(word, possibilities, n=None, cutoff=0.6):
    """Overrides `difflib.get_close_matches` to control the argument `n`."""
    if n is None:
        n = settings.num_close_matches
    return difflib_get_close_matches(word, possibilities, n, cutoff)
[ "def", "get_close_matches", "(", "word", ",", "possibilities", ",", "n", "=", "None", ",", "cutoff", "=", "0.6", ")", ":", "if", "n", "is", "None", ":", "n", "=", "settings", ".", "num_close_matches", "return", "difflib_get_close_matches", "(", "word", ",", "possibilities", ",", "n", ",", "cutoff", ")" ]
Overrides `difflib.get_close_matches` to control the argument `n`.
[ "Overrides", "difflib", ".", "get_close_match", "to", "controle", "argument", "n", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L100-L104
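With an explicit `n` this is the classic `difflib.get_close_matches` behaviour (the example below is the one from the difflib documentation); passing `n=None` instead reads `settings.num_close_matches`, which assumes initialised settings:

from thefuck.utils import get_close_matches

assert get_close_matches('appel', ['ape', 'apple', 'peach', 'puppy'],
                         n=2) == ['apple', 'ape']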
21,342
nvbn/thefuck
thefuck/utils.py
replace_argument
def replace_argument(script, from_, to): """Replaces command line argument.""" replaced_in_the_end = re.sub(u' {}$'.format(re.escape(from_)), u' {}'.format(to), script, count=1) if replaced_in_the_end != script: return replaced_in_the_end else: return script.replace( u' {} '.format(from_), u' {} '.format(to), 1)
python
def replace_argument(script, from_, to): """Replaces command line argument.""" replaced_in_the_end = re.sub(u' {}$'.format(re.escape(from_)), u' {}'.format(to), script, count=1) if replaced_in_the_end != script: return replaced_in_the_end else: return script.replace( u' {} '.format(from_), u' {} '.format(to), 1)
[ "def", "replace_argument", "(", "script", ",", "from_", ",", "to", ")", ":", "replaced_in_the_end", "=", "re", ".", "sub", "(", "u' {}$'", ".", "format", "(", "re", ".", "escape", "(", "from_", ")", ")", ",", "u' {}'", ".", "format", "(", "to", ")", ",", "script", ",", "count", "=", "1", ")", "if", "replaced_in_the_end", "!=", "script", ":", "return", "replaced_in_the_end", "else", ":", "return", "script", ".", "replace", "(", "u' {} '", ".", "format", "(", "from_", ")", ",", "u' {} '", ".", "format", "(", "to", ")", ",", "1", ")" ]
Replaces command line argument.
[ "Replaces", "command", "line", "argument", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L131-L139
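A sketch exercising both branches of `replace_argument`: the anchored regex for an argument at the end of the script, and the first-occurrence replace for one in the middle:

from thefuck.utils import replace_argument

# argument at the very end -> handled by the anchored regex
assert replace_argument('git brnch', 'brnch', 'branch') == 'git branch'
# argument in the middle -> handled by the plain first-occurrence replace
assert replace_argument('git brnch --all', 'brnch', 'branch') == 'git branch --all'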
21,343
nvbn/thefuck
thefuck/utils.py
is_app
def is_app(command, *app_names, **kwargs): """Returns `True` if command is call to one of passed app names.""" at_least = kwargs.pop('at_least', 0) if kwargs: raise TypeError("got an unexpected keyword argument '{}'".format(kwargs.keys())) if len(command.script_parts) > at_least: return command.script_parts[0] in app_names return False
python
def is_app(command, *app_names, **kwargs): """Returns `True` if command is call to one of passed app names.""" at_least = kwargs.pop('at_least', 0) if kwargs: raise TypeError("got an unexpected keyword argument '{}'".format(kwargs.keys())) if len(command.script_parts) > at_least: return command.script_parts[0] in app_names return False
[ "def", "is_app", "(", "command", ",", "*", "app_names", ",", "*", "*", "kwargs", ")", ":", "at_least", "=", "kwargs", ".", "pop", "(", "'at_least'", ",", "0", ")", "if", "kwargs", ":", "raise", "TypeError", "(", "\"got an unexpected keyword argument '{}'\"", ".", "format", "(", "kwargs", ".", "keys", "(", ")", ")", ")", "if", "len", "(", "command", ".", "script_parts", ")", ">", "at_least", ":", "return", "command", ".", "script_parts", "[", "0", "]", "in", "app_names", "return", "False" ]
Returns `True` if the command is a call to one of the passed app names.
[ "Returns", "True", "if", "command", "is", "call", "to", "one", "of", "passed", "app", "names", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L170-L180
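Usage sketch for `is_app` (again assuming the `Command(script, output)` constructor):

from thefuck.types import Command
from thefuck.utils import is_app

assert is_app(Command('git status', ''), 'git', 'hub')
assert not is_app(Command('ls -la', ''), 'git', 'hub')
# at_least=1 requires at least one argument after the app name
assert not is_app(Command('git', ''), 'git', at_least=1)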
21,344
nvbn/thefuck
thefuck/utils.py
for_app
def for_app(*app_names, **kwargs):
    """Specifies that the matching script is for one of the app names."""
    def _for_app(fn, command):
        if is_app(command, *app_names, **kwargs):
            return fn(command)
        else:
            return False

    return decorator(_for_app)
python
def for_app(*app_names, **kwargs):
    """Specifies that the matching script is for one of the app names."""
    def _for_app(fn, command):
        if is_app(command, *app_names, **kwargs):
            return fn(command)
        else:
            return False

    return decorator(_for_app)
[ "def", "for_app", "(", "*", "app_names", ",", "*", "*", "kwargs", ")", ":", "def", "_for_app", "(", "fn", ",", "command", ")", ":", "if", "is_app", "(", "command", ",", "*", "app_names", ",", "*", "*", "kwargs", ")", ":", "return", "fn", "(", "command", ")", "else", ":", "return", "False", "return", "decorator", "(", "_for_app", ")" ]
Specifies that the matching script is for one of the app names.
[ "Specifies", "that", "matching", "script", "is", "for", "on", "of", "app", "names", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L183-L191
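And `for_app` wraps the same check into a decorator for rule `match` functions (the failing command and its output below are made up):

from thefuck.types import Command
from thefuck.utils import for_app

@for_app('git', 'hub')
def match(command):
    return 'not a git command' in command.output

assert match(Command('git sttaus', "git: 'sttaus' is not a git command."))
assert not match(Command('ls -la', ''))  # body never runs for non-git scripts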
21,345
nvbn/thefuck
thefuck/utils.py
cache
def cache(*depends_on): """Caches function result in temporary file. Cache will be expired when modification date of files from `depends_on` will be changed. Only functions should be wrapped in `cache`, not methods. """ def cache_decorator(fn): @memoize @wraps(fn) def wrapper(*args, **kwargs): if cache.disabled: return fn(*args, **kwargs) else: return _cache.get_value(fn, depends_on, args, kwargs) return wrapper return cache_decorator
python
def cache(*depends_on): """Caches function result in temporary file. Cache will be expired when modification date of files from `depends_on` will be changed. Only functions should be wrapped in `cache`, not methods. """ def cache_decorator(fn): @memoize @wraps(fn) def wrapper(*args, **kwargs): if cache.disabled: return fn(*args, **kwargs) else: return _cache.get_value(fn, depends_on, args, kwargs) return wrapper return cache_decorator
[ "def", "cache", "(", "*", "depends_on", ")", ":", "def", "cache_decorator", "(", "fn", ")", ":", "@", "memoize", "@", "wraps", "(", "fn", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "cache", ".", "disabled", ":", "return", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "return", "_cache", ".", "get_value", "(", "fn", ",", "depends_on", ",", "args", ",", "kwargs", ")", "return", "wrapper", "return", "cache_decorator" ]
Caches the function result in a temporary file.

The cache expires when the modification date of any of the files in `depends_on` changes.

Only functions should be wrapped in `cache`, not methods.
[ "Caches", "function", "result", "in", "temporary", "file", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L266-L286
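A sketch of intended `cache` usage (the file and function are hypothetical; the stored value is keyed on the modification times of the `depends_on` paths, and the storage location and lifetime are implementation details of the library):

from thefuck.utils import cache

@cache('/etc/hosts')  # recomputed only after /etc/hosts changes
def first_host_fields():
    with open('/etc/hosts') as f:
        return [line.split()[0] for line in f
                if line.strip() and not line.startswith('#')]

print(first_host_fields())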
21,346
nvbn/thefuck
thefuck/utils.py
format_raw_script
def format_raw_script(raw_script): """Creates single script from a list of script parts. :type raw_script: [basestring] :rtype: basestring """ if six.PY2: script = ' '.join(arg.decode('utf-8') for arg in raw_script) else: script = ' '.join(raw_script) return script.strip()
python
def format_raw_script(raw_script): """Creates single script from a list of script parts. :type raw_script: [basestring] :rtype: basestring """ if six.PY2: script = ' '.join(arg.decode('utf-8') for arg in raw_script) else: script = ' '.join(raw_script) return script.strip()
[ "def", "format_raw_script", "(", "raw_script", ")", ":", "if", "six", ".", "PY2", ":", "script", "=", "' '", ".", "join", "(", "arg", ".", "decode", "(", "'utf-8'", ")", "for", "arg", "in", "raw_script", ")", "else", ":", "script", "=", "' '", ".", "join", "(", "raw_script", ")", "return", "script", ".", "strip", "(", ")" ]
Creates single script from a list of script parts. :type raw_script: [basestring] :rtype: basestring
[ "Creates", "single", "script", "from", "a", "list", "of", "script", "parts", "." ]
40ab4eb62db57627bff10cf029d29c94704086a2
https://github.com/nvbn/thefuck/blob/40ab4eb62db57627bff10cf029d29c94704086a2/thefuck/utils.py#L325-L337
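Behaviour sketch for `format_raw_script` (Python 3 branch shown; on Python 2 each part is additionally decoded from UTF-8):

from thefuck.utils import format_raw_script

assert format_raw_script(['git', 'commit', '-m', 'fix']) == 'git commit -m fix'
# surrounding whitespace is stripped from the joined result
assert format_raw_script(['  ls', '-la  ']) == 'ls -la'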
21,347
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/policy.py
Policy.export_model
def export_model(self): """ Exports latest saved model to .nn format for Unity embedding. """ with self.graph.as_default(): target_nodes = ','.join(self._process_graph()) ckpt = tf.train.get_checkpoint_state(self.model_path) freeze_graph.freeze_graph( input_graph=self.model_path + '/raw_graph_def.pb', input_binary=True, input_checkpoint=ckpt.model_checkpoint_path, output_node_names=target_nodes, output_graph=(self.model_path + '/frozen_graph_def.pb'), clear_devices=True, initializer_nodes='', input_saver='', restore_op_name='save/restore_all', filename_tensor_name='save/Const:0') tf2bc.convert(self.model_path + '/frozen_graph_def.pb', self.model_path + '.nn') logger.info('Exported ' + self.model_path + '.nn file')
python
def export_model(self): """ Exports latest saved model to .nn format for Unity embedding. """ with self.graph.as_default(): target_nodes = ','.join(self._process_graph()) ckpt = tf.train.get_checkpoint_state(self.model_path) freeze_graph.freeze_graph( input_graph=self.model_path + '/raw_graph_def.pb', input_binary=True, input_checkpoint=ckpt.model_checkpoint_path, output_node_names=target_nodes, output_graph=(self.model_path + '/frozen_graph_def.pb'), clear_devices=True, initializer_nodes='', input_saver='', restore_op_name='save/restore_all', filename_tensor_name='save/Const:0') tf2bc.convert(self.model_path + '/frozen_graph_def.pb', self.model_path + '.nn') logger.info('Exported ' + self.model_path + '.nn file')
[ "def", "export_model", "(", "self", ")", ":", "with", "self", ".", "graph", ".", "as_default", "(", ")", ":", "target_nodes", "=", "','", ".", "join", "(", "self", ".", "_process_graph", "(", ")", ")", "ckpt", "=", "tf", ".", "train", ".", "get_checkpoint_state", "(", "self", ".", "model_path", ")", "freeze_graph", ".", "freeze_graph", "(", "input_graph", "=", "self", ".", "model_path", "+", "'/raw_graph_def.pb'", ",", "input_binary", "=", "True", ",", "input_checkpoint", "=", "ckpt", ".", "model_checkpoint_path", ",", "output_node_names", "=", "target_nodes", ",", "output_graph", "=", "(", "self", ".", "model_path", "+", "'/frozen_graph_def.pb'", ")", ",", "clear_devices", "=", "True", ",", "initializer_nodes", "=", "''", ",", "input_saver", "=", "''", ",", "restore_op_name", "=", "'save/restore_all'", ",", "filename_tensor_name", "=", "'save/Const:0'", ")", "tf2bc", ".", "convert", "(", "self", ".", "model_path", "+", "'/frozen_graph_def.pb'", ",", "self", ".", "model_path", "+", "'.nn'", ")", "logger", ".", "info", "(", "'Exported '", "+", "self", ".", "model_path", "+", "'.nn file'", ")" ]
Exports latest saved model to .nn format for Unity embedding.
[ "Exports", "latest", "saved", "model", "to", ".", "nn", "format", "for", "Unity", "embedding", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/policy.py#L185-L204
21,348
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/buffer.py
Buffer.reset_local_buffers
def reset_local_buffers(self):
        """
        Resets all the local buffers
        """
        agent_ids = list(self.keys())
        for k in agent_ids:
            self[k].reset_agent()
python
def reset_local_buffers(self):
        """
        Resets all the local buffers
        """
        agent_ids = list(self.keys())
        for k in agent_ids:
            self[k].reset_agent()
[ "def", "reset_local_buffers", "(", "self", ")", ":", "agent_ids", "=", "list", "(", "self", ".", "keys", "(", ")", ")", "for", "k", "in", "agent_ids", ":", "self", "[", "k", "]", ".", "reset_agent", "(", ")" ]
Resets all the local buffers
[ "Resets", "all", "the", "local", "local_buffers" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/buffer.py#L221-L227
21,349
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/meta_curriculum.py
MetaCurriculum.lesson_nums
def lesson_nums(self): """A dict from brain name to the brain's curriculum's lesson number.""" lesson_nums = {} for brain_name, curriculum in self.brains_to_curriculums.items(): lesson_nums[brain_name] = curriculum.lesson_num return lesson_nums
python
def lesson_nums(self): """A dict from brain name to the brain's curriculum's lesson number.""" lesson_nums = {} for brain_name, curriculum in self.brains_to_curriculums.items(): lesson_nums[brain_name] = curriculum.lesson_num return lesson_nums
[ "def", "lesson_nums", "(", "self", ")", ":", "lesson_nums", "=", "{", "}", "for", "brain_name", ",", "curriculum", "in", "self", ".", "brains_to_curriculums", ".", "items", "(", ")", ":", "lesson_nums", "[", "brain_name", "]", "=", "curriculum", ".", "lesson_num", "return", "lesson_nums" ]
A dict from brain name to the brain's curriculum's lesson number.
[ "A", "dict", "from", "brain", "name", "to", "the", "brain", "s", "curriculum", "s", "lesson", "number", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/meta_curriculum.py#L61-L67
21,350
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/meta_curriculum.py
MetaCurriculum.increment_lessons
def increment_lessons(self, measure_vals, reward_buff_sizes=None): """Attempts to increments all the lessons of all the curriculums in this MetaCurriculum. Note that calling this method does not guarantee the lesson of a curriculum will increment. The lesson of a curriculum will only increment if the specified measure threshold defined in the curriculum has been reached and the minimum number of episodes in the lesson have been completed. Args: measure_vals (dict): A dict of brain name to measure value. reward_buff_sizes (dict): A dict of brain names to the size of their corresponding reward buffers. Returns: A dict from brain name to whether that brain's lesson number was incremented. """ ret = {} if reward_buff_sizes: for brain_name, buff_size in reward_buff_sizes.items(): if self._lesson_ready_to_increment(brain_name, buff_size): measure_val = measure_vals[brain_name] ret[brain_name] = (self.brains_to_curriculums[brain_name] .increment_lesson(measure_val)) else: for brain_name, measure_val in measure_vals.items(): ret[brain_name] = (self.brains_to_curriculums[brain_name] .increment_lesson(measure_val)) return ret
python
def increment_lessons(self, measure_vals, reward_buff_sizes=None): """Attempts to increments all the lessons of all the curriculums in this MetaCurriculum. Note that calling this method does not guarantee the lesson of a curriculum will increment. The lesson of a curriculum will only increment if the specified measure threshold defined in the curriculum has been reached and the minimum number of episodes in the lesson have been completed. Args: measure_vals (dict): A dict of brain name to measure value. reward_buff_sizes (dict): A dict of brain names to the size of their corresponding reward buffers. Returns: A dict from brain name to whether that brain's lesson number was incremented. """ ret = {} if reward_buff_sizes: for brain_name, buff_size in reward_buff_sizes.items(): if self._lesson_ready_to_increment(brain_name, buff_size): measure_val = measure_vals[brain_name] ret[brain_name] = (self.brains_to_curriculums[brain_name] .increment_lesson(measure_val)) else: for brain_name, measure_val in measure_vals.items(): ret[brain_name] = (self.brains_to_curriculums[brain_name] .increment_lesson(measure_val)) return ret
[ "def", "increment_lessons", "(", "self", ",", "measure_vals", ",", "reward_buff_sizes", "=", "None", ")", ":", "ret", "=", "{", "}", "if", "reward_buff_sizes", ":", "for", "brain_name", ",", "buff_size", "in", "reward_buff_sizes", ".", "items", "(", ")", ":", "if", "self", ".", "_lesson_ready_to_increment", "(", "brain_name", ",", "buff_size", ")", ":", "measure_val", "=", "measure_vals", "[", "brain_name", "]", "ret", "[", "brain_name", "]", "=", "(", "self", ".", "brains_to_curriculums", "[", "brain_name", "]", ".", "increment_lesson", "(", "measure_val", ")", ")", "else", ":", "for", "brain_name", ",", "measure_val", "in", "measure_vals", ".", "items", "(", ")", ":", "ret", "[", "brain_name", "]", "=", "(", "self", ".", "brains_to_curriculums", "[", "brain_name", "]", ".", "increment_lesson", "(", "measure_val", ")", ")", "return", "ret" ]
Attempts to increment the lessons of all the curriculums in this
MetaCurriculum. Note that calling this method does not guarantee the
lesson of a curriculum will increment. The lesson of a curriculum will
only increment if the specified measure threshold defined in the
curriculum has been reached and the minimum number of episodes in the
lesson have been completed.

Args:
    measure_vals (dict): A dict of brain name to measure value.
    reward_buff_sizes (dict): A dict of brain names to the size of their
     corresponding reward buffers.

Returns:
    A dict from brain name to whether that brain's lesson number was
    incremented.
[ "Attempts", "to", "increments", "all", "the", "lessons", "of", "all", "the", "curriculums", "in", "this", "MetaCurriculum", ".", "Note", "that", "calling", "this", "method", "does", "not", "guarantee", "the", "lesson", "of", "a", "curriculum", "will", "increment", ".", "The", "lesson", "of", "a", "curriculum", "will", "only", "increment", "if", "the", "specified", "measure", "threshold", "defined", "in", "the", "curriculum", "has", "been", "reached", "and", "the", "minimum", "number", "of", "episodes", "in", "the", "lesson", "have", "been", "completed", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/meta_curriculum.py#L91-L119
21,351
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/meta_curriculum.py
MetaCurriculum.set_all_curriculums_to_lesson_num
def set_all_curriculums_to_lesson_num(self, lesson_num): """Sets all the curriculums in this meta curriculum to a specified lesson number. Args: lesson_num (int): The lesson number which all the curriculums will be set to. """ for _, curriculum in self.brains_to_curriculums.items(): curriculum.lesson_num = lesson_num
python
def set_all_curriculums_to_lesson_num(self, lesson_num): """Sets all the curriculums in this meta curriculum to a specified lesson number. Args: lesson_num (int): The lesson number which all the curriculums will be set to. """ for _, curriculum in self.brains_to_curriculums.items(): curriculum.lesson_num = lesson_num
[ "def", "set_all_curriculums_to_lesson_num", "(", "self", ",", "lesson_num", ")", ":", "for", "_", ",", "curriculum", "in", "self", ".", "brains_to_curriculums", ".", "items", "(", ")", ":", "curriculum", ".", "lesson_num", "=", "lesson_num" ]
Sets all the curriculums in this meta curriculum to a specified lesson number. Args: lesson_num (int): The lesson number which all the curriculums will be set to.
[ "Sets", "all", "the", "curriculums", "in", "this", "meta", "curriculum", "to", "a", "specified", "lesson", "number", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/meta_curriculum.py#L122-L131
21,352
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/meta_curriculum.py
MetaCurriculum.get_config
def get_config(self): """Get the combined configuration of all curriculums in this MetaCurriculum. Returns: A dict from parameter to value. """ config = {} for _, curriculum in self.brains_to_curriculums.items(): curr_config = curriculum.get_config() config.update(curr_config) return config
python
def get_config(self): """Get the combined configuration of all curriculums in this MetaCurriculum. Returns: A dict from parameter to value. """ config = {} for _, curriculum in self.brains_to_curriculums.items(): curr_config = curriculum.get_config() config.update(curr_config) return config
[ "def", "get_config", "(", "self", ")", ":", "config", "=", "{", "}", "for", "_", ",", "curriculum", "in", "self", ".", "brains_to_curriculums", ".", "items", "(", ")", ":", "curr_config", "=", "curriculum", ".", "get_config", "(", ")", "config", ".", "update", "(", "curr_config", ")", "return", "config" ]
Get the combined configuration of all curriculums in this MetaCurriculum. Returns: A dict from parameter to value.
[ "Get", "the", "combined", "configuration", "of", "all", "curriculums", "in", "this", "MetaCurriculum", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/meta_curriculum.py#L134-L147
21,353
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/trainer_metrics.py
TrainerMetrics.end_experience_collection_timer
def end_experience_collection_timer(self): """ Inform Metrics class that experience collection is done. """ if self.time_start_experience_collection: curr_delta = time() - self.time_start_experience_collection if self.delta_last_experience_collection is None: self.delta_last_experience_collection = curr_delta else: self.delta_last_experience_collection += curr_delta self.time_start_experience_collection = None
python
def end_experience_collection_timer(self): """ Inform Metrics class that experience collection is done. """ if self.time_start_experience_collection: curr_delta = time() - self.time_start_experience_collection if self.delta_last_experience_collection is None: self.delta_last_experience_collection = curr_delta else: self.delta_last_experience_collection += curr_delta self.time_start_experience_collection = None
[ "def", "end_experience_collection_timer", "(", "self", ")", ":", "if", "self", ".", "time_start_experience_collection", ":", "curr_delta", "=", "time", "(", ")", "-", "self", ".", "time_start_experience_collection", "if", "self", ".", "delta_last_experience_collection", "is", "None", ":", "self", ".", "delta_last_experience_collection", "=", "curr_delta", "else", ":", "self", ".", "delta_last_experience_collection", "+=", "curr_delta", "self", ".", "time_start_experience_collection", "=", "None" ]
Inform Metrics class that experience collection is done.
[ "Inform", "Metrics", "class", "that", "experience", "collection", "is", "done", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/trainer_metrics.py#L39-L49
21,354
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/trainer_metrics.py
TrainerMetrics.add_delta_step
def add_delta_step(self, delta: float): """ Inform Metrics class about time to step in environment. """ if self.delta_last_experience_collection: self.delta_last_experience_collection += delta else: self.delta_last_experience_collection = delta
python
def add_delta_step(self, delta: float): """ Inform Metrics class about time to step in environment. """ if self.delta_last_experience_collection: self.delta_last_experience_collection += delta else: self.delta_last_experience_collection = delta
[ "def", "add_delta_step", "(", "self", ",", "delta", ":", "float", ")", ":", "if", "self", ".", "delta_last_experience_collection", ":", "self", ".", "delta_last_experience_collection", "+=", "delta", "else", ":", "self", ".", "delta_last_experience_collection", "=", "delta" ]
Inform Metrics class about time to step in environment.
[ "Inform", "Metrics", "class", "about", "time", "to", "step", "in", "environment", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/trainer_metrics.py#L51-L58
21,355
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/trainer_metrics.py
TrainerMetrics.end_policy_update
def end_policy_update(self):
        """
        Inform Metrics class that policy update has finished.
        """
        if self.time_policy_update_start:
            self.delta_policy_update = time() - self.time_policy_update_start
        else:
            self.delta_policy_update = 0
        delta_train_start = time() - self.time_training_start
        LOGGER.debug(" Policy Update Training Metrics for {}: "
                     "\n\t\tTime to update Policy: {:0.3f} s \n"
                     "\t\tTime elapsed since training: {:0.3f} s \n"
                     "\t\tTime for experience collection: {:0.3f} s \n"
                     "\t\tBuffer Length: {} \n"
                     "\t\tReturns : {:0.3f}\n"
                     .format(self.brain_name, self.delta_policy_update,
                             delta_train_start, self.delta_last_experience_collection,
                             self.last_buffer_length, self.last_mean_return))
        self._add_row(delta_train_start)
python
def end_policy_update(self):
        """
        Inform Metrics class that policy update has finished.
        """
        if self.time_policy_update_start:
            self.delta_policy_update = time() - self.time_policy_update_start
        else:
            self.delta_policy_update = 0
        delta_train_start = time() - self.time_training_start
        LOGGER.debug(" Policy Update Training Metrics for {}: "
                     "\n\t\tTime to update Policy: {:0.3f} s \n"
                     "\t\tTime elapsed since training: {:0.3f} s \n"
                     "\t\tTime for experience collection: {:0.3f} s \n"
                     "\t\tBuffer Length: {} \n"
                     "\t\tReturns : {:0.3f}\n"
                     .format(self.brain_name, self.delta_policy_update,
                             delta_train_start, self.delta_last_experience_collection,
                             self.last_buffer_length, self.last_mean_return))
        self._add_row(delta_train_start)
[ "def", "end_policy_update", "(", "self", ")", ":", "if", "self", ".", "time_policy_update_start", ":", "self", ".", "delta_policy_update", "=", "time", "(", ")", "-", "self", ".", "time_policy_update_start", "else", ":", "self", ".", "delta_policy_update", "=", "0", "delta_train_start", "=", "time", "(", ")", "-", "self", ".", "time_training_start", "LOGGER", ".", "debug", "(", "\" Policy Update Training Metrics for {}: \"", "\"\\n\\t\\tTime to update Policy: {:0.3f} s \\n\"", "\"\\t\\tTime elapsed since training: {:0.3f} s \\n\"", "\"\\t\\tTime for experience collection: {:0.3f} s \\n\"", "\"\\t\\tBuffer Length: {} \\n\"", "\"\\t\\tReturns : {:0.3f}\\n\"", ".", "format", "(", "self", ".", "brain_name", ",", "self", ".", "delta_policy_update", ",", "delta_train_start", ",", "self", ".", "delta_last_experience_collection", ",", "self", ".", "last_buffer_length", ",", "self", ".", "last_mean_return", ")", ")", "self", ".", "_add_row", "(", "delta_train_start", ")" ]
Inform Metrics class that policy update has finished.
[ "Inform", "Metrics", "class", "that", "policy", "update", "has", "started", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/trainer_metrics.py#L79-L97
21,356
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/trainer_metrics.py
TrainerMetrics.write_training_metrics
def write_training_metrics(self): """ Write Training Metrics to CSV """ with open(self.path, 'w') as file: writer = csv.writer(file) writer.writerow(FIELD_NAMES) for row in self.rows: writer.writerow(row)
python
def write_training_metrics(self): """ Write Training Metrics to CSV """ with open(self.path, 'w') as file: writer = csv.writer(file) writer.writerow(FIELD_NAMES) for row in self.rows: writer.writerow(row)
[ "def", "write_training_metrics", "(", "self", ")", ":", "with", "open", "(", "self", ".", "path", ",", "'w'", ")", "as", "file", ":", "writer", "=", "csv", ".", "writer", "(", "file", ")", "writer", ".", "writerow", "(", "FIELD_NAMES", ")", "for", "row", "in", "self", ".", "rows", ":", "writer", ".", "writerow", "(", "row", ")" ]
Write Training Metrics to CSV
[ "Write", "Training", "Metrics", "to", "CSV" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/trainer_metrics.py#L99-L107
21,357
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/ppo/models.py
PPOModel.create_reward_encoder
def create_reward_encoder(): """Creates TF ops to track and increment recent average cumulative reward.""" last_reward = tf.Variable(0, name="last_reward", trainable=False, dtype=tf.float32) new_reward = tf.placeholder(shape=[], dtype=tf.float32, name='new_reward') update_reward = tf.assign(last_reward, new_reward) return last_reward, new_reward, update_reward
python
def create_reward_encoder(): """Creates TF ops to track and increment recent average cumulative reward.""" last_reward = tf.Variable(0, name="last_reward", trainable=False, dtype=tf.float32) new_reward = tf.placeholder(shape=[], dtype=tf.float32, name='new_reward') update_reward = tf.assign(last_reward, new_reward) return last_reward, new_reward, update_reward
[ "def", "create_reward_encoder", "(", ")", ":", "last_reward", "=", "tf", ".", "Variable", "(", "0", ",", "name", "=", "\"last_reward\"", ",", "trainable", "=", "False", ",", "dtype", "=", "tf", ".", "float32", ")", "new_reward", "=", "tf", ".", "placeholder", "(", "shape", "=", "[", "]", ",", "dtype", "=", "tf", ".", "float32", ",", "name", "=", "'new_reward'", ")", "update_reward", "=", "tf", ".", "assign", "(", "last_reward", ",", "new_reward", ")", "return", "last_reward", ",", "new_reward", ",", "update_reward" ]
Creates TF ops to track and increment recent average cumulative reward.
[ "Creates", "TF", "ops", "to", "track", "and", "increment", "recent", "average", "cumulative", "reward", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/ppo/models.py#L49-L54
21,358
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/bc/trainer.py
BCTrainer.update_policy
def update_policy(self): """ Updates the policy. """ self.demonstration_buffer.update_buffer.shuffle() batch_losses = [] num_batches = min(len(self.demonstration_buffer.update_buffer['actions']) // self.n_sequences, self.batches_per_epoch) for i in range(num_batches): update_buffer = self.demonstration_buffer.update_buffer start = i * self.n_sequences end = (i + 1) * self.n_sequences mini_batch = update_buffer.make_mini_batch(start, end) run_out = self.policy.update(mini_batch, self.n_sequences) loss = run_out['policy_loss'] batch_losses.append(loss) if len(batch_losses) > 0: self.stats['Losses/Cloning Loss'].append(np.mean(batch_losses)) else: self.stats['Losses/Cloning Loss'].append(0)
python
def update_policy(self): """ Updates the policy. """ self.demonstration_buffer.update_buffer.shuffle() batch_losses = [] num_batches = min(len(self.demonstration_buffer.update_buffer['actions']) // self.n_sequences, self.batches_per_epoch) for i in range(num_batches): update_buffer = self.demonstration_buffer.update_buffer start = i * self.n_sequences end = (i + 1) * self.n_sequences mini_batch = update_buffer.make_mini_batch(start, end) run_out = self.policy.update(mini_batch, self.n_sequences) loss = run_out['policy_loss'] batch_losses.append(loss) if len(batch_losses) > 0: self.stats['Losses/Cloning Loss'].append(np.mean(batch_losses)) else: self.stats['Losses/Cloning Loss'].append(0)
[ "def", "update_policy", "(", "self", ")", ":", "self", ".", "demonstration_buffer", ".", "update_buffer", ".", "shuffle", "(", ")", "batch_losses", "=", "[", "]", "num_batches", "=", "min", "(", "len", "(", "self", ".", "demonstration_buffer", ".", "update_buffer", "[", "'actions'", "]", ")", "//", "self", ".", "n_sequences", ",", "self", ".", "batches_per_epoch", ")", "for", "i", "in", "range", "(", "num_batches", ")", ":", "update_buffer", "=", "self", ".", "demonstration_buffer", ".", "update_buffer", "start", "=", "i", "*", "self", ".", "n_sequences", "end", "=", "(", "i", "+", "1", ")", "*", "self", ".", "n_sequences", "mini_batch", "=", "update_buffer", ".", "make_mini_batch", "(", "start", ",", "end", ")", "run_out", "=", "self", ".", "policy", ".", "update", "(", "mini_batch", ",", "self", ".", "n_sequences", ")", "loss", "=", "run_out", "[", "'policy_loss'", "]", "batch_losses", ".", "append", "(", "loss", ")", "if", "len", "(", "batch_losses", ")", ">", "0", ":", "self", ".", "stats", "[", "'Losses/Cloning Loss'", "]", ".", "append", "(", "np", ".", "mean", "(", "batch_losses", ")", ")", "else", ":", "self", ".", "stats", "[", "'Losses/Cloning Loss'", "]", ".", "append", "(", "0", ")" ]
Updates the policy.
[ "Updates", "the", "policy", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/bc/trainer.py#L152-L171
21,359
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/models.py
LearningModel.create_global_steps
def create_global_steps(): """Creates TF ops to track and increment global training step.""" global_step = tf.Variable(0, name="global_step", trainable=False, dtype=tf.int32) increment_step = tf.assign(global_step, tf.add(global_step, 1)) return global_step, increment_step
python
def create_global_steps(): """Creates TF ops to track and increment global training step.""" global_step = tf.Variable(0, name="global_step", trainable=False, dtype=tf.int32) increment_step = tf.assign(global_step, tf.add(global_step, 1)) return global_step, increment_step
[ "def", "create_global_steps", "(", ")", ":", "global_step", "=", "tf", ".", "Variable", "(", "0", ",", "name", "=", "\"global_step\"", ",", "trainable", "=", "False", ",", "dtype", "=", "tf", ".", "int32", ")", "increment_step", "=", "tf", ".", "assign", "(", "global_step", ",", "tf", ".", "add", "(", "global_step", ",", "1", ")", ")", "return", "global_step", ",", "increment_step" ]
Creates TF ops to track and increment global training step.
[ "Creates", "TF", "ops", "to", "track", "and", "increment", "global", "training", "step", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/models.py#L43-L47
21,360
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/tensorflow_to_barracuda.py
flatten
def flatten(items,enter=lambda x:isinstance(x, list)): # http://stackoverflow.com/a/40857703 # https://github.com/ctmakro/canton/blob/master/canton/misc.py """Yield items from any nested iterable; see REF.""" for x in items: if enter(x): yield from flatten(x) else: yield x
python
def flatten(items,enter=lambda x:isinstance(x, list)): # http://stackoverflow.com/a/40857703 # https://github.com/ctmakro/canton/blob/master/canton/misc.py """Yield items from any nested iterable; see REF.""" for x in items: if enter(x): yield from flatten(x) else: yield x
[ "def", "flatten", "(", "items", ",", "enter", "=", "lambda", "x", ":", "isinstance", "(", "x", ",", "list", ")", ")", ":", "# http://stackoverflow.com/a/40857703", "# https://github.com/ctmakro/canton/blob/master/canton/misc.py", "for", "x", "in", "items", ":", "if", "enter", "(", "x", ")", ":", "yield", "from", "flatten", "(", "x", ")", "else", ":", "yield", "x" ]
Yield items from any nested iterable; see REF.
[ "Yield", "items", "from", "any", "nested", "iterable", ";", "see", "REF", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/tensorflow_to_barracuda.py#L496-L504
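`flatten` restated for a standalone check (importing it from `tensorflow_to_barracuda` would pull in TensorFlow, so the definition is copied here verbatim):

def flatten(items, enter=lambda x: isinstance(x, list)):
    for x in items:
        if enter(x):
            yield from flatten(x)
        else:
            yield x

assert list(flatten([1, [2, [3, 4]], 5])) == [1, 2, 3, 4, 5]
# the enter predicate decides what counts as "nested"
assert list(flatten([1, (2, 3)],
                    enter=lambda x: isinstance(x, tuple))) == [1, 2, 3]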
21,361
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/tensorflow_to_barracuda.py
replace_strings_in_list
def replace_strings_in_list(array_of_strigs, replace_with_strings): "A value in replace_with_strings can be either single string or list of strings" potentially_nested_list = [replace_with_strings.get(s) or s for s in array_of_strigs] return list(flatten(potentially_nested_list))
python
def replace_strings_in_list(array_of_strigs, replace_with_strings): "A value in replace_with_strings can be either single string or list of strings" potentially_nested_list = [replace_with_strings.get(s) or s for s in array_of_strigs] return list(flatten(potentially_nested_list))
[ "def", "replace_strings_in_list", "(", "array_of_strigs", ",", "replace_with_strings", ")", ":", "potentially_nested_list", "=", "[", "replace_with_strings", ".", "get", "(", "s", ")", "or", "s", "for", "s", "in", "array_of_strigs", "]", "return", "list", "(", "flatten", "(", "potentially_nested_list", ")", ")" ]
A value in replace_with_strings can be either a single string or a list of strings
[ "A", "value", "in", "replace_with_strings", "can", "be", "either", "single", "string", "or", "list", "of", "strings" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/tensorflow_to_barracuda.py#L506-L509
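Behaviour sketch for `replace_strings_in_list`, assuming `flatten` from the previous sketch is in scope (note the `.get(s) or s` idiom: a missing, or falsy, replacement maps the string back to itself; the parameter names below are cleaned up for the sketch):

def replace_strings_in_list(strings, replacements):
    # a replacement value may be a single string or a list of strings
    return list(flatten([replacements.get(s) or s for s in strings]))

mapping = {'concat': ['concat_pre', 'concat_post']}
assert replace_strings_in_list(['a', 'concat', 'b'], mapping) == \
    ['a', 'concat_pre', 'concat_post', 'b']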
21,362
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/tensorflow_to_barracuda.py
remove_duplicates_from_list
def remove_duplicates_from_list(array): "Preserves the order of elements in the list" output = [] unique = set() for a in array: if a not in unique: unique.add(a) output.append(a) return output
python
def remove_duplicates_from_list(array): "Preserves the order of elements in the list" output = [] unique = set() for a in array: if a not in unique: unique.add(a) output.append(a) return output
[ "def", "remove_duplicates_from_list", "(", "array", ")", ":", "output", "=", "[", "]", "unique", "=", "set", "(", ")", "for", "a", "in", "array", ":", "if", "a", "not", "in", "unique", ":", "unique", ".", "add", "(", "a", ")", "output", ".", "append", "(", "a", ")", "return", "output" ]
Preserves the order of elements in the list
[ "Preserves", "the", "order", "of", "elements", "in", "the", "list" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/tensorflow_to_barracuda.py#L511-L519
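The order-preserving dedup, restated for a quick standalone check:

def remove_duplicates_from_list(array):
    output, unique = [], set()
    for a in array:
        if a not in unique:
            unique.add(a)      # remember what has been seen
            output.append(a)   # but keep first-occurrence order
    return output

assert remove_duplicates_from_list(['b', 'a', 'b', 'c', 'a']) == ['b', 'a', 'c']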
21,363
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/tensorflow_to_barracuda.py
pool_to_HW
def pool_to_HW(shape, data_frmt): """ Convert from NHWC|NCHW => HW """ if len(shape) != 4: return shape # Not NHWC|NCHW, return as is if data_frmt == 'NCHW': return [shape[2], shape[3]] return [shape[1], shape[2]]
python
def pool_to_HW(shape, data_frmt): """ Convert from NHWC|NCHW => HW """ if len(shape) != 4: return shape # Not NHWC|NCHW, return as is if data_frmt == 'NCHW': return [shape[2], shape[3]] return [shape[1], shape[2]]
[ "def", "pool_to_HW", "(", "shape", ",", "data_frmt", ")", ":", "if", "len", "(", "shape", ")", "!=", "4", ":", "return", "shape", "# Not NHWC|NCHW, return as is", "if", "data_frmt", "==", "'NCHW'", ":", "return", "[", "shape", "[", "2", "]", ",", "shape", "[", "3", "]", "]", "return", "[", "shape", "[", "1", "]", ",", "shape", "[", "2", "]", "]" ]
Convert from NHWC|NCHW => HW
[ "Convert", "from", "NHWC|NCHW", "=", ">", "HW" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/tensorflow_to_barracuda.py#L523-L530
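A quick shape check for `pool_to_HW`: NHWC keeps height/width at indices 1-2, NCHW at indices 2-3, and anything that is not rank 4 passes through untouched (definition copied from the record):

def pool_to_HW(shape, data_frmt):
    if len(shape) != 4:
        return shape  # not NHWC/NCHW, return as is
    if data_frmt == 'NCHW':
        return [shape[2], shape[3]]
    return [shape[1], shape[2]]

assert pool_to_HW([1, 28, 28, 3], 'NHWC') == [28, 28]
assert pool_to_HW([1, 3, 28, 28], 'NCHW') == [28, 28]
assert pool_to_HW([28, 28], 'NHWC') == [28, 28]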
21,364
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/trainer_controller.py
TrainerController._export_graph
def _export_graph(self): """ Exports latest saved models to .nn format for Unity embedding. """ for brain_name in self.trainers.keys(): self.trainers[brain_name].export_model()
python
def _export_graph(self): """ Exports latest saved models to .nn format for Unity embedding. """ for brain_name in self.trainers.keys(): self.trainers[brain_name].export_model()
[ "def", "_export_graph", "(", "self", ")", ":", "for", "brain_name", "in", "self", ".", "trainers", ".", "keys", "(", ")", ":", "self", ".", "trainers", "[", "brain_name", "]", ".", "export_model", "(", ")" ]
Exports latest saved models to .nn format for Unity embedding.
[ "Exports", "latest", "saved", "models", "to", ".", "nn", "format", "for", "Unity", "embedding", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/trainer_controller.py#L115-L120
21,365
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/trainer_controller.py
TrainerController._reset_env
def _reset_env(self, env: BaseUnityEnvironment): """Resets the environment. Returns: A Data structure corresponding to the initial reset state of the environment. """ if self.meta_curriculum is not None: return env.reset(train_mode=self.fast_simulation, config=self.meta_curriculum.get_config()) else: return env.reset(train_mode=self.fast_simulation)
python
def _reset_env(self, env: BaseUnityEnvironment): """Resets the environment. Returns: A Data structure corresponding to the initial reset state of the environment. """ if self.meta_curriculum is not None: return env.reset(train_mode=self.fast_simulation, config=self.meta_curriculum.get_config()) else: return env.reset(train_mode=self.fast_simulation)
[ "def", "_reset_env", "(", "self", ",", "env", ":", "BaseUnityEnvironment", ")", ":", "if", "self", ".", "meta_curriculum", "is", "not", "None", ":", "return", "env", ".", "reset", "(", "train_mode", "=", "self", ".", "fast_simulation", ",", "config", "=", "self", ".", "meta_curriculum", ".", "get_config", "(", ")", ")", "else", ":", "return", "env", ".", "reset", "(", "train_mode", "=", "self", ".", "fast_simulation", ")" ]
Resets the environment.

Returns:
    A data structure corresponding to the initial reset state of the
    environment.
[ "Resets", "the", "environment", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/trainer_controller.py#L183-L193
21,366
Unity-Technologies/ml-agents
ml-agents-envs/mlagents/envs/socket_communicator.py
SocketCommunicator.close
def close(self):
        """
        Sends a shutdown signal to the unity environment, and closes the socket connection.
        """
        if self._socket is not None and self._conn is not None:
            message_input = UnityMessage()
            message_input.header.status = 400
            self._communicator_send(message_input.SerializeToString())
        if self._socket is not None:
            self._socket.close()
            self._socket = None
        if self._conn is not None:
            self._conn.close()
            self._conn = None
python
def close(self):
        """
        Sends a shutdown signal to the unity environment, and closes the socket connection.
        """
        if self._socket is not None and self._conn is not None:
            message_input = UnityMessage()
            message_input.header.status = 400
            self._communicator_send(message_input.SerializeToString())
        if self._socket is not None:
            self._socket.close()
            self._socket = None
        if self._conn is not None:
            self._conn.close()
            self._conn = None
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_socket", "is", "not", "None", "and", "self", ".", "_conn", "is", "not", "None", ":", "message_input", "=", "UnityMessage", "(", ")", "message_input", ".", "header", ".", "status", "=", "400", "self", ".", "_communicator_send", "(", "message_input", ".", "SerializeToString", "(", ")", ")", "if", "self", ".", "_socket", "is", "not", "None", ":", "self", ".", "_socket", ".", "close", "(", ")", "self", ".", "_socket", "=", "None", "if", "self", ".", "_socket", "is", "not", "None", ":", "self", ".", "_conn", ".", "close", "(", ")", "self", ".", "_conn", "=", "None" ]
Sends a shutdown signal to the unity environment, and closes the socket connection.
[ "Sends", "a", "shutdown", "signal", "to", "the", "unity", "environment", "and", "closes", "the", "socket", "connection", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents-envs/mlagents/envs/socket_communicator.py#L84-L97
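The teardown above boils down to closing two handles, each behind its own None check so that a half-initialized communicator still shuts down cleanly. A standalone sketch with plain sockets (names are illustrative):

import socket

def close_pair(listener, conn):
    # Close the accepted connection first, then the listening socket;
    # each handle gets its own guard so a partial setup is still safe.
    if conn is not None:
        conn.close()
    if listener is not None:
        listener.close()
    return None, None

listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
listener, conn = close_pair(listener, None)
assert listener is None and conn is None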
21,367
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/ppo/trainer.py
PPOTrainer.increment_step_and_update_last_reward
def increment_step_and_update_last_reward(self): """ Increments the step count of the trainer and updates the last reward """ if len(self.stats['Environment/Cumulative Reward']) > 0: mean_reward = np.mean(self.stats['Environment/Cumulative Reward']) self.policy.update_reward(mean_reward) self.policy.increment_step() self.step = self.policy.get_current_step()
python
def increment_step_and_update_last_reward(self): """ Increments the step count of the trainer and updates the last reward """ if len(self.stats['Environment/Cumulative Reward']) > 0: mean_reward = np.mean(self.stats['Environment/Cumulative Reward']) self.policy.update_reward(mean_reward) self.policy.increment_step() self.step = self.policy.get_current_step()
[ "def", "increment_step_and_update_last_reward", "(", "self", ")", ":", "if", "len", "(", "self", ".", "stats", "[", "'Environment/Cumulative Reward'", "]", ")", ">", "0", ":", "mean_reward", "=", "np", ".", "mean", "(", "self", ".", "stats", "[", "'Environment/Cumulative Reward'", "]", ")", "self", ".", "policy", ".", "update_reward", "(", "mean_reward", ")", "self", ".", "policy", ".", "increment_step", "(", ")", "self", ".", "step", "=", "self", ".", "policy", ".", "get_current_step", "(", ")" ]
Increments the step count of the trainer and updates the last reward
[ "Increment", "the", "step", "count", "of", "the", "trainer", "and", "Updates", "the", "last", "reward" ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/ppo/trainer.py#L99-L107
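A self-contained sketch of the same bookkeeping, with a hypothetical policy object standing in for PPOPolicy (only the reward/step accounting is real):

import numpy as np

class FakePolicy:
    # Stand-in for PPOPolicy: just records the last reward and step count.
    def __init__(self):
        self.last_reward, self.steps = 0.0, 0
    def update_reward(self, r): self.last_reward = r
    def increment_step(self): self.steps += 1
    def get_current_step(self): return self.steps

rewards = [1.0, 2.0, 3.0]          # stats['Environment/Cumulative Reward']
policy = FakePolicy()
if len(rewards) > 0:
    policy.update_reward(float(np.mean(rewards)))
policy.increment_step()
print(policy.last_reward, policy.get_current_step())   # 2.0 1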
21,368
Unity-Technologies/ml-agents
ml-agents/mlagents/trainers/ppo/trainer.py
PPOTrainer.update_policy
def update_policy(self): """ Uses demonstration_buffer to update the policy. """ self.trainer_metrics.start_policy_update_timer( number_experiences=len(self.training_buffer.update_buffer['actions']), mean_return=float(np.mean(self.cumulative_returns_since_policy_update))) n_sequences = max(int(self.trainer_parameters['batch_size'] / self.policy.sequence_length), 1) value_total, policy_total, forward_total, inverse_total = [], [], [], [] advantages = self.training_buffer.update_buffer['advantages'].get_batch() self.training_buffer.update_buffer['advantages'].set( (advantages - advantages.mean()) / (advantages.std() + 1e-10)) num_epoch = self.trainer_parameters['num_epoch'] for _ in range(num_epoch): self.training_buffer.update_buffer.shuffle() buffer = self.training_buffer.update_buffer for l in range(len(self.training_buffer.update_buffer['actions']) // n_sequences): start = l * n_sequences end = (l + 1) * n_sequences run_out = self.policy.update(buffer.make_mini_batch(start, end), n_sequences) value_total.append(run_out['value_loss']) policy_total.append(np.abs(run_out['policy_loss'])) if self.use_curiosity: inverse_total.append(run_out['inverse_loss']) forward_total.append(run_out['forward_loss']) self.stats['Losses/Value Loss'].append(np.mean(value_total)) self.stats['Losses/Policy Loss'].append(np.mean(policy_total)) if self.use_curiosity: self.stats['Losses/Forward Loss'].append(np.mean(forward_total)) self.stats['Losses/Inverse Loss'].append(np.mean(inverse_total)) self.training_buffer.reset_update_buffer() self.trainer_metrics.end_policy_update()
python
def update_policy(self): """ Uses training_buffer to update the policy. """ self.trainer_metrics.start_policy_update_timer( number_experiences=len(self.training_buffer.update_buffer['actions']), mean_return=float(np.mean(self.cumulative_returns_since_policy_update))) n_sequences = max(int(self.trainer_parameters['batch_size'] / self.policy.sequence_length), 1) value_total, policy_total, forward_total, inverse_total = [], [], [], [] advantages = self.training_buffer.update_buffer['advantages'].get_batch() self.training_buffer.update_buffer['advantages'].set( (advantages - advantages.mean()) / (advantages.std() + 1e-10)) num_epoch = self.trainer_parameters['num_epoch'] for _ in range(num_epoch): self.training_buffer.update_buffer.shuffle() buffer = self.training_buffer.update_buffer for l in range(len(self.training_buffer.update_buffer['actions']) // n_sequences): start = l * n_sequences end = (l + 1) * n_sequences run_out = self.policy.update(buffer.make_mini_batch(start, end), n_sequences) value_total.append(run_out['value_loss']) policy_total.append(np.abs(run_out['policy_loss'])) if self.use_curiosity: inverse_total.append(run_out['inverse_loss']) forward_total.append(run_out['forward_loss']) self.stats['Losses/Value Loss'].append(np.mean(value_total)) self.stats['Losses/Policy Loss'].append(np.mean(policy_total)) if self.use_curiosity: self.stats['Losses/Forward Loss'].append(np.mean(forward_total)) self.stats['Losses/Inverse Loss'].append(np.mean(inverse_total)) self.training_buffer.reset_update_buffer() self.trainer_metrics.end_policy_update()
[ "def", "update_policy", "(", "self", ")", ":", "self", ".", "trainer_metrics", ".", "start_policy_update_timer", "(", "number_experiences", "=", "len", "(", "self", ".", "training_buffer", ".", "update_buffer", "[", "'actions'", "]", ")", ",", "mean_return", "=", "float", "(", "np", ".", "mean", "(", "self", ".", "cumulative_returns_since_policy_update", ")", ")", ")", "n_sequences", "=", "max", "(", "int", "(", "self", ".", "trainer_parameters", "[", "'batch_size'", "]", "/", "self", ".", "policy", ".", "sequence_length", ")", ",", "1", ")", "value_total", ",", "policy_total", ",", "forward_total", ",", "inverse_total", "=", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", "advantages", "=", "self", ".", "training_buffer", ".", "update_buffer", "[", "'advantages'", "]", ".", "get_batch", "(", ")", "self", ".", "training_buffer", ".", "update_buffer", "[", "'advantages'", "]", ".", "set", "(", "(", "advantages", "-", "advantages", ".", "mean", "(", ")", ")", "/", "(", "advantages", ".", "std", "(", ")", "+", "1e-10", ")", ")", "num_epoch", "=", "self", ".", "trainer_parameters", "[", "'num_epoch'", "]", "for", "_", "in", "range", "(", "num_epoch", ")", ":", "self", ".", "training_buffer", ".", "update_buffer", ".", "shuffle", "(", ")", "buffer", "=", "self", ".", "training_buffer", ".", "update_buffer", "for", "l", "in", "range", "(", "len", "(", "self", ".", "training_buffer", ".", "update_buffer", "[", "'actions'", "]", ")", "//", "n_sequences", ")", ":", "start", "=", "l", "*", "n_sequences", "end", "=", "(", "l", "+", "1", ")", "*", "n_sequences", "run_out", "=", "self", ".", "policy", ".", "update", "(", "buffer", ".", "make_mini_batch", "(", "start", ",", "end", ")", ",", "n_sequences", ")", "value_total", ".", "append", "(", "run_out", "[", "'value_loss'", "]", ")", "policy_total", ".", "append", "(", "np", ".", "abs", "(", "run_out", "[", "'policy_loss'", "]", ")", ")", "if", "self", ".", "use_curiosity", ":", "inverse_total", ".", "append", "(", "run_out", "[", "'inverse_loss'", "]", ")", "forward_total", ".", "append", "(", "run_out", "[", "'forward_loss'", "]", ")", "self", ".", "stats", "[", "'Losses/Value Loss'", "]", ".", "append", "(", "np", ".", "mean", "(", "value_total", ")", ")", "self", ".", "stats", "[", "'Losses/Policy Loss'", "]", ".", "append", "(", "np", ".", "mean", "(", "policy_total", ")", ")", "if", "self", ".", "use_curiosity", ":", "self", ".", "stats", "[", "'Losses/Forward Loss'", "]", ".", "append", "(", "np", ".", "mean", "(", "forward_total", ")", ")", "self", ".", "stats", "[", "'Losses/Inverse Loss'", "]", ".", "append", "(", "np", ".", "mean", "(", "inverse_total", ")", ")", "self", ".", "training_buffer", ".", "reset_update_buffer", "(", ")", "self", ".", "trainer_metrics", ".", "end_policy_update", "(", ")" ]
Uses training_buffer to update the policy.
[ "Uses", "demonstration_buffer", "to", "update", "the", "policy", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents/mlagents/trainers/ppo/trainer.py#L315-L346
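Two pieces of update_policy are worth isolating: the advantage normalization (zero mean, unit variance, with 1e-10 guarding against a zero std) and the shuffled mini-batch walk. A NumPy-only sketch with made-up sizes; the actual gradient step is elided:

import numpy as np

rng = np.random.default_rng(0)
buffer = {'advantages': rng.normal(size=256), 'actions': rng.integers(0, 4, size=256)}

adv = buffer['advantages']
buffer['advantages'] = (adv - adv.mean()) / (adv.std() + 1e-10)

batch_size, sequence_length = 64, 1
n_sequences = max(int(batch_size / sequence_length), 1)
for _ in range(3):                                   # num_epoch
    perm = rng.permutation(len(buffer['actions']))   # shuffle once per epoch
    for i in range(len(buffer['actions']) // n_sequences):
        idx = perm[i * n_sequences:(i + 1) * n_sequences]
        mini_batch = {k: v[idx] for k, v in buffer.items()}
        # policy.update(mini_batch, n_sequences) would run one step here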
21,369
Unity-Technologies/ml-agents
ml-agents-envs/mlagents/envs/rpc_communicator.py
RpcCommunicator.create_server
def create_server(self): """ Creates the GRPC server. """ self.check_port(self.port) try: # Establish communication grpc self.server = grpc.server(ThreadPoolExecutor(max_workers=10)) self.unity_to_external = UnityToExternalServicerImplementation() add_UnityToExternalServicer_to_server(self.unity_to_external, self.server) # Using unspecified address, which means that grpc is communicating on all IPs # This is so that the docker container can connect. self.server.add_insecure_port('[::]:' + str(self.port)) self.server.start() self.is_open = True except: raise UnityWorkerInUseException(self.worker_id)
python
def create_server(self): """ Creates the GRPC server. """ self.check_port(self.port) try: # Establish communication grpc self.server = grpc.server(ThreadPoolExecutor(max_workers=10)) self.unity_to_external = UnityToExternalServicerImplementation() add_UnityToExternalServicer_to_server(self.unity_to_external, self.server) # Using unspecified address, which means that grpc is communicating on all IPs # This is so that the docker container can connect. self.server.add_insecure_port('[::]:' + str(self.port)) self.server.start() self.is_open = True except: raise UnityWorkerInUseException(self.worker_id)
[ "def", "create_server", "(", "self", ")", ":", "self", ".", "check_port", "(", "self", ".", "port", ")", "try", ":", "# Establish communication grpc", "self", ".", "server", "=", "grpc", ".", "server", "(", "ThreadPoolExecutor", "(", "max_workers", "=", "10", ")", ")", "self", ".", "unity_to_external", "=", "UnityToExternalServicerImplementation", "(", ")", "add_UnityToExternalServicer_to_server", "(", "self", ".", "unity_to_external", ",", "self", ".", "server", ")", "# Using unspecified address, which means that grpc is communicating on all IPs", "# This is so that the docker container can connect.", "self", ".", "server", ".", "add_insecure_port", "(", "'[::]:'", "+", "str", "(", "self", ".", "port", ")", ")", "self", ".", "server", ".", "start", "(", ")", "self", ".", "is_open", "=", "True", "except", ":", "raise", "UnityWorkerInUseException", "(", "self", ".", "worker_id", ")" ]
Creates the GRPC server.
[ "Creates", "the", "GRPC", "server", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents-envs/mlagents/envs/rpc_communicator.py#L46-L63
21,370
Unity-Technologies/ml-agents
ml-agents-envs/mlagents/envs/rpc_communicator.py
RpcCommunicator.check_port
def check_port(self, port): """ Attempts to bind to the requested communicator port, checking if it is already in use. """ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: s.bind(("localhost", port)) except socket.error: raise UnityWorkerInUseException(self.worker_id) finally: s.close()
python
def check_port(self, port): """ Attempts to bind to the requested communicator port, checking if it is already in use. """ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: s.bind(("localhost", port)) except socket.error: raise UnityWorkerInUseException(self.worker_id) finally: s.close()
[ "def", "check_port", "(", "self", ",", "port", ")", ":", "s", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "try", ":", "s", ".", "bind", "(", "(", "\"localhost\"", ",", "port", ")", ")", "except", "socket", ".", "error", ":", "raise", "UnityWorkerInUseException", "(", "self", ".", "worker_id", ")", "finally", ":", "s", ".", "close", "(", ")" ]
Attempts to bind to the requested communicator port, checking if it is already in use.
[ "Attempts", "to", "bind", "to", "the", "requested", "communicator", "port", "checking", "if", "it", "is", "already", "in", "use", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents-envs/mlagents/envs/rpc_communicator.py#L65-L75
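The probe generalizes to any "is this port free right now?" check; note the inherent race, since another process can still grab the port after the socket closes. A standalone version (the port number is arbitrary):

import socket

def port_is_free(port):
    # Binding succeeds only if nothing currently holds the port.
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.bind(('localhost', port))
        return True
    except socket.error:
        return False
    finally:
        s.close()

print(port_is_free(5005))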
21,371
Unity-Technologies/ml-agents
ml-agents-envs/mlagents/envs/rpc_communicator.py
RpcCommunicator.close
def close(self): """ Sends a shutdown signal to the unity environment, and closes the grpc connection. """ if self.is_open: message_input = UnityMessage() message_input.header.status = 400 self.unity_to_external.parent_conn.send(message_input) self.unity_to_external.parent_conn.close() self.server.stop(False) self.is_open = False
python
def close(self): """ Sends a shutdown signal to the unity environment, and closes the grpc connection. """ if self.is_open: message_input = UnityMessage() message_input.header.status = 400 self.unity_to_external.parent_conn.send(message_input) self.unity_to_external.parent_conn.close() self.server.stop(False) self.is_open = False
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "is_open", ":", "message_input", "=", "UnityMessage", "(", ")", "message_input", ".", "header", ".", "status", "=", "400", "self", ".", "unity_to_external", ".", "parent_conn", ".", "send", "(", "message_input", ")", "self", ".", "unity_to_external", ".", "parent_conn", ".", "close", "(", ")", "self", ".", "server", ".", "stop", "(", "False", ")", "self", ".", "is_open", "=", "False" ]
Sends a shutdown signal to the unity environment, and closes the grpc connection.
[ "Sends", "a", "shutdown", "signal", "to", "the", "unity", "environment", "and", "closes", "the", "grpc", "connection", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents-envs/mlagents/envs/rpc_communicator.py#L103-L113
21,372
Unity-Technologies/ml-agents
ml-agents-envs/mlagents/envs/brain.py
BrainInfo.from_agent_proto
def from_agent_proto(agent_info_list, brain_params): """ Converts list of agent infos to BrainInfo. """ vis_obs = [] for i in range(brain_params.number_visual_observations): obs = [BrainInfo.process_pixels(x.visual_observations[i], brain_params.camera_resolutions[i]['blackAndWhite']) for x in agent_info_list] vis_obs += [obs] if len(agent_info_list) == 0: memory_size = 0 else: memory_size = max([len(x.memories) for x in agent_info_list]) if memory_size == 0: memory = np.zeros((0, 0)) else: [x.memories.extend([0] * (memory_size - len(x.memories))) for x in agent_info_list] memory = np.array([list(x.memories) for x in agent_info_list]) total_num_actions = sum(brain_params.vector_action_space_size) mask_actions = np.ones((len(agent_info_list), total_num_actions)) for agent_index, agent_info in enumerate(agent_info_list): if agent_info.action_mask is not None: if len(agent_info.action_mask) == total_num_actions: mask_actions[agent_index, :] = [ 0 if agent_info.action_mask[k] else 1 for k in range(total_num_actions)] if any([np.isnan(x.reward) for x in agent_info_list]): logger.warning("An agent had a NaN reward for brain " + brain_params.brain_name) if any([np.isnan(x.stacked_vector_observation).any() for x in agent_info_list]): logger.warning("An agent had a NaN observation for brain " + brain_params.brain_name) if len(agent_info_list) == 0: vector_obs = np.zeros( (0, brain_params.vector_observation_space_size * brain_params.num_stacked_vector_observations) ) else: vector_obs = np.nan_to_num( np.array([x.stacked_vector_observation for x in agent_info_list]) ) brain_info = BrainInfo( visual_observation=vis_obs, vector_observation=vector_obs, text_observations=[x.text_observation for x in agent_info_list], memory=memory, reward=[x.reward if not np.isnan(x.reward) else 0 for x in agent_info_list], agents=[x.id for x in agent_info_list], local_done=[x.done for x in agent_info_list], vector_action=np.array([x.stored_vector_actions for x in agent_info_list]), text_action=[list(x.stored_text_actions) for x in agent_info_list], max_reached=[x.max_step_reached for x in agent_info_list], custom_observations=[x.custom_observation for x in agent_info_list], action_mask=mask_actions ) return brain_info
python
def from_agent_proto(agent_info_list, brain_params): """ Converts list of agent infos to BrainInfo. """ vis_obs = [] for i in range(brain_params.number_visual_observations): obs = [BrainInfo.process_pixels(x.visual_observations[i], brain_params.camera_resolutions[i]['blackAndWhite']) for x in agent_info_list] vis_obs += [obs] if len(agent_info_list) == 0: memory_size = 0 else: memory_size = max([len(x.memories) for x in agent_info_list]) if memory_size == 0: memory = np.zeros((0, 0)) else: [x.memories.extend([0] * (memory_size - len(x.memories))) for x in agent_info_list] memory = np.array([list(x.memories) for x in agent_info_list]) total_num_actions = sum(brain_params.vector_action_space_size) mask_actions = np.ones((len(agent_info_list), total_num_actions)) for agent_index, agent_info in enumerate(agent_info_list): if agent_info.action_mask is not None: if len(agent_info.action_mask) == total_num_actions: mask_actions[agent_index, :] = [ 0 if agent_info.action_mask[k] else 1 for k in range(total_num_actions)] if any([np.isnan(x.reward) for x in agent_info_list]): logger.warning("An agent had a NaN reward for brain " + brain_params.brain_name) if any([np.isnan(x.stacked_vector_observation).any() for x in agent_info_list]): logger.warning("An agent had a NaN observation for brain " + brain_params.brain_name) if len(agent_info_list) == 0: vector_obs = np.zeros( (0, brain_params.vector_observation_space_size * brain_params.num_stacked_vector_observations) ) else: vector_obs = np.nan_to_num( np.array([x.stacked_vector_observation for x in agent_info_list]) ) brain_info = BrainInfo( visual_observation=vis_obs, vector_observation=vector_obs, text_observations=[x.text_observation for x in agent_info_list], memory=memory, reward=[x.reward if not np.isnan(x.reward) else 0 for x in agent_info_list], agents=[x.id for x in agent_info_list], local_done=[x.done for x in agent_info_list], vector_action=np.array([x.stored_vector_actions for x in agent_info_list]), text_action=[list(x.stored_text_actions) for x in agent_info_list], max_reached=[x.max_step_reached for x in agent_info_list], custom_observations=[x.custom_observation for x in agent_info_list], action_mask=mask_actions ) return brain_info
[ "def", "from_agent_proto", "(", "agent_info_list", ",", "brain_params", ")", ":", "vis_obs", "=", "[", "]", "for", "i", "in", "range", "(", "brain_params", ".", "number_visual_observations", ")", ":", "obs", "=", "[", "BrainInfo", ".", "process_pixels", "(", "x", ".", "visual_observations", "[", "i", "]", ",", "brain_params", ".", "camera_resolutions", "[", "i", "]", "[", "'blackAndWhite'", "]", ")", "for", "x", "in", "agent_info_list", "]", "vis_obs", "+=", "[", "obs", "]", "if", "len", "(", "agent_info_list", ")", "==", "0", ":", "memory_size", "=", "0", "else", ":", "memory_size", "=", "max", "(", "[", "len", "(", "x", ".", "memories", ")", "for", "x", "in", "agent_info_list", "]", ")", "if", "memory_size", "==", "0", ":", "memory", "=", "np", ".", "zeros", "(", "(", "0", ",", "0", ")", ")", "else", ":", "[", "x", ".", "memories", ".", "extend", "(", "[", "0", "]", "*", "(", "memory_size", "-", "len", "(", "x", ".", "memories", ")", ")", ")", "for", "x", "in", "agent_info_list", "]", "memory", "=", "np", ".", "array", "(", "[", "list", "(", "x", ".", "memories", ")", "for", "x", "in", "agent_info_list", "]", ")", "total_num_actions", "=", "sum", "(", "brain_params", ".", "vector_action_space_size", ")", "mask_actions", "=", "np", ".", "ones", "(", "(", "len", "(", "agent_info_list", ")", ",", "total_num_actions", ")", ")", "for", "agent_index", ",", "agent_info", "in", "enumerate", "(", "agent_info_list", ")", ":", "if", "agent_info", ".", "action_mask", "is", "not", "None", ":", "if", "len", "(", "agent_info", ".", "action_mask", ")", "==", "total_num_actions", ":", "mask_actions", "[", "agent_index", ",", ":", "]", "=", "[", "0", "if", "agent_info", ".", "action_mask", "[", "k", "]", "else", "1", "for", "k", "in", "range", "(", "total_num_actions", ")", "]", "if", "any", "(", "[", "np", ".", "isnan", "(", "x", ".", "reward", ")", "for", "x", "in", "agent_info_list", "]", ")", ":", "logger", ".", "warning", "(", "\"An agent had a NaN reward for brain \"", "+", "brain_params", ".", "brain_name", ")", "if", "any", "(", "[", "np", ".", "isnan", "(", "x", ".", "stacked_vector_observation", ")", ".", "any", "(", ")", "for", "x", "in", "agent_info_list", "]", ")", ":", "logger", ".", "warning", "(", "\"An agent had a NaN observation for brain \"", "+", "brain_params", ".", "brain_name", ")", "if", "len", "(", "agent_info_list", ")", "==", "0", ":", "vector_obs", "=", "np", ".", "zeros", "(", "(", "0", ",", "brain_params", ".", "vector_observation_space_size", "*", "brain_params", ".", "num_stacked_vector_observations", ")", ")", "else", ":", "vector_obs", "=", "np", ".", "nan_to_num", "(", "np", ".", "array", "(", "[", "x", ".", "stacked_vector_observation", "for", "x", "in", "agent_info_list", "]", ")", ")", "brain_info", "=", "BrainInfo", "(", "visual_observation", "=", "vis_obs", ",", "vector_observation", "=", "vector_obs", ",", "text_observations", "=", "[", "x", ".", "text_observation", "for", "x", "in", "agent_info_list", "]", ",", "memory", "=", "memory", ",", "reward", "=", "[", "x", ".", "reward", "if", "not", "np", ".", "isnan", "(", "x", ".", "reward", ")", "else", "0", "for", "x", "in", "agent_info_list", "]", ",", "agents", "=", "[", "x", ".", "id", "for", "x", "in", "agent_info_list", "]", ",", "local_done", "=", "[", "x", ".", "done", "for", "x", "in", "agent_info_list", "]", ",", "vector_action", "=", "np", ".", "array", "(", "[", "x", ".", "stored_vector_actions", "for", "x", "in", "agent_info_list", "]", ")", ",", "text_action", "=", "[", "list", "(", "x", ".", 
"stored_text_actions", ")", "for", "x", "in", "agent_info_list", "]", ",", "max_reached", "=", "[", "x", ".", "max_step_reached", "for", "x", "in", "agent_info_list", "]", ",", "custom_observations", "=", "[", "x", ".", "custom_observation", "for", "x", "in", "agent_info_list", "]", ",", "action_mask", "=", "mask_actions", ")", "return", "brain_info" ]
Converts list of agent infos to BrainInfo.
[ "Converts", "list", "of", "agent", "infos", "to", "BrainInfo", "." ]
37d139af636e4a2351751fbf0f2fca5a9ed7457f
https://github.com/Unity-Technologies/ml-agents/blob/37d139af636e4a2351751fbf0f2fca5a9ed7457f/ml-agents-envs/mlagents/envs/brain.py#L85-L138
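Two conversions in from_agent_proto are easy to show in isolation: right-padding variable-length memory vectors into a rectangular array, and inverting the per-action mask (a masked action becomes 0, an allowed one 1). A NumPy sketch with made-up values:

import numpy as np

memories = [[1.0, 2.0], [3.0], []]
memory_size = max(len(m) for m in memories)
padded = np.array([m + [0] * (memory_size - len(m)) for m in memories])
print(padded.shape)   # (3, 2)

total_num_actions = 4
proto_mask = [False, True, False, False]   # True means "masked"
mask_actions = np.array([0 if proto_mask[k] else 1 for k in range(total_num_actions)])
print(mask_actions)   # [1 0 1 1]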
21,373
apache/incubator-superset
superset/views/dashboard.py
Dashboard.new
def new(self): """Creates a new, blank dashboard and redirects to it in edit mode""" new_dashboard = models.Dashboard( dashboard_title='[ untitled dashboard ]', owners=[g.user], ) db.session.add(new_dashboard) db.session.commit() return redirect(f'/superset/dashboard/{new_dashboard.id}/?edit=true')
python
def new(self): """Creates a new, blank dashboard and redirects to it in edit mode""" new_dashboard = models.Dashboard( dashboard_title='[ untitled dashboard ]', owners=[g.user], ) db.session.add(new_dashboard) db.session.commit() return redirect(f'/superset/dashboard/{new_dashboard.id}/?edit=true')
[ "def", "new", "(", "self", ")", ":", "new_dashboard", "=", "models", ".", "Dashboard", "(", "dashboard_title", "=", "'[ untitled dashboard ]'", ",", "owners", "=", "[", "g", ".", "user", "]", ",", ")", "db", ".", "session", ".", "add", "(", "new_dashboard", ")", "db", ".", "session", ".", "commit", "(", ")", "return", "redirect", "(", "f'/superset/dashboard/{new_dashboard.id}/?edit=true'", ")" ]
Creates a new, blank dashboard and redirects to it in edit mode
[ "Creates", "a", "new", "blank", "dashboard", "and", "redirects", "to", "it", "in", "edit", "mode" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/views/dashboard.py#L32-L40
21,374
apache/incubator-superset
superset/views/tags.py
TagView.get
def get(self, object_type, object_id): """List all tags a given object has.""" if object_id == 0: return json_success(json.dumps([])) query = db.session.query(TaggedObject).filter(and_( TaggedObject.object_type == object_type, TaggedObject.object_id == object_id)) tags = [{'id': obj.tag.id, 'name': obj.tag.name} for obj in query] return json_success(json.dumps(tags))
python
def get(self, object_type, object_id): """List all tags a given object has.""" if object_id == 0: return json_success(json.dumps([])) query = db.session.query(TaggedObject).filter(and_( TaggedObject.object_type == object_type, TaggedObject.object_id == object_id)) tags = [{'id': obj.tag.id, 'name': obj.tag.name} for obj in query] return json_success(json.dumps(tags))
[ "def", "get", "(", "self", ",", "object_type", ",", "object_id", ")", ":", "if", "object_id", "==", "0", ":", "return", "json_success", "(", "json", ".", "dumps", "(", "[", "]", ")", ")", "query", "=", "db", ".", "session", ".", "query", "(", "TaggedObject", ")", ".", "filter", "(", "and_", "(", "TaggedObject", ".", "object_type", "==", "object_type", ",", "TaggedObject", ".", "object_id", "==", "object_id", ")", ")", "tags", "=", "[", "{", "'id'", ":", "obj", ".", "tag", ".", "id", ",", "'name'", ":", "obj", ".", "tag", ".", "name", "}", "for", "obj", "in", "query", "]", "return", "json_success", "(", "json", ".", "dumps", "(", "tags", ")", ")" ]
List all tags a given object has.
[ "List", "all", "tags", "a", "given", "object", "has", "." ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/views/tags.py#L78-L87
21,375
apache/incubator-superset
superset/views/tags.py
TagView.post
def post(self, object_type, object_id): """Add new tags to an object.""" if object_id == 0: return Response(status=404) tagged_objects = [] for name in request.get_json(force=True): if ':' in name: type_name = name.split(':', 1)[0] type_ = TagTypes[type_name] else: type_ = TagTypes.custom tag = db.session.query(Tag).filter_by(name=name, type=type_).first() if not tag: tag = Tag(name=name, type=type_) tagged_objects.append( TaggedObject( object_id=object_id, object_type=object_type, tag=tag, ), ) db.session.add_all(tagged_objects) db.session.commit() return Response(status=201)
python
def post(self, object_type, object_id): """Add new tags to an object.""" if object_id == 0: return Response(status=404) tagged_objects = [] for name in request.get_json(force=True): if ':' in name: type_name = name.split(':', 1)[0] type_ = TagTypes[type_name] else: type_ = TagTypes.custom tag = db.session.query(Tag).filter_by(name=name, type=type_).first() if not tag: tag = Tag(name=name, type=type_) tagged_objects.append( TaggedObject( object_id=object_id, object_type=object_type, tag=tag, ), ) db.session.add_all(tagged_objects) db.session.commit() return Response(status=201)
[ "def", "post", "(", "self", ",", "object_type", ",", "object_id", ")", ":", "if", "object_id", "==", "0", ":", "return", "Response", "(", "status", "=", "404", ")", "tagged_objects", "=", "[", "]", "for", "name", "in", "request", ".", "get_json", "(", "force", "=", "True", ")", ":", "if", "':'", "in", "name", ":", "type_name", "=", "name", ".", "split", "(", "':'", ",", "1", ")", "[", "0", "]", "type_", "=", "TagTypes", "[", "type_name", "]", "else", ":", "type_", "=", "TagTypes", ".", "custom", "tag", "=", "db", ".", "session", ".", "query", "(", "Tag", ")", ".", "filter_by", "(", "name", "=", "name", ",", "type", "=", "type_", ")", ".", "first", "(", ")", "if", "not", "tag", ":", "tag", "=", "Tag", "(", "name", "=", "name", ",", "type", "=", "type_", ")", "tagged_objects", ".", "append", "(", "TaggedObject", "(", "object_id", "=", "object_id", ",", "object_type", "=", "object_type", ",", "tag", "=", "tag", ",", ")", ",", ")", "db", ".", "session", ".", "add_all", "(", "tagged_objects", ")", "db", ".", "session", ".", "commit", "(", ")", "return", "Response", "(", "status", "=", "201", ")" ]
Add new tags to an object.
[ "Add", "new", "tags", "to", "an", "object", "." ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/views/tags.py#L91-L119
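The name parsing in TagView.post separates an optional "type:" prefix from the tag value; a bare name falls back to the custom type. The branch in isolation (helper name is illustrative):

def parse_tag_type(name):
    # "owner:1" -> "owner"; a bare name falls back to the custom type.
    if ':' in name:
        return name.split(':', 1)[0]
    return 'custom'

print(parse_tag_type('owner:1'))     # owner
print(parse_tag_type('important'))   # custom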
21,376
apache/incubator-superset
superset/views/tags.py
TagView.delete
def delete(self, object_type, object_id): """Remove tags from an object.""" tag_names = request.get_json(force=True) if not tag_names: return Response(status=403) db.session.query(TaggedObject).filter(and_( TaggedObject.object_type == object_type, TaggedObject.object_id == object_id), TaggedObject.tag.has(Tag.name.in_(tag_names)), ).delete(synchronize_session=False) db.session.commit() return Response(status=204)
python
def delete(self, object_type, object_id): """Remove tags from an object.""" tag_names = request.get_json(force=True) if not tag_names: return Response(status=403) db.session.query(TaggedObject).filter(and_( TaggedObject.object_type == object_type, TaggedObject.object_id == object_id), TaggedObject.tag.has(Tag.name.in_(tag_names)), ).delete(synchronize_session=False) db.session.commit() return Response(status=204)
[ "def", "delete", "(", "self", ",", "object_type", ",", "object_id", ")", ":", "tag_names", "=", "request", ".", "get_json", "(", "force", "=", "True", ")", "if", "not", "tag_names", ":", "return", "Response", "(", "status", "=", "403", ")", "db", ".", "session", ".", "query", "(", "TaggedObject", ")", ".", "filter", "(", "and_", "(", "TaggedObject", ".", "object_type", "==", "object_type", ",", "TaggedObject", ".", "object_id", "==", "object_id", ")", ",", "TaggedObject", ".", "tag", ".", "has", "(", "Tag", ".", "name", ".", "in_", "(", "tag_names", ")", ")", ",", ")", ".", "delete", "(", "synchronize_session", "=", "False", ")", "db", ".", "session", ".", "commit", "(", ")", "return", "Response", "(", "status", "=", "204", ")" ]
Remove tags from an object.
[ "Remove", "tags", "from", "an", "object", "." ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/views/tags.py#L123-L136
21,377
apache/incubator-superset
superset/viz.py
BaseViz.query_obj
def query_obj(self): """Building a query object""" form_data = self.form_data self.process_query_filters() gb = form_data.get('groupby') or [] metrics = self.all_metrics or [] columns = form_data.get('columns') or [] groupby = [] for o in gb + columns: if o not in groupby: groupby.append(o) is_timeseries = self.is_timeseries if DTTM_ALIAS in groupby: groupby.remove(DTTM_ALIAS) is_timeseries = True granularity = ( form_data.get('granularity') or form_data.get('granularity_sqla') ) limit = int(form_data.get('limit') or 0) timeseries_limit_metric = form_data.get('timeseries_limit_metric') row_limit = int(form_data.get('row_limit') or config.get('ROW_LIMIT')) # default order direction order_desc = form_data.get('order_desc', True) since, until = utils.get_since_until(relative_end=relative_end, time_range=form_data.get('time_range'), since=form_data.get('since'), until=form_data.get('until')) time_shift = form_data.get('time_shift', '') self.time_shift = utils.parse_human_timedelta(time_shift) from_dttm = None if since is None else (since - self.time_shift) to_dttm = None if until is None else (until - self.time_shift) if from_dttm and to_dttm and from_dttm > to_dttm: raise Exception(_('From date cannot be larger than to date')) self.from_dttm = from_dttm self.to_dttm = to_dttm # extras are used to query elements specific to a datasource type # for instance the extra where clause that applies only to Tables extras = { 'where': form_data.get('where', ''), 'having': form_data.get('having', ''), 'having_druid': form_data.get('having_filters', []), 'time_grain_sqla': form_data.get('time_grain_sqla', ''), 'druid_time_origin': form_data.get('druid_time_origin', ''), } d = { 'granularity': granularity, 'from_dttm': from_dttm, 'to_dttm': to_dttm, 'is_timeseries': is_timeseries, 'groupby': groupby, 'metrics': metrics, 'row_limit': row_limit, 'filter': self.form_data.get('filters', []), 'timeseries_limit': limit, 'extras': extras, 'timeseries_limit_metric': timeseries_limit_metric, 'order_desc': order_desc, 'prequeries': [], 'is_prequery': False, } return d
python
def query_obj(self): """Building a query object""" form_data = self.form_data self.process_query_filters() gb = form_data.get('groupby') or [] metrics = self.all_metrics or [] columns = form_data.get('columns') or [] groupby = [] for o in gb + columns: if o not in groupby: groupby.append(o) is_timeseries = self.is_timeseries if DTTM_ALIAS in groupby: groupby.remove(DTTM_ALIAS) is_timeseries = True granularity = ( form_data.get('granularity') or form_data.get('granularity_sqla') ) limit = int(form_data.get('limit') or 0) timeseries_limit_metric = form_data.get('timeseries_limit_metric') row_limit = int(form_data.get('row_limit') or config.get('ROW_LIMIT')) # default order direction order_desc = form_data.get('order_desc', True) since, until = utils.get_since_until(relative_end=relative_end, time_range=form_data.get('time_range'), since=form_data.get('since'), until=form_data.get('until')) time_shift = form_data.get('time_shift', '') self.time_shift = utils.parse_human_timedelta(time_shift) from_dttm = None if since is None else (since - self.time_shift) to_dttm = None if until is None else (until - self.time_shift) if from_dttm and to_dttm and from_dttm > to_dttm: raise Exception(_('From date cannot be larger than to date')) self.from_dttm = from_dttm self.to_dttm = to_dttm # extras are used to query elements specific to a datasource type # for instance the extra where clause that applies only to Tables extras = { 'where': form_data.get('where', ''), 'having': form_data.get('having', ''), 'having_druid': form_data.get('having_filters', []), 'time_grain_sqla': form_data.get('time_grain_sqla', ''), 'druid_time_origin': form_data.get('druid_time_origin', ''), } d = { 'granularity': granularity, 'from_dttm': from_dttm, 'to_dttm': to_dttm, 'is_timeseries': is_timeseries, 'groupby': groupby, 'metrics': metrics, 'row_limit': row_limit, 'filter': self.form_data.get('filters', []), 'timeseries_limit': limit, 'extras': extras, 'timeseries_limit_metric': timeseries_limit_metric, 'order_desc': order_desc, 'prequeries': [], 'is_prequery': False, } return d
[ "def", "query_obj", "(", "self", ")", ":", "form_data", "=", "self", ".", "form_data", "self", ".", "process_query_filters", "(", ")", "gb", "=", "form_data", ".", "get", "(", "'groupby'", ")", "or", "[", "]", "metrics", "=", "self", ".", "all_metrics", "or", "[", "]", "columns", "=", "form_data", ".", "get", "(", "'columns'", ")", "or", "[", "]", "groupby", "=", "[", "]", "for", "o", "in", "gb", "+", "columns", ":", "if", "o", "not", "in", "groupby", ":", "groupby", ".", "append", "(", "o", ")", "is_timeseries", "=", "self", ".", "is_timeseries", "if", "DTTM_ALIAS", "in", "groupby", ":", "groupby", ".", "remove", "(", "DTTM_ALIAS", ")", "is_timeseries", "=", "True", "granularity", "=", "(", "form_data", ".", "get", "(", "'granularity'", ")", "or", "form_data", ".", "get", "(", "'granularity_sqla'", ")", ")", "limit", "=", "int", "(", "form_data", ".", "get", "(", "'limit'", ")", "or", "0", ")", "timeseries_limit_metric", "=", "form_data", ".", "get", "(", "'timeseries_limit_metric'", ")", "row_limit", "=", "int", "(", "form_data", ".", "get", "(", "'row_limit'", ")", "or", "config", ".", "get", "(", "'ROW_LIMIT'", ")", ")", "# default order direction", "order_desc", "=", "form_data", ".", "get", "(", "'order_desc'", ",", "True", ")", "since", ",", "until", "=", "utils", ".", "get_since_until", "(", "relative_end", "=", "relative_end", ",", "time_range", "=", "form_data", ".", "get", "(", "'time_range'", ")", ",", "since", "=", "form_data", ".", "get", "(", "'since'", ")", ",", "until", "=", "form_data", ".", "get", "(", "'until'", ")", ")", "time_shift", "=", "form_data", ".", "get", "(", "'time_shift'", ",", "''", ")", "self", ".", "time_shift", "=", "utils", ".", "parse_human_timedelta", "(", "time_shift", ")", "from_dttm", "=", "None", "if", "since", "is", "None", "else", "(", "since", "-", "self", ".", "time_shift", ")", "to_dttm", "=", "None", "if", "until", "is", "None", "else", "(", "until", "-", "self", ".", "time_shift", ")", "if", "from_dttm", "and", "to_dttm", "and", "from_dttm", ">", "to_dttm", ":", "raise", "Exception", "(", "_", "(", "'From date cannot be larger than to date'", ")", ")", "self", ".", "from_dttm", "=", "from_dttm", "self", ".", "to_dttm", "=", "to_dttm", "# extras are used to query elements specific to a datasource type", "# for instance the extra where clause that applies only to Tables", "extras", "=", "{", "'where'", ":", "form_data", ".", "get", "(", "'where'", ",", "''", ")", ",", "'having'", ":", "form_data", ".", "get", "(", "'having'", ",", "''", ")", ",", "'having_druid'", ":", "form_data", ".", "get", "(", "'having_filters'", ",", "[", "]", ")", ",", "'time_grain_sqla'", ":", "form_data", ".", "get", "(", "'time_grain_sqla'", ",", "''", ")", ",", "'druid_time_origin'", ":", "form_data", ".", "get", "(", "'druid_time_origin'", ",", "''", ")", ",", "}", "d", "=", "{", "'granularity'", ":", "granularity", ",", "'from_dttm'", ":", "from_dttm", ",", "'to_dttm'", ":", "to_dttm", ",", "'is_timeseries'", ":", "is_timeseries", ",", "'groupby'", ":", "groupby", ",", "'metrics'", ":", "metrics", ",", "'row_limit'", ":", "row_limit", ",", "'filter'", ":", "self", ".", "form_data", ".", "get", "(", "'filters'", ",", "[", "]", ")", ",", "'timeseries_limit'", ":", "limit", ",", "'extras'", ":", "extras", ",", "'timeseries_limit_metric'", ":", "timeseries_limit_metric", ",", "'order_desc'", ":", "order_desc", ",", "'prequeries'", ":", "[", "]", ",", "'is_prequery'", ":", "False", ",", "}", "return", "d" ]
Building a query object
[ "Building", "a", "query", "object" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/viz.py#L249-L317
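The groupby assembly at the top of query_obj is an ordered, de-duplicated merge of the groupby and columns lists, with the synthetic time column peeled off into an is_timeseries flag. In isolation (DTTM_ALIAS is superset's '__timestamp' alias):

DTTM_ALIAS = '__timestamp'
gb, columns = ['country', DTTM_ALIAS], ['country', 'region']

groupby = []
for o in gb + columns:
    if o not in groupby:
        groupby.append(o)

is_timeseries = False
if DTTM_ALIAS in groupby:
    groupby.remove(DTTM_ALIAS)
    is_timeseries = True

print(groupby, is_timeseries)   # ['country', 'region'] True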
21,378
apache/incubator-superset
superset/viz.py
BaseViz.data
def data(self): """This is the data object serialized to the js layer""" content = { 'form_data': self.form_data, 'token': self.token, 'viz_name': self.viz_type, 'filter_select_enabled': self.datasource.filter_select_enabled, } return content
python
def data(self): """This is the data object serialized to the js layer""" content = { 'form_data': self.form_data, 'token': self.token, 'viz_name': self.viz_type, 'filter_select_enabled': self.datasource.filter_select_enabled, } return content
[ "def", "data", "(", "self", ")", ":", "content", "=", "{", "'form_data'", ":", "self", ".", "form_data", ",", "'token'", ":", "self", ".", "token", ",", "'viz_name'", ":", "self", ".", "viz_type", ",", "'filter_select_enabled'", ":", "self", ".", "datasource", ".", "filter_select_enabled", ",", "}", "return", "content" ]
This is the data object serialized to the js layer
[ "This", "is", "the", "data", "object", "serialized", "to", "the", "js", "layer" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/viz.py#L472-L480
21,379
apache/incubator-superset
superset/viz.py
HistogramViz.query_obj
def query_obj(self): """Returns the query object for this visualization""" d = super().query_obj() d['row_limit'] = self.form_data.get( 'row_limit', int(config.get('VIZ_ROW_LIMIT'))) numeric_columns = self.form_data.get('all_columns_x') if numeric_columns is None: raise Exception(_('Must have at least one numeric column specified')) self.columns = numeric_columns d['columns'] = numeric_columns + self.groupby # override groupby entry to avoid aggregation d['groupby'] = [] return d
python
def query_obj(self): """Returns the query object for this visualization""" d = super().query_obj() d['row_limit'] = self.form_data.get( 'row_limit', int(config.get('VIZ_ROW_LIMIT'))) numeric_columns = self.form_data.get('all_columns_x') if numeric_columns is None: raise Exception(_('Must have at least one numeric column specified')) self.columns = numeric_columns d['columns'] = numeric_columns + self.groupby # override groupby entry to avoid aggregation d['groupby'] = [] return d
[ "def", "query_obj", "(", "self", ")", ":", "d", "=", "super", "(", ")", ".", "query_obj", "(", ")", "d", "[", "'row_limit'", "]", "=", "self", ".", "form_data", ".", "get", "(", "'row_limit'", ",", "int", "(", "config", ".", "get", "(", "'VIZ_ROW_LIMIT'", ")", ")", ")", "numeric_columns", "=", "self", ".", "form_data", ".", "get", "(", "'all_columns_x'", ")", "if", "numeric_columns", "is", "None", ":", "raise", "Exception", "(", "_", "(", "'Must have at least one numeric column specified'", ")", ")", "self", ".", "columns", "=", "numeric_columns", "d", "[", "'columns'", "]", "=", "numeric_columns", "+", "self", ".", "groupby", "# override groupby entry to avoid aggregation", "d", "[", "'groupby'", "]", "=", "[", "]", "return", "d" ]
Returns the query object for this visualization
[ "Returns", "the", "query", "object", "for", "this", "visualization" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/viz.py#L1474-L1486
21,380
apache/incubator-superset
superset/viz.py
HistogramViz.get_data
def get_data(self, df): """Returns the chart data""" chart_data = [] if len(self.groupby) > 0: groups = df.groupby(self.groupby) else: groups = [((), df)] for keys, data in groups: chart_data.extend([{ 'key': self.labelify(keys, column), 'values': data[column].tolist()} for column in self.columns]) return chart_data
python
def get_data(self, df): """Returns the chart data""" chart_data = [] if len(self.groupby) > 0: groups = df.groupby(self.groupby) else: groups = [((), df)] for keys, data in groups: chart_data.extend([{ 'key': self.labelify(keys, column), 'values': data[column].tolist()} for column in self.columns]) return chart_data
[ "def", "get_data", "(", "self", ",", "df", ")", ":", "chart_data", "=", "[", "]", "if", "len", "(", "self", ".", "groupby", ")", ">", "0", ":", "groups", "=", "df", ".", "groupby", "(", "self", ".", "groupby", ")", "else", ":", "groups", "=", "[", "(", "(", ")", ",", "df", ")", "]", "for", "keys", ",", "data", "in", "groups", ":", "chart_data", ".", "extend", "(", "[", "{", "'key'", ":", "self", ".", "labelify", "(", "keys", ",", "column", ")", ",", "'values'", ":", "data", "[", "column", "]", ".", "tolist", "(", ")", "}", "for", "column", "in", "self", ".", "columns", "]", ")", "return", "chart_data" ]
Returns the chart data
[ "Returns", "the", "chart", "data" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/viz.py#L1498-L1510
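The grouping reduces to a plain pandas pattern: one series per (group, numeric column) pair, with an ungrouped frame wrapped to look like a single group. A runnable sketch with toy data (the key label is simplified relative to labelify):

import pandas as pd

df = pd.DataFrame({'gender': ['boy', 'boy', 'girl', 'girl'],
                   'num': [10.0, 12.0, 9.0, 11.0]})
groupby, columns = ['gender'], ['num']

groups = df.groupby(groupby[0]) if groupby else [((), df)]
chart_data = []
for keys, data in groups:
    chart_data.extend([{'key': f'{keys}/{column}',
                        'values': data[column].tolist()}
                       for column in columns])
print(chart_data)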
21,381
apache/incubator-superset
superset/viz.py
PartitionViz.levels_for
def levels_for(self, time_op, groups, df): """ Compute the partition at each `level` from the dataframe. """ levels = {} for i in range(0, len(groups) + 1): agg_df = df.groupby(groups[:i]) if i else df levels[i] = ( agg_df.mean() if time_op == 'agg_mean' else agg_df.sum(numeric_only=True)) return levels
python
def levels_for(self, time_op, groups, df): """ Compute the partition at each `level` from the dataframe. """ levels = {} for i in range(0, len(groups) + 1): agg_df = df.groupby(groups[:i]) if i else df levels[i] = ( agg_df.mean() if time_op == 'agg_mean' else agg_df.sum(numeric_only=True)) return levels
[ "def", "levels_for", "(", "self", ",", "time_op", ",", "groups", ",", "df", ")", ":", "levels", "=", "{", "}", "for", "i", "in", "range", "(", "0", ",", "len", "(", "groups", ")", "+", "1", ")", ":", "agg_df", "=", "df", ".", "groupby", "(", "groups", "[", ":", "i", "]", ")", "if", "i", "else", "df", "levels", "[", "i", "]", "=", "(", "agg_df", ".", "mean", "(", ")", "if", "time_op", "==", "'agg_mean'", "else", "agg_df", ".", "sum", "(", "numeric_only", "=", "True", ")", ")", "return", "levels" ]
Compute the partition at each `level` from the dataframe.
[ "Compute", "the", "partition", "at", "each", "level", "from", "the", "dataframe", "." ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/viz.py#L2648-L2658
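levels_for builds one aggregate per grouping depth: level 0 is the grand total, level i groups by the first i columns. The same loop on a toy frame:

import pandas as pd

df = pd.DataFrame({'region': ['EU', 'EU', 'US', 'US'],
                   'country': ['fr', 'de', 'us', 'us'],
                   'metric': [1.0, 2.0, 3.0, 4.0]})
groups = ['region', 'country']

levels = {}
for i in range(0, len(groups) + 1):
    agg_df = df.groupby(groups[:i]) if i else df
    levels[i] = agg_df.sum(numeric_only=True)   # .mean() for 'agg_mean'

print(levels[0]['metric'])             # grand total: 10.0
print(levels[1].loc['EU', 'metric'])   # per-region subtotal: 3.0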
21,382
apache/incubator-superset
superset/viz.py
PartitionViz.nest_values
def nest_values(self, levels, level=0, metric=None, dims=()): """ Nest values at each level on the back-end with access and setting, instead of summing from the bottom. """ if not level: return [{ 'name': m, 'val': levels[0][m], 'children': self.nest_values(levels, 1, m), } for m in levels[0].index] if level == 1: return [{ 'name': i, 'val': levels[1][metric][i], 'children': self.nest_values(levels, 2, metric, (i,)), } for i in levels[1][metric].index] if level >= len(levels): return [] return [{ 'name': i, 'val': levels[level][metric][dims][i], 'children': self.nest_values( levels, level + 1, metric, dims + (i,), ), } for i in levels[level][metric][dims].index]
python
def nest_values(self, levels, level=0, metric=None, dims=()): """ Nest values at each level on the back-end with access and setting, instead of summing from the bottom. """ if not level: return [{ 'name': m, 'val': levels[0][m], 'children': self.nest_values(levels, 1, m), } for m in levels[0].index] if level == 1: return [{ 'name': i, 'val': levels[1][metric][i], 'children': self.nest_values(levels, 2, metric, (i,)), } for i in levels[1][metric].index] if level >= len(levels): return [] return [{ 'name': i, 'val': levels[level][metric][dims][i], 'children': self.nest_values( levels, level + 1, metric, dims + (i,), ), } for i in levels[level][metric][dims].index]
[ "def", "nest_values", "(", "self", ",", "levels", ",", "level", "=", "0", ",", "metric", "=", "None", ",", "dims", "=", "(", ")", ")", ":", "if", "not", "level", ":", "return", "[", "{", "'name'", ":", "m", ",", "'val'", ":", "levels", "[", "0", "]", "[", "m", "]", ",", "'children'", ":", "self", ".", "nest_values", "(", "levels", ",", "1", ",", "m", ")", ",", "}", "for", "m", "in", "levels", "[", "0", "]", ".", "index", "]", "if", "level", "==", "1", ":", "return", "[", "{", "'name'", ":", "i", ",", "'val'", ":", "levels", "[", "1", "]", "[", "metric", "]", "[", "i", "]", ",", "'children'", ":", "self", ".", "nest_values", "(", "levels", ",", "2", ",", "metric", ",", "(", "i", ",", ")", ")", ",", "}", "for", "i", "in", "levels", "[", "1", "]", "[", "metric", "]", ".", "index", "]", "if", "level", ">=", "len", "(", "levels", ")", ":", "return", "[", "]", "return", "[", "{", "'name'", ":", "i", ",", "'val'", ":", "levels", "[", "level", "]", "[", "metric", "]", "[", "dims", "]", "[", "i", "]", ",", "'children'", ":", "self", ".", "nest_values", "(", "levels", ",", "level", "+", "1", ",", "metric", ",", "dims", "+", "(", "i", ",", ")", ",", ")", ",", "}", "for", "i", "in", "levels", "[", "level", "]", "[", "metric", "]", "[", "dims", "]", ".", "index", "]" ]
Nest values at each level on the back-end with access and setting, instead of summing from the bottom.
[ "Nest", "values", "at", "each", "level", "on", "the", "back", "-", "end", "with", "access", "and", "setting", "instead", "of", "summing", "from", "the", "bottom", "." ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/viz.py#L2701-L2726
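The recursion turns those per-level aggregates into a name/val/children tree, descending one grouping level per call. A stripped-down sketch that flattens away the metric/dims bookkeeping but keeps the recursive shape (plain dicts stand in for the pandas objects):

def nest(levels, level=0):
    if level >= len(levels):
        return []
    return [{'name': k, 'val': v, 'children': nest(levels, level + 1)}
            for k, v in levels[level].items()]

levels = {0: {'metric': 10.0}, 1: {'EU': 3.0, 'US': 7.0}}
print(nest(levels))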
21,383
apache/incubator-superset
superset/connectors/base/models.py
BaseDatasource.get_fk_many_from_list
def get_fk_many_from_list( self, object_list, fkmany, fkmany_class, key_attr): """Update ORM one-to-many list from object list Used for syncing metrics and columns using the same code""" object_dict = {o.get(key_attr): o for o in object_list} object_keys = [o.get(key_attr) for o in object_list] # delete fks that have been removed fkmany = [o for o in fkmany if getattr(o, key_attr) in object_keys] # sync existing fks for fk in fkmany: obj = object_dict.get(getattr(fk, key_attr)) for attr in fkmany_class.update_from_object_fields: setattr(fk, attr, obj.get(attr)) # create new fks new_fks = [] orm_keys = [getattr(o, key_attr) for o in fkmany] for obj in object_list: key = obj.get(key_attr) if key not in orm_keys: del obj['id'] orm_kwargs = {} for k in obj: if ( k in fkmany_class.update_from_object_fields and k in obj ): orm_kwargs[k] = obj[k] new_obj = fkmany_class(**orm_kwargs) new_fks.append(new_obj) fkmany += new_fks return fkmany
python
def get_fk_many_from_list( self, object_list, fkmany, fkmany_class, key_attr): """Update ORM one-to-many list from object list Used for syncing metrics and columns using the same code""" object_dict = {o.get(key_attr): o for o in object_list} object_keys = [o.get(key_attr) for o in object_list] # delete fks that have been removed fkmany = [o for o in fkmany if getattr(o, key_attr) in object_keys] # sync existing fks for fk in fkmany: obj = object_dict.get(getattr(fk, key_attr)) for attr in fkmany_class.update_from_object_fields: setattr(fk, attr, obj.get(attr)) # create new fks new_fks = [] orm_keys = [getattr(o, key_attr) for o in fkmany] for obj in object_list: key = obj.get(key_attr) if key not in orm_keys: del obj['id'] orm_kwargs = {} for k in obj: if ( k in fkmany_class.update_from_object_fields and k in obj ): orm_kwargs[k] = obj[k] new_obj = fkmany_class(**orm_kwargs) new_fks.append(new_obj) fkmany += new_fks return fkmany
[ "def", "get_fk_many_from_list", "(", "self", ",", "object_list", ",", "fkmany", ",", "fkmany_class", ",", "key_attr", ")", ":", "object_dict", "=", "{", "o", ".", "get", "(", "key_attr", ")", ":", "o", "for", "o", "in", "object_list", "}", "object_keys", "=", "[", "o", ".", "get", "(", "key_attr", ")", "for", "o", "in", "object_list", "]", "# delete fks that have been removed", "fkmany", "=", "[", "o", "for", "o", "in", "fkmany", "if", "getattr", "(", "o", ",", "key_attr", ")", "in", "object_keys", "]", "# sync existing fks", "for", "fk", "in", "fkmany", ":", "obj", "=", "object_dict", ".", "get", "(", "getattr", "(", "fk", ",", "key_attr", ")", ")", "for", "attr", "in", "fkmany_class", ".", "update_from_object_fields", ":", "setattr", "(", "fk", ",", "attr", ",", "obj", ".", "get", "(", "attr", ")", ")", "# create new fks", "new_fks", "=", "[", "]", "orm_keys", "=", "[", "getattr", "(", "o", ",", "key_attr", ")", "for", "o", "in", "fkmany", "]", "for", "obj", "in", "object_list", ":", "key", "=", "obj", ".", "get", "(", "key_attr", ")", "if", "key", "not", "in", "orm_keys", ":", "del", "obj", "[", "'id'", "]", "orm_kwargs", "=", "{", "}", "for", "k", "in", "obj", ":", "if", "(", "k", "in", "fkmany_class", ".", "update_from_object_fields", "and", "k", "in", "obj", ")", ":", "orm_kwargs", "[", "k", "]", "=", "obj", "[", "k", "]", "new_obj", "=", "fkmany_class", "(", "*", "*", "orm_kwargs", ")", "new_fks", ".", "append", "(", "new_obj", ")", "fkmany", "+=", "new_fks", "return", "fkmany" ]
Update ORM one-to-many list from object list Used for syncing metrics and columns using the same code
[ "Update", "ORM", "one", "-", "to", "-", "many", "list", "from", "object", "list" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/connectors/base/models.py#L281-L316
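The sync is three phases over one key attribute: drop ORM rows missing from the incoming list, update the survivors field by field, create rows for new keys. A toy, ORM-free version (Col is a hypothetical stand-in for the metric/column classes):

class Col:
    update_from_object_fields = ['column_name', 'type']
    def __init__(self, **kw):
        self.__dict__.update(kw)

def sync(object_list, orm_list, key='column_name'):
    wanted = {o[key]: o for o in object_list}
    # 1) drop rows that were removed client-side
    orm_list = [o for o in orm_list if getattr(o, key) in wanted]
    # 2) update the survivors in place
    for o in orm_list:
        for attr in Col.update_from_object_fields:
            setattr(o, attr, wanted[getattr(o, key)].get(attr))
    # 3) create rows for brand-new keys
    existing = {getattr(o, key) for o in orm_list}
    orm_list += [Col(**{k: v for k, v in o.items()
                        if k in Col.update_from_object_fields})
                 for o in object_list if o[key] not in existing]
    return orm_list

cols = sync([{'column_name': 'a', 'type': 'INT'}],
            [Col(column_name='b', type='STR')])
print([(c.column_name, c.type) for c in cols])   # [('a', 'INT')]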
21,384
apache/incubator-superset
superset/connectors/base/models.py
BaseDatasource.update_from_object
def update_from_object(self, obj): """Update datasource from a data structure The UI's table editor crafts a complex data structure that contains most of the datasource's properties as well as an array of metrics and columns objects. This method receives the object from the UI and syncs the datasource to match it. Since the fields are different for the different connectors, the implementation uses ``update_from_object_fields`` which can be defined for each connector and defines which fields should be synced""" for attr in self.update_from_object_fields: setattr(self, attr, obj.get(attr)) self.owners = obj.get('owners', []) # Syncing metrics metrics = self.get_fk_many_from_list( obj.get('metrics'), self.metrics, self.metric_class, 'metric_name') self.metrics = metrics # Syncing columns self.columns = self.get_fk_many_from_list( obj.get('columns'), self.columns, self.column_class, 'column_name')
python
def update_from_object(self, obj): """Update datasource from a data structure The UI's table editor crafts a complex data structure that contains most of the datasource's properties as well as an array of metrics and columns objects. This method receives the object from the UI and syncs the datasource to match it. Since the fields are different for the different connectors, the implementation uses ``update_from_object_fields`` which can be defined for each connector and defines which fields should be synced""" for attr in self.update_from_object_fields: setattr(self, attr, obj.get(attr)) self.owners = obj.get('owners', []) # Syncing metrics metrics = self.get_fk_many_from_list( obj.get('metrics'), self.metrics, self.metric_class, 'metric_name') self.metrics = metrics # Syncing columns self.columns = self.get_fk_many_from_list( obj.get('columns'), self.columns, self.column_class, 'column_name')
[ "def", "update_from_object", "(", "self", ",", "obj", ")", ":", "for", "attr", "in", "self", ".", "update_from_object_fields", ":", "setattr", "(", "self", ",", "attr", ",", "obj", ".", "get", "(", "attr", ")", ")", "self", ".", "owners", "=", "obj", ".", "get", "(", "'owners'", ",", "[", "]", ")", "# Syncing metrics", "metrics", "=", "self", ".", "get_fk_many_from_list", "(", "obj", ".", "get", "(", "'metrics'", ")", ",", "self", ".", "metrics", ",", "self", ".", "metric_class", ",", "'metric_name'", ")", "self", ".", "metrics", "=", "metrics", "# Syncing columns", "self", ".", "columns", "=", "self", ".", "get_fk_many_from_list", "(", "obj", ".", "get", "(", "'columns'", ")", ",", "self", ".", "columns", ",", "self", ".", "column_class", ",", "'column_name'", ")" ]
Update datasource from a data structure The UI's table editor crafts a complex data structure that contains most of the datasource's properties as well as an array of metrics and columns objects. This method receives the object from the UI and syncs the datasource to match it. Since the fields are different for the different connectors, the implementation uses ``update_from_object_fields`` which can be defined for each connector and defines which fields should be synced
[ "Update", "datasource", "from", "a", "data", "structure" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/connectors/base/models.py#L318-L341
21,385
apache/incubator-superset
superset/common/query_context.py
QueryContext.df_metrics_to_num
def df_metrics_to_num(self, df, query_object): """Converting metrics to numeric when pandas.read_sql cannot""" metrics = [metric for metric in query_object.metrics] for col, dtype in df.dtypes.items(): if dtype.type == np.object_ and col in metrics: df[col] = pd.to_numeric(df[col], errors='coerce')
python
def df_metrics_to_num(self, df, query_object): """Converting metrics to numeric when pandas.read_sql cannot""" metrics = [metric for metric in query_object.metrics] for col, dtype in df.dtypes.items(): if dtype.type == np.object_ and col in metrics: df[col] = pd.to_numeric(df[col], errors='coerce')
[ "def", "df_metrics_to_num", "(", "self", ",", "df", ",", "query_object", ")", ":", "metrics", "=", "[", "metric", "for", "metric", "in", "query_object", ".", "metrics", "]", "for", "col", ",", "dtype", "in", "df", ".", "dtypes", ".", "items", "(", ")", ":", "if", "dtype", ".", "type", "==", "np", ".", "object_", "and", "col", "in", "metrics", ":", "df", "[", "col", "]", "=", "pd", ".", "to_numeric", "(", "df", "[", "col", "]", ",", "errors", "=", "'coerce'", ")" ]
Converting metrics to numeric when pandas.read_sql cannot
[ "Converting", "metrics", "to", "numeric", "when", "pandas", ".", "read_sql", "cannot" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/common/query_context.py#L112-L117
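The coercion only touches object-typed columns that are declared metrics; anything unparseable becomes NaN rather than raising. In isolation:

import numpy as np
import pandas as pd

df = pd.DataFrame({'metric': ['1', '2', 'oops'], 'label': ['a', 'b', 'c']})
metrics = ['metric']

for col, dtype in df.dtypes.items():
    if dtype.type == np.object_ and col in metrics:
        df[col] = pd.to_numeric(df[col], errors='coerce')

print(df['metric'].tolist())   # [1.0, 2.0, nan]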
21,386
apache/incubator-superset
superset/common/query_context.py
QueryContext.get_single_payload
def get_single_payload(self, query_obj): """Returns a payload of metadata and data""" payload = self.get_df_payload(query_obj) df = payload.get('df') status = payload.get('status') if status != utils.QueryStatus.FAILED: if df is not None and df.empty: payload['error'] = 'No data' else: payload['data'] = self.get_data(df) if 'df' in payload: del payload['df'] return payload
python
def get_single_payload(self, query_obj): """Returns a payload of metadata and data""" payload = self.get_df_payload(query_obj) df = payload.get('df') status = payload.get('status') if status != utils.QueryStatus.FAILED: if df is not None and df.empty: payload['error'] = 'No data' else: payload['data'] = self.get_data(df) if 'df' in payload: del payload['df'] return payload
[ "def", "get_single_payload", "(", "self", ",", "query_obj", ")", ":", "payload", "=", "self", ".", "get_df_payload", "(", "query_obj", ")", "df", "=", "payload", ".", "get", "(", "'df'", ")", "status", "=", "payload", ".", "get", "(", "'status'", ")", "if", "status", "!=", "utils", ".", "QueryStatus", ".", "FAILED", ":", "if", "df", "is", "not", "None", "and", "df", ".", "empty", ":", "payload", "[", "'error'", "]", "=", "'No data'", "else", ":", "payload", "[", "'data'", "]", "=", "self", ".", "get_data", "(", "df", ")", "if", "'df'", "in", "payload", ":", "del", "payload", "[", "'df'", "]", "return", "payload" ]
Returns a payload of metadata and data
[ "Returns", "a", "payload", "of", "metadata", "and", "data" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/common/query_context.py#L122-L134
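A standalone sketch of the same empty-frame handling; the summarize function and sample frames are illustrative, not from the record:

import pandas as pd

def summarize(df, status):
    # Same shape as get_single_payload: an empty frame becomes an error,
    # a non-empty frame becomes data, and the frame itself is dropped.
    payload = {'status': status}
    if status != 'failed':
        if df is not None and df.empty:
            payload['error'] = 'No data'
        else:
            payload['data'] = df.to_dict('records')
    return payload

print(summarize(pd.DataFrame(), 'success'))            # error: No data
print(summarize(pd.DataFrame({'a': [1]}), 'success'))  # includes 'data'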
21,387
apache/incubator-superset
superset/common/query_context.py
QueryContext.get_df_payload
def get_df_payload(self, query_obj, **kwargs): """Handles caching around the df payload retrieval""" cache_key = query_obj.cache_key( datasource=self.datasource.uid, **kwargs) if query_obj else None logging.info('Cache key: {}'.format(cache_key)) is_loaded = False stacktrace = None df = None cached_dttm = datetime.utcnow().isoformat().split('.')[0] cache_value = None status = None query = '' error_message = None if cache_key and cache and not self.force: cache_value = cache.get(cache_key) if cache_value: stats_logger.incr('loaded_from_cache') try: cache_value = pkl.loads(cache_value) df = cache_value['df'] query = cache_value['query'] status = utils.QueryStatus.SUCCESS is_loaded = True except Exception as e: logging.exception(e) logging.error('Error reading cache: ' + utils.error_msg_from_exception(e)) logging.info('Serving from cache') if query_obj and not is_loaded: try: query_result = self.get_query_result(query_obj) status = query_result['status'] query = query_result['query'] error_message = query_result['error_message'] df = query_result['df'] if status != utils.QueryStatus.FAILED: stats_logger.incr('loaded_from_source') is_loaded = True except Exception as e: logging.exception(e) if not error_message: error_message = '{}'.format(e) status = utils.QueryStatus.FAILED stacktrace = traceback.format_exc() if ( is_loaded and cache_key and cache and status != utils.QueryStatus.FAILED): try: cache_value = dict( dttm=cached_dttm, df=df if df is not None else None, query=query, ) cache_value = pkl.dumps( cache_value, protocol=pkl.HIGHEST_PROTOCOL) logging.info('Caching {} chars at key {}'.format( len(cache_value), cache_key)) stats_logger.incr('set_cache_key') cache.set( cache_key, cache_value, timeout=self.cache_timeout) except Exception as e: # cache.set call can fail if the backend is down or if # the key is too large or whatever other reasons logging.warning('Could not cache key {}'.format(cache_key)) logging.exception(e) cache.delete(cache_key) return { 'cache_key': cache_key, 'cached_dttm': cache_value['dttm'] if cache_value is not None else None, 'cache_timeout': self.cache_timeout, 'df': df, 'error': error_message, 'is_cached': cache_key is not None, 'query': query, 'status': status, 'stacktrace': stacktrace, 'rowcount': len(df.index) if df is not None else 0, }
python
def get_df_payload(self, query_obj, **kwargs): """Handles caching around the df payload retrieval""" cache_key = query_obj.cache_key( datasource=self.datasource.uid, **kwargs) if query_obj else None logging.info('Cache key: {}'.format(cache_key)) is_loaded = False stacktrace = None df = None cached_dttm = datetime.utcnow().isoformat().split('.')[0] cache_value = None status = None query = '' error_message = None if cache_key and cache and not self.force: cache_value = cache.get(cache_key) if cache_value: stats_logger.incr('loaded_from_cache') try: cache_value = pkl.loads(cache_value) df = cache_value['df'] query = cache_value['query'] status = utils.QueryStatus.SUCCESS is_loaded = True except Exception as e: logging.exception(e) logging.error('Error reading cache: ' + utils.error_msg_from_exception(e)) logging.info('Serving from cache') if query_obj and not is_loaded: try: query_result = self.get_query_result(query_obj) status = query_result['status'] query = query_result['query'] error_message = query_result['error_message'] df = query_result['df'] if status != utils.QueryStatus.FAILED: stats_logger.incr('loaded_from_source') is_loaded = True except Exception as e: logging.exception(e) if not error_message: error_message = '{}'.format(e) status = utils.QueryStatus.FAILED stacktrace = traceback.format_exc() if ( is_loaded and cache_key and cache and status != utils.QueryStatus.FAILED): try: cache_value = dict( dttm=cached_dttm, df=df if df is not None else None, query=query, ) cache_value = pkl.dumps( cache_value, protocol=pkl.HIGHEST_PROTOCOL) logging.info('Caching {} chars at key {}'.format( len(cache_value), cache_key)) stats_logger.incr('set_cache_key') cache.set( cache_key, cache_value, timeout=self.cache_timeout) except Exception as e: # cache.set call can fail if the backend is down or if # the key is too large or whatever other reasons logging.warning('Could not cache key {}'.format(cache_key)) logging.exception(e) cache.delete(cache_key) return { 'cache_key': cache_key, 'cached_dttm': cache_value['dttm'] if cache_value is not None else None, 'cache_timeout': self.cache_timeout, 'df': df, 'error': error_message, 'is_cached': cache_key is not None, 'query': query, 'status': status, 'stacktrace': stacktrace, 'rowcount': len(df.index) if df is not None else 0, }
[ "def", "get_df_payload", "(", "self", ",", "query_obj", ",", "*", "*", "kwargs", ")", ":", "cache_key", "=", "query_obj", ".", "cache_key", "(", "datasource", "=", "self", ".", "datasource", ".", "uid", ",", "*", "*", "kwargs", ")", "if", "query_obj", "else", "None", "logging", ".", "info", "(", "'Cache key: {}'", ".", "format", "(", "cache_key", ")", ")", "is_loaded", "=", "False", "stacktrace", "=", "None", "df", "=", "None", "cached_dttm", "=", "datetime", ".", "utcnow", "(", ")", ".", "isoformat", "(", ")", ".", "split", "(", "'.'", ")", "[", "0", "]", "cache_value", "=", "None", "status", "=", "None", "query", "=", "''", "error_message", "=", "None", "if", "cache_key", "and", "cache", "and", "not", "self", ".", "force", ":", "cache_value", "=", "cache", ".", "get", "(", "cache_key", ")", "if", "cache_value", ":", "stats_logger", ".", "incr", "(", "'loaded_from_cache'", ")", "try", ":", "cache_value", "=", "pkl", ".", "loads", "(", "cache_value", ")", "df", "=", "cache_value", "[", "'df'", "]", "query", "=", "cache_value", "[", "'query'", "]", "status", "=", "utils", ".", "QueryStatus", ".", "SUCCESS", "is_loaded", "=", "True", "except", "Exception", "as", "e", ":", "logging", ".", "exception", "(", "e", ")", "logging", ".", "error", "(", "'Error reading cache: '", "+", "utils", ".", "error_msg_from_exception", "(", "e", ")", ")", "logging", ".", "info", "(", "'Serving from cache'", ")", "if", "query_obj", "and", "not", "is_loaded", ":", "try", ":", "query_result", "=", "self", ".", "get_query_result", "(", "query_obj", ")", "status", "=", "query_result", "[", "'status'", "]", "query", "=", "query_result", "[", "'query'", "]", "error_message", "=", "query_result", "[", "'error_message'", "]", "df", "=", "query_result", "[", "'df'", "]", "if", "status", "!=", "utils", ".", "QueryStatus", ".", "FAILED", ":", "stats_logger", ".", "incr", "(", "'loaded_from_source'", ")", "is_loaded", "=", "True", "except", "Exception", "as", "e", ":", "logging", ".", "exception", "(", "e", ")", "if", "not", "error_message", ":", "error_message", "=", "'{}'", ".", "format", "(", "e", ")", "status", "=", "utils", ".", "QueryStatus", ".", "FAILED", "stacktrace", "=", "traceback", ".", "format_exc", "(", ")", "if", "(", "is_loaded", "and", "cache_key", "and", "cache", "and", "status", "!=", "utils", ".", "QueryStatus", ".", "FAILED", ")", ":", "try", ":", "cache_value", "=", "dict", "(", "dttm", "=", "cached_dttm", ",", "df", "=", "df", "if", "df", "is", "not", "None", "else", "None", ",", "query", "=", "query", ",", ")", "cache_value", "=", "pkl", ".", "dumps", "(", "cache_value", ",", "protocol", "=", "pkl", ".", "HIGHEST_PROTOCOL", ")", "logging", ".", "info", "(", "'Caching {} chars at key {}'", ".", "format", "(", "len", "(", "cache_value", ")", ",", "cache_key", ")", ")", "stats_logger", ".", "incr", "(", "'set_cache_key'", ")", "cache", ".", "set", "(", "cache_key", ",", "cache_value", ",", "timeout", "=", "self", ".", "cache_timeout", ")", "except", "Exception", "as", "e", ":", "# cache.set call can fail if the backend is down or if", "# the key is too large or whatever other reasons", "logging", ".", "warning", "(", "'Could not cache key {}'", ".", "format", "(", "cache_key", ")", ")", "logging", ".", "exception", "(", "e", ")", "cache", ".", "delete", "(", "cache_key", ")", "return", "{", "'cache_key'", ":", "cache_key", ",", "'cached_dttm'", ":", "cache_value", "[", "'dttm'", "]", "if", "cache_value", "is", "not", "None", "else", "None", ",", "'cache_timeout'", ":", "self", ".", "cache_timeout", 
",", "'df'", ":", "df", ",", "'error'", ":", "error_message", ",", "'is_cached'", ":", "cache_key", "is", "not", "None", ",", "'query'", ":", "query", ",", "'status'", ":", "status", ",", "'stacktrace'", ":", "stacktrace", ",", "'rowcount'", ":", "len", "(", "df", ".", "index", ")", "if", "df", "is", "not", "None", "else", "0", ",", "}" ]
Handles caching around the df payload retrieval
[ "Handles", "caching", "around", "the", "df", "payload", "retrieval" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/common/query_context.py#L152-L237
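The record implements a cache-aside flow: try the cache, fall back to the source, write back unless the query failed. A minimal sketch under the assumption of an in-memory dict cache; run_query and cache are hypothetical stand-ins:

import pickle

cache = {}  # stand-in for a real cache backend

def run_query(key):
    return {'df': [1, 2, 3], 'status': 'success'}  # fake source

def get_payload(cache_key, force=False):
    # 1) Try the cache unless a refresh is forced.
    if cache_key in cache and not force:
        return pickle.loads(cache[cache_key])
    # 2) Fall back to the source.
    result = run_query(cache_key)
    # 3) Write back only on success; a failed cache write must never
    #    break the request, so drop the key instead of raising.
    if result['status'] != 'failed':
        try:
            cache[cache_key] = pickle.dumps(result, protocol=pickle.HIGHEST_PROTOCOL)
        except Exception:
            cache.pop(cache_key, None)
    return result

print(get_payload('k1'))  # computed, then cached
print(get_payload('k1'))  # served from cache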
21,388
apache/incubator-superset
superset/models/core.py
Slice.data
def data(self): """Data used to render slice in templates""" d = {} self.token = '' try: d = self.viz.data self.token = d.get('token') except Exception as e: logging.exception(e) d['error'] = str(e) return { 'datasource': self.datasource_name, 'description': self.description, 'description_markeddown': self.description_markeddown, 'edit_url': self.edit_url, 'form_data': self.form_data, 'slice_id': self.id, 'slice_name': self.slice_name, 'slice_url': self.slice_url, 'modified': self.modified(), 'changed_on_humanized': self.changed_on_humanized, 'changed_on': self.changed_on.isoformat(), }
python
def data(self): """Data used to render slice in templates""" d = {} self.token = '' try: d = self.viz.data self.token = d.get('token') except Exception as e: logging.exception(e) d['error'] = str(e) return { 'datasource': self.datasource_name, 'description': self.description, 'description_markeddown': self.description_markeddown, 'edit_url': self.edit_url, 'form_data': self.form_data, 'slice_id': self.id, 'slice_name': self.slice_name, 'slice_url': self.slice_url, 'modified': self.modified(), 'changed_on_humanized': self.changed_on_humanized, 'changed_on': self.changed_on.isoformat(), }
[ "def", "data", "(", "self", ")", ":", "d", "=", "{", "}", "self", ".", "token", "=", "''", "try", ":", "d", "=", "self", ".", "viz", ".", "data", "self", ".", "token", "=", "d", ".", "get", "(", "'token'", ")", "except", "Exception", "as", "e", ":", "logging", ".", "exception", "(", "e", ")", "d", "[", "'error'", "]", "=", "str", "(", "e", ")", "return", "{", "'datasource'", ":", "self", ".", "datasource_name", ",", "'description'", ":", "self", ".", "description", ",", "'description_markeddown'", ":", "self", ".", "description_markeddown", ",", "'edit_url'", ":", "self", ".", "edit_url", ",", "'form_data'", ":", "self", ".", "form_data", ",", "'slice_id'", ":", "self", ".", "id", ",", "'slice_name'", ":", "self", ".", "slice_name", ",", "'slice_url'", ":", "self", ".", "slice_url", ",", "'modified'", ":", "self", ".", "modified", "(", ")", ",", "'changed_on_humanized'", ":", "self", ".", "changed_on_humanized", ",", "'changed_on'", ":", "self", ".", "changed_on", ".", "isoformat", "(", ")", ",", "}" ]
Data used to render slice in templates
[ "Data", "used", "to", "render", "slice", "in", "templates" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/models/core.py#L226-L248
21,389
apache/incubator-superset
superset/models/core.py
Slice.import_obj
def import_obj(cls, slc_to_import, slc_to_override, import_time=None): """Inserts or overrides slc in the database. remote_id and import_time fields in params_dict are set to track the slice origin and ensure correct overrides for multiple imports. Slice.perm is used to find the datasources and connect them. :param Slice slc_to_import: Slice object to import :param Slice slc_to_override: Slice to replace, id matches remote_id :returns: The resulting id for the imported slice :rtype: int """ session = db.session make_transient(slc_to_import) slc_to_import.dashboards = [] slc_to_import.alter_params( remote_id=slc_to_import.id, import_time=import_time) slc_to_import = slc_to_import.copy() params = slc_to_import.params_dict slc_to_import.datasource_id = ConnectorRegistry.get_datasource_by_name( session, slc_to_import.datasource_type, params['datasource_name'], params['schema'], params['database_name']).id if slc_to_override: slc_to_override.override(slc_to_import) session.flush() return slc_to_override.id session.add(slc_to_import) logging.info('Final slice: {}'.format(slc_to_import.to_json())) session.flush() return slc_to_import.id
python
def import_obj(cls, slc_to_import, slc_to_override, import_time=None): """Inserts or overrides slc in the database. remote_id and import_time fields in params_dict are set to track the slice origin and ensure correct overrides for multiple imports. Slice.perm is used to find the datasources and connect them. :param Slice slc_to_import: Slice object to import :param Slice slc_to_override: Slice to replace, id matches remote_id :returns: The resulting id for the imported slice :rtype: int """ session = db.session make_transient(slc_to_import) slc_to_import.dashboards = [] slc_to_import.alter_params( remote_id=slc_to_import.id, import_time=import_time) slc_to_import = slc_to_import.copy() params = slc_to_import.params_dict slc_to_import.datasource_id = ConnectorRegistry.get_datasource_by_name( session, slc_to_import.datasource_type, params['datasource_name'], params['schema'], params['database_name']).id if slc_to_override: slc_to_override.override(slc_to_import) session.flush() return slc_to_override.id session.add(slc_to_import) logging.info('Final slice: {}'.format(slc_to_import.to_json())) session.flush() return slc_to_import.id
[ "def", "import_obj", "(", "cls", ",", "slc_to_import", ",", "slc_to_override", ",", "import_time", "=", "None", ")", ":", "session", "=", "db", ".", "session", "make_transient", "(", "slc_to_import", ")", "slc_to_import", ".", "dashboards", "=", "[", "]", "slc_to_import", ".", "alter_params", "(", "remote_id", "=", "slc_to_import", ".", "id", ",", "import_time", "=", "import_time", ")", "slc_to_import", "=", "slc_to_import", ".", "copy", "(", ")", "params", "=", "slc_to_import", ".", "params_dict", "slc_to_import", ".", "datasource_id", "=", "ConnectorRegistry", ".", "get_datasource_by_name", "(", "session", ",", "slc_to_import", ".", "datasource_type", ",", "params", "[", "'datasource_name'", "]", ",", "params", "[", "'schema'", "]", ",", "params", "[", "'database_name'", "]", ")", ".", "id", "if", "slc_to_override", ":", "slc_to_override", ".", "override", "(", "slc_to_import", ")", "session", ".", "flush", "(", ")", "return", "slc_to_override", ".", "id", "session", ".", "add", "(", "slc_to_import", ")", "logging", ".", "info", "(", "'Final slice: {}'", ".", "format", "(", "slc_to_import", ".", "to_json", "(", ")", ")", ")", "session", ".", "flush", "(", ")", "return", "slc_to_import", ".", "id" ]
Inserts or overrides slc in the database. remote_id and import_time fields in params_dict are set to track the slice origin and ensure correct overrides for multiple imports. Slice.perm is used to find the datasources and connect them. :param Slice slc_to_import: Slice object to import :param Slice slc_to_override: Slice to replace, id matches remote_id :returns: The resulting id for the imported slice :rtype: int
[ "Inserts", "or", "overrides", "slc", "in", "the", "database", "." ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/models/core.py#L336-L366
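A speculative reduction of the remote_id bookkeeping to plain dicts; store, import_slice, and the id counter are invented for illustration:

import itertools

store = {}               # stand-in for the slice table, keyed by local id
_ids = itertools.count(1)

def import_slice(slc):
    # Stamp the incoming object with its origin id, mirroring
    # alter_params(remote_id=...): re-importing the same remote slice
    # overrides the earlier copy instead of inserting a duplicate.
    slc = dict(slc, remote_id=slc['id'])
    existing = next((s for s in store.values()
                     if s['remote_id'] == slc['remote_id']), None)
    if existing:
        existing.update(slc)
        return existing['local_id']
    slc['local_id'] = next(_ids)
    store[slc['local_id']] = slc
    return slc['local_id']

first = import_slice({'id': 7, 'name': 'v1'})
second = import_slice({'id': 7, 'name': 'v2'})  # same remote id -> override
print(first == second, store[first]['name'])    # True v2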
21,390
apache/incubator-superset
superset/models/core.py
Database.grains_dict
def grains_dict(self): """Allowing to lookup grain by either label or duration For backward compatibility""" d = {grain.duration: grain for grain in self.grains()} d.update({grain.label: grain for grain in self.grains()}) return d
python
def grains_dict(self): """Allowing to lookup grain by either label or duration For backward compatibility""" d = {grain.duration: grain for grain in self.grains()} d.update({grain.label: grain for grain in self.grains()}) return d
[ "def", "grains_dict", "(", "self", ")", ":", "d", "=", "{", "grain", ".", "duration", ":", "grain", "for", "grain", "in", "self", ".", "grains", "(", ")", "}", "d", ".", "update", "(", "{", "grain", ".", "label", ":", "grain", "for", "grain", "in", "self", ".", "grains", "(", ")", "}", ")", "return", "d" ]
Allowing to lookup grain by either label or duration For backward compatibility
[ "Allowing", "to", "lookup", "grain", "by", "either", "label", "or", "duration" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/models/core.py#L1050-L1056
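A self-contained sketch of the dual-key lookup; the Grain namedtuple is illustrative:

from collections import namedtuple

Grain = namedtuple('Grain', ['label', 'duration'])
grains = [Grain('Day', 'P1D'), Grain('Week', 'P1W')]

# Same trick as the record: one dict answers lookups by either key.
d = {g.duration: g for g in grains}
d.update({g.label: g for g in grains})

assert d['P1D'] is d['Day']  # duration and label hit the same grain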
21,391
apache/incubator-superset
superset/models/core.py
Log.log_this
def log_this(cls, f): """Decorator to log user actions""" @functools.wraps(f) def wrapper(*args, **kwargs): user_id = None if g.user: user_id = g.user.get_id() d = request.form.to_dict() or {} # request parameters can overwrite post body request_params = request.args.to_dict() d.update(request_params) d.update(kwargs) slice_id = d.get('slice_id') dashboard_id = d.get('dashboard_id') try: slice_id = int( slice_id or json.loads(d.get('form_data')).get('slice_id')) except (ValueError, TypeError): slice_id = 0 stats_logger.incr(f.__name__) start_dttm = datetime.now() value = f(*args, **kwargs) duration_ms = (datetime.now() - start_dttm).total_seconds() * 1000 # bulk insert try: explode_by = d.get('explode') records = json.loads(d.get(explode_by)) except Exception: records = [d] referrer = request.referrer[:1000] if request.referrer else None logs = [] for record in records: try: json_string = json.dumps(record) except Exception: json_string = None log = cls( action=f.__name__, json=json_string, dashboard_id=dashboard_id, slice_id=slice_id, duration_ms=duration_ms, referrer=referrer, user_id=user_id) logs.append(log) sesh = db.session() sesh.bulk_save_objects(logs) sesh.commit() return value return wrapper
python
def log_this(cls, f): """Decorator to log user actions""" @functools.wraps(f) def wrapper(*args, **kwargs): user_id = None if g.user: user_id = g.user.get_id() d = request.form.to_dict() or {} # request parameters can overwrite post body request_params = request.args.to_dict() d.update(request_params) d.update(kwargs) slice_id = d.get('slice_id') dashboard_id = d.get('dashboard_id') try: slice_id = int( slice_id or json.loads(d.get('form_data')).get('slice_id')) except (ValueError, TypeError): slice_id = 0 stats_logger.incr(f.__name__) start_dttm = datetime.now() value = f(*args, **kwargs) duration_ms = (datetime.now() - start_dttm).total_seconds() * 1000 # bulk insert try: explode_by = d.get('explode') records = json.loads(d.get(explode_by)) except Exception: records = [d] referrer = request.referrer[:1000] if request.referrer else None logs = [] for record in records: try: json_string = json.dumps(record) except Exception: json_string = None log = cls( action=f.__name__, json=json_string, dashboard_id=dashboard_id, slice_id=slice_id, duration_ms=duration_ms, referrer=referrer, user_id=user_id) logs.append(log) sesh = db.session() sesh.bulk_save_objects(logs) sesh.commit() return value return wrapper
[ "def", "log_this", "(", "cls", ",", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "user_id", "=", "None", "if", "g", ".", "user", ":", "user_id", "=", "g", ".", "user", ".", "get_id", "(", ")", "d", "=", "request", ".", "form", ".", "to_dict", "(", ")", "or", "{", "}", "# request parameters can overwrite post body", "request_params", "=", "request", ".", "args", ".", "to_dict", "(", ")", "d", ".", "update", "(", "request_params", ")", "d", ".", "update", "(", "kwargs", ")", "slice_id", "=", "d", ".", "get", "(", "'slice_id'", ")", "dashboard_id", "=", "d", ".", "get", "(", "'dashboard_id'", ")", "try", ":", "slice_id", "=", "int", "(", "slice_id", "or", "json", ".", "loads", "(", "d", ".", "get", "(", "'form_data'", ")", ")", ".", "get", "(", "'slice_id'", ")", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "slice_id", "=", "0", "stats_logger", ".", "incr", "(", "f", ".", "__name__", ")", "start_dttm", "=", "datetime", ".", "now", "(", ")", "value", "=", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "duration_ms", "=", "(", "datetime", ".", "now", "(", ")", "-", "start_dttm", ")", ".", "total_seconds", "(", ")", "*", "1000", "# bulk insert", "try", ":", "explode_by", "=", "d", ".", "get", "(", "'explode'", ")", "records", "=", "json", ".", "loads", "(", "d", ".", "get", "(", "explode_by", ")", ")", "except", "Exception", ":", "records", "=", "[", "d", "]", "referrer", "=", "request", ".", "referrer", "[", ":", "1000", "]", "if", "request", ".", "referrer", "else", "None", "logs", "=", "[", "]", "for", "record", "in", "records", ":", "try", ":", "json_string", "=", "json", ".", "dumps", "(", "record", ")", "except", "Exception", ":", "json_string", "=", "None", "log", "=", "cls", "(", "action", "=", "f", ".", "__name__", ",", "json", "=", "json_string", ",", "dashboard_id", "=", "dashboard_id", ",", "slice_id", "=", "slice_id", ",", "duration_ms", "=", "duration_ms", ",", "referrer", "=", "referrer", ",", "user_id", "=", "user_id", ")", "logs", ".", "append", "(", "log", ")", "sesh", "=", "db", ".", "session", "(", ")", "sesh", ".", "bulk_save_objects", "(", "logs", ")", "sesh", ".", "commit", "(", ")", "return", "value", "return", "wrapper" ]
Decorator to log user actions
[ "Decorator", "to", "log", "user", "actions" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/models/core.py#L1143-L1200
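Stripped of the Flask and ORM plumbing, the decorator reduces to a wrap-time-and-record pattern; a minimal sketch where the records list stands in for the Log table:

import functools
from datetime import datetime

records = []  # stand-in for the Log table

def log_this(f):
    @functools.wraps(f)  # keeps f.__name__ intact for the 'action' field
    def wrapper(*args, **kwargs):
        start = datetime.now()
        value = f(*args, **kwargs)
        duration_ms = (datetime.now() - start).total_seconds() * 1000
        records.append({'action': f.__name__, 'duration_ms': duration_ms})
        return value
    return wrapper

@log_this
def explore():
    return 'ok'

explore()
print(records[0]['action'])  # explore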
21,392
apache/incubator-superset
superset/views/base.py
api
def api(f): """ A decorator to label an endpoint as an API. Catches uncaught exceptions and return the response in the JSON format """ def wraps(self, *args, **kwargs): try: return f(self, *args, **kwargs) except Exception as e: logging.exception(e) return json_error_response(get_error_msg()) return functools.update_wrapper(wraps, f)
python
def api(f): """ A decorator to label an endpoint as an API. Catches uncaught exceptions and return the response in the JSON format """ def wraps(self, *args, **kwargs): try: return f(self, *args, **kwargs) except Exception as e: logging.exception(e) return json_error_response(get_error_msg()) return functools.update_wrapper(wraps, f)
[ "def", "api", "(", "f", ")", ":", "def", "wraps", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "return", "f", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", "Exception", "as", "e", ":", "logging", ".", "exception", "(", "e", ")", "return", "json_error_response", "(", "get_error_msg", "(", ")", ")", "return", "functools", ".", "update_wrapper", "(", "wraps", ",", "f", ")" ]
A decorator to label an endpoint as an API. Catches uncaught exceptions and return the response in the JSON format
[ "A", "decorator", "to", "label", "an", "endpoint", "as", "an", "API", ".", "Catches", "uncaught", "exceptions", "and", "return", "the", "response", "in", "the", "JSON", "format" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/views/base.py#L96-L108
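A reduced version of the same idea outside Flask, with json_error_response replaced by a plain dict; names are illustrative:

import functools
import logging

def api(f):
    # Any uncaught exception becomes a JSON-style error instead of a
    # raw 500 page.
    def wraps(self, *args, **kwargs):
        try:
            return f(self, *args, **kwargs)
        except Exception as e:
            logging.exception(e)
            return {'error': str(e)}
    return functools.update_wrapper(wraps, f)

class View:
    @api
    def boom(self):
        raise ValueError('bad input')

print(View().boom())  # {'error': 'bad input'}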
21,393
apache/incubator-superset
superset/views/base.py
handle_api_exception
def handle_api_exception(f): """ A decorator to catch superset exceptions. Use it after the @api decorator above so superset exception handler is triggered before the handler for generic exceptions. """ def wraps(self, *args, **kwargs): try: return f(self, *args, **kwargs) except SupersetSecurityException as e: logging.exception(e) return json_error_response(utils.error_msg_from_exception(e), status=e.status, stacktrace=traceback.format_exc(), link=e.link) except SupersetException as e: logging.exception(e) return json_error_response(utils.error_msg_from_exception(e), stacktrace=traceback.format_exc(), status=e.status) except Exception as e: logging.exception(e) return json_error_response(utils.error_msg_from_exception(e), stacktrace=traceback.format_exc()) return functools.update_wrapper(wraps, f)
python
def handle_api_exception(f): """ A decorator to catch superset exceptions. Use it after the @api decorator above so superset exception handler is triggered before the handler for generic exceptions. """ def wraps(self, *args, **kwargs): try: return f(self, *args, **kwargs) except SupersetSecurityException as e: logging.exception(e) return json_error_response(utils.error_msg_from_exception(e), status=e.status, stacktrace=traceback.format_exc(), link=e.link) except SupersetException as e: logging.exception(e) return json_error_response(utils.error_msg_from_exception(e), stacktrace=traceback.format_exc(), status=e.status) except Exception as e: logging.exception(e) return json_error_response(utils.error_msg_from_exception(e), stacktrace=traceback.format_exc()) return functools.update_wrapper(wraps, f)
[ "def", "handle_api_exception", "(", "f", ")", ":", "def", "wraps", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "return", "f", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", "SupersetSecurityException", "as", "e", ":", "logging", ".", "exception", "(", "e", ")", "return", "json_error_response", "(", "utils", ".", "error_msg_from_exception", "(", "e", ")", ",", "status", "=", "e", ".", "status", ",", "stacktrace", "=", "traceback", ".", "format_exc", "(", ")", ",", "link", "=", "e", ".", "link", ")", "except", "SupersetException", "as", "e", ":", "logging", ".", "exception", "(", "e", ")", "return", "json_error_response", "(", "utils", ".", "error_msg_from_exception", "(", "e", ")", ",", "stacktrace", "=", "traceback", ".", "format_exc", "(", ")", ",", "status", "=", "e", ".", "status", ")", "except", "Exception", "as", "e", ":", "logging", ".", "exception", "(", "e", ")", "return", "json_error_response", "(", "utils", ".", "error_msg_from_exception", "(", "e", ")", ",", "stacktrace", "=", "traceback", ".", "format_exc", "(", ")", ")", "return", "functools", ".", "update_wrapper", "(", "wraps", ",", "f", ")" ]
A decorator to catch superset exceptions. Use it after the @api decorator above so superset exception handler is triggered before the handler for generic exceptions.
[ "A", "decorator", "to", "catch", "superset", "exceptions", ".", "Use", "it", "after", "the" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/views/base.py#L111-L134
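Decorators apply bottom-up, so the ordering advice in the docstring means @handle_api_exception sits closest to the view and gets first shot at Superset-specific errors, while @api stays as the outer catch-all. A self-contained sketch; the simplified decorators and SupersetException here are stand-ins, not the real implementations:

import functools

class SupersetException(Exception):
    status = 500

def handle_api_exception(f):
    # Inner decorator: translates Superset-specific errors first.
    def wraps(self, *args, **kwargs):
        try:
            return f(self, *args, **kwargs)
        except SupersetException as e:
            return {'error': str(e), 'status': e.status}
    return functools.update_wrapper(wraps, f)

def api(f):
    # Outer catch-all for anything the inner handler did not translate.
    def wraps(self, *args, **kwargs):
        try:
            return f(self, *args, **kwargs)
        except Exception as e:
            return {'error': str(e)}
    return functools.update_wrapper(wraps, f)

class View:
    @api
    @handle_api_exception
    def endpoint(self):
        raise SupersetException('datasource missing')

print(View().endpoint())  # handled by the Superset-specific branch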
21,394
apache/incubator-superset
superset/views/base.py
check_ownership
def check_ownership(obj, raise_if_false=True): """Meant to be used in `pre_update` hooks on models to enforce ownership Admin have all access, and other users need to be referenced on either the created_by field that comes with the ``AuditMixin``, or in a field named ``owners`` which is expected to be a one-to-many with the User model. It is meant to be used in the ModelView's pre_update hook in which raising will abort the update. """ if not obj: return False security_exception = SupersetSecurityException( "You don't have the rights to alter [{}]".format(obj)) if g.user.is_anonymous: if raise_if_false: raise security_exception return False roles = [r.name for r in get_user_roles()] if 'Admin' in roles: return True session = db.create_scoped_session() orig_obj = session.query(obj.__class__).filter_by(id=obj.id).first() # Making a list of owners that works across ORM models owners = [] if hasattr(orig_obj, 'owners'): owners += orig_obj.owners if hasattr(orig_obj, 'owner'): owners += [orig_obj.owner] if hasattr(orig_obj, 'created_by'): owners += [orig_obj.created_by] owner_names = [o.username for o in owners if o] if ( g.user and hasattr(g.user, 'username') and g.user.username in owner_names): return True if raise_if_false: raise security_exception else: return False
python
def check_ownership(obj, raise_if_false=True): """Meant to be used in `pre_update` hooks on models to enforce ownership Admin have all access, and other users need to be referenced on either the created_by field that comes with the ``AuditMixin``, or in a field named ``owners`` which is expected to be a one-to-many with the User model. It is meant to be used in the ModelView's pre_update hook in which raising will abort the update. """ if not obj: return False security_exception = SupersetSecurityException( "You don't have the rights to alter [{}]".format(obj)) if g.user.is_anonymous: if raise_if_false: raise security_exception return False roles = [r.name for r in get_user_roles()] if 'Admin' in roles: return True session = db.create_scoped_session() orig_obj = session.query(obj.__class__).filter_by(id=obj.id).first() # Making a list of owners that works across ORM models owners = [] if hasattr(orig_obj, 'owners'): owners += orig_obj.owners if hasattr(orig_obj, 'owner'): owners += [orig_obj.owner] if hasattr(orig_obj, 'created_by'): owners += [orig_obj.created_by] owner_names = [o.username for o in owners if o] if ( g.user and hasattr(g.user, 'username') and g.user.username in owner_names): return True if raise_if_false: raise security_exception else: return False
[ "def", "check_ownership", "(", "obj", ",", "raise_if_false", "=", "True", ")", ":", "if", "not", "obj", ":", "return", "False", "security_exception", "=", "SupersetSecurityException", "(", "\"You don't have the rights to alter [{}]\"", ".", "format", "(", "obj", ")", ")", "if", "g", ".", "user", ".", "is_anonymous", ":", "if", "raise_if_false", ":", "raise", "security_exception", "return", "False", "roles", "=", "[", "r", ".", "name", "for", "r", "in", "get_user_roles", "(", ")", "]", "if", "'Admin'", "in", "roles", ":", "return", "True", "session", "=", "db", ".", "create_scoped_session", "(", ")", "orig_obj", "=", "session", ".", "query", "(", "obj", ".", "__class__", ")", ".", "filter_by", "(", "id", "=", "obj", ".", "id", ")", ".", "first", "(", ")", "# Making a list of owners that works across ORM models", "owners", "=", "[", "]", "if", "hasattr", "(", "orig_obj", ",", "'owners'", ")", ":", "owners", "+=", "orig_obj", ".", "owners", "if", "hasattr", "(", "orig_obj", ",", "'owner'", ")", ":", "owners", "+=", "[", "orig_obj", ".", "owner", "]", "if", "hasattr", "(", "orig_obj", ",", "'created_by'", ")", ":", "owners", "+=", "[", "orig_obj", ".", "created_by", "]", "owner_names", "=", "[", "o", ".", "username", "for", "o", "in", "owners", "if", "o", "]", "if", "(", "g", ".", "user", "and", "hasattr", "(", "g", ".", "user", ",", "'username'", ")", "and", "g", ".", "user", ".", "username", "in", "owner_names", ")", ":", "return", "True", "if", "raise_if_false", ":", "raise", "security_exception", "else", ":", "return", "False" ]
Meant to be used in `pre_update` hooks on models to enforce ownership Admin have all access, and other users need to be referenced on either the created_by field that comes with the ``AuditMixin``, or in a field named ``owners`` which is expected to be a one-to-many with the User model. It is meant to be used in the ModelView's pre_update hook in which raising will abort the update.
[ "Meant", "to", "be", "used", "in", "pre_update", "hooks", "on", "models", "to", "enforce", "ownership" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/views/base.py#L331-L374
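The owner-gathering portion generalizes to any mix of ORM conventions; a standalone sketch with an invented Obj class:

class Obj:
    # Invented model that may expose any of the three owner conventions.
    def __init__(self, owners=None, owner=None, created_by=None):
        if owners is not None:
            self.owners = owners
        if owner is not None:
            self.owner = owner
        if created_by is not None:
            self.created_by = created_by

def owner_names(orig_obj):
    # Same attribute probing as the record: collect owners across the
    # conventions different models use, then drop falsy entries.
    owners = []
    if hasattr(orig_obj, 'owners'):
        owners += orig_obj.owners
    if hasattr(orig_obj, 'owner'):
        owners += [orig_obj.owner]
    if hasattr(orig_obj, 'created_by'):
        owners += [orig_obj.created_by]
    return [o for o in owners if o]

print(owner_names(Obj(owners=['alice'], created_by='bob')))  # ['alice', 'bob']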
21,395
apache/incubator-superset
superset/views/base.py
bind_field
def bind_field( self, form: DynamicForm, unbound_field: UnboundField, options: Dict[Any, Any], ) -> Field: """ Customize how fields are bound by stripping all whitespace. :param form: The form :param unbound_field: The unbound field :param options: The field options :returns: The bound field """ filters = unbound_field.kwargs.get('filters', []) filters.append(lambda x: x.strip() if isinstance(x, str) else x) return unbound_field.bind(form=form, filters=filters, **options)
python
def bind_field( self, form: DynamicForm, unbound_field: UnboundField, options: Dict[Any, Any], ) -> Field: """ Customize how fields are bound by stripping all whitespace. :param form: The form :param unbound_field: The unbound field :param options: The field options :returns: The bound field """ filters = unbound_field.kwargs.get('filters', []) filters.append(lambda x: x.strip() if isinstance(x, str) else x) return unbound_field.bind(form=form, filters=filters, **options)
[ "def", "bind_field", "(", "self", ",", "form", ":", "DynamicForm", ",", "unbound_field", ":", "UnboundField", ",", "options", ":", "Dict", "[", "Any", ",", "Any", "]", ",", ")", "->", "Field", ":", "filters", "=", "unbound_field", ".", "kwargs", ".", "get", "(", "'filters'", ",", "[", "]", ")", "filters", ".", "append", "(", "lambda", "x", ":", "x", ".", "strip", "(", ")", "if", "isinstance", "(", "x", ",", "str", ")", "else", "x", ")", "return", "unbound_field", ".", "bind", "(", "form", "=", "form", ",", "filters", "=", "filters", ",", "*", "*", "options", ")" ]
Customize how fields are bound by stripping all whitespace. :param form: The form :param unbound_field: The unbound field :param options: The field options :returns: The bound field
[ "Customize", "how", "fields", "are", "bound", "by", "stripping", "all", "whitespace", "." ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/views/base.py#L377-L394
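The filter mechanism can be seen without WTForms: each filter is a callable applied to the raw value, and non-strings pass through untouched. A toy sketch:

# The same strip-if-string filter the record appends to every field.
filters = [lambda x: x.strip() if isinstance(x, str) else x]

def apply_filters(value):
    for flt in filters:
        value = flt(value)
    return value

print(repr(apply_filters('  my_table  ')))  # 'my_table'
print(apply_filters(42))                    # 42, non-strings untouched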
21,396
apache/incubator-superset
superset/views/base.py
BaseSupersetView.common_bootsrap_payload
def common_bootsrap_payload(self): """Common data always sent to the client""" messages = get_flashed_messages(with_categories=True) locale = str(get_locale()) return { 'flash_messages': messages, 'conf': {k: conf.get(k) for k in FRONTEND_CONF_KEYS}, 'locale': locale, 'language_pack': get_language_pack(locale), 'feature_flags': get_feature_flags(), }
python
def common_bootsrap_payload(self): """Common data always sent to the client""" messages = get_flashed_messages(with_categories=True) locale = str(get_locale()) return { 'flash_messages': messages, 'conf': {k: conf.get(k) for k in FRONTEND_CONF_KEYS}, 'locale': locale, 'language_pack': get_language_pack(locale), 'feature_flags': get_feature_flags(), }
[ "def", "common_bootsrap_payload", "(", "self", ")", ":", "messages", "=", "get_flashed_messages", "(", "with_categories", "=", "True", ")", "locale", "=", "str", "(", "get_locale", "(", ")", ")", "return", "{", "'flash_messages'", ":", "messages", ",", "'conf'", ":", "{", "k", ":", "conf", ".", "get", "(", "k", ")", "for", "k", "in", "FRONTEND_CONF_KEYS", "}", ",", "'locale'", ":", "locale", ",", "'language_pack'", ":", "get_language_pack", "(", "locale", ")", ",", "'feature_flags'", ":", "get_feature_flags", "(", ")", ",", "}" ]
Common data always sent to the client
[ "Common", "data", "always", "sent", "to", "the", "client" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/views/base.py#L156-L166
21,397
apache/incubator-superset
superset/views/base.py
DeleteMixin._delete
def _delete(self, pk): """ Delete function logic, override to implement different logic deletes the record with primary_key = pk :param pk: record primary key to delete """ item = self.datamodel.get(pk, self._base_filters) if not item: abort(404) try: self.pre_delete(item) except Exception as e: flash(str(e), 'danger') else: view_menu = security_manager.find_view_menu(item.get_perm()) pvs = security_manager.get_session.query( security_manager.permissionview_model).filter_by( view_menu=view_menu).all() schema_view_menu = None if hasattr(item, 'schema_perm'): schema_view_menu = security_manager.find_view_menu(item.schema_perm) pvs.extend(security_manager.get_session.query( security_manager.permissionview_model).filter_by( view_menu=schema_view_menu).all()) if self.datamodel.delete(item): self.post_delete(item) for pv in pvs: security_manager.get_session.delete(pv) if view_menu: security_manager.get_session.delete(view_menu) if schema_view_menu: security_manager.get_session.delete(schema_view_menu) security_manager.get_session.commit() flash(*self.datamodel.message) self.update_redirect()
python
def _delete(self, pk): """ Delete function logic, override to implement different logic deletes the record with primary_key = pk :param pk: record primary key to delete """ item = self.datamodel.get(pk, self._base_filters) if not item: abort(404) try: self.pre_delete(item) except Exception as e: flash(str(e), 'danger') else: view_menu = security_manager.find_view_menu(item.get_perm()) pvs = security_manager.get_session.query( security_manager.permissionview_model).filter_by( view_menu=view_menu).all() schema_view_menu = None if hasattr(item, 'schema_perm'): schema_view_menu = security_manager.find_view_menu(item.schema_perm) pvs.extend(security_manager.get_session.query( security_manager.permissionview_model).filter_by( view_menu=schema_view_menu).all()) if self.datamodel.delete(item): self.post_delete(item) for pv in pvs: security_manager.get_session.delete(pv) if view_menu: security_manager.get_session.delete(view_menu) if schema_view_menu: security_manager.get_session.delete(schema_view_menu) security_manager.get_session.commit() flash(*self.datamodel.message) self.update_redirect()
[ "def", "_delete", "(", "self", ",", "pk", ")", ":", "item", "=", "self", ".", "datamodel", ".", "get", "(", "pk", ",", "self", ".", "_base_filters", ")", "if", "not", "item", ":", "abort", "(", "404", ")", "try", ":", "self", ".", "pre_delete", "(", "item", ")", "except", "Exception", "as", "e", ":", "flash", "(", "str", "(", "e", ")", ",", "'danger'", ")", "else", ":", "view_menu", "=", "security_manager", ".", "find_view_menu", "(", "item", ".", "get_perm", "(", ")", ")", "pvs", "=", "security_manager", ".", "get_session", ".", "query", "(", "security_manager", ".", "permissionview_model", ")", ".", "filter_by", "(", "view_menu", "=", "view_menu", ")", ".", "all", "(", ")", "schema_view_menu", "=", "None", "if", "hasattr", "(", "item", ",", "'schema_perm'", ")", ":", "schema_view_menu", "=", "security_manager", ".", "find_view_menu", "(", "item", ".", "schema_perm", ")", "pvs", ".", "extend", "(", "security_manager", ".", "get_session", ".", "query", "(", "security_manager", ".", "permissionview_model", ")", ".", "filter_by", "(", "view_menu", "=", "schema_view_menu", ")", ".", "all", "(", ")", ")", "if", "self", ".", "datamodel", ".", "delete", "(", "item", ")", ":", "self", ".", "post_delete", "(", "item", ")", "for", "pv", "in", "pvs", ":", "security_manager", ".", "get_session", ".", "delete", "(", "pv", ")", "if", "view_menu", ":", "security_manager", ".", "get_session", ".", "delete", "(", "view_menu", ")", "if", "schema_view_menu", ":", "security_manager", ".", "get_session", ".", "delete", "(", "schema_view_menu", ")", "security_manager", ".", "get_session", ".", "commit", "(", ")", "flash", "(", "*", "self", ".", "datamodel", ".", "message", ")", "self", ".", "update_redirect", "(", ")" ]
Delete function logic, override to implement different logic deletes the record with primary_key = pk :param pk: record primary key to delete
[ "Delete", "function", "logic", "override", "to", "implement", "different", "logic", "deletes", "the", "record", "with", "primary_key", "=", "pk" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/views/base.py#L207-L251
21,398
apache/incubator-superset
superset/views/base.py
SupersetFilter.get_all_permissions
def get_all_permissions(self): """Returns a set of tuples with the perm name and view menu name""" perms = set() for role in self.get_user_roles(): for perm_view in role.permissions: t = (perm_view.permission.name, perm_view.view_menu.name) perms.add(t) return perms
python
def get_all_permissions(self): """Returns a set of tuples with the perm name and view menu name""" perms = set() for role in self.get_user_roles(): for perm_view in role.permissions: t = (perm_view.permission.name, perm_view.view_menu.name) perms.add(t) return perms
[ "def", "get_all_permissions", "(", "self", ")", ":", "perms", "=", "set", "(", ")", "for", "role", "in", "self", ".", "get_user_roles", "(", ")", ":", "for", "perm_view", "in", "role", ".", "permissions", ":", "t", "=", "(", "perm_view", ".", "permission", ".", "name", ",", "perm_view", ".", "view_menu", ".", "name", ")", "perms", ".", "add", "(", "t", ")", "return", "perms" ]
Returns a set of tuples with the perm name and view menu name
[ "Returns", "a", "set", "of", "tuples", "with", "the", "perm", "name", "and", "view", "menu", "name" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/views/base.py#L286-L293
21,399
apache/incubator-superset
superset/views/base.py
SupersetFilter.get_view_menus
def get_view_menus(self, permission_name): """Returns the details of view_menus for a perm name""" vm = set() for perm_name, vm_name in self.get_all_permissions(): if perm_name == permission_name: vm.add(vm_name) return vm
python
def get_view_menus(self, permission_name): """Returns the details of view_menus for a perm name""" vm = set() for perm_name, vm_name in self.get_all_permissions(): if perm_name == permission_name: vm.add(vm_name) return vm
[ "def", "get_view_menus", "(", "self", ",", "permission_name", ")", ":", "vm", "=", "set", "(", ")", "for", "perm_name", ",", "vm_name", "in", "self", ".", "get_all_permissions", "(", ")", ":", "if", "perm_name", "==", "permission_name", ":", "vm", ".", "add", "(", "vm_name", ")", "return", "vm" ]
Returns the details of view_menus for a perm name
[ "Returns", "the", "details", "of", "view_menus", "for", "a", "perm", "name" ]
ca2996c78f679260eb79c6008e276733df5fb653
https://github.com/apache/incubator-superset/blob/ca2996c78f679260eb79c6008e276733df5fb653/superset/views/base.py#L306-L312
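Together with get_all_permissions above, the lookup is a two-step set reduction; a minimal sketch over invented (permission, view-menu) pairs:

# Stand-in for the set that get_all_permissions would build from roles.
all_perms = {('datasource_access', 'db1.table_a'),
             ('datasource_access', 'db1.table_b'),
             ('schema_access', 'db1.main')}

def get_view_menus(permission_name):
    # Keep only the view-menu half of pairs matching the permission name.
    return {vm for perm, vm in all_perms if perm == permission_name}

print(sorted(get_view_menus('datasource_access')))  # ['db1.table_a', 'db1.table_b']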