Dataset columns (name, dtype, value range):
id                int32   0 - 252k
repo              string  7 - 55 chars
path              string  4 - 127 chars
func_name         string  1 - 88 chars
original_string   string  75 - 19.8k chars
language          string  1 distinct value
code              string  75 - 19.8k chars
code_tokens       list
docstring         string  3 - 17.3k chars
docstring_tokens  list
sha               string  40 chars
url               string  87 - 242 chars
7,300
MicroPyramid/django-mfa
django_mfa/utils.py
build_uri
def build_uri(secret, name, initial_count=None, issuer_name=None): """ Returns the provisioning URI for the OTP; works for either TOTP or HOTP. This can then be encoded in a QR Code and used to provision the Google Authenticator app. For module-internal use. See also: http://code.google.com/p/google-authenticator/wiki/KeyUriFormat @param [String] the hotp/totp secret used to generate the URI @param [String] name of the account @param [Integer] initial_count starting counter value, defaults to None. If none, the OTP type will be assumed as TOTP. @param [String] the name of the OTP issuer; this will be the organization title of the OTP entry in Authenticator @return [String] provisioning uri """ # initial_count may be 0 as a valid param is_initial_count_present = (initial_count is not None) otp_type = 'hotp' if is_initial_count_present else 'totp' base = 'otpauth://%s/' % otp_type if issuer_name: issuer_name = quote(issuer_name) base += '%s:' % issuer_name uri = '%(base)s%(name)s?secret=%(secret)s' % { 'name': quote(name, safe='@'), 'secret': secret, 'base': base, } if is_initial_count_present: uri += '&counter=%s' % initial_count if issuer_name: uri += '&issuer=%s' % issuer_name return uri
python
def build_uri(secret, name, initial_count=None, issuer_name=None): """ Returns the provisioning URI for the OTP; works for either TOTP or HOTP. This can then be encoded in a QR Code and used to provision the Google Authenticator app. For module-internal use. See also: http://code.google.com/p/google-authenticator/wiki/KeyUriFormat @param [String] the hotp/totp secret used to generate the URI @param [String] name of the account @param [Integer] initial_count starting counter value, defaults to None. If none, the OTP type will be assumed as TOTP. @param [String] the name of the OTP issuer; this will be the organization title of the OTP entry in Authenticator @return [String] provisioning uri """ # initial_count may be 0 as a valid param is_initial_count_present = (initial_count is not None) otp_type = 'hotp' if is_initial_count_present else 'totp' base = 'otpauth://%s/' % otp_type if issuer_name: issuer_name = quote(issuer_name) base += '%s:' % issuer_name uri = '%(base)s%(name)s?secret=%(secret)s' % { 'name': quote(name, safe='@'), 'secret': secret, 'base': base, } if is_initial_count_present: uri += '&counter=%s' % initial_count if issuer_name: uri += '&issuer=%s' % issuer_name return uri
[ "def", "build_uri", "(", "secret", ",", "name", ",", "initial_count", "=", "None", ",", "issuer_name", "=", "None", ")", ":", "# initial_count may be 0 as a valid param", "is_initial_count_present", "=", "(", "initial_count", "is", "not", "None", ")", "otp_type", "=", "'hotp'", "if", "is_initial_count_present", "else", "'totp'", "base", "=", "'otpauth://%s/'", "%", "otp_type", "if", "issuer_name", ":", "issuer_name", "=", "quote", "(", "issuer_name", ")", "base", "+=", "'%s:'", "%", "issuer_name", "uri", "=", "'%(base)s%(name)s?secret=%(secret)s'", "%", "{", "'name'", ":", "quote", "(", "name", ",", "safe", "=", "'@'", ")", ",", "'secret'", ":", "secret", ",", "'base'", ":", "base", ",", "}", "if", "is_initial_count_present", ":", "uri", "+=", "'&counter=%s'", "%", "initial_count", "if", "issuer_name", ":", "uri", "+=", "'&issuer=%s'", "%", "issuer_name", "return", "uri" ]
Returns the provisioning URI for the OTP; works for either TOTP or HOTP. This can then be encoded in a QR Code and used to provision the Google Authenticator app. For module-internal use. See also: http://code.google.com/p/google-authenticator/wiki/KeyUriFormat @param [String] the hotp/totp secret used to generate the URI @param [String] name of the account @param [Integer] initial_count starting counter value, defaults to None. If none, the OTP type will be assumed as TOTP. @param [String] the name of the OTP issuer; this will be the organization title of the OTP entry in Authenticator @return [String] provisioning uri
[ "Returns", "the", "provisioning", "URI", "for", "the", "OTP", ";", "works", "for", "either", "TOTP", "or", "HOTP", "." ]
7baf16297ffa8b5b4aa0b9961a889a964fcbdb39
https://github.com/MicroPyramid/django-mfa/blob/7baf16297ffa8b5b4aa0b9961a889a964fcbdb39/django_mfa/utils.py#L15-L57
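For reference, a minimal standalone sketch of the otpauth:// URI the helper above builds, using the same quoting rules; the secret, account name, and issuer below are made-up examples, not values from the dataset.

```python
# Standalone illustration only; the secret, account name, and issuer are made up.
from urllib.parse import quote

def demo_uri(secret, name, initial_count=None, issuer_name=None):
    # HOTP when a counter is given, otherwise TOTP -- same rule as build_uri
    otp_type = 'hotp' if initial_count is not None else 'totp'
    base = 'otpauth://%s/' % otp_type
    if issuer_name:
        issuer_name = quote(issuer_name)
        base += '%s:' % issuer_name
    uri = '%s%s?secret=%s' % (base, quote(name, safe='@'), secret)
    if initial_count is not None:
        uri += '&counter=%s' % initial_count
    if issuer_name:
        uri += '&issuer=%s' % issuer_name
    return uri

print(demo_uri('JBSWY3DPEHPK3PXP', 'alice@example.com', issuer_name='Example'))
# otpauth://totp/Example:alice@example.com?secret=JBSWY3DPEHPK3PXP&issuer=Example
```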
7,301
MicroPyramid/django-mfa
django_mfa/utils.py
strings_equal
def strings_equal(s1, s2): """ Timing-attack resistant string comparison. Normal comparison using == will short-circuit on the first mismatching character. This avoids that by scanning the whole string, though we still reveal to a timing attack whether the strings are the same length. """ try: s1 = unicodedata.normalize('NFKC', str(s1)) s2 = unicodedata.normalize('NFKC', str(s2)) except: s1 = unicodedata.normalize('NFKC', unicode(s1)) s2 = unicodedata.normalize('NFKC', unicode(s2)) return compare_digest(s1, s2)
python
def strings_equal(s1, s2): """ Timing-attack resistant string comparison. Normal comparison using == will short-circuit on the first mismatching character. This avoids that by scanning the whole string, though we still reveal to a timing attack whether the strings are the same length. """ try: s1 = unicodedata.normalize('NFKC', str(s1)) s2 = unicodedata.normalize('NFKC', str(s2)) except: s1 = unicodedata.normalize('NFKC', unicode(s1)) s2 = unicodedata.normalize('NFKC', unicode(s2)) return compare_digest(s1, s2)
[ "def", "strings_equal", "(", "s1", ",", "s2", ")", ":", "try", ":", "s1", "=", "unicodedata", ".", "normalize", "(", "'NFKC'", ",", "str", "(", "s1", ")", ")", "s2", "=", "unicodedata", ".", "normalize", "(", "'NFKC'", ",", "str", "(", "s2", ")", ")", "except", ":", "s1", "=", "unicodedata", ".", "normalize", "(", "'NFKC'", ",", "unicode", "(", "s1", ")", ")", "s2", "=", "unicodedata", ".", "normalize", "(", "'NFKC'", ",", "unicode", "(", "s2", ")", ")", "return", "compare_digest", "(", "s1", ",", "s2", ")" ]
Timing-attack resistant string comparison. Normal comparison using == will short-circuit on the first mismatching character. This avoids that by scanning the whole string, though we still reveal to a timing attack whether the strings are the same length.
[ "Timing", "-", "attack", "resistant", "string", "comparison", "." ]
7baf16297ffa8b5b4aa0b9961a889a964fcbdb39
https://github.com/MicroPyramid/django-mfa/blob/7baf16297ffa8b5b4aa0b9961a889a964fcbdb39/django_mfa/utils.py#L79-L94
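The try/except in the record above exists to handle Python 2's unicode type; a Python 3-only sketch of the same idea is below, assuming compare_digest comes from the standard hmac module and the inputs are ASCII OTP codes.

```python
# Python 3-only sketch; hmac.compare_digest accepts ASCII str (or bytes),
# which is fine for numeric OTP codes.
import unicodedata
from hmac import compare_digest

def strings_equal_py3(s1, s2):
    # normalize both sides so equivalent Unicode forms compare equal
    s1 = unicodedata.normalize('NFKC', str(s1))
    s2 = unicodedata.normalize('NFKC', str(s2))
    return compare_digest(s1, s2)

print(strings_equal_py3('123456', '123456'))  # True
print(strings_equal_py3('123456', '654321'))  # False
```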
7,302
libyal/libbde
setup.py
GetPythonLibraryDirectoryPath
def GetPythonLibraryDirectoryPath(): """Retrieves the Python library directory path.""" path = sysconfig.get_python_lib(True) _, _, path = path.rpartition(sysconfig.PREFIX) if path.startswith(os.sep): path = path[1:] return path
python
def GetPythonLibraryDirectoryPath(): """Retrieves the Python library directory path.""" path = sysconfig.get_python_lib(True) _, _, path = path.rpartition(sysconfig.PREFIX) if path.startswith(os.sep): path = path[1:] return path
[ "def", "GetPythonLibraryDirectoryPath", "(", ")", ":", "path", "=", "sysconfig", ".", "get_python_lib", "(", "True", ")", "_", ",", "_", ",", "path", "=", "path", ".", "rpartition", "(", "sysconfig", ".", "PREFIX", ")", "if", "path", ".", "startswith", "(", "os", ".", "sep", ")", ":", "path", "=", "path", "[", "1", ":", "]", "return", "path" ]
Retrieves the Python library directory path.
[ "Retrieves", "the", "Python", "library", "directory", "path", "." ]
5f59d11dbb52690b4155f2cc3fcb1ac512d076a8
https://github.com/libyal/libbde/blob/5f59d11dbb52690b4155f2cc3fcb1ac512d076a8/setup.py#L267-L275
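A rough modern equivalent of the helper above, sketched with the stdlib sysconfig module rather than the distutils one the setup.py imports; the example output path is an assumption about a typical Linux layout.

```python
import os
import sys
import sysconfig

def python_library_directory():
    # e.g. /usr/lib/python3.11/site-packages on a typical Linux install
    path = sysconfig.get_path('platlib')
    # keep only the part relative to the installation prefix
    _, _, path = path.rpartition(sys.prefix)
    return path.lstrip(os.sep)

print(python_library_directory())  # e.g. lib/python3.11/site-packages
```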
7,303
libyal/libbde
setup.py
custom_build_ext.run
def run(self): """Runs the build extension.""" compiler = new_compiler(compiler=self.compiler) if compiler.compiler_type == "msvc": self.define = [ ("UNICODE", ""), ] else: command = "sh configure --disable-shared-libs" output = self._RunCommand(command) print_line = False for line in output.split("\n"): line = line.rstrip() if line == "configure:": print_line = True if print_line: print(line) self.define = [ ("HAVE_CONFIG_H", ""), ("LOCALEDIR", "\"/usr/share/locale\""), ] build_ext.run(self)
python
def run(self): """Runs the build extension.""" compiler = new_compiler(compiler=self.compiler) if compiler.compiler_type == "msvc": self.define = [ ("UNICODE", ""), ] else: command = "sh configure --disable-shared-libs" output = self._RunCommand(command) print_line = False for line in output.split("\n"): line = line.rstrip() if line == "configure:": print_line = True if print_line: print(line) self.define = [ ("HAVE_CONFIG_H", ""), ("LOCALEDIR", "\"/usr/share/locale\""), ] build_ext.run(self)
[ "def", "run", "(", "self", ")", ":", "compiler", "=", "new_compiler", "(", "compiler", "=", "self", ".", "compiler", ")", "if", "compiler", ".", "compiler_type", "==", "\"msvc\"", ":", "self", ".", "define", "=", "[", "(", "\"UNICODE\"", ",", "\"\"", ")", ",", "]", "else", ":", "command", "=", "\"sh configure --disable-shared-libs\"", "output", "=", "self", ".", "_RunCommand", "(", "command", ")", "print_line", "=", "False", "for", "line", "in", "output", ".", "split", "(", "\"\\n\"", ")", ":", "line", "=", "line", ".", "rstrip", "(", ")", "if", "line", "==", "\"configure:\"", ":", "print_line", "=", "True", "if", "print_line", ":", "print", "(", "line", ")", "self", ".", "define", "=", "[", "(", "\"HAVE_CONFIG_H\"", ",", "\"\"", ")", ",", "(", "\"LOCALEDIR\"", ",", "\"\\\"/usr/share/locale\\\"\"", ")", ",", "]", "build_ext", ".", "run", "(", "self", ")" ]
Runs the build extension.
[ "Runs", "the", "build", "extension", "." ]
5f59d11dbb52690b4155f2cc3fcb1ac512d076a8
https://github.com/libyal/libbde/blob/5f59d11dbb52690b4155f2cc3fcb1ac512d076a8/setup.py#L82-L108
7,304
libyal/libbde
setup.py
ProjectInformation._ReadConfigureAc
def _ReadConfigureAc(self): """Reads configure.ac to initialize the project information.""" file_object = open("configure.ac", "rb") if not file_object: raise IOError("Unable to open: configure.ac") found_ac_init = False found_library_name = False for line in file_object.readlines(): line = line.strip() if found_library_name: library_version = line[1:-2] if sys.version_info[0] >= 3: library_version = library_version.decode("ascii") self.library_version = library_version break elif found_ac_init: library_name = line[1:-2] if sys.version_info[0] >= 3: library_name = library_name.decode("ascii") self.library_name = library_name found_library_name = True elif line.startswith(b"AC_INIT"): found_ac_init = True file_object.close() if not self.library_name or not self.library_version: raise RuntimeError( "Unable to find library name and version in: configure.ac")
python
def _ReadConfigureAc(self): """Reads configure.ac to initialize the project information.""" file_object = open("configure.ac", "rb") if not file_object: raise IOError("Unable to open: configure.ac") found_ac_init = False found_library_name = False for line in file_object.readlines(): line = line.strip() if found_library_name: library_version = line[1:-2] if sys.version_info[0] >= 3: library_version = library_version.decode("ascii") self.library_version = library_version break elif found_ac_init: library_name = line[1:-2] if sys.version_info[0] >= 3: library_name = library_name.decode("ascii") self.library_name = library_name found_library_name = True elif line.startswith(b"AC_INIT"): found_ac_init = True file_object.close() if not self.library_name or not self.library_version: raise RuntimeError( "Unable to find library name and version in: configure.ac")
[ "def", "_ReadConfigureAc", "(", "self", ")", ":", "file_object", "=", "open", "(", "\"configure.ac\"", ",", "\"rb\"", ")", "if", "not", "file_object", ":", "raise", "IOError", "(", "\"Unable to open: configure.ac\"", ")", "found_ac_init", "=", "False", "found_library_name", "=", "False", "for", "line", "in", "file_object", ".", "readlines", "(", ")", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "found_library_name", ":", "library_version", "=", "line", "[", "1", ":", "-", "2", "]", "if", "sys", ".", "version_info", "[", "0", "]", ">=", "3", ":", "library_version", "=", "library_version", ".", "decode", "(", "\"ascii\"", ")", "self", ".", "library_version", "=", "library_version", "break", "elif", "found_ac_init", ":", "library_name", "=", "line", "[", "1", ":", "-", "2", "]", "if", "sys", ".", "version_info", "[", "0", "]", ">=", "3", ":", "library_name", "=", "library_name", ".", "decode", "(", "\"ascii\"", ")", "self", ".", "library_name", "=", "library_name", "found_library_name", "=", "True", "elif", "line", ".", "startswith", "(", "b\"AC_INIT\"", ")", ":", "found_ac_init", "=", "True", "file_object", ".", "close", "(", ")", "if", "not", "self", ".", "library_name", "or", "not", "self", ".", "library_version", ":", "raise", "RuntimeError", "(", "\"Unable to find library name and version in: configure.ac\"", ")" ]
Reads configure.ac to initialize the project information.
[ "Reads", "configure", ".", "ac", "to", "initialize", "the", "project", "information", "." ]
5f59d11dbb52690b4155f2cc3fcb1ac512d076a8
https://github.com/libyal/libbde/blob/5f59d11dbb52690b4155f2cc3fcb1ac512d076a8/setup.py#L198-L229
7,305
libyal/libbde
setup.py
ProjectInformation._ReadMakefileAm
def _ReadMakefileAm(self): """Reads Makefile.am to initialize the project information.""" if not self.library_name: raise RuntimeError("Missing library name") file_object = open("Makefile.am", "rb") if not file_object: raise IOError("Unable to open: Makefile.am") found_subdirs = False for line in file_object.readlines(): line = line.strip() if found_subdirs: library_name, _, _ = line.partition(b" ") if sys.version_info[0] >= 3: library_name = library_name.decode("ascii") self.include_directories.append(library_name) if library_name.startswith("lib"): self.library_names.append(library_name) if library_name == self.library_name: break elif line.startswith(b"SUBDIRS"): found_subdirs = True file_object.close() if not self.include_directories or not self.library_names: raise RuntimeError( "Unable to find include directories and library names in: " "Makefile.am")
python
def _ReadMakefileAm(self): """Reads Makefile.am to initialize the project information.""" if not self.library_name: raise RuntimeError("Missing library name") file_object = open("Makefile.am", "rb") if not file_object: raise IOError("Unable to open: Makefile.am") found_subdirs = False for line in file_object.readlines(): line = line.strip() if found_subdirs: library_name, _, _ = line.partition(b" ") if sys.version_info[0] >= 3: library_name = library_name.decode("ascii") self.include_directories.append(library_name) if library_name.startswith("lib"): self.library_names.append(library_name) if library_name == self.library_name: break elif line.startswith(b"SUBDIRS"): found_subdirs = True file_object.close() if not self.include_directories or not self.library_names: raise RuntimeError( "Unable to find include directories and library names in: " "Makefile.am")
[ "def", "_ReadMakefileAm", "(", "self", ")", ":", "if", "not", "self", ".", "library_name", ":", "raise", "RuntimeError", "(", "\"Missing library name\"", ")", "file_object", "=", "open", "(", "\"Makefile.am\"", ",", "\"rb\"", ")", "if", "not", "file_object", ":", "raise", "IOError", "(", "\"Unable to open: Makefile.am\"", ")", "found_subdirs", "=", "False", "for", "line", "in", "file_object", ".", "readlines", "(", ")", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "found_subdirs", ":", "library_name", ",", "_", ",", "_", "=", "line", ".", "partition", "(", "b\" \"", ")", "if", "sys", ".", "version_info", "[", "0", "]", ">=", "3", ":", "library_name", "=", "library_name", ".", "decode", "(", "\"ascii\"", ")", "self", ".", "include_directories", ".", "append", "(", "library_name", ")", "if", "library_name", ".", "startswith", "(", "\"lib\"", ")", ":", "self", ".", "library_names", ".", "append", "(", "library_name", ")", "if", "library_name", "==", "self", ".", "library_name", ":", "break", "elif", "line", ".", "startswith", "(", "b\"SUBDIRS\"", ")", ":", "found_subdirs", "=", "True", "file_object", ".", "close", "(", ")", "if", "not", "self", ".", "include_directories", "or", "not", "self", ".", "library_names", ":", "raise", "RuntimeError", "(", "\"Unable to find include directories and library names in: \"", "\"Makefile.am\"", ")" ]
Reads Makefile.am to initialize the project information.
[ "Reads", "Makefile", ".", "am", "to", "initialize", "the", "project", "information", "." ]
5f59d11dbb52690b4155f2cc3fcb1ac512d076a8
https://github.com/libyal/libbde/blob/5f59d11dbb52690b4155f2cc3fcb1ac512d076a8/setup.py#L231-L264
7,306
amol-/dukpy
dukpy/babel.py
babel_compile
def babel_compile(source, **kwargs): """Compiles the given ``source`` from ES6 to ES5 using Babeljs""" presets = kwargs.get('presets') if not presets: kwargs['presets'] = ["es2015"] with open(BABEL_COMPILER, 'rb') as babel_js: return evaljs( (babel_js.read().decode('utf-8'), 'var bres, res;' 'bres = Babel.transform(dukpy.es6code, dukpy.babel_options);', 'res = {map: bres.map, code: bres.code};'), es6code=source, babel_options=kwargs )
python
def babel_compile(source, **kwargs): """Compiles the given ``source`` from ES6 to ES5 using Babeljs""" presets = kwargs.get('presets') if not presets: kwargs['presets'] = ["es2015"] with open(BABEL_COMPILER, 'rb') as babel_js: return evaljs( (babel_js.read().decode('utf-8'), 'var bres, res;' 'bres = Babel.transform(dukpy.es6code, dukpy.babel_options);', 'res = {map: bres.map, code: bres.code};'), es6code=source, babel_options=kwargs )
[ "def", "babel_compile", "(", "source", ",", "*", "*", "kwargs", ")", ":", "presets", "=", "kwargs", ".", "get", "(", "'presets'", ")", "if", "not", "presets", ":", "kwargs", "[", "'presets'", "]", "=", "[", "\"es2015\"", "]", "with", "open", "(", "BABEL_COMPILER", ",", "'rb'", ")", "as", "babel_js", ":", "return", "evaljs", "(", "(", "babel_js", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", ",", "'var bres, res;'", "'bres = Babel.transform(dukpy.es6code, dukpy.babel_options);'", ",", "'res = {map: bres.map, code: bres.code};'", ")", ",", "es6code", "=", "source", ",", "babel_options", "=", "kwargs", ")" ]
Compiles the given ``source`` from ES6 to ES5 using Babeljs
[ "Compiles", "the", "given", "source", "from", "ES6", "to", "ES5", "using", "Babeljs" ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/babel.py#L7-L20
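A hedged usage sketch, assuming the dukpy package is installed and exports babel_compile at module level: the JavaScript snippet above builds an object with map and code keys, so the transpiled source should be under 'code'.

```python
import dukpy  # assumes dukpy is installed

res = dukpy.babel_compile('const double = (x) => x * 2;')
print(res['code'])  # transpiled ES5 source
```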
7,307
amol-/dukpy
dukpy/coffee.py
coffee_compile
def coffee_compile(source): """Compiles the given ``source`` from CoffeeScript to JavaScript""" with open(COFFEE_COMPILER, 'rb') as coffeescript_js: return evaljs( (coffeescript_js.read().decode('utf-8'), 'CoffeeScript.compile(dukpy.coffeecode)'), coffeecode=source )
python
def coffee_compile(source): """Compiles the given ``source`` from CoffeeScript to JavaScript""" with open(COFFEE_COMPILER, 'rb') as coffeescript_js: return evaljs( (coffeescript_js.read().decode('utf-8'), 'CoffeeScript.compile(dukpy.coffeecode)'), coffeecode=source )
[ "def", "coffee_compile", "(", "source", ")", ":", "with", "open", "(", "COFFEE_COMPILER", ",", "'rb'", ")", "as", "coffeescript_js", ":", "return", "evaljs", "(", "(", "coffeescript_js", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", ",", "'CoffeeScript.compile(dukpy.coffeecode)'", ")", ",", "coffeecode", "=", "source", ")" ]
Compiles the given ``source`` from CoffeeScript to JavaScript
[ "Compiles", "the", "given", "source", "from", "CoffeeScript", "to", "JavaScript" ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/coffee.py#L7-L14
7,308
amol-/dukpy
dukpy/module_loader.py
JSModuleLoader.register_path
def register_path(self, path): """Registers a directory where to look for modules. By default only modules relative to current path are found. """ self._paths.insert(0, os.path.abspath(path))
python
def register_path(self, path): """Registers a directory where to look for modules. By default only modules relative to current path are found. """ self._paths.insert(0, os.path.abspath(path))
[ "def", "register_path", "(", "self", ",", "path", ")", ":", "self", ".", "_paths", ".", "insert", "(", "0", ",", "os", ".", "path", ".", "abspath", "(", "path", ")", ")" ]
Registers a directory where to look for modules. By default only modules relative to current path are found.
[ "Registers", "a", "directory", "where", "to", "look", "for", "modules", "." ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/module_loader.py#L20-L25
7,309
amol-/dukpy
dukpy/module_loader.py
JSModuleLoader.lookup
def lookup(self, module_name): """Searches for a file providing given module. Returns the normalized module id and path of the file. """ for search_path in self._paths: module_path = os.path.join(search_path, module_name) new_module_name, module_file = self._lookup(module_path, module_name) if module_file: return new_module_name, module_file return None, None
python
def lookup(self, module_name): """Searches for a file providing given module. Returns the normalized module id and path of the file. """ for search_path in self._paths: module_path = os.path.join(search_path, module_name) new_module_name, module_file = self._lookup(module_path, module_name) if module_file: return new_module_name, module_file return None, None
[ "def", "lookup", "(", "self", ",", "module_name", ")", ":", "for", "search_path", "in", "self", ".", "_paths", ":", "module_path", "=", "os", ".", "path", ".", "join", "(", "search_path", ",", "module_name", ")", "new_module_name", ",", "module_file", "=", "self", ".", "_lookup", "(", "module_path", ",", "module_name", ")", "if", "module_file", ":", "return", "new_module_name", ",", "module_file", "return", "None", ",", "None" ]
Searches for a file providing given module. Returns the normalized module id and path of the file.
[ "Searches", "for", "a", "file", "providing", "given", "module", "." ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/module_loader.py#L27-L37
7,310
amol-/dukpy
dukpy/module_loader.py
JSModuleLoader.load
def load(self, module_name): """Returns source code and normalized module id of the given module. Only supports source code files encoded as UTF-8 """ module_name, path = self.lookup(module_name) if path: with open(path, 'rb') as f: return module_name, f.read().decode('utf-8') return None, None
python
def load(self, module_name): """Returns source code and normalized module id of the given module. Only supports source code files encoded as UTF-8 """ module_name, path = self.lookup(module_name) if path: with open(path, 'rb') as f: return module_name, f.read().decode('utf-8') return None, None
[ "def", "load", "(", "self", ",", "module_name", ")", ":", "module_name", ",", "path", "=", "self", ".", "lookup", "(", "module_name", ")", "if", "path", ":", "with", "open", "(", "path", ",", "'rb'", ")", "as", "f", ":", "return", "module_name", ",", "f", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", "return", "None", ",", "None" ]
Returns source code and normalized module id of the given module. Only supports source code files encoded as UTF-8
[ "Returns", "source", "code", "and", "normalized", "module", "id", "of", "the", "given", "module", "." ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/module_loader.py#L39-L48
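A hypothetical, stripped-down sketch of the register_path/lookup/load flow shown in the last three records; the real _lookup also understands package.json and index.js, which this toy version skips by simply appending '.js'.

```python
import os

class TinyLoader:
    """Toy loader: first registered path wins, modules are plain .js files."""

    def __init__(self):
        self._paths = [os.getcwd()]

    def register_path(self, path):
        self._paths.insert(0, os.path.abspath(path))

    def load(self, module_name):
        for search_path in self._paths:
            candidate = os.path.join(search_path, module_name + '.js')
            if os.path.isfile(candidate):
                with open(candidate, 'rb') as f:
                    return module_name, f.read().decode('utf-8')
        return None, None

loader = TinyLoader()
loader.register_path('./js_modules')   # hypothetical directory
print(loader.load('missing_module'))   # (None, None) when nothing is found
```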
7,311
amol-/dukpy
dukpy/lessc.py
less_compile
def less_compile(source, options=None): """Compiles the given ``source`` from LESS to CSS""" options = options or {} res = NodeLikeInterpreter().evaljs( ('var result = null;' 'var less = require("less/less-node");', 'less.render(dukpy.lesscode, dukpy.lessoptions, function(error, output) {' ' result = {"error": error, "output": output};' '});' 'result;'), lesscode=source, lessoptions=options ) if not res: raise RuntimeError('Results or errors unavailable') if res.get('error'): raise LessCompilerError(res['error']['message']) return res['output']['css']
python
def less_compile(source, options=None): """Compiles the given ``source`` from LESS to CSS""" options = options or {} res = NodeLikeInterpreter().evaljs( ('var result = null;' 'var less = require("less/less-node");', 'less.render(dukpy.lesscode, dukpy.lessoptions, function(error, output) {' ' result = {"error": error, "output": output};' '});' 'result;'), lesscode=source, lessoptions=options ) if not res: raise RuntimeError('Results or errors unavailable') if res.get('error'): raise LessCompilerError(res['error']['message']) return res['output']['css']
[ "def", "less_compile", "(", "source", ",", "options", "=", "None", ")", ":", "options", "=", "options", "or", "{", "}", "res", "=", "NodeLikeInterpreter", "(", ")", ".", "evaljs", "(", "(", "'var result = null;'", "'var less = require(\"less/less-node\");'", ",", "'less.render(dukpy.lesscode, dukpy.lessoptions, function(error, output) {'", "' result = {\"error\": error, \"output\": output};'", "'});'", "'result;'", ")", ",", "lesscode", "=", "source", ",", "lessoptions", "=", "options", ")", "if", "not", "res", ":", "raise", "RuntimeError", "(", "'Results or errors unavailable'", ")", "if", "res", ".", "get", "(", "'error'", ")", ":", "raise", "LessCompilerError", "(", "res", "[", "'error'", "]", "[", "'message'", "]", ")", "return", "res", "[", "'output'", "]", "[", "'css'", "]" ]
Compiles the given ``source`` from LESS to CSS
[ "Compiles", "the", "given", "source", "from", "LESS", "to", "CSS" ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/lessc.py#L4-L23
7,312
amol-/dukpy
dukpy/install.py
install_jspackage
def install_jspackage(package_name, version, modulesdir): """Installs a JavaScript package downloaded from npmjs.org. For example to install React:: install_jspackage('react', '0.14.8', './node_modules') To install last version provide `None` as the version. """ if not version: version = '' requirements = _resolve_dependencies(package_name, version) print('Packages going to be installed: {0}'.format(', '.join( '{0}->{1}'.format(*i) for i in requirements ))) downloads = {} for dependency_name, _, version_info in requirements: try: downloads[dependency_name] = version_info['dist']['tarball'] except KeyError: raise JSPackageInstallError('Unable to detect a supported download url for package', error_code=3) for dependency_name, download_url in downloads.items(): tarball = BytesIO() print('Fetching {0}'.format(download_url), end='') with closing(urlopen(download_url)) as data: chunk = data.read(1024) while chunk: print('.', end='') tarball.write(chunk) chunk = data.read(1024) print('') tarball.seek(0) with closing(tarfile.open(fileobj=tarball)) as tb: dest = os.path.join(modulesdir, dependency_name) tmpdir = tempfile.mkdtemp() try: tb.extractall(tmpdir) shutil.rmtree(os.path.abspath(dest), ignore_errors=True) shutil.move(os.path.join(tmpdir, 'package'), os.path.abspath(dest)) finally: shutil.rmtree(tmpdir) print('Installing {0} in {1} Done!'.format(package_name, modulesdir))
python
def install_jspackage(package_name, version, modulesdir): """Installs a JavaScript package downloaded from npmjs.org. For example to install React:: install_jspackage('react', '0.14.8', './node_modules') To install last version provide `None` as the version. """ if not version: version = '' requirements = _resolve_dependencies(package_name, version) print('Packages going to be installed: {0}'.format(', '.join( '{0}->{1}'.format(*i) for i in requirements ))) downloads = {} for dependency_name, _, version_info in requirements: try: downloads[dependency_name] = version_info['dist']['tarball'] except KeyError: raise JSPackageInstallError('Unable to detect a supported download url for package', error_code=3) for dependency_name, download_url in downloads.items(): tarball = BytesIO() print('Fetching {0}'.format(download_url), end='') with closing(urlopen(download_url)) as data: chunk = data.read(1024) while chunk: print('.', end='') tarball.write(chunk) chunk = data.read(1024) print('') tarball.seek(0) with closing(tarfile.open(fileobj=tarball)) as tb: dest = os.path.join(modulesdir, dependency_name) tmpdir = tempfile.mkdtemp() try: tb.extractall(tmpdir) shutil.rmtree(os.path.abspath(dest), ignore_errors=True) shutil.move(os.path.join(tmpdir, 'package'), os.path.abspath(dest)) finally: shutil.rmtree(tmpdir) print('Installing {0} in {1} Done!'.format(package_name, modulesdir))
[ "def", "install_jspackage", "(", "package_name", ",", "version", ",", "modulesdir", ")", ":", "if", "not", "version", ":", "version", "=", "''", "requirements", "=", "_resolve_dependencies", "(", "package_name", ",", "version", ")", "print", "(", "'Packages going to be installed: {0}'", ".", "format", "(", "', '", ".", "join", "(", "'{0}->{1}'", ".", "format", "(", "*", "i", ")", "for", "i", "in", "requirements", ")", ")", ")", "downloads", "=", "{", "}", "for", "dependency_name", ",", "_", ",", "version_info", "in", "requirements", ":", "try", ":", "downloads", "[", "dependency_name", "]", "=", "version_info", "[", "'dist'", "]", "[", "'tarball'", "]", "except", "KeyError", ":", "raise", "JSPackageInstallError", "(", "'Unable to detect a supported download url for package'", ",", "error_code", "=", "3", ")", "for", "dependency_name", ",", "download_url", "in", "downloads", ".", "items", "(", ")", ":", "tarball", "=", "BytesIO", "(", ")", "print", "(", "'Fetching {0}'", ".", "format", "(", "download_url", ")", ",", "end", "=", "''", ")", "with", "closing", "(", "urlopen", "(", "download_url", ")", ")", "as", "data", ":", "chunk", "=", "data", ".", "read", "(", "1024", ")", "while", "chunk", ":", "print", "(", "'.'", ",", "end", "=", "''", ")", "tarball", ".", "write", "(", "chunk", ")", "chunk", "=", "data", ".", "read", "(", "1024", ")", "print", "(", "''", ")", "tarball", ".", "seek", "(", "0", ")", "with", "closing", "(", "tarfile", ".", "open", "(", "fileobj", "=", "tarball", ")", ")", "as", "tb", ":", "dest", "=", "os", ".", "path", ".", "join", "(", "modulesdir", ",", "dependency_name", ")", "tmpdir", "=", "tempfile", ".", "mkdtemp", "(", ")", "try", ":", "tb", ".", "extractall", "(", "tmpdir", ")", "shutil", ".", "rmtree", "(", "os", ".", "path", ".", "abspath", "(", "dest", ")", ",", "ignore_errors", "=", "True", ")", "shutil", ".", "move", "(", "os", ".", "path", ".", "join", "(", "tmpdir", ",", "'package'", ")", ",", "os", ".", "path", ".", "abspath", "(", "dest", ")", ")", "finally", ":", "shutil", ".", "rmtree", "(", "tmpdir", ")", "print", "(", "'Installing {0} in {1} Done!'", ".", "format", "(", "package_name", ",", "modulesdir", ")", ")" ]
Installs a JavaScript package downloaded from npmjs.org. For example to install React:: install_jspackage('react', '0.14.8', './node_modules') To install last version provide `None` as the version.
[ "Installs", "a", "JavaScript", "package", "downloaded", "from", "npmjs", ".", "org", "." ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/install.py#L39-L87
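The core of the installer above is a download-into-memory / extract-tarball loop; a standalone sketch of just that part follows. Dependency resolution and the move of the extracted package directory are omitted, and the npm tarball URL in the comment is illustrative only.

```python
import tarfile
from contextlib import closing
from io import BytesIO
from urllib.request import urlopen

def fetch_and_extract(url, dest_dir):
    buf = BytesIO()
    with closing(urlopen(url)) as resp:
        # stream the tarball into memory, 1 KiB at a time, as above
        for chunk in iter(lambda: resp.read(1024), b''):
            buf.write(chunk)
    buf.seek(0)
    with closing(tarfile.open(fileobj=buf)) as tb:
        tb.extractall(dest_dir)

# Illustrative call (URL and destination are examples, not verified):
# fetch_and_extract('https://registry.npmjs.org/react/-/react-0.14.8.tgz', './node_modules/react')
```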
7,313
amol-/dukpy
dukpy/evaljs.py
JSInterpreter.evaljs
def evaljs(self, code, **kwargs): """Runs JavaScript code in the context of the interpreter. All arguments will be converted to plain javascript objects through the JSON encoder and will be available in `dukpy` global object. Returns the last object on javascript stack. """ jsvars = json.dumps(kwargs) jscode = self._adapt_code(code) if not isinstance(jscode, bytes): jscode = jscode.encode('utf-8') if not isinstance(jsvars, bytes): jsvars = jsvars.encode('utf-8') res = _dukpy.eval_string(self, jscode, jsvars) if res is None: return None return json.loads(res.decode('utf-8'))
python
def evaljs(self, code, **kwargs): """Runs JavaScript code in the context of the interpreter. All arguments will be converted to plain javascript objects through the JSON encoder and will be available in `dukpy` global object. Returns the last object on javascript stack. """ jsvars = json.dumps(kwargs) jscode = self._adapt_code(code) if not isinstance(jscode, bytes): jscode = jscode.encode('utf-8') if not isinstance(jsvars, bytes): jsvars = jsvars.encode('utf-8') res = _dukpy.eval_string(self, jscode, jsvars) if res is None: return None return json.loads(res.decode('utf-8'))
[ "def", "evaljs", "(", "self", ",", "code", ",", "*", "*", "kwargs", ")", ":", "jsvars", "=", "json", ".", "dumps", "(", "kwargs", ")", "jscode", "=", "self", ".", "_adapt_code", "(", "code", ")", "if", "not", "isinstance", "(", "jscode", ",", "bytes", ")", ":", "jscode", "=", "jscode", ".", "encode", "(", "'utf-8'", ")", "if", "not", "isinstance", "(", "jsvars", ",", "bytes", ")", ":", "jsvars", "=", "jsvars", ".", "encode", "(", "'utf-8'", ")", "res", "=", "_dukpy", ".", "eval_string", "(", "self", ",", "jscode", ",", "jsvars", ")", "if", "res", "is", "None", ":", "return", "None", "return", "json", ".", "loads", "(", "res", ".", "decode", "(", "'utf-8'", ")", ")" ]
Runs JavaScript code in the context of the interpreter. All arguments will be converted to plain javascript objects through the JSON encoder and will be available in `dukpy` global object. Returns the last object on javascript stack.
[ "Runs", "JavaScript", "code", "in", "the", "context", "of", "the", "interpreter", "." ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/evaljs.py#L39-L61
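A hedged usage sketch, assuming dukpy is installed: keyword arguments are JSON-encoded and exposed on the dukpy global inside the interpreter, and the last value on the JavaScript stack comes back as a Python object.

```python
import dukpy  # assumes dukpy is installed

# module-level helper, one-shot interpreter
print(dukpy.evaljs("dukpy.a + dukpy.b", a=40, b=2))  # 42

# reusable interpreter instance
interp = dukpy.JSInterpreter()
print(interp.evaljs("[1, 2, 3].map(function (x) { return x * 2; })"))  # [2, 4, 6]
```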
7,314
amol-/dukpy
dukpy/tsc.py
typescript_compile
def typescript_compile(source): """Compiles the given ``source`` from TypeScript to ES5 using TypescriptServices.js""" with open(TS_COMPILER, 'r') as tsservices_js: return evaljs( (tsservices_js.read(), 'ts.transpile(dukpy.tscode, {options});'.format(options=TSC_OPTIONS)), tscode=source )
python
def typescript_compile(source): """Compiles the given ``source`` from TypeScript to ES5 using TypescriptServices.js""" with open(TS_COMPILER, 'r') as tsservices_js: return evaljs( (tsservices_js.read(), 'ts.transpile(dukpy.tscode, {options});'.format(options=TSC_OPTIONS)), tscode=source )
[ "def", "typescript_compile", "(", "source", ")", ":", "with", "open", "(", "TS_COMPILER", ",", "'r'", ")", "as", "tsservices_js", ":", "return", "evaljs", "(", "(", "tsservices_js", ".", "read", "(", ")", ",", "'ts.transpile(dukpy.tscode, {options});'", ".", "format", "(", "options", "=", "TSC_OPTIONS", ")", ")", ",", "tscode", "=", "source", ")" ]
Compiles the given ``source`` from TypeScript to ES5 using TypescriptServices.js
[ "Compiles", "the", "given", "source", "from", "TypeScript", "to", "ES5", "using", "TypescriptServices", ".", "js" ]
69f56f375a217c9f907499c28dbc964af76feae6
https://github.com/amol-/dukpy/blob/69f56f375a217c9f907499c28dbc964af76feae6/dukpy/tsc.py#L8-L15
7,315
edoburu/django-private-storage
private_storage/views.py
PrivateStorageView.get_private_file
def get_private_file(self): """ Return all relevant data in a single object, so this is easy to extend and server implementations can pick what they need. """ return PrivateFile( request=self.request, storage=self.get_storage(), relative_name=self.get_path() )
python
def get_private_file(self): """ Return all relevant data in a single object, so this is easy to extend and server implementations can pick what they need. """ return PrivateFile( request=self.request, storage=self.get_storage(), relative_name=self.get_path() )
[ "def", "get_private_file", "(", "self", ")", ":", "return", "PrivateFile", "(", "request", "=", "self", ".", "request", ",", "storage", "=", "self", ".", "get_storage", "(", ")", ",", "relative_name", "=", "self", ".", "get_path", "(", ")", ")" ]
Return all relevant data in a single object, so this is easy to extend and server implementations can pick what they need.
[ "Return", "all", "relevant", "data", "in", "a", "single", "object", "so", "this", "is", "easy", "to", "extend", "and", "server", "implementations", "can", "pick", "what", "they", "need", "." ]
35b718024fee75b0ed3400f601976b20246c7d05
https://github.com/edoburu/django-private-storage/blob/35b718024fee75b0ed3400f601976b20246c7d05/private_storage/views.py#L55-L64
7,316
edoburu/django-private-storage
private_storage/views.py
PrivateStorageView.get
def get(self, request, *args, **kwargs): """ Handle incoming GET requests """ private_file = self.get_private_file() if not self.can_access_file(private_file): return HttpResponseForbidden('Private storage access denied') if not private_file.exists(): return self.serve_file_not_found(private_file) else: return self.serve_file(private_file)
python
def get(self, request, *args, **kwargs): """ Handle incoming GET requests """ private_file = self.get_private_file() if not self.can_access_file(private_file): return HttpResponseForbidden('Private storage access denied') if not private_file.exists(): return self.serve_file_not_found(private_file) else: return self.serve_file(private_file)
[ "def", "get", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "private_file", "=", "self", ".", "get_private_file", "(", ")", "if", "not", "self", ".", "can_access_file", "(", "private_file", ")", ":", "return", "HttpResponseForbidden", "(", "'Private storage access denied'", ")", "if", "not", "private_file", ".", "exists", "(", ")", ":", "return", "self", ".", "serve_file_not_found", "(", "private_file", ")", "else", ":", "return", "self", ".", "serve_file", "(", "private_file", ")" ]
Handle incoming GET requests
[ "Handle", "incoming", "GET", "requests" ]
35b718024fee75b0ed3400f601976b20246c7d05
https://github.com/edoburu/django-private-storage/blob/35b718024fee75b0ed3400f601976b20246c7d05/private_storage/views.py#L66-L78
7,317
edoburu/django-private-storage
private_storage/views.py
PrivateStorageView.serve_file
def serve_file(self, private_file): """ Serve the file that was retrieved from the storage. The relative path can be found with ``private_file.relative_name``. :type private_file: :class:`private_storage.models.PrivateFile` :rtype: django.http.HttpResponse """ response = self.server_class().serve(private_file) if self.content_disposition: # Join syntax works in all Python versions. Python 3 doesn't support b'..'.format(), # and % formatting was added for bytes in 3.5: https://bugs.python.org/issue3982 filename = self.get_content_disposition_filename(private_file) response['Content-Disposition'] = b'; '.join([ self.content_disposition.encode(), self._encode_filename_header(filename) ]) return response
python
def serve_file(self, private_file): """ Serve the file that was retrieved from the storage. The relative path can be found with ``private_file.relative_name``. :type private_file: :class:`private_storage.models.PrivateFile` :rtype: django.http.HttpResponse """ response = self.server_class().serve(private_file) if self.content_disposition: # Join syntax works in all Python versions. Python 3 doesn't support b'..'.format(), # and % formatting was added for bytes in 3.5: https://bugs.python.org/issue3982 filename = self.get_content_disposition_filename(private_file) response['Content-Disposition'] = b'; '.join([ self.content_disposition.encode(), self._encode_filename_header(filename) ]) return response
[ "def", "serve_file", "(", "self", ",", "private_file", ")", ":", "response", "=", "self", ".", "server_class", "(", ")", ".", "serve", "(", "private_file", ")", "if", "self", ".", "content_disposition", ":", "# Join syntax works in all Python versions. Python 3 doesn't support b'..'.format(),", "# and % formatting was added for bytes in 3.5: https://bugs.python.org/issue3982", "filename", "=", "self", ".", "get_content_disposition_filename", "(", "private_file", ")", "response", "[", "'Content-Disposition'", "]", "=", "b'; '", ".", "join", "(", "[", "self", ".", "content_disposition", ".", "encode", "(", ")", ",", "self", ".", "_encode_filename_header", "(", "filename", ")", "]", ")", "return", "response" ]
Serve the file that was retrieved from the storage. The relative path can be found with ``private_file.relative_name``. :type private_file: :class:`private_storage.models.PrivateFile` :rtype: django.http.HttpResponse
[ "Serve", "the", "file", "that", "was", "retrieved", "from", "the", "storage", ".", "The", "relative", "path", "can", "be", "found", "with", "private_file", ".", "relative_name", "." ]
35b718024fee75b0ed3400f601976b20246c7d05
https://github.com/edoburu/django-private-storage/blob/35b718024fee75b0ed3400f601976b20246c7d05/private_storage/views.py#L94-L112
7,318
edoburu/django-private-storage
private_storage/views.py
PrivateStorageView.get_content_disposition_filename
def get_content_disposition_filename(self, private_file): """ Return the filename in the download header. """ return self.content_disposition_filename or os.path.basename(private_file.relative_name)
python
def get_content_disposition_filename(self, private_file): """ Return the filename in the download header. """ return self.content_disposition_filename or os.path.basename(private_file.relative_name)
[ "def", "get_content_disposition_filename", "(", "self", ",", "private_file", ")", ":", "return", "self", ".", "content_disposition_filename", "or", "os", ".", "path", ".", "basename", "(", "private_file", ".", "relative_name", ")" ]
Return the filename in the download header.
[ "Return", "the", "filename", "in", "the", "download", "header", "." ]
35b718024fee75b0ed3400f601976b20246c7d05
https://github.com/edoburu/django-private-storage/blob/35b718024fee75b0ed3400f601976b20246c7d05/private_storage/views.py#L114-L118
7,319
edoburu/django-private-storage
private_storage/views.py
PrivateStorageView._encode_filename_header
def _encode_filename_header(self, filename): """ The filename, encoded to use in a ``Content-Disposition`` header. """ # Based on https://www.djangosnippets.org/snippets/1710/ user_agent = self.request.META.get('HTTP_USER_AGENT', None) if 'WebKit' in user_agent: # Support available for UTF-8 encoded strings. # This also matches Edgee. return u'filename={}'.format(filename).encode("utf-8") elif 'MSIE' in user_agent: # IE does not support RFC2231 for internationalized headers, but somehow # percent-decodes it so this can be used instead. Note that using the word # "attachment" anywhere in the filename overrides an inline content-disposition. url_encoded = quote(filename.encode("utf-8")).replace('attachment', "a%74tachment") return "filename={}".format(url_encoded).encode("utf-8") else: # For others like Firefox, we follow RFC2231 (encoding extension in HTTP headers). rfc2231_filename = quote(filename.encode("utf-8")) return "filename*=UTF-8''{}".format(rfc2231_filename).encode("utf-8")
python
def _encode_filename_header(self, filename): """ The filename, encoded to use in a ``Content-Disposition`` header. """ # Based on https://www.djangosnippets.org/snippets/1710/ user_agent = self.request.META.get('HTTP_USER_AGENT', None) if 'WebKit' in user_agent: # Support available for UTF-8 encoded strings. # This also matches Edgee. return u'filename={}'.format(filename).encode("utf-8") elif 'MSIE' in user_agent: # IE does not support RFC2231 for internationalized headers, but somehow # percent-decodes it so this can be used instead. Note that using the word # "attachment" anywhere in the filename overrides an inline content-disposition. url_encoded = quote(filename.encode("utf-8")).replace('attachment', "a%74tachment") return "filename={}".format(url_encoded).encode("utf-8") else: # For others like Firefox, we follow RFC2231 (encoding extension in HTTP headers). rfc2231_filename = quote(filename.encode("utf-8")) return "filename*=UTF-8''{}".format(rfc2231_filename).encode("utf-8")
[ "def", "_encode_filename_header", "(", "self", ",", "filename", ")", ":", "# Based on https://www.djangosnippets.org/snippets/1710/", "user_agent", "=", "self", ".", "request", ".", "META", ".", "get", "(", "'HTTP_USER_AGENT'", ",", "None", ")", "if", "'WebKit'", "in", "user_agent", ":", "# Support available for UTF-8 encoded strings.", "# This also matches Edgee.", "return", "u'filename={}'", ".", "format", "(", "filename", ")", ".", "encode", "(", "\"utf-8\"", ")", "elif", "'MSIE'", "in", "user_agent", ":", "# IE does not support RFC2231 for internationalized headers, but somehow", "# percent-decodes it so this can be used instead. Note that using the word", "# \"attachment\" anywhere in the filename overrides an inline content-disposition.", "url_encoded", "=", "quote", "(", "filename", ".", "encode", "(", "\"utf-8\"", ")", ")", ".", "replace", "(", "'attachment'", ",", "\"a%74tachment\"", ")", "return", "\"filename={}\"", ".", "format", "(", "url_encoded", ")", ".", "encode", "(", "\"utf-8\"", ")", "else", ":", "# For others like Firefox, we follow RFC2231 (encoding extension in HTTP headers).", "rfc2231_filename", "=", "quote", "(", "filename", ".", "encode", "(", "\"utf-8\"", ")", ")", "return", "\"filename*=UTF-8''{}\"", ".", "format", "(", "rfc2231_filename", ")", ".", "encode", "(", "\"utf-8\"", ")" ]
The filename, encoded to use in a ``Content-Disposition`` header.
[ "The", "filename", "encoded", "to", "use", "in", "a", "Content", "-", "Disposition", "header", "." ]
35b718024fee75b0ed3400f601976b20246c7d05
https://github.com/edoburu/django-private-storage/blob/35b718024fee75b0ed3400f601976b20246c7d05/private_storage/views.py#L120-L139
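The non-WebKit/non-MSIE branch above follows RFC 2231; a standalone sketch of just that encoding step is below (it returns text, whereas the view encodes the final header value to bytes).

```python
from urllib.parse import quote

def rfc2231_filename_header(filename):
    # percent-encode the UTF-8 bytes and use the filename* extended parameter
    return "filename*=UTF-8''{}".format(quote(filename.encode('utf-8')))

print(rfc2231_filename_header('résumé.pdf'))
# filename*=UTF-8''r%C3%A9sum%C3%A9.pdf
```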
7,320
edoburu/django-private-storage
private_storage/servers.py
add_no_cache_headers
def add_no_cache_headers(func): """ Makes sure the retrieved file is not cached on disk, or cached by proxy servers in between. This would circumvent any checking whether the user may even access the file. """ @wraps(func) def _dec(*args, **kwargs): response = func(*args, **kwargs) response['Expires'] = 'Thu, 01 Jan 1970 00:00:00 GMT' # HTTP 1.0 proxies response['Cache-Control'] = 'max-age=0, no-cache, must-revalidate, proxy-revalidate' # HTTP 1.1 return response return _dec
python
def add_no_cache_headers(func): """ Makes sure the retrieved file is not cached on disk, or cached by proxy servers in between. This would circumvent any checking whether the user may even access the file. """ @wraps(func) def _dec(*args, **kwargs): response = func(*args, **kwargs) response['Expires'] = 'Thu, 01 Jan 1970 00:00:00 GMT' # HTTP 1.0 proxies response['Cache-Control'] = 'max-age=0, no-cache, must-revalidate, proxy-revalidate' # HTTP 1.1 return response return _dec
[ "def", "add_no_cache_headers", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "_dec", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "response", "=", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "response", "[", "'Expires'", "]", "=", "'Thu, 01 Jan 1970 00:00:00 GMT'", "# HTTP 1.0 proxies", "response", "[", "'Cache-Control'", "]", "=", "'max-age=0, no-cache, must-revalidate, proxy-revalidate'", "# HTTP 1.1", "return", "response", "return", "_dec" ]
Makes sure the retrieved file is not cached on disk, or cached by proxy servers in between. This would circumvent any checking whether the user may even access the file.
[ "Makes", "sure", "the", "retrieved", "file", "is", "not", "cached", "on", "disk", "or", "cached", "by", "proxy", "servers", "in", "between", ".", "This", "would", "circumvent", "any", "checking", "whether", "the", "user", "may", "even", "access", "the", "file", "." ]
35b718024fee75b0ed3400f601976b20246c7d05
https://github.com/edoburu/django-private-storage/blob/35b718024fee75b0ed3400f601976b20246c7d05/private_storage/servers.py#L43-L56
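To see the decorator in isolation, here is the same wrapper applied to a plain function returning a dict, which stands in for Django's subscriptable HttpResponse so the snippet runs without Django.

```python
from functools import wraps

def add_no_cache_headers(func):
    @wraps(func)
    def _dec(*args, **kwargs):
        response = func(*args, **kwargs)
        response['Expires'] = 'Thu, 01 Jan 1970 00:00:00 GMT'  # HTTP 1.0 proxies
        response['Cache-Control'] = 'max-age=0, no-cache, must-revalidate, proxy-revalidate'  # HTTP 1.1
        return response
    return _dec

@add_no_cache_headers
def fake_view():
    return {}  # stands in for Django's subscriptable HttpResponse

print(fake_view()['Cache-Control'])  # max-age=0, no-cache, must-revalidate, proxy-revalidate
```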
7,321
datamade/parserator
parserator/training.py
readTrainingData
def readTrainingData(file_locations, GROUP_LABEL): ''' Used in downstream tests ''' class Mock(object): pass mock_module = Mock() mock_module.PARENT_LABEL = GROUP_LABEL for location in file_locations: with open(location) as f: tree = etree.parse(f) xml = tree.getroot() for each in data_prep_utils.TrainingData(xml, mock_module): yield each
python
def readTrainingData(file_locations, GROUP_LABEL): ''' Used in downstream tests ''' class Mock(object): pass mock_module = Mock() mock_module.PARENT_LABEL = GROUP_LABEL for location in file_locations: with open(location) as f: tree = etree.parse(f) xml = tree.getroot() for each in data_prep_utils.TrainingData(xml, mock_module): yield each
[ "def", "readTrainingData", "(", "file_locations", ",", "GROUP_LABEL", ")", ":", "class", "Mock", "(", "object", ")", ":", "pass", "mock_module", "=", "Mock", "(", ")", "mock_module", ".", "PARENT_LABEL", "=", "GROUP_LABEL", "for", "location", "in", "file_locations", ":", "with", "open", "(", "location", ")", "as", "f", ":", "tree", "=", "etree", ".", "parse", "(", "f", ")", "xml", "=", "tree", ".", "getroot", "(", ")", "for", "each", "in", "data_prep_utils", ".", "TrainingData", "(", "xml", ",", "mock_module", ")", ":", "yield", "each" ]
Used in downstream tests
[ "Used", "in", "downstream", "tests" ]
4dc69b0d115bf33e2d169ff40b05143257a5f481
https://github.com/datamade/parserator/blob/4dc69b0d115bf33e2d169ff40b05143257a5f481/parserator/training.py#L58-L72
7,322
Bouke/django-user-sessions
user_sessions/templatetags/user_sessions.py
device
def device(value): """ Transform a User Agent into human readable text. Example output: * Safari on iPhone * Chrome on Windows 8.1 * Safari on OS X * Firefox * Linux * None """ browser = None for regex, name in BROWSERS: if regex.search(value): browser = name break device = None for regex, name in DEVICES: if regex.search(value): device = name break if browser and device: return _('%(browser)s on %(device)s') % { 'browser': browser, 'device': device } if browser: return browser if device: return device return None
python
def device(value): """ Transform a User Agent into human readable text. Example output: * Safari on iPhone * Chrome on Windows 8.1 * Safari on OS X * Firefox * Linux * None """ browser = None for regex, name in BROWSERS: if regex.search(value): browser = name break device = None for regex, name in DEVICES: if regex.search(value): device = name break if browser and device: return _('%(browser)s on %(device)s') % { 'browser': browser, 'device': device } if browser: return browser if device: return device return None
[ "def", "device", "(", "value", ")", ":", "browser", "=", "None", "for", "regex", ",", "name", "in", "BROWSERS", ":", "if", "regex", ".", "search", "(", "value", ")", ":", "browser", "=", "name", "break", "device", "=", "None", "for", "regex", ",", "name", "in", "DEVICES", ":", "if", "regex", ".", "search", "(", "value", ")", ":", "device", "=", "name", "break", "if", "browser", "and", "device", ":", "return", "_", "(", "'%(browser)s on %(device)s'", ")", "%", "{", "'browser'", ":", "browser", ",", "'device'", ":", "device", "}", "if", "browser", ":", "return", "browser", "if", "device", ":", "return", "device", "return", "None" ]
Transform a User Agent into human readable text. Example output: * Safari on iPhone * Chrome on Windows 8.1 * Safari on OS X * Firefox * Linux * None
[ "Transform", "a", "User", "Agent", "into", "human", "readable", "text", "." ]
9362ad60d61b68faccac674e9aae030537ff821a
https://github.com/Bouke/django-user-sessions/blob/9362ad60d61b68faccac674e9aae030537ff821a/user_sessions/templatetags/user_sessions.py#L39-L77
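The filter depends on module-level BROWSERS and DEVICES regex tables that are not shown in the record; the sketch below substitutes tiny hypothetical tables just to make the matching logic runnable.

```python
import re

# Hypothetical, abbreviated tables -- the real module defines longer lists.
BROWSERS = [(re.compile('Chrome'), 'Chrome'), (re.compile('Firefox'), 'Firefox')]
DEVICES = [(re.compile('Windows'), 'Windows'), (re.compile('iPhone'), 'iPhone')]

def device(value):
    browser = next((name for rx, name in BROWSERS if rx.search(value)), None)
    dev = next((name for rx, name in DEVICES if rx.search(value)), None)
    if browser and dev:
        return '%s on %s' % (browser, dev)
    return browser or dev

print(device('Mozilla/5.0 (Windows NT 10.0) Chrome/120.0'))  # Chrome on Windows
```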
7,323
Bouke/django-user-sessions
user_sessions/templatetags/user_sessions.py
location
def location(value): """ Transform an IP address into an approximate location. Example output: * Zwolle, The Netherlands * The Netherlands * None """ try: location = geoip() and geoip().city(value) except Exception: try: location = geoip() and geoip().country(value) except Exception as e: warnings.warn(str(e)) location = None if location and location['country_name']: if 'city' in location and location['city']: return '{}, {}'.format(location['city'], location['country_name']) return location['country_name'] return None
python
def location(value): """ Transform an IP address into an approximate location. Example output: * Zwolle, The Netherlands * The Netherlands * None """ try: location = geoip() and geoip().city(value) except Exception: try: location = geoip() and geoip().country(value) except Exception as e: warnings.warn(str(e)) location = None if location and location['country_name']: if 'city' in location and location['city']: return '{}, {}'.format(location['city'], location['country_name']) return location['country_name'] return None
[ "def", "location", "(", "value", ")", ":", "try", ":", "location", "=", "geoip", "(", ")", "and", "geoip", "(", ")", ".", "city", "(", "value", ")", "except", "Exception", ":", "try", ":", "location", "=", "geoip", "(", ")", "and", "geoip", "(", ")", ".", "country", "(", "value", ")", "except", "Exception", "as", "e", ":", "warnings", ".", "warn", "(", "str", "(", "e", ")", ")", "location", "=", "None", "if", "location", "and", "location", "[", "'country_name'", "]", ":", "if", "'city'", "in", "location", "and", "location", "[", "'city'", "]", ":", "return", "'{}, {}'", ".", "format", "(", "location", "[", "'city'", "]", ",", "location", "[", "'country_name'", "]", ")", "return", "location", "[", "'country_name'", "]", "return", "None" ]
Transform an IP address into an approximate location. Example output: * Zwolle, The Netherlands * The Netherlands * None
[ "Transform", "an", "IP", "address", "into", "an", "approximate", "location", "." ]
9362ad60d61b68faccac674e9aae030537ff821a
https://github.com/Bouke/django-user-sessions/blob/9362ad60d61b68faccac674e9aae030537ff821a/user_sessions/templatetags/user_sessions.py#L81-L103
7,324
dschep/lambda-decorators
lambda_decorators.py
before
def before(func): """ Run a function before the handler is invoked, is passed the event & context and must return an event & context too. Usage:: >>> # to create a reusable decorator >>> @before ... def print_request_id(event, context): ... print(context.aws_request_id) ... return event, context >>> @print_request_id ... def handler(event, context): ... pass >>> class Context: ... aws_request_id = 'ID!' >>> handler({}, Context()) ID! >>> # or a one off >>> @before(lambda e, c: (e['body'], c)) ... def handler(body, context): ... return body >>> handler({'body': 'BOOODYY'}, object()) 'BOOODYY' """ class BeforeDecorator(LambdaDecorator): def before(self, event, context): return func(event, context) return BeforeDecorator
python
def before(func): """ Run a function before the handler is invoked, is passed the event & context and must return an event & context too. Usage:: >>> # to create a reusable decorator >>> @before ... def print_request_id(event, context): ... print(context.aws_request_id) ... return event, context >>> @print_request_id ... def handler(event, context): ... pass >>> class Context: ... aws_request_id = 'ID!' >>> handler({}, Context()) ID! >>> # or a one off >>> @before(lambda e, c: (e['body'], c)) ... def handler(body, context): ... return body >>> handler({'body': 'BOOODYY'}, object()) 'BOOODYY' """ class BeforeDecorator(LambdaDecorator): def before(self, event, context): return func(event, context) return BeforeDecorator
[ "def", "before", "(", "func", ")", ":", "class", "BeforeDecorator", "(", "LambdaDecorator", ")", ":", "def", "before", "(", "self", ",", "event", ",", "context", ")", ":", "return", "func", "(", "event", ",", "context", ")", "return", "BeforeDecorator" ]
Run a function before the handler is invoked, is passed the event & context and must return an event & context too. Usage:: >>> # to create a reusable decorator >>> @before ... def print_request_id(event, context): ... print(context.aws_request_id) ... return event, context >>> @print_request_id ... def handler(event, context): ... pass >>> class Context: ... aws_request_id = 'ID!' >>> handler({}, Context()) ID! >>> # or a one off >>> @before(lambda e, c: (e['body'], c)) ... def handler(body, context): ... return body >>> handler({'body': 'BOOODYY'}, object()) 'BOOODYY'
[ "Run", "a", "function", "before", "the", "handler", "is", "invoked", "is", "passed", "the", "event", "&", "context", "and", "must", "return", "an", "event", "&", "context", "too", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L234-L264
7,325
dschep/lambda-decorators
lambda_decorators.py
after
def after(func): """ Run a function after the handler is invoked, is passed the response and must return an response too. Usage:: >>> # to create a reusable decorator >>> @after ... def gnu_terry_pratchett(retval): ... retval.setdefault('Headers', {})['X-Clacks-Overhead'] = 'GNU Terry Pratchett' ... return retval >>> @gnu_terry_pratchett ... def handler(event, context): ... return {'body': ''} >>> handler({}, object()) {'body': '', 'Headers': {'X-Clacks-Overhead': 'GNU Terry Pratchett'}} """ class AfterDecorator(LambdaDecorator): def after(self, retval): return func(retval) return AfterDecorator
python
def after(func): """ Run a function after the handler is invoked, is passed the response and must return an response too. Usage:: >>> # to create a reusable decorator >>> @after ... def gnu_terry_pratchett(retval): ... retval.setdefault('Headers', {})['X-Clacks-Overhead'] = 'GNU Terry Pratchett' ... return retval >>> @gnu_terry_pratchett ... def handler(event, context): ... return {'body': ''} >>> handler({}, object()) {'body': '', 'Headers': {'X-Clacks-Overhead': 'GNU Terry Pratchett'}} """ class AfterDecorator(LambdaDecorator): def after(self, retval): return func(retval) return AfterDecorator
[ "def", "after", "(", "func", ")", ":", "class", "AfterDecorator", "(", "LambdaDecorator", ")", ":", "def", "after", "(", "self", ",", "retval", ")", ":", "return", "func", "(", "retval", ")", "return", "AfterDecorator" ]
Run a function after the handler is invoked, is passed the response and must return a response too. Usage:: >>> # to create a reusable decorator >>> @after ... def gnu_terry_pratchett(retval): ... retval.setdefault('Headers', {})['X-Clacks-Overhead'] = 'GNU Terry Pratchett' ... return retval >>> @gnu_terry_pratchett ... def handler(event, context): ... return {'body': ''} >>> handler({}, object()) {'body': '', 'Headers': {'X-Clacks-Overhead': 'GNU Terry Pratchett'}}
[ "Run", "a", "function", "after", "the", "handler", "is", "invoked", "is", "passed", "the", "response", "and", "must", "return", "an", "response", "too", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L267-L289
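For contrast, a hand-rolled sketch of the after-hook: the wrapper simply post-processes whatever the handler returns. This is an illustration under the same assumptions as above, not the library's implementation:

import functools

def run_after(hook):
    # Return a decorator that post-processes the handler's return value.
    def decorator(handler):
        @functools.wraps(handler)
        def wrapper(event, context):
            return hook(handler(event, context))
        return wrapper
    return decorator

@run_after(lambda resp: {**resp, 'statusCode': resp.get('statusCode', 200)})
def handler(event, context):
    return {'body': 'ok'}

# handler({}, None) -> {'body': 'ok', 'statusCode': 200}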
7,326
dschep/lambda-decorators
lambda_decorators.py
on_exception
def on_exception(func): """ Run a function when a handler thows an exception. It's return value is returned to AWS. Usage:: >>> # to create a reusable decorator >>> @on_exception ... def handle_errors(exception): ... print(exception) ... return {'statusCode': 500, 'body': 'uh oh'} >>> @handle_errors ... def handler(event, context): ... raise Exception('it broke!') >>> handler({}, object()) it broke! {'statusCode': 500, 'body': 'uh oh'} >>> # or a one off >>> @on_exception(lambda e: {'statusCode': 500}) ... def handler(body, context): ... raise Exception >>> handler({}, object()) {'statusCode': 500} """ class OnExceptionDecorator(LambdaDecorator): def on_exception(self, exception): return func(exception) return OnExceptionDecorator
python
def on_exception(func): """ Run a function when a handler thows an exception. It's return value is returned to AWS. Usage:: >>> # to create a reusable decorator >>> @on_exception ... def handle_errors(exception): ... print(exception) ... return {'statusCode': 500, 'body': 'uh oh'} >>> @handle_errors ... def handler(event, context): ... raise Exception('it broke!') >>> handler({}, object()) it broke! {'statusCode': 500, 'body': 'uh oh'} >>> # or a one off >>> @on_exception(lambda e: {'statusCode': 500}) ... def handler(body, context): ... raise Exception >>> handler({}, object()) {'statusCode': 500} """ class OnExceptionDecorator(LambdaDecorator): def on_exception(self, exception): return func(exception) return OnExceptionDecorator
[ "def", "on_exception", "(", "func", ")", ":", "class", "OnExceptionDecorator", "(", "LambdaDecorator", ")", ":", "def", "on_exception", "(", "self", ",", "exception", ")", ":", "return", "func", "(", "exception", ")", "return", "OnExceptionDecorator" ]
Run a function when a handler throws an exception. Its return value is returned to AWS. Usage:: >>> # to create a reusable decorator >>> @on_exception ... def handle_errors(exception): ... print(exception) ... return {'statusCode': 500, 'body': 'uh oh'} >>> @handle_errors ... def handler(event, context): ... raise Exception('it broke!') >>> handler({}, object()) it broke! {'statusCode': 500, 'body': 'uh oh'} >>> # or a one off >>> @on_exception(lambda e: {'statusCode': 500}) ... def handler(body, context): ... raise Exception >>> handler({}, object()) {'statusCode': 500}
[ "Run", "a", "function", "when", "a", "handler", "thows", "an", "exception", ".", "It", "s", "return", "value", "is", "returned", "to", "AWS", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L292-L321
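The exception hook can likewise be sketched as a plain try/except wrapper; the 500 payload below is an assumed example, not mandated by the library:

import functools

def run_on_exception(hook):
    # Return a decorator that converts raised exceptions via `hook`.
    def decorator(handler):
        @functools.wraps(handler)
        def wrapper(event, context):
            try:
                return handler(event, context)
            except Exception as exc:  # broad catch mirrors the behaviour described above
                return hook(exc)
        return wrapper
    return decorator

@run_on_exception(lambda exc: {'statusCode': 500, 'body': str(exc)})
def handler(event, context):
    raise RuntimeError('it broke!')

# handler({}, None) -> {'statusCode': 500, 'body': 'it broke!'}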
7,327
dschep/lambda-decorators
lambda_decorators.py
async_handler
def async_handler(handler): """ This decorator allows for use of async handlers by automatically running them in an event loop. The loop is added to the context object for if the handler needs it. Usage:: >>> from lambda_decorators import async_handler >>> async def foobar(): ... return 'foobar' >>> @async_handler ... async def handler(event, context): ... return await foobar() >>> class Context: ... pass >>> handler({}, Context()) 'foobar' *NOTE: Python 3 only* """ @wraps(handler) def wrapper(event, context): context.loop = asyncio.get_event_loop() return context.loop.run_until_complete(handler(event, context)) return wrapper
python
def async_handler(handler): """ This decorator allows for use of async handlers by automatically running them in an event loop. The loop is added to the context object for if the handler needs it. Usage:: >>> from lambda_decorators import async_handler >>> async def foobar(): ... return 'foobar' >>> @async_handler ... async def handler(event, context): ... return await foobar() >>> class Context: ... pass >>> handler({}, Context()) 'foobar' *NOTE: Python 3 only* """ @wraps(handler) def wrapper(event, context): context.loop = asyncio.get_event_loop() return context.loop.run_until_complete(handler(event, context)) return wrapper
[ "def", "async_handler", "(", "handler", ")", ":", "@", "wraps", "(", "handler", ")", "def", "wrapper", "(", "event", ",", "context", ")", ":", "context", ".", "loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "return", "context", ".", "loop", ".", "run_until_complete", "(", "handler", "(", "event", ",", "context", ")", ")", "return", "wrapper" ]
This decorator allows for use of async handlers by automatically running them in an event loop. The loop is added to the context object in case the handler needs it. Usage:: >>> from lambda_decorators import async_handler >>> async def foobar(): ... return 'foobar' >>> @async_handler ... async def handler(event, context): ... return await foobar() >>> class Context: ... pass >>> handler({}, Context()) 'foobar' *NOTE: Python 3 only*
[ "This", "decorator", "allows", "for", "use", "of", "async", "handlers", "by", "automatically", "running", "them", "in", "an", "event", "loop", ".", "The", "loop", "is", "added", "to", "the", "context", "object", "for", "if", "the", "handler", "needs", "it", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L324-L351
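The core idea, driving an async handler from a synchronous entry point, can be shown with just the standard library. Unlike the decorator above, this sketch does not attach the loop to the context object, and it assumes Python 3.7+ for asyncio.run:

import asyncio
import functools

def sync_entrypoint(async_handler):
    # Run an async handler to completion from a synchronous caller.
    @functools.wraps(async_handler)
    def wrapper(event, context):
        return asyncio.run(async_handler(event, context))
    return wrapper

@sync_entrypoint
async def handler(event, context):
    await asyncio.sleep(0)
    return 'done'

# handler({}, None) -> 'done'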
7,328
dschep/lambda-decorators
lambda_decorators.py
dump_json_body
def dump_json_body(handler): """ Automatically serialize response bodies with json.dumps. Returns a 500 error if the response cannot be serialized Usage:: >>> from lambda_decorators import dump_json_body >>> @dump_json_body ... def handler(event, context): ... return {'statusCode': 200, 'body': {'hello': 'world'}} >>> handler({}, object()) {'statusCode': 200, 'body': '{"hello": "world"}'} """ @wraps(handler) def wrapper(event, context): response = handler(event, context) if 'body' in response: try: response['body'] = json.dumps(response['body']) except Exception as exception: return {'statusCode': 500, 'body': str(exception)} return response return wrapper
python
def dump_json_body(handler): """ Automatically serialize response bodies with json.dumps. Returns a 500 error if the response cannot be serialized Usage:: >>> from lambda_decorators import dump_json_body >>> @dump_json_body ... def handler(event, context): ... return {'statusCode': 200, 'body': {'hello': 'world'}} >>> handler({}, object()) {'statusCode': 200, 'body': '{"hello": "world"}'} """ @wraps(handler) def wrapper(event, context): response = handler(event, context) if 'body' in response: try: response['body'] = json.dumps(response['body']) except Exception as exception: return {'statusCode': 500, 'body': str(exception)} return response return wrapper
[ "def", "dump_json_body", "(", "handler", ")", ":", "@", "wraps", "(", "handler", ")", "def", "wrapper", "(", "event", ",", "context", ")", ":", "response", "=", "handler", "(", "event", ",", "context", ")", "if", "'body'", "in", "response", ":", "try", ":", "response", "[", "'body'", "]", "=", "json", ".", "dumps", "(", "response", "[", "'body'", "]", ")", "except", "Exception", "as", "exception", ":", "return", "{", "'statusCode'", ":", "500", ",", "'body'", ":", "str", "(", "exception", ")", "}", "return", "response", "return", "wrapper" ]
Automatically serialize response bodies with json.dumps. Returns a 500 error if the response cannot be serialized. Usage:: >>> from lambda_decorators import dump_json_body >>> @dump_json_body ... def handler(event, context): ... return {'statusCode': 200, 'body': {'hello': 'world'}} >>> handler({}, object()) {'statusCode': 200, 'body': '{"hello": "world"}'}
[ "Automatically", "serialize", "response", "bodies", "with", "json", ".", "dumps", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L401-L425
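A standalone sketch of the serialize-the-body idea; the wrapper name is hypothetical and the 500 fallback mirrors the behaviour described above:

import functools
import json

def serialize_body(handler):
    # json.dumps the 'body' key of the handler's response, if present.
    @functools.wraps(handler)
    def wrapper(event, context):
        response = handler(event, context)
        if 'body' in response:
            try:
                response['body'] = json.dumps(response['body'])
            except (TypeError, ValueError) as exc:
                return {'statusCode': 500, 'body': str(exc)}
        return response
    return wrapper

# serialize_body(lambda e, c: {'body': {'ok': True}})({}, None) -> {'body': '{"ok": true}'}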
7,329
dschep/lambda-decorators
lambda_decorators.py
json_http_resp
def json_http_resp(handler): """ Automatically serialize return value to the body of a successfull HTTP response. Returns a 500 error if the response cannot be serialized Usage:: >>> from lambda_decorators import json_http_resp >>> @json_http_resp ... def handler(event, context): ... return {'hello': 'world'} >>> handler({}, object()) {'statusCode': 200, 'body': '{"hello": "world"}'} in this example, the decorated handler returns: .. code:: python {'statusCode': 200, 'body': '{"hello": "world"}'} """ @wraps(handler) def wrapper(event, context): response = handler(event, context) try: body = json.dumps(response) except Exception as exception: return {'statusCode': 500, 'body': str(exception)} return {'statusCode': 200, 'body': body} return wrapper
python
def json_http_resp(handler): """ Automatically serialize return value to the body of a successfull HTTP response. Returns a 500 error if the response cannot be serialized Usage:: >>> from lambda_decorators import json_http_resp >>> @json_http_resp ... def handler(event, context): ... return {'hello': 'world'} >>> handler({}, object()) {'statusCode': 200, 'body': '{"hello": "world"}'} in this example, the decorated handler returns: .. code:: python {'statusCode': 200, 'body': '{"hello": "world"}'} """ @wraps(handler) def wrapper(event, context): response = handler(event, context) try: body = json.dumps(response) except Exception as exception: return {'statusCode': 500, 'body': str(exception)} return {'statusCode': 200, 'body': body} return wrapper
[ "def", "json_http_resp", "(", "handler", ")", ":", "@", "wraps", "(", "handler", ")", "def", "wrapper", "(", "event", ",", "context", ")", ":", "response", "=", "handler", "(", "event", ",", "context", ")", "try", ":", "body", "=", "json", ".", "dumps", "(", "response", ")", "except", "Exception", "as", "exception", ":", "return", "{", "'statusCode'", ":", "500", ",", "'body'", ":", "str", "(", "exception", ")", "}", "return", "{", "'statusCode'", ":", "200", ",", "'body'", ":", "body", "}", "return", "wrapper" ]
Automatically serialize return value to the body of a successful HTTP response. Returns a 500 error if the response cannot be serialized. Usage:: >>> from lambda_decorators import json_http_resp >>> @json_http_resp ... def handler(event, context): ... return {'hello': 'world'} >>> handler({}, object()) {'statusCode': 200, 'body': '{"hello": "world"}'} In this example, the decorated handler returns: .. code:: python {'statusCode': 200, 'body': '{"hello": "world"}'}
[ "Automatically", "serialize", "return", "value", "to", "the", "body", "of", "a", "successfull", "HTTP", "response", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L428-L459
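The whole-response variant differs only in wrapping the return value itself; again a sketch with assumed names, not the library's code:

import functools
import json

def as_json_response(handler):
    # Wrap an arbitrary return value in a 200 JSON HTTP response.
    @functools.wraps(handler)
    def wrapper(event, context):
        try:
            body = json.dumps(handler(event, context))
        except (TypeError, ValueError) as exc:
            return {'statusCode': 500, 'body': str(exc)}
        return {'statusCode': 200, 'body': body}
    return wrapper

@as_json_response
def handler(event, context):
    return {'hello': 'world'}

# handler({}, None) -> {'statusCode': 200, 'body': '{"hello": "world"}'}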
7,330
dschep/lambda-decorators
lambda_decorators.py
load_json_body
def load_json_body(handler): """ Automatically deserialize event bodies with json.loads. Automatically returns a 400 BAD REQUEST if there is an error while parsing. Usage:: >>> from lambda_decorators import load_json_body >>> @load_json_body ... def handler(event, context): ... return event['body']['foo'] >>> handler({'body': '{"foo": "bar"}'}, object()) 'bar' note that ``event['body']`` is already a dictionary and didn't have to explicitly be parsed. """ @wraps(handler) def wrapper(event, context): if isinstance(event.get('body'), str): try: event['body'] = json.loads(event['body']) except: return {'statusCode': 400, 'body': 'BAD REQUEST'} return handler(event, context) return wrapper
python
def load_json_body(handler): """ Automatically deserialize event bodies with json.loads. Automatically returns a 400 BAD REQUEST if there is an error while parsing. Usage:: >>> from lambda_decorators import load_json_body >>> @load_json_body ... def handler(event, context): ... return event['body']['foo'] >>> handler({'body': '{"foo": "bar"}'}, object()) 'bar' note that ``event['body']`` is already a dictionary and didn't have to explicitly be parsed. """ @wraps(handler) def wrapper(event, context): if isinstance(event.get('body'), str): try: event['body'] = json.loads(event['body']) except: return {'statusCode': 400, 'body': 'BAD REQUEST'} return handler(event, context) return wrapper
[ "def", "load_json_body", "(", "handler", ")", ":", "@", "wraps", "(", "handler", ")", "def", "wrapper", "(", "event", ",", "context", ")", ":", "if", "isinstance", "(", "event", ".", "get", "(", "'body'", ")", ",", "str", ")", ":", "try", ":", "event", "[", "'body'", "]", "=", "json", ".", "loads", "(", "event", "[", "'body'", "]", ")", "except", ":", "return", "{", "'statusCode'", ":", "400", ",", "'body'", ":", "'BAD REQUEST'", "}", "return", "handler", "(", "event", ",", "context", ")", "return", "wrapper" ]
Automatically deserialize event bodies with json.loads. Automatically returns a 400 BAD REQUEST if there is an error while parsing. Usage:: >>> from lambda_decorators import load_json_body >>> @load_json_body ... def handler(event, context): ... return event['body']['foo'] >>> handler({'body': '{"foo": "bar"}'}, object()) 'bar' note that ``event['body']`` is already a dictionary and didn't have to explicitly be parsed.
[ "Automatically", "deserialize", "event", "bodies", "with", "json", ".", "loads", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L462-L489
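The inverse direction, parsing a string body before the handler runs, can be sketched the same way; this version narrows the bare except in the original to json.JSONDecodeError, which is an editorial assumption rather than the library's behaviour:

import functools
import json

def parse_json_body(handler):
    # json.loads a string 'body' before invoking the handler.
    @functools.wraps(handler)
    def wrapper(event, context):
        body = event.get('body')
        if isinstance(body, str):
            try:
                event['body'] = json.loads(body)
            except json.JSONDecodeError:
                return {'statusCode': 400, 'body': 'BAD REQUEST'}
        return handler(event, context)
    return wrapper

# parse_json_body(lambda e, c: e['body']['foo'])({'body': '{"foo": "bar"}'}, None) -> 'bar'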
7,331
dschep/lambda-decorators
lambda_decorators.py
json_schema_validator
def json_schema_validator(request_schema=None, response_schema=None): """ Validate your request & response payloads against a JSONSchema. *NOTE: depends on the* `jsonschema <https://github.com/Julian/jsonschema>`_ *package. If you're using* `serverless-python-requirements <https://github.com/UnitedIncome/serverless-python-requirements>`_ *you're all set. If you cURLed* ``lambda_decorators.py`` *you'll have to install it manually in your service's root directory.* Usage:: >>> from jsonschema import ValidationError >>> from lambda_decorators import json_schema_validator >>> @json_schema_validator(request_schema={ ... 'type': 'object', 'properties': {'price': {'type': 'number'}}}) ... def handler(event, context): ... return event['price'] >>> handler({'price': 'bar'}, object()) {'statusCode': 400, 'body': "RequestValidationError: 'bar' is not of type 'number'"} >>> @json_schema_validator(response_schema={ ... 'type': 'object', 'properties': {'price': {'type': 'number'}}}) ... def handler(event, context): ... return {'price': 'bar'} >>> handler({}, object()) {'statusCode': 500, 'body': "ResponseValidationError: 'bar' is not of type 'number'"} """ def wrapper_wrapper(handler): @wraps(handler) def wrapper(event, context): if request_schema is not None: if jsonschema is None: logger.error('jsonschema is not installed, skipping request validation') else: try: jsonschema.validate(event, request_schema) except jsonschema.ValidationError as exception: return {'statusCode': 400, 'body': 'RequestValidationError: {}'.format( exception.message)} response = handler(event, context) if response_schema is not None: if jsonschema is None: logger.error('jsonschema is not installed, skipping response validation') else: try: jsonschema.validate(response, response_schema) except jsonschema.ValidationError as exception: return {'statusCode': 500, 'body': 'ResponseValidationError: {}'.format( exception.message)} return response return wrapper return wrapper_wrapper
python
def json_schema_validator(request_schema=None, response_schema=None): """ Validate your request & response payloads against a JSONSchema. *NOTE: depends on the* `jsonschema <https://github.com/Julian/jsonschema>`_ *package. If you're using* `serverless-python-requirements <https://github.com/UnitedIncome/serverless-python-requirements>`_ *you're all set. If you cURLed* ``lambda_decorators.py`` *you'll have to install it manually in your service's root directory.* Usage:: >>> from jsonschema import ValidationError >>> from lambda_decorators import json_schema_validator >>> @json_schema_validator(request_schema={ ... 'type': 'object', 'properties': {'price': {'type': 'number'}}}) ... def handler(event, context): ... return event['price'] >>> handler({'price': 'bar'}, object()) {'statusCode': 400, 'body': "RequestValidationError: 'bar' is not of type 'number'"} >>> @json_schema_validator(response_schema={ ... 'type': 'object', 'properties': {'price': {'type': 'number'}}}) ... def handler(event, context): ... return {'price': 'bar'} >>> handler({}, object()) {'statusCode': 500, 'body': "ResponseValidationError: 'bar' is not of type 'number'"} """ def wrapper_wrapper(handler): @wraps(handler) def wrapper(event, context): if request_schema is not None: if jsonschema is None: logger.error('jsonschema is not installed, skipping request validation') else: try: jsonschema.validate(event, request_schema) except jsonschema.ValidationError as exception: return {'statusCode': 400, 'body': 'RequestValidationError: {}'.format( exception.message)} response = handler(event, context) if response_schema is not None: if jsonschema is None: logger.error('jsonschema is not installed, skipping response validation') else: try: jsonschema.validate(response, response_schema) except jsonschema.ValidationError as exception: return {'statusCode': 500, 'body': 'ResponseValidationError: {}'.format( exception.message)} return response return wrapper return wrapper_wrapper
[ "def", "json_schema_validator", "(", "request_schema", "=", "None", ",", "response_schema", "=", "None", ")", ":", "def", "wrapper_wrapper", "(", "handler", ")", ":", "@", "wraps", "(", "handler", ")", "def", "wrapper", "(", "event", ",", "context", ")", ":", "if", "request_schema", "is", "not", "None", ":", "if", "jsonschema", "is", "None", ":", "logger", ".", "error", "(", "'jsonschema is not installed, skipping request validation'", ")", "else", ":", "try", ":", "jsonschema", ".", "validate", "(", "event", ",", "request_schema", ")", "except", "jsonschema", ".", "ValidationError", "as", "exception", ":", "return", "{", "'statusCode'", ":", "400", ",", "'body'", ":", "'RequestValidationError: {}'", ".", "format", "(", "exception", ".", "message", ")", "}", "response", "=", "handler", "(", "event", ",", "context", ")", "if", "response_schema", "is", "not", "None", ":", "if", "jsonschema", "is", "None", ":", "logger", ".", "error", "(", "'jsonschema is not installed, skipping response validation'", ")", "else", ":", "try", ":", "jsonschema", ".", "validate", "(", "response", ",", "response_schema", ")", "except", "jsonschema", ".", "ValidationError", "as", "exception", ":", "return", "{", "'statusCode'", ":", "500", ",", "'body'", ":", "'ResponseValidationError: {}'", ".", "format", "(", "exception", ".", "message", ")", "}", "return", "response", "return", "wrapper", "return", "wrapper_wrapper" ]
Validate your request & response payloads against a JSONSchema. *NOTE: depends on the* `jsonschema <https://github.com/Julian/jsonschema>`_ *package. If you're using* `serverless-python-requirements <https://github.com/UnitedIncome/serverless-python-requirements>`_ *you're all set. If you cURLed* ``lambda_decorators.py`` *you'll have to install it manually in your service's root directory.* Usage:: >>> from jsonschema import ValidationError >>> from lambda_decorators import json_schema_validator >>> @json_schema_validator(request_schema={ ... 'type': 'object', 'properties': {'price': {'type': 'number'}}}) ... def handler(event, context): ... return event['price'] >>> handler({'price': 'bar'}, object()) {'statusCode': 400, 'body': "RequestValidationError: 'bar' is not of type 'number'"} >>> @json_schema_validator(response_schema={ ... 'type': 'object', 'properties': {'price': {'type': 'number'}}}) ... def handler(event, context): ... return {'price': 'bar'} >>> handler({}, object()) {'statusCode': 500, 'body': "ResponseValidationError: 'bar' is not of type 'number'"}
[ "Validate", "your", "request", "&", "response", "payloads", "against", "a", "JSONSchema", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L492-L546
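A reduced sketch of the request-side validation only, assuming the third-party jsonschema package is installed; the response-side branch works symmetrically, and the decorator name here is hypothetical:

import functools
import jsonschema

def validate_request(schema):
    # Reject events that do not match `schema` with a 400 response.
    def decorator(handler):
        @functools.wraps(handler)
        def wrapper(event, context):
            try:
                jsonschema.validate(event, schema)
            except jsonschema.ValidationError as exc:
                return {'statusCode': 400,
                        'body': 'RequestValidationError: {}'.format(exc.message)}
            return handler(event, context)
        return wrapper
    return decorator

@validate_request({'type': 'object', 'properties': {'price': {'type': 'number'}}})
def handler(event, context):
    return event['price']

# handler({'price': 12.5}, None) -> 12.5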
7,332
dschep/lambda-decorators
lambda_decorators.py
no_retry_on_failure
def no_retry_on_failure(handler): """ AWS Lambda retries scheduled lambdas that don't execute succesfully. This detects this by storing requests IDs in memory and exiting early on duplicates. Since this is in memory, don't use it on very frequently scheduled lambdas. It logs a critical message then exits with a statusCode of 200 to avoid further retries. Usage:: >>> import logging, sys >>> from lambda_decorators import no_retry_on_failure, logger >>> logger.addHandler(logging.StreamHandler(stream=sys.stdout)) >>> @no_retry_on_failure ... def scheduled_handler(event, context): ... return {'statusCode': 500} >>> class Context: ... aws_request_id = 1 >>> scheduled_handler({}, Context()) {'statusCode': 500} >>> scheduled_handler({}, Context()) Retry attempt on request id 1 detected. {'statusCode': 200} """ seen_request_ids = set() @wraps(handler) def wrapper(event, context): if context.aws_request_id in seen_request_ids: logger.critical('Retry attempt on request id %s detected.', context.aws_request_id) return {'statusCode': 200} seen_request_ids.add(context.aws_request_id) return handler(event, context) return wrapper
python
def no_retry_on_failure(handler): """ AWS Lambda retries scheduled lambdas that don't execute succesfully. This detects this by storing requests IDs in memory and exiting early on duplicates. Since this is in memory, don't use it on very frequently scheduled lambdas. It logs a critical message then exits with a statusCode of 200 to avoid further retries. Usage:: >>> import logging, sys >>> from lambda_decorators import no_retry_on_failure, logger >>> logger.addHandler(logging.StreamHandler(stream=sys.stdout)) >>> @no_retry_on_failure ... def scheduled_handler(event, context): ... return {'statusCode': 500} >>> class Context: ... aws_request_id = 1 >>> scheduled_handler({}, Context()) {'statusCode': 500} >>> scheduled_handler({}, Context()) Retry attempt on request id 1 detected. {'statusCode': 200} """ seen_request_ids = set() @wraps(handler) def wrapper(event, context): if context.aws_request_id in seen_request_ids: logger.critical('Retry attempt on request id %s detected.', context.aws_request_id) return {'statusCode': 200} seen_request_ids.add(context.aws_request_id) return handler(event, context) return wrapper
[ "def", "no_retry_on_failure", "(", "handler", ")", ":", "seen_request_ids", "=", "set", "(", ")", "@", "wraps", "(", "handler", ")", "def", "wrapper", "(", "event", ",", "context", ")", ":", "if", "context", ".", "aws_request_id", "in", "seen_request_ids", ":", "logger", ".", "critical", "(", "'Retry attempt on request id %s detected.'", ",", "context", ".", "aws_request_id", ")", "return", "{", "'statusCode'", ":", "200", "}", "seen_request_ids", ".", "add", "(", "context", ".", "aws_request_id", ")", "return", "handler", "(", "event", ",", "context", ")", "return", "wrapper" ]
AWS Lambda retries scheduled lambdas that don't execute successfully. This decorator detects retries by storing request IDs in memory and exiting early on duplicates. Since this is in memory, don't use it on very frequently scheduled lambdas. It logs a critical message, then exits with a statusCode of 200 to avoid further retries. Usage:: >>> import logging, sys >>> from lambda_decorators import no_retry_on_failure, logger >>> logger.addHandler(logging.StreamHandler(stream=sys.stdout)) >>> @no_retry_on_failure ... def scheduled_handler(event, context): ... return {'statusCode': 500} >>> class Context: ... aws_request_id = 1 >>> scheduled_handler({}, Context()) {'statusCode': 500} >>> scheduled_handler({}, Context()) Retry attempt on request id 1 detected. {'statusCode': 200}
[ "AWS", "Lambda", "retries", "scheduled", "lambdas", "that", "don", "t", "execute", "succesfully", "." ]
9195914c8afe26843de9968d96dae6a89f061e8a
https://github.com/dschep/lambda-decorators/blob/9195914c8afe26843de9968d96dae6a89f061e8a/lambda_decorators.py#L579-L617
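The retry-suppression trick amounts to remembering request IDs for the lifetime of the warm Lambda container; a bare-bones sketch with hypothetical names:

import functools

def skip_duplicate_requests(handler):
    # Short-circuit repeat invocations that share an aws_request_id.
    seen = set()

    @functools.wraps(handler)
    def wrapper(event, context):
        if context.aws_request_id in seen:
            return {'statusCode': 200}
        seen.add(context.aws_request_id)
        return handler(event, context)
    return wrapper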
7,333
dmarx/psaw
psaw/PushshiftAPI.py
PushshiftAPIMinimal._wrap_thing
def _wrap_thing(self, thing, kind): """Mimic praw.Submission and praw.Comment API""" thing['created'] = self._epoch_utc_to_local(thing['created_utc']) thing['d_'] = copy.deepcopy(thing) ThingType = namedtuple(kind, thing.keys()) thing = ThingType(**thing) return thing
python
def _wrap_thing(self, thing, kind): """Mimic praw.Submission and praw.Comment API""" thing['created'] = self._epoch_utc_to_local(thing['created_utc']) thing['d_'] = copy.deepcopy(thing) ThingType = namedtuple(kind, thing.keys()) thing = ThingType(**thing) return thing
[ "def", "_wrap_thing", "(", "self", ",", "thing", ",", "kind", ")", ":", "thing", "[", "'created'", "]", "=", "self", ".", "_epoch_utc_to_local", "(", "thing", "[", "'created_utc'", "]", ")", "thing", "[", "'d_'", "]", "=", "copy", ".", "deepcopy", "(", "thing", ")", "ThingType", "=", "namedtuple", "(", "kind", ",", "thing", ".", "keys", "(", ")", ")", "thing", "=", "ThingType", "(", "*", "*", "thing", ")", "return", "thing" ]
Mimic praw.Submission and praw.Comment API
[ "Mimic", "praw", ".", "Submission", "and", "praw", ".", "Comment", "API" ]
5702abdd1a0ccd60b115fc4b545eb2c087c56194
https://github.com/dmarx/psaw/blob/5702abdd1a0ccd60b115fc4b545eb2c087c56194/psaw/PushshiftAPI.py#L112-L118
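The namedtuple wrapping used by _wrap_thing is easy to show in isolation; the sample record below is made up, not real Pushshift data:

from collections import namedtuple

record = {'id': 'abc123', 'created_utc': 1500000000, 'score': 42}
Submission = namedtuple('submission', record.keys())
thing = Submission(**record)

# Attribute access now mimics praw objects:
# thing.id -> 'abc123', thing.score -> 42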
7,334
dmarx/psaw
psaw/PushshiftAPI.py
PushshiftAPIMinimal._add_nec_args
def _add_nec_args(self, payload): """Adds 'limit' and 'created_utc' arguments to the payload as necessary.""" if self._limited(payload): # Do nothing I guess? Not sure how paging works on this endpoint... return if 'limit' not in payload: payload['limit'] = self.max_results_per_request if 'sort' not in payload: # Getting weird results if this is not made explicit. Unclear why. payload['sort'] = 'desc' if 'filter' in payload: #and payload.get('created_utc', None) is None: if not isinstance(payload['filter'], list): if isinstance(payload['filter'], str): payload['filter'] = [payload['filter']] else: payload['filter'] = list(payload['filter']) if 'created_utc' not in payload['filter']: payload['filter'].append('created_utc')
python
def _add_nec_args(self, payload): """Adds 'limit' and 'created_utc' arguments to the payload as necessary.""" if self._limited(payload): # Do nothing I guess? Not sure how paging works on this endpoint... return if 'limit' not in payload: payload['limit'] = self.max_results_per_request if 'sort' not in payload: # Getting weird results if this is not made explicit. Unclear why. payload['sort'] = 'desc' if 'filter' in payload: #and payload.get('created_utc', None) is None: if not isinstance(payload['filter'], list): if isinstance(payload['filter'], str): payload['filter'] = [payload['filter']] else: payload['filter'] = list(payload['filter']) if 'created_utc' not in payload['filter']: payload['filter'].append('created_utc')
[ "def", "_add_nec_args", "(", "self", ",", "payload", ")", ":", "if", "self", ".", "_limited", "(", "payload", ")", ":", "# Do nothing I guess? Not sure how paging works on this endpoint...", "return", "if", "'limit'", "not", "in", "payload", ":", "payload", "[", "'limit'", "]", "=", "self", ".", "max_results_per_request", "if", "'sort'", "not", "in", "payload", ":", "# Getting weird results if this is not made explicit. Unclear why.", "payload", "[", "'sort'", "]", "=", "'desc'", "if", "'filter'", "in", "payload", ":", "#and payload.get('created_utc', None) is None:", "if", "not", "isinstance", "(", "payload", "[", "'filter'", "]", ",", "list", ")", ":", "if", "isinstance", "(", "payload", "[", "'filter'", "]", ",", "str", ")", ":", "payload", "[", "'filter'", "]", "=", "[", "payload", "[", "'filter'", "]", "]", "else", ":", "payload", "[", "'filter'", "]", "=", "list", "(", "payload", "[", "'filter'", "]", ")", "if", "'created_utc'", "not", "in", "payload", "[", "'filter'", "]", ":", "payload", "[", "'filter'", "]", ".", "append", "(", "'created_utc'", ")" ]
Adds 'limit' and 'created_utc' arguments to the payload as necessary.
[ "Adds", "limit", "and", "created_utc", "arguments", "to", "the", "payload", "as", "necessary", "." ]
5702abdd1a0ccd60b115fc4b545eb2c087c56194
https://github.com/dmarx/psaw/blob/5702abdd1a0ccd60b115fc4b545eb2c087c56194/psaw/PushshiftAPI.py#L130-L147
7,335
pyroscope/pyrocore
src/pyrocore/scripts/rtmv.py
pretty_path
def pretty_path(path): """ Prettify path for logging. """ path = fmt.to_utf8(path) home_dir = os.path.expanduser("~") if path.startswith(home_dir): path = "~" + path[len(home_dir):] return '"%s"' % (path,)
python
def pretty_path(path): """ Prettify path for logging. """ path = fmt.to_utf8(path) home_dir = os.path.expanduser("~") if path.startswith(home_dir): path = "~" + path[len(home_dir):] return '"%s"' % (path,)
[ "def", "pretty_path", "(", "path", ")", ":", "path", "=", "fmt", ".", "to_utf8", "(", "path", ")", "home_dir", "=", "os", ".", "path", ".", "expanduser", "(", "\"~\"", ")", "if", "path", ".", "startswith", "(", "home_dir", ")", ":", "path", "=", "\"~\"", "+", "path", "[", "len", "(", "home_dir", ")", ":", "]", "return", "'\"%s\"'", "%", "(", "path", ",", ")" ]
Prettify path for logging.
[ "Prettify", "path", "for", "logging", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtmv.py#L31-L38
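The home-directory abbreviation is standard-library only and simple to reproduce; a sketch without pyrocore's fmt helpers:

import os

def abbreviate_home(path):
    # Replace a leading home directory with '~' and quote the result.
    home = os.path.expanduser("~")
    if path.startswith(home):
        path = "~" + path[len(home):]
    return '"%s"' % path

# abbreviate_home(os.path.expanduser("~/Downloads")) -> '"~/Downloads"'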
7,336
pyroscope/pyrocore
src/pyrocore/scripts/rtmv.py
RtorrentMove.guarded
def guarded(self, call, *args): """ Catch exceptions thrown by filesystem calls, and don't really execute them in dry-run mode. """ self.LOG.debug('%s(%s)' % ( call.__name__, ', '.join([pretty_path(i) for i in args]), )) if not self.options.dry_run: try: call(*args) except (EnvironmentError, UnicodeError) as exc: self.fatal('%s(%s) failed [%s]' % ( call.__name__, ', '.join([pretty_path(i) for i in args]), exc, ))
python
def guarded(self, call, *args): """ Catch exceptions thrown by filesystem calls, and don't really execute them in dry-run mode. """ self.LOG.debug('%s(%s)' % ( call.__name__, ', '.join([pretty_path(i) for i in args]), )) if not self.options.dry_run: try: call(*args) except (EnvironmentError, UnicodeError) as exc: self.fatal('%s(%s) failed [%s]' % ( call.__name__, ', '.join([pretty_path(i) for i in args]), exc, ))
[ "def", "guarded", "(", "self", ",", "call", ",", "*", "args", ")", ":", "self", ".", "LOG", ".", "debug", "(", "'%s(%s)'", "%", "(", "call", ".", "__name__", ",", "', '", ".", "join", "(", "[", "pretty_path", "(", "i", ")", "for", "i", "in", "args", "]", ")", ",", ")", ")", "if", "not", "self", ".", "options", ".", "dry_run", ":", "try", ":", "call", "(", "*", "args", ")", "except", "(", "EnvironmentError", ",", "UnicodeError", ")", "as", "exc", ":", "self", ".", "fatal", "(", "'%s(%s) failed [%s]'", "%", "(", "call", ".", "__name__", ",", "', '", ".", "join", "(", "[", "pretty_path", "(", "i", ")", "for", "i", "in", "args", "]", ")", ",", "exc", ",", ")", ")" ]
Catch exceptions thrown by filesystem calls, and don't really execute them in dry-run mode.
[ "Catch", "exceptions", "thrown", "by", "filesystem", "calls", "and", "don", "t", "really", "execute", "them", "in", "dry", "-", "run", "mode", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtmv.py#L82-L95
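The dry-run pattern in guarded(), where the intended call is logged, executed only outside dry-run mode, and filesystem errors become a fatal exit, can be sketched without the script scaffolding (the function name and error handling below are assumptions):

import logging
import os

log = logging.getLogger(__name__)

def guarded_call(call, *args, dry_run=False):
    # Log a filesystem call and only execute it when dry_run is False.
    log.debug("%s(%s)", call.__name__, ", ".join(repr(a) for a in args))
    if dry_run:
        return
    try:
        call(*args)
    except (OSError, UnicodeError) as exc:
        raise SystemExit("%s(%s) failed [%s]" % (call.__name__, args, exc))

# guarded_call(os.rename, "a.torrent", "b.torrent", dry_run=True)  # logged, not executed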
7,337
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
run
def run(): """ Module level test. """ logging.basicConfig(level=logging.DEBUG) load_config.ConfigLoader().load() config.debug = True print(repr(config.engine.item(sys.argv[1])))
python
def run(): """ Module level test. """ logging.basicConfig(level=logging.DEBUG) load_config.ConfigLoader().load() config.debug = True print(repr(config.engine.item(sys.argv[1])))
[ "def", "run", "(", ")", ":", "logging", ".", "basicConfig", "(", "level", "=", "logging", ".", "DEBUG", ")", "load_config", ".", "ConfigLoader", "(", ")", ".", "load", "(", ")", "config", ".", "debug", "=", "True", "print", "(", "repr", "(", "config", ".", "engine", ".", "item", "(", "sys", ".", "argv", "[", "1", "]", ")", ")", ")" ]
Module level test.
[ "Module", "level", "test", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L848-L854
7,338
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
RtorrentItem._make_it_so
def _make_it_so(self, command, calls, *args, **kwargs): """ Perform some error-checked XMLRPC calls. """ observer = kwargs.pop('observer', False) args = (self._fields["hash"],) + args try: for call in calls: self._engine.LOG.debug("%s%s torrent #%s (%s)" % ( command[0].upper(), command[1:], self._fields["hash"], call)) if call.startswith(':') or call[:2].endswith('.'): namespace = self._engine._rpc else: namespace = self._engine._rpc.d result = getattr(namespace, call.lstrip(':'))(*args) if observer: observer(result) except xmlrpc.ERRORS as exc: raise error.EngineError("While %s torrent #%s: %s" % (command, self._fields["hash"], exc))
python
def _make_it_so(self, command, calls, *args, **kwargs): """ Perform some error-checked XMLRPC calls. """ observer = kwargs.pop('observer', False) args = (self._fields["hash"],) + args try: for call in calls: self._engine.LOG.debug("%s%s torrent #%s (%s)" % ( command[0].upper(), command[1:], self._fields["hash"], call)) if call.startswith(':') or call[:2].endswith('.'): namespace = self._engine._rpc else: namespace = self._engine._rpc.d result = getattr(namespace, call.lstrip(':'))(*args) if observer: observer(result) except xmlrpc.ERRORS as exc: raise error.EngineError("While %s torrent #%s: %s" % (command, self._fields["hash"], exc))
[ "def", "_make_it_so", "(", "self", ",", "command", ",", "calls", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "observer", "=", "kwargs", ".", "pop", "(", "'observer'", ",", "False", ")", "args", "=", "(", "self", ".", "_fields", "[", "\"hash\"", "]", ",", ")", "+", "args", "try", ":", "for", "call", "in", "calls", ":", "self", ".", "_engine", ".", "LOG", ".", "debug", "(", "\"%s%s torrent #%s (%s)\"", "%", "(", "command", "[", "0", "]", ".", "upper", "(", ")", ",", "command", "[", "1", ":", "]", ",", "self", ".", "_fields", "[", "\"hash\"", "]", ",", "call", ")", ")", "if", "call", ".", "startswith", "(", "':'", ")", "or", "call", "[", ":", "2", "]", ".", "endswith", "(", "'.'", ")", ":", "namespace", "=", "self", ".", "_engine", ".", "_rpc", "else", ":", "namespace", "=", "self", ".", "_engine", ".", "_rpc", ".", "d", "result", "=", "getattr", "(", "namespace", ",", "call", ".", "lstrip", "(", "':'", ")", ")", "(", "*", "args", ")", "if", "observer", ":", "observer", "(", "result", ")", "except", "xmlrpc", ".", "ERRORS", "as", "exc", ":", "raise", "error", ".", "EngineError", "(", "\"While %s torrent #%s: %s\"", "%", "(", "command", ",", "self", ".", "_fields", "[", "\"hash\"", "]", ",", "exc", ")", ")" ]
Perform some error-checked XMLRPC calls.
[ "Perform", "some", "error", "-", "checked", "XMLRPC", "calls", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L60-L77
7,339
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
RtorrentItem.fetch
def fetch(self, name, engine_name=None): """ Get a field on demand. """ # TODO: Get each on-demand field in a multicall for all other items, since # we likely need it anyway; another (more easy) way would be to pre-fetch dynamically # with the list of fields from filters and output formats try: return self._fields[name] except KeyError: if isinstance(name, (int, long)): name = "custom_%d" % name if name == "done": val = float(self.fetch("completed_chunks")) / self.fetch("size_chunks") elif name == "files": val = self._get_files() elif name.startswith("kind_") and name[5:].isdigit(): val = self._get_kind(int(name[5:], 10)) elif name.startswith("custom_"): key = name[7:] try: if len(key) == 1 and key in "12345": val = getattr(self._engine._rpc.d, "custom"+key)(self._fields["hash"]) else: val = self._engine._rpc.d.custom(self._fields["hash"], key) except xmlrpc.ERRORS as exc: raise error.EngineError("While accessing field %r: %s" % (name, exc)) else: getter_name = engine_name if engine_name else RtorrentEngine.PYRO2RT_MAPPING.get(name, name) if getter_name[0] == '=': getter_name = getter_name[1:] else: getter_name = "get_" + getter_name getter = getattr(self._engine._rpc.d, getter_name) try: val = getter(self._fields["hash"]) except xmlrpc.ERRORS as exc: raise error.EngineError("While accessing field %r: %s" % (name, exc)) # TODO: Currently, NOT caching makes no sense; in a demon, it does! #if isinstance(FieldDefinition.FIELDS.get(name), engine.ConstantField): self._fields[name] = val return val
python
def fetch(self, name, engine_name=None): """ Get a field on demand. """ # TODO: Get each on-demand field in a multicall for all other items, since # we likely need it anyway; another (more easy) way would be to pre-fetch dynamically # with the list of fields from filters and output formats try: return self._fields[name] except KeyError: if isinstance(name, (int, long)): name = "custom_%d" % name if name == "done": val = float(self.fetch("completed_chunks")) / self.fetch("size_chunks") elif name == "files": val = self._get_files() elif name.startswith("kind_") and name[5:].isdigit(): val = self._get_kind(int(name[5:], 10)) elif name.startswith("custom_"): key = name[7:] try: if len(key) == 1 and key in "12345": val = getattr(self._engine._rpc.d, "custom"+key)(self._fields["hash"]) else: val = self._engine._rpc.d.custom(self._fields["hash"], key) except xmlrpc.ERRORS as exc: raise error.EngineError("While accessing field %r: %s" % (name, exc)) else: getter_name = engine_name if engine_name else RtorrentEngine.PYRO2RT_MAPPING.get(name, name) if getter_name[0] == '=': getter_name = getter_name[1:] else: getter_name = "get_" + getter_name getter = getattr(self._engine._rpc.d, getter_name) try: val = getter(self._fields["hash"]) except xmlrpc.ERRORS as exc: raise error.EngineError("While accessing field %r: %s" % (name, exc)) # TODO: Currently, NOT caching makes no sense; in a demon, it does! #if isinstance(FieldDefinition.FIELDS.get(name), engine.ConstantField): self._fields[name] = val return val
[ "def", "fetch", "(", "self", ",", "name", ",", "engine_name", "=", "None", ")", ":", "# TODO: Get each on-demand field in a multicall for all other items, since", "# we likely need it anyway; another (more easy) way would be to pre-fetch dynamically", "# with the list of fields from filters and output formats", "try", ":", "return", "self", ".", "_fields", "[", "name", "]", "except", "KeyError", ":", "if", "isinstance", "(", "name", ",", "(", "int", ",", "long", ")", ")", ":", "name", "=", "\"custom_%d\"", "%", "name", "if", "name", "==", "\"done\"", ":", "val", "=", "float", "(", "self", ".", "fetch", "(", "\"completed_chunks\"", ")", ")", "/", "self", ".", "fetch", "(", "\"size_chunks\"", ")", "elif", "name", "==", "\"files\"", ":", "val", "=", "self", ".", "_get_files", "(", ")", "elif", "name", ".", "startswith", "(", "\"kind_\"", ")", "and", "name", "[", "5", ":", "]", ".", "isdigit", "(", ")", ":", "val", "=", "self", ".", "_get_kind", "(", "int", "(", "name", "[", "5", ":", "]", ",", "10", ")", ")", "elif", "name", ".", "startswith", "(", "\"custom_\"", ")", ":", "key", "=", "name", "[", "7", ":", "]", "try", ":", "if", "len", "(", "key", ")", "==", "1", "and", "key", "in", "\"12345\"", ":", "val", "=", "getattr", "(", "self", ".", "_engine", ".", "_rpc", ".", "d", ",", "\"custom\"", "+", "key", ")", "(", "self", ".", "_fields", "[", "\"hash\"", "]", ")", "else", ":", "val", "=", "self", ".", "_engine", ".", "_rpc", ".", "d", ".", "custom", "(", "self", ".", "_fields", "[", "\"hash\"", "]", ",", "key", ")", "except", "xmlrpc", ".", "ERRORS", "as", "exc", ":", "raise", "error", ".", "EngineError", "(", "\"While accessing field %r: %s\"", "%", "(", "name", ",", "exc", ")", ")", "else", ":", "getter_name", "=", "engine_name", "if", "engine_name", "else", "RtorrentEngine", ".", "PYRO2RT_MAPPING", ".", "get", "(", "name", ",", "name", ")", "if", "getter_name", "[", "0", "]", "==", "'='", ":", "getter_name", "=", "getter_name", "[", "1", ":", "]", "else", ":", "getter_name", "=", "\"get_\"", "+", "getter_name", "getter", "=", "getattr", "(", "self", ".", "_engine", ".", "_rpc", ".", "d", ",", "getter_name", ")", "try", ":", "val", "=", "getter", "(", "self", ".", "_fields", "[", "\"hash\"", "]", ")", "except", "xmlrpc", ".", "ERRORS", "as", "exc", ":", "raise", "error", ".", "EngineError", "(", "\"While accessing field %r: %s\"", "%", "(", "name", ",", "exc", ")", ")", "# TODO: Currently, NOT caching makes no sense; in a demon, it does!", "#if isinstance(FieldDefinition.FIELDS.get(name), engine.ConstantField):", "self", ".", "_fields", "[", "name", "]", "=", "val", "return", "val" ]
Get a field on demand.
[ "Get", "a", "field", "on", "demand", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L167-L211
7,340
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
RtorrentItem.datapath
def datapath(self): """ Get an item's data path. """ path = self._fields['path'] if not path: # stopped item with no base_dir? path = self.fetch('directory') if path and not self._fields['is_multi_file']: path = os.path.join(path, self._fields['name']) return os.path.expanduser(fmt.to_unicode(path))
python
def datapath(self): """ Get an item's data path. """ path = self._fields['path'] if not path: # stopped item with no base_dir? path = self.fetch('directory') if path and not self._fields['is_multi_file']: path = os.path.join(path, self._fields['name']) return os.path.expanduser(fmt.to_unicode(path))
[ "def", "datapath", "(", "self", ")", ":", "path", "=", "self", ".", "_fields", "[", "'path'", "]", "if", "not", "path", ":", "# stopped item with no base_dir?", "path", "=", "self", ".", "fetch", "(", "'directory'", ")", "if", "path", "and", "not", "self", ".", "_fields", "[", "'is_multi_file'", "]", ":", "path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "self", ".", "_fields", "[", "'name'", "]", ")", "return", "os", ".", "path", ".", "expanduser", "(", "fmt", ".", "to_unicode", "(", "path", ")", ")" ]
Get an item's data path.
[ "Get", "an", "item", "s", "data", "path", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L214-L222
7,341
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
RtorrentItem.announce_urls
def announce_urls(self, default=[]): # pylint: disable=dangerous-default-value """ Get a list of all announce URLs. Returns `default` if no trackers are found at all. """ try: response = self._engine._rpc.t.multicall(self._fields["hash"], 0, "t.url=", "t.is_enabled=") except xmlrpc.ERRORS as exc: raise error.EngineError("While getting announce URLs for #%s: %s" % (self._fields["hash"], exc)) if response: return [i[0] for i in response if i[1]] else: return default
python
def announce_urls(self, default=[]): # pylint: disable=dangerous-default-value """ Get a list of all announce URLs. Returns `default` if no trackers are found at all. """ try: response = self._engine._rpc.t.multicall(self._fields["hash"], 0, "t.url=", "t.is_enabled=") except xmlrpc.ERRORS as exc: raise error.EngineError("While getting announce URLs for #%s: %s" % (self._fields["hash"], exc)) if response: return [i[0] for i in response if i[1]] else: return default
[ "def", "announce_urls", "(", "self", ",", "default", "=", "[", "]", ")", ":", "# pylint: disable=dangerous-default-value", "try", ":", "response", "=", "self", ".", "_engine", ".", "_rpc", ".", "t", ".", "multicall", "(", "self", ".", "_fields", "[", "\"hash\"", "]", ",", "0", ",", "\"t.url=\"", ",", "\"t.is_enabled=\"", ")", "except", "xmlrpc", ".", "ERRORS", "as", "exc", ":", "raise", "error", ".", "EngineError", "(", "\"While getting announce URLs for #%s: %s\"", "%", "(", "self", ".", "_fields", "[", "\"hash\"", "]", ",", "exc", ")", ")", "if", "response", ":", "return", "[", "i", "[", "0", "]", "for", "i", "in", "response", "if", "i", "[", "1", "]", "]", "else", ":", "return", "default" ]
Get a list of all announce URLs. Returns `default` if no trackers are found at all.
[ "Get", "a", "list", "of", "all", "announce", "URLs", ".", "Returns", "default", "if", "no", "trackers", "are", "found", "at", "all", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L225-L237
7,342
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
RtorrentItem.tag
def tag(self, tags): """ Add or remove tags. """ # Get tag list and add/remove given tags tags = tags.lower() previous = self.tagged tagset = previous.copy() for tag in tags.replace(',', ' ').split(): if tag.startswith('-'): tagset.discard(tag[1:]) elif tag.startswith('+'): tagset.add(tag[1:]) else: tagset.add(tag) # Write back new tagset, if changed tagset.discard('') if tagset != previous: tagset = ' '.join(sorted(tagset)) self._make_it_so("setting tags %r on" % (tagset,), ["custom.set"], "tags", tagset) self._fields["custom_tags"] = tagset
python
def tag(self, tags): """ Add or remove tags. """ # Get tag list and add/remove given tags tags = tags.lower() previous = self.tagged tagset = previous.copy() for tag in tags.replace(',', ' ').split(): if tag.startswith('-'): tagset.discard(tag[1:]) elif tag.startswith('+'): tagset.add(tag[1:]) else: tagset.add(tag) # Write back new tagset, if changed tagset.discard('') if tagset != previous: tagset = ' '.join(sorted(tagset)) self._make_it_so("setting tags %r on" % (tagset,), ["custom.set"], "tags", tagset) self._fields["custom_tags"] = tagset
[ "def", "tag", "(", "self", ",", "tags", ")", ":", "# Get tag list and add/remove given tags", "tags", "=", "tags", ".", "lower", "(", ")", "previous", "=", "self", ".", "tagged", "tagset", "=", "previous", ".", "copy", "(", ")", "for", "tag", "in", "tags", ".", "replace", "(", "','", ",", "' '", ")", ".", "split", "(", ")", ":", "if", "tag", ".", "startswith", "(", "'-'", ")", ":", "tagset", ".", "discard", "(", "tag", "[", "1", ":", "]", ")", "elif", "tag", ".", "startswith", "(", "'+'", ")", ":", "tagset", ".", "add", "(", "tag", "[", "1", ":", "]", ")", "else", ":", "tagset", ".", "add", "(", "tag", ")", "# Write back new tagset, if changed", "tagset", ".", "discard", "(", "''", ")", "if", "tagset", "!=", "previous", ":", "tagset", "=", "' '", ".", "join", "(", "sorted", "(", "tagset", ")", ")", "self", ".", "_make_it_so", "(", "\"setting tags %r on\"", "%", "(", "tagset", ",", ")", ",", "[", "\"custom.set\"", "]", ",", "\"tags\"", ",", "tagset", ")", "self", ".", "_fields", "[", "\"custom_tags\"", "]", "=", "tagset" ]
Add or remove tags.
[ "Add", "or", "remove", "tags", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L264-L284
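The tag grammar used above (a leading '+' adds, '-' removes, a bare word adds) is compact enough to demonstrate on a plain set; a sketch independent of rTorrent:

def apply_tags(current, spec):
    # Apply a '+add -remove plain' style tag spec to a set of tags.
    tags = set(current)
    for token in spec.lower().replace(',', ' ').split():
        if token.startswith('-'):
            tags.discard(token[1:])
        elif token.startswith('+'):
            tags.add(token[1:])
        else:
            tags.add(token)
    tags.discard('')
    return tags

# apply_tags({'tv', 'seen'}, '+hd -seen movies') -> {'hd', 'movies', 'tv'}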
7,343
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
RtorrentItem.set_throttle
def set_throttle(self, name): """ Assign to throttle group. """ if name.lower() == "null": name = "NULL" if name.lower() == "none": name = '' if name not in self._engine.known_throttle_names: if self._engine._rpc.throttle.up.max(xmlrpc.NOHASH, name) == -1: if self._engine._rpc.throttle.down.max(xmlrpc.NOHASH, name) == -1: raise error.UserError("Unknown throttle name '{}'".format(name)) self._engine.known_throttle_names.add(name) if (name or "NONE") == self.throttle: self._engine.LOG.debug("Keeping throttle %r on torrent #%s" % (self.throttle, self._fields["hash"])) return active = self.is_active if active: self._engine.LOG.debug("Torrent #%s stopped for throttling" % (self._fields["hash"],)) self.stop() self._make_it_so("setting throttle %r on" % (name,), ["throttle_name.set"], name) if active: self._engine.LOG.debug("Torrent #%s restarted after throttling" % (self._fields["hash"],)) self.start()
python
def set_throttle(self, name): """ Assign to throttle group. """ if name.lower() == "null": name = "NULL" if name.lower() == "none": name = '' if name not in self._engine.known_throttle_names: if self._engine._rpc.throttle.up.max(xmlrpc.NOHASH, name) == -1: if self._engine._rpc.throttle.down.max(xmlrpc.NOHASH, name) == -1: raise error.UserError("Unknown throttle name '{}'".format(name)) self._engine.known_throttle_names.add(name) if (name or "NONE") == self.throttle: self._engine.LOG.debug("Keeping throttle %r on torrent #%s" % (self.throttle, self._fields["hash"])) return active = self.is_active if active: self._engine.LOG.debug("Torrent #%s stopped for throttling" % (self._fields["hash"],)) self.stop() self._make_it_so("setting throttle %r on" % (name,), ["throttle_name.set"], name) if active: self._engine.LOG.debug("Torrent #%s restarted after throttling" % (self._fields["hash"],)) self.start()
[ "def", "set_throttle", "(", "self", ",", "name", ")", ":", "if", "name", ".", "lower", "(", ")", "==", "\"null\"", ":", "name", "=", "\"NULL\"", "if", "name", ".", "lower", "(", ")", "==", "\"none\"", ":", "name", "=", "''", "if", "name", "not", "in", "self", ".", "_engine", ".", "known_throttle_names", ":", "if", "self", ".", "_engine", ".", "_rpc", ".", "throttle", ".", "up", ".", "max", "(", "xmlrpc", ".", "NOHASH", ",", "name", ")", "==", "-", "1", ":", "if", "self", ".", "_engine", ".", "_rpc", ".", "throttle", ".", "down", ".", "max", "(", "xmlrpc", ".", "NOHASH", ",", "name", ")", "==", "-", "1", ":", "raise", "error", ".", "UserError", "(", "\"Unknown throttle name '{}'\"", ".", "format", "(", "name", ")", ")", "self", ".", "_engine", ".", "known_throttle_names", ".", "add", "(", "name", ")", "if", "(", "name", "or", "\"NONE\"", ")", "==", "self", ".", "throttle", ":", "self", ".", "_engine", ".", "LOG", ".", "debug", "(", "\"Keeping throttle %r on torrent #%s\"", "%", "(", "self", ".", "throttle", ",", "self", ".", "_fields", "[", "\"hash\"", "]", ")", ")", "return", "active", "=", "self", ".", "is_active", "if", "active", ":", "self", ".", "_engine", ".", "LOG", ".", "debug", "(", "\"Torrent #%s stopped for throttling\"", "%", "(", "self", ".", "_fields", "[", "\"hash\"", "]", ",", ")", ")", "self", ".", "stop", "(", ")", "self", ".", "_make_it_so", "(", "\"setting throttle %r on\"", "%", "(", "name", ",", ")", ",", "[", "\"throttle_name.set\"", "]", ",", "name", ")", "if", "active", ":", "self", ".", "_engine", ".", "LOG", ".", "debug", "(", "\"Torrent #%s restarted after throttling\"", "%", "(", "self", ".", "_fields", "[", "\"hash\"", "]", ",", ")", ")", "self", ".", "start", "(", ")" ]
Assign to throttle group.
[ "Assign", "to", "throttle", "group", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L287-L312
7,344
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
RtorrentItem.purge
def purge(self): """ Delete PARTIAL data files and remove torrent from client. """ def partial_file(item): "Filter out partial files" #print "???", repr(item) return item.completed_chunks < item.size_chunks self.cull(file_filter=partial_file, attrs=["get_completed_chunks", "get_size_chunks"])
python
def purge(self): """ Delete PARTIAL data files and remove torrent from client. """ def partial_file(item): "Filter out partial files" #print "???", repr(item) return item.completed_chunks < item.size_chunks self.cull(file_filter=partial_file, attrs=["get_completed_chunks", "get_size_chunks"])
[ "def", "purge", "(", "self", ")", ":", "def", "partial_file", "(", "item", ")", ":", "\"Filter out partial files\"", "#print \"???\", repr(item)", "return", "item", ".", "completed_chunks", "<", "item", ".", "size_chunks", "self", ".", "cull", "(", "file_filter", "=", "partial_file", ",", "attrs", "=", "[", "\"get_completed_chunks\"", ",", "\"get_size_chunks\"", "]", ")" ]
Delete PARTIAL data files and remove torrent from client.
[ "Delete", "PARTIAL", "data", "files", "and", "remove", "torrent", "from", "client", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L388-L396
7,345
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
RtorrentEngine.load_config
def load_config(self, namespace=None, rcfile=None): """ Load file given in "rcfile". """ if namespace is None: namespace = config if namespace.scgi_url: return # already have the connection to rTorrent # Get and check config file name if not rcfile: rcfile = getattr(config, "rtorrent_rc", None) if not rcfile: raise error.UserError("No 'rtorrent_rc' path defined in configuration!") if not os.path.isfile(rcfile): raise error.UserError("Config file %r doesn't exist!" % (rcfile,)) # Parse the file self.LOG.debug("Loading rtorrent config from %r" % (rcfile,)) rc_vals = Bunch(scgi_local='', scgi_port = '') with open(rcfile) as handle: continued = False for line in handle.readlines(): # Skip comments, continuations, and empty lines line = line.strip() continued, was_continued = line.endswith('\\'), continued if not line or was_continued or line.startswith("#"): continue # Be lenient about errors, after all it's not our own config file try: key, val = line.split("=", 1) except ValueError: self.LOG.warning("Ignored invalid line %r in %r!" % (line, rcfile)) continue key, val = key.strip(), val.strip() key = self.RTORRENT_RC_ALIASES.get(key, key).replace('.', '_') # Copy values we're interested in if key in self.RTORRENT_RC_KEYS: self.LOG.debug("rtorrent.rc: %s = %s" % (key, val)) rc_vals[key] = val # Validate fields if rc_vals.scgi_local: rc_vals.scgi_local = os.path.expanduser(rc_vals.scgi_local) if rc_vals.scgi_local.startswith('/'): rc_vals.scgi_local = "scgi://" + rc_vals.scgi_local if rc_vals.scgi_port and not rc_vals.scgi_port.startswith("scgi://"): rc_vals.scgi_port = "scgi://" + rc_vals.scgi_port # Prefer UNIX domain sockets over TCP sockets namespace.scgi_url = rc_vals.scgi_local or rc_vals.scgi_port
python
def load_config(self, namespace=None, rcfile=None): """ Load file given in "rcfile". """ if namespace is None: namespace = config if namespace.scgi_url: return # already have the connection to rTorrent # Get and check config file name if not rcfile: rcfile = getattr(config, "rtorrent_rc", None) if not rcfile: raise error.UserError("No 'rtorrent_rc' path defined in configuration!") if not os.path.isfile(rcfile): raise error.UserError("Config file %r doesn't exist!" % (rcfile,)) # Parse the file self.LOG.debug("Loading rtorrent config from %r" % (rcfile,)) rc_vals = Bunch(scgi_local='', scgi_port = '') with open(rcfile) as handle: continued = False for line in handle.readlines(): # Skip comments, continuations, and empty lines line = line.strip() continued, was_continued = line.endswith('\\'), continued if not line or was_continued or line.startswith("#"): continue # Be lenient about errors, after all it's not our own config file try: key, val = line.split("=", 1) except ValueError: self.LOG.warning("Ignored invalid line %r in %r!" % (line, rcfile)) continue key, val = key.strip(), val.strip() key = self.RTORRENT_RC_ALIASES.get(key, key).replace('.', '_') # Copy values we're interested in if key in self.RTORRENT_RC_KEYS: self.LOG.debug("rtorrent.rc: %s = %s" % (key, val)) rc_vals[key] = val # Validate fields if rc_vals.scgi_local: rc_vals.scgi_local = os.path.expanduser(rc_vals.scgi_local) if rc_vals.scgi_local.startswith('/'): rc_vals.scgi_local = "scgi://" + rc_vals.scgi_local if rc_vals.scgi_port and not rc_vals.scgi_port.startswith("scgi://"): rc_vals.scgi_port = "scgi://" + rc_vals.scgi_port # Prefer UNIX domain sockets over TCP sockets namespace.scgi_url = rc_vals.scgi_local or rc_vals.scgi_port
[ "def", "load_config", "(", "self", ",", "namespace", "=", "None", ",", "rcfile", "=", "None", ")", ":", "if", "namespace", "is", "None", ":", "namespace", "=", "config", "if", "namespace", ".", "scgi_url", ":", "return", "# already have the connection to rTorrent", "# Get and check config file name", "if", "not", "rcfile", ":", "rcfile", "=", "getattr", "(", "config", ",", "\"rtorrent_rc\"", ",", "None", ")", "if", "not", "rcfile", ":", "raise", "error", ".", "UserError", "(", "\"No 'rtorrent_rc' path defined in configuration!\"", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "rcfile", ")", ":", "raise", "error", ".", "UserError", "(", "\"Config file %r doesn't exist!\"", "%", "(", "rcfile", ",", ")", ")", "# Parse the file", "self", ".", "LOG", ".", "debug", "(", "\"Loading rtorrent config from %r\"", "%", "(", "rcfile", ",", ")", ")", "rc_vals", "=", "Bunch", "(", "scgi_local", "=", "''", ",", "scgi_port", "=", "''", ")", "with", "open", "(", "rcfile", ")", "as", "handle", ":", "continued", "=", "False", "for", "line", "in", "handle", ".", "readlines", "(", ")", ":", "# Skip comments, continuations, and empty lines", "line", "=", "line", ".", "strip", "(", ")", "continued", ",", "was_continued", "=", "line", ".", "endswith", "(", "'\\\\'", ")", ",", "continued", "if", "not", "line", "or", "was_continued", "or", "line", ".", "startswith", "(", "\"#\"", ")", ":", "continue", "# Be lenient about errors, after all it's not our own config file", "try", ":", "key", ",", "val", "=", "line", ".", "split", "(", "\"=\"", ",", "1", ")", "except", "ValueError", ":", "self", ".", "LOG", ".", "warning", "(", "\"Ignored invalid line %r in %r!\"", "%", "(", "line", ",", "rcfile", ")", ")", "continue", "key", ",", "val", "=", "key", ".", "strip", "(", ")", ",", "val", ".", "strip", "(", ")", "key", "=", "self", ".", "RTORRENT_RC_ALIASES", ".", "get", "(", "key", ",", "key", ")", ".", "replace", "(", "'.'", ",", "'_'", ")", "# Copy values we're interested in", "if", "key", "in", "self", ".", "RTORRENT_RC_KEYS", ":", "self", ".", "LOG", ".", "debug", "(", "\"rtorrent.rc: %s = %s\"", "%", "(", "key", ",", "val", ")", ")", "rc_vals", "[", "key", "]", "=", "val", "# Validate fields", "if", "rc_vals", ".", "scgi_local", ":", "rc_vals", ".", "scgi_local", "=", "os", ".", "path", ".", "expanduser", "(", "rc_vals", ".", "scgi_local", ")", "if", "rc_vals", ".", "scgi_local", ".", "startswith", "(", "'/'", ")", ":", "rc_vals", ".", "scgi_local", "=", "\"scgi://\"", "+", "rc_vals", ".", "scgi_local", "if", "rc_vals", ".", "scgi_port", "and", "not", "rc_vals", ".", "scgi_port", ".", "startswith", "(", "\"scgi://\"", ")", ":", "rc_vals", ".", "scgi_port", "=", "\"scgi://\"", "+", "rc_vals", ".", "scgi_port", "# Prefer UNIX domain sockets over TCP sockets", "namespace", ".", "scgi_url", "=", "rc_vals", ".", "scgi_local", "or", "rc_vals", ".", "scgi_port" ]
Load file given in "rcfile".
[ "Load", "file", "given", "in", "rcfile", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L577-L628
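The rc-file handling above boils down to lenient key=value parsing with comment and continuation skipping. A minimal, self-contained sketch of that idea follows; the file name and the key set are hypothetical and the alias mapping is omitted, so this is not pyrocore's actual loader:

import os

RC_KEYS = {"scgi_local", "scgi_port"}          # hypothetical subset of keys of interest

def parse_rc(path):
    """Collect interesting key=value pairs from an rtorrent.rc-style file."""
    values = {}
    continued = False
    with open(os.path.expanduser(path)) as handle:
        for line in handle:
            line = line.strip()
            # remember whether THIS line ends in a continuation, and skip a line
            # that merely continues the previous one
            continued, was_continued = line.endswith("\\"), continued
            if not line or was_continued or line.startswith("#"):
                continue
            try:
                key, val = line.split("=", 1)
            except ValueError:
                continue                        # be lenient about malformed lines
            key = key.strip().replace(".", "_")
            if key in RC_KEYS:
                values[key] = val.strip()
    return values

# parse_rc("~/.rtorrent.rc")  ->  e.g. {'scgi_local': '~/rtorrent/.scgi_socket'}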
7,346
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
RtorrentEngine._resolve_viewname
def _resolve_viewname(self, viewname): """ Check for special view names and return existing rTorrent one. """ if viewname == "-": try: # Only works with rTorrent-PS at this time! viewname = self.open().ui.current_view() except xmlrpc.ERRORS as exc: raise error.EngineError("Can't get name of current view: %s" % (exc)) return viewname
python
def _resolve_viewname(self, viewname): """ Check for special view names and return existing rTorrent one. """ if viewname == "-": try: # Only works with rTorrent-PS at this time! viewname = self.open().ui.current_view() except xmlrpc.ERRORS as exc: raise error.EngineError("Can't get name of current view: %s" % (exc)) return viewname
[ "def", "_resolve_viewname", "(", "self", ",", "viewname", ")", ":", "if", "viewname", "==", "\"-\"", ":", "try", ":", "# Only works with rTorrent-PS at this time!", "viewname", "=", "self", ".", "open", "(", ")", ".", "ui", ".", "current_view", "(", ")", "except", "xmlrpc", ".", "ERRORS", "as", "exc", ":", "raise", "error", ".", "EngineError", "(", "\"Can't get name of current view: %s\"", "%", "(", "exc", ")", ")", "return", "viewname" ]
Check for special view names and return existing rTorrent one.
[ "Check", "for", "special", "view", "names", "and", "return", "existing", "rTorrent", "one", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L655-L665
7,347
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
RtorrentEngine.open
def open(self): """ Open connection. """ # Only connect once if self._rpc is not None: return self._rpc # Get connection URL from rtorrent.rc self.load_config() # Reading abilities are on the downfall, so... if not config.scgi_url: raise error.UserError("You need to configure a XMLRPC connection, read" " https://pyrocore.readthedocs.io/en/latest/setup.html") # Connect and get instance ID (also ensures we're connectable) self._rpc = xmlrpc.RTorrentProxy(config.scgi_url) self.versions, self.version_info = self._rpc._set_mappings() self.engine_id = self._rpc.session.name() time_usec = self._rpc.system.time_usec() # Make sure xmlrpc-c works as expected if time_usec < 2**32: self.LOG.warn("Your xmlrpc-c is broken (64 bit integer support missing," " %r returned instead)" % (type(time_usec),)) # Get other manifest values self.engine_software = "rTorrent %s/%s" % self.versions if "+ssh:" in config.scgi_url: self.startup = int(self._rpc.startup_time() or time.time()) else: self._session_dir = self._rpc.session.path() if not self._session_dir: raise error.UserError("You need a session directory, read" " https://pyrocore.readthedocs.io/en/latest/setup.html") if not os.path.exists(self._session_dir): raise error.UserError("Non-existing session directory %r" % self._session_dir) self._download_dir = os.path.expanduser(self._rpc.directory.default()) if not os.path.exists(self._download_dir): raise error.UserError("Non-existing download directory %r" % self._download_dir) self.startup = os.path.getmtime(os.path.join(self._session_dir, "rtorrent.lock")) # Return connection self.LOG.debug(repr(self)) return self._rpc
python
def open(self): """ Open connection. """ # Only connect once if self._rpc is not None: return self._rpc # Get connection URL from rtorrent.rc self.load_config() # Reading abilities are on the downfall, so... if not config.scgi_url: raise error.UserError("You need to configure a XMLRPC connection, read" " https://pyrocore.readthedocs.io/en/latest/setup.html") # Connect and get instance ID (also ensures we're connectable) self._rpc = xmlrpc.RTorrentProxy(config.scgi_url) self.versions, self.version_info = self._rpc._set_mappings() self.engine_id = self._rpc.session.name() time_usec = self._rpc.system.time_usec() # Make sure xmlrpc-c works as expected if time_usec < 2**32: self.LOG.warn("Your xmlrpc-c is broken (64 bit integer support missing," " %r returned instead)" % (type(time_usec),)) # Get other manifest values self.engine_software = "rTorrent %s/%s" % self.versions if "+ssh:" in config.scgi_url: self.startup = int(self._rpc.startup_time() or time.time()) else: self._session_dir = self._rpc.session.path() if not self._session_dir: raise error.UserError("You need a session directory, read" " https://pyrocore.readthedocs.io/en/latest/setup.html") if not os.path.exists(self._session_dir): raise error.UserError("Non-existing session directory %r" % self._session_dir) self._download_dir = os.path.expanduser(self._rpc.directory.default()) if not os.path.exists(self._download_dir): raise error.UserError("Non-existing download directory %r" % self._download_dir) self.startup = os.path.getmtime(os.path.join(self._session_dir, "rtorrent.lock")) # Return connection self.LOG.debug(repr(self)) return self._rpc
[ "def", "open", "(", "self", ")", ":", "# Only connect once", "if", "self", ".", "_rpc", "is", "not", "None", ":", "return", "self", ".", "_rpc", "# Get connection URL from rtorrent.rc", "self", ".", "load_config", "(", ")", "# Reading abilities are on the downfall, so...", "if", "not", "config", ".", "scgi_url", ":", "raise", "error", ".", "UserError", "(", "\"You need to configure a XMLRPC connection, read\"", "\" https://pyrocore.readthedocs.io/en/latest/setup.html\"", ")", "# Connect and get instance ID (also ensures we're connectable)", "self", ".", "_rpc", "=", "xmlrpc", ".", "RTorrentProxy", "(", "config", ".", "scgi_url", ")", "self", ".", "versions", ",", "self", ".", "version_info", "=", "self", ".", "_rpc", ".", "_set_mappings", "(", ")", "self", ".", "engine_id", "=", "self", ".", "_rpc", ".", "session", ".", "name", "(", ")", "time_usec", "=", "self", ".", "_rpc", ".", "system", ".", "time_usec", "(", ")", "# Make sure xmlrpc-c works as expected", "if", "time_usec", "<", "2", "**", "32", ":", "self", ".", "LOG", ".", "warn", "(", "\"Your xmlrpc-c is broken (64 bit integer support missing,\"", "\" %r returned instead)\"", "%", "(", "type", "(", "time_usec", ")", ",", ")", ")", "# Get other manifest values", "self", ".", "engine_software", "=", "\"rTorrent %s/%s\"", "%", "self", ".", "versions", "if", "\"+ssh:\"", "in", "config", ".", "scgi_url", ":", "self", ".", "startup", "=", "int", "(", "self", ".", "_rpc", ".", "startup_time", "(", ")", "or", "time", ".", "time", "(", ")", ")", "else", ":", "self", ".", "_session_dir", "=", "self", ".", "_rpc", ".", "session", ".", "path", "(", ")", "if", "not", "self", ".", "_session_dir", ":", "raise", "error", ".", "UserError", "(", "\"You need a session directory, read\"", "\" https://pyrocore.readthedocs.io/en/latest/setup.html\"", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "_session_dir", ")", ":", "raise", "error", ".", "UserError", "(", "\"Non-existing session directory %r\"", "%", "self", ".", "_session_dir", ")", "self", ".", "_download_dir", "=", "os", ".", "path", ".", "expanduser", "(", "self", ".", "_rpc", ".", "directory", ".", "default", "(", ")", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "_download_dir", ")", ":", "raise", "error", ".", "UserError", "(", "\"Non-existing download directory %r\"", "%", "self", ".", "_download_dir", ")", "self", ".", "startup", "=", "os", ".", "path", ".", "getmtime", "(", "os", ".", "path", ".", "join", "(", "self", ".", "_session_dir", ",", "\"rtorrent.lock\"", ")", ")", "# Return connection", "self", ".", "LOG", ".", "debug", "(", "repr", "(", "self", ")", ")", "return", "self", ".", "_rpc" ]
Open connection.
[ "Open", "connection", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L668-L713
7,348
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
RtorrentEngine.multicall
def multicall(self, viewname, fields): """ Query the given fields of items in the given view. The result list contains named tuples, so you can access the fields directly by their name. """ commands = tuple('d.{}='.format(x) for x in fields) result_type = namedtuple('DownloadItem', [x.replace('.', '_') for x in fields]) items = self.open().d.multicall(viewname, *commands) return [result_type(*x) for x in items]
python
def multicall(self, viewname, fields): """ Query the given fields of items in the given view. The result list contains named tuples, so you can access the fields directly by their name. """ commands = tuple('d.{}='.format(x) for x in fields) result_type = namedtuple('DownloadItem', [x.replace('.', '_') for x in fields]) items = self.open().d.multicall(viewname, *commands) return [result_type(*x) for x in items]
[ "def", "multicall", "(", "self", ",", "viewname", ",", "fields", ")", ":", "commands", "=", "tuple", "(", "'d.{}='", ".", "format", "(", "x", ")", "for", "x", "in", "fields", ")", "result_type", "=", "namedtuple", "(", "'DownloadItem'", ",", "[", "x", ".", "replace", "(", "'.'", ",", "'_'", ")", "for", "x", "in", "fields", "]", ")", "items", "=", "self", ".", "open", "(", ")", ".", "d", ".", "multicall", "(", "viewname", ",", "*", "commands", ")", "return", "[", "result_type", "(", "*", "x", ")", "for", "x", "in", "items", "]" ]
Query the given fields of items in the given view. The result list contains named tuples, so you can access the fields directly by their name.
[ "Query", "the", "given", "fields", "of", "items", "in", "the", "given", "view", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L716-L725
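The core trick in `multicall` is building a result type on the fly with `collections.namedtuple`, so callers address fields by name instead of by index. A standalone illustration; the field names and raw rows below are invented, not real XMLRPC output:

from collections import namedtuple

fields = ["name", "size.bytes", "is_open"]
DownloadItem = namedtuple("DownloadItem", [f.replace(".", "_") for f in fields])

raw_rows = [["ubuntu.iso", 3654957056, 1], ["fedora.iso", 2097152000, 0]]  # stand-in for multicall results
items = [DownloadItem(*row) for row in raw_rows]
print(items[0].size_bytes)   # -> 3654957056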
7,349
pyroscope/pyrocore
src/pyrocore/torrent/rtorrent.py
RtorrentEngine.item
def item(self, infohash, prefetch=None, cache=False): """ Fetch a single item by its info hash. """ return next(self.items(infohash, prefetch, cache))
python
def item(self, infohash, prefetch=None, cache=False): """ Fetch a single item by its info hash. """ return next(self.items(infohash, prefetch, cache))
[ "def", "item", "(", "self", ",", "infohash", ",", "prefetch", "=", "None", ",", "cache", "=", "False", ")", ":", "return", "next", "(", "self", ".", "items", "(", "infohash", ",", "prefetch", ",", "cache", ")", ")" ]
Fetch a single item by its info hash.
[ "Fetch", "a", "single", "item", "by", "its", "info", "hash", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L734-L737
7,350
pyroscope/pyrocore
src/pyrocore/torrent/broom.py
DiskSpaceManager._load_rules
def _load_rules(self): """Load rule definitions from config.""" for ruleset in self.active_rulesets: section_name = 'sweep_rules_' + ruleset.lower() try: ruledefs = getattr(self.config, section_name) except AttributeError: raise error.UserError("There is no [{}] section in your configuration" .format(section_name.upper())) for ruledef, filtercond in ruledefs.items(): if ruledef.endswith('.filter'): rulename = ruledef.rsplit('.', 1)[0] rule = SweepRule(ruleset, rulename, int(ruledefs.get(rulename + '.prio', '999')), ruledefs.get(rulename + '.order', self.default_order), parse_cond(filtercond)) self.rules.append(rule) self.rules.sort(key=lambda x: (x.prio, x.name)) return self.rules
python
def _load_rules(self): """Load rule definitions from config.""" for ruleset in self.active_rulesets: section_name = 'sweep_rules_' + ruleset.lower() try: ruledefs = getattr(self.config, section_name) except AttributeError: raise error.UserError("There is no [{}] section in your configuration" .format(section_name.upper())) for ruledef, filtercond in ruledefs.items(): if ruledef.endswith('.filter'): rulename = ruledef.rsplit('.', 1)[0] rule = SweepRule(ruleset, rulename, int(ruledefs.get(rulename + '.prio', '999')), ruledefs.get(rulename + '.order', self.default_order), parse_cond(filtercond)) self.rules.append(rule) self.rules.sort(key=lambda x: (x.prio, x.name)) return self.rules
[ "def", "_load_rules", "(", "self", ")", ":", "for", "ruleset", "in", "self", ".", "active_rulesets", ":", "section_name", "=", "'sweep_rules_'", "+", "ruleset", ".", "lower", "(", ")", "try", ":", "ruledefs", "=", "getattr", "(", "self", ".", "config", ",", "section_name", ")", "except", "AttributeError", ":", "raise", "error", ".", "UserError", "(", "\"There is no [{}] section in your configuration\"", ".", "format", "(", "section_name", ".", "upper", "(", ")", ")", ")", "for", "ruledef", ",", "filtercond", "in", "ruledefs", ".", "items", "(", ")", ":", "if", "ruledef", ".", "endswith", "(", "'.filter'", ")", ":", "rulename", "=", "ruledef", ".", "rsplit", "(", "'.'", ",", "1", ")", "[", "0", "]", "rule", "=", "SweepRule", "(", "ruleset", ",", "rulename", ",", "int", "(", "ruledefs", ".", "get", "(", "rulename", "+", "'.prio'", ",", "'999'", ")", ")", ",", "ruledefs", ".", "get", "(", "rulename", "+", "'.order'", ",", "self", ".", "default_order", ")", ",", "parse_cond", "(", "filtercond", ")", ")", "self", ".", "rules", ".", "append", "(", "rule", ")", "self", ".", "rules", ".", "sort", "(", "key", "=", "lambda", "x", ":", "(", "x", ".", "prio", ",", "x", ".", "name", ")", ")", "return", "self", ".", "rules" ]
Load rule definitions from config.
[ "Load", "rule", "definitions", "from", "config", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/broom.py#L54-L74
7,351
pyroscope/pyrocore
src/pyrocore/scripts/pyrotorque.py
RtorrentQueueManager._parse_schedule
def _parse_schedule(self, schedule): """ Parse a job schedule. """ result = {} for param in shlex.split(str(schedule)): # do not feed unicode to shlex try: key, val = param.split('=', 1) except (TypeError, ValueError): self.fatal("Bad param '%s' in job schedule '%s'" % (param, schedule)) else: result[key] = val return result
python
def _parse_schedule(self, schedule): """ Parse a job schedule. """ result = {} for param in shlex.split(str(schedule)): # do not feed unicode to shlex try: key, val = param.split('=', 1) except (TypeError, ValueError): self.fatal("Bad param '%s' in job schedule '%s'" % (param, schedule)) else: result[key] = val return result
[ "def", "_parse_schedule", "(", "self", ",", "schedule", ")", ":", "result", "=", "{", "}", "for", "param", "in", "shlex", ".", "split", "(", "str", "(", "schedule", ")", ")", ":", "# do not feed unicode to shlex", "try", ":", "key", ",", "val", "=", "param", ".", "split", "(", "'='", ",", "1", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "self", ".", "fatal", "(", "\"Bad param '%s' in job schedule '%s'\"", "%", "(", "param", ",", "schedule", ")", ")", "else", ":", "result", "[", "key", "]", "=", "val", "return", "result" ]
Parse a job schedule.
[ "Parse", "a", "job", "schedule", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/pyrotorque.py#L76-L89
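`_parse_schedule` is essentially `shlex.split` plus key=value splitting; a tiny sketch with a made-up schedule string:

import shlex

schedule = 'hour=*/4 minute=15'                 # hypothetical job schedule
result = dict(param.split('=', 1) for param in shlex.split(schedule))
print(result)                                   # -> {'hour': '*/4', 'minute': '15'}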
7,352
pyroscope/pyrocore
src/pyrocore/scripts/pyrotorque.py
RtorrentQueueManager._validate_config
def _validate_config(self): """ Handle and check configuration. """ groups = dict( job=defaultdict(Bunch), httpd=defaultdict(Bunch), ) for key, val in config.torque.items(): # Auto-convert numbers and bools if val.isdigit(): config.torque[key] = val = int(val) elif val.lower() in (matching.TRUE | matching.FALSE): val = matching.truth(str(val), key) # Assemble grouped parameters stem = key.split('.', 1)[0] if key == "httpd.active": groups[stem]["active"] = val elif stem in groups: try: stem, name, param = key.split('.', 2) except (TypeError, ValueError): self.fatal("Bad %s configuration key %r (expecting %s.NAME.PARAM)" % (stem, key, stem)) else: groups[stem][name][param] = val for key, val in groups.iteritems(): setattr(self, key.replace("job", "jobs"), Bunch(val)) # Validate httpd config if self.httpd.active: if self.httpd.waitress.url_scheme not in ("http", "https"): self.fatal("HTTP URL scheme must be either 'http' or 'https'") if not isinstance(self.httpd.waitress.port, int) or not(1024 <= self.httpd.waitress.port < 65536): self.fatal("HTTP port must be a 16 bit number >= 1024") # Validate jobs for name, params in self.jobs.items(): for key in ("handler", "schedule"): if key not in params: self.fatal("Job '%s' is missing the required 'job.%s.%s' parameter" % (name, name, key)) bool_param = lambda k, default, p=params: matching.truth(p.get(k, default), "job.%s.%s" % (name, k)) params.job_name = name params.dry_run = bool_param("dry_run", False) or self.options.dry_run params.active = bool_param("active", True) params.schedule = self._parse_schedule(params.schedule) if params.active: try: params.handler = pymagic.import_name(params.handler) except ImportError as exc: self.fatal("Bad handler name '%s' for job '%s':\n %s" % (params.handler, name, exc))
python
def _validate_config(self): """ Handle and check configuration. """ groups = dict( job=defaultdict(Bunch), httpd=defaultdict(Bunch), ) for key, val in config.torque.items(): # Auto-convert numbers and bools if val.isdigit(): config.torque[key] = val = int(val) elif val.lower() in (matching.TRUE | matching.FALSE): val = matching.truth(str(val), key) # Assemble grouped parameters stem = key.split('.', 1)[0] if key == "httpd.active": groups[stem]["active"] = val elif stem in groups: try: stem, name, param = key.split('.', 2) except (TypeError, ValueError): self.fatal("Bad %s configuration key %r (expecting %s.NAME.PARAM)" % (stem, key, stem)) else: groups[stem][name][param] = val for key, val in groups.iteritems(): setattr(self, key.replace("job", "jobs"), Bunch(val)) # Validate httpd config if self.httpd.active: if self.httpd.waitress.url_scheme not in ("http", "https"): self.fatal("HTTP URL scheme must be either 'http' or 'https'") if not isinstance(self.httpd.waitress.port, int) or not(1024 <= self.httpd.waitress.port < 65536): self.fatal("HTTP port must be a 16 bit number >= 1024") # Validate jobs for name, params in self.jobs.items(): for key in ("handler", "schedule"): if key not in params: self.fatal("Job '%s' is missing the required 'job.%s.%s' parameter" % (name, name, key)) bool_param = lambda k, default, p=params: matching.truth(p.get(k, default), "job.%s.%s" % (name, k)) params.job_name = name params.dry_run = bool_param("dry_run", False) or self.options.dry_run params.active = bool_param("active", True) params.schedule = self._parse_schedule(params.schedule) if params.active: try: params.handler = pymagic.import_name(params.handler) except ImportError as exc: self.fatal("Bad handler name '%s' for job '%s':\n %s" % (params.handler, name, exc))
[ "def", "_validate_config", "(", "self", ")", ":", "groups", "=", "dict", "(", "job", "=", "defaultdict", "(", "Bunch", ")", ",", "httpd", "=", "defaultdict", "(", "Bunch", ")", ",", ")", "for", "key", ",", "val", "in", "config", ".", "torque", ".", "items", "(", ")", ":", "# Auto-convert numbers and bools", "if", "val", ".", "isdigit", "(", ")", ":", "config", ".", "torque", "[", "key", "]", "=", "val", "=", "int", "(", "val", ")", "elif", "val", ".", "lower", "(", ")", "in", "(", "matching", ".", "TRUE", "|", "matching", ".", "FALSE", ")", ":", "val", "=", "matching", ".", "truth", "(", "str", "(", "val", ")", ",", "key", ")", "# Assemble grouped parameters", "stem", "=", "key", ".", "split", "(", "'.'", ",", "1", ")", "[", "0", "]", "if", "key", "==", "\"httpd.active\"", ":", "groups", "[", "stem", "]", "[", "\"active\"", "]", "=", "val", "elif", "stem", "in", "groups", ":", "try", ":", "stem", ",", "name", ",", "param", "=", "key", ".", "split", "(", "'.'", ",", "2", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "self", ".", "fatal", "(", "\"Bad %s configuration key %r (expecting %s.NAME.PARAM)\"", "%", "(", "stem", ",", "key", ",", "stem", ")", ")", "else", ":", "groups", "[", "stem", "]", "[", "name", "]", "[", "param", "]", "=", "val", "for", "key", ",", "val", "in", "groups", ".", "iteritems", "(", ")", ":", "setattr", "(", "self", ",", "key", ".", "replace", "(", "\"job\"", ",", "\"jobs\"", ")", ",", "Bunch", "(", "val", ")", ")", "# Validate httpd config", "if", "self", ".", "httpd", ".", "active", ":", "if", "self", ".", "httpd", ".", "waitress", ".", "url_scheme", "not", "in", "(", "\"http\"", ",", "\"https\"", ")", ":", "self", ".", "fatal", "(", "\"HTTP URL scheme must be either 'http' or 'https'\"", ")", "if", "not", "isinstance", "(", "self", ".", "httpd", ".", "waitress", ".", "port", ",", "int", ")", "or", "not", "(", "1024", "<=", "self", ".", "httpd", ".", "waitress", ".", "port", "<", "65536", ")", ":", "self", ".", "fatal", "(", "\"HTTP port must be a 16 bit number >= 1024\"", ")", "# Validate jobs", "for", "name", ",", "params", "in", "self", ".", "jobs", ".", "items", "(", ")", ":", "for", "key", "in", "(", "\"handler\"", ",", "\"schedule\"", ")", ":", "if", "key", "not", "in", "params", ":", "self", ".", "fatal", "(", "\"Job '%s' is missing the required 'job.%s.%s' parameter\"", "%", "(", "name", ",", "name", ",", "key", ")", ")", "bool_param", "=", "lambda", "k", ",", "default", ",", "p", "=", "params", ":", "matching", ".", "truth", "(", "p", ".", "get", "(", "k", ",", "default", ")", ",", "\"job.%s.%s\"", "%", "(", "name", ",", "k", ")", ")", "params", ".", "job_name", "=", "name", "params", ".", "dry_run", "=", "bool_param", "(", "\"dry_run\"", ",", "False", ")", "or", "self", ".", "options", ".", "dry_run", "params", ".", "active", "=", "bool_param", "(", "\"active\"", ",", "True", ")", "params", ".", "schedule", "=", "self", ".", "_parse_schedule", "(", "params", ".", "schedule", ")", "if", "params", ".", "active", ":", "try", ":", "params", ".", "handler", "=", "pymagic", ".", "import_name", "(", "params", ".", "handler", ")", "except", "ImportError", "as", "exc", ":", "self", ".", "fatal", "(", "\"Bad handler name '%s' for job '%s':\\n %s\"", "%", "(", "params", ".", "handler", ",", "name", ",", "exc", ")", ")" ]
Handle and check configuration.
[ "Handle", "and", "check", "configuration", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/pyrotorque.py#L92-L146
7,353
pyroscope/pyrocore
src/pyrocore/scripts/pyrotorque.py
RtorrentQueueManager._add_jobs
def _add_jobs(self): """ Add configured jobs. """ for name, params in self.jobs.items(): if params.active: params.handler = params.handler(params) self.sched.add_cron_job(params.handler.run, **params.schedule)
python
def _add_jobs(self): """ Add configured jobs. """ for name, params in self.jobs.items(): if params.active: params.handler = params.handler(params) self.sched.add_cron_job(params.handler.run, **params.schedule)
[ "def", "_add_jobs", "(", "self", ")", ":", "for", "name", ",", "params", "in", "self", ".", "jobs", ".", "items", "(", ")", ":", "if", "params", ".", "active", ":", "params", ".", "handler", "=", "params", ".", "handler", "(", "params", ")", "self", ".", "sched", ".", "add_cron_job", "(", "params", ".", "handler", ".", "run", ",", "*", "*", "params", ".", "schedule", ")" ]
Add configured jobs.
[ "Add", "configured", "jobs", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/pyrotorque.py#L149-L155
7,354
pyroscope/pyrocore
src/pyrocore/scripts/pyrotorque.py
RtorrentQueueManager._init_wsgi_server
def _init_wsgi_server(self): """ Set up WSGI HTTP server. """ self.wsgi_server = None if self.httpd.active: # Only import dependencies when server is active from waitress.server import WSGIServer from pyrocore.daemon import webapp # Set up WSGI stack wsgi_app = webapp.make_app(self.httpd) # try: # import wsgilog # except ImportError: # self.LOG.info("'wsgilog' middleware not installed") # else: # wsgi_app = wsgilog.WsgiLog(wsgi_app, **self.httpd.wsgilog) ##logging.getLogger('waitress').setLevel(logging.DEBUG) self.LOG.debug("Waitress config: %r" % self.httpd.waitress) self.wsgi_server = WSGIServer(wsgi_app, **self.httpd.waitress) self.LOG.info("Started web server at %s://%s:%d/" % ( self.httpd.waitress.url_scheme, self.wsgi_server.get_server_name(self.wsgi_server.effective_host), int(self.wsgi_server.effective_port), ))
python
def _init_wsgi_server(self): """ Set up WSGI HTTP server. """ self.wsgi_server = None if self.httpd.active: # Only import dependencies when server is active from waitress.server import WSGIServer from pyrocore.daemon import webapp # Set up WSGI stack wsgi_app = webapp.make_app(self.httpd) # try: # import wsgilog # except ImportError: # self.LOG.info("'wsgilog' middleware not installed") # else: # wsgi_app = wsgilog.WsgiLog(wsgi_app, **self.httpd.wsgilog) ##logging.getLogger('waitress').setLevel(logging.DEBUG) self.LOG.debug("Waitress config: %r" % self.httpd.waitress) self.wsgi_server = WSGIServer(wsgi_app, **self.httpd.waitress) self.LOG.info("Started web server at %s://%s:%d/" % ( self.httpd.waitress.url_scheme, self.wsgi_server.get_server_name(self.wsgi_server.effective_host), int(self.wsgi_server.effective_port), ))
[ "def", "_init_wsgi_server", "(", "self", ")", ":", "self", ".", "wsgi_server", "=", "None", "if", "self", ".", "httpd", ".", "active", ":", "# Only import dependencies when server is active", "from", "waitress", ".", "server", "import", "WSGIServer", "from", "pyrocore", ".", "daemon", "import", "webapp", "# Set up WSGI stack", "wsgi_app", "=", "webapp", ".", "make_app", "(", "self", ".", "httpd", ")", "# try:", "# import wsgilog", "# except ImportError:", "# self.LOG.info(\"'wsgilog' middleware not installed\")", "# else:", "# wsgi_app = wsgilog.WsgiLog(wsgi_app, **self.httpd.wsgilog)", "##logging.getLogger('waitress').setLevel(logging.DEBUG)", "self", ".", "LOG", ".", "debug", "(", "\"Waitress config: %r\"", "%", "self", ".", "httpd", ".", "waitress", ")", "self", ".", "wsgi_server", "=", "WSGIServer", "(", "wsgi_app", ",", "*", "*", "self", ".", "httpd", ".", "waitress", ")", "self", ".", "LOG", ".", "info", "(", "\"Started web server at %s://%s:%d/\"", "%", "(", "self", ".", "httpd", ".", "waitress", ".", "url_scheme", ",", "self", ".", "wsgi_server", ".", "get_server_name", "(", "self", ".", "wsgi_server", ".", "effective_host", ")", ",", "int", "(", "self", ".", "wsgi_server", ".", "effective_port", ")", ",", ")", ")" ]
Set up WSGI HTTP server.
[ "Set", "up", "WSGI", "HTTP", "server", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/pyrotorque.py#L158-L184
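For the web-server part, the essential move is handing a WSGI callable to waitress. A toy sketch, assuming the third-party waitress package is installed and using an arbitrary host/port rather than pyrotorque's real configuration or web app:

from waitress import serve

def app(environ, start_response):
    # trivial WSGI application standing in for pyrocore.daemon.webapp
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"hello from waitress\n"]

if __name__ == "__main__":
    serve(app, host="127.0.0.1", port=8080)     # blocks and serves until interrupted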
7,355
pyroscope/pyrocore
src/pyrocore/scripts/pyrotorque.py
RtorrentQueueManager._run_forever
def _run_forever(self): """ Run configured jobs until termination request. """ while True: try: tick = time.time() asyncore.loop(timeout=self.POLL_TIMEOUT, use_poll=True) # Sleep for remaining poll cycle time tick += self.POLL_TIMEOUT - time.time() if tick > 0: # wait POLL_TIMEOUT at most (robust against time shifts) time.sleep(min(tick, self.POLL_TIMEOUT)) except KeyboardInterrupt as exc: self.LOG.info("Termination request received (%s)" % exc) break except SystemExit as exc: self.return_code = exc.code or 0 self.LOG.info("System exit (RC=%r)" % self.return_code) break else: # Idle work #self.LOG.warn("IDLE %s %r" % (self.options.guard_file, os.path.exists(self.options.guard_file))) if self.options.guard_file and not os.path.exists(self.options.guard_file): self.LOG.warn("Guard file '%s' disappeared, exiting!" % self.options.guard_file) break
python
def _run_forever(self): """ Run configured jobs until termination request. """ while True: try: tick = time.time() asyncore.loop(timeout=self.POLL_TIMEOUT, use_poll=True) # Sleep for remaining poll cycle time tick += self.POLL_TIMEOUT - time.time() if tick > 0: # wait POLL_TIMEOUT at most (robust against time shifts) time.sleep(min(tick, self.POLL_TIMEOUT)) except KeyboardInterrupt as exc: self.LOG.info("Termination request received (%s)" % exc) break except SystemExit as exc: self.return_code = exc.code or 0 self.LOG.info("System exit (RC=%r)" % self.return_code) break else: # Idle work #self.LOG.warn("IDLE %s %r" % (self.options.guard_file, os.path.exists(self.options.guard_file))) if self.options.guard_file and not os.path.exists(self.options.guard_file): self.LOG.warn("Guard file '%s' disappeared, exiting!" % self.options.guard_file) break
[ "def", "_run_forever", "(", "self", ")", ":", "while", "True", ":", "try", ":", "tick", "=", "time", ".", "time", "(", ")", "asyncore", ".", "loop", "(", "timeout", "=", "self", ".", "POLL_TIMEOUT", ",", "use_poll", "=", "True", ")", "# Sleep for remaining poll cycle time", "tick", "+=", "self", ".", "POLL_TIMEOUT", "-", "time", ".", "time", "(", ")", "if", "tick", ">", "0", ":", "# wait POLL_TIMEOUT at most (robust against time shifts)", "time", ".", "sleep", "(", "min", "(", "tick", ",", "self", ".", "POLL_TIMEOUT", ")", ")", "except", "KeyboardInterrupt", "as", "exc", ":", "self", ".", "LOG", ".", "info", "(", "\"Termination request received (%s)\"", "%", "exc", ")", "break", "except", "SystemExit", "as", "exc", ":", "self", ".", "return_code", "=", "exc", ".", "code", "or", "0", "self", ".", "LOG", ".", "info", "(", "\"System exit (RC=%r)\"", "%", "self", ".", "return_code", ")", "break", "else", ":", "# Idle work", "#self.LOG.warn(\"IDLE %s %r\" % (self.options.guard_file, os.path.exists(self.options.guard_file)))", "if", "self", ".", "options", ".", "guard_file", "and", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "options", ".", "guard_file", ")", ":", "self", ".", "LOG", ".", "warn", "(", "\"Guard file '%s' disappeared, exiting!\"", "%", "self", ".", "options", ".", "guard_file", ")", "break" ]
Run configured jobs until termination request.
[ "Run", "configured", "jobs", "until", "termination", "request", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/pyrotorque.py#L187-L213
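The timing pattern in `_run_forever` (sleep only for what is left of a fixed poll cycle, and never longer than one cycle even if the clock jumps) can be shown in isolation; `POLL_TIMEOUT` and the work callable here are placeholders:

import time

POLL_TIMEOUT = 1.0                               # hypothetical cycle length in seconds

def poll_loop(do_work, cycles=3):
    for _ in range(cycles):
        tick = time.time()
        do_work()                                # may eat an unpredictable part of the cycle
        remaining = tick + POLL_TIMEOUT - time.time()
        if remaining > 0:
            time.sleep(min(remaining, POLL_TIMEOUT))   # robust against time shifts

poll_loop(lambda: None)                          # three roughly one-second idle cycles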
7,356
pyroscope/pyrocore
src/pyrocore/scripts/rtxmlrpc.py
read_blob
def read_blob(arg): """Read a BLOB from given ``@arg``.""" result = None if arg == '@-': result = sys.stdin.read() elif any(arg.startswith('@{}://'.format(x)) for x in {'http', 'https', 'ftp', 'file'}): if not requests: raise error.UserError("You must 'pip install requests' to support @URL arguments.") try: response = requests.get(arg[1:]) response.raise_for_status() result = response.content except requests.RequestException as exc: raise error.UserError(str(exc)) else: with open(os.path.expanduser(arg[1:]), 'rb') as handle: result = handle.read() return result
python
def read_blob(arg): """Read a BLOB from given ``@arg``.""" result = None if arg == '@-': result = sys.stdin.read() elif any(arg.startswith('@{}://'.format(x)) for x in {'http', 'https', 'ftp', 'file'}): if not requests: raise error.UserError("You must 'pip install requests' to support @URL arguments.") try: response = requests.get(arg[1:]) response.raise_for_status() result = response.content except requests.RequestException as exc: raise error.UserError(str(exc)) else: with open(os.path.expanduser(arg[1:]), 'rb') as handle: result = handle.read() return result
[ "def", "read_blob", "(", "arg", ")", ":", "result", "=", "None", "if", "arg", "==", "'@-'", ":", "result", "=", "sys", ".", "stdin", ".", "read", "(", ")", "elif", "any", "(", "arg", ".", "startswith", "(", "'@{}://'", ".", "format", "(", "x", ")", ")", "for", "x", "in", "{", "'http'", ",", "'https'", ",", "'ftp'", ",", "'file'", "}", ")", ":", "if", "not", "requests", ":", "raise", "error", ".", "UserError", "(", "\"You must 'pip install requests' to support @URL arguments.\"", ")", "try", ":", "response", "=", "requests", ".", "get", "(", "arg", "[", "1", ":", "]", ")", "response", ".", "raise_for_status", "(", ")", "result", "=", "response", ".", "content", "except", "requests", ".", "RequestException", "as", "exc", ":", "raise", "error", ".", "UserError", "(", "str", "(", "exc", ")", ")", "else", ":", "with", "open", "(", "os", ".", "path", ".", "expanduser", "(", "arg", "[", "1", ":", "]", ")", ",", "'rb'", ")", "as", "handle", ":", "result", "=", "handle", ".", "read", "(", ")", "return", "result" ]
Read a BLOB from given ``@arg``.
[ "Read", "a", "BLOB", "from", "given" ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtxmlrpc.py#L46-L64
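`read_blob` dispatches on the `@` prefix: stdin, URL, or local file. A Python 3 flavoured sketch of the same dispatch that uses only the standard library (urllib.request instead of the optional requests dependency), so it is an approximation rather than the function above:

import os
import sys
from urllib.request import urlopen

def read_blob(arg):
    """Return the bytes referenced by an '@...' argument."""
    if arg == "@-":
        return sys.stdin.buffer.read()                     # raw bytes from stdin
    if any(arg.startswith("@%s://" % scheme) for scheme in ("http", "https", "ftp", "file")):
        with urlopen(arg[1:]) as response:
            return response.read()
    with open(os.path.expanduser(arg[1:]), "rb") as handle:
        return handle.read()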
7,357
pyroscope/pyrocore
src/pyrocore/scripts/rtxmlrpc.py
RtorrentXmlRpc.open
def open(self): """Open connection and return proxy.""" if not self.proxy: if not config.scgi_url: config.engine.load_config() if not config.scgi_url: self.LOG.error("You need to configure a XMLRPC connection, read" " https://pyrocore.readthedocs.io/en/latest/setup.html") self.proxy = xmlrpc.RTorrentProxy(config.scgi_url) self.proxy._set_mappings() return self.proxy
python
def open(self): """Open connection and return proxy.""" if not self.proxy: if not config.scgi_url: config.engine.load_config() if not config.scgi_url: self.LOG.error("You need to configure a XMLRPC connection, read" " https://pyrocore.readthedocs.io/en/latest/setup.html") self.proxy = xmlrpc.RTorrentProxy(config.scgi_url) self.proxy._set_mappings() return self.proxy
[ "def", "open", "(", "self", ")", ":", "if", "not", "self", ".", "proxy", ":", "if", "not", "config", ".", "scgi_url", ":", "config", ".", "engine", ".", "load_config", "(", ")", "if", "not", "config", ".", "scgi_url", ":", "self", ".", "LOG", ".", "error", "(", "\"You need to configure a XMLRPC connection, read\"", "\" https://pyrocore.readthedocs.io/en/latest/setup.html\"", ")", "self", ".", "proxy", "=", "xmlrpc", ".", "RTorrentProxy", "(", "config", ".", "scgi_url", ")", "self", ".", "proxy", ".", "_set_mappings", "(", ")", "return", "self", ".", "proxy" ]
Open connection and return proxy.
[ "Open", "connection", "and", "return", "proxy", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtxmlrpc.py#L111-L121
7,358
pyroscope/pyrocore
src/pyrocore/scripts/rtxmlrpc.py
RtorrentXmlRpc.execute
def execute(self, proxy, method, args): """Execute given XMLRPC call.""" try: result = getattr(proxy, method)(raw_xml=self.options.xml, *tuple(args)) except xmlrpc.ERRORS as exc: self.LOG.error("While calling %s(%s): %s" % (method, ", ".join(repr(i) for i in args), exc)) self.return_code = error.EX_NOINPUT if "not find" in getattr(exc, "faultString", "") else error.EX_DATAERR else: if not self.options.quiet: if self.options.repr: # Pretty-print if requested, or it's a collection and not a scalar result = pformat(result) elif hasattr(result, "__iter__"): result = '\n'.join(i if isinstance(i, basestring) else pformat(i) for i in result) print(fmt.to_console(result))
python
def execute(self, proxy, method, args): """Execute given XMLRPC call.""" try: result = getattr(proxy, method)(raw_xml=self.options.xml, *tuple(args)) except xmlrpc.ERRORS as exc: self.LOG.error("While calling %s(%s): %s" % (method, ", ".join(repr(i) for i in args), exc)) self.return_code = error.EX_NOINPUT if "not find" in getattr(exc, "faultString", "") else error.EX_DATAERR else: if not self.options.quiet: if self.options.repr: # Pretty-print if requested, or it's a collection and not a scalar result = pformat(result) elif hasattr(result, "__iter__"): result = '\n'.join(i if isinstance(i, basestring) else pformat(i) for i in result) print(fmt.to_console(result))
[ "def", "execute", "(", "self", ",", "proxy", ",", "method", ",", "args", ")", ":", "try", ":", "result", "=", "getattr", "(", "proxy", ",", "method", ")", "(", "raw_xml", "=", "self", ".", "options", ".", "xml", ",", "*", "tuple", "(", "args", ")", ")", "except", "xmlrpc", ".", "ERRORS", "as", "exc", ":", "self", ".", "LOG", ".", "error", "(", "\"While calling %s(%s): %s\"", "%", "(", "method", ",", "\", \"", ".", "join", "(", "repr", "(", "i", ")", "for", "i", "in", "args", ")", ",", "exc", ")", ")", "self", ".", "return_code", "=", "error", ".", "EX_NOINPUT", "if", "\"not find\"", "in", "getattr", "(", "exc", ",", "\"faultString\"", ",", "\"\"", ")", "else", "error", ".", "EX_DATAERR", "else", ":", "if", "not", "self", ".", "options", ".", "quiet", ":", "if", "self", ".", "options", ".", "repr", ":", "# Pretty-print if requested, or it's a collection and not a scalar", "result", "=", "pformat", "(", "result", ")", "elif", "hasattr", "(", "result", ",", "\"__iter__\"", ")", ":", "result", "=", "'\\n'", ".", "join", "(", "i", "if", "isinstance", "(", "i", ",", "basestring", ")", "else", "pformat", "(", "i", ")", "for", "i", "in", "result", ")", "print", "(", "fmt", ".", "to_console", "(", "result", ")", ")" ]
Execute given XMLRPC call.
[ "Execute", "given", "XMLRPC", "call", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtxmlrpc.py#L149-L163
7,359
pyroscope/pyrocore
src/pyrocore/scripts/rtxmlrpc.py
RtorrentXmlRpc.do_repl
def do_repl(self): """REPL for rTorrent XMLRPC commands.""" from prompt_toolkit import prompt from prompt_toolkit.history import FileHistory from prompt_toolkit.auto_suggest import AutoSuggestFromHistory from prompt_toolkit.contrib.completers import WordCompleter self.options.quiet = False proxy = self.open() ps1 = proxy.session.name() + u'> ' words = ['help', 'stats', 'exit'] words += [x + '=' for x in proxy.system.listMethods()] history_file = os.path.join(config.config_dir, '.rtxmlrpc_history') while True: try: try: cmd = prompt(ps1, completer=WordCompleter(words), auto_suggest=AutoSuggestFromHistory(), history=FileHistory(history_file)) except KeyboardInterrupt: cmd = '' if not cmd: print("Enter '?' or 'help' for usage information, 'Ctrl-D' to exit.") if cmd in {'?', 'help'}: self.repl_usage() continue elif cmd in {'', 'stats'}: print(repr(proxy).split(None, 1)[1]) continue elif cmd in {'exit'}: raise EOFError() try: method, raw_args = cmd.split('=', 1) except ValueError: print("ERROR: '=' not found") continue raw_args = raw_args.split(',') args = self.cooked(raw_args) self.execute(proxy, method, args) except EOFError: print('Bye from {!r}'.format(proxy)) break
python
def do_repl(self): """REPL for rTorrent XMLRPC commands.""" from prompt_toolkit import prompt from prompt_toolkit.history import FileHistory from prompt_toolkit.auto_suggest import AutoSuggestFromHistory from prompt_toolkit.contrib.completers import WordCompleter self.options.quiet = False proxy = self.open() ps1 = proxy.session.name() + u'> ' words = ['help', 'stats', 'exit'] words += [x + '=' for x in proxy.system.listMethods()] history_file = os.path.join(config.config_dir, '.rtxmlrpc_history') while True: try: try: cmd = prompt(ps1, completer=WordCompleter(words), auto_suggest=AutoSuggestFromHistory(), history=FileHistory(history_file)) except KeyboardInterrupt: cmd = '' if not cmd: print("Enter '?' or 'help' for usage information, 'Ctrl-D' to exit.") if cmd in {'?', 'help'}: self.repl_usage() continue elif cmd in {'', 'stats'}: print(repr(proxy).split(None, 1)[1]) continue elif cmd in {'exit'}: raise EOFError() try: method, raw_args = cmd.split('=', 1) except ValueError: print("ERROR: '=' not found") continue raw_args = raw_args.split(',') args = self.cooked(raw_args) self.execute(proxy, method, args) except EOFError: print('Bye from {!r}'.format(proxy)) break
[ "def", "do_repl", "(", "self", ")", ":", "from", "prompt_toolkit", "import", "prompt", "from", "prompt_toolkit", ".", "history", "import", "FileHistory", "from", "prompt_toolkit", ".", "auto_suggest", "import", "AutoSuggestFromHistory", "from", "prompt_toolkit", ".", "contrib", ".", "completers", "import", "WordCompleter", "self", ".", "options", ".", "quiet", "=", "False", "proxy", "=", "self", ".", "open", "(", ")", "ps1", "=", "proxy", ".", "session", ".", "name", "(", ")", "+", "u'> '", "words", "=", "[", "'help'", ",", "'stats'", ",", "'exit'", "]", "words", "+=", "[", "x", "+", "'='", "for", "x", "in", "proxy", ".", "system", ".", "listMethods", "(", ")", "]", "history_file", "=", "os", ".", "path", ".", "join", "(", "config", ".", "config_dir", ",", "'.rtxmlrpc_history'", ")", "while", "True", ":", "try", ":", "try", ":", "cmd", "=", "prompt", "(", "ps1", ",", "completer", "=", "WordCompleter", "(", "words", ")", ",", "auto_suggest", "=", "AutoSuggestFromHistory", "(", ")", ",", "history", "=", "FileHistory", "(", "history_file", ")", ")", "except", "KeyboardInterrupt", ":", "cmd", "=", "''", "if", "not", "cmd", ":", "print", "(", "\"Enter '?' or 'help' for usage information, 'Ctrl-D' to exit.\"", ")", "if", "cmd", "in", "{", "'?'", ",", "'help'", "}", ":", "self", ".", "repl_usage", "(", ")", "continue", "elif", "cmd", "in", "{", "''", ",", "'stats'", "}", ":", "print", "(", "repr", "(", "proxy", ")", ".", "split", "(", "None", ",", "1", ")", "[", "1", "]", ")", "continue", "elif", "cmd", "in", "{", "'exit'", "}", ":", "raise", "EOFError", "(", ")", "try", ":", "method", ",", "raw_args", "=", "cmd", ".", "split", "(", "'='", ",", "1", ")", "except", "ValueError", ":", "print", "(", "\"ERROR: '=' not found\"", ")", "continue", "raw_args", "=", "raw_args", ".", "split", "(", "','", ")", "args", "=", "self", ".", "cooked", "(", "raw_args", ")", "self", ".", "execute", "(", "proxy", ",", "method", ",", "args", ")", "except", "EOFError", ":", "print", "(", "'Bye from {!r}'", ".", "format", "(", "proxy", ")", ")", "break" ]
REPL for rTorrent XMLRPC commands.
[ "REPL", "for", "rTorrent", "XMLRPC", "commands", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtxmlrpc.py#L179-L224
7,360
pyroscope/pyrocore
src/pyrocore/scripts/rtxmlrpc.py
RtorrentXmlRpc.do_import
def do_import(self): """Handle import files or streams passed with '-i'.""" tmp_import = None try: if self.args[0].startswith('@') and self.args[0] != '@-': import_file = os.path.expanduser(self.args[0][1:]) if not os.path.isfile(import_file): self.parser.error("File not found (or not a file): {}".format(import_file)) args = (xmlrpc.NOHASH, os.path.abspath(import_file)) else: script_text = '\n'.join(self.args + ['']) if script_text == '@-\n': script_text = sys.stdin.read() with tempfile.NamedTemporaryFile(suffix='.rc', prefix='rtxmlrpc-', delete=False) as handle: handle.write(script_text) tmp_import = handle.name args = (xmlrpc.NOHASH, tmp_import) self.execute(self.open(), 'import', args) finally: if tmp_import and os.path.exists(tmp_import): os.remove(tmp_import)
python
def do_import(self): """Handle import files or streams passed with '-i'.""" tmp_import = None try: if self.args[0].startswith('@') and self.args[0] != '@-': import_file = os.path.expanduser(self.args[0][1:]) if not os.path.isfile(import_file): self.parser.error("File not found (or not a file): {}".format(import_file)) args = (xmlrpc.NOHASH, os.path.abspath(import_file)) else: script_text = '\n'.join(self.args + ['']) if script_text == '@-\n': script_text = sys.stdin.read() with tempfile.NamedTemporaryFile(suffix='.rc', prefix='rtxmlrpc-', delete=False) as handle: handle.write(script_text) tmp_import = handle.name args = (xmlrpc.NOHASH, tmp_import) self.execute(self.open(), 'import', args) finally: if tmp_import and os.path.exists(tmp_import): os.remove(tmp_import)
[ "def", "do_import", "(", "self", ")", ":", "tmp_import", "=", "None", "try", ":", "if", "self", ".", "args", "[", "0", "]", ".", "startswith", "(", "'@'", ")", "and", "self", ".", "args", "[", "0", "]", "!=", "'@-'", ":", "import_file", "=", "os", ".", "path", ".", "expanduser", "(", "self", ".", "args", "[", "0", "]", "[", "1", ":", "]", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "import_file", ")", ":", "self", ".", "parser", ".", "error", "(", "\"File not found (or not a file): {}\"", ".", "format", "(", "import_file", ")", ")", "args", "=", "(", "xmlrpc", ".", "NOHASH", ",", "os", ".", "path", ".", "abspath", "(", "import_file", ")", ")", "else", ":", "script_text", "=", "'\\n'", ".", "join", "(", "self", ".", "args", "+", "[", "''", "]", ")", "if", "script_text", "==", "'@-\\n'", ":", "script_text", "=", "sys", ".", "stdin", ".", "read", "(", ")", "with", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "'.rc'", ",", "prefix", "=", "'rtxmlrpc-'", ",", "delete", "=", "False", ")", "as", "handle", ":", "handle", ".", "write", "(", "script_text", ")", "tmp_import", "=", "handle", ".", "name", "args", "=", "(", "xmlrpc", ".", "NOHASH", ",", "tmp_import", ")", "self", ".", "execute", "(", "self", ".", "open", "(", ")", ",", "'import'", ",", "args", ")", "finally", ":", "if", "tmp_import", "and", "os", ".", "path", ".", "exists", "(", "tmp_import", ")", ":", "os", ".", "remove", "(", "tmp_import", ")" ]
Handle import files or streams passed with '-i'.
[ "Handle", "import", "files", "or", "streams", "passed", "with", "-", "i", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtxmlrpc.py#L227-L249
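The temp-file handling in `do_import` (write to a NamedTemporaryFile with delete=False, pass its name on, remove it in a finally block) is a reusable pattern; here it is on its own, with a made-up payload and Python 3 text mode:

import os
import tempfile

script_text = "view.add = rtxmlrpc_demo\n"       # hypothetical command text
tmp_name = None
try:
    with tempfile.NamedTemporaryFile(mode="w", suffix=".rc", prefix="rtxmlrpc-", delete=False) as handle:
        handle.write(script_text)
        tmp_name = handle.name
    print("would import", tmp_name)              # hand the path to whatever consumes the file
finally:
    if tmp_name and os.path.exists(tmp_name):
        os.remove(tmp_name)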
7,361
pyroscope/pyrocore
src/pyrocore/scripts/rtxmlrpc.py
RtorrentXmlRpc.do_command
def do_command(self): """Call a single command with arguments.""" method = self.args[0] raw_args = self.args[1:] if '=' in method: if raw_args: self.parser.error("Please don't mix rTorrent and shell argument styles!") method, raw_args = method.split('=', 1) raw_args = raw_args.split(',') self.execute(self.open(), method, self.cooked(raw_args))
python
def do_command(self): """Call a single command with arguments.""" method = self.args[0] raw_args = self.args[1:] if '=' in method: if raw_args: self.parser.error("Please don't mix rTorrent and shell argument styles!") method, raw_args = method.split('=', 1) raw_args = raw_args.split(',') self.execute(self.open(), method, self.cooked(raw_args))
[ "def", "do_command", "(", "self", ")", ":", "method", "=", "self", ".", "args", "[", "0", "]", "raw_args", "=", "self", ".", "args", "[", "1", ":", "]", "if", "'='", "in", "method", ":", "if", "raw_args", ":", "self", ".", "parser", ".", "error", "(", "\"Please don't mix rTorrent and shell argument styles!\"", ")", "method", ",", "raw_args", "=", "method", ".", "split", "(", "'='", ",", "1", ")", "raw_args", "=", "raw_args", ".", "split", "(", "','", ")", "self", ".", "execute", "(", "self", ".", "open", "(", ")", ",", "method", ",", "self", ".", "cooked", "(", "raw_args", ")", ")" ]
Call a single command with arguments.
[ "Call", "a", "single", "command", "with", "arguments", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtxmlrpc.py#L252-L263
7,362
pyroscope/pyrocore
src/pyrocore/scripts/pyroadmin.py
AdminTool.download_resource
def download_resource(self, download_url, target, guard): """ Helper to download and install external resources. """ download_url = download_url.strip() if not os.path.isabs(target): target = os.path.join(config.config_dir, target) if os.path.exists(os.path.join(target, guard)): self.LOG.info("Already have '%s' in '%s'..." % (download_url, target)) return if not os.path.isdir(target): os.makedirs(target) self.LOG.info("Downloading '%s' to '%s'..." % (download_url, target)) with closing(urllib2.urlopen(download_url)) as url_handle: if download_url.endswith(".zip"): with closing(ZipFile(StringIO(url_handle.read()))) as zip_handle: # pylint: disable=no-member zip_handle.extractall(target) # pylint: disable=no-member else: with open(os.path.join(target, guard), "wb") as file_handle: shutil.copyfileobj(url_handle, file_handle)
python
def download_resource(self, download_url, target, guard): """ Helper to download and install external resources. """ download_url = download_url.strip() if not os.path.isabs(target): target = os.path.join(config.config_dir, target) if os.path.exists(os.path.join(target, guard)): self.LOG.info("Already have '%s' in '%s'..." % (download_url, target)) return if not os.path.isdir(target): os.makedirs(target) self.LOG.info("Downloading '%s' to '%s'..." % (download_url, target)) with closing(urllib2.urlopen(download_url)) as url_handle: if download_url.endswith(".zip"): with closing(ZipFile(StringIO(url_handle.read()))) as zip_handle: # pylint: disable=no-member zip_handle.extractall(target) # pylint: disable=no-member else: with open(os.path.join(target, guard), "wb") as file_handle: shutil.copyfileobj(url_handle, file_handle)
[ "def", "download_resource", "(", "self", ",", "download_url", ",", "target", ",", "guard", ")", ":", "download_url", "=", "download_url", ".", "strip", "(", ")", "if", "not", "os", ".", "path", ".", "isabs", "(", "target", ")", ":", "target", "=", "os", ".", "path", ".", "join", "(", "config", ".", "config_dir", ",", "target", ")", "if", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "target", ",", "guard", ")", ")", ":", "self", ".", "LOG", ".", "info", "(", "\"Already have '%s' in '%s'...\"", "%", "(", "download_url", ",", "target", ")", ")", "return", "if", "not", "os", ".", "path", ".", "isdir", "(", "target", ")", ":", "os", ".", "makedirs", "(", "target", ")", "self", ".", "LOG", ".", "info", "(", "\"Downloading '%s' to '%s'...\"", "%", "(", "download_url", ",", "target", ")", ")", "with", "closing", "(", "urllib2", ".", "urlopen", "(", "download_url", ")", ")", "as", "url_handle", ":", "if", "download_url", ".", "endswith", "(", "\".zip\"", ")", ":", "with", "closing", "(", "ZipFile", "(", "StringIO", "(", "url_handle", ".", "read", "(", ")", ")", ")", ")", "as", "zip_handle", ":", "# pylint: disable=no-member", "zip_handle", ".", "extractall", "(", "target", ")", "# pylint: disable=no-member", "else", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "target", ",", "guard", ")", ",", "\"wb\"", ")", "as", "file_handle", ":", "shutil", ".", "copyfileobj", "(", "url_handle", ",", "file_handle", ")" ]
Helper to download and install external resources.
[ "Helper", "to", "download", "and", "install", "external", "resources", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/pyroadmin.py#L81-L102
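`download_resource` either unpacks a zip archive in memory or streams the response into a guard file. A Python 3 standard-library sketch of the same branching (urllib.request and io.BytesIO in place of urllib2 and StringIO); URL and paths are whatever the caller supplies:

import io
import os
import shutil
from contextlib import closing
from urllib.request import urlopen
from zipfile import ZipFile

def fetch(download_url, target, guard):
    """Download download_url into directory target; zips are unpacked, anything else lands in guard."""
    if not os.path.isdir(target):
        os.makedirs(target)
    with closing(urlopen(download_url)) as url_handle:
        if download_url.endswith(".zip"):
            with ZipFile(io.BytesIO(url_handle.read())) as zip_handle:
                zip_handle.extractall(target)
        else:
            with open(os.path.join(target, guard), "wb") as file_handle:
                shutil.copyfileobj(url_handle, file_handle)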
7,363
pyroscope/pyrocore
docs/examples/rt-down-stats.py
fmt_duration
def fmt_duration(secs): """Format a duration in seconds.""" return ' '.join(fmt.human_duration(secs, 0, precision=2, short=True).strip().split())
python
def fmt_duration(secs): """Format a duration in seconds.""" return ' '.join(fmt.human_duration(secs, 0, precision=2, short=True).strip().split())
[ "def", "fmt_duration", "(", "secs", ")", ":", "return", "' '", ".", "join", "(", "fmt", ".", "human_duration", "(", "secs", ",", "0", ",", "precision", "=", "2", ",", "short", "=", "True", ")", ".", "strip", "(", ")", ".", "split", "(", ")", ")" ]
Format a duration in seconds.
[ "Format", "a", "duration", "in", "seconds", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/docs/examples/rt-down-stats.py#L13-L15
7,364
pyroscope/pyrocore
docs/examples/rt-down-stats.py
disk_free
def disk_free(path): """Return free bytes on partition holding `path`.""" stats = os.statvfs(path) return stats.f_bavail * stats.f_frsize
python
def disk_free(path): """Return free bytes on partition holding `path`.""" stats = os.statvfs(path) return stats.f_bavail * stats.f_frsize
[ "def", "disk_free", "(", "path", ")", ":", "stats", "=", "os", ".", "statvfs", "(", "path", ")", "return", "stats", ".", "f_bavail", "*", "stats", ".", "f_frsize" ]
Return free bytes on partition holding `path`.
[ "Return", "free", "bytes", "on", "partition", "holding", "path", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/docs/examples/rt-down-stats.py#L18-L21
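`disk_free` is the classic statvfs calculation; a quick interactive check (POSIX only, path chosen arbitrarily):

import os

stats = os.statvfs("/tmp")                        # any existing path on the partition of interest
free_bytes = stats.f_bavail * stats.f_frsize      # blocks available to unprivileged users * fragment size
print(free_bytes // (1024 * 1024), "MiB free")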
7,365
pyroscope/pyrocore
src/pyrocore/util/matching.py
truth
def truth(val, context): """ Convert truth value in "val" to a boolean. """ try: 0 + val except TypeError: lower_val = val.lower() if lower_val in TRUE: return True elif lower_val in FALSE: return False else: raise FilterError("Bad boolean value %r in %r (expected one of '%s', or '%s')" % ( val, context, "' '".join(TRUE), "' '".join(FALSE) )) else: return bool(val)
python
def truth(val, context): """ Convert truth value in "val" to a boolean. """ try: 0 + val except TypeError: lower_val = val.lower() if lower_val in TRUE: return True elif lower_val in FALSE: return False else: raise FilterError("Bad boolean value %r in %r (expected one of '%s', or '%s')" % ( val, context, "' '".join(TRUE), "' '".join(FALSE) )) else: return bool(val)
[ "def", "truth", "(", "val", ",", "context", ")", ":", "try", ":", "0", "+", "val", "except", "TypeError", ":", "lower_val", "=", "val", ".", "lower", "(", ")", "if", "lower_val", "in", "TRUE", ":", "return", "True", "elif", "lower_val", "in", "FALSE", ":", "return", "False", "else", ":", "raise", "FilterError", "(", "\"Bad boolean value %r in %r (expected one of '%s', or '%s')\"", "%", "(", "val", ",", "context", ",", "\"' '\"", ".", "join", "(", "TRUE", ")", ",", "\"' '\"", ".", "join", "(", "FALSE", ")", ")", ")", "else", ":", "return", "bool", "(", "val", ")" ]
Convert truth value in "val" to a boolean.
[ "Convert", "truth", "value", "in", "val", "to", "a", "boolean", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/matching.py#L35-L52
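`truth` uses `0 + val` as a duck-typing probe: numbers fall straight through to `bool()`, strings get matched against word sets. A standalone sketch; the TRUE/FALSE word sets below are guesses, the real ones are defined elsewhere in pyrocore.util.matching:

TRUE = {"true", "t", "yes", "y", "1", "+"}        # hypothetical truth words
FALSE = {"false", "f", "no", "n", "0", "-"}

def truth(val):
    try:
        0 + val                                   # numbers (and bools) take this path
    except TypeError:
        word = val.lower()
        if word in TRUE:
            return True
        if word in FALSE:
            return False
        raise ValueError("Bad boolean value %r" % (val,))
    else:
        return bool(val)

print(truth("yes"), truth(0))                     # -> True False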
7,366
pyroscope/pyrocore
src/pyrocore/util/matching.py
_time_ym_delta
def _time_ym_delta(timestamp, delta, months): """ Helper to add a year or month delta to a timestamp. """ timestamp = list(time.localtime(timestamp)) timestamp[int(months)] += delta return time.mktime(timestamp)
python
def _time_ym_delta(timestamp, delta, months): """ Helper to add a year or month delta to a timestamp. """ timestamp = list(time.localtime(timestamp)) timestamp[int(months)] += delta return time.mktime(timestamp)
[ "def", "_time_ym_delta", "(", "timestamp", ",", "delta", ",", "months", ")", ":", "timestamp", "=", "list", "(", "time", ".", "localtime", "(", "timestamp", ")", ")", "timestamp", "[", "int", "(", "months", ")", "]", "+=", "delta", "return", "time", ".", "mktime", "(", "timestamp", ")" ]
Helper to add a year or month delta to a timestamp.
[ "Helper", "to", "add", "a", "year", "or", "month", "delta", "to", "a", "timestamp", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/matching.py#L55-L60
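The trick in _time_ym_delta is that struct_time index 0 is the year and index 1 is the month, and the C-level mktime() normalizes out-of-range months. The standalone sketch below (hypothetical name shift_ym) converts back to a tuple before calling time.mktime, which keeps it working on Python 3 as well; note that day-of-month overflow (e.g. Jan 31 plus one month) is also normalized by mktime.

import time

def shift_ym(timestamp, delta, months=False):
    """Shift a Unix timestamp by whole years (months=False) or months (months=True)."""
    fields = list(time.localtime(timestamp))     # [year, month, mday, hour, ...]
    fields[1 if months else 0] += delta          # mktime() normalizes e.g. month 14
    return time.mktime(tuple(fields))

now = time.time()
print(time.ctime(shift_ym(now, -3, months=True)))   # roughly three months ago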
7,367
pyroscope/pyrocore
src/pyrocore/util/matching.py
unquote_pre_filter
def unquote_pre_filter(pre_filter, _regex=re.compile(r'[\\]+')): """ Unquote a pre-filter condition. """ if pre_filter.startswith('"') and pre_filter.endswith('"'): # Unquote outer level pre_filter = pre_filter[1:-1] pre_filter = _regex.sub(lambda x: x.group(0)[:len(x.group(0)) // 2], pre_filter) return pre_filter
python
def unquote_pre_filter(pre_filter, _regex=re.compile(r'[\\]+')): """ Unquote a pre-filter condition. """ if pre_filter.startswith('"') and pre_filter.endswith('"'): # Unquote outer level pre_filter = pre_filter[1:-1] pre_filter = _regex.sub(lambda x: x.group(0)[:len(x.group(0)) // 2], pre_filter) return pre_filter
[ "def", "unquote_pre_filter", "(", "pre_filter", ",", "_regex", "=", "re", ".", "compile", "(", "r'[\\\\]+'", ")", ")", ":", "if", "pre_filter", ".", "startswith", "(", "'\"'", ")", "and", "pre_filter", ".", "endswith", "(", "'\"'", ")", ":", "# Unquote outer level", "pre_filter", "=", "pre_filter", "[", "1", ":", "-", "1", "]", "pre_filter", "=", "_regex", ".", "sub", "(", "lambda", "x", ":", "x", ".", "group", "(", "0", ")", "[", ":", "len", "(", "x", ".", "group", "(", "0", ")", ")", "//", "2", "]", ",", "pre_filter", ")", "return", "pre_filter" ]
Unquote a pre-filter condition.
[ "Unquote", "a", "pre", "-", "filter", "condition", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/matching.py#L63-L71
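A quick standalone run of the unquoting logic above: strip one level of surrounding double quotes, then halve every run of backslashes. The sample condition string is made up for illustration.

import re

_BACKSLASHES = re.compile(r'[\\]+')

def unquote_pre_filter(pre_filter):
    """Strip one level of double quotes and halve every run of backslashes."""
    if pre_filter.startswith('"') and pre_filter.endswith('"'):
        pre_filter = pre_filter[1:-1]
    return _BACKSLASHES.sub(lambda m: m.group(0)[:len(m.group(0)) // 2], pre_filter)

print(unquote_pre_filter(r'"d.name=\\"Some\\ Name\\""'))   # -> d.name=\"Some\ Name\"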
7,368
pyroscope/pyrocore
src/pyrocore/util/matching.py
ConditionParser._create_filter
def _create_filter(self, condition): """ Create a filter object from a textual condition. """ # "Normal" comparison operators? comparison = re.match(r"^(%s)(<[>=]?|>=?|!=|~)(.*)$" % self.ident_re, condition) if comparison: name, comparison, values = comparison.groups() if values and values[0] in "+-": raise FilterError("Comparison operator cannot be followed by '%s' in '%s'" % (values[0], condition)) values = self.COMPARISON_OPS[comparison] % values else: # Split name from value(s) try: name, values = condition.split('=', 1) except ValueError: if self.default_field: name, values = self.default_field, condition else: raise FilterError("Field name missing in '%s' (expected '=')" % condition) # Try to find field definition field = self.lookup(name) if not field: raise FilterError("Unknown field %r in %r" % (name, condition)) if field.get("matcher") is None: raise FilterError("Field %r cannot be used as a filter" % (name,)) # Make filters from values (split on commas outside of /…/) filters = [] split_values = re.findall(r'(!?/[^/]*/|[^,]+)(?:,|$)', values) if values else [''] if not split_values: raise FilterError("Internal Error: Cannot split %r into match values" % (values,)) for value in split_values: wrapper = None if value.startswith('!'): wrapper = NegateFilter value = value[1:] field_matcher = field["matcher"](name, value) filters.append(wrapper(field_matcher) if wrapper else field_matcher) # Return filters return CompoundFilterAny(filters) if len(filters) > 1 else filters[0]
python
def _create_filter(self, condition): """ Create a filter object from a textual condition. """ # "Normal" comparison operators? comparison = re.match(r"^(%s)(<[>=]?|>=?|!=|~)(.*)$" % self.ident_re, condition) if comparison: name, comparison, values = comparison.groups() if values and values[0] in "+-": raise FilterError("Comparison operator cannot be followed by '%s' in '%s'" % (values[0], condition)) values = self.COMPARISON_OPS[comparison] % values else: # Split name from value(s) try: name, values = condition.split('=', 1) except ValueError: if self.default_field: name, values = self.default_field, condition else: raise FilterError("Field name missing in '%s' (expected '=')" % condition) # Try to find field definition field = self.lookup(name) if not field: raise FilterError("Unknown field %r in %r" % (name, condition)) if field.get("matcher") is None: raise FilterError("Field %r cannot be used as a filter" % (name,)) # Make filters from values (split on commas outside of /…/) filters = [] split_values = re.findall(r'(!?/[^/]*/|[^,]+)(?:,|$)', values) if values else [''] if not split_values: raise FilterError("Internal Error: Cannot split %r into match values" % (values,)) for value in split_values: wrapper = None if value.startswith('!'): wrapper = NegateFilter value = value[1:] field_matcher = field["matcher"](name, value) filters.append(wrapper(field_matcher) if wrapper else field_matcher) # Return filters return CompoundFilterAny(filters) if len(filters) > 1 else filters[0]
[ "def", "_create_filter", "(", "self", ",", "condition", ")", ":", "# \"Normal\" comparison operators?", "comparison", "=", "re", ".", "match", "(", "r\"^(%s)(<[>=]?|>=?|!=|~)(.*)$\"", "%", "self", ".", "ident_re", ",", "condition", ")", "if", "comparison", ":", "name", ",", "comparison", ",", "values", "=", "comparison", ".", "groups", "(", ")", "if", "values", "and", "values", "[", "0", "]", "in", "\"+-\"", ":", "raise", "FilterError", "(", "\"Comparison operator cannot be followed by '%s' in '%s'\"", "%", "(", "values", "[", "0", "]", ",", "condition", ")", ")", "values", "=", "self", ".", "COMPARISON_OPS", "[", "comparison", "]", "%", "values", "else", ":", "# Split name from value(s)", "try", ":", "name", ",", "values", "=", "condition", ".", "split", "(", "'='", ",", "1", ")", "except", "ValueError", ":", "if", "self", ".", "default_field", ":", "name", ",", "values", "=", "self", ".", "default_field", ",", "condition", "else", ":", "raise", "FilterError", "(", "\"Field name missing in '%s' (expected '=')\"", "%", "condition", ")", "# Try to find field definition", "field", "=", "self", ".", "lookup", "(", "name", ")", "if", "not", "field", ":", "raise", "FilterError", "(", "\"Unknown field %r in %r\"", "%", "(", "name", ",", "condition", ")", ")", "if", "field", ".", "get", "(", "\"matcher\"", ")", "is", "None", ":", "raise", "FilterError", "(", "\"Field %r cannot be used as a filter\"", "%", "(", "name", ",", ")", ")", "# Make filters from values (split on commas outside of /…/)", "filters", "=", "[", "]", "split_values", "=", "re", ".", "findall", "(", "r'(!?/[^/]*/|[^,]+)(?:,|$)'", ",", "values", ")", "if", "values", "else", "[", "''", "]", "if", "not", "split_values", ":", "raise", "FilterError", "(", "\"Internal Error: Cannot split %r into match values\"", "%", "(", "values", ",", ")", ")", "for", "value", "in", "split_values", ":", "wrapper", "=", "None", "if", "value", ".", "startswith", "(", "'!'", ")", ":", "wrapper", "=", "NegateFilter", "value", "=", "value", "[", "1", ":", "]", "field_matcher", "=", "field", "[", "\"matcher\"", "]", "(", "name", ",", "value", ")", "filters", ".", "append", "(", "wrapper", "(", "field_matcher", ")", "if", "wrapper", "else", "field_matcher", ")", "# Return filters", "return", "CompoundFilterAny", "(", "filters", ")", "if", "len", "(", "filters", ")", ">", "1", "else", "filters", "[", "0", "]" ]
Create a filter object from a textual condition.
[ "Create", "a", "filter", "object", "from", "a", "textual", "condition", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/matching.py#L736-L778
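The subtle part of _create_filter is splitting multiple match values on commas while leaving regex literals (/…/, optionally negated with '!') intact. That single re.findall can be tried in isolation; the sample value string is a made-up example.

import re

values = 'ubuntu*,!/beta|rc\\d+/,debian*'
split_values = re.findall(r'(!?/[^/]*/|[^,]+)(?:,|$)', values)
print(split_values)     # ['ubuntu*', '!/beta|rc\\d+/', 'debian*']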
7,369
pyroscope/pyrocore
src/pyrocore/util/matching.py
ConditionParser.parse
def parse(self, conditions): """ Parse filter conditions. @param conditions: multiple conditions. @type conditions: list or str """ conditions_text = conditions try: conditions = shlex.split(fmt.to_utf8(conditions)) except AttributeError: # Not a string, assume parsed tree conditions_text = self._tree2str(conditions) # Empty list? if not conditions: raise FilterError("No conditions given at all!") # NOT *must* appear at the start of a group negate = conditions[:1] == ["NOT"] if negate: conditions = conditions[1:] if not conditions: raise FilterError("NOT must be followed by some conditions!") # Handle grouping if '[' in conditions: tree = [[]] for term in conditions: if term == '[': tree.append([]) # new grouping elif term == ']': subtree = tree.pop() if not tree: raise FilterError("Unbalanced brackets, too many closing ']' in condition %r" % (conditions_text,)) tree[-1].append(subtree) # append finished group to containing level else: tree[-1].append(term) # append to current level if len(tree) > 1: raise FilterError("Unbalanced brackets, too many open '[' in condition %r" % (conditions_text,)) conditions = tree[0] # Prepare root matcher conditions = list(conditions) matcher = CompoundFilterAll() if "OR" in conditions: root = CompoundFilterAny() root.append(matcher) else: root = matcher # Go through conditions and parse them for condition in conditions: if condition == "OR": # Leading OR, or OR OR in sequence? if not matcher: raise FilterError("Left-hand side of OR missing in %r!" % (conditions_text,)) # Start next run of AND conditions matcher = CompoundFilterAll() root.append(matcher) elif isinstance(condition, list): matcher.append(self.parse(condition)) else: matcher.append(self._create_filter(condition)) # Trailing OR? if not matcher: raise FilterError("Right-hand side of OR missing in %r!" % (conditions_text,)) return NegateFilter(root) if negate else root
python
def parse(self, conditions): """ Parse filter conditions. @param conditions: multiple conditions. @type conditions: list or str """ conditions_text = conditions try: conditions = shlex.split(fmt.to_utf8(conditions)) except AttributeError: # Not a string, assume parsed tree conditions_text = self._tree2str(conditions) # Empty list? if not conditions: raise FilterError("No conditions given at all!") # NOT *must* appear at the start of a group negate = conditions[:1] == ["NOT"] if negate: conditions = conditions[1:] if not conditions: raise FilterError("NOT must be followed by some conditions!") # Handle grouping if '[' in conditions: tree = [[]] for term in conditions: if term == '[': tree.append([]) # new grouping elif term == ']': subtree = tree.pop() if not tree: raise FilterError("Unbalanced brackets, too many closing ']' in condition %r" % (conditions_text,)) tree[-1].append(subtree) # append finished group to containing level else: tree[-1].append(term) # append to current level if len(tree) > 1: raise FilterError("Unbalanced brackets, too many open '[' in condition %r" % (conditions_text,)) conditions = tree[0] # Prepare root matcher conditions = list(conditions) matcher = CompoundFilterAll() if "OR" in conditions: root = CompoundFilterAny() root.append(matcher) else: root = matcher # Go through conditions and parse them for condition in conditions: if condition == "OR": # Leading OR, or OR OR in sequence? if not matcher: raise FilterError("Left-hand side of OR missing in %r!" % (conditions_text,)) # Start next run of AND conditions matcher = CompoundFilterAll() root.append(matcher) elif isinstance(condition, list): matcher.append(self.parse(condition)) else: matcher.append(self._create_filter(condition)) # Trailing OR? if not matcher: raise FilterError("Right-hand side of OR missing in %r!" % (conditions_text,)) return NegateFilter(root) if negate else root
[ "def", "parse", "(", "self", ",", "conditions", ")", ":", "conditions_text", "=", "conditions", "try", ":", "conditions", "=", "shlex", ".", "split", "(", "fmt", ".", "to_utf8", "(", "conditions", ")", ")", "except", "AttributeError", ":", "# Not a string, assume parsed tree", "conditions_text", "=", "self", ".", "_tree2str", "(", "conditions", ")", "# Empty list?", "if", "not", "conditions", ":", "raise", "FilterError", "(", "\"No conditions given at all!\"", ")", "# NOT *must* appear at the start of a group", "negate", "=", "conditions", "[", ":", "1", "]", "==", "[", "\"NOT\"", "]", "if", "negate", ":", "conditions", "=", "conditions", "[", "1", ":", "]", "if", "not", "conditions", ":", "raise", "FilterError", "(", "\"NOT must be followed by some conditions!\"", ")", "# Handle grouping", "if", "'['", "in", "conditions", ":", "tree", "=", "[", "[", "]", "]", "for", "term", "in", "conditions", ":", "if", "term", "==", "'['", ":", "tree", ".", "append", "(", "[", "]", ")", "# new grouping", "elif", "term", "==", "']'", ":", "subtree", "=", "tree", ".", "pop", "(", ")", "if", "not", "tree", ":", "raise", "FilterError", "(", "\"Unbalanced brackets, too many closing ']' in condition %r\"", "%", "(", "conditions_text", ",", ")", ")", "tree", "[", "-", "1", "]", ".", "append", "(", "subtree", ")", "# append finished group to containing level", "else", ":", "tree", "[", "-", "1", "]", ".", "append", "(", "term", ")", "# append to current level", "if", "len", "(", "tree", ")", ">", "1", ":", "raise", "FilterError", "(", "\"Unbalanced brackets, too many open '[' in condition %r\"", "%", "(", "conditions_text", ",", ")", ")", "conditions", "=", "tree", "[", "0", "]", "# Prepare root matcher", "conditions", "=", "list", "(", "conditions", ")", "matcher", "=", "CompoundFilterAll", "(", ")", "if", "\"OR\"", "in", "conditions", ":", "root", "=", "CompoundFilterAny", "(", ")", "root", ".", "append", "(", "matcher", ")", "else", ":", "root", "=", "matcher", "# Go through conditions and parse them", "for", "condition", "in", "conditions", ":", "if", "condition", "==", "\"OR\"", ":", "# Leading OR, or OR OR in sequence?", "if", "not", "matcher", ":", "raise", "FilterError", "(", "\"Left-hand side of OR missing in %r!\"", "%", "(", "conditions_text", ",", ")", ")", "# Start next run of AND conditions", "matcher", "=", "CompoundFilterAll", "(", ")", "root", ".", "append", "(", "matcher", ")", "elif", "isinstance", "(", "condition", ",", "list", ")", ":", "matcher", ".", "append", "(", "self", ".", "parse", "(", "condition", ")", ")", "else", ":", "matcher", ".", "append", "(", "self", ".", "_create_filter", "(", "condition", ")", ")", "# Trailing OR?", "if", "not", "matcher", ":", "raise", "FilterError", "(", "\"Right-hand side of OR missing in %r!\"", "%", "(", "conditions_text", ",", ")", ")", "return", "NegateFilter", "(", "root", ")", "if", "negate", "else", "root" ]
Parse filter conditions. @param conditions: multiple conditions. @type conditions: list or str
[ "Parse", "filter", "conditions", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/matching.py#L792-L862
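The bracket-grouping pass inside parse() is worth seeing on its own: it folds a flat token list containing '[' and ']' into nested lists using a stack, which parse() then recurses into. A standalone sketch of just that step (the function name group_terms and the error messages are illustrative):

def group_terms(tokens):
    """Fold '[' ... ']' tokens into nested lists (raises on unbalanced brackets)."""
    stack = [[]]
    for term in tokens:
        if term == '[':
            stack.append([])                 # open a new group
        elif term == ']':
            group = stack.pop()
            if not stack:
                raise ValueError("too many ']'")
            stack[-1].append(group)          # close group into its parent
        else:
            stack[-1].append(term)
    if len(stack) > 1:
        raise ValueError("too many '['")
    return stack[0]

print(group_terms('is_complete=no OR [ ratio>1 seedtime>1d ]'.split()))
# -> ['is_complete=no', 'OR', ['ratio>1', 'seedtime>1d']]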
7,370
pyroscope/pyrocore
src/pyrocore/torrent/jobs.py
_flux_engine_data
def _flux_engine_data(engine): """ Return rTorrent data set for pushing to InfluxDB. """ data = stats.engine_data(engine) # Make it flat data["up_rate"] = data["upload"][0] data["up_limit"] = data["upload"][1] data["down_rate"] = data["download"][0] data["down_limit"] = data["download"][1] data["version"] = data["versions"][0] views = data["views"] del data["upload"] del data["download"] del data["versions"] del data["views"] return data, views
python
def _flux_engine_data(engine): """ Return rTorrent data set for pushing to InfluxDB. """ data = stats.engine_data(engine) # Make it flat data["up_rate"] = data["upload"][0] data["up_limit"] = data["upload"][1] data["down_rate"] = data["download"][0] data["down_limit"] = data["download"][1] data["version"] = data["versions"][0] views = data["views"] del data["upload"] del data["download"] del data["versions"] del data["views"] return data, views
[ "def", "_flux_engine_data", "(", "engine", ")", ":", "data", "=", "stats", ".", "engine_data", "(", "engine", ")", "# Make it flat", "data", "[", "\"up_rate\"", "]", "=", "data", "[", "\"upload\"", "]", "[", "0", "]", "data", "[", "\"up_limit\"", "]", "=", "data", "[", "\"upload\"", "]", "[", "1", "]", "data", "[", "\"down_rate\"", "]", "=", "data", "[", "\"download\"", "]", "[", "0", "]", "data", "[", "\"down_limit\"", "]", "=", "data", "[", "\"download\"", "]", "[", "1", "]", "data", "[", "\"version\"", "]", "=", "data", "[", "\"versions\"", "]", "[", "0", "]", "views", "=", "data", "[", "\"views\"", "]", "del", "data", "[", "\"upload\"", "]", "del", "data", "[", "\"download\"", "]", "del", "data", "[", "\"versions\"", "]", "del", "data", "[", "\"views\"", "]", "return", "data", ",", "views" ]
Return rTorrent data set for pushing to InfluxDB.
[ "Return", "rTorrent", "data", "set", "for", "pushing", "to", "InfluxDB", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/jobs.py#L35-L53
7,371
pyroscope/pyrocore
src/pyrocore/torrent/jobs.py
EngineStats.run
def run(self): """ Statistics logger job callback. """ try: proxy = config_ini.engine.open() self.LOG.info("Stats for %s - up %s, %s" % ( config_ini.engine.engine_id, fmt.human_duration(proxy.system.time() - config_ini.engine.startup, 0, 2, True).strip(), proxy )) except (error.LoggableError, xmlrpc.ERRORS), exc: self.LOG.warn(str(exc))
python
def run(self): """ Statistics logger job callback. """ try: proxy = config_ini.engine.open() self.LOG.info("Stats for %s - up %s, %s" % ( config_ini.engine.engine_id, fmt.human_duration(proxy.system.time() - config_ini.engine.startup, 0, 2, True).strip(), proxy )) except (error.LoggableError, xmlrpc.ERRORS), exc: self.LOG.warn(str(exc))
[ "def", "run", "(", "self", ")", ":", "try", ":", "proxy", "=", "config_ini", ".", "engine", ".", "open", "(", ")", "self", ".", "LOG", ".", "info", "(", "\"Stats for %s - up %s, %s\"", "%", "(", "config_ini", ".", "engine", ".", "engine_id", ",", "fmt", ".", "human_duration", "(", "proxy", ".", "system", ".", "time", "(", ")", "-", "config_ini", ".", "engine", ".", "startup", ",", "0", ",", "2", ",", "True", ")", ".", "strip", "(", ")", ",", "proxy", ")", ")", "except", "(", "error", ".", "LoggableError", ",", "xmlrpc", ".", "ERRORS", ")", ",", "exc", ":", "self", ".", "LOG", ".", "warn", "(", "str", "(", "exc", ")", ")" ]
Statistics logger job callback.
[ "Statistics", "logger", "job", "callback", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/jobs.py#L68-L79
7,372
pyroscope/pyrocore
src/pyrocore/torrent/jobs.py
InfluxDBStats._influxdb_url
def _influxdb_url(self): """ Return REST API URL to access time series. """ url = "{0}/db/{1}/series".format(self.influxdb.url.rstrip('/'), self.config.dbname) if self.influxdb.user and self.influxdb.password: url += "?u={0}&p={1}".format(self.influxdb.user, self.influxdb.password) return url
python
def _influxdb_url(self): """ Return REST API URL to access time series. """ url = "{0}/db/{1}/series".format(self.influxdb.url.rstrip('/'), self.config.dbname) if self.influxdb.user and self.influxdb.password: url += "?u={0}&p={1}".format(self.influxdb.user, self.influxdb.password) return url
[ "def", "_influxdb_url", "(", "self", ")", ":", "url", "=", "\"{0}/db/{1}/series\"", ".", "format", "(", "self", ".", "influxdb", ".", "url", ".", "rstrip", "(", "'/'", ")", ",", "self", ".", "config", ".", "dbname", ")", "if", "self", ".", "influxdb", ".", "user", "and", "self", ".", "influxdb", ".", "password", ":", "url", "+=", "\"?u={0}&p={1}\"", ".", "format", "(", "self", ".", "influxdb", ".", "user", ",", "self", ".", "influxdb", ".", "password", ")", "return", "url" ]
Return REST API URL to access time series.
[ "Return", "REST", "API", "URL", "to", "access", "time", "series", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/jobs.py#L99-L107
7,373
pyroscope/pyrocore
src/pyrocore/torrent/jobs.py
InfluxDBStats._push_data
def _push_data(self): """ Push stats data to InfluxDB. """ if not (self.config.series or self.config.series_host): self.LOG.info("Misconfigured InfluxDB job, neither 'series' nor 'series_host' is set!") return # Assemble data fluxdata = [] if self.config.series: try: config_ini.engine.open() data, views = _flux_engine_data(config_ini.engine) fluxdata.append(dict( name=self.config.series, columns=data.keys(), points=[data.values()] )) fluxdata.append(dict( name=self.config.series + '_views', columns=views.keys(), points=[views.values()] )) except (error.LoggableError, xmlrpc.ERRORS), exc: self.LOG.warn("InfluxDB stats: {0}".format(exc)) # if self.config.series_host: # fluxdata.append(dict( # name = self.config.series_host, # columns = .keys(), # points = [.values()] # )) if not fluxdata: self.LOG.debug("InfluxDB stats: no data (previous errors?)") return # Encode into InfluxDB data packet fluxurl = self._influxdb_url() fluxjson = json.dumps(fluxdata) self.LOG.debug("POST to {0} with {1}".format(fluxurl.split('?')[0], fluxjson)) # Push it! try: # TODO: Use a session requests.post(fluxurl, data=fluxjson, timeout=self.influxdb.timeout) except RequestException, exc: self.LOG.info("InfluxDB POST error: {0}".format(exc))
python
def _push_data(self): """ Push stats data to InfluxDB. """ if not (self.config.series or self.config.series_host): self.LOG.info("Misconfigured InfluxDB job, neither 'series' nor 'series_host' is set!") return # Assemble data fluxdata = [] if self.config.series: try: config_ini.engine.open() data, views = _flux_engine_data(config_ini.engine) fluxdata.append(dict( name=self.config.series, columns=data.keys(), points=[data.values()] )) fluxdata.append(dict( name=self.config.series + '_views', columns=views.keys(), points=[views.values()] )) except (error.LoggableError, xmlrpc.ERRORS), exc: self.LOG.warn("InfluxDB stats: {0}".format(exc)) # if self.config.series_host: # fluxdata.append(dict( # name = self.config.series_host, # columns = .keys(), # points = [.values()] # )) if not fluxdata: self.LOG.debug("InfluxDB stats: no data (previous errors?)") return # Encode into InfluxDB data packet fluxurl = self._influxdb_url() fluxjson = json.dumps(fluxdata) self.LOG.debug("POST to {0} with {1}".format(fluxurl.split('?')[0], fluxjson)) # Push it! try: # TODO: Use a session requests.post(fluxurl, data=fluxjson, timeout=self.influxdb.timeout) except RequestException, exc: self.LOG.info("InfluxDB POST error: {0}".format(exc))
[ "def", "_push_data", "(", "self", ")", ":", "if", "not", "(", "self", ".", "config", ".", "series", "or", "self", ".", "config", ".", "series_host", ")", ":", "self", ".", "LOG", ".", "info", "(", "\"Misconfigured InfluxDB job, neither 'series' nor 'series_host' is set!\"", ")", "return", "# Assemble data", "fluxdata", "=", "[", "]", "if", "self", ".", "config", ".", "series", ":", "try", ":", "config_ini", ".", "engine", ".", "open", "(", ")", "data", ",", "views", "=", "_flux_engine_data", "(", "config_ini", ".", "engine", ")", "fluxdata", ".", "append", "(", "dict", "(", "name", "=", "self", ".", "config", ".", "series", ",", "columns", "=", "data", ".", "keys", "(", ")", ",", "points", "=", "[", "data", ".", "values", "(", ")", "]", ")", ")", "fluxdata", ".", "append", "(", "dict", "(", "name", "=", "self", ".", "config", ".", "series", "+", "'_views'", ",", "columns", "=", "views", ".", "keys", "(", ")", ",", "points", "=", "[", "views", ".", "values", "(", ")", "]", ")", ")", "except", "(", "error", ".", "LoggableError", ",", "xmlrpc", ".", "ERRORS", ")", ",", "exc", ":", "self", ".", "LOG", ".", "warn", "(", "\"InfluxDB stats: {0}\"", ".", "format", "(", "exc", ")", ")", "# if self.config.series_host:", "# fluxdata.append(dict(", "# name = self.config.series_host,", "# columns = .keys(),", "# points = [.values()]", "# ))", "if", "not", "fluxdata", ":", "self", ".", "LOG", ".", "debug", "(", "\"InfluxDB stats: no data (previous errors?)\"", ")", "return", "# Encode into InfluxDB data packet", "fluxurl", "=", "self", ".", "_influxdb_url", "(", ")", "fluxjson", "=", "json", ".", "dumps", "(", "fluxdata", ")", "self", ".", "LOG", ".", "debug", "(", "\"POST to {0} with {1}\"", ".", "format", "(", "fluxurl", ".", "split", "(", "'?'", ")", "[", "0", "]", ",", "fluxjson", ")", ")", "# Push it!", "try", ":", "# TODO: Use a session", "requests", ".", "post", "(", "fluxurl", ",", "data", "=", "fluxjson", ",", "timeout", "=", "self", ".", "influxdb", ".", "timeout", ")", "except", "RequestException", ",", "exc", ":", "self", ".", "LOG", ".", "info", "(", "\"InfluxDB POST error: {0}\"", ".", "format", "(", "exc", ")", ")" ]
Push stats data to InfluxDB.
[ "Push", "stats", "data", "to", "InfluxDB", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/jobs.py#L110-L158
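The payload built in _push_data targets the legacy InfluxDB 0.8 "series" JSON endpoint, POSTed with the third-party requests library. A hedged standalone sketch of only the HTTP part follows; the URL layout is taken from _influxdb_url above, while the helper name push_series and the example host, database and column names are placeholders.

import json
import requests  # third-party; assumed available as in the original job

def push_series(base_url, dbname, series, columns, points, user=None, password=None, timeout=10):
    """POST one measurement in the InfluxDB 0.8 'series' JSON format (sketch)."""
    url = '{0}/db/{1}/series'.format(base_url.rstrip('/'), dbname)
    if user and password:
        url += '?u={0}&p={1}'.format(user, password)
    payload = [dict(name=series, columns=columns, points=points)]
    requests.post(url, data=json.dumps(payload), timeout=timeout)

# push_series('http://localhost:8086', 'torrents', 'rtorrent',
#             ['up_rate', 'down_rate'], [[12345, 67890]])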
7,374
pyroscope/pyrocore
src/pyrocore/torrent/filter.py
FilterJobBase.run
def run(self): """ Filter job callback. """ from pyrocore import config try: config.engine.open() # TODO: select view into items items = [] self.run_filter(items) except (error.LoggableError, xmlrpc.ERRORS) as exc: self.LOG.warn(str(exc))
python
def run(self): """ Filter job callback. """ from pyrocore import config try: config.engine.open() # TODO: select view into items items = [] self.run_filter(items) except (error.LoggableError, xmlrpc.ERRORS) as exc: self.LOG.warn(str(exc))
[ "def", "run", "(", "self", ")", ":", "from", "pyrocore", "import", "config", "try", ":", "config", ".", "engine", ".", "open", "(", ")", "# TODO: select view into items", "items", "=", "[", "]", "self", ".", "run_filter", "(", "items", ")", "except", "(", "error", ".", "LoggableError", ",", "xmlrpc", ".", "ERRORS", ")", "as", "exc", ":", "self", ".", "LOG", ".", "warn", "(", "str", "(", "exc", ")", ")" ]
Filter job callback.
[ "Filter", "job", "callback", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/filter.py#L38-L49
7,375
pyroscope/pyrocore
src/pyrocore/scripts/chtor.py
replace_fields
def replace_fields(meta, patterns): """ Replace patterns in fields. """ for pattern in patterns: try: field, regex, subst, _ = pattern.split(pattern[-1]) # TODO: Allow numerical indices, and "+" for append namespace = meta keypath = [i.replace('\0', '.') for i in field.replace('..', '\0').split('.')] for key in keypath[:-1]: namespace = namespace[key] namespace[keypath[-1]] = re.sub(regex, subst, namespace[keypath[-1]]) except (KeyError, IndexError, TypeError, ValueError) as exc: raise error.UserError("Bad substitution '%s' (%s)!" % (pattern, exc)) return meta
python
def replace_fields(meta, patterns): """ Replace patterns in fields. """ for pattern in patterns: try: field, regex, subst, _ = pattern.split(pattern[-1]) # TODO: Allow numerical indices, and "+" for append namespace = meta keypath = [i.replace('\0', '.') for i in field.replace('..', '\0').split('.')] for key in keypath[:-1]: namespace = namespace[key] namespace[keypath[-1]] = re.sub(regex, subst, namespace[keypath[-1]]) except (KeyError, IndexError, TypeError, ValueError) as exc: raise error.UserError("Bad substitution '%s' (%s)!" % (pattern, exc)) return meta
[ "def", "replace_fields", "(", "meta", ",", "patterns", ")", ":", "for", "pattern", "in", "patterns", ":", "try", ":", "field", ",", "regex", ",", "subst", ",", "_", "=", "pattern", ".", "split", "(", "pattern", "[", "-", "1", "]", ")", "# TODO: Allow numerical indices, and \"+\" for append", "namespace", "=", "meta", "keypath", "=", "[", "i", ".", "replace", "(", "'\\0'", ",", "'.'", ")", "for", "i", "in", "field", ".", "replace", "(", "'..'", ",", "'\\0'", ")", ".", "split", "(", "'.'", ")", "]", "for", "key", "in", "keypath", "[", ":", "-", "1", "]", ":", "namespace", "=", "namespace", "[", "key", "]", "namespace", "[", "keypath", "[", "-", "1", "]", "]", "=", "re", ".", "sub", "(", "regex", ",", "subst", ",", "namespace", "[", "keypath", "[", "-", "1", "]", "]", ")", "except", "(", "KeyError", ",", "IndexError", ",", "TypeError", ",", "ValueError", ")", "as", "exc", ":", "raise", "error", ".", "UserError", "(", "\"Bad substitution '%s' (%s)!\"", "%", "(", "pattern", ",", "exc", ")", ")", "return", "meta" ]
Replace patterns in fields.
[ "Replace", "patterns", "in", "fields", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/chtor.py#L34-L51
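The pattern syntax accepted by replace_fields is "field<sep>regex<sep>subst<sep>", where the separator is whatever character the pattern ends with, and '..' in the field name escapes a literal dot. A single-pattern standalone run of the same logic (the helper name replace_field and the sample metadata are illustrative):

import re

def replace_field(meta, pattern):
    """Apply one 'field<sep>regex<sep>subst<sep>' substitution in-place (sketch)."""
    field, regex, subst, _ = pattern.split(pattern[-1])
    namespace = meta
    keypath = [part.replace('\0', '.') for part in field.replace('..', '\0').split('.')]
    for key in keypath[:-1]:
        namespace = namespace[key]
    namespace[keypath[-1]] = re.sub(regex, subst, namespace[keypath[-1]])
    return meta

meta = {'info': {'name': 'Some.Name.2019'}}
print(replace_field(meta, 'info.name/\\./ /'))    # {'info': {'name': 'Some Name 2019'}}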
7,376
pyroscope/pyrocore
src/pyrocore/__init__.py
connect
def connect(config_dir=None, optional_config_files=None, cron_cfg="cron"): """ Initialize everything for interactive use. Returns a ready-to-use RtorrentEngine object. """ from pyrocore.scripts.base import ScriptBase from pyrocore.util import load_config ScriptBase.setup(cron_cfg=cron_cfg) load_config.ConfigLoader(config_dir).load(optional_config_files or []) from pyrocore import config config.engine.open() return config.engine
python
def connect(config_dir=None, optional_config_files=None, cron_cfg="cron"): """ Initialize everything for interactive use. Returns a ready-to-use RtorrentEngine object. """ from pyrocore.scripts.base import ScriptBase from pyrocore.util import load_config ScriptBase.setup(cron_cfg=cron_cfg) load_config.ConfigLoader(config_dir).load(optional_config_files or []) from pyrocore import config config.engine.open() return config.engine
[ "def", "connect", "(", "config_dir", "=", "None", ",", "optional_config_files", "=", "None", ",", "cron_cfg", "=", "\"cron\"", ")", ":", "from", "pyrocore", ".", "scripts", ".", "base", "import", "ScriptBase", "from", "pyrocore", ".", "util", "import", "load_config", "ScriptBase", ".", "setup", "(", "cron_cfg", "=", "cron_cfg", ")", "load_config", ".", "ConfigLoader", "(", "config_dir", ")", ".", "load", "(", "optional_config_files", "or", "[", "]", ")", "from", "pyrocore", "import", "config", "config", ".", "engine", ".", "open", "(", ")", "return", "config", ".", "engine" ]
Initialize everything for interactive use. Returns a ready-to-use RtorrentEngine object.
[ "Initialize", "everything", "for", "interactive", "use", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/__init__.py#L23-L36
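Typical interactive use of connect(), hedged: it assumes a configured pyrocore installation and a reachable rTorrent instance, and the engine_id attribute is taken from its use in the jobs module above.

from pyrocore import connect

engine = connect()            # load the configuration and open the XMLRPC proxy
print(engine.engine_id)       # the configured instance name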
7,377
pyroscope/pyrocore
src/pyrocore/scripts/base.py
ScriptBase.setup
def setup(cls, cron_cfg="cron"): """ Set up the runtime environment. """ random.seed() logging_cfg = cls.LOGGING_CFG if "%s" in logging_cfg: logging_cfg = logging_cfg % (cron_cfg if "--cron" in sys.argv[1:] else "scripts",) logging_cfg = os.path.expanduser(logging_cfg) if os.path.exists(logging_cfg): logging.HERE = os.path.dirname(logging_cfg) logging.config.fileConfig(logging_cfg) else: logging.basicConfig(level=logging.INFO) logging.getLogger().debug("Logging config read from '%s'" % logging_cfg)
python
def setup(cls, cron_cfg="cron"): """ Set up the runtime environment. """ random.seed() logging_cfg = cls.LOGGING_CFG if "%s" in logging_cfg: logging_cfg = logging_cfg % (cron_cfg if "--cron" in sys.argv[1:] else "scripts",) logging_cfg = os.path.expanduser(logging_cfg) if os.path.exists(logging_cfg): logging.HERE = os.path.dirname(logging_cfg) logging.config.fileConfig(logging_cfg) else: logging.basicConfig(level=logging.INFO) logging.getLogger().debug("Logging config read from '%s'" % logging_cfg)
[ "def", "setup", "(", "cls", ",", "cron_cfg", "=", "\"cron\"", ")", ":", "random", ".", "seed", "(", ")", "logging_cfg", "=", "cls", ".", "LOGGING_CFG", "if", "\"%s\"", "in", "logging_cfg", ":", "logging_cfg", "=", "logging_cfg", "%", "(", "cron_cfg", "if", "\"--cron\"", "in", "sys", ".", "argv", "[", "1", ":", "]", "else", "\"scripts\"", ",", ")", "logging_cfg", "=", "os", ".", "path", ".", "expanduser", "(", "logging_cfg", ")", "if", "os", ".", "path", ".", "exists", "(", "logging_cfg", ")", ":", "logging", ".", "HERE", "=", "os", ".", "path", ".", "dirname", "(", "logging_cfg", ")", "logging", ".", "config", ".", "fileConfig", "(", "logging_cfg", ")", "else", ":", "logging", ".", "basicConfig", "(", "level", "=", "logging", ".", "INFO", ")", "logging", ".", "getLogger", "(", ")", ".", "debug", "(", "\"Logging config read from '%s'\"", "%", "logging_cfg", ")" ]
Set up the runtime environment.
[ "Set", "up", "the", "runtime", "environment", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/base.py#L61-L76
7,378
pyroscope/pyrocore
src/pyrocore/scripts/base.py
ScriptBase._get_pkg_meta
def _get_pkg_meta(self): """ Try to find package metadata. """ logger = logging.getLogger('pyrocore.scripts.base.version_info') pkg_info = None warnings = [] for info_ext, info_name in (('.dist-info', 'METADATA'), ('.egg-info', 'PKG-INFO')): try: # Development setup pkg_path = os.path.join( __file__.split(__name__.replace('.', os.sep))[0], # containing path __name__.split(".")[0] # package name ) if os.path.exists(pkg_path + info_ext): pkg_path += info_ext else: globbed_paths = glob.glob(pkg_path + "-*-py%d.%d" % sys.version_info[:2] + info_ext) if len(globbed_paths) == 1: pkg_path = globbed_paths[0] elif globbed_paths: warnings.append("Found {} release-specific candidate versions in *{}" .format(len(globbed_paths), info_ext)) pkg_path = None else: globbed_paths = glob.glob(pkg_path + "-*" + info_ext) if len(globbed_paths) == 1: pkg_path = globbed_paths[0] else: warnings.append("Found {} candidate versions in *{}" .format(len(globbed_paths), info_ext)) pkg_path = None if pkg_path: with open(os.path.join(pkg_path, info_name)) as handle: pkg_info = handle.read() break except IOError: continue if not pkg_info: logger.warn("Software version cannot be determined! ({})".format(', '.join(warnings))) return pkg_info or "Version: 0.0.0\n"
python
def _get_pkg_meta(self): """ Try to find package metadata. """ logger = logging.getLogger('pyrocore.scripts.base.version_info') pkg_info = None warnings = [] for info_ext, info_name in (('.dist-info', 'METADATA'), ('.egg-info', 'PKG-INFO')): try: # Development setup pkg_path = os.path.join( __file__.split(__name__.replace('.', os.sep))[0], # containing path __name__.split(".")[0] # package name ) if os.path.exists(pkg_path + info_ext): pkg_path += info_ext else: globbed_paths = glob.glob(pkg_path + "-*-py%d.%d" % sys.version_info[:2] + info_ext) if len(globbed_paths) == 1: pkg_path = globbed_paths[0] elif globbed_paths: warnings.append("Found {} release-specific candidate versions in *{}" .format(len(globbed_paths), info_ext)) pkg_path = None else: globbed_paths = glob.glob(pkg_path + "-*" + info_ext) if len(globbed_paths) == 1: pkg_path = globbed_paths[0] else: warnings.append("Found {} candidate versions in *{}" .format(len(globbed_paths), info_ext)) pkg_path = None if pkg_path: with open(os.path.join(pkg_path, info_name)) as handle: pkg_info = handle.read() break except IOError: continue if not pkg_info: logger.warn("Software version cannot be determined! ({})".format(', '.join(warnings))) return pkg_info or "Version: 0.0.0\n"
[ "def", "_get_pkg_meta", "(", "self", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "'pyrocore.scripts.base.version_info'", ")", "pkg_info", "=", "None", "warnings", "=", "[", "]", "for", "info_ext", ",", "info_name", "in", "(", "(", "'.dist-info'", ",", "'METADATA'", ")", ",", "(", "'.egg-info'", ",", "'PKG-INFO'", ")", ")", ":", "try", ":", "# Development setup", "pkg_path", "=", "os", ".", "path", ".", "join", "(", "__file__", ".", "split", "(", "__name__", ".", "replace", "(", "'.'", ",", "os", ".", "sep", ")", ")", "[", "0", "]", ",", "# containing path", "__name__", ".", "split", "(", "\".\"", ")", "[", "0", "]", "# package name", ")", "if", "os", ".", "path", ".", "exists", "(", "pkg_path", "+", "info_ext", ")", ":", "pkg_path", "+=", "info_ext", "else", ":", "globbed_paths", "=", "glob", ".", "glob", "(", "pkg_path", "+", "\"-*-py%d.%d\"", "%", "sys", ".", "version_info", "[", ":", "2", "]", "+", "info_ext", ")", "if", "len", "(", "globbed_paths", ")", "==", "1", ":", "pkg_path", "=", "globbed_paths", "[", "0", "]", "elif", "globbed_paths", ":", "warnings", ".", "append", "(", "\"Found {} release-specific candidate versions in *{}\"", ".", "format", "(", "len", "(", "globbed_paths", ")", ",", "info_ext", ")", ")", "pkg_path", "=", "None", "else", ":", "globbed_paths", "=", "glob", ".", "glob", "(", "pkg_path", "+", "\"-*\"", "+", "info_ext", ")", "if", "len", "(", "globbed_paths", ")", "==", "1", ":", "pkg_path", "=", "globbed_paths", "[", "0", "]", "else", ":", "warnings", ".", "append", "(", "\"Found {} candidate versions in *{}\"", ".", "format", "(", "len", "(", "globbed_paths", ")", ",", "info_ext", ")", ")", "pkg_path", "=", "None", "if", "pkg_path", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "pkg_path", ",", "info_name", ")", ")", "as", "handle", ":", "pkg_info", "=", "handle", ".", "read", "(", ")", "break", "except", "IOError", ":", "continue", "if", "not", "pkg_info", ":", "logger", ".", "warn", "(", "\"Software version cannot be determined! ({})\"", ".", "format", "(", "', '", ".", "join", "(", "warnings", ")", ")", ")", "return", "pkg_info", "or", "\"Version: 0.0.0\\n\"" ]
Try to find package metadata.
[ "Try", "to", "find", "package", "metadata", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/base.py#L79-L121
7,379
pyroscope/pyrocore
src/pyrocore/scripts/base.py
ScriptBase.add_bool_option
def add_bool_option(self, *args, **kwargs): """ Add a boolean option. @keyword help: Option description. """ dest = [o for o in args if o.startswith("--")][0].replace("--", "").replace("-", "_") self.parser.add_option(dest=dest, action="store_true", default=False, help=kwargs['help'], *args)
python
def add_bool_option(self, *args, **kwargs): """ Add a boolean option. @keyword help: Option description. """ dest = [o for o in args if o.startswith("--")][0].replace("--", "").replace("-", "_") self.parser.add_option(dest=dest, action="store_true", default=False, help=kwargs['help'], *args)
[ "def", "add_bool_option", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "dest", "=", "[", "o", "for", "o", "in", "args", "if", "o", ".", "startswith", "(", "\"--\"", ")", "]", "[", "0", "]", ".", "replace", "(", "\"--\"", ",", "\"\"", ")", ".", "replace", "(", "\"-\"", ",", "\"_\"", ")", "self", ".", "parser", ".", "add_option", "(", "dest", "=", "dest", ",", "action", "=", "\"store_true\"", ",", "default", "=", "False", ",", "help", "=", "kwargs", "[", "'help'", "]", ",", "*", "args", ")" ]
Add a boolean option. @keyword help: Option description.
[ "Add", "a", "boolean", "option", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/base.py#L162-L169
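The destination attribute in add_bool_option is derived from the first long option by stripping "--" and turning dashes into underscores, then fed to optparse. A tiny standalone demo of that derivation (the module-level parser and the --dry-run flag are made-up examples):

import optparse

parser = optparse.OptionParser()

def add_bool_option(*args, **kwargs):
    """Register a store_true flag whose dest is derived from the long option name."""
    dest = [o for o in args if o.startswith('--')][0].replace('--', '').replace('-', '_')
    parser.add_option(dest=dest, action='store_true', default=False, help=kwargs['help'], *args)

add_bool_option('-n', '--dry-run', help="don't commit changes")
options, _ = parser.parse_args(['--dry-run'])
print(options.dry_run)    # True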
7,380
pyroscope/pyrocore
src/pyrocore/scripts/base.py
ScriptBase.add_value_option
def add_value_option(self, *args, **kwargs): """ Add a value option. @keyword dest: Destination attribute, derived from long option name if not given. @keyword action: How to handle the option. @keyword help: Option description. @keyword default: If given, add this value to the help string. """ kwargs['metavar'] = args[-1] if 'dest' not in kwargs: kwargs['dest'] = [o for o in args if o.startswith("--")][0].replace("--", "").replace("-", "_") if 'default' in kwargs and kwargs['default']: kwargs['help'] += " [%s]" % kwargs['default'] self.parser.add_option(*args[:-1], **kwargs)
python
def add_value_option(self, *args, **kwargs): """ Add a value option. @keyword dest: Destination attribute, derived from long option name if not given. @keyword action: How to handle the option. @keyword help: Option description. @keyword default: If given, add this value to the help string. """ kwargs['metavar'] = args[-1] if 'dest' not in kwargs: kwargs['dest'] = [o for o in args if o.startswith("--")][0].replace("--", "").replace("-", "_") if 'default' in kwargs and kwargs['default']: kwargs['help'] += " [%s]" % kwargs['default'] self.parser.add_option(*args[:-1], **kwargs)
[ "def", "add_value_option", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "'metavar'", "]", "=", "args", "[", "-", "1", "]", "if", "'dest'", "not", "in", "kwargs", ":", "kwargs", "[", "'dest'", "]", "=", "[", "o", "for", "o", "in", "args", "if", "o", ".", "startswith", "(", "\"--\"", ")", "]", "[", "0", "]", ".", "replace", "(", "\"--\"", ",", "\"\"", ")", ".", "replace", "(", "\"-\"", ",", "\"_\"", ")", "if", "'default'", "in", "kwargs", "and", "kwargs", "[", "'default'", "]", ":", "kwargs", "[", "'help'", "]", "+=", "\" [%s]\"", "%", "kwargs", "[", "'default'", "]", "self", ".", "parser", ".", "add_option", "(", "*", "args", "[", ":", "-", "1", "]", ",", "*", "*", "kwargs", ")" ]
Add a value option. @keyword dest: Destination attribute, derived from long option name if not given. @keyword action: How to handle the option. @keyword help: Option description. @keyword default: If given, add this value to the help string.
[ "Add", "a", "value", "option", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/base.py#L172-L185
7,381
pyroscope/pyrocore
src/pyrocore/scripts/base.py
ScriptBase.handle_completion
def handle_completion(self): """ Handle shell completion stuff. """ # We don't want these in the help, so handle them explicitely if len(sys.argv) > 1 and sys.argv[1].startswith("--help-completion-"): handler = getattr(self, sys.argv[1][2:].replace('-', '_'), None) if handler: print '\n'.join(sorted(handler())) self.STD_LOG_LEVEL = logging.DEBUG sys.exit(error.EX_OK)
python
def handle_completion(self): """ Handle shell completion stuff. """ # We don't want these in the help, so handle them explicitely if len(sys.argv) > 1 and sys.argv[1].startswith("--help-completion-"): handler = getattr(self, sys.argv[1][2:].replace('-', '_'), None) if handler: print '\n'.join(sorted(handler())) self.STD_LOG_LEVEL = logging.DEBUG sys.exit(error.EX_OK)
[ "def", "handle_completion", "(", "self", ")", ":", "# We don't want these in the help, so handle them explicitely", "if", "len", "(", "sys", ".", "argv", ")", ">", "1", "and", "sys", ".", "argv", "[", "1", "]", ".", "startswith", "(", "\"--help-completion-\"", ")", ":", "handler", "=", "getattr", "(", "self", ",", "sys", ".", "argv", "[", "1", "]", "[", "2", ":", "]", ".", "replace", "(", "'-'", ",", "'_'", ")", ",", "None", ")", "if", "handler", ":", "print", "'\\n'", ".", "join", "(", "sorted", "(", "handler", "(", ")", ")", ")", "self", ".", "STD_LOG_LEVEL", "=", "logging", ".", "DEBUG", "sys", ".", "exit", "(", "error", ".", "EX_OK", ")" ]
Handle shell completion stuff.
[ "Handle", "shell", "completion", "stuff", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/base.py#L224-L233
7,382
pyroscope/pyrocore
src/pyrocore/scripts/base.py
ScriptBase.help_completion_options
def help_completion_options(self): """ Return options of this command. """ for opt in self.parser.option_list: for lopt in opt._long_opts: yield lopt
python
def help_completion_options(self): """ Return options of this command. """ for opt in self.parser.option_list: for lopt in opt._long_opts: yield lopt
[ "def", "help_completion_options", "(", "self", ")", ":", "for", "opt", "in", "self", ".", "parser", ".", "option_list", ":", "for", "lopt", "in", "opt", ".", "_long_opts", ":", "yield", "lopt" ]
Return options of this command.
[ "Return", "options", "of", "this", "command", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/base.py#L236-L241
7,383
pyroscope/pyrocore
src/pyrocore/scripts/base.py
ScriptBase.fatal
def fatal(self, msg, exc=None): """ Exit on a fatal error. """ if exc is not None: self.LOG.fatal("%s (%s)" % (msg, exc)) if self.options.debug: return # let the caller re-raise it else: self.LOG.fatal(msg) sys.exit(error.EX_SOFTWARE)
python
def fatal(self, msg, exc=None): """ Exit on a fatal error. """ if exc is not None: self.LOG.fatal("%s (%s)" % (msg, exc)) if self.options.debug: return # let the caller re-raise it else: self.LOG.fatal(msg) sys.exit(error.EX_SOFTWARE)
[ "def", "fatal", "(", "self", ",", "msg", ",", "exc", "=", "None", ")", ":", "if", "exc", "is", "not", "None", ":", "self", ".", "LOG", ".", "fatal", "(", "\"%s (%s)\"", "%", "(", "msg", ",", "exc", ")", ")", "if", "self", ".", "options", ".", "debug", ":", "return", "# let the caller re-raise it", "else", ":", "self", ".", "LOG", ".", "fatal", "(", "msg", ")", "sys", ".", "exit", "(", "error", ".", "EX_SOFTWARE", ")" ]
Exit on a fatal error.
[ "Exit", "on", "a", "fatal", "error", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/base.py#L244-L253
7,384
pyroscope/pyrocore
src/pyrocore/scripts/base.py
ScriptBase.run
def run(self): """ The main program skeleton. """ log_total = True try: try: # Preparation steps self.get_options() # Template method with the tool's main loop self.mainloop() except error.LoggableError, exc: if self.options.debug: raise # Log errors caused by invalid user input try: msg = str(exc) except UnicodeError: msg = unicode(exc, "UTF-8") self.LOG.error(msg) sys.exit(error.EX_SOFTWARE) except KeyboardInterrupt, exc: if self.options.debug: raise sys.stderr.write("\n\nAborted by CTRL-C!\n") sys.stderr.flush() sys.exit(error.EX_TEMPFAIL) except IOError, exc: # [Errno 32] Broken pipe? if exc.errno == errno.EPIPE: sys.stderr.write("\n%s, exiting!\n" % exc) sys.stderr.flush() # Monkey patch to prevent an exception during logging shutdown try: handlers = logging._handlerList except AttributeError: pass else: for handler in handlers: try: handler.flush = lambda *_: None except AttributeError: pass # skip special handlers log_total = False sys.exit(error.EX_IOERR) else: raise finally: # Shut down if log_total and self.options: ## No time logging on --version and such running_time = time.time() - self.startup self.LOG.log(self.STD_LOG_LEVEL, "Total time: %.3f seconds." % running_time) logging.shutdown() # Special exit code? if self.return_code: sys.exit(self.return_code)
python
def run(self): """ The main program skeleton. """ log_total = True try: try: # Preparation steps self.get_options() # Template method with the tool's main loop self.mainloop() except error.LoggableError, exc: if self.options.debug: raise # Log errors caused by invalid user input try: msg = str(exc) except UnicodeError: msg = unicode(exc, "UTF-8") self.LOG.error(msg) sys.exit(error.EX_SOFTWARE) except KeyboardInterrupt, exc: if self.options.debug: raise sys.stderr.write("\n\nAborted by CTRL-C!\n") sys.stderr.flush() sys.exit(error.EX_TEMPFAIL) except IOError, exc: # [Errno 32] Broken pipe? if exc.errno == errno.EPIPE: sys.stderr.write("\n%s, exiting!\n" % exc) sys.stderr.flush() # Monkey patch to prevent an exception during logging shutdown try: handlers = logging._handlerList except AttributeError: pass else: for handler in handlers: try: handler.flush = lambda *_: None except AttributeError: pass # skip special handlers log_total = False sys.exit(error.EX_IOERR) else: raise finally: # Shut down if log_total and self.options: ## No time logging on --version and such running_time = time.time() - self.startup self.LOG.log(self.STD_LOG_LEVEL, "Total time: %.3f seconds." % running_time) logging.shutdown() # Special exit code? if self.return_code: sys.exit(self.return_code)
[ "def", "run", "(", "self", ")", ":", "log_total", "=", "True", "try", ":", "try", ":", "# Preparation steps", "self", ".", "get_options", "(", ")", "# Template method with the tool's main loop", "self", ".", "mainloop", "(", ")", "except", "error", ".", "LoggableError", ",", "exc", ":", "if", "self", ".", "options", ".", "debug", ":", "raise", "# Log errors caused by invalid user input", "try", ":", "msg", "=", "str", "(", "exc", ")", "except", "UnicodeError", ":", "msg", "=", "unicode", "(", "exc", ",", "\"UTF-8\"", ")", "self", ".", "LOG", ".", "error", "(", "msg", ")", "sys", ".", "exit", "(", "error", ".", "EX_SOFTWARE", ")", "except", "KeyboardInterrupt", ",", "exc", ":", "if", "self", ".", "options", ".", "debug", ":", "raise", "sys", ".", "stderr", ".", "write", "(", "\"\\n\\nAborted by CTRL-C!\\n\"", ")", "sys", ".", "stderr", ".", "flush", "(", ")", "sys", ".", "exit", "(", "error", ".", "EX_TEMPFAIL", ")", "except", "IOError", ",", "exc", ":", "# [Errno 32] Broken pipe?", "if", "exc", ".", "errno", "==", "errno", ".", "EPIPE", ":", "sys", ".", "stderr", ".", "write", "(", "\"\\n%s, exiting!\\n\"", "%", "exc", ")", "sys", ".", "stderr", ".", "flush", "(", ")", "# Monkey patch to prevent an exception during logging shutdown", "try", ":", "handlers", "=", "logging", ".", "_handlerList", "except", "AttributeError", ":", "pass", "else", ":", "for", "handler", "in", "handlers", ":", "try", ":", "handler", ".", "flush", "=", "lambda", "*", "_", ":", "None", "except", "AttributeError", ":", "pass", "# skip special handlers", "log_total", "=", "False", "sys", ".", "exit", "(", "error", ".", "EX_IOERR", ")", "else", ":", "raise", "finally", ":", "# Shut down", "if", "log_total", "and", "self", ".", "options", ":", "## No time logging on --version and such", "running_time", "=", "time", ".", "time", "(", ")", "-", "self", ".", "startup", "self", ".", "LOG", ".", "log", "(", "self", ".", "STD_LOG_LEVEL", ",", "\"Total time: %.3f seconds.\"", "%", "running_time", ")", "logging", ".", "shutdown", "(", ")", "# Special exit code?", "if", "self", ".", "return_code", ":", "sys", ".", "exit", "(", "self", ".", "return_code", ")" ]
The main program skeleton.
[ "The", "main", "program", "skeleton", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/base.py#L256-L317
7,385
pyroscope/pyrocore
src/pyrocore/scripts/base.py
ScriptBaseWithConfig.add_options
def add_options(self): """ Add configuration options. """ super(ScriptBaseWithConfig, self).add_options() self.add_value_option("--config-dir", "DIR", help="configuration directory [{}]".format(os.environ.get('PYRO_CONFIG_DIR', self.CONFIG_DIR_DEFAULT))) self.add_value_option("--config-file", "PATH", action="append", default=[], help="additional config file(s) to read") self.add_value_option("-D", "--define", "KEY=VAL [-D ...]", default=[], action="append", dest="defines", help="override configuration attributes")
python
def add_options(self): """ Add configuration options. """ super(ScriptBaseWithConfig, self).add_options() self.add_value_option("--config-dir", "DIR", help="configuration directory [{}]".format(os.environ.get('PYRO_CONFIG_DIR', self.CONFIG_DIR_DEFAULT))) self.add_value_option("--config-file", "PATH", action="append", default=[], help="additional config file(s) to read") self.add_value_option("-D", "--define", "KEY=VAL [-D ...]", default=[], action="append", dest="defines", help="override configuration attributes")
[ "def", "add_options", "(", "self", ")", ":", "super", "(", "ScriptBaseWithConfig", ",", "self", ")", ".", "add_options", "(", ")", "self", ".", "add_value_option", "(", "\"--config-dir\"", ",", "\"DIR\"", ",", "help", "=", "\"configuration directory [{}]\"", ".", "format", "(", "os", ".", "environ", ".", "get", "(", "'PYRO_CONFIG_DIR'", ",", "self", ".", "CONFIG_DIR_DEFAULT", ")", ")", ")", "self", ".", "add_value_option", "(", "\"--config-file\"", ",", "\"PATH\"", ",", "action", "=", "\"append\"", ",", "default", "=", "[", "]", ",", "help", "=", "\"additional config file(s) to read\"", ")", "self", ".", "add_value_option", "(", "\"-D\"", ",", "\"--define\"", ",", "\"KEY=VAL [-D ...]\"", ",", "default", "=", "[", "]", ",", "action", "=", "\"append\"", ",", "dest", "=", "\"defines\"", ",", "help", "=", "\"override configuration attributes\"", ")" ]
Add configuration options.
[ "Add", "configuration", "options", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/base.py#L338-L350
7,386
pyroscope/pyrocore
src/pyrocore/scripts/base.py
ScriptBaseWithConfig.check_for_connection
def check_for_connection(self): """ Scan arguments for a `@name` one. """ for idx, arg in enumerate(self.args): if arg.startswith('@'): if arg[1:] not in config.connections: self.parser.error("Undefined connection '{}'!".format(arg[1:])) config.scgi_url = config.connections[arg[1:]] self.LOG.debug("Switched to connection %s (%s)", arg[1:], config.scgi_url) del self.args[idx] break
python
def check_for_connection(self): """ Scan arguments for a `@name` one. """ for idx, arg in enumerate(self.args): if arg.startswith('@'): if arg[1:] not in config.connections: self.parser.error("Undefined connection '{}'!".format(arg[1:])) config.scgi_url = config.connections[arg[1:]] self.LOG.debug("Switched to connection %s (%s)", arg[1:], config.scgi_url) del self.args[idx] break
[ "def", "check_for_connection", "(", "self", ")", ":", "for", "idx", ",", "arg", "in", "enumerate", "(", "self", ".", "args", ")", ":", "if", "arg", ".", "startswith", "(", "'@'", ")", ":", "if", "arg", "[", "1", ":", "]", "not", "in", "config", ".", "connections", ":", "self", ".", "parser", ".", "error", "(", "\"Undefined connection '{}'!\"", ".", "format", "(", "arg", "[", "1", ":", "]", ")", ")", "config", ".", "scgi_url", "=", "config", ".", "connections", "[", "arg", "[", "1", ":", "]", "]", "self", ".", "LOG", ".", "debug", "(", "\"Switched to connection %s (%s)\"", ",", "arg", "[", "1", ":", "]", ",", "config", ".", "scgi_url", ")", "del", "self", ".", "args", "[", "idx", "]", "break" ]
Scan arguments for a `@name` one.
[ "Scan", "arguments", "for", "a" ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/base.py#L374-L384
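The same '@name' scan can be exercised without the script scaffolding: find the first argument starting with '@', look it up in the connections mapping, and drop it from the argument list. The helper name pick_connection and the sample connection entries below are hypothetical.

def pick_connection(args, connections):
    """Return (scgi_url, remaining_args) for the first '@name' argument, if any."""
    for idx, arg in enumerate(args):
        if arg.startswith('@'):
            name = arg[1:]
            if name not in connections:
                raise SystemExit("Undefined connection '%s'!" % name)
            return connections[name], args[:idx] + args[idx + 1:]
    return None, args

connections = {'local': 'scgi://~/rtorrent/.scgi_local'}   # hypothetical mapping
print(pick_connection(['@local', 'ratio>1'], connections))
# -> ('scgi://~/rtorrent/.scgi_local', ['ratio>1'])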
7,387
pyroscope/pyrocore
src/pyrocore/scripts/base.py
PromptDecorator.quit
def quit(self): """ Exit the program due to user's choices. """ self.script.LOG.warn("Abort due to user choice!") sys.exit(self.QUIT_RC)
python
def quit(self): """ Exit the program due to user's choices. """ self.script.LOG.warn("Abort due to user choice!") sys.exit(self.QUIT_RC)
[ "def", "quit", "(", "self", ")", ":", "self", ".", "script", ".", "LOG", ".", "warn", "(", "\"Abort due to user choice!\"", ")", "sys", ".", "exit", "(", "self", ".", "QUIT_RC", ")" ]
Exit the program due to user's choices.
[ "Exit", "the", "program", "due", "to", "user", "s", "choices", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/base.py#L443-L447
7,388
pyroscope/pyrocore
src/pyrocore/daemon/webapp.py
redirect
def redirect(req, _log=pymagic.get_lazy_logger("redirect")): """ Redirect controller to emit a HTTP 301. """ log = req.environ.get("wsgilog.logger", _log) target = req.relative_url(req.urlvars.to) log.info("Redirecting '%s' to '%s'" % (req.url, target)) return exc.HTTPMovedPermanently(location=target)
python
def redirect(req, _log=pymagic.get_lazy_logger("redirect")): """ Redirect controller to emit a HTTP 301. """ log = req.environ.get("wsgilog.logger", _log) target = req.relative_url(req.urlvars.to) log.info("Redirecting '%s' to '%s'" % (req.url, target)) return exc.HTTPMovedPermanently(location=target)
[ "def", "redirect", "(", "req", ",", "_log", "=", "pymagic", ".", "get_lazy_logger", "(", "\"redirect\"", ")", ")", ":", "log", "=", "req", ".", "environ", ".", "get", "(", "\"wsgilog.logger\"", ",", "_log", ")", "target", "=", "req", ".", "relative_url", "(", "req", ".", "urlvars", ".", "to", ")", "log", ".", "info", "(", "\"Redirecting '%s' to '%s'\"", "%", "(", "req", ".", "url", ",", "target", ")", ")", "return", "exc", ".", "HTTPMovedPermanently", "(", "location", "=", "target", ")" ]
Redirect controller to emit a HTTP 301.
[ "Redirect", "controller", "to", "emit", "a", "HTTP", "301", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/daemon/webapp.py#L228-L234
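`redirect` is a plain WSGI/WebOb controller: the router stores the target in `req.urlvars.to`, the controller resolves it against the request URL with `relative_url()`, and the returned `HTTPMovedPermanently` object is itself a WSGI application that renders the 301. A rough standalone equivalent using WebOb directly (the hard-coded target is an assumption for the example):

from webob import Request, exc

def simple_redirect(environ, start_response, to="/static/index.html"):
    # Resolve the target against the request URL, then let the 301 render itself.
    req = Request(environ)
    target = req.relative_url(to)
    return exc.HTTPMovedPermanently(location=target)(environ, start_response)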
7,389
pyroscope/pyrocore
src/pyrocore/daemon/webapp.py
make_app
def make_app(httpd_config): """ Factory for the monitoring webapp. """ #mimetypes.add_type('image/vnd.microsoft.icon', '.ico') # Default paths to serve static file from htdocs_paths = [ os.path.realpath(os.path.join(config.config_dir, "htdocs")), os.path.join(os.path.dirname(config.__file__), "data", "htdocs"), ] return (Router() .add_route("/", controller=redirect, to="/static/index.html") .add_route("/favicon.ico", controller=redirect, to="/static/favicon.ico") .add_route("/static/{filepath:.+}", controller=StaticFolders(htdocs_paths)) .add_route("/json/{action}", controller=JsonController(**httpd_config.json)) )
python
def make_app(httpd_config): """ Factory for the monitoring webapp. """ #mimetypes.add_type('image/vnd.microsoft.icon', '.ico') # Default paths to serve static file from htdocs_paths = [ os.path.realpath(os.path.join(config.config_dir, "htdocs")), os.path.join(os.path.dirname(config.__file__), "data", "htdocs"), ] return (Router() .add_route("/", controller=redirect, to="/static/index.html") .add_route("/favicon.ico", controller=redirect, to="/static/favicon.ico") .add_route("/static/{filepath:.+}", controller=StaticFolders(htdocs_paths)) .add_route("/json/{action}", controller=JsonController(**httpd_config.json)) )
[ "def", "make_app", "(", "httpd_config", ")", ":", "#mimetypes.add_type('image/vnd.microsoft.icon', '.ico')", "# Default paths to serve static file from", "htdocs_paths", "=", "[", "os", ".", "path", ".", "realpath", "(", "os", ".", "path", ".", "join", "(", "config", ".", "config_dir", ",", "\"htdocs\"", ")", ")", ",", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "config", ".", "__file__", ")", ",", "\"data\"", ",", "\"htdocs\"", ")", ",", "]", "return", "(", "Router", "(", ")", ".", "add_route", "(", "\"/\"", ",", "controller", "=", "redirect", ",", "to", "=", "\"/static/index.html\"", ")", ".", "add_route", "(", "\"/favicon.ico\"", ",", "controller", "=", "redirect", ",", "to", "=", "\"/static/favicon.ico\"", ")", ".", "add_route", "(", "\"/static/{filepath:.+}\"", ",", "controller", "=", "StaticFolders", "(", "htdocs_paths", ")", ")", ".", "add_route", "(", "\"/json/{action}\"", ",", "controller", "=", "JsonController", "(", "*", "*", "httpd_config", ".", "json", ")", ")", ")" ]
Factory for the monitoring webapp.
[ "Factory", "for", "the", "monitoring", "webapp", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/daemon/webapp.py#L237-L253
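`make_app` returns the fluent `Router` itself as the WSGI application, with static files looked up first in the user's configuration directory and then in the package data. A sketch of serving such an app locally with the standard library; the shape of `httpd_config` here is assumed, not taken from pyrocore's actual config files:

from wsgiref.simple_server import make_server

class HttpdConfig(object):
    # Hypothetical stand-in: .json holds the keyword arguments for JsonController
    json = {}

app = make_app(HttpdConfig())                        # the chained Router instance
make_server("127.0.0.1", 8042, app).serve_forever()  # http://127.0.0.1:8042/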
7,390
pyroscope/pyrocore
src/pyrocore/daemon/webapp.py
JsonController.guarded
def guarded(self, func, *args, **kwargs): """ Call a function, return None on errors. """ try: return func(*args, **kwargs) except (EnvironmentError, error.LoggableError, xmlrpc.ERRORS) as g_exc: if func.__name__ not in self.ERRORS_LOGGED: self.LOG.warn("While calling '%s': %s" % (func.__name__, g_exc)) self.ERRORS_LOGGED.add(func.__name__) return None
python
def guarded(self, func, *args, **kwargs): """ Call a function, return None on errors. """ try: return func(*args, **kwargs) except (EnvironmentError, error.LoggableError, xmlrpc.ERRORS) as g_exc: if func.__name__ not in self.ERRORS_LOGGED: self.LOG.warn("While calling '%s': %s" % (func.__name__, g_exc)) self.ERRORS_LOGGED.add(func.__name__) return None
[ "def", "guarded", "(", "self", ",", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", "(", "EnvironmentError", ",", "error", ".", "LoggableError", ",", "xmlrpc", ".", "ERRORS", ")", "as", "g_exc", ":", "if", "func", ".", "__name__", "not", "in", "self", ".", "ERRORS_LOGGED", ":", "self", ".", "LOG", ".", "warn", "(", "\"While calling '%s': %s\"", "%", "(", "func", ".", "__name__", ",", "g_exc", ")", ")", "self", ".", "ERRORS_LOGGED", ".", "add", "(", "func", ".", "__name__", ")", "return", "None" ]
Call a function, return None on errors.
[ "Call", "a", "function", "return", "None", "on", "errors", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/daemon/webapp.py#L112-L121
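`guarded` wraps any callable, converts a known family of errors into a `None` result, and uses the `ERRORS_LOGGED` set so each failing function is only warned about once. The same pattern reduced to a free function (the exception list is trimmed to the standard library for this sketch):

import logging

LOG = logging.getLogger("guarded")
_errors_logged = set()

def guarded(func, *args, **kwargs):
    """Call func(); on environment errors, warn once per function and return None."""
    try:
        return func(*args, **kwargs)
    except EnvironmentError as g_exc:
        if func.__name__ not in _errors_logged:
            LOG.warning("While calling '%s': %s", func.__name__, g_exc)
            _errors_logged.add(func.__name__)
        return None

print(guarded(open, "/no/such/file"))  # warns once, prints None
print(guarded(open, "/no/such/file"))  # silent this time, still None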
7,391
pyroscope/pyrocore
src/pyrocore/daemon/webapp.py
JsonController.json_engine
def json_engine(self, req): # pylint: disable=R0201,W0613 """ Return torrent engine data. """ try: return stats.engine_data(config.engine) except (error.LoggableError, xmlrpc.ERRORS) as torrent_exc: raise exc.HTTPInternalServerError(str(torrent_exc))
python
def json_engine(self, req): # pylint: disable=R0201,W0613 """ Return torrent engine data. """ try: return stats.engine_data(config.engine) except (error.LoggableError, xmlrpc.ERRORS) as torrent_exc: raise exc.HTTPInternalServerError(str(torrent_exc))
[ "def", "json_engine", "(", "self", ",", "req", ")", ":", "# pylint: disable=R0201,W0613", "try", ":", "return", "stats", ".", "engine_data", "(", "config", ".", "engine", ")", "except", "(", "error", ".", "LoggableError", ",", "xmlrpc", ".", "ERRORS", ")", "as", "torrent_exc", ":", "raise", "exc", ".", "HTTPInternalServerError", "(", "str", "(", "torrent_exc", ")", ")" ]
Return torrent engine data.
[ "Return", "torrent", "engine", "data", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/daemon/webapp.py#L124-L130
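`json_engine` translates backend failures into a WebOb `HTTPInternalServerError`, so the JSON endpoint answers with a clean 500 instead of an unhandled traceback. The same translation pattern in isolation, with the failing backend call simulated (the stub name and error message are made up):

from webob import exc

def json_engine_stub(req):
    try:
        raise RuntimeError("rTorrent connection refused")   # simulated backend failure
    except RuntimeError as torrent_exc:
        raise exc.HTTPInternalServerError(str(torrent_exc))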
7,392
pyroscope/pyrocore
src/pyrocore/daemon/webapp.py
JsonController.json_charts
def json_charts(self, req): """ Return charting data. """ disk_used, disk_total, disk_detail = 0, 0, [] for disk_usage_path in self.cfg.disk_usage_path.split(os.pathsep): disk_usage = self.guarded(psutil.disk_usage, os.path.expanduser(disk_usage_path.strip())) if disk_usage: disk_used += disk_usage.used disk_total += disk_usage.total disk_detail.append((disk_usage.used, disk_usage.total)) data = dict( engine = self.json_engine(req), uptime = time.time() - psutil.BOOT_TIME, # pylint: disable=no-member fqdn = self.guarded(socket.getfqdn), cpu_usage = self.guarded(psutil.cpu_percent, 0), ram_usage = self.guarded(psutil.virtual_memory), swap_usage = self.guarded(psutil.swap_memory), disk_usage = (disk_used, disk_total, disk_detail) if disk_total else None, disk_io = self.guarded(psutil.disk_io_counters), net_io = self.guarded(psutil.net_io_counters), ) return data
python
def json_charts(self, req): """ Return charting data. """ disk_used, disk_total, disk_detail = 0, 0, [] for disk_usage_path in self.cfg.disk_usage_path.split(os.pathsep): disk_usage = self.guarded(psutil.disk_usage, os.path.expanduser(disk_usage_path.strip())) if disk_usage: disk_used += disk_usage.used disk_total += disk_usage.total disk_detail.append((disk_usage.used, disk_usage.total)) data = dict( engine = self.json_engine(req), uptime = time.time() - psutil.BOOT_TIME, # pylint: disable=no-member fqdn = self.guarded(socket.getfqdn), cpu_usage = self.guarded(psutil.cpu_percent, 0), ram_usage = self.guarded(psutil.virtual_memory), swap_usage = self.guarded(psutil.swap_memory), disk_usage = (disk_used, disk_total, disk_detail) if disk_total else None, disk_io = self.guarded(psutil.disk_io_counters), net_io = self.guarded(psutil.net_io_counters), ) return data
[ "def", "json_charts", "(", "self", ",", "req", ")", ":", "disk_used", ",", "disk_total", ",", "disk_detail", "=", "0", ",", "0", ",", "[", "]", "for", "disk_usage_path", "in", "self", ".", "cfg", ".", "disk_usage_path", ".", "split", "(", "os", ".", "pathsep", ")", ":", "disk_usage", "=", "self", ".", "guarded", "(", "psutil", ".", "disk_usage", ",", "os", ".", "path", ".", "expanduser", "(", "disk_usage_path", ".", "strip", "(", ")", ")", ")", "if", "disk_usage", ":", "disk_used", "+=", "disk_usage", ".", "used", "disk_total", "+=", "disk_usage", ".", "total", "disk_detail", ".", "append", "(", "(", "disk_usage", ".", "used", ",", "disk_usage", ".", "total", ")", ")", "data", "=", "dict", "(", "engine", "=", "self", ".", "json_engine", "(", "req", ")", ",", "uptime", "=", "time", ".", "time", "(", ")", "-", "psutil", ".", "BOOT_TIME", ",", "# pylint: disable=no-member", "fqdn", "=", "self", ".", "guarded", "(", "socket", ".", "getfqdn", ")", ",", "cpu_usage", "=", "self", ".", "guarded", "(", "psutil", ".", "cpu_percent", ",", "0", ")", ",", "ram_usage", "=", "self", ".", "guarded", "(", "psutil", ".", "virtual_memory", ")", ",", "swap_usage", "=", "self", ".", "guarded", "(", "psutil", ".", "swap_memory", ")", ",", "disk_usage", "=", "(", "disk_used", ",", "disk_total", ",", "disk_detail", ")", "if", "disk_total", "else", "None", ",", "disk_io", "=", "self", ".", "guarded", "(", "psutil", ".", "disk_io_counters", ")", ",", "net_io", "=", "self", ".", "guarded", "(", "psutil", ".", "net_io_counters", ")", ",", ")", "return", "data" ]
Return charting data.
[ "Return", "charting", "data", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/daemon/webapp.py#L133-L155
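`json_charts` sums disk usage over an `os.pathsep`-separated list of paths and bundles it with per-host psutil counters; because every probe goes through `guarded`, a missing metric degrades to `None` rather than breaking the whole response. Note that `psutil.BOOT_TIME` is the legacy attribute; current psutil releases expose `psutil.boot_time()` instead. A minimal snapshot along the same lines against the modern psutil API (the field selection is illustrative):

import os, socket, time
import psutil

def charts_snapshot(paths="~"):
    used = total = 0
    for path in paths.split(os.pathsep):
        du = psutil.disk_usage(os.path.expanduser(path.strip()))
        used += du.used
        total += du.total
    return dict(
        uptime=time.time() - psutil.boot_time(),
        fqdn=socket.getfqdn(),
        cpu_usage=psutil.cpu_percent(interval=0),
        ram_usage=psutil.virtual_memory().percent,
        disk_usage=(used, total),
    )

print(charts_snapshot())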
7,393
pyroscope/pyrocore
src/pyrocore/daemon/webapp.py
Router.parse_route
def parse_route(cls, template): """ Parse a route definition, and return the compiled regex that matches it. """ regex = '' last_pos = 0 for match in cls.ROUTES_RE.finditer(template): regex += re.escape(template[last_pos:match.start()]) var_name = match.group(1) expr = match.group(2) or '[^/]+' expr = '(?P<%s>%s)' % (var_name, expr) regex += expr last_pos = match.end() regex += re.escape(template[last_pos:]) regex = '^%s$' % regex return re.compile(regex)
python
def parse_route(cls, template): """ Parse a route definition, and return the compiled regex that matches it. """ regex = '' last_pos = 0 for match in cls.ROUTES_RE.finditer(template): regex += re.escape(template[last_pos:match.start()]) var_name = match.group(1) expr = match.group(2) or '[^/]+' expr = '(?P<%s>%s)' % (var_name, expr) regex += expr last_pos = match.end() regex += re.escape(template[last_pos:]) regex = '^%s$' % regex return re.compile(regex)
[ "def", "parse_route", "(", "cls", ",", "template", ")", ":", "regex", "=", "''", "last_pos", "=", "0", "for", "match", "in", "cls", ".", "ROUTES_RE", ".", "finditer", "(", "template", ")", ":", "regex", "+=", "re", ".", "escape", "(", "template", "[", "last_pos", ":", "match", ".", "start", "(", ")", "]", ")", "var_name", "=", "match", ".", "group", "(", "1", ")", "expr", "=", "match", ".", "group", "(", "2", ")", "or", "'[^/]+'", "expr", "=", "'(?P<%s>%s)'", "%", "(", "var_name", ",", "expr", ")", "regex", "+=", "expr", "last_pos", "=", "match", ".", "end", "(", ")", "regex", "+=", "re", ".", "escape", "(", "template", "[", "last_pos", ":", "]", ")", "regex", "=", "'^%s$'", "%", "regex", "return", "re", ".", "compile", "(", "regex", ")" ]
Parse a route definition, and return the compiled regex that matches it.
[ "Parse", "a", "route", "definition", "and", "return", "the", "compiled", "regex", "that", "matches", "it", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/daemon/webapp.py#L173-L190
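`parse_route` turns a template such as /static/{filepath:.+} into an anchored regex with named groups, where the optional part after the colon overrides the default [^/]+ segment pattern. The class-level ROUTES_RE is defined elsewhere in the module; the pattern below is an assumed equivalent, used only to demonstrate the transformation:

import re

# Assumed placeholder syntax: {name} or {name:regex}
ROUTES_RE = re.compile(r"\{(\w+)(?::([^}]+))?\}")

def parse_route(template):
    regex, last_pos = '', 0
    for match in ROUTES_RE.finditer(template):
        regex += re.escape(template[last_pos:match.start()])
        expr = match.group(2) or '[^/]+'
        regex += '(?P<%s>%s)' % (match.group(1), expr)
        last_pos = match.end()
    regex += re.escape(template[last_pos:])
    return re.compile('^%s$' % regex)

rule = parse_route("/static/{filepath:.+}")
print(rule.match("/static/css/site.css").groupdict())  # {'filepath': 'css/site.css'}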
7,394
pyroscope/pyrocore
src/pyrocore/daemon/webapp.py
Router.add_route
def add_route(self, template, controller, **kwargs): """ Add a route definition `controller` can be either a controller instance, or the name of a callable that will be imported. """ if isinstance(controller, basestring): controller = pymagic.import_name(controller) self.routes.append((self.parse_route(template), controller, kwargs)) return self
python
def add_route(self, template, controller, **kwargs): """ Add a route definition `controller` can be either a controller instance, or the name of a callable that will be imported. """ if isinstance(controller, basestring): controller = pymagic.import_name(controller) self.routes.append((self.parse_route(template), controller, kwargs)) return self
[ "def", "add_route", "(", "self", ",", "template", ",", "controller", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "controller", ",", "basestring", ")", ":", "controller", "=", "pymagic", ".", "import_name", "(", "controller", ")", "self", ".", "routes", ".", "append", "(", "(", "self", ".", "parse_route", "(", "template", ")", ",", "controller", ",", "kwargs", ")", ")", "return", "self" ]
Add a route definition. `controller` can be either a controller instance, or the name of a callable that will be imported.
[ "Add", "a", "route", "definition" ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/daemon/webapp.py#L198-L209
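Since `add_route` returns `self`, routes can be declared fluently, and passing a dotted string instead of a controller object defers the import to `pymagic.import_name`. A brief usage sketch (the dotted controller path is invented for illustration):

router = (Router()
    .add_route("/", controller=redirect, to="/static/index.html")
    .add_route("/api/{action}", controller="myapp.web.ApiController")  # imported lazily by name
)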
7,395
pyroscope/pyrocore
src/pyrocore/torrent/engine.py
_duration
def _duration(start, end): """ Return time delta. """ if start and end: if start > end: return None else: return end - start elif start: return time.time() - start else: return None
python
def _duration(start, end): """ Return time delta. """ if start and end: if start > end: return None else: return end - start elif start: return time.time() - start else: return None
[ "def", "_duration", "(", "start", ",", "end", ")", ":", "if", "start", "and", "end", ":", "if", "start", ">", "end", ":", "return", "None", "else", ":", "return", "end", "-", "start", "elif", "start", ":", "return", "time", ".", "time", "(", ")", "-", "start", "else", ":", "return", "None" ]
Return time delta.
[ "Return", "time", "delta", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/engine.py#L53-L64
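`_duration` covers three cases: both timestamps known (with inverted ranges clamped to `None`), only a start time (elapsed time up to now), and no start at all. A quick illustration of each branch:

import time

print(_duration(100.0, 160.0))                   # 60.0
print(_duration(160.0, 100.0))                   # None  (end before start)
print(round(_duration(time.time() - 30, None)))  # ~30, item still active
print(_duration(None, None))                     # None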
7,396
pyroscope/pyrocore
src/pyrocore/torrent/engine.py
_fmt_files
def _fmt_files(filelist): """ Produce a file listing. """ depth = max(i.path.count('/') for i in filelist) pad = ['\uFFFE'] * depth base_indent = ' ' * 38 indent = 0 result = [] prev_path = pad sorted_files = sorted((i.path.split('/')[:-1]+pad, i.path.rsplit('/', 1)[-1], i) for i in filelist) for path, name, fileinfo in sorted_files: path = path[:depth] if path != prev_path: common = min([depth] + [idx for idx, (dirname, prev_name) in enumerate(zip(path, prev_path)) if dirname != prev_name ]) #result.append("!!%r %r" % (indent, common)) #result.append("!!%r" % (prev_path,)) #result.append("!!%r" % (path,)) while indent > common: indent -= 1 result.append("%s%s/" % (base_indent, ' ' * indent)) for dirname in path[common:]: if dirname == '\uFFFE': break result.append("%s%s\\ %s" % (base_indent, ' ' * indent, dirname)) indent += 1 ##result.append("!!%r %r" % (path, name)) result.append(" %s %s %s %s| %s" % ( {0: "off ", 1: " ", 2: "high"}.get(fileinfo.prio, "????"), fmt.iso_datetime(fileinfo.mtime), fmt.human_size(fileinfo.size), ' ' * indent, name, )) prev_path = path while indent > 0: indent -= 1 result.append("%s%s/" % (base_indent, ' ' * indent)) result.append("%s= %d file(s)" % (base_indent, len(filelist))) return '\n'.join(result)
python
def _fmt_files(filelist): """ Produce a file listing. """ depth = max(i.path.count('/') for i in filelist) pad = ['\uFFFE'] * depth base_indent = ' ' * 38 indent = 0 result = [] prev_path = pad sorted_files = sorted((i.path.split('/')[:-1]+pad, i.path.rsplit('/', 1)[-1], i) for i in filelist) for path, name, fileinfo in sorted_files: path = path[:depth] if path != prev_path: common = min([depth] + [idx for idx, (dirname, prev_name) in enumerate(zip(path, prev_path)) if dirname != prev_name ]) #result.append("!!%r %r" % (indent, common)) #result.append("!!%r" % (prev_path,)) #result.append("!!%r" % (path,)) while indent > common: indent -= 1 result.append("%s%s/" % (base_indent, ' ' * indent)) for dirname in path[common:]: if dirname == '\uFFFE': break result.append("%s%s\\ %s" % (base_indent, ' ' * indent, dirname)) indent += 1 ##result.append("!!%r %r" % (path, name)) result.append(" %s %s %s %s| %s" % ( {0: "off ", 1: " ", 2: "high"}.get(fileinfo.prio, "????"), fmt.iso_datetime(fileinfo.mtime), fmt.human_size(fileinfo.size), ' ' * indent, name, )) prev_path = path while indent > 0: indent -= 1 result.append("%s%s/" % (base_indent, ' ' * indent)) result.append("%s= %d file(s)" % (base_indent, len(filelist))) return '\n'.join(result)
[ "def", "_fmt_files", "(", "filelist", ")", ":", "depth", "=", "max", "(", "i", ".", "path", ".", "count", "(", "'/'", ")", "for", "i", "in", "filelist", ")", "pad", "=", "[", "'\\uFFFE'", "]", "*", "depth", "base_indent", "=", "' '", "*", "38", "indent", "=", "0", "result", "=", "[", "]", "prev_path", "=", "pad", "sorted_files", "=", "sorted", "(", "(", "i", ".", "path", ".", "split", "(", "'/'", ")", "[", ":", "-", "1", "]", "+", "pad", ",", "i", ".", "path", ".", "rsplit", "(", "'/'", ",", "1", ")", "[", "-", "1", "]", ",", "i", ")", "for", "i", "in", "filelist", ")", "for", "path", ",", "name", ",", "fileinfo", "in", "sorted_files", ":", "path", "=", "path", "[", ":", "depth", "]", "if", "path", "!=", "prev_path", ":", "common", "=", "min", "(", "[", "depth", "]", "+", "[", "idx", "for", "idx", ",", "(", "dirname", ",", "prev_name", ")", "in", "enumerate", "(", "zip", "(", "path", ",", "prev_path", ")", ")", "if", "dirname", "!=", "prev_name", "]", ")", "#result.append(\"!!%r %r\" % (indent, common))", "#result.append(\"!!%r\" % (prev_path,))", "#result.append(\"!!%r\" % (path,))", "while", "indent", ">", "common", ":", "indent", "-=", "1", "result", ".", "append", "(", "\"%s%s/\"", "%", "(", "base_indent", ",", "' '", "*", "indent", ")", ")", "for", "dirname", "in", "path", "[", "common", ":", "]", ":", "if", "dirname", "==", "'\\uFFFE'", ":", "break", "result", ".", "append", "(", "\"%s%s\\\\ %s\"", "%", "(", "base_indent", ",", "' '", "*", "indent", ",", "dirname", ")", ")", "indent", "+=", "1", "##result.append(\"!!%r %r\" % (path, name))", "result", ".", "append", "(", "\" %s %s %s %s| %s\"", "%", "(", "{", "0", ":", "\"off \"", ",", "1", ":", "\" \"", ",", "2", ":", "\"high\"", "}", ".", "get", "(", "fileinfo", ".", "prio", ",", "\"????\"", ")", ",", "fmt", ".", "iso_datetime", "(", "fileinfo", ".", "mtime", ")", ",", "fmt", ".", "human_size", "(", "fileinfo", ".", "size", ")", ",", "' '", "*", "indent", ",", "name", ",", ")", ")", "prev_path", "=", "path", "while", "indent", ">", "0", ":", "indent", "-=", "1", "result", ".", "append", "(", "\"%s%s/\"", "%", "(", "base_indent", ",", "' '", "*", "indent", ")", ")", "result", ".", "append", "(", "\"%s= %d file(s)\"", "%", "(", "base_indent", ",", "len", "(", "filelist", ")", ")", ")", "return", "'\\n'", ".", "join", "(", "result", ")" ]
Produce a file listing.
[ "Produce", "a", "file", "listing", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/engine.py#L142-L190
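`_fmt_files` renders a sorted, indented tree from a flat file list, padding split paths with the '\uFFFE' sentinel so entries at different depths sort and compare cleanly; each entry must expose `path`, `prio`, `mtime`, and `size` attributes. A hedged driver using a namedtuple in place of the real file item objects (file names are made up):

import collections
import time

FileEntry = collections.namedtuple("FileEntry", "path prio mtime size")

files = [
    FileEntry("Sample/README.txt", 1, time.time(), 2048),
    FileEntry("Sample/media/clip.mkv", 2, time.time(), 700 * 1024 ** 2),
]
print(_fmt_files(files))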
7,397
pyroscope/pyrocore
src/pyrocore/torrent/engine.py
detect_traits
def detect_traits(item): """ Build traits list from attributes of the passed item. Currently, "kind_51", "name" and "alias" are considered. See pyrocore.util.traits:dectect_traits for more details. """ return traits.detect_traits( name=item.name, alias=item.alias, filetype=(list(item.fetch("kind_51")) or [None]).pop(), )
python
def detect_traits(item): """ Build traits list from attributes of the passed item. Currently, "kind_51", "name" and "alias" are considered. See pyrocore.util.traits:dectect_traits for more details. """ return traits.detect_traits( name=item.name, alias=item.alias, filetype=(list(item.fetch("kind_51")) or [None]).pop(), )
[ "def", "detect_traits", "(", "item", ")", ":", "return", "traits", ".", "detect_traits", "(", "name", "=", "item", ".", "name", ",", "alias", "=", "item", ".", "alias", ",", "filetype", "=", "(", "list", "(", "item", ".", "fetch", "(", "\"kind_51\"", ")", ")", "or", "[", "None", "]", ")", ".", "pop", "(", ")", ",", ")" ]
Build traits list from attributes of the passed item. Currently, "kind_51", "name" and "alias" are considered. See pyrocore.util.traits:detect_traits for more details.
[ "Build", "traits", "list", "from", "attributes", "of", "the", "passed", "item", ".", "Currently", "kind_51", "name", "and", "alias", "are", "considered", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/engine.py#L193-L202
7,398
pyroscope/pyrocore
src/pyrocore/torrent/engine.py
TorrentProxy.add_manifold_attribute
def add_manifold_attribute(cls, name): """ Register a manifold engine attribute. @return: field definition object, or None if "name" isn't a manifold attribute. """ if name.startswith("custom_"): try: return FieldDefinition.FIELDS[name] except KeyError: field = OnDemandField(fmt.to_unicode, name, "custom attribute %r" % name.split('_', 1)[1], matcher=matching.PatternFilter) setattr(cls, name, field) # add field to all proxy objects return field elif name.startswith("kind_") and name[5:].isdigit(): try: return FieldDefinition.FIELDS[name] except KeyError: limit = int(name[5:].lstrip('0') or '0', 10) if limit > 100: raise error.UserError("kind_N: N > 100 in %r" % name) field = OnDemandField(set, name, "kinds of files that make up more than %d%% of this item's size" % limit, matcher=matching.TaggedAsFilter, formatter=_fmt_tags, engine_name="kind_%d" % limit) setattr(cls, name, field) return field
python
def add_manifold_attribute(cls, name): """ Register a manifold engine attribute. @return: field definition object, or None if "name" isn't a manifold attribute. """ if name.startswith("custom_"): try: return FieldDefinition.FIELDS[name] except KeyError: field = OnDemandField(fmt.to_unicode, name, "custom attribute %r" % name.split('_', 1)[1], matcher=matching.PatternFilter) setattr(cls, name, field) # add field to all proxy objects return field elif name.startswith("kind_") and name[5:].isdigit(): try: return FieldDefinition.FIELDS[name] except KeyError: limit = int(name[5:].lstrip('0') or '0', 10) if limit > 100: raise error.UserError("kind_N: N > 100 in %r" % name) field = OnDemandField(set, name, "kinds of files that make up more than %d%% of this item's size" % limit, matcher=matching.TaggedAsFilter, formatter=_fmt_tags, engine_name="kind_%d" % limit) setattr(cls, name, field) return field
[ "def", "add_manifold_attribute", "(", "cls", ",", "name", ")", ":", "if", "name", ".", "startswith", "(", "\"custom_\"", ")", ":", "try", ":", "return", "FieldDefinition", ".", "FIELDS", "[", "name", "]", "except", "KeyError", ":", "field", "=", "OnDemandField", "(", "fmt", ".", "to_unicode", ",", "name", ",", "\"custom attribute %r\"", "%", "name", ".", "split", "(", "'_'", ",", "1", ")", "[", "1", "]", ",", "matcher", "=", "matching", ".", "PatternFilter", ")", "setattr", "(", "cls", ",", "name", ",", "field", ")", "# add field to all proxy objects", "return", "field", "elif", "name", ".", "startswith", "(", "\"kind_\"", ")", "and", "name", "[", "5", ":", "]", ".", "isdigit", "(", ")", ":", "try", ":", "return", "FieldDefinition", ".", "FIELDS", "[", "name", "]", "except", "KeyError", ":", "limit", "=", "int", "(", "name", "[", "5", ":", "]", ".", "lstrip", "(", "'0'", ")", "or", "'0'", ",", "10", ")", "if", "limit", ">", "100", ":", "raise", "error", ".", "UserError", "(", "\"kind_N: N > 100 in %r\"", "%", "name", ")", "field", "=", "OnDemandField", "(", "set", ",", "name", ",", "\"kinds of files that make up more than %d%% of this item's size\"", "%", "limit", ",", "matcher", "=", "matching", ".", "TaggedAsFilter", ",", "formatter", "=", "_fmt_tags", ",", "engine_name", "=", "\"kind_%d\"", "%", "limit", ")", "setattr", "(", "cls", ",", "name", ",", "field", ")", "return", "field" ]
Register a manifold engine attribute. @return: field definition object, or None if "name" isn't a manifold attribute.
[ "Register", "a", "manifold", "engine", "attribute", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/engine.py#L304-L331
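`add_manifold_attribute` creates field descriptors on demand: custom_* names become pattern-matched text fields, and kind_NN names become tag sets of file kinds covering more than NN% of the item's size; each freshly built field is cached on the class with `setattr`, so later lookups are ordinary attribute access. The caching pattern, boiled down to a neutral sketch outside of pyrocore (all names below are invented):

class Registry(object):
    FIELDS = {}

    @classmethod
    def add_dynamic(cls, name):
        """Create and cache a field object for recognised dynamic names."""
        if name.startswith("custom_"):
            if name not in cls.FIELDS:
                field = "custom attribute %r" % name.split('_', 1)[1]
                cls.FIELDS[name] = field
                setattr(cls, name, field)   # future lookups bypass this factory
            return cls.FIELDS[name]
        return None   # not a dynamic attribute

print(Registry.add_dynamic("custom_label"))   # custom attribute 'label'
print(Registry.custom_label)                  # now a plain class attribute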
7,399
pyroscope/pyrocore
src/pyrocore/torrent/engine.py
TorrentProxy.add_custom_fields
def add_custom_fields(cls, *args, **kw): """ Add any custom fields defined in the configuration. """ for factory in config.custom_field_factories: for field in factory(): setattr(cls, field.name, field)
python
def add_custom_fields(cls, *args, **kw): """ Add any custom fields defined in the configuration. """ for factory in config.custom_field_factories: for field in factory(): setattr(cls, field.name, field)
[ "def", "add_custom_fields", "(", "cls", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "for", "factory", "in", "config", ".", "custom_field_factories", ":", "for", "field", "in", "factory", "(", ")", ":", "setattr", "(", "cls", ",", "field", ".", "name", ",", "field", ")" ]
Add any custom fields defined in the configuration.
[ "Add", "any", "custom", "fields", "defined", "in", "the", "configuration", "." ]
89ad01346a570943d20311a0b488440975876612
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/engine.py#L335-L340