| id | repo | path | func_name | original_string | language | code | code_tokens | docstring | docstring_tokens | sha | url |
|---|---|---|---|---|---|---|---|---|---|---|---|
| int32 (0-252k) | string (7-55 chars) | string (4-127 chars) | string (1-88 chars) | string (75-19.8k chars) | 1 class ("python") | string (75-19.8k chars) | token list | string (3-17.3k chars) | token list | string (40 chars) | string (87-242 chars) |
7,400 | pyroscope/pyrocore | src/pyrocore/torrent/engine.py | TorrentView._fetch_items | python
def _fetch_items(self):
""" Fetch to attribute.
"""
if self._items is None:
self._items = list(self.engine.items(self))
return self._items
|
Fetch to attribute.
|
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/engine.py#L605-L611
|
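The record above is a lazy fetch-and-cache accessor. A minimal standalone sketch of the same pattern, using a hypothetical `fetch` callable (not part of pyrocore):

class LazyItems(object):
    "Cache the result of an expensive fetch on first access (sketch)."

    def __init__(self, fetch):
        self._fetch = fetch  # hypothetical callable returning an iterable
        self._items = None

    def items(self):
        # Same guard as TorrentView._fetch_items: fetch only once
        if self._items is None:
            self._items = list(self._fetch())
        return self._items

cache = LazyItems(lambda: range(3))
assert cache.items() is cache.items()  # second call reuses the cached list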
7,401 | pyroscope/pyrocore | src/pyrocore/torrent/engine.py | TorrentView._check_hash_view | python
def _check_hash_view(self):
""" Return infohash if view name refers to a single item, else None.
"""
infohash = None
if self.viewname.startswith('#'):
infohash = self.viewname[1:]
elif len(self.viewname) == 40:
try:
int(self.viewname, 16)
except (TypeError, ValueError):
pass
else:
infohash = self.viewname
return infohash
|
Return infohash if view name refers to a single item, else None.
|
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/engine.py#L614-L627
|
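The hex test in `_check_hash_view` is easy to exercise in isolation; this sketch repeats the same logic as a free function, with made-up view names:

def check_hash_view(viewname):
    # '#'-prefixed names and 40-char hex strings denote a single item
    if viewname.startswith('#'):
        return viewname[1:]
    if len(viewname) == 40:
        try:
            int(viewname, 16)  # raises ValueError unless pure hex
        except (TypeError, ValueError):
            return None
        return viewname
    return None

assert check_hash_view('#abcd') == 'abcd'
assert check_hash_view('0' * 40) == '0' * 40
assert check_hash_view('main') is None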
7,402 | pyroscope/pyrocore | src/pyrocore/torrent/engine.py | TorrentView.size | python
def size(self):
""" Total unfiltered size of view.
"""
#return len(self._fetch_items())
if self._check_hash_view():
return 1
else:
return self.engine.open().view.size(xmlrpc.NOHASH, self.viewname)
|
Total unfiltered size of view.
|
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/engine.py#L630-L637
|
7,403 | pyroscope/pyrocore | src/pyrocore/torrent/engine.py | TorrentEngine.group_by | python
def group_by(self, fields, items=None):
""" Returns a dict of lists of items, grouped by the given fields.
``fields`` can be a string (one field) or an iterable of field names.
"""
result = defaultdict(list)
if items is None:
items = self.items()
try:
key = operator.attrgetter(fields + '')
except TypeError:
def key(obj, names=tuple(fields)):
'Helper to return group key tuple'
return tuple(getattr(obj, x) for x in names)
for item in items:
result[key(item)].append(item)
return result
|
Returns a dict of lists of items, grouped by the given fields.
``fields`` can be a string (one field) or an iterable of field names.
|
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/engine.py#L700-L719
|
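The `fields + ''` line is a dispatch trick: concatenation succeeds for a string field name but raises TypeError for an iterable of names, which selects the tuple key instead. A self-contained copy with toy items shows both paths:

import operator
from collections import defaultdict

def group_by(fields, items):
    result = defaultdict(list)
    try:
        key = operator.attrgetter(fields + '')  # works only for strings
    except TypeError:
        def key(obj, names=tuple(fields)):
            return tuple(getattr(obj, x) for x in names)
    for item in items:
        result[key(item)].append(item)
    return result

class Item(object):
    def __init__(self, kind, size):
        self.kind, self.size = kind, size

items = [Item('tv', 1), Item('tv', 2), Item('movie', 3)]
print(sorted(group_by('kind', items)))            # ['movie', 'tv']
print(sorted(group_by(('kind', 'size'), items)))  # [('movie', 3), ('tv', 1), ('tv', 2)]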
7,404 | pyroscope/pyrocore | src/pyrocore/util/xmlrpc.py | RTorrentProxy._set_mappings | python
def _set_mappings(self):
""" Set command mappings according to rTorrent version.
"""
try:
self._versions = (self.system.client_version(), self.system.library_version(),)
self._version_info = tuple(int(i) for i in self._versions[0].split('.'))
self._use_deprecated = self._version_info < (0, 8, 7)
# Merge mappings for this version
self._mapping = self._mapping.copy()
for key, val in sorted(i for i in vars(config).items() if i[0].startswith("xmlrpc_")):
map_version = tuple(int(i) for i in key.split('_')[1:])
if map_version <= self._version_info:
if config.debug:
self.LOG.debug("MAPPING for %r added: %r" % (map_version, val))
self._mapping.update(val)
self._fix_mappings()
except ERRORS as exc:
raise error.LoggableError("Can't connect to %s (%s)" % (self._url, exc))
return self._versions, self._version_info
|
Set command mappings according to rTorrent version.
|
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/xmlrpc.py#L241-L261
|
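The version gating in `_set_mappings` relies on element-wise tuple comparison; a small illustration (the `xmlrpc_0_8_7` config name is hypothetical):

def to_version_info(version):
    # '0.9.6' -> (0, 9, 6), comparable element-wise as in _set_mappings
    return tuple(int(i) for i in version.split('.'))

assert to_version_info('0.8.6') < (0, 8, 7)   # would use deprecated names
assert to_version_info('0.9.6') >= (0, 8, 7)

# Config keys like 'xmlrpc_0_8_7' decode to the same tuples:
assert tuple(int(i) for i in 'xmlrpc_0_8_7'.split('_')[1:]) == (0, 8, 7)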
7,405 | pyroscope/pyrocore | src/pyrocore/util/xmlrpc.py | RTorrentProxy._fix_mappings | python
def _fix_mappings(self):
""" Add computed stuff to mappings.
"""
self._mapping.update((key+'=', val+'=') for key, val in self._mapping.items() if not key.endswith('='))
if config.debug:
self.LOG.debug("CMD MAPPINGS ARE: %r" % (self._mapping,))
|
Add computed stuff to mappings.
|
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/xmlrpc.py#L264-L270
|
7,406 | pyroscope/pyrocore | src/pyrocore/util/xmlrpc.py | RTorrentProxy._map_call | python
def _map_call(self, cmd):
""" Map old to new command names.
"""
if config.debug and cmd != self._mapping.get(cmd, cmd):
self.LOG.debug("MAP %s ==> %s" % (cmd, self._mapping[cmd]))
cmd = self._mapping.get(cmd, cmd)
# These we do by code, to avoid lengthy lists in the config
if not self._use_deprecated and any(cmd.startswith(i) for i in ("d.get_", "f.get_", "p.get_", "t.get_")):
cmd = cmd[:2] + cmd[6:]
return cmd
|
Map old to new command names.
|
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/xmlrpc.py#L273-L284
|
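The slice `cmd[:2] + cmd[6:]` drops the `get_` infix from namespaced getters (`d.get_name` keeps `d.` and `name`); isolated:

def drop_get_infix(cmd):
    # 'd.get_name' -> 'd.name': keep the 2-char namespace, skip 'get_'
    if any(cmd.startswith(i) for i in ("d.get_", "f.get_", "p.get_", "t.get_")):
        return cmd[:2] + cmd[6:]
    return cmd

assert drop_get_infix("d.get_name") == "d.name"
assert drop_get_infix("view.size") == "view.size"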
7,407 | pyroscope/pyrocore | src/pyrocore/torrent/watch.py | MetafileHandler.parse | python
def parse(self):
""" Parse metafile and check pre-conditions.
"""
try:
if not os.path.getsize(self.ns.pathname):
# Ignore 0-byte dummy files (Firefox creates these while downloading)
self.job.LOG.warn("Ignoring 0-byte metafile '%s'" % (self.ns.pathname,))
return
self.metadata = metafile.checked_open(self.ns.pathname)
except EnvironmentError as exc:
self.job.LOG.error("Can't read metafile '%s' (%s)" % (
self.ns.pathname, str(exc).replace(": '%s'" % self.ns.pathname, ""),
))
return
except ValueError as exc:
self.job.LOG.error("Invalid metafile '%s': %s" % (self.ns.pathname, exc))
return
self.ns.info_hash = metafile.info_hash(self.metadata)
self.ns.info_name = self.metadata["info"]["name"]
self.job.LOG.info("Loaded '%s' from metafile '%s'" % (self.ns.info_name, self.ns.pathname))
# Check whether item is already loaded
try:
name = self.job.proxy.d.name(self.ns.info_hash, fail_silently=True)
except xmlrpc.HashNotFound:
pass
except xmlrpc.ERRORS as exc:
if exc.faultString != "Could not find info-hash.":
self.job.LOG.error("While checking for #%s: %s" % (self.ns.info_hash, exc))
return
else:
self.job.LOG.warn("Item #%s '%s' already added to client" % (self.ns.info_hash, name))
return
return True
|
Parse metafile and check pre-conditions.
|
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/watch.py#L59-L94
|
7,408 | pyroscope/pyrocore | src/pyrocore/torrent/watch.py | MetafileHandler.addinfo | python
def addinfo(self):
""" Add known facts to templating namespace.
"""
# Basic values
self.ns.watch_path = self.job.config.path
self.ns.relpath = None
for watch in self.job.config.path:
if self.ns.pathname.startswith(watch.rstrip('/') + '/'):
self.ns.relpath = os.path.dirname(self.ns.pathname)[len(watch.rstrip('/'))+1:]
break
# Build indicator flags for target state from filename
flags = self.ns.pathname.split(os.sep)
flags.extend(flags[-1].split('.'))
self.ns.flags = set(i for i in flags if i)
# Metafile stuff
announce = self.metadata.get("announce", None)
if announce:
self.ns.tracker_alias = configuration.map_announce2alias(announce)
main_file = self.ns.info_name
if "files" in self.metadata["info"]:
main_file = list(sorted((i["length"], i["path"][-1])
for i in self.metadata["info"]["files"]))[-1][1]
self.ns.filetype = os.path.splitext(main_file)[1]
# Add name traits
kind, info = traits.name_trait(self.ns.info_name, add_info=True)
self.ns.traits = Bunch(info)
self.ns.traits.kind = kind
self.ns.label = '/'.join(traits.detect_traits(
name=self.ns.info_name, alias=self.ns.tracker_alias, filetype=self.ns.filetype)).strip('/')
# Finally, expand commands from templates
self.ns.commands = []
for key, cmd in sorted(self.job.custom_cmds.items()):
try:
self.ns.commands.append(formatting.expand_template(cmd, self.ns))
except error.LoggableError as exc:
self.job.LOG.error("While expanding '%s' custom command: %s" % (key, exc))
|
Add known facts to templating namespace.
|
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/watch.py#L97-L137
|
7,409 | pyroscope/pyrocore | src/pyrocore/torrent/watch.py | MetafileHandler.load | python
def load(self):
""" Load metafile into client.
"""
if not self.ns.info_hash and not self.parse():
return
self.addinfo()
# TODO: dry_run
try:
# TODO: Scrub metafile if requested
# Determine target state
start_it = self.job.config.load_mode.lower() in ("start", "started")
queue_it = self.job.config.queued
if "start" in self.ns.flags:
start_it = True
elif "load" in self.ns.flags:
start_it = False
if "queue" in self.ns.flags:
queue_it = True
# Load metafile into client
load_cmd = self.job.proxy.load.verbose
if queue_it:
if not start_it:
self.ns.commands.append("d.priority.set=0")
elif start_it:
load_cmd = self.job.proxy.load.start_verbose
self.job.LOG.debug("Templating values are:\n %s" % "\n ".join("%s=%s" % (key, repr(val))
for key, val in sorted(self.ns.items())
))
load_cmd(xmlrpc.NOHASH, self.ns.pathname, *tuple(self.ns.commands))
time.sleep(.05) # let things settle
# Announce new item
if not self.job.config.quiet:
msg = "%s: Loaded '%s' from '%s/'%s%s" % (
self.job.__class__.__name__,
fmt.to_utf8(self.job.proxy.d.name(self.ns.info_hash, fail_silently=True)),
os.path.dirname(self.ns.pathname).rstrip(os.sep),
" [queued]" if queue_it else "",
(" [startable]" if queue_it else " [started]") if start_it else " [normal]",
)
self.job.proxy.log(xmlrpc.NOHASH, msg)
# TODO: Evaluate fields and set client values
# TODO: Add metadata to tied file if requested
# TODO: Execute commands AFTER adding the item, with full templating
# Example: Labeling - add items to a persistent view, i.e. "postcmd = view.set_visible={{label}}"
# could also be done automatically from the path, see above under "flags" (autolabel = True)
# and add traits to the flags, too, in that case
except xmlrpc.ERRORS as exc:
self.job.LOG.error("While loading #%s: %s" % (self.ns.info_hash, exc))
|
Load metafile into client.
|
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/watch.py#L140-L199
|
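The start/queue decision in `load` is pure flag logic; extracted with assumed defaults, it behaves like this:

def target_state(flags, load_mode="start", queued=False):
    # Mirrors MetafileHandler.load: path flags override configured defaults
    start_it = load_mode.lower() in ("start", "started")
    queue_it = queued
    if "start" in flags:
        start_it = True
    elif "load" in flags:
        start_it = False
    if "queue" in flags:
        queue_it = True
    return start_it, queue_it

assert target_state({"load"}) == (False, False)
assert target_state({"start", "queue"}) == (True, True)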
7,410 | pyroscope/pyrocore | src/pyrocore/torrent/watch.py | TreeWatchHandler.handle_path | python
def handle_path(self, event):
""" Handle a path-related event.
"""
self.job.LOG.debug("Notification %r" % event)
if event.dir:
return
if any(event.pathname.endswith(i) for i in self.METAFILE_EXT):
MetafileHandler(self.job, event.pathname).handle()
elif os.path.basename(event.pathname) == "watch.ini":
self.job.LOG.info("NOT YET Reloading watch config for '%s'" % event.path)
|
python
|
def handle_path(self, event):
""" Handle a path-related event.
"""
self.job.LOG.debug("Notification %r" % event)
if event.dir:
return
if any(event.pathname.endswith(i) for i in self.METAFILE_EXT):
MetafileHandler(self.job, event.pathname).handle()
elif os.path.basename(event.pathname) == "watch.ini":
self.job.LOG.info("NOT YET Reloading watch config for '%s'" % event.path)
|
[
"def",
"handle_path",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"job",
".",
"LOG",
".",
"debug",
"(",
"\"Notification %r\"",
"%",
"event",
")",
"if",
"event",
".",
"dir",
":",
"return",
"if",
"any",
"(",
"event",
".",
"pathname",
".",
"endswith",
"(",
"i",
")",
"for",
"i",
"in",
"self",
".",
"METAFILE_EXT",
")",
":",
"MetafileHandler",
"(",
"self",
".",
"job",
",",
"event",
".",
"pathname",
")",
".",
"handle",
"(",
")",
"elif",
"os",
".",
"path",
".",
"basename",
"(",
"event",
".",
"pathname",
")",
"==",
"\"watch.ini\"",
":",
"self",
".",
"job",
".",
"LOG",
".",
"info",
"(",
"\"NOT YET Reloading watch config for '%s'\"",
"%",
"event",
".",
"path",
")"
] |
Handle a path-related event.
|
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/watch.py#L243-L253
|
7,411 | pyroscope/pyrocore | src/pyrocore/torrent/watch.py | TreeWatch.setup | python
def setup(self):
""" Set up inotify manager.
See https://github.com/seb-m/pyinotify/.
"""
if not pyinotify.WatchManager:
raise error.UserError("You need to install 'pyinotify' to use %s (%s)!" % (
self.__class__.__name__, pyinotify._import_error)) # pylint: disable=E1101, W0212
self.manager = pyinotify.WatchManager()
self.handler = TreeWatchHandler(job=self)
self.notifier = pyinotify.AsyncNotifier(self.manager, self.handler)
if self.LOG.isEnabledFor(logging.DEBUG):
mask = pyinotify.ALL_EVENTS
else:
mask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO # bogus pylint: disable=E1101
# Add all configured base dirs
for path in self.config.path:
self.manager.add_watch(path.strip(), mask, rec=True, auto_add=True)
|
Set up inotify manager.
See https://github.com/seb-m/pyinotify/.
|
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/watch.py#L332-L352
|
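For reference, a minimal standalone pyinotify watcher built from the same pieces (a WatchManager, an event handler, a recursive auto-add watch); the path and handler bodies are placeholders, and it uses a blocking Notifier where TreeWatch uses AsyncNotifier:

import pyinotify

class Handler(pyinotify.ProcessEvent):
    def process_IN_CLOSE_WRITE(self, event):
        print("closed after write: %s" % event.pathname)

    def process_IN_MOVED_TO(self, event):
        print("moved in: %s" % event.pathname)

manager = pyinotify.WatchManager()
notifier = pyinotify.Notifier(manager, Handler())
mask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO
manager.add_watch("/tmp/watch-me", mask, rec=True, auto_add=True)  # placeholder dir
notifier.loop()  # blocks until interrupted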
7,412 | pyroscope/pyrocore | src/pyrocore/util/traits.py | get_filetypes | python
def get_filetypes(filelist, path=None, size=os.path.getsize):
""" Get a sorted list of file types and their weight in percent
from an iterable of file names.
@return: List of weighted file extensions (no '.'), sorted in descending order
@rtype: list of (weight, filetype)
"""
path = path or (lambda _: _)
# Get total size for each file extension
histo = defaultdict(int)
for entry in filelist:
ext = os.path.splitext(path(entry))[1].lstrip('.').lower()
if ext and ext[0] == 'r' and ext[1:].isdigit():
ext = "rar"
elif ext == "jpeg":
ext = "jpg"
elif ext == "mpeg":
ext = "mpg"
histo[ext] += size(entry)
# Normalize values to integer percent
total = sum(histo.values())
if total:
for ext, val in histo.items():
histo[ext] = int(val * 100.0 / total + .499)
return sorted(zip(histo.values(), histo.keys()), reverse=True)
|
Get a sorted list of file types and their weight in percent
from an iterable of file names.
@return: List of weighted file extensions (no '.'), sorted in descending order
@rtype: list of (weight, filetype)
|
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/traits.py#L127-L154
|
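Because the `size` callable is injectable, `get_filetypes` also works on bare name lists; passing a constant weights by file count instead of bytes:

names = ["a.mkv", "b.MKV", "c.srt", "d.r01", "e.jpeg"]
print(get_filetypes(names, size=lambda _: 1))
# -> [(40, 'mkv'), (20, 'srt'), (20, 'rar'), (20, 'jpg')]  (integer percent)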
7,413 | pyroscope/pyrocore | src/pyrocore/util/traits.py | name_trait | python
def name_trait(name, add_info=False):
""" Determine content type from name.
"""
kind, info = None, {}
# Anything to check against?
if name and not name.startswith("VTS_"):
lower_name = name.lower()
trait_patterns = (("tv", TV_PATTERNS, "show"), ("movie", MOVIE_PATTERNS, "title"))
# TV check
if any(i in lower_name for i in _DEFINITELY_TV):
kind = "tv"
trait_patterns = trait_patterns[:1]
# Regex checks
re_name = '.'.join([i.lstrip('[(').rstrip(')]') for i in name.split(' .')])
for trait, patterns, title_group in trait_patterns:
matched, patname = None, None
for patname, pattern in patterns:
matched = pattern.match(re_name)
##print matched, patname, re_name; print " ", pattern.pattern
if matched and not any(i in matched.groupdict()[title_group].lower() for i in BAD_TITLE_WORDS):
kind, info = trait, matched.groupdict()
break
if matched:
info["pattern"] = patname
# Fold auxiliary groups into main one
for key, val in list(info.items()):
if key[-1].isdigit():
del info[key]
if val:
key = re.sub("[0-9]+$", "", key)
info[key] = ("%s %s" % (info.get(key) or "", val)).strip()
break
# TODO: Split by "dvdrip", year, etc. to get to the title and then
    # do an imdb / tvdb lookup; cache results, hits for longer, misses
# for a day at max.
# Return requested result
return (kind, info) if add_info else kind
|
Determine content type from name.
|
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/traits.py#L157-L201
|
7,414 | pyroscope/pyrocore | src/pyrocore/util/traits.py | detect_traits | python
def detect_traits(name=None, alias=None, filetype=None):
""" Build traits list from passed attributes.
The result is a list of hierarchical classifiers, the top-level
consisting of "audio", "movie", "tv", "video", "document", etc.
It can be used as a part of completion paths to build directory
structures.
"""
result = []
if filetype:
filetype = filetype.lstrip('.')
# Check for "themed" trackers
theme = config.traits_by_alias.get(alias)
if alias and theme:
result = [theme, filetype or "other"]
    # Guess from file extension and name
elif filetype in KIND_AUDIO:
result = ["audio", filetype]
elif filetype in KIND_VIDEO:
result = ["video", filetype]
contents = name_trait(name)
if contents:
result = [contents, filetype]
elif filetype in KIND_IMAGE:
result = ["img", filetype]
elif filetype in KIND_DOCS:
result = ["docs", filetype]
elif filetype in KIND_ARCHIVE:
result = ["misc", filetype]
contents = name_trait(name)
if contents:
result = [contents, filetype]
return result
|
Build traits list from passed attributes.
The result is a list of hierarchical classifiers, the top-level
consisting of "audio", "movie", "tv", "video", "document", etc.
It can be used as a part of completion paths to build directory
structures.
|
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/traits.py#L204-L241
|
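Expected shapes of the result, assuming typical contents of the `KIND_*` sets and TV patterns (illustrative only; actual output depends on the loaded configuration):

detect_traits(name="Show.S01E02.HDTV.x264", filetype=".mkv")  # e.g. ['tv', 'mkv']
detect_traits(name="Some Album", filetype=".mp3")             # e.g. ['audio', 'mp3']
detect_traits()                                               # [] when nothing is given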
7,415 | pyroscope/pyrocore | src/pyrocore/util/metafile.py | console_progress | python
def console_progress():
""" Return a progress indicator for consoles if
stdout is a tty.
"""
def progress(totalhashed, totalsize):
"Helper"
msg = " " * 30
if totalhashed < totalsize:
msg = "%5.1f%% complete" % (totalhashed * 100.0 / totalsize)
sys.stdout.write(msg + " \r")
sys.stdout.flush()
try:
return progress if sys.stdout.isatty() else None
except AttributeError:
return None
|
Return a progress indicator for consoles if
stdout is a tty.
|
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/metafile.py#L74-L89
|
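Callers use the returned callback as `progress(totalhashed, totalsize)` and must guard against `None` (returned when stdout is not a tty):

progress = console_progress()
total = 1 << 20
for done in range(0, total + 1, total // 4):
    if progress:
        progress(done, total)  # prints e.g. ' 25.0% complete', clears at 100%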
7,416 | pyroscope/pyrocore | src/pyrocore/util/metafile.py | check_info | python
def check_info(info):
""" Validate info dict.
Raise ValueError if validation fails.
"""
if not isinstance(info, dict):
raise ValueError("bad metainfo - not a dictionary")
pieces = info.get("pieces")
if not isinstance(pieces, basestring) or len(pieces) % 20 != 0:
raise ValueError("bad metainfo - bad pieces key")
piece_size = info.get("piece length")
if not isinstance(piece_size, (int, long)) or piece_size <= 0:
raise ValueError("bad metainfo - illegal piece length")
name = info.get("name")
if not isinstance(name, basestring):
raise ValueError("bad metainfo - bad name (type is %r)" % type(name).__name__)
if not ALLOWED_ROOT_NAME.match(name):
raise ValueError("name %s disallowed for security reasons" % name)
if ("files" in info) == ("length" in info):
raise ValueError("single/multiple file mix")
if "length" in info:
length = info.get("length")
if not isinstance(length, (int, long)) or length < 0:
raise ValueError("bad metainfo - bad length")
else:
files = info.get("files")
if not isinstance(files, (list, tuple)):
raise ValueError("bad metainfo - bad file list")
for item in files:
if not isinstance(item, dict):
raise ValueError("bad metainfo - bad file value")
length = item.get("length")
if not isinstance(length, (int, long)) or length < 0:
raise ValueError("bad metainfo - bad length")
path = item.get("path")
if not isinstance(path, (list, tuple)) or not path:
raise ValueError("bad metainfo - bad path")
for part in path:
if not isinstance(part, basestring):
raise ValueError("bad metainfo - bad path dir")
part = fmt.to_unicode(part)
if part == '..':
raise ValueError("relative path in %s disallowed for security reasons" % '/'.join(path))
if part and not ALLOWED_PATH_NAME.match(part):
raise ValueError("path %s disallowed for security reasons" % part)
file_paths = [os.sep.join(item["path"]) for item in files]
if len(set(file_paths)) != len(file_paths):
raise ValueError("bad metainfo - duplicate path")
return info
|
Validate info dict.
Raise ValueError if validation fails.
|
[
"Validate",
"info",
"dict",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/metafile.py#L112-L171
|
7,417
|
pyroscope/pyrocore
|
src/pyrocore/util/metafile.py
|
check_meta
|
def check_meta(meta):
""" Validate meta dict.
Raise ValueError if validation fails.
"""
if not isinstance(meta, dict):
raise ValueError("bad metadata - not a dictionary")
if not isinstance(meta.get("announce"), basestring):
raise ValueError("bad announce URL - not a string")
check_info(meta.get("info"))
return meta
|
python
|
def check_meta(meta):
""" Validate meta dict.
Raise ValueError if validation fails.
"""
if not isinstance(meta, dict):
raise ValueError("bad metadata - not a dictionary")
if not isinstance(meta.get("announce"), basestring):
raise ValueError("bad announce URL - not a string")
check_info(meta.get("info"))
return meta
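
# --- Hedged usage sketch (editor's addition, not part of the original row) ---
# check_meta() returns the dict unchanged on success and raises ValueError
# on malformed metadata. Assumes this module's `bencode` import;
# "example.torrent" is a hypothetical path.
if __name__ == "__main__":
    with open("example.torrent", "rb") as handle:
        meta = check_meta(bencode.bdecode(handle.read()))
    print(meta["announce"])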
|
[
"def",
"check_meta",
"(",
"meta",
")",
":",
"if",
"not",
"isinstance",
"(",
"meta",
",",
"dict",
")",
":",
"raise",
"ValueError",
"(",
"\"bad metadata - not a dictionary\"",
")",
"if",
"not",
"isinstance",
"(",
"meta",
".",
"get",
"(",
"\"announce\"",
")",
",",
"basestring",
")",
":",
"raise",
"ValueError",
"(",
"\"bad announce URL - not a string\"",
")",
"check_info",
"(",
"meta",
".",
"get",
"(",
"\"info\"",
")",
")",
"return",
"meta"
] |
Validate meta dict.
Raise ValueError if validation fails.
|
[
"Validate",
"meta",
"dict",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/metafile.py#L174-L185
|
7,418
|
pyroscope/pyrocore
|
src/pyrocore/util/metafile.py
|
clean_meta
|
def clean_meta(meta, including_info=False, logger=None):
""" Clean meta dict. Optionally log changes using the given logger.
@param logger: If given, a callable accepting a string message.
@return: Set of keys removed from C{meta}.
"""
modified = set()
for key in meta.keys():
if [key] not in METAFILE_STD_KEYS:
if logger:
logger("Removing key %r..." % (key,))
del meta[key]
modified.add(key)
if including_info:
for key in meta["info"].keys():
if ["info", key] not in METAFILE_STD_KEYS:
if logger:
logger("Removing key %r..." % ("info." + key,))
del meta["info"][key]
modified.add("info." + key)
for idx, entry in enumerate(meta["info"].get("files", [])):
for key in entry.keys():
if ["info", "files", key] not in METAFILE_STD_KEYS:
if logger:
logger("Removing key %r from file #%d..." % (key, idx + 1))
del entry[key]
modified.add("info.files." + key)
# Remove crap that certain PHP software puts in paths
entry["path"] = [i for i in entry["path"] if i]
return modified
|
python
|
def clean_meta(meta, including_info=False, logger=None):
""" Clean meta dict. Optionally log changes using the given logger.
@param logger: If given, a callable accepting a string message.
@return: Set of keys removed from C{meta}.
"""
modified = set()
for key in meta.keys():
if [key] not in METAFILE_STD_KEYS:
if logger:
logger("Removing key %r..." % (key,))
del meta[key]
modified.add(key)
if including_info:
for key in meta["info"].keys():
if ["info", key] not in METAFILE_STD_KEYS:
if logger:
logger("Removing key %r..." % ("info." + key,))
del meta["info"][key]
modified.add("info." + key)
for idx, entry in enumerate(meta["info"].get("files", [])):
for key in entry.keys():
if ["info", "files", key] not in METAFILE_STD_KEYS:
if logger:
logger("Removing key %r from file #%d..." % (key, idx + 1))
del entry[key]
modified.add("info.files." + key)
# Remove crap that certain PHP software puts in paths
entry["path"] = [i for i in entry["path"] if i]
return modified
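
# --- Hedged usage sketch (editor's addition, not part of the original row) ---
# Any callable taking one string works as the logger. The demo dict assumes
# METAFILE_STD_KEYS covers the usual BEP-003 keys, so only the two
# non-standard keys below get stripped.
if __name__ == "__main__":
    import logging
    logging.basicConfig(level=logging.INFO)
    demo = {"announce": "http://tracker.example/announce", "junk key": 1,
            "info": {"name": "demo", "length": 0, "piece length": 16384,
                     "pieces": "", "x_nonstandard": True}}
    removed = clean_meta(demo, including_info=True,
                         logger=logging.getLogger("demo").info)
    print("Stripped %d non-standard key(s)" % len(removed))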
|
[
"def",
"clean_meta",
"(",
"meta",
",",
"including_info",
"=",
"False",
",",
"logger",
"=",
"None",
")",
":",
"modified",
"=",
"set",
"(",
")",
"for",
"key",
"in",
"meta",
".",
"keys",
"(",
")",
":",
"if",
"[",
"key",
"]",
"not",
"in",
"METAFILE_STD_KEYS",
":",
"if",
"logger",
":",
"logger",
"(",
"\"Removing key %r...\"",
"%",
"(",
"key",
",",
")",
")",
"del",
"meta",
"[",
"key",
"]",
"modified",
".",
"add",
"(",
"key",
")",
"if",
"including_info",
":",
"for",
"key",
"in",
"meta",
"[",
"\"info\"",
"]",
".",
"keys",
"(",
")",
":",
"if",
"[",
"\"info\"",
",",
"key",
"]",
"not",
"in",
"METAFILE_STD_KEYS",
":",
"if",
"logger",
":",
"logger",
"(",
"\"Removing key %r...\"",
"%",
"(",
"\"info.\"",
"+",
"key",
",",
")",
")",
"del",
"meta",
"[",
"\"info\"",
"]",
"[",
"key",
"]",
"modified",
".",
"add",
"(",
"\"info.\"",
"+",
"key",
")",
"for",
"idx",
",",
"entry",
"in",
"enumerate",
"(",
"meta",
"[",
"\"info\"",
"]",
".",
"get",
"(",
"\"files\"",
",",
"[",
"]",
")",
")",
":",
"for",
"key",
"in",
"entry",
".",
"keys",
"(",
")",
":",
"if",
"[",
"\"info\"",
",",
"\"files\"",
",",
"key",
"]",
"not",
"in",
"METAFILE_STD_KEYS",
":",
"if",
"logger",
":",
"logger",
"(",
"\"Removing key %r from file #%d...\"",
"%",
"(",
"key",
",",
"idx",
"+",
"1",
")",
")",
"del",
"entry",
"[",
"key",
"]",
"modified",
".",
"add",
"(",
"\"info.files.\"",
"+",
"key",
")",
"# Remove crap that certain PHP software puts in paths",
"entry",
"[",
"\"path\"",
"]",
"=",
"[",
"i",
"for",
"i",
"in",
"entry",
"[",
"\"path\"",
"]",
"if",
"i",
"]",
"return",
"modified"
] |
Clean meta dict. Optionally log changes using the given logger.
@param logger: If given, a callable accepting a string message.
@return: Set of keys removed from C{meta}.
|
[
"Clean",
"meta",
"dict",
".",
"Optionally",
"log",
"changes",
"using",
"the",
"given",
"logger",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/metafile.py#L188-L222
|
7,419
|
pyroscope/pyrocore
|
src/pyrocore/util/metafile.py
|
sanitize
|
def sanitize(meta, diagnostics=False):
""" Try to fix common problems, especially transcode non-standard string encodings.
"""
bad_encodings, bad_fields = set(), set()
def sane_encoding(field, text):
"Transcoding helper."
for encoding in ('utf-8', meta.get('encoding', None), 'cp1252'):
if encoding:
try:
u8_text = text.decode(encoding).encode("utf-8")
if encoding != 'utf-8':
bad_encodings.add(encoding)
bad_fields.add(field)
return u8_text
except UnicodeError:
continue
else:
# Broken beyond anything reasonable
bad_encodings.add('UNKNOWN/EXOTIC')
bad_fields.add(field)
return str(text, 'utf-8', 'replace').replace('\ufffd', '_').encode("utf-8")
# Go through all string fields and check them
for field in ("comment", "created by"):
if field in meta:
meta[field] = sane_encoding(field, meta[field])
meta["info"]["name"] = sane_encoding('info name', meta["info"]["name"])
for entry in meta["info"].get("files", []):
entry["path"] = [sane_encoding('file path', i) for i in entry["path"]]
return (meta, bad_encodings, bad_fields) if diagnostics else meta
|
python
|
def sanitize(meta, diagnostics=False):
""" Try to fix common problems, especially transcode non-standard string encodings.
"""
bad_encodings, bad_fields = set(), set()
def sane_encoding(field, text):
"Transcoding helper."
for encoding in ('utf-8', meta.get('encoding', None), 'cp1252'):
if encoding:
try:
u8_text = text.decode(encoding).encode("utf-8")
if encoding != 'utf-8':
bad_encodings.add(encoding)
bad_fields.add(field)
return u8_text
except UnicodeError:
continue
else:
# Broken beyond anything reasonable
bad_encodings.add('UNKNOWN/EXOTIC')
bad_fields.add(field)
return str(text, 'utf-8', 'replace').replace('\ufffd', '_').encode("utf-8")
# Go through all string fields and check them
for field in ("comment", "created by"):
if field in meta:
meta[field] = sane_encoding(field, meta[field])
meta["info"]["name"] = sane_encoding('info name', meta["info"]["name"])
for entry in meta["info"].get("files", []):
entry["path"] = [sane_encoding('file path', i) for i in entry["path"]]
return (meta, bad_encodings, bad_fields) if diagnostics else meta
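
# --- Hedged usage sketch (editor's addition, not part of the original row) ---
# "Uml\xe4ut" is a Python 2 byte string that is invalid UTF-8 but valid
# cp1252, so the fallback chain transcodes it and, with diagnostics=True,
# reports which fields and encodings were involved.
if __name__ == "__main__":
    demo = {"info": {"name": "Uml\xe4ut", "length": 0,
                     "piece length": 16384, "pieces": ""}}
    demo, bad_encodings, bad_fields = sanitize(demo, diagnostics=True)
    print("Fixed fields %s via %s" % (sorted(bad_fields), sorted(bad_encodings)))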
|
[
"def",
"sanitize",
"(",
"meta",
",",
"diagnostics",
"=",
"False",
")",
":",
"bad_encodings",
",",
"bad_fields",
"=",
"set",
"(",
")",
",",
"set",
"(",
")",
"def",
"sane_encoding",
"(",
"field",
",",
"text",
")",
":",
"\"Transcoding helper.\"",
"for",
"encoding",
"in",
"(",
"'utf-8'",
",",
"meta",
".",
"get",
"(",
"'encoding'",
",",
"None",
")",
",",
"'cp1252'",
")",
":",
"if",
"encoding",
":",
"try",
":",
"u8_text",
"=",
"text",
".",
"decode",
"(",
"encoding",
")",
".",
"encode",
"(",
"\"utf-8\"",
")",
"if",
"encoding",
"!=",
"'utf-8'",
":",
"bad_encodings",
".",
"add",
"(",
"encoding",
")",
"bad_fields",
".",
"add",
"(",
"field",
")",
"return",
"u8_text",
"except",
"UnicodeError",
":",
"continue",
"else",
":",
"# Broken beyond anything reasonable",
"bad_encodings",
".",
"add",
"(",
"'UNKNOWN/EXOTIC'",
")",
"bad_fields",
".",
"add",
"(",
"field",
")",
"return",
"str",
"(",
"text",
",",
"'utf-8'",
",",
"'replace'",
")",
".",
"replace",
"(",
"'\\ufffd'",
",",
"'_'",
")",
".",
"encode",
"(",
"\"utf-8\"",
")",
"# Go through all string fields and check them",
"for",
"field",
"in",
"(",
"\"comment\"",
",",
"\"created by\"",
")",
":",
"if",
"field",
"in",
"meta",
":",
"meta",
"[",
"field",
"]",
"=",
"sane_encoding",
"(",
"field",
",",
"meta",
"[",
"field",
"]",
")",
"meta",
"[",
"\"info\"",
"]",
"[",
"\"name\"",
"]",
"=",
"sane_encoding",
"(",
"'info name'",
",",
"meta",
"[",
"\"info\"",
"]",
"[",
"\"name\"",
"]",
")",
"for",
"entry",
"in",
"meta",
"[",
"\"info\"",
"]",
".",
"get",
"(",
"\"files\"",
",",
"[",
"]",
")",
":",
"entry",
"[",
"\"path\"",
"]",
"=",
"[",
"sane_encoding",
"(",
"'file path'",
",",
"i",
")",
"for",
"i",
"in",
"entry",
"[",
"\"path\"",
"]",
"]",
"return",
"(",
"meta",
",",
"bad_encodings",
",",
"bad_fields",
")",
"if",
"diagnostics",
"else",
"meta"
] |
Try to fix common problems, especially transcode non-standard string encodings.
|
[
"Try",
"to",
"fix",
"common",
"problems",
"especially",
"transcode",
"non",
"-",
"standard",
"string",
"encodings",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/metafile.py#L225-L258
|
7,420
|
pyroscope/pyrocore
|
src/pyrocore/util/metafile.py
|
add_fast_resume
|
def add_fast_resume(meta, datapath):
""" Add fast resume data to a metafile dict.
"""
# Get list of files
files = meta["info"].get("files", None)
single = files is None
if single:
if os.path.isdir(datapath):
datapath = os.path.join(datapath, meta["info"]["name"])
files = [Bunch(
path=[os.path.abspath(datapath)],
length=meta["info"]["length"],
)]
# Prepare resume data
resume = meta.setdefault("libtorrent_resume", {})
resume["bitfield"] = len(meta["info"]["pieces"]) // 20
resume["files"] = []
piece_length = meta["info"]["piece length"]
offset = 0
for fileinfo in files:
# Get the path into the filesystem
filepath = os.sep.join(fileinfo["path"])
if not single:
filepath = os.path.join(datapath, filepath.strip(os.sep))
# Check file size
if os.path.getsize(filepath) != fileinfo["length"]:
raise OSError(errno.EINVAL, "File size mismatch for %r [is %d, expected %d]" % (
filepath, os.path.getsize(filepath), fileinfo["length"],
))
# Add resume data for this file
resume["files"].append(dict(
priority=1,
mtime=int(os.path.getmtime(filepath)),
completed=(offset+fileinfo["length"]+piece_length-1) // piece_length
- offset // piece_length,
))
offset += fileinfo["length"]
return meta
|
python
|
def add_fast_resume(meta, datapath):
""" Add fast resume data to a metafile dict.
"""
# Get list of files
files = meta["info"].get("files", None)
single = files is None
if single:
if os.path.isdir(datapath):
datapath = os.path.join(datapath, meta["info"]["name"])
files = [Bunch(
path=[os.path.abspath(datapath)],
length=meta["info"]["length"],
)]
# Prepare resume data
resume = meta.setdefault("libtorrent_resume", {})
resume["bitfield"] = len(meta["info"]["pieces"]) // 20
resume["files"] = []
piece_length = meta["info"]["piece length"]
offset = 0
for fileinfo in files:
# Get the path into the filesystem
filepath = os.sep.join(fileinfo["path"])
if not single:
filepath = os.path.join(datapath, filepath.strip(os.sep))
# Check file size
if os.path.getsize(filepath) != fileinfo["length"]:
raise OSError(errno.EINVAL, "File size mismatch for %r [is %d, expected %d]" % (
filepath, os.path.getsize(filepath), fileinfo["length"],
))
# Add resume data for this file
resume["files"].append(dict(
priority=1,
mtime=int(os.path.getmtime(filepath)),
completed=(offset+fileinfo["length"]+piece_length-1) // piece_length
- offset // piece_length,
))
offset += fileinfo["length"]
return meta
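
# --- Hedged usage sketch (editor's addition, not part of the original row) ---
# Adds rTorrent's "libtorrent_resume" section so the client can skip the
# initial hash check; the data under the given path must already match the
# metafile exactly, or OSError(EINVAL) is raised. Paths are hypothetical.
if __name__ == "__main__":
    with open("demo.torrent", "rb") as handle:
        meta = bencode.bdecode(handle.read())
    meta = add_fast_resume(meta, os.path.expanduser("~/downloads/demo"))
    with open("demo-resume.torrent", "wb") as handle:
        handle.write(bencode.bencode(meta))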
|
[
"def",
"add_fast_resume",
"(",
"meta",
",",
"datapath",
")",
":",
"# Get list of files",
"files",
"=",
"meta",
"[",
"\"info\"",
"]",
".",
"get",
"(",
"\"files\"",
",",
"None",
")",
"single",
"=",
"files",
"is",
"None",
"if",
"single",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"datapath",
")",
":",
"datapath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"datapath",
",",
"meta",
"[",
"\"info\"",
"]",
"[",
"\"name\"",
"]",
")",
"files",
"=",
"[",
"Bunch",
"(",
"path",
"=",
"[",
"os",
".",
"path",
".",
"abspath",
"(",
"datapath",
")",
"]",
",",
"length",
"=",
"meta",
"[",
"\"info\"",
"]",
"[",
"\"length\"",
"]",
",",
")",
"]",
"# Prepare resume data",
"resume",
"=",
"meta",
".",
"setdefault",
"(",
"\"libtorrent_resume\"",
",",
"{",
"}",
")",
"resume",
"[",
"\"bitfield\"",
"]",
"=",
"len",
"(",
"meta",
"[",
"\"info\"",
"]",
"[",
"\"pieces\"",
"]",
")",
"//",
"20",
"resume",
"[",
"\"files\"",
"]",
"=",
"[",
"]",
"piece_length",
"=",
"meta",
"[",
"\"info\"",
"]",
"[",
"\"piece length\"",
"]",
"offset",
"=",
"0",
"for",
"fileinfo",
"in",
"files",
":",
"# Get the path into the filesystem",
"filepath",
"=",
"os",
".",
"sep",
".",
"join",
"(",
"fileinfo",
"[",
"\"path\"",
"]",
")",
"if",
"not",
"single",
":",
"filepath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"datapath",
",",
"filepath",
".",
"strip",
"(",
"os",
".",
"sep",
")",
")",
"# Check file size",
"if",
"os",
".",
"path",
".",
"getsize",
"(",
"filepath",
")",
"!=",
"fileinfo",
"[",
"\"length\"",
"]",
":",
"raise",
"OSError",
"(",
"errno",
".",
"EINVAL",
",",
"\"File size mismatch for %r [is %d, expected %d]\"",
"%",
"(",
"filepath",
",",
"os",
".",
"path",
".",
"getsize",
"(",
"filepath",
")",
",",
"fileinfo",
"[",
"\"length\"",
"]",
",",
")",
")",
"# Add resume data for this file",
"resume",
"[",
"\"files\"",
"]",
".",
"append",
"(",
"dict",
"(",
"priority",
"=",
"1",
",",
"mtime",
"=",
"int",
"(",
"os",
".",
"path",
".",
"getmtime",
"(",
"filepath",
")",
")",
",",
"completed",
"=",
"(",
"offset",
"+",
"fileinfo",
"[",
"\"length\"",
"]",
"+",
"piece_length",
"-",
"1",
")",
"//",
"piece_length",
"-",
"offset",
"//",
"piece_length",
",",
")",
")",
"offset",
"+=",
"fileinfo",
"[",
"\"length\"",
"]",
"return",
"meta"
] |
Add fast resume data to a metafile dict.
|
[
"Add",
"fast",
"resume",
"data",
"to",
"a",
"metafile",
"dict",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/metafile.py#L301-L343
|
7,421
|
pyroscope/pyrocore
|
src/pyrocore/util/metafile.py
|
data_size
|
def data_size(metadata):
""" Calculate the size of a torrent based on parsed metadata.
"""
info = metadata['info']
if 'length' in info:
# Single file
total_size = info['length']
else:
# Directory structure
total_size = sum([f['length'] for f in info['files']])
return total_size
|
python
|
def data_size(metadata):
""" Calculate the size of a torrent based on parsed metadata.
"""
info = metadata['info']
if 'length' in info:
# Single file
total_size = info['length']
else:
# Directory structure
total_size = sum([f['length'] for f in info['files']])
return total_size
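
# --- Hedged worked example (editor's addition, not part of the original row) ---
# Single-file mode reads "length" directly; multi-file mode sums the
# per-file lengths, so the dict below yields 1500 + 500 = 2000 bytes.
if __name__ == "__main__":
    demo = {"info": {"files": [{"length": 1500, "path": ["a"]},
                               {"length": 500, "path": ["b"]}]}}
    assert data_size(demo) == 2000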
|
[
"def",
"data_size",
"(",
"metadata",
")",
":",
"info",
"=",
"metadata",
"[",
"'info'",
"]",
"if",
"'length'",
"in",
"info",
":",
"# Single file",
"total_size",
"=",
"info",
"[",
"'length'",
"]",
"else",
":",
"# Directory structure",
"total_size",
"=",
"sum",
"(",
"[",
"f",
"[",
"'length'",
"]",
"for",
"f",
"in",
"info",
"[",
"'files'",
"]",
"]",
")",
"return",
"total_size"
] |
Calculate the size of a torrent based on parsed metadata.
|
[
"Calculate",
"the",
"size",
"of",
"a",
"torrent",
"based",
"on",
"parsed",
"metadata",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/metafile.py#L352-L364
|
7,422
|
pyroscope/pyrocore
|
src/pyrocore/util/metafile.py
|
checked_open
|
def checked_open(filename, log=None, quiet=False):
""" Open and validate the given metafile.
Optionally provide diagnostics on the passed logger, for
invalid metafiles, which then just cause a warning but no exception.
"quiet" can supress that warning.
"""
with open(filename, "rb") as handle:
raw_data = handle.read()
data = bencode.bdecode(raw_data)
try:
check_meta(data)
if raw_data != bencode.bencode(data):
raise ValueError("Bad bencoded data - dict keys out of order?")
except ValueError as exc:
if log:
# Warn about it, unless it's a quiet value query
if not quiet:
log.warn("%s: %s" % (filename, exc))
else:
raise
return data
|
python
|
def checked_open(filename, log=None, quiet=False):
""" Open and validate the given metafile.
Optionally provide diagnostics on the passed logger, for
invalid metafiles, which then just cause a warning but no exception.
"quiet" can supress that warning.
"""
with open(filename, "rb") as handle:
raw_data = handle.read()
data = bencode.bdecode(raw_data)
try:
check_meta(data)
if raw_data != bencode.bencode(data):
raise ValueError("Bad bencoded data - dict keys out of order?")
except ValueError as exc:
if log:
# Warn about it, unless it's a quiet value query
if not quiet:
log.warn("%s: %s" % (filename, exc))
else:
raise
return data
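
# --- Hedged usage sketch (editor's addition, not part of the original row) ---
# With a logger passed in, an invalid metafile only produces a warning and
# the decoded data is still returned; without one, ValueError propagates.
# "example.torrent" is a hypothetical path.
if __name__ == "__main__":
    import logging
    logging.basicConfig()
    data = checked_open("example.torrent", log=logging.getLogger("demo"))
    print(data["info"]["name"])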
|
[
"def",
"checked_open",
"(",
"filename",
",",
"log",
"=",
"None",
",",
"quiet",
"=",
"False",
")",
":",
"with",
"open",
"(",
"filename",
",",
"\"rb\"",
")",
"as",
"handle",
":",
"raw_data",
"=",
"handle",
".",
"read",
"(",
")",
"data",
"=",
"bencode",
".",
"bdecode",
"(",
"raw_data",
")",
"try",
":",
"check_meta",
"(",
"data",
")",
"if",
"raw_data",
"!=",
"bencode",
".",
"bencode",
"(",
"data",
")",
":",
"raise",
"ValueError",
"(",
"\"Bad bencoded data - dict keys out of order?\"",
")",
"except",
"ValueError",
"as",
"exc",
":",
"if",
"log",
":",
"# Warn about it, unless it's a quiet value query",
"if",
"not",
"quiet",
":",
"log",
".",
"warn",
"(",
"\"%s: %s\"",
"%",
"(",
"filename",
",",
"exc",
")",
")",
"else",
":",
"raise",
"return",
"data"
] |
Open and validate the given metafile.
Optionally provide diagnostics on the passed logger, for
invalid metafiles, which then just cause a warning but no exception.
"quiet" can supress that warning.
|
[
"Open",
"and",
"validate",
"the",
"given",
"metafile",
".",
"Optionally",
"provide",
"diagnostics",
"on",
"the",
"passed",
"logger",
"for",
"invalid",
"metafiles",
"which",
"then",
"just",
"cause",
"a",
"warning",
"but",
"no",
"exception",
".",
"quiet",
"can",
"supress",
"that",
"warning",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/metafile.py#L367-L389
|
7,423
|
pyroscope/pyrocore
|
src/pyrocore/util/metafile.py
|
MaskingPrettyPrinter.format
|
def format(self, obj, context, maxlevels, level): # pylint: disable=arguments-differ
""" Mask obj if it looks like an URL, then pass it to the super class.
"""
if isinstance(obj, basestring) and "://" in fmt.to_unicode(obj):
obj = mask_keys(obj)
return pprint.PrettyPrinter.format(self, obj, context, maxlevels, level)
|
python
|
def format(self, obj, context, maxlevels, level): # pylint: disable=arguments-differ
""" Mask obj if it looks like an URL, then pass it to the super class.
"""
if isinstance(obj, basestring) and "://" in fmt.to_unicode(obj):
obj = mask_keys(obj)
return pprint.PrettyPrinter.format(self, obj, context, maxlevels, level)
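
# --- Hedged usage sketch (editor's addition, not part of the original row) ---
# Any string containing "://" is passed through this module's `mask_keys`
# helper before formatting, hiding secrets embedded in announce URLs.
if __name__ == "__main__":
    MaskingPrettyPrinter().pprint(
        {"announce": "http://tracker.example/announce/SECRETKEY123"})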
|
[
"def",
"format",
"(",
"self",
",",
"obj",
",",
"context",
",",
"maxlevels",
",",
"level",
")",
":",
"# pylint: disable=arguments-differ",
"if",
"isinstance",
"(",
"obj",
",",
"basestring",
")",
"and",
"\"://\"",
"in",
"fmt",
".",
"to_unicode",
"(",
"obj",
")",
":",
"obj",
"=",
"mask_keys",
"(",
"obj",
")",
"return",
"pprint",
".",
"PrettyPrinter",
".",
"format",
"(",
"self",
",",
"obj",
",",
"context",
",",
"maxlevels",
",",
"level",
")"
] |
Mask obj if it looks like an URL, then pass it to the super class.
|
[
"Mask",
"obj",
"if",
"it",
"looks",
"like",
"an",
"URL",
"then",
"pass",
"it",
"to",
"the",
"super",
"class",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/metafile.py#L104-L109
|
7,424
|
pyroscope/pyrocore
|
src/pyrocore/util/metafile.py
|
Metafile._get_datapath
|
def _get_datapath(self):
""" Get a valid datapath, else raise an exception.
"""
if self._datapath is None:
raise OSError(errno.ENOENT, "You didn't provide any datapath for %r" % self.filename)
return self._datapath
|
python
|
def _get_datapath(self):
""" Get a valid datapath, else raise an exception.
"""
if self._datapath is None:
raise OSError(errno.ENOENT, "You didn't provide any datapath for %r" % self.filename)
return self._datapath
|
[
"def",
"_get_datapath",
"(",
"self",
")",
":",
"if",
"self",
".",
"_datapath",
"is",
"None",
":",
"raise",
"OSError",
"(",
"errno",
".",
"ENOENT",
",",
"\"You didn't provide any datapath for %r\"",
"%",
"self",
".",
"filename",
")",
"return",
"self",
".",
"_datapath"
] |
Get a valid datapath, else raise an exception.
|
[
"Get",
"a",
"valid",
"datapath",
"else",
"raise",
"an",
"exception",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/metafile.py#L413-L419
|
7,425
|
pyroscope/pyrocore
|
src/pyrocore/util/metafile.py
|
Metafile._set_datapath
|
def _set_datapath(self, datapath):
""" Set a datapath.
"""
if datapath:
self._datapath = datapath.rstrip(os.sep)
self._fifo = int(stat.S_ISFIFO(os.stat(self.datapath).st_mode))
else:
self._datapath = None
self._fifo = False
|
python
|
def _set_datapath(self, datapath):
""" Set a datapath.
"""
if datapath:
self._datapath = datapath.rstrip(os.sep)
self._fifo = int(stat.S_ISFIFO(os.stat(self.datapath).st_mode))
else:
self._datapath = None
self._fifo = False
|
[
"def",
"_set_datapath",
"(",
"self",
",",
"datapath",
")",
":",
"if",
"datapath",
":",
"self",
".",
"_datapath",
"=",
"datapath",
".",
"rstrip",
"(",
"os",
".",
"sep",
")",
"self",
".",
"_fifo",
"=",
"int",
"(",
"stat",
".",
"S_ISFIFO",
"(",
"os",
".",
"stat",
"(",
"self",
".",
"datapath",
")",
".",
"st_mode",
")",
")",
"else",
":",
"self",
".",
"_datapath",
"=",
"None",
"self",
".",
"_fifo",
"=",
"False"
] |
Set a datapath.
|
[
"Set",
"a",
"datapath",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/metafile.py#L421-L429
|
7,426
|
pyroscope/pyrocore
|
src/pyrocore/util/metafile.py
|
Metafile.walk
|
def walk(self):
""" Generate paths in "self.datapath".
"""
# FIFO?
if self._fifo:
if self._fifo > 1:
raise RuntimeError("INTERNAL ERROR: FIFO read twice!")
self._fifo += 1
# Read paths relative to directory containing the FIFO
with open(self.datapath, "r") as fifo:
while True:
relpath = fifo.readline().rstrip('\n')
if not relpath: # EOF?
break
self.LOG.debug("Read relative path %r from FIFO..." % (relpath,))
yield os.path.join(os.path.dirname(self.datapath), relpath)
self.LOG.debug("FIFO %r closed!" % (self.datapath,))
# Directory?
elif os.path.isdir(self.datapath):
# Walk the directory tree
for dirpath, dirnames, filenames in os.walk(self.datapath): #, followlinks=True):
# Don't scan blacklisted directories
for bad in dirnames[:]:
if any(fnmatch.fnmatch(bad, pattern) for pattern in self.ignore):
dirnames.remove(bad)
# Yield all filenames that aren't blacklisted
for filename in filenames:
if not any(fnmatch.fnmatch(filename, pattern) for pattern in self.ignore):
#yield os.path.join(dirpath[len(self.datapath)+1:], filename)
yield os.path.join(dirpath, filename)
# Single file
else:
# Yield the filename
yield self.datapath
|
python
|
def walk(self):
""" Generate paths in "self.datapath".
"""
# FIFO?
if self._fifo:
if self._fifo > 1:
raise RuntimeError("INTERNAL ERROR: FIFO read twice!")
self._fifo += 1
# Read paths relative to directory containing the FIFO
with open(self.datapath, "r") as fifo:
while True:
relpath = fifo.readline().rstrip('\n')
if not relpath: # EOF?
break
self.LOG.debug("Read relative path %r from FIFO..." % (relpath,))
yield os.path.join(os.path.dirname(self.datapath), relpath)
self.LOG.debug("FIFO %r closed!" % (self.datapath,))
# Directory?
elif os.path.isdir(self.datapath):
# Walk the directory tree
for dirpath, dirnames, filenames in os.walk(self.datapath): #, followlinks=True):
# Don't scan blacklisted directories
for bad in dirnames[:]:
if any(fnmatch.fnmatch(bad, pattern) for pattern in self.ignore):
dirnames.remove(bad)
# Yield all filenames that aren't blacklisted
for filename in filenames:
if not any(fnmatch.fnmatch(filename, pattern) for pattern in self.ignore):
#yield os.path.join(dirpath[len(self.datapath)+1:], filename)
yield os.path.join(dirpath, filename)
# Single file
else:
# Yield the filename
yield self.datapath
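
# --- Hedged usage sketch (editor's addition, not part of the original row) ---
# Assumes the constructor takes the metafile name and that the `datapath`
# property wraps _set_datapath(); the directory below is hypothetical and
# must exist, since setting the datapath stat()s it.
if __name__ == "__main__":
    mf = Metafile("demo.torrent")
    mf.datapath = os.path.expanduser("~/downloads/demo")
    for path in sorted(mf.walk()):
        print(path)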
|
[
"def",
"walk",
"(",
"self",
")",
":",
"# FIFO?",
"if",
"self",
".",
"_fifo",
":",
"if",
"self",
".",
"_fifo",
">",
"1",
":",
"raise",
"RuntimeError",
"(",
"\"INTERNAL ERROR: FIFO read twice!\"",
")",
"self",
".",
"_fifo",
"+=",
"1",
"# Read paths relative to directory containing the FIFO",
"with",
"open",
"(",
"self",
".",
"datapath",
",",
"\"r\"",
")",
"as",
"fifo",
":",
"while",
"True",
":",
"relpath",
"=",
"fifo",
".",
"readline",
"(",
")",
".",
"rstrip",
"(",
"'\\n'",
")",
"if",
"not",
"relpath",
":",
"# EOF?",
"break",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"Read relative path %r from FIFO...\"",
"%",
"(",
"relpath",
",",
")",
")",
"yield",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"self",
".",
"datapath",
")",
",",
"relpath",
")",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"FIFO %r closed!\"",
"%",
"(",
"self",
".",
"datapath",
",",
")",
")",
"# Directory?",
"elif",
"os",
".",
"path",
".",
"isdir",
"(",
"self",
".",
"datapath",
")",
":",
"# Walk the directory tree",
"for",
"dirpath",
",",
"dirnames",
",",
"filenames",
"in",
"os",
".",
"walk",
"(",
"self",
".",
"datapath",
")",
":",
"#, followlinks=True):",
"# Don't scan blacklisted directories",
"for",
"bad",
"in",
"dirnames",
"[",
":",
"]",
":",
"if",
"any",
"(",
"fnmatch",
".",
"fnmatch",
"(",
"bad",
",",
"pattern",
")",
"for",
"pattern",
"in",
"self",
".",
"ignore",
")",
":",
"dirnames",
".",
"remove",
"(",
"bad",
")",
"# Yield all filenames that aren't blacklisted",
"for",
"filename",
"in",
"filenames",
":",
"if",
"not",
"any",
"(",
"fnmatch",
".",
"fnmatch",
"(",
"filename",
",",
"pattern",
")",
"for",
"pattern",
"in",
"self",
".",
"ignore",
")",
":",
"#yield os.path.join(dirpath[len(self.datapath)+1:], filename)",
"yield",
"os",
".",
"path",
".",
"join",
"(",
"dirpath",
",",
"filename",
")",
"# Single file",
"else",
":",
"# Yield the filename",
"yield",
"self",
".",
"datapath"
] |
Generate paths in "self.datapath".
|
[
"Generate",
"paths",
"in",
"self",
".",
"datapath",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/metafile.py#L434-L472
|
7,427
|
pyroscope/pyrocore
|
src/pyrocore/util/metafile.py
|
Metafile._calc_size
|
def _calc_size(self):
""" Get total size of "self.datapath".
"""
return sum(os.path.getsize(filename)
for filename in self.walk()
)
|
python
|
def _calc_size(self):
""" Get total size of "self.datapath".
"""
return sum(os.path.getsize(filename)
for filename in self.walk()
)
|
[
"def",
"_calc_size",
"(",
"self",
")",
":",
"return",
"sum",
"(",
"os",
".",
"path",
".",
"getsize",
"(",
"filename",
")",
"for",
"filename",
"in",
"self",
".",
"walk",
"(",
")",
")"
] |
Get total size of "self.datapath".
|
[
"Get",
"total",
"size",
"of",
"self",
".",
"datapath",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/metafile.py#L475-L480
|
7,428
|
pyroscope/pyrocore
|
src/pyrocore/util/metafile.py
|
Metafile._make_info
|
def _make_info(self, piece_size, progress, walker, piece_callback=None):
""" Create info dict.
"""
# These collect the file descriptions and piece hashes
file_list = []
pieces = []
# Initialize progress state
hashing_secs = time.time()
totalsize = -1 if self._fifo else self._calc_size()
totalhashed = 0
# Start a new piece
sha1sum = hashlib.sha1()
done = 0
filename = None
# Hash all files
for filename in walker:
# Assemble file info
filesize = os.path.getsize(filename)
filepath = filename[len(os.path.dirname(self.datapath) if self._fifo else self.datapath):].lstrip(os.sep)
file_list.append({
"length": filesize,
"path": [fmt.to_utf8(x) for x in fmt.to_unicode(filepath).replace(os.sep, '/').split('/')],
})
self.LOG.debug("Hashing %r, size %d..." % (filename, filesize))
# Open file and hash it
fileoffset = 0
handle = open(filename, "rb")
try:
while fileoffset < filesize:
# Read rest of piece or file, whatever is smaller
chunk = handle.read(min(filesize - fileoffset, piece_size - done))
sha1sum.update(chunk) # bogus pylint: disable=E1101
done += len(chunk)
fileoffset += len(chunk)
totalhashed += len(chunk)
# Piece is done
if done == piece_size:
pieces.append(sha1sum.digest()) # bogus pylint: disable=E1101
if piece_callback:
piece_callback(filename, pieces[-1])
# Start a new piece
sha1sum = hashlib.sha1()
done = 0
# Report progress
if progress:
progress(totalhashed, totalsize)
finally:
handle.close()
# Add hash of partial last piece
if done > 0:
pieces.append(sha1sum.digest()) # bogus pylint: disable=E1103
if piece_callback:
piece_callback(filename, pieces[-1])
# Build the meta dict
metainfo = {
"pieces": b"".join(pieces),
"piece length": piece_size,
"name": os.path.basename(self.datapath),
}
# Handle directory/FIFO vs. single file
if self._fifo or os.path.isdir(self.datapath):
metainfo["files"] = file_list
else:
metainfo["length"] = totalhashed
hashing_secs = time.time() - hashing_secs
self.LOG.info("Hashing of %s took %.1f secs (%s/s)" % (
fmt.human_size(totalhashed).strip(), hashing_secs, fmt.human_size(totalhashed / hashing_secs).strip(),
))
# Return validated info dict
return check_info(metainfo), totalhashed
|
python
|
def _make_info(self, piece_size, progress, walker, piece_callback=None):
""" Create info dict.
"""
# These collect the file descriptions and piece hashes
file_list = []
pieces = []
# Initialize progress state
hashing_secs = time.time()
totalsize = -1 if self._fifo else self._calc_size()
totalhashed = 0
# Start a new piece
sha1sum = hashlib.sha1()
done = 0
filename = None
# Hash all files
for filename in walker:
# Assemble file info
filesize = os.path.getsize(filename)
filepath = filename[len(os.path.dirname(self.datapath) if self._fifo else self.datapath):].lstrip(os.sep)
file_list.append({
"length": filesize,
"path": [fmt.to_utf8(x) for x in fmt.to_unicode(filepath).replace(os.sep, '/').split('/')],
})
self.LOG.debug("Hashing %r, size %d..." % (filename, filesize))
# Open file and hash it
fileoffset = 0
handle = open(filename, "rb")
try:
while fileoffset < filesize:
# Read rest of piece or file, whatever is smaller
chunk = handle.read(min(filesize - fileoffset, piece_size - done))
sha1sum.update(chunk) # bogus pylint: disable=E1101
done += len(chunk)
fileoffset += len(chunk)
totalhashed += len(chunk)
# Piece is done
if done == piece_size:
pieces.append(sha1sum.digest()) # bogus pylint: disable=E1101
if piece_callback:
piece_callback(filename, pieces[-1])
# Start a new piece
sha1sum = hashlib.sha1()
done = 0
# Report progress
if progress:
progress(totalhashed, totalsize)
finally:
handle.close()
# Add hash of partial last piece
if done > 0:
pieces.append(sha1sum.digest()) # bogus pylint: disable=E1103
if piece_callback:
piece_callback(filename, pieces[-1])
# Build the meta dict
metainfo = {
"pieces": b"".join(pieces),
"piece length": piece_size,
"name": os.path.basename(self.datapath),
}
# Handle directory/FIFO vs. single file
if self._fifo or os.path.isdir(self.datapath):
metainfo["files"] = file_list
else:
metainfo["length"] = totalhashed
hashing_secs = time.time() - hashing_secs
self.LOG.info("Hashing of %s took %.1f secs (%s/s)" % (
fmt.human_size(totalhashed).strip(), hashing_secs, fmt.human_size(totalhashed / hashing_secs).strip(),
))
# Return validated info dict
return check_info(metainfo), totalhashed
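
# --- Hedged illustration (editor's addition, not part of the original row) ---
# The core idea above: pieces are fixed-size windows over the *concatenated*
# file data, so one piece may span a file boundary. A minimal standalone
# restatement of that loop for byte strings instead of files:
def _demo_piece_hashes(blobs, piece_size):
    "Hash the concatenation of `blobs` in `piece_size` chunks (sketch)."
    pieces, sha1sum, done = [], hashlib.sha1(), 0
    for blob in blobs:
        offset = 0
        while offset < len(blob):
            chunk = blob[offset:offset + min(len(blob) - offset, piece_size - done)]
            sha1sum.update(chunk)
            done += len(chunk)
            offset += len(chunk)
            if done == piece_size:  # piece complete, start a fresh hash
                pieces.append(sha1sum.digest())
                sha1sum, done = hashlib.sha1(), 0
    if done:  # partial last piece
        pieces.append(sha1sum.digest())
    return b"".join(pieces)

if __name__ == "__main__":
    # Splitting the same bytes differently must not change the piece hashes.
    assert _demo_piece_hashes([b"abcd", b"ef"], 3) == _demo_piece_hashes([b"abcdef"], 3)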
|
[
"def",
"_make_info",
"(",
"self",
",",
"piece_size",
",",
"progress",
",",
"walker",
",",
"piece_callback",
"=",
"None",
")",
":",
"# These collect the file descriptions and piece hashes",
"file_list",
"=",
"[",
"]",
"pieces",
"=",
"[",
"]",
"# Initialize progress state",
"hashing_secs",
"=",
"time",
".",
"time",
"(",
")",
"totalsize",
"=",
"-",
"1",
"if",
"self",
".",
"_fifo",
"else",
"self",
".",
"_calc_size",
"(",
")",
"totalhashed",
"=",
"0",
"# Start a new piece",
"sha1sum",
"=",
"hashlib",
".",
"sha1",
"(",
")",
"done",
"=",
"0",
"filename",
"=",
"None",
"# Hash all files",
"for",
"filename",
"in",
"walker",
":",
"# Assemble file info",
"filesize",
"=",
"os",
".",
"path",
".",
"getsize",
"(",
"filename",
")",
"filepath",
"=",
"filename",
"[",
"len",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"self",
".",
"datapath",
")",
"if",
"self",
".",
"_fifo",
"else",
"self",
".",
"datapath",
")",
":",
"]",
".",
"lstrip",
"(",
"os",
".",
"sep",
")",
"file_list",
".",
"append",
"(",
"{",
"\"length\"",
":",
"filesize",
",",
"\"path\"",
":",
"[",
"fmt",
".",
"to_utf8",
"(",
"x",
")",
"for",
"x",
"in",
"fmt",
".",
"to_unicode",
"(",
"filepath",
")",
".",
"replace",
"(",
"os",
".",
"sep",
",",
"'/'",
")",
".",
"split",
"(",
"'/'",
")",
"]",
",",
"}",
")",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"Hashing %r, size %d...\"",
"%",
"(",
"filename",
",",
"filesize",
")",
")",
"# Open file and hash it",
"fileoffset",
"=",
"0",
"handle",
"=",
"open",
"(",
"filename",
",",
"\"rb\"",
")",
"try",
":",
"while",
"fileoffset",
"<",
"filesize",
":",
"# Read rest of piece or file, whatever is smaller",
"chunk",
"=",
"handle",
".",
"read",
"(",
"min",
"(",
"filesize",
"-",
"fileoffset",
",",
"piece_size",
"-",
"done",
")",
")",
"sha1sum",
".",
"update",
"(",
"chunk",
")",
"# bogus pylint: disable=E1101",
"done",
"+=",
"len",
"(",
"chunk",
")",
"fileoffset",
"+=",
"len",
"(",
"chunk",
")",
"totalhashed",
"+=",
"len",
"(",
"chunk",
")",
"# Piece is done",
"if",
"done",
"==",
"piece_size",
":",
"pieces",
".",
"append",
"(",
"sha1sum",
".",
"digest",
"(",
")",
")",
"# bogus pylint: disable=E1101",
"if",
"piece_callback",
":",
"piece_callback",
"(",
"filename",
",",
"pieces",
"[",
"-",
"1",
"]",
")",
"# Start a new piece",
"sha1sum",
"=",
"hashlib",
".",
"sha1",
"(",
")",
"done",
"=",
"0",
"# Report progress",
"if",
"progress",
":",
"progress",
"(",
"totalhashed",
",",
"totalsize",
")",
"finally",
":",
"handle",
".",
"close",
"(",
")",
"# Add hash of partial last piece",
"if",
"done",
">",
"0",
":",
"pieces",
".",
"append",
"(",
"sha1sum",
".",
"digest",
"(",
")",
")",
"# bogus pylint: disable=E1103",
"if",
"piece_callback",
":",
"piece_callback",
"(",
"filename",
",",
"pieces",
"[",
"-",
"1",
"]",
")",
"# Build the meta dict",
"metainfo",
"=",
"{",
"\"pieces\"",
":",
"b\"\"",
".",
"join",
"(",
"pieces",
")",
",",
"\"piece length\"",
":",
"piece_size",
",",
"\"name\"",
":",
"os",
".",
"path",
".",
"basename",
"(",
"self",
".",
"datapath",
")",
",",
"}",
"# Handle directory/FIFO vs. single file",
"if",
"self",
".",
"_fifo",
"or",
"os",
".",
"path",
".",
"isdir",
"(",
"self",
".",
"datapath",
")",
":",
"metainfo",
"[",
"\"files\"",
"]",
"=",
"file_list",
"else",
":",
"metainfo",
"[",
"\"length\"",
"]",
"=",
"totalhashed",
"hashing_secs",
"=",
"time",
".",
"time",
"(",
")",
"-",
"hashing_secs",
"self",
".",
"LOG",
".",
"info",
"(",
"\"Hashing of %s took %.1f secs (%s/s)\"",
"%",
"(",
"fmt",
".",
"human_size",
"(",
"totalhashed",
")",
".",
"strip",
"(",
")",
",",
"hashing_secs",
",",
"fmt",
".",
"human_size",
"(",
"totalhashed",
"/",
"hashing_secs",
")",
".",
"strip",
"(",
")",
",",
")",
")",
"# Return validated info dict",
"return",
"check_info",
"(",
"metainfo",
")",
",",
"totalhashed"
] |
Create info dict.
|
[
"Create",
"info",
"dict",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/metafile.py#L483-L564
|
7,429
|
pyroscope/pyrocore
|
src/pyrocore/util/metafile.py
|
Metafile._make_meta
|
def _make_meta(self, tracker_url, root_name, private, progress):
""" Create torrent dict.
"""
# Calculate piece size
if self._fifo:
# TODO we need to add a (command line) param, probably for total data size
# for now, always 1MB
piece_size_exp = 20
else:
total_size = self._calc_size()
if total_size:
piece_size_exp = int(math.log(total_size) / math.log(2)) - 9
else:
piece_size_exp = 0
piece_size_exp = min(max(15, piece_size_exp), 24)
piece_size = 2 ** piece_size_exp
# Build info hash
info, totalhashed = self._make_info(piece_size, progress, self.walk() if self._fifo else sorted(self.walk()))
# Enforce unique hash per tracker
info["x_cross_seed"] = hashlib.md5(tracker_url).hexdigest()
# Set private flag
if private:
info["private"] = 1
# Freely chosen root name (default is basename of the data path)
if root_name:
info["name"] = root_name
# Torrent metadata
meta = {
"info": info,
"announce": tracker_url.strip(),
}
#XXX meta["encoding"] = "UTF-8"
# Return validated meta dict
return check_meta(meta), totalhashed
|
python
|
def _make_meta(self, tracker_url, root_name, private, progress):
""" Create torrent dict.
"""
# Calculate piece size
if self._fifo:
# TODO we need to add a (command line) param, probably for total data size
# for now, always 1MB
piece_size_exp = 20
else:
total_size = self._calc_size()
if total_size:
piece_size_exp = int(math.log(total_size) / math.log(2)) - 9
else:
piece_size_exp = 0
piece_size_exp = min(max(15, piece_size_exp), 24)
piece_size = 2 ** piece_size_exp
# Build info hash
info, totalhashed = self._make_info(piece_size, progress, self.walk() if self._fifo else sorted(self.walk()))
# Enforce unique hash per tracker
info["x_cross_seed"] = hashlib.md5(tracker_url).hexdigest()
# Set private flag
if private:
info["private"] = 1
# Freely chosen root name (default is basename of the data path)
if root_name:
info["name"] = root_name
# Torrent metadata
meta = {
"info": info,
"announce": tracker_url.strip(),
}
#XXX meta["encoding"] = "UTF-8"
# Return validated meta dict
return check_meta(meta), totalhashed
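
# --- Hedged worked example (editor's addition, not part of the original row) ---
# The heuristic above aims for roughly 512 to 1024 pieces: for 1 GiB,
# int(log2(2**30)) - 9 = 21, within [15, 24], giving 2 MiB pieces (512 of
# them). Tiny payloads clamp up to 32 KiB pieces (2**15), huge ones down to
# 16 MiB pieces (2**24).
if __name__ == "__main__":
    for total in (1, 2 ** 30, 2 ** 40):
        exp = min(max(15, int(math.log(total) / math.log(2)) - 9), 24)
        print("%16d bytes -> piece size 2**%d" % (total, exp))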
|
[
"def",
"_make_meta",
"(",
"self",
",",
"tracker_url",
",",
"root_name",
",",
"private",
",",
"progress",
")",
":",
"# Calculate piece size",
"if",
"self",
".",
"_fifo",
":",
"# TODO we need to add a (command line) param, probably for total data size",
"# for now, always 1MB",
"piece_size_exp",
"=",
"20",
"else",
":",
"total_size",
"=",
"self",
".",
"_calc_size",
"(",
")",
"if",
"total_size",
":",
"piece_size_exp",
"=",
"int",
"(",
"math",
".",
"log",
"(",
"total_size",
")",
"/",
"math",
".",
"log",
"(",
"2",
")",
")",
"-",
"9",
"else",
":",
"piece_size_exp",
"=",
"0",
"piece_size_exp",
"=",
"min",
"(",
"max",
"(",
"15",
",",
"piece_size_exp",
")",
",",
"24",
")",
"piece_size",
"=",
"2",
"**",
"piece_size_exp",
"# Build info hash",
"info",
",",
"totalhashed",
"=",
"self",
".",
"_make_info",
"(",
"piece_size",
",",
"progress",
",",
"self",
".",
"walk",
"(",
")",
"if",
"self",
".",
"_fifo",
"else",
"sorted",
"(",
"self",
".",
"walk",
"(",
")",
")",
")",
"# Enforce unique hash per tracker",
"info",
"[",
"\"x_cross_seed\"",
"]",
"=",
"hashlib",
".",
"md5",
"(",
"tracker_url",
")",
".",
"hexdigest",
"(",
")",
"# Set private flag",
"if",
"private",
":",
"info",
"[",
"\"private\"",
"]",
"=",
"1",
"# Freely chosen root name (default is basename of the data path)",
"if",
"root_name",
":",
"info",
"[",
"\"name\"",
"]",
"=",
"root_name",
"# Torrent metadata",
"meta",
"=",
"{",
"\"info\"",
":",
"info",
",",
"\"announce\"",
":",
"tracker_url",
".",
"strip",
"(",
")",
",",
"}",
"#XXX meta[\"encoding\"] = \"UTF-8\"",
"# Return validated meta dict",
"return",
"check_meta",
"(",
"meta",
")",
",",
"totalhashed"
] |
Create torrent dict.
|
[
"Create",
"torrent",
"dict",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/metafile.py#L567-L608
|
7,430
|
pyroscope/pyrocore
|
src/pyrocore/util/metafile.py
|
Metafile.check
|
def check(self, metainfo, datapath, progress=None):
""" Check piece hashes of a metafile against the given datapath.
"""
if datapath:
self.datapath = datapath
def check_piece(filename, piece):
"Callback for new piece"
if piece != metainfo["info"]["pieces"][check_piece.piece_index:check_piece.piece_index+20]:
self.LOG.warn("Piece #%d: Hashes differ in file %r" % (check_piece.piece_index//20, filename))
check_piece.piece_index += 20
check_piece.piece_index = 0
datameta, _ = self._make_info(int(metainfo["info"]["piece length"]), progress,
[datapath] if "length" in metainfo["info"] else
(os.path.join(*([datapath] + i["path"])) for i in metainfo["info"]["files"]),
piece_callback=check_piece
)
return datameta["pieces"] == metainfo["info"]["pieces"]
|
python
|
def check(self, metainfo, datapath, progress=None):
""" Check piece hashes of a metafile against the given datapath.
"""
if datapath:
self.datapath = datapath
def check_piece(filename, piece):
"Callback for new piece"
if piece != metainfo["info"]["pieces"][check_piece.piece_index:check_piece.piece_index+20]:
self.LOG.warn("Piece #%d: Hashes differ in file %r" % (check_piece.piece_index//20, filename))
check_piece.piece_index += 20
check_piece.piece_index = 0
datameta, _ = self._make_info(int(metainfo["info"]["piece length"]), progress,
[datapath] if "length" in metainfo["info"] else
(os.path.join(*([datapath] + i["path"])) for i in metainfo["info"]["files"]),
piece_callback=check_piece
)
return datameta["pieces"] == metainfo["info"]["pieces"]
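
# --- Hedged usage sketch (editor's addition, not part of the original row) ---
# Re-hashes the data and compares piece digests; mismatching pieces are also
# reported through the per-piece callback. Assumes the constructor takes the
# metafile name; all paths are hypothetical.
if __name__ == "__main__":
    mf = Metafile("demo.torrent")
    with open("demo.torrent", "rb") as handle:
        metainfo = bencode.bdecode(handle.read())
    ok = mf.check(metainfo, os.path.expanduser("~/downloads/demo"))
    print("Data matches metafile" if ok else "Data does NOT match metafile")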
|
[
"def",
"check",
"(",
"self",
",",
"metainfo",
",",
"datapath",
",",
"progress",
"=",
"None",
")",
":",
"if",
"datapath",
":",
"self",
".",
"datapath",
"=",
"datapath",
"def",
"check_piece",
"(",
"filename",
",",
"piece",
")",
":",
"\"Callback for new piece\"",
"if",
"piece",
"!=",
"metainfo",
"[",
"\"info\"",
"]",
"[",
"\"pieces\"",
"]",
"[",
"check_piece",
".",
"piece_index",
":",
"check_piece",
".",
"piece_index",
"+",
"20",
"]",
":",
"self",
".",
"LOG",
".",
"warn",
"(",
"\"Piece #%d: Hashes differ in file %r\"",
"%",
"(",
"check_piece",
".",
"piece_index",
"//",
"20",
",",
"filename",
")",
")",
"check_piece",
".",
"piece_index",
"+=",
"20",
"check_piece",
".",
"piece_index",
"=",
"0",
"datameta",
",",
"_",
"=",
"self",
".",
"_make_info",
"(",
"int",
"(",
"metainfo",
"[",
"\"info\"",
"]",
"[",
"\"piece length\"",
"]",
")",
",",
"progress",
",",
"[",
"datapath",
"]",
"if",
"\"length\"",
"in",
"metainfo",
"[",
"\"info\"",
"]",
"else",
"(",
"os",
".",
"path",
".",
"join",
"(",
"*",
"(",
"[",
"datapath",
"]",
"+",
"i",
"[",
"\"path\"",
"]",
")",
")",
"for",
"i",
"in",
"metainfo",
"[",
"\"info\"",
"]",
"[",
"\"files\"",
"]",
")",
",",
"piece_callback",
"=",
"check_piece",
")",
"return",
"datameta",
"[",
"\"pieces\"",
"]",
"==",
"metainfo",
"[",
"\"info\"",
"]",
"[",
"\"pieces\"",
"]"
] |
Check piece hashes of a metafile against the given datapath.
|
[
"Check",
"piece",
"hashes",
"of",
"a",
"metafile",
"against",
"the",
"given",
"datapath",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/metafile.py#L674-L692
|
7,431
|
pyroscope/pyrocore
|
src/pyrocore/torrent/queue.py
|
QueueManager._start
|
def _start(self, items):
""" Start some items if conditions are met.
"""
# TODO: Filter by a custom date field, for scheduled downloads starting at a certain time, or after a given delay
# TODO: Don't start anything more if download BW is used >= config threshold in %
# Check if anything more is ready to start downloading
startable = [i for i in items if self.config.startable.match(i)]
if not startable:
self.LOG.debug("Checked %d item(s), none startable according to [ %s ]",
len(items), self.config.startable)
return
# Check intermission delay
now = time.time()
if now < self.last_start:
# compensate for summer time and other oddities
self.last_start = now
delayed = int(self.last_start + self.config.intermission - now)
if delayed > 0:
self.LOG.debug("Delaying start of {:d} item(s),"
" due to {:d}s intermission with {:d}s left"
.format(len(startable), self.config.intermission, delayed))
return
# TODO: sort by priority, then loaded time
# Stick to "start_at_once" parameter, unless "downloading_min" is violated
downloading = [i for i in items if self.config.downloading.match(i)]
start_now = max(self.config.start_at_once, self.config.downloading_min - len(downloading))
start_now = min(start_now, len(startable))
#down_traffic = sum(i.down for i in downloading)
##self.LOG.info("%d downloading, down %d" % (len(downloading), down_traffic))
# Start eligible items
for idx, item in enumerate(startable):
# Check if we reached 'start_now' in this run
if idx >= start_now:
self.LOG.debug("Only starting %d item(s) in this run, %d more could be downloading" % (
start_now, len(startable)-idx,))
break
# TODO: Prevent start of more torrents that can fit on the drive (taking "off" files into account)
# (restarts items that were stopped due to the "low_diskspace" schedule, and also avoids triggering it at all)
# Only check the other conditions when we have `downloading_min` covered
if len(downloading) < self.config.downloading_min:
self.LOG.debug("Catching up from %d to a minimum of %d downloading item(s)" % (
len(downloading), self.config.downloading_min))
else:
# Limit to the given maximum of downloading items
if len(downloading) >= self.config.downloading_max:
self.LOG.debug("Already downloading %d item(s) out of %d max, %d more could be downloading" % (
len(downloading), self.config.downloading_max, len(startable)-idx,))
break
# If we made it here, start it!
self.last_start = now
downloading.append(item)
self.LOG.info("%s '%s' [%s, #%s]" % (
"WOULD start" if self.config.dry_run else "Starting",
fmt.to_utf8(item.name), item.alias, item.hash))
if not self.config.dry_run:
item.start()
if not self.config.quiet:
self.proxy.log(xmlrpc.NOHASH, "%s: Started '%s' {%s}" % (
self.__class__.__name__, fmt.to_utf8(item.name), item.alias,
))
|
python
|
def _start(self, items):
""" Start some items if conditions are met.
"""
# TODO: Filter by a custom date field, for scheduled downloads starting at a certain time, or after a given delay
# TODO: Don't start anything more if download BW is used >= config threshold in %
# Check if anything more is ready to start downloading
startable = [i for i in items if self.config.startable.match(i)]
if not startable:
self.LOG.debug("Checked %d item(s), none startable according to [ %s ]",
len(items), self.config.startable)
return
# Check intermission delay
now = time.time()
if now < self.last_start:
# compensate for summer time and other oddities
self.last_start = now
delayed = int(self.last_start + self.config.intermission - now)
if delayed > 0:
self.LOG.debug("Delaying start of {:d} item(s),"
" due to {:d}s intermission with {:d}s left"
.format(len(startable), self.config.intermission, delayed))
return
# TODO: sort by priority, then loaded time
# Stick to "start_at_once" parameter, unless "downloading_min" is violated
downloading = [i for i in items if self.config.downloading.match(i)]
start_now = max(self.config.start_at_once, self.config.downloading_min - len(downloading))
start_now = min(start_now, len(startable))
#down_traffic = sum(i.down for i in downloading)
##self.LOG.info("%d downloading, down %d" % (len(downloading), down_traffic))
# Start eligible items
for idx, item in enumerate(startable):
# Check if we reached 'start_now' in this run
if idx >= start_now:
self.LOG.debug("Only starting %d item(s) in this run, %d more could be downloading" % (
start_now, len(startable)-idx,))
break
# TODO: Prevent start of more torrents that can fit on the drive (taking "off" files into account)
# (restarts items that were stopped due to the "low_diskspace" schedule, and also avoids triggering it at all)
# Only check the other conditions when we have `downloading_min` covered
if len(downloading) < self.config.downloading_min:
self.LOG.debug("Catching up from %d to a minimum of %d downloading item(s)" % (
len(downloading), self.config.downloading_min))
else:
# Limit to the given maximum of downloading items
if len(downloading) >= self.config.downloading_max:
self.LOG.debug("Already downloading %d item(s) out of %d max, %d more could be downloading" % (
len(downloading), self.config.downloading_max, len(startable)-idx,))
break
# If we made it here, start it!
self.last_start = now
downloading.append(item)
self.LOG.info("%s '%s' [%s, #%s]" % (
"WOULD start" if self.config.dry_run else "Starting",
fmt.to_utf8(item.name), item.alias, item.hash))
if not self.config.dry_run:
item.start()
if not self.config.quiet:
self.proxy.log(xmlrpc.NOHASH, "%s: Started '%s' {%s}" % (
self.__class__.__name__, fmt.to_utf8(item.name), item.alias,
))
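
# --- Hedged worked example (editor's addition, not part of the original row) ---
# How many items one run may start: enough to reach `downloading_min`, never
# fewer than `start_at_once`, and never more than there are startable items.
# With start_at_once=1, downloading_min=3, 1 item downloading and 5
# startable: max(1, 3 - 1) = 2 items get started this run.
if __name__ == "__main__":
    start_at_once, downloading_min, downloading, startable = 1, 3, 1, 5
    start_now = min(max(start_at_once, downloading_min - downloading), startable)
    assert start_now == 2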
|
[
"def",
"_start",
"(",
"self",
",",
"items",
")",
":",
"# TODO: Filter by a custom date field, for scheduled downloads starting at a certain time, or after a given delay",
"# TODO: Don't start anything more if download BW is used >= config threshold in %",
"# Check if anything more is ready to start downloading",
"startable",
"=",
"[",
"i",
"for",
"i",
"in",
"items",
"if",
"self",
".",
"config",
".",
"startable",
".",
"match",
"(",
"i",
")",
"]",
"if",
"not",
"startable",
":",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"Checked %d item(s), none startable according to [ %s ]\"",
",",
"len",
"(",
"items",
")",
",",
"self",
".",
"config",
".",
"startable",
")",
"return",
"# Check intermission delay",
"now",
"=",
"time",
".",
"time",
"(",
")",
"if",
"now",
"<",
"self",
".",
"last_start",
":",
"# compensate for summer time and other oddities",
"self",
".",
"last_start",
"=",
"now",
"delayed",
"=",
"int",
"(",
"self",
".",
"last_start",
"+",
"self",
".",
"config",
".",
"intermission",
"-",
"now",
")",
"if",
"delayed",
">",
"0",
":",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"Delaying start of {:d} item(s),\"",
"\" due to {:d}s intermission with {:d}s left\"",
".",
"format",
"(",
"len",
"(",
"startable",
")",
",",
"self",
".",
"config",
".",
"intermission",
",",
"delayed",
")",
")",
"return",
"# TODO: sort by priority, then loaded time",
"# Stick to \"start_at_once\" parameter, unless \"downloading_min\" is violated",
"downloading",
"=",
"[",
"i",
"for",
"i",
"in",
"items",
"if",
"self",
".",
"config",
".",
"downloading",
".",
"match",
"(",
"i",
")",
"]",
"start_now",
"=",
"max",
"(",
"self",
".",
"config",
".",
"start_at_once",
",",
"self",
".",
"config",
".",
"downloading_min",
"-",
"len",
"(",
"downloading",
")",
")",
"start_now",
"=",
"min",
"(",
"start_now",
",",
"len",
"(",
"startable",
")",
")",
"#down_traffic = sum(i.down for i in downloading)",
"##self.LOG.info(\"%d downloading, down %d\" % (len(downloading), down_traffic))",
"# Start eligible items",
"for",
"idx",
",",
"item",
"in",
"enumerate",
"(",
"startable",
")",
":",
"# Check if we reached 'start_now' in this run",
"if",
"idx",
">=",
"start_now",
":",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"Only starting %d item(s) in this run, %d more could be downloading\"",
"%",
"(",
"start_now",
",",
"len",
"(",
"startable",
")",
"-",
"idx",
",",
")",
")",
"break",
"# TODO: Prevent start of more torrents that can fit on the drive (taking \"off\" files into account)",
"# (restarts items that were stopped due to the \"low_diskspace\" schedule, and also avoids triggering it at all)",
"# Only check the other conditions when we have `downloading_min` covered",
"if",
"len",
"(",
"downloading",
")",
"<",
"self",
".",
"config",
".",
"downloading_min",
":",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"Catching up from %d to a minimum of %d downloading item(s)\"",
"%",
"(",
"len",
"(",
"downloading",
")",
",",
"self",
".",
"config",
".",
"downloading_min",
")",
")",
"else",
":",
"# Limit to the given maximum of downloading items",
"if",
"len",
"(",
"downloading",
")",
">=",
"self",
".",
"config",
".",
"downloading_max",
":",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"Already downloading %d item(s) out of %d max, %d more could be downloading\"",
"%",
"(",
"len",
"(",
"downloading",
")",
",",
"self",
".",
"config",
".",
"downloading_max",
",",
"len",
"(",
"startable",
")",
"-",
"idx",
",",
")",
")",
"break",
"# If we made it here, start it!",
"self",
".",
"last_start",
"=",
"now",
"downloading",
".",
"append",
"(",
"item",
")",
"self",
".",
"LOG",
".",
"info",
"(",
"\"%s '%s' [%s, #%s]\"",
"%",
"(",
"\"WOULD start\"",
"if",
"self",
".",
"config",
".",
"dry_run",
"else",
"\"Starting\"",
",",
"fmt",
".",
"to_utf8",
"(",
"item",
".",
"name",
")",
",",
"item",
".",
"alias",
",",
"item",
".",
"hash",
")",
")",
"if",
"not",
"self",
".",
"config",
".",
"dry_run",
":",
"item",
".",
"start",
"(",
")",
"if",
"not",
"self",
".",
"config",
".",
"quiet",
":",
"self",
".",
"proxy",
".",
"log",
"(",
"xmlrpc",
".",
"NOHASH",
",",
"\"%s: Started '%s' {%s}\"",
"%",
"(",
"self",
".",
"__class__",
".",
"__name__",
",",
"fmt",
".",
"to_utf8",
"(",
"item",
".",
"name",
")",
",",
"item",
".",
"alias",
",",
")",
")"
] |
Start some items if conditions are met.
|
[
"Start",
"some",
"items",
"if",
"conditions",
"are",
"met",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/queue.py#L63-L132
|
7,432
|
pyroscope/pyrocore
|
src/pyrocore/torrent/queue.py
|
QueueManager.run
|
def run(self):
""" Queue manager job callback.
"""
try:
self.proxy = config_ini.engine.open()
# Get items from 'pyrotorque' view
items = list(config_ini.engine.items(self.VIEWNAME, cache=False))
if self.sort_key:
items.sort(key=self.sort_key)
#self.LOG.debug("Sorted: %r" % [i.name for i in items])
# Handle found items
self._start(items)
self.LOG.debug("%s - %s" % (config_ini.engine.engine_id, self.proxy))
except (error.LoggableError, xmlrpc.ERRORS) as exc:
# only debug, let the statistics logger do its job
self.LOG.debug(str(exc))
|
python
|
def run(self):
""" Queue manager job callback.
"""
try:
self.proxy = config_ini.engine.open()
# Get items from 'pyrotorque' view
items = list(config_ini.engine.items(self.VIEWNAME, cache=False))
if self.sort_key:
items.sort(key=self.sort_key)
#self.LOG.debug("Sorted: %r" % [i.name for i in items])
# Handle found items
self._start(items)
self.LOG.debug("%s - %s" % (config_ini.engine.engine_id, self.proxy))
except (error.LoggableError, xmlrpc.ERRORS) as exc:
# only debug, let the statistics logger do its job
self.LOG.debug(str(exc))
|
[
"def",
"run",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"proxy",
"=",
"config_ini",
".",
"engine",
".",
"open",
"(",
")",
"# Get items from 'pyrotorque' view",
"items",
"=",
"list",
"(",
"config_ini",
".",
"engine",
".",
"items",
"(",
"self",
".",
"VIEWNAME",
",",
"cache",
"=",
"False",
")",
")",
"if",
"self",
".",
"sort_key",
":",
"items",
".",
"sort",
"(",
"key",
"=",
"self",
".",
"sort_key",
")",
"#self.LOG.debug(\"Sorted: %r\" % [i.name for i in items])",
"# Handle found items",
"self",
".",
"_start",
"(",
"items",
")",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"%s - %s\"",
"%",
"(",
"config_ini",
".",
"engine",
".",
"engine_id",
",",
"self",
".",
"proxy",
")",
")",
"except",
"(",
"error",
".",
"LoggableError",
",",
"xmlrpc",
".",
"ERRORS",
")",
"as",
"exc",
":",
"# only debug, let the statistics logger do its job",
"self",
".",
"LOG",
".",
"debug",
"(",
"str",
"(",
"exc",
")",
")"
] |
Queue manager job callback.
|
[
"Queue",
"manager",
"job",
"callback",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/queue.py#L135-L153
|
7,433
|
pyroscope/pyrocore
|
src/pyrocore/scripts/rtcontrol.py
|
print_help_fields
|
def print_help_fields():
""" Print help about fields and field formatters.
"""
# Mock entries, so they fulfill the expectations towards a field definition
def custom_manifold():
"named rTorrent custom attribute, e.g. 'custom_completion_target'"
return ("custom_KEY", custom_manifold)
def kind_manifold():
"file types that contribute at least N% to the item's total size"
return ("kind_N", kind_manifold)
print('')
print("Fields are:")
print("\n".join([" %-21s %s" % (name, field.__doc__)
for name, field in sorted(engine.FieldDefinition.FIELDS.items() + [
custom_manifold(), kind_manifold(),
])
]))
print('')
print("Format specifiers are:")
print("\n".join([" %-21s %s" % (name, doc)
for name, doc in sorted(formatting.OutputMapping.formatter_help())
]))
print('')
print("Append format specifiers using a '.' to field names in '-o' lists,\n"
"e.g. 'size.sz' or 'completed.raw.delta'.")
|
python
|
def print_help_fields():
""" Print help about fields and field formatters.
"""
# Mock entries, so they fulfill the expectations towards a field definition
def custom_manifold():
"named rTorrent custom attribute, e.g. 'custom_completion_target'"
return ("custom_KEY", custom_manifold)
def kind_manifold():
"file types that contribute at least N% to the item's total size"
return ("kind_N", kind_manifold)
print('')
print("Fields are:")
print("\n".join([" %-21s %s" % (name, field.__doc__)
for name, field in sorted(engine.FieldDefinition.FIELDS.items() + [
custom_manifold(), kind_manifold(),
])
]))
print('')
print("Format specifiers are:")
print("\n".join([" %-21s %s" % (name, doc)
for name, doc in sorted(formatting.OutputMapping.formatter_help())
]))
print('')
print("Append format specifiers using a '.' to field names in '-o' lists,\n"
"e.g. 'size.sz' or 'completed.raw.delta'.")
|
[
"def",
"print_help_fields",
"(",
")",
":",
"# Mock entries, so they fulfill the expectations towards a field definition",
"def",
"custom_manifold",
"(",
")",
":",
"\"named rTorrent custom attribute, e.g. 'custom_completion_target'\"",
"return",
"(",
"\"custom_KEY\"",
",",
"custom_manifold",
")",
"def",
"kind_manifold",
"(",
")",
":",
"\"file types that contribute at least N% to the item's total size\"",
"return",
"(",
"\"kind_N\"",
",",
"kind_manifold",
")",
"print",
"(",
"''",
")",
"print",
"(",
"\"Fields are:\"",
")",
"print",
"(",
"\"\\n\"",
".",
"join",
"(",
"[",
"\" %-21s %s\"",
"%",
"(",
"name",
",",
"field",
".",
"__doc__",
")",
"for",
"name",
",",
"field",
"in",
"sorted",
"(",
"engine",
".",
"FieldDefinition",
".",
"FIELDS",
".",
"items",
"(",
")",
"+",
"[",
"custom_manifold",
"(",
")",
",",
"kind_manifold",
"(",
")",
",",
"]",
")",
"]",
")",
")",
"print",
"(",
"''",
")",
"print",
"(",
"\"Format specifiers are:\"",
")",
"print",
"(",
"\"\\n\"",
".",
"join",
"(",
"[",
"\" %-21s %s\"",
"%",
"(",
"name",
",",
"doc",
")",
"for",
"name",
",",
"doc",
"in",
"sorted",
"(",
"formatting",
".",
"OutputMapping",
".",
"formatter_help",
"(",
")",
")",
"]",
")",
")",
"print",
"(",
"''",
")",
"print",
"(",
"\"Append format specifiers using a '.' to field names in '-o' lists,\\n\"",
"\"e.g. 'size.sz' or 'completed.raw.delta'.\"",
")"
] |
Print help about fields and field formatters.
|
[
"Print",
"help",
"about",
"fields",
"and",
"field",
"formatters",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtcontrol.py#L35-L61
|
7,434
|
pyroscope/pyrocore
|
src/pyrocore/scripts/rtcontrol.py
|
FieldStatistics.add
|
def add(self, field, val):
"Add a sample"
if engine.FieldDefinition.FIELDS[field]._matcher is matching.TimeFilter:
val = self._basetime - val
try:
self.total[field] += val
self.min[field] = min(self.min[field], val) if field in self.min else val
self.max[field] = max(self.max[field], val)
except (ValueError, TypeError):
self.errors[field] += 1
|
python
|
def add(self, field, val):
"Add a sample"
if engine.FieldDefinition.FIELDS[field]._matcher is matching.TimeFilter:
val = self._basetime - val
try:
self.total[field] += val
self.min[field] = min(self.min[field], val) if field in self.min else val
self.max[field] = max(self.max[field], val)
except (ValueError, TypeError):
self.errors[field] += 1
|
[
"def",
"add",
"(",
"self",
",",
"field",
",",
"val",
")",
":",
"if",
"engine",
".",
"FieldDefinition",
".",
"FIELDS",
"[",
"field",
"]",
".",
"_matcher",
"is",
"matching",
".",
"TimeFilter",
":",
"val",
"=",
"self",
".",
"_basetime",
"-",
"val",
"try",
":",
"self",
".",
"total",
"[",
"field",
"]",
"+=",
"val",
"self",
".",
"min",
"[",
"field",
"]",
"=",
"min",
"(",
"self",
".",
"min",
"[",
"field",
"]",
",",
"val",
")",
"if",
"field",
"in",
"self",
".",
"min",
"else",
"val",
"self",
".",
"max",
"[",
"field",
"]",
"=",
"max",
"(",
"self",
".",
"max",
"[",
"field",
"]",
",",
"val",
")",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"self",
".",
"errors",
"[",
"field",
"]",
"+=",
"1"
] |
Add a sample
|
[
"Add",
"a",
"sample"
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtcontrol.py#L83-L93
|
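A note on the ``TimeFilter`` branch in ``add()`` above: timestamp fields hold epoch seconds, so they are converted to ages relative to ``_basetime`` before the min/max/total aggregation. A minimal standalone sketch of just that conversion (the field value is invented for illustration):

import time

basetime = time.time()
completed_epoch = basetime - 3600    # hypothetical: item finished an hour ago

age = basetime - completed_epoch     # the value FieldStatistics.add() aggregates
print("age in seconds: %d" % age)    # -> 3600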
7,435
|
pyroscope/pyrocore
|
src/pyrocore/scripts/rtcontrol.py
|
RtorrentControl.help_completion_fields
|
def help_completion_fields(self):
""" Return valid field names.
"""
for name, field in sorted(engine.FieldDefinition.FIELDS.items()):
if issubclass(field._matcher, matching.BoolFilter):
yield "%s=no" % (name,)
yield "%s=yes" % (name,)
continue
elif issubclass(field._matcher, matching.PatternFilter):
yield "%s=" % (name,)
yield "%s=/" % (name,)
yield "%s=?" % (name,)
yield "%s=\"'*'\"" % (name,)
continue
elif issubclass(field._matcher, matching.NumericFilterBase):
for i in range(10):
yield "%s=%d" % (name, i)
else:
yield "%s=" % (name,)
yield r"%s=+" % (name,)
yield r"%s=-" % (name,)
yield "custom_"
yield "kind_"
|
python
|
def help_completion_fields(self):
""" Return valid field names.
"""
for name, field in sorted(engine.FieldDefinition.FIELDS.items()):
if issubclass(field._matcher, matching.BoolFilter):
yield "%s=no" % (name,)
yield "%s=yes" % (name,)
continue
elif issubclass(field._matcher, matching.PatternFilter):
yield "%s=" % (name,)
yield "%s=/" % (name,)
yield "%s=?" % (name,)
yield "%s=\"'*'\"" % (name,)
continue
elif issubclass(field._matcher, matching.NumericFilterBase):
for i in range(10):
yield "%s=%d" % (name, i)
else:
yield "%s=" % (name,)
yield r"%s=+" % (name,)
yield r"%s=-" % (name,)
yield "custom_"
yield "kind_"
|
[
"def",
"help_completion_fields",
"(",
"self",
")",
":",
"for",
"name",
",",
"field",
"in",
"sorted",
"(",
"engine",
".",
"FieldDefinition",
".",
"FIELDS",
".",
"items",
"(",
")",
")",
":",
"if",
"issubclass",
"(",
"field",
".",
"_matcher",
",",
"matching",
".",
"BoolFilter",
")",
":",
"yield",
"\"%s=no\"",
"%",
"(",
"name",
",",
")",
"yield",
"\"%s=yes\"",
"%",
"(",
"name",
",",
")",
"continue",
"elif",
"issubclass",
"(",
"field",
".",
"_matcher",
",",
"matching",
".",
"PatternFilter",
")",
":",
"yield",
"\"%s=\"",
"%",
"(",
"name",
",",
")",
"yield",
"\"%s=/\"",
"%",
"(",
"name",
",",
")",
"yield",
"\"%s=?\"",
"%",
"(",
"name",
",",
")",
"yield",
"\"%s=\\\"'*'\\\"\"",
"%",
"(",
"name",
",",
")",
"continue",
"elif",
"issubclass",
"(",
"field",
".",
"_matcher",
",",
"matching",
".",
"NumericFilterBase",
")",
":",
"for",
"i",
"in",
"range",
"(",
"10",
")",
":",
"yield",
"\"%s=%d\"",
"%",
"(",
"name",
",",
"i",
")",
"else",
":",
"yield",
"\"%s=\"",
"%",
"(",
"name",
",",
")",
"yield",
"r\"%s=+\"",
"%",
"(",
"name",
",",
")",
"yield",
"r\"%s=-\"",
"%",
"(",
"name",
",",
")",
"yield",
"\"custom_\"",
"yield",
"\"kind_\""
] |
Return valid field names.
|
[
"Return",
"valid",
"field",
"names",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtcontrol.py#L309-L333
|
7,436
|
pyroscope/pyrocore
|
src/pyrocore/scripts/rtcontrol.py
|
RtorrentControl.format_item
|
def format_item(self, item, defaults=None, stencil=None):
""" Format an item.
"""
from pyrobase.osutil import shell_escape
try:
item_text = fmt.to_console(formatting.format_item(self.options.output_format, item, defaults))
except (NameError, ValueError, TypeError) as exc:
self.fatal("Trouble with formatting item %r\n\n FORMAT = %r\n\n REASON =" % (item, self.options.output_format), exc)
raise # in --debug mode
if self.options.shell:
item_text = '\t'.join(shell_escape(i) for i in item_text.split('\t'))
# Justify headers according to stencil
if stencil:
item_text = '\t'.join(i.ljust(len(s)) for i, s in zip(item_text.split('\t'), stencil))
return item_text
|
python
|
def format_item(self, item, defaults=None, stencil=None):
""" Format an item.
"""
from pyrobase.osutil import shell_escape
try:
item_text = fmt.to_console(formatting.format_item(self.options.output_format, item, defaults))
except (NameError, ValueError, TypeError) as exc:
self.fatal("Trouble with formatting item %r\n\n FORMAT = %r\n\n REASON =" % (item, self.options.output_format), exc)
raise # in --debug mode
if self.options.shell:
item_text = '\t'.join(shell_escape(i) for i in item_text.split('\t'))
# Justify headers according to stencil
if stencil:
item_text = '\t'.join(i.ljust(len(s)) for i, s in zip(item_text.split('\t'), stencil))
return item_text
|
[
"def",
"format_item",
"(",
"self",
",",
"item",
",",
"defaults",
"=",
"None",
",",
"stencil",
"=",
"None",
")",
":",
"from",
"pyrobase",
".",
"osutil",
"import",
"shell_escape",
"try",
":",
"item_text",
"=",
"fmt",
".",
"to_console",
"(",
"formatting",
".",
"format_item",
"(",
"self",
".",
"options",
".",
"output_format",
",",
"item",
",",
"defaults",
")",
")",
"except",
"(",
"NameError",
",",
"ValueError",
",",
"TypeError",
")",
",",
"exc",
":",
"self",
".",
"fatal",
"(",
"\"Trouble with formatting item %r\\n\\n FORMAT = %r\\n\\n REASON =\"",
"%",
"(",
"item",
",",
"self",
".",
"options",
".",
"output_format",
")",
",",
"exc",
")",
"raise",
"# in --debug mode",
"if",
"self",
".",
"options",
".",
"shell",
":",
"item_text",
"=",
"'\\t'",
".",
"join",
"(",
"shell_escape",
"(",
"i",
")",
"for",
"i",
"in",
"item_text",
".",
"split",
"(",
"'\\t'",
")",
")",
"# Justify headers according to stencil",
"if",
"stencil",
":",
"item_text",
"=",
"'\\t'",
".",
"join",
"(",
"i",
".",
"ljust",
"(",
"len",
"(",
"s",
")",
")",
"for",
"i",
",",
"s",
"in",
"zip",
"(",
"item_text",
".",
"split",
"(",
"'\\t'",
")",
",",
"stencil",
")",
")",
"return",
"item_text"
] |
Format an item.
|
[
"Format",
"an",
"item",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtcontrol.py#L337-L355
|
7,437
|
pyroscope/pyrocore
|
src/pyrocore/scripts/rtcontrol.py
|
RtorrentControl.emit
|
def emit(self, item, defaults=None, stencil=None, to_log=False, item_formatter=None):
""" Print an item to stdout, or the log on INFO level.
"""
item_text = self.format_item(item, defaults, stencil)
# Post-process line?
if item_formatter:
item_text = item_formatter(item_text)
# For a header, use configured escape codes on a terminal
if item is None and os.isatty(sys.stdout.fileno()):
item_text = ''.join((config.output_header_ecma48, item_text, "\x1B[0m"))
# Dump to selected target
if to_log:
if callable(to_log):
to_log(item_text)
else:
self.LOG.info(item_text)
elif self.options.nul:
sys.stdout.write(item_text + '\0')
sys.stdout.flush()
else:
print(item_text)
return item_text.count('\n') + 1
|
python
|
def emit(self, item, defaults=None, stencil=None, to_log=False, item_formatter=None):
""" Print an item to stdout, or the log on INFO level.
"""
item_text = self.format_item(item, defaults, stencil)
# Post-process line?
if item_formatter:
item_text = item_formatter(item_text)
# For a header, use configured escape codes on a terminal
if item is None and os.isatty(sys.stdout.fileno()):
item_text = ''.join((config.output_header_ecma48, item_text, "\x1B[0m"))
# Dump to selected target
if to_log:
if callable(to_log):
to_log(item_text)
else:
self.LOG.info(item_text)
elif self.options.nul:
sys.stdout.write(item_text + '\0')
sys.stdout.flush()
else:
print(item_text)
return item_text.count('\n') + 1
|
[
"def",
"emit",
"(",
"self",
",",
"item",
",",
"defaults",
"=",
"None",
",",
"stencil",
"=",
"None",
",",
"to_log",
"=",
"False",
",",
"item_formatter",
"=",
"None",
")",
":",
"item_text",
"=",
"self",
".",
"format_item",
"(",
"item",
",",
"defaults",
",",
"stencil",
")",
"# Post-process line?",
"if",
"item_formatter",
":",
"item_text",
"=",
"item_formatter",
"(",
"item_text",
")",
"# For a header, use configured escape codes on a terminal",
"if",
"item",
"is",
"None",
"and",
"os",
".",
"isatty",
"(",
"sys",
".",
"stdout",
".",
"fileno",
"(",
")",
")",
":",
"item_text",
"=",
"''",
".",
"join",
"(",
"(",
"config",
".",
"output_header_ecma48",
",",
"item_text",
",",
"\"\\x1B[0m\"",
")",
")",
"# Dump to selected target",
"if",
"to_log",
":",
"if",
"callable",
"(",
"to_log",
")",
":",
"to_log",
"(",
"item_text",
")",
"else",
":",
"self",
".",
"LOG",
".",
"info",
"(",
"item_text",
")",
"elif",
"self",
".",
"options",
".",
"nul",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"item_text",
"+",
"'\\0'",
")",
"sys",
".",
"stdout",
".",
"flush",
"(",
")",
"else",
":",
"print",
"(",
"item_text",
")",
"return",
"item_text",
".",
"count",
"(",
"'\\n'",
")",
"+",
"1"
] |
Print an item to stdout, or the log on INFO level.
|
[
"Print",
"an",
"item",
"to",
"stdout",
"or",
"the",
"log",
"on",
"INFO",
"level",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtcontrol.py#L358-L383
|
7,438
|
pyroscope/pyrocore
|
src/pyrocore/scripts/rtcontrol.py
|
RtorrentControl.validate_output_format
|
def validate_output_format(self, default_format):
""" Prepare output format for later use.
"""
output_format = self.options.output_format
# Use default format if none is given
if output_format is None:
output_format = default_format
# Check if it's a custom output format from configuration
# (they take precedence over field names, so name them wisely)
output_format = config.formats.get(output_format, output_format)
# Expand plain field list to usable form
if re.match(r"^[,._0-9a-zA-Z]+$", output_format):
self.plain_output_format = True
output_format = "%%(%s)s" % ")s\t%(".join(formatting.validate_field_list(output_format, allow_fmt_specs=True))
# Replace some escape sequences
output_format = (output_format
.replace(r"\\", "\\")
.replace(r"\n", "\n")
.replace(r"\t", "\t")
.replace(r"\$", "\0") # the next 3 allow using $() instead of %()
.replace("$(", "%(")
.replace("\0", "$")
.replace(r"\ ", " ") # to prevent stripping in config file
#.replace(r"\", "\")
)
self.options.output_format = formatting.preparse(output_format)
|
python
|
def validate_output_format(self, default_format):
""" Prepare output format for later use.
"""
output_format = self.options.output_format
# Use default format if none is given
if output_format is None:
output_format = default_format
# Check if it's a custom output format from configuration
# (they take precedence over field names, so name them wisely)
output_format = config.formats.get(output_format, output_format)
# Expand plain field list to usable form
if re.match(r"^[,._0-9a-zA-Z]+$", output_format):
self.plain_output_format = True
output_format = "%%(%s)s" % ")s\t%(".join(formatting.validate_field_list(output_format, allow_fmt_specs=True))
# Replace some escape sequences
output_format = (output_format
.replace(r"\\", "\\")
.replace(r"\n", "\n")
.replace(r"\t", "\t")
.replace(r"\$", "\0") # the next 3 allow using $() instead of %()
.replace("$(", "%(")
.replace("\0", "$")
.replace(r"\ ", " ") # to prevent stripping in config file
#.replace(r"\", "\")
)
self.options.output_format = formatting.preparse(output_format)
|
[
"def",
"validate_output_format",
"(",
"self",
",",
"default_format",
")",
":",
"output_format",
"=",
"self",
".",
"options",
".",
"output_format",
"# Use default format if none is given",
"if",
"output_format",
"is",
"None",
":",
"output_format",
"=",
"default_format",
"# Check if it's a custom output format from configuration",
"# (they take precedence over field names, so name them wisely)",
"output_format",
"=",
"config",
".",
"formats",
".",
"get",
"(",
"output_format",
",",
"output_format",
")",
"# Expand plain field list to usable form",
"if",
"re",
".",
"match",
"(",
"r\"^[,._0-9a-zA-Z]+$\"",
",",
"output_format",
")",
":",
"self",
".",
"plain_output_format",
"=",
"True",
"output_format",
"=",
"\"%%(%s)s\"",
"%",
"\")s\\t%(\"",
".",
"join",
"(",
"formatting",
".",
"validate_field_list",
"(",
"output_format",
",",
"allow_fmt_specs",
"=",
"True",
")",
")",
"# Replace some escape sequences",
"output_format",
"=",
"(",
"output_format",
".",
"replace",
"(",
"r\"\\\\\"",
",",
"\"\\\\\"",
")",
".",
"replace",
"(",
"r\"\\n\"",
",",
"\"\\n\"",
")",
".",
"replace",
"(",
"r\"\\t\"",
",",
"\"\\t\"",
")",
".",
"replace",
"(",
"r\"\\$\"",
",",
"\"\\0\"",
")",
"# the next 3 allow using $() instead of %()",
".",
"replace",
"(",
"\"$(\"",
",",
"\"%(\"",
")",
".",
"replace",
"(",
"\"\\0\"",
",",
"\"$\"",
")",
".",
"replace",
"(",
"r\"\\ \"",
",",
"\" \"",
")",
"# to prevent stripping in config file",
"#.replace(r\"\\\", \"\\\")",
")",
"self",
".",
"options",
".",
"output_format",
"=",
"formatting",
".",
"preparse",
"(",
"output_format",
")"
] |
Prepare output format for later use.
|
[
"Prepare",
"output",
"format",
"for",
"later",
"use",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtcontrol.py#L387-L417
|
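The escape handling in ``validate_output_format()`` uses NUL (``"\0"``) as a scratch character, so an escaped dollar (``\$``) survives the ``$(`` to ``%(`` rewrite as a literal dollar sign. A minimal reproduction of that round-trip on an invented template:

template = r"Price: \$9 for $(name)s"
out = (template
       .replace(r"\$", "\0")   # park escaped dollars in a scratch char
       .replace("$(", "%(")    # $(field)s becomes %(field)s
       .replace("\0", "$"))    # restore literal dollars
print(out % dict(name="tea"))  # -> "Price: $9 for tea"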
7,439
|
pyroscope/pyrocore
|
src/pyrocore/scripts/rtcontrol.py
|
RtorrentControl.get_output_fields
|
def get_output_fields(self):
""" Get field names from output template.
"""
# Re-engineer list from output format
# XXX TODO: Would be better to use a FieldRecorder class to catch the full field names
emit_fields = list(i.lower() for i in re.sub(r"[^_A-Z]+", ' ', self.format_item(None)).split())
# Validate result
result = []
for name in emit_fields[:]:
if name not in engine.FieldDefinition.FIELDS:
self.LOG.warn("Omitted unknown name '%s' from statistics and output format sorting" % name)
else:
result.append(name)
return result
|
python
|
def get_output_fields(self):
""" Get field names from output template.
"""
# Re-engineer list from output format
# XXX TODO: Would be better to use a FieldRecorder class to catch the full field names
emit_fields = list(i.lower() for i in re.sub(r"[^_A-Z]+", ' ', self.format_item(None)).split())
# Validate result
result = []
for name in emit_fields[:]:
if name not in engine.FieldDefinition.FIELDS:
self.LOG.warn("Omitted unknown name '%s' from statistics and output format sorting" % name)
else:
result.append(name)
return result
|
[
"def",
"get_output_fields",
"(",
"self",
")",
":",
"# Re-engineer list from output format",
"# XXX TODO: Would be better to use a FieldRecorder class to catch the full field names",
"emit_fields",
"=",
"list",
"(",
"i",
".",
"lower",
"(",
")",
"for",
"i",
"in",
"re",
".",
"sub",
"(",
"r\"[^_A-Z]+\"",
",",
"' '",
",",
"self",
".",
"format_item",
"(",
"None",
")",
")",
".",
"split",
"(",
")",
")",
"# Validate result",
"result",
"=",
"[",
"]",
"for",
"name",
"in",
"emit_fields",
"[",
":",
"]",
":",
"if",
"name",
"not",
"in",
"engine",
".",
"FieldDefinition",
".",
"FIELDS",
":",
"self",
".",
"LOG",
".",
"warn",
"(",
"\"Omitted unknown name '%s' from statistics and output format sorting\"",
"%",
"name",
")",
"else",
":",
"result",
".",
"append",
"(",
"name",
")",
"return",
"result"
] |
Get field names from output template.
|
[
"Get",
"field",
"names",
"from",
"output",
"template",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtcontrol.py#L421-L436
|
7,440
|
pyroscope/pyrocore
|
src/pyrocore/scripts/rtcontrol.py
|
RtorrentControl.validate_sort_fields
|
def validate_sort_fields(self):
""" Take care of sorting.
"""
sort_fields = ','.join(self.options.sort_fields)
if sort_fields == '*':
sort_fields = self.get_output_fields()
return formatting.validate_sort_fields(sort_fields or config.sort_fields)
|
python
|
def validate_sort_fields(self):
""" Take care of sorting.
"""
sort_fields = ','.join(self.options.sort_fields)
if sort_fields == '*':
sort_fields = self.get_output_fields()
return formatting.validate_sort_fields(sort_fields or config.sort_fields)
|
[
"def",
"validate_sort_fields",
"(",
"self",
")",
":",
"sort_fields",
"=",
"','",
".",
"join",
"(",
"self",
".",
"options",
".",
"sort_fields",
")",
"if",
"sort_fields",
"==",
"'*'",
":",
"sort_fields",
"=",
"self",
".",
"get_output_fields",
"(",
")",
"return",
"formatting",
".",
"validate_sort_fields",
"(",
"sort_fields",
"or",
"config",
".",
"sort_fields",
")"
] |
Take care of sorting.
|
[
"Take",
"care",
"of",
"sorting",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtcontrol.py#L439-L446
|
7,441
|
pyroscope/pyrocore
|
src/pyrocore/scripts/rtcontrol.py
|
RtorrentControl.show_in_view
|
def show_in_view(self, sourceview, matches, targetname=None):
""" Show search result in ncurses view.
"""
append = self.options.append_view or self.options.alter_view == 'append'
remove = self.options.alter_view == 'remove'
action_name = ', appending to' if append else ', removing from' if remove else ' into'
targetname = config.engine.show(matches,
targetname or self.options.to_view or "rtcontrol",
append=append, disjoin=remove)
msg = "Filtered %d out of %d torrents using [ %s ]" % (
len(matches), sourceview.size(), sourceview.matcher)
self.LOG.info("%s%s rTorrent view %r." % (msg, action_name, targetname))
config.engine.log(msg)
|
python
|
def show_in_view(self, sourceview, matches, targetname=None):
""" Show search result in ncurses view.
"""
append = self.options.append_view or self.options.alter_view == 'append'
remove = self.options.alter_view == 'remove'
action_name = ', appending to' if append else ', removing from' if remove else ' into'
targetname = config.engine.show(matches,
targetname or self.options.to_view or "rtcontrol",
append=append, disjoin=remove)
msg = "Filtered %d out of %d torrents using [ %s ]" % (
len(matches), sourceview.size(), sourceview.matcher)
self.LOG.info("%s%s rTorrent view %r." % (msg, action_name, targetname))
config.engine.log(msg)
|
[
"def",
"show_in_view",
"(",
"self",
",",
"sourceview",
",",
"matches",
",",
"targetname",
"=",
"None",
")",
":",
"append",
"=",
"self",
".",
"options",
".",
"append_view",
"or",
"self",
".",
"options",
".",
"alter_view",
"==",
"'append'",
"remove",
"=",
"self",
".",
"options",
".",
"alter_view",
"==",
"'remove'",
"action_name",
"=",
"', appending to'",
"if",
"append",
"else",
"', removing from'",
"if",
"remove",
"else",
"' into'",
"targetname",
"=",
"config",
".",
"engine",
".",
"show",
"(",
"matches",
",",
"targetname",
"or",
"self",
".",
"options",
".",
"to_view",
"or",
"\"rtcontrol\"",
",",
"append",
"=",
"append",
",",
"disjoin",
"=",
"remove",
")",
"msg",
"=",
"\"Filtered %d out of %d torrents using [ %s ]\"",
"%",
"(",
"len",
"(",
"matches",
")",
",",
"sourceview",
".",
"size",
"(",
")",
",",
"sourceview",
".",
"matcher",
")",
"self",
".",
"LOG",
".",
"info",
"(",
"\"%s%s rTorrent view %r.\"",
"%",
"(",
"msg",
",",
"action_name",
",",
"targetname",
")",
")",
"config",
".",
"engine",
".",
"log",
"(",
"msg",
")"
] |
Show search result in ncurses view.
|
[
"Show",
"search",
"result",
"in",
"ncurses",
"view",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/rtcontrol.py#L449-L461
|
7,442
|
pyroscope/pyrocore
|
docs/examples/rt-heatmap.py
|
HeatMap.heatmap
|
def heatmap(self, df, imagefile):
""" Create the heat map.
"""
import seaborn as sns
import matplotlib.ticker as tkr
import matplotlib.pyplot as plt
from matplotlib.colors import LinearSegmentedColormap
sns.set()
with sns.axes_style('whitegrid'):
fig, ax = plt.subplots(figsize=(5, 11)) # inches
cmax = max(df[self.args[2]].max(), self.CMAP_MIN_MAX)
csteps = {
0.0: 'darkred', 0.3/cmax: 'red', 0.6/cmax: 'orangered', 0.9/cmax: 'coral',
1.0/cmax: 'skyblue', 1.5/cmax: 'blue', 1.9/cmax: 'darkblue',
2.0/cmax: 'darkgreen', 3.0/cmax: 'green',
(self.CMAP_MIN_MAX - .1)/cmax: 'palegreen', 1.0: 'yellow'}
cmap = LinearSegmentedColormap.from_list('RdGrYl', sorted(csteps.items()), N=256)
dataset = df.pivot(*self.args)
sns.heatmap(dataset, mask=dataset.isnull(), annot=False, linewidths=.5, square=True, ax=ax, cmap=cmap,
annot_kws=dict(stretch='condensed'))
ax.tick_params(axis='y', labelrotation=30, labelsize=8)
# ax.get_yaxis().set_major_formatter(tkr.FuncFormatter(lambda x, p: x))
plt.savefig(imagefile)
|
python
|
def heatmap(self, df, imagefile):
""" Create the heat map.
"""
import seaborn as sns
import matplotlib.ticker as tkr
import matplotlib.pyplot as plt
from matplotlib.colors import LinearSegmentedColormap
sns.set()
with sns.axes_style('whitegrid'):
fig, ax = plt.subplots(figsize=(5, 11)) # inches
cmax = max(df[self.args[2]].max(), self.CMAP_MIN_MAX)
csteps = {
0.0: 'darkred', 0.3/cmax: 'red', 0.6/cmax: 'orangered', 0.9/cmax: 'coral',
1.0/cmax: 'skyblue', 1.5/cmax: 'blue', 1.9/cmax: 'darkblue',
2.0/cmax: 'darkgreen', 3.0/cmax: 'green',
(self.CMAP_MIN_MAX - .1)/cmax: 'palegreen', 1.0: 'yellow'}
cmap = LinearSegmentedColormap.from_list('RdGrYl', sorted(csteps.items()), N=256)
dataset = df.pivot(*self.args)
sns.heatmap(dataset, mask=dataset.isnull(), annot=False, linewidths=.5, square=True, ax=ax, cmap=cmap,
annot_kws=dict(stretch='condensed'))
ax.tick_params(axis='y', labelrotation=30, labelsize=8)
# ax.get_yaxis().set_major_formatter(tkr.FuncFormatter(lambda x, p: x))
plt.savefig(imagefile)
|
[
"def",
"heatmap",
"(",
"self",
",",
"df",
",",
"imagefile",
")",
":",
"import",
"seaborn",
"as",
"sns",
"import",
"matplotlib",
".",
"ticker",
"as",
"tkr",
"import",
"matplotlib",
".",
"pyplot",
"as",
"plt",
"from",
"matplotlib",
".",
"colors",
"import",
"LinearSegmentedColormap",
"sns",
".",
"set",
"(",
")",
"with",
"sns",
".",
"axes_style",
"(",
"'whitegrid'",
")",
":",
"fig",
",",
"ax",
"=",
"plt",
".",
"subplots",
"(",
"figsize",
"=",
"(",
"5",
",",
"11",
")",
")",
"# inches",
"cmax",
"=",
"max",
"(",
"df",
"[",
"self",
".",
"args",
"[",
"2",
"]",
"]",
".",
"max",
"(",
")",
",",
"self",
".",
"CMAP_MIN_MAX",
")",
"csteps",
"=",
"{",
"0.0",
":",
"'darkred'",
",",
"0.3",
"/",
"cmax",
":",
"'red'",
",",
"0.6",
"/",
"cmax",
":",
"'orangered'",
",",
"0.9",
"/",
"cmax",
":",
"'coral'",
",",
"1.0",
"/",
"cmax",
":",
"'skyblue'",
",",
"1.5",
"/",
"cmax",
":",
"'blue'",
",",
"1.9",
"/",
"cmax",
":",
"'darkblue'",
",",
"2.0",
"/",
"cmax",
":",
"'darkgreen'",
",",
"3.0",
"/",
"cmax",
":",
"'green'",
",",
"(",
"self",
".",
"CMAP_MIN_MAX",
"-",
".1",
")",
"/",
"cmax",
":",
"'palegreen'",
",",
"1.0",
":",
"'yellow'",
"}",
"cmap",
"=",
"LinearSegmentedColormap",
".",
"from_list",
"(",
"'RdGrYl'",
",",
"sorted",
"(",
"csteps",
".",
"items",
"(",
")",
")",
",",
"N",
"=",
"256",
")",
"dataset",
"=",
"df",
".",
"pivot",
"(",
"*",
"self",
".",
"args",
")",
"sns",
".",
"heatmap",
"(",
"dataset",
",",
"mask",
"=",
"dataset",
".",
"isnull",
"(",
")",
",",
"annot",
"=",
"False",
",",
"linewidths",
"=",
".5",
",",
"square",
"=",
"True",
",",
"ax",
"=",
"ax",
",",
"cmap",
"=",
"cmap",
",",
"annot_kws",
"=",
"dict",
"(",
"stretch",
"=",
"'condensed'",
")",
")",
"ax",
".",
"tick_params",
"(",
"axis",
"=",
"'y'",
",",
"labelrotation",
"=",
"30",
",",
"labelsize",
"=",
"8",
")",
"# ax.get_yaxis().set_major_formatter(tkr.FuncFormatter(lambda x, p: x))",
"plt",
".",
"savefig",
"(",
"imagefile",
")"
] |
Create the heat map.
|
[
"Create",
"the",
"heat",
"map",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/docs/examples/rt-heatmap.py#L55-L81
|
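For context on ``df.pivot(*self.args)`` above, ``self.args`` evidently holds three column names (index, columns, values); the positional call matches the pandas releases of that era. A self-contained sketch with invented column names, using the keyword form that works across pandas versions:

import pandas as pd

df = pd.DataFrame([
    ("2019-01-01", "00", 0.5),
    ("2019-01-01", "01", 1.2),
    ("2019-01-02", "00", 2.5),
], columns=["day", "hour", "ratio"])

dataset = df.pivot(index="day", columns="hour", values="ratio")
print(dataset)  # rows=day, columns=hour, cells=ratio (NaN where missing)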
7,443
|
pyroscope/pyrocore
|
src/pyrocore/ui/categories.py
|
CategoryManager.mainloop
|
def mainloop(self):
""" Manage category views.
"""
# Get client state
proxy = config.engine.open()
views = [x for x in sorted(proxy.view.list()) if x.startswith(self.PREFIX)]
current_view = real_current_view = proxy.ui.current_view()
if current_view not in views:
if views:
current_view = views[0]
else:
raise error.UserError("There are no '{}*' views defined at all!".format(self.PREFIX))
# Check options
if self.options.list:
for name in sorted(views):
print("{} {:5d} {}".format(
'*' if name == real_current_view else ' ',
proxy.view.size(xmlrpc.NOHASH, name),
name[self.PREFIX_LEN:]))
elif self.options.next or self.options.prev or self.options.update:
# Determine next in line
if self.options.update:
new_view = current_view
else:
new_view = (views * 2)[views.index(current_view) + (1 if self.options.next else -1)]
self.LOG.info("{} category view '{}'.".format(
"Updating" if self.options.update else "Switching to", new_view))
# Update and switch to filtered view
proxy.pyro.category.update(xmlrpc.NOHASH, new_view[self.PREFIX_LEN:])
proxy.ui.current_view.set(new_view)
else:
self.LOG.info("Current category view is '{}'.".format(current_view[self.PREFIX_LEN:]))
self.LOG.info("Use '--help' to get usage information.")
|
python
|
def mainloop(self):
""" Manage category views.
"""
# Get client state
proxy = config.engine.open()
views = [x for x in sorted(proxy.view.list()) if x.startswith(self.PREFIX)]
current_view = real_current_view = proxy.ui.current_view()
if current_view not in views:
if views:
current_view = views[0]
else:
raise error.UserError("There are no '{}*' views defined at all!".format(self.PREFIX))
# Check options
if self.options.list:
for name in sorted(views):
print("{} {:5d} {}".format(
'*' if name == real_current_view else ' ',
proxy.view.size(xmlrpc.NOHASH, name),
name[self.PREFIX_LEN:]))
elif self.options.next or self.options.prev or self.options.update:
# Determine next in line
if self.options.update:
new_view = current_view
else:
new_view = (views * 2)[views.index(current_view) + (1 if self.options.next else -1)]
self.LOG.info("{} category view '{}'.".format(
"Updating" if self.options.update else "Switching to", new_view))
# Update and switch to filtered view
proxy.pyro.category.update(xmlrpc.NOHASH, new_view[self.PREFIX_LEN:])
proxy.ui.current_view.set(new_view)
else:
self.LOG.info("Current category view is '{}'.".format(current_view[self.PREFIX_LEN:]))
self.LOG.info("Use '--help' to get usage information.")
|
[
"def",
"mainloop",
"(",
"self",
")",
":",
"# Get client state",
"proxy",
"=",
"config",
".",
"engine",
".",
"open",
"(",
")",
"views",
"=",
"[",
"x",
"for",
"x",
"in",
"sorted",
"(",
"proxy",
".",
"view",
".",
"list",
"(",
")",
")",
"if",
"x",
".",
"startswith",
"(",
"self",
".",
"PREFIX",
")",
"]",
"current_view",
"=",
"real_current_view",
"=",
"proxy",
".",
"ui",
".",
"current_view",
"(",
")",
"if",
"current_view",
"not",
"in",
"views",
":",
"if",
"views",
":",
"current_view",
"=",
"views",
"[",
"0",
"]",
"else",
":",
"raise",
"error",
".",
"UserError",
"(",
"\"There are no '{}*' views defined at all!\"",
".",
"format",
"(",
"self",
".",
"PREFIX",
")",
")",
"# Check options",
"if",
"self",
".",
"options",
".",
"list",
":",
"for",
"name",
"in",
"sorted",
"(",
"views",
")",
":",
"print",
"(",
"\"{} {:5d} {}\"",
".",
"format",
"(",
"'*'",
"if",
"name",
"==",
"real_current_view",
"else",
"' '",
",",
"proxy",
".",
"view",
".",
"size",
"(",
"xmlrpc",
".",
"NOHASH",
",",
"name",
")",
",",
"name",
"[",
"self",
".",
"PREFIX_LEN",
":",
"]",
")",
")",
"elif",
"self",
".",
"options",
".",
"next",
"or",
"self",
".",
"options",
".",
"prev",
"or",
"self",
".",
"options",
".",
"update",
":",
"# Determine next in line",
"if",
"self",
".",
"options",
".",
"update",
":",
"new_view",
"=",
"current_view",
"else",
":",
"new_view",
"=",
"(",
"views",
"*",
"2",
")",
"[",
"views",
".",
"index",
"(",
"current_view",
")",
"+",
"(",
"1",
"if",
"self",
".",
"options",
".",
"next",
"else",
"-",
"1",
")",
"]",
"self",
".",
"LOG",
".",
"info",
"(",
"\"{} category view '{}'.\"",
".",
"format",
"(",
"\"Updating\"",
"if",
"self",
".",
"options",
".",
"update",
"else",
"\"Switching to\"",
",",
"new_view",
")",
")",
"# Update and switch to filtered view",
"proxy",
".",
"pyro",
".",
"category",
".",
"update",
"(",
"xmlrpc",
".",
"NOHASH",
",",
"new_view",
"[",
"self",
".",
"PREFIX_LEN",
":",
"]",
")",
"proxy",
".",
"ui",
".",
"current_view",
".",
"set",
"(",
"new_view",
")",
"else",
":",
"self",
".",
"LOG",
".",
"info",
"(",
"\"Current category view is '{}'.\"",
".",
"format",
"(",
"current_view",
"[",
"self",
".",
"PREFIX_LEN",
":",
"]",
")",
")",
"self",
".",
"LOG",
".",
"info",
"(",
"\"Use '--help' to get usage information.\"",
")"
] |
Manage category views.
|
[
"Manage",
"category",
"views",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/ui/categories.py#L53-L91
|
7,444
|
pyroscope/pyrocore
|
src/pyrocore/data/config/config.py
|
_custom_fields
|
def _custom_fields():
""" Yield custom field definitions.
"""
# Import some commonly needed modules
import os
from pyrocore.torrent import engine, matching
from pyrocore.util import fmt
# PUT CUSTOM FIELD CODE HERE
# Disk space check (as an example)
# see https://pyrocore.readthedocs.io/en/latest/custom.html#has-room
def has_room(obj):
"Check disk space."
pathname = obj.path
if pathname and not os.path.exists(pathname):
pathname = os.path.dirname(pathname)
if pathname and os.path.exists(pathname):
stats = os.statvfs(pathname)
return (stats.f_bavail * stats.f_frsize - int(diskspace_threshold_mb) * 1024**2
> obj.size * (1.0 - obj.done / 100.0))
else:
return None
yield engine.DynamicField(engine.untyped, "has_room",
"check whether the download will fit on its target device",
matcher=matching.BoolFilter, accessor=has_room,
formatter=lambda val: "OK" if val else "??" if val is None else "NO")
globals().setdefault("diskspace_threshold_mb", "500")
|
python
|
def _custom_fields():
""" Yield custom field definitions.
"""
# Import some commonly needed modules
import os
from pyrocore.torrent import engine, matching
from pyrocore.util import fmt
# PUT CUSTOM FIELD CODE HERE
# Disk space check (as an example)
# see https://pyrocore.readthedocs.io/en/latest/custom.html#has-room
def has_room(obj):
"Check disk space."
pathname = obj.path
if pathname and not os.path.exists(pathname):
pathname = os.path.dirname(pathname)
if pathname and os.path.exists(pathname):
stats = os.statvfs(pathname)
return (stats.f_bavail * stats.f_frsize - int(diskspace_threshold_mb) * 1024**2
> obj.size * (1.0 - obj.done / 100.0))
else:
return None
yield engine.DynamicField(engine.untyped, "has_room",
"check whether the download will fit on its target device",
matcher=matching.BoolFilter, accessor=has_room,
formatter=lambda val: "OK" if val else "??" if val is None else "NO")
globals().setdefault("diskspace_threshold_mb", "500")
|
[
"def",
"_custom_fields",
"(",
")",
":",
"# Import some commonly needed modules",
"import",
"os",
"from",
"pyrocore",
".",
"torrent",
"import",
"engine",
",",
"matching",
"from",
"pyrocore",
".",
"util",
"import",
"fmt",
"# PUT CUSTOM FIELD CODE HERE",
"# Disk space check (as an example)",
"# see https://pyrocore.readthedocs.io/en/latest/custom.html#has-room",
"def",
"has_room",
"(",
"obj",
")",
":",
"\"Check disk space.\"",
"pathname",
"=",
"obj",
".",
"path",
"if",
"pathname",
"and",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"pathname",
")",
":",
"pathname",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"pathname",
")",
"if",
"pathname",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"pathname",
")",
":",
"stats",
"=",
"os",
".",
"statvfs",
"(",
"pathname",
")",
"return",
"(",
"stats",
".",
"f_bavail",
"*",
"stats",
".",
"f_frsize",
"-",
"int",
"(",
"diskspace_threshold_mb",
")",
"*",
"1024",
"**",
"2",
">",
"obj",
".",
"size",
"*",
"(",
"1.0",
"-",
"obj",
".",
"done",
"/",
"100.0",
")",
")",
"else",
":",
"return",
"None",
"yield",
"engine",
".",
"DynamicField",
"(",
"engine",
".",
"untyped",
",",
"\"has_room\"",
",",
"\"check whether the download will fit on its target device\"",
",",
"matcher",
"=",
"matching",
".",
"BoolFilter",
",",
"accessor",
"=",
"has_room",
",",
"formatter",
"=",
"lambda",
"val",
":",
"\"OK\"",
"if",
"val",
"else",
"\"??\"",
"if",
"val",
"is",
"None",
"else",
"\"NO\"",
")",
"globals",
"(",
")",
".",
"setdefault",
"(",
"\"diskspace_threshold_mb\"",
",",
"\"500\"",
")"
] |
Yield custom field definitions.
|
[
"Yield",
"custom",
"field",
"definitions",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/data/config/config.py#L7-L35
|
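Other custom fields follow the same ``DynamicField`` pattern shown above. A sketch of one more, to be placed inside ``_custom_fields()`` after the imports; it assumes the standard ``completed`` timestamp field and that ``matching.FloatFilter`` is available:

import time

def _completion_age_days(obj):
    "Days since the item finished, or None while incomplete."
    return (time.time() - obj.completed) / 86400.0 if obj.completed else None

yield engine.DynamicField(engine.untyped, "completion_age_days",
    "days since the item finished downloading",
    matcher=matching.FloatFilter, accessor=_completion_age_days)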
7,445
|
pyroscope/pyrocore
|
src/pyrocore/util/stats.py
|
engine_data
|
def engine_data(engine):
""" Get important performance data and metadata from rTorrent.
"""
views = ("default", "main", "started", "stopped", "complete",
"incomplete", "seeding", "leeching", "active", "messages")
methods = [
"throttle.global_up.rate", "throttle.global_up.max_rate",
"throttle.global_down.rate", "throttle.global_down.max_rate",
]
# Get data via multicall
proxy = engine.open()
calls = [dict(methodName=method, params=[]) for method in methods] \
+ [dict(methodName="view.size", params=['', view]) for view in views]
result = proxy.system.multicall(calls, flatten=True)
# Build result object
data = dict(
now = time.time(),
engine_id = engine.engine_id,
versions = engine.versions,
uptime = engine.uptime,
upload = [result[0], result[1]],
download = [result[2], result[3]],
views = dict([(name, result[4+i])
for i, name in enumerate(views)
]),
)
return data
|
python
|
def engine_data(engine):
""" Get important performance data and metadata from rTorrent.
"""
views = ("default", "main", "started", "stopped", "complete",
"incomplete", "seeding", "leeching", "active", "messages")
methods = [
"throttle.global_up.rate", "throttle.global_up.max_rate",
"throttle.global_down.rate", "throttle.global_down.max_rate",
]
# Get data via multicall
proxy = engine.open()
calls = [dict(methodName=method, params=[]) for method in methods] \
+ [dict(methodName="view.size", params=['', view]) for view in views]
result = proxy.system.multicall(calls, flatten=True)
# Build result object
data = dict(
now = time.time(),
engine_id = engine.engine_id,
versions = engine.versions,
uptime = engine.uptime,
upload = [result[0], result[1]],
download = [result[2], result[3]],
views = dict([(name, result[4+i])
for i, name in enumerate(views)
]),
)
return data
|
[
"def",
"engine_data",
"(",
"engine",
")",
":",
"views",
"=",
"(",
"\"default\"",
",",
"\"main\"",
",",
"\"started\"",
",",
"\"stopped\"",
",",
"\"complete\"",
",",
"\"incomplete\"",
",",
"\"seeding\"",
",",
"\"leeching\"",
",",
"\"active\"",
",",
"\"messages\"",
")",
"methods",
"=",
"[",
"\"throttle.global_up.rate\"",
",",
"\"throttle.global_up.max_rate\"",
",",
"\"throttle.global_down.rate\"",
",",
"\"throttle.global_down.max_rate\"",
",",
"]",
"# Get data via multicall",
"proxy",
"=",
"engine",
".",
"open",
"(",
")",
"calls",
"=",
"[",
"dict",
"(",
"methodName",
"=",
"method",
",",
"params",
"=",
"[",
"]",
")",
"for",
"method",
"in",
"methods",
"]",
"+",
"[",
"dict",
"(",
"methodName",
"=",
"\"view.size\"",
",",
"params",
"=",
"[",
"''",
",",
"view",
"]",
")",
"for",
"view",
"in",
"views",
"]",
"result",
"=",
"proxy",
".",
"system",
".",
"multicall",
"(",
"calls",
",",
"flatten",
"=",
"True",
")",
"# Build result object",
"data",
"=",
"dict",
"(",
"now",
"=",
"time",
".",
"time",
"(",
")",
",",
"engine_id",
"=",
"engine",
".",
"engine_id",
",",
"versions",
"=",
"engine",
".",
"versions",
",",
"uptime",
"=",
"engine",
".",
"uptime",
",",
"upload",
"=",
"[",
"result",
"[",
"0",
"]",
",",
"result",
"[",
"1",
"]",
"]",
",",
"download",
"=",
"[",
"result",
"[",
"2",
"]",
",",
"result",
"[",
"3",
"]",
"]",
",",
"views",
"=",
"dict",
"(",
"[",
"(",
"name",
",",
"result",
"[",
"4",
"+",
"i",
"]",
")",
"for",
"i",
",",
"name",
"in",
"enumerate",
"(",
"views",
")",
"]",
")",
",",
")",
"return",
"data"
] |
Get important performance data and metadata from rTorrent.
|
[
"Get",
"important",
"performance",
"data",
"and",
"metadata",
"from",
"rTorrent",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/stats.py#L25-L54
|
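The ``system.multicall`` payload built in ``engine_data()`` is a plain list of ``{methodName, params}`` dicts, answered positionally. A trimmed illustration of the request/response shape (result values are invented):

calls = [
    {"methodName": "throttle.global_up.rate", "params": []},
    {"methodName": "view.size", "params": ["", "main"]},
]
# proxy.system.multicall(calls, flatten=True) would return one scalar per call:
result = [52428, 1337]
up_rate, main_size = result   # indexes mirror the order of the calls list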
7,446
|
pyroscope/pyrocore
|
src/pyrocore/util/osmagic.py
|
_write_pidfile
|
def _write_pidfile(pidfile):
""" Write file with current process ID.
"""
pid = str(os.getpid())
handle = open(pidfile, 'w')
try:
handle.write("%s\n" % pid)
finally:
handle.close()
|
python
|
def _write_pidfile(pidfile):
""" Write file with current process ID.
"""
pid = str(os.getpid())
handle = open(pidfile, 'w')
try:
handle.write("%s\n" % pid)
finally:
handle.close()
|
[
"def",
"_write_pidfile",
"(",
"pidfile",
")",
":",
"pid",
"=",
"str",
"(",
"os",
".",
"getpid",
"(",
")",
")",
"handle",
"=",
"open",
"(",
"pidfile",
",",
"'w'",
")",
"try",
":",
"handle",
".",
"write",
"(",
"\"%s\\n\"",
"%",
"pid",
")",
"finally",
":",
"handle",
".",
"close",
"(",
")"
] |
Write file with current process ID.
|
[
"Write",
"file",
"with",
"current",
"process",
"ID",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/osmagic.py#L30-L38
|
7,447
|
pyroscope/pyrocore
|
src/pyrocore/util/osmagic.py
|
guard
|
def guard(pidfile, guardfile=None):
""" Raise an EnvironmentError when the "guardfile" doesn't exist, or
the process with the ID found in "pidfile" is still active.
"""
# Check guard
if guardfile and not os.path.exists(guardfile):
raise EnvironmentError("Guard file '%s' not found, won't start!" % guardfile)
if os.path.exists(pidfile):
running, pid = check_process(pidfile)
if running:
raise EnvironmentError("Daemon process #%d still running, won't start!" % pid)
else:
logging.getLogger("daemonize").info("Process #%d disappeared, continuing..." % pid)
# Keep race condition window small, by immediately writing launcher process ID
_write_pidfile(pidfile)
|
python
|
def guard(pidfile, guardfile=None):
""" Raise an EnvironmentError when the "guardfile" doesn't exist, or
the process with the ID found in "pidfile" is still active.
"""
# Check guard
if guardfile and not os.path.exists(guardfile):
raise EnvironmentError("Guard file '%s' not found, won't start!" % guardfile)
if os.path.exists(pidfile):
running, pid = check_process(pidfile)
if running:
raise EnvironmentError("Daemon process #%d still running, won't start!" % pid)
else:
logging.getLogger("daemonize").info("Process #%d disappeared, continuing..." % pid)
# Keep race condition window small, by immediately writing launcher process ID
_write_pidfile(pidfile)
|
[
"def",
"guard",
"(",
"pidfile",
",",
"guardfile",
"=",
"None",
")",
":",
"# Check guard",
"if",
"guardfile",
"and",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"guardfile",
")",
":",
"raise",
"EnvironmentError",
"(",
"\"Guard file '%s' not found, won't start!\"",
"%",
"guardfile",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"pidfile",
")",
":",
"running",
",",
"pid",
"=",
"check_process",
"(",
"pidfile",
")",
"if",
"running",
":",
"raise",
"EnvironmentError",
"(",
"\"Daemon process #%d still running, won't start!\"",
"%",
"pid",
")",
"else",
":",
"logging",
".",
"getLogger",
"(",
"\"daemonize\"",
")",
".",
"info",
"(",
"\"Process #%d disappeared, continuing...\"",
"%",
"pid",
")",
"# Keep race condition window small, by immediately writing launcher process ID",
"_write_pidfile",
"(",
"pidfile",
")"
] |
Raise an EnvironmentError when the "guardfile" doesn't exist, or
the process with the ID found in "pidfile" is still active.
|
[
"Raise",
"an",
"EnvironmentError",
"when",
"the",
"guardfile",
"doesn",
"t",
"exist",
"or",
"the",
"process",
"with",
"the",
"ID",
"found",
"in",
"pidfile",
"is",
"still",
"active",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/osmagic.py#L70-L86
|
7,448
|
pyroscope/pyrocore
|
src/pyrocore/util/osmagic.py
|
daemonize
|
def daemonize(pidfile=None, logfile=None, sync=True):
""" Fork the process into the background.
@param pidfile: Optional PID file path.
@param logfile: Optional name of stdout/stderr log file or stream.
@param sync: Wait for parent process to disappear?
"""
log = logging.getLogger("daemonize")
ppid = os.getpid()
try:
pid = os.fork()
if pid > 0:
log.debug("Parent exiting (PID %d, CHILD %d)" % (ppid, pid))
sys.exit(0)
except OSError as exc:
log.critical("fork #1 failed (PID %d): (%d) %s\n" % (os.getpid(), exc.errno, exc.strerror))
sys.exit(1)
##os.chdir("/")
##os.umask(0022)
os.setsid()
try:
pid = os.fork()
if pid > 0:
log.debug("Session leader exiting (PID %d, PPID %d, DEMON %d)" % (os.getpid(), ppid, pid))
sys.exit(0)
except OSError as exc:
log.critical("fork #2 failed (PID %d): (%d) %s\n" % (os.getpid(), exc.errno, exc.strerror))
sys.exit(1)
if pidfile:
_write_pidfile(pidfile)
def sig_term(*dummy):
"Handler for SIGTERM."
sys.exit(0)
stdin = open("/dev/null", "r")
os.dup2(stdin.fileno(), sys.stdin.fileno())
signal.signal(signal.SIGTERM, sig_term)
if logfile:
try:
logfile + ""
except TypeError:
if logfile.fileno() != sys.stdout.fileno():
os.dup2(logfile.fileno(), sys.stdout.fileno())
if logfile.fileno() != sys.stderr.fileno():
os.dup2(logfile.fileno(), sys.stderr.fileno())
else:
log.debug("Redirecting stdout / stderr to %r" % logfile)
loghandle = open(logfile, "a+")
os.dup2(loghandle.fileno(), sys.stdout.fileno())
os.dup2(loghandle.fileno(), sys.stderr.fileno())
loghandle.close()
if sync:
# Wait for 5 seconds at most, in 10ms steps
polling = 5, .01
for _ in range(int(polling[0] * 1 / polling[1])):
try:
os.kill(ppid, 0)
except OSError:
break
else:
time.sleep(polling[1])
log.debug("Process detached (PID %d)" % os.getpid())
|
python
|
def daemonize(pidfile=None, logfile=None, sync=True):
""" Fork the process into the background.
@param pidfile: Optional PID file path.
@param logfile: Optional name of stdout/stderr log file or stream.
@param sync: Wait for parent process to disappear?
"""
log = logging.getLogger("daemonize")
ppid = os.getpid()
try:
pid = os.fork()
if pid > 0:
log.debug("Parent exiting (PID %d, CHILD %d)" % (ppid, pid))
sys.exit(0)
except OSError as exc:
log.critical("fork #1 failed (PID %d): (%d) %s\n" % (os.getpid(), exc.errno, exc.strerror))
sys.exit(1)
##os.chdir("/")
##os.umask(0022)
os.setsid()
try:
pid = os.fork()
if pid > 0:
log.debug("Session leader exiting (PID %d, PPID %d, DEMON %d)" % (os.getpid(), ppid, pid))
sys.exit(0)
except OSError as exc:
log.critical("fork #2 failed (PID %d): (%d) %s\n" % (os.getpid(), exc.errno, exc.strerror))
sys.exit(1)
if pidfile:
_write_pidfile(pidfile)
def sig_term(*dummy):
"Handler for SIGTERM."
sys.exit(0)
stdin = open("/dev/null", "r")
os.dup2(stdin.fileno(), sys.stdin.fileno())
signal.signal(signal.SIGTERM, sig_term)
if logfile:
try:
logfile + ""
except TypeError:
if logfile.fileno() != sys.stdout.fileno():
os.dup2(logfile.fileno(), sys.stdout.fileno())
if logfile.fileno() != sys.stderr.fileno():
os.dup2(logfile.fileno(), sys.stderr.fileno())
else:
log.debug("Redirecting stdout / stderr to %r" % logfile)
loghandle = open(logfile, "a+")
os.dup2(loghandle.fileno(), sys.stdout.fileno())
os.dup2(loghandle.fileno(), sys.stderr.fileno())
loghandle.close()
if sync:
# Wait for 5 seconds at most, in 10ms steps
polling = 5, .01
for _ in range(int(polling[0] * 1 / polling[1])):
try:
os.kill(ppid, 0)
except OSError:
break
else:
time.sleep(polling[1])
log.debug("Process detached (PID %d)" % os.getpid())
|
[
"def",
"daemonize",
"(",
"pidfile",
"=",
"None",
",",
"logfile",
"=",
"None",
",",
"sync",
"=",
"True",
")",
":",
"log",
"=",
"logging",
".",
"getLogger",
"(",
"\"daemonize\"",
")",
"ppid",
"=",
"os",
".",
"getpid",
"(",
")",
"try",
":",
"pid",
"=",
"os",
".",
"fork",
"(",
")",
"if",
"pid",
">",
"0",
":",
"log",
".",
"debug",
"(",
"\"Parent exiting (PID %d, CHILD %d)\"",
"%",
"(",
"ppid",
",",
"pid",
")",
")",
"sys",
".",
"exit",
"(",
"0",
")",
"except",
"OSError",
"as",
"exc",
":",
"log",
".",
"critical",
"(",
"\"fork #1 failed (PID %d): (%d) %s\\n\"",
"%",
"(",
"os",
".",
"getpid",
"(",
")",
",",
"exc",
".",
"errno",
",",
"exc",
".",
"strerror",
")",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"##os.chdir(\"/\")",
"##os.umask(0022)",
"os",
".",
"setsid",
"(",
")",
"try",
":",
"pid",
"=",
"os",
".",
"fork",
"(",
")",
"if",
"pid",
">",
"0",
":",
"log",
".",
"debug",
"(",
"\"Session leader exiting (PID %d, PPID %d, DEMON %d)\"",
"%",
"(",
"os",
".",
"getpid",
"(",
")",
",",
"ppid",
",",
"pid",
")",
")",
"sys",
".",
"exit",
"(",
"0",
")",
"except",
"OSError",
"as",
"exc",
":",
"log",
".",
"critical",
"(",
"\"fork #2 failed (PID %d): (%d) %s\\n\"",
"%",
"(",
"os",
".",
"getpid",
"(",
")",
",",
"exc",
".",
"errno",
",",
"exc",
".",
"strerror",
")",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"if",
"pidfile",
":",
"_write_pidfile",
"(",
"pidfile",
")",
"def",
"sig_term",
"(",
"*",
"dummy",
")",
":",
"\"Handler for SIGTERM.\"",
"sys",
".",
"exit",
"(",
"0",
")",
"stdin",
"=",
"open",
"(",
"\"/dev/null\"",
",",
"\"r\"",
")",
"os",
".",
"dup2",
"(",
"stdin",
".",
"fileno",
"(",
")",
",",
"sys",
".",
"stdin",
".",
"fileno",
"(",
")",
")",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGTERM",
",",
"sig_term",
")",
"if",
"logfile",
":",
"try",
":",
"logfile",
"+",
"\"\"",
"except",
"TypeError",
":",
"if",
"logfile",
".",
"fileno",
"(",
")",
"!=",
"sys",
".",
"stdout",
".",
"fileno",
"(",
")",
":",
"os",
".",
"dup2",
"(",
"logfile",
".",
"fileno",
"(",
")",
",",
"sys",
".",
"stdout",
".",
"fileno",
"(",
")",
")",
"if",
"logfile",
".",
"fileno",
"(",
")",
"!=",
"sys",
".",
"stderr",
".",
"fileno",
"(",
")",
":",
"os",
".",
"dup2",
"(",
"logfile",
".",
"fileno",
"(",
")",
",",
"sys",
".",
"stderr",
".",
"fileno",
"(",
")",
")",
"else",
":",
"log",
".",
"debug",
"(",
"\"Redirecting stdout / stderr to %r\"",
"%",
"logfile",
")",
"loghandle",
"=",
"open",
"(",
"logfile",
",",
"\"a+\"",
")",
"os",
".",
"dup2",
"(",
"loghandle",
".",
"fileno",
"(",
")",
",",
"sys",
".",
"stdout",
".",
"fileno",
"(",
")",
")",
"os",
".",
"dup2",
"(",
"loghandle",
".",
"fileno",
"(",
")",
",",
"sys",
".",
"stderr",
".",
"fileno",
"(",
")",
")",
"loghandle",
".",
"close",
"(",
")",
"if",
"sync",
":",
"# Wait for 5 seconds at most, in 10ms steps",
"polling",
"=",
"5",
",",
".01",
"for",
"_",
"in",
"range",
"(",
"int",
"(",
"polling",
"[",
"0",
"]",
"*",
"1",
"/",
"polling",
"[",
"1",
"]",
")",
")",
":",
"try",
":",
"os",
".",
"kill",
"(",
"ppid",
",",
"0",
")",
"except",
"OSError",
":",
"break",
"else",
":",
"time",
".",
"sleep",
"(",
"polling",
"[",
"1",
"]",
")",
"log",
".",
"debug",
"(",
"\"Process detached (PID %d)\"",
"%",
"os",
".",
"getpid",
"(",
")",
")"
] |
Fork the process into the background.
@param pidfile: Optional PID file path.
@param logfile: Optional name of stdout/stderr log file or stream.
@param sync: Wait for parent process to disappear?
|
[
"Fork",
"the",
"process",
"into",
"the",
"background",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/osmagic.py#L89-L158
|
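Taken together with ``guard()`` above, typical daemon startup in this module's style looks like the following sketch (file paths are placeholders):

from pyrocore.util import osmagic

pidfile = "/tmp/myjob.pid"            # placeholder path
osmagic.guard(pidfile)                # refuse to start if already running
osmagic.daemonize(pidfile=pidfile,    # double-fork into the background
                  logfile="/tmp/myjob.log")
# ... daemon main loop runs here ...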
7,449
|
pyroscope/pyrocore
|
src/pyrocore/util/algo.py
|
flatten
|
def flatten(nested, containers=(list, tuple)):
""" Flatten a nested list in-place and return it.
"""
flat = list(nested) # handle iterators / generators
i = 0
while i < len(flat):
while isinstance(flat[i], containers):
if not flat[i]:
# kill empty list
flat.pop(i)
# inspect new 'i'th element in outer loop
i -= 1
break
else:
flat[i:i + 1] = (flat[i])
# 'i'th element is scalar, proceed
i += 1
return flat
|
python
|
def flatten(nested, containers=(list, tuple)):
""" Flatten a nested list in-place and return it.
"""
flat = list(nested) # handle iterators / generators
i = 0
while i < len(flat):
while isinstance(flat[i], containers):
if not flat[i]:
# kill empty list
flat.pop(i)
# inspect new 'i'th element in outer loop
i -= 1
break
else:
flat[i:i + 1] = (flat[i])
# 'i'th element is scalar, proceed
i += 1
return flat
|
[
"def",
"flatten",
"(",
"nested",
",",
"containers",
"=",
"(",
"list",
",",
"tuple",
")",
")",
":",
"flat",
"=",
"list",
"(",
"nested",
")",
"# handle iterators / generators",
"i",
"=",
"0",
"while",
"i",
"<",
"len",
"(",
"flat",
")",
":",
"while",
"isinstance",
"(",
"flat",
"[",
"i",
"]",
",",
"containers",
")",
":",
"if",
"not",
"flat",
"[",
"i",
"]",
":",
"# kill empty list",
"flat",
".",
"pop",
"(",
"i",
")",
"# inspect new 'i'th element in outer loop",
"i",
"-=",
"1",
"break",
"else",
":",
"flat",
"[",
"i",
":",
"i",
"+",
"1",
"]",
"=",
"(",
"flat",
"[",
"i",
"]",
")",
"# 'i'th element is scalar, proceed",
"i",
"+=",
"1",
"return",
"flat"
] |
Flatten a nested list in-place and return it.
|
[
"Flatten",
"a",
"nested",
"list",
"in",
"-",
"place",
"and",
"return",
"it",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/algo.py#L42-L62
|
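A quick usage example for ``flatten()`` (input invented; note that empty containers are dropped):

from pyrocore.util.algo import flatten

nested = [1, [2, (3, [])], [], (4, [5, [6]])]
print(flatten(nested))  # -> [1, 2, 3, 4, 5, 6]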
7,450
|
pyroscope/pyrocore
|
pavement.py
|
gendocs
|
def gendocs():
"create some doc pages automatically"
helppage = path("docs/references-cli-usage.rst")
content = [
".. automatically generated using 'paver gendocs'.",
"",
".. contents::",
" :local:",
"",
".. note::",
"",
" The help output presented here applies to version ``%s`` of the tools."
% sh("pyroadmin --version", capture=True).split()[1],
"",
]
for tool in sorted(project.entry_points["console_scripts"]):
tool, _ = tool.split(None, 1)
content.extend([
".. _cli-usage-%s:" % tool,
"",
tool,
'^' * len(tool),
"",
"::",
"",
])
help_opt = "--help-fields --config-dir /tmp" if tool == "rtcontrol" else "--help"
help_txt = sh("%s -q %s" % (tool, help_opt), capture=True, ignore_error=True).splitlines()
content.extend(' ' + i for i in help_txt
if ' on Python ' not in i and 'Copyright (c) 200' not in i
and 'see the full documentation' not in i
and ' https://pyrocore.readthedocs.io/' not in i)
content.extend([
"",
])
content = [line.rstrip() for line in content if all(
i not in line for i in (", Copyright (c) ", "Total time: ", "Configuration file '/tmp/")
)]
content = [line for line, succ in zip(content, content[1:] + ['']) if line or succ] # filter twin empty lines
helppage.write_lines(content)
|
python
|
def gendocs():
"create some doc pages automatically"
helppage = path("docs/references-cli-usage.rst")
content = [
".. automatically generated using 'paver gendocs'.",
"",
".. contents::",
" :local:",
"",
".. note::",
"",
" The help output presented here applies to version ``%s`` of the tools."
% sh("pyroadmin --version", capture=True).split()[1],
"",
]
for tool in sorted(project.entry_points["console_scripts"]):
tool, _ = tool.split(None, 1)
content.extend([
".. _cli-usage-%s:" % tool,
"",
tool,
'^' * len(tool),
"",
"::",
"",
])
help_opt = "--help-fields --config-dir /tmp" if tool == "rtcontrol" else "--help"
help_txt = sh("%s -q %s" % (tool, help_opt), capture=True, ignore_error=True).splitlines()
content.extend(' ' + i for i in help_txt
if ' on Python ' not in i and 'Copyright (c) 200' not in i
and 'see the full documentation' not in i
and ' https://pyrocore.readthedocs.io/' not in i)
content.extend([
"",
])
content = [line.rstrip() for line in content if all(
i not in line for i in (", Copyright (c) ", "Total time: ", "Configuration file '/tmp/")
)]
content = [line for line, succ in zip(content, content[1:] + ['']) if line or succ] # filter twin empty lines
helppage.write_lines(content)
|
[
"def",
"gendocs",
"(",
")",
":",
"helppage",
"=",
"path",
"(",
"\"docs/references-cli-usage.rst\"",
")",
"content",
"=",
"[",
"\".. automatically generated using 'paver gendocs'.\"",
",",
"\"\"",
",",
"\".. contents::\"",
",",
"\" :local:\"",
",",
"\"\"",
",",
"\".. note::\"",
",",
"\"\"",
",",
"\" The help output presented here applies to version ``%s`` of the tools.\"",
"%",
"sh",
"(",
"\"pyroadmin --version\"",
",",
"capture",
"=",
"True",
")",
".",
"split",
"(",
")",
"[",
"1",
"]",
",",
"\"\"",
",",
"]",
"for",
"tool",
"in",
"sorted",
"(",
"project",
".",
"entry_points",
"[",
"\"console_scripts\"",
"]",
")",
":",
"tool",
",",
"_",
"=",
"tool",
".",
"split",
"(",
"None",
",",
"1",
")",
"content",
".",
"extend",
"(",
"[",
"\".. _cli-usage-%s:\"",
"%",
"tool",
",",
"\"\"",
",",
"tool",
",",
"'^'",
"*",
"len",
"(",
"tool",
")",
",",
"\"\"",
",",
"\"::\"",
",",
"\"\"",
",",
"]",
")",
"help_opt",
"=",
"\"--help-fields --config-dir /tmp\"",
"if",
"tool",
"==",
"\"rtcontrol\"",
"else",
"\"--help\"",
"help_txt",
"=",
"sh",
"(",
"\"%s -q %s\"",
"%",
"(",
"tool",
",",
"help_opt",
")",
",",
"capture",
"=",
"True",
",",
"ignore_error",
"=",
"True",
")",
".",
"splitlines",
"(",
")",
"content",
".",
"extend",
"(",
"' '",
"+",
"i",
"for",
"i",
"in",
"help_txt",
"if",
"' on Python '",
"not",
"in",
"i",
"and",
"'Copyright (c) 200'",
"not",
"in",
"i",
"and",
"'see the full documentation'",
"not",
"in",
"i",
"and",
"' https://pyrocore.readthedocs.io/'",
"not",
"in",
"i",
")",
"content",
".",
"extend",
"(",
"[",
"\"\"",
",",
"]",
")",
"content",
"=",
"[",
"line",
".",
"rstrip",
"(",
")",
"for",
"line",
"in",
"content",
"if",
"all",
"(",
"i",
"not",
"in",
"line",
"for",
"i",
"in",
"(",
"\", Copyright (c) \"",
",",
"\"Total time: \"",
",",
"\"Configuration file '/tmp/\"",
")",
")",
"]",
"content",
"=",
"[",
"line",
"for",
"line",
",",
"succ",
"in",
"zip",
"(",
"content",
",",
"content",
"[",
"1",
":",
"]",
"+",
"[",
"''",
"]",
")",
"if",
"line",
"or",
"succ",
"]",
"# filter twin empty lines",
"helppage",
".",
"write_lines",
"(",
"content",
")"
] |
create some doc pages automatically
|
[
"create",
"some",
"doc",
"pages",
"automatically"
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/pavement.py#L196-L237
|
7,451
|
pyroscope/pyrocore
|
pavement.py
|
watchdog_pid
|
def watchdog_pid():
"""Get watchdog PID via ``netstat``."""
result = sh('netstat -tulpn 2>/dev/null | grep 127.0.0.1:{:d}'
.format(SPHINX_AUTOBUILD_PORT), capture=True, ignore_error=True)
pid = result.strip()
pid = pid.split()[-1] if pid else None
pid = pid.split('/', 1)[0] if pid and pid != '-' else None
return pid
|
python
|
def watchdog_pid():
"""Get watchdog PID via ``netstat``."""
result = sh('netstat -tulpn 2>/dev/null | grep 127.0.0.1:{:d}'
.format(SPHINX_AUTOBUILD_PORT), capture=True, ignore_error=True)
pid = result.strip()
pid = pid.split()[-1] if pid else None
pid = pid.split('/', 1)[0] if pid and pid != '-' else None
return pid
|
[
"def",
"watchdog_pid",
"(",
")",
":",
"result",
"=",
"sh",
"(",
"'netstat -tulpn 2>/dev/null | grep 127.0.0.1:{:d}'",
".",
"format",
"(",
"SPHINX_AUTOBUILD_PORT",
")",
",",
"capture",
"=",
"True",
",",
"ignore_error",
"=",
"True",
")",
"pid",
"=",
"result",
".",
"strip",
"(",
")",
"pid",
"=",
"pid",
".",
"split",
"(",
")",
"[",
"-",
"1",
"]",
"if",
"pid",
"else",
"None",
"pid",
"=",
"pid",
".",
"split",
"(",
"'/'",
",",
"1",
")",
"[",
"0",
"]",
"if",
"pid",
"and",
"pid",
"!=",
"'-'",
"else",
"None",
"return",
"pid"
] |
Get watchdog PID via ``netstat``.
|
[
"Get",
"watchdog",
"PID",
"via",
"netstat",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/pavement.py#L262-L270
|
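A minimal sketch of the same PID extraction, applied to a fabricated ``netstat`` line (the address, port, and PID below are invented for illustration):

sample = 'tcp  0  0 127.0.0.1:8340  0.0.0.0:*  LISTEN  12345/python'
pid = sample.strip()
pid = pid.split()[-1] if pid else None                       # '12345/python'
pid = pid.split('/', 1)[0] if pid and pid != '-' else None   # '12345'
print(pid)  # 12345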
7,452
|
pyroscope/pyrocore
|
pavement.py
|
autodocs
|
def autodocs():
"create Sphinx docs locally, and start a watchdog"
build_dir = path('docs/_build')
index_html = build_dir / 'html/index.html'
if build_dir.exists():
build_dir.rmtree()
with pushd("docs"):
print "\n*** Generating API doc ***\n"
sh("sphinx-apidoc -o apidoc -f -T -M ../src/pyrocore")
sh("sphinx-apidoc -o apidoc -f -T -M $(dirname $(python -c 'import tempita; print(tempita.__file__)'))")
print "\n*** Generating HTML doc ***\n"
sh('nohup %s/Makefile SPHINXBUILD="sphinx-autobuild -p %d'
' -i \'.*\' -i \'*.log\' -i \'*.png\' -i \'*.txt\'" html >autobuild.log 2>&1 &'
% (os.getcwd(), SPHINX_AUTOBUILD_PORT))
for i in range(25):
time.sleep(2.5)
pid = watchdog_pid()
if pid:
sh("touch docs/index.rst")
sh('ps {}'.format(pid))
url = 'http://localhost:{port:d}/'.format(port=SPHINX_AUTOBUILD_PORT)
print("\n*** Open '{}' in your browser...".format(url))
break
|
python
|
def autodocs():
"create Sphinx docs locally, and start a watchdog"
build_dir = path('docs/_build')
index_html = build_dir / 'html/index.html'
if build_dir.exists():
build_dir.rmtree()
with pushd("docs"):
print "\n*** Generating API doc ***\n"
sh("sphinx-apidoc -o apidoc -f -T -M ../src/pyrocore")
sh("sphinx-apidoc -o apidoc -f -T -M $(dirname $(python -c 'import tempita; print(tempita.__file__)'))")
print "\n*** Generating HTML doc ***\n"
sh('nohup %s/Makefile SPHINXBUILD="sphinx-autobuild -p %d'
' -i \'.*\' -i \'*.log\' -i \'*.png\' -i \'*.txt\'" html >autobuild.log 2>&1 &'
% (os.getcwd(), SPHINX_AUTOBUILD_PORT))
for i in range(25):
time.sleep(2.5)
pid = watchdog_pid()
if pid:
sh("touch docs/index.rst")
sh('ps {}'.format(pid))
url = 'http://localhost:{port:d}/'.format(port=SPHINX_AUTOBUILD_PORT)
print("\n*** Open '{}' in your browser...".format(url))
break
|
[
"def",
"autodocs",
"(",
")",
":",
"build_dir",
"=",
"path",
"(",
"'docs/_build'",
")",
"index_html",
"=",
"build_dir",
"/",
"'html/index.html'",
"if",
"build_dir",
".",
"exists",
"(",
")",
":",
"build_dir",
".",
"rmtree",
"(",
")",
"with",
"pushd",
"(",
"\"docs\"",
")",
":",
"print",
"\"\\n*** Generating API doc ***\\n\"",
"sh",
"(",
"\"sphinx-apidoc -o apidoc -f -T -M ../src/pyrocore\"",
")",
"sh",
"(",
"\"sphinx-apidoc -o apidoc -f -T -M $(dirname $(python -c 'import tempita; print(tempita.__file__)'))\"",
")",
"print",
"\"\\n*** Generating HTML doc ***\\n\"",
"sh",
"(",
"'nohup %s/Makefile SPHINXBUILD=\"sphinx-autobuild -p %d'",
"' -i \\'.*\\' -i \\'*.log\\' -i \\'*.png\\' -i \\'*.txt\\'\" html >autobuild.log 2>&1 &'",
"%",
"(",
"os",
".",
"getcwd",
"(",
")",
",",
"SPHINX_AUTOBUILD_PORT",
")",
")",
"for",
"i",
"in",
"range",
"(",
"25",
")",
":",
"time",
".",
"sleep",
"(",
"2.5",
")",
"pid",
"=",
"watchdog_pid",
"(",
")",
"if",
"pid",
":",
"sh",
"(",
"\"touch docs/index.rst\"",
")",
"sh",
"(",
"'ps {}'",
".",
"format",
"(",
"pid",
")",
")",
"url",
"=",
"'http://localhost:{port:d}/'",
".",
"format",
"(",
"port",
"=",
"SPHINX_AUTOBUILD_PORT",
")",
"print",
"(",
"\"\\n*** Open '{}' in your browser...\"",
".",
"format",
"(",
"url",
")",
")",
"break"
] |
create Sphinx docs locally, and start a watchdog
|
[
"create",
"Sphinx",
"docs",
"locally",
"and",
"start",
"a",
"watchdog"
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/pavement.py#L275-L299
|
7,453
|
pyroscope/pyrocore
|
pavement.py
|
stopdocs
|
def stopdocs():
"stop Sphinx watchdog"
for i in range(4):
pid = watchdog_pid()
if pid:
if not i:
sh('ps {}'.format(pid))
sh('kill {}'.format(pid))
time.sleep(.5)
else:
break
|
python
|
def stopdocs():
"stop Sphinx watchdog"
for i in range(4):
pid = watchdog_pid()
if pid:
if not i:
sh('ps {}'.format(pid))
sh('kill {}'.format(pid))
time.sleep(.5)
else:
break
|
[
"def",
"stopdocs",
"(",
")",
":",
"for",
"i",
"in",
"range",
"(",
"4",
")",
":",
"pid",
"=",
"watchdog_pid",
"(",
")",
"if",
"pid",
":",
"if",
"not",
"i",
":",
"sh",
"(",
"'ps {}'",
".",
"format",
"(",
"pid",
")",
")",
"sh",
"(",
"'kill {}'",
".",
"format",
"(",
"pid",
")",
")",
"time",
".",
"sleep",
"(",
".5",
")",
"else",
":",
"break"
] |
stop Sphinx watchdog
|
[
"stop",
"Sphinx",
"watchdog"
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/pavement.py#L303-L313
|
7,454
|
pyroscope/pyrocore
|
pavement.py
|
coverage
|
def coverage():
"generate coverage report and show in browser"
coverage_index = path("build/coverage/index.html")
coverage_index.remove()
sh("paver test")
coverage_index.exists() and webbrowser.open(coverage_index)
|
python
|
def coverage():
"generate coverage report and show in browser"
coverage_index = path("build/coverage/index.html")
coverage_index.remove()
sh("paver test")
coverage_index.exists() and webbrowser.open(coverage_index)
|
[
"def",
"coverage",
"(",
")",
":",
"coverage_index",
"=",
"path",
"(",
"\"build/coverage/index.html\"",
")",
"coverage_index",
".",
"remove",
"(",
")",
"sh",
"(",
"\"paver test\"",
")",
"coverage_index",
".",
"exists",
"(",
")",
"and",
"webbrowser",
".",
"open",
"(",
"coverage_index",
")"
] |
generate coverage report and show in browser
|
[
"generate",
"coverage",
"report",
"and",
"show",
"in",
"browser"
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/pavement.py#L327-L332
|
7,455
|
pyroscope/pyrocore
|
src/pyrocore/config.py
|
lookup_announce_alias
|
def lookup_announce_alias(name):
""" Get canonical alias name and announce URL list for the given alias.
"""
for alias, urls in announce.items():
if alias.lower() == name.lower():
return alias, urls
raise KeyError("Unknown alias %s" % (name,))
|
python
|
def lookup_announce_alias(name):
""" Get canonical alias name and announce URL list for the given alias.
"""
for alias, urls in announce.items():
if alias.lower() == name.lower():
return alias, urls
raise KeyError("Unknown alias %s" % (name,))
|
[
"def",
"lookup_announce_alias",
"(",
"name",
")",
":",
"for",
"alias",
",",
"urls",
"in",
"announce",
".",
"items",
"(",
")",
":",
"if",
"alias",
".",
"lower",
"(",
")",
"==",
"name",
".",
"lower",
"(",
")",
":",
"return",
"alias",
",",
"urls",
"raise",
"KeyError",
"(",
"\"Unknown alias %s\"",
"%",
"(",
"name",
",",
")",
")"
] |
Get canonical alias name and announce URL list for the given alias.
|
[
"Get",
"canonical",
"alias",
"name",
"and",
"announce",
"URL",
"list",
"for",
"the",
"given",
"alias",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/config.py#L27-L34
|
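A hedged usage sketch, assuming the function above and a populated module-level ``announce`` mapping share the same namespace (the alias and URL are invented sample data):

announce = {'Example': ['http://tracker.example.com/announce']}  # invented sample

alias, urls = lookup_announce_alias('EXAMPLE')  # matching is case-insensitive
print(alias)                                    # 'Example'
print(urls)                                     # ['http://tracker.example.com/announce']
lookup_announce_alias('nosuch')                 # raises KeyError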
7,456
|
pyroscope/pyrocore
|
src/pyrocore/config.py
|
map_announce2alias
|
def map_announce2alias(url):
""" Get tracker alias for announce URL, and if none is defined, the 2nd level domain.
"""
import urlparse
# Try to find an exact alias URL match and return its label
for alias, urls in announce.items():
if any(i == url for i in urls):
return alias
# Try to find an alias URL prefix and return its label
parts = urlparse.urlparse(url)
server = urlparse.urlunparse((parts.scheme, parts.netloc, "/", None, None, None))
for alias, urls in announce.items():
if any(i.startswith(server) for i in urls):
return alias
# Return 2nd level domain name if no alias found
try:
return '.'.join(parts.netloc.split(':')[0].split('.')[-2:])
except IndexError:
return parts.netloc
|
python
|
def map_announce2alias(url):
""" Get tracker alias for announce URL, and if none is defined, the 2nd level domain.
"""
import urlparse
# Try to find an exact alias URL match and return its label
for alias, urls in announce.items():
if any(i == url for i in urls):
return alias
# Try to find an alias URL prefix and return its label
parts = urlparse.urlparse(url)
server = urlparse.urlunparse((parts.scheme, parts.netloc, "/", None, None, None))
for alias, urls in announce.items():
if any(i.startswith(server) for i in urls):
return alias
# Return 2nd level domain name if no alias found
try:
return '.'.join(parts.netloc.split(':')[0].split('.')[-2:])
except IndexError:
return parts.netloc
|
[
"def",
"map_announce2alias",
"(",
"url",
")",
":",
"import",
"urlparse",
"# Try to find an exact alias URL match and return its label",
"for",
"alias",
",",
"urls",
"in",
"announce",
".",
"items",
"(",
")",
":",
"if",
"any",
"(",
"i",
"==",
"url",
"for",
"i",
"in",
"urls",
")",
":",
"return",
"alias",
"# Try to find an alias URL prefix and return its label",
"parts",
"=",
"urlparse",
".",
"urlparse",
"(",
"url",
")",
"server",
"=",
"urlparse",
".",
"urlunparse",
"(",
"(",
"parts",
".",
"scheme",
",",
"parts",
".",
"netloc",
",",
"\"/\"",
",",
"None",
",",
"None",
",",
"None",
")",
")",
"for",
"alias",
",",
"urls",
"in",
"announce",
".",
"items",
"(",
")",
":",
"if",
"any",
"(",
"i",
".",
"startswith",
"(",
"server",
")",
"for",
"i",
"in",
"urls",
")",
":",
"return",
"alias",
"# Return 2nd level domain name if no alias found",
"try",
":",
"return",
"'.'",
".",
"join",
"(",
"parts",
".",
"netloc",
".",
"split",
"(",
"':'",
")",
"[",
"0",
"]",
".",
"split",
"(",
"'.'",
")",
"[",
"-",
"2",
":",
"]",
")",
"except",
"IndexError",
":",
"return",
"parts",
".",
"netloc"
] |
Get tracker alias for announce URL, and if none is defined, the 2nd level domain.
|
[
"Get",
"tracker",
"alias",
"for",
"announce",
"URL",
"and",
"if",
"none",
"is",
"defined",
"the",
"2nd",
"level",
"domain",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/config.py#L37-L59
|
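When neither an exact URL nor a prefix match is found, the function falls back to the second-level domain. A small sketch of just that fallback (the URL is invented; on Python 3 the import would be ``urllib.parse``):

import urlparse  # Python 2 stdlib, as used above

parts = urlparse.urlparse('http://tracker.example.com:6969/announce')
print('.'.join(parts.netloc.split(':')[0].split('.')[-2:]))  # 'example.com'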
7,457
|
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
validate
|
def validate(key, val):
""" Validate a configuration value.
"""
if val and val.startswith("~/"):
return os.path.expanduser(val)
if key == "output_header_frequency":
return int(val, 10)
if key.endswith("_ecma48"):
return eval("'%s'" % val.replace("'", r"\'")) # pylint: disable=eval-used
return val
|
python
|
def validate(key, val):
""" Validate a configuration value.
"""
if val and val.startswith("~/"):
return os.path.expanduser(val)
if key == "output_header_frequency":
return int(val, 10)
if key.endswith("_ecma48"):
return eval("'%s'" % val.replace("'", r"\'")) # pylint: disable=eval-used
return val
|
[
"def",
"validate",
"(",
"key",
",",
"val",
")",
":",
"if",
"val",
"and",
"val",
".",
"startswith",
"(",
"\"~/\"",
")",
":",
"return",
"os",
".",
"path",
".",
"expanduser",
"(",
"val",
")",
"if",
"key",
"==",
"\"output_header_frequency\"",
":",
"return",
"int",
"(",
"val",
",",
"10",
")",
"if",
"key",
".",
"endswith",
"(",
"\"_ecma48\"",
")",
":",
"return",
"eval",
"(",
"\"'%s'\"",
"%",
"val",
".",
"replace",
"(",
"\"'\"",
",",
"r\"\\'\"",
")",
")",
"# pylint: disable=eval-used",
"return",
"val"
] |
Validate a configuration value.
|
[
"Validate",
"a",
"configuration",
"value",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L35-L45
|
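A hedged sketch of the conversion branches (the key names other than ``output_header_frequency`` and the ``_ecma48`` suffix are invented):

print(validate('session_dir', '~/rtorrent'))      # expanded, e.g. '/home/user/rtorrent'
print(validate('output_header_frequency', '10'))  # 10, as an int
bold = validate('bold_ecma48', '\\x1b[1m')        # eval turns the text into the ESC [ 1 m sequence
print(validate('anything_else', 'plain'))         # 'plain', returned unchanged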
7,458
|
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader._update_config
|
def _update_config(self, namespace): # pylint: disable=no-self-use
""" Inject the items from the given dict into the configuration.
"""
for key, val in namespace.items():
setattr(config, key, val)
|
python
|
def _update_config(self, namespace): # pylint: disable=no-self-use
""" Inject the items from the given dict into the configuration.
"""
for key, val in namespace.items():
setattr(config, key, val)
|
[
"def",
"_update_config",
"(",
"self",
",",
"namespace",
")",
":",
"# pylint: disable=no-self-use",
"for",
"key",
",",
"val",
"in",
"namespace",
".",
"items",
"(",
")",
":",
"setattr",
"(",
"config",
",",
"key",
",",
"val",
")"
] |
Inject the items from the given dict into the configuration.
|
[
"Inject",
"the",
"items",
"from",
"the",
"given",
"dict",
"into",
"the",
"configuration",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L85-L89
|
7,459
|
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader._interpolation_escape
|
def _interpolation_escape(self, namespace):
""" Re-escape interpolation strings.
"""
for key, val in namespace.items():
if '%' in val:
namespace[key] = self.INTERPOLATION_ESCAPE.sub(lambda match: '%' + match.group(0), val)
|
python
|
def _interpolation_escape(self, namespace):
""" Re-escape interpolation strings.
"""
for key, val in namespace.items():
if '%' in val:
namespace[key] = self.INTERPOLATION_ESCAPE.sub(lambda match: '%' + match.group(0), val)
|
[
"def",
"_interpolation_escape",
"(",
"self",
",",
"namespace",
")",
":",
"for",
"key",
",",
"val",
"in",
"namespace",
".",
"items",
"(",
")",
":",
"if",
"'%'",
"in",
"val",
":",
"namespace",
"[",
"key",
"]",
"=",
"self",
".",
"INTERPOLATION_ESCAPE",
".",
"sub",
"(",
"lambda",
"match",
":",
"'%'",
"+",
"match",
".",
"group",
"(",
"0",
")",
",",
"val",
")"
] |
Re-escape interpolation strings.
|
[
"Re",
"-",
"escape",
"interpolation",
"strings",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L92-L97
|
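``INTERPOLATION_ESCAPE`` is a class-level regex whose definition is not shown in this record; the sketch below assumes a pattern matching a bare ``%`` that is not part of a ConfigParser interpolation, so the substitution doubles it:

import re

INTERPOLATION_ESCAPE = re.compile(r'%[^%(]')  # assumed pattern, NOT taken from the source
val = '100% done'
print(INTERPOLATION_ESCAPE.sub(lambda match: '%' + match.group(0), val))  # '100%% done'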
7,460
|
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader._validate_namespace
|
def _validate_namespace(self, namespace):
""" Validate the given namespace. This method is idempotent!
"""
# Update config values (so other code can access them in the bootstrap phase)
self._update_config(namespace)
# Validate announce URLs
for key, val in namespace["announce"].items():
if isinstance(val, basestring):
namespace["announce"][key] = val.split()
# Re-escape output formats
self._interpolation_escape(namespace["formats"])
# Create objects from module specs
for factory in ("engine",):
if isinstance(namespace[factory], basestring):
namespace[factory] = pymagic.import_name(namespace[factory])() if namespace[factory] else None
# Do some standard type conversions
for key in namespace:
# Split lists
if key.endswith("_list") and isinstance(namespace[key], basestring):
namespace[key] = [i.strip() for i in namespace[key].replace(',', ' ').split()]
# Resolve factory and callback handler lists
elif any(key.endswith(i) for i in ("_factories", "_callbacks")) and isinstance(namespace[key], basestring):
namespace[key] = [pymagic.import_name(i.strip()) for i in namespace[key].replace(',', ' ').split()]
# Update config values again
self._update_config(namespace)
|
python
|
def _validate_namespace(self, namespace):
""" Validate the given namespace. This method is idempotent!
"""
# Update config values (so other code can access them in the bootstrap phase)
self._update_config(namespace)
# Validate announce URLs
for key, val in namespace["announce"].items():
if isinstance(val, basestring):
namespace["announce"][key] = val.split()
# Re-escape output formats
self._interpolation_escape(namespace["formats"])
# Create objects from module specs
for factory in ("engine",):
if isinstance(namespace[factory], basestring):
namespace[factory] = pymagic.import_name(namespace[factory])() if namespace[factory] else None
# Do some standard type conversions
for key in namespace:
# Split lists
if key.endswith("_list") and isinstance(namespace[key], basestring):
namespace[key] = [i.strip() for i in namespace[key].replace(',', ' ').split()]
# Resolve factory and callback handler lists
elif any(key.endswith(i) for i in ("_factories", "_callbacks")) and isinstance(namespace[key], basestring):
namespace[key] = [pymagic.import_name(i.strip()) for i in namespace[key].replace(',', ' ').split()]
# Update config values again
self._update_config(namespace)
|
[
"def",
"_validate_namespace",
"(",
"self",
",",
"namespace",
")",
":",
"# Update config values (so other code can access them in the bootstrap phase)",
"self",
".",
"_update_config",
"(",
"namespace",
")",
"# Validate announce URLs",
"for",
"key",
",",
"val",
"in",
"namespace",
"[",
"\"announce\"",
"]",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"val",
",",
"basestring",
")",
":",
"namespace",
"[",
"\"announce\"",
"]",
"[",
"key",
"]",
"=",
"val",
".",
"split",
"(",
")",
"# Re-escape output formats",
"self",
".",
"_interpolation_escape",
"(",
"namespace",
"[",
"\"formats\"",
"]",
")",
"# Create objects from module specs",
"for",
"factory",
"in",
"(",
"\"engine\"",
",",
")",
":",
"if",
"isinstance",
"(",
"namespace",
"[",
"factory",
"]",
",",
"basestring",
")",
":",
"namespace",
"[",
"factory",
"]",
"=",
"pymagic",
".",
"import_name",
"(",
"namespace",
"[",
"factory",
"]",
")",
"(",
")",
"if",
"namespace",
"[",
"factory",
"]",
"else",
"None",
"# Do some standard type conversions",
"for",
"key",
"in",
"namespace",
":",
"# Split lists",
"if",
"key",
".",
"endswith",
"(",
"\"_list\"",
")",
"and",
"isinstance",
"(",
"namespace",
"[",
"key",
"]",
",",
"basestring",
")",
":",
"namespace",
"[",
"key",
"]",
"=",
"[",
"i",
".",
"strip",
"(",
")",
"for",
"i",
"in",
"namespace",
"[",
"key",
"]",
".",
"replace",
"(",
"','",
",",
"' '",
")",
".",
"split",
"(",
")",
"]",
"# Resolve factory and callback handler lists",
"elif",
"any",
"(",
"key",
".",
"endswith",
"(",
"i",
")",
"for",
"i",
"in",
"(",
"\"_factories\"",
",",
"\"_callbacks\"",
")",
")",
"and",
"isinstance",
"(",
"namespace",
"[",
"key",
"]",
",",
"basestring",
")",
":",
"namespace",
"[",
"key",
"]",
"=",
"[",
"pymagic",
".",
"import_name",
"(",
"i",
".",
"strip",
"(",
")",
")",
"for",
"i",
"in",
"namespace",
"[",
"key",
"]",
".",
"replace",
"(",
"','",
",",
"' '",
")",
".",
"split",
"(",
")",
"]",
"# Update config values again",
"self",
".",
"_update_config",
"(",
"namespace",
")"
] |
Validate the given namespace. This method is idempotent!
|
[
"Validate",
"the",
"given",
"namespace",
".",
"This",
"method",
"is",
"idempotent!"
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L100-L130
|
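A sketch of the ``_list`` conversion convention used above (the key name is invented): comma- or space-separated strings become stripped lists:

key, val = 'tracker_list', 'a.example, b.example c.example'
print([i.strip() for i in val.replace(',', ' ').split()])
# ['a.example', 'b.example', 'c.example']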
7,461
|
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader._set_from_ini
|
def _set_from_ini(self, namespace, ini_file):
""" Copy values from loaded INI file to namespace.
"""
# Isolate global values
global_vars = dict((key, val)
for key, val in namespace.items()
if isinstance(val, basestring)
)
# Copy all sections
for section in ini_file.sections():
# Get values set so far
if section == "GLOBAL":
raw_vars = global_vars
else:
raw_vars = namespace.setdefault(section.lower(), {})
# Override with values set in this INI file
raw_vars.update(dict(ini_file.items(section, raw=True)))
# Interpolate and validate all values
if section == "FORMATS":
self._interpolation_escape(raw_vars)
raw_vars.update(dict(
(key, validate(key, val))
for key, val in ini_file.items(section, vars=raw_vars)
))
# Update global values
namespace.update(global_vars)
|
python
|
def _set_from_ini(self, namespace, ini_file):
""" Copy values from loaded INI file to namespace.
"""
# Isolate global values
global_vars = dict((key, val)
for key, val in namespace.items()
if isinstance(val, basestring)
)
# Copy all sections
for section in ini_file.sections():
# Get values set so far
if section == "GLOBAL":
raw_vars = global_vars
else:
raw_vars = namespace.setdefault(section.lower(), {})
# Override with values set in this INI file
raw_vars.update(dict(ini_file.items(section, raw=True)))
# Interpolate and validate all values
if section == "FORMATS":
self._interpolation_escape(raw_vars)
raw_vars.update(dict(
(key, validate(key, val))
for key, val in ini_file.items(section, vars=raw_vars)
))
# Update global values
namespace.update(global_vars)
|
[
"def",
"_set_from_ini",
"(",
"self",
",",
"namespace",
",",
"ini_file",
")",
":",
"# Isolate global values",
"global_vars",
"=",
"dict",
"(",
"(",
"key",
",",
"val",
")",
"for",
"key",
",",
"val",
"in",
"namespace",
".",
"items",
"(",
")",
"if",
"isinstance",
"(",
"val",
",",
"basestring",
")",
")",
"# Copy all sections",
"for",
"section",
"in",
"ini_file",
".",
"sections",
"(",
")",
":",
"# Get values set so far",
"if",
"section",
"==",
"\"GLOBAL\"",
":",
"raw_vars",
"=",
"global_vars",
"else",
":",
"raw_vars",
"=",
"namespace",
".",
"setdefault",
"(",
"section",
".",
"lower",
"(",
")",
",",
"{",
"}",
")",
"# Override with values set in this INI file",
"raw_vars",
".",
"update",
"(",
"dict",
"(",
"ini_file",
".",
"items",
"(",
"section",
",",
"raw",
"=",
"True",
")",
")",
")",
"# Interpolate and validate all values",
"if",
"section",
"==",
"\"FORMATS\"",
":",
"self",
".",
"_interpolation_escape",
"(",
"raw_vars",
")",
"raw_vars",
".",
"update",
"(",
"dict",
"(",
"(",
"key",
",",
"validate",
"(",
"key",
",",
"val",
")",
")",
"for",
"key",
",",
"val",
"in",
"ini_file",
".",
"items",
"(",
"section",
",",
"vars",
"=",
"raw_vars",
")",
")",
")",
"# Update global values",
"namespace",
".",
"update",
"(",
"global_vars",
")"
] |
Copy values from loaded INI file to namespace.
|
[
"Copy",
"values",
"from",
"loaded",
"INI",
"file",
"to",
"namespace",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L133-L162
|
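A sketch of how sections map into the namespace, using an invented two-section INI: keys in ``[GLOBAL]`` land at the top level, while every other section becomes a lower-cased sub-dict:

import ConfigParser, StringIO  # Python 2 stdlib, as in the surrounding code

ini = ConfigParser.SafeConfigParser()
ini.optionxform = str  # keep option names case-sensitive
ini.readfp(StringIO.StringIO(
    "[GLOBAL]\nconfig_script = ~/.pyrocore.py\n"
    "[ANNOUNCE]\nExample = http://tracker.example.com/announce\n"))
# After _set_from_ini(namespace, ini) one would expect:
#   namespace['config_script'] == '~/.pyrocore.py'
#   namespace['announce'] == {'Example': 'http://tracker.example.com/announce'}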
7,462
|
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader._set_defaults
|
def _set_defaults(self, namespace, optional_cfg_files):
""" Set default values in the given dict.
"""
# Add current configuration directory
namespace["config_dir"] = self.config_dir
# Load defaults
for idx, cfg_file in enumerate([self.CONFIG_INI] + optional_cfg_files):
if any(i in cfg_file for i in set('/' + os.sep)):
continue # skip any non-plain filenames
try:
defaults = pymagic.resource_string("pyrocore", "data/config/" + cfg_file) #@UndefinedVariable
except IOError as exc:
if idx and exc.errno == errno.ENOENT:
continue
raise
ini_file = ConfigParser.SafeConfigParser()
ini_file.optionxform = str # case-sensitive option names
ini_file.readfp(StringIO.StringIO(defaults), "<defaults>")
self._set_from_ini(namespace, ini_file)
|
python
|
def _set_defaults(self, namespace, optional_cfg_files):
""" Set default values in the given dict.
"""
# Add current configuration directory
namespace["config_dir"] = self.config_dir
# Load defaults
for idx, cfg_file in enumerate([self.CONFIG_INI] + optional_cfg_files):
if any(i in cfg_file for i in set('/' + os.sep)):
continue # skip any non-plain filenames
try:
defaults = pymagic.resource_string("pyrocore", "data/config/" + cfg_file) #@UndefinedVariable
except IOError as exc:
if idx and exc.errno == errno.ENOENT:
continue
raise
ini_file = ConfigParser.SafeConfigParser()
ini_file.optionxform = str # case-sensitive option names
ini_file.readfp(StringIO.StringIO(defaults), "<defaults>")
self._set_from_ini(namespace, ini_file)
|
[
"def",
"_set_defaults",
"(",
"self",
",",
"namespace",
",",
"optional_cfg_files",
")",
":",
"# Add current configuration directory",
"namespace",
"[",
"\"config_dir\"",
"]",
"=",
"self",
".",
"config_dir",
"# Load defaults",
"for",
"idx",
",",
"cfg_file",
"in",
"enumerate",
"(",
"[",
"self",
".",
"CONFIG_INI",
"]",
"+",
"optional_cfg_files",
")",
":",
"if",
"any",
"(",
"i",
"in",
"cfg_file",
"for",
"i",
"in",
"set",
"(",
"'/'",
"+",
"os",
".",
"sep",
")",
")",
":",
"continue",
"# skip any non-plain filenames",
"try",
":",
"defaults",
"=",
"pymagic",
".",
"resource_string",
"(",
"\"pyrocore\"",
",",
"\"data/config/\"",
"+",
"cfg_file",
")",
"#@UndefinedVariable",
"except",
"IOError",
"as",
"exc",
":",
"if",
"idx",
"and",
"exc",
".",
"errno",
"==",
"errno",
".",
"ENOENT",
":",
"continue",
"raise",
"ini_file",
"=",
"ConfigParser",
".",
"SafeConfigParser",
"(",
")",
"ini_file",
".",
"optionxform",
"=",
"str",
"# case-sensitive option names",
"ini_file",
".",
"readfp",
"(",
"StringIO",
".",
"StringIO",
"(",
"defaults",
")",
",",
"\"<defaults>\"",
")",
"self",
".",
"_set_from_ini",
"(",
"namespace",
",",
"ini_file",
")"
] |
Set default values in the given dict.
|
[
"Set",
"default",
"values",
"in",
"the",
"given",
"dict",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L165-L186
|
7,463
|
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader._load_ini
|
def _load_ini(self, namespace, config_file):
""" Load INI style configuration.
"""
self.LOG.debug("Loading %r..." % (config_file,))
ini_file = ConfigParser.SafeConfigParser()
ini_file.optionxform = str # case-sensitive option names
if ini_file.read(config_file):
self._set_from_ini(namespace, ini_file)
else:
self.LOG.warning("Configuration file %r not found,"
" use the command 'pyroadmin --create-config' to create it!" % (config_file,))
|
python
|
def _load_ini(self, namespace, config_file):
""" Load INI style configuration.
"""
self.LOG.debug("Loading %r..." % (config_file,))
ini_file = ConfigParser.SafeConfigParser()
ini_file.optionxform = str # case-sensitive option names
if ini_file.read(config_file):
self._set_from_ini(namespace, ini_file)
else:
self.LOG.warning("Configuration file %r not found,"
" use the command 'pyroadmin --create-config' to create it!" % (config_file,))
|
[
"def",
"_load_ini",
"(",
"self",
",",
"namespace",
",",
"config_file",
")",
":",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"Loading %r...\"",
"%",
"(",
"config_file",
",",
")",
")",
"ini_file",
"=",
"ConfigParser",
".",
"SafeConfigParser",
"(",
")",
"ini_file",
".",
"optionxform",
"=",
"str",
"# case-sensitive option names",
"if",
"ini_file",
".",
"read",
"(",
"config_file",
")",
":",
"self",
".",
"_set_from_ini",
"(",
"namespace",
",",
"ini_file",
")",
"else",
":",
"self",
".",
"LOG",
".",
"warning",
"(",
"\"Configuration file %r not found,\"",
"\" use the command 'pyroadmin --create-config' to create it!\"",
"%",
"(",
"config_file",
",",
")",
")"
] |
Load INI style configuration.
|
[
"Load",
"INI",
"style",
"configuration",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L189-L199
|
7,464
|
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader._load_py
|
def _load_py(self, namespace, config_file):
""" Load scripted configuration.
"""
if config_file and os.path.isfile(config_file):
self.LOG.debug("Loading %r..." % (config_file,))
exec(compile(open(config_file).read(), config_file, 'exec'), # pylint: disable=exec-used
vars(config), namespace)
else:
self.LOG.warning("Configuration file %r not found!" % (config_file,))
|
python
|
def _load_py(self, namespace, config_file):
""" Load scripted configuration.
"""
if config_file and os.path.isfile(config_file):
self.LOG.debug("Loading %r..." % (config_file,))
exec(compile(open(config_file).read(), config_file, 'exec'), # pylint: disable=exec-used
vars(config), namespace)
else:
self.LOG.warning("Configuration file %r not found!" % (config_file,))
|
[
"def",
"_load_py",
"(",
"self",
",",
"namespace",
",",
"config_file",
")",
":",
"if",
"config_file",
"and",
"os",
".",
"path",
".",
"isfile",
"(",
"config_file",
")",
":",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"Loading %r...\"",
"%",
"(",
"config_file",
",",
")",
")",
"exec",
"(",
"compile",
"(",
"open",
"(",
"config_file",
")",
".",
"read",
"(",
")",
",",
"config_file",
",",
"'exec'",
")",
",",
"# pylint: disable=exec-used",
"vars",
"(",
"config",
")",
",",
"namespace",
")",
"else",
":",
"self",
".",
"LOG",
".",
"warning",
"(",
"\"Configuration file %r not found!\"",
"%",
"(",
"config_file",
",",
")",
")"
] |
Load scripted configuration.
|
[
"Load",
"scripted",
"configuration",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L202-L210
|
7,465
|
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader.load
|
def load(self, optional_cfg_files=None):
""" Actually load the configuation from either the default location or the given directory.
"""
optional_cfg_files = optional_cfg_files or []
# Guard against coding errors
if self._loaded:
raise RuntimeError("INTERNAL ERROR: Attempt to load configuration twice!")
try:
# Load configuration
namespace = {}
self._set_defaults(namespace, optional_cfg_files)
self._load_ini(namespace, os.path.join(self.config_dir, self.CONFIG_INI))
for cfg_file in optional_cfg_files:
if not os.path.isabs(cfg_file):
cfg_file = os.path.join(self.config_dir, cfg_file)
if os.path.exists(cfg_file):
self._load_ini(namespace, cfg_file)
self._validate_namespace(namespace)
self._load_py(namespace, namespace["config_script"])
self._validate_namespace(namespace)
for callback in namespace["config_validator_callbacks"]:
callback()
except ConfigParser.ParsingError as exc:
raise error.UserError(exc)
# Ready to go...
self._loaded = True
|
python
|
def load(self, optional_cfg_files=None):
""" Actually load the configuation from either the default location or the given directory.
"""
optional_cfg_files = optional_cfg_files or []
# Guard against coding errors
if self._loaded:
raise RuntimeError("INTERNAL ERROR: Attempt to load configuration twice!")
try:
# Load configuration
namespace = {}
self._set_defaults(namespace, optional_cfg_files)
self._load_ini(namespace, os.path.join(self.config_dir, self.CONFIG_INI))
for cfg_file in optional_cfg_files:
if not os.path.isabs(cfg_file):
cfg_file = os.path.join(self.config_dir, cfg_file)
if os.path.exists(cfg_file):
self._load_ini(namespace, cfg_file)
self._validate_namespace(namespace)
self._load_py(namespace, namespace["config_script"])
self._validate_namespace(namespace)
for callback in namespace["config_validator_callbacks"]:
callback()
except ConfigParser.ParsingError as exc:
raise error.UserError(exc)
# Ready to go...
self._loaded = True
|
[
"def",
"load",
"(",
"self",
",",
"optional_cfg_files",
"=",
"None",
")",
":",
"optional_cfg_files",
"=",
"optional_cfg_files",
"or",
"[",
"]",
"# Guard against coding errors",
"if",
"self",
".",
"_loaded",
":",
"raise",
"RuntimeError",
"(",
"\"INTERNAL ERROR: Attempt to load configuration twice!\"",
")",
"try",
":",
"# Load configuration",
"namespace",
"=",
"{",
"}",
"self",
".",
"_set_defaults",
"(",
"namespace",
",",
"optional_cfg_files",
")",
"self",
".",
"_load_ini",
"(",
"namespace",
",",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"config_dir",
",",
"self",
".",
"CONFIG_INI",
")",
")",
"for",
"cfg_file",
"in",
"optional_cfg_files",
":",
"if",
"not",
"os",
".",
"path",
".",
"isabs",
"(",
"cfg_file",
")",
":",
"cfg_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"config_dir",
",",
"cfg_file",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"cfg_file",
")",
":",
"self",
".",
"_load_ini",
"(",
"namespace",
",",
"cfg_file",
")",
"self",
".",
"_validate_namespace",
"(",
"namespace",
")",
"self",
".",
"_load_py",
"(",
"namespace",
",",
"namespace",
"[",
"\"config_script\"",
"]",
")",
"self",
".",
"_validate_namespace",
"(",
"namespace",
")",
"for",
"callback",
"in",
"namespace",
"[",
"\"config_validator_callbacks\"",
"]",
":",
"callback",
"(",
")",
"except",
"ConfigParser",
".",
"ParsingError",
"as",
"exc",
":",
"raise",
"error",
".",
"UserError",
"(",
"exc",
")",
"# Ready to go...",
"self",
".",
"_loaded",
"=",
"True"
] |
Actually load the configuration from either the default location or the given directory.
|
[
"Actually",
"load",
"the",
"configuation",
"from",
"either",
"the",
"default",
"location",
"or",
"the",
"given",
"directory",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L213-L246
|
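A hypothetical bootstrap sequence; the ``ConfigLoader`` constructor signature is assumed here, it is not shown in this record:

loader = ConfigLoader('~/.pyroscope')  # assumed: takes the config_dir
loader.load()   # defaults -> config.ini -> optional overlays -> config.py -> validation
loader.load()   # a second call raises RuntimeError, guarding against double loads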
7,466
|
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader.create
|
def create(self, remove_all_rc_files=False):
""" Create default configuration files at either the default location or the given directory.
"""
# Check and create configuration directory
if os.path.exists(self.config_dir):
self.LOG.debug("Configuration directory %r already exists!" % (self.config_dir,))
else:
os.mkdir(self.config_dir)
if remove_all_rc_files:
for subdir in ('.', 'rtorrent.d'):
config_files = list(glob.glob(os.path.join(os.path.abspath(self.config_dir), subdir, '*.rc')))
config_files += list(glob.glob(os.path.join(os.path.abspath(self.config_dir), subdir, '*.rc.default')))
for config_file in config_files:
self.LOG.info("Removing %r!" % (config_file,))
os.remove(config_file)
# Create default configuration files
for filepath in sorted(walk_resources("pyrocore", "data/config")):
# Load from package data
text = pymagic.resource_string("pyrocore", "data/config" + filepath)
# Create missing subdirs
config_file = self.config_dir + filepath
if not os.path.exists(os.path.dirname(config_file)):
os.makedirs(os.path.dirname(config_file))
# Write configuration files
config_trail = [".default"]
if os.path.exists(config_file):
self.LOG.debug("Configuration file %r already exists!" % (config_file,))
else:
config_trail.append('')
for i in config_trail:
with open(config_file + i, "w") as handle:
handle.write(text)
self.LOG.info("Configuration file %r written!" % (config_file + i,))
|
python
|
def create(self, remove_all_rc_files=False):
""" Create default configuration files at either the default location or the given directory.
"""
# Check and create configuration directory
if os.path.exists(self.config_dir):
self.LOG.debug("Configuration directory %r already exists!" % (self.config_dir,))
else:
os.mkdir(self.config_dir)
if remove_all_rc_files:
for subdir in ('.', 'rtorrent.d'):
config_files = list(glob.glob(os.path.join(os.path.abspath(self.config_dir), subdir, '*.rc')))
config_files += list(glob.glob(os.path.join(os.path.abspath(self.config_dir), subdir, '*.rc.default')))
for config_file in config_files:
self.LOG.info("Removing %r!" % (config_file,))
os.remove(config_file)
# Create default configuration files
for filepath in sorted(walk_resources("pyrocore", "data/config")):
# Load from package data
text = pymagic.resource_string("pyrocore", "data/config" + filepath)
# Create missing subdirs
config_file = self.config_dir + filepath
if not os.path.exists(os.path.dirname(config_file)):
os.makedirs(os.path.dirname(config_file))
# Write configuration files
config_trail = [".default"]
if os.path.exists(config_file):
self.LOG.debug("Configuration file %r already exists!" % (config_file,))
else:
config_trail.append('')
for i in config_trail:
with open(config_file + i, "w") as handle:
handle.write(text)
self.LOG.info("Configuration file %r written!" % (config_file + i,))
|
[
"def",
"create",
"(",
"self",
",",
"remove_all_rc_files",
"=",
"False",
")",
":",
"# Check and create configuration directory",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"config_dir",
")",
":",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"Configuration directory %r already exists!\"",
"%",
"(",
"self",
".",
"config_dir",
",",
")",
")",
"else",
":",
"os",
".",
"mkdir",
"(",
"self",
".",
"config_dir",
")",
"if",
"remove_all_rc_files",
":",
"for",
"subdir",
"in",
"(",
"'.'",
",",
"'rtorrent.d'",
")",
":",
"config_files",
"=",
"list",
"(",
"glob",
".",
"glob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"self",
".",
"config_dir",
")",
",",
"subdir",
",",
"'*.rc'",
")",
")",
")",
"config_files",
"+=",
"list",
"(",
"glob",
".",
"glob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"self",
".",
"config_dir",
")",
",",
"subdir",
",",
"'*.rc.default'",
")",
")",
")",
"for",
"config_file",
"in",
"config_files",
":",
"self",
".",
"LOG",
".",
"info",
"(",
"\"Removing %r!\"",
"%",
"(",
"config_file",
",",
")",
")",
"os",
".",
"remove",
"(",
"config_file",
")",
"# Create default configuration files",
"for",
"filepath",
"in",
"sorted",
"(",
"walk_resources",
"(",
"\"pyrocore\"",
",",
"\"data/config\"",
")",
")",
":",
"# Load from package data",
"text",
"=",
"pymagic",
".",
"resource_string",
"(",
"\"pyrocore\"",
",",
"\"data/config\"",
"+",
"filepath",
")",
"# Create missing subdirs",
"config_file",
"=",
"self",
".",
"config_dir",
"+",
"filepath",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"config_file",
")",
")",
":",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"config_file",
")",
")",
"# Write configuration files",
"config_trail",
"=",
"[",
"\".default\"",
"]",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"config_file",
")",
":",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"Configuration file %r already exists!\"",
"%",
"(",
"config_file",
",",
")",
")",
"else",
":",
"config_trail",
".",
"append",
"(",
"''",
")",
"for",
"i",
"in",
"config_trail",
":",
"with",
"open",
"(",
"config_file",
"+",
"i",
",",
"\"w\"",
")",
"as",
"handle",
":",
"handle",
".",
"write",
"(",
"text",
")",
"self",
".",
"LOG",
".",
"info",
"(",
"\"Configuration file %r written!\"",
"%",
"(",
"config_file",
"+",
"i",
",",
")",
")"
] |
Create default configuration files at either the default location or the given directory.
|
[
"Create",
"default",
"configuration",
"files",
"at",
"either",
"the",
"default",
"location",
"or",
"the",
"given",
"directory",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L249-L285
|
7,467
|
pyroscope/pyrocore
|
src/pyrocore/scripts/mktor.py
|
MetafileCreator.make_magnet_meta
|
def make_magnet_meta(self, magnet_uri):
""" Create a magnet-uri torrent.
"""
import cgi
import hashlib
if magnet_uri.startswith("magnet:"):
magnet_uri = magnet_uri[7:]
meta = {"magnet-uri": "magnet:" + magnet_uri}
magnet_params = cgi.parse_qs(magnet_uri.lstrip('?'))
meta_name = magnet_params.get("xt", [hashlib.sha1(magnet_uri).hexdigest()])[0]
if "dn" in magnet_params:
meta_name = "%s-%s" % (magnet_params["dn"][0], meta_name)
meta_name = re.sub(r"[^-_,a-zA-Z0-9]+", '.', meta_name).strip('.').replace("urn.btih.", "")
if not config.magnet_watch:
self.fatal("You MUST set the 'magnet_watch' config option!")
meta_path = os.path.join(config.magnet_watch, "magnet-%s.torrent" % meta_name)
self.LOG.debug("Writing magnet-uri metafile %r..." % (meta_path,))
try:
bencode.bwrite(meta_path, meta)
except EnvironmentError as exc:
self.fatal("Error writing magnet-uri metafile %r (%s)" % (meta_path, exc,))
raise
|
python
|
def make_magnet_meta(self, magnet_uri):
""" Create a magnet-uri torrent.
"""
import cgi
import hashlib
if magnet_uri.startswith("magnet:"):
magnet_uri = magnet_uri[7:]
meta = {"magnet-uri": "magnet:" + magnet_uri}
magnet_params = cgi.parse_qs(magnet_uri.lstrip('?'))
meta_name = magnet_params.get("xt", [hashlib.sha1(magnet_uri).hexdigest()])[0]
if "dn" in magnet_params:
meta_name = "%s-%s" % (magnet_params["dn"][0], meta_name)
meta_name = re.sub(r"[^-_,a-zA-Z0-9]+", '.', meta_name).strip('.').replace("urn.btih.", "")
if not config.magnet_watch:
self.fatal("You MUST set the 'magnet_watch' config option!")
meta_path = os.path.join(config.magnet_watch, "magnet-%s.torrent" % meta_name)
self.LOG.debug("Writing magnet-uri metafile %r..." % (meta_path,))
try:
bencode.bwrite(meta_path, meta)
except EnvironmentError as exc:
self.fatal("Error writing magnet-uri metafile %r (%s)" % (meta_path, exc,))
raise
|
[
"def",
"make_magnet_meta",
"(",
"self",
",",
"magnet_uri",
")",
":",
"import",
"cgi",
"import",
"hashlib",
"if",
"magnet_uri",
".",
"startswith",
"(",
"\"magnet:\"",
")",
":",
"magnet_uri",
"=",
"magnet_uri",
"[",
"7",
":",
"]",
"meta",
"=",
"{",
"\"magnet-uri\"",
":",
"\"magnet:\"",
"+",
"magnet_uri",
"}",
"magnet_params",
"=",
"cgi",
".",
"parse_qs",
"(",
"magnet_uri",
".",
"lstrip",
"(",
"'?'",
")",
")",
"meta_name",
"=",
"magnet_params",
".",
"get",
"(",
"\"xt\"",
",",
"[",
"hashlib",
".",
"sha1",
"(",
"magnet_uri",
")",
".",
"hexdigest",
"(",
")",
"]",
")",
"[",
"0",
"]",
"if",
"\"dn\"",
"in",
"magnet_params",
":",
"meta_name",
"=",
"\"%s-%s\"",
"%",
"(",
"magnet_params",
"[",
"\"dn\"",
"]",
"[",
"0",
"]",
",",
"meta_name",
")",
"meta_name",
"=",
"re",
".",
"sub",
"(",
"r\"[^-_,a-zA-Z0-9]+\"",
",",
"'.'",
",",
"meta_name",
")",
".",
"strip",
"(",
"'.'",
")",
".",
"replace",
"(",
"\"urn.btih.\"",
",",
"\"\"",
")",
"if",
"not",
"config",
".",
"magnet_watch",
":",
"self",
".",
"fatal",
"(",
"\"You MUST set the 'magnet_watch' config option!\"",
")",
"meta_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"config",
".",
"magnet_watch",
",",
"\"magnet-%s.torrent\"",
"%",
"meta_name",
")",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"Writing magnet-uri metafile %r...\"",
"%",
"(",
"meta_path",
",",
")",
")",
"try",
":",
"bencode",
".",
"bwrite",
"(",
"meta_path",
",",
"meta",
")",
"except",
"EnvironmentError",
"as",
"exc",
":",
"self",
".",
"fatal",
"(",
"\"Error writing magnet-uri metafile %r (%s)\"",
"%",
"(",
"meta_path",
",",
"exc",
",",
")",
")",
"raise"
] |
Create a magnet-uri torrent.
|
[
"Create",
"a",
"magnet",
"-",
"uri",
"torrent",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/mktor.py#L84-L109
|
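A sketch of just the filename mangling step, applied to an invented info-hash:

import re

meta_name = 'urn:btih:0123456789abcdef0123456789abcdef01234567'  # invented
meta_name = re.sub(r"[^-_,a-zA-Z0-9]+", '.', meta_name).strip('.').replace("urn.btih.", "")
print(meta_name)  # '0123456789abcdef0123456789abcdef01234567'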
7,468
|
pyroscope/pyrocore
|
src/pyrocore/util/pymagic.py
|
get_class_logger
|
def get_class_logger(obj):
""" Get a logger specific for the given object's class.
"""
return logging.getLogger(obj.__class__.__module__ + '.' + obj.__class__.__name__)
|
python
|
def get_class_logger(obj):
""" Get a logger specific for the given object's class.
"""
return logging.getLogger(obj.__class__.__module__ + '.' + obj.__class__.__name__)
|
[
"def",
"get_class_logger",
"(",
"obj",
")",
":",
"return",
"logging",
".",
"getLogger",
"(",
"obj",
".",
"__class__",
".",
"__module__",
"+",
"'.'",
"+",
"obj",
".",
"__class__",
".",
"__name__",
")"
] |
Get a logger specific for the given object's class.
|
[
"Get",
"a",
"logger",
"specific",
"for",
"the",
"given",
"object",
"s",
"class",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/pymagic.py#L67-L70
|
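Assuming the helper above is in scope, a typical use is caching a per-class logger on the instance (``Widget`` is an invented example class):

import logging
logging.basicConfig(level=logging.DEBUG)

class Widget(object):
    def __init__(self):
        self.LOG = get_class_logger(self)  # logger named '<module>.Widget'

Widget().LOG.debug("created")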
7,469
|
pyroscope/pyrocore
|
src/pyrocore/util/pymagic.py
|
JSONEncoder.default
|
def default(self, o): # pylint: disable=method-hidden
"""Support more object types."""
if isinstance(o, set):
return list(sorted(o))
elif hasattr(o, 'as_dict'):
return o.as_dict()
else:
return super(JSONEncoder, self).default(o)
|
python
|
def default(self, o): # pylint: disable=method-hidden
"""Support more object types."""
if isinstance(o, set):
return list(sorted(o))
elif hasattr(o, 'as_dict'):
return o.as_dict()
else:
return super(JSONEncoder, self).default(o)
|
[
"def",
"default",
"(",
"self",
",",
"o",
")",
":",
"# pylint: disable=method-hidden",
"if",
"isinstance",
"(",
"o",
",",
"set",
")",
":",
"return",
"list",
"(",
"sorted",
"(",
"o",
")",
")",
"elif",
"hasattr",
"(",
"o",
",",
"'as_dict'",
")",
":",
"return",
"o",
".",
"as_dict",
"(",
")",
"else",
":",
"return",
"super",
"(",
"JSONEncoder",
",",
"self",
")",
".",
"default",
"(",
"o",
")"
] |
Support more object types.
|
[
"Support",
"more",
"object",
"types",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/pymagic.py#L85-L92
|
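With the subclass above in scope, sets serialize as sorted lists, and any object exposing ``as_dict()`` serializes via that hook:

import json

print(json.dumps({'tags': set(['b', 'a'])}, cls=JSONEncoder))  # {"tags": ["a", "b"]}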
7,470
|
pyroscope/pyrocore
|
src/pyrocore/torrent/formatting.py
|
fmt_sz
|
def fmt_sz(intval):
""" Format a byte sized value.
"""
try:
return fmt.human_size(intval)
except (ValueError, TypeError):
return "N/A".rjust(len(fmt.human_size(0)))
|
python
|
def fmt_sz(intval):
""" Format a byte sized value.
"""
try:
return fmt.human_size(intval)
except (ValueError, TypeError):
return "N/A".rjust(len(fmt.human_size(0)))
|
[
"def",
"fmt_sz",
"(",
"intval",
")",
":",
"try",
":",
"return",
"fmt",
".",
"human_size",
"(",
"intval",
")",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"return",
"\"N/A\"",
".",
"rjust",
"(",
"len",
"(",
"fmt",
".",
"human_size",
"(",
"0",
")",
")",
")"
] |
Format a byte sized value.
|
[
"Format",
"a",
"byte",
"sized",
"value",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/formatting.py#L41-L47
|
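A hedged usage sketch; the exact rendering depends on ``fmt.human_size``, which is not shown in this record:

print(fmt_sz(1024 * 1024))  # a human-readable size, e.g. '   1.0 MiB'
print(fmt_sz(None))         # 'N/A', right-justified to the same column width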
7,471
|
pyroscope/pyrocore
|
src/pyrocore/torrent/formatting.py
|
fmt_iso
|
def fmt_iso(timestamp):
""" Format a UNIX timestamp to an ISO datetime string.
"""
try:
return fmt.iso_datetime(timestamp)
except (ValueError, TypeError):
return "N/A".rjust(len(fmt.iso_datetime(0)))
|
python
|
def fmt_iso(timestamp):
""" Format a UNIX timestamp to an ISO datetime string.
"""
try:
return fmt.iso_datetime(timestamp)
except (ValueError, TypeError):
return "N/A".rjust(len(fmt.iso_datetime(0)))
|
[
"def",
"fmt_iso",
"(",
"timestamp",
")",
":",
"try",
":",
"return",
"fmt",
".",
"iso_datetime",
"(",
"timestamp",
")",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"return",
"\"N/A\"",
".",
"rjust",
"(",
"len",
"(",
"fmt",
".",
"iso_datetime",
"(",
"0",
")",
")",
")"
] |
Format a UNIX timestamp to an ISO datetime string.
|
[
"Format",
"a",
"UNIX",
"timestamp",
"to",
"an",
"ISO",
"datetime",
"string",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/formatting.py#L50-L56
|
7,472
|
pyroscope/pyrocore
|
src/pyrocore/torrent/formatting.py
|
fmt_duration
|
def fmt_duration(duration):
""" Format a duration value in seconds to a readable form.
"""
try:
return fmt.human_duration(float(duration), 0, 2, True)
except (ValueError, TypeError):
return "N/A".rjust(len(fmt.human_duration(0, 0, 2, True)))
|
python
|
def fmt_duration(duration):
""" Format a duration value in seconds to a readable form.
"""
try:
return fmt.human_duration(float(duration), 0, 2, True)
except (ValueError, TypeError):
return "N/A".rjust(len(fmt.human_duration(0, 0, 2, True)))
|
[
"def",
"fmt_duration",
"(",
"duration",
")",
":",
"try",
":",
"return",
"fmt",
".",
"human_duration",
"(",
"float",
"(",
"duration",
")",
",",
"0",
",",
"2",
",",
"True",
")",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"return",
"\"N/A\"",
".",
"rjust",
"(",
"len",
"(",
"fmt",
".",
"human_duration",
"(",
"0",
",",
"0",
",",
"2",
",",
"True",
")",
")",
")"
] |
Format a duration value in seconds to a readable form.
|
[
"Format",
"a",
"duration",
"value",
"in",
"seconds",
"to",
"a",
"readable",
"form",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/formatting.py#L59-L65
|
7,473
|
pyroscope/pyrocore
|
src/pyrocore/torrent/formatting.py
|
fmt_subst
|
def fmt_subst(regex, subst):
"""Replace regex with string."""
return lambda text: re.sub(regex, subst, text) if text else text
|
python
|
def fmt_subst(regex, subst):
"""Replace regex with string."""
return lambda text: re.sub(regex, subst, text) if text else text
|
[
"def",
"fmt_subst",
"(",
"regex",
",",
"subst",
")",
":",
"return",
"lambda",
"text",
":",
"re",
".",
"sub",
"(",
"regex",
",",
"subst",
",",
"text",
")",
"if",
"text",
"else",
"text"
] |
Replace regex with string.
|
[
"Replace",
"regex",
"with",
"string",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/formatting.py#L89-L91
|
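The returned callable works as a small formatting filter; note that falsy input is passed through untouched:

strip_ext = fmt_subst(r'\.torrent$', '')
print(strip_ext('ubuntu.iso.torrent'))  # 'ubuntu.iso'
print(strip_ext(''))                    # '' -- empty text is returned as-is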
7,474
|
pyroscope/pyrocore
|
src/pyrocore/torrent/formatting.py
|
preparse
|
def preparse(output_format):
""" Do any special processing of a template, and return the result.
"""
try:
return templating.preparse(output_format, lambda path: os.path.join(config.config_dir, "templates", path))
except ImportError as exc:
if "tempita" in str(exc):
raise error.UserError("To be able to use Tempita templates, install the 'tempita' package (%s)\n"
" Possibly USING THE FOLLOWING COMMAND:\n"
" %s/easy_install tempita" % (exc, os.path.dirname(sys.executable)))
raise
except IOError as exc:
raise error.LoggableError("Cannot read template: {}".format(exc))
|
python
|
def preparse(output_format):
""" Do any special processing of a template, and return the result.
"""
try:
return templating.preparse(output_format, lambda path: os.path.join(config.config_dir, "templates", path))
except ImportError as exc:
if "tempita" in str(exc):
raise error.UserError("To be able to use Tempita templates, install the 'tempita' package (%s)\n"
" Possibly USING THE FOLLOWING COMMAND:\n"
" %s/easy_install tempita" % (exc, os.path.dirname(sys.executable)))
raise
except IOError as exc:
raise error.LoggableError("Cannot read template: {}".format(exc))
|
[
"def",
"preparse",
"(",
"output_format",
")",
":",
"try",
":",
"return",
"templating",
".",
"preparse",
"(",
"output_format",
",",
"lambda",
"path",
":",
"os",
".",
"path",
".",
"join",
"(",
"config",
".",
"config_dir",
",",
"\"templates\"",
",",
"path",
")",
")",
"except",
"ImportError",
"as",
"exc",
":",
"if",
"\"tempita\"",
"in",
"str",
"(",
"exc",
")",
":",
"raise",
"error",
".",
"UserError",
"(",
"\"To be able to use Tempita templates, install the 'tempita' package (%s)\\n\"",
"\" Possibly USING THE FOLLOWING COMMAND:\\n\"",
"\" %s/easy_install tempita\"",
"%",
"(",
"exc",
",",
"os",
".",
"path",
".",
"dirname",
"(",
"sys",
".",
"executable",
")",
")",
")",
"raise",
"except",
"IOError",
"as",
"exc",
":",
"raise",
"error",
".",
"LoggableError",
"(",
"\"Cannot read template: {}\"",
".",
"format",
"(",
"exc",
")",
")"
] |
Do any special processing of a template, and return the result.
|
[
"Do",
"any",
"special",
"processing",
"of",
"a",
"template",
"and",
"return",
"the",
"result",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/formatting.py#L214-L226
|
7,475
|
pyroscope/pyrocore
|
src/pyrocore/torrent/formatting.py
|
validate_field_list
|
def validate_field_list(fields, allow_fmt_specs=False, name_filter=None):
""" Make sure the fields in the given list exist.
@param fields: List of fields (comma-/space-separated if a string).
@type fields: list or str
@return: validated field names.
@rtype: list
"""
formats = [i[4:] for i in globals() if i.startswith("fmt_")]
try:
fields = [i.strip() for i in fields.replace(',', ' ').split()]
except AttributeError:
# Not a string, expecting an iterable
pass
if name_filter:
fields = [name_filter(name) for name in fields]
for name in fields:
if allow_fmt_specs and '.' in name:
fullname = name
name, fmtspecs = name.split('.', 1)
for fmtspec in fmtspecs.split('.'):
if fmtspec not in formats and fmtspec != "raw":
raise error.UserError("Unknown format specification %r in %r" % (fmtspec, fullname))
if name not in engine.FieldDefinition.FIELDS and not engine.TorrentProxy.add_manifold_attribute(name):
raise error.UserError("Unknown field name %r" % (name,))
return fields
|
python
|
def validate_field_list(fields, allow_fmt_specs=False, name_filter=None):
""" Make sure the fields in the given list exist.
@param fields: List of fields (comma-/space-separated if a string).
@type fields: list or str
@return: validated field names.
@rtype: list
"""
formats = [i[4:] for i in globals() if i.startswith("fmt_")]
try:
fields = [i.strip() for i in fields.replace(',', ' ').split()]
except AttributeError:
# Not a string, expecting an iterable
pass
if name_filter:
fields = [name_filter(name) for name in fields]
for name in fields:
if allow_fmt_specs and '.' in name:
fullname = name
name, fmtspecs = name.split('.', 1)
for fmtspec in fmtspecs.split('.'):
if fmtspec not in formats and fmtspec != "raw":
raise error.UserError("Unknown format specification %r in %r" % (fmtspec, fullname))
if name not in engine.FieldDefinition.FIELDS and not engine.TorrentProxy.add_manifold_attribute(name):
raise error.UserError("Unknown field name %r" % (name,))
return fields
|
[
"def",
"validate_field_list",
"(",
"fields",
",",
"allow_fmt_specs",
"=",
"False",
",",
"name_filter",
"=",
"None",
")",
":",
"formats",
"=",
"[",
"i",
"[",
"4",
":",
"]",
"for",
"i",
"in",
"globals",
"(",
")",
"if",
"i",
".",
"startswith",
"(",
"\"fmt_\"",
")",
"]",
"try",
":",
"fields",
"=",
"[",
"i",
".",
"strip",
"(",
")",
"for",
"i",
"in",
"fields",
".",
"replace",
"(",
"','",
",",
"' '",
")",
".",
"split",
"(",
")",
"]",
"except",
"AttributeError",
":",
"# Not a string, expecting an iterable",
"pass",
"if",
"name_filter",
":",
"fields",
"=",
"[",
"name_filter",
"(",
"name",
")",
"for",
"name",
"in",
"fields",
"]",
"for",
"name",
"in",
"fields",
":",
"if",
"allow_fmt_specs",
"and",
"'.'",
"in",
"name",
":",
"fullname",
"=",
"name",
"name",
",",
"fmtspecs",
"=",
"name",
".",
"split",
"(",
"'.'",
",",
"1",
")",
"for",
"fmtspec",
"in",
"fmtspecs",
".",
"split",
"(",
"'.'",
")",
":",
"if",
"fmtspec",
"not",
"in",
"formats",
"and",
"fmtspec",
"!=",
"\"raw\"",
":",
"raise",
"error",
".",
"UserError",
"(",
"\"Unknown format specification %r in %r\"",
"%",
"(",
"fmtspec",
",",
"fullname",
")",
")",
"if",
"name",
"not",
"in",
"engine",
".",
"FieldDefinition",
".",
"FIELDS",
"and",
"not",
"engine",
".",
"TorrentProxy",
".",
"add_manifold_attribute",
"(",
"name",
")",
":",
"raise",
"error",
".",
"UserError",
"(",
"\"Unknown field name %r\"",
"%",
"(",
"name",
",",
")",
")",
"return",
"fields"
] |
Make sure the fields in the given list exist.
@param fields: List of fields (comma-/space-separated if a string).
@type fields: list or str
@return: validated field names.
@rtype: list
|
[
"Make",
"sure",
"the",
"fields",
"in",
"the",
"given",
"list",
"exist",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/formatting.py#L319-L349
|
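Hypothetical calls; which plain names are valid depends on ``engine.FieldDefinition.FIELDS``, so ``name`` and ``size`` are assumptions here, while ``sz`` refers to the ``fmt_sz`` helper shown earlier:

validate_field_list('name,size')                         # -> ['name', 'size']
validate_field_list('size.sz', allow_fmt_specs=True)     # ok: 'sz' maps to fmt_sz
validate_field_list('size.bogus', allow_fmt_specs=True)  # raises error.UserError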
7,476
|
pyroscope/pyrocore
|
src/pyrocore/torrent/formatting.py
|
validate_sort_fields
|
def validate_sort_fields(sort_fields):
""" Make sure the fields in the given list exist, and return sorting key.
If field names are prefixed with '-', sort order is reversed for that field (descending).
"""
# Allow descending order per field by prefixing with '-'
descending = set()
def sort_order_filter(name):
"Helper to remove flag and memoize sort order"
if name.startswith('-'):
name = name[1:]
descending.add(name)
return name
# Split and validate field list
sort_fields = validate_field_list(sort_fields, name_filter=sort_order_filter)
log.debug("Sorting order is: %s" % ", ".join([('-' if i in descending else '') + i
for i in sort_fields]))
# No descending fields?
if not descending:
return operator.attrgetter(*tuple(sort_fields))
# Need to provide complex key
class Key(object):
"Complex sort order key"
def __init__(self, obj, *args):
"Remember object to be compared"
self.obj = obj
def __lt__(self, other):
"Compare to other key"
for field in sort_fields:
lhs, rhs = getattr(self.obj, field), getattr(other.obj, field)
if lhs == rhs:
continue
return rhs < lhs if field in descending else lhs < rhs
return False
return Key
|
python
|
def validate_sort_fields(sort_fields):
""" Make sure the fields in the given list exist, and return sorting key.
If field names are prefixed with '-', sort order is reversed for that field (descending).
"""
# Allow descending order per field by prefixing with '-'
descending = set()
def sort_order_filter(name):
"Helper to remove flag and memoize sort order"
if name.startswith('-'):
name = name[1:]
descending.add(name)
return name
# Split and validate field list
sort_fields = validate_field_list(sort_fields, name_filter=sort_order_filter)
log.debug("Sorting order is: %s" % ", ".join([('-' if i in descending else '') + i
for i in sort_fields]))
# No descending fields?
if not descending:
return operator.attrgetter(*tuple(sort_fields))
# Need to provide complex key
class Key(object):
"Complex sort order key"
def __init__(self, obj, *args):
"Remember object to be compared"
self.obj = obj
def __lt__(self, other):
"Compare to other key"
for field in sort_fields:
lhs, rhs = getattr(self.obj, field), getattr(other.obj, field)
if lhs == rhs:
continue
return rhs < lhs if field in descending else lhs < rhs
return False
return Key
|
[
"def",
"validate_sort_fields",
"(",
"sort_fields",
")",
":",
"# Allow descending order per field by prefixing with '-'",
"descending",
"=",
"set",
"(",
")",
"def",
"sort_order_filter",
"(",
"name",
")",
":",
"\"Helper to remove flag and memoize sort order\"",
"if",
"name",
".",
"startswith",
"(",
"'-'",
")",
":",
"name",
"=",
"name",
"[",
"1",
":",
"]",
"descending",
".",
"add",
"(",
"name",
")",
"return",
"name",
"# Split and validate field list",
"sort_fields",
"=",
"validate_field_list",
"(",
"sort_fields",
",",
"name_filter",
"=",
"sort_order_filter",
")",
"log",
".",
"debug",
"(",
"\"Sorting order is: %s\"",
"%",
"\", \"",
".",
"join",
"(",
"[",
"(",
"'-'",
"if",
"i",
"in",
"descending",
"else",
"''",
")",
"+",
"i",
"for",
"i",
"in",
"sort_fields",
"]",
")",
")",
"# No descending fields?",
"if",
"not",
"descending",
":",
"return",
"operator",
".",
"attrgetter",
"(",
"*",
"tuple",
"(",
"sort_fields",
")",
")",
"# Need to provide complex key",
"class",
"Key",
"(",
"object",
")",
":",
"\"Complex sort order key\"",
"def",
"__init__",
"(",
"self",
",",
"obj",
",",
"*",
"args",
")",
":",
"\"Remember object to be compared\"",
"self",
".",
"obj",
"=",
"obj",
"def",
"__lt__",
"(",
"self",
",",
"other",
")",
":",
"\"Compare to other key\"",
"for",
"field",
"in",
"sort_fields",
":",
"lhs",
",",
"rhs",
"=",
"getattr",
"(",
"self",
".",
"obj",
",",
"field",
")",
",",
"getattr",
"(",
"other",
".",
"obj",
",",
"field",
")",
"if",
"lhs",
"==",
"rhs",
":",
"continue",
"return",
"rhs",
"<",
"lhs",
"if",
"field",
"in",
"descending",
"else",
"lhs",
"<",
"rhs",
"return",
"False",
"return",
"Key"
] |
Make sure the fields in the given list exist, and return sorting key.
If field names are prefixed with '-', sort order is reversed for that field (descending).
|
[
"Make",
"sure",
"the",
"fields",
"in",
"the",
"given",
"list",
"exist",
"and",
"return",
"sorting",
"key",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/formatting.py#L352-L390
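A short sketch of the returned key in action (assuming 'alias' and 'size' are registered torrent fields, as they are in stock pyrocore); a '-' prefix flips the order for that field only:
# Hypothetical sketch -- plain namedtuples stand in for torrent items.
import collections
from pyrocore.torrent.formatting import validate_sort_fields

Item = collections.namedtuple('Item', 'alias size')
items = [Item('a', 3), Item('b', 1), Item('a', 2)]
sort_key = validate_sort_fields('alias,-size')  # alias ascending, size descending
print(sorted(items, key=sort_key))
# [Item(alias='a', size=3), Item(alias='a', size=2), Item(alias='b', size=1)]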
|
7,477
|
pyroscope/pyrocore
|
src/pyrocore/torrent/formatting.py
|
OutputMapping.formatter_help
|
def formatter_help(cls):
""" Return a list of format specifiers and their documentation.
"""
result = [("raw", "Switch off the default field formatter.")]
for name, method in globals().items():
if name.startswith("fmt_"):
result.append((name[4:], method.__doc__.strip()))
return result
|
python
|
def formatter_help(cls):
""" Return a list of format specifiers and their documentation.
"""
result = [("raw", "Switch off the default field formatter.")]
for name, method in globals().items():
if name.startswith("fmt_"):
result.append((name[4:], method.__doc__.strip()))
return result
|
[
"def",
"formatter_help",
"(",
"cls",
")",
":",
"result",
"=",
"[",
"(",
"\"raw\"",
",",
"\"Switch off the default field formatter.\"",
")",
"]",
"for",
"name",
",",
"method",
"in",
"globals",
"(",
")",
".",
"items",
"(",
")",
":",
"if",
"name",
".",
"startswith",
"(",
"\"fmt_\"",
")",
":",
"result",
".",
"append",
"(",
"(",
"name",
"[",
"4",
":",
"]",
",",
"method",
".",
"__doc__",
".",
"strip",
"(",
")",
")",
")",
"return",
"result"
] |
Return a list of format specifiers and their documentation.
|
[
"Return",
"a",
"list",
"of",
"format",
"specifiers",
"and",
"their",
"documentation",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/formatting.py#L138-L147
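A sketch of how this lends itself to a help listing (assuming OutputMapping is importable from the formatting module shown above and formatter_help is exposed as a classmethod):
# Hypothetical sketch for printing the available format specifiers.
from pyrocore.torrent.formatting import OutputMapping

for name, doc in OutputMapping.formatter_help():
    print('%-10s %s' % (name, doc))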
|
7,478
|
wroberts/pytimeparse
|
pytimeparse/timeparse.py
|
timeparse
|
def timeparse(sval, granularity='seconds'):
'''
Parse a time expression, returning it as a number of seconds. If
possible, the return value will be an `int`; if this is not
possible, the return will be a `float`. Returns `None` if a time
expression cannot be parsed from the given string.
Arguments:
- `sval`: the string value to parse
>>> timeparse('1:24')
84
>>> timeparse(':22')
22
>>> timeparse('1 minute, 24 secs')
84
>>> timeparse('1m24s')
84
>>> timeparse('1.2 minutes')
72
>>> timeparse('1.2 seconds')
1.2
Time expressions can be signed.
>>> timeparse('- 1 minute')
-60
>>> timeparse('+ 1 minute')
60
If granularity is specified as ``minutes``, then ambiguous digits following
a colon will be interpreted as minutes; otherwise they are considered seconds.
>>> timeparse('1:30')
90
>>> timeparse('1:30', granularity='minutes')
5400
'''
match = COMPILED_SIGN.match(sval)
sign = -1 if match.groupdict()['sign'] == '-' else 1
sval = match.groupdict()['unsigned']
for timefmt in COMPILED_TIMEFORMATS:
match = timefmt.match(sval)
if match and match.group(0).strip():
mdict = match.groupdict()
if granularity == 'minutes':
mdict = _interpret_as_minutes(sval, mdict)
# if all of the fields are integer numbers
if all(v.isdigit() for v in list(mdict.values()) if v):
return sign * sum([MULTIPLIERS[k] * int(v, 10) for (k, v) in
list(mdict.items()) if v is not None])
# if SECS is an integer number
elif ('secs' not in mdict or
mdict['secs'] is None or
mdict['secs'].isdigit()):
# we will return an integer
return (
sign * int(sum([MULTIPLIERS[k] * float(v) for (k, v) in
list(mdict.items()) if k != 'secs' and v is not None])) +
(int(mdict['secs'], 10) if mdict['secs'] else 0))
else:
# SECS is a float, we will return a float
return sign * sum([MULTIPLIERS[k] * float(v) for (k, v) in
list(mdict.items()) if v is not None])
|
python
|
def timeparse(sval, granularity='seconds'):
'''
Parse a time expression, returning it as a number of seconds. If
possible, the return value will be an `int`; if this is not
possible, the return will be a `float`. Returns `None` if a time
expression cannot be parsed from the given string.
Arguments:
- `sval`: the string value to parse
>>> timeparse('1:24')
84
>>> timeparse(':22')
22
>>> timeparse('1 minute, 24 secs')
84
>>> timeparse('1m24s')
84
>>> timeparse('1.2 minutes')
72
>>> timeparse('1.2 seconds')
1.2
Time expressions can be signed.
>>> timeparse('- 1 minute')
-60
>>> timeparse('+ 1 minute')
60
If granularity is specified as ``minutes``, then ambiguous digits following
a colon will be interpreted as minutes; otherwise they are considered seconds.
>>> timeparse('1:30')
90
>>> timeparse('1:30', granularity='minutes')
5400
'''
match = COMPILED_SIGN.match(sval)
sign = -1 if match.groupdict()['sign'] == '-' else 1
sval = match.groupdict()['unsigned']
for timefmt in COMPILED_TIMEFORMATS:
match = timefmt.match(sval)
if match and match.group(0).strip():
mdict = match.groupdict()
if granularity == 'minutes':
mdict = _interpret_as_minutes(sval, mdict)
# if all of the fields are integer numbers
if all(v.isdigit() for v in list(mdict.values()) if v):
return sign * sum([MULTIPLIERS[k] * int(v, 10) for (k, v) in
list(mdict.items()) if v is not None])
# if SECS is an integer number
elif ('secs' not in mdict or
mdict['secs'] is None or
mdict['secs'].isdigit()):
# we will return an integer
return (
sign * int(sum([MULTIPLIERS[k] * float(v) for (k, v) in
list(mdict.items()) if k != 'secs' and v is not None])) +
(int(mdict['secs'], 10) if mdict['secs'] else 0))
else:
# SECS is a float, we will return a float
return sign * sum([MULTIPLIERS[k] * float(v) for (k, v) in
list(mdict.items()) if v is not None])
|
[
"def",
"timeparse",
"(",
"sval",
",",
"granularity",
"=",
"'seconds'",
")",
":",
"match",
"=",
"COMPILED_SIGN",
".",
"match",
"(",
"sval",
")",
"sign",
"=",
"-",
"1",
"if",
"match",
".",
"groupdict",
"(",
")",
"[",
"'sign'",
"]",
"==",
"'-'",
"else",
"1",
"sval",
"=",
"match",
".",
"groupdict",
"(",
")",
"[",
"'unsigned'",
"]",
"for",
"timefmt",
"in",
"COMPILED_TIMEFORMATS",
":",
"match",
"=",
"timefmt",
".",
"match",
"(",
"sval",
")",
"if",
"match",
"and",
"match",
".",
"group",
"(",
"0",
")",
".",
"strip",
"(",
")",
":",
"mdict",
"=",
"match",
".",
"groupdict",
"(",
")",
"if",
"granularity",
"==",
"'minutes'",
":",
"mdict",
"=",
"_interpret_as_minutes",
"(",
"sval",
",",
"mdict",
")",
"# if all of the fields are integer numbers",
"if",
"all",
"(",
"v",
".",
"isdigit",
"(",
")",
"for",
"v",
"in",
"list",
"(",
"mdict",
".",
"values",
"(",
")",
")",
"if",
"v",
")",
":",
"return",
"sign",
"*",
"sum",
"(",
"[",
"MULTIPLIERS",
"[",
"k",
"]",
"*",
"int",
"(",
"v",
",",
"10",
")",
"for",
"(",
"k",
",",
"v",
")",
"in",
"list",
"(",
"mdict",
".",
"items",
"(",
")",
")",
"if",
"v",
"is",
"not",
"None",
"]",
")",
"# if SECS is an integer number",
"elif",
"(",
"'secs'",
"not",
"in",
"mdict",
"or",
"mdict",
"[",
"'secs'",
"]",
"is",
"None",
"or",
"mdict",
"[",
"'secs'",
"]",
".",
"isdigit",
"(",
")",
")",
":",
"# we will return an integer",
"return",
"(",
"sign",
"*",
"int",
"(",
"sum",
"(",
"[",
"MULTIPLIERS",
"[",
"k",
"]",
"*",
"float",
"(",
"v",
")",
"for",
"(",
"k",
",",
"v",
")",
"in",
"list",
"(",
"mdict",
".",
"items",
"(",
")",
")",
"if",
"k",
"!=",
"'secs'",
"and",
"v",
"is",
"not",
"None",
"]",
")",
")",
"+",
"(",
"int",
"(",
"mdict",
"[",
"'secs'",
"]",
",",
"10",
")",
"if",
"mdict",
"[",
"'secs'",
"]",
"else",
"0",
")",
")",
"else",
":",
"# SECS is a float, we will return a float",
"return",
"sign",
"*",
"sum",
"(",
"[",
"MULTIPLIERS",
"[",
"k",
"]",
"*",
"float",
"(",
"v",
")",
"for",
"(",
"k",
",",
"v",
")",
"in",
"list",
"(",
"mdict",
".",
"items",
"(",
")",
")",
"if",
"v",
"is",
"not",
"None",
"]",
")"
] |
Parse a time expression, returning it as a number of seconds. If
possible, the return value will be an `int`; if this is not
possible, the return will be a `float`. Returns `None` if a time
expression cannot be parsed from the given string.
Arguments:
- `sval`: the string value to parse
>>> timeparse('1:24')
84
>>> timeparse(':22')
22
>>> timeparse('1 minute, 24 secs')
84
>>> timeparse('1m24s')
84
>>> timeparse('1.2 minutes')
72
>>> timeparse('1.2 seconds')
1.2
Time expressions can be signed.
>>> timeparse('- 1 minute')
-60
>>> timeparse('+ 1 minute')
60
If granularity is specified as ``minutes``, then ambiguous digits following
a colon will be interpreted as minutes; otherwise they are considered seconds.
>>> timeparse('1:30')
90
>>> timeparse('1:30', granularity='minutes')
5400
|
[
"Parse",
"a",
"time",
"expression",
"returning",
"it",
"as",
"a",
"number",
"of",
"seconds",
".",
"If",
"possible",
"the",
"return",
"value",
"will",
"be",
"an",
"int",
";",
"if",
"this",
"is",
"not",
"possible",
"the",
"return",
"will",
"be",
"a",
"float",
".",
"Returns",
"None",
"if",
"a",
"time",
"expression",
"cannot",
"be",
"parsed",
"from",
"the",
"given",
"string",
"."
] |
dc7e783216b98a04d3f749bd82c863d6d7c41f6e
|
https://github.com/wroberts/pytimeparse/blob/dc7e783216b98a04d3f749bd82c863d6d7c41f6e/pytimeparse/timeparse.py#L118-L181
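A few worked values beyond the doctests above (standard pytimeparse unit multipliers: 3600 s/hour, 60 s/minute, 86400 s/day):
from pytimeparse.timeparse import timeparse

assert timeparse('1h2m3s') == 3723       # 3600 + 120 + 3
assert timeparse('1.5 days') == 129600   # int(1.5 * 86400); secs absent, so still an int
assert timeparse('2:30', granularity='minutes') == 9000  # read as 2h30m, not 2m30s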
|
7,479
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
get_client
|
def get_client(project_id=None, credentials=None,
service_url=None, service_account=None,
private_key=None, private_key_file=None,
json_key=None, json_key_file=None,
readonly=True, swallow_results=True,
num_retries=0):
"""Return a singleton instance of BigQueryClient. Either
AssertionCredentials or a service account and private key combination need
to be provided in order to authenticate requests to BigQuery.
Parameters
----------
project_id : str, optional
The BigQuery project id, required unless json_key or json_key_file is
provided.
credentials : oauth2client.client.SignedJwtAssertionCredentials, optional
AssertionCredentials instance to authenticate requests to BigQuery
(optional, must provide `service_account` and (`private_key` or
`private_key_file`) or (`json_key` or `json_key_file`) if not included)
service_url : str, optional
A URI string template pointing to the location of Google's API
discovery service. Requires two parameters {api} and {apiVersion} that
when filled in produce an absolute URI to the discovery document for
that service. If not set then the default googleapiclient discovery URI
is used. See `credentials`
service_account : str, optional
The Google API service account name. See `credentials`
private_key : str, optional
The private key associated with the service account in PKCS12 or PEM
format. See `credentials`
private_key_file : str, optional
The name of the file containing the private key associated with the
service account in PKCS12 or PEM format. See `credentials`
json_key : dict, optional
The JSON key associated with the service account. See `credentials`
json_key_file : str, optional
The name of the JSON key file associated with the service account. See
`credentials`.
readonly : bool
Bool indicating if BigQuery access is read-only. Has no effect if
credentials are provided. Default True.
swallow_results : bool
If set to False, then return the actual response value instead of
converting to boolean. Default True.
num_retries : int, optional
The number of times to retry the request. Default 0 (no retry).
Returns
-------
BigQueryClient
An instance of the BigQuery client.
"""
if not credentials:
assert (service_account and (private_key or private_key_file)) or (
json_key or json_key_file), \
'Must provide AssertionCredentials or service account and P12 key\
or JSON key'
if not project_id:
assert json_key or json_key_file, \
'Must provide project_id unless json_key or json_key_file is\
provided'
if service_url is None:
service_url = DISCOVERY_URI
scope = BIGQUERY_SCOPE_READ_ONLY if readonly else BIGQUERY_SCOPE
if private_key_file:
credentials = _credentials().from_p12_keyfile(service_account,
private_key_file,
scopes=scope)
if private_key:
try:
if isinstance(private_key, basestring):
private_key = private_key.decode('utf-8')
except NameError:
# python3 -- private_key is already unicode
pass
credentials = _credentials().from_p12_keyfile_buffer(
service_account,
StringIO(private_key),
scopes=scope)
if json_key_file:
with open(json_key_file, 'r') as key_file:
json_key = json.load(key_file)
if json_key:
credentials = _credentials().from_json_keyfile_dict(json_key,
scopes=scope)
if not project_id:
project_id = json_key['project_id']
bq_service = _get_bq_service(credentials=credentials,
service_url=service_url)
return BigQueryClient(bq_service, project_id, swallow_results,
num_retries)
|
python
|
def get_client(project_id=None, credentials=None,
service_url=None, service_account=None,
private_key=None, private_key_file=None,
json_key=None, json_key_file=None,
readonly=True, swallow_results=True,
num_retries=0):
"""Return a singleton instance of BigQueryClient. Either
AssertionCredentials or a service account and private key combination need
to be provided in order to authenticate requests to BigQuery.
Parameters
----------
project_id : str, optional
The BigQuery project id, required unless json_key or json_key_file is
provided.
credentials : oauth2client.client.SignedJwtAssertionCredentials, optional
AssertionCredentials instance to authenticate requests to BigQuery
(optional, must provide `service_account` and (`private_key` or
`private_key_file`) or (`json_key` or `json_key_file`) if not included)
service_url : str, optional
A URI string template pointing to the location of Google's API
discovery service. Requires two parameters {api} and {apiVersion} that
when filled in produce an absolute URI to the discovery document for
that service. If not set then the default googleapiclient discovery URI
is used. See `credentials`
service_account : str, optional
The Google API service account name. See `credentials`
private_key : str, optional
The private key associated with the service account in PKCS12 or PEM
format. See `credentials`
private_key_file : str, optional
The name of the file containing the private key associated with the
service account in PKCS12 or PEM format. See `credentials`
json_key : dict, optional
The JSON key associated with the service account. See `credentials`
json_key_file : str, optional
The name of the JSON key file associated with the service account. See
`credentials`.
readonly : bool
Bool indicating if BigQuery access is read-only. Has no effect if
credentials are provided. Default True.
swallow_results : bool
If set to False, then return the actual response value instead of
converting to boolean. Default True.
num_retries : int, optional
The number of times to retry the request. Default 0 (no retry).
Returns
-------
BigQueryClient
An instance of the BigQuery client.
"""
if not credentials:
assert (service_account and (private_key or private_key_file)) or (
json_key or json_key_file), \
'Must provide AssertionCredentials or service account and P12 key\
or JSON key'
if not project_id:
assert json_key or json_key_file, \
'Must provide project_id unless json_key or json_key_file is\
provided'
if service_url is None:
service_url = DISCOVERY_URI
scope = BIGQUERY_SCOPE_READ_ONLY if readonly else BIGQUERY_SCOPE
if private_key_file:
credentials = _credentials().from_p12_keyfile(service_account,
private_key_file,
scopes=scope)
if private_key:
try:
if isinstance(private_key, basestring):
private_key = private_key.decode('utf-8')
except NameError:
# python3 -- private_key is already unicode
pass
credentials = _credentials().from_p12_keyfile_buffer(
service_account,
StringIO(private_key),
scopes=scope)
if json_key_file:
with open(json_key_file, 'r') as key_file:
json_key = json.load(key_file)
if json_key:
credentials = _credentials().from_json_keyfile_dict(json_key,
scopes=scope)
if not project_id:
project_id = json_key['project_id']
bq_service = _get_bq_service(credentials=credentials,
service_url=service_url)
return BigQueryClient(bq_service, project_id, swallow_results,
num_retries)
|
[
"def",
"get_client",
"(",
"project_id",
"=",
"None",
",",
"credentials",
"=",
"None",
",",
"service_url",
"=",
"None",
",",
"service_account",
"=",
"None",
",",
"private_key",
"=",
"None",
",",
"private_key_file",
"=",
"None",
",",
"json_key",
"=",
"None",
",",
"json_key_file",
"=",
"None",
",",
"readonly",
"=",
"True",
",",
"swallow_results",
"=",
"True",
",",
"num_retries",
"=",
"0",
")",
":",
"if",
"not",
"credentials",
":",
"assert",
"(",
"service_account",
"and",
"(",
"private_key",
"or",
"private_key_file",
")",
")",
"or",
"(",
"json_key",
"or",
"json_key_file",
")",
",",
"'Must provide AssertionCredentials or service account and P12 key\\\n or JSON key'",
"if",
"not",
"project_id",
":",
"assert",
"json_key",
"or",
"json_key_file",
",",
"'Must provide project_id unless json_key or json_key_file is\\\n provided'",
"if",
"service_url",
"is",
"None",
":",
"service_url",
"=",
"DISCOVERY_URI",
"scope",
"=",
"BIGQUERY_SCOPE_READ_ONLY",
"if",
"readonly",
"else",
"BIGQUERY_SCOPE",
"if",
"private_key_file",
":",
"credentials",
"=",
"_credentials",
"(",
")",
".",
"from_p12_keyfile",
"(",
"service_account",
",",
"private_key_file",
",",
"scopes",
"=",
"scope",
")",
"if",
"private_key",
":",
"try",
":",
"if",
"isinstance",
"(",
"private_key",
",",
"basestring",
")",
":",
"private_key",
"=",
"private_key",
".",
"decode",
"(",
"'utf-8'",
")",
"except",
"NameError",
":",
"# python3 -- private_key is already unicode",
"pass",
"credentials",
"=",
"_credentials",
"(",
")",
".",
"from_p12_keyfile_buffer",
"(",
"service_account",
",",
"StringIO",
"(",
"private_key",
")",
",",
"scopes",
"=",
"scope",
")",
"if",
"json_key_file",
":",
"with",
"open",
"(",
"json_key_file",
",",
"'r'",
")",
"as",
"key_file",
":",
"json_key",
"=",
"json",
".",
"load",
"(",
"key_file",
")",
"if",
"json_key",
":",
"credentials",
"=",
"_credentials",
"(",
")",
".",
"from_json_keyfile_dict",
"(",
"json_key",
",",
"scopes",
"=",
"scope",
")",
"if",
"not",
"project_id",
":",
"project_id",
"=",
"json_key",
"[",
"'project_id'",
"]",
"bq_service",
"=",
"_get_bq_service",
"(",
"credentials",
"=",
"credentials",
",",
"service_url",
"=",
"service_url",
")",
"return",
"BigQueryClient",
"(",
"bq_service",
",",
"project_id",
",",
"swallow_results",
",",
"num_retries",
")"
] |
Return a singleton instance of BigQueryClient. Either
AssertionCredentials or a service account and private key combination need
to be provided in order to authenticate requests to BigQuery.
Parameters
----------
project_id : str, optional
The BigQuery project id, required unless json_key or json_key_file is
provided.
credentials : oauth2client.client.SignedJwtAssertionCredentials, optional
AssertionCredentials instance to authenticate requests to BigQuery
(optional, must provide `service_account` and (`private_key` or
`private_key_file`) or (`json_key` or `json_key_file`) if not included)
service_url : str, optional
A URI string template pointing to the location of Google's API
discovery service. Requires two parameters {api} and {apiVersion} that
when filled in produce an absolute URI to the discovery document for
that service. If not set then the default googleapiclient discovery URI
is used. See `credentials`
service_account : str, optional
The Google API service account name. See `credentials`
private_key : str, optional
The private key associated with the service account in PKCS12 or PEM
format. See `credentials`
private_key_file : str, optional
The name of the file containing the private key associated with the
service account in PKCS12 or PEM format. See `credentials`
json_key : dict, optional
The JSON key associated with the service account. See `credentials`
json_key_file : str, optional
The name of the JSON key file associated with the service account. See
`credentials`.
readonly : bool
Bool indicating if BigQuery access is read-only. Has no effect if
credentials are provided. Default True.
swallow_results : bool
If set to False, then return the actual response value instead of
converting to boolean. Default True.
num_retries : int, optional
The number of times to retry the request. Default 0 (no retry).
Returns
-------
BigQueryClient
An instance of the BigQuery client.
|
[
"Return",
"a",
"singleton",
"instance",
"of",
"BigQueryClient",
".",
"Either",
"AssertionCredentials",
"or",
"a",
"service",
"account",
"and",
"private",
"key",
"combination",
"need",
"to",
"be",
"provided",
"in",
"order",
"to",
"authenticate",
"requests",
"to",
"BigQuery",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L54-L155
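A minimal construction sketch using a service-account JSON key file ("key.json" is a placeholder path); project_id is then read from the key itself:
from bigquery import get_client

# Read-only client; the scope falls back to BIGQUERY_SCOPE_READ_ONLY.
client = get_client(json_key_file='key.json', readonly=True)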
|
7,480
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
get_projects
|
def get_projects(bq_service):
"""Given the BigQuery service, return data about all projects."""
projects_request = bq_service.projects().list().execute()
projects = []
for project in projects_request.get('projects', []):
project_data = {
'id': project['id'],
'name': project['friendlyName']
}
projects.append(project_data)
return projects
|
python
|
def get_projects(bq_service):
"""Given the BigQuery service, return data about all projects."""
projects_request = bq_service.projects().list().execute()
projects = []
for project in projects_request.get('projects', []):
project_data = {
'id': project['id'],
'name': project['friendlyName']
}
projects.append(project_data)
return projects
|
[
"def",
"get_projects",
"(",
"bq_service",
")",
":",
"projects_request",
"=",
"bq_service",
".",
"projects",
"(",
")",
".",
"list",
"(",
")",
".",
"execute",
"(",
")",
"projects",
"=",
"[",
"]",
"for",
"project",
"in",
"projects_request",
".",
"get",
"(",
"'projects'",
",",
"[",
"]",
")",
":",
"project_data",
"=",
"{",
"'id'",
":",
"project",
"[",
"'id'",
"]",
",",
"'name'",
":",
"project",
"[",
"'friendlyName'",
"]",
"}",
"projects",
".",
"append",
"(",
"project_data",
")",
"return",
"projects"
] |
Given the BigQuery service, return data about all projects.
|
[
"Given",
"the",
"BigQuery",
"service",
"return",
"data",
"about",
"all",
"projects",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L158-L169
|
7,481
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
_get_bq_service
|
def _get_bq_service(credentials=None, service_url=None):
"""Construct an authorized BigQuery service object."""
assert credentials, 'Must provide ServiceAccountCredentials'
http = credentials.authorize(Http())
service = build(
'bigquery',
'v2',
http=http,
discoveryServiceUrl=service_url,
cache_discovery=False
)
return service
|
python
|
def _get_bq_service(credentials=None, service_url=None):
"""Construct an authorized BigQuery service object."""
assert credentials, 'Must provide ServiceAccountCredentials'
http = credentials.authorize(Http())
service = build(
'bigquery',
'v2',
http=http,
discoveryServiceUrl=service_url,
cache_discovery=False
)
return service
|
[
"def",
"_get_bq_service",
"(",
"credentials",
"=",
"None",
",",
"service_url",
"=",
"None",
")",
":",
"assert",
"credentials",
",",
"'Must provide ServiceAccountCredentials'",
"http",
"=",
"credentials",
".",
"authorize",
"(",
"Http",
"(",
")",
")",
"service",
"=",
"build",
"(",
"'bigquery'",
",",
"'v2'",
",",
"http",
"=",
"http",
",",
"discoveryServiceUrl",
"=",
"service_url",
",",
"cache_discovery",
"=",
"False",
")",
"return",
"service"
] |
Construct an authorized BigQuery service object.
|
[
"Construct",
"an",
"authorized",
"BigQuery",
"service",
"object",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L172-L186
|
7,482
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient._submit_query_job
|
def _submit_query_job(self, query_data):
""" Submit a query job to BigQuery.
This is similar to BigQueryClient.query, but gives the user
direct access to the query method on the official BigQuery
python client.
For fine-grained control over a query job, see:
https://google-api-client-libraries.appspot.com/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#query
Parameters
----------
query_data
query object as per "configuration.query" in
https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query
Returns
-------
tuple
job id and query results if query completed. If dry_run is True,
job id will be None and results will be empty if the query is valid
or a dict containing the response if invalid.
Raises
------
BigQueryTimeoutException
On timeout
"""
logger.debug('Submitting query job: %s' % query_data)
job_collection = self.bigquery.jobs()
try:
query_reply = job_collection.query(
projectId=self.project_id, body=query_data).execute(
num_retries=self.num_retries)
except HttpError as e:
if query_data.get("dryRun", False):
return None, json.loads(e.content.decode('utf8'))
raise
job_id = query_reply['jobReference'].get('jobId')
schema = query_reply.get('schema', {'fields': None})['fields']
rows = query_reply.get('rows', [])
job_complete = query_reply.get('jobComplete', False)
# raise exceptions if it's not an async query
# and job is not completed after timeout
if not job_complete and query_data.get("timeoutMs", False):
logger.error('BigQuery job %s timeout' % job_id)
raise BigQueryTimeoutException()
return job_id, [self._transform_row(row, schema) for row in rows]
|
python
|
def _submit_query_job(self, query_data):
""" Submit a query job to BigQuery.
This is similar to BigQueryClient.query, but gives the user
direct access to the query method on the official BigQuery
python client.
For fine-grained control over a query job, see:
https://google-api-client-libraries.appspot.com/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#query
Parameters
----------
query_data
query object as per "configuration.query" in
https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query
Returns
-------
tuple
job id and query results if query completed. If dry_run is True,
job id will be None and results will be empty if the query is valid
or a dict containing the response if invalid.
Raises
------
BigQueryTimeoutException
On timeout
"""
logger.debug('Submitting query job: %s' % query_data)
job_collection = self.bigquery.jobs()
try:
query_reply = job_collection.query(
projectId=self.project_id, body=query_data).execute(
num_retries=self.num_retries)
except HttpError as e:
if query_data.get("dryRun", False):
return None, json.loads(e.content.decode('utf8'))
raise
job_id = query_reply['jobReference'].get('jobId')
schema = query_reply.get('schema', {'fields': None})['fields']
rows = query_reply.get('rows', [])
job_complete = query_reply.get('jobComplete', False)
# raise exceptions if it's not an async query
# and job is not completed after timeout
if not job_complete and query_data.get("timeoutMs", False):
logger.error('BigQuery job %s timeout' % job_id)
raise BigQueryTimeoutException()
return job_id, [self._transform_row(row, schema) for row in rows]
|
[
"def",
"_submit_query_job",
"(",
"self",
",",
"query_data",
")",
":",
"logger",
".",
"debug",
"(",
"'Submitting query job: %s'",
"%",
"query_data",
")",
"job_collection",
"=",
"self",
".",
"bigquery",
".",
"jobs",
"(",
")",
"try",
":",
"query_reply",
"=",
"job_collection",
".",
"query",
"(",
"projectId",
"=",
"self",
".",
"project_id",
",",
"body",
"=",
"query_data",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"except",
"HttpError",
"as",
"e",
":",
"if",
"query_data",
".",
"get",
"(",
"\"dryRun\"",
",",
"False",
")",
":",
"return",
"None",
",",
"json",
".",
"loads",
"(",
"e",
".",
"content",
".",
"decode",
"(",
"'utf8'",
")",
")",
"raise",
"job_id",
"=",
"query_reply",
"[",
"'jobReference'",
"]",
".",
"get",
"(",
"'jobId'",
")",
"schema",
"=",
"query_reply",
".",
"get",
"(",
"'schema'",
",",
"{",
"'fields'",
":",
"None",
"}",
")",
"[",
"'fields'",
"]",
"rows",
"=",
"query_reply",
".",
"get",
"(",
"'rows'",
",",
"[",
"]",
")",
"job_complete",
"=",
"query_reply",
".",
"get",
"(",
"'jobComplete'",
",",
"False",
")",
"# raise exceptions if it's not an async query",
"# and job is not completed after timeout",
"if",
"not",
"job_complete",
"and",
"query_data",
".",
"get",
"(",
"\"timeoutMs\"",
",",
"False",
")",
":",
"logger",
".",
"error",
"(",
"'BigQuery job %s timeout'",
"%",
"job_id",
")",
"raise",
"BigQueryTimeoutException",
"(",
")",
"return",
"job_id",
",",
"[",
"self",
".",
"_transform_row",
"(",
"row",
",",
"schema",
")",
"for",
"row",
"in",
"rows",
"]"
] |
Submit a query job to BigQuery.
This is similar to BigQueryClient.query, but gives the user
direct access to the query method on the official BigQuery
python client.
For fine-grained control over a query job, see:
https://google-api-client-libraries.appspot.com/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#query
Parameters
----------
query_data
query object as per "configuration.query" in
https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query
Returns
-------
tuple
job id and query results if query completed. If dry_run is True,
job id will be None and results will be empty if the query is valid
or a dict containing the response if invalid.
Raises
------
BigQueryTimeoutException
On timeout
|
[
"Submit",
"a",
"query",
"job",
"to",
"BigQuery",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L226-L279
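A dry-run sketch against this private helper (assuming `client` is an existing BigQueryClient); the body follows the "configuration.query" shape linked above:
# Hypothetical sketch: validate SQL without running it.
query_data = {
    'query': 'SELECT word FROM [publicdata:samples.shakespeare] LIMIT 1',
    'dryRun': True,
}
job_id, results = client._submit_query_job(query_data)
# job_id is None for a dry run; results is empty for valid SQL,
# or the decoded error response for invalid SQL.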
|
7,483
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient._insert_job
|
def _insert_job(self, body_object):
""" Submit a job to BigQuery
Direct proxy to the insert() method of the official BigQuery
python client.
Able to submit load, link, query, copy, or extract jobs.
For more details, see:
https://google-api-client-libraries.appspot.com/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#insert
Parameters
----------
body_object : body object passed to bigquery.jobs().insert()
Returns
-------
response of the bigquery.jobs().insert().execute() call
Raises
------
BigQueryTimeoutException on timeout
"""
logger.debug('Submitting job: %s' % body_object)
job_collection = self.bigquery.jobs()
return job_collection.insert(
projectId=self.project_id,
body=body_object
).execute(num_retries=self.num_retries)
|
python
|
def _insert_job(self, body_object):
""" Submit a job to BigQuery
Direct proxy to the insert() method of the official BigQuery
python client.
Able to submit load, link, query, copy, or extract jobs.
For more details, see:
https://google-api-client-libraries.appspot.com/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#insert
Parameters
----------
body_object : body object passed to bigquery.jobs().insert()
Returns
-------
response of the bigquery.jobs().insert().execute() call
Raises
------
BigQueryTimeoutException on timeout
"""
logger.debug('Submitting job: %s' % body_object)
job_collection = self.bigquery.jobs()
return job_collection.insert(
projectId=self.project_id,
body=body_object
).execute(num_retries=self.num_retries)
|
[
"def",
"_insert_job",
"(",
"self",
",",
"body_object",
")",
":",
"logger",
".",
"debug",
"(",
"'Submitting job: %s'",
"%",
"body_object",
")",
"job_collection",
"=",
"self",
".",
"bigquery",
".",
"jobs",
"(",
")",
"return",
"job_collection",
".",
"insert",
"(",
"projectId",
"=",
"self",
".",
"project_id",
",",
"body",
"=",
"body_object",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")"
] |
Submit a job to BigQuery
Direct proxy to the insert() method of the official BigQuery
python client.
Able to submit load, link, query, copy, or extract jobs.
For more details, see:
https://google-api-client-libraries.appspot.com/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#insert
Parameters
----------
body_object : body object passed to bigquery.jobs().insert()
Returns
-------
response of the bigquery.jobs().insert().execute() call
Raises
------
BigQueryTimeoutException on timeout
|
[
"Submit",
"a",
"job",
"to",
"BigQuery"
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L302-L333
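A sketch of a table-copy job routed through this proxy (dataset and table names are placeholders; the body follows the BigQuery v2 jobs.insert format):
# Hypothetical sketch, assuming `client` is an existing BigQueryClient.
body = {
    'configuration': {
        'copy': {
            'sourceTable': {'projectId': client.project_id,
                            'datasetId': 'src_dataset', 'tableId': 'src_table'},
            'destinationTable': {'projectId': client.project_id,
                                 'datasetId': 'dst_dataset', 'tableId': 'dst_table'},
        }
    }
}
job = client._insert_job(body)
print(job['jobReference']['jobId'])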
|
7,484
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.query
|
def query(self, query, max_results=None, timeout=0, dry_run=False, use_legacy_sql=None, external_udf_uris=None):
"""Submit a query to BigQuery.
Parameters
----------
query : str
BigQuery query string
max_results : int, optional
The maximum number of rows to return per page of results.
timeout : float, optional
How long to wait for the query to complete, in seconds, before
the request times out and returns.
dry_run : bool, optional
If True, the query isn't actually run. A valid query will return an
empty response, while an invalid one will return the same error
message it would if it wasn't a dry run.
use_legacy_sql : bool, optional. Default True.
If False, the query will use BigQuery's standard SQL (https://cloud.google.com/bigquery/sql-reference/)
external_udf_uris : list, optional
Contains external UDF URIs. If given, URIs must be Google Cloud
Storage and have .js extensions.
Returns
-------
tuple
(job id, query results) if the query completed. If dry_run is True,
job id will be None and results will be empty if the query is valid
or a ``dict`` containing the response if invalid.
Raises
------
BigQueryTimeoutException
on timeout
"""
logger.debug('Executing query: %s' % query)
query_data = {
'query': query,
'timeoutMs': timeout * 1000,
'dryRun': dry_run,
'maxResults': max_results
}
if use_legacy_sql is not None:
query_data['useLegacySql'] = use_legacy_sql
if external_udf_uris:
query_data['userDefinedFunctionResources'] = \
[ {'resourceUri': u} for u in external_udf_uris ]
return self._submit_query_job(query_data)
|
python
|
def query(self, query, max_results=None, timeout=0, dry_run=False, use_legacy_sql=None, external_udf_uris=None):
"""Submit a query to BigQuery.
Parameters
----------
query : str
BigQuery query string
max_results : int, optional
The maximum number of rows to return per page of results.
timeout : float, optional
How long to wait for the query to complete, in seconds, before
the request times out and returns.
dry_run : bool, optional
If True, the query isn't actually run. A valid query will return an
empty response, while an invalid one will return the same error
message it would if it wasn't a dry run.
use_legacy_sql : bool, optional. Default True.
If False, the query will use BigQuery's standard SQL (https://cloud.google.com/bigquery/sql-reference/)
external_udf_uris : list, optional
Contains external UDF URIs. If given, URIs must be Google Cloud
Storage and have .js extensions.
Returns
-------
tuple
(job id, query results) if the query completed. If dry_run is True,
job id will be None and results will be empty if the query is valid
or a ``dict`` containing the response if invalid.
Raises
------
BigQueryTimeoutException
on timeout
"""
logger.debug('Executing query: %s' % query)
query_data = {
'query': query,
'timeoutMs': timeout * 1000,
'dryRun': dry_run,
'maxResults': max_results
}
if use_legacy_sql is not None:
query_data['useLegacySql'] = use_legacy_sql
if external_udf_uris:
query_data['userDefinedFunctionResources'] = \
[ {'resourceUri': u} for u in external_udf_uris ]
return self._submit_query_job(query_data)
|
[
"def",
"query",
"(",
"self",
",",
"query",
",",
"max_results",
"=",
"None",
",",
"timeout",
"=",
"0",
",",
"dry_run",
"=",
"False",
",",
"use_legacy_sql",
"=",
"None",
",",
"external_udf_uris",
"=",
"None",
")",
":",
"logger",
".",
"debug",
"(",
"'Executing query: %s'",
"%",
"query",
")",
"query_data",
"=",
"{",
"'query'",
":",
"query",
",",
"'timeoutMs'",
":",
"timeout",
"*",
"1000",
",",
"'dryRun'",
":",
"dry_run",
",",
"'maxResults'",
":",
"max_results",
"}",
"if",
"use_legacy_sql",
"is",
"not",
"None",
":",
"query_data",
"[",
"'useLegacySql'",
"]",
"=",
"use_legacy_sql",
"if",
"external_udf_uris",
":",
"query_data",
"[",
"'userDefinedFunctionResources'",
"]",
"=",
"[",
"{",
"'resourceUri'",
":",
"u",
"}",
"for",
"u",
"in",
"external_udf_uris",
"]",
"return",
"self",
".",
"_submit_query_job",
"(",
"query_data",
")"
] |
Submit a query to BigQuery.
Parameters
----------
query : str
BigQuery query string
max_results : int, optional
The maximum number of rows to return per page of results.
timeout : float, optional
How long to wait for the query to complete, in seconds, before
the request times out and returns.
dry_run : bool, optional
If True, the query isn't actually run. A valid query will return an
empty response, while an invalid one will return the same error
message it would if it wasn't a dry run.
use_legacy_sql : bool, optional. Default True.
If False, the query will use BigQuery's standard SQL (https://cloud.google.com/bigquery/sql-reference/)
external_udf_uris : list, optional
Contains external UDF URIs. If given, URIs must be Google Cloud
Storage and have .js extensions.
Returns
-------
tuple
(job id, query results) if the query completed. If dry_run is True,
job id will be None and results will be empty if the query is valid
or a ``dict`` containing the response if invalid.
Raises
------
BigQueryTimeoutException
on timeout
|
[
"Submit",
"a",
"query",
"to",
"BigQuery",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L335-L387
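A synchronous usage sketch (assuming `client` is an existing BigQueryClient); with the default timeout=0 the call returns immediately instead, and the job is then polled with check_job():
# Hypothetical sketch: block up to 10 seconds for the result.
job_id, rows = client.query(
    'SELECT word, word_count FROM [publicdata:samples.shakespeare] LIMIT 5',
    timeout=10)
for row in rows:
    print(row)  # each row is a dict keyed by field name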
|
7,485
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.get_query_schema
|
def get_query_schema(self, job_id):
"""Retrieve the schema of a query by job id.
Parameters
----------
job_id : str
The job_id that references a BigQuery query
Returns
-------
list
A ``list`` of ``dict`` objects that represent the schema.
"""
query_reply = self.get_query_results(job_id, offset=0, limit=0)
if not query_reply['jobComplete']:
logger.warning('BigQuery job %s not complete' % job_id)
raise UnfinishedQueryException()
return query_reply['schema']['fields']
|
python
|
def get_query_schema(self, job_id):
"""Retrieve the schema of a query by job id.
Parameters
----------
job_id : str
The job_id that references a BigQuery query
Returns
-------
list
A ``list`` of ``dict`` objects that represent the schema.
"""
query_reply = self.get_query_results(job_id, offset=0, limit=0)
if not query_reply['jobComplete']:
logger.warning('BigQuery job %s not complete' % job_id)
raise UnfinishedQueryException()
return query_reply['schema']['fields']
|
[
"def",
"get_query_schema",
"(",
"self",
",",
"job_id",
")",
":",
"query_reply",
"=",
"self",
".",
"get_query_results",
"(",
"job_id",
",",
"offset",
"=",
"0",
",",
"limit",
"=",
"0",
")",
"if",
"not",
"query_reply",
"[",
"'jobComplete'",
"]",
":",
"logger",
".",
"warning",
"(",
"'BigQuery job %s not complete'",
"%",
"job_id",
")",
"raise",
"UnfinishedQueryException",
"(",
")",
"return",
"query_reply",
"[",
"'schema'",
"]",
"[",
"'fields'",
"]"
] |
Retrieve the schema of a query by job id.
Parameters
----------
job_id : str
The job_id that references a BigQuery query
Returns
-------
list
A ``list`` of ``dict`` objects that represent the schema.
|
[
"Retrieve",
"the",
"schema",
"of",
"a",
"query",
"by",
"job",
"id",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L389-L409
|
7,486
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.get_table_schema
|
def get_table_schema(self, dataset, table, project_id=None):
"""Return the table schema.
Parameters
----------
dataset : str
The dataset containing the `table`.
table : str
The table to get the schema for
project_id: str, optional
The project of the dataset.
Returns
-------
list
A ``list`` of ``dict`` objects that represent the table schema. If
the table doesn't exist, None is returned.
"""
project_id = self._get_project_id(project_id)
try:
result = self.bigquery.tables().get(
projectId=project_id,
tableId=table,
datasetId=dataset).execute(num_retries=self.num_retries)
except HttpError as e:
if int(e.resp['status']) == 404:
logger.warn('Table %s.%s does not exist', dataset, table)
return None
raise
return result['schema']['fields']
|
python
|
def get_table_schema(self, dataset, table, project_id=None):
"""Return the table schema.
Parameters
----------
dataset : str
The dataset containing the `table`.
table : str
The table to get the schema for
project_id: str, optional
The project of the dataset.
Returns
-------
list
A ``list`` of ``dict`` objects that represent the table schema. If
the table doesn't exist, None is returned.
"""
project_id = self._get_project_id(project_id)
try:
result = self.bigquery.tables().get(
projectId=project_id,
tableId=table,
datasetId=dataset).execute(num_retries=self.num_retries)
except HttpError as e:
if int(e.resp['status']) == 404:
logger.warn('Table %s.%s does not exist', dataset, table)
return None
raise
return result['schema']['fields']
|
[
"def",
"get_table_schema",
"(",
"self",
",",
"dataset",
",",
"table",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"try",
":",
"result",
"=",
"self",
".",
"bigquery",
".",
"tables",
"(",
")",
".",
"get",
"(",
"projectId",
"=",
"project_id",
",",
"tableId",
"=",
"table",
",",
"datasetId",
"=",
"dataset",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"except",
"HttpError",
"as",
"e",
":",
"if",
"int",
"(",
"e",
".",
"resp",
"[",
"'status'",
"]",
")",
"==",
"404",
":",
"logger",
".",
"warn",
"(",
"'Table %s.%s does not exist'",
",",
"dataset",
",",
"table",
")",
"return",
"None",
"raise",
"return",
"result",
"[",
"'schema'",
"]",
"[",
"'fields'",
"]"
] |
Return the table schema.
Parameters
----------
dataset : str
The dataset containing the `table`.
table : str
The table to get the schema for
project_id: str, optional
The project of the dataset.
Returns
-------
list
A ``list`` of ``dict`` objects that represent the table schema. If
the table doesn't exist, None is returned.
|
[
"Return",
"the",
"table",
"schema",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L411-L442
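A short sketch (dataset and table names are placeholders); the None return distinguishes a missing table from an empty schema:
# Hypothetical sketch, assuming `client` is an existing BigQueryClient.
schema = client.get_table_schema('my_dataset', 'my_table')
if schema is not None:
    for field in schema:
        print(field['name'], field['type'])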
|
7,487
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.check_job
|
def check_job(self, job_id):
"""Return the state and number of results of a query by job id.
Parameters
----------
job_id : str
The job id of the query to check.
Returns
-------
tuple
(``bool``, ``int``) Whether or not the query has completed and the
total number of rows included in the query table if it has
completed (else 0)
"""
query_reply = self.get_query_results(job_id, offset=0, limit=0)
return (query_reply.get('jobComplete', False),
int(query_reply.get('totalRows', 0)))
|
python
|
def check_job(self, job_id):
"""Return the state and number of results of a query by job id.
Parameters
----------
job_id : str
The job id of the query to check.
Returns
-------
tuple
(``bool``, ``int``) Whether or not the query has completed and the
total number of rows included in the query table if it has
completed (else 0)
"""
query_reply = self.get_query_results(job_id, offset=0, limit=0)
return (query_reply.get('jobComplete', False),
int(query_reply.get('totalRows', 0)))
|
[
"def",
"check_job",
"(",
"self",
",",
"job_id",
")",
":",
"query_reply",
"=",
"self",
".",
"get_query_results",
"(",
"job_id",
",",
"offset",
"=",
"0",
",",
"limit",
"=",
"0",
")",
"return",
"(",
"query_reply",
".",
"get",
"(",
"'jobComplete'",
",",
"False",
")",
",",
"int",
"(",
"query_reply",
".",
"get",
"(",
"'totalRows'",
",",
"0",
")",
")",
")"
] |
Return the state and number of results of a query by job id.
Parameters
----------
job_id : str
The job id of the query to check.
Returns
-------
tuple
(``bool``, ``int``) Whether or not the query has completed and the
total number of rows included in the query table if it has
completed (else 0)
|
[
"Return",
"the",
"state",
"and",
"number",
"of",
"results",
"of",
"a",
"query",
"by",
"job",
"id",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L444-L463
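A polling sketch for an asynchronous query (assuming job_id came from a query() call with the default timeout=0):
import time

# Hypothetical fixed-interval poll; production code would back off.
complete, row_count = client.check_job(job_id)
while not complete:
    time.sleep(1)
    complete, row_count = client.check_job(job_id)
print('query produced %d rows' % row_count)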
|
7,488
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.get_query_rows
|
def get_query_rows(self, job_id, offset=None, limit=None, timeout=0):
"""Retrieve a list of rows from a query table by job id.
This method will append results from multiple pages together. If you
want to manually page through results, you can use `get_query_results`
method directly.
Parameters
----------
job_id : str
The job id that references a BigQuery query.
offset : int, optional
The offset of the rows to pull from BigQuery
limit : int, optional
The number of rows to retrieve from a query table.
timeout : float, optional
Timeout in seconds.
Returns
-------
list
A ``list`` of ``dict`` objects that represent table rows.
"""
# Get query results
query_reply = self.get_query_results(job_id, offset=offset,
limit=limit, timeout=timeout)
if not query_reply['jobComplete']:
logger.warning('BigQuery job %s not complete' % job_id)
raise UnfinishedQueryException()
schema = query_reply["schema"]["fields"]
rows = query_reply.get('rows', [])
page_token = query_reply.get("pageToken")
records = [self._transform_row(row, schema) for row in rows]
# Append to records if there are multiple pages for query results
while page_token and (not limit or len(records) < limit):
query_reply = self.get_query_results(
job_id, offset=offset, limit=limit, page_token=page_token,
timeout=timeout)
page_token = query_reply.get("pageToken")
rows = query_reply.get('rows', [])
records += [self._transform_row(row, schema) for row in rows]
return records[:limit] if limit else records
|
python
|
def get_query_rows(self, job_id, offset=None, limit=None, timeout=0):
"""Retrieve a list of rows from a query table by job id.
This method will append results from multiple pages together. If you
want to manually page through results, you can use `get_query_results`
method directly.
Parameters
----------
job_id : str
The job id that references a BigQuery query.
offset : int, optional
The offset of the rows to pull from BigQuery
limit : int, optional
The number of rows to retrieve from a query table.
timeout : float, optional
Timeout in seconds.
Returns
-------
list
A ``list`` of ``dict`` objects that represent table rows.
"""
# Get query results
query_reply = self.get_query_results(job_id, offset=offset,
limit=limit, timeout=timeout)
if not query_reply['jobComplete']:
logger.warning('BigQuery job %s not complete' % job_id)
raise UnfinishedQueryException()
schema = query_reply["schema"]["fields"]
rows = query_reply.get('rows', [])
page_token = query_reply.get("pageToken")
records = [self._transform_row(row, schema) for row in rows]
# Append to records if there are multiple pages for query results
while page_token and (not limit or len(records) < limit):
query_reply = self.get_query_results(
job_id, offset=offset, limit=limit, page_token=page_token,
timeout=timeout)
page_token = query_reply.get("pageToken")
rows = query_reply.get('rows', [])
records += [self._transform_row(row, schema) for row in rows]
return records[:limit] if limit else records
|
[
"def",
"get_query_rows",
"(",
"self",
",",
"job_id",
",",
"offset",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"timeout",
"=",
"0",
")",
":",
"# Get query results",
"query_reply",
"=",
"self",
".",
"get_query_results",
"(",
"job_id",
",",
"offset",
"=",
"offset",
",",
"limit",
"=",
"limit",
",",
"timeout",
"=",
"timeout",
")",
"if",
"not",
"query_reply",
"[",
"'jobComplete'",
"]",
":",
"logger",
".",
"warning",
"(",
"'BigQuery job %s not complete'",
"%",
"job_id",
")",
"raise",
"UnfinishedQueryException",
"(",
")",
"schema",
"=",
"query_reply",
"[",
"\"schema\"",
"]",
"[",
"\"fields\"",
"]",
"rows",
"=",
"query_reply",
".",
"get",
"(",
"'rows'",
",",
"[",
"]",
")",
"page_token",
"=",
"query_reply",
".",
"get",
"(",
"\"pageToken\"",
")",
"records",
"=",
"[",
"self",
".",
"_transform_row",
"(",
"row",
",",
"schema",
")",
"for",
"row",
"in",
"rows",
"]",
"# Append to records if there are multiple pages for query results",
"while",
"page_token",
"and",
"(",
"not",
"limit",
"or",
"len",
"(",
"records",
")",
"<",
"limit",
")",
":",
"query_reply",
"=",
"self",
".",
"get_query_results",
"(",
"job_id",
",",
"offset",
"=",
"offset",
",",
"limit",
"=",
"limit",
",",
"page_token",
"=",
"page_token",
",",
"timeout",
"=",
"timeout",
")",
"page_token",
"=",
"query_reply",
".",
"get",
"(",
"\"pageToken\"",
")",
"rows",
"=",
"query_reply",
".",
"get",
"(",
"'rows'",
",",
"[",
"]",
")",
"records",
"+=",
"[",
"self",
".",
"_transform_row",
"(",
"row",
",",
"schema",
")",
"for",
"row",
"in",
"rows",
"]",
"return",
"records",
"[",
":",
"limit",
"]",
"if",
"limit",
"else",
"records"
] |
Retrieve a list of rows from a query table by job id.
This method will append results from multiple pages together. If you
want to manually page through results, you can use `get_query_results`
method directly.
Parameters
----------
job_id : str
The job id that references a BigQuery query.
offset : int, optional
The offset of the rows to pull from BigQuery
limit : int, optional
The number of rows to retrieve from a query table.
timeout : float, optional
Timeout in seconds.
Returns
-------
list
A ``list`` of ``dict`` objects that represent table rows.
|
[
"Retrieve",
"a",
"list",
"of",
"rows",
"from",
"a",
"query",
"table",
"by",
"job",
"id",
".",
"This",
"method",
"will",
"append",
"results",
"from",
"multiple",
"pages",
"together",
".",
"If",
"you",
"want",
"to",
"manually",
"page",
"through",
"results",
"you",
"can",
"use",
"get_query_results",
"method",
"directly",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L465-L508
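A sketch of pulling a bounded result set (assuming the job has finished, otherwise UnfinishedQueryException is raised); paging via pageToken happens transparently:
# Hypothetical sketch, assuming `client` is an existing BigQueryClient.
rows = client.get_query_rows(job_id, limit=1000)
print('%d rows fetched' % len(rows))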
|
7,489
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.check_dataset
|
def check_dataset(self, dataset_id, project_id=None):
"""Check to see if a dataset exists.
Parameters
----------
dataset_id : str
Dataset unique id
project_id: str, optional
The project the dataset is in
Returns
-------
bool
True if dataset at `dataset_id` exists, else False
"""
dataset = self.get_dataset(dataset_id, project_id)
return bool(dataset)
|
python
|
def check_dataset(self, dataset_id, project_id=None):
"""Check to see if a dataset exists.
Parameters
----------
dataset_id : str
Dataset unique id
project_id: str, optional
The project the dataset is in
Returns
-------
bool
True if dataset at `dataset_id` exists, else False
"""
dataset = self.get_dataset(dataset_id, project_id)
return bool(dataset)
|
[
"def",
"check_dataset",
"(",
"self",
",",
"dataset_id",
",",
"project_id",
"=",
"None",
")",
":",
"dataset",
"=",
"self",
".",
"get_dataset",
"(",
"dataset_id",
",",
"project_id",
")",
"return",
"bool",
"(",
"dataset",
")"
] |
Check to see if a dataset exists.
Parameters
----------
dataset_id : str
Dataset unique id
project_id: str, optional
The project the dataset is in
Returns
-------
bool
True if dataset at `dataset_id` exists, else False
|
[
"Check",
"to",
"see",
"if",
"a",
"dataset",
"exists",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L510-L526
|
7,490
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.get_dataset
|
def get_dataset(self, dataset_id, project_id=None):
"""Retrieve a dataset if it exists, otherwise return an empty dict.
Parameters
----------
dataset_id : str
Dataset unique id
project_id: str, optional
The project the dataset is in
Returns
-------
dict
Contains dataset object if it exists, else empty
"""
project_id = self._get_project_id(project_id)
try:
dataset = self.bigquery.datasets().get(
projectId=project_id, datasetId=dataset_id).execute(
num_retries=self.num_retries)
except HttpError:
dataset = {}
return dataset
|
python
|
def get_dataset(self, dataset_id, project_id=None):
"""Retrieve a dataset if it exists, otherwise return an empty dict.
Parameters
----------
dataset_id : str
Dataset unique id
project_id: str, optional
The project the dataset is in
Returns
-------
dict
Contains dataset object if it exists, else empty
"""
project_id = self._get_project_id(project_id)
try:
dataset = self.bigquery.datasets().get(
projectId=project_id, datasetId=dataset_id).execute(
num_retries=self.num_retries)
except HttpError:
dataset = {}
return dataset
|
[
"def",
"get_dataset",
"(",
"self",
",",
"dataset_id",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"try",
":",
"dataset",
"=",
"self",
".",
"bigquery",
".",
"datasets",
"(",
")",
".",
"get",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset_id",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"except",
"HttpError",
":",
"dataset",
"=",
"{",
"}",
"return",
"dataset"
] |
Retrieve a dataset if it exists, otherwise return an empty dict.
Parameters
----------
dataset_id : str
Dataset unique id
project_id: str, optional
The project the dataset is in
Returns
-------
dict
Contains dataset object if it exists, else empty
|
[
"Retrieve",
"a",
"dataset",
"if",
"it",
"exists",
"otherwise",
"return",
"an",
"empty",
"dict",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L528-L552
|
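A minimal usage sketch for the get_dataset record above, under the same assumed `client`; an empty dict signals that the dataset does not exist, since the HttpError is swallowed internally:
# Sketch only: `client` is an assumed, pre-configured BigQueryClient.
dataset = client.get_dataset('events_dataset')  # hypothetical dataset id
if dataset:
    print(dataset.get('datasetReference'))  # reference block of the REST resource
else:
    print('no such dataset')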
7,491
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.get_table
|
def get_table(self, dataset, table, project_id=None):
""" Retrieve a table if it exists, otherwise return an empty dict.
Parameters
----------
dataset : str
The dataset that the table is in
table : str
The name of the table
project_id: str, optional
The project that the table is in
Returns
-------
dict
Containing the table object if it exists, else empty
"""
project_id = self._get_project_id(project_id)
try:
table = self.bigquery.tables().get(
projectId=project_id, datasetId=dataset,
tableId=table).execute(num_retries=self.num_retries)
except HttpError:
table = {}
return table
|
python
|
def get_table(self, dataset, table, project_id=None):
""" Retrieve a table if it exists, otherwise return an empty dict.
Parameters
----------
dataset : str
The dataset that the table is in
table : str
The name of the table
project_id: str, optional
The project that the table is in
Returns
-------
dict
Containing the table object if it exists, else empty
"""
project_id = self._get_project_id(project_id)
try:
table = self.bigquery.tables().get(
projectId=project_id, datasetId=dataset,
tableId=table).execute(num_retries=self.num_retries)
except HttpError:
table = {}
return table
|
[
"def",
"get_table",
"(",
"self",
",",
"dataset",
",",
"table",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"try",
":",
"table",
"=",
"self",
".",
"bigquery",
".",
"tables",
"(",
")",
".",
"get",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset",
",",
"tableId",
"=",
"table",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"except",
"HttpError",
":",
"table",
"=",
"{",
"}",
"return",
"table"
] |
Retrieve a table if it exists, otherwise return an empty dict.
Parameters
----------
dataset : str
The dataset that the table is in
table : str
The name of the table
project_id: str, optional
The project that the table is in
Returns
-------
dict
Containing the table object if it exists, else empty
|
[
"Retrieve",
"a",
"table",
"if",
"it",
"exists",
"otherwise",
"return",
"an",
"empty",
"dict",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L574-L599
|
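A minimal usage sketch for the get_table record above, under the same assumptions (`client` pre-configured, names hypothetical); an empty dict means the table was not found:
# Sketch only: `client` is an assumed, pre-configured BigQueryClient.
table = client.get_table('events_dataset', 'events_20240101')
fields = table.get('schema', {}).get('fields', [])  # empty dict => table missing
print(len(fields), 'columns')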
7,492
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.create_table
|
def create_table(self, dataset, table, schema,
expiration_time=None, time_partitioning=False,
project_id=None):
"""Create a new table in the dataset.
Parameters
----------
dataset : str
The dataset to create the table in
table : str
The name of the table to create
schema : dict
The table schema
expiration_time : int or double, optional
The expiry time in milliseconds since the epoch.
time_partitioning : bool, optional
Create a time partitioning.
project_id: str, optional
The project to create the table in
Returns
-------
Union[bool, dict]
If the table was successfully created, or response from BigQuery
if swallow_results is set to False
"""
project_id = self._get_project_id(project_id)
body = {
'schema': {'fields': schema},
'tableReference': {
'tableId': table,
'projectId': project_id,
'datasetId': dataset
}
}
if expiration_time is not None:
body['expirationTime'] = expiration_time
if time_partitioning:
body['timePartitioning'] = {'type': 'DAY'}
try:
table = self.bigquery.tables().insert(
projectId=project_id,
datasetId=dataset,
body=body
).execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return table
except HttpError as e:
logger.error(('Cannot create table {0}.{1}.{2}\n'
'Http Error: {3}').format(project_id, dataset, table, e.content))
if self.swallow_results:
return False
else:
return {}
|
python
|
def create_table(self, dataset, table, schema,
expiration_time=None, time_partitioning=False,
project_id=None):
"""Create a new table in the dataset.
Parameters
----------
dataset : str
The dataset to create the table in
table : str
The name of the table to create
schema : dict
The table schema
expiration_time : int or double, optional
The expiry time in milliseconds since the epoch.
time_partitioning : bool, optional
Create a time partitioning.
project_id: str, optional
The project to create the table in
Returns
-------
Union[bool, dict]
If the table was successfully created, or response from BigQuery
if swallow_results is set to False
"""
project_id = self._get_project_id(project_id)
body = {
'schema': {'fields': schema},
'tableReference': {
'tableId': table,
'projectId': project_id,
'datasetId': dataset
}
}
if expiration_time is not None:
body['expirationTime'] = expiration_time
if time_partitioning:
body['timePartitioning'] = {'type': 'DAY'}
try:
table = self.bigquery.tables().insert(
projectId=project_id,
datasetId=dataset,
body=body
).execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return table
except HttpError as e:
logger.error(('Cannot create table {0}.{1}.{2}\n'
'Http Error: {3}').format(project_id, dataset, table, e.content))
if self.swallow_results:
return False
else:
return {}
|
[
"def",
"create_table",
"(",
"self",
",",
"dataset",
",",
"table",
",",
"schema",
",",
"expiration_time",
"=",
"None",
",",
"time_partitioning",
"=",
"False",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"body",
"=",
"{",
"'schema'",
":",
"{",
"'fields'",
":",
"schema",
"}",
",",
"'tableReference'",
":",
"{",
"'tableId'",
":",
"table",
",",
"'projectId'",
":",
"project_id",
",",
"'datasetId'",
":",
"dataset",
"}",
"}",
"if",
"expiration_time",
"is",
"not",
"None",
":",
"body",
"[",
"'expirationTime'",
"]",
"=",
"expiration_time",
"if",
"time_partitioning",
":",
"body",
"[",
"'timePartitioning'",
"]",
"=",
"{",
"'type'",
":",
"'DAY'",
"}",
"try",
":",
"table",
"=",
"self",
".",
"bigquery",
".",
"tables",
"(",
")",
".",
"insert",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset",
",",
"body",
"=",
"body",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"True",
"else",
":",
"return",
"table",
"except",
"HttpError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"(",
"'Cannot create table {0}.{1}.{2}\\n'",
"'Http Error: {3}'",
")",
".",
"format",
"(",
"project_id",
",",
"dataset",
",",
"table",
",",
"e",
".",
"content",
")",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"False",
"else",
":",
"return",
"{",
"}"
] |
Create a new table in the dataset.
Parameters
----------
dataset : str
The dataset to create the table in
table : str
The name of the table to create
schema : dict
The table schema
expiration_time : int or double, optional
The expiry time in milliseconds since the epoch.
time_partitioning : bool, optional
Create a time partitioning.
project_id: str, optional
The project to create the table in
Returns
-------
Union[bool, dict]
If the table was successfully created, or response from BigQuery
if swallow_results is set to False
|
[
"Create",
"a",
"new",
"table",
"in",
"the",
"dataset",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L601-L661
|
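A minimal usage sketch for the create_table record above, assuming a pre-configured `client`; the schema is the REST API's 'fields' list, and all names here are hypothetical:
# Sketch only: `client` is an assumed, pre-configured BigQueryClient.
schema = [
    {'name': 'user_id', 'type': 'STRING', 'mode': 'REQUIRED'},
    {'name': 'ts', 'type': 'TIMESTAMP', 'mode': 'NULLABLE'},
]
ok = client.create_table('events_dataset', 'events', schema,
                         time_partitioning=True)  # sends {'type': 'DAY'} partitioning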
7,493
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.patch_table
|
def patch_table(self, dataset, table, schema, project_id=None):
"""Patch an existing table in the dataset.
Parameters
----------
dataset : str
The dataset to patch the table in
table : str
The name of the table to patch
schema : dict
The table schema
project_id: str, optional
The project to patch the table in
Returns
-------
Union[bool, dict]
Bool indicating if the table was successfully patched or not,
or response from BigQuery if swallow_results is set to False
"""
project_id = self._get_project_id(project_id)
body = {
'schema': {'fields': schema},
'tableReference': {
'tableId': table,
'projectId': project_id,
'datasetId': dataset
}
}
try:
result = self.bigquery.tables().patch(
projectId=project_id,
datasetId=dataset,
body=body
).execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return result
except HttpError as e:
logger.error(('Cannot patch table {0}.{1}.{2}\n'
'Http Error: {3}').format(project_id, dataset, table, e.content))
if self.swallow_results:
return False
else:
return {}
|
python
|
def patch_table(self, dataset, table, schema, project_id=None):
"""Patch an existing table in the dataset.
Parameters
----------
dataset : str
The dataset to patch the table in
table : str
The name of the table to patch
schema : dict
The table schema
project_id: str, optional
The project to patch the table in
Returns
-------
Union[bool, dict]
Bool indicating if the table was successfully patched or not,
or response from BigQuery if swallow_results is set to False
"""
project_id = self._get_project_id(project_id)
body = {
'schema': {'fields': schema},
'tableReference': {
'tableId': table,
'projectId': project_id,
'datasetId': dataset
}
}
try:
result = self.bigquery.tables().patch(
projectId=project_id,
datasetId=dataset,
body=body
).execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return result
except HttpError as e:
logger.error(('Cannot patch table {0}.{1}.{2}\n'
'Http Error: {3}').format(project_id, dataset, table, e.content))
if self.swallow_results:
return False
else:
return {}
|
[
"def",
"patch_table",
"(",
"self",
",",
"dataset",
",",
"table",
",",
"schema",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"body",
"=",
"{",
"'schema'",
":",
"{",
"'fields'",
":",
"schema",
"}",
",",
"'tableReference'",
":",
"{",
"'tableId'",
":",
"table",
",",
"'projectId'",
":",
"project_id",
",",
"'datasetId'",
":",
"dataset",
"}",
"}",
"try",
":",
"result",
"=",
"self",
".",
"bigquery",
".",
"tables",
"(",
")",
".",
"patch",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset",
",",
"body",
"=",
"body",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"True",
"else",
":",
"return",
"result",
"except",
"HttpError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"(",
"'Cannot patch table {0}.{1}.{2}\\n'",
"'Http Error: {3}'",
")",
".",
"format",
"(",
"project_id",
",",
"dataset",
",",
"table",
",",
"e",
".",
"content",
")",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"False",
"else",
":",
"return",
"{",
"}"
] |
Patch an existing table in the dataset.
Parameters
----------
dataset : str
The dataset to patch the table in
table : str
The name of the table to patch
schema : dict
The table schema
project_id: str, optional
The project to patch the table in
Returns
-------
Union[bool, dict]
Bool indicating if the table was successfully patched or not,
or response from BigQuery if swallow_results is set to False
|
[
"Patch",
"an",
"existing",
"table",
"in",
"the",
"dataset",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L714-L762
|
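A minimal usage sketch for the patch_table record above, under the same assumptions; note that the request body mirrors create_table, so the full extended schema is sent rather than a delta:
# Sketch only: `client` is an assumed, pre-configured BigQueryClient.
new_schema = [
    {'name': 'user_id', 'type': 'STRING', 'mode': 'REQUIRED'},
    {'name': 'ts', 'type': 'TIMESTAMP', 'mode': 'NULLABLE'},
    {'name': 'country', 'type': 'STRING', 'mode': 'NULLABLE'},  # newly added column
]
ok = client.patch_table('events_dataset', 'events', new_schema)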
7,494
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.create_view
|
def create_view(self, dataset, view, query, use_legacy_sql=None, project_id=None):
"""Create a new view in the dataset.
Parameters
----------
dataset : str
The dataset to create the view in
view : str
The name of the view to create
query : str
A query that BigQuery executes when the view is referenced.
use_legacy_sql : bool, optional
If False, the query will use BigQuery's standard SQL
(https://cloud.google.com/bigquery/sql-reference/)
project_id: str, optional
The project to create the view in
Returns
-------
Union[bool, dict]
bool indicating if the view was successfully created or not,
or response from BigQuery if swallow_results is set to False.
"""
project_id = self._get_project_id(project_id)
body = {
'tableReference': {
'tableId': view,
'projectId': project_id,
'datasetId': dataset
},
'view': {
'query': query
}
}
if use_legacy_sql is not None:
body['view']['useLegacySql'] = use_legacy_sql
try:
view = self.bigquery.tables().insert(
projectId=project_id,
datasetId=dataset,
body=body
).execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return view
except HttpError as e:
logger.error(('Cannot create view {0}.{1}\n'
'Http Error: {2}').format(dataset, view, e.content))
if self.swallow_results:
return False
else:
return {}
|
python
|
def create_view(self, dataset, view, query, use_legacy_sql=None, project_id=None):
"""Create a new view in the dataset.
Parameters
----------
dataset : str
The dataset to create the view in
view : str
The name of the view to create
query : str
A query that BigQuery executes when the view is referenced.
use_legacy_sql : bool, optional
If False, the query will use BigQuery's standard SQL
(https://cloud.google.com/bigquery/sql-reference/)
project_id: str, optional
The project to create the view in
Returns
-------
Union[bool, dict]
bool indicating if the view was successfully created or not,
or response from BigQuery if swallow_results is set to False.
"""
project_id = self._get_project_id(project_id)
body = {
'tableReference': {
'tableId': view,
'projectId': project_id,
'datasetId': dataset
},
'view': {
'query': query
}
}
if use_legacy_sql is not None:
body['view']['useLegacySql'] = use_legacy_sql
try:
view = self.bigquery.tables().insert(
projectId=project_id,
datasetId=dataset,
body=body
).execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return view
except HttpError as e:
logger.error(('Cannot create view {0}.{1}\n'
'Http Error: {2}').format(dataset, view, e.content))
if self.swallow_results:
return False
else:
return {}
|
[
"def",
"create_view",
"(",
"self",
",",
"dataset",
",",
"view",
",",
"query",
",",
"use_legacy_sql",
"=",
"None",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"body",
"=",
"{",
"'tableReference'",
":",
"{",
"'tableId'",
":",
"view",
",",
"'projectId'",
":",
"project_id",
",",
"'datasetId'",
":",
"dataset",
"}",
",",
"'view'",
":",
"{",
"'query'",
":",
"query",
"}",
"}",
"if",
"use_legacy_sql",
"is",
"not",
"None",
":",
"body",
"[",
"'view'",
"]",
"[",
"'useLegacySql'",
"]",
"=",
"use_legacy_sql",
"try",
":",
"view",
"=",
"self",
".",
"bigquery",
".",
"tables",
"(",
")",
".",
"insert",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset",
",",
"body",
"=",
"body",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"True",
"else",
":",
"return",
"view",
"except",
"HttpError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"(",
"'Cannot create view {0}.{1}\\n'",
"'Http Error: {2}'",
")",
".",
"format",
"(",
"dataset",
",",
"view",
",",
"e",
".",
"content",
")",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"False",
"else",
":",
"return",
"{",
"}"
] |
Create a new view in the dataset.
Parameters
----------
dataset : str
The dataset to create the view in
view : str
The name of the view to create
query : str
A query that BigQuery executes when the view is referenced.
use_legacy_sql : bool, optional
If False, the query will use BigQuery's standard SQL
(https://cloud.google.com/bigquery/sql-reference/)
project_id: str, optional
The project to create the view in
Returns
-------
Union[bool, dict]
bool indicating if the view was successfully created or not,
or response from BigQuery if swallow_results is set to False.
|
[
"Create",
"a",
"new",
"view",
"in",
"the",
"dataset",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L764-L820
|
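A minimal usage sketch for the create_view record above, assuming a pre-configured `client`; the query is a SQL string placed under the view resource, and use_legacy_sql=False opts into standard SQL:
# Sketch only: `client` is an assumed, pre-configured BigQueryClient.
query = ('SELECT user_id, COUNT(*) AS n '
         'FROM events_dataset.events GROUP BY user_id')  # hypothetical table
ok = client.create_view('events_dataset', 'events_by_user', query,
                        use_legacy_sql=False)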
7,495
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.delete_table
|
def delete_table(self, dataset, table, project_id=None):
"""Delete a table from the dataset.
Parameters
----------
dataset : str
The dataset to delete the table from.
table : str
The name of the table to delete
project_id: str, optional
String id of the project
Returns
-------
Union[bool, dict]
bool indicating if the table was successfully deleted or not,
or response from BigQuery if swallow_results is set to False.
"""
project_id = self._get_project_id(project_id)
try:
response = self.bigquery.tables().delete(
projectId=project_id,
datasetId=dataset,
tableId=table
).execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return response
except HttpError as e:
logger.error(('Cannot delete table {0}.{1}\n'
'Http Error: {2}').format(dataset, table, e.content))
if self.swallow_results:
return False
else:
return {}
|
python
|
def delete_table(self, dataset, table, project_id=None):
"""Delete a table from the dataset.
Parameters
----------
dataset : str
The dataset to delete the table from.
table : str
The name of the table to delete
project_id: str, optional
String id of the project
Returns
-------
Union[bool, dict]
bool indicating if the table was successfully deleted or not,
or response from BigQuery if swallow_results is set to False.
"""
project_id = self._get_project_id(project_id)
try:
response = self.bigquery.tables().delete(
projectId=project_id,
datasetId=dataset,
tableId=table
).execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return response
except HttpError as e:
logger.error(('Cannot delete table {0}.{1}\n'
'Http Error: {2}').format(dataset, table, e.content))
if self.swallow_results:
return False
else:
return {}
|
[
"def",
"delete_table",
"(",
"self",
",",
"dataset",
",",
"table",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"try",
":",
"response",
"=",
"self",
".",
"bigquery",
".",
"tables",
"(",
")",
".",
"delete",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset",
",",
"tableId",
"=",
"table",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"True",
"else",
":",
"return",
"response",
"except",
"HttpError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"(",
"'Cannot delete table {0}.{1}\\n'",
"'Http Error: {2}'",
")",
".",
"format",
"(",
"dataset",
",",
"table",
",",
"e",
".",
"content",
")",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"False",
"else",
":",
"return",
"{",
"}"
] |
Delete a table from the dataset.
Parameters
----------
dataset : str
The dataset to delete the table from.
table : str
The name of the table to delete
project_id: str, optional
String id of the project
Returns
-------
Union[bool, dict]
bool indicating if the table was successfully deleted or not,
or response from BigQuery if swallow_results is set to False.
|
[
"Delete",
"a",
"table",
"from",
"the",
"dataset",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L822-L859
|
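A minimal usage sketch for the delete_table record above, under the same assumptions; with swallow_results enabled a plain bool comes back, and failures were already logged via logger.error:
# Sketch only: `client` is an assumed, pre-configured BigQueryClient.
ok = client.delete_table('events_dataset', 'events_20240101')  # hypothetical names
if not ok:
    print('delete failed; details are in the error log')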
7,496
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.get_tables
|
def get_tables(self, dataset_id, app_id, start_time, end_time, project_id=None):
"""Retrieve a list of tables that are related to the given app id
and are inside the range of start and end times.
Parameters
----------
dataset_id : str
The BigQuery dataset id to consider.
app_id : str
The appspot name
start_time : Union[datetime, int]
The datetime or unix time after which records will be fetched.
end_time : Union[datetime, int]
The datetime or unix time up to which records will be fetched.
project_id: str, optional
String id of the project
Returns
-------
list
A ``list`` of table names.
"""
if isinstance(start_time, datetime):
start_time = calendar.timegm(start_time.utctimetuple())
if isinstance(end_time, datetime):
end_time = calendar.timegm(end_time.utctimetuple())
every_table = self._get_all_tables(dataset_id, project_id)
app_tables = every_table.get(app_id, {})
return self._filter_tables_by_time(app_tables, start_time, end_time)
|
python
|
def get_tables(self, dataset_id, app_id, start_time, end_time, project_id=None):
"""Retrieve a list of tables that are related to the given app id
and are inside the range of start and end times.
Parameters
----------
dataset_id : str
The BigQuery dataset id to consider.
app_id : str
The appspot name
start_time : Union[datetime, int]
The datetime or unix time after which records will be fetched.
end_time : Union[datetime, int]
The datetime or unix time up to which records will be fetched.
project_id: str, optional
String id of the project
Returns
-------
list
A ``list`` of table names.
"""
if isinstance(start_time, datetime):
start_time = calendar.timegm(start_time.utctimetuple())
if isinstance(end_time, datetime):
end_time = calendar.timegm(end_time.utctimetuple())
every_table = self._get_all_tables(dataset_id, project_id)
app_tables = every_table.get(app_id, {})
return self._filter_tables_by_time(app_tables, start_time, end_time)
|
[
"def",
"get_tables",
"(",
"self",
",",
"dataset_id",
",",
"app_id",
",",
"start_time",
",",
"end_time",
",",
"project_id",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"start_time",
",",
"datetime",
")",
":",
"start_time",
"=",
"calendar",
".",
"timegm",
"(",
"start_time",
".",
"utctimetuple",
"(",
")",
")",
"if",
"isinstance",
"(",
"end_time",
",",
"datetime",
")",
":",
"end_time",
"=",
"calendar",
".",
"timegm",
"(",
"end_time",
".",
"utctimetuple",
"(",
")",
")",
"every_table",
"=",
"self",
".",
"_get_all_tables",
"(",
"dataset_id",
",",
"project_id",
")",
"app_tables",
"=",
"every_table",
".",
"get",
"(",
"app_id",
",",
"{",
"}",
")",
"return",
"self",
".",
"_filter_tables_by_time",
"(",
"app_tables",
",",
"start_time",
",",
"end_time",
")"
] |
Retrieve a list of tables that are related to the given app id
and are inside the range of start and end times.
Parameters
----------
dataset_id : str
The BigQuery dataset id to consider.
app_id : str
The appspot name
start_time : Union[datetime, int]
The datetime or unix time after which records will be fetched.
end_time : Union[datetime, int]
The datetime or unix time up to which records will be fetched.
project_id: str, optional
String id of the project
Returns
-------
list
A ``list`` of table names.
|
[
"Retrieve",
"a",
"list",
"of",
"tables",
"that",
"are",
"related",
"to",
"the",
"given",
"app",
"id",
"and",
"are",
"inside",
"the",
"range",
"of",
"start",
"and",
"end",
"times",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L861-L893
|
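A minimal usage sketch for the get_tables record above, assuming a pre-configured `client`; datetime bounds are converted to unix time internally, so either form works:
# Sketch only: `client` is an assumed, pre-configured BigQueryClient.
from datetime import datetime

tables = client.get_tables('events_dataset', 'my-app',  # hypothetical ids
                           datetime(2024, 1, 1), datetime(2024, 2, 1))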
7,497
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.wait_for_job
|
def wait_for_job(self, job, interval=5, timeout=60):
"""
Waits until the job indicated by job_resource is done or has failed
Parameters
----------
job : Union[dict, str]
``dict`` representing a BigQuery job resource, or a ``str``
representing the BigQuery job id
interval : float, optional
Polling interval in seconds, default = 5
timeout : float, optional
Timeout in seconds, default = 60
Returns
-------
dict
Final state of the job resource, as described here:
https://developers.google.com/resources/api-libraries/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#get
Raises
------
Union[JobExecutingException, BigQueryTimeoutException]
On http/auth failures or timeout
"""
complete = False
job_id = str(job if isinstance(job,
(six.binary_type, six.text_type, int))
else job['jobReference']['jobId'])
job_resource = None
start_time = time()
elapsed_time = 0
while not (complete or elapsed_time > timeout):
sleep(interval)
request = self.bigquery.jobs().get(projectId=self.project_id,
jobId=job_id)
job_resource = request.execute(num_retries=self.num_retries)
self._raise_executing_exception_if_error(job_resource)
complete = job_resource.get('status').get('state') == u'DONE'
elapsed_time = time() - start_time
# raise exceptions if timeout
if not complete:
logger.error('BigQuery job %s timeout' % job_id)
raise BigQueryTimeoutException()
return job_resource
|
python
|
def wait_for_job(self, job, interval=5, timeout=60):
"""
Waits until the job indicated by job_resource is done or has failed
Parameters
----------
job : Union[dict, str]
``dict`` representing a BigQuery job resource, or a ``str``
representing the BigQuery job id
interval : float, optional
Polling interval in seconds, default = 5
timeout : float, optional
Timeout in seconds, default = 60
Returns
-------
dict
Final state of the job resource, as described here:
https://developers.google.com/resources/api-libraries/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#get
Raises
------
Union[JobExecutingException, BigQueryTimeoutException]
On http/auth failures or timeout
"""
complete = False
job_id = str(job if isinstance(job,
(six.binary_type, six.text_type, int))
else job['jobReference']['jobId'])
job_resource = None
start_time = time()
elapsed_time = 0
while not (complete or elapsed_time > timeout):
sleep(interval)
request = self.bigquery.jobs().get(projectId=self.project_id,
jobId=job_id)
job_resource = request.execute(num_retries=self.num_retries)
self._raise_executing_exception_if_error(job_resource)
complete = job_resource.get('status').get('state') == u'DONE'
elapsed_time = time() - start_time
# raise exceptions if timeout
if not complete:
logger.error('BigQuery job %s timeout' % job_id)
raise BigQueryTimeoutException()
return job_resource
|
[
"def",
"wait_for_job",
"(",
"self",
",",
"job",
",",
"interval",
"=",
"5",
",",
"timeout",
"=",
"60",
")",
":",
"complete",
"=",
"False",
"job_id",
"=",
"str",
"(",
"job",
"if",
"isinstance",
"(",
"job",
",",
"(",
"six",
".",
"binary_type",
",",
"six",
".",
"text_type",
",",
"int",
")",
")",
"else",
"job",
"[",
"'jobReference'",
"]",
"[",
"'jobId'",
"]",
")",
"job_resource",
"=",
"None",
"start_time",
"=",
"time",
"(",
")",
"elapsed_time",
"=",
"0",
"while",
"not",
"(",
"complete",
"or",
"elapsed_time",
">",
"timeout",
")",
":",
"sleep",
"(",
"interval",
")",
"request",
"=",
"self",
".",
"bigquery",
".",
"jobs",
"(",
")",
".",
"get",
"(",
"projectId",
"=",
"self",
".",
"project_id",
",",
"jobId",
"=",
"job_id",
")",
"job_resource",
"=",
"request",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"self",
".",
"_raise_executing_exception_if_error",
"(",
"job_resource",
")",
"complete",
"=",
"job_resource",
".",
"get",
"(",
"'status'",
")",
".",
"get",
"(",
"'state'",
")",
"==",
"u'DONE'",
"elapsed_time",
"=",
"time",
"(",
")",
"-",
"start_time",
"# raise exceptions if timeout",
"if",
"not",
"complete",
":",
"logger",
".",
"error",
"(",
"'BigQuery job %s timeout'",
"%",
"job_id",
")",
"raise",
"BigQueryTimeoutException",
"(",
")",
"return",
"job_resource"
] |
Waits until the job indicated by job_resource is done or has failed
Parameters
----------
job : Union[dict, str]
``dict`` representing a BigQuery job resource, or a ``str``
representing the BigQuery job id
interval : float, optional
Polling interval in seconds, default = 5
timeout : float, optional
Timeout in seconds, default = 60
Returns
-------
dict
Final state of the job resource, as described here:
https://developers.google.com/resources/api-libraries/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#get
Raises
------
Union[JobExecutingException, BigQueryTimeoutException]
On http/auth failures or timeout
|
[
"Waits",
"until",
"the",
"job",
"indicated",
"by",
"job_resource",
"is",
"done",
"or",
"has",
"failed"
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L1274-L1321
|
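A minimal usage sketch for the wait_for_job record above, assuming a pre-configured `client` and a `job` dict returned by an earlier asynchronous call; the exception's import path is an assumption, as it is not shown in this record:
# Sketch only: `client` and `job` are assumed to exist already.
from bigquery.errors import BigQueryTimeoutException  # assumed module path

try:
    resource = client.wait_for_job(job, interval=5, timeout=120)
    print(resource['status']['state'])  # 'DONE' on success
except BigQueryTimeoutException:
    print('job still running after 120s')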
7,498
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.push_rows
|
def push_rows(self, dataset, table, rows, insert_id_key=None,
skip_invalid_rows=None, ignore_unknown_values=None,
template_suffix=None, project_id=None):
"""Upload rows to BigQuery table.
Parameters
----------
dataset : str
The dataset to upload to
table : str
The name of the table to insert rows into
rows : list
A ``list`` of rows (``dict`` objects) to add to the table
insert_id_key : str, optional
Key for insertId in row.
You can use dot separated key for nested column.
skip_invalid_rows : bool, optional
Insert all valid rows of a request, even if invalid rows exist.
ignore_unknown_values : bool, optional
Accept rows that contain values that do not match the schema.
template_suffix : str, optional
Inserts the rows into {table}{template_suffix}.
If {table}{template_suffix} doesn't exist, it is created from {table}.
project_id: str, optional
The project to upload to
Returns
-------
Union[bool, dict]
bool indicating if insert succeeded or not, or response
from BigQuery if swallow_results is set to False.
"""
project_id = self._get_project_id(project_id)
table_data = self.bigquery.tabledata()
rows_data = []
for row in rows:
each_row = {}
each_row["json"] = row
if insert_id_key is not None:
keys = insert_id_key.split('.')
val = reduce(lambda d, key: d.get(key) if d else None, keys, row)
if val is not None:
each_row["insertId"] = val
rows_data.append(each_row)
data = {
"kind": "bigquery#tableDataInsertAllRequest",
"rows": rows_data
}
if skip_invalid_rows is not None:
data['skipInvalidRows'] = skip_invalid_rows
if ignore_unknown_values is not None:
data['ignoreUnknownValues'] = ignore_unknown_values
if template_suffix is not None:
data['templateSuffix'] = template_suffix
try:
response = table_data.insertAll(
projectId=project_id,
datasetId=dataset,
tableId=table,
body=data
).execute(num_retries=self.num_retries)
if response.get('insertErrors'):
logger.error('BigQuery insert errors: %s' % response)
if self.swallow_results:
return False
else:
return response
if self.swallow_results:
return True
else:
return response
except HttpError as e:
logger.exception('Problem with BigQuery insertAll')
if self.swallow_results:
return False
else:
return {
'insertErrors': [{
'errors': [{
'reason': 'httperror',
'message': e
}]
}]
}
|
python
|
def push_rows(self, dataset, table, rows, insert_id_key=None,
skip_invalid_rows=None, ignore_unknown_values=None,
template_suffix=None, project_id=None):
"""Upload rows to BigQuery table.
Parameters
----------
dataset : str
The dataset to upload to
table : str
The name of the table to insert rows into
rows : list
A ``list`` of rows (``dict`` objects) to add to the table
insert_id_key : str, optional
Key for insertId in row.
You can use dot separated key for nested column.
skip_invalid_rows : bool, optional
Insert all valid rows of a request, even if invalid rows exist.
ignore_unknown_values : bool, optional
Accept rows that contain values that do not match the schema.
template_suffix : str, optional
Inserts the rows into {table}{template_suffix}.
If {table}{template_suffix} doesn't exist, it is created from {table}.
project_id: str, optional
The project to upload to
Returns
-------
Union[bool, dict]
bool indicating if insert succeeded or not, or response
from BigQuery if swallow_results is set to False.
"""
project_id = self._get_project_id(project_id)
table_data = self.bigquery.tabledata()
rows_data = []
for row in rows:
each_row = {}
each_row["json"] = row
if insert_id_key is not None:
keys = insert_id_key.split('.')
val = reduce(lambda d, key: d.get(key) if d else None, keys, row)
if val is not None:
each_row["insertId"] = val
rows_data.append(each_row)
data = {
"kind": "bigquery#tableDataInsertAllRequest",
"rows": rows_data
}
if skip_invalid_rows is not None:
data['skipInvalidRows'] = skip_invalid_rows
if ignore_unknown_values is not None:
data['ignoreUnknownValues'] = ignore_unknown_values
if template_suffix is not None:
data['templateSuffix'] = template_suffix
try:
response = table_data.insertAll(
projectId=project_id,
datasetId=dataset,
tableId=table,
body=data
).execute(num_retries=self.num_retries)
if response.get('insertErrors'):
logger.error('BigQuery insert errors: %s' % response)
if self.swallow_results:
return False
else:
return response
if self.swallow_results:
return True
else:
return response
except HttpError as e:
logger.exception('Problem with BigQuery insertAll')
if self.swallow_results:
return False
else:
return {
'insertErrors': [{
'errors': [{
'reason': 'httperror',
'message': e
}]
}]
}
|
[
"def",
"push_rows",
"(",
"self",
",",
"dataset",
",",
"table",
",",
"rows",
",",
"insert_id_key",
"=",
"None",
",",
"skip_invalid_rows",
"=",
"None",
",",
"ignore_unknown_values",
"=",
"None",
",",
"template_suffix",
"=",
"None",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"table_data",
"=",
"self",
".",
"bigquery",
".",
"tabledata",
"(",
")",
"rows_data",
"=",
"[",
"]",
"for",
"row",
"in",
"rows",
":",
"each_row",
"=",
"{",
"}",
"each_row",
"[",
"\"json\"",
"]",
"=",
"row",
"if",
"insert_id_key",
"is",
"not",
"None",
":",
"keys",
"=",
"insert_id_key",
".",
"split",
"(",
"'.'",
")",
"val",
"=",
"reduce",
"(",
"lambda",
"d",
",",
"key",
":",
"d",
".",
"get",
"(",
"key",
")",
"if",
"d",
"else",
"None",
",",
"keys",
",",
"row",
")",
"if",
"val",
"is",
"not",
"None",
":",
"each_row",
"[",
"\"insertId\"",
"]",
"=",
"val",
"rows_data",
".",
"append",
"(",
"each_row",
")",
"data",
"=",
"{",
"\"kind\"",
":",
"\"bigquery#tableDataInsertAllRequest\"",
",",
"\"rows\"",
":",
"rows_data",
"}",
"if",
"skip_invalid_rows",
"is",
"not",
"None",
":",
"data",
"[",
"'skipInvalidRows'",
"]",
"=",
"skip_invalid_rows",
"if",
"ignore_unknown_values",
"is",
"not",
"None",
":",
"data",
"[",
"'ignoreUnknownValues'",
"]",
"=",
"ignore_unknown_values",
"if",
"template_suffix",
"is",
"not",
"None",
":",
"data",
"[",
"'templateSuffix'",
"]",
"=",
"template_suffix",
"try",
":",
"response",
"=",
"table_data",
".",
"insertAll",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset",
",",
"tableId",
"=",
"table",
",",
"body",
"=",
"data",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"if",
"response",
".",
"get",
"(",
"'insertErrors'",
")",
":",
"logger",
".",
"error",
"(",
"'BigQuery insert errors: %s'",
"%",
"response",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"False",
"else",
":",
"return",
"response",
"if",
"self",
".",
"swallow_results",
":",
"return",
"True",
"else",
":",
"return",
"response",
"except",
"HttpError",
"as",
"e",
":",
"logger",
".",
"exception",
"(",
"'Problem with BigQuery insertAll'",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"False",
"else",
":",
"return",
"{",
"'insertErrors'",
":",
"[",
"{",
"'errors'",
":",
"[",
"{",
"'reason'",
":",
"'httperror'",
",",
"'message'",
":",
"e",
"}",
"]",
"}",
"]",
"}"
] |
Upload rows to BigQuery table.
Parameters
----------
dataset : str
The dataset to upload to
table : str
The name of the table to insert rows into
rows : list
A ``list`` of rows (``dict`` objects) to add to the table
insert_id_key : str, optional
Key for insertId in row.
You can use dot separated key for nested column.
skip_invalid_rows : bool, optional
Insert all valid rows of a request, even if invalid rows exist.
ignore_unknown_values : bool, optional
Accept rows that contain values that do not match the schema.
template_suffix : str, optional
Inserts the rows into {table}{template_suffix}.
If {table}{template_suffix} doesn't exist, it is created from {table}.
project_id: str, optional
The project to upload to
Returns
-------
Union[bool, dict]
bool indicating if insert succeeded or not, or response
from BigQuery if swallow_results is set to False.
|
[
"Upload",
"rows",
"to",
"BigQuery",
"table",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L1323-L1415
|
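A minimal usage sketch for the push_rows record above, under the same assumptions; insert_id_key lets BigQuery deduplicate retried rows, and dot-separated keys reach nested fields:
# Sketch only: `client` is an assumed, pre-configured BigQueryClient.
rows = [
    {'event_id': 'e1', 'user_id': 'u1'},
    {'event_id': 'e2', 'user_id': 'u2'},
]
ok = client.push_rows('events_dataset', 'events', rows,
                      insert_id_key='event_id')  # dedupe on event_id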
7,499
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.get_all_tables
|
def get_all_tables(self, dataset_id, project_id=None):
"""Retrieve a list of tables for the dataset.
Parameters
----------
dataset_id : str
The dataset to retrieve table data for.
project_id: str
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
A ``list`` with all table names
"""
tables_data = self._get_all_tables_for_dataset(dataset_id, project_id)
tables = []
for table in tables_data.get('tables', []):
table_name = table.get('tableReference', {}).get('tableId')
if table_name:
tables.append(table_name)
return tables
|
python
|
def get_all_tables(self, dataset_id, project_id=None):
"""Retrieve a list of tables for the dataset.
Parameters
----------
dataset_id : str
The dataset to retrieve table data for.
project_id: str
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
A ``list`` with all table names
"""
tables_data = self._get_all_tables_for_dataset(dataset_id, project_id)
tables = []
for table in tables_data.get('tables', []):
table_name = table.get('tableReference', {}).get('tableId')
if table_name:
tables.append(table_name)
return tables
|
[
"def",
"get_all_tables",
"(",
"self",
",",
"dataset_id",
",",
"project_id",
"=",
"None",
")",
":",
"tables_data",
"=",
"self",
".",
"_get_all_tables_for_dataset",
"(",
"dataset_id",
",",
"project_id",
")",
"tables",
"=",
"[",
"]",
"for",
"table",
"in",
"tables_data",
".",
"get",
"(",
"'tables'",
",",
"[",
"]",
")",
":",
"table_name",
"=",
"table",
".",
"get",
"(",
"'tableReference'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'tableId'",
")",
"if",
"table_name",
":",
"tables",
".",
"append",
"(",
"table_name",
")",
"return",
"tables"
] |
Retrieve a list of tables for the dataset.
Parameters
----------
dataset_id : str
The dataset to retrieve table data for.
project_id: str
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
A ``list`` with all table names
|
[
"Retrieve",
"a",
"list",
"of",
"tables",
"for",
"the",
"dataset",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L1417-L1438
|
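A minimal usage sketch for the get_all_tables record above, assuming a pre-configured `client` and a hypothetical dataset id:
# Sketch only: `client` is an assumed, pre-configured BigQueryClient.
for name in client.get_all_tables('events_dataset'):
    print(name)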