| body (string · lengths 26–98.2k) | body_hash (int64 · -9,222,864,604,528,158,000 to 9,221,803,474B) | docstring (string · lengths 1–16.8k) | path (string · lengths 5–230) | name (string · lengths 1–96) | repository_name (string · lengths 7–89) | lang (stringclasses · 1 value) | body_without_docstring (string · lengths 20–98.2k) |
|---|---|---|---|---|---|---|---|
def map_items(func, v):
'A helper to apply `func` to all elements (keys and values) within dict\n\n No type checking of values passed to func is done, so `func`\n should be resilient to values which it should not handle\n\n Initial usecase - apply_recursive(url_fragment, ensure_unicode)\n '
return v... | -4,214,522,138,324,589,600 | A helper to apply `func` to all elements (keys and values) within dict
No type checking of values passed to func is done, so `func`
should be resilient to values which it should not handle
Initial usecase - apply_recursive(url_fragment, ensure_unicode) | datalad/utils.py | map_items | AKSoo/datalad | python | def map_items(func, v):
'A helper to apply `func` to all elements (keys and values) within dict\n\n No type checking of values passed to func is done, so `func`\n should be resilient to values which it should not handle\n\n Initial usecase - apply_recursive(url_fragment, ensure_unicode)\n '
return v... |
def partition(items, predicate=bool):
"Partition `items` by `predicate`.\n\n Parameters\n ----------\n items : iterable\n predicate : callable\n A function that will be mapped over each element in `items`. The\n elements will partitioned based on whether the return value is false or\n ... | 5,726,742,929,948,577,000 | Partition `items` by `predicate`.
Parameters
----------
items : iterable
predicate : callable
A function that will be mapped over each element in `items`. The
elements will partitioned based on whether the return value is false or
true.
Returns
-------
A tuple with two generators, the first for 'false' it... | datalad/utils.py | partition | AKSoo/datalad | python | def partition(items, predicate=bool):
"Partition `items` by `predicate`.\n\n Parameters\n ----------\n items : iterable\n predicate : callable\n A function that will be mapped over each element in `items`. The\n elements will partitioned based on whether the return value is false or\n ... |
def generate_chunks(container, size):
'Given a container, generate chunks from it with size up to `size`\n '
assert (size > 0), 'Size should be non-0 positive'
while container:
(yield container[:size])
container = container[size:] | 4,757,401,932,832,806,000 | Given a container, generate chunks from it with size up to `size` | datalad/utils.py | generate_chunks | AKSoo/datalad | python | def generate_chunks(container, size):
'\n '
assert (size > 0), 'Size should be non-0 positive'
while container:
(yield container[:size])
container = container[size:] |
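The `generate_chunks` body above is shown in full; restated as a runnable sketch with a quick check:

```python
def generate_chunks(container, size):
    """Given a container, generate chunks from it with size up to `size`"""
    assert size > 0, 'Size should be non-0 positive'
    while container:
        yield container[:size]        # emit a slice of at most `size` items
        container = container[size:]  # drop what was just emitted

print(list(generate_chunks([1, 2, 3, 4, 5], 2)))  # [[1, 2], [3, 4], [5]]
```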
def generate_file_chunks(files, cmd=None):
'Given a list of files, generate chunks of them to avoid exceeding cmdline length\n\n Parameters\n ----------\n files: list of str\n cmd: str or list of str, optional\n Command to account for as well\n '
files = ensure_list(files)
cmd = ensure_l... | -910,834,174,419,088,900 | Given a list of files, generate chunks of them to avoid exceeding cmdline length
Parameters
----------
files: list of str
cmd: str or list of str, optional
Command to account for as well | datalad/utils.py | generate_file_chunks | AKSoo/datalad | python | def generate_file_chunks(files, cmd=None):
'Given a list of files, generate chunks of them to avoid exceeding cmdline length\n\n Parameters\n ----------\n files: list of str\n cmd: str or list of str, optional\n Command to account for as well\n '
files = ensure_list(files)
cmd = ensure_l... |
def saved_generator(gen):
'Given a generator returns two generators, where 2nd one just replays\n\n So the first one would be going through the generated items and 2nd one\n would be yielding saved items\n '
saved = []
def gen1():
for x in gen:
saved.append(x)
(yiel... | 6,482,172,339,794,419,000 | Given a generator returns two generators, where 2nd one just replays
So the first one would be going through the generated items and 2nd one
would be yielding saved items | datalad/utils.py | saved_generator | AKSoo/datalad | python | def saved_generator(gen):
'Given a generator returns two generators, where 2nd one just replays\n\n So the first one would be going through the generated items and 2nd one\n would be yielding saved items\n '
saved = []
def gen1():
for x in gen:
saved.append(x)
(yiel... |
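A usage sketch for `saved_generator` based on its docstring (assumes `datalad` is installed):

```python
from datalad.utils import saved_generator

gen1, gen2 = saved_generator(iter([1, 2, 3]))
print(list(gen1))  # [1, 2, 3] -- consumes the source generator, saving each item
print(list(gen2))  # [1, 2, 3] -- replays the saved items without re-consuming
```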
def optional_args(decorator):
'allows a decorator to take optional positional and keyword arguments.\n Assumes that taking a single, callable, positional argument means that\n it is decorating a function, i.e. something like this::\n\n @my_decorator\n def function(): pass\n\n ... | 4,930,945,743,788,969,000 | allows a decorator to take optional positional and keyword arguments.
Assumes that taking a single, callable, positional argument means that
it is decorating a function, i.e. something like this::
@my_decorator
def function(): pass
Calls decorator with decorator(f, `*args`, `**kwargs`) | datalad/utils.py | optional_args | AKSoo/datalad | python | def optional_args(decorator):
'allows a decorator to take optional positional and keyword arguments.\n Assumes that taking a single, callable, positional argument means that\n it is decorating a function, i.e. something like this::\n\n @my_decorator\n def function(): pass\n\n ... |
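A sketch of the documented contract, `decorator(f, *args, **kwargs)`, using a hypothetical `tagged` decorator whose `label` keyword is made up for illustration:

```python
from datalad.utils import optional_args

@optional_args
def tagged(func, label='default'):
    # optional_args arranges for this to be called as tagged(func, label=...)
    func.label = label
    return func

@tagged                    # bare decoration: single callable positional arg
def f(): pass

@tagged(label='special')   # decoration with a keyword argument
def g(): pass

print(f.label, g.label)    # default special
```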
def get_tempfile_kwargs(tkwargs=None, prefix='', wrapped=None):
'Updates kwargs to be passed to tempfile. calls depending on env vars\n '
if (tkwargs is None):
tkwargs_ = {}
else:
tkwargs_ = tkwargs.copy()
if ('prefix' not in tkwargs_):
        tkwargs_['prefix'] = '_'.join(((['datala... | 3,268,730,254,755,503,000 | Updates kwargs to be passed to tempfile. calls depending on env vars | datalad/utils.py | get_tempfile_kwargs | AKSoo/datalad | python | def get_tempfile_kwargs(tkwargs=None, prefix='', wrapped=None):
'\n '
if (tkwargs is None):
tkwargs_ = {}
else:
tkwargs_ = tkwargs.copy()
if ('prefix' not in tkwargs_):
tkwargs_['prefix'] = '_'.join(((['datalad_temp'] + ([prefix] if prefix else [])) + ([] if (on_windows or (not ... |
@optional_args
def line_profile(func):
'Q&D helper to line profile the function and spit out stats\n '
import line_profiler
prof = line_profiler.LineProfiler()
@wraps(func)
def _wrap_line_profile(*args, **kwargs):
try:
pfunc = prof(func)
return pfunc(*args, **kwar... | 7,644,648,597,135,648,000 | Q&D helper to line profile the function and spit out stats | datalad/utils.py | line_profile | AKSoo/datalad | python | @optional_args
def line_profile(func):
'\n '
import line_profiler
prof = line_profiler.LineProfiler()
@wraps(func)
def _wrap_line_profile(*args, **kwargs):
try:
pfunc = prof(func)
return pfunc(*args, **kwargs)
finally:
prof.print_stats()
re... |
@optional_args
def collect_method_callstats(func):
'Figure out methods which call the method repeatedly on the same instance\n\n Use case(s):\n - .repo is expensive since does all kinds of checks.\n - .config is expensive transitively since it calls .repo each time\n\n TODO:\n - fancy one could... | -9,041,022,728,436,863,000 | Figure out methods which call the method repeatedly on the same instance
Use case(s):
- .repo is expensive since does all kinds of checks.
- .config is expensive transitively since it calls .repo each time
TODO:
- fancy one could look through the stack for the same id(self) to see if
that location is alread... | datalad/utils.py | collect_method_callstats | AKSoo/datalad | python | @optional_args
def collect_method_callstats(func):
'Figure out methods which call the method repeatedly on the same instance\n\n Use case(s):\n - .repo is expensive since does all kinds of checks.\n - .config is expensive transitively since it calls .repo each time\n\n TODO:\n - fancy one could... |
def never_fail(f):
'Assure that function never fails -- all exceptions are caught\n\n Returns `None` if function fails internally.\n '
@wraps(f)
def wrapped_func(*args, **kwargs):
try:
return f(*args, **kwargs)
except Exception as e:
lgr.warning(('DataLad inter... | -3,585,793,547,426,326,000 | Assure that function never fails -- all exceptions are caught
Returns `None` if function fails internally. | datalad/utils.py | never_fail | AKSoo/datalad | python | def never_fail(f):
'Assure that function never fails -- all exceptions are caught\n\n Returns `None` if function fails internally.\n '
@wraps(f)
def wrapped_func(*args, **kwargs):
try:
return f(*args, **kwargs)
except Exception as e:
lgr.warning(('DataLad inter... |
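A usage sketch for `never_fail` under its default behavior (exceptions are caught, a warning is logged, and `None` is returned):

```python
from datalad.utils import never_fail

@never_fail
def risky(x):
    return 1 / x

print(risky(2))  # 0.5
print(risky(0))  # None -- the ZeroDivisionError is swallowed with a warning
```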
@contextmanager
def nothing_cm():
'Just a dummy cm to programmically switch context managers'
(yield) | -6,187,207,211,022,128,000 | Just a dummy cm to programmically switch context managers | datalad/utils.py | nothing_cm | AKSoo/datalad | python | @contextmanager
def nothing_cm():
(yield) |
@contextmanager
def swallow_outputs():
'Context manager to help consuming both stdout and stderr, and print()\n\n stdout is available as cm.out and stderr as cm.err whenever cm is the\n yielded context manager.\n Internally uses temporary files to guarantee absent side-effects of swallowing\n into Strin... | 2,491,819,345,219,235,300 | Context manager to help consuming both stdout and stderr, and print()
stdout is available as cm.out and stderr as cm.err whenever cm is the
yielded context manager.
Internally uses temporary files to guarantee absent side-effects of swallowing
into StringIO which lacks .fileno.
print mocking is necessary for some use... | datalad/utils.py | swallow_outputs | AKSoo/datalad | python | @contextmanager
def swallow_outputs():
'Context manager to help consuming both stdout and stderr, and print()\n\n stdout is available as cm.out and stderr as cm.err whenever cm is the\n yielded context manager.\n Internally uses temporary files to guarantee absent side-effects of swallowing\n into Strin... |
@contextmanager
def swallow_logs(new_level=None, file_=None, name='datalad'):
'Context manager to consume all logs.\n\n '
lgr = logging.getLogger(name)
old_level = lgr.level
old_handlers = lgr.handlers
class StringIOAdapter(object):
'Little adapter to help getting out values\n\n A... | 1,683,860,999,491,172,400 | Context manager to consume all logs. | datalad/utils.py | swallow_logs | AKSoo/datalad | python | @contextmanager
def swallow_logs(new_level=None, file_=None, name='datalad'):
'\n\n '
lgr = logging.getLogger(name)
old_level = lgr.level
old_handlers = lgr.handlers
class StringIOAdapter(object):
'Little adapter to help getting out values\n\n And to stay consistent with how swall... |
@contextmanager
def disable_logger(logger=None):
"context manager to temporarily disable logging\n\n This is to provide one of swallow_logs' purposes without unnecessarily\n creating temp files (see gh-1865)\n\n Parameters\n ----------\n logger: Logger\n Logger whose handlers will be ordered t... | -2,598,924,556,852,235,300 | context manager to temporarily disable logging
This is to provide one of swallow_logs' purposes without unnecessarily
creating temp files (see gh-1865)
Parameters
----------
logger: Logger
Logger whose handlers will be ordered to not log anything.
Default: datalad's topmost Logger ('datalad') | datalad/utils.py | disable_logger | AKSoo/datalad | python | @contextmanager
def disable_logger(logger=None):
"context manager to temporarily disable logging\n\n This is to provide one of swallow_logs' purposes without unnecessarily\n creating temp files (see gh-1865)\n\n Parameters\n ----------\n logger: Logger\n Logger whose handlers will be ordered t... |
def setup_exceptionhook(ipython=False):
'Overloads default sys.excepthook with our exceptionhook handler.\n\n If interactive, our exceptionhook handler will invoke\n pdb.post_mortem; if not interactive, then invokes default handler.\n '
def _datalad_pdb_excepthook(type, value, tb):
impor... | 2,870,071,984,776,959,500 | Overloads default sys.excepthook with our exceptionhook handler.
If interactive, our exceptionhook handler will invoke
pdb.post_mortem; if not interactive, then invokes default handler. | datalad/utils.py | setup_exceptionhook | AKSoo/datalad | python | def setup_exceptionhook(ipython=False):
'Overloads default sys.excepthook with our exceptionhook handler.\n\n If interactive, our exceptionhook handler will invoke\n pdb.post_mortem; if not interactive, then invokes default handler.\n '
def _datalad_pdb_excepthook(type, value, tb):
impor... |
def ensure_dir(*args):
'Make sure directory exists.\n\n Joins the list of arguments to an os-specific path to the desired\n directory and creates it, if it not exists yet.\n '
dirname = op.join(*args)
if (not exists(dirname)):
os.makedirs(dirname)
return dirname | 2,885,700,046,755,640,300 | Make sure directory exists.
Joins the list of arguments to an os-specific path to the desired
directory and creates it, if it not exists yet. | datalad/utils.py | ensure_dir | AKSoo/datalad | python | def ensure_dir(*args):
'Make sure directory exists.\n\n Joins the list of arguments to an os-specific path to the desired\n directory and creates it, if it not exists yet.\n '
dirname = op.join(*args)
if (not exists(dirname)):
os.makedirs(dirname)
return dirname |
def updated(d, update):
"Return a copy of the input with the 'update'\n\n Primarily for updating dictionaries\n "
d = d.copy()
d.update(update)
return d | -8,583,097,218,153,299,000 | Return a copy of the input with the 'update'
Primarily for updating dictionaries | datalad/utils.py | updated | AKSoo/datalad | python | def updated(d, update):
"Return a copy of the input with the 'update'\n\n Primarily for updating dictionaries\n "
d = d.copy()
d.update(update)
return d |
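`updated` is shown in full above; a quick restatement demonstrating that the input dict is left untouched:

```python
def updated(d, update):
    """Return a copy of the input with the 'update'"""
    d = d.copy()
    d.update(update)
    return d

base = {'a': 1}
print(updated(base, {'b': 2}))  # {'a': 1, 'b': 2}
print(base)                     # {'a': 1} -- original unchanged
```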
def getpwd():
'Try to return a CWD without dereferencing possible symlinks\n\n This function will try to use PWD environment variable to provide a current\n working directory, possibly with some directories along the path being\n symlinks to other directories. Unfortunately, PWD is used/set only by the\n ... | -2,341,153,836,494,955,000 | Try to return a CWD without dereferencing possible symlinks
This function will try to use PWD environment variable to provide a current
working directory, possibly with some directories along the path being
symlinks to other directories. Unfortunately, PWD is used/set only by the
shell and such functions as `os.chdir... | datalad/utils.py | getpwd | AKSoo/datalad | python | def getpwd():
'Try to return a CWD without dereferencing possible symlinks\n\n This function will try to use PWD environment variable to provide a current\n working directory, possibly with some directories along the path being\n symlinks to other directories. Unfortunately, PWD is used/set only by the\n ... |
def dlabspath(path, norm=False):
'Symlinks-in-the-cwd aware abspath\n\n os.path.abspath relies on os.getcwd() which would not know about symlinks\n in the path\n\n TODO: we might want to norm=True by default to match behavior of\n os .path.abspath?\n '
if (not isabs(path)):
path = op.join... | -8,477,254,246,258,512,000 | Symlinks-in-the-cwd aware abspath
os.path.abspath relies on os.getcwd() which would not know about symlinks
in the path
TODO: we might want to norm=True by default to match behavior of
os .path.abspath? | datalad/utils.py | dlabspath | AKSoo/datalad | python | def dlabspath(path, norm=False):
'Symlinks-in-the-cwd aware abspath\n\n os.path.abspath relies on os.getcwd() which would not know about symlinks\n in the path\n\n TODO: we might want to norm=True by default to match behavior of\n os .path.abspath?\n '
if (not isabs(path)):
path = op.join... |
def with_pathsep(path):
'Little helper to guarantee that path ends with /'
return ((path + sep) if (not path.endswith(sep)) else path) | -6,586,447,538,217,079,000 | Little helper to guarantee that path ends with / | datalad/utils.py | with_pathsep | AKSoo/datalad | python | def with_pathsep(path):
return ((path + sep) if (not path.endswith(sep)) else path) |
def get_path_prefix(path, pwd=None):
'Get path prefix (for current directory)\n\n Returns relative path to the topdir, if we are under topdir, and if not\n absolute path to topdir. If `pwd` is not specified - current directory\n assumed\n '
pwd = (pwd or getpwd())
path = dlabspath(path)
pat... | -7,963,500,683,572,217,000 | Get path prefix (for current directory)
Returns relative path to the topdir, if we are under topdir, and if not
absolute path to topdir. If `pwd` is not specified - current directory
assumed | datalad/utils.py | get_path_prefix | AKSoo/datalad | python | def get_path_prefix(path, pwd=None):
'Get path prefix (for current directory)\n\n Returns relative path to the topdir, if we are under topdir, and if not\n absolute path to topdir. If `pwd` is not specified - current directory\n assumed\n '
pwd = (pwd or getpwd())
path = dlabspath(path)
pat... |
def path_startswith(path, prefix):
'Return True if path starts with prefix path\n\n Parameters\n ----------\n path: str\n prefix: str\n '
(path, prefix) = _get_normalized_paths(path, prefix)
return path.startswith(prefix) | -6,795,381,623,888,553,000 | Return True if path starts with prefix path
Parameters
----------
path: str
prefix: str | datalad/utils.py | path_startswith | AKSoo/datalad | python | def path_startswith(path, prefix):
'Return True if path starts with prefix path\n\n Parameters\n ----------\n path: str\n prefix: str\n '
(path, prefix) = _get_normalized_paths(path, prefix)
return path.startswith(prefix) |
def path_is_subpath(path, prefix):
'Return True if path is a subpath of prefix\n\n It will return False if path == prefix.\n\n Parameters\n ----------\n path: str\n prefix: str\n '
(path, prefix) = _get_normalized_paths(path, prefix)
return ((len(prefix) < len(path)) and path.startswith(pr... | -3,600,815,073,040,493,600 | Return True if path is a subpath of prefix
It will return False if path == prefix.
Parameters
----------
path: str
prefix: str | datalad/utils.py | path_is_subpath | AKSoo/datalad | python | def path_is_subpath(path, prefix):
'Return True if path is a subpath of prefix\n\n It will return False if path == prefix.\n\n Parameters\n ----------\n path: str\n prefix: str\n '
(path, prefix) = _get_normalized_paths(path, prefix)
return ((len(prefix) < len(path)) and path.startswith(pr... |
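A usage sketch contrasting `path_is_subpath` with `path_startswith` above (POSIX-style paths assumed; both rely on the same normalization helper):

```python
from datalad.utils import path_is_subpath, path_startswith

print(path_is_subpath('/data/ds/sub', '/data/ds'))  # True
print(path_is_subpath('/data/ds', '/data/ds'))      # False: equal paths do not count
print(path_startswith('/data/ds', '/data/ds'))      # True: equality is enough here
```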
def knows_annex(path):
'Returns whether at a given path there is information about an annex\n\n It is just a thin wrapper around GitRepo.is_with_annex() classmethod\n which also checks for `path` to exist first.\n\n This includes actually present annexes, but also uninitialized ones, or\n even the prese... | -567,862,675,604,367,170 | Returns whether at a given path there is information about an annex
It is just a thin wrapper around GitRepo.is_with_annex() classmethod
which also checks for `path` to exist first.
This includes actually present annexes, but also uninitialized ones, or
even the presence of a remote annex branch. | datalad/utils.py | knows_annex | AKSoo/datalad | python | def knows_annex(path):
'Returns whether at a given path there is information about an annex\n\n It is just a thin wrapper around GitRepo.is_with_annex() classmethod\n which also checks for `path` to exist first.\n\n This includes actually present annexes, but also uninitialized ones, or\n even the prese... |
@contextmanager
def make_tempfile(content=None, wrapped=None, **tkwargs):
'Helper class to provide a temporary file name and remove it at the end (context manager)\n\n Parameters\n ----------\n mkdir : bool, optional (default: False)\n If True, temporary directory created using tempfile.mkdtemp()\n ... | 7,536,303,112,862,519,000 | Helper class to provide a temporary file name and remove it at the end (context manager)
Parameters
----------
mkdir : bool, optional (default: False)
If True, temporary directory created using tempfile.mkdtemp()
content : str or bytes, optional
Content to be stored in the file created
wrapped : function, opti... | datalad/utils.py | make_tempfile | AKSoo/datalad | python | @contextmanager
def make_tempfile(content=None, wrapped=None, **tkwargs):
'Helper class to provide a temporary file name and remove it at the end (context manager)\n\n Parameters\n ----------\n mkdir : bool, optional (default: False)\n If True, temporary directory created using tempfile.mkdtemp()\n ... |
def _path_(*p):
'Given a path in POSIX" notation, regenerate one in native to the env one'
if on_windows:
return op.join(*map((lambda x: op.join(*x.split('/'))), p))
else:
return op.join(*p) | -5,201,050,113,459,553,000 | Given a path in POSIX" notation, regenerate one in native to the env one | datalad/utils.py | _path_ | AKSoo/datalad | python | def _path_(*p):
if on_windows:
return op.join(*map((lambda x: op.join(*x.split('/'))), p))
else:
return op.join(*p) |
def get_timestamp_suffix(time_=None, prefix='-'):
'Return a time stamp (full date and time up to second)\n\n primarily to be used for generation of log files names\n '
args = []
if (time_ is not None):
if isinstance(time_, int):
time_ = time.gmtime(time_)
args.append(time_)... | -1,093,651,240,819,786,200 | Return a time stamp (full date and time up to second)
primarily to be used for generation of log files names | datalad/utils.py | get_timestamp_suffix | AKSoo/datalad | python | def get_timestamp_suffix(time_=None, prefix='-'):
'Return a time stamp (full date and time up to second)\n\n primarily to be used for generation of log files names\n '
args = []
if (time_ is not None):
if isinstance(time_, int):
time_ = time.gmtime(time_)
args.append(time_)... |
def get_logfilename(dspath, cmd='datalad'):
"Return a filename to use for logging under a dataset/repository\n\n directory would be created if doesn't exist, but dspath must exist\n and be a directory\n "
assert exists(dspath)
assert isdir(dspath)
ds_logdir = ensure_dir(dspath, '.git', 'datalad... | -7,525,460,786,256,228,000 | Return a filename to use for logging under a dataset/repository
directory would be created if doesn't exist, but dspath must exist
and be a directory | datalad/utils.py | get_logfilename | AKSoo/datalad | python | def get_logfilename(dspath, cmd='datalad'):
"Return a filename to use for logging under a dataset/repository\n\n directory would be created if doesn't exist, but dspath must exist\n and be a directory\n "
assert exists(dspath)
assert isdir(dspath)
ds_logdir = ensure_dir(dspath, '.git', 'datalad... |
def get_trace(edges, start, end, trace=None):
"Return the trace/path to reach a node in a tree.\n\n Parameters\n ----------\n edges : sequence(2-tuple)\n The tree given by a sequence of edges (parent, child) tuples. The\n nodes can be identified by any value and data type that supports\n the... | 2,135,985,338,423,239,000 | Return the trace/path to reach a node in a tree.
Parameters
----------
edges : sequence(2-tuple)
The tree given by a sequence of edges (parent, child) tuples. The
nodes can be identified by any value and data type that supports
the '==' operation.
start :
Identifier of the start node. Must be present as a valu... | datalad/utils.py | get_trace | AKSoo/datalad | python | def get_trace(edges, start, end, trace=None):
"Return the trace/path to reach a node in a tree.\n\n Parameters\n ----------\n edges : sequence(2-tuple)\n The tree given by a sequence of edges (parent, child) tuples. The\n nodes can be identified by any value and data type that supports\n the... |
def get_dataset_root(path):
"Return the root of an existent dataset containing a given path\n\n The root path is returned in the same absolute or relative form\n as the input argument. If no associated dataset exists, or the\n input path doesn't exist, None is returned.\n\n If `path` is a symlink or som... | 1,666,258,663,008,669,700 | Return the root of an existent dataset containing a given path
The root path is returned in the same absolute or relative form
as the input argument. If no associated dataset exists, or the
input path doesn't exist, None is returned.
If `path` is a symlink or something other than a directory, its
the root dataset con... | datalad/utils.py | get_dataset_root | AKSoo/datalad | python | def get_dataset_root(path):
"Return the root of an existent dataset containing a given path\n\n The root path is returned in the same absolute or relative form\n as the input argument. If no associated dataset exists, or the\n input path doesn't exist, None is returned.\n\n If `path` is a symlink or som... |
def try_multiple(ntrials, exception, base, f, *args, **kwargs):
'Call f multiple times making exponentially growing delay between the calls'
for trial in range(1, (ntrials + 1)):
try:
return f(*args, **kwargs)
except exception as exc:
if (trial == ntrials):
... | -2,131,526,919,984,439,300 | Call f multiple times making exponentially growing delay between the calls | datalad/utils.py | try_multiple | AKSoo/datalad | python | def try_multiple(ntrials, exception, base, f, *args, **kwargs):
for trial in range(1, (ntrials + 1)):
try:
return f(*args, **kwargs)
except exception as exc:
if (trial == ntrials):
raise
t = (base ** trial)
lgr.warning('Caught %s o... |
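A usage sketch for `try_multiple`: the delay grows as `base ** trial` between attempts, and the failure on the last trial is re-raised (values here are made up):

```python
from datalad.utils import try_multiple

attempts = {'n': 0}

def flaky():
    attempts['n'] += 1
    if attempts['n'] < 3:
        raise ConnectionError('transient failure')
    return 'ok'

# Up to 5 trials with ~1 s exponential backoff (base ** trial seconds).
print(try_multiple(5, ConnectionError, 1.01, flaky))  # 'ok' on the third attempt
```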
@optional_args
def try_multiple_dec(f, ntrials=None, duration=0.1, exceptions=None, increment_type=None, exceptions_filter=None, logger=None):
"Decorator to try function multiple times.\n\n Main purpose is to decorate functions dealing with removal of files/directories\n and which might need a few seconds to ... | -414,031,308,378,374,340 | Decorator to try function multiple times.
Main purpose is to decorate functions dealing with removal of files/directories
and which might need a few seconds to work correctly on Windows which takes
its time to release files/directories.
Parameters
----------
ntrials: int, optional
duration: float, optional
Seconds ... | datalad/utils.py | try_multiple_dec | AKSoo/datalad | python | @optional_args
def try_multiple_dec(f, ntrials=None, duration=0.1, exceptions=None, increment_type=None, exceptions_filter=None, logger=None):
"Decorator to try function multiple times.\n\n Main purpose is to decorate functions dealing with removal of files/directories\n and which might need a few seconds to ... |
@try_multiple_dec
def unlink(f):
'\'Robust\' unlink. Would try multiple times\n\n On windows boxes there is evidence for a latency of more than a second\n until a file is considered no longer "in-use".\n WindowsError is not known on Linux, and if IOError or any other\n exception\n is thrown then if ... | 758,740,387,580,010,900 | 'Robust' unlink. Would try multiple times
On windows boxes there is evidence for a latency of more than a second
until a file is considered no longer "in-use".
WindowsError is not known on Linux, and if IOError or any other
exception
is thrown then if except statement has WindowsError in it -- NameError
also see gh-2... | datalad/utils.py | unlink | AKSoo/datalad | python | @try_multiple_dec
def unlink(f):
'\'Robust\' unlink. Would try multiple times\n\n On windows boxes there is evidence for a latency of more than a second\n until a file is considered no longer "in-use".\n WindowsError is not known on Linux, and if IOError or any other\n exception\n is thrown then if ... |
@try_multiple_dec
def _rmtree(*args, **kwargs):
'Just a helper to decorate shutil.rmtree.\n\n rmtree defined above does more and ideally should not itself be decorated\n since a recursive definition and does checks for open files inside etc -\n might be too runtime expensive\n '
return shutil.rmtree... | -4,898,703,226,461,722,000 | Just a helper to decorate shutil.rmtree.
rmtree defined above does more and ideally should not itself be decorated
since a recursive definition and does checks for open files inside etc -
might be too runtime expensive | datalad/utils.py | _rmtree | AKSoo/datalad | python | @try_multiple_dec
def _rmtree(*args, **kwargs):
'Just a helper to decorate shutil.rmtree.\n\n rmtree defined above does more and ideally should not itself be decorated\n since a recursive definition and does checks for open files inside etc -\n might be too runtime expensive\n '
return shutil.rmtree... |
def slash_join(base, extension):
"Join two strings with a '/', avoiding duplicate slashes\n\n If any of the strings is None the other is returned as is.\n "
if (extension is None):
return base
if (base is None):
return extension
return '/'.join((base.rstrip('/'), extension.lstrip('... | 1,535,180,192,747,289,600 | Join two strings with a '/', avoiding duplicate slashes
If any of the strings is None the other is returned as is. | datalad/utils.py | slash_join | AKSoo/datalad | python | def slash_join(base, extension):
"Join two strings with a '/', avoiding duplicate slashes\n\n If any of the strings is None the other is returned as is.\n "
if (extension is None):
return base
if (base is None):
return extension
return '/'.join((base.rstrip('/'), extension.lstrip('... |
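`slash_join` restated as runnable code, with the truncated final `lstrip` completed in the obvious way implied by the docstring:

```python
def slash_join(base, extension):
    """Join two strings with a '/', avoiding duplicate slashes"""
    if extension is None:
        return base
    if base is None:
        return extension
    return '/'.join((base.rstrip('/'), extension.lstrip('/')))

print(slash_join('http://example.com/', '/api'))  # http://example.com/api
print(slash_join(None, 'api'))                    # api
```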
def open_r_encdetect(fname, readahead=1000):
'Return a file object in read mode with auto-detected encoding\n\n This is helpful when dealing with files of unknown encoding.\n\n Parameters\n ----------\n readahead: int, optional\n How many bytes to read for guessing the encoding type. If\n neg... | -4,121,653,151,508,509,000 | Return a file object in read mode with auto-detected encoding
This is helpful when dealing with files of unknown encoding.
Parameters
----------
readahead: int, optional
How many bytes to read for guessing the encoding type. If
negative - full file will be read | datalad/utils.py | open_r_encdetect | AKSoo/datalad | python | def open_r_encdetect(fname, readahead=1000):
'Return a file object in read mode with auto-detected encoding\n\n This is helpful when dealing with files of unknown encoding.\n\n Parameters\n ----------\n readahead: int, optional\n How many bytes to read for guessing the encoding type. If\n neg... |
def read_file(fname, decode=True):
'A helper to read file passing content via ensure_unicode\n\n Parameters\n ----------\n decode: bool, optional\n if False, no ensure_unicode and file content returned as bytes\n '
with open(fname, 'rb') as f:
content = f.read()
return (ensure_unico... | -4,267,377,191,964,235,000 | A helper to read file passing content via ensure_unicode
Parameters
----------
decode: bool, optional
if False, no ensure_unicode and file content returned as bytes | datalad/utils.py | read_file | AKSoo/datalad | python | def read_file(fname, decode=True):
'A helper to read file passing content via ensure_unicode\n\n Parameters\n ----------\n decode: bool, optional\n if False, no ensure_unicode and file content returned as bytes\n '
with open(fname, 'rb') as f:
content = f.read()
return (ensure_unico... |
def read_csv_lines(fname, dialect=None, readahead=16384, **kwargs):
'A generator of dict records from a CSV/TSV\n\n Automatically guesses the encoding for each record to convert to UTF-8\n\n Parameters\n ----------\n fname: str\n Filename\n dialect: str, optional\n Dialect to specify to csv... | -6,179,929,616,358,541,000 | A generator of dict records from a CSV/TSV
Automatically guesses the encoding for each record to convert to UTF-8
Parameters
----------
fname: str
Filename
dialect: str, optional
Dialect to specify to csv.reader. If not specified -- guessed from
the file, if fails to guess, "excel-tab" is assumed
readahead: int... | datalad/utils.py | read_csv_lines | AKSoo/datalad | python | def read_csv_lines(fname, dialect=None, readahead=16384, **kwargs):
'A generator of dict records from a CSV/TSV\n\n Automatically guesses the encoding for each record to convert to UTF-8\n\n Parameters\n ----------\n fname: str\n Filename\n dialect: str, optional\n Dialect to specify to csv... |
def import_modules(modnames, pkg, msg='Failed to import {module}', log=lgr.debug):
"Helper to import a list of modules without failing if N/A\n\n Parameters\n ----------\n modnames: list of str\n List of module names to import\n pkg: str\n Package under which to import\n msg: str, optional\... | 3,748,128,079,028,701,000 | Helper to import a list of modules without failing if N/A
Parameters
----------
modnames: list of str
List of module names to import
pkg: str
Package under which to import
msg: str, optional
Message template for .format() to log at DEBUG level if import fails.
Keys {module} and {package} will be provided and '... | datalad/utils.py | import_modules | AKSoo/datalad | python | def import_modules(modnames, pkg, msg='Failed to import {module}', log=lgr.debug):
"Helper to import a list of modules without failing if N/A\n\n Parameters\n ----------\n modnames: list of str\n List of module names to import\n pkg: str\n Package under which to import\n msg: str, optional\... |
def import_module_from_file(modpath, pkg=None, log=lgr.debug):
'Import provided module given a path\n\n TODO:\n - RF/make use of it in pipeline.py which has similar logic\n - join with import_modules above?\n\n Parameters\n ----------\n pkg: module, optional\n If provided, and modpath is und... | 9,020,425,505,208,875,000 | Import provided module given a path
TODO:
- RF/make use of it in pipeline.py which has similar logic
- join with import_modules above?
Parameters
----------
pkg: module, optional
If provided, and modpath is under pkg.__path__, relative import will be
used | datalad/utils.py | import_module_from_file | AKSoo/datalad | python | def import_module_from_file(modpath, pkg=None, log=lgr.debug):
'Import provided module given a path\n\n TODO:\n - RF/make use of it in pipeline.py which has similar logic\n - join with import_modules above?\n\n Parameters\n ----------\n pkg: module, optional\n If provided, and modpath is und... |
def get_encoding_info():
'Return a dictionary with various encoding/locale information'
import sys, locale
from collections import OrderedDict
return OrderedDict([('default', sys.getdefaultencoding()), ('filesystem', sys.getfilesystemencoding()), ('locale.prefered', locale.getpreferredencoding())]) | -5,240,476,570,044,784,000 | Return a dictionary with various encoding/locale information | datalad/utils.py | get_encoding_info | AKSoo/datalad | python | def get_encoding_info():
import sys, locale
from collections import OrderedDict
return OrderedDict([('default', sys.getdefaultencoding()), ('filesystem', sys.getfilesystemencoding()), ('locale.prefered', locale.getpreferredencoding())]) |
def create_tree_archive(path, name, load, overwrite=False, archives_leading_dir=True):
'Given an archive `name`, create under `path` with specified `load` tree\n '
from datalad.support.archives import compress_files
dirname = file_basename(name)
full_dirname = op.join(path, dirname)
os.makedirs(f... | 4,681,706,310,525,413,000 | Given an archive `name`, create under `path` with specified `load` tree | datalad/utils.py | create_tree_archive | AKSoo/datalad | python | def create_tree_archive(path, name, load, overwrite=False, archives_leading_dir=True):
'\n '
from datalad.support.archives import compress_files
dirname = file_basename(name)
full_dirname = op.join(path, dirname)
os.makedirs(full_dirname)
create_tree(full_dirname, load, archives_leading_dir=a... |
def create_tree(path, tree, archives_leading_dir=True, remove_existing=False):
'Given a list of tuples (name, load) create such a tree\n\n if load is a tuple itself -- that would create either a subtree or an archive\n with that content and place it into the tree if name ends with .tar.gz\n '
lgr.log(5... | -7,951,379,912,310,503,000 | Given a list of tuples (name, load) create such a tree
if load is a tuple itself -- that would create either a subtree or an archive
with that content and place it into the tree if name ends with .tar.gz | datalad/utils.py | create_tree | AKSoo/datalad | python | def create_tree(path, tree, archives_leading_dir=True, remove_existing=False):
'Given a list of tuples (name, load) create such a tree\n\n if load is a tuple itself -- that would create either a subtree or an archive\n with that content and place it into the tree if name ends with .tar.gz\n '
lgr.log(5... |
def get_suggestions_msg(values, known, sep='\n '):
'Return a formatted string with suggestions for values given the known ones\n '
import difflib
suggestions = []
for value in ensure_list(values):
suggestions += difflib.get_close_matches(value, known)
suggestions = unique(suggestio... | -2,174,618,792,934,269,700 | Return a formatted string with suggestions for values given the known ones | datalad/utils.py | get_suggestions_msg | AKSoo/datalad | python | def get_suggestions_msg(values, known, sep='\n '):
'\n '
import difflib
suggestions = []
for value in ensure_list(values):
suggestions += difflib.get_close_matches(value, known)
suggestions = unique(suggestions)
msg = 'Did you mean any of these?'
if suggestions:
if ... |
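A usage sketch for `get_suggestions_msg` (assumes `datalad` is installed; the exact formatting of the returned string is truncated above):

```python
from datalad.utils import get_suggestions_msg

# Returns a "Did you mean any of these?" hint listing close matches,
# here suggesting 'commit' for the typo 'comit'.
print(get_suggestions_msg('comit', ['commit', 'clone', 'config']))
```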
def bytes2human(n, format='%(value).1f %(symbol)sB'):
'\n Convert n bytes into a human readable string based on format.\n symbols can be either "customary", "customary_ext", "iec" or "iec_ext",\n see: http://goo.gl/kTQMs\n\n >>> from datalad.utils import bytes2human\n >>> bytes2human(1)\n \'... | -7,311,965,313,156,211,000 | Convert n bytes into a human readable string based on format.
symbols can be either "customary", "customary_ext", "iec" or "iec_ext",
see: http://goo.gl/kTQMs
>>> from datalad.utils import bytes2human
>>> bytes2human(1)
'1.0 B'
>>> bytes2human(1024)
'1.0 KB'
>>> bytes2human(1048576)
'1.0 MB'
>>> bytes2... | datalad/utils.py | bytes2human | AKSoo/datalad | python | def bytes2human(n, format='%(value).1f %(symbol)sB'):
'\n Convert n bytes into a human readable string based on format.\n symbols can be either "customary", "customary_ext", "iec" or "iec_ext",\n see: http://goo.gl/kTQMs\n\n >>> from datalad.utils import bytes2human\n >>> bytes2human(1)\n \'... |
def quote_cmdlinearg(arg):
'Perform platform-appropriate argument quoting'
return ('"{}"'.format(arg.replace('"', '""')) if on_windows else shlex_quote(arg)) | 9,031,621,463,784,785,000 | Perform platform-appropriate argument quoting | datalad/utils.py | quote_cmdlinearg | AKSoo/datalad | python | def quote_cmdlinearg(arg):
    return ('"{}"'.format(arg.replace('"', '""')) if on_windows else shlex_quote(arg)) |
def guard_for_format(arg):
"Replace { and } with {{ and }}\n\n To be used in cases if arg is not expected to have provided\n by user .format() placeholders, but 'arg' might become a part\n of a composite passed to .format(), e.g. via 'Run'\n "
return arg.replace('{', '{{').replace('}', '}}') | 4,900,778,269,469,641,000 | Replace { and } with {{ and }}
To be used in cases if arg is not expected to have provided
by user .format() placeholders, but 'arg' might become a part
of a composite passed to .format(), e.g. via 'Run' | datalad/utils.py | guard_for_format | AKSoo/datalad | python | def guard_for_format(arg):
"Replace { and } with {{ and }}\n\n To be used in cases if arg is not expected to have provided\n by user .format() placeholders, but 'arg' might become a part\n of a composite passed to .format(), e.g. via 'Run'\n "
return arg.replace('{', '{{').replace('}', '}}') |
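`guard_for_format` restated as runnable code, showing how the escaped braces survive a later `.format()` call:

```python
def guard_for_format(arg):
    """Replace { and } with {{ and }}"""
    return arg.replace('{', '{{').replace('}', '}}')

tmpl = 'run {inputs} ' + guard_for_format('literal {braces}')
print(tmpl.format(inputs='a.txt'))  # run a.txt literal {braces}
```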
def join_cmdline(args):
'Join command line args into a string using quote_cmdlinearg\n '
return ' '.join(map(quote_cmdlinearg, args)) | -8,821,647,151,772,595,000 | Join command line args into a string using quote_cmdlinearg | datalad/utils.py | join_cmdline | AKSoo/datalad | python | def join_cmdline(args):
'\n '
return ' '.join(map(quote_cmdlinearg, args)) |
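A usage sketch for `join_cmdline`, which applies `quote_cmdlinearg` above to each argument (POSIX output shown):

```python
from datalad.utils import join_cmdline

print(join_cmdline(['git', 'commit', '-m', 'fix: handle "odd" names']))
# git commit -m 'fix: handle "odd" names'
```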
def split_cmdline(s):
'Perform platform-appropriate command line splitting.\n\n Identical to `shlex.split()` on non-windows platforms.\n\n Modified from https://stackoverflow.com/a/35900070\n '
if (not on_windows):
return shlex_split(s)
RE_CMD_LEX = '"((?:""|\\\\["\\\\]|[^"])*)"?()|(\\\\\\\... | 6,851,305,332,115,587,000 | Perform platform-appropriate command line splitting.
Identical to `shlex.split()` on non-windows platforms.
Modified from https://stackoverflow.com/a/35900070 | datalad/utils.py | split_cmdline | AKSoo/datalad | python | def split_cmdline(s):
'Perform platform-appropriate command line splitting.\n\n Identical to `shlex.split()` on non-windows platforms.\n\n Modified from https://stackoverflow.com/a/35900070\n '
if (not on_windows):
return shlex_split(s)
    RE_CMD_LEX = '"((?:""|\\\\["\\\\]|[^"])*)"?()|(\\\\\\\\(... |
def get_wrapped_class(wrapped):
'Determine the command class a wrapped __call__ belongs to'
mod = sys.modules[wrapped.__module__]
command_class_name = wrapped.__qualname__.split('.')[(- 2)]
_func_class = mod.__dict__[command_class_name]
lgr.debug('Determined class of decorated function: %s', _func_c... | -6,224,155,212,005,527,000 | Determine the command class a wrapped __call__ belongs to | datalad/utils.py | get_wrapped_class | AKSoo/datalad | python | def get_wrapped_class(wrapped):
mod = sys.modules[wrapped.__module__]
command_class_name = wrapped.__qualname__.split('.')[(- 2)]
_func_class = mod.__dict__[command_class_name]
lgr.debug('Determined class of decorated function: %s', _func_class)
return _func_class |
def check_symlink_capability(path, target):
"helper similar to datalad.tests.utils.has_symlink_capability\n\n However, for use in a datalad command context, we shouldn't\n assume to be able to write to tmpfile and also not import a whole lot from\n datalad's test machinery. Finally, we want to know, whethe... | 1,218,640,300,326,594,300 | helper similar to datalad.tests.utils.has_symlink_capability
However, for use in a datalad command context, we shouldn't
assume to be able to write to tmpfile and also not import a whole lot from
datalad's test machinery. Finally, we want to know, whether we can create a
symlink at a specific location, not just somewh... | datalad/utils.py | check_symlink_capability | AKSoo/datalad | python | def check_symlink_capability(path, target):
"helper similar to datalad.tests.utils.has_symlink_capability\n\n However, for use in a datalad command context, we shouldn't\n assume to be able to write to tmpfile and also not import a whole lot from\n datalad's test machinery. Finally, we want to know, whethe... |
def lmtime(filepath, mtime):
'Set mtime for files. On Windows a merely adapter to os.utime\n '
os.utime(filepath, (time.time(), mtime)) | 473,848,352,068,869,500 | Set mtime for files. On Windows a merely adapter to os.utime | datalad/utils.py | lmtime | AKSoo/datalad | python | def lmtime(filepath, mtime):
'\n '
os.utime(filepath, (time.time(), mtime)) |
def lmtime(filepath, mtime):
'Set mtime for files, while not de-referencing symlinks.\n\n To overcome absence of os.lutime\n\n Works only on linux and OSX ATM\n '
from .cmd import WitlessRunner
smtime = time.strftime('%Y%m%d%H%M.%S', time.localtime(mtime))
lgr.log(3, 'Setting mtime ... | 1,039,836,322,329,771,400 | Set mtime for files, while not de-referencing symlinks.
To overcome absence of os.lutime
Works only on linux and OSX ATM | datalad/utils.py | lmtime | AKSoo/datalad | python | def lmtime(filepath, mtime):
'Set mtime for files, while not de-referencing symlinks.\n\n To overcome absence of os.lutime\n\n Works only on linux and OSX ATM\n '
from .cmd import WitlessRunner
smtime = time.strftime('%Y%m%d%H%M.%S', time.localtime(mtime))
lgr.log(3, 'Setting mtime ... |
def format_element(self, elem, format_spec):
'Format a single element\n\n For sequences, this is called once for each element in a\n sequence. For anything else, it is called on the entire\n object. It is intended to be overridden in subclases.\n '
return self.element_formatter.forma... | 6,269,833,802,502,472,000 | Format a single element
For sequences, this is called once for each element in a
sequence. For anything else, it is called on the entire
object. It is intended to be overridden in subclases. | datalad/utils.py | format_element | AKSoo/datalad | python | def format_element(self, elem, format_spec):
'Format a single element\n\n For sequences, this is called once for each element in a\n sequence. For anything else, it is called on the entire\n object. It is intended to be overridden in subclases.\n '
return self.element_formatter.forma... |
def __init__(self, name, executable=False):
'\n\n Parameters\n ----------\n name : str\n Name of the file\n executable: bool, optional\n Make it executable\n '
self.name = name
self.executable = executable | -4,959,471,173,376,464,000 | Parameters
----------
name : str
Name of the file
executable: bool, optional
Make it executable | datalad/utils.py | __init__ | AKSoo/datalad | python | def __init__(self, name, executable=False):
'\n\n Parameters\n ----------\n name : str\n Name of the file\n executable: bool, optional\n Make it executable\n '
self.name = name
self.executable = executable |
def assert_logged(self, msg=None, level=None, regex=True, **kwargs):
'Provide assertion on whether a msg was logged at a given level\n\n If neither `msg` nor `level` provided, checks if anything was logged\n at all.\n\n Parameters\n ----------\n msg: str, optio... | -3,269,136,505,347,764,700 | Provide assertion on whether a msg was logged at a given level
If neither `msg` nor `level` provided, checks if anything was logged
at all.
Parameters
----------
msg: str, optional
Message (as a regular expression, if `regex`) to be searched.
If no msg provided, checks if anything was logged at a given level.
lev... | datalad/utils.py | assert_logged | AKSoo/datalad | python | def assert_logged(self, msg=None, level=None, regex=True, **kwargs):
'Provide assertion on whether a msg was logged at a given level\n\n If neither `msg` nor `level` provided, checks if anything was logged\n at all.\n\n Parameters\n ----------\n msg: str, optio... |
@cached_property
def openapi_types():
'\n This must be a method because a model may have properties that are\n of type self, this must run after the class is loaded\n\n Returns\n openapi_types (dict): The key is attribute name\n and the value is attribute type.\n ... | 592,781,851,320,592,800 | This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type. | clients/keto/python/ory_keto_client/model/delete_ory_access_control_policy_internal_server_error.py | openapi_types | Stackwalkerllc/sdk | python | @cached_property
def openapi_types():
'\n This must be a method because a model may have properties that are\n of type self, this must run after the class is loaded\n\n Returns\n openapi_types (dict): The key is attribute name\n and the value is attribute type.\n ... |
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs):
'DeleteOryAccessControlPolicyInternalServerError - a model defined in OpenAPI\n\n Keyword Args:\n _check_type (bool): if True, values for parameters in openapi_types\n will be type checked and a Typ... | -3,289,304,096,865,934,000 | DeleteOryAccessControlPolicyInternalServerError - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defa... | clients/keto/python/ory_keto_client/model/delete_ory_access_control_policy_internal_server_error.py | __init__ | Stackwalkerllc/sdk | python | @convert_js_args_to_python_args
def __init__(self, *args, **kwargs):
'DeleteOryAccessControlPolicyInternalServerError - a model defined in OpenAPI\n\n Keyword Args:\n _check_type (bool): if True, values for parameters in openapi_types\n will be type checked and a Typ... |
def set_line_str(self, line_str):
'\n Set line_str.\n\n Line_str is only writeable if LogEvent was created from a string,\n not from a system.profile documents.\n '
if (not self.from_string):
raise ValueError("can't set line_str for LogEvent created from system.profile docume... | 7,303,293,722,448,755,000 | Set line_str.
Line_str is only writeable if LogEvent was created from a string,
not from a system.profile documents. | mtools/util/logevent.py | set_line_str | sindbach/mtools | python | def set_line_str(self, line_str):
'\n Set line_str.\n\n Line_str is only writeable if LogEvent was created from a string,\n not from a system.profile documents.\n '
if (not self.from_string):
raise ValueError("can't set line_str for LogEvent created from system.profile docume... |
def get_line_str(self):
'Return line_str depending on source, logfile or system.profile.'
if self.from_string:
return ' '.join([s for s in [self.merge_marker_str, self._datetime_str, self._line_str] if s])
else:
return ' '.join([s for s in [self._datetime_str, self._line_str] if s]) | -4,736,257,944,441,563,000 | Return line_str depending on source, logfile or system.profile. | mtools/util/logevent.py | get_line_str | sindbach/mtools | python | def get_line_str(self):
if self.from_string:
return ' '.join([s for s in [self.merge_marker_str, self._datetime_str, self._line_str] if s])
else:
return ' '.join([s for s in [self._datetime_str, self._line_str] if s]) |
@property
def split_tokens(self):
'Split string into tokens (lazy).'
if (not self._split_tokens_calculated):
self._split_tokens = self._line_str.split()
self._split_tokens_calculated = True
return self._split_tokens | 5,830,234,605,733,159,000 | Split string into tokens (lazy). | mtools/util/logevent.py | split_tokens | sindbach/mtools | python | @property
def split_tokens(self):
if (not self._split_tokens_calculated):
self._split_tokens = self._line_str.split()
self._split_tokens_calculated = True
return self._split_tokens |
@property
def duration(self):
'Calculate duration if available (lazy).'
if (not self._duration_calculated):
self._duration_calculated = True
line_str = self.line_str
if (line_str and line_str.endswith('ms') and ('Scheduled new oplog query' not in line_str)):
try:
... | -6,191,071,677,299,454,000 | Calculate duration if available (lazy). | mtools/util/logevent.py | duration | sindbach/mtools | python | @property
def duration(self):
if (not self._duration_calculated):
self._duration_calculated = True
line_str = self.line_str
if (line_str and line_str.endswith('ms') and ('Scheduled new oplog query' not in line_str)):
try:
space_pos = line_str.rfind(' ')
... |
@property
def datetime(self):
'Extract datetime if available (lazy).'
if (not self._datetime_calculated):
self._datetime_calculated = True
split_tokens = self.split_tokens[:10]
for offs in range(len(split_tokens)):
dt = self._match_datetime_pattern(split_tokens[offs:(offs + 4... | 7,229,479,786,862,700,000 | Extract datetime if available (lazy). | mtools/util/logevent.py | datetime | sindbach/mtools | python | @property
def datetime(self):
if (not self._datetime_calculated):
self._datetime_calculated = True
split_tokens = self.split_tokens[:10]
for offs in range(len(split_tokens)):
dt = self._match_datetime_pattern(split_tokens[offs:(offs + 4)])
if dt:
... |
def _match_datetime_pattern(self, tokens):
"\n Match the datetime pattern at the beginning of the token list.\n\n There are several formats that this method needs to understand\n and distinguish between (see MongoDB's SERVER-7965):\n\n ctime-pre2.4 Wed Dec 31 19:00:00\n ctime ... | 2,427,690,610,165,711,000 | Match the datetime pattern at the beginning of the token list.
There are several formats that this method needs to understand
and distinguish between (see MongoDB's SERVER-7965):
ctime-pre2.4 Wed Dec 31 19:00:00
ctime Wed Dec 31 19:00:00.000
iso8601-utc 1970-01-01T00:00:00.000Z
iso8601-local 1969-1... | mtools/util/logevent.py | _match_datetime_pattern | sindbach/mtools | python | def _match_datetime_pattern(self, tokens):
"\n Match the datetime pattern at the beginning of the token list.\n\n There are several formats that this method needs to understand\n and distinguish between (see MongoDB's SERVER-7965):\n\n ctime-pre2.4 Wed Dec 31 19:00:00\n ctime ... |
@property
def thread(self):
'Extract thread name if available (lazy).'
if (not self._thread_calculated):
self._thread_calculated = True
split_tokens = self.split_tokens
if (not self.datetime_nextpos):
return None
if (len(split_tokens) <= self.datetime_nextpos):
... | 5,699,371,472,807,143,000 | Extract thread name if available (lazy). | mtools/util/logevent.py | thread | sindbach/mtools | python | @property
def thread(self):
if (not self._thread_calculated):
self._thread_calculated = True
split_tokens = self.split_tokens
if (not self.datetime_nextpos):
return None
if (len(split_tokens) <= self.datetime_nextpos):
return None
connection_token... |
@property
def conn(self):
"\n Extract conn name if available (lazy).\n\n This value is None for all lines except the log lines related to\n connections, that is lines matching '\\[conn[0-9]+\\]' or\n '\\[(initandlisten|mongosMain)\\] .* connection accepted from'.\n "
self.thre... | 5,706,740,834,924,622,000 | Extract conn name if available (lazy).
This value is None for all lines except the log lines related to
connections, that is lines matching '\[conn[0-9]+\]' or
'\[(initandlisten|mongosMain)\] .* connection accepted from'. | mtools/util/logevent.py | conn | sindbach/mtools | python | @property
def conn(self):
"\n Extract conn name if available (lazy).\n\n This value is None for all lines except the log lines related to\n connections, that is lines matching '\\[conn[0-9]+\\]' or\n '\\[(initandlisten|mongosMain)\\] .* connection accepted from'.\n "
self.thre... |
@property
def operation(self):
'\n Extract operation if available (lazy).\n\n Operations: query, insert, update, remove, getmore, command\n '
if (not self._operation_calculated):
self._operation_calculated = True
self._extract_operation_and_namespace()
return self._opera... | -3,139,542,630,484,868,600 | Extract operation if available (lazy).
Operations: query, insert, update, remove, getmore, command | mtools/util/logevent.py | operation | sindbach/mtools | python | @property
def operation(self):
'\n Extract operation if available (lazy).\n\n Operations: query, insert, update, remove, getmore, command\n '
if (not self._operation_calculated):
self._operation_calculated = True
self._extract_operation_and_namespace()
return self._opera... |
@property
def namespace(self):
'Extract namespace if available (lazy).'
if (not self._operation_calculated):
self._operation_calculated = True
self._extract_operation_and_namespace()
return self._namespace | -8,693,397,654,284,354,000 | Extract namespace if available (lazy). | mtools/util/logevent.py | namespace | sindbach/mtools | python | @property
def namespace(self):
if (not self._operation_calculated):
self._operation_calculated = True
self._extract_operation_and_namespace()
return self._namespace |
def _extract_operation_and_namespace(self):
"\n Helper method to extract both operation and namespace from a logevent.\n\n It doesn't make sense to only extract one as they appear back to back\n in the token list.\n "
split_tokens = self.split_tokens
if (not self._datetime_nextpo... | 6,718,564,088,524,435,000 | Helper method to extract both operation and namespace from a logevent.
It doesn't make sense to only extract one as they appear back to back
in the token list. | mtools/util/logevent.py | _extract_operation_and_namespace | sindbach/mtools | python | def _extract_operation_and_namespace(self):
"\n Helper method to extract both operation and namespace from a logevent.\n\n It doesn't make sense to only extract one as they appear back to back\n in the token list.\n "
split_tokens = self.split_tokens
if (not self._datetime_nextpo... |
@property
def pattern(self):
'Extract query pattern from operations.'
if (not self._pattern):
if ((self.operation in ['query', 'getmore', 'update', 'remove']) or (self.command in ['count', 'findandmodify'])):
self._pattern = self._find_pattern('query: ')
elif (self.command == 'find')... | 3,728,147,135,323,575,300 | Extract query pattern from operations. | mtools/util/logevent.py | pattern | sindbach/mtools | python | @property
def pattern(self):
if (not self._pattern):
if ((self.operation in ['query', 'getmore', 'update', 'remove']) or (self.command in ['count', 'findandmodify'])):
self._pattern = self._find_pattern('query: ')
elif (self.command == 'find'):
self._pattern = self._find... |
@property
def sort_pattern(self):
'Extract query pattern from operations.'
if (not self._sort_pattern):
if (self.operation in ['query', 'getmore']):
self._sort_pattern = self._find_pattern('orderby: ')
return self._sort_pattern | -5,626,022,467,865,773,000 | Extract query pattern from operations. | mtools/util/logevent.py | sort_pattern | sindbach/mtools | python | @property
def sort_pattern(self):
if (not self._sort_pattern):
if (self.operation in ['query', 'getmore']):
self._sort_pattern = self._find_pattern('orderby: ')
return self._sort_pattern |
@property
def command(self):
'Extract query pattern from operations.'
if (not self._command_calculated):
self._command_calculated = True
if (self.operation == 'command'):
try:
command_idx = self.split_tokens.index('command:')
command = self.split_token... | 3,725,386,157,492,850,000 | Extract query pattern from operations. | mtools/util/logevent.py | command | sindbach/mtools | python | @property
def command(self):
if (not self._command_calculated):
self._command_calculated = True
if (self.operation == 'command'):
try:
command_idx = self.split_tokens.index('command:')
command = self.split_tokens[(command_idx + 1)]
if ... |
@property
def nscanned(self):
'Extract nscanned or keysExamined counter if available (lazy).'
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._nscanned | 1,807,220,756,329,251,600 | Extract nscanned or keysExamined counter if available (lazy). | mtools/util/logevent.py | nscanned | sindbach/mtools | python | @property
def nscanned(self):
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._nscanned |
@property
def nscannedObjects(self):
'\n Extract counters if available (lazy).\n\n Looks for nscannedObjects or docsExamined.\n '
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._nscannedObjects | -7,772,727,705,828,972,000 | Extract counters if available (lazy).
Looks for nscannedObjects or docsExamined. | mtools/util/logevent.py | nscannedObjects | sindbach/mtools | python | @property
def nscannedObjects(self):
'\n Extract counters if available (lazy).\n\n Looks for nscannedObjects or docsExamined.\n '
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._nscannedObjects |
@property
def ntoreturn(self):
'Extract ntoreturn counter if available (lazy).'
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._ntoreturn | 1,006,731,818,353,051,600 | Extract ntoreturn counter if available (lazy). | mtools/util/logevent.py | ntoreturn | sindbach/mtools | python | @property
def ntoreturn(self):
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._ntoreturn |
@property
def writeConflicts(self):
'Extract writeConflicts counter if available (lazy).'
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._writeConflicts | 3,734,549,808,003,737,600 | Extract writeConflicts counter if available (lazy). | mtools/util/logevent.py | writeConflicts | sindbach/mtools | python | @property
def writeConflicts(self):
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._writeConflicts |
@property
def nreturned(self):
'\n Extract counters if available (lazy).\n\n Looks for nreturned, nReturned, or nMatched counter.\n '
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._nreturned | 8,576,279,460,733,580,000 | Extract counters if available (lazy).
Looks for nreturned, nReturned, or nMatched counter. | mtools/util/logevent.py | nreturned | sindbach/mtools | python | @property
def nreturned(self):
'\n Extract counters if available (lazy).\n\n Looks for nreturned, nReturned, or nMatched counter.\n '
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._nreturned |
@property
def ninserted(self):
'Extract ninserted or nInserted counter if available (lazy).'
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._ninserted | -5,332,214,218,849,756,000 | Extract ninserted or nInserted counter if available (lazy). | mtools/util/logevent.py | ninserted | sindbach/mtools | python | @property
def ninserted(self):
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._ninserted |
@property
def ndeleted(self):
'Extract ndeleted or nDeleted counter if available (lazy).'
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._ndeleted | -5,896,970,563,387,445,000 | Extract ndeleted or nDeleted counter if available (lazy). | mtools/util/logevent.py | ndeleted | sindbach/mtools | python | @property
def ndeleted(self):
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._ndeleted |
@property
def nupdated(self):
'Extract nupdated or nModified counter if available (lazy).'
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._nupdated | 2,019,395,670,723,748,600 | Extract nupdated or nModified counter if available (lazy). | mtools/util/logevent.py | nupdated | sindbach/mtools | python | @property
def nupdated(self):
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._nupdated |
@property
def numYields(self):
'Extract numYields counter if available (lazy).'
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._numYields | 6,802,860,109,620,639,000 | Extract numYields counter if available (lazy). | mtools/util/logevent.py | numYields | sindbach/mtools | python | @property
def numYields(self):
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._numYields |
@property
def planSummary(self):
'Extract planSummary if available (lazy).'
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._planSummary | -8,060,062,694,230,500,000 | Extract planSummary if available (lazy). | mtools/util/logevent.py | planSummary | sindbach/mtools | python | @property
def planSummary(self):
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._planSummary |
@property
def r(self):
'Extract read lock (r) counter if available (lazy).'
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._r | -7,113,388,393,597,944,000 | Extract read lock (r) counter if available (lazy). | mtools/util/logevent.py | r | sindbach/mtools | python | @property
def r(self):
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._r |
@property
def w(self):
'Extract write lock (w) counter if available (lazy).'
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._w | -4,226,283,736,360,890,400 | Extract write lock (w) counter if available (lazy). | mtools/util/logevent.py | w | sindbach/mtools | python | @property
def w(self):
if (not self._counters_calculated):
self._counters_calculated = True
self._extract_counters()
return self._w |
def _extract_counters(self):
'Extract counters like nscanned and nreturned from the logevent.'
counters = ['nscanned', 'nscannedObjects', 'ntoreturn', 'nreturned', 'ninserted', 'nupdated', 'ndeleted', 'r', 'w', 'numYields', 'planSummary', 'writeConflicts', 'keyUpdates']
counter_equiv = {'docsExamined': 'nsc... | -8,631,324,852,518,519,000 | Extract counters like nscanned and nreturned from the logevent. | mtools/util/logevent.py | _extract_counters | sindbach/mtools | python | def _extract_counters(self):
counters = ['nscanned', 'nscannedObjects', 'ntoreturn', 'nreturned', 'ninserted', 'nupdated', 'ndeleted', 'r', 'w', 'numYields', 'planSummary', 'writeConflicts', 'keyUpdates']
counter_equiv = {'docsExamined': 'nscannedObjects', 'keysExamined': 'nscanned', 'nDeleted': 'ndeleted'... |
@property
def level(self):
'Extract log level if available (lazy).'
if (not self._level_calculated):
self._level_calculated = True
self._extract_level()
return self._level | 2,071,184,124,526,647,600 | Extract log level if available (lazy). | mtools/util/logevent.py | level | sindbach/mtools | python | @property
def level(self):
if (not self._level_calculated):
self._level_calculated = True
self._extract_level()
return self._level |
@property
def component(self):
'Extract log component if available (lazy).'
self.level
return self._component | 3,989,212,613,295,903,000 | Extract log component if available (lazy). | mtools/util/logevent.py | component | sindbach/mtools | python | @property
def component(self):
self.level
return self._component |
def _extract_level(self):
'Extract level and component if available (lazy).'
if (self._level is None):
split_tokens = self.split_tokens
if (not split_tokens):
self._level = False
self._component = False
return
x = (self.log_levels.index(split_tokens[1]... | -3,180,627,665,237,303,000 | Extract level and component if available (lazy). | mtools/util/logevent.py | _extract_level | sindbach/mtools | python | def _extract_level(self):
if (self._level is None):
split_tokens = self.split_tokens
if (not split_tokens):
self._level = False
self._component = False
return
x = (self.log_levels.index(split_tokens[1]) if (split_tokens[1] in self.log_levels) else Non... |
def parse_all(self):
'\n Trigger extraction of all information.\n\n These values are usually evaluated lazily.\n '
tokens = self.split_tokens
duration = self.duration
datetime = self.datetime
thread = self.thread
operation = self.operation
namespace = self.namespace
... | 2,178,256,742,827,818,000 | Trigger extraction of all information.
These values are usually evaluated lazily. | mtools/util/logevent.py | parse_all | sindbach/mtools | python | def parse_all(self):
'\n Trigger extraction of all information.\n\n These values are usually evaluated lazily.\n '
tokens = self.split_tokens
duration = self.duration
datetime = self.datetime
thread = self.thread
operation = self.operation
namespace = self.namespace
... |
def __str__(self):
'Default string conversion for LogEvent object is its line_str.'
return str(self.line_str) | -4,018,836,960,612,078,600 | Default string conversion for LogEvent object is its line_str. | mtools/util/logevent.py | __str__ | sindbach/mtools | python | def __str__(self):
return str(self.line_str) |
def to_dict(self, labels=None):
'Convert LogEvent object to a dictionary.'
output = {}
if (labels is None):
labels = ['line_str', 'split_tokens', 'datetime', 'operation', 'thread', 'namespace', 'nscanned', 'ntoreturn', 'nreturned', 'ninserted', 'nupdated', 'ndeleted', 'duration', 'r', 'w', 'numYield... | 4,889,754,670,686,270,000 | Convert LogEvent object to a dictionary. | mtools/util/logevent.py | to_dict | sindbach/mtools | python | def to_dict(self, labels=None):
output = {}
if (labels is None):
labels = ['line_str', 'split_tokens', 'datetime', 'operation', 'thread', 'namespace', 'nscanned', 'ntoreturn', 'nreturned', 'ninserted', 'nupdated', 'ndeleted', 'duration', 'r', 'w', 'numYields']
for label in labels:
value... |
def to_json(self, labels=None):
'Convert LogEvent object to valid JSON.'
output = self.to_dict(labels)
return json.dumps(output, cls=DateTimeEncoder, ensure_ascii=False) | -2,870,708,172,949,533,700 | Convert LogEvent object to valid JSON. | mtools/util/logevent.py | to_json | sindbach/mtools | python | def to_json(self, labels=None):
output = self.to_dict(labels)
return json.dumps(output, cls=DateTimeEncoder, ensure_ascii=False) |
def _parse_document(self):
'Parse system.profile doc, copy all values to member variables.'
self._reset()
doc = self._profile_doc
self._split_tokens_calculated = True
self._split_tokens = None
self._duration_calculated = True
self._duration = doc[u'millis']
self._datetime_calculated = Tr... | 1,999,894,103,749,726,000 | Parse system.profile doc, copy all values to member variables. | mtools/util/logevent.py | _parse_document | sindbach/mtools | python | def _parse_document(self):
self._reset()
doc = self._profile_doc
self._split_tokens_calculated = True
self._split_tokens = None
self._duration_calculated = True
self._duration = doc[u'millis']
self._datetime_calculated = True
self._datetime = doc[u'ts']
if (self._datetime.tzinfo... |
def __init__(self, weights_file: str, base_model_name: str):
' Invoke the predict method of this class to predict image quality using the NIMA model\n '
try:
self.nima = Nima(base_model_name, weights=None)
self.nima.build()
self.nima.nima_model.load_weights(weights_file)
except Exce... | -2,317,369,645,536,502,000 | Invoke the predict method of this class to predict image quality using the NIMA model | deepinsight_iqa/nima/predict.py | __init__ | sandyz1000/deepinsight-iqa | python | def __init__(self, weights_file: str, base_model_name: str):
' \n '
try:
self.nima = Nima(base_model_name, weights=None)
self.nima.build()
self.nima.nima_model.load_weights(weights_file)
except Exception as e:
print('Unable to load NIMA weights', str(e))
sys.ex... |
def bernstein_test_1(rep: str):
'011 . x + 1'
a = '011'
b = '1'
return bitwise_xor(bitwise_dot(a, rep), b) | 6,488,060,793,820,434,000 | 011 . x + 1 | data/p3BR/R2/benchmark/startQiskit_QC292.py | bernstein_test_1 | UCLA-SEAL/QDiff | python | def bernstein_test_1(rep: str):
a = '011'
b = '1'
return bitwise_xor(bitwise_dot(a, rep), b) |
def bernstein_test_2(rep: str):
'000 . x + 0'
a = '000'
b = '0'
return bitwise_xor(bitwise_dot(a, rep), b) | 8,969,665,367,625,561,000 | 000 . x + 0 | data/p3BR/R2/benchmark/startQiskit_QC292.py | bernstein_test_2 | UCLA-SEAL/QDiff | python | def bernstein_test_2(rep: str):
a = '000'
b = '0'
return bitwise_xor(bitwise_dot(a, rep), b) |
def bernstein_test_3(rep: str):
'111 . x + 1'
a = '111'
b = '1'
return bitwise_xor(bitwise_dot(a, rep), b) | 4,693,651,165,882,063,000 | 111 . x + 1 | data/p3BR/R2/benchmark/startQiskit_QC292.py | bernstein_test_3 | UCLA-SEAL/QDiff | python | def bernstein_test_3(rep: str):
a = '111'
b = '1'
return bitwise_xor(bitwise_dot(a, rep), b) |
def fasterMovie(self):
'Make the movie play faster.'
if ((self.state == self.PLAYING) or (self.state == self.READY)):
self.sendRtspRequest(self.FASTER) | 231,428,766,339,245,540 | Make the movie play faster. | Task2/Client_dev.py | fasterMovie | Aiemu/CourseCN-Proj-RTP | python | def fasterMovie(self):
if ((self.state == self.PLAYING) or (self.state == self.READY)):
self.sendRtspRequest(self.FASTER) |
def slowerMovie(self):
'Make the movie play slower.'
if ((self.state == self.PLAYING) or (self.state == self.READY)):
self.sendRtspRequest(self.SLOWER) | -1,343,215,242,521,325,300 | Make the movie play slower. | Task2/Client_dev.py | slowerMovie | Aiemu/CourseCN-Proj-RTP | python | def slowerMovie(self):
if ((self.state == self.PLAYING) or (self.state == self.READY)):
self.sendRtspRequest(self.SLOWER) |
def setupMovie(self):
'Set up the movie in INIT state.'
if (self.state == self.INIT):
self.sendRtspRequest(self.SETUP) | -7,091,073,410,713,007,000 | Set up the movie in INIT state. | Task2/Client_dev.py | setupMovie | Aiemu/CourseCN-Proj-RTP | python | def setupMovie(self):
if (self.state == self.INIT):
self.sendRtspRequest(self.SETUP) |