_id
stringlengths 2
7
| title
stringlengths 1
88
| partition
stringclasses 3
values | text
stringlengths 75
19.8k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q16000
|
BaseId.script
|
train
|
def script(self):
    """Return the cached Script object for this destination's contract
    script, building and memoizing it on first access."""
    # Construction involves a number of string-manipulation operations,
    # so the result is cached on the instance.
    cached = getattr(self, '_script', None)
    if cached is None:
        cached = self._script__getter()
        self._script = cached
    return cached
|
python
|
{
"resource": ""
}
|
q16001
|
_ChainedHashAlgorithm.copy
|
train
|
def copy(self):
    """Return an independent clone of this chained-hash object."""
    clone = _ChainedHashAlgorithm(self._algorithms)
    # Deep-copy both the in-progress and the finalized hash state so the
    # clone evolves independently of the original.
    clone._fobj = deepcopy(self._fobj)
    clone._hobj = deepcopy(self._hobj)
    return clone
|
python
|
{
"resource": ""
}
|
q16002
|
_ChainedHashAlgorithm._finalize
|
train
|
def _finalize(self):
"Computes _fobj, the completed hash."
hobj = self._hobj
for hashname in self._algorithms[1:]:
fobj = hashlib.new(hashname)
fobj.update(hobj.digest())
hobj = fobj
self._fobj = hobj
|
python
|
{
"resource": ""
}
|
q16003
|
_ChainedHashAlgorithm.update
|
train
|
def update(self, *args):
    """Feed each of the given byte strings into the running digest."""
    for chunk in args:
        self._hobj.update(chunk)
    # Any previously finalized result is now stale.
    self._fobj = None
|
python
|
{
"resource": ""
}
|
q16004
|
BaseAuthTreeLink.count
|
train
|
def count(self):
    """The number of items, pruned or otherwise, contained by this branch.

    Lazily pulled from ``self.node`` and cached on first access."""
    cached = getattr(self, '_count', None)
    if cached is None:
        cached = getattr(self.node, 'count', 0)
        self._count = cached
    return cached
|
python
|
{
"resource": ""
}
|
q16005
|
BaseAuthTreeLink.size
|
train
|
def size(self):
    """The canonical serialized size of this branch.

    Lazily pulled from ``self.node`` and cached on first access."""
    cached = getattr(self, '_size', None)
    if cached is None:
        cached = getattr(self.node, 'size', 0)
        self._size = cached
    return cached
|
python
|
{
"resource": ""
}
|
q16006
|
BaseAuthTreeNode._forward_iterator
|
train
|
def _forward_iterator(self):
    """Yield (key, value) pairs from the trie in forward order.

    Implemented as an explicit depth-first traversal using a stack of
    (node, next-child-index, accumulated-key-prefix) frames.
    """
    path = [(self, 0, Bits())]
    while path:
        node, idx, prefix = path.pop()
        # Emit the node's own value on its first visit (idx == 0),
        # unless the value has been pruned away.
        if idx==0 and node.value is not None and not node.prune_value:
            yield (self._unpickle_key(prefix), self._unpickle_value(node.value))
        if idx<len(node.children):
            # Re-push this node so its next child is visited later, then
            # descend into the current child link (unless pruned).
            path.append((node, idx+1, prefix))
            link = node.children[idx]
            if not link.pruned:
                path.append((link.node, 0, prefix + link.prefix))
|
python
|
{
"resource": ""
}
|
q16007
|
BaseAuthTreeNode.trim
|
train
|
def trim(self, prefixes):
    """Prune any keys beginning with the specified prefixes.

    Returns the total length trimmed, as accumulated from ``self._trim``
    calls on each surviving prefix.
    """
    # Normalize the requested keys, then rebuild `prefixes` keeping only
    # entries not already covered by a neighboring prefix.
    _prefixes, prefixes = set(map(lambda k:self._prepare_key(k), prefixes)), list()
    # `lookahead` presumably yields (current, next) pairs with next=None
    # for the final element -- TODO confirm against its definition.
    for t in lookahead(sorted(_prefixes)):
        if t[1] is not None:
            # NOTE(review): this skips `current` when it equals the common
            # prefix of (current, next), i.e. when current is itself a
            # prefix of the following key; verify this de-duplication
            # direction is intended.
            if t[0] == commonprefix(t):
                continue
        prefixes.append(t[0])
    length = 0
    for prefix in prefixes:
        length += self._trim(prefix)
    return length
|
python
|
{
"resource": ""
}
|
q16008
|
ScriptPickler.dump
|
train
|
def dump(self, script, file=None):
    """Serialize ``script`` in compressed form to ``file``, defaulting to
    the pickler's own file object when ``file`` is omitted."""
    target = self._file if file is None else file
    self._dump(script, target, self._protocol, self._version)
|
python
|
{
"resource": ""
}
|
q16009
|
ScriptPickler.dumps
|
train
|
def dumps(self, script):
    """Return a compressed representation of ``script`` as a binary
    string, using an in-memory buffer."""
    buf = BytesIO()
    self._dump(script, buf, self._protocol, self._version)
    return buf.getvalue()
|
python
|
{
"resource": ""
}
|
q16010
|
ScriptPickler.load
|
train
|
def load(self, file=None):
    """Read one compressed script from ``file`` (defaults to the
    pickler's file object) and wrap it in the configured script class."""
    source = self._file if file is None else file
    script_cls = self.get_script_class()
    raw = self._load(source, self._protocol, self._version)
    return script_cls(raw)
|
python
|
{
"resource": ""
}
|
q16011
|
ScriptPickler.loads
|
train
|
def loads(self, string):
    """Decompress the passed-in compact script bytes and return the
    result wrapped in the configured script class."""
    script_cls = self.get_script_class()
    raw = self._load(BytesIO(string), self._protocol, self._version)
    return script_cls(raw)
|
python
|
{
"resource": ""
}
|
q16012
|
merkle
|
train
|
def merkle(hashes, func=_merkle_hash256):
    """Fold an iterable of hashes (or hashable objects) into a binary
    tree, combining interior nodes with ``func``, and return the root
    value. The default combiner is the hash256 function, yielding the
    root hash of the whole tree."""
    # A list is required because levels are mutated (last item duplicated).
    level = list(iter(hashes))
    if not level:
        # Empty input: let the constructor choose the return value.
        return func()
    if len(level) == 1:
        # The constructor/compressor must also run for a single item,
        # which the reduction loop below would never touch.
        return func(*level)
    # Collapse the tree level by level until only the root remains.
    while len(level) > 1:
        # Bitcoin-style trees duplicate the last entry of each level so
        # every traversal has the same number of steps; when the level
        # was already even, zip() below silently drops the extra copy.
        level.append(level[-1])
        paired = iter(level)
        level = [func(lhs, rhs) for lhs, rhs in zip(paired, paired)]
    return level[0]
|
python
|
{
"resource": ""
}
|
q16013
|
b58encode
|
train
|
def b58encode(b, errors='strict'):
    """Encode bytes to a base58-encoded string.

    Returns a ``(encoded, length)`` tuple following the codecs
    convention, where ``length`` is the number of input bytes consumed.
    """
    len_ = len(b)
    # Interpret the big-endian bytes as a single integer.
    n = BigInteger.deserialize(BytesIO(b), len_)
    # Divide that integer into base58, least-significant digit first.
    res = []
    while n > 0:
        n, r = divmod(n, 58)
        res.append(b58digits[r])
    res = ''.join(res[::-1])
    # Leading zero bytes carry no numeric value, so they are encoded
    # explicitly as leading base58 zero digits. Bug fix: the original
    # compared each element of `b` to six.int2byte(0), but iterating
    # bytes on Python 3 yields ints, so the comparison never matched and
    # the padding was silently lost. Slicing one byte at a time yields a
    # bytes object on both Python 2 and 3.
    pad = 0
    for i in range(len_):
        if b[i:i+1] == b'\x00':
            pad += 1
        else:
            break
    return (b58digits[0] * pad + res, len_)
|
python
|
{
"resource": ""
}
|
q16014
|
b58decode
|
train
|
def b58decode(s, errors='strict'):
    """Decode a base58-encoded string, returning a ``(bytes, length)``
    tuple following the codecs convention."""
    if not s:
        return (b'', 0)
    # Accumulate the base58 digits into one big integer.
    n = 0
    for ch in s:
        n *= 58
        if ch not in b58digits:
            raise InvalidBase58Error(u"character %r is not a valid base58 "
                                     u"character" % ch)
        n += b58digits.index(ch)
    # Serialize the integer back to big-endian bytes (at least one byte).
    res = BigInteger(n).serialize((n.bit_length()+7)//8 or 1)
    # Restore leading zero bytes, one per leading base58 zero digit.
    pad = 0
    for ch in s[:-1]:
        if ch != b58digits[0]:
            break
        pad += 1
    return (b'\x00' * pad + res, len(s))
|
python
|
{
"resource": ""
}
|
q16015
|
info
|
train
|
def info(torrent_path):
    """Print out information from a .torrent file."""
    torrent = Torrent.from_file(torrent_path)
    total = torrent.total_size
    click.secho('Name: %s' % torrent.name, fg='blue')
    click.secho('Files:')
    for entry in torrent.files:
        click.secho(entry.name)
    click.secho('Hash: %s' % torrent.info_hash, fg='blue')
    click.secho('Size: %s (%s)' % (humanize_filesize(total), total), fg='blue')
    click.secho('Magnet: %s' % torrent.get_magnet(), fg='yellow')
|
python
|
{
"resource": ""
}
|
q16016
|
create
|
train
|
def create(source, dest, tracker, open_trackers, comment, cache):
    """Create a torrent file from a single file or a directory.

    ``dest`` is treated as a directory; the .torrent filename is derived
    from the source basename with dots and spaces replaced.
    """
    source_title = path.basename(source).replace('.', '_').replace(' ', '_')
    dest = '%s.torrent' % path.join(dest, source_title)
    click.secho('Creating torrent from %s ...' % source)
    my_torrent = Torrent.create_from(source)
    if comment:
        my_torrent.comment = comment
    urls = []
    if tracker:
        urls = tracker.split(',')
    if open_trackers:
        click.secho('Fetching an up-to-date open tracker list ...')
        try:
            urls.extend(get_open_trackers_from_remote())
        except RemoteDownloadError:
            # Fall back to the bundled tracker list when offline.
            click.secho('Failed. Using built-in open tracker list.', fg='red', err=True)
            urls.extend(get_open_trackers_from_local())
    if urls:
        my_torrent.announce_urls = urls
    my_torrent.to_file(dest)
    click.secho('Torrent file created: %s' % dest, fg='green')
    click.secho('Torrent info hash: %s' % my_torrent.info_hash, fg='blue')
    if cache:
        # Bug fix: the original message contained a bare '%s' placeholder
        # with no formatting argument, so '%s' was printed literally.
        click.secho('Uploading to torrent cache service ...')
        try:
            result = upload_to_cache_server(dest)
            click.secho('Cached torrent URL: %s' % result, fg='yellow')
        except RemoteUploadError as e:
            click.secho('Failed: %s' % e, fg='red', err=True)
|
python
|
{
"resource": ""
}
|
q16017
|
Torrent.files
|
train
|
def files(self):
    """Files in torrent.

    List of namedtuples (filepath, size).

    :rtype: list[TorrentFile]
    """
    info = self._struct.get('info')
    if not info:
        return []
    if 'files' not in info:
        # Single-file torrent: name/length live directly in `info`.
        return [TorrentFile(info['name'], info['length'])]
    # Multi-file torrent: paths are relative to the torrent name.
    base = info['name']
    return [TorrentFile(join(base, *item['path']), item['length'])
            for item in info['files']]
|
python
|
{
"resource": ""
}
|
q16018
|
Torrent.info_hash
|
train
|
def info_hash(self):
    """Hash of the torrent's info section (a.k.a. the torrent hash), or
    ``None`` when no info section is present."""
    info = self._struct.get('info')
    if info:
        return sha1(Bencode.encode(info)).hexdigest()
    return None
|
python
|
{
"resource": ""
}
|
q16019
|
Torrent.to_file
|
train
|
def to_file(self, filepath=None):
    """Write this Torrent object into a file.

    :param filepath: target path; when omitted, the path the torrent was
        loaded from is reused. On success it becomes the new default.
    :raises TorrentError: when no filepath is known at all.
    """
    target = filepath if filepath is not None else self._filepath
    if target is None:
        raise TorrentError('Unable to save torrent to file: no filepath supplied.')
    self._filepath = target
    with open(target, mode='wb') as f:
        f.write(self.to_string())
|
python
|
{
"resource": ""
}
|
q16020
|
Torrent.create_from
|
train
|
def create_from(cls, src_path):
    """Returns Torrent object created from a file or a directory.

    :param str src_path:
    :rtype: Torrent
    """
    is_dir = isdir(src_path)
    target_files, size_data = cls._get_target_files_info(src_path)
    SIZE_MIN = 32768  # 32 KiB
    SIZE_DEFAULT = 262144  # 256 KiB
    SIZE_MAX = 1048576  # 1 MiB
    CHUNKS_MIN = 1000  # todo use those limits as advised
    CHUNKS_MAX = 2200
    # Pick the piece size. NOTE(review): the SIZE_MAX clamp below can
    # never fire, since size_piece is at most SIZE_DEFAULT at that point.
    size_piece = SIZE_MIN
    if size_data > SIZE_MIN:
        size_piece = SIZE_DEFAULT
    if size_piece > SIZE_MAX:
        size_piece = SIZE_MAX
    def read(filepath):
        # Yield chunks sized to top the shared `pieces_buffer` up to
        # exactly one piece; the enclosing-scope buffer is re-read each
        # iteration (late binding), so it reflects the caller's resets.
        with open(filepath, 'rb') as f:
            while True:
                chunk = f.read(size_piece - len(pieces_buffer))
                chunk_size = len(chunk)
                if chunk_size == 0:
                    break
                yield chunk
    pieces = bytearray()
    pieces_buffer = bytearray()
    # Hash fixed-size pieces, spanning file boundaries as needed.
    for fpath, _, _ in target_files:
        for chunk in read(fpath):
            pieces_buffer += chunk
            if len(pieces_buffer) == size_piece:
                pieces += sha1(pieces_buffer).digest()[:20]
                pieces_buffer = bytearray()
    # Hash the final partial piece, if any remains.
    if len(pieces_buffer):
        pieces += sha1(pieces_buffer).digest()[:20]
        pieces_buffer = bytearray()
    info = {
        'name': basename(src_path),
        'pieces': bytes(pieces),
        'piece length': size_piece,
    }
    if is_dir:
        # Multi-file layout: one {'length', 'path'} dict per file.
        files = []
        for _, length, path in target_files:
            files.append({'length': length, 'path': path})
        info['files'] = files
    else:
        info['length'] = target_files[0][1]
    torrent = cls({'info': info})
    torrent.created_by = get_app_version()
    torrent.creation_date = datetime.utcnow()
    return torrent
|
python
|
{
"resource": ""
}
|
q16021
|
Torrent.from_file
|
train
|
def from_file(cls, filepath):
    """Alternative constructor: build a Torrent from a .torrent file.

    :param str filepath:
    :rtype: Torrent
    """
    torrent_obj = cls(Bencode.read_file(filepath))
    # Remember the origin so to_file() can default to it.
    torrent_obj._filepath = filepath
    return torrent_obj
|
python
|
{
"resource": ""
}
|
q16022
|
humanize_filesize
|
train
|
def humanize_filesize(bytes_size):
    """Return a human-readable file size string such as ``'1.5 KB'``.

    :param int bytes_size:
    :rtype: str
    """
    if not bytes_size:
        return '0 B'
    units = ('B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB')
    # Unit index is the integer base-1024 logarithm of the size.
    exponent = int(math.floor(math.log(bytes_size, 1024)))
    scaled = round(bytes_size / math.pow(1024, exponent), 2)
    return '%s %s' % (scaled, units[exponent])
|
python
|
{
"resource": ""
}
|
q16023
|
upload_to_cache_server
|
train
|
def upload_to_cache_server(fpath):
    """Uploads .torrent file to a cache server.

    Returns upload file URL.

    :raises RemoteUploadError: when requests is unavailable or the
        upload fails.
    :rtype: str
    """
    url_base = 'http://torrage.info'
    url_upload = '%s/autoupload.php' % url_base
    url_download = '%s/torrent.php?h=' % url_base
    file_field = 'torrent'
    # Bug fix: import separately. In the original, a failed import raised
    # ImportError inside a try whose except clause evaluated
    # `requests.RequestException` -- a NameError, since `requests` was
    # never bound.
    try:
        import requests
    except ImportError as e:
        raise RemoteUploadError('Unable to upload to %s: %s' % (url_upload, e))
    try:
        # `with` closes the file handle deterministically (the original
        # leaked it).
        with open(fpath, 'rb') as f:
            response = requests.post(
                url_upload, files={file_field: f}, timeout=REMOTE_TIMEOUT)
        response.raise_for_status()
        return url_download + response.text
    except requests.RequestException as e:
        # Now trace is lost. `raise from` to consider.
        raise RemoteUploadError('Unable to upload to %s: %s' % (url_upload, e))
|
python
|
{
"resource": ""
}
|
q16024
|
get_open_trackers_from_remote
|
train
|
def get_open_trackers_from_remote():
    """Returns open trackers announce URLs list from remote repo.

    :raises RemoteDownloadError: when requests is unavailable or the
        download fails.
    """
    url_base = 'https://raw.githubusercontent.com/idlesign/torrentool/master/torrentool/repo'
    url = '%s/%s' % (url_base, OPEN_TRACKERS_FILENAME)
    # Bug fix: import separately. In the original, a failed import raised
    # ImportError inside a try whose except clause evaluated
    # `requests.RequestException` -- a NameError, since `requests` was
    # never bound.
    try:
        import requests
    except ImportError as e:
        raise RemoteDownloadError('Unable to download from %s: %s' % (url, e))
    try:
        response = requests.get(url, timeout=REMOTE_TIMEOUT)
        response.raise_for_status()
    except requests.RequestException as e:
        # Now trace is lost. `raise from` to consider.
        raise RemoteDownloadError('Unable to download from %s: %s' % (url, e))
    return response.text.splitlines()
|
python
|
{
"resource": ""
}
|
q16025
|
get_open_trackers_from_local
|
train
|
def get_open_trackers_from_local():
    """Returns open trackers announce URLs list from the bundled backup
    file shipped with the package."""
    backup = path.join(path.dirname(__file__), 'repo', OPEN_TRACKERS_FILENAME)
    with open(backup) as f:
        return [line.strip() for line in f.readlines()]
|
python
|
{
"resource": ""
}
|
q16026
|
Bencode.encode
|
train
|
def encode(cls, value):
    """Encodes a value into bencoded bytes.

    :param value: Python object to be encoded (str, int, list, dict).
    :rtype: bytes

    Strings are encoded as UTF-8. NOTE(review): the bare name ``encode``
    called below resolves to a module-level str->bytes helper at call
    time (it is not this classmethod); confirm against the module's
    imports.
    """
    val_encoding = 'utf-8'
    def encode_str(v):
        # Bencoded string: b"<length>:<bytes>".
        try:
            v_enc = encode(v, val_encoding)
        except UnicodeDecodeError:
            if PY3:
                raise
            else:
                # Suppose bytestring
                v_enc = v
        prefix = encode('%s:' % len(v_enc), val_encoding)
        return prefix + v_enc
    def encode_(val):
        # Recursive dispatch on the value's type.
        if isinstance(val, str_type):
            result = encode_str(val)
        elif isinstance(val, int_types):
            # Integer: b"i<number>e".
            result = encode(('i%se' % val), val_encoding)
        elif isinstance(val, (list, set, tuple)):
            # List-likes: b"l<items>e".
            result = encode('l', val_encoding)
            for item in val:
                result += encode_(item)
            result += encode('e', val_encoding)
        elif isinstance(val, dict):
            # Dictionary: b"d<key-value pairs>e".
            result = encode('d', val_encoding)
            # Dictionaries are expected to be sorted by key.
            for k, v in OrderedDict(sorted(val.items(), key=itemgetter(0))).items():
                result += (encode_str(k) + encode_(v))
            result += encode('e', val_encoding)
        elif isinstance(val, byte_types):
            # Raw bytes are emitted length-prefixed without re-encoding.
            result = encode('%s:' % len(val), val_encoding)
            result += val
        else:
            raise BencodeEncodingError('Unable to encode `%s` %s' % (type(val), val))
        return result
    return encode_(value)
|
python
|
{
"resource": ""
}
|
q16027
|
Bencode.decode
|
train
|
def decode(cls, encoded):
    """Decodes bencoded data introduced as bytes.

    Returns decoded structure(s).

    :param bytes encoded:
    """
    def create_dict(items):
        # Let's guarantee that dictionaries are sorted.
        k_v_pair = zip(*[iter(items)] * 2)
        return OrderedDict(sorted(k_v_pair, key=itemgetter(0)))
    def create_list(items):
        return list(items)
    # `stack_items` holds decoded values interleaved with container-creator
    # markers; `stack_containers` remembers which creator opened each
    # still-open container.
    stack_items = []
    stack_containers = []
    def compress_stack():
        # Pop values back to the marker of the innermost open container
        # and replace them with the constructed container.
        target_container = stack_containers.pop()
        subitems = []
        while True:
            subitem = stack_items.pop()
            subitems.append(subitem)
            if subitem is target_container:
                break
        container_creator = subitems.pop()
        container = container_creator(reversed(subitems))
        stack_items.append(container)
    def parse_forward(till_char, sequence):
        # Scan up to `till_char`, returning the parsed integer and the
        # index just past the terminator.
        number = ''
        char_sub_idx = 0
        for char_sub_idx, char_sub in enumerate(sequence):
            char_sub = chr_(char_sub)
            if char_sub == till_char:
                break
            number += char_sub
        number = int(number or 0)
        char_sub_idx += 1
        return number, char_sub_idx
    # Consume the input front-to-back, slicing off each parsed token.
    while encoded:
        char = encoded[0]
        char = chr_(char)
        if char == 'd':  # Dictionary
            stack_items.append(create_dict)
            stack_containers.append(create_dict)
            encoded = encoded[1:]
        elif char == 'l':  # List
            stack_items.append(create_list)
            stack_containers.append(create_list)
            encoded = encoded[1:]
        elif char == 'i':  # Integer
            number, char_sub_idx = parse_forward('e', encoded[1:])
            char_sub_idx += 1
            stack_items.append(number)
            encoded = encoded[char_sub_idx:]
        elif char.isdigit():  # String
            str_len, char_sub_idx = parse_forward(':', encoded)
            last_char_idx = char_sub_idx + str_len
            string = encoded[char_sub_idx:last_char_idx]
            try:
                string = string.decode('utf-8')
            except UnicodeDecodeError:
                # Considered bytestring (e.g. `pieces` hashes concatenation).
                pass
            stack_items.append(string)
            encoded = encoded[last_char_idx:]
        elif char == 'e':  # End of a dictionary or a list.
            compress_stack()
            encoded = encoded[1:]
        else:
            raise BencodeDecodingError('Unable to interpret `%s` char.' % char)
    # A single decoded value is returned bare, not wrapped in a list.
    if len(stack_items) == 1:
        stack_items = stack_items.pop()
    return stack_items
|
python
|
{
"resource": ""
}
|
q16028
|
Bencode.read_string
|
train
|
def read_string(cls, string):
    """Decodes a given bencoded string or bytestring.

    Returns decoded structure(s).

    :param str string:
    """
    data = string
    # Python 3 text must be converted to bytes before decoding.
    if PY3 and not isinstance(data, byte_types):
        data = data.encode()
    return cls.decode(data)
|
python
|
{
"resource": ""
}
|
q16029
|
Bencode.read_file
|
train
|
def read_file(cls, filepath):
    """Decodes bencoded data stored in the given file.

    Returns decoded structure(s).

    :param str filepath:
    """
    with open(filepath, mode='rb') as f:
        raw = f.read()
    return cls.decode(raw)
|
python
|
{
"resource": ""
}
|
q16030
|
create_csp_header
|
train
|
def create_csp_header(cspDict):
    """Build a Content-Security-Policy header value from a directive
    dict, skipping directives whose value is the empty string."""
    directives = ('%s %s' % (name, value)
                  for name, value in cspDict.items() if value != '')
    return '; '.join(directives)
|
python
|
{
"resource": ""
}
|
q16031
|
csp_header
|
train
|
def csp_header(csp={}):
    """Decorator that attaches a Content-Security-Policy header to the
    wrapped Flask view's response.

    A truthy ``report-only`` key switches to the report-only header; the
    key itself is stripped before building the policy string."""
    merged = csp_default().read()
    merged.update(csp)
    report_only = merged.pop('report-only', None)
    if report_only is True:
        header_name = 'Content-Security-Policy-Report-Only'
    else:
        header_name = 'Content-Security-Policy'
    extra_headers = {header_name: create_csp_header(merged)}
    def decorator(view):
        @wraps(view)
        def wrapped(*args, **kwargs):
            response = make_response(view(*args, **kwargs))
            for name, value in extra_headers.items():
                response.headers[name] = value
            return response
        return wrapped
    return decorator
|
python
|
{
"resource": ""
}
|
q16032
|
csp_default.read
|
train
|
def read(self):
    """Read default CSP settings from the JSON file.

    :raises ValueError: when the file holds no valid JSON (e.g. empty).
    """
    with open(self.default_file) as json_file:
        try:
            return json.load(json_file)
        except ValueError:
            # Bug fix: the original did `raise 'empty file'`, which is a
            # TypeError on Python 3 (exceptions must derive from
            # BaseException). json.JSONDecodeError subclasses ValueError,
            # so callers catching broadly still work.
            raise ValueError('empty file')
|
python
|
{
"resource": ""
}
|
q16033
|
csp_default.update
|
train
|
def update(self, updates=None):
    """Merge ``updates`` into csp_default.json.

    If the file is empty or unreadable, it is first seeded with a
    default-src policy; the updates are then applied and persisted.

    :param dict updates: directive overrides (optional).
    """
    # Fix: avoid the mutable default argument of the original.
    if updates is None:
        updates = {}
    try:
        csp = self.read()
    except Exception:
        # Narrowed from the original bare `except:` so that
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        csp = {'default-src': "'self'"}
        self.write(csp)
    csp.update(updates)
    self.write(csp)
|
python
|
{
"resource": ""
}
|
q16034
|
ImageSetUploadView.get_image_set
|
train
|
def get_image_set(self):
    """
    Obtain the existing ImageSet when `pk` is present in the URL kwargs;
    otherwise create a fresh ImageSet owned by the requesting user.
    """
    pk = self.kwargs.get("pk", None)
    if pk is not None:
        return get_object_or_404(self.get_queryset(), pk=pk)
    return self.request.user.image_sets.create()
|
python
|
{
"resource": ""
}
|
q16035
|
tracer_config
|
train
|
def tracer_config(__init__, app, args, kwargs):
    """
    Wraps the Tornado web application initialization so that the
    TornadoTracing instance is created around an OpenTracing-compatible tracer.
    """
    __init__(*args, **kwargs)
    tracing = app.settings.get('opentracing_tracing')
    tracer_callable = app.settings.get('opentracing_tracer_callable')
    tracer_parameters = app.settings.get('opentracing_tracer_parameters', {})
    if tracer_callable is not None:
        # A tracer factory (given directly or as a dotted name) overrides
        # any pre-built 'opentracing_tracing' setting.
        if not callable(tracer_callable):
            tracer_callable = _get_callable_from_name(tracer_callable)
        tracer = tracer_callable(**tracer_parameters)
        tracing = TornadoTracing(tracer)
    if tracing is None:
        tracing = TornadoTracing()  # fallback to the global tracer
    app.settings['opentracing_tracing'] = tracing
    # Cache behavior flags on the tracing object for the request hooks.
    tracing._trace_all = app.settings.get('opentracing_trace_all',
                                          DEFAULT_TRACE_ALL)
    tracing._trace_client = app.settings.get('opentracing_trace_client',
                                             DEFAULT_TRACE_CLIENT)
    tracing._start_span_cb = app.settings.get('opentracing_start_span_cb',
                                              None)
    # Propagate client-side tracing configuration to the http client.
    httpclient._set_tracing_enabled(tracing._trace_client)
    if tracing._trace_client:
        httpclient._set_tracing_info(tracing._tracer_obj,
                                     tracing._start_span_cb)
|
python
|
{
"resource": ""
}
|
q16036
|
TornadoTracing._apply_tracing
|
train
|
def _apply_tracing(self, handler, attributes):
    """
    Helper function to avoid rewriting for middleware and decorator.
    Returns a new span from the request with logged attributes and
    correct operation name from the func.
    """
    operation_name = self._get_operation_name(handler)
    headers = handler.request.headers
    request = handler.request
    # start new span from trace info
    try:
        span_ctx = self._tracer.extract(opentracing.Format.HTTP_HEADERS,
                                        headers)
        scope = self._tracer.start_active_span(operation_name,
                                               child_of=span_ctx)
    except (opentracing.InvalidCarrierException,
            opentracing.SpanContextCorruptedException):
        # No usable upstream context in the headers; start a fresh trace.
        scope = self._tracer.start_active_span(operation_name)
    # add span to current spans
    setattr(request, SCOPE_ATTR, scope)
    # log any traced attributes
    scope.span.set_tag(tags.COMPONENT, 'tornado')
    scope.span.set_tag(tags.SPAN_KIND, tags.SPAN_KIND_RPC_SERVER)
    scope.span.set_tag(tags.HTTP_METHOD, request.method)
    scope.span.set_tag(tags.HTTP_URL, request.uri)
    # Only existing, non-empty request attributes are tagged.
    for attr in attributes:
        if hasattr(request, attr):
            payload = str(getattr(request, attr))
            if payload:
                scope.span.set_tag(attr, payload)
    # invoke the start span callback, if any
    self._call_start_span_cb(scope.span, request)
    return scope
|
python
|
{
"resource": ""
}
|
q16037
|
execute
|
train
|
def execute(func, handler, args, kwargs):
    """
    Wrap the handler ``_execute`` method to trace incoming requests,
    extracting the context from the headers, if available.
    """
    tracing = handler.settings.get('opentracing_tracing')
    with tracer_stack_context():
        if tracing._trace_all:
            traced_attrs = handler.settings.get(
                'opentracing_traced_attributes', [])
            tracing._apply_tracing(handler, traced_attrs)
        return func(*args, **kwargs)
|
python
|
{
"resource": ""
}
|
q16038
|
on_finish
|
train
|
def on_finish(func, handler, args, kwargs):
    """
    Wrap the handler ``on_finish`` method: close out the request's span,
    then delegate to the wrapped method.
    """
    handler.settings.get('opentracing_tracing')._finish_tracing(handler)
    return func(*args, **kwargs)
|
python
|
{
"resource": ""
}
|
q16039
|
log_exception
|
train
|
def log_exception(func, handler, args, kwargs):
    """
    Wrap the handler ``log_exception`` method to finish the Span for the
    given request, if available. This method is called when an Exception
    is not handled in the user code.
    """
    # safe-guard: expected arguments -> log_exception(self, typ, value, tb)
    value = None
    if len(args) == 3:
        value = args[1]
    if value is None:
        return func(*args, **kwargs)
    tracing = handler.settings.get('opentracing_tracing')
    # Non-HTTP errors and 5xx HTTP errors mark the span as failed.
    is_server_error = (not isinstance(value, HTTPError)
                       or 500 <= value.status_code <= 599)
    if is_server_error:
        tracing._finish_tracing(handler, error=value)
    return func(*args, **kwargs)
|
python
|
{
"resource": ""
}
|
q16040
|
SchemaNode.schema_root
|
train
|
def schema_root(self) -> "SchemaTreeNode":
    """Return the root node of the receiver's schema by following
    parent links upward."""
    node = self
    while node.parent:
        node = node.parent
    return node
|
python
|
{
"resource": ""
}
|
q16041
|
SchemaNode.content_type
|
train
|
def content_type(self) -> ContentType:
    """Return receiver's content type, inherited from the parent when
    not set locally."""
    if self._ctype:
        return self._ctype
    return self.parent.content_type()
|
python
|
{
"resource": ""
}
|
q16042
|
SchemaNode.data_parent
|
train
|
def data_parent(self) -> Optional["InternalNode"]:
    """Return the closest ancestor data node, or ``None`` if there is
    no such ancestor."""
    ancestor = self.parent
    while ancestor:
        if isinstance(ancestor, DataNode):
            return ancestor
        ancestor = ancestor.parent
    return None
|
python
|
{
"resource": ""
}
|
q16043
|
SchemaNode.iname
|
train
|
def iname(self) -> InstanceName:
    """Return the instance name corresponding to the receiver.

    The namespace prefix is omitted when it matches the data parent's."""
    dp = self.data_parent()
    if dp and self.ns == dp.ns:
        return self.name
    return self.ns + ":" + self.name
|
python
|
{
"resource": ""
}
|
q16044
|
SchemaNode.data_path
|
train
|
def data_path(self) -> DataPath:
    """Return the receiver's data path, built recursively from the
    data-parent chain."""
    dp = self.data_parent()
    prefix = dp.data_path() if dp else ""
    return prefix + "/" + self.iname()
|
python
|
{
"resource": ""
}
|
q16045
|
SchemaNode._node_digest
|
train
|
def _node_digest(self) -> Dict[str, Any]:
"""Return dictionary of receiver's properties suitable for clients."""
res = {"kind": self._yang_class()}
if self.mandatory:
res["mandatory"] = True
if self.description:
res["description"] = self.description
return res
|
python
|
{
"resource": ""
}
|
q16046
|
SchemaNode._iname2qname
|
train
|
def _iname2qname(self, iname: InstanceName) -> QualName:
    """Translate instance name to qualified name in the receiver's
    context; an unprefixed name falls back to the receiver's namespace."""
    prefix, sep, local = iname.partition(":")
    if sep:
        return (local, prefix)
    return (prefix, self.ns)
|
python
|
{
"resource": ""
}
|
q16047
|
SchemaNode._handle_substatements
|
train
|
def _handle_substatements(self, stmt: Statement, sctx: SchemaContext) -> None:
    """Dispatch the registered handler for each substatement of `stmt`."""
    for sub in stmt.substatements:
        if sub.prefix:
            # Prefixed (extension) keywords are looked up by
            # "<module-name>:<keyword>".
            modname = sctx.schema_data.modules[
                sctx.text_mid].prefix_map[sub.prefix][0]
            key = modname + ":" + sub.keyword
        else:
            key = sub.keyword
        handler = getattr(self, SchemaNode._stmt_callback.get(key, "_noop"))
        handler(sub, sctx)
|
python
|
{
"resource": ""
}
|
q16048
|
SchemaNode._follow_leafref
|
train
|
def _follow_leafref(
        self, xpath: "Expr", init: "TerminalNode") -> Optional["DataNode"]:
    """Return the data node referred to by a leafref path.

    Args:
        xpath: XPath expression compiled from a leafref path.
        init: initial context node
    """
    if isinstance(xpath, LocationPath):
        # Resolve the left part first, then continue with the right part
        # relative to its result.
        lft = self._follow_leafref(xpath.left, init)
        if lft is None:
            return None
        return lft._follow_leafref(xpath.right, init)
    elif isinstance(xpath, Step):
        if xpath.axis == Axis.parent:
            return self.data_parent()
        elif xpath.axis == Axis.child:
            if isinstance(self, InternalNode) and xpath.qname:
                # An unqualified name inherits the initial node's namespace.
                qname = (xpath.qname if xpath.qname[1]
                         else (xpath.qname[0], init.ns))
                return self.get_data_child(*qname)
    elif isinstance(xpath, Root):
        return self.schema_root()
    # Unsupported expression kinds resolve to no node.
    return None
|
python
|
{
"resource": ""
}
|
q16049
|
SchemaNode._nacm_default_deny_stmt
|
train
|
def _nacm_default_deny_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:
    """Set NACM default access on nodes that support it."""
    if not hasattr(self, 'default_deny'):
        # Receiver does not participate in NACM default-deny handling.
        return
    keyword = stmt.keyword
    if keyword == "default-deny-all":
        self.default_deny = DefaultDeny.all
    elif keyword == "default-deny-write":
        self.default_deny = DefaultDeny.write
|
python
|
{
"resource": ""
}
|
q16050
|
InternalNode.get_child
|
train
|
def get_child(self, name: YangIdentifier,
              ns: YangIdentifier = None) -> Optional[SchemaNode]:
    """Return receiver's schema child.

    Args:
        name: Child's name.
        ns: Child's namespace (= `self.ns` if absent).
    """
    namespace = ns if ns else self.ns
    # Anonymous children (e.g. cases/choices without a name) are searched
    # only after all direct children have been checked.
    deferred = []
    for child in self.children:
        if child.name is None:
            deferred.append(child)
        elif child.name == name and child.ns == namespace:
            return child
    for anon in deferred:
        found = anon.get_child(name, namespace)
        if found is not None:
            return found
|
python
|
{
"resource": ""
}
|
q16051
|
InternalNode.get_schema_descendant
|
train
|
def get_schema_descendant(
        self, route: SchemaRoute) -> Optional[SchemaNode]:
    """Return descendant schema node or ``None`` if not found.

    Args:
        route: Schema route to the descendant node
            (relative to the receiver).
    """
    current = self
    for step in route:
        current = current.get_child(*step)
        if current is None:
            return None
    return current
|
python
|
{
"resource": ""
}
|
q16052
|
InternalNode.get_data_child
|
train
|
def get_data_child(self, name: YangIdentifier,
                   ns: YangIdentifier = None) -> Optional["DataNode"]:
    """Return the data node directly under the receiver, looking through
    non-data children when necessary."""
    namespace = ns if ns else self.ns
    pending = []
    for child in self.children:
        matches = child.name == name and child.ns == namespace
        if matches and isinstance(child, DataNode):
            return child
        if matches:
            # A matching non-data child gets searched first.
            pending.insert(0, child)
        elif not isinstance(child, DataNode):
            pending.append(child)
    for candidate in pending:
        found = candidate.get_data_child(name, namespace)
        if found:
            return found
|
python
|
{
"resource": ""
}
|
q16053
|
InternalNode.filter_children
|
train
|
def filter_children(self, ctype: ContentType = None) -> List[SchemaNode]:
    """Return receiver's children matching the given content type.

    Args:
        ctype: Content type (defaults to the receiver's own).
    """
    if ctype is None:
        ctype = self.content_type()
    result = []
    for child in self.children:
        # RPCs/actions and notifications never count as regular children.
        if isinstance(child, (RpcActionNode, NotificationNode)):
            continue
        if child.content_type().value & ctype.value:
            result.append(child)
    return result
|
python
|
{
"resource": ""
}
|
q16054
|
InternalNode.data_children
|
train
|
def data_children(self) -> List["DataNode"]:
    """Return all data nodes directly under the receiver, looking
    through non-data children (but not schema-tree nodes)."""
    result = []
    for child in self.children:
        if isinstance(child, DataNode):
            result.append(child)
            continue
        if not isinstance(child, SchemaTreeNode):
            result.extend(child.data_children())
    return result
|
python
|
{
"resource": ""
}
|
q16055
|
InternalNode._child_inst_names
|
train
|
def _child_inst_names(self) -> Set[InstanceName]:
    """Return the (frozen) set of instance names under the receiver."""
    return frozenset(child.iname() for child in self.data_children())
|
python
|
{
"resource": ""
}
|
q16056
|
InternalNode._make_schema_patterns
|
train
|
def _make_schema_patterns(self) -> None:
    """Build the schema pattern for the receiver and recurse into its
    internal data descendants."""
    self.schema_pattern = self._schema_pattern()
    for child in self.data_children():
        if isinstance(child, InternalNode):
            child._make_schema_patterns()
|
python
|
{
"resource": ""
}
|
q16057
|
InternalNode._handle_child
|
train
|
def _handle_child(
        self, node: SchemaNode, stmt: Statement, sctx: SchemaContext) -> None:
    """Add child node to the receiver and handle substatements."""
    if not sctx.schema_data.if_features(stmt, sctx.text_mid):
        # The child is disabled by its if-feature conditions.
        return
    node.name = stmt.argument
    node.ns = sctx.default_ns
    node._get_description(stmt)
    # Attach before processing substatements so the child can reach its
    # ancestors while handling them.
    self._add_child(node)
    node._handle_substatements(stmt, sctx)
|
python
|
{
"resource": ""
}
|
q16058
|
InternalNode._uses_stmt
|
train
|
def _uses_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:
    """Handle uses statement."""
    if not sctx.schema_data.if_features(stmt, sctx.text_mid):
        # The grouping expansion is disabled by if-feature conditions.
        return
    grp, gid = sctx.schema_data.get_definition(stmt, sctx)
    if stmt.find1("when"):
        # A "when" condition needs an anchor node so it can apply to all
        # expanded children as a group.
        sn = GroupNode()
        self._add_child(sn)
    else:
        sn = self
    # Expand the grouping, then apply augments and refinements to the
    # expansion target in that order.
    sn._handle_substatements(grp, gid)
    for augst in stmt.find_all("augment"):
        sn._augment_stmt(augst, sctx)
    for refst in stmt.find_all("refine"):
        sn._refine_stmt(refst, sctx)
|
python
|
{
"resource": ""
}
|
q16059
|
InternalNode._container_stmt
|
train
|
def _container_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:
    """Handle a ``container`` statement by adding a ContainerNode child."""
    child = ContainerNode()
    self._handle_child(child, stmt, sctx)
|
python
|
{
"resource": ""
}
|
q16060
|
InternalNode._identity_stmt
|
train
|
def _identity_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:
    """Handle identity statement."""
    if not sctx.schema_data.if_features(stmt, sctx.text_mid):
        # The identity is disabled by its if-feature conditions.
        return
    id = (stmt.argument, sctx.schema_data.namespace(sctx.text_mid))
    adj = sctx.schema_data.identity_adjs.setdefault(id, IdentityAdjacency())
    for bst in stmt.find_all("base"):
        # Record the base->derived relationship in both directions.
        bid = sctx.schema_data.translate_pname(bst.argument, sctx.text_mid)
        adj.bases.add(bid)
        badj = sctx.schema_data.identity_adjs.setdefault(
            bid, IdentityAdjacency())
        badj.derivs.add(id)
    sctx.schema_data.identity_adjs[id] = adj
|
python
|
{
"resource": ""
}
|
q16061
|
InternalNode._list_stmt
|
train
|
def _list_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:
    """Handle a ``list`` statement by adding a ListNode child."""
    child = ListNode()
    self._handle_child(child, stmt, sctx)
|
python
|
{
"resource": ""
}
|
q16062
|
InternalNode._choice_stmt
|
train
|
def _choice_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:
    """Handle a "choice" statement by adding a ChoiceNode child."""
    child = ChoiceNode()
    self._handle_child(child, stmt, sctx)
|
python
|
{
"resource": ""
}
|
q16063
|
InternalNode._case_stmt
|
train
|
def _case_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:
    """Handle a "case" statement by adding a CaseNode child."""
    child = CaseNode()
    self._handle_child(child, stmt, sctx)
|
python
|
{
"resource": ""
}
|
q16064
|
InternalNode._leaf_stmt
|
train
|
def _leaf_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:
    """Handle a "leaf" statement by adding a typed LeafNode child."""
    leaf = LeafNode()
    tstmt = stmt.find1("type", required=True)
    leaf.type = DataType._resolve_type(tstmt, sctx)
    self._handle_child(leaf, stmt, sctx)
|
python
|
{
"resource": ""
}
|
q16065
|
InternalNode._leaf_list_stmt
|
train
|
def _leaf_list_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:
    """Handle a "leaf-list" statement by adding a typed LeafListNode child."""
    llist = LeafListNode()
    tstmt = stmt.find1("type", required=True)
    llist.type = DataType._resolve_type(tstmt, sctx)
    self._handle_child(llist, stmt, sctx)
|
python
|
{
"resource": ""
}
|
q16066
|
InternalNode._rpc_action_stmt
|
train
|
def _rpc_action_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:
    """Handle an "rpc" or "action" statement by adding an RpcActionNode child."""
    child = RpcActionNode()
    self._handle_child(child, stmt, sctx)
|
python
|
{
"resource": ""
}
|
q16067
|
InternalNode._notification_stmt
|
train
|
def _notification_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:
    """Handle a "notification" statement by adding a NotificationNode child."""
    child = NotificationNode()
    self._handle_child(child, stmt, sctx)
|
python
|
{
"resource": ""
}
|
q16068
|
InternalNode._anydata_stmt
|
train
|
def _anydata_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:
    """Handle an "anydata" statement by adding an AnydataNode child."""
    child = AnydataNode()
    self._handle_child(child, stmt, sctx)
|
python
|
{
"resource": ""
}
|
q16069
|
InternalNode._ascii_tree
|
train
|
def _ascii_tree(self, indent: str, no_types: bool, val_count: bool) -> str:
    """Return the receiver's subtree as ASCII art.

    Args:
        indent: Prefix prepended to every output line (carries the
            continuation bars of the enclosing levels).
        no_types: If ``True``, type information is omitted from the
            tree lines (passed through to ``_tree_line``).
        val_count: If ``True``, each node's ``val_count`` attribute is
            appended in curly braces.
    """
    def suffix(sn):
        # Line terminator, optionally carrying the node's val_count.
        return f" {{{sn.val_count}}}\n" if val_count else "\n"
    if not self.children:
        return ""
    # Flatten and sort children by qualified name for stable output.
    cs = []
    for c in self.children:
        cs.extend(c._flatten())
    cs.sort(key=lambda x: x.qual_name)
    res = ""
    # Every child but the last continues the vertical bar ("| ");
    # the last child's subtree gets a plain indent so the bar ends.
    for c in cs[:-1]:
        res += (indent + c._tree_line(no_types) + suffix(c) +
                c._ascii_tree(indent + "| ", no_types, val_count))
    return (res + indent + cs[-1]._tree_line(no_types) + suffix(cs[-1]) +
            cs[-1]._ascii_tree(indent + " ", no_types, val_count))
|
python
|
{
"resource": ""
}
|
q16070
|
SchemaTreeNode._annotation_stmt
|
train
|
def _annotation_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:
    """Register a metadata annotation declared by an "annotation" statement."""
    if not sctx.schema_data.if_features(stmt, sctx.text_mid):
        return
    desc = stmt.find1("description")
    atype = DataType._resolve_type(stmt.find1("type", required=True), sctx)
    key = (stmt.argument, sctx.default_ns)
    self.annotations[key] = Annotation(atype, desc.argument if desc else None)
|
python
|
{
"resource": ""
}
|
q16071
|
DataNode.orphan_instance
|
train
|
def orphan_instance(self, rval: RawValue) -> "ObjectMember":
    """Return an isolated (parent-less) instance of the receiver.

    Args:
        rval: Raw value to be cooked and used for the returned instance.
    """
    cooked = self.from_raw(rval)
    return ObjectMember(self.iname(), {}, cooked, None, self, datetime.now())
|
python
|
{
"resource": ""
}
|
q16072
|
DataNode.split_instance_route
|
train
|
def split_instance_route(self, route: "InstanceRoute") -> Optional[Tuple[
        "InstanceRoute", "InstanceRoute"]]:
    """Split `route` into the part up to receiver and the rest.

    Args:
        route: Absolute instance route (the receiver should correspond to an
            instance node on this route).

    Returns:
        A tuple consisting of

        - the part of `route` from the root up to and including the
          instance whose schema node is the receiver, and
        - the rest of `route`.

        ``None`` is returned if the receiver is not on the route.
    """
    # Collect instance names on the schema path from the receiver up
    # to the data root; popping then yields them root-first.
    sroute = []
    sn = self
    while sn:
        sroute.append(sn.iname())
        sn = sn.data_parent()
    i = 0
    while True:
        if not sroute:
            break
        # Match the next schema-path name against the route's current
        # MemberName selector.
        inst = sroute.pop()
        if inst != route[i].iname():
            return None
        while True:  # skip up to next MemberName
            i += 1
            if i >= len(route) or isinstance(route[i], MemberName):
                break
        if not sroute:
            # Receiver reached: split the route at position i.
            return (InstanceRoute(route[:i]), InstanceRoute(route[i:]))
        if i >= len(route):
            # Route exhausted before reaching the receiver.
            return None
|
python
|
{
"resource": ""
}
|
q16073
|
ListNode._check_list_props
|
train
|
def _check_list_props(self, inst: "InstanceNode") -> None:
    """Verify key uniqueness and every "unique" constraint, if applicable."""
    if self.keys:
        self._check_keys(inst)
    for constraint in self.unique:
        self._check_unique(constraint, inst)
|
python
|
{
"resource": ""
}
|
q16074
|
ListNode.orphan_entry
|
train
|
def orphan_entry(self, rval: RawObject) -> "ArrayEntry":
    """Return an isolated (parent-less) entry of the receiver.

    Args:
        rval: Raw object to be cooked and used for the returned entry.
    """
    cooked = self.entry_from_raw(rval)
    return ArrayEntry(0, EmptyList(), EmptyList(), cooked, None, self,
                      cooked.timestamp)
|
python
|
{
"resource": ""
}
|
q16075
|
ChoiceNode._active_case
|
train
|
def _active_case(self, value: ObjectValue) -> Optional["CaseNode"]:
    """Return the receiver's case that is active in `value`, if any."""
    for case in self.children:
        if any(dc.iname() in value for dc in case.data_children()):
            return case
    return None
|
python
|
{
"resource": ""
}
|
q16076
|
LeafListNode.default
|
train
|
def default(self) -> Optional[ScalarValue]:
    """Default value of the receiver, if any.

    A mandatory leaf-list has no default; an explicit default takes
    precedence over the type's default.
    """
    if self.mandatory:
        return None
    if self._default is not None:
        return self._default
    tdef = self.type.default
    return ArrayValue([tdef]) if tdef is not None else None
|
python
|
{
"resource": ""
}
|
q16077
|
RpcActionNode._input_stmt
|
train
|
def _input_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:
    """Handle an RPC or action "input" statement."""
    inp = self.get_child("input")
    inp._handle_substatements(stmt, sctx)
|
python
|
{
"resource": ""
}
|
q16078
|
RpcActionNode._output_stmt
|
train
|
def _output_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:
    """Handle an RPC or action "output" statement."""
    outp = self.get_child("output")
    outp._handle_substatements(stmt, sctx)
|
python
|
{
"resource": ""
}
|
q16079
|
LinkedList.from_list
|
train
|
def from_list(cls, vals: List[Value] = None, reverse: bool = False) -> "LinkedList":
    """Create a linked list from a standard Python list.

    Args:
        vals: Python list of instance values (defaults to empty). A
            ``None`` sentinel replaces the original mutable ``[]``
            default, which is shared across calls and a known Python
            pitfall (harmless here only because it is never mutated).
        reverse: If ``True``, cons the values in the given order, which
            produces a linked list in reverse order; otherwise the
            values are consed back-to-front so the result preserves
            the original order.
    """
    if vals is None:
        vals = []
    res = EmptyList()
    # cons() prepends, so iterate back-to-front to preserve order.
    for v in (vals if reverse else reversed(vals)):
        res = cls(v, res)
    return res
|
python
|
{
"resource": ""
}
|
q16080
|
InstanceNode.path
|
train
|
def path(self) -> Tuple[InstanceKey]:
    """Return the tuple of keys on the path from root to the receiver."""
    keys = []
    node = self
    while node.parinst:
        keys.append(node._key)
        node = node.parinst
    return tuple(reversed(keys))
|
python
|
{
"resource": ""
}
|
q16081
|
InstanceNode.put_member
|
train
|
def put_member(self, name: InstanceName, value: Value,
               raw: bool = False) -> "InstanceNode":
    """Return receiver's member with a new value.

    The member is created if the schema permits it and it doesn't
    exist yet.

    Args:
        name: Instance name of the member.
        value: New value of the member.
        raw: Flag to be set if `value` is raw.

    Raises:
        NonexistentSchemaNode: If member `name` is not permitted by the
            schema.
        InstanceValueError: If the receiver's value is not an object.
    """
    if not isinstance(self.value, ObjectValue):
        raise InstanceValueError(self.json_pointer(), "member of non-object")
    csn = self._member_schema_node(name)
    cooked = csn.from_raw(value, self.json_pointer()) if raw else value
    newval = self.value.copy()
    newval[name] = cooked
    return self._copy(newval)._member(name)
|
python
|
{
"resource": ""
}
|
q16082
|
InstanceNode.up
|
train
|
def up(self) -> "InstanceNode":
    """Return an instance node corresponding to the receiver's parent.

    Raises:
        NonexistentInstance: If there is no parent.
    """
    parent = self.parinst
    ts = max(parent.timestamp, self.timestamp)
    return parent._copy(self._zip(), ts)
|
python
|
{
"resource": ""
}
|
q16083
|
InstanceNode.top
|
train
|
def top(self) -> "InstanceNode":
    """Return the instance node at the root of the data tree."""
    node = self
    while node.parinst:
        node = node.up()
    return node
|
python
|
{
"resource": ""
}
|
q16084
|
InstanceNode.goto
|
train
|
def goto(self, iroute: "InstanceRoute") -> "InstanceNode":
    """Move the focus to an instance inside the receiver's value.

    Args:
        iroute: Instance route (relative to the receiver).

    Returns:
        The instance node corresponding to the target instance.

    Raises:
        InstanceValueError: If `iroute` is incompatible with the
            receiver's value.
        NonexistentInstance: If the instance node doesn't exist.
        NonDataNode: If an instance route addresses a non-data node
            (rpc/action/notification).
    """
    target = self
    for selector in iroute:
        target = selector.goto_step(target)
    return target
|
python
|
{
"resource": ""
}
|
q16085
|
InstanceNode.peek
|
train
|
def peek(self, iroute: "InstanceRoute") -> Optional[Value]:
    """Return a value within the receiver's subtree, or ``None``.

    Args:
        iroute: Instance route (relative to the receiver).
    """
    val, sn = self.value, self.schema_node
    for selector in iroute:
        val, sn = selector.peek_step(val, sn)
        if val is None:
            return None
    return val
|
python
|
{
"resource": ""
}
|
q16086
|
InstanceNode.validate
|
train
|
def validate(self, scope: ValidationScope = ValidationScope.all,
             ctype: ContentType = ContentType.config) -> None:
    """Validate the receiver's value against its schema node.

    Args:
        scope: Scope of the validation (syntax, semantics or all).
        ctype: Receiver's content type.

    Raises:
        SchemaError: If the value doesn't conform to the schema.
        SemanticError: If the value violates a semantic constraint.
        YangTypeError: If the value is a scalar of incorrect type.
    """
    self.schema_node._validate(self, scope, ctype)
|
python
|
{
"resource": ""
}
|
q16087
|
InstanceNode.add_defaults
|
train
|
def add_defaults(self, ctype: ContentType = None) -> "InstanceNode":
    """Return the receiver with defaults added recursively to its value.

    Args:
        ctype: Content type of the defaults to be added. If it is
            ``None``, the content type will be the same as receiver's.
    """
    val = self.value
    # Only internal nodes with structured values can receive defaults.
    if not (isinstance(val, StructuredValue) and self.is_internal()):
        return self
    res = self
    if isinstance(val, ObjectValue):
        if val:
            # Recurse into each member; after the first iteration `res`
            # is a member node, so siblings are reached via sibling().
            for mn in self._member_names():
                m = res._member(mn) if res is self else res.sibling(mn)
                res = m.add_defaults(ctype)
            res = res.up()
        # Finally let the schema node add its own missing defaults.
        return self.schema_node._add_defaults(res, ctype)
    # Array value: walk the entries and add defaults to each one.
    if not val:
        return res
    en = res[0]
    while True:
        res = en.add_defaults(ctype)
        try:
            en = res.next()
        except NonexistentInstance:
            break
    return res.up()
|
python
|
{
"resource": ""
}
|
q16088
|
InstanceNode._node_set
|
train
|
def _node_set(self) -> List["InstanceNode"]:
    """XPath - return the list of all receiver's nodes."""
    if isinstance(self.value, ArrayValue):
        return list(self)
    return [self]
|
python
|
{
"resource": ""
}
|
q16089
|
InstanceNode._children
|
train
|
def _children(self, qname:
              Union[QualName, bool] = None) -> List["InstanceNode"]:
    """XPath - return the list of receiver's children.

    Args:
        qname: Qualified name of a single child to select; if false or
            ``None``, all children are returned.
    """
    sn = self.schema_node
    if not isinstance(sn, InternalNode):
        return []
    if qname:
        cn = sn.get_data_child(*qname)
        if cn is None:
            return []
        iname = cn.iname()
        if iname in self.value:
            return self._member(iname)._node_set()
        # The member is absent: try to materialize its default instance.
        wd = cn._default_instance(self, ContentType.all, lazy=True)
        if iname not in wd.value:
            return []
        # The default counts only if every schema node between the
        # child and the receiver is in force: its "when" holds, and any
        # case on the way is its choice's default case.
        while True:
            cn = cn.parent
            if cn is sn:
                return wd._member(iname)._node_set()
            if (cn.when and not cn.when.evaluate(self) or
                    isinstance(cn, CaseNode) and
                    cn.qual_name != cn.parent.default_case):
                return []
    # No filter: collect node sets of all members, defaults included.
    res = []
    wd = sn._add_defaults(self, ContentType.all, lazy=True)
    for mn in wd.value:
        res.extend(wd._member(mn)._node_set())
    return res
|
python
|
{
"resource": ""
}
|
q16090
|
InstanceNode._descendants
|
train
|
def _descendants(self, qname: Union[QualName, bool] = None,
                 with_self: bool = False) -> List["InstanceNode"]:
    """XPath - return the list of receiver's descendants."""
    res = []
    if with_self and (not qname or self.qual_name == qname):
        res.append(self)
    for child in self._children():
        if not qname or child.qual_name == qname:
            res.append(child)
        res.extend(child._descendants(qname))
    return res
|
python
|
{
"resource": ""
}
|
q16091
|
ObjectMember.qual_name
|
train
|
def qual_name(self) -> QualName:
    """Return the receiver's qualified name.

    The instance key's prefix (before ":") overrides the receiver's
    namespace; otherwise the namespace attribute is used.
    """
    prefix, sep, local = self._key.partition(":")
    if sep:
        return (local, prefix)
    return (prefix, self.namespace)
|
python
|
{
"resource": ""
}
|
q16092
|
ObjectMember.sibling
|
train
|
def sibling(self, name: InstanceName) -> "ObjectMember":
    """Return an instance node corresponding to a sibling member.

    Args:
        name: Instance name of the sibling member.

    Raises:
        NonexistentSchemaNode: If member `name` is not permitted by the
            schema.
        NonexistentInstance: If sibling member `name` doesn't exist.
    """
    ssn = self.parinst._member_schema_node(name)
    sibs = self.siblings.copy()
    try:
        newval = sibs.pop(name)
    except KeyError:
        raise NonexistentInstance(self.json_pointer(),
                                  f"member '{name}'") from None
    # The receiver becomes a sibling of the returned member.
    sibs[self.name] = self.value
    return ObjectMember(name, sibs, newval, self.parinst, ssn,
                        self.timestamp)
|
python
|
{
"resource": ""
}
|
q16093
|
ObjectMember.look_up
|
train
|
def look_up(self, **keys: Dict[InstanceName, ScalarValue]) -> "ArrayEntry":
    """Return the entry with matching keys.

    Args:
        keys: Keys and values specified as keyword arguments.

    Raises:
        InstanceValueError: If the receiver's value is not a YANG list.
        NonexistentInstance: If no entry with matching keys exists.
    """
    if not isinstance(self.schema_node, ListNode):
        raise InstanceValueError(self.json_pointer(), "lookup on non-list")
    try:
        for i in range(len(self.value)):
            entry = self.value[i]
            if all(entry[k] == v for k, v in keys.items()):
                return self._entry(i)
        raise NonexistentInstance(self.json_pointer(), "entry lookup failed")
    except KeyError:
        raise NonexistentInstance(
            self.json_pointer(), "entry lookup failed") from None
    except TypeError:
        raise InstanceValueError(
            self.json_pointer(), "lookup on non-list") from None
|
python
|
{
"resource": ""
}
|
q16094
|
ObjectMember._zip
|
train
|
def _zip(self) -> ObjectValue:
    """Reassemble the receiver and its siblings into an object value."""
    obj = ObjectValue(self.siblings.copy(), self.timestamp)
    obj[self.name] = self.value
    return obj
|
python
|
{
"resource": ""
}
|
q16095
|
ArrayEntry.previous
|
train
|
def previous(self) -> "ArrayEntry":
    """Return an instance node corresponding to the previous entry.

    Raises:
        NonexistentInstance: If the receiver is the first entry of the
            parent array.
    """
    try:
        newval, rest = self.before.pop()
    except IndexError:
        raise NonexistentInstance(
            self.json_pointer(), "previous of first") from None
    return ArrayEntry(self.index - 1, rest, self.after.cons(self.value),
                      newval, self.parinst, self.schema_node,
                      self.timestamp)
|
python
|
{
"resource": ""
}
|
q16096
|
ArrayEntry.next
|
train
|
def next(self) -> "ArrayEntry":
    """Return an instance node corresponding to the next entry.

    Raises:
        NonexistentInstance: If the receiver is the last entry of the
            parent array.
    """
    try:
        newval, rest = self.after.pop()
    except IndexError:
        raise NonexistentInstance(
            self.json_pointer(), "next of last") from None
    return ArrayEntry(self.index + 1, self.before.cons(self.value), rest,
                      newval, self.parinst, self.schema_node,
                      self.timestamp)
|
python
|
{
"resource": ""
}
|
q16097
|
ArrayEntry.insert_before
|
train
|
def insert_before(self, value: Union[RawValue, Value],
                  raw: bool = False) -> "ArrayEntry":
    """Insert a new entry before the receiver.

    Args:
        value: The value of the new entry.
        raw: Flag to be set if `value` is raw.

    Returns:
        An instance node of the newly inserted entry.
    """
    cooked = self._cook_value(value, raw)
    return ArrayEntry(self.index, self.before, self.after.cons(self.value),
                      cooked, self.parinst, self.schema_node,
                      datetime.now())
|
python
|
{
"resource": ""
}
|
q16098
|
ArrayEntry._zip
|
train
|
def _zip(self) -> ArrayValue:
    """Reassemble the receiver and its neighbours into an array value."""
    # `before` is stored nearest-first, so reverse it to get array order.
    items = list(self.before)[::-1]
    items.append(self.value)
    items += list(self.after)
    return ArrayValue(items, self.timestamp)
|
python
|
{
"resource": ""
}
|
q16099
|
ArrayEntry._ancestors_or_self
|
train
|
def _ancestors_or_self(
        self, qname: Union[QualName, bool] = None) -> List[InstanceNode]:
    """XPath - return the receiver's ancestors, including itself."""
    res = [self] if not qname or self.qual_name == qname else []
    return res + self.up()._ancestors(qname)
|
python
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.