| _id (string, 2-7 chars) | title (string, 1-88 chars) | partition (string, 3 classes) | text (string, 75-19.8k chars) | language (string, 1 class) | meta_information (dict) |
|---|---|---|---|---|---|
q22700
|
load_builtin_plugins
|
train
|
def load_builtin_plugins() -> int:
"""
Load built-in plugins distributed along with "nonebot" package.
"""
plugin_dir = os.path.join(os.path.dirname(__file__), 'plugins')
return load_plugins(plugin_dir, 'nonebot.plugins')
|
python
|
{
"resource": ""
}
|
q22701
|
_extract_image_urls
|
train
|
def _extract_image_urls(arg: Message_T) -> List[str]:
"""Extract all image urls from a message-like object."""
arg_as_msg = Message(arg)
return [s.data['url'] for s in arg_as_msg
if s.type == 'image' and 'url' in s.data]
|
python
|
{
"resource": ""
}
|
q22702
|
on_natural_language
|
train
|
def on_natural_language(keywords: Union[Optional[Iterable], Callable] = None,
*, permission: int = perm.EVERYBODY,
only_to_me: bool = True,
only_short_message: bool = True,
allow_empty_message: bool = False) -> Callable:
"""
Decorator to register a function as a natural language processor.
:param keywords: keywords to respond to, if None, respond to all messages
:param permission: permission required by the processor
:param only_to_me: only handle messages to me
:param only_short_message: only handle short messages
:param allow_empty_message: handle empty messages
"""
def deco(func: Callable) -> Callable:
nl_processor = NLProcessor(func=func, keywords=keywords,
permission=permission,
only_to_me=only_to_me,
only_short_message=only_short_message,
allow_empty_message=allow_empty_message)
_nl_processors.add(nl_processor)
return func
if isinstance(keywords, Callable):
# here "keywords" is the function to be decorated
return on_natural_language()(keywords)
else:
return deco
|
python
|
{
"resource": ""
}
|
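A minimal registration sketch for the decorator above, assuming the NoneBot v1 API (`NLPSession`, `IntentCommand` are exported by `nonebot`); the 'weather' command name is hypothetical:

from nonebot import on_natural_language, NLPSession, IntentCommand

@on_natural_language(keywords={'weather'}, only_to_me=False)
async def weather_nlp(session: NLPSession):
    # Returning an IntentCommand lets handle_natural_language route the
    # message to the 'weather' command with the raw text as current_arg.
    return IntentCommand(90.0, 'weather', current_arg=session.msg_text)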
q22703
|
handle_natural_language
|
train
|
async def handle_natural_language(bot: NoneBot, ctx: Context_T) -> bool:
"""
Handle a message as natural language.
This function is typically called by "handle_message".
:param bot: NoneBot instance
:param ctx: message context
:return: whether the message was handled as natural language
"""
session = NLPSession(bot, ctx, str(ctx['message']))
# use msg_text here because CQ code "share" may be very long,
# at the same time some plugins may want to handle it
msg_text_length = len(session.msg_text)
futures = []
for p in _nl_processors:
if not p.allow_empty_message and not session.msg:
# don't allow empty msg, but it is one, so skip to next
continue
if p.only_short_message and \
msg_text_length > bot.config.SHORT_MESSAGE_MAX_LENGTH:
continue
if p.only_to_me and not ctx['to_me']:
continue
should_run = await perm.check_permission(bot, ctx, p.permission)
if should_run and p.keywords:
for kw in p.keywords:
if kw in session.msg_text:
break
else:
# no keyword matches
should_run = False
if should_run:
futures.append(asyncio.ensure_future(p.func(session)))
if futures:
# wait for intent commands, and sort them by confidence
intent_commands = []
for fut in futures:
try:
res = await fut
if isinstance(res, NLPResult):
intent_commands.append(res.to_intent_command())
elif isinstance(res, IntentCommand):
intent_commands.append(res)
except Exception as e:
logger.error('An exception occurred while running '
'some natural language processor:')
logger.exception(e)
intent_commands.sort(key=lambda ic: ic.confidence, reverse=True)
logger.debug(f'Intent commands: {intent_commands}')
if intent_commands and intent_commands[0].confidence >= 60.0:
# choose the intent command with highest confidence
chosen_cmd = intent_commands[0]
logger.debug(
f'Intent command with highest confidence: {chosen_cmd}')
return await call_command(
bot, ctx, chosen_cmd.name,
args=chosen_cmd.args,
current_arg=chosen_cmd.current_arg,
check_perm=False
)
else:
logger.debug('No intent command has enough confidence')
return False
|
python
|
{
"resource": ""
}
|
q22704
|
check_permission
|
train
|
async def check_permission(bot: NoneBot, ctx: Context_T,
permission_required: int) -> bool:
"""
Check if the context has the permission required.
:param bot: NoneBot instance
:param ctx: message context
:param permission_required: permission required
:return: the context has the permission
"""
min_ctx_kwargs = {}
for field in _min_context_fields:
if field in ctx:
min_ctx_kwargs[field] = ctx[field]
else:
min_ctx_kwargs[field] = None
min_ctx = _MinContext(**min_ctx_kwargs)
return await _check(bot, min_ctx, permission_required)
|
python
|
{
"resource": ""
}
|
q22705
|
match_regex
|
train
|
def match_regex(pattern: str, message=None, *, flags=0,
fullmatch: bool = False) -> Filter_T:
"""
Validate any string object to ensure it matches a given pattern.
"""
pattern = re.compile(pattern, flags)
def validate(value):
if fullmatch:
if not re.fullmatch(pattern, value):
_raise_failure(message)
else:
if not re.match(pattern, value):
_raise_failure(message)
return value
return validate
|
python
|
{
"resource": ""
}
|
q22706
|
ensure_true
|
train
|
def ensure_true(bool_func: Callable[[Any], bool],
message=None) -> Filter_T:
"""
Validate any object to ensure the result of applying
a boolean function to it is True.
"""
def validate(value):
if bool_func(value) is not True:
_raise_failure(message)
return value
return validate
|
python
|
{
"resource": ""
}
|
q22707
|
between_inclusive
|
train
|
def between_inclusive(start=None, end=None, message=None) -> Filter_T:
"""
Validate any comparable object to ensure it's between
`start` and `end` inclusively.
"""
def validate(value):
if start is not None and value < start:
_raise_failure(message)
if end is not None and end < value:
_raise_failure(message)
return value
return validate
|
python
|
{
"resource": ""
}
|
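A small standalone sketch of the three validator factories above; each returns a callable that passes the value through unchanged or calls `_raise_failure` with the given message (the names below are hypothetical):

# Assumes match_regex, ensure_true and between_inclusive are importable
# from the module that defines them.
check_code = match_regex(r'\d{6}', message='expected a 6-digit code',
                         fullmatch=True)
check_age = between_inclusive(0, 120, message='age out of range')
check_even = ensure_true(lambda n: n % 2 == 0, message='must be even')

assert check_code('123456') == '123456'
assert check_age(42) == 42
assert check_even(10) == 10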
q22708
|
check_docstring_first
|
train
|
def check_docstring_first(src, filename='<unknown>'):
# type: (bytes, str) -> int
"""Returns nonzero if the source has what looks like a docstring that is
not at the beginning of the source.
A string will be considered a docstring if it is a STRING token with a
col offset of 0.
"""
found_docstring_line = None
found_code_line = None
tok_gen = tokenize_tokenize(io.BytesIO(src).readline)
for tok_type, _, (sline, scol), _, _ in tok_gen:
# Looks like a docstring!
if tok_type == tokenize.STRING and scol == 0:
if found_docstring_line is not None:
print(
'{}:{} Multiple module docstrings '
'(first docstring on line {}).'.format(
filename, sline, found_docstring_line,
),
)
return 1
elif found_code_line is not None:
print(
'{}:{} Module docstring appears after code '
'(code seen on line {}).'.format(
filename, sline, found_code_line,
),
)
return 1
else:
found_docstring_line = sline
elif tok_type not in NON_CODE_TOKENS and found_code_line is None:
found_code_line = sline
return 0
|
python
|
{
"resource": ""
}
|
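An illustrative call to the hook above, assuming the module's `io`/`tokenize` imports and `NON_CODE_TOKENS` are in scope; the sample sources are hypothetical:

good = b'"""Module docstring."""\nx = 1\n'
bad = b'x = 1\n"""Not really a module docstring."""\n'

assert check_docstring_first(good, filename='good.py') == 0
assert check_docstring_first(bad, filename='bad.py') == 1  # prints a warning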
q22709
|
get_aws_secrets_from_env
|
train
|
def get_aws_secrets_from_env(): # type: () -> Set[str]
"""Extract AWS secrets from environment variables."""
keys = set()
for env_var in (
'AWS_SECRET_ACCESS_KEY', 'AWS_SECURITY_TOKEN', 'AWS_SESSION_TOKEN',
):
if env_var in os.environ:
keys.add(os.environ[env_var])
return keys
|
python
|
{
"resource": ""
}
|
q22710
|
get_aws_secrets_from_file
|
train
|
def get_aws_secrets_from_file(credentials_file): # type: (str) -> Set[str]
"""Extract AWS secrets from configuration files.
Read an ini-style configuration file and return a set with all found AWS
secret access keys.
"""
aws_credentials_file_path = os.path.expanduser(credentials_file)
if not os.path.exists(aws_credentials_file_path):
return set()
parser = configparser.ConfigParser()
try:
parser.read(aws_credentials_file_path)
except configparser.MissingSectionHeaderError:
return set()
keys = set()
for section in parser.sections():
for var in (
'aws_secret_access_key', 'aws_security_token',
'aws_session_token',
):
try:
key = parser.get(section, var).strip()
if key:
keys.add(key)
except configparser.NoOptionError:
pass
return keys
|
python
|
{
"resource": ""
}
|
q22711
|
check_file_for_aws_keys
|
train
|
def check_file_for_aws_keys(filenames, keys):
# type: (Sequence[str], Set[str]) -> List[Dict[str, str]]
"""Check if files contain AWS secrets.
Return a list of all files containing AWS secrets and keys found, with all
but the first four characters obfuscated to ease debugging.
"""
bad_files = []
for filename in filenames:
with open(filename, 'r') as content:
text_body = content.read()
for key in keys:
# naively match the entire file, low chance of incorrect
# collision
if key in text_body:
bad_files.append({
'filename': filename, 'key': key[:4] + '*' * 28,
})
return bad_files
|
python
|
{
"resource": ""
}
|
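A sketch tying the three AWS-secret helpers above together; the credentials path and the checked filename are hypothetical:

keys = get_aws_secrets_from_env() | get_aws_secrets_from_file('~/.aws/credentials')
if keys:
    for match in check_file_for_aws_keys(['config/settings.py'], keys):
        # the key is already obfuscated to its first four characters
        print('{filename}: {key}'.format(**match))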
q22712
|
sort
|
train
|
def sort(lines): # type: (List[str]) -> List[str]
"""Sort a YAML file in alphabetical order, keeping blocks together.
:param lines: array of strings (without newlines)
:return: sorted array of strings
"""
# make a copy of lines since we will clobber it
lines = list(lines)
new_lines = parse_block(lines, header=True)
for block in sorted(parse_blocks(lines), key=first_key):
if new_lines:
new_lines.append('')
new_lines.extend(block)
return new_lines
|
python
|
{
"resource": ""
}
|
q22713
|
parse_block
|
train
|
def parse_block(lines, header=False): # type: (List[str], bool) -> List[str]
"""Parse and return a single block, popping off the start of `lines`.
If parsing a header block, we stop after we reach a line that is not a
comment. Otherwise, we stop after reaching an empty line.
:param lines: list of lines
:param header: whether we are parsing a header block
:return: list of lines that form the single block
"""
block_lines = []
while lines and lines[0] and (not header or lines[0].startswith('#')):
block_lines.append(lines.pop(0))
return block_lines
|
python
|
{
"resource": ""
}
|
q22714
|
parse_blocks
|
train
|
def parse_blocks(lines): # type: (List[str]) -> List[List[str]]
"""Parse and return all possible blocks, popping off the start of `lines`.
:param lines: list of lines
:return: list of blocks, where each block is a list of lines
"""
blocks = []
while lines:
if lines[0] == '':
lines.pop(0)
else:
blocks.append(parse_block(lines))
return blocks
|
python
|
{
"resource": ""
}
|
q22715
|
first_key
|
train
|
def first_key(lines): # type: (List[str]) -> str
"""Returns a string representing the sort key of a block.
The sort key is the first YAML key we encounter, ignoring comments, and
stripping leading quotes.
>>> print(test)
# some comment
'foo': true
>>> first_key(test)
'foo'
"""
for line in lines:
if line.startswith('#'):
continue
if any(line.startswith(quote) for quote in QUOTES):
return line[1:]
return line
else:
return ''
|
python
|
{
"resource": ""
}
|
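A quick demonstration of the block-sorting helpers above on an in-memory list of lines (assuming `QUOTES` is the module-level tuple of quote characters used by `first_key`):

lines = [
    '# header comment',
    '',
    'zebra: 1',
    '',
    '# fruit block',
    'apple: 2',
]
print('\n'.join(sort(lines)))
# header comment stays on top; the 'apple' block sorts before 'zebra'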
q22716
|
create_sentinel
|
train
|
async def create_sentinel(sentinels, *, db=None, password=None,
encoding=None, minsize=1, maxsize=10,
ssl=None, timeout=0.2, loop=None):
"""Creates Redis Sentinel client.
`sentinels` is a list of sentinel nodes.
"""
if loop is None:
loop = asyncio.get_event_loop()
pool = await create_sentinel_pool(sentinels,
db=db,
password=password,
encoding=encoding,
minsize=minsize,
maxsize=maxsize,
ssl=ssl,
timeout=timeout,
loop=loop)
return RedisSentinel(pool)
|
python
|
{
"resource": ""
}
|
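A usage sketch for create_sentinel above, assuming an aioredis-style client and a Sentinel node on localhost:26379 monitoring a master named 'mymaster' (both hypothetical):

import asyncio

async def main():
    sentinel = await create_sentinel([('localhost', 26379)], timeout=0.5)
    # master() returns a dict with the monitored master's state
    state = await sentinel.master('mymaster')
    print(state.get('ip'), state.get('port'))
    sentinel.close()
    await sentinel.wait_closed()

asyncio.get_event_loop().run_until_complete(main())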
q22717
|
RedisSentinel.execute
|
train
|
def execute(self, command, *args, **kwargs):
"""Execute Sentinel command.
It will be prefixed with SENTINEL automatically.
"""
return self._pool.execute(
b'SENTINEL', command, *args, **kwargs)
|
python
|
{
"resource": ""
}
|
q22718
|
RedisSentinel.master
|
train
|
def master(self, name):
"""Returns a dictionary containing the specified masters state."""
fut = self.execute(b'MASTER', name, encoding='utf-8')
return wait_convert(fut, parse_sentinel_master)
|
python
|
{
"resource": ""
}
|
q22719
|
RedisSentinel.masters
|
train
|
def masters(self):
"""Returns a list of dictionaries containing each master's state."""
fut = self.execute(b'MASTERS', encoding='utf-8')
# TODO: process masters: we can adjust internal state
return wait_convert(fut, parse_sentinel_masters)
|
python
|
{
"resource": ""
}
|
q22720
|
RedisSentinel.slaves
|
train
|
def slaves(self, name):
"""Returns a list of slaves for ``name``."""
fut = self.execute(b'SLAVES', name, encoding='utf-8')
return wait_convert(fut, parse_sentinel_slaves_and_sentinels)
|
python
|
{
"resource": ""
}
|
q22721
|
RedisSentinel.sentinels
|
train
|
def sentinels(self, name):
"""Returns a list of sentinels for ``name``."""
fut = self.execute(b'SENTINELS', name, encoding='utf-8')
return wait_convert(fut, parse_sentinel_slaves_and_sentinels)
|
python
|
{
"resource": ""
}
|
q22722
|
RedisSentinel.monitor
|
train
|
def monitor(self, name, ip, port, quorum):
"""Add a new master to Sentinel to be monitored."""
fut = self.execute(b'MONITOR', name, ip, port, quorum)
return wait_ok(fut)
|
python
|
{
"resource": ""
}
|
q22723
|
RedisSentinel.remove
|
train
|
def remove(self, name):
"""Remove a master from Sentinel's monitoring."""
fut = self.execute(b'REMOVE', name)
return wait_ok(fut)
|
python
|
{
"resource": ""
}
|
q22724
|
RedisSentinel.set
|
train
|
def set(self, name, option, value):
"""Set Sentinel monitoring parameters for a given master."""
fut = self.execute(b"SET", name, option, value)
return wait_ok(fut)
|
python
|
{
"resource": ""
}
|
q22725
|
RedisSentinel.failover
|
train
|
def failover(self, name):
"""Force a failover of a named master."""
fut = self.execute(b'FAILOVER', name)
return wait_ok(fut)
|
python
|
{
"resource": ""
}
|
q22726
|
ListCommandsMixin.blpop
|
train
|
def blpop(self, key, *keys, timeout=0, encoding=_NOTSET):
"""Remove and get the first element in a list, or block until
one is available.
:raises TypeError: if timeout is not int
:raises ValueError: if timeout is less than 0
"""
if not isinstance(timeout, int):
raise TypeError("timeout argument must be int")
if timeout < 0:
raise ValueError("timeout must be greater equal 0")
args = keys + (timeout,)
return self.execute(b'BLPOP', key, *args, encoding=encoding)
|
python
|
{
"resource": ""
}
|
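A blocking-pop sketch for the list commands above, assuming a connected aioredis-style client named `redis` (hypothetical):

async def consume_one(redis):
    await redis.rpush('jobs', 'job-1')
    # Blocks for up to 5 seconds; returns [key, value] or None on timeout.
    item = await redis.blpop('jobs', timeout=5, encoding='utf-8')
    return item and item[1]  # 'job-1'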
q22727
|
ListCommandsMixin.lindex
|
train
|
def lindex(self, key, index, *, encoding=_NOTSET):
"""Get an element from a list by its index.
:raises TypeError: if index is not int
"""
if not isinstance(index, int):
raise TypeError("index argument must be int")
return self.execute(b'LINDEX', key, index, encoding=encoding)
|
python
|
{
"resource": ""
}
|
q22728
|
ListCommandsMixin.linsert
|
train
|
def linsert(self, key, pivot, value, before=False):
"""Inserts value in the list stored at key either before or
after the reference value pivot.
"""
where = b'AFTER' if not before else b'BEFORE'
return self.execute(b'LINSERT', key, where, pivot, value)
|
python
|
{
"resource": ""
}
|
q22729
|
ListCommandsMixin.lpop
|
train
|
def lpop(self, key, *, encoding=_NOTSET):
"""Removes and returns the first element of the list stored at key."""
return self.execute(b'LPOP', key, encoding=encoding)
|
python
|
{
"resource": ""
}
|
q22730
|
ListCommandsMixin.lrem
|
train
|
def lrem(self, key, count, value):
"""Removes the first count occurrences of elements equal to value
from the list stored at key.
:raises TypeError: if count is not int
"""
if not isinstance(count, int):
raise TypeError("count argument must be int")
return self.execute(b'LREM', key, count, value)
|
python
|
{
"resource": ""
}
|
q22731
|
ListCommandsMixin.lset
|
train
|
def lset(self, key, index, value):
"""Sets the list element at index to value.
:raises TypeError: if index is not int
"""
if not isinstance(index, int):
raise TypeError("index argument must be int")
return self.execute(b'LSET', key, index, value)
|
python
|
{
"resource": ""
}
|
q22732
|
ListCommandsMixin.ltrim
|
train
|
def ltrim(self, key, start, stop):
"""Trim an existing list so that it will contain only the specified
range of elements specified.
:raises TypeError: if start or stop is not int
"""
if not isinstance(start, int):
raise TypeError("start argument must be int")
if not isinstance(stop, int):
raise TypeError("stop argument must be int")
fut = self.execute(b'LTRIM', key, start, stop)
return wait_ok(fut)
|
python
|
{
"resource": ""
}
|
q22733
|
ListCommandsMixin.rpop
|
train
|
def rpop(self, key, *, encoding=_NOTSET):
"""Removes and returns the last element of the list stored at key."""
return self.execute(b'RPOP', key, encoding=encoding)
|
python
|
{
"resource": ""
}
|
q22734
|
Pipeline.execute
|
train
|
async def execute(self, *, return_exceptions=False):
"""Execute all buffered commands.
Any exception that is raised by any command is caught and
raised later when processing results.
Exceptions can also be returned in result if
`return_exceptions` flag is set to True.
"""
assert not self._done, "Pipeline already executed. Create new one."
self._done = True
if self._pipeline:
if isinstance(self._pool_or_conn, AbcPool):
async with self._pool_or_conn.get() as conn:
return await self._do_execute(
conn, return_exceptions=return_exceptions)
else:
return await self._do_execute(
self._pool_or_conn,
return_exceptions=return_exceptions)
else:
return await self._gather_result(return_exceptions)
|
python
|
{
"resource": ""
}
|
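A buffered-execution sketch for Pipeline.execute above, assuming an aioredis-style client named `redis` with a `pipeline()` factory (hypothetical):

async def set_and_get(redis):
    pipe = redis.pipeline()
    pipe.set('greeting', 'hello')     # commands are buffered, not awaited
    pipe.get('greeting', encoding='utf-8')
    ok, value = await pipe.execute()  # sends everything in one round trip
    return value                      # 'hello'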
q22735
|
StreamReader.readobj
|
train
|
async def readobj(self):
"""
Return a parsed Redis object or an exception
when something wrong happened.
"""
assert self._parser is not None, "set_parser must be called"
while True:
obj = self._parser.gets()
if obj is not False:
# TODO: implement resume the read
# Return any valid object and the Nil->None
# case. When it's False there is nothing there
# to be parsed and we have to wait for more data.
return obj
if self._exception:
raise self._exception
if self._eof:
break
await self._wait_for_data('readobj')
|
python
|
{
"resource": ""
}
|
q22736
|
ServerCommandsMixin.client_list
|
train
|
def client_list(self):
"""Get the list of client connections.
Returns list of ClientInfo named tuples.
"""
fut = self.execute(b'CLIENT', b'LIST', encoding='utf-8')
return wait_convert(fut, to_tuples)
|
python
|
{
"resource": ""
}
|
q22737
|
ServerCommandsMixin.client_getname
|
train
|
def client_getname(self, encoding=_NOTSET):
"""Get the current connection name."""
return self.execute(b'CLIENT', b'GETNAME', encoding=encoding)
|
python
|
{
"resource": ""
}
|
q22738
|
ServerCommandsMixin.client_setname
|
train
|
def client_setname(self, name):
"""Set the current connection name."""
fut = self.execute(b'CLIENT', b'SETNAME', name)
return wait_ok(fut)
|
python
|
{
"resource": ""
}
|
q22739
|
ServerCommandsMixin.command_getkeys
|
train
|
def command_getkeys(self, command, *args, encoding='utf-8'):
"""Extract keys given a full Redis command."""
return self.execute(b'COMMAND', b'GETKEYS', command, *args,
encoding=encoding)
|
python
|
{
"resource": ""
}
|
q22740
|
ServerCommandsMixin.command_info
|
train
|
def command_info(self, command, *commands):
"""Get array of specific Redis command details."""
return self.execute(b'COMMAND', b'INFO', command, *commands,
encoding='utf-8')
|
python
|
{
"resource": ""
}
|
q22741
|
ServerCommandsMixin.config_set
|
train
|
def config_set(self, parameter, value):
"""Set a configuration parameter to the given value."""
if not isinstance(parameter, str):
raise TypeError("parameter must be str")
fut = self.execute(b'CONFIG', b'SET', parameter, value)
return wait_ok(fut)
|
python
|
{
"resource": ""
}
|
q22742
|
ServerCommandsMixin.debug_sleep
|
train
|
def debug_sleep(self, timeout):
"""Suspend connection for timeout seconds."""
fut = self.execute(b'DEBUG', b'SLEEP', timeout)
return wait_ok(fut)
|
python
|
{
"resource": ""
}
|
q22743
|
ServerCommandsMixin.flushall
|
train
|
def flushall(self, async_op=False):
"""
Remove all keys from all databases.
:param async_op: lets the entire dataset be freed asynchronously. \
Defaults to False
"""
if async_op:
fut = self.execute(b'FLUSHALL', b'ASYNC')
else:
fut = self.execute(b'FLUSHALL')
return wait_ok(fut)
|
python
|
{
"resource": ""
}
|
q22744
|
ServerCommandsMixin.flushdb
|
train
|
def flushdb(self, async_op=False):
"""
Remove all keys from the current database.
:param async_op: lets a single database be freed asynchronously. \
Defaults to False
"""
if async_op:
fut = self.execute(b'FLUSHDB', b'ASYNC')
else:
fut = self.execute(b'FLUSHDB')
return wait_ok(fut)
|
python
|
{
"resource": ""
}
|
q22745
|
ServerCommandsMixin.info
|
train
|
def info(self, section='default'):
"""Get information and statistics about the server.
If called without an argument, returns the default set of sections.
For available sections, see http://redis.io/commands/INFO
:raises ValueError: if section is invalid
"""
if not section:
raise ValueError("invalid section")
fut = self.execute(b'INFO', section, encoding='utf-8')
return wait_convert(fut, parse_info)
|
python
|
{
"resource": ""
}
|
q22746
|
ServerCommandsMixin.role
|
train
|
def role(self):
"""Return the role of the server instance.
Returns named tuples describing role of the instance.
For fields information see http://redis.io/commands/role#output-format
"""
fut = self.execute(b'ROLE', encoding='utf-8')
return wait_convert(fut, parse_role)
|
python
|
{
"resource": ""
}
|
q22747
|
ServerCommandsMixin.shutdown
|
train
|
def shutdown(self, save=None):
"""Synchronously save the dataset to disk and then
shut down the server.
"""
if save is self.SHUTDOWN_SAVE:
return self.execute(b'SHUTDOWN', b'SAVE')
elif save is self.SHUTDOWN_NOSAVE:
return self.execute(b'SHUTDOWN', b'NOSAVE')
else:
return self.execute(b'SHUTDOWN')
|
python
|
{
"resource": ""
}
|
q22748
|
ServerCommandsMixin.slaveof
|
train
|
def slaveof(self, host, port=None):
"""Make the server a slave of another instance,
or promote it as master.
Calling ``slaveof(None)`` will send ``SLAVEOF NO ONE``.
.. versionchanged:: v0.2.6
``slaveof()`` form deprecated
in favour of explicit ``slaveof(None)``.
"""
if host is None and port is None:
return self.execute(b'SLAVEOF', b'NO', b'ONE')
return self.execute(b'SLAVEOF', host, port)
|
python
|
{
"resource": ""
}
|
q22749
|
ServerCommandsMixin.slowlog_get
|
train
|
def slowlog_get(self, length=None):
"""Returns the Redis slow queries log."""
if length is not None:
if not isinstance(length, int):
raise TypeError("length must be int or None")
return self.execute(b'SLOWLOG', b'GET', length)
else:
return self.execute(b'SLOWLOG', b'GET')
|
python
|
{
"resource": ""
}
|
q22750
|
PubSubCommandsMixin.publish_json
|
train
|
def publish_json(self, channel, obj):
"""Post a JSON-encoded message to channel."""
return self.publish(channel, json.dumps(obj))
|
python
|
{
"resource": ""
}
|
q22751
|
PubSubCommandsMixin.unsubscribe
|
train
|
def unsubscribe(self, channel, *channels):
"""Unsubscribe from specific channels.
Arguments can be instances of :class:`~aioredis.Channel`.
"""
conn = self._pool_or_conn
return conn.execute_pubsub(b'UNSUBSCRIBE', channel, *channels)
|
python
|
{
"resource": ""
}
|
q22752
|
PubSubCommandsMixin.punsubscribe
|
train
|
def punsubscribe(self, pattern, *patterns):
"""Unsubscribe from specific patterns.
Arguments can be instances of :class:`~aioredis.Channel`.
"""
conn = self._pool_or_conn
return conn.execute_pubsub(b'PUNSUBSCRIBE', pattern, *patterns)
|
python
|
{
"resource": ""
}
|
q22753
|
PubSubCommandsMixin.pubsub_channels
|
train
|
def pubsub_channels(self, pattern=None):
"""Lists the currently active channels."""
args = [b'PUBSUB', b'CHANNELS']
if pattern is not None:
args.append(pattern)
return self.execute(*args)
|
python
|
{
"resource": ""
}
|
q22754
|
Channel.get
|
train
|
async def get(self, *, encoding=None, decoder=None):
"""Coroutine that waits for and returns a message.
:raises aioredis.ChannelClosedError: If channel is unsubscribed
and has no messages.
"""
assert decoder is None or callable(decoder), decoder
if self._queue.exhausted:
raise ChannelClosedError()
msg = await self._queue.get()
if msg is EndOfStream:
# TODO: maybe we need an explicit marker for "end of stream"
# currently, returning None may overlap with
# possible return value from `decoder`
# so the user would have to check `ch.is_active`
# to determine if its EoS or payload
return
if self._is_pattern:
dest_channel, msg = msg
if encoding is not None:
msg = msg.decode(encoding)
if decoder is not None:
msg = decoder(msg)
if self._is_pattern:
return dest_channel, msg
return msg
|
python
|
{
"resource": ""
}
|
q22755
|
Channel.get_json
|
train
|
async def get_json(self, encoding='utf-8'):
"""Shortcut to get JSON messages."""
return (await self.get(encoding=encoding, decoder=json.loads))
|
python
|
{
"resource": ""
}
|
q22756
|
Channel.iter
|
train
|
def iter(self, *, encoding=None, decoder=None):
"""Same as get method but its native coroutine.
Usage example:
>>> async for msg in ch.iter():
... print(msg)
"""
return _IterHelper(self,
is_active=lambda ch: ch.is_active,
encoding=encoding,
decoder=decoder)
|
python
|
{
"resource": ""
}
|
q22757
|
Receiver.pattern
|
train
|
def pattern(self, pattern):
"""Create a pattern channel.
Returns ``_Sender`` object implementing
:class:`~aioredis.abc.AbcChannel`.
"""
enc_pattern = _converters[type(pattern)](pattern)
if (enc_pattern, True) not in self._refs:
ch = _Sender(self, enc_pattern,
is_pattern=True)
self._refs[(enc_pattern, True)] = ch
return self._refs[(enc_pattern, True)]
|
python
|
{
"resource": ""
}
|
q22758
|
Receiver.channels
|
train
|
def channels(self):
"""Read-only channels dict."""
return types.MappingProxyType({
ch.name: ch for ch in self._refs.values()
if not ch.is_pattern})
|
python
|
{
"resource": ""
}
|
q22759
|
Receiver.wait_message
|
train
|
async def wait_message(self):
"""Blocks until new message appear."""
if not self._queue.empty():
return True
if self._queue.closed:
return False
await self._queue.wait()
return self.is_active
|
python
|
{
"resource": ""
}
|
q22760
|
Receiver.is_active
|
train
|
def is_active(self):
"""Returns True if listener has any active subscription."""
if self._queue.exhausted:
return False
return any(ch.is_active for ch in self._refs.values())
|
python
|
{
"resource": ""
}
|
q22761
|
Receiver.iter
|
train
|
def iter(self, *, encoding=None, decoder=None):
"""Returns async iterator.
Usage example:
>>> async for ch, msg in mpsc.iter():
... print(ch, msg)
"""
return _IterHelper(self,
is_active=lambda r: not r._queue.exhausted,
encoding=encoding,
decoder=decoder)
|
python
|
{
"resource": ""
}
|
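A subscription sketch combining Channel and Receiver from the rows above, assuming an aioredis-style client named `redis` (hypothetical):

async def listen(redis):
    ch, = await redis.subscribe('news')
    # Channel.iter() wraps get() in an async iterator; see Channel.get above.
    async for msg in ch.iter(encoding='utf-8'):
        print('news:', msg)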
q22762
|
create_pool
|
train
|
async def create_pool(address, *, db=None, password=None, ssl=None,
encoding=None, minsize=1, maxsize=10,
parser=None, loop=None, create_connection_timeout=None,
pool_cls=None, connection_cls=None):
# FIXME: rewrite docstring
"""Creates Redis Pool.
By default it creates pool of Redis instances, but it is
also possible to create pool of plain connections by passing
``lambda conn: conn`` as commands_factory.
*commands_factory* parameter is deprecated since v0.2.9
All arguments are the same as for create_connection.
Returns RedisPool instance or a pool_cls if it is given.
"""
if pool_cls:
assert issubclass(pool_cls, AbcPool),\
"pool_class does not meet the AbcPool contract"
cls = pool_cls
else:
cls = ConnectionsPool
if isinstance(address, str):
address, options = parse_url(address)
db = options.setdefault('db', db)
password = options.setdefault('password', password)
encoding = options.setdefault('encoding', encoding)
create_connection_timeout = options.setdefault(
'timeout', create_connection_timeout)
if 'ssl' in options:
assert options['ssl'] or (not options['ssl'] and not ssl), (
"Conflicting ssl options are set", options['ssl'], ssl)
ssl = ssl or options['ssl']
# TODO: minsize/maxsize
pool = cls(address, db, password, encoding,
minsize=minsize, maxsize=maxsize,
ssl=ssl, parser=parser,
create_connection_timeout=create_connection_timeout,
connection_cls=connection_cls,
loop=loop)
try:
await pool._fill_free(override_min=False)
except Exception:
pool.close()
await pool.wait_closed()
raise
return pool
|
python
|
{
"resource": ""
}
|
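A minimal pool-lifecycle sketch for create_pool above, assuming a Redis server reachable at redis://localhost (hypothetical):

import asyncio

async def main():
    pool = await create_pool('redis://localhost', minsize=1, maxsize=5)
    try:
        await pool.execute('SET', 'greeting', 'hello')
        print(await pool.execute('GET', 'greeting', encoding='utf-8'))
    finally:
        pool.close()
        await pool.wait_closed()

asyncio.get_event_loop().run_until_complete(main())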
q22763
|
ConnectionsPool.execute
|
train
|
def execute(self, command, *args, **kw):
"""Executes redis command in a free connection and returns
future waiting for result.
Picks connection from free pool and send command through
that connection.
If no connection is found, returns coroutine waiting for
free connection to execute command.
"""
conn, address = self.get_connection(command, args)
if conn is not None:
fut = conn.execute(command, *args, **kw)
return self._check_result(fut, command, args, kw)
else:
coro = self._wait_execute(address, command, args, kw)
return self._check_result(coro, command, args, kw)
|
python
|
{
"resource": ""
}
|
q22764
|
ConnectionsPool.get_connection
|
train
|
def get_connection(self, command, args=()):
"""Get free connection from pool.
Returns connection.
"""
# TODO: find a better way to determine if connection is free
# and not heavily used.
command = command.upper().strip()
is_pubsub = command in _PUBSUB_COMMANDS
if is_pubsub and self._pubsub_conn:
if not self._pubsub_conn.closed:
return self._pubsub_conn, self._pubsub_conn.address
self._pubsub_conn = None
for i in range(self.freesize):
conn = self._pool[0]
self._pool.rotate(1)
if conn.closed: # or conn._waiters: (eg: busy connection)
continue
if conn.in_pubsub:
continue
if is_pubsub:
self._pubsub_conn = conn
self._pool.remove(conn)
self._used.add(conn)
return conn, conn.address
return None, self._address
|
python
|
{
"resource": ""
}
|
q22765
|
ConnectionsPool._wait_execute
|
train
|
async def _wait_execute(self, address, command, args, kw):
"""Acquire connection and execute command."""
conn = await self.acquire(command, args)
try:
return (await conn.execute(command, *args, **kw))
finally:
self.release(conn)
|
python
|
{
"resource": ""
}
|
q22766
|
ConnectionsPool.select
|
train
|
async def select(self, db):
"""Changes db index for all free connections.
All previously acquired connections will be closed when released.
"""
res = True
async with self._cond:
for i in range(self.freesize):
res = res and (await self._pool[i].select(db))
else:
self._db = db
return res
|
python
|
{
"resource": ""
}
|
q22767
|
ConnectionsPool.acquire
|
train
|
async def acquire(self, command=None, args=()):
"""Acquires a connection from free pool.
Creates new connection if needed.
"""
if self.closed:
raise PoolClosedError("Pool is closed")
async with self._cond:
if self.closed:
raise PoolClosedError("Pool is closed")
while True:
await self._fill_free(override_min=True)
if self.freesize:
conn = self._pool.popleft()
assert not conn.closed, conn
assert conn not in self._used, (conn, self._used)
self._used.add(conn)
return conn
else:
await self._cond.wait()
|
python
|
{
"resource": ""
}
|
q22768
|
ConnectionsPool.release
|
train
|
def release(self, conn):
"""Returns used connection back into pool.
When returned connection has db index that differs from one in pool
the connection will be closed and dropped.
When queue of free connections is full the connection will be dropped.
"""
assert conn in self._used, (
"Invalid connection, maybe from other pool", conn)
self._used.remove(conn)
if not conn.closed:
if conn.in_transaction:
logger.warning(
"Connection %r is in transaction, closing it.", conn)
conn.close()
elif conn.in_pubsub:
logger.warning(
"Connection %r is in subscribe mode, closing it.", conn)
conn.close()
elif conn._waiters:
logger.warning(
"Connection %r has pending commands, closing it.", conn)
conn.close()
elif conn.db == self.db:
if self.maxsize and self.freesize < self.maxsize:
self._pool.append(conn)
else:
# consider this connection as old and close it.
conn.close()
else:
conn.close()
# FIXME: check event loop is not closed
asyncio.ensure_future(self._wakeup(), loop=self._loop)
|
python
|
{
"resource": ""
}
|
q22769
|
PyReader.feed
|
train
|
def feed(self, data, o: int = 0, l: int = -1):
"""Feed data to parser."""
if l == -1:
l = len(data) - o
if o < 0 or l < 0:
raise ValueError("negative input")
if o + l > len(data):
raise ValueError("input is larger than buffer size")
self._parser.buf.extend(data[o:o+l])
|
python
|
{
"resource": ""
}
|
q22770
|
encode_command
|
train
|
def encode_command(*args, buf=None):
"""Encodes arguments into redis bulk-strings array.
Raises TypeError if any of the args is not of bytearray, bytes, float,
int, or str type.
"""
if buf is None:
buf = bytearray()
buf.extend(b'*%d\r\n' % len(args))
try:
for arg in args:
barg = _converters[type(arg)](arg)
buf.extend(b'$%d\r\n%s\r\n' % (len(barg), barg))
except KeyError:
raise TypeError("Argument {!r} expected to be of bytearray, bytes,"
" float, int, or str type".format(arg))
return buf
|
python
|
{
"resource": ""
}
|
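What the encoder above produces for a simple command, assuming `_converters` maps str arguments to their UTF-8 bytes:

buf = encode_command('SET', 'key', 'value')
# A RESP array of three bulk strings:
assert bytes(buf) == b'*3\r\n$3\r\nSET\r\n$3\r\nkey\r\n$5\r\nvalue\r\n'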
q22771
|
parse_url
|
train
|
def parse_url(url):
"""Parse Redis connection URI.
Parse according to IANA specs:
* https://www.iana.org/assignments/uri-schemes/prov/redis
* https://www.iana.org/assignments/uri-schemes/prov/rediss
Also, more rules apply:
* empty scheme is treated as a unix socket path; no further parsing is done.
* 'unix://' scheme is treated as a unix socket path and parsed.
* Multiple query parameter values and blank values are considered an error.
* A DB number specified both as the path and as a query parameter is
considered an error.
* A password specified both in userinfo and as a query parameter is
considered an error.
"""
r = urlparse(url)
assert r.scheme in ('', 'redis', 'rediss', 'unix'), (
"Unsupported URI scheme", r.scheme)
if r.scheme == '':
return url, {}
query = {}
for p, v in parse_qsl(r.query, keep_blank_values=True):
assert p not in query, ("Multiple parameters are not allowed", p, v)
assert v, ("Empty parameters are not allowed", p, v)
query[p] = v
if r.scheme == 'unix':
assert r.path, ("Empty path is not allowed", url)
assert not r.netloc, (
"Netlocation is not allowed for unix scheme", r.netloc)
return r.path, _parse_uri_options(query, '', r.password)
address = (r.hostname or 'localhost', int(r.port or 6379))
path = r.path
if path.startswith('/'):
path = r.path[1:]
options = _parse_uri_options(query, path, r.password)
if r.scheme == 'rediss':
options['ssl'] = True
return address, options
|
python
|
{
"resource": ""
}
|
q22772
|
create_connection
|
train
|
async def create_connection(address, *, db=None, password=None, ssl=None,
encoding=None, parser=None, loop=None,
timeout=None, connection_cls=None):
"""Creates redis connection.
Opens connection to Redis server specified by address argument.
Address argument can be one of the following:
* A tuple representing (host, port) pair for TCP connections;
* A string representing either Redis URI or unix domain socket path.
SSL argument is passed through to asyncio.create_connection.
By default SSL/TLS is not used.
By default the timeout is applied at the connection stage only; you can
limit the time spent opening a connection via the `timeout` keyword
argument.
Encoding argument can be used to decode byte-replies to strings.
By default no decoding is done.
Parser parameter can be used to pass custom Redis protocol parser class.
By default hiredis.Reader is used (unless it is missing or platform
is not CPython).
Return value is RedisConnection instance or a connection_cls if it is
given.
This function is a coroutine.
"""
assert isinstance(address, (tuple, list, str)), "tuple or str expected"
if isinstance(address, str):
logger.debug("Parsing Redis URI %r", address)
address, options = parse_url(address)
db = options.setdefault('db', db)
password = options.setdefault('password', password)
encoding = options.setdefault('encoding', encoding)
timeout = options.setdefault('timeout', timeout)
if 'ssl' in options:
assert options['ssl'] or (not options['ssl'] and not ssl), (
"Conflicting ssl options are set", options['ssl'], ssl)
ssl = ssl or options['ssl']
if timeout is not None and timeout <= 0:
raise ValueError("Timeout has to be None or a number greater than 0")
if connection_cls:
assert issubclass(connection_cls, AbcConnection),\
"connection_class does not meet the AbcConnection contract"
cls = connection_cls
else:
cls = RedisConnection
if loop is None:
loop = asyncio.get_event_loop()
if isinstance(address, (list, tuple)):
host, port = address
logger.debug("Creating tcp connection to %r", address)
reader, writer = await asyncio.wait_for(open_connection(
host, port, limit=MAX_CHUNK_SIZE, ssl=ssl, loop=loop),
timeout, loop=loop)
sock = writer.transport.get_extra_info('socket')
if sock is not None:
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
address = sock.getpeername()
address = tuple(address[:2])
else:
logger.debug("Creating unix connection to %r", address)
reader, writer = await asyncio.wait_for(open_unix_connection(
address, ssl=ssl, limit=MAX_CHUNK_SIZE, loop=loop),
timeout, loop=loop)
sock = writer.transport.get_extra_info('socket')
if sock is not None:
address = sock.getpeername()
conn = cls(reader, writer, encoding=encoding,
address=address, parser=parser,
loop=loop)
try:
if password is not None:
await conn.auth(password)
if db is not None:
await conn.select(db)
except Exception:
conn.close()
await conn.wait_closed()
raise
return conn
|
python
|
{
"resource": ""
}
|
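A direct-connection sketch for create_connection above (assumes a Redis server on localhost:6379):

import asyncio

async def main():
    conn = await create_connection(('localhost', 6379), encoding='utf-8')
    try:
        print(await conn.execute('PING'))  # 'PONG'
    finally:
        conn.close()
        await conn.wait_closed()

asyncio.get_event_loop().run_until_complete(main())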
q22773
|
RedisConnection._read_data
|
train
|
async def _read_data(self):
"""Response reader task."""
last_error = ConnectionClosedError(
"Connection has been closed by server")
while not self._reader.at_eof():
try:
obj = await self._reader.readobj()
except asyncio.CancelledError:
# NOTE: reader can get cancelled from `close()` method only.
last_error = RuntimeError('this is unexpected')
break
except ProtocolError as exc:
# ProtocolError is fatal
# so connection must be closed
if self._in_transaction is not None:
self._transaction_error = exc
last_error = exc
break
except Exception as exc:
# NOTE: for QUIT command connection error can be received
# before response
last_error = exc
break
else:
if (obj == b'' or obj is None) and self._reader.at_eof():
logger.debug("Connection has been closed by server,"
" response: %r", obj)
last_error = ConnectionClosedError("Reader at end of file")
break
if isinstance(obj, MaxClientsError):
last_error = obj
break
if self._in_pubsub:
self._process_pubsub(obj)
else:
self._process_data(obj)
self._closing = True
self._loop.call_soon(self._do_close, last_error)
|
python
|
{
"resource": ""
}
|
q22774
|
RedisConnection._process_data
|
train
|
def _process_data(self, obj):
"""Processes command results."""
assert len(self._waiters) > 0, (type(obj), obj)
waiter, encoding, cb = self._waiters.popleft()
if isinstance(obj, RedisError):
if isinstance(obj, ReplyError):
if obj.args[0].startswith('READONLY'):
obj = ReadOnlyError(obj.args[0])
_set_exception(waiter, obj)
if self._in_transaction is not None:
self._transaction_error = obj
else:
if encoding is not None:
try:
obj = decode(obj, encoding)
except Exception as exc:
_set_exception(waiter, exc)
return
if cb is not None:
try:
obj = cb(obj)
except Exception as exc:
_set_exception(waiter, exc)
return
_set_result(waiter, obj)
if self._in_transaction is not None:
self._in_transaction.append((encoding, cb))
|
python
|
{
"resource": ""
}
|
q22775
|
RedisConnection.execute
|
train
|
def execute(self, command, *args, encoding=_NOTSET):
"""Executes redis command and returns Future waiting for the answer.
Raises:
* TypeError if any of args can not be encoded as bytes.
* ReplyError on redis '-ERR' responses.
* ProtocolError when response can not be decoded meaning connection
is broken.
* ConnectionClosedError when either client or server has closed the
connection.
"""
if self._reader is None or self._reader.at_eof():
msg = self._close_msg or "Connection closed or corrupted"
raise ConnectionClosedError(msg)
if command is None:
raise TypeError("command must not be None")
if None in args:
raise TypeError("args must not contain None")
command = command.upper().strip()
is_pubsub = command in _PUBSUB_COMMANDS
is_ping = command in ('PING', b'PING')
if self._in_pubsub and not (is_pubsub or is_ping):
raise RedisError("Connection in SUBSCRIBE mode")
elif is_pubsub:
logger.warning("Deprecated. Use `execute_pubsub` method directly")
return self.execute_pubsub(command, *args)
if command in ('SELECT', b'SELECT'):
cb = partial(self._set_db, args=args)
elif command in ('MULTI', b'MULTI'):
cb = self._start_transaction
elif command in ('EXEC', b'EXEC'):
cb = partial(self._end_transaction, discard=False)
elif command in ('DISCARD', b'DISCARD'):
cb = partial(self._end_transaction, discard=True)
else:
cb = None
if encoding is _NOTSET:
encoding = self._encoding
fut = self._loop.create_future()
if self._pipeline_buffer is None:
self._writer.write(encode_command(command, *args))
else:
encode_command(command, *args, buf=self._pipeline_buffer)
self._waiters.append((fut, encoding, cb))
return fut
|
python
|
{
"resource": ""
}
|
q22776
|
RedisConnection.closed
|
train
|
def closed(self):
"""True if connection is closed."""
closed = self._closing or self._closed
if not closed and self._reader and self._reader.at_eof():
self._closing = closed = True
self._loop.call_soon(self._do_close, None)
return closed
|
python
|
{
"resource": ""
}
|
q22777
|
RedisConnection.select
|
train
|
def select(self, db):
"""Change the selected database for the current connection."""
if not isinstance(db, int):
raise TypeError("DB must be of int type, not {!r}".format(db))
if db < 0:
raise ValueError("DB must be greater or equal 0, got {!r}"
.format(db))
fut = self.execute('SELECT', db)
return wait_ok(fut)
|
python
|
{
"resource": ""
}
|
q22778
|
GeoCommandsMixin.geoadd
|
train
|
def geoadd(self, key, longitude, latitude, member, *args, **kwargs):
"""Add one or more geospatial items in the geospatial index represented
using a sorted set.
:rtype: int
"""
return self.execute(
b'GEOADD', key, longitude, latitude, member, *args, **kwargs
)
|
python
|
{
"resource": ""
}
|
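A geospatial sketch for the GEO commands above, assuming an aioredis-style client named `redis` (hypothetical); the coordinates are the Palermo/Catania values from the Redis documentation:

async def demo(redis):
    await redis.geoadd('Sicily', 13.361389, 38.115556, 'Palermo')
    await redis.geoadd('Sicily', 15.087269, 37.502669, 'Catania')
    km = await redis.geodist('Sicily', 'Palermo', 'Catania', unit='km')
    print(km)  # roughly 166.27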
q22779
|
GeoCommandsMixin.geohash
|
train
|
def geohash(self, key, member, *members, **kwargs):
"""Returns members of a geospatial index as standard geohash strings.
:rtype: list[str or bytes or None]
"""
return self.execute(
b'GEOHASH', key, member, *members, **kwargs
)
|
python
|
{
"resource": ""
}
|
q22780
|
GeoCommandsMixin.geopos
|
train
|
def geopos(self, key, member, *members, **kwargs):
"""Returns longitude and latitude of members of a geospatial index.
:rtype: list[GeoPoint or None]
"""
fut = self.execute(b'GEOPOS', key, member, *members, **kwargs)
return wait_convert(fut, make_geopos)
|
python
|
{
"resource": ""
}
|
q22781
|
GeoCommandsMixin.geodist
|
train
|
def geodist(self, key, member1, member2, unit='m'):
"""Returns the distance between two members of a geospatial index.
:rtype: list[float or None]
"""
fut = self.execute(b'GEODIST', key, member1, member2, unit)
return wait_convert(fut, make_geodist)
|
python
|
{
"resource": ""
}
|
q22782
|
GeoCommandsMixin.georadius
|
train
|
def georadius(self, key, longitude, latitude, radius, unit='m', *,
with_dist=False, with_hash=False, with_coord=False,
count=None, sort=None, encoding=_NOTSET):
"""Query a sorted set representing a geospatial index to fetch members
matching a given maximum distance from a point.
Return value follows Redis convention:
* if none of ``WITH*`` flags are set -- list of strings returned:
>>> await redis.georadius('Sicily', 15, 37, 200, 'km')
[b"Palermo", b"Catania"]
* if any flag (or all) is set -- list of named tuples returned:
>>> await redis.georadius('Sicily', 15, 37, 200, 'km',
... with_dist=True)
[GeoMember(name=b"Palermo", dist=190.4424, hash=None, coord=None),
GeoMember(name=b"Catania", dist=56.4413, hash=None, coord=None)]
:raises TypeError: radius is not float or int
:raises TypeError: count is not int
:raises ValueError: if unit not equal ``m``, ``km``, ``mi`` or ``ft``
:raises ValueError: if sort not equal ``ASC`` or ``DESC``
:rtype: list[str] or list[GeoMember]
"""
args = validate_georadius_options(
radius, unit, with_dist, with_hash, with_coord, count, sort
)
fut = self.execute(
b'GEORADIUS', key, longitude, latitude, radius,
unit, *args, encoding=encoding
)
if with_dist or with_hash or with_coord:
return wait_convert(fut, make_geomember,
with_dist=with_dist,
with_hash=with_hash,
with_coord=with_coord)
return fut
|
python
|
{
"resource": ""
}
|
q22783
|
GeoCommandsMixin.georadiusbymember
|
train
|
def georadiusbymember(self, key, member, radius, unit='m', *,
with_dist=False, with_hash=False, with_coord=False,
count=None, sort=None, encoding=_NOTSET):
"""Query a sorted set representing a geospatial index to fetch members
matching a given maximum distance from a member.
Return value follows Redis convention:
* if none of ``WITH*`` flags are set -- list of strings returned:
>>> await redis.georadiusbymember('Sicily', 'Palermo', 200, 'km')
[b"Palermo", b"Catania"]
* if any flag (or all) is set -- list of named tuples returned:
>>> await redis.georadiusbymember('Sicily', 'Palermo', 200, 'km',
... with_dist=True)
[GeoMember(name=b"Palermo", dist=190.4424, hash=None, coord=None),
GeoMember(name=b"Catania", dist=56.4413, hash=None, coord=None)]
:raises TypeError: radius is not float or int
:raises TypeError: count is not int
:raises ValueError: if unit not equal ``m``, ``km``, ``mi`` or ``ft``
:raises ValueError: if sort not equal ``ASC`` or ``DESC``
:rtype: list[str] or list[GeoMember]
"""
args = validate_georadius_options(
radius, unit, with_dist, with_hash, with_coord, count, sort
)
fut = self.execute(
b'GEORADIUSBYMEMBER', key, member, radius,
unit, *args, encoding=encoding)
if with_dist or with_hash or with_coord:
return wait_convert(fut, make_geomember,
with_dist=with_dist,
with_hash=with_hash,
with_coord=with_coord)
return fut
|
python
|
{
"resource": ""
}
|
q22784
|
ScriptingCommandsMixin.eval
|
train
|
def eval(self, script, keys=[], args=[]):
"""Execute a Lua script server side."""
return self.execute(b'EVAL', script, len(keys), *(keys + args))
|
python
|
{
"resource": ""
}
|
q22785
|
ScriptingCommandsMixin.evalsha
|
train
|
def evalsha(self, digest, keys=[], args=[]):
"""Execute a Lua script server side by its SHA1 digest."""
return self.execute(b'EVALSHA', digest, len(keys), *(keys + args))
|
python
|
{
"resource": ""
}
|
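A Lua-scripting sketch for eval above, assuming an aioredis-style client named `redis` (hypothetical):

async def demo(redis):
    script = "return redis.call('SET', KEYS[1], ARGV[1])"
    await redis.eval(script, keys=['greeting'], args=['hello'])
    print(await redis.get('greeting', encoding='utf-8'))  # 'hello'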
q22786
|
ScriptingCommandsMixin.script_exists
|
train
|
def script_exists(self, digest, *digests):
"""Check existence of scripts in the script cache."""
return self.execute(b'SCRIPT', b'EXISTS', digest, *digests)
|
python
|
{
"resource": ""
}
|
q22787
|
StringCommandsMixin.bitcount
|
train
|
def bitcount(self, key, start=None, end=None):
"""Count set bits in a string.
:raises TypeError: if only start or end specified.
"""
if start is None and end is not None:
raise TypeError("both start and stop must be specified")
elif start is not None and end is None:
raise TypeError("both start and stop must be specified")
elif start is not None and end is not None:
args = (start, end)
else:
args = ()
return self.execute(b'BITCOUNT', key, *args)
|
python
|
{
"resource": ""
}
|
q22788
|
StringCommandsMixin.bitop_and
|
train
|
def bitop_and(self, dest, key, *keys):
"""Perform bitwise AND operations between strings."""
return self.execute(b'BITOP', b'AND', dest, key, *keys)
|
python
|
{
"resource": ""
}
|
q22789
|
StringCommandsMixin.bitop_or
|
train
|
def bitop_or(self, dest, key, *keys):
"""Perform bitwise OR operations between strings."""
return self.execute(b'BITOP', b'OR', dest, key, *keys)
|
python
|
{
"resource": ""
}
|
q22790
|
StringCommandsMixin.bitop_xor
|
train
|
def bitop_xor(self, dest, key, *keys):
"""Perform bitwise XOR operations between strings."""
return self.execute(b'BITOP', b'XOR', dest, key, *keys)
|
python
|
{
"resource": ""
}
|
q22791
|
StringCommandsMixin.bitop_not
|
train
|
def bitop_not(self, dest, key):
"""Perform bitwise NOT operations between strings."""
return self.execute(b'BITOP', b'NOT', dest, key)
|
python
|
{
"resource": ""
}
|
q22792
|
StringCommandsMixin.bitpos
|
train
|
def bitpos(self, key, bit, start=None, end=None):
"""Find first bit set or clear in a string.
:raises ValueError: if bit is not 0 or 1
"""
if bit not in (1, 0):
raise ValueError("bit argument must be either 1 or 0")
bytes_range = []
if start is not None:
bytes_range.append(start)
if end is not None:
if start is None:
bytes_range = [0, end]
else:
bytes_range.append(end)
return self.execute(b'BITPOS', key, bit, *bytes_range)
|
python
|
{
"resource": ""
}
|
q22793
|
StringCommandsMixin.decrby
|
train
|
def decrby(self, key, decrement):
"""Decrement the integer value of a key by the given number.
:raises TypeError: if decrement is not int
"""
if not isinstance(decrement, int):
raise TypeError("decrement must be of type int")
return self.execute(b'DECRBY', key, decrement)
|
python
|
{
"resource": ""
}
|
q22794
|
StringCommandsMixin.get
|
train
|
def get(self, key, *, encoding=_NOTSET):
"""Get the value of a key."""
return self.execute(b'GET', key, encoding=encoding)
|
python
|
{
"resource": ""
}
|
q22795
|
StringCommandsMixin.getrange
|
train
|
def getrange(self, key, start, end, *, encoding=_NOTSET):
"""Get a substring of the string stored at a key.
:raises TypeError: if start or end is not int
"""
if not isinstance(start, int):
raise TypeError("start argument must be int")
if not isinstance(end, int):
raise TypeError("end argument must be int")
return self.execute(b'GETRANGE', key, start, end, encoding=encoding)
|
python
|
{
"resource": ""
}
|
q22796
|
StringCommandsMixin.getset
|
train
|
def getset(self, key, value, *, encoding=_NOTSET):
"""Set the string value of a key and return its old value."""
return self.execute(b'GETSET', key, value, encoding=encoding)
|
python
|
{
"resource": ""
}
|
q22797
|
StringCommandsMixin.incrby
|
train
|
def incrby(self, key, increment):
"""Increment the integer value of a key by the given amount.
:raises TypeError: if increment is not int
"""
if not isinstance(increment, int):
raise TypeError("increment must be of type int")
return self.execute(b'INCRBY', key, increment)
|
python
|
{
"resource": ""
}
|
q22798
|
StringCommandsMixin.incrbyfloat
|
train
|
def incrbyfloat(self, key, increment):
"""Increment the float value of a key by the given amount.
:raises TypeError: if increment is not float
"""
if not isinstance(increment, float):
raise TypeError("increment must be of type int")
fut = self.execute(b'INCRBYFLOAT', key, increment)
return wait_convert(fut, float)
|
python
|
{
"resource": ""
}
|
q22799
|
StringCommandsMixin.mget
|
train
|
def mget(self, key, *keys, encoding=_NOTSET):
"""Get the values of all the given keys."""
return self.execute(b'MGET', key, *keys, encoding=encoding)
|
python
|
{
"resource": ""
}
|