| source_code (string, 45–7.24k chars) | function_name (string, 1–111 chars) | messages (list) |
|---|---|---|
def x(self, rr_id) ->str:
return join(self.dir_tails_hopper, rr_id
) if self.external else self.dir_tails
|
dir_tails_top
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, rr_id) ->str:\n return join(self.dir_tails_hopper, rr_id\n ) if self.external else self.dir_tails\n",
"role": "user"
},
{
"content": "dir_tails_top",
"role": "assistant"
}
] |
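The `messages` column in each row mirrors the first two columns. As a minimal, hedged sketch (the helper name `build_messages` is hypothetical, not part of any dataset tooling), each messages list appears to pair a fixed system prompt with the row's `source_code` as the user turn and its `function_name` as the assistant turn:

```python
# Hypothetical helper illustrating the apparent row schema shown above.
SYSTEM_PROMPT = ("Given the source code of a python function, "
                 "suggest a fitting name for the function.")

def build_messages(source_code: str, function_name: str) -> list:
    # Assemble the three-turn chat record used in the messages column.
    return [
        {"content": SYSTEM_PROMPT, "role": "system"},
        {"content": source_code, "role": "user"},
        {"content": function_name, "role": "assistant"},
    ]

# Example using the first row above:
example = build_messages(
    "def x(self, rr_id) ->str:\n"
    "    return join(self.dir_tails_hopper, rr_id\n"
    "        ) if self.external else self.dir_tails\n",
    "dir_tails_top",
)
```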
def x(self, rr_id) ->str:
return join(self.dir_tails_top(rr_id), rev_reg_id2cred_def_id(rr_id))
|
dir_tails_target
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, rr_id) ->str:\n return join(self.dir_tails_top(rr_id), rev_reg_id2cred_def_id(rr_id))\n",
"role": "user"
},
{
"content": "dir_tails_target",
"role": "assistant"
}
] |
def x(self, rr_id: str, rr_size: int) ->None:
try:
makedirs(join(self._dir_tails_sentinel, rr_id), exist_ok=False)
except FileExistsError:
LOGGER.warning('Rev reg %s construction already in progress', rr_id)
else:
open(join(self._dir_tails_sentinel, rr_id, '.{}'.format(rr_size)), 'w'
).close()
|
mark_in_progress
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, rr_id: str, rr_size: int) ->None:\n try:\n makedirs(join(self._dir_tails_sentinel, rr_id), exist_ok=False)\n except FileExistsError:\n LOGGER.warning('Rev reg %s construction already in progress', rr_id)\n else:\n open(join(self._dir_tails_sentinel, rr_id, '.{}'.format(rr_size)), 'w'\n ).close()\n",
"role": "user"
},
{
"content": "mark_in_progress",
"role": "assistant"
}
] |
async def serve(self) ->None:
"""
Write pidfile to sentinel directory if need be, and wait for sentinels
to shut down or build revocation registry and tails file.
"""
LOGGER.debug('RevRegBuilder.serve >>>')
assert self.external
file_pid = join(self._dir_tails_sentinel, '.pid')
if isfile(file_pid):
with open(file_pid, 'r') as fh_pid:
pid = int(fh_pid.read())
try:
kill(pid, 0)
except ProcessLookupError:
remove(file_pid)
LOGGER.info('RevRegBuilder removed derelict .pid file')
except PermissionError:
LOGGER.info(
'RevRegBuilder process already running with pid %s: exiting',
pid)
LOGGER.debug('RevRegBuilder.serve <<<')
return
else:
LOGGER.info(
'RevRegBuilder process already running with pid %s: exiting',
pid)
LOGGER.debug('RevRegBuilder.serve <<<')
return
pid = getpid()
with open(file_pid, 'w') as pid_fh:
print(str(pid), file=pid_fh)
file_stop = join(self._dir_tails_sentinel, '.stop')
while True:
if isfile(file_stop):
remove(file_stop)
remove(file_pid)
break
p_pending = [join(self._dir_tails_sentinel, d) for d in listdir(
self._dir_tails_sentinel) if isdir(join(self.
_dir_tails_sentinel, d))]
p_pending = [p for p in p_pending if [s for s in listdir(p) if s.
startswith('.')]]
if p_pending:
pdir = basename(p_pending[0])
rr_id = pdir
rr_size = int([s for s in listdir(p_pending[0]) if s.startswith
('.')][0][1:])
open(join(p_pending[0], '.in-progress'), 'w').close()
await self.create_rev_reg(rr_id, rr_size or None)
rmtree(p_pending[0])
await asyncio.sleep(1)
LOGGER.debug('RevRegBuilder.serve <<<')
|
serve
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def serve(self) ->None:\n \"\"\"\n Write pidfile to sentinel directory if need be, and wait for sentinels\n to shut down or build revocation registry and tails file.\n \"\"\"\n LOGGER.debug('RevRegBuilder.serve >>>')\n assert self.external\n file_pid = join(self._dir_tails_sentinel, '.pid')\n if isfile(file_pid):\n with open(file_pid, 'r') as fh_pid:\n pid = int(fh_pid.read())\n try:\n kill(pid, 0)\n except ProcessLookupError:\n remove(file_pid)\n LOGGER.info('RevRegBuilder removed derelict .pid file')\n except PermissionError:\n LOGGER.info(\n 'RevRegBuilder process already running with pid %s: exiting',\n pid)\n LOGGER.debug('RevRegBuilder.serve <<<')\n return\n else:\n LOGGER.info(\n 'RevRegBuilder process already running with pid %s: exiting',\n pid)\n LOGGER.debug('RevRegBuilder.serve <<<')\n return\n pid = getpid()\n with open(file_pid, 'w') as pid_fh:\n print(str(pid), file=pid_fh)\n file_stop = join(self._dir_tails_sentinel, '.stop')\n while True:\n if isfile(file_stop):\n remove(file_stop)\n remove(file_pid)\n break\n p_pending = [join(self._dir_tails_sentinel, d) for d in listdir(\n self._dir_tails_sentinel) if isdir(join(self.\n _dir_tails_sentinel, d))]\n p_pending = [p for p in p_pending if [s for s in listdir(p) if s.\n startswith('.')]]\n if p_pending:\n pdir = basename(p_pending[0])\n rr_id = pdir\n rr_size = int([s for s in listdir(p_pending[0]) if s.startswith\n ('.')][0][1:])\n open(join(p_pending[0], '.in-progress'), 'w').close()\n await self.create_rev_reg(rr_id, rr_size or None)\n rmtree(p_pending[0])\n await asyncio.sleep(1)\n LOGGER.debug('RevRegBuilder.serve <<<')\n",
"role": "user"
},
{
"content": "serve",
"role": "assistant"
}
] |
async def stop(wallet_name: str) ->None:
"""
Gracefully stop an external revocation registry builder, waiting for its current.
The indy-sdk toolkit uses a temporary directory for tails file mustration,
and shutting down the toolkit removes the directory, crashing the external
tails file write. This method allows a graceful stop to wait for completion
of such tasks already in progress.
:wallet_name: name external revocation registry builder to check
:return: whether a task is pending.
"""
LOGGER.debug('RevRegBuilder.stop >>>')
dir_sentinel = join(RevRegBuilder.dir_tails_sentinel(wallet_name))
if isdir(dir_sentinel):
open(join(dir_sentinel, '.stop'), 'w').close()
while any(isfile(join(dir_sentinel, d, '.in-progress')) for d in
listdir(dir_sentinel)):
await asyncio.sleep(1)
LOGGER.debug('RevRegBuilder.stop <<<')
|
stop
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def stop(wallet_name: str) ->None:\n \"\"\"\n Gracefully stop an external revocation registry builder, waiting for its current.\n\n The indy-sdk toolkit uses a temporary directory for tails file mustration,\n and shutting down the toolkit removes the directory, crashing the external\n tails file write. This method allows a graceful stop to wait for completion\n of such tasks already in progress.\n\n :wallet_name: name external revocation registry builder to check\n :return: whether a task is pending.\n \"\"\"\n LOGGER.debug('RevRegBuilder.stop >>>')\n dir_sentinel = join(RevRegBuilder.dir_tails_sentinel(wallet_name))\n if isdir(dir_sentinel):\n open(join(dir_sentinel, '.stop'), 'w').close()\n while any(isfile(join(dir_sentinel, d, '.in-progress')) for d in\n listdir(dir_sentinel)):\n await asyncio.sleep(1)\n LOGGER.debug('RevRegBuilder.stop <<<')\n",
"role": "user"
},
{
"content": "stop",
"role": "assistant"
}
] |
async def create_rev_reg(self, rr_id: str, rr_size: int=None) ->None:
"""
Create revocation registry artifacts and new tails file (with association to
corresponding revocation registry identifier via symbolic link name)
for input revocation registry identifier. Symbolic link presence signals completion.
If revocation registry builder operates in a process external to its Issuer's,
target directory is hopper directory.
Raise WalletState for closed wallet.
:param rr_id: revocation registry identifier
:param rr_size: revocation registry size (defaults to 64)
"""
LOGGER.debug('RevRegBuilder.create_rev_reg >>> rr_id: %s, rr_size: %s',
rr_id, rr_size)
if not self.wallet.handle:
LOGGER.debug('RevRegBuilder.create_rev_reg <!< Wallet %s is closed',
self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
if not ok_rev_reg_id(rr_id):
LOGGER.debug('RevRegBuilder.create_rev_reg <!< Bad rev reg id %s',
rr_id)
raise BadIdentifier('Bad rev reg id {}'.format(rr_id))
rr_size = rr_size or 64
cd_id, tag = rev_reg_id2cred_def_id_tag(rr_id)
dir_tails = self.dir_tails_top(rr_id)
dir_target = self.dir_tails_target(rr_id)
if self.external:
try:
makedirs(dir_target, exist_ok=False)
except FileExistsError:
LOGGER.warning(
'RevRegBuilder.create_rev_reg found dir %s, but task not in progress: rebuilding rev reg %s'
, dir_target, rr_id)
rmtree(dir_target)
makedirs(dir_target, exist_ok=False)
LOGGER.info('Creating revocation registry (capacity %s) for rev reg id %s',
rr_size, rr_id)
tails_writer_handle = await blob_storage.open_writer('default', json.
dumps({'base_dir': dir_target, 'uri_pattern': ''}))
created_rr_id, rr_def_json, rr_ent_json = (await anoncreds.
issuer_create_and_store_revoc_reg(self.wallet.handle, self.did,
'CL_ACCUM', tag, cd_id, json.dumps({'max_cred_num': rr_size,
'issuance_type': 'ISSUANCE_BY_DEFAULT'}), tails_writer_handle))
tails_hash = basename(Tails.unlinked(dir_target).pop())
with open(join(dir_target, 'rr_def.json'), 'w') as rr_def_fh:
print(rr_def_json, file=rr_def_fh)
with open(join(dir_target, 'rr_ent.json'), 'w') as rr_ent_fh:
print(rr_ent_json, file=rr_ent_fh)
Tails.associate(dir_tails, created_rr_id, tails_hash)
LOGGER.debug('RevRegBuilder.create_rev_reg <<<')
|
create_rev_reg
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def create_rev_reg(self, rr_id: str, rr_size: int=None) ->None:\n \"\"\"\n Create revocation registry artifacts and new tails file (with association to\n corresponding revocation registry identifier via symbolic link name)\n for input revocation registry identifier. Symbolic link presence signals completion.\n If revocation registry builder operates in a process external to its Issuer's,\n target directory is hopper directory.\n\n Raise WalletState for closed wallet.\n\n :param rr_id: revocation registry identifier\n :param rr_size: revocation registry size (defaults to 64)\n \"\"\"\n LOGGER.debug('RevRegBuilder.create_rev_reg >>> rr_id: %s, rr_size: %s',\n rr_id, rr_size)\n if not self.wallet.handle:\n LOGGER.debug('RevRegBuilder.create_rev_reg <!< Wallet %s is closed',\n self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n if not ok_rev_reg_id(rr_id):\n LOGGER.debug('RevRegBuilder.create_rev_reg <!< Bad rev reg id %s',\n rr_id)\n raise BadIdentifier('Bad rev reg id {}'.format(rr_id))\n rr_size = rr_size or 64\n cd_id, tag = rev_reg_id2cred_def_id_tag(rr_id)\n dir_tails = self.dir_tails_top(rr_id)\n dir_target = self.dir_tails_target(rr_id)\n if self.external:\n try:\n makedirs(dir_target, exist_ok=False)\n except FileExistsError:\n LOGGER.warning(\n 'RevRegBuilder.create_rev_reg found dir %s, but task not in progress: rebuilding rev reg %s'\n , dir_target, rr_id)\n rmtree(dir_target)\n makedirs(dir_target, exist_ok=False)\n LOGGER.info('Creating revocation registry (capacity %s) for rev reg id %s',\n rr_size, rr_id)\n tails_writer_handle = await blob_storage.open_writer('default', json.\n dumps({'base_dir': dir_target, 'uri_pattern': ''}))\n created_rr_id, rr_def_json, rr_ent_json = (await anoncreds.\n issuer_create_and_store_revoc_reg(self.wallet.handle, self.did,\n 'CL_ACCUM', tag, cd_id, json.dumps({'max_cred_num': rr_size,\n 'issuance_type': 'ISSUANCE_BY_DEFAULT'}), tails_writer_handle))\n tails_hash = basename(Tails.unlinked(dir_target).pop())\n with open(join(dir_target, 'rr_def.json'), 'w') as rr_def_fh:\n print(rr_def_json, file=rr_def_fh)\n with open(join(dir_target, 'rr_ent.json'), 'w') as rr_ent_fh:\n print(rr_ent_json, file=rr_ent_fh)\n Tails.associate(dir_tails, created_rr_id, tails_hash)\n LOGGER.debug('RevRegBuilder.create_rev_reg <<<')\n",
"role": "user"
},
{
"content": "create_rev_reg",
"role": "assistant"
}
] |
def x(parser, token):
nodelist = parser.parse(('endultracache',))
parser.delete_first_token()
tokens = token.split_contents()
if len(tokens) < 3:
raise TemplateSyntaxError('' %
' tag requires at least 2 arguments.' % tokens[0])
return UltraCacheNode(nodelist, parser.compile_filter(tokens[1]),
tokens[2], [parser.compile_filter(token) for token in tokens[3:]])
|
do_ultracache
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(parser, token):\n nodelist = parser.parse(('endultracache',))\n parser.delete_first_token()\n tokens = token.split_contents()\n if len(tokens) < 3:\n raise TemplateSyntaxError('' %\n ' tag requires at least 2 arguments.' % tokens[0])\n return UltraCacheNode(nodelist, parser.compile_filter(tokens[1]),\n tokens[2], [parser.compile_filter(token) for token in tokens[3:]])\n",
"role": "user"
},
{
"content": "do_ultracache",
"role": "assistant"
}
] |
def x(dumpit: Any, elide_to: int=None) ->str:
if elide_to is not None:
elide_to = max(elide_to, 3)
try:
rv = json.dumps(json.loads(dumpit) if isinstance(dumpit, str) else
dumpit, indent=4)
except TypeError:
rv = '{}'.format(pformat(dumpit, indent=4, width=120))
return rv if elide_to is None or len(rv) <= elide_to else '{}...'.format(rv
[0:elide_to - 3])
|
ppjson
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(dumpit: Any, elide_to: int=None) ->str:\n if elide_to is not None:\n elide_to = max(elide_to, 3)\n try:\n rv = json.dumps(json.loads(dumpit) if isinstance(dumpit, str) else\n dumpit, indent=4)\n except TypeError:\n rv = '{}'.format(pformat(dumpit, indent=4, width=120))\n return rv if elide_to is None or len(rv) <= elide_to else '{}...'.format(rv\n [0:elide_to - 3])\n",
"role": "user"
},
{
"content": "ppjson",
"role": "assistant"
}
] |
def x(coro: Callable) ->Any:
event_loop = None
try:
event_loop = asyncio.get_event_loop()
except RuntimeError:
event_loop = asyncio.new_event_loop()
asyncio.set_event_loop(event_loop)
return event_loop.run_until_complete(coro)
|
do_wait
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(coro: Callable) ->Any:\n event_loop = None\n try:\n event_loop = asyncio.get_event_loop()\n except RuntimeError:\n event_loop = asyncio.new_event_loop()\n asyncio.set_event_loop(event_loop)\n return event_loop.run_until_complete(coro)\n",
"role": "user"
},
{
"content": "do_wait",
"role": "assistant"
}
] |
def x(ini_paths: Union[str, Sequence[str]]) ->dict:
var_dflt = '\\${(.*?):-(.*?)}'
def x(content):
rv = expandvars(content)
while True:
match = re.search(var_dflt, rv)
if match is None:
break
bash_var = '${{{}}}'.format(match.group(1))
value = expandvars(bash_var)
rv = re.sub(var_dflt, match.group(2) if value == bash_var else
value, rv, count=1)
return rv
parser = ConfigParser()
for ini in ([ini_paths] if isinstance(ini_paths, str) else ini_paths):
if not isfile(ini):
raise FileNotFoundError('No such file: {}'.format(ini))
with open(ini, 'r') as ini_fh:
ini_text = _interpolate(ini_fh.read())
parser.read_string(ini_text)
return {s: dict(parser[s].items()) for s in parser.sections()}
|
inis2dict
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(ini_paths: Union[str, Sequence[str]]) ->dict:\n var_dflt = '\\\\${(.*?):-(.*?)}'\n\n def x(content):\n rv = expandvars(content)\n while True:\n match = re.search(var_dflt, rv)\n if match is None:\n break\n bash_var = '${{{}}}'.format(match.group(1))\n value = expandvars(bash_var)\n rv = re.sub(var_dflt, match.group(2) if value == bash_var else\n value, rv, count=1)\n return rv\n parser = ConfigParser()\n for ini in ([ini_paths] if isinstance(ini_paths, str) else ini_paths):\n if not isfile(ini):\n raise FileNotFoundError('No such file: {}'.format(ini))\n with open(ini, 'r') as ini_fh:\n ini_text = _interpolate(ini_fh.read())\n parser.read_string(ini_text)\n return {s: dict(parser[s].items()) for s in parser.sections()}\n",
"role": "user"
},
{
"content": "inis2dict",
"role": "assistant"
}
] |
def x(self, digits: int=None) ->float:
self._mark[:] = [self._mark[1], time()]
rv = self._mark[1] - self._mark[0]
if digits is not None and digits > 0:
rv = round(rv, digits)
elif digits == 0 or self._digits == 0:
rv = int(rv)
elif self._digits is not None and self._digits > 0:
rv = round(rv, self._digits)
return rv
|
mark
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, digits: int=None) ->float:\n self._mark[:] = [self._mark[1], time()]\n rv = self._mark[1] - self._mark[0]\n if digits is not None and digits > 0:\n rv = round(rv, digits)\n elif digits == 0 or self._digits == 0:\n rv = int(rv)\n elif self._digits is not None and self._digits > 0:\n rv = round(rv, self._digits)\n return rv\n",
"role": "user"
},
{
"content": "mark",
"role": "assistant"
}
] |
async def open(self) ->None:
"""
Begin the search operation.
"""
LOGGER.debug('StorageRecordSearch.open >>>')
if self.opened:
LOGGER.debug('StorageRecordSearch.open <!< Search is already opened')
raise BadSearch('Search is already opened')
if not self._wallet.opened:
LOGGER.debug('StorageRecordSearch.open <!< Wallet %s is closed',
self._wallet.name)
raise WalletState('Wallet {} is closed'.format(self._wallet.name))
self._handle = await non_secrets.open_wallet_search(self._wallet.handle,
self._type, self._query_json, StorageRecordSearch.OPTIONS_JSON)
LOGGER.debug('StorageRecordSearch.open <<<')
|
open
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def open(self) ->None:\n \"\"\"\n Begin the search operation.\n \"\"\"\n LOGGER.debug('StorageRecordSearch.open >>>')\n if self.opened:\n LOGGER.debug('StorageRecordSearch.open <!< Search is already opened')\n raise BadSearch('Search is already opened')\n if not self._wallet.opened:\n LOGGER.debug('StorageRecordSearch.open <!< Wallet %s is closed',\n self._wallet.name)\n raise WalletState('Wallet {} is closed'.format(self._wallet.name))\n self._handle = await non_secrets.open_wallet_search(self._wallet.handle,\n self._type, self._query_json, StorageRecordSearch.OPTIONS_JSON)\n LOGGER.debug('StorageRecordSearch.open <<<')\n",
"role": "user"
},
{
"content": "open",
"role": "assistant"
}
] |
async def fetch(self, limit: int=None) ->Sequence[StorageRecord]:
"""
Fetch next batch of search results.
Raise BadSearch if search is closed, WalletState if wallet is closed.
:param limit: maximum number of records to return (default value Wallet.DEFAULT_CHUNK)
:return: next batch of records found
"""
LOGGER.debug('StorageRecordSearch.fetch >>> limit: %s', limit)
if not self.opened:
LOGGER.debug(
'StorageRecordSearch.fetch <!< Storage record search is closed')
raise BadSearch('Storage record search is closed')
if not self._wallet.opened:
LOGGER.debug('StorageRecordSearch.fetch <!< Wallet %s is closed',
self._wallet.name)
raise WalletState('Wallet {} is closed'.format(self._wallet.name))
records = json.loads(await non_secrets.fetch_wallet_search_next_records
(self._wallet.handle, self.handle, limit or Wallet.DEFAULT_CHUNK))[
'records'] or []
rv = [StorageRecord(typ=rec['type'], value=rec['value'], tags=rec[
'tags'], ident=rec['id']) for rec in records]
LOGGER.debug('StorageRecordSearch.fetch <<< %s', rv)
return rv
|
fetch
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def fetch(self, limit: int=None) ->Sequence[StorageRecord]:\n \"\"\"\n Fetch next batch of search results.\n\n Raise BadSearch if search is closed, WalletState if wallet is closed.\n\n :param limit: maximum number of records to return (default value Wallet.DEFAULT_CHUNK)\n :return: next batch of records found\n \"\"\"\n LOGGER.debug('StorageRecordSearch.fetch >>> limit: %s', limit)\n if not self.opened:\n LOGGER.debug(\n 'StorageRecordSearch.fetch <!< Storage record search is closed')\n raise BadSearch('Storage record search is closed')\n if not self._wallet.opened:\n LOGGER.debug('StorageRecordSearch.fetch <!< Wallet %s is closed',\n self._wallet.name)\n raise WalletState('Wallet {} is closed'.format(self._wallet.name))\n records = json.loads(await non_secrets.fetch_wallet_search_next_records\n (self._wallet.handle, self.handle, limit or Wallet.DEFAULT_CHUNK))[\n 'records'] or []\n rv = [StorageRecord(typ=rec['type'], value=rec['value'], tags=rec[\n 'tags'], ident=rec['id']) for rec in records]\n LOGGER.debug('StorageRecordSearch.fetch <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "fetch",
"role": "assistant"
}
] |
async def close(self) ->None:
"""
Close search.
"""
LOGGER.debug('StorageRecordSearch.close >>>')
if self._handle:
await non_secrets.close_wallet_search(self.handle)
self._handle = None
LOGGER.debug('StorageRecordSearch.close <<<')
|
close
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def close(self) ->None:\n \"\"\"\n Close search.\n \"\"\"\n LOGGER.debug('StorageRecordSearch.close >>>')\n if self._handle:\n await non_secrets.close_wallet_search(self.handle)\n self._handle = None\n LOGGER.debug('StorageRecordSearch.close <<<')\n",
"role": "user"
},
{
"content": "close",
"role": "assistant"
}
] |
def x(self) ->str:
if self._cache_id:
return self._cache_id
with open(join(expanduser('~'), '.indy_client', 'pool', self.name,
'{}.txn'.format(self.name))) as fh_genesis:
genesis = [json.loads(line) for line in fh_genesis.readlines() if line]
hps = []
for gen_txn in genesis:
hps.append(self.protocol.genesis_host_port(gen_txn))
hps.sort()
self._cache_id = ':'.join('{}:{}'.format(hp[0], hp[1]) for hp in hps)
return self._cache_id
|
cache_id
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self) ->str:\n if self._cache_id:\n return self._cache_id\n with open(join(expanduser('~'), '.indy_client', 'pool', self.name,\n '{}.txn'.format(self.name))) as fh_genesis:\n genesis = [json.loads(line) for line in fh_genesis.readlines() if line]\n hps = []\n for gen_txn in genesis:\n hps.append(self.protocol.genesis_host_port(gen_txn))\n hps.sort()\n self._cache_id = ':'.join('{}:{}'.format(hp[0], hp[1]) for hp in hps)\n return self._cache_id\n",
"role": "user"
},
{
"content": "cache_id",
"role": "assistant"
}
] |
async def open(self) ->'NodePool':
"""
Explicit entry. Opens pool as configured, for later closure via close().
Creates pool if it does not yet exist, using configured genesis transaction file.
For use when keeping pool open across multiple calls.
Raise any AbsentPool if node pool ledger configuration is not available.
:return: current object
"""
LOGGER.debug('NodePool.open >>>')
await pool.set_protocol_version(self.protocol.indy())
LOGGER.info('Pool ledger %s set protocol %s', self.name, self.protocol)
try:
self._handle = await pool.open_pool_ledger(self.name, json.dumps(
self.config))
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.PoolLedgerNotCreatedError:
LOGGER.debug(
'NodePool.open <!< Absent node pool %s ledger configuration',
self.name)
raise AbsentPool('Absent node pool {} ledger configuration'.
format(self.name))
LOGGER.debug(
'NodePool.open <!< cannot open node pool %s: indy error code %s',
self.name, x_indy.error_code)
raise
LOGGER.debug('NodePool.open <<<')
return self
|
open
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def open(self) ->'NodePool':\n \"\"\"\n Explicit entry. Opens pool as configured, for later closure via close().\n Creates pool if it does not yet exist, using configured genesis transaction file.\n For use when keeping pool open across multiple calls.\n\n Raise any AbsentPool if node pool ledger configuration is not available.\n\n :return: current object\n \"\"\"\n LOGGER.debug('NodePool.open >>>')\n await pool.set_protocol_version(self.protocol.indy())\n LOGGER.info('Pool ledger %s set protocol %s', self.name, self.protocol)\n try:\n self._handle = await pool.open_pool_ledger(self.name, json.dumps(\n self.config))\n except IndyError as x_indy:\n if x_indy.error_code == ErrorCode.PoolLedgerNotCreatedError:\n LOGGER.debug(\n 'NodePool.open <!< Absent node pool %s ledger configuration',\n self.name)\n raise AbsentPool('Absent node pool {} ledger configuration'.\n format(self.name))\n LOGGER.debug(\n 'NodePool.open <!< cannot open node pool %s: indy error code %s',\n self.name, x_indy.error_code)\n raise\n LOGGER.debug('NodePool.open <<<')\n return self\n",
"role": "user"
},
{
"content": "open",
"role": "assistant"
}
] |
async def close(self) ->None:
"""
Explicit exit. Closes pool. For use when keeping pool open across multiple calls.
"""
LOGGER.debug('NodePool.close >>>')
if not self.handle:
LOGGER.warning('Abstaining from closing pool %s: already closed',
self.name)
else:
await pool.close_pool_ledger(self.handle)
self._handle = None
LOGGER.debug('NodePool.close <<<')
|
close
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def close(self) ->None:\n \"\"\"\n Explicit exit. Closes pool. For use when keeping pool open across multiple calls.\n \"\"\"\n LOGGER.debug('NodePool.close >>>')\n if not self.handle:\n LOGGER.warning('Abstaining from closing pool %s: already closed',\n self.name)\n else:\n await pool.close_pool_ledger(self.handle)\n self._handle = None\n LOGGER.debug('NodePool.close <<<')\n",
"role": "user"
},
{
"content": "close",
"role": "assistant"
}
] |
async def refresh(self) ->None:
"""
Refresh local copy of pool ledger and update node pool connections.
"""
LOGGER.debug('NodePool.refresh >>>')
await pool.refresh_pool_ledger(self.handle)
LOGGER.debug('NodePool.refresh <<<')
|
refresh
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def refresh(self) ->None:\n \"\"\"\n Refresh local copy of pool ledger and update node pool connections.\n \"\"\"\n LOGGER.debug('NodePool.refresh >>>')\n await pool.refresh_pool_ledger(self.handle)\n LOGGER.debug('NodePool.refresh <<<')\n",
"role": "user"
},
{
"content": "refresh",
"role": "assistant"
}
] |
async def reseed(self, seed: str=None) ->None:
"""
Rotate key for VON anchor: generate new key, submit to ledger, update wallet.
Raise WalletState if wallet is currently closed.
:param seed: new seed for ed25519 key pair (default random)
"""
LOGGER.debug('BaseAnchor.reseed >>> seed: [SEED]')
verkey = await self.wallet.reseed_init(seed)
req_json = await ledger.build_nym_request(self.did, self.did, verkey,
self.name, (await self.get_nym_role()).token())
await self._sign_submit(req_json)
await self.wallet.reseed_apply()
LOGGER.debug('BaseAnchor.reseed <<<')
|
reseed
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def reseed(self, seed: str=None) ->None:\n \"\"\"\n Rotate key for VON anchor: generate new key, submit to ledger, update wallet.\n Raise WalletState if wallet is currently closed.\n\n :param seed: new seed for ed25519 key pair (default random)\n \"\"\"\n LOGGER.debug('BaseAnchor.reseed >>> seed: [SEED]')\n verkey = await self.wallet.reseed_init(seed)\n req_json = await ledger.build_nym_request(self.did, self.did, verkey,\n self.name, (await self.get_nym_role()).token())\n await self._sign_submit(req_json)\n await self.wallet.reseed_apply()\n LOGGER.debug('BaseAnchor.reseed <<<')\n",
"role": "user"
},
{
"content": "reseed",
"role": "assistant"
}
] |
async def get_nym(self, target_did: str=None) ->str:
"""
Get json cryptonym (including current verification key) for input (anchor) DID from ledger.
Return empty production {} if the ledger has no such cryptonym.
Raise BadLedgerTxn on failure. Raise WalletState if target DID is default (own DID) value but
wallet does not have it (neither created nor opened since initialization).
:param target_did: DID of cryptonym to fetch (default own DID)
:return: cryptonym json
"""
LOGGER.debug('BaseAnchor.get_nym >>> target_did: %s', target_did)
if target_did and not ok_did(target_did):
LOGGER.debug('BaseAnchor.get_nym <!< Bad DID %s', target_did)
raise BadIdentifier('Bad DID {}'.format(target_did))
if not (target_did or self.did):
LOGGER.debug(
'BaseAnchor.get_nym <!< Bad wallet state: DID for %s unavailable',
self.name)
raise WalletState('Bad wallet state: DID for {} unavailable'.format
(self.name))
rv = json.dumps({})
get_nym_req = await ledger.build_get_nym_request(self.did, target_did or
self.did)
resp_json = await self._submit(get_nym_req)
data_json = json.loads(resp_json)['result']['data']
if data_json:
rv = data_json
LOGGER.debug('BaseAnchor.get_nym <<< %s', rv)
return rv
|
get_nym
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_nym(self, target_did: str=None) ->str:\n \"\"\"\n Get json cryptonym (including current verification key) for input (anchor) DID from ledger.\n Return empty production {} if the ledger has no such cryptonym.\n\n Raise BadLedgerTxn on failure. Raise WalletState if target DID is default (own DID) value but\n wallet does not have it (neither created nor opened since initialization).\n\n :param target_did: DID of cryptonym to fetch (default own DID)\n :return: cryptonym json\n \"\"\"\n LOGGER.debug('BaseAnchor.get_nym >>> target_did: %s', target_did)\n if target_did and not ok_did(target_did):\n LOGGER.debug('BaseAnchor.get_nym <!< Bad DID %s', target_did)\n raise BadIdentifier('Bad DID {}'.format(target_did))\n if not (target_did or self.did):\n LOGGER.debug(\n 'BaseAnchor.get_nym <!< Bad wallet state: DID for %s unavailable',\n self.name)\n raise WalletState('Bad wallet state: DID for {} unavailable'.format\n (self.name))\n rv = json.dumps({})\n get_nym_req = await ledger.build_get_nym_request(self.did, target_did or\n self.did)\n resp_json = await self._submit(get_nym_req)\n data_json = json.loads(resp_json)['result']['data']\n if data_json:\n rv = data_json\n LOGGER.debug('BaseAnchor.get_nym <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "get_nym",
"role": "assistant"
}
] |
async def get_nym_role(self, target_did: str=None) ->Role:
"""
Return the cryptonym role for input did from the ledger - note that this may exceed
the role of least privilege for the class.
Raise AbsentNym if current anchor has no cryptonym on the ledger, or WalletState if current DID unavailable.
:param target_did: DID of cryptonym role to fetch (default own DID)
:return: identifier for current cryptonym role on ledger
"""
LOGGER.debug('BaseAnchor.get_nym_role >>> target_did: %s', target_did)
nym = json.loads(await self.get_nym(target_did))
if not nym:
LOGGER.debug(
'BaseAnchor.get_nym_role <!< Ledger has no cryptonym for anchor %s'
, self.name)
raise AbsentNym('Ledger has no cryptonym for anchor {}'.format(self
.name))
rv = Role.get(nym['role'])
LOGGER.debug('BaseAnchor.get_nym_role <<< %s', rv)
return rv
|
get_nym_role
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_nym_role(self, target_did: str=None) ->Role:\n \"\"\"\n Return the cryptonym role for input did from the ledger - note that this may exceed\n the role of least privilege for the class.\n\n Raise AbsentNym if current anchor has no cryptonym on the ledger, or WalletState if current DID unavailable.\n\n :param target_did: DID of cryptonym role to fetch (default own DID)\n :return: identifier for current cryptonym role on ledger\n \"\"\"\n LOGGER.debug('BaseAnchor.get_nym_role >>> target_did: %s', target_did)\n nym = json.loads(await self.get_nym(target_did))\n if not nym:\n LOGGER.debug(\n 'BaseAnchor.get_nym_role <!< Ledger has no cryptonym for anchor %s'\n , self.name)\n raise AbsentNym('Ledger has no cryptonym for anchor {}'.format(self\n .name))\n rv = Role.get(nym['role'])\n LOGGER.debug('BaseAnchor.get_nym_role <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "get_nym_role",
"role": "assistant"
}
] |
def x() ->Role:
LOGGER.debug('BaseAnchor.least_role >>>')
rv = Role.TRUST_ANCHOR
LOGGER.debug('BaseAnchor.least_role <<< %s', rv)
return rv
|
least_role
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x() ->Role:\n LOGGER.debug('BaseAnchor.least_role >>>')\n rv = Role.TRUST_ANCHOR\n LOGGER.debug('BaseAnchor.least_role <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "least_role",
"role": "assistant"
}
] |
async def set_did_endpoint(self, remote_did: str, did_endpoint: str
) ->EndpointInfo:
"""
Set endpoint as metadata for pairwise remote DID in wallet. Pick up (transport)
verification key from pairwise relation and return with endpoint in EndpointInfo.
Raise BadIdentifier on bad DID. Raise WalletState if wallet is closed.
Raise AbsentRecord if pairwise relation not present in wallet.
:param remote_did: pairwise remote DID
:param endpoint: value to set as endpoint in wallet and cache
:return: endpoint and (transport) verification key
"""
LOGGER.debug(
'BaseAnchor.set_did_endpoint >>> remote_did: %s, did_endpoint: %s',
remote_did, did_endpoint)
if not ok_did(remote_did):
LOGGER.debug('BaseAnchor.set_did_endpoint <!< Bad DID %s', remote_did)
raise BadIdentifier('Bad DID {}'.format(remote_did))
pairwise_info = (await self.wallet.get_pairwise(remote_did)).get(remote_did
, None)
if not pairwise_info:
LOGGER.debug(
'BaseAnchor.set_did_endpoint <!< Anchor %s has no pairwise relation for remote DID %s'
, self.name, remote_did)
raise AbsentRecord(
'Anchor {} has no pairwise relation for remote DID {}'.format(
self.name, remote_did))
await self.wallet.write_pairwise(pairwise_info.their_did, pairwise_info
.their_verkey, pairwise_info.my_did, {'did_endpoint': did_endpoint})
rv = EndpointInfo(did_endpoint, pairwise_info.their_verkey)
LOGGER.debug('BaseAnchor.set_did_endpoint <<< %s', rv)
return rv
|
set_did_endpoint
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def set_did_endpoint(self, remote_did: str, did_endpoint: str\n ) ->EndpointInfo:\n \"\"\"\n Set endpoint as metadata for pairwise remote DID in wallet. Pick up (transport)\n verification key from pairwise relation and return with endpoint in EndpointInfo.\n\n\n Raise BadIdentifier on bad DID. Raise WalletState if wallet is closed.\n Raise AbsentRecord if pairwise relation not present in wallet.\n\n :param remote_did: pairwise remote DID\n :param endpoint: value to set as endpoint in wallet and cache\n :return: endpoint and (transport) verification key\n \"\"\"\n LOGGER.debug(\n 'BaseAnchor.set_did_endpoint >>> remote_did: %s, did_endpoint: %s',\n remote_did, did_endpoint)\n if not ok_did(remote_did):\n LOGGER.debug('BaseAnchor.set_did_endpoint <!< Bad DID %s', remote_did)\n raise BadIdentifier('Bad DID {}'.format(remote_did))\n pairwise_info = (await self.wallet.get_pairwise(remote_did)).get(remote_did\n , None)\n if not pairwise_info:\n LOGGER.debug(\n 'BaseAnchor.set_did_endpoint <!< Anchor %s has no pairwise relation for remote DID %s'\n , self.name, remote_did)\n raise AbsentRecord(\n 'Anchor {} has no pairwise relation for remote DID {}'.format(\n self.name, remote_did))\n await self.wallet.write_pairwise(pairwise_info.their_did, pairwise_info\n .their_verkey, pairwise_info.my_did, {'did_endpoint': did_endpoint})\n rv = EndpointInfo(did_endpoint, pairwise_info.their_verkey)\n LOGGER.debug('BaseAnchor.set_did_endpoint <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "set_did_endpoint",
"role": "assistant"
}
] |
async def get_did_endpoint(self, remote_did: str) ->EndpointInfo:
"""
Return endpoint info for remote DID.
Raise BadIdentifier for bad remote DID. Raise WalletState if bypassing cache but wallet is closed.
Raise AbsentRecord for no such endpoint.
:param remote_did: pairwise remote DID
:return: endpoint and (transport) verification key as EndpointInfo
"""
LOGGER.debug('BaseAnchor.get_did_endpoint >>> remote_did: %s', remote_did)
if not ok_did(remote_did):
LOGGER.debug('BaseAnchor.get_did_endpoint <!< Bad DID %s', remote_did)
raise BadIdentifier('Bad DID {}'.format(remote_did))
if not self.wallet.handle:
LOGGER.debug('BaseAnchor.get_did_endpoint <!< Wallet %s is closed',
self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
pairwise_info = (await self.wallet.get_pairwise(remote_did)).get(remote_did
, None)
if not (pairwise_info and 'did_endpoint' in pairwise_info.metadata):
LOGGER.debug(
'BaseAnchor.get_did_endpoint <!< No endpoint for remote DID %s',
remote_did)
raise AbsentRecord('No endpoint for remote DID {}'.format(remote_did))
rv = EndpointInfo(pairwise_info.metadata['did_endpoint'], pairwise_info
.their_verkey)
LOGGER.debug('BaseAnchor.get_did_endpoint <<< %s', rv)
return rv
|
get_did_endpoint
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_did_endpoint(self, remote_did: str) ->EndpointInfo:\n \"\"\"\n Return endpoint info for remote DID.\n Raise BadIdentifier for bad remote DID. Raise WalletState if bypassing cache but wallet is closed.\n Raise AbsentRecord for no such endpoint.\n\n :param remote_did: pairwise remote DID\n :return: endpoint and (transport) verification key as EndpointInfo\n \"\"\"\n LOGGER.debug('BaseAnchor.get_did_endpoint >>> remote_did: %s', remote_did)\n if not ok_did(remote_did):\n LOGGER.debug('BaseAnchor.get_did_endpoint <!< Bad DID %s', remote_did)\n raise BadIdentifier('Bad DID {}'.format(remote_did))\n if not self.wallet.handle:\n LOGGER.debug('BaseAnchor.get_did_endpoint <!< Wallet %s is closed',\n self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n pairwise_info = (await self.wallet.get_pairwise(remote_did)).get(remote_did\n , None)\n if not (pairwise_info and 'did_endpoint' in pairwise_info.metadata):\n LOGGER.debug(\n 'BaseAnchor.get_did_endpoint <!< No endpoint for remote DID %s',\n remote_did)\n raise AbsentRecord('No endpoint for remote DID {}'.format(remote_did))\n rv = EndpointInfo(pairwise_info.metadata['did_endpoint'], pairwise_info\n .their_verkey)\n LOGGER.debug('BaseAnchor.get_did_endpoint <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "get_did_endpoint",
"role": "assistant"
}
] |
async def send_endpoint(self, endpoint: str) ->None:
"""
Send anchor's own endpoint attribute to ledger (and endpoint cache),
if ledger does not yet have input value. Specify None to clear.
Raise BadIdentifier on endpoint not formatted as '<ip-address>:<port>',
BadLedgerTxn on failure, WalletState if wallet is closed.
:param endpoint: value to set as endpoint attribute on ledger and cache:
specify URL or None to clear.
"""
LOGGER.debug('BaseAnchor.send_endpoint >>> endpoint: %s', endpoint)
ledger_endpoint = await self.get_endpoint()
if ledger_endpoint == endpoint:
LOGGER.info('%s endpoint already set as %s', self.name, endpoint)
LOGGER.debug('BaseAnchor.send_endpoint <<< (%s already set for %s )')
return
attr_json = json.dumps({'endpoint': {'endpoint': endpoint}})
req_json = await ledger.build_attrib_request(self.did, self.did, None,
attr_json, None)
await self._sign_submit(req_json)
for _ in range(16):
if await self.get_endpoint(None, False) == endpoint:
break
await asyncio.sleep(1)
LOGGER.info(
'Sent endpoint %s to ledger, waiting 1s for its confirmation',
endpoint)
else:
LOGGER.debug(
'BaseAnchor.send_endpoint <!< timed out waiting on send endpoint %s'
, endpoint)
raise BadLedgerTxn('Timed out waiting on sent endpoint {}'.format(
endpoint))
LOGGER.debug('BaseAnchor.send_endpoint <<<')
|
send_endpoint
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def send_endpoint(self, endpoint: str) ->None:\n \"\"\"\n Send anchor's own endpoint attribute to ledger (and endpoint cache),\n if ledger does not yet have input value. Specify None to clear.\n\n Raise BadIdentifier on endpoint not formatted as '<ip-address>:<port>',\n BadLedgerTxn on failure, WalletState if wallet is closed.\n\n :param endpoint: value to set as endpoint attribute on ledger and cache:\n specify URL or None to clear.\n \"\"\"\n LOGGER.debug('BaseAnchor.send_endpoint >>> endpoint: %s', endpoint)\n ledger_endpoint = await self.get_endpoint()\n if ledger_endpoint == endpoint:\n LOGGER.info('%s endpoint already set as %s', self.name, endpoint)\n LOGGER.debug('BaseAnchor.send_endpoint <<< (%s already set for %s )')\n return\n attr_json = json.dumps({'endpoint': {'endpoint': endpoint}})\n req_json = await ledger.build_attrib_request(self.did, self.did, None,\n attr_json, None)\n await self._sign_submit(req_json)\n for _ in range(16):\n if await self.get_endpoint(None, False) == endpoint:\n break\n await asyncio.sleep(1)\n LOGGER.info(\n 'Sent endpoint %s to ledger, waiting 1s for its confirmation',\n endpoint)\n else:\n LOGGER.debug(\n 'BaseAnchor.send_endpoint <!< timed out waiting on send endpoint %s'\n , endpoint)\n raise BadLedgerTxn('Timed out waiting on sent endpoint {}'.format(\n endpoint))\n LOGGER.debug('BaseAnchor.send_endpoint <<<')\n",
"role": "user"
},
{
"content": "send_endpoint",
"role": "assistant"
}
] |
async def get_endpoint(self, target_did: str=None, from_cache: bool=True
) ->str:
"""
Get endpoint attribute for anchor having input DID (default own DID).
Raise WalletState if target DID is default (own DID) value but wallet does not have it
(neither created nor opened since initialization).
:param target_did: DID of anchor for which to find endpoint attribute on ledger
:param from_cache: check endpoint cache first before visiting ledger; always update cache with ledger value
:return: endpoint attribute value, or None for no such value
"""
LOGGER.debug('BaseAnchor.get_endpoint >>> target_did: %s, from_cache: %s',
target_did, from_cache)
rv = None
if not (target_did or self.did):
LOGGER.debug(
'BaseAnchor.get_endpoint <!< Bad wallet state: DID for %s unavailable'
, self.name)
raise WalletState('Bad wallet state: DID for {} unavailable'.format
(self.name))
target_did = target_did or self.did
if not ok_did(target_did):
LOGGER.debug('BaseAnchor.get_endpoint <!< Bad DID %s', target_did)
raise BadIdentifier('Bad DID {}'.format(target_did))
if from_cache:
with ENDPOINT_CACHE.lock:
if target_did in ENDPOINT_CACHE:
LOGGER.info(
'BaseAnchor.get_endpoint: got endpoint for %s from cache',
target_did)
rv = ENDPOINT_CACHE[target_did]
LOGGER.debug('BaseAnchor.get_endpoint <<< %s', rv)
return rv
req_json = await ledger.build_get_attrib_request(self.did, target_did,
'endpoint', None, None)
resp_json = await self._submit(req_json)
data_json = json.loads(resp_json)['result']['data']
if data_json:
rv = json.loads(data_json)['endpoint'].get('endpoint', None)
else:
LOGGER.info(
'_AgentCore.get_endpoint: ledger query returned response with no data'
)
with ENDPOINT_CACHE.lock:
if rv:
ENDPOINT_CACHE[target_did] = rv
else:
ENDPOINT_CACHE.pop(target_did, None)
assert target_did not in ENDPOINT_CACHE
LOGGER.debug('BaseAnchor.get_endpoint <<< %s', rv)
return rv
|
get_endpoint
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_endpoint(self, target_did: str=None, from_cache: bool=True\n ) ->str:\n \"\"\"\n Get endpoint attribute for anchor having input DID (default own DID).\n Raise WalletState if target DID is default (own DID) value but wallet does not have it\n (neither created nor opened since initialization).\n\n :param target_did: DID of anchor for which to find endpoint attribute on ledger\n :param from_cache: check endpoint cache first before visiting ledger; always update cache with ledger value\n :return: endpoint attribute value, or None for no such value\n \"\"\"\n LOGGER.debug('BaseAnchor.get_endpoint >>> target_did: %s, from_cache: %s',\n target_did, from_cache)\n rv = None\n if not (target_did or self.did):\n LOGGER.debug(\n 'BaseAnchor.get_endpoint <!< Bad wallet state: DID for %s unavailable'\n , self.name)\n raise WalletState('Bad wallet state: DID for {} unavailable'.format\n (self.name))\n target_did = target_did or self.did\n if not ok_did(target_did):\n LOGGER.debug('BaseAnchor.get_endpoint <!< Bad DID %s', target_did)\n raise BadIdentifier('Bad DID {}'.format(target_did))\n if from_cache:\n with ENDPOINT_CACHE.lock:\n if target_did in ENDPOINT_CACHE:\n LOGGER.info(\n 'BaseAnchor.get_endpoint: got endpoint for %s from cache',\n target_did)\n rv = ENDPOINT_CACHE[target_did]\n LOGGER.debug('BaseAnchor.get_endpoint <<< %s', rv)\n return rv\n req_json = await ledger.build_get_attrib_request(self.did, target_did,\n 'endpoint', None, None)\n resp_json = await self._submit(req_json)\n data_json = json.loads(resp_json)['result']['data']\n if data_json:\n rv = json.loads(data_json)['endpoint'].get('endpoint', None)\n else:\n LOGGER.info(\n '_AgentCore.get_endpoint: ledger query returned response with no data'\n )\n with ENDPOINT_CACHE.lock:\n if rv:\n ENDPOINT_CACHE[target_did] = rv\n else:\n ENDPOINT_CACHE.pop(target_did, None)\n assert target_did not in ENDPOINT_CACHE\n LOGGER.debug('BaseAnchor.get_endpoint <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "get_endpoint",
"role": "assistant"
}
] |
async def _submit(self, req_json: str) ->str:
"""
Submit (json) request to ledger; return (json) result.
Raise AbsentPool for no pool, ClosedPool if pool is not yet open, or BadLedgerTxn on failure.
:param req_json: json of request to sign and submit
:return: json response
"""
LOGGER.debug('BaseAnchor._submit >>> req_json: %s', req_json)
if not self.pool:
LOGGER.debug('BaseAnchor._submit <!< absent pool')
raise AbsentPool('Cannot submit request: absent pool')
if not self.pool.handle:
LOGGER.debug('BaseAnchor._submit <!< closed pool %s', self.pool.name)
raise ClosedPool('Cannot submit request to closed pool {}'.format(
self.pool.name))
rv_json = await ledger.submit_request(self.pool.handle, req_json)
await asyncio.sleep(0)
resp = json.loads(rv_json)
if resp.get('op', '') in ('REQNACK', 'REJECT'):
LOGGER.debug('BaseAnchor._submit <!< ledger rejected request: %s',
resp['reason'])
raise BadLedgerTxn('Ledger rejected transaction request: {}'.format
(resp['reason']))
LOGGER.debug('BaseAnchor._submit <<< %s', rv_json)
return rv_json
|
submit
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def _submit(self, req_json: str) ->str:\n \"\"\"\n Submit (json) request to ledger; return (json) result.\n\n Raise AbsentPool for no pool, ClosedPool if pool is not yet open, or BadLedgerTxn on failure.\n\n :param req_json: json of request to sign and submit\n :return: json response\n \"\"\"\n LOGGER.debug('BaseAnchor._submit >>> req_json: %s', req_json)\n if not self.pool:\n LOGGER.debug('BaseAnchor._submit <!< absent pool')\n raise AbsentPool('Cannot submit request: absent pool')\n if not self.pool.handle:\n LOGGER.debug('BaseAnchor._submit <!< closed pool %s', self.pool.name)\n raise ClosedPool('Cannot submit request to closed pool {}'.format(\n self.pool.name))\n rv_json = await ledger.submit_request(self.pool.handle, req_json)\n await asyncio.sleep(0)\n resp = json.loads(rv_json)\n if resp.get('op', '') in ('REQNACK', 'REJECT'):\n LOGGER.debug('BaseAnchor._submit <!< ledger rejected request: %s',\n resp['reason'])\n raise BadLedgerTxn('Ledger rejected transaction request: {}'.format\n (resp['reason']))\n LOGGER.debug('BaseAnchor._submit <<< %s', rv_json)\n return rv_json\n",
"role": "user"
},
{
"content": "submit",
"role": "assistant"
}
] |
async def _verkey_for(self, target: str) ->str:
"""
Given a DID, retrieve its verification key, looking in wallet, then pool.
Given a verification key or None, return input.
Raise WalletState if the wallet is closed. Given a recipient DID not in the wallet,
raise AbsentPool if the instance has no pool or ClosedPool if its pool is closed.
If no such verification key is on the ledger, raise AbsentNym.
:param target: verification key, or DID to resolve to such
:return: verification key
"""
LOGGER.debug('BaseAnchor._verkey_for >>> target: %s', target)
rv = target
if rv is None or not ok_did(rv):
LOGGER.debug('BaseAnchor._verkey_for <<< %s', rv)
return rv
if self.wallet.handle:
try:
rv = await did.key_for_local_did(self.wallet.handle, target)
LOGGER.info('Anchor %s got verkey for DID %s from wallet', self
.name, target)
LOGGER.debug('BaseAnchor._verkey_for <<< %s', rv)
return rv
except IndyError as x_indy:
if x_indy.error_code != ErrorCode.WalletItemNotFound:
LOGGER.debug(
'BaseAnchor._verkey_for <!< key lookup for local DID %s raised indy error code %s'
, target, x_indy.error_code)
raise
nym = json.loads(await self.get_nym(target))
if not nym:
LOGGER.debug(
'BaseAnchor._verkey_for <!< Wallet %s closed and ledger has no cryptonym for DID %s'
, self.name, target)
raise AbsentNym(
'Wallet {} closed, and ledger has no cryptonym for DID {}'.
format(self.name, target))
rv = json.loads(await self.get_nym(target))['verkey']
LOGGER.info('Anchor %s got verkey for DID %s from pool %s', self.name,
target, self.pool.name)
LOGGER.debug('BaseAnchor._verkey_for <<< %s', rv)
return rv
|
verkey_for
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def _verkey_for(self, target: str) ->str:\n \"\"\"\n Given a DID, retrieve its verification key, looking in wallet, then pool.\n Given a verification key or None, return input.\n\n Raise WalletState if the wallet is closed. Given a recipient DID not in the wallet,\n raise AbsentPool if the instance has no pool or ClosedPool if its pool is closed.\n If no such verification key is on the ledger, raise AbsentNym.\n\n :param target: verification key, or DID to resolve to such\n :return: verification key\n \"\"\"\n LOGGER.debug('BaseAnchor._verkey_for >>> target: %s', target)\n rv = target\n if rv is None or not ok_did(rv):\n LOGGER.debug('BaseAnchor._verkey_for <<< %s', rv)\n return rv\n if self.wallet.handle:\n try:\n rv = await did.key_for_local_did(self.wallet.handle, target)\n LOGGER.info('Anchor %s got verkey for DID %s from wallet', self\n .name, target)\n LOGGER.debug('BaseAnchor._verkey_for <<< %s', rv)\n return rv\n except IndyError as x_indy:\n if x_indy.error_code != ErrorCode.WalletItemNotFound:\n LOGGER.debug(\n 'BaseAnchor._verkey_for <!< key lookup for local DID %s raised indy error code %s'\n , target, x_indy.error_code)\n raise\n nym = json.loads(await self.get_nym(target))\n if not nym:\n LOGGER.debug(\n 'BaseAnchor._verkey_for <!< Wallet %s closed and ledger has no cryptonym for DID %s'\n , self.name, target)\n raise AbsentNym(\n 'Wallet {} closed, and ledger has no cryptonym for DID {}'.\n format(self.name, target))\n rv = json.loads(await self.get_nym(target))['verkey']\n LOGGER.info('Anchor %s got verkey for DID %s from pool %s', self.name,\n target, self.pool.name)\n LOGGER.debug('BaseAnchor._verkey_for <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "verkey_for",
"role": "assistant"
}
] |
async def encrypt(self, message: bytes, authn: bool=False, recip: str=None
) ->bytes:
"""
Encrypt plaintext for owner of DID or verification key, anonymously or via
authenticated encryption scheme. If given DID, first check wallet and then pool
for corresponding verification key.
Raise WalletState if the wallet is closed. Given a recipient DID not in the wallet,
raise AbsentPool if the instance has no pool or ClosedPool if its pool is closed.
:param message: plaintext, as bytes
:param authn: whether to use authenticated encryption scheme
:param recip: DID or verification key of recipient, None for anchor's own
:return: ciphertext, as bytes
"""
LOGGER.debug('BaseAnchor.encrypt >>> message: %s, authn: %s, recip: %s',
message, authn, recip)
if not self.wallet.handle:
LOGGER.debug('BaseAnchor.encrypt <!< Wallet %s is closed', self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
rv = await self.wallet.encrypt(message, authn, await self._verkey_for(
recip))
LOGGER.debug('BaseAnchor.auth_encrypt <<< %s', rv)
return rv
|
encrypt
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def encrypt(self, message: bytes, authn: bool=False, recip: str=None\n ) ->bytes:\n \"\"\"\n Encrypt plaintext for owner of DID or verification key, anonymously or via\n authenticated encryption scheme. If given DID, first check wallet and then pool\n for corresponding verification key.\n\n Raise WalletState if the wallet is closed. Given a recipient DID not in the wallet,\n raise AbsentPool if the instance has no pool or ClosedPool if its pool is closed.\n\n :param message: plaintext, as bytes\n :param authn: whether to use authenticated encryption scheme\n :param recip: DID or verification key of recipient, None for anchor's own\n :return: ciphertext, as bytes\n \"\"\"\n LOGGER.debug('BaseAnchor.encrypt >>> message: %s, authn: %s, recip: %s',\n message, authn, recip)\n if not self.wallet.handle:\n LOGGER.debug('BaseAnchor.encrypt <!< Wallet %s is closed', self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n rv = await self.wallet.encrypt(message, authn, await self._verkey_for(\n recip))\n LOGGER.debug('BaseAnchor.auth_encrypt <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "encrypt",
"role": "assistant"
}
] |
async def decrypt(self, ciphertext: bytes, sender: str=None) ->(bytes, str):
"""
Decrypt ciphertext and optionally authenticate sender.
Raise BadKey if authentication operation reveals sender key distinct from current
verification key of owner of input DID. Raise WalletState if wallet is closed.
:param ciphertext: ciphertext, as bytes
:param sender: DID or verification key of sender, None for anonymously encrypted ciphertext
:return: decrypted bytes and sender verification key (None for anonymous decryption)
"""
LOGGER.debug('BaseAnchor.decrypt >>> ciphertext: %s, sender: %s',
ciphertext, sender)
if not self.wallet.handle:
LOGGER.debug('BaseAnchor.decrypt <!< Wallet %s is closed', self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
from_verkey = None
if sender:
from_verkey = await self._verkey_for(sender)
rv = await self.wallet.decrypt(ciphertext, True if from_verkey else
None, to_verkey=None, from_verkey=from_verkey)
LOGGER.debug('BaseAnchor.decrypt <<< %s', rv)
return rv
|
decrypt
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def decrypt(self, ciphertext: bytes, sender: str=None) ->(bytes, str):\n \"\"\"\n Decrypt ciphertext and optionally authenticate sender.\n\n Raise BadKey if authentication operation reveals sender key distinct from current\n verification key of owner of input DID. Raise WalletState if wallet is closed.\n\n :param ciphertext: ciphertext, as bytes\n :param sender: DID or verification key of sender, None for anonymously encrypted ciphertext\n :return: decrypted bytes and sender verification key (None for anonymous decryption)\n \"\"\"\n LOGGER.debug('BaseAnchor.decrypt >>> ciphertext: %s, sender: %s',\n ciphertext, sender)\n if not self.wallet.handle:\n LOGGER.debug('BaseAnchor.decrypt <!< Wallet %s is closed', self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n from_verkey = None\n if sender:\n from_verkey = await self._verkey_for(sender)\n rv = await self.wallet.decrypt(ciphertext, True if from_verkey else\n None, to_verkey=None, from_verkey=from_verkey)\n LOGGER.debug('BaseAnchor.decrypt <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "decrypt",
"role": "assistant"
}
] |
async def sign(self, message: bytes) ->bytes:
"""
Sign message; return signature. Raise WalletState if wallet is closed.
:param message: Content to sign, as bytes
:return: signature, as bytes
"""
LOGGER.debug('BaseAnchor.sign >>> message: %s', message)
if not self.wallet.handle:
LOGGER.debug('BaseAnchor.sign <!< Wallet %s is closed', self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
rv = await self.wallet.sign(message)
LOGGER.debug('BaseAnchor.sign <<< %s', rv)
return rv
|
sign
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def sign(self, message: bytes) ->bytes:\n \"\"\"\n Sign message; return signature. Raise WalletState if wallet is closed.\n\n :param message: Content to sign, as bytes\n :return: signature, as bytes\n \"\"\"\n LOGGER.debug('BaseAnchor.sign >>> message: %s', message)\n if not self.wallet.handle:\n LOGGER.debug('BaseAnchor.sign <!< Wallet %s is closed', self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n rv = await self.wallet.sign(message)\n LOGGER.debug('BaseAnchor.sign <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "sign",
"role": "assistant"
}
] |
async def verify(self, message: bytes, signature: bytes, signer: str=None
) ->bool:
"""
Verify signature with input signer verification key (via lookup by DID first if need be).
Raise WalletState if wallet is closed.
:param message: Content to sign, as bytes
:param signature: signature, as bytes
:param signer: signer DID or verification key; omit for anchor's own
:return: whether signature is valid
"""
LOGGER.debug('BaseAnchor.verify >>> signer: %s, message: %s, signature: %s'
, signer, message, signature)
if not self.wallet.handle:
LOGGER.debug('BaseAnchor.verify <!< Wallet %s is closed', self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
verkey = None
if signer:
verkey = await self._verkey_for(signer)
rv = await self.wallet.verify(message, signature, verkey)
LOGGER.debug('BaseAnchor.verify <<< %s', rv)
return rv
|
verify
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def verify(self, message: bytes, signature: bytes, signer: str=None\n ) ->bool:\n \"\"\"\n Verify signature with input signer verification key (via lookup by DID first if need be).\n Raise WalletState if wallet is closed.\n\n :param message: Content to sign, as bytes\n :param signature: signature, as bytes\n :param signer: signer DID or verification key; omit for anchor's own\n :return: whether signature is valid\n \"\"\"\n LOGGER.debug('BaseAnchor.verify >>> signer: %s, message: %s, signature: %s'\n , signer, message, signature)\n if not self.wallet.handle:\n LOGGER.debug('BaseAnchor.verify <!< Wallet %s is closed', self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n verkey = None\n if signer:\n verkey = await self._verkey_for(signer)\n rv = await self.wallet.verify(message, signature, verkey)\n LOGGER.debug('BaseAnchor.verify <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "verify",
"role": "assistant"
}
] |
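A minimal round trip for the sign and verify records above; `anchor` is a hypothetical opened anchor signing with its own key, so no signer argument is passed to verify().

async def demo_sign_verify(anchor):
    message = b'attendance record'
    signature = await anchor.sign(message)
    assert await anchor.verify(message, signature) is True        # valid signature, own verkey
    assert await anchor.verify(b'tampered', signature) is False   # any change invalidates it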
async def get_txn(self, seq_no: int) ->str:
"""
Find a transaction on the distributed ledger by its sequence number.
:param seq_no: transaction number
:return: json sequence number of transaction, null for no match
"""
LOGGER.debug('BaseAnchor.get_txn >>> seq_no: %s', seq_no)
rv_json = json.dumps({})
req_json = await ledger.build_get_txn_request(self.did, None, seq_no)
resp = json.loads(await self._submit(req_json))
rv_json = self.pool.protocol.txn2data(resp)
LOGGER.debug('BaseAnchor.get_txn <<< %s', rv_json)
return rv_json
|
get_txn
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_txn(self, seq_no: int) ->str:\n \"\"\"\n Find a transaction on the distributed ledger by its sequence number.\n\n :param seq_no: transaction number\n :return: json sequence number of transaction, null for no match\n \"\"\"\n LOGGER.debug('BaseAnchor.get_txn >>> seq_no: %s', seq_no)\n rv_json = json.dumps({})\n req_json = await ledger.build_get_txn_request(self.did, None, seq_no)\n resp = json.loads(await self._submit(req_json))\n rv_json = self.pool.protocol.txn2data(resp)\n LOGGER.debug('BaseAnchor.get_txn <<< %s', rv_json)\n return rv_json\n",
"role": "user"
},
{
"content": "get_txn",
"role": "assistant"
}
] |
def x(self):
rv = {'id': self.id, 'type': self.type, 'priority': self.priority}
if self.recip_keys:
        rv['recipientKeys'] = [canon_ref(k.did, k.id, '#') for k in self.
recip_keys]
if self.routing_keys:
rv['routingKeys'] = [canon_ref(k.did, k.id, '#') for k in self.
routing_keys]
rv['serviceEndpoint'] = self.endpoint
return rv
|
to_dict
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n rv = {'id': self.id, 'type': self.type, 'priority': self.priority}\n if self.recip_keys:\n rv['routingKeys'] = [canon_ref(k.did, k.id, '#') for k in self.\n recip_keys]\n if self.routing_keys:\n rv['routingKeys'] = [canon_ref(k.did, k.id, '#') for k in self.\n routing_keys]\n rv['serviceEndpoint'] = self.endpoint\n return rv\n",
"role": "user"
},
{
"content": "to_dict",
"role": "assistant"
}
] |
async def _assert_link_secret(self, action: str) ->str:
"""
Return current wallet link secret label. Raise AbsentLinkSecret if link secret is not set.
:param action: action requiring link secret
"""
rv = await self.wallet.get_link_secret_label()
if rv is None:
LOGGER.debug(
'HolderProver._assert_link_secret: action %s requires link secret but it is not set'
, action)
raise AbsentLinkSecret(
'Action {} requires link secret but it is not set'.format(action))
return rv
|
assert_link_secret
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def _assert_link_secret(self, action: str) ->str:\n \"\"\"\n Return current wallet link secret label. Raise AbsentLinkSecret if link secret is not set.\n\n :param action: action requiring link secret\n \"\"\"\n rv = await self.wallet.get_link_secret_label()\n if rv is None:\n LOGGER.debug(\n 'HolderProver._assert_link_secret: action %s requires link secret but it is not set'\n , action)\n raise AbsentLinkSecret(\n 'Action {} requires link secret but it is not set'.format(action))\n return rv\n",
"role": "user"
},
{
"content": "assert_link_secret",
"role": "assistant"
}
] |
async def _sync_revoc_for_proof(self, rr_id: str) ->None:
"""
Pick up tails file reader handle for input revocation registry identifier. If no symbolic
link is present, get the revocation registry definition to retrieve its tails file hash,
then find the tails file and link it.
Raise AbsentTails for missing corresponding tails file.
:param rr_id: revocation registry identifier
"""
LOGGER.debug('HolderProver._sync_revoc_for_proof >>> rr_id: %s', rr_id)
if not ok_rev_reg_id(rr_id):
LOGGER.debug('HolderProver._sync_revoc_for_proof <!< Bad rev reg id %s'
, rr_id)
raise BadIdentifier('Bad rev reg id {}'.format(rr_id))
cd_id, tag = rev_reg_id2cred_def_id_tag(rr_id)
try:
json.loads(await self.get_cred_def(cd_id))
except AbsentCredDef:
LOGGER.debug(
'HolderProver._sync_revoc_for_proof <!< corrupt tails tree %s may be for another ledger'
, self._dir_tails)
raise AbsentCredDef('Corrupt tails tree {} may be for another ledger'
.format(self._dir_tails))
except ClosedPool:
pass
with REVO_CACHE.lock:
revo_cache_entry = REVO_CACHE.get(rr_id, None)
tails = revo_cache_entry.tails if revo_cache_entry else None
if tails is None:
try:
tails = await Tails(self._dir_tails, cd_id, tag).open()
except AbsentTails:
rr_def = json.loads(await self.get_rev_reg_def(rr_id))
tails_hash = rr_def['value']['tailsHash']
path_tails = join(Tails.dir(self._dir_tails, rr_id), tails_hash
)
if not isfile(path_tails):
LOGGER.debug(
'HolderProver._sync_revoc_for_proof <!< No tails file present at %s'
, path_tails)
raise AbsentTails('No tails file present at {}'.format(
path_tails))
Tails.associate(self._dir_tails, rr_id, tails_hash)
tails = await Tails(self._dir_tails, cd_id, tag).open()
if revo_cache_entry is None:
REVO_CACHE[rr_id] = RevoCacheEntry(None, tails)
else:
REVO_CACHE[rr_id].tails = tails
LOGGER.debug('HolderProver._sync_revoc_for_proof <<<')
|
sync_revoc_for_proof
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def _sync_revoc_for_proof(self, rr_id: str) ->None:\n \"\"\"\n Pick up tails file reader handle for input revocation registry identifier. If no symbolic\n link is present, get the revocation registry definition to retrieve its tails file hash,\n then find the tails file and link it.\n\n Raise AbsentTails for missing corresponding tails file.\n\n :param rr_id: revocation registry identifier\n \"\"\"\n LOGGER.debug('HolderProver._sync_revoc_for_proof >>> rr_id: %s', rr_id)\n if not ok_rev_reg_id(rr_id):\n LOGGER.debug('HolderProver._sync_revoc_for_proof <!< Bad rev reg id %s'\n , rr_id)\n raise BadIdentifier('Bad rev reg id {}'.format(rr_id))\n cd_id, tag = rev_reg_id2cred_def_id_tag(rr_id)\n try:\n json.loads(await self.get_cred_def(cd_id))\n except AbsentCredDef:\n LOGGER.debug(\n 'HolderProver._sync_revoc_for_proof <!< corrupt tails tree %s may be for another ledger'\n , self._dir_tails)\n raise AbsentCredDef('Corrupt tails tree {} may be for another ledger'\n .format(self._dir_tails))\n except ClosedPool:\n pass\n with REVO_CACHE.lock:\n revo_cache_entry = REVO_CACHE.get(rr_id, None)\n tails = revo_cache_entry.tails if revo_cache_entry else None\n if tails is None:\n try:\n tails = await Tails(self._dir_tails, cd_id, tag).open()\n except AbsentTails:\n rr_def = json.loads(await self.get_rev_reg_def(rr_id))\n tails_hash = rr_def['value']['tailsHash']\n path_tails = join(Tails.dir(self._dir_tails, rr_id), tails_hash\n )\n if not isfile(path_tails):\n LOGGER.debug(\n 'HolderProver._sync_revoc_for_proof <!< No tails file present at %s'\n , path_tails)\n raise AbsentTails('No tails file present at {}'.format(\n path_tails))\n Tails.associate(self._dir_tails, rr_id, tails_hash)\n tails = await Tails(self._dir_tails, cd_id, tag).open()\n if revo_cache_entry is None:\n REVO_CACHE[rr_id] = RevoCacheEntry(None, tails)\n else:\n REVO_CACHE[rr_id].tails = tails\n LOGGER.debug('HolderProver._sync_revoc_for_proof <<<')\n",
"role": "user"
},
{
"content": "sync_revoc_for_proof",
"role": "assistant"
}
] |
def x(self, rr_id: str) ->str:
LOGGER.debug('HolderProver.dir_tails >>>')
if not ok_rev_reg_id(rr_id):
LOGGER.debug('HolderProver.dir_tails <!< Bad rev reg id %s', rr_id)
raise BadIdentifier('Bad rev reg id {}'.format(rr_id))
rv = Tails.dir(self._dir_tails, rr_id)
LOGGER.debug('HolderProver.dir_tails <<< %s', rv)
return rv
|
dir_tails
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, rr_id: str) ->str:\n LOGGER.debug('HolderProver.dir_tails >>>')\n if not ok_rev_reg_id(rr_id):\n LOGGER.debug('HolderProver.dir_tails <!< Bad rev reg id %s', rr_id)\n raise BadIdentifier('Bad rev reg id {}'.format(rr_id))\n rv = Tails.dir(self._dir_tails, rr_id)\n LOGGER.debug('HolderProver.dir_tails <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "dir_tails",
"role": "assistant"
}
] |
async def open(self) ->'HolderProver':
"""
Explicit entry. Perform ancestor opening operations,
then parse cache from archive if so configured, and
synchronize revocation registry to tails tree content.
:return: current object
"""
LOGGER.debug('HolderProver.open >>>')
await super().open()
if self.config.get('parse-caches-on-open', False):
ArchivableCaches.parse(self.dir_cache)
for path_rr_id in Tails.links(self._dir_tails):
await self._sync_revoc_for_proof(basename(path_rr_id))
LOGGER.debug('HolderProver.open <<<')
return self
|
open
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def open(self) ->'HolderProver':\n \"\"\"\n Explicit entry. Perform ancestor opening operations,\n then parse cache from archive if so configured, and\n synchronize revocation registry to tails tree content.\n\n :return: current object\n \"\"\"\n LOGGER.debug('HolderProver.open >>>')\n await super().open()\n if self.config.get('parse-caches-on-open', False):\n ArchivableCaches.parse(self.dir_cache)\n for path_rr_id in Tails.links(self._dir_tails):\n await self._sync_revoc_for_proof(basename(path_rr_id))\n LOGGER.debug('HolderProver.open <<<')\n return self\n",
"role": "user"
},
{
"content": "open",
"role": "assistant"
}
] |
async def close(self) ->None:
"""
Explicit exit. If so configured, populate cache to prove all creds in
wallet offline if need be, archive cache, and purge prior cache archives.
:return: current object
"""
LOGGER.debug('HolderProver.close >>>')
if self.config.get('archive-holder-prover-caches-on-close', False):
await self.load_cache_for_proof(True)
ArchivableCaches.purge_archives(self.dir_cache, True)
await BaseAnchor.close(self)
for path_rr_id in Tails.links(self._dir_tails):
rr_id = basename(path_rr_id)
try:
await self._sync_revoc_for_proof(rr_id)
except ClosedPool:
LOGGER.warning(
'HolderProver sync-revoc on close required ledger for %s but pool was closed'
, rr_id)
LOGGER.debug('HolderProver.close <<<')
|
close
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def close(self) ->None:\n \"\"\"\n Explicit exit. If so configured, populate cache to prove all creds in\n wallet offline if need be, archive cache, and purge prior cache archives.\n\n :return: current object\n \"\"\"\n LOGGER.debug('HolderProver.close >>>')\n if self.config.get('archive-holder-prover-caches-on-close', False):\n await self.load_cache_for_proof(True)\n ArchivableCaches.purge_archives(self.dir_cache, True)\n await BaseAnchor.close(self)\n for path_rr_id in Tails.links(self._dir_tails):\n rr_id = basename(path_rr_id)\n try:\n await self._sync_revoc_for_proof(rr_id)\n except ClosedPool:\n LOGGER.warning(\n 'HolderProver sync-revoc on close required ledger for %s but pool was closed'\n , rr_id)\n LOGGER.debug('HolderProver.close <<<')\n",
"role": "user"
},
{
"content": "close",
"role": "assistant"
}
] |
async def rev_regs(self) ->list:
"""
Return list of revocation registry identifiers for which HolderProver has associated tails files.
The operation creates associations for any (newly copied, via service wrapper API) tails files without.
:return: list of revocation registry identifiers for which HolderProver has associated tails files
"""
LOGGER.debug('HolderProver.rev_regs >>>')
for path_rr_id in Tails.links(self._dir_tails):
await self._sync_revoc_for_proof(basename(path_rr_id))
rv = [basename(f) for f in Tails.links(self._dir_tails)]
LOGGER.debug('HolderProver.rev_regs <<< %s', rv)
return rv
|
rev_regs
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def rev_regs(self) ->list:\n \"\"\"\n Return list of revocation registry identifiers for which HolderProver has associated tails files.\n The operation creates associations for any (newly copied, via service wrapper API) tails files without.\n\n :return: list of revocation registry identifiers for which HolderProver has associated tails files\n \"\"\"\n LOGGER.debug('HolderProver.rev_regs >>>')\n for path_rr_id in Tails.links(self._dir_tails):\n await self._sync_revoc_for_proof(basename(path_rr_id))\n rv = [basename(f) for f in Tails.links(self._dir_tails)]\n LOGGER.debug('HolderProver.rev_regs <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "rev_regs",
"role": "assistant"
}
] |
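Taken together, the open, close and rev_regs records sketch a session lifecycle. The snippet below is illustrative only: construction of the HolderProver (wallet, pool, config) is not shown in these records and is assumed to have produced `holder` already.

async def demo_session(holder):
    await holder.open()   # parses archived caches if so configured, syncs tails links
    try:
        return await holder.rev_regs()   # rev reg ids with associated tails files
    finally:
        await holder.close()   # may archive caches, per 'archive-holder-prover-caches-on-close'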
async def offline_intervals(self, cd_ids: Union[str, Sequence[str]]) ->dict:
"""
Return default non-revocation intervals for input cred def ids, based on content of revocation cache,
for augmentation into specification for Verifier.build_proof_req_json. Note that the close() call
to set the anchor off-line extends all revocation cache registry delta entries to its time of execution:
in this case, the intervals will all be single timestamps rather than (to, fro) pairs.
Raise CacheIndex if proof request cites credential definition without corresponding
content in cred def cache or revocation cache.
:param cd_ids: credential definition identifier or sequence thereof
:return: dict mapping revocable cred def ids to interval specifications to augment into cd_id2spec
parameter for Verifier.build_proof_req_json(), and non-revocable cred def ids to empty dict; e.g.,
::
{
'Vx4E82R17q...:3:CL:16:tag': {
'interval': (1528111730, 1528115832)
},
'R17v42T4pk...:3:CL:19:tag': {},
'Z9ccax812j...:3:CL:27:tag': {
'interval': (1528112408, 1528116008)
},
'9cHbp54C8n...:3:CL:37:tag': {
'interval': 1528116426
},
'6caBcmLi33...:tag:CL:41:tag': {},
...
}
"""
LOGGER.debug('HolderProver.offline_intervals >>> cd_ids: %s', cd_ids)
rv = {}
for cd_id in ([cd_ids] if isinstance(cd_ids, str) else cd_ids):
if not ok_cred_def_id(cd_id):
LOGGER.debug(
'HolderProver.offline_intervals <!< Bad cred def id %s', cd_id)
raise BadIdentifier('Bad cred def id {}'.format(cd_id))
try:
cred_def = json.loads(await self.get_cred_def(cd_id))
except ClosedPool:
LOGGER.debug(
'HolderProver.offline_intervals <!< no such cred def %s in cred def cache'
, cd_id)
raise CacheIndex('No cached delta for non-revoc interval on {}'
.format(cd_id))
rv[cd_id] = {}
if 'revocation' in cred_def['value']:
with REVO_CACHE.lock:
fro, to = REVO_CACHE.dflt_interval(cd_id)
if not (fro and to):
LOGGER.debug(
'HolderProver.offline_intervals <!< no cached delta for non-revoc interval on %s'
, cd_id)
raise CacheIndex(
'No cached delta for non-revoc interval on {}'.
format(cd_id))
rv[cd_id]['interval'] = to if fro == to else (fro, to)
LOGGER.debug('HolderProver.offline_intervals <<< %s', rv)
return rv
|
offline_intervals
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def offline_intervals(self, cd_ids: Union[str, Sequence[str]]) ->dict:\n \"\"\"\n Return default non-revocation intervals for input cred def ids, based on content of revocation cache,\n for augmentation into specification for Verifier.build_proof_req_json. Note that the close() call\n to set the anchor off-line extends all revocation cache registry delta entries to its time of execution:\n in this case, the intervals will all be single timestamps rather than (to, fro) pairs.\n\n Raise CacheIndex if proof request cites credential definition without corresponding\n content in cred def cache or revocation cache.\n\n :param cd_ids: credential definition identifier or sequence thereof\n :return: dict mapping revocable cred def ids to interval specifications to augment into cd_id2spec\n parameter for Verifier.build_proof_req_json(), and non-revocable cred def ids to empty dict; e.g.,\n\n ::\n\n {\n 'Vx4E82R17q...:3:CL:16:tag': {\n 'interval': (1528111730, 1528115832)\n },\n 'R17v42T4pk...:3:CL:19:tag': {},\n 'Z9ccax812j...:3:CL:27:tag': {\n 'interval': (1528112408, 1528116008)\n },\n '9cHbp54C8n...:3:CL:37:tag': {\n 'interval': 1528116426\n },\n '6caBcmLi33...:tag:CL:41:tag': {},\n ...\n }\n \"\"\"\n LOGGER.debug('HolderProver.offline_intervals >>> cd_ids: %s', cd_ids)\n rv = {}\n for cd_id in ([cd_ids] if isinstance(cd_ids, str) else cd_ids):\n if not ok_cred_def_id(cd_id):\n LOGGER.debug(\n 'HolderProver.offline_intervals <!< Bad cred def id %s', cd_id)\n raise BadIdentifier('Bad cred def id {}'.format(cd_id))\n try:\n cred_def = json.loads(await self.get_cred_def(cd_id))\n except ClosedPool:\n LOGGER.debug(\n 'HolderProver.offline_intervals <!< no such cred def %s in cred def cache'\n , cd_id)\n raise CacheIndex('No cached delta for non-revoc interval on {}'\n .format(cd_id))\n rv[cd_id] = {}\n if 'revocation' in cred_def['value']:\n with REVO_CACHE.lock:\n fro, to = REVO_CACHE.dflt_interval(cd_id)\n if not (fro and to):\n LOGGER.debug(\n 'HolderProver.offline_intervals <!< no cached delta for non-revoc interval on %s'\n , cd_id)\n raise CacheIndex(\n 'No cached delta for non-revoc interval on {}'.\n format(cd_id))\n rv[cd_id]['interval'] = to if fro == to else (fro, to)\n LOGGER.debug('HolderProver.offline_intervals <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "offline_intervals",
"role": "assistant"
}
] |
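The offline_intervals docstring above says its return value augments the cd_id2spec parameter of Verifier.build_proof_req_json(); the sketch below assumes exactly that calling convention, plus hypothetical opened anchors `holder` and `verifier`, since the verifier side is not shown in these records.

async def demo_offline_proof_req(holder, verifier, cd_ids):
    cd_id2spec = await holder.offline_intervals(cd_ids)       # intervals from revocation cache
    return await verifier.build_proof_req_json(cd_id2spec)    # signature assumed per docstring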
async def create_link_secret(self, label: str) ->None:
"""
Create link secret (a.k.a. master secret) used in proofs by HolderProver, if the
current link secret does not already correspond to the input link secret label.
Raise WalletState if wallet is closed, or any other IndyError causing failure
to set link secret in wallet.
:param label: label for link secret; indy-sdk uses label to generate link secret
"""
LOGGER.debug('HolderProver.create_link_secret >>> label: %s', label)
await self.wallet.create_link_secret(label)
LOGGER.debug('HolderProver.create_link_secret <<<')
|
create_link_secret
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def create_link_secret(self, label: str) ->None:\n \"\"\"\n Create link secret (a.k.a. master secret) used in proofs by HolderProver, if the\n current link secret does not already correspond to the input link secret label.\n\n Raise WalletState if wallet is closed, or any other IndyError causing failure\n to set link secret in wallet.\n\n :param label: label for link secret; indy-sdk uses label to generate link secret\n \"\"\"\n LOGGER.debug('HolderProver.create_link_secret >>> label: %s', label)\n await self.wallet.create_link_secret(label)\n LOGGER.debug('HolderProver.create_link_secret <<<')\n",
"role": "user"
},
{
"content": "create_link_secret",
"role": "assistant"
}
] |
async def store_cred(self, cred_json: str, cred_req_metadata_json: str) ->str:
"""
Store cred in wallet as HolderProver, return its credential identifier as created in wallet.
Raise AbsentTails if tails file not available for revocation registry for input credential.
Raise WalletState if wallet is closed.
:param cred_json: credential json as HolderProver created
:param cred_req_metadata_json: credential request metadata json as HolderProver created via create_cred_req()
:return: credential identifier within wallet
"""
LOGGER.debug(
'HolderProver.store_cred >>> cred_json: %s, cred_req_metadata_json: %s'
, cred_json, cred_req_metadata_json)
if not self.wallet.handle:
LOGGER.debug('HolderProver.store_cred <!< Wallet %s is closed',
self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
cred = json.loads(cred_json)
cred_def_json = await self.get_cred_def(cred['cred_def_id'])
rr_id = cred['rev_reg_id']
rr_def_json = None
if rr_id:
await self._sync_revoc_for_proof(rr_id)
rr_def_json = await self.get_rev_reg_def(rr_id)
rv = await anoncreds.prover_store_credential(self.wallet.handle, None,
cred_req_metadata_json, cred_json, cred_def_json, rr_def_json)
LOGGER.debug('HolderProver.store_cred <<< %s', rv)
return rv
|
store_cred
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def store_cred(self, cred_json: str, cred_req_metadata_json: str) ->str:\n \"\"\"\n Store cred in wallet as HolderProver, return its credential identifier as created in wallet.\n\n Raise AbsentTails if tails file not available for revocation registry for input credential.\n Raise WalletState if wallet is closed.\n\n :param cred_json: credential json as HolderProver created\n :param cred_req_metadata_json: credential request metadata json as HolderProver created via create_cred_req()\n :return: credential identifier within wallet\n \"\"\"\n LOGGER.debug(\n 'HolderProver.store_cred >>> cred_json: %s, cred_req_metadata_json: %s'\n , cred_json, cred_req_metadata_json)\n if not self.wallet.handle:\n LOGGER.debug('HolderProver.store_cred <!< Wallet %s is closed',\n self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n cred = json.loads(cred_json)\n cred_def_json = await self.get_cred_def(cred['cred_def_id'])\n rr_id = cred['rev_reg_id']\n rr_def_json = None\n if rr_id:\n await self._sync_revoc_for_proof(rr_id)\n rr_def_json = await self.get_rev_reg_def(rr_id)\n rv = await anoncreds.prover_store_credential(self.wallet.handle, None,\n cred_req_metadata_json, cred_json, cred_def_json, rr_def_json)\n LOGGER.debug('HolderProver.store_cred <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "store_cred",
"role": "assistant"
}
] |
async def load_cache_for_proof(self, archive: bool=False) ->int:
"""
Load schema, cred def, revocation caches; optionally archive enough to go
offline and be able to generate proof on all credentials in wallet.
Return timestamp (epoch seconds) of cache load event, also used as subdirectory
for cache archives.
:param archive: True to archive now or False to demur (subclasses may still
need to augment archivable caches further)
:return: cache load event timestamp (epoch seconds)
"""
LOGGER.debug('HolderProver.load_cache_for_proof >>> archive: %s', archive)
rv = int(time())
box_ids = json.loads(await self.get_box_ids_held())
for s_id in box_ids['schema_id']:
with SCHEMA_CACHE.lock:
await self.get_schema(s_id)
for cd_id in box_ids['cred_def_id']:
with CRED_DEF_CACHE.lock:
await self.get_cred_def(cd_id)
for rr_id in box_ids['rev_reg_id']:
await self.get_rev_reg_def(rr_id)
with REVO_CACHE.lock:
revo_cache_entry = REVO_CACHE.get(rr_id, None)
if revo_cache_entry:
try:
await revo_cache_entry.get_delta_json(self.
_build_rr_delta_json, rv, rv)
except ClosedPool:
LOGGER.warning(
'HolderProver %s is offline from pool %s, cannot update revo cache reg delta for %s to %s'
, self.name, self.pool.name, rr_id, rv)
except AbsentPool:
LOGGER.warning(
'HolderProver %s has no pool, cannot update revo cache reg delta for %s to %s'
, self.name, rr_id, rv)
if archive:
ArchivableCaches.archive(self.dir_cache)
LOGGER.debug('HolderProver.load_cache_for_proof <<< %s', rv)
return rv
|
load_cache_for_proof
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def load_cache_for_proof(self, archive: bool=False) ->int:\n \"\"\"\n Load schema, cred def, revocation caches; optionally archive enough to go\n offline and be able to generate proof on all credentials in wallet.\n\n Return timestamp (epoch seconds) of cache load event, also used as subdirectory\n for cache archives.\n\n :param archive: True to archive now or False to demur (subclasses may still\n need to augment archivable caches further)\n :return: cache load event timestamp (epoch seconds)\n \"\"\"\n LOGGER.debug('HolderProver.load_cache_for_proof >>> archive: %s', archive)\n rv = int(time())\n box_ids = json.loads(await self.get_box_ids_held())\n for s_id in box_ids['schema_id']:\n with SCHEMA_CACHE.lock:\n await self.get_schema(s_id)\n for cd_id in box_ids['cred_def_id']:\n with CRED_DEF_CACHE.lock:\n await self.get_cred_def(cd_id)\n for rr_id in box_ids['rev_reg_id']:\n await self.get_rev_reg_def(rr_id)\n with REVO_CACHE.lock:\n revo_cache_entry = REVO_CACHE.get(rr_id, None)\n if revo_cache_entry:\n try:\n await revo_cache_entry.get_delta_json(self.\n _build_rr_delta_json, rv, rv)\n except ClosedPool:\n LOGGER.warning(\n 'HolderProver %s is offline from pool %s, cannot update revo cache reg delta for %s to %s'\n , self.name, self.pool.name, rr_id, rv)\n except AbsentPool:\n LOGGER.warning(\n 'HolderProver %s has no pool, cannot update revo cache reg delta for %s to %s'\n , self.name, rr_id, rv)\n if archive:\n ArchivableCaches.archive(self.dir_cache)\n LOGGER.debug('HolderProver.load_cache_for_proof <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "load_cache_for_proof",
"role": "assistant"
}
] |
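A small usage note for load_cache_for_proof: calling it with archive=True before disconnecting writes a cache archive named by the returned timestamp, which a later open() can parse when 'parse-caches-on-open' is configured. `holder` is a hypothetical opened HolderProver with a live pool.

async def demo_go_offline(holder):
    timestamp = await holder.load_cache_for_proof(archive=True)  # epoch seconds, names the archive subdirectory
    return timestamp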
async def get_box_ids_held(self) ->str:
"""
Return json object on lists of all unique box identifiers for credentials in wallet, as
evidenced by tails directory content:
* schema identifiers
* credential definition identifiers
* revocation registry identifiers.
E.g.,
::
{
"schema_id": [
"R17v42T4pk...:2:tombstone:1.2",
"9cHbp54C8n...:2:business:2.0",
...
],
"cred_def_id": [
"R17v42T4pk...:3:CL:19:tag",
"9cHbp54C8n...:3:CL:37:tag",
...
]
"rev_reg_id": [
"R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:0",
"R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:1",
"9cHbp54C8n...:4:9cHbp54C8n...:3:CL:37:tag:CL_ACCUM:0",
"9cHbp54C8n...:4:9cHbp54C8n...:3:CL:37:tag:CL_ACCUM:1",
"9cHbp54C8n...:4:9cHbp54C8n...:3:CL:37:tag:CL_ACCUM:2",
...
]
}
Raise WalletState if wallet is closed.
    :return: json object on lists of schema identifiers, cred def identifiers and rev reg identifiers
"""
LOGGER.debug('HolderProver.get_box_ids_held >>>')
if not self.wallet.handle:
LOGGER.debug('HolderProver.get_box_ids_held <!< Wallet %s is closed',
self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
rr_ids = {basename(link) for link in Tails.links(self._dir_tails)}
un_rr_ids = set()
for rr_id in rr_ids:
if not json.loads(await self.get_cred_infos_by_q(json.dumps({
'rev_reg_id': rr_id}), 1)):
un_rr_ids.add(rr_id)
rr_ids -= un_rr_ids
cd_ids = {cd_id for cd_id in listdir(self._dir_tails) if isdir(join(
self._dir_tails, cd_id)) and ok_cred_def_id(cd_id)}
s_ids = set()
for cd_id in cd_ids:
s_ids.add(json.loads(await self.get_schema(cred_def_id2seq_no(cd_id
)))['id'])
un_cd_ids = set()
for cd_id in cd_ids:
if not json.loads(await self.get_cred_infos_by_q(json.dumps({
'cred_def_id': cd_id}), 1)):
un_cd_ids.add(cd_id)
cd_ids -= un_cd_ids
un_s_ids = set()
for s_id in s_ids:
if not json.loads(await self.get_cred_infos_by_q(json.dumps({
'schema_id': s_id}), 1)):
un_s_ids.add(s_id)
s_ids -= un_s_ids
rv = json.dumps({'schema_id': list(s_ids), 'cred_def_id': list(cd_ids),
'rev_reg_id': list(rr_ids)})
LOGGER.debug('HolderProver.get_box_ids_held <<< %s', rv)
return rv
|
get_box_ids_held
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_box_ids_held(self) ->str:\n \"\"\"\n Return json object on lists of all unique box identifiers for credentials in wallet, as\n evidenced by tails directory content:\n\n * schema identifiers\n * credential definition identifiers\n * revocation registry identifiers.\n\n E.g.,\n\n ::\n\n {\n \"schema_id\": [\n \"R17v42T4pk...:2:tombstone:1.2\",\n \"9cHbp54C8n...:2:business:2.0\",\n ...\n ],\n \"cred_def_id\": [\n \"R17v42T4pk...:3:CL:19:tag\",\n \"9cHbp54C8n...:3:CL:37:tag\",\n ...\n ]\n \"rev_reg_id\": [\n \"R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:0\",\n \"R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:1\",\n \"9cHbp54C8n...:4:9cHbp54C8n...:3:CL:37:tag:CL_ACCUM:0\",\n \"9cHbp54C8n...:4:9cHbp54C8n...:3:CL:37:tag:CL_ACCUM:1\",\n \"9cHbp54C8n...:4:9cHbp54C8n...:3:CL:37:tag:CL_ACCUM:2\",\n ...\n ]\n }\n\n Raise WalletState if wallet is closed.\n\n :return: tuple of sets for schema ids, cred def ids, rev reg ids\n \"\"\"\n LOGGER.debug('HolderProver.get_box_ids_held >>>')\n if not self.wallet.handle:\n LOGGER.debug('HolderProver.get_box_ids_held <!< Wallet %s is closed',\n self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n rr_ids = {basename(link) for link in Tails.links(self._dir_tails)}\n un_rr_ids = set()\n for rr_id in rr_ids:\n if not json.loads(await self.get_cred_infos_by_q(json.dumps({\n 'rev_reg_id': rr_id}), 1)):\n un_rr_ids.add(rr_id)\n rr_ids -= un_rr_ids\n cd_ids = {cd_id for cd_id in listdir(self._dir_tails) if isdir(join(\n self._dir_tails, cd_id)) and ok_cred_def_id(cd_id)}\n s_ids = set()\n for cd_id in cd_ids:\n s_ids.add(json.loads(await self.get_schema(cred_def_id2seq_no(cd_id\n )))['id'])\n un_cd_ids = set()\n for cd_id in cd_ids:\n if not json.loads(await self.get_cred_infos_by_q(json.dumps({\n 'cred_def_id': cd_id}), 1)):\n un_cd_ids.add(cd_id)\n cd_ids -= un_cd_ids\n un_s_ids = set()\n for s_id in s_ids:\n if not json.loads(await self.get_cred_infos_by_q(json.dumps({\n 'schema_id': s_id}), 1)):\n un_s_ids.add(s_id)\n s_ids -= un_s_ids\n rv = json.dumps({'schema_id': list(s_ids), 'cred_def_id': list(cd_ids),\n 'rev_reg_id': list(rr_ids)})\n LOGGER.debug('HolderProver.get_box_ids_held <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "get_box_ids_held",
"role": "assistant"
}
] |
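The box-ids JSON structure documented above lends itself to a quick summary; the helper below (with a hypothetical opened `holder`) simply parses the three lists.

import json

async def demo_box_id_counts(holder):
    box_ids = json.loads(await holder.get_box_ids_held())
    return {key: len(box_ids[key]) for key in ('schema_id', 'cred_def_id', 'rev_reg_id')}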
async def get_cred_infos_by_q(self, query_json: str, limit: int=None) ->str:
"""
A cred-info aggregates:
* its wallet cred-id (aka wallet referent)
* its attribute names and values
* its schema identifier
* its credential definition identifier
* its revocation registry identifier
* its credential revocation identifier.
Return list of cred-infos from wallet by input WQL query;
return cred-infos for all credentials in wallet for no query.
Raise WalletState if the wallet is closed.
The operation supports a subset of WQL; i.e.,
::
query = {subquery}
subquery = {subquery, ..., subquery} - WHERE subquery AND ... AND subquery
subquery = $or: [{subquery},..., {subquery}] - WHERE subquery OR ... OR subquery
subquery = $not: {subquery} - Where NOT (subquery)
subquery = "tagName": tagValue - WHERE tagName == tagValue
subquery = "tagName": {$in: [tagValue, ..., tagValue]} - WHERE tagName IN (tagValue, ..., tagValue)
subquery = "tagName": {$neq: tagValue} - WHERE tagName != tagValue
but not
::
subquery = "tagName": {$gt: tagValue} - WHERE tagName > tagValue
subquery = "tagName": {$gte: tagValue} - WHERE tagName >= tagValue
subquery = "tagName": {$lt: tagValue} - WHERE tagName < tagValue
subquery = "tagName": {$lte: tagValue} - WHERE tagName <= tagValue
subquery = "tagName": {$like: tagValue} - WHERE tagName LIKE tagValue
:param query_json: WQL query json
:param limit: maximum number of results to return
:return: cred-infos as json list; i.e.,
::
[
{
"referent": string, # credential identifier in the wallet
"attrs": {
"attr1" : {"raw": "value1", "encoded": "value1_as_int" },
"attr2" : {"raw": "value2", "encoded": "value2_as_int" },
...
}
"schema_id": string,
"cred_def_id": string,
"rev_reg_id": Optional<string>,
"cred_rev_id": Optional<string>
},
...
]
"""
LOGGER.debug(
'HolderProver.get_cred_infos_by_q >>> query_json: %s, limit: %s',
query_json, limit)
if not self.wallet.handle:
LOGGER.debug('HolderProver.get_cred_infos_by_q <!< Wallet %s is closed'
, self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
infos = []
if limit and limit < 0:
limit = None
handle, cardinality = await anoncreds.prover_search_credentials(self.
wallet.handle, json.dumps(canon_cred_wql(json.loads(query_json))))
chunk = min(cardinality, limit or cardinality, Wallet.DEFAULT_CHUNK)
if limit:
cardinality = min(limit, cardinality)
try:
while len(infos) != cardinality:
batch = json.loads(await anoncreds.prover_fetch_credentials(
handle, chunk))
infos.extend(batch)
if len(batch) < chunk:
break
if len(infos) != cardinality:
LOGGER.warning(
'Credential search/limit indicated %s results but fetched %s',
cardinality, len(infos))
finally:
await anoncreds.prover_close_credentials_search(handle)
rv_json = json.dumps(infos)
LOGGER.debug('HolderProver.get_cred_infos_by_q <<< %s', rv_json)
return rv_json
|
get_cred_infos_by_q
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_cred_infos_by_q(self, query_json: str, limit: int=None) ->str:\n \"\"\"\n A cred-info aggregates:\n * its wallet cred-id (aka wallet referent)\n * its attribute names and values\n * its schema identifier\n * its credential definition identifier\n * its revocation registry identifier\n * its credential revocation identifier.\n\n Return list of cred-infos from wallet by input WQL query;\n return cred-infos for all credentials in wallet for no query.\n\n Raise WalletState if the wallet is closed.\n\n The operation supports a subset of WQL; i.e.,\n\n ::\n\n query = {subquery}\n subquery = {subquery, ..., subquery} - WHERE subquery AND ... AND subquery\n subquery = $or: [{subquery},..., {subquery}] - WHERE subquery OR ... OR subquery\n subquery = $not: {subquery} - Where NOT (subquery)\n subquery = \"tagName\": tagValue - WHERE tagName == tagValue\n subquery = \"tagName\": {$in: [tagValue, ..., tagValue]} - WHERE tagName IN (tagValue, ..., tagValue)\n subquery = \"tagName\": {$neq: tagValue} - WHERE tagName != tagValue\n\n but not\n\n ::\n\n subquery = \"tagName\": {$gt: tagValue} - WHERE tagName > tagValue\n subquery = \"tagName\": {$gte: tagValue} - WHERE tagName >= tagValue\n subquery = \"tagName\": {$lt: tagValue} - WHERE tagName < tagValue\n subquery = \"tagName\": {$lte: tagValue} - WHERE tagName <= tagValue\n subquery = \"tagName\": {$like: tagValue} - WHERE tagName LIKE tagValue\n\n :param query_json: WQL query json\n :param limit: maximum number of results to return\n\n :return: cred-infos as json list; i.e.,\n\n ::\n\n [\n {\n \"referent\": string, # credential identifier in the wallet\n \"attrs\": {\n \"attr1\" : {\"raw\": \"value1\", \"encoded\": \"value1_as_int\" },\n \"attr2\" : {\"raw\": \"value2\", \"encoded\": \"value2_as_int\" },\n ...\n }\n \"schema_id\": string,\n \"cred_def_id\": string,\n \"rev_reg_id\": Optional<string>,\n \"cred_rev_id\": Optional<string>\n },\n ...\n ]\n\n \"\"\"\n LOGGER.debug(\n 'HolderProver.get_cred_infos_by_q >>> query_json: %s, limit: %s',\n query_json, limit)\n if not self.wallet.handle:\n LOGGER.debug('HolderProver.get_cred_infos_by_q <!< Wallet %s is closed'\n , self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n infos = []\n if limit and limit < 0:\n limit = None\n handle, cardinality = await anoncreds.prover_search_credentials(self.\n wallet.handle, json.dumps(canon_cred_wql(json.loads(query_json))))\n chunk = min(cardinality, limit or cardinality, Wallet.DEFAULT_CHUNK)\n if limit:\n cardinality = min(limit, cardinality)\n try:\n while len(infos) != cardinality:\n batch = json.loads(await anoncreds.prover_fetch_credentials(\n handle, chunk))\n infos.extend(batch)\n if len(batch) < chunk:\n break\n if len(infos) != cardinality:\n LOGGER.warning(\n 'Credential search/limit indicated %s results but fetched %s',\n cardinality, len(infos))\n finally:\n await anoncreds.prover_close_credentials_search(handle)\n rv_json = json.dumps(infos)\n LOGGER.debug('HolderProver.get_cred_infos_by_q <<< %s', rv_json)\n return rv_json\n",
"role": "user"
},
{
"content": "get_cred_infos_by_q",
"role": "assistant"
}
] |
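The get_cred_infos_by_q docstring spells out the supported WQL subset; the sketch below sticks to the $or operator it lists as supported and to tags the surrounding records already query on ('cred_def_id', 'rev_reg_id'). `holder` is a hypothetical opened HolderProver.

import json

async def demo_wql_search(holder, cd_id, rr_id):
    # Restrict the query to operators and tags shown in the records above.
    query = {'$or': [{'cred_def_id': cd_id}, {'rev_reg_id': rr_id}]}
    infos = json.loads(await holder.get_cred_infos_by_q(json.dumps(query), 10))  # at most 10 results
    return [info['referent'] for info in infos]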
async def get_cred_infos_by_filter(self, filt: dict=None) ->str:
"""
Return cred-info (json list) from wallet by input filter for
schema identifier and/or credential definition identifier components;
return info of all credentials for no filter.
Raise WalletState if the wallet is closed.
:param filt: indy-sdk filter for credentials; i.e.,
::
{
"schema_id": string, # optional
"schema_issuer_did": string, # optional
"schema_name": string, # optional
"schema_version": string, # optional
"issuer_did": string, # optional
"cred_def_id": string # optional
}
:return: credential infos as json list; i.e.,
::
[
{
"referent": string, # credential identifier in the wallet
"attrs": {
"attr1" : {"raw": "value1", "encoded": "value1_as_int" },
"attr2" : {"raw": "value2", "encoded": "value2_as_int" },
...
}
"schema_id": string,
"cred_def_id": string,
"rev_reg_id": Optional<string>,
"cred_rev_id": Optional<string>
},
...
]
"""
LOGGER.debug('HolderProver.get_cred_infos_by_filter >>> filt: %s', filt)
if not self.wallet.handle:
LOGGER.debug(
'HolderProver.get_cred_infos_by_filter <!< Wallet %s is closed',
self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
rv_json = await anoncreds.prover_get_credentials(self.wallet.handle,
json.dumps(filt or {}))
LOGGER.debug('HolderProver.get_cred_infos_by_filter <<< %s', rv_json)
return rv_json
|
get_cred_infos_by_filter
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_cred_infos_by_filter(self, filt: dict=None) ->str:\n \"\"\"\n Return cred-info (json list) from wallet by input filter for\n schema identifier and/or credential definition identifier components;\n return info of all credentials for no filter.\n\n Raise WalletState if the wallet is closed.\n\n :param filt: indy-sdk filter for credentials; i.e.,\n\n ::\n\n {\n \"schema_id\": string, # optional\n \"schema_issuer_did\": string, # optional\n \"schema_name\": string, # optional\n \"schema_version\": string, # optional\n \"issuer_did\": string, # optional\n \"cred_def_id\": string # optional\n }\n\n :return: credential infos as json list; i.e.,\n\n ::\n\n [\n {\n \"referent\": string, # credential identifier in the wallet\n \"attrs\": {\n \"attr1\" : {\"raw\": \"value1\", \"encoded\": \"value1_as_int\" },\n \"attr2\" : {\"raw\": \"value2\", \"encoded\": \"value2_as_int\" },\n ...\n }\n \"schema_id\": string,\n \"cred_def_id\": string,\n \"rev_reg_id\": Optional<string>,\n \"cred_rev_id\": Optional<string>\n },\n ...\n ]\n\n \"\"\"\n LOGGER.debug('HolderProver.get_cred_infos_by_filter >>> filt: %s', filt)\n if not self.wallet.handle:\n LOGGER.debug(\n 'HolderProver.get_cred_infos_by_filter <!< Wallet %s is closed',\n self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n rv_json = await anoncreds.prover_get_credentials(self.wallet.handle,\n json.dumps(filt or {}))\n LOGGER.debug('HolderProver.get_cred_infos_by_filter <<< %s', rv_json)\n return rv_json\n",
"role": "user"
},
{
"content": "get_cred_infos_by_filter",
"role": "assistant"
}
] |
async def get_cred_info_by_id(self, cred_id: str) ->str:
"""
Return cred-info json from wallet by wallet credential identifier.
Raise AbsentCred for no such credential. Raise WalletState if the wallet is closed.
:param cred_id: credential identifier of interest
:return: json with cred for input credential identifier
:return: cred-info json; i.e.,
::
{
"referent": string, # credential identifier in the wallet
"attrs": {
"attr1" : {"raw": "value1", "encoded": "value1_as_int" },
"attr2" : {"raw": "value2", "encoded": "value2_as_int" },
...
}
"schema_id": string,
"cred_def_id": string,
"rev_reg_id": Optional<string>,
"cred_rev_id": Optional<string>
}
"""
LOGGER.debug('HolderProver.get_cred_info_by_id >>> cred_id: %s', cred_id)
if not self.wallet.handle:
LOGGER.debug('HolderProver.get_cred_info_by_id <!< Wallet %s is closed'
, self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
try:
rv_json = await anoncreds.prover_get_credential(self.wallet.handle,
cred_id)
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.WalletItemNotFound:
LOGGER.debug(
'HolderProver.get_cred_info_by_id <!< no cred in wallet %s for cred id %s'
, self.name, cred_id)
raise AbsentCred('No cred in wallet for {}'.format(cred_id))
LOGGER.debug(
'HolderProver.get_cred_info_by_id <!< wallet %s, cred id %s: indy error code %s'
, self.name, cred_id, x_indy.error_code)
raise
LOGGER.debug('HolderProver.get_cred_info_by_id <<< %s', rv_json)
return rv_json
|
get_cred_info_by_id
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_cred_info_by_id(self, cred_id: str) ->str:\n \"\"\"\n Return cred-info json from wallet by wallet credential identifier.\n\n Raise AbsentCred for no such credential. Raise WalletState if the wallet is closed.\n\n :param cred_id: credential identifier of interest\n :return: json with cred for input credential identifier\n\n :return: cred-info json; i.e.,\n\n ::\n\n {\n \"referent\": string, # credential identifier in the wallet\n \"attrs\": {\n \"attr1\" : {\"raw\": \"value1\", \"encoded\": \"value1_as_int\" },\n \"attr2\" : {\"raw\": \"value2\", \"encoded\": \"value2_as_int\" },\n ...\n }\n \"schema_id\": string,\n \"cred_def_id\": string,\n \"rev_reg_id\": Optional<string>,\n \"cred_rev_id\": Optional<string>\n }\n \"\"\"\n LOGGER.debug('HolderProver.get_cred_info_by_id >>> cred_id: %s', cred_id)\n if not self.wallet.handle:\n LOGGER.debug('HolderProver.get_cred_info_by_id <!< Wallet %s is closed'\n , self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n try:\n rv_json = await anoncreds.prover_get_credential(self.wallet.handle,\n cred_id)\n except IndyError as x_indy:\n if x_indy.error_code == ErrorCode.WalletItemNotFound:\n LOGGER.debug(\n 'HolderProver.get_cred_info_by_id <!< no cred in wallet %s for cred id %s'\n , self.name, cred_id)\n raise AbsentCred('No cred in wallet for {}'.format(cred_id))\n LOGGER.debug(\n 'HolderProver.get_cred_info_by_id <!< wallet %s, cred id %s: indy error code %s'\n , self.name, cred_id, x_indy.error_code)\n raise\n LOGGER.debug('HolderProver.get_cred_info_by_id <<< %s', rv_json)\n return rv_json\n",
"role": "user"
},
{
"content": "get_cred_info_by_id",
"role": "assistant"
}
] |
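Per the docstring above, get_cred_info_by_id raises AbsentCred for an unknown wallet credential identifier; a tolerant lookup might wrap it as below. The von_anchor.error import path is an assumption, and `holder` is a hypothetical opened HolderProver.

import json

from von_anchor.error import AbsentCred  # import path assumed, not confirmed by these records

async def demo_cred_info_or_none(holder, cred_id):
    try:
        return json.loads(await holder.get_cred_info_by_id(cred_id))
    except AbsentCred:
        return None  # no credential in the wallet under that identifier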
async def reset_wallet(self, seed: str=None) ->Wallet:
"""
Close and delete HolderProver wallet, then create and open a replacement on prior link secret.
Note that this operation effectively destroys private keys for credential definitions. Its
intended use is primarily for testing and demonstration.
Raise AbsentLinkSecret if link secret not set. Raise WalletState if the wallet is closed.
:param seed: seed to use for new wallet (default random)
:return: replacement wallet
"""
LOGGER.debug('HolderProver.reset_wallet >>>')
self.wallet = await WalletManager().reset(self.wallet, seed)
rv = self.wallet
LOGGER.debug('HolderProver.reset_wallet <<< %s', rv)
return rv
|
reset_wallet
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def reset_wallet(self, seed: str=None) ->Wallet:\n \"\"\"\n Close and delete HolderProver wallet, then create and open a replacement on prior link secret.\n Note that this operation effectively destroys private keys for credential definitions. Its\n intended use is primarily for testing and demonstration.\n\n Raise AbsentLinkSecret if link secret not set. Raise WalletState if the wallet is closed.\n\n :param seed: seed to use for new wallet (default random)\n :return: replacement wallet\n \"\"\"\n LOGGER.debug('HolderProver.reset_wallet >>>')\n self.wallet = await WalletManager().reset(self.wallet, seed)\n rv = self.wallet\n LOGGER.debug('HolderProver.reset_wallet <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "reset_wallet",
"role": "assistant"
}
] |
def x(self, context):
current = context
try:
for bit in self.lookups:
try:
current = current[bit]
except (TypeError, AttributeError, KeyError, ValueError, IndexError
):
try:
if isinstance(current, BaseContext) and getattr(type(
current), bit):
raise AttributeError
current = getattr(current, bit)
except (TypeError, AttributeError) as e:
if isinstance(e, AttributeError) and not isinstance(current
, BaseContext) and bit in dir(current):
raise
try:
current = current[int(bit)]
except (IndexError, ValueError, KeyError, TypeError):
raise VariableDoesNotExist(
'Failed lookup for key [%s] in %r', (bit, current))
if callable(current):
if getattr(current, 'do_not_call_in_templates', False):
pass
elif getattr(current, 'alters_data', False):
try:
current = context.template.engine.string_if_invalid
except AttributeError:
current = settings.TEMPLATE_STRING_IF_INVALID
else:
try:
current = current()
except TypeError:
try:
inspect.getcallargs(current)
except TypeError:
current = context.template.engine.string_if_invalid
else:
raise
elif isinstance(current, Model):
if 'request' in context and hasattr(context['request'],
'_ultracache'):
ct = ContentType.objects.get_for_model(current.__class__)
context['request']._ultracache.append((ct.id, current.pk))
except Exception as e:
template_name = getattr(context, 'template_name', None) or 'unknown'
if logger is not None:
logger.debug(
'Exception while resolving variable "%s" in template "%s".',
bit, template_name, exc_info=True)
if getattr(e, 'silent_variable_failure', False):
current = context.template.engine.string_if_invalid
else:
raise
return current
|
my_resolve_lookup
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, context):\n current = context\n try:\n for bit in self.lookups:\n try:\n current = current[bit]\n except (TypeError, AttributeError, KeyError, ValueError, IndexError\n ):\n try:\n if isinstance(current, BaseContext) and getattr(type(\n current), bit):\n raise AttributeError\n current = getattr(current, bit)\n except (TypeError, AttributeError) as e:\n if isinstance(e, AttributeError) and not isinstance(current\n , BaseContext) and bit in dir(current):\n raise\n try:\n current = current[int(bit)]\n except (IndexError, ValueError, KeyError, TypeError):\n raise VariableDoesNotExist(\n 'Failed lookup for key [%s] in %r', (bit, current))\n if callable(current):\n if getattr(current, 'do_not_call_in_templates', False):\n pass\n elif getattr(current, 'alters_data', False):\n try:\n current = context.template.engine.string_if_invalid\n except AttributeError:\n current = settings.TEMPLATE_STRING_IF_INVALID\n else:\n try:\n current = current()\n except TypeError:\n try:\n inspect.getcallargs(current)\n except TypeError:\n current = context.template.engine.string_if_invalid\n else:\n raise\n elif isinstance(current, Model):\n if 'request' in context and hasattr(context['request'],\n '_ultracache'):\n ct = ContentType.objects.get_for_model(current.__class__)\n context['request']._ultracache.append((ct.id, current.pk))\n except Exception as e:\n template_name = getattr(context, 'template_name', None) or 'unknown'\n if logger is not None:\n logger.debug(\n 'Exception while resolving variable \"%s\" in template \"%s\".',\n bit, template_name, exc_info=True)\n if getattr(e, 'silent_variable_failure', False):\n current = context.template.engine.string_if_invalid\n else:\n raise\n return current\n",
"role": "user"
},
{
"content": "my_resolve_lookup",
"role": "assistant"
}
] |
def x(self) ->dict:
LOGGER.debug('SchemaCache.index >>>')
rv = self._seq_no2schema_key
LOGGER.debug('SchemaCache.index <<< %s', rv)
return rv
|
index
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self) ->dict:\n LOGGER.debug('SchemaCache.index >>>')\n rv = self._seq_no2schema_key\n LOGGER.debug('SchemaCache.index <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "index",
"role": "assistant"
}
] |
def x(self, seq_no: int) ->SchemaKey:
LOGGER.debug('SchemaCache.schema_key_for >>> seq_no: %s', seq_no)
rv = self._seq_no2schema_key.get(seq_no, None)
LOGGER.debug('SchemaCache.schema_key_for <<< %s', rv)
return rv
|
schema_key_for
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, seq_no: int) ->SchemaKey:\n LOGGER.debug('SchemaCache.schema_key_for >>> seq_no: %s', seq_no)\n rv = self._seq_no2schema_key.get(seq_no, None)\n LOGGER.debug('SchemaCache.schema_key_for <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "schema_key_for",
"role": "assistant"
}
] |
def x(self) ->list:
LOGGER.debug('SchemaCache.schemata >>>')
LOGGER.debug('SchemaCache.schemata <<<')
return [self._schema_key2schema[seq_no] for seq_no in self.
_schema_key2schema]
|
schemata
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self) ->list:\n LOGGER.debug('SchemaCache.schemata >>>')\n LOGGER.debug('SchemaCache.schemata <<<')\n return [self._schema_key2schema[seq_no] for seq_no in self.\n _schema_key2schema]\n",
"role": "user"
},
{
"content": "schemata",
"role": "assistant"
}
] |
def x(self) ->None:
LOGGER.debug('SchemaCache.clear >>>')
self._schema_key2schema = {}
self._seq_no2schema_key = {}
LOGGER.debug('SchemaCache.clear <<<')
|
clear
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self) ->None:\n LOGGER.debug('SchemaCache.clear >>>')\n self._schema_key2schema = {}\n self._seq_no2schema_key = {}\n LOGGER.debug('SchemaCache.clear <<<')\n",
"role": "user"
},
{
"content": "clear",
"role": "assistant"
}
] |
async def _get_update(self, rr_builder: Callable, fro: int, to: int, delta:
bool) ->(str, int):
"""
Get rev reg delta/state json, and its timestamp on the distributed ledger,
from cached rev reg delta/state frames list or distributed ledger,
updating cache as necessary.
Raise BadRevStateTime if caller asks for a delta/state in the future. Raise ClosedPool
if an update requires the ledger but the node pool is closed.
Issuer anchors cannot revoke retroactively.
Hence, for any new request against asked-for interval (fro, to):
* if the cache has a frame f on f.timestamp <= to <= f.to,
> return its rev reg delta/state; e.g., starred frame below:
Frames: --------[xxxxx]----[xx]-----[*********]-----[x]-----------[xx]---------> time
Fro-to: ^----^
* otherwise, if there is a maximum frame f with fro <= f.to and f.timestamp <= to
> return its rev reg delta/state; e.g., starred frame below:
Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[*]-----------[xx]---------> time
Fro-to: ^----------------------------------^
* otherwise, if the cache has a frame f on f.timestamp < to,
> check the distributed ledger for a delta to/state for the rev reg since e.timestamp;
- if there is one, bake it through 'to' into a new delta/state, add new frame to cache and
return rev reg delta/state; e.g., starred frame below:
Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time
Fro-to: ^------^
Ledger: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]--!--------[xx]---------> time
Update: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]--[*****]--[xx]---------> time
- otherwise, update the 'to' time in the frame and return the rev reg delta/state;
e.g., starred frame below:
Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time
Fro-to: ^------^
Ledger: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time
Update: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[**********]--[xx]---------> time
* otherwise, there is no cache frame f on f.timestamp < to:
> create new frame and add it to cache; return rev reg delta/state; e.g., starred frame below:
Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[*]-----------[xx]-----> time
Fro-to: ^--^
Ledger: -!------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time
Update: -[***]--[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time
On return of any previously existing rev reg delta/state frame, always update its query time beforehand.
:param rr_builder: callback to build rev reg delta/state if need be (specify holder-prover anchor's
_build_rr_delta_json() or verifier anchor's _build_rr_state_json() as needed)
:param fro: least time (epoch seconds) of interest; lower-bounds 'to' on frame housing return data
:param to: greatest time (epoch seconds) of interest; upper-bounds returned revocation delta/state timestamp
:param delta: True to operate on rev reg deltas, False for states
:return: rev reg delta/state json and ledger timestamp (epoch seconds)
"""
LOGGER.debug(
'RevoCacheEntry.get_update >>> rr_builder: %s, fro: %s, to: %s, delta: %s'
, rr_builder.__name__, fro, to, delta)
if fro > to:
fro, to = to, fro
now = int(time())
if to > now:
LOGGER.debug(
'RevoCacheEntry._get_update <!< Cannot query a rev reg %s in the future (%s > %s)'
, 'delta' if delta else 'state', to, now)
raise BadRevStateTime(
'Cannot query a rev reg {} in the future ({} > {})'.format(
'delta' if delta else 'state', to, now))
cache_frame = None
rr_update_json = None
rr_frames = self.rr_delta_frames if delta else self.rr_state_frames
frames = [frame for frame in rr_frames if frame.timestamp <= to <= frame.to
]
if frames:
cache_frame = max(frames, key=lambda f: f.timestamp)
else:
frames = [frame for frame in rr_frames if fro <= frame.to and frame
.timestamp <= to]
if frames:
cache_frame = max(frames, key=lambda f: f.timestamp)
if not frames:
frames = [frame for frame in rr_frames if frame.timestamp < to]
if frames:
latest_cached = max(frames, key=lambda frame: frame.timestamp)
if delta:
rr_update_json, timestamp = await rr_builder(self.
rev_reg_def['id'], to=to, fro=latest_cached.timestamp,
fro_delta=latest_cached.rr_update)
else:
rr_update_json, timestamp = await rr_builder(self.
rev_reg_def['id'], to)
if timestamp == latest_cached.timestamp:
latest_cached.to = to
cache_frame = latest_cached
else:
rr_update_json, timestamp = await rr_builder(self.rev_reg_def[
'id'], to)
if cache_frame is None:
cache_frame = RevRegUpdateFrame(to, timestamp, json.loads(
rr_update_json))
rr_frames.append(cache_frame)
self.cull(delta)
else:
cache_frame.qtime = int(time())
rv = json.dumps(cache_frame.rr_update), cache_frame.timestamp
LOGGER.debug('RevoCacheEntry._get_update <<< %s', rv)
return rv
|
get_update
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def _get_update(self, rr_builder: Callable, fro: int, to: int, delta:\n bool) ->(str, int):\n \"\"\"\n Get rev reg delta/state json, and its timestamp on the distributed ledger,\n from cached rev reg delta/state frames list or distributed ledger,\n updating cache as necessary.\n\n Raise BadRevStateTime if caller asks for a delta/state in the future. Raise ClosedPool\n if an update requires the ledger but the node pool is closed.\n\n Issuer anchors cannot revoke retroactively.\n Hence, for any new request against asked-for interval (fro, to):\n * if the cache has a frame f on f.timestamp <= to <= f.to,\n > return its rev reg delta/state; e.g., starred frame below:\n\n Frames: --------[xxxxx]----[xx]-----[*********]-----[x]-----------[xx]---------> time\n Fro-to: ^----^\n\n * otherwise, if there is a maximum frame f with fro <= f.to and f.timestamp <= to\n > return its rev reg delta/state; e.g., starred frame below:\n\n Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[*]-----------[xx]---------> time\n Fro-to: ^----------------------------------^\n\n * otherwise, if the cache has a frame f on f.timestamp < to,\n > check the distributed ledger for a delta to/state for the rev reg since e.timestamp;\n - if there is one, bake it through 'to' into a new delta/state, add new frame to cache and\n return rev reg delta/state; e.g., starred frame below:\n\n Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time\n Fro-to: ^------^\n Ledger: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]--!--------[xx]---------> time\n Update: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]--[*****]--[xx]---------> time\n\n - otherwise, update the 'to' time in the frame and return the rev reg delta/state;\n e.g., starred frame below:\n\n Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time\n Fro-to: ^------^\n Ledger: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time\n Update: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[**********]--[xx]---------> time\n\n * otherwise, there is no cache frame f on f.timestamp < to:\n > create new frame and add it to cache; return rev reg delta/state; e.g., starred frame below:\n\n Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[*]-----------[xx]-----> time\n Fro-to: ^--^\n Ledger: -!------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time\n Update: -[***]--[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time\n\n On return of any previously existing rev reg delta/state frame, always update its query time beforehand.\n\n :param rr_builder: callback to build rev reg delta/state if need be (specify holder-prover anchor's\n _build_rr_delta_json() or verifier anchor's _build_rr_state_json() as needed)\n :param fro: least time (epoch seconds) of interest; lower-bounds 'to' on frame housing return data\n :param to: greatest time (epoch seconds) of interest; upper-bounds returned revocation delta/state timestamp\n :param delta: True to operate on rev reg deltas, False for states\n :return: rev reg delta/state json and ledger timestamp (epoch seconds)\n \"\"\"\n LOGGER.debug(\n 'RevoCacheEntry.get_update >>> rr_builder: %s, fro: %s, to: %s, delta: %s'\n , rr_builder.__name__, fro, to, delta)\n if fro > to:\n fro, to = to, fro\n now = int(time())\n if to > now:\n LOGGER.debug(\n 'RevoCacheEntry._get_update <!< Cannot query a rev reg %s in the future (%s > %s)'\n , 'delta' if delta else 'state', to, now)\n raise BadRevStateTime(\n 'Cannot 
query a rev reg {} in the future ({} > {})'.format(\n 'delta' if delta else 'state', to, now))\n cache_frame = None\n rr_update_json = None\n rr_frames = self.rr_delta_frames if delta else self.rr_state_frames\n frames = [frame for frame in rr_frames if frame.timestamp <= to <= frame.to\n ]\n if frames:\n cache_frame = max(frames, key=lambda f: f.timestamp)\n else:\n frames = [frame for frame in rr_frames if fro <= frame.to and frame\n .timestamp <= to]\n if frames:\n cache_frame = max(frames, key=lambda f: f.timestamp)\n if not frames:\n frames = [frame for frame in rr_frames if frame.timestamp < to]\n if frames:\n latest_cached = max(frames, key=lambda frame: frame.timestamp)\n if delta:\n rr_update_json, timestamp = await rr_builder(self.\n rev_reg_def['id'], to=to, fro=latest_cached.timestamp,\n fro_delta=latest_cached.rr_update)\n else:\n rr_update_json, timestamp = await rr_builder(self.\n rev_reg_def['id'], to)\n if timestamp == latest_cached.timestamp:\n latest_cached.to = to\n cache_frame = latest_cached\n else:\n rr_update_json, timestamp = await rr_builder(self.rev_reg_def[\n 'id'], to)\n if cache_frame is None:\n cache_frame = RevRegUpdateFrame(to, timestamp, json.loads(\n rr_update_json))\n rr_frames.append(cache_frame)\n self.cull(delta)\n else:\n cache_frame.qtime = int(time())\n rv = json.dumps(cache_frame.rr_update), cache_frame.timestamp\n LOGGER.debug('RevoCacheEntry._get_update <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "get_update",
"role": "assistant"
}
] |
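The frame-selection rules that the _get_update() docstring above walks through can be modelled in isolation. The sketch below is a minimal stand-alone illustration, not von_anchor code: Frame and the sample data are hypothetical, and only the two cache-hit rules are shown; the remaining cases fall through to the ledger lookup that _get_update() performs via rr_builder.

# Minimal stand-alone sketch of the cache-frame selection rules described above.
from collections import namedtuple

Frame = namedtuple('Frame', 'to timestamp')  # to: latest queried time covered; timestamp: ledger time

def select_frame(frames, fro, to):
    # Rule 1: a frame whose [timestamp, to] interval contains the asked-for 'to'
    hits = [f for f in frames if f.timestamp <= to <= f.to]
    if hits:
        return max(hits, key=lambda f: f.timestamp)
    # Rule 2: the latest frame overlapping the asked-for (fro, to) interval
    hits = [f for f in frames if fro <= f.to and f.timestamp <= to]
    if hits:
        return max(hits, key=lambda f: f.timestamp)
    return None  # remaining cases: caller must consult the ledger and build or extend a frame

frames = [Frame(to=110, timestamp=100), Frame(to=220, timestamp=200)]
assert select_frame(frames, 205, 210).timestamp == 200  # rule 1: 200 <= 210 <= 220
assert select_frame(frames, 105, 150).timestamp == 100  # rule 2: 105 <= 110 and 100 <= 150
assert select_frame(frames, 300, 310) is None            # ledger lookup needed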
async def get_delta_json(self, rr_delta_builder: Callable[['HolderProver',
str, int, int, dict], Awaitable[Tuple[str, int]]], fro: int, to: int) ->(
str, int):
"""
Get rev reg delta json, and its timestamp on the distributed ledger,
from cached rev reg delta frames list or distributed ledger,
updating cache as necessary.
Raise BadRevStateTime if caller asks for a delta to the future.
On return of any previously existing rev reg delta frame, always update its query time beforehand.
:param rr_delta_builder: callback to build rev reg delta if need be (specify anchor instance's
_build_rr_delta())
:param fro: least time (epoch seconds) of interest; lower-bounds 'to' on frame housing return data
:param to: greatest time (epoch seconds) of interest; upper-bounds returned revocation delta timestamp
:return: rev reg delta json and ledger timestamp (epoch seconds)
"""
LOGGER.debug(
'RevoCacheEntry.get_delta_json >>> rr_delta_builder: %s, fro: %s, to: %s'
, rr_delta_builder.__name__, fro, to)
rv = await self._get_update(rr_delta_builder, fro, to, True)
LOGGER.debug('RevoCacheEntry.get_delta_json <<< %s', rv)
return rv
|
get_delta_json
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_delta_json(self, rr_delta_builder: Callable[['HolderProver',\n str, int, int, dict], Awaitable[Tuple[str, int]]], fro: int, to: int) ->(\n str, int):\n \"\"\"\n Get rev reg delta json, and its timestamp on the distributed ledger,\n from cached rev reg delta frames list or distributed ledger,\n updating cache as necessary.\n\n Raise BadRevStateTime if caller asks for a delta to the future.\n\n On return of any previously existing rev reg delta frame, always update its query time beforehand.\n\n :param rr_delta_builder: callback to build rev reg delta if need be (specify anchor instance's\n _build_rr_delta())\n :param fro: least time (epoch seconds) of interest; lower-bounds 'to' on frame housing return data\n :param to: greatest time (epoch seconds) of interest; upper-bounds returned revocation delta timestamp\n :return: rev reg delta json and ledger timestamp (epoch seconds)\n \"\"\"\n LOGGER.debug(\n 'RevoCacheEntry.get_delta_json >>> rr_delta_builder: %s, fro: %s, to: %s'\n , rr_delta_builder.__name__, fro, to)\n rv = await self._get_update(rr_delta_builder, fro, to, True)\n LOGGER.debug('RevoCacheEntry.get_delta_json <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "get_delta_json",
"role": "assistant"
}
] |
async def get_state_json(self, rr_state_builder: Callable[['Verifier', str,
int], Awaitable[Tuple[str, int]]], fro: int, to: int) ->(str, int):
"""
Get rev reg state json, and its timestamp on the distributed ledger,
from cached rev reg state frames list or distributed ledger,
updating cache as necessary.
Raise BadRevStateTime if caller asks for a state in the future.
On return of any previously existing rev reg state frame, always update its query time beforehand.
:param rr_state_builder: callback to build rev reg state if need be (specify anchor instance's
_build_rr_state())
:param fro: least time (epoch seconds) of interest; lower-bounds 'to' on frame housing return data
:param to: greatest time (epoch seconds) of interest; upper-bounds returned revocation state timestamp
:return: rev reg state json and ledger timestamp (epoch seconds)
"""
LOGGER.debug(
'RevoCacheEntry.get_state_json >>> rr_state_builder: %s, fro: %s, to: %s'
, rr_state_builder.__name__, fro, to)
rv = await self._get_update(rr_state_builder, fro, to, False)
LOGGER.debug('RevoCacheEntry.get_state_json <<< %s', rv)
return rv
|
get_state_json
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_state_json(self, rr_state_builder: Callable[['Verifier', str,\n int], Awaitable[Tuple[str, int]]], fro: int, to: int) ->(str, int):\n \"\"\"\n Get rev reg state json, and its timestamp on the distributed ledger,\n from cached rev reg state frames list or distributed ledger,\n updating cache as necessary.\n\n Raise BadRevStateTime if caller asks for a state in the future.\n\n On return of any previously existing rev reg state frame, always update its query time beforehand.\n\n :param rr_state_builder: callback to build rev reg state if need be (specify anchor instance's\n _build_rr_state())\n :param fro: least time (epoch seconds) of interest; lower-bounds 'to' on frame housing return data\n :param to: greatest time (epoch seconds) of interest; upper-bounds returned revocation state timestamp\n :return: rev reg state json and ledger timestamp (epoch seconds)\n \"\"\"\n LOGGER.debug(\n 'RevoCacheEntry.get_state_json >>> rr_state_builder: %s, fro: %s, to: %s'\n , rr_state_builder.__name__, fro, to)\n rv = await self._get_update(rr_state_builder, fro, to, False)\n LOGGER.debug('RevoCacheEntry.get_state_json <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "get_state_json",
"role": "assistant"
}
] |
def x() ->None:
LOGGER.debug('clear >>>')
with SCHEMA_CACHE.lock:
SCHEMA_CACHE.clear()
with CRED_DEF_CACHE.lock:
CRED_DEF_CACHE.clear()
with REVO_CACHE.lock:
REVO_CACHE.clear()
LOGGER.debug('clear <<<')
|
clear
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x() ->None:\n LOGGER.debug('clear >>>')\n with SCHEMA_CACHE.lock:\n SCHEMA_CACHE.clear()\n with CRED_DEF_CACHE.lock:\n CRED_DEF_CACHE.clear()\n with REVO_CACHE.lock:\n REVO_CACHE.clear()\n LOGGER.debug('clear <<<')\n",
"role": "user"
},
{
"content": "clear",
"role": "assistant"
}
] |
def x(base_dir: str) ->int:
LOGGER.debug('archive >>> base_dir: %s', base_dir)
rv = int(time())
timestamp_dir = join(base_dir, str(rv))
makedirs(timestamp_dir, exist_ok=True)
with SCHEMA_CACHE.lock:
with open(join(timestamp_dir, 'schema'), 'w') as archive:
print(json.dumps(SCHEMA_CACHE.schemata()), file=archive)
with CRED_DEF_CACHE.lock:
with open(join(timestamp_dir, 'cred_def'), 'w') as archive:
print(json.dumps(CRED_DEF_CACHE), file=archive)
with REVO_CACHE.lock:
with open(join(timestamp_dir, 'revocation'), 'w') as archive:
revo_cache_dict = {}
for rr_id in REVO_CACHE:
revo_cache_dict[rr_id] = {'rev_reg_def': REVO_CACHE[rr_id].
rev_reg_def, 'rr_delta_frames': [vars(f) for f in
REVO_CACHE[rr_id].rr_delta_frames], 'rr_state_frames':
[vars(f) for f in REVO_CACHE[rr_id].rr_state_frames]}
print(json.dumps(revo_cache_dict), file=archive)
LOGGER.debug('archive <<< %s', rv)
return rv
|
archive
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(base_dir: str) ->int:\n LOGGER.debug('archive >>> base_dir: %s', base_dir)\n rv = int(time())\n timestamp_dir = join(base_dir, str(rv))\n makedirs(timestamp_dir, exist_ok=True)\n with SCHEMA_CACHE.lock:\n with open(join(timestamp_dir, 'schema'), 'w') as archive:\n print(json.dumps(SCHEMA_CACHE.schemata()), file=archive)\n with CRED_DEF_CACHE.lock:\n with open(join(timestamp_dir, 'cred_def'), 'w') as archive:\n print(json.dumps(CRED_DEF_CACHE), file=archive)\n with REVO_CACHE.lock:\n with open(join(timestamp_dir, 'revocation'), 'w') as archive:\n revo_cache_dict = {}\n for rr_id in REVO_CACHE:\n revo_cache_dict[rr_id] = {'rev_reg_def': REVO_CACHE[rr_id].\n rev_reg_def, 'rr_delta_frames': [vars(f) for f in\n REVO_CACHE[rr_id].rr_delta_frames], 'rr_state_frames':\n [vars(f) for f in REVO_CACHE[rr_id].rr_state_frames]}\n print(json.dumps(revo_cache_dict), file=archive)\n LOGGER.debug('archive <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "archive",
"role": "assistant"
}
] |
def x(base_dir: str, retain_latest: bool=False) ->None:
LOGGER.debug('purge_archives >>> base_dir: %s, retain_latest: %s',
base_dir, retain_latest)
if isdir(base_dir):
timestamps = sorted([int(t) for t in listdir(base_dir) if t.isdigit()])
if retain_latest and timestamps:
timestamps.pop()
for timestamp in timestamps:
timestamp_dir = join(base_dir, str(timestamp))
rmtree(timestamp_dir)
LOGGER.info('Purged archive cache directory %s', timestamp_dir)
LOGGER.debug('purge_archives <<<')
|
purge_archives
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(base_dir: str, retain_latest: bool=False) ->None:\n LOGGER.debug('purge_archives >>> base_dir: %s, retain_latest: %s',\n base_dir, retain_latest)\n if isdir(base_dir):\n timestamps = sorted([int(t) for t in listdir(base_dir) if t.isdigit()])\n if retain_latest and timestamps:\n timestamps.pop()\n for timestamp in timestamps:\n timestamp_dir = join(base_dir, str(timestamp))\n rmtree(timestamp_dir)\n LOGGER.info('Purged archive cache directory %s', timestamp_dir)\n LOGGER.debug('purge_archives <<<')\n",
"role": "user"
},
{
"content": "purge_archives",
"role": "assistant"
}
] |
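The two functions above cooperate on a directory-per-timestamp layout: archive() writes 'schema', 'cred_def' and 'revocation' files under a new epoch-named directory, and purge_archives() deletes all but (optionally) the newest such directory. The following stand-alone sketch only illustrates that layout, with hypothetical, empty cache content.

# Stand-alone illustration of the timestamped archive layout (hypothetical data).
import json
import tempfile
from os import listdir, makedirs
from os.path import join

base_dir = tempfile.mkdtemp()
for ts in (1000, 2000, 3000):  # three archive snapshots, oldest to newest
    makedirs(join(base_dir, str(ts)), exist_ok=True)
    with open(join(base_dir, str(ts), 'schema'), 'w') as archive_fh:
        print(json.dumps({}), file=archive_fh)  # real archives hold cache contents

timestamps = sorted(int(t) for t in listdir(base_dir) if t.isdigit())
assert timestamps == [1000, 2000, 3000]
# purge_archives(base_dir, retain_latest=True) would drop 1000 and 2000, keeping 3000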
def x(pairwise: PairwiseInfo) ->dict:
rv = {canon_pairwise_tag(tag): raw(pairwise.metadata[tag]) for tag in
pairwise.metadata or {}}
rv['~their_did'] = pairwise.their_did
rv['~their_verkey'] = pairwise.their_verkey
rv['~my_did'] = pairwise.my_did
rv['~my_verkey'] = pairwise.my_verkey
if not StorageRecord.ok_tags(rv):
raise BadRecord('Pairwise metadata {} must map strings to strings'.
format(rv))
return rv
|
pairwise_info2tags
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(pairwise: PairwiseInfo) ->dict:\n rv = {canon_pairwise_tag(tag): raw(pairwise.metadata[tag]) for tag in \n pairwise.metadata or {}}\n rv['~their_did'] = pairwise.their_did\n rv['~their_verkey'] = pairwise.their_verkey\n rv['~my_did'] = pairwise.my_did\n rv['~my_verkey'] = pairwise.my_verkey\n if not StorageRecord.ok_tags(rv):\n raise BadRecord('Pairwise metadata {} must map strings to strings'.\n format(rv))\n return rv\n",
"role": "user"
},
{
"content": "pairwise_info2tags",
"role": "assistant"
}
] |
def x(storec: StorageRecord) ->PairwiseInfo:
return PairwiseInfo(storec.id, storec.value, storec.tags['~my_did'],
storec.tags['~my_verkey'], {tag[tag.startswith('~'):]: storec.tags[
tag] for tag in storec.tags or {}})
|
storage_record2pairwise_info
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(storec: StorageRecord) ->PairwiseInfo:\n return PairwiseInfo(storec.id, storec.value, storec.tags['~my_did'],\n storec.tags['~my_verkey'], {tag[tag.startswith('~'):]: storec.tags[\n tag] for tag in storec.tags or {}})\n",
"role": "user"
},
{
"content": "storage_record2pairwise_info",
"role": "assistant"
}
] |
def x(self, config: dict) ->dict:
assert {'name', 'id'} & {k for k in config}
return {'id': config.get('name', config.get('id')), 'storage_type':
config.get('storage_type', self.default_storage_type),
'freshness_time': config.get('freshness_time', self.
default_freshness_time)}
|
config2indy
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, config: dict) ->dict:\n assert {'name', 'id'} & {k for k in config}\n return {'id': config.get('name', config.get('id')), 'storage_type':\n config.get('storage_type', self.default_storage_type),\n 'freshness_time': config.get('freshness_time', self.\n default_freshness_time)}\n",
"role": "user"
},
{
"content": "config2indy",
"role": "assistant"
}
] |
def x(self, config: dict, access: str=None) ->dict:
rv = {k: config.get(k, self._defaults[k]) for k in ('auto_create',
'auto_remove')}
rv['access'] = access or self.default_access
for key in ('seed', 'did', 'link_secret_label'):
if key in config:
rv[key] = config[key]
return rv
|
config2von
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, config: dict, access: str=None) ->dict:\n rv = {k: config.get(k, self._defaults[k]) for k in ('auto_create',\n 'auto_remove')}\n rv['access'] = access or self.default_access\n for key in ('seed', 'did', 'link_secret_label'):\n if key in config:\n rv[key] = config[key]\n return rv\n",
"role": "user"
},
{
"content": "config2von",
"role": "assistant"
}
] |
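Taken together, the two helpers above split one caller-supplied configuration dict into an indy-sdk wallet config and a VON anchor config. The sketch below mimics that split outside the class; the fallback values marked as assumed stand in for self.default_storage_type, self.default_freshness_time, self._defaults and self.default_access, none of which are shown in this listing.

config = {'name': 'alice', 'storage_type': 'default', 'seed': '0' * 32, 'auto_remove': True}

indy_config = {'id': config.get('name', config.get('id')),
               'storage_type': config.get('storage_type', None),   # assumed default
               'freshness_time': config.get('freshness_time', 0)}  # assumed default
von_config = {'auto_create': config.get('auto_create', True),      # assumed default
              'auto_remove': config.get('auto_remove', False),     # assumed default
              'access': 'key'}                                     # assumed default access value
for key in ('seed', 'did', 'link_secret_label'):
    if key in config:
        von_config[key] = config[key]

assert indy_config['id'] == 'alice' and von_config['auto_remove'] is True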
async def create(self, config: dict=None, access: str=None, replace: bool=False
) ->Wallet:
"""
Create wallet on input name with given configuration and access credential value.
Raise ExtantWallet if wallet on input name exists already and replace parameter is False.
Raise BadAccess on replacement for bad access credentials value.
FAIR WARNING: specifying replace=True attempts to remove any matching wallet before proceeding; to
succeed, the existing wallet must use the same access credentials that the input configuration has.
:param config: configuration data for both indy-sdk and VON anchor wallet:
- 'name' or 'id': wallet name
- 'storage_type': storage type
- 'freshness_time': freshness time
- 'did': (optional) DID to use
- 'seed': (optional) seed to use
- 'auto_create': whether to create the wallet on first open (persists past close, can work with auto_remove)
- 'auto_remove': whether to remove the wallet on next close
- 'link_secret_label': (optional) link secret label to use to create link secret
:param access: indy wallet access credential ('key') value, if different than default
:param replace: whether to replace old wallet if it exists
:return: wallet created
"""
LOGGER.debug('WalletManager.create >>> config %s, access %s, replace %s',
config, access, replace)
assert {'name', 'id'} & {k for k in config}
wallet_name = config.get('name', config.get('id'))
if replace:
von_wallet = self.get(config, access)
if not await von_wallet.remove():
LOGGER.debug(
'WalletManager.create <!< Failed to remove wallet %s for replacement'
, wallet_name)
raise ExtantWallet('Failed to remove wallet {} for replacement'
.format(wallet_name))
indy_config = self._config2indy(config)
von_config = self._config2von(config, access)
rv = Wallet(indy_config, von_config)
await rv.create()
LOGGER.debug('WalletManager.create <<< %s', rv)
return rv
|
create
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def create(self, config: dict=None, access: str=None, replace: bool=False\n ) ->Wallet:\n \"\"\"\n Create wallet on input name with given configuration and access credential value.\n\n Raise ExtantWallet if wallet on input name exists already and replace parameter is False.\n Raise BadAccess on replacement for bad access credentials value.\n\n FAIR WARNING: specifying replace=True attempts to remove any matching wallet before proceeding; to\n succeed, the existing wallet must use the same access credentials that the input configuration has.\n\n :param config: configuration data for both indy-sdk and VON anchor wallet:\n\n - 'name' or 'id': wallet name\n - 'storage_type': storage type\n - 'freshness_time': freshness time\n - 'did': (optional) DID to use\n - 'seed': (optional) seed to use\n - 'auto_create': whether to create the wallet on first open (persists past close, can work with auto_remove)\n - 'auto_remove': whether to remove the wallet on next close\n - 'link_secret_label': (optional) link secret label to use to create link secret\n\n :param access: indy wallet access credential ('key') value, if different than default\n :param replace: whether to replace old wallet if it exists\n :return: wallet created\n \"\"\"\n LOGGER.debug('WalletManager.create >>> config %s, access %s, replace %s',\n config, access, replace)\n assert {'name', 'id'} & {k for k in config}\n wallet_name = config.get('name', config.get('id'))\n if replace:\n von_wallet = self.get(config, access)\n if not await von_wallet.remove():\n LOGGER.debug(\n 'WalletManager.create <!< Failed to remove wallet %s for replacement'\n , wallet_name)\n raise ExtantWallet('Failed to remove wallet {} for replacement'\n .format(wallet_name))\n indy_config = self._config2indy(config)\n von_config = self._config2von(config, access)\n rv = Wallet(indy_config, von_config)\n await rv.create()\n LOGGER.debug('WalletManager.create <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "create",
"role": "assistant"
}
] |
def x(self, config: dict, access: str=None) ->Wallet:
LOGGER.debug('WalletManager.get >>> config %s, access %s', config, access)
rv = Wallet(self._config2indy(config), self._config2von(config, access))
LOGGER.debug('WalletManager.get <<< %s', rv)
return rv
|
get
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, config: dict, access: str=None) ->Wallet:\n LOGGER.debug('WalletManager.get >>> config %s, access %s', config, access)\n rv = Wallet(self._config2indy(config), self._config2von(config, access))\n LOGGER.debug('WalletManager.get <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "get",
"role": "assistant"
}
] |
async def reseed_local(self, local_wallet: Wallet, next_seed: str=None
) ->DIDInfo:
"""
Generate and apply new key, in wallet only, for local DID based on input seed (default random).
Raise WalletState if wallet is closed.
Note that this operation does not update the corresponding NYM on the ledger: for VON anchors
anchored to the ledger, use von_anchor.BaseAnchor.reseed().
:param local_wallet: VON anchor wallet without NYM on ledger
:param next_seed: incoming replacement seed (default random)
:return: DIDInfo with new verification key and metadata for DID
"""
LOGGER.debug('WalletManager.reseed_local >>> local_wallet %s', local_wallet
)
await local_wallet.reseed_init(next_seed)
rv = await local_wallet.reseed_apply()
LOGGER.debug('WalletManager.reseed_local <<< %s', rv)
return rv
|
reseed_local
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def reseed_local(self, local_wallet: Wallet, next_seed: str=None\n ) ->DIDInfo:\n \"\"\"\n Generate and apply new key, in wallet only, for local DID based on input seed (default random).\n Raise WalletState if wallet is closed.\n\n Note that this operation does not update the corresponding NYM on the ledger: for VON anchors\n anchored to the ledger, use von_anchor.BaseAnchor.reseed().\n\n :param local_wallet: VON anchor wallet without NYM on ledger\n :param next_seed: incoming replacement seed (default random)\n :return: DIDInfo with new verification key and metadata for DID\n \"\"\"\n LOGGER.debug('WalletManager.reseed_local >>> local_wallet %s', local_wallet\n )\n await local_wallet.reseed_init(next_seed)\n rv = await local_wallet.reseed_apply()\n LOGGER.debug('WalletManager.reseed_local <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "reseed_local",
"role": "assistant"
}
] |
async def export_wallet(self, von_wallet: Wallet, path: str) ->None:
"""
Export an existing VON anchor wallet. Raise WalletState if wallet is closed.
:param von_wallet: open wallet
:param path: path to which to export wallet
"""
LOGGER.debug('WalletManager.export_wallet >>> von_wallet %s, path %s',
von_wallet, path)
if not von_wallet.handle:
LOGGER.debug('WalletManager.export_wallet <!< Wallet %s is closed',
von_wallet.name)
raise WalletState('Wallet {} is closed'.format(von_wallet.name))
await wallet.export_wallet(von_wallet.handle, json.dumps({'path': path,
**von_wallet.access_creds}))
LOGGER.debug('WalletManager.export_wallet <<<')
|
export_wallet
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def export_wallet(self, von_wallet: Wallet, path: str) ->None:\n \"\"\"\n Export an existing VON anchor wallet. Raise WalletState if wallet is closed.\n\n :param von_wallet: open wallet\n :param path: path to which to export wallet\n \"\"\"\n LOGGER.debug('WalletManager.export_wallet >>> von_wallet %s, path %s',\n von_wallet, path)\n if not von_wallet.handle:\n LOGGER.debug('WalletManager.export_wallet <!< Wallet %s is closed',\n von_wallet.name)\n raise WalletState('Wallet {} is closed'.format(von_wallet.name))\n await wallet.export_wallet(von_wallet.handle, json.dumps({'path': path,\n **von_wallet.access_creds}))\n LOGGER.debug('WalletManager.export_wallet <<<')\n",
"role": "user"
},
{
"content": "export_wallet",
"role": "assistant"
}
] |
async def import_wallet(self, indy_config: dict, path: str, access: str=None
) ->None:
"""
Import a VON anchor wallet. Raise BadAccess on bad access credential value.
:param indy_config: indy wallet configuration to use, with:
- 'id'
- 'storage_type' (optional)
- 'storage_config' (optional)
:param path: path from which to import wallet file
:param access: indy access credentials value (default value from wallet manager)
"""
LOGGER.debug('WalletManager.import_wallet >>> indy_config %s, path: %s',
indy_config, path)
try:
await wallet.import_wallet(json.dumps(indy_config), json.dumps({
'key': access or self.default_access}), json.dumps({'path':
path, 'key': access or self.default_access}))
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.CommonInvalidStructure:
LOGGER.debug(
'WalletManager.import_wallet <!< bad access credential value for wallet %s'
, indy_config.get('id', '(no id)'))
raise BadAccess('Bad access credential value for wallet {}'.
format(indy_config.get('id', '(no id)')))
LOGGER.debug(
'WalletManager.import_wallet <!< indy error code %s on wallet %s import'
, x_indy.error_code, indy_config.get('id', '(no id)'))
raise
LOGGER.debug('WalletManager.import_wallet <<<')
|
import_wallet
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def import_wallet(self, indy_config: dict, path: str, access: str=None\n ) ->None:\n \"\"\"\n Import a VON anchor wallet. Raise BadAccess on bad access credential value.\n\n :param indy_config: indy wallet configuration to use, with:\n\n - 'id'\n - 'storage_type' (optional)\n - 'storage_config' (optional)\n\n :param path: path from which to import wallet file\n :param access: indy access credentials value (default value from wallet manager)\n \"\"\"\n LOGGER.debug('WalletManager.import_wallet >>> indy_config %s, path: %s',\n indy_config, path)\n try:\n await wallet.import_wallet(json.dumps(indy_config), json.dumps({\n 'key': access or self.default_access}), json.dumps({'path':\n path, 'key': access or self.default_access}))\n except IndyError as x_indy:\n if x_indy.error_code == ErrorCode.CommonInvalidStructure:\n LOGGER.debug(\n 'WalletManager.import_wallet <!< bad access credential value for wallet %s'\n , indy_config.get('id', '(no id)'))\n raise BadAccess('Bad access credential value for wallet {}'.\n format(indy_config.get('id', '(no id)')))\n LOGGER.debug(\n 'WalletManager.import_wallet <!< indy error code %s on wallet %s import'\n , x_indy.error_code, indy_config.get('id', '(no id)'))\n raise\n LOGGER.debug('WalletManager.import_wallet <<<')\n",
"role": "user"
},
{
"content": "import_wallet",
"role": "assistant"
}
] |
async def reset(self, von_wallet: Wallet, seed: str=None) ->Wallet:
"""
Close and delete (open) VON anchor wallet and then create, open, and return
replacement on current link secret.
Note that this operation effectively destroys private keys for keyed data
structures such as credential offers or credential definitions.
Raise WalletState if the wallet is closed.
:param von_wallet: open wallet
:param seed: seed to use for new wallet (default random)
:return: replacement wallet
"""
LOGGER.debug('WalletManager.reset >>> von_wallet %s', von_wallet)
if not von_wallet.handle:
LOGGER.debug('WalletManager.reset <!< Wallet %s is closed',
von_wallet.name)
raise WalletState('Wallet {} is closed'.format(von_wallet.name))
w_config = von_wallet.config
w_config['did'] = von_wallet.did
w_config['seed'] = seed
w_config['auto_create'] = von_wallet.auto_create
w_config['auto_remove'] = von_wallet.auto_remove
label = await von_wallet.get_link_secret_label()
if label:
w_config['link_secret_label'] = label
await von_wallet.close()
if not von_wallet.auto_remove:
await self.remove(von_wallet)
rv = await self.create(w_config, von_wallet.access)
await rv.open()
LOGGER.debug('WalletManager.reset <<< %s', rv)
return rv
|
reset
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def reset(self, von_wallet: Wallet, seed: str=None) ->Wallet:\n \"\"\"\n Close and delete (open) VON anchor wallet and then create, open, and return\n replacement on current link secret.\n\n Note that this operation effectively destroys private keys for keyed data\n structures such as credential offers or credential definitions.\n\n Raise WalletState if the wallet is closed.\n\n :param von_wallet: open wallet\n :param seed: seed to use for new wallet (default random)\n :return: replacement wallet\n \"\"\"\n LOGGER.debug('WalletManager.reset >>> von_wallet %s', von_wallet)\n if not von_wallet.handle:\n LOGGER.debug('WalletManager.reset <!< Wallet %s is closed',\n von_wallet.name)\n raise WalletState('Wallet {} is closed'.format(von_wallet.name))\n w_config = von_wallet.config\n w_config['did'] = von_wallet.did\n w_config['seed'] = seed\n w_config['auto_create'] = von_wallet.auto_create\n w_config['auto_remove'] = von_wallet.auto_remove\n label = await von_wallet.get_link_secret_label()\n if label:\n w_config['link_secret_label'] = label\n await von_wallet.close()\n if not von_wallet.auto_remove:\n await self.remove(von_wallet)\n rv = await self.create(w_config, von_wallet.access)\n await rv.open()\n LOGGER.debug('WalletManager.reset <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "reset",
"role": "assistant"
}
] |
async def remove(self, von_wallet: Wallet) ->None:
"""
Remove serialized wallet if it exists. Raise WalletState if wallet is open.
:param von_wallet: (closed) wallet to remove
"""
LOGGER.debug('WalletManager.remove >>> wallet %s', von_wallet)
await von_wallet.remove()
LOGGER.debug('WalletManager.remove <<<')
|
remove
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def remove(self, von_wallet: Wallet) ->None:\n \"\"\"\n Remove serialized wallet if it exists. Raise WalletState if wallet is open.\n\n :param von_wallet: (closed) wallet to remove\n \"\"\"\n LOGGER.debug('WalletManager.remove >>> wallet %s', von_wallet)\n await von_wallet.remove()\n LOGGER.debug('WalletManager.remove <<<')\n",
"role": "user"
},
{
"content": "remove",
"role": "assistant"
}
] |
async def register_storage_library(storage_type: str, c_library: str,
entry_point: str) ->None:
"""
Load a wallet storage plug-in.
An indy-sdk wallet storage plug-in is a shared library; relying parties must explicitly
load it before creating or opening a wallet with the plug-in.
The implementation loads a dynamic library and calls an entry point; internally,
the plug-in calls the indy-sdk wallet
async def register_wallet_storage_library(storage_type: str, c_library: str, fn_pfx: str).
:param storage_type: wallet storage type
:param c_library: plug-in library
:param entry_point: function to initialize the library
"""
LOGGER.debug(
'WalletManager.register_storage_library >>> storage_type %s, c_library %s, entry_point %s'
, storage_type, c_library, entry_point)
try:
stg_lib = CDLL(c_library)
result = stg_lib[entry_point]()
if result:
LOGGER.debug(
'WalletManager.register_storage_library <!< indy error code %s on storage library entry at %s'
, result, entry_point)
raise IndyError(result)
LOGGER.info('Loaded storage library type %s (%s)', storage_type,
c_library)
except IndyError as x_indy:
LOGGER.debug(
'WalletManager.register_storage_library <!< indy error code %s on load of storage library %s %s'
, x_indy.error_code, storage_type, c_library)
raise
LOGGER.debug('WalletManager.register_storage_library <<<')
|
register_storage_library
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def register_storage_library(storage_type: str, c_library: str,\n entry_point: str) ->None:\n \"\"\"\n Load a wallet storage plug-in.\n\n An indy-sdk wallet storage plug-in is a shared library; relying parties must explicitly\n load it before creating or opening a wallet with the plug-in.\n\n The implementation loads a dynamic library and calls an entry point; internally,\n the plug-in calls the indy-sdk wallet\n async def register_wallet_storage_library(storage_type: str, c_library: str, fn_pfx: str).\n\n :param storage_type: wallet storage type\n :param c_library: plug-in library\n :param entry_point: function to initialize the library\n \"\"\"\n LOGGER.debug(\n 'WalletManager.register_storage_library >>> storage_type %s, c_library %s, entry_point %s'\n , storage_type, c_library, entry_point)\n try:\n stg_lib = CDLL(c_library)\n result = stg_lib[entry_point]()\n if result:\n LOGGER.debug(\n 'WalletManager.register_storage_library <!< indy error code %s on storage library entry at %s'\n , result, entry_point)\n raise IndyError(result)\n LOGGER.info('Loaded storage library type %s (%s)', storage_type,\n c_library)\n except IndyError as x_indy:\n LOGGER.debug(\n 'WalletManager.register_storage_library <!< indy error code %s on load of storage library %s %s'\n , x_indy.error_code, storage_type, c_library)\n raise\n LOGGER.debug('WalletManager.register_storage_library <<<')\n",
"role": "user"
},
{
"content": "register_storage_library",
"role": "assistant"
}
] |
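register_storage_library() above boils down to a ctypes load-and-call pattern: open the shared library, invoke its zero-argument entry point, and treat a non-zero return as failure. A minimal stand-alone version of just that pattern, with a hypothetical library path and entry point name left commented out:

from ctypes import CDLL

def load_storage_plugin(c_library: str, entry_point: str) -> None:
    stg_lib = CDLL(c_library)        # raises OSError if the shared library cannot be loaded
    result = stg_lib[entry_point]()  # entry point returns 0 on success, non-zero on failure
    if result:
        raise RuntimeError('storage plug-in init failed with code {}'.format(result))

# load_storage_plugin('/usr/lib/libexamplestorage.so', 'examplestorage_init')  # hypothetical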
def x(sender, **kwargs):
if not invalidate:
return
if kwargs.get('raw', False):
return
if sender is MigrationRecorder.Migration:
return
if issubclass(sender, Model):
obj = kwargs['instance']
if isinstance(obj, Model):
try:
ct = ContentType.objects.get_for_model(sender)
except RuntimeError:
return
if kwargs.get('created', False):
key = 'ucache-ct-%s' % ct.id
to_delete = cache.get(key, [])
if to_delete:
try:
cache.delete_many(to_delete)
except NotImplementedError:
for k in to_delete:
cache.delete(k)
cache.delete(key)
key = 'ucache-ct-pth-%s' % ct.id
if purger is not None:
for li in cache.get(key, []):
purger(li[0], li[1])
cache.delete(key)
else:
key = 'ucache-%s-%s' % (ct.id, obj.pk)
to_delete = cache.get(key, [])
if to_delete:
try:
cache.delete_many(to_delete)
except NotImplementedError:
for k in to_delete:
cache.delete(k)
cache.delete(key)
key = 'ucache-pth-%s-%s' % (ct.id, obj.pk)
if purger is not None:
for li in cache.get(key, []):
purger(li[0], li[1])
cache.delete(key)
|
on_post_save
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(sender, **kwargs):\n if not invalidate:\n return\n if kwargs.get('raw', False):\n return\n if sender is MigrationRecorder.Migration:\n return\n if issubclass(sender, Model):\n obj = kwargs['instance']\n if isinstance(obj, Model):\n try:\n ct = ContentType.objects.get_for_model(sender)\n except RuntimeError:\n return\n if kwargs.get('created', False):\n key = 'ucache-ct-%s' % ct.id\n to_delete = cache.get(key, [])\n if to_delete:\n try:\n cache.delete_many(to_delete)\n except NotImplementedError:\n for k in to_delete:\n cache.delete(k)\n cache.delete(key)\n key = 'ucache-ct-pth-%s' % ct.id\n if purger is not None:\n for li in cache.get(key, []):\n purger(li[0], li[1])\n cache.delete(key)\n else:\n key = 'ucache-%s-%s' % (ct.id, obj.pk)\n to_delete = cache.get(key, [])\n if to_delete:\n try:\n cache.delete_many(to_delete)\n except NotImplementedError:\n for k in to_delete:\n cache.delete(k)\n cache.delete(key)\n key = 'ucache-pth-%s-%s' % (ct.id, obj.pk)\n if purger is not None:\n for li in cache.get(key, []):\n purger(li[0], li[1])\n cache.delete(key)\n",
"role": "user"
},
{
"content": "on_post_save",
"role": "assistant"
}
] |
def x(sender, **kwargs):
if not invalidate:
return
if kwargs.get('raw', False):
return
if sender is MigrationRecorder.Migration:
return
if issubclass(sender, Model):
obj = kwargs['instance']
if isinstance(obj, Model):
try:
ct = ContentType.objects.get_for_model(sender)
except RuntimeError:
return
key = 'ucache-%s-%s' % (ct.id, obj.pk)
to_delete = cache.get(key, [])
if to_delete:
try:
cache.delete_many(to_delete)
except NotImplementedError:
for k in to_delete:
cache.delete(k)
cache.delete(key)
key = 'ucache-pth-%s-%s' % (ct.id, obj.pk)
if purger is not None:
for li in cache.get(key, []):
purger(li[0], li[1])
cache.delete(key)
|
on_post_delete
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(sender, **kwargs):\n if not invalidate:\n return\n if kwargs.get('raw', False):\n return\n if sender is MigrationRecorder.Migration:\n return\n if issubclass(sender, Model):\n obj = kwargs['instance']\n if isinstance(obj, Model):\n try:\n ct = ContentType.objects.get_for_model(sender)\n except RuntimeError:\n return\n key = 'ucache-%s-%s' % (ct.id, obj.pk)\n to_delete = cache.get(key, [])\n if to_delete:\n try:\n cache.delete_many(to_delete)\n except NotImplementedError:\n for k in to_delete:\n cache.delete(k)\n cache.delete(key)\n key = 'ucache-pth-%s-%s' % (ct.id, obj.pk)\n if purger is not None:\n for li in cache.get(key, []):\n purger(li[0], li[1])\n cache.delete(key)\n",
"role": "user"
},
{
"content": "on_post_delete",
"role": "assistant"
}
] |
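Both signal handlers above follow the same invalidation pattern: read an index key listing dependent cache keys, bulk-delete them, fall back to one-by-one deletion when the backend lacks delete_many(), then drop the index key itself. A stand-alone sketch of that pattern against a hypothetical in-memory cache:

class DictCache:
    """Toy cache standing in for Django's cache backend in this illustration."""

    def __init__(self, data=None):
        self._data = dict(data or {})

    def get(self, key, default=None):
        return self._data.get(key, default)

    def delete(self, key):
        self._data.pop(key, None)

    def delete_many(self, keys):
        raise NotImplementedError  # force the fallback path, as some backends do

def purge_keys(cache, index_key):
    to_delete = cache.get(index_key, [])
    if to_delete:
        try:
            cache.delete_many(to_delete)
        except NotImplementedError:  # backend without bulk delete: remove keys one by one
            for k in to_delete:
                cache.delete(k)
    cache.delete(index_key)

cache = DictCache({'ucache-1-42': ['a', 'b'], 'a': 1, 'b': 2})
purge_keys(cache, 'ucache-1-42')
assert cache.get('a') is None and cache.get('ucache-1-42') is None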
async def create_signing_key(self, seed: str=None, metadata: dict=None
) ->KeyInfo:
"""
Create a new signing key pair.
Raise WalletState if wallet is closed, ExtantRecord if verification key already exists.
:param seed: optional seed allowing deterministic key creation
:param metadata: optional metadata to store with key pair
:return: KeyInfo for new key pair
"""
LOGGER.debug('Wallet.create_signing_key >>> seed: [SEED], metadata: %s',
metadata)
if not self.handle:
LOGGER.debug('Wallet.create_signing_key <!< Wallet %s is closed',
self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
try:
verkey = await crypto.create_key(self.handle, json.dumps({'seed':
seed} if seed else {}))
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.WalletItemAlreadyExists:
LOGGER.debug(
'Wallet.create_signing_key <!< Verification key already present in wallet %s'
, self.name)
raise ExtantRecord('Verification key already present in wallet {}'
.format(self.name))
LOGGER.debug('Wallet.create_signing_key <!< indy-sdk raised error %s',
x_indy.error_code)
raise
await crypto.set_key_metadata(self.handle, verkey, json.dumps(metadata or
{}))
rv = KeyInfo(verkey, metadata or {})
LOGGER.debug('Wallet.create_signing_key <<< %s', rv)
return rv
|
create_signing_key
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def create_signing_key(self, seed: str=None, metadata: dict=None\n ) ->KeyInfo:\n \"\"\"\n Create a new signing key pair.\n\n Raise WalletState if wallet is closed, ExtantRecord if verification key already exists.\n\n :param seed: optional seed allowing deterministic key creation\n :param metadata: optional metadata to store with key pair\n :return: KeyInfo for new key pair\n \"\"\"\n LOGGER.debug('Wallet.create_signing_key >>> seed: [SEED], metadata: %s',\n metadata)\n if not self.handle:\n LOGGER.debug('Wallet.create_signing_key <!< Wallet %s is closed',\n self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n try:\n verkey = await crypto.create_key(self.handle, json.dumps({'seed':\n seed} if seed else {}))\n except IndyError as x_indy:\n if x_indy.error_code == ErrorCode.WalletItemAlreadyExists:\n LOGGER.debug(\n 'Wallet.create_signing_key <!< Verification key already present in wallet %s'\n , self.name)\n raise ExtantRecord('Verification key already present in wallet {}'\n .format(self.name))\n LOGGER.debug('Wallet.create_signing_key <!< indy-sdk raised error %s',\n x_indy.error_code)\n raise\n await crypto.set_key_metadata(self.handle, verkey, json.dumps(metadata or\n {}))\n rv = KeyInfo(verkey, metadata or {})\n LOGGER.debug('Wallet.create_signing_key <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "create_signing_key",
"role": "assistant"
}
] |
async def get_signing_key(self, verkey: str) ->KeyInfo:
"""
Get signing key pair for input verification key.
Raise WalletState if wallet is closed, AbsentRecord for no such key pair.
:param verkey: verification key of key pair
:return: KeyInfo for key pair
"""
    LOGGER.debug('Wallet.get_signing_key >>> verkey: %s', verkey)
if not self.handle:
LOGGER.debug('Wallet.get_signing_key <!< Wallet %s is closed', self
.name)
raise WalletState('Wallet {} is closed'.format(self.name))
try:
metadata = await crypto.get_key_metadata(self.handle, verkey)
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.WalletItemNotFound:
LOGGER.debug(
'Wallet.get_signing_key <!< Verification key %s not in wallet %s'
, verkey, self.name)
raise AbsentRecord('Verification key not in wallet {}'.format(
self.name))
LOGGER.debug('Wallet.get_signing_key <!< indy-sdk raised error %s',
x_indy.error_code)
raise
rv = KeyInfo(verkey, json.loads(metadata) if metadata else {})
LOGGER.debug('Wallet.get_signing_key <<< %s', rv)
return rv
|
get_signing_key
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_signing_key(self, verkey: str) ->KeyInfo:\n \"\"\"\n Get signing key pair for input verification key.\n\n Raise WalletState if wallet is closed, AbsentRecord for no such key pair.\n\n :param verkey: verification key of key pair\n :return: KeyInfo for key pair\n \"\"\"\n LOGGER.debug('Wallet.get_signing_key >>> seed: [SEED], verkey: %s', verkey)\n if not self.handle:\n LOGGER.debug('Wallet.get_signing_key <!< Wallet %s is closed', self\n .name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n try:\n metadata = await crypto.get_key_metadata(self.handle, verkey)\n except IndyError as x_indy:\n if x_indy.error_code == ErrorCode.WalletItemNotFound:\n LOGGER.debug(\n 'Wallet.get_signing_key <!< Verification key %s not in wallet %s'\n , verkey, self.name)\n raise AbsentRecord('Verification key not in wallet {}'.format(\n self.name))\n LOGGER.debug('Wallet.get_signing_key <!< indy-sdk raised error %s',\n x_indy.error_code)\n raise\n rv = KeyInfo(verkey, json.loads(metadata) if metadata else {})\n LOGGER.debug('Wallet.get_signing_key <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "get_signing_key",
"role": "assistant"
}
] |
async def create_local_did(self, seed: str=None, loc_did: str=None,
metadata: dict=None) ->DIDInfo:
"""
Create and store a new local DID for use in pairwise DID relations.
:param seed: seed from which to create (default random)
:param loc_did: local DID value (default None to let indy-sdk generate)
:param metadata: metadata to associate with the local DID
(operation always sets 'since', 'modified' epoch timestamps)
:return: DIDInfo for new local DID
"""
LOGGER.debug(
'Wallet.create_local_did >>> seed: [SEED] loc_did: %s metadata: %s',
loc_did, metadata)
cfg = {}
if seed:
cfg['seed'] = seed
if loc_did:
cfg['did'] = loc_did
if not self.handle:
LOGGER.debug('Wallet.create_local_did <!< Wallet %s is closed',
self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
try:
created_did, verkey = await did.create_and_store_my_did(self.handle,
json.dumps(cfg))
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.DidAlreadyExistsError:
LOGGER.debug(
'Wallet.create_local_did <!< DID %s already present in wallet %s'
, loc_did, self.name)
raise ExtantRecord('Local DID {} already present in wallet {}'.
format(loc_did, self.name))
LOGGER.debug('Wallet.create_local_did <!< indy-sdk raised error %s',
x_indy.error_code)
raise
now = int(time())
    loc_did_metadata = {**(metadata or {}), 'since': now, 'modified': now}
await did.set_did_metadata(self.handle, created_did, json.dumps(
loc_did_metadata))
rv = DIDInfo(created_did, verkey, loc_did_metadata)
LOGGER.debug('Wallet.create_local_did <<< %s', rv)
return rv
|
create_local_did
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def create_local_did(self, seed: str=None, loc_did: str=None,\n metadata: dict=None) ->DIDInfo:\n \"\"\"\n Create and store a new local DID for use in pairwise DID relations.\n\n :param seed: seed from which to create (default random)\n :param loc_did: local DID value (default None to let indy-sdk generate)\n :param metadata: metadata to associate with the local DID\n (operation always sets 'since', 'modified' epoch timestamps)\n :return: DIDInfo for new local DID\n \"\"\"\n LOGGER.debug(\n 'Wallet.create_local_did >>> seed: [SEED] loc_did: %s metadata: %s',\n loc_did, metadata)\n cfg = {}\n if seed:\n cfg['seed'] = seed\n if loc_did:\n cfg['did'] = loc_did\n if not self.handle:\n LOGGER.debug('Wallet.create_local_did <!< Wallet %s is closed',\n self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n try:\n created_did, verkey = await did.create_and_store_my_did(self.handle,\n json.dumps(cfg))\n except IndyError as x_indy:\n if x_indy.error_code == ErrorCode.DidAlreadyExistsError:\n LOGGER.debug(\n 'Wallet.create_local_did <!< DID %s already present in wallet %s'\n , loc_did, self.name)\n raise ExtantRecord('Local DID {} already present in wallet {}'.\n format(loc_did, self.name))\n LOGGER.debug('Wallet.create_local_did <!< indy-sdk raised error %s',\n x_indy.error_code)\n raise\n now = int(time())\n loc_did_metadata = {**metadata or {}, 'since': now, 'modified': now}\n await did.set_did_metadata(self.handle, created_did, json.dumps(\n loc_did_metadata))\n rv = DIDInfo(created_did, verkey, loc_did_metadata)\n LOGGER.debug('Wallet.create_local_did <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "create_local_did",
"role": "assistant"
}
] |
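create_local_did() above merges caller metadata with operation-set timestamps; because later keys in the dict literal win, 'since' and 'modified' always reflect the operation even if the caller supplied them. A two-line check of that merge behaviour (the parentheses around metadata or {} are required for the unpacking to parse):

from time import time

now = int(time())
metadata = {'purpose': 'pairwise', 'since': 1}
merged = {**(metadata or {}), 'since': now, 'modified': now}
assert merged['purpose'] == 'pairwise' and merged['since'] == now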
async def replace_local_did_metadata(self, loc_did: str, metadata: dict
) ->DIDInfo:
"""
Replace the metadata associated with a local DID.
Raise WalletState if wallet is closed, AbsentRecord for no such local DID.
:param loc_did: local DID of interest
:param metadata: new metadata to store
:return: DIDInfo for local DID after write
"""
LOGGER.debug(
'Wallet.replace_local_did_metadata >>> loc_did: %s, metadata: %s',
loc_did, metadata)
old = await self.get_local_did(loc_did)
now = int(time())
    loc_did_metadata = {**(metadata or {}), 'since': (old.metadata or {}).get(
        'since', now), 'modified': now}
try:
await did.set_did_metadata(self.handle, loc_did, json.dumps(
loc_did_metadata))
except IndyError as x_indy:
LOGGER.debug(
'Wallet.replace_local_did_metadata <!< indy-sdk raised error %s',
x_indy.error_code)
raise
rv = await self.get_local_did(loc_did)
LOGGER.debug('Wallet.replace_local_did_metadata <<< %s', rv)
return rv
|
replace_local_did_metadata
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def replace_local_did_metadata(self, loc_did: str, metadata: dict\n ) ->DIDInfo:\n \"\"\"\n Replace the metadata associated with a local DID.\n\n Raise WalletState if wallet is closed, AbsentRecord for no such local DID.\n\n :param loc_did: local DID of interest\n :param metadata: new metadata to store\n :return: DIDInfo for local DID after write\n \"\"\"\n LOGGER.debug(\n 'Wallet.replace_local_did_metadata >>> loc_did: %s, metadata: %s',\n loc_did, metadata)\n old = await self.get_local_did(loc_did)\n now = int(time())\n loc_did_metadata = {**metadata or {}, 'since': (old.metadata or {}).get\n ('since', now), 'modified': now}\n try:\n await did.set_did_metadata(self.handle, loc_did, json.dumps(\n loc_did_metadata))\n except IndyError as x_indy:\n LOGGER.debug(\n 'Wallet.replace_local_did_metadata <!< indy-sdk raised error %s',\n x_indy.error_code)\n raise\n rv = await self.get_local_did(loc_did)\n LOGGER.debug('Wallet.replace_local_did_metadata <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "replace_local_did_metadata",
"role": "assistant"
}
] |
async def get_local_dids(self) ->Sequence[DIDInfo]:
"""
Get list of DIDInfos for local DIDs.
:return: list of local DIDInfos
"""
LOGGER.debug('Wallet.get_local_dids >>>')
    dids_with_meta = json.loads(await did.list_my_dids_with_meta(self.handle))
rv = []
for did_with_meta in dids_with_meta:
meta = json.loads(did_with_meta['metadata']) if did_with_meta[
'metadata'] else {}
if meta.get('anchor', False):
continue
rv.append(DIDInfo(did_with_meta['did'], did_with_meta['verkey'], meta))
LOGGER.debug('Wallet.get_local_dids <<< %s', rv)
return rv
|
get_local_dids
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_local_dids(self) ->Sequence[DIDInfo]:\n \"\"\"\n Get list of DIDInfos for local DIDs.\n\n :return: list of local DIDInfos\n \"\"\"\n LOGGER.debug('Wallet.get_local_dids >>>')\n dids_with_meta = json.loads(did.list_my_dids_with_meta(self.handle))\n rv = []\n for did_with_meta in dids_with_meta:\n meta = json.loads(did_with_meta['metadata']) if did_with_meta[\n 'metadata'] else {}\n if meta.get('anchor', False):\n continue\n rv.append(DIDInfo(did_with_meta['did'], did_with_meta['verkey'], meta))\n LOGGER.debug('Wallet.get_local_dids <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "get_local_dids",
"role": "assistant"
}
] |
async def get_local_did(self, loc: str) ->DIDInfo:
"""
Get local DID info by local DID or verification key.
Raise AbsentRecord for no such local DID.
:param loc: DID or verification key of interest
:return: DIDInfo for local DID
"""
LOGGER.debug('Wallet.get_local_did >>> loc: %s', loc)
if not self.handle:
LOGGER.debug('Wallet.get_local_did <!< Wallet %s is closed', self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
if ok_did(loc):
try:
did_with_meta = json.loads(await did.get_my_did_with_meta(self.
handle, loc))
rv = DIDInfo(did_with_meta['did'], did_with_meta['verkey'],
json.loads(did_with_meta['metadata']) if did_with_meta[
'metadata'] else {})
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.WalletItemNotFound:
LOGGER.debug(
'Wallet.get_local_did <!< DID %s not present in wallet %s',
loc, self.name)
raise AbsentRecord('Local DID {} not present in wallet {}'.
format(loc, self.name))
LOGGER.debug('Wallet.get_local_did <!< indy-sdk raised error %s',
x_indy.error_code)
raise
else:
dids_with_meta = json.loads(await did.list_my_dids_with_meta(self.
handle))
for did_with_meta in dids_with_meta:
if did_with_meta['verkey'] == loc:
rv = DIDInfo(did_with_meta['did'], did_with_meta['verkey'],
json.loads(did_with_meta['metadata']) if did_with_meta[
'metadata'] else {})
break
else:
LOGGER.debug(
'Wallet.get_local_did <!< Wallet %s has no local DID for verkey %s'
, self.name, loc)
raise AbsentRecord('Wallet {} has no local DID for verkey {}'.
format(self.name, loc))
LOGGER.debug('Wallet.get_local_did <<< %s', rv)
return rv
|
get_local_did
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_local_did(self, loc: str) ->DIDInfo:\n \"\"\"\n Get local DID info by local DID or verification key.\n Raise AbsentRecord for no such local DID.\n\n :param loc: DID or verification key of interest\n :return: DIDInfo for local DID\n \"\"\"\n LOGGER.debug('Wallet.get_local_did >>> loc: %s', loc)\n if not self.handle:\n LOGGER.debug('Wallet.get_local_did <!< Wallet %s is closed', self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n if ok_did(loc):\n try:\n did_with_meta = json.loads(await did.get_my_did_with_meta(self.\n handle, loc))\n rv = DIDInfo(did_with_meta['did'], did_with_meta['verkey'], \n json.loads(did_with_meta['metadata']) if did_with_meta[\n 'metadata'] else {})\n except IndyError as x_indy:\n if x_indy.error_code == ErrorCode.WalletItemNotFound:\n LOGGER.debug(\n 'Wallet.get_local_did <!< DID %s not present in wallet %s',\n loc, self.name)\n raise AbsentRecord('Local DID {} not present in wallet {}'.\n format(loc, self.name))\n LOGGER.debug('Wallet.get_local_did <!< indy-sdk raised error %s',\n x_indy.error_code)\n raise\n else:\n dids_with_meta = json.loads(await did.list_my_dids_with_meta(self.\n handle))\n for did_with_meta in dids_with_meta:\n if did_with_meta['verkey'] == loc:\n rv = DIDInfo(did_with_meta['did'], did_with_meta['verkey'],\n json.loads(did_with_meta['metadata']) if did_with_meta[\n 'metadata'] else {})\n break\n else:\n LOGGER.debug(\n 'Wallet.get_local_did <!< Wallet %s has no local DID for verkey %s'\n , self.name, loc)\n raise AbsentRecord('Wallet {} has no local DID for verkey {}'.\n format(self.name, loc))\n LOGGER.debug('Wallet.get_local_did <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "get_local_did",
"role": "assistant"
}
] |
async def get_anchor_did(self) ->str:
"""
Get current anchor DID by metadata, None for not yet set.
:return: DID
"""
LOGGER.debug('Wallet.get_anchor_did >>>')
if not self.handle:
LOGGER.debug('Wallet.get_anchor_did <!< Wallet %s is closed', self.name
)
raise WalletState('Wallet {} is closed'.format(self.name))
rv = None
dids_with_meta = json.loads(await did.list_my_dids_with_meta(self.handle))
latest = 0
for did_with_meta in dids_with_meta:
try:
meta = json.loads(did_with_meta['metadata']) if did_with_meta[
'metadata'] else {}
if not meta.get('anchor', False):
continue
            if isinstance(meta, dict) and meta.get('since', -1) > latest:
                latest = meta['since']
                rv = did_with_meta.get('did')
except json.decoder.JSONDecodeError:
continue
LOGGER.debug('Wallet.get_anchor_did <<< %s', rv)
return rv
|
get_anchor_did
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_anchor_did(self) ->str:\n \"\"\"\n Get current anchor DID by metadata, None for not yet set.\n\n :return: DID\n \"\"\"\n LOGGER.debug('Wallet.get_anchor_did >>>')\n if not self.handle:\n LOGGER.debug('Wallet.get_anchor_did <!< Wallet %s is closed', self.name\n )\n raise WalletState('Wallet {} is closed'.format(self.name))\n rv = None\n dids_with_meta = json.loads(await did.list_my_dids_with_meta(self.handle))\n latest = 0\n for did_with_meta in dids_with_meta:\n try:\n meta = json.loads(did_with_meta['metadata']) if did_with_meta[\n 'metadata'] else {}\n if not meta.get('anchor', False):\n continue\n if isinstance(meta, dict) and meta.get('since', -1) > latest:\n rv = did_with_meta.get('did')\n except json.decoder.JSONDecodeError:\n continue\n LOGGER.debug('Wallet.get_anchor_did <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "get_anchor_did",
"role": "assistant"
}
] |
async def create_link_secret(self, label: str) ->None:
"""
Create link secret (a.k.a. master secret) used in proofs by HolderProver, if the
current link secret does not already correspond to the input link secret label.
Raise WalletState if wallet is closed, or any other IndyError causing failure
to set link secret in wallet.
:param label: label for link secret; indy-sdk uses label to generate link secret
"""
LOGGER.debug('Wallet.create_link_secret >>> label: %s', label)
if not self.handle:
LOGGER.debug('Wallet.create_link_secret <!< Wallet %s is closed',
self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
try:
await anoncreds.prover_create_master_secret(self.handle, label)
await self._write_link_secret_label(label)
except IndyError as x_indy:
if (x_indy.error_code == ErrorCode.
AnoncredsMasterSecretDuplicateNameError):
LOGGER.warning(
'Wallet %s link secret already current: abstaining from updating label record'
, self.name)
await self._write_link_secret_label(label)
else:
LOGGER.debug(
'Wallet.create_link_secret <!< cannot create link secret for wallet %s, indy error code %s'
, self.name, x_indy.error_code)
raise
LOGGER.debug('Wallet.create_link_secret <<<')
|
create_link_secret
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def create_link_secret(self, label: str) ->None:\n \"\"\"\n Create link secret (a.k.a. master secret) used in proofs by HolderProver, if the\n current link secret does not already correspond to the input link secret label.\n\n Raise WalletState if wallet is closed, or any other IndyError causing failure\n to set link secret in wallet.\n\n :param label: label for link secret; indy-sdk uses label to generate link secret\n \"\"\"\n LOGGER.debug('Wallet.create_link_secret >>> label: %s', label)\n if not self.handle:\n LOGGER.debug('Wallet.create_link_secret <!< Wallet %s is closed',\n self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n try:\n await anoncreds.prover_create_master_secret(self.handle, label)\n await self._write_link_secret_label(label)\n except IndyError as x_indy:\n if (x_indy.error_code == ErrorCode.\n AnoncredsMasterSecretDuplicateNameError):\n LOGGER.warning(\n 'Wallet %s link secret already current: abstaining from updating label record'\n , self.name)\n await self._write_link_secret_label(label)\n else:\n LOGGER.debug(\n 'Wallet.create_link_secret <!< cannot create link secret for wallet %s, indy error code %s'\n , self.name, x_indy.error_code)\n raise\n LOGGER.debug('Wallet.create_link_secret <<<')\n",
"role": "user"
},
{
"content": "create_link_secret",
"role": "assistant"
}
] |
async def _write_link_secret_label(self, label) ->None:
"""
Update non-secret storage record with link secret label.
:param label: link secret label
"""
LOGGER.debug('Wallet._write_link_secret_label <<< %s', label)
if await self.get_link_secret_label() == label:
LOGGER.info(
'Wallet._write_link_secret_label abstaining - already current')
else:
await self.write_non_secret(StorageRecord(TYPE_LINK_SECRET_LABEL,
label, tags=None, ident=str(int(time()))))
LOGGER.debug('Wallet._write_link_secret_label <<<')
|
write_link_secret_label
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def _write_link_secret_label(self, label) ->None:\n \"\"\"\n Update non-secret storage record with link secret label.\n\n :param label: link secret label\n \"\"\"\n LOGGER.debug('Wallet._write_link_secret_label <<< %s', label)\n if await self.get_link_secret_label() == label:\n LOGGER.info(\n 'Wallet._write_link_secret_label abstaining - already current')\n else:\n await self.write_non_secret(StorageRecord(TYPE_LINK_SECRET_LABEL,\n label, tags=None, ident=str(int(time()))))\n LOGGER.debug('Wallet._write_link_secret_label <<<')\n",
"role": "user"
},
{
"content": "write_link_secret_label",
"role": "assistant"
}
] |
async def get_link_secret_label(self) ->str:
"""
Get current link secret label from non-secret storage records; return None for no match.
:return: latest non-secret storage record for link secret label
"""
LOGGER.debug('Wallet.get_link_secret_label >>>')
if not self.handle:
LOGGER.debug('Wallet.get_link_secret <!< Wallet %s is closed', self
.name)
raise WalletState('Wallet {} is closed'.format(self.name))
rv = None
records = await self.get_non_secret(TYPE_LINK_SECRET_LABEL)
if records:
rv = records[str(max(int(k) for k in records))].value
LOGGER.debug('Wallet.get_link_secret_label <<< %s', rv)
return rv
|
get_link_secret_label
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_link_secret_label(self) ->str:\n \"\"\"\n Get current link secret label from non-secret storage records; return None for no match.\n\n :return: latest non-secret storage record for link secret label\n \"\"\"\n LOGGER.debug('Wallet.get_link_secret_label >>>')\n if not self.handle:\n LOGGER.debug('Wallet.get_link_secret <!< Wallet %s is closed', self\n .name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n rv = None\n records = await self.get_non_secret(TYPE_LINK_SECRET_LABEL)\n if records:\n rv = records[str(max(int(k) for k in records))].value\n LOGGER.debug('Wallet.get_link_secret_label <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "get_link_secret_label",
"role": "assistant"
}
] |
async def open(self) ->'Wallet':
"""
Explicit entry. Open wallet as configured, for later closure via close().
For use when keeping wallet open across multiple calls.
Raise any IndyError causing failure to open wallet, WalletState if wallet already open,
or AbsentWallet on attempt to enter wallet not yet created.
:return: current object
"""
LOGGER.debug('Wallet.open >>>')
created = False
while True:
try:
self._handle = await wallet.open_wallet(json.dumps(self.config),
json.dumps(self.access_creds))
LOGGER.info('Opened wallet %s on handle %s', self.name, self.handle
)
break
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.WalletNotFoundError:
if created:
LOGGER.debug(
'Wallet.open() <!< Wallet %s not found after creation',
self.name)
raise AbsentWallet('Wallet {} not found after creation'
.format(self.name))
if self.auto_create:
await self.create()
continue
else:
LOGGER.debug('Wallet.open() <!< Wallet %s not found',
self.name)
raise AbsentWallet('Wallet {} not found'.format(self.name))
elif x_indy.error_code == ErrorCode.WalletAlreadyOpenedError:
LOGGER.debug('Wallet.open() <!< Wallet %s is already open',
self.name)
raise WalletState('Wallet {} is already open'.format(self.name)
)
elif x_indy.error_code == ErrorCode.WalletAccessFailed:
LOGGER.debug(
'Wallet.open() <!< Bad access credentials value for wallet %s'
, self.name)
raise BadAccess('Bad access credentials value for wallet {}'
.format(self.name))
LOGGER.debug('Wallet %s open raised indy error %s', self.name,
x_indy.error_code)
raise
self.did = await self.get_anchor_did()
self.verkey = await did.key_for_local_did(self.handle, self.did
) if self.did else None
LOGGER.info('Wallet %s got verkey %s for existing DID %s', self.name,
self.verkey, self.did)
LOGGER.debug('Wallet.open <<<')
return self
|
open
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def open(self) ->'Wallet':\n \"\"\"\n Explicit entry. Open wallet as configured, for later closure via close().\n For use when keeping wallet open across multiple calls.\n\n Raise any IndyError causing failure to open wallet, WalletState if wallet already open,\n or AbsentWallet on attempt to enter wallet not yet created.\n\n :return: current object\n \"\"\"\n LOGGER.debug('Wallet.open >>>')\n created = False\n while True:\n try:\n self._handle = await wallet.open_wallet(json.dumps(self.config),\n json.dumps(self.access_creds))\n LOGGER.info('Opened wallet %s on handle %s', self.name, self.handle\n )\n break\n except IndyError as x_indy:\n if x_indy.error_code == ErrorCode.WalletNotFoundError:\n if created:\n LOGGER.debug(\n 'Wallet.open() <!< Wallet %s not found after creation',\n self.name)\n raise AbsentWallet('Wallet {} not found after creation'\n .format(self.name))\n if self.auto_create:\n await self.create()\n continue\n else:\n LOGGER.debug('Wallet.open() <!< Wallet %s not found',\n self.name)\n raise AbsentWallet('Wallet {} not found'.format(self.name))\n elif x_indy.error_code == ErrorCode.WalletAlreadyOpenedError:\n LOGGER.debug('Wallet.open() <!< Wallet %s is already open',\n self.name)\n raise WalletState('Wallet {} is already open'.format(self.name)\n )\n elif x_indy.error_code == ErrorCode.WalletAccessFailed:\n LOGGER.debug(\n 'Wallet.open() <!< Bad access credentials value for wallet %s'\n , self.name)\n raise BadAccess('Bad access credentials value for wallet {}'\n .format(self.name))\n LOGGER.debug('Wallet %s open raised indy error %s', self.name,\n x_indy.error_code)\n raise\n self.did = await self.get_anchor_did()\n self.verkey = await did.key_for_local_did(self.handle, self.did\n ) if self.did else None\n LOGGER.info('Wallet %s got verkey %s for existing DID %s', self.name,\n self.verkey, self.did)\n LOGGER.debug('Wallet.open <<<')\n return self\n",
"role": "user"
},
{
"content": "open",
"role": "assistant"
}
] |
async def create(self) ->None:
"""
Persist the wallet. Raise ExtantWallet if it already exists.
Actuators should prefer WalletManager.create() to calling this method directly - the wallet manager
filters wallet configuration through preset defaults.
"""
LOGGER.debug('Wallet.create >>>')
try:
await wallet.create_wallet(config=json.dumps(self.config),
credentials=json.dumps(self.access_creds))
LOGGER.info('Created wallet %s', self.name)
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.WalletAlreadyExistsError:
LOGGER.debug('Wallet.create <!< Wallet %s already exists', self
.name)
raise ExtantWallet('Wallet {} already exists'.format(self.name))
LOGGER.debug(
'Wallet.create <!< indy error code %s on creation of wallet %s',
x_indy.error_code, self.name)
raise
auto_remove = self.auto_remove
self.auto_remove = False
async with self:
did_info = await self.create_local_did(self._von_config.get('seed',
None), self._von_config.get('did', None), {'anchor': True})
self.did = did_info.did
self.verkey = did_info.verkey
if 'link_secret_label' in self._von_config:
await self.create_link_secret(self._von_config['link_secret_label']
)
self.auto_remove = auto_remove
LOGGER.debug('Wallet.create <<<')
|
create
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def create(self) ->None:\n \"\"\"\n Persist the wallet. Raise ExtantWallet if it already exists.\n\n Actuators should prefer WalletManager.create() to calling this method directly - the wallet manager\n filters wallet configuration through preset defaults.\n \"\"\"\n LOGGER.debug('Wallet.create >>>')\n try:\n await wallet.create_wallet(config=json.dumps(self.config),\n credentials=json.dumps(self.access_creds))\n LOGGER.info('Created wallet %s', self.name)\n except IndyError as x_indy:\n if x_indy.error_code == ErrorCode.WalletAlreadyExistsError:\n LOGGER.debug('Wallet.create <!< Wallet %s already exists', self\n .name)\n raise ExtantWallet('Wallet {} already exists'.format(self.name))\n LOGGER.debug(\n 'Wallet.create <!< indy error code %s on creation of wallet %s',\n x_indy.error_code, self.name)\n raise\n auto_remove = self.auto_remove\n self.auto_remove = False\n async with self:\n did_info = await self.create_local_did(self._von_config.get('seed',\n None), self._von_config.get('did', None), {'anchor': True})\n self.did = did_info.did\n self.verkey = did_info.verkey\n if 'link_secret_label' in self._von_config:\n await self.create_link_secret(self._von_config['link_secret_label']\n )\n self.auto_remove = auto_remove\n LOGGER.debug('Wallet.create <<<')\n",
"role": "user"
},
{
"content": "create",
"role": "assistant"
}
] |
async def close(self) ->None:
"""
Explicit exit. Close wallet (and delete if so configured).
"""
LOGGER.debug('Wallet.close >>>')
if not self.handle:
LOGGER.warning('Abstaining from closing wallet %s: already closed',
self.name)
else:
LOGGER.debug('Closing wallet %s', self.name)
await wallet.close_wallet(self.handle)
self._handle = None
if self.auto_remove:
LOGGER.info('Automatically removing wallet %s', self.name)
await self.remove()
self._handle = None
LOGGER.debug('Wallet.close <<<')
|
close
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def close(self) ->None:\n \"\"\"\n Explicit exit. Close wallet (and delete if so configured).\n \"\"\"\n LOGGER.debug('Wallet.close >>>')\n if not self.handle:\n LOGGER.warning('Abstaining from closing wallet %s: already closed',\n self.name)\n else:\n LOGGER.debug('Closing wallet %s', self.name)\n await wallet.close_wallet(self.handle)\n self._handle = None\n if self.auto_remove:\n LOGGER.info('Automatically removing wallet %s', self.name)\n await self.remove()\n self._handle = None\n LOGGER.debug('Wallet.close <<<')\n",
"role": "user"
},
{
"content": "close",
"role": "assistant"
}
] |
async def remove(self) ->bool:
"""
Remove serialized wallet, best effort, if it exists. Return whether wallet absent after operation
(removal successful or else not present a priori).
Raise WalletState if wallet is open.
:return: whether wallet gone from persistent storage
"""
LOGGER.debug('Wallet.remove >>>')
if self.handle:
LOGGER.debug('Wallet.remove <!< Wallet %s is open', self.name)
raise WalletState('Wallet {} is open'.format(self.name))
rv = True
try:
LOGGER.info('Attempting to remove wallet: %s', self.name)
await wallet.delete_wallet(json.dumps(self.config), json.dumps(self
.access_creds))
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.WalletNotFoundError:
LOGGER.info('Wallet %s not present; abstaining from removal',
self.name)
else:
LOGGER.info('Failed wallet %s removal; indy-sdk error code %s',
self.name, x_indy.error_code)
rv = False
LOGGER.debug('Wallet.remove <<< %s', rv)
return rv
|
remove
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def remove(self) ->bool:\n \"\"\"\n Remove serialized wallet, best effort, if it exists. Return whether wallet absent after operation\n (removal successful or else not present a priori).\n\n Raise WalletState if wallet is open.\n\n :return: whether wallet gone from persistent storage\n \"\"\"\n LOGGER.debug('Wallet.remove >>>')\n if self.handle:\n LOGGER.debug('Wallet.remove <!< Wallet %s is open', self.name)\n raise WalletState('Wallet {} is open'.format(self.name))\n rv = True\n try:\n LOGGER.info('Attempting to remove wallet: %s', self.name)\n await wallet.delete_wallet(json.dumps(self.config), json.dumps(self\n .access_creds))\n except IndyError as x_indy:\n if x_indy.error_code == ErrorCode.WalletNotFoundError:\n LOGGER.info('Wallet %s not present; abstaining from removal',\n self.name)\n else:\n LOGGER.info('Failed wallet %s removal; indy-sdk error code %s',\n self.name, x_indy.error_code)\n rv = False\n LOGGER.debug('Wallet.remove <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "remove",
"role": "assistant"
}
] |
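The create/open/close/remove records above all wrap indy-sdk's wallet lifecycle calls (wallet.create_wallet, open_wallet, close_wallet, delete_wallet). The following is a minimal stand-alone sketch of that underlying flow, not part of the dataset rows themselves; the wallet id 'lifecycle-demo' and access key 'secret' are illustrative assumptions only.

import asyncio
import json

from indy import wallet


async def demo():
    # Hypothetical wallet config and credentials -- illustrative values only
    config = json.dumps({'id': 'lifecycle-demo'})
    creds = json.dumps({'key': 'secret'})

    await wallet.create_wallet(config, creds)         # persist (cf. Wallet.create)
    handle = await wallet.open_wallet(config, creds)  # obtain handle (cf. Wallet.open)
    try:
        print('wallet open on handle', handle)
    finally:
        await wallet.close_wallet(handle)              # cf. Wallet.close
        await wallet.delete_wallet(config, creds)      # cf. Wallet.remove


asyncio.get_event_loop().run_until_complete(demo())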
async def write_pairwise(self, their_did: str, their_verkey: str=None,
my_did: str=None, metadata: dict=None, replace_meta: bool=False
) ->PairwiseInfo:
"""
Store a pairwise DID for a secure connection. Use verification key for local DID in wallet if
supplied; otherwise, create one first. If local DID specified but not present, raise AbsentRecord.
With supplied metadata, replace or augment and overwrite any existing metadata for the pairwise
relation if one already exists in the wallet. Always include local and remote DIDs and keys in
metadata to allow for WQL search.
Raise AbsentRecord on call to update a non-existent record. Raise BadRecord if metadata does not
coerce into non-secrets API tags specification {str:str}.
:param their_did: remote DID
:param their_verkey: remote verification key (default None is OK if updating an existing pairwise DID)
:param my_did: local DID
:param metadata: metadata for pairwise connection
:param replace_meta: whether to (True) replace or (False) augment and overwrite existing metadata
:return: resulting PairwiseInfo
"""
LOGGER.debug(
'Wallet.write_pairwise >>> their_did: %s, their_verkey: %s, my_did: %s, metadata: %s, replace_meta: %s'
, their_did, their_verkey, my_did, metadata, replace_meta)
if their_verkey is None:
match = await self.get_pairwise(their_did)
if not match:
LOGGER.debug(
'Wallet.write_pairwise <!< Wallet %s has no pairwise DID on %s to update'
, self.name, their_did)
raise AbsentRecord('Wallet {} has no pairwise DID on {} to update'
.format(self.name, their_did))
their_verkey = [pwise for pwise in match.values()][0].their_verkey
try:
await did.store_their_did(self.handle, json.dumps({'did': their_did,
'verkey': their_verkey}))
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.WalletItemAlreadyExists:
pass
else:
LOGGER.debug(
'Wallet.write_pairwise <!< Wallet %s write of their_did %s raised indy error code %s'
, self.name, their_did, x_indy.error_code)
raise
if my_did:
my_did_info = await self.get_local_did(my_did)
else:
my_did_info = await self.create_local_did(None, None, {
'pairwise_for': their_did})
pairwise = PairwiseInfo(their_did, their_verkey, my_did_info.did,
my_did_info.verkey, metadata)
try:
storec = await self.write_non_secret(StorageRecord(TYPE_PAIRWISE,
their_verkey, tags=pairwise_info2tags(pairwise), ident=
their_did), replace_meta)
except BadRecord:
LOGGER.debug(
'Wallet.write_pairwise <!< Pairwise metadata %s does not coerce into flat {str:str} tags dict'
, pairwise.metadata)
raise
rv = storage_record2pairwise_info(storec)
LOGGER.debug('Wallet.write_pairwise <<< %s', rv)
return rv
|
write_pairwise
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def write_pairwise(self, their_did: str, their_verkey: str=None,\n my_did: str=None, metadata: dict=None, replace_meta: bool=False\n ) ->PairwiseInfo:\n \"\"\"\n Store a pairwise DID for a secure connection. Use verification key for local DID in wallet if\n supplied; otherwise, create one first. If local DID specified but not present, raise AbsentRecord.\n\n With supplied metadata, replace or augment and overwrite any existing metadata for the pairwise\n relation if one already exists in the wallet. Always include local and remote DIDs and keys in\n metadata to allow for WQL search.\n\n Raise AbsentRecord on call to update a non-existent record. Raise BadRecord if metadata does not\n coerce into non-secrets API tags specification {str:str}.\n\n :param their_did: remote DID\n :param their_verkey: remote verification key (default None is OK if updating an existing pairwise DID)\n :param my_did: local DID\n :param metadata: metadata for pairwise connection\n :param replace_meta: whether to (True) replace or (False) augment and overwrite existing metadata\n :return: resulting PairwiseInfo\n \"\"\"\n LOGGER.debug(\n 'Wallet.write_pairwise >>> their_did: %s, their_verkey: %s, my_did: %s, metadata: %s, replace_meta: %s'\n , their_did, their_verkey, my_did, metadata, replace_meta)\n if their_verkey is None:\n match = await self.get_pairwise(their_did)\n if not match:\n LOGGER.debug(\n 'Wallet.write_pairwise <!< Wallet %s has no pairwise DID on %s to update'\n , self.name, their_did)\n raise AbsentRecord('Wallet {} has no pairwise DID on {} to update'\n .format(self.name, their_did))\n their_verkey = [pwise for pwise in match.values()][0].their_verkey\n try:\n await did.store_their_did(self.handle, json.dumps({'did': their_did,\n 'verkey': their_verkey}))\n except IndyError as x_indy:\n if x_indy.error_code == ErrorCode.WalletItemAlreadyExists:\n pass\n else:\n LOGGER.debug(\n 'Wallet.write_pairwise <!< Wallet %s write of their_did %s raised indy error code %s'\n , self.name, their_did, x_indy.error_code)\n raise\n if my_did:\n my_did_info = await self.get_local_did(my_did)\n else:\n my_did_info = await self.create_local_did(None, None, {\n 'pairwise_for': their_did})\n pairwise = PairwiseInfo(their_did, their_verkey, my_did_info.did,\n my_did_info.verkey, metadata)\n try:\n storec = await self.write_non_secret(StorageRecord(TYPE_PAIRWISE,\n their_verkey, tags=pairwise_info2tags(pairwise), ident=\n their_did), replace_meta)\n except BadRecord:\n LOGGER.debug(\n 'Wallet.write_pairwise <!< Pairwise metadata %s does not coerce into flat {str:str} tags dict'\n , pairwise.metadata)\n raise\n rv = storage_record2pairwise_info(storec)\n LOGGER.debug('Wallet.write_pairwise <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "write_pairwise",
"role": "assistant"
}
] |
async def delete_pairwise(self, their_did: str) ->None:
"""
Remove a pairwise DID record by its remote DID. Silently return if no such record is present.
Raise WalletState for closed wallet, or BadIdentifier for invalid pairwise DID.
:param their_did: remote DID marking pairwise DID to remove
"""
LOGGER.debug('Wallet.delete_pairwise >>> their_did: %s', their_did)
if not ok_did(their_did):
LOGGER.debug('Wallet.delete_pairwise <!< Bad DID %s', their_did)
raise BadIdentifier('Bad DID {}'.format(their_did))
await self.delete_non_secret(TYPE_PAIRWISE, their_did)
LOGGER.debug('Wallet.delete_pairwise <<<')
|
delete_pairwise
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def delete_pairwise(self, their_did: str) ->None:\n \"\"\"\n Remove a pairwise DID record by its remote DID. Silently return if no such record is present.\n Raise WalletState for closed wallet, or BadIdentifier for invalid pairwise DID.\n\n :param their_did: remote DID marking pairwise DID to remove\n \"\"\"\n LOGGER.debug('Wallet.delete_pairwise >>> their_did: %s', their_did)\n if not ok_did(their_did):\n LOGGER.debug('Wallet.delete_pairwise <!< Bad DID %s', their_did)\n raise BadIdentifier('Bad DID {}'.format(their_did))\n await self.delete_non_secret(TYPE_PAIRWISE, their_did)\n LOGGER.debug('Wallet.delete_pairwise <<<')\n",
"role": "user"
},
{
"content": "delete_pairwise",
"role": "assistant"
}
] |
async def get_pairwise(self, pairwise_filt: str=None) ->dict:
"""
Return dict mapping each pairwise DID of interest in wallet to its pairwise info, or,
for no filter specified, mapping them all. If wallet has no such item, return empty dict.
:param pairwise_filt: remote DID of interest, or WQL json (default all)
:return: dict mapping remote DIDs to PairwiseInfo
"""
LOGGER.debug('Wallet.get_pairwise >>> pairwise_filt: %s', pairwise_filt)
if not self.handle:
LOGGER.debug('Wallet.get_pairwise <!< Wallet %s is closed', self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
storecs = await self.get_non_secret(TYPE_PAIRWISE, pairwise_filt if
ok_did(pairwise_filt) or not pairwise_filt else json.loads(
pairwise_filt), canon_pairwise_wql)
rv = {k: storage_record2pairwise_info(storecs[k]) for k in storecs}
LOGGER.debug('Wallet.get_pairwise <<< %s', rv)
return rv
|
get_pairwise
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_pairwise(self, pairwise_filt: str=None) ->dict:\n \"\"\"\n Return dict mapping each pairwise DID of interest in wallet to its pairwise info, or,\n for no filter specified, mapping them all. If wallet has no such item, return empty dict.\n\n :param pairwise_filt: remote DID of interest, or WQL json (default all)\n :return: dict mapping remote DIDs to PairwiseInfo\n \"\"\"\n LOGGER.debug('Wallet.get_pairwise >>> pairwise_filt: %s', pairwise_filt)\n if not self.handle:\n LOGGER.debug('Wallet.get_pairwise <!< Wallet %s is closed', self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n storecs = await self.get_non_secret(TYPE_PAIRWISE, pairwise_filt if \n ok_did(pairwise_filt) or not pairwise_filt else json.loads(\n pairwise_filt), canon_pairwise_wql)\n rv = {k: storage_record2pairwise_info(storecs[k]) for k in storecs}\n LOGGER.debug('Wallet.get_pairwise <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "get_pairwise",
"role": "assistant"
}
] |
async def write_non_secret(self, storec: StorageRecord, replace_meta: bool=
False) ->StorageRecord:
"""
Add or update non-secret storage record to the wallet; return resulting wallet non-secret record.
:param storec: non-secret storage record
:param replace_meta: whether to replace any existing metadata on matching record or to augment it
:return: non-secret storage record as it appears in the wallet after write
"""
LOGGER.debug('Wallet.write_non_secret >>> storec: %s, replace_meta: %s',
storec, replace_meta)
if not self.handle:
LOGGER.debug('Wallet.write_non_secret <!< Wallet %s is closed',
self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
if not StorageRecord.ok_tags(storec.tags):
LOGGER.debug(
'Wallet.write_non_secret <!< bad storage record tags %s; use flat {str: str} dict'
, storec)
raise BadRecord('Bad storage record tags {}; use flat {{str:str}} dict'
.format(storec))
try:
record = json.loads(await non_secrets.get_wallet_record(self.handle,
storec.type, storec.id, json.dumps({'retrieveType': False,
'retrieveValue': True, 'retrieveTags': True})))
if record['value'] != storec.value:
await non_secrets.update_wallet_record_value(self.handle,
storec.type, storec.id, storec.value)
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.WalletItemNotFound:
await non_secrets.add_wallet_record(self.handle, storec.type,
storec.id, storec.value, json.dumps(storec.tags) if storec.
tags else None)
else:
LOGGER.debug(
'Wallet.write_non_secret <!< Wallet lookup raised indy error code %s'
, x_indy.error_code)
raise
else:
if (record['tags'] or None) != storec.tags:
tags = storec.tags or {} if replace_meta else {**record['tags'],
**storec.tags or {}}
await non_secrets.update_wallet_record_tags(self.handle, storec
.type, storec.id, json.dumps(tags))
record = json.loads(await non_secrets.get_wallet_record(self.handle,
storec.type, storec.id, json.dumps({'retrieveType': False,
'retrieveValue': True, 'retrieveTags': True})))
rv = StorageRecord(storec.type, record['value'], tags=record.get('tags',
None), ident=record['id'])
LOGGER.debug('Wallet.write_non_secret <<< %s', rv)
return rv
|
write_non_secret
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def write_non_secret(self, storec: StorageRecord, replace_meta: bool=\n False) ->StorageRecord:\n \"\"\"\n Add or update non-secret storage record to the wallet; return resulting wallet non-secret record.\n\n :param storec: non-secret storage record\n :param replace_meta: whether to replace any existing metadata on matching record or to augment it\n :return: non-secret storage record as it appears in the wallet after write\n \"\"\"\n LOGGER.debug('Wallet.write_non_secret >>> storec: %s, replace_meta: %s',\n storec, replace_meta)\n if not self.handle:\n LOGGER.debug('Wallet.write_non_secret <!< Wallet %s is closed',\n self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n if not StorageRecord.ok_tags(storec.tags):\n LOGGER.debug(\n 'Wallet.write_non_secret <!< bad storage record tags %s; use flat {str: str} dict'\n , storec)\n raise BadRecord('Bad storage record tags {}; use flat {{str:str}} dict'\n .format(storec))\n try:\n record = json.loads(await non_secrets.get_wallet_record(self.handle,\n storec.type, storec.id, json.dumps({'retrieveType': False,\n 'retrieveValue': True, 'retrieveTags': True})))\n if record['value'] != storec.value:\n await non_secrets.update_wallet_record_value(self.handle,\n storec.type, storec.id, storec.value)\n except IndyError as x_indy:\n if x_indy.error_code == ErrorCode.WalletItemNotFound:\n await non_secrets.add_wallet_record(self.handle, storec.type,\n storec.id, storec.value, json.dumps(storec.tags) if storec.\n tags else None)\n else:\n LOGGER.debug(\n 'Wallet.write_non_secret <!< Wallet lookup raised indy error code %s'\n , x_indy.error_code)\n raise\n else:\n if (record['tags'] or None) != storec.tags:\n tags = storec.tags or {} if replace_meta else {**record['tags'],\n **storec.tags or {}}\n await non_secrets.update_wallet_record_tags(self.handle, storec\n .type, storec.id, json.dumps(tags))\n record = json.loads(await non_secrets.get_wallet_record(self.handle,\n storec.type, storec.id, json.dumps({'retrieveType': False,\n 'retrieveValue': True, 'retrieveTags': True})))\n rv = StorageRecord(storec.type, record['value'], tags=record.get('tags',\n None), ident=record['id'])\n LOGGER.debug('Wallet.write_non_secret <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "write_non_secret",
"role": "assistant"
}
] |
async def delete_non_secret(self, typ: str, ident: str) ->None:
"""
Remove a non-secret record by its type and identifier. Silently return if no such record is present.
Raise WalletState for closed wallet.
:param typ: non-secret storage record type
:param ident: non-secret storage record identifier
"""
LOGGER.debug('Wallet.delete_non_secret >>> typ: %s, ident: %s', typ, ident)
if not self.handle:
LOGGER.debug('Wallet.delete_non_secret <!< Wallet %s is closed',
self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
try:
await non_secrets.delete_wallet_record(self.handle, typ, ident)
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.WalletItemNotFound:
LOGGER.info(
'Wallet.delete_non_secret <!< no record for type %s on identifier %s'
, typ, ident)
else:
LOGGER.debug(
'Wallet.delete_non_secret <!< deletion of %s record on identifier %s raised indy error code %s'
, typ, ident, x_indy.error_code)
raise
LOGGER.debug('Wallet.delete_non_secret <<<')
|
delete_non_secret
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def delete_non_secret(self, typ: str, ident: str) ->None:\n \"\"\"\n Remove a non-secret record by its type and identifier. Silently return if no such record is present.\n Raise WalletState for closed wallet.\n\n :param typ: non-secret storage record type\n :param ident: non-secret storage record identifier\n \"\"\"\n LOGGER.debug('Wallet.delete_non_secret >>> typ: %s, ident: %s', typ, ident)\n if not self.handle:\n LOGGER.debug('Wallet.delete_non_secret <!< Wallet %s is closed',\n self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n try:\n await non_secrets.delete_wallet_record(self.handle, typ, ident)\n except IndyError as x_indy:\n if x_indy.error_code == ErrorCode.WalletItemNotFound:\n LOGGER.info(\n 'Wallet.delete_non_secret <!< no record for type %s on identifier %s'\n , typ, ident)\n else:\n LOGGER.debug(\n 'Wallet.delete_non_secret <!< deletion of %s record on identifier %s raised indy error code %s'\n , typ, ident, x_indy.error_code)\n raise\n LOGGER.debug('Wallet.delete_non_secret <<<')\n",
"role": "user"
},
{
"content": "delete_non_secret",
"role": "assistant"
}
] |
async def get_non_secret(self, typ: str, filt: Union[dict, str]=None,
canon_wql: Callable[[dict], dict]=None, limit: int=None) ->dict:
"""
Return dict mapping each non-secret storage record of interest by identifier or,
for no filter specified, mapping them all. If wallet has no such item, return empty dict.
:param typ: non-secret storage record type
:param filt: non-secret storage record identifier or WQL json (default all)
:param canon_wql: WQL canonicalization function (default von_anchor.canon.canon_non_secret_wql())
:param limit: maximum number of results to return (default no limit)
:return: dict mapping identifiers to non-secret storage records
"""
LOGGER.debug('Wallet.get_non_secret >>> typ: %s, filt: %s, canon_wql: %s',
typ, filt, canon_wql)
if not self.handle:
LOGGER.debug('Wallet.get_non_secret <!< Wallet %s is closed', self.name
)
raise WalletState('Wallet {} is closed'.format(self.name))
records = []
if isinstance(filt, str):
try:
records = [json.loads(await non_secrets.get_wallet_record(self.
handle, typ, filt, json.dumps({'retrieveType': False,
'retrieveValue': True, 'retrieveTags': True})))]
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.WalletItemNotFound:
pass
else:
LOGGER.debug(
'Wallet.get_non_secret <!< Wallet %s lookup raised indy exception %s'
, self.name, x_indy.error_code)
raise
else:
canon = canon_wql or canon_non_secret_wql
s_handle = await non_secrets.open_wallet_search(self.handle, typ,
json.dumps(canon(filt or {})), json.dumps({'retrieveRecords':
True, 'retrieveTotalCount': True, 'retrieveType': False,
'retrieveValue': True, 'retrieveTags': True}))
records = []
cardinality = int(json.loads(await non_secrets.
fetch_wallet_search_next_records(self.handle, s_handle, 0))[
'totalCount'])
chunk = min(cardinality, limit or cardinality, Wallet.DEFAULT_CHUNK)
if limit:
cardinality = min(limit, cardinality)
try:
while len(records) != cardinality:
batch = json.loads(await non_secrets.
fetch_wallet_search_next_records(self.handle, s_handle,
chunk))['records']
records.extend(batch)
if len(batch) < chunk:
break
if len(records) != cardinality:
LOGGER.warning(
'Non-secret search/limit indicated %s results but fetched %s'
, cardinality, len(records))
finally:
await non_secrets.close_wallet_search(s_handle)
rv = {record['id']: StorageRecord(typ, record['value'], record['tags'],
record['id']) for record in records}
LOGGER.debug('Wallet.get_non_secret <<< %s', rv)
return rv
|
get_non_secret
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def get_non_secret(self, typ: str, filt: Union[dict, str]=None,\n canon_wql: Callable[[dict], dict]=None, limit: int=None) ->dict:\n \"\"\"\n Return dict mapping each non-secret storage record of interest by identifier or,\n for no filter specified, mapping them all. If wallet has no such item, return empty dict.\n\n :param typ: non-secret storage record type\n :param filt: non-secret storage record identifier or WQL json (default all)\n :param canon_wql: WQL canonicalization function (default von_anchor.canon.canon_non_secret_wql())\n :param limit: maximum number of results to return (default no limit)\n :return: dict mapping identifiers to non-secret storage records\n \"\"\"\n LOGGER.debug('Wallet.get_non_secret >>> typ: %s, filt: %s, canon_wql: %s',\n typ, filt, canon_wql)\n if not self.handle:\n LOGGER.debug('Wallet.get_non_secret <!< Wallet %s is closed', self.name\n )\n raise WalletState('Wallet {} is closed'.format(self.name))\n records = []\n if isinstance(filt, str):\n try:\n records = [json.loads(await non_secrets.get_wallet_record(self.\n handle, typ, filt, json.dumps({'retrieveType': False,\n 'retrieveValue': True, 'retrieveTags': True})))]\n except IndyError as x_indy:\n if x_indy.error_code == ErrorCode.WalletItemNotFound:\n pass\n else:\n LOGGER.debug(\n 'Wallet.get_non_secret <!< Wallet %s lookup raised indy exception %s'\n , self.name, x_indy.error_code)\n raise\n else:\n canon = canon_wql or canon_non_secret_wql\n s_handle = await non_secrets.open_wallet_search(self.handle, typ,\n json.dumps(canon(filt or {})), json.dumps({'retrieveRecords': \n True, 'retrieveTotalCount': True, 'retrieveType': False,\n 'retrieveValue': True, 'retrieveTags': True}))\n records = []\n cardinality = int(json.loads(await non_secrets.\n fetch_wallet_search_next_records(self.handle, s_handle, 0))[\n 'totalCount'])\n chunk = min(cardinality, limit or cardinality, Wallet.DEFAULT_CHUNK)\n if limit:\n cardinality = min(limit, cardinality)\n try:\n while len(records) != cardinality:\n batch = json.loads(await non_secrets.\n fetch_wallet_search_next_records(self.handle, s_handle,\n chunk))['records']\n records.extend(batch)\n if len(batch) < chunk:\n break\n if len(records) != cardinality:\n LOGGER.warning(\n 'Non-secret search/limit indicated %s results but fetched %s'\n , cardinality, len(records))\n finally:\n await non_secrets.close_wallet_search(s_handle)\n rv = {record['id']: StorageRecord(typ, record['value'], record['tags'],\n record['id']) for record in records}\n LOGGER.debug('Wallet.get_non_secret <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "get_non_secret",
"role": "assistant"
}
] |
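Wallet.write_non_secret and Wallet.get_non_secret above are thin layers over indy-sdk's non_secrets API. A rough stand-alone sketch of the same add-then-search-by-WQL flow follows; the wallet id, record type 'demo.type', identifier, value, and tag are all made-up illustrative values, and the simple equality WQL query is assumed to match the tag as written.

import asyncio
import json

from indy import non_secrets, wallet


async def demo():
    config = json.dumps({'id': 'non-secret-demo'})   # hypothetical wallet
    creds = json.dumps({'key': 'secret'})
    await wallet.create_wallet(config, creds)
    wh = await wallet.open_wallet(config, creds)
    try:
        # Add a record with one tag (cf. Wallet.write_non_secret)
        await non_secrets.add_wallet_record(wh, 'demo.type', 'rec-1', 'hello',
            json.dumps({'flavour': 'vanilla'}))

        # Search by WQL on that tag (cf. Wallet.get_non_secret)
        s_handle = await non_secrets.open_wallet_search(wh, 'demo.type',
            json.dumps({'flavour': 'vanilla'}),
            json.dumps({'retrieveRecords': True, 'retrieveTotalCount': True,
                        'retrieveType': False, 'retrieveValue': True, 'retrieveTags': True}))
        try:
            found = json.loads(await non_secrets.fetch_wallet_search_next_records(wh, s_handle, 10))
            for rec in found.get('records') or []:
                print(rec['id'], rec['value'], rec['tags'])
        finally:
            await non_secrets.close_wallet_search(s_handle)
    finally:
        await wallet.close_wallet(wh)
        await wallet.delete_wallet(config, creds)


asyncio.get_event_loop().run_until_complete(demo())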
async def encrypt(self, message: bytes, authn: bool=False, to_verkey: str=
None, from_verkey: str=None) ->bytes:
"""
Encrypt plaintext for owner of DID, anonymously or via authenticated encryption scheme.
Raise AbsentMessage for missing message, or WalletState if wallet is closed.
:param message: plaintext, as bytes
:param authn: whether to use authenticated encryption scheme
:param to_verkey: verification key of recipient, None for anchor's own
:param from_verkey: verification key of sender for authenticated encryption, None for anchor's own
:return: ciphertext, as bytes
"""
LOGGER.debug(
'Wallet.encrypt >>> message: %s, authn: %s, to_verkey: %s, from_verkey: %s'
, message, authn, to_verkey, from_verkey)
if not message:
LOGGER.debug('Wallet.encrypt <!< No message to encrypt')
raise AbsentMessage('No message to encrypt')
if not self.handle:
LOGGER.debug('Wallet.encrypt <!< Wallet %s is closed', self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
if authn:
rv = await crypto.auth_crypt(self.handle, from_verkey or self.
verkey, to_verkey or self.verkey, message)
else:
rv = await crypto.anon_crypt(to_verkey or self.verkey, message)
LOGGER.debug('Wallet.auth_encrypt <<< %s', rv)
return rv
|
encrypt
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def encrypt(self, message: bytes, authn: bool=False, to_verkey: str=\n None, from_verkey: str=None) ->bytes:\n \"\"\"\n Encrypt plaintext for owner of DID, anonymously or via authenticated encryption scheme.\n Raise AbsentMessage for missing message, or WalletState if wallet is closed.\n\n :param message: plaintext, as bytes\n :param authn: whether to use authenticated encryption scheme\n :param to_verkey: verification key of recipient, None for anchor's own\n :param from_verkey: verification key of sender for authenticated encryption, None for anchor's own\n :return: ciphertext, as bytes\n \"\"\"\n LOGGER.debug(\n 'Wallet.encrypt >>> message: %s, authn: %s, to_verkey: %s, from_verkey: %s'\n , message, authn, to_verkey, from_verkey)\n if not message:\n LOGGER.debug('Wallet.encrypt <!< No message to encrypt')\n raise AbsentMessage('No message to encrypt')\n if not self.handle:\n LOGGER.debug('Wallet.encrypt <!< Wallet %s is closed', self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n if authn:\n rv = await crypto.auth_crypt(self.handle, from_verkey or self.\n verkey, to_verkey or self.verkey, message)\n else:\n rv = await crypto.anon_crypt(to_verkey or self.verkey, message)\n LOGGER.debug('Wallet.auth_encrypt <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "encrypt",
"role": "assistant"
}
] |
async def decrypt(self, ciphertext: bytes, authn_check: bool=None,
to_verkey: str=None, from_verkey: str=None) ->(bytes, str):
"""
Decrypt ciphertext and optionally authenticate sender.
Raise BadKey if authentication operation checks and reveals sender key distinct from input
sender verification key. Raise AbsentMessage for missing ciphertext, or WalletState if
wallet is closed.
:param ciphertext: ciphertext, as bytes
:param authn_check: True to authenticate and check sender verification key,
False to authenticate and return sender verification key for client to decide fitness, or
None to use anonymous decryption
:param to_verkey: recipient verification key, default anchor's own
:param from_verkey: sender verification key, ignored for anonymous decryption,
default anchor's own for authenticated decryption
:return: decrypted bytes and sender verification key (None for anonymous decryption)
"""
LOGGER.debug(
'Wallet.decrypt >>> ciphertext: %s, authn_check: %s, to_verkey: %s, from_verkey: %s'
, ciphertext, authn_check, to_verkey, from_verkey)
if not ciphertext:
LOGGER.debug('Wallet.decrypt <!< No ciphertext to decrypt')
raise AbsentMessage('No ciphertext to decrypt')
if not self.handle:
LOGGER.debug('Wallet.decrypt <!< Wallet %s is closed', self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
sender_verkey = None
if authn_check is None:
plaintext = await crypto.anon_decrypt(self.handle, to_verkey or
self.verkey, ciphertext)
else:
sender_verkey, plaintext = await crypto.auth_decrypt(self.handle,
to_verkey or self.verkey, ciphertext)
if authn_check and sender_verkey != (from_verkey or self.verkey):
LOGGER.debug(
'Wallet.decrypt <!< Authentication revealed unexpected sender key on decryption'
)
raise BadKey(
'Authentication revealed unexpected sender key on decryption')
rv = plaintext, sender_verkey
LOGGER.debug('Wallet.decrypt <<< %s', rv)
return rv
|
decrypt
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def decrypt(self, ciphertext: bytes, authn_check: bool=None,\n to_verkey: str=None, from_verkey: str=None) ->(bytes, str):\n \"\"\"\n Decrypt ciphertext and optionally authenticate sender.\n\n Raise BadKey if authentication operation checks and reveals sender key distinct from input\n sender verification key. Raise AbsentMessage for missing ciphertext, or WalletState if\n wallet is closed.\n\n :param ciphertext: ciphertext, as bytes\n :param authn_check: True to authenticate and check sender verification key,\n False to authenticate and return sender verification key for client to decide fitness, or\n None to use anonymous decryption\n :param to_verkey: recipient verification key, default anchor's own\n :param from_verkey: sender verification key, ignored for anonymous decryption,\n default anchor's own for authenticated decryption\n :return: decrypted bytes and sender verification key (None for anonymous decryption)\n \"\"\"\n LOGGER.debug(\n 'Wallet.decrypt >>> ciphertext: %s, authn_check: %s, to_verkey: %s, from_verkey: %s'\n , ciphertext, authn_check, to_verkey, from_verkey)\n if not ciphertext:\n LOGGER.debug('Wallet.decrypt <!< No ciphertext to decrypt')\n raise AbsentMessage('No ciphertext to decrypt')\n if not self.handle:\n LOGGER.debug('Wallet.decrypt <!< Wallet %s is closed', self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n sender_verkey = None\n if authn_check is None:\n plaintext = await crypto.anon_decrypt(self.handle, to_verkey or\n self.verkey, ciphertext)\n else:\n sender_verkey, plaintext = await crypto.auth_decrypt(self.handle, \n to_verkey or self.verkey, ciphertext)\n if authn_check and sender_verkey != (from_verkey or self.verkey):\n LOGGER.debug(\n 'Wallet.decrypt <!< Authentication revealed unexpected sender key on decryption'\n )\n raise BadKey(\n 'Authentication revealed unexpected sender key on decryption')\n rv = plaintext, sender_verkey\n LOGGER.debug('Wallet.decrypt <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "decrypt",
"role": "assistant"
}
] |
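The encrypt/decrypt pair above delegates to indy-sdk's crypto.anon_crypt/anon_decrypt and crypto.auth_crypt/auth_decrypt. A minimal round-trip sketch at that indy level follows; the wallet id is a made-up value, and crypto.create_key is used here only to obtain throwaway verification keys for the demonstration.

import asyncio
import json

from indy import crypto, wallet


async def demo():
    config = json.dumps({'id': 'crypt-demo'})   # hypothetical wallet
    creds = json.dumps({'key': 'secret'})
    await wallet.create_wallet(config, creds)
    wh = await wallet.open_wallet(config, creds)
    try:
        # Two keys in one wallet, standing in for sender and recipient
        alice_vk = await crypto.create_key(wh, '{}')
        bob_vk = await crypto.create_key(wh, '{}')

        # Anonymous encryption (cf. Wallet.encrypt(authn=False) / decrypt(authn_check=None))
        ct = await crypto.anon_crypt(bob_vk, b'hello anon')
        pt = await crypto.anon_decrypt(wh, bob_vk, ct)
        assert pt == b'hello anon'

        # Authenticated encryption (cf. Wallet.encrypt(authn=True) / decrypt(authn_check=True))
        ct = await crypto.auth_crypt(wh, alice_vk, bob_vk, b'hello authn')
        sender_vk, pt = await crypto.auth_decrypt(wh, bob_vk, ct)
        assert pt == b'hello authn' and sender_vk == alice_vk
    finally:
        await wallet.close_wallet(wh)
        await wallet.delete_wallet(config, creds)


asyncio.get_event_loop().run_until_complete(demo())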
async def sign(self, message: bytes, verkey: str=None) ->bytes:
"""
Derive signing key and Sign message; return signature. Raise WalletState if wallet is closed.
Raise AbsentMessage for missing message, or WalletState if wallet is closed.
:param message: Content to sign, as bytes
:param verkey: verification key corresponding to private signing key (default anchor's own)
:return: signature, as bytes
"""
LOGGER.debug('Wallet.sign >>> message: %s, verkey: %s', message, verkey)
if not message:
LOGGER.debug('Wallet.sign <!< No message to sign')
raise AbsentMessage('No message to sign')
if not self.handle:
LOGGER.debug('Wallet.sign <!< Wallet %s is closed', self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
rv = await crypto.crypto_sign(self.handle, verkey or self.verkey, message)
LOGGER.debug('Wallet.sign <<< %s', rv)
return rv
|
sign
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def sign(self, message: bytes, verkey: str=None) ->bytes:\n \"\"\"\n Derive signing key and Sign message; return signature. Raise WalletState if wallet is closed.\n Raise AbsentMessage for missing message, or WalletState if wallet is closed.\n\n :param message: Content to sign, as bytes\n :param verkey: verification key corresponding to private signing key (default anchor's own)\n :return: signature, as bytes\n \"\"\"\n LOGGER.debug('Wallet.sign >>> message: %s, verkey: %s', message, verkey)\n if not message:\n LOGGER.debug('Wallet.sign <!< No message to sign')\n raise AbsentMessage('No message to sign')\n if not self.handle:\n LOGGER.debug('Wallet.sign <!< Wallet %s is closed', self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n rv = await crypto.crypto_sign(self.handle, verkey or self.verkey, message)\n LOGGER.debug('Wallet.sign <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "sign",
"role": "assistant"
}
] |
async def verify(self, message: bytes, signature: bytes, verkey: str=None
) ->bool:
"""
Verify signature against input signer verification key (default anchor's own).
Raise AbsentMessage for missing message or signature, or WalletState if wallet is closed.
:param message: Content to sign, as bytes
:param signature: signature, as bytes
:param verkey: signer verification key (default for anchor's own)
:return: whether signature is valid
"""
LOGGER.debug('Wallet.verify >>> message: %s, signature: %s, verkey: %s',
message, signature, verkey)
if not message:
LOGGER.debug('Wallet.verify <!< No message to verify')
raise AbsentMessage('No message to verify')
if not signature:
LOGGER.debug('Wallet.verify <!< No signature to verify')
raise AbsentMessage('No signature to verify')
if not self.handle:
LOGGER.debug('Wallet.verify <!< Wallet %s is closed', self.name)
raise WalletState('Wallet {} is closed'.format(self.name))
rv = await crypto.crypto_verify(verkey or self.verkey, message, signature)
LOGGER.debug('Wallet.verify <<< %s', rv)
return rv
|
verify
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def verify(self, message: bytes, signature: bytes, verkey: str=None\n ) ->bool:\n \"\"\"\n Verify signature against input signer verification key (default anchor's own).\n Raise AbsentMessage for missing message or signature, or WalletState if wallet is closed.\n\n :param message: Content to sign, as bytes\n :param signature: signature, as bytes\n :param verkey: signer verification key (default for anchor's own)\n :return: whether signature is valid\n \"\"\"\n LOGGER.debug('Wallet.verify >>> message: %s, signature: %s, verkey: %s',\n message, signature, verkey)\n if not message:\n LOGGER.debug('Wallet.verify <!< No message to verify')\n raise AbsentMessage('No message to verify')\n if not signature:\n LOGGER.debug('Wallet.verify <!< No signature to verify')\n raise AbsentMessage('No signature to verify')\n if not self.handle:\n LOGGER.debug('Wallet.verify <!< Wallet %s is closed', self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n rv = await crypto.crypto_verify(verkey or self.verkey, message, signature)\n LOGGER.debug('Wallet.verify <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "verify",
"role": "assistant"
}
] |
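Wallet.sign and Wallet.verify above delegate to crypto.crypto_sign and crypto.crypto_verify. A minimal round-trip sketch at the indy-sdk level, under the same assumptions as the other sketches (made-up wallet id, throwaway key from crypto.create_key):

import asyncio
import json

from indy import crypto, wallet


async def demo():
    config = json.dumps({'id': 'sign-demo'})   # hypothetical wallet
    creds = json.dumps({'key': 'secret'})
    await wallet.create_wallet(config, creds)
    wh = await wallet.open_wallet(config, creds)
    try:
        verkey = await crypto.create_key(wh, '{}')
        message = b'attest me'

        signature = await crypto.crypto_sign(wh, verkey, message)      # cf. Wallet.sign
        assert await crypto.crypto_verify(verkey, message, signature)  # cf. Wallet.verify
        assert not await crypto.crypto_verify(verkey, b'tampered', signature)
    finally:
        await wallet.close_wallet(wh)
        await wallet.delete_wallet(config, creds)


asyncio.get_event_loop().run_until_complete(demo())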
async def pack(self, message: str, recip_verkeys: Union[str, Sequence[str]]
=None, sender_verkey: str=None) ->bytes:
"""
Pack a message for one or more recipients (default anchor only).
Raise AbsentMessage for missing message, or WalletState if wallet is closed.
:param message: message to pack
:param recip_verkeys: verification keys of recipients (default anchor's own, only)
:param sender_verkey: sender verification key (default anonymous encryption)
:return: packed message
"""
LOGGER.debug(
'Wallet.pack >>> message: %s, recip_verkeys: %s, sender_verkey: %s',
message, recip_verkeys, sender_verkey)
if message is None:
LOGGER.debug('Wallet.pack <!< No message to pack')
raise AbsentMessage('No message to pack')
rv = await crypto.pack_message(self.handle, message, [recip_verkeys] if
isinstance(recip_verkeys, str) else list(recip_verkeys or [self.
verkey]), sender_verkey)
LOGGER.debug('Wallet.pack <<< %s', rv)
return rv
|
pack
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def pack(self, message: str, recip_verkeys: Union[str, Sequence[str]]\n =None, sender_verkey: str=None) ->bytes:\n \"\"\"\n Pack a message for one or more recipients (default anchor only).\n Raise AbsentMessage for missing message, or WalletState if wallet is closed.\n\n :param message: message to pack\n :param recip_verkeys: verification keys of recipients (default anchor's own, only)\n :param sender_verkey: sender verification key (default anonymous encryption)\n :return: packed message\n \"\"\"\n LOGGER.debug(\n 'Wallet.pack >>> message: %s, recip_verkeys: %s, sender_verkey: %s',\n message, recip_verkeys, sender_verkey)\n if message is None:\n LOGGER.debug('Wallet.pack <!< No message to pack')\n raise AbsentMessage('No message to pack')\n rv = await crypto.pack_message(self.handle, message, [recip_verkeys] if\n isinstance(recip_verkeys, str) else list(recip_verkeys or [self.\n verkey]), sender_verkey)\n LOGGER.debug('Wallet.pack <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "pack",
"role": "assistant"
}
] |
async def unpack(self, ciphertext: bytes) ->(str, str, str):
"""
Unpack a message. Return triple with cleartext, sender verification key, and recipient verification key.
Raise AbsentMessage for missing ciphertext, or WalletState if wallet is closed. Raise AbsentRecord
if wallet has no key to unpack ciphertext.
:param ciphertext: JWE-like formatted message as pack() produces
:return: cleartext, sender verification key, recipient verification key
"""
LOGGER.debug('Wallet.unpack >>> ciphertext: %s', ciphertext)
if not ciphertext:
LOGGER.debug('Wallet.pack <!< No ciphertext to unpack')
raise AbsentMessage('No ciphertext to unpack')
try:
unpacked = json.loads(await crypto.unpack_message(self.handle,
ciphertext))
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.WalletItemNotFound:
LOGGER.debug(
'Wallet.unpack <!< Wallet %s has no local key to unpack ciphertext'
, self.name)
raise AbsentRecord(
'Wallet {} has no local key to unpack ciphertext'.format(
self.name))
LOGGER.debug(
'Wallet.unpack <!< Wallet %s unpack() raised indy error code {}',
x_indy.error_code)
raise
rv = unpacked['message'], unpacked.get('sender_verkey', None
), unpacked.get('recipient_verkey', None)
LOGGER.debug('Wallet.unpack <<< %s', rv)
return rv
|
unpack
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "async def unpack(self, ciphertext: bytes) ->(str, str, str):\n \"\"\"\n Unpack a message. Return triple with cleartext, sender verification key, and recipient verification key.\n Raise AbsentMessage for missing ciphertext, or WalletState if wallet is closed. Raise AbsentRecord\n if wallet has no key to unpack ciphertext.\n\n :param ciphertext: JWE-like formatted message as pack() produces\n :return: cleartext, sender verification key, recipient verification key\n \"\"\"\n LOGGER.debug('Wallet.unpack >>> ciphertext: %s', ciphertext)\n if not ciphertext:\n LOGGER.debug('Wallet.pack <!< No ciphertext to unpack')\n raise AbsentMessage('No ciphertext to unpack')\n try:\n unpacked = json.loads(await crypto.unpack_message(self.handle,\n ciphertext))\n except IndyError as x_indy:\n if x_indy.error_code == ErrorCode.WalletItemNotFound:\n LOGGER.debug(\n 'Wallet.unpack <!< Wallet %s has no local key to unpack ciphertext'\n , self.name)\n raise AbsentRecord(\n 'Wallet {} has no local key to unpack ciphertext'.format(\n self.name))\n LOGGER.debug(\n 'Wallet.unpack <!< Wallet %s unpack() raised indy error code {}',\n x_indy.error_code)\n raise\n rv = unpacked['message'], unpacked.get('sender_verkey', None\n ), unpacked.get('recipient_verkey', None)\n LOGGER.debug('Wallet.unpack <<< %s', rv)\n return rv\n",
"role": "user"
},
{
"content": "unpack",
"role": "assistant"
}
] |
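Wallet.pack and Wallet.unpack above wrap crypto.pack_message and crypto.unpack_message. A minimal round-trip sketch at the indy-sdk level follows; for simplicity the hypothetical sender and recipient keys live in the same wallet, so one handle can both pack and unpack.

import asyncio
import json

from indy import crypto, wallet


async def demo():
    config = json.dumps({'id': 'pack-demo'})   # hypothetical wallet
    creds = json.dumps({'key': 'secret'})
    await wallet.create_wallet(config, creds)
    wh = await wallet.open_wallet(config, creds)
    try:
        sender_vk = await crypto.create_key(wh, '{}')
        recip_vk = await crypto.create_key(wh, '{}')

        # Authcrypt-pack for a single recipient (cf. Wallet.pack)
        jwe = await crypto.pack_message(wh, 'hello pack', [recip_vk], sender_vk)

        # Unpack on the recipient side (cf. Wallet.unpack)
        unpacked = json.loads(await crypto.unpack_message(wh, jwe))
        print(unpacked['message'], unpacked.get('sender_verkey'), unpacked.get('recipient_verkey'))
    finally:
        await wallet.close_wallet(wh)
        await wallet.delete_wallet(config, creds)


asyncio.get_event_loop().run_until_complete(demo())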