source_code: string (lengths 45 to 7.24k)
function_name: string (lengths 1 to 111)
messages: list
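The rows that follow conform to this schema: each pairs a Python function body with its target name and repeats the pair as a three-turn chat in the messages column. As a minimal sketch of that structure (the helper name and the toy row below are hypothetical; only the system prompt text is taken verbatim from the rows), a record's messages field can be rebuilt from the other two columns:

def row_to_messages(source_code: str, function_name: str) -> list:
    # Mirror the messages column: system task prompt, user turn carrying the
    # function source, assistant turn carrying the target function name.
    return [
        {'role': 'system',
         'content': 'Given the source code of a python function, '
                    'suggest a fitting name for the function.'},
        {'role': 'user', 'content': source_code},
        {'role': 'assistant', 'content': function_name},
    ]

example = row_to_messages('def x(a, b):\n    return a + b', 'add')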
async def reseed_init(self, next_seed: str=None) ->str:
    """
    Begin reseed operation: generate new key. Raise WalletState if wallet is closed.

    :param next_seed: incoming replacement seed (default random)
    :return: new verification key
    """
    LOGGER.debug('Wallet.reseed_init >>> next_seed: [SEED]')
    if not self.handle:
        LOGGER.debug('Wallet.reseed_init <!< Wallet %s is closed', self.name)
        raise WalletState('Wallet {} is closed'.format(self.name))
    rv = await did.replace_keys_start(self.handle, self.did, json.dumps(
        {'seed': next_seed} if next_seed else {}))
    LOGGER.debug('Wallet.reseed_init <<< %s', rv)
    return rv
reseed_init
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def reseed_init(self, next_seed: str=None) ->str:\n \"\"\"\n Begin reseed operation: generate new key. Raise WalletState if wallet is closed.\n\n :param next_seed: incoming replacement seed (default random)\n :return: new verification key\n \"\"\"\n LOGGER.debug('Wallet.reseed_init >>> next_seed: [SEED]')\n if not self.handle:\n LOGGER.debug('Wallet.reseed_init <!< Wallet %s is closed', self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n rv = await did.replace_keys_start(self.handle, self.did, json.dumps({\n 'seed': next_seed} if next_seed else {}))\n LOGGER.debug('Wallet.reseed_init <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "reseed_init", "role": "assistant" } ]
async def reseed_apply(self) ->DIDInfo:
    """
    Replace verification key with new verification key from reseed operation.
    Raise WalletState if wallet is closed.

    :return: DIDInfo with new verification key and metadata for DID
    """
    LOGGER.debug('Wallet.reseed_apply >>>')
    if not self.handle:
        LOGGER.debug('Wallet.reseed_init <!< Wallet %s is closed', self.name)
        raise WalletState('Wallet {} is closed'.format(self.name))
    await did.replace_keys_apply(self.handle, self.did)
    self.verkey = await did.key_for_local_did(self.handle, self.did)
    now = int(time())
    rv = DIDInfo(self.did, self.verkey, {'anchor': True, 'since': now, 'modified': now})
    await did.set_did_metadata(self.handle, self.did, json.dumps(rv.metadata))
    LOGGER.info('Wallet %s set seed hash metadata for DID %s', self.name, self.did)
    LOGGER.debug('Wallet.reseed_apply <<< %s', rv)
    return rv
reseed_apply
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def reseed_apply(self) ->DIDInfo:\n \"\"\"\n Replace verification key with new verification key from reseed operation.\n Raise WalletState if wallet is closed.\n\n :return: DIDInfo with new verification key and metadata for DID\n \"\"\"\n LOGGER.debug('Wallet.reseed_apply >>>')\n if not self.handle:\n LOGGER.debug('Wallet.reseed_init <!< Wallet %s is closed', self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n await did.replace_keys_apply(self.handle, self.did)\n self.verkey = await did.key_for_local_did(self.handle, self.did)\n now = int(time())\n rv = DIDInfo(self.did, self.verkey, {'anchor': True, 'since': now,\n 'modified': now})\n await did.set_did_metadata(self.handle, self.did, json.dumps(rv.metadata))\n LOGGER.info('Wallet %s set seed hash metadata for DID %s', self.name,\n self.did)\n LOGGER.debug('Wallet.reseed_apply <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "reseed_apply", "role": "assistant" } ]
async def send_schema(self, schema_data_json: str) ->str:
    """
    Send schema to ledger, then retrieve it as written to the ledger and return it.
    Raise BadLedgerTxn on failure. Raise BadAttribute for attribute name with spaces or
    reserved for indy-sdk.

    If schema already exists on ledger, log error and return schema.

    :param schema_data_json: schema data json with name, version, attribute names; e.g.,

    ::

        {
            'name': 'my-schema',
            'version': '1.234',
            'attr_names': ['favourite_drink', 'height', 'last_visit_date']
        }

    :return: schema json as written to ledger (or existed a priori)
    """
    LOGGER.debug('Origin.send_schema >>> schema_data_json: %s', schema_data_json)
    schema_data = json.loads(schema_data_json)
    for attr in schema_data['attr_names']:
        if not re.match('(?=[^- ])[-_a-zA-Z0-9 ]+(?<=[^- ])$', attr) or attr.strip().lower() == 'hash':
            LOGGER.debug('Origin.send_schema <!< Bad attribute name [%s]', attr)
            raise BadAttribute('Bad attribute name [{}]'.format(attr))
    s_id = schema_id(self.did, schema_data['name'], schema_data['version'])
    s_key = schema_key(s_id)
    rv_json = None
    with SCHEMA_CACHE.lock:
        try:
            rv_json = await self.get_schema(s_key)
            LOGGER.error(
                'Schema %s version %s already exists on ledger for origin-did %s: not sending',
                schema_data['name'], schema_data['version'], self.did)
        except AbsentSchema:
            _, schema_json = await anoncreds.issuer_create_schema(self.did,
                schema_data['name'], schema_data['version'],
                json.dumps(schema_data['attr_names']))
            req_json = await ledger.build_schema_request(self.did, schema_json)
            await self._sign_submit(req_json)
            for _ in range(16):
                try:
                    rv_json = await self.get_schema(s_key)
                    break
                except AbsentSchema:
                    await sleep(1)
                    LOGGER.info(
                        'Sent schema %s to ledger, waiting 1s for its appearance', s_id)
            if not rv_json:
                LOGGER.debug(
                    'Origin.send_schema <!< timed out waiting on sent schema %s', s_id)
                raise BadLedgerTxn('Timed out waiting on sent schema {}'.format(s_id))
    LOGGER.debug('Origin.send_schema <<< %s', rv_json)
    return rv_json
send_schema
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def send_schema(self, schema_data_json: str) ->str:\n \"\"\"\n Send schema to ledger, then retrieve it as written to the ledger and return it.\n Raise BadLedgerTxn on failure. Raise BadAttribute for attribute name with spaces or\n reserved for indy-sdk.\n\n If schema already exists on ledger, log error and return schema.\n\n :param schema_data_json: schema data json with name, version, attribute names; e.g.,\n\n ::\n\n {\n 'name': 'my-schema',\n 'version': '1.234',\n 'attr_names': ['favourite_drink', 'height', 'last_visit_date']\n }\n\n :return: schema json as written to ledger (or existed a priori)\n \"\"\"\n LOGGER.debug('Origin.send_schema >>> schema_data_json: %s',\n schema_data_json)\n schema_data = json.loads(schema_data_json)\n for attr in schema_data['attr_names']:\n if not re.match('(?=[^- ])[-_a-zA-Z0-9 ]+(?<=[^- ])$', attr\n ) or attr.strip().lower() == 'hash':\n LOGGER.debug('Origin.send_schema <!< Bad attribute name [%s]', attr\n )\n raise BadAttribute('Bad attribute name [{}]'.format(attr))\n s_id = schema_id(self.did, schema_data['name'], schema_data['version'])\n s_key = schema_key(s_id)\n rv_json = None\n with SCHEMA_CACHE.lock:\n try:\n rv_json = await self.get_schema(s_key)\n LOGGER.error(\n 'Schema %s version %s already exists on ledger for origin-did %s: not sending'\n , schema_data['name'], schema_data['version'], self.did)\n except AbsentSchema:\n _, schema_json = await anoncreds.issuer_create_schema(self.did,\n schema_data['name'], schema_data['version'], json.dumps(\n schema_data['attr_names']))\n req_json = await ledger.build_schema_request(self.did, schema_json)\n await self._sign_submit(req_json)\n for _ in range(16):\n try:\n rv_json = await self.get_schema(s_key)\n break\n except AbsentSchema:\n await sleep(1)\n LOGGER.info(\n 'Sent schema %s to ledger, waiting 1s for its appearance'\n , s_id)\n if not rv_json:\n LOGGER.debug(\n 'Origin.send_schema <!< timed out waiting on sent schema %s'\n , s_id)\n raise BadLedgerTxn('Timed out waiting on sent schema {}'.\n format(s_id))\n LOGGER.debug('Origin.send_schema <<< %s', rv_json)\n return rv_json\n", "role": "user" }, { "content": "send_schema", "role": "assistant" } ]
def x() ->Role:
    LOGGER.debug('NominalAnchor.least_role >>>')
    rv = Role.USER
    LOGGER.debug('NominalAnchor.least_role <<< %s', rv)
    return rv
least_role
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x() ->Role:\n LOGGER.debug('NominalAnchor.least_role >>>')\n rv = Role.USER\n LOGGER.debug('NominalAnchor.least_role <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "least_role", "role": "assistant" } ]
async def close(self) ->None:
    """
    Explicit exit. If so configured, populate cache to prove for any creds on schemata,
    cred defs, and rev regs marked of interest in configuration at initialization,
    archive cache, and purge prior cache archives.

    :return: current object
    """
    LOGGER.debug('OrgHubAnchor.close >>>')
    archive_caches = False
    if self.config.get('archive-holder-prover-caches-on-close', False):
        archive_caches = True
        await self.load_cache_for_proof(False)
    if self.config.get('archive-verifier-caches-on-close', {}):
        archive_caches = True
        await self.load_cache_for_verification(False)
    if archive_caches:
        ArchivableCaches.archive(self.dir_cache)
        ArchivableCaches.purge_archives(self.dir_cache, True)
    for path_rr_id in Tails.links(self._dir_tails):
        rr_id = basename(path_rr_id)
        try:
            await HolderProver._sync_revoc_for_proof(self, rr_id)
        except ClosedPool:
            LOGGER.warning(
                'OrgHubAnchor sync-revoc on close required ledger for %s but pool was closed',
                rr_id)
    LOGGER.debug('OrgHubAnchor.close <<<')
close
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def close(self) ->None:\n \"\"\"\n Explicit exit. If so configured, populate cache to prove for any creds on schemata,\n cred defs, and rev regs marked of interest in configuration at initialization,\n archive cache, and purge prior cache archives.\n\n :return: current object\n \"\"\"\n LOGGER.debug('OrgHubAnchor.close >>>')\n archive_caches = False\n if self.config.get('archive-holder-prover-caches-on-close', False):\n archive_caches = True\n await self.load_cache_for_proof(False)\n if self.config.get('archive-verifier-caches-on-close', {}):\n archive_caches = True\n await self.load_cache_for_verification(False)\n if archive_caches:\n ArchivableCaches.archive(self.dir_cache)\n ArchivableCaches.purge_archives(self.dir_cache, True)\n for path_rr_id in Tails.links(self._dir_tails):\n rr_id = basename(path_rr_id)\n try:\n await HolderProver._sync_revoc_for_proof(self, rr_id)\n except ClosedPool:\n LOGGER.warning(\n 'OrgHubAnchor sync-revoc on close required ledger for %s but pool was closed'\n , rr_id)\n LOGGER.debug('OrgHubAnchor.close <<<')\n", "role": "user" }, { "content": "close", "role": "assistant" } ]
def x(query: dict) ->dict:
    for k in [qk for qk in query]:
        attr_match = re.match('attr::([^:]+)::(marker|value)$', k)
        if isinstance(query[k], dict):
            query[k] = canon_cred_wql(query[k])
        if k == '$or':
            if not isinstance(query[k], list):
                raise BadWalletQuery('Bad WQL; $or value must be a list in {}'.format(json.dumps(query)))
            query[k] = [canon_cred_wql(subq) for subq in query[k]]
        if attr_match:
            qkey = 'attr::{}::{}'.format(canon(attr_match.group(1)), canon(attr_match.group(2)))
            query[qkey] = query.pop(k)
            tag_value = query[qkey]
            if isinstance(tag_value, dict) and len(tag_value) == 1:
                if '$in' in tag_value:
                    tag_value['$in'] = [raw(val) for val in tag_value.pop('$in')]
                else:
                    wql_op = set(tag_value.keys()).pop()
                    tag_value[wql_op] = raw(tag_value[wql_op])
            else:
                query[qkey] = raw(query[qkey])
    return query
canon_cred_wql
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(query: dict) ->dict:\n for k in [qk for qk in query]:\n attr_match = re.match('attr::([^:]+)::(marker|value)$', k)\n if isinstance(query[k], dict):\n query[k] = canon_cred_wql(query[k])\n if k == '$or':\n if not isinstance(query[k], list):\n raise BadWalletQuery('Bad WQL; $or value must be a list in {}'\n .format(json.dumps(query)))\n query[k] = [canon_cred_wql(subq) for subq in query[k]]\n if attr_match:\n qkey = 'attr::{}::{}'.format(canon(attr_match.group(1)), canon(\n attr_match.group(2)))\n query[qkey] = query.pop(k)\n tag_value = query[qkey]\n if isinstance(tag_value, dict) and len(tag_value) == 1:\n if '$in' in tag_value:\n tag_value['$in'] = [raw(val) for val in tag_value.pop(\n '$in')]\n else:\n wql_op = set(tag_value.keys()).pop()\n tag_value[wql_op] = raw(tag_value[wql_op])\n else:\n query[qkey] = raw(query[qkey])\n return query\n", "role": "user" }, { "content": "canon_cred_wql", "role": "assistant" } ]
def x(query: dict=None) ->dict:
    if not query:
        return {'~their_did': {'$neq': ''}}
    for k in [qk for qk in query]:
        if isinstance(query[k], dict):
            query[k] = canon_pairwise_wql(query[k])
        if k == '$or':
            if not isinstance(query[k], list):
                raise BadWalletQuery('Bad WQL; $or value must be a list in {}'.format(json.dumps(query)))
            query[k] = [canon_pairwise_wql(subq) for subq in query[k]]
        elif k == '$not':
            query[k] = canon_pairwise_wql(query.pop(k))
        elif k not in WQL_1_OPS:
            qkey = canon_pairwise_tag(k)
            query[qkey] = query.pop(k)
            tag_value = query[qkey]
            if isinstance(tag_value, dict) and len(tag_value) == 1:
                if '$in' in tag_value:
                    tag_value['$in'] = [raw(val) for val in tag_value['$in']]
                else:
                    wql_op = set(tag_value.keys()).pop()
                    tag_value[wql_op] = raw(tag_value[wql_op])
            else:
                query[qkey] = raw(query.pop(qkey))
    return query
canon_pairwise_wql
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(query: dict=None) ->dict:\n if not query:\n return {'~their_did': {'$neq': ''}}\n for k in [qk for qk in query]:\n if isinstance(query[k], dict):\n query[k] = canon_pairwise_wql(query[k])\n if k == '$or':\n if not isinstance(query[k], list):\n raise BadWalletQuery('Bad WQL; $or value must be a list in {}'\n .format(json.dumps(query)))\n query[k] = [canon_pairwise_wql(subq) for subq in query[k]]\n elif k == '$not':\n query[k] = canon_pairwise_wql(query.pop(k))\n elif k not in WQL_1_OPS:\n qkey = canon_pairwise_tag(k)\n query[qkey] = query.pop(k)\n tag_value = query[qkey]\n if isinstance(tag_value, dict) and len(tag_value) == 1:\n if '$in' in tag_value:\n tag_value['$in'] = [raw(val) for val in tag_value['$in']]\n else:\n wql_op = set(tag_value.keys()).pop()\n tag_value[wql_op] = raw(tag_value[wql_op])\n else:\n query[qkey] = raw(query.pop(qkey))\n return query\n", "role": "user" }, { "content": "canon_pairwise_wql", "role": "assistant" } ]
def x(filename):
    try:
        with open(filename) as fh_req:
            return [line.strip() for line in fh_req if line.strip() and not line.startswith('#')]
    except FileNotFoundError:
        print('File not found: {}'.format(realpath(filename)), file=stderr)
        raise
parse_requirements
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(filename):\n try:\n with open(filename) as fh_req:\n return [line.strip() for line in fh_req if line.strip() and not\n line.startswith('#')]\n except FileNotFoundError:\n print('File not found: {}'.format(realpath(filename)), file=stderr)\n raise\n", "role": "user" }, { "content": "parse_requirements", "role": "assistant" } ]
async def add_config(self, name: str, genesis: str=None) ->None:
    """
    Given pool name and genesis transaction path or data, add node pool configuration to indy home directory.
    Raise ExtantPool if node pool configuration on input name already exists.

    :param name: pool name
    :param genesis: genesis transaction path or raw data
    """
    LOGGER.debug('NodePoolManager.__init__ >>> name: %s, genesis: %s', name, genesis)
    if name in await self.list():
        LOGGER.debug(
            'NodePoolManager.add_config: <!< Node pool %s configuration already present', name)
        raise ExtantPool('Node pool {} configuration already present'.format(name))
    genesis_tmp = None
    path_gen = realpath(expanduser(expandvars(genesis)))
    try:
        if not isfile(path_gen):
            genesis_tmp = NamedTemporaryFile(mode='w+b', buffering=0, delete=False)
            with genesis_tmp:
                genesis_tmp.write(genesis.encode())
        await pool.create_pool_ledger_config(name, json.dumps(
            {'genesis_txn': path_gen if isfile(path_gen) else genesis_tmp.name}))
    finally:
        if genesis_tmp:
            remove(genesis_tmp.name)
    LOGGER.debug('NodePoolManager.__init__ <<<')
add_config
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def add_config(self, name: str, genesis: str=None) ->None:\n \"\"\"\n Given pool name and genesis transaction path or data, add node pool configuration to indy home directory.\n Raise ExtantPool if node pool configuration on input name already exists.\n\n :param name: pool name\n :param genesis: genesis transaction path or raw data\n \"\"\"\n LOGGER.debug('NodePoolManager.__init__ >>> name: %s, genesis: %s', name,\n genesis)\n if name in await self.list():\n LOGGER.debug(\n 'NodePoolManager.add_config: <!< Node pool %s configuration already present'\n , name)\n raise ExtantPool('Node pool {} configuration already present'.\n format(name))\n genesis_tmp = None\n path_gen = realpath(expanduser(expandvars(genesis)))\n try:\n if not isfile(path_gen):\n genesis_tmp = NamedTemporaryFile(mode='w+b', buffering=0,\n delete=False)\n with genesis_tmp:\n genesis_tmp.write(genesis.encode())\n await pool.create_pool_ledger_config(name, json.dumps({\n 'genesis_txn': path_gen if isfile(path_gen) else genesis_tmp.name})\n )\n finally:\n if genesis_tmp:\n remove(genesis_tmp.name)\n LOGGER.debug('NodePoolManager.__init__ <<<')\n", "role": "user" }, { "content": "add_config", "role": "assistant" } ]
async def list(self) ->List[str]:
    """
    Return list of pool names configured, empty list for none.

    :return: list of pool names.
    """
    LOGGER.debug('NodePoolManager.list >>>')
    rv = [p['pool'] for p in await pool.list_pools()]
    LOGGER.debug('NodePoolManager.list <<< %s', rv)
    return rv
list
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def list(self) ->List[str]:\n \"\"\"\n Return list of pool names configured, empty list for none.\n\n :return: list of pool names.\n \"\"\"\n LOGGER.debug('NodePoolManager.list >>>')\n rv = [p['pool'] for p in await pool.list_pools()]\n LOGGER.debug('NodePoolManager.list <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "list", "role": "assistant" } ]
def x(self, name: str, config: dict=None) ->NodePool:
    LOGGER.debug('NodePoolManager.node_pool >>>')
    rv = NodePool(name, self.protocol, config)
    LOGGER.debug('NodePoolManager.node_pool <<< %s', rv)
    return rv
get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, name: str, config: dict=None) ->NodePool:\n LOGGER.debug('NodePoolManager.node_pool >>>')\n rv = NodePool(name, self.protocol, config)\n LOGGER.debug('NodePoolManager.node_pool <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "get", "role": "assistant" } ]
async def remove(self, name: str) ->None:
    """
    Remove serialized pool info if it exists. Abstain from removing open node pool.
    """
    LOGGER.debug('NodePoolManager.remove >>> name: %s', name)
    try:
        await pool.delete_pool_ledger_config(name)
    except IndyError as x_indy:
        LOGGER.info('Abstaining from node pool removal; indy-sdk error code %s',
            x_indy.error_code)
    LOGGER.debug('NodePool.remove <<<')
remove
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def remove(self, name: str) ->None:\n \"\"\"\n Remove serialized pool info if it exists. Abstain from removing open node pool.\n \"\"\"\n LOGGER.debug('NodePoolManager.remove >>> name: %s', name)\n try:\n await pool.delete_pool_ledger_config(name)\n except IndyError as x_indy:\n LOGGER.info('Abstaining from node pool removal; indy-sdk error code %s'\n , x_indy.error_code)\n LOGGER.debug('NodePool.remove <<<')\n", "role": "user" }, { "content": "remove", "role": "assistant" } ]
def x(self) ->dict:
    return {k: self._pubkey[k] for k in self._pubkey if self._pubkey[k].authn}
authnkey
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self) ->dict:\n return {k: self._pubkey[k] for k in self._pubkey if self._pubkey[k].authn}\n", "role": "user" }, { "content": "authnkey", "role": "assistant" } ]
def x(self, item: Union[Service, PublicKey]) ->'DIDDoc':
    if isinstance(item, Service):
        self.service[item.id] = item
    elif isinstance(item, PublicKey):
        self.pubkey[item.id] = item
    else:
        raise BadDIDDocItem('Cannot add item {} to DIDDoc on DID {}'.format(item, self.did))
set
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, item: Union[Service, PublicKey]) ->'DIDDoc':\n if isinstance(item, Service):\n self.service[item.id] = item\n elif isinstance(item, PublicKey):\n self.pubkey[item.id] = item\n else:\n raise BadDIDDocItem('Cannot add item {} to DIDDoc on DID {}'.format\n (item, self.did))\n", "role": "user" }, { "content": "set", "role": "assistant" } ]
def x(self) ->str:
    return {'@context': DIDDoc.CONTEXT,
        'id': canon_ref(self.did, self.did),
        'publicKey': [pubkey.to_dict() for pubkey in self.pubkey.values()],
        'authentication': [{'type': pubkey.type.authn_type,
            'publicKey': canon_ref(self.did, pubkey.id)}
            for pubkey in self.pubkey.values() if pubkey.authn],
        'service': [service.to_dict() for service in self.service.values()]}
serialize
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self) ->str:\n return {'@context': DIDDoc.CONTEXT, 'id': canon_ref(self.did, self.did),\n 'publicKey': [pubkey.to_dict() for pubkey in self.pubkey.values()],\n 'authentication': [{'type': pubkey.type.authn_type, 'publicKey':\n canon_ref(self.did, pubkey.id)} for pubkey in self.pubkey.values() if\n pubkey.authn], 'service': [service.to_dict() for service in self.\n service.values()]}\n", "role": "user" }, { "content": "serialize", "role": "assistant" } ]
def x(self, service: dict, tags: Union[Sequence[str], str]) ->List[PublicKey]:
    rv = []
    for tag in ([tags] if isinstance(tags, str) else list(tags)):
        for svc_key in service.get(tag, {}):
            canon_key = canon_ref(self.did, svc_key)
            pubkey = None
            if '#' in svc_key:
                if canon_key in self.pubkey:
                    pubkey = self.pubkey[canon_key]
                else:
                    LOGGER.debug(
                        'DIDDoc.add_service_pubkeys <!< DID document %s has no public key %s',
                        self.did, svc_key)
                    raise AbsentDIDDocItem(
                        'DID document {} has no public key {}'.format(self.did, svc_key))
            else:
                for existing_pubkey in self.pubkey.values():
                    if existing_pubkey.value == svc_key:
                        pubkey = existing_pubkey
                        break
                else:
                    pubkey = PublicKey(self.did, ident=svc_key[-9:-1], value=svc_key)
                    self._pubkey[pubkey.id] = pubkey
            if pubkey and pubkey not in rv:
                rv.append(pubkey)
    return rv
add_service_pubkeys
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, service: dict, tags: Union[Sequence[str], str]) ->List[PublicKey]:\n rv = []\n for tag in ([tags] if isinstance(tags, str) else list(tags)):\n for svc_key in service.get(tag, {}):\n canon_key = canon_ref(self.did, svc_key)\n pubkey = None\n if '#' in svc_key:\n if canon_key in self.pubkey:\n pubkey = self.pubkey[canon_key]\n else:\n LOGGER.debug(\n 'DIDDoc.add_service_pubkeys <!< DID document %s has no public key %s'\n , self.did, svc_key)\n raise AbsentDIDDocItem(\n 'DID document {} has no public key {}'.format(self.\n did, svc_key))\n else:\n for existing_pubkey in self.pubkey.values():\n if existing_pubkey.value == svc_key:\n pubkey = existing_pubkey\n break\n else:\n pubkey = PublicKey(self.did, ident=svc_key[-9:-1],\n value=svc_key)\n self._pubkey[pubkey.id] = pubkey\n if pubkey and pubkey not in rv:\n rv.append(pubkey)\n return rv\n", "role": "user" }, { "content": "add_service_pubkeys", "role": "assistant" } ]
def x(cls, did_doc: dict) ->'DIDDoc':
    rv = None
    if 'id' in did_doc:
        rv = DIDDoc(did_doc['id'])
    else:
        if 'publicKey' not in did_doc:
            LOGGER.debug('DIDDoc.deserialize <!< no identifier in DID document')
            raise AbsentDIDDocItem('No identifier in DID document')
        for pubkey in did_doc['publicKey']:
            pubkey_did = canon_did(resource(pubkey['id']))
            if ok_did(pubkey_did):
                rv = DIDDoc(pubkey_did)
                break
        else:
            LOGGER.debug('DIDDoc.deserialize <!< no identifier in DID document')
            raise AbsentDIDDocItem('No identifier in DID document')
    for pubkey in did_doc['publicKey']:
        pubkey_type = PublicKeyType.get(pubkey['type'])
        authn = any(canon_ref(rv.did, ak.get('publicKey', '')) == canon_ref(rv.did, pubkey['id'])
            for ak in did_doc.get('authentication', {})
            if isinstance(ak.get('publicKey', None), str))
        key = PublicKey(rv.did, pubkey['id'], pubkey[pubkey_type.specifier],
            pubkey_type, canon_did(pubkey['controller']), authn)
        rv.pubkey[key.id] = key
    for akey in did_doc.get('authentication', {}):
        pk_ref = akey.get('publicKey', None)
        if pk_ref:
            pass
        else:
            pubkey_type = PublicKeyType.get(akey['type'])
            key = PublicKey(rv.did, akey['id'], akey[pubkey_type.specifier],
                pubkey_type, canon_did(akey['controller']), True)
            rv.pubkey[key.id] = key
    for service in did_doc.get('service', {}):
        endpoint = service['serviceEndpoint']
        svc = Service(rv.did,
            service.get('id', canon_ref(rv.did, 'assigned-service-{}'.format(len(rv.service)), ';')),
            service['type'],
            rv.add_service_pubkeys(service, 'recipientKeys'),
            rv.add_service_pubkeys(service, ['mediatorKeys', 'routingKeys']),
            canon_ref(rv.did, endpoint, ';') if ';' in endpoint else endpoint,
            service.get('priority', None))
        rv.service[svc.id] = svc
    return rv
deserialize
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(cls, did_doc: dict) ->'DIDDoc':\n rv = None\n if 'id' in did_doc:\n rv = DIDDoc(did_doc['id'])\n else:\n if 'publicKey' not in did_doc:\n LOGGER.debug('DIDDoc.deserialize <!< no identifier in DID document'\n )\n raise AbsentDIDDocItem('No identifier in DID document')\n for pubkey in did_doc['publicKey']:\n pubkey_did = canon_did(resource(pubkey['id']))\n if ok_did(pubkey_did):\n rv = DIDDoc(pubkey_did)\n break\n else:\n LOGGER.debug('DIDDoc.deserialize <!< no identifier in DID document'\n )\n raise AbsentDIDDocItem('No identifier in DID document')\n for pubkey in did_doc['publicKey']:\n pubkey_type = PublicKeyType.get(pubkey['type'])\n authn = any(canon_ref(rv.did, ak.get('publicKey', '')) == canon_ref\n (rv.did, pubkey['id']) for ak in did_doc.get('authentication',\n {}) if isinstance(ak.get('publicKey', None), str))\n key = PublicKey(rv.did, pubkey['id'], pubkey[pubkey_type.specifier],\n pubkey_type, canon_did(pubkey['controller']), authn)\n rv.pubkey[key.id] = key\n for akey in did_doc.get('authentication', {}):\n pk_ref = akey.get('publicKey', None)\n if pk_ref:\n pass\n else:\n pubkey_type = PublicKeyType.get(akey['type'])\n key = PublicKey(rv.did, akey['id'], akey[pubkey_type.specifier],\n pubkey_type, canon_did(akey['controller']), True)\n rv.pubkey[key.id] = key\n for service in did_doc.get('service', {}):\n endpoint = service['serviceEndpoint']\n svc = Service(rv.did, service.get('id', canon_ref(rv.did,\n 'assigned-service-{}'.format(len(rv.service)), ';')), service[\n 'type'], rv.add_service_pubkeys(service, 'recipientKeys'), rv.\n add_service_pubkeys(service, ['mediatorKeys', 'routingKeys']), \n canon_ref(rv.did, endpoint, ';') if ';' in endpoint else\n endpoint, service.get('priority', None))\n rv.service[svc.id] = svc\n return rv\n", "role": "user" }, { "content": "deserialize", "role": "assistant" } ]
def x(version: str) ->'Protocol':
    return (Protocol.V_13 if version == Protocol.V_13.value.name else Protocol.DEFAULT)
get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(version: str) ->'Protocol':\n return (Protocol.V_13 if version == Protocol.V_13.value.name else\n Protocol.DEFAULT)\n", "role": "user" }, { "content": "get", "role": "assistant" } ]
def x(self, for_box_id: bool=False) ->str:
    if for_box_id:
        return '' if self == Protocol.V_13 else ':tag'
    return 'tag'
cd_id_tag
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, for_box_id: bool=False) ->str:\n if for_box_id:\n return '' if self == Protocol.V_13 else ':tag'\n return 'tag'\n", "role": "user" }, { "content": "cd_id_tag", "role": "assistant" } ]
def x(self, issuer_did: str, schema_seq_no: int) ->str:
    return '{}:3:CL:{}{}'.format(issuer_did, schema_seq_no, self.cd_id_tag(True))
cred_def_id
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, issuer_did: str, schema_seq_no: int) ->str:\n return '{}:3:CL:{}{}'.format(issuer_did, schema_seq_no, self.cd_id_tag(\n True))\n", "role": "user" }, { "content": "cred_def_id", "role": "assistant" } ]
def x(self, txn: dict) ->SchemaKey:
    rv = None
    if self == Protocol.V_13:
        rv = SchemaKey(txn['identifier'], txn['data']['name'], txn['data']['version'])
    else:
        txn_txn = txn.get('txn', None) or txn
        rv = SchemaKey(txn_txn['metadata']['from'],
            txn_txn['data']['data']['name'],
            txn_txn['data']['data']['version'])
    return rv
txn_data2schema_key
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, txn: dict) ->SchemaKey:\n rv = None\n if self == Protocol.V_13:\n rv = SchemaKey(txn['identifier'], txn['data']['name'], txn['data'][\n 'version'])\n else:\n txn_txn = txn.get('txn', None) or txn\n rv = SchemaKey(txn_txn['metadata']['from'], txn_txn['data']['data']\n ['name'], txn_txn['data']['data']['version'])\n return rv\n", "role": "user" }, { "content": "txn_data2schema_key", "role": "assistant" } ]
def x(self, txn: dict) ->str:
    rv_json = json.dumps({})
    if self == Protocol.V_13:
        rv_json = json.dumps(txn['result'].get('data', {}))
    else:
        rv_json = json.dumps((txn['result'].get('data', {}) or {}).get('txn', {}))
    return rv_json
txn2data
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, txn: dict) ->str:\n rv_json = json.dumps({})\n if self == Protocol.V_13:\n rv_json = json.dumps(txn['result'].get('data', {}))\n else:\n rv_json = json.dumps((txn['result'].get('data', {}) or {}).get(\n 'txn', {}))\n return rv_json\n", "role": "user" }, { "content": "txn2data", "role": "assistant" } ]
def x(self, txn: dict) ->int:
    rv = None
    if self == Protocol.V_13:
        rv = txn['result']['txnTime']
    else:
        rv = txn['result']['txnMetadata']['txnTime']
    return rv
txn2epoch
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, txn: dict) ->int:\n rv = None\n if self == Protocol.V_13:\n rv = txn['result']['txnTime']\n else:\n rv = txn['result']['txnMetadata']['txnTime']\n return rv\n", "role": "user" }, { "content": "txn2epoch", "role": "assistant" } ]
def x(self, genesis_txn: dict) ->tuple:
    txn_data = genesis_txn['data'] if self == Protocol.V_13 else genesis_txn['txn']['data']['data']
    return txn_data['node_ip'], txn_data['node_port']
genesis_host_port
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, genesis_txn: dict) ->tuple:\n txn_data = genesis_txn['data'] if self == Protocol.V_13 else genesis_txn[\n 'txn']['data']['data']\n return txn_data['node_ip'], txn_data['node_port']\n", "role": "user" }, { "content": "genesis_host_port", "role": "assistant" } ]
async def open(self) ->'Tails':
    """
    Open reader handle and return current object.

    :return: current object
    """
    LOGGER.debug('Tails.open >>>')
    self._reader_handle = await blob_storage.open_reader('default', self._tails_config_json)
    LOGGER.debug('Tails.open <<<')
    return self
open
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def open(self) ->'Tails':\n \"\"\"\n Open reader handle and return current object.\n\n :return: current object\n \"\"\"\n LOGGER.debug('Tails.open >>>')\n self._reader_handle = await blob_storage.open_reader('default', self.\n _tails_config_json)\n LOGGER.debug('Tails.open <<<')\n return self\n", "role": "user" }, { "content": "open", "role": "assistant" } ]
def x(token: str) ->bool:
    LOGGER.debug('Tails.ok_hash >>> token: %s', token)
    rv = re.match('[{}]{{42,44}}$'.format(B58), token) is not None
    LOGGER.debug('Tails.ok_hash <<< %s', rv)
    return rv
ok_hash
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(token: str) ->bool:\n LOGGER.debug('Tails.ok_hash >>> token: %s', token)\n rv = re.match('[{}]{{42,44}}$'.format(B58), token) is not None\n LOGGER.debug('Tails.ok_hash <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "ok_hash", "role": "assistant" } ]
def x(base_dir: str, rr_id: str, tails_hash: str) ->None:
    LOGGER.debug('Tails.associate >>> base_dir: %s, rr_id: %s, tails_hash: %s',
        base_dir, rr_id, tails_hash)
    if not ok_rev_reg_id(rr_id):
        LOGGER.debug('Tails.associate <!< Bad rev reg id %s', rr_id)
        raise BadIdentifier('Bad rev reg id {}'.format(rr_id))
    if not Tails.ok_hash(tails_hash):
        LOGGER.debug('Tails.associate <!< Bad tails hash %s', tails_hash)
        raise BadIdentifier('Bad tails hash {}'.format(tails_hash))
    cd_id = rev_reg_id2cred_def_id(rr_id)
    directory = join(base_dir, cd_id)
    cwd = getcwd()
    makedirs(directory, exist_ok=True)
    chdir(directory)
    symlink(tails_hash, rr_id)
    chdir(cwd)
    LOGGER.debug('Tails.associate <<<')
associate
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(base_dir: str, rr_id: str, tails_hash: str) ->None:\n LOGGER.debug('Tails.associate >>> base_dir: %s, rr_id: %s, tails_hash: %s',\n base_dir, rr_id, tails_hash)\n if not ok_rev_reg_id(rr_id):\n LOGGER.debug('Tails.associate <!< Bad rev reg id %s', rr_id)\n raise BadIdentifier('Bad rev reg id {}'.format(rr_id))\n if not Tails.ok_hash(tails_hash):\n LOGGER.debug('Tails.associate <!< Bad tails hash %s', tails_hash)\n raise BadIdentifier('Bad tails hash {}'.format(tails_hash))\n cd_id = rev_reg_id2cred_def_id(rr_id)\n directory = join(base_dir, cd_id)\n cwd = getcwd()\n makedirs(directory, exist_ok=True)\n chdir(directory)\n symlink(tails_hash, rr_id)\n chdir(cwd)\n LOGGER.debug('Tails.associate <<<')\n", "role": "user" }, { "content": "associate", "role": "assistant" } ]
def x(base_dir: str, rr_id: str) ->str:
    LOGGER.debug('Tails.dir >>> base_dir: %s, rr_id: %s', base_dir, rr_id)
    if not ok_rev_reg_id(rr_id):
        LOGGER.debug('Tails.dir <!< Bad rev reg id %s', rr_id)
        raise BadIdentifier('Bad rev reg id {}'.format(rr_id))
    rv = join(base_dir, rev_reg_id2cred_def_id(rr_id))
    LOGGER.debug('Tails.dir <<< %s', rv)
    return rv
dir
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(base_dir: str, rr_id: str) ->str:\n LOGGER.debug('Tails.dir >>> base_dir: %s, rr_id: %s', base_dir, rr_id)\n if not ok_rev_reg_id(rr_id):\n LOGGER.debug('Tails.dir <!< Bad rev reg id %s', rr_id)\n raise BadIdentifier('Bad rev reg id {}'.format(rr_id))\n rv = join(base_dir, rev_reg_id2cred_def_id(rr_id))\n LOGGER.debug('Tails.dir <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "dir", "role": "assistant" } ]
def x(base_dir: str, rr_id: str) ->str:
    LOGGER.debug('Tails.linked >>> base_dir: %s, rr_id: %s', base_dir, rr_id)
    if not ok_rev_reg_id(rr_id):
        LOGGER.debug('Tails.linked <!< Bad rev reg id %s', rr_id)
        raise BadIdentifier('Bad rev reg id {}'.format(rr_id))
    cd_id = rev_reg_id2cred_def_id(rr_id)
    link = join(base_dir, cd_id, rr_id)
    rv = join(base_dir, cd_id, readlink(link)) if islink(link) else None
    LOGGER.debug('Tails.linked <<< %s', rv)
    return rv
linked
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(base_dir: str, rr_id: str) ->str:\n LOGGER.debug('Tails.linked >>> base_dir: %s, rr_id: %s', base_dir, rr_id)\n if not ok_rev_reg_id(rr_id):\n LOGGER.debug('Tails.linked <!< Bad rev reg id %s', rr_id)\n raise BadIdentifier('Bad rev reg id {}'.format(rr_id))\n cd_id = rev_reg_id2cred_def_id(rr_id)\n link = join(base_dir, cd_id, rr_id)\n rv = join(base_dir, cd_id, readlink(link)) if islink(link) else None\n LOGGER.debug('Tails.linked <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "linked", "role": "assistant" } ]
def x(base_dir: str, issuer_did: str=None) ->set:
    LOGGER.debug('Tails.links >>> base_dir: %s, issuer_did: %s', base_dir, issuer_did)
    if issuer_did and not ok_did(issuer_did):
        LOGGER.debug('Tails.links <!< Bad DID %s', issuer_did)
        raise BadIdentifier('Bad DID {}'.format(issuer_did))
    rv = set()
    for dir_path, dir_names, file_names in walk(base_dir, topdown=True):
        dir_names[:] = [d for d in dir_names if not d.startswith('.')]
        for file_name in file_names:
            if islink(join(dir_path, file_name)) and (not issuer_did or
                    ok_rev_reg_id(file_name, issuer_did)):
                rv.add(join(dir_path, file_name))
    LOGGER.debug('Tails.links <<< %s', rv)
    return rv
links
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(base_dir: str, issuer_did: str=None) ->set:\n LOGGER.debug('Tails.links >>> base_dir: %s, issuer_did: %s', base_dir,\n issuer_did)\n if issuer_did and not ok_did(issuer_did):\n LOGGER.debug('Tails.links <!< Bad DID %s', issuer_did)\n raise BadIdentifier('Bad DID {}'.format(issuer_did))\n rv = set()\n for dir_path, dir_names, file_names in walk(base_dir, topdown=True):\n dir_names[:] = [d for d in dir_names if not d.startswith('.')]\n for file_name in file_names:\n if islink(join(dir_path, file_name)) and (not issuer_did or\n ok_rev_reg_id(file_name, issuer_did)):\n rv.add(join(dir_path, file_name))\n LOGGER.debug('Tails.links <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "links", "role": "assistant" } ]
def x(base_dir: str) ->set:
    LOGGER.debug('Tails.unlinked >>> base_dir: %s', base_dir)
    rv = set()
    for dir_path, dir_names, file_names in walk(base_dir, topdown=True):
        dir_names[:] = [d for d in dir_names if not d.startswith('.')]
        for file_name in file_names:
            if isfile(join(dir_path, file_name)) and Tails.ok_hash(file_name):
                rv.add(join(dir_path, file_name))
    rv -= {join(dirname(path_link), readlink(path_link)) for path_link in Tails.links(base_dir)}
    LOGGER.debug('Tails.unlinked <<< %s', rv)
    return rv
unlinked
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(base_dir: str) ->set:\n LOGGER.debug('Tails.unlinked >>> base_dir: %s', base_dir)\n rv = set()\n for dir_path, dir_names, file_names in walk(base_dir, topdown=True):\n dir_names[:] = [d for d in dir_names if not d.startswith('.')]\n for file_name in file_names:\n if isfile(join(dir_path, file_name)) and Tails.ok_hash(file_name):\n rv.add(join(dir_path, file_name))\n rv -= {join(dirname(path_link), readlink(path_link)) for path_link in\n Tails.links(base_dir)}\n LOGGER.debug('Tails.unlinked <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "unlinked", "role": "assistant" } ]
def x(base_dir: str, cd_id: str) ->(str, int):
    LOGGER.debug('Tails.next_tag >>> base_dir: %s, cd_id: %s', base_dir, cd_id)
    if not ok_cred_def_id(cd_id):
        LOGGER.debug('Tails.next_tag <!< Bad cred def id %s', cd_id)
        raise BadIdentifier('Bad cred def id {}'.format(cd_id))
    tag = 1 + max([int(rev_reg_id2tag(basename(f))) for f in Tails.links(base_dir)
        if cd_id in basename(f)] + [-1])
    size = min(2 ** (tag + 6), Tails.MAX_SIZE)
    rv = tag, size
    LOGGER.debug('Tails.next_tag <<< %s', rv)
    return rv
next_tag
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(base_dir: str, cd_id: str) ->(str, int):\n LOGGER.debug('Tails.next_tag >>> base_dir: %s, cd_id: %s', base_dir, cd_id)\n if not ok_cred_def_id(cd_id):\n LOGGER.debug('Tails.next_tag <!< Bad cred def id %s', cd_id)\n raise BadIdentifier('Bad cred def id {}'.format(cd_id))\n tag = 1 + max([int(rev_reg_id2tag(basename(f))) for f in Tails.links(\n base_dir) if cd_id in basename(f)] + [-1])\n size = min(2 ** (tag + 6), Tails.MAX_SIZE)\n rv = tag, size\n LOGGER.debug('Tails.next_tag <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "next_tag", "role": "assistant" } ]
def x(base_dir: str, cd_id: str) ->str:
    LOGGER.debug('Tails.current_rev_reg_id >>> base_dir: %s, cd_id: %s', base_dir, cd_id)
    if not ok_cred_def_id(cd_id):
        LOGGER.debug('Tails.current_rev_reg_id <!< Bad cred def id %s', cd_id)
        raise BadIdentifier('Bad cred def id {}'.format(cd_id))
    tags = [int(rev_reg_id2tag(basename(f))) for f in Tails.links(base_dir)
        if cd_id in basename(f)]
    if not tags:
        raise AbsentTails('No tails files present for cred def id {}'.format(cd_id))
    rv = rev_reg_id(cd_id, str(max(tags)))
    LOGGER.debug('Tails.current_rev_reg_id <<< %s', rv)
    return rv
current_rev_reg_id
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(base_dir: str, cd_id: str) ->str:\n LOGGER.debug('Tails.current_rev_reg_id >>> base_dir: %s, cd_id: %s',\n base_dir, cd_id)\n if not ok_cred_def_id(cd_id):\n LOGGER.debug('Tails.current_rev_reg_id <!< Bad cred def id %s', cd_id)\n raise BadIdentifier('Bad cred def id {}'.format(cd_id))\n tags = [int(rev_reg_id2tag(basename(f))) for f in Tails.links(base_dir) if\n cd_id in basename(f)]\n if not tags:\n raise AbsentTails('No tails files present for cred def id {}'.\n format(cd_id))\n rv = rev_reg_id(cd_id, str(max(tags)))\n LOGGER.debug('Tails.current_rev_reg_id <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "current_rev_reg_id", "role": "assistant" } ]
def x(self) ->str:
    config = json.loads(self._tails_config_json)
    return join(config['base_dir'], config['file'])
path
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self) ->str:\n config = json.loads(self._tails_config_json)\n return join(config['base_dir'], config['file'])\n", "role": "user" }, { "content": "path", "role": "assistant" } ]
def x(orig: Any) ->str:
    if isinstance(orig, int) and -I32_BOUND <= orig < I32_BOUND:
        return str(int(orig))
    try:
        i32orig = int(str(orig))
        if -I32_BOUND <= i32orig < I32_BOUND:
            return str(i32orig)
    except (ValueError, TypeError):
        pass
    rv = int.from_bytes(sha256(raw(orig).encode()).digest(), 'big')
    while -I32_BOUND <= rv < I32_BOUND:
        rv = int.from_bytes(sha256(rv.encode()).digest(), 'big')
    return str(rv)
encode
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(orig: Any) ->str:\n if isinstance(orig, int) and -I32_BOUND <= orig < I32_BOUND:\n return str(int(orig))\n try:\n i32orig = int(str(orig))\n if -I32_BOUND <= i32orig < I32_BOUND:\n return str(i32orig)\n except (ValueError, TypeError):\n pass\n rv = int.from_bytes(sha256(raw(orig).encode()).digest(), 'big')\n while -I32_BOUND <= rv < I32_BOUND:\n rv = int.from_bytes(sha256(rv.encode()).digest(), 'big')\n return str(rv)\n", "role": "user" }, { "content": "encode", "role": "assistant" } ]
def x(relation: str) ->'Predicate':
    for pred in Predicate:
        if relation.upper() in (pred.value.fortran, pred.value.wql.upper(), pred.value.math):
            return pred
    return None
get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(relation: str) ->'Predicate':\n for pred in Predicate:\n if relation.upper() in (pred.value.fortran, pred.value.wql.upper(),\n pred.value.math):\n return pred\n return None\n", "role": "user" }, { "content": "get", "role": "assistant" } ]
def x(value: Any) ->int:
    if isinstance(value, (bool, int)):
        return int(value)
    return int(str(value))
to_int
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(value: Any) ->int:\n if isinstance(value, (bool, int)):\n return int(value)\n return int(str(value))\n", "role": "user" }, { "content": "to_int", "role": "assistant" } ]
def x(token: Union[str, int]=None) ->'Role':
    if token is None:
        return Role.USER
    for role in Role:
        if role == Role.ROLE_REMOVE:
            continue
        if isinstance(token, int) and token in role.value:
            return role
        if str(token).upper() == role.name or token in (str(v) for v in role.value):
            return role
    return None
get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(token: Union[str, int]=None) ->'Role':\n if token is None:\n return Role.USER\n for role in Role:\n if role == Role.ROLE_REMOVE:\n continue\n if isinstance(token, int) and token in role.value:\n return role\n if str(token).upper() == role.name or token in (str(v) for v in\n role.value):\n return role\n return None\n", "role": "user" }, { "content": "get", "role": "assistant" } ]
def x(self) ->str:
    return self.value[0] if self in (Role.USER, Role.ROLE_REMOVE) else self.name
token
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self) ->str:\n return self.value[0] if self in (Role.USER, Role.ROLE_REMOVE\n ) else self.name\n", "role": "user" }, { "content": "token", "role": "assistant" } ]
def x(timeout, *params):

    def x(view_func):

        @wraps(view_func, assigned=available_attrs(view_func))
        def x(view_or_request, *args, **kwargs):
            request = getattr(view_or_request, 'request', view_or_request)
            if not hasattr(_thread_locals, 'ultracache_request'):
                setattr(_thread_locals, 'ultracache_request', request)
            if request.method.lower() not in ('get', 'head'):
                return view_func(view_or_request, *args, **kwargs)
            l = 0
            try:
                l = len(request._messages)
            except (AttributeError, TypeError):
                pass
            if l:
                return view_func(view_or_request, *args, **kwargs)
            li = [str(view_or_request.__class__), view_func.__name__]
            if not set(params).intersection(set(('request.get_full_path()',
                    'request.path', 'request.path_info'))):
                li.append(request.get_full_path())
            if 'django.contrib.sites' in settings.INSTALLED_APPS:
                li.append(get_current_site_pk(request))
            keys = list(kwargs.keys())
            keys.sort()
            for key in keys:
                li.append('%s,%s' % (key, kwargs[key]))
            for param in params:
                if not isinstance(param, str):
                    param = str(param)
                li.append(eval(param))
            s = ':'.join([str(l) for l in li])
            hashed = hashlib.md5(s.encode('utf-8')).hexdigest()
            cache_key = 'ucache-get-%s' % hashed
            cached = cache.get(cache_key, None)
            if cached is None:
                request._ultracache = []
                response = view_func(view_or_request, *args, **kwargs)
                content = None
                if isinstance(response, TemplateResponse):
                    content = response.render().rendered_content
                elif isinstance(response, HttpResponse):
                    content = response.content
                if content is not None:
                    headers = getattr(response, '_headers', {})
                    cache.set(cache_key, {'content': content, 'headers': headers}, timeout)
                    cache_meta(request, cache_key)
            else:
                response = HttpResponse(cached['content'])
                for k, v in cached['headers'].items():
                    response[v[0]] = v[1]
            return response
        return _wrapped_view
    return decorator
cached_get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(timeout, *params):\n\n def x(view_func):\n\n @wraps(view_func, assigned=available_attrs(view_func))\n def x(view_or_request, *args, **kwargs):\n request = getattr(view_or_request, 'request', view_or_request)\n if not hasattr(_thread_locals, 'ultracache_request'):\n setattr(_thread_locals, 'ultracache_request', request)\n if request.method.lower() not in ('get', 'head'):\n return view_func(view_or_request, *args, **kwargs)\n l = 0\n try:\n l = len(request._messages)\n except (AttributeError, TypeError):\n pass\n if l:\n return view_func(view_or_request, *args, **kwargs)\n li = [str(view_or_request.__class__), view_func.__name__]\n if not set(params).intersection(set(('request.get_full_path()',\n 'request.path', 'request.path_info'))):\n li.append(request.get_full_path())\n if 'django.contrib.sites' in settings.INSTALLED_APPS:\n li.append(get_current_site_pk(request))\n keys = list(kwargs.keys())\n keys.sort()\n for key in keys:\n li.append('%s,%s' % (key, kwargs[key]))\n for param in params:\n if not isinstance(param, str):\n param = str(param)\n li.append(eval(param))\n s = ':'.join([str(l) for l in li])\n hashed = hashlib.md5(s.encode('utf-8')).hexdigest()\n cache_key = 'ucache-get-%s' % hashed\n cached = cache.get(cache_key, None)\n if cached is None:\n request._ultracache = []\n response = view_func(view_or_request, *args, **kwargs)\n content = None\n if isinstance(response, TemplateResponse):\n content = response.render().rendered_content\n elif isinstance(response, HttpResponse):\n content = response.content\n if content is not None:\n headers = getattr(response, '_headers', {})\n cache.set(cache_key, {'content': content, 'headers':\n headers}, timeout)\n cache_meta(request, cache_key)\n else:\n response = HttpResponse(cached['content'])\n for k, v in cached['headers'].items():\n response[v[0]] = v[1]\n return response\n return _wrapped_view\n return decorator\n", "role": "user" }, { "content": "cached_get", "role": "assistant" } ]
def x(timeout, *params):

    def x(cls):


        class WrappedClass(cls):

            def x(self, *args, **kwargs):
                super(WrappedClass, self).__init__(*args, **kwargs)

            @cached_get(timeout, *params)
            def x(self, *args, **kwargs):
                return super(WrappedClass, self).get(*args, **kwargs)
        return WrappedClass
    return decorator
ultracache
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(timeout, *params):\n\n def x(cls):\n\n\n class WrappedClass(cls):\n\n def x(self, *args, **kwargs):\n super(WrappedClass, self).__init__(*args, **kwargs)\n\n @cached_get(timeout, *params)\n def x(self, *args, **kwargs):\n return super(WrappedClass, self).get(*args, **kwargs)\n return WrappedClass\n return decorator\n", "role": "user" }, { "content": "ultracache", "role": "assistant" } ]
async def open(self) ->'Issuer':
    """
    Explicit entry. Perform ancestor opening operations,
    then synchronize revocation registry to tails tree content.

    :return: current object
    """
    LOGGER.debug('Issuer.open >>>')
    await super().open()
    for path_rr_id in Tails.links(self.dir_tails, self.did):
        await self._sync_revoc_for_issue(basename(path_rr_id))
    LOGGER.debug('Issuer.open <<<')
    return self
open
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def open(self) ->'Issuer':\n \"\"\"\n Explicit entry. Perform ancestor opening operations,\n then synchronize revocation registry to tails tree content.\n\n :return: current object\n \"\"\"\n LOGGER.debug('Issuer.open >>>')\n await super().open()\n for path_rr_id in Tails.links(self.dir_tails, self.did):\n await self._sync_revoc_for_issue(basename(path_rr_id))\n LOGGER.debug('Issuer.open <<<')\n return self\n", "role": "user" }, { "content": "open", "role": "assistant" } ]
async def _send_rev_reg_def(self, rr_id: str) ->None:
    """
    Move tails file from hopper; deserialize revocation registry definition and initial entry;
    send to ledger and cache revocation registry definition.

    Operation serializes to subdirectory within tails hopper directory; symbolic
    link presence signals completion.

    Raise AbsentRevReg if revocation registry is not ready in hopper, or AbsentTails if
    tails file is not yet linked by its revocation registry identifier.

    :param rr_id: revocation registry identifier
    """
    LOGGER.debug('Issuer._send_rev_reg_def >>> rr_id: %s', rr_id)
    dir_tails_rr_id = self.rrb.dir_tails_top(rr_id)
    dir_target = self.rrb.dir_tails_target(rr_id)
    if not Tails.linked(dir_tails_rr_id, rr_id):
        LOGGER.debug(
            'Issuer._send_rev_reg_def <!< Tails file for rev reg %s not ready in dir %s',
            rr_id, dir_target)
        raise AbsentRevReg('Tails file for rev reg {} not ready in dir {}'.format(rr_id, dir_target))
    file_rr_def = join(dir_target, 'rr_def.json')
    if not isfile(file_rr_def):
        LOGGER.debug(
            'Issuer._send_rev_reg_def <!< Rev reg def file %s not present', file_rr_def)
        raise AbsentRevReg('Rev reg def file {} not present'.format(file_rr_def))
    with open(file_rr_def, 'r') as fh_rr_def:
        rr_def_json = fh_rr_def.read()
    file_rr_ent = join(dir_target, 'rr_ent.json')
    if not isfile(file_rr_ent):
        LOGGER.debug(
            'Issuer._send_rev_reg_def <!< Rev reg entry file %s not present', file_rr_ent)
        raise AbsentRevReg('Rev reg entry file {} not present'.format(file_rr_ent))
    with open(file_rr_ent, 'r') as fh_rr_ent:
        rr_ent_json = fh_rr_ent.read()
    file_tails = Tails.linked(dir_tails_rr_id, rr_id)
    if not file_tails:
        LOGGER.debug(
            'Issuer._send_rev_reg_def <!< Tails link %s not present in dir %s',
            rr_id, dir_target)
        raise AbsentTails('Tails link {} not present in dir {}'.format(rr_id, dir_target))
    if self.rrbx:
        dir_cd_id = join(self.dir_tails, rev_reg_id2cred_def_id(rr_id))
        makedirs(dir_cd_id, exist_ok=True)
        rename(file_tails, join(dir_cd_id, basename(file_tails)))
    with REVO_CACHE.lock:
        rr_def_req_json = await ledger.build_revoc_reg_def_request(self.did, rr_def_json)
        await self._sign_submit(rr_def_req_json)
        await self.get_rev_reg_def(rr_id)
        rr_ent_req_json = await ledger.build_revoc_reg_entry_request(self.did,
            rr_id, 'CL_ACCUM', rr_ent_json)
        await self._sign_submit(rr_ent_req_json)
    if self.rrbx:
        Tails.associate(self.dir_tails, rr_id, basename(file_tails))
        rmtree(dir_tails_rr_id)
    else:
        remove(file_rr_def)
        remove(file_rr_ent)
    LOGGER.debug('Issuer._send_rev_reg_def <<<')
send_rev_reg_def
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def _send_rev_reg_def(self, rr_id: str) ->None:\n \"\"\"\n Move tails file from hopper; deserialize revocation registry definition and initial entry;\n send to ledger and cache revocation registry definition.\n\n Operation serializes to subdirectory within tails hopper directory; symbolic\n link presence signals completion.\n\n Raise AbsentRevReg if revocation registry is not ready in hopper, or AbsentTails if\n tails file is not yet linked by its revocation registry identifier.\n\n :param rr_id: revocation registry identifier\n \"\"\"\n LOGGER.debug('Issuer._send_rev_reg_def >>> rr_id: %s', rr_id)\n dir_tails_rr_id = self.rrb.dir_tails_top(rr_id)\n dir_target = self.rrb.dir_tails_target(rr_id)\n if not Tails.linked(dir_tails_rr_id, rr_id):\n LOGGER.debug(\n 'Issuer._send_rev_reg_def <!< Tails file for rev reg %s not ready in dir %s'\n , rr_id, dir_target)\n raise AbsentRevReg('Tails file for rev reg {} not ready in dir {}'.\n format(rr_id, dir_target))\n file_rr_def = join(dir_target, 'rr_def.json')\n if not isfile(file_rr_def):\n LOGGER.debug(\n 'Issuer._send_rev_reg_def <!< Rev reg def file %s not present',\n file_rr_def)\n raise AbsentRevReg('Rev reg def file {} not present'.format(\n file_rr_def))\n with open(file_rr_def, 'r') as fh_rr_def:\n rr_def_json = fh_rr_def.read()\n file_rr_ent = join(dir_target, 'rr_ent.json')\n if not isfile(file_rr_ent):\n LOGGER.debug(\n 'Issuer._send_rev_reg_def <!< Rev reg entry file %s not present',\n file_rr_ent)\n raise AbsentRevReg('Rev reg entry file {} not present'.format(\n file_rr_ent))\n with open(file_rr_ent, 'r') as fh_rr_ent:\n rr_ent_json = fh_rr_ent.read()\n file_tails = Tails.linked(dir_tails_rr_id, rr_id)\n if not file_tails:\n LOGGER.debug(\n 'Issuer._send_rev_reg_def <!< Tails link %s not present in dir %s',\n rr_id, dir_target)\n raise AbsentTails('Tails link {} not present in dir {}'.format(\n rr_id, dir_target))\n if self.rrbx:\n dir_cd_id = join(self.dir_tails, rev_reg_id2cred_def_id(rr_id))\n makedirs(dir_cd_id, exist_ok=True)\n rename(file_tails, join(dir_cd_id, basename(file_tails)))\n with REVO_CACHE.lock:\n rr_def_req_json = await ledger.build_revoc_reg_def_request(self.did,\n rr_def_json)\n await self._sign_submit(rr_def_req_json)\n await self.get_rev_reg_def(rr_id)\n rr_ent_req_json = await ledger.build_revoc_reg_entry_request(self.did,\n rr_id, 'CL_ACCUM', rr_ent_json)\n await self._sign_submit(rr_ent_req_json)\n if self.rrbx:\n Tails.associate(self.dir_tails, rr_id, basename(file_tails))\n rmtree(dir_tails_rr_id)\n else:\n remove(file_rr_def)\n remove(file_rr_ent)\n LOGGER.debug('Issuer._send_rev_reg_def <<<')\n", "role": "user" }, { "content": "send_rev_reg_def", "role": "assistant" } ]
async def _set_rev_reg(self, rr_id: str, rr_size: int) ->None: """ Move precomputed revocation registry data from hopper into place within tails directory. :param rr_id: revocation registry identifier :param rr_size: revocation registry size, in case creation required """ LOGGER.debug('Issuer._set_rev_reg >>> rr_id: %s, rr_size: %s', rr_id, rr_size) assert self.rrbx dir_hopper_rr_id = join(self.rrb.dir_tails_hopper, rr_id) while Tails.linked(dir_hopper_rr_id, rr_id) is None: await asyncio.sleep(1) await self._send_rev_reg_def(rr_id) cd_id = rev_reg_id2cred_def_id(rr_id) next_tag, rr_size_suggested = Tails.next_tag(self.dir_tails, cd_id) rr_id = rev_reg_id(cd_id, next_tag) self.rrb.mark_in_progress(rr_id, rr_size or rr_size_suggested) LOGGER.debug('Issuer._set_rev_reg <<<')
set_rev_reg
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def _set_rev_reg(self, rr_id: str, rr_size: int) ->None:\n \"\"\"\n Move precomputed revocation registry data from hopper into place within tails directory.\n\n :param rr_id: revocation registry identifier\n :param rr_size: revocation registry size, in case creation required\n \"\"\"\n LOGGER.debug('Issuer._set_rev_reg >>> rr_id: %s, rr_size: %s', rr_id,\n rr_size)\n assert self.rrbx\n dir_hopper_rr_id = join(self.rrb.dir_tails_hopper, rr_id)\n while Tails.linked(dir_hopper_rr_id, rr_id) is None:\n await asyncio.sleep(1)\n await self._send_rev_reg_def(rr_id)\n cd_id = rev_reg_id2cred_def_id(rr_id)\n next_tag, rr_size_suggested = Tails.next_tag(self.dir_tails, cd_id)\n rr_id = rev_reg_id(cd_id, next_tag)\n self.rrb.mark_in_progress(rr_id, rr_size or rr_size_suggested)\n LOGGER.debug('Issuer._set_rev_reg <<<')\n", "role": "user" }, { "content": "set_rev_reg", "role": "assistant" } ]
async def _sync_revoc_for_issue(self, rr_id: str, rr_size: int=None) ->None: """ Create revocation registry if need be for input revocation registry identifier; open and cache tails file reader. :param rr_id: revocation registry identifier :param rr_size: if new revocation registry necessary, its size (default as per RevRegBuilder.create_rev_reg()) """ LOGGER.debug('Issuer._sync_revoc_for_issue >>> rr_id: %s, rr_size: %s', rr_id, rr_size) if not ok_rev_reg_id(rr_id): LOGGER.debug('Issuer._sync_revoc_for_issue <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) cd_id, tag = rev_reg_id2cred_def_id_tag(rr_id) try: await self.get_cred_def(cd_id) except AbsentCredDef: LOGGER.debug( 'Issuer._sync_revoc_for_issue <!< tails tree %s may be for another ledger; no cred def found on %s' , self.dir_tails, cd_id) raise AbsentCredDef( 'Tails tree {} may be for another ledger; no cred def found on {}' .format(self.dir_tails, cd_id)) with REVO_CACHE.lock: revo_cache_entry = REVO_CACHE.get(rr_id, None) tails = None if revo_cache_entry is None else revo_cache_entry.tails if tails is None: try: tails = await Tails(self.dir_tails, cd_id, tag).open() except AbsentTails: if self.rrbx: await self._set_rev_reg(rr_id, rr_size) else: await self.rrb.create_rev_reg(rr_id, rr_size) await self._send_rev_reg_def(rr_id) tails = await Tails(self.dir_tails, cd_id, tag).open() if revo_cache_entry is None: REVO_CACHE[rr_id] = RevoCacheEntry(None, tails) else: REVO_CACHE[rr_id].tails = tails LOGGER.debug('Issuer._sync_revoc_for_issue <<<')
sync_revoc_for_issue
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def _sync_revoc_for_issue(self, rr_id: str, rr_size: int=None) ->None:\n \"\"\"\n Create revocation registry if need be for input revocation registry identifier;\n open and cache tails file reader.\n\n :param rr_id: revocation registry identifier\n :param rr_size: if new revocation registry necessary, its size (default as per RevRegBuilder.create_rev_reg())\n \"\"\"\n LOGGER.debug('Issuer._sync_revoc_for_issue >>> rr_id: %s, rr_size: %s',\n rr_id, rr_size)\n if not ok_rev_reg_id(rr_id):\n LOGGER.debug('Issuer._sync_revoc_for_issue <!< Bad rev reg id %s',\n rr_id)\n raise BadIdentifier('Bad rev reg id {}'.format(rr_id))\n cd_id, tag = rev_reg_id2cred_def_id_tag(rr_id)\n try:\n await self.get_cred_def(cd_id)\n except AbsentCredDef:\n LOGGER.debug(\n 'Issuer._sync_revoc_for_issue <!< tails tree %s may be for another ledger; no cred def found on %s'\n , self.dir_tails, cd_id)\n raise AbsentCredDef(\n 'Tails tree {} may be for another ledger; no cred def found on {}'\n .format(self.dir_tails, cd_id))\n with REVO_CACHE.lock:\n revo_cache_entry = REVO_CACHE.get(rr_id, None)\n tails = None if revo_cache_entry is None else revo_cache_entry.tails\n if tails is None:\n try:\n tails = await Tails(self.dir_tails, cd_id, tag).open()\n except AbsentTails:\n if self.rrbx:\n await self._set_rev_reg(rr_id, rr_size)\n else:\n await self.rrb.create_rev_reg(rr_id, rr_size)\n await self._send_rev_reg_def(rr_id)\n tails = await Tails(self.dir_tails, cd_id, tag).open()\n if revo_cache_entry is None:\n REVO_CACHE[rr_id] = RevoCacheEntry(None, tails)\n else:\n REVO_CACHE[rr_id].tails = tails\n LOGGER.debug('Issuer._sync_revoc_for_issue <<<')\n", "role": "user" }, { "content": "sync_revoc_for_issue", "role": "assistant" } ]
def x(self, rr_id: str) ->str: LOGGER.debug('Issuer.path_tails >>>') if not ok_rev_reg_id(rr_id): LOGGER.debug('Issuer.path_tails <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) rv = Tails.linked(self.dir_tails, rr_id) LOGGER.debug('Issuer.path_tails <<< %s', rv) return rv
path_tails
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, rr_id: str) ->str:\n LOGGER.debug('Issuer.path_tails >>>')\n if not ok_rev_reg_id(rr_id):\n LOGGER.debug('Issuer.path_tails <!< Bad rev reg id %s', rr_id)\n raise BadIdentifier('Bad rev reg id {}'.format(rr_id))\n rv = Tails.linked(self.dir_tails, rr_id)\n LOGGER.debug('Issuer.path_tails <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "path_tails", "role": "assistant" } ]
async def _create_cred_def(self, schema: dict, ledger_cred_def: dict, revo: bool) ->(str, bool): """ Create credential definition in wallet as part of the send_cred_def() sequence. Return whether the private key for the cred def is OK to continue with the sequence, propagating the cred def and revocation registry info to the ledger. :param schema: schema on which to create cred def :param ledger_cred_def: credential definition as ledger has it (typically, None) :param revo: whether cred def supports revocation :return: cred def json and whether local cred def private key is OK, hence cred def is OK to send to the ledger """ LOGGER.debug( 'Issuer._create_cred_def >>> schema: %s, ledger_cred_def: %s, revo: %s' , schema, ledger_cred_def, revo) cred_def_json = '{}' private_key_ok = True try: _, cred_def_json = (await anoncreds. issuer_create_and_store_credential_def(self.wallet.handle, self .did, json.dumps(schema), self.pool.protocol.cd_id_tag(False), 'CL', json.dumps({'support_revocation': revo}))) if ledger_cred_def: private_key_ok = False LOGGER.warning( 'New cred def on %s in wallet shadows existing one on ledger: private key not usable' , cred_def_id(self.did, schema['seqNo'], self.pool.protocol)) except IndyError as x_indy: if x_indy.error_code == ErrorCode.AnoncredsCredDefAlreadyExistsError: if ledger_cred_def: LOGGER.info( 'Issuer wallet %s reusing existing cred def on schema %s version %s' , self.name, schema['name'], schema['version']) else: LOGGER.debug('Issuer._create_cred_def <!< corrupt wallet %s', self.name) raise CorruptWallet( 'Corrupt Issuer wallet {} has cred def on schema {} not on ledger' .format(self.name, schema['id'])) else: LOGGER.debug( 'Issuer._create_cred_def <!< cannot store cred def in wallet %s: indy error code %s' , self.name, x_indy.error_code) raise rv = cred_def_json, private_key_ok LOGGER.debug('Issuer._create_cred_def <<< %s', rv) return rv
create_cred_def
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def _create_cred_def(self, schema: dict, ledger_cred_def: dict, revo:\n bool) ->(str, bool):\n \"\"\"\n Create credential definition in wallet as part of the send_cred_def() sequence.\n Return whether the private key for the cred def is OK to continue with the sequence,\n propagating the cred def and revocation registry info to the ledger.\n\n :param schema: schema on which to create cred def\n :param ledger_cred_def: credential definition as ledger has it (typically, None)\n :param revo: whether cred def supports revocation\n :return: cred def json and whether local cred def private key is OK, hence cred def is OK to send to the ledger\n \"\"\"\n LOGGER.debug(\n 'Issuer._create_cred_def >>> schema: %s, ledger_cred_def: %s, revo: %s'\n , schema, ledger_cred_def, revo)\n cred_def_json = '{}'\n private_key_ok = True\n try:\n _, cred_def_json = (await anoncreds.\n issuer_create_and_store_credential_def(self.wallet.handle, self\n .did, json.dumps(schema), self.pool.protocol.cd_id_tag(False),\n 'CL', json.dumps({'support_revocation': revo})))\n if ledger_cred_def:\n private_key_ok = False\n LOGGER.warning(\n 'New cred def on %s in wallet shadows existing one on ledger: private key not usable'\n , cred_def_id(self.did, schema['seqNo'], self.pool.protocol))\n except IndyError as x_indy:\n if x_indy.error_code == ErrorCode.AnoncredsCredDefAlreadyExistsError:\n if ledger_cred_def:\n LOGGER.info(\n 'Issuer wallet %s reusing existing cred def on schema %s version %s'\n , self.name, schema['name'], schema['version'])\n else:\n LOGGER.debug('Issuer._create_cred_def <!< corrupt wallet %s',\n self.name)\n raise CorruptWallet(\n 'Corrupt Issuer wallet {} has cred def on schema {} not on ledger'\n .format(self.name, schema['id']))\n else:\n LOGGER.debug(\n 'Issuer._create_cred_def <!< cannot store cred def in wallet %s: indy error code %s'\n , self.name, x_indy.error_code)\n raise\n rv = cred_def_json, private_key_ok\n LOGGER.debug('Issuer._create_cred_def <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "create_cred_def", "role": "assistant" } ]
async def send_cred_def(self, s_id: str, revo: bool=True, rr_size: int=None ) ->str: """ Create a credential definition as Issuer, store it in its wallet, and send it to the ledger. Raise CorruptWallet for wallet not pertaining to current ledger, BadLedgerTxn on failure to send credential definition to ledger if need be, WalletState for closed wallet, or IndyError for any other failure to create and store credential definition in wallet. :param s_id: schema identifier :param revo: whether to support revocation for cred def :param rr_size: size of initial revocation registry (default as per RevRegBuilder.create_rev_reg()), if revocation supported :return: json credential definition as it appears on ledger """ LOGGER.debug('Issuer.send_cred_def >>> s_id: %s, revo: %s, rr_size: %s', s_id, revo, rr_size) if not ok_schema_id(s_id): LOGGER.debug('Issuer.send_cred_def <!< Bad schema id %s', s_id) raise BadIdentifier('Bad schema id {}'.format(s_id)) if not self.wallet.handle: LOGGER.debug('Issuer.send_cred_def <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) if not self.pool: LOGGER.debug('Issuer.send_cred_def <!< issuer %s has no pool', self .name) raise AbsentPool('Issuer {} has no pool: cannot send cred def'. format(self.name)) rv_json = json.dumps({}) schema_json = await self.get_schema(schema_key(s_id)) schema = json.loads(schema_json) cd_id = cred_def_id(self.did, schema['seqNo'], self.pool.protocol) private_key_ok = True with CRED_DEF_CACHE.lock: try: rv_json = await self.get_cred_def(cd_id) LOGGER.info( 'Cred def on schema %s version %s already exists on ledger; Issuer %s not sending another' , schema['name'], schema['version'], self.name) except AbsentCredDef: pass cred_def_json, private_key_ok = await self._create_cred_def(schema, json.loads(rv_json), revo) if not json.loads(rv_json): req_json = await ledger.build_cred_def_request(self.did, cred_def_json) await self._sign_submit(req_json) for _ in range(16): try: rv_json = await self.get_cred_def(cd_id) break except AbsentCredDef: await asyncio.sleep(1) LOGGER.info( 'Sent cred def %s to ledger, waiting 1s for its appearance' , cd_id) if not rv_json: LOGGER.debug( 'Issuer.send_cred_def <!< timed out waiting on sent cred_def %s' , cd_id) raise BadLedgerTxn('Timed out waiting on sent cred_def {}'. format(cd_id)) if revo: if self.rrbx: _, rr_size_suggested = Tails.next_tag(self.dir_tails, cd_id ) self.rrb.mark_in_progress(rev_reg_id(cd_id, '0'), rr_size or rr_size_suggested) await self._sync_revoc_for_issue(rev_reg_id(cd_id, '0'), rr_size) if revo and private_key_ok: for tag in [str(t) for t in range(1, int(Tails.next_tag(self. dir_tails, cd_id)[0]))]: await self._sync_revoc_for_issue(rev_reg_id(cd_id, tag), rr_size if tag == '0' else None) makedirs(join(self.dir_tails, cd_id), exist_ok=True) LOGGER.debug('Issuer.send_cred_def <<< %s', rv_json) return rv_json
send_cred_def
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def send_cred_def(self, s_id: str, revo: bool=True, rr_size: int=None\n ) ->str:\n \"\"\"\n Create a credential definition as Issuer, store it in its wallet, and send it to the ledger.\n\n Raise CorruptWallet for wallet not pertaining to current ledger, BadLedgerTxn on failure\n to send credential definition to ledger if need be, WalletState for closed wallet,\n or IndyError for any other failure to create and store credential definition in wallet.\n\n :param s_id: schema identifier\n :param revo: whether to support revocation for cred def\n :param rr_size: size of initial revocation registry (default as per RevRegBuilder.create_rev_reg()),\n if revocation supported\n :return: json credential definition as it appears on ledger\n \"\"\"\n LOGGER.debug('Issuer.send_cred_def >>> s_id: %s, revo: %s, rr_size: %s',\n s_id, revo, rr_size)\n if not ok_schema_id(s_id):\n LOGGER.debug('Issuer.send_cred_def <!< Bad schema id %s', s_id)\n raise BadIdentifier('Bad schema id {}'.format(s_id))\n if not self.wallet.handle:\n LOGGER.debug('Issuer.send_cred_def <!< Wallet %s is closed', self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n if not self.pool:\n LOGGER.debug('Issuer.send_cred_def <!< issuer %s has no pool', self\n .name)\n raise AbsentPool('Issuer {} has no pool: cannot send cred def'.\n format(self.name))\n rv_json = json.dumps({})\n schema_json = await self.get_schema(schema_key(s_id))\n schema = json.loads(schema_json)\n cd_id = cred_def_id(self.did, schema['seqNo'], self.pool.protocol)\n private_key_ok = True\n with CRED_DEF_CACHE.lock:\n try:\n rv_json = await self.get_cred_def(cd_id)\n LOGGER.info(\n 'Cred def on schema %s version %s already exists on ledger; Issuer %s not sending another'\n , schema['name'], schema['version'], self.name)\n except AbsentCredDef:\n pass\n cred_def_json, private_key_ok = await self._create_cred_def(schema,\n json.loads(rv_json), revo)\n if not json.loads(rv_json):\n req_json = await ledger.build_cred_def_request(self.did,\n cred_def_json)\n await self._sign_submit(req_json)\n for _ in range(16):\n try:\n rv_json = await self.get_cred_def(cd_id)\n break\n except AbsentCredDef:\n await asyncio.sleep(1)\n LOGGER.info(\n 'Sent cred def %s to ledger, waiting 1s for its appearance'\n , cd_id)\n if not rv_json:\n LOGGER.debug(\n 'Issuer.send_cred_def <!< timed out waiting on sent cred_def %s'\n , cd_id)\n raise BadLedgerTxn('Timed out waiting on sent cred_def {}'.\n format(cd_id))\n if revo:\n if self.rrbx:\n _, rr_size_suggested = Tails.next_tag(self.dir_tails, cd_id\n )\n self.rrb.mark_in_progress(rev_reg_id(cd_id, '0'), \n rr_size or rr_size_suggested)\n await self._sync_revoc_for_issue(rev_reg_id(cd_id, '0'),\n rr_size)\n if revo and private_key_ok:\n for tag in [str(t) for t in range(1, int(Tails.next_tag(self.\n dir_tails, cd_id)[0]))]:\n await self._sync_revoc_for_issue(rev_reg_id(cd_id, tag), \n rr_size if tag == '0' else None)\n makedirs(join(self.dir_tails, cd_id), exist_ok=True)\n LOGGER.debug('Issuer.send_cred_def <<< %s', rv_json)\n return rv_json\n", "role": "user" }, { "content": "send_cred_def", "role": "assistant" } ]
async def create_cred_offer(self, schema_seq_no: int) ->str: """ Create credential offer as Issuer for given schema. Raise CorruptWallet if the wallet has no private key for the corresponding credential definition. Raise WalletState for closed wallet. :param schema_seq_no: schema sequence number :return: credential offer json for use in storing credentials at HolderProver. """ LOGGER.debug('Issuer.create_cred_offer >>> schema_seq_no: %s', schema_seq_no) if not self.wallet.handle: LOGGER.debug('Issuer.create_cred_offer <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) if not self.pool: LOGGER.debug('Issuer.create_cred_offer <!< issuer %s has no pool', self.name) raise AbsentPool('Issuer {} has no pool: cannot create cred offer'. format(self.name)) rv = None cd_id = cred_def_id(self.did, schema_seq_no, self.pool.protocol) try: rv = await anoncreds.issuer_create_credential_offer(self.wallet. handle, cd_id) except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletNotFoundError: LOGGER.debug( 'Issuer.create_cred_offer <!< did not issue cred definition from wallet %s' , self.name) raise CorruptWallet( 'Cannot create cred offer: did not issue cred definition from wallet {}' .format(self.name)) LOGGER.debug( 'Issuer.create_cred_offer <!< cannot create cred offer, indy error code %s' , x_indy.error_code) raise LOGGER.debug('Issuer.create_cred_offer <<< %s', rv) return rv
create_cred_offer
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def create_cred_offer(self, schema_seq_no: int) ->str:\n \"\"\"\n Create credential offer as Issuer for given schema.\n\n Raise CorruptWallet if the wallet has no private key for the corresponding credential definition.\n Raise WalletState for closed wallet.\n\n :param schema_seq_no: schema sequence number\n :return: credential offer json for use in storing credentials at HolderProver.\n \"\"\"\n LOGGER.debug('Issuer.create_cred_offer >>> schema_seq_no: %s',\n schema_seq_no)\n if not self.wallet.handle:\n LOGGER.debug('Issuer.create_cred_offer <!< Wallet %s is closed',\n self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n if not self.pool:\n LOGGER.debug('Issuer.create_cred_offer <!< issuer %s has no pool',\n self.name)\n raise AbsentPool('Issuer {} has no pool: cannot create cred offer'.\n format(self.name))\n rv = None\n cd_id = cred_def_id(self.did, schema_seq_no, self.pool.protocol)\n try:\n rv = await anoncreds.issuer_create_credential_offer(self.wallet.\n handle, cd_id)\n except IndyError as x_indy:\n if x_indy.error_code == ErrorCode.WalletNotFoundError:\n LOGGER.debug(\n 'Issuer.create_cred_offer <!< did not issue cred definition from wallet %s'\n , self.name)\n raise CorruptWallet(\n 'Cannot create cred offer: did not issue cred definition from wallet {}'\n .format(self.name))\n LOGGER.debug(\n 'Issuer.create_cred_offer <!< cannot create cred offer, indy error code %s'\n , x_indy.error_code)\n raise\n LOGGER.debug('Issuer.create_cred_offer <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "create_cred_offer", "role": "assistant" } ]
async def create_cred(self, cred_offer_json, cred_req_json: str, cred_attrs: dict, rr_size: int=None) ->(str, str): """ Create credential as Issuer out of credential request and dict of key:value (raw, unencoded) entries for attributes. Return credential json, and if cred def supports revocation, credential revocation identifier. Raise WalletState for closed wallet. If the credential definition supports revocation, and the current revocation registry is full, the processing creates a new revocation registry en passant. Depending on the revocation registry size (by default starting at 64 and doubling iteratively through a maximum of 100000) and the revocation registry builder posture (see RevRegBuilder.__init__()), this operation may delay credential creation by several seconds. The use of an external revocation registry builder runs a parallel process, skirting this delay, but is more costly at initialization. :param cred_offer_json: credential offer json as created by Issuer :param cred_req_json: credential request json as created by HolderProver :param cred_attrs: dict mapping each attribute to its original value (the operation encodes it); e.g., :: { 'favourite_drink': 'martini', 'height': 180, 'last_visit_date': '2017-12-31', 'weaknesses': None } :param rr_size: size of new revocation registry (default as per RevRegBuilder.create_rev_reg()) if necessary :return: tuple with newly issued credential json, credential revocation identifier (if cred def supports revocation, None otherwise). """ LOGGER.debug( 'Issuer.create_cred >>> cred_offer_json: %s, cred_req_json: %s, cred_attrs: %s, rr_size: %s' , cred_offer_json, cred_req_json, cred_attrs, rr_size) if not self.wallet.handle: LOGGER.debug('Issuer.create_cred <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) cd_id = json.loads(cred_offer_json)['cred_def_id'] if not ok_cred_def_id(cd_id): LOGGER.debug('Issuer.create_cred <!< Bad cred def id %s', cd_id) raise BadIdentifier('Bad cred def id {}'.format(cd_id)) cred_def = json.loads(await self.get_cred_def(cd_id)) if 'revocation' in cred_def['value']: with REVO_CACHE.lock: rr_id = Tails.current_rev_reg_id(self.dir_tails, cd_id) tails = REVO_CACHE[rr_id].tails assert tails try: cred_json, cred_revoc_id, _ = (await anoncreds. issuer_create_credential(self.wallet.handle, cred_offer_json, cred_req_json, json.dumps({k: cred_attr_value(cred_attrs[k]) for k in cred_attrs}), rr_id, tails.reader_handle)) rv = cred_json, cred_revoc_id except IndyError as x_indy: if (x_indy.error_code == ErrorCode. AnoncredsRevocationRegistryFullError): tag, rr_size_suggested = Tails.next_tag(self.dir_tails, cd_id) rr_id = rev_reg_id(cd_id, tag) if self.rrbx: await self._set_rev_reg(rr_id, rr_size) else: await self.rrb.create_rev_reg(rr_id, rr_size or rr_size_suggested) await self._send_rev_reg_def(rr_id) REVO_CACHE[rr_id].tails = await Tails(self.dir_tails, cd_id ).open() return await self.create_cred(cred_offer_json, cred_req_json, cred_attrs) LOGGER.debug( 'Issuer.create_cred <!< cannot create cred, indy error code %s' , x_indy.error_code) raise else: try: cred_json, _, _ = await anoncreds.issuer_create_credential(self .wallet.handle, cred_offer_json, cred_req_json, json.dumps( {k: cred_attr_value(cred_attrs[k]) for k in cred_attrs}), None, None) rv = cred_json, None except IndyError as x_indy: LOGGER.debug( 'Issuer.create_cred <!< cannot create cred, indy error code %s' , x_indy.error_code) raise LOGGER.debug('Issuer.create_cred <<< %s', rv) return rv
create_cred
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def create_cred(self, cred_offer_json, cred_req_json: str, cred_attrs:\n dict, rr_size: int=None) ->(str, str):\n \"\"\"\n Create credential as Issuer out of credential request and dict of key:value (raw, unencoded)\n entries for attributes.\n\n Return credential json, and if cred def supports revocation, credential revocation identifier.\n Raise WalletState for closed wallet.\n\n If the credential definition supports revocation, and the current revocation registry is full,\n the processing creates a new revocation registry en passant. Depending on the revocation\n registry size (by default starting at 64 and doubling iteratively through a maximum of 100000)\n and the revocation registry builder posture (see RevRegBuilder.__init__()), this operation may\n delay credential creation by several seconds. The use of an external revocation registry builder\n runs a parallel process, skirting this delay, but is more costly at initialization.\n\n :param cred_offer_json: credential offer json as created by Issuer\n :param cred_req_json: credential request json as created by HolderProver\n :param cred_attrs: dict mapping each attribute to its original value (the operation encodes it); e.g.,\n\n ::\n\n {\n 'favourite_drink': 'martini',\n 'height': 180,\n 'last_visit_date': '2017-12-31',\n 'weaknesses': None\n }\n\n :param rr_size: size of new revocation registry (default as per RevRegBuilder.create_rev_reg()) if necessary\n :return: tuple with newly issued credential json, credential revocation identifier (if cred def\n supports revocation, None otherwise).\n \"\"\"\n LOGGER.debug(\n 'Issuer.create_cred >>> cred_offer_json: %s, cred_req_json: %s, cred_attrs: %s, rr_size: %s'\n , cred_offer_json, cred_req_json, cred_attrs, rr_size)\n if not self.wallet.handle:\n LOGGER.debug('Issuer.create_cred <!< Wallet %s is closed', self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n cd_id = json.loads(cred_offer_json)['cred_def_id']\n if not ok_cred_def_id(cd_id):\n LOGGER.debug('Issuer.create_cred <!< Bad cred def id %s', cd_id)\n raise BadIdentifier('Bad cred def id {}'.format(cd_id))\n cred_def = json.loads(await self.get_cred_def(cd_id))\n if 'revocation' in cred_def['value']:\n with REVO_CACHE.lock:\n rr_id = Tails.current_rev_reg_id(self.dir_tails, cd_id)\n tails = REVO_CACHE[rr_id].tails\n assert tails\n try:\n cred_json, cred_revoc_id, _ = (await anoncreds.\n issuer_create_credential(self.wallet.handle,\n cred_offer_json, cred_req_json, json.dumps({k:\n cred_attr_value(cred_attrs[k]) for k in cred_attrs}),\n rr_id, tails.reader_handle))\n rv = cred_json, cred_revoc_id\n except IndyError as x_indy:\n if (x_indy.error_code == ErrorCode.\n AnoncredsRevocationRegistryFullError):\n tag, rr_size_suggested = Tails.next_tag(self.dir_tails,\n cd_id)\n rr_id = rev_reg_id(cd_id, tag)\n if self.rrbx:\n await self._set_rev_reg(rr_id, rr_size)\n else:\n await self.rrb.create_rev_reg(rr_id, rr_size or\n rr_size_suggested)\n await self._send_rev_reg_def(rr_id)\n REVO_CACHE[rr_id].tails = await Tails(self.dir_tails, cd_id\n ).open()\n return await self.create_cred(cred_offer_json,\n cred_req_json, cred_attrs)\n LOGGER.debug(\n 'Issuer.create_cred <!< cannot create cred, indy error code %s'\n , x_indy.error_code)\n raise\n else:\n try:\n cred_json, _, _ = await anoncreds.issuer_create_credential(self\n .wallet.handle, cred_offer_json, cred_req_json, 
json.dumps(\n {k: cred_attr_value(cred_attrs[k]) for k in cred_attrs}),\n None, None)\n rv = cred_json, None\n except IndyError as x_indy:\n LOGGER.debug(\n 'Issuer.create_cred <!< cannot create cred, indy error code %s'\n , x_indy.error_code)\n raise\n LOGGER.debug('Issuer.create_cred <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "create_cred", "role": "assistant" } ]
async def revoke_cred(self, rr_id: str, cr_id) ->int: """ Revoke credential that input revocation registry identifier and credential revocation identifier specify. Return (epoch seconds) time of revocation. Raise AbsentTails if no tails file is available for input revocation registry identifier. Raise WalletState for closed wallet. Raise BadRevocation if issuer cannot revoke specified credential for any other reason (e.g., did not issue it, already revoked it). :param rr_id: revocation registry identifier :param cr_id: credential revocation identifier :return: time of revocation, in epoch seconds """ LOGGER.debug('Issuer.revoke_cred >>> rr_id: %s, cr_id: %s', rr_id, cr_id) if not self.wallet.handle: LOGGER.debug('Issuer.revoke_cred <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) if not ok_rev_reg_id(rr_id): LOGGER.debug('Issuer.revoke_cred <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) tails_reader_handle = (await Tails(self.dir_tails, * rev_reg_id2cred_def_id_tag(rr_id)).open()).reader_handle try: rrdelta_json = await anoncreds.issuer_revoke_credential(self.wallet .handle, tails_reader_handle, rr_id, cr_id) except IndyError as x_indy: LOGGER.debug( 'Issuer.revoke_cred <!< Could not revoke revoc reg id %s, cred rev id %s: indy error code %s' , rr_id, cr_id, x_indy.error_code) raise BadRevocation( 'Could not revoke revoc reg id {}, cred rev id {}: indy error code {}' .format(rr_id, cr_id, x_indy.error_code)) rr_ent_req_json = await ledger.build_revoc_reg_entry_request(self.did, rr_id, 'CL_ACCUM', rrdelta_json) resp_json = await self._sign_submit(rr_ent_req_json) resp = json.loads(resp_json) rv = self.pool.protocol.txn2epoch(resp) LOGGER.debug('Issuer.revoke_cred <<< %s', rv) return rv
revoke_cred
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def revoke_cred(self, rr_id: str, cr_id) ->int:\n \"\"\"\n Revoke credential that input revocation registry identifier and\n credential revocation identifier specify.\n\n Return (epoch seconds) time of revocation.\n\n Raise AbsentTails if no tails file is available for input revocation registry identifier.\n Raise WalletState for closed wallet.\n Raise BadRevocation if issuer cannot revoke specified credential for any other reason\n (e.g., did not issue it, already revoked it).\n\n :param rr_id: revocation registry identifier\n :param cr_id: credential revocation identifier\n :return: time of revocation, in epoch seconds\n \"\"\"\n LOGGER.debug('Issuer.revoke_cred >>> rr_id: %s, cr_id: %s', rr_id, cr_id)\n if not self.wallet.handle:\n LOGGER.debug('Issuer.revoke_cred <!< Wallet %s is closed', self.name)\n raise WalletState('Wallet {} is closed'.format(self.name))\n if not ok_rev_reg_id(rr_id):\n LOGGER.debug('Issuer.revoke_cred <!< Bad rev reg id %s', rr_id)\n raise BadIdentifier('Bad rev reg id {}'.format(rr_id))\n tails_reader_handle = (await Tails(self.dir_tails, *\n rev_reg_id2cred_def_id_tag(rr_id)).open()).reader_handle\n try:\n rrdelta_json = await anoncreds.issuer_revoke_credential(self.wallet\n .handle, tails_reader_handle, rr_id, cr_id)\n except IndyError as x_indy:\n LOGGER.debug(\n 'Issuer.revoke_cred <!< Could not revoke revoc reg id %s, cred rev id %s: indy error code %s'\n , rr_id, cr_id, x_indy.error_code)\n raise BadRevocation(\n 'Could not revoke revoc reg id {}, cred rev id {}: indy error code {}'\n .format(rr_id, cr_id, x_indy.error_code))\n rr_ent_req_json = await ledger.build_revoc_reg_entry_request(self.did,\n rr_id, 'CL_ACCUM', rrdelta_json)\n resp_json = await self._sign_submit(rr_ent_req_json)\n resp = json.loads(resp_json)\n rv = self.pool.protocol.txn2epoch(resp)\n LOGGER.debug('Issuer.revoke_cred <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "revoke_cred", "role": "assistant" } ]
async def get_box_ids_issued(self) ->str: """ Return json object on lists of all unique box identifiers (schema identifiers, credential definition identifiers, and revocation registry identifiers) for all credential definitions and credentials issued; e.g., :: { "schema_id": [ "R17v42T4pk...:2:tombstone:1.2", ... ], "cred_def_id": [ "R17v42T4pk...:3:CL:19:tag", ... ] "rev_reg_id": [ "R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:0", "R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:1", ... ] } An issuer must issue a credential definition to include its schema identifier in the returned values; the schema identifier in isolation belongs properly to an Origin, not necessarily to an Issuer. The operation may be useful for a Verifier anchor going off-line to seed its cache before doing so. :return: tuple of sets for schema ids, cred def ids, rev reg ids """ LOGGER.debug('Issuer.get_box_ids_issued >>>') cd_ids = [d for d in listdir(self.dir_tails) if isdir(join(self. dir_tails, d)) and ok_cred_def_id(d, self.did)] s_ids = [] for cd_id in cd_ids: try: s_ids.append(json.loads(await self.get_schema( cred_def_id2seq_no(cd_id)))['id']) except AbsentSchema: LOGGER.error( 'Issuer %s has issued cred def %s but no corresponding schema on ledger' , self.name, cd_id) rr_ids = [basename(link) for link in Tails.links(self.dir_tails, self.did)] rv = json.dumps({'schema_id': s_ids, 'cred_def_id': cd_ids, 'rev_reg_id': rr_ids}) LOGGER.debug('Issuer.get_box_ids_issued <<< %s', rv) return rv
get_box_ids_issued
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def get_box_ids_issued(self) ->str:\n \"\"\"\n Return json object on lists of all unique box identifiers (schema identifiers,\n credential definition identifiers, and revocation registry identifiers) for\n all credential definitions and credentials issued; e.g.,\n\n ::\n\n {\n \"schema_id\": [\n \"R17v42T4pk...:2:tombstone:1.2\",\n ...\n ],\n \"cred_def_id\": [\n \"R17v42T4pk...:3:CL:19:tag\",\n ...\n ]\n \"rev_reg_id\": [\n \"R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:0\",\n \"R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:1\",\n ...\n ]\n }\n\n An issuer must issue a credential definition to include its schema identifier\n in the returned values; the schema identifier in isolation belongs properly\n to an Origin, not necessarily to an Issuer.\n\n The operation may be useful for a Verifier anchor going off-line to seed its\n cache before doing so.\n\n :return: tuple of sets for schema ids, cred def ids, rev reg ids\n \"\"\"\n LOGGER.debug('Issuer.get_box_ids_issued >>>')\n cd_ids = [d for d in listdir(self.dir_tails) if isdir(join(self.\n dir_tails, d)) and ok_cred_def_id(d, self.did)]\n s_ids = []\n for cd_id in cd_ids:\n try:\n s_ids.append(json.loads(await self.get_schema(\n cred_def_id2seq_no(cd_id)))['id'])\n except AbsentSchema:\n LOGGER.error(\n 'Issuer %s has issued cred def %s but no corresponding schema on ledger'\n , self.name, cd_id)\n rr_ids = [basename(link) for link in Tails.links(self.dir_tails, self.did)]\n rv = json.dumps({'schema_id': s_ids, 'cred_def_id': cd_ids,\n 'rev_reg_id': rr_ids})\n LOGGER.debug('Issuer.get_box_ids_issued <<< %s', rv)\n return rv\n", "role": "user" }, { "content": "get_box_ids_issued", "role": "assistant" } ]
def x(ref: str, delimiter: str=None) ->str: return ref.split(delimiter if delimiter else '#')[0]
resource
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(ref: str, delimiter: str=None) ->str:\n return ref.split(delimiter if delimiter else '#')[0]\n", "role": "user" }, { "content": "resource", "role": "assistant" } ]
def x(uri: str) ->str: if ok_did(uri): return uri if uri.startswith('did:sov:'): rv = uri[8:] if ok_did(rv): return rv raise BadIdentifier( 'Bad specification {} does not correspond to a sovrin DID'.format(uri))
canon_did
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(uri: str) ->str:\n if ok_did(uri):\n return uri\n if uri.startswith('did:sov:'):\n rv = uri[8:]\n if ok_did(rv):\n return rv\n raise BadIdentifier(\n 'Bad specification {} does not correspond to a sovrin DID'.format(uri))\n", "role": "user" }, { "content": "canon_did", "role": "assistant" } ]
def x(did: str, ref: str, delimiter: str=None): if not ok_did(did): raise BadIdentifier('Bad DID {} cannot act as DID document identifier' .format(did)) if ok_did(ref): return 'did:sov:{}'.format(did) if ok_did(resource(ref, delimiter)): return 'did:sov:{}'.format(ref) if ref.startswith('did:sov:'): rv = ref[8:] if ok_did(resource(rv, delimiter)): return ref raise BadIdentifier('Bad URI {} does not correspond to a sovrin DID' .format(ref)) if urlparse(ref).scheme: return ref return 'did:sov:{}{}{}'.format(did, delimiter if delimiter else '#', ref)
canon_ref
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(did: str, ref: str, delimiter: str=None):\n if not ok_did(did):\n raise BadIdentifier('Bad DID {} cannot act as DID document identifier'\n .format(did))\n if ok_did(ref):\n return 'did:sov:{}'.format(did)\n if ok_did(resource(ref, delimiter)):\n return 'did:sov:{}'.format(ref)\n if ref.startswith('did:sov:'):\n rv = ref[8:]\n if ok_did(resource(rv, delimiter)):\n return ref\n raise BadIdentifier('Bad URI {} does not correspond to a sovrin DID'\n .format(ref))\n if urlparse(ref).scheme:\n return ref\n return 'did:sov:{}{}{}'.format(did, delimiter if delimiter else '#', ref)\n", "role": "user" }, { "content": "canon_ref", "role": "assistant" } ]
def x(tags: dict) ->bool: if not tags: return True depth = 0 queue = [(i, depth + 1) for i in tags.values() if isinstance(i, dict)] max_depth = 0 while queue and max_depth < 2: sub, depth = queue.pop() max_depth = max(max_depth, depth) queue = queue + [(i, depth + 1) for i in sub.values() if isinstance (i, dict)] return max_depth < 2 and all(isinstance(k, str) and isinstance(tags[k], str) for k in tags)
ok_tags
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(tags: dict) ->bool:\n if not tags:\n return True\n depth = 0\n queue = [(i, depth + 1) for i in tags.values() if isinstance(i, dict)]\n max_depth = 0\n while queue and max_depth < 2:\n sub, depth = queue.pop()\n max_depth = max(max_depth, depth)\n queue = queue + [(i, depth + 1) for i in sub.values() if isinstance\n (i, dict)]\n return max_depth < 2 and all(isinstance(k, str) and isinstance(tags[k],\n str) for k in tags)\n", "role": "user" }, { "content": "ok_tags", "role": "assistant" } ]
def x(self, val: str) ->None: if not StorageRecord.ok_tags(val): LOGGER.debug( 'StorageRecord.__init__ <!< Tags %s must map strings to strings', val) raise BadRecord('Tags {} must map strings to strings'.format(val)) self._tags = val or {}
tags
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, val: str) ->None:\n if not StorageRecord.ok_tags(val):\n LOGGER.debug(\n 'StorageRecord.__init__ <!< Tags %s must map strings to strings',\n val)\n raise BadRecord('Tags {} must map strings to strings'.format(val))\n self._tags = val or {}\n", "role": "user" }, { "content": "tags", "role": "assistant" } ]
def x(self) ->dict: return {t: self.tags[t] for t in self.tags or {} if t.startswith('~') } or None
clear_tags
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self) ->dict:\n return {t: self.tags[t] for t in self.tags or {} if t.startswith('~')\n } or None\n", "role": "user" }, { "content": "clear_tags", "role": "assistant" } ]
def x(self) ->dict: return {t: self._tags[t] for t in self.tags or {} if not t.startswith('~') } or None
encr_tags
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self) ->dict:\n return {t: self._tags[t] for t in self.tags or {} if not t.startswith('~')\n } or None\n", "role": "user" }, { "content": "encr_tags", "role": "assistant" } ]
def x(prefix): try: response = requests.get(url=API_ENDPOINT.format(prefix), headers={ 'User-Agent': USER_AGENT}, timeout=getattr(settings, 'PWNED_PASSWORDS_API_TIMEOUT', REQUEST_TIMEOUT)) response.raise_for_status() except requests.RequestException as e: log.warning('Skipped Pwned Passwords check due to error: %r', e) return None results = {} for line in response.text.splitlines(): line_suffix, _, times = line.partition(':') results[line_suffix] = int(times) return results
get_pwned
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(prefix):\n try:\n response = requests.get(url=API_ENDPOINT.format(prefix), headers={\n 'User-Agent': USER_AGENT}, timeout=getattr(settings,\n 'PWNED_PASSWORDS_API_TIMEOUT', REQUEST_TIMEOUT))\n response.raise_for_status()\n except requests.RequestException as e:\n log.warning('Skipped Pwned Passwords check due to error: %r', e)\n return None\n results = {}\n for line in response.text.splitlines():\n line_suffix, _, times = line.partition(':')\n results[line_suffix] = int(times)\n return results\n", "role": "user" }, { "content": "get_pwned", "role": "assistant" } ]
def x(password): if not isinstance(password, text_type): raise TypeError('Password values to check must be Unicode strings.') password_hash = hashlib.sha1(password.encode('utf-8')).hexdigest().upper() prefix, suffix = password_hash[:5], password_hash[5:] results = _get_pwned(prefix) if results is None: return None return results.get(suffix, 0)
pwned_password
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(password):\n if not isinstance(password, text_type):\n raise TypeError('Password values to check must be Unicode strings.')\n password_hash = hashlib.sha1(password.encode('utf-8')).hexdigest().upper()\n prefix, suffix = password_hash[:5], password_hash[5:]\n results = _get_pwned(prefix)\n if results is None:\n return None\n return results.get(suffix, 0)\n", "role": "user" }, { "content": "pwned_password", "role": "assistant" } ]
def x(self, proxfun, **kwargs): if isinstance(proxfun, str): try: proxfun_name = proxfun.split(None, 1)[0] op = getattr(operators, proxfun_name) self.objectives.append(lambda theta, rho: op(theta.copy(), float(rho), **kwargs)) except AttributeError as e: print(str(e) + '\n' + 'Could not find the function ' + proxfun + ' in the operators module!') elif hasattr(proxfun, '__call__'): self.objectives.append(lambda theta, rho: proxfun(theta.copy(), float(rho))) else: raise TypeError( 'The argument "proxfun" must be a string or a function!')
add_regularizer
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, proxfun, **kwargs):\n if isinstance(proxfun, str):\n try:\n proxfun_name = proxfun.split(None, 1)[0]\n op = getattr(operators, proxfun_name)\n self.objectives.append(lambda theta, rho: op(theta.copy(),\n float(rho), **kwargs))\n except AttributeError as e:\n print(str(e) + '\\n' + 'Could not find the function ' + proxfun +\n ' in the operators module!')\n elif hasattr(proxfun, '__call__'):\n self.objectives.append(lambda theta, rho: proxfun(theta.copy(),\n float(rho)))\n else:\n raise TypeError(\n 'The argument \"proxfun\" must be a string or a function!')\n", "role": "user" }, { "content": "add_regularizer", "role": "assistant" } ]
def x(self, regularizers, clear=True): if clear: self.clear() list([self.add_regularizer(proxfun, **regularizers[proxfun]) for proxfun in regularizers.keys()])
set_regularizers
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, regularizers, clear=True):\n if clear:\n self.clear()\n list([self.add_regularizer(proxfun, **regularizers[proxfun]) for\n proxfun in regularizers.keys()])\n", "role": "user" }, { "content": "set_regularizers", "role": "assistant" } ]
def x(self, theta_init, max_iter=50, callback=None, disp=0, tau=(10.0, 2.0, 2.0), tol=0.001): num_obj = len(self.objectives) assert num_obj >= 1, 'There must be at least one objective!' orig_shape = theta_init.shape primals = [theta_init.flatten() for _ in range(num_obj)] duals = [np.zeros(theta_init.size) for _ in range(num_obj)] theta_avg = np.mean(primals, axis=0).ravel() tau = namedtuple('tau', ('init', 'inc', 'dec'))(*tau) rho = tau.init tstart = time.time() self.metadata = defaultdict(list) self.converged = False for cur_iter in range(max_iter): theta_prev = theta_avg for varidx, dual in enumerate(duals): primals[varidx] = self.objectives[varidx]((theta_prev - dual). reshape(orig_shape), rho).ravel() theta_avg = np.mean(primals, axis=0) for varidx, primal in enumerate(primals): duals[varidx] += primal - theta_avg primal_resid = float(np.sum([np.linalg.norm(primal - theta_avg) for primal in primals])) dual_resid = num_obj * rho ** 2 * np.linalg.norm(theta_avg - theta_prev ) if primal_resid > tau.init * dual_resid: rho *= float(tau.inc) elif dual_resid > tau.init * primal_resid: rho /= float(tau.dec) self.metadata['Primal resid'].append(primal_resid) self.metadata['Dual resid'].append(dual_resid) self.metadata['Time (s)'].append(time.time() - tstart) self.metadata['rho'].append(rho) if callback is not None: data = valmap(last, self.metadata) callback(theta_avg.reshape(orig_shape), data) self.update_display(cur_iter + 1, disp) if (primal_resid <= tol) & (dual_resid <= tol): self.converged = True break self.update_display(-1, disp) self.theta = theta_avg.reshape(orig_shape) return self.theta
minimize
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, theta_init, max_iter=50, callback=None, disp=0, tau=(10.0, 2.0,\n 2.0), tol=0.001):\n num_obj = len(self.objectives)\n assert num_obj >= 1, 'There must be at least one objective!'\n orig_shape = theta_init.shape\n primals = [theta_init.flatten() for _ in range(num_obj)]\n duals = [np.zeros(theta_init.size) for _ in range(num_obj)]\n theta_avg = np.mean(primals, axis=0).ravel()\n tau = namedtuple('tau', ('init', 'inc', 'dec'))(*tau)\n rho = tau.init\n tstart = time.time()\n self.metadata = defaultdict(list)\n self.converged = False\n for cur_iter in range(max_iter):\n theta_prev = theta_avg\n for varidx, dual in enumerate(duals):\n primals[varidx] = self.objectives[varidx]((theta_prev - dual).\n reshape(orig_shape), rho).ravel()\n theta_avg = np.mean(primals, axis=0)\n for varidx, primal in enumerate(primals):\n duals[varidx] += primal - theta_avg\n primal_resid = float(np.sum([np.linalg.norm(primal - theta_avg) for\n primal in primals]))\n dual_resid = num_obj * rho ** 2 * np.linalg.norm(theta_avg - theta_prev\n )\n if primal_resid > tau.init * dual_resid:\n rho *= float(tau.inc)\n elif dual_resid > tau.init * primal_resid:\n rho /= float(tau.dec)\n self.metadata['Primal resid'].append(primal_resid)\n self.metadata['Dual resid'].append(dual_resid)\n self.metadata['Time (s)'].append(time.time() - tstart)\n self.metadata['rho'].append(rho)\n if callback is not None:\n data = valmap(last, self.metadata)\n callback(theta_avg.reshape(orig_shape), data)\n self.update_display(cur_iter + 1, disp)\n if (primal_resid <= tol) & (dual_resid <= tol):\n self.converged = True\n break\n self.update_display(-1, disp)\n self.theta = theta_avg.reshape(orig_shape)\n return self.theta\n", "role": "user" }, { "content": "minimize", "role": "assistant" } ]
def x(self, iteration, disp_level, col_width=12): if disp_level == 0: return else: if disp_level == 1 and iteration >= 0: print('[Iteration %i]' % iteration) if disp_level > 1: data = valmap(last, self.metadata) keys = ['Time (s)', 'Primal resid', 'Dual resid', 'rho'] if iteration == 1: print(tableprint.header(keys, width=col_width)) print(tableprint.row([data[k] for k in keys], width=col_width, format_spec='4g')) if iteration == -1: print(tableprint.bottom(len(keys), width=col_width) + '\n') if iteration == -1 and self.converged: print('Converged after %i iterations!' % len(self.metadata[ 'Primal resid']))
update_display
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, iteration, disp_level, col_width=12):\n if disp_level == 0:\n return\n else:\n if disp_level == 1 and iteration >= 0:\n print('[Iteration %i]' % iteration)\n if disp_level > 1:\n data = valmap(last, self.metadata)\n keys = ['Time (s)', 'Primal resid', 'Dual resid', 'rho']\n if iteration == 1:\n print(tableprint.header(keys, width=col_width))\n print(tableprint.row([data[k] for k in keys], width=col_width,\n format_spec='4g'))\n if iteration == -1:\n print(tableprint.bottom(len(keys), width=col_width) + '\\n')\n if iteration == -1 and self.converged:\n print('Converged after %i iterations!' % len(self.metadata[\n 'Primal resid']))\n", "role": "user" }, { "content": "update_display", "role": "assistant" } ]
def x(x, x_obs, rho, penalties): assert type(x) == Tensor, 'Input array must be a Tensor' while True: x = squared_error(x, rho, x_obs) for ix, penalty in enumerate(penalties): x = x.unfold(ix).svt(penalty / rho).fold() yield x
susvd
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(x, x_obs, rho, penalties):\n assert type(x) == Tensor, 'Input array must be a Tensor'\n while True:\n x = squared_error(x, rho, x_obs)\n for ix, penalty in enumerate(penalties):\n x = x.unfold(ix).svt(penalty / rho).fold()\n yield x\n", "role": "user" }, { "content": "susvd", "role": "assistant" } ]
def x(self, obj, context=None) ->bytes: stream = BytesIO() self.build_stream(obj, stream, context) return stream.getvalue()
build
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, obj, context=None) ->bytes:\n stream = BytesIO()\n self.build_stream(obj, stream, context)\n return stream.getvalue()\n", "role": "user" }, { "content": "build", "role": "assistant" } ]
def x(self, data: bytes, context=None): stream = BytesIO(data) return self.parse_stream(stream, context)
parse
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, data: bytes, context=None):\n stream = BytesIO(data)\n return self.parse_stream(stream, context)\n", "role": "user" }, { "content": "parse", "role": "assistant" } ]
def x(self, obj, stream: BytesIO, context=None) ->None: if context is None: context = Context() if not isinstance(context, Context): context = Context(context) try: self._build_stream(obj, stream, context) except Error: raise except Exception as exc: raise BuildingError(str(exc))
build_stream
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, obj, stream: BytesIO, context=None) ->None:\n if context is None:\n context = Context()\n if not isinstance(context, Context):\n context = Context(context)\n try:\n self._build_stream(obj, stream, context)\n except Error:\n raise\n except Exception as exc:\n raise BuildingError(str(exc))\n", "role": "user" }, { "content": "build_stream", "role": "assistant" } ]
def x(self, stream: BytesIO, context=None): if context is None: context = Context() if not isinstance(context, Context): context = Context(context) try: return self._parse_stream(stream, context) except Error: raise except Exception as exc: raise ParsingError(str(exc))
parse_stream
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, stream: BytesIO, context=None):\n if context is None:\n context = Context()\n if not isinstance(context, Context):\n context = Context(context)\n try:\n return self._parse_stream(stream, context)\n except Error:\n raise\n except Exception as exc:\n raise ParsingError(str(exc))\n", "role": "user" }, { "content": "parse_stream", "role": "assistant" } ]
def x(self, context=None) ->int: if context is None: context = Context() if not isinstance(context, Context): context = Context(context) try: return self._sizeof(context) except Error: raise except Exception as exc: raise SizeofError(str(exc))
sizeof
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, context=None) ->int:\n if context is None:\n context = Context()\n if not isinstance(context, Context):\n context = Context(context)\n try:\n return self._sizeof(context)\n except Error:\n raise\n except Exception as exc:\n raise SizeofError(str(exc))\n", "role": "user" }, { "content": "sizeof", "role": "assistant" } ]
def x(x0, rho, optimizer, num_steps=50): optimizer.set_theta(x0, float(rho)) optimizer.theta_admm_prev = optimizer.theta_original_to_flat(x0) return optimizer.optimize(num_steps=num_steps)
sfo
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(x0, rho, optimizer, num_steps=50):\n optimizer.set_theta(x0, float(rho))\n optimizer.theta_admm_prev = optimizer.theta_original_to_flat(x0)\n return optimizer.optimize(num_steps=num_steps)\n", "role": "user" }, { "content": "sfo", "role": "assistant" } ]
def x(x0, rho, x, y): n = float(x.shape[0]) f = lambda w: np.mean(np.exp(x.dot(w)) - y * x.dot(w)) df = lambda w: (x.T.dot(np.exp(x.dot(w))) - x.T.dot(y)) / n return bfgs(x0, rho, f, df)
poissreg
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(x0, rho, x, y):\n n = float(x.shape[0])\n f = lambda w: np.mean(np.exp(x.dot(w)) - y * x.dot(w))\n df = lambda w: (x.T.dot(np.exp(x.dot(w))) - x.T.dot(y)) / n\n return bfgs(x0, rho, f, df)\n", "role": "user" }, { "content": "poissreg", "role": "assistant" } ]
def x(x0, rho, f_df, maxiter=50, method='BFGS'): orig_shape = x0.shape def x(x): xk = x.reshape(orig_shape) obj, grad = f_df(xk) g = obj + rho / 2.0 * np.sum((xk - x0) ** 2) dg = (grad + rho * (xk - x0)).ravel() return g, dg options = {'maxiter': maxiter, 'disp': False} return opt.minimize(f_df_augmented, x0.ravel(), method=method, jac=True, options=options).x.reshape(orig_shape)
bfgs
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(x0, rho, f_df, maxiter=50, method='BFGS'):\n orig_shape = x0.shape\n\n def x(x):\n xk = x.reshape(orig_shape)\n obj, grad = f_df(xk)\n g = obj + rho / 2.0 * np.sum((xk - x0) ** 2)\n dg = (grad + rho * (xk - x0)).ravel()\n return g, dg\n options = {'maxiter': maxiter, 'disp': False}\n return opt.minimize(f_df_augmented, x0.ravel(), method=method, jac=True,\n options=options).x.reshape(orig_shape)\n", "role": "user" }, { "content": "bfgs", "role": "assistant" } ]
def x(x0, rho, gamma, axis=0): n = x0.shape[axis] lap_op = spdiags([(2 + rho / gamma) * np.ones(n), -1 * np.ones(n), -1 * np.ones(n)], [0, -1, 1], n, n, format='csc') x_out = np.rollaxis(spsolve(gamma * lap_op, rho * np.rollaxis(x0, axis, 0)), axis, 0) return x_out
smooth
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(x0, rho, gamma, axis=0):\n n = x0.shape[axis]\n lap_op = spdiags([(2 + rho / gamma) * np.ones(n), -1 * np.ones(n), -1 *\n np.ones(n)], [0, -1, 1], n, n, format='csc')\n x_out = np.rollaxis(spsolve(gamma * lap_op, rho * np.rollaxis(x0, axis,\n 0)), axis, 0)\n return x_out\n", "role": "user" }, { "content": "smooth", "role": "assistant" } ]
def x(x0, rho, gamma): u, s, v = np.linalg.svd(x0, full_matrices=False) sthr = np.maximum(s - gamma / float(rho), 0) x_out = u.dot(np.diag(sthr)).dot(v) return x_out
nucnorm
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(x0, rho, gamma):\n u, s, v = np.linalg.svd(x0, full_matrices=False)\n sthr = np.maximum(s - gamma / float(rho), 0)\n x_out = u.dot(np.diag(sthr)).dot(v)\n return x_out\n", "role": "user" }, { "content": "nucnorm", "role": "assistant" } ]
def x(x0, rho, gamma): try: from skimage.restoration import denoise_tv_bregman except ImportError: print('Error: scikit-image not found. TVD will not work.') return x0 return denoise_tv_bregman(x0, rho / gamma)
tvd
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(x0, rho, gamma):\n try:\n from skimage.restoration import denoise_tv_bregman\n except ImportError:\n print('Error: scikit-image not found. TVD will not work.')\n return x0\n return denoise_tv_bregman(x0, rho / gamma)\n", "role": "user" }, { "content": "tvd", "role": "assistant" } ]
def x(x0, rho, gamma): lmbda = float(gamma) / rho return (x0 - lmbda) * (x0 >= lmbda) + (x0 + lmbda) * (x0 <= -lmbda)
sparse
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(x0, rho, gamma):\n lmbda = float(gamma) / rho\n return (x0 - lmbda) * (x0 >= lmbda) + (x0 + lmbda) * (x0 <= -lmbda)\n", "role": "user" }, { "content": "sparse", "role": "assistant" } ]
def x(x0, rho, P, q):
    return np.linalg.solve(rho * np.eye(q.shape[0]) + P, rho * x0.copy() + q)
linsys
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(x0, rho, P, q):\n return np.linalg.solve(rho * np.eye(q.shape[0]) + P, rho * x0.copy() + q)\n", "role": "user" }, { "content": "linsys", "role": "assistant" } ]
def x(self):
    status = _HTTP_STATUS_LINES.get(self._status_code)
    return str(status or '{} Unknown'.format(self._status_code))
status
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n status = _HTTP_STATUS_LINES.get(self._status_code)\n return str(status or '{} Unknown'.format(self._status_code))\n", "role": "user" }, { "content": "status", "role": "assistant" } ]
def x(self):
    if 'Content-Type' not in self.headers:
        self.headers.add_header('Content-Type', self.default_content_type)
    if self._cookies:
        for c in self._cookies.values():
            self.headers.add_header('Set-Cookie', c.OutputString())
    return self.headers.items()
headerlist
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if 'Content-Type' not in self.headers:\n self.headers.add_header('Content-Type', self.default_content_type)\n if self._cookies:\n for c in self._cookies.values():\n self.headers.add_header('Set-Cookie', c.OutputString())\n return self.headers.items()\n", "role": "user" }, { "content": "headerlist", "role": "assistant" } ]
def x(self, response, expected_url=None):
    self.assertIn(response.status_code, self.redirect_codes, self.
        _get_redirect_assertion_message(response))
    if expected_url:
        location_header = response._headers.get('location', None)
        self.assertEqual(location_header, ('Location', str(expected_url)),
            'Response should redirect to {0}, but it redirects to {1} instead'
            .format(expected_url, location_header[1]))
assert_redirect
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, response, expected_url=None):\n self.assertIn(response.status_code, self.redirect_codes, self.\n _get_redirect_assertion_message(response))\n if expected_url:\n location_header = response._headers.get('location', None)\n self.assertEqual(location_header, ('Location', str(expected_url)),\n 'Response should redirect to {0}, but it redirects to {1} instead'\n .format(expected_url, location_header[1]))\n", "role": "user" }, { "content": "assert_redirect", "role": "assistant" } ]
def x(self, model_class, **kwargs):
    return _InstanceContext(self.assert_instance_does_not_exist, self.
        assert_instance_exists, model_class, **kwargs)
assert_instance_created
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, model_class, **kwargs):\n return _InstanceContext(self.assert_instance_does_not_exist, self.\n assert_instance_exists, model_class, **kwargs)\n", "role": "user" }, { "content": "assert_instance_created", "role": "assistant" } ]
def x(self, model_class, **kwargs):
    return _InstanceContext(self.assert_instance_exists, self.
        assert_instance_does_not_exist, model_class, **kwargs)
assert_instance_deleted
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, model_class, **kwargs):\n return _InstanceContext(self.assert_instance_exists, self.\n assert_instance_does_not_exist, model_class, **kwargs)\n", "role": "user" }, { "content": "assert_instance_deleted", "role": "assistant" } ]
def x(x):
    if ';' in x:
        content_type, priority = x.split(';')
        casted_priority = float(priority.split('=')[1])
    else:
        content_type, casted_priority = x, 1.0
    content_type = content_type.lstrip().rstrip()
    return content_type, casted_priority
split_into_mimetype_and_priority
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(x):\n if ';' in x:\n content_type, priority = x.split(';')\n casted_priority = float(priority.split('=')[1])\n else:\n content_type, casted_priority = x, 1.0\n content_type = content_type.lstrip().rstrip()\n return content_type, casted_priority\n", "role": "user" }, { "content": "split_into_mimetype_and_priority", "role": "assistant" } ]
def x(accept_header):
    return sorted([_split_into_mimetype_and_priority(x) for x in
        accept_header.split(',')], key=lambda x: x[1], reverse=True)
parse_and_sort_accept_header
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(accept_header):\n return sorted([_split_into_mimetype_and_priority(x) for x in\n accept_header.split(',')], key=lambda x: x[1], reverse=True)\n", "role": "user" }, { "content": "parse_and_sort_accept_header", "role": "assistant" } ]
def x(accept_header, mimetypes):
    for mimetype_pattern, _ in _parse_and_sort_accept_header(accept_header):
        matched_types = fnmatch.filter(mimetypes, mimetype_pattern)
        if matched_types:
            return matched_types[0]
    return mimetypes[0]
accept_best_match
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(accept_header, mimetypes):\n for mimetype_pattern, _ in _parse_and_sort_accept_header(accept_header):\n matched_types = fnmatch.filter(mimetypes, mimetype_pattern)\n if matched_types:\n return matched_types[0]\n return mimetypes[0]\n", "role": "user" }, { "content": "accept_best_match", "role": "assistant" } ]
def x(url_vars, type_hints):
    typed_url_vars = {}
    try:
        for k, v in url_vars.items():
            arg_type = type_hints.get(k)
            if arg_type and arg_type != str:
                typed_url_vars[k] = arg_type(v)
            else:
                typed_url_vars[k] = v
    except ValueError:
        return False, {}
    return True, typed_url_vars
match_url_vars_type
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(url_vars, type_hints):\n typed_url_vars = {}\n try:\n for k, v in url_vars.items():\n arg_type = type_hints.get(k)\n if arg_type and arg_type != str:\n typed_url_vars[k] = arg_type(v)\n else:\n typed_url_vars[k] = v\n except ValueError:\n return False, {}\n return True, typed_url_vars\n", "role": "user" }, { "content": "match_url_vars_type", "role": "assistant" } ]
def x(rule, path):
    split_rule = split_by_slash(rule)
    split_path = split_by_slash(path)
    url_vars = {}
    if len(split_rule) != len(split_path):
        return False, {}
    for r, p in zip(split_rule, split_path):
        if r.startswith('{') and r.endswith('}'):
            url_vars[r[1:-1]] = p
            continue
        if r != p:
            return False, {}
    return True, url_vars
match_path
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(rule, path):\n split_rule = split_by_slash(rule)\n split_path = split_by_slash(path)\n url_vars = {}\n if len(split_rule) != len(split_path):\n return False, {}\n for r, p in zip(split_rule, split_path):\n if r.startswith('{') and r.endswith('}'):\n url_vars[r[1:-1]] = p\n continue\n if r != p:\n return False, {}\n return True, url_vars\n", "role": "user" }, { "content": "match_path", "role": "assistant" } ]
def x(self, path, method):
    if path != '/':
        path = path.rstrip('/')
    method = method.upper()
    status = 404
    for p, n, m in self.endpoints:
        matched, url_vars = match_path(p, path)
        if not matched:
            continue
        if method not in m:
            status = 405
            raise HTTPError(status=status, body=
                f'Method not found: {path} {method}')
        callback, type_hints = m[method]
        type_matched, typed_url_vars = match_url_vars_type(url_vars, type_hints
            )
        if not type_matched:
            continue
        return callback, typed_url_vars
    raise HTTPError(status=status, body=f'Not found: {path}')
match
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, path, method):\n if path != '/':\n path = path.rstrip('/')\n method = method.upper()\n status = 404\n for p, n, m in self.endpoints:\n matched, url_vars = match_path(p, path)\n if not matched:\n continue\n if method not in m:\n status = 405\n raise HTTPError(status=status, body=\n f'Method not found: {path} {method}')\n callback, type_hints = m[method]\n type_matched, typed_url_vars = match_url_vars_type(url_vars, type_hints\n )\n if not type_matched:\n continue\n return callback, typed_url_vars\n raise HTTPError(status=status, body=f'Not found: {path}')\n", "role": "user" }, { "content": "match", "role": "assistant" } ]
def x(self, rule, method, name, callback):
    if rule != '/':
        rule = rule.rstrip('/')
    method = method.upper()
    for i, e in enumerate(self.endpoints):
        r, n, callbacks = e
        if r == rule:
            assert name == n and n is not None, 'A same path should set a same name for reverse routing.'
            callbacks[method] = callback, get_type_hints(callback)
            self.endpoints[i] = r, name, callbacks
            break
    else:
        e = rule, name, {method: (callback, get_type_hints(callback))}
        self.endpoints.append(e)
add
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, rule, method, name, callback):\n if rule != '/':\n rule = rule.rstrip('/')\n method = method.upper()\n for i, e in enumerate(self.endpoints):\n r, n, callbacks = e\n if r == rule:\n assert name == n and n is not None, 'A same path should set a same name for reverse routing.'\n callbacks[method] = callback, get_type_hints(callback)\n self.endpoints[i] = r, name, callbacks\n break\n else:\n e = rule, name, {method: (callback, get_type_hints(callback))}\n self.endpoints.append(e)\n", "role": "user" }, { "content": "add", "role": "assistant" } ]
def x(self, name, **kwargs):
    for p, n, _ in self.endpoints:
        if name == n:
            return p.format(**kwargs)
reverse
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, name, **kwargs):\n for p, n, _ in self.endpoints:\n if name == n:\n return p.format(**kwargs)\n", "role": "user" }, { "content": "reverse", "role": "assistant" } ]
def x(fn):
    lock = threading.Lock()

    @functools.wraps(fn)
    def x(*args, **kwargs):
        lock.acquire()
        try:
            return fn(*args, **kwargs)
        finally:
            lock.release()
    return decorated
synchronized
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(fn):\n lock = threading.Lock()\n\n @functools.wraps(fn)\n def x(*args, **kwargs):\n lock.acquire()\n try:\n return fn(*args, **kwargs)\n finally:\n lock.release()\n return decorated\n", "role": "user" }, { "content": "synchronized", "role": "assistant" } ]
def x(self, value):
    with self._lock.exclusive:
        self._value = value
        return value
set
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, value):\n with self._lock.exclusive:\n self._value = value\n return value\n", "role": "user" }, { "content": "set", "role": "assistant" } ]
def x(self, value):
    with self._lock.exclusive:
        oldval = self._value
        self._value = value
        return oldval
get_and_set
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, value):\n with self._lock.exclusive:\n oldval = self._value\n self._value = value\n return oldval\n", "role": "user" }, { "content": "get_and_set", "role": "assistant" } ]
def x(self, expect, update):
    with self._lock.exclusive:
        if self._value == expect:
            self._value = update
            return True
        return False
compare_and_set
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, expect, update):\n with self._lock.exclusive:\n if self._value == expect:\n self._value = update\n return True\n return False\n", "role": "user" }, { "content": "compare_and_set", "role": "assistant" } ]
def x(self, delta):
    with self._lock.exclusive:
        self._value += delta
        return self._value
add_and_get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, delta):\n with self._lock.exclusive:\n self._value += delta\n return self._value\n", "role": "user" }, { "content": "add_and_get", "role": "assistant" } ]
def x(self, delta):
    with self._lock.exclusive:
        oldval = self._value
        self._value += delta
        return oldval
get_and_add
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, delta):\n with self._lock.exclusive:\n oldval = self._value\n self._value += delta\n return oldval\n", "role": "user" }, { "content": "get_and_add", "role": "assistant" } ]
def x(self, delta):
    with self._lock.exclusive:
        self._value -= delta
        return self._value
subtract_and_get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, delta):\n with self._lock.exclusive:\n self._value -= delta\n return self._value\n", "role": "user" }, { "content": "subtract_and_get", "role": "assistant" } ]
def x(self, delta):
    with self._lock.exclusive:
        oldval = self._value
        self._value -= delta
        return oldval
get_and_subtract
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, delta):\n with self._lock.exclusive:\n oldval = self._value\n self._value -= delta\n return oldval\n", "role": "user" }, { "content": "get_and_subtract", "role": "assistant" } ]