desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
def add_output(self, output):
    """Adds an output to a Transaction's list of outputs.

    Args:
        output (:class:`~bigchaindb.common.transaction.Output`): An Output
            to be added to the Transaction.

    Raises:
        TypeError: If ``output`` is not an ``Output`` instance.
    """
    if not isinstance(output, Output):
        # Fix: the old message claimed "or None" was acceptable, but the
        # isinstance check above rejects None as well, so the message lied.
        raise TypeError('`output` must be an Output instance')
    self.outputs.append(output)
'Fulfills a previous Transaction\'s Output by signing Inputs. Note: This method works only for the following Cryptoconditions currently: - Ed25519Fulfillment - ThresholdSha256 Furthermore, note that all keys required to fully sign the Transaction have to be passed to this method. A subset of all will cause this method ...
def sign(self, private_keys):
if ((private_keys is None) or (not isinstance(private_keys, list))): raise TypeError('`private_keys` must be a list instance') def gen_public_key(private_key): public_key = private_key.get_verifying_key().encode() return public_key.decode() key_pairs = {gen_public_key(...
'Signs a single Input. Note: This method works only for the following Cryptoconditions currently: - Ed25519Fulfillment - ThresholdSha256. Args: input_ (:class:`~bigchaindb.common.transaction. Input`) The Input to be signed. message (str): The message to be signed key_pairs (dict): The keys to sign the Transaction with....
@classmethod def _sign_input(cls, input_, message, key_pairs):
if isinstance(input_.fulfillment, Ed25519Sha256): return cls._sign_simple_signature_fulfillment(input_, message, key_pairs) elif isinstance(input_.fulfillment, ThresholdSha256): return cls._sign_threshold_signature_fulfillment(input_, message, key_pairs) else: raise ValueError("Fulfi...
'Signs a Ed25519Fulfillment. Args: input_ (:class:`~bigchaindb.common.transaction. Input`) The input to be signed. message (str): The message to be signed key_pairs (dict): The keys to sign the Transaction with.'
@classmethod def _sign_simple_signature_fulfillment(cls, input_, message, key_pairs):
input_ = deepcopy(input_) public_key = input_.owners_before[0] try: input_.fulfillment.sign(message.encode(), base58.b58decode(key_pairs[public_key].encode())) except KeyError: raise KeypairMismatchException('Public key {} is not a pair to any of the priv...
'Signs a ThresholdSha256. Args: input_ (:class:`~bigchaindb.common.transaction. Input`) The Input to be signed. message (str): The message to be signed key_pairs (dict): The keys to sign the Transaction with.'
@classmethod def _sign_threshold_signature_fulfillment(cls, input_, message, key_pairs):
input_ = deepcopy(input_) for owner_before in set(input_.owners_before): ccffill = input_.fulfillment subffills = ccffill.get_subcondition_from_vk(base58.b58decode(owner_before)) if (not subffills): raise KeypairMismatchException('Public key {} cannot be found ...
'Validates the Inputs in the Transaction against given Outputs. Note: Given a `CREATE` or `GENESIS` Transaction is passed, dummy values for Outputs are submitted for validation that evaluate parts of the validation-checks to `True`. Args: outputs (:obj:`list` of :class:`~bigchaindb.common. transaction.Output`): A list ...
def inputs_valid(self, outputs=None):
if (self.operation in (Transaction.CREATE, Transaction.GENESIS)): return self._inputs_valid(['dummyvalue' for _ in self.inputs]) elif (self.operation == Transaction.TRANSFER): return self._inputs_valid([output.fulfillment.condition_uri for output in outputs]) else: allowed_ops = ', ...
'Validates an Input against a given set of Outputs. Note: The number of `output_condition_uris` must be equal to the number of Inputs a Transaction has. Args: output_condition_uris (:obj:`list` of :obj:`str`): A list of Outputs to check the Inputs against. Returns: bool: If all Outputs are valid.'
def _inputs_valid(self, output_condition_uris):
if (len(self.inputs) != len(output_condition_uris)): raise ValueError('Inputs and output_condition_uris must have the same count') tx_dict = self.to_dict() tx_dict = Transaction._remove_signatures(tx_dict) tx_serialized = Transaction._to_str(tx_dict) def validate(i, outp...
'Validates a single Input against a single Output. Note: In case of a `CREATE` or `GENESIS` Transaction, this method does not validate against `output_condition_uri`. Args: input_ (:class:`~bigchaindb.common.transaction. Input`) The Input to be signed. operation (str): The type of Transaction. tx_serialized (str): The ...
@staticmethod def _input_valid(input_, operation, tx_serialized, output_condition_uri=None):
ccffill = input_.fulfillment try: parsed_ffill = Fulfillment.from_uri(ccffill.serialize_uri()) except (TypeError, ValueError, ParsingError, ASN1DecodeError, ASN1EncodeError): return False if (operation in (Transaction.CREATE, Transaction.GENESIS)): output_valid = True else: ...
'Transforms the object to a Python dictionary. Returns: dict: The Transaction as an alternative serialization format.'
def to_dict(self):
tx = {'inputs': [input_.to_dict() for input_ in self.inputs], 'outputs': [output.to_dict() for output in self.outputs], 'operation': str(self.operation), 'metadata': self.metadata, 'asset': self.asset, 'version': self.version} tx_no_signatures = Transaction._remove_signatures(tx) tx_serialized = Transaction...
@staticmethod
def _remove_signatures(tx_dict):
    """Takes a Transaction dictionary and removes all signatures.

    Args:
        tx_dict (dict): The Transaction to remove all signatures from.

    Returns:
        dict: A deep copy of ``tx_dict`` with every input's fulfillment
        cleared (set to ``None``).
    """
    # Work on a copy so the caller's dict keeps its signatures.
    unsigned = deepcopy(tx_dict)
    for each_input in unsigned['inputs']:
        each_input['fulfillment'] = None
    return unsigned
'Get the asset id from a list of :class:`~.Transactions`. This is useful when we want to check if the multiple inputs of a transaction are related to the same asset id. Args: transactions (:obj:`list` of :class:`~bigchaindb.common. transaction.Transaction`): A list of Transactions. Usually input Transactions that shoul...
@staticmethod def get_asset_id(transactions):
if (not isinstance(transactions, list)): transactions = [transactions] asset_ids = {(tx.id if (tx.operation == Transaction.CREATE) else tx.asset['id']) for tx in transactions} if (len(asset_ids) > 1): raise AssetIdMismatch('All inputs of all transactions passed need to ...
'Validate the transaction ID of a transaction Args: tx_body (dict): The Transaction to be transformed.'
@staticmethod def validate_id(tx_body):
tx_body = deepcopy(tx_body) try: proposed_tx_id = tx_body.pop('id') except KeyError: raise InvalidHash('No transaction id found!') tx_body_no_signatures = Transaction._remove_signatures(tx_body) tx_body_serialized = Transaction._to_str(tx_body_no_signatures) valid_tx_id ...
@classmethod
def from_dict(cls, tx):
    """Transforms a Python dictionary to a Transaction object.

    Args:
        tx (dict): The Transaction to be transformed.

    Returns:
        :class:`~bigchaindb.common.transaction.Transaction`
    """
    # Reject dicts whose id does not match their content before parsing.
    cls.validate_id(tx)
    parsed_inputs = [Input.from_dict(each) for each in tx['inputs']]
    parsed_outputs = [Output.from_dict(each) for each in tx['outputs']]
    return cls(tx['operation'], tx['asset'], parsed_inputs,
               parsed_outputs, tx['metadata'], tx['version'])
def __init__(self):
    """Initialize the BlockPipeline creator."""
    # Handle to the node's database/consensus layer.
    self.bigchain = Bigchain()
    # Coroutine that accumulates incoming transactions until a block is cut
    # (see `create`, which `.send()`s into it and replaces it per block).
    self.txs = tx_collector()
def filter_tx(self, tx):
    """Filter a transaction.

    Args:
        tx (dict): the transaction to process.

    Returns:
        dict: The transaction if assigned to the current node,
        ``None`` otherwise.
    """
    # Guard clause: transactions assigned to other nodes are dropped
    # (implicit None return).
    if tx['assignee'] != self.bigchain.me:
        return None
    # Strip assignment bookkeeping before the tx moves down the pipeline.
    del tx['assignee']
    del tx['assignment_timestamp']
    return tx
'Validate a transaction. Also checks if the transaction already exists in the blockchain. If it does, or it\'s invalid, it\'s deleted from the backlog immediately. Args: tx (dict): the transaction to validate. Returns: :class:`~bigchaindb.models.Transaction`: The transaction if valid, ``None`` otherwise.'
def validate_tx(self, tx):
try: tx = Transaction.from_dict(tx) except ValidationError: return None if (not self.bigchain.is_new_transaction(tx.id)): self.bigchain.delete_transaction(tx.id) return None try: if (tx.operation == Transaction.GENESIS): raise GenesisBlockAlreadyExists...
def create(self, tx, timeout=False):
    """Create a block.

    Accumulates transactions and outputs a block when either the block
    size limit (1000 txs) is reached or a timeout fired with pending txs.

    Args:
        tx (:class:`~bigchaindb.models.Transaction`): the transaction to
            accumulate; may be ``None`` when a timeout happened.
        timeout (bool): ``True`` if a timeout happened.

    Returns:
        :class:`~bigchaindb.models.Block`: The block, or ``None`` if no
        block was cut this round.
    """
    pending = self.txs.send(tx)
    block_is_full = len(pending) == 1000
    if block_is_full or (timeout and pending):
        new_block = self.bigchain.create_block(pending)
        # Start a fresh collector for the next block.
        self.txs = tx_collector()
        return new_block
def write(self, block):
    """Write the block to the Database.

    Args:
        block (:class:`~bigchaindb.models.Block`): the block of
            transactions to write to the database.

    Returns:
        :class:`~bigchaindb.models.Block`: The Block.
    """
    tx_count = len(block.transactions)
    logger.info('Write new block %s with %s transactions', block.id, tx_count)
    self.bigchain.write_block(block)
    # Emit a throughput metric sized by the number of txs in the block.
    self.bigchain.statsd.incr('pipelines.block.throughput', tx_count)
    return block
def delete_tx(self, block):
    """Delete transactions.

    Args:
        block (:class:`~bigchaindb.models.Block`): the block containing
            the transactions to delete from the backlog.

    Returns:
        :class:`~bigchaindb.models.Block`: The block.
    """
    tx_ids = [tx.id for tx in block.transactions]
    # Single variadic call removes every tx of the block at once.
    self.bigchain.delete_transaction(*tx_ids)
    return block
def __init__(self, timeout=5, backlog_reassign_delay=None):
    """Initialize StaleTransaction monitor.

    Args:
        timeout (int): how often to check for stale tx (in sec).
        backlog_reassign_delay: How stale a transaction should be before
            reassignment (in sec). If supplied, overrides the Bigchain
            default value.
    """
    self.bigchain = Bigchain(backlog_reassign_delay=backlog_reassign_delay)
    self.timeout = timeout
def check_transactions(self):
    """Poll backlog for stale transactions.

    Sleeps ``self.timeout`` seconds before each poll.

    Yields:
        dict: txs to be reassigned.
    """
    sleep(self.timeout)
    yield from self.bigchain.get_stale_transactions()
def reassign_transactions(self, tx):
    """Put tx back in backlog with new assignee.

    Args:
        tx (dict): the stale transaction.

    Returns:
        dict: the transaction.
    """
    logger.info('Reassigning transaction with id %s', tx['id'])
    self.bigchain.reassign_transaction(tx)
    return tx
'Checks if block has enough invalid votes to make a decision Args: next_vote: The next vote.'
def check_for_quorum(self, next_vote):
try: block_id = next_vote['vote']['voting_for_block'] node = next_vote['node_pubkey'] except KeyError: return next_block = self.bigchain.get_block(block_id) result = self.bigchain.block_election(next_block) self.handle_block_events(result, block_id) if (result['status'] =...
def requeue_transactions(self, invalid_block):
    """Liquidates transactions from invalid blocks so they can be
    processed again.

    Args:
        invalid_block (:class:`~bigchaindb.models.Block`): the block whose
            transactions go back to the backlog.

    Returns:
        The invalid block, unchanged.
    """
    logger.info('Rewriting %s transactions from invalid block %s',
                len(invalid_block.transactions), invalid_block.id)
    for tx in invalid_block.transactions:
        self.bigchain.write_transaction(tx)
    return invalid_block
def __init__(self):
    """Initialize the Block voter."""
    self.bigchain = Bigchain()
    # Votes are chained: each vote references the previously voted block.
    self.last_voted_id = Bigchain().get_last_voted_block().id
    self.counters = Counter()
    self.blocks_validity_status = {}
    # Throwaway CREATE transaction built up front; presumably used when a
    # block must be voted invalid — inferred from the name, confirm usage.
    self.invalid_dummy_tx = Transaction.create(
        [self.bigchain.me], [([self.bigchain.me], 1)]).to_dict()
def ungroup(self, block_id, transactions):
    """Given a block, ungroup the transactions in it.

    Args:
        block_id (str): the id of the block in progress.
        transactions (list(dict)): transactions of the block in progress.

    Yields:
        tuple: ``(tx, block_id, num_tx)`` for each transaction, where
        ``num_tx`` is the total number of transactions in the block.
    """
    total = len(transactions)
    for tx in transactions:
        yield tx, block_id, total
'Validate a transaction. Transaction must also not be in any VALID block. Args: tx_dict (dict): the transaction to validate block_id (str): the id of block containing the transaction num_tx (int): the total number of transactions to process Returns: Three values are returned, the validity of the transaction, ``block_id...
def validate_tx(self, tx_dict, block_id, num_tx):
try: tx = Transaction.from_dict(tx_dict) new = self.bigchain.is_new_transaction(tx.id, exclude_block_id=block_id) if (not new): raise exceptions.ValidationError('Tx already exists, %s', tx.id) tx.validate(self.bigchain) valid = True except exceptions....
'Collect the validity of transactions and cast a vote when ready. Args: tx_validity (bool): the validity of the transaction block_id (str): the id of block containing the transaction num_tx (int): the total number of transactions to process Returns: None, or a vote if a decision has been reached.'
def vote(self, tx_validity, block_id, num_tx):
self.counters[block_id] += 1 self.blocks_validity_status[block_id] = (tx_validity and self.blocks_validity_status.get(block_id, True)) if (self.counters[block_id] == num_tx): vote = self.bigchain.vote(block_id, self.last_voted_id, self.blocks_validity_status[block_id]) self.last_voted_id = b...
def write_vote(self, vote, num_tx):
    """Write vote to the database.

    Args:
        vote: the vote to write.
        num_tx (int): number of transactions the vote covers (metric only).

    Returns:
        The vote, unchanged.
    """
    validity = 'valid' if vote['vote']['is_block_valid'] else 'invalid'
    logger.info("Voting '%s' for block %s", validity,
                vote['vote']['voting_for_block'])
    self.bigchain.write_vote(vote)
    self.bigchain.statsd.incr('pipelines.vote.throughput', num_tx)
    return vote
@staticmethod
def validate_transaction(bigchain, transaction):
    """See :meth:`bigchaindb.models.Transaction.validate`
    for documentation.
    """
    return transaction.validate(bigchain)
@staticmethod
def validate_block(bigchain, block):
    """See :meth:`bigchaindb.models.Block.validate` for documentation."""
    return block.validate(bigchain)
'Validate transaction spend Args: bigchain (Bigchain): an instantiated bigchaindb.Bigchain object. Returns: The transaction (Transaction) if the transaction is valid else it raises an exception describing the reason why the transaction is invalid. Raises: ValidationError: If the transaction is invalid'
def validate(self, bigchain):
input_conditions = [] if (self.operation == Transaction.TRANSFER): input_txs = [] for input_ in self.inputs: input_txid = input_.fulfills.txid (input_tx, status) = bigchain.get_transaction(input_txid, include_status=True) if (input_tx is None): ...
@classmethod
def from_db(cls, bigchain, tx_dict):
    """Helper method that reconstructs a transaction dict that was
    returned from the database.

    For CREATE/GENESIS transactions the asset lives in a separate table
    keyed by the transaction id; fetch it and put it back on the dict.

    Args:
        bigchain (:class:`~bigchaindb.Bigchain`): An instance of Bigchain
            used to perform database queries.
        tx_dict (dict): The transaction dict as stored in the database.

    Returns:
        :class:`~Transaction`
    """
    if tx_dict['operation'] in (Transaction.CREATE, Transaction.GENESIS):
        stored_asset = list(bigchain.get_assets([tx_dict['id']]))[0]
        # The asset's 'id' duplicates the tx id; drop it before re-attaching.
        del stored_asset['id']
        tx_dict.update({'asset': stored_asset})
    return cls.from_dict(tx_dict)
'The Block model is mainly used for (de)serialization and integrity checking. Args: transaction (:obj:`list` of :class:`~.Transaction`): Transactions to be included in the Block. node_pubkey (str): The public key of the node creating the Block. timestamp (str): The Unix time a Block was created. voters (:obj:`list` of ...
def __init__(self, transactions=None, node_pubkey=None, timestamp=None, voters=None, signature=None):
if ((transactions is not None) and (not isinstance(transactions, list))): raise TypeError('`transactions` must be a list instance or None') else: self.transactions = (transactions or []) if ((voters is not None) and (not isinstance(voters, list))): raise TypeErro...
def validate(self, bigchain):
    """Validate the Block.

    Args:
        bigchain (:class:`~bigchaindb.Bigchain`): An instantiated
            Bigchain object.

    Note:
        The hash of the block (`id`) is validated in ``from_dict``, since
        that is the only place the original json payload is available.

    Returns:
        :class:`~.Block`: ``self``, if valid; otherwise the checks raise.
    """
    # Block-level checks first, then per-transaction validation.
    self._validate_block(bigchain)
    self._validate_block_transactions(bigchain)
    return self
'Validate the Block without validating the transactions. Args: bigchain (:class:`~bigchaindb.Bigchain`): An instantiated Bigchain object. Raises: ValidationError: If there is a problem with the block'
def _validate_block(self, bigchain):
if (self.node_pubkey not in bigchain.federation): raise SybilError('Only federation nodes can create blocks') if (not self.is_signature_valid()): raise InvalidSignature('Invalid block signature') txids = [tx.id for tx in self.transactions] if (len(txids) != len(set(t...
def _validate_block_transactions(self, bigchain):
    """Validate Block transactions.

    Args:
        bigchain (Bigchain): an instantiated bigchaindb.Bigchain object.

    Raises:
        ValidationError: If an invalid transaction is found.
    """
    for transaction in self.transactions:
        bigchain.validate_transaction(transaction)
def sign(self, private_key):
    """Create a signature for the Block and overwrite ``self.signature``.

    Args:
        private_key (str): A private key corresponding to
            ``self.node_pubkey``.

    Returns:
        :class:`~.Block`: ``self``, for chaining.
    """
    # Sign the canonical serialization of the inner 'block' payload only.
    serialized_block = serialize(self.to_dict()['block'])
    signer = PrivateKey(private_key)
    self.signature = signer.sign(serialized_block.encode()).decode()
    return self
def is_signature_valid(self):
    """Check the validity of a Block's signature.

    Returns:
        bool: Stating the validity of the Block's signature.
    """
    block = self.to_dict()['block']
    payload = serialize(block).encode()
    verifier = PublicKey(block['node_pubkey'])
    try:
        return verifier.verify(payload, self.signature)
    except (ValueError, AttributeError):
        # Malformed signature (or missing, e.g. signature is None).
        return False
'Transform a Python dictionary to a Block object. Args: block_body (dict): A block dictionary to be transformed. tx_construct (functions): Function to instantiate Transaction instance Returns: :class:`~Block` Raises: InvalidHash: If the block\'s id is not corresponding to its data.'
@classmethod def from_dict(cls, block_body, tx_construct=Transaction.from_dict):
block = block_body['block'] block_serialized = serialize(block) block_id = hash_data(block_serialized) if (block_id != block_body['id']): raise InvalidHash() transactions = [tx_construct(tx) for tx in block['transactions']] signature = block_body.get('signature') return cls(transacti...
'Transform the Block to a Python dictionary. Returns: dict: The Block as a dict. Raises: ValueError: If the Block doesn\'t contain any transactions.'
def to_dict(self):
if (len(self.transactions) == 0): raise ValueError('Empty block creation is not allowed') block = {'timestamp': self.timestamp, 'transactions': [tx.to_dict() for tx in self.transactions], 'node_pubkey': self.node_pubkey, 'voters': self.voters} block_serialized = serialize(block) b...
@classmethod
def from_db(cls, bigchain, block_dict, from_dict_kwargs=None):
    """Helper method that reconstructs a block_dict that was returned
    from the database.

    Fetches the assets referenced by the block's CREATE transactions and
    couples them back into the dict before deserialization.

    Args:
        bigchain (:class:`~bigchaindb.Bigchain`): An instance of Bigchain
            used to perform database queries.
        block_dict (dict): The block dict as returned from a database call.
        from_dict_kwargs (dict): Extra kwargs forwarded to ``from_dict``.

    Returns:
        :class:`~Block`
    """
    assets = bigchain.get_assets(cls.get_asset_ids(block_dict))
    recoupled = cls.couple_assets(block_dict, assets)
    return cls.from_dict(recoupled, **(from_dict_kwargs or {}))
'Extracts the assets from the ``CREATE`` transactions in the block. Returns: tuple: (assets, block) with the assets being a list of dicts and the block being the dict of the block with no assets in the CREATE transactions.'
def decouple_assets(self):
block_dict = deepcopy(self.to_dict()) assets = [] for transaction in block_dict['block']['transactions']: if (transaction['operation'] in [Transaction.CREATE, Transaction.GENESIS]): asset = transaction.pop('asset') asset.update({'id': transaction['id']}) assets.ap...
@staticmethod
def couple_assets(block_dict, assets):
    """Given a block_dict with no assets (as returned from a database
    call) and a list of assets, reconstruct the original block by putting
    the assets back into the ``CREATE`` transactions in the block.

    Args:
        block_dict (:obj:`dict`): The block dict as returned from a
            database call.
        assets (:obj:`list` of :obj:`dict`): A list of assets.

    Returns:
        dict: ``block_dict`` with assets re-attached (mutated in place).
    """
    # Index assets by their id; pop() strips the duplicated id field.
    by_id = {asset.pop('id'): asset for asset in assets}
    for transaction in block_dict['block']['transactions']:
        if transaction['operation'] in (Transaction.CREATE, Transaction.GENESIS):
            transaction.update({'asset': by_id.get(transaction['id'])})
    return block_dict
@staticmethod
def get_asset_ids(block_dict):
    """Given a block_dict return all the asset_ids for that block (the
    txid of CREATE transactions). Useful to know which assets to retrieve
    from the database to reconstruct the block.

    Args:
        block_dict (:obj:`dict`): The block dict as returned from a
            database call.

    Returns:
        list: The list of asset_ids in the block.
    """
    return [transaction['id']
            for transaction in block_dict['block']['transactions']
            if transaction['operation'] in (Transaction.CREATE,
                                            Transaction.GENESIS)]
'Calculate the election status of a block.'
@classmethod def block_election(cls, block, votes, keyring):
eligible_voters = (set(block['block']['voters']) & set(keyring)) n_voters = len(eligible_voters) (eligible_votes, ineligible_votes) = cls.partition_eligible_votes(votes, eligible_voters) by_voter = cls.dedupe_by_voter(eligible_votes) results = cls.count_votes(by_voter) results['block_id'] = bloc...
'Filter votes from unknown nodes or nodes that are not listed on block. This is the primary Sybill protection.'
@classmethod def partition_eligible_votes(cls, votes, eligible_voters):
(eligible, ineligible) = ([], []) for vote in votes: voter_eligible = (vote.get('node_pubkey') in eligible_voters) if voter_eligible: try: if cls.verify_vote_signature(vote): eligible.append(vote) continue except Val...
@classmethod
def dedupe_by_voter(cls, eligible_votes):
    """Throw a critical error if there is a duplicate vote.

    Args:
        eligible_votes (list): votes from eligible voters.

    Returns:
        dict: votes keyed by the voter's public key.

    Raises:
        CriticalDuplicateVote: if one pubkey voted more than once.
    """
    by_voter = {}
    for vote in eligible_votes:
        voter = vote['node_pubkey']
        if voter in by_voter:
            raise CriticalDuplicateVote(voter)
        by_voter[voter] = vote
    return by_voter
'Given a list of eligible votes, (votes from known nodes that are listed as voters), produce the number that say valid and the number that say invalid. Votes must agree on previous block, otherwise they become invalid.'
@classmethod def count_votes(cls, by_voter):
prev_blocks = collections.Counter() malformed = [] for vote in by_voter.values(): if (not cls.verify_vote_schema(vote)): malformed.append(vote) continue if (vote['vote']['is_block_valid'] is True): prev_blocks[vote['vote']['previous_block']] += 1 n_val...
@classmethod
def decide_votes(cls, n_voters, n_valid, n_invalid):
    """Decide on votes.

    To return VALID there must be a clear majority that say VALID and
    also agree on the previous block. A tie on an even number of votes
    counts as INVALID.
    """
    # >= : a 50/50 split is enough to reject (tie counts as INVALID).
    if n_invalid * 2 >= n_voters:
        return INVALID
    # strict > : VALID needs a clear majority.
    if n_valid * 2 > n_voters:
        return VALID
    return UNDECIDED
'Verify the signature of a vote'
@classmethod def verify_vote_signature(cls, vote):
signature = vote.get('signature') pk_base58 = vote.get('node_pubkey') if (not ((type(signature) == str) and (type(pk_base58) == str))): raise ValueError(('Malformed vote: %s' % vote)) public_key = PublicKey(pk_base58) body = serialize(vote['vote']).encode() return public_key.verify...
'Setup the gunicorn access and error loggers. This overrides the parent method. Its main goal is to simply pipe all the logs to the TCP socket used througout BigchainDB. Args: cfg (:obj:`gunicorn.config.Config`): Gunicorn configuration object. *Ignored*.'
def setup(self, cfg):
self._set_socklog_handler(self.error_log) self._set_socklog_handler(self.access_log)
def get(self, tx_id):
    """API endpoint to get details about a transaction.

    Args:
        tx_id (str): the id of the transaction.

    Return:
        A JSON string containing the data about the transaction.
    """
    pool = current_app.config['bigchain_pool']
    with pool() as bigchain:
        tx, status = bigchain.get_transaction(tx_id, include_status=True)
        # Only VALID transactions are exposed; anything else is a 404.
        if not tx or status is not bigchain.TX_VALID:
            return make_error(404)
        return tx.to_dict()
'API endpoint to push transactions to the Federation. Return: A ``dict`` containing the data about the transaction.'
def post(self):
pool = current_app.config['bigchain_pool'] tx = request.get_json(force=True) try: tx_obj = Transaction.from_dict(tx) except SchemaValidationError as e: return make_error(400, message='Invalid transaction schema: {}'.format(e.__cause__.message)) except ValidationError as e: ...
def get(self, block_id):
    """API endpoint to get details about a block.

    Args:
        block_id (str): the id of the block.

    Return:
        A JSON string containing the data about the block.
    """
    pool = current_app.config['bigchain_pool']
    with pool() as bigchain:
        block = bigchain.get_block(block_id=block_id)
        if not block:
            return make_error(404)
        return block
'API endpoint to get the related blocks for a transaction. Return: A ``list`` of ``block_id``s that contain the given transaction. The list may be filtered when provided a status query parameter: "valid", "invalid", "undecided".'
def get(self):
parser = reqparse.RequestParser() parser.add_argument('transaction_id', type=str, required=True) parser.add_argument('status', type=str, case_sensitive=False, choices=[Bigchain.BLOCK_VALID, Bigchain.BLOCK_INVALID, Bigchain.BLOCK_UNDECIDED]) args = parser.parse_args(strict=True) tx_id = args['transac...
'API endpoint to get details about the status of a transaction or a block. Return: A ``dict`` in the format ``{\'status\': <status>}``, where ``<status>`` is one of "valid", "invalid", "undecided", "backlog".'
def get(self):
parser = reqparse.RequestParser() parser.add_argument('transaction_id', type=str) parser.add_argument('block_id', type=str) args = parser.parse_args(strict=True) tx_id = args['transaction_id'] block_id = args['block_id'] if (bool(tx_id) == bool(block_id)): return make_error(400, 'Pro...
'API endpoint to retrieve a list of links to transaction outputs. Returns: A :obj:`list` of :cls:`str` of links to outputs.'
def get(self):
parser = reqparse.RequestParser() parser.add_argument('public_key', type=parameters.valid_ed25519, required=True) parser.add_argument('spent', type=parameters.valid_bool) args = parser.parse_args(strict=True) pool = current_app.config['bigchain_pool'] with pool() as bigchain: outputs = b...
'API endpoint to perform a text search on the assets. Args: search (str): Text search string to query the text index limit (int, optional): Limit the number of returned documents. Return: A list of assets that match the query.'
def get(self):
parser = reqparse.RequestParser() parser.add_argument('search', type=str, required=True) parser.add_argument('limit', type=int) args = parser.parse_args() if (not args['search']): return make_error(400, 'text_search cannot be empty') if (not args['limit']): del args['lim...
'API endpoint to get details about votes on a block. Return: A list of votes voting for a block with ID ``block_id``.'
def get(self):
parser = reqparse.RequestParser() parser.add_argument('block_id', type=str, required=True) args = parser.parse_args(strict=True) pool = current_app.config['bigchain_pool'] with pool() as bigchain: votes = list(backend.query.get_votes_by_block_id(bigchain.connection, args['block_id'])) re...
def __init__(self, app):
    """Create the new middleware.

    Args:
        app: a flask application.
    """
    self.app = app
def __call__(self, environ, start_response):
    """Run the middleware and then call the original WSGI application."""
    if environ['REQUEST_METHOD'] == 'GET':
        # GET requests carry no body, so a Content-Type header is noise;
        # drop it (and log only when it was actually present).
        if 'CONTENT_TYPE' in environ:
            del environ['CONTENT_TYPE']
            logger.debug('Remove header "Content-Type" from GET request')
    return self.app(environ, start_response)
'Add an input task; Reads from the outqueue of the Node'
def add_input(self, prefix, node, next):
name = ('%s_%s' % (prefix, node.name)) next_name = ('%s_%s' % (prefix, next.name)) if (node.name == 'changefeed'): self.processes.append(node) def f(*args, **kwargs): _kwargs = {'timeout': 0.1} _kwargs.update(kwargs) return node.outqueue.get(*args, **kwarg...
'Add a stage task, popping from own queue and appending to the queue of the next node'
def add_stage(self, prefix, node, next):
f = node.target name = ('%s_%s' % (prefix, node.name)) if next: next_name = ('%s_%s' % (prefix, next.name)) def inner(*args, **kwargs): out = f(*args, **kwargs) if ((out is not None) and next): self._enqueue(next_name, out) return out task = functools.wrap...
def _enqueue(self, name, item):
    """Internal helper; add item(s) to the named queue.

    Generators are expanded into multiple items; non-tuple items are
    wrapped in a 1-tuple so every queued entry is a list of args.
    """
    queue = self.queues.setdefault(name, [])
    if isinstance(item, types.GeneratorType):
        pending = list(item)
    else:
        pending = [item]
    for entry in pending:
        # Exact-type check on purpose: tuple subclasses get wrapped too,
        # matching the original `type(...) != tuple` behavior.
        if type(entry) != tuple:
            entry = (entry,)
        queue.append(list(entry))
'Advance pipeline stage. Throws Empty if no data to consume.'
def step(self, name, **kwargs):
logging.debug('Stepping %s', name) task = self.tasks[name] if (name in self.input_tasks): return task(**kwargs) else: queue = self.queues.get(name, []) if (not queue): raise Empty(name) return task(*queue.pop(0), **kwargs) logging.debug('Stepped %s',...
@property
def counts(self):
    """Get sizes of non empty queues."""
    return {name: len(q) for name, q in self.queues.items() if len(q)}
def __getattr__(self, name):
    """Shortcut to step a stage by attribute name."""
    def _step(**kwargs):
        return self.step(name, **kwargs)
    return _step
@contextmanager
def start(self):
    """Start async inputs; changefeeds etc."""
    for proc in self.processes:
        proc.start()
    # Give the child processes a moment to come up before yielding.
    time.sleep(0.2)
    try:
        yield
    finally:
        for proc in self.processes:
            proc.terminate()
'Stubs out the get_object operation. :param full_contents: The FULL contents of the object :param start_byte: The first byte to grab. :param end_byte: The last byte to grab.'
def stub_get_object(self, full_contents, start_byte=0, end_byte=None):
get_object_response = {} expected_params = {} contents = full_contents end_byte_range = end_byte if ((start_byte != 0) and (end_byte is None)): end_byte = (len(full_contents) - 1) if (end_byte == (len(full_contents) - 1)): end_byte_range = '' if (end_byte is not None): ...
def create_file(self, filename, contents, mode='w'):
    """Creates a file in a tmpdir.

    ``filename`` should be a relative path, e.g. "foo/bar/baz.txt". It
    will be translated into a full path in a tmp dir. ``mode`` is the
    mode the file should be opened with, either ``w`` or ``wb``.

    Returns the full path to the file.
    """
    target = os.path.join(self.rootdir, filename)
    parent = os.path.dirname(target)
    # Create intermediate directories on demand.
    if not os.path.isdir(parent):
        os.makedirs(parent)
    with open(target, mode) as f:
        f.write(contents)
    return target
def append_file(self, filename, contents):
    """Append contents to a file.

    ``filename`` should be a relative path, e.g. "foo/bar/baz.txt". It
    will be translated into a full path in a tmp dir.

    Returns the full path to the file.
    """
    target = os.path.join(self.rootdir, filename)
    directory = os.path.dirname(target)
    if not os.path.isdir(directory):
        os.makedirs(directory)
    # 'a' creates the file if needed, otherwise appends.
    with open(target, 'a') as f:
        f.write(contents)
    return target
def full_path(self, filename):
    """Translate relative path to full path in temp dir.

    ``full_path('foo/bar.txt')`` -> ``/tmp/asdfasd/foo/bar.txt``
    """
    return os.path.join(self.rootdir, filename)
'Configuration object for managed S3 transfers :param multipart_threshold: The transfer size threshold for which multipart uploads, downloads, and copies will automatically be triggered. :param max_concurrency: The maximum number of threads that will be making requests to perform a transfer. If ``use_threads`` is set t...
def __init__(self, multipart_threshold=(8 * MB), max_concurrency=10, multipart_chunksize=(8 * MB), num_download_attempts=5, max_io_queue=100, io_chunksize=(256 * KB), use_threads=True):
super(TransferConfig, self).__init__(multipart_threshold=multipart_threshold, max_request_concurrency=max_concurrency, multipart_chunksize=multipart_chunksize, num_download_attempts=num_download_attempts, max_io_queue_size=max_io_queue, io_chunksize=io_chunksize) for alias in self.ALIAS: setattr(self, a...
# Upload a local file to an S3 object: validates the filename type, wraps the
# optional progress callback in subscribers, submits to the transfer manager,
# and blocks on future.result(). NOTE(review): body truncated in this view
# (trailing "..." inside the ClientError handler); preserved verbatim.
'Upload a file to an S3 object. Variants have also been injected into S3 client, Bucket and Object. You don\'t have to use S3Transfer.upload_file() directly.'
def upload_file(self, filename, bucket, key, callback=None, extra_args=None):
    if (not isinstance(filename, six.string_types)): raise ValueError('Filename must be a string') subscribers = self._get_subscribers(callback) future = self._manager.upload(filename, bucket, key, extra_args, subscribers) try: future.result() except ClientError as e: ...
# Download an S3 object to a local file: validates the filename type, wraps
# the optional progress callback in subscribers, submits to the transfer
# manager, and blocks on future.result(). NOTE(review): body truncated in
# this view (trailing "..." inside the retries-exceeded handler); preserved
# verbatim.
'Download an S3 object to a file. Variants have also been injected into S3 client, Bucket and Object. You don\'t have to use S3Transfer.download_file() directly.'
def download_file(self, bucket, key, filename, extra_args=None, callback=None):
    if (not isinstance(filename, six.string_types)): raise ValueError('Filename must be a string') subscribers = self._get_subscribers(callback) future = self._manager.download(bucket, key, filename, extra_args, subscribers) try: future.result() except S3TransferRetriesExceed...
@property
def profile_name(self):
    """The **read-only** profile name.

    Falls back to ``'default'`` when the underlying session reports no
    (or an empty) profile.
    """
    name = self._session.profile
    if not name:
        name = 'default'
    return name
@property
def region_name(self):
    """The **read-only** region name."""
    session = self._session
    return session.get_config_variable('region')
@property
def events(self):
    """The event emitter for a session."""
    session = self._session
    return session.get_component('event_emitter')
@property
def available_profiles(self):
    """The profiles available to the session credentials."""
    session = self._session
    return session.available_profiles
def _setup_loader(self):
    """Set up loader search paths so that bundled resource data can be loaded."""
    data_path = os.path.join(os.path.dirname(__file__), 'data')
    self._loader = self._session.get_component('data_loader')
    self._loader.search_paths.append(data_path)
def get_available_services(self):
    """Get a list of available services that can be loaded as low-level
    clients via :py:meth:`Session.client`.

    :rtype: list
    :return: List of service names
    """
    session = self._session
    return session.get_available_services()
def get_available_resources(self):
    """Get a list of available services that can be loaded as resource
    clients via :py:meth:`Session.resource`.

    :rtype: list
    :return: List of service names
    """
    loader = self._loader
    return loader.list_available_services(type_name='resources-1')
def get_available_partitions(self):
    """List the available partitions.

    :rtype: list
    :return: A list of partition names (e.g., ``["aws", "aws-cn"]``)
    """
    session = self._session
    return session.get_available_partitions()
def get_available_regions(self, service_name, partition_name='aws', allow_non_regional=False):
    """List the region and endpoint names of a particular partition.

    :type service_name: string
    :param service_name: Name of a service to list endpoints for (e.g., s3).
    :type partition_name: string
    :param partition_name: Name of the partition to limit endpoints to
        (e.g., ``aws`` for the public AWS endpoints, ``aws-cn`` for AWS China).
    :type allow_non_regional: bool
    :param allow_non_regional: Whether to include endpoints that are not
        tied to a specific region.
    """
    return self._session.get_available_regions(
        service_name=service_name,
        partition_name=partition_name,
        allow_non_regional=allow_non_regional,
    )
def get_credentials(self):
    """Return the :class:`botocore.credential.Credential` object associated
    with this session.

    If the credentials have not yet been loaded, this will attempt to load
    them; if already loaded, the cached credentials are returned.
    """
    session = self._session
    return session.get_credentials()
def client(self, service_name, region_name=None, api_version=None, use_ssl=True, verify=None, endpoint_url=None, aws_access_key_id=None, aws_secret_access_key=None, aws_session_token=None, config=None):
    """Create a low-level service client by name.

    :type service_name: string
    :param service_name: The name of a service, e.g. 's3' or 'ec2'. You can
        get a list of available services via
        :py:meth:`get_available_services`.
    :type region_name: string
    :param region_name: The name of the region associated with the client.

    All remaining keyword arguments are forwarded unchanged to the
    underlying session's ``create_client``.
    """
    return self._session.create_client(
        service_name,
        region_name=region_name,
        api_version=api_version,
        use_ssl=use_ssl,
        verify=verify,
        endpoint_url=endpoint_url,
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        aws_session_token=aws_session_token,
        config=config,
    )
# Create a resource service client by name: loads the 'resources-1' service
# model, and on UnknownServiceError raises ResourceNotExistsError, noting
# whether a low-level client exists for the service. NOTE(review): body
# truncated in this view (trailing "..."); preserved verbatim, comments only.
'Create a resource service client by name. :type service_name: string :param service_name: The name of a service, e.g. \'s3\' or \'ec2\'. You can get a list of available services via :py:meth:`get_available_resources`. :type region_name: string :param region_name: The name of the region associated with the client. A cl...
def resource(self, service_name, region_name=None, api_version=None, use_ssl=True, verify=None, endpoint_url=None, aws_access_key_id=None, aws_secret_access_key=None, aws_session_token=None, config=None):
    try: resource_model = self._loader.load_service_model(service_name, 'resources-1', api_version) except UnknownServiceError: available = self.get_available_resources() has_low_level_client = (service_name in self.get_available_services()) raise ResourceNotExistsError(service_name,...
# Document an entire service by building a DocumentStructure and filling its
# sections (title, table of contents, client API, paginator API, ...).
# NOTE(review): body truncated in this view (trailing "..."); preserved
# verbatim, comments only added.
'Documents an entire service. :returns: The reStructured text of the documented service.'
def document_service(self):
    doc_structure = DocumentStructure(self._service_name, section_names=self.sections, target='html') self.title(doc_structure.get_section('title')) self.table_of_contents(doc_structure.get_section('table-of-contents')) self.client_api(doc_structure.get_section('client')) self.paginator_api(doc_structur...
def copy(self):
    """Create a copy of this metadata object."""
    attrs = dict(self.__dict__)
    # service_name is a positional argument of the constructor, so it is
    # pulled out of the attribute dict before the remainder is splatted.
    name = attrs.pop('service_name')
    return ResourceMeta(name, **attrs)
def __call__(self, parent, params, response):
    """Apply the configured JMESPath search path to a raw response.

    :type parent: ServiceResource
    :param parent: The resource instance to which this action is attached.
    :type params: dict
    :param params: Request parameters sent to the service.
    :type response: dict
    :param response: Low-level operation response.
    """
    path = self.search_path
    # '$' selects the whole document, so searching would be a no-op.
    if path and path != '$':
        return jmespath.search(path, response)
    return response
# Build resource instances from a low-level response: looks up the resource's
# JSON definition, loads a resource class from the factory, then (truncated)
# processes the raw response. NOTE(review): body truncated in this view
# (trailing "..."); preserved verbatim, comments only added.
':type parent: ServiceResource :param parent: The resource instance to which this action is attached. :type params: dict :param params: Request parameters sent to the service. :type response: dict :param response: Low-level operation response.'
def __call__(self, parent, params, response):
    resource_name = self.resource_model.type json_definition = self.service_context.resource_json_definitions.get(resource_name) resource_cls = self.factory.load_from_definition(resource_name=resource_name, single_resource_json_definition=json_definition, service_context=self.service_context) raw_response =...
def handle_response_item(self, resource_cls, parent, identifiers, resource_data):
    """Create a single resource instance from response data.

    :type resource_cls: ServiceResource subclass
    :param resource_cls: The resource class to instantiate.
    :type parent: ServiceResource
    :param parent: The resource instance to which this action is attached.
    :type identifiers: dict
    :param identifiers: Map of identifier names to values (a value that is a
        list holds one entry per response item and is consumed front-first).
    :param resource_data: Data attached to the new resource's meta, if any.
    :return: The populated resource instance.
    """
    kwargs = {'client': parent.meta.client}
    for name, value in identifiers.items():
        # List-valued identifiers carry one value per item; consume the next.
        kwargs[name] = value.pop(0) if isinstance(value, list) else value
    resource = resource_cls(**kwargs)
    if resource_data is not None:
        resource.meta.data = resource_data
    return resource
def __iter__(self):
    """Yield resource instances, paginating on the caller's behalf.

    Page size, item limit, and filter parameters are applied if they have
    previously been set.

    >>> bucket = s3.Bucket('boto3')
    >>> for obj in bucket.objects.all():
    ...     print(obj)
    """
    remaining = self._params.get('limit', None)
    for page in self.pages():
        for item in page:
            yield item
            if remaining is not None:
                remaining -= 1
                if remaining <= 0:
                    return
def _clone(self, **kwargs):
    """Create a clone of this collection.

    Used by the chainable collection methods so each call returns a copy
    rather than mutating the original, e.g.::

        >>> base = collection.filter(Param1=1)
        >>> query1 = base.filter(Param2=2)
        >>> query2 = base.filter(Param3=3)
    """
    merged = copy.deepcopy(self._params)
    merge_dicts(merged, kwargs, append_lists=True)
    cls = self.__class__
    return cls(self._model, self._parent, self._handler, **merged)
# Generator yielding pages of resource instances: pulls limit/page_size out of
# the params, builds request parameters, merges in the cleaned params, then
# (truncated) dispatches to a paginator or a single call. NOTE(review): body
# truncated in this view (trailing "..."); preserved verbatim, comments only.
'A generator which yields pages of resource instances after doing the appropriate service operation calls and handling any pagination on your behalf. Non-paginated calls will return a single page of items. Page size, item limit, and filter parameters are applied if they have previously been set. >>> bucket = s3.Bucket(...
def pages(self):
    client = self._parent.meta.client cleaned_params = self._params.copy() limit = cleaned_params.pop('limit', None) page_size = cleaned_params.pop('page_size', None) params = create_request_parameters(self._parent, self._model.request) merge_dicts(params, cleaned_params, append_lists=True) if c...
def all(self):
    """Get all items from the collection.

    Returns a chainable clone with no extra parameters applied, e.g.::

        >>> for queue in sqs.queues.all():
        ...     print(queue.url)

    :rtype: :py:class:`ResourceCollection`
    """
    return self._clone()
def filter(self, **kwargs):
    """Get items from the collection, passing keyword arguments along as
    parameters to the underlying service operation, which are typically
    used to filter the results.

    Returns a chainable clone with the filter parameters applied, e.g.::

        >>> for queue in sqs.queues.filter(Param='foo'):
        ...     print(queue.url)

    :rtype: :py:class:`ResourceCollection`
    """
    return self._clone(**kwargs)
def limit(self, count):
    """Return at most this many resources.

    >>> for bucket in s3.buckets.limit(5):
    ...     print(bucket.name)

    :type count: int
    :param count: Return no more than this many items
    :rtype: :py:class:`ResourceCollection`
    """
    return self._clone(limit=count)
def page_size(self, count):
    """Fetch at most this many resources per service request.

    >>> for obj in s3.Bucket('boto3').objects.page_size(100):
    ...     print(obj.key)

    :type count: int
    :param count: Fetch this many items per request
    :rtype: :py:class:`ResourceCollection`
    """
    return self._clone(page_size=count)