Search is not available for this dataset
text
stringlengths 75
104k
|
|---|
def set_fee_asset(self, fee_asset):
    """ Set the asset in which transaction fees are paid.

        :param fee_asset: asset to pay fees in. May be an Amount or an
            Asset instance (its ``id`` is used), a plain asset-id
            string, or falsy to fall back to the core asset "1.3.0".
    """
    # Amount and Asset instances both expose the asset id under "id";
    # the previous two identical branches are merged into one check.
    if isinstance(fee_asset, (self.amount_class, self.asset_class)):
        self.fee_asset_id = fee_asset["id"]
    elif fee_asset:
        self.fee_asset_id = fee_asset
    else:
        # Default to the core asset
        self.fee_asset_id = "1.3.0"
|
def add_required_fees(self, ops, asset_id="1.3.0"):
    """ Auxiliary method to obtain the required fees for a set of
        operations. Requires a websocket connection to a witness node!

        :param list ops: wrapped operations whose ``fee`` fields are
            filled in (the list is modified in place)
        :param str asset_id: asset in which all fees are paid (the RPC
            only supports one fee asset per call)
        :returns: the same ``ops`` list with fees set
    """
    ws = self.blockchain.rpc
    fees = ws.get_required_fees([i.json() for i in ops], asset_id)
    for i, d in enumerate(ops):
        if isinstance(fees[i], list):
            # Operation is a proposal. The node returns a pair here:
            # fees[i][0] is the proposal's own fee, fees[i][1] is the
            # list of fees for the operations inside the proposal.
            ops[i].op.data["fee"] = Asset(
                amount=fees[i][0]["amount"], asset_id=fees[i][0]["asset_id"]
            )
            # Set the fee of every proposed operation as well
            for j, _ in enumerate(ops[i].op.data["proposed_ops"].data):
                ops[i].op.data["proposed_ops"].data[j].data["op"].op.data[
                    "fee"
                ] = Asset(
                    amount=fees[i][1][j]["amount"],
                    asset_id=fees[i][1][j]["asset_id"],
                )
        else:
            # Operation is a regular operation
            ops[i].op.data["fee"] = Asset(
                amount=fees[i]["amount"], asset_id=fees[i]["asset_id"]
            )
    return ops
|
def constructTx(self):
    """ Construct the actual transaction and store it in the class's dict
        store

        Collects the buffered operations (unwrapping ProposalBuilders,
        wrapping raw ops), fills in fees, expiration and TaPoS block
        parameters, and stores the resulting unsigned transaction JSON
        in ``self`` (a dict subclass).
    """
    ops = list()
    for op in self.ops:
        if isinstance(op, ProposalBuilder):
            # This operation is a proposal and needs to be dealt with
            # differently
            proposal = op.get_raw()
            if proposal:
                ops.append(proposal)
        elif isinstance(op, self.operation_class):
            # Already a wrapped operation
            ops.extend([op])
        else:
            # otherwise, we simply wrap ops into Operations
            ops.extend([self.operation_class(op)])
    # We now wrap everything into an actual transaction
    ops = self.add_required_fees(ops, asset_id=self.fee_asset_id)
    expiration = formatTimeFromNow(
        self.expiration
        or self.blockchain.expiration
        or 30  # defaults to 30 seconds
    )
    ref_block_num, ref_block_prefix = self.get_block_params()
    self.tx = self.signed_transaction_class(
        ref_block_num=ref_block_num,
        ref_block_prefix=ref_block_prefix,
        expiration=expiration,
        operations=ops,
    )
    # Expose the transaction JSON through the dict interface
    dict.update(self, self.tx.json())
    self._unset_require_reconstruction()
|
def get_block_params(self):
    """ Auxiliary method to obtain ``ref_block_num`` and
        ``ref_block_prefix``. Requires a websocket connection to a
        witness node!
    """
    props = self.blockchain.rpc.get_dynamic_global_properties()
    # TaPoS reference: the low 16 bits of the head block number ...
    block_num = props["head_block_number"] & 0xFFFF
    # ... plus 4 bytes of the head block id (offset 4), little-endian
    raw_block_id = unhexlify(props["head_block_id"])
    (block_prefix,) = struct.unpack_from("<I", raw_block_id, 4)
    return block_num, block_prefix
|
def sign(self):
    """ Sign a provided transaction with the provided key(s)

        :param dict tx: The transaction to be signed and returned
        :param string wifs: One or many wif keys to use for signing
            a transaction. If not present, the keys will be loaded
            from the wallet as defined in "missing_signatures" key
            of the transactions.
    """
    self.constructTx()
    # Nothing to sign if the transaction carries no operations
    if "operations" not in self or not self["operations"]:
        return
    # Legacy compatibility!
    # If we are doing a proposal, obtain the account from the proposer_id
    if self.blockchain.proposer:
        proposer = self.account_class(
            self.blockchain.proposer, blockchain_instance=self.blockchain
        )
        # Reset the signing state: only the proposer's active key signs
        self.wifs = set()
        self.signing_accounts = list()
        self.appendSigner(proposer["id"], "active")
    # We need to set the default prefix, otherwise pubkeys are
    # presented wrongly!
    if self.blockchain.rpc:
        self.operations.default_prefix = self.blockchain.rpc.chain_params["prefix"]
    elif "blockchain" in self:
        # Offline signing: prefix was stored by addSigningInformation()
        self.operations.default_prefix = self["blockchain"]["prefix"]
    try:
        signedtx = self.signed_transaction_class(**self.json())
    except Exception:
        raise ValueError("Invalid TransactionBuilder Format")
    if not any(self.wifs):
        raise MissingKeyError
    signedtx.sign(self.wifs, chain=self.blockchain.rpc.chain_params)
    # Accumulate signatures (earlier passes may have added some)
    self["signatures"].extend(signedtx.json().get("signatures"))
    return signedtx
|
def verify_authority(self):
    """ Verify the authority of the signed transaction

        :raises InsufficientAuthorityError: if the remote node reports
            the transaction's signatures as insufficient
    """
    # The previous ``try: ... except Exception as e: raise e`` wrapper
    # was a no-op (re-raising the same exception) and has been removed.
    if not self.blockchain.rpc.verify_authority(self.json()):
        raise InsufficientAuthorityError
|
def broadcast(self):
    """ Broadcast a transaction to the blockchain network

        :param tx tx: Signed transaction to broadcast
        :returns: the transaction JSON (enriched with the block data
            when ``blockchain.blocking`` is set), or None for an empty
            transaction
    """
    # Sign if not signed
    if not self._is_signed():
        self.sign()
    # Cannot broadcast an empty transaction
    if "operations" not in self or not self["operations"]:
        log.warning("No operations in transaction! Returning")
        return
    # Obtain JSON
    ret = self.json()
    # Debugging mode does not broadcast
    if self.blockchain.nobroadcast:
        log.warning("Not broadcasting anything!")
        self.clear()
        return ret
    # Broadcast
    try:
        if self.blockchain.blocking:
            # Synchronous broadcast: wait for inclusion in a block
            ret = self.blockchain.rpc.broadcast_transaction_synchronous(
                ret, api="network_broadcast"
            )
            # Merge the returned "trx" contents into the result
            ret.update(**ret.get("trx", {}))
        else:
            self.blockchain.rpc.broadcast_transaction(ret, api="network_broadcast")
    except Exception as e:
        raise e
    finally:
        # Always reset the builder, even when broadcasting failed
        self.clear()
    return ret
|
def clear(self):
    """ Clear the transaction builder and start from scratch
    """
    self.ops = []
    self.wifs = set()
    self.signing_accounts = []
    # This makes sure that _is_constructed will return False afterwards
    self["expiration"] = None
    # NOTE(review): dict.__init__ with an empty mapping does NOT remove
    # existing keys (it behaves like update), so previously stored dict
    # entries — including the "expiration" set above — remain in place.
    dict.__init__(self, {})
|
def addSigningInformation(self, account, permission):
    """ This is a private method that adds side information to a
        unsigned/partial transaction in order to simplify later
        signing (e.g. for multisig or coldstorage)

        Stores the chain parameters, the required authorities and the
        public keys that still need to sign inside the transaction body
        itself so an offline signer has everything it needs.

        FIXME: Does not work with owner keys!
    """
    self.constructTx()
    # Store chain parameters so an offline signer knows the key prefix
    self["blockchain"] = self.blockchain.rpc.chain_params
    if isinstance(account, self.publickey_class):
        # A bare public key was given: that key itself must sign
        self["missing_signatures"] = [str(account)]
    else:
        accountObj = self.account_class(account)
        authority = accountObj[permission]
        # We add a required_authorities to be able to identify
        # how to sign later. This is an array, because we
        # may later want to allow multiple operations per tx
        self.update({"required_authorities": {accountObj["name"]: authority}})
        for account_auth in authority["account_auths"]:
            account_auth_account = self.account_class(account_auth[0])
            self["required_authorities"].update(
                {account_auth[0]: account_auth_account.get(permission)}
            )
        # Try to resolve required signatures for offline signing
        self["missing_signatures"] = [x[0] for x in authority["key_auths"]]
        # Add one recursion of keys from account_auths:
        for account_auth in authority["account_auths"]:
            account_auth_account = self.account_class(account_auth[0])
            self["missing_signatures"].extend(
                [x[0] for x in account_auth_account[permission]["key_auths"]]
            )
|
def appendMissingSignatures(self):
    """ Store which accounts/keys are supposed to sign the transaction
        This method is used for an offline-signer!
    """
    wallet = self.blockchain.wallet
    # Load the private key for every still-missing public key that the
    # wallet actually holds
    for pubkey in self.get("missing_signatures", []):
        private_key = wallet.getPrivateKeyForPublicKey(pubkey)
        if private_key:
            self.appendWif(private_key)
|
def as_base(self, base):
    """ Returns the price instance so that the base asset is ``base``.
        Note: This makes a copy of the object!
    """
    if base == self["base"]["symbol"]:
        # Already in the requested orientation
        return self.copy()
    if base == self["quote"]["symbol"]:
        # Flip the copy so the requested asset becomes the base
        return self.copy().invert()
    raise InvalidAssetException
|
def as_quote(self, quote):
    """ Returns the price instance so that the quote asset is ``quote``.
        Note: This makes a copy of the object!
    """
    if quote == self["quote"]["symbol"]:
        # Already in the requested orientation
        return self.copy()
    if quote == self["base"]["symbol"]:
        # Flip the copy so the requested asset becomes the quote
        return self.copy().invert()
    raise InvalidAssetException
|
def invert(self):
    """ Invert the price (e.g. go from ``USD/BTS`` into ``BTS/USD``)
    """
    # Swap base and quote in place
    self["quote"], self["base"] = self["base"], self["quote"]
    # If an order's for_sale amount is attached, re-express it in the
    # new base asset using the stored price
    if "for_sale" in self and self["for_sale"]:
        self["for_sale"] = self.amount_class(
            self["for_sale"]["amount"] * self["price"], self["base"]["symbol"]
        )
    return self
|
def json(self):
    """ Return the price as a dict with integer base/quote amounts::

            {
                "base": {"amount": ..., "asset_id": ...},
                "quote": {"amount": ..., "asset_id": ...}
            }
    """
    quote = self["quote"]
    base = self["base"]
    # Build the ratio as an exact Fraction from the two integer amounts
    # (the previous ``Fraction(int(quote) / int(base))`` went through a
    # lossy binary float first), then cap the denominator at the base
    # asset's precision.
    frac = Fraction(int(quote), int(base)).limit_denominator(
        10 ** base["asset"]["precision"]
    )
    return {
        "base": {"amount": int(frac.denominator), "asset_id": base["asset"]["id"]},
        "quote": {"amount": int(frac.numerator), "asset_id": quote["asset"]["id"]},
    }
|
def wipe(self):
    """ Wipe the store

        Deletes every key currently present. Iterates over a snapshot
        of the keys because ``delete`` mutates the store.
    """
    # list() already snapshots the keys; the extra .copy() was redundant
    for key in list(self.keys()):
        self.delete(key)
|
def connect(self, node="", rpcuser="", rpcpassword="", **kwargs):
    """ Connect to blockchain network (internal use only)
    """
    cfg = self.config
    if not node:
        # Fall back to the configured node, if any
        if "node" not in cfg:
            raise ValueError("A Blockchain node needs to be provided!")
        node = cfg["node"]
    # Fill in missing credentials from the configuration
    if not rpcuser and "rpcuser" in cfg:
        rpcuser = cfg["rpcuser"]
    if not rpcpassword and "rpcpassword" in cfg:
        rpcpassword = cfg["rpcpassword"]
    self.rpc = self.rpc_class(node, rpcuser, rpcpassword, **kwargs)
|
def finalizeOp(self, ops, account, permission, **kwargs):
    """ This method obtains the required private keys if present in
        the wallet, finalizes the transaction, signs it and
        broadcasts it

        :param operation ops: The operation (or list of operations) to
            broadcast
        :param operation account: The account that authorizes the
            operation
        :param string permission: The required permission for
            signing (active, owner, posting)
        :param object append_to: This allows to provide an instance of
            ProposalsBuilder (see :func:`new_proposal`) or
            TransactionBuilder (see :func:`new_tx()`) to specify
            where to put a specific operation.

        ... note:: ``append_to`` is exposed to every method used in
            this class

        ... note::
            If ``ops`` is a list of operations, they all need to be
            signable by the same key! Thus, you cannot combine ops
            that require active permission with ops that require
            posting permission. Neither can you use different
            accounts for different operations!

        ... note:: This uses ``txbuffer`` as instance of
            :class:`transactionbuilder.TransactionBuilder`.
            You may want to use your own txbuffer
    """
    if "append_to" in kwargs and kwargs["append_to"]:
        if self.proposer:
            log.warning(
                "You may not use append_to and self.proposer at "
                "the same time. Append new_proposal(..) instead"
            )
        # Append to the append_to and return
        append_to = kwargs["append_to"]
        parent = append_to.get_parent()
        assert isinstance(
            append_to, (self.transactionbuilder_class, self.proposalbuilder_class)
        )
        append_to.appendOps(ops)
        # Add the signer to the buffer so we sign the tx properly
        if isinstance(append_to, self.proposalbuilder_class):
            parent.appendSigner(append_to.proposer, permission)
        else:
            parent.appendSigner(account, permission)
        # This returns as we used append_to, it does NOT broadcast, or sign
        return append_to.get_parent()
    elif self.proposer:
        # Legacy proposer mode!
        proposal = self.proposal()
        proposal.set_proposer(self.proposer)
        proposal.set_expiration(self.proposal_expiration)
        proposal.set_review(self.proposal_review)
        proposal.appendOps(ops)
        # Go forward to see what the other options do ...
    else:
        # Append to the default buffer
        self.txbuffer.appendOps(ops)
    # The API that obtains the fee only allows to specify one particular
    # fee asset for all operations in that transaction even though the
    # blockchain itself could allow to pay multiple operations with
    # different fee assets.
    if "fee_asset" in kwargs and kwargs["fee_asset"]:
        self.txbuffer.set_fee_asset(kwargs["fee_asset"])
    # Add signing information, signer, sign and optionally broadcast
    if self.unsigned:
        # In case we don't want to sign anything
        self.txbuffer.addSigningInformation(account, permission)
        return self.txbuffer
    elif self.bundle:
        # In case we want to add more ops to the tx (bundle)
        self.txbuffer.appendSigner(account, permission)
        return self.txbuffer.json()
    else:
        # default behavior: sign + broadcast
        self.txbuffer.appendSigner(account, permission)
        self.txbuffer.sign()
        return self.txbuffer.broadcast()
|
def sign(self, tx=None, wifs=None):
    """ Sign a provided transaction with the provided key(s)

        :param dict tx: The transaction to be signed and returned
        :param string wifs: One or many wif keys to use for signing
            a transaction. If not present, the keys will be loaded
            from the wallet as defined in "missing_signatures" key
            of the transactions.
    """
    # Avoid the mutable-default-argument pitfall: the previous
    # ``wifs=[]`` default object was shared across all calls.
    if wifs is None:
        wifs = []
    if tx:
        txbuffer = self.transactionbuilder_class(tx, blockchain_instance=self)
    else:
        txbuffer = self.txbuffer
    txbuffer.appendWif(wifs)
    txbuffer.appendMissingSignatures()
    txbuffer.sign()
    return txbuffer.json()
|
def broadcast(self, tx=None):
    """ Broadcast a transaction to the Blockchain

        :param tx tx: Signed transaction to broadcast
    """
    if not tx:
        # No explicit tx given: broadcast the default buffer
        return self.txbuffer.broadcast()
    # Wrap the provided tx in a fresh builder and broadcast that
    builder = self.transactionbuilder_class(tx, blockchain_instance=self)
    return builder.broadcast()
|
def proposal(self, proposer=None, proposal_expiration=None, proposal_review=None):
    """ Return the default proposal buffer

        ... note:: If any parameter is set, the default proposal
            parameters will be changed!
    """
    if not self._propbuffer:
        # No proposal buffer yet: create one attached to the default tx
        return self.new_proposal(
            self.tx(), proposer, proposal_expiration, proposal_review
        )
    buffer = self._propbuffer[0]
    # Override only the parameters that were explicitly given
    if proposer:
        buffer.set_proposer(proposer)
    if proposal_expiration:
        buffer.set_expiration(proposal_expiration)
    if proposal_review:
        buffer.set_review(proposal_review)
    return buffer
|
def new_tx(self, *args, **kwargs):
    """ Let's obtain a new txbuffer

        :returns int txid: id of the new txbuffer
    """
    # Create a fresh builder bound to this blockchain instance and
    # remember it so it can be looked up later
    buffer = self.transactionbuilder_class(
        *args, blockchain_instance=self, **kwargs
    )
    self._txbuffers.append(buffer)
    return buffer
|
def detail(self, *args, **kwargs):
    """ Build the ordered serialization fields for account options.

        NOTE(review): ``set()`` deduplicates the votes but does not
        preserve (or define) any order, so the resulting vote order is
        unspecified here.
    """
    prefix = kwargs.pop("prefix", default_prefix)
    # remove duplicates
    kwargs["votes"] = list(set(kwargs["votes"]))
    """ This is an example how to sort votes prior to using them in the
        Object
    """
    # # Sort votes
    # kwargs["votes"] = sorted(
    #     kwargs["votes"],
    #     key=lambda x: float(x.split(":")[1]),
    # )
    return OrderedDict(
        [
            ("memo_key", PublicKey(kwargs["memo_key"], prefix=prefix)),
            ("voting_account", ObjectId(kwargs["voting_account"], "account")),
            ("num_witness", Uint16(kwargs["num_witness"])),
            ("num_committee", Uint16(kwargs["num_committee"])),
            ("votes", Array([VoteId(o) for o in kwargs["votes"]])),
            ("extensions", Set([])),
        ]
    )
|
def id(self):
    """ The transaction id of this transaction
    """
    # Signatures are not part of the transaction id, so take them out
    # temporarily before serializing
    signatures = self.data["signatures"]
    self.data.pop("signatures", None)
    # Hash the serialized transaction
    digest = hashlib.sha256(bytes(self)).digest()
    # Restore the signatures
    self.data["signatures"] = signatures
    # The tx id is the first 20 bytes of the hash, hex encoded
    return hexlify(digest[:20]).decode("ascii")
|
def sign(self, wifkeys, chain=None):
    """ Sign the transaction with the provided private keys.

        :param array wifkeys: Array of wif keys
        :param str chain: identifier for the chain
    """
    if not chain:
        chain = self.get_default_prefix()
    self.deriveDigest(chain)
    # Deduplicate the keys while keeping their order
    unique_keys = []
    for wif in wifkeys:
        if wif not in unique_keys:
            unique_keys.append(wif)
    self.privkeys = unique_keys
    # Sign the message digest with every (unique) private key given
    self.data["signatures"] = Array(
        [Signature(sign_message(self.message, wif)) for wif in unique_keys]
    )
    return self
|
def store(self, data, key=None, *args, **kwargs):
    """ Cache the list

        :param list data: List of objects to cache
    """
    # Replace this list's contents with the new data, then persist it
    # under the derived cache key
    list.__init__(self, data)
    cache_key = self._cache_key(key)
    self._store_items(cache_key)
|
def store(self, data, key="id"):
    """ Cache the list

        :param list data: List of objects to cache
    """
    # Load the data into this dict, then persist it under ``key``
    dict.__init__(self, data)
    self._store_item(key)
|
def objectid_valid(i):
    """ Test if a string looks like a regular object id of the
        form:::

           xxxx.yyyyy.zzzz

        with those being numbers.

        :param str i: candidate object id
        :returns: True if ``i`` has exactly three dot-separated parts
            that all parse as integers, else False
    """
    # The previous separate '"." in i' pre-check was redundant: without
    # a dot, split() yields one part and the length test fails anyway.
    parts = i.split(".")
    if len(parts) != 3:
        return False
    try:
        # int() on a str raises ValueError for non-numeric parts
        for part in parts:
            int(part)
    except ValueError:
        return False
    return True
|
def refresh(self):
    """ This is the refresh method that overloads the prototype in
        BlockchainObject.

        Re-fetches the object from the connected node and loads it into
        this dict.
    """
    # NOTE: the previous code also passed ``blockchain_instance=...`` to
    # dict.__init__, which dict interprets as a literal key/value pair
    # and therefore polluted the object with a spurious
    # "blockchain_instance" entry. dict.__init__ only takes the data.
    dict.__init__(self, self.blockchain.rpc.get_object(self.identifier))
|
def _encrypt_xor(a, b, aes):
    """ Returns encrypt(a ^ b).

        :param str a: hex-encoded operand
        :param bytes b: raw-bytes operand
        :param aes: cipher object providing ``encrypt``
    """
    xored = int(a, 16) ^ int(hexlify(b), 16)
    # Re-encode the XOR result as 16 raw bytes before encrypting
    return aes.encrypt(unhexlify("%0.32x" % xored))
|
def encrypt(privkey, passphrase):
    """ BIP0038 non-ec-multiply encryption. Returns BIP0038 encrypted privkey.

        :param privkey: Private key
        :type privkey: Base58
        :param str passphrase: UTF-8 encoded passphrase for encryption
        :return: BIP0038 non-ec-multiply encrypted wif key
        :rtype: Base58
    """
    if isinstance(privkey, str):
        privkey = PrivateKey(privkey)
    else:
        privkey = PrivateKey(repr(privkey))
    privkeyhex = repr(privkey)  # hex
    # Salt: first 4 bytes of the double-sha256 of the bitcoin address
    addr = format(privkey.bitcoin.address, "BTC")
    a = _bytes(addr)
    salt = hashlib.sha256(hashlib.sha256(a).digest()).digest()[0:4]
    # Derive 64 bytes of key material via scrypt (N=16384, r=8, p=8)
    if SCRYPT_MODULE == "scrypt":  # pragma: no cover
        key = scrypt.hash(passphrase, salt, 16384, 8, 8)
    elif SCRYPT_MODULE == "pylibscrypt":  # pragma: no cover
        key = scrypt.scrypt(bytes(passphrase, "utf-8"), salt, 16384, 8, 8)
    else:  # pragma: no cover
        raise ValueError("No scrypt module loaded")  # pragma: no cover
    (derived_half1, derived_half2) = (key[:32], key[32:])
    aes = AES.new(derived_half2, AES.MODE_ECB)
    # Encrypt the two 16-byte halves of the private key, each XORed
    # with the corresponding half of derived_half1
    encrypted_half1 = _encrypt_xor(privkeyhex[:32], derived_half1[:16], aes)
    encrypted_half2 = _encrypt_xor(privkeyhex[32:], derived_half1[16:], aes)
    " flag byte is forced 0xc0 because Graphene only uses compressed keys "
    payload = b"\x01" + b"\x42" + b"\xc0" + salt + encrypted_half1 + encrypted_half2
    " Checksum "
    checksum = hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4]
    privatkey = hexlify(payload + checksum).decode("ascii")
    return Base58(privatkey)
|
def decrypt(encrypted_privkey, passphrase):
    """BIP0038 non-ec-multiply decryption. Returns WIF privkey.

        :param Base58 encrypted_privkey: Private key
        :param str passphrase: UTF-8 encoded passphrase for decryption
        :return: BIP0038 non-ec-multiply decrypted key
        :rtype: Base58
        :raises SaltException: if checksum verification failed (e.g. wrong
            password)
    """
    d = unhexlify(base58decode(encrypted_privkey))
    d = d[2:]  # strip the leading 0x01 and 0x42 version bytes
    flagbyte = d[0:1]  # get flag byte
    d = d[1:]  # get payload
    assert flagbyte == b"\xc0", "Flagbyte has to be 0xc0"
    salt = d[0:4]
    # Payload without the salt and without the trailing 4-byte checksum
    d = d[4:-4]
    # Derive 64 bytes of key material via scrypt (N=16384, r=8, p=8)
    if SCRYPT_MODULE == "scrypt":  # pragma: no cover
        key = scrypt.hash(passphrase, salt, 16384, 8, 8)
    elif SCRYPT_MODULE == "pylibscrypt":  # pragma: no cover
        key = scrypt.scrypt(bytes(passphrase, "utf-8"), salt, 16384, 8, 8)
    else:
        raise ValueError("No scrypt module loaded")  # pragma: no cover
    derivedhalf1 = key[0:32]
    derivedhalf2 = key[32:64]
    encryptedhalf1 = d[0:16]
    encryptedhalf2 = d[16:32]
    aes = AES.new(derivedhalf2, AES.MODE_ECB)
    decryptedhalf2 = aes.decrypt(encryptedhalf2)
    decryptedhalf1 = aes.decrypt(encryptedhalf1)
    # Undo the XOR with derivedhalf1 to recover the raw private key
    privraw = decryptedhalf1 + decryptedhalf2
    privraw = "%064x" % (int(hexlify(privraw), 16) ^ int(hexlify(derivedhalf1), 16))
    wif = Base58(privraw)
    """ Verify Salt """
    privkey = PrivateKey(format(wif, "wif"))
    addr = format(privkey.bitcoin.address, "BTC")
    a = _bytes(addr)
    saltverify = hashlib.sha256(hashlib.sha256(a).digest()).digest()[0:4]
    if saltverify != salt:  # pragma: no cover
        raise SaltException("checksum verification failed! Password may be incorrect.")
    return wif
|
def setKeys(self, loadkeys):
    """ This method is strictly only for in memory keys that are
        passed to Wallet with the ``keys`` argument
    """
    log.debug("Force setting of private keys. Not using the wallet database!")
    # Normalize the input to an iterable of wif keys
    if isinstance(loadkeys, dict):
        loadkeys = list(loadkeys.values())
    elif not isinstance(loadkeys, (list, set)):
        loadkeys = [loadkeys]
    for key in loadkeys:
        pubkey = self.publickey_from_wif(key)
        self.store.add(str(key), pubkey)
|
def unlock(self, pwd):
    """ Unlock the wallet database
    """
    # Only encrypted stores need (and support) unlocking
    if not self.store.is_encrypted():
        return None
    return self.store.unlock(pwd)
|
def newWallet(self, pwd):
    """ Create a new wallet database

        :param str pwd: passphrase used to initialize the store
    """
    # Refuse to overwrite an already existing wallet
    if self.created():
        raise WalletExists("You already have created a wallet!")
    self.store.unlock(pwd)
|
def addPrivateKey(self, wif):
    """ Add a private key to the wallet database
    """
    # Derive the public key; a failure means the wif is malformed
    try:
        pubkey = self.publickey_from_wif(wif)
    except Exception:
        raise InvalidWifError("Invalid Key format!")
    pubkey_str = str(pubkey)
    if pubkey_str in self.store:
        raise KeyAlreadyInStoreException("Key already in the store")
    self.store.add(str(wif), pubkey_str)
|
def getPrivateKeyForPublicKey(self, pub):
    """ Obtain the private key for a given public key

        :param str pub: Public Key
    """
    pubkey = str(pub)
    # The store only knows the string form of the public key
    if pubkey not in self.store:
        raise KeyNotFound
    return self.store.getPrivateKeyForPublicKey(pubkey)
|
def removeAccount(self, account):
    """ Remove all keys associated with a given account
    """
    # Delete every stored pubkey whose account name matches
    for entry in self.getAccounts():
        if entry["name"] == account:
            self.store.delete(entry["pubkey"])
|
def getOwnerKeyForAccount(self, name):
    """ Obtain owner Private Key for an account from the wallet database
    """
    account = self.rpc.get_account(name)
    # Return the first owner key for which the wallet holds a wif
    for authority in account["owner"]["key_auths"]:
        wif = self.getPrivateKeyForPublicKey(authority[0])
        if wif:
            return wif
    raise KeyNotFound
|
def getMemoKeyForAccount(self, name):
    """ Obtain the Memo Private Key for an account from the wallet
        database (returns False when no key is available)
    """
    account = self.rpc.get_account(name)
    wif = self.getPrivateKeyForPublicKey(account["options"]["memo_key"])
    return wif if wif else False
|
def getActiveKeyForAccount(self, name):
    """ Obtain an Active Private Key for an account from the wallet
        database (returns False when no key is available)
    """
    account = self.rpc.get_account(name)
    for authority in account["active"]["key_auths"]:
        # Keys missing from the store raise; just try the next one
        try:
            return self.getPrivateKeyForPublicKey(authority[0])
        except Exception:
            continue
    return False
|
def getAccountFromPrivateKey(self, wif):
    """ Obtain account name from private key
    """
    # Derive the public key, then look up the (first) account using it
    return self.getAccountFromPublicKey(self.publickey_from_wif(wif))
|
def getAccountsFromPublicKey(self, pub):
    """ Obtain all accounts associated with a public key
    """
    # get_key_references returns one list of names per queried key;
    # we queried a single key, so yield from the first (only) list
    yield from self.rpc.get_key_references([str(pub)])[0]
|
def getAccountFromPublicKey(self, pub):
    """ Obtain the first account name from public key
    """
    # FIXME, this only returns the first associated key.
    # If the key is used by multiple accounts, this
    # will surely lead to undesired behavior
    return next(self.getAccountsFromPublicKey(str(pub)), None)
|
def getKeyType(self, account, pub):
    """ Get key type
    """
    pubkey = str(pub)
    # Check the owner and active authorities first, then the memo key
    for authority in ("owner", "active"):
        if any(pubkey == entry[0] for entry in account[authority]["key_auths"]):
            return authority
    if pubkey == account["options"]["memo_key"]:
        return "memo"
    return None
|
def getAccounts(self):
    """ Return all accounts installed in the wallet database
    """
    accounts = []
    for pubkey in self.getPublicKeys():
        # Filter out keys that do not belong to this network;
        # str.startswith is clearer than slicing by prefix length
        if pubkey.startswith(self.prefix):
            accounts.extend(self.getAccountsFromPublicKey(pubkey))
    return accounts
|
def getPublicKeys(self, current=False):
    """ Return all installed public keys

        :param bool current: If true, returns only keys for currently
            connected blockchain
    """
    pubkeys = self.store.getPublicKeys()
    if not current:
        return pubkeys
    # Keep only keys that carry this network's address prefix;
    # str.startswith replaces the old slice-by-prefix-length idiom
    return [pubkey for pubkey in pubkeys if pubkey.startswith(self.prefix)]
|
def rpcexec(self, payload):
    """ Execute a call by sending the payload

        :param json payload: Payload data
        :raises ValueError: if the server does not respond in proper JSON
            format
    """
    if not self.ws:  # pragma: no cover
        self.connect()
    log.debug(json.dumps(payload))
    # Mutex/Lock
    # We need to lock because we must wait for the websocket response
    # and don't want other threads to interleave their requests on the
    # same connection. Using the lock as a context manager replaces the
    # manual acquire/try/finally/release and still guarantees release
    # even if send/recv raises.
    with self.__lock:
        # Send over websocket and receive the matching response
        self.ws.send(json.dumps(payload, ensure_ascii=False).encode("utf8"))
        ret = self.ws.recv()
    return ret
|
def unlock_wallet(self, *args, **kwargs):
    """ Unlock the library internal wallet
    """
    # Delegate to the wallet; return self so calls can be chained
    wallet = self.blockchain.wallet
    wallet.unlock(*args, **kwargs)
    return self
|
def encrypt(self, message):
    """ Encrypt a memo

        :param str message: clear text memo message
        :returns: encrypted message
        :rtype: str
        :raises MissingKeyError: if the sender's memo private key is
            not available in the wallet
    """
    if not message:
        return None
    # A fresh 64-bit nonce per message
    nonce = str(random.getrandbits(64))
    try:
        memo_wif = self.blockchain.wallet.getPrivateKeyForPublicKey(
            self.from_account["options"]["memo_key"]
        )
    except KeyNotFound:
        # if all fails, raise exception
        raise MissingKeyError(
            "Memo private key {} for {} could not be found".format(
                self.from_account["options"]["memo_key"], self.from_account["name"]
            )
        )
    if not memo_wif:
        raise MissingKeyError(
            "Memo key for %s missing!" % self.from_account["name"]
        )
    if not hasattr(self, "chain_prefix"):
        # Fall back to the connected chain's key prefix
        self.chain_prefix = self.blockchain.prefix
    # Encrypt with our private key and the recipient's public memo key
    enc = memo.encode_memo(
        self.privatekey_class(memo_wif),
        self.publickey_class(
            self.to_account["options"]["memo_key"], prefix=self.chain_prefix
        ),
        nonce,
        message,
    )
    return {
        "message": enc,
        "nonce": nonce,
        "from": self.from_account["options"]["memo_key"],
        "to": self.to_account["options"]["memo_key"],
    }
|
def decrypt(self, message):
    """ Decrypt a message

        :param dict message: encrypted memo message
        :returns: decrypted message
        :rtype: str
        :raises MissingKeyError: if neither the recipient's nor the
            sender's memo private key is available in the wallet
    """
    if not message:
        return None
    # We first try to decode assuming we received the memo
    try:
        memo_wif = self.blockchain.wallet.getPrivateKeyForPublicKey(message["to"])
        pubkey = message["from"]
    except KeyNotFound:
        try:
            # if that failed, we assume that we have sent the memo
            memo_wif = self.blockchain.wallet.getPrivateKeyForPublicKey(
                message["from"]
            )
            pubkey = message["to"]
        except KeyNotFound:
            # if all fails, raise exception
            raise MissingKeyError(
                "None of the required memo keys are installed!"
                "Need any of {}".format([message["to"], message["from"]])
            )
    if not hasattr(self, "chain_prefix"):
        # Fall back to the connected chain's key prefix
        self.chain_prefix = self.blockchain.prefix
    return memo.decode_memo(
        self.privatekey_class(memo_wif),
        self.publickey_class(pubkey, prefix=self.chain_prefix),
        message.get("nonce"),
        message.get("message"),
    )
|
def get_shared_secret(priv, pub):
    """ Derive the shared secret between ``priv`` and ``pub``

        :param `Base58` priv: Private Key
        :param `Base58` pub: Public Key
        :return: Shared secret
        :rtype: hex

        The shared secret is generated such that::

            Pub(Alice) * Priv(Bob) = Pub(Bob) * Priv(Alice)
    """
    pub_point = pub.point()
    priv_scalar = int(repr(priv), 16)
    res = pub_point * priv_scalar
    # The secret is the x coordinate, zero-padded to 64 hex digits.
    # "%064x" replaces the previous two-step "%032x" + manual "0"-pad.
    return "%064x" % res.x()
|
def init_aes(shared_secret, nonce):
    """ Initialize AES instance

        :param hex shared_secret: Shared Secret to use as encryption key
        :param int nonce: Random nonce
        :return: AES instance
        :rtype: AES
    """
    # Shared secret: sha512 of the raw secret bytes
    ss = hashlib.sha512(unhexlify(shared_secret)).digest()
    # Seed: the nonce concatenated with the hex-encoded secret digest
    seed = bytes(str(nonce), "ascii") + hexlify(ss)
    seed_digest = hexlify(hashlib.sha512(seed).digest()).decode("ascii")
    # Derive the AES-CBC key (32 bytes) and IV (16 bytes) from the digest
    aes_key = unhexlify(seed_digest[0:64])
    aes_iv = unhexlify(seed_digest[64:96])
    return AES.new(aes_key, AES.MODE_CBC, aes_iv)
|
def encode_memo(priv, pub, nonce, message):
    """ Encode a message with a shared secret between Alice and Bob

        :param PrivateKey priv: Private Key (of Alice)
        :param PublicKey pub: Public Key (of Bob)
        :param int nonce: Random nonce
        :param str message: Memo message
        :return: Encrypted message
        :rtype: hex
    """
    aes = init_aes(get_shared_secret(priv, pub), nonce)
    # Prepend the first 4 bytes of the plaintext's sha256 as a checksum,
    # pad to the AES block size, then encrypt and hex-encode
    raw = bytes(message, "utf8")
    checksum = hashlib.sha256(raw).digest()[0:4]
    payload = _pad(checksum + raw, 16)
    return hexlify(aes.encrypt(payload)).decode("ascii")
|
def decode_memo(priv, pub, nonce, message):
    """ Decode a message with a shared secret between Alice and Bob

        :param PrivateKey priv: Private Key (of Bob)
        :param PublicKey pub: Public Key (of Alice)
        :param int nonce: Nonce used for Encryption
        :param bytes message: Encrypted Memo message
        :return: Decrypted message
        :rtype: str
        :raise ValueError: if message cannot be decoded as valid UTF-8
            string
    """
    aes = init_aes(get_shared_secret(priv, pub), nonce)
    # Decrypt the hex-encoded ciphertext
    cleartext = aes.decrypt(unhexlify(bytes(message, "ascii")))
    # First 4 bytes are a checksum over the (unpadded) plaintext
    checksum = cleartext[0:4]
    plaintext = _unpad(cleartext[4:], 16)
    # Verify the checksum before handing the message back
    if hashlib.sha256(plaintext).digest()[0:4] != checksum:  # pragma: no cover
        raise ValueError("checksum verification failure")
    return plaintext.decode("utf8")
|
def sign(self, account=None, **kwargs):
    """ Sign a message with an account's memo key

        :param str account: (optional) the account that owns the bet
            (defaults to ``default_account``)
        :raises ValueError: If no account for signing is provided
        :returns: the signed message encapsulated in a known format
    """
    if not account:
        if "default_account" in self.blockchain.config:
            account = self.blockchain.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    # Data for message
    account = self.account_class(account, blockchain_instance=self.blockchain)
    info = self.blockchain.info()
    meta = dict(
        timestamp=info["time"],
        block=info["head_block_number"],
        memokey=account["options"]["memo_key"],
        account=account["name"],
    )
    # wif key
    wif = self.blockchain.wallet.getPrivateKeyForPublicKey(
        account["options"]["memo_key"]
    )
    # We strip the message here so we know for sure there are no trailing
    # whitespaces or returns
    message = self.message.strip()
    # NOTE: SIGNED_MESSAGE_META is filled from locals() — the local
    # variable names above (e.g. ``message``, ``meta``) are part of the
    # template contract; do not rename them.
    enc_message = self.SIGNED_MESSAGE_META.format(**locals())
    # signature
    signature = hexlify(sign_message(enc_message, wif)).decode("ascii")
    self.signed_by_account = account
    self.signed_by_name = account["name"]
    self.meta = meta
    self.plain_message = message
    return self.SIGNED_MESSAGE_ENCAPSULATED.format(
        MESSAGE_SPLIT=self.MESSAGE_SPLIT, **locals()
    )
|
def verify(self, **kwargs):
    """ Verify a message with an account's memo key

        :param str account: (optional) the account that owns the bet
            (defaults to ``default_account``)
        :returns: True if the message is verified successfully
        :raises InvalidMessageSignature if the signature is not ok
    """
    # Split message into its parts
    parts = re.split("|".join(self.MESSAGE_SPLIT), self.message)
    parts = [x for x in parts if x.strip()]
    assert len(parts) > 2, "Incorrect number of message parts"
    # Strip away all whitespaces before and after the message
    message = parts[0].strip()
    signature = parts[2].strip()
    # Parse the meta data
    meta = dict(re.findall(r"(\S+)=(.*)", parts[1]))
    log.info("Message is: {}".format(message))
    log.info("Meta is: {}".format(json.dumps(meta)))
    log.info("Signature is: {}".format(signature))
    # Ensure we have all the data in meta
    assert "account" in meta, "No 'account' could be found in meta data"
    assert "memokey" in meta, "No 'memokey' could be found in meta data"
    assert "block" in meta, "No 'block' could be found in meta data"
    assert "timestamp" in meta, "No 'timestamp' could be found in meta data"
    account_name = meta.get("account").strip()
    memo_key = meta["memokey"].strip()
    # The memo key in the message must at least parse as a public key
    try:
        self.publickey_class(memo_key, prefix=self.blockchain.prefix)
    except Exception:
        raise InvalidMemoKeyException("The memo key in the message is invalid")
    # Load account from blockchain
    try:
        account = self.account_class(
            account_name, blockchain_instance=self.blockchain
        )
    except AccountDoesNotExistsException:
        raise AccountDoesNotExistsException(
            "Could not find account {}. Are you connected to the right chain?".format(
                account_name
            )
        )
    # Test if memo key is the same as on the blockchain
    if not account["options"]["memo_key"] == memo_key:
        raise WrongMemoKey(
            "Memo Key of account {} on the Blockchain ".format(account["name"])
            + "differs from memo key in the message: {} != {}".format(
                account["options"]["memo_key"], memo_key
            )
        )
    # Reformat message
    # NOTE: SIGNED_MESSAGE_META is filled from locals() — the local
    # variable names above are part of the template contract.
    enc_message = self.SIGNED_MESSAGE_META.format(**locals())
    # Verify Signature
    pubkey = verify_message(enc_message, unhexlify(signature))
    # Verify that the pubkey recovered from the signature matches the
    # account's memo key
    pk = self.publickey_class(
        hexlify(pubkey).decode("ascii"), prefix=self.blockchain.prefix
    )
    if format(pk, self.blockchain.prefix) != memo_key:
        raise InvalidMessageSignature("The signature doesn't match the memo key")
    self.signed_by_account = account
    self.signed_by_name = account["name"]
    self.meta = meta
    self.plain_message = message
    return True
|
def sign(self, account=None, **kwargs):
    """ Sign a message with an account's memo key

        :param str account: (optional) name of the account whose memo key
            signs the message (defaults to ``default_account``)
        :raises ValueError: if no signing account is given or configured
        :returns: dict carrying the serialized payload (``signed``), the
            raw ``payload`` list and the hex-encoded ``signature``
    """
    # Fall back to the configured default account if none was provided
    if not account:
        if "default_account" in self.blockchain.config:
            account = self.blockchain.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")

    signer = self.account_class(account, blockchain_instance=self.blockchain)
    memo_key = signer["options"]["memo_key"]

    # Obtain the private key matching the account's public memo key
    wif = self.blockchain.wallet.getPrivateKeyForPublicKey(memo_key)

    # Flat [key, value, key, value, ...] list that gets serialized and signed
    payload = [
        "from",
        signer["name"],
        "key",
        memo_key,
        "time",
        str(datetime.utcnow()),
        "text",
        self.message,
    ]
    enc_message = json.dumps(payload, separators=(",", ":"))

    # Hex-encode the raw signature for transport
    signature = hexlify(sign_message(enc_message, wif)).decode("ascii")

    return dict(signed=enc_message, payload=payload, signature=signature)
|
def verify(self, **kwargs):
    """ Verify a signed JSON message against the signer's on-chain memo key.

        Expects ``self.message`` to be (or parse into) a dict with keys
        ``payload`` (flat [key, value, ...] list), ``signed`` (the exact
        serialized payload that was signed) and ``signature`` (hex).

        :returns: True if the message is verified successfully
        :raises ValueError: if the message is not valid JSON
        :raises InvalidMemoKeyException: if the embedded memo key is malformed
        :raises AccountDoesNotExistsException: if the signing account is unknown
        :raises WrongMemoKey: if the embedded key differs from the on-chain key
        :raises InvalidMessageSignature: if the signature does not match
    """
    # Accept either an already-parsed dict or a JSON string
    if not isinstance(self.message, dict):
        try:
            self.message = json.loads(self.message)
        except Exception:
            raise ValueError("Message must be valid JSON")
    payload = self.message.get("payload")
    assert payload, "Missing payload"
    # Fold the flat [k1, v1, k2, v2, ...] list into a dict
    payload_dict = {k[0]: k[1] for k in zip(payload[::2], payload[1::2])}
    signature = self.message.get("signature")
    account_name = payload_dict.get("from").strip()
    memo_key = payload_dict.get("key").strip()
    assert account_name, "Missing account name 'from'"
    assert memo_key, "missing 'key'"
    # Sanity-check that the embedded memo key parses as a public key
    try:
        self.publickey_class(memo_key, prefix=self.blockchain.prefix)
    except Exception:
        raise InvalidMemoKeyException("The memo key in the message is invalid")
    # Load account from blockchain
    try:
        account = self.account_class(
            account_name, blockchain_instance=self.blockchain
        )
    except AccountDoesNotExistsException:
        raise AccountDoesNotExistsException(
            "Could not find account {}. Are you connected to the right chain?".format(
                account_name
            )
        )
    # Test if memo key is the same as on the blockchain
    if not account["options"]["memo_key"] == memo_key:
        raise WrongMemoKey(
            "Memo Key of account {} on the Blockchain ".format(account["name"])
            + "differs from memo key in the message: {} != {}".format(
                account["options"]["memo_key"], memo_key
            )
        )
    # Ensure payload and signed match: the signature covers 'signed', so the
    # human-readable payload must re-serialize to exactly that string
    signed_target = json.dumps(self.message.get("payload"), separators=(",", ":"))
    signed_actual = self.message.get("signed")
    assert (
        signed_target == signed_actual
    ), "payload doesn't match signed message: \n{}\n{}".format(
        signed_target, signed_actual
    )
    # The exact byte string that was signed
    enc_message = self.message.get("signed")
    # Recover the public key from message + signature
    pubkey = verify_message(enc_message, unhexlify(signature))
    # Compare the recovered public key against the embedded memo key
    pk = self.publickey_class(
        hexlify(pubkey).decode("ascii"), prefix=self.blockchain.prefix
    )
    if format(pk, self.blockchain.prefix) != memo_key:
        raise InvalidMessageSignature("The signature doesn't match the memo key")
    # Record verification results on the instance for later inspection
    self.signed_by_account = account
    self.signed_by_name = account["name"]
    self.plain_message = payload_dict.get("text")
    return True
|
def env():
    """Verify the IPMI environment, falling back to defaults when unset

    @returns 0 on success
    """
    ipmi = cij.env_to_dict(PREFIX, REQUIRED)
    if ipmi is None:
        # BUGFIX: the original assigned into None ('ipmi["USER"] = ...'),
        # which raises TypeError; build the defaults dict instead
        ipmi = {
            "USER": "admin",
            "PASS": "admin",
            "HOST": "localhost",
            "PORT": "623",
        }

    cij.info("ipmi.env: USER: %s, PASS: %s, HOST: %s, PORT: %s" % (
        ipmi["USER"], ipmi["PASS"], ipmi["HOST"], ipmi["PORT"]
    ))

    cij.env_export(PREFIX, EXPORTED, ipmi)
    return 0
|
def cmd(command):
    """Send IPMI 'command' via ipmitool"""
    env()  # make sure IPMI variables are exported

    ipmi = cij.env_to_dict(PREFIX, EXPORTED + REQUIRED)

    # Build the full ipmitool invocation without shadowing the parameter
    full_command = "ipmitool -U %s -P %s -H %s -p %s %s" % (
        ipmi["USER"], ipmi["PASS"], ipmi["HOST"], ipmi["PORT"], command)

    cij.info("ipmi.command: %s" % full_command)
    return cij.util.execute(full_command, shell=True, echo=True)
|
def regex_find(pattern, content):
    """Find exactly one occurrence of 'pattern' in 'content'

    @returns the single match on success, the empty string otherwise
    """
    matches = re.findall(pattern, content)
    if not matches:
        cij.err("pattern <%r> is invalid, no matches!" % pattern)
        cij.err("content: %r" % content)
        return ''

    if len(matches) >= 2:
        # BUGFIX: the message claimed "more than 2" while the condition
        # triggers on two or more matches, i.e. more than once
        cij.err("pattern <%r> is too simple, matched more than once!" % pattern)
        cij.err("content: %r" % content)
        return ''

    return matches[0]
|
def execute(cmd=None, shell=True, echo=True):
    """
    Execute the given 'cmd' (a list of arguments or a single command string)

    @returns (rcode, stdout, stderr)
    """
    if echo:
        cij.emph("cij.util.execute: shell: %r, cmd: %r" % (shell, cmd))

    rcode = 1
    stdout, stderr = ("", "")

    if cmd:
        if shell and not isinstance(cmd, str):
            # BUGFIX: Popen(shell=True) wants one command string; join list
            # arguments, but do NOT join an already-string command -- that
            # would interleave every character with spaces
            cmd = " ".join(cmd)

        proc = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=shell, close_fds=True)
        stdout, stderr = proc.communicate()
        rcode = proc.returncode

    if rcode and echo:
        cij.warn("cij.util.execute: stdout: %s" % stdout)
        cij.err("cij.util.execute: stderr: %s" % stderr)
        cij.err("cij.util.execute: rcode: %s" % rcode)

    return rcode, stdout, stderr
|
def env():
    """Verify BOARD variables and construct exported variables"""
    if cij.ssh.env():
        cij.err("board.env: invalid SSH environment")
        return 1

    # All REQUIRED variables must be present in the environment
    board = cij.env_to_dict(PREFIX, REQUIRED)
    if board is None:
        cij.err("board.env: invalid BOARD environment")
        return 1

    # CLASS is every required field except the last (ALIAS), joined by '_'
    class_parts = [board[key] for key in REQUIRED[:-1]]
    board["CLASS"] = "_".join(class_parts)
    board["IDENT"] = "-".join([board["CLASS"], board["ALIAS"]])

    # Export the EXPORTED variables
    cij.env_export(PREFIX, EXPORTED, board)
    return 0
|
def cat_file(path):
    """Cat file over SSH and return its stripped content"""
    status, stdout, _ = cij.ssh.command(["cat", path], shell=True, echo=True)
    if status:
        raise RuntimeError("cij.nvme.env: cat %s failed" % path)

    return stdout.strip()
|
def env():
    """Verify NVME variables and construct exported variables

    @returns 0 on success, 1 on error
    """
    if cij.ssh.env():
        cij.err("cij.nvme.env: invalid SSH environment")
        return 1

    nvme = cij.env_to_dict(PREFIX, REQUIRED)
    if nvme is None:
        # BUGFIX: env_to_dict returns None on missing variables; the
        # original would crash with a TypeError below
        cij.err("cij.nvme.env: invalid NVME environment")
        return 1

    nvme["DEV_PATH"] = os.path.join("/dev", nvme["DEV_NAME"])

    # get version, chunks, luns and chs
    try:
        sysfs = os.path.join("/sys/class/block", nvme["DEV_NAME"], "lightnvm")

        nvme["LNVM_VERSION"] = cat_file(os.path.join(sysfs, "version"))
        if nvme["LNVM_VERSION"] == "2.0":
            luns = "punits"
            chs = "groups"
        elif nvme["LNVM_VERSION"] == "1.2":
            luns = "num_luns"
            chs = "num_channels"
        else:
            raise RuntimeError("cij.nvme.env: invalid lnvm version: %s" % nvme["LNVM_VERSION"])

        nvme["LNVM_NUM_CHUNKS"] = cat_file(os.path.join(sysfs, "chunks"))
        nvme["LNVM_NUM_LUNS"] = cat_file(os.path.join(sysfs, luns))
        nvme["LNVM_NUM_CHS"] = cat_file(os.path.join(sysfs, chs))
        nvme["LNVM_TOTAL_LUNS"] = str(int(nvme["LNVM_NUM_LUNS"]) * int(nvme["LNVM_NUM_CHS"]))
        nvme["LNVM_TOTAL_CHUNKS"] = str(int(nvme["LNVM_TOTAL_LUNS"]) * int(nvme["LNVM_NUM_CHUNKS"]))

        # get spec version by identify namespace data struct
        if nvme["LNVM_VERSION"] == "2.0":
            cmd = ["nvme", "id-ctrl", nvme["DEV_PATH"], "--raw-binary"]
            status, stdout, _ = cij.ssh.command(cmd, shell=True)
            if status:
                raise RuntimeError("cij.nvme.env: nvme id-ctrl fail")

            buff = cij.bin.Buffer(types=IdentifyCDS, length=1)
            buff.memcopy(stdout)
            # Vendor-specific byte distinguishes Denali from plain Spec20
            if buff[0].VS[1023] == 0x5a:
                nvme["SPEC_VERSION"] = "Denali"
            else:
                nvme["SPEC_VERSION"] = "Spec20"
        else:
            nvme["SPEC_VERSION"] = "Spec12"

        # get chunk meta information
        nvme["LNVM_CHUNK_META_LENGTH"] = str(get_sizeof_descriptor_table(nvme["SPEC_VERSION"]))
        nvme["LNVM_CHUNK_META_SIZE"] = str(int(nvme["LNVM_CHUNK_META_LENGTH"]) *
                                           int(nvme["LNVM_TOTAL_CHUNKS"]))
    except Exception:
        # BUGFIX: was 'except StandardError', which does not exist in
        # Python 3 and would itself raise a NameError
        traceback.print_exc()
        return 1

    cij.env_export(PREFIX, EXPORTED, nvme)
    return 0
|
def fmt(lbaf=3):
    """Format the NVMe device with the given LBA format index"""
    if env():
        cij.err("cij.nvme.exists: Invalid NVMe ENV.")
        return 1

    nvme = cij.env_to_dict(PREFIX, EXPORTED + REQUIRED)

    rcode, _, _ = cij.ssh.command(
        ["nvme", "format", nvme["DEV_PATH"], "-l", str(lbaf)],
        shell=True,
    )
    return rcode
|
def get_meta(offset, length, output):
    """Fetch chunk meta of the NVMe device into file 'output'"""
    if env():
        cij.err("cij.nvme.meta: Invalid NVMe ENV.")
        return 1

    nvme = cij.env_to_dict(PREFIX, EXPORTED + REQUIRED)

    chunk_size = 0x40000  # upper bound on bytes per get-log call

    with open(output, "wb") as fout:
        # Walk [offset, length) in chunk_size steps, appending each slab
        for off in range(offset, length, chunk_size):
            size = min(length - off, chunk_size)
            cmd = ["nvme get-log",
                   nvme["DEV_PATH"],
                   "-i 0xca",
                   "-o 0x%x" % off,
                   "-l 0x%x" % size,
                   "-b"]
            status, stdout, _ = cij.ssh.command(cmd, shell=True)
            if status:
                cij.err("cij.nvme.meta: Error get chunk meta")
                return 1

            fout.write(stdout)

    return 0
|
def comp_meta(file_bef, file_aft, mode="pfail"):
    """Compare chunk meta files, mode=[pfail, power, reboot]"""
    if env():
        cij.err("cij.nvme.comp_meta: Invalid NVMe ENV.")
        return 1

    nvme = cij.env_to_dict(PREFIX, EXPORTED + REQUIRED)
    total_chunks = int(nvme["LNVM_TOTAL_CHUNKS"])

    before = cij.bin.Buffer(types=get_descriptor_table(nvme['SPEC_VERSION']),
                            length=total_chunks)
    after = cij.bin.Buffer(types=get_descriptor_table(nvme['SPEC_VERSION']),
                           length=total_chunks)
    before.read(file_bef)
    after.read(file_aft)

    for chk in range(total_chunks):
        ignore = ["WL", "RSV0"]
        # A chunk that was open (CS == 4) before pfail has an undefined
        # write pointer, so WP is excluded from the comparison
        if mode == "pfail" and before[chk].CS == 4:
            ignore.append("WP")

        if before.compare(after, chk, ignore=ignore):
            cij.warn("META_BUFF_BEF[%s]:" % chk)
            before.dump(chk)
            cij.warn("META_BUFF_AFT[%s]:" % chk)
            after.dump(chk)
            cij.err("Error compare, CHUNK: %s" % chk)
            return 1

    return 0
|
def get_sizeof_descriptor_table(version="Denali"):
    """
    Return the size in bytes of the chunk DescriptorTable for 'version'
    """
    if version == "Spec12":
        # Spec 1.2 carries no chunk descriptor table
        return 0
    if version == "Denali":
        return sizeof(DescriptorTableDenali)
    if version == "Spec20":
        return sizeof(DescriptorTableSpec20)

    raise RuntimeError("Error version!")
|
def env():
    """Verify LNVM variables and construct exported variables

    @returns 0 on success, 1 on error
    """
    if cij.ssh.env():
        cij.err("cij.lnvm.env: invalid SSH environment")
        return 1

    lnvm = cij.env_to_dict(PREFIX, REQUIRED)
    if lnvm is None:
        # BUGFIX: env_to_dict returns None when variables are missing; the
        # original crashed on 'lnvm.keys()' in that case
        cij.err("cij.lnvm.env: invalid LNVM environment")
        return 1

    nvme = cij.env_to_dict("NVME", ["DEV_NAME"])
    if nvme is None:
        cij.err("cij.lnvm.env: invalid NVME environment")
        return 1

    # Each of these must be present to derive the device name
    for var in ("BGN", "END", "DEV_TYPE"):
        if var not in lnvm:
            cij.err("cij.lnvm.env: invalid LNVM_%s" % var)
            return 1

    lnvm["DEV_NAME"] = "%sb%03de%03d" % (nvme["DEV_NAME"], int(lnvm["BGN"]), int(lnvm["END"]))
    lnvm["DEV_PATH"] = "/dev/%s" % lnvm["DEV_NAME"]

    cij.env_export(PREFIX, EXPORTED, lnvm)
    return 0
|
def create():
    """Create LNVM device"""
    if env():
        cij.err("cij.lnvm.create: Invalid LNVM ENV")
        return 1

    nvme = cij.env_to_dict("NVME", ["DEV_NAME"])
    lnvm = cij.env_to_dict(PREFIX, EXPORTED + REQUIRED)

    cij.emph("lnvm.create: LNVM_DEV_NAME: %s" % lnvm["DEV_NAME"])

    status, _, _ = cij.ssh.command([
        "nvme lnvm create -d %s -n %s -t %s -b %s -e %s -f" % (
            nvme["DEV_NAME"], lnvm["DEV_NAME"], lnvm["DEV_TYPE"],
            lnvm["BGN"], lnvm["END"])
    ], shell=True)
    if not status:
        return 0

    cij.err("cij.lnvm.create: FAILED")
    return 1
|
def dump(buf, indent=0, skip=""):
    """Dump a ctypes Union/Structure instance to STDOUT

    :param buf: ctypes Structure/Union instance to print
    :param indent: current indentation level (spaces)
    :param skip: field names to omit from the dump
    """
    if not isinstance(type(buf), (type(Union), type(Structure))):
        raise RuntimeError("Error type(%s)" % type(buf))

    for field in getattr(buf, '_fields_'):
        name, types = field[0], field[1]
        if name in skip:
            # BUGFIX: was 'return', which silently dropped every field
            # following the first skipped one
            continue

        value = getattr(buf, name)
        if isinstance(types, (type(Union), type(Structure))):
            cij.info("%s%s:" % (" " * indent, name))
            dump(value, indent + 2, skip)
        elif isinstance(types, type(Array)):
            # BUGFIX: inspect the array's element type ('_type_'), not the
            # array type itself, so arrays of structs recurse properly
            elem_type = getattr(types, "_type_", None)
            for i, item in enumerate(value):
                name_index = "%s[%s]" % (name, i)
                if isinstance(elem_type, (type(Union), type(Structure))):
                    cij.info("%s%s:" % (" " * indent, name_index))
                    dump(item, indent + 2, skip)
                else:
                    cij.info("%s%-12s: 0x%x" % (" " * indent, name_index, item))
        else:
            cij.info("%s%-12s: 0x%x" % (" " * indent, name, value))
|
def compare(buf_a, buf_b, ignore):
    """Compare two ctypes items field by field

    :param ignore: field names excluded from the comparison
    @returns 0 when equal, 1 on the first mismatch
    """
    for field in getattr(buf_a, '_fields_'):
        name, types = field[0], field[1]
        if name in ignore:
            continue

        val_a = getattr(buf_a, name)
        val_b = getattr(buf_b, name)

        if isinstance(types, (type(Union), type(Structure))):
            if compare(val_a, val_b, ignore):
                return 1
        elif isinstance(types, type(Array)):
            # BUGFIX: check the array's *element* type; ctypes structs have
            # no value-based '==', so comparing struct elements with '!='
            # compared identities and always reported a mismatch
            elem_type = getattr(types, "_type_", None)
            for i, _ in enumerate(val_a):
                if isinstance(elem_type, (type(Union), type(Structure))):
                    if compare(val_a[i], val_b[i], ignore):
                        return 1
                elif val_a[i] != val_b[i]:
                    return 1
        else:
            if val_a != val_b:
                return 1

    return 0
|
def memcopy(self, stream, offset=0, length=float("inf")):
    """Copy 'stream' (str, bytes or bytearray) into the buffer

    :param offset: first buffer index written
    :param length: maximum number of bytes to copy (defaults to all)
    """
    if isinstance(stream, (bytes, bytearray)):
        # BUGFIX: Python 3 bytes iterate as ints; ord() on them raises
        data = list(stream)
    else:
        data = [ord(ch) for ch in stream]

    # Never write past the source data or the buffer's capacity
    size = min(length, len(data), self.m_size)
    buff = cast(self.m_buf, POINTER(c_uint8))
    for i in range(size):
        buff[offset + i] = data[i]
|
def write(self, path):
    """Persist the raw buffer contents to the file at 'path'"""
    with open(path, "wb") as fhandle:
        fhandle.write(self.m_buf)
|
def read(self, path):
    """Fill the buffer from the file at 'path' (up to m_size bytes)"""
    # 'fin' rather than the original's misleading 'fout': this handle reads
    with open(path, "rb") as fin:
        memmove(self.m_buf, fin.read(self.m_size), self.m_size)
|
def dump(self, offset=0, length=1):
    """Dump buffer items [offset, offset+length) to STDOUT"""
    for idx in range(offset, offset + length):
        if "ctypes" in str(self.m_types):
            # Plain ctypes scalar items print directly
            cij.info("Buff[%s]: %s" % (idx, self.m_buf[idx]))
        else:
            # Struct/union items are delegated to the module-level dump()
            cij.info("Buff[%s]:" % idx)
            dump(self.m_buf[idx], 2)
|
def compare(self, buf, offset=0, length=1, ignore=""):
    """Compare items [offset, offset+length) of this buffer against 'buf'

    @returns 0 when equal, 1 on the first mismatch
    """
    for idx in range(offset, offset + length):
        if isinstance(self.m_types, (type(Union), type(Structure))):
            # Struct/union items need the field-aware module-level compare()
            if compare(self.m_buf[idx], buf[idx], ignore=ignore):
                return 1
        elif self.m_buf[idx] != buf[idx]:
            return 1
    return 0
|
def power_on(self, interval=200):
    """230v power on"""
    port = self.__power_on_port
    if port is None:
        cij.err("cij.usb.relay: Invalid USB_RELAY_POWER_ON")
        return 1
    # Pulse the relay on the configured port
    return self.__press(port, interval=interval)
|
def power_off(self, interval=200):
    """230v power off"""
    port = self.__power_off_port
    if port is None:
        cij.err("cij.usb.relay: Invalid USB_RELAY_POWER_OFF")
        return 1
    # Pulse the relay on the configured port
    return self.__press(port, interval=interval)
|
def power_btn(self, interval=200):
    """TARGET power button"""
    port = self.__power_btn_port
    if port is None:
        cij.err("cij.usb.relay: Invalid USB_RELAY_POWER_BTN")
        return 1
    # Pulse the relay on the configured port
    return self.__press(port, interval=interval)
|
def get_chunk_information(self, chk, lun, chunk_name):
    """Report chunk information to file 'chunk_name' via nvm_cmd"""
    status, _, _ = cij.ssh.command(
        ["nvm_cmd rprt_lun", self.envs, "%d %d > %s" % (chk, lun, chunk_name)],
        shell=True,
    )
    return status
|
def is_bad_chunk(self, chk, yml):
    """Check whether the chunk is offline (chunk state >= 8)"""
    return self.get_chunk_status(chk, yml) >= 8
|
def is_free_chunk(self, chk):
    """Check whether the chunk is free (state bit 0x1 set)"""
    return bool(self.get_chunk_status(chk) & 0x1)
|
def is_closed_chunk(self, chk):
    """Check whether the chunk is closed (state bit 0x2 set)"""
    return bool(self.get_chunk_status(chk) & 0x2)
|
def is_open_chunk(self, chk):
    """Check whether the chunk is open (state bit 0x4 set)"""
    return bool(self.get_chunk_status(chk) & 0x4)
|
def vblk_erase(self, address):
    """Erase a virtual block at 'address' via nvm_vblk"""
    status, _, _ = cij.ssh.command(
        ["nvm_vblk erase", self.envs, "0x%x" % address],
        shell=True,
    )
    return status
|
def slc_erase(self, address, BE_ID=0x1, PMODE=0x0100):
    """SLC erase at 'address' with the given backend id and plane mode"""
    status, _, _ = cij.ssh.command(
        ["NVM_CLI_BE_ID=0x%x" % BE_ID,
         "NVM_CLI_PMODE=0x%x" % PMODE,
         "nvm_cmd erase", self.envs, "0x%x" % address],
        shell=True,
    )
    return status
|
def env():
    """Verify BLOCK variables and construct exported variables

    @returns 0 on success, 1 on error
    """
    if cij.ssh.env():
        cij.err("cij.block.env: invalid SSH environment")
        return 1

    block = cij.env_to_dict(PREFIX, REQUIRED)
    if block is None:
        # Guard missing variables like the sibling env() helpers do;
        # env_to_dict returns None and indexing it would raise TypeError
        cij.err("cij.block.env: invalid BLOCK environment")
        return 1

    block["DEV_PATH"] = "/dev/%s" % block["DEV_NAME"]

    cij.env_export(PREFIX, EXPORTED, block)
    return 0
|
def script_run(trun, script):
    """Execute a script or testcase via bash, logging output to its log file.

    The script runs inside a fresh bash that sources the cijoe modules and
    the trun environment file; its exit code and wall-clock time are written
    back into the 'script' dict.

    @returns the script's exit code (0 on success)
    """
    if trun["conf"]["VERBOSE"]:
        cij.emph("rnr:script:run { script: %s }" % script)
        cij.emph("rnr:script:run:evars: %s" % script["evars"])
    # Map file extension to how the script is invoked inside bash
    launchers = {
        ".py": "python",
        ".sh": "source"
    }
    ext = os.path.splitext(script["fpath"])[-1]
    if not ext in launchers.keys():
        cij.err("rnr:script:run { invalid script[\"fpath\"]: %r }" % script["fpath"])
        return 1
    launch = launchers[ext]
    # Append to the script's log; the subprocess inherits this handle so
    # all stdout/stderr ends up in the same file
    with open(script["log_fpath"], "a") as log_fd:
        log_fd.write("# script_fpath: %r\n" % script["fpath"])
        log_fd.flush()
        bgn = time.time()
        # One bash -c string: set up cijoe, source the env file, then run
        # the script with CIJ_TEST_RES_ROOT pointing at its result dir
        cmd = [
            'bash', '-c',
            'CIJ_ROOT=$(cij_root) && '
            'source $CIJ_ROOT/modules/cijoe.sh && '
            'source %s && '
            'CIJ_TEST_RES_ROOT="%s" %s %s ' % (
                trun["conf"]["ENV_FPATH"],
                script["res_root"],
                launch,
                script["fpath"]
            )
        ]
        if trun["conf"]["VERBOSE"] > 1:
            cij.emph("rnr:script:run { cmd: %r }" % " ".join(cmd))
        # Inherit the current environment and overlay the script's evars
        evars = os.environ.copy()
        evars.update({k: str(script["evars"][k]) for k in script["evars"]})
        process = Popen(
            cmd,
            stdout=log_fd,
            stderr=STDOUT,
            cwd=script["res_root"],
            env=evars
        )
        process.wait()
        # Record outcome on the script struct for the caller/reporting
        script["rcode"] = process.returncode
        script["wallc"] = time.time() - bgn
    if trun["conf"]["VERBOSE"]:
        cij.emph("rnr:script:run { wallc: %02f }" % script["wallc"])
        cij.emph(
            "rnr:script:run { rcode: %r } " % script["rcode"],
            script["rcode"]
        )
    return script["rcode"]
|
def hook_setup(parent, hook_fpath):
    """Create a hook struct for 'hook_fpath' and copy the file into place"""
    hook = copy.deepcopy(HOOK)

    fname = os.path.basename(hook_fpath)
    base = os.path.splitext(fname)[0]

    # Hook name is the filename minus extension and enter/exit suffix
    hook["name"] = base.replace("_enter", "").replace("_exit", "")
    hook["res_root"] = parent["res_root"]
    hook["fpath_orig"] = hook_fpath
    hook["fname"] = "hook_%s" % fname
    hook["fpath"] = os.sep.join([hook["res_root"], hook["fname"]])
    hook["log_fpath"] = os.sep.join([hook["res_root"], "%s.log" % hook["fname"]])

    # Hooks inherit the parent's environment variables
    hook["evars"].update(copy.deepcopy(parent["evars"]))

    shutil.copyfile(hook["fpath_orig"], hook["fpath"])

    return hook
|
def hooks_setup(trun, parent, hnames=None):
    """
    Setup test-hooks
    @returns dict of hook filepaths {"enter": [], "exit": []}, or None on error
    """
    hooks = {"enter": [], "exit": []}

    if hnames is None:
        # Nothing requested; hand back the empty structure
        return hooks

    for hname in hnames:
        # Resolve each hook name against every enter/exit filename pattern
        for med, patterns in HOOK_PATTERNS.items():
            for ptn in patterns:
                fpath = os.sep.join([trun["conf"]["HOOKS"], ptn % hname])
                if not os.path.exists(fpath):
                    continue

                hook = hook_setup(parent, fpath)
                if not hook:
                    continue

                hooks[med].append(hook)

        # A named hook that produced no files at all is an error
        if not hooks["enter"] + hooks["exit"]:
            cij.err("rnr:hooks_setup:FAIL { hname: %r has no files }" % hname)
            return None

    return hooks
|
def trun_to_file(trun, fpath=None):
    """Serialize the given trun as YAML to 'fpath' (defaults to OUTPUT dir)"""
    if fpath is None:
        fpath = yml_fpath(trun["conf"]["OUTPUT"])

    with open(fpath, 'w') as yml_file:
        yml_file.write(
            yaml.dump(trun, explicit_start=True, default_flow_style=False)
        )
|
def trun_emph(trun):
    """Print essential trun info (verbosity-gated)"""
    conf = trun["conf"]

    if conf["VERBOSE"] > 1:
        # Very verbose: dump every configuration variable
        cij.emph("rnr:CONF {")
        for cvar in sorted(conf.keys()):
            cij.emph(" % 16s: %r" % (cvar, conf[cvar]))
        cij.emph("}")

    if conf["VERBOSE"]:
        cij.emph("rnr:INFO {")
        cij.emph("  OUTPUT: %r" % conf["OUTPUT"])
        cij.emph("  yml_fpath: %r" % yml_fpath(conf["OUTPUT"]))
        cij.emph("}")
|
def tcase_setup(trun, parent, tcase_fname):
    """
    Create and initialize a testcase struct; returns None when the
    testcase file does not exist
    """
    #pylint: disable=locally-disabled, unused-argument
    case = copy.deepcopy(TESTCASE)

    fpath_orig = os.sep.join([trun["conf"]["TESTCASES"], tcase_fname])
    if not os.path.exists(fpath_orig):
        cij.err('rnr:tcase_setup: !case["fpath_orig"]: %r' % fpath_orig)
        return None

    case["fname"] = tcase_fname
    case["fpath_orig"] = fpath_orig
    case["name"] = os.path.splitext(tcase_fname)[0]
    case["ident"] = "/".join([parent["ident"], tcase_fname])

    res_root = os.sep.join([parent["res_root"], tcase_fname])
    case["res_root"] = res_root
    case["aux_root"] = os.sep.join([res_root, "_aux"])
    case["log_fpath"] = os.sep.join([res_root, "run.log"])
    case["fpath"] = os.sep.join([res_root, tcase_fname])

    # Inherit the parent's environment variables
    case["evars"].update(copy.deepcopy(parent["evars"]))

    # Create result directories and copy the testcase into place
    os.makedirs(case["res_root"])
    os.makedirs(case["aux_root"])
    shutil.copyfile(case["fpath_orig"], case["fpath"])

    # Initialize per-testcase hooks
    case["hooks"] = hooks_setup(trun, case, parent.get("hooks_pr_tcase"))

    return case
|
def tsuite_exit(trun, tsuite):
    """Triggers when exiting the given testsuite"""
    verbose = trun["conf"]["VERBOSE"]
    if verbose:
        cij.emph("rnr:tsuite:exit")

    rcode = 0
    # EXIT-hooks run in reverse registration order; stop on first failure
    for hook in reversed(tsuite["hooks"]["exit"]):
        rcode = script_run(trun, hook)
        if rcode:
            break

    if verbose:
        cij.emph("rnr:tsuite:exit { rcode: %r } " % rcode, rcode)
    return rcode
|
def tsuite_enter(trun, tsuite):
    """Triggers when entering the given testsuite"""
    verbose = trun["conf"]["VERBOSE"]
    if verbose:
        cij.emph("rnr:tsuite:enter { name: %r }" % tsuite["name"])

    rcode = 0
    # ENTER-hooks run in registration order; stop on first failure
    for hook in tsuite["hooks"]["enter"]:
        rcode = script_run(trun, hook)
        if rcode:
            break

    if verbose:
        cij.emph("rnr:tsuite:enter { rcode: %r } " % rcode, rcode)
    return rcode
|
def tsuite_setup(trun, declr, enum):
    """
    Creates and initialized a TESTSUITE struct and site-effects such as creating
    output directories and forwarding initialization of testcases

    :param declr: testsuite declaration dict (name, alias, evars, hooks, ...)
    :param enum: running index used to keep repeated suites unique
    @returns the initialized suite struct, or None on error
    """
    suite = copy.deepcopy(TESTSUITE)  # Setup the test-suite

    suite["name"] = declr.get("name")
    if suite["name"] is None:
        cij.err("rnr:tsuite_setup: no testsuite is given")
        return None

    suite["alias"] = declr.get("alias")
    # Suffix with enum so the same suite can appear multiple times in a run
    suite["ident"] = "%s_%d" % (suite["name"], enum)

    suite["res_root"] = os.sep.join([trun["conf"]["OUTPUT"], suite["ident"]])
    suite["aux_root"] = os.sep.join([suite["res_root"], "_aux"])

    # Trun-level evars first, then the declaration's evars override them
    suite["evars"].update(copy.deepcopy(trun["evars"]))
    suite["evars"].update(copy.deepcopy(declr.get("evars", {})))

    # Initialize
    os.makedirs(suite["res_root"])
    os.makedirs(suite["aux_root"])

    # Setup testsuite-hooks
    suite["hooks"] = hooks_setup(trun, suite, declr.get("hooks"))

    # Forward from declaration
    suite["hooks_pr_tcase"] = declr.get("hooks_pr_tcase", [])

    suite["fname"] = "%s.suite" % suite["name"]
    suite["fpath"] = os.sep.join([trun["conf"]["TESTSUITES"], suite["fname"]])

    #
    # Load testcases from .suite file OR from declaration
    #
    tcase_fpaths = []                               # Load testcase fpaths
    if os.path.exists(suite["fpath"]):              # From suite-file
        suite_lines = (
            l.strip() for l in open(suite["fpath"]).read().splitlines()
        )
        # Skip blank-ish lines and comment lines starting with '#'
        tcase_fpaths.extend(
            (l for l in suite_lines if len(l) > 1 and l[0] != "#")
        )
    else:                                           # From declaration
        tcase_fpaths.extend(declr.get("testcases", []))

    # NOTE: fix duplicates; allow them
    # NOTE: Currently hot-fixed here
    if len(set(tcase_fpaths)) != len(tcase_fpaths):
        cij.err("rnr:suite: failed: duplicate tcase in suite not supported")
        return None

    for tcase_fname in tcase_fpaths:                # Setup testcases
        tcase = tcase_setup(trun, suite, tcase_fname)
        if not tcase:
            cij.err("rnr:suite: failed: tcase_setup")
            return None

        suite["testcases"].append(tcase)

    return suite
|
def tcase_exit(trun, tsuite, tcase):
    """Run the testcase's EXIT-hooks; returns first failing hook's rcode"""
    #pylint: disable=locally-disabled, unused-argument
    verbose = trun["conf"]["VERBOSE"]
    if verbose:
        cij.emph("rnr:tcase:exit { fname: %r }" % tcase["fname"])

    rcode = 0
    # EXIT-hooks run in reverse registration order; stop on first failure
    for hook in reversed(tcase["hooks"]["exit"]):
        rcode = script_run(trun, hook)
        if rcode:
            break

    if verbose:
        cij.emph("rnr:tcase:exit { rcode: %r }" % rcode, rcode)
    return rcode
|
def tcase_enter(trun, tsuite, tcase):
    """
    setup res_root and aux_root, log info and run tcase-enter-hooks

    @returns 0 when all hooks succeed, some value othervise
    """
    #pylint: disable=locally-disabled, unused-argument

    if trun["conf"]["VERBOSE"]:
        cij.emph("rnr:tcase:enter")
        cij.emph("rnr:tcase:enter { fname: %r }" % tcase["fname"])
        cij.emph("rnr:tcase:enter { log_fpath: %r }" % tcase["log_fpath"])

    rcode = 0
    for hook in tcase["hooks"]["enter"]:    # tcase ENTER-hooks
        rcode = script_run(trun, hook)
        if rcode:
            break

    if trun["conf"]["VERBOSE"]:
        # BUGFIX: this log line was labeled 'rnr:tcase:exit:' -- a
        # copy-paste slip that made run logs misleading
        cij.emph("rnr:tcase:enter { rcode: %r }" % rcode, rcode)

    return rcode
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.