| code (string) | signature (string) | docstring (string) | loss_without_docstring (float64) | loss_with_docstring (float64) | factor (float64) |
|---|---|---|---|---|---|
print_tx(ctx.bitshares.create_committee_member(url, account=account))
|
def createcommittee(ctx, url, account)
|
Set up a committee account for your account
| 10.94659
| 10.5492
| 1.03767
|
if key == "default_account" and value[0] == "@":
value = value[1:]
ctx.bitshares.config[key] = value
|
def set(ctx, key, value)
|
Set configuration parameters
| 6.43942
| 6.732325
| 0.956493
|
t = [["Key", "Value"]]
for key in ctx.bitshares.config:
t.append([key, ctx.bitshares.config[key]])
print_table(t)
|
def configuration(ctx)
|
Show configuration variables
| 4.999933
| 4.318115
| 1.157897
|
if filename:
tx = filename.read()
else:
tx = sys.stdin.read()
tx = TransactionBuilder(eval(tx), bitshares_instance=ctx.bitshares)
tx.appendMissingSignatures()
tx.sign()
print_tx(tx.json())
|
def sign(ctx, filename)
|
Sign a JSON-formatted transaction
| 5.93275
| 5.495422
| 1.07958
|
from bitsharesbase.account import PrivateKey
t = [["wif", "pubkey"]]
for n in range(0, num):
wif = PrivateKey()
t.append([str(wif), format(wif.pubkey, prefix)])
print_table(t)
|
def randomwif(prefix, num)
|
Obtain a random private/public key pair
| 4.430419
| 4.556497
| 0.97233
|
print_tx(ctx.bitshares.approvewitness(witnesses, account=account))
|
def approvewitness(ctx, witnesses, account)
|
Approve witness(es)
| 9.23982
| 8.387189
| 1.101659
|
print_tx(ctx.bitshares.disapprovewitness(witnesses, account=account))
|
def disapprovewitness(ctx, witnesses, account)
|
Disapprove witness(es)
| 7.361797
| 7.485086
| 0.983529
|
t = [
[
"weight",
"account",
"signing_key",
"vote_id",
"url",
"total_missed",
"last_confirmed_block_num",
]
]
for witness in sorted(Witnesses(), key=lambda x: x.weight, reverse=True):
witness.refresh()
t.append(
[
"{:.2f}%".format(witness.weight * 100),
witness.account["name"],
witness["signing_key"],
witness["vote_id"],
witness["url"],
witness["total_missed"],
witness["last_confirmed_block_num"],
]
)
print_table(t)
|
def witnesses(ctx)
|
List witnesses and relevant information
| 2.991493
| 2.727142
| 1.096933
|
account = Account(account, full=True)
t = [["vesting_id", "claimable"]]
for vest in account["vesting_balances"]:
vesting = Vesting(vest)
t.append([vesting["id"], str(vesting.claimable)])
print_table(t)
|
def vesting(ctx, account)
|
List an account's vesting balances
| 4.395137
| 4.101677
| 1.071546
|
vesting = Vesting(vestingid)
if amount:
amount = Amount(float(amount), "BTS")
else:
amount = vesting.claimable
print_tx(
ctx.bitshares.vesting_balance_withdraw(
vesting["id"], amount=amount, account=vesting["owner"]
)
)
|
def claim(ctx, vestingid, account, amount)
|
Claim funds from the vesting balance
| 4.624561
| 4.365586
| 1.059322
|
print_tx(
ctx.bitshares.reserve(
Amount(amount, symbol, bitshares_instance=ctx.bitshares), account=account
)
)
|
def reserve(ctx, amount, symbol, account)
|
Reserve/Burn tokens
| 5.802266
| 6.044718
| 0.95989
|
global ctx
ctx = context
app.run(port=port)
|
def run(context, port)
|
Run the webserver/SocketIO app
| 7.765508
| 8.151625
| 0.952633
|
@click.pass_context
@verbose
def new_func(ctx, *args, **kwargs):
ctx.obj["offline"] = True
ctx.bitshares = BitShares(**ctx.obj)
ctx.blockchain = ctx.bitshares
ctx.bitshares.set_shared_instance()
return ctx.invoke(f, *args, **kwargs)
return update_wrapper(new_func, f)
|
def offline(f)
|
This decorator allows you to access ``ctx.bitshares`` which is
an instance of BitShares with ``offline=True``.
| 4.76426
| 3.752739
| 1.269542
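For orientation, a minimal usage sketch of the ``@offline`` decorator above, patterned on the ``@customchain`` example a few rows below; the ``main`` click group and the ``chaininfo`` command are hypothetical.

```python
import click

@click.group()
@click.pass_context
def main(ctx):
    ctx.obj = {}  # options later consumed by BitShares(**ctx.obj)

@main.command()
@click.pass_context
@offline
def chaininfo(ctx):
    # the decorator built ctx.bitshares with offline=True
    click.echo(type(ctx.bitshares))
```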
|
def wrap(f):
@click.pass_context
@verbose
def new_func(ctx, *args, **kwargs):
newoptions = ctx.obj
newoptions.update(kwargsChain)
ctx.bitshares = BitShares(**newoptions)
ctx.blockchain = ctx.bitshares
set_shared_bitshares_instance(ctx.bitshares)
return ctx.invoke(f, *args, **kwargs)
return update_wrapper(new_func, f)
return wrap
|
def customchain(**kwargsChain)
|
This decorator allows you to access ``ctx.bitshares`` which is
an instance of BitShares. In contrast to ``@chain``, this
decorator expects parameters that are passed directly to
``BitShares()``.
.. code-block:: python
@main.command()
@click.option("--worker", default=None)
@click.pass_context
@customchain(foo="bar")
@unlock
def list(ctx, worker):
print(ctx.obj)
| 3.64626
| 3.305821
| 1.102982
|
print_tx(ctx.bitshares.disapproveproposal(proposal, account=account))
|
def disapproveproposal(ctx, proposal, account)
|
Disapprove a proposal
| 10.075841
| 10.3274
| 0.975642
|
print_tx(ctx.bitshares.approveproposal(proposal, account=account))
|
def approveproposal(ctx, proposal, account)
|
Approve a proposal
| 13.655702
| 13.590752
| 1.004779
|
proposals = Proposals(account)
t = [
[
"id",
"expiration",
"proposer",
"required approvals",
"available approvals",
"review period time",
"proposal",
]
]
for proposal in proposals:
t.append(
[
proposal["id"],
proposal["expiration_time"],
Account(proposal.proposer)["name"],
[
Account(x)["name"]
for x in (
proposal["required_active_approvals"]
+ proposal["required_owner_approvals"]
)
],
json.dumps(
[Account(x)["name"] for x in proposal["available_active_approvals"]]
+ proposal["available_key_approvals"]
+ proposal["available_owner_approvals"],
indent=1,
),
proposal.get("review_period_time", None),
format_dict(proposal["proposed_transaction"]),
]
)
print_table(t)
|
def proposals(ctx, account)
|
List proposals
| 3.213194
| 3.200338
| 1.004017
|
if not file:
print_message("Prompting for message. Terminate with CTRL-D", "info")
file = click.get_text_stream("stdin")
m = Message(file.read(), bitshares_instance=ctx.bitshares)
print_message(m.sign(account), "info")
|
def sign(ctx, file, account)
|
Sign a message with an account
| 5.5675
| 5.047602
| 1.102999
|
if not file:
print_message("Prompting for message. Terminate with CTRL-D", "info")
file = click.get_text_stream("stdin")
m = Message(file.read(), bitshares_instance=ctx.bitshares)
try:
if m.verify():
print_message("Verified", "success")
else:
print_message("not verified", "error")
except InvalidMessageSignature:
print_message("Signature INVALID!", "error")
|
def verify(ctx, file, account)
|
Verify a signed message
| 4.767964
| 4.469785
| 1.06671
|
if not foreign_account:
from bitsharesbase.account import PasswordKey
pwd = click.prompt(
"Password for Key Derivation", hide_input=True, confirmation_prompt=True
)
foreign_account = format(
PasswordKey(account, pwd, permission).get_public(), "BTS"
)
print_tx(
ctx.bitshares.allow(
foreign_account,
weight=weight,
account=account,
permission=permission,
threshold=threshold,
)
)
|
def allow(ctx, foreign_account, permission, weight, threshold, account)
|
Add a key/account to an account's permission
| 4.010527
| 4.258526
| 0.941764
|
print_tx(
ctx.bitshares.disallow(
foreign_account, account=account, permission=permission, threshold=threshold
)
)
|
def disallow(ctx, foreign_account, permission, threshold, account)
|
Remove a key/account from an account's permission
| 4.973487
| 5.631776
| 0.883112
|
from bitsharesbase.operations import getOperationNameForId
t = [["#", "time (block)", "operation", "details"]]
for a in account:
account = Account(a, bitshares_instance=ctx.bitshares)
for b in account.history(limit=limit, only_ops=type, exclude_ops=exclude):
block = BlockHeader(b["block_num"])
row = [
b["id"],
"%s (%s)" % (block.time(), b["block_num"]),
"{} ({})".format(getOperationNameForId(b["op"][0]), b["op"][0]),
pprintOperation(b) if not raw else json.dumps(b, indent=4),
]
t.append(row)
print_table(t)
|
def history(ctx, account, limit, type, csv, exclude, raw)
|
Show history of an account
| 3.742115
| 3.774597
| 0.991395
|
print_tx(ctx.bitshares.transfer(to, amount, asset, memo=memo, account=account))
|
def transfer(ctx, to, amount, asset, memo, account)
|
Transfer assets
| 4.794122
| 5.444191
| 0.880594
|
t = [["Account", "Amount"]]
for a in accounts:
account = Account(a, bitshares_instance=ctx.bitshares)
for b in account.balances:
t.append([str(a), str(b)])
print_table(t)
|
def balance(ctx, accounts)
|
Show Account balances
| 3.97464
| 3.755861
| 1.05825
|
print_tx(
ctx.bitshares.create_account(accountname, registrar=account, password=password)
)
|
def newaccount(ctx, accountname, account, password)
|
Create a new account
| 9.296652
| 9.720679
| 0.956379
|
from bitsharesbase import transactions, operations
account = Account(account)
op = {
"fee": {"amount": 0, "asset_id": "1.3.0"},
"registrar": account["id"],
"referrer": account["id"],
"referrer_percent": 100,
"name": account_name,
"owner": account["owner"],
"active": account["active"],
"options": account["options"],
"extensions": {},
"prefix": ctx.bitshares.rpc.chain_params["prefix"],
}
op = operations.Account_create(**op)
print_tx(ctx.bitshares.finalizeOp(op, account, "active"))
|
def cloneaccount(ctx, account_name, account)
|
Clone an account
This copies the owner and active permissions as well as the
options (e.g. votes, memo key)
| 2.729085
| 2.738072
| 0.996718
|
print_tx(ctx.bitshares.update_memo_key(key, account=account))
|
def changememokey(ctx, key, account)
|
Change the memo key of an account
| 9.946549
| 10.176814
| 0.977374
|
account = Account(account, blockchain_instance=ctx.blockchain)
print_tx(account.whitelist(whitelist_account))
|
def whitelist(ctx, whitelist_account, account)
|
Add an account to a whitelist
| 5.628898
| 6.030807
| 0.933357
|
account = Account(account, blockchain_instance=ctx.blockchain)
print_tx(account.blacklist(blacklist_account))
|
def blacklist(ctx, blacklist_account, account)
|
Add an account to a blacklist
| 6.178247
| 7.205143
| 0.857477
|
account = Account(account, blockchain_instance=ctx.blockchain)
print_tx(account.nolist(unlist_account))
|
def unlist(ctx, unlist_account, account)
|
Remove an account from any list
| 8.323973
| 9.724113
| 0.856014
|
print_tx(ctx.bitshares.set_proxy(proxy_account, account=account))
|
def setproxy(ctx, proxy_account, account)
|
Set the proxy account for an account
| 9.37404
| 9.49457
| 0.987305
|
if obj.upper() == obj:
# Asset
from bitshares.asset import Asset
asset = Asset(obj, full=True)
calls = asset.get_call_orders(limit)
t = [["acount", "debt", "collateral", "call price", "ratio"]]
for call in calls:
t.append(
[
str(call["account"]["name"]),
str(call["debt"]),
str(call["collateral"]),
str(call["call_price"]),
"%.2f" % (call["ratio"]),
]
)
print_table(t)
else:
# Account
from bitshares.dex import Dex
dex = Dex(bitshares_instance=ctx.bitshares)
calls = dex.list_debt_positions(account=obj)
t = [["debt", "collateral", "call price", "ratio"]]
for symbol in calls:
t.append(
[
str(calls[symbol]["debt"]),
str(calls[symbol]["collateral"]),
str(calls[symbol]["call_price"]),
"%.2f" % (calls[symbol]["ratio"]),
]
)
print_table(t)
|
def calls(ctx, obj, limit)
|
List call/short positions of an account or an asset
| 2.606302
| 2.395984
| 1.087779
|
from bitshares.asset import Asset
asset = Asset(asset, full=True)
if not asset.is_bitasset:
print_message("{} is not a bitasset.".format(asset["symbol"]), "warning")
sys.exit(1)
calls = asset.get_settle_orders(limit)
t = [["acount", "amount", "date"]]
for call in calls:
t.append([str(call["account"]["name"]), str(call["amount"]), str(call["date"])])
print_table(t)
|
def settlements(ctx, asset, limit)
|
Show pending settlement orders of a bitasset
| 4.010337
| 3.552539
| 1.128865
|
print_tx(ctx.bitshares.approveworker(workers, account=account))
|
def approveworker(ctx, workers, account)
|
Approve worker(s)
| 10.809535
| 10.616469
| 1.018185
|
print_tx(ctx.bitshares.disapproveworker(workers, account=account))
|
def disapproveworker(ctx, workers, account)
|
Disapprove worker(s)
| 9.154963
| 9.669588
| 0.946779
|
workers = Workers(account)
t = [["id", "name/url", "daily_pay", "votes", "time", "account"]]
workers_sorted = sorted(
workers, key=lambda x: int(x["total_votes_for"]), reverse=True
)
if top:
workers_sorted = workers_sorted[: top + 1]
for worker in workers_sorted:
if worker["work_end_date"] < datetime.datetime.utcnow():
continue
votes = Amount({"amount": worker["total_votes_for"], "asset_id": "1.3.0"})
amount = Amount({"amount": worker["daily_pay"], "asset_id": "1.3.0"})
t.append(
[
worker["id"],
"{name}\n{url}".format(**worker),
str(amount),
str(votes),
"{work_begin_date:%Y-%m-%d}\n-\n{work_end_date:%Y-%m-%d}".format(
**worker
),
str(Account(worker["worker_account"])["name"]),
]
)
print_table(t)
|
def workers(ctx, account, top)
|
List all workers (of an account)
| 3.209443
| 3.176163
| 1.010478
|
if not key:
while True:
key = click.prompt(
"Private Key (wif) [Enter to quit]",
hide_input=True,
show_default=False,
default="exit",
)
if not key or key == "exit":
break
try:
ctx.bitshares.wallet.addPrivateKey(key)
except Exception as e:
click.echo(str(e))
continue
else:
for k in key:
try:
ctx.bitshares.wallet.addPrivateKey(k)
except Exception as e:
click.echo(str(e))
installedKeys = ctx.bitshares.wallet.getPublicKeys()
if len(installedKeys) == 1:
name = ctx.bitshares.wallet.getAccountFromPublicKey(installedKeys[0])
if name: # only if a name to the key was found
account = Account(name, bitshares_instance=ctx.bitshares)
click.echo("=" * 30)
click.echo("Setting new default user: %s" % account["name"])
click.echo()
click.echo("You can change these settings with:")
click.echo(" uptick set default_account <account>")
click.echo("=" * 30)
config["default_account"] = account["name"]
|
def addkey(ctx, key)
|
Add a private key to the wallet
| 3.162064
| 3.080711
| 1.026407
|
if not pubkeys:
pubkeys = click.prompt("Public Keys").split(" ")
if click.confirm(
"Are you sure you want to delete keys from your wallet?\n"
"This step is IRREVERSIBLE! If you don't have a backup, "
"You may lose access to your account!"
):
for pub in pubkeys:
ctx.bitshares.wallet.removePrivateKeyFromPublicKey(pub)
|
def delkey(ctx, pubkeys)
|
Delete a private key from the wallet
| 4.968413
| 4.902896
| 1.013363
|
click.echo(ctx.bitshares.wallet.getPrivateKeyForPublicKey(pubkey))
|
def getkey(ctx, pubkey)
|
Obtain private key in WIF format
| 15.071202
| 14.043494
| 1.07318
|
t = [["Available Key"]]
for key in ctx.bitshares.wallet.getPublicKeys():
t.append([key])
print_table(t)
|
def listkeys(ctx)
|
List all keys (for all networks)
| 11.114768
| 9.596073
| 1.158262
|
t = [["Name", "Key", "Owner", "Active", "Memo"]]
for key in tqdm(ctx.bitshares.wallet.getPublicKeys(True)):
for account in ctx.bitshares.wallet.getAccountsFromPublicKey(key):
account = Account(account)
is_owner = key in [x[0] for x in account["owner"]["key_auths"]]
is_active = key in [x[0] for x in account["active"]["key_auths"]]
is_memo = key == account["options"]["memo_key"]
t.append([
account["name"],
key,
"x" if is_owner else "",
"x" if is_active else "",
"x" if is_memo else "",
])
print_table(t)
|
def listaccounts(ctx)
|
List accounts (for the connected network)
| 2.750137
| 2.754805
| 0.998306
|
from bitsharesbase.account import PasswordKey
password = click.prompt("Account Passphrase", hide_input=True)
account = Account(account, bitshares_instance=ctx.bitshares)
imported = False
if role == "owner":
owner_key = PasswordKey(account["name"], password, role="owner")
owner_pubkey = format(
owner_key.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"]
)
if owner_pubkey in [x[0] for x in account["owner"]["key_auths"]]:
print_message("Importing owner key!")
owner_privkey = owner_key.get_private_key()
ctx.bitshares.wallet.addPrivateKey(owner_privkey)
imported = True
if role == "active":
active_key = PasswordKey(account["name"], password, role="active")
active_pubkey = format(
active_key.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"]
)
if active_pubkey in [x[0] for x in account["active"]["key_auths"]]:
print_message("Importing active key!")
active_privkey = active_key.get_private_key()
ctx.bitshares.wallet.addPrivateKey(active_privkey)
imported = True
if role == "memo":
memo_key = PasswordKey(account["name"], password, role=role)
memo_pubkey = format(
memo_key.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"]
)
if memo_pubkey == account["memo_key"]:
print_message("Importing memo key!")
memo_privkey = memo_key.get_private_key()
ctx.bitshares.wallet.addPrivateKey(memo_privkey)
imported = True
if not imported:
print_message("No matching key(s) found. Password correct?", "error")
|
def importaccount(ctx, account, role)
|
Import an account using an account password
| 1.85939
| 1.835356
| 1.013095
|
import shutil
this_dir, this_filename = os.path.split(__file__)
default_config_file = os.path.join(this_dir, "apis/example-config.yaml")
config_file = ctx.obj["configfile"]
shutil.copyfile(default_config_file, config_file)
print_message("Config file created: {}".format(config_file))
|
def create(ctx)
|
Create default config file
| 3.696256
| 3.238818
| 1.141236
|
module = ctx.config.get("api", "poloniex")
# unlockWallet
if module == "poloniex":
from .apis import poloniex
poloniex.run(ctx, port=5000)
else:
print_message("Unkown 'api'!", "error")
|
def start(ctx)
|
Start the API according to the config file
| 9.75337
| 8.414962
| 1.159051
|
timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT)
kwargs['return_crawler'] = True
crawler = wait_for(timeout, _fetch_in_reactor, url, **kwargs)
if hasattr(crawler.spider, 'response'):
return crawler.spider.response
|
def fetch(url, **kwargs)
|
Fetches a URL and returns the response.
Parameters
----------
url : str
A URL to crawl.
spider_cls : scrapy.Spider (default: DefaultSpider)
A spider class to be used in the crawler instance.
capture_items : bool (default: True)
If enabled, the scraped items are captured and returned.
return_crawler : bool (default: False)
If enabled, the crawler instance is returned. If ``capture_items`` is
enabled, the scraped items are collected in ``crawler.items``.
settings : dict, optional
Custom crawler settings.
timeout : int, (default: DEFAULT_TIMEOUT)
Result wait timeout.
Returns
-------
out : Response or None
Returns a ``Response`` instance if the crawler is able to retrieve a
response, otherwise it returns ``None``.
Raises
------
crochet.TimeoutError
| 5.473898
| 6.750481
| 0.81089
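A short usage sketch for ``fetch``; the module name ``scrapydo`` and its ``setup()`` initializer (which starts the crochet reactor) are assumptions, as is network access.

```python
import scrapydo  # assumed module name for the helpers in these rows

scrapydo.setup()  # assumed to initialize the crochet reactor
response = scrapydo.fetch('https://example.com', timeout=30)
if response is not None:  # None means no response was retrieved
    print(response.status, len(response.body))
```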
|
timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT)
return wait_for(timeout, _crawl_in_reactor, url, callback, **kwargs)
|
def crawl(url, callback, **kwargs)
|
Crawls a URL with the given callback.
Parameters
----------
url : str
A URL to crawl.
callback : callable
A function to be used as spider callback for the given URL.
spider_cls : scrapy.Spider (default: DefaultSpider)
A spider class to be used in the crawler instance.
capture_items : bool (default: True)
If enabled, the scraped items are captured and returned.
return_crawler : bool (default: False)
If enabled, the crawler instance is returned. If ``capture_items`` is
enabled, the scraped items are collected in ``crawler.items``.
settings : dict, optional
Custom crawler settings.
timeout : int, (default: DEFAULT_TIMEOUT)
Result wait timeout.
Returns
-------
out
By default, the scraped items. If ``return_crawler`` is ``True``,
returns the crawler instance.
Raises
------
crochet.TimeoutError
| 5.60219
| 13.395566
| 0.418212
|
timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT)
return wait_for(timeout, _run_spider_in_reactor, spider_cls, **kwargs)
|
def run_spider(spider_cls, **kwargs)
|
Runs a spider and returns the scraped items (by default).
Parameters
----------
spider_cls : scrapy.Spider
A spider class to run.
capture_items : bool (default: True)
If enabled, the scraped items are captured and returned.
return_crawler : bool (default: False)
If enabled, the crawler instance is returned. If ``capture_items`` is
enabled, the scraped items are collected in ``crawler.items``.
settings : dict, optional
Custom crawler settings.
timeout : int, (default: DEFAULT_TIMEOUT)
Result wait timeout.
Returns
-------
out : list or scrapy.crawler.Crawler instance
The scraped items by default or the crawler instance if
``return_crawler`` is ``True``.
Raises
------
crochet.TimeoutError
| 5.071106
| 10.010095
| 0.506599
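A sketch of ``run_spider`` with a throwaway spider; the ``scrapydo`` module name is assumed as in the ``fetch`` sketch above, and ``QuotesSpider`` is hypothetical.

```python
import scrapy
import scrapydo  # assumed module name

scrapydo.setup()

class QuotesSpider(scrapy.Spider):
    name = 'quotes'
    start_urls = ['http://quotes.toscrape.com']

    def parse(self, response):
        for text in response.css('span.text::text').getall():
            yield {'text': text}

# returns the captured items, since capture_items defaults to True
items = scrapydo.run_spider(QuotesSpider, timeout=60)
print(len(items))
```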
|
def parse(self, response):
self.response = response
req = Request(url) if isinstance(url, six.string_types) else url
req.dont_filter = True
req.meta['handle_httpstatus_all'] = True
spider_cls = override_start_requests(spider_cls, [req], parse=parse)
return _run_spider_in_reactor(spider_cls, **kwargs)
|
def _fetch_in_reactor(url, spider_cls=DefaultSpider, **kwargs)
|
Fetches a URL and returns the response.
Parameters
----------
url : str
A URL to fetch.
spider_cls : scrapy.Spider (default: DefaultSpider)
A spider class to be used in the crawler.
kwargs : dict, optional
Additional arguments to be passed to ``_run_spider_in_reactor``.
Returns
-------
crochet.EventualResult
| 4.087933
| 4.092443
| 0.998898
|
spider_cls = override_start_requests(spider_cls, [url], callback)
return _run_spider_in_reactor(spider_cls, **kwargs)
|
def _crawl_in_reactor(url, callback, spider_cls=DefaultSpider, **kwargs)
|
Crawls the given URL with the given callback.
Parameters
----------
url : str
The URL to crawl.
callback : callable
Function to be used as callback for the request.
spider_cls : scrapy.Spider (default: DefaultSpider)
A spider class to be used in the crawler instance.
kwargs : dict, optional
Extra arguments to be passed to ``_run_spider_in_reactor``.
Returns
-------
crochet.EventualResult
| 4.403628
| 8.566352
| 0.514061
|
settings = settings or {}
crawler_settings = get_project_settings().copy()
crawler_settings.setdict(default_settings)
crawler_settings.setdict(settings)
log_scrapy_info(crawler_settings)
crawler = Crawler(spider_cls, crawler_settings)
d = crawler.crawl(**kwargs)
if capture_items:
crawler.items = _OutputItems()
crawler.signals.connect(crawler.items.append, signal=signals.item_scraped)
d.addCallback(lambda _: crawler.items)
if return_crawler:
d.addCallback(lambda _: crawler)
return d
|
def _run_spider_in_reactor(spider_cls, capture_items=True, return_crawler=False,
settings=None, **kwargs)
|
Runs the given spider inside the Twisted reactor.
Parameters
----------
spider_cls : scrapy.Spider
Spider to run.
capture_items : bool (default: True)
If enabled, the scraped items are captured and returned.
return_crawler : bool (default: False)
If enabled, the crawler instance is returned. If ``capture_items`` is
enabled, the scraped items are collected in ``crawler.items``.
settings : dict, optional
Custom crawler settings.
Returns
-------
out : crochet.EventualResult
If ``capture_items`` is ``True``, returns scraped items. If
``return_crawler`` is ``True``, returns the crawler instance.
| 2.542212
| 2.971125
| 0.855639
|
def start_requests():
for url in start_urls:
req = Request(url, dont_filter=True) if isinstance(url, six.string_types) else url
if callback is not None:
req.callback = callback
yield req
attrs['start_requests'] = staticmethod(start_requests)
return type(spider_cls.__name__, (spider_cls, ), attrs)
|
def override_start_requests(spider_cls, start_urls, callback=None, **attrs)
|
Returns a new spider class overriding the ``start_requests``.
This function is useful to replace the start requests of an existing spider
class on runtime.
Parameters
----------
spider_cls : scrapy.Spider
Spider class to be used as base class.
start_urls : iterable
Iterable of URLs or ``Request`` objects.
callback : callable, optional
Callback for the start URLs.
attrs : dict, optional
Additional class attributes.
Returns
-------
out : class
A subclass of ``spider_cls`` with an overridden ``start_requests`` method.
| 2.369928
| 2.425295
| 0.977171
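A sketch of ``override_start_requests`` repointing an existing spider at new URLs at runtime; ``BaseSpider`` and ``log_page`` are hypothetical.

```python
import scrapy

class BaseSpider(scrapy.Spider):  # stands in for any existing spider class
    name = 'base'

def log_page(response):
    print(response.url, response.status)

# NewSpider is a subclass of BaseSpider whose start_requests() yields one
# Request per URL, each routed to log_page
NewSpider = override_start_requests(
    BaseSpider,
    ['https://example.com', 'https://example.org'],
    callback=log_page,
)
```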
|
result = func(*args, **kwargs)
try:
return result.wait(timeout)
except crochet.TimeoutError:
result.cancel()
raise
|
def wait_for(timeout, func, *args, **kwargs)
|
Waits for an eventual result.
Parameters
----------
timeout : int
How much time to wait, in seconds.
func : callable
A function that returns ``crochet.EventualResult``.
args : tuple, optional
Arguments for ``func``.
kwargs : dict, optional
Keyword arguments for ``func``.
Returns
-------
out
Given ``func`` result.
Raises
------
crochet.TimeoutError
| 4.236264
| 3.731719
| 1.135204
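A sketch of the ``wait_for`` pattern with crochet: ``run_in_reactor`` returns a ``crochet.EventualResult``, which ``wait_for`` unwraps with a timeout; ``delayed_value`` is hypothetical.

```python
import crochet
crochet.setup()

@crochet.run_in_reactor
def delayed_value(value):
    # returns a Deferred; run_in_reactor wraps it in an EventualResult
    from twisted.internet import reactor
    from twisted.internet.task import deferLater
    return deferLater(reactor, 0.1, lambda: value)

print(wait_for(5, delayed_value, 42))  # blocks up to 5 seconds, prints 42
```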
|
if not pygments:
raise TypeError("pygments module required")
if not isinstance(code, six.string_types):
code = pprint.pformat(code)
if isinstance(lexer, six.string_types):
lexer = pygments.lexers.get_lexer_by_name(lexer)
if isinstance(formatter, six.string_types):
formatter = pygments.formatters.get_formatter_by_name(formatter)
if formatter.name.lower() == 'html':
formatter.full = True
formatter.cssclass = "pygments-%s" % uuid.uuid4()
if output_wrapper is None:
output_wrapper = HTML
return output_wrapper(pygments.highlight(code, lexer, formatter))
|
def highlight(code, lexer='html', formatter='html', output_wrapper=None)
|
Highlights given code using pygments.
| 2.274359
| 2.265998
| 1.003689
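A usage sketch for ``highlight``, assuming ``output_wrapper`` defaults to IPython's ``HTML`` display object (so the result renders best in a notebook).

```python
# pygments must be importable; the default output_wrapper is assumed to be
# IPython's HTML object
out = highlight("print('hello')", lexer='python', formatter='html')
# `out` wraps a complete pygments HTML document with a unique CSS class,
# per the formatter.full / formatter.cssclass settings in the code above
```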
|
'''
get the first unused supval table key, or 0 if the
table is empty.
useful for filling the supval table sequentially.
'''
slot = self._n.suplast(tag)
if slot is None or slot == idaapi.BADNODE:
return 0
else:
return slot + 1
|
def _get_next_slot(self, tag)
|
get the first unused supval table key, or 0 if the
table is empty.
useful for filling the supval table sequentially.
| 13.822358
| 3.496503
| 3.953195
|
'''
Runs the git executable with the arguments given and returns a list of
lines produced on its standard output.
'''
popen_kwargs = {
'stdout': subprocess.PIPE,
'stderr': subprocess.PIPE,
}
if git_env:
popen_kwargs['env'] = git_env
if self._git_toplevel:
popen_kwargs['cwd'] = self._git_toplevel
git_process = subprocess.Popen(
[GitRunner._git_executable] + args,
**popen_kwargs
)
try:
out, err = git_process.communicate()
git_process.wait()
except Exception as e:
raise GitError("Couldn't run 'git {args}':{newline}{ex}".format(
args=' '.join(args),
newline=os.linesep,
ex=str(e)
))
if (0 != git_process.returncode) or err:
if err:
err = err.decode('utf_8')
raise GitError("'git {args}' failed with:{newline}{err}".format(
args=' '.join(args),
newline=os.linesep,
err=err
))
if not out:
raise ValueError("No output")
return out.decode('utf_8').splitlines()
|
def run_git(self, args, git_env=None)
|
Runs the git executable with the arguments given and returns a list of
lines produced on its standard output.
| 2.601884
| 2.279753
| 1.141301
|
algo_id = {'sha1': 1, 'sha384': 2}[signing_algorithm]
if not signature:
signature = make_dummy_signature(algo_id)
src_fileobj.seek(0)
mardata = mar.parse_stream(src_fileobj)
# Header
header = mardata.header
dest_fileobj.write(mar_header.build(header))
# Signature block
sig = dict(algorithm_id=algo_id,
size=len(signature),
signature=signature,
)
# This will be fixed up later
filesize = 0
sigs_offset = dest_fileobj.tell()
sigs = sigs_header.build(dict(
filesize=filesize,
count=1,
sigs=[sig],
))
dest_fileobj.write(sigs)
# Write the additional section
dest_fileobj.write(extras_header.build(mardata.additional))
# Write the data
data_offset = dest_fileobj.tell()
src_fileobj.seek(mardata.data_offset)
write_to_file(takeexactly(src_fileobj, mardata.data_length), dest_fileobj)
# Write the index
index_offset = dest_fileobj.tell()
index = mardata.index
# Adjust the offsets
data_offset_delta = data_offset - mardata.data_offset
for e in index.entries:
e.offset += data_offset_delta
dest_fileobj.write(index_header.build(index))
filesize = dest_fileobj.tell()
# Go back and update the index offset and filesize
dest_fileobj.seek(0)
header.index_offset = index_offset
dest_fileobj.write(mar_header.build(header))
dest_fileobj.seek(sigs_offset)
sigs = sigs_header.build(dict(
filesize=filesize,
count=1,
sigs=[sig],
))
dest_fileobj.write(sigs)
|
def add_signature_block(src_fileobj, dest_fileobj, signing_algorithm, signature=None)
|
Add a signature block to a MAR file.
Productversion and channel are preserved, but any existing signatures are overwritten.
Args:
src_fileobj (file object): The input MAR file to add a signature to
dest_fileobj (file object): File object to write new MAR file to. Must be open in w+b mode.
signing_algorithm (str): One of 'sha1', or 'sha384'
signature (bytes): Signature to write, or None to use a dummy signature
| 2.860553
| 2.730783
| 1.047521
|
if os.path.isdir(path):
self.add_dir(path, compress)
else:
self.add_file(path, compress)
|
def add(self, path, compress=None)
|
Add `path` to the MAR file.
If `path` is a file, it will be added directly.
If `path` is a directory, it will be traversed recursively and all
files inside will be added.
Args:
path (str): path to file or directory on disk to add to this MAR
file
compress (str): One of 'xz', 'bz2', or None. Defaults to None.
| 2.32978
| 3.095724
| 0.75258
|
if not os.path.isdir(path):
raise ValueError('{} is not a directory'.format(path))
for root, dirs, files in os.walk(path):
for f in files:
self.add_file(os.path.join(root, f), compress)
|
def add_dir(self, path, compress)
|
Add all files under directory `path` to the MAR file.
Args:
path (str): path to directory to add to this MAR file
compress (str): One of 'xz', 'bz2', or None. Defaults to None.
| 1.887541
| 2.026822
| 0.931281
|
f = file_iter(fileobj)
flags = flags or os.stat(path).st_mode & 0o777
return self.add_stream(f, path, compress, flags)
|
def add_fileobj(self, fileobj, path, compress, flags=None)
|
Add the contents of a file object to the MAR file.
Args:
fileobj (file-like object): open file object
path (str): name of this file in the MAR file
compress (str): One of 'xz', 'bz2', or None. Defaults to None.
flags (int): permission of this file in the MAR file. Defaults to the permissions of `path`
| 5.677062
| 5.673299
| 1.000663
|
self.data_fileobj.seek(self.last_offset)
if compress == 'bz2':
stream = bz2_compress_stream(stream)
elif compress == 'xz':
stream = xz_compress_stream(stream)
elif compress is None:
pass
else:
raise ValueError('Unsupported compression type: {}'.format(compress))
size = write_to_file(stream, self.data_fileobj)
# On Windows, convert \ to /
# very difficult to mock this out for coverage on linux
if os.sep == '\\': # pragma: no cover
path = path.replace('\\', '/')
e = dict(
name=six.u(path),
offset=self.last_offset,
size=size,
flags=flags,
)
self.entries.append(e)
self.last_offset += e['size']
|
def add_stream(self, stream, path, compress, flags)
|
Add the contents of an iterable to the MAR file.
Args:
stream (iterable): yields blocks of data
path (str): name of this file in the MAR file
compress (str): One of 'xz', 'bz2', or None. Defaults to None.
flags (int): permission of this file in the MAR file
| 3.564245
| 3.529292
| 1.009904
|
if not os.path.isfile(path):
raise ValueError('{} is not a file'.format(path))
self.fileobj.seek(self.last_offset)
with open(path, 'rb') as f:
flags = os.stat(path).st_mode & 0o777
self.add_fileobj(f, path, compress, flags)
|
def add_file(self, path, compress)
|
Add a single file to the MAR file.
Args:
path (str): path to a file to add to this MAR file.
compress (str): One of 'xz', 'bz2', or None. Defaults to None.
| 3.033654
| 3.10813
| 0.976038
|
self.fileobj.seek(0)
header = mar_header.build(dict(index_offset=self.last_offset))
self.fileobj.write(header)
|
def write_header(self)
|
Write the MAR header to the file.
The MAR header includes the MAR magic bytes as well as the offset to
where the index data can be found.
| 7.753555
| 5.283063
| 1.467625
|
if not self.signing_algorithm:
return []
algo_id = {'sha1': 1, 'sha384': 2}[self.signing_algorithm]
signature = make_dummy_signature(algo_id)
return [(algo_id, signature)]
|
def dummy_signatures(self)
|
Create a dummy signature.
This is used when initially writing the MAR header and we don't know
what the final signature data will be.
Returns:
Fake signature data suitable for writing to the header with
.write_signatures()
| 5.268606
| 4.946313
| 1.065158
|
if not self.signing_algorithm:
return []
algo_id = {'sha1': 1, 'sha384': 2}[self.signing_algorithm]
hashers = [(algo_id, make_hasher(algo_id))]
for block in get_signature_data(self.fileobj, self.filesize):
[h.update(block) for (_, h) in hashers]
signatures = [(algo_id, sign_hash(self.signing_key, h.finalize(), h.algorithm.name)) for (algo_id, h) in hashers]
return signatures
|
def calculate_signatures(self)
|
Calculate the signatures for this MAR file.
Returns:
A list of signature tuples: [(algorithm_id, signature_data), ...]
| 4.505165
| 4.264867
| 1.056343
|
self.fileobj.seek(self.signature_offset)
sig_entries = [dict(algorithm_id=id_,
size=len(sig),
signature=sig)
for (id_, sig) in signatures]
sigs = sigs_header.build(dict(
filesize=self.filesize,
count=len(signatures),
sigs=sig_entries,
))
self.fileobj.write(sigs)
signatures_len = len(sigs)
self.additional_offset = self.signature_offset + signatures_len
# sanity check; this should never happen
if not self.additional_offset == self.fileobj.tell(): # pragma: no cover
raise IOError('ended up at unexpected offset')
|
def write_signatures(self, signatures)
|
Write signature data to the MAR file.
Args:
signatures (list): list of signature tuples of the form
(algorithm_id, signature_data)
| 4.534114
| 4.310758
| 1.051814
|
self.fileobj.seek(self.additional_offset)
extras = extras_header.build(dict(
count=1,
sections=[dict(
channel=six.u(channel),
productversion=six.u(productversion),
size=len(channel) + len(productversion) + 2 + 8,
padding=b'',
)],
))
self.fileobj.write(extras)
self.last_offset = self.fileobj.tell()
|
def write_additional(self, productversion, channel)
|
Write the additional information to the MAR header.
Args:
productversion (str): product and version string
channel (str): channel string
| 4.664165
| 4.816957
| 0.96828
|
self.fileobj.seek(self.last_offset)
index = index_header.build(dict(entries=self.entries))
self.fileobj.write(index)
self.filesize = self.fileobj.tell()
|
def write_index(self)
|
Write the index of all our files to the MAR file.
| 4.513372
| 4.468246
| 1.010099
|
# Update the last_offset in the mar header
self.write_header()
# Write out the index of contents
self.write_index()
if not self.use_old_format:
# Refresh the signature
sigs = self.calculate_signatures()
self.write_signatures(sigs)
|
def finish(self)
|
Finalize the MAR file.
The MAR header, index and signatures need to be updated once we've
finished adding all the files.
| 10.528947
| 6.651971
| 1.582831
|
parser = ArgumentParser('Utility for managing MAR files')
create_group = parser.add_argument_group("Create a MAR file")
create_group.add_argument("-c", "--create", metavar="MARFILE", help="create MAR")
create_group.add_argument("-V", "--productversion", dest="productversion",
help="product/version string")
create_group.add_argument("-H", "--channel", dest="channel",
help="channel this MAR file is applicable to")
create_group.add_argument("files", nargs=REMAINDER,
help="files to add to the MAR file")
extract_group = parser.add_argument_group("Extract a MAR file")
extract_group.add_argument("-x", "--extract", help="extract MAR", metavar="MARFILE")
list_group = parser.add_argument_group("Print information on a MAR file")
list_group.add_argument("-t", "--list", help="print out MAR contents",
metavar="MARFILE")
list_group.add_argument("-T", "--list-detailed", metavar="MARFILE",
help="print out MAR contents including signatures")
verify_group = parser.add_argument_group("Verify a MAR file")
verify_group.add_argument("-v", "--verify", metavar="MARFILE",
help="verify the marfile")
parser.add_argument("-j", "--bzip2", action="store_const", dest="compression",
const="bz2", help="compress/decompress members with BZ2")
parser.add_argument("-J", "--xz", action="store_const", dest="compression",
const="xz", help="compress/decompress archive with XZ")
parser.add_argument("--auto", action="store_const", dest="compression",
const="auto", help="automatically decompress contents")
parser.add_argument("-k", "--keyfiles", dest="keyfiles", action='append',
help="sign/verify with given key(s)")
parser.add_argument("-C", "--chdir", dest="chdir",
help="chdir to this directory before creating or "
"extracting; location of marfile isn't affected by "
"this option.")
parser.add_argument("--verbose", dest="loglevel", action="store_const",
const=logging.DEBUG, default=logging.WARN,
help="increase logging verbosity")
parser.add_argument('--version', action='version', version='mar version {}'.format(mardor.version_str))
signing_group = parser.add_argument_group('Sign a MAR file')
signing_group.add_argument('--hash', help='output hash for signing', choices=('sha1', 'sha384'))
signing_group.add_argument('--asn1', help='format hash as an ASN1 DigestInfo block',
default=False, action='store_true')
signing_group.add_argument('--add-signature', help='inject signature', nargs=3,
metavar=('input', 'output', 'signature'))
return parser
|
def build_argparser()
|
Build argument parser for the CLI.
| 3.001522
| 2.986075
| 1.005173
|
with open(marfile, 'rb') as f:
with MarReader(f) as m:
m.extract(str(destdir), decompress=decompress)
|
def do_extract(marfile, destdir, decompress)
|
Extract the MAR file to the destdir.
| 3.339479
| 3.344159
| 0.998601
|
builtin_keys = {
('release', 'sha1'): [mardor.mozilla.release1_sha1, mardor.mozilla.release2_sha1],
('release', 'sha384'): [mardor.mozilla.release1_sha384, mardor.mozilla.release2_sha384],
('nightly', 'sha1'): [mardor.mozilla.nightly1_sha1, mardor.mozilla.nightly2_sha1],
('nightly', 'sha384'): [mardor.mozilla.nightly1_sha384, mardor.mozilla.nightly2_sha384],
('dep', 'sha1'): [mardor.mozilla.dep1_sha1, mardor.mozilla.dep2_sha1],
('dep', 'sha384'): [mardor.mozilla.dep1_sha384, mardor.mozilla.dep2_sha384],
('autograph-stage', 'sha384'): [mardor.mozilla.autograph_stage_sha384],
}
keys = []
for keyfile in keyfiles:
if keyfile.startswith(':mozilla-'):
name = keyfile.split(':mozilla-')[1]
try:
keys.extend(builtin_keys[name, signature_type])
except KeyError:
raise ValueError('Invalid internal key name: {}'
.format(keyfile))
else:
key = open(keyfile, 'rb').read()
keys.append(key)
return keys
|
def get_keys(keyfiles, signature_type)
|
Get public keys for the given keyfiles.
Args:
keyfiles: List of filenames with public keys, or :mozilla- prefixed key
names
signature_type: one of 'sha1' or 'sha384'
Returns:
List of public keys as strings
| 2.124178
| 1.966576
| 1.080141
|
try:
with open(marfile, 'rb') as f:
with MarReader(f) as m:
# Check various parts of the mar file
# e.g. signature algorithms and additional block sections
errors = m.get_errors()
if errors:
print("File is not well formed: {}".format(errors))
sys.exit(1)
if keyfiles:
try:
keys = get_keys(keyfiles, m.signature_type)
except ValueError as e:
print(e)
sys.exit(1)
if any(m.verify(key) for key in keys):
print("Verification OK")
return True
else:
print("Verification failed")
sys.exit(1)
else:
print("Verification OK")
return True
except Exception as e:
print("Error opening or parsing file: {}".format(e))
sys.exit(1)
|
def do_verify(marfile, keyfiles=None)
|
Verify the MAR file.
| 3.226481
| 3.223907
| 1.000798
|
with open(marfile, 'rb') as f:
with MarReader(f) as m:
if detailed:
if m.compression_type:
yield "Compression type: {}".format(m.compression_type)
if m.signature_type:
yield "Signature type: {}".format(m.signature_type)
if m.mardata.signatures:
plural = "s" if (m.mardata.signatures.count == 0 or m.mardata.signatures.count > 1) else ""
yield "Signature block found with {} signature{}".format(m.mardata.signatures.count, plural)
for s in m.mardata.signatures.sigs:
yield "- Signature {} size {}".format(s.algorithm_id, s.size)
yield ""
if m.mardata.additional:
yield "{} additional block found:".format(len(m.mardata.additional.sections))
for s in m.mardata.additional.sections:
if s.id == 1:
yield (" - Product Information Block:")
yield (" - MAR channel name: {}".format(s.channel))
yield (" - Product version: {}".format(s.productversion))
yield ""
else:
yield ("Unknown additional data")
yield ("{:7s} {:7s} {:7s}".format("SIZE", "MODE", "NAME"))
for e in m.mardata.index.entries:
yield ("{:<7d} {:04o} {}".format(e.size, e.flags, e.name))
|
def do_list(marfile, detailed=False)
|
List the MAR file.
Yields lines of text to output
| 3.567131
| 3.558783
| 1.002346
|
with open(marfile, 'w+b') as f:
with MarWriter(f, productversion=productversion, channel=channel,
signing_key=signing_key,
signing_algorithm=signing_algorithm,
) as m:
for f in files:
m.add(f, compress=compress)
|
def do_create(marfile, files, compress, productversion=None, channel=None,
signing_key=None, signing_algorithm=None)
|
Create a new MAR file.
| 2.507268
| 2.379269
| 1.053798
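A usage sketch mirroring ``do_create`` above; the import path ``mardor.writer.MarWriter`` is an assumption, and the file names are placeholders.

```python
from mardor.writer import MarWriter  # assumed import path

# write a file into a new MAR archive with xz-compressed members;
# add() accepts both files and directories
with open('out.mar', 'w+b') as f:
    with MarWriter(f, productversion='99.0', channel='release') as m:
        m.add('file1.txt', compress='xz')
```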
|
# Add a dummy signature into a temporary file
dst = tempfile.TemporaryFile()
with open(marfile, 'rb') as f:
add_signature_block(f, dst, hash_algo)
dst.seek(0)
with MarReader(dst) as m:
hashes = m.calculate_hashes()
h = hashes[0][1]
if asn1:
h = format_hash(h, hash_algo)
print(base64.b64encode(h).decode('ascii'))
|
def do_hash(hash_algo, marfile, asn1=False)
|
Output the hash for this MAR file.
| 4.391128
| 4.400266
| 0.997923
|
signature = open(signature_file, 'rb').read()
if len(signature) == 256:
hash_algo = 'sha1'
elif len(signature) == 512:
hash_algo = 'sha384'
else:
raise ValueError('unexpected signature length: {}'.format(len(signature)))
with open(output_file, 'w+b') as dst:
with open(input_file, 'rb') as src:
add_signature_block(src, dst, hash_algo, signature)
|
def do_add_signature(input_file, output_file, signature_file)
|
Add a signature to the MAR file.
| 2.49066
| 2.50306
| 0.995046
|
# Make sure only one action has been specified
if len([a for a in [args.create, args.extract, args.verify, args.list,
args.list_detailed, args.hash, args.add_signature] if a
is not None]) != 1:
parser.error("Must specify something to do (one of -c, -x, -t, -T, -v, --hash, --add-signature)")
if args.create and not args.files:
parser.error("Must specify at least one file to add to marfile")
if args.extract and args.compression not in (None, 'bz2', 'xz', 'auto'): # pragma: nocover
parser.error('Unsupported compression type')
if args.create and args.compression not in (None, 'bz2', 'xz'): # pragma: nocover
parser.error('Unsupported compression type')
if args.hash and len(args.files) != 1:
parser.error("Must specify a file to output the hash for")
|
def check_args(parser, args)
|
Validate commandline arguments.
| 3.813215
| 3.763327
| 1.013256
|
if args.keyfiles:
signing_key = open(args.keyfiles[0], 'rb').read()
bits = get_keysize(signing_key)
if bits == 2048:
signing_algorithm = 'sha1'
elif bits == 4096:
signing_algorithm = 'sha384'
else:
parser.error("Unsupported key size {} from key {}".format(bits, args.keyfiles[0]))
print("Using {} to sign using algorithm {!s}".format(args.keyfiles[0], signing_algorithm))
else:
signing_key = None
signing_algorithm = None
return signing_key, signing_algorithm
|
def get_key_from_cmdline(parser, args)
|
Return the signing key and signing algorithm from the command line.
| 2.980557
| 2.620959
| 1.137201
|
parser = build_argparser()
args = parser.parse_args(argv)
logging.basicConfig(level=args.loglevel, format="%(message)s")
check_args(parser, args)
if args.extract:
marfile = os.path.abspath(args.extract)
if args.chdir:
os.chdir(args.chdir)
do_extract(marfile, os.getcwd(), args.compression)
elif args.verify:
do_verify(args.verify, args.keyfiles)
elif args.list:
print("\n".join(do_list(args.list)))
elif args.list_detailed:
print("\n".join(do_list(args.list_detailed, detailed=True)))
elif args.create:
marfile = os.path.abspath(args.create)
signing_key, signing_algorithm = get_key_from_cmdline(parser, args)
if args.chdir:
os.chdir(args.chdir)
do_create(marfile, args.files, args.compression,
productversion=args.productversion, channel=args.channel,
signing_key=signing_key, signing_algorithm=signing_algorithm)
elif args.hash:
do_hash(args.hash, args.files[0], args.asn1)
elif args.add_signature:
do_add_signature(args.add_signature[0], args.add_signature[1], args.add_signature[2])
# sanity check; should never happen
else: # pragma: no cover
parser.error("Unsupported action")
|
def main(argv=None)
|
Run the main CLI entry point.
| 2.920267
| 2.893062
| 1.009404
|
best_compression = None
for e in self.mardata.index.entries:
self.fileobj.seek(e.offset)
magic = self.fileobj.read(10)
compression = guess_compression(magic)
if compression == 'xz':
best_compression = 'xz'
break
elif compression == 'bz2' and best_compression is None:
best_compression = 'bz2'
return best_compression
|
def compression_type(self)
|
Return the best compression type used in this MAR.
Returns:
One of None, 'bz2', or 'xz'
| 4.306187
| 3.496471
| 1.231581
|
if not self.mardata.signatures:
return None
for sig in self.mardata.signatures.sigs:
if sig.algorithm_id == 1:
return 'sha1'
elif sig.algorithm_id == 2:
return 'sha384'
else:
return 'unknown'
|
def signature_type(self)
|
Return the signature type used in this MAR.
Returns:
One of None, 'unknown', 'sha1', or 'sha384'
| 4.799288
| 3.060276
| 1.568253
|
self.fileobj.seek(e.offset)
stream = file_iter(self.fileobj)
stream = takeexactly(stream, e.size)
if decompress == 'auto':
stream = auto_decompress_stream(stream)
elif decompress == 'bz2':
stream = bz2_decompress_stream(stream)
elif decompress == 'xz':
stream = xz_decompress_stream(stream)
elif decompress is None:
pass
else:
raise ValueError("Unsupported decompression type: {}".format(decompress))
for block in stream:
yield block
|
def extract_entry(self, e, decompress='auto')
|
Yield blocks of data for this entry from this MAR file.
Args:
e (:obj:`mardor.format.index_entry`): An index_entry object that
refers to this file's size and offset inside the MAR file.
decompress (str, optional): Controls whether files are decompressed
when extracted. Must be one of None, 'auto', 'bz2', or 'xz'.
Defaults to 'auto'
Yields:
Blocks of data for `e`
| 2.69769
| 2.49558
| 1.080987
|
for e in self.mardata.index.entries:
name = e.name
entry_path = safejoin(destdir, name)
entry_dir = os.path.dirname(entry_path)
mkdir(entry_dir)
with open(entry_path, 'wb') as f:
write_to_file(self.extract_entry(e, decompress), f)
os.chmod(entry_path, e.flags)
|
def extract(self, destdir, decompress='auto')
|
Extract the entire MAR file into a directory.
Args:
destdir (str): A local directory on disk into which the contents of
this MAR file will be extracted. Required parent directories
will be created as necessary.
decompress (obj, optional): Controls whether files are decompressed
when extracted. Must be one of 'auto' or None. Defaults to
'auto'.
| 4.287914
| 4.014601
| 1.06808
|
errors = []
errors.extend(self._get_signature_errors())
errors.extend(self._get_additional_errors())
errors.extend(self._get_entry_errors())
return errors if errors else None
|
def get_errors(self)
|
Verify that this MAR file is well formed.
Returns:
A list of strings describing errors in the MAR file, or
None if this MAR file appears well formed.
| 4.034678
| 3.731109
| 1.081361
|
if not self.mardata.signatures or not self.mardata.signatures.sigs:
# This MAR file can't be verified since it has no signatures
return False
hashers = []
for sig in self.mardata.signatures.sigs:
hashers.append((sig.algorithm_id, sig.signature, make_hasher(sig.algorithm_id)))
assert len(hashers) == len(self.mardata.signatures.sigs)
for block in get_signature_data(self.fileobj,
self.mardata.signatures.filesize):
[h.update(block) for (_, _, h) in hashers]
for algo_id, sig, h in hashers:
if not verify_signature(verify_key, sig, h.finalize(), h.algorithm.name):
return False
else:
return True
|
def verify(self, verify_key)
|
Verify that this MAR file has a valid signature.
Args:
verify_key (str): PEM formatted public key
Returns:
True if the MAR file's signature matches its contents
False otherwise; this includes cases where there is no signature.
| 4.605126
| 4.047309
| 1.137824
|
if not self.mardata.additional:
return None
for s in self.mardata.additional.sections:
if s.id == 1:
return str(s.productversion), str(s.channel)
return None
|
def productinfo(self)
|
Return the productversion and channel of this MAR if present.
| 8.376338
| 4.168648
| 2.009366
|
hashers = []
if not self.mardata.signatures:
return []
for s in self.mardata.signatures.sigs:
h = make_hasher(s.algorithm_id)
hashers.append((s.algorithm_id, h))
for block in get_signature_data(self.fileobj, self.mardata.signatures.filesize):
[h.update(block) for (_, h) in hashers]
return [(algo_id, h.finalize()) for (algo_id, h) in hashers]
|
def calculate_hashes(self)
|
Return hashes of the contents of this MAR file.
The hashes depend on the algorithms defined in the MAR file's signature block.
Returns:
A list of (algorithm_id, hash) tuples
| 4.563031
| 3.99665
| 1.141714
|
if not ctx.index.entries:
return False
return ctx.data_offset > 8 and ctx.data_offset > (ctx.signatures.offset_end + 8)
|
def _has_extras(ctx)
|
Determine if a MAR file has an additional section block or not.
It does this by looking at where file data starts in the file. If this
starts immediately after the signature data, then no additional sections are present.
Args:
ctx (context): construct parsing context
Returns:
True if the MAR file has an additional section block
False otherwise
| 14.240174
| 12.151467
| 1.171889
|
try:
key = serialization.load_pem_public_key(
keydata,
backend=default_backend(),
)
return key
except ValueError:
key = serialization.load_pem_private_key(
keydata,
password=None,
backend=default_backend(),
)
key = key.public_key()
return key
|
def get_publickey(keydata)
|
Load the public key from a PEM encoded string.
| 1.779885
| 1.790899
| 0.99385
|
key = serialization.load_pem_private_key(
keydata,
password=None,
backend=default_backend(),
)
return key
|
def get_privatekey(keydata)
|
Load the private key from a PEM encoded string.
| 2.155084
| 2.099024
| 1.026707
|
# Read everything except the signature entries
# The first 8 bytes are covered, as is everything from the beginning
# of the additional section to the end of the file. The signature
# algorithm id and size fields are also covered.
fileobj.seek(0)
marfile = mar.parse_stream(fileobj)
if not marfile.signatures:
raise IOError("Can't generate signature data for file without signature blocks")
# MAR header
fileobj.seek(0)
block = fileobj.read(8)
yield block
# Signatures header
sigs = sigs_header.parse_stream(fileobj)
sig_types = [(sig.algorithm_id, sig.size) for sig in sigs.sigs]
block = Int64ub.build(filesize) + Int32ub.build(sigs.count)
yield block
# Signature algorithm id and size per entry
for algorithm_id, size in sig_types:
block = Int32ub.build(algorithm_id) + Int32ub.build(size)
yield block
# Everything else in the file is covered
for block in file_iter(fileobj):
yield block
|
def get_signature_data(fileobj, filesize)
|
Read data from MAR file that is required for MAR signatures.
Args:
fileobj (file-like object): file-like object to read the MAR data from
filesize (int): the total size of the file
Yields:
blocks of bytes representing the data required to generate or validate
signatures.
| 5.436155
| 5.286222
| 1.028363
|
if algorithm_id == 1:
return hashes.Hash(hashes.SHA1(), default_backend())
elif algorithm_id == 2:
return hashes.Hash(hashes.SHA384(), default_backend())
else:
raise ValueError("Unsupported signing algorithm: %s" % algorithm_id)
|
def make_hasher(algorithm_id)
|
Create a hashing object for the given signing algorithm.
| 2.65003
| 2.312342
| 1.146037
|
hash_algo = _hash_algorithms[hash_algo]
return get_privatekey(private_key).sign(
hash,
padding.PKCS1v15(),
utils.Prehashed(hash_algo),
)
|
def sign_hash(private_key, hash, hash_algo)
|
Sign the given hash with the given private key.
Args:
private_key (str): PEM encoded private key
hash (byte str): hash to sign
hash_algo (str): name of hash algorithm used
Returns:
byte string representing the signature
| 3.627709
| 4.946692
| 0.733361
|
hash_algo = _hash_algorithms[hash_algo]
try:
return get_publickey(public_key).verify(
signature,
hash,
padding.PKCS1v15(),
utils.Prehashed(hash_algo),
) is None
except InvalidSignature:
return False
|
def verify_signature(public_key, signature, hash, hash_algo)
|
Verify the given signature is correct for the given hash and public key.
Args:
public_key (str): PEM encoded public key
signature (bytes): signature to verify
hash (bytes): hash of data
hash_algo (str): hash algorithm used
Returns:
True if the signature is valid, False otherwise
| 3.556221
| 4.541041
| 0.783129
|
private_key = rsa.generate_private_key(
public_exponent=65537,
key_size=bits,
backend=default_backend(),
)
private_pem = private_key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption(),
)
public_pem = private_key.public_key().public_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo,
)
return private_pem, public_pem
|
def make_rsa_keypair(bits)
|
Generate an RSA keypair.
Args:
bits (int): number of bits to use for the key.
Returns:
(private_key, public_key) - both as PEM encoded strings
| 1.256823
| 1.409154
| 0.891899
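A round-trip sketch tying together ``make_rsa_keypair``, ``sign_hash``, and ``verify_signature`` from the rows above; the 4096-bit/sha384 pairing follows ``get_key_from_cmdline`` earlier in this table.

```python
import hashlib

# 4096-bit keys pair with the sha384 signing algorithm
private_pem, public_pem = make_rsa_keypair(4096)
digest = hashlib.sha384(b'data covered by the signature').digest()
sig = sign_hash(private_pem, digest, 'sha384')
assert verify_signature(public_pem, sig, digest, 'sha384')
```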
|
try:
os.makedirs(path)
# sanity check
if not os.path.isdir(path): # pragma: no cover
raise IOError('path is not a directory')
except OSError as e:
# EEXIST
if e.errno == 17 and os.path.isdir(path):
return
raise
|
def mkdir(path)
|
Make a directory and its parents.
Args:
path (str): path to create
Returns:
None
Raises:
OSError if the directory cannot be created.
| 3.094017
| 3.891474
| 0.795076
|
total = 0
for block in iterable:
n = min(len(block), size - total)
block = block[:n]
if block:
yield block
total += len(block)
if total >= size:
break
if total < size:
raise ValueError('not enough data (yielded {} of {})'.format(total, size))
# sanity check; this should never happen
if total != size: # pragma: no cover
raise ValueError('yielded too much data')
|
def takeexactly(iterable, size)
|
Yield blocks from `iterable` until exactly `size` bytes have been returned.
Args:
iterable (iterable): Any iterable that yields sliceable objects that
have length.
size (int): How much data to consume
Yields:
blocks from `iterable` such that
sum(len(block) for block in takeexactly(iterable, size)) == size
Raises:
ValueError if there is less than `size` data in `iterable`
| 3.966318
| 4.068585
| 0.974864
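A quick sketch of ``takeexactly`` on in-memory chunks.

```python
chunks = [b'abc', b'def', b'ghi']
assert b''.join(takeexactly(chunks, 5)) == b'abcde'

# asking for more data than the iterable holds raises ValueError
try:
    b''.join(takeexactly([b'abc'], 5))
except ValueError as e:
    print(e)  # not enough data (yielded 3 of 5)
```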
|
n = 0
for block in src:
dst.write(block)
n += len(block)
return n
|
def write_to_file(src, dst)
|
Write data from `src` into `dst`.
Args:
src (iterable): iterable that yields blocks of data to write
dst (file-like object): file-like object that must support
.write(block)
Returns:
number of bytes written to `dst`
| 3.845604
| 3.403107
| 1.130027
|
compressor = bz2.BZ2Compressor(level)
for block in src:
encoded = compressor.compress(block)
if encoded:
yield encoded
yield compressor.flush()
|
def bz2_compress_stream(src, level=9)
|
Compress data from `src`.
Args:
src (iterable): iterable that yields blocks of data to compress
level (int): compression level (1-9) default is 9
Yields:
blocks of compressed data
| 2.74161
| 3.564325
| 0.769181
|
dec = bz2.BZ2Decompressor()
for block in src:
decoded = dec.decompress(block)
if decoded:
yield decoded
|
def bz2_decompress_stream(src)
|
Decompress data from `src`.
Args:
src (iterable): iterable that yields blocks of compressed data
Yields:
blocks of uncompressed data
| 2.987422
| 3.614798
| 0.826442
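A round-trip sketch of the two bz2 stream helpers above; joining the decompressed blocks recovers the original bytes.

```python
blocks = [b'hello ', b'world'] * 3
compressed = list(bz2_compress_stream(blocks, level=9))
restored = b''.join(bz2_decompress_stream(compressed))
assert restored == b''.join(blocks)
```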
|