Dataset schema (columns of the records below):

  _id               string, length 2-7
  title             string, length 1-88
  partition         string, 3 values
  text              string, length 75-19.8k
  language          string, 1 value
  meta_information  dict
q7100
create
train
def create(ctx, to, amount, symbol, secret, hash, account, expiration):
    """ Create an HTLC contract """
    ctx.blockchain.blocking = True
    tx = ctx.blockchain.htlc_create(
        Amount(amount, symbol),
        to,
        secret,
        hash_type=hash,
        expiration=expiration,
        account=account,
    )
    tx.pop("trx", None)
    print_tx(tx)
    results = tx.get("operation_results", {})
    if results:
        htlc_id = results[0][1]
        print("Your htlc_id is: {}".format(htlc_id))
python
{ "resource": "" }
q7101
redeem
train
def redeem(ctx, htlc_id, secret, account):
    """ Redeem an HTLC contract """
    print_tx(ctx.blockchain.htlc_redeem(htlc_id, secret, account=account))
python
{ "resource": "" }
q7102
unique
train
def unique(flags_class):
    """ A decorator for flags classes to forbid flag aliases. """
    if not is_flags_class_final(flags_class):
        raise TypeError('unique check can be applied only to flags classes that have members')
    if not flags_class.__member_aliases__:
        return flags_class
    aliases = ', '.join('%s -> %s' % (alias, name)
                        for alias, name in flags_class.__member_aliases__.items())
    raise ValueError('duplicate values found in %r: %s' % (flags_class, aliases))
python
{ "resource": "" }
q7103
unique_bits
train
def unique_bits(flags_class):
    """ A decorator for flags classes to forbid declaring flags with overlapping bits. """
    flags_class = unique(flags_class)
    other_bits = 0
    for name, member in flags_class.__members_without_aliases__.items():
        bits = int(member)
        if other_bits & bits:
            for other_name, other_member in flags_class.__members_without_aliases__.items():
                if int(other_member) & bits:
                    raise ValueError("%r: '%s' and '%s' have overlapping bits" % (flags_class, other_name, name))
        else:
            other_bits |= bits
python
{ "resource": "" }
q7104
newfeed
train
def newfeed(ctx, symbol, price, market, cer, mssr, mcr, account):
    """ Publish a price feed!

        Examples:

        \b
        uptick newfeed USD 0.01 USD/BTS
        uptick newfeed USD 100 BTS/USD

        Core Exchange Rate (CER)

        \b
        If no CER is provided, the cer will be the same as the settlement
        price with a 5% premium (Only if the 'market' is against the core
        asset (e.g. BTS)). The CER is always defined against the core asset
        (BTS). This means that if the backing asset is not the core asset
        (BTS), then you must specify your own cer as a float. The float `x`
        will be interpreted as `x BTS/SYMBOL`.
    """
    if cer:
        cer = Price(cer, quote=symbol, base="1.3.0", bitshares_instance=ctx.bitshares)
    print_tx(
        ctx.bitshares.publish_price_feed(
            symbol, Price(price, market), cer=cer, mssr=mssr, mcr=mcr, account=account
        )
    )
python
{ "resource": "" }
q7105
createcommittee
train
def createcommittee(ctx, url, account):
    """ Set up a committee account for your account """
    print_tx(ctx.bitshares.create_committee_member(url, account=account))
python
{ "resource": "" }
q7106
configuration
train
def configuration(ctx):
    """ Show configuration variables """
    t = [["Key", "Value"]]
    for key in ctx.bitshares.config:
        t.append([key, ctx.bitshares.config[key]])
    print_table(t)
python
{ "resource": "" }
q7107
sign
train
def sign(ctx, filename):
    """ Sign a json-formatted transaction """
    if filename:
        tx = filename.read()
    else:
        tx = sys.stdin.read()
    tx = TransactionBuilder(eval(tx), bitshares_instance=ctx.bitshares)
    tx.appendMissingSignatures()
    tx.sign()
    print_tx(tx.json())
python
{ "resource": "" }
q7108
witnesses
train
def witnesses(ctx):
    """ List witnesses and relevant information """
    t = [
        [
            "weight",
            "account",
            "signing_key",
            "vote_id",
            "url",
            "total_missed",
            "last_confirmed_block_num",
        ]
    ]
    for witness in sorted(Witnesses(), key=lambda x: x.weight, reverse=True):
        witness.refresh()
        t.append(
            [
                "{:.2f}%".format(witness.weight * 100),
                witness.account["name"],
                witness["signing_key"],
                witness["vote_id"],
                witness["url"],
                witness["total_missed"],
                witness["last_confirmed_block_num"],
            ]
        )
    print_table(t)
python
{ "resource": "" }
q7109
claim
train
def claim(ctx, vestingid, account, amount):
    """ Claim funds from the vesting balance """
    vesting = Vesting(vestingid)
    if amount:
        amount = Amount(float(amount), "BTS")
    else:
        amount = vesting.claimable
    print_tx(
        ctx.bitshares.vesting_balance_withdraw(
            vesting["id"], amount=amount, account=vesting["owner"]
        )
    )
python
{ "resource": "" }
q7110
offline
train
def offline(f):
    """ This decorator allows you to access ``ctx.bitshares`` which is
        an instance of BitShares with ``offline=True``.
    """
    @click.pass_context
    @verbose
    def new_func(ctx, *args, **kwargs):
        ctx.obj["offline"] = True
        ctx.bitshares = BitShares(**ctx.obj)
        ctx.blockchain = ctx.bitshares
        ctx.bitshares.set_shared_instance()
        return ctx.invoke(f, *args, **kwargs)
    return update_wrapper(new_func, f)
python
{ "resource": "" }
q7111
verify
train
def verify(ctx, file, account):
    """ Verify a signed message """
    if not file:
        print_message("Prompting for message. Terminate with CTRL-D", "info")
        file = click.get_text_stream("stdin")
    m = Message(file.read(), bitshares_instance=ctx.bitshares)
    try:
        if m.verify():
            print_message("Verified", "success")
        else:
            print_message("not verified", "error")
    except InvalidMessageSignature:
        print_message("Signature INVALID!", "error")
python
{ "resource": "" }
q7112
cloneaccount
train
def cloneaccount(ctx, account_name, account):
    """ Clone an account

        This copies the owner and active permissions as well as the
        options (e.g. votes, memo key)
    """
    from bitsharesbase import transactions, operations
    account = Account(account)
    op = {
        "fee": {"amount": 0, "asset_id": "1.3.0"},
        "registrar": account["id"],
        "referrer": account["id"],
        "referrer_percent": 100,
        "name": account_name,
        "owner": account["owner"],
        "active": account["active"],
        "options": account["options"],
        "extensions": {},
        "prefix": ctx.bitshares.rpc.chain_params["prefix"],
    }
    op = operations.Account_create(**op)
    print_tx(ctx.bitshares.finalizeOp(op, account, "active"))
python
{ "resource": "" }
q7113
whitelist
train
def whitelist(ctx, whitelist_account, account):
    """ Add an account to a whitelist """
    account = Account(account, blockchain_instance=ctx.blockchain)
    print_tx(account.whitelist(whitelist_account))
python
{ "resource": "" }
q7114
blacklist
train
def blacklist(ctx, blacklist_account, account):
    """ Add an account to a blacklist """
    account = Account(account, blockchain_instance=ctx.blockchain)
    print_tx(account.blacklist(blacklist_account))
python
{ "resource": "" }
q7115
unlist
train
def unlist(ctx, unlist_account, account):
    """ Remove an account from any list """
    account = Account(account, blockchain_instance=ctx.blockchain)
    print_tx(account.nolist(unlist_account))
python
{ "resource": "" }
q7116
setproxy
train
def setproxy(ctx, proxy_account, account):
    """ Set the proxy account for an account """
    print_tx(ctx.bitshares.set_proxy(proxy_account, account=account))
python
{ "resource": "" }
q7117
settlements
train
def settlements(ctx, asset, limit):
    """ Show pending settlement orders of a bitasset """
    from bitshares.asset import Asset
    asset = Asset(asset, full=True)
    if not asset.is_bitasset:
        print_message("{} is not a bitasset.".format(asset["symbol"]), "warning")
        sys.exit(1)
    calls = asset.get_settle_orders(limit)
    t = [["account", "amount", "date"]]
    for call in calls:
        t.append([str(call["account"]["name"]), str(call["amount"]), str(call["date"])])
    print_table(t)
python
{ "resource": "" }
q7118
addkey
train
def addkey(ctx, key):
    """ Add a private key to the wallet """
    if not key:
        while True:
            key = click.prompt(
                "Private Key (wif) [Enter to quit]",
                hide_input=True,
                show_default=False,
                default="exit",
            )
            if not key or key == "exit":
                break
            try:
                ctx.bitshares.wallet.addPrivateKey(key)
            except Exception as e:
                click.echo(str(e))
                continue
    else:
        for k in key:
            try:
                ctx.bitshares.wallet.addPrivateKey(k)
            except Exception as e:
                click.echo(str(e))
    installedKeys = ctx.bitshares.wallet.getPublicKeys()
    if len(installedKeys) == 1:
        name = ctx.bitshares.wallet.getAccountFromPublicKey(installedKeys[0])
        if name:  # only if a name to the key was found
            account = Account(name, bitshares_instance=ctx.bitshares)
            click.echo("=" * 30)
            click.echo("Setting new default user: %s" % account["name"])
            click.echo()
            click.echo("You can change these settings with:")
            click.echo("    uptick set default_account <account>")
            click.echo("=" * 30)
            config["default_account"] = account["name"]
python
{ "resource": "" }
q7119
delkey
train
def delkey(ctx, pubkeys):
    """ Delete a private key from the wallet """
    if not pubkeys:
        pubkeys = click.prompt("Public Keys").split(" ")
    if click.confirm(
        "Are you sure you want to delete keys from your wallet?\n"
        "This step is IRREVERSIBLE! If you don't have a backup, "
        "You may lose access to your account!"
    ):
        for pub in pubkeys:
            ctx.bitshares.wallet.removePrivateKeyFromPublicKey(pub)
python
{ "resource": "" }
q7120
create
train
def create(ctx):
    """ Create default config file """
    import shutil
    this_dir, this_filename = os.path.split(__file__)
    default_config_file = os.path.join(this_dir, "apis/example-config.yaml")
    config_file = ctx.obj["configfile"]
    shutil.copyfile(default_config_file, config_file)
    print_message("Config file created: {}".format(config_file))
python
{ "resource": "" }
q7121
start
train
def start(ctx):
    """ Start the API according to the config file """
    module = ctx.config.get("api", "poloniex")
    # unlockWallet
    if module == "poloniex":
        from .apis import poloniex
        poloniex.run(ctx, port=5000)
    else:
        print_message("Unknown 'api'!", "error")
python
{ "resource": "" }
q7122
crawl
train
def crawl(url, callback, **kwargs):
    """Crawls a URL with the given callback.

    Parameters
    ----------
    url : str
        A URL to crawl.
    callback : callable
        A function to be used as spider callback for the given URL.
    spider_cls : scrapy.Spider (default: DefaultSpider)
        A spider class to be used in the crawler instance.
    capture_items : bool (default: True)
        If enabled, the scraped items are captured and returned.
    return_crawler : bool (default: False)
        If enabled, the crawler instance is returned. If ``capture_items``
        is enabled, the scraped items are collected in ``crawler.items``.
    settings : dict, optional
        Custom crawler settings.
    timeout : int (default: DEFAULT_TIMEOUT)
        Result wait timeout.

    Returns
    -------
    out
        By default, the scraped items. If ``return_crawler`` is ``True``,
        returns the crawler instance.

    Raises
    ------
    crochet.TimeoutError
    """
    timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT)
    return wait_for(timeout, _crawl_in_reactor, url, callback, **kwargs)
python
{ "resource": "" }
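A hedged usage sketch for the `crawl` helper above. The URL and CSS selector are illustrative placeholders; it assumes scrapy is installed and the package's defaults (DefaultSpider, DEFAULT_TIMEOUT) apply.

# Hypothetical example; target URL and selector are placeholders.
def parse_title(response):
    # scrapy's standard Response.css() API
    yield {'title': response.css('title::text').get()}

items = crawl('https://example.com', parse_title, timeout=30)
print(items)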
q7123
_crawl_in_reactor
train
def _crawl_in_reactor(url, callback, spider_cls=DefaultSpider, **kwargs):
    """Crawls the given URL with the given callback.

    Parameters
    ----------
    url : str
        The URL to crawl.
    callback : callable
        Function to be used as callback for the request.
    spider_cls : scrapy.Spider (default: DefaultSpider)
        A spider class to be used in the crawler instance.
    kwargs : dict, optional
        Extra arguments to be passed to ``_run_spider_in_reactor``.

    Returns
    -------
    crochet.EventualResult
    """
    spider_cls = override_start_requests(spider_cls, [url], callback)
    return _run_spider_in_reactor(spider_cls, **kwargs)
python
{ "resource": "" }
q7124
_run_spider_in_reactor
train
def _run_spider_in_reactor(spider_cls, capture_items=True, return_crawler=False,
                           settings=None, **kwargs):
    """Runs the given spider inside the twisted reactor.

    Parameters
    ----------
    spider_cls : scrapy.Spider
        Spider to run.
    capture_items : bool (default: True)
        If enabled, the scraped items are captured and returned.
    return_crawler : bool (default: False)
        If enabled, the crawler instance is returned. If ``capture_items``
        is enabled, the scraped items are collected in ``crawler.items``.
    settings : dict, optional
        Custom crawler settings.

    Returns
    -------
    out : crochet.EventualResult
        If ``capture_items`` is ``True``, returns scraped items.
        If ``return_crawler`` is ``True``, returns the crawler instance.
    """
    settings = settings or {}
    crawler_settings = get_project_settings().copy()
    crawler_settings.setdict(default_settings)
    crawler_settings.setdict(settings)
    log_scrapy_info(crawler_settings)
    crawler = Crawler(spider_cls, crawler_settings)
    d = crawler.crawl(**kwargs)
    if capture_items:
        crawler.items = _OutputItems()
        crawler.signals.connect(crawler.items.append, signal=signals.item_scraped)
        d.addCallback(lambda _: crawler.items)
    if return_crawler:
        d.addCallback(lambda _: crawler)
    return d
python
{ "resource": "" }
q7125
override_start_requests
train
def override_start_requests(spider_cls, start_urls, callback=None, **attrs):
    """Returns a new spider class overriding ``start_requests``.

    This function is useful to replace the start requests of an existing
    spider class at runtime.

    Parameters
    ----------
    spider_cls : scrapy.Spider
        Spider class to be used as base class.
    start_urls : iterable
        Iterable of URLs or ``Request`` objects.
    callback : callable, optional
        Callback for the start URLs.
    attrs : dict, optional
        Additional class attributes.

    Returns
    -------
    out : class
        A subclass of ``spider_cls`` with an overridden ``start_requests`` method.
    """
    def start_requests():
        for url in start_urls:
            req = Request(url, dont_filter=True) if isinstance(url, six.string_types) else url
            if callback is not None:
                req.callback = callback
            yield req
    attrs['start_requests'] = staticmethod(start_requests)
    return type(spider_cls.__name__, (spider_cls, ), attrs)
python
{ "resource": "" }
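A short sketch of how `override_start_requests` might be applied to retarget an existing spider at runtime. The spider class, URLs, and callback below are hypothetical.

import scrapy

class MySpider(scrapy.Spider):
    name = 'my_spider'

def parse_page(response):
    yield {'url': response.url}

# Subclass whose start_requests() yields the two URLs with parse_page as callback.
NewSpider = override_start_requests(
    MySpider,
    ['https://example.com/a', 'https://example.com/b'],
    callback=parse_page,
)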
q7126
wait_for
train
def wait_for(timeout, func, *args, **kwargs):
    """Waits for an eventual result.

    Parameters
    ----------
    timeout : int
        How much time to wait, in seconds.
    func : callable
        A function that returns ``crochet.EventualResult``.
    args : tuple, optional
        Arguments for ``func``.
    kwargs : dict, optional
        Keyword arguments for ``func``.

    Returns
    -------
    out
        Given ``func`` result.

    Raises
    ------
    crochet.TimeoutError
    """
    result = func(*args, **kwargs)
    try:
        return result.wait(timeout)
    except crochet.TimeoutError:
        result.cancel()
        raise
python
{ "resource": "" }
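For illustration, `wait_for` pairs with any function returning a `crochet.EventualResult`. This sketch assumes `crochet.setup()` has been called and uses crochet's documented `run_in_reactor` decorator.

import crochet
crochet.setup()

@crochet.run_in_reactor
def delayed_value():
    from twisted.internet import defer, reactor
    d = defer.Deferred()
    reactor.callLater(1.0, d.callback, 42)  # fire after one second
    return d

# Waits up to 5 seconds; on timeout the result is cancelled and the error re-raised.
print(wait_for(5, delayed_value))  # -> 42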
q7127
highlight
train
def highlight(code, lexer='html', formatter='html', output_wrapper=None):
    """Highlights the given code using pygments."""
    if not pygments:
        raise TypeError("pygments module required")
    if not isinstance(code, six.string_types):
        code = pprint.pformat(code)
    if isinstance(lexer, six.string_types):
        lexer = pygments.lexers.get_lexer_by_name(lexer)
    if isinstance(formatter, six.string_types):
        formatter = pygments.formatters.get_formatter_by_name(formatter)
    if formatter.name.lower() == 'html':
        formatter.full = True
        formatter.cssclass = "pygments-%s" % uuid.uuid4()
    if output_wrapper is None:
        output_wrapper = HTML
    return output_wrapper(pygments.highlight(code, lexer, formatter))
python
{ "resource": "" }
q7128
Netnode._get_next_slot
train
def _get_next_slot(self, tag):
    '''
    get the first unused supval table key, or 0 if the table is empty.
    useful for filling the supval table sequentially.
    '''
    slot = self._n.suplast(tag)
    if slot is None or slot == idaapi.BADNODE:
        return 0
    else:
        return slot + 1
python
{ "resource": "" }
q7129
GitRunner.run_git
train
def run_git(self, args, git_env=None):
    '''
    Runs the git executable with the arguments given and returns a list of
    lines produced on its standard output.
    '''
    popen_kwargs = {
        'stdout': subprocess.PIPE,
        'stderr': subprocess.PIPE,
    }
    if git_env:
        popen_kwargs['env'] = git_env
    if self._git_toplevel:
        popen_kwargs['cwd'] = self._git_toplevel
    git_process = subprocess.Popen(
        [GitRunner._git_executable] + args,
        **popen_kwargs
    )
    try:
        out, err = git_process.communicate()
        git_process.wait()
    except Exception as e:
        raise GitError("Couldn't run 'git {args}':{newline}{ex}".format(
            args=' '.join(args),
            newline=os.linesep,
            ex=str(e)
        ))
    if (0 != git_process.returncode) or err:
        if err:
            err = err.decode('utf_8')
        raise GitError("'git {args}' failed with:{newline}{err}".format(
            args=' '.join(args),
            newline=os.linesep,
            err=err
        ))
    if not out:
        raise ValueError("No output")
    return out.decode('utf_8').splitlines()
python
{ "resource": "" }
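A hedged usage sketch. `GitRunner`'s constructor is not shown in this record, so the zero-argument instantiation below is an assumption.

runner = GitRunner()  # assumed constructor signature
for line in runner.run_git(['status', '--porcelain']):
    print(line)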
q7130
add_signature_block
train
def add_signature_block(src_fileobj, dest_fileobj, signing_algorithm, signature=None):
    """Add a signature block to a MAR file.

    The product version and channel are preserved, but any existing
    signatures are overwritten.

    Args:
        src_fileobj (file object): The input MAR file to add a signature to
        dest_fileobj (file object): File object to write the new MAR file to.
            Must be open in w+b mode.
        signing_algorithm (str): One of 'sha1' or 'sha384'
        signature (bytes): Signature to write, or None to use a dummy signature
    """
    algo_id = {'sha1': 1, 'sha384': 2}[signing_algorithm]
    if not signature:
        signature = make_dummy_signature(algo_id)

    src_fileobj.seek(0)
    mardata = mar.parse_stream(src_fileobj)

    # Header
    header = mardata.header
    dest_fileobj.write(mar_header.build(header))

    # Signature block
    sig = dict(algorithm_id=algo_id,
               size=len(signature),
               signature=signature,
               )
    # This will be fixed up later
    filesize = 0
    sigs_offset = dest_fileobj.tell()
    sigs = sigs_header.build(dict(
        filesize=filesize,
        count=1,
        sigs=[sig],
    ))
    dest_fileobj.write(sigs)

    # Write the additional section
    dest_fileobj.write(extras_header.build(mardata.additional))

    # Write the data
    data_offset = dest_fileobj.tell()
    src_fileobj.seek(mardata.data_offset)
    write_to_file(takeexactly(src_fileobj, mardata.data_length), dest_fileobj)

    # Write the index
    index_offset = dest_fileobj.tell()
    index = mardata.index
    # Adjust the offsets
    data_offset_delta = data_offset - mardata.data_offset
    for e in index.entries:
        e.offset += data_offset_delta
    dest_fileobj.write(index_header.build(index))
    filesize = dest_fileobj.tell()

    # Go back and update the index offset and filesize
    dest_fileobj.seek(0)
    header.index_offset = index_offset
    dest_fileobj.write(mar_header.build(header))
    dest_fileobj.seek(sigs_offset)
    sigs = sigs_header.build(dict(
        filesize=filesize,
        count=1,
        sigs=[sig],
    ))
    dest_fileobj.write(sigs)
python
{ "resource": "" }
q7131
MarWriter.add
train
def add(self, path, compress=None):
    """Add `path` to the MAR file.

    If `path` is a file, it will be added directly.
    If `path` is a directory, it will be traversed recursively and all
    files inside will be added.

    Args:
        path (str): path to file or directory on disk to add to this MAR file
        compress (str): One of 'xz', 'bz2', or None. Defaults to None.
    """
    if os.path.isdir(path):
        self.add_dir(path, compress)
    else:
        self.add_file(path, compress)
python
{ "resource": "" }
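Based on `do_create` further below, a MAR file is built by wrapping an open file in a `MarWriter` context manager; this sketch mirrors that pattern (file names are placeholders).

from mardor.writer import MarWriter

with open('out.mar', 'w+b') as f:
    with MarWriter(f, productversion='1.0', channel='release') as m:
        m.add('somedir')                   # directories are walked recursively
        m.add('file.txt', compress='bz2')  # single file, bz2-compressed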
q7132
MarWriter.add_dir
train
def add_dir(self, path, compress):
    """Add all files under directory `path` to the MAR file.

    Args:
        path (str): path to directory to add to this MAR file
        compress (str): One of 'xz', 'bz2', or None. Defaults to None.
    """
    if not os.path.isdir(path):
        raise ValueError('{} is not a directory'.format(path))
    for root, dirs, files in os.walk(path):
        for f in files:
            self.add_file(os.path.join(root, f), compress)
python
{ "resource": "" }
q7133
MarWriter.add_fileobj
train
def add_fileobj(self, fileobj, path, compress, flags=None):
    """Add the contents of a file object to the MAR file.

    Args:
        fileobj (file-like object): open file object
        path (str): name of this file in the MAR file
        compress (str): One of 'xz', 'bz2', or None. Defaults to None.
        flags (int): permission of this file in the MAR file. Defaults to
            the permissions of `path`
    """
    f = file_iter(fileobj)
    # note: the original read `os.stat(path) & 0o777`, which raises a
    # TypeError; the mode bits come from the stat result's st_mode field
    flags = flags or os.stat(path).st_mode & 0o777
    return self.add_stream(f, path, compress, flags)
python
{ "resource": "" }
q7134
MarWriter.add_stream
train
def add_stream(self, stream, path, compress, flags):
    """Add the contents of an iterable to the MAR file.

    Args:
        stream (iterable): yields blocks of data
        path (str): name of this file in the MAR file
        compress (str): One of 'xz', 'bz2', or None. Defaults to None.
        flags (int): permission of this file in the MAR file
    """
    self.data_fileobj.seek(self.last_offset)

    if compress == 'bz2':
        stream = bz2_compress_stream(stream)
    elif compress == 'xz':
        stream = xz_compress_stream(stream)
    elif compress is None:
        pass
    else:
        raise ValueError('Unsupported compression type: {}'.format(compress))

    size = write_to_file(stream, self.data_fileobj)

    # On Windows, convert \ to /
    # very difficult to mock this out for coverage on linux
    if os.sep == '\\':  # pragma: no cover
        path = path.replace('\\', '/')

    e = dict(
        name=six.u(path),
        offset=self.last_offset,
        size=size,
        flags=flags,
    )
    self.entries.append(e)
    self.last_offset += e['size']
python
{ "resource": "" }
q7135
MarWriter.add_file
train
def add_file(self, path, compress):
    """Add a single file to the MAR file.

    Args:
        path (str): path to a file to add to this MAR file.
        compress (str): One of 'xz', 'bz2', or None. Defaults to None.
    """
    if not os.path.isfile(path):
        raise ValueError('{} is not a file'.format(path))
    self.fileobj.seek(self.last_offset)

    with open(path, 'rb') as f:
        flags = os.stat(path).st_mode & 0o777
        self.add_fileobj(f, path, compress, flags)
python
{ "resource": "" }
q7136
MarWriter.write_header
train
def write_header(self):
    """Write the MAR header to the file.

    The MAR header includes the MAR magic bytes as well as the offset to
    where the index data can be found.
    """
    self.fileobj.seek(0)
    header = mar_header.build(dict(index_offset=self.last_offset))
    self.fileobj.write(header)
python
{ "resource": "" }
q7137
MarWriter.dummy_signatures
train
def dummy_signatures(self):
    """Create a dummy signature.

    This is used when initially writing the MAR header and we don't know
    what the final signature data will be.

    Returns:
        Fake signature data suitable for writing to the header with
        .write_signatures()
    """
    if not self.signing_algorithm:
        return []
    algo_id = {'sha1': 1, 'sha384': 2}[self.signing_algorithm]
    signature = make_dummy_signature(algo_id)
    return [(algo_id, signature)]
python
{ "resource": "" }
q7138
MarWriter.calculate_signatures
train
def calculate_signatures(self):
    """Calculate the signatures for this MAR file.

    Returns:
        A list of signature tuples: [(algorithm_id, signature_data), ...]
    """
    if not self.signing_algorithm:
        return []

    algo_id = {'sha1': 1, 'sha384': 2}[self.signing_algorithm]
    hashers = [(algo_id, make_hasher(algo_id))]

    for block in get_signature_data(self.fileobj, self.filesize):
        [h.update(block) for (_, h) in hashers]

    signatures = [(algo_id, sign_hash(self.signing_key, h.finalize(), h.algorithm.name))
                  for (algo_id, h) in hashers]
    return signatures
python
{ "resource": "" }
q7139
MarWriter.write_signatures
train
def write_signatures(self, signatures):
    """Write signature data to the MAR file.

    Args:
        signatures (list): list of signature tuples of the form
            (algorithm_id, signature_data)
    """
    self.fileobj.seek(self.signature_offset)
    sig_entries = [dict(algorithm_id=id_,
                        size=len(sig),
                        signature=sig)
                   for (id_, sig) in signatures]

    sigs = sigs_header.build(dict(
        filesize=self.filesize,
        count=len(signatures),
        sigs=sig_entries,
    ))
    self.fileobj.write(sigs)
    signatures_len = len(sigs)
    self.additional_offset = self.signature_offset + signatures_len

    # sanity check; this should never happen
    if not self.additional_offset == self.fileobj.tell():  # pragma: no cover
        raise IOError('ended up at unexpected offset')
python
{ "resource": "" }
q7140
MarWriter.write_additional
train
def write_additional(self, productversion, channel):
    """Write the additional information to the MAR header.

    Args:
        productversion (str): product and version string
        channel (str): channel string
    """
    self.fileobj.seek(self.additional_offset)
    extras = extras_header.build(dict(
        count=1,
        sections=[dict(
            channel=six.u(channel),
            productversion=six.u(productversion),
            size=len(channel) + len(productversion) + 2 + 8,
            padding=b'',
        )],
    ))

    self.fileobj.write(extras)
    self.last_offset = self.fileobj.tell()
python
{ "resource": "" }
q7141
MarWriter.write_index
train
def write_index(self):
    """Write the index of all our files to the MAR file."""
    self.fileobj.seek(self.last_offset)
    index = index_header.build(dict(entries=self.entries))
    self.fileobj.write(index)
    self.filesize = self.fileobj.tell()
python
{ "resource": "" }
q7142
MarWriter.finish
train
def finish(self):
    """Finalize the MAR file.

    The MAR header, index and signatures need to be updated once we've
    finished adding all the files.
    """
    # Update the last_offset in the mar header
    self.write_header()
    # Write out the index of contents
    self.write_index()

    if not self.use_old_format:
        # Refresh the signature
        sigs = self.calculate_signatures()
        self.write_signatures(sigs)
python
{ "resource": "" }
q7143
build_argparser
train
def build_argparser():
    """Build argument parser for the CLI."""
    parser = ArgumentParser('Utility for managing MAR files')
    create_group = parser.add_argument_group("Create a MAR file")
    create_group.add_argument("-c", "--create", metavar="MARFILE", help="create MAR")
    create_group.add_argument("-V", "--productversion", dest="productversion",
                              help="product/version string")
    create_group.add_argument("-H", "--channel", dest="channel",
                              help="channel this MAR file is applicable to")
    create_group.add_argument("files", nargs=REMAINDER,
                              help="files to add to the MAR file")

    extract_group = parser.add_argument_group("Extract a MAR file")
    extract_group.add_argument("-x", "--extract", help="extract MAR", metavar="MARFILE")

    list_group = parser.add_argument_group("Print information on a MAR file")
    list_group.add_argument("-t", "--list", help="print out MAR contents",
                            metavar="MARFILE")
    list_group.add_argument("-T", "--list-detailed", metavar="MARFILE",
                            help="print out MAR contents including signatures")

    verify_group = parser.add_argument_group("Verify a MAR file")
    verify_group.add_argument("-v", "--verify", metavar="MARFILE",
                              help="verify the marfile")

    parser.add_argument("-j", "--bzip2", action="store_const", dest="compression",
                        const="bz2", help="compress/decompress members with BZ2")
    parser.add_argument("-J", "--xz", action="store_const", dest="compression",
                        const="xz", help="compress/decompress archive with XZ")
    parser.add_argument("--auto", action="store_const", dest="compression",
                        const="auto", help="automatically decompress contents")
    parser.add_argument("-k", "--keyfiles", dest="keyfiles", action='append',
                        help="sign/verify with given key(s)")
    parser.add_argument("-C", "--chdir", dest="chdir",
                        help="chdir to this directory before creating or "
                        "extracting; location of marfile isn't affected by "
                        "this option.")
    parser.add_argument("--verbose", dest="loglevel", action="store_const",
                        const=logging.DEBUG, default=logging.WARN,
                        help="increase logging verbosity")
    parser.add_argument('--version', action='version',
                        version='mar version {}'.format(mardor.version_str))

    signing_group = parser.add_argument_group('Sign a MAR file')
    signing_group.add_argument('--hash', help='output hash for signing',
                               choices=('sha1', 'sha384'))
    signing_group.add_argument('--asn1', help='format hash as an ASN1 DigestInfo block',
                               default=False, action='store_true')
    signing_group.add_argument('--add-signature', help='inject signature', nargs=3,
                               metavar=('input', 'output', 'signature'))

    return parser
python
{ "resource": "" }
q7144
do_extract
train
def do_extract(marfile, destdir, decompress):
    """Extract the MAR file to the destdir."""
    with open(marfile, 'rb') as f:
        with MarReader(f) as m:
            m.extract(str(destdir), decompress=decompress)
python
{ "resource": "" }
q7145
get_keys
train
def get_keys(keyfiles, signature_type):
    """Get public keys for the given keyfiles.

    Args:
        keyfiles: List of filenames with public keys, or :mozilla- prefixed
            key names
        signature_type: one of 'sha1' or 'sha384'

    Returns:
        List of public keys as strings
    """
    builtin_keys = {
        ('release', 'sha1'): [mardor.mozilla.release1_sha1, mardor.mozilla.release2_sha1],
        ('release', 'sha384'): [mardor.mozilla.release1_sha384, mardor.mozilla.release2_sha384],
        ('nightly', 'sha1'): [mardor.mozilla.nightly1_sha1, mardor.mozilla.nightly2_sha1],
        ('nightly', 'sha384'): [mardor.mozilla.nightly1_sha384, mardor.mozilla.nightly2_sha384],
        ('dep', 'sha1'): [mardor.mozilla.dep1_sha1, mardor.mozilla.dep2_sha1],
        ('dep', 'sha384'): [mardor.mozilla.dep1_sha384, mardor.mozilla.dep2_sha384],
        ('autograph-stage', 'sha384'): [mardor.mozilla.autograph_stage_sha384],
    }
    keys = []
    for keyfile in keyfiles:
        if keyfile.startswith(':mozilla-'):
            name = keyfile.split(':mozilla-')[1]
            try:
                keys.extend(builtin_keys[name, signature_type])
            except KeyError:
                raise ValueError('Invalid internal key name: {}'.format(keyfile))
        else:
            key = open(keyfile, 'rb').read()
            keys.append(key)
    return keys
python
{ "resource": "" }
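Usage follows directly from the lookup table above: a `:mozilla-` prefix selects built-in keys, anything else is read as a PEM file (the local path here is hypothetical).

keys = get_keys([':mozilla-release'], 'sha384')    # built-in Mozilla release keys
keys += get_keys(['my_public_key.pem'], 'sha384')  # plus a local PEM file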
q7146
do_verify
train
def do_verify(marfile, keyfiles=None):
    """Verify the MAR file."""
    try:
        with open(marfile, 'rb') as f:
            with MarReader(f) as m:
                # Check various parts of the mar file
                # e.g. signature algorithms and additional block sections
                errors = m.get_errors()
                if errors:
                    print("File is not well formed: {}".format(errors))
                    sys.exit(1)

                if keyfiles:
                    try:
                        keys = get_keys(keyfiles, m.signature_type)
                    except ValueError as e:
                        print(e)
                        sys.exit(1)
                    if any(m.verify(key) for key in keys):
                        print("Verification OK")
                        return True
                    else:
                        print("Verification failed")
                        sys.exit(1)
                else:
                    print("Verification OK")
                    return True
    except Exception as e:
        print("Error opening or parsing file: {}".format(e))
        sys.exit(1)
python
{ "resource": "" }
q7147
do_list
train
def do_list(marfile, detailed=False):
    """
    List the MAR file.

    Yields lines of text to output
    """
    with open(marfile, 'rb') as f:
        with MarReader(f) as m:
            if detailed:
                if m.compression_type:
                    yield "Compression type: {}".format(m.compression_type)
                if m.signature_type:
                    yield "Signature type: {}".format(m.signature_type)
                if m.mardata.signatures:
                    plural = "s" if (m.mardata.signatures.count == 0 or m.mardata.signatures.count > 1) else ""
                    yield "Signature block found with {} signature{}".format(m.mardata.signatures.count, plural)
                    for s in m.mardata.signatures.sigs:
                        yield "- Signature {} size {}".format(s.algorithm_id, s.size)
                    yield ""
                if m.mardata.additional:
                    yield "{} additional block found:".format(len(m.mardata.additional.sections))
                    for s in m.mardata.additional.sections:
                        if s.id == 1:
                            yield ("  - Product Information Block:")
                            yield ("    - MAR channel name: {}".format(s.channel))
                            yield ("    - Product version: {}".format(s.productversion))
                            yield ""
                        else:
                            yield ("Unknown additional data")
            yield ("{:7s} {:7s} {:7s}".format("SIZE", "MODE", "NAME"))
            for e in m.mardata.index.entries:
                yield ("{:<7d} {:04o} {}".format(e.size, e.flags, e.name))
python
{ "resource": "" }
q7148
do_create
train
def do_create(marfile, files, compress, productversion=None, channel=None,
              signing_key=None, signing_algorithm=None):
    """Create a new MAR file."""
    with open(marfile, 'w+b') as f:
        with MarWriter(f, productversion=productversion, channel=channel,
                       signing_key=signing_key,
                       signing_algorithm=signing_algorithm,
                       ) as m:
            # iterate by name to avoid shadowing the open file object `f`
            for path in files:
                m.add(path, compress=compress)
python
{ "resource": "" }
q7149
do_hash
train
def do_hash(hash_algo, marfile, asn1=False):
    """Output the hash for this MAR file."""
    # Add a dummy signature into a temporary file
    dst = tempfile.TemporaryFile()
    with open(marfile, 'rb') as f:
        add_signature_block(f, dst, hash_algo)

    dst.seek(0)

    with MarReader(dst) as m:
        hashes = m.calculate_hashes()
    h = hashes[0][1]

    if asn1:
        h = format_hash(h, hash_algo)

    print(base64.b64encode(h).decode('ascii'))
python
{ "resource": "" }
q7150
do_add_signature
train
def do_add_signature(input_file, output_file, signature_file):
    """Add a signature to the MAR file."""
    signature = open(signature_file, 'rb').read()
    if len(signature) == 256:
        hash_algo = 'sha1'
    elif len(signature) == 512:
        hash_algo = 'sha384'
    else:
        raise ValueError("Unsupported signature length: {}".format(len(signature)))

    with open(output_file, 'w+b') as dst:
        with open(input_file, 'rb') as src:
            add_signature_block(src, dst, hash_algo, signature)
python
{ "resource": "" }
q7151
check_args
train
def check_args(parser, args):
    """Validate commandline arguments."""
    # Make sure only one action has been specified
    if len([a for a in [args.create, args.extract, args.verify, args.list,
                        args.list_detailed, args.hash, args.add_signature]
            if a is not None]) != 1:
        parser.error("Must specify something to do (one of -c, -x, -t, -T, -v, --hash, --add-signature)")

    if args.create and not args.files:
        parser.error("Must specify at least one file to add to marfile")

    if args.extract and args.compression not in (None, 'bz2', 'xz', 'auto'):  # pragma: nocover
        parser.error('Unsupported compression type')

    if args.create and args.compression not in (None, 'bz2', 'xz'):  # pragma: nocover
        parser.error('Unsupported compression type')

    if args.hash and len(args.files) != 1:
        parser.error("Must specify a file to output the hash for")
python
{ "resource": "" }
q7152
get_key_from_cmdline
train
def get_key_from_cmdline(parser, args):
    """Return the signing key and signing algorithm from the commandline."""
    if args.keyfiles:
        signing_key = open(args.keyfiles[0], 'rb').read()
        bits = get_keysize(signing_key)
        if bits == 2048:
            signing_algorithm = 'sha1'
        elif bits == 4096:
            signing_algorithm = 'sha384'
        else:
            parser.error("Unsupported key size {} from key {}".format(bits, args.keyfiles[0]))
        print("Using {} to sign using algorithm {!s}".format(args.keyfiles[0], signing_algorithm))
    else:
        signing_key = None
        signing_algorithm = None

    return signing_key, signing_algorithm
python
{ "resource": "" }
q7153
main
train
def main(argv=None):
    """Run the main CLI entry point."""
    parser = build_argparser()
    args = parser.parse_args(argv)

    logging.basicConfig(level=args.loglevel, format="%(message)s")

    check_args(parser, args)

    if args.extract:
        marfile = os.path.abspath(args.extract)
        if args.chdir:
            os.chdir(args.chdir)
        do_extract(marfile, os.getcwd(), args.compression)
    elif args.verify:
        do_verify(args.verify, args.keyfiles)
    elif args.list:
        print("\n".join(do_list(args.list)))
    elif args.list_detailed:
        print("\n".join(do_list(args.list_detailed, detailed=True)))
    elif args.create:
        marfile = os.path.abspath(args.create)
        signing_key, signing_algorithm = get_key_from_cmdline(parser, args)
        if args.chdir:
            os.chdir(args.chdir)
        do_create(marfile, args.files, args.compression,
                  productversion=args.productversion, channel=args.channel,
                  signing_key=signing_key, signing_algorithm=signing_algorithm)
    elif args.hash:
        do_hash(args.hash, args.files[0], args.asn1)
    elif args.add_signature:
        do_add_signature(args.add_signature[0], args.add_signature[1], args.add_signature[2])
    # sanity check; should never happen
    else:  # pragma: no cover
        parser.error("Unsupported action")
python
{ "resource": "" }
q7154
MarReader.compression_type
train
def compression_type(self):
    """Return the best compression type used in this MAR.

    Returns:
        One of None, 'bz2', or 'xz'
    """
    best_compression = None
    for e in self.mardata.index.entries:
        self.fileobj.seek(e.offset)
        magic = self.fileobj.read(10)
        compression = guess_compression(magic)
        if compression == 'xz':
            best_compression = 'xz'
            break
        elif compression == 'bz2' and best_compression is None:
            best_compression = 'bz2'
    return best_compression
python
{ "resource": "" }
q7155
MarReader.signature_type
train
def signature_type(self):
    """Return the signature type used in this MAR.

    Returns:
        One of None, 'unknown', 'sha1', or 'sha384'
    """
    if not self.mardata.signatures:
        return None

    for sig in self.mardata.signatures.sigs:
        if sig.algorithm_id == 1:
            return 'sha1'
        elif sig.algorithm_id == 2:
            return 'sha384'
        else:
            return 'unknown'
python
{ "resource": "" }
q7156
MarReader.extract_entry
train
def extract_entry(self, e, decompress='auto'):
    """Yield blocks of data for this entry from this MAR file.

    Args:
        e (:obj:`mardor.format.index_entry`): An index_entry object that
            refers to this file's size and offset inside the MAR file.
        decompress (str, optional): Controls whether files are decompressed
            when extracted. Must be one of None, 'auto', 'bz2', or 'xz'.
            Defaults to 'auto'.

    Yields:
        Blocks of data for `e`
    """
    self.fileobj.seek(e.offset)
    stream = file_iter(self.fileobj)
    stream = takeexactly(stream, e.size)
    if decompress == 'auto':
        stream = auto_decompress_stream(stream)
    elif decompress == 'bz2':
        stream = bz2_decompress_stream(stream)
    elif decompress == 'xz':
        stream = xz_decompress_stream(stream)
    elif decompress is None:
        pass
    else:
        raise ValueError("Unsupported decompression type: {}".format(decompress))

    for block in stream:
        yield block
python
{ "resource": "" }
q7157
MarReader.extract
train
def extract(self, destdir, decompress='auto'):
    """Extract the entire MAR file into a directory.

    Args:
        destdir (str): A local directory on disk into which the contents of
            this MAR file will be extracted. Required parent directories
            will be created as necessary.
        decompress (obj, optional): Controls whether files are decompressed
            when extracted. Must be one of 'auto' or None. Defaults to 'auto'.
    """
    for e in self.mardata.index.entries:
        name = e.name
        entry_path = safejoin(destdir, name)
        entry_dir = os.path.dirname(entry_path)
        mkdir(entry_dir)
        with open(entry_path, 'wb') as f:
            write_to_file(self.extract_entry(e, decompress), f)
        os.chmod(entry_path, e.flags)
python
{ "resource": "" }
q7158
MarReader.get_errors
train
def get_errors(self):
    """Verify that this MAR file is well formed.

    Returns:
        A list of strings describing errors in the MAR file
        None if this MAR file appears well formed.
    """
    errors = []

    errors.extend(self._get_signature_errors())
    errors.extend(self._get_additional_errors())
    errors.extend(self._get_entry_errors())

    return errors if errors else None
python
{ "resource": "" }
q7159
MarReader.verify
train
def verify(self, verify_key):
    """Verify that this MAR file has a valid signature.

    Args:
        verify_key (str): PEM formatted public key

    Returns:
        True if the MAR file's signature matches its contents
        False otherwise; this includes cases where there is no signature.
    """
    if not self.mardata.signatures or not self.mardata.signatures.sigs:
        # This MAR file can't be verified since it has no signatures
        return False

    hashers = []
    for sig in self.mardata.signatures.sigs:
        hashers.append((sig.algorithm_id, sig.signature, make_hasher(sig.algorithm_id)))

    assert len(hashers) == len(self.mardata.signatures.sigs)

    for block in get_signature_data(self.fileobj, self.mardata.signatures.filesize):
        [h.update(block) for (_, _, h) in hashers]

    for algo_id, sig, h in hashers:
        if not verify_signature(verify_key, sig, h.finalize(), h.algorithm.name):
            return False
    else:
        return True
python
{ "resource": "" }
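Mirroring `do_verify` earlier in this section, a sketch of verifying a MAR file against a built-in public key; the file name is a placeholder.

from mardor.reader import MarReader
import mardor.mozilla

with open('update.mar', 'rb') as f:
    with MarReader(f) as m:
        ok = m.verify(mardor.mozilla.release1_sha384)
        print('valid' if ok else 'invalid')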
q7160
MarReader.productinfo
train
def productinfo(self):
    """Return the productversion and channel of this MAR if present."""
    if not self.mardata.additional:
        return None

    for s in self.mardata.additional.sections:
        if s.id == 1:
            return str(s.productversion), str(s.channel)

    return None
python
{ "resource": "" }
q7161
MarReader.calculate_hashes
train
def calculate_hashes(self):
    """Return hashes of the contents of this MAR file.

    The hashes depend on the algorithms defined in the MAR file's
    signature block.

    Returns:
        A list of (algorithm_id, hash) tuples
    """
    hashers = []
    if not self.mardata.signatures:
        return []

    for s in self.mardata.signatures.sigs:
        h = make_hasher(s.algorithm_id)
        hashers.append((s.algorithm_id, h))

    for block in get_signature_data(self.fileobj, self.mardata.signatures.filesize):
        [h.update(block) for (_, h) in hashers]

    return [(algo_id, h.finalize()) for (algo_id, h) in hashers]
python
{ "resource": "" }
q7162
_has_extras
train
def _has_extras(ctx):
    """Determine if a MAR file has an additional section block or not.

    It does this by looking at where file data starts in the file. If this
    starts immediately after the signature data, then no additional sections
    are present.

    Args:
        ctx (context): construct parsing context

    Returns:
        True if the MAR file has an additional section block
        False otherwise
    """
    if not ctx.index.entries:
        return False
    return ctx.data_offset > 8 and ctx.data_offset > (ctx.signatures.offset_end + 8)
python
{ "resource": "" }
q7163
get_publickey
train
def get_publickey(keydata):
    """Load the public key from a PEM encoded string."""
    try:
        key = serialization.load_pem_public_key(
            keydata,
            backend=default_backend(),
        )
        return key
    except ValueError:
        # fall back to extracting the public half of a private key
        key = serialization.load_pem_private_key(
            keydata,
            password=None,
            backend=default_backend(),
        )
        key = key.public_key()
        return key
python
{ "resource": "" }
q7164
get_privatekey
train
def get_privatekey(keydata):
    """Load the private key from a PEM encoded string."""
    key = serialization.load_pem_private_key(
        keydata,
        password=None,
        backend=default_backend(),
    )
    return key
python
{ "resource": "" }
q7165
get_signature_data
train
def get_signature_data(fileobj, filesize):
    """Read data from a MAR file that is required for MAR signatures.

    Args:
        fileobj (file-like object): file-like object to read the MAR data from
        filesize (int): the total size of the file

    Yields:
        blocks of bytes representing the data required to generate or
        validate signatures.
    """
    # Read everything except the signature entries
    # The first 8 bytes are covered, as is everything from the beginning
    # of the additional section to the end of the file. The signature
    # algorithm id and size fields are also covered.
    fileobj.seek(0)
    marfile = mar.parse_stream(fileobj)
    if not marfile.signatures:
        raise IOError("Can't generate signature data for file without signature blocks")

    # MAR header
    fileobj.seek(0)
    block = fileobj.read(8)
    yield block

    # Signatures header
    sigs = sigs_header.parse_stream(fileobj)
    sig_types = [(sig.algorithm_id, sig.size) for sig in sigs.sigs]

    block = Int64ub.build(filesize) + Int32ub.build(sigs.count)
    yield block

    # Signature algorithm id and size per entry
    for algorithm_id, size in sig_types:
        block = Int32ub.build(algorithm_id) + Int32ub.build(size)
        yield block

    # Everything else in the file is covered
    for block in file_iter(fileobj):
        yield block
python
{ "resource": "" }
q7166
make_hasher
train
def make_hasher(algorithm_id):
    """Create a hashing object for the given signing algorithm."""
    if algorithm_id == 1:
        return hashes.Hash(hashes.SHA1(), default_backend())
    elif algorithm_id == 2:
        return hashes.Hash(hashes.SHA384(), default_backend())
    else:
        raise ValueError("Unsupported signing algorithm: %s" % algorithm_id)
python
{ "resource": "" }
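The returned object is a `cryptography` hash context, so the expected interface is update/finalize:

h = make_hasher(2)     # algorithm id 2 -> SHA-384
h.update(b'some data')
digest = h.finalize()  # SHA-384 produces a 48-byte digest
assert len(digest) == 48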
q7167
sign_hash
train
def sign_hash(private_key, hash, hash_algo):
    """Sign the given hash with the given private key.

    Args:
        private_key (str): PEM encoded private key
        hash (byte str): hash to sign
        hash_algo (str): name of hash algorithm used

    Returns:
        byte string representing the signature
    """
    hash_algo = _hash_algorithms[hash_algo]
    return get_privatekey(private_key).sign(
        hash,
        padding.PKCS1v15(),
        utils.Prehashed(hash_algo),
    )
python
{ "resource": "" }
q7168
verify_signature
train
def verify_signature(public_key, signature, hash, hash_algo):
    """Verify the given signature is correct for the given hash and public key.

    Args:
        public_key (str): PEM encoded public key
        signature (bytes): signature to verify
        hash (bytes): hash of data
        hash_algo (str): hash algorithm used

    Returns:
        True if the signature is valid, False otherwise
    """
    hash_algo = _hash_algorithms[hash_algo]
    try:
        return get_publickey(public_key).verify(
            signature,
            hash,
            padding.PKCS1v15(),
            utils.Prehashed(hash_algo),
        ) is None
    except InvalidSignature:
        return False
python
{ "resource": "" }
q7169
make_rsa_keypair
train
def make_rsa_keypair(bits):
    """Generate an RSA keypair.

    Args:
        bits (int): number of bits to use for the key.

    Returns:
        (private_key, public_key) - both as PEM encoded strings
    """
    private_key = rsa.generate_private_key(
        public_exponent=65537,
        key_size=bits,
        backend=default_backend(),
    )
    private_pem = private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=serialization.NoEncryption(),
    )
    public_pem = private_key.public_key().public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo,
    )
    return private_pem, public_pem
python
{ "resource": "" }
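Combining this with `make_hasher`, `sign_hash`, and `verify_signature` from this section gives a self-contained round-trip sketch:

private_pem, public_pem = make_rsa_keypair(4096)
h = make_hasher(2)   # SHA-384, matching the 4096-bit convention used by the CLI above
h.update(b'payload')
digest = h.finalize()
sig = sign_hash(private_pem, digest, 'sha384')
assert verify_signature(public_pem, sig, digest, 'sha384')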
q7170
mkdir
train
def mkdir(path):
    """Make a directory and its parents.

    Args:
        path (str): path to create

    Returns:
        None

    Raises:
        OSError if the directory cannot be created.
    """
    try:
        os.makedirs(path)
        # sanity check
        if not os.path.isdir(path):  # pragma: no cover
            raise IOError('path is not a directory')
    except OSError as e:
        # EEXIST
        if e.errno == 17 and os.path.isdir(path):
            return
        raise
python
{ "resource": "" }
q7171
write_to_file
train
def write_to_file(src, dst):
    """Write data from `src` into `dst`.

    Args:
        src (iterable): iterable that yields blocks of data to write
        dst (file-like object): file-like object that must support
            .write(block)

    Returns:
        number of bytes written to `dst`
    """
    n = 0
    for block in src:
        dst.write(block)
        n += len(block)
    return n
python
{ "resource": "" }
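A trivial in-memory demonstration of `write_to_file`:

import io

src = iter([b'hello ', b'world'])
dst = io.BytesIO()
assert write_to_file(src, dst) == 11
assert dst.getvalue() == b'hello world'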
q7172
auto_decompress_stream
train
def auto_decompress_stream(src):
    """Decompress data from `src` if required.

    If the first block of `src` appears to be compressed, then the entire
    stream will be uncompressed. Otherwise the stream will be passed along
    as-is.

    Args:
        src (iterable): iterable that yields blocks of data

    Yields:
        blocks of uncompressed data
    """
    block = next(src)
    compression = guess_compression(block)
    if compression == 'bz2':
        src = bz2_decompress_stream(chain([block], src))
    elif compression == 'xz':
        src = xz_decompress_stream(chain([block], src))
    else:
        src = chain([block], src)

    for block in src:
        yield block
python
{ "resource": "" }
q7173
path_is_inside
train
def path_is_inside(path, dirname):
    """Return True if path is under dirname."""
    path = os.path.abspath(path)
    dirname = os.path.abspath(dirname)
    while len(path) >= len(dirname):
        if path == dirname:
            return True
        newpath = os.path.dirname(path)
        if newpath == path:
            return False
        path = newpath
    return False
python
{ "resource": "" }
q7174
safejoin
train
def safejoin(base, *elements):
    """Safely joins paths together.

    The result will always be a subdirectory under `base`, otherwise
    ValueError is raised.

    Args:
        base (str): base path
        elements (list of strings): path elements to join to base

    Returns:
        elements joined to base
    """
    # TODO: do we really want to be absolute here?
    base = os.path.abspath(base)
    path = os.path.join(base, *elements)
    path = os.path.normpath(path)
    if not path_is_inside(path, base):
        raise ValueError('target path is outside of the base path')
    return path
python
{ "resource": "" }
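Path-traversal attempts are rejected by `safejoin`; a small sketch:

out = safejoin('/tmp/extract', 'a', 'b.txt')  # -> '/tmp/extract/a/b.txt'
try:
    safejoin('/tmp/extract', '../etc/passwd')
except ValueError:
    print('traversal blocked')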
q7175
filesize
train
def filesize(fileobj):
    """Return the number of bytes in the fileobj.

    This function seeks to the end of the file, and then back to the
    original position.
    """
    current = fileobj.tell()
    fileobj.seek(0, 2)
    end = fileobj.tell()
    fileobj.seek(current)
    return end
python
{ "resource": "" }
q7176
_sync
train
def _sync(timeout=None):
    """Wait until all pending handler cothreads have completed.
    """
    evt = WeakEvent(auto_reset=False)
    # first ensure that any pending callbacks from worker threads have been delivered.
    # these are calls of _fromMain()
    Callback(evt.Signal)
    evt.Wait(timeout=timeout)
    evt.Reset()  # reuse

    # grab the current set of in-progress cothreads/events
    wait4 = set(_handlers)
    # because Spawn.Wait() can only be called once, remove them and
    # use 'evt' as a proxy for what I'm waiting on so that overlapping
    # calls to _sync() will wait for these as well.
    # However, this means that our failure must cascade to subsequent
    # calls to _sync() before we complete.
    _handlers.clear()
    _handlers.add(evt)

    try:
        WaitForAll(wait4, timeout=timeout)
    except Exception as e:
        evt.SignalException(e)  # pass along error to next concurrent _sync()
    else:
        evt.Signal()
python
{ "resource": "" }
q7177
set_debug
train
def set_debug(lvl):
    """Set PVA global debug print level.  This prints directly to stdout,
    bypassing eg. sys.stdout.

    :param lvl: logging.* level or logLevel*
    """
    lvl = _lvlmap.get(lvl, lvl)
    assert lvl in _lvls, lvl
    _ClientProvider.set_debug(lvl)
python
{ "resource": "" }
q7178
cleanup
train
def cleanup():
    """P4P sequenced shutdown.  Intended to be atexit.  Idempotent.
    """
    _log.debug("P4P atexit begins")
    # clean provider registry
    from .server import clearProviders, _cleanup_servers
    clearProviders()

    # close client contexts
    from .client.raw import _cleanup_contexts
    _cleanup_contexts()

    # stop servers
    _cleanup_servers()

    # shutdown default work queue
    from .util import _defaultWorkQueue
    _defaultWorkQueue.stop()
    _log.debug("P4P atexit completes")
python
{ "resource": "" }
q7179
Value.changed
train
def changed(self, *fields):
    """Test if one or more fields have changed.

    A field is considered to have changed if it has been marked as changed,
    or if any of its parent, or child, fields have been marked as changed.
    """
    S = super(Value, self).changed
    for fld in fields or (None,):  # no args tests for any change
        if S(fld):
            return True
    return False
python
{ "resource": "" }
q7180
NTNDArray.wrap
train
def wrap(self, value):
    """Wrap numpy.ndarray as Value
    """
    attrib = getattr(value, 'attrib', {})
    S, NS = divmod(time.time(), 1.0)
    value = numpy.asarray(value)  # loses any special/augmented attributes
    dims = list(value.shape)
    dims.reverse()  # inner-most sent as left

    if 'ColorMode' not in attrib:
        # attempt to infer color mode from shape
        if value.ndim == 2:
            attrib['ColorMode'] = 0  # gray
        elif value.ndim == 3:
            for idx, dim in enumerate(dims):
                if dim == 3:  # assume it's a color
                    attrib['ColorMode'] = 2 + idx  # 2 - RGB1, 3 - RGB2, 4 - RGB3
                    break  # assume that the first is color, and any subsequent dim=3 is a thin ROI

    dataSize = value.nbytes

    return Value(self.type, {
        'value': (self._code2u[value.dtype.char], value.flatten()),
        'compressedSize': dataSize,
        'uncompressedSize': dataSize,
        'uniqueId': 0,
        'timeStamp': {
            'secondsPastEpoch': S,
            'nanoseconds': NS * 1e9,
        },
        'attribute': [{'name': K, 'value': V} for K, V in attrib.items()],
        'dimension': [{'size': N,
                       'offset': 0,
                       'fullSize': N,
                       'binning': 1,
                       'reverse': False} for N in dims],
    })
python
{ "resource": "" }
q7181
NTNDArray.unwrap
train
def unwrap(klass, value):
    """Unwrap Value as NTNDArray
    """
    V = value.value
    if V is None:
        # Union empty.  treat as zero-length char array
        V = numpy.zeros((0,), dtype=numpy.uint8)
    return V.view(klass.ntndarray)._store(value)
python
{ "resource": "" }
q7182
defaultBuilder
train
def defaultBuilder(value, nt):
    """Reasonably sensible default handling of put builder
    """
    if callable(value):
        def logbuilder(V):
            try:
                value(V)
            except:
                _log.exception("Error in Builder")
                raise  # will be logged again
        return logbuilder

    def builder(V):
        try:
            if isinstance(value, Value):
                V[None] = value
            elif isinstance(value, dict):
                for k, v in value.items():
                    V[k] = v
            else:
                nt.assign(V, value)
        except:
            _log.exception("Exception in Put builder")
            raise  # will be printed to stdout from extension code.
    return builder
python
{ "resource": "" }
q7183
Context.disconnect
train
def disconnect(self, name=None):
    """Clear internal Channel cache, allowing currently unused channels
    to be implicitly closed.

    :param str name: None, to clear the entire cache, or a name string
        to clear only a certain entry.
    """
    if name is None:
        self._channels = {}
    else:
        self._channels.pop(name)
    if self._ctxt is not None:
        self._ctxt.disconnect(name)
python
{ "resource": "" }
q7184
Context._request
train
def _request(self, process=None, wait=None):
    """Helper for building pvRequests

    :param str process: Control remote processing.  May be 'true', 'false',
        'passive', or None.
    :param bool wait: Wait for all server processing to complete.
    """
    opts = []
    if process is not None:
        opts.append('process=%s' % process)
    if wait is not None:
        if wait:
            opts.append('wait=true')
        else:
            opts.append('wait=false')
    return 'field()record[%s]' % (','.join(opts))
python
{ "resource": "" }
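The helper only assembles a pvRequest string, so its output can be read straight from the code; `ctxt` below stands in for a Context instance.

ctxt._request(process='passive', wait=True)
# -> 'field()record[process=passive,wait=true]'
ctxt._request()
# -> 'field()record[]'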
q7185
Context.get
train
def get(self, name, handler, request=None):
    """Begin Fetch of current value of a PV

    :param name: A single name string or list of name strings
    :param request: A :py:class:`p4p.Value` or string to qualify this
        request, or None to use a default.
    :param callable handler: Completion notification.  Called with a Value,
        RemoteError, or Cancelled
    :returns: An object with a method cancel() which may be used to abort
        the operation.
    """
    chan = self._channel(name)
    return _p4p.ClientOperation(chan, handler=unwrapHandler(handler, self._nt),
                                pvRequest=wrapRequest(request), get=True, put=False)
python
{ "resource": "" }
q7186
Context.put
train
def put(self, name, handler, builder=None, request=None, get=True):
    """Write a new value to a PV.

    :param name: A single name string or list of name strings
    :param callable handler: Completion notification.  Called with None
        (success), RemoteError, or Cancelled
    :param callable builder: Called when the PV Put type is known.  A builder
        is responsible for filling in the Value to be sent.  builder(value)
    :param request: A :py:class:`p4p.Value` or string to qualify this
        request, or None to use a default.
    :param bool get: Whether to do a Get before the Put.  If True then the
        value passed to the builder callable will be initialized with recent
        PV values.  eg. use this with NTEnum to find the enumeration list.
    :returns: An object with a method cancel() which may be used to abort
        the operation.
    """
    chan = self._channel(name)
    return _p4p.ClientOperation(chan, handler=unwrapHandler(handler, self._nt),
                                builder=defaultBuilder(builder, self._nt),
                                pvRequest=wrapRequest(request),
                                get=get, put=True)
python
{ "resource": "" }
q7187
Context.rpc
train
def rpc(self, name, handler, value, request=None):
    """Perform RPC operation on PV

    :param name: A single name string or list of name strings
    :param callable handler: Completion notification.  Called with a Value,
        RemoteError, or Cancelled
    :param request: A :py:class:`p4p.Value` or string to qualify this
        request, or None to use a default.
    :returns: An object with a method cancel() which may be used to abort
        the operation.
    """
    chan = self._channel(name)
    if value is None:
        value = Value(Type([]))
    return _p4p.ClientOperation(chan, handler=unwrapHandler(handler, self._nt),
                                value=value, pvRequest=wrapRequest(request),
                                rpc=True)
python
{ "resource": "" }
q7188
Context.monitor
train
def monitor(self, name, handler, request=None, **kws):
    """Begin subscription to named PV

    :param str name: PV name string
    :param callable handler: Completion notification.  Called with None
        (FIFO not empty), RemoteError, Cancelled, or Disconnected
    :param request: A :py:class:`p4p.Value` or string to qualify this
        request, or None to use a default.
    :param bool notify_disconnect: Whether disconnect (and done)
        notifications are delivered to the callback (as None).
    :returns: A Subscription
    """
    chan = self._channel(name)
    return Subscription(context=self,
                        nt=self._nt,
                        channel=chan,
                        handler=monHandler(handler),
                        pvRequest=wrapRequest(request),
                        **kws)
python
{ "resource": "" }
q7189
timesout
train
def timesout(deftimeout=5.0):
    """Decorate a coroutine to implement an overall timeout.

    The decorated coroutine will have an additional keyword argument
    'timeout=' which gives a timeout in seconds, or None to disable timeout.

    :param float deftimeout: The default timeout= for the decorated coroutine.

    It is suggested to perform one overall timeout at a high level rather
    than multiple timeouts on low-level operations. ::

        @timesout()
        @asyncio.coroutine
        def dostuff(ctxt):
            yield from ctxt.put('msg', 'Working')
            A, B = yield from ctxt.get(['foo', 'bar'])
            yield from ctxt.put('bar', A+B, wait=True)
            yield from ctxt.put('msg', 'Done')

        @asyncio.coroutine
        def exec():
            with Context('pva') as ctxt:
                yield from dostuff(ctxt, timeout=5)
    """
    def decorate(fn):
        assert asyncio.iscoroutinefunction(fn), "Place @timesout before @coroutine"
        @wraps(fn)
        @asyncio.coroutine
        def wrapper(*args, timeout=deftimeout, **kws):
            loop = kws.get('loop')
            fut = fn(*args, **kws)
            if timeout is None:
                yield from fut
            else:
                yield from asyncio.wait_for(fut, timeout=timeout, loop=loop)
        return wrapper
    return decorate
python
{ "resource": "" }
q7190
Subscription.close
train
def close(self):
    """Begin closing subscription.
    """
    if self._S is not None:
        # after .close() self._event should never be called
        self._S.close()
        self._S = None
        self._Q.put_nowait(None)
python
{ "resource": "" }
q7191
Context.close
train
def close(self):
    """Force close all Channels and cancel all Operations
    """
    if self._Q is not None:
        for T in self._T:
            self._Q.interrupt()
        for n, T in enumerate(self._T):
            _log.debug('Join Context worker %d', n)
            T.join()
        _log.debug('Joined Context workers')
        self._Q, self._T = None, None
    super(Context, self).close()
python
{ "resource": "" }
q7192
rpc
train
def rpc(rtype=None):
    """Decorator marks a method for export.

    :param type: Specifies which :py:class:`Type` this method will return.

    The return type (rtype) must be one of:

    - An instance of :py:class:`p4p.Type`
    - None, in which case the method must return a :py:class:`p4p.Value`
    - One of the NT helper classes (eg :py:class:`p4p.nt.NTScalar`).
    - A list or tuple used to construct a :py:class:`p4p.Type`.

    Exported methods raise an :py:class:`Exception` to indicate an error to
    the remote caller.  :py:class:`RemoteError` may be raised to send a
    specific message describing the error condition.

    >>> class Example(object):
    ...     @rpc(NTScalar.buildType('d'))
    ...     def add(self, lhs, rhs):
    ...         return {'value': float(lhs) + float(rhs)}
    """
    wrap = None
    if rtype is None or isinstance(rtype, Type):
        pass
    elif isinstance(rtype, (list, tuple)):  # the original tested isinstance(type, ...), a bug
        rtype = Type(rtype)
    elif hasattr(rtype, 'type'):  # eg. one of the NT* helper classes
        wrap = rtype.wrap
        rtype = rtype.type
    else:
        raise TypeError("Not supported")

    def wrapper(fn):
        if wrap is not None:
            orig = fn
            @wraps(orig)
            def wrapper2(*args, **kws):
                return wrap(orig(*args, **kws))
            fn = wrapper2
        fn._reply_Type = rtype
        return fn
    return wrapper
python
{ "resource": "" }
q7193
rpccall
train
def rpccall(pvname, request=None, rtype=None):
    """Decorator marks a client proxy method.

    :param str pvname: The PV name, which will be formatted using the
        'format' argument of the proxy class constructor.
    :param request: A pvRequest string or :py:class:`p4p.Value` passed to
        eg. :py:meth:`p4p.client.thread.Context.rpc`.

    The method to be decorated must have all keyword arguments, where the
    keywords are type code strings or :class:`~p4p.Type`.
    """
    def wrapper(fn):
        fn._call_PV = pvname
        fn._call_Request = request
        fn._reply_Type = rtype
        return fn
    return wrapper
python
{ "resource": "" }
q7194
quickRPCServer
train
def quickRPCServer(provider, prefix, target,
                   maxsize=20, workers=1,
                   useenv=True, conf=None, isolate=False):
    """Run an RPC server in the current thread

    Calls are handled sequentially, and always in the current thread, if
    workers=1 (the default).  If workers>1 then calls are handled
    concurrently by a pool of worker threads.  Requires NTURI style
    argument encoding.

    :param str provider: A provider name.  Must be unique in this process.
    :param str prefix: PV name prefix.  Along with method names, must be
        globally unique.
    :param target: The object which is exporting methods.  (use the
        :func:`rpc` decorator)
    :param int maxsize: Number of pending RPC calls to be queued.
    :param int workers: Number of worker threads (default 1)
    :param useenv: Passed to :class:`~p4p.server.Server`
    :param conf: Passed to :class:`~p4p.server.Server`
    :param isolate: Passed to :class:`~p4p.server.Server`
    """
    from p4p.server import Server
    import time
    queue = ThreadedWorkQueue(maxsize=maxsize, workers=workers)
    provider = NTURIDispatcher(queue, target=target, prefix=prefix, name=provider)
    server = Server(providers=[provider], useenv=useenv, conf=conf, isolate=isolate)
    with server, queue:
        while True:
            time.sleep(10.0)
python
{ "resource": "" }
q7195
rpcproxy
train
def rpcproxy(spec):
    """Decorator to enable this class to proxy RPC client calls

    The decorated class constructor takes two additional arguments:
    `context=` is required to be a :class:`~p4p.client.thread.Context`,
    and `format=` can be a string, tuple, or dictionary and is applied to
    PV name strings given to :py:func:`rpccall`.  Other arguments are
    passed to the user class constructor. ::

        @rpcproxy
        class MyProxy(object):
            @rpccall("%s:add")
            def add(lhs='d', rhs='d'):
                pass

        ctxt = Context('pva')
        proxy = MyProxy(context=ctxt, format="tst:")  # evaluates "%s:add" % "tst:"

    The decorated class will be a sub-class of the provided class and
    :class:`RPCProxyBase`.
    """
    # inject our ctor first so we don't have to worry about super() non-sense.
    def _proxyinit(self, context=None, format={}, **kws):
        assert context is not None, context
        self.context = context
        self.format = format
        spec.__init__(self, **kws)

    obj = {'__init__': _proxyinit}
    for K, V in inspect.getmembers(spec, lambda M: hasattr(M, '_call_PV')):
        obj[K] = _wrapMethod(K, V)

    return type(spec.__name__, (RPCProxyBase, spec), obj)
python
{ "resource": "" }
q7196
ClientUnwrapper.unwrap
train
def unwrap(self, val):
    """Unpack a Value as some other python type
    """
    if val.getID() != self.id:
        self._update(val)
    return self._unwrap(val)
python
{ "resource": "" }
q7197
NTTable.buildType
train
def buildType(columns=[], extra=[]):
    """Build a table

    :param list columns: List of column names and types.  eg [('colA', 'd')]
    :param list extra: A list of tuples describing additional non-standard fields
    :returns: A :py:class:`Type`
    """
    return Type(id="epics:nt/NTTable:1.0",
                spec=[
                    ('labels', 'as'),
                    ('value', ('S', None, columns)),
                    ('descriptor', 's'),
                    ('alarm', alarm),
                    ('timeStamp', timeStamp),
                ] + extra)
python
{ "resource": "" }
q7198
NTTable.wrap
train
def wrap(self, values):
    """Pack an iterable of dict into a Value

    >>> T = NTTable([('A', 'ai'), ('B', 'as')])
    >>> V = T.wrap([
    ...     {'A': 42, 'B': 'one'},
    ...     {'A': 43, 'B': 'two'},
    ... ])
    """
    if isinstance(values, Value):
        return values
    cols = dict([(L, []) for L in self.labels])
    try:
        # unzip list of dict
        for V in values:
            for L in self.labels:
                try:
                    cols[L].append(V[L])
                except (IndexError, KeyError):
                    pass
        # allow omitting empty columns
        for L in self.labels:
            V = cols[L]
            if len(V) == 0:
                del cols[L]
        try:
            return self.Value(self.type, {
                'labels': self.labels,
                'value': cols,
            })
        except:
            _log.error("Failed to encode '%s' with %s", cols, self.labels)
            raise
    except:
        _log.exception("Failed to wrap: %s", values)
        raise
python
{ "resource": "" }
q7199
NTTable.unwrap
train
def unwrap(value):
    """Iterate an NTTable

    :returns: An iterator yielding an OrderedDict for each row
    """
    ret = []

    # build lists of column names, and values
    lbl, cols = [], []
    for cname, cval in value.value.items():
        lbl.append(cname)
        cols.append(cval)

    # zip together column arrays to iterate over rows
    for rval in izip(*cols):
        # zip together column names and row values
        ret.append(OrderedDict(zip(lbl, rval)))

    return ret
python
{ "resource": "" }