_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3 values | text stringlengths 75 19.8k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
def from_signature(message, signature):
    """Attempts to create a PublicKey by deriving it from the message
    and its signature.

    Args:
        message (bytes): The message to be verified.
        signature (Signature): The signature for message.
            The recovery_id must not be None!

    Returns:
        PublicKey: A PublicKey object derived from the signature,
            if it exists. None otherwise.

    Raises:
        ValueError: If signature.recovery_id is None.
    """
    if signature.recovery_id is None:
        raise ValueError("The signature must have a recovery_id.")
    msg = get_bytes(message)
    pub_keys = bitcoin_curve.recover_public_key(msg,
                                                signature,
                                                signature.recovery_id)
    for k, recid in pub_keys:
        # recovery_id was already verified to be non-None above, so the
        # previous redundant None check has been dropped.
        if recid == signature.recovery_id:
            return PublicKey(k.x, k.y)
    return None
"resource": ""
} |
def from_der(der):
    """Decodes a Signature that was DER-encoded.

    Args:
        der (bytes or str): The DER encoding to be decoded.

    Returns:
        Signature: The deserialized signature.

    Raises:
        ValueError: If the encoding is malformed or R/S are out of range.
    """
    d = get_bytes(der)
    # d must conform to (from btcd):
    # [0 ] 0x30      - ASN.1 identifier for sequence
    # [1 ] <1-byte>  - total remaining length
    # [2 ] 0x02      - ASN.1 identifier to specify an integer follows
    # [3 ] <1-byte>  - length of R
    # [4.] <bytes>   - R
    # [..] 0x02      - ASN.1 identifier to specify an integer follows
    # [..] <1-byte>  - length of S
    # [..] <bytes>   - S
    # 6 bytes + R (min. 1 byte) + S (min. 1 byte)
    if len(d) < 8:
        raise ValueError("DER signature string is too short.")
    # 6 bytes + R (max. 33 bytes) + S (max. 33 bytes)
    if len(d) > 72:
        raise ValueError("DER signature string is too long.")
    if d[0] != 0x30:
        raise ValueError("DER signature does not start with 0x30.")
    if d[1] != len(d[2:]):
        raise ValueError("DER signature length incorrect.")
    total_length = d[1]
    if d[2] != 0x02:
        raise ValueError("DER signature no 1st int marker.")
    # R must leave room for the S marker, S length byte and >= 1 byte of S.
    if d[3] <= 0 or d[3] > (total_length - 7):
        raise ValueError("DER signature incorrect R length.")
    # Grab R, check for errors
    rlen = d[3]
    s_magic_index = 4 + rlen
    rb = d[4:s_magic_index]
    if rb[0] & 0x80 != 0:
        raise ValueError("DER signature R is negative.")
    if len(rb) > 1 and rb[0] == 0 and rb[1] & 0x80 != 0x80:
        raise ValueError("DER signature R is excessively padded.")
    r = int.from_bytes(rb, 'big')
    # Grab S, check for errors
    if d[s_magic_index] != 0x02:
        raise ValueError("DER signature no 2nd int marker.")
    slen_index = s_magic_index + 1
    slen = d[slen_index]
    if slen <= 0 or slen > len(d) - (slen_index + 1):
        raise ValueError("DER signature incorrect S length.")
    # Bug fix: take exactly slen bytes for S rather than everything to the
    # end of the buffer, so a declared-short S cannot silently absorb
    # trailing bytes.
    sb = d[slen_index + 1:slen_index + 1 + slen]
    if sb[0] & 0x80 != 0:
        raise ValueError("DER signature S is negative.")
    if len(sb) > 1 and sb[0] == 0 and sb[1] & 0x80 != 0x80:
        raise ValueError("DER signature S is excessively padded.")
    s = int.from_bytes(sb, 'big')
    if r < 1 or r >= bitcoin_curve.n:
        raise ValueError("DER signature R is not between 1 and N - 1.")
    if s < 1 or s >= bitcoin_curve.n:
        raise ValueError("DER signature S is not between 1 and N - 1.")
    return Signature(r, s)
"resource": ""
} |
def from_bytes(b):
    """Build a Signature from a 64-byte string (r || s).

    Args:
        b (bytes): A 64-byte string; the first 32 bytes are taken as the
            big-endian r component, the last 32 bytes as s.

    Returns:
        Signature: A Signature object.

    Raises:
        ValueError: If the input is not exactly 64 bytes long.
    """
    if len(b) != 64:
        raise ValueError("from_bytes: Signature length != 64.")
    r_component = int.from_bytes(b[:32], 'big')
    s_component = int.from_bytes(b[32:], 'big')
    return Signature(r_component, s_component)
"resource": ""
} |
def to_der(self):
    """Encodes this signature using DER.

    Layout: 0x30 <length> 0x02 <length r> r 0x02 <length s> s

    Returns:
        bytes: The DER encoding of (self.r, self.s).
    """
    r_bytes, s_bytes = self._canonicalize()
    # Build the two INTEGER elements, then wrap them in a SEQUENCE whose
    # length byte covers everything after itself.
    body = (bytes([0x02, len(r_bytes)]) + r_bytes +
            bytes([0x02, len(s_bytes)]) + s_bytes)
    return bytes([0x30, len(body)]) + body
"resource": ""
} |
def from_bytes(b):
    """Generates either a HDPrivateKey or HDPublicKey from the underlying
    bytes.

    The serialization must conform to the description in:
    https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki#serialization-format

    Args:
        b (bytes): A byte stream conforming to the above.

    Returns:
        HDPrivateKey or HDPublicKey: Either an HD private or public key
            object, depending on what was serialized.

    Raises:
        ValueError: If b is too short or malformed.
    """
    if len(b) < 78:
        raise ValueError("b must be at least 78 bytes long.")
    version = int.from_bytes(b[:4], 'big')
    depth = b[4]
    fingerprint = b[5:9]
    child_index = int.from_bytes(b[9:13], 'big')
    chain = b[13:45]
    keydata = b[45:78]
    if version in (HDPrivateKey.MAINNET_VERSION, HDPrivateKey.TESTNET_VERSION):
        # Private keys are serialized as 0x00 || ser256(k).
        if keydata[0] != 0:
            raise ValueError("First byte of private key must be 0x00!")
        return HDPrivateKey(key=int.from_bytes(keydata[1:], 'big'),
                            chain_code=chain,
                            index=child_index,
                            depth=depth,
                            parent_fingerprint=fingerprint)
    if version in (HDPublicKey.MAINNET_VERSION, HDPublicKey.TESTNET_VERSION):
        # Public keys are serialized in SEC1 compressed form.
        if keydata[0] not in (0x02, 0x03):
            raise ValueError("First byte of public key must be 0x02 or 0x03!")
        pub = PublicKey.from_bytes(keydata)
        return HDPublicKey(x=pub.point.x,
                           y=pub.point.y,
                           chain_code=chain,
                           index=child_index,
                           depth=depth,
                           parent_fingerprint=fingerprint)
    raise ValueError("incorrect encoding.")
"resource": ""
} |
def to_b58check(self, testnet=False):
    """Generates a Base58Check encoding of this key.

    Args:
        testnet (bool): True if the key is to be used with testnet,
            False otherwise.

    Returns:
        str: A Base58Check encoded string representing the key.
    """
    if testnet:
        raw = self.testnet_bytes
    else:
        raw = bytes(self)
    return base58.b58encode_check(raw)
"resource": ""
} |
def master_key_from_entropy(passphrase='', strength=128):
    """Generates a master key from system entropy.

    Args:
        passphrase (str): An optional passphrase for the generated
            mnemonic string.
        strength (int): Amount of entropy desired; must be a multiple
            of 32 between 128 and 256.

    Returns:
        HDPrivateKey, str: a tuple of the master private key and a
            mnemonic string from which the seed can be recovered.

    Raises:
        ValueError: If strength is out of range or not a multiple of 32.
    """
    if strength % 32 != 0:
        raise ValueError("strength must be a multiple of 32")
    if not 128 <= strength <= 256:
        raise ValueError("strength should be >= 128 and <= 256")
    entropy = rand_bytes(strength // 8)
    mnemonic = Mnemonic(language='english')
    phrase = mnemonic.to_mnemonic(entropy)
    seed = Mnemonic.to_seed(phrase, passphrase)
    return HDPrivateKey.master_key_from_seed(seed), phrase
"resource": ""
} |
def master_key_from_seed(seed):
    """Generates a master key from a provided seed.

    Args:
        seed (bytes or str): a string of bytes or a hex string.

    Returns:
        HDPrivateKey: the master private key.

    Raises:
        ValueError: If the derived key would be zero or >= the curve order.
    """
    seed_bytes = get_bytes(seed)
    # BIP-0032: HMAC-SHA512 keyed with "Bitcoin seed"; left half is the
    # master key, right half is the chain code.
    digest = hmac.new(b"Bitcoin seed", seed_bytes, hashlib.sha512).digest()
    left, right = digest[:32], digest[32:]
    master = int.from_bytes(left, 'big')
    if master == 0 or master >= bitcoin_curve.n:
        raise ValueError("Bad seed, resulting in invalid key!")
    return HDPrivateKey(key=master, chain_code=right, index=0, depth=0)
"resource": ""
} |
def from_parent(parent_key, i):
    """Derives a child private key from a parent private key.

    It is not possible to derive a child private key from a public
    parent key.

    Args:
        parent_key (HDPrivateKey): the parent private key.
        i (int): child index; indices with the high bit set are hardened.

    Returns:
        HDPrivateKey: the derived child key, or None if the derivation
            yields an invalid key (caller should try the next index).
    """
    if not isinstance(parent_key, HDPrivateKey):
        raise TypeError("parent_key must be an HDPrivateKey object.")
    index_bytes = i.to_bytes(length=4, byteorder='big')
    if i & 0x80000000:
        # Hardened: HMAC over 0x00 || ser256(parent key) || index.
        data = b'\x00' + bytes(parent_key._key) + index_bytes
    else:
        # Normal: HMAC over serP(parent public key) || index.
        data = parent_key.public_key.compressed_bytes + index_bytes
    digest = hmac.new(parent_key.chain_code, data, hashlib.sha512).digest()
    left, right = digest[:32], digest[32:]
    left_int = int.from_bytes(left, 'big')
    if left_int >= bitcoin_curve.n:
        return None
    child = (left_int + parent_key._key.key) % bitcoin_curve.n
    if child == 0:
        # Incredibly unlucky choice
        return None
    return HDPrivateKey(key=child,
                        chain_code=right,
                        index=i,
                        depth=parent_key.depth + 1,
                        parent_fingerprint=parent_key.fingerprint)
"resource": ""
} |
def is_hex_string(string):
    """Check if the string is only composed of hex characters.

    Accepts str or bytes. Returns False for an empty string, for any
    non-ASCII bytes input, and for any string containing a non-hex
    character.

    Bug fixes vs. the previous version: `re.match` only anchored at the
    start (so "ff zz" wrongly passed), and bytes were converted with
    str(), which on Python 3 yields "b'...'" and made nearly any bytes
    input pass.
    """
    if isinstance(string, bytes):
        try:
            string = string.decode('ascii')
        except UnicodeDecodeError:
            return False
    return re.fullmatch(r'[A-Fa-f0-9]+', string) is not None
"resource": ""
} |
def long_to_hex(l, size):
    """Encode a long value as a hex string, zero-padded to size.

    Note that size is the length of the resulting hex string, so for a
    32-byte long, size should be 64 (two hex characters per byte).
    """
    hex_str = format(l, '0{0}x'.format(size)).lower()
    return ensure_bytes(hex_str)
"resource": ""
} |
def get_public_key(self):
    """Return the PublicKey corresponding to this PrivateKey."""
    verifying_key = self._private_key.get_verifying_key()
    return PublicKey.from_verifying_key(verifying_key,
                                        network=self.network,
                                        compressed=self.compressed)
"resource": ""
} |
def get_extended_key(self):
    """Get the extended key.

    Extended keys contain the network bytes and the public or private
    key.
    """
    # Network prefix byte, hex-encoded, prepended to the key hex.
    prefix = hexlify(chr_py2(self.network.SECRET_KEY))
    return ensure_bytes(prefix + self.get_key())
"resource": ""
} |
def from_wif(cls, wif, network=BitcoinMainNet):
    """Import a key in WIF format.

    WIF is Wallet Import Format: a base58 encoded, checksummed key.
    See https://en.bitcoin.it/wiki/Wallet_import_format for a full
    description.

    Compressed WIFs are supported - see
    http://bitcoin.stackexchange.com/questions/7299/when-importing-private-keys-will-compressed-or-uncompressed-format-be-used # nopep8
    (specifically http://bitcoin.stackexchange.com/a/7958).
    """
    wif = ensure_str(wif)
    # Decode the base58 string; b58decode_check raises ValueError when
    # the embedded checksum does not match.
    try:
        raw = base58.b58decode_check(wif)
    except ValueError as e:
        raise ChecksumException(e)
    # Verify we're on the right network.
    prefix = raw[0]
    if not isinstance(prefix, six.integer_types):
        # Python 2 indexing yields a 1-char string, not an int.
        prefix = ord(prefix)
    if prefix != network.SECRET_KEY:
        raise incompatible_network_exception_factory(
            network_name=network.NAME,
            expected_prefix=network.SECRET_KEY,
            given_prefix=prefix)
    # Drop the network byte.
    key_bytes = raw[1:]
    # A 33rd byte flags a compressed public key; this only affects the
    # way in which addresses are generated.
    compressed = False
    if len(key_bytes) == 33:
        key_bytes = key_bytes[:-1]
        compressed = True
    # And we should finally have a valid key.
    return cls(long_or_int(hexlify(key_bytes), 16), network,
               compressed=compressed)
"resource": ""
} |
def from_master_password(cls, password, network=BitcoinMainNet):
    """Generate a new key from a master password.

    The password is hashed via a single round of sha256 and is highly
    breakable, but it's the standard brainwallet approach.

    See `PrivateKey.from_master_password_slow` for a slightly more
    secure generation method (which will still be subject to a rainbow
    table attack :\)
    """
    hashed = sha256(ensure_bytes(password)).hexdigest()
    return cls.from_hex_key(hashed, network)
"resource": ""
} |
def get_key(self, compressed=None):
    """Get the hex-encoded key.

    :param compressed: False for the standard 65-byte key, True for the
        compressed 33-byte SEC1 form. None (the default) uses the
        self.compressed attribute.
    :type compressed: bool

    PublicKeys consist of an ID byte plus the x (and, uncompressed, y)
    coordinates on the elliptic curve. Uncompressed keys use ID byte
    04; compressed keys use 03 when y is odd, 02 when y is even
    (SEC1 format, http://www.secg.org/collateral/sec1_final.pdf).
    """
    if compressed is None:
        compressed = self.compressed
    if not compressed:
        return ensure_bytes(
            b'04' +
            long_to_hex(self.x, 64) +
            long_to_hex(self.y, 64))
    # 0x02 for even y, 0x03 for odd y.
    id_byte = 2 + (self.y & 1)
    return ensure_bytes(long_to_hex(id_byte, 2) + long_to_hex(self.x, 64))
"resource": ""
} |
def from_hex_key(cls, key, network=BitcoinMainNet):
    """Load the PublicKey from a compressed or uncompressed hex key.

    This format is defined in PublicKey.get_key().
    """
    if len(key) in (130, 66):
        # It might be a hexlified byte array; fall through if not.
        try:
            key = unhexlify(key)
        except TypeError:
            pass
    key = ensure_bytes(key)
    compressed = False
    id_byte = key[0]
    if not isinstance(id_byte, six.integer_types):
        # Python 2 indexing yields a 1-char string.
        id_byte = ord(id_byte)
    if id_byte == 4:
        # Uncompressed public point:
        # 1B ID + 32B x coord + 32B y coord = 65 B.
        if len(key) != 65:
            raise KeyParseError("Invalid key length")
        public_pair = PublicPair(
            long_or_int(hexlify(key[1:33]), 16),
            long_or_int(hexlify(key[33:]), 16))
    elif id_byte in (2, 3):
        # Compressed public point: recover y from x. The x-to-pair
        # algorithm was lifted from pycoin and is described in
        # http://www.secg.org/collateral/sec1_final.pdf
        compressed = True
        if len(key) != 33:
            raise KeyParseError("Invalid key length")
        y_odd = bool(id_byte & 0x01)  # id 02 -> even y, id 03 -> odd y
        x = long_or_int(hexlify(key[1:]), 16)
        curve = SECP256k1.curve
        p = curve.p()
        # For SECP256k1, curve.a() is 0 and curve.b() is 7, so this is
        # effectively (x ** 3 + 7) % p, but the full equation is kept
        # for just-in-case-the-curve-is-broken future-proofing.
        alpha = (pow(x, 3, p) + curve.a() * x + curve.b()) % p
        beta = square_root_mod_prime(alpha, p)
        # Pick the root whose parity matches the ID byte.
        if bool(beta & 1) == y_odd:
            public_pair = PublicPair(x, beta)
        else:
            public_pair = PublicPair(x, p - beta)
    else:
        raise KeyParseError("The given key is not in a known format.")
    return cls.from_public_pair(public_pair, network=network,
                                compressed=compressed)
"resource": ""
} |
def create_point(self, x, y):
    """Create an ECDSA point on the SECP256k1 curve with the given coords.

    :param x: The x coordinate on the curve
    :type x: long
    :param y: The y coordinate on the curve
    :type y: long
    """
    coords_are_ints = (isinstance(x, six.integer_types) and
                       isinstance(y, six.integer_types))
    if not coords_are_ints:
        raise ValueError("The coordinates must be longs.")
    return _ECDSA_Point(SECP256k1.curve, x, y)
"resource": ""
} |
def from_point(cls, point, network=BitcoinMainNet, **kwargs):
    """Create a PublicKey from a point on the SECP256k1 curve.

    :param point: A point on the SECP256k1 curve.
    :type point: SECP256k1.point
    """
    vk = VerifyingKey.from_public_point(point, curve=SECP256k1)
    return cls.from_verifying_key(vk, network=network, **kwargs)
"resource": ""
} |
def get_chart(chart_type, time_span=None, rolling_average=None, api_code=None):
    """Get chart data of a specific chart type.

    :param str chart_type: type of chart
    :param str time_span: duration of the chart.
        Default is 1 year for most charts, 1 week for mempool charts
        (optional) (Example: 5weeks)
    :param str rolling_average: duration over which the data should be
        averaged (optional)
    :param str api_code: Blockchain.info API code (optional)
    :return: an instance of :class:`Chart` class
    """
    resource = 'charts/' + chart_type + '?format=json'
    if time_span is not None:
        # Bug fix: the parameter name had been mangled to '×pan'
        # (an unescaped '&times;pan' HTML entity); the API expects
        # '&timespan='.
        resource += '&timespan=' + time_span
    if rolling_average is not None:
        resource += '&rollingAverage=' + rolling_average
    if api_code is not None:
        resource += '&api_code=' + api_code
    response = util.call_api(resource)
    return Chart(json.loads(response))
"resource": ""
} |
def get_pools(time_span=None, api_code=None):
    """Get number of blocks mined by each pool.

    :param str time_span: duration of the chart.
        Default is 4days (optional)
    :param str api_code: Blockchain.info API code (optional)
    :return: an instance of dict:{str,int}
    """
    # Bug fix: the previous version only emitted '?' for time_span, so a
    # lone api_code produced the malformed URL 'pools&api_code=...'.
    query = []
    if time_span is not None:
        query.append('timespan=' + time_span)
    if api_code is not None:
        query.append('api_code=' + api_code)
    resource = 'pools'
    if query:
        resource += '?' + '&'.join(query)
    response = util.call_api(resource, base_url='https://api.blockchain.info/')
    # json.loads already returns a plain dict; no per-item copy needed.
    return dict(json.loads(response))
"resource": ""
} |
def send(self, to, amount, from_address=None, fee=None):
    """Send bitcoin from your wallet to a single address.

    :param str to: recipient bitcoin address
    :param int amount: amount to send (in satoshi)
    :param str from_address: specific address to send from (optional)
    :param int fee: transaction fee in satoshi. Must be greater than
        the default fee (optional).
    :return: an instance of :class:`PaymentResponse` class
    """
    # Single-recipient send is just the degenerate case of send_many.
    return self.send_many({to: amount}, from_address, fee)
"resource": ""
} |
def send_many(self, recipients, from_address=None, fee=None):
    """Send bitcoin from your wallet to multiple addresses.

    :param dictionary recipients: dictionary with the structure of
        'address':amount
    :param str from_address: specific address to send from (optional)
    :param int fee: transaction fee in satoshi. Must be greater than
        the default fee (optional).
    :return: an instance of :class:`PaymentResponse` class
    """
    params = self.build_basic_request(read_only=False)
    if len(recipients) == 1:
        # Bug fix: the previous code used recipients.popitem(), which
        # mutated the caller's dictionary as a side effect. Read the
        # single entry without removing it.
        to_address, amount = next(iter(recipients.items()))
        params['to'] = to_address
        params['amount'] = amount
        method = 'payment'
    else:
        params['recipients'] = json.dumps(recipients)
        method = 'sendmany'
    if from_address is not None:
        params['from'] = from_address
    if fee is not None:
        params['fee'] = fee
    response = util.call_api("merchant/{0}/{1}".format(self.identifier, method),
                             params, base_url=self.service_url)
    json_response = json.loads(response)
    self.parse_error(json_response)
    return PaymentResponse(json_response['message'],
                           json_response['tx_hash'],
                           json_response.get('notice'))
"resource": ""
} |
def list_addresses(self):
    """List all active addresses in the wallet.

    :return: an array of :class:`Address` objects
    """
    params = self.build_basic_request()
    response = util.call_api("merchant/{0}/list".format(self.identifier),
                             params, base_url=self.service_url)
    json_response = json.loads(response)
    self.parse_error(json_response)
    return [Address(entry['balance'],
                    entry['address'],
                    entry.get('label'),
                    entry['total_received'])
            for entry in json_response['addresses']]
"resource": ""
} |
def create_wallet(password, api_code, service_url, priv=None, label=None, email=None):
    """Create a new Blockchain.info wallet. It can be created containing
    a pre-generated private key or will otherwise generate a new one.

    :param str password: password for the new wallet. At least 10 characters.
    :param str api_code: API code with create wallets permission
    :param str service_url: URL to an instance of service-my-wallet-v3
        (with trailing slash)
    :param str priv: private key to add to the wallet (optional)
    :param str label: label for the first address in the wallet (optional)
    :param str email: email to associate with the new wallet (optional)
    :return: an instance of :class:`WalletResponse` class
    """
    params = {'password': password, 'api_code': api_code}
    # Only forward the optional fields that were actually supplied.
    for name, value in (('priv', priv), ('label', label), ('email', email)):
        if value is not None:
            params[name] = value
    response = util.call_api("api/v2/create", params, base_url=service_url)
    json_response = json.loads(response)
    return CreateWalletResponse(json_response['guid'],
                                json_response['address'],
                                json_response['label'])
"resource": ""
} |
def get_block(block_id, api_code=None):
    """Get a single block based on a block hash.

    :param str block_id: block hash to look up
    :param str api_code: Blockchain.info API code (optional)
    :return: an instance of :class:`Block` class
    """
    resource = 'rawblock/' + block_id
    if api_code is not None:
        resource = resource + '?api_code=' + api_code
    raw = json.loads(util.call_api(resource))
    return Block(raw)
"resource": ""
} |
def get_tx(tx_id, api_code=None):
    """Get a single transaction based on a transaction hash.

    :param str tx_id: transaction hash to look up
    :param str api_code: Blockchain.info API code (optional)
    :return: an instance of :class:`Transaction` class
    """
    resource = 'rawtx/' + tx_id
    if api_code is not None:
        resource = resource + '?api_code=' + api_code
    raw = json.loads(util.call_api(resource))
    return Transaction(raw)
"resource": ""
} |
def get_block_height(height, api_code=None):
    """Get an array of blocks at the specified height.

    :param int height: block height to look up
    :param str api_code: Blockchain.info API code (optional)
    :return: an array of :class:`Block` objects
    """
    resource = 'block-height/{0}?format=json'.format(height)
    if api_code is not None:
        resource += '&api_code=' + api_code
    blocks = json.loads(util.call_api(resource))['blocks']
    return [Block(raw) for raw in blocks]
"resource": ""
} |
def get_address(address, filter=None, limit=None, offset=None, api_code=None):
    """Get data for a single address including an address balance and a
    list of relevant transactions.

    :param str address: address (base58 or hash160) to look up
    :param FilterType filter: the filter for transactions selection (optional)
    :param int limit: limit number of transactions to display (optional)
    :param int offset: number of transactions to skip when display (optional)
    :param str api_code: Blockchain.info API code (optional)
    :return: an instance of :class:`Address` class
    :raises ValueError: if filter is not a FilterType member
    """
    resource = 'address/{0}?format=json'.format(address)
    if filter is not None:
        if not isinstance(filter, FilterType):
            raise ValueError('Filter must be of FilterType enum')
        resource += '&filter=' + str(filter.value)
    if limit is not None:
        resource += '&limit=' + str(limit)
    if offset is not None:
        resource += '&offset=' + str(offset)
    if api_code is not None:
        resource += '&api_code=' + api_code
    return Address(json.loads(util.call_api(resource)))
"resource": ""
} |
def get_xpub(xpub, filter=None, limit=None, offset=None, api_code=None):
    """Get data for a single xpub including balance and a list of
    relevant transactions.

    :param str xpub: address (xpub) to look up
    :param FilterType filter: the filter for transactions selection (optional)
    :param int limit: limit number of transactions to fetch (optional)
    :param int offset: number of transactions to skip when fetch (optional)
    :param str api_code: Blockchain.info API code (optional)
    :return: an instance of :class:`Xpub` class
    :raises ValueError: if filter is not a FilterType member
    """
    resource = 'multiaddr?active=' + xpub
    if filter is not None:
        if not isinstance(filter, FilterType):
            raise ValueError('Filter must be of FilterType enum')
        resource += '&filter=' + str(filter.value)
    if limit is not None:
        resource += '&limit=' + str(limit)
    if offset is not None:
        resource += '&offset=' + str(offset)
    if api_code is not None:
        resource += '&api_code=' + api_code
    return Xpub(json.loads(util.call_api(resource)))
"resource": ""
} |
def get_multi_address(addresses, filter=None, limit=None, offset=None, api_code=None):
    """Get aggregate summary for multiple addresses including overall
    balance, per address balance and a list of relevant transactions.

    :param tuple addresses: addresses (xpub or base58) to look up
    :param FilterType filter: the filter for transactions selection (optional)
    :param int limit: limit number of transactions to fetch (optional)
    :param int offset: number of transactions to skip when fetch (optional)
    :param str api_code: Blockchain.info API code (optional)
    :return: an instance of :class:`MultiAddress` class
    :raises ValueError: if filter is not a FilterType member
    """
    if isinstance(addresses, basestring):
        active = addresses
    else:
        active = '|'.join(addresses)
    resource = 'multiaddr?active=' + active
    if filter is not None:
        if not isinstance(filter, FilterType):
            raise ValueError('Filter must be of FilterType enum')
        resource += '&filter=' + str(filter.value)
    if limit is not None:
        resource += '&limit=' + str(limit)
    if offset is not None:
        resource += '&offset=' + str(offset)
    if api_code is not None:
        resource += '&api_code=' + api_code
    return MultiAddress(json.loads(util.call_api(resource)))
"resource": ""
} |
def get_balance(addresses, filter=None, api_code=None):
    """Get balances for each address provided.

    :param tuple addresses: addresses (xpub or base58) to look up
    :param FilterType filter: the filter for transactions selection (optional)
    :param str api_code: Blockchain.info API code (optional)
    :return: a dictionary of str, :class:`Balance`
    :raises ValueError: if filter is not a FilterType member
    """
    if isinstance(addresses, basestring):
        active = addresses
    else:
        active = '|'.join(addresses)
    resource = 'balance?active=' + active
    if filter is not None:
        if not isinstance(filter, FilterType):
            raise ValueError('Filter must be of FilterType enum')
        resource += '&filter=' + str(filter.value)
    if api_code is not None:
        resource += '&api_code=' + api_code
    json_response = json.loads(util.call_api(resource))
    return {addr: Balance(data) for addr, data in json_response.items()}
"resource": ""
} |
def get_unspent_outputs(addresses, confirmations=None, limit=None, api_code=None):
    """Get unspent outputs for a single address.

    :param tuple addresses: addresses (xpub or base58) to look up
    :param int confirmations: minimum confirmations to include (optional)
    :param int limit: limit number of unspent outputs to fetch (optional)
    :param str api_code: Blockchain.info API code (optional)
    :return: an array of :class:`UnspentOutput` objects
    """
    if isinstance(addresses, basestring):
        active = addresses
    else:
        active = '|'.join(addresses)
    resource = 'unspent?active=' + active
    if confirmations is not None:
        resource += '&confirmations=' + str(confirmations)
    if limit is not None:
        resource += '&limit=' + str(limit)
    if api_code is not None:
        resource += '&api_code=' + api_code
    outputs = json.loads(util.call_api(resource))['unspent_outputs']
    return [UnspentOutput(o) for o in outputs]
"resource": ""
} |
def get_unconfirmed_tx(api_code=None):
    """Get a list of currently unconfirmed transactions.

    :param str api_code: Blockchain.info API code (optional)
    :return: an array of :class:`Transaction` objects
    """
    resource = 'unconfirmed-transactions?format=json'
    if api_code is not None:
        resource += '&api_code=' + api_code
    txs = json.loads(util.call_api(resource))['txs']
    return [Transaction(t) for t in txs]
"resource": ""
} |
def get_blocks(time=None, pool_name=None, api_code=None):
    """Get a list of blocks for a specific day or mining pool.

    Both parameters are optional but at least one is required.

    :param int time: time in milliseconds
    :param str pool_name: name of the mining pool
    :param str api_code: Blockchain.info API code (optional)
    :return: an array of :class:`SimpleBlock` objects
    """
    resource = 'blocks/{0}?format=json'
    if api_code is not None:
        resource += '&api_code=' + api_code
    # time takes precedence over pool_name when both are given.
    if time is not None:
        selector = time
    elif pool_name is not None:
        selector = pool_name
    else:
        selector = ''
    resource = resource.format(selector)
    blocks = json.loads(util.call_api(resource))['blocks']
    return [SimpleBlock(b) for b in blocks]
"resource": ""
} |
def populateViewTree(self, view):
    '''
    Inserts the given View into the tree widget, under its parent if it
    has one.
    '''
    vuid = view.getUniqueId()
    label = view.__smallStr__()
    parent = view.getParent()
    if parent is None:
        self.viewTree.insert('', Tkinter.END, vuid, text=label)
    else:
        self.viewTree.insert(parent.getUniqueId(), Tkinter.END, vuid,
                             text=label, tags=('ttk'))
    # Mark target views with '*' in the 'T' column.
    self.viewTree.set(vuid, 'T', '*' if view.isTarget() else ' ')
    self.viewTree.tag_bind('ttk', '<1>', self.viewTreeItemClicked)
"resource": ""
} |
def command(self, keycode):
    '''
    Presses a key.
    Sends the actual key press to the device, then records the
    corresponding line in the generated script.
    '''
    device = self.device
    device.press(keycode)
    self.printOperation(None, Operation.PRESS, keycode)
"resource": ""
} |
def cancelOperation(self):
    '''
    Cancels whichever interactive operation is in progress, if any.
    '''
    if self.isLongTouchingPoint:
        self.toggleLongTouchPoint()
        return
    if self.isTouchingPoint:
        self.toggleTouchPoint()
        return
    if self.isGeneratingTestCondition:
        self.toggleGenerateTestCondition()
"resource": ""
} |
def saveSnapshot(self):
    '''
    Saves the current snapshot to a user-selected file.
    The current snapshot is the image being displayed on the main window.
    '''
    template = (self.snapshotDir + os.sep +
                '${serialno}-${focusedwindowname}-${timestamp}' +
                '.' + self.snapshotFormat.lower())
    # The snapshot was already taken; no need to retake it.
    dialog = FileDialog(self, self.device.substituteDeviceTemplate(template))
    saveAsFilename = dialog.askSaveAsFilename()
    if not saveAsFilename:
        return
    _format = os.path.splitext(saveAsFilename)[1][1:].upper()
    self.printOperation(None, Operation.SNAPSHOT, template, _format,
                        self.deviceArt, self.dropShadow, self.screenGlare)
    # FIXME: deviceArt, dropShadow and screenGlare should be applied to
    # the saved image as well.
    self.unscaledScreenshot.save(saveAsFilename, _format)
"resource": ""
} |
def saveViewSnapshot(self, view):
    '''
    Saves a snapshot of the given View to a user-selected file.
    '''
    if not view:
        raise ValueError("view must be provided to take snapshot")
    template = (self.snapshotDir + os.sep + '${serialno}-' +
                view.variableNameFromId() + '-${timestamp}' +
                '.' + self.snapshotFormat.lower())
    dialog = FileDialog(self, self.device.substituteDeviceTemplate(template))
    saveAsFilename = dialog.askSaveAsFilename()
    if not saveAsFilename:
        return
    _format = os.path.splitext(saveAsFilename)[1][1:].upper()
    self.printOperation(view, Operation.VIEW_SNAPSHOT, template, _format)
    view.writeImageToFile(saveAsFilename, _format)
"resource": ""
} |
def toggleLongTouchPoint(self):
    '''
    Toggles the long-touch-point operation on or off.
    '''
    if self.isLongTouchingPoint:
        # Turn the mode off and clear the UI feedback.
        self.toast(None)
        self.statusBar.clear()
        self.isLongTouchingPoint = False
    else:
        msg = 'Long touching point'
        self.toast(msg, background=Color.GREEN)
        self.statusBar.set(msg)
        self.isLongTouchingPoint = True
        # FIXME: There should be 2 methods DIP & PX
        self.coordinatesUnit = Unit.PX
"resource": ""
} |
def getWifiState(self):
    '''
    Gets the Wi-Fi enabled state.
    @return: One of WIFI_STATE_DISABLED, WIFI_STATE_DISABLING,
             WIFI_STATE_ENABLED, WIFI_STATE_ENABLING, WIFI_STATE_UNKNOWN
    '''
    dump = self.device.shell('dumpsys wifi')
    if dump:
        firstLine = dump.splitlines()[0]
        if self.WIFI_IS_ENABLED_RE.match(firstLine):
            return self.WIFI_STATE_ENABLED
        if self.WIFI_IS_DISABLED_RE.match(firstLine):
            return self.WIFI_STATE_DISABLED
        print >> sys.stderr, "UNKNOWN WIFI STATE:", firstLine
    return self.WIFI_STATE_UNKNOWN
"resource": ""
} |
def setTimer(self, timeout, description=None):
    """
    Sets a timer.

    :param timeout: timeout in seconds
    :param description: optional description passed to the timeout handler
    :return: the timerId assigned to the newly started timer
    """
    self.timerId += 1
    timer = Timer(timeout, self.__timeoutHandler, (self.timerId, description))
    timer.start()
    # Keep a reference so the timer can later be cancelled by id.
    self.timers[self.timerId] = timer
    return self.timerId
def getProperty(self, key, strip=True):
    ''' Gets the property value for key '''
    self.__checkTransport()
    import collections
    # Maps property keys (as regexps) to instance methods that obtain their
    # values; order matters, the catch-all '.*' entry must come last.
    MAP_PROPS = collections.OrderedDict([
        (re.compile('display.width'), self.__getDisplayWidth),
        (re.compile('display.height'), self.__getDisplayHeight),
        (re.compile('display.density'), self.__getDisplayDensity),
        (re.compile('display.orientation'), self.__getDisplayOrientation),
        (re.compile('.*'), self.__getProp),
    ])
    for kre in MAP_PROPS.keys():
        if kre.match(key):
            return MAP_PROPS[kre](key=key, strip=strip)
    # BUG FIX: the original raise was missing the '%' argument, so the
    # message contained a literal '%s' instead of the offending key.
    raise ValueError("key='%s' does not match any map entry" % key)
def isLocked(self):
    '''
    Checks if the device screen is locked.

    @return: True if the device screen is locked
    @raise RuntimeError: if neither lockscreen marker is found in the dump
    '''
    self.__checkTransport()
    lockScreenRE = re.compile('mShowingLockscreen=(true|false)')
    dwp = self.shell('dumpsys window policy')
    m = lockScreenRE.search(dwp)
    if m:
        return m.group(1) == 'true'
    # Newer Android versions report the state via mDreamingLockscreen instead.
    dreamingLockscreenRE = re.compile('mDreamingLockscreen=(true|false)')
    m = dreamingLockscreenRE.search(dwp)
    if m:
        return m.group(1) == 'true'
    raise RuntimeError("Couldn't determine screen lock state")
def isScreenOn(self):
    """
    Checks if the screen is ON.

    @return: True if the device screen is ON
    @raise RuntimeError: if the mScreenOnFully marker is not found
    """
    self.__checkTransport()
    screenOnRE = re.compile('mScreenOnFully=(true|false)')
    m = screenOnRE.search(self.shell('dumpsys window policy'))
    if m:
        return m.group(1) == 'true'
    raise RuntimeError("Couldn't determine screen ON state")
def percentSame(image1, image2):
    '''
    Returns the percent of pixels that are equal

    @param image1: first image (PIL-like, exposing .size and .load())
    @param image2: second image
    @return: fraction in [0, 1] of equal pixels, or 0 if sizes differ

    @author: catshoes
    '''
    # If the images differ in size, return 0% same.
    size_x1, size_y1 = image1.size
    size_x2, size_y2 = image2.size
    if size_x1 != size_x2 or size_y1 != size_y2:
        return 0
    # Images are the same size.
    # Return the percent of pixels that are equal.
    numPixelsSame = 0
    numPixelsTotal = size_x1 * size_y1
    image1Pixels = image1.load()
    image2Pixels = image2.load()
    # Loop over all pixels, comparing pixel in image1 to image2
    for x in range(size_x1):
        for y in range(size_y1):
            if image1Pixels[x, y] == image2Pixels[x, y]:
                numPixelsSame += 1
    return numPixelsSame / float(numPixelsTotal)
def imageInScreen(screen, image):
    """
    Checks if image is on the screen.

    @param screen: the screen image
    @param image: the partial image to look for
    @return: True or False
    @author: Perry Tsai <ripple0129@gmail.com>
    """
    # The image must be strictly smaller than the screen.
    size_x1, size_y1 = screen.size
    size_x2, size_y2 = image.size
    if size_x1 <= size_x2 or size_y1 <= size_y2:
        return False
    # Load pixels.
    screenPixels = screen.load()
    imagePixels = image.load()
    # Slide the candidate window over the screen; a cheap first-pixel check
    # gates the full region comparison.
    for x in range(size_x1 - size_x2):
        for y in range(size_y1 - size_y2):
            if imagePixels[0, 0] == screenPixels[x, y]:
                croppedScreen = screen.crop((x, y, x + size_x2, y + size_y2))
                croppedPixels = croppedScreen.load()
                # BUG FIX: the original returned True as soon as ANY single
                # pixel of the crop matched (and its inner loops clobbered the
                # outer x/y); require every pixel of the region to match.
                if all(imagePixels[cx, cy] == croppedPixels[cx, cy]
                       for cx in range(size_x2)
                       for cy in range(size_y2)):
                    return True
    return False
def isKeyboardShown(self):
    '''
    Whether the keyboard is displayed.

    @return: True if the soft keyboard is currently shown
    '''
    self.__checkTransport()
    dim = self.shell('dumpsys input_method')
    if dim:
        # FIXME: API >= 15 ?
        return "mInputShown=true" in dim
    return False
def tabLayout(self):
    ''' For all tabs, specify the number of buttons in a row.

    Advances the grid cursor one column; wraps to the next row once the
    configured buttons-per-row limit is exceeded.
    '''
    self.childWindow.column += 1
    if self.childWindow.column > Layout.BUTTONS_NUMBER:
        self.childWindow.column = 0
        self.childWindow.row += 1
def debugArgsToDict(a):
    """
    Converts a string representation of debug arguments to a dictionary.

    The string can be of the form
        IDENTIFIER1=val1,IDENTIFIER2=val2

    :param a: the argument string
    :return: the dictionary
    """
    s = a.replace('+', ' ')
    s = s.replace('=', ':')
    # Quote the ALL_CAPS identifiers so the result parses as a dict literal.
    s = re.sub(r'([A-Z][A-Z_]+)', r"'\1'", s)
    return ast.literal_eval('{ ' + s + ' }')
def getHeight(self):
    '''
    Gets the height.

    @return: the View height in pixels, or 0 if the property is missing/unparsable
    '''
    if self.useUiAutomator:
        # bounds is ((left, top), (right, bottom))
        return self.map['bounds'][1][1] - self.map['bounds'][0][1]
    else:
        try:
            return int(self.map[self.heightProperty])
        except:
            return 0
def getWidth(self):
    '''
    Gets the width.

    @return: the View width in pixels, or 0 if the property is missing/unparsable
    '''
    if self.useUiAutomator:
        # bounds is ((left, top), (right, bottom))
        return self.map['bounds'][1][0] - self.map['bounds'][0][0]
    else:
        try:
            return int(self.map[self.widthProperty])
        except:
            return 0
def getVisibility(self):
    '''
    Gets the View visibility.

    @return: VISIBLE, INVISIBLE or GONE; -2 for an unrecognized value and
             -1 if the visibility property cannot be read at all
    '''
    try:
        if self.map[GET_VISIBILITY_PROPERTY] == 'VISIBLE':
            return VISIBLE
        elif self.map[GET_VISIBILITY_PROPERTY] == 'INVISIBLE':
            return INVISIBLE
        elif self.map[GET_VISIBILITY_PROPERTY] == 'GONE':
            return GONE
        else:
            return -2
    except:
        return -1
def __getX(self):
    '''
    Gets the View X coordinate.

    @return: the X coordinate; 0 when the left property is unavailable
    '''
    if DEBUG_COORDS:
        print >> sys.stderr, "getX(%s %s ## %s)" % (self.getClass(), self.getId(), self.getUniqueId())
    x = 0
    if self.useUiAutomator:
        x = self.map['bounds'][0][0]
    else:
        try:
            # Only VISIBLE views contribute their left offset.
            if GET_VISIBILITY_PROPERTY in self.map and self.map[GET_VISIBILITY_PROPERTY] == 'VISIBLE':
                _x = int(self.map[self.leftProperty])
                if DEBUG_COORDS: print >> sys.stderr, "   getX: VISIBLE adding %d" % _x
                x += _x
        except:
            warnings.warn("View %s has no '%s' property" % (self.getId(), self.leftProperty))
    if DEBUG_COORDS: print >> sys.stderr, "   getX: returning %d" % (x)
    return x
def __getY(self):
    '''
    Gets the View Y coordinate.

    @return: the Y coordinate; 0 when the top property is unavailable
    '''
    if DEBUG_COORDS:
        print >> sys.stderr, "getY(%s %s ## %s)" % (self.getClass(), self.getId(), self.getUniqueId())
    y = 0
    if self.useUiAutomator:
        y = self.map['bounds'][0][1]
    else:
        try:
            # Only VISIBLE views contribute their top offset.
            if GET_VISIBILITY_PROPERTY in self.map and self.map[GET_VISIBILITY_PROPERTY] == 'VISIBLE':
                _y = int(self.map[self.topProperty])
                if DEBUG_COORDS: print >> sys.stderr, "   getY: VISIBLE adding %d" % _y
                y += _y
        except:
            warnings.warn("View %s has no '%s' property" % (self.getId(), self.topProperty))
    if DEBUG_COORDS: print >> sys.stderr, "   getY: returning %d" % (y)
    return y
def getCoords(self):
    '''
    Gets the coords of the View.

    @return: A tuple containing the View's coordinates ((L, T), (R, B))
    '''
    if DEBUG_COORDS:
        print >> sys.stderr, "getCoords(%s %s ## %s)" % (self.getClass(), self.getId(), self.getUniqueId())
    (x, y) = self.getXY()
    w = self.getWidth()
    h = self.getHeight()
    return ((x, y), (x + w, y + h))
def getCenter(self):
    '''
    Gets the center coords of the View.

    @return: an (x, y) tuple at the middle of the View's bounding box
    @author: U{Dean Morin <https://github.com/deanmorin>}
    '''
    (left, top), (right, bottom) = self.getCoords()
    x = left + (right - left) / 2
    y = top + (bottom - top) / 2
    return (x, y)
def isFocused(self):
    '''
    Gets the focused value.

    @return: the focused value. If the property cannot be found returns C{False}
    '''
    try:
        # Property values are strings such as 'true'/'TRUE'.
        return True if self.map[self.isFocusedProperty].lower() == 'true' else False
    except Exception:
        return False
def setText(self, text):
    """
    This function makes sure that any previously entered text is deleted before
    setting the value of the field.

    @param text: the new text for the field; no-op if the field already holds it
    """
    if self.text() == text:
        return
    self.touch()
    # Press DEL/FORWARD_DEL enough times to clear the field from any cursor position.
    maxSize = len(self.text()) + 1
    self.device.press('KEYCODE_DEL', adbclient.DOWN_AND_UP, repeat=maxSize)
    self.device.press('KEYCODE_FORWARD_DEL', adbclient.DOWN_AND_UP, repeat=maxSize)
    self.type(text, alreadyTouched=True)
def openNotification(self):
    '''
    Opens the notification shade.
    '''
    # the tablet has a different Notification/Quick Settings bar depending on x
    w13 = self.device.display['width'] / 3
    s = (w13, 0)
    e = (w13, self.device.display['height'] / 2)
    self.device.drag(s, e, 500, 20, -1)
    self.vc.sleep(1)
    self.vc.dump(-1)
def openQuickSettings(self):
    '''
    Opens the Quick Settings shade.
    '''
    # the tablet has a different Notification/Quick Settings bar depending on x
    w23 = 2 * self.device.display['width'] / 3
    s = (w23, 0)
    e = (w23, self.device.display['height'] / 2)
    self.device.drag(s, e, 500, 20, -1)
    self.vc.sleep(1)
    # On SDK >= 20 a second swipe is needed to reach Quick Settings.
    if self.vc.getSdkVersion() >= 20:
        self.device.drag(s, e, 500, 20, -1)
        self.vc.sleep(1)
    self.vc.dump(-1)
def StartElement(self, name, attributes):
    '''
    Expat start element event handler.

    Builds a View for every 'node' element and wires it into the tree kept
    on the node stack.
    '''
    if name == 'hierarchy':
        pass
    elif name == 'node':
        # Instantiate an Element object
        attributes['uniqueId'] = 'id/no_id/%d' % self.idCount
        # bounds come as '[l,t][r,b]'; split on '[', ']' and ','.
        bounds = re.split('[\][,]', attributes['bounds'])
        attributes['bounds'] = ((int(bounds[1]), int(bounds[2])), (int(bounds[4]), int(bounds[5])))
        if DEBUG_BOUNDS:
            print >> sys.stderr, "bounds=", attributes['bounds']
        self.idCount += 1
        child = View.factory(attributes, self.device, version=self.version, uiAutomatorHelper=self.uiAutomatorHelper)
        self.views.append(child)
        # Push element onto the stack and make it a child of parent
        if not self.nodeStack:
            self.root = child
        else:
            self.parent = self.nodeStack[-1]
            self.parent.add(child)
        self.nodeStack.append(child)
def CharacterData(self, data):
    '''
    Expat character data event handler.

    Accumulates non-whitespace character data (encoded to bytes) in self.data.
    '''
    if data.strip():
        data = data.encode()
        if not self.data:
            self.data = data
        else:
            self.data += data
def __updateNavButtons(self):
    """
    Updates the navigation buttons that might be on the device screen.

    Locates the systemui nav_buttons container and caches the back/home/
    recent-apps Views; clears them when no software nav bar is present.
    """
    navButtons = None
    for v in self.views:
        if v.getId() == 'com.android.systemui:id/nav_buttons':
            navButtons = v
            break
    if navButtons:
        self.navBack = self.findViewById('com.android.systemui:id/back', navButtons)
        self.navHome = self.findViewById('com.android.systemui:id/home', navButtons)
        self.navRecentApps = self.findViewById('com.android.systemui:id/recent_apps', navButtons)
    else:
        if self.uiAutomatorHelper:
            print >> sys.stderr, "WARNING: nav buttons not found. Perhaps the device has hardware buttons."
        self.navBack = None
        self.navHome = None
        self.navRecentApps = None
q30265 | ViewClient.list | train | def list(self, sleep=1):
'''
List the windows.
Sleep is useful to wait some time before obtaining the new content when something in the
window has changed.
This also sets L{self.windows} as the list of windows.
@type sleep: int
@param sleep: sleep in seconds before proceeding to dump the content
@return: the list of windows
'''
if sleep > 0:
time.sleep(sleep)
if self.useUiAutomator:
raise Exception("Not implemented yet: listing windows with UiAutomator")
else:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.connect((VIEW_SERVER_HOST, self.localPort))
except socket.error, ex:
raise RuntimeError("ERROR: Connecting to %s:%d: %s" % (VIEW_SERVER_HOST, self.localPort, ex))
s.send('list\r\n')
received = ""
doneRE = re.compile("DONE")
while True:
received += s.recv(1024)
if doneRE.search(received[-7:]):
break
s.close()
if DEBUG:
self.received = received
if DEBUG_RECEIVED:
print >>sys.stderr, "received %d chars" % len(received)
print >>sys.stderr
print >>sys.stderr, received
print >>sys.stderr
self.windows = {}
for line in received.split('\n'):
if not line:
break
if doneRE.search(line):
break
values = line.split()
if len(values) > 1:
package = values[1]
else:
package = "UNKNOWN"
if len(values) > 0:
wid = values[0]
else:
wid = '00000000'
self.windows[int('0x' + wid, 16)] = package
return self.windows | python | {
"resource": ""
} |
def findViewById(self, viewId, root="ROOT", viewFilter=None):
    '''
    Finds the View with the specified viewId.

    @type viewId: str
    @param viewId: the ID of the view to find
    @type root: View
    @param root: the root node of the tree where the View will be searched
    @type viewFilter: function
    @param viewFilter: a function that will be invoked providing the candidate View as a parameter
                       and depending on the return value (C{True} or C{False}) the View will be
                       selected and returned as the result of C{findViewById()} or ignored.
                       This can be C{None} and no extra filtering is applied.

    @return: the C{View} found or C{None}
    '''
    if not root:
        return None
    # "ROOT" sentinel: restart the search from the tree root.
    if type(root) == types.StringType and root == "ROOT":
        return self.findViewById(viewId, self.root, viewFilter)
    if root.getId() == viewId:
        if viewFilter:
            if viewFilter(root):
                return root
        else:
            return root
    # ids of the form 'id/no_id/...' or 'id/pkg/...' are matched against the unique id.
    if re.match('^id/no_id', viewId) or re.match('^id/.+/.+', viewId):
        if root.getUniqueId() == viewId:
            if viewFilter:
                if viewFilter(root):
                    return root
            else:
                return root
    for ch in root.children:
        foundView = self.findViewById(viewId, ch, viewFilter)
        if foundView:
            if viewFilter:
                if viewFilter(foundView):
                    return foundView
            else:
                return foundView
def findViewByTagOrRaise(self, tag, root="ROOT"):
    '''
    Finds the View with the specified tag or raise a ViewNotFoundException.

    @raise ViewNotFoundException: if no View with that tag exists under root
    '''
    view = self.findViewWithAttribute('getTag()', tag, root)
    if view:
        return view
    else:
        raise ViewNotFoundException("tag", tag, root)
def findViewWithAttribute(self, attr, val, root="ROOT"):
    '''
    Finds the View with the specified attribute and value.
    '''
    if DEBUG:
        try:
            print >> sys.stderr, u'findViewWithAttribute({0}, {1}, {2})'.format(attr, unicode(val, encoding='utf-8', errors='replace'), root)
        except:
            pass
        print >> sys.stderr, "    findViewWithAttribute: type(val)=", type(val)
    return self.__findViewWithAttributeInTree(attr, val, root)
def findViewsWithAttribute(self, attr, val, root="ROOT"):
    '''
    Finds the Views with the specified attribute and value.
    This allows you to see all items that match your criteria in the view hierarchy.

    Usage:
        buttons = v.findViewsWithAttribute("class", "android.widget.Button")
    '''
    return self.__findViewsWithAttributeInTree(attr, val, root)
def findViewWithAttributeThatMatches(self, attr, regex, root="ROOT"):
    '''
    Finds the list of Views with the specified attribute matching regex.
    '''
    return self.__findViewWithAttributeInTreeThatMatches(attr, regex, root)
def click(self, x=-1, y=-1, selector=None):
    """
    An alias for touch.

    :param x: x coordinate (or -1 when a selector is used)
    :param y: y coordinate (or -1 when a selector is used)
    :param selector: optional selector forwarded to touch
    :return:
    """
    self.touch(x=x, y=y, selector=selector)
def pressKeyCode(self, keycode, metaState=0):
    '''Presses a key code; by default no meta state.'''
    if self.uiAutomatorHelper:
        if DEBUG_UI_AUTOMATOR_HELPER:
            print >> sys.stderr, "pressKeyCode(%d, %d)" % (keycode, metaState)
        self.uiAutomatorHelper.pressKeyCode(keycode, metaState)
    else:
        warnings.warn("pressKeyCode only implemented using UiAutomatorHelper. Use AdbClient.type() instead")
def __pickleable(tree):
    '''
    Makes the tree pickleable.
    '''

    def removeDeviceReference(view):
        '''
        Removes the reference to a L{MonkeyDevice}.
        '''
        view.device = None

    ###########################################################################################
    # FIXME: Unfortunately deepcopy does not work with MonkeyDevice objects, which is
    # sadly the reason why we cannot pickle the tree and we need to remove the MonkeyDevice
    # references.
    # We wanted to copy the tree to preserve the original and make pickleable the copy.
    # treeCopy = copy.deepcopy(tree)
    treeCopy = tree
    # IMPORTANT:
    # This assumes that the first element in the list is the tree root
    ViewClient.__traverse(treeCopy[0], transform=removeDeviceReference)
    ###########################################################################################
    return treeCopy
def distanceTo(self, tree):
    '''
    Calculates the distance between the current state and the tree passed as argument.

    @type tree: list of Views
    @param tree: Tree of Views
    @return: the distance
    '''
    return ViewClient.distance(ViewClient.__pickleable(self.views), tree)
def distance(tree1, tree2):
    '''
    Calculates the distance between the two trees.

    @type tree1: list of Views
    @param tree1: Tree of Views
    @type tree2: list of Views
    @param tree2: Tree of Views
    @return: the normalized distance (Hamming when the pickled sizes match,
             Levenshtein otherwise), divided by the longer pickled length
    '''
    ################################################################
    # FIXME: this should copy the entire tree and then transform it #
    ################################################################
    pickleableTree1 = ViewClient.__pickleable(tree1)
    pickleableTree2 = ViewClient.__pickleable(tree2)
    s1 = pickle.dumps(pickleableTree1)
    s2 = pickle.dumps(pickleableTree2)
    if DEBUG_DISTANCE:
        print >> sys.stderr, "distance: calculating distance between", s1[:20], "and", s2[:20]
    l1 = len(s1)
    l2 = len(s2)
    t = float(max(l1, l2))
    if l1 == l2:
        if DEBUG_DISTANCE:
            print >> sys.stderr, "distance: trees have same length, using Hamming distance"
        return ViewClient.__hammingDistance(s1, s2) / t
    else:
        if DEBUG_DISTANCE:
            print >> sys.stderr, "distance: trees have different length, using Levenshtein distance"
        return ViewClient.__levenshteinDistance(s1, s2) / t
def __hammingDistance(s1, s2):
    '''
    Finds the Hamming distance between two strings.

    @param s1: string
    @param s2: string
    @return: the distance
    @raise ValueError: if the length of the strings differ
    '''
    l1 = len(s1)
    l2 = len(s2)
    if l1 != l2:
        raise ValueError("Hamming distance requires strings of same size.")
    # Count positions where the characters differ.
    return sum(ch1 != ch2 for ch1, ch2 in zip(s1, s2))
def hammingDistance(self, tree):
    '''
    Finds the Hamming distance between this tree and the one passed as argument.
    '''
    s1 = ' '.join(map(View.__str__, self.views))
    s2 = ' '.join(map(View.__str__, tree))
    return ViewClient.__hammingDistance(s1, s2)
def __levenshteinDistance(s, t):
    '''
    Find the Levenshtein distance between two Strings.

    Python version of Levenshtein distance method implemented in Java at
    U{http://www.java2s.com/Code/Java/Data-Type/FindtheLevenshteindistancebetweentwoStrings.htm}.

    This is the number of changes needed to change one String into
    another, where each change is a single character modification (deletion,
    insertion or substitution).

    This implementation of the Levenshtein distance algorithm
    is from U{http://www.merriampark.com/ldjava.htm} (Chas Emerick's
    memory-efficient two-row variant)::

        StringUtils.getLevenshteinDistance(null, *)             = IllegalArgumentException
        StringUtils.getLevenshteinDistance(*, null)             = IllegalArgumentException
        StringUtils.getLevenshteinDistance("","")               = 0
        StringUtils.getLevenshteinDistance("","a")              = 1
        StringUtils.getLevenshteinDistance("aaapppp", "")       = 7
        StringUtils.getLevenshteinDistance("frog", "fog")       = 1
        StringUtils.getLevenshteinDistance("fly", "ant")        = 3
        StringUtils.getLevenshteinDistance("elephant", "hippo") = 7
        StringUtils.getLevenshteinDistance("hippo", "elephant") = 7
        StringUtils.getLevenshteinDistance("hippo", "zzzzzzzz") = 8
        StringUtils.getLevenshteinDistance("hello", "hallo")    = 1

    @param s: the first String, must not be null
    @param t: the second String, must not be null
    @return: result distance
    @raise ValueError: if either String input C{null}
    '''
    if s is None or t is None:
        raise ValueError("Strings must not be null")
    n = len(s)
    m = len(t)
    if n == 0:
        return m
    elif m == 0:
        return n
    # Always iterate with the shorter string as 's' to keep the rows small.
    if n > m:
        tmp = s
        s = t
        t = tmp
        n = m
        m = len(t)
    p = [None] * (n + 1)   # 'previous' cost row
    d = [None] * (n + 1)   # current cost row
    for i in range(0, n + 1):
        p[i] = i
    for j in range(1, m + 1):
        if DEBUG_DISTANCE:
            if j % 100 == 0:
                print >> sys.stderr, "DEBUG:", int(j / (m + 1.0) * 100), "%\r",
        t_j = t[j - 1]
        d[0] = j
        for i in range(1, n + 1):
            cost = 0 if s[i - 1] == t_j else 1
            # minimum of cell to the left+1, to the top+1, diagonally left and up +cost
            d[i] = min(min(d[i - 1] + 1, p[i] + 1), p[i - 1] + cost)
        # Swap the rows instead of reallocating them.
        _d = p
        p = d
        d = _d
    if DEBUG_DISTANCE:
        print >> sys.stderr, "\n"
    return p[n]
def levenshteinDistance(self, tree):
    '''
    Finds the Levenshtein distance between this tree and the one passed as argument.
    '''
    s1 = ' '.join(map(View.__microStr__, self.views))
    s2 = ' '.join(map(View.__microStr__, tree))
    return ViewClient.__levenshteinDistance(s1, s2)
def run(self):
    """ Continuously receive tasks and execute them.

    A None task is the shutdown sentinel; once any thread has recorded an
    exception, remaining tasks are drained without being executed.
    """
    while True:
        task = self.tasks_queue.get()
        if task is None:
            self.tasks_queue.task_done()
            break
        # No exception detected in any thread,
        # continue the execution.
        if self.exceptions_queue.empty():
            try:
                # Execute the task
                func, args, kargs = task
                result = func(*args, **kargs)
                self.results_queue.put(result)
            except Exception as e:
                self.exceptions_queue.put(e)
        # Mark this task as done, whether an exception happened or not
        self.tasks_queue.task_done()
def add_task(self, func, *args, **kargs):
    """ Add a task to the queue.

    The task is stored as a (func, args, kwargs) tuple for workers to unpack.
    """
    self.tasks_queue.put((func, args, kargs))
def start_parallel(self):
    """ Prepare threads to run tasks.

    Spawns num_threads Worker threads sharing the pool's queues.
    """
    for _ in range(self.num_threads):
        Worker(self.tasks_queue, self.results_queue, self.exceptions_queue)
def result(self):
    """ Stop threads and return the result of all called tasks.

    :return: the results queue (one entry per successfully executed task)
    :raise: the first exception recorded by any worker thread, if any
    """
    # Send None to all threads to cleanly stop them
    for _ in range(self.num_threads):
        self.tasks_queue.put(None)
    # Wait for completion of all the tasks in the queue
    self.tasks_queue.join()
    # Check if one of the thread raised an exception, if yes
    # raise it here in the function
    if not self.exceptions_queue.empty():
        raise self.exceptions_queue.get()
    return self.results_queue
def xml_marshal_bucket_notifications(notifications):
    """
    Marshals the notifications structure for sending to S3 compatible storage.

    :param notifications: Dictionary with the optional keys
        'TopicConfigurations', 'QueueConfigurations' and
        'CloudFunctionConfigurations', each mapping to a list of
        configuration dictionaries of the form::

            {
                'Id': 'string',            # optional
                'Arn': 'string',
                'Events': [
                    's3:ReducedRedundancyLostObject'|'s3:ObjectCreated:*'|
                    's3:ObjectCreated:Put'|'s3:ObjectCreated:Post'|
                    's3:ObjectCreated:Copy'|
                    's3:ObjectCreated:CompleteMultipartUpload'|
                    's3:ObjectRemoved:*'|'s3:ObjectRemoved:Delete'|
                    's3:ObjectRemoved:DeleteMarkerCreated',
                ],
                'Filter': {                # optional
                    'Key': {
                        'FilterRules': [
                            {'Name': 'prefix'|'suffix', 'Value': 'string'},
                        ]
                    }
                }
            }

    :return: Marshalled XML data
    """
    root = s3_xml.Element('NotificationConfiguration', {'xmlns': _S3_NAMESPACE})
    _add_notification_config_to_xml(
        root,
        'TopicConfiguration',
        notifications.get('TopicConfigurations', [])
    )
    _add_notification_config_to_xml(
        root,
        'QueueConfiguration',
        notifications.get('QueueConfigurations', [])
    )
    _add_notification_config_to_xml(
        root,
        'CloudFunctionConfiguration',
        notifications.get('CloudFunctionConfigurations', [])
    )
    data = io.BytesIO()
    s3_xml.ElementTree(root).write(data, encoding=None, xml_declaration=False)
    return data.getvalue()
def _add_notification_config_to_xml(node, element_name, configs):
    """
    Internal function that builds the XML sub-structure for a given
    kind of notification configuration.

    :param node: parent XML element to append configuration elements to
    :param element_name: XML element name for each configuration entry
    :param configs: list of configuration dicts (Id, Arn, Events, Filter)
    :return: the parent node, for chaining
    """
    for config in configs:
        config_node = s3_xml.SubElement(node, element_name)
        # 'Id' is optional.
        if 'Id' in config:
            id_node = s3_xml.SubElement(config_node, 'Id')
            id_node.text = config['Id']
        # The ARN element name differs per configuration kind.
        arn_node = s3_xml.SubElement(
            config_node,
            NOTIFICATIONS_ARN_FIELDNAME_MAP[element_name]
        )
        arn_node.text = config['Arn']
        for event in config['Events']:
            event_node = s3_xml.SubElement(config_node, 'Event')
            event_node.text = event
        filter_rules = config.get('Filter', {}).get(
            'Key', {}).get('FilterRules', [])
        if filter_rules:
            filter_node = s3_xml.SubElement(config_node, 'Filter')
            s3key_node = s3_xml.SubElement(filter_node, 'S3Key')
            for filter_rule in filter_rules:
                filter_rule_node = s3_xml.SubElement(s3key_node, 'FilterRule')
                name_node = s3_xml.SubElement(filter_rule_node, 'Name')
                name_node.text = filter_rule['Name']
                value_node = s3_xml.SubElement(filter_rule_node, 'Value')
                value_node.text = filter_rule['Value']
    return node
def xml_marshal_delete_objects(object_names):
    """
    Marshal Multi-Object Delete request body from object names.

    :param object_names: List of object keys to be deleted.
    :return: Serialized XML string for multi-object delete request body.
    """
    root = s3_xml.Element('Delete')
    # use quiet mode in the request - this causes the S3 Server to
    # limit its response to only object keys that had errors during
    # the delete operation.
    quiet = s3_xml.SubElement(root, 'Quiet')
    quiet.text = "true"
    # add each object to the request.
    for object_name in object_names:
        object_elt = s3_xml.SubElement(root, 'Object')
        key_elt = s3_xml.SubElement(object_elt, 'Key')
        key_elt.text = object_name
    # return the marshalled xml.
    data = io.BytesIO()
    s3_xml.ElementTree(root).write(data, encoding=None, xml_declaration=False)
    return data.getvalue()
def set_key(self, key):
    """
    Set key policy condition.

    :param key: set key name.
    :raise: whatever is_non_empty_string raises for an empty/blank key
    """
    is_non_empty_string(key)
    self.policies.append(('eq', '$key', key))
    self.form_data['key'] = key
    self.key = key
def set_key_startswith(self, key_startswith):
    """
    Set key startswith policy condition.

    :param key_startswith: set key prefix name.
    :raise: whatever is_non_empty_string raises for an empty/blank prefix
    """
    is_non_empty_string(key_startswith)
    self.policies.append(('starts-with', '$key', key_startswith))
    self.form_data['key'] = key_startswith
def set_bucket_name(self, bucket_name):
    """
    Set bucket name policy condition.

    :param bucket_name: set bucket name.
    :raise: whatever is_valid_bucket_name raises for an invalid name
    """
    is_valid_bucket_name(bucket_name)
    self.policies.append(('eq', '$bucket', bucket_name))
    self.form_data['bucket'] = bucket_name
    self.bucket_name = bucket_name
def set_content_type(self, content_type):
    """
    Set content-type policy condition.

    :param content_type: set content type name.
    """
    self.policies.append(('eq', '$Content-Type', content_type))
    self.form_data['Content-Type'] = content_type
def base64(self, extras=()):
    """
    Encode json into base64.

    :param extras: extra condition entries forwarded to _marshal_json
    :return: the base64-encoded policy JSON as a str
    """
    s = self._marshal_json(extras=extras)
    # _marshal_json may return str or bytes depending on Python version.
    s_bytes = s if isinstance(s, bytes) else s.encode('utf-8')
    b64enc = base64.b64encode(s_bytes)
    return b64enc.decode('utf-8') if isinstance(b64enc, bytes) else b64enc
def is_valid(self):
    """
    Validate for required parameters.

    :raise InvalidArgumentError: if expiration, key or bucket is missing
    """
    if not isinstance(self._expiration, datetime.datetime):
        raise InvalidArgumentError('Expiration datetime must be specified.')
    if 'key' not in self.form_data:
        raise InvalidArgumentError('object key must be specified.')
    if 'bucket' not in self.form_data:
        raise InvalidArgumentError('bucket name must be specified.')
def post_presign_signature(date, region, secret_key, policy_str):
    """
    Calculates signature version '4' for POST policy string.

    :param date: datetime formatted date.
    :param region: region of the bucket for the policy.
    :param secret_key: Amazon S3 secret access key.
    :param policy_str: policy string.
    :return: hexlified sha256 signature digest.
    """
    signing_key = generate_signing_key(date, region, secret_key)
    signature = hmac.new(signing_key, policy_str.encode('utf-8'),
                         hashlib.sha256).hexdigest()
    return signature
def presign_v4(method, url, access_key, secret_key, session_token=None,
               region=None, headers=None, expires=None, response_headers=None,
               request_date=None):
    """
    Calculates signature version '4' for regular presigned URLs.

    :param method: Method to be presigned examples 'PUT', 'GET'.
    :param url: URL to be presigned.
    :param access_key: Access key id for your AWS s3 account.
    :param secret_key: Secret access key for your AWS s3 account.
    :param session_token: Session token key set only for temporary
        access credentials.
    :param region: region of the bucket, it is optional.
    :param headers: any additional HTTP request headers to
        be presigned, it is optional.
    :param expires: final expiration of the generated URL. Maximum is 7days.
    :param response_headers: Specify additional query string parameters.
    :param request_date: the date of the request.
    :return: the presigned URL including the X-Amz-Signature query parameter
    """
    # Validate input arguments.
    if not access_key or not secret_key:
        raise InvalidArgumentError('Invalid access_key and secret_key.')
    if region is None:
        region = 'us-east-1'
    if headers is None:
        headers = {}
    if expires is None:
        # 7 days, the maximum allowed, in seconds.
        expires = '604800'
    if request_date is None:
        request_date = datetime.utcnow()
    parsed_url = urlsplit(url)
    content_hash_hex = _UNSIGNED_PAYLOAD
    host = parsed_url.netloc
    headers['Host'] = host
    iso8601Date = request_date.strftime("%Y%m%dT%H%M%SZ")
    headers_to_sign = headers
    # Construct queries.
    query = {}
    query['X-Amz-Algorithm'] = _SIGN_V4_ALGORITHM
    query['X-Amz-Credential'] = generate_credential_string(access_key,
                                                           request_date,
                                                           region)
    query['X-Amz-Date'] = iso8601Date
    query['X-Amz-Expires'] = str(expires)
    if session_token:
        query['X-Amz-Security-Token'] = session_token
    signed_headers = get_signed_headers(headers_to_sign)
    query['X-Amz-SignedHeaders'] = ';'.join(signed_headers)
    if response_headers is not None:
        query.update(response_headers)
    # URL components: base URL followed by the sorted, encoded query string.
    url_components = [parsed_url.geturl()]
    if query is not None:
        ordered_query = collections.OrderedDict(sorted(query.items()))
        query_components = []
        for component_key in ordered_query:
            single_component = [component_key]
            if ordered_query[component_key] is not None:
                single_component.append('=')
                single_component.append(
                    queryencode(ordered_query[component_key])
                )
            else:
                single_component.append('=')
            query_components.append(''.join(single_component))
        query_string = '&'.join(query_components)
        if query_string:
            url_components.append('?')
            url_components.append(query_string)
    new_url = ''.join(url_components)
    # new url constructor block ends.
    new_parsed_url = urlsplit(new_url)
    canonical_request = generate_canonical_request(method,
                                                   new_parsed_url,
                                                   headers_to_sign,
                                                   signed_headers,
                                                   content_hash_hex)
    string_to_sign = generate_string_to_sign(request_date, region,
                                             canonical_request)
    signing_key = generate_signing_key(request_date, region, secret_key)
    signature = hmac.new(signing_key, string_to_sign.encode('utf-8'),
                         hashlib.sha256).hexdigest()
    new_parsed_url = urlsplit(new_url + "&X-Amz-Signature=" + signature)
    return new_parsed_url.geturl()
def get_signed_headers(headers):
    """
    Get signed headers.

    :param headers: input dictionary to be sorted.
    :return: sorted list of lowercased, stripped header names.
    """
    return sorted(name.lower().strip() for name in headers)
"resource": ""
} |
def sign_v4(method, url, region, headers=None,
            access_key=None,
            secret_key=None,
            session_token=None,
            content_sha256=None):
    """
    Signature version 4.

    :param method: HTTP method used for signature.
    :param url: Final url which needs to be signed.
    :param region: Region should be set to bucket region.
    :param headers: Optional headers for the method.
    :param access_key: Optional access key, if not
       specified no signature is needed.
    :param secret_key: Optional secret key, if not
       specified no signature is needed.
    :param session_token: Optional session token, set
       only for temporary credentials.
    :param content_sha256: Optional body sha256.
    """
    # Anonymous requests carry no signature at all.
    if not access_key or not secret_key:
        return headers

    if headers is None:
        headers = FoldCaseDict()
    if region is None:
        region = 'us-east-1'

    split_url = urlsplit(url)

    # Over TLS the payload hash is skipped (unsigned payload); otherwise
    # fall back to the sha256 of an empty body when none was supplied.
    if split_url.scheme == 'https':
        content_sha256 = _UNSIGNED_PAYLOAD
    elif content_sha256 is None:
        content_sha256 = get_sha256_hexdigest('')

    now = datetime.utcnow()
    headers['Host'] = split_url.netloc
    headers['X-Amz-Date'] = now.strftime("%Y%m%dT%H%M%SZ")
    headers['X-Amz-Content-Sha256'] = content_sha256
    if session_token:
        headers['X-Amz-Security-Token'] = session_token

    # Build canonical request -> string-to-sign -> signature.
    signed_headers = get_signed_headers(headers)
    canonical_request = generate_canonical_request(method,
                                                   split_url,
                                                   headers,
                                                   signed_headers,
                                                   content_sha256)
    string_to_sign = generate_string_to_sign(now, region, canonical_request)
    signing_key = generate_signing_key(now, region, secret_key)
    signature = hmac.new(signing_key, string_to_sign.encode('utf-8'),
                         hashlib.sha256).hexdigest()

    headers['Authorization'] = generate_authorization_header(access_key,
                                                             now,
                                                             region,
                                                             signed_headers,
                                                             signature)
    return headers
"resource": ""
} |
def generate_canonical_request(method, parsed_url, headers, signed_headers, content_sha256):
    """
    Generate canonical request.

    :param method: HTTP method.
    :param parsed_url: Parsed url is input from :func:`urlsplit`
    :param headers: HTTP header dictionary.
    :param signed_headers: sorted list of lowercased header names to sign.
    :param content_sha256: Content sha256 hexdigest string.
    """
    # One 'name:value' line per signed header; values are stringified
    # and stripped of surrounding whitespace.
    canonical_headers = [
        name + ':' + str(headers[name.title()]).strip()
        for name in signed_headers
    ]
    parts = [method, parsed_url.path, parsed_url.query]
    parts.extend(canonical_headers)
    parts.append('')
    parts.append(';'.join(signed_headers))
    parts.append(content_sha256)
    return '\n'.join(parts)
"resource": ""
} |
def generate_string_to_sign(date, region, canonical_request):
    """
    Generate string to sign.

    :param date: Date is input from :meth:`datetime.datetime`
    :param region: Region should be set to bucket region.
    :param canonical_request: Canonical request generated previously.
    """
    # Hash the canonical request and join the four signature-v4 lines:
    # algorithm, timestamp, credential scope, request digest.
    request_digest = hashlib.sha256(
        canonical_request.encode('utf-8')).hexdigest()
    return '\n'.join([_SIGN_V4_ALGORITHM,
                      date.strftime("%Y%m%dT%H%M%SZ"),
                      generate_scope_string(date, region),
                      request_digest])
"resource": ""
} |
def generate_signing_key(date, region, secret_key):
    """
    Generate signing key.

    :param date: Date is input from :meth:`datetime.datetime`
    :param region: Region should be set to bucket region.
    :param secret_key: Secret access key.
    """
    # Fold the scope components into the secret via chained HMAC-SHA256:
    # date -> region -> service ('s3') -> terminator ('aws4_request').
    key = ('AWS4' + secret_key).encode('utf-8')
    for component in (date.strftime("%Y%m%d"), region, 's3', 'aws4_request'):
        key = hmac.new(key, component.encode('utf-8'),
                       hashlib.sha256).digest()
    return key
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.