| id (int64, 0-458k) | file_name (string, length 4-119) | file_path (string, length 14-227) | content (string, length 24-9.96M) | size (int64, 24-9.96M) | language (1 class) | extension (14 classes) | total_lines (int64, 1-219k) | avg_line_length (float64, 2.52-4.63M) | max_line_length (int64, 5-9.91M) | alphanum_fraction (float64, 0-1) | repo_name (string, length 7-101) | repo_stars (int64, 100-139k) | repo_forks (int64, 0-26.4k) | repo_open_issues (int64, 0-2.27k) | repo_license (12 classes) | repo_extraction_date (433 classes) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 9,400 | ConnMan.py | s3tools_s3cmd/S3/ConnMan.py |
# -*- coding: utf-8 -*-
## --------------------------------------------------------------------
## Amazon S3 manager
##
## Authors : Michal Ludvig <michal@logix.cz> (https://www.logix.cz/michal)
## Florent Viard <florent@sodria.com> (https://www.sodria.com)
## Copyright : TGRMN Software, Sodria SAS and contributors
## License : GPL Version 2
## Website : https://s3tools.org
## --------------------------------------------------------------------
from __future__ import absolute_import
import sys
if sys.version_info >= (3, 0):
from .Custom_httplib3x import httplib
else:
from .Custom_httplib27 import httplib
import ssl
from logging import debug
from threading import Semaphore
from time import time
try:
from urlparse import urlparse
except ImportError:
# python 3 support
from urllib.parse import urlparse
from .Config import Config
from .Exceptions import ParameterError, S3SSLCertificateError
from .Utils import getBucketFromHostname
__all__ = ["ConnMan"]
class http_connection(object):
context = None
context_set = False
@staticmethod
def _ssl_verified_context(cafile):
cfg = Config()
context = None
try:
context = ssl.create_default_context(cafile=cafile)
except AttributeError: # no ssl.create_default_context
pass
if context and not cfg.check_ssl_hostname:
context.check_hostname = False
debug(u'Disabling SSL certificate hostname checking')
return context
@staticmethod
def _ssl_unverified_context(cafile):
debug(u'Disabling SSL certificate checking')
context = None
try:
context = ssl._create_unverified_context(cafile=cafile,
cert_reqs=ssl.CERT_NONE)
except AttributeError: # no ssl._create_unverified_context
pass
return context
@staticmethod
def _ssl_client_auth_context(certfile, keyfile, check_server_cert, cafile):
context = None
try:
cert_reqs = ssl.CERT_REQUIRED if check_server_cert else ssl.CERT_NONE
context = ssl._create_unverified_context(cafile=cafile,
keyfile=keyfile,
certfile=certfile,
cert_reqs=cert_reqs)
except AttributeError: # no ssl._create_unverified_context
pass
return context
@staticmethod
def _ssl_context():
if http_connection.context_set:
return http_connection.context
cfg = Config()
cafile = cfg.ca_certs_file
if cafile == "":
cafile = None
certfile = cfg.ssl_client_cert_file or None
keyfile = cfg.ssl_client_key_file or None # the key may be embedded into cert file
debug(u"Using ca_certs_file %s", cafile)
debug(u"Using ssl_client_cert_file %s", certfile)
debug(u"Using ssl_client_key_file %s", keyfile)
if certfile is not None:
context = http_connection._ssl_client_auth_context(certfile, keyfile, cfg.check_ssl_certificate, cafile)
elif cfg.check_ssl_certificate:
context = http_connection._ssl_verified_context(cafile)
else:
context = http_connection._ssl_unverified_context(cafile)
http_connection.context = context
http_connection.context_set = True
return context
def forgive_wildcard_cert(self, cert, hostname):
"""
Wildcard matching for *.s3.amazonaws.com and similar per region.
Per http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html:
"We recommend that all bucket names comply with DNS naming conventions."
Per http://docs.aws.amazon.com/AmazonS3/latest/dev/VirtualHosting.html:
"When using virtual hosted-style buckets with SSL, the SSL
wild card certificate only matches buckets that do not contain
periods. To work around this, use HTTP or write your own
certificate verification logic."
Therefore, we need a custom validation routine that allows
mybucket.example.com.s3.amazonaws.com to be considered a valid
hostname for the *.s3.amazonaws.com wildcard cert, and for the
region-specific *.s3-[region].amazonaws.com wildcard cert.
We also forgive non-S3 wildcard certificates should the
hostname match, to allow compatibility with other S3
API-compatible storage providers.
"""
debug(u'checking SSL subjectAltName as forgiving wildcard cert')
san = cert.get('subjectAltName', ())
hostname = hostname.lower()
cleaned_host_bucket_config = urlparse('https://' + Config.host_bucket).hostname
for key, value in san:
if key == 'DNS':
value = value.lower()
if value.startswith('*.s3') and \
(value.endswith('.amazonaws.com') and hostname.endswith('.amazonaws.com')) or \
(value.endswith('.amazonaws.com.cn') and hostname.endswith('.amazonaws.com.cn')):
return True
elif value == cleaned_host_bucket_config % \
{'bucket': '*', 'location': Config.bucket_location.lower()} and \
hostname.endswith(cleaned_host_bucket_config % \
{'bucket': '', 'location': Config.bucket_location.lower()}):
return True
return False
def match_hostname(self):
cert = self.c.sock.getpeercert()
try:
ssl.match_hostname(cert, self.hostname)
except AttributeError:
# old ssl module doesn't have this function
return
except ValueError:
# empty SSL cert means underlying SSL library didn't validate it, we don't either.
return
except S3SSLCertificateError as e:
if not self.forgive_wildcard_cert(cert, self.hostname):
raise e
@staticmethod
def _https_connection(hostname, port=None):
try:
context = http_connection._ssl_context()
# Wildcard certificates do not work with DNS-style named buckets.
bucket_name, success = getBucketFromHostname(hostname)
if success and '.' in bucket_name:
# this merely delays running the hostname check until
# after the connection is made and we get control
# back. We then run the same check, relaxed for S3's
# wildcard certificates.
debug(u'Bucket name contains "." character, disabling initial SSL hostname check')
check_hostname = False
if context:
context.check_hostname = False
else:
if context:
check_hostname = context.check_hostname
else:
# Early versions of python that don't have context
# don't check hostnames anyway
check_hostname = True
# Note: we probably need to try to set check_hostname because of this bug:
# http://bugs.python.org/issue22959
conn = httplib.HTTPSConnection(hostname, port, context=context, check_hostname=check_hostname)
debug(u'httplib.HTTPSConnection() has both context and check_hostname')
except TypeError:
try:
# in case check_hostname parameter is not present try again
conn = httplib.HTTPSConnection(hostname, port, context=context)
debug(u'httplib.HTTPSConnection() has only context')
except TypeError:
# in case even context parameter is not present try one last time
conn = httplib.HTTPSConnection(hostname, port)
debug(u'httplib.HTTPSConnection() has neither context nor check_hostname')
return conn
def __init__(self, id, hostname, ssl, cfg):
self.ssl = ssl
self.id = id
self.counter = 0
# Whatever the input is, ensure we end up with a clean hostname and port
parsed_hostname = urlparse('https://' + hostname)
self.hostname = parsed_hostname.hostname
self.port = parsed_hostname.port
if parsed_hostname.path and parsed_hostname.path != '/':
self.path = parsed_hostname.path.rstrip('/')
debug(u'endpoint path set to %s', self.path)
else:
self.path = None
"""
History note:
In a perfect world, or in the future:
- All http proxies would support CONNECT/tunnel, and so there would be no need
for using "absolute URIs" in format_uri.
- All s3-like servers would work well whether using relative or ABSOLUTE URIs.
But currently, what is currently common:
- Proxies without support for CONNECT for http, and so "absolute URIs" have to
be used.
- Proxies with support for CONNECT for httpS but s3-like servers having issues
with "absolute URIs", so relative one still have to be used as the requests will
pass as-is, through the proxy because of the CONNECT mode.
"""
if not cfg.proxy_host:
if ssl:
self.c = http_connection._https_connection(self.hostname, self.port)
debug(u'non-proxied HTTPSConnection(%s, %s)', self.hostname, self.port)
else:
self.c = httplib.HTTPConnection(self.hostname, self.port)
debug(u'non-proxied HTTPConnection(%s, %s)', self.hostname, self.port)
else:
if ssl:
self.c = http_connection._https_connection(cfg.proxy_host, cfg.proxy_port)
debug(u'proxied HTTPSConnection(%s, %s)', cfg.proxy_host, cfg.proxy_port)
port = self.port and self.port or 443
self.c.set_tunnel(self.hostname, port)
debug(u'tunnel to %s, %s', self.hostname, port)
else:
self.c = httplib.HTTPConnection(cfg.proxy_host, cfg.proxy_port)
debug(u'proxied HTTPConnection(%s, %s)', cfg.proxy_host, cfg.proxy_port)
# No tunnel here for the moment
self.last_used_time = time()
class ConnMan(object):
_CS_REQ_SENT = httplib._CS_REQ_SENT
CONTINUE = httplib.CONTINUE
conn_pool_sem = Semaphore()
conn_pool = {}
conn_max_counter = 800 ## AWS closes connection after some ~90 requests
@staticmethod
def get(hostname, ssl=None):
cfg = Config()
if ssl is None:
ssl = cfg.use_https
conn = None
if cfg.proxy_host != "":
if ssl and sys.hexversion < 0x02070000:
raise ParameterError("use_https=True can't be used with proxy on Python <2.7")
conn_id = "proxy://%s:%s" % (cfg.proxy_host, cfg.proxy_port)
else:
conn_id = "http%s://%s" % (ssl and "s" or "", hostname)
ConnMan.conn_pool_sem.acquire()
if conn_id not in ConnMan.conn_pool:
ConnMan.conn_pool[conn_id] = []
while ConnMan.conn_pool[conn_id]:
conn = ConnMan.conn_pool[conn_id].pop()
cur_time = time()
if cur_time < conn.last_used_time + cfg.connection_max_age \
and cur_time >= conn.last_used_time:
debug("ConnMan.get(): re-using connection: %s#%d"
% (conn.id, conn.counter))
break
# Conn is too old or wall clock went back in the past
debug("ConnMan.get(): closing expired connection")
ConnMan.close(conn)
conn = None
ConnMan.conn_pool_sem.release()
if not conn:
debug("ConnMan.get(): creating new connection: %s" % conn_id)
conn = http_connection(conn_id, hostname, ssl, cfg)
conn.c.connect()
if conn.ssl and cfg.check_ssl_certificate and cfg.check_ssl_hostname:
conn.match_hostname()
conn.counter += 1
return conn
@staticmethod
def put(conn):
if conn.id.startswith("proxy://"):
ConnMan.close(conn)
debug("ConnMan.put(): closing proxy connection (keep-alive not yet"
" supported)")
return
if conn.counter >= ConnMan.conn_max_counter:
ConnMan.close(conn)
debug("ConnMan.put(): closing over-used connection")
return
cfg = Config()
if not cfg.connection_pooling:
ConnMan.close(conn)
debug("ConnMan.put(): closing connection (connection pooling disabled)")
return
# Update timestamp of conn to record when was its last use
conn.last_used_time = time()
ConnMan.conn_pool_sem.acquire()
ConnMan.conn_pool[conn.id].append(conn)
ConnMan.conn_pool_sem.release()
debug("ConnMan.put(): connection put back to pool (%s#%d)"
% (conn.id, conn.counter))
@staticmethod
def close(conn):
if conn:
conn.c.close()
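## Illustrative sketch of how the pool above is meant to be used (assumes a
## configured s3cmd environment; the endpoint name is only a placeholder):
## acquire a pooled connection with ConnMan.get(), issue the request on the
## underlying httplib object conn.c, drain the response, then hand the
## connection back with ConnMan.put() so it can be re-used until
## conn_max_counter or connection_max_age is reached.
def _example_pooled_head_request(hostname="s3.amazonaws.com"):
    conn = ConnMan.get(hostname)
    try:
        conn.c.request("HEAD", "/")
        resp = conn.c.getresponse()
        resp.read()  # drain the body so the connection stays re-usable
        return resp.status
    finally:
        ConnMan.put(conn)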
| 13,293 | Python | .py | 286 | 35.251748 | 116 | 0.598936 | s3tools/s3cmd | 4,533 | 903 | 301 | GPL-2.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,401 | Progress.py | s3tools_s3cmd/S3/Progress.py |
# -*- coding: utf-8 -*-
## --------------------------------------------------------------------
## Amazon S3 manager
##
## Authors : Michal Ludvig <michal@logix.cz> (https://www.logix.cz/michal)
## Florent Viard <florent@sodria.com> (https://www.sodria.com)
## Copyright : TGRMN Software, Sodria SAS and contributors
## License : GPL Version 2
## Website : https://s3tools.org
## --------------------------------------------------------------------
from __future__ import absolute_import, division
import sys
import datetime
import time
import S3.Utils
class Progress(object):
_stdout = sys.stdout
_last_display = 0
def __init__(self, labels, total_size):
self._stdout = sys.stdout
self.new_file(labels, total_size)
def new_file(self, labels, total_size):
self.labels = labels
self.total_size = total_size
# Set initial_position for the case where we're not
# counting from 0, for instance when appending to a
# partially downloaded file. Setting initial_position
# lets the speed be computed correctly.
self.initial_position = 0
self.current_position = self.initial_position
self.time_start = datetime.datetime.now()
self.time_last = self.time_start
self.time_current = self.time_start
self.display(new_file = True)
def update(self, current_position = -1, delta_position = -1):
self.time_last = self.time_current
self.time_current = datetime.datetime.now()
if current_position > -1:
self.current_position = current_position
elif delta_position > -1:
self.current_position += delta_position
#else:
# no update, just call display()
self.display()
def done(self, message):
self.display(done_message = message)
def output_labels(self):
self._stdout.write(u"%(action)s: '%(source)s' -> '%(destination)s' %(extra)s\n" % self.labels)
self._stdout.flush()
def _display_needed(self):
# We only need to update the display every so often.
if time.time() - self._last_display > 1:
self._last_display = time.time()
return True
return False
def display(self, new_file = False, done_message = None):
"""
display(new_file = False[/True], done_message = None)
Override this method to provide a nicer output.
"""
if new_file:
self.output_labels()
self.last_milestone = 0
return
if self.current_position == self.total_size:
print_size = S3.Utils.formatSize(self.current_position, True)
if print_size[1] != "": print_size[1] += "B"
timedelta = self.time_current - self.time_start
sec_elapsed = timedelta.days * 86400 + timedelta.seconds + float(timedelta.microseconds) / 1000000.0
print_speed = S3.Utils.formatSize((self.current_position - self.initial_position) / sec_elapsed, True, True)
self._stdout.write("100%% %s%s in %.2fs (%.2f %sB/s)\n" %
(print_size[0], print_size[1], sec_elapsed, print_speed[0], print_speed[1]))
self._stdout.flush()
return
rel_position = (self.current_position * 100) // self.total_size
if rel_position >= self.last_milestone:
# Move by increments of 5.
# NOTE: to check: this does not seem to do what it appears to be designed to do
self.last_milestone = (rel_position // 5) * 5
self._stdout.write("%d%% " % self.last_milestone)
self._stdout.flush()
return
class ProgressANSI(Progress):
## http://en.wikipedia.org/wiki/ANSI_escape_code
SCI = '\x1b['
ANSI_hide_cursor = SCI + "?25l"
ANSI_show_cursor = SCI + "?25h"
ANSI_save_cursor_pos = SCI + "s"
ANSI_restore_cursor_pos = SCI + "u"
ANSI_move_cursor_to_column = SCI + "%uG"
ANSI_erase_to_eol = SCI + "0K"
ANSI_erase_current_line = SCI + "2K"
def display(self, new_file = False, done_message = None):
"""
display(new_file = False[/True], done_message = None)
"""
if new_file:
self.output_labels()
self._stdout.write(self.ANSI_save_cursor_pos)
self._stdout.flush()
return
# Only display progress every so often
if not (new_file or done_message) and not self._display_needed():
return
timedelta = self.time_current - self.time_start
sec_elapsed = timedelta.days * 86400 + timedelta.seconds + float(timedelta.microseconds)/1000000.0
if (sec_elapsed > 0):
print_speed = S3.Utils.formatSize((self.current_position - self.initial_position) / sec_elapsed, True, True)
else:
print_speed = (0, "")
self._stdout.write(self.ANSI_restore_cursor_pos)
self._stdout.write(self.ANSI_erase_to_eol)
self._stdout.write("%(current)s of %(total)s %(percent)3d%% in %(elapsed)ds %(speed).2f %(speed_coeff)sB/s" % {
"current" : str(self.current_position).rjust(len(str(self.total_size))),
"total" : self.total_size,
"percent" : self.total_size and ((self.current_position * 100) // self.total_size) or 0,
"elapsed" : sec_elapsed,
"speed" : print_speed[0],
"speed_coeff" : print_speed[1]
})
if done_message:
self._stdout.write(" %s\n" % done_message)
self._stdout.flush()
class ProgressCR(Progress):
## Uses CR char (Carriage Return) just like other progress bars do.
CR_char = chr(13)
def display(self, new_file = False, done_message = None):
"""
display(new_file = False[/True], done_message = None)
"""
if new_file:
self.output_labels()
return
# Only display progress every so often
if not (new_file or done_message) and not self._display_needed():
return
timedelta = self.time_current - self.time_start
sec_elapsed = timedelta.days * 86400 + timedelta.seconds + float(timedelta.microseconds)/1000000.0
if (sec_elapsed > 0):
print_speed = S3.Utils.formatSize((self.current_position - self.initial_position) / sec_elapsed, True, True)
else:
print_speed = (0, "")
self._stdout.write(self.CR_char)
output = " %(current)s of %(total)s %(percent)3d%% in %(elapsed)4ds %(speed)7.2f %(speed_coeff)sB/s" % {
"current" : str(self.current_position).rjust(len(str(self.total_size))),
"total" : self.total_size,
"percent" : self.total_size and ((self.current_position * 100) // self.total_size) or 0,
"elapsed" : sec_elapsed,
"speed" : print_speed[0],
"speed_coeff" : print_speed[1]
}
self._stdout.write(output)
if done_message:
self._stdout.write(" %s\n" % done_message)
self._stdout.flush()
class StatsInfo(object):
"""Holding info for stats totals"""
def __init__(self):
self.files = None
self.size = None
self.files_transferred = None
self.size_transferred = None
self.files_copied = None
self.size_copied = None
self.files_deleted = None
self.size_deleted = None
def format_output(self):
outstr = u""
if self.files is not None:
tmp_str = u"Number of files: %d"% self.files
if self.size is not None:
tmp_str += " (%d bytes) "% self.size
outstr += u"\nStats: " + tmp_str
if self.files_transferred:
tmp_str = u"Number of files transferred: %d"% self.files_transferred
if self.size_transferred is not None:
tmp_str += " (%d bytes) "% self.size_transferred
outstr += u"\nStats: " + tmp_str
if self.files_copied:
tmp_str = u"Number of files copied: %d"% self.files_copied
if self.size_copied is not None:
tmp_str += " (%d bytes) "% self.size_copied
outstr += u"\nStats: " + tmp_str
if self.files_deleted:
tmp_str = u"Number of files deleted: %d"% self.files_deleted
if self.size_deleted is not None:
tmp_str += " (%d bytes) "% self.size_deleted
outstr += u"\nStats: " + tmp_str
return outstr
# vim:et:ts=4:sts=4:ai
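## Illustrative sketch of driving one of the progress classes above by hand
## (the file names are placeholders). output_labels() expects the 'action',
## 'source', 'destination' and 'extra' keys; update() accepts either an
## absolute position or a delta.
if __name__ == "__main__":
    labels = {"action": u"upload", "source": u"local.bin",
              "destination": u"s3://bucket/remote.bin", "extra": u""}
    progress = ProgressCR(labels, total_size=1000)
    for _ in range(10):
        time.sleep(0.01)                  # simulate one transferred chunk
        progress.update(delta_position=100)
    progress.done(u"done")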
| 8,555 | Python | .py | 191 | 35.534031 | 122 | 0.581563 | s3tools/s3cmd | 4,533 | 903 | 301 | GPL-2.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,402 | PkgInfo.py | s3tools_s3cmd/S3/PkgInfo.py |
# -*- coding: utf-8 -*-
## --------------------------------------------------------------------
## Amazon S3 manager
##
## Authors : Michal Ludvig <michal@logix.cz> (https://www.logix.cz/michal)
## Florent Viard <florent@sodria.com> (https://www.sodria.com)
## Copyright : TGRMN Software, Sodria SAS and contributors
## License : GPL Version 2
## Website : https://s3tools.org
## --------------------------------------------------------------------
package = "s3cmd"
version = "2.4.0-dev"
url = "http://s3tools.org"
license = "GNU GPL v2+"
short_description = "Command line tool for managing Amazon S3 and CloudFront services"
long_description = """
S3cmd lets you copy files from/to Amazon S3
(Simple Storage Service) using a simple to use
command line client. Supports rsync-like backup,
GPG encryption, and more. Also supports management
of Amazon's CloudFront content delivery network.
"""
# vim:et:ts=4:sts=4:ai
| 938 | Python | .py | 23 | 39.652174 | 86 | 0.615132 | s3tools/s3cmd | 4,533 | 903 | 301 | GPL-2.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,403 | AccessLog.py | s3tools_s3cmd/S3/AccessLog.py |
# -*- coding: utf-8 -*-
## --------------------------------------------------------------------
## Amazon S3 - Access Control List representation
##
## Authors : Michal Ludvig <michal@logix.cz> (https://www.logix.cz/michal)
## Florent Viard <florent@sodria.com> (https://www.sodria.com)
## Copyright : TGRMN Software, Sodria SAS and contributors
## License : GPL Version 2
## Website : https://s3tools.org
## --------------------------------------------------------------------
from __future__ import absolute_import, print_function
import sys
from . import S3Uri
from .Exceptions import ParameterError
from .BaseUtils import getTreeFromXml, decode_from_s3
from .ACL import GranteeAnonRead
try:
import xml.etree.ElementTree as ET
except ImportError:
import elementtree.ElementTree as ET
PY3 = (sys.version_info >= (3,0))
__all__ = []
class AccessLog(object):
LOG_DISABLED = "<BucketLoggingStatus></BucketLoggingStatus>"
LOG_TEMPLATE = "<LoggingEnabled><TargetBucket></TargetBucket><TargetPrefix></TargetPrefix></LoggingEnabled>"
def __init__(self, xml = None):
if not xml:
xml = self.LOG_DISABLED
self.tree = getTreeFromXml(xml)
self.tree.attrib['xmlns'] = "http://doc.s3.amazonaws.com/2006-03-01"
def isLoggingEnabled(self):
return (self.tree.find(".//LoggingEnabled") is not None)
def disableLogging(self):
el = self.tree.find(".//LoggingEnabled")
if el:
self.tree.remove(el)
def enableLogging(self, target_prefix_uri):
el = self.tree.find(".//LoggingEnabled")
if not el:
el = getTreeFromXml(self.LOG_TEMPLATE)
self.tree.append(el)
el.find(".//TargetBucket").text = target_prefix_uri.bucket()
el.find(".//TargetPrefix").text = target_prefix_uri.object()
def targetPrefix(self):
if self.isLoggingEnabled():
target_prefix = u"s3://%s/%s" % (
self.tree.find(".//LoggingEnabled//TargetBucket").text,
self.tree.find(".//LoggingEnabled//TargetPrefix").text)
return S3Uri.S3Uri(target_prefix)
else:
return ""
def setAclPublic(self, acl_public):
le = self.tree.find(".//LoggingEnabled")
if le is None:
raise ParameterError("Logging not enabled, can't set default ACL for logs")
tg = le.find(".//TargetGrants")
if not acl_public:
if not tg:
## All good, it wasn't there anyway
return
else:
le.remove(tg)
else: # acl_public == True
anon_read = GranteeAnonRead().getElement()
if not tg:
tg = ET.SubElement(le, "TargetGrants")
## What if TargetGrants already exists? We should check if
## AnonRead is there before appending a new one. Later...
tg.append(anon_read)
def isAclPublic(self):
raise NotImplementedError()
def __unicode__(self):
return decode_from_s3(ET.tostring(self.tree))
def __str__(self):
if PY3:
# Return unicode
return ET.tostring(self.tree, encoding="unicode")
else:
# Return bytes
return ET.tostring(self.tree)
__all__.append("AccessLog")
if __name__ == "__main__":
log = AccessLog()
print(log)
log.enableLogging(S3Uri.S3Uri(u"s3://targetbucket/prefix/log-"))
print(log)
log.setAclPublic(True)
print(log)
log.setAclPublic(False)
print(log)
log.disableLogging()
print(log)
# vim:et:ts=4:sts=4:ai
| 3,622 | Python | .py | 93 | 31.462366 | 112 | 0.596866 | s3tools/s3cmd | 4,533 | 903 | 301 | GPL-2.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,404 | BidirMap.py | s3tools_s3cmd/S3/BidirMap.py |
# -*- coding: utf-8 -*-
## --------------------------------------------------------------------
## Amazon S3 manager
##
## Authors : Michal Ludvig <michal@logix.cz> (https://www.logix.cz/michal)
## Florent Viard <florent@sodria.com> (https://www.sodria.com)
## Copyright : TGRMN Software, Sodria SAS and contributors
## License : GPL Version 2
## Website : https://s3tools.org
## --------------------------------------------------------------------
class BidirMap(object):
def __init__(self, **map):
self.k2v = {}
self.v2k = {}
for key in map:
self.__setitem__(key, map[key])
def __setitem__(self, key, value):
if value in self.v2k:
if self.v2k[value] != key:
raise KeyError("Value '"+str(value)+"' already in use with key '"+str(self.v2k[value])+"'")
try:
del(self.v2k[self.k2v[key]])
except KeyError:
pass
self.k2v[key] = value
self.v2k[value] = key
def __getitem__(self, key):
return self.k2v[key]
def __str__(self):
return self.v2k.__str__()
def getkey(self, value):
return self.v2k[value]
def getvalue(self, key):
return self.k2v[key]
def keys(self):
return [key for key in self.k2v]
def values(self):
return [value for value in self.v2k]
# vim:et:ts=4:sts=4:ai
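## Illustrative sketch (sample entries are made up): BidirMap keeps the
## key->value and value->key mappings in sync, so lookups work in both
## directions.
if __name__ == "__main__":
    m = BidirMap(GET="object_get", PUT="object_put")
    assert m["GET"] == "object_get"            # forward lookup
    assert m.getkey("object_put") == "PUT"     # reverse lookup
    m["HEAD"] = "object_head"                  # __setitem__ updates both dicts
    assert sorted(m.keys()) == ["GET", "HEAD", "PUT"]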
| 1,403 | Python | .py | 39 | 29.384615 | 107 | 0.511078 | s3tools/s3cmd | 4,533 | 903 | 301 | GPL-2.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,405 | SortedDict.py | s3tools_s3cmd/S3/SortedDict.py |
# -*- coding: utf-8 -*-
## --------------------------------------------------------------------
## Amazon S3 manager
##
## Authors : Michal Ludvig <michal@logix.cz> (https://www.logix.cz/michal)
## Florent Viard <florent@sodria.com> (https://www.sodria.com)
## Copyright : TGRMN Software, Sodria SAS and contributors
## License : GPL Version 2
## Website : https://s3tools.org
## --------------------------------------------------------------------
from __future__ import absolute_import, print_function
from .BidirMap import BidirMap
class SortedDictIterator(object):
def __init__(self, sorted_dict, keys, reverse=False):
self.sorted_dict = sorted_dict
self.keys = keys
if reverse:
self.pop_index = -1
else:
self.pop_index = 0
def __iter__(self):
return self
def __next__(self):
try:
return self.keys.pop(self.pop_index)
except IndexError:
raise StopIteration
next = __next__
class SortedDict(dict):
def __init__(self, mapping = {}, ignore_case = True, **kwargs):
"""
WARNING: SortedDict() with ignore_case==True will
drop entries differing only in capitalisation!
Eg: SortedDict({'auckland':1, 'Auckland':2}).keys() => ['Auckland']
With ignore_case==False it's all right
"""
dict.__init__(self, mapping, **kwargs)
self.ignore_case = ignore_case
def keys(self):
# TODO fix
# Probably no longer memory efficient on python2,
# as there are now 2 copies of the keys in order to sort them.
keys = dict.keys(self)
if self.ignore_case:
# Translation map
xlat_map = BidirMap()
for key in keys:
xlat_map[key.lower()] = key
# Lowercase keys
lc_keys = sorted(xlat_map.keys())
return [xlat_map[k] for k in lc_keys]
else:
keys = sorted(keys)
return keys
def __iter__(self):
return SortedDictIterator(self, self.keys())
def __reversed__(self):
return SortedDictIterator(self, self.keys(), reverse=True)
def __getitem__(self, index):
"""Override to support the "get_slice" for python3 """
if isinstance(index, slice):
r = SortedDict(ignore_case = self.ignore_case)
for k in self.keys()[index]:
r[k] = self[k]
else:
r = super(SortedDict, self).__getitem__(index)
return r
if __name__ == "__main__":
d = { 'AWS' : 1, 'Action' : 2, 'america' : 3, 'Auckland' : 4, 'America' : 5 }
sd = SortedDict(d)
print("Wanted: Action, america, Auckland, AWS, [ignore case]")
print("Got: ", end=' ')
for key in sd:
print("%s," % key, end=' ')
print(" [used: __iter__()]")
d = SortedDict(d, ignore_case = False)
print("Wanted: AWS, Action, America, Auckland, america, [case sensitive]")
print("Got: ", end=' ')
for key in d.keys():
print("%s," % key, end=' ')
print(" [used: keys()]")
# vim:et:ts=4:sts=4:ai
| 3,152 | Python | .py | 82 | 30.536585 | 84 | 0.536498 | s3tools/s3cmd | 4,533 | 903 | 301 | GPL-2.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,406 | BaseUtils.py | s3tools_s3cmd/S3/BaseUtils.py |
# -*- coding: utf-8 -*-
## --------------------------------------------------------------------
## Amazon S3 manager
##
## Authors : Michal Ludvig <michal@logix.cz> (https://www.logix.cz/michal)
## Florent Viard <florent@sodria.com> (https://www.sodria.com)
## Copyright : TGRMN Software, Sodria SAS and contributors
## License : GPL Version 2
## Website : https://s3tools.org
## --------------------------------------------------------------------
from __future__ import absolute_import, division
import functools
import re
import posixpath
import sys
from calendar import timegm
from hashlib import md5
from logging import debug, warning, error
import xml.dom.minidom
import xml.etree.ElementTree as ET
from .ExitCodes import EX_OSFILE
try:
import dateutil.parser
except ImportError:
sys.stderr.write(u"""
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
ImportError trying to import dateutil.parser.
Please install the python dateutil module:
$ sudo apt-get install python-dateutil
or
$ sudo yum install python-dateutil
or
$ pip install python-dateutil
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
""")
sys.stderr.flush()
sys.exit(EX_OSFILE)
try:
from urllib import quote
except ImportError:
# python 3 support
from urllib.parse import quote
try:
unicode = unicode
except NameError:
# python 3 support
# In python 3, unicode -> str, and str -> bytes
unicode = str
__all__ = []
s3path = posixpath
__all__.append("s3path")
try:
md5()
except ValueError as exc:
# md5 is disabled for FIPS-compliant Python builds.
# Since s3cmd does not use md5 in a security context,
# it is safe to allow its use by setting usedforsecurity to False.
try:
md5(usedforsecurity=False)
md5 = functools.partial(md5, usedforsecurity=False)
except Exception:
# "usedforsecurity" is only available on python >= 3.9 or RHEL distributions
raise exc
__all__.append("md5")
RE_S3_DATESTRING = re.compile('\\.[0-9]*(?:[Z\\-\\+]*?)')
RE_XML_NAMESPACE = re.compile(b'^(<?[^>]+?>\\s*|\\s*)(<\\w+) xmlns=[\'"](https?://[^\'"]+)[\'"]', re.MULTILINE)
# Date and time helpers
def dateS3toPython(date):
# Reset milliseconds to 000
date = RE_S3_DATESTRING.sub(".000", date)
return dateutil.parser.parse(date, fuzzy=True)
__all__.append("dateS3toPython")
def dateS3toUnix(date):
## NOTE: This is timezone-aware and returns the timestamp relative to GMT
return timegm(dateS3toPython(date).utctimetuple())
__all__.append("dateS3toUnix")
def dateRFC822toPython(date):
"""
Convert a string formatted like '2020-06-27T15:56:34Z' into a python datetime
"""
return dateutil.parser.parse(date, fuzzy=True)
__all__.append("dateRFC822toPython")
def dateRFC822toUnix(date):
return timegm(dateRFC822toPython(date).utctimetuple())
__all__.append("dateRFC822toUnix")
def formatDateTime(s3timestamp):
date_obj = dateutil.parser.parse(s3timestamp, fuzzy=True)
return date_obj.strftime("%Y-%m-%d %H:%M")
__all__.append("formatDateTime")
# Encoding / Decoding
def base_unicodise(string, encoding='UTF-8', errors='replace', silent=False):
"""
Convert 'string' to Unicode or raise an exception.
"""
if type(string) == unicode:
return string
if not silent:
debug("Unicodising %r using %s" % (string, encoding))
try:
return unicode(string, encoding, errors)
except UnicodeDecodeError:
raise UnicodeDecodeError("Conversion to unicode failed: %r" % string)
__all__.append("base_unicodise")
def base_deunicodise(string, encoding='UTF-8', errors='replace', silent=False):
"""
Convert unicode 'string' to <type str>, by default replacing
all invalid characters with '?' or raise an exception.
"""
if type(string) != unicode:
return string
if not silent:
debug("DeUnicodising %r using %s" % (string, encoding))
try:
return string.encode(encoding, errors)
except UnicodeEncodeError:
raise UnicodeEncodeError("Conversion from unicode failed: %r" % string)
__all__.append("base_deunicodise")
def decode_from_s3(string, errors = "replace"):
"""
Convert S3 UTF-8 'string' to Unicode or raise an exception.
"""
return base_unicodise(string, "UTF-8", errors, True)
__all__.append("decode_from_s3")
def encode_to_s3(string, errors='replace'):
"""
Convert Unicode to S3 UTF-8 'string', by default replacing
all invalid characters with '?' or raise an exception.
"""
return base_deunicodise(string, "UTF-8", errors, True)
__all__.append("encode_to_s3")
def s3_quote(param, quote_backslashes=True, unicode_output=False):
"""
URI encode every byte. UriEncode() must enforce the following rules:
- URI encode every byte except the unreserved characters: 'A'-'Z', 'a'-'z', '0'-'9', '-', '.', '_', and '~'.
- The space character is a reserved character and must be encoded as "%20" (and not as "+").
- Each URI encoded byte is formed by a '%' and the two-digit hexadecimal value of the byte.
- Letters in the hexadecimal value must be uppercase, for example "%1A".
- Encode the forward slash character, '/', everywhere except in the object key name.
For example, if the object key name is photos/Jan/sample.jpg, the forward slash in the key name is not encoded.
"""
if quote_backslashes:
safe_chars = "~"
else:
safe_chars = "~/"
param = encode_to_s3(param)
param = quote(param, safe=safe_chars)
if unicode_output:
param = decode_from_s3(param)
else:
param = encode_to_s3(param)
return param
__all__.append("s3_quote")
def base_urlencode_string(string, urlencoding_mode = None, unicode_output=False):
string = encode_to_s3(string)
if urlencoding_mode == "verbatim":
## Don't do any pre-processing
return string
encoded = quote(string, safe="~/")
debug("String '%s' encoded to '%s'" % (string, encoded))
if unicode_output:
return decode_from_s3(encoded)
else:
return encode_to_s3(encoded)
__all__.append("base_urlencode_string")
def base_replace_nonprintables(string, with_message=False):
"""
replace_nonprintables(string)
Replaces all non-printable characters 'ch' in 'string'
where ord(ch) <= 26 with ^@, ^A, ... ^Z
"""
new_string = ""
modified = 0
for c in string:
o = ord(c)
if (o <= 31):
new_string += "^" + chr(ord('@') + o)
modified += 1
elif (o == 127):
new_string += "^?"
modified += 1
else:
new_string += c
if modified and with_message:
warning("%d non-printable characters replaced in: %s" % (modified, new_string))
return new_string
__all__.append("base_replace_nonprintables")
# XML helpers
def parseNodes(nodes):
## WARNING: Ignores text nodes from mixed xml/text.
## For instance <tag1>some text<tag2>other text</tag2></tag1>
## will have its "some text" node ignored.
## WARNING 2: Any node at first level without children will also be ignored
retval = []
for node in nodes:
retval_item = {}
for child in node:
name = decode_from_s3(child.tag)
if len(child):
retval_item[name] = parseNodes([child])
else:
found_text = node.findtext(".//%s" % child.tag)
if found_text is not None:
retval_item[name] = decode_from_s3(found_text)
else:
retval_item[name] = None
if retval_item:
retval.append(retval_item)
return retval
__all__.append("parseNodes")
def getPrettyFromXml(xmlstr):
xmlparser = xml.dom.minidom.parseString(xmlstr)
return xmlparser.toprettyxml()
__all__.append("getPrettyFromXml")
def stripNameSpace(xml):
"""
stripNameSpace(xml) -- remove top-level AWS namespace
Operates on a raw byte (utf-8) xml string, not unicode.
"""
xmlns_match = RE_XML_NAMESPACE.match(xml)
if xmlns_match:
xmlns = xmlns_match.group(3)
xml = RE_XML_NAMESPACE.sub(b"\\1\\2", xml, 1)
else:
xmlns = None
return xml, xmlns
__all__.append("stripNameSpace")
def getTreeFromXml(xml):
xml, xmlns = stripNameSpace(encode_to_s3(xml))
try:
tree = ET.fromstring(xml)
if xmlns:
tree.attrib['xmlns'] = xmlns
return tree
except Exception as e:
error("Error parsing xml: %s", e)
error(xml)
raise
__all__.append("getTreeFromXml")
def getListFromXml(xml, node):
tree = getTreeFromXml(xml)
nodes = tree.findall('.//%s' % (node))
return parseNodes(nodes)
__all__.append("getListFromXml")
def getDictFromTree(tree):
ret_dict = {}
for child in tree:
if len(child):
## Complex-type child. Recurse
content = getDictFromTree(child)
else:
content = decode_from_s3(child.text) if child.text is not None else None
child_tag = decode_from_s3(child.tag)
if child_tag in ret_dict:
if not type(ret_dict[child_tag]) == list:
ret_dict[child_tag] = [ret_dict[child_tag]]
ret_dict[child_tag].append(content or "")
else:
ret_dict[child_tag] = content or ""
return ret_dict
__all__.append("getDictFromTree")
def getTextFromXml(xml, xpath):
tree = getTreeFromXml(xml)
if tree.tag.endswith(xpath):
return decode_from_s3(tree.text) if tree.text is not None else None
else:
result = tree.findtext(xpath)
return decode_from_s3(result) if result is not None else None
__all__.append("getTextFromXml")
def getRootTagName(xml):
tree = getTreeFromXml(xml)
return decode_from_s3(tree.tag) if tree.tag is not None else None
__all__.append("getRootTagName")
def xmlTextNode(tag_name, text):
el = ET.Element(tag_name)
el.text = decode_from_s3(text)
return el
__all__.append("xmlTextNode")
def appendXmlTextNode(tag_name, text, parent):
"""
Creates a new <tag_name> Node and sets
its content to 'text'. Then appends the
created Node to 'parent' element if given.
Returns the newly created Node.
"""
el = xmlTextNode(tag_name, text)
parent.append(el)
return el
__all__.append("appendXmlTextNode")
# vim:et:ts=4:sts=4:ai
| 10,452 | Python | .py | 289 | 30.799308 | 115 | 0.642978 | s3tools/s3cmd | 4,533 | 903 | 301 | GPL-2.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,407 | Custom_httplib27.py | s3tools_s3cmd/S3/Custom_httplib27.py |
from __future__ import absolute_import, print_function
import os
import httplib
from httplib import (_CS_REQ_SENT, _CS_REQ_STARTED, CONTINUE, UnknownProtocol,
CannotSendHeader, NO_CONTENT, NOT_MODIFIED, EXPECTATION_FAILED,
HTTPMessage, HTTPException)
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from .BaseUtils import encode_to_s3
_METHODS_EXPECTING_BODY = ['PATCH', 'POST', 'PUT']
# Patched python 2.X httplib to be able to support the
# Expect: 100-Continue http feature
# Inspired by:
# http://bugs.python.org/file26357/issue1346874-273.patch
def httpresponse_patched_begin(self):
""" Re-implemented httplib begin function
to not loop over "100 CONTINUE" status replies
but to report it to higher level so it can be processed.
"""
if self.msg is not None:
# we've already started reading the response
return
# read only one status even if we get a non-100 response
version, status, reason = self._read_status()
self.status = status
self.reason = reason.strip()
if version == 'HTTP/1.0':
self.version = 10
elif version.startswith('HTTP/1.'):
self.version = 11 # use HTTP/1.1 code for HTTP/1.x where x>=1
elif version == 'HTTP/0.9':
self.version = 9
else:
raise UnknownProtocol(version)
if self.version == 9:
self.length = None
self.chunked = 0
self.will_close = 1
self.msg = HTTPMessage(StringIO())
return
self.msg = HTTPMessage(self.fp, 0)
if self.debuglevel > 0:
for hdr in self.msg.headers:
print("header:", hdr, end=" ")
# don't let the msg keep an fp
self.msg.fp = None
# are we using the chunked-style of transfer encoding?
tr_enc = self.msg.getheader('transfer-encoding')
if tr_enc and tr_enc.lower() == "chunked":
self.chunked = 1
self.chunk_left = None
else:
self.chunked = 0
# will the connection close at the end of the response?
self.will_close = self._check_close()
# do we have a Content-Length?
# NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
length = self.msg.getheader('content-length')
if length and not self.chunked:
try:
self.length = int(length)
except ValueError:
self.length = None
else:
if self.length < 0: # ignore nonsensical negative lengths
self.length = None
else:
self.length = None
# does the body have a fixed length? (of zero)
if (status == NO_CONTENT or status == NOT_MODIFIED or
100 <= status < 200 or # 1xx codes
self._method == 'HEAD'):
self.length = 0
# if the connection remains open, and we aren't using chunked, and
# a content-length was not provided, then assume that the connection
# WILL close.
if not self.will_close and \
not self.chunked and \
self.length is None:
self.will_close = 1
def httpconnection_patched_set_content_length(self, body, method):
## REIMPLEMENTED because it is only present in newer httplib versions but is needed by send_request
# Set the content-length based on the body. If the body is "empty", we
# set Content-Length: 0 for methods that expect a body (RFC 7230,
# Section 3.3.2). If the body is set for other methods, we set the
# header provided we can figure out what the length is.
thelen = None
if body is None and method.upper() in _METHODS_EXPECTING_BODY:
thelen = '0'
elif body is not None:
try:
thelen = str(len(body))
except (TypeError, AttributeError):
# If this is a file-like object, try to
# fstat its file descriptor
try:
thelen = str(os.fstat(body.fileno()).st_size)
except (AttributeError, OSError):
# Don't send a length if this failed
if self.debuglevel > 0: print("Cannot stat!!")
if thelen is not None:
self.putheader('Content-Length', thelen)
def httpconnection_patched_send_request(self, method, url, body, headers):
# Honor explicitly requested Host: and Accept-Encoding: headers.
header_names = dict.fromkeys([k.lower() for k in headers])
skips = {}
if 'host' in header_names:
skips['skip_host'] = 1
if 'accept-encoding' in header_names:
skips['skip_accept_encoding'] = 1
expect_continue = False
for hdr, value in headers.iteritems():
if 'expect' == hdr.lower() and '100-continue' in value.lower():
expect_continue = True
url = encode_to_s3(url)
self.putrequest(method, url, **skips)
if 'content-length' not in header_names:
self._set_content_length(body, method)
for hdr, value in headers.iteritems():
self.putheader(encode_to_s3(hdr), encode_to_s3(value))
# If an Expect: 100-continue was sent, we need to check for a 417
# Expectation Failed to avoid unnecessarily sending the body
# See RFC 2616 8.2.3
if not expect_continue:
self.endheaders(body)
else:
if not body:
raise HTTPException("A body is required when expecting "
"100-continue")
self.endheaders()
resp = self.getresponse()
resp.read()
self._HTTPConnection__state = _CS_REQ_SENT
if resp.status == EXPECTATION_FAILED:
raise ExpectationFailed()
elif resp.status == CONTINUE:
self.send(body)
def httpconnection_patched_endheaders(self, message_body=None):
"""Indicate that the last header line has been sent to the server.
This method sends the request to the server. The optional
message_body argument can be used to pass a message body
associated with the request. The message body will be sent in
the same packet as the message headers if it is string, otherwise it is
sent as a separate packet.
"""
if self._HTTPConnection__state == _CS_REQ_STARTED:
self._HTTPConnection__state = _CS_REQ_SENT
else:
raise CannotSendHeader()
self._send_output(message_body)
# TCP Maximum Segment Size (MSS) is determined by the TCP stack on
# a per-connection basis. There is no simple and efficient
# platform independent mechanism for determining the MSS, so
# instead a reasonable estimate is chosen. The getsockopt()
# interface using the TCP_MAXSEG parameter may be a suitable
# approach on some operating systems. A value of 16KiB is chosen
# as a reasonable estimate of the maximum MSS.
mss = 16384
def httpconnection_patched_send_output(self, message_body=None):
"""Send the currently buffered request and clear the buffer.
Appends an extra \\r\\n to the buffer.
A message_body may be specified, to be appended to the request.
"""
self._buffer.extend((b"", b""))
msg = b"\r\n".join(self._buffer)
del self._buffer[:]
msg = encode_to_s3(msg)
# If msg and message_body are sent in a single send() call,
# it will avoid performance problems caused by the interaction
# between delayed ack and the Nagle algorithm.
if isinstance(message_body, str) and len(message_body) < mss:
msg += message_body
message_body = None
self.send(msg)
if message_body is not None:
#message_body was not a string (i.e. it is a file) and
#we must run the risk of Nagle
self.send(message_body)
class ExpectationFailed(HTTPException):
pass
# Wrappers #
def httpconnection_patched_wrapper_send_body(self, message_body):
self.send(message_body)
httplib.HTTPResponse.begin = httpresponse_patched_begin
httplib.HTTPConnection.endheaders = httpconnection_patched_endheaders
httplib.HTTPConnection._send_output = httpconnection_patched_send_output
httplib.HTTPConnection._set_content_length = httpconnection_patched_set_content_length
httplib.HTTPConnection._send_request = httpconnection_patched_send_request
# Interfaces added to httplib.HTTPConnection:
httplib.HTTPConnection.wrapper_send_body = httpconnection_patched_wrapper_send_body
| 8,180 | Python | .py | 192 | 35.994792 | 86 | 0.674299 | s3tools/s3cmd | 4,533 | 903 | 301 | GPL-2.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,408 | HashCache.py | s3tools_s3cmd/S3/HashCache.py |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
try:
import cPickle as pickle
except ImportError:
# python 3 support
import pickle
from .Utils import deunicodise
class HashCache(object):
def __init__(self):
self.inodes = dict()
def add(self, dev, inode, mtime, size, md5):
if dev == 0 or inode == 0: return # Windows
if dev not in self.inodes:
self.inodes[dev] = dict()
if inode not in self.inodes[dev]:
self.inodes[dev][inode] = dict()
self.inodes[dev][inode][mtime] = dict(md5=md5, size=size)
def md5(self, dev, inode, mtime, size):
try:
d = self.inodes[dev][inode][mtime]
if d['size'] != size:
return None
except Exception:
return None
return d['md5']
def mark_all_for_purge(self):
for d in tuple(self.inodes):
for i in tuple(self.inodes[d]):
for c in tuple(self.inodes[d][i]):
self.inodes[d][i][c]['purge'] = True
def unmark_for_purge(self, dev, inode, mtime, size):
try:
d = self.inodes[dev][inode][mtime]
except KeyError:
return
if d['size'] == size and 'purge' in d:
del self.inodes[dev][inode][mtime]['purge']
def purge(self):
for d in tuple(self.inodes):
for i in tuple(self.inodes[d]):
for m in tuple(self.inodes[d][i]):
if 'purge' in self.inodes[d][i][m]:
del self.inodes[d][i]
break
def save(self, f):
d = dict(inodes=self.inodes, version=1)
with open(deunicodise(f), 'wb') as fp:
pickle.dump(d, fp)
def load(self, f):
with open(deunicodise(f), 'rb') as fp:
d = pickle.load(fp)
if d.get('version') == 1 and 'inodes' in d:
self.inodes = d['inodes']
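## Illustrative sketch (dev/inode/mtime numbers and the digest string are
## made-up values): the cache is keyed by (device, inode, mtime) and only
## returns the stored md5 when the size still matches; entries marked for
## purge and not re-marked are dropped by purge().
if __name__ == "__main__":
    cache = HashCache()
    cache.add(dev=2049, inode=131072, mtime=1700000000, size=4096,
              md5="0123456789abcdef0123456789abcdef")
    assert cache.md5(2049, 131072, 1700000000, 4096) == "0123456789abcdef0123456789abcdef"
    assert cache.md5(2049, 131072, 1700000000, 8192) is None   # size changed
    cache.mark_all_for_purge()
    cache.unmark_for_purge(2049, 131072, 1700000000, 4096)     # file still present
    cache.purge()
    assert cache.md5(2049, 131072, 1700000000, 4096) is not None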
| 1,957 | Python | .py | 54 | 26.314815 | 65 | 0.534601 | s3tools/s3cmd | 4,533 | 903 | 301 | GPL-2.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,409 | MultiPart.py | s3tools_s3cmd/S3/MultiPart.py |
# -*- coding: utf-8 -*-
## Amazon S3 Multipart upload support
## Author: Jerome Leclanche <jerome.leclanche@gmail.com>
## License: GPL Version 2
from __future__ import absolute_import
import sys
from logging import debug, info, warning, error
from .Crypto import calculateChecksum
from .Exceptions import ParameterError
from .S3Uri import S3UriS3
from .BaseUtils import getTextFromXml, getTreeFromXml, s3_quote, parseNodes
from .Utils import formatSize
SIZE_1MB = 1024 * 1024
class MultiPartUpload(object):
"""Supports MultiPartUpload and MultiPartUpload(Copy) operation"""
MIN_CHUNK_SIZE_MB = 5 # 5MB
MAX_CHUNK_SIZE_MB = 5 * 1024 # 5GB
MAX_FILE_SIZE = 5 * 1024 * 1024 # 5TB
def __init__(self, s3, src, dst_uri, headers_baseline=None,
src_size=None):
self.s3 = s3
self.file_stream = None
self.src_uri = None
self.src_size = src_size
self.dst_uri = dst_uri
self.parts = {}
self.headers_baseline = headers_baseline or {}
if isinstance(src, S3UriS3):
# Source is the uri of an object to s3-to-s3 copy with multipart.
self.src_uri = src
if not src_size:
raise ParameterError("Source size is missing for "
"MultipartUploadCopy operation")
c_size = self.s3.config.multipart_copy_chunk_size_mb * SIZE_1MB
else:
# Source is a file_stream to upload
self.file_stream = src
c_size = self.s3.config.multipart_chunk_size_mb * SIZE_1MB
self.chunk_size = c_size
self.upload_id = self.initiate_multipart_upload()
def get_parts_information(self, uri, upload_id):
part_list = self.s3.list_multipart(uri, upload_id)
parts = dict()
for elem in part_list:
try:
parts[int(elem['PartNumber'])] = {
'checksum': elem['ETag'],
'size': elem['Size']
}
except KeyError:
pass
return parts
def get_unique_upload_id(self, uri):
upload_id = ""
multipart_list = self.s3.get_multipart(uri)
for mpupload in multipart_list:
try:
mp_upload_id = mpupload['UploadId']
mp_path = mpupload['Key']
info("mp_path: %s, object: %s" % (mp_path, uri.object()))
if mp_path == uri.object():
if upload_id:
raise ValueError(
"More than one UploadId for URI %s. Disable "
"multipart upload, or use\n %s multipart %s\n"
"to list the Ids, then pass a unique --upload-id "
"into the put command." % (uri, sys.argv[0], uri))
upload_id = mp_upload_id
except KeyError:
pass
return upload_id
def initiate_multipart_upload(self):
"""
Begin a multipart upload
http://docs.amazonwebservices.com/AmazonS3/latest/API/index.html?mpUploadInitiate.html
"""
if self.s3.config.upload_id:
self.upload_id = self.s3.config.upload_id
elif self.s3.config.put_continue:
self.upload_id = self.get_unique_upload_id(self.dst_uri)
else:
self.upload_id = ""
if not self.upload_id:
request = self.s3.create_request("OBJECT_POST", uri=self.dst_uri,
headers=self.headers_baseline,
uri_params={'uploads': None})
response = self.s3.send_request(request)
data = response["data"]
self.upload_id = getTextFromXml(data, "UploadId")
return self.upload_id
def upload_all_parts(self, extra_label=''):
"""
Execute a full multipart upload on a file
Returns the seq/etag dict
TODO use num_processes to thread it
"""
if not self.upload_id:
raise ParameterError("Attempting to use a multipart upload that "
"has not been initiated.")
remote_statuses = {}
if self.src_uri:
filename = self.src_uri.uri()
# Continue is not possible with multipart copy
else:
filename = self.file_stream.stream_name
if self.s3.config.put_continue:
remote_statuses = self.get_parts_information(self.dst_uri,
self.upload_id)
if extra_label:
extra_label = u' ' + extra_label
labels = {
'source': filename,
'destination': self.dst_uri.uri(),
}
seq = 1
if self.src_size:
size_left = self.src_size
nr_parts = self.src_size // self.chunk_size \
+ (self.src_size % self.chunk_size and 1)
debug("MultiPart: Uploading %s in %d parts" % (filename, nr_parts))
while size_left > 0:
offset = self.chunk_size * (seq - 1)
current_chunk_size = min(self.src_size - offset,
self.chunk_size)
size_left -= current_chunk_size
labels['extra'] = "[part %d of %d, %s]%s" % (
seq, nr_parts, "%d%sB" % formatSize(current_chunk_size,
human_readable=True),
extra_label)
try:
if self.file_stream:
self.upload_part(
seq, offset, current_chunk_size, labels,
remote_status=remote_statuses.get(seq))
else:
self.copy_part(
seq, offset, current_chunk_size, labels,
remote_status=remote_statuses.get(seq))
except:
error(u"\nUpload of '%s' part %d failed. Use\n "
"%s abortmp %s %s\nto abort the upload, or\n "
"%s --upload-id %s put ...\nto continue the upload."
% (filename, seq, sys.argv[0], self.dst_uri,
self.upload_id, sys.argv[0], self.upload_id))
raise
seq += 1
debug("MultiPart: Upload finished: %d parts", seq - 1)
return
# Else -> Case of u"<stdin>" source
debug("MultiPart: Uploading from %s" % filename)
while True:
buffer = self.file_stream.read(self.chunk_size)
offset = 0 # send from start of the buffer
current_chunk_size = len(buffer)
labels['extra'] = "[part %d of -, %s]%s" % (
seq, "%d%sB" % formatSize(current_chunk_size,
human_readable=True),
extra_label)
if not buffer:
# EOF
break
try:
self.upload_part(seq, offset, current_chunk_size, labels,
buffer,
remote_status=remote_statuses.get(seq))
except:
error(u"\nUpload of '%s' part %d failed. Use\n "
"%s abortmp %s %s\nto abort, or\n "
"%s --upload-id %s put ...\nto continue the upload."
% (filename, seq, sys.argv[0], self.dst_uri,
self.upload_id, sys.argv[0], self.upload_id))
raise
seq += 1
debug("MultiPart: Upload finished: %d parts", seq - 1)
def upload_part(self, seq, offset, chunk_size, labels, buffer='',
remote_status=None):
"""
Upload a file chunk
http://docs.amazonwebservices.com/AmazonS3/latest/API/index.html?mpUploadUploadPart.html
"""
# TODO implement Content-MD5
debug("Uploading part %i of %r (%s bytes)" % (seq, self.upload_id,
chunk_size))
if remote_status is not None:
if int(remote_status['size']) == chunk_size:
checksum = calculateChecksum(buffer, self.file_stream, offset,
chunk_size,
self.s3.config.send_chunk)
remote_checksum = remote_status['checksum'].strip('"\'')
if remote_checksum == checksum:
warning("MultiPart: size and md5sum match for %s part %d, "
"skipping." % (self.dst_uri, seq))
self.parts[seq] = remote_status['checksum']
return None
else:
warning("MultiPart: checksum (%s vs %s) does not match for"
" %s part %d, reuploading."
% (remote_checksum, checksum, self.dst_uri, seq))
else:
warning("MultiPart: size (%d vs %d) does not match for %s part"
" %d, reuploading." % (int(remote_status['size']),
chunk_size, self.dst_uri, seq))
headers = {"content-length": str(chunk_size)}
query_string_params = {'partNumber': '%s' % seq,
'uploadId': self.upload_id}
request = self.s3.create_request("OBJECT_PUT", uri=self.dst_uri,
headers=headers,
uri_params=query_string_params)
response = self.s3.send_file(request, self.file_stream, labels, buffer,
offset=offset, chunk_size=chunk_size)
self.parts[seq] = response["headers"].get('etag', '').strip('"\'')
return response
def copy_part(self, seq, offset, chunk_size, labels, remote_status=None):
"""
Copy a remote file chunk
http://docs.amazonwebservices.com/AmazonS3/latest/API/index.html?mpUploadUploadPart.html
http://docs.amazonwebservices.com/AmazonS3/latest/API/mpUploadUploadPartCopy.html
"""
debug("Copying part %i of %r (%s bytes)" % (seq, self.upload_id,
chunk_size))
# set up headers with copy-params.
# Examples:
# x-amz-copy-source: /source_bucket/sourceObject
# x-amz-copy-source-range:bytes=first-last
# x-amz-copy-source-if-match: etag
# x-amz-copy-source-if-none-match: etag
# x-amz-copy-source-if-unmodified-since: time_stamp
# x-amz-copy-source-if-modified-since: time_stamp
headers = {
"x-amz-copy-source": s3_quote("/%s/%s" % (self.src_uri.bucket(),
self.src_uri.object()),
quote_backslashes=False,
unicode_output=True)
}
# byte range, with end byte included. A 10 byte file has bytes=0-9
headers["x-amz-copy-source-range"] = \
"bytes=%d-%d" % (offset, (offset + chunk_size - 1))
query_string_params = {'partNumber': '%s' % seq,
'uploadId': self.upload_id}
request = self.s3.create_request("OBJECT_PUT", uri=self.dst_uri,
headers=headers,
uri_params=query_string_params)
labels[u'action'] = u'remote copy'
response = self.s3.send_request_with_progress(request, labels,
chunk_size)
# NOTE: Amazon sends whitespace while upload progresses, which
# accumulates in response body and seems to confuse XML parser.
# Strip newlines to find ETag in XML response data
#data = response["data"].replace("\n", '')
self.parts[seq] = getTextFromXml(response['data'], "ETag") or ''
return response
def complete_multipart_upload(self):
"""
Finish a multipart upload
http://docs.amazonwebservices.com/AmazonS3/latest/API/index.html?mpUploadComplete.html
"""
debug("MultiPart: Completing upload: %s" % self.upload_id)
parts_xml = []
part_xml = "<Part><PartNumber>%i</PartNumber><ETag>%s</ETag></Part>"
for seq, etag in self.parts.items():
parts_xml.append(part_xml % (seq, etag))
body = "<CompleteMultipartUpload>%s</CompleteMultipartUpload>" \
% "".join(parts_xml)
headers = {"content-length": str(len(body))}
request = self.s3.create_request(
"OBJECT_POST", uri=self.dst_uri, headers=headers, body=body,
uri_params={'uploadId': self.upload_id})
response = self.s3.send_request(request)
return response
def abort_upload(self):
"""
Abort multipart upload
http://docs.amazonwebservices.com/AmazonS3/latest/API/index.html?mpUploadAbort.html
"""
debug("MultiPart: Aborting upload: %s" % self.upload_id)
#request = self.s3.create_request("OBJECT_DELETE", uri = self.uri,
# uri_params = {'uploadId': self.upload_id})
#response = self.s3.send_request(request)
response = None
return response
# vim:et:ts=4:sts=4:ai
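## Illustrative sketch of the part-splitting arithmetic used by
## upload_all_parts() above (the sizes are made-up values): the part count is
## the integer division of the source size by the chunk size, plus one for a
## remainder, and part "seq" starts at offset chunk_size * (seq - 1).
if __name__ == "__main__":
    src_size = 12 * SIZE_1MB + 512
    chunk_size = MultiPartUpload.MIN_CHUNK_SIZE_MB * SIZE_1MB
    nr_parts = src_size // chunk_size + (src_size % chunk_size and 1)
    assert nr_parts == 3
    offsets = [chunk_size * (seq - 1) for seq in range(1, nr_parts + 1)]
    sizes = [min(src_size - offset, chunk_size) for offset in offsets]
    assert sum(sizes) == src_size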
| 13,649 | Python | .py | 281 | 33.096085 | 96 | 0.516664 | s3tools/s3cmd | 4,533 | 903 | 301 | GPL-2.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,410 | Crypto.py | s3tools_s3cmd/S3/Crypto.py |
# -*- coding: utf-8 -*-
## --------------------------------------------------------------------
## Amazon S3 manager
##
## Authors : Michal Ludvig <michal@logix.cz> (https://www.logix.cz/michal)
## Florent Viard <florent@sodria.com> (https://www.sodria.com)
## Copyright : TGRMN Software, Sodria SAS and contributors
## License : GPL Version 2
## Website : https://s3tools.org
## --------------------------------------------------------------------
from __future__ import absolute_import
import sys
import hmac
try:
from base64 import encodebytes as encodestring
except ImportError:
# Python 2 support
from base64 import encodestring
from . import Config
from logging import debug
from .BaseUtils import encode_to_s3, decode_from_s3, s3_quote, md5, unicode
from .Utils import time_to_epoch, deunicodise, check_bucket_name_dns_support
from .SortedDict import SortedDict
import datetime
from hashlib import sha1, sha256
__all__ = []
def format_param_str(params, always_have_equal=False, limited_keys=None):
"""
Format URL parameters from a params dict and returns
?parm1=val1&parm2=val2 or an empty string if there
are no parameters. Output of this function should
be appended directly to self.resource['uri']
- Set "always_have_equal" to always have the "=" char for a param even when
there is no value for it.
- Set "limited_keys" list to restrict the param string to keys that are
defined in it.
"""
if not params:
return ""
param_str = ""
equal_str = always_have_equal and u'=' or ''
for key in sorted(params.keys()):
if limited_keys and key not in limited_keys:
continue
value = params[key]
if value in (None, ""):
param_str += "&%s%s" % (s3_quote(key, unicode_output=True), equal_str)
else:
param_str += "&%s=%s" % (key, s3_quote(params[key], unicode_output=True))
return param_str and "?" + param_str[1:]
__all__.append("format_param_str")
### AWS Version 2 signing
def sign_string_v2(string_to_sign):
"""Sign a string with the secret key, returning base64 encoded results.
By default the configured secret key is used, but may be overridden as
an argument.
Useful for REST authentication. See http://s3.amazonaws.com/doc/s3-developer-guide/RESTAuthentication.html
string_to_sign should be utf-8 "bytes".
and returned signature will be utf-8 encoded "bytes".
"""
secret_key = Config.Config().secret_key
signature = encodestring(hmac.new(encode_to_s3(secret_key), string_to_sign, sha1).digest()).strip()
return signature
__all__.append("sign_string_v2")
def sign_request_v2(method='GET', canonical_uri='/', params=None, cur_headers=None):
"""Sign a string with the secret key, returning base64 encoded results.
By default the configured secret key is used, but may be overridden as
an argument.
Useful for REST authentication. See http://s3.amazonaws.com/doc/s3-developer-guide/RESTAuthentication.html
string_to_sign should be utf-8 "bytes".
"""
# valid sub-resources to be included in sign v2:
SUBRESOURCES_TO_INCLUDE = ['acl', 'lifecycle', 'location', 'logging',
'notification', 'partNumber', 'policy',
'requestPayment', 'tagging', 'torrent',
'uploadId', 'uploads', 'versionId',
'versioning', 'versions', 'website',
# Missing from the AWS S3 docs but needed
'delete', 'cors', 'restore']
if cur_headers is None:
cur_headers = SortedDict(ignore_case = True)
access_key = Config.Config().access_key
string_to_sign = method + "\n"
string_to_sign += cur_headers.get("content-md5", "") + "\n"
string_to_sign += cur_headers.get("content-type", "") + "\n"
string_to_sign += cur_headers.get("date", "") + "\n"
for header in sorted(cur_headers.keys()):
if header.startswith("x-amz-"):
string_to_sign += header + ":" + cur_headers[header] + "\n"
if header.startswith("x-emc-"):
string_to_sign += header + ":"+ cur_headers[header] + "\n"
canonical_uri = s3_quote(canonical_uri, quote_backslashes=False, unicode_output=True)
canonical_querystring = format_param_str(params, limited_keys=SUBRESOURCES_TO_INCLUDE)
# canonical_querystring is empty if no params are given, otherwise it
# starts with a "?"
canonical_uri += canonical_querystring
string_to_sign += canonical_uri
debug("SignHeaders: " + repr(string_to_sign))
signature = decode_from_s3(sign_string_v2(encode_to_s3(string_to_sign)))
new_headers = SortedDict(list(cur_headers.items()), ignore_case=True)
new_headers["Authorization"] = "AWS " + access_key + ":" + signature
return new_headers
__all__.append("sign_request_v2")
def sign_url_v2(url_to_sign, expiry):
"""Sign a URL in s3://bucket/object form with the given expiry
time. The object will be accessible via the signed URL until the
AWS key and secret are revoked or the expiry time is reached, even
if the object is otherwise private.
See: http://s3.amazonaws.com/doc/s3-developer-guide/RESTAuthentication.html
"""
return sign_url_base_v2(
bucket = url_to_sign.bucket(),
object = url_to_sign.object(),
expiry = expiry
)
__all__.append("sign_url_v2")
def sign_url_base_v2(**parms):
"""Shared implementation of sign_url methods. Takes a hash of 'bucket', 'object' and 'expiry' as args."""
content_disposition=Config.Config().content_disposition
content_type=Config.Config().content_type
parms['expiry']=time_to_epoch(parms['expiry'])
parms['access_key']=Config.Config().access_key
parms['host_base']=Config.Config().host_base
parms['object'] = s3_quote(parms['object'], quote_backslashes=False, unicode_output=True)
parms['proto'] = 'http'
if Config.Config().signurl_use_https:
parms['proto'] = 'https'
debug("Expiry interpreted as epoch time %s", parms['expiry'])
signtext = 'GET\n\n\n%(expiry)d\n/%(bucket)s/%(object)s' % parms
param_separator = '?'
if content_disposition:
signtext += param_separator + 'response-content-disposition=' + content_disposition
param_separator = '&'
if content_type:
signtext += param_separator + 'response-content-type=' + content_type
param_separator = '&'
debug("Signing plaintext: %r", signtext)
parms['sig'] = s3_quote(sign_string_v2(encode_to_s3(signtext)), unicode_output=True)
debug("Urlencoded signature: %s", parms['sig'])
if check_bucket_name_dns_support(Config.Config().host_bucket, parms['bucket']):
url = "%(proto)s://%(bucket)s.%(host_base)s/%(object)s"
else:
url = "%(proto)s://%(host_base)s/%(bucket)s/%(object)s"
url += "?AWSAccessKeyId=%(access_key)s&Expires=%(expiry)d&Signature=%(sig)s"
url = url % parms
if content_disposition:
url += "&response-content-disposition=" + s3_quote(content_disposition, unicode_output=True)
if content_type:
url += "&response-content-type=" + s3_quote(content_type, unicode_output=True)
return url
__all__.append("sign_url_base_v2")
def sign(key, msg):
return hmac.new(key, encode_to_s3(msg), sha256).digest()
def getSignatureKey(key, dateStamp, regionName, serviceName):
"""
Input: unicode params
Output: bytes
"""
kDate = sign(encode_to_s3('AWS4' + key), dateStamp)
kRegion = sign(kDate, regionName)
kService = sign(kRegion, serviceName)
kSigning = sign(kService, 'aws4_request')
return kSigning
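# Illustrative sketch (not part of the original module): the derivation
# chains HMAC-SHA256 over the scope components, so
#   getSignatureKey(secret, '20240905', 'us-east-1', 's3')
# is equivalent to
#   sign(sign(sign(sign(encode_to_s3('AWS4' + secret), '20240905'),
#                  'us-east-1'), 's3'), 'aws4_request')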
def sign_request_v4(method='GET', host='', canonical_uri='/', params=None,
region='us-east-1', cur_headers=None, body=b''):
service = 's3'
if cur_headers is None:
cur_headers = SortedDict(ignore_case = True)
cfg = Config.Config()
access_key = cfg.access_key
secret_key = cfg.secret_key
t = datetime.datetime.utcnow()
amzdate = t.strftime('%Y%m%dT%H%M%SZ')
datestamp = t.strftime('%Y%m%d')
signing_key = getSignatureKey(secret_key, datestamp, region, service)
canonical_uri = s3_quote(canonical_uri, quote_backslashes=False, unicode_output=True)
canonical_querystring = format_param_str(params, always_have_equal=True).lstrip('?')
if type(body) == type(sha256(b'')):
payload_hash = decode_from_s3(body.hexdigest())
else:
payload_hash = decode_from_s3(sha256(encode_to_s3(body)).hexdigest())
canonical_headers = {'host' : host,
'x-amz-content-sha256': payload_hash,
'x-amz-date' : amzdate
}
signed_headers = 'host;x-amz-content-sha256;x-amz-date'
for header in cur_headers.keys():
# avoid duplicate headers and previous Authorization
if header == 'Authorization' or header in signed_headers.split(';'):
continue
canonical_headers[header.strip()] = cur_headers[header].strip()
signed_headers += ';' + header.strip()
# sort headers into a string
canonical_headers_str = ''
for k, v in sorted(canonical_headers.items()):
canonical_headers_str += k + ":" + v + "\n"
canonical_headers = canonical_headers_str
debug(u"canonical_headers = %s" % canonical_headers)
signed_headers = ';'.join(sorted(signed_headers.split(';')))
canonical_request = method + '\n' + canonical_uri + '\n' + canonical_querystring + '\n' + canonical_headers + '\n' + signed_headers + '\n' + payload_hash
debug('Canonical Request:\n%s\n----------------------' % canonical_request)
algorithm = 'AWS4-HMAC-SHA256'
credential_scope = datestamp + '/' + region + '/' + service + '/' + 'aws4_request'
string_to_sign = algorithm + '\n' + amzdate + '\n' + credential_scope + '\n' + decode_from_s3(sha256(encode_to_s3(canonical_request)).hexdigest())
signature = decode_from_s3(hmac.new(signing_key, encode_to_s3(string_to_sign), sha256).hexdigest())
authorization_header = algorithm + ' ' + 'Credential=' + access_key + '/' + credential_scope + ',' + 'SignedHeaders=' + signed_headers + ',' + 'Signature=' + signature
new_headers = SortedDict(cur_headers.items())
new_headers.update({'x-amz-date':amzdate,
'Authorization':authorization_header,
'x-amz-content-sha256': payload_hash})
debug("signature-v4 headers: %s" % new_headers)
return new_headers
__all__.append("sign_request_v4")
def checksum_file_descriptor(file_desc, offset=0, size=None, hash_func=sha256):
hash = hash_func()
if size is None:
for chunk in iter(lambda: file_desc.read(8192), b''):
hash.update(chunk)
else:
file_desc.seek(offset)
size_left = size
while size_left > 0:
chunk = file_desc.read(min(8192, size_left))
if not chunk:
break
size_left -= len(chunk)
hash.update(chunk)
return hash
__all__.append("checksum_file_stream")
def checksum_sha256_file(file, offset=0, size=None):
if not isinstance(file, unicode):
# file is directly a file descriptor
return checksum_file_descriptor(file, offset, size, sha256)
# Otherwise, we expect file to be a filename
with open(deunicodise(file),'rb') as fp:
return checksum_file_descriptor(fp, offset, size, sha256)
__all__.append("checksum_sha256_file")
def checksum_sha256_buffer(buffer, offset=0, size=None):
hash = sha256()
if size is None:
hash.update(buffer)
else:
hash.update(buffer[offset:offset+size])
return hash
__all__.append("checksum_sha256_buffer")
def generate_content_md5(body):
m = md5(encode_to_s3(body))
base64md5 = encodestring(m.digest())
base64md5 = decode_from_s3(base64md5)
if base64md5[-1] == '\n':
base64md5 = base64md5[0:-1]
return decode_from_s3(base64md5)
__all__.append("generate_content_md5")
def hash_file_md5(filename):
h = md5()
with open(deunicodise(filename), "rb") as fp:
while True:
# Hash 32kB chunks
data = fp.read(32*1024)
if not data:
break
h.update(data)
return h.hexdigest()
__all__.append("hash_file_md5")
def calculateChecksum(buffer, mfile, offset, chunk_size, send_chunk):
md5_hash = md5()
size_left = chunk_size
if buffer == '':
mfile.seek(offset)
while size_left > 0:
data = mfile.read(min(send_chunk, size_left))
if not data:
break
md5_hash.update(data)
size_left -= len(data)
else:
md5_hash.update(buffer)
return md5_hash.hexdigest()
__all__.append("calculateChecksum")
| 12,924 | Python | .py | 279 | 39.516129 | 172 | 0.639392 | s3tools/s3cmd | 4,533 | 903 | 301 | GPL-2.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,411 | Utils.py | s3tools_s3cmd/S3/Utils.py |
# -*- coding: utf-8 -*-
## --------------------------------------------------------------------
## Amazon S3 manager
##
## Authors : Michal Ludvig <michal@logix.cz> (https://www.logix.cz/michal)
## Florent Viard <florent@sodria.com> (https://www.sodria.com)
## Copyright : TGRMN Software, Sodria SAS and contributors
## License : GPL Version 2
## Website : https://s3tools.org
## --------------------------------------------------------------------
from __future__ import absolute_import, division
import os
import time
import re
import string as string_mod
import random
import errno
from logging import debug
try:
unicode
except NameError:
# python 3 support
# In python 3, unicode -> str, and str -> bytes
unicode = str
import S3.Config
import S3.Exceptions
from S3.BaseUtils import (base_urlencode_string, base_replace_nonprintables,
base_unicodise, base_deunicodise, md5)
__all__ = []
def formatSize(size, human_readable=False, floating_point=False):
size = floating_point and float(size) or int(size)
if human_readable:
coeffs = ['K', 'M', 'G', 'T']
coeff = ""
while size > 2048:
size /= 1024
coeff = coeffs.pop(0)
return (floating_point and float(size) or int(size), coeff)
else:
return (size, "")
__all__.append("formatSize")
def convertHeaderTupleListToDict(list):
"""
Header keys are always in lowercase in python2 but not in python3.
"""
retval = {}
for tuple in list:
retval[tuple[0].lower()] = tuple[1]
return retval
__all__.append("convertHeaderTupleListToDict")
_rnd_chars = string_mod.ascii_letters + string_mod.digits
_rnd_chars_len = len(_rnd_chars)
def rndstr(len):
retval = ""
while len > 0:
retval += _rnd_chars[random.randint(0, _rnd_chars_len-1)]
len -= 1
return retval
__all__.append("rndstr")
def mktmpsomething(prefix, randchars, createfunc):
old_umask = os.umask(0o077)
tries = 5
while tries > 0:
dirname = prefix + rndstr(randchars)
try:
createfunc(dirname)
break
except OSError as e:
if e.errno != errno.EEXIST:
os.umask(old_umask)
raise
tries -= 1
os.umask(old_umask)
return dirname
__all__.append("mktmpsomething")
def mktmpdir(prefix = os.getenv('TMP','/tmp') + "/tmpdir-", randchars = 10):
return mktmpsomething(prefix, randchars, os.mkdir)
__all__.append("mktmpdir")
def mktmpfile(prefix = os.getenv('TMP','/tmp') + "/tmpfile-", randchars = 20):
createfunc = lambda filename : os.close(os.open(deunicodise(filename), os.O_CREAT | os.O_EXCL))
return mktmpsomething(prefix, randchars, createfunc)
__all__.append("mktmpfile")
def mkdir_with_parents(dir_name):
"""
mkdir_with_parents(dst_dir)
Create directory 'dir_name' with all parent directories
Returns True on success, False otherwise.
"""
pathmembers = dir_name.split(os.sep)
tmp_stack = []
while pathmembers and not os.path.isdir(deunicodise(os.sep.join(pathmembers))):
tmp_stack.append(pathmembers.pop())
while tmp_stack:
pathmembers.append(tmp_stack.pop())
cur_dir = os.sep.join(pathmembers)
try:
debug("mkdir(%s)" % cur_dir)
os.mkdir(deunicodise(cur_dir))
except (OSError, IOError) as e:
debug("Can not make directory '%s' (Reason: %s)" % (cur_dir, e.strerror))
return False
except Exception as e:
debug("Can not make directory '%s' (Reason: %s)" % (cur_dir, e))
return False
return True
__all__.append("mkdir_with_parents")
def unicodise(string, encoding=None, errors='replace', silent=False):
if not encoding:
encoding = S3.Config.Config().encoding
return base_unicodise(string, encoding, errors, silent)
__all__.append("unicodise")
def unicodise_s(string, encoding=None, errors='replace'):
"""
Alias to silent version of unicodise
"""
return unicodise(string, encoding, errors, True)
__all__.append("unicodise_s")
def deunicodise(string, encoding=None, errors='replace', silent=False):
if not encoding:
encoding = S3.Config.Config().encoding
return base_deunicodise(string, encoding, errors, silent)
__all__.append("deunicodise")
def deunicodise_s(string, encoding=None, errors='replace'):
"""
Alias to silent version of deunicodise
"""
return deunicodise(string, encoding, errors, True)
__all__.append("deunicodise_s")
def unicodise_safe(string, encoding=None):
"""
Convert 'string' to Unicode according to current encoding
and replace all invalid characters with '?'
"""
return unicodise(deunicodise(string, encoding), encoding).replace(u'\ufffd', '?')
__all__.append("unicodise_safe")
## Low level methods
def urlencode_string(string, urlencoding_mode=None, unicode_output=False):
if urlencoding_mode is None:
urlencoding_mode = S3.Config.Config().urlencoding_mode
return base_urlencode_string(string, urlencoding_mode, unicode_output)
__all__.append("urlencode_string")
def replace_nonprintables(string):
"""
replace_nonprintables(string)
Replaces all non-printable characters 'ch' in 'string'
where ord(ch) <= 26 with ^@, ^A, ... ^Z
"""
warning_message = (S3.Config.Config().urlencoding_mode != "fixbucket")
return base_replace_nonprintables(string, warning_message)
__all__.append("replace_nonprintables")
def time_to_epoch(t):
"""Convert time specified in a variety of forms into UNIX epoch time.
Accepts datetime.datetime, int, anything that has a strftime() method, and standard time 9-tuples
"""
if isinstance(t, int):
# Already an int
return t
elif isinstance(t, tuple) or isinstance(t, time.struct_time):
# Assume it's a time 9-tuple
return int(time.mktime(t))
elif hasattr(t, 'timetuple'):
# Looks like a datetime object or compatible
return int(time.mktime(t.timetuple()))
elif hasattr(t, 'strftime'):
# Looks like the object supports the standard strftime()
return int(t.strftime('%s'))
elif isinstance(t, str) or isinstance(t, unicode) or isinstance(t, bytes):
# See if it's a string representation of an epoch
try:
# Support relative times (eg. "+60")
if t.startswith('+'):
return time.time() + int(t[1:])
return int(t)
except ValueError:
# Try to parse it as a timestamp string
try:
return time.strptime(t)
except ValueError as ex:
# Will fall through
debug("Failed to parse date with strptime: %s", ex)
pass
raise S3.Exceptions.ParameterError('Unable to convert %r to an epoch time. Pass an epoch time. Try `date -d \'now + 1 year\' +%%s` (shell) or time.mktime (Python).' % t)
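# Illustrative examples (not part of the original module):
#   time_to_epoch(1700000000)               ->  1700000000
#   time_to_epoch('+60')                    ->  time.time() + 60
#   time_to_epoch(datetime.datetime.now())  # local epoch via timetuple()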
def check_bucket_name(bucket, dns_strict=True):
if dns_strict:
invalid = re.search(r"([^a-z0-9\.-])", bucket, re.UNICODE)
if invalid:
raise S3.Exceptions.ParameterError("Bucket name '%s' contains disallowed character '%s'. The only supported ones are: lowercase us-ascii letters (a-z), digits (0-9), dot (.) and hyphen (-)." % (bucket, invalid.groups()[0]))
else:
invalid = re.search(r"([^A-Za-z0-9\._-])", bucket, re.UNICODE)
if invalid:
raise S3.Exceptions.ParameterError("Bucket name '%s' contains disallowed character '%s'. The only supported ones are: us-ascii letters (a-z, A-Z), digits (0-9), dot (.), hyphen (-) and underscore (_)." % (bucket, invalid.groups()[0]))
if len(bucket) < 3:
raise S3.Exceptions.ParameterError("Bucket name '%s' is too short (min 3 characters)" % bucket)
if len(bucket) > 255:
raise S3.Exceptions.ParameterError("Bucket name '%s' is too long (max 255 characters)" % bucket)
if dns_strict:
if len(bucket) > 63:
raise S3.Exceptions.ParameterError("Bucket name '%s' is too long (max 63 characters)" % bucket)
if re.search(r"-\.", bucket, re.UNICODE):
raise S3.Exceptions.ParameterError("Bucket name '%s' must not contain sequence '-.' for DNS compatibility" % bucket)
if re.search(r"\.\.", bucket, re.UNICODE):
raise S3.Exceptions.ParameterError("Bucket name '%s' must not contain sequence '..' for DNS compatibility" % bucket)
if not re.search(r"^[0-9a-z]", bucket, re.UNICODE):
raise S3.Exceptions.ParameterError("Bucket name '%s' must start with a letter or a digit" % bucket)
if not re.search(r"[0-9a-z]$", bucket, re.UNICODE):
raise S3.Exceptions.ParameterError("Bucket name '%s' must end with a letter or a digit" % bucket)
return True
__all__.append("check_bucket_name")
def check_bucket_name_dns_conformity(bucket):
try:
return check_bucket_name(bucket, dns_strict = True)
except S3.Exceptions.ParameterError:
return False
__all__.append("check_bucket_name_dns_conformity")
def check_bucket_name_dns_support(bucket_host, bucket_name):
"""
Check whether the host_bucket pattern supports bucket-in-hostname
addressing and whether the bucket name is DNS compatible.
"""
if "%(bucket)s" not in bucket_host:
return False
return check_bucket_name_dns_conformity(bucket_name)
__all__.append("check_bucket_name_dns_support")
def getBucketFromHostname(hostname):
"""
bucket, success = getBucketFromHostname(hostname)
Only works for hostnames derived from bucket names
using Config.host_bucket pattern.
Returns bucket name and a boolean success flag.
"""
if "%(bucket)s" not in S3.Config.Config().host_bucket:
return (hostname, False)
# Create RE pattern from Config.host_bucket
pattern = S3.Config.Config().host_bucket.lower() % { 'bucket' : '(?P<bucket>.*)' }
m = re.match(pattern, hostname, re.UNICODE)
if not m:
return (hostname, False)
return m.group(1), True
__all__.append("getBucketFromHostname")
def getHostnameFromBucket(bucket):
return S3.Config.Config().host_bucket.lower() % { 'bucket' : bucket }
__all__.append("getHostnameFromBucket")
# Deal with the fact that pwd and grp modules don't exist for Windows
try:
import pwd
def getpwuid_username(uid):
"""returns a username from the password database for the given uid"""
return unicodise_s(pwd.getpwuid(uid).pw_name)
except ImportError:
import getpass
def getpwuid_username(uid):
return unicodise_s(getpass.getuser())
__all__.append("getpwuid_username")
try:
import grp
def getgrgid_grpname(gid):
"""returns a groupname from the group database for the given gid"""
return unicodise_s(grp.getgrgid(gid).gr_name)
except ImportError:
def getgrgid_grpname(gid):
return u"nobody"
__all__.append("getgrgid_grpname")
# vim:et:ts=4:sts=4:ai
| 11,033 | Python | .py | 263 | 35.878327 | 246 | 0.652251 | s3tools/s3cmd | 4,533 | 903 | 301 | GPL-2.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,412 | setup.py | bitcoin-abe_bitcoin-abe/setup.py |
from distutils.core import setup
execfile("Abe/version.py")
setup(
name = "Abe",
version = __version__,
requires = ['Crypto.Hash'],
packages = ['Abe', 'Abe.Chain'],
package_data = {'Abe': ['htdocs/*']},
author = "John Tobey",
author_email = "jtobey@john-edwin-tobey.org",
url = "https://github.com/bitcoin-abe/bitcoin-abe",
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: Financial and Insurance Industry',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Natural Language :: English',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Topic :: Database :: Front-Ends',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Office/Business :: Financial',
'Topic :: Security :: Cryptography',
#'Topic :: Software Development :: Libraries :: Python Modules',
],
description = "Abe: a free block chain browser for Bitcoin-based currencies.",
long_description = """Abe reads the Bitcoin block chain from disk, loads
it into a database, indexes it, and provides a web interface to search
and navigate it. Abe works with several Bitcoin-derived currencies,
including Namecoin and LiteCoin.
Abe draws inspiration from Bitcoin Block Explorer (BBE,
http://blockexplorer.com/) and seeks some level of compatibility with
it but uses a completely new implementation.""",
)
| 1,877 | Python | .py | 41 | 39.731707 | 83 | 0.636661 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,413 | conftest.py | bitcoin-abe_bitcoin-abe/test/conftest.py |
# Copyright(C) 2014 by Abe developers.
# conftest.py: pytest session-scoped objects
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from __future__ import print_function
import pytest
import db
@pytest.fixture(scope="session", params=db.testdb_params())
def db_server(request):
server = db.create_server(request.param)
request.addfinalizer(server.delete)
return server
| 1,003 | Python | .py | 23 | 41.913043 | 70 | 0.782787 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,414 | db.py | bitcoin-abe_bitcoin-abe/test/db.py |
# Copyright(C) 2014 by Abe developers.
# db.py: temporary database for automated testing
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from __future__ import print_function
import pytest
import py.path
import json
import contextlib
import os
import subprocess
import Abe.util
def testdb_params():
dbs = os.environ.get('ABE_TEST_DB')
if dbs is not None:
return dbs.split()
if os.environ.get('ABE_TEST') == 'quick':
return ['sqlite']
return ['sqlite', 'mysql', 'postgres']
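# Illustrative usage (not part of the original module):
#   ABE_TEST_DB="sqlite postgres" py.test   # run against both backends
#   ABE_TEST=quick py.test                  # sqlite only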
# XXX
def ignore_errors(thunk):
def doit():
try:
thunk()
except Exception:
pass
return doit
@pytest.fixture(scope="module")
def testdb(request, db_server):
request.addfinalizer(ignore_errors(db_server.dropdb))
return db_server
def create_server(dbtype=None):
if dbtype in (None, 'sqlite3', 'sqlite'):
return SqliteMemoryDB()
if dbtype in ('mysql', 'MySQLdb'):
return MysqlDB()
if dbtype in ('psycopg2', 'postgres'):
return PostgresDB()
pytest.skip('Unknown dbtype: %s' % dbtype)
class DB(object):
def __init__(db, dbtype, connect_args):
db.dbtype = dbtype
db.connect_args = connect_args
db.cmdline = ('--dbtype', dbtype, '--connect-args', json.dumps(connect_args))
db.store = None
def createdb(db):
pass
def load(db, *args):
db.createdb()
db.store, argv = Abe.util.CmdLine(db.cmdline + args).init()
assert len(argv) == 0
db.store.catch_up()
return db.store
def dropdb(db):
if db.store:
db.store.close()
def delete(db):
pass
class SqliteDB(DB):
def __init__(db, connect_args):
DB.__init__(db, 'sqlite3', connect_args)
def delete(db):
DB.delete(db)
os.unlink(db.connect_args)
class SqliteMemoryDB(SqliteDB):
def __init__(db):
#print("SqliteMemoryDB.__init__")
SqliteDB.__init__(db, ':memory:')
def delete(db):
DB.delete(db)
#print("SqliteMemoryDB.delete")
class ServerDB(DB):
def __init__(db, dbtype):
pytest.importorskip(dbtype)
import tempfile
db.installation_dir = py.path.local(tempfile.mkdtemp(prefix='abe-test-'))
print("Created temporary directory %s" % db.installation_dir)
try:
db.server = db.install_server()
except Exception as e:
#print("EXCEPTION %s" % e)
db._delete_tmpdir()
pytest.skip(str(e))
raise
DB.__init__(db, dbtype, db.get_connect_args())
def install_server(db):
pass
@contextlib.contextmanager
def root(db):
conn = db.connect_as_root()
cur = conn.cursor()
try:
yield cur
except:
conn.rollback()
raise
finally:
cur.close()
conn.close()
def delete(db):
try:
db.shutdown()
db.server.wait()
finally:
db._delete_tmpdir()
pass
def _delete_tmpdir(db):
if os.environ.get('ABE_TEST_KEEP_TMPDIR', '') == '':
db.installation_dir.remove()
print("Deleted temporary directory %s" % db.installation_dir)
class MysqlDB(ServerDB):
def __init__(db):
ServerDB.__init__(db, 'MySQLdb')
def get_connect_args(db):
return {'user': 'abe', 'passwd': 'Bitcoin', 'db': 'abe', 'unix_socket': db.socket}
def install_server(db):
db.socket = str(db.installation_dir.join('mysql.sock'))
db.installation_dir.ensure_dir('tmp')
mycnf = db.installation_dir.join('my.cnf')
mycnf.write('[mysqld]\n'
'datadir=%(installation_dir)s\n'
#'log\n'
#'log-error\n'
'skip-networking\n'
'socket=mysql.sock\n'
'pid-file=mysqld.pid\n'
'tmpdir=tmp\n' % { 'installation_dir': db.installation_dir })
subprocess.check_call(['mysql_install_db', '--defaults-file=' + str(mycnf)])
server = subprocess.Popen(['mysqld', '--defaults-file=' + str(mycnf)])
import time, MySQLdb
tries = 30
for t in range(tries):
try:
with db.root() as cur:
cur.execute("CREATE USER 'abe'@'localhost' IDENTIFIED BY 'Bitcoin'")
return server
except MySQLdb.OperationalError as e:
if t+1 == tries:
raise e
time.sleep(1)
def connect_as_root(db):
import MySQLdb
conn = MySQLdb.connect(unix_socket=db.socket, user='root')
return conn
def createdb(db):
with db.root() as cur:
cur.execute('CREATE DATABASE abe')
cur.execute("GRANT ALL ON abe.* TO 'abe'@'localhost'")
DB.createdb(db)
def dropdb(db):
DB.dropdb(db)
with db.root() as cur:
cur.execute('DROP DATABASE abe')
def shutdown(db):
subprocess.check_call(['mysqladmin', '-S', db.socket, '-u', 'root', 'shutdown'])
class PostgresDB(ServerDB):
def __init__(db):
ServerDB.__init__(db, 'psycopg2')
def get_connect_args(db):
return {'user': 'abe', 'password': 'Bitcoin', 'database': 'abe', 'host': str(db.installation_dir)}
def install_server(db):
db.bindir = subprocess.Popen(['pg_config', '--bindir'], stdout=subprocess.PIPE).communicate()[0].rstrip()
subprocess.check_call([
os.path.join(db.bindir, 'initdb'),
'-D', str(db.installation_dir),
'-U', 'postgres'])
server = subprocess.Popen([
os.path.join(db.bindir, 'postgres'),
'-D', str(db.installation_dir),
'-c', 'listen_addresses=',
'-c', 'unix_socket_directory=.'])
import time, psycopg2
tries = 30
for t in range(tries):
try:
with db.root() as cur:
cur.execute("COMMIT") # XXX
cur.execute("CREATE USER abe UNENCRYPTED PASSWORD 'Bitcoin'")
cur.execute("COMMIT")
return server
except psycopg2.OperationalError as e:
if t+1 == tries:
raise e
time.sleep(1)
def connect_as_root(db):
import psycopg2
conn = psycopg2.connect(host=str(db.installation_dir), user='postgres')
return conn
def createdb(db):
with db.root() as cur:
cur.execute("COMMIT") # XXX
cur.execute('CREATE DATABASE abe')
cur.execute("GRANT ALL ON DATABASE abe TO abe")
cur.execute("COMMIT")
DB.createdb(db)
def dropdb(db):
DB.dropdb(db)
with db.root() as cur:
cur.execute("COMMIT") # XXX
cur.execute('DROP DATABASE abe')
cur.execute("COMMIT")
def shutdown(db):
subprocess.check_call([
os.path.join(db.bindir, 'pg_ctl'), 'stop',
'-D', str(db.installation_dir),
'-m', 'immediate'])
| 7,825 | Python | .py | 217 | 26.967742 | 113 | 0.574221 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,415 | test_block_order.py | bitcoin-abe_bitcoin-abe/test/test_block_order.py |
# Copyright(C) 2014 by Abe developers.
# test_block_order.py: test Abe importing blocks out of order.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
import pytest
import os
import json
import tempfile
import py.path
from db import testdb
import data
import Abe.Chain
from Abe.deserialize import opcodes
@pytest.fixture(scope="module")
def gen(testdb, request):
gen = data.testnet14(testdb)
chain = gen.chain
blocks = gen.blocks
# A - C* - D**
# \
# E - B*
#
# * contains tx1
# ** contains tx2
tx1 = gen.tx(txIn=[gen.txin(prevout=blocks[1]['transactions'][0]['txOut'][0], scriptSig='XXX')],
txOut=[gen.txout(addr='n1pTUVnjZ6GHxujaoJ62P9NBMNjLr5N2EQ', value=50e8)])
A = blocks[-1]
C = gen.block(prev=A, transactions=[gen.coinbase(), tx1])
E = gen.block(prev=A)
B = gen.block(prev=E, transactions=[gen.coinbase(), tx1])
tx2 = gen.tx(txIn=[gen.txin(prevout=C['transactions'][1]['txOut'][0], scriptSig='YYY')],
txOut=[gen.txout(addr='2NFTctsgcAmrgtiboLJUx9q8qu5H1qVpcAb', value=50e8)])
D = gen.block(prev=C, transactions=[gen.coinbase(), tx2])
blocks += [B, C, D, E]
# XXX Lots of code duplicated in test_std_tx.py.
datadir = py.path.local(tempfile.mkdtemp(prefix='abe-test-'))
request.addfinalizer(datadir.remove)
gen.save_blkfile(str(datadir.join('blk0001.dat')), blocks)
gen.store = testdb.load('--datadir', json.dumps([{
'dirname': str(datadir),
'chain': chain.name,
'loader': 'blkfile'}]))
gen.chain = gen.store.get_chain_by_name(chain.name)
return gen
@pytest.fixture(scope="module")
def a2NFT(gen):
return data.ah(gen, '2NFTctsgcAmrgtiboLJUx9q8qu5H1qVpcAb')
def test_a2NFT_balance(a2NFT, gen):
assert a2NFT['balance'] == { gen.chain.id: 50e8 }
| 2,486 | Python | .py | 60 | 36.866667 | 100 | 0.689212 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,416 | datagen.py | bitcoin-abe_bitcoin-abe/test/datagen.py |
# Copyright(C) 2014 by Abe developers.
# datagen.py: test data generation
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
import Abe.Chain
import Abe.BCDataStream
import Abe.util
from Abe.deserialize import opcodes
class Gen(object):
def __init__(gen, rng=1, chain=None, **kwargs):
if not hasattr(rng, 'randrange'):
import random
rng = random.Random(rng)
if chain is None:
chain = Abe.Chain.create("Testnet")
gen._rng = rng
gen.chain = chain
for attr, val in kwargs.items():
setattr(gen, attr, val)
def random_bytes(gen, num_bytes):
return ''.join(chr(gen._rng.randrange(256)) for _ in xrange(num_bytes))
def random_addr_hash(gen):
return gen.random_bytes(20)
def encode_script(gen, *ops):
ds = Abe.BCDataStream.BCDataStream()
for op in ops:
if isinstance(op, int):
ds.write(chr(op))
elif isinstance(op, str):
ds.write_string(op)
else:
raise ValueError(op)
return ds.input
def op(gen, d):
if isinstance(d, int):
if d == 0:
return opcodes.OP_0
if d == -1 or 1 <= d <= 16:
return d + opcodes.OP_1 - 1
# Hmm, maybe time to switch to Python 3 with int.from_bytes?
h = "00%x" % (d if d >= 0 else -1-d)
if len(h) % 2:
h = h[1:]
elif h[2] < '8':
h = h[2:]
if d < 0:
import string
h = h.translate(string.maketrans('0123456789abcdef', 'fedcba9876543210'))
return h.decode('hex')
raise ValueError(d)
def address_scriptPubKey(gen, hash):
return gen.encode_script(opcodes.OP_DUP, opcodes.OP_HASH160, hash, opcodes.OP_EQUALVERIFY, opcodes.OP_CHECKSIG)
def pubkey_scriptPubKey(gen, pubkey):
return gen.encode_script(pubkey, opcodes.OP_CHECKSIG)
def multisig_scriptPubKey(gen, m, pubkeys):
ops = [ gen.op(m) ] + pubkeys + [ gen.op(len(pubkeys)), opcodes.OP_CHECKMULTISIG ]
return gen.encode_script(*ops)
def p2sh_scriptPubKey(gen, hash):
return gen.encode_script(opcodes.OP_HASH160, hash, opcodes.OP_EQUAL)
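# Illustrative note (not part of the original module): these helpers build
# raw scriptPubKeys, e.g. p2sh_scriptPubKey(h) serializes
#   OP_HASH160 <20-byte h> OP_EQUAL
# and multisig_scriptPubKey(2, keys) serializes
#   OP_2 <keys...> OP_n OP_CHECKMULTISIG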
def txin(gen, **kwargs):
txin = { 'sequence': 0xffffffff, 'pos': 0 }
txin.update(kwargs)
if 'prevout' in txin:
txin['prevout_hash'] = txin['prevout']['hash']
txin['prevout_n'] = txin['prevout']['pos']
return txin
def coinbase_txin(gen, **kwargs):
chain = gen.chain
args = {
'prevout_hash': chain.coinbase_prevout_hash,
'prevout_n': chain.coinbase_prevout_n,
'scriptSig': '04ffff001d0101'.decode('hex'),
}
args.update(kwargs)
return gen.txin(**args)
def txout(gen, **kwargs):
txout = { 'value': 1, 'pos': 0 }
txout.update(kwargs)
if 'scriptPubKey' in txout:
pass
elif 'multisig' in txout:
txout['scriptPubKey'] = gen.multisig_scriptPubKey(txout['multisig']['m'], txout['multisig']['pubkeys'])
elif 'pubkey' in txout:
txout['scriptPubKey'] = gen.pubkey_scriptPubKey(txout['pubkey'])
elif 'addr' in txout:
version, hash = Abe.util.decode_check_address(txout['addr'])
if version == gen.chain.address_version:
txout['scriptPubKey'] = gen.address_scriptPubKey(hash)
elif version == gen.chain.script_addr_vers:
txout['scriptPubKey'] = gen.p2sh_scriptPubKey(hash)
else:
raise ValueError('Invalid address version %r not in (%r, %r)' % (version, gen.chain.address_version, gen.chain.script_addr_vers))
else:
txout['scriptPubKey'] = gen.address_scriptPubKey(gen.random_addr_hash())
return txout
def tx(gen, txIn, txOut, version=1, lockTime=0, **kwargs):
chain = gen.chain
def parse_txin(i, arg):
arg['pos'] = i
return gen.txin(**arg)
def parse_txout(i, arg):
arg['pos'] = i
return gen.txout(**arg)
tx = {
'version': version,
'txIn': [parse_txin(i, arg) for i, arg in enumerate(txIn)],
'txOut': [parse_txout(i, arg) for i, arg in enumerate(txOut)],
'lockTime': lockTime,
}
tx['__data__'] = chain.serialize_transaction(tx)
tx['hash'] = chain.transaction_hash(tx['__data__'])
for txout in tx['txOut']:
txout['hash'] = tx['hash']
return tx
def coinbase(gen, txOut=None, value=50e8, **kwargs):
if txOut is None:
txOut = [ gen.txout(value=value) ]
return gen.tx([ gen.coinbase_txin(**kwargs) ], txOut, **kwargs)
def block(gen, prev=None, transactions=None, version=1, nTime=1231006506, nBits=0x1d00ffff, nNonce=253):
chain = gen.chain
if prev is None:
prev = chain.genesis_hash_prev
elif isinstance(prev, dict):
prev = prev['hash']
if transactions is None:
transactions = [gen.coinbase()]
block = {
'version': version,
'hashPrev': prev,
'hashMerkleRoot': chain.merkle_root([ tx['hash'] for tx in transactions ]),
'nTime': nTime,
'nBits': nBits,
'nNonce': nNonce,
'transactions': transactions,
}
block['hash'] = chain.block_header_hash(chain.serialize_block_header(block))
return block
def save_blkfile(gen, blkfile, blocks):
import struct
with open(blkfile, 'wb') as f:
for bobj in blocks:
f.write(gen.chain.magic)
bstr = gen.chain.serialize_block(bobj)
f.write(struct.pack('<i', len(bstr)))
f.write(bstr)
| 6,606 | Python | .py | 156 | 32.403846 | 145 | 0.581113 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,417 | test_max200.py | bitcoin-abe_bitcoin-abe/test/test_max200.py |
# Copyright(C) 2014 by Abe developers.
# test_max200.py: test Abe loading through Maxcoin Block 200.
# This test exercises SHA3 block hashes and an unusual Merkle root algorithm.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
import pytest
from db import testdb
import os
import Abe.util
import Abe.Chain
@pytest.fixture(scope="module")
def max200(testdb):
try:
Abe.util.sha3_256('x')
except Exception as e:
pytest.skip('SHA3 not working: %s' % e)
dirname = os.path.join(os.path.split(__file__)[0], 'max200')
store = testdb.load('--datadir', dirname)
return store
def test_block_number(max200):
assert max200.get_block_number(max200.get_chain_by_name('Maxcoin').id) == 200
| 1,335 | Python | .py | 32 | 39.21875 | 81 | 0.754048 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,418 | test_btc200.py | bitcoin-abe_bitcoin-abe/test/test_btc200.py |
# Copyright(C) 2014 by Abe developers.
# test_btc200.py: test Abe loading through Bitcoin Block 200.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
import pytest
from db import testdb
import os
import Abe.util
import Abe.Chain
@pytest.fixture(scope="module")
def btc200(testdb):
dirname = os.path.join(os.path.split(__file__)[0], 'btc200')
store = testdb.load('--datadir', dirname)
return store
def test_block_number(btc200):
assert btc200.get_block_number(1) == 200
@pytest.fixture(scope="module")
def coinbase_200(btc200):
return btc200.export_tx(tx_hash = '2b1f06c2401d3b49a33c3f5ad5864c0bc70044c4068f9174546f3cfc1887d5ba')
def test_coinbase_hash(coinbase_200):
assert coinbase_200['hash'] == '2b1f06c2401d3b49a33c3f5ad5864c0bc70044c4068f9174546f3cfc1887d5ba'
def test_coinbase_in(coinbase_200):
assert len(coinbase_200['in']) == 1
assert coinbase_200['vin_sz'] == 1
def test_coinbase_lock_time(coinbase_200):
assert coinbase_200['lock_time'] == 0
def test_coinbase_prev_out(coinbase_200):
assert coinbase_200['in'][0]['prev_out'] == {
"hash": "0000000000000000000000000000000000000000000000000000000000000000",
"n": 4294967295
}
def test_coinbase_raw_scriptSig(coinbase_200):
assert coinbase_200['in'][0]['raw_scriptSig'] == "04ffff001d0138"
def test_coinbase_out(coinbase_200):
assert len(coinbase_200['out']) == 1
assert coinbase_200['vout_sz'] == 1
def test_coinbase_raw_scriptPubKey(coinbase_200):
assert coinbase_200['out'][0]['raw_scriptPubKey'] == \
"41045e071dedd1ed03721c6e9bba28fc276795421a378637fb41090192bb9f208630dcbac5862a3baeb9df3ca6e4e256b7fd2404824c20198ca1b004ee2197866433ac"
def test_coinbase_value(coinbase_200):
assert coinbase_200['out'][0]['value'] == "50.00000000"
def test_coinbase_size(coinbase_200):
assert coinbase_200['size'] == 134
def test_coinbase_ver(coinbase_200):
assert coinbase_200['ver'] == 1
@pytest.fixture(scope="module")
def b182t1(btc200):
return btc200.export_tx(
tx_hash = '591e91f809d716912ca1d4a9295e70c3e78bab077683f79350f101da64588073',
format = 'browser')
def test_tx_hash(b182t1):
assert b182t1['hash'] == '591e91f809d716912ca1d4a9295e70c3e78bab077683f79350f101da64588073'
def test_tx_version(b182t1):
assert b182t1['version'] == 1
def test_tx_lockTime(b182t1):
assert b182t1['lockTime'] == 0
def test_tx_size(b182t1):
assert b182t1['size'] == 275
def test_tx_cc(b182t1):
assert len(b182t1['chain_candidates']) == 1
def test_tx_chain_name(b182t1):
assert b182t1['chain_candidates'][0]['chain'].name == 'Bitcoin'
def test_tx_in_longest(b182t1):
assert b182t1['chain_candidates'][0]['in_longest']
def test_tx_block_nTime(b182t1):
assert b182t1['chain_candidates'][0]['block_nTime'] == 1231740736
def test_tx_block_height(b182t1):
assert b182t1['chain_candidates'][0]['block_height'] == 182
def test_tx_block_hash(b182t1):
assert b182t1['chain_candidates'][0]['block_hash'] == \
'0000000054487811fc4ff7a95be738aa5ad9320c394c482b27c0da28b227ad5d'
def test_tx_tx_pos(b182t1):
assert b182t1['chain_candidates'][0]['tx_pos'] == 1
def test_tx_in(b182t1):
assert len(b182t1['in']) == 1
def test_tx_in_pos(b182t1):
assert b182t1['in'][0]['pos'] == 0
def test_tx_in_binscript(b182t1):
assert b182t1['in'][0]['binscript'] == '47304402201f27e51caeb9a0988a1e50799ff0af94a3902403c3ad4068b063e7b4d1b0a76702206713f69bd344058b0dee55a9798759092d0916dbbc3e592fee43060005ddc17401'.decode('hex')
def test_tx_in_value(b182t1):
assert b182t1['in'][0]['value'] == 3000000000
def test_tx_in_prev_out(b182t1):
assert b182t1['in'][0]['o_hash'] == 'a16f3ce4dd5deb92d98ef5cf8afeaf0775ebca408f708b2146c4fb42b41e14be'
assert b182t1['in'][0]['o_pos'] == 1
def test_tx_in_script_type(b182t1):
assert b182t1['in'][0]['script_type'] == Abe.Chain.SCRIPT_TYPE_PUBKEY
def test_tx_in_binaddr(b182t1):
assert b182t1['in'][0]['binaddr'] == '11b366edfc0a8b66feebae5c2e25a7b6a5d1cf31'.decode('hex')
def test_tx_out(b182t1):
assert len(b182t1['out']) == 2
def test_tx_out_pos(b182t1):
assert b182t1['out'][0]['pos'] == 0
assert b182t1['out'][1]['pos'] == 1
def test_tx_out_binscript(b182t1):
assert b182t1['out'][0]['binscript'] == '410401518fa1d1e1e3e162852d68d9be1c0abad5e3d6297ec95f1f91b909dc1afe616d6876f92918451ca387c4387609ae1a895007096195a824baf9c38ea98c09c3ac'.decode('hex')
assert b182t1['out'][1]['binscript'] == '410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac'.decode('hex')
def test_tx_out_value(b182t1):
assert b182t1['out'][0]['value'] == 100000000
assert b182t1['out'][1]['value'] == 2900000000
def test_tx_out_redeemed(b182t1):
assert b182t1['out'][0]['o_hash'] is None
assert b182t1['out'][0]['o_pos'] is None
assert b182t1['out'][1]['o_hash'] == '12b5633bad1f9c167d523ad1aa1947b2732a865bf5414eab2f9e5ae5d5c191ba'
assert b182t1['out'][1]['o_pos'] == 0
def test_tx_out_binaddr(b182t1):
assert b182t1['out'][0]['binaddr'] == 'db3b465a2b678e0bdc3e4944bb41abb5a795ae04'.decode('hex')
assert b182t1['out'][1]['binaddr'] == '11b366edfc0a8b66feebae5c2e25a7b6a5d1cf31'.decode('hex')
def test_tx_value_in(b182t1):
assert b182t1['value_in'] == 3000000000
def test_tx_value_out(b182t1):
assert b182t1['value_out'] == 3000000000
| 6,062 | Python | .py | 122 | 46.139344 | 203 | 0.743555 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,419 | test_util.py | bitcoin-abe_bitcoin-abe/test/test_util.py |
# Copyright(C) 2014 by Abe developers.
# test_util.py: test Abe utility functions
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
import pytest
import Abe.util as util
def test_calculate_target_004c792d():
assert util.calculate_target(0x004c792d) == 0
def test_calculate_target_1d00ffff():
assert util.calculate_target(0x1d00ffff) == 0xffff0000000000000000000000000000000000000000000000000000
def test_calculate_target_1c00800e():
assert util.calculate_target(0x1c00800e) == 0x800e00000000000000000000000000000000000000000000000000
def test_calculate_target_1b0e7256():
assert util.calculate_target(0x1b0e7256) == 0xe7256000000000000000000000000000000000000000000000000
def test_calculate_target_1b0098fa():
assert util.calculate_target(0x1b0098fa) == 0x98fa000000000000000000000000000000000000000000000000
def test_calculate_target_1a6a93b3():
assert util.calculate_target(0x1a6a93b3) == 0x6a93b30000000000000000000000000000000000000000000000
def test_calculate_target_1a022fbe():
assert util.calculate_target(0x1a022fbe) == 0x22fbe0000000000000000000000000000000000000000000000
def test_calculate_target_1900896c():
assert util.calculate_target(0x1900896c) == 0x896c00000000000000000000000000000000000000000000
def test_calculate_target_1e0fffff():
assert util.calculate_target(0x1e0fffff) == 0xfffff000000000000000000000000000000000000000000000000000000
def test_calculate_target_1f123456():
assert util.calculate_target(0x1f123456) == 0x12345600000000000000000000000000000000000000000000000000000000
def test_calculate_target_80555555():
assert util.calculate_target(0x80555555) == 0x5555550000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
def test_calculate_target_00777777():
assert util.calculate_target(0x00777777) == 0x0
def test_calculate_target_01cccccc():
assert util.calculate_target(0x01cccccc) == -0x4c
def test_calculate_target_02666666():
assert util.calculate_target(0x02666666) == 0x6666
def test_calculate_target_03aaaaaa():
assert util.calculate_target(0x03aaaaaa) == -0x2aaaaa
| 2,899 | Python | .py | 47 | 58.957447 | 306 | 0.834804 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,420 | test_std_tx.py | bitcoin-abe_bitcoin-abe/test/test_std_tx.py |
# Copyright(C) 2014 by Abe developers.
# test_std_tx.py: test Abe importing standard Bitcoin transaction types.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
import pytest
import os
import json
import tempfile
import py.path
from db import testdb
import data
import Abe.Chain
from Abe.deserialize import opcodes
@pytest.fixture(scope="module")
def gen(testdb, request):
gen = data.testnet14(testdb)
chain = gen.chain
blocks = gen.blocks
# Test block with an interesting transaction.
blocks.append(
gen.block(
prev=blocks[-1],
transactions=[
gen.coinbase(value=50.01e8),
gen.tx(txIn=[gen.txin(prevout=blocks[1]['transactions'][0]['txOut'][0], scriptSig='XXX')],
txOut=[gen.txout(addr='n1pTUVnjZ6GHxujaoJ62P9NBMNjLr5N2EQ', value=9.99e8),
gen.txout(addr='2NFTctsgcAmrgtiboLJUx9q8qu5H1qVpcAb', value=20e8),
gen.txout(multisig={"m":2, "pubkeys":data.PUBKEYS[2:5]}, value=20e8)])]) )
if 'ABE_TEST_SAVE_BLKFILE' in os.environ:
gen.save_blkfile(os.environ['ABE_TEST_SAVE_BLKFILE'], blocks)
# XXX Lots of code duplicated in test_block_order.py.
datadir = py.path.local(tempfile.mkdtemp(prefix='abe-test-'))
request.addfinalizer(datadir.remove)
gen.save_blkfile(str(datadir.join('blk0001.dat')), blocks)
gen.store = testdb.load('--datadir', json.dumps([{
'dirname': str(datadir),
'chain': chain.name,
'loader': 'blkfile'}]))
gen.chain = gen.store.get_chain_by_name(chain.name)
return gen
def test_b0_hash(gen):
# Testnet Block 0 hash.
block_0_hash = '000000000933ea01ad0ee984209779baaec3ced90fa3f408719526f8d77f4943'.decode('hex')[::-1]
assert gen.blocks[0]['hash'] == block_0_hash
def test_b1_hash(gen):
# Testnet Block 1 hash.
block_1_hash = '00000000b873e79784647a6c82962c70d228557d24a747ea4d1b8bbe878e1206'.decode('hex')[::-1]
assert gen.blocks[1]['hash'] == block_1_hash
@pytest.fixture(scope="module")
def ahn1p(gen):
return data.ah(gen, 'n1pTUVnjZ6GHxujaoJ62P9NBMNjLr5N2EQ')
def test_ahn1p_binaddr(ahn1p):
assert ahn1p['binaddr'] == 'deb1f1ffbef6061a0b8f6d23b4e72164b4678253'.decode('hex')
def test_ahn1p_subbinaddr(ahn1p):
assert 'subbinaddr' not in ahn1p
def test_ahn1p_version(ahn1p):
assert ahn1p['version'] == '\x6f'
def test_ahn1p_chains(ahn1p):
assert len(ahn1p['chains']) == 1
def test_ahn1p_c0_name(ahn1p):
assert ahn1p['chains'][0].name == 'Testnet'
def test_ahn1p_balance(ahn1p, gen):
assert ahn1p['balance'] == { gen.chain.id: 9.99e8 }
def test_ahn1p_txpoints(ahn1p):
assert len(ahn1p['txpoints']) == 1
def test_ahn1p_p0_type(ahn1p):
assert ahn1p['txpoints'][0]['type'] == 'direct'
def test_ahn1p_p0_is_out(ahn1p):
assert not ahn1p['txpoints'][0]['is_out']
def test_ahn1p_p0_nTime(ahn1p):
assert ahn1p['txpoints'][0]['nTime'] == 1231006506
def test_ahn1p_p0_chain(ahn1p):
assert ahn1p['txpoints'][0]['chain'].name == 'Testnet'
def test_ahn1p_p0_height(ahn1p):
assert ahn1p['txpoints'][0]['height'] == 14
def test_ahn1p_p0_blk_hash(ahn1p):
assert ahn1p['txpoints'][0]['blk_hash'] == '0c2d2879773626a081d74e73b3dcb9276e2a366e4571b2de6d90c2a67295382e'
def test_ahn1p_p0_tx_hash(ahn1p):
assert ahn1p['txpoints'][0]['tx_hash'] == 'dd5e827c88eb24502cb74670fa58430e8c51fa6a514c46451829c1896438ce52'
def test_ahn1p_p0_pos(ahn1p):
assert ahn1p['txpoints'][0]['pos'] == 0
def test_ahn1p_p0_value(ahn1p):
assert ahn1p['txpoints'][0]['value'] == 9.99e8
def test_ahn1p_sent(ahn1p, gen):
assert ahn1p['sent'] == { gen.chain.id: 0 }
def test_ahn1p_received(ahn1p, gen):
assert ahn1p['received'] == { gen.chain.id: 9.99e8 }
def test_ahn1p_counts(ahn1p):
assert ahn1p['counts'] == [1, 0]
@pytest.fixture(scope="module")
def a2NFT(gen):
return data.ah(gen, '2NFTctsgcAmrgtiboLJUx9q8qu5H1qVpcAb')
def test_a2NFT_binaddr(a2NFT):
assert a2NFT['binaddr'] == 'f3aae15f9b92a094bb4e01afe99f99ab4135f362'.decode('hex')
def test_a2NFT_subbinaddr(a2NFT):
assert 'subbinaddr' not in a2NFT
def test_a2NFT_version(a2NFT):
assert a2NFT['version'] == '\xc4'
def test_a2NFT_chains(a2NFT):
assert len(a2NFT['chains']) == 1
def test_a2NFT_c0_name(a2NFT):
assert a2NFT['chains'][0].name == 'Testnet'
def test_a2NFT_balance(a2NFT, gen):
assert a2NFT['balance'] == { gen.chain.id: 20e8 }
def test_a2NFT_txpoints(a2NFT):
assert len(a2NFT['txpoints']) == 1
def test_a2NFT_p0_type(a2NFT):
assert a2NFT['txpoints'][0]['type'] == 'direct'
def test_a2NFT_p0_is_out(a2NFT):
assert not a2NFT['txpoints'][0]['is_out']
def test_a2NFT_p0_nTime(a2NFT):
assert a2NFT['txpoints'][0]['nTime'] == 1231006506
def test_a2NFT_p0_chain(a2NFT):
assert a2NFT['txpoints'][0]['chain'].name == 'Testnet'
def test_a2NFT_p0_height(a2NFT):
assert a2NFT['txpoints'][0]['height'] == 14
def test_a2NFT_p0_blk_hash(a2NFT):
assert a2NFT['txpoints'][0]['blk_hash'] == '0c2d2879773626a081d74e73b3dcb9276e2a366e4571b2de6d90c2a67295382e'
def test_a2NFT_p0_tx_hash(a2NFT):
assert a2NFT['txpoints'][0]['tx_hash'] == 'dd5e827c88eb24502cb74670fa58430e8c51fa6a514c46451829c1896438ce52'
def test_a2NFT_p0_pos(a2NFT):
assert a2NFT['txpoints'][0]['pos'] == 1
def test_a2NFT_p0_value(a2NFT):
assert a2NFT['txpoints'][0]['value'] == 20e8
def test_a2NFT_sent(a2NFT, gen):
assert a2NFT['sent'] == { gen.chain.id: 0 }
def test_a2NFT_received(a2NFT, gen):
assert a2NFT['received'] == { gen.chain.id: 20e8 }
def test_a2NFT_counts(a2NFT):
assert a2NFT['counts'] == [1, 0]
@pytest.fixture(scope="module")
def an3j4(gen):
return data.ah(gen, 'n3j41Rkn51bdfh3NgyaA7x2JKEsfuvq888')
def test_an3j4_binaddr(an3j4, gen):
assert an3j4['binaddr'] == gen.chain.pubkey_hash(data.PUBKEYS[3])
def test_an3j4_subbinaddr(an3j4, gen):
assert 'subbinaddr' not in an3j4
def test_an3j4_version(an3j4):
assert an3j4['version'] == '\x6f'
def test_an3j4_chains(an3j4):
assert len(an3j4['chains']) == 1
def test_an3j4_c0_name(an3j4):
assert an3j4['chains'][0].name == 'Testnet'
def test_an3j4_balance(an3j4, gen):
assert an3j4['balance'] == { gen.chain.id: 0 }
def test_an3j4_txpoints(an3j4):
assert len(an3j4['txpoints']) == 1
def test_an3j4_p0_type(an3j4):
assert an3j4['txpoints'][0]['type'] == 'escrow'
def test_an3j4_p0_is_out(an3j4):
assert not an3j4['txpoints'][0]['is_out']
def test_an3j4_p0_nTime(an3j4):
assert an3j4['txpoints'][0]['nTime'] == 1231006506
def test_an3j4_p0_chain(an3j4):
assert an3j4['txpoints'][0]['chain'].name == 'Testnet'
def test_an3j4_p0_height(an3j4):
assert an3j4['txpoints'][0]['height'] == 14
def test_an3j4_p0_blk_hash(an3j4):
assert an3j4['txpoints'][0]['blk_hash'] == '0c2d2879773626a081d74e73b3dcb9276e2a366e4571b2de6d90c2a67295382e'
def test_an3j4_p0_tx_hash(an3j4):
assert an3j4['txpoints'][0]['tx_hash'] == 'dd5e827c88eb24502cb74670fa58430e8c51fa6a514c46451829c1896438ce52'
def test_an3j4_p0_pos(an3j4):
assert an3j4['txpoints'][0]['pos'] == 2
def test_an3j4_p0_value(an3j4):
assert an3j4['txpoints'][0]['value'] == 20e8
def test_an3j4_sent(an3j4, gen):
assert an3j4['sent'] == { gen.chain.id: 0 }
def test_an3j4_received(an3j4, gen):
assert an3j4['received'] == { gen.chain.id: 0 }
def test_an3j4_counts(an3j4):
assert an3j4['counts'] == [0, 0]
# TODO: look up multisig by its P2SH address, check subbinaddr.
# TODO: test different types of redeemed outputs.
def b(gen, b):
return gen.store.export_block(chain=gen.chain, block_number=b)
@pytest.fixture(scope="module")
def b14(gen):
return b(gen, 14)
def test_b14_chain_candidates(b14):
assert len(b14['chain_candidates']) == 1
def test_b14cc0_chain_name(b14):
assert b14['chain_candidates'][0]['chain'].name == 'Testnet'
def test_b14cc0_in_longest(b14):
assert b14['chain_candidates'][0]['in_longest']
def test_b14_chain_satoshis(b14):
assert b14['chain_satoshis'] == 750*10**8
def test_b14_chain_satoshi_seconds(b14):
assert b14['chain_satoshi_seconds'] == -656822590000000000
def test_b14_chain_work(b14):
assert b14['chain_work'] == 64425492495
def test_b14_fees(b14):
assert b14['fees'] == 0.01e8
def test_b14_generated(b14):
assert b14['generated'] == 50e8
def test_b14_hash(b14):
assert b14['hash'] == '0c2d2879773626a081d74e73b3dcb9276e2a366e4571b2de6d90c2a67295382e'
def test_b14_hashMerkleRoot(b14):
assert b14['hashMerkleRoot'] == '93f17b59330df6c97f8d305572b0b98608b34a2f4fa235e6ff69bbe343e3a764'
def test_b14_hashPrev(b14):
assert b14['hashPrev'] == '2155786533653694385a772e33d9547848c809b1d1bce3500a377fe37ad3d250'
def test_b14_height(b14):
assert b14['height'] == 14
def test_b14_nBits(b14):
assert b14['nBits'] == 0x1d00ffff
def test_b14_next_block_hashes(b14):
assert b14['next_block_hashes'] == []
def test_b14_nNonce(b14):
assert b14['nNonce'] == 253
def test_b14_nTime(b14):
assert b14['nTime'] == 1231006506
@pytest.mark.xfail
def test_b14_satoshis_destroyed(b14):
# XXX Is this value right?
assert b14['satoshis_destroyed'] == -328412110000000000
@pytest.mark.xfail
def test_b14_satoshi_seconds(b14):
# XXX Is this value right?
assert b14['satoshi_seconds'] == -328410480000000000
def test_b14_transactions(b14):
assert len(b14['transactions']) == 2
def test_b14_t1_fees(b14):
assert b14['transactions'][1]['fees'] == 0.01e8
def test_b14_t1_hash(b14):
assert b14['transactions'][1]['hash'] == 'dd5e827c88eb24502cb74670fa58430e8c51fa6a514c46451829c1896438ce52'
def test_b14_t1_in(b14):
assert len(b14['transactions'][1]['in']) == 1
def test_b14_t1i0_address_version(b14):
assert b14['transactions'][1]['in'][0]['address_version'] == '\x6f'
def test_b14_t1i0_binaddr(b14, gen):
assert b14['transactions'][1]['in'][0]['binaddr'] == gen.chain.pubkey_hash(data.PUBKEYS[1])
def test_b14_t1i0_value(b14):
assert b14['transactions'][1]['in'][0]['value'] == 50e8
def test_b14_t1_out(b14):
assert len(b14['transactions'][1]['out']) == 3
def test_b14_t1o0_address_version(b14):
assert b14['transactions'][1]['out'][0]['address_version'] == '\x6f'
def test_b14_t1o0_binaddr(b14, gen):
assert b14['transactions'][1]['out'][0]['binaddr'] == 'deb1f1ffbef6061a0b8f6d23b4e72164b4678253'.decode('hex')
def test_b14_t1o0_value(b14):
assert b14['transactions'][1]['out'][0]['value'] == 9.99e8
def test_b14_t1o1_address_version(b14):
assert b14['transactions'][1]['out'][1]['address_version'] == '\xc4'
def test_b14_t1o1_binaddr(b14, gen):
assert b14['transactions'][1]['out'][1]['binaddr'] == 'f3aae15f9b92a094bb4e01afe99f99ab4135f362'.decode('hex')
def test_b14_t1o1_value(b14):
assert b14['transactions'][1]['out'][1]['value'] == 20e8
def test_b14_t1o2_address_version(b14):
assert b14['transactions'][1]['out'][2]['address_version'] == '\x6f'
def test_b14_t1o2_binaddr(b14, gen):
assert b14['transactions'][1]['out'][2]['binaddr'] == 'b8bcada90d0992bdc64188d6a0ac3f9fd200d1d1'.decode('hex')
def test_b14_t1o2_subbinaddr(b14, gen):
assert len(b14['transactions'][1]['out'][2]['subbinaddr']) == 3
def test_b14_t1o2k0(b14, gen):
assert b14['transactions'][1]['out'][2]['subbinaddr'][0] == gen.chain.pubkey_hash(data.PUBKEYS[2])
def test_b14_t1o2k1(b14, gen):
assert b14['transactions'][1]['out'][2]['subbinaddr'][1] == gen.chain.pubkey_hash(data.PUBKEYS[3])
def test_b14_t1o2k2(b14, gen):
assert b14['transactions'][1]['out'][2]['subbinaddr'][2] == gen.chain.pubkey_hash(data.PUBKEYS[4])
def test_b14_t1o2_required_signatures(b14):
assert b14['transactions'][1]['out'][2]['required_signatures'] == 2
def test_b14_t1o2_value(b14):
assert b14['transactions'][1]['out'][2]['value'] == 20e8
def test_b14_value_out(b14):
assert b14['value_out'] == 100e8
def test_b14_version(b14):
assert b14['version'] == 1
def bt(gen, b, t):
return gen.store.export_tx(tx_hash=gen.blocks[b]['transactions'][t]['hash'][::-1].encode('hex'), format='browser')
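# Added note: the generator keeps transaction hashes as raw little-endian
# bytes, so the [::-1] reversal plus .encode('hex') above produces the usual
# big-endian hex string form that export_tx(tx_hash=...) takes.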
@pytest.fixture(scope="module")
def b14t1(gen):
return bt(gen, 14, 1)
def test_b14t1o0_script_type(b14t1):
assert b14t1['out'][0]['script_type'] == Abe.Chain.SCRIPT_TYPE_ADDRESS
def test_b14t1o0_binaddr(b14t1):
assert b14t1['out'][0]['binaddr'] == Abe.util.decode_address('n1pTUVnjZ6GHxujaoJ62P9NBMNjLr5N2EQ')[1]
assert b14t1['out'][0]['binaddr'] == 'deb1f1ffbef6061a0b8f6d23b4e72164b4678253'.decode('hex')
def test_b14t1o0_value(b14t1):
assert b14t1['out'][0]['value'] == 9.99e8
def test_b14t1o1_script_type(b14t1):
assert b14t1['out'][1]['script_type'] == Abe.Chain.SCRIPT_TYPE_P2SH
def test_b14t1o1_binaddr(b14t1):
assert b14t1['out'][1]['binaddr'] == Abe.util.decode_address('2NFTctsgcAmrgtiboLJUx9q8qu5H1qVpcAb')[1]
def test_b14t1o1_value(b14t1):
assert b14t1['out'][1]['value'] == 20e8
def test_b14t1o2_script_type(b14t1):
assert b14t1['out'][2]['script_type'] == Abe.Chain.SCRIPT_TYPE_MULTISIG
def test_b14t1o2_required_signatures(b14t1):
assert b14t1['out'][2]['required_signatures'] == 2
def test_b14t1o2_binaddr(b14t1, gen):
assert b14t1['out'][2]['binaddr'] == 'b8bcada90d0992bdc64188d6a0ac3f9fd200d1d1'.decode('hex')
def test_b14t1o2_subbinaddr(b14t1, gen):
assert b14t1['out'][2]['subbinaddr'] == [ gen.chain.pubkey_hash(pubkey) for pubkey in data.PUBKEYS[2:5] ]
def test_b14t1o2_value(b14t1):
assert b14t1['out'][2]['value'] == 20e8
| 14,199 | Python | .py | 305 | 42.590164 | 118 | 0.703044 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,421 | data.py | bitcoin-abe_bitcoin-abe/test/data.py |
import datagen
import Abe.Chain
from Abe.util import hex2b
PUBKEYS = [
x.decode('hex') for x in [
# Satoshi's genesis pubkey.
'04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f',
# Testnet Block 1 pubkey.
'021aeaf2f8638a129a3156fbe7e5ef635226b0bafd495ff03afe2c843d7e3a4b51',
# Some test pubkeys.
'0269184483e5494727d2dec54da85db9b18bee827bb3d1eee23b122edf810b8262',
'0217819b778f0bcfee53bbed495ca20fdc828f40ffd6d9481fe4c0d091b1486f69',
'022820a6eb4e6817bf68301856e0803e05d19f54714006f2088e74103be396eb5a',
]]
def testnet14(db):
chain = Abe.Chain.create('Testnet')
blocks = []
gen = datagen.Gen(chain=chain, db=db, blocks=blocks)
# The Bitcoin/Testnet genesis transaction.
genesis_coinbase = gen.coinbase(
scriptSig=gen.encode_script(
'\xff\xff\x00\x1d', '\x04', 'The Times 03/Jan/2009 Chancellor on brink of second bailout for banks'),
txOut=[gen.txout(pubkey=PUBKEYS[0], value=50*10**8)])
# Testnet Blocks 0 and 1.
blocks.append(gen.block(transactions=[genesis_coinbase], nTime=1296688602, nNonce=414098458))
blocks.append( gen.block(prev=blocks[-1], nTime=1296688928, nNonce=1924588547,
transactions=[gen.coinbase(scriptSig=hex2b('0420e7494d017f062f503253482f'),
txOut=[gen.txout(pubkey=PUBKEYS[1], value=50*10**8)])]) )
# Test blocks with random coinbase addresses and bogus proof-of-work.
for i in xrange(12):
blocks.append( gen.block(prev=blocks[-1]) )
return gen
def ah(gen, addr):
return gen.store.export_address_history(addr, chain=gen.chain)
| 1,787 | Python | .py | 34 | 44.058824 | 141 | 0.706827 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,422 | namecoin_dump.py | bitcoin-abe_bitcoin-abe/tools/namecoin_dump.py |
#!/usr/bin/env python
# Dump the Namecoin name data to standard output.
# Copyright(C) 2011 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
import sys
import logging
import Abe.DataStore
import Abe.readconf
from Abe.deserialize import script_GetOp, opcodes
NAME_NEW = opcodes.OP_1
NAME_FIRSTUPDATE = opcodes.OP_2
NAME_UPDATE = opcodes.OP_3
NAME_SCRIPT_MIN = '\x51'
NAME_SCRIPT_MAX = '\x54'
BLOCKS_TO_EXPIRE = 12000
def iterate_name_updates(store, logger, chain_id):
for height, tx_pos, txout_pos, script in store.selectall("""
SELECT cc.block_height, bt.tx_pos, txout.txout_pos,
txout.txout_scriptPubKey
FROM chain_candidate cc
JOIN block_tx bt ON (cc.block_id = bt.block_id)
JOIN txout ON (bt.tx_id = txout.tx_id)
WHERE cc.chain_id = ?
AND txout_scriptPubKey >= ? AND txout_scriptPubKey < ?
ORDER BY cc.block_height, bt.tx_pos, txout.txout_pos""",
(chain_id, store.binin(NAME_SCRIPT_MIN),
store.binin(NAME_SCRIPT_MAX))):
height = int(height)
tx_pos = int(tx_pos)
txout_pos = int(txout_pos)
i = script_GetOp(store.binout(script))
try:
name_op = i.next()[0]
if name_op == NAME_NEW:
continue # no effect on name map
elif name_op == NAME_FIRSTUPDATE:
is_first = True
name = i.next()[1]
newtx_hash = i.next()[1]
#rand = i.next()[1] # XXX documented as optional; is it?
value = i.next()[1]
elif name_op == NAME_UPDATE:
is_first = False
name = i.next()[1]
value = i.next()[1]
else:
logger.warning("Unexpected first op: %s", repr(name_op))
continue
except StopIteration:
logger.warning("Strange script at %d:%d:%d",
height, tx_pos, txout_pos)
continue
yield (height, tx_pos, txout_pos, is_first, name, value)
def get_expiration_depth(height):
if height < 24000:
return 12000
if height < 48000:
return height - 12000
return 36000
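# Added note: per the schedule above, a name touched before height 24000 lives
# another 12000 blocks, one touched at height 30000 lives 30000 - 12000 = 18000
# blocks, and from height 48000 onward every update buys a fixed 36000 blocks.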
def dump(store, logger, chain_id):
from collections import deque
top = store.get_block_number(chain_id)
expires = {}
expiry_queue = deque() # XXX unneeded synchronization
for x in iterate_name_updates(store, logger, chain_id):
height, tx_pos, txout_pos, is_first, name, value = x
while expiry_queue and expiry_queue[0]['block_id'] < height:
e = expiry_queue.popleft()
dead = e['name']
if expires[dead] == e['block_id']:
print repr((e['block_id'], 'Expired', dead, None))
if expires.get(name, height) < height:
type = 'Resurrected'
elif is_first:
type = 'First'
else:
type = 'Renewed'
print repr((height, type, name, value))
expiry = height + get_expiration_depth(height)
expires[name] = expiry
expiry_queue.append({'block_id': expiry, 'name': name, 'value': value})
for e in expiry_queue:
if expires[e['name']] > e['block_id']:
pass
elif e['block_id'] <= top:
print repr((e['block_id'], 'Expired', e['name'], None))
else:
print repr((e['block_id'], 'Until', e['name'], e['value']))
def main(argv):
logging.basicConfig(level=logging.DEBUG)
conf = {
'chain_id': None,
}
conf.update(Abe.DataStore.CONFIG_DEFAULTS)
args, argv = Abe.readconf.parse_argv(argv, conf, strict=False)
if argv and argv[0] in ('-h', '--help'):
print "Usage: namecoin_dump.py --dbtype=MODULE --connect-args=ARGS"
return 0
elif argv:
sys.stderr.write(
"Error: unknown option `%s'\n"
"See `namecoin_dump.py --help' for more information.\n"
% (argv[0],))
return 1
store = Abe.DataStore.new(args)
logger = logging.getLogger(__name__)
if args.chain_id is None:
row = store.selectrow(
"SELECT chain_id FROM chain WHERE chain_name = 'Namecoin'")
if row is None:
raise Exception("Can not find Namecoin chain in database.")
args.chain_id = row[0]
dump(store, logger, args.chain_id)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| 5,174 | Python | .py | 128 | 31.679688 | 79 | 0.594654 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,423 | ecdsa.py | bitcoin-abe_bitcoin-abe/contrib/ecdsa.py |
#!/usr/bin/env python
# Retrieved from http://ecdsa.org/ecdsa.py on 2011-10-17.
# Thanks to ThomasV.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
import sys
import os
import warnings
import optparse
import re
from cgi import escape
import posixpath
import wsgiref.util
import time
import binascii
import daemon
import Abe.DataStore
import Abe.readconf
import operator
# bitcointools -- modified deserialize.py to return raw transaction
import Abe.deserialize
import Abe.util # Added functions.
import Abe.base58
from Abe.abe import *
AML_APPNAME = "Bitcoin ecdsa.org"
AML_TEMPLATE = """
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<link rel="stylesheet" type="text/css" href="http://s3.ecdsa.org/style.css" />
<link rel="shortcut icon" href="http://s3.ecdsa.org/favicon.ico" />
<title>%(title)s</title>
</head>
<body>
<div id="logo">
<a href="%(dotdot)s/">
<img src="http://s3.ecdsa.org/bc_logo.png" alt="Bitcoin logo" border="none" />
</a>
</div>
<div id="navigation">
<ul>
<li><a href="%(dotdot)shome">Home</a> </li>
<li><a href="%(dotdot)ssearch">Search</a> </li>
<li><a href="%(dotdot)sannotate">Annotations</a> </li>
<li><a href="%(dotdot)swidgets">Widgets</a></li>
<li><a href="%(dotdot)sthresholdRelease">Threshold release</a></li>
<li><a href="%(dotdot)sstats.html">Statistics</a></li>
</ul>
</div>
<div id=\"content\">
<h1>%(h1)s</h1>
%(body)s
</div>
</body>
</html>
"""
class Aml(Abe):
def __init__(abe, store, args):
abe.store = store
abe.args = args
abe.htdocs = args.document_root or find_htdocs()
abe.static_path = '' if args.static_path is None else args.static_path
abe.template_vars = args.template_vars.copy()
abe.template_vars['STATIC_PATH'] = (
abe.template_vars.get('STATIC_PATH', abe.static_path))
abe.template = flatten(args.template)
abe.debug = args.debug
import logging
abe.log = logging
abe.log.info('Abe initialized.')
abe.home = "home"
if not args.auto_agpl:
abe.template_vars['download'] = (
abe.template_vars.get('download', ''))
abe.base_url = args.base_url
abe.reports = abe.get_reports()
def handle_home(abe, page):
page['title'] = 'Bitcoin Web Services'
body = page['body']
body += [ """
<p>This website allows you to:
<ul>
<li>Annotate transactions in the blockchain (signature requested)</li>
<li>Use fundraiser widgets (counters, progress bars, javascript)</li>
<li>Release data when donations to an address reach a given threshold.</li>
</ul>
<br/><br/>
<p style="font-size: smaller">
This site is powered by <span style="font-style: italic"> <a href="https://github.com/bitcoin-abe/bitcoin-abe">bitcoin-ABE</a></span>
source:<a href="ecdsa.py">[1]</a> <a href="abe.diff">[2]</a>
</p>"""
]
return
def get_sender_comment(abe, tx_id):
r = abe.store.selectrow("SELECT c_text, c_pubkey, c_sig FROM comments WHERE c_tx = ?""", (tx_id,))
if r:
return r[0]
else:
return ""
def get_address_comment(abe, address):
#rename this column in sql
r = abe.store.selectrow("SELECT text FROM addr_comments WHERE address = '%s'"""%(address))
if r:
return r[0]
else:
return ""
def get_tx(abe, tx_hash ):
row = abe.store.selectrow("""
SELECT tx_id, tx_version, tx_lockTime, tx_size
FROM tx
WHERE tx_hash = ?
""", (abe.store.hashin_hex(tx_hash),))
if row is None: return None, None, None, None
tx_id, tx_version, tx_lockTime, tx_size = (int(row[0]), int(row[1]), int(row[2]), int(row[3]))
return tx_id, tx_version, tx_lockTime, tx_size
def get_tx_inputs(abe, tx_id):
return abe.store.selectall("""
SELECT
txin.txin_pos,
txin.txin_scriptSig,
txout.txout_value,
COALESCE(prevtx.tx_hash, u.txout_tx_hash),
prevtx.tx_id,
COALESCE(txout.txout_pos, u.txout_pos),
pubkey.pubkey_hash
FROM txin
LEFT JOIN txout ON (txout.txout_id = txin.txout_id)
LEFT JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
LEFT JOIN tx prevtx ON (txout.tx_id = prevtx.tx_id)
LEFT JOIN unlinked_txin u ON (u.txin_id = txin.txin_id)
WHERE txin.tx_id = ?
ORDER BY txin.txin_pos
""", (tx_id,))
def get_tx_outputs(abe, tx_id):
return abe.store.selectall("""
SELECT
txout.txout_pos,
txout.txout_scriptPubKey,
txout.txout_value,
nexttx.tx_hash,
nexttx.tx_id,
txin.txin_pos,
pubkey.pubkey_hash
FROM txout
LEFT JOIN txin ON (txin.txout_id = txout.txout_id)
LEFT JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
LEFT JOIN tx nexttx ON (txin.tx_id = nexttx.tx_id)
WHERE txout.tx_id = ?
ORDER BY txout.txout_pos
""", (tx_id,))
def handle_tx(abe, page):
tx_hash = wsgiref.util.shift_path_info(page['env'])
if tx_hash in (None, '') or page['env']['PATH_INFO'] != '':
raise PageNotFound()
page['title'] = ['Transaction ', tx_hash[:10], '...', tx_hash[-4:]]
body = page['body']
if not HASH_PREFIX_RE.match(tx_hash):
body += ['<p class="error">Not a valid transaction hash.</p>']
return
tx_id, tx_version, tx_lockTime, tx_size = abe.get_tx( tx_hash )
if tx_id is None:
body += ['<p class="error">Transaction not found.</p>']
return
block_rows = abe.store.selectall("""
SELECT c.chain_name, cc.in_longest,
b.block_nTime, b.block_height, b.block_hash,
block_tx.tx_pos
FROM chain c
JOIN chain_candidate cc ON (cc.chain_id = c.chain_id)
JOIN block b ON (b.block_id = cc.block_id)
JOIN block_tx ON (block_tx.block_id = b.block_id)
WHERE block_tx.tx_id = ?
ORDER BY c.chain_id, cc.in_longest DESC, b.block_hash
""", (tx_id,))
def parse_row(row):
pos, script, value, o_hash, o_id, o_pos, binaddr = row
chain = abe.get_default_chain()
hash = abe.store.binout(binaddr)
address = hash_to_address(chain['address_version'], hash)
return {
"pos": int(pos),
"script": abe.store.binout(script),
"value": None if value is None else int(value),
"o_hash": abe.store.hashout_hex(o_hash),
"o_id": o_id,
"o_pos": None if o_pos is None else int(o_pos),
"binaddr": abe.store.binout(binaddr),
}
def row_to_html(row, this_ch, other_ch, no_link_text):
body = []
body += [
'<tr>\n',
'<td><a name="', this_ch, row['pos'], '">', row['pos'],
'</a></td>\n<td>']
if row['o_hash'] is None:
body += [no_link_text]
else:
body += [
'<a href="', row['o_hash'], '#', other_ch, row['o_pos'],
'">', row['o_hash'][:10], '...:', row['o_pos'], '</a>']
body += [
'</td>\n',
'<td>', format_satoshis(row['value'], chain), '</td>\n',
]
if row['binaddr'] is None:
body += ['Unknown', '</td><td></td>']
else:
link = hash_to_address_link(chain['address_version'], row['binaddr'], '../')
addr = hash_to_address(chain['address_version'], row['binaddr'])
comment = abe.get_address_comment(addr)
comment += " <a title=\"add comment\" href=\"http://ecdsa.org/annotate?address="+addr+"\">[+]</a>"
body += [ '<td>', link, '</td><td>', comment, '</td>']
body += ['</tr>\n']
return body
in_rows = map(parse_row, abe.get_tx_inputs(tx_id))
out_rows = map(parse_row, abe.get_tx_outputs(tx_id))
def sum_values(rows):
ret = 0
for row in rows:
if row['value'] is None:
return None
ret += row['value']
return ret
value_in = sum_values(in_rows)
value_out = sum_values(out_rows)
is_coinbase = None
body += abe.short_link(page, 't/' + hexb58(tx_hash[:14]))
body += ['<p>Hash: ', tx_hash, '<br />\n']
chain = None
for row in block_rows:
(name, in_longest, nTime, height, blk_hash, tx_pos) = (
row[0], int(row[1]), int(row[2]), int(row[3]),
abe.store.hashout_hex(row[4]), int(row[5]))
if chain is None:
chain = abe.chain_lookup_by_name(name)
is_coinbase = (tx_pos == 0)
elif name <> chain['name']:
abe.log.warn('Transaction ' + tx_hash + ' in multiple chains: '
+ name + ', ' + chain['name'])
body += [
'Appeared in <a href="../block/', blk_hash, '">',
escape(name), ' ',
height if in_longest else [blk_hash[:10], '...', blk_hash[-4:]],
'</a> (', format_time(nTime), ')<br />\n']
if chain is None:
abe.log.warn('Assuming default chain for Transaction ' + tx_hash)
chain = abe.get_default_chain()
sender_comment = abe.get_sender_comment(tx_id)
sender_comment += " <a href=\"http://ecdsa.org/annotate?tx="+tx_hash+"\">[+]</a>"
fee = format_satoshis(0 if is_coinbase else (value_in and value_out and value_in - value_out), chain)
body += [
len(in_rows),' inputs, ', len(out_rows),' outputs.<br/>\n'
'Amounts: ', format_satoshis(value_in, chain), ' --> ', format_satoshis(value_out, chain), ' + ',fee,' fee.<br/>\n',
'Size: ', tx_size, ' bytes<br /><br/>\n',
'<b>Comment from sender:</b><br/>', sender_comment, '<br/>\n',
]
body += ['</p>\n',
'<a name="inputs"><h3>Inputs</h3></a>\n<table>\n',
'<tr><th>Index</th><th>Previous output</th><th>Amount</th>',
'<th>From address</th><th>Comment</th></tr>\n']
for row in in_rows:
page['body'] += row_to_html(row, 'i', 'o', 'Generation' if is_coinbase else 'Unknown')
body += ['</table>\n',
'<a name="outputs"><h3>Outputs</h3></a>\n<table>\n',
'<tr><th>Index</th><th>Redeemed at</th><th>Amount</th>',
'<th>To address</th><th>Comment</th></tr>\n']
for row in out_rows:
page['body'] += row_to_html(row, 'o', 'i', 'Not yet redeemed')
body += ['</table>\n']
def trackrow_to_html(row, report_name):
line = [ '<tr>\n<td>' ]
if row['o_hash'] is None:
line += ['Generation' if is_coinbase else 'Unknown']
else:
line += [
'<a href="', row['o_hash'], '">', row['o_hash'][:10], '...:', row['o_pos'], '</a>']
line += [
'</td>\n',
'<td>', format_satoshis(row['value'], chain), '</td>\n',
'<td>']
if row['binaddr'] is None:
line += ['Unknown']
else:
line += hash_to_address_link(chain['address_version'], row['binaddr'], '../')
line += [
'</td>\n',
'<td>', row['dist'].get(report_name),'</td>\n',
'<td>', row['comment'],'</td>\n',
'</tr>\n']
return line
def get_address_out_rows(abe, dbhash):
return abe.store.selectall("""
SELECT
b.block_nTime,
cc.chain_id,
b.block_height,
1,
b.block_hash,
tx.tx_hash,
tx.tx_id,
txin.txin_pos,
-prevout.txout_value
FROM chain_candidate cc
JOIN block b ON (b.block_id = cc.block_id)
JOIN block_tx ON (block_tx.block_id = b.block_id)
JOIN tx ON (tx.tx_id = block_tx.tx_id)
JOIN txin ON (txin.tx_id = tx.tx_id)
JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
JOIN pubkey ON (pubkey.pubkey_id = prevout.pubkey_id)
WHERE pubkey.pubkey_hash = ?
AND cc.in_longest = 1""",
(dbhash,))
def get_address_in_rows(abe, dbhash):
return abe.store.selectall("""
SELECT
b.block_nTime,
cc.chain_id,
b.block_height,
0,
b.block_hash,
tx.tx_hash,
tx.tx_id,
txout.txout_pos,
txout.txout_value
FROM chain_candidate cc
JOIN block b ON (b.block_id = cc.block_id)
JOIN block_tx ON (block_tx.block_id = b.block_id)
JOIN tx ON (tx.tx_id = block_tx.tx_id)
JOIN txout ON (txout.tx_id = tx.tx_id)
JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
WHERE pubkey.pubkey_hash = ?
AND cc.in_longest = 1""",
(dbhash,))
def handle_qr(abe,page):
address = wsgiref.util.shift_path_info(page['env'])
if address in (None, '') or page['env']['PATH_INFO'] != '':
raise PageNotFound()
body = page['body']
page['title'] = 'Address ' + escape(address)
version, binaddr = decode_check_address(address)
if binaddr is None:
body += ['<p>Not a valid address.</p>']
return
ret = """<html><body>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.5.2/jquery.min.js"></script>
<script type="text/javascript" src="http://ecdsa.org/jquery.qrcode.min.js"></script>
<div id="qrcode"></div>
<script>jQuery('#qrcode').qrcode("bitcoin:%s");</script>
</body></html>"""%address
abe.do_raw(page, ret)
page['content_type']='text/html'
def handle_address(abe, page):
#action = abe.get_param( page, 'action', '')
address = wsgiref.util.shift_path_info(page['env'])
if address in (None, '') or page['env']['PATH_INFO'] != '':
raise PageNotFound()
body = page['body']
page['title'] = 'Address ' + escape(address)
version, binaddr = decode_check_address(address)
if binaddr is None:
body += ['<p>Not a valid address.</p>']
return
txpoints = []
chains = {}
balance = {}
received = {}
sent = {}
count = [0, 0]
chain_ids = []
def adj_balance(txpoint):
chain_id = txpoint['chain_id']
value = txpoint['value']
if chain_id not in balance:
chain_ids.append(chain_id)
chains[chain_id] = abe.chain_lookup_by_id(chain_id)
balance[chain_id] = 0
received[chain_id] = 0
sent[chain_id] = 0
balance[chain_id] += value
if value > 0:
received[chain_id] += value
else:
sent[chain_id] -= value
count[txpoint['is_in']] += 1
dbhash = abe.store.binin(binaddr)
rows = []
rows += abe.get_address_out_rows( dbhash )
rows += abe.get_address_in_rows( dbhash )
#rows.sort()
for row in rows:
nTime, chain_id, height, is_in, blk_hash, tx_hash, tx_id, pos, value = row
txpoint = {
"nTime": int(nTime),
"chain_id": int(chain_id),
"height": int(height),
"is_in": int(is_in),
"blk_hash": abe.store.hashout_hex(blk_hash),
"tx_hash": abe.store.hashout_hex(tx_hash),
"tx_id": int(tx_id),
"pos": int(pos),
"value": int(value),
}
adj_balance(txpoint)
txpoints.append(txpoint)
#txpoints.sort( lambda a,b: a['tx_id']<b['tx_id'])
txpoints = sorted(txpoints, key=operator.itemgetter("tx_id"))
if (not chain_ids):
body += ['<p>Address not seen on the network.</p>']
return
def format_amounts(amounts, link):
ret = []
for chain_id in chain_ids:
chain = chains[chain_id]
if chain_id != chain_ids[0]:
ret += [', ']
ret += [format_satoshis(amounts[chain_id], chain),
' ', escape(chain['code3'])]
if link:
other = hash_to_address(chain['address_version'], binaddr)
if other != address:
ret[-1] = ['<a href="', page['dotdot'],
'address/', other,
'">', ret[-1], '</a>']
return ret
comment = abe.get_address_comment(address)
comment += " <a title=\"add comment\" href=\"http://ecdsa.org/annotate?address="+address+"\">[+]</a>"
body += [ '<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.5.2/jquery.min.js"></script>',
'<script type="text/javascript" src="http://ecdsa.org/jquery.qrcode.min.js"></script>',
'<div style="float:right;" id="qrcode"></div>',
"<script>jQuery('#qrcode').qrcode(\"bitcoin:"+address+"\");</script>" ]
body += abe.short_link(page, 'a/' + address[:10])
body += ['<p>Balance: '] + format_amounts(balance, True)
for chain_id in chain_ids:
balance[chain_id] = 0 # Reset for history traversal.
body += ['<br />\n',
'Transactions in: ', count[0], '<br />\n',
'Received: ', format_amounts(received, False), '<br />\n',
'Transactions out: ', count[1], '<br />\n',
'Sent: ', format_amounts(sent, False), '<br/>'
'Comment: ', comment, '<br/>'
]
body += ['</p>\n'
'<h3>Transactions</h3>\n'
'<table>\n<tr><th>Transaction</th><th>Block</th>'
'<th>Approx. Time</th><th>Amount</th><th>Balance</th>'
'<th>Comment</th>'
'</tr>\n']
for elt in txpoints:
chain = chains[elt['chain_id']]
balance[elt['chain_id']] += elt['value']
body += ['<tr><td><a href="../tx/', elt['tx_hash'],
'#', 'i' if elt['is_in'] else 'o', elt['pos'],
'">', elt['tx_hash'][:10], '...</a>',
'</td><td><a href="../block/', elt['blk_hash'],
'">', elt['height'], '</a></td><td>',
format_time(elt['nTime']), '</td><td>']
if elt['value'] < 0:
body += ['<span style="color:red;">-', format_satoshis(-elt['value'], chain), "</span>" ]
else:
body += ['+', format_satoshis(elt['value'], chain)]
# get sender comment
comment = abe.get_sender_comment(elt['tx_id'])
comment += " <a href=\"http://ecdsa.org/annotate?tx="+elt['tx_hash']+"\">[+]</a>"
body += ['</td><td>',
format_satoshis(balance[elt['chain_id']], chain),
'</td><td>', comment,
'</td></tr>\n']
body += ['</table>\n']
def search_form(abe, page):
q = (page['params'].get('q') or [''])[0]
return [
'<p>Search by address, block number, block or transaction hash,'
' or chain name:</p>\n'
'<form action="', page['dotdot'], 'search"><p>\n'
'<input name="q" size="64" value="', escape(q), '" />'
'<button type="submit">Search</button>\n'
'<br />Address or hash search requires at least the first six'
' characters.</p></form>\n']
def get_reports(abe):
rows = abe.store.selectall("select reports.report_id, tx.tx_id, tx.tx_hash, name from reports left join tx on tx.tx_id=reports.tx_id" )
return map(lambda x: { 'report_id':int(x[0]), 'tx_id':int(x[1]), 'tx_hash':x[2], 'name':x[3] }, rows)
def handle_reports(abe, page):
page['title'] = 'Fraud reports'
page['body'] += [ 'List of transactions that have been reported as fraudulent.', '<br/><br/>']
page['body'] += [ '<table><tr><th>name</th><th>transaction</th></tr>']
for item in abe.reports:
link = '<a href="tx/' + item['tx_hash'] + '">'+ item['tx_hash'] + '</a>'
page['body'] += ['<tr><td>'+item['name']+'</td><td>'+link+'</td></tr>']
page['body'] += [ '</table>']
def handle_annotate(abe, page):
tx_hash = (page['params'].get('tx') or [''])[0]
address = (page['params'].get('address') or [''])[0]
message = (page['params'].get('comment') or [''])[0]
signature = (page['params'].get('signature') or [''])[0]
if not tx_hash and not address:
page['title'] = 'Annotations'
page['body'] += [ 'This website allows you to annotate the Bitcoin blockchain.<br/><br/>',
'You will need a version of bitcoind that has the "signmessage" command.<br/>'
'In order to annotate an address or transaction, first <a href="search">find</a> the corresponding page, then follow the "[+]" link. <a href="http://ecdsa.org/annotate?tx=e357fece18a4191be8236570c7dc309ec6ac04473317320b5e8b9ab7cd023549">(example here)</a><br/><br/>']
page['body'] += [ '<h3>Annotated addresses.</h3>']
rows = abe.store.selectall("""select text, address from addr_comments limit 100""" )
page['body'] += [ '<table>']
page['body'] += [ '<tr><th>Address</th><th>Comment</th></tr>']
for row in rows:
link = '<a href="address/' + row[1]+ '">'+ row[1] + '</a>'
page['body'] += ['<tr><td>'+link+'</td><td>'+row[0]+'</td></tr>']
page['body'] += [ '</table>']
page['body'] += [ '<h3>Annotated transactions.</h3>']
rows = abe.store.selectall("""select tx.tx_id, tx.tx_hash, comments.c_text
from comments left join tx on tx.tx_id = comments.c_tx where c_sig != '' limit 100""" )
page['body'] += [ '<table>']
page['body'] += [ '<tr><th>Transaction</th><th>Comment</th></tr>']
for row in rows:
link = '<a href="tx/' + row[1]+ '">'+ row[1] + '</a>'
page['body'] += ['<tr><td>'+link+'</td><td>'+row[2]+'</td></tr>']
page['body'] += [ '</table>']
return
if tx_hash:
page['title'] = 'Annotate transaction'
tx_id, b, c, d = abe.get_tx( tx_hash )
chain = abe.get_default_chain()
in_addresses = []
for row in abe.get_tx_inputs( tx_id ):
addr = abe.store.binout(row[6])
addr = hash_to_address_link(chain['address_version'], addr, '../')
in_addresses.append( addr[3] )
if not address:
address = in_addresses[0]
out_addresses = []
for row in abe.get_tx_outputs( tx_id ):
addr = abe.store.binout(row[6])
addr = hash_to_address_link(chain['address_version'], addr, '../')
out_addresses.append( addr[3] )
if message or signature:
# check address
#if address not in in_addresses and address not in out_addresses:
if address not in in_addresses:
page['title'] = 'Error'
page['body'] = ['<p>wrong address for this transaction.</p>\n']
print address, in_addresses
return
# check signature
import bitcoinrpc
conn = bitcoinrpc.connect_to_local()
message = message.replace("\r\n","\\n").replace("!","\\!").replace("$","\\$")
print "verifymessage:", address, signature, repr(message)
try:
v = conn.verifymessage(address,signature, tx_hash+":"+message)
except:
v = False
if not v:
page['title'] = 'Error'
page['body'] = ['<p>Invalid signature.</p>']
return
# little bobby tables
message = message.replace('"', '\\"').replace("'", "\\'")
# escape html
message = escape( message )
message = message[:1024]
row = abe.store.selectrow("select c_tx from comments where c_tx=%d "%(tx_id ) )
if not row:
abe.store.sql("insert into comments (c_tx, c_text, c_pubkey, c_sig) VALUES (%d, '%s', '%s', '%s')"%( tx_id, message, address, signature) )
abe.store.commit()
page['body'] = ['<p>Your comment was added successfully.</p>\n']
else:
if not message:
abe.store.sql("delete from comments where c_tx=%d "%( tx_id ) )
abe.store.commit()
page['body'] = ['<p>Your comment was deleted.</p>\n']
else:
abe.store.sql("update comments set c_text='%s', c_sig='%s', c_pubkey='%s' where c_tx=%d "%( message, signature, address, tx_id ) )
abe.store.commit()
page['body'] = ['<p>Your comment was updated.</p>\n']
return
else:
select = "<select id=\"address\" onkeyup=\"change_address(this.value);\" onchange=\"change_address(this.value);\" name='address'>" \
+ "\n".join( map( lambda addr: "<option value=\""+addr+"\">"+addr+"</option>", in_addresses ) ) \
+"</select>"
select = select.replace("<option value=\""+address+"\">","<option value=\""+address+"\" selected>")
tx_link = '<a href="tx/' + tx_hash + '">'+ tx_hash + '</a>'
javascript = """
<script>
function change_address(x){
document.getElementById("saddress").innerHTML=x;
}
function change_text(x){
x = x.replace(/!/g,"\\\\!");
x = x.replace(/\\n/g,"\\\\n");
x = x.replace(/\\$/g,"\\\\$");
document.getElementById("stext").innerHTML = x;
}
function onload(){
change_text(document.getElementById("text").value);
//change_address(document.getElementById("address").value);
}
</script>
"""
page['title'] = 'Annotate transaction'
page['body'] = [
javascript,
'<form id="form" action="', page['dotdot'], 'annotate">\n'
'Transaction: ',tx_link,'<br/>'
'Address:', select,'<br/><br/>\n'
'Message:<br/><textarea id="text" onkeyup="change_text(this.value);" name="comment" cols="80" value=""></textarea><br/><br/>\n'
'You must sign your message with one of the input addresses involved in the transaction.<br/>\n'
'The signature will be returned by the following command line:<br/>\n'
'<pre>bitcoind signmessage <span id="saddress">'+in_addresses[0]+'</span> "'+tx_hash+':<span id="stext">your text</span>"</pre>\n'
'Signature:<br/><input name="signature" value="" style="width:500px;"/><br/>'
'<input name="tx" type="hidden" value="'+tx_hash+'" />'
'<button type="submit">Submit</button>\n'
'</form>\n']
return
if address:
page['title'] = 'Annotate address'
if message or signature:
# check signature
import bitcoinrpc
conn = bitcoinrpc.connect_to_local()
message = message.replace("\n","\\n").replace("!","\\!").replace("$","\\$")
print "verifymessage:", address, signature, message
try:
v = conn.verifymessage(address,signature, message)
except:
v = False
if not v:
page['title'] = 'Error'
page['body'] = ['<p>Invalid signature.</p>']
return
# little bobby tables
message = message.replace('"', '\\"').replace("'", "\\'")
# escape html
message = escape( message )
message = message[:1024]
row = abe.store.selectrow("select address from addr_comments where address='%s' "%(address ) )
if not row:
abe.store.sql("insert into addr_comments (address, text) VALUES ('%s', '%s')"%( address, message) )
abe.store.commit()
page['body'] = ['<p>Your comment was added successfully.</p>\n']
else:
if not message:
abe.store.sql("delete from addr_comments where address='%s' "%( message ) )
abe.store.commit()
page['body'] = ['<p>Your comment was deleted.</p>\n']
else:
abe.store.sql("update addr_comments set text='%s' where address='%s' "%( message, address ) )
abe.store.commit()
page['body'] = ['<p>Your comment was updated.</p>\n']
return
else:
javascript = """
<script>
function change_text(x){
x = x.replace(/!/g,"\\\\!");
x = x.replace(/\\n/g,"\\\\n");
x = x.replace(/\\$/g,"\\\\$");
document.getElementById("stext").innerHTML=x;
}
function onload(){
change_text(document.getElementById("text").value);
}
</script>
"""
page['title'] = 'Annotate address'
page['body'] = [
javascript,
'<form id="form" action="', page['dotdot'], 'annotate">\n'
'Address:', address,'<br/><br/>\n'
'Message:<br/><textarea id="text" onkeyup="change_text(this.value);" name="comment" cols="80" value=""></textarea><br/><br/>\n'
'You must sign your message with the address.<br/>\n'
'The signature will be returned by the following command line:<br/>\n'
'<pre>bitcoind signmessage <span id="saddress">'+address+'</span> "<span id="stext">your text</span>"</pre>\n'
'Signature:<br/><input name="signature" value="" style="width:500px;"/><br/>'
'<input name="address" type="hidden" value="'+address+'" />'
'<button type="submit">Submit</button>\n'
'</form>\n']
def handle_thresholdRelease(abe, page):
page['title'] = 'Threshold Release'
chain = abe.get_default_chain()
target = (page['params'].get('target') or [''])[0]
address = (page['params'].get('address') or [''])[0]
secret = (page['params'].get('secret') or [''])[0]
signature = (page['params'].get('signature') or [''])[0]
if address:
# check if address is valid
version, binaddr = decode_check_address(address)
if binaddr is None:
page['body'] = ['<p>Not a valid address.</p>']
return
# check amount
try:
target = float(target)
except:
page['body'] = ['<p>Not a valid amount.</p>']
return
# check signature
import bitcoinrpc
conn = bitcoinrpc.connect_to_local()
print address, signature
try:
v = conn.verifymessage(address,signature, "fundraiser")
except:
v = False
if not v:
page['body'] = ['<p>Invalid signature.</p>']
return
# little bobby tables
secret = secret.replace('"', '\\"').replace("'", "\\'")
# escape html
#message = escape( message )
#
secret = secret[:1024]
row = abe.store.selectrow("select address from fundraisers where address='%s'"%(address ) )
if not row:
abe.store.sql("insert into fundraisers (address, target, secret) VALUES ('%s', %d, '%s')"%( address, target, secret) )
abe.store.commit()
page['body'] = ['<p>Your fundraiser was added successfully.</p>\n']
else:
if not secret:
abe.store.sql("delete from fundraisers where address='%s'"%( address ) )
abe.store.commit()
page['body'] = ['<p>Fundraiser entry was deleted.</p>\n']
else:
abe.store.sql("update fundraisers set target=%d, secret='%s' where address='%s'"%( target, secret, address ) )
abe.store.commit()
page['body'] = ['<p>Your fundraiser data was updated.</p>\n']
msg = "<object data=\"http://ecdsa.org/fundraiser/"+address+"?width=400\" height=\"60\" width=\"400\">Donate to "+address+"</object/>"
page['body'] += "Sample code:<br/><pre>"+escape(msg)+"</pre><br/><br/>"+msg
return
else:
javascript = """
<script>
function change_address(x){
//check validity here
document.getElementById("saddress").innerHTML=x;
}
function onload(){
change_address(document.getElementById("address").value);
}
</script>
"""
msg= """
This service allows you to release digital content when a requested amount of Bitcoin donations has been reached.<br/>
<br/>
For example, you may want to publish a low quality version of a music file, and release a high quality version only if donations reach the price you want.<br/>
<br/>
There are various ways to use this service:
<ul>
<li>You may upload your content at a private URL; we will disclose the URL once the amount is reached.</li>
<li>You may encrypt your content and upload it to a public server; we will publish the encryption password only when the target amount is reached.</li>
</ul>
Once the threshold is reached, the content is displayed in place of the donation progress bar.<br/>
<br/>
"""
page['title'] = 'Threshold Release'
page['body'] = [
javascript, msg,
'<form id="form" action="', page['dotdot'], 'thresholdRelease">\n'
'Address:<br/><input name="address" value="" style="width:500px;" onkeyup="change_address(this.value);"/><br/><br/>'
'Target amount:<br/><input name="target" value="" style="width:500px;"/><br/><br/>'
'Secret (will be displayed in place of the widget when the donation target is reached. Html, max. 1024 bytes):<br/>'
'<textarea name="secret" value="" style="width:500px;"></textarea><br/><br/>'
'You must provide a signature in order to demonstrate that you own the bitcoin address of the fundraiser.<br/>'
'The signature will be returned by the following command line:<br/>\n'
'<pre>bitcoind signmessage <span id="saddress"></span> <span id="stext">fundraiser</span></pre>\n'
'Signature:<br/><input name="signature" value="" style="width:500px;"/><br/>'
'<button type="submit">Submit</button>\n'
'</form>\n'
]
# check and display html as it is typed
def get_fundraiser(abe,page):
address = page['env'].get('PATH_INFO')[1:]
if not address: return None,None,None,None
chain = abe.get_default_chain()
# get donations
donations = abe.q_getreceivedbyaddress(page,chain)
try:
donations = float(donations)
except:
donations = 0
# check if address is in the database
row = abe.store.selectrow("select target, secret from fundraisers where address='%s'"%address )
secret = None
target = None
if row:
target, secret = row
if donations < target: secret = None
target = float(target)
#priority
try:
target = float( page['params'].get('target')[0] )
except:
pass
return address, donations, target, secret
def handle_fundraiser_js(abe,page):
""" return a scriptlet"""
address,donations,target,secret = abe.get_fundraiser(page)
if secret:
secret = escape( secret )
ret = "var fundraiser_address = \"%s\";\nvar fundraiser_secret='%s';\nvar fundraiser_received = %f;\nfundraiser_callback();\n"%(address,secret,donations)
abe.do_raw(page, ret)
page['content_type']='text/javascript'
def handle_fundraiser_img(abe,page):
return abe.handle_counter(page)
def handle_counter(abe,page):
""" return a png with percentage"""
address, donations, target, secret = abe.get_fundraiser(page)
if target:
progress = int(100 * donations/target)
progress = max(0, min( progress, 100 ))
return abe.serve_static("percent/%dpercent.png"%progress, page['start_response'])
else:
donations = "%.2f"%donations
path = "/img/" + donations + ".png"
cpath = abe.htdocs + path
if not os.path.exists(cpath):
s = donations+ " BTC"
length = 13*len(s)
cmd = "echo \"%s\" | convert -page %dx20+0+0 -font Helvetica -style Normal -background none -undercolor none -fill black -pointsize 22 text:- +repage -background none -flatten %s"%(s, length, cpath)
print cmd
os.system(cmd)
return abe.serve_static(path, page['start_response'])
def get_param(abe,page,name,default):
try:
return page['params'].get(name)[0]
except:
return default
def handle_fundraiser(abe, page):
abe.handle_widgets(page)
def handle_widgets(abe, page):
""" return embedded html"""
address, donations, target, secret = abe.get_fundraiser(page)
if not address:
f = open(abe.htdocs + '/widgets.html', "rb")
s = f.read()
f.close()
page['body'] = s
page['title'] = "Bitcoin Widgets"
return
if secret:
abe.do_raw(page, secret)
page['content_type']='text/html'
return
try:
width = int(page['params'].get('width')[0])
except:
width = 400
try:
bg = page['params'].get('bg')[0]
except:
bg = "#000000"
try:
lc = page['params'].get('leftcolor')[0]
except:
lc = "#dddddd"
try:
rc = page['params'].get('rightcolor')[0]
except:
rc = "#ffaa44"
try:
padding = page['params'].get('padding')[0]
except:
padding = "3"
try:
radius = page['params'].get('radius')[0]
except:
radius = "1em"
try:
textcolor = page['params'].get('textcolor')[0]
except:
textcolor = "#000000"
leftwidth = width - 120
if target:
progress = min( width, max( 1, int( leftwidth * donations/target ) ))
percent = min( 100, max( 0, int( 100 * donations/target ) ))
title = "%d"%percent + " percent of %.2f BTC"%target
else:
title = ""
progress = leftwidth
outer_style = "border-radius:%s; -moz-border-radius:%s; padding:%s; color:%s; background-color: %s;"%(radius,radius,padding,textcolor,bg)
left_style = "border-radius:%s; -moz-border-radius:%s; padding:%s; background-color: %s;"%(radius,radius,padding,lc)
right_style = "border-radius:%s; -moz-border-radius:%s; padding:%s; background-color: %s; width:80px; text-align:center;"%(radius,radius,padding,rc)
count = "%.2f BTC"%donations
link_count = "<a style=\"text-decoration:none;color:"+textcolor + "\" title=\""+ title + "\" href=\"http://ecdsa.org/address/"+address+"\" target=\"_blank\">"+count+"</a>"
text = "Donate"
link_text = "<a style=\"text-decoration:none;color:"+textcolor+"\" href=\"javascript:alert('Donate to this Bitcoin address:\\n"+address+"');\">"+text+"</a>"
ret = """<table style="border-width:0px;"><tr><td>
<table style="%s width:%dpx;">
<tr><td style="%s width:%dpx; text-align:center;">%s</td><td></td></tr>
</table>
</td>
<td>
<table style="%s width:100px;">
<tr><td style="%s">%s</td></tr>
</table>
</td></tr></table>"""%(outer_style,leftwidth,left_style,progress,link_count,outer_style,right_style,link_text)
abe.do_raw(page, ret)
page['content_type']='text/html'
def serve(store):
args = store.args
abe = Aml(store, args)
if args.host or args.port:
# HTTP server.
if args.host is None:
args.host = "localhost"
from wsgiref.simple_server import make_server
port = int(args.port or 80)
httpd = make_server(args.host, port, abe )
print "Listening on http://" + args.host + ":" + str(port)
try:
httpd.serve_forever()
except:
httpd.shutdown()
raise
from daemon import Daemon
class MyDaemon(Daemon):
def __init__(self,args):
self.args = args
Daemon.__init__(self, self.args.pidfile, stderr=self.args.error_log, stdout=self.args.access_log )
def run(self):
store = make_store(self.args)
serve(store)
if __name__ == '__main__':
cmd = sys.argv[1]
if cmd not in ['start','stop','restart','run']:
print "usage: %s start|stop|restart" % sys.argv[0]
sys.exit(2)
argv = sys.argv[2:]
conf = {
"port": 80,
"host": '',
"no_serve": None,
"debug": None,
"static_path": None,
"auto_agpl": None,
"download_name":None,
"watch_pid": None,
"base_url": None,
"no_update": None,
"pidfile": '',
"access_log": '',
"error_log": '',
"document_root":'',
"template": AML_TEMPLATE,
"template_vars": {
"APPNAME": AML_APPNAME,
"CONTENT_TYPE": 'text/html',
},
}
conf.update(DataStore.CONFIG_DEFAULTS)
argv.append('--config=/etc/abe.conf')
args, argv = readconf.parse_argv(argv, conf)
if argv:
sys.stderr.write("Error: unknown option `%s'\n" % (argv[0],))
sys.exit(1)
daemon = MyDaemon(args)
if cmd == 'start' :
daemon.start()
elif cmd == 'stop' :
daemon.stop()
elif cmd == 'restart' :
daemon.restart()
elif cmd=='run':
daemon.stop()
daemon.run()
sys.exit(0)
| 45,627 | Python | .py | 977 | 33.829069 | 297 | 0.506261 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,424 | reconfigure.py | bitcoin-abe_bitcoin-abe/Abe/reconfigure.py |
#!/usr/bin/env python
# Copyright(C) 2012,2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
"""Reconfigure an Abe instance."""
import sys
import logging
import util
import firstbits
def keep_scriptsig_reconfigure(store, args):
have = store.keep_scriptsig
want = args.keep_scriptsig
if have == want:
return
if want:
store.log.warn("Can not turn on keep-scriptsig: unimplemented")
return
lock = store.get_lock()
try:
# XXX Should use a temporary schema_version.
store.drop_view_if_exists("txin_detail")
store.drop_column_if_exists("txin", "txin_scriptSig")
store.drop_column_if_exists("txin", "txin_sequence")
store.config['keep_scriptsig'] = "false"
store.keep_scriptsig = want
store.refresh_ddl()
store.ddl(store.get_ddl("txin_detail"))
store.save_configvar("keep_scriptsig")
store.commit()
finally:
store.release_lock(lock)
def main(argv):
cmdline = util.CmdLine(argv)
cmdline.usage = lambda: \
"""Usage: python -m Abe.reconfigure [-h] [--config=FILE] [--CONFIGVAR=VALUE]...
Apply configuration changes to an existing Abe database, if possible.
--help Show this help message and exit.
--config FILE Read options from FILE.
--use-firstbits {true|false}
Turn Firstbits support on or off.
--keep-scriptsig false Remove input validation scripts from the database.
All configuration variables may be given as command arguments."""
store, argv = cmdline.init()
if store is None:
return 0
args = store.args  # the parsed configuration is kept on the store by cmdline.init()
firstbits.reconfigure(store, args)
keep_scriptsig_reconfigure(store, args)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| 2,435 | Python | .py | 61 | 35.016393 | 87 | 0.692797 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,425 | util.py | bitcoin-abe_bitcoin-abe/Abe/util.py |
# Copyright(C) 2011,2012,2013,2014 by Abe developers.
# Copyright (c) 2010 Gavin Andresen
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
#
# Misc util routines
#
import re
import base58
import Crypto.Hash.SHA256 as SHA256
try:
import Crypto.Hash.RIPEMD as RIPEMD160
except Exception:
import ripemd_via_hashlib as RIPEMD160
# This function comes from bitcointools, bct-LICENSE.txt.
def determine_db_dir():
import os
import os.path
import platform
if platform.system() == "Darwin":
return os.path.expanduser("~/Library/Application Support/Bitcoin/")
elif platform.system() == "Windows":
return os.path.join(os.environ['APPDATA'], "Bitcoin")
return os.path.expanduser("~/.bitcoin")
# This function comes from bitcointools, bct-LICENSE.txt.
def long_hex(bytes):
return bytes.encode('hex_codec')
# This function comes from bitcointools, bct-LICENSE.txt.
def short_hex(bytes):
t = bytes.encode('hex_codec')
if len(t) < 11:
return t
return t[0:4]+"..."+t[-4:]
NULL_HASH = "\0" * 32
GENESIS_HASH_PREV = NULL_HASH
def sha256(s):
return SHA256.new(s).digest()
def double_sha256(s):
return sha256(sha256(s))
def sha3_256(s):
import hashlib
import sys
if sys.version_info < (3, 4):
import sha3
return hashlib.sha3_256(s).digest()
def pubkey_to_hash(pubkey):
return RIPEMD160.new(SHA256.new(pubkey).digest()).digest()
def calculate_target(nBits):
# cf. CBigNum::SetCompact in bignum.h
shift = 8 * (((nBits >> 24) & 0xff) - 3)
bits = nBits & 0x7fffff
sign = -1 if (nBits & 0x800000) else 1
return sign * (bits << shift if shift >= 0 else bits >> -shift)
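# Worked example (added note): calculate_target(0x1d00ffff), the "difficulty 1"
# compact bits of Bitcoin's genesis block, gives shift = 8 * (0x1d - 3) = 208 and
# bits = 0xffff, so the target is 0xffff << 208, i.e. the familiar
# 0x00000000ffff0000...0000 with 26 trailing zero bytes.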
def target_to_difficulty(target):
return ((1 << 224) - 1) * 1000 / (target + 1) / 1000.0
def calculate_difficulty(nBits):
return target_to_difficulty(calculate_target(nBits))
def work_to_difficulty(work):
return work * ((1 << 224) - 1) * 1000 / (1 << 256) / 1000.0
def target_to_work(target):
# XXX will this round using the same rules as C++ Bitcoin?
return int((1 << 256) / (target + 1))
def calculate_work(prev_work, nBits):
if prev_work is None:
return None
return prev_work + target_to_work(calculate_target(nBits))
def work_to_target(work):
return int((1 << 256) / work) - 1
def get_search_height(n):
if n < 2:
return None
if n & 1:
return n >> 1 if n & 2 else n - (n >> 2)
bit = 2
while (n & bit) == 0:
bit <<= 1
return n - bit
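# Added note: for even n the lowest set bit is subtracted, for odd n the result
# is n >> 1 or n - (n >> 2) depending on bit 1; e.g. get_search_height(14) == 12,
# get_search_height(16) == 0 and get_search_height(9) == 7. The store uses these
# heights for its block-ancestor search links.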
ADDRESS_RE = re.compile('[1-9A-HJ-NP-Za-km-z]{26,}\\Z')
def possible_address(string):
return ADDRESS_RE.match(string)
def hash_to_address(version, hash):
vh = version + hash
return base58.b58encode(vh + double_sha256(vh)[:4])
def decode_check_address(address):
if possible_address(address):
version, hash = decode_address(address)
if hash_to_address(version, hash) == address:
return version, hash
return None, None
def decode_address(addr):
bytes = base58.b58decode(addr, None)
if len(bytes) < 25:
bytes = ('\0' * (25 - len(bytes))) + bytes
return bytes[:-24], bytes[-24:-4]
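# Example (added note), consistent with the testnet fixtures earlier in this dump:
# hash_to_address('\x6f', 'deb1f1ffbef6061a0b8f6d23b4e72164b4678253'.decode('hex'))
# gives 'n1pTUVnjZ6GHxujaoJ62P9NBMNjLr5N2EQ', and decode_check_address() on that
# string recovers the same (version, hash) pair once the 4-byte checksum verifies.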
class JsonrpcException(Exception):
def __init__(ex, error, method, params):
Exception.__init__(ex)
ex.code = error['code']
ex.message = error['message']
ex.data = error.get('data')
ex.method = method
ex.params = params
def __str__(ex):
return ex.method + ": " + ex.message + " (code " + str(ex.code) + ")"
class JsonrpcMethodNotFound(JsonrpcException):
pass
def jsonrpc(url, method, *params):
import json, urllib
postdata = json.dumps({"jsonrpc": "2.0",
"method": method, "params": params, "id": "x"})
respdata = urllib.urlopen(url, postdata).read()
resp = json.loads(respdata)
if resp.get('error') is not None:
if resp['error']['code'] == -32601:
raise JsonrpcMethodNotFound(resp['error'], method, params)
raise JsonrpcException(resp['error'], method, params)
return resp['result']
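# Usage sketch (added note; URL and credentials are placeholders):
#   jsonrpc("http://rpcuser:rpcpass@localhost:8332", "getblockcount")
# posts a JSON-RPC 2.0 request and returns the decoded 'result' field, raising
# JsonrpcMethodNotFound for error code -32601 and JsonrpcException otherwise.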
def str_to_ds(s):
import BCDataStream
ds = BCDataStream.BCDataStream()
ds.write(s)
return ds
class CmdLine(object):
def __init__(self, argv, conf=None):
self.argv = argv
if conf is None:
self.conf = {}
else:
self.conf = conf.copy()
def usage(self):
return "Sorry, no help is available."
def init(self):
import DataStore, readconf, logging, sys
self.conf.update({ "debug": None, "logging": None })
self.conf.update(DataStore.CONFIG_DEFAULTS)
args, argv = readconf.parse_argv(self.argv, self.conf, strict=False)
if argv and argv[0] in ('-h', '--help'):
print self.usage()
return None, []
logging.basicConfig(
stream=sys.stdout, level=logging.DEBUG, format="%(message)s")
if args.logging is not None:
import logging.config as logging_config
logging_config.dictConfig(args.logging)
store = DataStore.new(args)
return store, argv
# Abstract hex-binary conversions for eventual porting to Python 3.
def hex2b(s):
return s.decode('hex')
def b2hex(b):
return b.encode('hex')
| 5,908 | Python | .py | 162 | 31.222222 | 77 | 0.655281 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,426 | ripemd_via_hashlib.py | bitcoin-abe_bitcoin-abe/Abe/ripemd_via_hashlib.py |
# RIPEMD hash interface via hashlib for those who don't have
# Crypto.Hash.RIPEMD.
import hashlib
def new(data=''):
h = hashlib.new('ripemd160')
h.update(data)
return h
| 183 | Python | .py | 7 | 23.142857 | 60 | 0.718391 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,427 | upgrade.py | bitcoin-abe_bitcoin-abe/Abe/upgrade.py |
#!/usr/bin/env python
# Copyright(C) 2011,2012,2013,2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
"""Upgrade to the current database schema."""
import os
import sys
import DataStore
import util
def run_upgrades_locked(store, upgrades):
for i in xrange(len(upgrades) - 1):
vers, func = upgrades[i]
if store.config['schema_version'] == vers:
sv = upgrades[i+1][0]
store.log.warning("Upgrading schema to version: %s", sv)
func(store)
if sv[:3] == 'Abe':
store.sql(
"UPDATE configvar SET configvar_value = ?"
" WHERE configvar_name = 'schema_version'",
(sv,))
if store.rowcount() != 1:
raise Exception("Failed to update schema_version");
else:
store.sql(
"UPDATE config SET schema_version = ? WHERE config_id = 1",
(sv,))
store.commit()
store.config['schema_version'] = sv
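# Added note: 'upgrades' is an ordered list of (schema_version, function) pairs;
# entry i's function migrates a store at version upgrades[i][0] up to
# upgrades[i+1][0], so the final pair acts only as the target-version sentinel.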
def run_upgrades(store, upgrades):
"""Guard against concurrent upgrades."""
lock = store.get_lock()
try:
run_upgrades_locked(store, upgrades)
finally:
store.release_lock(lock)
def add_block_value_in(store):
store.sql("ALTER TABLE block ADD block_value_in NUMERIC(30)")
def add_block_value_out(store):
store.sql("ALTER TABLE block ADD block_value_out NUMERIC(30)")
def add_block_total_satoshis(store):
store.sql("ALTER TABLE block ADD block_total_satoshis NUMERIC(26)")
def add_block_total_seconds(store):
store.sql("ALTER TABLE block ADD block_total_seconds NUMERIC(20)")
def add_block_satoshi_seconds(store):
store.sql("ALTER TABLE block ADD block_satoshi_seconds NUMERIC(28)")
def add_block_total_ss(store):
store.sql("ALTER TABLE block ADD block_total_ss NUMERIC(28)")
def add_satoshi_seconds_destroyed(store):
store.sql("ALTER TABLE block_tx ADD satoshi_seconds_destroyed NUMERIC(28)")
def add_cc_block_height(store):
store.sql("ALTER TABLE chain_candidate ADD block_height NUMERIC(14)")
def init_cc_block_height(store):
store.sql(
"""UPDATE chain_candidate cc
SET block_height = (
SELECT block_height
FROM block b
WHERE b.block_id = cc.block_id)
""")
def index_cc_block_height(store):
store.sql(
"""CREATE INDEX x_cc_chain_block_height
ON chain_candidate (chain_id, block_height)""")
def index_cc_block(store):
store.sql(
"""CREATE INDEX x_cc_block ON chain_candidate (block_id)""")
def create_block_txin(store):
store.sql(
"""CREATE TABLE block_txin (
block_id NUMERIC(14),
txin_id NUMERIC(26),
out_block_id NUMERIC(14),
PRIMARY KEY (block_id, txin_id)
)""")
def index_block_tx_tx(store):
try:
store.sql("DROP INDEX x_block_tx_tx")
except Exception:
store.rollback()
store.sql("CREATE INDEX x_block_tx_tx ON block_tx (tx_id)")
def init_block_txin(store):
store.log.info("Initializing block_txin.")
count = int(store.selectrow("SELECT COUNT(1) FROM block_txin")[0] or 0)
tried = 0
added = 0
seen = set()
store.log.info("...loading existing keys")
# XXX store.conn and store.sql_transform no longer exist.
cur = store.conn.cursor()
cur.execute(store.sql_transform("""
SELECT block_id, txin_id FROM block_txin"""))
for row in cur:
seen.add(row)
store.log.info("...finding output blocks")
cur.execute(store.sql_transform("""
SELECT bt.block_id, txin.txin_id, obt.block_id
FROM block_tx bt
JOIN txin USING (tx_id)
JOIN txout USING (txout_id)
JOIN block_tx obt ON (txout.tx_id = obt.tx_id)"""))
for row in cur:
(block_id, txin_id, oblock_id) = row
if (block_id, txin_id) not in seen:
# If oblock is an ancestor of block, insert into block_txin.
if store.is_descended_from(block_id, oblock_id):
store.sql("""
INSERT INTO block_txin (block_id, txin_id, out_block_id)
VALUES (?, ?, ?)""",
(block_id, txin_id, oblock_id))
count += 1
added += 1
if count % 1000 == 0:
store.commit()
store.log.info("commit %d", count)
tried += 1
if tried % 1000 == 0:
sys.stdout.write('\r%d/%d ' % (added, tried))
sys.stdout.flush()
store.log.info('done.')
def init_block_value_in(store):
store.log.info("Calculating block_value_in.")
for row in store.selectall("""
SELECT b.block_id, SUM(txout.txout_value)
FROM block b
JOIN block_tx USING (block_id)
JOIN txin USING (tx_id)
LEFT JOIN txout USING (txout_id)
GROUP BY b.block_id
"""):
store.sql("UPDATE block SET block_value_in = ? WHERE block_id = ?",
(int(row[1] or 0), row[0]))
def init_block_value_out(store):
store.log.info("Calculating block_value_out.")
for row in store.selectall("""
SELECT b.block_id, SUM(txout.txout_value)
FROM block b
JOIN block_tx USING (block_id)
JOIN txout USING (tx_id)
GROUP BY b.block_id
"""):
store.sql("UPDATE block SET block_value_out = ? WHERE block_id = ?",
(int(row[1]), row[0]))
def init_block_totals(store):
store.log.info("Calculating block total generated and age.")
last_chain_id = None
stats = None
for row in store.selectall("""
SELECT cc.chain_id, b.prev_block_id, b.block_id,
b.block_value_out - b.block_value_in, b.block_nTime
FROM chain_candidate cc
JOIN block b USING (block_id)
WHERE cc.block_height IS NOT NULL
ORDER BY cc.chain_id, cc.block_height"""):
chain_id, prev_id, block_id, generated, nTime = row
generated = int(generated)
nTime = int(nTime)
if chain_id != last_chain_id:
stats = {}
last_chain_id = chain_id
if prev_id is None:
stats[block_id] = {
"chain_start": nTime,
"satoshis": generated}
else:
stats[block_id] = {
"chain_start": stats[prev_id]['chain_start'],
"satoshis": generated + stats[prev_id]['satoshis']}
store.sql("UPDATE block SET block_total_seconds = ?,"
" block_total_satoshis = ?"
" WHERE block_id = ?",
(nTime - stats[block_id]['chain_start'],
stats[block_id]['satoshis'], block_id))
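# Satoshi-seconds destroyed for a transaction is the sum over its inputs of the
# spent output value times that output's age, i.e. the spending block's
# block_nTime minus the block_nTime of the block that contained the output
# (located via the block_txin table built above).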
def init_satoshi_seconds_destroyed(store):
store.log.info("Calculating satoshi-seconds destroyed.")
count = 0
step = 100
start = 1
stop = int(store.selectrow("SELECT MAX(block_id) FROM block_tx")[0])
# XXX store.conn and store.sql_transform no longer exist.
cur = store.conn.cursor()
while start <= stop:
cur.execute(store.sql_transform("""
SELECT bt.block_id, bt.tx_id,
SUM(txout.txout_value * (b.block_nTime - ob.block_nTime))
FROM block b
JOIN block_tx bt USING (block_id)
JOIN txin USING (tx_id)
JOIN txout USING (txout_id)
JOIN block_tx obt ON (txout.tx_id = obt.tx_id)
JOIN block_txin bti ON (
bti.block_id = bt.block_id AND
bti.txin_id = txin.txin_id AND
obt.block_id = bti.out_block_id)
JOIN block ob ON (bti.out_block_id = ob.block_id)
WHERE bt.block_id >= ?
AND bt.block_id < ?
GROUP BY bt.block_id, bt.tx_id"""), (start, start + step))
for row in cur:
block_id, tx_id, destroyed = row
sys.stdout.write("\rssd: " + str(count) + " ")
count += 1
store.sql("UPDATE block_tx SET satoshi_seconds_destroyed = ?"
" WHERE block_id = ? AND tx_id = ?",
(destroyed, block_id, tx_id))
start += step
store.log.info("done.")
def set_0_satoshi_seconds_destroyed(store):
store.log.info("Setting NULL to 0 in satoshi_seconds_destroyed.")
# XXX store.conn and store.sql_transform no longer exist.
cur = store.conn.cursor()
cur.execute(store.sql_transform("""
SELECT bt.block_id, bt.tx_id
FROM block_tx bt
JOIN block b USING (block_id)
WHERE b.block_height IS NOT NULL
AND bt.satoshi_seconds_destroyed IS NULL"""))
for row in cur:
store.sql("""
UPDATE block_tx bt SET satoshi_seconds_destroyed = 0
WHERE block_id = ? AND tx_id = ?""", row)
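# Block satoshi-seconds follow a simple recurrence: each block adds
# created = outstanding_satoshis * seconds_since_previous_block; "ss" carries
# the running total minus what was destroyed, while "total_ss" accumulates only
# what was created. For example, with 5,000,000,000 satoshis (50 BTC)
# outstanding and a 600-second block interval,
# created = 5,000,000,000 * 600 = 3,000,000,000,000 satoshi-seconds.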
def init_block_satoshi_seconds(store):
store.log.info("Calculating satoshi-seconds.")
# XXX store.conn and store.sql_transform no longer exist.
cur = store.conn.cursor()
stats = {}
cur.execute(store.sql_transform("""
SELECT b.block_id, b.block_total_satoshis, b.block_nTime,
b.prev_block_id, SUM(bt.satoshi_seconds_destroyed),
b.block_height
FROM block b
JOIN block_tx bt ON (b.block_id = bt.block_id)
GROUP BY b.block_id, b.block_total_satoshis, b.block_nTime,
b.prev_block_id, b.block_height
ORDER BY b.block_height"""))
count = 0
while True:
row = cur.fetchone()
if row is None:
break
block_id, satoshis, nTime, prev_id, destroyed, height = row
satoshis = int(satoshis)
destroyed = int(destroyed)
if height is None:
continue
if prev_id is None:
stats[block_id] = {
"satoshis": satoshis,
"ss": 0,
"total_ss": 0,
"nTime": nTime}
else:
created = (stats[prev_id]['satoshis']
* (nTime - stats[prev_id]['nTime']))
stats[block_id] = {
"satoshis": satoshis,
"ss": stats[prev_id]['ss'] + created - destroyed,
"total_ss": stats[prev_id]['total_ss'] + created,
"nTime": nTime}
store.sql("""
UPDATE block
SET block_satoshi_seconds = ?,
block_total_ss = ?,
block_ss_destroyed = ?
WHERE block_id = ?""",
(store.intin(stats[block_id]['ss']),
store.intin(stats[block_id]['total_ss']),
store.intin(destroyed),
block_id))
count += 1
if count % 1000 == 0:
store.commit()
store.log.info("Updated %d blocks", count)
if count % 1000 != 0:
store.log.info("Updated %d blocks", count)
def index_block_nTime(store):
store.log.info("Indexing block_nTime.")
store.sql("CREATE INDEX x_block_nTime ON block (block_nTime)")
def replace_chain_summary(store):
store.sql("DROP VIEW chain_summary")
store.sql("""
CREATE VIEW chain_summary AS SELECT
cc.chain_id,
cc.in_longest,
b.block_id,
b.block_hash,
b.block_version,
b.block_hashMerkleRoot,
b.block_nTime,
b.block_nBits,
b.block_nNonce,
cc.block_height,
b.prev_block_id,
prev.block_hash prev_block_hash,
b.block_chain_work,
b.block_num_tx,
b.block_value_in,
b.block_value_out,
b.block_total_satoshis,
b.block_total_seconds,
b.block_satoshi_seconds,
b.block_total_ss,
b.block_ss_destroyed
FROM chain_candidate cc
JOIN block b ON (cc.block_id = b.block_id)
LEFT JOIN block prev ON (b.prev_block_id = prev.block_id)""")
def drop_block_ss_columns(store):
"""Drop columns that may have been added in error."""
for c in ['created', 'destroyed']:
try:
store.sql("ALTER TABLE block DROP COLUMN block_ss_" + c)
except Exception:
store.rollback()
def add_constraint(store, table, name, constraint):
try:
store.sql("ALTER TABLE " + table + " ADD CONSTRAINT " + name +
" " + constraint)
except Exception:
store.log.exception(
"Failed to create constraint on table " + table + ": " +
constraint + "; ignoring error.")
store.rollback()
def add_fk_block_txin_block_id(store):
add_constraint(store, "block_txin", "fk1_block_txin",
"FOREIGN KEY (block_id) REFERENCES block (block_id)")
def add_fk_block_txin_tx_id(store):
add_constraint(store, "block_txin", "fk2_block_txin",
"FOREIGN KEY (txin_id) REFERENCES txin (txin_id)")
def add_fk_block_txin_out_block_id(store):
add_constraint(store, "block_txin", "fk3_block_txin",
"FOREIGN KEY (out_block_id) REFERENCES block (block_id)")
def add_chk_block_txin_out_block_id_nn(store):
add_constraint(store, "block_txin", "chk3_block_txin",
"CHECK (out_block_id IS NOT NULL)")
def create_x_cc_block_id(store):
store.sql("CREATE INDEX x_cc_block_id ON chain_candidate (block_id)")
def reverse_binary_hashes(store):
if store.config['binary_type'] != 'hex':
raise Exception(
'To support search by hash prefix, we have to reverse all values'
' in block.block_hash, block.block_hashMerkleRoot, tx.tx_hash,'
' orphan_block.block_hashPrev, and unlinked_txin.txout_tx_hash.'
' This has not been automated. You may perform this step manually,'
' then issue "UPDATE config SET schema_version = \'9.1\'" and'
' rerun this program.')
def drop_x_cc_block_id(store):
"""Redundant with x_cc_block"""
store.sql("DROP INDEX x_cc_block_id")
def create_x_cc_block_height(store):
store.sql(
"CREATE INDEX x_cc_block_height ON chain_candidate (block_height)")
def create_txout_approx(store):
store.sql("""
CREATE VIEW txout_approx AS SELECT
txout_id,
tx_id,
txout_value txout_approx_value
FROM txout""")
def add_fk_chain_candidate_block_id(store):
add_constraint(store, "chain_candidate", "fk1_chain_candidate",
"FOREIGN KEY (block_id) REFERENCES block (block_id)")
def create_configvar(store):
store.sql("""
CREATE TABLE configvar (
configvar_name VARCHAR(100) NOT NULL PRIMARY KEY,
configvar_value VARCHAR(255)
)""")
def configure(store):
# XXX This won't work anymore.
store.args.binary_type = store.config['binary_type']
store.configure()
store.save_config()
def populate_abe_sequences(store):
if store.config['sql.sequence_type'] == 'update':
try:
store.sql("""CREATE TABLE abe_sequences (
key VARCHAR(100) NOT NULL PRIMARY KEY,
nextid NUMERIC(30)
)""")
except Exception:
store.rollback()
for t in ['block', 'tx', 'txin', 'txout', 'pubkey',
'chain', 'magic', 'policy']:
(last_id,) = store.selectrow("SELECT MAX(" + t + "_id) FROM " + t)
if last_id is None:
continue
store.sql("UPDATE abe_sequences SET nextid = ? WHERE key = ?"
" AND nextid <= ?",
(last_id + 1, t, last_id))
if store.rowcount() < 1:
store.sql("INSERT INTO abe_sequences (key, nextid)"
" VALUES (?, ?)", (t, last_id + 1))
def add_datadir_chain_id(store):
store.sql("ALTER TABLE datadir ADD chain_id NUMERIC(10) NULL")
def noop(store):
pass
def rescan_if_missed_blocks(store):
"""
Due to a bug, some blocks may have been loaded but not placed in
a chain. If so, reset all datadir offsets to 0 to force a rescan.
"""
(bad,) = store.selectrow("""
SELECT COUNT(1)
FROM block
LEFT JOIN chain_candidate USING (block_id)
WHERE chain_id IS NULL
""")
if bad > 0:
store.sql(
"UPDATE datadir SET blkfile_number = 1, blkfile_offset = 0")
def insert_missed_blocks(store):
"""
Rescanning doesn't always work due to timeouts and resource
constraints. This may help.
"""
missed = []
for row in store.selectall("""
SELECT b.block_id
FROM block b
LEFT JOIN chain_candidate cc ON (b.block_id = cc.block_id)
WHERE chain_id IS NULL
ORDER BY b.block_height
"""):
missed.append(row[0])
if not missed:
return
store.log.info("Attempting to repair %d missed blocks.", len(missed))
inserted = 0
for block_id in missed:
# Insert block if its previous block is in the chain.
# XXX This won't work if we want to support forks.
# XXX This doesn't work for unattached blocks.
store.sql("""
INSERT INTO chain_candidate (
chain_id, block_id, block_height, in_longest)
SELECT cc.chain_id, b.block_id, b.block_height, 0
FROM chain_candidate cc
JOIN block prev ON (cc.block_id = prev.block_id)
JOIN block b ON (b.prev_block_id = prev.block_id)
WHERE b.block_id = ?""", (block_id,))
inserted += store.rowcount()
store.commit() # XXX not sure why PostgreSQL needs this.
store.log.info("Inserted %d rows into chain_candidate.", inserted)
def repair_missed_blocks(store):
store.log.info("Finding longest chains.")
best_work = []
for row in store.selectall("""
SELECT cc.chain_id, MAX(b.block_chain_work)
FROM chain_candidate cc
JOIN block b USING (block_id)
GROUP BY cc.chain_id"""):
best_work.append(row)
best = []
for row in best_work:
chain_id, bcw = row
(block_id,) = store.selectrow("""
SELECT MIN(block_id)
FROM block b
JOIN chain_candidate cc USING (block_id)
WHERE cc.chain_id = ?
AND b.block_chain_work = ?
""", (chain_id, bcw))
(in_longest,) = store.selectrow("""
SELECT in_longest
FROM chain_candidate
WHERE chain_id = ?
AND block_id = ?
""", (chain_id, block_id))
if in_longest == 1:
store.log.info("Chain %d already has the block of greatest work.",
chain_id)
continue
best.append([chain_id, block_id])
store.sql("""
UPDATE chain
SET chain_last_block_id = ?
WHERE chain_id = ?""",
(block_id, chain_id))
if store.rowcount() == 1:
store.log.info("Chain %d block %d", chain_id, block_id)
else:
raise Exception("Wrong rowcount updating chain " + str(chain_id))
if not best:
return
store.log.info("Marking blocks in longest chains.")
for elt in best:
chain_id, block_id = elt
count = 0
while True:
store.sql("""
UPDATE chain_candidate
SET in_longest = 1
WHERE chain_id = ?
AND block_id = ?""",
(chain_id, block_id))
if store.rowcount() != 1:
raise Exception("Wrong rowcount updating chain_candidate ("
+ str(chain_id) + ", " + str(block_id) + ")")
count += 1
row = store.selectrow("""
SELECT b.prev_block_id, cc.in_longest
FROM block b
JOIN chain_candidate cc ON (b.prev_block_id = cc.block_id)
WHERE cc.chain_id = ?
AND b.block_id = ?""",
(chain_id, block_id))
if row is None:
break # genesis block?
block_id, in_longest = row
if in_longest == 1:
break
store.log.info("Processed %d in chain %d", count, chain_id)
store.log.info("Repair successful.")
def add_block_num_tx(store):
store.sql("ALTER TABLE block ADD block_num_tx NUMERIC(10)")
def add_block_ss_destroyed(store):
store.sql("ALTER TABLE block ADD block_ss_destroyed NUMERIC(28)")
def init_block_tx_sums(store):
store.log.info("Calculating block_num_tx and block_ss_destroyed.")
rows = store.selectall("""
SELECT block_id,
COUNT(1),
COUNT(satoshi_seconds_destroyed),
SUM(satoshi_seconds_destroyed)
FROM block
JOIN block_tx USING (block_id)
GROUP BY block_id""")
count = 0
store.log.info("Storing block_num_tx and block_ss_destroyed.")
for row in rows:
block_id, num_tx, num_ssd, ssd = row
if num_ssd < num_tx:
ssd = None
store.sql("""
UPDATE block
SET block_num_tx = ?,
block_ss_destroyed = ?
WHERE block_id = ?""",
(num_tx, ssd, block_id))
count += 1
if count % 1000 == 0:
store.commit()
# XXX would like to set NOT NULL on block_num_tx.
def config_ddl(store):
# XXX This won't work anymore.
store.configure_ddl_implicit_commit()
store.save_configvar("ddl_implicit_commit")
def config_create_table_epilogue(store):
# XXX This won't work anymore.
store.configure_create_table_epilogue()
store.save_configvar("create_table_epilogue")
def rename_abe_sequences_key(store):
"""Drop and recreate abe_sequences with key renamed to sequence_key."""
# Renaming a column is horribly unportable.
try:
data = store.selectall("""
SELECT DISTINCT key, nextid
FROM abe_sequences""")
except Exception:
store.rollback()
return
store.log.info("copying sequence positions: %s", data)
store.ddl("DROP TABLE abe_sequences")
store.ddl("""CREATE TABLE abe_sequences (
sequence_key VARCHAR(100) PRIMARY KEY,
nextid NUMERIC(30)
)""")
for row in data:
store.sql("INSERT INTO abe_sequences (sequence_key, nextid)"
" VALUES (?, ?)", row)
def create_x_txin_txout(store):
store.sql("CREATE INDEX x_txin_txout ON txin (txout_id)")
def save_datadir(store):
"""Copy the datadir table to recreate it with a new column."""
store.sql("CREATE TABLE abe_tmp_datadir AS SELECT * FROM datadir")
def add_datadir_id(store):
data = store.selectall("""
SELECT dirname, blkfile_number, blkfile_offset, chain_id
FROM abe_tmp_datadir""")
try:
store.ddl("DROP TABLE datadir")
except Exception:
store.rollback() # Assume already dropped.
store.ddl("""CREATE TABLE datadir (
datadir_id NUMERIC(10) PRIMARY KEY,
dirname VARCHAR(2000) NOT NULL,
blkfile_number NUMERIC(4) NULL,
blkfile_offset NUMERIC(20) NULL,
chain_id NUMERIC(10) NULL
)""")
store.create_sequence("datadir")
for row in data:
new_row = [store.new_id("datadir")]
new_row += row
store.sql("""
INSERT INTO datadir (
datadir_id, dirname, blkfile_number, blkfile_offset, chain_id
) VALUES (?, ?, ?, ?, ?)""", new_row)
def drop_tmp_datadir(store):
store.ddl("DROP TABLE abe_tmp_datadir")
def config_clob(store):
# This won't work anymore.
store.configure_max_varchar()
store.save_configvar("max_varchar")
store.configure_clob_type()
store.save_configvar("clob_type")
def clear_bad_addresses(store):
"""Set address=Unknown for the bogus outputs in Bitcoin 71036."""
bad_tx = [
'a288fec5559c3f73fd3d93db8e8460562ebfe2fcf04a5114e8d0f2920a6270dc',
'2a0597e665ac3d1cabeede95cedf907934db7f639e477b3c77b242140d8cf728',
'e411dbebd2f7d64dafeef9b14b5c59ec60c36779d43f850e5e347abee1e1a455']
for tx_hash in bad_tx:
row = store.selectrow("""
SELECT tx_id FROM tx WHERE tx_hash = ?""",
(store.hashin_hex(tx_hash),))
if row:
store.sql("""
UPDATE txout SET pubkey_id = NULL
WHERE tx_id = ? AND txout_pos = 1 AND pubkey_id IS NOT NULL""",
(row[0],))
if store.rowcount():
store.log.info("Cleared txout %s", tx_hash)
def find_namecoin_addresses(store):
updated = 0
for tx_id, txout_pos, script in store.selectall("""
SELECT tx_id, txout_pos, txout_scriptPubKey
FROM txout
WHERE pubkey_id IS NULL"""):
pubkey_id = store.script_to_pubkey_id(store.binout(script))
if pubkey_id is not None:
store.sql("""
UPDATE txout
SET pubkey_id = ?
WHERE tx_id = ?
AND txout_pos = ?""", (pubkey_id, tx_id, txout_pos))
updated += 1
if updated % 1000 == 0:
store.commit()
store.log.info("Found %d addresses", updated)
if updated % 1000 > 0:
store.commit()
store.log.info("Found %d addresses", updated)
def create_abe_lock(store):
store.ddl("""CREATE TABLE abe_lock (
lock_id NUMERIC(10) NOT NULL PRIMARY KEY,
pid VARCHAR(255) NULL
)""")
def create_abe_lock_row(store):
store.sql("INSERT INTO abe_lock (lock_id) VALUES (1)")
def insert_null_pubkey(store):
dbnull = store.binin(DataStore.NULL_PUBKEY_HASH)
row = store.selectrow("SELECT pubkey_id FROM pubkey WHERE pubkey_hash = ?",
(dbnull,))
if row:
# Null hash seen in a transaction. Go to some trouble to
# set its pubkey_id = 0 without violating constraints.
old_id = row[0]
import random # No need for cryptographic strength here.
temp_hash = "".join([chr(random.randint(0, 255)) for x in xrange(20)])
store.sql("INSERT INTO pubkey (pubkey_id, pubkey_hash) VALUES (?, ?)",
(DataStore.NULL_PUBKEY_ID, store.binin(temp_hash)))
store.sql("UPDATE txout SET pubkey_id = ? WHERE pubkey_id = ?",
(DataStore.NULL_PUBKEY_ID, old_id))
store.sql("DELETE FROM pubkey WHERE pubkey_id = ?", (old_id,))
store.sql("UPDATE pubkey SET pubkey_hash = ? WHERE pubkey_id = ?",
(dbnull, DataStore.NULL_PUBKEY_ID))
else:
store.sql("""
INSERT INTO pubkey (pubkey_id, pubkey_hash) VALUES (?, ?)""",
(DataStore.NULL_PUBKEY_ID, dbnull))
def set_netfee_pubkey_id(store):
store.log.info("Updating network fee output address to 'Destroyed'...")
# XXX This doesn't work for Oracle because of LOB weirdness.
# There, you could probably get away with:
# UPDATE txout SET pubkey_id = 0 WHERE txout_scriptPubKey BETWEEN 1 AND 2;
# UPDATE configvar SET configvar_value = 'Abe26' WHERE configvar_name =
# 'schema_version' AND configvar_value = 'Abe25.3';
# COMMIT;
store.sql("""
UPDATE txout
SET pubkey_id = ?
WHERE txout_scriptPubKey = ?""",
(DataStore.NULL_PUBKEY_ID,
store.binin(DataStore.SCRIPT_NETWORK_FEE)))
store.log.info("...rows updated: %d", store.rowcount())
def adjust_block_total_satoshis(store):
store.log.info("Adjusting value outstanding for lost coins.")
block = {}
block_ids = []
store.log.info("...getting block relationships.")
for block_id, prev_id in store.selectall("""
SELECT block_id, prev_block_id
FROM block
WHERE block_height IS NOT NULL
ORDER BY block_height"""):
block[block_id] = {"prev_id": prev_id}
block_ids.append(block_id)
store.log.info("...getting lossage per block.")
for block_id, lost in store.selectall("""
SELECT block_tx.block_id, SUM(txout.txout_value)
FROM block_tx
JOIN txout ON (block_tx.tx_id = txout.tx_id)
WHERE txout.pubkey_id <= 0
GROUP BY block_tx.block_id"""):
if block_id in block:
block[block_id]["lost"] = lost
store.log.info("...calculating adjustments.")
for block_id in block_ids:
b = block[block_id]
prev_id = b["prev_id"]
prev_lost = 0 if prev_id is None else block[prev_id]["cum_lost"]
b["cum_lost"] = b.get("lost", 0) + prev_lost
store.log.info("...applying adjustments.")
count = 0
for block_id in block_ids:
adj = block[block_id]["cum_lost"]
if adj != 0:
store.sql("""
UPDATE block
SET block_total_satoshis = block_total_satoshis - ?
WHERE block_id = ?""",
(adj, block_id))
count += 1
if count % 1000 == 0:
store.log.info("Adjusted %d of %d blocks.", count, len(block_ids))
if count % 1000 != 0:
store.log.info("Adjusted %d of %d blocks.", count, len(block_ids))
def config_concat_style(store):
store._sql.configure_concat_style()
store.config['sql.concat_style'] = store._sql.config['concat_style']
store.save_configvar("sql.concat_style")
def config_limit_style(store):
# XXX This won't work anymore.
store.configure_limit_style()
store.save_configvar("limit_style")
def config_sequence_type(store):
# XXX This won't work anymore.
if store.config['sequence_type'] != "update":
return
store.configure_sequence_type()
if store.config['sequence_type'] != "update":
store.log.info("Creating native sequences.")
for name in ['magic', 'policy', 'chain', 'datadir',
'tx', 'txout', 'pubkey', 'txin', 'block']:
store.get_db().drop_sequence_if_exists(name)
store.create_sequence(name)
store.save_configvar("sequence_type")
def add_search_block_id(store):
store.log.info("Creating block.search_block_id")
store.sql("ALTER TABLE block ADD search_block_id NUMERIC(14) NULL")
def populate_search_block_id(store):
store.log.info("Calculating block.search_block_id")
for block_id, height, prev_id in store.selectall("""
SELECT block_id, block_height, prev_block_id
FROM block
WHERE block_height IS NOT NULL
ORDER BY block_height"""):
height = int(height)
search_id = None
if prev_id is not None:
prev_id = int(prev_id)
search_height = util.get_search_height(height)
if search_height is not None:
search_id = store.get_block_id_at_height(search_height, prev_id)
store.sql("UPDATE block SET search_block_id = ? WHERE block_id = ?",
(search_id, block_id))
store.cache_block(int(block_id), height, prev_id, search_id)
store.commit()
def add_fk_search_block_id(store):
add_constraint(store, "block", "fk1_search_block_id",
"FOREIGN KEY (search_block_id) REFERENCES block (block_id)")
def create_firstbits(store):
flag = store.config.get('use_firstbits')
if flag is None:
if store.args.use_firstbits is None:
store.log.info("use_firstbits not found, defaulting to false.")
store.config['use_firstbits'] = "false"
store.save_configvar("use_firstbits")
return
flag = "true" if store.args.use_firstbits else "false"
store.config['use_firstbits'] = flag
store.save_configvar("use_firstbits")
if flag == "true":
import firstbits
firstbits.create_firstbits(store)
def populate_firstbits(store):
if store.config['use_firstbits'] == "true":
import firstbits
firstbits.populate_firstbits(store)
def add_keep_scriptsig(store):
store.config['keep_scriptsig'] = "true"
store.save_configvar("keep_scriptsig")
def drop_satoshi_seconds_destroyed(store):
store.get_db().drop_column_if_exists("block_txin", "satoshi_seconds_destroyed")
def widen_blkfile_number(store):
data = store.selectall("""
SELECT datadir_id, dirname, blkfile_number, blkfile_offset, chain_id
FROM abe_tmp_datadir""")
store.get_db().drop_table_if_exists("datadir")
store.ddl("""CREATE TABLE datadir (
datadir_id NUMERIC(10) NOT NULL PRIMARY KEY,
dirname VARCHAR(2000) NOT NULL,
blkfile_number NUMERIC(8) NULL,
blkfile_offset NUMERIC(20) NULL,
chain_id NUMERIC(10) NULL
)""")
for row in data:
store.sql("""
INSERT INTO datadir (
datadir_id, dirname, blkfile_number, blkfile_offset, chain_id
) VALUES (?, ?, ?, ?, ?)""", row)
def add_datadir_loader(store):
store.sql("ALTER TABLE datadir ADD datadir_loader VARCHAR(100) NULL")
def add_chain_policy(store):
store.ddl("ALTER TABLE chain ADD chain_policy VARCHAR(255)")
def populate_chain_policy(store):
store.sql("UPDATE chain SET chain_policy = chain_name")
def add_chain_magic(store):
store.ddl("ALTER TABLE chain ADD chain_magic BINARY(4)")
def populate_chain_magic(store):
for chain_id, magic in store.selectall("""
SELECT chain.chain_id, magic.magic
FROM chain
JOIN magic ON (chain.magic_id = magic.magic_id)"""):
store.sql("UPDATE chain SET chain_magic = ? WHERE chain_id = ?",
(magic, chain_id))
def drop_policy(store):
for stmt in [
"ALTER TABLE chain DROP COLUMN policy_id",
"DROP TABLE policy"]:
try:
store.ddl(stmt)
        except store.dbmodule.DatabaseError as e:
            store.log.warning("Cleanup failed, ignoring: %s (%s)", stmt, e)
def drop_magic(store):
for stmt in [
"ALTER TABLE chain DROP COLUMN magic_id",
"DROP TABLE magic"]:
try:
store.ddl(stmt)
        except store.dbmodule.DatabaseError as e:
            store.log.warning("Cleanup failed, ignoring: %s (%s)", stmt, e)
def add_chain_decimals(store):
store.ddl("ALTER TABLE chain ADD chain_decimals NUMERIC(2)")
def insert_chain_novacoin(store):
import Chain
try:
store.insert_chain(Chain.create("NovaCoin"))
except Exception:
pass
def txin_detail_multisig(store):
store.get_db().drop_view_if_exists('txin_detail')
store.ddl("""
CREATE VIEW txin_detail AS SELECT
cc.chain_id,
cc.in_longest,
cc.block_id,
b.block_hash,
b.block_height,
block_tx.tx_pos,
tx.tx_id,
tx.tx_hash,
tx.tx_lockTime,
tx.tx_version,
tx.tx_size,
txin.txin_id,
txin.txin_pos,
txin.txout_id prevout_id""" + (""",
txin.txin_scriptSig,
txin.txin_sequence""" if store.keep_scriptsig else """,
NULL txin_scriptSig,
NULL txin_sequence""") + """,
prevout.txout_value txin_value,
prevout.txout_scriptPubKey txin_scriptPubKey,
pubkey.pubkey_id,
pubkey.pubkey_hash,
pubkey.pubkey
FROM chain_candidate cc
JOIN block b ON (cc.block_id = b.block_id)
JOIN block_tx ON (b.block_id = block_tx.block_id)
JOIN tx ON (tx.tx_id = block_tx.tx_id)
JOIN txin ON (tx.tx_id = txin.tx_id)
LEFT JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
LEFT JOIN pubkey
ON (prevout.pubkey_id = pubkey.pubkey_id)""")
def add_chain_script_addr_vers(store):
store.ddl("ALTER TABLE chain ADD chain_script_addr_vers VARBINARY(100) NULL")
def populate_chain_script_addr_vers(store):
def update(addr_vers, script_vers):
store.sql("UPDATE chain SET chain_script_addr_vers=? WHERE chain_address_version=?",
(store.binin(script_vers), store.binin(addr_vers)))
update('\x00', '\x05')
update('\x6f', '\xc4')
def create_multisig_pubkey(store):
store.ddl("""
CREATE TABLE multisig_pubkey (
multisig_id NUMERIC(26) NOT NULL,
pubkey_id NUMERIC(26) NOT NULL,
PRIMARY KEY (multisig_id, pubkey_id),
FOREIGN KEY (multisig_id) REFERENCES pubkey (pubkey_id),
FOREIGN KEY (pubkey_id) REFERENCES pubkey (pubkey_id)
)""")
def create_x_multisig_pubkey_multisig(store):
store.ddl("CREATE INDEX x_multisig_pubkey_pubkey ON multisig_pubkey (pubkey_id)")
def update_chain_policy(store):
store.sql("""
UPDATE chain
SET chain_policy = 'Sha256Chain'
WHERE chain_policy = chain_name
AND chain_name IN ('Weeds', 'BeerTokens', 'SolidCoin', 'ScTestnet', 'Worldcoin', 'Anoncoin')""")
def populate_multisig_pubkey(store):
store.init_chains()
store.log.info("Finding new address types.")
rows = store.selectall("""
SELECT txout_id, chain_id, txout_scriptPubKey
FROM txout_detail
WHERE pubkey_id IS NULL""")
count = 0
for txout_id, chain_id, db_script in rows:
script = store.binout(db_script)
pubkey_id = store.script_to_pubkey_id(store.get_chain_by_id(chain_id), script)
if pubkey_id > 0:
store.sql("UPDATE txout SET pubkey_id = ? WHERE txout_id = ?",
(pubkey_id, txout_id))
count += 1
store.commit()
store.log.info("Found %d", count)
sql_arg_names = (
'binary_type', 'max_varchar', 'ddl_implicit_commit',
'create_table_epilogue', 'sequence_type', 'limit_style',
'int_type', 'clob_type')
def abstract_sql(store):
for name in sql_arg_names:
store.sql("""
UPDATE configvar
SET configvar_name = ?
WHERE configvar_name = ?""", ('sql.' + name, name))
store.commit()
def add_unlinked_tx(store):
store.ddl("""
CREATE TABLE unlinked_tx (
tx_id NUMERIC(26) NOT NULL,
PRIMARY KEY (tx_id),
FOREIGN KEY (tx_id)
REFERENCES tx (tx_id)
)""")
def cleanup_unlinked_tx(store):
txcount = 0
for tx_id in store.selectall("""
SELECT t.tx_id
FROM tx t
LEFT JOIN block_tx bt ON (t.tx_id = bt.tx_id)
WHERE bt.tx_id IS NULL
"""):
store._clean_unlinked_tx(tx_id)
txcount += 1
store.commit()
store.log.info("Cleaned up %d unlinked transactions", txcount)
upgrades = [
('6', add_block_value_in),
('6.1', add_block_value_out),
('6.2', add_block_total_satoshis),
('6.3', add_block_total_seconds),
('6.4', add_block_satoshi_seconds),
('6.5', add_block_total_ss),
('6.6', add_satoshi_seconds_destroyed),
('6.7', add_cc_block_height),
('6.8', init_cc_block_height),
('6.9', index_cc_block_height),
('6.10', index_cc_block),
('6.11', create_block_txin),
('6.12', index_block_tx_tx),
('6.13', init_block_txin),
('6.14', init_block_value_in),
('6.15', init_block_value_out),
('6.16', init_block_totals),
('6.17', init_satoshi_seconds_destroyed),
('6.18', set_0_satoshi_seconds_destroyed),
('6.19', noop),
('6.20', index_block_nTime),
('6.21', replace_chain_summary),
('7', replace_chain_summary),
('7.1', index_block_tx_tx), # forgot to put in abe.py
('7.2', init_block_txin), # abe.py put bad data there.
('7.3', init_satoshi_seconds_destroyed),
('7.4', set_0_satoshi_seconds_destroyed),
('7.5', noop),
('7.6', drop_block_ss_columns),
('8', add_fk_block_txin_block_id),
('8.1', add_fk_block_txin_tx_id),
('8.2', add_fk_block_txin_out_block_id),
('8.3', add_chk_block_txin_out_block_id_nn),
('8.4', create_x_cc_block_id),
('9', reverse_binary_hashes),
('9.1', drop_x_cc_block_id),
('9.2', create_x_cc_block_height),
('10', create_txout_approx),
('11', add_fk_chain_candidate_block_id),
('12', create_configvar),
('12.1', configure),
('Abe13', populate_abe_sequences),
('Abe14', add_datadir_chain_id),
('Abe15', noop),
('Abe16', rescan_if_missed_blocks), # May be slow.
('Abe17', insert_missed_blocks),
('Abe17.1', repair_missed_blocks),
('Abe18', add_block_num_tx), # Seconds
('Abe18.1', add_block_ss_destroyed), # Seconds
('Abe18.2', init_block_tx_sums), # 5 minutes
('Abe18.3', replace_chain_summary), # Fast
('Abe19', config_ddl), # Fast
('Abe20', config_create_table_epilogue), # Fast
('Abe20.1', rename_abe_sequences_key), # Fast
('Abe21', create_x_txin_txout), # 25 seconds
('Abe22', save_datadir), # Fast
('Abe22.1', add_datadir_id), # Fast
('Abe22.2', drop_tmp_datadir), # Fast
('Abe23', config_clob), # Fast
('Abe24', clear_bad_addresses), # Fast
('Abe24.1', find_namecoin_addresses), # 2 minutes if you have Namecoin
('Abe25', create_abe_lock), # Fast
('Abe25.1', create_abe_lock_row), # Fast
('Abe25.2', insert_null_pubkey), # 1 second
('Abe25.3', set_netfee_pubkey_id), # Seconds
('Abe26', adjust_block_total_satoshis), # 1-3 minutes
('Abe26.1', init_block_satoshi_seconds), # 3-10 minutes
('Abe27', config_limit_style), # Fast
('Abe28', config_sequence_type), # Fast
# Should be okay back to here.
('Abe29', add_search_block_id), # Seconds
('Abe29.1', populate_search_block_id), # 1-2 minutes if using firstbits
('Abe29.2', add_fk_search_block_id), # Seconds
('Abe29.3', create_firstbits), # Fast
('Abe29.4', populate_firstbits), # Slow if config use_firstbits=true
('Abe30', add_keep_scriptsig), # Fast
('Abe31', drop_satoshi_seconds_destroyed), # Seconds
('Abe32', save_datadir), # Fast
('Abe32.1', widen_blkfile_number), # Fast
('Abe32.2', drop_tmp_datadir), # Fast
('Abe33', add_datadir_loader), # Fast
('Abe34', noop), # Fast
('Abe35', add_chain_policy), # Fast
('Abe35.1', populate_chain_policy), # Fast
('Abe35.2', add_chain_magic), # Fast
('Abe35.3', populate_chain_magic), # Fast
('Abe35.4', drop_policy), # Fast
('Abe35.5', drop_magic), # Fast
('Abe36', add_chain_decimals), # Fast
('Abe36.1', insert_chain_novacoin), # Fast
('Abe37', txin_detail_multisig), # Fast
('Abe37.1', add_chain_script_addr_vers), # Fast
('Abe37.2', populate_chain_script_addr_vers), # Fast
('Abe37.3', create_multisig_pubkey), # Fast
('Abe37.4', create_x_multisig_pubkey_multisig), # Fast
('Abe37.5', update_chain_policy), # Fast
('Abe37.6', populate_multisig_pubkey), # Minutes-hours
('Abe38', abstract_sql), # Fast
('Abe39', config_concat_style), # Fast
('Abe40', add_unlinked_tx), # Fast
('Abe40.1', cleanup_unlinked_tx), # Hours, could be done offline
('Abe41', None)
]
def upgrade_schema(store):
if 'sql.binary_type' not in store.config:
for name in sql_arg_names:
store.config['sql.' + name] = store.config[name]
del store.config[name]
store.init_sql()
run_upgrades(store, upgrades)
sv = store.config['schema_version']
curr = upgrades[-1][0]
if sv != curr:
raise Exception('Can not upgrade from schema version %s to %s\n'
% (sv, curr))
store.log.warning("Upgrade complete.")
if __name__ == '__main__':
print "Run Abe with --upgrade added to the usual arguments."
sys.exit(2)
| 44,689 | Python | .py | 1,090 | 32.069725 | 107 | 0.585204 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
9,428 | firstbits.py | bitcoin-abe_bitcoin-abe/Abe/firstbits.py |
#!/usr/bin/env python
# Copyright(C) 2011,2012 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
"""Reconfigure an Abe instance to use or not use Firstbits."""
def populate_firstbits(store):
blocks, fbs = 0, 0
log_incr = 1000
for addr_vers, block_id in store.selectall("""
SELECT c.chain_address_version,
cc.block_id
FROM chain c
JOIN chain_candidate cc ON (c.chain_id = cc.chain_id)
WHERE cc.block_height IS NOT NULL
ORDER BY cc.chain_id, cc.block_height"""):
fbs += store.do_vers_firstbits(addr_vers, int(block_id))
blocks += 1
if blocks % log_incr == 0:
store.commit()
store.log.info("%d firstbits in %d blocks" % (fbs, blocks))
if blocks % log_incr > 0:
store.commit()
store.log.info("%d firstbits in %d blocks" % (fbs, blocks))
def create_firstbits(store):
store.log.info("Creating firstbits table.")
store.ddl(
"""CREATE TABLE abe_firstbits (
pubkey_id NUMERIC(26) NOT NULL,
block_id NUMERIC(14) NOT NULL,
address_version BIT VARYING(80) NOT NULL,
firstbits VARCHAR(50) NOT NULL,
PRIMARY KEY (address_version, pubkey_id, block_id),
FOREIGN KEY (pubkey_id) REFERENCES pubkey (pubkey_id),
FOREIGN KEY (block_id) REFERENCES block (block_id)
)""")
store.ddl(
"""CREATE INDEX x_abe_firstbits
ON abe_firstbits (address_version, firstbits)""")
def drop_firstbits(store):
store.log.info("Dropping firstbits table.")
store.ddl("DROP TABLE abe_firstbits")
def reconfigure(store, args):
have = store.config['use_firstbits'] == "true"
want = args.use_firstbits
if have == want:
return
lock = store.get_lock()
try:
# XXX Should temporarily store a new schema_version.
if want:
create_firstbits(store)
populate_firstbits(store)
store.config['use_firstbits'] = "true"
else:
drop_firstbits(store)
store.config['use_firstbits'] = "false"
store.use_firstbits = want
store.save_configvar("use_firstbits")
store.commit()
finally:
store.release_lock(lock)
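# Minimal usage sketch (hypothetical, not part of the original module):
# reconfigure() only needs a store and an object exposing a boolean
# use_firstbits attribute, such as an argparse namespace:
#
#     import argparse
#     args = argparse.Namespace(use_firstbits=True)
#     reconfigure(store, args)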
| 2,936 | Python | .py | 72 | 33.652778 | 71 | 0.644709 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
9,429 | genesis_tx.py | bitcoin-abe_bitcoin-abe/Abe/genesis_tx.py |
# Copyright(C) 2013 by Abe developers.
# genesis_tx.py: known transactions unavailable through RPC for
# historical reasons: https://bitcointalk.org/index.php?topic=119530.0
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
def get(tx_hash_hex):
"""
Given the hexadecimal hash of the genesis transaction (as shown
by, e.g., "bitcoind getblock 0") return the hexadecimal raw
transaction. This works around a Bitcoind limitation described at
https://bitcointalk.org/index.php?topic=119530.0
"""
# Main Bitcoin chain:
if tx_hash_hex == "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b":
return "01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff4d04ffff001d0104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e206272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73ffffffff0100f2052a01000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000"
# NovaCoin:
if tx_hash_hex == "4cb33b3b6a861dcbc685d3e614a9cafb945738d6833f182855679f2fad02057b":
return "01000000398e1151010000000000000000000000000000000000000000000000000000000000000000ffffffff4d04ffff001d020f274468747470733a2f2f626974636f696e74616c6b2e6f72672f696e6465782e7068703f746f7069633d3133343137392e6d736731353032313936236d736731353032313936ffffffff0100000000000000000000000000"
# CryptoCash / CashCoin:
if tx_hash_hex == "c7e715851ef2eebd4a881c48f0d6140e187d8e8f417eaacb6c6e7ed6c462dbde":
return "010000006eb7dc52010000000000000000000000000000000000000000000000000000000000000000ffffffff7604ffff001d020f274c6c4a616e2032302c20323031342031323a3430616d204544542e204e65776567672074656173657220737567676573747320746865205553206f6e6c696e652072657461696c206769616e74206d617920626567696e20616363657074696e6720626974636f696e20736f6f6effffffff0100000000000000000000000000"
# Hirocoin
if tx_hash_hex == "b0019d92bc054f7418960c91e252e7d24c77719c7a30128c5f6a827c73095d2a":
return "01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff4f04ffff001d0104474a6170616e546f6461792031332f4d61722f323031342057617973206579656420746f206d616b6520706c616e65732065617369657220746f2066696e6420696e206f6365616effffffff0100902f50090000004341040184710fa689ad5023690c80f3a49c8f13f8d45b8c857fbcbc8bc4a8e4d3eb4b10f4d4604fa08dce601aaf0f470216fe1b51850b4acf21b179c45070ac7b03a9ac00000000"
# Bitleu
if tx_hash_hex == "30cbad942f9fe09d06cabc91773860a827f3625a72eb2ae830c2c8844ffb6de2":
return "01000000f8141e53010000000000000000000000000000000000000000000000000000000000000000ffffffff1904ffff001d020f27104269746c65752072656c61756e63682effffffff0100000000000000000000000000"
# Maxcoin
if tx_hash_hex == "f8cc3b46c273a488c318dc7d98cc053494af2871e495e17f5c7c246055e46af3": # XXX not sure that's right
return "01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff3c04ffff001d01043453686170652d7368696674696e6720736f66747761726520646566656e647320616761696e737420626f746e6574206861636b73ffffffff010065cd1d00000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000"
# Dash
if tx_hash_hex == "e0028eb9648db56b1ac77cf090b99048a8007e2bb64b68f092c03c7f56a662c7":
return "01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff6204ffff001d01044c5957697265642030392f4a616e2f3230313420546865204772616e64204578706572696d656e7420476f6573204c6976653a204f76657273746f636b2e636f6d204973204e6f7720416363657074696e6720426974636f696e73ffffffff0100f2052a010000004341040184710fa689ad5023690c80f3a49c8f13f8d45b8c857fbcbc8bc4a8e4d3eb4b10f4d4604fa08dce601aaf0f470216fe1b51850b4acf21b179c45070ac7b03a9ac00000000"
# BlackCoin
if tx_hash_hex == "12630d16a97f24b287c8c2594dda5fb98c9e6c70fc61d44191931ea2aa08dc90":
return "01000000e0df0a53010000000000000000000000000000000000000000000000000000000000000000ffffffff2800012a24323020466562203230313420426974636f696e2041544d7320636f6d6520746f20555341ffffffff0100000000000000000000000000"
# Unbreakablecoin
if tx_hash_hex == "e417a7bd4b5d0c0f27caba6bc16963c9dac23a970702336620cc71196f193dfb":
return "01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff4e05b1073383000104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e206272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73ffffffff010100000000000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000"
# Californium
if tx_hash_hex == "00000a99a373e0fd8209e0d19696855a3523cbc6bdd242745b0cf0640ed15eaf":
return "010000000000000000000000000000000000000000000000000000000000000000000000a8f1e781b4c530971ad4844474e95b3b7cf955eb8194ef461f7a737b18224b7f8efa5e54ffff0f1e88000a000101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff6304ffff001d01044c5a43616c69666f726e69756d206973206120726164696f616374697665206d6574616c6c6963206368656d6963616c20656c656d656e7420776974682073796d626f6c20436620616e642061746f6d6963206e756d626572203938ffffffff0100e1f50500000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000"
# Extract your chain's genesis transaction data from the first
# block file and add it here, or better yet, patch your coin's
# getrawtransaction to return it on request:
    #if tx_hash_hex == "<HASH>":
# return "<DATA>"
return None
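# Minimal command-line sketch (not part of the original module): print the raw
# transaction hex for a known genesis transaction hash, or exit non-zero when
# the hash is not in the table above.
if __name__ == '__main__':
    import sys
    raw = get(sys.argv[1]) if len(sys.argv) > 1 else None
    if raw is None:
        sys.stderr.write("usage: genesis_tx.py TX_HASH_HEX (known hash only)\n")
        sys.exit(1)
    sys.stdout.write(raw + "\n")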
| 6,396 | Python | .py | 59 | 103.59322 | 631 | 0.894654 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
9,430 | SqlAbstraction.py | bitcoin-abe_bitcoin-abe/Abe/SqlAbstraction.py |
# Copyright(C) 2011,2012,2013 by John Tobey <jtobey@john-edwin-tobey.org>
# sql.py: feature-detecting, SQL-transforming database abstraction layer
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
import re
import logging
MAX_SCRIPT = 1000000
MAX_PUBKEY = 65
NO_CLOB = 'BUG_NO_CLOB'
STMT_RE = re.compile(r"([^']+)((?:'[^']*')?)")
class SqlAbstraction(object):
"""
Database abstraction class based on DB-API 2 and standard SQL with
workarounds to support SQLite3, PostgreSQL/psycopg2, MySQL,
Oracle, ODBC, and IBM DB2.
"""
def __init__(sql, args):
sql.module = args.module
sql.connect_args = args.connect_args
sql.prefix = args.prefix
sql.config = args.config
sql.binary_type = args.binary_type
sql.int_type = args.int_type
sql.log = logging.getLogger(__name__)
sql.sqllog = logging.getLogger(__name__ + ".sql")
if not args.log_sql:
sql.sqllog.setLevel(logging.WARNING)
sql._conn = None
sql._cursor = None
sql.auto_reconnect = False
sql.in_transaction = False
sql._set_flavour()
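    # _set_flavour() (re)binds the binary, integer, sequence, limit, and concat
    # handling callables according to sql.config; the configure_*() methods
    # below call it again whenever they change a config value and re-test.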
def _set_flavour(sql):
def identity(x):
return x
transform = identity
transform_stmt = sql._transform_stmt
selectall = sql._selectall
if sql.module.paramstyle in ('format', 'pyformat'):
transform_stmt = sql._qmark_to_format(transform_stmt)
elif sql.module.paramstyle == 'named':
transform_stmt = sql._qmark_to_named(transform_stmt)
elif sql.module.paramstyle != 'qmark':
sql.log.warning("Database parameter style is "
"%s, trying qmark", sql.module.paramstyle)
pass
# Binary I/O with the database.
# Reversed versions exist for Bitcoin hashes; since the
# protocol treats them as 256-bit integers and represents them
# as little endian, we have to reverse them in hex to satisfy
# human expectations.
def rev(x):
return None if x is None else x[::-1]
def to_hex(x):
return None if x is None else str(x).encode('hex')
def from_hex(x):
return None if x is None else x.decode('hex')
def to_hex_rev(x):
return None if x is None else str(x)[::-1].encode('hex')
def from_hex_rev(x):
return None if x is None else x.decode('hex')[::-1]
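        # Illustration of the reversed helpers on a 3-byte value:
        #   to_hex_rev('\x12\x34\x56')  -> '563412'
        #   from_hex_rev('563412')      -> '\x12\x34\x56'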
val = sql.config.get('binary_type')
if val in (None, 'str', "binary"):
binin = identity
binin_hex = from_hex
binout = identity
binout_hex = to_hex
revin = rev
revin_hex = from_hex
revout = rev
revout_hex = to_hex
elif val in ("buffer", "bytearray", "pg-bytea"):
if val == "bytearray":
def to_btype(x):
return None if x is None else bytearray(x)
else:
def to_btype(x):
return None if x is None else buffer(x)
def to_str(x):
return None if x is None else str(x)
binin = to_btype
binin_hex = lambda x: to_btype(from_hex(x))
binout = to_str
binout_hex = to_hex
revin = lambda x: to_btype(rev(x))
revin_hex = lambda x: to_btype(from_hex(x))
revout = rev
revout_hex = to_hex
if val == "pg-bytea":
transform_stmt = sql._binary_as_bytea(transform_stmt)
elif val == "hex":
transform = sql._binary_as_hex(transform)
binin = to_hex
binin_hex = identity
binout = from_hex
binout_hex = identity
revin = to_hex_rev
revin_hex = identity
revout = from_hex_rev
revout_hex = identity
else:
raise Exception("Unsupported binary-type %s" % (val,))
val = sql.config.get('int_type')
if val in (None, 'int'):
intin = identity
elif val == 'decimal':
import decimal
def _intin(x):
return None if x is None else decimal.Decimal(x)
intin = _intin
elif val == 'str':
def _intin(x):
return None if x is None else str(x)
intin = _intin
# Work around sqlite3's integer overflow.
transform = sql._approximate(transform)
else:
raise Exception("Unsupported int-type %s" % (val,))
val = sql.config.get('sequence_type')
if val in (None, 'update'):
new_id = lambda key: sql._new_id_update(key)
create_sequence = lambda key: sql._create_sequence_update(key)
drop_sequence = lambda key: sql._drop_sequence_update(key)
elif val == 'mysql':
new_id = lambda key: sql._new_id_mysql(key)
create_sequence = lambda key: sql._create_sequence_mysql(key)
drop_sequence = lambda key: sql._drop_sequence_mysql(key)
else:
create_sequence = lambda key: sql._create_sequence(key)
drop_sequence = lambda key: sql._drop_sequence(key)
if val == 'oracle':
new_id = lambda key: sql._new_id_oracle(key)
elif val == 'nvf':
new_id = lambda key: sql._new_id_nvf(key)
elif val == 'postgres':
new_id = lambda key: sql._new_id_postgres(key)
elif val == 'db2':
new_id = lambda key: sql._new_id_db2(key)
create_sequence = lambda key: sql._create_sequence_db2(key)
else:
raise Exception("Unsupported sequence-type %s" % (val,))
# Convert Oracle LOB to str.
if hasattr(sql.module, "LOB") and isinstance(sql.module.LOB, type):
def fix_lob(fn):
def ret(x):
return None if x is None else fn(str(x))
return ret
binout = fix_lob(binout)
binout_hex = fix_lob(binout_hex)
val = sql.config.get('limit_style')
if val in (None, 'native'):
pass
elif val == 'emulated':
selectall = sql.emulate_limit(selectall)
val = sql.config.get('concat_style')
if val in (None, 'ansi'):
pass
elif val == 'mysql':
transform_stmt = sql._transform_concat(transform_stmt)
# Also squeeze in MySQL VARBINARY length fix
# Some MySQL version do not auto-convert to BLOB
transform_stmt = sql._transform_varbinary(transform_stmt)
transform_stmt = sql._append_table_epilogue(transform_stmt)
transform = sql._fallback_to_lob(transform)
transform = sql._fallback_to_approximate(transform)
sql.transform_chunk = transform
sql.transform_stmt = transform_stmt
sql.selectall = selectall
sql._cache = {}
sql.binin = binin
sql.binin_hex = binin_hex
sql.binout = binout
sql.binout_hex = binout_hex
sql.revin = revin
sql.revin_hex = revin_hex
sql.revout = revout
sql.revout_hex = revout_hex
# Might reimplement these someday...
def binout_int(x):
if x is None:
return None
return int(binout_hex(x), 16)
def binin_int(x, bits):
if x is None:
return None
return binin_hex(("%%0%dx" % (bits / 4)) % x)
sql.binout_int = binout_int
sql.binin_int = binin_int
sql.intin = intin
sql.new_id = new_id
sql.create_sequence = create_sequence
sql.drop_sequence = drop_sequence
def connect(sql):
cargs = sql.connect_args
if cargs is None:
conn = sql.module.connect()
else:
try:
conn = sql._connect(cargs)
except UnicodeError:
# Perhaps this driver needs its strings encoded.
# Python's default is ASCII. Let's try UTF-8, which
# should be the default anyway.
#import locale
#enc = locale.getlocale()[1] or locale.getdefaultlocale()[1]
enc = 'UTF-8'
def to_utf8(obj):
if isinstance(obj, dict):
for k in obj.keys():
obj[k] = to_utf8(obj[k])
if isinstance(obj, list):
return map(to_utf8, obj)
if isinstance(obj, unicode):
return obj.encode(enc)
return obj
conn = sql._connect(to_utf8(cargs))
sql.log.info("Connection required conversion to UTF-8")
return conn
def _connect(sql, cargs):
if isinstance(cargs, dict):
if "" in cargs:
cargs = cargs.copy()
nkwargs = cargs[""]
del(cargs[""])
if isinstance(nkwargs, list):
return sql.module.connect(*nkwargs, **cargs)
return sql.module.connect(nkwargs, **cargs)
else:
return sql.module.connect(**cargs)
if isinstance(cargs, list):
return sql.module.connect(*cargs)
return sql.module.connect(cargs)
def conn(sql):
if sql._conn is None:
sql._conn = sql.connect()
return sql._conn
def cursor(sql):
if sql._cursor is None:
sql._cursor = sql.conn().cursor()
return sql._cursor
def rowcount(sql):
return sql.cursor().rowcount
def reconnect(sql):
sql.log.info("Reconnecting to database.")
try:
sql.close()
except Exception:
pass
return sql.conn()
# Run transform_chunk on each chunk between string literals.
def _transform_stmt(sql, stmt):
def transform_chunk(match):
return sql.transform_chunk(match.group(1)) + match.group(2)
return STMT_RE.sub(transform_chunk, stmt)
# Convert standard placeholders to Python "format" style.
def _qmark_to_format(sql, fn):
def ret(stmt):
return fn(stmt.replace('%', '%%').replace("?", "%s"))
return ret
# Convert standard placeholders to Python "named" style.
def _qmark_to_named(sql, fn):
patt = re.compile(r"\?")
def ret(stmt):
i = [0]
def newname(match):
i[0] += 1
return ":p%d" % (i[0],)
def transform_chunk(match):
return patt.sub(newname, match.group(1)) + match.group(2)
return fn(STMT_RE.sub(transform_chunk, stmt))
return ret
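    # Example of the placeholder rewrites above (applied outside string
    # literals):
    #   "... WHERE a = ? AND b = ?" -> "... WHERE a = %s AND b = %s"  (format)
    #   "... WHERE a = ? AND b = ?" -> "... WHERE a = :p1 AND b = :p2" (named)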
# Convert the standard BINARY type to a hex string for databases
# and drivers that don't support BINARY.
def _binary_as_hex(sql, fn):
patt = re.compile(r"\b((?:VAR)?)BINARY\s*\(\s*([0-9]+)\s*\)")
x_patt = re.compile(r"X\z")
def fixup(match):
return (match.group(1) + "CHAR(" +
str(int(match.group(2)) * 2) + ")")
def ret(chunk):
return fn(x_patt.sub("", patt.sub(fixup, chunk)))
return ret
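    # Example: "VARBINARY(32)" becomes "VARCHAR(64)" (two hex digits per byte),
    # and the trailing X of an X'00ff' literal is dropped so the value is
    # stored as the plain hex string '00ff'.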
# Convert the standard BINARY type to the PostgreSQL BYTEA type.
def _binary_as_bytea(sql, fn):
type_patt = re.compile("((?:VAR)?)BINARY\\(([0-9]+)\\)")
lit_patt = re.compile("X'((?:[0-9a-fA-F][0-9a-fA-F])*)'")
def ret(stmt):
def transform_chunk(match):
ret = type_patt.sub("BYTEA", match.group(1))
if match.group(1).endswith('X') and match.group(2) != '':
ret = ret[:-1] + "'"
for i in match.group(2)[1:-1].decode('hex'):
ret += r'\\%03o' % ord(i)
ret += "'::bytea"
else:
ret += match.group(2)
return ret
return fn(STMT_RE.sub(transform_chunk, stmt))
return ret
# Converts VARCHAR types that are too long to CLOB or similar.
def _fallback_to_lob(sql, fn):
if sql.config.get('max_varchar') is None:
return fn
max_varchar = int(sql.config['max_varchar'])
if sql.config.get('clob_type') is None:
return fn
clob_type = sql.config['clob_type']
patt = re.compile("VARCHAR\\(([0-9]+)\\)")
def fixup(match):
width = int(match.group(1))
if width > max_varchar and clob_type != NO_CLOB:
return clob_type
return match.group()
def ret(stmt):
return fn(patt.sub(fixup, stmt))
return ret
# Convert high-precision NUMERIC and DECIMAL types to DOUBLE PRECISION
# to avoid integer overflow with SQLite.
def _fallback_to_approximate(sql, fn):
if sql.config.get('max_precision', "") == "":
return fn
max_precision = int(sql.config['max_precision'])
patt = re.compile(
r"\b(?:NUMERIC|DECIMAL)\s*\(\s*([0-9]+)\s*(?:,.*?)?\)")
def fixup(match):
precision = int(match.group(1))
if precision > max_precision:
return "DOUBLE PRECISION"
return match.group()
def ret(stmt):
return fn(patt.sub(fixup, stmt))
return ret
def _approximate(store, fn):
def repl(match):
return 'CAST(' + match.group(1) + match.group(2) + ' AS DOUBLE PRECISION) ' \
+ match.group(1) + '_approx' + match.group(2)
def ret(stmt):
return fn(re.sub(r'\b(\w+)(\w*) \1_approx\2\b', repl, stmt))
return ret
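    # Example: the txout_approx view's "txout_value txout_approx_value" becomes
    # "CAST(txout_value AS DOUBLE PRECISION) txout_approx_value", working
    # around sqlite3 integer overflow when such columns are aggregated.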
def emulate_limit(sql, selectall):
limit_re = re.compile(r"(.*)\bLIMIT\s+(\?|\d+)\s*\Z", re.DOTALL)
def ret(stmt, params=()):
match = limit_re.match(sql.transform_stmt_cached(stmt))
if match:
if match.group(2) == '?':
n = params[-1]
params = params[:-1]
else:
n = int(match.group(2))
sql.cursor().execute(match.group(1), params)
return [ sql.cursor().fetchone() for i in xrange(n) ]
return selectall(stmt, params)
return ret
def _transform_concat(sql, fn):
concat_re = re.compile(r"((?:(?:'[^']*'|\?)\s*\|\|\s*)+(?:'[^']*'|\?))", re.DOTALL)
def repl(match):
clist = re.sub(r"\s*\|\|\s*", ", ", match.group(1))
return 'CONCAT(' + clist + ')'
def ret(stmt):
return fn(concat_re.sub(repl, stmt))
return ret
def _transform_varbinary(sql, fn):
        varbinary_re = re.compile(r"VARBINARY\(" + str(MAX_SCRIPT) + r"\)")
def ret(stmt):
# Suitable for prefix+length up to 16,777,215 (2^24 - 1)
return fn(varbinary_re.sub("MEDIUMBLOB", stmt))
return ret
def _append_table_epilogue(sql, fn):
epilogue = sql.config.get('create_table_epilogue', "")
if epilogue == "":
return fn
patt = re.compile(r"\s*CREATE\s+TABLE\b")
def ret(stmt):
if patt.match(stmt):
stmt += epilogue
return fn(stmt)
return ret
def transform_stmt_cached(sql, stmt):
cached = sql._cache.get(stmt)
if cached is None:
cached = sql.transform_stmt(stmt)
sql._cache[stmt] = cached
return cached
def _execute(sql, stmt, params):
try:
sql.cursor().execute(stmt, params)
except (sql.module.OperationalError, sql.module.InternalError, sql.module.ProgrammingError) as e:
if sql.in_transaction or not sql.auto_reconnect:
raise
sql.log.warning("Replacing possible stale cursor: %s", e)
try:
sql.reconnect()
except Exception:
sql.log.exception("Failed to reconnect")
raise e
sql.cursor().execute(stmt, params)
def sql(sql, stmt, params=()):
cached = sql.transform_stmt_cached(stmt)
sql.sqllog.info("EXEC: %s %r", cached, params)
try:
sql._execute(cached, params)
except Exception as e:
sql.sqllog.info("EXCEPTION: %s", e)
raise
finally:
sql.in_transaction = True
def ddl(sql, stmt):
stmt = sql.transform_stmt(stmt)
sql.sqllog.info("DDL: %s", stmt)
try:
sql.cursor().execute(stmt)
except Exception as e:
sql.sqllog.info("EXCEPTION: %s", e)
raise
if sql.config.get('ddl_implicit_commit') == 'false':
sql.commit()
else:
sql.in_transaction = False
def selectrow(sql, stmt, params=()):
sql.sql(stmt, params)
ret = sql.cursor().fetchone()
sql.sqllog.debug("FETCH: %s", ret)
return ret
def _selectall(sql, stmt, params=()):
sql.sql(stmt, params)
ret = sql.cursor().fetchall()
sql.sqllog.debug("FETCHALL: %s", ret)
return ret
def _new_id_update(sql, key):
"""
Allocate a synthetic identifier by updating a table.
"""
while True:
row = sql.selectrow("SELECT nextid FROM %ssequences WHERE sequence_key = ?" % (sql.prefix), (key,))
if row is None:
raise Exception("Sequence %s does not exist" % key)
ret = row[0]
sql.sql("UPDATE %ssequences SET nextid = nextid + 1"
" WHERE sequence_key = ? AND nextid = ?" % sql.prefix,
(key, ret))
if sql.cursor().rowcount == 1:
return ret
            sql.log.info('Contention on %ssequences %s:%d',
                         sql.prefix, key, ret)
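            # Reaching this line means another connection claimed the id first
            # (rowcount was not 1); loop and retry with a freshly read nextid.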
def _get_sequence_initial_value(sql, key):
(ret,) = sql.selectrow("SELECT MAX(" + key + "_id) FROM " + key)
ret = 1 if ret is None else ret + 1
return ret
def _create_sequence_update(sql, key):
sql.commit()
ret = sql._get_sequence_initial_value(key)
try:
sql.sql("INSERT INTO %ssequences (sequence_key, nextid)"
" VALUES (?, ?)" % sql.prefix, (key, ret))
except sql.module.DatabaseError as e:
sql.rollback()
try:
sql.ddl("""CREATE TABLE %ssequences (
sequence_key VARCHAR(100) NOT NULL PRIMARY KEY,
nextid NUMERIC(30)
)""" % sql.prefix)
except Exception:
sql.rollback()
raise e
sql.sql("INSERT INTO %ssequences (sequence_key, nextid)"
" VALUES (?, ?)" % sql.prefix, (key, ret))
def _drop_sequence_update(sql, key):
sql.commit()
sql.sql("DELETE FROM %ssequences WHERE sequence_key = ?" % sql.prefix,
(key,))
sql.commit()
def _new_id_oracle(sql, key):
(ret,) = sql.selectrow("SELECT " + key + "_seq.NEXTVAL FROM DUAL")
return ret
def _create_sequence(sql, key):
sql.ddl("CREATE SEQUENCE %s_seq START WITH %d"
% (key, sql._get_sequence_initial_value(key)))
def _drop_sequence(sql, key):
sql.ddl("DROP SEQUENCE %s_seq" % (key,))
def _new_id_nvf(sql, key):
(ret,) = sql.selectrow("SELECT NEXT VALUE FOR " + key + "_seq")
return ret
def _new_id_postgres(sql, key):
(ret,) = sql.selectrow("SELECT NEXTVAL('" + key + "_seq')")
return ret
def _create_sequence_db2(sql, key):
sql.commit()
try:
rows = sql.selectall("SELECT 1 FROM %sdual" % sql.prefix)
if len(rows) != 1:
sql.sql("INSERT INTO %sdual(x) VALUES ('X')" % sql.prefix)
except sql.module.DatabaseError as e:
sql.rollback()
sql.drop_table_if_exists('%sdual' % sql.prefix)
sql.ddl("CREATE TABLE %sdual (x CHAR(1))" % sql.prefix)
sql.sql("INSERT INTO %sdual(x) VALUES ('X')" % sql.prefix)
sql.log.info("Created silly table %sdual" % sql.prefix)
sql._create_sequence(key)
def _new_id_db2(sql, key):
(ret,) = sql.selectrow("SELECT NEXTVAL FOR " + key + "_seq"
" FROM %sdual" % sql.prefix)
return ret
def _create_sequence_mysql(sql, key):
sql.ddl("CREATE TABLE %s_seq (id BIGINT AUTO_INCREMENT PRIMARY KEY)"
" AUTO_INCREMENT=%d"
% (key, sql._get_sequence_initial_value(key)))
def _drop_sequence_mysql(sql, key):
sql.ddl("DROP TABLE %s_seq" % (key,))
def _new_id_mysql(sql, key):
sql.sql("INSERT INTO " + key + "_seq () VALUES ()")
(ret,) = sql.selectrow("SELECT LAST_INSERT_ID()")
if ret % 1000 == 0:
sql.sql("DELETE FROM " + key + "_seq WHERE id < ?", (ret,))
return ret
def commit(sql):
sql.sqllog.info("COMMIT")
sql.conn().commit()
sql.in_transaction = False
def rollback(sql):
if sql.module is None:
return
sql.sqllog.info("ROLLBACK")
try:
sql.conn().rollback()
sql.in_transaction = False
except sql.module.OperationalError as e:
sql.log.warning("Reconnecting after rollback error: %s", e)
sql.reconnect()
def close(sql):
conn = sql._conn
if conn is not None:
sql.sqllog.info("CLOSE")
conn.close()
sql._conn = None
sql._cursor = None
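# Capability probing: each configure_* helper tries candidate settings in
# turn and keeps the first one whose matching _test_* probe succeeds (for
# example, sequence_type is tried as nvf, oracle, postgres, mysql, db2,
# then the generic "update" fallback).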
def configure(sql):
sql.configure_ddl_implicit_commit()
sql.configure_create_table_epilogue()
sql.configure_max_varchar()
sql.configure_max_precision()
sql.configure_clob_type()
sql.configure_binary_type()
sql.configure_int_type()
sql.configure_sequence_type()
sql.configure_limit_style()
sql.configure_concat_style()
return sql.config
def configure_binary_type(sql):
defaults = (['binary', 'bytearray', 'buffer', 'hex', 'pg-bytea']
if sql.binary_type is None else
[ sql.binary_type ])
tests = (defaults
if sql.config.get('binary_type') is None else
[ sql.config['binary_type'] ])
for val in tests:
sql.config['binary_type'] = val
sql._set_flavour()
if sql._test_binary_type():
sql.log.info("binary_type=%s", val)
return
raise Exception(
"No known binary data representation works"
if len(tests) > 1 else
"Binary type " + tests[0] + " fails test")
def configure_int_type(sql):
defaults = (['int', 'decimal', 'str']
if sql.int_type is None else
[ sql.int_type ])
tests = (defaults if sql.config.get('int_type') is None else
[ sql.config['int_type'] ])
for val in tests:
sql.config['int_type'] = val
sql._set_flavour()
if sql._test_int_type():
sql.log.info("int_type=%s", val)
return
raise Exception(
"No known large integer representation works"
if len(tests) > 1 else
"Integer type " + tests[0] + " fails test")
def configure_sequence_type(sql):
for val in ['nvf', 'oracle', 'postgres', 'mysql', 'db2', 'update']:
sql.config['sequence_type'] = val
sql._set_flavour()
if sql._test_sequence_type():
sql.log.info("sequence_type=%s", val)
return
raise Exception("No known sequence type works")
def _drop_if_exists(sql, otype, name):
try:
sql.sql("DROP " + otype + " " + name)
sql.commit()
except sql.module.DatabaseError:
sql.rollback()
def drop_table_if_exists(sql, obj):
sql._drop_if_exists("TABLE", obj)
def drop_view_if_exists(sql, obj):
sql._drop_if_exists("VIEW", obj)
def drop_sequence_if_exists(sql, key):
try:
sql.drop_sequence(key)
except sql.module.DatabaseError:
sql.rollback()
def drop_column_if_exists(sql, table, column):
try:
sql.ddl("ALTER TABLE " + table + " DROP COLUMN " + column)
except sql.module.DatabaseError:
sql.rollback()
def configure_ddl_implicit_commit(sql):
if 'create_table_epilogue' not in sql.config:
sql.config['create_table_epilogue'] = ''
for val in ['true', 'false']:
sql.config['ddl_implicit_commit'] = val
sql._set_flavour()
if sql._test_ddl():
sql.log.info("ddl_implicit_commit=%s", val)
return
raise Exception("Can not test for DDL implicit commit.")
def _test_ddl(sql):
"""Test whether DDL performs implicit commit."""
sql.drop_table_if_exists("%stest_1" % sql.prefix)
try:
sql.ddl(
"CREATE TABLE %stest_1 ("
" %stest_1_id NUMERIC(12) NOT NULL PRIMARY KEY,"
" foo VARCHAR(10))" % (sql.prefix, sql.prefix))
sql.rollback()
sql.selectall("SELECT MAX(%stest_1_id) FROM %stest_1"
% (sql.prefix, sql.prefix))
return True
except sql.module.DatabaseError as e:
sql.rollback()
return False
except Exception:
sql.rollback()
return False
finally:
sql.drop_table_if_exists("%stest_1" % sql.prefix)
def configure_create_table_epilogue(sql):
for val in ['', ' ENGINE=InnoDB']:
sql.config['create_table_epilogue'] = val
sql._set_flavour()
if sql._test_transaction():
sql.log.info("create_table_epilogue='%s'", val)
return
raise Exception("Can not create a transactional table.")
def _test_transaction(sql):
"""Test whether CREATE TABLE needs ENGINE=InnoDB for rollback."""
sql.drop_table_if_exists("%stest_1" % sql.prefix)
try:
sql.ddl("CREATE TABLE %stest_1 (a NUMERIC(12))" % sql.prefix)
sql.sql("INSERT INTO %stest_1 (a) VALUES (4)" % sql.prefix)
sql.commit()
sql.sql("INSERT INTO %stest_1 (a) VALUES (5)" % sql.prefix)
sql.rollback()
data = [int(row[0]) for row in sql.selectall(
"SELECT a FROM %stest_1" % sql.prefix)]
return data == [4]
except sql.module.DatabaseError as e:
sql.rollback()
return False
except Exception as e:
sql.rollback()
return False
finally:
sql.drop_table_if_exists("%stest_1" % sql.prefix)
def configure_max_varchar(sql):
"""Find the maximum VARCHAR width, up to 0xffffffff"""
lo = 0
hi = 1 << 32
mid = hi - 1
sql.config['max_varchar'] = str(mid)
sql.drop_table_if_exists("%stest_1" % sql.prefix)
while True:
sql.drop_table_if_exists("%stest_1" % sql.prefix)
try:
sql.ddl("""CREATE TABLE %stest_1
(a VARCHAR(%d), b VARCHAR(%d))"""
% (sql.prefix, mid, mid))
sql.sql("INSERT INTO %stest_1 (a, b) VALUES ('x', 'y')"
% sql.prefix)
row = sql.selectrow("SELECT a, b FROM %stest_1"
% sql.prefix)
if [x for x in row] == ['x', 'y']:
lo = mid
else:
hi = mid
except sql.module.DatabaseError as e:
sql.rollback()
hi = mid
except Exception as e:
sql.rollback()
hi = mid
if lo + 1 == hi:
sql.config['max_varchar'] = str(lo)
sql.log.info("max_varchar=%s", sql.config['max_varchar'])
break
mid = (lo + hi) / 2
sql.drop_table_if_exists("%stest_1" % sql.prefix)
def configure_max_precision(sql):
sql.config['max_precision'] = "" # XXX
def configure_clob_type(sql):
"""Find the name of the CLOB type, if any."""
long_str = 'x' * 10000
sql.drop_table_if_exists("%stest_1" % sql.prefix)
for val in ['CLOB', 'LONGTEXT', 'TEXT', 'LONG']:
try:
sql.ddl("CREATE TABLE %stest_1 (a %s)" % (sql.prefix, val))
sql.sql("INSERT INTO %stest_1 (a) VALUES (?)" % sql.prefix, (sql.binin(long_str),))
out = sql.selectrow("SELECT a FROM %stest_1" % sql.prefix)[0]
if sql.binout(out) == long_str:
sql.config['clob_type'] = val
sql.log.info("clob_type=%s", val)
return
else:
sql.log.debug("out=%s", repr(out))
except sql.module.DatabaseError as e:
sql.rollback()
except Exception as e:
try:
sql.rollback()
except Exception:
# Fetching a CLOB really messes up Easysoft ODBC Oracle.
sql.reconnect()
raise
finally:
sql.drop_table_if_exists("%stest_1" % sql.prefix)
sql.log.info("No native type found for CLOB.")
sql.config['clob_type'] = NO_CLOB
def _test_binary_type(sql):
sql.drop_table_if_exists("%stest_1" % sql.prefix)
try:
# XXX The 10000 should be configurable: max_desired_binary?
sql.ddl("""
CREATE TABLE %stest_1 (
test_id NUMERIC(2) NOT NULL PRIMARY KEY,
test_bit BINARY(32),
test_varbit VARBINARY(10000))""" % sql.prefix)
val = str(''.join(map(chr, range(0, 256, 8))))
sql.sql("INSERT INTO %stest_1 (test_id, test_bit, test_varbit)"
" VALUES (?, ?, ?)" % sql.prefix,
(1, sql.revin(val), sql.binin(val)))
(bit, vbit) = sql.selectrow("SELECT test_bit, test_varbit FROM %stest_1" % sql.prefix)
if sql.revout(bit) != val:
return False
if sql.binout(vbit) != val:
return False
return True
except sql.module.DatabaseError as e:
sql.rollback()
return False
except Exception as e:
sql.rollback()
return False
finally:
sql.drop_table_if_exists("%stest_1" % sql.prefix)
def _test_int_type(sql):
sql.drop_view_if_exists("%stest_v1" % sql.prefix)
sql.drop_table_if_exists("%stest_1" % sql.prefix)
try:
sql.ddl("""
CREATE TABLE %stest_1 (
test_id NUMERIC(2) NOT NULL PRIMARY KEY,
i1 NUMERIC(28), i2 NUMERIC(28), i3 NUMERIC(28))""" % sql.prefix)
# XXX No longer needed?
sql.ddl("""
CREATE VIEW %stest_v1 AS
SELECT test_id,
i1 i1_approx,
i1,
i2
FROM %stest_1""" % (sql.prefix, sql.prefix))
v1 = 2099999999999999
v2 = 1234567890
v3 = 12345678901234567890L
sql.sql("INSERT INTO %stest_1 (test_id, i1, i2, i3)"
" VALUES (?, ?, ?, ?)" % sql.prefix,
(1, sql.intin(v1), v2, sql.intin(v3)))
sql.commit()
prod, o1 = sql.selectrow("SELECT i1_approx * i2, i1 FROM %stest_v1" % sql.prefix)
prod = int(prod)
o1 = int(o1)
if prod < v1 * v2 * 1.0001 and prod > v1 * v2 * 0.9999 and o1 == v1:
return True
return False
except sql.module.DatabaseError as e:
sql.rollback()
return False
except Exception as e:
sql.rollback()
return False
finally:
sql.drop_view_if_exists("%stest_v1" % sql.prefix)
sql.drop_table_if_exists("%stest_1" % sql.prefix)
def _test_sequence_type(sql):
sql.drop_table_if_exists("%stest_1" % sql.prefix)
sql.drop_sequence_if_exists("%stest_1" % sql.prefix)
try:
sql.ddl("""
CREATE TABLE %stest_1 (
%stest_1_id NUMERIC(12) NOT NULL PRIMARY KEY,
foo VARCHAR(10)
)""" % (sql.prefix, sql.prefix))
sql.create_sequence('%stest_1' % sql.prefix)
id1 = sql.new_id('%stest_1' % sql.prefix)
id2 = sql.new_id('%stest_1' % sql.prefix)
if int(id1) != int(id2):
return True
return False
except sql.module.DatabaseError as e:
sql.rollback()
return False
except Exception as e:
sql.rollback()
return False
finally:
sql.drop_table_if_exists("%stest_1" % sql.prefix)
try:
sql.drop_sequence("%stest_1" % sql.prefix)
except sql.module.DatabaseError:
sql.rollback()
def configure_limit_style(sql):
for val in ['native', 'emulated']:
sql.config['limit_style'] = val
sql._set_flavour()
if sql._test_limit_style():
sql.log.info("limit_style=%s", val)
return
raise Exception("Can not emulate LIMIT.")
def _test_limit_style(sql):
sql.drop_table_if_exists("%stest_1" % sql.prefix)
try:
sql.ddl("""
CREATE TABLE %stest_1 (
%stest_1_id NUMERIC(12) NOT NULL PRIMARY KEY
)""" % (sql.prefix, sql.prefix))
for id in (2, 4, 6, 8):
sql.sql("INSERT INTO %stest_1 (%stest_1_id) VALUES (?)"
% (sql.prefix, sql.prefix),
(id,))
rows = sql.selectall("""
SELECT %stest_1_id FROM %stest_1 ORDER BY %stest_1_id
LIMIT 3""" % (sql.prefix, sql.prefix, sql.prefix))
return [int(row[0]) for row in rows] == [2, 4, 6]
except sql.module.DatabaseError as e:
sql.rollback()
return False
except Exception as e:
sql.rollback()
return False
finally:
sql.drop_table_if_exists("%stest_1" % sql.prefix)
def configure_concat_style(sql):
for val in ['ansi', 'mysql']:
sql.config['concat_style'] = val
sql._set_flavour()
if sql._test_concat_style():
sql.log.info("concat_style=%s", val)
return
raise Exception("Can not find suitable concatenation style.")
def _test_concat_style(sql):
try:
rows = sql.selectall("""
SELECT 'foo' || ? || 'baz' AS String1,
? || 'foo' || ? AS String2
""", ('bar', 'baz', 'bar'));
sql.log.info(str(rows))
if rows[0][0] == 'foobarbaz' and rows[0][1] == 'bazfoobar':
return True
except Exception as e:
pass
sql.rollback()
return False
| 36,459 | Python | .py | 883 | 29.275198 | 111 | 0.525232 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,431 | abe.py | bitcoin-abe_bitcoin-abe/Abe/abe.py |
#!/usr/bin/env python
# Copyright(C) 2011,2012,2013,2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
import sys
import os
import optparse
import re
from cgi import escape
import posixpath
import wsgiref.util
import time
import calendar
import math
import logging
import json
import version
import DataStore
import readconf
# bitcointools -- modified deserialize.py to return raw transaction
import deserialize
import util # Added functions.
import base58
__version__ = version.__version__
ABE_APPNAME = "Abe"
ABE_VERSION = __version__
ABE_URL = 'https://github.com/bitcoin-abe/bitcoin-abe'
COPYRIGHT_YEARS = '2011'
COPYRIGHT = "Abe developers"
COPYRIGHT_URL = 'https://github.com/bitcoin-abe'
DONATIONS_BTC = '1PWC7PNHL1SgvZaN7xEtygenKjWobWsCuf'
DONATIONS_NMC = 'NJ3MSELK1cWnqUa6xhF2wUYAnz3RSrWXcK'
TIME1970 = time.strptime('1970-01-01','%Y-%m-%d')
EPOCH1970 = calendar.timegm(TIME1970)
# Abe-generated content should all be valid HTML and XHTML fragments.
# Configurable templates may contain either. HTML seems better supported
# under Internet Explorer.
DEFAULT_CONTENT_TYPE = "text/html; charset=utf-8"
DEFAULT_HOMEPAGE = "chains"
DEFAULT_TEMPLATE = """
<!DOCTYPE html>
<html lang="en">
<head>
<link rel="stylesheet" type="text/css"
href="%(dotdot)s%(STATIC_PATH)sabe.css" />
<link rel="shortcut icon" href="%(dotdot)s%(STATIC_PATH)sfavicon.ico" />
<title>%(title)s</title>
</head>
<body>
<h1><a href="%(dotdot)s%(HOMEPAGE)s"><img
src="%(dotdot)s%(STATIC_PATH)slogo32.png" alt="Abe logo" /></a> %(h1)s
</h1>
%(body)s
<p><a href="%(dotdot)sq">API</a> (machine-readable pages)</p>
<p style="font-size: smaller">
<span style="font-style: italic">
Powered by <a href="%(ABE_URL)s">%(APPNAME)s</a>
</span>
%(download)s
Tips appreciated!
<a href="%(dotdot)saddress/%(DONATIONS_BTC)s">BTC</a>
<a href="%(dotdot)saddress/%(DONATIONS_NMC)s">NMC</a>
</p>
</body>
</html>
"""
DEFAULT_LOG_FORMAT = "%(message)s"
DEFAULT_DECIMALS = 8
# It is fun to change "6" to "3" and search lots of addresses.
ADDR_PREFIX_RE = re.compile('[1-9A-HJ-NP-Za-km-z]{6,}\\Z')
HEIGHT_RE = re.compile('(?:0|[1-9][0-9]*)\\Z')
HASH_PREFIX_RE = re.compile('[0-9a-fA-F]{0,64}\\Z')
HASH_PREFIX_MIN = 6
NETHASH_HEADER = """\
blockNumber: height of last block in interval + 1
time: block time in seconds since 0h00 1 Jan 1970 UTC
target: decimal target at blockNumber
avgTargetSinceLast: harmonic mean of target over interval
difficulty: difficulty at blockNumber
hashesToWin: expected number of hashes needed to solve a block at this difficulty
avgIntervalSinceLast: interval seconds divided by blocks
netHashPerSecond: estimated network hash rate over interval
Statistical values are approximate and differ slightly from http://blockexplorer.com/q/nethash.
/chain/CHAIN/q/nethash[/INTERVAL[/START[/STOP]]]
Default INTERVAL=144, START=0, STOP=infinity.
Negative values back from the last block.
Append ?format=json to URL for headerless, JSON output.
blockNumber,time,target,avgTargetSinceLast,difficulty,hashesToWin,avgIntervalSinceLast,netHashPerSecond
START DATA
"""
NETHASH_SVG_TEMPLATE = """\
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:abe="http://abe.bit/abe"
viewBox="0 0 300 200"
preserveAspectRatio="none"
onload="Abe.draw(this)">
<style>
#chart polyline {
stroke-width: 0.1%%;
fill-opacity: 0;
stroke-opacity: 0.5;
}
</style>
<script type="application/ecmascript"
xlink:href="%(dotdot)s%(STATIC_PATH)snethash.js"/>
<g id="chart">
<polyline abe:window="1d" style="stroke: red;"/>
<polyline abe:window="3d" style="stroke: orange;"/>
<polyline abe:window="7d" style="stroke: yellow;"/>
<polyline abe:window="14d" style="stroke: green;"/>
<polyline abe:window="30d" style="stroke: blue;"/>
%(body)s
</g>
</svg>
"""
# How many addresses to accept in /unspent/ADDR|ADDR|...
MAX_UNSPENT_ADDRESSES = 200
def make_store(args):
store = DataStore.new(args)
if (not args.no_load):
store.catch_up()
return store
class NoSuchChainError(Exception):
"""Thrown when a chain lookup fails"""
class PageNotFound(Exception):
"""Thrown when code wants to return 404 Not Found"""
class Redirect(Exception):
"""Thrown when code wants to redirect the request"""
class Streamed(Exception):
"""Thrown when code has written the document to the callable
returned by start_response."""
class Abe:
def __init__(abe, store, args):
abe.store = store
abe.args = args
abe.htdocs = args.document_root or find_htdocs()
abe.static_path = '' if args.static_path is None else args.static_path
abe.template_vars = args.template_vars.copy()
abe.template_vars['STATIC_PATH'] = (
abe.template_vars.get('STATIC_PATH', abe.static_path))
abe.template = flatten(args.template)
abe.debug = args.debug
abe.log = logging.getLogger(__name__)
abe.log.info('Abe initialized.')
abe.home = str(abe.template_vars.get("HOMEPAGE", DEFAULT_HOMEPAGE))
if not args.auto_agpl:
abe.template_vars['download'] = (
abe.template_vars.get('download', ''))
abe.base_url = args.base_url
abe.address_history_rows_max = int(
args.address_history_rows_max or 1000)
if args.shortlink_type is None:
abe.shortlink_type = ("firstbits" if store.use_firstbits else
"non-firstbits")
else:
abe.shortlink_type = args.shortlink_type
if abe.shortlink_type != "firstbits":
abe.shortlink_type = int(abe.shortlink_type)
if abe.shortlink_type < 2:
raise ValueError("shortlink-type: 2 character minimum")
elif not store.use_firstbits:
abe.shortlink_type = "non-firstbits"
abe.log.warning("Ignoring shortlink-type=firstbits since" +
" the database does not support it.")
if abe.shortlink_type == "non-firstbits":
abe.shortlink_type = 10
def __call__(abe, env, start_response):
import urlparse
page = {
"status": '200 OK',
"title": [escape(ABE_APPNAME), " ", ABE_VERSION],
"body": [],
"env": env,
"params": {},
"dotdot": "../" * (escape(env['PATH_INFO']).count('/') - 1),
"start_response": start_response,
"content_type": str(abe.template_vars['CONTENT_TYPE']),
"template": abe.template,
"chain": None,
}
if 'QUERY_STRING' in env:
page['params'] = urlparse.parse_qs(env['QUERY_STRING'])
if abe.fix_path_info(env):
abe.log.debug("fixed path_info")
return redirect(page)
cmd = wsgiref.util.shift_path_info(env)
handler = abe.get_handler(cmd)
tvars = abe.template_vars.copy()
tvars['dotdot'] = page['dotdot']
page['template_vars'] = tvars
try:
if handler is None:
return abe.serve_static(cmd + env['PATH_INFO'], start_response)
if (not abe.args.no_load):
# Always be up-to-date, even if it means having to wait
# for a response! XXX Could use threads, timers, or a
# cron job.
abe.store.catch_up()
handler(page)
except PageNotFound:
page['status'] = '404 Not Found'
page['body'] = ['<p class="error">Sorry, ', env['SCRIPT_NAME'],
env['PATH_INFO'],
' does not exist on this server.</p>']
except NoSuchChainError:
page['body'] += [
'<p class="error">'
'Sorry, I don\'t know about that chain!</p>\n']
except Redirect:
return redirect(page)
except Streamed:
return ''
except Exception:
abe.store.rollback()
raise
abe.store.rollback() # Close implicitly opened transaction.
start_response(page['status'],
[('Content-type', page['content_type']),
('Cache-Control', 'max-age=30')])
tvars['title'] = flatten(page['title'])
tvars['h1'] = flatten(page.get('h1') or page['title'])
tvars['body'] = flatten(page['body'])
if abe.args.auto_agpl:
tvars['download'] = (
' <a href="' + page['dotdot'] + 'download">Source</a>')
content = page['template'] % tvars
if isinstance(content, unicode):
content = content.encode('UTF-8')
return [content]
def get_handler(abe, cmd):
return getattr(abe, 'handle_' + cmd, None)
def handle_chains(abe, page):
page['title'] = ABE_APPNAME + ' Search'
body = page['body']
body += [
abe.search_form(page),
'<table>\n',
'<tr><th>Currency</th><th>Code</th><th>Block</th><th>Time</th>',
'<th>Started</th><th>Age (days)</th><th>Coins Created</th>',
'<th>Avg Coin Age</th><th>',
'% <a href="https://en.bitcoin.it/wiki/Bitcoin_Days_Destroyed">',
'CoinDD</a></th>',
'</tr>\n']
now = time.time() - EPOCH1970
rows = abe.store.selectall("""
SELECT c.chain_name, b.block_height, b.block_nTime, b.block_hash,
b.block_total_seconds, b.block_total_satoshis,
b.block_satoshi_seconds,
b.block_total_ss
FROM chain c
JOIN block b ON (c.chain_last_block_id = b.block_id)
ORDER BY c.chain_name
""")
for row in rows:
name = row[0]
chain = abe.store.get_chain_by_name(name)
if chain is None:
abe.log.warning("Store does not know chain: %s", name)
continue
body += [
'<tr><td><a href="chain/', escape(name), '">',
escape(name), '</a></td><td>', escape(chain.code3), '</td>']
if row[1] is not None:
(height, nTime, hash) = (
int(row[1]), int(row[2]), abe.store.hashout_hex(row[3]))
body += [
'<td><a href="block/', hash, '">', height, '</a></td>',
'<td>', format_time(nTime), '</td>']
if row[6] is not None and row[7] is not None:
(seconds, satoshis, ss, total_ss) = (
int(row[4]), int(row[5]), int(row[6]), int(row[7]))
started = nTime - seconds
chain_age = now - started
since_block = now - nTime
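# avg_age: satoshi-seconds per outstanding satoshi, carried forward to
# "now" and shown in days.  percent_destroyed: share of all accumulated
# satoshi-seconds that is no longer outstanding (coin-days destroyed).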
if satoshis == 0:
avg_age = ' '
else:
avg_age = '%5g' % ((float(ss) / satoshis + since_block)
/ 86400.0)
if chain_age <= 0:
percent_destroyed = ' '
else:
more = since_block * satoshis
denominator = total_ss + more
if denominator <= 0:
percent_destroyed = ' '
else:
percent_destroyed = '%5g%%' % (
100.0 - (100.0 * (ss + more) / denominator))
body += [
'<td>', format_time(started)[:10], '</td>',
'<td>', '%5g' % (chain_age / 86400.0), '</td>',
'<td>', format_satoshis(satoshis, chain), '</td>',
'<td>', avg_age, '</td>',
'<td>', percent_destroyed, '</td>']
body += ['</tr>\n']
body += ['</table>\n']
if len(rows) == 0:
body += ['<p>No block data found.</p>\n']
def chain_lookup_by_name(abe, symbol):
if symbol is None:
ret = abe.get_default_chain()
else:
ret = abe.store.get_chain_by_name(symbol)
if ret is None:
raise NoSuchChainError()
return ret
def get_default_chain(abe):
return abe.store.get_default_chain()
def format_addresses(abe, data, dotdot, chain):
if data['binaddr'] is None:
return 'Unknown'
if 'subbinaddr' in data:
# Multisig or known P2SH.
ret = [hash_to_address_link(chain.script_addr_vers, data['binaddr'], dotdot, text='Escrow'),
' ', data['required_signatures'], ' of']
for binaddr in data['subbinaddr']:
ret += [' ', hash_to_address_link(data['address_version'], binaddr, dotdot, 10)]
return ret
return hash_to_address_link(data['address_version'], data['binaddr'], dotdot)
def call_handler(abe, page, cmd):
handler = abe.get_handler(cmd)
if handler is None:
raise PageNotFound()
handler(page)
def handle_chain(abe, page):
symbol = wsgiref.util.shift_path_info(page['env'])
chain = abe.chain_lookup_by_name(symbol)
page['chain'] = chain
cmd = wsgiref.util.shift_path_info(page['env'])
if cmd == '':
page['env']['SCRIPT_NAME'] = page['env']['SCRIPT_NAME'][:-1]
raise Redirect()
if cmd == 'chain' or cmd == 'chains':
raise PageNotFound()
if cmd is not None:
abe.call_handler(page, cmd)
return
page['title'] = chain.name
body = page['body']
body += abe.search_form(page)
try:
count = get_int_param(page, 'count') or 20
if count >= 2017:
count = 20
elif count <= 0:
count = 20
except ValueError:
count = 20
try:
hi = get_int_param(page, 'hi')
except ValueError:
hi = None
orig_hi = hi
if hi is None:
row = abe.store.selectrow("""
SELECT b.block_height
FROM block b
JOIN chain c ON (c.chain_last_block_id = b.block_id)
WHERE c.chain_id = ?
""", (chain.id,))
if row:
hi = row[0]
if hi is None:
if orig_hi is None and count > 0:
body += ['<p>I have no blocks in this chain.</p>']
else:
body += ['<p class="error">'
'The requested range contains no blocks.</p>\n']
return
rows = abe.store.selectall("""
SELECT b.block_hash, b.block_height, b.block_nTime, b.block_num_tx,
b.block_nBits, b.block_value_out,
b.block_total_seconds, b.block_satoshi_seconds,
b.block_total_satoshis, b.block_ss_destroyed,
b.block_total_ss
FROM block b
JOIN chain_candidate cc ON (b.block_id = cc.block_id)
WHERE cc.chain_id = ?
AND cc.block_height BETWEEN ? AND ?
AND cc.in_longest = 1
ORDER BY cc.block_height DESC LIMIT ?
""", (chain.id, hi - count + 1, hi, count))
if hi is None:
hi = int(rows[0][1])
basename = os.path.basename(page['env']['PATH_INFO'])
nav = ['<a href="',
basename, '?count=', str(count), '"><<</a>']
nav += [' <a href="', basename, '?hi=', str(hi + count),
'&count=', str(count), '"><</a>']
nav += [' ', '>']
if hi >= count:
nav[-1] = ['<a href="', basename, '?hi=', str(hi - count),
'&count=', str(count), '">', nav[-1], '</a>']
nav += [' ', '>>']
if hi != count - 1:
nav[-1] = ['<a href="', basename, '?hi=', str(count - 1),
'&count=', str(count), '">', nav[-1], '</a>']
for c in (20, 50, 100, 500, 2016):
nav += [' ']
if c != count:
nav += ['<a href="', basename, '?count=', str(c)]
if hi is not None:
nav += ['&hi=', str(max(hi, c - 1))]
nav += ['">']
nav += [' ', str(c)]
if c != count:
nav += ['</a>']
nav += [' <a href="', page['dotdot'], '">Search</a>']
extra = False
#extra = True
body += ['<p>', nav, '</p>\n',
'<table><tr><th>Block</th><th>Approx. Time</th>',
'<th>Transactions</th><th>Value Out</th>',
'<th>Difficulty</th><th>Outstanding</th>',
'<th>Average Age</th><th>Chain Age</th>',
'<th>% ',
'<a href="https://en.bitcoin.it/wiki/Bitcoin_Days_Destroyed">',
'CoinDD</a></th>',
['<th>Satoshi-seconds</th>',
'<th>Total ss</th>']
if extra else '',
'</tr>\n']
for row in rows:
(hash, height, nTime, num_tx, nBits, value_out,
seconds, ss, satoshis, destroyed, total_ss) = row
nTime = int(nTime)
value_out = int(value_out)
seconds = int(seconds)
satoshis = int(satoshis)
ss = int(ss) if ss else 0
total_ss = int(total_ss) if total_ss else 0
if satoshis == 0:
avg_age = ' '
else:
avg_age = '%5g' % (float(ss) / satoshis / 86400.0)
if total_ss <= 0:
percent_destroyed = ' '
else:
percent_destroyed = '%5g%%' % (100.0 - (100.0 * ss / total_ss))
body += [
'<tr><td><a href="', page['dotdot'], 'block/',
abe.store.hashout_hex(hash),
'">', height, '</a>'
'</td><td>', format_time(int(nTime)),
'</td><td>', num_tx,
'</td><td>', format_satoshis(value_out, chain),
'</td><td>', util.calculate_difficulty(int(nBits)),
'</td><td>', format_satoshis(satoshis, chain),
'</td><td>', avg_age,
'</td><td>', '%5g' % (seconds / 86400.0),
'</td><td>', percent_destroyed,
['</td><td>', '%8g' % ss,
'</td><td>', '%8g' % total_ss] if extra else '',
'</td></tr>\n']
body += ['</table>\n<p>', nav, '</p>\n']
def _show_block(abe, page, dotdotblock, chain, **kwargs):
body = page['body']
try:
b = abe.store.export_block(chain, **kwargs)
except DataStore.MalformedHash:
body += ['<p class="error">Not in correct format.</p>']
return
if b is None:
body += ['<p class="error">Block not found.</p>']
return
in_longest = False
for cc in b['chain_candidates']:
if chain is None:
chain = cc['chain']
if chain.id == cc['chain'].id:
in_longest = cc['in_longest']
if in_longest:
page['title'] = [escape(chain.name), ' ', b['height']]
page['h1'] = ['<a href="', page['dotdot'], 'chain/',
escape(chain.name), '?hi=', b['height'], '">',
escape(chain.name), '</a> ', b['height']]
else:
page['title'] = ['Block ', b['hash'][:4], '...', b['hash'][-10:]]
body += abe.short_link(page, 'b/' + block_shortlink(b['hash']))
is_stake_chain = chain.has_feature('nvc_proof_of_stake')
is_stake_block = is_stake_chain and b['is_proof_of_stake']
body += ['<p>']
if is_stake_chain:
body += [
'Proof of Stake' if is_stake_block else 'Proof of Work',
': ',
format_satoshis(b['generated'], chain), ' coins generated<br />\n']
body += ['Hash: ', b['hash'], '<br />\n']
if b['hashPrev'] is not None:
body += ['Previous Block: <a href="', dotdotblock,
b['hashPrev'], '">', b['hashPrev'], '</a><br />\n']
if b['next_block_hashes']:
body += ['Next Block: ']
for hash in b['next_block_hashes']:
body += ['<a href="', dotdotblock, hash, '">', hash, '</a><br />\n']
body += [
['Height: ', b['height'], '<br />\n']
if b['height'] is not None else '',
'Version: ', b['version'], '<br />\n',
'Transaction Merkle Root: ', b['hashMerkleRoot'], '<br />\n',
'Time: ', b['nTime'], ' (', format_time(b['nTime']), ')<br />\n',
'Difficulty: ', format_difficulty(util.calculate_difficulty(b['nBits'])),
' (Bits: %x)' % (b['nBits'],), '<br />\n',
['Cumulative Difficulty: ', format_difficulty(
util.work_to_difficulty(b['chain_work'])), '<br />\n']
if b['chain_work'] is not None else '',
'Nonce: ', b['nNonce'], '<br />\n',
'Transactions: ', len(b['transactions']), '<br />\n',
'Value out: ', format_satoshis(b['value_out'], chain), '<br />\n',
'Transaction Fees: ', format_satoshis(b['fees'], chain), '<br />\n',
['Average Coin Age: %6g' % (b['satoshi_seconds'] / 86400.0 / b['chain_satoshis'],),
' days<br />\n']
if b['chain_satoshis'] and (b['satoshi_seconds'] is not None) else '',
'' if b['satoshis_destroyed'] is None else
['Coin-days Destroyed: ',
format_satoshis(b['satoshis_destroyed'] / 86400.0, chain), '<br />\n'],
['Cumulative Coin-days Destroyed: %6g%%<br />\n' %
(100 * (1 - float(b['satoshi_seconds']) / b['chain_satoshi_seconds']),)]
if b['chain_satoshi_seconds'] else '',
['sat=',b['chain_satoshis'],';sec=',seconds,';ss=',b['satoshi_seconds'],
';total_ss=',b['chain_satoshi_seconds'],';destroyed=',b['satoshis_destroyed']]
if abe.debug else '',
'</p>\n']
body += ['<h3>Transactions</h3>\n']
body += ['<table><tr><th>Transaction</th><th>Fee</th>'
'<th>Size (kB)</th><th>From (amount)</th><th>To (amount)</th>'
'</tr>\n']
for tx in b['transactions']:
body += ['<tr><td><a href="../tx/' + tx['hash'] + '">',
tx['hash'][:10], '...</a>'
'</td><td>', format_satoshis(tx['fees'], chain),
'</td><td>', tx['size'] / 1000.0,
'</td><td>']
if tx is b['transactions'][0]:
body += [
'POS ' if is_stake_block else '',
'Generation: ', format_satoshis(b['generated'], chain), ' + ',
format_satoshis(b['fees'], chain), ' total fees']
else:
for txin in tx['in']:
body += [abe.format_addresses(txin, page['dotdot'], chain), ': ',
format_satoshis(txin['value'], chain), '<br />']
body += ['</td><td>']
for txout in tx['out']:
if is_stake_block:
if tx is b['transactions'][0]:
assert txout['value'] == 0
assert len(tx['out']) == 1
body += [
format_satoshis(b['proof_of_stake_generated'], chain),
' included in the following transaction']
continue
if txout['value'] == 0:
continue
body += [abe.format_addresses(txout, page['dotdot'], chain), ': ',
format_satoshis(txout['value'], chain), '<br />']
body += ['</td></tr>\n']
body += '</table>\n'
def handle_block(abe, page):
block_hash = wsgiref.util.shift_path_info(page['env'])
if block_hash in (None, '') or page['env']['PATH_INFO'] != '':
raise PageNotFound()
block_hash = block_hash.lower() # Case-insensitive, BBE compatible
page['title'] = 'Block'
if not is_hash_prefix(block_hash):
page['body'] += ['<p class="error">Not a valid block hash.</p>']
return
abe._show_block(page, '', None, block_hash=block_hash)
def handle_tx(abe, page):
tx_hash = wsgiref.util.shift_path_info(page['env'])
if tx_hash in (None, '') or page['env']['PATH_INFO'] != '':
raise PageNotFound()
tx_hash = tx_hash.lower() # Case-insensitive, BBE compatible
page['title'] = ['Transaction ', tx_hash[:10], '...', tx_hash[-4:]]
body = page['body']
if not is_hash_prefix(tx_hash):
body += ['<p class="error">Not a valid transaction hash.</p>']
return
try:
# XXX Should pass chain to export_tx to help parse scripts.
tx = abe.store.export_tx(tx_hash = tx_hash, format = 'browser')
except DataStore.MalformedHash:
body += ['<p class="error">Not in correct format.</p>']
return
if tx is None:
body += ['<p class="error">Transaction not found.</p>']
return
return abe.show_tx(page, tx)
def show_tx(abe, page, tx):
body = page['body']
def row_to_html(row, this_ch, other_ch, no_link_text):
body = page['body']
body += [
'<tr>\n',
'<td><a name="', this_ch, row['pos'], '">', row['pos'],
'</a></td>\n<td>']
if row['o_hash'] is None:
body += [no_link_text]
else:
body += [
'<a href="', row['o_hash'], '#', other_ch, row['o_pos'],
'">', row['o_hash'][:10], '...:', row['o_pos'], '</a>']
body += [
'</td>\n',
'<td>', format_satoshis(row['value'], chain), '</td>\n',
'<td>', abe.format_addresses(row, '../', chain), '</td>\n']
if row['binscript'] is not None:
body += ['<td>', escape(decode_script(row['binscript'])), '</td>\n']
body += ['</tr>\n']
body += abe.short_link(page, 't/' + hexb58(tx['hash'][:14]))
body += ['<p>Hash: ', tx['hash'], '<br />\n']
chain = None
is_coinbase = None
for tx_cc in tx['chain_candidates']:
if chain is None:
chain = tx_cc['chain']
is_coinbase = (tx_cc['tx_pos'] == 0)
elif tx_cc['chain'].id != chain.id:
abe.log.warning('Transaction ' + tx['hash'] + ' in multiple chains: '
+ tx_cc['chain'].id + ', ' + chain.id)
blk_hash = tx_cc['block_hash']
body += [
'Appeared in <a href="../block/', blk_hash, '">',
escape(tx_cc['chain'].name), ' ',
tx_cc['block_height'] if tx_cc['in_longest'] else [blk_hash[:10], '...', blk_hash[-4:]],
'</a> (', format_time(tx_cc['block_nTime']), ')<br />\n']
if chain is None:
abe.log.warning('Assuming default chain for Transaction ' + tx['hash'])
chain = abe.get_default_chain()
body += [
'Number of inputs: ', len(tx['in']),
' (<a href="#inputs">Jump to inputs</a>)<br />\n',
'Total in: ', format_satoshis(tx['value_in'], chain), '<br />\n',
'Number of outputs: ', len(tx['out']),
' (<a href="#outputs">Jump to outputs</a>)<br />\n',
'Total out: ', format_satoshis(tx['value_out'], chain), '<br />\n',
'Size: ', tx['size'], ' bytes<br />\n',
'Fee: ', format_satoshis(0 if is_coinbase else
(tx['value_in'] and tx['value_out'] and
tx['value_in'] - tx['value_out']), chain),
'<br />\n',
'<a href="../rawtx/', tx['hash'], '">Raw transaction</a><br />\n']
body += ['</p>\n',
'<a name="inputs"><h3>Inputs</h3></a>\n<table>\n',
'<tr><th>Index</th><th>Previous output</th><th>Amount</th>',
'<th>From address</th>']
if abe.store.keep_scriptsig:
body += ['<th>ScriptSig</th>']
body += ['</tr>\n']
for txin in tx['in']:
row_to_html(txin, 'i', 'o',
'Generation' if is_coinbase else 'Unknown')
body += ['</table>\n',
'<a name="outputs"><h3>Outputs</h3></a>\n<table>\n',
'<tr><th>Index</th><th>Redeemed at input</th><th>Amount</th>',
'<th>To address</th><th>ScriptPubKey</th></tr>\n']
for txout in tx['out']:
row_to_html(txout, 'o', 'i', 'Not yet redeemed')
body += ['</table>\n']
def handle_rawtx(abe, page):
abe.do_raw(page, abe.do_rawtx)
def do_rawtx(abe, page, chain):
tx_hash = wsgiref.util.shift_path_info(page['env'])
if tx_hash in (None, '') or page['env']['PATH_INFO'] != '' \
or not is_hash_prefix(tx_hash):
return 'ERROR: Not in correct format' # BBE compatible
tx = abe.store.export_tx(tx_hash=tx_hash.lower())
if tx is None:
return 'ERROR: Transaction does not exist.' # BBE compatible
return json.dumps(tx, sort_keys=True, indent=2)
def handle_address(abe, page):
address = wsgiref.util.shift_path_info(page['env'])
if address in (None, '') or page['env']['PATH_INFO'] != '':
raise PageNotFound()
body = page['body']
page['title'] = 'Address ' + escape(address)
try:
history = abe.store.export_address_history(
address, chain=page['chain'], max_rows=abe.address_history_rows_max)
except DataStore.MalformedAddress:
page['status'] = '404 Not Found'
body += ['<p>Not a valid address.</p>']
return
if history is None:
body += ["<p>I'm sorry, this address has too many records"
" to display.</p>"]
return
binaddr = history['binaddr']
version = history['version']
chains = history['chains']
txpoints = history['txpoints']
balance = history['balance']
sent = history['sent']
received = history['received']
counts = history['counts']
if (not chains):
page['status'] = '404 Not Found'
body += ['<p>Address not seen on the network.</p>']
return
def format_amounts(amounts, link):
ret = []
for chain in chains:
if ret:
ret += [', ']
ret += [format_satoshis(amounts[chain.id], chain),
' ', escape(chain.code3)]
if link:
vers = chain.address_version
if page['chain'] is not None and version == page['chain'].script_addr_vers:
vers = chain.script_addr_vers or vers
other = util.hash_to_address(vers, binaddr)
if other != address:
ret[-1] = ['<a href="', page['dotdot'],
'address/', other,
'">', ret[-1], '</a>']
return ret
if abe.shortlink_type == "firstbits":
link = abe.store.get_firstbits(
address_version=version, db_pubkey_hash=abe.store.binin(binaddr),
chain_id = (page['chain'] and page['chain'].id))
if link:
link = link.replace('l', 'L')
else:
link = address
else:
link = address[0 : abe.shortlink_type]
body += abe.short_link(page, 'a/' + link)
body += ['<p>Balance: '] + format_amounts(balance, True)
if 'subbinaddr' in history:
chain = page['chain']
if chain is None:
for c in chains:
if c.script_addr_vers == version:
chain = c
break
if chain is None:
chain = chains[0]
body += ['<br />\nEscrow']
for subbinaddr in history['subbinaddr']:
body += [' ', hash_to_address_link(chain.address_version, subbinaddr, page['dotdot'], 10) ]
for chain in chains:
balance[chain.id] = 0 # Reset for history traversal.
body += ['<br />\n',
'Transactions in: ', counts[0], '<br />\n',
'Received: ', format_amounts(received, False), '<br />\n',
'Transactions out: ', counts[1], '<br />\n',
'Sent: ', format_amounts(sent, False), '<br />\n']
body += ['</p>\n'
'<h3>Transactions</h3>\n'
'<table class="addrhist">\n<tr><th>Transaction</th><th>Block</th>'
'<th>Approx. Time</th><th>Amount</th><th>Balance</th>'
'<th>Currency</th></tr>\n']
for elt in txpoints:
chain = elt['chain']
type = elt['type']
if type == 'direct':
balance[chain.id] += elt['value']
body += ['<tr class="', type, '"><td class="tx"><a href="../tx/', elt['tx_hash'],
'#', 'i' if elt['is_out'] else 'o', elt['pos'],
'">', elt['tx_hash'][:10], '...</a>',
'</td><td class="block"><a href="../block/', elt['blk_hash'],
'">', elt['height'], '</a></td><td class="time">',
format_time(elt['nTime']), '</td><td class="amount">']
if elt['value'] < 0:
value = '(' + format_satoshis(-elt['value'], chain) + ')'
else:
value = format_satoshis(elt['value'], chain)
if 'binaddr' in elt:
value = hash_to_address_link(chain.script_addr_vers, elt['binaddr'], page['dotdot'], text=value)
body += [value, '</td><td class="balance">',
format_satoshis(balance[chain.id], chain),
'</td><td class="currency">', escape(chain.code3),
'</td></tr>\n']
body += ['</table>\n']
def search_form(abe, page):
q = (page['params'].get('q') or [''])[0]
return [
'<p>Search by address, block number or hash, transaction or'
' public key hash, or chain name:</p>\n'
'<form action="', page['dotdot'], 'search"><p>\n'
'<input name="q" size="64" value="', escape(q), '" />'
'<button type="submit">Search</button>\n'
'<br />Address or hash search requires at least the first ',
HASH_PREFIX_MIN, ' characters.</p></form>\n']
def handle_search(abe, page):
page['title'] = 'Search'
q = (page['params'].get('q') or [''])[0]
if q == '':
page['body'] = [
'<p>Please enter search terms.</p>\n', abe.search_form(page)]
return
found = []
if HEIGHT_RE.match(q): found += abe.search_number(int(q))
if util.possible_address(q): found += abe.search_address(q)
elif ADDR_PREFIX_RE.match(q): found += abe.search_address_prefix(q)
if is_hash_prefix(q): found += abe.search_hash_prefix(q)
found += abe.search_general(q)
abe.show_search_results(page, found)
def show_search_results(abe, page, found):
if not found:
page['body'] = [
'<p>No results found.</p>\n', abe.search_form(page)]
return
if len(found) == 1:
# Undo shift_path_info.
sn = posixpath.dirname(page['env']['SCRIPT_NAME'])
if sn == '/': sn = ''
page['env']['SCRIPT_NAME'] = sn
page['env']['PATH_INFO'] = '/' + page['dotdot'] + found[0]['uri']
del(page['env']['QUERY_STRING'])
raise Redirect()
body = page['body']
body += ['<h3>Search Results</h3>\n<ul>\n']
for result in found:
body += [
'<li><a href="', page['dotdot'], escape(result['uri']), '">',
escape(result['name']), '</a></li>\n']
body += ['</ul>\n']
def search_number(abe, n):
def process(row):
(chain_name, dbhash, in_longest) = row
hexhash = abe.store.hashout_hex(dbhash)
if in_longest == 1:
name = str(n)
else:
name = hexhash
return {
'name': chain_name + ' ' + name,
'uri': 'block/' + hexhash,
}
return map(process, abe.store.selectall("""
SELECT c.chain_name, b.block_hash, cc.in_longest
FROM chain c
JOIN chain_candidate cc ON (cc.chain_id = c.chain_id)
JOIN block b ON (b.block_id = cc.block_id)
WHERE cc.block_height = ?
ORDER BY c.chain_name, cc.in_longest DESC
""", (n,)))
def search_hash_prefix(abe, q, types = ('tx', 'block', 'pubkey')):
q = q.lower()
ret = []
for t in types:
def process(row):
if t == 'tx': name = 'Transaction'
elif t == 'block': name = 'Block'
else:
# XXX Use Bitcoin address version until we implement
# /pubkey/... for this to link to.
return abe._found_address(
util.hash_to_address('\0', abe.store.binout(row[0])))
hash = abe.store.hashout_hex(row[0])
return {
'name': name + ' ' + hash,
'uri': t + '/' + hash,
}
if t == 'pubkey':
if len(q) > 40:
continue
lo = abe.store.binin_hex(q + '0' * (40 - len(q)))
hi = abe.store.binin_hex(q + 'f' * (40 - len(q)))
else:
lo = abe.store.hashin_hex(q + '0' * (64 - len(q)))
hi = abe.store.hashin_hex(q + 'f' * (64 - len(q)))
ret += map(process, abe.store.selectall(
"SELECT " + t + "_hash FROM " + t + " WHERE " + t +
# XXX hardcoded limit.
"_hash BETWEEN ? AND ? LIMIT 100",
(lo, hi)))
return ret
def _found_address(abe, address):
return { 'name': 'Address ' + address, 'uri': 'address/' + address }
def search_address(abe, address):
try:
binaddr = base58.bc_address_to_hash_160(address)
except Exception:
return abe.search_address_prefix(address)
return [abe._found_address(address)]
def search_address_prefix(abe, ap):
ret = []
ones = 0
for c in ap:
if c != '1':
break
ones += 1
all_ones = (ones == len(ap))
minlen = max(len(ap), 24)
l = max(35, len(ap)) # XXX Increase "35" to support multibyte
# address versions.
al = ap + ('1' * (l - len(ap)))
ah = ap + ('z' * (l - len(ap)))
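# Pad the prefix with '1' (lowest) and 'z' (highest) to a plausible full
# address length, decode both to (version, hash) bounds, and scan
# pubkey_hash between those bounds, retrying with shorter padding down
# to minlen.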
def incr_str(s):
for i in range(len(s)-1, -1, -1):
if s[i] != '\xff':
return s[:i] + chr(ord(s[i])+1) + ('\0' * (len(s) - i - 1))
return '\1' + ('\0' * len(s))
def process(row):
hash = abe.store.binout(row[0])
address = util.hash_to_address(vl, hash)
if address.startswith(ap):
v = vl
else:
if vh != vl:
address = util.hash_to_address(vh, hash)
if not address.startswith(ap):
return None
v = vh
if abe.is_address_version(v):
return abe._found_address(address)
while l >= minlen:
vl, hl = util.decode_address(al)
vh, hh = util.decode_address(ah)
if ones:
if not all_ones and \
util.hash_to_address('\0', hh)[ones:][:1] == '1':
break
elif vh == '\0':
break
elif vh != vl and vh != incr_str(vl):
continue
if hl <= hh:
neg = ""
else:
neg = " NOT"
hl, hh = hh, hl
bl = abe.store.binin(hl)
bh = abe.store.binin(hh)
ret += filter(None, map(process, abe.store.selectall(
"SELECT pubkey_hash FROM pubkey WHERE pubkey_hash" +
# XXX hardcoded limit.
neg + " BETWEEN ? AND ? LIMIT 100", (bl, bh))))
l -= 1
al = al[:-1]
ah = ah[:-1]
return ret
def search_general(abe, q):
"""Search for something that is not an address, hash, or block number.
Currently, this is limited to chain names and currency codes."""
def process(row):
(name, code3) = row
return { 'name': name + ' (' + code3 + ')',
'uri': 'chain/' + str(name) }
ret = map(process, abe.store.selectall("""
SELECT chain_name, chain_code3
FROM chain
WHERE UPPER(chain_name) LIKE '%' || ? || '%'
OR UPPER(chain_code3) LIKE '%' || ? || '%'
""", (q.upper(), q.upper())))
return ret
def handle_t(abe, page):
abe.show_search_results(
page,
abe.search_hash_prefix(
b58hex(wsgiref.util.shift_path_info(page['env'])),
('tx',)))
def handle_b(abe, page):
if page.get('chain') is not None:
chain = page['chain']
height = wsgiref.util.shift_path_info(page['env'])
try:
height = int(height)
except Exception:
raise PageNotFound()
if height < 0 or page['env']['PATH_INFO'] != '':
raise PageNotFound()
cmd = wsgiref.util.shift_path_info(page['env'])
if cmd is not None:
raise PageNotFound() # XXX want to support /a/...
page['title'] = [escape(chain.name), ' ', height]
abe._show_block(page, page['dotdot'] + 'block/', chain, block_number=height)
return
abe.show_search_results(
page,
abe.search_hash_prefix(
shortlink_block(wsgiref.util.shift_path_info(page['env'])),
('block',)))
def handle_a(abe, page):
arg = wsgiref.util.shift_path_info(page['env'])
if abe.shortlink_type == "firstbits":
addrs = map(
abe._found_address,
abe.store.firstbits_to_addresses(
arg.lower(),
chain_id = page['chain'] and page['chain'].id))
else:
addrs = abe.search_address_prefix(arg)
abe.show_search_results(page, addrs)
def handle_unspent(abe, page):
abe.do_raw(page, abe.do_unspent)
def do_unspent(abe, page, chain):
addrs = wsgiref.util.shift_path_info(page['env'])
if addrs is None:
addrs = []
else:
addrs = addrs.split("|");
if len(addrs) < 1 or len(addrs) > MAX_UNSPENT_ADDRESSES:
return 'Number of addresses must be between 1 and ' + \
str(MAX_UNSPENT_ADDRESSES)
if chain:
chain_id = chain.id
bind = [chain_id]
else:
chain_id = None
bind = []
hashes = []
good_addrs = []
for address in addrs:
try:
hashes.append(abe.store.binin(
base58.bc_address_to_hash_160(address)))
good_addrs.append(address)
except Exception:
pass
addrs = good_addrs
bind += hashes
if len(hashes) == 0: # Address(es) are invalid.
return 'Error getting unspent outputs' # blockchain.info compatible
placeholders = "?" + (",?" * (len(hashes)-1))
max_rows = abe.address_history_rows_max
if max_rows >= 0:
bind += [max_rows + 1]
spent = set()
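# First gather every (txout_id, chain_id) already consumed by some txin;
# outputs in this set are filtered out of the received rows below.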
for txout_id, spent_chain_id in abe.store.selectall("""
SELECT txin.txout_id, cc.chain_id
FROM chain_candidate cc
JOIN block_tx ON (block_tx.block_id = cc.block_id)
JOIN txin ON (txin.tx_id = block_tx.tx_id)
JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
JOIN pubkey ON (pubkey.pubkey_id = prevout.pubkey_id)
WHERE cc.in_longest = 1""" + ("" if chain_id is None else """
AND cc.chain_id = ?""") + """
AND pubkey.pubkey_hash IN (""" + placeholders + """)""" + (
"" if max_rows < 0 else """
LIMIT ?"""), bind):
spent.add((int(txout_id), int(spent_chain_id)))
abe.log.debug('spent: %s', spent)
received_rows = abe.store.selectall("""
SELECT
txout.txout_id,
cc.chain_id,
tx.tx_hash,
txout.txout_pos,
txout.txout_scriptPubKey,
txout.txout_value,
cc.block_height
FROM chain_candidate cc
JOIN block_tx ON (block_tx.block_id = cc.block_id)
JOIN tx ON (tx.tx_id = block_tx.tx_id)
JOIN txout ON (txout.tx_id = tx.tx_id)
JOIN pubkey ON (pubkey.pubkey_id = txout.pubkey_id)
WHERE cc.in_longest = 1""" + ("" if chain_id is None else """
AND cc.chain_id = ?""") + """
AND pubkey.pubkey_hash IN (""" + placeholders + """)""" + (
"" if max_rows < 0 else """
ORDER BY cc.block_height,
block_tx.tx_pos,
txout.txout_pos
LIMIT ?"""), bind)
if max_rows >= 0 and len(received_rows) > max_rows:
return "ERROR: too many records to process"
rows = []
for row in received_rows:
key = (int(row[0]), int(row[1]))
if key in spent:
continue
rows.append(row[2:])
if len(rows) == 0:
return 'No free outputs to spend [' + '|'.join(addrs) + ']'
out = []
for row in rows:
tx_hash, out_pos, script, value, height = row
tx_hash = abe.store.hashout_hex(tx_hash)
out_pos = None if out_pos is None else int(out_pos)
script = abe.store.binout_hex(script)
value = None if value is None else int(value)
height = None if height is None else int(height)
out.append({
'tx_hash': tx_hash,
'tx_output_n': out_pos,
'script': script,
'value': value,
'value_hex': None if value is None else "%x" % value,
'block_number': height})
return json.dumps({ 'unspent_outputs': out }, sort_keys=True, indent=2)
def do_raw(abe, page, func):
page['content_type'] = 'text/plain'
page['template'] = '%(body)s'
page['body'] = func(page, page['chain'])
def handle_q(abe, page):
cmd = wsgiref.util.shift_path_info(page['env'])
if cmd is None:
return abe.q(page)
func = getattr(abe, 'q_' + cmd, None)
if func is None:
raise PageNotFound()
abe.do_raw(page, func)
if page['content_type'] == 'text/plain':
jsonp = page['params'].get('jsonp', [None])[0]
fmt = page['params'].get('format', ["jsonp" if jsonp else "csv"])[0]
if fmt in ("json", "jsonp"):
page['body'] = json.dumps([page['body']])
if fmt == "jsonp":
page['body'] = (jsonp or "jsonp") + "(" + page['body'] + ")"
page['content_type'] = 'application/javascript'
else:
page['content_type'] = 'application/json'
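# The /q index page is built by introspection: every method named q_*
# becomes an endpoint, documented by its docstring.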
def q(abe, page):
page['body'] = ['<p>Supported APIs:</p>\n<ul>\n']
for name in dir(abe):
if not name.startswith("q_"):
continue
cmd = name[2:]
page['body'] += ['<li><a href="q/', cmd, '">', cmd, '</a>']
val = getattr(abe, name)
if val.__doc__ is not None:
page['body'] += [' - ', escape(val.__doc__)]
page['body'] += ['</li>\n']
page['body'] += ['</ul>\n']
def get_max_block_height(abe, chain):
# "getblockcount" traditionally returns max(block_height),
# which is one less than the actual block count.
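# For example, a chain whose tip is at height 250000 contains 250001
# blocks but reports 250000 here.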
return abe.store.get_block_number(chain.id)
def q_getblockcount(abe, page, chain):
"""shows the current block number."""
if chain is None:
return 'Shows the greatest block height in CHAIN.\n' \
'/chain/CHAIN/q/getblockcount\n'
return abe.get_max_block_height(chain)
def q_getdifficulty(abe, page, chain):
"""shows the last solved block's difficulty."""
if chain is None:
return 'Shows the difficulty of the last block in CHAIN.\n' \
'/chain/CHAIN/q/getdifficulty\n'
target = abe.store.get_target(chain.id)
return "" if target is None else util.target_to_difficulty(target)
def q_translate_address(abe, page, chain):
"""shows the address in a given chain with a given address's hash."""
addr = wsgiref.util.shift_path_info(page['env'])
if chain is None or addr is None:
return 'Translates ADDRESS for use in CHAIN.\n' \
'/chain/CHAIN/q/translate_address/ADDRESS\n'
version, hash = util.decode_check_address(addr)
if hash is None:
return addr + " (INVALID ADDRESS)"
return util.hash_to_address(chain.address_version, hash)
def q_decode_address(abe, page, chain):
"""shows the version prefix and hash encoded in an address."""
addr = wsgiref.util.shift_path_info(page['env'])
if addr is None:
return "Shows ADDRESS's version byte(s) and public key hash" \
' as hex strings separated by colon (":").\n' \
'/q/decode_address/ADDRESS\n'
# XXX error check?
version, hash = util.decode_address(addr)
ret = version.encode('hex') + ":" + hash.encode('hex')
if util.hash_to_address(version, hash) != addr:
ret = "INVALID(" + ret + ")"
return ret
def q_addresstohash(abe, page, chain):
"""shows the public key hash encoded in an address."""
addr = wsgiref.util.shift_path_info(page['env'])
if addr is None:
return 'Shows the 160-bit hash encoded in ADDRESS.\n' \
'For BBE compatibility, the address is not checked for' \
' validity. See also /q/decode_address.\n' \
'/q/addresstohash/ADDRESS\n'
version, hash = util.decode_address(addr)
return hash.encode('hex').upper()
def q_hashtoaddress(abe, page, chain):
"""shows the address with the given version prefix and hash."""
arg1 = wsgiref.util.shift_path_info(page['env'])
arg2 = wsgiref.util.shift_path_info(page['env'])
if arg1 is None:
return \
'Converts a 160-bit hash and address version to an address.\n' \
'/q/hashtoaddress/HASH[/VERSION]\n'
if page['env']['PATH_INFO']:
return "ERROR: Too many arguments"
if arg2 is not None:
# BBE-compatible HASH/VERSION
version, hash = arg2, arg1
elif arg1.find(":") >= 0:
# VERSION:HASH as returned by /q/decode_address.
version, hash = arg1.split(":", 1)
elif chain:
version, hash = chain.address_version.encode('hex'), arg1
else:
# Default: Bitcoin address starting with "1".
version, hash = '00', arg1
try:
hash = hash.decode('hex')
version = version.decode('hex')
except Exception:
return 'ERROR: Arguments must be hexadecimal strings of even length'
return util.hash_to_address(version, hash)
def q_hashpubkey(abe, page, chain):
"""shows the 160-bit hash of the given public key."""
pubkey = wsgiref.util.shift_path_info(page['env'])
if pubkey is None:
return \
"Returns the 160-bit hash of PUBKEY.\n" \
"For example, the Bitcoin genesis block's output public key," \
" seen in its transaction output scriptPubKey, starts with\n" \
"04678afdb0fe..., and its hash is" \
" 62E907B15CBF27D5425399EBF6F0FB50EBB88F18, corresponding" \
" to address 1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa.\n" \
"/q/hashpubkey/PUBKEY\n"
try:
pubkey = pubkey.decode('hex')
except Exception:
return 'ERROR: invalid hexadecimal byte string.'
return util.pubkey_to_hash(pubkey).encode('hex').upper()
def q_checkaddress(abe, page, chain):
"""checks an address for validity."""
addr = wsgiref.util.shift_path_info(page['env'])
if addr is None:
return \
"Returns the version encoded in ADDRESS as a hex string.\n" \
"If ADDRESS is invalid, returns either X5, SZ, or CK for" \
" BBE compatibility.\n" \
"/q/checkaddress/ADDRESS\n"
if util.possible_address(addr):
version, hash = util.decode_address(addr)
if util.hash_to_address(version, hash) == addr:
return version.encode('hex').upper()
return 'CK'
if len(addr) >= 26:
return 'X5'
return 'SZ'
def q_nethash(abe, page, chain):
"""shows statistics about difficulty and network power."""
if chain is None:
return 'Shows statistics every INTERVAL blocks.\n' \
'Negative values count back from the last block.\n' \
'/chain/CHAIN/q/nethash[/INTERVAL[/START[/STOP]]]\n'
jsonp = page['params'].get('jsonp', [None])[0]
fmt = page['params'].get('format', ["jsonp" if jsonp else "csv"])[0]
interval = path_info_int(page, 144)
start = path_info_int(page, 0)
stop = path_info_int(page, None)
if stop == 0:
stop = None
if interval < 0 and start != 0:
return 'ERROR: Negative INTERVAL requires 0 START.'
if interval < 0 or start < 0 or (stop is not None and stop < 0):
count = abe.get_max_block_height(chain)
if start < 0:
start += count
if stop is not None and stop < 0:
stop += count
if interval < 0:
interval = -interval
start = count - (count / interval) * interval
# Select every INTERVAL blocks from START to STOP.
# Standard SQL lacks an "every Nth row" feature, so we
# provide it with the help of a table containing the integers.
# We don't need all integers, only as many as rows we want to
# fetch. We happen to have a table with the desired integers,
# namely chain_candidate; its block_height column covers the
# required range without duplicates if properly constrained.
# That is the story of the second JOIN.
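# Illustrative: with interval=144 and start=0 the join keeps rows where
# cc.block_height equals ints.block_height * 144 + 0, i.e. heights
# 0, 144, 288, ...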
if stop is not None:
stop_ix = (stop - start) / interval
rows = abe.store.selectall("""
SELECT b.block_height,
b.block_nTime,
b.block_chain_work,
b.block_nBits
FROM block b
JOIN chain_candidate cc ON (cc.block_id = b.block_id)
JOIN chain_candidate ints ON (
ints.chain_id = cc.chain_id
AND ints.in_longest = 1
AND ints.block_height * ? + ? = cc.block_height)
WHERE cc.in_longest = 1
AND cc.chain_id = ?""" + (
"" if stop is None else """
AND ints.block_height <= ?""") + """
ORDER BY cc.block_height""",
(interval, start, chain.id)
if stop is None else
(interval, start, chain.id, stop_ix))
if fmt == "csv":
ret = NETHASH_HEADER
elif fmt in ("json", "jsonp"):
ret = []
elif fmt == "svg":
page['template'] = NETHASH_SVG_TEMPLATE
page['template_vars']['block_time'] = 600; # XXX BTC-specific
ret = ""
else:
return "ERROR: unknown format: " + fmt
prev_nTime, prev_chain_work = 0, -1
for row in rows:
height, nTime, chain_work, nBits = row
nTime = float(nTime)
nBits = int(nBits)
target = util.calculate_target(nBits)
difficulty = util.target_to_difficulty(target)
work = util.target_to_work(target)
chain_work = abe.store.binout_int(chain_work) - work
if row is not rows[0] or fmt == "svg":
height = int(height)
interval_work = chain_work - prev_chain_work
avg_target = util.work_to_target(
interval_work / float(interval))
#if avg_target == target - 1:
# avg_target = target
interval_seconds = nTime - prev_nTime
if interval_seconds <= 0:
nethash = 'Infinity'
else:
nethash = "%.0f" % (interval_work / interval_seconds,)
if fmt == "csv":
ret += "%d,%d,%d,%d,%.3f,%d,%.0f,%s\n" % (
height, nTime, target, avg_target, difficulty, work,
interval_seconds / interval, nethash)
elif fmt in ("json", "jsonp"):
ret.append([
height, int(nTime), target, avg_target,
difficulty, work, chain_work, nethash])
elif fmt == "svg":
ret += '<abe:nethash t="%d" d="%d"' \
' w="%d"/>\n' % (nTime, work, interval_work)
prev_nTime, prev_chain_work = nTime, chain_work
if fmt == "csv":
return ret
elif fmt == "json":
page['content_type'] = 'application/json'
return json.dumps(ret)
elif fmt == "jsonp":
page['content_type'] = 'application/javascript'
return (jsonp or "jsonp") + "(" + json.dumps(ret) + ")"
elif fmt == "svg":
page['content_type'] = 'image/svg+xml'
return ret
def q_totalbc(abe, page, chain):
"""shows the amount of currency ever mined."""
if chain is None:
return 'Shows the amount of currency ever mined.\n' \
'This differs from the amount in circulation when' \
' coins are destroyed, as happens frequently in Namecoin.\n' \
'Unlike http://blockexplorer.com/q/totalbc, this does not' \
' support future block numbers, and it returns a sum of' \
' observed generations rather than a calculated value.\n' \
'/chain/CHAIN/q/totalbc[/HEIGHT]\n'
height = path_info_uint(page, None)
if height is None:
row = abe.store.selectrow("""
SELECT b.block_total_satoshis
FROM chain c
LEFT JOIN block b ON (c.chain_last_block_id = b.block_id)
WHERE c.chain_id = ?
""", (chain.id,))
else:
row = abe.store.selectrow("""
SELECT b.block_total_satoshis
FROM chain_candidate cc
LEFT JOIN block b ON (b.block_id = cc.block_id)
WHERE cc.chain_id = ?
AND cc.block_height = ?
AND cc.in_longest = 1
""", (chain.id, height))
if not row:
return 'ERROR: block %d not seen yet' % (height,)
return format_satoshis(row[0], chain) if row else 0
def q_getreceivedbyaddress(abe, page, chain):
"""shows the amount ever received by a given address."""
addr = wsgiref.util.shift_path_info(page['env'])
if chain is None or addr is None:
return 'returns amount of money received by given address (not balance, sends are not subtracted)\n' \
'/chain/CHAIN/q/getreceivedbyaddress/ADDRESS\n'
if not util.possible_address(addr):
return 'ERROR: address invalid'
version, hash = util.decode_address(addr)
return format_satoshis(abe.store.get_received(chain.id, hash), chain)
def q_getsentbyaddress(abe, page, chain):
"""shows the amount ever sent from a given address."""
addr = wsgiref.util.shift_path_info(page['env'])
if chain is None or addr is None:
return 'returns amount of money sent from given address\n' \
'/chain/CHAIN/q/getsentbyaddress/ADDRESS\n'
if not util.possible_address(addr):
return 'ERROR: address invalid'
version, hash = util.decode_address(addr)
return format_satoshis(abe.store.get_sent(chain.id, hash), chain)
def q_addressbalance(abe, page, chain):
"""amount ever received minus amount ever sent by a given address."""
addr = wsgiref.util.shift_path_info(page['env'])
if chain is None or addr is None:
return 'returns amount of money at the given address\n' \
'/chain/CHAIN/q/addressbalance/ADDRESS\n'
if not util.possible_address(addr):
return 'ERROR: address invalid'
version, hash = util.decode_address(addr)
total = abe.store.get_balance(chain.id, hash)
return ("ERROR: please try again" if total is None else
format_satoshis(total, chain))
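# Note (added for illustration): these /q/ handlers implement a plain-text
# query API. Assuming Abe is served at the site root, requests such as
#
#   GET /chain/Bitcoin/q/getreceivedbyaddress/ADDRESS
#   GET /chain/Bitcoin/q/addressbalance/ADDRESS
#
# shift ADDRESS off PATH_INFO via wsgiref.util.shift_path_info and return a
# single amount formatted by format_satoshis(). "Bitcoin" and ADDRESS here are
# placeholder values, not part of the handlers above.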
def q_fb(abe, page, chain):
"""returns an address's firstbits."""
if not abe.store.use_firstbits:
raise PageNotFound()
addr = wsgiref.util.shift_path_info(page['env'])
if addr is None:
return 'Shows ADDRESS\'s firstbits:' \
' the shortest initial substring that uniquely and' \
' case-insensitively distinguishes ADDRESS from all' \
' others first appearing before it or in the same block.\n' \
'See http://firstbits.com/.\n' \
'Returns empty if ADDRESS has no firstbits.\n' \
'/chain/CHAIN/q/fb/ADDRESS\n' \
'/q/fb/ADDRESS\n'
if not util.possible_address(addr):
return 'ERROR: address invalid'
version, dbhash = util.decode_address(addr)
ret = abe.store.get_firstbits(
address_version = version,
db_pubkey_hash = abe.store.binin(dbhash),
chain_id = (chain and chain.id))
if ret is None:
return 'ERROR: address not in the chain.'
return ret
def q_addr(abe, page, chain):
"""returns the full address having the given firstbits."""
if not abe.store.use_firstbits:
raise PageNotFound()
fb = wsgiref.util.shift_path_info(page['env'])
if fb is None:
return 'Shows the address identified by FIRSTBITS:' \
' the first address in CHAIN to start with FIRSTBITS,' \
' where the comparison is case-insensitive.\n' \
'See http://firstbits.com/.\n' \
'Returns the argument if none matches.\n' \
'/chain/CHAIN/q/addr/FIRSTBITS\n' \
'/q/addr/FIRSTBITS\n'
return "\n".join(abe.store.firstbits_to_addresses(
fb, chain_id = (chain and chain.id)))
def handle_download(abe, page):
name = abe.args.download_name
if name is None:
name = re.sub(r'\W+', '-', ABE_APPNAME.lower()) + '-' + ABE_VERSION
fileobj = lambda: None
fileobj.func_dict['write'] = page['start_response'](
'200 OK',
[('Content-type', 'application/x-gtar-compressed'),
('Content-disposition', 'filename=' + name + '.tar.gz')])
import tarfile
with tarfile.TarFile.open(fileobj=fileobj, mode='w|gz',
format=tarfile.PAX_FORMAT) as tar:
tar.add(os.path.split(__file__)[0], name)
raise Streamed()
def serve_static(abe, path, start_response):
slen = len(abe.static_path)
if path[:slen] != abe.static_path:
raise PageNotFound()
path = path[slen:]
try:
# Serve static content.
# XXX Should check file modification time and handle HTTP
# if-modified-since. Or just hope serious users will map
# our htdocs as static in their web server.
# XXX is "+ '/' + path" adequate for non-POSIX systems?
found = open(abe.htdocs + '/' + path, "rb")
import mimetypes
type, enc = mimetypes.guess_type(path)
# XXX Should do something with enc if not None.
# XXX Should set Content-length.
start_response('200 OK', [('Content-type', type or 'text/plain')])
return found
except IOError:
raise PageNotFound()
# Change this if you want empty or multi-byte address versions.
def is_address_version(abe, v):
return len(v) == 1
def short_link(abe, page, link):
base = abe.base_url
if base is None:
env = page['env'].copy()
env['SCRIPT_NAME'] = posixpath.normpath(
posixpath.dirname(env['SCRIPT_NAME'] + env['PATH_INFO'])
+ '/' + page['dotdot'])
env['PATH_INFO'] = link
full = wsgiref.util.request_uri(env)
else:
full = base + link
return ['<p class="shortlink">Short Link: <a href="',
page['dotdot'], link, '">', full, '</a></p>\n']
def fix_path_info(abe, env):
ret = True
pi = env['PATH_INFO']
pi = posixpath.normpath(pi)
if pi[-1] != '/' and env['PATH_INFO'][-1:] == '/':
pi += '/'
if pi == '/':
pi += abe.home
if '/' not in abe.home:
ret = False
if pi == env['PATH_INFO']:
ret = False
else:
env['PATH_INFO'] = pi
return ret
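# Illustration (not part of the original source): fix_path_info() canonicalizes
# PATH_INFO before dispatch. Assuming abe.home == 'chain', hypothetical
# rewrites would be:
#
#   '/chain//Bitcoin/'  -> '/chain/Bitcoin/'   (normpath, trailing '/' preserved)
#   '/'                 -> '/' + abe.home      (bare root maps to the home page)
#   '/chain/Bitcoin'    -> unchanged
#
# A True return signals that PATH_INFO was rewritten in a way the caller can
# expose to the client as a canonicalizing redirect.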
def find_htdocs():
return os.path.join(os.path.split(__file__)[0], 'htdocs')
def get_int_param(page, name):
vals = page['params'].get(name)
return vals and int(vals[0])
def path_info_uint(page, default):
ret = path_info_int(page, None)
if ret is None or ret < 0:
return default
return ret
def path_info_int(page, default):
s = wsgiref.util.shift_path_info(page['env'])
if s is None:
return default
try:
return int(s)
except ValueError:
return default
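# Illustration (not in the original source): path_info_uint()/path_info_int()
# consume the next PATH_INFO component. For a request like
# /chain/Bitcoin/q/totalbc/250000 the q_totalbc handler receives height=250000,
# while a missing or non-numeric component falls back to the given default
# (None there, meaning "use the chain tip").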
def format_time(nTime):
import time
return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(nTime)))
def format_satoshis(satoshis, chain):
decimals = DEFAULT_DECIMALS if chain.decimals is None else chain.decimals
coin = 10 ** decimals
if satoshis is None:
return ''
if satoshis < 0:
return '-' + format_satoshis(-satoshis, chain)
satoshis = int(satoshis)
integer = satoshis / coin
frac = satoshis % coin
return (str(integer) +
('.' + (('0' * decimals) + str(frac))[-decimals:])
.rstrip('0').rstrip('.'))
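# Worked examples (illustrative only, assuming a chain with 8 decimals such
# as Bitcoin, so coin == 10**8):
#
#   format_satoshis(150000000, chain)  -> '1.5'
#   format_satoshis(1, chain)          -> '0.00000001'
#   format_satoshis(100000000, chain)  -> '1'          (trailing zeros stripped)
#   format_satoshis(None, chain)       -> ''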
def format_difficulty(diff):
idiff = int(diff)
ret = '.%03d' % (int(round((diff - idiff) * 1000)),)
while idiff > 999:
ret = (' %03d' % (idiff % 1000,)) + ret
idiff = idiff / 1000
return str(idiff) + ret
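# Worked example (illustrative only): the integer part is grouped in
# thousands with spaces and three fractional digits are kept:
#
#   format_difficulty(1234567.891)  -> '1 234 567.891'
#   format_difficulty(1.0)          -> '1.000'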
def hash_to_address_link(version, hash, dotdot, truncate_to=None, text=None):
if hash == DataStore.NULL_PUBKEY_HASH:
return 'Destroyed'
if hash is None:
return 'UNKNOWN'
addr = util.hash_to_address(version, hash)
if text is not None:
visible = text
elif truncate_to is None:
visible = addr
else:
visible = addr[:truncate_to] + '...'
return ['<a href="', dotdot, 'address/', addr, '">', visible, '</a>']
def decode_script(script):
if script is None:
return ''
try:
return deserialize.decode_script(script)
except KeyError, e:
return 'Nonstandard script'
def b58hex(b58):
try:
return base58.b58decode(b58, None).encode('hex_codec')
except Exception:
raise PageNotFound()
def hexb58(hex):
return base58.b58encode(hex.decode('hex_codec'))
def block_shortlink(block_hash):
zeroes = 0
for c in block_hash:
if c == '0':
zeroes += 1
else:
break
zeroes &= ~1
return hexb58("%02x%s" % (zeroes / 2, block_hash[zeroes : zeroes+12]))
def shortlink_block(link):
try:
data = base58.b58decode(link, None)
except Exception:
raise PageNotFound()
return ('00' * ord(data[0])) + data[1:].encode('hex_codec')
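# Shortlink scheme, summarized (added for illustration): block_shortlink()
# counts the leading '0' hex digits of the block hash, rounds the count down
# to an even number, and base58-encodes one byte holding zeroes/2 followed by
# the next 12 hex digits of the hash. shortlink_block() is the inverse: it
# re-expands the zero prefix and returns a hex hash prefix that the search
# code can match against full block hashes.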
def is_hash_prefix(s):
return HASH_PREFIX_RE.match(s) and len(s) >= HASH_PREFIX_MIN
def flatten(l):
if isinstance(l, list):
return ''.join(map(flatten, l))
if l is None:
raise Exception('NoneType in HTML conversion')
if isinstance(l, unicode):
return l
return str(l)
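# Illustration (not in the original source): page bodies are built as nested
# lists of strings, and flatten() joins them into one HTML string, e.g.
#
#   flatten(['<a href="', 'x', '">', ['link', ' text'], '</a>'])
#       -> '<a href="x">link text</a>'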
def redirect(page):
uri = wsgiref.util.request_uri(page['env'])
page['start_response'](
'301 Moved Permanently',
[('Location', uri),
('Content-Type', 'text/html')])
return ('<html><head><title>Moved</title></head>\n'
'<body><h1>Moved</h1><p>This page has moved to '
'<a href="' + uri + '">' + uri + '</a></body></html>')
def serve(store):
args = store.args
abe = Abe(store, args)
# Hack preventing wsgiref.simple_server from resolving client addresses
bhs = __import__('BaseHTTPServer')
bhs.BaseHTTPRequestHandler.address_string = lambda x: x.client_address[0]
del(bhs)
if args.query is not None:
def start_response(status, headers):
pass
import urlparse
parsed = urlparse.urlparse(args.query)
print abe({
'SCRIPT_NAME': '',
'PATH_INFO': parsed.path,
'QUERY_STRING': parsed.query
}, start_response)
elif args.host or args.port:
# HTTP server.
if args.host is None:
args.host = "localhost"
from wsgiref.simple_server import make_server
port = int(args.port or 80)
httpd = make_server(args.host, port, abe)
abe.log.warning("Listening on http://%s:%d", args.host, port)
# httpd.shutdown() sometimes hangs, so don't call it. XXX
httpd.serve_forever()
else:
# FastCGI server.
from flup.server.fcgi import WSGIServer
# In the case where the web server starts Abe but can't signal
# it on server shutdown (because Abe runs as a different user)
# we arrange the following. FastCGI script passes its pid as
# --watch-pid=PID and enters an infinite loop. We check every
# minute whether it has terminated and exit when it has.
wpid = args.watch_pid
if wpid is not None:
wpid = int(wpid)
interval = 60.0 # XXX should be configurable.
from threading import Timer
import signal
def watch():
if not process_is_alive(wpid):
abe.log.warning("process %d terminated, exiting", wpid)
#os._exit(0) # sys.exit merely raises an exception.
os.kill(os.getpid(), signal.SIGTERM)
return
abe.log.log(0, "process %d found alive", wpid)
Timer(interval, watch).start()
Timer(interval, watch).start()
WSGIServer(abe).run()
def process_is_alive(pid):
# XXX probably fails spectacularly on Windows.
import errno
try:
os.kill(pid, 0)
return True
except OSError, e:
if e.errno == errno.EPERM:
return True # process exists, but we can't send it signals.
if e.errno == errno.ESRCH:
return False # no such process.
raise
def list_policies():
import pkgutil
import Chain
policies = []
for _, name, ispkg in pkgutil.iter_modules(path=[os.path.dirname(Chain.__file__)]):
if not ispkg:
policies.append(name)
return policies
def show_policy(policy):
import inspect
import Chain
try:
chain = Chain.create(policy)
except ImportError as e:
print("%s: policy unavailable (%s)" % (policy, e.message))
return
print("%s:" % policy)
parents = []
for cls in type(chain).__mro__[1:]:
if cls == Chain.BaseChain:
break
parents.append(cls)
if parents:
print(" Inherits from:")
for cls in parents:
print(" %s" % cls.__name__)
params = []
for attr in chain.POLICY_ATTRS:
val = getattr(chain, attr, None)
if val is not None:
params.append((attr, val))
if params:
print(" Parameters:")
for attr, val in params:
try:
try:
val = json.dumps(val)
except UnicodeError:
if type(val) == bytes:
# The value could be a magic number or address version.
val = json.dumps(unicode(val, 'latin_1'))
else:
val = repr(val)
except TypeError as e:
val = repr(val)
print(" %s: %s" % (attr, val))
doc = inspect.getdoc(chain)
if doc is not None:
print(" %s" % doc.replace('\n', '\n '))
def create_conf():
conf = {
"port": None,
"host": None,
"query": None,
"no_serve": None,
"no_load": None,
"timezone": None,
"debug": None,
"static_path": None,
"document_root": None,
"auto_agpl": None,
"download_name": None,
"watch_pid": None,
"base_url": None,
"logging": None,
"address_history_rows_max": None,
"shortlink_type": None,
"template": DEFAULT_TEMPLATE,
"template_vars": {
"ABE_URL": ABE_URL,
"APPNAME": ABE_APPNAME,
"VERSION": ABE_VERSION,
"COPYRIGHT": COPYRIGHT,
"COPYRIGHT_YEARS": COPYRIGHT_YEARS,
"COPYRIGHT_URL": COPYRIGHT_URL,
"DONATIONS_BTC": DONATIONS_BTC,
"DONATIONS_NMC": DONATIONS_NMC,
"CONTENT_TYPE": DEFAULT_CONTENT_TYPE,
"HOMEPAGE": DEFAULT_HOMEPAGE,
},
}
conf.update(DataStore.CONFIG_DEFAULTS)
return conf
def main(argv):
if argv and argv[0] == '--show-policy':
for policy in argv[1:] or list_policies():
show_policy(policy)
return 0
elif argv and argv[0] == '--list-policies':
print("Available chain policies:")
for name in list_policies():
print(" %s" % name)
return 0
args, argv = readconf.parse_argv(argv, create_conf())
if not argv:
pass
elif argv[0] in ('-h', '--help'):
print ("""Usage: python -m Abe.abe [-h] [--config=FILE] [--CONFIGVAR=VALUE]...
A Bitcoin block chain browser.
--help Show this help message and exit.
--version Show the program version and exit.
--print-htdocs-directory Show the static content directory name and exit.
--list-policies Show the available policy names for --datadir.
--show-policy POLICY... Describe the given policy.
--query /q/COMMAND Show the given URI content and exit.
--config FILE Read options from FILE.
All configuration variables may be given as command arguments.
See abe.conf for commented examples.""")
return 0
elif argv[0] in ('-v', '--version'):
print ABE_APPNAME, ABE_VERSION
print "Schema version", DataStore.SCHEMA_VERSION
return 0
elif argv[0] == '--print-htdocs-directory':
print find_htdocs()
return 0
else:
sys.stderr.write(
"Error: unknown option `%s'\n"
"See `python -m Abe.abe --help' for more information.\n"
% (argv[0],))
return 1
logging.basicConfig(
stream=sys.stdout,
level = logging.DEBUG if args.query is None else logging.ERROR,
format=DEFAULT_LOG_FORMAT)
if args.logging is not None:
import logging.config as logging_config
logging_config.dictConfig(args.logging)
# Set timezone
if args.timezone:
os.environ['TZ'] = args.timezone
if args.auto_agpl:
import tarfile
# --rpc-load-mempool loops forever, make sure it's used with
# --no-load/--no-serve so users know the implications
if args.rpc_load_mempool and not (args.no_load or args.no_serve):
sys.stderr.write("Error: --rpc-load-mempool requires --no-serve\n")
return 1
store = make_store(args)
if (not args.no_serve):
serve(store)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| 79,821 | Python | .py | 1,824 | 31.859649 | 114 | 0.514801 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,432 | verify.py | bitcoin-abe_bitcoin-abe/Abe/verify.py |
#!/usr/bin/env python
# Prototype database validation script. Same args as abe.py.
# Copyright(C) 2011,2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
import sys
import getopt
import DataStore
import util
import logging
# List of block statistics to check.
BLOCK_STATS_LIST = [
'value_in',
'value_out',
'total_satoshis',
'total_seconds',
'satoshi_seconds',
'total_ss',
'ss_destroyed',
]
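# These names correspond to the block_* columns recomputed by
# AbeVerify.verify_block_stats(). A subset can be selected with --blkstats;
# for example (hypothetical database arguments):
#
#   python verify.py --dbtype=sqlite3 --connect-args=abe.sqlite \
#       --block-stats --blkstats=value_in,value_out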
class AbeVerify:
def __init__(self, store, logger):
self.store = store
self.logger = logger
self.block_min = None
self.block_max = None
self.ckmerkle = False
self.ckbti = False
self.ckstats = False
self.repair = False
self.blkstats = BLOCK_STATS_LIST
self.stats = {
'mchecked': 0, # Blocks checked for Merkle root
'mbad': 0, # Merkle errors
'schecked': 0, # Blocks checked for stats
'sbad': 0, # Blocks with any stats error
'btiblks': 0, # Blocks checked for block_txin links
'btichecked': 0, # block_txin txin_id's checked
'btimiss': 0, # Missing txin_id's
'btibad': 0, # txin_id's linked to wrong block (untested)
}
def verify_blockchain(self, chain_id, chain):
# Reset stats
self.stats = {key: 0 for key in self.stats}
params = (chain_id,)
if self.block_min is not None:
params += (self.block_min,)
if self.block_max is not None:
params += (self.block_max,)
# Retain height after loop
block_height = 0
for block_id, block_height in self.store.selectall("""
SELECT block_id, block_height
FROM chain_candidate
WHERE chain_id = ?""" + (
"" if self.block_min is None else """
AND block_height >= ?""") + (
"" if self.block_max is None else """
AND block_height <= ?""") + """
ORDER BY block_height ASC, block_id ASC""", params):
if block_height is None:
self.logger.error("Block id %d has no height (skipped)", block_id)
continue
if self.ckmerkle:
self.verify_tx_merkle_hash(block_id, chain)
self.procstats("Merkle trees", block_height,
self.stats['mchecked'], self.stats['mbad'])
if self.ckbti:
self.verify_block_txin(block_id, chain_id)
self.procstats("Block txins", block_height,
self.stats['btichecked'],
self.stats['btimiss'] + self.stats['btibad'],
blocks=self.stats['btiblks'])
if self.ckstats:
self.verify_block_stats(block_id, chain_id)
self.procstats("Block stats", block_height,
self.stats['schecked'], self.stats['sbad'])
if self.repair:
# XXX: Make this time-based? The goal is to not hold locks for
# too long, yet avoid committing many times per second on the
# earliest blocks
self.store.commit()
if self.ckmerkle:
self.procstats("Merkle trees", block_height, self.stats['mchecked'],
self.stats['mbad'], last=True)
if self.ckbti:
self.procstats("Block txins", block_height,
self.stats['btichecked'],
self.stats['btimiss'] + self.stats['btibad'],
blocks=self.stats['btiblks'], last=True)
if self.ckstats:
self.procstats("Block stats", block_height, self.stats['schecked'],
self.stats['sbad'], last=True)
if self.repair:
self.store.commit()
def procstats(self, name, height, checked, bad, blocks=False, last=False):
if blocks is False:
blocks = checked
# XOR: log a progress line every 1000 blocks, and a final summary line
# unless it would merely repeat the progress line just printed.
if (blocks % 1000 == 0) is not last:
lst = ("last " if last else "")
self.logger.warning("%d %s (%sheight: %d): %s bad",
checked, name, lst, height, bad)
def verify_tx_merkle_hash(self, block_id, chain):
block_height, merkle_root, num_tx = self.store.selectrow("""
SELECT b.block_height, b.block_hashMerkleRoot, b.block_num_tx
FROM block b
WHERE b.block_id = ?""", (block_id,))
merkle_root = self.store.hashout(merkle_root)
tree = []
for (tx_hash,) in self.store.selectall("""
SELECT tx.tx_hash
FROM block_tx bt
JOIN tx ON (bt.tx_id = tx.tx_id)
WHERE bt.block_id = ?
ORDER BY bt.tx_pos""", (block_id,)):
tree.append(self.store.hashout(tx_hash))
# Log a single error for either a bad num_tx or a bad merkle root
bad = 0
if len(tree) != num_tx:
self.logger.info("block %d (id %d): block_num_tx=%d but found %d",
block_height, block_id, num_tx, len(tree))
bad = 1
root = chain.merkle_root(tree) or util.NULL_HASH
if root != merkle_root:
self.logger.info("block %d (id %s): block_hashMerkleRoot mismatch",
block_height, block_id)
bad = 1
self.stats['mbad'] += bad
self.stats['mchecked'] += 1
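# For Sha256Chain-style chains, chain.merkle_root() reduces the ordered list
# of transaction hashes pairwise with double SHA-256, duplicating the last
# hash when a level has an odd count. A minimal sketch of that reduction
# (illustrative only; not the code Abe actually calls):
#
#   def merkle_root_sketch(hashes):
#       import hashlib
#       dsha = lambda d: hashlib.sha256(hashlib.sha256(d).digest()).digest()
#       while len(hashes) > 1:
#           if len(hashes) % 2:
#               hashes.append(hashes[-1])
#           hashes = [dsha(hashes[i] + hashes[i + 1])
#                     for i in range(0, len(hashes), 2)]
#       return hashes[0] if hashes else None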
def verify_block_txin(self, block_id, chain_id):
rows = self.store.selectall("""
SELECT txin_id, out_block_id
FROM block_txin
WHERE block_id = ?
ORDER BY txin_id ASC""", (block_id,))
known_ids = {row[0]: row[1] for row in rows}
checks = len(rows)
missing = set()
redo = set()
if checks:
# Find all missing txin_id's
for txin_id, in self.store.selectall("""
SELECT txin.txin_id
FROM block_tx bt
JOIN txin ON (txin.tx_id = bt.tx_id)
JOIN txout ON (txin.txout_id = txout.txout_id)
JOIN block_tx obt ON (txout.tx_id = obt.tx_id)
JOIN block ob ON (obt.block_id = ob.block_id)
WHERE bt.block_id = ?
AND ob.block_chain_work IS NOT NULL
AND bt.tx_pos <> 0
ORDER BY txin.txin_id ASC""", (block_id,)):
if txin_id not in known_ids:
missing.add(txin_id)
self.logger.info("block id %d: txin_id %d not found in "
"block_txin", block_id, txin_id)
# Check all txin_id's already present (what we would insert)
for txin_id, obt_id in self._populate_block_txin(int(block_id),
skip_txin=missing, check_only=True):
if obt_id != known_ids[txin_id]:
redo.add(txin_id)
self.logger.info("block id %d: txin_id %d out_block_id "
"is %d (should be %s)", block_id, txin_id,
known_ids[txin_id], obt_id)
if (redo or missing) and self.repair:
# Delete erroneous block_txin's and insert both sets
for txin_id in redo:
self.store.sql("""
DELETE FROM block_txin
WHERE block_id = ?
AND txin_id = ?""", (block_id, txin_id))
# Take out redo's from known_ids
skip_ids = set(known_ids).difference(redo)
self._populate_block_txin(int(block_id), skip_txin=skip_ids)
self.logger.info("block id %d: txin links repaired", block_id)
# Record stats
self.stats['btimiss'] += len(missing)
self.stats['btibad'] += len(redo)
self.stats['btiblks'] += 1
self.stats['btichecked'] += checks + len(missing)
def verify_block_stats(self, block_id, chain_id):
block_height, nTime, value_in, value_out, total_satoshis, \
total_seconds, satoshi_seconds, total_ss, ss_destroyed, \
prev_nTime, prev_satoshis, prev_seconds, prev_ss, \
prev_total_ss = self.store.selectrow("""
SELECT b.block_height, b.block_nTime, b.block_value_in,
b.block_value_out, b.block_total_satoshis,
b.block_total_seconds, b.block_satoshi_seconds,
b.block_total_ss, b.block_ss_destroyed,
prev.block_nTime, prev.block_total_satoshis,
prev.block_total_seconds, prev.block_satoshi_seconds,
prev.block_total_ss
FROM block b
LEFT JOIN block prev ON (b.prev_block_id = prev.block_id)
WHERE b.block_id = ?""", (block_id,))
if None in (prev_satoshis, prev_seconds, prev_ss, prev_total_ss):
if block_height == 0:
# For genesis block, fill in prev block stats with 0's
prev_satoshis = prev_seconds = prev_ss = prev_total_ss = 0
# This will make this block's total_seconds 0
prev_nTime = nTime
elif self.repair:
raise Exception("Repair with broken prev block, dazed and "
"confused... block %s (height %s): %s" % (
block_id, block_height, str((prev_satoshis, prev_seconds,
prev_ss, prev_total_ss))))
else:
# Prev block contains broken data; cannot check current (and
# it is likely bad as well)
self.logger.info("block %d (id %d): Bad prev block, skipping "
"as assumed bad block", block_height, block_id)
self.stats['schecked'] += 1
self.stats['sbad'] += 1
return
# A dict makes comparison easier
d = {
'value_in': value_in,
'value_out': value_out,
'total_satoshis': total_satoshis,
'total_seconds': total_seconds,
'satoshi_seconds': satoshi_seconds,
'total_ss': total_ss,
'ss_destroyed': ss_destroyed
}
b = dict()
# Modified version of self.store.get_received_and_last_block_id()
b['value_in'], = self.store.selectrow("""
SELECT COALESCE(value_sum, 0)
FROM chain c LEFT JOIN (
SELECT cc.chain_id, SUM(txout.txout_value) value_sum
FROM txout
JOIN txin ON (txin.txout_id = txout.txout_id)
JOIN block_tx ON (block_tx.tx_id = txin.tx_id)
JOIN block b ON (b.block_id = block_tx.block_id)
JOIN chain_candidate cc ON (cc.block_id = b.block_id)
WHERE
cc.chain_id = ? AND
b.block_id = ?
GROUP BY cc.chain_id
) a ON (c.chain_id = a.chain_id)
WHERE c.chain_id = ?""", (chain_id, block_id, chain_id))
b['value_in'] = (b['value_in'] if b['value_in'] else 0)
# Modified version of self.store.get_sent_and_last_block_id()
b['value_out'], = self.store.selectrow("""
SELECT COALESCE(value_sum, 0)
FROM chain c LEFT JOIN (
SELECT cc.chain_id, SUM(txout.txout_value) value_sum
FROM txout
JOIN block_tx ON (block_tx.tx_id = txout.tx_id)
JOIN block b ON (b.block_id = block_tx.block_id)
JOIN chain_candidate cc ON (cc.block_id = b.block_id)
WHERE
cc.chain_id = ? AND
b.block_id = ?
GROUP BY cc.chain_id
) a ON (c.chain_id = a.chain_id)
WHERE c.chain_id = ?""", (chain_id, block_id, chain_id))
b['value_out'] = (b['value_out'] if b['value_out'] else 0)
b['total_seconds'] = prev_seconds + nTime - prev_nTime
ss_created = prev_satoshis * (nTime - prev_nTime)
b['total_ss'] = prev_total_ss + ss_created
tx_ids = map(
lambda row: row[0],
self.store.selectall("""
SELECT tx_id
FROM block_tx
WHERE block_id = ?
ORDER BY tx_pos ASC""", (block_id,)))
b['ss_destroyed'] = 0
# Modified version of self.store._get_block_ss_destroyed()
block_ss_destroyed = 0
for tx_id in tx_ids:
destroyed = 0
# TODO: Warn if inner loop isn't used
# Don't do the math in SQL as we risk losing precision
for txout_value, block_nTime in self.store.selectall("""
SELECT COALESCE(txout_approx.txout_approx_value, 0),
b.block_nTime
FROM block_txin bti
JOIN txin ON (bti.txin_id = txin.txin_id)
JOIN txout_approx ON (txin.txout_id = txout_approx.txout_id)
JOIN block_tx obt ON (txout_approx.tx_id = obt.tx_id)
JOIN block b ON (obt.block_id = b.block_id)
WHERE bti.block_id = ? AND txin.tx_id = ?""",
(block_id, tx_id)):
destroyed += txout_value * (nTime - block_nTime)
b['ss_destroyed'] += destroyed
b['satoshi_seconds'] = prev_ss + ss_created - b['ss_destroyed']
# Modified version of self.store.tx_find_id_and_value (finding
# value_destroyed only)
value_destroyed = 0
for tid in tx_ids:
destroyed, = self.store.selectrow("""
SELECT SUM(txout.txout_value) - SUM(
CASE WHEN txout.pubkey_id IS NOT NULL AND txout.pubkey_id <= 0
THEN 0 ELSE txout.txout_value END)
FROM tx
LEFT JOIN txout ON (tx.tx_id = txout.tx_id)
WHERE tx.tx_id = ?""", (tid,))
value_destroyed += destroyed
b['total_satoshis'] = prev_satoshis + b['value_out'] \
- b['value_in'] - value_destroyed
if None in b.values():
raise Exception("Stats computation error: block %d (height %d): "
"%s" % (block_id, block_height, str(b)))
# Finally... Check stats values match between d and b
badcheck = False
for key in self.blkstats:
if d[key] != b[key]:
badcheck = True
self.logger.info("block %d (id %d): %s do not match: %s "
"(should be %s)", block_height, block_id,
key, d[key], b[key])
self.stats['schecked'] += 1
if badcheck and self.repair:
self.store.sql("""
UPDATE block
SET block_value_in = ?,
block_value_out = ?,
block_total_seconds = ?,
block_total_satoshis = ?,
block_satoshi_seconds = ?,
block_total_ss = ?,
block_ss_destroyed = ?
WHERE block_id = ?""",
(self.store.intin(b['value_in']),
self.store.intin(b['value_out']),
self.store.intin(b['total_seconds']),
self.store.intin(b['total_satoshis']),
self.store.intin(b['satoshi_seconds']),
self.store.intin(b['total_ss']),
self.store.intin(b['ss_destroyed']),
block_id))
self.logger.info("block %d (id %d): stats repaired",
block_height, block_id)
if badcheck:
self.stats['sbad'] += 1
# Copied and modified from the same function in DataStore.py
def _populate_block_txin(self, block_id, skip_txin=set(), check_only=False):
# Create rows in block_txin. In case of duplicate transactions,
# choose the one with the lowest block ID. XXX For consistency,
# it should be the lowest height instead of block ID.
txin_oblocks = {}
ret = []
for txin_id, oblock_id in self.store.selectall("""
SELECT txin.txin_id, obt.block_id
FROM block_tx bt
JOIN txin ON (txin.tx_id = bt.tx_id)
JOIN txout ON (txin.txout_id = txout.txout_id)
JOIN block_tx obt ON (txout.tx_id = obt.tx_id)
JOIN block ob ON (obt.block_id = ob.block_id)
WHERE bt.block_id = ?
AND ob.block_chain_work IS NOT NULL
ORDER BY txin.txin_id ASC, obt.block_id ASC""", (block_id,)):
# Repair only missing txins
if txin_id in skip_txin:
continue
# Save all candidates; the lowest ID might not be a descendant if we
# have multiple block candidates
txin_oblocks.setdefault(txin_id, []).append(oblock_id)
for txin_id, oblock_ids in txin_oblocks.iteritems():
for oblock_id in oblock_ids:
if self.store.is_descended_from(block_id, int(oblock_id)):
if check_only:
# Skip update part to test what should be inserted
# NB: can't use yield as we also call method normally!
ret.append((txin_id, oblock_id))
continue
# Store lowest block id that is descended from our block
self.store.sql("""
INSERT INTO block_txin (block_id, txin_id, out_block_id)
VALUES (?, ?, ?)""", (block_id, txin_id, oblock_id))
return ret
def main(argv):
cmdline = util.CmdLine(argv)
cmdline.usage = lambda: \
"""Usage: verify.py --dbtype=MODULE --connect-args=ARGS [checks]
Check database consistency
Chain selection:
--chain LIST Comma-separated list of chains to check (Default: all)
Checks:
--check-all Check everything (overrides all other check options)
--merkle-roots Check merkle root hashes against block's transaction
--block-txins Check block txin-to-out-block links used in block
statistics computation
--block-stats Check block statistics computed from prev blocks and
transactions
Options:
--verbose Print all errors found (default)
--quiet Print only progress info and error summary
--silent Print nothing; no feedback beside return code
--min-height N Check only blocks starting at height N
--max-height N Stop checking blocks above height N
--blkstats LIST Comma-separated list of block statistics to check
Default: all valid values:
""" + ','.join(BLOCK_STATS_LIST) + """
--repair Attempt to repair the database (not all checks support
repair)
Warning: Some checks rely on previous blocks to have valid information.
Testing from a specific height does not guarantee that the previous blocks are
valid; the computed data may look plausible while still being wrong overall.
The checks can generate a lot of output in the default mode (--verbose). To
limit output to progress messages and results use the --quiet option.
"""
store, argv = cmdline.init()
if store is None:
return 0
logger = logging.getLogger("verify")
# Override any defined loggers from abe's config
logging.root.handlers = []
logging.basicConfig(stream=sys.stdout, level=logging.INFO,
format="%(asctime)s: %(name)s: %(message)s")
chk = AbeVerify(store, logger)
try:
opts, args = getopt.getopt(argv, "", [
'chain=',
'check-all',
'merkle-roots',
'block-txins',
'block-stats',
'verbose',
'quiet',
'silent',
'min-height=',
'max-height=',
'blkstats=',
'repair',
])
except getopt.GetoptError as e:
print e.msg, "\n\n", cmdline.usage()
return 1
chains = None
err = 0
for opt, arg in opts:
if opt == '--chain':
chains = arg.split(',')
if opt == '--check-all':
chk.ckmerkle, chk.ckbti, chk.ckstats = True, True, True
if opt == '--merkle-roots':
chk.ckmerkle = True
if opt == '--block-txins':
chk.ckbti = True
if opt == '--block-stats':
chk.ckstats = True
if opt == '--verbose':
logger.setLevel('INFO')
if opt == '--quiet':
logger.setLevel('WARNING')
if opt == '--silent':
logger.setLevel('ERROR')
if opt == '--min-height':
chk.block_min = int(arg)
if opt == '--max-height':
chk.block_max = int(arg)
if opt == '--blkstats':
chk.blkstats = arg.split(',')
if opt == '--repair':
chk.repair = True
if args:
print "Extra argument: %s!\n\n" % args[0], cmdline.usage()
return 1
if True not in (chk.ckmerkle, chk.ckbti, chk.ckstats):
print "No checks selected!\n\n", cmdline.usage()
return 1
for chain_id, in store.selectall("""
SELECT chain_id FROM chain ORDER BY chain_id DESC"""):
chain = store.chains_by.id[chain_id]
if chains is not None:
if chain.name not in chains:
continue
chains.remove(chain.name)
logger.warning("Checking %s chain (id %d) at height %d",
chain.name, chain_id, (chk.block_min if chk.block_min else 0))
try:
chk.verify_blockchain(chain_id, chain)
except KeyboardInterrupt:
# Prevents some DB warnings
store.close()
raise
endmsg = "Chain %s: %d blocks checked"
endparams = (max(chk.stats['mchecked'], chk.stats['schecked']),)
err += max(chk.stats['mbad'], chk.stats['sbad'])
if chk.ckmerkle and chk.stats['mbad']:
endmsg += ", %d bad merkle tree hashes"
endparams += (chk.stats['mbad'],)
if chk.ckbti and chk.stats['btimiss']:
endmsg += ", %d missing block txins"
endparams += (chk.stats['btimiss'],)
if chk.ckbti and chk.stats['btibad']:
endmsg += ", %d bad block txins"
endparams += (chk.stats['btibad'],)
if chk.ckstats and chk.stats['sbad']:
endmsg += ", %d bad blocks stats"
endparams += (chk.stats['sbad'],)
if len(endparams) == 1:
endmsg += ", no error found"
logger.warning(endmsg, chain.name, *endparams)
if chains:
err += 1
logger.warning("%d chain%s not found: %s",
len(chains),
("s" if len(chains) > 1 else ""),
', '.join(chains),
)
return err and 1
if __name__ == '__main__':
try:
sys.exit(main(sys.argv[1:]))
except KeyboardInterrupt:
print >>sys.stderr, "\rInterrupted!"
sys.exit(1)
| 24,026 | Python | .py | 523 | 32.982792 | 82 | 0.53415 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,433 | DataStore.py | bitcoin-abe_bitcoin-abe/Abe/DataStore.py |
# Copyright(C) 2011,2012,2013,2014 by Abe developers.
# DataStore.py: back end database access for Abe.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
# This module combines three functions that might be better split up:
# 1. Abe's schema
# 2. Abstraction over the schema for importing blocks, etc.
# 3. Code to load data by scanning blockfiles or using JSON-RPC.
import os
import re
import time
import errno
import logging
import SqlAbstraction
import Chain
# bitcointools -- modified deserialize.py to return raw transaction
import BCDataStream
import deserialize
import util
import base58
SCHEMA_TYPE = "Abe"
SCHEMA_VERSION = SCHEMA_TYPE + "41"
CONFIG_DEFAULTS = {
"dbtype": None,
"connect_args": None,
"binary_type": None,
"int_type": None,
"upgrade": None,
"rescan": None,
"commit_bytes": None,
"log_sql": None,
"log_rpc": None,
"default_chain": "Bitcoin",
"datadir": None,
"ignore_bit8_chains": None,
"use_firstbits": False,
"keep_scriptsig": True,
"import_tx": [],
"default_loader": "default",
"rpc_load_mempool": False,
}
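# These defaults can be overridden from abe.conf or directly on the command
# line ("All configuration variables may be given as command arguments", per
# the abe.py usage text). A minimal hypothetical invocation:
#
#   python -m Abe.abe --dbtype=sqlite3 --connect-args=abe.sqlite --upgrade
#
# The database file name here is a placeholder; see abe.conf in the
# repository for commented examples.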
WORK_BITS = 304 # XXX more than necessary.
CHAIN_CONFIG = [
{"chain":"Bitcoin"},
{"chain":"Testnet"},
{"chain":"Namecoin"},
{"chain":"Weeds", "policy":"Sha256Chain",
"code3":"WDS", "address_version":"\xf3", "magic":"\xf8\xbf\xb5\xda"},
{"chain":"BeerTokens", "policy":"Sha256Chain",
"code3":"BER", "address_version":"\xf2", "magic":"\xf7\xbf\xb5\xdb"},
{"chain":"SolidCoin", "policy":"Sha256Chain",
"code3":"SCN", "address_version":"\x7d", "magic":"\xde\xad\xba\xbe"},
{"chain":"ScTestnet", "policy":"Sha256Chain",
"code3":"SC0", "address_version":"\x6f", "magic":"\xca\xfe\xba\xbe"},
{"chain":"Worldcoin", "policy":"Sha256Chain",
"code3":"WDC", "address_version":"\x49", "magic":"\xfb\xc0\xb6\xdb"},
{"chain":"NovaCoin"},
{"chain":"CryptoCash"},
{"chain":"Anoncoin", "policy":"Sha256Chain",
"code3":"ANC", "address_version":"\x17", "magic":"\xFA\xCA\xBA\xDA" },
{"chain":"Hirocoin"},
{"chain":"Bitleu"},
{"chain":"Maxcoin"},
{"chain":"Dash"},
{"chain":"BlackCoin"},
{"chain":"Unbreakablecoin"},
{"chain":"Californium"},
#{"chain":"",
# "code3":"", "address_version":"\x", "magic":""},
]
NULL_PUBKEY_HASH = "\0" * Chain.PUBKEY_HASH_LENGTH
NULL_PUBKEY_ID = 0
PUBKEY_ID_NETWORK_FEE = NULL_PUBKEY_ID
# Size of the script and pubkey columns in bytes.
MAX_SCRIPT = SqlAbstraction.MAX_SCRIPT
MAX_PUBKEY = SqlAbstraction.MAX_PUBKEY
NO_CLOB = SqlAbstraction.NO_CLOB
# XXX This belongs in another module.
class InvalidBlock(Exception):
pass
class MerkleRootMismatch(InvalidBlock):
def __init__(ex, block_hash, tx_hashes):
ex.block_hash = block_hash
ex.tx_hashes = tx_hashes
def __str__(ex):
return 'Block header Merkle root does not match its transactions. ' \
'block hash=%s' % (ex.block_hash[::-1].encode('hex'),)
class MalformedHash(ValueError):
pass
class MalformedAddress(ValueError):
pass
class DataStore(object):
"""
Bitcoin data storage class based on DB-API 2 and standard SQL with
workarounds to support SQLite3, PostgreSQL/psycopg2, MySQL,
Oracle, ODBC, and IBM DB2.
"""
def __init__(store, args):
"""
Open and store a connection to the SQL database.
args.dbtype should name a DB-API 2 driver module, e.g.,
"sqlite3".
args.connect_args should be an argument to the module's
connect() method, or None for no argument, or a list of
arguments, or a dictionary of named arguments.
args.datadir names Bitcoin data directories containing
blk0001.dat to scan for new blocks.
"""
if args.datadir is None:
args.datadir = util.determine_db_dir()
if isinstance(args.datadir, str):
args.datadir = [args.datadir]
store.args = args
store.log = logging.getLogger(__name__)
store.rpclog = logging.getLogger(__name__ + ".rpc")
if not args.log_rpc:
store.rpclog.setLevel(logging.ERROR)
if args.dbtype is None:
store.log.warn("dbtype not configured, see abe.conf for examples");
store.dbmodule = None
store.config = CONFIG_DEFAULTS.copy()
store.datadirs = []
store.use_firstbits = CONFIG_DEFAULTS['use_firstbits']
store._sql = None
return
store.dbmodule = __import__(args.dbtype)
sql_args = lambda: 1
sql_args.module = store.dbmodule
sql_args.connect_args = args.connect_args
sql_args.binary_type = args.binary_type
sql_args.int_type = args.int_type
sql_args.log_sql = args.log_sql
sql_args.prefix = "abe_"
sql_args.config = {}
store.sql_args = sql_args
store.set_db(None)
store.init_sql()
store._blocks = {}
# Read the CONFIG and CONFIGVAR tables if present.
store.config = store._read_config()
if store.config is None:
store.keep_scriptsig = args.keep_scriptsig
elif 'keep_scriptsig' in store.config:
store.keep_scriptsig = store.config.get('keep_scriptsig') == "true"
else:
store.keep_scriptsig = CONFIG_DEFAULTS['keep_scriptsig']
store.refresh_ddl()
if store.config is None:
store.initialize()
else:
store.init_sql()
if store.config['schema_version'] == SCHEMA_VERSION:
pass
elif args.upgrade:
import upgrade
upgrade.upgrade_schema(store)
else:
raise Exception(
"Database schema version (%s) does not match software"
" (%s). Please run with --upgrade to convert database."
% (store.config['schema_version'], SCHEMA_VERSION))
store._sql.auto_reconnect = True
if args.rescan:
store.sql("UPDATE datadir SET blkfile_number=1, blkfile_offset=0")
store._init_datadirs()
store.init_chains()
store.commit_bytes = args.commit_bytes
if store.commit_bytes is None:
store.commit_bytes = 0 # Commit whenever possible.
else:
store.commit_bytes = int(store.commit_bytes)
store.bytes_since_commit = 0
store.use_firstbits = (store.config['use_firstbits'] == "true")
for hex_tx in args.import_tx:
chain_name = None
if isinstance(hex_tx, dict):
chain_name = hex_tx.get("chain")
hex_tx = hex_tx.get("tx")
store.maybe_import_binary_tx(chain_name, str(hex_tx).decode('hex'))
store.default_loader = args.default_loader
store.rpc_load_mempool = args.rpc_load_mempool
store.default_chain = args.default_chain
store.commit()
def set_db(store, db):
store._sql = db
def get_db(store):
return store._sql
def connect(store):
return store._sql.connect()
def reconnect(store):
return store._sql.reconnect()
def close(store):
store._sql.close()
def commit(store):
store._sql.commit()
def rollback(store):
if store._sql is not None:
store._sql.rollback()
def sql(store, stmt, params=()):
store._sql.sql(stmt, params)
def ddl(store, stmt):
store._sql.ddl(stmt)
def selectrow(store, stmt, params=()):
return store._sql.selectrow(stmt, params)
def selectall(store, stmt, params=()):
return store._sql.selectall(stmt, params)
def rowcount(store):
return store._sql.rowcount()
def create_sequence(store, key):
store._sql.create_sequence(key)
def drop_sequence(store, key):
store._sql.drop_sequence(key)
def new_id(store, key):
return store._sql.new_id(key)
def init_sql(store):
sql_args = store.sql_args
if hasattr(store, 'config'):
for name in store.config.keys():
if name.startswith('sql.'):
sql_args.config[name[len('sql.'):]] = store.config[name]
if store._sql:
store._sql.close() # XXX Could just set_flavour.
store.set_db(SqlAbstraction.SqlAbstraction(sql_args))
store.init_binfuncs()
def init_binfuncs(store):
store.binin = store._sql.binin
store.binin_hex = store._sql.binin_hex
store.binin_int = store._sql.binin_int
store.binout = store._sql.binout
store.binout_hex = store._sql.binout_hex
store.binout_int = store._sql.binout_int
store.intin = store._sql.intin
store.hashin = store._sql.revin
store.hashin_hex = store._sql.revin_hex
store.hashout = store._sql.revout
store.hashout_hex = store._sql.revout_hex
def _read_config(store):
# Read table CONFIGVAR if it exists.
config = {}
try:
for name, value in store.selectall("""
SELECT configvar_name, configvar_value
FROM configvar"""):
config[name] = '' if value is None else value
if config:
return config
except store.dbmodule.DatabaseError:
try:
store.rollback()
except Exception:
pass
# Read legacy table CONFIG if it exists.
try:
row = store.selectrow("""
SELECT schema_version, binary_type
FROM config
WHERE config_id = 1""")
sv, btype = row
return { 'schema_version': sv, 'binary_type': btype }
except Exception:
try:
store.rollback()
except Exception:
pass
# Return None to indicate no schema found.
return None
def _init_datadirs(store):
"""Parse store.args.datadir, create store.datadirs."""
if store.args.datadir == []:
store.datadirs = []
return
datadirs = {}
for row in store.selectall("""
SELECT datadir_id, dirname, blkfile_number, blkfile_offset,
chain_id
FROM datadir"""):
id, dir, num, offs, chain_id = row
datadirs[dir] = {
"id": id,
"dirname": dir,
"blkfile_number": int(num),
"blkfile_offset": int(offs),
"chain_id": None if chain_id is None else int(chain_id),
"loader": None}
#print("datadirs: %r" % datadirs)
# By default, scan every dir we know. This doesn't happen in
# practice, because abe.py sets ~/.bitcoin as the default datadir.
if store.args.datadir is None:
store.datadirs = datadirs.values()
return
def lookup_chain_id(name):
row = store.selectrow(
"SELECT chain_id FROM chain WHERE chain_name = ?",
(name,))
return None if row is None else int(row[0])
store.datadirs = []
for dircfg in store.args.datadir:
loader = None
conf = None
if isinstance(dircfg, dict):
#print("dircfg is dict: %r" % dircfg) # XXX
dirname = dircfg.get('dirname')
if dirname is None:
raise ValueError(
'Missing dirname in datadir configuration: '
+ str(dircfg))
if dirname in datadirs:
d = datadirs[dirname]
d['loader'] = dircfg.get('loader')
d['conf'] = dircfg.get('conf')
if d['chain_id'] is None and 'chain' in dircfg:
d['chain_id'] = lookup_chain_id(dircfg['chain'])
store.datadirs.append(d)
continue
loader = dircfg.get('loader')
conf = dircfg.get('conf')
chain_id = dircfg.get('chain_id')
if chain_id is None:
chain_name = dircfg.get('chain')
chain_id = lookup_chain_id(chain_name)
if chain_id is None and chain_name is not None:
chain_id = store.new_id('chain')
code3 = dircfg.get('code3')
if code3 is None:
# XXX Should default via policy.
code3 = '000' if chain_id > 999 else "%03d" % (
chain_id,)
addr_vers = dircfg.get('address_version')
if addr_vers is None:
addr_vers = "\0"
elif isinstance(addr_vers, unicode):
addr_vers = addr_vers.encode('latin_1')
script_addr_vers = dircfg.get('script_addr_vers')
if script_addr_vers is None:
script_addr_vers = "\x05"
elif isinstance(script_addr_vers, unicode):
script_addr_vers = script_addr_vers.encode('latin_1')
decimals = dircfg.get('decimals')
if decimals is not None:
decimals = int(decimals)
# XXX Could do chain_magic, but this datadir won't
# use it, because it knows its chain.
store.sql("""
INSERT INTO chain (
chain_id, chain_name, chain_code3,
chain_address_version, chain_script_addr_vers, chain_policy,
chain_decimals
) VALUES (?, ?, ?, ?, ?, ?, ?)""",
(chain_id, chain_name, code3,
store.binin(addr_vers), store.binin(script_addr_vers),
dircfg.get('policy', chain_name), decimals))
store.commit()
store.log.warning("Assigned chain_id %d to %s",
chain_id, chain_name)
elif dircfg in datadirs:
store.datadirs.append(datadirs[dircfg])
continue
else:
# Not a dict. A string naming a directory holding
# standard chains.
dirname = dircfg
chain_id = None
d = {
"id": store.new_id("datadir"),
"dirname": dirname,
"blkfile_number": 1,
"blkfile_offset": 0,
"chain_id": chain_id,
"loader": loader,
"conf": conf,
}
store.datadirs.append(d)
def init_chains(store):
store.chains_by = lambda: 0
store.chains_by.id = {}
store.chains_by.name = {}
store.chains_by.magic = {}
# Legacy config option.
no_bit8_chains = store.args.ignore_bit8_chains or []
if isinstance(no_bit8_chains, str):
no_bit8_chains = [no_bit8_chains]
for chain_id, magic, chain_name, chain_code3, address_version, script_addr_vers, \
chain_policy, chain_decimals in \
store.selectall("""
SELECT chain_id, chain_magic, chain_name, chain_code3,
chain_address_version, chain_script_addr_vers, chain_policy, chain_decimals
FROM chain
"""):
chain = Chain.create(
id = int(chain_id),
magic = store.binout(magic),
name = unicode(chain_name),
code3 = chain_code3 and unicode(chain_code3),
address_version = store.binout(address_version),
script_addr_vers = store.binout(script_addr_vers),
policy = unicode(chain_policy),
decimals = None if chain_decimals is None else \
int(chain_decimals))
# Legacy config option.
if chain.name in no_bit8_chains and \
chain.has_feature('block_version_bit8_merge_mine'):
chain = Chain.create(src=chain, policy="LegacyNoBit8")
store.chains_by.id[chain.id] = chain
store.chains_by.name[chain.name] = chain
store.chains_by.magic[bytes(chain.magic)] = chain
def get_chain_by_id(store, chain_id):
return store.chains_by.id[int(chain_id)]
def get_chain_by_name(store, name):
return store.chains_by.name.get(name, None)
def get_default_chain(store):
store.log.debug("Falling back to default (Bitcoin) policy.")
return Chain.create(store.default_chain)
def get_ddl(store, key):
return store._ddl[key]
def refresh_ddl(store):
store._ddl = {
"chain_summary":
# XXX I could do a lot with MATERIALIZED views.
"""CREATE VIEW chain_summary AS SELECT
cc.chain_id,
cc.in_longest,
b.block_id,
b.block_hash,
b.block_version,
b.block_hashMerkleRoot,
b.block_nTime,
b.block_nBits,
b.block_nNonce,
cc.block_height,
b.prev_block_id,
prev.block_hash prev_block_hash,
b.block_chain_work,
b.block_num_tx,
b.block_value_in,
b.block_value_out,
b.block_total_satoshis,
b.block_total_seconds,
b.block_satoshi_seconds,
b.block_total_ss,
b.block_ss_destroyed
FROM chain_candidate cc
JOIN block b ON (cc.block_id = b.block_id)
LEFT JOIN block prev ON (b.prev_block_id = prev.block_id)""",
"txout_detail":
"""CREATE VIEW txout_detail AS SELECT
cc.chain_id,
cc.in_longest,
cc.block_id,
b.block_hash,
b.block_height,
block_tx.tx_pos,
tx.tx_id,
tx.tx_hash,
tx.tx_lockTime,
tx.tx_version,
tx.tx_size,
txout.txout_id,
txout.txout_pos,
txout.txout_value,
txout.txout_scriptPubKey,
pubkey.pubkey_id,
pubkey.pubkey_hash,
pubkey.pubkey
FROM chain_candidate cc
JOIN block b ON (cc.block_id = b.block_id)
JOIN block_tx ON (b.block_id = block_tx.block_id)
JOIN tx ON (tx.tx_id = block_tx.tx_id)
JOIN txout ON (tx.tx_id = txout.tx_id)
LEFT JOIN pubkey ON (txout.pubkey_id = pubkey.pubkey_id)""",
"txin_detail":
"""CREATE VIEW txin_detail AS SELECT
cc.chain_id,
cc.in_longest,
cc.block_id,
b.block_hash,
b.block_height,
block_tx.tx_pos,
tx.tx_id,
tx.tx_hash,
tx.tx_lockTime,
tx.tx_version,
tx.tx_size,
txin.txin_id,
txin.txin_pos,
txin.txout_id prevout_id""" + (""",
txin.txin_scriptSig,
txin.txin_sequence""" if store.keep_scriptsig else """,
NULL txin_scriptSig,
NULL txin_sequence""") + """,
prevout.txout_value txin_value,
prevout.txout_scriptPubKey txin_scriptPubKey,
pubkey.pubkey_id,
pubkey.pubkey_hash,
pubkey.pubkey
FROM chain_candidate cc
JOIN block b ON (cc.block_id = b.block_id)
JOIN block_tx ON (b.block_id = block_tx.block_id)
JOIN tx ON (tx.tx_id = block_tx.tx_id)
JOIN txin ON (tx.tx_id = txin.tx_id)
LEFT JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
LEFT JOIN pubkey
ON (prevout.pubkey_id = pubkey.pubkey_id)""",
"txout_approx":
# View of txout for drivers like sqlite3 that cannot handle large
# integer arithmetic. For them, we transform the definition of
# txout_approx_value to DOUBLE PRECISION (approximate) by a CAST.
"""CREATE VIEW txout_approx AS SELECT
txout_id,
tx_id,
txout_value txout_approx_value
FROM txout""",
"configvar":
# ABE accounting. This table is read without knowledge of the
# database's SQL quirks, so it must use only the most widely supported
# features.
"""CREATE TABLE configvar (
configvar_name VARCHAR(100) NOT NULL PRIMARY KEY,
configvar_value VARCHAR(255)
)""",
"abe_sequences":
"""CREATE TABLE abe_sequences (
sequence_key VARCHAR(100) NOT NULL PRIMARY KEY,
nextid NUMERIC(30)
)""",
}
def initialize(store):
"""
Create the database schema.
"""
store.config = {}
store.configure()
for stmt in (
store._ddl['configvar'],
"""CREATE TABLE datadir (
datadir_id NUMERIC(10) NOT NULL PRIMARY KEY,
dirname VARCHAR(2000) NOT NULL,
blkfile_number NUMERIC(8) NULL,
blkfile_offset NUMERIC(20) NULL,
chain_id NUMERIC(10) NULL
)""",
# A block of the type used by Bitcoin.
"""CREATE TABLE block (
block_id NUMERIC(14) NOT NULL PRIMARY KEY,
block_hash BINARY(32) UNIQUE NOT NULL,
block_version NUMERIC(10),
block_hashMerkleRoot BINARY(32),
block_nTime NUMERIC(20),
block_nBits NUMERIC(10),
block_nNonce NUMERIC(10),
block_height NUMERIC(14) NULL,
prev_block_id NUMERIC(14) NULL,
search_block_id NUMERIC(14) NULL,
block_chain_work BINARY(""" + str(WORK_BITS / 8) + """),
block_value_in NUMERIC(30) NULL,
block_value_out NUMERIC(30),
block_total_satoshis NUMERIC(26) NULL,
block_total_seconds NUMERIC(20) NULL,
block_satoshi_seconds NUMERIC(28) NULL,
block_total_ss NUMERIC(28) NULL,
block_num_tx NUMERIC(10) NOT NULL,
block_ss_destroyed NUMERIC(28) NULL,
FOREIGN KEY (prev_block_id)
REFERENCES block (block_id),
FOREIGN KEY (search_block_id)
REFERENCES block (block_id)
)""",
# CHAIN comprises a magic number, a policy, and (indirectly via
# CHAIN_LAST_BLOCK_ID and the referenced block's ancestors) a genesis
# block, possibly null. A chain may have a currency code.
"""CREATE TABLE chain (
chain_id NUMERIC(10) NOT NULL PRIMARY KEY,
chain_name VARCHAR(100) UNIQUE NOT NULL,
chain_code3 VARCHAR(5) NULL,
chain_address_version VARBINARY(100) NOT NULL,
chain_script_addr_vers VARBINARY(100) NULL,
chain_magic BINARY(4) NULL,
chain_policy VARCHAR(255) NOT NULL,
chain_decimals NUMERIC(2) NULL,
chain_last_block_id NUMERIC(14) NULL,
FOREIGN KEY (chain_last_block_id)
REFERENCES block (block_id)
)""",
# CHAIN_CANDIDATE lists blocks that are, or might become, part of the
# given chain. IN_LONGEST is 1 when the block is in the chain, else 0.
# IN_LONGEST denormalizes information stored canonically in
# CHAIN.CHAIN_LAST_BLOCK_ID and BLOCK.PREV_BLOCK_ID.
"""CREATE TABLE chain_candidate (
chain_id NUMERIC(10) NOT NULL,
block_id NUMERIC(14) NOT NULL,
in_longest NUMERIC(1),
block_height NUMERIC(14),
PRIMARY KEY (chain_id, block_id),
FOREIGN KEY (block_id) REFERENCES block (block_id)
)""",
"""CREATE INDEX x_cc_block ON chain_candidate (block_id)""",
"""CREATE INDEX x_cc_chain_block_height
ON chain_candidate (chain_id, block_height)""",
"""CREATE INDEX x_cc_block_height ON chain_candidate (block_height)""",
# An orphan block must remember its hashPrev.
"""CREATE TABLE orphan_block (
block_id NUMERIC(14) NOT NULL PRIMARY KEY,
block_hashPrev BINARY(32) NOT NULL,
FOREIGN KEY (block_id) REFERENCES block (block_id)
)""",
"""CREATE INDEX x_orphan_block_hashPrev ON orphan_block (block_hashPrev)""",
# Denormalize the relationship inverse to BLOCK.PREV_BLOCK_ID.
"""CREATE TABLE block_next (
block_id NUMERIC(14) NOT NULL,
next_block_id NUMERIC(14) NOT NULL,
PRIMARY KEY (block_id, next_block_id),
FOREIGN KEY (block_id) REFERENCES block (block_id),
FOREIGN KEY (next_block_id) REFERENCES block (block_id)
)""",
# A transaction of the type used by Bitcoin.
"""CREATE TABLE tx (
tx_id NUMERIC(26) NOT NULL PRIMARY KEY,
tx_hash BINARY(32) UNIQUE NOT NULL,
tx_version NUMERIC(10),
tx_lockTime NUMERIC(10),
tx_size NUMERIC(10)
)""",
# Mempool TX not linked to any block, we must track them somewhere
# for efficient cleanup
"""CREATE TABLE unlinked_tx (
tx_id NUMERIC(26) NOT NULL,
PRIMARY KEY (tx_id),
FOREIGN KEY (tx_id)
REFERENCES tx (tx_id)
)""",
# Presence of transactions in blocks is many-to-many.
"""CREATE TABLE block_tx (
block_id NUMERIC(14) NOT NULL,
tx_id NUMERIC(26) NOT NULL,
tx_pos NUMERIC(10) NOT NULL,
PRIMARY KEY (block_id, tx_id),
UNIQUE (block_id, tx_pos),
FOREIGN KEY (block_id)
REFERENCES block (block_id),
FOREIGN KEY (tx_id)
REFERENCES tx (tx_id)
)""",
"""CREATE INDEX x_block_tx_tx ON block_tx (tx_id)""",
# A public key for sending bitcoins. PUBKEY_HASH is derivable from a
# Bitcoin or Testnet address.
"""CREATE TABLE pubkey (
pubkey_id NUMERIC(26) NOT NULL PRIMARY KEY,
pubkey_hash BINARY(20) UNIQUE NOT NULL,
pubkey VARBINARY(""" + str(MAX_PUBKEY) + """) NULL
)""",
"""CREATE TABLE multisig_pubkey (
multisig_id NUMERIC(26) NOT NULL,
pubkey_id NUMERIC(26) NOT NULL,
PRIMARY KEY (multisig_id, pubkey_id),
FOREIGN KEY (multisig_id) REFERENCES pubkey (pubkey_id),
FOREIGN KEY (pubkey_id) REFERENCES pubkey (pubkey_id)
)""",
"""CREATE INDEX x_multisig_pubkey_pubkey ON multisig_pubkey (pubkey_id)""",
# A transaction out-point.
"""CREATE TABLE txout (
txout_id NUMERIC(26) NOT NULL PRIMARY KEY,
tx_id NUMERIC(26) NOT NULL,
txout_pos NUMERIC(10) NOT NULL,
txout_value NUMERIC(30) NOT NULL,
txout_scriptPubKey VARBINARY(""" + str(MAX_SCRIPT) + """),
pubkey_id NUMERIC(26),
UNIQUE (tx_id, txout_pos),
FOREIGN KEY (pubkey_id)
REFERENCES pubkey (pubkey_id)
)""",
"""CREATE INDEX x_txout_pubkey ON txout (pubkey_id)""",
# A transaction in-point.
"""CREATE TABLE txin (
txin_id NUMERIC(26) NOT NULL PRIMARY KEY,
tx_id NUMERIC(26) NOT NULL,
txin_pos NUMERIC(10) NOT NULL,
txout_id NUMERIC(26)""" + (""",
txin_scriptSig VARBINARY(""" + str(MAX_SCRIPT) + """),
txin_sequence NUMERIC(10)""" if store.keep_scriptsig else "") + """,
UNIQUE (tx_id, txin_pos),
FOREIGN KEY (tx_id)
REFERENCES tx (tx_id)
)""",
"""CREATE INDEX x_txin_txout ON txin (txout_id)""",
# While TXIN.TXOUT_ID cannot yet be found, we must remember TXOUT_POS,
# a.k.a. PREVOUT_N.
"""CREATE TABLE unlinked_txin (
txin_id NUMERIC(26) NOT NULL PRIMARY KEY,
txout_tx_hash BINARY(32) NOT NULL,
txout_pos NUMERIC(10) NOT NULL,
FOREIGN KEY (txin_id) REFERENCES txin (txin_id)
)""",
"""CREATE INDEX x_unlinked_txin_outpoint
ON unlinked_txin (txout_tx_hash, txout_pos)""",
"""CREATE TABLE block_txin (
block_id NUMERIC(14) NOT NULL,
txin_id NUMERIC(26) NOT NULL,
out_block_id NUMERIC(14) NOT NULL,
PRIMARY KEY (block_id, txin_id),
FOREIGN KEY (block_id) REFERENCES block (block_id),
FOREIGN KEY (txin_id) REFERENCES txin (txin_id),
FOREIGN KEY (out_block_id) REFERENCES block (block_id)
)""",
store._ddl['chain_summary'],
store._ddl['txout_detail'],
store._ddl['txin_detail'],
store._ddl['txout_approx'],
"""CREATE TABLE abe_lock (
lock_id NUMERIC(10) NOT NULL PRIMARY KEY,
pid VARCHAR(255) NULL
)""",
):
try:
store.ddl(stmt)
except Exception:
store.log.error("Failed: %s", stmt)
raise
for key in ['chain', 'datadir',
'tx', 'txout', 'pubkey', 'txin', 'block']:
store.create_sequence(key)
store.sql("INSERT INTO abe_lock (lock_id) VALUES (1)")
# Insert some well-known chain metadata.
for conf in CHAIN_CONFIG:
conf = conf.copy()
conf["name"] = conf.pop("chain")
if 'policy' in conf:
policy = conf.pop('policy')
else:
policy = conf['name']
chain = Chain.create(policy, **conf)
store.insert_chain(chain)
store.sql("""
INSERT INTO pubkey (pubkey_id, pubkey_hash) VALUES (?, ?)""",
(NULL_PUBKEY_ID, store.binin(NULL_PUBKEY_HASH)))
if store.args.use_firstbits:
store.config['use_firstbits'] = "true"
store.ddl(
"""CREATE TABLE abe_firstbits (
pubkey_id NUMERIC(26) NOT NULL,
block_id NUMERIC(14) NOT NULL,
address_version VARBINARY(10) NOT NULL,
firstbits VARCHAR(50) NOT NULL,
PRIMARY KEY (address_version, pubkey_id, block_id),
FOREIGN KEY (pubkey_id) REFERENCES pubkey (pubkey_id),
FOREIGN KEY (block_id) REFERENCES block (block_id)
)""")
store.ddl(
"""CREATE INDEX x_abe_firstbits
ON abe_firstbits (address_version, firstbits)""")
else:
store.config['use_firstbits'] = "false"
store.config['keep_scriptsig'] = \
"true" if store.args.keep_scriptsig else "false"
store.save_config()
store.commit()
def insert_chain(store, chain):
chain.id = store.new_id("chain")
store.sql("""
INSERT INTO chain (
chain_id, chain_magic, chain_name, chain_code3,
chain_address_version, chain_script_addr_vers, chain_policy, chain_decimals
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
(chain.id, store.binin(chain.magic), chain.name,
chain.code3, store.binin(chain.address_version), store.binin(chain.script_addr_vers),
chain.policy, chain.decimals))
def get_lock(store):
if store.version_below('Abe26'):
return None
conn = store.connect()
cur = conn.cursor()
cur.execute("UPDATE abe_lock SET pid = %d WHERE lock_id = 1"
% (os.getpid(),))
if cur.rowcount != 1:
raise Exception("unexpected rowcount")
cur.close()
# Check whether database supports concurrent updates. Where it
# doesn't (SQLite) we get exclusive access automatically.
try:
import random
letters = "".join([chr(random.randint(65, 90)) for x in xrange(10)])
store.sql("""
INSERT INTO configvar (configvar_name, configvar_value)
VALUES (?, ?)""",
("upgrade-lock-" + letters, 'x'))
except Exception:
store.release_lock(conn)
conn = None
store.rollback()
# XXX Should reread config.
return conn
def release_lock(store, conn):
if conn:
conn.rollback()
conn.close()
def version_below(store, vers):
try:
sv = float(store.config['schema_version'].replace(SCHEMA_TYPE, ''))
except ValueError:
return False
vers = float(vers.replace(SCHEMA_TYPE, ''))
return sv < vers
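# Worked example (illustrative only): with SCHEMA_TYPE == "Abe" and a stored
# schema_version of "Abe41", version_below("Abe26") compares 41.0 < 26.0 and
# returns False (the schema is not older than Abe26).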
def configure(store):
config = store._sql.configure()
store.init_binfuncs()
for name in config.keys():
store.config['sql.' + name] = config[name]
def save_config(store):
store.config['schema_version'] = SCHEMA_VERSION
for name in store.config.keys():
store.save_configvar(name)
def save_configvar(store, name):
store.sql("UPDATE configvar SET configvar_value = ?"
" WHERE configvar_name = ?", (store.config[name], name))
if store.rowcount() == 0:
store.sql("INSERT INTO configvar (configvar_name, configvar_value)"
" VALUES (?, ?)", (name, store.config[name]))
def set_configvar(store, name, value):
store.config[name] = value
store.save_configvar(name)
def cache_block(store, block_id, height, prev_id, search_id):
assert isinstance(block_id, int), repr(block_id)
assert isinstance(height, int), repr(height)
assert prev_id is None or isinstance(prev_id, int)
assert search_id is None or isinstance(search_id, int)
block = {
'height': height,
'prev_id': prev_id,
'search_id': search_id}
store._blocks[block_id] = block
return block
def _load_block(store, block_id):
block = store._blocks.get(block_id)
if block is None:
row = store.selectrow("""
SELECT block_height, prev_block_id, search_block_id
FROM block
WHERE block_id = ?""", (block_id,))
if row is None:
return None
height, prev_id, search_id = row
block = store.cache_block(
block_id, int(height),
None if prev_id is None else int(prev_id),
None if search_id is None else int(search_id))
return block
def get_block_id_at_height(store, height, descendant_id):
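        """Starting from descendant_id, walk back to the block at the given
        height, following search_block_id shortcuts when they do not
        overshoot and prev_block_id otherwise."""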
if height is None:
return None
while True:
block = store._load_block(descendant_id)
if block['height'] == height:
return descendant_id
descendant_id = block[
'search_id'
if util.get_search_height(block['height']) >= height else
'prev_id']
def is_descended_from(store, block_id, ancestor_id):
# ret = store._is_descended_from(block_id, ancestor_id)
# store.log.debug("%d is%s descended from %d", block_id, '' if ret else ' NOT', ancestor_id)
# return ret
# def _is_descended_from(store, block_id, ancestor_id):
block = store._load_block(block_id)
ancestor = store._load_block(ancestor_id)
height = ancestor['height']
return block['height'] >= height and \
store.get_block_id_at_height(height, block_id) == ancestor_id
def get_block_height(store, block_id):
return store._load_block(int(block_id))['height']
def find_prev(store, hash):
row = store.selectrow("""
SELECT block_id, block_height, block_chain_work,
block_total_satoshis, block_total_seconds,
block_satoshi_seconds, block_total_ss, block_nTime
FROM block
WHERE block_hash=?""", (store.hashin(hash),))
if row is None:
return (None, None, None, None, None, None, None, None)
(id, height, chain_work, satoshis, seconds, satoshi_seconds,
total_ss, nTime) = row
return (id, None if height is None else int(height),
store.binout_int(chain_work),
None if satoshis is None else int(satoshis),
None if seconds is None else int(seconds),
None if satoshi_seconds is None else int(satoshi_seconds),
None if total_ss is None else int(total_ss),
int(nTime))
def import_block(store, b, chain_ids=None, chain=None):
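        """Insert block b, its transactions and their linkage rows.
        Return the new block_id, or None if another process has already
        inserted the same block."""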
# Import new transactions.
if chain_ids is None:
chain_ids = frozenset() if chain is None else frozenset([chain.id])
b['value_in'] = 0
b['value_out'] = 0
b['value_destroyed'] = 0
tx_hash_array = []
# In the common case, all the block's txins _are_ linked, and we
# can avoid a query if we notice this.
all_txins_linked = True
for pos in xrange(len(b['transactions'])):
tx = b['transactions'][pos]
if 'hash' not in tx:
if chain is None:
store.log.debug("Falling back to SHA256 transaction hash")
tx['hash'] = util.double_sha256(tx['__data__'])
else:
tx['hash'] = chain.transaction_hash(tx['__data__'])
tx_hash_array.append(tx['hash'])
tx['tx_id'] = store.tx_find_id_and_value(tx, pos == 0)
if tx['tx_id']:
all_txins_linked = False
else:
if store.commit_bytes == 0:
tx['tx_id'] = store.import_and_commit_tx(tx, pos == 0, chain)
else:
tx['tx_id'] = store.import_tx(tx, pos == 0, chain)
if tx.get('unlinked_count', 1) > 0:
all_txins_linked = False
if tx['value_in'] is None:
b['value_in'] = None
elif b['value_in'] is not None:
b['value_in'] += tx['value_in']
b['value_out'] += tx['value_out']
b['value_destroyed'] += tx['value_destroyed']
# Get a new block ID.
block_id = int(store.new_id("block"))
b['block_id'] = block_id
if chain is not None:
# Verify Merkle root.
if b['hashMerkleRoot'] != chain.merkle_root(tx_hash_array):
raise MerkleRootMismatch(b['hash'], tx_hash_array)
# Look for the parent block.
hashPrev = b['hashPrev']
if chain is None:
# XXX No longer used.
is_genesis = hashPrev == util.GENESIS_HASH_PREV
else:
is_genesis = hashPrev == chain.genesis_hash_prev
(prev_block_id, prev_height, prev_work, prev_satoshis,
prev_seconds, prev_ss, prev_total_ss, prev_nTime) = (
(None, -1, 0, 0, 0, 0, 0, b['nTime'])
if is_genesis else
store.find_prev(hashPrev))
b['prev_block_id'] = prev_block_id
b['height'] = None if prev_height is None else prev_height + 1
b['chain_work'] = util.calculate_work(prev_work, b['nBits'])
if prev_seconds is None:
b['seconds'] = None
else:
b['seconds'] = prev_seconds + b['nTime'] - prev_nTime
if prev_satoshis is None or prev_satoshis < 0 or b['value_in'] is None:
# XXX Abuse this field to save work in adopt_orphans.
b['satoshis'] = -1 - b['value_destroyed']
else:
b['satoshis'] = prev_satoshis + b['value_out'] - b['value_in'] \
- b['value_destroyed']
if prev_satoshis is None or prev_satoshis < 0:
ss_created = None
b['total_ss'] = None
else:
ss_created = prev_satoshis * (b['nTime'] - prev_nTime)
b['total_ss'] = prev_total_ss + ss_created
if b['height'] is None or b['height'] < 2:
b['search_block_id'] = None
else:
b['search_block_id'] = store.get_block_id_at_height(
util.get_search_height(int(b['height'])),
None if prev_block_id is None else int(prev_block_id))
# Insert the block table row.
try:
store.sql(
"""INSERT INTO block (
block_id, block_hash, block_version, block_hashMerkleRoot,
block_nTime, block_nBits, block_nNonce, block_height,
prev_block_id, block_chain_work, block_value_in,
block_value_out, block_total_satoshis,
block_total_seconds, block_total_ss, block_num_tx,
search_block_id
) VALUES (
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?
)""",
(block_id, store.hashin(b['hash']), store.intin(b['version']),
store.hashin(b['hashMerkleRoot']), store.intin(b['nTime']),
store.intin(b['nBits']), store.intin(b['nNonce']),
b['height'], prev_block_id,
store.binin_int(b['chain_work'], WORK_BITS),
store.intin(b['value_in']), store.intin(b['value_out']),
store.intin(b['satoshis']), store.intin(b['seconds']),
store.intin(b['total_ss']),
len(b['transactions']), b['search_block_id']))
except store.dbmodule.DatabaseError:
if store.commit_bytes == 0:
# Rollback won't undo any previous changes, since we
# always commit.
store.rollback()
# If the exception is due to another process having
# inserted the same block, it is okay.
row = store.selectrow("""
SELECT block_id, block_satoshi_seconds
FROM block
WHERE block_hash = ?""",
(store.hashin(b['hash']),))
if row:
                    store.log.info("Block already inserted; block_id %d unused",
                                   block_id)
b['block_id'] = int(row[0])
b['ss'] = None if row[1] is None else int(row[1])
store.offer_block_to_chains(b, chain_ids)
return
# This is not an expected error, or our caller may have to
# rewind a block file. Let them deal with it.
raise
# List the block's transactions in block_tx.
for tx_pos in xrange(len(b['transactions'])):
tx = b['transactions'][tx_pos]
store.sql("DELETE FROM unlinked_tx WHERE tx_id = ?", (tx['tx_id'],))
store.sql("""
INSERT INTO block_tx
(block_id, tx_id, tx_pos)
VALUES (?, ?, ?)""",
(block_id, tx['tx_id'], tx_pos))
store.log.info("block_tx %d %d", block_id, tx['tx_id'])
if b['height'] is not None:
store._populate_block_txin(block_id)
if all_txins_linked or not store._has_unlinked_txins(block_id):
b['ss_destroyed'] = store._get_block_ss_destroyed(
block_id, b['nTime'],
map(lambda tx: tx['tx_id'], b['transactions']))
if ss_created is None or prev_ss is None:
b['ss'] = None
else:
b['ss'] = prev_ss + ss_created - b['ss_destroyed']
store.sql("""
UPDATE block
SET block_satoshi_seconds = ?,
block_ss_destroyed = ?
WHERE block_id = ?""",
(store.intin(b['ss']),
store.intin(b['ss_destroyed']),
block_id))
else:
b['ss_destroyed'] = None
b['ss'] = None
# Store the inverse hashPrev relationship or mark the block as
# an orphan.
if prev_block_id:
store.sql("""
INSERT INTO block_next (block_id, next_block_id)
VALUES (?, ?)""", (prev_block_id, block_id))
elif not is_genesis:
store.sql("INSERT INTO orphan_block (block_id, block_hashPrev)" +
" VALUES (?, ?)", (block_id, store.hashin(b['hashPrev'])))
for row in store.selectall("""
SELECT block_id FROM orphan_block WHERE block_hashPrev = ?""",
(store.hashin(b['hash']),)):
(orphan_id,) = row
store.sql("UPDATE block SET prev_block_id = ? WHERE block_id = ?",
(block_id, orphan_id))
store.sql("""
INSERT INTO block_next (block_id, next_block_id)
VALUES (?, ?)""", (block_id, orphan_id))
store.sql("DELETE FROM orphan_block WHERE block_id = ?",
(orphan_id,))
# offer_block_to_chains calls adopt_orphans, which propagates
# block_height and other cumulative data to the blocks
# attached above.
store.offer_block_to_chains(b, chain_ids)
return block_id
def _populate_block_txin(store, block_id):
# Create rows in block_txin. In case of duplicate transactions,
# choose the one with the lowest block height.
txin_oblocks = {}
for txin_id, oblock_id in store.selectall("""
SELECT txin.txin_id, obt.block_id
FROM block_tx bt
JOIN txin ON (txin.tx_id = bt.tx_id)
JOIN txout ON (txin.txout_id = txout.txout_id)
JOIN block_tx obt ON (txout.tx_id = obt.tx_id)
JOIN block ob ON (obt.block_id = ob.block_id)
WHERE bt.block_id = ?
AND ob.block_chain_work IS NOT NULL
ORDER BY txin.txin_id ASC, ob.block_height ASC""", (block_id,)):
            # Save all candidates; the lowest height might not be a
            # descendant if we have multiple candidate blocks.
txin_oblocks.setdefault(txin_id, []).append(oblock_id)
for txin_id, oblock_ids in txin_oblocks.iteritems():
for oblock_id in oblock_ids:
if store.is_descended_from(block_id, int(oblock_id)):
                    # Store the lowest-height candidate from which our block
                    # is descended.
store.sql("""
INSERT INTO block_txin (block_id, txin_id, out_block_id)
VALUES (?, ?, ?)""", (block_id, txin_id, oblock_id))
break
def _has_unlinked_txins(store, block_id):
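        """Return True if any input of the block's transactions still lacks
        its corresponding previous output."""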
(unlinked_count,) = store.selectrow("""
SELECT COUNT(1)
FROM block_tx bt
JOIN txin ON (bt.tx_id = txin.tx_id)
JOIN unlinked_txin u ON (txin.txin_id = u.txin_id)
WHERE bt.block_id = ?""", (block_id,))
return unlinked_count > 0
def _get_block_ss_destroyed(store, block_id, nTime, tx_ids):
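        """Return the satoshi-seconds destroyed by the given transactions'
        inputs, measured from each spent output's block time to nTime."""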
block_ss_destroyed = 0
for tx_id in tx_ids:
destroyed = 0
# Don't do the math in SQL as we risk losing precision
for txout_value, block_nTime in store.selectall("""
SELECT COALESCE(txout_approx.txout_approx_value, 0),
b.block_nTime
FROM block_txin bti
JOIN txin ON (bti.txin_id = txin.txin_id)
JOIN txout_approx ON (txin.txout_id = txout_approx.txout_id)
JOIN block_tx obt ON (txout_approx.tx_id = obt.tx_id)
JOIN block b ON (obt.block_id = b.block_id)
WHERE bti.block_id = ? AND txin.tx_id = ?""",
(block_id, tx_id)):
destroyed += txout_value * (nTime - block_nTime)
block_ss_destroyed += destroyed
return block_ss_destroyed
# Propagate cumulative values to descendant blocks. Return info
# about the longest chains containing b. The returned dictionary
# is keyed by the chain_id of a chain whose validation policy b
# satisfies. Each value is a pair (block, work) where block is
# the best block descended from b in the given chain, and work is
# the sum of orphan_work and the work between b and block. Only
# chains in chain_mask are considered. Even if no known chain
# contains b, this routine populates any descendant blocks'
# cumulative statistics that are known for b and returns an empty
# dictionary.
def adopt_orphans(store, b, orphan_work, chain_ids, chain_mask):
# XXX As originally written, this function occasionally hit
# Python's recursion limit. I am rewriting it iteratively
# with minimal changes, hence the odd style. Guido is
# particularly unhelpful here, rejecting even labeled loops.
ret = [None]
def receive(x):
ret[0] = x
def doit():
store._adopt_orphans_1(stack)
stack = [receive, chain_mask, chain_ids, orphan_work, b, doit]
while stack:
stack.pop()()
return ret[0]
def _adopt_orphans_1(store, stack):
def doit():
store._adopt_orphans_1(stack)
def continuation(x):
store._adopt_orphans_2(stack, x)
def didit():
ret = stack.pop()
stack.pop()(ret)
b = stack.pop()
orphan_work = stack.pop()
chain_ids = stack.pop()
chain_mask = stack.pop()
ret = {}
stack += [ ret, didit ]
block_id = b['block_id']
height = None if b['height'] is None else int(b['height'] + 1)
# If adding block b, b will not yet be in chain_candidate, so
# we rely on the chain_ids argument. If called recursively,
# look up chain_ids in chain_candidate.
if not chain_ids:
if chain_mask:
chain_mask = chain_mask.intersection(
store.find_chains_containing_block(block_id))
chain_ids = chain_mask
for chain_id in chain_ids:
ret[chain_id] = (b, orphan_work)
for row in store.selectall("""
SELECT bn.next_block_id, b.block_nBits,
b.block_value_out, b.block_value_in, b.block_nTime,
b.block_total_satoshis
FROM block_next bn
JOIN block b ON (bn.next_block_id = b.block_id)
WHERE bn.block_id = ?""", (block_id,)):
next_id, nBits, value_out, value_in, nTime, satoshis = row
nBits = int(nBits)
nTime = int(nTime)
satoshis = None if satoshis is None else int(satoshis)
new_work = util.calculate_work(orphan_work, nBits)
if b['chain_work'] is None:
chain_work = None
else:
chain_work = b['chain_work'] + new_work - orphan_work
if value_in is None:
value, count1, count2 = store.selectrow("""
SELECT SUM(txout.txout_value),
COUNT(1),
COUNT(txout.txout_value)
FROM block_tx bt
JOIN txin ON (bt.tx_id = txin.tx_id)
LEFT JOIN txout ON (txout.txout_id = txin.txout_id)
WHERE bt.block_id = ?""", (next_id,))
if count1 == count2 + 1:
value_in = int(value)
else:
store.log.warning(
"not updating block %d value_in: %s != %s + 1",
next_id, repr(count1), repr(count2))
else:
value_in = int(value_in)
generated = None if value_in is None else int(value_out - value_in)
if b['seconds'] is None:
seconds = None
total_ss = None
else:
new_seconds = nTime - b['nTime']
seconds = b['seconds'] + new_seconds
if b['total_ss'] is None or b['satoshis'] is None:
total_ss = None
else:
total_ss = b['total_ss'] + new_seconds * b['satoshis']
if satoshis < 0 and b['satoshis'] is not None and \
b['satoshis'] >= 0 and generated is not None:
satoshis += 1 + b['satoshis'] + generated
if height is None or height < 2:
search_block_id = None
else:
search_block_id = store.get_block_id_at_height(
util.get_search_height(height), int(block_id))
store.sql("""
UPDATE block
SET block_height = ?,
block_chain_work = ?,
block_value_in = ?,
block_total_seconds = ?,
block_total_satoshis = ?,
block_total_ss = ?,
search_block_id = ?
WHERE block_id = ?""",
(height, store.binin_int(chain_work, WORK_BITS),
store.intin(value_in),
store.intin(seconds), store.intin(satoshis),
store.intin(total_ss), search_block_id,
next_id))
ss = None
if height is not None:
store.sql("""
UPDATE chain_candidate SET block_height = ?
WHERE block_id = ?""",
(height, next_id))
store._populate_block_txin(int(next_id))
if b['ss'] is None or store._has_unlinked_txins(next_id):
pass
else:
tx_ids = map(
lambda row: row[0],
store.selectall("""
SELECT tx_id
FROM block_tx
WHERE block_id = ?""", (next_id,)))
destroyed = store._get_block_ss_destroyed(
next_id, nTime, tx_ids)
ss = b['ss'] + b['satoshis'] * (nTime - b['nTime']) \
- destroyed
store.sql("""
UPDATE block
SET block_satoshi_seconds = ?,
block_ss_destroyed = ?
WHERE block_id = ?""",
(store.intin(ss),
store.intin(destroyed),
next_id))
if store.use_firstbits:
for (addr_vers,) in store.selectall("""
SELECT c.chain_address_version
FROM chain c
JOIN chain_candidate cc ON (c.chain_id = cc.chain_id)
WHERE cc.block_id = ?""", (next_id,)):
store.do_vers_firstbits(addr_vers, int(next_id))
nb = {
"block_id": next_id,
"height": height,
"chain_work": chain_work,
"nTime": nTime,
"seconds": seconds,
"satoshis": satoshis,
"total_ss": total_ss,
"ss": ss}
stack += [ret, continuation,
chain_mask, None, new_work, nb, doit]
def _adopt_orphans_2(store, stack, next_ret):
ret = stack.pop()
for chain_id in ret.keys():
pair = next_ret[chain_id]
if pair and pair[1] > ret[chain_id][1]:
ret[chain_id] = pair
def _export_scriptPubKey(store, txout, chain, scriptPubKey):
"""In txout, set script_type, address_version, binaddr, and for multisig, required_signatures."""
if scriptPubKey is None:
txout['script_type'] = None
txout['binaddr'] = None
return
script_type, data = chain.parse_txout_script(scriptPubKey)
txout['script_type'] = script_type
txout['address_version'] = chain.address_version
if script_type == Chain.SCRIPT_TYPE_PUBKEY:
txout['binaddr'] = chain.pubkey_hash(data)
elif script_type == Chain.SCRIPT_TYPE_ADDRESS:
txout['binaddr'] = data
elif script_type == Chain.SCRIPT_TYPE_P2SH:
txout['address_version'] = chain.script_addr_vers
txout['binaddr'] = data
elif script_type == Chain.SCRIPT_TYPE_MULTISIG:
txout['required_signatures'] = data['m']
txout['binaddr'] = chain.pubkey_hash(scriptPubKey)
txout['subbinaddr'] = [
chain.pubkey_hash(pubkey)
for pubkey in data['pubkeys']
]
elif script_type == Chain.SCRIPT_TYPE_BURN:
txout['binaddr'] = NULL_PUBKEY_HASH
else:
txout['binaddr'] = None
def export_block(store, chain=None, block_hash=None, block_number=None):
"""
Return a dict with the following:
* chain_candidates[]
* chain
* in_longest
* chain_satoshis
* chain_satoshi_seconds
* chain_work
* fees
* generated
* hash
* hashMerkleRoot
* hashPrev
* height
* nBits
* next_block_hashes
* nNonce
* nTime
* satoshis_destroyed
* satoshi_seconds
* transactions[]
* fees
* hash
* in[]
* address_version
* binaddr
* value
* out[]
* address_version
* binaddr
* value
* size
* value_out
* version
Additionally, for multisig inputs and outputs:
* subbinaddr[]
* required_signatures
Additionally, for proof-of-stake chains:
* is_proof_of_stake
* proof_of_stake_generated
"""
if block_number is None and block_hash is None:
raise ValueError("export_block requires either block_hash or block_number")
where = []
bind = []
if chain is not None:
where.append('chain_id = ?')
bind.append(chain.id)
if block_hash is not None:
where.append('block_hash = ?')
bind.append(store.hashin_hex(block_hash))
if block_number is not None:
where.append('block_height = ? AND in_longest = 1')
bind.append(block_number)
sql = """
SELECT
chain_id,
in_longest,
block_id,
block_hash,
block_version,
block_hashMerkleRoot,
block_nTime,
block_nBits,
block_nNonce,
block_height,
prev_block_hash,
block_chain_work,
block_value_in,
block_value_out,
block_total_satoshis,
block_total_seconds,
block_satoshi_seconds,
block_total_ss,
block_ss_destroyed,
block_num_tx
FROM chain_summary
WHERE """ + ' AND '.join(where) + """
ORDER BY
in_longest DESC,
chain_id DESC"""
rows = store.selectall(sql, bind)
if len(rows) == 0:
return None
row = rows[0][2:]
def parse_cc(row):
chain_id, in_longest = row[:2]
return { "chain": store.get_chain_by_id(chain_id), "in_longest": in_longest }
# Absent the chain argument, default to highest chain_id, preferring to avoid side chains.
cc = map(parse_cc, rows)
# "chain" may be None, but "found_chain" will not.
found_chain = chain
if found_chain is None:
if len(cc) > 0:
found_chain = cc[0]['chain']
else:
# Should not normally get here.
found_chain = store.get_default_chain()
(block_id, block_hash, block_version, hashMerkleRoot,
nTime, nBits, nNonce, height,
prev_block_hash, block_chain_work, value_in, value_out,
satoshis, seconds, ss, total_ss, destroyed, num_tx) = (
row[0], store.hashout_hex(row[1]), row[2],
store.hashout_hex(row[3]), row[4], int(row[5]), row[6],
row[7], store.hashout_hex(row[8]),
store.binout_int(row[9]), int(row[10]), int(row[11]),
None if row[12] is None else int(row[12]),
None if row[13] is None else int(row[13]),
None if row[14] is None else int(row[14]),
None if row[15] is None else int(row[15]),
None if row[16] is None else int(row[16]),
int(row[17]),
)
next_hashes = [
store.hashout_hex(hash) for hash, il in
store.selectall("""
SELECT DISTINCT n.block_hash, cc.in_longest
FROM block_next bn
JOIN block n ON (bn.next_block_id = n.block_id)
JOIN chain_candidate cc ON (n.block_id = cc.block_id)
WHERE bn.block_id = ?
ORDER BY cc.in_longest DESC""",
(block_id,)) ]
tx_ids = []
txs = {}
block_out = 0
block_in = 0
for row in store.selectall("""
SELECT tx_id, tx_hash, tx_size, txout_value, txout_scriptPubKey
FROM txout_detail
WHERE block_id = ?
ORDER BY tx_pos, txout_pos
""", (block_id,)):
tx_id, tx_hash, tx_size, txout_value, scriptPubKey = (
row[0], row[1], row[2], int(row[3]), store.binout(row[4]))
tx = txs.get(tx_id)
if tx is None:
tx_ids.append(tx_id)
txs[tx_id] = {
"hash": store.hashout_hex(tx_hash),
"total_out": 0,
"total_in": 0,
"out": [],
"in": [],
"size": int(tx_size),
}
tx = txs[tx_id]
tx['total_out'] += txout_value
block_out += txout_value
txout = { 'value': txout_value }
store._export_scriptPubKey(txout, found_chain, scriptPubKey)
tx['out'].append(txout)
for row in store.selectall("""
SELECT tx_id, txin_value, txin_scriptPubKey
FROM txin_detail
WHERE block_id = ?
ORDER BY tx_pos, txin_pos
""", (block_id,)):
tx_id, txin_value, scriptPubKey = (
row[0], 0 if row[1] is None else int(row[1]),
store.binout(row[2]))
tx = txs.get(tx_id)
if tx is None:
# Strange, inputs but no outputs?
tx_ids.append(tx_id)
tx_hash, tx_size = store.selectrow("""
SELECT tx_hash, tx_size FROM tx WHERE tx_id = ?""",
(tx_id,))
txs[tx_id] = {
"hash": store.hashout_hex(tx_hash),
"total_out": 0,
"total_in": 0,
"out": [],
"in": [],
"size": int(tx_size),
}
tx = txs[tx_id]
tx['total_in'] += txin_value
block_in += txin_value
txin = { 'value': txin_value }
store._export_scriptPubKey(txin, found_chain, scriptPubKey)
tx['in'].append(txin)
generated = block_out - block_in
coinbase_tx = txs[tx_ids[0]]
coinbase_tx['fees'] = 0
block_fees = coinbase_tx['total_out'] - generated
b = {
'chain_candidates': cc,
'chain_satoshis': satoshis,
'chain_satoshi_seconds': total_ss,
'chain_work': block_chain_work,
'fees': block_fees,
'generated': generated,
'hash': block_hash,
'hashMerkleRoot': hashMerkleRoot,
'hashPrev': prev_block_hash,
'height': height,
'nBits': nBits,
'next_block_hashes': next_hashes,
'nNonce': nNonce,
'nTime': nTime,
'satoshis_destroyed': destroyed,
'satoshi_seconds': ss,
'transactions': [txs[tx_id] for tx_id in tx_ids],
'value_out': block_out,
'version': block_version,
}
is_stake_chain = chain is not None and chain.has_feature('nvc_proof_of_stake')
if is_stake_chain:
# Proof-of-stake display based loosely on CryptoManiac/novacoin and
# http://nvc.cryptocoinexplorer.com.
b['is_proof_of_stake'] = len(tx_ids) > 1 and coinbase_tx['total_out'] == 0
for tx_id in tx_ids[1:]:
tx = txs[tx_id]
tx['fees'] = tx['total_in'] - tx['total_out']
if is_stake_chain and b['is_proof_of_stake']:
b['proof_of_stake_generated'] = -txs[tx_ids[1]]['fees']
txs[tx_ids[1]]['fees'] = 0
b['fees'] += b['proof_of_stake_generated']
return b
def tx_find_id_and_value(store, tx, is_coinbase, check_only=False):
        # Attention: value_out/undestroyed must match what is calculated in
        # import_tx.
row = store.selectrow("""
SELECT tx.tx_id, SUM(txout.txout_value), SUM(
CASE WHEN txout.pubkey_id IS NOT NULL AND txout.pubkey_id <= 0
THEN 0 ELSE txout.txout_value END)
FROM tx
LEFT JOIN txout ON (tx.tx_id = txout.tx_id)
WHERE tx_hash = ?
GROUP BY tx.tx_id""",
(store.hashin(tx['hash']),))
if row:
if check_only:
                # Don't update tx; this saves a statement when all we care
                # about is whether tx_id is in the store.
return row[0]
tx_id, value_out, undestroyed = row
value_out = 0 if value_out is None else int(value_out)
undestroyed = 0 if undestroyed is None else int(undestroyed)
count_in, value_in = store.selectrow("""
SELECT COUNT(1), SUM(prevout.txout_value)
FROM txin
JOIN txout prevout ON (txin.txout_id = prevout.txout_id)
WHERE txin.tx_id = ?""", (tx_id,))
if (count_in or 0) < len(tx['txIn']):
value_in = 0 if is_coinbase else None
tx['value_in'] = None if value_in is None else int(value_in)
tx['value_out'] = value_out
tx['value_destroyed'] = value_out - undestroyed
return tx_id
return None
def import_tx(store, tx, is_coinbase, chain):
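        """Insert a transaction with its outputs and inputs, linking any
        previously unlinked spenders.  Set the tx value_in, value_out,
        value_destroyed and unlinked_count fields and return the new tx_id."""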
tx_id = store.new_id("tx")
dbhash = store.hashin(tx['hash'])
if 'size' not in tx:
tx['size'] = len(tx['__data__'])
store.sql("""
INSERT INTO tx (tx_id, tx_hash, tx_version, tx_lockTime, tx_size)
VALUES (?, ?, ?, ?, ?)""",
(tx_id, dbhash, store.intin(tx['version']),
store.intin(tx['lockTime']), tx['size']))
        # Always consider a tx unlinked until it is added to block_tx.
        # This is necessary because an inserted tx can be committed to the
        # database before the block itself.
store.sql("INSERT INTO unlinked_tx (tx_id) VALUES (?)", (tx_id,))
# Import transaction outputs.
tx['value_out'] = 0
tx['value_destroyed'] = 0
for pos in xrange(len(tx['txOut'])):
txout = tx['txOut'][pos]
tx['value_out'] += txout['value']
txout_id = store.new_id("txout")
pubkey_id = store.script_to_pubkey_id(chain, txout['scriptPubKey'])
            # Attention: must match how tx_find_id_and_value computes the
            # undestroyed value.
if pubkey_id is not None and pubkey_id <= 0:
tx['value_destroyed'] += txout['value']
store.sql("""
INSERT INTO txout (
txout_id, tx_id, txout_pos, txout_value,
txout_scriptPubKey, pubkey_id
) VALUES (?, ?, ?, ?, ?, ?)""",
(txout_id, tx_id, pos, store.intin(txout['value']),
store.binin(txout['scriptPubKey']), pubkey_id))
for row in store.selectall("""
SELECT txin_id
FROM unlinked_txin
WHERE txout_tx_hash = ?
AND txout_pos = ?""", (dbhash, pos)):
(txin_id,) = row
store.sql("UPDATE txin SET txout_id = ? WHERE txin_id = ?",
(txout_id, txin_id))
store.sql("DELETE FROM unlinked_txin WHERE txin_id = ?",
(txin_id,))
# Import transaction inputs.
tx['value_in'] = 0
tx['unlinked_count'] = 0
for pos in xrange(len(tx['txIn'])):
txin = tx['txIn'][pos]
txin_id = store.new_id("txin")
if is_coinbase:
txout_id = None
else:
txout_id, value = store.lookup_txout(
txin['prevout_hash'], txin['prevout_n'])
if value is None:
tx['value_in'] = None
elif tx['value_in'] is not None:
tx['value_in'] += value
store.sql("""
INSERT INTO txin (
txin_id, tx_id, txin_pos, txout_id""" + (""",
txin_scriptSig, txin_sequence""" if store.keep_scriptsig
else "") + """
) VALUES (?, ?, ?, ?""" + (", ?, ?" if store.keep_scriptsig
else "") + """)""",
(txin_id, tx_id, pos, txout_id,
store.binin(txin['scriptSig']),
store.intin(txin['sequence'])) if store.keep_scriptsig
else (txin_id, tx_id, pos, txout_id))
if not is_coinbase and txout_id is None:
tx['unlinked_count'] += 1
store.sql("""
INSERT INTO unlinked_txin (
txin_id, txout_tx_hash, txout_pos
) VALUES (?, ?, ?)""",
(txin_id, store.hashin(txin['prevout_hash']),
store.intin(txin['prevout_n'])))
# XXX Could populate PUBKEY.PUBKEY with txin scripts...
# or leave that to an offline process. Nothing in this program
# requires them.
return tx_id
def import_and_commit_tx(store, tx, is_coinbase, chain):
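        """Import a transaction and commit immediately.  On a database
        error (e.g. duplicate tx_hash), roll back and return the existing
        tx_id if one is found."""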
try:
tx_id = store.import_tx(tx, is_coinbase, chain)
store.commit()
except store.dbmodule.DatabaseError:
store.rollback()
# Violation of tx_hash uniqueness?
tx_id = store.tx_find_id_and_value(tx, is_coinbase)
if not tx_id:
raise
return tx_id
def maybe_import_binary_tx(store, chain_name, binary_tx):
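        """Parse and import a serialized transaction unless a transaction
        with the same hash is already stored."""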
if chain_name is None:
chain = store.get_default_chain()
else:
chain = store.get_chain_by_name(chain_name)
tx_hash = chain.transaction_hash(binary_tx)
(count,) = store.selectrow(
"SELECT COUNT(1) FROM tx WHERE tx_hash = ?",
(store.hashin(tx_hash),))
if count == 0:
tx = chain.parse_transaction(binary_tx)
tx['hash'] = tx_hash
store.import_tx(tx, chain.is_coinbase_tx(tx), chain)
store.imported_bytes(tx['size'])
def export_tx(store, tx_id=None, tx_hash=None, decimals=8, format="api", chain=None):
"""Return a dict as seen by /rawtx or None if not found."""
# TODO: merge _export_tx_detail with export_tx.
if format == 'browser':
return store._export_tx_detail(tx_hash, chain=chain)
tx = {}
is_bin = format == "binary"
if tx_id is not None:
row = store.selectrow("""
SELECT tx_hash, tx_version, tx_lockTime, tx_size
FROM tx
WHERE tx_id = ?
""", (tx_id,))
if row is None:
return None
tx['hash'] = store.hashout_hex(row[0])
elif tx_hash is not None:
row = store.selectrow("""
SELECT tx_id, tx_version, tx_lockTime, tx_size
FROM tx
WHERE tx_hash = ?
""", (store.hashin_hex(tx_hash),))
if row is None:
return None
tx['hash'] = tx_hash.decode('hex')[::-1] if is_bin else tx_hash
tx_id = row[0]
else:
raise ValueError("export_tx requires either tx_id or tx_hash.")
tx['version' if is_bin else 'ver'] = int(row[1])
tx['lockTime' if is_bin else 'lock_time'] = int(row[2])
tx['size'] = int(row[3])
txins = []
tx['txIn' if is_bin else 'in'] = txins
for row in store.selectall("""
SELECT
COALESCE(tx.tx_hash, uti.txout_tx_hash),
COALESCE(txout.txout_pos, uti.txout_pos)""" + (""",
txin_scriptSig,
txin_sequence""" if store.keep_scriptsig else "") + """
FROM txin
LEFT JOIN txout ON (txin.txout_id = txout.txout_id)
LEFT JOIN tx ON (txout.tx_id = tx.tx_id)
LEFT JOIN unlinked_txin uti ON (txin.txin_id = uti.txin_id)
WHERE txin.tx_id = ?
ORDER BY txin.txin_pos""", (tx_id,)):
prevout_hash = row[0]
prevout_n = None if row[1] is None else int(row[1])
if is_bin:
txin = {
'prevout_hash': store.hashout(prevout_hash),
'prevout_n': prevout_n}
else:
if prevout_hash is None:
prev_out = {
'hash': "0" * 64, # XXX should store this?
'n': 0xffffffff} # XXX should store this?
else:
prev_out = {
'hash': store.hashout_hex(prevout_hash),
'n': prevout_n}
txin = {'prev_out': prev_out}
if store.keep_scriptsig:
scriptSig = row[2]
sequence = row[3]
if is_bin:
txin['scriptSig'] = store.binout(scriptSig)
else:
txin['raw_scriptSig'] = store.binout_hex(scriptSig)
txin['sequence'] = None if sequence is None else int(sequence)
txins.append(txin)
txouts = []
tx['txOut' if is_bin else 'out'] = txouts
for satoshis, scriptPubKey in store.selectall("""
SELECT txout_value, txout_scriptPubKey
FROM txout
WHERE tx_id = ?
ORDER BY txout_pos""", (tx_id,)):
if is_bin:
txout = {
'value': int(satoshis),
'scriptPubKey': store.binout(scriptPubKey)}
else:
coin = 10 ** decimals
satoshis = int(satoshis)
integer = satoshis / coin
frac = satoshis % coin
txout = {
'value': ("%%d.%%0%dd" % (decimals,)) % (integer, frac),
'raw_scriptPubKey': store.binout_hex(scriptPubKey)}
txouts.append(txout)
if not is_bin:
tx['vin_sz'] = len(txins)
tx['vout_sz'] = len(txouts)
return tx
def _export_tx_detail(store, tx_hash, chain):
try:
dbhash = store.hashin_hex(tx_hash)
except TypeError:
raise MalformedHash()
row = store.selectrow("""
SELECT tx_id, tx_version, tx_lockTime, tx_size
FROM tx
WHERE tx_hash = ?
""", (dbhash,))
if row is None:
return None
tx_id = int(row[0])
tx = {
'hash': tx_hash,
'version': int(row[1]),
'lockTime': int(row[2]),
'size': int(row[3]),
}
def parse_tx_cc(row):
return {
'chain': store.get_chain_by_id(row[0]),
'in_longest': int(row[1]),
'block_nTime': int(row[2]),
'block_height': None if row[3] is None else int(row[3]),
'block_hash': store.hashout_hex(row[4]),
'tx_pos': int(row[5])
}
tx['chain_candidates'] = map(parse_tx_cc, store.selectall("""
SELECT cc.chain_id, cc.in_longest,
b.block_nTime, b.block_height, b.block_hash,
block_tx.tx_pos
FROM chain_candidate cc
JOIN block b ON (b.block_id = cc.block_id)
JOIN block_tx ON (block_tx.block_id = b.block_id)
WHERE block_tx.tx_id = ?
ORDER BY cc.chain_id, cc.in_longest DESC, b.block_hash
""", (tx_id,)))
if chain is None:
if len(tx['chain_candidates']) > 0:
chain = tx['chain_candidates'][0]['chain']
else:
chain = store.get_default_chain()
def parse_row(row):
pos, script, value, o_hash, o_pos = row[:5]
script = store.binout(script)
            scriptPubKey = store.binout(row[5]) if len(row) > 5 else script
ret = {
"pos": int(pos),
"binscript": script,
"value": None if value is None else int(value),
"o_hash": store.hashout_hex(o_hash),
"o_pos": None if o_pos is None else int(o_pos),
}
store._export_scriptPubKey(ret, chain, scriptPubKey)
return ret
# XXX Unneeded outer join.
tx['in'] = map(parse_row, store.selectall("""
SELECT
txin.txin_pos""" + (""",
txin.txin_scriptSig""" if store.keep_scriptsig else """,
NULL""") + """,
txout.txout_value,
COALESCE(prevtx.tx_hash, u.txout_tx_hash),
COALESCE(txout.txout_pos, u.txout_pos),
txout.txout_scriptPubKey
FROM txin
LEFT JOIN txout ON (txout.txout_id = txin.txout_id)
LEFT JOIN tx prevtx ON (txout.tx_id = prevtx.tx_id)
LEFT JOIN unlinked_txin u ON (u.txin_id = txin.txin_id)
WHERE txin.tx_id = ?
ORDER BY txin.txin_pos
""", (tx_id,)))
# XXX Only one outer join needed.
tx['out'] = map(parse_row, store.selectall("""
SELECT
txout.txout_pos,
txout.txout_scriptPubKey,
txout.txout_value,
nexttx.tx_hash,
txin.txin_pos
FROM txout
LEFT JOIN txin ON (txin.txout_id = txout.txout_id)
LEFT JOIN tx nexttx ON (txin.tx_id = nexttx.tx_id)
WHERE txout.tx_id = ?
ORDER BY txout.txout_pos
""", (tx_id,)))
def sum_values(rows):
ret = 0
for row in rows:
if row['value'] is None:
return None
ret += row['value']
return ret
tx['value_in'] = sum_values(tx['in'])
tx['value_out'] = sum_values(tx['out'])
return tx
def export_address_history(store, address, chain=None, max_rows=-1, types=frozenset(['direct', 'escrow'])):
version, binaddr = util.decode_check_address(address)
if binaddr is None:
raise MalformedAddress("Invalid address")
balance = {}
received = {}
sent = {}
counts = [0, 0]
chains = []
def adj_balance(txpoint):
chain = txpoint['chain']
if chain.id not in balance:
chains.append(chain)
balance[chain.id] = 0
received[chain.id] = 0
sent[chain.id] = 0
if txpoint['type'] == 'direct':
value = txpoint['value']
balance[chain.id] += value
if txpoint['is_out']:
sent[chain.id] -= value
else:
received[chain.id] += value
counts[txpoint['is_out']] += 1
dbhash = store.binin(binaddr)
txpoints = []
def parse_row(is_out, row_type, nTime, chain_id, height, blk_hash, tx_hash, pos, value, script=None):
chain = store.get_chain_by_id(chain_id)
txpoint = {
'type': row_type,
'is_out': int(is_out),
'nTime': int(nTime),
'chain': chain,
'height': int(height),
'blk_hash': store.hashout_hex(blk_hash),
'tx_hash': store.hashout_hex(tx_hash),
'pos': int(pos),
'value': int(value),
}
if script is not None:
store._export_scriptPubKey(txpoint, chain, store.binout(script))
return txpoint
def parse_direct_in(row): return parse_row(True, 'direct', *row)
def parse_direct_out(row): return parse_row(False, 'direct', *row)
def parse_escrow_in(row): return parse_row(True, 'escrow', *row)
def parse_escrow_out(row): return parse_row(False, 'escrow', *row)
def get_received(escrow):
return store.selectall("""
SELECT
b.block_nTime,
cc.chain_id,
b.block_height,
b.block_hash,
tx.tx_hash,
txin.txin_pos,
-prevout.txout_value""" + (""",
prevout.txout_scriptPubKey""" if escrow else "") + """
FROM chain_candidate cc
JOIN block b ON (b.block_id = cc.block_id)
JOIN block_tx ON (block_tx.block_id = b.block_id)
JOIN tx ON (tx.tx_id = block_tx.tx_id)
JOIN txin ON (txin.tx_id = tx.tx_id)
JOIN txout prevout ON (txin.txout_id = prevout.txout_id)""" + ("""
JOIN multisig_pubkey mp ON (mp.multisig_id = prevout.pubkey_id)""" if escrow else "") + """
JOIN pubkey ON (pubkey.pubkey_id = """ + ("mp" if escrow else "prevout") + """.pubkey_id)
WHERE pubkey.pubkey_hash = ?
AND cc.in_longest = 1""" + ("" if max_rows < 0 else """
LIMIT ?"""),
(dbhash,)
if max_rows < 0 else
(dbhash, max_rows + 1))
def get_sent(escrow):
return store.selectall("""
SELECT
b.block_nTime,
cc.chain_id,
b.block_height,
b.block_hash,
tx.tx_hash,
txout.txout_pos,
txout.txout_value""" + (""",
txout.txout_scriptPubKey""" if escrow else "") + """
FROM chain_candidate cc
JOIN block b ON (b.block_id = cc.block_id)
JOIN block_tx ON (block_tx.block_id = b.block_id)
JOIN tx ON (tx.tx_id = block_tx.tx_id)
JOIN txout ON (txout.tx_id = tx.tx_id)""" + ("""
JOIN multisig_pubkey mp ON (mp.multisig_id = txout.pubkey_id)""" if escrow else "") + """
JOIN pubkey ON (pubkey.pubkey_id = """ + ("mp" if escrow else "txout") + """.pubkey_id)
WHERE pubkey.pubkey_hash = ?
AND cc.in_longest = 1""" + ("" if max_rows < 0 else """
LIMIT ?"""),
(dbhash, max_rows + 1)
if max_rows >= 0 else
(dbhash,))
if 'direct' in types:
in_rows = get_received(False)
if len(in_rows) > max_rows >= 0:
return None # XXX Could still show address basic data.
txpoints += map(parse_direct_in, in_rows)
out_rows = get_sent(False)
if len(out_rows) > max_rows >= 0:
return None
txpoints += map(parse_direct_out, out_rows)
if 'escrow' in types:
in_rows = get_received(True)
if len(in_rows) > max_rows >= 0:
return None
txpoints += map(parse_escrow_in, in_rows)
out_rows = get_sent(True)
if len(out_rows) > max_rows >= 0:
return None
txpoints += map(parse_escrow_out, out_rows)
def cmp_txpoint(p1, p2):
return cmp(p1['nTime'], p2['nTime']) \
or cmp(p1['is_out'], p2['is_out']) \
or cmp(p1['height'], p2['height']) \
or cmp(p1['chain'].name, p2['chain'].name)
txpoints.sort(cmp_txpoint)
for txpoint in txpoints:
adj_balance(txpoint)
hist = {
'binaddr': binaddr,
'version': version,
'chains': chains,
'txpoints': txpoints,
'balance': balance,
'sent': sent,
'received': received,
'counts': counts
}
# Show P2SH address components, if known.
# XXX With some more work, we could find required_signatures.
for (subbinaddr,) in store.selectall("""
SELECT sub.pubkey_hash
FROM multisig_pubkey mp
JOIN pubkey top ON (mp.multisig_id = top.pubkey_id)
JOIN pubkey sub ON (mp.pubkey_id = sub.pubkey_id)
WHERE top.pubkey_hash = ?""", (dbhash,)):
if 'subbinaddr' not in hist:
hist['subbinaddr'] = []
hist['subbinaddr'].append(store.binout(subbinaddr))
return hist
# Called to indicate that the given block has the correct magic
# number and policy for the given chains. Updates CHAIN_CANDIDATE
# and CHAIN.CHAIN_LAST_BLOCK_ID as appropriate.
def offer_block_to_chains(store, b, chain_ids):
b['top'] = store.adopt_orphans(b, 0, chain_ids, chain_ids)
for chain_id in chain_ids:
store._offer_block_to_chain(b, chain_id)
def _offer_block_to_chain(store, b, chain_id):
if b['chain_work'] is None:
in_longest = 0
else:
# Do we produce a chain longer than the current chain?
# Query whether the new block (or its tallest descendant)
# beats the current chain_last_block_id. Also check
# whether the current best is our top, which indicates
# this block is in longest; this can happen in database
# repair scenarios.
top = b['top'][chain_id][0]
row = store.selectrow("""
SELECT b.block_id, b.block_height, b.block_chain_work
FROM block b, chain c
WHERE c.chain_id = ?
AND b.block_id = c.chain_last_block_id""", (chain_id,))
if row:
loser_id, loser_height, loser_work = row
if loser_id != top['block_id'] and \
store.binout_int(loser_work) >= top['chain_work']:
row = None
if row:
# New longest chain.
in_longest = 1
to_connect = []
to_disconnect = []
winner_id = top['block_id']
winner_height = top['height']
while loser_height > winner_height:
to_disconnect.insert(0, loser_id)
loser_id = store.get_prev_block_id(loser_id)
loser_height -= 1
while winner_height > loser_height:
to_connect.insert(0, winner_id)
winner_id = store.get_prev_block_id(winner_id)
winner_height -= 1
loser_height = None
while loser_id != winner_id:
to_disconnect.insert(0, loser_id)
loser_id = store.get_prev_block_id(loser_id)
to_connect.insert(0, winner_id)
winner_id = store.get_prev_block_id(winner_id)
winner_height -= 1
for block_id in to_disconnect:
store.disconnect_block(block_id, chain_id)
for block_id in to_connect:
store.connect_block(block_id, chain_id)
elif b['hashPrev'] == store.get_chain_by_id(chain_id).genesis_hash_prev:
in_longest = 1 # Assume only one genesis block per chain. XXX
else:
in_longest = 0
store.sql("""
INSERT INTO chain_candidate (
chain_id, block_id, in_longest, block_height
) VALUES (?, ?, ?, ?)""",
(chain_id, b['block_id'], in_longest, b['height']))
if in_longest > 0:
store.sql("""
UPDATE chain
SET chain_last_block_id = ?
WHERE chain_id = ?""", (top['block_id'], chain_id))
if store.use_firstbits and b['height'] is not None:
(addr_vers,) = store.selectrow("""
SELECT chain_address_version
FROM chain
WHERE chain_id = ?""", (chain_id,))
store.do_vers_firstbits(addr_vers, b['block_id'])
def offer_existing_block(store, hash, chain_id):
block_row = store.selectrow("""
SELECT block_id, block_height, block_chain_work,
block_nTime, block_total_seconds,
block_total_satoshis, block_satoshi_seconds,
block_total_ss
FROM block
WHERE block_hash = ?
""", (store.hashin(hash),))
if not block_row:
return False
if chain_id is None:
return True
# Block header already seen. Don't import the block,
# but try to add it to the chain.
b = {
"block_id": block_row[0],
"height": block_row[1],
"chain_work": store.binout_int(block_row[2]),
"nTime": block_row[3],
"seconds": block_row[4],
"satoshis": block_row[5],
"ss": block_row[6],
"total_ss": block_row[7]}
if store.selectrow("""
SELECT 1
FROM chain_candidate
WHERE block_id = ?
AND chain_id = ?""",
(b['block_id'], chain_id)):
store.log.info("block %d already in chain %d",
b['block_id'], chain_id)
else:
if b['height'] == 0:
b['hashPrev'] = store.get_chain_by_id(chain_id).genesis_hash_prev
else:
b['hashPrev'] = 'dummy' # Fool adopt_orphans.
store.offer_block_to_chains(b, frozenset([chain_id]))
return True
def find_next_blocks(store, block_id):
ret = []
for row in store.selectall(
"SELECT next_block_id FROM block_next WHERE block_id = ?",
(block_id,)):
ret.append(row[0])
return ret
def find_chains_containing_block(store, block_id):
ret = []
for row in store.selectall(
"SELECT chain_id FROM chain_candidate WHERE block_id = ?",
(block_id,)):
ret.append(row[0])
return frozenset(ret)
def get_prev_block_id(store, block_id):
return store.selectrow(
"SELECT prev_block_id FROM block WHERE block_id = ?",
(block_id,))[0]
def disconnect_block(store, block_id, chain_id):
store.sql("""
UPDATE chain_candidate
SET in_longest = 0
WHERE block_id = ? AND chain_id = ?""",
(block_id, chain_id))
def connect_block(store, block_id, chain_id):
store.sql("""
UPDATE chain_candidate
SET in_longest = 1
WHERE block_id = ? AND chain_id = ?""",
(block_id, chain_id))
def lookup_txout(store, tx_hash, txout_pos):
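        """Return (txout_id, txout_value) for the given transaction hash and
        output position, or (None, None) if not found."""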
row = store.selectrow("""
SELECT txout.txout_id, txout.txout_value
FROM txout, tx
WHERE txout.tx_id = tx.tx_id
AND tx.tx_hash = ?
AND txout.txout_pos = ?""",
(store.hashin(tx_hash), txout_pos))
return (None, None) if row is None else (row[0], int(row[1]))
def script_to_pubkey_id(store, chain, script):
"""Extract address and script type from transaction output script."""
script_type, data = chain.parse_txout_script(script)
if script_type in (Chain.SCRIPT_TYPE_ADDRESS, Chain.SCRIPT_TYPE_P2SH):
return store.pubkey_hash_to_id(data)
if script_type == Chain.SCRIPT_TYPE_PUBKEY:
return store.pubkey_to_id(chain, data)
if script_type == Chain.SCRIPT_TYPE_MULTISIG:
script_hash = chain.script_hash(script)
multisig_id = store._pubkey_id(script_hash, script)
if not store.selectrow("SELECT 1 FROM multisig_pubkey WHERE multisig_id = ?", (multisig_id,)):
for pubkey in set(data['pubkeys']):
pubkey_id = store.pubkey_to_id(chain, pubkey)
store.sql("""
INSERT INTO multisig_pubkey (multisig_id, pubkey_id)
VALUES (?, ?)""", (multisig_id, pubkey_id))
return multisig_id
if script_type == Chain.SCRIPT_TYPE_BURN:
return PUBKEY_ID_NETWORK_FEE
return None
def pubkey_hash_to_id(store, pubkey_hash):
return store._pubkey_id(pubkey_hash, None)
def pubkey_to_id(store, chain, pubkey):
pubkey_hash = chain.pubkey_hash(pubkey)
return store._pubkey_id(pubkey_hash, pubkey)
def _pubkey_id(store, pubkey_hash, pubkey):
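        """Return the pubkey_id for pubkey_hash, inserting a new pubkey row
        if needed.  Pubkeys longer than MAX_PUBKEY are stored as NULL."""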
dbhash = store.binin(pubkey_hash) # binin, not hashin for 160-bit
row = store.selectrow("""
SELECT pubkey_id
FROM pubkey
WHERE pubkey_hash = ?""", (dbhash,))
if row:
return row[0]
pubkey_id = store.new_id("pubkey")
if pubkey is not None and len(pubkey) > MAX_PUBKEY:
pubkey = None
store.sql("""
INSERT INTO pubkey (pubkey_id, pubkey_hash, pubkey)
VALUES (?, ?, ?)""",
(pubkey_id, dbhash, store.binin(pubkey)))
return pubkey_id
def flush(store):
if store.bytes_since_commit > 0:
store.commit()
store.log.debug("commit")
store.bytes_since_commit = 0
def imported_bytes(store, size):
store.bytes_since_commit += size
if store.bytes_since_commit >= store.commit_bytes:
store.flush()
def catch_up(store):
for dircfg in store.datadirs:
try:
loader = dircfg['loader'] or store.default_loader
if loader == "blkfile":
store.catch_up_dir(dircfg)
elif loader in ("rpc", "rpc,blkfile", "default"):
if not store.catch_up_rpc(dircfg):
if loader == "rpc":
raise Exception("RPC load failed")
store.log.debug("catch_up_rpc: abort")
store.catch_up_dir(dircfg)
else:
raise Exception("Unknown datadir loader: %s" % loader)
store.flush()
except Exception, e:
store.log.exception("Failed to catch up %s", dircfg)
store.rollback()
def catch_up_rpc(store, dircfg):
"""
Load new blocks using RPC. Requires running *coind supporting
getblockhash, getblock with verbose=false, and optionally
getrawmempool/getrawtransaction (to load mempool tx). Requires
chain_id in the datadir table.
"""
chain_id = dircfg['chain_id']
if chain_id is None:
store.log.error("no chain_id")
return False
chain = store.chains_by.id[chain_id]
conffile = dircfg.get('conf') or chain.datadir_conf_file_name
conffile = os.path.join(dircfg['dirname'], conffile)
try:
conf = dict([line.strip().split("=", 1)
if "=" in line
else (line.strip(), True)
for line in open(conffile)
if line != "" and line[0] not in "#\r\n"])
except Exception, e:
store.log.error("failed to load %s: %s", conffile, e)
return False
rpcuser = conf.get("rpcuser", "")
rpcpassword = conf["rpcpassword"]
rpcconnect = conf.get("rpcconnect", "127.0.0.1")
rpcport = conf.get("rpcport", chain.datadir_rpcport)
url = "http://" + rpcuser + ":" + rpcpassword + "@" + rpcconnect \
+ ":" + str(rpcport)
ds = BCDataStream.BCDataStream()
if store.rpc_load_mempool:
# Cache tx imported from mempool, so we can avoid querying DB on each pass
rows = store.selectall("""
SELECT t.tx_hash
FROM unlinked_tx ut
JOIN tx t ON (ut.tx_id = t.tx_id)""")
store.mempool_tx = {store.hashout_hex(i[0]) for i in rows}
def rpc(func, *params):
store.rpclog.info("RPC>> %s %s", func, params)
ret = util.jsonrpc(url, func, *params)
if (store.rpclog.isEnabledFor(logging.INFO)):
store.rpclog.info("RPC<< %s",
re.sub(r'\[[^\]]{100,}\]', '[...]', str(ret)))
return ret
def get_blockhash(height):
try:
return rpc("getblockhash", height)
except util.JsonrpcException, e:
if e.code in (-1, -5, -8):
# Block number out of range...
# -1 is legacy code (pre-10.0), generic error
# -8 (RPC_INVALID_PARAMETER) first seen in bitcoind 10.x
# -5 (RPC_NOT_FOUND): Been suggested in #bitcoin-dev as more appropriate
return None
raise
        # get_block_number returns -1 when no block is stored, so we start
        # at height 0 on an empty chain.
height = store.get_block_number(chain.id) + 1
def get_tx(rpc_tx_hash):
try:
rpc_tx_hex = rpc("getrawtransaction", rpc_tx_hash)
except util.JsonrpcException, e:
if e.code != -5: # -5: transaction not in index.
raise
if height != 0:
return None
# The genesis transaction is unavailable. This is
# normal.
import genesis_tx
rpc_tx_hex = genesis_tx.get(rpc_tx_hash)
if rpc_tx_hex is None:
store.log.error("genesis transaction unavailable via RPC;"
" see import-tx in abe.conf")
return None
rpc_tx = rpc_tx_hex.decode('hex')
tx_hash = rpc_tx_hash.decode('hex')[::-1]
computed_tx_hash = chain.transaction_hash(rpc_tx)
if tx_hash != computed_tx_hash:
#raise InvalidBlock('transaction hash mismatch')
store.log.warn('transaction hash mismatch: %r != %r', tx_hash, computed_tx_hash)
tx = chain.parse_transaction(rpc_tx)
tx['hash'] = tx_hash
return tx
def first_new_block(height, next_hash):
"""Find the first new block."""
while height > 0:
hash = get_blockhash(height - 1)
if hash is not None and (1,) == store.selectrow("""
SELECT 1
FROM chain_candidate cc
JOIN block b ON (cc.block_id = b.block_id)
WHERE b.block_hash = ?
AND b.block_height IS NOT NULL
AND cc.chain_id = ?""", (
store.hashin_hex(str(hash)), chain.id)):
break
next_hash = hash
height -= 1
return (height, next_hash)
def catch_up_mempool(height):
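            """Poll the mempool and import its transactions until a new
            block appears at the given height; return that block's hash, or
            None if mempool loading is disabled."""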
# Next height check time
height_chk = time.time() + 1
while store.rpc_load_mempool:
# Import the memory pool.
mempool = rpc("getrawmempool")
for rpc_tx_hash in mempool:
# Skip any TX imported from previous run
if rpc_tx_hash in store.mempool_tx:
continue
# Break loop if new block found
if height_chk < time.time():
rpc_hash = get_blockhash(height)
if rpc_hash:
return rpc_hash
height_chk = time.time() + 1
tx = get_tx(rpc_tx_hash)
if tx is None:
                        # NB: On new blocks, older mempool txs are often
                        # missing.  This happens at other times too; just
                        # skip the tx and move on.
store.log.info("tx %s gone from mempool" % rpc_tx_hash)
continue
# XXX Race condition in low isolation levels.
tx_id = store.tx_find_id_and_value(tx, False, check_only=True)
if tx_id is None:
tx_id = store.import_tx(tx, False, chain)
store.log.info("mempool tx %d", tx_id)
store.imported_bytes(tx['size'])
# Only need to reset+save mempool tx cache once at the end
store.mempool_tx = set(mempool)
# Clean all unlinked tx not still in mempool
store.clean_unlinked_tx(store.mempool_tx)
store.log.info("mempool load completed, starting over...")
time.sleep(3)
return None
try:
# Get block hash at height, and at the same time, test
# bitcoind connectivity.
try:
next_hash = get_blockhash(height)
except util.JsonrpcException, e:
raise
except Exception, e:
# Connectivity failure.
store.log.error("RPC failed: %s", e)
return False
# Get the first new block (looking backward until hash match)
height, next_hash = first_new_block(height, next_hash)
# Import new blocks.
rpc_hash = next_hash or get_blockhash(height)
if rpc_hash is None:
rpc_hash = catch_up_mempool(height)
while rpc_hash is not None:
hash = rpc_hash.decode('hex')[::-1]
if store.offer_existing_block(hash, chain.id):
rpc_hash = get_blockhash(height + 1)
else:
# get full RPC block with "getblock <hash> False"
ds.write(rpc("getblock", rpc_hash, False).decode('hex'))
block_hash = chain.ds_block_header_hash(ds)
block = chain.ds_parse_block(ds)
assert hash == block_hash
block['hash'] = block_hash
# XXX Shouldn't be needed since we deserialize a valid block already
if chain.block_header_hash(chain.serialize_block_header(
block)) != hash:
raise InvalidBlock('block hash mismatch')
store.import_block(block, chain = chain)
store.imported_bytes(ds.read_cursor)
ds.clear()
rpc_hash = get_blockhash(height + 1)
height += 1
if rpc_hash is None:
rpc_hash = catch_up_mempool(height)
# Also look backwards in case we end up on an orphan block.
# NB: Call only when rpc_hash is not None, otherwise
# we'll override catch_up_mempool's behavior.
if rpc_hash:
height, rpc_hash = first_new_block(height, rpc_hash)
except util.JsonrpcMethodNotFound, e:
store.log.error("bitcoind %s not supported", e.method)
return False
except InvalidBlock, e:
store.log.error("RPC data not understood: %s", e)
return False
return True
# Load all blocks starting at the current file and offset.
def catch_up_dir(store, dircfg):
def open_blkfile(number):
store._refresh_dircfg(dircfg)
blkfile = {
'stream': BCDataStream.BCDataStream(),
'name': store.blkfile_name(dircfg, number),
'number': number
}
try:
file = open(blkfile['name'], "rb")
except IOError, e:
# Early bitcoind used blk0001.dat to blk9999.dat.
# Now it uses blocks/blk00000.dat to blocks/blk99999.dat.
# Abe starts by assuming the former scheme. If we don't
# find the expected file but do see blocks/blk00000.dat,
# switch to the new scheme. Record the switch by adding
# 100000 to each file number, so for example, 100123 means
# blocks/blk00123.dat but 123 still means blk0123.dat.
if blkfile['number'] > 9999 or e.errno != errno.ENOENT:
raise
new_number = 100000
blkfile['name'] = store.blkfile_name(dircfg, new_number)
file = open(blkfile['name'], "rb")
blkfile['number'] = new_number
try:
blkfile['stream'].map_file(file, 0)
except Exception:
# mmap can fail on an empty file, but empty files are okay.
file.seek(0, os.SEEK_END)
if file.tell() == 0:
blkfile['stream'].input = ""
blkfile['stream'].read_cursor = 0
else:
blkfile['stream'].map_file(file, 0)
finally:
file.close()
store.log.info("Opened %s", blkfile['name'])
return blkfile
def try_close_file(ds):
try:
ds.close_file()
except Exception, e:
store.log.info("BCDataStream: close_file: %s", e)
try:
blkfile = open_blkfile(dircfg['blkfile_number'])
except IOError, e:
store.log.warning("Skipping datadir %s: %s", dircfg['dirname'], e)
return
while True:
dircfg['blkfile_number'] = blkfile['number']
ds = blkfile['stream']
next_blkfile = None
try:
store.import_blkdat(dircfg, ds, blkfile['name'])
except Exception:
store.log.warning("Exception at %d" % ds.read_cursor)
try_close_file(ds)
raise
if next_blkfile is None:
# Try another file.
try:
next_blkfile = open_blkfile(dircfg['blkfile_number'] + 1)
except IOError, e:
if e.errno != errno.ENOENT:
raise
# No more block files.
return
except Exception, e:
if getattr(e, 'errno', None) == errno.ENOMEM:
# Assume 32-bit address space exhaustion.
store.log.warning(
"Cannot allocate memory for next blockfile: "
"skipping safety check")
try_close_file(ds)
blkfile = open_blkfile(dircfg['blkfile_number'] + 1)
dircfg['blkfile_offset'] = 0
continue
raise
finally:
if next_blkfile is None:
try_close_file(ds)
# Load any data written to the last file since we checked.
store.import_blkdat(dircfg, ds, blkfile['name'])
# Continue with the new file.
blkfile = next_blkfile
try_close_file(ds)
dircfg['blkfile_offset'] = 0
# Load all blocks from the given data stream.
def import_blkdat(store, dircfg, ds, filename="[unknown]"):
filenum = dircfg['blkfile_number']
ds.read_cursor = dircfg['blkfile_offset']
while filenum == dircfg['blkfile_number']:
if ds.read_cursor + 8 > len(ds.input):
break
offset = ds.read_cursor
magic = ds.read_bytes(4)
# Assume no real magic number starts with a NUL.
if magic[0] == "\0":
if filenum > 99999 and magic == "\0\0\0\0":
# As of Bitcoin 0.8, files often end with a NUL span.
ds.read_cursor = offset
break
# Skip NUL bytes at block end.
ds.read_cursor = offset
while ds.read_cursor < len(ds.input):
size = min(len(ds.input) - ds.read_cursor, 1000)
data = ds.read_bytes(size).lstrip("\0")
if (data != ""):
ds.read_cursor -= len(data)
break
store.log.info("Skipped %d NUL bytes at block end",
ds.read_cursor - offset)
continue
# Assume blocks obey the respective policy if they get here.
chain_id = dircfg['chain_id']
chain = store.chains_by.id.get(chain_id, None)
if chain is None:
chain = store.chains_by.magic.get(magic, None)
if chain is None:
store.log.warning(
"Chain not found for magic number %s in block file %s at"
" offset %d.", magic.encode('hex'), filename, offset)
not_magic = magic
# Read this file's initial magic number.
magic = ds.input[0:4]
if magic == not_magic:
ds.read_cursor = offset
break
store.log.info(
"Scanning for initial magic number %s.",
magic.encode('hex'))
ds.read_cursor = offset
offset = ds.input.find(magic, offset)
if offset == -1:
store.log.info("Magic number scan unsuccessful.")
break
store.log.info(
"Skipped %d bytes in block file %s at offset %d.",
offset - ds.read_cursor, filename, ds.read_cursor)
ds.read_cursor = offset
continue
length = ds.read_int32()
if ds.read_cursor + length > len(ds.input):
store.log.debug("incomplete block of length %d chain %d",
length, chain.id)
ds.read_cursor = offset
break
end = ds.read_cursor + length
hash = chain.ds_block_header_hash(ds)
# XXX should decode target and check hash against it to
# avoid loading garbage data. But not for merged-mined or
# CPU-mined chains that use different proof-of-work
# algorithms.
if not store.offer_existing_block(hash, chain.id):
b = chain.ds_parse_block(ds)
b["hash"] = hash
if (store.log.isEnabledFor(logging.DEBUG) and b["hashPrev"] == chain.genesis_hash_prev):
try:
store.log.debug("Chain %d genesis tx: %s", chain.id,
b['transactions'][0]['__data__'].encode('hex'))
except Exception:
pass
store.import_block(b, chain = chain)
if ds.read_cursor != end:
store.log.debug("Skipped %d bytes at block end",
end - ds.read_cursor)
ds.read_cursor = end
store.bytes_since_commit += length
if store.bytes_since_commit >= store.commit_bytes:
store.save_blkfile_offset(dircfg, ds.read_cursor)
store.flush()
store._refresh_dircfg(dircfg)
if ds.read_cursor != dircfg['blkfile_offset']:
store.save_blkfile_offset(dircfg, ds.read_cursor)
def blkfile_name(store, dircfg, number=None):
if number is None:
number = dircfg['blkfile_number']
if number > 9999:
return os.path.join(dircfg['dirname'], "blocks", "blk%05d.dat"
% (number - 100000,))
return os.path.join(dircfg['dirname'], "blk%04d.dat" % (number,))
def save_blkfile_offset(store, dircfg, offset):
store.sql("""
UPDATE datadir
SET blkfile_number = ?,
blkfile_offset = ?
WHERE datadir_id = ?""",
(dircfg['blkfile_number'], store.intin(offset),
dircfg['id']))
if store.rowcount() == 0:
store.sql("""
INSERT INTO datadir (datadir_id, dirname, blkfile_number,
blkfile_offset, chain_id)
VALUES (?, ?, ?, ?, ?)""",
(dircfg['id'], dircfg['dirname'],
dircfg['blkfile_number'],
store.intin(offset), dircfg['chain_id']))
dircfg['blkfile_offset'] = offset
def _refresh_dircfg(store, dircfg):
row = store.selectrow("""
SELECT blkfile_number, blkfile_offset
FROM datadir
WHERE dirname = ?""", (dircfg['dirname'],))
if row:
number, offset = map(int, row)
if (number > dircfg['blkfile_number'] or
(number == dircfg['blkfile_number'] and
offset > dircfg['blkfile_offset'])):
dircfg['blkfile_number'] = number
dircfg['blkfile_offset'] = offset
def get_block_number(store, chain_id):
row = store.selectrow("""
SELECT block_height
FROM chain_candidate
WHERE chain_id = ?
AND in_longest = 1
ORDER BY block_height DESC
LIMIT 1""", (chain_id,))
return int(row[0]) if row else -1
def get_target(store, chain_id):
rows = store.selectall("""
SELECT b.block_nBits
FROM block b
JOIN chain c ON (b.block_id = c.chain_last_block_id)
WHERE c.chain_id = ?""", (chain_id,))
return util.calculate_target(int(rows[0][0])) if rows else None
def get_received_and_last_block_id(store, chain_id, pubkey_hash,
block_height = None):
sql = """
SELECT COALESCE(value_sum, 0), c.chain_last_block_id
FROM chain c LEFT JOIN (
SELECT cc.chain_id, SUM(txout.txout_value) value_sum
FROM pubkey
JOIN txout ON (txout.pubkey_id = pubkey.pubkey_id)
JOIN block_tx ON (block_tx.tx_id = txout.tx_id)
JOIN block b ON (b.block_id = block_tx.block_id)
JOIN chain_candidate cc ON (cc.block_id = b.block_id)
WHERE
pubkey.pubkey_hash = ? AND
cc.chain_id = ? AND
cc.in_longest = 1""" + (
"" if block_height is None else """ AND
cc.block_height <= ?""") + """
GROUP BY cc.chain_id
) a ON (c.chain_id = a.chain_id)
WHERE c.chain_id = ?"""
dbhash = store.binin(pubkey_hash)
return store.selectrow(sql,
(dbhash, chain_id, chain_id)
if block_height is None else
(dbhash, chain_id, block_height, chain_id))
def get_received(store, chain_id, pubkey_hash, block_height = None):
return store.get_received_and_last_block_id(
chain_id, pubkey_hash, block_height)[0]
def get_sent_and_last_block_id(store, chain_id, pubkey_hash,
block_height = None):
sql = """
SELECT COALESCE(value_sum, 0), c.chain_last_block_id
FROM chain c LEFT JOIN (
SELECT cc.chain_id, SUM(txout.txout_value) value_sum
FROM pubkey
JOIN txout ON (txout.pubkey_id = pubkey.pubkey_id)
JOIN txin ON (txin.txout_id = txout.txout_id)
JOIN block_tx ON (block_tx.tx_id = txin.tx_id)
JOIN block b ON (b.block_id = block_tx.block_id)
JOIN chain_candidate cc ON (cc.block_id = b.block_id)
WHERE
pubkey.pubkey_hash = ? AND
cc.chain_id = ? AND
cc.in_longest = 1""" + (
"" if block_height is None else """ AND
cc.block_height <= ?""") + """
GROUP BY cc.chain_id
) a ON (c.chain_id = a.chain_id)
WHERE c.chain_id = ?"""
dbhash = store.binin(pubkey_hash)
return store.selectrow(sql,
(dbhash, chain_id, chain_id)
if block_height is None else
(dbhash, chain_id, block_height, chain_id))
def get_sent(store, chain_id, pubkey_hash, block_height = None):
return store.get_sent_and_last_block_id(
chain_id, pubkey_hash, block_height)[0]
def get_balance(store, chain_id, pubkey_hash):
sent, last_block_id = store.get_sent_and_last_block_id(
chain_id, pubkey_hash)
received, last_block_id_2 = store.get_received_and_last_block_id(
chain_id, pubkey_hash)
# Deal with the race condition.
for i in xrange(2):
if last_block_id == last_block_id_2:
break
store.log.debug("Requerying balance: %d != %d",
last_block_id, last_block_id_2)
received, last_block_id_2 = store.get_received_and_last_block_id(
chain_id, pubkey_hash, store.get_block_height(last_block_id))
if last_block_id == last_block_id_2:
break
store.log.info("Balance query affected by reorg? %d != %d",
last_block_id, last_block_id_2)
sent, last_block_id = store.get_sent_and_last_block_id(
chain_id, pubkey_hash, store.get_block_height(last_block_id_2))
if last_block_id != last_block_id_2:
store.log.warning("Balance query failed due to loader activity.")
return None
return received - sent
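# Illustrative usage sketch (not part of Abe; `store` and `pkh` stand for an
# initialized DataStore and a 20-byte public key hash): the balance of an
# address on a chain is simply received minus sent, and the method returns
# None if the chain tip keeps moving between the two queries.
#
#     balance = store.get_balance(chain_id=1, pubkey_hash=pkh)
#     if balance is None:
#         store.log.warning("tip moved during balance query; retry later")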
def firstbits_full(store, version, hash):
"""
Return the address in lowercase. An initial substring of this
will become the firstbits.
"""
return util.hash_to_address(version, hash).lower()
def insert_firstbits(store, pubkey_id, block_id, addr_vers, fb):
store.sql("""
INSERT INTO abe_firstbits (
pubkey_id, block_id, address_version, firstbits
)
VALUES (?, ?, ?, ?)""",
(pubkey_id, block_id, addr_vers, fb))
def cant_do_firstbits(store, addr_vers, block_id, pubkey_id):
store.log.info(
"No firstbits for pubkey_id %d, block_id %d, version '%s'",
pubkey_id, block_id, store.binout_hex(addr_vers))
store.insert_firstbits(pubkey_id, block_id, addr_vers, '')
def do_firstbits(store, addr_vers, block_id, fb, ids, full):
"""
Insert the firstbits that start with fb using addr_vers and
are first seen in block_id. Return the count of rows
inserted.
fb -- string that is not already a firstbits (for addr_vers) in any
ancestor of block_id
ids -- set of ids of all pubkeys first seen in block_id whose
firstbits start with fb
full -- map from pubkey_id to full firstbits
"""
if len(ids) <= 1:
for pubkey_id in ids:
store.insert_firstbits(pubkey_id, block_id, addr_vers, fb)
return len(ids)
pubkeys = {}
for pubkey_id in ids:
s = full[pubkey_id]
if s == fb:
store.cant_do_firstbits(addr_vers, block_id, pubkey_id)
continue
fb1 = fb + s[len(fb)]
ids1 = pubkeys.get(fb1)
if ids1 is None:
ids1 = set()
pubkeys[fb1] = ids1
ids1.add(pubkey_id)
count = 0
for fb1, ids1 in pubkeys.iteritems():
count += store.do_firstbits(addr_vers, block_id, fb1, ids1, full)
return count
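# Illustrative sketch (not part of Abe): the recursive bucketing above
# implements a "shortest unique prefix" rule over the full lowercase
# addresses first seen in a block. A standalone version of the same idea,
# using made-up addresses:
#
#     def shortest_unique_prefixes(addresses):
#         result = {}
#         for a in addresses:
#             others = [b for b in addresses if b != a]
#             n = 1
#             while n < len(a) and any(b.startswith(a[:n]) for b in others):
#                 n += 1
#             result[a] = a[:n]
#         return result
#
#     shortest_unique_prefixes(["1abcx", "1abdy", "1zzz"])
#     # -> {'1abcx': '1abc', '1abdy': '1abd', '1zzz': '1z'}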
def do_vers_firstbits(store, addr_vers, block_id):
"""
Create new firstbits records for block and addr_vers. All
ancestor blocks must have their firstbits already recorded.
"""
address_version = store.binout(addr_vers)
pubkeys = {} # firstbits to set of pubkey_id
full = {} # pubkey_id to full firstbits, or None if old
for pubkey_id, pubkey_hash, oblock_id in store.selectall("""
SELECT DISTINCT
pubkey.pubkey_id,
pubkey.pubkey_hash,
fb.block_id
FROM block b
JOIN block_tx bt ON (b.block_id = bt.block_id)
JOIN txout ON (bt.tx_id = txout.tx_id)
JOIN pubkey ON (txout.pubkey_id = pubkey.pubkey_id)
LEFT JOIN abe_firstbits fb ON (
fb.address_version = ?
AND fb.pubkey_id = pubkey.pubkey_id)
WHERE b.block_id = ?""", (addr_vers, block_id)):
pubkey_id = int(pubkey_id)
if (oblock_id is not None and
store.is_descended_from(block_id, int(oblock_id))):
full[pubkey_id] = None
if pubkey_id in full:
continue
full[pubkey_id] = store.firstbits_full(address_version,
store.binout(pubkey_hash))
for pubkey_id, s in full.iteritems():
if s is None:
continue
# This is the pubkey's first appearance in the chain.
# Find the longest match among earlier firstbits.
longest, longest_id = 0, None
substrs = [s[0:(i+1)] for i in xrange(len(s))]
for ancestor_id, fblen, o_pubkey_id in store.selectall("""
SELECT block_id, LENGTH(firstbits), pubkey_id
FROM abe_firstbits fb
WHERE address_version = ?
AND firstbits IN (?""" + (",?" * (len(s)-1)) + """
)""", tuple([addr_vers] + substrs)):
if fblen > longest and store.is_descended_from(
block_id, int(ancestor_id)):
longest, longest_id = fblen, o_pubkey_id
# If necessary, extend the new fb to distinguish it from
# the longest match.
if longest_id is not None:
(o_hash,) = store.selectrow(
"SELECT pubkey_hash FROM pubkey WHERE pubkey_id = ?",
(longest_id,))
o_fb = store.firstbits_full(
address_version, store.binout(o_hash))
max_len = min(len(s), len(o_fb))
while longest < max_len and s[longest] == o_fb[longest]:
longest += 1
if longest == len(s):
store.cant_do_firstbits(addr_vers, block_id, pubkey_id)
continue
fb = s[0 : (longest + 1)]
ids = pubkeys.get(fb)
if ids is None:
ids = set()
pubkeys[fb] = ids
ids.add(pubkey_id)
count = 0
for fb, ids in pubkeys.iteritems():
count += store.do_firstbits(addr_vers, block_id, fb, ids, full)
return count
def firstbits_to_addresses(store, fb, chain_id=None):
dbfb = fb.lower()
ret = []
bind = [fb[0:(i+1)] for i in xrange(len(fb))]
if chain_id is not None:
bind.append(chain_id)
for dbhash, vers in store.selectall("""
SELECT pubkey.pubkey_hash,
fb.address_version
FROM abe_firstbits fb
JOIN pubkey ON (fb.pubkey_id = pubkey.pubkey_id)
JOIN chain_candidate cc ON (cc.block_id = fb.block_id)
WHERE fb.firstbits IN (?""" + (",?" * (len(fb)-1)) + """)""" + ( \
"" if chain_id is None else """
AND cc.chain_id = ?"""), tuple(bind)):
address = util.hash_to_address(store.binout(vers),
store.binout(dbhash))
if address.lower().startswith(dbfb):
ret.append(address)
if len(ret) == 0 or (len(ret) > 1 and fb in ret):
ret = [fb] # assume exact address match
return ret
def get_firstbits(store, address_version=None, db_pubkey_hash=None,
chain_id=None):
"""
Return address's firstbits, or the longest of multiple
firstbits values if chain_id is not given, or None if address
has not appeared, or the empty string if address has appeared
but has no firstbits.
"""
vers, dbhash = store.binin(address_version), db_pubkey_hash
rows = store.selectall("""
SELECT fb.firstbits
FROM abe_firstbits fb
JOIN pubkey ON (fb.pubkey_id = pubkey.pubkey_id)
JOIN chain_candidate cc ON (fb.block_id = cc.block_id)
WHERE cc.in_longest = 1
AND fb.address_version = ?
AND pubkey.pubkey_hash = ?""" + (
"" if chain_id is None else """
AND cc.chain_id = ?"""),
(vers, dbhash) if chain_id is None else
(vers, dbhash, chain_id))
if not rows:
return None
ret = ""
for (fb,) in rows:
if len(fb) > len(ret):
ret = fb
return ret
def clean_unlinked_tx(store, known_tx=set()):
"""This method cleans up all unlinked tx'es found in table
`unlinked_tx` except where the tx hash is provided in known_tx
"""
rows = store.selectall("""
SELECT ut.tx_id, t.tx_hash
FROM unlinked_tx ut
JOIN tx t ON (ut.tx_id = t.tx_id)""")
if not rows:
return
if type(known_tx) is not set:
# convert list to set for faster lookups
known_tx = set(known_tx)
txcount = 0
for tx_id, tx_hash in rows:
if store.hashout_hex(tx_hash) in known_tx:
continue
store.log.debug("Removing unlinked tx: %r", tx_hash)
store._clean_unlinked_tx(tx_id)
txcount += 1
if txcount:
store.commit()
store.log.info("Cleaned up %d unlinked transactions", txcount)
else:
store.log.info("No unlinked transactions to clean up")
def _clean_unlinked_tx(store, tx_id):
"""Internal unlinked tx cleanup function, excluding the tracking table
`unlinked_tx`. This function is required by upgrade.py.
"""
# Clean up txin's
unlinked_txins = store.selectall("""
SELECT txin_id FROM txin
WHERE tx_id = ?""", (tx_id,))
for txin_id in unlinked_txins:
store.sql("DELETE FROM unlinked_txin WHERE txin_id = ?", (txin_id,))
store.sql("DELETE FROM txin WHERE tx_id = ?", (tx_id,))
# Clean up txouts & associated pupkeys ...
txout_pubkeys = set(store.selectall("""
SELECT pubkey_id FROM txout
WHERE tx_id = ? AND pubkey_id IS NOT NULL""", (tx_id,)))
# Also add multisig pubkeys if any
msig_pubkeys = set()
for pk_id in txout_pubkeys:
msig_pubkeys.update(store.selectall("""
SELECT pubkey_id FROM multisig_pubkey
WHERE multisig_id = ?""", (pk_id,)))
store.sql("DELETE FROM txout WHERE tx_id = ?", (tx_id,))
# Now delete orphan pubkeys... For simplicity merge both sets together
for pk_id in txout_pubkeys.union(msig_pubkeys):
(count,) = store.selectrow("""
SELECT COUNT(pubkey_id) FROM txout
WHERE pubkey_id = ?""", (pk_id,))
if count == 0:
store.sql("DELETE FROM multisig_pubkey WHERE multisig_id = ?", (pk_id,))
(count,) = store.selectrow("""
SELECT COUNT(pubkey_id) FROM multisig_pubkey
WHERE pubkey_id = ?""", (pk_id,))
if count == 0:
store.sql("DELETE FROM pubkey WHERE pubkey_id = ?", (pk_id,))
# Finally clean up tx itself
store.sql("DELETE FROM unlinked_tx WHERE tx_id = ?", (tx_id,))
store.sql("DELETE FROM tx WHERE tx_id = ?", (tx_id,))
def new(args):
return DataStore(args)
| 130,488 | Python | .py | 2,959 | 30.837445 | 111 | 0.519798 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,434 | readconf.py | bitcoin-abe_bitcoin-abe/Abe/readconf.py |
# Copyright(C) 2011,2012,2013 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see
# <http://www.gnu.org/licenses/gpl.html>.
def looks_like_json(val):
return val[:1] in ('"', '[', '{') or val in ('true', 'false', 'null')
def parse_argv(argv, conf={}, config_name='config', strict=False):
arg_dict = conf.copy()
args = lambda var: arg_dict[var]
args.func_dict = arg_dict
i = 0
while i < len(argv):
arg = argv[i]
if arg == '--':
i += 1
break
if arg[:2] != '--':
break
fwd = 1
# Strip leading "--" to form a config variable.
# --var=val and --var val are the same. --var+=val is different.
split = arg[2:].split('=', 1)
adding = False
if len(split) == 1:
var = split[0]
if i + 1 < len(argv) and argv[i + 1][:2] != '--':
fwd = 2
val = argv[i + 1]
else:
val = True
else:
var, val = split
if var[-1:] == '+':
var = var[:-1]
adding = True
if val is not True and looks_like_json(val):
val = parse_json(val)
var = var.replace('-', '_')
if var == config_name:
_include(set(), val, arg_dict, config_name, strict)
elif var not in conf:
break
elif adding:
add(arg_dict, var, val)
else:
arg_dict[var] = val
i += fwd
return args, argv[i:]
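# Illustrative sketch (not part of Abe): how parse_argv treats the three
# argument forms described above, given a conf dict that already names the
# recognized variables. The variable names and values are hypothetical.
#
#     conf = {'datadir': None, 'port': None}
#     args, rest = parse_argv(['--datadir', '/tmp/bitcoin', '--port=2750',
#                              '--datadir+=/tmp/testnet', 'serve'], conf)
#     args('port')      # '2750' (unquoted values stay strings)
#     args('datadir')   # ['/tmp/bitcoin', '/tmp/testnet'] after the "+=" add
#     rest              # ['serve']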
def include(filename, conf={}, config_name='config', strict=False):
_include(set(), filename, conf, config_name, strict)
return conf
class _Reader:
__slots__ = ['fp', 'lineno', 'line']
def __init__(rdr, fp):
rdr.fp = fp
rdr.lineno = 1
rdr.line = rdr.fp.read(1)
def eof(rdr):
return rdr.line == ''
def getc(rdr):
if rdr.eof():
return ''
ret = rdr.line[-1]
if ret == '\n':
rdr.lineno += 1
rdr.line = ''
c = rdr.fp.read(1)
if c == '':
rdr.line = ''
rdr.line += c
return ret
def peek(rdr):
if rdr.eof():
return ''
return rdr.line[-1]
def _readline(rdr):
ret = rdr.fp.readline()
rdr.line += ret
return ret
def readline(rdr):
ret = rdr.peek() + rdr._readline()
rdr.getc() # Consume the newline if not at EOF.
return ret
def get_error_context(rdr, e):
e.lineno = rdr.lineno
if not rdr.eof():
e.offset = len(rdr.line)
if rdr.peek() != '\n':
rdr._readline()
e.text = rdr.line
def _include(seen, filename, conf, config_name, strict):
if filename in seen:
raise Exception('Config file recursion')
with open(filename) as fp:
rdr = _Reader(fp)
try:
entries = read(rdr)
except SyntaxError, e:
if e.filename is None:
e.filename = filename
if e.lineno is None:
rdr.get_error_context(e)
raise
for var, val, additive in entries:
var = var.replace('-', '_')
if var == config_name:
import os
_include(seen | set([filename]),
os.path.join(os.path.dirname(filename), val), conf,
config_name, strict)
elif var not in conf:
if strict:
raise ValueError(
"Unknown parameter `%s' in %s" % (var, filename))
elif additive and conf[var] is not None:
add(conf, var, val)
else:
conf[var] = val
return
def read(rdr):
"""
Read name-value pairs from file and return the results as a list
of triples: (name, value, additive) where "additive" is true if
"+=" occurred between name and value.
"NAME=VALUE" and "NAME VALUE" are equivalent. Whitespace around
names and values is ignored, as are lines starting with '#' and
empty lines. Values may be JSON strings, arrays, or objects. A
value that does not start with '"' or '{' or '[' and is not a
boolean is read as a one-line string. A line with just "NAME"
stores True as the value.
"""
entries = []
def store(name, value, additive):
entries.append((name, value, additive))
def skipspace(rdr):
while rdr.peek() in (' ', '\t', '\r'):
rdr.getc()
while True:
skipspace(rdr)
if rdr.eof():
break
if rdr.peek() == '\n':
rdr.getc()
continue
if rdr.peek() == '#':
rdr.readline()
continue
name = ''
while rdr.peek() not in (' ', '\t', '\r', '\n', '=', '+', ''):
name += rdr.getc()
if rdr.peek() not in ('=', '+'):
skipspace(rdr)
if rdr.peek() in ('\n', ''):
store(name, True, False)
continue
additive = False
if rdr.peek() in ('=', '+'):
if rdr.peek() == '+':
rdr.getc()
if rdr.peek() != '=':
raise SyntaxError("'+' without '='")
additive = True
rdr.getc()
skipspace(rdr)
if rdr.peek() in ('"', '[', '{'):
js = scan_json(rdr)
try:
store(name, parse_json(js), additive)
except ValueError, e:
raise wrap_json_error(rdr, js, e)
continue
# Unquoted, one-line string.
value = ''
while rdr.peek() not in ('\n', ''):
value += rdr.getc()
value = value.strip()
# Booleans and null.
if value == 'true':
value = True
elif value == 'false':
value = False
elif value == 'null':
value = None
store(name, value, additive)
return entries
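# Illustrative sketch (not part of Abe): the triples produced by read() for a
# small, hypothetical config file. Booleans, JSON values, and bare one-line
# strings all follow the rules in the docstring above.
#
#     import StringIO
#     text = ('# comment\n'
#             'dbtype sqlite3\n'
#             'port = 2750\n'
#             'datadir += ["/tmp/blocks"]\n'
#             'no-serve\n')
#     read(_Reader(StringIO.StringIO(text)))
#     # -> [('dbtype', 'sqlite3', False),
#     #     ('port', '2750', False),
#     #     ('datadir', ['/tmp/blocks'], True),
#     #     ('no-serve', True, False)]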
def add(conf, var, val):
if var not in conf:
conf[var] = val
return
if isinstance(val, dict) and isinstance(conf[var], dict):
conf[var].update(val)
return
if not isinstance(conf[var], list):
conf[var] = [conf[var]]
if isinstance(val, list):
conf[var] += val
else:
conf[var].append(val)
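# Illustrative sketch (not part of Abe): how add() merges a repeated ("+=")
# setting into an existing configuration value. The keys are hypothetical.
#
#     conf = {'datadir': '/tmp/a', 'connect-args': {'user': 'abe'}}
#     add(conf, 'datadir', '/tmp/b')
#     add(conf, 'connect-args', {'db': 'abe'})
#     conf['datadir']       # ['/tmp/a', '/tmp/b']
#     conf['connect-args']  # {'user': 'abe', 'db': 'abe'}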
# Scan to end of JSON object. Grrr, why can't json.py do this without
# reading all of fp?
def _scan_json_string(rdr):
ret = rdr.getc() # '"'
while True:
c = rdr.getc()
if c == '':
raise SyntaxError('End of file in JSON string')
# Accept raw control characters for readability.
if c == '\n':
c = '\\n'
if c == '\r':
c = '\\r'
if c == '\t':
c = '\\t'
ret += c
if c == '"':
return ret
if c == '\\':
ret += rdr.getc()
def _scan_json_nonstring(rdr):
# Assume we are at a number or true|false|null.
# Scan the token.
ret = ''
while rdr.peek() != '' and rdr.peek() in '-+0123456789.eEtrufalsn':
ret += rdr.getc()
return ret
def _scan_json_space(rdr):
# Scan whitespace including "," and ":". Strip comments for good measure.
ret = ''
while not rdr.eof() and rdr.peek() in ' \t\r\n,:#':
c = rdr.getc()
if c == '#':
c = rdr.readline() and '\n'
ret += c
return ret
def _scan_json_compound(rdr):
# Scan a JSON array or object.
ret = rdr.getc()
if ret == '{': end = '}'
if ret == '[': end = ']'
ret += _scan_json_space(rdr)
if rdr.peek() == end:
return ret + rdr.getc()
while True:
if rdr.eof():
raise SyntaxError('End of file in JSON value')
ret += scan_json(rdr)
ret += _scan_json_space(rdr)
if rdr.peek() == end:
return ret + rdr.getc()
def scan_json(rdr):
# Scan a JSON value.
c = rdr.peek()
if c == '"':
return _scan_json_string(rdr)
if c in ('[', '{'):
return _scan_json_compound(rdr)
ret = _scan_json_nonstring(rdr)
if ret == '':
raise SyntaxError('Invalid JSON')
return ret
def parse_json(js):
import json
return json.loads(js)
def wrap_json_error(rdr, js, e):
import re
match = re.search(r'(.*): line (\d+) column (\d+)', e.message, re.DOTALL)
if match:
e = SyntaxError(match.group(1))
json_lineno = int(match.group(2))
e.lineno = rdr.lineno - js.count('\n') + json_lineno - 1
e.text = js.split('\n')[json_lineno - 1]
e.offset = int(match.group(3))
if json_lineno == 1 and json_line1_column_bug():
e.offset += 1
return e
def json_line1_column_bug():
ret = False
try:
parse_json("{:")
except ValueError, e:
if "column 1" in e.message:
ret = True
finally:
return ret
| 9,530 | Python | .py | 294 | 23.765306 | 78 | 0.517999 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,435 | admin.py | bitcoin-abe_bitcoin-abe/Abe/admin.py |
#!/usr/bin/env python
# Copyright(C) 2012,2013,2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
"""Delete a chain from the database, etc."""
import sys
import logging
import util
def commit(store):
store.commit()
store.log.info("Commit.")
def log_rowcount(store, msg):
store.log.info(msg, store.rowcount())
def link_txin(store):
store.log.info(
"Linking missed transaction inputs to their previous outputs.")
store.sql("""
UPDATE txin SET txout_id = (
SELECT txout_id
FROM unlinked_txin JOIN txout JOIN tx ON (txout.tx_id = tx.tx_id)
WHERE txin.txin_id = unlinked_txin.txin_id
AND tx.tx_hash = unlinked_txin.txout_tx_hash
AND txout.txout_pos = unlinked_txin.txout_pos)
WHERE txout_id IS NULL""")
log_rowcount(store, "Updated %d txout_id.")
commit(store)
store.sql("""
DELETE FROM unlinked_txin
WHERE (SELECT txout_id FROM txin
WHERE txin.txin_id = unlinked_txin.txin_id) IS NOT NULL""")
log_rowcount(store, "Deleted %d unlinked_txin.")
commit(store)
def delete_tx(store, id_or_hash):
try:
tx_id = int(id_or_hash)
except ValueError:
(tx_id,) = store.selectrow(
"SELECT tx_id FROM tx WHERE tx_hash = ?",
(store.hashin_hex(id_or_hash),))
store.log.info("Deleting transaction with tx_id=%d", tx_id)
store.sql("""
DELETE FROM unlinked_txin WHERE txin_id IN (
SELECT txin_id FROM txin WHERE tx_id = ?)""",
(tx_id,))
log_rowcount(store, "Deleted %d from unlinked_txin.")
store.sql("DELETE FROM txin WHERE tx_id = ?", (tx_id,))
log_rowcount(store, "Deleted %d from txin.")
store.sql("DELETE FROM txout WHERE tx_id = ?", (tx_id,))
log_rowcount(store, "Deleted %d from txout.")
store.sql("DELETE FROM tx WHERE tx_id = ?", (tx_id,))
log_rowcount(store, "Deleted %d from tx.")
commit(store)
def rewind_datadir(store, dirname):
store.sql("""
UPDATE datadir
SET blkfile_number = 1, blkfile_offset = 0
WHERE dirname = ?
AND (blkfile_number > 1 OR blkfile_offset > 0)""",
(dirname,))
log_rowcount(store, "Datadir blockfile pointers rewound: %d")
commit(store)
def rewind_chain_blockfile(store, name, chain_id):
store.sql("""
UPDATE datadir
SET blkfile_number = 1, blkfile_offset = 0
WHERE chain_id = ?
AND (blkfile_number > 1 OR blkfile_offset > 0)""",
(chain_id,))
log_rowcount(store, "Datadir blockfile pointers rewound: %d")
def chain_name_to_id(store, name):
(chain_id,) = store.selectrow(
"SELECT chain_id FROM chain WHERE chain_name = ?", (name,))
return chain_id
def del_chain_blocks_1(store, name, chain_id):
store.sql("UPDATE chain SET chain_last_block_id = NULL WHERE chain_id = ?",
(chain_id,))
store.log.info("Nulled %s chain_last_block_id.", name)
store.sql("""
UPDATE block
SET prev_block_id = NULL,
search_block_id = NULL
WHERE block_id IN (
SELECT block_id FROM chain_candidate WHERE chain_id = ?)""",
(chain_id,))
log_rowcount(store, "Disconnected %d blocks from chain.")
commit(store)
store.sql("""
DELETE FROM orphan_block WHERE block_id IN (
SELECT block_id FROM chain_candidate WHERE chain_id = ?)""",
(chain_id,))
log_rowcount(store, "Deleted %d from orphan_block.")
commit(store)
store.sql("""
DELETE FROM block_next WHERE block_id IN (
SELECT block_id FROM chain_candidate WHERE chain_id = ?)""",
(chain_id,))
log_rowcount(store, "Deleted %d from block_next.")
commit(store)
store.sql("""
DELETE FROM block_txin WHERE block_id IN (
SELECT block_id FROM chain_candidate WHERE chain_id = ?)""",
(chain_id,))
log_rowcount(store, "Deleted %d from block_txin.")
commit(store)
if store.use_firstbits:
store.sql("""
DELETE FROM abe_firstbits WHERE block_id IN (
SELECT block_id FROM chain_candidate WHERE chain_id = ?)""",
(chain_id,))
log_rowcount(store, "Deleted %d from abe_firstbits.")
commit(store)
def del_chain_block_tx(store, name, chain_id):
store.sql("""
DELETE FROM block_tx WHERE block_id IN (
SELECT block_id FROM chain_candidate WHERE chain_id = ?)""",
(chain_id,))
log_rowcount(store, "Deleted %d from block_tx.")
commit(store)
def delete_chain_blocks(store, name, chain_id = None):
if chain_id is None:
chain_id = chain_name_to_id(store, name)
store.log.info("Deleting blocks in chain %s", name)
del_chain_blocks_1(store, name, chain_id)
del_chain_block_tx(store, name, chain_id)
del_chain_blocks_2(store, name, chain_id)
def delete_chain_transactions(store, name, chain_id = None):
if chain_id is None:
chain_id = chain_name_to_id(store, name)
store.log.info("Deleting transactions and blocks in chain %s", name)
del_chain_blocks_1(store, name, chain_id)
store.sql("""
DELETE FROM unlinked_txin WHERE txin_id IN (
SELECT txin.txin_id
FROM chain_candidate cc
JOIN block_tx bt ON (cc.block_id = bt.block_id)
JOIN txin ON (bt.tx_id = txin.tx_id)
WHERE cc.chain_id = ?)""", (chain_id,))
log_rowcount(store, "Deleted %d from unlinked_txin.")
store.sql("""
DELETE FROM txin WHERE tx_id IN (
SELECT bt.tx_id
FROM chain_candidate cc
JOIN block_tx bt ON (cc.block_id = bt.block_id)
WHERE cc.chain_id = ?)""", (chain_id,))
log_rowcount(store, "Deleted %d from txin.")
commit(store)
store.sql("""
DELETE FROM txout WHERE tx_id IN (
SELECT bt.tx_id
FROM chain_candidate cc
JOIN block_tx bt ON (cc.block_id = bt.block_id)
WHERE cc.chain_id = ?)""", (chain_id,))
log_rowcount(store, "Deleted %d from txout.")
commit(store)
tx_ids = []
for row in store.selectall("""
SELECT tx_id
FROM chain_candidate cc
JOIN block_tx bt ON (cc.block_id = bt.block_id)
WHERE cc.chain_id = ?""", (chain_id,)):
tx_ids.append(int(row[0]))
del_chain_block_tx(store, name, chain_id)
deleted = 0
store.log.info("Deleting from tx...")
for tx_id in tx_ids:
store.sql("DELETE FROM tx WHERE tx_id = ?", (tx_id,))
cnt = store.rowcount()
if cnt > 0:
deleted += 1
if deleted % 10000 == 0:
store.log.info("Deleting tx: %d", deleted)
commit(store)
store.log.info("Deleted %d from tx.", deleted)
commit(store)
del_chain_blocks_2(store, name, chain_id)
def del_chain_blocks_2(store, name, chain_id):
block_ids = []
for row in store.selectall(
"SELECT block_id FROM chain_candidate WHERE chain_id = ?", (chain_id,)):
block_ids.append(int(row[0]))
store.sql("""
DELETE FROM chain_candidate WHERE chain_id = ?""",
(chain_id,))
log_rowcount(store, "Deleted %d from chain_candidate.")
deleted = 0
for block_id in block_ids:
store.sql("DELETE FROM block WHERE block_id = ?", (block_id,))
deleted += store.rowcount()
store.log.info("Deleted %d from block.", deleted)
rewind_chain_blockfile(store, name, chain_id)
commit(store)
def main(argv):
cmdline = util.CmdLine(argv)
cmdline.usage = lambda: \
"""Usage: python -m Abe.admin [-h] [--config=FILE] COMMAND...
Options:
--help Show this help message and exit.
--config FILE Abe configuration file.
Commands:
delete-chain-blocks NAME Delete all blocks in the specified chain
from the database.
delete-chain-transactions NAME Delete all blocks and transactions in
the specified chain.
delete-tx TX_ID Delete the specified transaction.
delete-tx TX_HASH
link-txin Link transaction inputs to previous outputs.
rewind-datadir DIRNAME Reset the pointer to force a rescan of
blockfiles in DIRNAME."""
store, argv = cmdline.init()
if store is None:
return 0
while len(argv) != 0:
command = argv.pop(0)
if command == 'delete-chain-blocks':
delete_chain_blocks(store, argv.pop(0))
elif command == 'delete-chain-transactions':
delete_chain_transactions(store, argv.pop(0))
elif command == 'delete-tx':
delete_tx(store, argv.pop(0))
elif command == 'rewind-datadir':
rewind_datadir(store, argv.pop(0))
elif command == 'link-txin':
link_txin(store)
else:
raise ValueError("Unknown command: " + command)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
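# Illustrative usage sketch (the config file name is hypothetical; the
# commands are the ones listed in the usage text above): main() can also be
# driven directly with an argv list.
#
#     import sys
#     from Abe import admin
#     sys.exit(admin.main(['--config', 'abe.conf', 'link-txin']))
#     # or: admin.main(['--config', 'abe.conf', 'delete-chain-blocks', 'Testnet'])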
| 9,888 | Python | .py | 239 | 33.179916 | 80 | 0.606797 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,436 | deserialize.py | bitcoin-abe_bitcoin-abe/Abe/deserialize.py |
#
#
#
from BCDataStream import *
from enumeration import Enumeration
from base58 import public_key_to_bc_address, hash_160_to_bc_address
import logging
import socket
import time
from util import short_hex, long_hex
import struct
def parse_CAddress(vds):
d = {}
d['nVersion'] = vds.read_int32()
d['nTime'] = vds.read_uint32()
d['nServices'] = vds.read_uint64()
d['pchReserved'] = vds.read_bytes(12)
d['ip'] = socket.inet_ntoa(vds.read_bytes(4))
d['port'] = socket.htons(vds.read_uint16())
return d
def deserialize_CAddress(d):
return d['ip']+":"+str(d['port'])+" (lastseen: %s)"%(time.ctime(d['nTime']),)
def parse_setting(setting, vds):
if setting[0] == "f": # flag (boolean) settings
return str(vds.read_boolean())
elif setting == "addrIncoming":
return "" # bitcoin 0.4 purposely breaks addrIncoming setting in encrypted wallets.
elif setting[0:4] == "addr": # CAddress
d = parse_CAddress(vds)
return deserialize_CAddress(d)
elif setting == "nTransactionFee":
return vds.read_int64()
elif setting == "nLimitProcessors":
return vds.read_int32()
return 'unknown setting'
def parse_TxIn(vds):
d = {}
d['prevout_hash'] = vds.read_bytes(32)
d['prevout_n'] = vds.read_uint32()
d['scriptSig'] = vds.read_bytes(vds.read_compact_size())
d['sequence'] = vds.read_uint32()
return d
def deserialize_TxIn(d, transaction_index=None, owner_keys=None):
if d['prevout_hash'] == "\x00"*32:
result = "TxIn: COIN GENERATED"
result += " coinbase:"+d['scriptSig'].encode('hex_codec')
elif transaction_index is not None and d['prevout_hash'] in transaction_index:
p = transaction_index[d['prevout_hash']]['txOut'][d['prevout_n']]
result = "TxIn: value: %f"%(p['value']/1.0e8,)
result += " prev("+long_hex(d['prevout_hash'][::-1])+":"+str(d['prevout_n'])+")"
else:
result = "TxIn: prev("+long_hex(d['prevout_hash'][::-1])+":"+str(d['prevout_n'])+")"
pk = extract_public_key(d['scriptSig'])
result += " pubkey: "+pk
result += " sig: "+decode_script(d['scriptSig'])
if d['sequence'] < 0xffffffff: result += " sequence: "+hex(d['sequence'])
return result
def parse_TxOut(vds):
d = {}
d['value'] = vds.read_int64()
d['scriptPubKey'] = vds.read_bytes(vds.read_compact_size())
return d
def deserialize_TxOut(d, owner_keys=None):
result = "TxOut: value: %f"%(d['value']/1.0e8,)
pk = extract_public_key(d['scriptPubKey'])
result += " pubkey: "+pk
result += " Script: "+decode_script(d['scriptPubKey'])
if owner_keys is not None:
if pk in owner_keys: result += " Own: True"
else: result += " Own: False"
return result
def parse_Transaction(vds, has_nTime=False):
d = {}
start_pos = vds.read_cursor
d['version'] = vds.read_int32()
if has_nTime:
d['nTime'] = vds.read_uint32()
n_vin = vds.read_compact_size()
d['txIn'] = []
for i in xrange(n_vin):
d['txIn'].append(parse_TxIn(vds))
n_vout = vds.read_compact_size()
d['txOut'] = []
for i in xrange(n_vout):
d['txOut'].append(parse_TxOut(vds))
d['lockTime'] = vds.read_uint32()
d['__data__'] = vds.input[start_pos:vds.read_cursor]
return d
def deserialize_Transaction(d, transaction_index=None, owner_keys=None, print_raw_tx=False):
result = "%d tx in, %d out\n"%(len(d['txIn']), len(d['txOut']))
for txIn in d['txIn']:
result += deserialize_TxIn(txIn, transaction_index) + "\n"
for txOut in d['txOut']:
result += deserialize_TxOut(txOut, owner_keys) + "\n"
if print_raw_tx == True:
result += "Transaction hex value: " + d['__data__'].encode('hex') + "\n"
return result
def parse_MerkleTx(vds):
d = parse_Transaction(vds)
d['hashBlock'] = vds.read_bytes(32)
n_merkleBranch = vds.read_compact_size()
d['merkleBranch'] = vds.read_bytes(32*n_merkleBranch)
d['nIndex'] = vds.read_int32()
return d
def deserialize_MerkleTx(d, transaction_index=None, owner_keys=None):
tx = deserialize_Transaction(d, transaction_index, owner_keys)
result = "block: "+(d['hashBlock'][::-1]).encode('hex_codec')
result += " %d hashes in merkle branch\n"%(len(d['merkleBranch'])/32,)
return result+tx
def parse_WalletTx(vds):
d = parse_MerkleTx(vds)
n_vtxPrev = vds.read_compact_size()
d['vtxPrev'] = []
for i in xrange(n_vtxPrev):
d['vtxPrev'].append(parse_MerkleTx(vds))
d['mapValue'] = {}
n_mapValue = vds.read_compact_size()
for i in xrange(n_mapValue):
key = vds.read_string()
value = vds.read_string()
d['mapValue'][key] = value
n_orderForm = vds.read_compact_size()
d['orderForm'] = []
for i in xrange(n_orderForm):
first = vds.read_string()
second = vds.read_string()
d['orderForm'].append( (first, second) )
d['fTimeReceivedIsTxTime'] = vds.read_uint32()
d['timeReceived'] = vds.read_uint32()
d['fromMe'] = vds.read_boolean()
d['spent'] = vds.read_boolean()
return d
def deserialize_WalletTx(d, transaction_index=None, owner_keys=None):
result = deserialize_MerkleTx(d, transaction_index, owner_keys)
result += "%d vtxPrev txns\n"%(len(d['vtxPrev']),)
result += "mapValue:"+str(d['mapValue'])
if len(d['orderForm']) > 0:
result += "\n"+" orderForm:"+str(d['orderForm'])
result += "\n"+"timeReceived:"+time.ctime(d['timeReceived'])
result += " fromMe:"+str(d['fromMe'])+" spent:"+str(d['spent'])
return result
# The CAuxPow (auxiliary proof of work) structure supports merged mining.
# A flag in the block version field indicates the structure's presence.
# As of 8/2011, the Original Bitcoin Client does not use it. CAuxPow
# originated in Namecoin; see
# https://github.com/vinced/namecoin/blob/mergedmine/doc/README_merged-mining.md.
def parse_AuxPow(vds):
d = parse_MerkleTx(vds)
n_chainMerkleBranch = vds.read_compact_size()
d['chainMerkleBranch'] = vds.read_bytes(32*n_chainMerkleBranch)
d['chainIndex'] = vds.read_int32()
d['parentBlock'] = parse_BlockHeader(vds)
return d
def parse_BlockHeader(vds):
d = {}
header_start = vds.read_cursor
d['version'] = vds.read_int32()
d['hashPrev'] = vds.read_bytes(32)
d['hashMerkleRoot'] = vds.read_bytes(32)
d['nTime'] = vds.read_uint32()
d['nBits'] = vds.read_uint32()
d['nNonce'] = vds.read_uint32()
header_end = vds.read_cursor
d['__header__'] = vds.input[header_start:header_end]
return d
def parse_Block(vds):
d = parse_BlockHeader(vds)
d['transactions'] = []
# if d['version'] & (1 << 8):
# d['auxpow'] = parse_AuxPow(vds)
nTransactions = vds.read_compact_size()
for i in xrange(nTransactions):
d['transactions'].append(parse_Transaction(vds))
return d
def deserialize_Block(d, print_raw_tx=False):
result = "Time: "+time.ctime(d['nTime'])+" Nonce: "+str(d['nNonce'])
result += "\nnBits: 0x"+hex(d['nBits'])
result += "\nhashMerkleRoot: 0x"+d['hashMerkleRoot'][::-1].encode('hex_codec')
result += "\nPrevious block: "+d['hashPrev'][::-1].encode('hex_codec')
result += "\n%d transactions:\n"%len(d['transactions'])
for t in d['transactions']:
result += deserialize_Transaction(t, print_raw_tx=print_raw_tx)+"\n"
result += "\nRaw block header: "+d['__header__'].encode('hex_codec')
return result
def parse_BlockLocator(vds):
d = { 'hashes' : [] }
nHashes = vds.read_compact_size()
for i in xrange(nHashes):
d['hashes'].append(vds.read_bytes(32))
return d
def deserialize_BlockLocator(d):
result = "Block Locator top: "+d['hashes'][0][::-1].encode('hex_codec')
return result
opcodes = Enumeration("Opcodes", [
("OP_0", 0), ("OP_PUSHDATA1",76), "OP_PUSHDATA2", "OP_PUSHDATA4", "OP_1NEGATE", "OP_RESERVED",
"OP_1", "OP_2", "OP_3", "OP_4", "OP_5", "OP_6", "OP_7",
"OP_8", "OP_9", "OP_10", "OP_11", "OP_12", "OP_13", "OP_14", "OP_15", "OP_16",
"OP_NOP", "OP_VER", "OP_IF", "OP_NOTIF", "OP_VERIF", "OP_VERNOTIF", "OP_ELSE", "OP_ENDIF", "OP_VERIFY",
"OP_RETURN", "OP_TOALTSTACK", "OP_FROMALTSTACK", "OP_2DROP", "OP_2DUP", "OP_3DUP", "OP_2OVER", "OP_2ROT", "OP_2SWAP",
"OP_IFDUP", "OP_DEPTH", "OP_DROP", "OP_DUP", "OP_NIP", "OP_OVER", "OP_PICK", "OP_ROLL", "OP_ROT",
"OP_SWAP", "OP_TUCK", "OP_CAT", "OP_SUBSTR", "OP_LEFT", "OP_RIGHT", "OP_SIZE", "OP_INVERT", "OP_AND",
"OP_OR", "OP_XOR", "OP_EQUAL", "OP_EQUALVERIFY", "OP_RESERVED1", "OP_RESERVED2", "OP_1ADD", "OP_1SUB", "OP_2MUL",
"OP_2DIV", "OP_NEGATE", "OP_ABS", "OP_NOT", "OP_0NOTEQUAL", "OP_ADD", "OP_SUB", "OP_MUL", "OP_DIV",
"OP_MOD", "OP_LSHIFT", "OP_RSHIFT", "OP_BOOLAND", "OP_BOOLOR",
"OP_NUMEQUAL", "OP_NUMEQUALVERIFY", "OP_NUMNOTEQUAL", "OP_LESSTHAN",
"OP_GREATERTHAN", "OP_LESSTHANOREQUAL", "OP_GREATERTHANOREQUAL", "OP_MIN", "OP_MAX",
"OP_WITHIN", "OP_RIPEMD160", "OP_SHA1", "OP_SHA256", "OP_HASH160",
"OP_HASH256", "OP_CODESEPARATOR", "OP_CHECKSIG", "OP_CHECKSIGVERIFY", "OP_CHECKMULTISIG",
"OP_CHECKMULTISIGVERIFY",
"OP_NOP1", "OP_NOP2", "OP_NOP3", "OP_NOP4", "OP_NOP5", "OP_NOP6", "OP_NOP7", "OP_NOP8", "OP_NOP9", "OP_NOP10",
("OP_INVALIDOPCODE", 0xFF),
])
def script_GetOp(bytes):
i = 0
while i < len(bytes):
vch = None
opcode = ord(bytes[i])
i += 1
if opcode <= opcodes.OP_PUSHDATA4:
nSize = opcode
if opcode == opcodes.OP_PUSHDATA1:
if i + 1 > len(bytes):
vch = "_INVALID_NULL"
i = len(bytes)
else:
nSize = ord(bytes[i])
i += 1
elif opcode == opcodes.OP_PUSHDATA2:
if i + 2 > len(bytes):
vch = "_INVALID_NULL"
i = len(bytes)
else:
(nSize,) = struct.unpack_from('<H', bytes, i)
i += 2
elif opcode == opcodes.OP_PUSHDATA4:
if i + 4 > len(bytes):
vch = "_INVALID_NULL"
i = len(bytes)
else:
(nSize,) = struct.unpack_from('<I', bytes, i)
i += 4
if i+nSize > len(bytes):
vch = "_INVALID_"+bytes[i:]
i = len(bytes)
else:
vch = bytes[i:i+nSize]
i += nSize
elif opcodes.OP_1 <= opcode <= opcodes.OP_16:
vch = chr(opcode - opcodes.OP_1 + 1)
elif opcode == opcodes.OP_1NEGATE:
vch = chr(255)
yield (opcode, vch)
def script_GetOpName(opcode):
try:
return (opcodes.whatis(opcode)).replace("OP_", "")
except KeyError:
return "InvalidOp_"+str(opcode)
def decode_script(bytes):
result = ''
for (opcode, vch) in script_GetOp(bytes):
if len(result) > 0: result += " "
if opcode <= opcodes.OP_PUSHDATA4:
result += "%d:"%(opcode,)
result += short_hex(vch)
else:
result += script_GetOpName(opcode)
return result
def match_decoded(decoded, to_match):
if len(decoded) != len(to_match):
return False
for i in range(len(decoded)):
if to_match[i] == opcodes.OP_PUSHDATA4 and decoded[i][0] <= opcodes.OP_PUSHDATA4:
continue # Opcodes below OP_PUSHDATA4 all just push data onto stack, and are equivalent.
if to_match[i] != decoded[i][0]:
return False
return True
def extract_public_key(bytes, version='\x00'):
try:
decoded = [ x for x in script_GetOp(bytes) ]
except struct.error:
return "(None)"
# non-generated TxIn transactions push a signature
# (seventy-something bytes) and then their public key
# (33 or 65 bytes) onto the stack:
match = [ opcodes.OP_PUSHDATA4, opcodes.OP_PUSHDATA4 ]
if match_decoded(decoded, match):
return public_key_to_bc_address(decoded[1][1], version=version)
# The Genesis Block, self-payments, and pay-by-IP-address payments look like:
# 65 BYTES:... CHECKSIG
match = [ opcodes.OP_PUSHDATA4, opcodes.OP_CHECKSIG ]
if match_decoded(decoded, match):
return public_key_to_bc_address(decoded[0][1], version=version)
# Pay-by-Bitcoin-address TxOuts look like:
# DUP HASH160 20 BYTES:... EQUALVERIFY CHECKSIG
match = [ opcodes.OP_DUP, opcodes.OP_HASH160, opcodes.OP_PUSHDATA4, opcodes.OP_EQUALVERIFY, opcodes.OP_CHECKSIG ]
if match_decoded(decoded, match):
return hash_160_to_bc_address(decoded[2][1], version=version)
# BIP11 TxOuts look like one of these:
multisigs = [
[ opcodes.OP_PUSHDATA4, opcodes.OP_PUSHDATA4, opcodes.OP_1, opcodes.OP_CHECKMULTISIG ],
[ opcodes.OP_PUSHDATA4, opcodes.OP_PUSHDATA4, opcodes.OP_PUSHDATA4, opcodes.OP_2, opcodes.OP_CHECKMULTISIG ],
[ opcodes.OP_PUSHDATA4, opcodes.OP_PUSHDATA4, opcodes.OP_PUSHDATA4, opcodes.OP_PUSHDATA4, opcodes.OP_3, opcodes.OP_CHECKMULTISIG ]
]
for match in multisigs:
if match_decoded(decoded, match):
return "["+','.join([public_key_to_bc_address(decoded[i][1]) for i in range(1,len(decoded)-1)])+"]"
# BIP16 TxOuts look like:
# HASH160 20 BYTES:... EQUAL
match = [ opcodes.OP_HASH160, 0x14, opcodes.OP_EQUAL ]
if match_decoded(decoded, match):
return hash_160_to_bc_address(decoded[1][1], version="\x05")
return "(None)"
| 12,744 | Python | .py | 312 | 36.820513 | 134 | 0.648023 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,437 | base58.py | bitcoin-abe_bitcoin-abe/Abe/base58.py |
#!/usr/bin/env python
"""encode/decode base58 in the same way that Bitcoin does"""
import math
__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)
def b58encode(v):
""" encode v, which is a string of bytes, to base58.
"""
long_value = 0L
for (i, c) in enumerate(v[::-1]):
long_value += ord(c) << (8*i) # 2x speedup vs. exponentiation
result = ''
while long_value >= __b58base:
div, mod = divmod(long_value, __b58base)
result = __b58chars[mod] + result
long_value = div
result = __b58chars[long_value] + result
# Bitcoin does a little leading-zero-compression:
# leading 0-bytes in the input become leading-1s
nPad = 0
for c in v:
if c == '\0': nPad += 1
else: break
return (__b58chars[0]*nPad) + result
def b58decode(v, length):
""" decode v into a string of len bytes
"""
long_value = 0L
for (i, c) in enumerate(v[::-1]):
long_value += __b58chars.find(c) * (__b58base**i)
result = ''
while long_value >= 256:
div, mod = divmod(long_value, 256)
result = chr(mod) + result
long_value = div
result = chr(long_value) + result
nPad = 0
for c in v:
if c == __b58chars[0]: nPad += 1
else: break
result = chr(0)*nPad + result
if length is not None and len(result) != length:
return None
return result
try:
# Python Crypto library is at: http://www.dlitz.net/software/pycrypto/
# Needed for RIPEMD160 hash function, used to compute
# Bitcoin addresses from internal public keys.
import Crypto.Hash.SHA256 as SHA256
import Crypto.Hash.RIPEMD as RIPEMD160
have_crypto = True
except ImportError:
have_crypto = False
def hash_160(public_key):
if not have_crypto:
return ''
h1 = SHA256.new(public_key).digest()
h2 = RIPEMD160.new(h1).digest()
return h2
def public_key_to_bc_address(public_key, version="\x00"):
if not have_crypto or public_key is None:
return ''
h160 = hash_160(public_key)
return hash_160_to_bc_address(h160, version=version)
def hash_160_to_bc_address(h160, version="\x00"):
if not have_crypto:
return ''
vh160 = version+h160
h3=SHA256.new(SHA256.new(vh160).digest()).digest()
addr=vh160+h3[0:4]
return b58encode(addr)
def bc_address_to_hash_160(addr):
bytes = b58decode(addr, 25)
return bytes[1:21]
if __name__ == '__main__':
x = '005cc87f4a3fdfe3a2346b6953267ca867282630d3f9b78e64'.decode('hex_codec')
encoded = b58encode(x)
print encoded, '19TbMSWwHvnxAKy12iNm3KdbGfzfaMFViT'
print b58decode(encoded, len(x)).encode('hex_codec'), x.encode('hex_codec')
| 2,604 | Python | .py | 79 | 29.518987 | 80 | 0.68755 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,438 | mixup.py | bitcoin-abe_bitcoin-abe/Abe/mixup.py |
#!/usr/bin/env python
# Copyright(C) 2012,2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
"""Load blocks in different order for testing."""
import sys
import logging
import BCDataStream, util
def mixup_blocks(store, ds, count, datadir_chain = None, seed = None):
bytes_done = 0
offsets = []
for i in xrange(count):
if ds.read_cursor + 8 <= len(ds.input):
offsets.append(ds.read_cursor)
magic = ds.read_bytes(4)
length = ds.read_int32()
ds.read_cursor += length
if ds.read_cursor <= len(ds.input):
continue
raise IOError("End of input after %d blocks" % i)
if seed > 1 and seed <= count:
for i in xrange(0, seed * int(count/seed), seed):
offsets[i : i + seed] = offsets[i : i + seed][::-1]
elif seed == -3:
for i in xrange(0, 3 * int(count/3), 3):
offsets[i : i + 3] = offsets[i+1 : i + 3] + [offsets[i]]
print offsets
elif seed:
offsets = offsets[::-1] # XXX want random
for offset in offsets:
ds.read_cursor = offset
magic = ds.read_bytes(4)
length = ds.read_int32()
# Assume blocks obey the respective policy if they get here.
chain = datadir_chain
if chain is None:
chain = store.chains_by.magic.get(magic)
if chain is None:
ds.read_cursor = offset
raise ValueError(
"Chain not found for magic number %s in block file at"
" offset %d." % (repr(magic), offset))
# XXX pasted out of DataStore.import_blkdat, which has since undergone
# considerable changes.
end = ds.read_cursor + length
hash = util.double_sha256(
ds.input[ds.read_cursor : ds.read_cursor + 80])
# XXX should decode target and check hash against it to
# avoid loading garbage data. But not for merged-mined or
# CPU-mined chains that use different proof-of-work
# algorithms. Time to resurrect policy_id?
block_row = store.selectrow("""
SELECT block_id, block_height, block_chain_work,
block_nTime, block_total_seconds,
block_total_satoshis, block_satoshi_seconds
FROM block
WHERE block_hash = ?
""", (store.hashin(hash),))
if block_row:
# Block header already seen. Don't import the block,
# but try to add it to the chain.
if chain is not None:
b = {
"block_id": block_row[0],
"height": block_row[1],
"chain_work": store.binout_int(block_row[2]),
"nTime": block_row[3],
"seconds": block_row[4],
"satoshis": block_row[5],
"ss": block_row[6]}
if store.selectrow("""
SELECT 1
FROM chain_candidate
WHERE block_id = ?
AND chain_id = ?""",
(b['block_id'], chain.id)):
store.log.info("block %d already in chain %d",
b['block_id'], chain.id)
b = None
else:
if b['height'] == 0:
b['hashPrev'] = util.GENESIS_HASH_PREV
else:
b['hashPrev'] = 'dummy' # Fool adopt_orphans.
store.offer_block_to_chains(b, frozenset([chain.id]))
else:
b = chain.ds_parse_block(ds)
b["hash"] = hash
chain_ids = frozenset([] if chain is None else [chain.id])
store.import_block(b, chain_ids = chain_ids)
if ds.read_cursor != end:
store.log.debug("Skipped %d bytes at block end",
end - ds.read_cursor)
bytes_done += length
if bytes_done >= store.commit_bytes:
store.log.debug("commit")
store.commit()
bytes_done = 0
if bytes_done > 0:
store.commit()
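# Illustrative sketch (not part of Abe): how the seed parameter above permutes
# block offsets before loading. With six block offsets in file order:
#
#     offsets = [0, 1, 2, 3, 4, 5]
#     # seed == 3: each group of three is reversed -> [2, 1, 0, 5, 4, 3]
#     # seed == -3: each group of three is rotated -> [1, 2, 0, 4, 5, 3]
#     # any other non-zero seed reverses the whole list; 0 keeps file order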
def main(argv):
conf = {
"count": 200,
"seed": 1,
"blkfile": None,
}
cmdline = util.CmdLine(argv, conf)
cmdline.usage = lambda: \
"""Usage: python -m Abe.mixup [-h] [--config=FILE] [--CONFIGVAR=VALUE]...
Load blocks out of order.
--help Show this help message and exit.
--config FILE Read options from FILE.
--count NUMBER Load COUNT blocks.
--blkfile FILE Load the first COUNT blocks from FILE.
--seed NUMBER Random seed (not implemented; 0=file order).
All configuration variables may be given as command arguments."""
store, argv = cmdline.init()
if store is None:
return 0
args = store.args
if args.blkfile is None:
raise ValueError("--blkfile is required.")
ds = BCDataStream.BCDataStream()
file = open(args.blkfile, "rb")
ds.map_file(file, 0)
file.close()
mixup_blocks(store, ds, int(args.count), None, int(args.seed or 0))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| 6,013 | Python | .py | 142 | 31.894366 | 81 | 0.549684 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,439 | enumeration.py | bitcoin-abe_bitcoin-abe/Abe/enumeration.py |
#
# enum-like type
# From the Python Cookbook, downloaded from http://code.activestate.com/recipes/67107/
#
import types, string, exceptions
class EnumException(exceptions.Exception):
pass
class Enumeration:
def __init__(self, name, enumList):
self.__doc__ = name
lookup = { }
reverseLookup = { }
i = 0
uniqueNames = [ ]
uniqueValues = [ ]
for x in enumList:
if type(x) == types.TupleType:
x, i = x
if type(x) != types.StringType:
raise EnumException, "enum name is not a string: " + x
if type(i) != types.IntType:
raise EnumException, "enum value is not an integer: " + i
if x in uniqueNames:
raise EnumException, "enum name is not unique: " + x
if i in uniqueValues:
raise EnumException, "enum value is not unique for " + x
uniqueNames.append(x)
uniqueValues.append(i)
lookup[x] = i
reverseLookup[i] = x
i = i + 1
self.lookup = lookup
self.reverseLookup = reverseLookup
def __getattr__(self, attr):
if not self.lookup.has_key(attr):
raise AttributeError
return self.lookup[attr]
def whatis(self, value):
return self.reverseLookup[value]
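# Illustrative sketch (not part of the original recipe): values count up from
# zero unless a (name, value) tuple resets the counter, which is how the
# opcodes table in deserialize.py uses this class.
#
#     colors = Enumeration("Colors", ["RED", ("BLUE", 10), "GREEN"])
#     colors.RED         # 0
#     colors.BLUE        # 10
#     colors.GREEN       # 11
#     colors.whatis(11)  # 'GREEN'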
| 1,365 | Python | .py | 39 | 25.641026 | 86 | 0.568731 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,440 | BCDataStream.py | bitcoin-abe_bitcoin-abe/Abe/BCDataStream.py |
#
# Workalike python implementation of Bitcoin's CDataStream class.
#
import struct
import StringIO
import mmap
class SerializationError(Exception):
""" Thrown when there's a problem deserializing or serializing """
class BCDataStream(object):
def __init__(self):
self.input = None
self.read_cursor = 0
def clear(self):
self.input = None
self.read_cursor = 0
def write(self, bytes): # Initialize with string of bytes
if self.input is None:
self.input = bytes
else:
self.input += bytes
def map_file(self, file, start): # Initialize with bytes from file
self.input = mmap.mmap(file.fileno(), 0, access=mmap.ACCESS_READ)
self.read_cursor = start
def seek_file(self, position):
self.read_cursor = position
def close_file(self):
self.input.close()
def read_string(self):
# Strings are encoded depending on length:
# 0 to 252 : 1-byte-length followed by bytes (if any)
# 253 to 65,535 : byte'253' 2-byte-length followed by bytes
# 65,536 to 4,294,967,295 : byte '254' 4-byte-length followed by bytes
# ... and the Bitcoin client is coded to understand:
# greater than 4,294,967,295 : byte '255' 8-byte-length followed by bytes of string
# ... but I don't think it actually handles any strings that big.
if self.input is None:
raise SerializationError("call write(bytes) before trying to deserialize")
try:
length = self.read_compact_size()
except IndexError:
raise SerializationError("attempt to read past end of buffer")
return self.read_bytes(length)
def write_string(self, string):
# Length-encoded as with read-string
self.write_compact_size(len(string))
self.write(string)
def read_bytes(self, length):
try:
result = self.input[self.read_cursor:self.read_cursor+length]
self.read_cursor += length
return result
except IndexError:
raise SerializationError("attempt to read past end of buffer")
return ''
def read_boolean(self): return self.read_bytes(1)[0] != chr(0)
def read_int16 (self): return self._read_num('<h')
def read_uint16 (self): return self._read_num('<H')
def read_int32 (self): return self._read_num('<i')
def read_uint32 (self): return self._read_num('<I')
def read_int64 (self): return self._read_num('<q')
def read_uint64 (self): return self._read_num('<Q')
def write_boolean(self, val): return self.write(chr(1) if val else chr(0))
def write_int16 (self, val): return self._write_num('<h', val)
def write_uint16 (self, val): return self._write_num('<H', val)
def write_int32 (self, val): return self._write_num('<i', val)
def write_uint32 (self, val): return self._write_num('<I', val)
def write_int64 (self, val): return self._write_num('<q', val)
def write_uint64 (self, val): return self._write_num('<Q', val)
def read_compact_size(self):
size = ord(self.input[self.read_cursor])
self.read_cursor += 1
if size == 253:
size = self._read_num('<H')
elif size == 254:
size = self._read_num('<I')
elif size == 255:
size = self._read_num('<Q')
return size
def write_compact_size(self, size):
if size < 0:
raise SerializationError("attempt to write size < 0")
elif size < 253:
self.write(chr(size))
elif size < 2**16:
self.write('\xfd')
self._write_num('<H', size)
elif size < 2**32:
self.write('\xfe')
self._write_num('<I', size)
elif size < 2**64:
self.write('\xff')
self._write_num('<Q', size)
def _read_num(self, format):
(i,) = struct.unpack_from(format, self.input, self.read_cursor)
self.read_cursor += struct.calcsize(format)
return i
def _write_num(self, format, num):
s = struct.pack(format, num)
self.write(s)
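# Illustrative sketch (not part of Abe): round-tripping a string through the
# compact-size framing described in read_string() above.
#
#     ds = BCDataStream()
#     ds.write_string("hello")   # one length byte (5) followed by the bytes
#     print repr(ds.input)       # '\x05hello'
#     print ds.read_string()     # 'hello'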
| 3,809 | Python | .py | 99 | 33.777778 | 87 | 0.666667 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,441 | Dash.py | bitcoin-abe_bitcoin-abe/Abe/Chain/Dash.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .X11Chain import X11Chain
class Dash(X11Chain):
def __init__(chain, **kwargs):
chain.name = 'Dash'
chain.code3 = 'DASH'
chain.address_version = '\x4c'
chain.script_addr_vers = '\x05'
chain.magic = '\xbf\x0c\x6b\xbd'
X11Chain.__init__(chain, **kwargs)
datadir_conf_file_name = 'dash.conf'
datadir_rpcport = 9998
datadir_p2pport = 9999
| 1,114 | Python | .py | 26 | 39.153846 | 70 | 0.72048 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,442 | NmcAuxPowChain.py | bitcoin-abe_bitcoin-abe/Abe/Chain/NmcAuxPowChain.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from . import BaseChain
from .. import deserialize
class NmcAuxPowChain(BaseChain):
"""
A blockchain that represents merge-mining proof-of-work in an "AuxPow" structure as does Namecoin.
"""
def __init__(chain, **kwargs):
BaseChain.__init__(chain, **kwargs)
def ds_parse_block_header(chain, ds):
d = BaseChain.ds_parse_block_header(chain, ds)
if d['version'] & (1 << 8):
d['auxpow'] = deserialize.parse_AuxPow(ds)
return d
def has_feature(chain, feature):
return feature == 'block_version_bit8_merge_mine' \
or BaseChain.has_feature(chain, feature)
| 1,349 | Python | .py | 30 | 40.8 | 102 | 0.715373 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,443 | Maxcoin.py | bitcoin-abe_bitcoin-abe/Abe/Chain/Maxcoin.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .KeccakChain import KeccakChain
from .. import util
class Maxcoin(KeccakChain):
"""
Maxcoin uses Keccak for block headers and single SHA-256 for transactions.
"""
def __init__(chain, **kwargs):
chain.name = 'Maxcoin'
chain.code3 = 'MAX'
chain.address_version = '\x6e'
chain.script_addr_vers = '\x70'
chain.magic = "\xf9\xbe\xbb\xd2"
super(Maxcoin, chain).__init__(**kwargs)
def transaction_hash(chain, binary_tx):
return util.sha256(binary_tx)
datadir_conf_file_name = 'maxcoin.conf'
datadir_rpcport = 8669
| 1,308 | Python | .py | 31 | 38.258065 | 78 | 0.71934 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,444 | ScryptJaneChain.py | bitcoin-abe_bitcoin-abe/Abe/Chain/ScryptJaneChain.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from . import BaseChain
YAC_START_TIME = 1377557832
class ScryptJaneChain(BaseChain):
"""
A blockchain that uses Scrypt-Jane to hash block headers.
The current implementation requires the yac_scrypt module.
The ScryptJaneChain policy must be subclassed to provide the start_time
parameter in Unix time_t format.
"""
POLICY_ATTRS = BaseChain.POLICY_ATTRS + ['start_time']
def block_header_hash(chain, header):
import yac_scrypt
b = chain.parse_block_header(header)
return yac_scrypt.getPoWHash(header, b['nTime'] + YAC_START_TIME - chain.start_time)
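# Hypothetical sketch (the coin name, version bytes and magic below are made
# up, not part of Abe): a concrete chain supplies its own start_time and the
# usual identifying constants, in the same style as the other subclasses in
# this package.
#
#     class ExampleScryptJaneCoin(ScryptJaneChain):
#         def __init__(chain, **kwargs):
#             chain.name = 'ExampleCoin'
#             chain.code3 = 'EXC'
#             chain.address_version = '\x00'
#             chain.script_addr_vers = '\x05'
#             chain.magic = '\xde\xad\xbe\xef'
#             chain.start_time = 1377557832  # Unix time_t, as the docstring requires
#             ScryptJaneChain.__init__(chain, **kwargs)
#
#         datadir_conf_file_name = 'examplecoin.conf'
#         datadir_rpcport = 12345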
| 1,318 | Python | .py | 28 | 43.785714 | 92 | 0.755452 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,445 | Unbreakablecoin.py | bitcoin-abe_bitcoin-abe/Abe/Chain/Unbreakablecoin.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .Sha256Chain import Sha256Chain
class Unbreakablecoin(Sha256Chain):
    def __init__(chain, **kwargs):
        chain.name = 'Unbreakablecoin'
        chain.code3 = 'UNB'
        chain.address_version = '\x00'
        chain.script_addr_vers = '\x05'
        chain.magic = '\x83\x33\x07\xb1'
        Sha256Chain.__init__(chain, **kwargs)
    datadir_conf_file_name = 'Unbreakablecoin.conf'
    datadir_rpcport = 9337
    datadir_p2pport = 9336
| 1,158 | Python | .py | 26 | 40.846154 | 70 | 0.731383 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,446 | KeccakChain.py | bitcoin-abe_bitcoin-abe/Abe/Chain/KeccakChain.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from . import BaseChain
from .. import util
class KeccakChain(BaseChain):
"""
A blockchain using 256-bit SHA3 (Keccak) as the block header hash.
"""
def block_header_hash(chain, header):
return util.sha3_256(header)
| 950 | Python | .py | 22 | 40.863636 | 70 | 0.76 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,447 | BlackCoin.py | bitcoin-abe_bitcoin-abe/Abe/Chain/BlackCoin.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .NvcChain import NvcChain
class BlackCoin(NvcChain):
    def __init__(chain, **kwargs):
        chain.name = "BlackCoin"
        chain.code3 = "BC"
        chain.address_version = "\x19"
        chain.script_addr_vers = "\x55"
        chain.magic = "\x70\x35\x22\x05"
        NvcChain.__init__(chain, **kwargs)
    def block_header_hash(chain, header):
        b = chain.parse_block_header(header)
        if (b['version'] > 6):
            from .. import util
            return util.double_sha256(header)
        import ltc_scrypt
        return ltc_scrypt.getPoWHash(header)
    datadir_conf_file_name = "blackcoin.conf"
    datadir_rpcport = 15715
| 1,369 | Python | .py | 32 | 37.8125 | 70 | 0.70045 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,448 | PpcPosChain.py | bitcoin-abe_bitcoin-abe/Abe/Chain/PpcPosChain.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from . import BaseChain
from .. import deserialize
class PpcPosChain(BaseChain):
"""
A blockchain with proof-of-stake as in Peercoin.
"""
def ds_parse_transaction(chain, ds):
return deserialize.parse_Transaction(ds, has_nTime=True)
def ds_parse_block(chain, ds):
d = BaseChain.ds_parse_block(chain, ds)
d['block_sig'] = ds.read_bytes(ds.read_compact_size())
return d
| 1,130 | Python | .py | 26 | 40.230769 | 70 | 0.740909 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,449 | X11PosChain.py | bitcoin-abe_bitcoin-abe/Abe/Chain/X11PosChain.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .X11Chain import X11Chain
from .PpcPosChain import PpcPosChain
class X11PosChain(X11Chain, PpcPosChain):
    pass
| 829 | Python | .py | 18 | 44.555556 | 70 | 0.789604 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,450 | Namecoin.py | bitcoin-abe_bitcoin-abe/Abe/Chain/Namecoin.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .Sha256NmcAuxPowChain import Sha256NmcAuxPowChain
from . import SCRIPT_TYPE_UNKNOWN
from ..deserialize import opcodes
class Namecoin(Sha256NmcAuxPowChain):
"""
Namecoin represents name operations in transaction output scripts.
"""
def __init__(chain, **kwargs):
chain.name = 'Namecoin'
chain.code3 = 'NMC'
chain.address_version = '\x34'
chain.magic = '\xf9\xbe\xb4\xfe'
Sha256NmcAuxPowChain.__init__(chain, **kwargs)
_drops = (opcodes.OP_NOP, opcodes.OP_DROP, opcodes.OP_2DROP)
def parse_decoded_txout_script(chain, decoded):
start = 0
pushed = 0
# Tolerate (but ignore for now) name operations.
for i in xrange(len(decoded)):
opcode = decoded[i][0]
if decoded[i][1] is not None or \
opcode == opcodes.OP_0 or \
opcode == opcodes.OP_1NEGATE or \
(opcode >= opcodes.OP_1 and opcode <= opcodes.OP_16):
pushed += 1
elif opcode in chain._drops:
to_drop = chain._drops.index(opcode)
if pushed < to_drop:
break
pushed -= to_drop
start = i + 1
else:
return Sha256NmcAuxPowChain.parse_decoded_txout_script(chain, decoded[start:])
return SCRIPT_TYPE_UNKNOWN, decoded
datadir_conf_file_name = "namecoin.conf"
datadir_rpcport = 8336
| 2,180 | Python | .py | 50 | 35.64 | 94 | 0.649057 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,451 | LegacyNoBit8.py | bitcoin-abe_bitcoin-abe/Abe/Chain/LegacyNoBit8.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .Sha256Chain import Sha256Chain
class LegacyNoBit8(Sha256Chain):
"""
Same as Sha256Chain, for backwards compatibility.
"""
pass
| 859 | Python | .py | 20 | 40.9 | 70 | 0.773923 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,452 | Bitcoin.py | bitcoin-abe_bitcoin-abe/Abe/Chain/Bitcoin.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .Sha256Chain import Sha256Chain
class Bitcoin(Sha256Chain):
    def __init__(chain, **kwargs):
        chain.name = 'Bitcoin'
        chain.code3 = 'BTC'
        chain.address_version = '\x00'
        chain.script_addr_vers = '\x05'
        chain.magic = '\xf9\xbe\xb4\xd9'
        Sha256Chain.__init__(chain, **kwargs)
| 1,035 | Python | .py | 23 | 41.521739 | 70 | 0.728444 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,453 | Californium.py | bitcoin-abe_bitcoin-abe/Abe/Chain/Californium.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .Sha256Chain import Sha256Chain
class Californium(Sha256Chain):
    def __init__(chain, **kwargs):
        chain.name = 'Californium'
        # FIX make it 'CF'
        chain.code3 = 'CF '
        chain.address_version = '\x58'
        chain.script_addr_vers = '\x1E'
        chain.magic = '\x0f\xdb\xbb\x07'
        Sha256Chain.__init__(chain, **kwargs)
    datadir_conf_file_name = 'Californium.conf'
    datadir_rpcport = 44254
    datadir_p2pport = 44252
| 1,173 | Python | .py | 27 | 39.62963 | 70 | 0.722417 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,454 | Sha256NmcAuxPowChain.py | bitcoin-abe_bitcoin-abe/Abe/Chain/Sha256NmcAuxPowChain.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .Sha256Chain import Sha256Chain
from .NmcAuxPowChain import NmcAuxPowChain
class Sha256NmcAuxPowChain(Sha256Chain, NmcAuxPowChain):
    pass
| 856 | Python | .py | 18 | 46.055556 | 70 | 0.796407 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,455 | NvcChain.py | bitcoin-abe_bitcoin-abe/Abe/Chain/NvcChain.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from . import BaseChain
from .LtcScryptChain import LtcScryptChain
from .PpcPosChain import PpcPosChain
class NvcChain(LtcScryptChain, PpcPosChain):
"""
Chain with NovaCoin-style proof of stake.
"""
def has_feature(chain, feature):
return feature == 'nvc_proof_of_stake'
| 1,005 | Python | .py | 23 | 41.434783 | 70 | 0.771195 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,456 | Sha256Chain.py | bitcoin-abe_bitcoin-abe/Abe/Chain/Sha256Chain.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from . import BaseChain
from .. import util
class Sha256Chain(BaseChain):
"""
A blockchain that hashes its block headers using double SHA2-256 as Bitcoin does.
"""
def block_header_hash(chain, header):
return util.double_sha256(header)
| 970 | Python | .py | 22 | 41.772727 | 85 | 0.765079 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,457 | __init__.py | bitcoin-abe_bitcoin-abe/Abe/Chain/__init__.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .. import deserialize, BCDataStream, util
from ..deserialize import opcodes
def create(policy, **kwargs):
    mod = __import__(__name__ + '.' + policy, fromlist=[policy])
    cls = getattr(mod, policy)
    return cls(policy=policy, **kwargs)
PUBKEY_HASH_LENGTH = 20
MAX_MULTISIG_KEYS = 3
# Template to match a pubkey hash ("Bitcoin address transaction") in
# txout_scriptPubKey. OP_PUSHDATA4 matches any data push.
SCRIPT_ADDRESS_TEMPLATE = [
    opcodes.OP_DUP, opcodes.OP_HASH160, opcodes.OP_PUSHDATA4, opcodes.OP_EQUALVERIFY, opcodes.OP_CHECKSIG ]
# Template to match a pubkey ("IP address transaction") in txout_scriptPubKey.
SCRIPT_PUBKEY_TEMPLATE = [ opcodes.OP_PUSHDATA4, opcodes.OP_CHECKSIG ]
# Template to match a BIP16 pay-to-script-hash (P2SH) output script.
SCRIPT_P2SH_TEMPLATE = [ opcodes.OP_HASH160, PUBKEY_HASH_LENGTH, opcodes.OP_EQUAL ]
# Template to match a script that can never be redeemed, used in Namecoin.
SCRIPT_BURN_TEMPLATE = [ opcodes.OP_RETURN ]
SCRIPT_TYPE_INVALID = 0
SCRIPT_TYPE_UNKNOWN = 1
SCRIPT_TYPE_PUBKEY = 2
SCRIPT_TYPE_ADDRESS = 3
SCRIPT_TYPE_BURN = 4
SCRIPT_TYPE_MULTISIG = 5
SCRIPT_TYPE_P2SH = 6
class BaseChain(object):
    POLICY_ATTRS = ['magic', 'name', 'code3', 'address_version', 'decimals', 'script_addr_vers']
    __all__ = ['id', 'policy'] + POLICY_ATTRS
    def __init__(chain, src=None, **kwargs):
        for attr in chain.__all__:
            if attr in kwargs:
                val = kwargs.get(attr)
            elif hasattr(chain, attr):
                continue
            elif src is not None:
                val = getattr(src, attr)
            else:
                val = None
            setattr(chain, attr, val)
    def has_feature(chain, feature):
        return False
    def ds_parse_block_header(chain, ds):
        return deserialize.parse_BlockHeader(ds)
    def ds_parse_transaction(chain, ds):
        return deserialize.parse_Transaction(ds)
    def ds_parse_block(chain, ds):
        d = chain.ds_parse_block_header(ds)
        d['transactions'] = []
        nTransactions = ds.read_compact_size()
        for i in xrange(nTransactions):
            d['transactions'].append(chain.ds_parse_transaction(ds))
        return d
    def ds_serialize_block(chain, ds, block):
        chain.ds_serialize_block_header(ds, block)
        ds.write_compact_size(len(block['transactions']))
        for tx in block['transactions']:
            chain.ds_serialize_transaction(ds, tx)
    def ds_serialize_block_header(chain, ds, block):
        ds.write_int32(block['version'])
        ds.write(block['hashPrev'])
        ds.write(block['hashMerkleRoot'])
        ds.write_uint32(block['nTime'])
        ds.write_uint32(block['nBits'])
        ds.write_uint32(block['nNonce'])
    def ds_serialize_transaction(chain, ds, tx):
        ds.write_int32(tx['version'])
        ds.write_compact_size(len(tx['txIn']))
        for txin in tx['txIn']:
            chain.ds_serialize_txin(ds, txin)
        ds.write_compact_size(len(tx['txOut']))
        for txout in tx['txOut']:
            chain.ds_serialize_txout(ds, txout)
        ds.write_uint32(tx['lockTime'])
    def ds_serialize_txin(chain, ds, txin):
        ds.write(txin['prevout_hash'])
        ds.write_uint32(txin['prevout_n'])
        ds.write_string(txin['scriptSig'])
        ds.write_uint32(txin['sequence'])
    def ds_serialize_txout(chain, ds, txout):
        ds.write_int64(txout['value'])
        ds.write_string(txout['scriptPubKey'])
    def serialize_block(chain, block):
        ds = BCDataStream.BCDataStream()
        chain.ds_serialize_block(ds, block)
        return ds.input
    def serialize_block_header(chain, block):
        ds = BCDataStream.BCDataStream()
        chain.ds_serialize_block_header(ds, block)
        return ds.input
    def serialize_transaction(chain, tx):
        ds = BCDataStream.BCDataStream()
        chain.ds_serialize_transaction(ds, tx)
        return ds.input
    def ds_block_header_hash(chain, ds):
        return chain.block_header_hash(
            ds.input[ds.read_cursor : ds.read_cursor + 80])
    def transaction_hash(chain, binary_tx):
        return util.double_sha256(binary_tx)
    def merkle_hash(chain, hashes):
        return util.double_sha256(hashes)
    # Based on CBlock::BuildMerkleTree().
    def merkle_root(chain, hashes):
        while len(hashes) > 1:
            size = len(hashes)
            out = []
            for i in xrange(0, size, 2):
                i2 = min(i + 1, size - 1)
                out.append(chain.merkle_hash(hashes[i] + hashes[i2]))
            hashes = out
        return hashes and hashes[0]
    def parse_block_header(chain, header):
        return chain.ds_parse_block_header(util.str_to_ds(header))
    def parse_transaction(chain, binary_tx):
        return chain.ds_parse_transaction(util.str_to_ds(binary_tx))
    def is_coinbase_tx(chain, tx):
        return len(tx['txIn']) == 1 and tx['txIn'][0]['prevout_hash'] == chain.coinbase_prevout_hash
    coinbase_prevout_hash = util.NULL_HASH
    coinbase_prevout_n = 0xffffffff
    genesis_hash_prev = util.GENESIS_HASH_PREV
    def parse_txout_script(chain, script):
        """
        Return TYPE, DATA where the format of DATA depends on TYPE.
        * SCRIPT_TYPE_INVALID - DATA is the raw script
        * SCRIPT_TYPE_UNKNOWN - DATA is the decoded script
        * SCRIPT_TYPE_PUBKEY - DATA is the binary public key
        * SCRIPT_TYPE_ADDRESS - DATA is the binary public key hash
        * SCRIPT_TYPE_BURN - DATA is None
        * SCRIPT_TYPE_MULTISIG - DATA is {"m":M, "pubkeys":list_of_pubkeys}
        * SCRIPT_TYPE_P2SH - DATA is the binary script hash
        """
        if script is None:
            raise ValueError()
        try:
            decoded = [ x for x in deserialize.script_GetOp(script) ]
        except Exception:
            return SCRIPT_TYPE_INVALID, script
        return chain.parse_decoded_txout_script(decoded)
    def parse_decoded_txout_script(chain, decoded):
        if deserialize.match_decoded(decoded, SCRIPT_ADDRESS_TEMPLATE):
            pubkey_hash = decoded[2][1]
            if len(pubkey_hash) == PUBKEY_HASH_LENGTH:
                return SCRIPT_TYPE_ADDRESS, pubkey_hash
        elif deserialize.match_decoded(decoded, SCRIPT_PUBKEY_TEMPLATE):
            pubkey = decoded[0][1]
            return SCRIPT_TYPE_PUBKEY, pubkey
        elif deserialize.match_decoded(decoded, SCRIPT_P2SH_TEMPLATE):
            script_hash = decoded[1][1]
            assert len(script_hash) == PUBKEY_HASH_LENGTH
            return SCRIPT_TYPE_P2SH, script_hash
        elif deserialize.match_decoded(decoded, SCRIPT_BURN_TEMPLATE):
            return SCRIPT_TYPE_BURN, None
        elif len(decoded) >= 4 and decoded[-1][0] == opcodes.OP_CHECKMULTISIG:
            # cf. bitcoin/src/script.cpp:Solver
            n = decoded[-2][0] + 1 - opcodes.OP_1
            m = decoded[0][0] + 1 - opcodes.OP_1
            if 1 <= m <= n <= MAX_MULTISIG_KEYS and len(decoded) == 3 + n and \
                    all([ decoded[i][0] <= opcodes.OP_PUSHDATA4 for i in range(1, 1+n) ]):
                return SCRIPT_TYPE_MULTISIG, \
                    { "m": m, "pubkeys": [ decoded[i][1] for i in range(1, 1+n) ] }
        # Namecoin overrides this to accept name operations.
        return SCRIPT_TYPE_UNKNOWN, decoded
    def pubkey_hash(chain, pubkey):
        return util.pubkey_to_hash(pubkey)
    def script_hash(chain, script):
        return chain.pubkey_hash(script)
    datadir_conf_file_name = "bitcoin.conf"
    datadir_rpcport = 8332
| 8,312 | Python | .py | 181 | 37.928177 | 107 | 0.645975 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,458 | LtcScryptChain.py | bitcoin-abe_bitcoin-abe/Abe/Chain/LtcScryptChain.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from . import BaseChain
class LtcScryptChain(BaseChain):
"""
A blockchain using Litecoin's scrypt algorithm to hash block headers.
"""
def block_header_hash(chain, header):
import ltc_scrypt
return ltc_scrypt.getPoWHash(header)
| 970 | Python | .py | 22 | 41.409091 | 73 | 0.762963 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,459 | Hirocoin.py | bitcoin-abe_bitcoin-abe/Abe/Chain/Hirocoin.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .X11Chain import X11Chain
class Hirocoin(X11Chain):
    def __init__(chain, **kwargs):
        chain.name = 'Hirocoin'
        chain.code3 = 'HIRO'
        chain.address_version = '\x28'
        chain.script_addr_vers = '\x05'
        chain.magic = '\xfe\xc4\xb9\xde'
        X11Chain.__init__(chain, **kwargs)
    datadir_conf_file_name = 'hirocoin.conf'
    datadir_rpcport = 9347
    datadir_p2pport = 9348
| 1,126 | Python | .py | 26 | 39.615385 | 70 | 0.72354 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,460 | Testnet.py | bitcoin-abe_bitcoin-abe/Abe/Chain/Testnet.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .Sha256Chain import Sha256Chain
class Testnet(Sha256Chain):
"""
The original bitcoin test blockchain.
"""
def __init__(chain, **kwargs):
chain.name = 'Testnet'
chain.code3 = 'BC0'
chain.address_version = '\x6f'
chain.script_addr_vers = '\xc4'
chain.magic = '\xfa\xbf\xb5\xda'
Sha256Chain.__init__(chain, **kwargs)
# XXX
#datadir_conf_file_name = "bitcoin.conf"
#datadir_rpcport = 8332
| 1,177 | Python | .py | 29 | 36.758621 | 70 | 0.715035 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,461 | X11Chain.py | bitcoin-abe_bitcoin-abe/Abe/Chain/X11Chain.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from . import BaseChain
class X11Chain(BaseChain):
"""
A blockchain that hashes block headers using the X11 algorithm.
The current implementation requires the xcoin_hash module.
"""
def block_header_hash(chain, header):
import xcoin_hash
return xcoin_hash.getPoWHash(header)
| 1,022 | Python | .py | 23 | 41.608696 | 70 | 0.763819 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,462 | Bitleu.py | bitcoin-abe_bitcoin-abe/Abe/Chain/Bitleu.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .ScryptJaneChain import ScryptJaneChain
from .PpcPosChain import PpcPosChain
class Bitleu(ScryptJaneChain, PpcPosChain):
    def __init__(chain, **kwargs):
        chain.name = 'Bitleu'
        chain.code3 = 'BTL'
        chain.address_version = "\x30"
        chain.script_addr_vers = '\x1b'
        chain.magic = "\xd9\xe6\xe7\xe5"
        chain.decimals = 6
        super(Bitleu, chain).__init__(**kwargs)
    datadir_conf_file_name = "Bitleu.conf"
    datadir_rpcport = 7997
    start_time = 1394480376
| 1,223 | Python | .py | 28 | 39.892857 | 70 | 0.728799 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,463 | NovaCoin.py | bitcoin-abe_bitcoin-abe/Abe/Chain/NovaCoin.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .NvcChain import NvcChain
class NovaCoin(NvcChain):
    def __init__(chain, **kwargs):
        chain.name = 'NovaCoin'
        chain.code3 = 'NVC'
        chain.address_version = "\x08"
        chain.magic = "\xe4\xe8\xe9\xe5"
        chain.decimals = 6
        NvcChain.__init__(chain, **kwargs)
    datadir_conf_file_name = "novacoin.conf"
    datadir_rpcport = 8344
| 1,085 | Python | .py | 25 | 39.76 | 70 | 0.725379 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,464 | CryptoCash.py | bitcoin-abe_bitcoin-abe/Abe/Chain/CryptoCash.py |
# Copyright(C) 2014 by Abe developers.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
from .NvcChain import NvcChain
class CryptoCash(NvcChain):
    def __init__(chain, **kwargs):
        chain.name = 'Cash'
        chain.code3 = 'CAS'
        chain.address_version = "\x22"
        chain.magic = "\xe4\xc6\xfe\xe7"
        NvcChain.__init__(chain, **kwargs)
    datadir_conf_file_name = "Cash.conf"
    datadir_rpcport = 3941
| 1,052 | Python | .py | 24 | 40.416667 | 70 | 0.728516 | bitcoin-abe/bitcoin-abe | 978 | 653 | 161 | AGPL-3.0 | 9/5/2024, 5:10:46 PM (Europe/Amsterdam) |
| 9,465 | test_fuzzywuzzy_hypothesis.py | seatgeek_fuzzywuzzy/test_fuzzywuzzy_hypothesis.py |
from itertools import product
from functools import partial
from string import ascii_letters, digits, punctuation
from hypothesis import given, assume, settings
import hypothesis.strategies as st
import pytest
from fuzzywuzzy import fuzz, process, utils
HYPOTHESIS_ALPHABET = ascii_letters + digits + punctuation
def scorers_processors():
"""
Generate a list of (scorer, processor) pairs for testing
:return: [(scorer, processor), ...]
"""
scorers = [fuzz.ratio,
fuzz.partial_ratio]
processors = [lambda x: x,
partial(utils.full_process, force_ascii=False),
partial(utils.full_process, force_ascii=True)]
splist = list(product(scorers, processors))
splist.extend(
[(fuzz.WRatio, partial(utils.full_process, force_ascii=True)),
(fuzz.QRatio, partial(utils.full_process, force_ascii=True)),
(fuzz.UWRatio, partial(utils.full_process, force_ascii=False)),
(fuzz.UQRatio, partial(utils.full_process, force_ascii=False)),
(fuzz.token_set_ratio, partial(utils.full_process, force_ascii=True)),
(fuzz.token_sort_ratio, partial(utils.full_process, force_ascii=True)),
(fuzz.partial_token_set_ratio, partial(utils.full_process, force_ascii=True)),
(fuzz.partial_token_sort_ratio, partial(utils.full_process, force_ascii=True))]
)
return splist
def full_scorers_processors():
"""
Generate a list of (scorer, processor) pairs for testing for scorers that use the full string only
:return: [(scorer, processor), ...]
"""
scorers = [fuzz.ratio]
processors = [lambda x: x,
partial(utils.full_process, force_ascii=False),
partial(utils.full_process, force_ascii=True)]
splist = list(product(scorers, processors))
splist.extend(
[(fuzz.WRatio, partial(utils.full_process, force_ascii=True)),
(fuzz.QRatio, partial(utils.full_process, force_ascii=True)),
(fuzz.UWRatio, partial(utils.full_process, force_ascii=False)),
(fuzz.UQRatio, partial(utils.full_process, force_ascii=False))]
)
return splist
@pytest.mark.parametrize('scorer,processor',
scorers_processors())
@given(data=st.data())
@settings(max_examples=20, deadline=5000)
def test_identical_strings_extracted(scorer, processor, data):
"""
Test that identical strings will always return a perfect match.
:param scorer:
:param processor:
:param data:
:return:
"""
# Draw a list of random strings
strings = data.draw(
st.lists(
st.text(min_size=10, max_size=100, alphabet=HYPOTHESIS_ALPHABET),
min_size=1,
max_size=10
)
)
# Draw a random integer for the index in that list
choiceidx = data.draw(st.integers(min_value=0, max_value=(len(strings) - 1)))
# Extract our choice from the list
choice = strings[choiceidx]
# Check process doesn't make our choice the empty string
assume(processor(choice) != '')
# Extract all perfect matches
result = process.extractBests(choice,
strings,
scorer=scorer,
processor=processor,
score_cutoff=100,
limit=None)
# Check we get a result
assert result != []
# Check the original is in the list
assert (choice, 100) in result
@pytest.mark.parametrize('scorer,processor',
full_scorers_processors())
@given(data=st.data())
@settings(max_examples=20, deadline=5000)
def test_only_identical_strings_extracted(scorer, processor, data):
"""
Test that only identical (post processing) strings score 100 on the test.
If two strings are not identical then using full comparison methods they should
not be a perfect (100) match.
:param scorer:
:param processor:
:param data:
:return:
"""
# Draw a list of random strings
strings = data.draw(
st.lists(
st.text(min_size=10, max_size=100, alphabet=HYPOTHESIS_ALPHABET),
min_size=1,
max_size=10)
)
# Draw a random integer for the index in that list
choiceidx = data.draw(st.integers(min_value=0, max_value=(len(strings) - 1)))
# Extract our choice from the list
choice = strings[choiceidx]
# Check process doesn't make our choice the empty string
assume(processor(choice) != '')
# Extract all perfect matches
result = process.extractBests(choice,
strings,
scorer=scorer,
processor=processor,
score_cutoff=100,
limit=None)
# Check we get a result
assert result != []
# Check THE ONLY result(s) we get are a perfect match for the (processed) original data
pchoice = processor(choice)
for r in result:
assert pchoice == processor(r[0])
| 5,123 | Python | .py | 124 | 32.379032 | 102 | 0.631388 | seatgeek/fuzzywuzzy | 9,206 | 876 | 107 | GPL-2.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,466 | setup.py | seatgeek_fuzzywuzzy/setup.py |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 SeatGeek
# This file is part of fuzzywuzzy.
from fuzzywuzzy import __version__
import os
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
def open_file(fname):
    return open(os.path.join(os.path.dirname(__file__), fname))
setup(
    name='fuzzywuzzy',
    version=__version__,
    author='Adam Cohen',
    author_email='adam@seatgeek.com',
    packages=['fuzzywuzzy'],
    extras_require={'speedup': ['python-levenshtein>=0.12']},
    url='https://github.com/seatgeek/fuzzywuzzy',
    license="GPLv2",
    classifiers=[
        'Intended Audience :: Developers',
        'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
    description='Fuzzy string matching in python',
    long_description=open_file('README.rst').read(),
    zip_safe=True,
)
| 1,178 | Python | .py | 35 | 28.8 | 75 | 0.65757 | seatgeek/fuzzywuzzy | 9,206 | 876 | 107 | GPL-2.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,467 | test_fuzzywuzzy.py | seatgeek_fuzzywuzzy/test_fuzzywuzzy.py |
# -*- coding: utf8 -*-
from __future__ import unicode_literals
import unittest
import re
import sys
import pycodestyle
from fuzzywuzzy import fuzz
from fuzzywuzzy import process
from fuzzywuzzy import utils
from fuzzywuzzy.string_processing import StringProcessor
if sys.version_info[0] == 3:
unicode = str
class StringProcessingTest(unittest.TestCase):
def test_replace_non_letters_non_numbers_with_whitespace(self):
strings = ["new york mets - atlanta braves", "Cães danados",
"New York //// Mets $$$", "Ça va?"]
for string in strings:
proc_string = StringProcessor.replace_non_letters_non_numbers_with_whitespace(string)
regex = re.compile(r"(?ui)[\W]")
for expr in regex.finditer(proc_string):
self.assertEqual(expr.group(), " ")
def test_dont_condense_whitespace(self):
s1 = "new york mets - atlanta braves"
s2 = "new york mets atlanta braves"
p1 = StringProcessor.replace_non_letters_non_numbers_with_whitespace(s1)
p2 = StringProcessor.replace_non_letters_non_numbers_with_whitespace(s2)
self.assertNotEqual(p1, p2)
class UtilsTest(unittest.TestCase):
def setUp(self):
self.s1 = "new york mets"
self.s1a = "new york mets"
self.s2 = "new YORK mets"
self.s3 = "the wonderful new york mets"
self.s4 = "new york mets vs atlanta braves"
self.s5 = "atlanta braves vs new york mets"
self.s6 = "new york mets - atlanta braves"
self.mixed_strings = [
"Lorem Ipsum is simply dummy text of the printing and typesetting industry.",
"C'est la vie",
"Ça va?",
"Cães danados",
"\xacCamarões assados",
"a\xac\u1234\u20ac\U00008000",
"\u00C1"
]
def tearDown(self):
pass
def test_asciidammit(self):
for s in self.mixed_strings:
utils.asciidammit(s)
def test_asciionly(self):
for s in self.mixed_strings:
# ascii only only runs on strings
s = utils.asciidammit(s)
utils.asciionly(s)
def test_fullProcess(self):
for s in self.mixed_strings:
utils.full_process(s)
def test_fullProcessForceAscii(self):
for s in self.mixed_strings:
utils.full_process(s, force_ascii=True)
class RatioTest(unittest.TestCase):
def setUp(self):
self.s1 = "new york mets"
self.s1a = "new york mets"
self.s2 = "new YORK mets"
self.s3 = "the wonderful new york mets"
self.s4 = "new york mets vs atlanta braves"
self.s5 = "atlanta braves vs new york mets"
self.s6 = "new york mets - atlanta braves"
self.s7 = 'new york city mets - atlanta braves'
# test silly corner cases
self.s8 = '{'
self.s8a = '{'
self.s9 = '{a'
self.s9a = '{a'
self.s10 = 'a{'
self.s10a = '{b'
self.cirque_strings = [
"cirque du soleil - zarkana - las vegas",
"cirque du soleil ",
"cirque du soleil las vegas",
"zarkana las vegas",
"las vegas cirque du soleil at the bellagio",
"zarakana - cirque du soleil - bellagio"
]
self.baseball_strings = [
"new york mets vs chicago cubs",
"chicago cubs vs chicago white sox",
"philladelphia phillies vs atlanta braves",
"braves vs mets",
]
def tearDown(self):
pass
def testEqual(self):
self.assertEqual(fuzz.ratio(self.s1, self.s1a), 100)
self.assertEqual(fuzz.ratio(self.s8, self.s8a), 100)
self.assertEqual(fuzz.ratio(self.s9, self.s9a), 100)
def testCaseInsensitive(self):
self.assertNotEqual(fuzz.ratio(self.s1, self.s2), 100)
self.assertEqual(fuzz.ratio(utils.full_process(self.s1), utils.full_process(self.s2)), 100)
def testPartialRatio(self):
self.assertEqual(fuzz.partial_ratio(self.s1, self.s3), 100)
def testTokenSortRatio(self):
self.assertEqual(fuzz.token_sort_ratio(self.s1, self.s1a), 100)
def testPartialTokenSortRatio(self):
self.assertEqual(fuzz.partial_token_sort_ratio(self.s1, self.s1a), 100)
self.assertEqual(fuzz.partial_token_sort_ratio(self.s4, self.s5), 100)
self.assertEqual(fuzz.partial_token_sort_ratio(self.s8, self.s8a, full_process=False), 100)
self.assertEqual(fuzz.partial_token_sort_ratio(self.s9, self.s9a, full_process=True), 100)
self.assertEqual(fuzz.partial_token_sort_ratio(self.s9, self.s9a, full_process=False), 100)
self.assertEqual(fuzz.partial_token_sort_ratio(self.s10, self.s10a, full_process=False), 50)
def testTokenSetRatio(self):
self.assertEqual(fuzz.token_set_ratio(self.s4, self.s5), 100)
self.assertEqual(fuzz.token_set_ratio(self.s8, self.s8a, full_process=False), 100)
self.assertEqual(fuzz.token_set_ratio(self.s9, self.s9a, full_process=True), 100)
self.assertEqual(fuzz.token_set_ratio(self.s9, self.s9a, full_process=False), 100)
self.assertEqual(fuzz.token_set_ratio(self.s10, self.s10a, full_process=False), 50)
def testPartialTokenSetRatio(self):
self.assertEqual(fuzz.partial_token_set_ratio(self.s4, self.s7), 100)
def testQuickRatioEqual(self):
self.assertEqual(fuzz.QRatio(self.s1, self.s1a), 100)
def testQuickRatioCaseInsensitive(self):
self.assertEqual(fuzz.QRatio(self.s1, self.s2), 100)
def testQuickRatioNotEqual(self):
self.assertNotEqual(fuzz.QRatio(self.s1, self.s3), 100)
def testWRatioEqual(self):
self.assertEqual(fuzz.WRatio(self.s1, self.s1a), 100)
def testWRatioCaseInsensitive(self):
self.assertEqual(fuzz.WRatio(self.s1, self.s2), 100)
def testWRatioPartialMatch(self):
# a partial match is scaled by .9
self.assertEqual(fuzz.WRatio(self.s1, self.s3), 90)
def testWRatioMisorderedMatch(self):
# misordered full matches are scaled by .95
self.assertEqual(fuzz.WRatio(self.s4, self.s5), 95)
def testWRatioUnicode(self):
self.assertEqual(fuzz.WRatio(unicode(self.s1), unicode(self.s1a)), 100)
def testQRatioUnicode(self):
self.assertEqual(fuzz.WRatio(unicode(self.s1), unicode(self.s1a)), 100)
def testEmptyStringsScore100(self):
self.assertEqual(fuzz.ratio("", ""), 100)
self.assertEqual(fuzz.partial_ratio("", ""), 100)
def testIssueSeven(self):
s1 = "HSINCHUANG"
s2 = "SINJHUAN"
s3 = "LSINJHUANG DISTRIC"
s4 = "SINJHUANG DISTRICT"
self.assertTrue(fuzz.partial_ratio(s1, s2) > 75)
self.assertTrue(fuzz.partial_ratio(s1, s3) > 75)
self.assertTrue(fuzz.partial_ratio(s1, s4) > 75)
def testRatioUnicodeString(self):
s1 = "\u00C1"
s2 = "ABCD"
score = fuzz.ratio(s1, s2)
self.assertEqual(0, score)
def testPartialRatioUnicodeString(self):
s1 = "\u00C1"
s2 = "ABCD"
score = fuzz.partial_ratio(s1, s2)
self.assertEqual(0, score)
def testWRatioUnicodeString(self):
s1 = "\u00C1"
s2 = "ABCD"
score = fuzz.WRatio(s1, s2)
self.assertEqual(0, score)
# Cyrillic.
s1 = "\u043f\u0441\u0438\u0445\u043e\u043b\u043e\u0433"
s2 = "\u043f\u0441\u0438\u0445\u043e\u0442\u0435\u0440\u0430\u043f\u0435\u0432\u0442"
score = fuzz.WRatio(s1, s2, force_ascii=False)
self.assertNotEqual(0, score)
# Chinese.
s1 = "\u6211\u4e86\u89e3\u6570\u5b66"
s2 = "\u6211\u5b66\u6570\u5b66"
score = fuzz.WRatio(s1, s2, force_ascii=False)
self.assertNotEqual(0, score)
def testQRatioUnicodeString(self):
s1 = "\u00C1"
s2 = "ABCD"
score = fuzz.QRatio(s1, s2)
self.assertEqual(0, score)
# Cyrillic.
s1 = "\u043f\u0441\u0438\u0445\u043e\u043b\u043e\u0433"
s2 = "\u043f\u0441\u0438\u0445\u043e\u0442\u0435\u0440\u0430\u043f\u0435\u0432\u0442"
score = fuzz.QRatio(s1, s2, force_ascii=False)
self.assertNotEqual(0, score)
# Chinese.
s1 = "\u6211\u4e86\u89e3\u6570\u5b66"
s2 = "\u6211\u5b66\u6570\u5b66"
score = fuzz.QRatio(s1, s2, force_ascii=False)
self.assertNotEqual(0, score)
def testQratioForceAscii(self):
s1 = "ABCD\u00C1"
s2 = "ABCD"
score = fuzz.QRatio(s1, s2, force_ascii=True)
self.assertEqual(score, 100)
score = fuzz.QRatio(s1, s2, force_ascii=False)
self.assertLess(score, 100)
def testQRatioForceAscii(self):
s1 = "ABCD\u00C1"
s2 = "ABCD"
score = fuzz.WRatio(s1, s2, force_ascii=True)
self.assertEqual(score, 100)
score = fuzz.WRatio(s1, s2, force_ascii=False)
self.assertLess(score, 100)
def testTokenSetForceAscii(self):
s1 = "ABCD\u00C1 HELP\u00C1"
s2 = "ABCD HELP"
score = fuzz._token_set(s1, s2, force_ascii=True)
self.assertEqual(score, 100)
score = fuzz._token_set(s1, s2, force_ascii=False)
self.assertLess(score, 100)
def testTokenSortForceAscii(self):
s1 = "ABCD\u00C1 HELP\u00C1"
s2 = "ABCD HELP"
score = fuzz._token_sort(s1, s2, force_ascii=True)
self.assertEqual(score, 100)
score = fuzz._token_sort(s1, s2, force_ascii=False)
self.assertLess(score, 100)
class ValidatorTest(unittest.TestCase):
def setUp(self):
self.testFunc = lambda *args, **kwargs: (args, kwargs)
def testCheckForNone(self):
invalid_input = [
(None, None),
('Some', None),
(None, 'Some')
]
decorated_func = utils.check_for_none(self.testFunc)
for i in invalid_input:
self.assertEqual(decorated_func(*i), 0)
valid_input = ('Some', 'Some')
actual = decorated_func(*valid_input)
self.assertNotEqual(actual, 0)
def testCheckEmptyString(self):
invalid_input = [
('', ''),
('Some', ''),
('', 'Some')
]
decorated_func = utils.check_empty_string(self.testFunc)
for i in invalid_input:
self.assertEqual(decorated_func(*i), 0)
valid_input = ('Some', 'Some')
actual = decorated_func(*valid_input)
self.assertNotEqual(actual, 0)
class ProcessTest(unittest.TestCase):
def setUp(self):
self.s1 = "new york mets"
self.s1a = "new york mets"
self.s2 = "new YORK mets"
self.s3 = "the wonderful new york mets"
self.s4 = "new york mets vs atlanta braves"
self.s5 = "atlanta braves vs new york mets"
self.s6 = "new york mets - atlanta braves"
self.cirque_strings = [
"cirque du soleil - zarkana - las vegas",
"cirque du soleil ",
"cirque du soleil las vegas",
"zarkana las vegas",
"las vegas cirque du soleil at the bellagio",
"zarakana - cirque du soleil - bellagio"
]
self.baseball_strings = [
"new york mets vs chicago cubs",
"chicago cubs vs chicago white sox",
"philladelphia phillies vs atlanta braves",
"braves vs mets",
]
def testGetBestChoice1(self):
query = "new york mets at atlanta braves"
best = process.extractOne(query, self.baseball_strings)
self.assertEqual(best[0], "braves vs mets")
def testGetBestChoice2(self):
query = "philadelphia phillies at atlanta braves"
best = process.extractOne(query, self.baseball_strings)
self.assertEqual(best[0], self.baseball_strings[2])
def testGetBestChoice3(self):
query = "atlanta braves at philadelphia phillies"
best = process.extractOne(query, self.baseball_strings)
self.assertEqual(best[0], self.baseball_strings[2])
def testGetBestChoice4(self):
query = "chicago cubs vs new york mets"
best = process.extractOne(query, self.baseball_strings)
self.assertEqual(best[0], self.baseball_strings[0])
def testWithProcessor(self):
events = [
["chicago cubs vs new york mets", "CitiField", "2011-05-11", "8pm"],
["new york yankees vs boston red sox", "Fenway Park", "2011-05-11", "8pm"],
["atlanta braves vs pittsburgh pirates", "PNC Park", "2011-05-11", "8pm"],
]
query = ["new york mets vs chicago cubs", "CitiField", "2017-03-19", "8pm"],
best = process.extractOne(query, events, processor=lambda event: event[0])
self.assertEqual(best[0], events[0])
def testWithScorer(self):
choices = [
"new york mets vs chicago cubs",
"chicago cubs at new york mets",
"atlanta braves vs pittsbugh pirates",
"new york yankees vs boston red sox"
]
choices_dict = {
1: "new york mets vs chicago cubs",
2: "chicago cubs vs chicago white sox",
3: "philladelphia phillies vs atlanta braves",
4: "braves vs mets"
}
# in this hypothetical example we care about ordering, so we use quick ratio
query = "new york mets at chicago cubs"
scorer = fuzz.QRatio
# first, as an example, the normal way would select the "more
# 'complete' match of choices[1]"
best = process.extractOne(query, choices)
self.assertEqual(best[0], choices[1])
# now, use the custom scorer
best = process.extractOne(query, choices, scorer=scorer)
self.assertEqual(best[0], choices[0])
best = process.extractOne(query, choices_dict)
self.assertEqual(best[0], choices_dict[1])
def testWithCutoff(self):
choices = [
"new york mets vs chicago cubs",
"chicago cubs at new york mets",
"atlanta braves vs pittsbugh pirates",
"new york yankees vs boston red sox"
]
query = "los angeles dodgers vs san francisco giants"
# in this situation, this is an event that does not exist in the list
# we don't want to randomly match to something, so we use a reasonable cutoff
best = process.extractOne(query, choices, score_cutoff=50)
self.assertTrue(best is None)
# self.assertIsNone(best) # unittest.TestCase did not have assertIsNone until Python 2.7
# however if we had no cutoff, something would get returned
# best = process.extractOne(query, choices)
# self.assertIsNotNone(best)
def testWithCutoff2(self):
choices = [
"new york mets vs chicago cubs",
"chicago cubs at new york mets",
"atlanta braves vs pittsbugh pirates",
"new york yankees vs boston red sox"
]
query = "new york mets vs chicago cubs"
# Only find 100-score cases
res = process.extractOne(query, choices, score_cutoff=100)
self.assertTrue(res is not None)
best_match, score = res
self.assertTrue(best_match is choices[0])
def testEmptyStrings(self):
choices = [
"",
"new york mets vs chicago cubs",
"new york yankees vs boston red sox",
"",
""
]
query = "new york mets at chicago cubs"
best = process.extractOne(query, choices)
self.assertEqual(best[0], choices[1])
def testNullStrings(self):
choices = [
None,
"new york mets vs chicago cubs",
"new york yankees vs boston red sox",
None,
None
]
query = "new york mets at chicago cubs"
best = process.extractOne(query, choices)
self.assertEqual(best[0], choices[1])
def test_list_like_extract(self):
"""We should be able to use a list-like object for choices."""
def generate_choices():
choices = ['a', 'Bb', 'CcC']
for choice in choices:
yield choice
search = 'aaa'
result = [(value, confidence) for value, confidence in
process.extract(search, generate_choices())]
self.assertTrue(len(result) > 0)
def test_dict_like_extract(self):
"""We should be able to use a dict-like object for choices, not only a
dict, and still get dict-like output.
"""
try:
from UserDict import UserDict
except ImportError:
from collections import UserDict
choices = UserDict({'aa': 'bb', 'a1': None})
search = 'aaa'
result = process.extract(search, choices)
self.assertTrue(len(result) > 0)
for value, confidence, key in result:
self.assertTrue(value in choices.values())
def test_dedupe(self):
"""We should be able to use a list-like object for contains_dupes
"""
# Test 1
contains_dupes = ['Frodo Baggins', 'Tom Sawyer', 'Bilbo Baggin', 'Samuel L. Jackson', 'F. Baggins', 'Frody Baggins', 'Bilbo Baggins']
result = process.dedupe(contains_dupes)
self.assertTrue(len(result) < len(contains_dupes))
# Test 2
contains_dupes = ['Tom', 'Dick', 'Harry']
# we should end up with the same list since no duplicates are contained in the list (e.g. original list is returned)
deduped_list = ['Tom', 'Dick', 'Harry']
result = process.dedupe(contains_dupes)
self.assertEqual(result, deduped_list)
def test_simplematch(self):
basic_string = 'a, b'
match_strings = ['a, b']
result = process.extractOne(basic_string, match_strings, scorer=fuzz.ratio)
part_result = process.extractOne(basic_string, match_strings, scorer=fuzz.partial_ratio)
self.assertEqual(result, ('a, b', 100))
self.assertEqual(part_result, ('a, b', 100))
class TestCodeFormat(unittest.TestCase):
def test_pep8_conformance(self):
pep8style = pycodestyle.StyleGuide(quiet=False)
pep8style.options.ignore = pep8style.options.ignore + tuple(['E501'])
pep8style.input_dir('fuzzywuzzy')
result = pep8style.check_files()
self.assertEqual(result.total_errors, 0, "PEP8 POLICE - WOOOOOWOOOOOOOOOO")
if __name__ == '__main__':
unittest.main() # run all tests
| 18,565 | Python | .py | 423 | 34.673759 | 141 | 0.622421 | seatgeek/fuzzywuzzy | 9,206 | 876 | 107 | GPL-2.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,468 | benchmarks.py | seatgeek_fuzzywuzzy/benchmarks.py |
# -*- coding: utf8 -*-
from timeit import timeit
import math
import csv
iterations = 100000
reader = csv.DictReader(open('data/titledata.csv'), delimiter='|')
titles = [i['custom_title'] for i in reader]
title_blob = '\n'.join(titles)
cirque_strings = [
"cirque du soleil - zarkana - las vegas",
"cirque du soleil ",
"cirque du soleil las vegas",
"zarkana las vegas",
"las vegas cirque du soleil at the bellagio",
"zarakana - cirque du soleil - bellagio"
]
choices = [
"",
"new york yankees vs boston red sox",
"",
"zarakana - cirque du soleil - bellagio",
None,
"cirque du soleil las vegas",
None
]
mixed_strings = [
"Lorem Ipsum is simply dummy text of the printing and typesetting industry.",
"C\\'est la vie",
u"Ça va?",
u"Cães danados",
u"\xacCamarões assados",
u"a\xac\u1234\u20ac\U00008000"
]
common_setup = "from fuzzywuzzy import fuzz, utils; "
basic_setup = "from fuzzywuzzy.string_processing import StringProcessor;"
def print_result_from_timeit(stmt='pass', setup='pass', number=1000000):
"""
Clean function to know how much time took the execution of one statement
"""
units = ["s", "ms", "us", "ns"]
duration = timeit(stmt, setup, number=int(number))
avg_duration = duration / float(number)
thousands = int(math.floor(math.log(avg_duration, 1000)))
print("Total time: %fs. Average run: %.3f%s." % (
duration, avg_duration * (1000 ** -thousands), units[-thousands]))
for s in choices:
print('Test validate_string for: "%s"' % s)
print_result_from_timeit('utils.validate_string(\'%s\')' % s, common_setup, number=iterations)
print('')
for s in mixed_strings + cirque_strings + choices:
print('Test full_process for: "%s"' % s)
print_result_from_timeit('utils.full_process(u\'%s\')' % s,
common_setup + basic_setup, number=iterations)
# benchmarking the core matching methods...
for s in cirque_strings:
print('Test fuzz.ratio for string: "%s"' % s)
print('-------------------------------')
print_result_from_timeit('fuzz.ratio(u\'cirque du soleil\', u\'%s\')' % s,
common_setup + basic_setup, number=iterations / 100)
for s in cirque_strings:
print('Test fuzz.partial_ratio for string: "%s"' % s)
print('-------------------------------')
print_result_from_timeit('fuzz.partial_ratio(u\'cirque du soleil\', u\'%s\')'
% s, common_setup + basic_setup, number=iterations / 100)
for s in cirque_strings:
print('Test fuzz.WRatio for string: "%s"' % s)
print('-------------------------------')
print_result_from_timeit('fuzz.WRatio(u\'cirque du soleil\', u\'%s\')' % s,
common_setup + basic_setup, number=iterations / 100)
print('Test process.exract(scorer = fuzz.QRatio) for string: "%s"' % s)
print('-------------------------------')
print_result_from_timeit('process.extract(u\'cirque du soleil\', choices, scorer = fuzz.QRatio)',
common_setup + basic_setup + " from fuzzywuzzy import process; import string,random; random.seed(18);"
" choices = [\'\'.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(30)) for s in range(5000)]",
number=10)
print('Test process.exract(scorer = fuzz.WRatio) for string: "%s"' % s)
print('-------------------------------')
print_result_from_timeit('process.extract(u\'cirque du soleil\', choices, scorer = fuzz.WRatio)',
common_setup + basic_setup + " from fuzzywuzzy import process; import string,random; random.seed(18);"
" choices = [\'\'.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(30)) for s in range(5000)]",
number=10)
# let me show you something
s = 'New York Yankees'
test = 'import functools\n'
test += 'title_blob = """%s"""\n' % title_blob
test += 'title_blob = title_blob.strip()\n'
test += 'titles = title_blob.split("\\n")\n'
print('Real world ratio(): "%s"' % s)
print('-------------------------------')
test += 'prepared_ratio = functools.partial(fuzz.ratio, "%s")\n' % s
test += 'titles.sort(key=prepared_ratio)\n'
print_result_from_timeit(test,
common_setup + basic_setup,
number=100)
| 4,442 | Python | .py | 94 | 40.510638 | 149 | 0.598194 | seatgeek/fuzzywuzzy | 9,206 | 876 | 107 | GPL-2.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,469 | test_fuzzywuzzy_pytest.py | seatgeek_fuzzywuzzy/test_fuzzywuzzy_pytest.py |
from fuzzywuzzy import process
def test_process_warning(capsys):
"""Check that a string reduced to 0 by processor logs a warning to stderr"""
query = ':::::::'
choices = [':::::::']
_ = process.extractOne(query, choices)
out, err = capsys.readouterr()
outstr = ("WARNING:root:Applied processor reduces "
"input query to empty string, "
"all comparisons will have score 0. "
"[Query: ':::::::']\n")
assert err == outstr
| 496 | Python | .py | 12 | 33.916667 | 80 | 0.599581 | seatgeek/fuzzywuzzy | 9,206 | 876 | 107 | GPL-2.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,470 | process.py | seatgeek_fuzzywuzzy/fuzzywuzzy/process.py |
#!/usr/bin/env python
# encoding: utf-8
from . import fuzz
from . import utils
import heapq
import logging
from functools import partial
default_scorer = fuzz.WRatio
default_processor = utils.full_process
def extractWithoutOrder(query, choices, processor=default_processor, scorer=default_scorer, score_cutoff=0):
"""Select the best match in a list or dictionary of choices.
Find best matches in a list or dictionary of choices, return a
generator of tuples containing the match and its score. If a dictionary
is used, also returns the key for each match.
Arguments:
query: An object representing the thing we want to find.
choices: An iterable or dictionary-like object containing choices
to be matched against the query. Dictionary arguments of
{key: value} pairs will attempt to match the query against
each value.
processor: Optional function of the form f(a) -> b, where a is the query or
individual choice and b is the choice to be used in matching.
This can be used to match against, say, the first element of
a list:
lambda x: x[0]
Defaults to fuzzywuzzy.utils.full_process().
scorer: Optional function for scoring matches between the query and
an individual processed choice. This should be a function
of the form f(query, choice) -> int.
By default, fuzz.WRatio() is used and expects both query and
choice to be strings.
score_cutoff: Optional argument for score threshold. No matches with
a score less than this number will be returned. Defaults to 0.
Returns:
Generator of tuples containing the match and its score.
If a list is used for choices, then the result will be 2-tuples.
If a dictionary is used, then the result will be 3-tuples containing
the key for each match.
For example, searching for 'bird' in the dictionary
{'bard': 'train', 'dog': 'man'}
may return
('train', 22, 'bard'), ('man', 0, 'dog')
"""
# Catch generators without lengths
def no_process(x):
return x
try:
if choices is None or len(choices) == 0:
return
except TypeError:
pass
# If the processor was removed by setting it to None
# perfom a noop as it still needs to be a function
if processor is None:
processor = no_process
# Run the processor on the input query.
processed_query = processor(query)
if len(processed_query) == 0:
logging.warning(u"Applied processor reduces input query to empty string, "
"all comparisons will have score 0. "
"[Query: \'{0}\']".format(query))
# Don't run full_process twice
if scorer in [fuzz.WRatio, fuzz.QRatio,
fuzz.token_set_ratio, fuzz.token_sort_ratio,
fuzz.partial_token_set_ratio, fuzz.partial_token_sort_ratio,
fuzz.UWRatio, fuzz.UQRatio] \
and processor == utils.full_process:
processor = no_process
# Only process the query once instead of for every choice
if scorer in [fuzz.UWRatio, fuzz.UQRatio]:
pre_processor = partial(utils.full_process, force_ascii=False)
scorer = partial(scorer, full_process=False)
elif scorer in [fuzz.WRatio, fuzz.QRatio,
fuzz.token_set_ratio, fuzz.token_sort_ratio,
fuzz.partial_token_set_ratio, fuzz.partial_token_sort_ratio]:
pre_processor = partial(utils.full_process, force_ascii=True)
scorer = partial(scorer, full_process=False)
else:
pre_processor = no_process
processed_query = pre_processor(processed_query)
try:
# See if choices is a dictionary-like object.
for key, choice in choices.items():
processed = pre_processor(processor(choice))
score = scorer(processed_query, processed)
if score >= score_cutoff:
yield (choice, score, key)
except AttributeError:
# It's a list; just iterate over it.
for choice in choices:
processed = pre_processor(processor(choice))
score = scorer(processed_query, processed)
if score >= score_cutoff:
yield (choice, score)
def extract(query, choices, processor=default_processor, scorer=default_scorer, limit=5):
"""Select the best match in a list or dictionary of choices.
Find best matches in a list or dictionary of choices, return a
list of tuples containing the match and its score. If a dictionary
is used, also returns the key for each match.
Arguments:
query: An object representing the thing we want to find.
choices: An iterable or dictionary-like object containing choices
to be matched against the query. Dictionary arguments of
{key: value} pairs will attempt to match the query against
each value.
processor: Optional function of the form f(a) -> b, where a is the query or
individual choice and b is the choice to be used in matching.
This can be used to match against, say, the first element of
a list:
lambda x: x[0]
Defaults to fuzzywuzzy.utils.full_process().
scorer: Optional function for scoring matches between the query and
an individual processed choice. This should be a function
of the form f(query, choice) -> int.
By default, fuzz.WRatio() is used and expects both query and
choice to be strings.
limit: Optional maximum for the number of elements returned. Defaults
to 5.
Returns:
List of tuples containing the match and its score.
If a list is used for choices, then the result will be 2-tuples.
If a dictionary is used, then the result will be 3-tuples containing
the key for each match.
For example, searching for 'bird' in the dictionary
{'bard': 'train', 'dog': 'man'}
may return
[('train', 22, 'bard'), ('man', 0, 'dog')]
"""
sl = extractWithoutOrder(query, choices, processor, scorer)
return heapq.nlargest(limit, sl, key=lambda i: i[1]) if limit is not None else \
sorted(sl, key=lambda i: i[1], reverse=True)
def extractBests(query, choices, processor=default_processor, scorer=default_scorer, score_cutoff=0, limit=5):
"""Get a list of the best matches to a collection of choices.
Convenience function for getting the choices with best scores.
Args:
query: A string to match against
choices: A list or dictionary of choices, suitable for use with
extract().
processor: Optional function for transforming choices before matching.
See extract().
scorer: Scoring function for extract().
score_cutoff: Optional argument for score threshold. No matches with
a score less than this number will be returned. Defaults to 0.
limit: Optional maximum for the number of elements returned. Defaults
to 5.
Returns: A list of (match, score) tuples.
"""
best_list = extractWithoutOrder(query, choices, processor, scorer, score_cutoff)
return heapq.nlargest(limit, best_list, key=lambda i: i[1]) if limit is not None else \
sorted(best_list, key=lambda i: i[1], reverse=True)
def extractOne(query, choices, processor=default_processor, scorer=default_scorer, score_cutoff=0):
"""Find the single best match above a score in a list of choices.
This is a convenience method which returns the single best choice.
See extract() for the full arguments list.
Args:
query: A string to match against
choices: A list or dictionary of choices, suitable for use with
extract().
processor: Optional function for transforming choices before matching.
See extract().
scorer: Scoring function for extract().
score_cutoff: Optional argument for score threshold. If the best
match is found, but it is not greater than this number, then
return None anyway ("not a good enough match"). Defaults to 0.
Returns:
A tuple containing a single match and its score, if a match
was found that was above score_cutoff. Otherwise, returns None.
"""
best_list = extractWithoutOrder(query, choices, processor, scorer, score_cutoff)
try:
return max(best_list, key=lambda i: i[1])
except ValueError:
return None
def dedupe(contains_dupes, threshold=70, scorer=fuzz.token_set_ratio):
"""This convenience function takes a list of strings containing duplicates and uses fuzzy matching to identify
and remove duplicates. Specifically, it uses the process.extract to identify duplicates that
score greater than a user defined threshold. Then, it looks for the longest item in the duplicate list
since we assume this item contains the most entity information and returns that. It breaks string
length ties on an alphabetical sort.
Note: as the threshold DECREASES the number of duplicates that are found INCREASES. This means that the
returned deduplicated list will likely be shorter. Raise the threshold for fuzzy_dedupe to be less
sensitive.
Args:
contains_dupes: A list of strings that we would like to dedupe.
threshold: the numerical value (0,100) point at which we expect to find duplicates.
Defaults to 70 out of 100
scorer: Optional function for scoring matches between the query and
an individual processed choice. This should be a function
of the form f(query, choice) -> int.
By default, fuzz.token_set_ratio() is used and expects both query and
choice to be strings.
Returns:
A deduplicated list. For example:
In: contains_dupes = ['Frodo Baggin', 'Frodo Baggins', 'F. Baggins', 'Samwise G.', 'Gandalf', 'Bilbo Baggins']
In: fuzzy_dedupe(contains_dupes)
Out: ['Frodo Baggins', 'Samwise G.', 'Bilbo Baggins', 'Gandalf']
"""
extractor = []
# iterate over items in *contains_dupes*
for item in contains_dupes:
# return all duplicate matches found
matches = extract(item, contains_dupes, limit=None, scorer=scorer)
# filter matches based on the threshold
filtered = [x for x in matches if x[1] > threshold]
# if there is only 1 item in *filtered*, no duplicates were found so append to *extracted*
if len(filtered) == 1:
extractor.append(filtered[0][0])
else:
# alpha sort
filtered = sorted(filtered, key=lambda x: x[0])
# length sort
filter_sort = sorted(filtered, key=lambda x: len(x[0]), reverse=True)
# take first item as our 'canonical example'
extractor.append(filter_sort[0][0])
# uniquify *extractor* list
keys = {}
for e in extractor:
keys[e] = 1
extractor = keys.keys()
# check that extractor differs from contain_dupes (e.g. duplicates were found)
# if not, then return the original list
if len(extractor) == len(contains_dupes):
return contains_dupes
else:
return extractor
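# --- Hedged usage sketch (editor's addition, not part of the upstream file) ---
# Assumes the module is imported as part of the fuzzywuzzy package (the
# relative imports above require it); the scores in the comments are
# indicative only and depend on the scorer in use.
if __name__ == "__main__":
    choices = ["New York Jets", "New York Giants", "Dallas Cowboys", "Atlanta Falcons"]
    # Best `limit` matches, highest score first, as (choice, score) tuples.
    print(extract("new york jets", choices, limit=2))
    # e.g. [('New York Jets', 100), ('New York Giants', 78)]
    # Single best match, or None if nothing clears score_cutoff.
    print(extractOne("cowboys", choices))
    # e.g. ('Dallas Cowboys', 90)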
| 11,481 | Python | .py | 225 | 42.035556 | 122 | 0.664791 | seatgeek/fuzzywuzzy | 9,206 | 876 | 107 | GPL-2.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,471 | utils.py | seatgeek_fuzzywuzzy/fuzzywuzzy/utils.py |
from __future__ import unicode_literals
import sys
import functools
from fuzzywuzzy.string_processing import StringProcessor
PY3 = sys.version_info[0] == 3
def validate_string(s):
"""
Check input has length and that length > 0
:param s:
:return: True if len(s) > 0 else False
"""
try:
return len(s) > 0
except TypeError:
return False
def check_for_equivalence(func):
@functools.wraps(func)
def decorator(*args, **kwargs):
if args[0] == args[1]:
return 100
return func(*args, **kwargs)
return decorator
def check_for_none(func):
@functools.wraps(func)
def decorator(*args, **kwargs):
if args[0] is None or args[1] is None:
return 0
return func(*args, **kwargs)
return decorator
def check_empty_string(func):
@functools.wraps(func)
def decorator(*args, **kwargs):
if len(args[0]) == 0 or len(args[1]) == 0:
return 0
return func(*args, **kwargs)
return decorator
bad_chars = str("").join([chr(i) for i in range(128, 256)]) # ascii dammit!
if PY3:
translation_table = dict((ord(c), None) for c in bad_chars)
unicode = str
def asciionly(s):
if PY3:
return s.translate(translation_table)
else:
return s.translate(None, bad_chars)
def asciidammit(s):
if type(s) is str:
return asciionly(s)
elif type(s) is unicode:
return asciionly(s.encode('ascii', 'ignore'))
else:
return asciidammit(unicode(s))
def make_type_consistent(s1, s2):
"""If both objects aren't either both string or unicode instances force them to unicode"""
if isinstance(s1, str) and isinstance(s2, str):
return s1, s2
elif isinstance(s1, unicode) and isinstance(s2, unicode):
return s1, s2
else:
return unicode(s1), unicode(s2)
def full_process(s, force_ascii=False):
"""Process string by
-- removing all but letters and numbers
-- trim whitespace
-- force to lower case
if force_ascii == True, force convert to ascii"""
if force_ascii:
s = asciidammit(s)
# Keep only Letters and Numbers (see Unicode docs).
string_out = StringProcessor.replace_non_letters_non_numbers_with_whitespace(s)
# Force into lowercase.
string_out = StringProcessor.to_lower_case(string_out)
# Remove leading and trailing whitespaces.
string_out = StringProcessor.strip(string_out)
return string_out
def intr(n):
'''Returns a correctly rounded integer'''
return int(round(n))
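# --- Hedged usage sketch (editor's addition, not part of the upstream file) ---
# Demonstrates the normalisation performed by full_process(); the exact
# spacing of the output depends on the \W regex in StringProcessor, so the
# value in the comment is indicative.
if __name__ == "__main__":
    print(full_process("Lorem Ipsum, 2nd Edition!"))    # e.g. "lorem ipsum  2nd edition"
    print(validate_string(full_process(":::::")))       # False: nothing survives processing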
| 2,595 | Python | .py | 78 | 27.358974 | 94 | 0.65502 | seatgeek/fuzzywuzzy | 9,206 | 876 | 107 | GPL-2.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,472 | string_processing.py | seatgeek_fuzzywuzzy/fuzzywuzzy/string_processing.py |
from __future__ import unicode_literals
import re
import string
import sys
PY3 = sys.version_info[0] == 3
if PY3:
string = str
class StringProcessor(object):
"""
This class defines method to process strings in the most
efficient way. Ideally all the methods below use unicode strings
for both input and output.
"""
regex = re.compile(r"(?ui)\W")
@classmethod
def replace_non_letters_non_numbers_with_whitespace(cls, a_string):
"""
This function replaces any sequence of non letters and non
numbers with a single white space.
"""
return cls.regex.sub(" ", a_string)
strip = staticmethod(string.strip)
to_lower_case = staticmethod(string.lower)
to_upper_case = staticmethod(string.upper)
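# --- Hedged usage sketch (editor's addition, not part of the upstream file) ---
# Each non-alphanumeric character is replaced by a single space, so runs of
# punctuation become runs of spaces rather than one space.
if __name__ == "__main__":
    print(StringProcessor.replace_non_letters_non_numbers_with_whitespace("C++/CLI rocks!"))
    # -> "C   CLI rocks "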
| 780 | Python | .py | 24 | 27.583333 | 71 | 0.694667 | seatgeek/fuzzywuzzy | 9,206 | 876 | 107 | GPL-2.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,473 | StringMatcher.py | seatgeek_fuzzywuzzy/fuzzywuzzy/StringMatcher.py |
#!/usr/bin/env python
# encoding: utf-8
"""
StringMatcher.py
ported from python-Levenshtein
[https://github.com/miohtama/python-Levenshtein]
License available here: https://github.com/miohtama/python-Levenshtein/blob/master/COPYING
"""
from Levenshtein import *
from warnings import warn
class StringMatcher:
"""A SequenceMatcher-like class built on the top of Levenshtein"""
def _reset_cache(self):
self._ratio = self._distance = None
self._opcodes = self._editops = self._matching_blocks = None
def __init__(self, isjunk=None, seq1='', seq2=''):
if isjunk:
warn("isjunk not NOT implemented, it will be ignored")
self._str1, self._str2 = seq1, seq2
self._reset_cache()
def set_seqs(self, seq1, seq2):
self._str1, self._str2 = seq1, seq2
self._reset_cache()
def set_seq1(self, seq1):
self._str1 = seq1
self._reset_cache()
def set_seq2(self, seq2):
self._str2 = seq2
self._reset_cache()
def get_opcodes(self):
if not self._opcodes:
if self._editops:
self._opcodes = opcodes(self._editops, self._str1, self._str2)
else:
self._opcodes = opcodes(self._str1, self._str2)
return self._opcodes
def get_editops(self):
if not self._editops:
if self._opcodes:
self._editops = editops(self._opcodes, self._str1, self._str2)
else:
self._editops = editops(self._str1, self._str2)
return self._editops
def get_matching_blocks(self):
if not self._matching_blocks:
self._matching_blocks = matching_blocks(self.get_opcodes(),
self._str1, self._str2)
return self._matching_blocks
def ratio(self):
if not self._ratio:
self._ratio = ratio(self._str1, self._str2)
return self._ratio
def quick_ratio(self):
# This is usually quick enough :o)
if not self._ratio:
self._ratio = ratio(self._str1, self._str2)
return self._ratio
def real_quick_ratio(self):
len1, len2 = len(self._str1), len(self._str2)
return 2.0 * min(len1, len2) / (len1 + len2)
def distance(self):
if not self._distance:
self._distance = distance(self._str1, self._str2)
return self._distance
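# --- Hedged usage sketch (editor's addition, not part of the upstream file) ---
# Requires the python-Levenshtein package; the class mirrors the
# difflib.SequenceMatcher interface used elsewhere in fuzzywuzzy.
if __name__ == "__main__":
    m = StringMatcher(seq1="new york mets", seq2="new york yankees")
    print(m.ratio())      # similarity in [0, 1]
    print(m.distance())   # Levenshtein edit distance between the two strings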
| 2,437 | Python | .py | 64 | 29.265625 | 90 | 0.59737 | seatgeek/fuzzywuzzy | 9,206 | 876 | 107 | GPL-2.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,474 | fuzz.py | seatgeek_fuzzywuzzy/fuzzywuzzy/fuzz.py |
#!/usr/bin/env python
# encoding: utf-8
from __future__ import unicode_literals
import platform
import warnings
try:
from .StringMatcher import StringMatcher as SequenceMatcher
except ImportError:
if platform.python_implementation() != "PyPy":
warnings.warn('Using slow pure-python SequenceMatcher. Install python-Levenshtein to remove this warning')
from difflib import SequenceMatcher
from . import utils
###########################
# Basic Scoring Functions #
###########################
@utils.check_for_none
@utils.check_for_equivalence
@utils.check_empty_string
def ratio(s1, s2):
s1, s2 = utils.make_type_consistent(s1, s2)
m = SequenceMatcher(None, s1, s2)
return utils.intr(100 * m.ratio())
@utils.check_for_none
@utils.check_for_equivalence
@utils.check_empty_string
def partial_ratio(s1, s2):
""""Return the ratio of the most similar substring
as a number between 0 and 100."""
s1, s2 = utils.make_type_consistent(s1, s2)
if len(s1) <= len(s2):
shorter = s1
longer = s2
else:
shorter = s2
longer = s1
m = SequenceMatcher(None, shorter, longer)
blocks = m.get_matching_blocks()
# each block represents a sequence of matching characters in a string
# of the form (idx_1, idx_2, len)
# the best partial match will align with at least one of those blocks
# e.g. shorter = "abcd", longer = XXXbcdeEEE
# block = (1,3,3)
# best score === ratio("abcd", "Xbcd")
scores = []
for block in blocks:
long_start = block[1] - block[0] if (block[1] - block[0]) > 0 else 0
long_end = long_start + len(shorter)
long_substr = longer[long_start:long_end]
m2 = SequenceMatcher(None, shorter, long_substr)
r = m2.ratio()
if r > .995:
return 100
else:
scores.append(r)
return utils.intr(100 * max(scores))
##############################
# Advanced Scoring Functions #
##############################
def _process_and_sort(s, force_ascii, full_process=True):
"""Return a cleaned string with token sorted."""
# pull tokens
ts = utils.full_process(s, force_ascii=force_ascii) if full_process else s
tokens = ts.split()
# sort tokens and join
sorted_string = u" ".join(sorted(tokens))
return sorted_string.strip()
# Sorted Token
# find all alphanumeric tokens in the string
# sort those tokens and take ratio of resulting joined strings
# controls for unordered string elements
@utils.check_for_none
def _token_sort(s1, s2, partial=True, force_ascii=True, full_process=True):
sorted1 = _process_and_sort(s1, force_ascii, full_process=full_process)
sorted2 = _process_and_sort(s2, force_ascii, full_process=full_process)
if partial:
return partial_ratio(sorted1, sorted2)
else:
return ratio(sorted1, sorted2)
def token_sort_ratio(s1, s2, force_ascii=True, full_process=True):
"""Return a measure of the sequences' similarity between 0 and 100
but sorting the tokens before comparing.
"""
return _token_sort(s1, s2, partial=False, force_ascii=force_ascii, full_process=full_process)
def partial_token_sort_ratio(s1, s2, force_ascii=True, full_process=True):
"""Return the ratio of the most similar substring as a number between
0 and 100 but sorting the tokens before comparing.
"""
return _token_sort(s1, s2, partial=True, force_ascii=force_ascii, full_process=full_process)
@utils.check_for_none
def _token_set(s1, s2, partial=True, force_ascii=True, full_process=True):
"""Find all alphanumeric tokens in each string...
- treat them as a set
- construct two strings of the form:
<sorted_intersection><sorted_remainder>
- take ratios of those two strings
- controls for unordered partial matches"""
if not full_process and s1 == s2:
return 100
p1 = utils.full_process(s1, force_ascii=force_ascii) if full_process else s1
p2 = utils.full_process(s2, force_ascii=force_ascii) if full_process else s2
if not utils.validate_string(p1):
return 0
if not utils.validate_string(p2):
return 0
# pull tokens
tokens1 = set(p1.split())
tokens2 = set(p2.split())
intersection = tokens1.intersection(tokens2)
diff1to2 = tokens1.difference(tokens2)
diff2to1 = tokens2.difference(tokens1)
sorted_sect = " ".join(sorted(intersection))
sorted_1to2 = " ".join(sorted(diff1to2))
sorted_2to1 = " ".join(sorted(diff2to1))
combined_1to2 = sorted_sect + " " + sorted_1to2
combined_2to1 = sorted_sect + " " + sorted_2to1
# strip
sorted_sect = sorted_sect.strip()
combined_1to2 = combined_1to2.strip()
combined_2to1 = combined_2to1.strip()
if partial:
ratio_func = partial_ratio
else:
ratio_func = ratio
pairwise = [
ratio_func(sorted_sect, combined_1to2),
ratio_func(sorted_sect, combined_2to1),
ratio_func(combined_1to2, combined_2to1)
]
return max(pairwise)
def token_set_ratio(s1, s2, force_ascii=True, full_process=True):
return _token_set(s1, s2, partial=False, force_ascii=force_ascii, full_process=full_process)
def partial_token_set_ratio(s1, s2, force_ascii=True, full_process=True):
return _token_set(s1, s2, partial=True, force_ascii=force_ascii, full_process=full_process)
###################
# Combination API #
###################
# q is for quick
def QRatio(s1, s2, force_ascii=True, full_process=True):
"""
Quick ratio comparison between two strings.
Runs full_process from utils on both strings
Short circuits if either of the strings is empty after processing.
:param s1:
:param s2:
:param force_ascii: Allow only ASCII characters (Default: True)
:full_process: Process inputs, used here to avoid double processing in extract functions (Default: True)
:return: similarity ratio
"""
if full_process:
p1 = utils.full_process(s1, force_ascii=force_ascii)
p2 = utils.full_process(s2, force_ascii=force_ascii)
else:
p1 = s1
p2 = s2
if not utils.validate_string(p1):
return 0
if not utils.validate_string(p2):
return 0
return ratio(p1, p2)
def UQRatio(s1, s2, full_process=True):
"""
Unicode quick ratio
Calls QRatio with force_ascii set to False
:param s1:
:param s2:
:return: similarity ratio
"""
return QRatio(s1, s2, force_ascii=False, full_process=full_process)
# w is for weighted
def WRatio(s1, s2, force_ascii=True, full_process=True):
"""
Return a measure of the sequences' similarity between 0 and 100, using different algorithms.
**Steps in the order they occur**
#. Run full_process from utils on both strings
#. Short circuit if this makes either string empty
#. Take the ratio of the two processed strings (fuzz.ratio)
#. Run checks to compare the length of the strings
* If one of the strings is more than 1.5 times as long as the other
use partial_ratio comparisons - scale partial results by 0.9
(this makes sure only full results can return 100)
* If one of the strings is over 8 times as long as the other
instead scale by 0.6
#. Run the other ratio functions
* if using partial ratio functions call partial_ratio,
partial_token_sort_ratio and partial_token_set_ratio
scale all of these by the ratio based on length
* otherwise call token_sort_ratio and token_set_ratio
* all token based comparisons are scaled by 0.95
(on top of any partial scalars)
#. Take the highest value from these results
round it and return it as an integer.
:param s1:
:param s2:
:param force_ascii: Allow only ascii characters
:type force_ascii: bool
:full_process: Process inputs, used here to avoid double processing in extract functions (Default: True)
:return:
"""
if full_process:
p1 = utils.full_process(s1, force_ascii=force_ascii)
p2 = utils.full_process(s2, force_ascii=force_ascii)
else:
p1 = s1
p2 = s2
if not utils.validate_string(p1):
return 0
if not utils.validate_string(p2):
return 0
# should we look at partials?
try_partial = True
unbase_scale = .95
partial_scale = .90
base = ratio(p1, p2)
len_ratio = float(max(len(p1), len(p2))) / min(len(p1), len(p2))
# if strings are similar length, don't use partials
if len_ratio < 1.5:
try_partial = False
# if one string is much much shorter than the other
if len_ratio > 8:
partial_scale = .6
if try_partial:
partial = partial_ratio(p1, p2) * partial_scale
ptsor = partial_token_sort_ratio(p1, p2, full_process=False) \
* unbase_scale * partial_scale
ptser = partial_token_set_ratio(p1, p2, full_process=False) \
* unbase_scale * partial_scale
return utils.intr(max(base, partial, ptsor, ptser))
else:
tsor = token_sort_ratio(p1, p2, full_process=False) * unbase_scale
tser = token_set_ratio(p1, p2, full_process=False) * unbase_scale
return utils.intr(max(base, tsor, tser))
def UWRatio(s1, s2, full_process=True):
"""Return a measure of the sequences' similarity between 0 and 100,
using different algorithms. Same as WRatio but preserving unicode.
"""
return WRatio(s1, s2, force_ascii=False, full_process=full_process)
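# --- Hedged usage sketch (editor's addition, not part of the upstream file) ---
# Classic examples of how the scorers above differ; run inside the fuzzywuzzy
# package. The values in the comments are the expected integer scores.
if __name__ == "__main__":
    print(ratio("this is a test", "this is a test!"))           # 97
    print(partial_ratio("this is a test", "this is a test!"))   # 100: best substring match
    print(token_sort_ratio("fuzzy wuzzy was a bear", "wuzzy fuzzy was a bear"))   # 100
    print(token_set_ratio("fuzzy was a bear", "fuzzy fuzzy was a bear"))          # 100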
| 9,591 | Python | .py | 236 | 34.872881 | 114 | 0.669467 | seatgeek/fuzzywuzzy | 9,206 | 876 | 107 | GPL-2.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,476 | setup.py | p2pool_p2pool/setup.py |
import os
import shutil
import sys
import zipfile
import platform
from distutils.core import setup
from distutils.sysconfig import get_python_lib
import py2exe
version = __import__('p2pool').__version__
im64 = '64' in platform.architecture()[0]
extra_includes = []
import p2pool.networks
extra_includes.extend('p2pool.networks.' + x for x in p2pool.networks.nets)
import p2pool.bitcoin.networks
extra_includes.extend('p2pool.bitcoin.networks.' + x for x in p2pool.bitcoin.networks.nets)
if os.path.exists('INITBAK'):
os.remove('INITBAK')
os.rename(os.path.join('p2pool', '__init__.py'), 'INITBAK')
try:
open(os.path.join('p2pool', '__init__.py'), 'wb').write('__version__ = %r%s%sDEBUG = False%s' % (version, os.linesep, os.linesep, os.linesep))
mfcdir = get_python_lib() + '\\pythonwin\\'
mfcfiles = [os.path.join(mfcdir, i) for i in ["mfc90.dll", "mfc90u.dll", "mfcm90.dll", "mfcm90u.dll", "Microsoft.VC90.MFC.manifest"]]
bundle = 1
if im64:
bundle = bundle + 2
sys.argv[1:] = ['py2exe']
setup(name='p2pool',
version=version,
description='Peer-to-peer Bitcoin mining pool',
author='Forrest Voight',
author_email='forrest@forre.st',
url='http://p2pool.forre.st/',
data_files=[
('', ['README.md']),
("Microsoft.VC90.MFC", mfcfiles),
('web-static', [
'web-static/d3.v2.min.js',
'web-static/favicon.ico',
'web-static/graphs.html',
'web-static/index.html',
'web-static/share.html',
]),
],
console=['run_p2pool.py'],
options=dict(py2exe=dict(
bundle_files=bundle,
dll_excludes=['w9xpopen.exe', "mswsock.dll", "MSWSOCK.dll"],
includes=['twisted.web.resource', 'ltc_scrypt'] + extra_includes,
)),
zipfile=None,
)
finally:
os.remove(os.path.join('p2pool', '__init__.py'))
os.rename('INITBAK', os.path.join('p2pool', '__init__.py'))
win = '32'
if im64:
win = '64'
dir_name = 'p2pool_win' + win + '_' + version
if os.path.exists(dir_name):
shutil.rmtree(dir_name)
os.rename('dist', dir_name)
with zipfile.ZipFile(dir_name + '.zip', 'w', zipfile.ZIP_DEFLATED) as zf:
for dirpath, dirnames, filenames in os.walk(dir_name):
for filename in filenames:
zf.write(os.path.join(dirpath, filename))
print dir_name
| 2,441 | Python | .py | 66 | 30.742424 | 146 | 0.620296 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,477 | fpconst.py | p2pool_p2pool/fpconst.py |
"""Utilities for handling IEEE 754 floating point special values
This python module implements constants and functions for working with
IEEE754 double-precision special values. It provides constants for
Not-a-Number (NaN), Positive Infinity (PosInf), and Negative Infinity
(NegInf), as well as functions to test for these values.
The code is implemented in pure python by taking advantage of the
'struct' standard module. Care has been taken to generate proper
results on both big-endian and little-endian machines. Some efficiency
could be gained by translating the core routines into C.
See <http://babbage.cs.qc.edu/courses/cs341/IEEE-754references.html>
for reference material on the IEEE 754 floating point standard.
Further information on this package is available at
<http://www.analytics.washington.edu/statcomp/projects/rzope/fpconst/>.
------------------------------------------------------------------
Author: Gregory R. Warnes <Gregory.R.Warnes@Pfizer.com>
Date: 2005-02-24
Version: 0.7.2
Copyright: (c) 2003-2005 Pfizer, Licensed to PSF under a Contributor Agreement
License: Licensed under the Apache License, Version 2.0 (the"License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in
writing, software distributed under the License is
distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See
the License for the specific language governing
permissions and limitations under the License.
------------------------------------------------------------------
"""
__version__ = "0.7.2"
ident = "$Id: fpconst.py,v 1.16 2005/02/24 17:42:03 warnes Exp $"
import struct, operator
# check endianess
_big_endian = struct.pack('i',1)[0] != '\x01'
# and define appropriate constants
if(_big_endian):
NaN = struct.unpack('d', '\x7F\xF8\x00\x00\x00\x00\x00\x00')[0]
PosInf = struct.unpack('d', '\x7F\xF0\x00\x00\x00\x00\x00\x00')[0]
NegInf = -PosInf
else:
NaN = struct.unpack('d', '\x00\x00\x00\x00\x00\x00\xf8\xff')[0]
PosInf = struct.unpack('d', '\x00\x00\x00\x00\x00\x00\xf0\x7f')[0]
NegInf = -PosInf
def _double_as_bytes(dval):
"Use struct.unpack to decode a double precision float into eight bytes"
tmp = list(struct.unpack('8B',struct.pack('d', dval)))
if not _big_endian:
tmp.reverse()
return tmp
##
## Functions to extract components of the IEEE 754 floating point format
##
def _sign(dval):
"Extract the sign bit from a double-precision floating point value"
bb = _double_as_bytes(dval)
return bb[0] >> 7 & 0x01
def _exponent(dval):
"""Extract the exponentent bits from a double-precision floating
point value.
Note that for normalized values, the exponent bits have an offset
of 1023. As a consequence, the actual exponent is obtained
by subtracting 1023 from the value returned by this function
"""
bb = _double_as_bytes(dval)
return (bb[0] << 4 | bb[1] >> 4) & 0x7ff
def _mantissa(dval):
"""Extract the _mantissa bits from a double-precision floating
point value."""
bb = _double_as_bytes(dval)
mantissa = bb[1] & 0x0f << 48
mantissa += bb[2] << 40
mantissa += bb[3] << 32
mantissa += bb[4]
return mantissa
def _zero_mantissa(dval):
"""Determine whether the mantissa bits of the given double are all
zero."""
bb = _double_as_bytes(dval)
return ((bb[1] & 0x0f) | reduce(operator.or_, bb[2:])) == 0
##
## Functions to test for IEEE 754 special values
##
def isNaN(value):
"Determine if the argument is a IEEE 754 NaN (Not a Number) value."
return (_exponent(value)==0x7ff and not _zero_mantissa(value))
def isInf(value):
"""Determine if the argument is an infinite IEEE 754 value (positive
or negative infinity)"""
return (_exponent(value)==0x7ff and _zero_mantissa(value))
def isFinite(value):
"""Determine if the argument is an finite IEEE 754 value (i.e., is
not NaN, positive or negative inifinity)"""
return (_exponent(value)!=0x7ff)
def isPosInf(value):
"Determine if the argument is a IEEE 754 positive infinity value"
return (_sign(value)==0 and _exponent(value)==0x7ff and \
_zero_mantissa(value))
def isNegInf(value):
"Determine if the argument is a IEEE 754 negative infinity value"
return (_sign(value)==1 and _exponent(value)==0x7ff and \
_zero_mantissa(value))
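##
## Worked example (editor's addition): the double 1.0 has the big-endian byte
## pattern 3F F0 00 00 00 00 00 00, so the sign bit is 0, the biased exponent
## is 0x3ff (1023, i.e. an unbiased exponent of 0) and the mantissa bits are
## all zero. Hence _sign(1.0) == 0, _exponent(1.0) == 0x3ff, and
## isFinite(1.0) is True while isNaN(1.0) and isInf(1.0) are False.
##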
##
## Functions to test public functions.
##
def test_isNaN():
assert( not isNaN(PosInf) )
assert( not isNaN(NegInf) )
assert( isNaN(NaN ) )
assert( not isNaN( 1.0) )
assert( not isNaN( -1.0) )
def test_isInf():
assert( isInf(PosInf) )
assert( isInf(NegInf) )
assert( not isInf(NaN ) )
assert( not isInf( 1.0) )
assert( not isInf( -1.0) )
def test_isFinite():
assert( not isFinite(PosInf) )
assert( not isFinite(NegInf) )
assert( not isFinite(NaN ) )
assert( isFinite( 1.0) )
assert( isFinite( -1.0) )
def test_isPosInf():
assert( isPosInf(PosInf) )
assert( not isPosInf(NegInf) )
assert( not isPosInf(NaN ) )
assert( not isPosInf( 1.0) )
assert( not isPosInf( -1.0) )
def test_isNegInf():
assert( not isNegInf(PosInf) )
assert( isNegInf(NegInf) )
assert( not isNegInf(NaN ) )
assert( not isNegInf( 1.0) )
assert( not isNegInf( -1.0) )
# overall test
def test():
test_isNaN()
test_isInf()
test_isFinite()
test_isPosInf()
test_isNegInf()
if __name__ == "__main__":
test()
| 5,754 | Python | .py | 144 | 36.0625 | 78 | 0.674623 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,478 | portmapper.py | p2pool_p2pool/nattraverso/portmapper.py |
"""
Generic NAT Port mapping interface.
TODO: Example
@author: Raphael Slinckx
@copyright: Copyright 2005
@license: LGPL
@contact: U{raphael@slinckx.net<mailto:raphael@slinckx.net>}
@version: 0.1.0
"""
__revision__ = "$id"
from twisted.internet.base import BasePort
# Public API
def get_port_mapper(proto="TCP"):
"""
Returns a L{NATMapper} instance, suited to map a port for
the given protocol. Defaults to TCP.
For the moment, only upnp mapper is available. It accepts both UDP and TCP.
@param proto: The protocol: 'TCP' or 'UDP'
@type proto: string
@return: A deferred called with a L{NATMapper} instance
@rtype: L{twisted.internet.defer.Deferred}
"""
import nattraverso.pynupnp
return nattraverso.pynupnp.get_port_mapper()
class NATMapper:
"""
Define methods to map port objects (as returned by twisted's listenXX).
This allows NAT to be traversed from incoming packets.
Currently the only implementation of this class is the UPnP Mapper, which
can map UDP and TCP ports, if an UPnP Device exists.
"""
def __init__(self):
raise NotImplementedError("Cannot instantiate the class")
def map(self, port):
"""
Create a mapping for the given twisted's port object.
The deferred will call back with a tuple (extaddr, extport):
- extaddr: The ip string of the external ip address of this host
- extport: the external port number used to map the given Port object
When called multiple times with the same Port,
callback with the existing mapping.
@param port: The port object to map
@type port: a L{twisted.internet.interfaces.IListeningPort} object
@return: A deferred called with the above defined tuple
@rtype: L{twisted.internet.defer.Deferred}
"""
raise NotImplementedError
def info(self, port):
"""
Returns the existing mapping for the given port object. That means map()
has to be called before.
@param port: The port object to retrieve info from
@type port: a L{twisted.internet.interfaces.IListeningPort} object
@raise ValueError: When there is no such existing mapping
@return: a tuple (extaddress, extport).
@see: L{map() function<map>}
"""
raise NotImplementedError
def unmap(self, port):
"""
Remove an existing mapping for the given twisted's port object.
@param port: The port object to unmap
@type port: a L{twisted.internet.interfaces.IListeningPort} object
@return: A deferred called with None
@rtype: L{twisted.internet.defer.Deferred}
@raise ValueError: When there is no such existing mapping
"""
raise NotImplementedError
def get_port_mappings(self):
"""
Returns a deferred that will be called with a dictionary of the
existing mappings.
The dictionary structure is the following:
- Keys: tuple (protocol, external_port)
- protocol is "TCP" or "UDP".
- external_port is the external port number, as see on the
WAN side.
- Values:tuple (internal_ip, internal_port)
- internal_ip is the LAN ip address of the host.
- internal_port is the internal port number mapped
to external_port.
@return: A deferred called with the above defined dictionary
@rtype: L{twisted.internet.defer.Deferred}
"""
raise NotImplementedError
def _check_valid_port(self, port):
"""Various Port object validity checks. Raise a ValueError."""
if not isinstance(port, BasePort):
raise ValueError("expected a Port, got %r"%(port))
if not port.connected:
raise ValueError("Port %r is not listening"%(port))
loc_addr = port.getHost()
if loc_addr.port == 0:
raise ValueError("Port %r has port number of 0"%(port))
| 4,157 | Python | .py | 94 | 35.138298 | 81 | 0.660991 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,479 | utils.py | p2pool_p2pool/nattraverso/utils.py |
"""
Various utility functions used in the nattraverso package.
@author: Raphael Slinckx
@copyright: Copyright 2005
@license: LGPL
@contact: U{raphael@slinckx.net<mailto:raphael@slinckx.net>}
@version: 0.1.0
"""
__revision__ = "$id"
def is_rfc1918_ip(ip):
"""
Checks if the given ip address is a rfc1918 one.
@param ip: The ip address to test
@type ip: a string "x.x.x.x"
@return: True if it's a LAN address, False otherwise
"""
if isinstance(ip, basestring):
ip = _ip_to_number(ip)
for net, mask in _nets:
if ip&mask == net:
return True
return False
def is_bogus_ip(ip):
"""
Checks if the given ip address is bogus, i.e. 0.0.0.0 or 127.0.0.1.
@param ip: The ip address to test
@type ip: a string "x.x.x.x"
@return: True if it's bogus, False otherwise
"""
return ip.startswith('0.') or ip.startswith('127.')
def _ip_to_number(ipstr):
"""
Translate a string ip address to a packed number.
@param ipstr: the ip address to transform
@type ipstr: a string "x.x.x.x"
@return: an int32 number representing the ip address
"""
net = [ int(digit) for digit in ipstr.split('.') ] + [ 0, 0, 0 ]
net = net[:4]
return ((((((0L+net[0])<<8) + net[1])<<8) + net[2])<<8) +net[3]
# List of rfc1918 net/mask
_rfc1918_networks = [('127', 8), ('192.168', 16), ('10', 8), ('172.16', 12)]
# Machine readable form of the above
_nets = [(_ip_to_number(net), (2L**32 -1)^(2L**(32-mask)-1))
for net, mask in _rfc1918_networks]
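# Worked example (editor's addition): "10.0.0.1" packs to
# (((10 * 256 + 0) * 256 + 0) * 256) + 1 == 167772161, which falls inside the
# 10/8 network above, so is_rfc1918_ip("10.0.0.1") is True while
# is_rfc1918_ip("8.8.8.8") is False and is_bogus_ip("127.0.0.1") is True.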
| 1,563 | Python | .py | 45 | 30.111111 | 76 | 0.626093 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,480 | __init__.py | p2pool_p2pool/nattraverso/__init__.py |
"""
This package offers ways to retrieve ip addresses of the machine, and map ports
through various protocols.
Currently only UPnP is implemented and available, in the pynupnp module.
@author: Raphael Slinckx
@copyright: Copyright 2005
@license: LGPL
@contact: U{raphael@slinckx.net<mailto:raphael@slinckx.net>}
@version: 0.1.0
"""
__revision__ = "$id"
__version__ = "0.1.0"
| 378 | Python | .py | 12 | 30.25 | 79 | 0.768595 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,481 | ipdiscover.py | p2pool_p2pool/nattraverso/ipdiscover.py |
"""
Generic methods to retrieve the IP address of the local machine.
TODO: Example
@author: Raphael Slinckx
@copyright: Copyright 2005
@license: LGPL
@contact: U{raphael@slinckx.net<mailto:raphael@slinckx.net>}
@version: 0.1.0
"""
__revision__ = "$id"
import random, socket, logging, itertools
from twisted.internet import defer, reactor
from twisted.internet.protocol import DatagramProtocol
from twisted.internet.error import CannotListenError
from nattraverso.utils import is_rfc1918_ip, is_bogus_ip
@defer.inlineCallbacks
def get_local_ip():
"""
Returns a deferred which will be called with a
2-uple (lan_flag, ip_address) :
- lan_flag:
- True if it's a local network (RFC1918)
- False if it's a WAN address
- ip_address is the actual ip address
@return: A deferred called with the above defined tuple
@rtype: L{twisted.internet.defer.Deferred}
"""
# first we try a connected udp socket, then via multicast
logging.debug("Resolving dns to get udp ip")
try:
ipaddr = yield reactor.resolve('A.ROOT-SERVERS.NET')
except:
pass
else:
udpprot = DatagramProtocol()
port = reactor.listenUDP(0, udpprot)
udpprot.transport.connect(ipaddr, 7)
localip = udpprot.transport.getHost().host
port.stopListening()
if is_bogus_ip(localip):
raise RuntimeError, "Invalid IP address returned"
else:
defer.returnValue((is_rfc1918_ip(localip), localip))
logging.debug("Multicast ping to retrieve local IP")
ipaddr = yield _discover_multicast()
defer.returnValue((is_rfc1918_ip(ipaddr), ipaddr))
@defer.inlineCallbacks
def get_external_ip():
"""
Returns a deferred which will be called with a
2-uple (wan_flag, ip_address):
- wan_flag:
- True if it's a WAN address
- False if it's a LAN address
- None if it's a localhost (127.0.0.1) address
- ip_address: the most accessible ip address of this machine
@return: A deferred called with the above defined tuple
@rtype: L{twisted.internet.defer.Deferred}
"""
try:
local, ipaddr = yield get_local_ip()
except:
defer.returnValue((None, "127.0.0.1"))
if not local:
defer.returnValue((True, ipaddr))
logging.debug("Got local ip, trying to use upnp to get WAN ip")
import nattraverso.pynupnp
try:
ipaddr2 = yield nattraverso.pynupnp.get_external_ip()
except:
defer.returnValue((False, ipaddr))
else:
defer.returnValue((True, ipaddr2))
class _LocalNetworkMulticast(DatagramProtocol):
def __init__(self, nonce):
from p2pool.util import variable
self.nonce = nonce
self.address_received = variable.Event()
def datagramReceived(self, dgram, addr):
"""Datagram received, we callback the IP address."""
logging.debug("Received multicast pong: %s; addr:%r", dgram, addr)
if dgram != self.nonce:
return
self.address_received.happened(addr[0])
@defer.inlineCallbacks
def _discover_multicast():
"""
Local IP discovery protocol via multicast:
- Broadcast 3 ping multicast packet with "ping" in it
- Wait for an answer
- Retrieve the ip address from the returning packet, which is ours
"""
nonce = str(random.randrange(2**64))
p = _LocalNetworkMulticast(nonce)
for attempt in itertools.count():
port = 11000 + random.randint(0, 5000)
try:
mcast = reactor.listenMulticast(port, p)
except CannotListenError:
if attempt >= 10:
raise
continue
else:
break
try:
yield mcast.joinGroup('239.255.255.250', socket.INADDR_ANY)
logging.debug("Sending multicast ping")
for i in xrange(3):
p.transport.write(nonce, ('239.255.255.250', port))
address, = yield p.address_received.get_deferred(5)
finally:
mcast.stopListening()
defer.returnValue(address)
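# --- Hedged usage sketch (editor's addition, not part of the upstream file) ---
# Prints this host's most accessible address and then stops the reactor.
#
#   def _show((wan_flag, ip)):
#       print "most accessible ip: %s (wan=%r)" % (ip, wan_flag)
#   get_external_ip().addCallback(_show).addBoth(lambda _: reactor.stop())
#   reactor.run()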
| 4,180 | Python | .py | 113 | 29.504425 | 74 | 0.660615 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,482 | soap.py | p2pool_p2pool/nattraverso/pynupnp/soap.py |
"""
This module is a SOAP client using twisted's deferreds.
It uses the SOAPpy package.
@author: Raphael Slinckx
@copyright: Copyright 2005
@license: LGPL
@contact: U{raphael@slinckx.net<mailto:raphael@slinckx.net>}
@version: 0.1.0
"""
__revision__ = "$id"
import SOAPpy, logging
from SOAPpy.Config import Config
from twisted.web import client, error
#General config
Config.typed = False
class SoapError(Exception):
"""
This is a SOAP error message, not an HTTP error message.
The content of this error is a SOAPpy structure representing the
SOAP error message.
"""
pass
class SoapProxy:
"""
Proxy for an url to which we send SOAP rpc calls.
"""
def __init__(self, url, prefix):
"""
Init the proxy, it will connect to the given url, using the
given soap namespace.
@param url: The url of the remote host to call
@param prefix: The namespace prefix to use, eg.
'urn:schemas-upnp-org:service:WANIPConnection:1'
"""
logging.debug("Soap Proxy: '%s', prefix: '%s'", url, prefix)
self._url = url
self._prefix = prefix
def call(self, method, **kwargs):
"""
Call the given remote method with the given arguments, as keywords.
Returns a deferred, called with SOAPpy structure representing
the soap response.
@param method: The method name to call, eg. 'GetExternalIP'
@param kwargs: The parameters of the call, as keywords
@return: A deferred called with the external ip address of this host
@rtype: L{twisted.internet.defer.Deferred}
"""
payload = SOAPpy.buildSOAP(method=method, config=Config, namespace=self._prefix, kw=kwargs)
# Here begins the nasty hack
payload = payload.replace(
# Upnp wants s: instead of SOAP-ENV
'SOAP-ENV','s').replace(
# Doesn't seem to like these encoding stuff
'xmlns:SOAP-ENC="http://schemas.xmlsoap.org/soap/encoding/"', '').replace(
'SOAP-ENC:root="1"', '').replace(
# And it wants u: instead of ns1 namespace for arguments..
'ns1','u')
logging.debug("SOAP Payload:\n%s", payload)
return client.getPage(self._url, postdata=payload, method="POST",
headers={'content-type': 'text/xml', 'SOAPACTION': '%s#%s' % (self._prefix, method)}
).addCallbacks(self._got_page, self._got_error)
def _got_page(self, result):
"""
The http POST command was successful, we parse the SOAP
answer, and return it.
@param result: the xml content
"""
parsed = SOAPpy.parseSOAPRPC(result)
logging.debug("SOAP Answer:\n%s", result)
logging.debug("SOAP Parsed Answer: %r", parsed)
return parsed
def _got_error(self, res):
"""
The HTTP POST command did not succeed, depending on the error type:
- it's a SOAP error, we parse it and return a L{SoapError}.
- it's another type of error (http, other), we raise it as is
"""
logging.debug("SOAP Error:\n%s", res)
if isinstance(res.value, error.Error):
try:
logging.debug("SOAP Error content:\n%s", res.value.response)
raise SoapError(SOAPpy.parseSOAPRPC(res.value.response)["detail"])
except:
raise
raise Exception(res.value)
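# --- Hedged usage sketch (editor's addition, not part of the upstream file) ---
# The control url below is a placeholder; a real one is read from the device's
# XML description (see upnpxml.py and upnp.py).
#
#   proxy = SoapProxy("http://192.168.1.1:49152/upnp/control/WANIPConn1",
#                     'urn:schemas-upnp-org:service:WANIPConnection:1')
#   proxy.call('GetExternalIPAddress').addCallback(
#       lambda res: logging.debug("WAN ip: %s", res['NewExternalIPAddress']))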
| 3,547 | Python | .py | 85 | 32.835294 | 103 | 0.629806 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,483 | upnpxml.py | p2pool_p2pool/nattraverso/pynupnp/upnpxml.py |
"""
This module parses an UPnP device's XML definition into an object.
@author: Raphael Slinckx
@copyright: Copyright 2005
@license: LGPL
@contact: U{raphael@slinckx.net<mailto:raphael@slinckx.net>}
@version: 0.1.0
"""
__revision__ = "$id"
from xml.dom import minidom
import logging
# Allowed UPnP services to use when mapping ports/external addresses
WANSERVICES = ['urn:schemas-upnp-org:service:WANIPConnection:1',
'urn:schemas-upnp-org:service:WANPPPConnection:1']
class UPnPXml:
"""
This objects parses the XML definition, and stores the useful
results in attributes.
The device infos dictionary may contain the following keys:
- friendlyname: A friendly name to call the device.
- manufacturer: A manufacturer name for the device.
Here are the different attributes:
- deviceinfos: A dictionary of device infos as defined above.
- controlurl: The control url, this is the url to use when sending SOAP
requests to the device, relative to the base url.
- wanservice: The WAN service to be used, one of the L{WANSERVICES}
- urlbase: The base url to use when talking in SOAP to the device.
The full url to use is obtained by urljoin(urlbase, controlurl)
"""
def __init__(self, xml):
"""
Parse the given XML string for UPnP infos. This creates the attributes
when they are found, or None if no value was found.
@param xml: a xml string to parse
"""
logging.debug("Got UPNP Xml description:\n%s", xml)
doc = minidom.parseString(xml)
# Fetch various device info
self.deviceinfos = {}
try:
attributes = {
'friendlyname':'friendlyName',
'manufacturer' : 'manufacturer'
}
device = doc.getElementsByTagName('device')[0]
for name, tag in attributes.iteritems():
try:
self.deviceinfos[name] = device.getElementsByTagName(
tag)[0].firstChild.data.encode('utf-8')
except:
pass
except:
pass
# Fetch device control url
self.controlurl = None
self.wanservice = None
for service in doc.getElementsByTagName('service'):
try:
stype = service.getElementsByTagName(
'serviceType')[0].firstChild.data.encode('utf-8')
if stype in WANSERVICES:
self.controlurl = service.getElementsByTagName(
'controlURL')[0].firstChild.data.encode('utf-8')
self.wanservice = stype
break
except:
pass
# Find base url
self.urlbase = None
try:
self.urlbase = doc.getElementsByTagName(
'URLBase')[0].firstChild.data.encode('utf-8')
except:
pass
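# --- Hedged usage sketch (editor's addition, not part of the upstream file) ---
# `xml_body` stands for the description document fetched from the gateway.
#
#   import urlparse
#   upnpinfo = UPnPXml(xml_body)
#   if upnpinfo.controlurl is not None:
#       control_url = urlparse.urljoin(upnpinfo.urlbase, upnpinfo.controlurl)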
| 3,026 | Python | .py | 74 | 30.121622 | 79 | 0.611246 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,484 | __init__.py | p2pool_p2pool/nattraverso/pynupnp/__init__.py |
"""
This package offers ways to retrieve ip addresses of the machine, and map ports
through UPnP devices.
@author: Raphael Slinckx
@copyright: Copyright 2005
@license: LGPL
@contact: U{raphael@slinckx.net<mailto:raphael@slinckx.net>}
@version: 0.1.0
"""
__revision__ = "$id"
from nattraverso.pynupnp.upnp import search_upnp_device, UPnPMapper
def get_external_ip():
"""
Returns a deferred which will be called with the WAN ip address
retrieved through UPnP. The ip is a string of the form "x.x.x.x"
@return: A deferred called with the external ip address of this host
@rtype: L{twisted.internet.defer.Deferred}
"""
return search_upnp_device().addCallback(lambda x: x.get_external_ip())
def get_port_mapper():
"""
Returns a deferred which will be called with a L{UPnPMapper} instance.
This is a L{nattraverso.portmapper.NATMapper} implementation.
@return: A deferred called with the L{UPnPMapper} instance.
@rtype: L{twisted.internet.defer.Deferred}
"""
return search_upnp_device().addCallback(lambda x: UPnPMapper(x))
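# --- Hedged usage sketch (editor's addition, not part of the upstream file) ---
# Resolves the WAN address through the first UPnP gateway found on the LAN.
#
#   from twisted.internet import reactor
#   def _show(ip):
#       print "external ip:", ip
#   get_external_ip().addCallback(_show)
#   reactor.run()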
| 1,088 | Python | .py | 27 | 36.703704 | 79 | 0.74212 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,485 | upnp.py | p2pool_p2pool/nattraverso/pynupnp/upnp.py |
"""
This module is the heart of the upnp support. Device discover, ip discovery
and port mappings are implemented here.
@author: Raphael Slinckx
@author: Anthony Baxter
@copyright: Copyright 2005
@license: LGPL
@contact: U{raphael@slinckx.net<mailto:raphael@slinckx.net>}
@version: 0.1.0
"""
__revision__ = "$id"
import socket, random, urlparse, logging
from twisted.internet import reactor, defer
from twisted.web import client
from twisted.internet.protocol import DatagramProtocol
from twisted.internet.error import CannotListenError
from twisted.python import failure
from nattraverso.pynupnp.soap import SoapProxy
from nattraverso.pynupnp.upnpxml import UPnPXml
from nattraverso import ipdiscover, portmapper
class UPnPError(Exception):
"""
A generic UPnP error, with a descriptive message as content.
"""
pass
class UPnPMapper(portmapper.NATMapper):
"""
This is the UPnP port mapper implementing the
L{NATMapper<portmapper.NATMapper>} interface.
@see: L{NATMapper<portmapper.NATMapper>}
"""
def __init__(self, upnp):
"""
Creates the mapper, with the given L{UPnPDevice} instance.
@param upnp: L{UPnPDevice} instance
"""
self._mapped = {}
self._upnp = upnp
def map(self, port):
"""
See interface
"""
self._check_valid_port(port)
#Port is already mapped
if port in self._mapped:
return defer.succeed(self._mapped[port])
#Trigger a new mapping creation, first fetch local ip.
result = ipdiscover.get_local_ip()
self._mapped[port] = result
return result.addCallback(self._map_got_local_ip, port)
def info(self, port):
"""
See interface
"""
# If the mapping exists, everything's ok
if port in self._mapped:
return self._mapped[port]
else:
raise ValueError('Port %r is not currently mapped'%(port))
def unmap(self, port):
"""
See interface
"""
if port in self._mapped:
existing = self._mapped[port]
#Pending mapping, queue an unmap,return existing deferred
if type(existing) is not tuple:
existing.addCallback(lambda x: self.unmap(port))
return existing
#Remove our local mapping
del self._mapped[port]
#Ask the UPnP to remove the mapping
extaddr, extport = existing
return self._upnp.remove_port_mapping(extport, port.getHost().type)
else:
raise ValueError('Port %r is not currently mapped'%(port))
def get_port_mappings(self):
"""
See interface
"""
return self._upnp.get_port_mappings()
def _map_got_local_ip(self, ip_result, port):
"""
We got the local ip address, retrieve the existing port mappings
in the device.
@param ip_result: result of L{ipdiscover.get_local_ip}
@param port: a L{twisted.internet.interfaces.IListeningPort} we
want to map
"""
local, ip = ip_result
return self._upnp.get_port_mappings().addCallback(
self._map_got_port_mappings, ip, port)
def _map_got_port_mappings(self, mappings, ip, port):
"""
We got all the existing mappings in the device, find an unused one
and assign it for the requested port.
@param ip: The local ip of this host "x.x.x.x"
@param port: a L{twisted.internet.interfaces.IListeningPort} we
want to map
@param mappings: result of L{UPnPDevice.get_port_mappings}
"""
#Get the requested mapping's info
ptype = port.getHost().type
intport = port.getHost().port
for extport in [random.randrange(1025, 65536) for val in range(20)]:
# Check if there is an existing mapping, if it does not exist, bingo
if not (ptype, extport) in mappings:
break
if (ptype, extport) in mappings:
existing = mappings[ptype, extport]
local_ip, local_port = existing
if local_ip == ip and local_port == intport:
# Existing binding for this host/port/proto - replace it
break
# Triggers the creation of the mapping on the device
result = self._upnp.add_port_mapping(ip, intport, extport, 'pynupnp', ptype)
# We also need the external IP, so we queue first an
# External IP Discovery, then we add the mapping.
return result.addCallback(
lambda x: self._upnp.get_external_ip()).addCallback(
self._port_mapping_added, extport, port)
def _port_mapping_added(self, extaddr, extport, port):
"""
The port mapping was added in the device, this means::
Internet NAT LAN
|
> IP:extaddr |> IP:local ip
> Port:extport |> Port:port
|
@param extaddr: The external ip address
@param extport: The external port as number
@param port: The internal port as a
L{twisted.internet.interfaces.IListeningPort} object, that has been
mapped
"""
self._mapped[port] = (extaddr, extport)
return (extaddr, extport)
class UPnPDevice:
"""
Represents an UPnP device, with the associated infos, and remote methods.
"""
def __init__(self, soap_proxy, info):
"""
Build the device, with the given SOAP proxy, and the meta-infos.
@param soap_proxy: an initialized L{SoapProxy} to the device
@param info: a dictionary of various infos concerning the
device extracted with L{UPnPXml}
"""
self._soap_proxy = soap_proxy
self._info = info
def get_external_ip(self):
"""
Triggers an external ip discovery on the upnp device. Returns
a deferred called with the external ip of this host.
@return: A deferred called with the ip address, as "x.x.x.x"
@rtype: L{twisted.internet.defer.Deferred}
"""
result = self._soap_proxy.call('GetExternalIPAddress')
result.addCallback(self._on_external_ip)
return result
def get_port_mappings(self):
"""
Retrieve the existing port mappings
@see: L{portmapper.NATMapper.get_port_mappings}
@return: A deferred called with the dictionary as defined
in the interface L{portmapper.NATMapper.get_port_mappings}
@rtype: L{twisted.internet.defer.Deferred}
"""
return self._get_port_mapping()
def add_port_mapping(self, local_ip, intport, extport, desc, proto, lease=0):
"""
Add a port mapping in the upnp device. Returns a deferred.
@param local_ip: the LAN ip of this host as "x.x.x.x"
@param intport: the internal port number
@param extport: the external port number
@param desc: the description of this mapping (string)
@param proto: "UDP" or "TCP"
@param lease: The duration of the lease in (mili)seconds(??)
@return: A deferred called with None when the mapping is done
@rtype: L{twisted.internet.defer.Deferred}
"""
result = self._soap_proxy.call('AddPortMapping', NewRemoteHost="",
NewExternalPort=extport,
NewProtocol=proto,
NewInternalPort=intport,
NewInternalClient=local_ip,
NewEnabled=1,
NewPortMappingDescription=desc,
NewLeaseDuration=lease)
return result.addCallbacks(self._on_port_mapping_added,
self._on_no_port_mapping_added)
def remove_port_mapping(self, extport, proto):
"""
Remove an existing port mapping on the device. Returns a deferred
@param extport: the external port number associated to the mapping
to be removed
@param proto: either "UDP" or "TCP"
@return: A deferred called with None when the mapping is done
@rtype: L{twisted.internet.defer.Deferred}
"""
result = self._soap_proxy.call('DeletePortMapping', NewRemoteHost="",
NewExternalPort=extport,
NewProtocol=proto)
return result.addCallbacks(self._on_port_mapping_removed,
self._on_no_port_mapping_removed)
# Private --------
def _on_external_ip(self, res):
"""
Called when we received the external ip address from the device.
@param res: the SOAPpy structure of the result
@return: the external ip string, as "x.x.x.x"
"""
logging.debug("Got external ip struct: %r", res)
return res['NewExternalIPAddress']
def _get_port_mapping(self, mapping_id=0, mappings=None):
"""
Fetch the existing mappings starting at index
"mapping_id" from the device.
To retrieve all the mappings call this without parameters.
@param mapping_id: The index of the mapping to start fetching from
@param mappings: the dictionary of already fetched mappings
@return: A deferred called with the existing mappings when all have been
retrieved, see L{get_port_mappings}
@rtype: L{twisted.internet.defer.Deferred}
"""
if mappings == None:
mappings = {}
result = self._soap_proxy.call('GetGenericPortMappingEntry',
NewPortMappingIndex=mapping_id)
return result.addCallbacks(
lambda x: self._on_port_mapping_received(x, mapping_id+1, mappings),
lambda x: self._on_no_port_mapping_received( x, mappings))
def _on_port_mapping_received(self, response, mapping_id, mappings):
"""
Called when we receive a single mapping from the device.
@param response: a SOAPpy structure, representing the device's answer
@param mapping_id: The index of the next mapping in the device
@param mappings: the already fetched mappings, see L{get_port_mappings}
@return: A deferred called with the existing mappings when all have been
retrieved, see L{get_port_mappings}
@rtype: L{twisted.internet.defer.Deferred}
"""
logging.debug("Got mapping struct: %r", response)
mappings[
response['NewProtocol'], response['NewExternalPort']
] = (response['NewInternalClient'], response['NewInternalPort'])
return self._get_port_mapping(mapping_id, mappings)
def _on_no_port_mapping_received(self, failure, mappings):
"""
Called when we have no more port mappings to retrieve, or an
error occurred while retrieving them.
A "SpecifiedArrayIndexInvalid" SOAP error is fine: it just means we have
finished. Any other error makes the deferred fail with an UPnPError.
@param mappings: the already retreived mappings
@param failure: the failure
@return: The existing mappings as defined in L{get_port_mappings}
@raise UPnPError: When we got any other error
than "SpecifiedArrayIndexInvalid"
"""
logging.debug("_on_no_port_mapping_received: %s", failure)
err = failure.value
message = err.args[0]["UPnPError"]["errorDescription"]
if "SpecifiedArrayIndexInvalid" == message:
return mappings
else:
return failure
def _on_port_mapping_added(self, response):
"""
The port mapping was successfully added, return None to the deferred.
"""
return None
def _on_no_port_mapping_added(self, failure):
"""
Called when the port mapping could not be added. The failure (an
UPnPError carrying the SOAPpy structure) is passed on to the errback chain.
@raise UPnPError: When the port mapping could not be added
"""
return failure
def _on_port_mapping_removed(self, response):
"""
The port mapping was successfully removed, return None to the deferred.
"""
return None
def _on_no_port_mapping_removed(self, failure):
"""
Called when the port mapping could not be removed. The failure (an
UPnPError carrying the SOAPpy structure) is passed on to the errback chain.
@raise UPnPError: When the port mapping could not be deleted
"""
return failure
# UPNP multicast address, port and request string
_UPNP_MCAST = '239.255.255.250'
_UPNP_PORT = 1900
_UPNP_SEARCH_REQUEST = """M-SEARCH * HTTP/1.1\r
Host:%s:%s\r
ST:urn:schemas-upnp-org:device:InternetGatewayDevice:1\r
Man:"ssdp:discover"\r
MX:3\r
\r
""" % (_UPNP_MCAST, _UPNP_PORT)
class UPnPProtocol(DatagramProtocol, object):
"""
The UPnP Device discovery udp multicast twisted protocol.
"""
def __init__(self, *args, **kwargs):
"""
Init the protocol, no parameters needed.
"""
super(UPnPProtocol, self).__init__(*args, **kwargs)
#Device discovery deferred
self._discovery = None
self._discovery_timeout = None
self.mcast = None
self._done = False
# Public methods
def search_device(self):
"""
Triggers a UPnP device discovery.
The returned deferred will be called with the L{UPnPDevice} that has
been found in the LAN.
@return: A deferred called with the detected L{UPnPDevice} instance.
@rtype: L{twisted.internet.defer.Deferred}
"""
if self._discovery is not None:
raise ValueError('already used')
self._discovery = defer.Deferred()
self._discovery_timeout = reactor.callLater(6, self._on_discovery_timeout)
attempt = 0
mcast = None
while True:
try:
self.mcast = reactor.listenMulticast(1900+attempt, self)
break
except CannotListenError:
attempt = random.randint(0, 500)
# join the multicast group and start the upnp search
self.mcast.joinGroup('239.255.255.250', socket.INADDR_ANY)
self.transport.write(_UPNP_SEARCH_REQUEST, (_UPNP_MCAST, _UPNP_PORT))
self.transport.write(_UPNP_SEARCH_REQUEST, (_UPNP_MCAST, _UPNP_PORT))
self.transport.write(_UPNP_SEARCH_REQUEST, (_UPNP_MCAST, _UPNP_PORT))
return self._discovery
#Private methods
def datagramReceived(self, dgram, address):
"""
Handle the multicast answer from the upnp device (private).
"""
if self._done:
return
logging.debug("Got UPNP multicast search answer:\n%s", dgram)
#This is an HTTP response
response, message = dgram.split('\r\n', 1)
# Prepare status line
version, status, textstatus = response.split(None, 2)
if not version.startswith('HTTP'):
return
if status != "200":
return
# Launch the info fetching
def parse_discovery_response(message):
"""Separate headers and body from the received http answer."""
hdict = {}
body = ''
remaining = message
while remaining:
line, remaining = remaining.split('\r\n', 1)
line = line.strip()
if not line:
body = remaining
break
key, val = line.split(':', 1)
key = key.lower()
hdict.setdefault(key, []).append(val.strip())
return hdict, body
headers, body = parse_discovery_response(message)
if not 'location' in headers:
self._on_discovery_failed(
UPnPError(
"No location header in response to M-SEARCH!: %r"%headers))
return
loc = headers['location'][0]
result = client.getPage(url=loc)
result.addCallback(self._on_gateway_response, loc).addErrback(self._on_discovery_failed)
def _on_gateway_response(self, body, loc):
"""
Called with the UPnP device XML description fetched via HTTP.
If the device has suitable services for ip discovery and port mappings,
the callback returned in L{search_device} is called with
the discovered L{UPnPDevice}.
@raise UPnPError: When no suitable service has been
found in the description, or another error occurs.
@param body: The xml description of the device.
@param loc: the url used to retrieve the xml description
"""
if self._done:
return
# Parse answer
upnpinfo = UPnPXml(body)
# Check if we have a base url, if not consider location as base url
urlbase = upnpinfo.urlbase
if urlbase == None:
urlbase = loc
# Check the control url, if None, then the device cannot do what we want
controlurl = upnpinfo.controlurl
if controlurl == None:
self._on_discovery_failed(UPnPError("upnp response showed no WANConnections"))
return
control_url2 = urlparse.urljoin(urlbase, controlurl)
soap_proxy = SoapProxy(control_url2, upnpinfo.wanservice)
self._on_discovery_succeeded(UPnPDevice(soap_proxy, upnpinfo.deviceinfos))
def _on_discovery_succeeded(self, res):
if self._done:
return
self._done = True
self.mcast.stopListening()
self._discovery_timeout.cancel()
self._discovery.callback(res)
def _on_discovery_failed(self, err):
if self._done:
return
self._done = True
self.mcast.stopListening()
self._discovery_timeout.cancel()
self._discovery.errback(err)
def _on_discovery_timeout(self):
if self._done:
return
self._done = True
self.mcast.stopListening()
self._discovery.errback(failure.Failure(defer.TimeoutError('in _on_discovery_timeout')))
def search_upnp_device ():
"""
Check the network for an UPnP device. Returns a deferred
with the L{UPnPDevice} instance as result, if found.
@return: A deferred called with the L{UPnPDevice} instance
@rtype: L{twisted.internet.defer.Deferred}
"""
return defer.maybeDeferred(UPnPProtocol().search_device)
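# Illustrative usage sketch (not part of the original module): it shows how the
# discovery and port-mapping API above could be driven end to end -- discover the
# gateway, log its current mappings, then ask it to forward TCP port 9333 to this
# host. The LAN address '192.168.1.2' and port 9333 are placeholder values.
if __name__ == '__main__':
    def _example_on_device(device):
        # log the existing mappings (fire and forget for this sketch)
        device.get_port_mappings().addCallback(
            lambda mappings: logging.info("existing mappings: %r", mappings))
        # forward external TCP 9333 to the same port on this host
        return device.add_port_mapping('192.168.1.2', 9333, 9333, 'upnp example', 'TCP')
    d = search_upnp_device()
    d.addCallback(_example_on_device)
    d.addErrback(lambda err: logging.error("UPnP example failed: %s", err))
    d.addBoth(lambda _: reactor.stop())
    reactor.run()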
| 18,985 | Python | .py | 438 | 32.901826 | 96 | 0.629673 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,486 | convert_networks.py | p2pool_p2pool/dev/convert_networks.py |
import sys
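# Dev helper: reads the legacy monolithic 'nets = dict(...)' definitions from the file
# given as argv[1] and writes one <name>.py module per network under the path prefix
# argv[2]; argv[3] selects the generated header and must be 'p2pool' or 'bitcoin'.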
f = open(sys.argv[1])
while True:
if f.readline().strip() == 'nets = dict(': break
def nesting(l):
res = 0
for c in l:
if c == '(': res += 1
if c == ')': res -= 1
return res
def write_header(f2, name):
if sys.argv[3] == 'p2pool':
f2.write('from p2pool.bitcoin import networks\n\n')
if name == 'bitcoin':
f2.write('''# CHAIN_LENGTH = number of shares back client keeps
# REAL_CHAIN_LENGTH = maximum number of shares back client uses to compute payout
# REAL_CHAIN_LENGTH must always be <= CHAIN_LENGTH
# REAL_CHAIN_LENGTH must be changed in sync with all other clients
# changes can be done by changing one, then the other
''')
elif sys.argv[3] == 'bitcoin':
f2.write('''import os
import platform
from twisted.internet import defer
from .. import data, helper
from p2pool.util import pack
''')
else: assert False, 'invalid type argument'
while True:
l = f.readline()
if not l.strip(): continue
if l.strip() == ')': break
name = l.strip().split('=')[0]
lines = []
while True:
l = f.readline()
if not l.strip(): continue
if l.strip() == '),': break
while nesting(l) != 0:
l += f.readline()
lines.append(l.split('=', 1))
with open(sys.argv[2] + name + '.py', 'wb') as f2:
write_header(f2, name)
for a, b in lines:
if ', #' in b: b = b.replace(', #', ' #')
elif b.strip().endswith(','): b = b.strip()[:-1]
else: assert False, b
f2.write('%s = %s\n' % (a.strip(), b.strip()))
| 1,620 | Python | .py | 48 | 27.75 | 81 | 0.573907 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,487 | p2p.py | p2pool_p2pool/p2pool/p2p.py |
from __future__ import division
import math
import random
import sys
import time
from twisted.internet import defer, protocol, reactor
from twisted.python import failure, log
import p2pool
from p2pool import data as p2pool_data
from p2pool.bitcoin import data as bitcoin_data
from p2pool.util import deferral, p2protocol, pack, variable
class PeerMisbehavingError(Exception):
pass
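# fragment() works around p2protocol.TooLong: if a packet is too large to send in one
# piece, it retries recursively with every keyword-argument sequence split in half
# until each half fits.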
def fragment(f, **kwargs):
try:
f(**kwargs)
except p2protocol.TooLong:
fragment(f, **dict((k, v[:len(v)//2]) for k, v in kwargs.iteritems()))
fragment(f, **dict((k, v[len(v)//2:]) for k, v in kwargs.iteritems()))
class Protocol(p2protocol.Protocol):
VERSION = 1700
max_remembered_txs_size = 2500000
def __init__(self, node, incoming):
p2protocol.Protocol.__init__(self, node.net.PREFIX, 1000000, node.traffic_happened)
self.node = node
self.incoming = incoming
self.other_version = None
self.connected2 = False
def connectionMade(self):
self.factory.proto_made_connection(self)
self.connection_lost_event = variable.Event()
self.addr = self.transport.getPeer().host, self.transport.getPeer().port
self.send_version(
version=self.VERSION,
services=0,
addr_to=dict(
services=0,
address=self.transport.getPeer().host,
port=self.transport.getPeer().port,
),
addr_from=dict(
services=0,
address=self.transport.getHost().host,
port=self.transport.getHost().port,
),
nonce=self.node.nonce,
sub_version=p2pool.__version__,
mode=1,
best_share_hash=self.node.best_share_hash_func(),
)
self.timeout_delayed = reactor.callLater(10, self._connect_timeout)
self.get_shares = deferral.GenericDeferrer(
max_id=2**256,
func=lambda id, hashes, parents, stops: self.send_sharereq(id=id, hashes=hashes, parents=parents, stops=stops),
timeout=15,
on_timeout=self.disconnect,
)
self.remote_tx_hashes = set() # view of peer's known_txs # not actually initially empty, but sending txs instead of tx hashes won't hurt
self.remote_remembered_txs_size = 0
self.remembered_txs = {} # view of peer's mining_txs
self.remembered_txs_size = 0
self.known_txs_cache = {}
def _connect_timeout(self):
self.timeout_delayed = None
print 'Handshake timed out, disconnecting from %s:%i' % self.addr
self.disconnect()
def packetReceived(self, command, payload2):
try:
if command != 'version' and not self.connected2:
raise PeerMisbehavingError('first message was not version message')
p2protocol.Protocol.packetReceived(self, command, payload2)
except PeerMisbehavingError, e:
print 'Peer %s:%i misbehaving, will drop and ban. Reason:' % self.addr, e.message
self.badPeerHappened()
def badPeerHappened(self):
print "Bad peer banned:", self.addr
self.disconnect()
if self.transport.getPeer().host != '127.0.0.1': # never ban localhost
self.node.bans[self.transport.getPeer().host] = time.time() + 60*60
def _timeout(self):
self.timeout_delayed = None
print 'Connection timed out, disconnecting from %s:%i' % self.addr
self.disconnect()
def sendAdvertisement(self):
if self.node.serverfactory.listen_port is not None:
host=self.node.external_ip
port=self.node.serverfactory.listen_port.getHost().port
if host is not None:
if ':' in host:
host, port_str = host.split(':')
port = int(port_str)
if p2pool.DEBUG:
print 'Advertising for incoming connections: %s:%i' % (host, port)
# Advertise given external IP address, just as if there were another peer behind us, with that address, who asked us to advertise it for them
self.send_addrs(addrs=[
dict(
address=dict(
services=self.other_services,
address=host,
port=port,
),
timestamp=int(time.time()),
),
])
else:
if p2pool.DEBUG:
print 'Advertising for incoming connections'
# Ask peer to advertise what it believes our IP address to be
self.send_addrme(port=port)
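# Each message_<name> ComposedType below describes the wire format of one packet type;
# the p2protocol base class appears to use these attributes to build the corresponding
# send_<name> helpers and to dispatch incoming '<name>' packets to handle_<name> with
# the unpacked fields.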
message_version = pack.ComposedType([
('version', pack.IntType(32)),
('services', pack.IntType(64)),
('addr_to', bitcoin_data.address_type),
('addr_from', bitcoin_data.address_type),
('nonce', pack.IntType(64)),
('sub_version', pack.VarStrType()),
('mode', pack.IntType(32)), # always 1 for legacy compatibility
('best_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
])
def handle_version(self, version, services, addr_to, addr_from, nonce, sub_version, mode, best_share_hash):
if self.other_version is not None:
raise PeerMisbehavingError('more than one version message')
if version < getattr(self.node.net, 'MINIMUM_PROTOCOL_VERSION', 1400):
raise PeerMisbehavingError('peer too old')
self.other_version = version
self.other_sub_version = sub_version[:512]
self.other_services = services
if nonce == self.node.nonce:
raise PeerMisbehavingError('was connected to self')
if nonce in self.node.peers:
if p2pool.DEBUG:
print 'Detected duplicate connection, disconnecting from %s:%i' % self.addr
self.disconnect()
return
self.nonce = nonce
self.connected2 = True
self.timeout_delayed.cancel()
self.timeout_delayed = reactor.callLater(100, self._timeout)
old_dataReceived = self.dataReceived
def new_dataReceived(data):
if self.timeout_delayed is not None:
self.timeout_delayed.reset(100)
old_dataReceived(data)
self.dataReceived = new_dataReceived
self.factory.proto_connected(self)
self._stop_thread = deferral.run_repeatedly(lambda: [
self.send_ping(),
random.expovariate(1/100)][-1])
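# The lambda above returns a sample from random.expovariate (mean ~100 s);
# deferral.run_repeatedly presumably reuses that return value as the delay before the
# next call, so pings go out at randomized ~100-second intervals.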
if self.node.advertise_ip:
self._stop_thread2 = deferral.run_repeatedly(lambda: [
self.sendAdvertisement(),
random.expovariate(1/(100*len(self.node.peers) + 1))][-1])
if best_share_hash is not None:
self.node.handle_share_hashes([best_share_hash], self)
def update_remote_view_of_my_known_txs(before, after):
added = set(after) - set(before)
removed = set(before) - set(after)
if added:
self.send_have_tx(tx_hashes=list(added))
if removed:
self.send_losing_tx(tx_hashes=list(removed))
# cache forgotten txs here for a little while so latency of "losing_tx" packets doesn't cause problems
key = max(self.known_txs_cache) + 1 if self.known_txs_cache else 0
self.known_txs_cache[key] = dict((h, before[h]) for h in removed)
reactor.callLater(20, self.known_txs_cache.pop, key)
watch_id = self.node.known_txs_var.transitioned.watch(update_remote_view_of_my_known_txs)
self.connection_lost_event.watch(lambda: self.node.known_txs_var.transitioned.unwatch(watch_id))
self.send_have_tx(tx_hashes=self.node.known_txs_var.value.keys())
def update_remote_view_of_my_mining_txs(before, after):
added = set(after) - set(before)
removed = set(before) - set(after)
if removed:
self.send_forget_tx(tx_hashes=list(removed))
self.remote_remembered_txs_size -= sum(100 + bitcoin_data.tx_type.packed_size(before[x]) for x in removed)
if added:
self.remote_remembered_txs_size += sum(100 + bitcoin_data.tx_type.packed_size(after[x]) for x in added)
assert self.remote_remembered_txs_size <= self.max_remembered_txs_size
fragment(self.send_remember_tx, tx_hashes=[x for x in added if x in self.remote_tx_hashes], txs=[after[x] for x in added if x not in self.remote_tx_hashes])
watch_id2 = self.node.mining_txs_var.transitioned.watch(update_remote_view_of_my_mining_txs)
self.connection_lost_event.watch(lambda: self.node.mining_txs_var.transitioned.unwatch(watch_id2))
self.remote_remembered_txs_size += sum(100 + bitcoin_data.tx_type.packed_size(x) for x in self.node.mining_txs_var.value.values())
assert self.remote_remembered_txs_size <= self.max_remembered_txs_size
fragment(self.send_remember_tx, tx_hashes=[], txs=self.node.mining_txs_var.value.values())
message_ping = pack.ComposedType([])
def handle_ping(self):
pass
message_addrme = pack.ComposedType([
('port', pack.IntType(16)),
])
def handle_addrme(self, port):
host = self.transport.getPeer().host
#print 'addrme from', host, port
if host == '127.0.0.1':
if random.random() < .8 and self.node.peers:
random.choice(self.node.peers.values()).send_addrme(port=port) # services...
else:
self.node.got_addr((self.transport.getPeer().host, port), self.other_services, int(time.time()))
if random.random() < .8 and self.node.peers:
random.choice(self.node.peers.values()).send_addrs(addrs=[
dict(
address=dict(
services=self.other_services,
address=host,
port=port,
),
timestamp=int(time.time()),
),
])
message_addrs = pack.ComposedType([
('addrs', pack.ListType(pack.ComposedType([
('timestamp', pack.IntType(64)),
('address', bitcoin_data.address_type),
]))),
])
def handle_addrs(self, addrs):
for addr_record in addrs:
self.node.got_addr((addr_record['address']['address'], addr_record['address']['port']), addr_record['address']['services'], min(int(time.time()), addr_record['timestamp']))
if random.random() < .8 and self.node.peers:
random.choice(self.node.peers.values()).send_addrs(addrs=[addr_record])
message_getaddrs = pack.ComposedType([
('count', pack.IntType(32)),
])
def handle_getaddrs(self, count):
if count > 100:
count = 100
self.send_addrs(addrs=[
dict(
timestamp=int(self.node.addr_store[host, port][2]),
address=dict(
services=self.node.addr_store[host, port][0],
address=host,
port=port,
),
) for host, port in
self.node.get_good_peers(count)
])
message_shares = pack.ComposedType([
('shares', pack.ListType(p2pool_data.share_type)),
])
def handle_shares(self, shares):
result = []
for wrappedshare in shares:
if wrappedshare['type'] < p2pool_data.Share.VERSION: continue
share = p2pool_data.load_share(wrappedshare, self.node.net, self.addr)
if wrappedshare['type'] >= 13:
txs = []
for tx_hash in share.share_info['new_transaction_hashes']:
if tx_hash in self.node.known_txs_var.value:
tx = self.node.known_txs_var.value[tx_hash]
else:
for cache in self.known_txs_cache.itervalues():
if tx_hash in cache:
tx = cache[tx_hash]
print 'Transaction %064x rescued from peer latency cache!' % (tx_hash,)
break
else:
print >>sys.stderr, 'Peer referenced unknown transaction %064x, disconnecting' % (tx_hash,)
self.disconnect()
return
txs.append(tx)
else:
txs = None
result.append((share, txs))
self.node.handle_shares(result, self)
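# Before shares are pushed to a peer, any transactions they reference that the peer may
# not already know are sent via remember_tx (full transactions only for hashes the peer
# has not advertised); forget_tx releases them again afterwards, and
# remote_remembered_txs_size tracks how much data the peer is asked to hold so it never
# exceeds max_remembered_txs_size.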
def sendShares(self, shares, tracker, known_txs, include_txs_with=[]):
tx_hashes = set()
for share in shares:
if share.VERSION >= 13:
# send full transaction for every new_transaction_hash that peer does not know
for tx_hash in share.share_info['new_transaction_hashes']:
assert tx_hash in known_txs, 'tried to broadcast share without knowing all its new transactions'
if tx_hash not in self.remote_tx_hashes:
tx_hashes.add(tx_hash)
continue
if share.hash in include_txs_with:
x = share.get_other_tx_hashes(tracker)
if x is not None:
tx_hashes.update(x)
hashes_to_send = [x for x in tx_hashes if x not in self.node.mining_txs_var.value and x in known_txs]
new_remote_remembered_txs_size = self.remote_remembered_txs_size + sum(100 + bitcoin_data.tx_type.packed_size(known_txs[x]) for x in hashes_to_send)
if new_remote_remembered_txs_size > self.max_remembered_txs_size:
raise ValueError('shares have too many txs')
self.remote_remembered_txs_size = new_remote_remembered_txs_size
fragment(self.send_remember_tx, tx_hashes=[x for x in hashes_to_send if x in self.remote_tx_hashes], txs=[known_txs[x] for x in hashes_to_send if x not in self.remote_tx_hashes])
fragment(self.send_shares, shares=[share.as_share() for share in shares])
self.send_forget_tx(tx_hashes=hashes_to_send)
self.remote_remembered_txs_size -= sum(100 + bitcoin_data.tx_type.packed_size(known_txs[x]) for x in hashes_to_send)
message_sharereq = pack.ComposedType([
('id', pack.IntType(256)),
('hashes', pack.ListType(pack.IntType(256))),
('parents', pack.VarIntType()),
('stops', pack.ListType(pack.IntType(256))),
])
def handle_sharereq(self, id, hashes, parents, stops):
shares = self.node.handle_get_shares(hashes, parents, stops, self)
try:
self.send_sharereply(id=id, result='good', shares=[share.as_share() for share in shares])
except p2protocol.TooLong:
self.send_sharereply(id=id, result='too long', shares=[])
message_sharereply = pack.ComposedType([
('id', pack.IntType(256)),
('result', pack.EnumType(pack.VarIntType(), {0: 'good', 1: 'too long', 2: 'unk2', 3: 'unk3', 4: 'unk4', 5: 'unk5', 6: 'unk6'})),
('shares', pack.ListType(p2pool_data.share_type)),
])
class ShareReplyError(Exception): pass
def handle_sharereply(self, id, result, shares):
if result == 'good':
res = [p2pool_data.load_share(share, self.node.net, self.addr) for share in shares if share['type'] >= p2pool_data.Share.VERSION]
else:
res = failure.Failure(self.ShareReplyError(result))
self.get_shares.got_response(id, res)
message_bestblock = pack.ComposedType([
('header', bitcoin_data.block_header_type),
])
def handle_bestblock(self, header):
self.node.handle_bestblock(header, self)
message_have_tx = pack.ComposedType([
('tx_hashes', pack.ListType(pack.IntType(256))),
])
def handle_have_tx(self, tx_hashes):
#assert self.remote_tx_hashes.isdisjoint(tx_hashes)
self.remote_tx_hashes.update(tx_hashes)
while len(self.remote_tx_hashes) > 10000:
self.remote_tx_hashes.pop()
message_losing_tx = pack.ComposedType([
('tx_hashes', pack.ListType(pack.IntType(256))),
])
def handle_losing_tx(self, tx_hashes):
#assert self.remote_tx_hashes.issuperset(tx_hashes)
self.remote_tx_hashes.difference_update(tx_hashes)
message_remember_tx = pack.ComposedType([
('tx_hashes', pack.ListType(pack.IntType(256))),
('txs', pack.ListType(bitcoin_data.tx_type)),
])
def handle_remember_tx(self, tx_hashes, txs):
for tx_hash in tx_hashes:
if tx_hash in self.remembered_txs:
print >>sys.stderr, 'Peer referenced transaction twice, disconnecting'
self.disconnect()
return
if tx_hash in self.node.known_txs_var.value:
tx = self.node.known_txs_var.value[tx_hash]
else:
for cache in self.known_txs_cache.itervalues():
if tx_hash in cache:
tx = cache[tx_hash]
print 'Transaction %064x rescued from peer latency cache!' % (tx_hash,)
break
else:
print >>sys.stderr, 'Peer referenced unknown transaction %064x, disconnecting' % (tx_hash,)
self.disconnect()
return
self.remembered_txs[tx_hash] = tx
self.remembered_txs_size += 100 + bitcoin_data.tx_type.packed_size(tx)
new_known_txs = dict(self.node.known_txs_var.value)
warned = False
for tx in txs:
tx_hash = bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx))
if tx_hash in self.remembered_txs:
print >>sys.stderr, 'Peer referenced transaction twice, disconnecting'
self.disconnect()
return
if tx_hash in self.node.known_txs_var.value and not warned:
print 'Peer sent entire transaction %064x that was already received' % (tx_hash,)
warned = True
self.remembered_txs[tx_hash] = tx
self.remembered_txs_size += 100 + bitcoin_data.tx_type.packed_size(tx)
new_known_txs[tx_hash] = tx
self.node.known_txs_var.set(new_known_txs)
if self.remembered_txs_size >= self.max_remembered_txs_size:
raise PeerMisbehavingError('too much transaction data stored')
message_forget_tx = pack.ComposedType([
('tx_hashes', pack.ListType(pack.IntType(256))),
])
def handle_forget_tx(self, tx_hashes):
for tx_hash in tx_hashes:
self.remembered_txs_size -= 100 + bitcoin_data.tx_type.packed_size(self.remembered_txs[tx_hash])
assert self.remembered_txs_size >= 0
del self.remembered_txs[tx_hash]
def connectionLost(self, reason):
self.connection_lost_event.happened()
if self.timeout_delayed is not None:
self.timeout_delayed.cancel()
if self.connected2:
self.factory.proto_disconnected(self, reason)
self._stop_thread()
if self.node.advertise_ip:
self._stop_thread2()
self.connected2 = False
self.factory.proto_lost_connection(self, reason)
if p2pool.DEBUG:
print "Peer connection lost:", self.addr, reason
self.get_shares.respond_all(reason)
@defer.inlineCallbacks
def do_ping(self):
start = reactor.seconds()
yield self.get_shares(hashes=[0], parents=0, stops=[])
end = reactor.seconds()
defer.returnValue(end - start)
class ServerFactory(protocol.ServerFactory):
def __init__(self, node, max_conns):
self.node = node
self.max_conns = max_conns
self.conns = {}
self.running = False
self.listen_port = None
def buildProtocol(self, addr):
if sum(self.conns.itervalues()) >= self.max_conns or self.conns.get(self._host_to_ident(addr.host), 0) >= 3:
return None
if addr.host in self.node.bans and self.node.bans[addr.host] > time.time():
return None
p = Protocol(self.node, True)
p.factory = self
if p2pool.DEBUG:
print "Got peer connection from:", addr
return p
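# Incoming peers are grouped by the first two octets of their address (a /16 network);
# buildProtocol above allows at most three simultaneous connections per group.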
def _host_to_ident(self, host):
a, b, c, d = host.split('.')
return a, b
def proto_made_connection(self, proto):
ident = self._host_to_ident(proto.transport.getPeer().host)
self.conns[ident] = self.conns.get(ident, 0) + 1
def proto_lost_connection(self, proto, reason):
ident = self._host_to_ident(proto.transport.getPeer().host)
self.conns[ident] -= 1
if not self.conns[ident]:
del self.conns[ident]
def proto_connected(self, proto):
self.node.got_conn(proto)
def proto_disconnected(self, proto, reason):
self.node.lost_conn(proto, reason)
def start(self):
assert not self.running
self.running = True
def attempt_listen():
if self.running:
self.listen_port = reactor.listenTCP(self.node.port, self)
deferral.retry('Error binding to P2P port:', traceback=False)(attempt_listen)()
def stop(self):
assert self.running
self.running = False
return self.listen_port.stopListening()
class ClientFactory(protocol.ClientFactory):
def __init__(self, node, desired_conns, max_attempts):
self.node = node
self.desired_conns = desired_conns
self.max_attempts = max_attempts
self.attempts = set()
self.conns = set()
self.running = False
def _host_to_ident(self, host):
a, b, c, d = host.split('.')
return a, b
def buildProtocol(self, addr):
p = Protocol(self.node, False)
p.factory = self
return p
def startedConnecting(self, connector):
ident = self._host_to_ident(connector.getDestination().host)
if ident in self.attempts:
raise AssertionError('already have attempt')
self.attempts.add(ident)
def clientConnectionFailed(self, connector, reason):
self.attempts.remove(self._host_to_ident(connector.getDestination().host))
def clientConnectionLost(self, connector, reason):
self.attempts.remove(self._host_to_ident(connector.getDestination().host))
def proto_made_connection(self, proto):
pass
def proto_lost_connection(self, proto, reason):
pass
def proto_connected(self, proto):
self.conns.add(proto)
self.node.got_conn(proto)
def proto_disconnected(self, proto, reason):
self.conns.remove(proto)
self.node.lost_conn(proto, reason)
def start(self):
assert not self.running
self.running = True
self._stop_thinking = deferral.run_repeatedly(self._think)
def stop(self):
assert self.running
self.running = False
self._stop_thinking()
def _think(self):
try:
if len(self.conns) < self.desired_conns and len(self.attempts) < self.max_attempts and self.node.addr_store:
(host, port), = self.node.get_good_peers(1)
if self._host_to_ident(host) in self.attempts:
pass
elif host in self.node.bans and self.node.bans[host] > time.time():
pass
else:
#print 'Trying to connect to', host, port
reactor.connectTCP(host, port, self, timeout=5)
except:
log.err()
return random.expovariate(1/1)
class SingleClientFactory(protocol.ReconnectingClientFactory):
def __init__(self, node):
self.node = node
def buildProtocol(self, addr):
p = Protocol(self.node, incoming=False)
p.factory = self
return p
def proto_made_connection(self, proto):
pass
def proto_lost_connection(self, proto, reason):
pass
def proto_connected(self, proto):
self.resetDelay()
self.node.got_conn(proto)
def proto_disconnected(self, proto, reason):
self.node.lost_conn(proto, reason)
class Node(object):
def __init__(self, best_share_hash_func, port, net, addr_store={}, connect_addrs=set(), desired_outgoing_conns=10, max_outgoing_attempts=30, max_incoming_conns=50, preferred_storage=1000, known_txs_var=variable.Variable({}), mining_txs_var=variable.Variable({}), advertise_ip=True, external_ip=None):
self.best_share_hash_func = best_share_hash_func
self.port = port
self.net = net
self.addr_store = dict(addr_store)
self.connect_addrs = connect_addrs
self.preferred_storage = preferred_storage
self.known_txs_var = known_txs_var
self.mining_txs_var = mining_txs_var
self.advertise_ip = advertise_ip
self.external_ip = external_ip
self.traffic_happened = variable.Event()
self.nonce = random.randrange(2**64)
self.peers = {}
self.bans = {} # address -> end_time
self.clientfactory = ClientFactory(self, desired_outgoing_conns, max_outgoing_attempts)
self.serverfactory = ServerFactory(self, max_incoming_conns)
self.running = False
def start(self):
if self.running:
raise ValueError('already running')
self.clientfactory.start()
self.serverfactory.start()
self.singleclientconnectors = [reactor.connectTCP(addr, port, SingleClientFactory(self)) for addr, port in self.connect_addrs]
self.running = True
self._stop_thinking = deferral.run_repeatedly(self._think)
def _think(self):
try:
if len(self.addr_store) < self.preferred_storage and self.peers:
random.choice(self.peers.values()).send_getaddrs(count=8)
except:
log.err()
return random.expovariate(1/20)
@defer.inlineCallbacks
def stop(self):
if not self.running:
raise ValueError('already stopped')
self.running = False
self._stop_thinking()
yield self.clientfactory.stop()
yield self.serverfactory.stop()
for singleclientconnector in self.singleclientconnectors:
yield singleclientconnector.factory.stopTrying()
yield singleclientconnector.disconnect()
del self.singleclientconnectors
def got_conn(self, conn):
if conn.nonce in self.peers:
raise ValueError('already have peer')
self.peers[conn.nonce] = conn
print '%s peer %s:%i established. p2pool version: %i %r' % ('Incoming connection from' if conn.incoming else 'Outgoing connection to', conn.addr[0], conn.addr[1], conn.other_version, conn.other_sub_version)
def lost_conn(self, conn, reason):
if conn.nonce not in self.peers:
raise ValueError('''don't have peer''')
if conn is not self.peers[conn.nonce]:
raise ValueError('wrong conn')
del self.peers[conn.nonce]
print 'Lost peer %s:%i - %s' % (conn.addr[0], conn.addr[1], reason.getErrorMessage())
def got_addr(self, (host, port), services, timestamp):
if (host, port) in self.addr_store:
old_services, old_first_seen, old_last_seen = self.addr_store[host, port]
self.addr_store[host, port] = services, old_first_seen, max(old_last_seen, timestamp)
else:
if len(self.addr_store) < 10000:
self.addr_store[host, port] = services, timestamp, timestamp
def handle_shares(self, shares, peer):
print 'handle_shares', (shares, peer)
def handle_share_hashes(self, hashes, peer):
print 'handle_share_hashes', (hashes, peer)
def handle_get_shares(self, hashes, parents, stops, peer):
print 'handle_get_shares', (hashes, parents, stops, peer)
def handle_bestblock(self, header, peer):
print 'handle_bestblock', header
def get_good_peers(self, max_count):
t = time.time()
return [x[0] for x in sorted(self.addr_store.iteritems(), key=lambda (k, (services, first_seen, last_seen)):
-math.log(max(3600, last_seen - first_seen))/math.log(max(3600, t - last_seen))*random.expovariate(1)
)][:max_count]
| 29,251 | Python | .py | 592 | 37.084459 | 304 | 0.60652 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,488 | __init__.py | p2pool_p2pool/p2pool/__init__.py |
import os
import re
import sys
import traceback
import subprocess
def check_output(*popenargs, **kwargs):
process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
raise ValueError((retcode, output))
return output
def _get_version():
try:
try:
return check_output(['git', 'describe', '--always', '--dirty'], cwd=os.path.dirname(os.path.abspath(sys.argv[0]))).strip()
except:
pass
try:
return check_output(['git.cmd', 'describe', '--always', '--dirty'], cwd=os.path.dirname(os.path.abspath(sys.argv[0]))).strip()
except:
pass
root_dir = os.path.abspath(os.path.dirname(sys.argv[0]))
git_dir = os.path.join(root_dir, '.git')
if os.path.exists(git_dir):
head = open(os.path.join(git_dir, 'HEAD')).read().strip()
prefix = 'ref: '
if head.startswith(prefix):
path = head[len(prefix):].split('/')
return open(os.path.join(git_dir, *path)).read().strip()[:7]
else:
return head[:7]
dir_name = os.path.split(root_dir)[1]
match = re.match('p2pool-([.0-9]+)', dir_name)
if match:
return match.groups()[0]
return 'unknown %s' % (dir_name.encode('hex'),)
except Exception, e:
traceback.print_exc()
return 'unknown %s' % (str(e).encode('hex'),)
__version__ = _get_version()
DEBUG = True
| 1,595 | Python | .py | 42 | 29.190476 | 138 | 0.574901 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,489 | work.py | p2pool_p2pool/p2pool/work.py |
from __future__ import division
from collections import deque
import base64
import random
import re
import sys
import time
from twisted.internet import defer
from twisted.python import log
import bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
from bitcoin import helper, script, worker_interface
from util import forest, jsonrpc, variable, deferral, math, pack
import p2pool, p2pool.data as p2pool_data
print_throttle = 0.0
class WorkerBridge(worker_interface.WorkerBridge):
COINBASE_NONCE_LENGTH = 8
def __init__(self, node, my_pubkey_hash, donation_percentage, merged_urls, worker_fee, args, pubkeys, bitcoind):
worker_interface.WorkerBridge.__init__(self)
self.recent_shares_ts_work = []
self.node = node
self.bitcoind = bitcoind
self.pubkeys = pubkeys
self.args = args
self.my_pubkey_hash = my_pubkey_hash
self.donation_percentage = args.donation_percentage
self.worker_fee = args.worker_fee
self.net = self.node.net.PARENT
self.running = True
self.pseudoshare_received = variable.Event()
self.share_received = variable.Event()
self.local_rate_monitor = math.RateMonitor(10*60)
self.local_addr_rate_monitor = math.RateMonitor(10*60)
self.removed_unstales_var = variable.Variable((0, 0, 0))
self.removed_doa_unstales_var = variable.Variable(0)
self.last_work_shares = variable.Variable( {} )
self.my_share_hashes = set()
self.my_doa_share_hashes = set()
self.address_throttle = 0
self.tracker_view = forest.TrackerView(self.node.tracker, forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs,
my_count=lambda share: 1 if share.hash in self.my_share_hashes else 0,
my_doa_count=lambda share: 1 if share.hash in self.my_doa_share_hashes else 0,
my_orphan_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'orphan' else 0,
my_dead_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'doa' else 0,
)))
@self.node.tracker.verified.removed.watch
def _(share):
if share.hash in self.my_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
assert share.share_data['stale_info'] in [None, 'orphan', 'doa'] # we made these shares in this instance
self.removed_unstales_var.set((
self.removed_unstales_var.value[0] + 1,
self.removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 'orphan' else 0),
self.removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 'doa' else 0),
))
if share.hash in self.my_doa_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
self.removed_doa_unstales_var.set(self.removed_doa_unstales_var.value + 1)
# MERGED WORK
self.merged_work = variable.Variable({})
@defer.inlineCallbacks
def set_merged_work(merged_url, merged_userpass):
merged_proxy = jsonrpc.HTTPProxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
while self.running:
auxblock = yield deferral.retry('Error while calling merged getauxblock on %s:' % (merged_url,), 30)(merged_proxy.rpc_getauxblock)()
target = auxblock['target'] if 'target' in auxblock else auxblock['_target']
self.merged_work.set(math.merge_dicts(self.merged_work.value, {auxblock['chainid']: dict(
hash=int(auxblock['hash'], 16),
target='p2pool' if target == 'p2pool' else pack.IntType(256).unpack(target.decode('hex')),
merged_proxy=merged_proxy,
)}))
yield deferral.sleep(1)
for merged_url, merged_userpass in merged_urls:
set_merged_work(merged_url, merged_userpass)
@self.merged_work.changed.watch
def _(new_merged_work):
print 'Got new merged mining work!'
# COMBINE WORK
self.current_work = variable.Variable(None)
def compute_work():
t = self.node.bitcoind_work.value
bb = self.node.best_block_header.value
if bb is not None and bb['previous_block'] == t['previous_block'] and self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(bb)) <= t['bits'].target:
print 'Skipping from block %x to block %x!' % (bb['previous_block'],
bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)))
t = dict(
version=bb['version'],
previous_block=bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)),
bits=bb['bits'], # not always true
coinbaseflags='',
height=t['height'] + 1,
time=bb['timestamp'] + 600, # better way?
transactions=[],
transaction_fees=[],
merkle_link=bitcoin_data.calculate_merkle_link([None], 0),
subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.node.bitcoind_work.value['height']),
last_update=self.node.bitcoind_work.value['last_update'],
)
self.current_work.set(t)
self.node.bitcoind_work.changed.watch(lambda _: compute_work())
self.node.best_block_header.changed.watch(lambda _: compute_work())
compute_work()
self.new_work_event = variable.Event()
@self.current_work.transitioned.watch
def _(before, after):
# trigger LP if version/previous_block/bits changed or transactions changed from nothing
if any(before[x] != after[x] for x in ['version', 'previous_block', 'bits']) or (not before['transactions'] and after['transactions']):
self.new_work_event.happened()
self.merged_work.changed.watch(lambda _: self.new_work_event.happened())
self.node.best_share_var.changed.watch(lambda _: self.new_work_event.happened())
def stop(self):
self.running = False
def get_stale_counts(self):
'''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
my_shares = len(self.my_share_hashes)
my_doa_shares = len(self.my_doa_share_hashes)
delta = self.tracker_view.get_delta_to_last(self.node.best_share_var.value)
my_shares_in_chain = delta.my_count + self.removed_unstales_var.value[0]
my_doa_shares_in_chain = delta.my_doa_count + self.removed_doa_unstales_var.value
orphans_recorded_in_chain = delta.my_orphan_announce_count + self.removed_unstales_var.value[1]
doas_recorded_in_chain = delta.my_dead_announce_count + self.removed_unstales_var.value[2]
my_shares_not_in_chain = my_shares - my_shares_in_chain
my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
@defer.inlineCallbacks
def freshen_addresses(self, c):
self.cur_address_throttle = time.time()
if self.cur_address_throttle - self.address_throttle < 30:
return
self.address_throttle=time.time()
print "ATTEMPTING TO FRESHEN ADDRESS."
self.address = yield deferral.retry('Error getting a dynamic address from bitcoind:', 5)(lambda: self.bitcoind.rpc_getnewaddress('p2pool'))()
new_pubkey = bitcoin_data.address_to_pubkey_hash(self.address, self.net)
self.pubkeys.popleft()
self.pubkeys.addkey(new_pubkey)
print " Updated payout pool:"
for i in range(len(self.pubkeys.keys)):
print ' ...payout %d: %s(%f)' % (i, bitcoin_data.pubkey_hash_to_address(self.pubkeys.keys[i], self.net),self.pubkeys.keyweights[i],)
self.pubkeys.updatestamp(c)
print " Next address rotation in : %fs" % (time.time()-c+self.args.timeaddresses)
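# Worker usernames may carry difficulty hints: a '+<diff>' suffix requests a pseudoshare
# difficulty and a '/<diff>' suffix requests a share difficulty, e.g. 'myaddress+10/5000'.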
def get_user_details(self, username):
contents = re.split('([+/])', username)
assert len(contents) % 2 == 1
user, contents2 = contents[0], contents[1:]
desired_pseudoshare_target = None
desired_share_target = None
for symbol, parameter in zip(contents2[::2], contents2[1::2]):
if symbol == '+':
try:
desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(parameter))
except:
if p2pool.DEBUG:
log.err()
elif symbol == '/':
try:
desired_share_target = bitcoin_data.difficulty_to_target(float(parameter))
except:
if p2pool.DEBUG:
log.err()
if self.args.address == 'dynamic':
i = self.pubkeys.weighted()
pubkey_hash = self.pubkeys.keys[i]
c = time.time()
if (c - self.pubkeys.stamp) > self.args.timeaddresses:
self.freshen_addresses(c)
if random.uniform(0, 100) < self.worker_fee:
pubkey_hash = self.my_pubkey_hash
else:
try:
pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, self.node.net.PARENT)
except: # XXX blah
if self.args.address != 'dynamic':
pubkey_hash = self.my_pubkey_hash
return user, pubkey_hash, desired_share_target, desired_pseudoshare_target
def preprocess_request(self, user):
if (self.node.p2p_node is None or len(self.node.p2p_node.peers) == 0) and self.node.net.PERSIST:
raise jsonrpc.Error_for_code(-12345)(u'p2pool is not connected to any peers')
if time.time() > self.current_work.value['last_update'] + 60:
raise jsonrpc.Error_for_code(-12345)(u'lost contact with bitcoind')
user, pubkey_hash, desired_share_target, desired_pseudoshare_target = self.get_user_details(user)
return pubkey_hash, desired_share_target, desired_pseudoshare_target
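# The local hash rate is estimated from the last 50 submitted pseudoshares: the work they
# represent divided by the time span they cover; None is returned until 50 samples exist.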
def _estimate_local_hash_rate(self):
if len(self.recent_shares_ts_work) == 50:
hash_rate = sum(work for ts, work in self.recent_shares_ts_work[1:])//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
if hash_rate > 0:
return hash_rate
return None
def get_local_rates(self):
miner_hash_rates = {}
miner_dead_hash_rates = {}
datums, dt = self.local_rate_monitor.get_datums_in_last()
for datum in datums:
miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
if datum['dead']:
miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
return miner_hash_rates, miner_dead_hash_rates
def get_local_addr_rates(self):
addr_hash_rates = {}
datums, dt = self.local_addr_rate_monitor.get_datums_in_last()
for datum in datums:
addr_hash_rates[datum['pubkey_hash']] = addr_hash_rates.get(datum['pubkey_hash'], 0) + datum['work']/dt
return addr_hash_rates
def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target):
global print_throttle
if (self.node.p2p_node is None or len(self.node.p2p_node.peers) == 0) and self.node.net.PERSIST:
raise jsonrpc.Error_for_code(-12345)(u'p2pool is not connected to any peers')
if self.node.best_share_var.value is None and self.node.net.PERSIST:
raise jsonrpc.Error_for_code(-12345)(u'p2pool is downloading shares')
if set(r[1:] if r.startswith('!') else r for r in self.node.bitcoind_work.value['rules']) - set(getattr(self.node.net, 'SOFTFORKS_REQUIRED', [])):
raise jsonrpc.Error_for_code(-12345)(u'unknown rule activated')
if self.merged_work.value:
tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value)
mm_hashes = [self.merged_work.value.get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
merkle_root=bitcoin_data.merkle_hash(mm_hashes),
size=size,
nonce=0,
))
mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in self.merged_work.value.iteritems()]
else:
mm_data = ''
mm_later = []
tx_hashes = [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in self.current_work.value['transactions']]
tx_map = dict(zip(tx_hashes, self.current_work.value['transactions']))
previous_share = self.node.tracker.items[self.node.best_share_var.value] if self.node.best_share_var.value is not None else None
if previous_share is None:
share_type = p2pool_data.Share
else:
previous_share_type = type(previous_share)
if previous_share_type.SUCCESSOR is None or self.node.tracker.get_height(previous_share.hash) < self.node.net.CHAIN_LENGTH:
share_type = previous_share_type
else:
successor_type = previous_share_type.SUCCESSOR
counts = p2pool_data.get_desired_version_counts(self.node.tracker,
self.node.tracker.get_nth_parent_hash(previous_share.hash, self.node.net.CHAIN_LENGTH*9//10), self.node.net.CHAIN_LENGTH//10)
upgraded = counts.get(successor_type.VERSION, 0)/sum(counts.itervalues())
if upgraded > .65:
print 'Switchover imminent. Upgraded: %.3f%% Threshold: %.3f%%' % (upgraded*100, 95)
# Share -> NewShare only valid if 95% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] for new version
if counts.get(successor_type.VERSION, 0) > sum(counts.itervalues())*95//100:
share_type = successor_type
else:
share_type = previous_share_type
if desired_share_target is None:
desired_share_target = 2**256-1
local_hash_rate = self._estimate_local_hash_rate()
if local_hash_rate is not None:
desired_share_target = min(desired_share_target,
bitcoin_data.average_attempts_to_target(local_hash_rate * self.node.net.SHARE_PERIOD / 0.0167)) # limit to 1.67% of pool shares by modulating share difficulty
local_addr_rates = self.get_local_addr_rates()
lookbehind = 3600//self.node.net.SHARE_PERIOD
block_subsidy = self.node.bitcoind_work.value['subsidy']
if previous_share is not None and self.node.tracker.get_height(previous_share.hash) > lookbehind:
expected_payout_per_block = local_addr_rates.get(pubkey_hash, 0)/p2pool_data.get_pool_attempts_per_second(self.node.tracker, self.node.best_share_var.value, lookbehind) \
* block_subsidy*(1-self.donation_percentage/100) # XXX doesn't use global stale rate to compute pool hash
if expected_payout_per_block < self.node.net.PARENT.DUST_THRESHOLD:
desired_share_target = min(desired_share_target,
bitcoin_data.average_attempts_to_target((bitcoin_data.target_to_average_attempts(self.node.bitcoind_work.value['bits'].target)*self.node.net.SPREAD)*self.node.net.PARENT.DUST_THRESHOLD/block_subsidy)
)
if True:
share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction(
tracker=self.node.tracker,
share_data=dict(
previous_share_hash=self.node.best_share_var.value,
coinbase=(script.create_push_script([
self.current_work.value['height'],
] + ([mm_data] if mm_data else []) + [
]) + self.current_work.value['coinbaseflags'])[:100],
nonce=random.randrange(2**32),
pubkey_hash=pubkey_hash,
subsidy=self.current_work.value['subsidy'],
donation=math.perfect_round(65535*self.donation_percentage/100),
stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
'orphan' if orphans > orphans_recorded_in_chain else
'doa' if doas > doas_recorded_in_chain else
None
)(*self.get_stale_counts()),
desired_version=(share_type.SUCCESSOR if share_type.SUCCESSOR is not None else share_type).VOTING_VERSION,
),
block_target=self.current_work.value['bits'].target,
desired_timestamp=int(time.time() + 0.5),
desired_target=desired_share_target,
ref_merkle_link=dict(branch=[], index=0),
desired_other_transaction_hashes_and_fees=zip(tx_hashes, self.current_work.value['transaction_fees']),
net=self.node.net,
known_txs=tx_map,
base_subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.current_work.value['height']),
)
packed_gentx = bitcoin_data.tx_id_type.pack(gentx) # stratum miners work with stripped transactions
other_transactions = [tx_map[tx_hash] for tx_hash in other_transaction_hashes]
mm_later = [(dict(aux_work, target=aux_work['target'] if aux_work['target'] != 'p2pool' else share_info['bits'].target), index, hashes) for aux_work, index, hashes in mm_later]
if desired_pseudoshare_target is None:
target = 2**256-1
local_hash_rate = self._estimate_local_hash_rate()
if local_hash_rate is not None:
target = min(target,
bitcoin_data.average_attempts_to_target(local_hash_rate * 1)) # limit to 1 share response every second by modulating pseudoshare difficulty
else:
target = desired_pseudoshare_target
target = max(target, share_info['bits'].target)
for aux_work, index, hashes in mm_later:
target = max(target, aux_work['target'])
target = math.clip(target, self.node.net.PARENT.SANE_TARGET_RANGE)
getwork_time = time.time()
lp_count = self.new_work_event.times
merkle_link = bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0) if share_info.get('segwit_data', None) is None else share_info['segwit_data']['txid_merkle_link']
if print_throttle == 0.0:
print_throttle = time.time()
else:
current_time = time.time()
if (current_time - print_throttle) > 5.0:
print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
bitcoin_data.target_to_difficulty(target),
bitcoin_data.target_to_difficulty(share_info['bits'].target),
self.current_work.value['subsidy']*1e-8, self.node.net.PARENT.SYMBOL,
len(self.current_work.value['transactions']),
)
print_throttle = time.time()
#need this for stats
self.last_work_shares.value[bitcoin_data.pubkey_hash_to_address(pubkey_hash, self.node.net.PARENT)]=share_info['bits']
ba = dict(
version=max(self.current_work.value['version'], 0x20000000),
previous_block=self.current_work.value['previous_block'],
merkle_link=merkle_link,
coinb1=packed_gentx[:-self.COINBASE_NONCE_LENGTH-4],
coinb2=packed_gentx[-4:],
timestamp=self.current_work.value['time'],
bits=self.current_work.value['bits'],
share_target=target,
)
received_header_hashes = set()
def got_response(header, user, coinbase_nonce):
assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH
new_packed_gentx = packed_gentx[:-self.COINBASE_NONCE_LENGTH-4] + coinbase_nonce + packed_gentx[-4:] if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else packed_gentx
new_gentx = bitcoin_data.tx_type.unpack(new_packed_gentx) if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else gentx
if bitcoin_data.is_segwit_tx(gentx): # reintroduce witness data to the gentx produced by stratum miners
new_gentx['marker'] = 0
new_gentx['flag'] = gentx['flag']
new_gentx['witness'] = gentx['witness']
header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
try:
if pow_hash <= header['bits'].target or p2pool.DEBUG:
helper.submit_block(dict(header=header, txs=[new_gentx] + other_transactions), False, self.node.factory, self.node.bitcoind, self.node.bitcoind_work, self.node.net)
if pow_hash <= header['bits'].target:
print
print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
print
except:
log.err(None, 'Error while processing potential block:')
user, _, _, _ = self.get_user_details(user)
assert header['previous_block'] == ba['previous_block']
assert header['merkle_root'] == bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), merkle_link)
assert header['bits'] == ba['bits']
on_time = self.new_work_event.times == lp_count
for aux_work, index, hashes in mm_later:
try:
if pow_hash <= aux_work['target'] or p2pool.DEBUG:
df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
bitcoin_data.aux_pow_type.pack(dict(
merkle_tx=dict(
tx=new_gentx,
block_hash=header_hash,
merkle_link=merkle_link,
),
merkle_link=bitcoin_data.calculate_merkle_link(hashes, index),
parent_block_header=header,
)).encode('hex'),
)
@df.addCallback
def _(result, aux_work=aux_work):
if result != (pow_hash <= aux_work['target']):
print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
else:
print 'Merged block submittal result: %s' % (result,)
@df.addErrback
def _(err):
log.err(err, 'Error submitting merged block:')
except:
log.err(None, 'Error while processing merged mining POW:')
if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes:
last_txout_nonce = pack.IntType(8*self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce)
share = get_share(header, last_txout_nonce)
print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
user,
p2pool_data.format_hash(share.hash),
p2pool_data.format_hash(share.previous_hash),
time.time() - getwork_time,
' DEAD ON ARRIVAL' if not on_time else '',
)
self.my_share_hashes.add(share.hash)
if not on_time:
self.my_doa_share_hashes.add(share.hash)
self.node.tracker.add(share)
self.node.set_best_share()
try:
if (pow_hash <= header['bits'].target or p2pool.DEBUG) and self.node.p2p_node is not None:
self.node.p2p_node.broadcast_share(share.hash)
except:
log.err(None, 'Error forwarding block solution:')
self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time, share.hash)
if pow_hash > target:
print 'Worker %s submitted share with hash > target:' % (user,)
print ' Hash: %56x' % (pow_hash,)
print ' Target: %56x' % (target,)
elif header_hash in received_header_hashes:
print >>sys.stderr, 'Worker %s submitted share more than once!' % (user,)
else:
received_header_hashes.add(header_hash)
self.pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, user)
self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
while len(self.recent_shares_ts_work) > 50:
self.recent_shares_ts_work.pop(0)
self.local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=user, share_target=share_info['bits'].target))
self.local_addr_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), pubkey_hash=pubkey_hash))
return on_time
return ba, got_response
| 26,833 | Python | .py | 415 | 49.026506 | 223 | 0.601579 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,490 | node.py | p2pool_p2pool/p2pool/node.py |
import random
import sys
import time
from twisted.internet import defer, reactor
from twisted.python import log
from p2pool import data as p2pool_data, p2p
from p2pool.bitcoin import data as bitcoin_data, helper, height_tracker
from p2pool.util import deferral, variable
class P2PNode(p2p.Node):
def __init__(self, node, **kwargs):
self.node = node
p2p.Node.__init__(self,
best_share_hash_func=lambda: node.best_share_var.value,
net=node.net,
known_txs_var=node.known_txs_var,
mining_txs_var=node.mining_txs_var,
**kwargs)
def handle_shares(self, shares, peer):
if len(shares) > 5:
print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)
new_count = 0
all_new_txs = {}
for share, new_txs in shares:
if new_txs is not None:
all_new_txs.update((bitcoin_data.hash256(bitcoin_data.tx_type.pack(new_tx)), new_tx) for new_tx in new_txs)
if share.hash in self.node.tracker.items:
#print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
continue
new_count += 1
#print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer_addr)
self.node.tracker.add(share)
new_known_txs = dict(self.node.known_txs_var.value)
new_known_txs.update(all_new_txs)
self.node.known_txs_var.set(new_known_txs)
if new_count:
self.node.set_best_share()
if len(shares) > 5:
print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(self.node.tracker.items), 2*self.node.net.CHAIN_LENGTH)
@defer.inlineCallbacks
def handle_share_hashes(self, hashes, peer):
new_hashes = [x for x in hashes if x not in self.node.tracker.items]
if not new_hashes:
return
try:
shares = yield peer.get_shares(
hashes=new_hashes,
parents=0,
stops=[],
)
except:
log.err(None, 'in handle_share_hashes:')
else:
self.handle_shares([(share, []) for share in shares], peer)
def handle_get_shares(self, hashes, parents, stops, peer):
parents = min(parents, 1000//len(hashes))
stops = set(stops)
shares = []
for share_hash in hashes:
for share in self.node.tracker.get_chain(share_hash, min(parents + 1, self.node.tracker.get_height(share_hash))):
if share.hash in stops:
break
shares.append(share)
if len(shares) > 0:
print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
return shares
def handle_bestblock(self, header, peer):
if self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header)) > header['bits'].target:
raise p2p.PeerMisbehavingError('received block header fails PoW test')
self.node.handle_header(header)
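# broadcast_share() sends a share, plus up to four of its not-yet-shared ancestors, to
# every peer, skipping for each share the peer it was originally received from.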
def broadcast_share(self, share_hash):
shares = []
for share in self.node.tracker.get_chain(share_hash, min(5, self.node.tracker.get_height(share_hash))):
if share.hash in self.shared_share_hashes:
break
self.shared_share_hashes.add(share.hash)
shares.append(share)
for peer in self.peers.itervalues():
peer.sendShares([share for share in shares if share.peer_addr != peer.addr], self.node.tracker, self.node.known_txs_var.value, include_txs_with=[share_hash])
def start(self):
p2p.Node.start(self)
self.shared_share_hashes = set(self.node.tracker.items)
self.node.tracker.removed.watch_weakref(self, lambda self, share: self.shared_share_hashes.discard(share.hash))
@apply
@defer.inlineCallbacks
def download_shares():
while True:
desired = yield self.node.desired_var.get_when_satisfies(lambda val: len(val) != 0)
peer_addr, share_hash = random.choice(desired)
if len(self.peers) == 0:
yield deferral.sleep(1)
continue
peer = random.choice(self.peers.values())
print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
try:
shares = yield peer.get_shares(
hashes=[share_hash],
parents=random.randrange(500), # randomize parents so that we eventually get past a too large block of shares
stops=list(set(self.node.tracker.heads) | set(
self.node.tracker.get_nth_parent_hash(head, min(max(0, self.node.tracker.get_height_and_last(head)[0] - 1), 10)) for head in self.node.tracker.heads
))[:100],
)
except defer.TimeoutError:
print 'Share request timed out!'
continue
except:
log.err(None, 'in download_shares:')
continue
if not shares:
yield deferral.sleep(1) # sleep so we don't keep rerequesting the same share nobody has
continue
self.handle_shares([(share, []) for share in shares], peer)
@self.node.best_block_header.changed.watch
def _(header):
for peer in self.peers.itervalues():
peer.send_bestblock(header=header)
# send share when the chain changes to their chain
self.node.best_share_var.changed.watch(self.broadcast_share)
@self.node.tracker.verified.added.watch
def _(share):
if not (share.pow_hash <= share.header['bits'].target):
return
def spread():
if (self.node.get_height_rel_highest(share.header['previous_block']) > -5 or
self.node.bitcoind_work.value['previous_block'] in [share.header['previous_block'], share.header_hash]):
self.broadcast_share(share.hash)
spread()
reactor.callLater(5, spread) # so get_height_rel_highest can update
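# Node ties together the bitcoind connection, the share tracker and the work/known
# transaction state; the P2PNode above is attached to it externally after start().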
class Node(object):
def __init__(self, factory, bitcoind, shares, known_verified_share_hashes, net):
self.factory = factory
self.bitcoind = bitcoind
self.net = net
self.tracker = p2pool_data.OkayTracker(self.net)
for share in shares:
self.tracker.add(share)
for share_hash in known_verified_share_hashes:
if share_hash in self.tracker.items:
self.tracker.verified.add(self.tracker.items[share_hash])
self.p2p_node = None # overwritten externally
@defer.inlineCallbacks
def start(self):
stop_signal = variable.Event()
self.stop = stop_signal.happened
# BITCOIND WORK
self.bitcoind_work = variable.Variable((yield helper.getwork(self.bitcoind)))
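# Refresh bitcoind work roughly every 15 seconds, or immediately when the P2P
# factory signals that a new block was announced.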
@defer.inlineCallbacks
def work_poller():
while stop_signal.times == 0:
flag = self.factory.new_block.get_deferred()
try:
self.bitcoind_work.set((yield helper.getwork(self.bitcoind, self.bitcoind_work.value['use_getblocktemplate'])))
except:
log.err()
yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
work_poller()
# PEER WORK
self.best_block_header = variable.Variable(None)
@defer.inlineCallbacks  # body yields on rpc_getblockheader below, so it must run as an inlineCallbacks coroutine
def handle_header(new_header, valid=False):
new_hash = self.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(new_header))
# check that header matches current target
if new_hash > self.bitcoind_work.value['bits'].target:
return
if not valid:
try:
_ = (yield self.bitcoind.rpc_getblockheader(new_hash))
except:
return
bitcoind_best_block = self.bitcoind_work.value['previous_block']
if (self.best_block_header.value is None
or (
new_header['previous_block'] == bitcoind_best_block and
bitcoin_data.hash256(bitcoin_data.block_header_type.pack(self.best_block_header.value)) == bitcoind_best_block
) # new is child of current and previous is current
or (
bitcoin_data.hash256(bitcoin_data.block_header_type.pack(new_header)) == bitcoind_best_block and
self.best_block_header.value['previous_block'] != bitcoind_best_block
)): # new is current and previous is not a child of current
self.best_block_header.set(new_header)
self.handle_header = handle_header
@defer.inlineCallbacks
def poll_header():
if self.factory.conn.value is None:
return
handle_header((yield self.factory.conn.value.get_block_header(self.bitcoind_work.value['previous_block'])), True)
self.bitcoind_work.changed.watch(lambda _: poll_header())
yield deferral.retry('Error while requesting best block header:')(poll_header)()
# BEST SHARE
self.known_txs_var = variable.Variable({}) # hash -> tx
self.mining_txs_var = variable.Variable({}) # hash -> tx
self.get_height_rel_highest = yield height_tracker.get_height_rel_highest_func(self.bitcoind, self.factory, lambda: self.bitcoind_work.value['previous_block'], self.net)
self.best_share_var = variable.Variable(None)
self.desired_var = variable.Variable(None)
self.bitcoind_work.changed.watch(lambda _: self.set_best_share())
self.set_best_share()
# setup p2p logic and join p2pool network
# update mining_txs according to getwork results
@self.bitcoind_work.changed.run_and_watch
def _(_=None):
new_mining_txs = {}
new_known_txs = dict(self.known_txs_var.value)
for tx_hash, tx in zip(self.bitcoind_work.value['transaction_hashes'], self.bitcoind_work.value['transactions']):
new_mining_txs[tx_hash] = tx
new_known_txs[tx_hash] = tx
self.mining_txs_var.set(new_mining_txs)
self.known_txs_var.set(new_known_txs)
# add p2p transactions from bitcoind to known_txs
@self.factory.new_tx.watch
def _(tx):
new_known_txs = dict(self.known_txs_var.value)
new_known_txs[bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx))] = tx
self.known_txs_var.set(new_known_txs)
# forward transactions seen to bitcoind
@self.known_txs_var.transitioned.watch
@defer.inlineCallbacks
def _(before, after):
yield deferral.sleep(random.expovariate(1/1))
if self.factory.conn.value is None:
return
for tx_hash in set(after) - set(before):
self.factory.conn.value.send_tx(tx=after[tx_hash])
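# When a verified share's proof of work also meets the full block target, rebuild
# the complete block from known transactions and submit it to bitcoind.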
@self.tracker.verified.added.watch
def _(share):
if not (share.pow_hash <= share.header['bits'].target):
return
block = share.as_block(self.tracker, self.known_txs_var.value)
if block is None:
print >>sys.stderr, 'GOT INCOMPLETE BLOCK FROM PEER! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
return
helper.submit_block(block, True, self.factory, self.bitcoind, self.bitcoind_work, self.net)
print
print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
print
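# Periodically shrink known_txs_var down to transactions still referenced by peers,
# by the current mining work, or by recent shares in the chain.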
def forget_old_txs():
new_known_txs = {}
if self.p2p_node is not None:
for peer in self.p2p_node.peers.itervalues():
new_known_txs.update(peer.remembered_txs)
new_known_txs.update(self.mining_txs_var.value)
for share in self.tracker.get_chain(self.best_share_var.value, min(120, self.tracker.get_height(self.best_share_var.value))):
for tx_hash in share.new_transaction_hashes:
if tx_hash in self.known_txs_var.value:
new_known_txs[tx_hash] = self.known_txs_var.value[tx_hash]
self.known_txs_var.set(new_known_txs)
t = deferral.RobustLoopingCall(forget_old_txs)
t.start(10)
stop_signal.watch(t.stop)
t = deferral.RobustLoopingCall(self.clean_tracker)
t.start(5)
stop_signal.watch(t.stop)
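# Re-run the tracker's think() to choose the best share head, publish which ancestor
# shares are still wanted, and penalize peers that sent known-bad shares.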
def set_best_share(self):
best, desired, decorated_heads, bad_peer_addresses = self.tracker.think(self.get_height_rel_highest, self.bitcoind_work.value['previous_block'], self.bitcoind_work.value['bits'], self.known_txs_var.value)
self.best_share_var.set(best)
self.desired_var.set(desired)
if self.p2p_node is not None:
for bad_peer_address in bad_peer_addresses:
# XXX O(n)
for peer in self.p2p_node.peers.itervalues():
if peer.addr == bad_peer_address:
peer.badPeerHappened()
break
def get_current_txouts(self):
return p2pool_data.get_expected_payouts(self.tracker, self.best_share_var.value, self.bitcoind_work.value['bits'].target, self.bitcoind_work.value['subsidy'], self.net)
def clean_tracker(self):
best, desired, decorated_heads, bad_peer_addresses = self.tracker.think(self.get_height_rel_highest, self.bitcoind_work.value['previous_block'], self.bitcoind_work.value['bits'], self.known_txs_var.value)
# eat away at heads
if decorated_heads:
for i in xrange(1000):
to_remove = set()
for share_hash, tail in self.tracker.heads.iteritems():
if share_hash in [head_hash for score, head_hash in decorated_heads[-5:]]:
#print 1
continue
if self.tracker.items[share_hash].time_seen > time.time() - 300:
#print 2
continue
if share_hash not in self.tracker.verified.items and max(self.tracker.items[after_tail_hash].time_seen for after_tail_hash in self.tracker.reverse.get(tail)) > time.time() - 120: # XXX stupid
#print 3
continue
to_remove.add(share_hash)
if not to_remove:
break
for share_hash in to_remove:
if share_hash in self.tracker.verified.items:
self.tracker.verified.remove(share_hash)
self.tracker.remove(share_hash)
#print "_________", to_remove
# drop tails
for i in xrange(1000):
to_remove = set()
for tail, heads in self.tracker.tails.iteritems():
if min(self.tracker.get_height(head) for head in heads) < 2*self.tracker.net.CHAIN_LENGTH + 10:
continue
to_remove.update(self.tracker.reverse.get(tail, set()))
if not to_remove:
break
# if removed from this, it must be removed from verified
#start = time.time()
for aftertail in to_remove:
if self.tracker.items[aftertail].previous_hash not in self.tracker.tails:
print "erk", aftertail, self.tracker.items[aftertail].previous_hash
continue
if aftertail in self.tracker.verified.items:
self.tracker.verified.remove(aftertail)
self.tracker.remove(aftertail)
#end = time.time()
#print "removed! %i %f" % (len(to_remove), (end - start)/len(to_remove))
self.set_best_share()
| 16,770 | Python | .py | 307 | 39.892508 | 212 | 0.592333 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,491 | main.py | p2pool_p2pool/p2pool/main.py |
from __future__ import division
import base64
import gc
import json
import os
import random
import sys
import time
import signal
import traceback
import urlparse
if '--iocp' in sys.argv:
from twisted.internet import iocpreactor
iocpreactor.install()
from twisted.internet import defer, reactor, protocol, tcp
from twisted.web import server
from twisted.python import log
from nattraverso import portmapper, ipdiscover
import bitcoin.p2p as bitcoin_p2p, bitcoin.data as bitcoin_data
from bitcoin import stratum, worker_interface, helper
from util import fixargparse, jsonrpc, variable, deferral, math, logging, switchprotocol
from . import networks, web, work
import p2pool, p2pool.data as p2pool_data, p2pool.node as p2pool_node
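# Pool of payout pubkey hashes used in dynamic-address mode; each key gets a random
# weight and weighted() picks an index in proportion to those weights.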
class keypool():
keys = []
keyweights = []
stamp = time.time()
payouttotal = 0.0
def addkey(self, n):
self.keys.append(n)
self.keyweights.append(random.uniform(0,100.0))
def delkey(self, n):
try:
i=self.keys.index(n)
self.keys.pop(i)
self.keyweights.pop(i)
except:
pass
def weighted(self):
choice=random.uniform(0,sum(self.keyweights))
tot = 0.0
ind = 0
for i in (self.keyweights):
tot += i
if tot >= choice:
return ind
ind += 1
return ind
def popleft(self):
if (len(self.keys) > 0):
dummyval=self.keys.pop(0)
if (len(self.keyweights) > 0):
dummyval=self.keyweights.pop(0)
def updatestamp(self, n):
self.stamp = n
def paytotal(self):
self.payouttotal = 0.0
for i in range(len(pubkeys.keys)):
self.payouttotal += node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(pubkeys.keys[i]), 0)*1e-8
return self.payouttotal
def getpaytotal(self):
return self.payouttotal
@defer.inlineCallbacks
def main(args, net, datadir_path, merged_urls, worker_endpoint):
try:
print 'p2pool (version %s)' % (p2pool.__version__,)
print
@defer.inlineCallbacks
def connect_p2p():
# connect to bitcoind over bitcoin-p2p
print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
factory = bitcoin_p2p.ClientFactory(net.PARENT)
reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
def long():
print ''' ...taking a while. Common reasons for this include all of bitcoind's connection slots being used...'''
long_dc = reactor.callLater(5, long)
yield factory.getProtocol() # waits until handshake is successful
if not long_dc.called: long_dc.cancel()
print ' ...success!'
print
defer.returnValue(factory)
if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections
factory = yield connect_p2p()
# connect to bitcoind over JSON-RPC and do initial getmemorypool
url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port)
print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
bitcoind = jsonrpc.HTTPProxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
yield helper.check(bitcoind, net)
temp_work = yield helper.getwork(bitcoind)
bitcoind_getinfo_var = variable.Variable(None)
@defer.inlineCallbacks
def poll_warnings():
bitcoind_getinfo_var.set((yield deferral.retry('Error while calling getinfo:')(bitcoind.rpc_getnetworkinfo)()))
yield poll_warnings()
deferral.RobustLoopingCall(poll_warnings).start(20*60)
print ' ...success!'
print ' Current block hash: %x' % (temp_work['previous_block'],)
print ' Current block height: %i' % (temp_work['height'] - 1,)
print
if not args.testnet:
factory = yield connect_p2p()
print 'Determining payout address...'
pubkeys = keypool()
if args.pubkey_hash is None and args.address != 'dynamic':
address_path = os.path.join(datadir_path, 'cached_payout_address')
if os.path.exists(address_path):
with open(address_path, 'rb') as f:
address = f.read().strip('\r\n')
print ' Loaded cached address: %s...' % (address,)
else:
address = None
if address is not None:
res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
if not res['isvalid'] or not res['ismine']:
print ' Cached address is either invalid or not controlled by local bitcoind!'
address = None
if address is None:
print ' Getting payout address from bitcoind...'
address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
with open(address_path, 'wb') as f:
f.write(address)
my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
print ' ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
print
pubkeys.addkey(my_pubkey_hash)
elif args.address != 'dynamic':
my_pubkey_hash = args.pubkey_hash
print ' ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
print
pubkeys.addkey(my_pubkey_hash)
else:
print ' Entering dynamic address mode.'
if args.numaddresses < 2:
print ' ERROR: Can not use fewer than 2 addresses in dynamic mode. Resetting to 2.'
args.numaddresses = 2
for i in range(args.numaddresses):
address = yield deferral.retry('Error getting a dynamic address from bitcoind:', 5)(lambda: bitcoind.rpc_getnewaddress('p2pool'))()
new_pubkey = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
pubkeys.addkey(new_pubkey)
pubkeys.updatestamp(time.time())
my_pubkey_hash = pubkeys.keys[0]
for i in range(len(pubkeys.keys)):
print ' ...payout %d: %s' % (i, bitcoin_data.pubkey_hash_to_address(pubkeys.keys[i], net.PARENT),)
print "Loading shares..."
shares = {}
known_verified = set()
def share_cb(share):
share.time_seen = 0 # XXX
shares[share.hash] = share
if len(shares) % 1000 == 0 and shares:
print " %i" % (len(shares),)
ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net, share_cb, known_verified.add)
print " ...done loading %i shares (%i verified)!" % (len(shares), len(known_verified))
print
print 'Initializing work...'
node = p2pool_node.Node(factory, bitcoind, shares.values(), known_verified, net)
yield node.start()
for share_hash in shares:
if share_hash not in node.tracker.items:
ss.forget_share(share_hash)
for share_hash in known_verified:
if share_hash not in node.tracker.verified.items:
ss.forget_verified_share(share_hash)
node.tracker.removed.watch(lambda share: ss.forget_share(share.hash))
node.tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
def save_shares():
for share in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 2*net.CHAIN_LENGTH)):
ss.add_share(share)
if share.hash in node.tracker.verified.items:
ss.add_verified_hash(share.hash)
deferral.RobustLoopingCall(save_shares).start(60)
if len(shares) > net.CHAIN_LENGTH:
best_share = shares[node.best_share_var.value]
previous_share = shares[best_share.share_data['previous_share_hash']]
counts = p2pool_data.get_desired_version_counts(node.tracker, node.tracker.get_nth_parent_hash(previous_share.hash, net.CHAIN_LENGTH*9//10), net.CHAIN_LENGTH//10)
p2pool_data.update_min_protocol_version(counts, best_share)
print ' ...success!'
print
print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
@defer.inlineCallbacks
def parse(host):
port = net.P2P_PORT
if ':' in host:
host, port_str = host.split(':')
port = int(port_str)
defer.returnValue(((yield reactor.resolve(host)), port))
addrs = {}
if os.path.exists(os.path.join(datadir_path, 'addrs')):
try:
with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read())))
except:
print >>sys.stderr, 'error parsing addrs'
for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
try:
addr = yield addr_df
if addr not in addrs:
addrs[addr] = (0, time.time(), time.time())
except:
log.err()
connect_addrs = set()
for addr_df in map(parse, args.p2pool_nodes):
try:
connect_addrs.add((yield addr_df))
except:
log.err()
node.p2p_node = p2pool_node.P2PNode(node,
port=args.p2pool_port,
max_incoming_conns=args.p2pool_conns,
addr_store=addrs,
connect_addrs=connect_addrs,
desired_outgoing_conns=args.p2pool_outgoing_conns,
advertise_ip=args.advertise_ip,
external_ip=args.p2pool_external_ip,
)
node.p2p_node.start()
def save_addrs():
with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
f.write(json.dumps(node.p2p_node.addr_store.items()))
deferral.RobustLoopingCall(save_addrs).start(60)
print ' ...success!'
print
if args.upnp:
@defer.inlineCallbacks
def upnp_thread():
while True:
try:
is_lan, lan_ip = yield ipdiscover.get_local_ip()
if is_lan:
pm = yield portmapper.get_port_mapper()
yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
except defer.TimeoutError:
pass
except:
if p2pool.DEBUG:
log.err(None, 'UPnP error:')
yield deferral.sleep(random.expovariate(1/120))
upnp_thread()
# start listening for workers with a JSON-RPC server
print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
wb = work.WorkerBridge(node, my_pubkey_hash, args.donation_percentage, merged_urls, args.worker_fee, args, pubkeys, bitcoind)
web_root = web.get_web_root(wb, datadir_path, bitcoind_getinfo_var, static_dir=args.web_static)
caching_wb = worker_interface.CachingWorkerBridge(wb)
worker_interface.WorkerInterface(caching_wb).attach_to(web_root, get_handler=lambda request: request.redirect('/static/'))
web_serverfactory = server.Site(web_root)
serverfactory = switchprotocol.FirstByteSwitchFactory({'{': stratum.StratumServerFactory(caching_wb)}, web_serverfactory)
deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], serverfactory, interface=worker_endpoint[0])
with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:
pass
print ' ...success!'
print
# done!
print 'Started successfully!'
print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],)
if args.donation_percentage > 1.1:
print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,)
elif args.donation_percentage < .9:
print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,)
else:
print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,)
print 'You can increase this amount with --give-author argument! (or decrease it, if you must)'
print
if hasattr(signal, 'SIGALRM'):
signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
))
signal.siginterrupt(signal.SIGALRM, False)
deferral.RobustLoopingCall(signal.alarm, 30).start(1)
if args.irc_announce:
from twisted.words.protocols import irc
class IRCClient(irc.IRCClient):
nickname = 'p2pool%02i' % (random.randrange(100),)
channel = net.ANNOUNCE_CHANNEL
def lineReceived(self, line):
if p2pool.DEBUG:
print repr(line)
irc.IRCClient.lineReceived(self, line)
def signedOn(self):
self.in_channel = False
irc.IRCClient.signedOn(self)
self.factory.resetDelay()
self.join(self.channel)
@defer.inlineCallbacks
def new_share(share):
if not self.in_channel:
return
if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
yield deferral.sleep(random.expovariate(1/60))
message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
self.say(self.channel, message)
self._remember_message(message)
self.watch_id = node.tracker.verified.added.watch(new_share)
self.recent_messages = []
def joined(self, channel):
self.in_channel = True
def left(self, channel):
self.in_channel = False
def _remember_message(self, message):
self.recent_messages.append(message)
while len(self.recent_messages) > 100:
self.recent_messages.pop(0)
def privmsg(self, user, channel, message):
if channel == self.channel:
self._remember_message(message)
def connectionLost(self, reason):
node.tracker.verified.added.unwatch(self.watch_id)
print 'IRC connection lost:', reason.getErrorMessage()
class IRCClientFactory(protocol.ReconnectingClientFactory):
protocol = IRCClient
reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory(), bindAddress=(worker_endpoint[0], 0))
@defer.inlineCallbacks
def status_thread():
last_str = None
last_time = 0
while True:
yield deferral.sleep(3)
try:
height = node.tracker.get_height(node.best_share_var.value)
this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
height,
len(node.tracker.verified.items),
len(node.tracker.items),
len(node.p2p_node.peers),
sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
datums, dt = wb.local_rate_monitor.get_datums_in_last()
my_att_s = sum(datum['work']/dt for datum in datums)
my_shares_per_s = sum(datum['work']/dt/bitcoin_data.target_to_average_attempts(datum['share_target']) for datum in datums)
this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
math.format(int(my_att_s)),
math.format_dt(dt),
math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
math.format_dt(1/my_shares_per_s) if my_shares_per_s else '???',
)
if height > 2:
(stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
real_att_s = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)
paystr = ''
paytot = 0.0
for i in range(len(pubkeys.keys)):
curtot = node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(pubkeys.keys[i]), 0)
paytot += curtot*1e-8
paystr += "(%.4f)" % (curtot*1e-8,)
paystr += "=%.4f" % (paytot,)
this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %s %s' % (
shares, stale_orphan_shares, stale_doa_shares,
math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
paystr, net.PARENT.SYMBOL,
)
this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
math.format(int(real_att_s)),
100*stale_prop,
math.format_dt(2**256 / node.bitcoind_work.value['bits'].target / real_att_s),
)
for warning in p2pool_data.get_warnings(node.tracker, node.best_share_var.value, net, bitcoind_getinfo_var.value, node.bitcoind_work.value):
print >>sys.stderr, '#'*40
print >>sys.stderr, '>>> Warning: ' + warning
print >>sys.stderr, '#'*40
if gc.garbage:
print '%i pieces of uncollectable cyclic garbage! Types: %r' % (len(gc.garbage), map(type, gc.garbage))
if this_str != last_str or time.time() > last_time + 15:
print this_str
last_str = this_str
last_time = time.time()
except:
log.err()
status_thread()
except:
reactor.stop()
log.err(None, 'Fatal error:')
def run():
if not hasattr(tcp.Client, 'abortConnection'):
print "Twisted doesn't have abortConnection! Upgrade to a newer version of Twisted to avoid memory leaks!"
print 'Pausing for 3 seconds...'
time.sleep(3)
realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
parser.add_argument('--version', action='version', version=p2pool.__version__)
parser.add_argument('--net',
help='use specified network (default: bitcoin)',
action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
parser.add_argument('--testnet',
help='''use the network's testnet''',
action='store_const', const=True, default=False, dest='testnet')
parser.add_argument('--debug',
help='enable debugging mode',
action='store_const', const=True, default=False, dest='debug')
parser.add_argument('-a', '--address',
help='generate payouts to this address (default: <address requested from bitcoind>), or (dynamic)',
type=str, action='store', default=None, dest='address')
parser.add_argument('-i', '--numaddresses',
help='number of bitcoin auto-generated addresses to maintain for getwork dynamic address allocation',
type=int, action='store', default=2, dest='numaddresses')
parser.add_argument('-t', '--timeaddresses',
help='seconds between acquisition of new address and removal of single old (default: 2 days or 172800s)',
type=int, action='store', default=172800, dest='timeaddresses')
parser.add_argument('--datadir',
help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
type=str, action='store', default=None, dest='datadir')
parser.add_argument('--logfile',
help='''log to this file (default: data/<NET>/log)''',
type=str, action='store', default=None, dest='logfile')
parser.add_argument('--web-static',
help='use an alternative web frontend in this directory (otherwise use the built-in frontend)',
type=str, action='store', default=None, dest='web_static')
parser.add_argument('--merged',
help='call getauxblock on this url to get work for merged mining (example: http://ncuser:ncpass@127.0.0.1:10332/)',
type=str, action='append', default=[], dest='merged_urls')
parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
help='donate this percentage of work towards the development of p2pool (default: 1.0)',
type=float, action='store', default=1.0, dest='donation_percentage')
parser.add_argument('--iocp',
help='use Windows IOCP API in order to avoid errors due to large number of sockets being open',
action='store_true', default=False, dest='iocp')
parser.add_argument('--irc-announce',
help='announce any blocks found on irc://irc.freenode.net/#p2pool',
action='store_true', default=False, dest='irc_announce')
parser.add_argument('--no-bugreport',
help='disable submitting caught exceptions to the author',
action='store_true', default=False, dest='no_bugreport')
p2pool_group = parser.add_argument_group('p2pool interface')
p2pool_group.add_argument('--p2pool-port', metavar='PORT',
help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
type=int, action='store', default=None, dest='p2pool_port')
p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
type=str, action='append', default=[], dest='p2pool_nodes')
parser.add_argument('--disable-upnp',
help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
action='store_false', default=True, dest='upnp')
p2pool_group.add_argument('--max-conns', metavar='CONNS',
help='maximum incoming connections (default: 40)',
type=int, action='store', default=40, dest='p2pool_conns')
p2pool_group.add_argument('--outgoing-conns', metavar='CONNS',
help='outgoing connections (default: 6)',
type=int, action='store', default=6, dest='p2pool_outgoing_conns')
p2pool_group.add_argument('--external-ip', metavar='ADDR[:PORT]',
help='specify your own public IP address instead of asking peers to discover it, useful for running dual WAN or asymmetric routing',
type=str, action='store', default=None, dest='p2pool_external_ip')
parser.add_argument('--disable-advertise',
help='''don't advertise local IP address as being available for incoming connections. useful for running a dark node, along with multiple -n ADDR's and --outgoing-conns 0''',
action='store_false', default=True, dest='advertise_ip')
worker_group = parser.add_argument_group('worker interface')
worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
type=str, action='store', default=None, dest='worker_endpoint')
worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
type=float, action='store', default=0, dest='worker_fee')
bitcoind_group = parser.add_argument_group('bitcoind interface')
bitcoind_group.add_argument('--bitcoind-config-path', metavar='BITCOIND_CONFIG_PATH',
help='custom configuration file path (when bitcoind -conf option used)',
type=str, action='store', default=None, dest='bitcoind_config_path')
bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
help='connect to this address (default: 127.0.0.1)',
type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
type=int, action='store', default=None, dest='bitcoind_rpc_port')
bitcoind_group.add_argument('--bitcoind-rpc-ssl',
help='connect to JSON-RPC interface using SSL',
action='store_true', default=False, dest='bitcoind_rpc_ssl')
bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
type=int, action='store', default=None, dest='bitcoind_p2p_port')
bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
help='bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)',
type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
args = parser.parse_args()
if args.debug:
p2pool.DEBUG = True
defer.setDebugging(True)
else:
p2pool.DEBUG = False
net_name = args.net_name + ('_testnet' if args.testnet else '')
net = networks.nets[net_name]
datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
if not os.path.exists(datadir_path):
os.makedirs(datadir_path)
if len(args.bitcoind_rpc_userpass) > 2:
parser.error('a maximum of two arguments are allowed')
args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]
if args.bitcoind_rpc_password is None:
conf_path = args.bitcoind_config_path or net.PARENT.CONF_FILE_FUNC()
if not os.path.exists(conf_path):
parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
'''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
'''\r\n'''
'''server=1\r\n'''
'''rpcpassword=%x\r\n'''
'''\r\n'''
'''Keep that password secret! After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128)))
conf = open(conf_path, 'rb').read()
contents = {}
for line in conf.splitlines(True):
if '#' in line:
line = line[:line.index('#')]
if '=' not in line:
continue
k, v = line.split('=', 1)
contents[k.strip()] = v.strip()
for conf_name, var_name, var_type in [
('rpcuser', 'bitcoind_rpc_username', str),
('rpcpassword', 'bitcoind_rpc_password', str),
('rpcport', 'bitcoind_rpc_port', int),
('port', 'bitcoind_p2p_port', int),
]:
if getattr(args, var_name) is None and conf_name in contents:
setattr(args, var_name, var_type(contents[conf_name]))
if 'rpcssl' in contents and contents['rpcssl'] != '0':
args.bitcoind_rpc_ssl = True
if args.bitcoind_rpc_password is None:
parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')
if args.bitcoind_rpc_username is None:
args.bitcoind_rpc_username = ''
if args.bitcoind_rpc_port is None:
args.bitcoind_rpc_port = net.PARENT.RPC_PORT
if args.bitcoind_p2p_port is None:
args.bitcoind_p2p_port = net.PARENT.P2P_PORT
if args.p2pool_port is None:
args.p2pool_port = net.P2P_PORT
if args.p2pool_outgoing_conns > 10:
parser.error('''--outgoing-conns can't be more than 10''')
if args.worker_endpoint is None:
worker_endpoint = '', net.WORKER_PORT
elif ':' not in args.worker_endpoint:
worker_endpoint = '', int(args.worker_endpoint)
else:
addr, port = args.worker_endpoint.rsplit(':', 1)
worker_endpoint = addr, int(port)
if args.address is not None and args.address != 'dynamic':
try:
args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
except Exception, e:
parser.error('error parsing address: ' + repr(e))
else:
args.pubkey_hash = None
def separate_url(url):
s = urlparse.urlsplit(url)
if '@' not in s.netloc:
parser.error('merged url netloc must contain an "@"')
userpass, new_netloc = s.netloc.rsplit('@', 1)
return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
merged_urls = map(separate_url, args.merged_urls)
if args.logfile is None:
args.logfile = os.path.join(datadir_path, 'log')
logfile = logging.LogFile(args.logfile)
pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
sys.stdout = logging.AbortPipe(pipe)
sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
if hasattr(signal, "SIGUSR1"):
def sigusr1(signum, frame):
print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
logfile.reopen()
print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
signal.signal(signal.SIGUSR1, sigusr1)
deferral.RobustLoopingCall(logfile.reopen).start(5)
class ErrorReporter(object):
def __init__(self):
self.last_sent = None
def emit(self, eventDict):
if not eventDict["isError"]:
return
if self.last_sent is not None and time.time() < self.last_sent + 5:
return
self.last_sent = time.time()
if 'failure' in eventDict:
text = ((eventDict.get('why') or 'Unhandled Error')
+ '\n' + eventDict['failure'].getTraceback())
else:
text = " ".join([str(m) for m in eventDict["message"]]) + "\n"
from twisted.web import client
client.getPage(
url='http://u.forre.st/p2pool_error.cgi',
method='POST',
postdata=p2pool.__version__ + ' ' + net.NAME + '\n' + text,
timeout=15,
).addBoth(lambda x: None)
if not args.no_bugreport:
log.addObserver(ErrorReporter().emit)
reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)
reactor.run()
| 34,303 | Python | .py | 585 | 45.169231 | 246 | 0.601605 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,492 | web.py | p2pool_p2pool/p2pool/web.py |
from __future__ import division
import errno
import json
import os
import sys
import time
import traceback
from twisted.internet import defer, reactor
from twisted.python import log
from twisted.web import resource, static
import p2pool
from bitcoin import data as bitcoin_data
from . import data as p2pool_data, p2p
from util import deferral, deferred_resource, graph, math, memory, pack, variable
def _atomic_read(filename):
try:
with open(filename, 'rb') as f:
return f.read()
except IOError, e:
if e.errno != errno.ENOENT:
raise
try:
with open(filename + '.new', 'rb') as f:
return f.read()
except IOError, e:
if e.errno != errno.ENOENT:
raise
return None
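# Write to '<name>.new', fsync, then rename over the target so readers never see a
# half-written file; fall back to remove+rename where rename can't overwrite.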
def _atomic_write(filename, data):
with open(filename + '.new', 'wb') as f:
f.write(data)
f.flush()
try:
os.fsync(f.fileno())
except:
pass
try:
os.rename(filename + '.new', filename)
except: # XXX windows can't overwrite
os.remove(filename)
os.rename(filename + '.new', filename)
def get_web_root(wb, datadir_path, bitcoind_getinfo_var, stop_event=variable.Event(), static_dir=None):
node = wb.node
start_time = time.time()
web_root = resource.Resource()
def get_users():
height, last = node.tracker.get_height_and_last(node.best_share_var.value)
weights, total_weight, donation_weight = node.tracker.get_cumulative_weights(node.best_share_var.value, min(height, 720), 65535*2**256)
res = {}
for script in sorted(weights, key=lambda s: weights[s]):
res[bitcoin_data.script2_to_address(script, node.net.PARENT)] = weights[script]/total_weight
return res
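# Scale the current expected payouts so they sum to `scale`; with `trunc` set, all
# payouts below that threshold are collapsed onto a single weighted-random winner
# so the resulting sendmany stays small.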
def get_current_scaled_txouts(scale, trunc=0):
txouts = node.get_current_txouts()
total = sum(txouts.itervalues())
results = dict((script, value*scale//total) for script, value in txouts.iteritems())
if trunc > 0:
total_random = 0
random_set = set()
for s in sorted(results, key=results.__getitem__):
if results[s] >= trunc:
break
total_random += results[s]
random_set.add(s)
if total_random:
winner = math.weighted_choice((script, results[script]) for script in random_set)
for script in random_set:
del results[script]
results[winner] = total_random
if sum(results.itervalues()) < int(scale):
results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
return results
def get_patron_sendmany(total=None, trunc='0.01'):
if total is None:
return 'need total argument. go to patron_sendmany/<TOTAL>'
total = int(float(total)*1e8)
trunc = int(float(trunc)*1e8)
return json.dumps(dict(
(bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e8)
for script, value in get_current_scaled_txouts(total, trunc).iteritems()
if bitcoin_data.script2_to_address(script, node.net.PARENT) is not None
))
def get_global_stats():
# averaged over last hour
if node.tracker.get_height(node.best_share_var.value) < 10:
return None
lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)
stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
diff = bitcoin_data.target_to_difficulty(wb.current_work.value['bits'].target)
return dict(
pool_nonstale_hash_rate=nonstale_hash_rate,
pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
pool_stale_prop=stale_prop,
min_difficulty=bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target),
network_block_difficulty=diff,
network_hashrate=(diff * 2**32 // node.net.PARENT.BLOCK_PERIOD),
)
def get_local_stats():
if node.tracker.get_height(node.best_share_var.value) < 10:
return None
lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
my_unstale_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes)
my_orphan_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'orphan')
my_doa_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'doa')
my_share_count = my_unstale_count + my_orphan_count + my_doa_count
my_stale_count = my_orphan_count + my_doa_count
my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
for share in node.tracker.get_chain(node.best_share_var.value, lookbehind - 1)
if share.hash in wb.my_share_hashes)
actual_time = (node.tracker.items[node.best_share_var.value].timestamp -
node.tracker.items[node.tracker.get_nth_parent_hash(node.best_share_var.value, lookbehind - 1)].timestamp)
share_att_s = my_work / actual_time
miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
(stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
miner_last_difficulties = {}
for addr in wb.last_work_shares.value:
miner_last_difficulties[addr] = bitcoin_data.target_to_difficulty(wb.last_work_shares.value[addr].target)
return dict(
my_hash_rates_in_last_hour=dict(
note="DEPRECATED",
nonstale=share_att_s,
rewarded=share_att_s/(1 - global_stale_prop),
actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
),
my_share_counts_in_last_hour=dict(
shares=my_share_count,
unstale_shares=my_unstale_count,
stale_shares=my_stale_count,
orphan_stale_shares=my_orphan_count,
doa_stale_shares=my_doa_count,
),
my_stale_proportions_in_last_hour=dict(
stale=my_stale_prop,
orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
),
miner_hash_rates=miner_hash_rates,
miner_dead_hash_rates=miner_dead_hash_rates,
miner_last_difficulties=miner_last_difficulties,
efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
efficiency=(1 - (stale_orphan_shares+stale_doa_shares)/shares)/(1 - global_stale_prop) if shares else None,
peers=dict(
incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
),
shares=dict(
total=shares,
orphan=stale_orphan_shares,
dead=stale_doa_shares,
),
uptime=time.time() - start_time,
attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target),
block_value=node.bitcoind_work.value['subsidy']*1e-8,
warnings=p2pool_data.get_warnings(node.tracker, node.best_share_var.value, node.net, bitcoind_getinfo_var.value, node.bitcoind_work.value),
donation_proportion=wb.donation_percentage/100,
version=p2pool.__version__,
protocol_version=p2p.Protocol.VERSION,
fee=wb.worker_fee,
)
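# Minimal Twisted resource: GET calls func(*args) and returns the result, JSON-encoded
# when the mime type is application/json; extra URL path segments become extra
# positional arguments.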
class WebInterface(deferred_resource.DeferredResource):
def __init__(self, func, mime_type='application/json', args=()):
deferred_resource.DeferredResource.__init__(self)
self.func, self.mime_type, self.args = func, mime_type, args
def getChild(self, child, request):
return WebInterface(self.func, self.mime_type, self.args + (child,))
@defer.inlineCallbacks
def render_GET(self, request):
request.setHeader('Content-Type', self.mime_type)
request.setHeader('Access-Control-Allow-Origin', '*')
res = yield self.func(*self.args)
defer.returnValue(json.dumps(res) if self.mime_type == 'application/json' else res)
def decent_height():
return min(node.tracker.get_height(node.best_share_var.value), 720)
web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, decent_height())/(1-p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, decent_height()))))
web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target)))
web_root.putChild('users', WebInterface(get_users))
web_root.putChild('user_stales', WebInterface(lambda: dict((bitcoin_data.pubkey_hash_to_address(ph, node.net.PARENT), prop) for ph, prop in
p2pool_data.get_user_stale_props(node.tracker, node.best_share_var.value, node.tracker.get_height(node.best_share_var.value)).iteritems())))
web_root.putChild('fee', WebInterface(lambda: wb.worker_fee))
web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e8) for script, value in node.get_current_txouts().iteritems())))
web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain'))
web_root.putChild('global_stats', WebInterface(get_global_stats))
web_root.putChild('local_stats', WebInterface(get_local_stats))
web_root.putChild('peer_addresses', WebInterface(lambda: ' '.join('%s%s' % (peer.transport.getPeer().host, ':'+str(peer.transport.getPeer().port) if peer.transport.getPeer().port != node.net.P2P_PORT else '') for peer in node.p2p_node.peers.itervalues())))
web_root.putChild('peer_txpool_sizes', WebInterface(lambda: dict(('%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port), peer.remembered_txs_size) for peer in node.p2p_node.peers.itervalues())))
web_root.putChild('pings', WebInterface(defer.inlineCallbacks(lambda: defer.returnValue(
dict([(a, (yield b)) for a, b in
[(
'%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port),
defer.inlineCallbacks(lambda peer=peer: defer.returnValue(
min([(yield peer.do_ping().addCallback(lambda x: x/0.001).addErrback(lambda fail: None)) for i in xrange(3)])
))()
) for peer in list(node.p2p_node.peers.itervalues())]
])
))))
web_root.putChild('peer_versions', WebInterface(lambda: dict(('%s:%i' % peer.addr, peer.other_sub_version) for peer in node.p2p_node.peers.itervalues())))
web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(wb.my_pubkey_hash, node.net.PARENT)))
web_root.putChild('payout_addrs', WebInterface(lambda: list(('%s' % bitcoin_data.pubkey_hash_to_address(add, node.net.PARENT)) for add in wb.pubkeys.keys)))
web_root.putChild('recent_blocks', WebInterface(lambda: [dict(
ts=s.timestamp,
hash='%064x' % s.header_hash,
number=p2pool_data.parse_bip0034(s.share_data['coinbase'])[0],
share='%064x' % s.hash,
) for s in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 24*60*60//node.net.SHARE_PERIOD)) if s.pow_hash <= s.header['bits'].target]))
web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
web_root.putChild('stale_rates', WebInterface(lambda: p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, decent_height(), rates=True)))
new_root = resource.Resource()
web_root.putChild('web', new_root)
stat_log = []
if os.path.exists(os.path.join(datadir_path, 'stats')):
try:
with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
stat_log = json.loads(f.read())
except:
log.err(None, 'Error loading stats:')
def update_stat_log():
while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
stat_log.pop(0)
lookbehind = 3600//node.net.SHARE_PERIOD
if node.tracker.get_height(node.best_share_var.value) < lookbehind:
return None
global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
(stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
my_current_payout=0.0
for add in wb.pubkeys.keys:
my_current_payout+=node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(add), 0)*1e-8
stat_log.append(dict(
time=time.time(),
pool_hash_rate=p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)/(1-global_stale_prop),
pool_stale_prop=global_stale_prop,
local_hash_rates=miner_hash_rates,
local_dead_hash_rates=miner_dead_hash_rates,
shares=shares,
stale_shares=stale_orphan_shares + stale_doa_shares,
stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
current_payout=my_current_payout,
peers=dict(
incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
),
attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target),
block_value=node.bitcoind_work.value['subsidy']*1e-8,
))
with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
f.write(json.dumps(stat_log))
x = deferral.RobustLoopingCall(update_stat_log)
x.start(5*60)
stop_event.watch(x.stop)
new_root.putChild('log', WebInterface(lambda: stat_log))
def get_share(share_hash_str):
if int(share_hash_str, 16) not in node.tracker.items:
return None
share = node.tracker.items[int(share_hash_str, 16)]
return dict(
parent='%064x' % share.previous_hash,
far_parent='%064x' % share.share_info['far_share_hash'],
children=['%064x' % x for x in sorted(node.tracker.reverse.get(share.hash, set()), key=lambda sh: -len(node.tracker.reverse.get(sh, set())))], # sorted from most children to least children
type_name=type(share).__name__,
local=dict(
verified=share.hash in node.tracker.verified.items,
time_first_seen=start_time if share.time_seen == 0 else share.time_seen,
peer_first_received_from=share.peer_addr,
),
share_data=dict(
timestamp=share.timestamp,
target=share.target,
max_target=share.max_target,
payout_address=bitcoin_data.script2_to_address(share.new_script, node.net.PARENT),
donation=share.share_data['donation']/65535,
stale_info=share.share_data['stale_info'],
nonce=share.share_data['nonce'],
desired_version=share.share_data['desired_version'],
absheight=share.absheight,
abswork=share.abswork,
),
block=dict(
hash='%064x' % share.header_hash,
header=dict(
version=share.header['version'],
previous_block='%064x' % share.header['previous_block'],
merkle_root='%064x' % share.header['merkle_root'],
timestamp=share.header['timestamp'],
target=share.header['bits'].target,
nonce=share.header['nonce'],
),
gentx=dict(
hash='%064x' % share.gentx_hash,
coinbase=share.share_data['coinbase'].ljust(2, '\x00').encode('hex'),
value=share.share_data['subsidy']*1e-8,
last_txout_nonce='%016x' % share.contents['last_txout_nonce'],
),
other_transaction_hashes=['%064x' % x for x in share.get_other_tx_hashes(node.tracker)],
),
)
def get_share_address(share_hash_str):
if int(share_hash_str, 16) not in node.tracker.items:
return None
share = node.tracker.items[int(share_hash_str, 16)]
return bitcoin_data.script2_to_address(share.new_script, node.net.PARENT)
new_root.putChild('payout_address', WebInterface(lambda share_hash_str: get_share_address(share_hash_str)))
new_root.putChild('share', WebInterface(lambda share_hash_str: get_share(share_hash_str)))
new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.heads]))
new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.verified.heads]))
new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.tails for x in node.tracker.reverse.get(t, set())]))
new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.verified.tails for x in node.tracker.verified.reverse.get(t, set())]))
new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % node.best_share_var.value))
new_root.putChild('my_share_hashes', WebInterface(lambda: ['%064x' % my_share_hash for my_share_hash in wb.my_share_hashes]))
def get_share_data(share_hash_str):
if int(share_hash_str, 16) not in node.tracker.items:
return ''
share = node.tracker.items[int(share_hash_str, 16)]
return p2pool_data.share_type.pack(share.as_share())
new_root.putChild('share_data', WebInterface(lambda share_hash_str: get_share_data(share_hash_str), 'application/octet-stream'))
new_root.putChild('currency_info', WebInterface(lambda: dict(
symbol=node.net.PARENT.SYMBOL,
block_explorer_url_prefix=node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
address_explorer_url_prefix=node.net.PARENT.ADDRESS_EXPLORER_URL_PREFIX,
tx_explorer_url_prefix=node.net.PARENT.TX_EXPLORER_URL_PREFIX,
)))
new_root.putChild('version', WebInterface(lambda: p2pool.__version__))
hd_path = os.path.join(datadir_path, 'graph_db')
hd_data = _atomic_read(hd_path)
hd_obj = {}
if hd_data is not None:
try:
hd_obj = json.loads(hd_data)
except Exception:
log.err(None, 'Error reading graph database:')
dataview_descriptions = {
'last_hour': graph.DataViewDescription(150, 60*60),
'last_day': graph.DataViewDescription(300, 60*60*24),
'last_week': graph.DataViewDescription(300, 60*60*24*7),
'last_month': graph.DataViewDescription(300, 60*60*24*30),
'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
}
hd = graph.HistoryDatabase.from_obj({
'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
'local_share_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False,
multivalues=True, multivalue_undefined_means_0=True,
default_func=graph.make_multivalue_migrator(dict(good='local_share_hash_rate', dead='local_dead_share_hash_rate', orphan='local_orphan_share_hash_rate'),
post_func=lambda bins: [dict((k, (v[0] - (sum(bin.get(rem_k, (0, 0))[0] for rem_k in ['dead', 'orphan']) if k == 'good' else 0), v[1])) for k, v in bin.iteritems()) for bin in bins])),
'pool_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
multivalue_undefined_means_0=True),
'current_payout': graph.DataStreamDescription(dataview_descriptions),
'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True),
'peers': graph.DataStreamDescription(dataview_descriptions, multivalues=True, default_func=graph.make_multivalue_migrator(dict(incoming='incoming_peers', outgoing='outgoing_peers'))),
'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
'miner_dead_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
'desired_version_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
multivalue_undefined_means_0=True),
'traffic_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
'getwork_latency': graph.DataStreamDescription(dataview_descriptions),
'memory_usage': graph.DataStreamDescription(dataview_descriptions),
}, hd_obj)
x = deferral.RobustLoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj())))
x.start(100)
stop_event.watch(x.stop)
@wb.pseudoshare_received.watch
def _(work, dead, user):
t = time.time()
hd.datastreams['local_hash_rate'].add_datum(t, work)
if dead:
hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
if user is not None:
hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
if dead:
hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work})
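# Share-level hash rate accounting: record a share as 'good' or 'dead' when received,
# then 200 seconds later reclassify it depending on whether it actually ended up in
# the share chain (dead -> good, or good -> orphan).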
@wb.share_received.watch
def _(work, dead, share_hash):
t = time.time()
if not dead:
hd.datastreams['local_share_hash_rates'].add_datum(t, dict(good=work))
else:
hd.datastreams['local_share_hash_rates'].add_datum(t, dict(dead=work))
def later():
res = node.tracker.is_child_of(share_hash, node.best_share_var.value)
if res is None: res = False # share isn't connected to sharechain? assume orphaned
if res and dead: # share was DOA, but is now in sharechain
# move from dead to good
hd.datastreams['local_share_hash_rates'].add_datum(t, dict(dead=-work, good=work))
elif not res and not dead: # share wasn't DOA, and isn't in sharechain
# move from good to orphan
hd.datastreams['local_share_hash_rates'].add_datum(t, dict(good=-work, orphan=work))
reactor.callLater(200, later)
@node.p2p_node.traffic_happened.watch
def _(name, bytes):
hd.datastreams['traffic_rate'].add_datum(time.time(), {name: bytes})
def add_point():
if node.tracker.get_height(node.best_share_var.value) < 10:
return None
lookbehind = min(node.net.CHAIN_LENGTH, 60*60//node.net.SHARE_PERIOD, node.tracker.get_height(node.best_share_var.value))
t = time.time()
pool_rates = p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, lookbehind, rates=True)
pool_total = sum(pool_rates.itervalues())
hd.datastreams['pool_rates'].add_datum(t, pool_rates)
current_txouts = node.get_current_txouts()
my_current_payouts = 0.0
for add in wb.pubkeys.keys:
my_current_payouts += current_txouts.get(bitcoin_data.pubkey_hash_to_script2(add), 0)*1e-8
hd.datastreams['current_payout'].add_datum(t, my_current_payouts)
miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
current_txouts_by_address = dict((bitcoin_data.script2_to_address(script, node.net.PARENT), amount) for script, amount in current_txouts.iteritems())
hd.datastreams['current_payouts'].add_datum(t, dict((user, current_txouts_by_address[user]*1e-8) for user in miner_hash_rates if user in current_txouts_by_address))
hd.datastreams['peers'].add_datum(t, dict(
incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
))
vs = p2pool_data.get_desired_version_counts(node.tracker, node.best_share_var.value, lookbehind)
vs_total = sum(vs.itervalues())
hd.datastreams['desired_version_rates'].add_datum(t, dict((str(k), v/vs_total*pool_total) for k, v in vs.iteritems()))
try:
hd.datastreams['memory_usage'].add_datum(t, memory.resident())
except:
if p2pool.DEBUG:
traceback.print_exc()
x = deferral.RobustLoopingCall(add_point)
x.start(5)
stop_event.watch(x.stop)
@node.bitcoind_work.changed.watch
def _(new_work):
hd.datastreams['getwork_latency'].add_datum(time.time(), new_work['latency'])
new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time())))
if static_dir is None:
static_dir = os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), 'web-static')
web_root.putChild('static', static.File(static_dir))
return web_root
| 26,699 | Python | .py | 433 | 50.796767 | 260 | 0.650054 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,493 | data.py | p2pool_p2pool/p2pool/data.py |
from __future__ import division
import hashlib
import os
import random
import sys
import time
from twisted.python import log
import p2pool
from p2pool.bitcoin import data as bitcoin_data, script, sha256
from p2pool.util import math, forest, pack
def parse_bip0034(coinbase):
_, opdata = script.parse(coinbase).next()
bignum = pack.IntType(len(opdata)*8).unpack(opdata)
if ord(opdata[-1]) & 0x80:
bignum = -bignum
return (bignum,)
# hashlink
hash_link_type = pack.ComposedType([
('state', pack.FixedStrType(32)),
('extra_data', pack.FixedStrType(0)), # bit of a hack, but since the donation script is at the end, const_ending is long enough to always make this empty
('length', pack.VarIntType()),
])
def prefix_to_hash_link(prefix, const_ending=''):
assert prefix.endswith(const_ending), (prefix, const_ending)
x = sha256.sha256(prefix)
return dict(state=x.state, extra_data=x.buf[:max(0, len(x.buf)-len(const_ending))], length=x.length//8)
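# Resume the saved SHA-256 midstate over the remaining bytes and return the double-SHA256 of prefix+data as an integer; this lets the generation transaction be verified from its hash link without the full prefix.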
def check_hash_link(hash_link, data, const_ending=''):
extra_length = hash_link['length'] % (512//8)
assert len(hash_link['extra_data']) == max(0, extra_length - len(const_ending))
extra = (hash_link['extra_data'] + const_ending)[len(hash_link['extra_data']) + len(const_ending) - extra_length:]
assert len(extra) == extra_length
return pack.IntType(256).unpack(hashlib.sha256(sha256.sha256(data, (hash_link['state'], extra, 8*hash_link['length'])).digest()).digest())
# shares
share_type = pack.ComposedType([
('type', pack.VarIntType()),
('contents', pack.VarStrType()),
])
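# Deserialize a wrapped share received from the network, dispatching on its version number; obsolete versions are treated as peer misbehavior and unknown ones raise ValueError.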
def load_share(share, net, peer_addr):
assert peer_addr is None or isinstance(peer_addr, tuple)
if share['type'] < Share.VERSION:
from p2pool import p2p
raise p2p.PeerMisbehavingError('sent an obsolete share')
elif share['type'] == Share.VERSION:
return Share(net, peer_addr, Share.get_dynamic_types(net)['share_type'].unpack(share['contents']))
elif share['type'] == NewShare.VERSION:
return NewShare(net, peer_addr, NewShare.get_dynamic_types(net)['share_type'].unpack(share['contents']))
else:
raise ValueError('unknown share type: %r' % (share['type'],))
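# Segwit-style shares are enabled once the share version reaches the network's SEGWIT_ACTIVATION_VERSION (0 or unset means never activated).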
def is_segwit_activated(version, net):
assert not(version is None or net is None)
segwit_activation_version = getattr(net, 'SEGWIT_ACTIVATION_VERSION', 0)
return version >= segwit_activation_version and segwit_activation_version > 0
DONATION_SCRIPT = '4104ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664bac'.decode('hex')
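# Common share format and validation logic; the Share and NewShare subclasses below pin concrete version numbers and size limits.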
class BaseShare(object):
VERSION = 0
VOTING_VERSION = 0
SUCCESSOR = None
MAX_BLOCK_WEIGHT = 4000000
MAX_NEW_TXS_SIZE = 50000
small_block_header_type = pack.ComposedType([
('version', pack.VarIntType()),
('previous_block', pack.PossiblyNoneType(0, pack.IntType(256))),
('timestamp', pack.IntType(32)),
('bits', bitcoin_data.FloatingIntegerType()),
('nonce', pack.IntType(32)),
])
share_info_type = None
share_type = None
ref_type = None
gentx_before_refhash = pack.VarStrType().pack(DONATION_SCRIPT) + pack.IntType(64).pack(0) + pack.VarStrType().pack('\x6a\x28' + pack.IntType(256).pack(0) + pack.IntType(64).pack(0))[:3]
@classmethod
def get_dynamic_types(cls, net):
t = dict(share_info_type=None, share_type=None, ref_type=None)
segwit_data = ('segwit_data', pack.PossiblyNoneType(dict(txid_merkle_link=dict(branch=[], index=0), wtxid_merkle_root=2**256-1), pack.ComposedType([
('txid_merkle_link', pack.ComposedType([
('branch', pack.ListType(pack.IntType(256))),
('index', pack.IntType(0)), # it will always be 0
])),
('wtxid_merkle_root', pack.IntType(256))
])))
t['share_info_type'] = pack.ComposedType([
('share_data', pack.ComposedType([
('previous_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
('coinbase', pack.VarStrType()),
('nonce', pack.IntType(32)),
('pubkey_hash', pack.IntType(160)),
('subsidy', pack.IntType(64)),
('donation', pack.IntType(16)),
('stale_info', pack.EnumType(pack.IntType(8), dict((k, {0: None, 253: 'orphan', 254: 'doa'}.get(k, 'unk%i' % (k,))) for k in xrange(256)))),
('desired_version', pack.VarIntType()),
]))] + ([segwit_data] if is_segwit_activated(cls.VERSION, net) else []) + [
('new_transaction_hashes', pack.ListType(pack.IntType(256))),
('transaction_hash_refs', pack.ListType(pack.VarIntType(), 2)), # pairs of share_count, tx_count
('far_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
('max_bits', bitcoin_data.FloatingIntegerType()),
('bits', bitcoin_data.FloatingIntegerType()),
('timestamp', pack.IntType(32)),
('absheight', pack.IntType(32)),
('abswork', pack.IntType(128)),
])
t['share_type'] = pack.ComposedType([
('min_header', cls.small_block_header_type),
('share_info', t['share_info_type']),
('ref_merkle_link', pack.ComposedType([
('branch', pack.ListType(pack.IntType(256))),
('index', pack.IntType(0)),
])),
('last_txout_nonce', pack.IntType(64)),
('hash_link', hash_link_type),
('merkle_link', pack.ComposedType([
('branch', pack.ListType(pack.IntType(256))),
('index', pack.IntType(0)), # it will always be 0
])),
])
t['ref_type'] = pack.ComposedType([
('identifier', pack.FixedStrType(64//8)),
('share_info', t['share_info_type']),
])
return t
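# Build the share_info and generation transaction for a new share: derive the difficulty target from recent chain history, select transactions within MAX_NEW_TXS_SIZE, and split the subsidy between recent miners (by cumulative weight), the miner of this share, and the donation script.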
@classmethod
def generate_transaction(cls, tracker, share_data, block_target, desired_timestamp, desired_target, ref_merkle_link, desired_other_transaction_hashes_and_fees, net, known_txs=None, last_txout_nonce=0, base_subsidy=None, segwit_data=None):
previous_share = tracker.items[share_data['previous_share_hash']] if share_data['previous_share_hash'] is not None else None
height, last = tracker.get_height_and_last(share_data['previous_share_hash'])
assert height >= net.REAL_CHAIN_LENGTH or last is None
if height < net.TARGET_LOOKBEHIND:
pre_target3 = net.MAX_TARGET
else:
attempts_per_second = get_pool_attempts_per_second(tracker, share_data['previous_share_hash'], net.TARGET_LOOKBEHIND, min_work=True, integer=True)
pre_target = 2**256//(net.SHARE_PERIOD*attempts_per_second) - 1 if attempts_per_second else 2**256-1
pre_target2 = math.clip(pre_target, (previous_share.max_target*9//10, previous_share.max_target*11//10))
pre_target3 = math.clip(pre_target2, (net.MIN_TARGET, net.MAX_TARGET))
max_bits = bitcoin_data.FloatingInteger.from_target_upper_bound(pre_target3)
bits = bitcoin_data.FloatingInteger.from_target_upper_bound(math.clip(desired_target, (pre_target3//30, pre_target3)))
new_transaction_hashes = []
new_transaction_size = 0
transaction_hash_refs = []
other_transaction_hashes = []
past_shares = list(tracker.get_chain(share_data['previous_share_hash'], min(height, 100)))
tx_hash_to_this = {}
for i, share in enumerate(past_shares):
for j, tx_hash in enumerate(share.new_transaction_hashes):
if tx_hash not in tx_hash_to_this:
tx_hash_to_this[tx_hash] = [1+i, j] # share_count, tx_count
for tx_hash, fee in desired_other_transaction_hashes_and_fees:
if tx_hash in tx_hash_to_this:
this = tx_hash_to_this[tx_hash]
else:
if known_txs is not None:
this_size = bitcoin_data.tx_type.packed_size(known_txs[tx_hash])
if new_transaction_size + this_size > cls.MAX_NEW_TXS_SIZE: # limit the size of new txns/share
break
new_transaction_size += this_size
new_transaction_hashes.append(tx_hash)
this = [0, len(new_transaction_hashes)-1]
transaction_hash_refs.extend(this)
other_transaction_hashes.append(tx_hash)
included_transactions = set(other_transaction_hashes)
removed_fees = [fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash not in included_transactions]
definite_fees = sum(0 if fee is None else fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash in included_transactions)
if None not in removed_fees:
share_data = dict(share_data, subsidy=share_data['subsidy'] - sum(removed_fees))
else:
assert base_subsidy is not None
share_data = dict(share_data, subsidy=base_subsidy + definite_fees)
weights, total_weight, donation_weight = tracker.get_cumulative_weights(previous_share.share_data['previous_share_hash'] if previous_share is not None else None,
max(0, min(height, net.REAL_CHAIN_LENGTH) - 1),
65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target),
)
assert total_weight == sum(weights.itervalues()) + donation_weight, (total_weight, sum(weights.itervalues()) + donation_weight)
amounts = dict((script, share_data['subsidy']*(199*weight)//(200*total_weight)) for script, weight in weights.iteritems()) # 99.5% goes according to weights prior to this share
this_script = bitcoin_data.pubkey_hash_to_script2(share_data['pubkey_hash'])
amounts[this_script] = amounts.get(this_script, 0) + share_data['subsidy']//200 # 0.5% goes to block finder
amounts[DONATION_SCRIPT] = amounts.get(DONATION_SCRIPT, 0) + share_data['subsidy'] - sum(amounts.itervalues()) # all that's left over is the donation weight and some extra satoshis due to rounding
if sum(amounts.itervalues()) != share_data['subsidy'] or any(x < 0 for x in amounts.itervalues()):
raise ValueError()
dests = sorted(amounts.iterkeys(), key=lambda script: (script == DONATION_SCRIPT, amounts[script], script))[-4000:] # block length limit, unlikely to ever be hit
segwit_activated = is_segwit_activated(cls.VERSION, net)
if segwit_data is None and known_txs is None:
segwit_activated = False
if not(segwit_activated or known_txs is None) and any(bitcoin_data.is_segwit_tx(known_txs[h]) for h in other_transaction_hashes):
raise ValueError('segwit transaction included before activation')
if segwit_activated and known_txs is not None:
share_txs = [(known_txs[h], bitcoin_data.get_txid(known_txs[h]), h) for h in other_transaction_hashes]
segwit_data = dict(txid_merkle_link=bitcoin_data.calculate_merkle_link([None] + [tx[1] for tx in share_txs], 0), wtxid_merkle_root=bitcoin_data.merkle_hash([0] + [bitcoin_data.get_wtxid(tx[0], tx[1], tx[2]) for tx in share_txs]))
if segwit_activated and segwit_data is not None:
witness_reserved_value_str = '[P2Pool]'*4
witness_reserved_value = pack.IntType(256).unpack(witness_reserved_value_str)
witness_commitment_hash = bitcoin_data.get_witness_commitment_hash(segwit_data['wtxid_merkle_root'], witness_reserved_value)
share_info = dict(
share_data=share_data,
far_share_hash=None if last is None and height < 99 else tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99),
max_bits=max_bits,
bits=bits,
timestamp=math.clip(desired_timestamp, (
(previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1
(previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1),
)) if previous_share is not None else desired_timestamp,
new_transaction_hashes=new_transaction_hashes,
transaction_hash_refs=transaction_hash_refs,
absheight=((previous_share.absheight if previous_share is not None else 0) + 1) % 2**32,
abswork=((previous_share.abswork if previous_share is not None else 0) + bitcoin_data.target_to_average_attempts(bits.target)) % 2**128,
)
if segwit_activated:
share_info['segwit_data'] = segwit_data
gentx = dict(
version=1,
tx_ins=[dict(
previous_output=None,
sequence=None,
script=share_data['coinbase'],
)],
tx_outs=([dict(value=0, script='\x6a\x24\xaa\x21\xa9\xed' + pack.IntType(256).pack(witness_commitment_hash))] if segwit_activated else []) +
[dict(value=amounts[script], script=script) for script in dests if amounts[script] or script == DONATION_SCRIPT] +
[dict(value=0, script='\x6a\x28' + cls.get_ref_hash(net, share_info, ref_merkle_link) + pack.IntType(64).pack(last_txout_nonce))],
lock_time=0,
)
if segwit_activated:
gentx['marker'] = 0
gentx['flag'] = 1
gentx['witness'] = [[witness_reserved_value_str]]
def get_share(header, last_txout_nonce=last_txout_nonce):
min_header = dict(header); del min_header['merkle_root']
share = cls(net, None, dict(
min_header=min_header,
share_info=share_info,
ref_merkle_link=dict(branch=[], index=0),
last_txout_nonce=last_txout_nonce,
hash_link=prefix_to_hash_link(bitcoin_data.tx_id_type.pack(gentx)[:-32-8-4], cls.gentx_before_refhash),
merkle_link=bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0),
))
assert share.header == header # checks merkle_root
return share
return share_info, gentx, other_transaction_hashes, get_share
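# Hash committing to share_info via ref_type and the ref merkle link; it is embedded in the OP_RETURN-style last output of the generation transaction built above.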
@classmethod
def get_ref_hash(cls, net, share_info, ref_merkle_link):
return pack.IntType(256).pack(bitcoin_data.check_merkle_link(bitcoin_data.hash256(cls.get_dynamic_types(net)['ref_type'].pack(dict(
identifier=net.IDENTIFIER,
share_info=share_info,
))), ref_merkle_link))
__slots__ = 'net peer_addr contents min_header share_info hash_link merkle_link hash share_data max_target target timestamp previous_hash new_script desired_version gentx_hash header pow_hash header_hash new_transaction_hashes time_seen absheight abswork'.split(' ')
def __init__(self, net, peer_addr, contents):
dynamic_types = self.get_dynamic_types(net)
self.share_info_type = dynamic_types['share_info_type']
self.share_type = dynamic_types['share_type']
self.ref_type = dynamic_types['ref_type']
self.net = net
self.peer_addr = peer_addr
self.contents = contents
self.min_header = contents['min_header']
self.share_info = contents['share_info']
self.hash_link = contents['hash_link']
self.merkle_link = contents['merkle_link']
segwit_activated = is_segwit_activated(self.VERSION, net)
if not (2 <= len(self.share_info['share_data']['coinbase']) <= 100):
raise ValueError('''bad coinbase size! %i bytes''' % (len(self.share_info['share_data']['coinbase']),))
if len(self.merkle_link['branch']) > 16 or (segwit_activated and len(self.share_info['segwit_data']['txid_merkle_link']['branch']) > 16):
raise ValueError('merkle branch too long!')
assert not self.hash_link['extra_data'], repr(self.hash_link['extra_data'])
self.share_data = self.share_info['share_data']
self.max_target = self.share_info['max_bits'].target
self.target = self.share_info['bits'].target
self.timestamp = self.share_info['timestamp']
self.previous_hash = self.share_data['previous_share_hash']
self.new_script = bitcoin_data.pubkey_hash_to_script2(self.share_data['pubkey_hash'])
self.desired_version = self.share_data['desired_version']
self.absheight = self.share_info['absheight']
self.abswork = self.share_info['abswork']
n = set()
for share_count, tx_count in self.iter_transaction_hash_refs():
assert share_count < 110
if share_count == 0:
n.add(tx_count)
assert n == set(range(len(self.share_info['new_transaction_hashes'])))
self.gentx_hash = check_hash_link(
self.hash_link,
self.get_ref_hash(net, self.share_info, contents['ref_merkle_link']) + pack.IntType(64).pack(self.contents['last_txout_nonce']) + pack.IntType(32).pack(0),
self.gentx_before_refhash,
)
merkle_root = bitcoin_data.check_merkle_link(self.gentx_hash, self.share_info['segwit_data']['txid_merkle_link'] if segwit_activated else self.merkle_link)
self.header = dict(self.min_header, merkle_root=merkle_root)
self.pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(self.header))
self.hash = self.header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(self.header))
if self.target > net.MAX_TARGET:
from p2pool import p2p
raise p2p.PeerMisbehavingError('share target invalid')
if self.pow_hash > self.target:
from p2pool import p2p
raise p2p.PeerMisbehavingError('share PoW invalid')
self.new_transaction_hashes = self.share_info['new_transaction_hashes']
# XXX eww
self.time_seen = time.time()
def __repr__(self):
return 'Share' + repr((self.net, self.peer_addr, self.contents))
def as_share(self):
return dict(type=self.VERSION, contents=self.share_type.pack(self.contents))
def iter_transaction_hash_refs(self):
return zip(self.share_info['transaction_hash_refs'][::2], self.share_info['transaction_hash_refs'][1::2])
def check(self, tracker, other_txs=None):
from p2pool import p2p
counts = None
if self.share_data['previous_share_hash'] is not None:
previous_share = tracker.items[self.share_data['previous_share_hash']]
if tracker.get_height(self.share_data['previous_share_hash']) >= self.net.CHAIN_LENGTH:
counts = get_desired_version_counts(tracker, tracker.get_nth_parent_hash(previous_share.hash, self.net.CHAIN_LENGTH*9//10), self.net.CHAIN_LENGTH//10)
if type(self) is type(previous_share):
pass
elif type(self) is type(previous_share).SUCCESSOR:
# switch only valid if 60% of hashes in [self.net.CHAIN_LENGTH*9//10, self.net.CHAIN_LENGTH] for new version
if counts.get(self.VERSION, 0) < sum(counts.itervalues())*60//100:
raise p2p.PeerMisbehavingError('switch without enough hash power upgraded')
else:
raise p2p.PeerMisbehavingError('''%s can't follow %s''' % (type(self).__name__, type(previous_share).__name__))
elif type(self) is type(previous_share).SUCCESSOR:
raise p2p.PeerMisbehavingError('switch without enough history')
other_tx_hashes = [tracker.items[tracker.get_nth_parent_hash(self.hash, share_count)].share_info['new_transaction_hashes'][tx_count] for share_count, tx_count in self.iter_transaction_hash_refs()]
if other_txs is not None and not isinstance(other_txs, dict): other_txs = dict((bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)), tx) for tx in other_txs)
share_info, gentx, other_tx_hashes2, get_share = self.generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.share_info['bits'].target, self.contents['ref_merkle_link'], [(h, None) for h in other_tx_hashes], self.net,
known_txs=other_txs, last_txout_nonce=self.contents['last_txout_nonce'], segwit_data=self.share_info.get('segwit_data', None))
assert other_tx_hashes2 == other_tx_hashes
if share_info != self.share_info:
raise ValueError('share_info invalid')
if bitcoin_data.get_txid(gentx) != self.gentx_hash:
raise ValueError('''gentx doesn't match hash_link''')
if bitcoin_data.calculate_merkle_link([None] + other_tx_hashes, 0) != self.merkle_link: # the other hash commitments are checked in the share_info assertion
raise ValueError('merkle_link and other_tx_hashes do not match')
update_min_protocol_version(counts, self)
return gentx # only used by as_block
def get_other_tx_hashes(self, tracker):
parents_needed = max(share_count for share_count, tx_count in self.iter_transaction_hash_refs()) if self.share_info['transaction_hash_refs'] else 0
parents = tracker.get_height(self.hash) - 1
if parents < parents_needed:
return None
last_shares = list(tracker.get_chain(self.hash, parents_needed + 1))
return [last_shares[share_count].share_info['new_transaction_hashes'][tx_count] for share_count, tx_count in self.iter_transaction_hash_refs()]
def _get_other_txs(self, tracker, known_txs):
other_tx_hashes = self.get_other_tx_hashes(tracker)
if other_tx_hashes is None:
return None # not all parents present
if not all(tx_hash in known_txs for tx_hash in other_tx_hashes):
return None # not all txs present
return [known_txs[tx_hash] for tx_hash in other_tx_hashes]
def should_punish_reason(self, previous_block, bits, tracker, known_txs):
if (self.header['previous_block'], self.header['bits']) != (previous_block, bits) and self.header_hash != previous_block and self.peer_addr is not None:
return True, 'Block-stale detected! height(%x) < height(%x) or %08x != %08x' % (self.header['previous_block'], previous_block, self.header['bits'].bits, bits.bits)
if self.pow_hash <= self.header['bits'].target:
return -1, 'block solution'
other_txs = self._get_other_txs(tracker, known_txs)
if other_txs is None:
pass
else:
all_txs_size = sum(bitcoin_data.tx_type.packed_size(tx) for tx in other_txs)
stripped_txs_size = sum(bitcoin_data.tx_id_type.packed_size(tx) for tx in other_txs)
if all_txs_size + 3 * stripped_txs_size > self.MAX_BLOCK_WEIGHT:
return True, 'txs over block size limit'
new_txs_size = sum(bitcoin_data.tx_type.packed_size(known_txs[tx_hash]) for tx_hash in self.share_info['new_transaction_hashes'])
if new_txs_size > self.MAX_NEW_TXS_SIZE:
return True, 'new txs over limit'
return False, None
def as_block(self, tracker, known_txs):
other_txs = self._get_other_txs(tracker, known_txs)
if other_txs is None:
return None # not all txs present
return dict(header=self.header, txs=[self.check(tracker, other_txs)] + other_txs)
class NewShare(BaseShare):
VERSION = 17
VOTING_VERSION = 17
SUCCESSOR = None
MAX_NEW_TXS_SIZE = 100000
class Share(BaseShare):
VERSION = 16
VOTING_VERSION = 16
SUCCESSOR = NewShare
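# Skip list over the share tracker that accumulates per-script payout weights (plus the donation weight) along the chain, clipping the final share so the total never exceeds the desired weight.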
class WeightsSkipList(forest.TrackerSkipList):
# share_count, weights, total_weight
def get_delta(self, element):
from p2pool.bitcoin import data as bitcoin_data
share = self.tracker.items[element]
att = bitcoin_data.target_to_average_attempts(share.target)
return 1, {share.new_script: att*(65535-share.share_data['donation'])}, att*65535, att*share.share_data['donation']
def combine_deltas(self, (share_count1, weights1, total_weight1, total_donation_weight1), (share_count2, weights2, total_weight2, total_donation_weight2)):
return share_count1 + share_count2, math.add_dicts(weights1, weights2), total_weight1 + total_weight2, total_donation_weight1 + total_donation_weight2
def initial_solution(self, start, (max_shares, desired_weight)):
assert desired_weight % 65535 == 0, divmod(desired_weight, 65535)
return 0, None, 0, 0
def apply_delta(self, (share_count1, weights_list, total_weight1, total_donation_weight1), (share_count2, weights2, total_weight2, total_donation_weight2), (max_shares, desired_weight)):
if total_weight1 + total_weight2 > desired_weight and share_count2 == 1:
assert (desired_weight - total_weight1) % 65535 == 0
script, = weights2.iterkeys()
new_weights = {script: (desired_weight - total_weight1)//65535*weights2[script]//(total_weight2//65535)}
return share_count1 + share_count2, (weights_list, new_weights), desired_weight, total_donation_weight1 + (desired_weight - total_weight1)//65535*total_donation_weight2//(total_weight2//65535)
return share_count1 + share_count2, (weights_list, weights2), total_weight1 + total_weight2, total_donation_weight1 + total_donation_weight2
def judge(self, (share_count, weights_list, total_weight, total_donation_weight), (max_shares, desired_weight)):
if share_count > max_shares or total_weight > desired_weight:
return 1
elif share_count == max_shares or total_weight == desired_weight:
return 0
else:
return -1
def finalize(self, (share_count, weights_list, total_weight, total_donation_weight), (max_shares, desired_weight)):
assert share_count <= max_shares and total_weight <= desired_weight
assert share_count == max_shares or total_weight == desired_weight
return math.add_dicts(*math.flatten_linked_list(weights_list)), total_weight, total_donation_weight
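# Tracker that attempts to verify incoming shares, prunes bad heads, requests missing parents, and picks the best verified chain head to mine on.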
class OkayTracker(forest.Tracker):
def __init__(self, net):
forest.Tracker.__init__(self, delta_type=forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs,
work=lambda share: bitcoin_data.target_to_average_attempts(share.target),
min_work=lambda share: bitcoin_data.target_to_average_attempts(share.max_target),
)))
self.net = net
self.verified = forest.SubsetTracker(delta_type=forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs,
work=lambda share: bitcoin_data.target_to_average_attempts(share.target),
)), subset_of=self)
self.get_cumulative_weights = WeightsSkipList(self)
def attempt_verify(self, share):
if share.hash in self.verified.items:
return True
height, last = self.get_height_and_last(share.hash)
if height < self.net.CHAIN_LENGTH + 1 and last is not None:
raise AssertionError()
try:
share.check(self)
except:
log.err(None, 'Share check failed: %064x -> %064x' % (share.hash, share.previous_hash if share.previous_hash is not None else 0))
return False
else:
self.verified.add(share)
return True
def think(self, block_rel_height_func, previous_block, bits, known_txs):
desired = set()
bad_peer_addresses = set()
# O(len(self.heads))
# make 'unverified heads' set?
# for each overall head, attempt verification
# if it fails, attempt on parent, and repeat
# if no successful verification because of lack of parents, request parent
bads = []
for head in set(self.heads) - set(self.verified.heads):
head_height, last = self.get_height_and_last(head)
for share in self.get_chain(head, head_height if last is None else min(5, max(0, head_height - self.net.CHAIN_LENGTH))):
if self.attempt_verify(share):
break
bads.append(share.hash)
else:
if last is not None:
desired.add((
self.items[random.choice(list(self.reverse[last]))].peer_addr,
last,
max(x.timestamp for x in self.get_chain(head, min(head_height, 5))),
min(x.target for x in self.get_chain(head, min(head_height, 5))),
))
for bad in bads:
assert bad not in self.verified.items
#assert bad in self.heads
bad_share = self.items[bad]
if bad_share.peer_addr is not None:
bad_peer_addresses.add(bad_share.peer_addr)
if p2pool.DEBUG:
print "BAD", bad
try:
self.remove(bad)
except NotImplementedError:
pass
# try to get at least CHAIN_LENGTH height for each verified head, requesting parents if needed
for head in list(self.verified.heads):
head_height, last_hash = self.verified.get_height_and_last(head)
last_height, last_last_hash = self.get_height_and_last(last_hash)
# XXX review boundary conditions
want = max(self.net.CHAIN_LENGTH - head_height, 0)
can = max(last_height - 1 - self.net.CHAIN_LENGTH, 0) if last_last_hash is not None else last_height
get = min(want, can)
#print 'Z', head_height, last_hash is None, last_height, last_last_hash is None, want, can, get
for share in self.get_chain(last_hash, get):
if not self.attempt_verify(share):
break
if head_height < self.net.CHAIN_LENGTH and last_last_hash is not None:
desired.add((
self.items[random.choice(list(self.verified.reverse[last_hash]))].peer_addr,
last_last_hash,
max(x.timestamp for x in self.get_chain(head, min(head_height, 5))),
min(x.target for x in self.get_chain(head, min(head_height, 5))),
))
# decide best tree
decorated_tails = sorted((self.score(max(self.verified.tails[tail_hash], key=self.verified.get_work), block_rel_height_func), tail_hash) for tail_hash in self.verified.tails)
if p2pool.DEBUG:
print len(decorated_tails), 'tails:'
for score, tail_hash in decorated_tails:
print format_hash(tail_hash), score
best_tail_score, best_tail = decorated_tails[-1] if decorated_tails else (None, None)
# decide best verified head
decorated_heads = sorted(((
self.verified.get_work(self.verified.get_nth_parent_hash(h, min(5, self.verified.get_height(h)))),
#self.items[h].peer_addr is None,
-self.items[h].should_punish_reason(previous_block, bits, self, known_txs)[0],
-self.items[h].time_seen,
), h) for h in self.verified.tails.get(best_tail, []))
if p2pool.DEBUG:
print len(decorated_heads), 'heads. Top 10:'
for score, head_hash in decorated_heads[-10:]:
print ' ', format_hash(head_hash), format_hash(self.items[head_hash].previous_hash), score
best_head_score, best = decorated_heads[-1] if decorated_heads else (None, None)
if best is not None:
best_share = self.items[best]
punish, punish_reason = best_share.should_punish_reason(previous_block, bits, self, known_txs)
if punish > 0:
print 'Punishing share for %r! Jumping from %s to %s!' % (punish_reason, format_hash(best), format_hash(best_share.previous_hash))
best = best_share.previous_hash
timestamp_cutoff = min(int(time.time()), best_share.timestamp) - 3600
target_cutoff = int(2**256//(self.net.SHARE_PERIOD*best_tail_score[1] + 1) * 2 + .5) if best_tail_score[1] is not None else 2**256-1
else:
timestamp_cutoff = int(time.time()) - 24*60*60
target_cutoff = 2**256-1
if p2pool.DEBUG:
print 'Desire %i shares. Cutoff: %s old diff>%.2f' % (len(desired), math.format_dt(time.time() - timestamp_cutoff), bitcoin_data.target_to_difficulty(target_cutoff))
for peer_addr, hash, ts, targ in desired:
print ' ', None if peer_addr is None else '%s:%i' % peer_addr, format_hash(hash), math.format_dt(time.time() - ts), bitcoin_data.target_to_difficulty(targ), ts >= timestamp_cutoff, targ <= target_cutoff
return best, [(peer_addr, hash) for peer_addr, hash, ts, targ in desired if ts >= timestamp_cutoff], decorated_heads, bad_peer_addresses
def score(self, share_hash, block_rel_height_func):
# returns approximate lower bound on chain's hashrate in the last self.net.CHAIN_LENGTH*15//16*self.net.SHARE_PERIOD time
head_height = self.verified.get_height(share_hash)
if head_height < self.net.CHAIN_LENGTH:
return head_height, None
end_point = self.verified.get_nth_parent_hash(share_hash, self.net.CHAIN_LENGTH*15//16)
block_height = max(block_rel_height_func(share.header['previous_block']) for share in
self.verified.get_chain(end_point, self.net.CHAIN_LENGTH//16))
return self.net.CHAIN_LENGTH, self.verified.get_delta(share_hash, end_point).work/((0 - block_height + 1)*self.net.PARENT.BLOCK_PERIOD)
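# Once 95% of recent work votes for the new share version, raise MINIMUM_PROTOCOL_VERSION so peers running obsolete nodes are rejected.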
def update_min_protocol_version(counts, share):
minpver = getattr(share.net, 'MINIMUM_PROTOCOL_VERSION', 1400)
newminpver = getattr(share.net, 'NEW_MINIMUM_PROTOCOL_VERSION', minpver)
if (counts is not None) and (type(share) is NewShare) and (minpver < newminpver):
if counts.get(share.VERSION, 0) >= sum(counts.itervalues())*95//100:
share.net.MINIMUM_PROTOCOL_VERSION = newminpver # Reject peers running obsolete nodes
print 'Setting MINIMUM_PROTOCOL_VERSION = %d' % (newminpver)
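# Approximate pool hash rate: total (or minimum) work across dist shares divided by the elapsed time between their timestamps.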
def get_pool_attempts_per_second(tracker, previous_share_hash, dist, min_work=False, integer=False):
assert dist >= 2
near = tracker.items[previous_share_hash]
far = tracker.items[tracker.get_nth_parent_hash(previous_share_hash, dist - 1)]
attempts = tracker.get_delta(near.hash, far.hash).work if not min_work else tracker.get_delta(near.hash, far.hash).min_work
time = near.timestamp - far.timestamp
if time <= 0:
time = 1
if integer:
return attempts//time
return attempts/time
def get_average_stale_prop(tracker, share_hash, lookbehind):
stales = sum(1 for share in tracker.get_chain(share_hash, lookbehind) if share.share_data['stale_info'] is not None)
return stales/(lookbehind + stales)
def get_stale_counts(tracker, share_hash, lookbehind, rates=False):
res = {}
for share in tracker.get_chain(share_hash, lookbehind - 1):
res['good'] = res.get('good', 0) + bitcoin_data.target_to_average_attempts(share.target)
s = share.share_data['stale_info']
if s is not None:
res[s] = res.get(s, 0) + bitcoin_data.target_to_average_attempts(share.target)
if rates:
dt = tracker.items[share_hash].timestamp - tracker.items[tracker.get_nth_parent_hash(share_hash, lookbehind - 1)].timestamp
res = dict((k, v/dt) for k, v in res.iteritems())
return res
def get_user_stale_props(tracker, share_hash, lookbehind):
res = {}
for share in tracker.get_chain(share_hash, lookbehind - 1):
stale, total = res.get(share.share_data['pubkey_hash'], (0, 0))
total += 1
if share.share_data['stale_info'] is not None:
stale += 1
total += 1
res[share.share_data['pubkey_hash']] = stale, total
return dict((pubkey_hash, stale/total) for pubkey_hash, (stale, total) in res.iteritems())
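# Expected payout per output script if a block were found now, proportional to cumulative weights over the payout window.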
def get_expected_payouts(tracker, best_share_hash, block_target, subsidy, net):
weights, total_weight, donation_weight = tracker.get_cumulative_weights(best_share_hash, min(tracker.get_height(best_share_hash), net.REAL_CHAIN_LENGTH), 65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target))
res = dict((script, subsidy*weight//total_weight) for script, weight in weights.iteritems())
res[DONATION_SCRIPT] = res.get(DONATION_SCRIPT, 0) + subsidy - sum(res.itervalues())
return res
def get_desired_version_counts(tracker, best_share_hash, dist):
res = {}
for share in tracker.get_chain(best_share_hash, dist):
res[share.desired_version] = res.get(share.desired_version, 0) + bitcoin_data.target_to_average_attempts(share.target)
return res
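# Collect operator-facing warning strings: a share-version majority this node doesn't support, bitcoind warnings, parent-network version problems, and loss of contact with bitcoind.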
def get_warnings(tracker, best_share, net, bitcoind_getinfo, bitcoind_work_value):
res = []
desired_version_counts = get_desired_version_counts(tracker, best_share,
min(net.CHAIN_LENGTH, 60*60//net.SHARE_PERIOD, tracker.get_height(best_share)))
majority_desired_version = max(desired_version_counts, key=lambda k: desired_version_counts[k])
if majority_desired_version > (Share.SUCCESSOR if Share.SUCCESSOR is not None else Share).VOTING_VERSION and desired_version_counts[majority_desired_version] > sum(desired_version_counts.itervalues())/2:
res.append('A MAJORITY OF SHARES CONTAIN A VOTE FOR AN UNSUPPORTED SHARE IMPLEMENTATION! (v%i with %i%% support)\n'
'An upgrade is likely necessary. Check http://p2pool.forre.st/ for more information.' % (
majority_desired_version, 100*desired_version_counts[majority_desired_version]/sum(desired_version_counts.itervalues())))
if bitcoind_getinfo['warnings'] != '':
if 'This is a pre-release test build' not in bitcoind_getinfo['warnings']:
res.append('(from bitcoind) %s' % (bitcoind_getinfo['warnings'],))
version_warning = getattr(net, 'VERSION_WARNING', lambda v: None)(bitcoind_getinfo['version'])
if version_warning is not None:
res.append(version_warning)
if time.time() > bitcoind_work_value['last_update'] + 60:
res.append('''LOST CONTACT WITH BITCOIND for %s! Check that it isn't frozen or dead!''' % (math.format_dt(time.time() - bitcoind_work_value['last_update']),))
return res
def format_hash(x):
if x is None:
return 'xxxxxxxx'
return '%08x' % (x % 2**32)
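# Append-only on-disk share store: shares and verified-hash markers are written as typed lines across numbered files, and files are deleted once none of their entries are still wanted.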
class ShareStore(object):
def __init__(self, prefix, net, share_cb, verified_hash_cb):
self.dirname = os.path.dirname(os.path.abspath(prefix))
self.filename = os.path.basename(os.path.abspath(prefix))
self.net = net
known = {}
filenames, next = self.get_filenames_and_next()
for filename in filenames:
share_hashes, verified_hashes = known.setdefault(filename, (set(), set()))
with open(filename, 'rb') as f:
for line in f:
try:
type_id_str, data_hex = line.strip().split(' ')
type_id = int(type_id_str)
if type_id == 0:
pass
elif type_id == 1:
pass
elif type_id == 2:
verified_hash = int(data_hex, 16)
verified_hash_cb(verified_hash)
verified_hashes.add(verified_hash)
elif type_id == 5:
raw_share = share_type.unpack(data_hex.decode('hex'))
if raw_share['type'] < Share.VERSION:
continue
share = load_share(raw_share, self.net, None)
share_cb(share)
share_hashes.add(share.hash)
else:
raise NotImplementedError("share type %i" % (type_id,))
except Exception:
log.err(None, "HARMLESS error while reading saved shares, continuing where left off:")
self.known = known # filename -> (set of share hashes, set of verified hashes)
self.known_desired = dict((k, (set(a), set(b))) for k, (a, b) in known.iteritems())
def _add_line(self, line):
filenames, next = self.get_filenames_and_next()
if filenames and os.path.getsize(filenames[-1]) < 10e6:
filename = filenames[-1]
else:
filename = next
with open(filename, 'ab') as f:
f.write(line + '\n')
return filename
def add_share(self, share):
for filename, (share_hashes, verified_hashes) in self.known.iteritems():
if share.hash in share_hashes:
break
else:
filename = self._add_line("%i %s" % (5, share_type.pack(share.as_share()).encode('hex')))
share_hashes, verified_hashes = self.known.setdefault(filename, (set(), set()))
share_hashes.add(share.hash)
share_hashes, verified_hashes = self.known_desired.setdefault(filename, (set(), set()))
share_hashes.add(share.hash)
def add_verified_hash(self, share_hash):
for filename, (share_hashes, verified_hashes) in self.known.iteritems():
if share_hash in verified_hashes:
break
else:
filename = self._add_line("%i %x" % (2, share_hash))
share_hashes, verified_hashes = self.known.setdefault(filename, (set(), set()))
verified_hashes.add(share_hash)
share_hashes, verified_hashes = self.known_desired.setdefault(filename, (set(), set()))
verified_hashes.add(share_hash)
def get_filenames_and_next(self):
suffixes = sorted(int(x[len(self.filename):]) for x in os.listdir(self.dirname) if x.startswith(self.filename) and x[len(self.filename):].isdigit())
return [os.path.join(self.dirname, self.filename + str(suffix)) for suffix in suffixes], os.path.join(self.dirname, self.filename + (str(suffixes[-1] + 1) if suffixes else str(0)))
def forget_share(self, share_hash):
for filename, (share_hashes, verified_hashes) in self.known_desired.iteritems():
if share_hash in share_hashes:
share_hashes.remove(share_hash)
self.check_remove()
def forget_verified_share(self, share_hash):
for filename, (share_hashes, verified_hashes) in self.known_desired.iteritems():
if share_hash in verified_hashes:
verified_hashes.remove(share_hash)
self.check_remove()
def check_remove(self):
to_remove = set()
for filename, (share_hashes, verified_hashes) in self.known_desired.iteritems():
#print filename, len(share_hashes) + len(verified_hashes)
if not share_hashes and not verified_hashes:
to_remove.add(filename)
for filename in to_remove:
self.known.pop(filename)
self.known_desired.pop(filename)
os.remove(filename)
print "REMOVED", filename
| 43,694 | Python | .py | 687 | 51.946143 | 294 | 0.638083 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,494 | test_data.py | p2pool_p2pool/p2pool/test/test_data.py |
import random
import unittest
from p2pool import data
from p2pool.bitcoin import data as bitcoin_data
from p2pool.test.util import test_forest
from p2pool.util import forest
def random_bytes(length):
return ''.join(chr(random.randrange(2**8)) for i in xrange(length))
class Test(unittest.TestCase):
def test_hashlink1(self):
for i in xrange(100):
d = random_bytes(random.randrange(2048))
x = data.prefix_to_hash_link(d)
assert data.check_hash_link(x, '') == bitcoin_data.hash256(d)
def test_hashlink2(self):
for i in xrange(100):
d = random_bytes(random.randrange(2048))
d2 = random_bytes(random.randrange(2048))
x = data.prefix_to_hash_link(d)
assert data.check_hash_link(x, d2) == bitcoin_data.hash256(d + d2)
def test_hashlink3(self):
for i in xrange(100):
d = random_bytes(random.randrange(2048))
d2 = random_bytes(random.randrange(200))
d3 = random_bytes(random.randrange(2048))
x = data.prefix_to_hash_link(d + d2, d2)
assert data.check_hash_link(x, d3, d2) == bitcoin_data.hash256(d + d2 + d3)
def test_skiplist(self):
t = forest.Tracker()
d = data.WeightsSkipList(t)
for i in xrange(200):
t.add(test_forest.FakeShare(hash=i, previous_hash=i - 1 if i > 0 else None, new_script=i, share_data=dict(donation=1234), target=2**249))
for i in xrange(200):
a = random.randrange(200)
d(a, random.randrange(a + 1), 1000000*65535)[1]
| 1,605 | Python | .py | 35 | 37.028571 | 149 | 0.62951 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,495 | test_p2p.py | p2pool_p2pool/p2pool/test/test_p2p.py |
import random
from twisted.internet import defer, endpoints, protocol, reactor
from twisted.trial import unittest
from p2pool import networks, p2p
from p2pool.bitcoin import data as bitcoin_data
from p2pool.util import deferral
class Test(unittest.TestCase):
@defer.inlineCallbacks
def test_sharereq(self):
class MyNode(p2p.Node):
def __init__(self, df):
p2p.Node.__init__(self, lambda: None, 29333, networks.nets['bitcoin'], {}, set([('127.0.0.1', 9333)]), 0, 0, 0, 0)
self.df = df
def handle_share_hashes(self, hashes, peer):
peer.get_shares(
hashes=[hashes[0]],
parents=5,
stops=[],
).chainDeferred(self.df)
df = defer.Deferred()
n = MyNode(df)
n.start()
try:
yield df
finally:
yield n.stop()
@defer.inlineCallbacks
def test_tx_limit(self):
class MyNode(p2p.Node):
def __init__(self, df):
p2p.Node.__init__(self, lambda: None, 29333, networks.nets['bitcoin'], {}, set([('127.0.0.1', 9333)]), 0, 0, 0, 0)
self.df = df
self.sent_time = 0
@defer.inlineCallbacks
def got_conn(self, conn):
p2p.Node.got_conn(self, conn)
yield deferral.sleep(.5)
new_mining_txs = dict(self.mining_txs_var.value)
for i in xrange(3):
huge_tx = dict(
version=0,
tx_ins=[],
tx_outs=[dict(
value=0,
script='x'*900000,
)],
lock_time=i,
)
new_mining_txs[bitcoin_data.get_txid(huge_tx)] = huge_tx
self.mining_txs_var.set(new_mining_txs)
self.sent_time = reactor.seconds()
def lost_conn(self, conn, reason):
self.df.callback(None)
try:
p2p.Protocol.max_remembered_txs_size *= 10
df = defer.Deferred()
n = MyNode(df)
n.start()
yield df
if not (n.sent_time <= reactor.seconds() <= n.sent_time + 1):
raise ValueError('node did not disconnect within 1 seconds of receiving too much tx data')
yield n.stop()
finally:
p2p.Protocol.max_remembered_txs_size //= 10
| 2,698 | Python | .py | 64 | 25.984375 | 130 | 0.496571 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,496 | test_node.py | p2pool_p2pool/p2pool/test/test_node.py |
from __future__ import division
import base64
import random
import tempfile
from twisted.internet import defer, reactor
from twisted.python import failure
from twisted.trial import unittest
from twisted.web import client, resource, server
from p2pool import data, node, work, main
from p2pool.bitcoin import data as bitcoin_data, networks, worker_interface
from p2pool.util import deferral, jsonrpc, math, variable
class bitcoind(object): # can be used as p2p factory, p2p protocol, or rpc jsonrpc proxy
def __init__(self):
self.blocks = [0x000000000000016c169477c25421250ec5d32cf9c6d38538b5de970a2355fd89]
self.headers = {0x16c169477c25421250ec5d32cf9c6d38538b5de970a2355fd89: {
'nonce': 1853158954,
'timestamp': 1351658517,
'merkle_root': 2282849479936278423916707524932131168473430114569971665822757638339486597658L,
'version': 1,
'previous_block': 1048610514577342396345362905164852351970507722694242579238530L,
'bits': bitcoin_data.FloatingInteger(bits=0x1a0513c5, target=0x513c50000000000000000000000000000000000000000000000L),
}}
self.conn = variable.Variable(self)
self.new_headers = variable.Event()
self.new_block = variable.Event()
self.new_tx = variable.Event()
# p2p factory
def getProtocol(self):
return self
# p2p protocol
def send_block(self, block):
pass
def send_tx(self, tx):
pass
def get_block_header(self, block_hash):
return self.headers[block_hash]
# rpc jsonrpc proxy
def rpc_help(self):
return '\ngetblock '
def rpc_getblock(self, block_hash_hex):
block_hash = int(block_hash_hex, 16)
return dict(height=self.blocks.index(block_hash))
def __getattr__(self, name):
if name.startswith('rpc_'):
return lambda *args, **kwargs: failure.Failure(jsonrpc.Error_for_code(-32601)('Method not found'))
def rpc_getblocktemplate(self, param):
if param['mode'] == 'template':
pass
elif param['mode'] == 'submit':
result = param['data']
block = bitcoin_data.block_type.unpack(result.decode('hex'))
if sum(tx_out['value'] for tx_out in block['txs'][0]['tx_outs']) != sum(tx['tx_outs'][0]['value'] for tx in block['txs'][1:]) + 5000000000:
print 'invalid fee'
if block['header']['previous_block'] != self.blocks[-1]:
return False
if bitcoin_data.hash256(result.decode('hex')) > block['header']['bits'].target:
return False
header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header']))
self.blocks.append(header_hash)
self.headers[header_hash] = block['header']
reactor.callLater(0, self.new_block.happened)
return True
else:
raise jsonrpc.Error_for_code(-1)('invalid request')
txs = []
for i in xrange(100):
fee = i
txs.append(dict(
data=bitcoin_data.tx_type.pack(dict(version=1, tx_ins=[], tx_outs=[dict(value=fee, script='hello!'*100)], lock_time=0)).encode('hex'),
fee=fee,
))
return {
"version" : 2,
"previousblockhash" : '%064x' % (self.blocks[-1],),
"transactions" : txs,
"coinbaseaux" : {
"flags" : "062f503253482f"
},
"coinbasevalue" : 5000000000 + sum(tx['fee'] for tx in txs),
"target" : "0000000000000513c50000000000000000000000000000000000000000000000",
"mintime" : 1351655621,
"mutable" : [
"time",
"transactions",
"prevblock"
],
"noncerange" : "00000000ffffffff",
"sigoplimit" : 20000,
"sizelimit" : 1000000,
"curtime" : 1351659940,
"bits" : "21008000",
"height" : len(self.blocks),
}
@apply
class mm_provider(object):
def __getattr__(self, name):
print '>>>>>>>', name
def rpc_getauxblock(self, request, result1=None, result2=None):
if result1 is not None:
print result1, result2
return True
return {
"target" : "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", # 2**256*2/3
"hash" : "2756ea0315d46dc3d8d974f34380873fc88863845ac01a658ef11bc3b368af52",
"chainid" : 1
}
mynet = math.Object(
NAME='mynet',
PARENT=networks.nets['litecoin_testnet'],
SHARE_PERIOD=5, # seconds
CHAIN_LENGTH=20*60//3, # shares
REAL_CHAIN_LENGTH=20*60//3, # shares
TARGET_LOOKBEHIND=200, # shares
SPREAD=3, # blocks
IDENTIFIER='cca5e24ec6408b1e'.decode('hex'),
PREFIX='ad9614f6466a39cf'.decode('hex'),
P2P_PORT=19338,
MIN_TARGET=2**256 - 1,
MAX_TARGET=2**256 - 1,
PERSIST=False,
WORKER_PORT=19327,
BOOTSTRAP_ADDRS='72.14.191.28'.split(' '),
ANNOUNCE_CHANNEL='#p2pool-alt',
VERSION_CHECK=lambda v: True,
)
class MiniNode(object):
@classmethod
@defer.inlineCallbacks
def start(cls, net, factory, bitcoind, peer_ports, merged_urls):
self = cls()
self.n = node.Node(factory, bitcoind, [], [], net)
yield self.n.start()
self.n.p2p_node = node.P2PNode(self.n, port=0, max_incoming_conns=1000000, addr_store={}, connect_addrs=[('127.0.0.1', peer_port) for peer_port in peer_ports])
self.n.p2p_node.start()
wb = work.WorkerBridge(node=self.n, my_pubkey_hash=random.randrange(2**160), donation_percentage=random.uniform(0, 10), merged_urls=merged_urls, worker_fee=3, args=math.Object(donation_percentage=random.uniform(0, 10), address='foo', worker_fee=3, timeaddresses=1000), pubkeys=main.keypool(), bitcoind=bitcoind)
self.wb = wb
web_root = resource.Resource()
worker_interface.WorkerInterface(wb).attach_to(web_root)
self.web_port = reactor.listenTCP(0, server.Site(web_root))
defer.returnValue(self)
@defer.inlineCallbacks
def stop(self):
yield self.web_port.stopListening()
yield self.n.p2p_node.stop()
yield self.n.stop()
del self.web_port, self.n
class Test(unittest.TestCase):
@defer.inlineCallbacks
def test_node(self):
bitd = bitcoind()
mm_root = resource.Resource()
mm_root.putChild('', jsonrpc.HTTPServer(mm_provider))
mm_port = reactor.listenTCP(0, server.Site(mm_root))
n = node.Node(bitd, bitd, [], [], mynet)
yield n.start()
wb = work.WorkerBridge(node=n, my_pubkey_hash=42, donation_percentage=2, merged_urls=[('http://127.0.0.1:%i' % (mm_port.getHost().port,), '')], worker_fee=3, args=math.Object(donation_percentage=2, address='foo', worker_fee=3, timeaddresses=1000), pubkeys=main.keypool(), bitcoind=bitd)
web_root = resource.Resource()
worker_interface.WorkerInterface(wb).attach_to(web_root)
port = reactor.listenTCP(0, server.Site(web_root))
proxy = jsonrpc.HTTPProxy('http://127.0.0.1:' + str(port.getHost().port),
headers=dict(Authorization='Basic ' + base64.b64encode('user/0:password')))
yield deferral.sleep(3)
for i in xrange(100):
blah = yield proxy.rpc_getwork()
yield proxy.rpc_getwork(blah['data'])
yield deferral.sleep(3)
assert len(n.tracker.items) == 100
assert n.tracker.verified.get_height(n.best_share_var.value) == 100
wb.stop()
n.stop()
yield port.stopListening()
del n, wb, web_root, port, proxy
import gc
gc.collect()
gc.collect()
gc.collect()
yield deferral.sleep(20) # waiting for work_poller to exit
yield mm_port.stopListening()
#test_node.timeout = 15
@defer.inlineCallbacks
def test_nodes(self):
N = 3
SHARES = 600
bitd = bitcoind()
nodes = []
for i in xrange(N):
nodes.append((yield MiniNode.start(mynet, bitd, bitd, [mn.n.p2p_node.serverfactory.listen_port.getHost().port for mn in nodes], [])))
yield deferral.sleep(3)
for i in xrange(SHARES):
proxy = jsonrpc.HTTPProxy('http://127.0.0.1:' + str(random.choice(nodes).web_port.getHost().port),
headers=dict(Authorization='Basic ' + base64.b64encode('user/0:password')))
blah = yield proxy.rpc_getwork()
yield proxy.rpc_getwork(blah['data'])
yield deferral.sleep(.05)
print i
print type(nodes[0].n.tracker.items[nodes[0].n.best_share_var.value])
# crawl web pages
from p2pool import web
stop_event = variable.Event()
web2_root = web.get_web_root(nodes[0].wb, tempfile.mkdtemp(), variable.Variable({'errors': '', 'version': 100000}), stop_event)
web2_port = reactor.listenTCP(0, server.Site(web2_root))
for name in web2_root.listNames() + ['web/' + x for x in web2_root.getChildWithDefault('web', None).listNames()]:
if name in ['web/graph_data', 'web/share', 'web/share_data']: continue
print
print name
try:
res = yield client.getPage('http://127.0.0.1:%i/%s' % (web2_port.getHost().port, name))
except:
import traceback
traceback.print_exc()
else:
print repr(res)[:100]
print
yield web2_port.stopListening()
stop_event.happened()
del web2_root
yield deferral.sleep(3)
for i, n in enumerate(nodes):
assert len(n.n.tracker.items) == SHARES, (i, len(n.n.tracker.items))
assert n.n.tracker.verified.get_height(n.n.best_share_var.value) == SHARES, (i, n.n.tracker.verified.get_height(n.n.best_share_var.value))
assert type(n.n.tracker.items[nodes[0].n.best_share_var.value]) is (data.Share.SUCCESSOR if data.Share.SUCCESSOR is not None else data.Share)
assert type(n.n.tracker.items[n.n.tracker.get_nth_parent_hash(nodes[0].n.best_share_var.value, SHARES - 5)]) is data.Share
for n in nodes:
yield n.stop()
del nodes, n
import gc
gc.collect()
gc.collect()
gc.collect()
yield deferral.sleep(20) # waiting for work_poller to exit
test_nodes.timeout = 300
| 10,812 | Python | .py | 231 | 36.17316 | 319 | 0.619057 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,497 | test_script.py | p2pool_p2pool/p2pool/test/bitcoin/test_script.py |
import unittest
from p2pool.bitcoin import script
class Test(unittest.TestCase):
def test_all(self):
data = '76 A9 14 89 AB CD EF AB BA AB BA AB BA AB BA AB BA AB BA AB BA AB BA 88 AC'.replace(' ', '').decode('hex')
self.assertEquals(
list(script.parse(data)),
[('UNK_118', None), ('UNK_169', None), ('PUSH', '\x89\xab\xcd\xef\xab\xba\xab\xba\xab\xba\xab\xba\xab\xba\xab\xba\xab\xba\xab\xba'), ('UNK_136', None), ('CHECKSIG', None)],
)
self.assertEquals(script.get_sigop_count(data), 1)
| 552 | Python | .py | 10 | 48 | 184 | 0.616667 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,498 | test_data.py | p2pool_p2pool/p2pool/test/bitcoin/test_data.py |
import unittest
from p2pool.bitcoin import data, networks
from p2pool.util import pack
class Test(unittest.TestCase):
def test_header_hash(self):
assert data.hash256(data.block_header_type.pack(dict(
version=1,
previous_block=0x000000000000038a2a86b72387f93c51298298a732079b3b686df3603d2f6282,
merkle_root=0x37a43a3b812e4eb665975f46393b4360008824aab180f27d642de8c28073bc44,
timestamp=1323752685,
bits=data.FloatingInteger(437159528),
nonce=3658685446,
))) == 0x000000000000003aaaf7638f9f9c0d0c60e8b0eb817dcdb55fd2b1964efc5175
def test_header_hash_litecoin(self):
assert networks.nets['litecoin'].POW_FUNC(data.block_header_type.pack(dict(
version=1,
previous_block=0xd928d3066613d1c9dd424d5810cdd21bfeef3c698977e81ec1640e1084950073,
merkle_root=0x03f4b646b58a66594a182b02e425e7b3a93c8a52b600aa468f1bc5549f395f16,
timestamp=1327807194,
bits=data.FloatingInteger(0x1d01b56f),
nonce=20736,
))) < 2**256//2**30
def test_tx_hash(self):
assert data.get_txid(dict(
version=1,
tx_ins=[dict(
previous_output=None,
sequence=None,
script='70736a0468860e1a0452389500522cfabe6d6d2b2f33cf8f6291b184f1b291d24d82229463fcec239afea0ee34b4bfc622f62401000000000000004d696e656420627920425443204775696c6420ac1eeeed88'.decode('hex'),
)],
tx_outs=[dict(
value=5003880250,
script=data.pubkey_hash_to_script2(pack.IntType(160).unpack('ca975b00a8c203b8692f5a18d92dc5c2d2ebc57b'.decode('hex'))),
)],
lock_time=0,
)) == 0xb53802b2333e828d6532059f46ecf6b313a42d79f97925e457fbbfda45367e5c
def test_address_to_pubkey_hash(self):
assert data.address_to_pubkey_hash('1KUCp7YP5FP8ViRxhfszSUJCTAajK6viGy', networks.nets['bitcoin']) == pack.IntType(160).unpack('ca975b00a8c203b8692f5a18d92dc5c2d2ebc57b'.decode('hex'))
def test_merkle_hash(self):
assert data.merkle_hash([
0xb53802b2333e828d6532059f46ecf6b313a42d79f97925e457fbbfda45367e5c,
0x326dfe222def9cf571af37a511ccda282d83bedcc01dabf8aa2340d342398cf0,
0x5d2e0541c0f735bac85fa84bfd3367100a3907b939a0c13e558d28c6ffd1aea4,
0x8443faf58aa0079760750afe7f08b759091118046fe42794d3aca2aa0ff69da2,
0x4d8d1c65ede6c8eab843212e05c7b380acb82914eef7c7376a214a109dc91b9d,
0x1d750bc0fa276f89db7e6ed16eb1cf26986795121f67c03712210143b0cb0125,
0x5179349931d714d3102dfc004400f52ef1fed3b116280187ca85d1d638a80176,
0xa8b3f6d2d566a9239c9ad9ae2ed5178dee4a11560a8dd1d9b608fd6bf8c1e75,
0xab4d07cd97f9c0c4129cff332873a44efdcd33bdbfc7574fe094df1d379e772f,
0xf54a7514b1de8b5d9c2a114d95fba1e694b6e3e4a771fda3f0333515477d685b,
0x894e972d8a2fc6c486da33469b14137a7f89004ae07b95e63923a3032df32089,
0x86cdde1704f53fce33ab2d4f5bc40c029782011866d0e07316d695c41e32b1a0,
0xf7cf4eae5e497be8215778204a86f1db790d9c27fe6a5b9f745df5f3862f8a85,
0x2e72f7ddf157d64f538ec72562a820e90150e8c54afc4d55e0d6e3dbd8ca50a,
0x9f27471dfbc6ce3cbfcf1c8b25d44b8d1b9d89ea5255e9d6109e0f9fd662f75c,
0x995f4c9f78c5b75a0c19f0a32387e9fa75adaa3d62fba041790e06e02ae9d86d,
0xb11ec2ad2049aa32b4760d458ee9effddf7100d73c4752ea497e54e2c58ba727,
0xa439f288fbc5a3b08e5ffd2c4e2d87c19ac2d5e4dfc19fabfa33c7416819e1ec,
0x3aa33f886f1357b4bbe81784ec1cf05873b7c5930ab912ee684cc6e4f06e4c34,
0xcab9a1213037922d94b6dcd9c567aa132f16360e213c202ee59f16dde3642ac7,
0xa2d7a3d2715eb6b094946c6e3e46a88acfb37068546cabe40dbf6cd01a625640,
0x3d02764f24816aaa441a8d472f58e0f8314a70d5b44f8a6f88cc8c7af373b24e,
0xcc5adf077c969ebd78acebc3eb4416474aff61a828368113d27f72ad823214d0,
0xf2d8049d1971f02575eb37d3a732d46927b6be59a18f1bd0c7f8ed123e8a58a,
0x94ffe8d46a1accd797351894f1774995ed7df3982c9a5222765f44d9c3151dbb,
0x82268fa74a878636261815d4b8b1b01298a8bffc87336c0d6f13ef6f0373f1f0,
0x73f441f8763dd1869fe5c2e9d298b88dc62dc8c75af709fccb3622a4c69e2d55,
0xeb78fc63d4ebcdd27ed618fd5025dc61de6575f39b2d98e3be3eb482b210c0a0,
0x13375a426de15631af9afdf00c490e87cc5aab823c327b9856004d0b198d72db,
0x67d76a64fa9b6c5d39fde87356282ef507b3dec1eead4b54e739c74e02e81db4,
]) == 0x37a43a3b812e4eb665975f46393b4360008824aab180f27d642de8c28073bc44
| 4,617 | Python | .py | 71 | 53.169014 | 206 | 0.775591 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |
| 9,499 | test_getwork.py | p2pool_p2pool/p2pool/test/bitcoin/test_getwork.py |
import unittest
from p2pool.bitcoin import getwork, data as bitcoin_data
class Test(unittest.TestCase):
def test_all(self):
cases = [
{
'target': '0000000000000000000000000000000000000000000000f2b944000000000000',
'midstate': '5982f893102dec03e374b472647c4f19b1b6d21ae4b2ac624f3d2f41b9719404',
'hash1': '00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000',
'data': '0000000163930d52a5ffca79b29b95a659a302cd4e1654194780499000002274000000002e133d9e51f45bc0886d05252038e421e82bff18b67dc14b90d9c3c2f422cd5c4dd4598e1a44b9f200000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000'
},
{
'midstate' : 'f4a9b048c0cb9791bc94b13ee0eec21e713963d524fd140b58bb754dd7b0955f',
'data' : '000000019a1d7342fb62090bda686b22d90f9f73d0f5c418b9c980cd0000011a00000000680b07c8a2f97ecd831f951806857e09f98a3b81cdef1fa71982934fef8dc3444e18585d1a0abbcf00000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000',
'hash1' : '00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000',
'target' : '0000000000000000000000000000000000000000000000cfbb0a000000000000',
'extrathing': 'hi!',
},
{
'data' : '000000019a1d7342fb62090bda686b22d90f9f73d0f5c418b9c980cd0000011a00000000680b07c8a2f97ecd831f951806857e09f98a3b81cdef1fa71982934fef8dc3444e18585d1a0abbcf00000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000',
'hash1' : '00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000',
'target' : '0000000000000000000000000000000000000000000000cfbb0a000000000000',
'extrathing': 'hi!',
},
]
for case in cases:
ba = getwork.BlockAttempt.from_getwork(case)
extra = dict(case)
del extra['data'], extra['hash1'], extra['target']
extra.pop('midstate', None)
getwork_check = ba.getwork(**extra)
assert getwork_check == case or dict((k, v) for k, v in getwork_check.iteritems() if k != 'midstate') == case
case2s = [
getwork.BlockAttempt(
1,
0x148135e10208db85abb62754341a392eab1f186aab077a831cf7,
0x534ea08be1ab529f484369344b6d5423ef5a0767db9b3ebb4e182bbb67962520,
1305759879,
bitcoin_data.FloatingInteger.from_target_upper_bound(0x44b9f20000000000000000000000000000000000000000000000),
0x44b9f20000000000000000000000000000000000000000000000,
),
getwork.BlockAttempt(
1,
0x148135e10208db85abb62754341a392eab1f186aab077a831cf7,
0x534ea08be1ab529f484369344b6d5423ef5a0767db9b3ebb4e182bbb67962520,
1305759879,
bitcoin_data.FloatingInteger.from_target_upper_bound(0x44b9f20000000000000000000000000000000000000000000000),
432*2**230,
),
getwork.BlockAttempt(
1,
0x148135e10208db85abb62754341a392eab1f186aab077a831cf7,
0x534ea08be1ab529f484369344b6d5423ef5a0767db9b3ebb4e182bbb67962520,
1305759879,
bitcoin_data.FloatingInteger.from_target_upper_bound(0x44b9f20000000000000000000000000000000000000000000000),
7*2**240,
)
]
for case2 in case2s:
assert getwork.BlockAttempt.from_getwork(case2.getwork()) == case2
assert getwork.BlockAttempt.from_getwork(case2.getwork(ident='hi')) == case2
case2 = case2.update(previous_block=case2.previous_block - 10)
assert getwork.BlockAttempt.from_getwork(case2.getwork()) == case2
assert getwork.BlockAttempt.from_getwork(case2.getwork(ident='hi')) == case2
| 4,273 | Python | .py | 64 | 52.3125 | 284 | 0.716683 | p2pool/p2pool | 1,139 | 1,015 | 49 | GPL-3.0 | 9/5/2024, 5:10:54 PM (Europe/Amsterdam) |