2013-09-01 09:16:15 -07:00
|
|
|
# Electrum - lightweight Bitcoin client
|
|
|
|
# Copyright (C) 2012 thomasv@ecdsa.org
|
|
|
|
#
|
2016-02-23 02:36:42 -08:00
|
|
|
# Permission is hereby granted, free of charge, to any person
|
|
|
|
# obtaining a copy of this software and associated documentation files
|
|
|
|
# (the "Software"), to deal in the Software without restriction,
|
|
|
|
# including without limitation the rights to use, copy, modify, merge,
|
|
|
|
# publish, distribute, sublicense, and/or sell copies of the Software,
|
|
|
|
# and to permit persons to whom the Software is furnished to do so,
|
|
|
|
# subject to the following conditions:
|
2013-09-01 09:16:15 -07:00
|
|
|
#
|
2016-02-23 02:36:42 -08:00
|
|
|
# The above copyright notice and this permission notice shall be
|
|
|
|
# included in all copies or substantial portions of the Software.
|
2013-09-01 09:16:15 -07:00
|
|
|
#
|
2016-02-23 02:36:42 -08:00
|
|
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
|
|
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
|
|
|
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
|
|
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
|
|
|
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
|
|
|
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
|
|
|
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
|
|
# SOFTWARE.
|
2015-05-17 06:54:20 -07:00
|
|
|
import os
|
2017-07-24 06:54:42 -07:00
|
|
|
import threading
|
|
|
|
|
2017-01-22 10:25:24 -08:00
|
|
|
from . import util
|
|
|
|
from . import bitcoin
|
2018-05-16 11:56:48 -07:00
|
|
|
from . import constants
|
2017-01-22 10:25:24 -08:00
|
|
|
from .bitcoin import *
|
2013-09-01 09:16:15 -07:00
|
|
|
|
2018-06-15 18:29:29 -07:00
|
|
|
# Serialized block header length in bytes (includes the Equihash-style
# nonce, solution-size and solution fields parsed in deserialize_header)
HDR_LEN = 1487
# Number of headers per chunk as served by the server
CHUNK_LEN = 100

# Easiest (maximum) proof-of-work target allowed
MAX_TARGET = 0x0007FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF
# Number of blocks averaged when retargeting difficulty
POW_AVERAGING_WINDOW = 17
# Number of blocks used for the median-time calculation
POW_MEDIAN_BLOCK_SPAN = 11
# Maximum allowed difficulty drop per retarget, in percent
POW_MAX_ADJUST_DOWN = 32
# Maximum allowed difficulty rise per retarget, in percent
POW_MAX_ADJUST_UP = 16
# Damping divisor applied to the measured timespan deviation
POW_DAMPING_FACTOR = 4
# Target block spacing in seconds
POW_TARGET_SPACING = 150

# Number of prior headers needed to compute a height's target
TARGET_CALC_BLOCKS = POW_AVERAGING_WINDOW + POW_MEDIAN_BLOCK_SPAN

# Expected duration of one full averaging window, in seconds
AVERAGING_WINDOW_TIMESPAN = POW_AVERAGING_WINDOW * POW_TARGET_SPACING

# Clamp bounds for the damped actual timespan used in get_target
MIN_ACTUAL_TIMESPAN = AVERAGING_WINDOW_TIMESPAN * \
    (100 - POW_MAX_ADJUST_UP) // 100

MAX_ACTUAL_TIMESPAN = AVERAGING_WINDOW_TIMESPAN * \
    (100 + POW_MAX_ADJUST_DOWN) // 100
|
|
|
|
|
2013-09-01 09:16:15 -07:00
|
|
|
|
2017-05-29 00:03:39 -07:00
|
|
|
def serialize_header(res):
    """Serialize a header dict back into its hex-string wire form."""
    fields = [
        int_to_hex(res.get('version'), 4),
        rev_hex(res.get('prev_block_hash')),
        rev_hex(res.get('merkle_root')),
        rev_hex(res.get('reserved_hash')),
        int_to_hex(int(res.get('timestamp')), 4),
        int_to_hex(int(res.get('bits')), 4),
        rev_hex(res.get('nonce')),
        rev_hex(res.get('sol_size')),
        rev_hex(res.get('solution')),
    ]
    return ''.join(fields)
|
|
|
|
|
|
|
|
def deserialize_header(s, height):
    """Parse HDR_LEN raw header bytes into a dict, tagging it with *height*.

    Raises Exception for empty input or a wrong-length buffer.
    """
    if not s:
        raise Exception('Invalid header: {}'.format(s))
    if len(s) != HDR_LEN:
        raise Exception('Invalid header length: {}'.format(len(s)))

    def hex_to_int(b):
        # little-endian bytes -> int
        return int('0x' + bh2u(b[::-1]), 16)

    return {
        'version': hex_to_int(s[0:4]),
        'prev_block_hash': hash_encode(s[4:36]),
        'merkle_root': hash_encode(s[36:68]),
        'reserved_hash': hash_encode(s[68:100]),
        'timestamp': hex_to_int(s[100:104]),
        'bits': hex_to_int(s[104:108]),
        'nonce': hash_encode(s[108:140]),
        'sol_size': hash_encode(s[140:143]),
        'solution': hash_encode(s[143:1487]),
        'block_height': height,
    }
|
|
|
|
|
|
|
|
def hash_header(header):
    """Return the hex block hash of *header*; all-zero string for None.

    A missing prev_block_hash is filled in with zeros before hashing.
    """
    if header is None:
        return '0' * 64
    if header.get('prev_block_hash') is None:
        header['prev_block_hash'] = '00' * 32
    serialized = serialize_header(header)
    return hash_encode(Hash(bfh(serialized)))
|
2017-05-29 00:03:39 -07:00
|
|
|
|
|
|
|
|
2017-07-15 08:20:06 -07:00
|
|
|
# Registry of known chains, keyed by each chain's fork checkpoint height
# (the main chain lives at key 0)
blockchains = {}
|
|
|
|
|
|
|
|
def read_blockchains(config):
    """Load the main chain and any saved fork files from the headers dir.

    Fork files whose first header no longer connects to their parent are
    skipped (with a log message).  Returns the global blockchains registry.
    """
    blockchains[0] = Blockchain(config, 0, None)
    fdir = os.path.join(util.get_headers_dir(config), 'forks')
    if not os.path.exists(fdir):
        os.mkdir(fdir)
    # fork files are named fork_<parent_id>_<checkpoint>
    l = filter(lambda x: x.startswith('fork_'), os.listdir(fdir))
    # load in parent-id order so parents exist before their children
    l = sorted(l, key = lambda x: int(x.split('_')[1]))
    for filename in l:
        checkpoint = int(filename.split('_')[2])
        parent_id = int(filename.split('_')[1])
        b = Blockchain(config, checkpoint, parent_id)
        h = b.read_header(b.checkpoint)
        if b.parent().can_connect(h, check_height=False):
            blockchains[b.checkpoint] = b
        else:
            util.print_error("cannot connect", filename)
    return blockchains
|
|
|
|
|
2017-07-17 10:32:10 -07:00
|
|
|
def check_header(header):
    """Return the chain whose stored hash matches *header*, else False."""
    if type(header) is not dict:
        return False
    for chain in blockchains.values():
        if chain.check_header(header):
            return chain
    return False
|
|
|
|
|
2017-07-17 10:32:10 -07:00
|
|
|
def can_connect(header):
    """Return the first chain that can append *header*, or False."""
    for chain in blockchains.values():
        if chain.can_connect(header):
            return chain
    return False
|
2017-07-15 08:20:06 -07:00
|
|
|
|
|
|
|
|
2015-09-06 05:40:00 -07:00
|
|
|
class Blockchain(util.PrintError):
|
2017-02-04 06:48:13 -08:00
|
|
|
"""
|
|
|
|
Manages blockchain headers and their verification
|
|
|
|
"""
|
2017-07-13 07:23:41 -07:00
|
|
|
|
2017-07-20 06:05:47 -07:00
|
|
|
    def __init__(self, config, checkpoint, parent_id):
        """A chain of headers whose file starts at height *checkpoint*.

        parent_id is None for the main chain, otherwise the checkpoint of
        the parent chain this fork branched from.
        """
        self.config = config
        self.catch_up = None # interface catching up
        self.checkpoint = checkpoint
        self.checkpoints = constants.net.CHECKPOINTS
        self.parent_id = parent_id
        # protects self._size and all header-file writes
        self.lock = threading.Lock()
        with self.lock:
            self.update_size()
|
2017-07-20 06:05:47 -07:00
|
|
|
|
|
|
|
    def parent(self):
        """Return the parent Blockchain this fork branched from."""
        return blockchains[self.parent_id]
|
2017-07-16 23:44:09 -07:00
|
|
|
|
2017-07-18 09:10:22 -07:00
|
|
|
def get_max_child(self):
|
2017-03-15 04:13:20 -07:00
|
|
|
children = list(filter(lambda y: y.parent_id==self.checkpoint, blockchains.values()))
|
2017-07-18 09:10:22 -07:00
|
|
|
return max([x.checkpoint for x in children]) if children else None
|
|
|
|
|
2017-07-19 00:29:20 -07:00
|
|
|
def get_checkpoint(self):
|
2017-07-18 09:10:22 -07:00
|
|
|
mc = self.get_max_child()
|
2017-07-19 00:29:20 -07:00
|
|
|
return mc if mc is not None else self.checkpoint
|
|
|
|
|
|
|
|
def get_branch_size(self):
|
|
|
|
return self.height() - self.get_checkpoint() + 1
|
|
|
|
|
|
|
|
    def get_name(self):
        # Short display name: first 10 hex chars of the checkpoint hash
        # after the leading zeros.  NOTE: lstrip('00') strips a *set* of
        # characters, i.e. any run of leading '0's, same as lstrip('0').
        return self.get_hash(self.get_checkpoint()).lstrip('00')[0:10]
|
2017-07-18 09:10:22 -07:00
|
|
|
|
2017-07-16 23:44:09 -07:00
|
|
|
def check_header(self, header):
|
|
|
|
header_hash = hash_header(header)
|
|
|
|
height = header.get('block_height')
|
|
|
|
return header_hash == self.get_hash(height)
|
2015-05-08 04:43:42 -07:00
|
|
|
|
2017-07-24 06:54:42 -07:00
|
|
|
    def fork(parent, header):
        """Create a new fork of *parent* starting at *header*.

        NOTE: invoked as parent.fork(header) — the first parameter plays
        the role of self for the *parent* chain, and a fresh Blockchain is
        bound to the local name `self` below.
        """
        checkpoint = header.get('block_height')
        self = Blockchain(parent.config, checkpoint, parent.checkpoint)
        # create an empty fork file, then append the first header
        open(self.path(), 'w+').close()
        self.save_header(header)
        return self
|
|
|
|
|
2015-05-08 04:43:42 -07:00
|
|
|
def height(self):
|
2017-07-16 23:44:09 -07:00
|
|
|
return self.checkpoint + self.size() - 1
|
|
|
|
|
|
|
|
    def size(self):
        """Thread-safe count of headers stored in this chain's file."""
        with self.lock:
            return self._size
|
|
|
|
|
|
|
|
def update_size(self):
|
2017-07-19 02:14:11 -07:00
|
|
|
p = self.path()
|
2018-06-15 18:29:29 -07:00
|
|
|
self._size = os.path.getsize(p)//HDR_LEN if os.path.exists(p) else 0
|
2013-09-02 08:49:12 -07:00
|
|
|
|
2018-05-16 11:56:48 -07:00
|
|
|
    def verify_header(self, header, prev_hash, target):
        """Check a header's linkage, compact bits and proof-of-work.

        Raises Exception on any mismatch; returns None on success.
        On testnet only the prev-hash linkage is checked.
        """
        _hash = hash_header(header)
        if prev_hash != header.get('prev_block_hash'):
            raise Exception("prev hash mismatch: %s vs %s" % (prev_hash, header.get('prev_block_hash')))
        if constants.net.TESTNET:
            return
        # the header's compact bits must encode exactly the expected target
        bits = self.target_to_bits(target)
        if bits != header.get('bits'):
            raise Exception("bits mismatch: %s vs %s" % (bits, header.get('bits')))
        # the block hash, read as an integer, must not exceed the target
        if int('0x' + _hash, 16) > target:
            raise Exception("insufficient proof of work: %s vs target %s" % (int('0x' + _hash, 16), target))
|
2015-12-11 03:37:40 -08:00
|
|
|
|
2015-12-12 21:33:06 -08:00
|
|
|
    def verify_chunk(self, index, data):
        """Verify a chunk of consecutive raw headers.

        *data* holds len(data)//HDR_LEN serialized headers for heights
        starting at index*CHUNK_LEN.  Headers verified so far in this
        chunk are accumulated in *chunk_headers* so get_target can use
        them before they are written to disk.
        """
        num = len(data) // HDR_LEN
        prev_hash = self.get_hash(index * CHUNK_LEN - 1)
        # sentinel: 'empty' flips to False once the first header is added;
        # min/max_height bound the heights present in this dict
        chunk_headers = {'empty': True}
        for i in range(num):
            raw_header = data[i*HDR_LEN:(i+1) * HDR_LEN]
            height = index * CHUNK_LEN + i
            header = deserialize_header(raw_header, height)
            target = self.get_target(height, chunk_headers)
            self.verify_header(header, prev_hash, target)

            chunk_headers[height] = header
            if i == 0:
                chunk_headers['min_height'] = height
                chunk_headers['empty'] = False
            chunk_headers['max_height'] = height
            prev_hash = hash_header(header)
|
2014-10-21 10:05:51 -07:00
|
|
|
|
2013-09-01 09:16:15 -07:00
|
|
|
def path(self):
|
2017-05-29 00:03:39 -07:00
|
|
|
d = util.get_headers_dir(self.config)
|
2017-07-20 06:29:22 -07:00
|
|
|
filename = 'blockchain_headers' if self.parent_id is None else os.path.join('forks', 'fork_%d_%d'%(self.parent_id, self.checkpoint))
|
2017-07-19 02:26:13 -07:00
|
|
|
return os.path.join(d, filename)
|
2013-09-01 09:16:15 -07:00
|
|
|
|
|
|
|
    def save_chunk(self, index, chunk):
        """Write a verified chunk into this chain's file at its byte offset."""
        filename = self.path()
        d = (index * CHUNK_LEN - self.checkpoint) * HDR_LEN
        if d < 0:
            # chunk starts below our fork point: keep only the overlapping tail
            chunk = chunk[-d:]
            d = 0
        # only truncate the file when writing past the checkpointed region
        truncate = index >= len(self.checkpoints)
        self.write(chunk, d, truncate)
        self.swap_with_parent()
|
2013-09-01 09:16:15 -07:00
|
|
|
|
2017-07-18 09:10:22 -07:00
|
|
|
    def swap_with_parent(self):
        """If this fork outgrew its parent's branch, swap the two chains.

        The longer branch is always kept in the parent's file, so the file
        contents, parent ids, checkpoints, sizes and registry entries of
        self and parent are exchanged in place.  No-op for the main chain
        or while the parent branch is still at least as long.
        """
        if self.parent_id is None:
            return
        parent_branch_size = self.parent().height() - self.checkpoint + 1
        if parent_branch_size >= self.size():
            return
        self.print_error("swap", self.checkpoint, self.parent_id)
        parent_id = self.parent_id
        checkpoint = self.checkpoint
        parent = self.parent()
        # read both branches before mutating either file
        with open(self.path(), 'rb') as f:
            my_data = f.read()
        with open(parent.path(), 'rb') as f:
            f.seek((checkpoint - parent.checkpoint)*HDR_LEN)
            parent_data = f.read(parent_branch_size*HDR_LEN)
        self.write(parent_data, 0)
        parent.write(my_data, (checkpoint - parent.checkpoint)*HDR_LEN)
        # store file path
        for b in blockchains.values():
            b.old_path = b.path()
        # swap parameters
        self.parent_id = parent.parent_id; parent.parent_id = parent_id
        self.checkpoint = parent.checkpoint; parent.checkpoint = checkpoint
        self._size = parent._size; parent._size = parent_branch_size
        # move files
        for b in blockchains.values():
            if b in [self, parent]: continue
            if b.old_path != b.path():
                self.print_error("renaming", b.old_path, b.path())
                os.rename(b.old_path, b.path())
        # update pointers
        blockchains[self.checkpoint] = self
        blockchains[parent.checkpoint] = parent
|
2017-07-18 09:10:22 -07:00
|
|
|
|
2018-05-16 11:56:48 -07:00
|
|
|
    def write(self, data, offset, truncate=True):
        """Write *data* at byte *offset* of the header file, under the lock.

        If *truncate* and the write is not a pure append, the file is cut
        at *offset* first so stale headers beyond the write never survive.
        The write is fsync'ed and self._size refreshed before returning.
        """
        filename = self.path()
        with self.lock:
            with open(filename, 'rb+') as f:
                if truncate and offset != self._size*HDR_LEN:
                    f.seek(offset)
                    f.truncate()
                f.seek(offset)
                f.write(data)
                f.flush()
                os.fsync(f.fileno())
            self.update_size()
|
2017-07-25 01:03:15 -07:00
|
|
|
|
|
|
|
    def save_header(self, header):
        """Append a single verified header at this chain's tip."""
        delta = header.get('block_height') - self.checkpoint
        data = bfh(serialize_header(header))
        # headers may only be appended, never inserted mid-file
        assert delta == self.size()
        assert len(data) == HDR_LEN
        self.write(data, delta*HDR_LEN)
        self.swap_with_parent()
|
2013-09-01 09:16:15 -07:00
|
|
|
|
2017-07-13 07:23:41 -07:00
|
|
|
    def read_header(self, height):
        """Read and deserialize the header at *height*.

        Delegates to the parent chain for heights below our fork point.
        Returns None for out-of-range heights or a zeroed (never written)
        slot; raises if the file/datadir is missing or the read is short.
        """
        assert self.parent_id != self.checkpoint
        if height < 0:
            return
        if height < self.checkpoint:
            return self.parent().read_header(height)
        if height > self.height():
            return
        delta = height - self.checkpoint
        name = self.path()
        if os.path.exists(name):
            with open(name, 'rb') as f:
                f.seek(delta * HDR_LEN)
                h = f.read(HDR_LEN)
                if len(h) < HDR_LEN:
                    raise Exception('Expected to read a full header. This was only {} bytes'.format(len(h)))
        elif not os.path.exists(util.get_headers_dir(self.config)):
            raise Exception('Electrum datadir does not exist. Was it deleted while running?')
        else:
            raise Exception('Cannot find headers file but datadir is there. Should be at {}'.format(name))
        # an all-zero slot means the header was never written
        if h == bytes([0])*HDR_LEN:
            return None
        return deserialize_header(h, height)
|
2013-09-01 09:16:15 -07:00
|
|
|
|
2017-05-29 00:03:39 -07:00
|
|
|
    def get_hash(self, height):
        """Return the block hash at *height* as a hex string.

        -1 maps to the all-zero hash and 0 to the genesis hash.  Heights
        covered by the hard-coded checkpoint table are answered from it
        (only the last header of each chunk is stored there); all other
        heights are hashed from the stored header.
        """
        if height == -1:
            return '0000000000000000000000000000000000000000000000000000000000000000'
        elif height == 0:
            return constants.net.GENESIS
        elif height < len(self.checkpoints) * CHUNK_LEN - TARGET_CALC_BLOCKS:
            # only chunk-boundary heights can be served from checkpoints
            assert (height+1) % CHUNK_LEN == 0, height
            index = height // CHUNK_LEN
            h, t, extra_headers = self.checkpoints[index]
            return h
        else:
            return hash_header(self.read_header(height))
|
|
|
|
|
2018-06-15 18:29:29 -07:00
|
|
|
def get_median_time(self, height, chunk_headers=None):
|
|
|
|
if chunk_headers is None or chunk_headers['empty']:
|
|
|
|
chunk_empty = True
|
|
|
|
else:
|
|
|
|
chunk_empty = False
|
|
|
|
min_height = chunk_headers['min_height']
|
|
|
|
max_height = chunk_headers['max_height']
|
|
|
|
|
|
|
|
height_range = range(max(0, height - POW_MEDIAN_BLOCK_SPAN),
|
|
|
|
max(1, height))
|
|
|
|
median = []
|
|
|
|
for h in height_range:
|
|
|
|
header = self.read_header(h)
|
|
|
|
if not header and not chunk_empty \
|
|
|
|
and min_height <= h <= max_height:
|
|
|
|
header = chunk_headers[h]
|
2018-06-28 10:15:38 -07:00
|
|
|
if not header:
|
|
|
|
raise Exception("Can not read header at height %s" % h)
|
2018-06-15 18:29:29 -07:00
|
|
|
median.append(header.get('timestamp'))
|
|
|
|
|
|
|
|
median.sort()
|
|
|
|
return median[len(median)//2];
|
|
|
|
|
|
|
|
    def get_target(self, height, chunk_headers=None):
        """Compute the proof-of-work target for the block at *height*.

        Averaging-window retarget: the mean target over the last
        POW_AVERAGING_WINDOW blocks is scaled by the damped and clamped
        median-time timespan.  Headers not yet on disk may be supplied via
        *chunk_headers* (see verify_chunk).
        """
        if chunk_headers is None or chunk_headers['empty']:
            chunk_empty = True
        else:
            chunk_empty = False
            min_height = chunk_headers['min_height']
            max_height = chunk_headers['max_height']

        # early blocks get the easiest allowed target
        if height <= POW_AVERAGING_WINDOW:
            return MAX_TARGET

        height_range = range(max(0, height - POW_AVERAGING_WINDOW),
            max(1, height))
        mean_target = 0
        for h in height_range:
            header = self.read_header(h)
            if not header and not chunk_empty \
                and min_height <= h <= max_height:
                # fall back to the in-memory chunk headers
                header = chunk_headers[h]
            if not header:
                raise Exception("Can not read header at height %s" % h)
            mean_target += self.bits_to_target(header.get('bits'))
        mean_target //= POW_AVERAGING_WINDOW

        actual_timespan = self.get_median_time(height, chunk_headers) - \
            self.get_median_time(height - POW_AVERAGING_WINDOW, chunk_headers)
        # damp the deviation from the expected window timespan
        actual_timespan = AVERAGING_WINDOW_TIMESPAN + \
            int((actual_timespan - AVERAGING_WINDOW_TIMESPAN) / \
            POW_DAMPING_FACTOR)
        # clamp to the allowed adjustment range
        if actual_timespan < MIN_ACTUAL_TIMESPAN:
            actual_timespan = MIN_ACTUAL_TIMESPAN
        elif actual_timespan > MAX_ACTUAL_TIMESPAN:
            actual_timespan = MAX_ACTUAL_TIMESPAN

        next_target = mean_target // AVERAGING_WINDOW_TIMESPAN * actual_timespan

        if next_target > MAX_TARGET:
            next_target = MAX_TARGET

        return next_target
|
2018-05-16 11:56:48 -07:00
|
|
|
|
|
|
|
def bits_to_target(self, bits):
|
|
|
|
bitsN = (bits >> 24) & 0xff
|
2018-06-15 18:29:29 -07:00
|
|
|
if not (bitsN >= 0x03 and bitsN <= 0x1f):
|
2018-06-30 17:56:21 -07:00
|
|
|
if not constants.net.TESTNET:
|
|
|
|
raise Exception("First part of bits should be in [0x03, 0x1f]")
|
2018-05-16 11:56:48 -07:00
|
|
|
bitsBase = bits & 0xffffff
|
|
|
|
if not (bitsBase >= 0x8000 and bitsBase <= 0x7fffff):
|
|
|
|
raise Exception("Second part of bits should be in [0x8000, 0x7fffff]")
|
|
|
|
return bitsBase << (8 * (bitsN-3))
|
|
|
|
|
|
|
|
def target_to_bits(self, target):
|
|
|
|
c = ("%064x" % target)[2:]
|
|
|
|
while c[:2] == '00' and len(c) > 6:
|
|
|
|
c = c[2:]
|
|
|
|
bitsN, bitsBase = len(c) // 2, int('0x' + c[:6], 16)
|
|
|
|
if bitsBase >= 0x800000:
|
|
|
|
bitsN += 1
|
|
|
|
bitsBase >>= 8
|
|
|
|
return bitsN << 24 | bitsBase
|
2013-09-01 09:16:15 -07:00
|
|
|
|
2017-07-20 22:52:38 -07:00
|
|
|
    def can_connect(self, header, check_height=True):
        """Return True if *header* cleanly extends this chain's tip.

        Checks, in order: non-None header, height continuity (unless
        check_height is False), genesis match at height 0, prev-hash
        linkage, and full header verification against the computed target.
        """
        if header is None:
            return False
        height = header['block_height']
        if check_height and self.height() != height - 1:
            #self.print_error("cannot connect at height", height)
            return False
        if height == 0:
            return hash_header(header) == constants.net.GENESIS
        try:
            prev_hash = self.get_hash(height - 1)
        except:
            return False
        if prev_hash != header.get('prev_block_hash'):
            return False
        target = self.get_target(height)
        try:
            self.verify_header(header, prev_hash, target)
        except BaseException as e:
            return False
        return True
|
2015-05-17 06:54:20 -07:00
|
|
|
|
2015-12-12 21:33:06 -08:00
|
|
|
    def connect_chunk(self, idx, hexdata):
        """Verify and save chunk *idx* from hex data; return True on success.

        Deliberately best-effort: any failure is logged and reported as
        False instead of raised, so the caller can try another server.
        """
        try:
            data = bfh(hexdata)
            self.verify_chunk(idx, data)
            #self.print_error("validated chunk %d" % idx)
            self.save_chunk(idx, data)
            return True
        except BaseException as e:
            self.print_error('verify_chunk %d failed'%idx, str(e))
            return False
|
2018-05-16 11:56:48 -07:00
|
|
|
|
|
|
|
    def get_checkpoints(self):
        """Build this chain's checkpoint table.

        For each full chunk, store a tuple of (hash of the chunk's last
        block, target after the chunk, the TARGET_CALC_BLOCKS raw headers
        preceding the chunk boundary).  The extra headers let targets be
        recomputed later without the full header file.
        """
        # for each chunk, store the hash of the last block and the target after the chunk
        cp = []
        n = self.height() // CHUNK_LEN
        for index in range(n):
            height = (index + 1) * CHUNK_LEN - 1
            h = self.get_hash(height)
            target = self.get_target(height)
            # an all-zero hash means the file is shorter than expected
            if len(h.strip('0')) == 0:
                raise Exception('%s file has not enough data.' % self.path())
            extra_headers = []
            if os.path.exists(self.path()):
                with open(self.path(), 'rb') as f:
                    lower_header = height - TARGET_CALC_BLOCKS
                    # walk backwards collecting the headers needed for
                    # target calculation at the chunk boundary
                    for height in range(height, lower_header-1, -1):
                        f.seek(height*HDR_LEN)
                        hd = f.read(HDR_LEN)
                        if len(hd) < HDR_LEN:
                            raise Exception(
                                'Expected to read a full header.'
                                ' This was only {} bytes'.format(len(hd)))
                        extra_headers.append((height, bh2u(hd)))
            cp.append((h, target, extra_headers))
        return cp
|