Merge pull request #2 from BTCP-community/zcl_header_chunks

Zcl header chunks
This commit is contained in:
Jon Layton 2018-01-14 20:23:40 -06:00 committed by GitHub
commit 18ca29bfa9
9 changed files with 221 additions and 427 deletions

View File

@ -84,6 +84,9 @@ class NetworkConstants:
cls.DEFAULT_PORTS = {'t': '50001', 's': '50002'}
cls.DEFAULT_SERVERS = read_json('servers.json', {})
cls.CHECKPOINTS = read_json('checkpoints.json', [])
cls.EQUIHASH_N = 200
cls.EQUIHASH_K = 9
cls.CHUNK_SIZE = 200
# https://github.com/z-classic/zclassic/blob/master/src/chainparams.cpp#L234
@classmethod
@ -97,7 +100,9 @@ class NetworkConstants:
cls.DEFAULT_PORTS = {'t': '51001', 's': '51002'}
cls.DEFAULT_SERVERS = read_json('servers_testnet.json', {})
cls.CHECKPOINTS = read_json('checkpoints_testnet.json', [])
cls.EQUIHASH_N = 200
cls.EQUIHASH_K = 9
cls.CHUNK_SIZE = 200
NetworkConstants.set_mainnet()
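These constants drive the rest of the change: Equihash(200, 9) solutions make headers variable-length, and header chunks shrink from Bitcoin's 2016 to CHUNK_SIZE = 200. A sketch of the sizes they imply (not part of the commit):

# Sketch (editorial): sizes implied by Equihash(200, 9) and the new CHUNK_SIZE.
EQUIHASH_N, EQUIHASH_K, CHUNK_SIZE = 200, 9, 200

collision_length = EQUIHASH_N // (EQUIHASH_K + 1)                 # 20
solution_width = (1 << EQUIHASH_K) * (collision_length + 1) // 8
assert solution_width == 1344                                      # bytes of n_solution per header

BASIC_HEADER_SIZE = 140      # fixed header fields, defined later in this file
compact_size_prefix = 3      # 0xfd marker + 2-byte length, since 1344 >= 253
assert BASIC_HEADER_SIZE + compact_size_prefix + solution_width == 1487
# One chunk of CHUNK_SIZE headers is therefore roughly 200 * 1487 bytes, and headers
# can no longer be addressed by a fixed stride as with Bitcoin's 80-byte headers.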
@ -242,20 +247,34 @@ def push_script(x):
# Zcash-specific utility methods
# https://github.com/zcash/zcash/blob/master/qa/rpc-tests/test_framework/mininode.py
BASIC_HEADER_SIZE = 140
hash_to_str = lambda x: bytes(reversed(x)).hex()
str_to_hash = lambda x: bytes(reversed(bytes.fromhex(x)))
def read_vector_size(f):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
return struct.unpack("<H", f.read(2))[0]
elif nit == 254:
return struct.unpack("<I", f.read(4))[0]
elif nit == 255:
return struct.unpack("<Q", f.read(8))[0]
return nit
def ser_char_vector(l):
r = b''
if l is None:
l = ''
l = b''
if len(l) < 253:
r = chr(len(l))
r = struct.pack("<B", len(l))
elif len(l) < 0x10000:
r = chr(253) + struct.pack("<H", len(l))
r = struct.pack("<B", 253) + struct.pack("<H", len(l))
elif len(l) < 0x100000000:
r = chr(254) + struct.pack("<I", len(l))
r = struct.pack("<B", 254) + struct.pack("<I", len(l))
else:
r = chr(255) + struct.pack("<Q", len(l))
for i in l:
r += chr(i)
r = struct.pack("<B", 255) + struct.pack("<Q", len(l))
r += bytes(l)
return r
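read_vector_size and ser_char_vector are the two halves of the Bitcoin-style CompactSize convention: lengths below 253 take one byte, larger lengths a marker byte plus 2, 4, or 8 little-endian length bytes. A round-trip sketch with stand-in helper names (not part of the commit):

# Sketch: CompactSize round trip for a 1344-byte Equihash solution.
import io
import struct

def _compact_size(n):
    if n < 253:
        return struct.pack("<B", n)
    elif n < 0x10000:
        return struct.pack("<B", 253) + struct.pack("<H", n)
    elif n < 0x100000000:
        return struct.pack("<B", 254) + struct.pack("<I", n)
    return struct.pack("<B", 255) + struct.pack("<Q", n)

def _read_compact_size(f):
    marker = struct.unpack("<B", f.read(1))[0]
    if marker == 253:
        return struct.unpack("<H", f.read(2))[0]
    if marker == 254:
        return struct.unpack("<I", f.read(4))[0]
    if marker == 255:
        return struct.unpack("<Q", f.read(8))[0]
    return marker

payload = b'\x01' * 1344                        # e.g. one n_solution
blob = _compact_size(len(payload)) + payload    # what ser_char_vector emits for the vector
f = io.BytesIO(blob)
assert _read_compact_size(f) == 1344            # what read_vector_size recovers
assert f.read() == payload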
@ -273,6 +292,9 @@ def deser_char_vector(f):
r.append(t)
return r
def vector_from_bytes(s):
return [v for v in s]
def deser_uint256(f):
r = 0
@ -282,7 +304,7 @@ def deser_uint256(f):
return r
def uint256_from_str(s):
def uint256_from_bytes(s):
r = 0
t = struct.unpack("<IIIIIIII", s[:32])
for i in range(8):
@ -301,30 +323,17 @@ def ser_uint256(u):
u >>= 32
return rs
def uint256_from_str(s):
r = 0
t = struct.unpack("<IIIIIIII", s[:32])
for i in range(8):
r += t[i] << (i * 32)
return r
def sha256(x):
x = to_bytes(x, 'utf8')
return bytes(hashlib.sha256(x).digest())
def Hash(x):
x = to_bytes(x, 'utf8')
out = bytes(sha256(sha256(x)))
return out
hash_encode = lambda x: bh2u(x[::-1])
hash_decode = lambda x: bfh(x)[::-1]
hmac_sha_512 = lambda x, y: hmac.new(x, y, hashlib.sha512).digest()
def is_new_seed(x, prefix=version.SEED_PREFIX):
from . import mnemonic
x = mnemonic.normalize_text(x)

View File

@ -28,6 +28,7 @@ from io import BytesIO
from . import util
from . import bitcoin
from .bitcoin import *
import base64
from .equihash import is_gbp_valid
import logging
@ -39,38 +40,37 @@ MAX_TARGET = 0x0007FFFFFFFF0000000000000000000000000000000000000000000000000000
def serialize_header(res):
r = b''
r += struct.pack("<i", res.get('version'))
r += ser_uint256(res.get('prev_block_hash'))
r += ser_uint256(res.get('merkle_root'))
r += ser_uint256(res.get('hash_reserved'))
r += str_to_hash(res.get('prev_block_hash'))
r += str_to_hash(res.get('merkle_root'))
r += str_to_hash(res.get('hash_reserved'))
r += struct.pack("<I", res.get('timestamp'))
r += struct.pack("<I", res.get('bits'))
r += ser_uint256(res.get('nonce'))
r += ser_char_vector(res.get('n_solution')).encode('utf-8')
r += str_to_hash(res.get('nonce'))
r += ser_char_vector(base64.b64decode(res.get('n_solution').encode('utf8')))
return r
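The fixed fields written by serialize_header account for the BASIC_HEADER_SIZE constant, and the first 108 bytes (version through bits) form the prefix that is_gbp_valid later hashes. A quick layout check (a sketch, not from the commit):

# Sketch: byte widths of the fixed Zclassic header fields in serialization order.
FIELDS = [
    ("version",          4),
    ("prev_block_hash", 32),
    ("merkle_root",     32),
    ("hash_reserved",   32),
    ("timestamp",        4),
    ("bits",             4),
    ("nonce",           32),
]
assert sum(w for _, w in FIELDS) == 140        # BASIC_HEADER_SIZE
assert sum(w for _, w in FIELDS[:6]) == 108    # prefix committed to by the Equihash digest
# The CompactSize-prefixed n_solution follows these 140 bytes.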
def deserialize_header(f, height):
hex_to_int = lambda s: int('0x' + s[::-1].encode('hex'), 16)
h = {}
h['version'] = struct.unpack("<I", f.read(4))[0]
h['prev_block_hash'] = deser_uint256(f)
h['merkle_root'] = deser_uint256(f)
h['hash_reserved'] = deser_uint256(f)
h['prev_block_hash'] = hash_to_str(f.read(32))
h['merkle_root'] = hash_to_str(f.read(32))
h['hash_reserved'] = hash_to_str(f.read(32))
h['timestamp'] = struct.unpack("<I", f.read(4))[0]
h['bits'] = struct.unpack("<I", f.read(4))[0]
h['nonce'] = struct.unpack("<I", f.read(4))[0]
h['n_solution'] = deser_char_vector(f)
h['nonce'] = hash_to_str(f.read(32))
h['n_solution'] = base64.b64encode(bytes(deser_char_vector(f))).decode('utf8')
h['block_height'] = height
return h
def sha256_header(self, header):
return uint256_from_str(Hash(self.serialize_header(header)))
def sha256_header(header):
return uint256_from_bytes(Hash(serialize_header(header)))
def hash_header(header):
if header is None:
return '0' * 64
if header.get('prev_block_hash') is None:
header['prev_block_hash'] = '00'*32
return hash_encode(Hash(serialize_header(header)))
header['prev_block_hash'] = '00'*64
return hash_to_str(Hash(serialize_header(header)))
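sha256_header and hash_header expose the same double-SHA256 digest in two forms: a little-endian integer for the proof-of-work comparison and a byte-reversed hex string used as the block id. A minimal illustration (stand-in input, not part of the commit):

# Sketch: one 32-byte digest, viewed as an int (for target checks) and as display hex (as a hash id).
import hashlib

digest = hashlib.sha256(hashlib.sha256(b'example header bytes').digest()).digest()
as_int = int.from_bytes(digest, 'little')     # equivalent to uint256_from_bytes(digest)
as_hex = bytes(reversed(digest)).hex()        # equivalent to hash_to_str(digest)
assert int(as_hex, 16) == as_int              # same value, two presentations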
blockchains = {}
@ -117,7 +117,7 @@ class Blockchain(util.PrintError):
self.config = config
self.catch_up = None # interface catching up
self.checkpoint = checkpoint
self.checkpoints = bitcoin.NetworkConstants.CHECKPOINTS
self.checkpoints = NetworkConstants.CHECKPOINTS
self.parent_id = parent_id
self.lock = threading.Lock()
with self.lock:
@ -161,36 +161,51 @@ class Blockchain(util.PrintError):
def update_size(self):
p = self.path()
self._size = os.path.getsize(p) / 1484 if os.path.exists(p) else 0
self._size = 0
if os.path.exists(p):
with open(p, 'rb') as f:
eof = f.seek(0, 2)
f.seek(0, 0)
while True:
try:
f.seek(bitcoin.BASIC_HEADER_SIZE, 1)
vs = read_vector_size(f)
f.seek(vs, 1)
if f.tell() <= eof:
self._size += 1
if f.tell() >= eof:
break
except:
import traceback
traceback.print_exc()
break
def verify_header(self, header, prev_header, bits, target, nonce, n_solution):
prev_hash = self.sha256_header(prev_header)
_powhash = self.sha256_header(header)
if prev_hash != header.get('prev_block_hash'):
raise BaseException("prev hash mismatch: %s vs %s" % (prev_hash, header.get('prev_block_hash')))
if bitcoin.NetworkConstants.TESTNET:
return
bits = self.target_to_bits(target)
if bits != header.get('bits'):
raise BaseException("bits mismatch: %s vs %s" % (bits, header.get('bits')))
if int('0x' + _powhash, 16) > target:
def verify_header(self, header, prev_header):
if prev_header:
prev_hash = hash_header(prev_header)
if prev_hash != header.get('prev_block_hash'):
raise BaseException("prev hash mismatch: %s vs %s" % (prev_hash, header.get('prev_block_hash')))
_powhash = sha256_header(header)
target = self.bits_to_target(header['bits'])
if _powhash > target:
raise BaseException("insufficient proof of work: %s vs target %s" % (int('0x' + _powhash, 16), target))
if not is_gbp_valid(nonce, n_solution):
nonce = uint256_from_bytes(str_to_hash(header.get('nonce')))
n_solution = vector_from_bytes(base64.b64decode(header.get('n_solution').encode('utf8')))
if not is_gbp_valid(serialize_header(header), nonce, n_solution,
NetworkConstants.EQUIHASH_N, NetworkConstants.EQUIHASH_K):
raise BaseException("Equihash invalid")
def verify_chunk(self, index, data):
num = len(data) / 1484
data = BytesIO(data)
prev_header = None
if index != 0:
prev_header = self.read_header(index * 2016 - 1)
headers = {}
for i in range(num):
raw_header = data[i * 1484:(i + 1) * 1484]
header = self.deserialize_header(raw_header, index * 2016 + i)
headers[header.get('block_height')] = header
nonce, n_solution = headers.get('nonce'), header.get('n_solution')
bits, target = self.get_target(index * 2016 + i, headers)
self.verify_header(header, prev_header, bits, target, nonce, n_solution)
prev_header = self.read_header(index * NetworkConstants.CHUNK_SIZE - 1)
i = 0
while True:
    try:
        header = self.deserialize_header(data, index * NetworkConstants.CHUNK_SIZE + i)
    except:
        break
    self.verify_header(header, prev_header)
    prev_header = header
    i += 1
def path(self):
@ -200,7 +215,8 @@ class Blockchain(util.PrintError):
def save_chunk(self, index, chunk):
filename = self.path()
d = (index * 2016 - self.checkpoint) * 1484
with open(filename, 'rb') as f:
d = self._height_to_offset(f, index * NetworkConstants.CHUNK_SIZE - self.checkpoint)
if d < 0:
chunk = chunk[-d:]
d = 0
@ -220,10 +236,14 @@ class Blockchain(util.PrintError):
with open(self.path(), 'rb') as f:
my_data = f.read()
with open(parent.path(), 'rb') as f:
f.seek((checkpoint - parent.checkpoint)*1484)
parent_data = f.read(parent_branch_size*1484)
offset = self._height_to_offset(f, checkpoint - parent.checkpoint)
length = self._height_to_offset(f, parent_branch_size, offset)
f.seek(offset)
parent_data = f.read(length)
self.write(parent_data, 0)
parent.write(my_data, (checkpoint - parent.checkpoint)*1484)
parent.write(my_data, checkpoint - parent.checkpoint)
# store file path
for b in blockchains.values():
b.old_path = b.path()
@ -241,25 +261,42 @@ class Blockchain(util.PrintError):
blockchains[self.checkpoint] = self
blockchains[parent.checkpoint] = parent
def write(self, data, offset, truncate=True):
def _height_to_offset(self, f, height, start=0):
pos = f.tell()
eof = f.seek(0, 2)
f.seek(start, 0)
for i in range(height):
f.seek(bitcoin.BASIC_HEADER_SIZE, 1)
vs = read_vector_size(f)
f.seek(vs, 1)
if f.tell() > eof:
raise Exception('Out of file')
elif f.tell() == eof:
break
result = f.tell()
f.seek(pos, 0)
return result
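Since each stored header is 140 fixed bytes plus a CompactSize-prefixed solution, a height delta no longer converts to a byte offset by multiplication; _height_to_offset walks the file record by record instead. A self-contained miniature of that walk (stand-in data, and omitting the real method's EOF checks and position restore):

# Sketch: locating the byte offset of the N-th variable-length header record.
import io
import struct

BASIC = 140

def _record(solution):
    prefix = struct.pack("<B", 253) + struct.pack("<H", len(solution))   # CompactSize for len >= 253
    return b'\x00' * BASIC + prefix + solution

blob = _record(b'\xaa' * 1344) + _record(b'\xbb' * 1344)
f = io.BytesIO(blob)

def height_to_offset(f, height, start=0):
    f.seek(start)
    for _ in range(height):
        f.seek(BASIC, 1)                               # skip the fixed header fields
        marker = struct.unpack("<B", f.read(1))[0]     # read the CompactSize, as read_vector_size does
        if marker == 253:
            length = struct.unpack("<H", f.read(2))[0]
        elif marker == 254:
            length = struct.unpack("<I", f.read(4))[0]
        elif marker == 255:
            length = struct.unpack("<Q", f.read(8))[0]
        else:
            length = marker
        f.seek(length, 1)                              # skip the solution bytes
    return f.tell()

assert height_to_offset(f, 1) == 140 + 3 + 1344        # start of the second stored header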
def write(self, data, delta, truncate=False):
filename = self.path()
with self.lock:
with open(filename, 'rb+') as f:
if offset != self._size*1484:
f.seek(offset)
f.truncate()
eof = f.seek(0, 2)
offset = self._height_to_offset(f, delta)
f.seek(offset)
if truncate and offset < eof:
f.truncate()
f.write(data)
f.flush()
os.fsync(f.fileno())
self.update_size()
def save_header(self, header):
self.print_error("save_header", header.get('block_height'))
delta = header.get('block_height') - self.checkpoint
data = bfh(serialize_header(header))
data = serialize_header(header)
assert delta == self.size()
assert len(data) == 1484
self.write(data, delta*1484)
self.write(data, delta)
self.swap_with_parent()
def read_header(self, height):
@ -271,49 +308,22 @@ class Blockchain(util.PrintError):
if height > self.height():
return
idx, h = 0, None
delta = height - self.checkpoint
name = self.path()
if os.path.exists(name):
while idx <= height:
f = open(name, 'rb')
with open(name, 'rb') as f:
for i in range(delta):
f.seek(bitcoin.BASIC_HEADER_SIZE, 1)
vs = read_vector_size(f)
f.seek(vs, 1)
h = deserialize_header(f, height)
idx += 1
return h
def get_hash(self, height):
if height == -1:
return '0000000000000000000000000000000000000000000000000000000000000000'
elif height == 0:
return bitcoin.NetworkConstants.GENESIS
elif height < len(self.checkpoints) * 2016:
assert (height+1) % 2016 == 0, height
index = height // 2016
h, t = self.checkpoints[index]
return h
else:
return hash_header(self.read_header(height))
return self.hash_header(self.read_header(height))
def get_target(self, index):
# compute target from chunk x, used in chunk x+1
if bitcoin.NetworkConstants.TESTNET:
return 0
if index == -1:
return MAX_TARGET
if index < len(self.checkpoints):
h, t = self.checkpoints[index]
return t
# new target
first = self.read_header(index * 2016)
last = self.read_header(index * 2016 + 2015)
bits = last.get('bits')
target = self.bits_to_target(bits)
nActualTimespan = last.get('timestamp') - first.get('timestamp')
nTargetTimespan = 14 * 24 * 60 * 60
nActualTimespan = max(nActualTimespan, nTargetTimespan // 4)
nActualTimespan = min(nActualTimespan, nTargetTimespan * 4)
new_target = min(MAX_TARGET, (target * nActualTimespan) // nTargetTimespan)
return new_target
def hash_header(self, header):
return hash_header(header)
def bits_to_target(self, bits):
bitsN = (bits >> 24) & 0xff
@ -337,41 +347,35 @@ class Blockchain(util.PrintError):
def can_connect(self, header, check_height=True):
height = header['block_height']
if check_height and self.height() != height - 1:
#self.print_error("cannot connect at height", height)
self.print_error("cannot connect at height", height)
return False
if height == 0:
return hash_header(header) == bitcoin.NetworkConstants.GENESIS
return hash_header(header) == NetworkConstants.GENESIS
try:
prev_hash = self.get_hash(height - 1)
prev_header = self.read_header(height - 1)
prev_hash = self.hash_header(prev_header)
except:
return False
if prev_hash != header.get('prev_block_hash'):
return False
nonce, n_solution = headers.get('nonce'), header.get('n_solution')
bits, target = self.get_target(index * 2016 + i, headers)
try:
self.verify_header(header, prev_header, bits, target, nonce, n_solution)
except:
self.verify_header(header, prev_header)
except BaseException as e:
import traceback
traceback.print_exc()
self.print_error('verify_header failed', str(e))
return False
return True
def connect_chunk(self, idx, hexdata):
try:
data = bfh(hexdata)
data = bytes.fromhex(hexdata)
self.verify_chunk(idx, data)
#self.print_error("validated chunk %d" % idx)
self.print_error("validated chunk %d" % idx)
self.save_chunk(idx, data)
return True
except BaseException as e:
import traceback
traceback.print_exc()
self.print_error('verify_chunk failed', str(e))
return False
def get_checkpoints(self):
# for each chunk, store the hash of the last block and the target after the chunk
cp = []
n = self.height() // 2016
for index in range(n):
h = self.get_hash((index+1) * 2016 -1)
target = self.get_target(index)
cp.append((h, target))
return cp

View File

@ -2,6 +2,7 @@
from pyblake2 import blake2b
from operator import itemgetter
import struct
from functools import reduce
DEBUG = False
VERBOSE = False
@ -15,16 +16,16 @@ def expand_array(inp, out_len, bit_len, byte_pad=0):
assert bit_len >= 8 and word_size >= 7+bit_len
bit_len_mask = (1<<bit_len)-1
out_width = (bit_len+7)/8 + byte_pad
assert out_len == 8*out_width*len(inp)/bit_len
out_width = (bit_len+7)//8 + byte_pad
assert out_len == 8*out_width*len(inp)//bit_len
out = bytearray(out_len)
bit_len_mask = (1 << bit_len) - 1
# The acc_bits least-significant bits of acc_value represent a bit sequence
# in big-endian order.
acc_bits = 0;
acc_value = 0;
acc_bits = 0
acc_value = 0
j = 0
for i in range(len(inp)):
@ -51,16 +52,16 @@ def expand_array(inp, out_len, bit_len, byte_pad=0):
def compress_array(inp, out_len, bit_len, byte_pad=0):
assert bit_len >= 8 and word_size >= 7+bit_len
in_width = (bit_len+7)/8 + byte_pad
assert out_len == bit_len*len(inp)/(8*in_width)
in_width = (bit_len+7)//8 + byte_pad
assert out_len == bit_len*len(inp)//(8*in_width)
out = bytearray(out_len)
bit_len_mask = (1 << bit_len) - 1
# The acc_bits least-significant bits of acc_value represent a bit sequence
# in big-endian order.
acc_bits = 0;
acc_value = 0;
acc_bits = 0
acc_value = 0
j = 0
for i in range(out_len):
@ -85,27 +86,26 @@ def compress_array(inp, out_len, bit_len, byte_pad=0):
def get_indices_from_minimal(minimal, bit_len):
eh_index_size = 4
assert (bit_len+7)/8 <= eh_index_size
len_indices = 8*eh_index_size*len(minimal)/bit_len
byte_pad = eh_index_size - (bit_len+7)/8
assert (bit_len+7)//8 <= eh_index_size
len_indices = 8*eh_index_size*len(minimal)//bit_len
byte_pad = eh_index_size - (bit_len+7)//8
expanded = expand_array(minimal, len_indices, bit_len, byte_pad)
return [struct.unpack('>I', expanded[i:i+4])[0] for i in range(0, len_indices, eh_index_size)]
def get_minimal_from_indices(indices, bit_len):
eh_index_size = 4
assert (bit_len+7)/8 <= eh_index_size
assert (bit_len+7)//8 <= eh_index_size
len_indices = len(indices)*eh_index_size
min_len = bit_len*len_indices/(8*eh_index_size)
byte_pad = eh_index_size - (bit_len+7)/8
byte_indices = bytearray(''.join([struct.pack('>I', i) for i in indices]))
min_len = bit_len*len_indices//(8*eh_index_size)
byte_pad = eh_index_size - (bit_len+7)//8
byte_indices = bytearray(b''.join([struct.pack('>I', i) for i in indices]))
return compress_array(byte_indices, min_len, bit_len, byte_pad)
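For Equihash(200, 9) as configured above, each solution index is 21 bits wide and a solution carries 512 of them, so the minimal form these two helpers convert to and from occupies 1344 bytes. A quick check of that arithmetic (a sketch, not part of the commit):

# Sketch: minimal-encoding sizes for the Equihash(200, 9) solutions these helpers handle.
n, k = 200, 9
collision_length = n // (k + 1)                  # 20
bit_len = collision_length + 1                   # each index is 21 bits wide
eh_index_size = 4
byte_pad = eh_index_size - (bit_len + 7) // 8    # 1 pad byte per expanded 32-bit index
num_indices = 1 << k                             # 512 indices per solution

assert byte_pad == 1
assert bit_len * num_indices // 8 == 1344        # bytes in the minimal (wire) form
assert num_indices * eh_index_size == 2048       # bytes once expanded to 32-bit indices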
def hash_nonce(digest, nonce):
for i in range(8):
digest.update(struct.pack('<I', nonce >> (32*i)))
digest.update(struct.pack('<I', nonce >> (32*i) & 0xffffffff))
def hash_xi(digest, xi):
digest.update(struct.pack('<I', xi))
@ -123,7 +123,7 @@ def count_zeroes(h):
def has_collision(ha, hb, i, l):
res = [ha[j] == hb[j] for j in range((i-1)*l/8, i*l/8)]
res = [ha[j] == hb[j] for j in range((i-1)*l//8, i*l//8)]
return reduce(lambda x, y: x and y, res)
@ -242,9 +242,9 @@ def gbp_basic(digest, n, k):
def gbp_validate(digest, minimal, n, k):
validate_params(n, k)
collision_length = n/(k+1)
collision_length = n//(k+1)
hash_length = (k+1)*((collision_length+7)//8)
indices_per_hash_output = 512/n
indices_per_hash_output = 512//n
solution_width = (1 << k)*(collision_length+1)//8
if len(minimal) != solution_width:
@ -257,10 +257,10 @@ def gbp_validate(digest, minimal, n, k):
r = i % indices_per_hash_output
# X_i = H(I||V||x_i)
curr_digest = digest.copy()
hash_xi(curr_digest, i/indices_per_hash_output)
hash_xi(curr_digest, i//indices_per_hash_output)
tmp_hash = curr_digest.digest()
X.append((
expand_array(bytearray(tmp_hash[r*n/8:(r+1)*n/8]),
expand_array(bytearray(tmp_hash[r*n//8:(r+1)*n//8]),
hash_length, collision_length),
(i,)
))
@ -311,11 +311,10 @@ def validate_params(n, k):
# a bit different from https://github.com/zcash/zcash/blob/master/qa/rpc-tests/test_framework/mininode.py#L747
# since Electrum is SPV-oriented and not a full node
def is_gbp_valid(nNonce, nSolution, n=48, k=5):
def is_gbp_valid(header, nNonce, nSolution, n=48, k=5):
# H(I||...
digest = blake2b(digest_size=(512/n)*n/8, person=zcash_person(n, k))
digest.update(super(CBlock, self).serialize()[:108])
digest = blake2b(digest_size=(512//n)*n//8, person=zcash_person(n, k))
digest.update(header[:108])
hash_nonce(digest, nNonce)
if not gbp_validate(nSolution, digest, n, k):
return False
return True
return gbp_validate(digest, nSolution, n, k)
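is_gbp_valid now takes the serialized header instead of reaching into a CBlock instance: verify_header hands it serialize_header(header), the nonce as a 256-bit integer, and the base64-decoded solution bytes. A hedged sketch of the digest it builds, assuming zcash_person yields the usual 'ZcashPoW' plus packed-parameters personalization (all values below are stand-ins):

# Sketch: the blake2b state is_gbp_valid prepares before gbp_validate checks the solution.
import struct
from pyblake2 import blake2b

n, k = 200, 9
digest_size = (512 // n) * n // 8                   # 50 bytes: two 200-bit hashes per blake2b call
person = b'ZcashPoW' + struct.pack('<II', n, k)     # assumed to match zcash_person(n, k)

header_bytes = b'\x00' * 140                        # stand-in for serialize_header(header)
nonce = 0                                           # stand-in for the header's 256-bit nonce

digest = blake2b(digest_size=digest_size, person=person)
digest.update(header_bytes[:108])                   # version through bits, as in is_gbp_valid
for i in range(8):                                  # hash_nonce: nonce as eight little-endian uint32s
    digest.update(struct.pack('<I', (nonce >> (32 * i)) & 0xffffffff))
# gbp_validate(digest, minimal_solution, n, k) then expands the 1344-byte minimal
# solution into 512 indices and re-runs every collision round against this digest.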

View File

@ -59,7 +59,7 @@ def parse_servers(result):
for v in item[2]:
if re.match("[st]\d*", v):
protocol, port = v[0], v[1:]
if port == '': port = bitcoin.NetworkConstants.DEFAULT_PORTS[protocol]
if port == '': port = NetworkConstants.DEFAULT_PORTS[protocol]
out[protocol] = port
elif re.match("v(.?)+", v):
version = v[1:]
@ -93,7 +93,7 @@ def filter_protocol(hostmap, protocol = 's'):
def pick_random_server(hostmap = None, protocol = 's', exclude_set = set()):
if hostmap is None:
hostmap = bitcoin.NetworkConstants.DEFAULT_SERVERS
hostmap = NetworkConstants.DEFAULT_SERVERS
eligible = list(set(filter_protocol(hostmap, protocol)) - exclude_set)
return random.choice(eligible) if eligible else None
@ -181,7 +181,7 @@ class Network(util.DaemonThread):
except:
self.default_server = None
if not self.default_server:
self.default_server = pick_random_server()
self.default_server = pick_random_server(protocol=self.protocol)
self.lock = threading.Lock()
self.pending_sends = []
self.message_id = 0
@ -358,7 +358,7 @@ class Network(util.DaemonThread):
return list(self.interfaces.keys())
def get_servers(self):
out = bitcoin.NetworkConstants.DEFAULT_SERVERS
out = NetworkConstants.DEFAULT_SERVERS
if self.irc_servers:
out.update(filter_version(self.irc_servers.copy()))
else:
@ -789,7 +789,7 @@ class Network(util.DaemonThread):
self.notify('updated')
def request_header(self, interface, height):
#interface.print_error("requesting header %d" % height)
interface.print_error("requesting header %d" % height)
self.queue_request('blockchain.block.get_header', [height], interface)
interface.request = height
interface.req_time = time.time()
@ -806,6 +806,7 @@ class Network(util.DaemonThread):
interface.print_error("unsolicited header",interface.request, height)
self.connection_down(interface.server)
return
interface.print_error("interface.mode %s" % interface.mode)
chain = blockchain.check_header(header)
if interface.mode == 'backward':
can_connect = blockchain.can_connect(header)
@ -821,7 +822,6 @@ class Network(util.DaemonThread):
interface.blockchain = chain
interface.good = height
next_height = (interface.bad + interface.good) // 2
assert next_height >= self.max_checkpoint(), (interface.bad, interface.good)
else:
if height == 0:
self.connection_down(interface.server)
@ -830,8 +830,7 @@ class Network(util.DaemonThread):
interface.bad = height
interface.bad_header = header
delta = interface.tip - height
next_height = max(self.max_checkpoint(), interface.tip - 2 * delta)
next_height = max(0, interface.tip - 2 * delta)
elif interface.mode == 'binary':
if chain:
interface.good = height
@ -841,7 +840,6 @@ class Network(util.DaemonThread):
interface.bad_header = header
if interface.bad != interface.good + 1:
next_height = (interface.bad + interface.good) // 2
assert next_height >= self.max_checkpoint()
elif not interface.blockchain.can_connect(interface.bad_header, check_height=False):
self.connection_down(interface.server)
next_height = None
@ -910,7 +908,7 @@ class Network(util.DaemonThread):
# If not finished, get the next header
if next_height:
if interface.mode == 'catch_up' and interface.tip > next_height + 50:
self.request_chunk(interface, next_height // 2016)
self.request_chunk(interface, next_height // NetworkConstants.CHUNK_SIZE)
else:
self.request_header(interface, next_height)
else:
@ -951,18 +949,36 @@ class Network(util.DaemonThread):
def init_headers_file(self):
b = self.blockchains[0]
print(b.get_hash(0), NetworkConstants.GENESIS)
if b.get_hash(0) == NetworkConstants.GENESIS:
self.downloading_headers = False
return
filename = b.path()
length = 80 * len(bitcoin.NetworkConstants.CHECKPOINTS) * 2016
if not os.path.exists(filename) or os.path.getsize(filename) < length:
with open(filename, 'wb') as f:
if length>0:
f.seek(length-1)
f.write(b'\x00')
with b.lock:
b.update_size()
def download_thread():
try:
import urllib.request, socket
socket.setdefaulttimeout(30)
self.print_error("downloading ", NetworkConstants.HEADERS_URL)
urllib.request.urlretrieve(NetworkConstants.HEADERS_URL, filename)
self.print_error("done.")
except Exception:
import traceback
traceback.print_exc()
self.print_error("download failed. creating file", filename)
open(filename, 'wb+').close()
b = self.blockchains[0]
with b.lock: b.update_size()
self.downloading_headers = False
self.downloading_headers = True
t = threading.Thread(target = download_thread)
t.daemon = True
t.start()
def run(self):
self.init_headers_file()
while self.is_running() and self.downloading_headers:
time.sleep(1)
while self.is_running():
self.maintain_sockets()
self.wait_on_sockets()
@ -974,23 +990,27 @@ class Network(util.DaemonThread):
def on_notify_header(self, interface, header):
height = header.get('block_height')
if not height:
return
if height < self.max_checkpoint():
self.connection_down(interface)
return
interface.tip_header = header
interface.tip = height
if interface.mode != 'default':
return
b = blockchain.check_header(header)
if b:
interface.blockchain = b
self.switch_lagging_interface()
self.notify('updated')
self.notify('interfaces')
return
b = blockchain.can_connect(header)
if b:
interface.blockchain = b
b.save_header(header)
@ -998,7 +1018,9 @@ class Network(util.DaemonThread):
self.notify('updated')
self.notify('interfaces')
return
tip = max([x.height() for x in self.blockchains.values()])
if tip >=0:
interface.mode = 'backward'
interface.bad = height
@ -1006,6 +1028,7 @@ class Network(util.DaemonThread):
self.request_header(interface, min(tip +1, height - 1))
else:
chain = self.blockchains[0]
if chain.catch_up is None:
chain.catch_up = interface
interface.mode = 'catch_up'
@ -1069,12 +1092,3 @@ class Network(util.DaemonThread):
if out != tx_hash:
return False, "error: " + out
return True, out
def export_checkpoints(self, path):
# run manually from the console to generate checkpoints
cp = self.blockchain().get_checkpoints()
with open(path, 'w') as f:
f.write(json.dumps(cp, indent=4))
def max_checkpoint(self):
return max(0, len(bitcoin.NetworkConstants.CHECKPOINTS) * 2016 - 1)

View File

@ -1,231 +1,3 @@
{
"E-X.not.fyi": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"ELECTRUMX.not.fyi": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"ELEX01.blackpole.online": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"VPS.hsmiths.com": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"bitcoin.freedomnode.com": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"btc.smsys.me": {
"pruning": "-",
"s": "995",
"version": "1.1"
},
"currentlane.lovebitco.in": {
"pruning": "-",
"t": "50001",
"version": "1.1"
},
"daedalus.bauerj.eu": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"de01.hamster.science": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"ecdsa.net": {
"pruning": "-",
"s": "110",
"t": "50001",
"version": "1.1"
},
"elec.luggs.co": {
"pruning": "-",
"s": "443",
"version": "1.1"
},
"electrum.akinbo.org": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"electrum.antumbra.se": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"electrum.be": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"electrum.coinucopia.io": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"electrum.cutie.ga": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"electrum.festivaldelhumor.org": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"electrum.hsmiths.com": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"electrum.qtornado.com": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"electrum.vom-stausee.de": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"electrum3.hachre.de": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"electrumx.bot.nu": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"electrumx.westeurope.cloudapp.azure.com": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"elx01.knas.systems": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"ex-btc.server-on.net": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"helicarrier.bauerj.eu": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"mooo.not.fyi": {
"pruning": "-",
"s": "50012",
"t": "50011",
"version": "1.1"
},
"ndnd.selfhost.eu": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"node.arihanc.com": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"node.xbt.eu": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"node1.volatilevictory.com": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"noserver4u.de": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"qmebr.spdns.org": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"raspi.hsmiths.com": {
"pruning": "-",
"s": "51002",
"t": "51001",
"version": "1.1"
},
"s2.noip.pl": {
"pruning": "-",
"s": "50102",
"version": "1.1"
},
"s5.noip.pl": {
"pruning": "-",
"s": "50105",
"version": "1.1"
},
"songbird.bauerj.eu": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"us.electrum.be": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
},
"us01.hamster.science": {
"pruning": "-",
"s": "50002",
"t": "50001",
"version": "1.1"
}
"localhost": {"t":"51001", "s":"51002"}
}

View File

@ -1,8 +1,3 @@
{
"testnetnode.arihanc.com": {"t":"51001", "s":"51002"},
"testnet1.bauerj.eu": {"t":"51001", "s":"51002"},
"14.3.140.101": {"t":"51001", "s":"51002"},
"testnet.hsmiths.com": {"t":"53011", "s":"53012"},
"electrum.akinbo.org": {"t":"51001", "s":"51002"},
"ELEX05.blackpole.online": {"t":"52011", "s":"52002"}
"localhost": {"t":"51001", "s":"51002"}
}

View File

@ -323,11 +323,11 @@ def user_dir():
if 'ANDROID_DATA' in os.environ:
return android_check_data_dir()
elif os.name == 'posix':
return os.path.join(os.environ["HOME"], ".electrum")
return os.path.join(os.environ["HOME"], ".electrum-zcl")
elif "APPDATA" in os.environ:
return os.path.join(os.environ["APPDATA"], "Electrum")
return os.path.join(os.environ["APPDATA"], "Electrum-zcl")
elif "LOCALAPPDATA" in os.environ:
return os.path.join(os.environ["LOCALAPPDATA"], "Electrum")
return os.path.join(os.environ["LOCALAPPDATA"], "Electrum-zcl")
else:
#raise Exception("No home directory found in environment variables.")
return

View File

@ -43,7 +43,7 @@ class SPV(ThreadJob):
if (tx_height > 0) and (tx_height <= lh):
header = self.network.blockchain().read_header(tx_height)
if header is None and self.network.interface:
index = tx_height // 2016
index = tx_height // NetworkConstants.CHUNK_SIZE
self.network.request_chunk(self.network.interface, index)
else:
if tx_hash not in self.merkle_roots:

View File

@ -2,9 +2,10 @@
# A simple script that connects to a server and displays block headers
import sys
import time
from electrum import SimpleConfig, Network
from electrum.util import print_msg, json_encode
from electrum_zcl import SimpleConfig, Network
from electrum_zcl.util import print_msg, json_encode
# start network
c = SimpleConfig()