Ported encoding/decoding of blocks to Python 3.5, based on the latest Electrum 3.0.5 release

Chris Sulmone, 2018-01-13 00:55:50 -06:00 (committed by Jon Layton)
parent 830b6daf14
commit c6be316e10
11 changed files with 501 additions and 46 deletions

.gitignore (vendored, 5 lines changed)

@@ -20,3 +20,8 @@ bin/
 # tox files
 .cache/
 .coverage
+
+# User-specific stuff:
+.idea/**/workspace.xml
+.idea/**/tasks.xml
+.idea/dictionaries

.idea/electrum.iml (new file, 13 lines)

@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$" isTestSource="false" />
</content>
<orderEntry type="jdk" jdkName="Python 3.6 (electrum)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="TestRunnerService">
<option name="PROJECT_TEST_RUNNER" value="Unittests" />
</component>
</module>

.idea/misc.xml (new file, 4 lines)

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.6 (electrum)" project-jdk-type="Python SDK" />
</project>

.idea/modules.xml (new file, 8 lines)

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/electrum.iml" filepath="$PROJECT_DIR$/.idea/electrum.iml" />
</modules>
</component>
</project>

.idea/vcs.xml (new file, 6 lines)

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>

README.rst

@@ -28,6 +28,7 @@ Electrum is a pure python application. If you want to use the
 Qt interface, install the Qt dependencies::

     sudo apt-get install python3-pyqt5
+    sudo pip2 install pyblake2

 If you downloaded the official package (tar.gz), you can run
 Electrum from its root directory, without installing it on your
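The new dependency here is the BLAKE2 binding used by the Equihash code added in this commit. Since the port targets Python 3.5, it would normally be installed with the Python 3 pip (the pip2 in the hunk above looks like a leftover), for example:

    sudo pip3 install pyblake2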

lib/__init__.py

@@ -9,6 +9,7 @@ from .simple_config import SimpleConfig, get_config, set_config
 from . import bitcoin
 from . import transaction
 from . import daemon
+from . import equihash
 from .transaction import Transaction
 from .plugins import BasePlugin
 from .commands import Commands, known_commands

lib/bitcoin.py

@@ -29,6 +29,7 @@ import hmac
 import os
 import json
+import struct
 import ecdsa
 import pyaes
@@ -78,7 +79,8 @@ class NetworkConstants:
         cls.ADDRTYPE_P2PKH = 0
         cls.ADDRTYPE_P2SH = 5
         cls.SEGWIT_HRP = "bc"
-        cls.GENESIS = "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f"
+        cls.HEADERS_URL = "https://headers.electrum.org/blockchain_headers" #TODO
+        cls.GENESIS = "0007104ccda289427919efc39dc9e4d499804b7bebc22df55f8b834301260602"
         cls.DEFAULT_PORTS = {'t': '50001', 's': '50002'}
         cls.DEFAULT_SERVERS = read_json('servers.json', {})
         cls.CHECKPOINTS = read_json('checkpoints.json', [])
@@ -237,6 +239,75 @@ def op_push(i):
 def push_script(x):
     return op_push(len(x)//2) + x

+# ZCASH specific utils methods
+# https://github.com/zcash/zcash/blob/master/qa/rpc-tests/test_framework/mininode.py
+def ser_char_vector(l):
+    r = b''
+    if l is None:
+        l = ''
+    if len(l) < 253:
+        r = chr(len(l))
+    elif len(l) < 0x10000:
+        r = chr(253) + struct.pack("<H", len(l))
+    elif len(l) < 0x100000000:
+        r = chr(254) + struct.pack("<I", len(l))
+    else:
+        r = chr(255) + struct.pack("<Q", len(l))
+    for i in l:
+        r += chr(i)
+    return r
+
+def deser_char_vector(f):
+    nit = struct.unpack("<B", f.read(1))[0]
+    if nit == 253:
+        nit = struct.unpack("<H", f.read(2))[0]
+    elif nit == 254:
+        nit = struct.unpack("<I", f.read(4))[0]
+    elif nit == 255:
+        nit = struct.unpack("<Q", f.read(8))[0]
+    r = []
+    for i in range(nit):
+        t = struct.unpack("<B", f.read(1))[0]
+        r.append(t)
+    return r
+
+def deser_uint256(f):
+    r = 0
+    for i in range(8):
+        t = struct.unpack("<I", f.read(4))[0]
+        r += t << (i * 32)
+    return r
+
+def uint256_from_str(s):
+    r = 0
+    t = struct.unpack("<IIIIIIII", s[:32])
+    for i in range(8):
+        r += t[i] << (i * 32)
+    return r
+
+def ser_uint256(u):
+    if isinstance(u, str):
+        u = int(u, 16)
+    if u is None:
+        u = 0
+    rs = b''
+    for i in range(8):
+        rs += struct.pack("<I", u & 0xFFFFFFFF)
+        u >>= 32
+    return rs
+
+def uint256_from_str(s):
+    r = 0
+    t = struct.unpack("<IIIIIIII", s[:32])
+    for i in range(8):
+        r += t[i] << (i * 32)
+    return r

 def sha256(x):
     x = to_bytes(x, 'utf8')
     return bytes(hashlib.sha256(x).digest())
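Note that ser_char_vector above still builds its result with chr(), which returns a one-character str in Python 3 and cannot be concatenated with the bytes that struct.pack returns. For reference, a minimal bytes-oriented sketch of the same compact-size encoding; ser_char_vector_py3 is an illustrative name, not part of the commit:

    import struct

    def ser_char_vector_py3(v):
        # Compact-size length prefix followed by the raw payload (Bitcoin/Zcash style).
        # Expects bytes or an iterable of small ints; None is treated as empty.
        v = bytes(v) if v is not None else b''
        n = len(v)
        if n < 253:
            prefix = struct.pack("<B", n)
        elif n < 0x10000:
            prefix = b'\xfd' + struct.pack("<H", n)
        elif n < 0x100000000:
            prefix = b'\xfe' + struct.pack("<I", n)
        else:
            prefix = b'\xff' + struct.pack("<Q", n)
        return prefix + v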

lib/blockchain.py

@@ -22,40 +22,54 @@
 # SOFTWARE.

 import os
 import threading
+import struct
+from io import BytesIO

 from . import util
 from . import bitcoin
 from .bitcoin import *
+from .equihash import is_gbp_valid
+import logging
+logging.basicConfig(level=logging.INFO)

 MAX_TARGET = 0x00000000FFFF0000000000000000000000000000000000000000000000000000


 def serialize_header(res):
-    s = int_to_hex(res.get('version'), 4) \
-        + rev_hex(res.get('prev_block_hash')) \
-        + rev_hex(res.get('merkle_root')) \
-        + int_to_hex(int(res.get('timestamp')), 4) \
-        + int_to_hex(int(res.get('bits')), 4) \
-        + int_to_hex(int(res.get('nonce')), 4)
-    return s
+    r = b''
+    r += struct.pack("<i", res.get('version'))
+    r += ser_uint256(res.get('prev_block_hash'))
+    r += ser_uint256(res.get('merkle_root'))
+    r += ser_uint256(res.get('hash_reserved'))
+    r += struct.pack("<I", res.get('timestamp'))
+    r += struct.pack("<I", res.get('bits'))
+    r += ser_uint256(res.get('nonce'))
+    r += ser_char_vector(res.get('n_solution')).encode('utf-8')
+    return r


-def deserialize_header(s, height):
-    hex_to_int = lambda s: int('0x' + bh2u(s[::-1]), 16)
+def deserialize_header(f, height):
+    hex_to_int = lambda s: int('0x' + s[::-1].encode('hex'), 16)
     h = {}
-    h['version'] = hex_to_int(s[0:4])
-    h['prev_block_hash'] = hash_encode(s[4:36])
-    h['merkle_root'] = hash_encode(s[36:68])
-    h['timestamp'] = hex_to_int(s[68:72])
-    h['bits'] = hex_to_int(s[72:76])
-    h['nonce'] = hex_to_int(s[76:80])
+    h['version'] = struct.unpack("<I", f.read(4))[0]
+    h['prev_block_hash'] = deser_uint256(f)
+    h['merkle_root'] = deser_uint256(f)
+    h['hash_reserved'] = deser_uint256(f)
+    h['timestamp'] = struct.unpack("<I", f.read(4))[0]
+    h['bits'] = struct.unpack("<I", f.read(4))[0]
+    h['nonce'] = struct.unpack("<I", f.read(4))[0]
+    h['n_solution'] = deser_char_vector(f)
     h['block_height'] = height
     return h


+def sha256_header(self, header):
+    return uint256_from_str(Hash(self.serialize_header(header)))
+
+
 def hash_header(header):
     if header is None:
         return '0' * 64
     if header.get('prev_block_hash') is None:
         header['prev_block_hash'] = '00'*32
-    return hash_encode(Hash(bfh(serialize_header(header))))
+    return hash_encode(Hash(serialize_header(header)))


 blockchains = {}
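For reference, the byte widths implied by the new serialize_header above; a small sketch, in which the 1344-byte solution size is an assumption (the standard Zcash n=200, k=9 Equihash solution) and is not stated anywhere in the commit:

    # Field widths written by the new serialize_header, in bytes.
    FIELDS = [
        ("version", 4), ("prev_block_hash", 32), ("merkle_root", 32),
        ("hash_reserved", 32), ("timestamp", 4), ("bits", 4), ("nonce", 32),
    ]
    FIXED = sum(size for _, size in FIELDS)   # 140 bytes before n_solution
    SOLUTION = 1344                           # assumed Equihash solution size (n=200, k=9)
    print(FIXED + SOLUTION)                   # 1484, the record size used in the hunks below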
@@ -146,10 +160,11 @@ class Blockchain(util.PrintError):

     def update_size(self):
         p = self.path()
-        self._size = os.path.getsize(p)//80 if os.path.exists(p) else 0
+        self._size = os.path.getsize(p) / 1484 if os.path.exists(p) else 0

-    def verify_header(self, header, prev_hash, target):
-        _hash = hash_header(header)
+    def verify_header(self, header, prev_header, bits, target, nonce, n_solution):
+        prev_hash = self.sha256_header(prev_header)
+        _powhash = self.sha256_header(header)
         if prev_hash != header.get('prev_block_hash'):
             raise BaseException("prev hash mismatch: %s vs %s" % (prev_hash, header.get('prev_block_hash')))
         if bitcoin.NetworkConstants.TESTNET:
@@ -157,18 +172,25 @@ class Blockchain(util.PrintError):
         bits = self.target_to_bits(target)
         if bits != header.get('bits'):
             raise BaseException("bits mismatch: %s vs %s" % (bits, header.get('bits')))
-        if int('0x' + _hash, 16) > target:
-            raise BaseException("insufficient proof of work: %s vs target %s" % (int('0x' + _hash, 16), target))
+        if int('0x' + _powhash, 16) > target:
+            raise BaseException("insufficient proof of work: %s vs target %s" % (int('0x' + _powhash, 16), target))
+        if not is_gbp_valid(nonce, n_solution):
+            raise BaseException("Equihash invalid")

     def verify_chunk(self, index, data):
-        num = len(data) // 80
-        prev_hash = self.get_hash(index * 2016 - 1)
-        target = self.get_target(index-1)
+        num = len(data) / 1484
+        prev_header = None
+        if index != 0:
+            prev_header = self.read_header(index * 2016 - 1)
+        headers = {}
         for i in range(num):
-            raw_header = data[i*80:(i+1) * 80]
-            header = deserialize_header(raw_header, index*2016 + i)
-            self.verify_header(header, prev_hash, target)
-            prev_hash = hash_header(header)
+            raw_header = data[i * 1484:(i + 1) * 1484]
+            header = self.deserialize_header(raw_header, index * 2016 + i)
+            headers[header.get('block_height')] = header
+            nonce, n_solution = headers.get('nonce'), header.get('n_solution')
+            bits, target = self.get_target(index * 2016 + i, headers)
+            self.verify_header(header, prev_header, bits, target, nonce, n_solution)
+            prev_header = header

     def path(self):
         d = util.get_headers_dir(self.config)
@@ -177,7 +199,7 @@ class Blockchain(util.PrintError):

     def save_chunk(self, index, chunk):
         filename = self.path()
-        d = (index * 2016 - self.checkpoint) * 80
+        d = (index * 2016 - self.checkpoint) * 1484
         if d < 0:
             chunk = chunk[-d:]
             d = 0
@@ -197,10 +219,10 @@ class Blockchain(util.PrintError):
         with open(self.path(), 'rb') as f:
             my_data = f.read()
         with open(parent.path(), 'rb') as f:
-            f.seek((checkpoint - parent.checkpoint)*80)
-            parent_data = f.read(parent_branch_size*80)
+            f.seek((checkpoint - parent.checkpoint)*1484)
+            parent_data = f.read(parent_branch_size*1484)
         self.write(parent_data, 0)
-        parent.write(my_data, (checkpoint - parent.checkpoint)*80)
+        parent.write(my_data, (checkpoint - parent.checkpoint)*1484)
         # store file path
         for b in blockchains.values():
             b.old_path = b.path()
@@ -222,7 +244,7 @@ class Blockchain(util.PrintError):
         filename = self.path()
         with self.lock:
             with open(filename, 'rb+') as f:
-                if truncate and offset != self._size*80:
+                if offset != self._size*1484:
                     f.seek(offset)
                     f.truncate()
                 f.seek(offset)
@@ -235,8 +257,8 @@ class Blockchain(util.PrintError):
         delta = header.get('block_height') - self.checkpoint
         data = bfh(serialize_header(header))
         assert delta == self.size()
-        assert len(data) == 80
-        self.write(data, delta*80)
+        assert len(data) == 1484
+        self.write(data, delta*1484)
         self.swap_with_parent()

     def read_header(self, height):
@@ -247,15 +269,16 @@ class Blockchain(util.PrintError):
             return self.parent().read_header(height)
         if height > self.height():
             return
-        delta = height - self.checkpoint
+        idx, h = 0, None
         name = self.path()
         if os.path.exists(name):
-            with open(name, 'rb') as f:
-                f.seek(delta * 80)
-                h = f.read(80)
-        if h == bytes([0])*80:
-            return None
-        return deserialize_header(h, height)
+            while idx <= height:
+                f = open(name, 'rb')
+                h = deserialize_header(f, height)
+                idx += 1
+        return h

     def get_hash(self, height):
         if height == -1:
@@ -323,10 +346,11 @@ class Blockchain(util.PrintError):
             return False
         if prev_hash != header.get('prev_block_hash'):
             return False
-        target = self.get_target(height // 2016 - 1)
+        nonce, n_solution = headers.get('nonce'), header.get('n_solution')
+        bits, target = self.get_target(index * 2016 + i, headers)
         try:
-            self.verify_header(header, prev_hash, target)
-        except BaseException as e:
+            self.verify_header(header, prev_header, bits, target, nonce, n_solution)
+        except:
             return False
         return True
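The rewritten read_header above reopens the file and deserializes from the start on every pass of its loop. With fixed 1484-byte records, a seek-based lookup (analogous to what the pre-port code did with 80-byte headers) stays O(1); a minimal sketch, where read_raw_header_at is a hypothetical helper and not part of the commit:

    RECORD_SIZE = 1484  # serialized header size assumed throughout this file

    def read_raw_header_at(path, checkpoint, height):
        # Return the raw record for `height`, or None if it is out of range.
        delta = height - checkpoint
        if delta < 0:
            return None
        with open(path, 'rb') as f:
            f.seek(delta * RECORD_SIZE)
            raw = f.read(RECORD_SIZE)
        return raw if len(raw) == RECORD_SIZE else None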

lib/equihash.py (new file, 321 lines)

@@ -0,0 +1,321 @@
# ZCASH implementation: https://github.com/zcash/zcash/blob/master/qa/rpc-tests/test_framework/equihash.py
from pyblake2 import blake2b
from operator import itemgetter
import struct
DEBUG = False
VERBOSE = False
word_size = 32
word_mask = (1<<word_size)-1
def expand_array(inp, out_len, bit_len, byte_pad=0):
assert bit_len >= 8 and word_size >= 7+bit_len
bit_len_mask = (1<<bit_len)-1
out_width = (bit_len+7)/8 + byte_pad
assert out_len == 8*out_width*len(inp)/bit_len
out = bytearray(out_len)
bit_len_mask = (1 << bit_len) - 1
# The acc_bits least-significant bits of acc_value represent a bit sequence
# in big-endian order.
acc_bits = 0;
acc_value = 0;
j = 0
for i in range(len(inp)):
acc_value = ((acc_value << 8) & word_mask) | inp[i]
acc_bits += 8
# When we have bit_len or more bits in the accumulator, write the next
# output element.
if acc_bits >= bit_len:
acc_bits -= bit_len
for x in range(byte_pad, out_width):
out[j+x] = (
# Big-endian
acc_value >> (acc_bits+(8*(out_width-x-1)))
) & (
# Apply bit_len_mask across byte boundaries
(bit_len_mask >> (8*(out_width-x-1))) & 0xFF
)
j += out_width
return out
def compress_array(inp, out_len, bit_len, byte_pad=0):
assert bit_len >= 8 and word_size >= 7+bit_len
in_width = (bit_len+7)/8 + byte_pad
assert out_len == bit_len*len(inp)/(8*in_width)
out = bytearray(out_len)
bit_len_mask = (1 << bit_len) - 1
# The acc_bits least-significant bits of acc_value represent a bit sequence
# in big-endian order.
acc_bits = 0;
acc_value = 0;
j = 0
for i in range(out_len):
# When we have fewer than 8 bits left in the accumulator, read the next
# input element.
if acc_bits < 8:
acc_value = ((acc_value << bit_len) & word_mask) | inp[j]
for x in range(byte_pad, in_width):
acc_value = acc_value | (
(
# Apply bit_len_mask across byte boundaries
inp[j+x] & ((bit_len_mask >> (8*(in_width-x-1))) & 0xFF)
) << (8*(in_width-x-1))); # Big-endian
j += in_width
acc_bits += bit_len
acc_bits -= 8
out[i] = (acc_value >> acc_bits) & 0xFF
return out
def get_indices_from_minimal(minimal, bit_len):
eh_index_size = 4
assert (bit_len+7)/8 <= eh_index_size
len_indices = 8*eh_index_size*len(minimal)/bit_len
byte_pad = eh_index_size - (bit_len+7)/8
expanded = expand_array(minimal, len_indices, bit_len, byte_pad)
return [struct.unpack('>I', expanded[i:i+4])[0] for i in range(0, len_indices, eh_index_size)]
def get_minimal_from_indices(indices, bit_len):
eh_index_size = 4
assert (bit_len+7)/8 <= eh_index_size
len_indices = len(indices)*eh_index_size
min_len = bit_len*len_indices/(8*eh_index_size)
byte_pad = eh_index_size - (bit_len+7)/8
byte_indices = bytearray(''.join([struct.pack('>I', i) for i in indices]))
return compress_array(byte_indices, min_len, bit_len, byte_pad)
def hash_nonce(digest, nonce):
for i in range(8):
digest.update(struct.pack('<I', nonce >> (32*i)))
def hash_xi(digest, xi):
digest.update(struct.pack('<I', xi))
return digest # For chaining
def count_zeroes(h):
# Convert to binary string
if type(h) == bytearray:
h = ''.join('{0:08b}'.format(x, 'b') for x in h)
else:
h = ''.join('{0:08b}'.format(ord(x), 'b') for x in h)
# Count leading zeroes
return (h+'1').index('1')
def has_collision(ha, hb, i, l):
res = [ha[j] == hb[j] for j in range((i-1)*l/8, i*l/8)]
return reduce(lambda x, y: x and y, res)
def distinct_indices(a, b):
for i in a:
for j in b:
if i == j:
return False
return True
def xor(ha, hb):
return bytearray(a^b for a,b in zip(ha,hb))
def gbp_basic(digest, n, k):
'''Implementation of Basic Wagner's algorithm for the GBP.'''
validate_params(n, k)
collision_length = n/(k+1)
hash_length = (k+1)*((collision_length+7)//8)
indices_per_hash_output = 512/n
# 1) Generate first list
if DEBUG: print('Generating first list')
X = []
tmp_hash = ''
for i in range(0, 2**(collision_length+1)):
r = i % indices_per_hash_output
if r == 0:
# X_i = H(I||V||x_i)
curr_digest = digest.copy()
hash_xi(curr_digest, i/indices_per_hash_output)
tmp_hash = curr_digest.digest()
X.append((
expand_array(bytearray(tmp_hash[r*n/8:(r+1)*n/8]),
hash_length, collision_length),
(i,)
))
# 3) Repeat step 2 until 2n/(k+1) bits remain
for i in range(1, k):
if DEBUG: print('Round %d:' % i)
# 2a) Sort the list
if DEBUG: print('- Sorting list')
X.sort(key=itemgetter(0))
if DEBUG and VERBOSE:
for Xi in X[-32:]:
print('%s %s' % (print_hash(Xi[0]), Xi[1]))
if DEBUG: print('- Finding collisions')
Xc = []
while len(X) > 0:
# 2b) Find next set of unordered pairs with collisions on first n/(k+1) bits
j = 1
while j < len(X):
if not has_collision(X[-1][0], X[-1-j][0], i, collision_length):
break
j += 1
# 2c) Store tuples (X_i ^ X_j, (i, j)) on the table
for l in range(0, j-1):
for m in range(l+1, j):
# Check that there are no duplicate indices in tuples i and j
if distinct_indices(X[-1-l][1], X[-1-m][1]):
if X[-1-l][1][0] < X[-1-m][1][0]:
concat = X[-1-l][1] + X[-1-m][1]
else:
concat = X[-1-m][1] + X[-1-l][1]
Xc.append((xor(X[-1-l][0], X[-1-m][0]), concat))
# 2d) Drop this set
while j > 0:
X.pop(-1)
j -= 1
# 2e) Replace previous list with new list
X = Xc
    # k+1) Find a collision on last 2n/(k+1) bits
if DEBUG:
print('Final round:')
print('- Sorting list')
X.sort(key=itemgetter(0))
if DEBUG and VERBOSE:
for Xi in X[-32:]:
print('%s %s' % (print_hash(Xi[0]), Xi[1]))
if DEBUG: print('- Finding collisions')
solns = []
while len(X) > 0:
j = 1
while j < len(X):
if not (has_collision(X[-1][0], X[-1-j][0], k, collision_length) and
has_collision(X[-1][0], X[-1-j][0], k+1, collision_length)):
break
j += 1
for l in range(0, j-1):
for m in range(l+1, j):
res = xor(X[-1-l][0], X[-1-m][0])
if count_zeroes(res) == 8*hash_length and distinct_indices(X[-1-l][1], X[-1-m][1]):
if DEBUG and VERBOSE:
print('Found solution:')
print('- %s %s' % (print_hash(X[-1-l][0]), X[-1-l][1]))
print('- %s %s' % (print_hash(X[-1-m][0]), X[-1-m][1]))
if X[-1-l][1][0] < X[-1-m][1][0]:
solns.append(list(X[-1-l][1] + X[-1-m][1]))
else:
solns.append(list(X[-1-m][1] + X[-1-l][1]))
# 2d) Drop this set
while j > 0:
X.pop(-1)
j -= 1
return [get_minimal_from_indices(soln, collision_length+1) for soln in solns]
def gbp_validate(digest, minimal, n, k):
validate_params(n, k)
collision_length = n/(k+1)
hash_length = (k+1)*((collision_length+7)//8)
indices_per_hash_output = 512/n
solution_width = (1 << k)*(collision_length+1)//8
if len(minimal) != solution_width:
print('Invalid solution length: %d (expected %d)' % \
(len(minimal), solution_width))
return False
X = []
for i in get_indices_from_minimal(minimal, collision_length+1):
r = i % indices_per_hash_output
# X_i = H(I||V||x_i)
curr_digest = digest.copy()
hash_xi(curr_digest, i/indices_per_hash_output)
tmp_hash = curr_digest.digest()
X.append((
expand_array(bytearray(tmp_hash[r*n/8:(r+1)*n/8]),
hash_length, collision_length),
(i,)
))
for r in range(1, k+1):
Xc = []
for i in range(0, len(X), 2):
if not has_collision(X[i][0], X[i+1][0], r, collision_length):
print('Invalid solution: invalid collision length between StepRows')
return False
if X[i+1][1][0] < X[i][1][0]:
print('Invalid solution: Index tree incorrectly ordered')
return False
if not distinct_indices(X[i][1], X[i+1][1]):
print('Invalid solution: duplicate indices')
return False
Xc.append((xor(X[i][0], X[i+1][0]), X[i][1] + X[i+1][1]))
X = Xc
if len(X) != 1:
print('Invalid solution: incorrect length after end of rounds: %d' % len(X))
return False
if count_zeroes(X[0][0]) != 8*hash_length:
print('Invalid solution: incorrect number of zeroes: %d' % count_zeroes(X[0][0]))
return False
return True
def zcash_person(n, k):
return b'ZcashPoW' + struct.pack('<II', n, k)
def print_hash(h):
if type(h) == bytearray:
return ''.join('{0:02x}'.format(x, 'x') for x in h)
else:
return ''.join('{0:02x}'.format(ord(x), 'x') for x in h)
def validate_params(n, k):
if (k >= n):
raise ValueError('n must be larger than k')
if (((n/(k+1))+1) >= 32):
raise ValueError('Parameters must satisfy n/(k+1)+1 < 32')
# a bit different from https://github.com/zcash/zcash/blob/master/qa/rpc-tests/test_framework/mininode.py#L747
# since Electrum is SPV oriented and not a full node
def is_gbp_valid(nNonce, nSolution, n=48, k=5):
# H(I||...
digest = blake2b(digest_size=(512/n)*n/8, person=zcash_person(n, k))
digest.update(super(CBlock, self).serialize()[:108])
hash_nonce(digest, nNonce)
if not gbp_validate(nSolution, digest, n, k):
return False
return True
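As committed, is_gbp_valid still refers to CBlock and self from the upstream node code, so the digest setup it needs is easy to miss. A minimal sketch of preparing the personalized BLAKE2b state that Zcash-style Equihash verification expects, assuming mainnet parameters n=200, k=9; the header prefix and nonce below are placeholders:

    import struct
    from pyblake2 import blake2b  # hashlib.blake2b is equivalent on Python 3.6+

    n, k = 200, 9
    person = b'ZcashPoW' + struct.pack('<II', n, k)   # same personalization as zcash_person()
    digest = blake2b(digest_size=(512 // n) * n // 8, person=person)

    header_prefix = bytes(108)   # placeholder: first 108 bytes of the serialized block header
    nonce = 0                    # placeholder: 256-bit little-endian nonce
    digest.update(header_prefix)
    for i in range(8):
        digest.update(struct.pack('<I', (nonce >> (32 * i)) & 0xFFFFFFFF))
    # `digest` would then be handed to gbp_validate together with the block's minimal solution.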

setup.py

@@ -44,6 +44,7 @@ setup(
         'protobuf',
         'dnspython',
         'jsonrpclib-pelix',
+        'pyblake2',
         'PySocks>=1.6.6',
     ],
     packages=[