header validation

nxsofsys 2018-01-14 23:12:30 +03:00
parent 7980846f72
commit 34150e8245
3 changed files with 18 additions and 16 deletions

Changed file 1 of 3:

@@ -84,7 +84,9 @@ class NetworkConstants:
         cls.DEFAULT_PORTS = {'t': '50001', 's': '50002'}
         cls.DEFAULT_SERVERS = read_json('servers.json', {})
         cls.CHECKPOINTS = read_json('checkpoints.json', [])
+        cls.EQUIHASH_N = 200
+        cls.EQUIHASH_K = 9

     @classmethod
     def set_testnet(cls):
         cls.TESTNET = True
@@ -97,7 +99,8 @@ class NetworkConstants:
         cls.DEFAULT_PORTS = {'t':'51001', 's':'51002'}
         cls.DEFAULT_SERVERS = read_json('servers_testnet.json', {})
         cls.CHECKPOINTS = read_json('checkpoints_testnet.json', [])
+        cls.EQUIHASH_N = 200
+        cls.EQUIHASH_K = 9

 NetworkConstants.set_testnet()
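
Both networks now publish the Equihash parameters N=200, K=9 (the parameter set used by Zcash and its derivatives) on NetworkConstants, so validation code can read them instead of hard-coding the numbers. A minimal sketch of the intended consumption pattern; everything except NetworkConstants and its EQUIHASH_* attributes is illustrative:

    # Sketch only: `describe_equihash` is a hypothetical helper, not part of
    # this commit; NetworkConstants and its EQUIHASH_* attributes are.
    def describe_equihash(constants):
        n = constants.EQUIHASH_N   # 200 on both mainnet and testnet here
        k = constants.EQUIHASH_K   # 9
        return 'Equihash(%d, %d)' % (n, k)

    # describe_equihash(NetworkConstants) -> 'Equihash(200, 9)'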

Changed file 2 of 3:

@@ -184,11 +184,10 @@ class Blockchain(util.PrintError):
         target = self.bits_to_target(header['bits'])
         if _powhash > target:
             raise BaseException("insufficient proof of work: %s vs target %s" % (int('0x' + _powhash, 16), target))
-        if bitcoin.NetworkConstants.TESTNET:
-            return
         nonce = uint256_from_bytes(str_to_hash(header.get('nonce')))
         n_solution = vector_from_bytes(base64.b64decode(header.get('n_solution').encode('utf8')))
-        if not is_gbp_valid(serialize_header(header), nonce, n_solution):
+        if not is_gbp_valid(serialize_header(header), nonce, n_solution,
+                            bitcoin.NetworkConstants.EQUIHASH_N, bitcoin.NetworkConstants.EQUIHASH_K):
             raise BaseException("Equihash invalid")

     def verify_chunk(self, index, data):
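
The testnet early-return is gone, so headers are Equihash-checked on both networks, and is_gbp_valid now receives N and K from NetworkConstants instead of assuming them. For context, in Zcash-style Equihash these two parameters fix the BLAKE2b state the verifier starts from. A sketch of that setup, using the pyblake2 dependency this code base already imports; the helper names are illustrative, not this repo's API:

    import struct
    from pyblake2 import blake2b

    def equihash_person(n, k):
        # Zcash Equihash personalization: "ZcashPoW" followed by n and k
        # as little-endian 32-bit integers (16 bytes total).
        return b'ZcashPoW' + struct.pack('<II', n, k)

    def equihash_digest(n, k):
        # One BLAKE2b output carries 512 // n Equihash hashes of n bits each,
        # so the digest size is (512 // n) * n // 8 bytes (50 for n = 200).
        return blake2b(digest_size=(512 // n) * n // 8,
                       person=equihash_person(n, k))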

Changed file 3 of 3:

@@ -2,6 +2,7 @@
 from pyblake2 import blake2b
 from operator import itemgetter
 import struct
+from functools import reduce

 DEBUG = False
 VERBOSE = False
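
Python 3 dropped the reduce builtin, so the module now imports it from functools; without this, the reduce call in has_collision further down would raise NameError under Python 3. A quick, purely illustrative check:

    from functools import reduce
    assert reduce(lambda x, y: x and y, [True, True, False]) is False
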
@@ -15,8 +16,8 @@ def expand_array(inp, out_len, bit_len, byte_pad=0):
     assert bit_len >= 8 and word_size >= 7+bit_len
     bit_len_mask = (1<<bit_len)-1
-    out_width = (bit_len+7)/8 + byte_pad
-    assert out_len == 8*out_width*len(inp)/bit_len
+    out_width = (bit_len+7)//8 + byte_pad
+    assert out_len == 8*out_width*len(inp)//bit_len
     out = bytearray(out_len)

     bit_len_mask = (1 << bit_len) - 1
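
The remaining hunks replace true division (/) with floor division (//). Under Python 3, / always yields a float, even when the result is exact, and floats break everything downstream that expects byte counts. A small demonstration of what the old expressions do, using the sizes expand_array actually sees for Equihash(200, 9) (25-byte input halves, 20-bit words):

    bit_len, inp_len, out_len = 20, 25, 30
    out_width = (bit_len + 7) / 8                   # 3.375 instead of 3 under Python 3
    out_len == 8 * out_width * inp_len / bit_len    # False: 30 != 33.75, assert fails
    # With floor division both values stay integral and the assert holds:
    (bit_len + 7) // 8, 8 * 3 * inp_len // bit_len  # (3, 30)
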
@@ -52,7 +53,6 @@ def compress_array(inp, out_len, bit_len, byte_pad=0):
     assert bit_len >= 8 and word_size >= 7+bit_len
     in_width = (bit_len+7)//8 + byte_pad
-    print('----->', out_len, bit_len*len(inp)//(8*in_width))
     assert out_len == bit_len*len(inp)//(8*in_width)
     out = bytearray(out_len)
@@ -86,9 +86,9 @@ def compress_array(inp, out_len, bit_len, byte_pad=0):

 def get_indices_from_minimal(minimal, bit_len):
     eh_index_size = 4
-    assert (bit_len+7)/8 <= eh_index_size
-    len_indices = 8*eh_index_size*len(minimal)/bit_len
-    byte_pad = eh_index_size - (bit_len+7)/8
+    assert (bit_len+7)//8 <= eh_index_size
+    len_indices = 8*eh_index_size*len(minimal)//bit_len
+    byte_pad = eh_index_size - (bit_len+7)//8
     expanded = expand_array(minimal, len_indices, bit_len, byte_pad)
     return [struct.unpack('>I', expanded[i:i+4])[0] for i in range(0, len_indices, eh_index_size)]
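
Same story in get_indices_from_minimal: the index width and padding must be integers so the expanded array can be sliced into 4-byte big-endian indices. A worked example with the values this function sees for Equihash(200, 9), whose minimal (compressed) solutions are 1344 bytes of 21-bit indices:

    eh_index_size = 4
    bit_len = 21                                    # collision_length + 1
    minimal_len = 1344                              # minimal solution size in bytes
    byte_pad = eh_index_size - (bit_len + 7) // 8   # 4 - 3 = 1
    len_indices = 8 * eh_index_size * minimal_len // bit_len   # 2048 bytes expanded
    len_indices // eh_index_size                    # 512 = 2**k indices
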
@@ -123,7 +123,7 @@ def count_zeroes(h):

 def has_collision(ha, hb, i, l):
-    res = [ha[j] == hb[j] for j in range((i-1)*l/8, i*l/8)]
+    res = [ha[j] == hb[j] for j in range((i-1)*l//8, i*l//8)]
     return reduce(lambda x, y: x and y, res)
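
The reduce over pairwise byte equality is simply an AND-fold; with the functools import above it runs on Python 3. For non-empty inputs it is equivalent to the builtin all(), shown here only as a comparison, not as a change this commit makes:

    from functools import reduce
    res = [True, True, False]
    assert reduce(lambda x, y: x and y, res) == all(res)   # both False here
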
@@ -242,9 +242,9 @@ def gbp_basic(digest, n, k):

 def gbp_validate(digest, minimal, n, k):
     validate_params(n, k)
-    collision_length = n/(k+1)
+    collision_length = n//(k+1)
     hash_length = (k+1)*((collision_length+7)//8)
-    indices_per_hash_output = 512/n
+    indices_per_hash_output = 512//n
     solution_width = (1 << k)*(collision_length+1)//8
     if len(minimal) != solution_width:
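
gbp_validate derives every size from n and k, so these divisions must stay integral. Plugging in the parameters the first file pins, Equihash(200, 9), gives the following worked values:

    n, k = 200, 9
    collision_length = n // (k + 1)                          # 20
    hash_length = (k + 1) * ((collision_length + 7) // 8)    # 30
    indices_per_hash_output = 512 // n                       # 2
    solution_width = (1 << k) * (collision_length + 1) // 8  # 1344-byte solutions
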
@@ -257,10 +257,10 @@ def gbp_validate(digest, minimal, n, k):
         r = i % indices_per_hash_output
         # X_i = H(I||V||x_i)
         curr_digest = digest.copy()
-        hash_xi(curr_digest, i/indices_per_hash_output)
+        hash_xi(curr_digest, i//indices_per_hash_output)
         tmp_hash = curr_digest.digest()
         X.append((
-            expand_array(bytearray(tmp_hash[r*n/8:(r+1)*n/8]),
+            expand_array(bytearray(tmp_hash[r*n//8:(r+1)*n//8]),
                          hash_length, collision_length),
             (i,)
         ))
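
The loop indexing is the other place where floats would be fatal: i // indices_per_hash_output selects which BLAKE2b invocation an index belongs to, and r*n//8 slices that digest into its n-bit hashes. With true division the slice bounds become floats and Python 3 refuses to slice. A minimal illustration, again with the Equihash(200, 9) sizes:

    n, indices_per_hash_output = 200, 2
    tmp_hash = bytes(50)                    # one 50-byte BLAKE2b output
    i = 5
    i // indices_per_hash_output            # 2: the third hash invocation
    r = i % indices_per_hash_output         # 1: its second n-bit half
    tmp_hash[r * n // 8:(r + 1) * n // 8]   # bytes 25..50
    # tmp_hash[r * n / 8:(r + 1) * n / 8]   # TypeError under Python 3: slice indices must be integers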