Header block verification
This commit is contained in:
parent 035ba54141
commit 1601d13ce0
@@ -248,7 +248,8 @@ class Blockchain:
     def get_next_difficulty(self, header_hash: bytes32) -> uint64:
         """
         Returns the difficulty of the next block that extends onto header_hash.
-        Used to calculate the number of iterations.
+        Used to calculate the number of iterations. When changing this, also change the implementation
+        in wallet_state_manager.py.
         """
         block: Header = self.headers[header_hash]

@@ -670,22 +671,22 @@ class Blockchain:
             if block.proof_of_time.challenge_hash != prev_challenge.get_hash():
                 return False

-            # 6a. Check challenge total_weight = parent total_weight + difficulty
+            # 6a. Check total_weight = parent total_weight + difficulty
             if block.weight != prev_full_block.weight + difficulty:
                 return False

-            # 7a. Check challenge total_iters = parent total_iters + number_iters
+            # 7a. Check total_iters = parent total_iters + number_iters
             if (
                 block.header.data.total_iters
                 != prev_full_block.header.data.total_iters + number_of_iters
             ):
                 return False
         else:
-            # 6b. Check challenge total_weight = parent total_weight + difficulty
+            # 6b. Check total_weight = parent total_weight + difficulty
             if block.weight != difficulty:
                 return False

-            # 7b. Check challenge total_iters = parent total_iters + number_iters
+            # 7b. Check total_iters = parent total_iters + number_iters
             if block.header.data.total_iters != number_of_iters:
                 return False

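The checks above enforce two running totals: a block's weight must equal its parent's weight plus the difficulty it was mined at, and its total_iters must equal the parent's total_iters plus this block's proof-of-time iterations (the genesis block starts both sums from its own values). A minimal standalone sketch of these invariants, using a hypothetical helper name and plain ints rather than the project's uint types:

    def check_running_totals(
        weight: int,
        total_iters: int,
        difficulty: int,
        number_of_iters: int,
        prev_weight: int = 0,
        prev_total_iters: int = 0,
        genesis: bool = False,
    ) -> bool:
        # Genesis: totals start from the block's own difficulty / iterations.
        if genesis:
            return weight == difficulty and total_iters == number_of_iters
        # Otherwise both totals are strict running sums over the parent's values.
        return (
            weight == prev_weight + difficulty
            and total_iters == prev_total_iters + number_of_iters
        )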
@@ -1754,22 +1754,32 @@ class FullNode:
         proof_hashes_map = await self.store.get_proof_hashes()
         curr = self.blockchain.lca_block

-        hashes: List[Tuple[bytes32, Optional[uint64]]] = []
+        hashes: List[Tuple[bytes32, Optional[Tuple[uint64, uint64, uint64]]]] = []
         while curr.height > 0:
-            difficulty_change: Optional[uint64] = None
+            difficulty_update: Optional[Tuple[uint64, uint64, uint64]] = None
             if (
                 curr.height % self.constants["DIFFICULTY_EPOCH"]
                 == self.constants["DIFFICULTY_DELAY"]
+                or curr.height % self.constants["DIFFICULTY_EPOCH"] == 0
             ):
-                difficulty_change = self.blockchain.get_next_difficulty(
-                    curr.prev_header_hash
+                difficulty_update = (
+                    self.blockchain.get_next_difficulty(curr.prev_header_hash),
+                    curr.data.timestamp,
+                    curr.data.total_iters,
                 )
-            hashes.append(
-                (proof_hashes_map[curr.header_hash], difficulty_change)
-            )
+            hashes.append((proof_hashes_map[curr.header_hash], difficulty_update))
             curr = self.blockchain.headers[curr.prev_header_hash]

-        hashes.append((proof_hashes_map[self.blockchain.genesis.header_hash], None))
+        hashes.append(
+            (
+                proof_hashes_map[self.blockchain.genesis.header_hash],
+                (
+                    uint64(self.blockchain.genesis.weight),
+                    self.blockchain.genesis.header.data.timestamp,
+                    self.blockchain.genesis.header.data.total_iters,
+                ),
+            )
+        )
         response = wallet_protocol.RespondAllProofHashes(list(reversed(hashes)))
         yield OutboundMessage(
             NodeType.WALLET,
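With this change, each entry sent to the wallet is a proof hash plus an optional (difficulty, timestamp, total_iters) triple, present only at heights where the difficulty can change (and for the genesis entry, where weight stands in for difficulty). A hedged sketch of how a consumer might unpack one entry; the helper name and the plain int/bytes types are illustrative, not part of the protocol code:

    from typing import Optional, Tuple

    def describe_proof_hash_entry(
        entry: Tuple[bytes, Optional[Tuple[int, int, int]]]
    ) -> str:
        proof_hash, update = entry
        if update is None:
            # Most heights: just the hash of the proofs for this block.
            return f"{proof_hash.hex()}: no difficulty update"
        difficulty, timestamp, total_iters = update
        # Epoch boundaries (and genesis): enough data to check difficulty resets.
        return (
            f"{proof_hash.hex()}: difficulty={difficulty}, "
            f"timestamp={timestamp}, total_iters={total_iters}"
        )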
@@ -36,7 +36,7 @@ class RequestAllProofHashes:
 @dataclass(frozen=True)
 @cbor_message
 class RespondAllProofHashes:
-    hashes: List[Tuple[bytes32, Optional[uint64]]]
+    hashes: List[Tuple[bytes32, Optional[Tuple[uint64, uint64, uint64]]]]


 @dataclass(frozen=True)
@@ -21,5 +21,6 @@ class BlockRecord(Streamable):
     weight: uint128
     additions: List[Coin]
     removals: List[bytes32]
+    timestamp: Optional[uint64]
     total_iters: Optional[uint64]
     new_challenge_hash: Optional[bytes32]
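Because later hunks construct BlockRecord positionally, it may help to see the assumed field order after this change. The following is a stand-in dataclass for illustration only (plain ints/bytes instead of the project's uint and bytes32 types), not the Streamable class itself:

    from dataclasses import dataclass
    from typing import List, Optional

    @dataclass(frozen=True)
    class BlockRecordSketch:
        header_hash: bytes
        prev_header_hash: bytes
        height: int
        weight: int
        additions: List[bytes]
        removals: List[bytes]
        timestamp: Optional[int]        # added in this commit
        total_iters: Optional[int]
        new_challenge_hash: Optional[bytes]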
@@ -40,7 +40,7 @@ class WalletNode:
     cached_blocks: Dict[bytes32, Tuple[BlockRecord, HeaderBlock]]
     cached_removals: Dict[bytes32, List[bytes32]]
     cached_additions: Dict[bytes32, List[Coin]]
-    proof_hashes: List[Tuple[bytes32, Optional[Tuple[uint64, uint64]]]]
+    proof_hashes: List[Tuple[bytes32, Optional[Tuple[uint64, uint64, uint64]]]]
     header_hashes: List[bytes32]
     potential_blocks_received: Dict[uint32, asyncio.Event]
     potential_header_hashes: Dict[uint32, bytes32]
@@ -231,8 +231,10 @@ class WalletNode:
             raise TimeoutError("Took too long to fetch proof hashes.")

         # TODO(mariano): Validate weight
-        # - Request headers for a random subset
-        # - Verify those proofs
+        # Create map from height to difficulty
+        # Randomly sample based on difficulty
+        # Send requests for these heights
+        # Verify these proofs

         weight = self.wallet_state_manager.block_records[fork_point_hash].weight
         header_validate_start_height = max(
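The TODO above sketches weight validation by spot-checking headers: build a height-to-difficulty map from the proof hashes, sample heights with probability proportional to difficulty, request those headers, and verify them. A rough illustration of the sampling step only (a hypothetical helper, not implemented in this commit):

    import random
    from typing import Dict, List

    def sample_heights_by_difficulty(
        height_to_difficulty: Dict[int, int], num_samples: int
    ) -> List[int]:
        # Heavier blocks are more expensive to forge, so sample them more often.
        heights = list(height_to_difficulty.keys())
        weights = [height_to_difficulty[h] for h in heights]
        return random.choices(heights, weights=weights, k=num_samples)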
@@ -249,7 +251,7 @@ class WalletNode:
         ]
         difficulty = uint64(weight - fork_point_parent_weight)
         for height in range(fork_point_height + 1, header_validate_start_height):
-            _, difficulty_change = self.proof_hashes[height]
+            _, difficulty_change, timestamp, total_iters = self.proof_hashes[height]
             weight += difficulty
             block_record = BlockRecord(
                 self.header_hashes[height],
@@ -258,10 +260,15 @@ class WalletNode:
                 weight,
                 [],
                 [],
-                None,
+                timestamp,
+                total_iters,
                 None,
             )
             res = await self.wallet_state_manager.receive_block(block_record, None)
+            assert (
+                res == ReceiveBlockResult.ADDED_TO_HEAD
+                or res == ReceiveBlockResult.ADDED_AS_ORPHAN
+            )

         # Download headers in batches, and verify them as they come in. We download a few batches ahead,
         # in case there are delays. TODO(mariano): optimize sync by pipelining
@@ -273,10 +280,8 @@ class WalletNode:
         sleep_interval = 10

         for height_checkpoint in range(
-            header_validate_start_height + 1, tip_height + 1, 1
+            header_validate_start_height + 1, tip_height + 1
         ):
-            end_height = min(height_checkpoint + 1, tip_height + 1)
-
             total_time_slept = 0
             while True:
                 if self._shut_down:
@@ -290,7 +295,6 @@ class WalletNode:
                     batch_end = min(batch_start + 1, tip_height + 1)

                     if batch_start > tip_height:
                         # We have asked for all blocks
                         break
-
                     blocks_missing = any(
@@ -303,9 +307,6 @@ class WalletNode:
                         time.time() - last_request_time > sleep_interval
                         and blocks_missing
                     ) or (batch_end - 1) > highest_height_requested:
-                        # If we are missing blocks in this batch, and we haven't made a request in a while,
-                        # Make a request for this batch. Also, if we have never requested this batch, make
-                        # the request
                         self.log.info(f"Requesting sync header {batch_start}")
                         if batch_end - 1 > highest_height_requested:
                             highest_height_requested = uint32(batch_end - 1)
@@ -319,13 +320,11 @@ class WalletNode:
                             Delivery.RANDOM,
                         )
                     if request_made:
                         # Reset the timer for requests, so we don't overload other peers with requests
                         last_request_time = time.time()
                         request_made = False
-
                 awaitables = [
-                    self.potential_blocks_received[uint32(height)].wait()
-                    for height in range(height_checkpoint, end_height)
+                    self.potential_blocks_received[uint32(height_checkpoint)].wait()
                 ]
                 future = asyncio.gather(*awaitables, return_exceptions=True)
                 try:
@@ -339,21 +338,17 @@ class WalletNode:
                     total_time_slept += sleep_interval
                     self.log.info("Did not receive desired headers")

             # Verifies this batch, which we are guaranteed to have (since we broke from the above loop)
-            for height in range(height_checkpoint, end_height):
-                hh = self.potential_header_hashes[height]
-                block_record, header_block = self.cached_blocks[hh]
-
-                res = await self.wallet_state_manager.receive_block(
-                    block_record, header_block
-                )
-                if (
-                    res == ReceiveBlockResult.INVALID_BLOCK
-                    or res == ReceiveBlockResult.DISCONNECTED_BLOCK
-                ):
-                    raise RuntimeError(
-                        f"Invalid block header {block_record.header_hash}"
-                    )
+            hh = self.potential_header_hashes[height_checkpoint]
+            block_record, header_block = self.cached_blocks[hh]
+            res = await self.wallet_state_manager.receive_block(
+                block_record, header_block
+            )
+            if (
+                res == ReceiveBlockResult.INVALID_BLOCK
+                or res == ReceiveBlockResult.DISCONNECTED_BLOCK
+            ):
+                raise RuntimeError(f"Invalid block header {block_record.header_hash}")
         self.log.info(
             f"Finished sync process up to height {max(self.wallet_state_manager.height_to_hash.keys())}"
         )
@@ -369,25 +364,9 @@ class WalletNode:
         # 1. If disconnected and close, get parent header and return
         lca = self.wallet_state_manager.block_records[self.wallet_state_manager.lca]
         if block_record.prev_header_hash in self.wallet_state_manager.block_records:
-            total_iters = uint64(
-                self.wallet_state_manager.block_records[
-                    block_record.prev_header_hash
-                ].total_iters
-                + header_block.proof_of_time.number_of_iterations
-            )
-            block_record_with_total_iters = BlockRecord(
-                block_record.header_hash,
-                block_record.prev_header_hash,
-                block_record.height,
-                block_record.weight,
-                block_record.additions,
-                block_record.removals,
-                total_iters,
-                block_record.new_challenge_hash,
-            )
             # We have completed a block that we can add to chain, so add it.
             res = await self.wallet_state_manager.receive_block(
-                block_record_with_total_iters, header_block
+                block_record, header_block
             )
             if res == ReceiveBlockResult.DISCONNECTED_BLOCK:
                 self.log.error("Attempted to add disconnected block")
@@ -527,7 +506,8 @@ class WalletNode:
                 block.weight,
                 [],
                 [],
-                uint64(0),
+                response.header_block.header.data.timestamp,
+                response.header_block.header.data.total_iters,
                 response.header_block.challenge.get_hash(),
             )
             finish_block = True
@@ -632,7 +612,8 @@ class WalletNode:
                 block_record.weight,
                 additions,
                 removals,
-                uint64(0),
+                block_record.timestamp,
+                block_record.total_iters,
                 header_block.challenge.get_hash(),
             )
             self.cached_blocks[response.header_hash] = (new_br, header_block)
@@ -718,7 +699,8 @@ class WalletNode:
                 block_record.weight,
                 additions,
                 removals,
-                uint64(0),
+                block_record.timestamp,
+                block_record.total_iters,
                 header_block.challenge.get_hash(),
             )
             self.cached_blocks[response.header_hash] = (new_br, header_block)
@@ -12,6 +12,7 @@ from src.types.hashable.spend_bundle import SpendBundle
 from src.types.sized_bytes import bytes32
 from src.types.full_block import FullBlock
 from src.types.challenge import Challenge
+from src.types.proof_of_space import ProofOfSpace
 from src.types.header_block import HeaderBlock
 from src.util.ints import uint32, uint64
 from src.util.hash import std_hash
@@ -20,6 +21,7 @@ from src.wallet.block_record import BlockRecord
 from src.wallet.wallet_puzzle_store import WalletPuzzleStore
 from src.wallet.wallet_store import WalletStore
 from src.wallet.wallet_transaction_store import WalletTransactionStore
+from src.consensus.block_rewards import calculate_block_reward
 from src.full_node.blockchain import ReceiveBlockResult
 from src.consensus.pot_iterations import (
     calculate_ips_from_iterations,
@@ -113,7 +115,8 @@ class WalletStateManager:
                 genesis.weight,
                 [],
                 [],
-                uint64(self.constants["VDF_IPS_STARTING"]),
+                genesis_hb.header.data.timestamp,
+                genesis_hb.header.data.total_iters,
                 genesis_challenge.get_hash(),
             ),
             genesis_hb,
@@ -460,10 +463,6 @@ class WalletStateManager:
         if block.weight > self.block_records[self.lca].weight:

             fork_h = self.find_fork_for_lca(block)
-            self.log.warning(
-                f"Got block {block.header_hash} height {block.height} > LCA {self.lca}, "
-                f"{self.block_records[self.lca].height}"
-            )
             await self.reorg_rollback(fork_h)

             # Add blocks between fork point and new lca
@@ -498,10 +497,194 @@ class WalletStateManager:

         return ReceiveBlockResult.ADDED_AS_ORPHAN

+    def get_next_difficulty(self, header_hash: bytes32) -> uint64:
+        """
+        Returns the difficulty of the next block that extends onto header_hash.
+        Used to calculate the number of iterations. Based on the implementation in blockchain.py.
+        """
+        block: BlockRecord = self.block_records[header_hash]
+
+        next_height: uint32 = uint32(block.height + 1)
+        if next_height < self.constants["DIFFICULTY_EPOCH"]:
+            # We are in the first epoch
+            return uint64(self.constants["DIFFICULTY_STARTING"])
+
+        # Epochs are defined as intervals of DIFFICULTY_EPOCH blocks, inclusive and indexed at 0.
+        # For example, [0-2047], [2048-4095], etc. The difficulty changes DIFFICULTY_DELAY into the
+        # epoch, as opposed to the first block (as in Bitcoin).
+        elif (
+            next_height % self.constants["DIFFICULTY_EPOCH"]
+            != self.constants["DIFFICULTY_DELAY"]
+        ):
+            # Not at a point where difficulty would change
+            prev_block: BlockRecord = self.block_records[block.prev_header_hash]
+            assert prev_block is not None
+            if prev_block is None:
+                raise Exception("Previous block is invalid.")
+            return uint64(block.weight - prev_block.weight)
+
+        #        old diff                   curr diff       new diff
+        # ----------|-----|----------------------|-----|-----...
+        #           h1    h2                     h3   i-1
+        # Height1 is the last block 2 epochs ago, so we can include the time to mine 1st block in previous epoch
+        height1 = uint32(
+            next_height
+            - self.constants["DIFFICULTY_EPOCH"]
+            - self.constants["DIFFICULTY_DELAY"]
+            - 1
+        )
+        # Height2 is the DIFFICULTY DELAYth block in the previous epoch
+        height2 = uint32(next_height - self.constants["DIFFICULTY_EPOCH"] - 1)
+        # Height3 is the last block in the previous epoch
+        height3 = uint32(next_height - self.constants["DIFFICULTY_DELAY"] - 1)
+
+        # h1 to h2 timestamps are mined on previous difficulty, while h2 to h3 timestamps are mined on the
+        # current difficulty
+
+        block1, block2, block3 = None, None, None
+        # Once we are before the fork point (and before the LCA), we can use the height_to_hash map
+        if height1 >= 0:
+            # height1 could be -1, for the first difficulty calculation
+            block1 = self.block_records[self.height_to_hash[height1]]
+        block2 = self.block_records[self.height_to_hash[height2]]
+        block3 = self.block_records[self.height_to_hash[height3]]
+
+        # Current difficulty parameter (diff of block h = i - 1)
+        Tc = self.get_next_difficulty(block.prev_header_hash)
+
+        # Previous difficulty parameter (diff of block h = i - 2048 - 1)
+        Tp = self.get_next_difficulty(block2.prev_header_hash)
+        timestamp1: uint64
+        if block1:
+            assert block1.timestamp is not None
+            timestamp1 = block1.timestamp  # i - 512 - 1
+        else:
+            # In the case of height == -1, there is no timestamp here, so assume the genesis block
+            # took constants["BLOCK_TIME_TARGET"] seconds to mine.
+            genesis = self.block_records[self.height_to_hash[uint32(0)]]
+            timestamp1 = genesis.timestamp - self.constants["BLOCK_TIME_TARGET"]
+        assert block2.timestamp is not None and block3.timestamp is not None
+        timestamp2: uint64 = block2.timestamp  # i - 2048 + 512 - 1
+        timestamp3: uint64 = block3.timestamp  # i - 512 - 1
+
+        # Numerator fits in 128 bits, so big int is not necessary
+        # We multiply by the denominators here, so we only have one fraction in the end (avoiding floating point)
+        term1 = (
+            self.constants["DIFFICULTY_DELAY"]
+            * Tp
+            * (timestamp3 - timestamp2)
+            * self.constants["BLOCK_TIME_TARGET"]
+        )
+        term2 = (
+            (self.constants["DIFFICULTY_WARP_FACTOR"] - 1)
+            * (self.constants["DIFFICULTY_EPOCH"] - self.constants["DIFFICULTY_DELAY"])
+            * Tc
+            * (timestamp2 - timestamp1)
+            * self.constants["BLOCK_TIME_TARGET"]
+        )
+
+        # Round down after the division
+        new_difficulty: uint64 = uint64(
+            (term1 + term2)
+            // (
+                self.constants["DIFFICULTY_WARP_FACTOR"]
+                * (timestamp3 - timestamp2)
+                * (timestamp2 - timestamp1)
+            )
+        )
+
+        # Only change by a max factor, to prevent attacks, as in greenpaper, and must be at least 1
+        if new_difficulty >= Tc:
+            return min(new_difficulty, uint64(self.constants["DIFFICULTY_FACTOR"] * Tc))
+        else:
+            return max(
+                [
+                    uint64(1),
+                    new_difficulty,
+                    uint64(Tc // self.constants["DIFFICULTY_FACTOR"]),
+                ]
+            )
+
+    def get_next_ips(
+        self, block: BlockRecord, proof_of_space: ProofOfSpace, iterations: uint64
+    ) -> uint64:
+        """
+        Returns the VDF speed in iterations per second, to be used for the next block. This depends on
+        the number of iterations of the last epoch, and changes at the same block as the difficulty.
+        Based on the implementation in blockchain.py.
+        """
+        next_height: uint32 = uint32(block.height + 1)
+        if next_height < self.constants["DIFFICULTY_EPOCH"]:
+            # First epoch has a hardcoded vdf speed
+            return self.constants["VDF_IPS_STARTING"]
+
+        prev_block: BlockRecord = self.block_records[block.prev_header_hash]
+
+        difficulty = self.get_next_difficulty(prev_block.header_hash)
+        prev_ips = calculate_ips_from_iterations(
+            proof_of_space, difficulty, iterations, self.constants["MIN_BLOCK_TIME"]
+        )
+
+        if (
+            next_height % self.constants["DIFFICULTY_EPOCH"]
+            != self.constants["DIFFICULTY_DELAY"]
+        ):
+            # Not at a point where ips would change, so return the previous ips
+            # TODO: cache this for efficiency
+            return prev_ips
+
+        # ips (along with difficulty) will change in this block, so we need to calculate the new one.
+        # The calculation is (iters_2 - iters_1) // (timestamp_2 - timestamp_1).
+        # 1 and 2 correspond to height_1 and height_2, being the last block of the second to last, and last
+        # block of the last epochs. Basically, it's total iterations over time, of previous epoch.

+        # Height1 is the last block 2 epochs ago, so we can include the iterations taken for mining first block in epoch
+        height1 = uint32(
+            next_height
+            - self.constants["DIFFICULTY_EPOCH"]
+            - self.constants["DIFFICULTY_DELAY"]
+            - 1
+        )
+        # Height2 is the last block in the previous epoch
+        height2 = uint32(next_height - self.constants["DIFFICULTY_DELAY"] - 1)
+
+        block1: Optional[BlockRecord] = None
+        block2: Optional[BlockRecord] = None
+        # Once we are before the fork point (and before the LCA), we can use the height_to_hash map
+        if block1 is None and height1 >= 0:
+            # height1 could be -1, for the first difficulty calculation
+            block1 = self.block_records[self.height_to_hash[height1]]
+        block2 = self.block_records[self.height_to_hash[height2]]
+        assert block2 is not None
+
+        if block1 is not None:
+            timestamp1 = block1.timestamp
+            iters1 = block1.total_iters
+        else:
+            # In the case of height == -1, there is no timestamp here, so assume the genesis block
+            # took constants["BLOCK_TIME_TARGET"] seconds to mine.
+            genesis: BlockRecord = self.block_records[self.height_to_hash[uint32(0)]]
+            timestamp1 = genesis.timestamp - self.constants["BLOCK_TIME_TARGET"]
+            iters1 = genesis.total_iters
+
+        timestamp2 = block2.timestamp
+        iters2 = block2.total_iters
+        assert iters1 is not None and iters2 is not None
+        assert timestamp1 is not None and timestamp2 is not None
+
+        new_ips = uint64((iters2 - iters1) // (timestamp2 - timestamp1))
+
+        # Only change by a max factor, and must be at least 1
+        if new_ips >= prev_ips:
+            return min(new_ips, uint64(self.constants["IPS_FACTOR"] * new_ips))
+        else:
+            return max(
+                [uint64(1), new_ips, uint64(prev_ips // self.constants["IPS_FACTOR"])]
+            )
+
     async def validate_header_block(
         self, br: BlockRecord, header_block: HeaderBlock
     ) -> bool:
         # TODO(mariano): implement
         # POS challenge hash == POT challenge hash == Challenge prev challenge hash
         if (
             header_block.proof_of_space.challenge_hash
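For readers following the arithmetic, here is a compact restatement of the two adjustments added above, with the constants passed in as plain ints. These helpers are illustrative sketches that mirror the clamping exactly as written in the methods; they are not the WalletStateManager API:

    def next_difficulty_sketch(Tc, Tp, t1, t2, t3, epoch, delay, warp, target, factor):
        # Spans: (t1, t2] was mined at the previous difficulty Tp, (t2, t3] at the
        # current difficulty Tc. Multiply by the denominators first so there is
        # only one integer division at the end (no floating point).
        term1 = delay * Tp * (t3 - t2) * target
        term2 = (warp - 1) * (epoch - delay) * Tc * (t2 - t1) * target
        new_difficulty = (term1 + term2) // (warp * (t3 - t2) * (t2 - t1))
        # Only change by a max factor, and stay at least 1, as in the method above.
        if new_difficulty >= Tc:
            return min(new_difficulty, factor * Tc)
        return max(1, new_difficulty, Tc // factor)

    def next_ips_sketch(iters1, iters2, t1, t2, prev_ips, ips_factor):
        # Total iterations of the previous epoch divided by its duration in seconds.
        new_ips = (iters2 - iters1) // (t2 - t1)
        # Clamps mirror the method above (including the min against new_ips itself).
        if new_ips >= prev_ips:
            return min(new_ips, ips_factor * new_ips)
        return max(1, new_ips, prev_ips // ips_factor)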
@@ -532,34 +715,105 @@ class WalletStateManager:
             return False

         # Calculate iters
+        difficulty: uint64
+        ips: uint64
+        prev_block: Optional[BlockRecord]
+        if br.height > 0:
+            prev_block = self.block_records[br.prev_header_hash]
+            difficulty = self.get_next_difficulty(br.prev_header_hash)
+            assert prev_block is not None
+            ips = self.get_next_ips(
+                prev_block,
+                header_block.proof_of_space,
+                header_block.proof_of_time.number_of_iterations,
+            )
+        else:
+            difficulty = uint64(self.constants["DIFFICULTY_STARTING"])
+            ips = uint64(self.constants["VDF_IPS_STARTING"])
+
         number_of_iters: uint64 = calculate_iterations_quality(
             quality_str,
             header_block.proof_of_space.size,
             difficulty,
-            br.ips,
+            ips,
             self.constants["MIN_BLOCK_TIME"],
         )

         if header_block.proof_of_time is None:
             return False

         if number_of_iters != header_block.proof_of_time.number_of_iterations:
             return False

-        # 4. Check PoT
+        # Check PoT
         if not header_block.proof_of_time.is_valid(
             self.constants["DISCRIMINANT_SIZE_BITS"]
         ):
             return False

+        # Validate challenge
+        # - proofs hash is good
+        # - new work difficulty is good if necessary
+        proofs_hash = std_hash(
+            header_block.proof_of_space.get_hash()
+            + header_block.proof_of_time.output.get_hash()
+        )
+        if proofs_hash != header_block.challenge.proofs_hash:
+            return False
+
+        if header_block.challenge.new_work_difficulty is not None:
+            if header_block.challenge.new_work_difficulty != difficulty:
+                return False
+
+        # Validate header:
+        # - header hash and prev header hash match BR
+        # - height and weight match BR
+        # - check timestamp if we have required blocks
+        # - check header pos hash
+        # - check total iters match pot
+        # - check coinbase sig
+        # - check coinbase and fees amount
+        if header_block.header.header_hash != br.header_hash:
+            return False
+        if header_block.header.prev_header_hash != br.prev_header_hash:
+            return False
+        if header_block.height != br.height:
+            return False
+        if header_block.weight != br.weight:
+            return False
+        if br.height > 0:
+            assert prev_block is not None
+            if prev_block.weight + difficulty != br.weight:
+                return False
+            if prev_block.total_iters is not None and br.total_iters is not None:
+                if prev_block.total_iters + number_of_iters != br.total_iters:
+                    return False
+            if prev_block.height + 1 != br.height:
+                return False
+        else:
+            if br.weight != difficulty:
+                return False
+            if br.total_iters != number_of_iters:
+                return False
+
+        # Check that block is not far in the future
+        if (
+            header_block.header.data.timestamp
+            > time.time() + self.constants["MAX_FUTURE_TIME"]
+        ):
+            return False
+
+        # Check header pos hash
+        if (
+            header_block.proof_of_space.get_hash()
+            != header_block.header.data.proof_of_space_hash
+        ):
+            return False
+
+        # Check coinbase sig
+        pair = header_block.header.data.coinbase_signature.PkMessagePair(
+            header_block.proof_of_space.pool_pubkey,
+            header_block.header.data.coinbase.name(),
+        )
+
+        if not header_block.header.data.coinbase_signature.validate([pair]):
+            return False
+
+        # Check coinbase and fees amount
+        coinbase_reward = calculate_block_reward(br.height)
+        if coinbase_reward != header_block.header.data.coinbase.amount:
+            return False
+        return True

     def find_fork_for_lca(self, new_lca: BlockRecord) -> uint32:
@@ -634,7 +888,6 @@ class WalletStateManager:
         Rolls back and updates the coin_store and transaction store. It's possible this height
         is the tip, or even beyond the tip.
         """
-        self.log.warning(f"Rolling back to {index}")
         await self.wallet_store.rollback_lca_to_block(index)

         reorged: List[TransactionRecord] = await self.tx_store.get_transaction_above(
@@ -854,6 +854,7 @@ class TestWalletProtocol:
             if (
                 i % test_constants["DIFFICULTY_EPOCH"]
                 == test_constants["DIFFICULTY_DELAY"]
+                or i % test_constants["DIFFICULTY_EPOCH"] == 0
             ):
                 assert hashes[i][1] is not None
             else:
@@ -114,7 +114,7 @@ class TestFullSync:
     @pytest.mark.asyncio
     async def test_basic_sync_wallet(self, wallet_node):
         num_blocks = 25
-        blocks = bt.get_consecutive_blocks(test_constants, num_blocks, [], 10)
+        blocks = bt.get_consecutive_blocks(test_constants, num_blocks, [])
        full_node_1, wallet_node, server_1, server_2 = wallet_node

         for i in range(1, len(blocks)):
@@ -149,7 +149,7 @@ class TestFullSync:
         # Tests a reorg with the wallet
         start = time.time()
         found = False
-        blocks_reorg = bt.get_consecutive_blocks(test_constants, 15, blocks[:-5], 10)
+        blocks_reorg = bt.get_consecutive_blocks(test_constants, 45, blocks[:-5])
         for i in range(1, len(blocks_reorg)):
             async for msg in full_node_1.respond_block(
                 full_node_protocol.RespondBlock(blocks_reorg[i])
@@ -157,12 +157,12 @@ class TestFullSync:
             server_1.push_message(msg)
         start = time.time()

-        while time.time() - start < 60:
+        while time.time() - start < 100:
             if (
                 wallet_node.wallet_state_manager.block_records[
                     wallet_node.wallet_state_manager.lca
                 ].height
-                == 33
+                == 63
             ):
                 found = True
                 break