persistence, pass all pytests

This commit is contained in:
Alex Wice 2019-11-04 09:27:58 +01:00
parent cea79184f6
commit 1aab0de425
12 changed files with 227 additions and 207 deletions

View File

@ -1,4 +1,4 @@
from src.store.full_node_store import FullNodeStore from src.db.database import FullNodeStore
from src.consensus.block_rewards import calculate_block_reward from src.consensus.block_rewards import calculate_block_reward
import logging import logging
from enum import Enum from enum import Enum
@ -46,11 +46,21 @@ class Blockchain:
self.height_to_hash: Dict[uint64, bytes32] = {} self.height_to_hash: Dict[uint64, bytes32] = {}
async def initialize(self): async def initialize(self):
async for block in self.store.get_blocks():
if not self.heads or block.height > self.heads[0].height:
self.heads = [block]
# TODO: are cases where the blockchain "fans out" handled appropriately?
self.height_to_hash[block.height] = block.header_hash
if not self.heads:
self.genesis = FullBlock.from_bytes(self.constants["GENESIS_BLOCK"]) self.genesis = FullBlock.from_bytes(self.constants["GENESIS_BLOCK"])
result = await self.receive_block(self.genesis) result = await self.receive_block(self.genesis)
if result != ReceiveBlockResult.ADDED_TO_HEAD: if result != ReceiveBlockResult.ADDED_TO_HEAD:
raise InvalidGenesisBlock() raise InvalidGenesisBlock()
assert self.lca_block assert self.lca_block
else:
self.lca_block = self.heads[0]
def get_current_tips(self) -> List[HeaderBlock]: def get_current_tips(self) -> List[HeaderBlock]:
""" """
@ -310,7 +320,7 @@ class Blockchain:
Adds a new block into the blockchain, if it's valid and connected to the current Adds a new block into the blockchain, if it's valid and connected to the current
blockchain, regardless of whether it is the child of a head, or another block. blockchain, regardless of whether it is the child of a head, or another block.
""" """
genesis: bool = block.height == 0 and len(self.tips) == 0 genesis: bool = block.height == 0 and not self.tips
if await self.store.get_block(block.header_hash) is not None: if await self.store.get_block(block.header_hash) is not None:
return ReceiveBlockResult.ALREADY_HAVE_BLOCK return ReceiveBlockResult.ALREADY_HAVE_BLOCK

View File

@ -1,5 +1,5 @@
from abc import ABC from abc import ABC
from typing import Optional, Tuple from typing import Optional, Tuple, AsyncGenerator
import asyncio import asyncio
import motor.motor_asyncio as maio import motor.motor_asyncio as maio
from src.types.proof_of_space import ProofOfSpace from src.types.proof_of_space import ProofOfSpace
@ -12,7 +12,6 @@ from src.util.ints import uint32, uint64
from src.db.codecs import codec_options from src.db.codecs import codec_options
import subprocess import subprocess
class Database(ABC): class Database(ABC):
# All databases must subclass this so that there's one client # All databases must subclass this so that there's one client
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
@ -24,22 +23,29 @@ class Database(ABC):
self.db = Database.client.get_database(db_name, codec_options=codec_options) self.db = Database.client.get_database(db_name, codec_options=codec_options)
class FullNodeDatabase(Database): class FullNodeStore(Database):
def __init__(self, db_name): def __init__(self, db_name):
super().__init__(db_name) super().__init__(db_name)
# Stored on database
getc = self.db.get_collection getc = self.db.get_collection
self.full_blocks = getc("full_blocks") self.full_blocks = getc("full_blocks")
self.potential_heads = getc("potential_heads") self.potential_heads = getc("potential_heads")
# potential_heads entries: self.potential_trunks = getc("potential_trunks")
# {_id: header_hash, block: Optional[FullBlock]}
self.potential_headers = getc("potential_headers")
self.potential_blocks = getc("potential_blocks") self.potential_blocks = getc("potential_blocks")
self.candidate_blocks = getc("candidate_blocks") self.candidate_blocks = getc("candidate_blocks")
self.unfinished_blocks = getc("unfinished_blocks") self.unfinished_blocks = getc("unfinished_blocks")
self.unfinished_blocks_leader = getc("unfinished_blocks_leader") self.unfinished_blocks_leader = getc("unfinished_blocks_leader")
self.sync_mode = getc("sync_mode") self.sync_mode = getc("sync_mode")
# Stored on memory
self.potential_blocks_received: Dict[uint32, Event] = {}
self.proof_of_time_estimate_ips: uint64 = uint64(3000)
# Lock
self.lock = asyncio.Lock() # external
async def _clear_database(self): async def _clear_database(self):
await self.full_blocks.drop() await self.full_blocks.drop()
await self.potential_heads.drop() await self.potential_heads.drop()
@ -59,10 +65,13 @@ class FullNodeDatabase(Database):
) )
async def get_block(self, header_hash: bytes32) -> Optional[FullBlock]: async def get_block(self, header_hash: bytes32) -> Optional[FullBlock]:
query = await self.full_blocks.find_one(header_hash) query = await self.full_blocks.find_one({"_id": header_hash})
if query is not None: if query is not None:
return FullBlock.from_bytes(query["block"]) return FullBlock.from_bytes(query["block"])
return None
async def get_blocks(self) -> AsyncGenerator[FullBlock, None]:
async for query in self.full_blocks.find({}):
yield FullBlock.from_bytes(query["block"])
async def set_sync_mode(self, sync_mode: bool) -> None: async def set_sync_mode(self, sync_mode: bool) -> None:
await self.sync_mode.update_one( await self.sync_mode.update_one(
@ -71,7 +80,7 @@ class FullNodeDatabase(Database):
async def get_sync_mode(self) -> bool: async def get_sync_mode(self) -> bool:
query = await self.sync_mode.find_one({"_id": 0}) query = await self.sync_mode.find_one({"_id": 0})
return query.get("value", False) if query else False return query.get("value", True) if query else True
async def _set_default_sync_mode(self, sync_mode): async def _set_default_sync_mode(self, sync_mode):
query = await self.sync_mode.find_one({"_id": 0}) query = await self.sync_mode.find_one({"_id": 0})
@ -86,6 +95,14 @@ class FullNodeDatabase(Database):
async def get_potential_heads_number(self) -> int: async def get_potential_heads_number(self) -> int:
return await self.potential_heads.count_documents({}) return await self.potential_heads.count_documents({})
async def get_potential_heads_tuples(self) -> AsyncGenerator[Tuple[bytes32, FullBlock], None]:
async for query in self.potential_heads.find({}):
if query and "block" in query:
block = FullBlock.from_bytes(query["block"])
else:
block = None
yield bytes32(query["_id"]), block
async def add_potential_head( async def add_potential_head(
self, header_hash: bytes32, block: Optional[FullBlock] = None self, header_hash: bytes32, block: Optional[FullBlock] = None
) -> None: ) -> None:
@ -121,6 +138,12 @@ class FullNodeDatabase(Database):
query = await self.potential_blocks.find_one({"_id": height}) query = await self.potential_blocks.find_one({"_id": height})
return FullBlock.from_bytes(query["block"]) if query else None return FullBlock.from_bytes(query["block"]) if query else None
async def set_potential_blocks_received(self, height: uint32, event: asyncio.Event):
self.potential_blocks_received[height] = event
async def get_potential_blocks_received(self, height: uint32) -> asyncio.Event:
return self.potential_blocks_received[height]
async def add_candidate_block( async def add_candidate_block(
self, self,
pos_hash: bytes32, pos_hash: bytes32,
@ -172,13 +195,19 @@ class FullNodeDatabase(Database):
query = await self.unfinished_blocks_leader.find_one({"_id": 0}) query = await self.unfinished_blocks_leader.find_one({"_id": 0})
return (query["header"], query["iters"]) if query else None return (query["header"], query["iters"]) if query else None
async def set_proof_of_time_estimate_ips(self, estimate: uint64):
self.proof_of_time_estimate_ips = estimate
async def get_proof_of_time_estimate_ips(self) -> uint64:
return self.proof_of_time_estimate_ips
# TODO: remove below when tested better # TODO: remove below when tested better
if __name__ == "__main__": if __name__ == "__main__":
async def tests(): async def tests():
print("started testing") print("started testing")
db = FullNodeDatabase("test3") db = FullNodeStore("test3")
await db._clear_database() await db._clear_database()
from src.consensus.constants import constants from src.consensus.constants import constants
@ -237,4 +266,4 @@ if __name__ == "__main__":
print("done testing") print("done testing")
Database.loop.run_until_complete(tests()) #Database.loop.run_until_complete(tests())

View File

@ -31,9 +31,8 @@ from src.consensus.constants import constants
from src.blockchain import Blockchain, ReceiveBlockResult from src.blockchain import Blockchain, ReceiveBlockResult
from src.server.outbound_message import OutboundMessage, Delivery, NodeType, Message from src.server.outbound_message import OutboundMessage, Delivery, NodeType, Message
from src.util.errors import BlockNotInBlockchain, PeersDontHaveBlock, InvalidUnfinishedBlock from src.util.errors import BlockNotInBlockchain, PeersDontHaveBlock, InvalidUnfinishedBlock
from src.store.full_node_store import FullNodeStore
from src.server.server import ChiaServer from src.server.server import ChiaServer
from src.db.database import FullNodeStore
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -59,7 +58,7 @@ class FullNode:
estimated proof of time rate, so farmer can calulate which proofs are good. estimated proof of time rate, so farmer can calulate which proofs are good.
""" """
requests: List[farmer_protocol.ProofOfSpaceFinalized] = [] requests: List[farmer_protocol.ProofOfSpaceFinalized] = []
async with (await self.store.get_lock()): async with self.store.lock:
for tip in self.blockchain.get_current_tips(): for tip in self.blockchain.get_current_tips():
assert tip.proof_of_time and tip.challenge assert tip.proof_of_time and tip.challenge
challenge_hash = tip.challenge.get_hash() challenge_hash = tip.challenge.get_hash()
@ -83,7 +82,7 @@ class FullNode:
Sends all of the current heads to all timelord peers. Sends all of the current heads to all timelord peers.
""" """
requests: List[timelord_protocol.ChallengeStart] = [] requests: List[timelord_protocol.ChallengeStart] = []
async with (await self.store.get_lock()): async with self.store.lock:
for head in self.blockchain.get_current_tips(): for head in self.blockchain.get_current_tips():
assert head.challenge assert head.challenge
challenge_hash = head.challenge.get_hash() challenge_hash = head.challenge.get_hash()
@ -99,7 +98,7 @@ class FullNode:
""" """
blocks: List[FullBlock] = [] blocks: List[FullBlock] = []
async with (await self.store.get_lock()): async with self.store.lock:
heads: List[HeaderBlock] = self.blockchain.get_current_tips() heads: List[HeaderBlock] = self.blockchain.get_current_tips()
for h in heads: for h in heads:
block = await self.blockchain.get_block(h.header.get_hash()) block = await self.blockchain.get_block(h.header.get_hash())
@ -166,7 +165,8 @@ class FullNode:
# Based on responses from peers about the current heads, see which head is the heaviest # Based on responses from peers about the current heads, see which head is the heaviest
# (similar to longest chain rule). # (similar to longest chain rule).
async with (await self.store.get_lock()):
async with self.store.lock:
potential_heads = (await self.store.get_potential_heads()).items() potential_heads = (await self.store.get_potential_heads()).items()
log.info(f"Have collected {len(potential_heads)} potential heads") log.info(f"Have collected {len(potential_heads)} potential heads")
for header_hash, _ in potential_heads: for header_hash, _ in potential_heads:
@ -178,7 +178,7 @@ class FullNode:
if highest_weight <= max([t.weight for t in self.blockchain.get_current_tips()]): if highest_weight <= max([t.weight for t in self.blockchain.get_current_tips()]):
log.info("Not performing sync, already caught up.") log.info("Not performing sync, already caught up.")
await self.store.set_sync_mode(False) await self.store.set_sync_mode(False)
await self.store.clear_sync_information() await self.store.clear_sync_info()
return return
assert tip_block assert tip_block
@ -198,9 +198,11 @@ class FullNode:
yield OutboundMessage(NodeType.FULL_NODE, Message("request_header_blocks", request), Delivery.RANDOM) yield OutboundMessage(NodeType.FULL_NODE, Message("request_header_blocks", request), Delivery.RANDOM)
await asyncio.sleep(sleep_interval) await asyncio.sleep(sleep_interval)
total_time_slept += sleep_interval total_time_slept += sleep_interval
async with (await self.store.get_lock()):
async with self.store.lock:
received_all_headers = True received_all_headers = True
local_headers = [] local_headers = []
for height in range(0, tip_height + 1): for height in range(0, tip_height + 1):
if await self.store.get_potential_header(uint32(height)) is None: if await self.store.get_potential_header(uint32(height)) is None:
received_all_headers = False received_all_headers = False
@ -217,20 +219,19 @@ class FullNode:
log.error(f"Validated weight of headers. Downloaded {len(headers)} headers, tip height {tip_height}") log.error(f"Validated weight of headers. Downloaded {len(headers)} headers, tip height {tip_height}")
assert tip_height + 1 == len(headers) assert tip_height + 1 == len(headers)
async with (await self.store.get_lock()): async with self.store.lock:
fork_point: HeaderBlock = self.blockchain.find_fork_point(headers) fork_point: HeaderBlock = self.blockchain.find_fork_point(headers)
# TODO: optimize, send many requests at once, and for more blocks # TODO: optimize, send many requests at once, and for more blocks
for height in range(fork_point.height + 1, tip_height + 1): for height in range(fork_point.height + 1, tip_height + 1):
# Only download from fork point (what we don't have) # Only download from fork point (what we don't have)
async with (await self.store.get_lock()): async with self.store.lock:
have_block = await self.store.get_potential_heads_full_block(headers[height].header.get_hash()) \ have_block = await self.store.get_potential_heads_full_block(headers[height].header.get_hash()) is not None
is not None
if not have_block: if not have_block:
request_sync = peer_protocol.RequestSyncBlocks(tip_block.header_block.header.header_hash, request_sync = peer_protocol.RequestSyncBlocks(tip_block.header_block.header.header_hash,
[uint64(height)]) [uint64(height)])
async with (await self.store.get_lock()): async with self.store.lock:
await self.store.set_potential_blocks_received(uint32(height), Event()) await self.store.set_potential_blocks_received(uint32(height), Event())
found = False found = False
for _ in range(30): for _ in range(30):
@ -245,10 +246,11 @@ class FullNode:
log.info("Did not receive desired block") log.info("Did not receive desired block")
if not found: if not found:
raise PeersDontHaveBlock(f"Did not receive desired block at height {height}") raise PeersDontHaveBlock(f"Did not receive desired block at height {height}")
async with (await self.store.get_lock()): async with self.store.lock:
# TODO: ban peers that provide bad blocks # TODO: ban peers that provide bad blocks
if have_block: if have_block:
block = await self.store.get_potential_heads_full_block(headers[height].header.get_hash()) block = await self.store.get_potential_head(headers[height].header.get_hash())
else: else:
block = await self.store.get_potential_block(uint32(height)) block = await self.store.get_potential_block(uint32(height))
assert block is not None assert block is not None
@ -261,10 +263,10 @@ class FullNode:
assert max([h.height for h in self.blockchain.get_current_tips()]) >= height assert max([h.height for h in self.blockchain.get_current_tips()]) >= height
await self.store.set_proof_of_time_estimate_ips(await self.blockchain.get_next_ips(block.header_hash)) await self.store.set_proof_of_time_estimate_ips(await self.blockchain.get_next_ips(block.header_hash))
async with (await self.store.get_lock()): async with self.store.lock:
log.info(f"Finished sync up to height {tip_height}") log.info(f"Finished sync up to height {tip_height}")
await self.store.set_sync_mode(False) await self.store.set_sync_mode(False)
await self.store.clear_sync_information() await self.store.clear_sync_info()
# Update farmers and timelord with most recent information # Update farmers and timelord with most recent information
async for msg in self._send_challenges_to_timelords(): async for msg in self._send_challenges_to_timelords():
@ -281,7 +283,8 @@ class FullNode:
if len(request.heights) > self.config['max_headers_to_send']: if len(request.heights) > self.config['max_headers_to_send']:
raise errors.TooManyheadersRequested(f"The max number of headers is {self.config['max_headers_to_send']},\ raise errors.TooManyheadersRequested(f"The max number of headers is {self.config['max_headers_to_send']},\
but requested {len(request.heights)}") but requested {len(request.heights)}")
async with (await self.store.get_lock()):
async with self.store.lock:
try: try:
headers: List[HeaderBlock] = await self.blockchain.get_header_blocks_by_height(request.heights, headers: List[HeaderBlock] = await self.blockchain.get_header_blocks_by_height(request.heights,
request.tip_header_hash) request.tip_header_hash)
@ -300,7 +303,7 @@ class FullNode:
""" """
Receive header blocks from a peer. Receive header blocks from a peer.
""" """
async with (await self.store.get_lock()): async with self.store.lock:
for header_block in request.header_blocks: for header_block in request.header_blocks:
await self.store.add_potential_header(header_block) await self.store.add_potential_header(header_block)
@ -314,7 +317,7 @@ class FullNode:
Responsd to a peers request for syncing blocks. Responsd to a peers request for syncing blocks.
""" """
blocks: List[FullBlock] = [] blocks: List[FullBlock] = []
async with (await self.store.get_lock()): async with self.store.lock:
tip_block: Optional[FullBlock] = await self.blockchain.get_block(request.tip_header_hash) tip_block: Optional[FullBlock] = await self.blockchain.get_block(request.tip_header_hash)
if tip_block is not None: if tip_block is not None:
if len(request.heights) > self.config['max_blocks_to_send']: if len(request.heights) > self.config['max_blocks_to_send']:
@ -347,7 +350,7 @@ class FullNode:
We have received the blocks that we needed for syncing. Add them to processing queue. We have received the blocks that we needed for syncing. Add them to processing queue.
""" """
# TODO: use an actual queue? # TODO: use an actual queue?
async with (await self.store.get_lock()): async with self.store.lock:
if not await self.store.get_sync_mode(): if not await self.store.get_sync_mode():
log.warning("Receiving sync blocks when we are not in sync mode.") log.warning("Receiving sync blocks when we are not in sync mode.")
return return
@ -374,7 +377,7 @@ class FullNode:
bytes(request.proof_of_space.proof)) bytes(request.proof_of_space.proof))
assert quality_string assert quality_string
async with (await self.store.get_lock()): async with self.store.lock:
# Retrieves the correct head for the challenge # Retrieves the correct head for the challenge
heads: List[HeaderBlock] = self.blockchain.get_current_tips() heads: List[HeaderBlock] = self.blockchain.get_current_tips()
target_head: Optional[HeaderBlock] = None target_head: Optional[HeaderBlock] = None
@ -416,7 +419,7 @@ class FullNode:
block_header_data_hash: bytes32 = block_header_data.get_hash() block_header_data_hash: bytes32 = block_header_data.get_hash()
# self.stores this block so we can submit it to the blockchain after it's signed by harvester # self.stores this block so we can submit it to the blockchain after it's signed by harvester
await self.store.add_candidate_block(proof_of_space_hash, (body, block_header_data, request.proof_of_space)) await self.store.add_candidate_block(proof_of_space_hash, body, block_header_data, request.proof_of_space)
message = farmer_protocol.HeaderHash(proof_of_space_hash, block_header_data_hash) message = farmer_protocol.HeaderHash(proof_of_space_hash, block_header_data_hash)
yield OutboundMessage(NodeType.FARMER, Message("header_hash", message), Delivery.RESPOND) yield OutboundMessage(NodeType.FARMER, Message("header_hash", message), Delivery.RESPOND)
@ -429,7 +432,7 @@ class FullNode:
block, which only needs a Proof of Time to be finished. If the signature is valid, block, which only needs a Proof of Time to be finished. If the signature is valid,
we call the unfinished_block routine. we call the unfinished_block routine.
""" """
async with (await self.store.get_lock()): async with self.store.lock:
if (await self.store.get_candidate_block(header_signature.pos_hash)) is None: if (await self.store.get_candidate_block(header_signature.pos_hash)) is None:
log.warning(f"PoS hash {header_signature.pos_hash} not found in database") log.warning(f"PoS hash {header_signature.pos_hash} not found in database")
return return
@ -456,7 +459,7 @@ class FullNode:
A proof of time, received by a peer timelord. We can use this to complete a block, A proof of time, received by a peer timelord. We can use this to complete a block,
and call the block routine (which handles propagation and verification of blocks). and call the block routine (which handles propagation and verification of blocks).
""" """
async with (await self.store.get_lock()): async with self.store.lock:
dict_key = (request.proof.challenge_hash, request.proof.number_of_iterations) dict_key = (request.proof.challenge_hash, request.proof.number_of_iterations)
unfinished_block_obj: Optional[FullBlock] = await self.store.get_unfinished_block(dict_key) unfinished_block_obj: Optional[FullBlock] = await self.store.get_unfinished_block(dict_key)
@ -496,9 +499,10 @@ class FullNode:
""" """
finish_block: bool = False finish_block: bool = False
propagate_proof: bool = False propagate_proof: bool = False
async with (await self.store.get_lock()): async with self.store.lock:
if (await self.store.get_unfinished_block((new_proof_of_time.proof.challenge_hash, if (await self.store.get_unfinished_block((new_proof_of_time.proof.challenge_hash,
new_proof_of_time.proof.number_of_iterations))): new_proof_of_time.proof.number_of_iterations))):
finish_block = True finish_block = True
elif new_proof_of_time.proof.is_valid(constants["DISCRIMINANT_SIZE_BITS"]): elif new_proof_of_time.proof.is_valid(constants["DISCRIMINANT_SIZE_BITS"]):
propagate_proof = True propagate_proof = True
@ -518,7 +522,7 @@ class FullNode:
We can validate it and if it's a good block, propagate it to other peers and We can validate it and if it's a good block, propagate it to other peers and
timelords. timelords.
""" """
async with (await self.store.get_lock()): async with self.store.lock:
if not self.blockchain.is_child_of_head(unfinished_block.block): if not self.blockchain.is_child_of_head(unfinished_block.block):
return return
@ -550,9 +554,9 @@ class FullNode:
# If this block is slow, sleep to allow faster blocks to come out first # If this block is slow, sleep to allow faster blocks to come out first
await asyncio.sleep(3) await asyncio.sleep(3)
async with (await self.store.get_lock()): async with self.store.lock:
leader: Tuple[uint32, uint64] = await self.store.get_unfinished_block_leader() leader: Tuple[uint32, uint64] = await self.store.get_unfinished_block_leader()
if unfinished_block.block.height > leader[0]: if leader is None or unfinished_block.block.height > leader[0]:
log.info(f"This is the first block at height {unfinished_block.block.height}, so propagate.") log.info(f"This is the first block at height {unfinished_block.block.height}, so propagate.")
# If this is the first block we see at this height, propagate # If this is the first block we see at this height, propagate
await self.store.set_unfinished_block_leader((unfinished_block.block.height, expected_time)) await self.store.set_unfinished_block_leader((unfinished_block.block.height, expected_time))
@ -586,11 +590,10 @@ class FullNode:
header_hash = block.block.header_block.header.get_hash() header_hash = block.block.header_block.header.get_hash()
async with (await self.store.get_lock()): async with self.store.lock:
if await self.store.get_sync_mode(): if await self.store.get_sync_mode():
# Add the block to our potential heads list # Add the block to our potential heads list
await self.store.add_potential_head(header_hash) await self.store.add_potential_head(header_hash, block.block)
await self.store.add_potential_heads_full_block(block.block)
return return
# Record our minimum height, and whether we have a full set of heads # Record our minimum height, and whether we have a full set of heads
least_height: uint32 = min([h.height for h in self.blockchain.get_current_tips()]) least_height: uint32 = min([h.height for h in self.blockchain.get_current_tips()])
@ -605,13 +608,13 @@ class FullNode:
return return
elif added == ReceiveBlockResult.DISCONNECTED_BLOCK: elif added == ReceiveBlockResult.DISCONNECTED_BLOCK:
log.warning(f"Disconnected block {header_hash}") log.warning(f"Disconnected block {header_hash}")
async with (await self.store.get_lock()): async with self.store.lock:
tip_height = max([head.height for head in self.blockchain.get_current_tips()]) tip_height = max([head.height for head in self.blockchain.get_current_tips()])
if block.block.height > tip_height + self.config["sync_blocks_behind_threshold"]: if block.block.height > tip_height + self.config["sync_blocks_behind_threshold"]:
async with (await self.store.get_lock()): async with self.store.lock:
await self.store.clear_sync_information() await self.store.clear_sync_info()
await self.store.add_potential_head(header_hash) await self.store.add_potential_head(header_hash, block.block)
await self.store.add_potential_heads_full_block(block.block)
log.info(f"We are too far behind this block. Our height is {tip_height} and block is at " log.info(f"We are too far behind this block. Our height is {tip_height} and block is at "
f"{block.block.height}") f"{block.block.height}")
# Perform a sync if we have to # Perform a sync if we have to
@ -644,8 +647,9 @@ class FullNode:
# height than the worst one (assuming we had a full set of heads). # height than the worst one (assuming we had a full set of heads).
deep_reorg: bool = (block.block.height < least_height) and full_heads deep_reorg: bool = (block.block.height < least_height) and full_heads
ips_changed: bool = False ips_changed: bool = False
async with (await self.store.get_lock()): async with self.store.lock:
log.info(f"Updated heads, new heights: {[b.height for b in self.blockchain.get_current_tips()]}") log.info(f"Updated heads, new heights: {[b.height for b in self.blockchain.get_current_tips()]}")
difficulty = await self.blockchain.get_next_difficulty(block.block.prev_header_hash) difficulty = await self.blockchain.get_next_difficulty(block.block.prev_header_hash)
next_vdf_ips = await self.blockchain.get_next_ips(block.block.header_hash) next_vdf_ips = await self.blockchain.get_next_ips(block.block.header_hash)
log.info(f"Difficulty {difficulty} IPS {next_vdf_ips}") log.info(f"Difficulty {difficulty} IPS {next_vdf_ips}")

View File

@ -6,7 +6,7 @@
# from src.util.network import parse_host_port, create_node_id # from src.util.network import parse_host_port, create_node_id
# from src.server.outbound_message import NodeType, OutboundMessage, Message, Delivery # from src.server.outbound_message import NodeType, OutboundMessage, Message, Delivery
# from src.types.peer_info import PeerInfo # from src.types.peer_info import PeerInfo
# from src.store.full_node_store import FullNodeStore # from src.db.database import FullNodeStore
# from src.blockchain import Blockchain # from src.blockchain import Blockchain

View File

@ -7,7 +7,7 @@ from src.server.server import ChiaServer
from src.util.network import parse_host_port from src.util.network import parse_host_port
from src.server.outbound_message import NodeType from src.server.outbound_message import NodeType
from src.types.peer_info import PeerInfo from src.types.peer_info import PeerInfo
from src.store.full_node_store import FullNodeStore from src.db.database import FullNodeStore
from src.blockchain import Blockchain from src.blockchain import Blockchain
@ -22,8 +22,11 @@ server_closed = False
async def main(): async def main():
# Create the store (DB) and full node instance # Create the store (DB) and full node instance
store = FullNodeStore() db_id = 0
await store.initialize() if "-id" in sys.argv:
db_id = int(sys.argv[sys.argv.index("-id") + 1])
store = FullNodeStore(f"fndb_{db_id}")
blockchain = Blockchain(store) blockchain = Blockchain(store)
await blockchain.initialize() await blockchain.initialize()
@ -95,5 +98,5 @@ async def main():
await wait_for_ui() await wait_for_ui()
await asyncio.get_running_loop().shutdown_asyncgens() await asyncio.get_running_loop().shutdown_asyncgens()
#asyncio.run(main())
asyncio.run(main()) FullNodeStore.loop.run_until_complete(main())

View File

@ -0,0 +1,28 @@
. .venv/bin/activate
_kill_servers() {
ps -e | grep python | awk '{print $1}' | xargs -L1 kill
ps -e | grep "vdf_server" | awk '{print $1}' | xargs -L1 kill
}
_kill_servers
python -m src.server.start_full_node "127.0.0.1" 8002 -id 1 -f &
P4=$!
python -m src.server.start_full_node "127.0.0.1" 8004 -id 2 -t -u 8222 &
P5=$!
python -m src.server.start_full_node "127.0.0.1" 8005 -id 3 &
P6=$!
_term() {
echo "Caught SIGTERM signal, killing all servers."
kill -TERM "$P4" 2>/dev/null
kill -TERM "$P5" 2>/dev/null
kill -TERM "$P6" 2>/dev/null
_kill_servers
}
trap _term SIGTERM
trap _term SIGINT
trap _term INT
wait $P4 $P5 $P6

View File

@ -0,0 +1,37 @@
. .venv/bin/activate
_kill_servers() {
ps -e | grep python | awk '{print $1}' | xargs -L1 kill
ps -e | grep "vdf_server" | awk '{print $1}' | xargs -L1 kill
}
_kill_servers
python -m src.server.start_plotter &
P1=$!
python -m src.server.start_timelord &
P2=$!
python -m src.server.start_farmer &
P3=$!
python -m src.server.start_full_node "127.0.0.1" 8002 -id 1 -f &
P4=$!
python -m src.server.start_full_node "127.0.0.1" 8004 -id 2 -t -u 8222 &
P5=$!
python -m src.server.start_full_node "127.0.0.1" 8005 -id 3 &
P6=$!
_term() {
echo "Caught SIGTERM signal, killing all servers."
kill -TERM "$P1" 2>/dev/null
kill -TERM "$P2" 2>/dev/null
kill -TERM "$P3" 2>/dev/null
kill -TERM "$P4" 2>/dev/null
kill -TERM "$P5" 2>/dev/null
kill -TERM "$P6" 2>/dev/null
_kill_servers
}
trap _term SIGTERM
trap _term SIGINT
trap _term INT
wait $P1 $P2 $P3 $P4 $P5 $P6

View File

@ -0,0 +1,28 @@
. .venv/bin/activate
_kill_servers() {
ps -e | grep python | awk '{print $1}' | xargs -L1 kill
ps -e | grep "vdf_server" | awk '{print $1}' | xargs -L1 kill
}
_kill_servers
python -m src.server.start_plotter &
P1=$!
python -m src.server.start_timelord &
P2=$!
python -m src.server.start_farmer &
P3=$!
_term() {
echo "Caught SIGTERM signal, killing all servers."
kill -TERM "$P1" 2>/dev/null
kill -TERM "$P2" 2>/dev/null
kill -TERM "$P3" 2>/dev/null
_kill_servers
}
trap _term SIGTERM
trap _term SIGINT
trap _term INT
wait $P1 $P2 $P3

View File

@ -1,121 +0,0 @@
from typing import Tuple, Optional, Dict, Counter
import collections
from asyncio import Lock, Event
from src.types.proof_of_space import ProofOfSpace
from src.types.header import HeaderData
from src.types.header_block import HeaderBlock
from src.types.body import Body
from src.types.full_block import FullBlock
from src.types.sized_bytes import bytes32
from src.util.ints import uint32, uint64
class FullNodeStore:
    """Volatile, in-memory storage used by a full node.

    Tracks fully received blocks, sync-mode bookkeeping, and blocks that are
    still being assembled (waiting on a proof of space or proof of time).
    None of the coroutines below acquire ``self.lock`` themselves; callers
    are expected to obtain it via ``get_lock()`` and hold it across related
    operations.
    """

    def __init__(self):
        # Guards all of the mutable state created in initialize().
        self.lock = Lock()

    async def initialize(self):
        """Create the empty containers.  Must be awaited before first use."""
        # Every block we have fully received, keyed by header hash.
        self.full_blocks: Dict[bytes32, FullBlock] = {}
        # Whether the node is currently catching up to the network.
        self.sync_mode: bool = True

        # Block headers and blocks which we think might be heads, but we
        # haven't verified yet.  All of these are used during sync mode.
        self.potential_heads: Counter[bytes32] = collections.Counter()
        # Fix: this was initialized as collections.Counter(), contradicting
        # the Dict annotation (and silently returning 0 for missing keys);
        # a plain dict is the intended container.
        self.potential_heads_full_blocks: Dict[bytes32, FullBlock] = {}

        # Headers downloaded during sync, by height.
        self.potential_headers: Dict[uint32, HeaderBlock] = {}
        # Blocks downloaded during sync, by height.
        self.potential_blocks: Dict[uint32, FullBlock] = {}
        # Event which gets set whenever we receive the block at each height.
        # Waited for by sync().
        self.potential_blocks_received: Dict[uint32, Event] = {}

        # Blocks that we created, but don't have the PoS from the farmer
        # yet, keyed by the proof-of-space hash.
        self.candidate_blocks: Dict[bytes32, Tuple[Body, HeaderData, ProofOfSpace]] = {}
        # Blocks that we created and have a PoS for, but no PoT yet, keyed
        # by (challenge hash, iterations).
        self.unfinished_blocks: Dict[Tuple[bytes32, uint64], FullBlock] = {}
        # Latest height with unfinished blocks, and the expected timestamp
        # of the finishing.
        self.unfinished_blocks_leader: Tuple[uint32, uint64] = (uint32(0), uint64(9999999999))
        # Current estimate of proof-of-time iterations per second.
        self.proof_of_time_estimate_ips: uint64 = uint64(3000)

    async def get_lock(self) -> Lock:
        """Return the store-wide lock for callers to hold across accesses."""
        return self.lock

    async def save_block(self, block: FullBlock):
        self.full_blocks[block.header_hash] = block

    async def get_block(self, header_hash: bytes32) -> Optional[FullBlock]:
        return self.full_blocks.get(header_hash)

    async def set_sync_mode(self, sync_mode: bool):
        self.sync_mode = sync_mode

    async def get_sync_mode(self) -> bool:
        return self.sync_mode

    async def clear_sync_information(self):
        """Drop all sync-mode bookkeeping (e.g. once sync completes)."""
        self.potential_heads.clear()
        self.potential_heads_full_blocks.clear()
        self.potential_headers.clear()
        self.potential_blocks.clear()
        self.potential_blocks_received.clear()

    async def add_potential_head(self, header_hash: bytes32):
        # Counter semantics: each sighting of a head increments its tally.
        self.potential_heads[header_hash] += 1

    async def get_potential_heads(self) -> Dict[bytes32, int]:
        return self.potential_heads

    async def add_potential_heads_full_block(self, block: FullBlock):
        self.potential_heads_full_blocks[block.header_hash] = block

    async def get_potential_heads_full_block(self, header_hash: bytes32) -> Optional[FullBlock]:
        return self.potential_heads_full_blocks.get(header_hash)

    async def add_potential_header(self, block: HeaderBlock):
        self.potential_headers[block.height] = block

    async def get_potential_header(self, height: uint32) -> Optional[HeaderBlock]:
        return self.potential_headers.get(height)

    async def add_potential_block(self, block: FullBlock):
        self.potential_blocks[block.height] = block

    async def get_potential_block(self, height: uint32) -> Optional[FullBlock]:
        return self.potential_blocks.get(height)

    async def set_potential_blocks_received(self, height: uint32, event: Event):
        self.potential_blocks_received[height] = event

    async def get_potential_blocks_received(self, height: uint32) -> Event:
        # Raises KeyError if no event was registered for this height.
        return self.potential_blocks_received[height]

    async def add_candidate_block(self, pos_hash: bytes32, block: Tuple[Body, HeaderData, ProofOfSpace]):
        self.candidate_blocks[pos_hash] = block

    async def get_candidate_block(self, pos_hash: bytes32) -> Optional[Tuple[Body, HeaderData, ProofOfSpace]]:
        return self.candidate_blocks.get(pos_hash)

    async def add_unfinished_block(self, key: Tuple[bytes32, uint64], block: FullBlock):
        self.unfinished_blocks[key] = block

    async def get_unfinished_block(self, key: Tuple[bytes32, uint64]) -> Optional[FullBlock]:
        # Fix: the parameter was written ``key=Tuple[bytes32, uint64]``,
        # which made the typing object the *default value* instead of an
        # annotation.
        return self.unfinished_blocks.get(key)

    async def get_unfinished_blocks(self) -> Dict[Tuple[bytes32, uint64], FullBlock]:
        return self.unfinished_blocks

    async def set_unfinished_block_leader(self, value: Tuple[uint32, uint64]):
        self.unfinished_blocks_leader = value

    async def get_unfinished_block_leader(self) -> Tuple[uint32, uint64]:
        return self.unfinished_blocks_leader

    async def set_proof_of_time_estimate_ips(self, estimate: uint64):
        self.proof_of_time_estimate_ips = estimate

    async def get_proof_of_time_estimate_ips(self) -> uint64:
        return self.proof_of_time_estimate_ips

View File

@ -20,7 +20,7 @@ from prompt_toolkit.widgets import (
Button, Button,
SearchToolbar SearchToolbar
) )
from src.store.full_node_store import FullNodeStore from src.db.database import FullNodeStore
from src.blockchain import Blockchain from src.blockchain import Blockchain
from src.types.header_block import HeaderBlock from src.types.header_block import HeaderBlock
from src.types.full_block import FullBlock from src.types.full_block import FullBlock
@ -222,7 +222,7 @@ class FullNodeUI:
if max_block not in added_blocks: if max_block not in added_blocks:
added_blocks.append(max_block) added_blocks.append(max_block)
heads.remove(max_block) heads.remove(max_block)
async with await self.store.get_lock(): async with self.store.lock:
prev: Optional[FullBlock] = await self.store.get_block(max_block.prev_header_hash) prev: Optional[FullBlock] = await self.store.get_block(max_block.prev_header_hash)
if prev is not None: if prev is not None:
heads.append(prev.header_block) heads.append(prev.header_block)
@ -258,16 +258,17 @@ class FullNodeUI:
else: else:
new_con_rows = Window(width=D(), height=0) new_con_rows = Window(width=D(), height=0)
async with (await self.store.get_lock()): async with self.store.lock():
if (await self.store.get_sync_mode()): if (await self.store.get_sync_mode()):
potential_heads = await self.store.get_potential_heads() max_height = -1
fbs = [await self.store.get_potential_heads_full_block(ph) for ph in potential_heads] for _, block in await self.store.get_potential_heads_tuples():
if len(fbs) > 0: if block.height > max_height:
max_height = max([b.height for b in fbs]) max_height = block.height
self.syncing.text = f"Syncing up to {str(max_height)}"
if max_height >= 0:
self.syncing.text = f"Syncing up to {max_height}"
else: else:
self.syncing.text = f"Syncing" self.syncing.text = f"Syncing"
else: else:
self.syncing.text = "Not syncing" self.syncing.text = "Not syncing"
heads: List[HeaderBlock] = self.blockchain.get_current_tips() heads: List[HeaderBlock] = self.blockchain.get_current_tips()
@ -318,7 +319,7 @@ class FullNodeUI:
async def draw_block(self): async def draw_block(self):
block_hash: str = self.route.split("block/")[1] block_hash: str = self.route.split("block/")[1]
async with await self.store.get_lock(): async with self.store.lock:
block: Optional[FullBlock] = await self.store.get_block(bytes32(bytes.fromhex(block_hash))) block: Optional[FullBlock] = await self.store.get_block(bytes32(bytes.fromhex(block_hash)))
if block is not None: if block is not None:
self.block_msg.text = f"Block {str(block.header_hash)}" self.block_msg.text = f"Block {str(block.header_hash)}"

View File

@ -12,7 +12,7 @@ from src.types.header_block import HeaderBlock
from src.types.full_block import FullBlock from src.types.full_block import FullBlock
from src.types.header import HeaderData from src.types.header import HeaderData
from src.blockchain import Blockchain, ReceiveBlockResult from src.blockchain import Blockchain, ReceiveBlockResult
from src.store.full_node_store import FullNodeStore from src.db.database import FullNodeStore
from src.util.ints import uint64, uint32 from src.util.ints import uint64, uint32
from tests.block_tools import BlockTools from tests.block_tools import BlockTools
@ -30,6 +30,7 @@ test_constants: Dict[str, Any] = {
"DIFFICULTY_DELAY": 3 # EPOCH / WARP_FACTOR "DIFFICULTY_DELAY": 3 # EPOCH / WARP_FACTOR
} }
test_constants["GENESIS_BLOCK"] = bytes(bt.create_genesis_block(test_constants, bytes([0]*32), b'0')) test_constants["GENESIS_BLOCK"] = bytes(bt.create_genesis_block(test_constants, bytes([0]*32), b'0'))
#test_constants["GENESIS_BLOCK"] = b'\x15N3\xd3\xf9H\xc2K\x96\xfe\xf2f\xa2\xbf\x87\x0e\x0f,\xd0\xd4\x0f6s\xb1".\\\xf5\x8a\xb4\x03\x84\x8e\xf9\xbb\xa1\xca\xdef3:\xe4?\x0c\xe5\xc6\x12\x80\x17\xd2\xcc\xd7\xb4m\x94\xb7V\x959\xed4\x89\x04b\x08\x07^\xca`\x8f#%\xe9\x9c\x9d\x86y\x10\x96W\x9d\xce\xc1\x15r\x97\x91U\n\x11<\xdf\xb2\xfc\xfb<\x13\x00\x00\x00\x98\xf4\x88\xcb\xb2MYo]\xaf \xd8a>\x06\xfe\xc8F\x8d\x15\x90\x15\xbb\x04\xd48\x10\xc6\xd8b\x82\x88\x7fx<\xe5\xe6\x8b\x8f\x84\xdd\x1cU"\x83\xfb7\x9d`\xb0I\xb3\xbe;bvE\xc6\x92\xdd\xbe\x988\xe9y;\xc6.\xa1\xce\x94\xdc\xd8\xab\xaf\xba\x8f\xd8r\x8br\xc8\xa0\xac\xc0\xe9T\x87\x08\x08\x8b#-\xb6o\xf0\x1f\x0bzv\xb3\x81\x1a\xd4\xf7\x01\xdf\xc5A\x11\xe0\x0c\xc0\x87\xa6\xc2v\xbbR\xc4{"\xa5\xe5\xe0bx7\xfa\n\xae\xea\xfe\x02\xac\xef\xec\xd1\xc2\xc55\x06{\xe1\x0c\xb2\x99q\xd7\xd8\xcb\x97\x86\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\xeb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00Y\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x007\x03\x00\x00\x004\x00\x8c\xff\xc3\x00\x00\x00\x00\x00\x00\x01T\x00>\xff\xe3\x00\x80\x00[\x00\x00\x00\x00\x00\x00\x05R\x00\x08\x00\x05\x00j\xff\xfd\x00\x00\x00\x00\x00\x00\x17\xf0\x00j\xff\x99\x00j\x00\x03\x01\x03\xa1\xde8\x0f\xb75VB\xf6"`\x94\xc7\x0b\xaa\x1f\xa2Nv\x8a\xf9\xc9\x9a>\x13\xa3a\xc8\x0c\xcb?\x968\xc7\xeb\xc3\x10a\x1a\xa7\xfb\x85\xa7iu\x14`\x8f\x90\x16o\x97\xd5\t\xa4,\xe5\xed\xe1\x15\x86<\x9d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x1f\xeb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]\xbf\xd7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xa1\xde8\x0f\xb75VB\xf6"`\x94\xc7\x0b\xaa\x1f\xa2Nv\x8a\xf9\xc9\x9a>\x13\xa3a\xc8\x0c\xcb?\x13\x16J\xe5\xfc\xa9\x06\xe8A\xe9\xc0Ql\xfb\xaeF\xcd\xd6\xa7\x8ei\xc4\xfa\xd4i\x84\xee\xc9\xe2\xaa\xa4f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00OB!\x81)\xf0l\xbcg\xa3^\xef\x0e\xfc\xb7\x02\x80\xe4\xa9NO\x89\xa0\t\xc3C\xd9\xda\xff\xd7\t\xeebfC&8\x9c+n$\x00\xa4\xe85\x19\xb0\xf6\x18\xa1\xeeR\xae\xec \x82k\xe0v@;\x1c\xc14PMh\xfb\xe3\x1c\xbf\x84O\xcd\xbc\xc4\xb8\xeabz`\xf7\x06;\xf6q\x8b,\x18\tf~\xd1\x11l#\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\n\x8b)\xaa\x96x8\xd76J\xa6\x8b[\x98\t\xe0\\\xe3^7qD\x8c\xf5q\x08\xf2\xa2\xc9\xb03mvU\x1a\xe2\x181\x88\xfe\t\x03?\x12\xadj\x9d\xe8K\xb8!\xee\xe7e8\x82\xfb$\xf0Y\xfaJ\x10\x1f\x1a\xe5\xe9\xa8\xbb\xea\x87\xfc\xb12y\x94\x8d,\x16\xe4C\x02\xba\xe6\xac\x94{\xc4c\x07(\xb8\xeb\xab\xe3\xcfy{6\x98\t\xf4\x8fm\xd62\x85\x87\xb0\x03f\x01B]\xe3\xc6\x13l6\x8d\x0e\x18\xc64%\x97\x1a\xa6\xf4\x8b)\xaa\x96x8\xd76J\xa6\x8b[\x98\t\xe0\\\xe3^7qD\x8c\xf5q\x08\xf2\xa2\xc9\xb03mv\x00\x00\x00\x00\x00\x00\x00\x00\x00_\xec\xebf\xff\xc8o8\xd9Rxlmily\xc2\xdb\xc29\xddN\x91\xb4g)\xd7:\'\xfbW\xe9'
@pytest.fixture(scope="module") @pytest.fixture(scope="module")
@ -42,8 +43,8 @@ def event_loop():
class TestGenesisBlock(): class TestGenesisBlock():
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_basic_blockchain(self): async def test_basic_blockchain(self):
store = FullNodeStore() store = FullNodeStore("fndb_test")
await store.initialize() await store._clear_database()
bc1: Blockchain = Blockchain(store) bc1: Blockchain = Blockchain(store)
await bc1.initialize() await bc1.initialize()
assert len(bc1.get_current_tips()) == 1 assert len(bc1.get_current_tips()) == 1
@ -61,8 +62,8 @@ class TestBlockValidation():
""" """
Provides a list of 10 valid blocks, as well as a blockchain with 9 blocks added to it. Provides a list of 10 valid blocks, as well as a blockchain with 9 blocks added to it.
""" """
store = FullNodeStore() store = FullNodeStore("fndb_test")
await store.initialize() await store._clear_database()
blocks = bt.get_consecutive_blocks(test_constants, 10, [], 10) blocks = bt.get_consecutive_blocks(test_constants, 10, [], 10)
b: Blockchain = Blockchain(store, test_constants) b: Blockchain = Blockchain(store, test_constants)
await b.initialize() await b.initialize()
@ -202,8 +203,8 @@ class TestBlockValidation():
# Make it 5x faster than target time # Make it 5x faster than target time
blocks = bt.get_consecutive_blocks(test_constants, num_blocks, [], 2) blocks = bt.get_consecutive_blocks(test_constants, num_blocks, [], 2)
store = FullNodeStore() store = FullNodeStore("fndb_test")
await store.initialize() await store._clear_database()
b: Blockchain = Blockchain(store, test_constants) b: Blockchain = Blockchain(store, test_constants)
await b.initialize() await b.initialize()
for i in range(1, num_blocks): for i in range(1, num_blocks):
@ -228,8 +229,8 @@ class TestReorgs():
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_basic_reorg(self): async def test_basic_reorg(self):
blocks = bt.get_consecutive_blocks(test_constants, 100, [], 9) blocks = bt.get_consecutive_blocks(test_constants, 100, [], 9)
store = FullNodeStore() store = FullNodeStore("fndb_test")
await store.initialize() await store._clear_database()
b: Blockchain = Blockchain(store, test_constants) b: Blockchain = Blockchain(store, test_constants)
await b.initialize() await b.initialize()
@ -251,8 +252,8 @@ class TestReorgs():
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_reorg_from_genesis(self): async def test_reorg_from_genesis(self):
blocks = bt.get_consecutive_blocks(test_constants, 20, [], 9, b'0') blocks = bt.get_consecutive_blocks(test_constants, 20, [], 9, b'0')
store = FullNodeStore() store = FullNodeStore("fndb_test")
await store.initialize() await store._clear_database()
b: Blockchain = Blockchain(store, test_constants) b: Blockchain = Blockchain(store, test_constants)
await b.initialize() await b.initialize()
for block in blocks: for block in blocks:
@ -280,8 +281,8 @@ class TestReorgs():
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_lca(self): async def test_lca(self):
blocks = bt.get_consecutive_blocks(test_constants, 5, [], 9, b'0') blocks = bt.get_consecutive_blocks(test_constants, 5, [], 9, b'0')
store = FullNodeStore() store = FullNodeStore("fndb_test")
await store.initialize() await store._clear_database()
b: Blockchain = Blockchain(store, test_constants) b: Blockchain = Blockchain(store, test_constants)
await b.initialize() await b.initialize()
for block in blocks: for block in blocks: