Merge branch 'master' of github.com:Chia-Network/chia-blockchain into network

This commit is contained in:
Mariano Sorgente 2019-08-29 10:46:50 +09:00
commit f6e1506e44
15 changed files with 211 additions and 73 deletions

View File

@ -24,6 +24,7 @@ def approximate_parameters(T):
# 1/w is the approximate proportion of time spent on the proof
w = math.floor(T / (T/k + L * pow(2, k+1))) - 2
w = max(w, 0)
return (L, k, w)

File diff suppressed because one or more lines are too long

View File

@ -1,9 +1,11 @@
DIFFICULTY_STARTING = 1 << 32
DIFFICULTY_EPOCH = 2016 # The number of blocks per epoch
DIFFICULTY_TARGET = 200 # The target number of seconds per block
DIFFICULTY_STARTING = 50 # These are in units of 2^32
DIFFICULTY_EPOCH = 10 # The number of blocks per epoch
DIFFICULTY_TARGET = 10 # The target number of seconds per block
DIFFICULTY_FACTOR = 4 # The next difficulty is truncated to range [prev / FACTOR, prev * FACTOR]
DIFFICULTY_WARP_FACTOR = 4 # DELAY divides EPOCH in order to warp efficiently.
DIFFICULTY_DELAY = DIFFICULTY_EPOCH // DIFFICULTY_WARP_FACTOR # The delay in blocks before the difficulty reset applies.
DIFFICULTY_DELAY = DIFFICULTY_EPOCH // DIFFICULTY_WARP_FACTOR # The delay in blocks before the difficulty reset applies
DISCRIMINANT_SIZE_BITS = 1024
# The percentage of the difficulty target that the VDF must be run for, at a minimum
MIN_BLOCK_TIME_PERCENT = 20
MIN_VDF_ITERATIONS = 1 << 10
MIN_VDF_ITERATIONS = 1 # These are in units of 2^32

View File

@ -43,7 +43,7 @@ def calculate_iterations_quality(quality: bytes32, size: uint8, difficulty: uint
between 0 and 1, then divided by expected plot size, and finally multiplied by the
difficulty.
"""
dec_iters = (Decimal(int(difficulty)) *
dec_iters = (Decimal(int(difficulty) << 32) *
(_quality_to_decimal(quality) / _expected_plot_size(size)))
return uint64(max(MIN_VDF_ITERATIONS, int(dec_iters.to_integral_exact(rounding=ROUND_UP))))

View File

@ -243,6 +243,7 @@ async def proof_of_space_arrived(proof_of_space_arrived: farmer_protocol.ProofOf
@api_request
async def deep_reorg_notification(deep_reorg_notification: farmer_protocol.DeepReorgNotification):
# TODO: implement
# TODO: "forget everything and start over (reset db)"
log.error(f"Deep reorg notification not implemented.")

View File

@ -4,7 +4,7 @@ from secrets import token_bytes
from hashlib import sha256
from chiapos import Verifier
from blspy import Util, Signature, PrivateKey
from asyncio import Lock
from asyncio import Lock, sleep
from typing import Dict, List, Tuple, Optional
from src.util.api_decorators import api_request
from src.protocols import farmer_protocol
@ -18,6 +18,7 @@ from src.types.challenge import Challenge
from src.types.block_header import BlockHeaderData, BlockHeader
from src.types.proof_of_space import ProofOfSpace
from src.consensus.pot_iterations import calculate_iterations
from src.consensus.constants import DIFFICULTY_TARGET
from src.types.full_block import FullBlock
from src.types.fees_target import FeesTarget
from src.blockchain import Blockchain
@ -30,6 +31,7 @@ farmer_ip = "127.0.0.1"
farmer_port = 8001
timelord_ip = "127.0.0.1"
timelord_port = 8003
update_pot_estimate_interval: int = 30
class Database:
@ -44,7 +46,7 @@ class Database:
# These are the blocks that we created, have PoS, but not PoT yet, keyed from the
# block header hash
unfinished_blocks: Dict[Tuple[bytes32, int], FullBlock] = {}
proof_of_time_estimate_ips: uint64 = uint64(3000)
proof_of_time_estimate_ips: uint64 = uint64(1500)
log = logging.getLogger(__name__)
@ -86,6 +88,16 @@ async def send_challenges_to_timelords():
yield OutboundMessage("timelord", "challenge_start", request, False, True)
async def proof_of_time_estimate_interval():
while True:
estimated_ips: Optional[uint64] = db.blockchain.get_vdf_rate_estimate()
async with db.lock:
if estimated_ips is not None:
db.proof_of_time_estimate_ips = estimated_ips
log.info(f"Updated proof of time estimate to {estimated_ips} iterations per second.")
await sleep(update_pot_estimate_interval)
@api_request
async def request_header_hash(request: farmer_protocol.RequestHeaderHash):
"""
@ -167,15 +179,13 @@ async def header_signature(header_signature: farmer_protocol.HeaderSignature):
[pos.plot_pubkey])
block_header: BlockHeader = BlockHeader(block_header_data, header_signature.header_signature)
assert db.blockchain.block_can_be_added(block_header, block_body)
trunk: TrunkBlock = TrunkBlock(pos, None, None, block_header)
unfinished_block_obj: FullBlock = FullBlock(trunk, block_body)
# Propagate to ourselves (which validates and does further propagations)
request = peer_protocol.UnfinishedBlock(unfinished_block_obj)
async for m in unfinished_block(request):
# Yield all new messages (propagation to peers)
yield m
@ -188,6 +198,9 @@ async def proof_of_time_finished(request: timelord_protocol.ProofOfTimeFinished)
"""
async with db.lock:
dict_key = (request.proof.output.challenge_hash, request.proof.output.number_of_iterations)
if dict_key not in db.unfinished_blocks:
log.warn(f"Received a proof of time that we cannot use to complete a block {dict_key}")
return
unfinished_block_obj: FullBlock = db.unfinished_blocks[dict_key]
prev_block: TrunkBlock = db.blockchain.get_trunk_block(unfinished_block_obj.trunk_block.prev_header_hash)
difficulty: uint64 = db.blockchain.get_next_difficulty(unfinished_block_obj.trunk_block.prev_header_hash)
@ -195,7 +208,8 @@ async def proof_of_time_finished(request: timelord_protocol.ProofOfTimeFinished)
challenge: Challenge = Challenge(unfinished_block_obj.trunk_block.proof_of_space.get_hash(),
request.proof.output.get_hash(),
prev_block.challenge.height + 1,
prev_block.challenge.total_weight + difficulty)
prev_block.challenge.total_weight + difficulty,
prev_block.challenge.total_iters + request.proof.output.number_of_iterations)
new_trunk_block = TrunkBlock(unfinished_block_obj.trunk_block.proof_of_space,
request.proof,
@ -240,7 +254,10 @@ async def unfinished_block(unfinished_block: peer_protocol.UnfinishedBlock):
timelords.
"""
async with db.lock:
# TODO: verify block using blockchain class, including coinbase rewards
if not db.blockchain.is_child_of_head(unfinished_block.block):
return
# TODO(alex): verify block using blockchain class, including coinbase rewards
prev_block: TrunkBlock = db.blockchain.get_trunk_block(
unfinished_block.block.trunk_block.prev_header_hash)
@ -255,8 +272,14 @@ async def unfinished_block(unfinished_block: peer_protocol.UnfinishedBlock):
log.info(f"Have already seen unfinished block {(challenge_hash, iterations_needed)}")
return
expected_time: float = iterations_needed / db.proof_of_time_estimate_ips
# TODO(alex): tweak this
log.info(f"Expected finish time: {expected_time}")
if expected_time > 10 * DIFFICULTY_TARGET:
return
db.unfinished_blocks[(challenge_hash, iterations_needed)] = unfinished_block.block
# TODO: Only propagate if it's actually good
timelord_request = timelord_protocol.ProofOfSpaceInfo(challenge_hash, iterations_needed)
yield OutboundMessage("timelord", "proof_of_space_info", timelord_request, False, True)
@ -286,7 +309,6 @@ async def block(block: peer_protocol.Block):
if header_hash in db.bodies and block.block.body in db.bodies[header_hash]:
log.info(f"Already have block {header_hash}")
return
# TODO(alex): Check if we care about this block, we don't want to add random
# disconnected blocks. For example if it's on one of the heads, or if it's an older
# block that we need

View File

@ -9,7 +9,10 @@ from src.types.coinbase import CoinbaseInfo
Protocol between farmer and full node.
"""
"""
Farmer <- Full node
Update current height
"""
@cbor_message(tag=2000)
class ProofOfSpaceFinalized:
challenge_hash: bytes32

View File

@ -10,38 +10,74 @@ from src.types.peer_info import PeerInfo
Protocol between full nodes.
"""
"""
If already seen, ignore
Validate transaction
If consistent with at least 1/3 heads, store in mempool
Propagate transaction
"""
@cbor_message(tag=4000)
class NewTransaction:
transaction: Transaction
"""
TODO(alex): update this
If already seen, ignore
If prev block not a head, ignore
Call self.ProofOfTimeFinished
Propagate PoT (?)
"""
@cbor_message(tag=4001)
class NewProofOfTime:
proof: ProofOfTime
"""
TODO(alex): update this
If not a child of a head, ignore
If we have a PoT to complete this block, call self.Block
Otherwise: validate, store, and propagate
"""
@cbor_message(tag=4002)
class UnfinishedBlock:
# Block that does not have ProofOfTime and Challenge
block: FullBlock
"""
If have block, return block
TODO: request blocks?
"""
@cbor_message(tag=4003)
class RequestBlock:
header_hash: bytes32
"""
TODO(alex): update this
If already have, ignore
If not child of a head, or ancestor of a head, ignore
Add block to head
- Validate block
If heads updated, propagate block to full nodes, farmers, timelords
"""
@cbor_message(tag=4004)
class Block:
block: FullBlock
"""
Return full list of peers
"""
@cbor_message(tag=4005)
class RequestPeers:
pass
"""
Update list of peers
"""
@cbor_message(tag=4006)
class Peers:
peer_list: List[PeerInfo]

View File

@ -7,7 +7,11 @@ from src.types.proof_of_time import ProofOfTime
Protocol between timelord and full node.
"""
"""
If don't have the unfinished block, ignore
Validate PoT
Call self.Block
"""
@cbor_message(tag=3000)
class ProofOfTimeFinished:
proof: ProofOfTime

View File

@ -8,6 +8,19 @@ from src.server.server import start_chia_server, start_chia_client
logging.basicConfig(format='FullNode %(name)-23s: %(levelname)-8s %(message)s', level=logging.INFO)
global_connections = PeerConnections()
"""
Full node startup algorithm:
- Update peer list (?)
- Start server
- Sync:
- Check which are the heaviest blocks
- Request flyclient proofs for all heads
- Blacklist peers with invalid heads
- Sync blockchain up to heads (request blocks in batches, and add to queue)
- If connected to farmer, send challenges
- If connected to timelord, send challenges
"""
async def main():
farmer_con_task, farmer_client = await start_chia_client(full_node.farmer_ip, full_node.farmer_port,
@ -25,6 +38,9 @@ async def main():
async for msg in full_node.send_challenges_to_timelords():
timelord_client.push(msg)
# Periodically update our estimate of proof of time speeds
asyncio.create_task(full_node.proof_of_time_estimate_interval())
await asyncio.gather(farmer_con_task, timelord_con_task, server)
asyncio.run(main())

View File

@ -30,4 +30,4 @@ class BlockHeader:
@property
def header_hash(self):
return sha256(self.serialize()).digest()
return bytes32(sha256(self.serialize()).digest())

View File

@ -9,6 +9,7 @@ class Challenge:
proof_of_time_output_hash: bytes32
height: uint32
total_weight: uint64
total_iters: uint64
def is_valid(self) -> bool:
# TODO

View File

@ -15,18 +15,14 @@ class TrunkBlock:
def is_valid(self):
if not self.proof_of_time or not self.challenge:
print("1 false")
return False
pos_quality = self.proof_of_space.verify_and_get_quality(self.proof_of_time.output.challenge_hash)
# TODO: check iterations
if not pos_quality:
print("2 false")
return False
if not self.proof_of_space.get_hash() == self.challenge.proof_of_space_hash:
print("3 false")
return False
if not self.proof_of_time.output.get_hash() == self.challenge.proof_of_time_output_hash:
print("4 false")
return False
return self.challenge.is_valid() and self.proof_of_time.is_valid() and self.header.is_valid()

View File

@ -20,6 +20,6 @@ def api_request(f):
inter = dict(binding.arguments)
print_args = {k: v for (k, v) in inter.items() if k != "source_connection"
and k != "all_connections"}
log.info(f"{f.__name__}({print_args})")
log.info(f"{f.__name__}({print_args})"[:200])
return f(**inter)
return f_substitute

File diff suppressed because one or more lines are too long