Create a test for block size (#558)

* add block size limit test
* calculate max_transactions_in_block and max_inputs_in_tx (see the sizing sketch below)
* abstract block generation
* move all test vectors to zebra-test
Alfredo Garcia 2020-07-06 21:37:32 -03:00 committed by GitHub
parent ac8185f2f3
commit db30e53470
6 changed files with 225 additions and 15 deletions
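
As a rough, editorial sketch of the sizing arithmetic mentioned in the second bullet (the real code is in the generate module below): the block generators divide the space left after the serialized header by the size of one serialized dummy transaction, and the oversized variants add one extra copy. The byte counts HEADER_BYTES and TX_BYTES here are made-up placeholders, not values from this commit; the commit measures the serialized header and the DUMMY_TX1 test vector instead.

/// Maximum serialized block size, matching zebra-chain's MAX_BLOCK_BYTES.
const MAX_BLOCK_BYTES: u64 = 2_000_000;

// Placeholder sizes for illustration only; the commit computes these by
// serializing the real header and measuring the DUMMY_TX1 test vector.
const HEADER_BYTES: usize = 1_500;
const TX_BYTES: usize = 300;

/// How many copies of a fixed-size transaction fit in the remaining block
/// space; the oversized case adds one more to push the block over the limit.
fn max_transactions_in_block(oversized: bool) -> usize {
    let fit = (MAX_BLOCK_BYTES as usize - HEADER_BYTES) / TX_BYTES;
    if oversized {
        fit + 1
    } else {
        fit
    }
}

fn main() {
    // With the placeholder sizes: (2_000_000 - 1_500) / 300 = 6_661.
    assert_eq!(max_transactions_in_block(false), 6_661);
    assert_eq!(max_transactions_in_block(true), 6_662);
}
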

@@ -41,7 +41,7 @@ pub struct Block {
/// in the Zcash specification. (But since blocks also contain a
/// block header and transaction count, the maximum size of a
/// transaction in the chain is approximately 1.5 kB smaller.)
const MAX_BLOCK_BYTES: u64 = 2_000_000;
pub const MAX_BLOCK_BYTES: u64 = 2_000_000;
impl Block {
/// Return the block height reported in the coinbase transaction, if any.

@@ -1,4 +1,4 @@
use chrono::{DateTime, NaiveDateTime, TimeZone, Utc};
use chrono::{TimeZone, Utc};
use std::io::{Cursor, ErrorKind, Write};
use proptest::{
@@ -12,6 +12,8 @@ use crate::note_commitment_tree::SaplingNoteTreeRootHash;
use crate::serialization::{SerializationError, ZcashDeserialize, ZcashSerialize};
use crate::sha256d_writer::Sha256dWriter;
use crate::test::generate;
use super::*;
#[cfg(test)]
@@ -74,18 +76,7 @@ fn blockheaderhash_debug() {
#[test]
fn blockheaderhash_from_blockheader() {
let some_bytes = [0; 32];
let blockheader = BlockHeader {
version: 4,
previous_block_hash: BlockHeaderHash(some_bytes),
merkle_root_hash: MerkleTreeRootHash(some_bytes),
final_sapling_root_hash: SaplingNoteTreeRootHash(some_bytes),
time: DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(61, 0), Utc),
bits: 0,
nonce: some_bytes,
solution: EquihashSolution([0; 1344]),
};
let blockheader = generate::block_header();
let hash = BlockHeaderHash::from(&blockheader);
@@ -130,6 +121,74 @@ fn deserialize_block() {
.expect("block test vector should deserialize");
}
#[test]
fn block_limits_multi_tx() {
// Test that multiple small transactions can fill a block up to the maximum block size
// Create a block just below the limit
let mut block = generate::large_multi_transaction_block();
// Serialize the block
let mut data = Vec::new();
block
.zcash_serialize(&mut data)
.expect("block should serialize as we are not limiting generation yet");
assert!(data.len() <= MAX_BLOCK_BYTES as usize);
// Deserialization succeeds for now, as we are still below the limit
let block2 = Block::zcash_deserialize(&data[..])
.expect("block should deserialize as we are just below limit");
assert_eq!(block, block2);
// Add one more transaction to the block; the limit will be exceeded
block = generate::oversized_multi_transaction_block();
// Serialization will still succeed
let mut data = Vec::new();
block
.zcash_serialize(&mut data)
.expect("block should serialize as we are not limiting generation yet");
assert!(data.len() > MAX_BLOCK_BYTES as usize);
// Deserialize will now fail
Block::zcash_deserialize(&data[..]).expect_err("block should not deserialize");
}
#[test]
fn block_limits_single_tx() {
// Test the block size limit with a single large transaction
// Create a block just below the limit
let mut block = generate::large_single_transaction_block();
// Serialize the block
let mut data = Vec::new();
block
.zcash_serialize(&mut data)
.expect("block should serialize as we are not limiting generation yet");
assert!(data.len() <= MAX_BLOCK_BYTES as usize);
// Deserialization succeeds for now, as we are still below the limit
Block::zcash_deserialize(&data[..])
.expect("block should deserialize as we are just below limit");
// Add one more input to the transaction; the limit will be exceeded
block = generate::oversized_single_transaction_block();
let mut data = Vec::new();
block
.zcash_serialize(&mut data)
.expect("block should serialize as we are not limiting generation yet");
assert!(data.len() > MAX_BLOCK_BYTES as usize);
// Deserialization will fail, as the overall block size is above the limit
Block::zcash_deserialize(&data[..]).expect_err("block should not deserialize");
}
proptest! {
#[test]

@@ -29,6 +29,9 @@ pub use redjubjub;
#[cfg(test)]
use proptest_derive::Arbitrary;
#[cfg(test)]
pub mod test;
/// An enum describing the possible network choices.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
#[cfg_attr(test, derive(Arbitrary))]

zebra-chain/src/test.rs (new file)

@@ -0,0 +1,2 @@
//! Test support code
pub mod generate;

@@ -0,0 +1,144 @@
//! Generate blockchain testing constructions
use chrono::{DateTime, NaiveDateTime, Utc};
use std::sync::Arc;
use crate::{
block::{Block, BlockHeader, BlockHeaderHash, MAX_BLOCK_BYTES},
equihash_solution::EquihashSolution,
merkle_tree::MerkleTreeRootHash,
note_commitment_tree::SaplingNoteTreeRootHash,
serialization::{ZcashDeserialize, ZcashSerialize},
transaction::{Transaction, TransparentInput, TransparentOutput},
types::LockTime,
};
/// Generate a block header
pub fn block_header() -> BlockHeader {
let some_bytes = [0; 32];
BlockHeader {
version: 4,
previous_block_hash: BlockHeaderHash(some_bytes),
merkle_root_hash: MerkleTreeRootHash(some_bytes),
final_sapling_root_hash: SaplingNoteTreeRootHash(some_bytes),
time: DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(61, 0), Utc),
bits: 0,
nonce: some_bytes,
solution: EquihashSolution([0; 1344]),
}
}
/// Generate a block with multiple transactions, just below the block size limit
pub fn large_multi_transaction_block() -> Block {
multi_transaction_block(false)
}
/// Generate a block with one transaction and multiple inputs, just below the block size limit
pub fn large_single_transaction_block() -> Block {
single_transaction_block(false)
}
/// Generate a block with multiple transactions, just above the block size limit
pub fn oversized_multi_transaction_block() -> Block {
multi_transaction_block(true)
}
/// Generate a block with one transaction and multiple inputs, just above the block size limit
pub fn oversized_single_transaction_block() -> Block {
single_transaction_block(true)
}
// Implementation of block generation with multiple transactions
fn multi_transaction_block(oversized: bool) -> Block {
// A dummy transaction
let tx = Transaction::zcash_deserialize(&zebra_test::vectors::DUMMY_TX1[..]).unwrap();
// A block header
let blockheader = block_header();
// Serialize header
let mut data_header = Vec::new();
blockheader
.zcash_serialize(&mut data_header)
.expect("Block header should serialize");
// Calculate the number of transactions we need
let mut max_transactions_in_block =
(MAX_BLOCK_BYTES as usize - data_header.len()) / zebra_test::vectors::DUMMY_TX1[..].len();
if oversized {
max_transactions_in_block += 1;
}
// Create transactions to be just below or just above the limit
let many_transactions = std::iter::repeat(Arc::new(tx.clone()))
.take(max_transactions_in_block)
.collect::<Vec<_>>();
// Add the transactions into a block
Block {
header: blockheader,
transactions: many_transactions,
}
}
// Implementation of block generation with one transaction and multiple inputs
fn single_transaction_block(oversized: bool) -> Block {
// Dummy input and output
let input =
TransparentInput::zcash_deserialize(&zebra_test::vectors::DUMMY_INPUT1[..]).unwrap();
let output =
TransparentOutput::zcash_deserialize(&zebra_test::vectors::DUMMY_OUTPUT1[..]).unwrap();
// A block header
let blockheader = block_header();
// Serialize header
let mut data_header = Vec::new();
blockheader
.zcash_serialize(&mut data_header)
.expect("Block header should serialize");
// Serialize a LockTime
let locktime = LockTime::Time(DateTime::<Utc>::from_utc(
NaiveDateTime::from_timestamp(61, 0),
Utc,
));
let mut data_locktime = Vec::new();
locktime
.zcash_serialize(&mut data_locktime)
.expect("LockTime should serialize");
// Calculate the number of inputs we need
let mut max_inputs_in_tx = (MAX_BLOCK_BYTES as usize
- data_header.len()
- zebra_test::vectors::DUMMY_OUTPUT1[..].len()
- data_locktime.len())
/ (zebra_test::vectors::DUMMY_INPUT1[..].len() - 1);
if oversized {
max_inputs_in_tx += 1;
}
let mut outputs = Vec::new();
// Create inputs to be just below or just above the limit
let inputs = std::iter::repeat(input.clone())
.take(max_inputs_in_tx)
.collect::<Vec<_>>();
// 1 single output
outputs.push(output);
// Create a big transaction
let big_transaction = Transaction::V1 {
inputs: inputs.clone(),
outputs: outputs.clone(),
lock_time: locktime,
};
// Put the big transaction into a block
let transactions = vec![Arc::new(big_transaction.clone())];
Block {
header: blockheader,
transactions: transactions.clone(),
}
}

File diff suppressed because one or more lines are too long
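
The suppressed diff above is presumably the zebra-test vectors module that the new generate code relies on (zebra_test::vectors::DUMMY_TX1, DUMMY_INPUT1 and DUMMY_OUTPUT1). As a hedged sketch only, such a module could expose the vectors as lazily initialized byte buffers along the lines below; the lazy_static layout and the zeroed placeholder bytes are assumptions, not the contents of the suppressed file.

// Hypothetical shape of a zebra-test vectors module; the real file is
// suppressed above, so both the macro choice and the bytes are placeholders.
use lazy_static::lazy_static;

lazy_static! {
    /// A small serialized transaction used to fill blocks in the size tests.
    pub static ref DUMMY_TX1: Vec<u8> = vec![0u8; 300];
    /// A serialized transparent input for the single-transaction test.
    pub static ref DUMMY_INPUT1: Vec<u8> = vec![0u8; 150];
    /// A serialized transparent output for the single-transaction test.
    pub static ref DUMMY_OUTPUT1: Vec<u8> = vec![0u8; 50];
}

Note that generate.rs both deserializes these buffers and uses their lengths in the size arithmetic, so the real vectors must be valid serialized transactions, inputs and outputs, unlike the zeroed placeholders in this sketch.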