Merge branch 'fix-z-gettreestate' of github.com:ZcashFoundation/zebra into fix-z-gettreestate

Marek 2024-05-20 14:12:12 +02:00
commit 451a8a9e26
42 changed files with 414 additions and 188 deletions

View File

@ -203,7 +203,7 @@ jobs:
if: ${{ !cancelled() && !failure() && ((github.event_name == 'push' && github.ref_name == 'main') || github.event_name == 'release') }}
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
@ -226,7 +226,7 @@ jobs:
# Setup gcloud CLI
- name: Authenticate to Google Cloud
id: auth
uses: google-github-actions/auth@v2.1.2
uses: google-github-actions/auth@v2.1.3
with:
retries: '3'
workload_identity_provider: '${{ vars.GCP_WIF }}'
@ -305,7 +305,7 @@ jobs:
if: github.event_name == 'workflow_dispatch'
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
@ -328,7 +328,7 @@ jobs:
# Setup gcloud CLI
- name: Authenticate to Google Cloud
id: auth
uses: google-github-actions/auth@v2.1.2
uses: google-github-actions/auth@v2.1.3
with:
retries: '3'
workload_identity_provider: '${{ vars.GCP_WIF }}'

View File

@ -39,14 +39,14 @@ jobs:
contents: 'read'
id-token: 'write'
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
# Setup gcloud CLI
- name: Authenticate to Google Cloud
id: auth
uses: google-github-actions/auth@v2.1.2
uses: google-github-actions/auth@v2.1.3
with:
retries: '3'
workload_identity_provider: '${{ vars.GCP_WIF }}'
@ -107,14 +107,14 @@ jobs:
contents: 'read'
id-token: 'write'
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
# Setup gcloud CLI
- name: Authenticate to Google Cloud
id: auth
uses: google-github-actions/auth@v2.1.2
uses: google-github-actions/auth@v2.1.3
with:
retries: '3'
workload_identity_provider: '${{ vars.GCP_WIF }}'

View File

@ -23,7 +23,7 @@ jobs:
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
# Setup Rust with stable toolchain and minimal profile
- name: Setup Rust

View File

@ -60,8 +60,8 @@ jobs:
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
steps:
- uses: actions/checkout@v4.1.4
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: actions/checkout@v4.1.6
- uses: r7kamura/rust-problem-matchers@v1.5.0
# Setup Rust with stable toolchain and minimal profile
- name: Setup Rust
@ -122,10 +122,10 @@ jobs:
matrix: ${{ fromJson(needs.matrix.outputs.matrix) }}
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
- name: Install last version of Protoc
uses: arduino/setup-protoc@v3.0.0

View File

@ -69,7 +69,7 @@ jobs:
runs-on: ubuntu-latest-xl
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
@ -103,4 +103,4 @@ jobs:
run: cargo llvm-cov --lcov --no-run --output-path lcov.info
- name: Upload coverage report to Codecov
uses: codecov/codecov-action@v4.3.0
uses: codecov/codecov-action@v4.4.0

View File

@ -37,14 +37,14 @@ jobs:
rust: ${{ steps.changed-files-rust.outputs.any_changed == 'true' }}
workflows: ${{ steps.changed-files-workflows.outputs.any_changed == 'true' }}
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
fetch-depth: 0
- name: Rust files
id: changed-files-rust
uses: tj-actions/changed-files@v44.3.0
uses: tj-actions/changed-files@v44.4.0
with:
files: |
**/*.rs
@ -56,7 +56,7 @@ jobs:
- name: Workflow files
id: changed-files-workflows
uses: tj-actions/changed-files@v44.3.0
uses: tj-actions/changed-files@v44.4.0
with:
files: |
.github/workflows/*.yml
@ -69,7 +69,7 @@ jobs:
if: ${{ needs.changed-files.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
@ -119,10 +119,10 @@ jobs:
if: ${{ needs.changed-files.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
- name: Install last version of Protoc
uses: arduino/setup-protoc@v3.0.0
@ -149,9 +149,9 @@ jobs:
needs: changed-files
if: ${{ needs.changed-files.outputs.workflows == 'true' }}
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
- name: actionlint
uses: reviewdog/action-actionlint@v1.44.0
uses: reviewdog/action-actionlint@v1.46.0
with:
level: warning
fail_on_error: false
@ -166,7 +166,7 @@ jobs:
runs-on: ubuntu-latest
needs: changed-files
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
- uses: codespell-project/actions-codespell@v2.0
with:
only_warn: 1

View File

@ -121,7 +121,7 @@ jobs:
runs-on: ubuntu-latest-xl
needs: build
steps:
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
- name: Inject slug/short variables
uses: rlespinasse/github-slug-action@v4
@ -163,7 +163,7 @@ jobs:
runs-on: ubuntu-latest
needs: build
steps:
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
- name: Inject slug/short variables
uses: rlespinasse/github-slug-action@v4
@ -184,7 +184,7 @@ jobs:
runs-on: ubuntu-latest
needs: build
steps:
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
- name: Inject slug/short variables
uses: rlespinasse/github-slug-action@v4
@ -205,7 +205,7 @@ jobs:
runs-on: ubuntu-latest
needs: build
steps:
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
- name: Inject slug/short variables
uses: rlespinasse/github-slug-action@v4

View File

@ -94,10 +94,10 @@ jobs:
rust: beta
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
- name: Install last version of Protoc
uses: arduino/setup-protoc@v3.0.0
@ -183,10 +183,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
# Setup Rust with stable toolchain and minimal profile
- name: Setup Rust
@ -205,10 +205,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
- name: Install last version of Protoc
uses: arduino/setup-protoc@v3.0.0
@ -248,10 +248,10 @@ jobs:
continue-on-error: ${{ matrix.checks == 'advisories' }}
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
- name: Check ${{ matrix.checks }} with features ${{ matrix.features }}
uses: EmbarkStudios/cargo-deny-action@v1
@ -269,10 +269,10 @@ jobs:
steps:
- name: Checkout git repository
uses: actions/checkout@v4.1.4
uses: actions/checkout@v4.1.6
with:
persist-credentials: false
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
# Setup Rust with stable toolchain and minimal profile
- name: Setup Rust

View File

@ -85,11 +85,11 @@ jobs:
pull-requests: write
steps:
- name: Checkout the source code
uses: actions/checkout@v4.1.4
uses: actions/checkout@v4.1.6
with:
persist-credentials: false
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
- name: Setup mdBook
uses: jontze/action-mdbook@v3.0.0
@ -106,7 +106,7 @@ jobs:
# Setup gcloud CLI
- name: Authenticate to Google Cloud
id: auth
uses: google-github-actions/auth@v2.1.2
uses: google-github-actions/auth@v2.1.3
with:
retries: '3'
workload_identity_provider: '${{ vars.GCP_WIF }}'
@ -139,11 +139,11 @@ jobs:
pull-requests: write
steps:
- name: Checkout the source code
uses: actions/checkout@v4.1.4
uses: actions/checkout@v4.1.6
with:
persist-credentials: false
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
- name: Install last version of Protoc
uses: arduino/setup-protoc@v3.0.0
@ -165,7 +165,7 @@ jobs:
# Setup gcloud CLI
- name: Authenticate to Google Cloud
id: auth
uses: google-github-actions/auth@v2.1.2
uses: google-github-actions/auth@v2.1.3
with:
retries: '3'
workload_identity_provider: '${{ vars.GCP_WIF }}'

View File

@ -17,7 +17,7 @@ jobs:
dockerHubDescription:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false

View File

@ -29,7 +29,7 @@ jobs:
id-token: 'write'
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
@ -52,7 +52,7 @@ jobs:
# Setup gcloud CLI
- name: Authenticate to Google Cloud
id: auth
uses: google-github-actions/auth@v2.1.2
uses: google-github-actions/auth@v2.1.3
with:
retries: '3'
workload_identity_provider: '${{ vars.GCP_WIF }}'

View File

@ -67,10 +67,10 @@ jobs:
timeout-minutes: 15
runs-on: ubuntu-latest
steps:
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
- name: Checkout git repository
uses: actions/checkout@v4.1.4
uses: actions/checkout@v4.1.6
with:
persist-credentials: false

View File

@ -76,10 +76,10 @@ jobs:
contents: 'read'
id-token: 'write'
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
- name: Inject slug/short variables
uses: rlespinasse/github-slug-action@v4
@ -125,7 +125,7 @@ jobs:
- name: Authenticate to Google Cloud
id: auth
uses: google-github-actions/auth@v2.1.2
uses: google-github-actions/auth@v2.1.3
with:
retries: '3'
workload_identity_provider: '${{ vars.GCP_WIF }}'

View File

@ -118,11 +118,11 @@ jobs:
contents: 'read'
id-token: 'write'
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
fetch-depth: '2'
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
- name: Inject slug/short variables
uses: rlespinasse/github-slug-action@v4
@ -150,7 +150,7 @@ jobs:
# Setup gcloud CLI
- name: Authenticate to Google Cloud
id: auth
uses: google-github-actions/auth@v2.1.2
uses: google-github-actions/auth@v2.1.3
with:
retries: '3'
workload_identity_provider: '${{ vars.GCP_WIF }}'
@ -406,11 +406,11 @@ jobs:
contents: 'read'
id-token: 'write'
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
fetch-depth: '2'
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
- name: Inject slug/short variables
uses: rlespinasse/github-slug-action@v4
@ -450,7 +450,7 @@ jobs:
# Setup gcloud CLI
- name: Authenticate to Google Cloud
id: auth
uses: google-github-actions/auth@v2.1.2
uses: google-github-actions/auth@v2.1.3
with:
workload_identity_provider: '${{ vars.GCP_WIF }}'
service_account: '${{ vars.GCP_DEPLOYMENTS_SA }}'
@ -713,11 +713,11 @@ jobs:
contents: 'read'
id-token: 'write'
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
fetch-depth: '2'
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
- name: Inject slug/short variables
uses: rlespinasse/github-slug-action@v4
@ -727,7 +727,7 @@ jobs:
# Setup gcloud CLI
- name: Authenticate to Google Cloud
id: auth
uses: google-github-actions/auth@v2.1.2
uses: google-github-actions/auth@v2.1.3
with:
workload_identity_provider: '${{ vars.GCP_WIF }}'
service_account: '${{ vars.GCP_DEPLOYMENTS_SA }}'

View File

@ -37,7 +37,7 @@ jobs:
contents: 'read'
id-token: 'write'
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
fetch-depth: 0
@ -45,7 +45,7 @@ jobs:
# Setup gcloud CLI
- name: Authenticate to Google Cloud
id: auth
uses: google-github-actions/auth@v2.1.2
uses: google-github-actions/auth@v2.1.3
with:
retries: '3'
workload_identity_provider: '${{ vars.GCP_WIF }}'

View File

@ -38,7 +38,7 @@ jobs:
timeout-minutes: 30
runs-on: ubuntu-latest-m
steps:
- uses: actions/checkout@v4.1.4
- uses: actions/checkout@v4.1.6
with:
persist-credentials: false
@ -47,7 +47,7 @@ jobs:
with:
short-length: 7
- uses: r7kamura/rust-problem-matchers@v1.4.0
- uses: r7kamura/rust-problem-matchers@v1.5.0
- name: Run ${{ inputs.test_id }} test
run: |

View File

@ -6067,7 +6067,7 @@ dependencies = [
"metrics 0.22.3",
"num-integer",
"once_cell",
"orchard 0.6.0",
"orchard 0.7.1",
"proptest",
"proptest-derive",
"rand 0.8.5",

View File

@ -22,7 +22,7 @@ mod transaction;
pub mod arbitrary;
pub use genesis::*;
pub use network::{testnet, Network, NetworkKind};
pub use network::{magic::Magic, testnet, Network, NetworkKind};
pub use network_upgrade::*;
pub use transaction::*;

View File

@ -15,3 +15,15 @@ pub const SLOW_START_INTERVAL: Height = Height(20_000);
///
/// This calculation is exact, because `SLOW_START_INTERVAL` is divisible by 2.
pub const SLOW_START_SHIFT: Height = Height(SLOW_START_INTERVAL.0 / 2);
/// Magic numbers used to identify different Zcash networks.
pub mod magics {
use crate::parameters::network::magic::Magic;
/// The production mainnet.
pub const MAINNET: Magic = Magic([0x24, 0xe9, 0x27, 0x64]);
/// The testnet.
pub const TESTNET: Magic = Magic([0xfa, 0x1a, 0xf9, 0xbf]);
/// The regtest, see <https://github.com/zcash/zcash/blob/master/src/chainparams.cpp#L716-L719>
pub const REGTEST: Magic = Magic([0xaa, 0xe8, 0x3f, 0x5f]);
}

View File

@ -9,6 +9,7 @@ use crate::{
parameters::NetworkUpgrade,
};
pub mod magic;
pub mod testnet;
#[cfg(test)]
@ -75,7 +76,7 @@ impl From<Network> for NetworkKind {
}
/// An enum describing the possible network choices.
#[derive(Clone, Debug, Default, Eq, PartialEq, Hash, Serialize)]
#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize)]
#[serde(into = "NetworkKind")]
pub enum Network {
/// The production mainnet.

View File

@ -0,0 +1,56 @@
//! Network `Magic` type and implementation.
use std::fmt;
use crate::parameters::{constants::magics, Network};
#[cfg(any(test, feature = "proptest-impl"))]
use proptest_derive::Arbitrary;
/// A magic number identifying the network.
#[derive(Copy, Clone, Eq, PartialEq)]
#[cfg_attr(any(test, feature = "proptest-impl"), derive(Arbitrary))]
pub struct Magic(pub [u8; 4]);
impl fmt::Debug for Magic {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("Magic").field(&hex::encode(self.0)).finish()
}
}
impl Network {
/// Get the magic value associated with this `Network`.
pub fn magic(&self) -> Magic {
match self {
Network::Mainnet => magics::MAINNET,
Network::Testnet(params) => params.network_magic(),
}
}
}
#[cfg(test)]
mod proptest {
use proptest::prelude::*;
use super::{magics, Magic};
#[test]
fn magic_debug() {
let _init_guard = zebra_test::init();
assert_eq!(format!("{:?}", magics::MAINNET), "Magic(\"24e92764\")");
assert_eq!(format!("{:?}", magics::TESTNET), "Magic(\"fa1af9bf\")");
assert_eq!(format!("{:?}", magics::REGTEST), "Magic(\"aae83f5f\")");
}
proptest! {
#[test]
fn proptest_magic_from_array(data in any::<[u8; 4]>()) {
let _init_guard = zebra_test::init();
assert_eq!(format!("{:?}", Magic(data)), format!("Magic({:x?})", hex::encode(data)));
}
}
}
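
A quick sketch of how the new type and accessor fit together (the `zebra_chain::parameters` re-export paths are taken from this change; the standalone `main` is illustrative only, not part of the diff):

```rust
use zebra_chain::parameters::{Magic, Network};

fn main() {
    // `Network::magic()` returns the hard-coded Mainnet magic for Mainnet and the
    // per-network magic stored in `testnet::Parameters` otherwise.
    assert_eq!(Network::Mainnet.magic(), Magic([0x24, 0xe9, 0x27, 0x64]));

    // The custom `Debug` impl hex-encodes the four bytes, as the `magic_debug`
    // test above checks for the three built-in networks.
    assert_eq!(format!("{:?}", Network::Mainnet.magic()), "Magic(\"24e92764\")");
}
```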

View File

@ -6,12 +6,15 @@ use zcash_primitives::constants as zp_constants;
use crate::{
block::{self, Height},
parameters::{
constants::{SLOW_START_INTERVAL, SLOW_START_SHIFT},
constants::{magics, SLOW_START_INTERVAL, SLOW_START_SHIFT},
network_upgrade::TESTNET_ACTIVATION_HEIGHTS,
Network, NetworkUpgrade, NETWORK_UPGRADES_IN_ORDER,
},
work::difficulty::{ExpandedDifficulty, U256},
};
use super::magic::Magic;
/// The Regtest NU5 activation height in tests
// TODO: Serialize testnet parameters in Config then remove this and use a configured NU5 activation height.
#[cfg(any(test, feature = "proptest-impl"))]
@ -63,10 +66,12 @@ pub struct ConfiguredActivationHeights {
}
/// Builder for the [`Parameters`] struct.
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ParametersBuilder {
/// The name of this network to be used by the `Display` trait impl.
network_name: String,
/// The network magic, which acts as an identifier for the network.
network_magic: Magic,
/// The genesis block hash
genesis_hash: block::Hash,
/// The network upgrade activation heights for this network, see [`Parameters::activation_heights`] for more details.
@ -79,6 +84,8 @@ pub struct ParametersBuilder {
hrp_sapling_payment_address: String,
/// Slow start interval for this network
slow_start_interval: Height,
/// Target difficulty limit for this network
target_difficulty_limit: ExpandedDifficulty,
/// A flag for disabling proof-of-work checks when Zebra is validating blocks
disable_pow: bool,
}
@ -88,6 +95,7 @@ impl Default for ParametersBuilder {
fn default() -> Self {
Self {
network_name: "UnknownTestnet".to_string(),
network_magic: magics::TESTNET,
// # Correctness
//
// `Genesis` network upgrade activation height must always be 0
@ -102,6 +110,19 @@ impl Default for ParametersBuilder {
.parse()
.expect("hard-coded hash parses"),
slow_start_interval: SLOW_START_INTERVAL,
// Testnet PoWLimit is defined as `2^251 - 1` on page 73 of the protocol specification:
// <https://zips.z.cash/protocol/protocol.pdf>
//
// `zcashd` converts the PoWLimit into a compact representation before
// using it to perform difficulty filter checks.
//
// The Zcash specification converts to compact for the default difficulty
// filter, but not for testnet minimum difficulty blocks. (ZIP 205 and
// ZIP 208 don't specify this conversion either.) See #1277 for details.
target_difficulty_limit: ExpandedDifficulty::from((U256::one() << 251) - 1)
.to_compact()
.to_expanded()
.expect("difficulty limits are valid expanded values"),
disable_pow: false,
}
}
@ -132,6 +153,20 @@ impl ParametersBuilder {
self
}
/// Sets the network magic to be used in the [`Parameters`] being built.
pub fn with_network_magic(mut self, network_magic: Magic) -> Self {
assert!(
[magics::MAINNET, magics::REGTEST]
.into_iter()
.all(|reserved_magic| network_magic != reserved_magic),
"network magic should be distinct from reserved network magics"
);
self.network_magic = network_magic;
self
}
/// Checks that the provided Sapling human-readable prefixes (HRPs) are valid and unique, then
/// sets the Sapling HRPs to be used in the [`Parameters`] being built.
pub fn with_sapling_hrps(
@ -247,6 +282,16 @@ impl ParametersBuilder {
self
}
/// Sets the target difficulty limit to be used in the [`Parameters`] being built.
// TODO: Accept a hex-encoded String instead?
pub fn with_target_difficulty_limit(mut self, target_difficulty_limit: U256) -> Self {
self.target_difficulty_limit = ExpandedDifficulty::from(target_difficulty_limit)
.to_compact()
.to_expanded()
.expect("difficulty limits are valid expanded values");
self
}
/// Sets the `disable_pow` flag to be used in the [`Parameters`] being built.
pub fn with_disable_pow(mut self, disable_pow: bool) -> Self {
self.disable_pow = disable_pow;
@ -257,16 +302,19 @@ impl ParametersBuilder {
pub fn finish(self) -> Parameters {
let Self {
network_name,
network_magic,
genesis_hash,
activation_heights,
hrp_sapling_extended_spending_key,
hrp_sapling_extended_full_viewing_key,
hrp_sapling_payment_address,
slow_start_interval,
target_difficulty_limit,
disable_pow,
} = self;
Parameters {
network_name,
network_magic,
genesis_hash,
activation_heights,
hrp_sapling_extended_spending_key,
@ -274,6 +322,7 @@ impl ParametersBuilder {
hrp_sapling_payment_address,
slow_start_interval,
slow_start_shift: Height(slow_start_interval.0 / 2),
target_difficulty_limit,
disable_pow,
}
}
@ -285,10 +334,12 @@ impl ParametersBuilder {
}
/// Network consensus parameters for test networks such as Regtest and the default Testnet.
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Parameters {
/// The name of this network to be used by the `Display` trait impl.
network_name: String,
/// The network magic, which acts as an identifier for the network.
network_magic: Magic,
/// The genesis block hash
genesis_hash: block::Hash,
/// The network upgrade activation heights for this network.
@ -307,6 +358,8 @@ pub struct Parameters {
slow_start_interval: Height,
/// Slow start shift for this network, always half the slow start interval
slow_start_shift: Height,
/// Target difficulty limit for this network
target_difficulty_limit: ExpandedDifficulty,
/// A flag for disabling proof-of-work checks when Zebra is validating blocks
disable_pow: bool,
}
@ -336,8 +389,11 @@ impl Parameters {
Self {
network_name: "Regtest".to_string(),
network_magic: magics::REGTEST,
..Self::build()
.with_genesis_hash(REGTEST_GENESIS_HASH)
// This value is chosen to match zcashd, see: <https://github.com/zcash/zcash/blob/master/src/chainparams.cpp#L654>
.with_target_difficulty_limit(U256::from_big_endian(&[0x0f; 32]))
.with_disable_pow(true)
.with_slow_start_interval(Height::MIN)
.with_sapling_hrps(
@ -365,6 +421,7 @@ impl Parameters {
pub fn is_regtest(&self) -> bool {
let Self {
network_name,
network_magic,
genesis_hash,
// Activation heights are configurable on Regtest
activation_heights: _,
@ -373,16 +430,19 @@ impl Parameters {
hrp_sapling_payment_address,
slow_start_interval,
slow_start_shift,
target_difficulty_limit,
disable_pow,
} = Self::new_regtest(None);
self.network_name == network_name
&& self.network_magic == network_magic
&& self.genesis_hash == genesis_hash
&& self.hrp_sapling_extended_spending_key == hrp_sapling_extended_spending_key
&& self.hrp_sapling_extended_full_viewing_key == hrp_sapling_extended_full_viewing_key
&& self.hrp_sapling_payment_address == hrp_sapling_payment_address
&& self.slow_start_interval == slow_start_interval
&& self.slow_start_shift == slow_start_shift
&& self.target_difficulty_limit == target_difficulty_limit
&& self.disable_pow == disable_pow
}
@ -391,6 +451,11 @@ impl Parameters {
&self.network_name
}
/// Returns the network magic
pub fn network_magic(&self) -> Magic {
self.network_magic
}
/// Returns the genesis hash
pub fn genesis_hash(&self) -> block::Hash {
self.genesis_hash
@ -426,6 +491,11 @@ impl Parameters {
self.slow_start_shift
}
/// Returns the target difficulty limit for this network
pub fn target_difficulty_limit(&self) -> ExpandedDifficulty {
self.target_difficulty_limit
}
/// Returns true if proof-of-work validation should be disabled for this network
pub fn disable_pow(&self) -> bool {
self.disable_pow
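
For reference, a sketch of how the new builder methods compose when constructing a custom testnet. Module paths and the public visibility of `U256` outside the crate are assumptions, not confirmed by this diff:

```rust
use zebra_chain::{
    parameters::{testnet, Magic},
    work::difficulty::U256,
};

fn main() {
    let params = testnet::Parameters::build()
        .with_network_name("ConfiguredTestnet_1")
        // Must differ from the reserved Mainnet and Regtest magics,
        // otherwise `with_network_magic` panics.
        .with_network_magic(Magic([0xaa, 0xbb, 0xcc, 0xdd]))
        // Accepts a raw `U256`; the builder round-trips it through the
        // compact encoding, just like the `Default` impl above.
        .with_target_difficulty_limit((U256::one() << 251) - 1)
        .finish();

    assert_eq!(params.network_name(), "ConfiguredTestnet_1");
    assert_eq!(params.network_magic(), Magic([0xaa, 0xbb, 0xcc, 0xdd]));
}
```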

View File

@ -100,7 +100,7 @@ pub const INVALID_COMPACT_DIFFICULTY: CompactDifficulty = CompactDifficulty(u32:
/// [section 7.7.2]: https://zips.z.cash/protocol/protocol.pdf#difficulty
//
// TODO: Use NonZeroU256, when available
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd)]
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct ExpandedDifficulty(U256);
/// A 128-bit unsigned "Work" value.
@ -696,11 +696,11 @@ impl ParameterDifficulty for Network {
/// See [`ParameterDifficulty::target_difficulty_limit`]
fn target_difficulty_limit(&self) -> ExpandedDifficulty {
let limit: U256 = match self {
/* 2^243 - 1 */
// Mainnet PoWLimit is defined as `2^243 - 1` on page 73 of the protocol specification:
// <https://zips.z.cash/protocol/protocol.pdf>
Network::Mainnet => (U256::one() << 243) - 1,
/* 2^251 - 1 */
// TODO: Add a `target_difficulty_limit` field to `testnet::Parameters` to return here. (`U256::from_big_endian(&[0x0f].repeat(8))` for Regtest)
Network::Testnet(_params) => (U256::one() << 251) - 1,
// 2^251 - 1 for the default testnet, see `testnet::ParametersBuilder::default()`
Network::Testnet(params) => return params.target_difficulty_limit(),
};
// `zcashd` converts the PoWLimit into a compact representation before
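
The same conversion can be spelled out as a standalone check (a sketch; it assumes `ParameterDifficulty`, `ExpandedDifficulty`, and `U256` are publicly exported from `zebra_chain::work::difficulty`):

```rust
use zebra_chain::{
    parameters::Network,
    work::difficulty::{ExpandedDifficulty, ParameterDifficulty, U256},
};

fn main() {
    // The default testnet PoWLimit, 2^251 - 1, round-tripped through the
    // compact encoding exactly as in `testnet::ParametersBuilder::default()`.
    let limit = ExpandedDifficulty::from((U256::one() << 251) - 1)
        .to_compact()
        .to_expanded()
        .expect("difficulty limits are valid expanded values");

    // The trait method now delegates to the configurable field for testnets.
    assert!(Network::new_default_testnet().target_difficulty_limit() == limit);
}
```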

View File

@ -58,7 +58,7 @@ tower = { version = "0.4.13", features = ["timeout", "util", "buffer"] }
tracing = "0.1.39"
tracing-futures = "0.2.5"
orchard = "0.6.0"
orchard = "0.7.0"
zcash_proofs = { version = "0.13.0-rc.1", features = ["multicore" ] }
wagyu-zcash-parameters = "0.2.0"

View File

@ -176,11 +176,10 @@ where
Err(BlockError::MaxHeight(height, hash, block::Height::MAX))?;
}
if !network.disable_pow() {
// > The block data MUST be validated and checked against the server's usual
// > acceptance rules (excluding the check for a valid proof-of-work).
// <https://en.bitcoin.it/wiki/BIP_0023#Block_Proposal>
if request.is_proposal() {
if request.is_proposal() || network.disable_pow() {
check::difficulty_threshold_is_valid(&block.header, &network, &height, &hash)?;
} else {
// Do the difficulty checks first, to raise the threshold for
@ -188,7 +187,6 @@ where
check::difficulty_is_valid(&block.header, &network, &height, &hash)?;
check::equihash_solution_is_valid(&block.header)?;
}
}
// Next, check the Merkle root validity, to ensure that
// the header binds to the transactions in the blocks.
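
Condensed, the new control flow in this verifier looks like the following standalone sketch (not the actual verifier code): proposals and PoW-disabled networks only get the difficulty threshold check, while everything else gets the full difficulty and Equihash checks.

```rust
// Which proof-of-work checks apply to an incoming block under the new logic.
fn needs_full_pow_checks(is_proposal: bool, disable_pow: bool) -> bool {
    !(is_proposal || disable_pow)
}

fn main() {
    assert!(!needs_full_pow_checks(true, false)); // getblocktemplate proposal
    assert!(!needs_full_pow_checks(false, true)); // Regtest with PoW disabled
    assert!(needs_full_pow_checks(false, false)); // ordinary Mainnet/Testnet block
}
```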

View File

@ -595,7 +595,14 @@ where
.ok_or(VerifyCheckpointError::CoinbaseHeight { hash })?;
self.check_height(height)?;
if !self.network.disable_pow() {
if self.network.disable_pow() {
crate::block::check::difficulty_threshold_is_valid(
&block.header,
&self.network,
&height,
&hash,
)?;
} else {
crate::block::check::difficulty_is_valid(&block.header, &self.network, &height, &hash)?;
crate::block::check::equihash_solution_is_valid(&block.header)?;
}

View File

@ -45,6 +45,9 @@ pub mod error;
pub mod router;
pub mod transaction;
#[cfg(any(test, feature = "proptest-impl"))]
pub use block::check::difficulty_is_valid;
pub use block::{
subsidy::{
funding_streams::{

View File

@ -7,7 +7,7 @@ use tower::ServiceExt;
use halo2::pasta::{group::ff::PrimeField, pallas};
use orchard::{
builder::Builder,
builder::{Builder, BundleType},
bundle::Flags,
circuit::ProvingKey,
keys::{FullViewingKey, Scope, SpendingKey},
@ -32,8 +32,6 @@ fn generate_test_vectors() {
let sk = SpendingKey::from_bytes([7; 32]).unwrap();
let recipient = FullViewingKey::from(&sk).address_at(0u32, Scope::External);
let enable_spends = true;
let enable_outputs = true;
let flags =
zebra_chain::orchard::Flags::ENABLE_SPENDS | zebra_chain::orchard::Flags::ENABLE_OUTPUTS;
@ -43,17 +41,20 @@ fn generate_test_vectors() {
let shielded_data: Vec<zebra_chain::orchard::ShieldedData> = (1..=4)
.map(|num_recipients| {
let mut builder = Builder::new(
Flags::from_parts(enable_spends, enable_outputs),
BundleType::Transactional {
flags: Flags::from_byte(flags.bits()).unwrap(),
bundle_required: true,
},
Anchor::from_bytes(anchor_bytes).unwrap(),
);
for _ in 0..num_recipients {
builder
.add_recipient(None, recipient, NoteValue::from_raw(note_value), None)
.add_output(None, recipient, NoteValue::from_raw(note_value), None)
.unwrap();
}
let bundle: Bundle<_, i64> = builder.build(rng).unwrap();
let bundle: Bundle<_, i64> = builder.build(rng).unwrap().unwrap().0;
let bundle = bundle
.create_proof(&proving_key, rng)

View File

@ -29,6 +29,7 @@ use crate::{
pub const ZECPAGES_SAPLING_VIEWING_KEY: &str = "zxviews1q0duytgcqqqqpqre26wkl45gvwwwd706xw608hucmvfalr759ejwf7qshjf5r9aa7323zulvz6plhttp5mltqcgs9t039cx2d09mgq05ts63n8u35hyv6h9nc9ctqqtue2u7cer2mqegunuulq2luhq3ywjcz35yyljewa4mgkgjzyfwh6fr6jd0dzd44ghk0nxdv2hnv4j5nxfwv24rwdmgllhe0p8568sgqt9ckt02v2kxf5ahtql6s0ltjpkckw8gtymxtxuu9gcr0swvz";
#[tokio::test(flavor = "multi_thread")]
#[cfg(not(target_os = "windows"))]
async fn test_grpc_response_data() {
let _init_guard = zebra_test::init();

View File

@ -26,6 +26,7 @@ pub const ZECPAGES_SAPLING_VIEWING_KEY: &str = "zxviews1q0duytgcqqqqpqre26wkl45g
/// Test the gRPC methods with mocked responses
#[tokio::test(flavor = "multi_thread")]
#[cfg(not(target_os = "windows"))]
async fn test_grpc_methods_mocked() {
let _init_guard = zebra_test::init();

View File

@ -16,7 +16,7 @@ use tracing::Span;
use zebra_chain::parameters::{
testnet::{self, ConfiguredActivationHeights},
Network, NetworkKind,
Magic, Network, NetworkKind,
};
use crate::{
@ -636,6 +636,7 @@ impl<'de> Deserialize<'de> for Config {
{
#[derive(Deserialize)]
struct DTestnetParameters {
network_magic: Option<[u8; 4]>,
network_name: Option<String>,
activation_heights: Option<ConfiguredActivationHeights>,
}
@ -718,26 +719,34 @@ impl<'de> Deserialize<'de> for Config {
NetworkKind::Testnet,
Some(DTestnetParameters {
network_name,
network_magic,
activation_heights,
}),
) => {
let mut params_builder = testnet::Parameters::build();
let should_avoid_default_peers =
network_magic.is_some() || activation_heights.is_some();
// Return an error if the initial testnet peers include any of the default initial Mainnet or Testnet
// peers while activation heights or a custom network magic is configured.
if should_avoid_default_peers
&& contains_default_initial_peers(&initial_testnet_peers)
{
return Err(de::Error::custom(
"cannot use default initials peers with incompatible testnet",
));
}
if let Some(network_name) = network_name {
params_builder = params_builder.with_network_name(network_name)
}
// Retain default Testnet activation heights unless there's an empty [testnet_parameters.activation_heights] section.
if let Some(activation_heights) = activation_heights {
// Return an error if the initial testnet peers includes any of the default initial Mainnet or Testnet
// peers while activation heights are configured.
// TODO: Check that the network magic is different from the default Mainnet/Testnet network magic too?
if contains_default_initial_peers(&initial_testnet_peers) {
return Err(de::Error::custom(
"cannot use default initial testnet peers with configured activation heights",
));
if let Some(network_magic) = network_magic {
params_builder = params_builder.with_network_magic(Magic(network_magic));
}
// Retain default Testnet activation heights unless there's an empty [testnet_parameters.activation_heights] section.
if let Some(activation_heights) = activation_heights {
params_builder = params_builder.with_activation_heights(activation_heights)
}

View File

@ -422,15 +422,6 @@ lazy_static! {
/// [6.1.3.3 Efficient Resource Usage] <https://tools.ietf.org/rfcmarkup?doc=1123#page-77>
pub const DNS_LOOKUP_TIMEOUT: Duration = Duration::from_secs(5);
/// Magic numbers used to identify different Zcash networks.
pub mod magics {
use super::*;
/// The production mainnet.
pub const MAINNET: Magic = Magic([0x24, 0xe9, 0x27, 0x64]);
/// The testnet.
pub const TESTNET: Magic = Magic([0xfa, 0x1a, 0xf9, 0xbf]);
}
#[cfg(test)]
mod tests {
use zebra_chain::parameters::POST_BLOSSOM_POW_TARGET_SPACING;

View File

@ -145,6 +145,7 @@ async fn local_listener_unspecified_port_localhost_addr_v6() {
/// Test that zebra-network propagates fixed localhost listener ports to the `AddressBook`.
#[tokio::test]
#[cfg(not(target_os = "windows"))]
async fn local_listener_fixed_port_localhost_addr_v4() {
let _init_guard = zebra_test::init();
@ -161,6 +162,7 @@ async fn local_listener_fixed_port_localhost_addr_v4() {
/// Test that zebra-network propagates fixed localhost listener ports to the `AddressBook`.
#[tokio::test]
#[cfg(not(target_os = "windows"))]
async fn local_listener_fixed_port_localhost_addr_v6() {
let _init_guard = zebra_test::init();

View File

@ -13,7 +13,7 @@ use tokio_util::codec::{Decoder, Encoder};
use zebra_chain::{
block::{self, Block},
parameters::Network,
parameters::{Magic, Network},
serialization::{
sha256d, zcash_deserialize_bytes_external_count, zcash_deserialize_string_external_count,
CompactSizeMessage, FakeWriter, ReadZcashExt, SerializationError as Error,
@ -163,7 +163,7 @@ impl Encoder<Message> for Codec {
let start_len = dst.len();
{
let dst = &mut dst.writer();
dst.write_all(&self.builder.network.magic_value().0[..])?;
dst.write_all(&self.builder.network.magic().0[..])?;
dst.write_all(command)?;
dst.write_u32::<LittleEndian>(body_length as u32)?;
@ -389,7 +389,7 @@ impl Decoder for Codec {
"read header from src buffer"
);
if magic != self.builder.network.magic_value() {
if magic != self.builder.network.magic() {
return Err(Parse("supplied magic did not meet expectations"));
}
if body_len > self.builder.max_len {

View File

@ -587,3 +587,23 @@ fn reject_command_and_reason_size_limits() {
};
}
}
/// Check that the version test vector deserialization fails when there's a network magic mismatch.
#[test]
fn message_with_wrong_network_magic_returns_error() {
let _init_guard = zebra_test::init();
let mut codec = Codec::builder().finish();
let mut bytes = BytesMut::new();
codec
.encode(VERSION_TEST_VECTOR.clone(), &mut bytes)
.expect("encoding should succeed");
let mut codec = Codec::builder()
.for_network(&Network::new_default_testnet())
.finish();
codec
.decode(&mut bytes)
.expect_err("decoding message with mismatching network magic should return an error");
}

View File

@ -8,35 +8,11 @@ use zebra_chain::{
},
};
use crate::constants::{self, magics, CURRENT_NETWORK_PROTOCOL_VERSION};
use crate::constants::{self, CURRENT_NETWORK_PROTOCOL_VERSION};
#[cfg(any(test, feature = "proptest-impl"))]
use proptest_derive::Arbitrary;
/// A magic number identifying the network.
#[derive(Copy, Clone, Eq, PartialEq)]
#[cfg_attr(any(test, feature = "proptest-impl"), derive(Arbitrary))]
pub struct Magic(pub [u8; 4]);
impl fmt::Debug for Magic {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("Magic").field(&hex::encode(self.0)).finish()
}
}
pub(crate) trait ParameterMagic {
fn magic_value(&self) -> Magic;
}
impl ParameterMagic for Network {
/// Get the magic value associated to this `Network`.
fn magic_value(&self) -> Magic {
match self {
Network::Mainnet => magics::MAINNET,
// TODO: Move `Magic` struct definition to `zebra-chain`, add it as a field in `testnet::Parameters`, and return it here.
Network::Testnet(_params) => magics::TESTNET,
}
}
}
/// A protocol version number.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
pub struct Version(pub u32);
@ -168,34 +144,6 @@ impl Default for Tweak {
#[cfg_attr(any(test, feature = "proptest-impl"), derive(Arbitrary))]
pub struct Filter(pub Vec<u8>);
#[cfg(test)]
mod proptest {
use proptest::prelude::*;
use super::Magic;
use crate::constants::magics;
#[test]
fn magic_debug() {
let _init_guard = zebra_test::init();
assert_eq!(format!("{:?}", magics::MAINNET), "Magic(\"24e92764\")");
assert_eq!(format!("{:?}", magics::TESTNET), "Magic(\"fa1af9bf\")");
}
proptest! {
#[test]
fn proptest_magic_from_array(data in any::<[u8; 4]>()) {
let _init_guard = zebra_test::init();
assert_eq!(format!("{:?}", Magic(data)), format!("Magic({:x?})", hex::encode(data)));
}
}
}
#[cfg(test)]
mod test {
use super::*;

View File

@ -1037,8 +1037,7 @@ where
// For consistency, this lookup must be performed first, then all the other lookups must
// be based on the hash.
//
// TODO: If this RPC is called a lot, just get the block header, rather than the whole
// block.
// TODO: If this RPC is called a lot, just get the block header, rather than the whole block.
let block = match state
.ready()
.and_then(|service| service.call(zebra_state::ReadRequest::Block(hash_or_height)))
@ -1062,7 +1061,7 @@ where
let height = hash_or_height
.height_or_else(|_| block.coinbase_height())
.expect("block height");
.expect("verified blocks have a coinbase height");
let time = u32::try_from(block.header.time.timestamp())
.expect("Timestamps of valid blocks always fit into u32.");

View File

@ -30,6 +30,7 @@ fn rpc_server_spawn_single_thread() {
/// Test that the JSON-RPC server spawns when configured with multiple threads.
#[test]
#[cfg(not(target_os = "windows"))]
fn rpc_server_spawn_parallel_threads() {
rpc_server_spawn(true)
}

View File

@ -33,7 +33,7 @@ pub const ADDRESS_HEIGHTS_FULL_RANGE: RangeInclusive<Height> = Height(1)..=Heigh
/// A convenience wrapper that efficiently stores unspent transparent outputs,
/// and the corresponding transaction IDs.
#[derive(Clone, Debug, Default, Eq, PartialEq, Hash)]
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct AddressUtxos {
/// A set of unspent transparent outputs.
utxos: BTreeMap<OutputLocation, transparent::Output>,

View File

@ -1395,7 +1395,7 @@ fn full_sync_testnet() -> Result<()> {
)
}
#[cfg(feature = "prometheus")]
#[cfg(all(feature = "prometheus", not(target_os = "windows")))]
#[tokio::test]
async fn metrics_endpoint() -> Result<()> {
use hyper::Client;
@ -1451,7 +1451,7 @@ async fn metrics_endpoint() -> Result<()> {
Ok(())
}
#[cfg(feature = "filter-reload")]
#[cfg(all(feature = "filter-reload", not(target_os = "windows")))]
#[tokio::test]
async fn tracing_endpoint() -> Result<()> {
use hyper::{Body, Client, Request};
@ -1549,6 +1549,7 @@ async fn tracing_endpoint() -> Result<()> {
/// Test that the JSON-RPC endpoint responds to a request,
/// when configured with a single thread.
#[tokio::test]
#[cfg(not(target_os = "windows"))]
async fn rpc_endpoint_single_thread() -> Result<()> {
rpc_endpoint(false).await
}
@ -1556,6 +1557,7 @@ async fn rpc_endpoint_single_thread() -> Result<()> {
/// Test that the JSON-RPC endpoint responds to a request,
/// when configured with multiple threads.
#[tokio::test]
#[cfg(not(target_os = "windows"))]
async fn rpc_endpoint_parallel_threads() -> Result<()> {
rpc_endpoint(true).await
}
@ -1623,6 +1625,7 @@ async fn rpc_endpoint(parallel_cpu_threads: bool) -> Result<()> {
///
/// https://zcash.github.io/rpc/getblockchaininfo.html
#[tokio::test]
#[cfg(not(target_os = "windows"))]
async fn rpc_endpoint_client_content_type() -> Result<()> {
let _init_guard = zebra_test::init();
if zebra_test::net::zebra_skip_network_tests() {
@ -2148,6 +2151,7 @@ fn lightwalletd_integration_test(test_type: TestType) -> Result<()> {
/// It is expected that the first node spawned will get exclusive use of the port.
/// The second node will panic with the Zcash listener conflict hint added in #1535.
#[test]
#[cfg(not(target_os = "windows"))]
fn zebra_zcash_listener_conflict() -> Result<()> {
let _init_guard = zebra_test::init();
@ -2176,7 +2180,7 @@ fn zebra_zcash_listener_conflict() -> Result<()> {
/// exclusive use of the port. The second node will panic with the Zcash metrics
/// conflict hint added in #1535.
#[test]
#[cfg(feature = "prometheus")]
#[cfg(all(feature = "prometheus", not(target_os = "windows")))]
fn zebra_metrics_conflict() -> Result<()> {
let _init_guard = zebra_test::init();
@ -2205,7 +2209,7 @@ fn zebra_metrics_conflict() -> Result<()> {
/// exclusive use of the port. The second node will panic with the Zcash tracing
/// conflict hint added in #1535.
#[test]
#[cfg(feature = "filter-reload")]
#[cfg(all(feature = "filter-reload", not(target_os = "windows")))]
fn zebra_tracing_conflict() -> Result<()> {
let _init_guard = zebra_test::init();
@ -2944,7 +2948,7 @@ fn scan_task_starts() -> Result<()> {
/// Test that the scanner gRPC server starts when the node starts.
#[tokio::test]
#[cfg(feature = "shielded-scan")]
#[cfg(all(feature = "shielded-scan", not(target_os = "windows")))]
async fn scan_rpc_server_starts() -> Result<()> {
use zebra_grpc::scanner::{scanner_client::ScannerClient, Empty};

View File

@ -0,0 +1,91 @@
# Default configuration for zebrad.
#
# This file can be used as a skeleton for custom configs.
#
# Unspecified fields use default values. Optional fields are Some(field) if the
# field is present and None if it is absent.
#
# This file is generated as an example using zebrad's current defaults.
# You should set only the config options you want to keep, and delete the rest.
# Only a subset of fields are present in the skeleton, since optional values
# whose default is None are omitted.
#
# The config format (including a complete list of sections and fields) is
# documented here:
# https://docs.rs/zebrad/latest/zebrad/config/struct.ZebradConfig.html
#
# zebrad attempts to load configs in the following order:
#
# 1. The -c flag on the command line, e.g., `zebrad -c myconfig.toml start`;
# 2. The file `zebrad.toml` in the user's preference directory (platform-dependent);
# 3. The default config.
#
# The user's preference directory and the default path to the `zebrad` config are platform dependent,
# based on `dirs::preference_dir`, see https://docs.rs/dirs/latest/dirs/fn.preference_dir.html :
#
# | Platform | Value | Example |
# | -------- | ------------------------------------- | ---------------------------------------------- |
# | Linux | `$XDG_CONFIG_HOME` or `$HOME/.config` | `/home/alice/.config/zebrad.toml` |
# | macOS | `$HOME/Library/Preferences` | `/Users/Alice/Library/Preferences/zebrad.toml` |
# | Windows | `{FOLDERID_RoamingAppData}` | `C:\Users\Alice\AppData\Local\zebrad.toml` |
[consensus]
checkpoint_sync = true
[mempool]
eviction_memory_time = "1h"
tx_cost_limit = 80000000
[metrics]
[mining]
debug_like_zcashd = true
[network]
cache_dir = true
crawl_new_peer_interval = "1m 1s"
initial_mainnet_peers = [
"dnsseed.z.cash:8233",
"dnsseed.str4d.xyz:8233",
"mainnet.seeder.zfnd.org:8233",
"mainnet.is.yolo.money:8233",
]
initial_testnet_peers = []
listen_addr = "0.0.0.0:8233"
max_connections_per_ip = 1
network = "Testnet"
peerset_initial_target_size = 25
[network.testnet_parameters]
network_name = "ConfiguredTestnet_1"
network_magic = [0, 0, 0, 0]
[network.testnet_parameters.activation_heights]
BeforeOverwinter = 1
Overwinter = 207_500
Sapling = 280_000
Blossom = 584_000
Heartwood = 903_800
Canopy = 1_028_500
NU5 = 1_842_420
[rpc]
debug_force_finished_sync = false
parallel_cpu_threads = 0
[state]
cache_dir = "cache_dir"
delete_old_database = true
ephemeral = false
[sync]
checkpoint_verify_concurrency_limit = 1000
download_concurrency_limit = 50
full_verify_concurrency_limit = 20
parallel_cpu_threads = 0
[tracing]
buffer_limit = 128000
force_use_color = false
use_color = true
use_journald = false

View File

@ -3,13 +3,14 @@
//! This test will get block templates via the `getblocktemplate` RPC method and submit them as new blocks
//! via the `submitblock` RPC method on Regtest.
use std::{net::SocketAddr, time::Duration};
use std::{net::SocketAddr, sync::Arc, time::Duration};
use color_eyre::eyre::{Context, Result};
use tracing::*;
use zebra_chain::{
parameters::{testnet::REGTEST_NU5_ACTIVATION_HEIGHT, Network, NetworkUpgrade},
primitives::byte_array::increment_big_endian,
serialization::ZcashSerialize,
};
use zebra_node_services::rpc_client::RpcRequestClient;
@ -44,7 +45,7 @@ pub(crate) async fn submit_blocks_test() -> Result<()> {
tokio::time::sleep(Duration::from_secs(30)).await;
info!("attempting to submit blocks");
submit_blocks(rpc_address).await?;
submit_blocks(network, rpc_address).await?;
zebrad.kill(false)?;
@ -58,7 +59,7 @@ pub(crate) async fn submit_blocks_test() -> Result<()> {
}
/// Get block templates and submit blocks
async fn submit_blocks(rpc_address: SocketAddr) -> Result<()> {
async fn submit_blocks(network: Network, rpc_address: SocketAddr) -> Result<()> {
let client = RpcRequestClient::new(rpc_address);
for height in 1..=NUM_BLOCKS_TO_SUBMIT {
@ -73,10 +74,20 @@ async fn submit_blocks(rpc_address: SocketAddr) -> Result<()> {
NetworkUpgrade::Nu5
};
let block_data = hex::encode(
proposal_block_from_template(&block_template, TimeSource::default(), network_upgrade)?
.zcash_serialize_to_vec()?,
);
let mut block =
proposal_block_from_template(&block_template, TimeSource::default(), network_upgrade)?;
let height = block
.coinbase_height()
.expect("should have a coinbase height");
while !network.disable_pow()
&& zebra_consensus::difficulty_is_valid(&block.header, &network, &height, &block.hash())
.is_err()
{
increment_big_endian(Arc::make_mut(&mut block.header).nonce.as_mut());
}
let block_data = hex::encode(block.zcash_serialize_to_vec()?);
let submit_block_response = client
.text_from_call("submitblock", format!(r#"["{block_data}"]"#))
@ -84,7 +95,7 @@ async fn submit_blocks(rpc_address: SocketAddr) -> Result<()> {
let was_submission_successful = submit_block_response.contains(r#""result":null"#);
if height % 40 == 0 {
if height.0 % 40 == 0 {
info!(
was_submission_successful,
?block_template,
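
The grinding loop above repeatedly bumps the header nonce until the difficulty check passes. A rough sketch of what the big-endian increment does (the real helper is `increment_big_endian` from `zebra_chain::primitives::byte_array`, imported earlier in this file; this standalone version is illustrative):

```rust
fn increment_big_endian(bytes: &mut [u8]) {
    // Treat the slice as one big-endian integer: add 1 to the last byte and
    // carry leftwards while a byte wraps around.
    for byte in bytes.iter_mut().rev() {
        let (incremented, wrapped) = byte.overflowing_add(1);
        *byte = incremented;
        if !wrapped {
            break;
        }
    }
}

fn main() {
    let mut nonce = [0x00, 0x00, 0x00, 0xff];
    increment_big_endian(&mut nonce);
    assert_eq!(nonce, [0x00, 0x00, 0x01, 0x00]);
}
```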