mirror of https://github.com/zcash/halo2.git
Merge pull request #185 from kilic/shuffle
Implement native shuffle argument and api
This commit is contained in:
commit
6b43b6bad3
|
@ -0,0 +1,216 @@
|
|||
use std::{marker::PhantomData, vec};
|
||||
|
||||
use ff::FromUniformBytes;
|
||||
use halo2_proofs::{
|
||||
arithmetic::Field,
|
||||
circuit::{Layouter, SimpleFloorPlanner, Value},
|
||||
plonk::{
|
||||
create_proof, keygen_pk, keygen_vk, verify_proof, Advice, Circuit, Column,
|
||||
ConstraintSystem, Error, Fixed, Selector,
|
||||
},
|
||||
poly::Rotation,
|
||||
poly::{
|
||||
commitment::ParamsProver,
|
||||
ipa::{
|
||||
commitment::{IPACommitmentScheme, ParamsIPA},
|
||||
multiopen::{ProverIPA, VerifierIPA},
|
||||
strategy::AccumulatorStrategy,
|
||||
},
|
||||
VerificationStrategy,
|
||||
},
|
||||
transcript::{
|
||||
Blake2bRead, Blake2bWrite, Challenge255, TranscriptReadBuffer, TranscriptWriterBuffer,
|
||||
},
|
||||
};
|
||||
use halo2curves::{pasta::EqAffine, CurveAffine};
|
||||
use rand_core::OsRng;
|
||||
|
||||
struct ShuffleChip<F: Field> {
|
||||
config: ShuffleConfig,
|
||||
_marker: PhantomData<F>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct ShuffleConfig {
|
||||
input_0: Column<Advice>,
|
||||
input_1: Column<Fixed>,
|
||||
shuffle_0: Column<Advice>,
|
||||
shuffle_1: Column<Advice>,
|
||||
s_input: Selector,
|
||||
s_shuffle: Selector,
|
||||
}
|
||||
|
||||
impl<F: Field> ShuffleChip<F> {
|
||||
fn construct(config: ShuffleConfig) -> Self {
|
||||
Self {
|
||||
config,
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
fn configure(
|
||||
meta: &mut ConstraintSystem<F>,
|
||||
input_0: Column<Advice>,
|
||||
input_1: Column<Fixed>,
|
||||
shuffle_0: Column<Advice>,
|
||||
shuffle_1: Column<Advice>,
|
||||
) -> ShuffleConfig {
|
||||
let s_shuffle = meta.complex_selector();
|
||||
let s_input = meta.complex_selector();
|
||||
meta.shuffle("shuffle", |meta| {
|
||||
let s_input = meta.query_selector(s_input);
|
||||
let s_shuffle = meta.query_selector(s_shuffle);
|
||||
let input_0 = meta.query_advice(input_0, Rotation::cur());
|
||||
let input_1 = meta.query_fixed(input_1, Rotation::cur());
|
||||
let shuffle_0 = meta.query_advice(shuffle_0, Rotation::cur());
|
||||
let shuffle_1 = meta.query_advice(shuffle_1, Rotation::cur());
|
||||
vec![
|
||||
(s_input.clone() * input_0, s_shuffle.clone() * shuffle_0),
|
||||
(s_input * input_1, s_shuffle * shuffle_1),
|
||||
]
|
||||
});
|
||||
ShuffleConfig {
|
||||
input_0,
|
||||
input_1,
|
||||
shuffle_0,
|
||||
shuffle_1,
|
||||
s_input,
|
||||
s_shuffle,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct MyCircuit<F: Field> {
|
||||
input_0: Vec<Value<F>>,
|
||||
input_1: Vec<F>,
|
||||
shuffle_0: Vec<Value<F>>,
|
||||
shuffle_1: Vec<Value<F>>,
|
||||
}
|
||||
|
||||
impl<F: Field> Circuit<F> for MyCircuit<F> {
|
||||
// Since we are using a single chip for everything, we can just reuse its config.
|
||||
type Config = ShuffleConfig;
|
||||
type FloorPlanner = SimpleFloorPlanner;
|
||||
#[cfg(feature = "circuit-params")]
|
||||
type Params = ();
|
||||
|
||||
fn without_witnesses(&self) -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
fn configure(meta: &mut ConstraintSystem<F>) -> Self::Config {
|
||||
let input_0 = meta.advice_column();
|
||||
let input_1 = meta.fixed_column();
|
||||
let shuffle_0 = meta.advice_column();
|
||||
let shuffle_1 = meta.advice_column();
|
||||
ShuffleChip::configure(meta, input_0, input_1, shuffle_0, shuffle_1)
|
||||
}
|
||||
|
||||
fn synthesize(
|
||||
&self,
|
||||
config: Self::Config,
|
||||
mut layouter: impl Layouter<F>,
|
||||
) -> Result<(), Error> {
|
||||
let ch = ShuffleChip::<F>::construct(config);
|
||||
layouter.assign_region(
|
||||
|| "load inputs",
|
||||
|mut region| {
|
||||
for (i, (input_0, input_1)) in
|
||||
self.input_0.iter().zip(self.input_1.iter()).enumerate()
|
||||
{
|
||||
region.assign_advice(|| "input_0", ch.config.input_0, i, || *input_0)?;
|
||||
region.assign_fixed(
|
||||
|| "input_1",
|
||||
ch.config.input_1,
|
||||
i,
|
||||
|| Value::known(*input_1),
|
||||
)?;
|
||||
ch.config.s_input.enable(&mut region, i)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
layouter.assign_region(
|
||||
|| "load shuffles",
|
||||
|mut region| {
|
||||
for (i, (shuffle_0, shuffle_1)) in
|
||||
self.shuffle_0.iter().zip(self.shuffle_1.iter()).enumerate()
|
||||
{
|
||||
region.assign_advice(|| "shuffle_0", ch.config.shuffle_0, i, || *shuffle_0)?;
|
||||
region.assign_advice(|| "shuffle_1", ch.config.shuffle_1, i, || *shuffle_1)?;
|
||||
ch.config.s_shuffle.enable(&mut region, i)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn test_prover<C: CurveAffine>(k: u32, circuit: MyCircuit<C::Scalar>, expected: bool)
|
||||
where
|
||||
C::Scalar: FromUniformBytes<64>,
|
||||
{
|
||||
let params = ParamsIPA::<C>::new(k);
|
||||
let vk = keygen_vk(¶ms, &circuit).unwrap();
|
||||
let pk = keygen_pk(¶ms, vk, &circuit).unwrap();
|
||||
|
||||
let proof = {
|
||||
let mut transcript = Blake2bWrite::<_, _, Challenge255<_>>::init(vec![]);
|
||||
|
||||
create_proof::<IPACommitmentScheme<C>, ProverIPA<C>, _, _, _, _>(
|
||||
¶ms,
|
||||
&pk,
|
||||
&[circuit],
|
||||
&[&[]],
|
||||
OsRng,
|
||||
&mut transcript,
|
||||
)
|
||||
.expect("proof generation should not fail");
|
||||
|
||||
transcript.finalize()
|
||||
};
|
||||
|
||||
let accepted = {
|
||||
let strategy = AccumulatorStrategy::new(¶ms);
|
||||
let mut transcript = Blake2bRead::<_, _, Challenge255<_>>::init(&proof[..]);
|
||||
|
||||
verify_proof::<IPACommitmentScheme<C>, VerifierIPA<C>, _, _, _>(
|
||||
¶ms,
|
||||
pk.get_vk(),
|
||||
strategy,
|
||||
&[&[]],
|
||||
&mut transcript,
|
||||
)
|
||||
.map(|strategy| strategy.finalize())
|
||||
.unwrap_or_default()
|
||||
};
|
||||
|
||||
assert_eq!(accepted, expected);
|
||||
}
|
||||
|
||||
fn main() {
|
||||
use halo2_proofs::dev::MockProver;
|
||||
use halo2curves::pasta::Fp;
|
||||
const K: u32 = 4;
|
||||
let input_0 = [1, 2, 4, 1]
|
||||
.map(|e: u64| Value::known(Fp::from(e)))
|
||||
.to_vec();
|
||||
let input_1 = [10, 20, 40, 10].map(Fp::from).to_vec();
|
||||
let shuffle_0 = [4, 1, 1, 2]
|
||||
.map(|e: u64| Value::known(Fp::from(e)))
|
||||
.to_vec();
|
||||
let shuffle_1 = [40, 10, 10, 20]
|
||||
.map(|e: u64| Value::known(Fp::from(e)))
|
||||
.to_vec();
|
||||
let circuit = MyCircuit {
|
||||
input_0,
|
||||
input_1,
|
||||
shuffle_0,
|
||||
shuffle_1,
|
||||
};
|
||||
let prover = MockProver::run(K, &circuit, vec![]).unwrap();
|
||||
prover.assert_satisfied();
|
||||
test_prover::<EqAffine>(K, circuit, true);
|
||||
}
|
|
@ -1,5 +1,6 @@
|
|||
//! Tools for developing circuits.
|
||||
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::fmt;
|
||||
|
@ -861,6 +862,42 @@ impl<F: FromUniformBytes<64> + Ord> MockProver<F> {
|
|||
})
|
||||
});
|
||||
|
||||
let load = |expression: &Expression<F>, row| {
|
||||
expression.evaluate_lazy(
|
||||
&|scalar| Value::Real(scalar),
|
||||
&|_| panic!("virtual selectors are removed during optimization"),
|
||||
&|query| {
|
||||
let query = self.cs.fixed_queries[query.index.unwrap()];
|
||||
let column_index = query.0.index();
|
||||
let rotation = query.1 .0;
|
||||
self.fixed[column_index][(row as i32 + n + rotation) as usize % n as usize]
|
||||
.into()
|
||||
},
|
||||
&|query| {
|
||||
let query = self.cs.advice_queries[query.index.unwrap()];
|
||||
let column_index = query.0.index();
|
||||
let rotation = query.1 .0;
|
||||
self.advice[column_index][(row as i32 + n + rotation) as usize % n as usize]
|
||||
.into()
|
||||
},
|
||||
&|query| {
|
||||
let query = self.cs.instance_queries[query.index.unwrap()];
|
||||
let column_index = query.0.index();
|
||||
let rotation = query.1 .0;
|
||||
Value::Real(
|
||||
self.instance[column_index]
|
||||
[(row as i32 + n + rotation) as usize % n as usize],
|
||||
)
|
||||
},
|
||||
&|challenge| Value::Real(self.challenges[challenge.index()]),
|
||||
&|a| -a,
|
||||
&|a, b| a + b,
|
||||
&|a, b| a * b,
|
||||
&|a, scalar| a * scalar,
|
||||
&Value::Real(F::ZERO),
|
||||
)
|
||||
};
|
||||
|
||||
let mut cached_table = Vec::new();
|
||||
let mut cached_table_identifier = Vec::new();
|
||||
// Check that all lookups exist in their respective tables.
|
||||
|
@ -870,44 +907,6 @@ impl<F: FromUniformBytes<64> + Ord> MockProver<F> {
|
|||
.iter()
|
||||
.enumerate()
|
||||
.flat_map(|(lookup_index, lookup)| {
|
||||
let load = |expression: &Expression<F>, row| {
|
||||
expression.evaluate_lazy(
|
||||
&|scalar| Value::Real(scalar),
|
||||
&|_| panic!("virtual selectors are removed during optimization"),
|
||||
&|query| {
|
||||
let query = self.cs.fixed_queries[query.index.unwrap()];
|
||||
let column_index = query.0.index();
|
||||
let rotation = query.1 .0;
|
||||
self.fixed[column_index]
|
||||
[(row as i32 + n + rotation) as usize % n as usize]
|
||||
.into()
|
||||
},
|
||||
&|query| {
|
||||
let query = self.cs.advice_queries[query.index.unwrap()];
|
||||
let column_index = query.0.index();
|
||||
let rotation = query.1 .0;
|
||||
self.advice[column_index]
|
||||
[(row as i32 + n + rotation) as usize % n as usize]
|
||||
.into()
|
||||
},
|
||||
&|query| {
|
||||
let query = self.cs.instance_queries[query.index.unwrap()];
|
||||
let column_index = query.0.index();
|
||||
let rotation = query.1 .0;
|
||||
Value::Real(
|
||||
self.instance[column_index]
|
||||
[(row as i32 + n + rotation) as usize % n as usize],
|
||||
)
|
||||
},
|
||||
&|challenge| Value::Real(self.challenges[challenge.index()]),
|
||||
&|a| -a,
|
||||
&|a, b| a + b,
|
||||
&|a, b| a * b,
|
||||
&|a, scalar| a * scalar,
|
||||
&Value::Real(F::ZERO),
|
||||
)
|
||||
};
|
||||
|
||||
assert!(lookup.table_expressions.len() == lookup.input_expressions.len());
|
||||
assert!(self.usable_rows.end > 0);
|
||||
|
||||
|
@ -1000,6 +999,68 @@ impl<F: FromUniformBytes<64> + Ord> MockProver<F> {
|
|||
})
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
|
||||
let shuffle_errors =
|
||||
self.cs
|
||||
.shuffles
|
||||
.iter()
|
||||
.enumerate()
|
||||
.flat_map(|(shuffle_index, shuffle)| {
|
||||
assert!(shuffle.shuffle_expressions.len() == shuffle.input_expressions.len());
|
||||
assert!(self.usable_rows.end > 0);
|
||||
|
||||
let mut shuffle_rows: Vec<Vec<Value<F>>> = self
|
||||
.usable_rows
|
||||
.clone()
|
||||
.map(|row| {
|
||||
let t = shuffle
|
||||
.shuffle_expressions
|
||||
.iter()
|
||||
.map(move |c| load(c, row))
|
||||
.collect();
|
||||
t
|
||||
})
|
||||
.collect();
|
||||
shuffle_rows.sort();
|
||||
|
||||
let mut input_rows: Vec<(Vec<Value<F>>, usize)> = self
|
||||
.usable_rows
|
||||
.clone()
|
||||
.into_iter()
|
||||
.map(|input_row| {
|
||||
let t = shuffle
|
||||
.input_expressions
|
||||
.iter()
|
||||
.map(move |c| load(c, input_row))
|
||||
.collect();
|
||||
|
||||
(t, input_row)
|
||||
})
|
||||
.collect();
|
||||
input_rows.sort();
|
||||
|
||||
input_rows
|
||||
.iter()
|
||||
.zip(shuffle_rows.iter())
|
||||
.filter_map(|((input_value, row), shuffle_value)| {
|
||||
if shuffle_value != input_value {
|
||||
Some(VerifyFailure::Shuffle {
|
||||
name: shuffle.name.clone(),
|
||||
shuffle_index,
|
||||
location: FailureLocation::find_expressions(
|
||||
&self.cs,
|
||||
&self.regions,
|
||||
*row,
|
||||
shuffle.input_expressions.iter(),
|
||||
),
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
|
||||
let mapping = self.permutation.mapping();
|
||||
// Check that permutations preserve the original values of the cells.
|
||||
let perm_errors = {
|
||||
|
@ -1050,6 +1111,7 @@ impl<F: FromUniformBytes<64> + Ord> MockProver<F> {
|
|||
.chain(gate_errors)
|
||||
.chain(lookup_errors)
|
||||
.chain(perm_errors)
|
||||
.chain(shuffle_errors)
|
||||
.collect();
|
||||
if errors.is_empty() {
|
||||
Ok(())
|
||||
|
@ -1235,6 +1297,35 @@ impl<F: FromUniformBytes<64> + Ord> MockProver<F> {
|
|||
.collect::<Vec<_>>()
|
||||
});
|
||||
|
||||
let load = |expression: &Expression<F>, row| {
|
||||
expression.evaluate_lazy(
|
||||
&|scalar| Value::Real(scalar),
|
||||
&|_| panic!("virtual selectors are removed during optimization"),
|
||||
&|query| {
|
||||
self.fixed[query.column_index]
|
||||
[(row as i32 + n + query.rotation.0) as usize % n as usize]
|
||||
.into()
|
||||
},
|
||||
&|query| {
|
||||
self.advice[query.column_index]
|
||||
[(row as i32 + n + query.rotation.0) as usize % n as usize]
|
||||
.into()
|
||||
},
|
||||
&|query| {
|
||||
Value::Real(
|
||||
self.instance[query.column_index]
|
||||
[(row as i32 + n + query.rotation.0) as usize % n as usize],
|
||||
)
|
||||
},
|
||||
&|challenge| Value::Real(self.challenges[challenge.index()]),
|
||||
&|a| -a,
|
||||
&|a, b| a + b,
|
||||
&|a, b| a * b,
|
||||
&|a, scalar| a * scalar,
|
||||
&Value::Real(F::ZERO),
|
||||
)
|
||||
};
|
||||
|
||||
let mut cached_table = Vec::new();
|
||||
let mut cached_table_identifier = Vec::new();
|
||||
// Check that all lookups exist in their respective tables.
|
||||
|
@ -1244,35 +1335,6 @@ impl<F: FromUniformBytes<64> + Ord> MockProver<F> {
|
|||
.iter()
|
||||
.enumerate()
|
||||
.flat_map(|(lookup_index, lookup)| {
|
||||
let load = |expression: &Expression<F>, row| {
|
||||
expression.evaluate_lazy(
|
||||
&|scalar| Value::Real(scalar),
|
||||
&|_| panic!("virtual selectors are removed during optimization"),
|
||||
&|query| {
|
||||
self.fixed[query.column_index]
|
||||
[(row as i32 + n + query.rotation.0) as usize % n as usize]
|
||||
.into()
|
||||
},
|
||||
&|query| {
|
||||
self.advice[query.column_index]
|
||||
[(row as i32 + n + query.rotation.0) as usize % n as usize]
|
||||
.into()
|
||||
},
|
||||
&|query| {
|
||||
Value::Real(
|
||||
self.instance[query.column_index]
|
||||
[(row as i32 + n + query.rotation.0) as usize % n as usize],
|
||||
)
|
||||
},
|
||||
&|challenge| Value::Real(self.challenges[challenge.index()]),
|
||||
&|a| -a,
|
||||
&|a, b| a + b,
|
||||
&|a, b| a * b,
|
||||
&|a, scalar| a * scalar,
|
||||
&Value::Real(F::ZERO),
|
||||
)
|
||||
};
|
||||
|
||||
assert!(lookup.table_expressions.len() == lookup.input_expressions.len());
|
||||
assert!(self.usable_rows.end > 0);
|
||||
|
||||
|
@ -1360,6 +1422,68 @@ impl<F: FromUniformBytes<64> + Ord> MockProver<F> {
|
|||
})
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
|
||||
let shuffle_errors =
|
||||
self.cs
|
||||
.shuffles
|
||||
.iter()
|
||||
.enumerate()
|
||||
.flat_map(|(shuffle_index, shuffle)| {
|
||||
assert!(shuffle.shuffle_expressions.len() == shuffle.input_expressions.len());
|
||||
assert!(self.usable_rows.end > 0);
|
||||
|
||||
let mut shuffle_rows: Vec<Vec<Value<F>>> = self
|
||||
.usable_rows
|
||||
.clone()
|
||||
.map(|row| {
|
||||
let t = shuffle
|
||||
.shuffle_expressions
|
||||
.iter()
|
||||
.map(move |c| load(c, row))
|
||||
.collect();
|
||||
t
|
||||
})
|
||||
.collect();
|
||||
shuffle_rows.sort();
|
||||
|
||||
let mut input_rows: Vec<(Vec<Value<F>>, usize)> = self
|
||||
.usable_rows
|
||||
.clone()
|
||||
.into_iter()
|
||||
.map(|input_row| {
|
||||
let t = shuffle
|
||||
.input_expressions
|
||||
.iter()
|
||||
.map(move |c| load(c, input_row))
|
||||
.collect();
|
||||
|
||||
(t, input_row)
|
||||
})
|
||||
.collect();
|
||||
input_rows.sort();
|
||||
|
||||
input_rows
|
||||
.iter()
|
||||
.zip(shuffle_rows.iter())
|
||||
.filter_map(|((input_value, row), shuffle_value)| {
|
||||
if shuffle_value != input_value {
|
||||
Some(VerifyFailure::Shuffle {
|
||||
name: shuffle.name.clone(),
|
||||
shuffle_index,
|
||||
location: FailureLocation::find_expressions(
|
||||
&self.cs,
|
||||
&self.regions,
|
||||
*row,
|
||||
shuffle.input_expressions.iter(),
|
||||
),
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
|
||||
let mapping = self.permutation.mapping();
|
||||
// Check that permutations preserve the original values of the cells.
|
||||
let perm_errors = {
|
||||
|
@ -1410,6 +1534,7 @@ impl<F: FromUniformBytes<64> + Ord> MockProver<F> {
|
|||
.chain(gate_errors)
|
||||
.chain(lookup_errors)
|
||||
.chain(perm_errors)
|
||||
.chain(shuffle_errors)
|
||||
.collect();
|
||||
if errors.is_empty() {
|
||||
Ok(())
|
||||
|
|
|
@ -178,6 +178,28 @@ pub enum VerifyFailure {
|
|||
/// lookup is active on a row adjacent to an unrelated region.
|
||||
location: FailureLocation,
|
||||
},
|
||||
/// A shuffle input did not exist in its corresponding map.
|
||||
Shuffle {
|
||||
/// The name of the lookup that is not satisfied.
|
||||
name: String,
|
||||
/// The index of the lookup that is not satisfied. These indices are assigned in
|
||||
/// the order in which `ConstraintSystem::lookup` is called during
|
||||
/// `Circuit::configure`.
|
||||
shuffle_index: usize,
|
||||
/// The location at which the lookup is not satisfied.
|
||||
///
|
||||
/// `FailureLocation::InRegion` is most common, and may be due to the intentional
|
||||
/// use of a lookup (if its inputs are conditional on a complex selector), or an
|
||||
/// unintentional lookup constraint that overlaps the region (indicating that the
|
||||
/// lookup's inputs should be made conditional).
|
||||
///
|
||||
/// `FailureLocation::OutsideRegion` is uncommon, and could mean that:
|
||||
/// - The input expressions do not correctly constrain a default value that exists
|
||||
/// in the table when the lookup is not being used.
|
||||
/// - The input expressions use a column queried at a non-zero `Rotation`, and the
|
||||
/// lookup is active on a row adjacent to an unrelated region.
|
||||
location: FailureLocation,
|
||||
},
|
||||
/// A permutation did not preserve the original value of a cell.
|
||||
Permutation {
|
||||
/// The column in which this permutation is not satisfied.
|
||||
|
@ -241,6 +263,17 @@ impl fmt::Display for VerifyFailure {
|
|||
name, lookup_index, location
|
||||
)
|
||||
}
|
||||
Self::Shuffle {
|
||||
name,
|
||||
shuffle_index,
|
||||
location,
|
||||
} => {
|
||||
write!(
|
||||
f,
|
||||
"Shuffle {}(index: {}) is not satisfied {}",
|
||||
name, shuffle_index, location
|
||||
)
|
||||
}
|
||||
Self::Permutation { column, location } => {
|
||||
write!(
|
||||
f,
|
||||
|
@ -611,6 +644,171 @@ fn render_lookup<F: Field>(
|
|||
}
|
||||
}
|
||||
|
||||
fn render_shuffle<F: Field>(
|
||||
prover: &MockProver<F>,
|
||||
name: &str,
|
||||
shuffle_index: usize,
|
||||
location: &FailureLocation,
|
||||
) {
|
||||
let n = prover.n as i32;
|
||||
let cs = &prover.cs;
|
||||
let shuffle = &cs.shuffles[shuffle_index];
|
||||
|
||||
// Get the absolute row on which the shuffle's inputs are being queried, so we can
|
||||
// fetch the input values.
|
||||
let row = match location {
|
||||
FailureLocation::InRegion { region, offset } => {
|
||||
prover.regions[region.index].rows.unwrap().0 + offset
|
||||
}
|
||||
FailureLocation::OutsideRegion { row } => *row,
|
||||
} as i32;
|
||||
|
||||
let shuffle_columns = shuffle.shuffle_expressions.iter().map(|expr| {
|
||||
expr.evaluate(
|
||||
&|f| format! {"Const: {:#?}", f},
|
||||
&|s| format! {"S{}", s.0},
|
||||
&|query| {
|
||||
format!(
|
||||
"{:?}",
|
||||
prover
|
||||
.cs
|
||||
.general_column_annotations
|
||||
.get(&metadata::Column::from((Any::Fixed, query.column_index)))
|
||||
.cloned()
|
||||
.unwrap_or_else(|| format!("F{}", query.column_index()))
|
||||
)
|
||||
},
|
||||
&|query| {
|
||||
format!(
|
||||
"{:?}",
|
||||
prover
|
||||
.cs
|
||||
.general_column_annotations
|
||||
.get(&metadata::Column::from((Any::advice(), query.column_index)))
|
||||
.cloned()
|
||||
.unwrap_or_else(|| format!("A{}", query.column_index()))
|
||||
)
|
||||
},
|
||||
&|query| {
|
||||
format!(
|
||||
"{:?}",
|
||||
prover
|
||||
.cs
|
||||
.general_column_annotations
|
||||
.get(&metadata::Column::from((Any::Instance, query.column_index)))
|
||||
.cloned()
|
||||
.unwrap_or_else(|| format!("I{}", query.column_index()))
|
||||
)
|
||||
},
|
||||
&|challenge| format! {"C{}", challenge.index()},
|
||||
&|query| format! {"-{}", query},
|
||||
&|a, b| format! {"{} + {}", a,b},
|
||||
&|a, b| format! {"{} * {}", a,b},
|
||||
&|a, b| format! {"{} * {:?}", a, b},
|
||||
)
|
||||
});
|
||||
|
||||
fn cell_value<'a, F: Field, Q: Into<AnyQuery> + Copy>(
|
||||
load: impl Fn(Q) -> Value<F> + 'a,
|
||||
) -> impl Fn(Q) -> BTreeMap<metadata::VirtualCell, String> + 'a {
|
||||
move |query| {
|
||||
let AnyQuery {
|
||||
column_type,
|
||||
column_index,
|
||||
rotation,
|
||||
..
|
||||
} = query.into();
|
||||
Some((
|
||||
((column_type, column_index).into(), rotation.0).into(),
|
||||
match load(query) {
|
||||
Value::Real(v) => util::format_value(v),
|
||||
Value::Poison => unreachable!(),
|
||||
},
|
||||
))
|
||||
.into_iter()
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
eprintln!("error: input does not exist in shuffle");
|
||||
eprint!(" (");
|
||||
for i in 0..shuffle.input_expressions.len() {
|
||||
eprint!("{}L{}", if i == 0 { "" } else { ", " }, i);
|
||||
}
|
||||
eprint!(") <-> (");
|
||||
for (i, column) in shuffle_columns.enumerate() {
|
||||
eprint!("{}{}", if i == 0 { "" } else { ", " }, column);
|
||||
}
|
||||
eprintln!(")");
|
||||
|
||||
eprintln!();
|
||||
eprintln!(" Shuffle '{}' inputs:", name);
|
||||
for (i, input) in shuffle.input_expressions.iter().enumerate() {
|
||||
// Fetch the cell values (since we don't store them in VerifyFailure::Shuffle).
|
||||
let cell_values = input.evaluate(
|
||||
&|_| BTreeMap::default(),
|
||||
&|_| panic!("virtual selectors are removed during optimization"),
|
||||
&cell_value(&util::load(n, row, &cs.fixed_queries, &prover.fixed)),
|
||||
&cell_value(&util::load(n, row, &cs.advice_queries, &prover.advice)),
|
||||
&cell_value(&util::load_instance(
|
||||
n,
|
||||
row,
|
||||
&cs.instance_queries,
|
||||
&prover.instance,
|
||||
)),
|
||||
&|_| BTreeMap::default(),
|
||||
&|a| a,
|
||||
&|mut a, mut b| {
|
||||
a.append(&mut b);
|
||||
a
|
||||
},
|
||||
&|mut a, mut b| {
|
||||
a.append(&mut b);
|
||||
a
|
||||
},
|
||||
&|a, _| a,
|
||||
);
|
||||
|
||||
// Collect the necessary rendering information:
|
||||
// - The columns involved in this constraint.
|
||||
// - How many cells are in each column.
|
||||
// - The grid of cell values, indexed by rotation.
|
||||
let mut columns = BTreeMap::<metadata::Column, usize>::default();
|
||||
let mut layout = BTreeMap::<i32, BTreeMap<metadata::Column, _>>::default();
|
||||
for (i, (cell, _)) in cell_values.iter().enumerate() {
|
||||
*columns.entry(cell.column).or_default() += 1;
|
||||
layout
|
||||
.entry(cell.rotation)
|
||||
.or_default()
|
||||
.entry(cell.column)
|
||||
.or_insert(format!("x{}", i));
|
||||
}
|
||||
|
||||
if i != 0 {
|
||||
eprintln!();
|
||||
}
|
||||
eprintln!(
|
||||
" Sh{} = {}",
|
||||
i,
|
||||
emitter::expression_to_string(input, &layout)
|
||||
);
|
||||
eprintln!(" ^");
|
||||
|
||||
emitter::render_cell_layout(" | ", location, &columns, &layout, |_, rotation| {
|
||||
if rotation == 0 {
|
||||
eprint!(" <--{{ Shuffle '{}' inputs queried here", name);
|
||||
}
|
||||
});
|
||||
|
||||
// Print the map from local variables to assigned values.
|
||||
eprintln!(" |");
|
||||
eprintln!(" | Assigned cell values:");
|
||||
for (i, (_, value)) in cell_values.iter().enumerate() {
|
||||
eprintln!(" | x{} = {}", i, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl VerifyFailure {
|
||||
/// Emits this failure in pretty-printed format to stderr.
|
||||
pub(super) fn emit<F: Field>(&self, prover: &MockProver<F>) {
|
||||
|
@ -641,6 +839,11 @@ impl VerifyFailure {
|
|||
lookup_index,
|
||||
location,
|
||||
} => render_lookup(prover, name, *lookup_index, location),
|
||||
Self::Shuffle {
|
||||
name,
|
||||
shuffle_index,
|
||||
location,
|
||||
} => render_shuffle(prover, name, *shuffle_index, location),
|
||||
_ => eprintln!("{}", self),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -27,6 +27,7 @@ mod evaluation;
|
|||
mod keygen;
|
||||
mod lookup;
|
||||
pub mod permutation;
|
||||
mod shuffle;
|
||||
mod vanishing;
|
||||
|
||||
mod prover;
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use super::{lookup, permutation, Assigned, Error};
|
||||
use super::{lookup, permutation, shuffle, Assigned, Error};
|
||||
use crate::circuit::layouter::SyncDeps;
|
||||
use crate::dev::metadata;
|
||||
use crate::{
|
||||
|
@ -1564,6 +1564,10 @@ pub struct ConstraintSystem<F: Field> {
|
|||
// input expressions and a sequence of table expressions involved in the lookup.
|
||||
pub(crate) lookups: Vec<lookup::Argument<F>>,
|
||||
|
||||
// Vector of shuffle arguments, where each corresponds to a sequence of
|
||||
// input expressions and a sequence of shuffle expressions involved in the shuffle.
|
||||
pub(crate) shuffles: Vec<shuffle::Argument<F>>,
|
||||
|
||||
// List of indexes of Fixed columns which are associated to a circuit-general Column tied to their annotation.
|
||||
pub(crate) general_column_annotations: HashMap<metadata::Column, String>,
|
||||
|
||||
|
@ -1590,6 +1594,7 @@ pub struct PinnedConstraintSystem<'a, F: Field> {
|
|||
fixed_queries: &'a Vec<(Column<Fixed>, Rotation)>,
|
||||
permutation: &'a permutation::Argument,
|
||||
lookups: &'a Vec<lookup::Argument<F>>,
|
||||
shuffles: &'a Vec<shuffle::Argument<F>>,
|
||||
constants: &'a Vec<Column<Fixed>>,
|
||||
minimum_degree: &'a Option<usize>,
|
||||
}
|
||||
|
@ -1650,6 +1655,7 @@ impl<F: Field> Default for ConstraintSystem<F> {
|
|||
instance_queries: Vec::new(),
|
||||
permutation: permutation::Argument::new(),
|
||||
lookups: Vec::new(),
|
||||
shuffles: Vec::new(),
|
||||
general_column_annotations: HashMap::new(),
|
||||
constants: vec![],
|
||||
minimum_degree: None,
|
||||
|
@ -1676,6 +1682,7 @@ impl<F: Field> ConstraintSystem<F> {
|
|||
instance_queries: &self.instance_queries,
|
||||
permutation: &self.permutation,
|
||||
lookups: &self.lookups,
|
||||
shuffles: &self.shuffles,
|
||||
constants: &self.constants,
|
||||
minimum_degree: &self.minimum_degree,
|
||||
}
|
||||
|
@ -1756,6 +1763,29 @@ impl<F: Field> ConstraintSystem<F> {
|
|||
index
|
||||
}
|
||||
|
||||
/// Add a shuffle argument for some input expressions and table expressions.
|
||||
pub fn shuffle<S: AsRef<str>>(
|
||||
&mut self,
|
||||
name: S,
|
||||
shuffle_map: impl FnOnce(&mut VirtualCells<'_, F>) -> Vec<(Expression<F>, Expression<F>)>,
|
||||
) -> usize {
|
||||
let mut cells = VirtualCells::new(self);
|
||||
let shuffle_map = shuffle_map(&mut cells)
|
||||
.into_iter()
|
||||
.map(|(mut input, mut table)| {
|
||||
input.query_cells(&mut cells);
|
||||
table.query_cells(&mut cells);
|
||||
(input, table)
|
||||
})
|
||||
.collect();
|
||||
let index = self.shuffles.len();
|
||||
|
||||
self.shuffles
|
||||
.push(shuffle::Argument::new(name.as_ref(), shuffle_map));
|
||||
|
||||
index
|
||||
}
|
||||
|
||||
fn query_fixed_index(&mut self, column: Column<Fixed>, at: Rotation) -> usize {
|
||||
// Return existing query, if it exists
|
||||
for (index, fixed_query) in self.fixed_queries.iter().enumerate() {
|
||||
|
@ -2017,6 +2047,15 @@ impl<F: Field> ConstraintSystem<F> {
|
|||
replace_selectors(expr, &selector_replacements, true);
|
||||
}
|
||||
|
||||
for expr in self.shuffles.iter_mut().flat_map(|shuffle| {
|
||||
shuffle
|
||||
.input_expressions
|
||||
.iter_mut()
|
||||
.chain(shuffle.shuffle_expressions.iter_mut())
|
||||
}) {
|
||||
replace_selectors(expr, &selector_replacements, true);
|
||||
}
|
||||
|
||||
(self, polys)
|
||||
}
|
||||
|
||||
|
@ -2178,6 +2217,17 @@ impl<F: Field> ConstraintSystem<F> {
|
|||
.unwrap_or(1),
|
||||
);
|
||||
|
||||
// The lookup argument also serves alongside the gates and must be accounted
|
||||
// for.
|
||||
degree = std::cmp::max(
|
||||
degree,
|
||||
self.shuffles
|
||||
.iter()
|
||||
.map(|l| l.required_degree())
|
||||
.max()
|
||||
.unwrap_or(1),
|
||||
);
|
||||
|
||||
// Account for each gate to ensure our quotient polynomial is the
|
||||
// correct degree and that our extended domain is the right size.
|
||||
degree = std::cmp::max(
|
||||
|
@ -2306,6 +2356,11 @@ impl<F: Field> ConstraintSystem<F> {
|
|||
&self.lookups
|
||||
}
|
||||
|
||||
/// Returns shuffle arguments
|
||||
pub fn shuffles(&self) -> &Vec<shuffle::Argument<F>> {
|
||||
&self.shuffles
|
||||
}
|
||||
|
||||
/// Returns constants
|
||||
pub fn constants(&self) -> &Vec<Column<Fixed>> {
|
||||
&self.constants
|
||||
|
|
|
@ -26,7 +26,7 @@ use std::{
|
|||
ops::{Index, Mul, MulAssign},
|
||||
};
|
||||
|
||||
use super::{ConstraintSystem, Expression};
|
||||
use super::{shuffle, ConstraintSystem, Expression};
|
||||
|
||||
/// Return the index in the polynomial of size `isize` after rotation `rot`.
|
||||
fn get_rotation_idx(idx: usize, rot: i32, rot_scale: i32, isize: i32) -> usize {
|
||||
|
@ -186,6 +186,8 @@ pub struct Evaluator<C: CurveAffine> {
|
|||
pub custom_gates: GraphEvaluator<C>,
|
||||
/// Lookups evalution
|
||||
pub lookups: Vec<GraphEvaluator<C>>,
|
||||
/// Shuffle evalution
|
||||
pub shuffles: Vec<GraphEvaluator<C>>,
|
||||
}
|
||||
|
||||
/// GraphEvaluator
|
||||
|
@ -273,6 +275,39 @@ impl<C: CurveAffine> Evaluator<C> {
|
|||
ev.lookups.push(graph);
|
||||
}
|
||||
|
||||
// Shuffles
|
||||
for shuffle in cs.shuffles.iter() {
|
||||
let evaluate_lc = |expressions: &Vec<Expression<_>>, graph: &mut GraphEvaluator<C>| {
|
||||
let parts = expressions
|
||||
.iter()
|
||||
.map(|expr| graph.add_expression(expr))
|
||||
.collect();
|
||||
graph.add_calculation(Calculation::Horner(
|
||||
ValueSource::Constant(0),
|
||||
parts,
|
||||
ValueSource::Theta(),
|
||||
))
|
||||
};
|
||||
|
||||
let mut graph_input = GraphEvaluator::default();
|
||||
let compressed_input_coset = evaluate_lc(&shuffle.input_expressions, &mut graph_input);
|
||||
let _ = graph_input.add_calculation(Calculation::Add(
|
||||
compressed_input_coset,
|
||||
ValueSource::Gamma(),
|
||||
));
|
||||
|
||||
let mut graph_shuffle = GraphEvaluator::default();
|
||||
let compressed_shuffle_coset =
|
||||
evaluate_lc(&shuffle.shuffle_expressions, &mut graph_shuffle);
|
||||
let _ = graph_shuffle.add_calculation(Calculation::Add(
|
||||
compressed_shuffle_coset,
|
||||
ValueSource::Gamma(),
|
||||
));
|
||||
|
||||
ev.shuffles.push(graph_input);
|
||||
ev.shuffles.push(graph_shuffle);
|
||||
}
|
||||
|
||||
ev
|
||||
}
|
||||
|
||||
|
@ -288,6 +323,7 @@ impl<C: CurveAffine> Evaluator<C> {
|
|||
gamma: C::ScalarExt,
|
||||
theta: C::ScalarExt,
|
||||
lookups: &[Vec<lookup::prover::Committed<C>>],
|
||||
shuffles: &[Vec<shuffle::prover::Committed<C>>],
|
||||
permutations: &[permutation::prover::Committed<C>],
|
||||
) -> Polynomial<C::ScalarExt, ExtendedLagrangeCoeff> {
|
||||
let domain = &pk.vk.domain;
|
||||
|
@ -326,10 +362,11 @@ impl<C: CurveAffine> Evaluator<C> {
|
|||
|
||||
// Core expression evaluations
|
||||
let num_threads = multicore::current_num_threads();
|
||||
for (((advice, instance), lookups), permutation) in advice
|
||||
for ((((advice, instance), lookups), shuffles), permutation) in advice
|
||||
.iter()
|
||||
.zip(instance.iter())
|
||||
.zip(lookups.iter())
|
||||
.zip(shuffles.iter())
|
||||
.zip(permutations.iter())
|
||||
{
|
||||
// Custom gates
|
||||
|
@ -517,6 +554,68 @@ impl<C: CurveAffine> Evaluator<C> {
|
|||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Shuffle constraints
|
||||
for (n, shuffle) in shuffles.iter().enumerate() {
|
||||
let product_coset = pk.vk.domain.coeff_to_extended(shuffle.product_poly.clone());
|
||||
|
||||
// Shuffle constraints
|
||||
parallelize(&mut values, |values, start| {
|
||||
let input_evaluator = &self.shuffles[2 * n];
|
||||
let shuffle_evaluator = &self.shuffles[2 * n + 1];
|
||||
let mut eval_data_input = shuffle_evaluator.instance();
|
||||
let mut eval_data_shuffle = shuffle_evaluator.instance();
|
||||
for (i, value) in values.iter_mut().enumerate() {
|
||||
let idx = start + i;
|
||||
|
||||
let input_value = input_evaluator.evaluate(
|
||||
&mut eval_data_input,
|
||||
fixed,
|
||||
advice,
|
||||
instance,
|
||||
challenges,
|
||||
&beta,
|
||||
&gamma,
|
||||
&theta,
|
||||
&y,
|
||||
&C::ScalarExt::ZERO,
|
||||
idx,
|
||||
rot_scale,
|
||||
isize,
|
||||
);
|
||||
|
||||
let shuffle_value = shuffle_evaluator.evaluate(
|
||||
&mut eval_data_shuffle,
|
||||
fixed,
|
||||
advice,
|
||||
instance,
|
||||
challenges,
|
||||
&beta,
|
||||
&gamma,
|
||||
&theta,
|
||||
&y,
|
||||
&C::ScalarExt::ZERO,
|
||||
idx,
|
||||
rot_scale,
|
||||
isize,
|
||||
);
|
||||
|
||||
let r_next = get_rotation_idx(idx, 1, rot_scale, isize);
|
||||
|
||||
// l_0(X) * (1 - z(X)) = 0
|
||||
*value = *value * y + ((one - product_coset[idx]) * l0[idx]);
|
||||
// l_last(X) * (z(X)^2 - z(X)) = 0
|
||||
*value = *value * y
|
||||
+ ((product_coset[idx] * product_coset[idx] - product_coset[idx])
|
||||
* l_last[idx]);
|
||||
// (1 - (l_last(X) + l_blind(X))) * (z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma)) = 0
|
||||
*value = *value * y
|
||||
+ l_active_row[idx]
|
||||
* (product_coset[r_next] * shuffle_value
|
||||
- product_coset[idx] * input_value)
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
values
|
||||
}
|
||||
|
|
|
@ -15,8 +15,8 @@ use super::{
|
|||
Advice, Any, Assignment, Challenge, Circuit, Column, ConstraintSystem, FirstPhase, Fixed,
|
||||
FloorPlanner, Instance, Selector,
|
||||
},
|
||||
lookup, permutation, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX,
|
||||
ChallengeY, Error, Expression, ProvingKey,
|
||||
lookup, permutation, shuffle, vanishing, ChallengeBeta, ChallengeGamma, ChallengeTheta,
|
||||
ChallengeX, ChallengeY, Error, Expression, ProvingKey,
|
||||
};
|
||||
use crate::circuit::layouter::SyncDeps;
|
||||
use crate::{
|
||||
|
@ -476,6 +476,34 @@ where
|
|||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
let shuffles: Vec<Vec<shuffle::prover::Committed<Scheme::Curve>>> = instance
|
||||
.iter()
|
||||
.zip(advice.iter())
|
||||
.map(|(instance, advice)| -> Result<Vec<_>, _> {
|
||||
// Compress expressions for each shuffle
|
||||
pk.vk
|
||||
.cs
|
||||
.shuffles
|
||||
.iter()
|
||||
.map(|shuffle| {
|
||||
shuffle.commit_product(
|
||||
pk,
|
||||
params,
|
||||
domain,
|
||||
theta,
|
||||
gamma,
|
||||
&advice.advice_polys,
|
||||
&pk.fixed_values,
|
||||
&instance.instance_values,
|
||||
&challenges,
|
||||
&mut rng,
|
||||
transcript,
|
||||
)
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
// Commit to the vanishing argument's random polynomial for blinding h(x_3)
|
||||
let vanishing = vanishing::Argument::commit(params, domain, &mut rng, transcript)?;
|
||||
|
||||
|
@ -518,6 +546,7 @@ where
|
|||
*gamma,
|
||||
*theta,
|
||||
&lookups,
|
||||
&shuffles,
|
||||
&permutations,
|
||||
);
|
||||
|
||||
|
@ -605,12 +634,24 @@ where
|
|||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
// Evaluate the shuffles, if any, at omega^i x.
|
||||
let shuffles: Vec<Vec<shuffle::prover::Evaluated<Scheme::Curve>>> = shuffles
|
||||
.into_iter()
|
||||
.map(|shuffles| -> Result<Vec<_>, _> {
|
||||
shuffles
|
||||
.into_iter()
|
||||
.map(|p| p.evaluate(pk, x, transcript))
|
||||
.collect::<Result<Vec<_>, _>>()
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
let instances = instance
|
||||
.iter()
|
||||
.zip(advice.iter())
|
||||
.zip(permutations.iter())
|
||||
.zip(lookups.iter())
|
||||
.flat_map(|(((instance, advice), permutation), lookups)| {
|
||||
.zip(shuffles.iter())
|
||||
.flat_map(|((((instance, advice), permutation), lookups), shuffles)| {
|
||||
iter::empty()
|
||||
.chain(
|
||||
P::QUERY_INSTANCE
|
||||
|
@ -637,6 +678,7 @@ where
|
|||
)
|
||||
.chain(permutation.open(pk, x))
|
||||
.chain(lookups.iter().flat_map(move |p| p.open(pk, x)).into_iter())
|
||||
.chain(shuffles.iter().flat_map(move |p| p.open(pk, x)).into_iter())
|
||||
})
|
||||
.chain(
|
||||
pk.vk
|
||||
|
|
|
@ -0,0 +1,67 @@
|
|||
use super::circuit::Expression;
|
||||
use ff::Field;
|
||||
use std::fmt::{self, Debug};
|
||||
|
||||
pub(crate) mod prover;
|
||||
pub(crate) mod verifier;
|
||||
|
||||
/// A shuffle argument: asserts that the multiset of values taken by
/// `input_expressions` over the usable rows equals the multiset of values
/// taken by `shuffle_expressions` (i.e. one is a permutation of the other).
#[derive(Clone)]
pub struct Argument<F: Field> {
    // Human-readable label for this argument (used for identification/reporting).
    pub(crate) name: String,
    // Input expressions a_i(X) whose row-wise values form the left multiset.
    pub(crate) input_expressions: Vec<Expression<F>>,
    // Shuffle expressions s_i(X): the claimed permutation of the inputs.
    pub(crate) shuffle_expressions: Vec<Expression<F>>,
}
|
||||
|
||||
impl<F: Field> Debug for Argument<F> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("Argument")
|
||||
.field("input_expressions", &self.input_expressions)
|
||||
.field("shuffle_expressions", &self.shuffle_expressions)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<F: Field> Argument<F> {
|
||||
/// Constructs a new shuffle argument.
|
||||
///
|
||||
/// `shuffle` is a sequence of `(input, shuffle)` tuples.
|
||||
pub fn new<S: AsRef<str>>(name: S, shuffle: Vec<(Expression<F>, Expression<F>)>) -> Self {
|
||||
let (input_expressions, shuffle_expressions) = shuffle.into_iter().unzip();
|
||||
Argument {
|
||||
name: name.as_ref().to_string(),
|
||||
input_expressions,
|
||||
shuffle_expressions,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn required_degree(&self) -> usize {
|
||||
assert_eq!(self.input_expressions.len(), self.shuffle_expressions.len());
|
||||
|
||||
let mut input_degree = 1;
|
||||
for expr in self.input_expressions.iter() {
|
||||
input_degree = std::cmp::max(input_degree, expr.degree());
|
||||
}
|
||||
let mut shuffle_degree = 1;
|
||||
for expr in self.shuffle_expressions.iter() {
|
||||
shuffle_degree = std::cmp::max(shuffle_degree, expr.degree());
|
||||
}
|
||||
|
||||
// (1 - (l_last + l_blind)) (z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma))
|
||||
std::cmp::max(2 + shuffle_degree, 2 + input_degree)
|
||||
}
|
||||
|
||||
/// Returns input of this argument
|
||||
pub fn input_expressions(&self) -> &Vec<Expression<F>> {
|
||||
&self.input_expressions
|
||||
}
|
||||
|
||||
/// Returns table of this argument
|
||||
pub fn shuffle_expressions(&self) -> &Vec<Expression<F>> {
|
||||
&self.shuffle_expressions
|
||||
}
|
||||
|
||||
/// Returns name of this argument
|
||||
pub fn name(&self) -> &str {
|
||||
&self.name
|
||||
}
|
||||
}
|
|
@ -0,0 +1,255 @@
|
|||
use super::super::{
|
||||
circuit::Expression, ChallengeBeta, ChallengeGamma, ChallengeTheta, ChallengeX, Error,
|
||||
ProvingKey,
|
||||
};
|
||||
use super::Argument;
|
||||
use crate::plonk::evaluation::evaluate;
|
||||
use crate::{
|
||||
arithmetic::{eval_polynomial, parallelize, CurveAffine},
|
||||
poly::{
|
||||
commitment::{Blind, Params},
|
||||
Coeff, EvaluationDomain, ExtendedLagrangeCoeff, LagrangeCoeff, Polynomial, ProverQuery,
|
||||
Rotation,
|
||||
},
|
||||
transcript::{EncodedChallenge, TranscriptWrite},
|
||||
};
|
||||
use ff::WithSmallOrderMulGroup;
|
||||
use group::{
|
||||
ff::{BatchInvert, Field},
|
||||
Curve,
|
||||
};
|
||||
use rand_core::RngCore;
|
||||
use std::{any::TypeId, convert::TryInto, num::ParseIntError, ops::Index};
|
||||
use std::{
|
||||
collections::BTreeMap,
|
||||
iter,
|
||||
ops::{Mul, MulAssign},
|
||||
};
|
||||
|
||||
/// The \theta-compressed input and shuffle expressions of one shuffle
/// argument, evaluated over the domain (Lagrange basis).
#[derive(Debug)]
struct Compressed<C: CurveAffine> {
    // Compressed input expression a(X) = \sum_i \theta^{m-1-i} A_i(X).
    input_expression: Polynomial<C::Scalar, LagrangeCoeff>,
    // Compressed shuffle expression s(X) = \sum_i \theta^{m-1-i} S_i(X).
    shuffle_expression: Polynomial<C::Scalar, LagrangeCoeff>,
}
|
||||
|
||||
/// The prover's committed grand-product polynomial for one shuffle argument.
#[derive(Debug)]
pub(in crate::plonk) struct Committed<C: CurveAffine> {
    // Grand product z(X) in coefficient form (already committed to).
    pub(in crate::plonk) product_poly: Polynomial<C::Scalar, Coeff>,
    // Blinding factor used for the commitment to `product_poly`.
    product_blind: Blind<C::Scalar>,
}
|
||||
|
||||
/// A shuffle commitment whose evaluations have been written to the
/// transcript; retains the committed data for the multiopen queries.
pub(in crate::plonk) struct Evaluated<C: CurveAffine> {
    // The underlying commitment, kept so `open` can produce prover queries.
    constructed: Committed<C>,
}
|
||||
|
||||
impl<F: WithSmallOrderMulGroup<3>> Argument<F> {
    /// Given a Shuffle with input expressions [A_0, A_1, ..., A_{m-1}] and table expressions
    /// [S_0, S_1, ..., S_{m-1}], this method
    /// - constructs A_compressed = \theta^{m-1} A_0 + theta^{m-2} A_1 + ... + \theta A_{m-2} + A_{m-1}
    /// and S_compressed = \theta^{m-1} S_0 + theta^{m-2} S_1 + ... + \theta S_{m-2} + S_{m-1},
    /// returning both compressed polynomials (Lagrange basis) as a `Compressed` pair.
    fn compress<'a, 'params: 'a, C, P: Params<'params, C>>(
        &self,
        pk: &ProvingKey<C>,
        params: &P,
        domain: &EvaluationDomain<C::Scalar>,
        theta: ChallengeTheta<C>,
        advice_values: &'a [Polynomial<C::Scalar, LagrangeCoeff>],
        fixed_values: &'a [Polynomial<C::Scalar, LagrangeCoeff>],
        instance_values: &'a [Polynomial<C::Scalar, LagrangeCoeff>],
        challenges: &'a [C::Scalar],
    ) -> Compressed<C>
    where
        C: CurveAffine<ScalarExt = F>,
        C::Curve: Mul<F, Output = C::Curve> + MulAssign<F>,
    {
        // Closure to get values of expressions and compress them.
        // Each expression is evaluated over all n rows, then the results are
        // folded via a running Horner evaluation in \theta.
        let compress_expressions = |expressions: &[Expression<C::Scalar>]| {
            let compressed_expression = expressions
                .iter()
                .map(|expression| {
                    pk.vk.domain.lagrange_from_vec(evaluate(
                        expression,
                        params.n() as usize,
                        1,
                        fixed_values,
                        advice_values,
                        instance_values,
                        challenges,
                    ))
                })
                .fold(domain.empty_lagrange(), |acc, expression| {
                    acc * *theta + &expression
                });
            compressed_expression
        };

        // Get values of input expressions involved in the shuffle and compress them
        let input_expression = compress_expressions(&self.input_expressions);

        // Get values of table expressions involved in the shuffle and compress them
        let shuffle_expression = compress_expressions(&self.shuffle_expressions);

        Compressed {
            input_expression,
            shuffle_expression,
        }
    }

    /// Given a Shuffle with input expressions and table expressions this method
    /// constructs the grand product polynomial over the shuffle.
    /// The grand product polynomial is used to populate the Product<C> struct.
    /// The Product<C> struct is added to the Shuffle and finally returned by the method.
    ///
    /// The commitment to z(X) is written to the transcript as a side effect.
    pub(in crate::plonk) fn commit_product<
        'a,
        'params: 'a,
        C,
        P: Params<'params, C>,
        E: EncodedChallenge<C>,
        R: RngCore,
        T: TranscriptWrite<C, E>,
    >(
        &self,
        pk: &ProvingKey<C>,
        params: &P,
        domain: &EvaluationDomain<C::Scalar>,
        theta: ChallengeTheta<C>,
        gamma: ChallengeGamma<C>,
        advice_values: &'a [Polynomial<C::Scalar, LagrangeCoeff>],
        fixed_values: &'a [Polynomial<C::Scalar, LagrangeCoeff>],
        instance_values: &'a [Polynomial<C::Scalar, LagrangeCoeff>],
        challenges: &'a [C::Scalar],
        mut rng: R,
        transcript: &mut T,
    ) -> Result<Committed<C>, Error>
    where
        C: CurveAffine<ScalarExt = F>,
        C::Curve: Mul<F, Output = C::Curve> + MulAssign<F>,
    {
        let compressed = self.compress(
            pk,
            params,
            domain,
            theta,
            advice_values,
            fixed_values,
            instance_values,
            challenges,
        );

        let blinding_factors = pk.vk.cs.blinding_factors();

        // Build the per-row factors (\gamma + a_i) / (\gamma + s_i):
        // first fill with the denominators (\gamma + s_i) ...
        let mut shuffle_product = vec![C::Scalar::ZERO; params.n() as usize];
        parallelize(&mut shuffle_product, |shuffle_product, start| {
            for (shuffle_product, shuffle_value) in shuffle_product
                .iter_mut()
                .zip(compressed.shuffle_expression[start..].iter())
            {
                *shuffle_product = *gamma + shuffle_value;
            }
        });

        // ... invert them all at once (batch inversion is much cheaper than
        // per-element inversion) ...
        shuffle_product.iter_mut().batch_invert();

        // ... then multiply each inverse by its numerator (\gamma + a_i).
        parallelize(&mut shuffle_product, |product, start| {
            for (i, product) in product.iter_mut().enumerate() {
                let i = i + start;
                *product *= &(*gamma + compressed.input_expression[i]);
            }
        });

        // Compute the evaluations of the shuffle product polynomial
        // over our domain, starting with z[0] = 1
        let z = iter::once(C::Scalar::ONE)
            .chain(shuffle_product)
            .scan(C::Scalar::ONE, |state, cur| {
                *state *= &cur;
                Some(*state)
            })
            // Take all rows including the "last" row which should
            // be a boolean (and ideally 1, else soundness is broken)
            .take(params.n() as usize - blinding_factors)
            // Chain random blinding factors.
            .chain((0..blinding_factors).map(|_| C::Scalar::random(&mut rng)))
            .collect::<Vec<_>>();
        assert_eq!(z.len(), params.n() as usize);
        let z = pk.vk.domain.lagrange_from_vec(z);

        #[cfg(feature = "sanity-checks")]
        {
            // While in Lagrange basis, check that product is correctly constructed
            let u = (params.n() as usize) - (blinding_factors + 1);
            assert_eq!(z[0], C::Scalar::ONE);
            // Each usable row must satisfy
            // z[i+1] * (\gamma + s_i) == z[i] * (\gamma + a_i).
            for i in 0..u {
                let mut left = z[i + 1];
                let input_value = &compressed.input_expression[i];
                let shuffle_value = &compressed.shuffle_expression[i];
                left *= &(*gamma + shuffle_value);
                let mut right = z[i];
                right *= &(*gamma + input_value);
                assert_eq!(left, right);
            }
            // The product must telescope back to 1 on the last usable row.
            assert_eq!(z[u], C::Scalar::ONE);
        }

        // Commit to z(X) with a fresh blinding factor, then move it to the
        // coefficient basis for later evaluation/opening.
        let product_blind = Blind(C::Scalar::random(rng));
        let product_commitment = params.commit_lagrange(&z, product_blind).to_affine();
        let z = pk.vk.domain.lagrange_to_coeff(z);

        // Hash product commitment
        transcript.write_point(product_commitment)?;

        Ok(Committed::<C> {
            product_poly: z,
            product_blind,
        })
    }
}
|
||||
|
||||
impl<C: CurveAffine> Committed<C> {
|
||||
pub(in crate::plonk) fn evaluate<E: EncodedChallenge<C>, T: TranscriptWrite<C, E>>(
|
||||
self,
|
||||
pk: &ProvingKey<C>,
|
||||
x: ChallengeX<C>,
|
||||
transcript: &mut T,
|
||||
) -> Result<Evaluated<C>, Error> {
|
||||
let domain = &pk.vk.domain;
|
||||
let x_next = domain.rotate_omega(*x, Rotation::next());
|
||||
|
||||
let product_eval = eval_polynomial(&self.product_poly, *x);
|
||||
let product_next_eval = eval_polynomial(&self.product_poly, x_next);
|
||||
|
||||
// Hash each advice evaluation
|
||||
for eval in iter::empty()
|
||||
.chain(Some(product_eval))
|
||||
.chain(Some(product_next_eval))
|
||||
{
|
||||
transcript.write_scalar(eval)?;
|
||||
}
|
||||
|
||||
Ok(Evaluated { constructed: self })
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: CurveAffine> Evaluated<C> {
|
||||
pub(in crate::plonk) fn open<'a>(
|
||||
&'a self,
|
||||
pk: &'a ProvingKey<C>,
|
||||
x: ChallengeX<C>,
|
||||
) -> impl Iterator<Item = ProverQuery<'a, C>> + Clone {
|
||||
let x_next = pk.vk.domain.rotate_omega(*x, Rotation::next());
|
||||
|
||||
iter::empty()
|
||||
// Open shuffle product commitments at x
|
||||
.chain(Some(ProverQuery {
|
||||
point: *x,
|
||||
poly: &self.constructed.product_poly,
|
||||
blind: self.constructed.product_blind,
|
||||
}))
|
||||
// Open shuffle product commitments at x_next
|
||||
.chain(Some(ProverQuery {
|
||||
point: x_next,
|
||||
poly: &self.constructed.product_poly,
|
||||
blind: self.constructed.product_blind,
|
||||
}))
|
||||
}
|
||||
}
|
|
@ -0,0 +1,137 @@
|
|||
use std::iter;
|
||||
|
||||
use super::super::{circuit::Expression, ChallengeGamma, ChallengeTheta, ChallengeX};
|
||||
use super::Argument;
|
||||
use crate::{
|
||||
arithmetic::CurveAffine,
|
||||
plonk::{Error, VerifyingKey},
|
||||
poly::{commitment::MSM, Rotation, VerifierQuery},
|
||||
transcript::{EncodedChallenge, TranscriptRead},
|
||||
};
|
||||
use ff::Field;
|
||||
|
||||
/// The verifier's view of a committed shuffle grand product.
pub struct Committed<C: CurveAffine> {
    // Commitment to z(X), read from the prover's transcript.
    product_commitment: C,
}
|
||||
|
||||
/// The verifier's view of a shuffle grand product together with its claimed
/// evaluations at the challenge point and the next row.
pub struct Evaluated<C: CurveAffine> {
    // The commitment these evaluations refer to.
    committed: Committed<C>,
    // Claimed z(x).
    product_eval: C::Scalar,
    // Claimed z(\omega x).
    product_next_eval: C::Scalar,
}
|
||||
|
||||
impl<F: Field> Argument<F> {
|
||||
pub(in crate::plonk) fn read_product_commitment<
|
||||
C: CurveAffine<ScalarExt = F>,
|
||||
E: EncodedChallenge<C>,
|
||||
T: TranscriptRead<C, E>,
|
||||
>(
|
||||
&self,
|
||||
transcript: &mut T,
|
||||
) -> Result<Committed<C>, Error> {
|
||||
let product_commitment = transcript.read_point()?;
|
||||
|
||||
Ok(Committed { product_commitment })
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: CurveAffine> Committed<C> {
|
||||
pub(crate) fn evaluate<E: EncodedChallenge<C>, T: TranscriptRead<C, E>>(
|
||||
self,
|
||||
transcript: &mut T,
|
||||
) -> Result<Evaluated<C>, Error> {
|
||||
let product_eval = transcript.read_scalar()?;
|
||||
let product_next_eval = transcript.read_scalar()?;
|
||||
|
||||
Ok(Evaluated {
|
||||
committed: self,
|
||||
product_eval,
|
||||
product_next_eval,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: CurveAffine> Evaluated<C> {
    /// Yields the shuffle argument's constraint expressions evaluated at the
    /// challenge point, for inclusion in the vanishing-argument check.
    ///
    /// The yielded order matches the prover-side constraint order:
    /// 1. l_0(X) * (1 - z(X))
    /// 2. l_last(X) * (z(X)^2 - z(X))
    /// 3. (1 - (l_last(X) + l_blind(X))) * (z(\omega X)(s(X)+\gamma) - z(X)(a(X)+\gamma))
    pub(in crate::plonk) fn expressions<'a>(
        &'a self,
        l_0: C::Scalar,
        l_last: C::Scalar,
        l_blind: C::Scalar,
        argument: &'a Argument<C::Scalar>,
        theta: ChallengeTheta<C>,
        gamma: ChallengeGamma<C>,
        advice_evals: &[C::Scalar],
        fixed_evals: &[C::Scalar],
        instance_evals: &[C::Scalar],
        challenges: &[C::Scalar],
    ) -> impl Iterator<Item = C::Scalar> + 'a {
        // Rows that are neither the last row nor blinding rows.
        let active_rows = C::Scalar::ONE - (l_last + l_blind);

        let product_expression = || {
            // z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma)
            // Compress a list of expressions with Horner evaluation in \theta,
            // substituting the claimed query evaluations for each leaf.
            let compress_expressions = |expressions: &[Expression<C::Scalar>]| {
                expressions
                    .iter()
                    .map(|expression| {
                        expression.evaluate(
                            &|scalar| scalar,
                            &|_| panic!("virtual selectors are removed during optimization"),
                            &|query| fixed_evals[query.index.unwrap()],
                            &|query| advice_evals[query.index.unwrap()],
                            &|query| instance_evals[query.index.unwrap()],
                            &|challenge| challenges[challenge.index()],
                            &|a| -a,
                            &|a, b| a + &b,
                            &|a, b| a * &b,
                            &|a, scalar| a * &scalar,
                        )
                    })
                    .fold(C::Scalar::ZERO, |acc, eval| acc * &*theta + &eval)
            };
            // z(\omega X) (s(X) + \gamma)
            let left = self.product_next_eval
                * &(compress_expressions(&argument.shuffle_expressions) + &*gamma);
            // z(X) (a(X) + \gamma)
            let right =
                self.product_eval * &(compress_expressions(&argument.input_expressions) + &*gamma);

            (left - &right) * &active_rows
        };

        std::iter::empty()
            .chain(
                // l_0(X) * (1 - z'(X)) = 0
                Some(l_0 * &(C::Scalar::ONE - &self.product_eval)),
            )
            .chain(
                // l_last(X) * (z(X)^2 - z(X)) = 0
                Some(l_last * &(self.product_eval.square() - &self.product_eval)),
            )
            .chain(
                // (1 - (l_last(X) + l_blind(X))) * ( z(\omega X) (s(X) + \gamma) - z(X) (a(X) + \gamma))
                Some(product_expression()),
            )
    }

    /// Yields the verifier's multiopen queries for the shuffle grand product,
    /// mirroring the prover's `open`: z is opened at `x` and `\omega x`.
    pub(in crate::plonk) fn queries<'r, M: MSM<C> + 'r>(
        &'r self,
        vk: &'r VerifyingKey<C>,
        x: ChallengeX<C>,
    ) -> impl Iterator<Item = VerifierQuery<'r, C, M>> + Clone {
        let x_next = vk.domain.rotate_omega(*x, Rotation::next());

        iter::empty()
            // Open shuffle product commitment at x
            .chain(Some(VerifierQuery::new_commitment(
                &self.committed.product_commitment,
                *x,
                self.product_eval,
            )))
            // Open shuffle product commitment at \omega x
            .chain(Some(VerifierQuery::new_commitment(
                &self.committed.product_commitment,
                x_next,
                self.product_next_eval,
            )))
    }
}
|
|
@ -160,6 +160,17 @@ where
|
|||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
let shuffles_committed = (0..num_proofs)
|
||||
.map(|_| -> Result<Vec<_>, _> {
|
||||
// Hash each shuffle product commitment
|
||||
vk.cs
|
||||
.shuffles
|
||||
.iter()
|
||||
.map(|argument| argument.read_product_commitment(transcript))
|
||||
.collect::<Result<Vec<_>, _>>()
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
let vanishing = vanishing::Argument::read_commitments_before_y(transcript)?;
|
||||
|
||||
// Sample y challenge, which keeps the gates linearly independent.
|
||||
|
@ -242,6 +253,16 @@ where
|
|||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
let shuffles_evaluated = shuffles_committed
|
||||
.into_iter()
|
||||
.map(|shuffles| -> Result<Vec<_>, _> {
|
||||
shuffles
|
||||
.into_iter()
|
||||
.map(|shuffle| shuffle.evaluate(transcript))
|
||||
.collect::<Result<Vec<_>, _>>()
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
// This check ensures the circuit is satisfied so long as the polynomial
|
||||
// commitments open to the correct values.
|
||||
let vanishing = {
|
||||
|
@ -265,63 +286,88 @@ where
|
|||
.zip(instance_evals.iter())
|
||||
.zip(permutations_evaluated.iter())
|
||||
.zip(lookups_evaluated.iter())
|
||||
.flat_map(|(((advice_evals, instance_evals), permutation), lookups)| {
|
||||
let challenges = &challenges;
|
||||
let fixed_evals = &fixed_evals;
|
||||
std::iter::empty()
|
||||
// Evaluate the circuit using the custom gates provided
|
||||
.chain(vk.cs.gates.iter().flat_map(move |gate| {
|
||||
gate.polynomials().iter().map(move |poly| {
|
||||
poly.evaluate(
|
||||
&|scalar| scalar,
|
||||
&|_| panic!("virtual selectors are removed during optimization"),
|
||||
&|query| fixed_evals[query.index.unwrap()],
|
||||
&|query| advice_evals[query.index.unwrap()],
|
||||
&|query| instance_evals[query.index.unwrap()],
|
||||
&|challenge| challenges[challenge.index()],
|
||||
&|a| -a,
|
||||
&|a, b| a + &b,
|
||||
&|a, b| a * &b,
|
||||
&|a, scalar| a * &scalar,
|
||||
)
|
||||
})
|
||||
}))
|
||||
.chain(permutation.expressions(
|
||||
vk,
|
||||
&vk.cs.permutation,
|
||||
&permutations_common,
|
||||
advice_evals,
|
||||
fixed_evals,
|
||||
instance_evals,
|
||||
l_0,
|
||||
l_last,
|
||||
l_blind,
|
||||
beta,
|
||||
gamma,
|
||||
x,
|
||||
))
|
||||
.chain(
|
||||
lookups
|
||||
.iter()
|
||||
.zip(vk.cs.lookups.iter())
|
||||
.flat_map(move |(p, argument)| {
|
||||
p.expressions(
|
||||
l_0,
|
||||
l_last,
|
||||
l_blind,
|
||||
argument,
|
||||
theta,
|
||||
beta,
|
||||
gamma,
|
||||
advice_evals,
|
||||
fixed_evals,
|
||||
instance_evals,
|
||||
challenges,
|
||||
.zip(shuffles_evaluated.iter())
|
||||
.flat_map(
|
||||
|((((advice_evals, instance_evals), permutation), lookups), shuffles)| {
|
||||
let challenges = &challenges;
|
||||
let fixed_evals = &fixed_evals;
|
||||
std::iter::empty()
|
||||
// Evaluate the circuit using the custom gates provided
|
||||
.chain(vk.cs.gates.iter().flat_map(move |gate| {
|
||||
gate.polynomials().iter().map(move |poly| {
|
||||
poly.evaluate(
|
||||
&|scalar| scalar,
|
||||
&|_| {
|
||||
panic!("virtual selectors are removed during optimization")
|
||||
},
|
||||
&|query| fixed_evals[query.index.unwrap()],
|
||||
&|query| advice_evals[query.index.unwrap()],
|
||||
&|query| instance_evals[query.index.unwrap()],
|
||||
&|challenge| challenges[challenge.index()],
|
||||
&|a| -a,
|
||||
&|a, b| a + &b,
|
||||
&|a, b| a * &b,
|
||||
&|a, scalar| a * &scalar,
|
||||
)
|
||||
})
|
||||
.into_iter(),
|
||||
)
|
||||
});
|
||||
}))
|
||||
.chain(permutation.expressions(
|
||||
vk,
|
||||
&vk.cs.permutation,
|
||||
&permutations_common,
|
||||
advice_evals,
|
||||
fixed_evals,
|
||||
instance_evals,
|
||||
l_0,
|
||||
l_last,
|
||||
l_blind,
|
||||
beta,
|
||||
gamma,
|
||||
x,
|
||||
))
|
||||
.chain(
|
||||
lookups
|
||||
.iter()
|
||||
.zip(vk.cs.lookups.iter())
|
||||
.flat_map(move |(p, argument)| {
|
||||
p.expressions(
|
||||
l_0,
|
||||
l_last,
|
||||
l_blind,
|
||||
argument,
|
||||
theta,
|
||||
beta,
|
||||
gamma,
|
||||
advice_evals,
|
||||
fixed_evals,
|
||||
instance_evals,
|
||||
challenges,
|
||||
)
|
||||
})
|
||||
.into_iter(),
|
||||
)
|
||||
.chain(
|
||||
shuffles
|
||||
.iter()
|
||||
.zip(vk.cs.shuffles.iter())
|
||||
.flat_map(move |(p, argument)| {
|
||||
p.expressions(
|
||||
l_0,
|
||||
l_last,
|
||||
l_blind,
|
||||
argument,
|
||||
theta,
|
||||
gamma,
|
||||
advice_evals,
|
||||
fixed_evals,
|
||||
instance_evals,
|
||||
challenges,
|
||||
)
|
||||
})
|
||||
.into_iter(),
|
||||
)
|
||||
},
|
||||
);
|
||||
|
||||
vanishing.verify(params, expressions, y, xn)
|
||||
};
|
||||
|
@ -333,13 +379,20 @@ where
|
|||
.zip(advice_evals.iter())
|
||||
.zip(permutations_evaluated.iter())
|
||||
.zip(lookups_evaluated.iter())
|
||||
.zip(shuffles_evaluated.iter())
|
||||
.flat_map(
|
||||
|(
|
||||
(
|
||||
(((instance_commitments, instance_evals), advice_commitments), advice_evals),
|
||||
permutation,
|
||||
(
|
||||
(
|
||||
((instance_commitments, instance_evals), advice_commitments),
|
||||
advice_evals,
|
||||
),
|
||||
permutation,
|
||||
),
|
||||
lookups,
|
||||
),
|
||||
lookups,
|
||||
shuffles,
|
||||
)| {
|
||||
iter::empty()
|
||||
.chain(
|
||||
|
@ -372,6 +425,12 @@ where
|
|||
.flat_map(move |p| p.queries(vk, x))
|
||||
.into_iter(),
|
||||
)
|
||||
.chain(
|
||||
shuffles
|
||||
.iter()
|
||||
.flat_map(move |p| p.queries(vk, x))
|
||||
.into_iter(),
|
||||
)
|
||||
},
|
||||
)
|
||||
.chain(
|
||||
|
|
Loading…
Reference in New Issue