Prove Starks without constraints #1552

Merged · 21 commits · Mar 15, 2024
Showing changes from 17 of the 21 commits.
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## Unreleased

### Changed
- Make Starks without constraints provable ([#1552](https://github.com/0xPolygonZero/plonky2/pull/1552))

## [0.2.1] - 2024-03-01 (`starky` crate only)

### Changed
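For context on the new changelog entry: a STARK with an empty constraint set has a combined constraint polynomial that is identically zero, so there is no quotient polynomial to commit to, and the prover, verifier, and recursive verifier all have to tolerate a missing quotient commitment; that is what the conditional handling in the diffs below is for. In generic STARK notation (the alpha_i are random combination challenges and Z_H is the vanishing polynomial of the trace domain; these symbols are a sketch, not identifiers from the crate):

    C(x) = \sum_i \alpha_i \, c_i(x) \equiv 0 \quad \text{(no constraints } c_i\text{)}, \qquad Q(x) = \frac{C(x)}{Z_H(x)} \equiv 0,

so the quotient Merkle cap, its transcript observation, and its FRI oracle can all be skipped.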
4 changes: 1 addition & 3 deletions field/src/polynomial/mod.rs
@@ -88,9 +88,7 @@ impl<F: Field> PolynomialValues<F> {
}

pub fn degree(&self) -> usize {
self.degree_plus_one()
.checked_sub(1)
.expect("deg(0) is undefined")
self.degree_plus_one().saturating_sub(1)
}

pub fn degree_plus_one(&self) -> usize {
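The practical effect of this change is that `degree()` no longer panics on the zero polynomial, which presumably shows up as soon as a quotient (or other committed) polynomial is identically zero. A hedged, standalone sketch of the difference, using a plain usize in place of `degree_plus_one()`:

    // Hedged, standalone sketch of the behavioral change in PolynomialValues::degree():
    // a plain usize stands in for degree_plus_one(), which is 0 for the zero polynomial.
    fn degree_old(degree_plus_one: usize) -> usize {
        degree_plus_one
            .checked_sub(1)
            .expect("deg(0) is undefined") // panicked whenever the polynomial was zero
    }

    fn degree_new(degree_plus_one: usize) -> usize {
        degree_plus_one.saturating_sub(1) // the zero polynomial now reports degree 0
    }

    fn main() {
        assert_eq!(degree_new(0), 0); // no panic for an all-zero column
        assert_eq!(degree_new(4), 3); // unchanged for non-zero polynomials
        assert_eq!(degree_old(4), 3);
        // degree_old(0) would panic with "deg(0) is undefined".
    }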
10 changes: 8 additions & 2 deletions plonky2/src/recursion/recursive_verifier.rs
@@ -1,3 +1,6 @@
#[cfg(not(feature = "std"))]
use alloc::vec;

use crate::field::extension::Extendable;
use crate::hash::hash_types::{HashOutTarget, RichField};
use crate::plonk::circuit_builder::CircuitBuilder;
@@ -149,13 +152,16 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
let cap_height = fri_params.config.cap_height;

let salt = salt_size(common_data.fri_params.hiding);
let num_leaves_per_oracle = &[
let num_leaves_per_oracle = &mut vec![
common_data.num_preprocessed_polys(),
config.num_wires + salt,
common_data.num_zs_partial_products_polys() + common_data.num_all_lookup_polys() + salt,
common_data.num_quotient_polys() + salt,
];

if common_data.num_quotient_polys() > 0 {
num_leaves_per_oracle.push(common_data.num_quotient_polys() + salt);
}

ProofTarget {
wires_cap: self.add_virtual_cap(cap_height),
plonk_zs_partial_products_cap: self.add_virtual_cap(cap_height),
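The switch from a fixed-size slice to a mutable Vec above exists so that the fourth (quotient) oracle can be dropped entirely when `common_data` reports zero quotient polynomials. A standalone, hedged sketch of that pattern, with illustrative numbers in place of `CommonCircuitData` and a hypothetical helper name:

    // Hedged sketch of the conditional-oracle construction above; the numbers are
    // illustrative and the helper is hypothetical, not part of plonky2's API.
    fn num_leaves_per_oracle(
        num_preprocessed_polys: usize,
        num_wires: usize,
        num_zs_partial_products_and_lookup_polys: usize,
        num_quotient_polys: usize,
        salt: usize,
    ) -> Vec<usize> {
        let mut oracles = vec![
            num_preprocessed_polys,
            num_wires + salt,
            num_zs_partial_products_and_lookup_polys + salt,
        ];
        // Only commit to a quotient oracle when there is something to put in it.
        if num_quotient_polys > 0 {
            oracles.push(num_quotient_polys + salt);
        }
        oracles
    }

    fn main() {
        assert_eq!(num_leaves_per_oracle(2, 80, 6, 8, 4).len(), 4); // usual case: four oracles
        assert_eq!(num_leaves_per_oracle(2, 80, 6, 0, 4).len(), 3); // no quotient polys: three oracles
    }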
193 changes: 13 additions & 180 deletions starky/src/fibonacci_stark.rs
@@ -15,7 +15,6 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::lookup::{Column, Lookup};
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;

@@ -132,135 +131,6 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStark
}
}

/// Similar system than above, but with extra columns to illustrate the permutation argument.
/// Computes a Fibonacci sequence with state `[x0, x1, i, j]` using the state transition
/// `x0' <- x1, x1' <- x0 + x1, i' <- i+1, j' <- j+1`.
/// Note: The `i, j` columns are the columns used to test the permutation argument.
#[derive(Copy, Clone)]
struct FibonacciWithPermutationStark<F: RichField + Extendable<D>, const D: usize> {
num_rows: usize,
_phantom: PhantomData<F>,
}

impl<F: RichField + Extendable<D>, const D: usize> FibonacciWithPermutationStark<F, D> {
// The first public input is `x0`.
const PI_INDEX_X0: usize = 0;
// The second public input is `x1`.
const PI_INDEX_X1: usize = 1;
// The third public input is the second element of the last row, which should be equal to the
// `num_rows`-th Fibonacci number.
const PI_INDEX_RES: usize = 2;

const fn new(num_rows: usize) -> Self {
Self {
num_rows,
_phantom: PhantomData,
}
}

/// Generate the trace using `x0, x1, 0, 1, 1` as initial state values.
fn generate_trace(&self, x0: F, x1: F) -> Vec<PolynomialValues<F>> {
let mut trace_rows = (0..self.num_rows)
.scan([x0, x1, F::ZERO, F::ONE, F::ONE], |acc, _| {
let tmp = *acc;
acc[0] = tmp[1];
acc[1] = tmp[0] + tmp[1];
acc[2] = tmp[2] + F::ONE;
acc[3] = tmp[3] + F::ONE;
// acc[4] (i.e. frequency column) remains unchanged, as we're permuting a strictly monotonous sequence.
Some(tmp)
})
.collect::<Vec<_>>();
trace_rows[self.num_rows - 1][3] = F::ZERO; // So that column 2 and 3 are permutation of one another.
trace_rows_to_poly_values(trace_rows)
}
}

const FIBONACCI_PERM_COLUMNS: usize = 5;
const FIBONACCI_PERM_PUBLIC_INPUTS: usize = 3;

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D>
for FibonacciWithPermutationStark<F, D>
{
type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, P::Scalar, FIBONACCI_PERM_COLUMNS, FIBONACCI_PERM_PUBLIC_INPUTS>
where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>;

type EvaluationFrameTarget = StarkFrame<
ExtensionTarget<D>,
ExtensionTarget<D>,
FIBONACCI_PERM_COLUMNS,
FIBONACCI_PERM_PUBLIC_INPUTS,
>;

fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: &Self::EvaluationFrame<FE, P, D2>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
let local_values = vars.get_local_values();
let next_values = vars.get_next_values();
let public_inputs = vars.get_public_inputs();

// Check public inputs.
yield_constr.constraint_first_row(local_values[0] - public_inputs[Self::PI_INDEX_X0]);
yield_constr.constraint_first_row(local_values[1] - public_inputs[Self::PI_INDEX_X1]);
yield_constr.constraint_last_row(local_values[1] - public_inputs[Self::PI_INDEX_RES]);

// x0' <- x1
yield_constr.constraint_transition(next_values[0] - local_values[1]);
// x1' <- x0 + x1
yield_constr.constraint_transition(next_values[1] - local_values[0] - local_values[1]);
}

fn eval_ext_circuit(
&self,
builder: &mut CircuitBuilder<F, D>,
vars: &Self::EvaluationFrameTarget,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let local_values = vars.get_local_values();
let next_values = vars.get_next_values();
let public_inputs = vars.get_public_inputs();
// Check public inputs.
let pis_constraints = [
builder.sub_extension(local_values[0], public_inputs[Self::PI_INDEX_X0]),
builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_X1]),
builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_RES]),
];
yield_constr.constraint_first_row(builder, pis_constraints[0]);
yield_constr.constraint_first_row(builder, pis_constraints[1]);
yield_constr.constraint_last_row(builder, pis_constraints[2]);

// x0' <- x1
let first_col_constraint = builder.sub_extension(next_values[0], local_values[1]);
yield_constr.constraint_transition(builder, first_col_constraint);
// x1' <- x0 + x1
let second_col_constraint = {
let tmp = builder.sub_extension(next_values[1], local_values[0]);
builder.sub_extension(tmp, local_values[1])
};
yield_constr.constraint_transition(builder, second_col_constraint);
}

fn constraint_degree(&self) -> usize {
2
}

fn lookups(&self) -> Vec<Lookup<F>> {
vec![Lookup {
columns: vec![Column::single(2)],
table_column: Column::single(3),
frequencies_column: Column::single(4),
filter_columns: vec![None; 1],
}]
}
}

#[cfg(test)]
mod tests {
use anyhow::Result;
@@ -274,7 +144,7 @@ mod tests {
use plonky2::util::timing::TimingTree;

use crate::config::StarkConfig;
use crate::fibonacci_stark::{FibonacciStark, FibonacciWithPermutationStark};
use crate::fibonacci_stark::FibonacciStark;
use crate::proof::StarkProofWithPublicInputs;
use crate::prover::prove;
use crate::recursive_verifier::{
@@ -294,30 +164,15 @@
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
type S1 = FibonacciStark<F, D>;
type S2 = FibonacciWithPermutationStark<F, D>;
type S = FibonacciStark<F, D>;

let config = StarkConfig::standard_fast_config();
let num_rows = 1 << 5;
let public_inputs = [F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)];

// Test first STARK
let stark = S1::new(num_rows);
let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
let proof = prove::<F, C, S1, D>(
stark,
&config,
trace,
&public_inputs,
&mut TimingTree::default(),
)?;

verify_stark_proof(stark, proof, &config)?;

// Test second STARK
let stark = S2::new(num_rows);
let stark = S::new(num_rows);
let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
let proof = prove::<F, C, S2, D>(
let proof = prove::<F, C, S, D>(
stark,
&config,
trace,
@@ -333,14 +188,10 @@
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
type S1 = FibonacciStark<F, D>;
type S2 = FibonacciWithPermutationStark<F, D>;
type S = FibonacciStark<F, D>;

let num_rows = 1 << 5;
let stark = S1::new(num_rows);
test_stark_low_degree(stark)?;

let stark = S2::new(num_rows);
let stark = S::new(num_rows);
test_stark_low_degree(stark)
}

@@ -349,14 +200,11 @@
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
type S1 = FibonacciStark<F, D>;
type S2 = FibonacciWithPermutationStark<F, D>;
type S = FibonacciStark<F, D>;

let num_rows = 1 << 5;
let stark = S1::new(num_rows);
test_stark_circuit_constraints::<F, C, S1, D>(stark)?;
let stark = S2::new(num_rows);
test_stark_circuit_constraints::<F, C, S2, D>(stark)
let stark = S::new(num_rows);
test_stark_circuit_constraints::<F, C, S, D>(stark)
}

#[test]
@@ -365,31 +213,16 @@
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
type S1 = FibonacciStark<F, D>;
type S2 = FibonacciWithPermutationStark<F, D>;
type S = FibonacciStark<F, D>;

let config = StarkConfig::standard_fast_config();
let num_rows = 1 << 5;
let public_inputs = [F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)];

// Test first STARK
let stark = S1::new(num_rows);
let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
let proof = prove::<F, C, S1, D>(
stark,
&config,
trace,
&public_inputs,
&mut TimingTree::default(),
)?;
verify_stark_proof(stark, proof.clone(), &config)?;

recursive_proof::<F, C, S1, C, D>(stark, proof, &config, true)?;

// Test second STARK
let stark = S2::new(num_rows);
let stark = S::new(num_rows);
let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
let proof = prove::<F, C, S2, D>(
let proof = prove::<F, C, S, D>(
stark,
&config,
trace,
Expand All @@ -398,7 +231,7 @@ mod tests {
)?;
verify_stark_proof(stark, proof.clone(), &config)?;

recursive_proof::<F, C, S2, C, D>(stark, proof, &config, true)
recursive_proof::<F, C, S, C, D>(stark, proof, &config, true)
}

fn recursive_proof<
17 changes: 11 additions & 6 deletions starky/src/get_challenges.rs
@@ -28,7 +28,7 @@ fn get_challenges<F, C, const D: usize>(
challenges: Option<&GrandProductChallengeSet<F>>,
trace_cap: Option<&MerkleCap<F, C::Hasher>>,
auxiliary_polys_cap: Option<&MerkleCap<F, C::Hasher>>,
quotient_polys_cap: &MerkleCap<F, C::Hasher>,
quotient_polys_cap: Option<&MerkleCap<F, C::Hasher>>,
openings: &StarkOpeningSet<F, D>,
commit_phase_merkle_caps: &[MerkleCap<F, C::Hasher>],
final_poly: &PolynomialCoeffs<F::Extension>,
@@ -60,7 +60,9 @@ where

let stark_alphas = challenger.get_n_challenges(num_challenges);

challenger.observe_cap(quotient_polys_cap);
if let Some(quotient_polys_cap) = quotient_polys_cap {
challenger.observe_cap(quotient_polys_cap);
}
let stark_zeta = challenger.get_extension_challenge::<D>();

challenger.observe_openings(&openings.to_fri_openings());
@@ -125,7 +127,7 @@ where
challenges,
trace_cap,
auxiliary_polys_cap.as_ref(),
quotient_polys_cap,
quotient_polys_cap.as_ref(),
openings,
commit_phase_merkle_caps,
final_poly,
@@ -168,7 +170,7 @@ fn get_challenges_target<F, C, const D: usize>(
challenges: Option<&GrandProductChallengeSet<Target>>,
trace_cap: Option<&MerkleCapTarget>,
auxiliary_polys_cap: Option<&MerkleCapTarget>,
quotient_polys_cap: &MerkleCapTarget,
quotient_polys_cap: Option<&MerkleCapTarget>,
openings: &StarkOpeningSetTarget<D>,
commit_phase_merkle_caps: &[MerkleCapTarget],
final_poly: &PolynomialCoeffsExtTarget<D>,
@@ -200,7 +202,10 @@ where

let stark_alphas = challenger.get_n_challenges(builder, num_challenges);

challenger.observe_cap(quotient_polys_cap);
if let Some(cap) = quotient_polys_cap {
challenger.observe_cap(cap);
}

let stark_zeta = challenger.get_extension_challenge(builder);

challenger.observe_openings(&openings.to_fri_openings(builder.zero()));
@@ -266,7 +271,7 @@ impl<const D: usize> StarkProofTarget<D> {
challenges,
trace_cap,
auxiliary_polys_cap.as_ref(),
quotient_polys_cap,
quotient_polys_cap.as_ref(),
openings,
commit_phase_merkle_caps,
final_poly,
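Making `quotient_polys_cap` an `Option` means the native and in-circuit challengers skip observing it in exactly the same cases, which keeps the Fiat-Shamir transcript consistent between prover and verifier. A minimal hedged sketch with a toy transcript (a stand-in, not plonky2's `Challenger` or `MerkleCap` API):

    // Toy transcript illustrating the conditional observation above.
    #[derive(Default)]
    struct ToyChallenger {
        observed: Vec<u64>,
    }

    impl ToyChallenger {
        fn observe_cap(&mut self, cap: &[u64]) {
            self.observed.extend_from_slice(cap);
        }
    }

    fn observe_quotient_cap(challenger: &mut ToyChallenger, quotient_polys_cap: Option<&[u64]>) {
        // Prover and verifier must make the same decision here, or they will
        // derive different challenges from the transcript.
        if let Some(cap) = quotient_polys_cap {
            challenger.observe_cap(cap);
        }
    }

    fn main() {
        let cap = vec![1u64, 2, 3];

        let mut with_quotient = ToyChallenger::default();
        observe_quotient_cap(&mut with_quotient, Some(cap.as_slice()));

        let mut without_quotient = ToyChallenger::default();
        observe_quotient_cap(&mut without_quotient, None);

        assert_eq!(with_quotient.observed, vec![1, 2, 3]);
        assert!(without_quotient.observed.is_empty()); // nothing absorbed when the cap is absent
    }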
2 changes: 2 additions & 0 deletions starky/src/lib.rs
@@ -340,3 +340,5 @@ pub mod verifier;

#[cfg(test)]
pub mod fibonacci_stark;
#[cfg(test)]
pub mod permutation_stark;