Merge remote-tracking branch 'origin/master' into skalman--powers-of-two-gadget
drskalman committed Dec 4, 2024
2 parents 2d6e73a + 395095e commit 86c019a
Showing 18 changed files with 78 additions and 128 deletions.
28 changes: 26 additions & 2 deletions .github/workflows/rust.yml
@@ -1,19 +1,40 @@
name: Rust

on:
# Run CI on push only for 'main' branch
push:
branches: [ "**" ]
branches: [master]
# Run CI on pull request for all branches
pull_request:
branches: [ "**" ]
branches: ["**"]

concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true

env:
CARGO_TERM_COLOR: always
RUSTFLAGS: '-D warnings'
RUST_BACKTRACE: 1

jobs:
format:
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- uses: actions/checkout@v3
- name: Install toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
components: rustfmt
- name: Format
run: cargo fmt --all --check

build:
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- uses: actions/checkout@v3
- name: Install toolchain
@@ -26,6 +47,7 @@ jobs:

build-wasm32:
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- uses: actions/checkout@v3
- name: Install toolchain
@@ -39,6 +61,7 @@ jobs:

test:
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- uses: actions/checkout@v3
- name: Install toolchain
@@ -48,3 +71,4 @@ jobs:
toolchain: stable
- name: Run tests
run: cargo test --release

10 changes: 5 additions & 5 deletions Cargo.toml
@@ -6,10 +6,10 @@ members = [
]

[workspace.dependencies]
ark-std = { version = "0.4", default-features = false }
ark-ff = { version = "0.4", default-features = false }
ark-ec = { version = "0.4", default-features = false }
ark-poly = { version = "0.4", default-features = false }
ark-serialize = { version = "0.4", default-features = false, features = ["derive"] }
ark-std = { version = "0.5", default-features = false }
ark-ff = { version = "0.5", default-features = false }
ark-ec = { version = "0.5", default-features = false }
ark-poly = { version = "0.5", default-features = false }
ark-serialize = { version = "0.5", default-features = false, features = ["derive"] }
fflonk = { git = "https://github.com/w3f/fflonk", default-features = false }
rayon = { version = "1", default-features = false }
4 changes: 2 additions & 2 deletions common/Cargo.toml
@@ -5,7 +5,7 @@ edition = "2021"
authors = ["Sergey Vasilyev <swasilyev@gmail.com>"]
license = "MIT/Apache-2.0"
description = "Infrastructure for creating plonk-like proofs"
keywords = ["crypto", "cryptography", "plonk"]
keywords = ["cryptography", "plonk"]

[dependencies]
ark-std.workspace = true
@@ -18,7 +18,7 @@ rayon = { workspace = true, optional = true }
getrandom_or_panic = { version = "0.0.3", default-features = false }

[dev-dependencies]
ark-ed-on-bls12-381-bandersnatch = { version = "0.4", default-features = false }
ark-ed-on-bls12-381-bandersnatch = { version = "0.5", default-features = false }

[features]
default = ["std"]
6 changes: 2 additions & 4 deletions common/src/domain.rs
@@ -98,11 +98,9 @@ impl<F: FftField> Domain<F> {
pub(crate) fn divide_by_vanishing_poly(&self, poly: &DensePolynomial<F>) -> DensePolynomial<F> {
let (quotient, remainder) = if self.hiding {
let exclude_zk_rows = poly * self.zk_rows_vanishing_poly.as_ref().unwrap();
exclude_zk_rows
.divide_by_vanishing_poly(self.domains.x1)
.unwrap() //TODO error-handling
exclude_zk_rows.divide_by_vanishing_poly(self.domains.x1)
} else {
poly.divide_by_vanishing_poly(self.domains.x1).unwrap() //TODO error-handling
poly.divide_by_vanishing_poly(self.domains.x1)
};
assert!(remainder.is_zero()); //TODO error-handling
quotient
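The change above tracks the ark-poly 0.5 API, in which divide_by_vanishing_poly returns the (quotient, remainder) pair directly instead of an Option, so the .unwrap() calls disappear. A minimal sketch of that usage, assuming ark-poly 0.5 and borrowing ark-bls12-381's scalar field purely for illustration:

use ark_bls12_381::Fr;
use ark_ff::Zero;
use ark_poly::univariate::DensePolynomial;
use ark_poly::GeneralEvaluationDomain;

// Divide `poly` by the vanishing polynomial of `domain`.
// ark-poly 0.5 returns the pair directly; 0.4 wrapped it in an Option.
fn quotient_over_domain(
    poly: &DensePolynomial<Fr>,
    domain: GeneralEvaluationDomain<Fr>,
) -> DensePolynomial<Fr> {
    let (quotient, remainder) = poly.divide_by_vanishing_poly(domain);
    // The remainder is zero exactly when `poly` vanishes on the whole domain.
    assert!(remainder.is_zero());
    quotient
}

As in Domain::divide_by_vanishing_poly above, the caller still checks the remainder itself; the //TODO error-handling markers remain.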
2 changes: 1 addition & 1 deletion common/src/gadgets/booleanity.rs
@@ -42,7 +42,7 @@ pub struct Booleanity<F: FftField> {
bits: BitColumn<F>,
}

impl<F: FftField> Booleanity<F> {
impl<'a, F: FftField> Booleanity<F> {
pub fn init(bits: BitColumn<F>) -> Self {
Self { bits }
}
1 change: 1 addition & 0 deletions common/src/gadgets/mod.rs
@@ -5,6 +5,7 @@ use ark_std::vec::Vec;

pub mod booleanity;
pub mod cond_add;
// pub mod inner_prod_pub;
pub mod fixed_cells;
pub mod inner_prod;
pub mod powers_of_two_multiples;
4 changes: 2 additions & 2 deletions common/src/gadgets/powers_of_two_multiples.rs
@@ -1,6 +1,6 @@
use core::marker::PhantomData;

use ark_ec::{AffineRepr, CurveGroup, Group};
use ark_ec::{AdditiveGroup, AffineRepr, CurveGroup};
use ark_ff::{FftField, Field, PrimeField};
use ark_poly::univariate::DensePolynomial;
use ark_poly::{Evaluations, GeneralEvaluationDomain};
@@ -73,7 +73,7 @@ where

fn evaluate_assignment(&self, z: &F) -> PowersOfTwoMultipleValuesTE<F, Curve> {
PowersOfTwoMultipleValuesTE {
point: (*self.point.x().unwrap(), *self.point.y().unwrap()),
point: (self.point.x().unwrap(), self.point.y().unwrap()),
not_last: self.not_last.evaluate(z),
multiples: self.multiples.evaluate(z),
_curve: PhantomData,
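The import swap and the dropped dereferences above reflect the arkworks 0.5 reshuffle: doubling and the other additive-group operations now come from AdditiveGroup (re-exported by ark-ec), and AffineRepr::x()/y() appear to hand coordinates back by value rather than by reference, consistent with the removed *self.point.x() dereferences. A small sketch under those assumptions, using the Bandersnatch dev-dependency from common/Cargo.toml only as an illustrative curve:

use ark_ec::{AdditiveGroup, AffineRepr};
use ark_ed_on_bls12_381_bandersnatch::{EdwardsAffine, EdwardsProjective, Fq};

// Read a point's affine coordinates; with ark-ec 0.5, `x()`/`y()` yield
// `Option<Self::BaseField>` by value, so no `*` is needed.
// `unwrap` assumes the coordinates are available (not the identity).
fn affine_coords(p: &EdwardsAffine) -> (Fq, Fq) {
    (p.x().unwrap(), p.y().unwrap())
}

// Doubling now lives on the `AdditiveGroup` trait, hence the new import.
fn doubled(p: &EdwardsAffine) -> EdwardsProjective {
    let mut g = p.into_group();
    g.double_in_place();
    g
}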
1 change: 1 addition & 0 deletions common/src/gadgets/sw_cond_add.rs
@@ -239,6 +239,7 @@ where
let mut c1 = b * ((x1 - x2) * (x1 - x2) * (x1 + x2 + x3) - (y2 - y1) * (y2 - y1))
+ (F::one() - b) * (y3 - y1);

#[rustfmt::skip]
let mut c2 =
b * ((x1 - x2) * (y3 + y1) - (y2 - y1) * (x3 - x1)) + (F::one() - b) * (x3 - x1);

2 changes: 1 addition & 1 deletion common/src/test_helpers.rs
@@ -1,4 +1,4 @@
use ark_ec::{AffineRepr, CurveGroup, Group};
use ark_ec::{AdditiveGroup, AffineRepr, CurveGroup};
use ark_std::rand::Rng;
use ark_std::vec::Vec;
use ark_std::UniformRand;
5 changes: 5 additions & 0 deletions common/src/transcript.rs
@@ -36,6 +36,11 @@ pub trait PlonkTranscript<F: PrimeField, CS: PCS<F>>: Clone {
self._add_serializable(b"quotient", point);
}

fn add_kzg_proofs(&mut self, in_zeta: &CS::Proof, in_zeta_omega: &CS::Proof) {
self._add_serializable(b"kzg_proof_zeta", in_zeta);
self._add_serializable(b"kzg_proof_zeta_omega", in_zeta_omega);
}

fn get_evaluation_point(&mut self) -> F {
self._128_bit_point(b"evaluation_point")
}
6 changes: 2 additions & 4 deletions common/src/verifier.rs
@@ -6,9 +6,7 @@ use fflonk::pcs::{Commitment, PcsParams, PCS};

use crate::piop::VerifierPiop;
use crate::transcript::PlonkTranscript;
use crate::ColumnsCommited;
use crate::ColumnsEvaluated;
use crate::Proof;
use crate::{ColumnsCommited, ColumnsEvaluated, Proof};

pub struct PlonkVerifier<F: PrimeField, CS: PCS<F>, T: PlonkTranscript<F, CS>> {
// Polynomial commitment scheme verifier's key.
@@ -110,8 +108,8 @@ impl<F: PrimeField, CS: PCS<F>, T: PlonkTranscript<F, CS>> PlonkVerifier<F, CS,
let zeta = transcript.get_evaluation_point();
transcript.add_evaluations(&proof.columns_at_zeta, &proof.lin_at_zeta_omega);
let nus = transcript.get_kzg_aggregation_challenges(n_polys);
transcript.add_kzg_proofs(&proof.agg_at_zeta_proof, &proof.lin_at_zeta_omega_proof);
let challenges = Challenges { alphas, zeta, nus };

(challenges, transcript.to_rng())
}
}
10 changes: 5 additions & 5 deletions ring/Cargo.toml
@@ -5,7 +5,7 @@ edition = "2021"
authors = ["Sergey Vasilyev <swasilyev@gmail.com>"]
license = "MIT/Apache-2.0"
description = "zk-proof of knowledge of the blinding factor for a Pedersen commitment"
keywords = ["crypto", "cryptography", "zk-proof"]
keywords = ["cryptography", "ring-vrf"]

[dependencies]
ark-std.workspace = true
@@ -16,12 +16,12 @@ ark-serialize.workspace = true
fflonk.workspace = true
rayon = { workspace = true, optional = true }
common = { path = "../common", default-features = false }
arrayvec = { version = "0.7", default-features = false }
ark-transcript = { version = "0.0.2", default-features = false }
blake2 = { version = "0.10", default-features = false }
ark-transcript = { git = "https://github.com/w3f/ark-transcript", default-features = false }

[dev-dependencies]
ark-bls12-381 = { version = "0.4", default-features = false, features = ["curve"] }
ark-ed-on-bls12-381-bandersnatch = { version = "0.4", default-features = false }
ark-bls12-381 = { version = "0.5", default-features = false, features = ["curve"] }
ark-ed-on-bls12-381-bandersnatch = { version = "0.5", default-features = false }

[features]
default = [ "std" ]
76 changes: 3 additions & 73 deletions ring/src/piop/mod.rs
@@ -21,81 +21,11 @@ pub mod params;
mod prover;
mod verifier;

// Workaround while waiting for https://github.com/arkworks-rs/algebra/pull/837
// to be on [crates.io](https://crates.io/crates/ark-serialize) (allegedly ark-serialize 0.4.3 )
mod ark_serialize_837 {
use ark_serialize::{
CanonicalDeserialize, CanonicalSerialize, Compress, Read, SerializationError, Valid,
Validate,
};

#[derive(Clone, CanonicalSerialize)]
#[repr(transparent)]
pub struct ArrayWrap<T: CanonicalSerialize, const N: usize>(pub [T; N]);

impl<T: CanonicalDeserialize + CanonicalSerialize, const N: usize> Valid for ArrayWrap<T, N> {
fn check(&self) -> Result<(), SerializationError> {
self.0.check()
}
}

impl<T: CanonicalDeserialize + CanonicalSerialize, const N: usize> CanonicalDeserialize
for ArrayWrap<T, N>
{
fn deserialize_with_mode<R: Read>(
mut reader: R,
compress: Compress,
validate: Validate,
) -> Result<Self, SerializationError> {
let mut array = arrayvec::ArrayVec::<T, N>::new();
for _ in 0..N {
array.push(T::deserialize_with_mode(
&mut reader,
compress,
Validate::No,
)?);
}
if let ark_serialize::Validate::Yes = validate {
T::batch_check(array.iter())?
}
Ok(ArrayWrap(array.into_inner().ok().unwrap()))
}
}

impl<T: CanonicalDeserialize + CanonicalSerialize, const N: usize> core::ops::Deref
for ArrayWrap<T, N>
{
type Target = [T; N];

fn deref(&self) -> &Self::Target {
&self.0
}
}

// This is expected to panic until https://github.com/arkworks-rs/algebra/pull/837
// doesn't land on crates.io
#[test]
#[should_panic]
fn panics_without_ark_serialize_827() {
let buf = [0u8; 96];
let res = <[ark_bls12_381::G1Affine; 2]>::deserialize_compressed(&buf[..]);
assert!(res.is_err());
}

#[test]
fn workaround_waiting_for_ark_serialize_837() {
let buf = [0u8; 96];
let res = <ArrayWrap<ark_bls12_381::G1Affine, 2>>::deserialize_compressed(&buf[..]);
assert!(res.is_err());
}
}
use ark_serialize_837::*;

#[derive(Clone, CanonicalSerialize, CanonicalDeserialize)]
pub struct RingCommitments<F: PrimeField, C: Commitment<F>> {
pub(crate) bits: C,
pub(crate) inn_prod_acc: C,
pub(crate) cond_add_acc: ArrayWrap<C, 2>,
pub(crate) cond_add_acc: [C; 2],
pub(crate) phantom: PhantomData<F>,
}

@@ -112,11 +42,11 @@ impl<F: PrimeField, C: Commitment<F>> ColumnsCommited<F, C> for RingCommitments<

#[derive(Clone, CanonicalSerialize, CanonicalDeserialize)]
pub struct RingEvaluations<F: PrimeField> {
pub(crate) points: ArrayWrap<F, 2>,
pub(crate) points: [F; 2],
pub(crate) ring_selector: F,
pub(crate) bits: F,
pub(crate) inn_prod_acc: F,
pub(crate) cond_add_acc: ArrayWrap<F, 2>,
pub(crate) cond_add_acc: [F; 2],
}

impl<F: PrimeField> ColumnsEvaluated<F> for RingEvaluations<F> {
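The deleted ark_serialize_837 module above was a stop-gap for fixed-size arrays lacking CanonicalSerialize/CanonicalDeserialize before arkworks/algebra#837; with ark-serialize 0.5 the derive handles plain [C; 2] fields, which is why RingCommitments and RingEvaluations drop ArrayWrap. A round-trip sketch under that assumption, with a concrete G1Affine type chosen only for illustration:

use ark_bls12_381::G1Affine;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};

// With ark-serialize 0.5, `[T; N]` implements the (de)serialization traits,
// so a derived struct can hold fixed-size arrays without a wrapper type.
#[derive(Clone, CanonicalSerialize, CanonicalDeserialize)]
struct TwoCommitments {
    pub acc: [G1Affine; 2],
}

fn roundtrip(c: &TwoCommitments) -> TwoCommitments {
    let mut buf = Vec::new();
    c.serialize_compressed(&mut buf).expect("writing to a Vec should not fail");
    TwoCommitments::deserialize_compressed(&buf[..]).expect("bytes written above are valid")
}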
8 changes: 2 additions & 6 deletions ring/src/piop/params.rs
@@ -1,4 +1,4 @@
use ark_ec::{AffineRepr, CurveGroup, Group};
use ark_ec::{AdditiveGroup, AffineRepr, CurveGroup};
use ark_ff::{BigInteger, PrimeField};
use ark_std::{vec, vec::Vec};

@@ -47,7 +47,7 @@ impl<F: PrimeField, P: AffineRepr<BaseField = F>> PiopParams<F, P> {
pub fn fixed_columns(&self, keys: &[P]) -> FixedColumns<F, P> {
let ring_selector = self.keyset_part_selector();
let ring_selector = self.domain.public_column(ring_selector);
let points = self.points_column(keys);
let points = self.points_column(&keys);
FixedColumns {
points,
ring_selector,
@@ -87,10 +87,6 @@ impl<F: PrimeField, P: AffineRepr<BaseField = F>> PiopParams<F, P> {
]
.concat()
}

pub fn padding_point(&self) -> P {
self.padding_point
}
}

#[cfg(test)]
11 changes: 5 additions & 6 deletions ring/src/piop/prover.rs
@@ -103,10 +103,10 @@
commit: Fun,
) -> Self::Commitments {
let bits = commit(self.bits.as_poly());
let cond_add_acc = super::ArrayWrap([
let cond_add_acc = [
commit(self.cond_add.get_acc().xs.as_poly()),
commit(self.cond_add.get_acc().ys.as_poly()),
]);
];
let inn_prod_acc = commit(self.inner_prod.acc.as_poly());
Self::Commitments {
bits,
@@ -131,15 +131,14 @@
}

fn columns_evaluated(&self, zeta: &F) -> Self::Evaluations {
let points =
super::ArrayWrap([self.points.xs.evaluate(zeta), self.points.ys.evaluate(zeta)]);
let points = [self.points.xs.evaluate(zeta), self.points.ys.evaluate(zeta)];
let ring_selector = self.ring_selector.evaluate(zeta);
let bits = self.bits.evaluate(zeta);
let inn_prod_acc = self.inner_prod.acc.evaluate(zeta);
let cond_add_acc = super::ArrayWrap([
let cond_add_acc = [
self.cond_add.get_acc().xs.evaluate(zeta),
self.cond_add.get_acc().ys.evaluate(zeta),
]);
];
Self::Evaluations {
points,
ring_selector,