diff --git a/src/commitments.rs b/src/commitments.rs
index bac7ea73..8d21af24 100644
--- a/src/commitments.rs
+++ b/src/commitments.rs
@@ -2,8 +2,8 @@ use super::group::{GroupElement, GroupElementAffine, VartimeMultiscalarMul, GROU
 use super::scalar::Scalar;
 use crate::group::CompressGroupElement;
 use crate::parameters::*;
-use ark_ec::{AffineRepr, CurveGroup};
-use ark_ff::PrimeField;
+use ark_ec::{AffineRepr};
+
 use std::ops::Mul;
 
 use ark_crypto_primitives::sponge::poseidon::PoseidonSponge;
diff --git a/src/constraints.rs b/src/constraints.rs
index 89162d4b..aefc60c7 100644
--- a/src/constraints.rs
+++ b/src/constraints.rs
@@ -23,7 +23,7 @@ use ark_crypto_primitives::sponge::{
   constraints::CryptographicSpongeVar,
   poseidon::{constraints::PoseidonSpongeVar, PoseidonConfig},
 };
-use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof, VerifierKey};
+use ark_poly_commit::multilinear_pc::data_structures::{Commitment};
 use ark_r1cs_std::{
   alloc::{AllocVar, AllocationMode},
   fields::fp::FpVar,
diff --git a/src/dense_mlpoly.rs b/src/dense_mlpoly.rs
index 21b364b9..d8e2e045 100644
--- a/src/dense_mlpoly.rs
+++ b/src/dense_mlpoly.rs
@@ -2,7 +2,6 @@
 use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};
-
 use super::commitments::{Commitments, MultiCommitGens};
 use super::errors::ProofVerifyError;
 use super::group::{
@@ -13,19 +12,13 @@ use super::math::Math;
 use super::nizk::{DotProductProofGens, DotProductProofLog};
 use super::random::RandomTape;
 use super::scalar::Scalar;
-use super::transcript::{AppendToTranscript, ProofTranscript};
-use ark_bls12_377::{Bls12_377 as I, G1Affine};
-use ark_ec::scalar_mul::variable_base::VariableBaseMSM;
-use ark_ec::{pairing::Pairing, CurveGroup};
-use ark_ff::{One, PrimeField, UniformRand, Zero};
-use ark_poly::{DenseMultilinearExtension, MultilinearExtension};
-use ark_poly_commit::multilinear_pc::data_structures::{
-  Commitment, CommitterKey, Proof, UniversalParams, VerifierKey,
-};
+use ark_bls12_377::Bls12_377 as I;
+use ark_ff::{One, UniformRand, Zero};
+use ark_poly::MultilinearExtension;
+use ark_poly_commit::multilinear_pc::data_structures::{CommitterKey, VerifierKey};
 use ark_poly_commit::multilinear_pc::MultilinearPC;
 use ark_serialize::*;
 use core::ops::Index;
-use merlin::Transcript;
 use std::ops::{Add, AddAssign, Neg, Sub, SubAssign};
 
 #[cfg(feature = "multicore")]
@@ -465,16 +458,6 @@ impl Index<usize> for DensePolynomial {
   }
 }
 
-impl AppendToTranscript for PolyCommitment {
-  fn append_to_transcript(&self, label: &'static [u8], transcript: &mut Transcript) {
-    transcript.append_message(label, b"poly_commitment_begin");
-    for i in 0..self.C.len() {
-      transcript.append_point(b"poly_commitment_share", &self.C[i]);
-    }
-    transcript.append_message(label, b"poly_commitment_end");
-  }
-}
-
 impl AppendToPoseidon for PolyCommitment {
   fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) {
     for i in 0..self.C.len() {
@@ -593,7 +576,6 @@ impl PolyEvalProof {
 #[cfg(test)]
 mod tests {
-
   use crate::parameters::poseidon_params;
diff --git a/src/group.rs b/src/group.rs
index f8e13bf7..4569c66e 100644
--- a/src/group.rs
+++ b/src/group.rs
@@ -1,7 +1,7 @@
 use crate::errors::ProofVerifyError;
 use ark_ec::scalar_mul::variable_base::VariableBaseMSM;
 use ark_ec::Group;
-use ark_ff::PrimeField;
+
 use lazy_static::lazy_static;
 
 use super::scalar::Scalar;
diff --git a/src/lib.rs b/src/lib.rs
index ffdd1981..3982ceca 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -36,7 +36,6 @@ mod sparse_mlpoly;
 mod sqrt_pst;
 mod sumcheck;
 mod timer;
-mod transcript;
 mod unipoly;
 
 pub mod parameters;
diff --git a/src/mipp.rs b/src/mipp.rs
index f9a9878b..06fe9e29 100644
--- a/src/mipp.rs
+++ b/src/mipp.rs
@@ -1,15 +1,15 @@
-use super::macros::*;
+
 use ark_ec::scalar_mul::variable_base::VariableBaseMSM;
 use ark_ec::CurveGroup;
 use ark_ec::{pairing::Pairing, AffineRepr};
-use ark_ff::{BigInt, Field, PrimeField};
+use ark_ff::{Field, PrimeField};
 use ark_poly::{DenseMultilinearExtension, MultilinearExtension};
 use ark_poly_commit::multilinear_pc::data_structures::{
-  CommitmentG2, CommitterKey, Proof, ProofG1, VerifierKey,
+  CommitmentG2, CommitterKey, ProofG1, VerifierKey,
 };
 use ark_poly_commit::multilinear_pc::MultilinearPC;
-use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, SerializationError, Write};
-use ark_std::cfg_iter;
+use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError, Write};
+
 use ark_std::One;
 use ark_std::Zero;
 use rayon::iter::ParallelIterator;
@@ -35,7 +35,7 @@ impl<E: Pairing> MippProof<E> {
     y: Vec<E::ScalarField>,
     h: Vec<E::G2Affine>,
     U: &E::G1Affine,
-    T: &<E as Pairing>::TargetField,
+    _T: &<E as Pairing>::TargetField,
   ) -> Result<MippProof<E>, Error> {
     // the values of vectors A and y rescaled at each step of the loop
     let (mut m_a, mut m_y) = (a.clone(), y.clone());
@@ -266,7 +266,7 @@ impl<E: Pairing> MippProof<E> {
     let mut point: Vec<E::ScalarField> = Vec::new();
     let m = xs_inv.len();
 
-    for i in 0..m {
+    for _i in 0..m {
       let r = transcript.challenge_scalar::<E::ScalarField>(b"random_point");
       point.push(r);
     }
diff --git a/src/nizk/mod.rs b/src/nizk/mod.rs
index 26667b7a..4c17ac33 100644
--- a/src/nizk/mod.rs
+++ b/src/nizk/mod.rs
@@ -1,423 +1,20 @@
 #![allow(clippy::too_many_arguments)]
 use crate::math::Math;
-use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};
+use crate::poseidon_transcript::PoseidonTranscript;
 
 use super::commitments::{Commitments, MultiCommitGens};
 use super::errors::ProofVerifyError;
-use super::group::{
-  CompressGroupElement, CompressedGroup, DecompressGroupElement, GroupElement, UnpackGroupElement,
-};
+use super::group::{CompressGroupElement, CompressedGroup, UnpackGroupElement};
 use super::random::RandomTape;
 use super::scalar::Scalar;
-use ark_ec::CurveGroup;
-use ark_ff::PrimeField;
+
+
 use ark_serialize::*;
 use std::ops::Mul;
 
 mod bullet;
 use bullet::BulletReductionProof;
 
-#[derive(CanonicalSerialize, CanonicalDeserialize, Debug)]
-pub struct KnowledgeProof {
-  alpha: CompressedGroup,
-  z1: Scalar,
-  z2: Scalar,
-}
-
-impl KnowledgeProof {
-  fn protocol_name() -> &'static [u8] {
-    b"knowledge proof"
-  }
-
-  pub fn prove(
-    gens_n: &MultiCommitGens,
-    transcript: &mut PoseidonTranscript,
-    random_tape: &mut RandomTape,
-    x: &Scalar,
-    r: &Scalar,
-  ) -> (KnowledgeProof, CompressedGroup) {
-    // transcript.append_protocol_name(KnowledgeProof::protocol_name());
-
-    // produce two random Scalars
-    let t1 = random_tape.random_scalar(b"t1");
-    let t2 = random_tape.random_scalar(b"t2");
-
-    let C = x.commit(r, gens_n).compress();
-    C.append_to_poseidon(transcript);
-
-    let alpha = t1.commit(&t2, gens_n).compress();
-    alpha.append_to_poseidon(transcript);
-
-    let c = transcript.challenge_scalar();
-
-    let z1 = c * x + t1;
-    let z2 = c * r + t2;
-
-    (KnowledgeProof { alpha, z1, z2 }, C)
-  }
-
-  pub fn verify(
-    &self,
-    gens_n: &MultiCommitGens,
-    transcript: &mut PoseidonTranscript,
-    C: &CompressedGroup,
-  ) -> Result<(), ProofVerifyError> {
-    // transcript.append_protocol_name(KnowledgeProof::protocol_name());
-    C.append_to_poseidon(transcript);
-    self.alpha.append_to_poseidon(transcript);
-
-    let c = transcript.challenge_scalar();
-
-    let lhs = self.z1.commit(&self.z2, gens_n).compress();
-    let rhs = (C.unpack()?.mul(c) + self.alpha.unpack()?).compress();
-
-    if lhs == rhs {
-      Ok(())
-    } else {
-      Err(ProofVerifyError::InternalError)
-    }
-  }
-}
-
-#[derive(CanonicalSerialize, CanonicalDeserialize, Debug)]
-pub struct EqualityProof {
-  alpha: CompressedGroup,
-  z: Scalar,
-}
-
-impl EqualityProof {
-  fn protocol_name() -> &'static [u8] {
-    b"equality proof"
-  }
-
-  pub fn prove(
-    gens_n: &MultiCommitGens,
-    transcript: &mut PoseidonTranscript,
-    random_tape: &mut RandomTape,
-    v1: &Scalar,
-    s1: &Scalar,
-    v2: &Scalar,
-    s2: &Scalar,
-  ) -> (EqualityProof, CompressedGroup, CompressedGroup) {
-    // transcript.append_protocol_name(EqualityProof::protocol_name());
-
-    // produce a random Scalar
-    let r = random_tape.random_scalar(b"r");
-
-    let C1 = v1.commit(s1, gens_n).compress();
-    transcript.append_point(&C1);
-
-    let C2 = v2.commit(s2, gens_n).compress();
-    transcript.append_point(&C2);
-
-    let alpha = gens_n.h.mul(r).compress();
-    transcript.append_point(&alpha);
-
-    let c = transcript.challenge_scalar();
-
-    let z = c * ((*s1) - s2) + r;
-
-    (EqualityProof { alpha, z }, C1, C2)
-  }
-
-  pub fn verify(
-    &self,
-    gens_n: &MultiCommitGens,
-    transcript: &mut PoseidonTranscript,
-    C1: &CompressedGroup,
-    C2: &CompressedGroup,
-  ) -> Result<(), ProofVerifyError> {
-    // transcript.append_protocol_name(EqualityProof::protocol_name());
-
-    transcript.append_point(C1);
-    transcript.append_point(C2);
-    transcript.append_point(&self.alpha);
-
-    let c = transcript.challenge_scalar();
-    let rhs = {
-      let C = C1.unpack()? - C2.unpack()?;
-      (C.mul(c) + self.alpha.unpack()?).compress()
-    };
-    println!("rhs {:?}", rhs);
-
-    let lhs = gens_n.h.mul(self.z).compress();
-    println!("lhs {:?}", lhs);
-    if lhs == rhs {
-      Ok(())
-    } else {
-      Err(ProofVerifyError::InternalError)
-    }
-  }
-}
-
-#[derive(CanonicalSerialize, CanonicalDeserialize, Debug)]
-pub struct ProductProof {
-  alpha: CompressedGroup,
-  beta: CompressedGroup,
-  delta: CompressedGroup,
-  z: Vec<Scalar>,
-}
-
-impl ProductProof {
-  fn protocol_name() -> &'static [u8] {
-    b"product proof"
-  }
-
-  pub fn prove(
-    gens_n: &MultiCommitGens,
-    transcript: &mut PoseidonTranscript,
-    random_tape: &mut RandomTape,
-    x: &Scalar,
-    rX: &Scalar,
-    y: &Scalar,
-    rY: &Scalar,
-    z: &Scalar,
-    rZ: &Scalar,
-  ) -> (
-    ProductProof,
-    CompressedGroup,
-    CompressedGroup,
-    CompressedGroup,
-  ) {
-    // transcript.append_protocol_name(ProductProof::protocol_name());
-
-    // produce five random Scalar
-    let b1 = random_tape.random_scalar(b"b1");
-    let b2 = random_tape.random_scalar(b"b2");
-    let b3 = random_tape.random_scalar(b"b3");
-    let b4 = random_tape.random_scalar(b"b4");
-    let b5 = random_tape.random_scalar(b"b5");
-
-    let X_unc = x.commit(rX, gens_n);
-
-    let X = X_unc.compress();
-    transcript.append_point(&X);
-    let X_new = GroupElement::decompress(&X);
-
-    assert_eq!(X_unc, X_new.unwrap());
-
-    let Y = y.commit(rY, gens_n).compress();
-    transcript.append_point(&Y);
-
-    let Z = z.commit(rZ, gens_n).compress();
-    transcript.append_point(&Z);
-
-    let alpha = b1.commit(&b2, gens_n).compress();
-    transcript.append_point(&alpha);
-
-    let beta = b3.commit(&b4, gens_n).compress();
-    transcript.append_point(&beta);
-
-    let delta = {
-      let gens_X = &MultiCommitGens {
-        n: 1,
-        G: vec![GroupElement::decompress(&X).unwrap()],
-        h: gens_n.h,
-      };
-      b3.commit(&b5, gens_X).compress()
-    };
-    transcript.append_point(&delta);
-
-    let c = transcript.challenge_scalar();
-
-    let z1 = b1 + c * x;
-    let z2 = b2 + c * rX;
-    let z3 = b3 + c * y;
-    let z4 = b4 + c * rY;
-    let z5 = b5 + c * ((*rZ) - (*rX) * y);
-    let z = [z1, z2, z3, z4, z5].to_vec();
-
-    (
-      ProductProof {
-        alpha,
-        beta,
-        delta,
-        z,
-      },
-      X,
-      Y,
-      Z,
-    )
-  }
-
-  fn check_equality(
-    P: &CompressedGroup,
-    X: &CompressedGroup,
-    c: &Scalar,
-    gens_n: &MultiCommitGens,
-    z1: &Scalar,
-    z2: &Scalar,
-  ) -> bool {
-    println!("{:?}", X);
-    let lhs = (GroupElement::decompress(P).unwrap() + GroupElement::decompress(X).unwrap().mul(c))
-      .compress();
-    let rhs = z1.commit(z2, gens_n).compress();
-
-    lhs == rhs
-  }
-
-  pub fn verify(
-    &self,
-    gens_n: &MultiCommitGens,
-    transcript: &mut PoseidonTranscript,
-    X: &CompressedGroup,
-    Y: &CompressedGroup,
-    Z: &CompressedGroup,
-  ) -> Result<(), ProofVerifyError> {
-    // transcript.append_protocol_name(ProductProof::protocol_name());
-
-    X.append_to_poseidon(transcript);
-    Y.append_to_poseidon(transcript);
-    Z.append_to_poseidon(transcript);
-    self.alpha.append_to_poseidon(transcript);
-    self.beta.append_to_poseidon(transcript);
-    self.delta.append_to_poseidon(transcript);
-
-    let z1 = self.z[0];
-    let z2 = self.z[1];
-    let z3 = self.z[2];
-    let z4 = self.z[3];
-    let z5 = self.z[4];
-
-    let c = transcript.challenge_scalar();
-
-    if ProductProof::check_equality(&self.alpha, X, &c, gens_n, &z1, &z2)
-      && ProductProof::check_equality(&self.beta, Y, &c, gens_n, &z3, &z4)
-      && ProductProof::check_equality(
-        &self.delta,
-        Z,
-        &c,
-        &MultiCommitGens {
-          n: 1,
-          G: vec![X.unpack()?],
-          h: gens_n.h,
-        },
-        &z3,
-        &z5,
-      )
-    {
-      Ok(())
-    } else {
-      Err(ProofVerifyError::InternalError)
-    }
-  }
-}
-
-#[derive(Debug, CanonicalSerialize, CanonicalDeserialize)]
-pub struct DotProductProof {
-  delta: CompressedGroup,
-  beta: CompressedGroup,
-  z: Vec<Scalar>,
-  z_delta: Scalar,
-  z_beta: Scalar,
-}
-
-impl DotProductProof {
-  fn protocol_name() -> &'static [u8] {
-    b"dot product proof"
-  }
-
-  pub fn compute_dotproduct(a: &[Scalar], b: &[Scalar]) -> Scalar {
-    assert_eq!(a.len(), b.len());
-    (0..a.len()).map(|i| a[i] * b[i]).sum()
-  }
-
-  pub fn prove(
-    gens_1: &MultiCommitGens,
-    gens_n: &MultiCommitGens,
-    transcript: &mut PoseidonTranscript,
-    random_tape: &mut RandomTape,
-    x_vec: &[Scalar],
-    blind_x: &Scalar,
-    a_vec: &[Scalar],
-    y: &Scalar,
-    blind_y: &Scalar,
-  ) -> (DotProductProof, CompressedGroup, CompressedGroup) {
-    // transcript.append_protocol_name(DotProductProof::protocol_name());
-
-    let n = x_vec.len();
-    assert_eq!(x_vec.len(), a_vec.len());
-    assert_eq!(gens_n.n, a_vec.len());
-    assert_eq!(gens_1.n, 1);
-
-    // produce randomness for the proofs
-    let d_vec = random_tape.random_vector(b"d_vec", n);
-    let r_delta = random_tape.random_scalar(b"r_delta");
-    let r_beta = random_tape.random_scalar(b"r_beta");
-
-    let Cx = x_vec.commit(blind_x, gens_n).compress();
-    Cx.append_to_poseidon(transcript);
-
-    let Cy = y.commit(blind_y, gens_1).compress();
-    Cy.append_to_poseidon(transcript);
-
-    transcript.append_scalar_vector(a_vec);
-
-    let delta = d_vec.commit(&r_delta, gens_n).compress();
-    delta.append_to_poseidon(transcript);
-
-    let dotproduct_a_d = DotProductProof::compute_dotproduct(a_vec, &d_vec);
-
-    let beta = dotproduct_a_d.commit(&r_beta, gens_1).compress();
-    beta.append_to_poseidon(transcript);
-
-    let c = transcript.challenge_scalar();
-
-    let z = (0..d_vec.len())
-      .map(|i| c * x_vec[i] + d_vec[i])
-      .collect::<Vec<Scalar>>();
-
-    let z_delta = c * blind_x + r_delta;
-    let z_beta = c * blind_y + r_beta;
-
-    (
-      DotProductProof {
-        delta,
-        beta,
-        z,
-        z_delta,
-        z_beta,
-      },
-      Cx,
-      Cy,
-    )
-  }
-
-  pub fn verify(
-    &self,
-    gens_1: &MultiCommitGens,
-    gens_n: &MultiCommitGens,
-    transcript: &mut PoseidonTranscript,
-    a: &[Scalar],
-    Cx: &CompressedGroup,
-    Cy: &CompressedGroup,
-  ) -> Result<(), ProofVerifyError> {
-    assert_eq!(gens_n.n, a.len());
-    assert_eq!(gens_1.n, 1);
-
-    // transcript.append_protocol_name(DotProductProof::protocol_name());
-    Cx.append_to_poseidon(transcript);
-    Cy.append_to_poseidon(transcript);
-    transcript.append_scalar_vector(a);
-    self.delta.append_to_poseidon(transcript);
-    self.beta.append_to_poseidon(transcript);
-
-    let c = transcript.challenge_scalar();
-
-    let mut result =
-      Cx.unpack()?.mul(c) + self.delta.unpack()? == self.z.commit(&self.z_delta, gens_n);
-
-    let dotproduct_z_a = DotProductProof::compute_dotproduct(&self.z, a);
-    result &=
-      Cy.unpack()?.mul(c) + self.beta.unpack()? == dotproduct_z_a.commit(&self.z_beta, gens_1);
-    if result {
-      Ok(())
-    } else {
-      Err(ProofVerifyError::InternalError)
-    }
-  }
-}
-
 #[derive(Clone)]
 pub struct DotProductProofGens {
   n: usize,
@@ -574,9 +171,8 @@ impl DotProductProofLog {
     let z2_s = &self.z2;
 
     let lhs = ((Gamma_hat.mul(c_s) + beta_s).mul(a_hat_s) + delta_s).compress();
-    let rhs = ((g_hat + gens.gens_1.G[0].mul(a_hat_s)).mul(z1_s)
-      + gens.gens_1.h.mul(z2_s))
-    .compress();
+    let rhs =
+      ((g_hat + gens.gens_1.G[0].mul(a_hat_s)).mul(z1_s) + gens.gens_1.h.mul(z2_s)).compress();
 
     assert_eq!(lhs, rhs);
@@ -595,133 +191,6 @@ mod tests {
   use super::*;
   use ark_std::UniformRand;
 
-  #[test]
-  fn check_knowledgeproof() {
-    let mut rng = ark_std::rand::thread_rng();
-
-    let gens_1 = MultiCommitGens::new(1, b"test-knowledgeproof");
-
-    let x = Scalar::rand(&mut rng);
-    let r = Scalar::rand(&mut rng);
-
-    let params = poseidon_params();
-
-    let mut random_tape = RandomTape::new(b"proof");
-    let mut prover_transcript = PoseidonTranscript::new(&params);
-    let (proof, committed_value) =
-      KnowledgeProof::prove(&gens_1, &mut prover_transcript, &mut random_tape, &x, &r);
-
-    let mut verifier_transcript = PoseidonTranscript::new(&params);
-    assert!(proof
-      .verify(&gens_1, &mut verifier_transcript, &committed_value)
-      .is_ok());
-  }
-
-  #[test]
-  fn check_equalityproof() {
-    let mut rng = ark_std::rand::thread_rng();
-    let params = poseidon_params();
-
-    let gens_1 = MultiCommitGens::new(1, b"test-equalityproof");
-    let v1 = Scalar::rand(&mut rng);
-    let v2 = v1;
-    let s1 = Scalar::rand(&mut rng);
-    let s2 = Scalar::rand(&mut rng);
-
-    let mut random_tape = RandomTape::new(b"proof");
-    let mut prover_transcript = PoseidonTranscript::new(&params);
-    let (proof, C1, C2) = EqualityProof::prove(
-      &gens_1,
-      &mut prover_transcript,
-      &mut random_tape,
-      &v1,
-      &s1,
-      &v2,
-      &s2,
-    );
-
-    let mut verifier_transcript = PoseidonTranscript::new(&params);
-    assert!(proof
-      .verify(&gens_1, &mut verifier_transcript, &C1, &C2)
-      .is_ok());
-  }
-
-  #[test]
-  fn check_productproof() {
-    let mut rng = ark_std::rand::thread_rng();
-    let pt = GroupElement::rand(&mut rng);
-    let pt_c = pt.compress();
-    let pt2 = GroupElement::decompress(&pt_c).unwrap();
-    assert_eq!(pt, pt2);
-    let params = poseidon_params();
-
-    let gens_1 = MultiCommitGens::new(1, b"test-productproof");
-    let x = Scalar::rand(&mut rng);
-    let rX = Scalar::rand(&mut rng);
-    let y = Scalar::rand(&mut rng);
-    let rY = Scalar::rand(&mut rng);
-    let z = x * y;
-    let rZ = Scalar::rand(&mut rng);
-
-    let mut random_tape = RandomTape::new(b"proof");
-    let mut prover_transcript = PoseidonTranscript::new(&params);
-    let (proof, X, Y, Z) = ProductProof::prove(
-      &gens_1,
-      &mut prover_transcript,
-      &mut random_tape,
-      &x,
-      &rX,
-      &y,
-      &rY,
-      &z,
-      &rZ,
-    );
-
-    let mut verifier_transcript = PoseidonTranscript::new(&params);
-    assert!(proof
-      .verify(&gens_1, &mut verifier_transcript, &X, &Y, &Z)
-      .is_ok());
-  }
-
-  #[test]
-  fn check_dotproductproof() {
-    let mut rng = ark_std::rand::thread_rng();
-
-    let n = 1024;
-
-    let gens_1 = MultiCommitGens::new(1, b"test-two");
-    let gens_1024 = MultiCommitGens::new(n, b"test-1024");
-    let params = poseidon_params();
-
-    let mut x: Vec<Scalar> = Vec::new();
-    let mut a: Vec<Scalar> = Vec::new();
-    for _ in 0..n {
-      x.push(Scalar::rand(&mut rng));
-      a.push(Scalar::rand(&mut rng));
-    }
-    let y = DotProductProofLog::compute_dotproduct(&x, &a);
-    let r_x = Scalar::rand(&mut rng);
-    let r_y = Scalar::rand(&mut rng);
-
-    let mut random_tape = RandomTape::new(b"proof");
-    let mut prover_transcript = PoseidonTranscript::new(&params);
-    let (proof, Cx, Cy) = DotProductProof::prove(
-      &gens_1,
-      &gens_1024,
-      &mut prover_transcript,
-      &mut random_tape,
-      &x,
-      &r_x,
-      &a,
-      &y,
-      &r_y,
-    );
-
-    let mut verifier_transcript = PoseidonTranscript::new(&params);
-    assert!(proof
-      .verify(&gens_1, &gens_1024, &mut verifier_transcript, &a, &Cx, &Cy)
-      .is_ok());
-  }
 
   #[test]
   fn check_dotproductproof_log() {
@@ -733,7 +202,7 @@ mod tests {
     let x: Vec<Scalar> = (0..n).map(|_i| Scalar::rand(&mut rng)).collect();
     let a: Vec<Scalar> = (0..n).map(|_i| Scalar::rand(&mut rng)).collect();
 
-    let y = DotProductProof::compute_dotproduct(&x, &a);
+    let y = DotProductProofLog::compute_dotproduct(&x, &a);
     let r_x = Scalar::rand(&mut rng);
     let r_y = Scalar::rand(&mut rng);
diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs
index 79c10333..bcfefc88 100644
--- a/src/poseidon_transcript.rs
+++ b/src/poseidon_transcript.rs
@@ -7,7 +7,7 @@ use ark_crypto_primitives::sponge::{
   CryptographicSponge,
 };
 use ark_ec::pairing::Pairing;
-use ark_ff::{Field, PrimeField};
+use ark_ff::{PrimeField};
 use ark_poly_commit::multilinear_pc::data_structures::Commitment;
 use ark_serialize::CanonicalSerialize;
 use ark_serialize::Compress;
diff --git a/src/r1csinstance.rs b/src/r1csinstance.rs
index e3eb94e1..0cd8b747 100644
--- a/src/r1csinstance.rs
+++ b/src/r1csinstance.rs
@@ -1,5 +1,4 @@
 use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};
-use crate::transcript::AppendToTranscript;
 
 use super::dense_mlpoly::DensePolynomial;
 use super::errors::ProofVerifyError;
@@ -15,8 +14,6 @@ use ark_ff::Field;
 use ark_serialize::*;
 use ark_std::{One, UniformRand, Zero};
 use digest::{ExtendableOutput, Input};
-
-use merlin::Transcript;
 use sha3::Shake256;
 
 #[derive(Debug, CanonicalSerialize, CanonicalDeserialize, Clone)]
@@ -58,15 +55,6 @@ pub struct R1CSCommitment {
   comm: SparseMatPolyCommitment,
 }
 
-impl AppendToTranscript for R1CSCommitment {
-  fn append_to_transcript(&self, _label: &'static [u8], transcript: &mut Transcript) {
-    transcript.append_u64(b"num_cons", self.num_cons as u64);
-    transcript.append_u64(b"num_vars", self.num_vars as u64);
-    transcript.append_u64(b"num_inputs", self.num_inputs as u64);
-    self.comm.append_to_transcript(b"comm", transcript);
-  }
-}
-
 impl AppendToPoseidon for R1CSCommitment {
   fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) {
     transcript.append_u64(self.num_cons as u64);
diff --git a/src/r1csproof.rs b/src/r1csproof.rs
index 7789b492..ccf87d3e 100644
--- a/src/r1csproof.rs
+++ b/src/r1csproof.rs
@@ -1,5 +1,4 @@
 #![allow(clippy::too_many_arguments)]
-use super::commitments::MultiCommitGens;
 use super::dense_mlpoly::{DensePolynomial, EqPolynomial, PolyCommitmentGens};
 use super::errors::ProofVerifyError;
 use crate::constraints::{VerifierCircuit, VerifierConfig};
@@ -13,9 +12,9 @@ use crate::sumcheck::SumcheckInstanceProof;
 use ark_bls12_377::Bls12_377 as I;
 use ark_bw6_761::BW6_761 as P;
 use ark_ec::pairing::Pairing;
-use ark_poly::MultilinearExtension;
+
 use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof};
-use ark_poly_commit::multilinear_pc::MultilinearPC;
+
 
 use super::r1csinstance::R1CSInstance;
 
@@ -47,31 +46,9 @@ pub struct R1CSProof {
   pub t: <I as Pairing>::TargetField,
   pub mipp_proof: MippProof<I>,
 }
-#[derive(Clone)]
-pub struct R1CSSumcheckGens {
-  gens_1: MultiCommitGens,
-  gens_3: MultiCommitGens,
-  gens_4: MultiCommitGens,
-}
-
-// TODO: fix passing gens_1_ref
-impl R1CSSumcheckGens {
-  pub fn new(label: &'static [u8], gens_1_ref: &MultiCommitGens) -> Self {
-    let gens_1 = gens_1_ref.clone();
-    let gens_3 = MultiCommitGens::new(3, label);
-    let gens_4 = MultiCommitGens::new(4, label);
-
-    R1CSSumcheckGens {
-      gens_1,
-      gens_3,
-      gens_4,
-    }
-  }
-}
 
 #[derive(Clone)]
 pub struct R1CSGens {
-  gens_sc: R1CSSumcheckGens,
   gens_pc: PolyCommitmentGens,
 }
 
@@ -79,8 +56,7 @@ impl R1CSGens {
   pub fn new(label: &'static [u8], _num_cons: usize, num_vars: usize) -> Self {
     let num_poly_vars = num_vars.log_2();
     let gens_pc = PolyCommitmentGens::new(num_poly_vars, label);
-    let gens_sc = R1CSSumcheckGens::new(label, &gens_pc.gens.gens_1);
-    R1CSGens { gens_sc, gens_pc }
+    R1CSGens { gens_pc }
   }
 }
 
@@ -422,7 +398,7 @@ impl R1CSProof {
       transcript_sat_state: self.transcript_sat_state,
     };
 
-    let mut rng = ark_std::test_rng();
+    let _rng = ark_std::test_rng();
     let circuit = VerifierCircuit::new(&config, &mut rand::thread_rng()).unwrap();
 
     let nc_inner = verify_constraints_inner(circuit.clone(), &num_cons);
diff --git a/src/random.rs b/src/random.rs
index 56793f78..f54c6bbd 100644
--- a/src/random.rs
+++ b/src/random.rs
@@ -1,28 +1,27 @@
 use super::scalar::Scalar;
-use super::transcript::ProofTranscript;
+use crate::{parameters::poseidon_params, poseidon_transcript::PoseidonTranscript};
 use ark_std::UniformRand;
-use merlin::Transcript;
 
 pub struct RandomTape {
-  tape: Transcript,
+  tape: PoseidonTranscript,
 }
 
 impl RandomTape {
-  pub fn new(name: &'static [u8]) -> Self {
+  pub fn new(_name: &'static [u8]) -> Self {
     let tape = {
       let mut rng = ark_std::rand::thread_rng();
-      let mut tape = Transcript::new(name);
-      tape.append_scalar(b"init_randomness", &Scalar::rand(&mut rng));
+      let mut tape = PoseidonTranscript::new(&poseidon_params());
+      tape.append_scalar(&Scalar::rand(&mut rng));
       tape
     };
     Self { tape }
   }
 
-  pub fn random_scalar(&mut self, label: &'static [u8]) -> Scalar {
-    self.tape.challenge_scalar(label)
+  pub fn random_scalar(&mut self, _label: &'static [u8]) -> Scalar {
+    self.tape.challenge_scalar()
   }
 
-  pub fn random_vector(&mut self, label: &'static [u8], len: usize) -> Vec<Scalar> {
-    self.tape.challenge_vector(label, len)
+  pub fn random_vector(&mut self, _label: &'static [u8], len: usize) -> Vec<Scalar> {
+    self.tape.challenge_vector(len)
   }
 }
diff --git a/src/sparse_mlpoly.rs b/src/sparse_mlpoly.rs
index d0663a93..c2764eb2 100644
--- a/src/sparse_mlpoly.rs
+++ b/src/sparse_mlpoly.rs
@@ -13,11 +13,9 @@ use super::product_tree::{DotProductCircuit, ProductCircuit, ProductCircuitEvalP
 use super::random::RandomTape;
 use super::scalar::Scalar;
 use super::timer::Timer;
-use super::transcript::AppendToTranscript;
 use ark_ff::{Field, One, Zero};
 use ark_serialize::*;
 use core::cmp::Ordering;
-use merlin::Transcript;
 
 #[derive(Debug, CanonicalSerialize, CanonicalDeserialize, Clone)]
 pub struct SparseMatEntry {
@@ -209,14 +207,6 @@ impl DerefsEvalProof {
   }
 }
 
-impl AppendToTranscript for DerefsCommitment {
-  fn append_to_transcript(&self, label: &'static [u8], transcript: &mut Transcript) {
-    transcript.append_message(b"derefs_commitment", b"begin_derefs_commitment");
-    self.comm_ops_val.append_to_transcript(label, transcript);
-    transcript.append_message(b"derefs_commitment", b"end_derefs_commitment");
-  }
-}
-
 impl AppendToPoseidon for DerefsCommitment {
   fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) {
     self.comm_ops_val.append_to_poseidon(transcript);
@@ -336,20 +326,6 @@ pub struct SparseMatPolyCommitment {
   comm_comb_mem: PolyCommitment,
 }
 
-impl AppendToTranscript for SparseMatPolyCommitment {
-  fn append_to_transcript(&self, _label: &'static [u8], transcript: &mut Transcript) {
-    transcript.append_u64(b"batch_size", self.batch_size as u64);
-    transcript.append_u64(b"num_ops", self.num_ops as u64);
-    transcript.append_u64(b"num_mem_cells", self.num_mem_cells as u64);
-    self
-      .comm_comb_ops
-      .append_to_transcript(b"comm_comb_ops", transcript);
-    self
-      .comm_comb_mem
-      .append_to_transcript(b"comm_comb_mem", transcript);
-  }
-}
-
 impl AppendToPoseidon for SparseMatPolyCommitment {
   fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) {
     transcript.append_u64(self.batch_size as u64);
diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs
index 04d21ba1..b423a2dc 100644
--- a/src/sqrt_pst.rs
+++ b/src/sqrt_pst.rs
@@ -1,7 +1,7 @@
 use crate::mipp::MippProof;
-use ark_bls12_377::{Bls12_377 as I, G1Affine, G1Projective as G1, G2Affine, G2Projective as G2};
+use ark_bls12_377::{Bls12_377 as I, G1Projective as G1};
 use ark_ec::{pairing::Pairing, scalar_mul::variable_base::VariableBaseMSM, CurveGroup};
-use ark_ff::{BigInteger256, One, PrimeField};
+use ark_ff::{One};
 use ark_poly_commit::multilinear_pc::{
   data_structures::{Commitment, CommitterKey, Proof, VerifierKey},
   MultilinearPC,
diff --git a/src/sumcheck.rs b/src/sumcheck.rs
index b34f0375..617bfaac 100644
--- a/src/sumcheck.rs
+++ b/src/sumcheck.rs
@@ -60,125 +60,6 @@ impl SumcheckInstanceProof {
   }
 }
 
-// #[derive(CanonicalSerialize, CanonicalDeserialize, Debug)]
-// pub struct ZKSumcheckInstanceProof {
-//   comm_polys: Vec<CompressedGroup>,
-//   comm_evals: Vec<CompressedGroup>,
-//   proofs: Vec<DotProductProof>,
-// }
-
-// impl ZKSumcheckInstanceProof {
-//   pub fn new(
-//     comm_polys: Vec<CompressedGroup>,
-//     comm_evals: Vec<CompressedGroup>,
-//     proofs: Vec<DotProductProof>,
-//   ) -> Self {
-//     ZKSumcheckInstanceProof {
-//       comm_polys,
-//       comm_evals,
-//       proofs,
-//     }
-//   }
-
-//   pub fn verify(
-//     &self,
-//     comm_claim: &CompressedGroup,
-//     num_rounds: usize,
-//     degree_bound: usize,
-//     gens_1: &MultiCommitGens,
-//     gens_n: &MultiCommitGens,
-//     transcript: &mut Transcript,
-//   ) -> Result<(CompressedGroup, Vec<Scalar>), ProofVerifyError> {
-//     // verify degree bound
-//     assert_eq!(gens_n.n, degree_bound + 1);
-
-//     // verify that there is a univariate polynomial for each round
-//     assert_eq!(self.comm_polys.len(), num_rounds);
-//     assert_eq!(self.comm_evals.len(), num_rounds);
-
-//     let mut r: Vec<Scalar> = Vec::new();
-//     for i in 0..self.comm_polys.len() {
-//       let comm_poly = &self.comm_polys[i];
-
-//       // append the prover's polynomial to the transcript
-//       comm_poly.append_to_transcript(b"comm_poly", transcript);
-
-//       //derive the verifier's challenge for the next round
-//       let r_i = transcript.challenge_scalar(b"challenge_nextround");
-
-//       // verify the proof of sum-check and evals
-//       let res = {
-//         let comm_claim_per_round = if i == 0 {
-//           comm_claim
-//         } else {
-//           &self.comm_evals[i - 1]
-//         };
-//         let mut comm_eval = &self.comm_evals[i];
-//         // add two claims to transcript
-//         comm_claim_per_round.append_to_transcript(transcript);
-//         comm_eval.append_to_transcript(transcript);
-//         // produce two weights
-//         let w = transcript.challenge_vector(2);
-//         // compute a weighted sum of the RHS
-//         let comm_target = GroupElement::vartime_multiscalar_mul(
-//           w.as_slice(),
-//           iter::once(&comm_claim_per_round)
-//             .chain(iter::once(&comm_eval))
-//             .map(|pt| GroupElement::decompress(pt).unwrap())
-//             .collect::<Vec<GroupElement>>()
-//             .as_slice(),
-//         )
-//         .compress();
-//         let a = {
-//           // the vector to use to decommit for sum-check test
-//           let a_sc = {
-//             let mut a = vec![Scalar::one(); degree_bound + 1];
-//             a[0] += Scalar::one();
-//             a
-//           };
-//           // the vector to use to decommit for evaluation
-//           let a_eval = {
-//             let mut a = vec![Scalar::one(); degree_bound + 1];
-//             for j in 1..a.len() {
-//               a[j] = a[j - 1] * r_i;
-//             }
-//             a
-//           };
-//           // take weighted sum of the two vectors using w
-//           assert_eq!(a_sc.len(), a_eval.len());
-//           (0..a_sc.len())
-//             .map(|i| w[0] * a_sc[i] + w[1] * a_eval[i])
-//             .collect::<Vec<Scalar>>()
-//         };
-//         self.proofs[i]
-//           .verify(
-//             gens_1,
-//             gens_n,
-//             transcript,
-//             &a,
-//             &self.comm_polys[i],
-//             &comm_target,
-//           )
-//           .is_ok()
-//       };
-//       if !res {
-//         return Err(ProofVerifyError::InternalError);
-//       }
-//       r.push(r_i);
-//     }
-//     Ok((self.comm_evals[&self.comm_evals.len() - 1].clone(), r))
-//   }
-// }
-
 impl SumcheckInstanceProof {
   pub fn prove_cubic_with_additive_term<F>(
     claim: &Scalar,
@@ -554,360 +435,3 @@ impl SumcheckInstanceProof {
     )
   }
 }
-
-// impl ZKSumcheckInstanceProof {
-//   pub fn prove_quad<F>(
-//     claim: &Scalar,
-//     blind_claim: &Scalar,
-//     num_rounds: usize,
-//     poly_A: &mut DensePolynomial,
-//     poly_B: &mut DensePolynomial,
-//     comb_func: F,
-//     gens_1: &MultiCommitGens,
-//     gens_n: &MultiCommitGens,
-//     transcript: &mut Transcript,
-//     random_tape: &mut RandomTape,
-//   ) -> (Self, Vec<Scalar>, Vec<Scalar>, Scalar)
-//   where
-//     F: Fn(&Scalar, &Scalar) -> Scalar,
-//   {
-//     let (blinds_poly, blinds_evals) = (
-//       random_tape.random_vector(b"blinds_poly", num_rounds),
-//       random_tape.random_vector(b"blinds_evals", num_rounds),
-//     );
-//     let mut claim_per_round = *claim;
-//     let mut comm_claim_per_round = claim_per_round.commit(blind_claim, gens_1).compress();
-//     let mut r: Vec<Scalar> = Vec::new();
-//     let mut comm_polys: Vec<CompressedGroup> = Vec::new();
-//     let mut comm_evals: Vec<CompressedGroup> = Vec::new();
-//     let mut proofs: Vec<DotProductProof> = Vec::new();
-//     for j in 0..num_rounds {
-//       let (poly, comm_poly) = {
-//         let mut eval_point_0 = Scalar::zero();
-//         let mut eval_point_2 = Scalar::zero();
-//         let len = poly_A.len() / 2;
-//         for i in 0..len {
-//           // eval 0: bound_func is A(low)
-//           eval_point_0 += comb_func(&poly_A[i], &poly_B[i]);
-//           // eval 2: bound_func is -A(low) + 2*A(high)
-//           let poly_A_bound_point = poly_A[len + i] + poly_A[len + i] - poly_A[i];
-//           let poly_B_bound_point = poly_B[len + i] + poly_B[len + i] - poly_B[i];
-//           eval_point_2 += comb_func(&poly_A_bound_point, &poly_B_bound_point);
-//         }
-//         let evals = vec![eval_point_0, claim_per_round - eval_point_0, eval_point_2];
-//         let poly = UniPoly::from_evals(&evals);
-//         let comm_poly = poly.commit(gens_n, &blinds_poly[j]).compress();
-//         (poly, comm_poly)
-//       };
-//       // append the prover's message to the transcript
-//       comm_poly.append_to_transcript(b"comm_poly", transcript);
-//       comm_polys.push(comm_poly);
-//       //derive the verifier's challenge for the next round
-//       let r_j = transcript.challenge_scalar(b"challenge_nextround");
-//       // bound all tables to the verifier's challenege
-//       poly_A.bound_poly_var_top(&r_j);
-//       poly_B.bound_poly_var_top(&r_j);
-//       // produce a proof of sum-check and of evaluation
-//       let (proof, claim_next_round, comm_claim_next_round) = {
-//         let eval = poly.evaluate(&r_j);
-//         let comm_eval = eval.commit(&blinds_evals[j], gens_1).compress();
-//         // we need to prove the following under homomorphic commitments:
-//         // (1) poly(0) + poly(1) = claim_per_round
-//         // (2) poly(r_j) = eval
-//         // Our technique is to leverage dot product proofs:
-//         // (1) we can prove: <poly_in_coeffs_form, ...> = claim_per_round
-//         // (2) we can prove: <poly_in_coeffs_form, ...> = eval
-//         // add two claims to transcript
-//         comm_claim_per_round.append_to_transcript(transcript);
-//         comm_eval.append_to_transcript(transcript);
-//         // produce two weights
-//         let w = transcript.challenge_vector(2);
-//         // compute a weighted sum of the RHS
-//         let target = w[0] * claim_per_round + w[1] * eval;
-//         let comm_target = GroupElement::vartime_multiscalar_mul(
-//           w.as_slice(),
-//           iter::once(&comm_claim_per_round)
-//             .chain(iter::once(&comm_eval))
-//             .map(|pt| GroupElement::decompress(pt).unwrap())
-//             .collect::<Vec<GroupElement>>()
-//             .as_slice(),
-//         )
-//         .compress();
-//         let blind = {
-//           let blind_sc = if j == 0 {
-//             blind_claim
-//           } else {
-//             &blinds_evals[j - 1]
-//           };
-//           let blind_eval = &blinds_evals[j];
-//           w[0] * blind_sc + w[1] * blind_eval
-//         };
-//         assert_eq!(target.commit(&blind, gens_1).compress(), comm_target);
-//         let a = {
-//           // the vector to use to decommit for sum-check test
-//           let a_sc = {
-//             let mut a = vec![Scalar::one(); poly.degree() + 1];
-//             a[0] += Scalar::one();
-//             a
-//           };
-//           // the vector to use to decommit for evaluation
-//           let a_eval = {
-//             let mut a = vec![Scalar::one(); poly.degree() + 1];
-//             for j in 1..a.len() {
-//               a[j] = a[j - 1] * r_j;
-//             }
-//             a
-//           };
-//           // take weighted sum of the two vectors using w
-//           assert_eq!(a_sc.len(), a_eval.len());
-//           (0..a_sc.len())
-//             .map(|i| w[0] * a_sc[i] + w[1] * a_eval[i])
-//             .collect::<Vec<Scalar>>()
-//         };
-//         let (proof, _comm_poly, _comm_sc_eval) = DotProductProof::prove(
-//           gens_1,
-//           gens_n,
-//           transcript,
-//           random_tape,
-//           &poly.as_vec(),
-//           &blinds_poly[j],
-//           &a,
-//           &target,
-//           &blind,
-//         );
-//         (proof, eval, comm_eval)
-//       };
-//       claim_per_round = claim_next_round;
-//       comm_claim_per_round = comm_claim_next_round;
-//       proofs.push(proof);
-//       r.push(r_j);
-//       comm_evals.push(comm_claim_per_round.clone());
-//     }
-//     (
-//       ZKSumcheckInstanceProof::new(comm_polys, comm_evals, proofs),
-//       r,
-//       vec![poly_A[0], poly_B[0]],
-//       blinds_evals[num_rounds - 1],
-//     )
-//   }
-
-//   pub fn prove_cubic_with_additive_term<F>(
-//     claim: &Scalar,
-//     blind_claim: &Scalar,
-//     num_rounds: usize,
-//     poly_A: &mut DensePolynomial,
-//     poly_B: &mut DensePolynomial,
-//     poly_C: &mut DensePolynomial,
-//     poly_D: &mut DensePolynomial,
-//     comb_func: F,
-//     gens_1: &MultiCommitGens,
-//     gens_n: &MultiCommitGens,
-//     transcript: &mut Transcript,
-//     random_tape: &mut RandomTape,
-//   ) -> (Self, Vec<Scalar>, Vec<Scalar>, Scalar)
-//   where
-//     F: Fn(&Scalar, &Scalar, &Scalar, &Scalar) -> Scalar,
-//   {
-//     let (blinds_poly, blinds_evals) = (
-//       random_tape.random_vector(b"blinds_poly", num_rounds),
-//       random_tape.random_vector(b"blinds_evals", num_rounds),
-//     );
-//     let mut claim_per_round = *claim;
-//     let mut comm_claim_per_round = claim_per_round.commit(blind_claim, gens_1).compress();
-//     let mut r: Vec<Scalar> = Vec::new();
-//     let mut comm_polys: Vec<CompressedGroup> = Vec::new();
-//     let mut comm_evals: Vec<CompressedGroup> = Vec::new();
-//     let mut proofs: Vec<DotProductProof> = Vec::new();
-//     for j in 0..num_rounds {
-//       let (poly, comm_poly) = {
-//         let mut eval_point_0 = Scalar::zero();
-//         let mut eval_point_2 = Scalar::zero();
-//         let mut eval_point_3 = Scalar::zero();
-//         let len = poly_A.len() / 2;
-//         for i in 0..len {
-//           // eval 0: bound_func is A(low)
-//           eval_point_0 += comb_func(&poly_A[i], &poly_B[i], &poly_C[i], &poly_D[i]);
-//           // eval 2: bound_func is -A(low) + 2*A(high)
-//           let poly_A_bound_point = poly_A[len + i] + poly_A[len + i] - poly_A[i];
-//           let poly_B_bound_point = poly_B[len + i] + poly_B[len + i] - poly_B[i];
-//           let poly_C_bound_point = poly_C[len + i] + poly_C[len + i] - poly_C[i];
-//           let poly_D_bound_point = poly_D[len + i] + poly_D[len + i] - poly_D[i];
-//           eval_point_2 += comb_func(
-//             &poly_A_bound_point,
-//             &poly_B_bound_point,
-//             &poly_C_bound_point,
-//             &poly_D_bound_point,
-//           );
-//           // eval 3: bound_func is -2A(low) + 3A(high); computed incrementally with bound_func applied to eval(2)
-//           let poly_A_bound_point = poly_A_bound_point + poly_A[len + i] - poly_A[i];
-//           let poly_B_bound_point = poly_B_bound_point + poly_B[len + i] - poly_B[i];
-//           let poly_C_bound_point = poly_C_bound_point + poly_C[len + i] - poly_C[i];
-//           let poly_D_bound_point = poly_D_bound_point + poly_D[len + i] - poly_D[i];
-//           eval_point_3 += comb_func(
-//             &poly_A_bound_point,
-//             &poly_B_bound_point,
-//             &poly_C_bound_point,
-//             &poly_D_bound_point,
-//           );
-//         }
-//         let evals = vec![
-//           eval_point_0,
-//           claim_per_round - eval_point_0,
-//           eval_point_2,
-//           eval_point_3,
-//         ];
-//         let poly = UniPoly::from_evals(&evals);
-//         let comm_poly = poly.commit(gens_n, &blinds_poly[j]).compress();
-//         (poly, comm_poly)
-//       };
-//       // append the prover's message to the transcript
-//       comm_poly.append_to_transcript(b"comm_poly", transcript);
-//       comm_polys.push(comm_poly);
-//       //derive the verifier's challenge for the next round
-//       let r_j = transcript.challenge_scalar(b"challenge_nextround");
-//       // bound all tables to the verifier's challenege
-//       poly_A.bound_poly_var_top(&r_j);
-//       poly_B.bound_poly_var_top(&r_j);
-//       poly_C.bound_poly_var_top(&r_j);
-//       poly_D.bound_poly_var_top(&r_j);
-//       // produce a proof of sum-check and of evaluation
-//       let (proof, claim_next_round, comm_claim_next_round) = {
-//         let eval = poly.evaluate(&r_j);
-//         let comm_eval = eval.commit(&blinds_evals[j], gens_1).compress();
-//         // we need to prove the following under homomorphic commitments:
-//         // (1) poly(0) + poly(1) = claim_per_round
-//         // (2) poly(r_j) = eval
-//         // Our technique is to leverage dot product proofs:
-//         // (1) we can prove: <poly_in_coeffs_form, ...> = claim_per_round
-//         // (2) we can prove: <poly_in_coeffs_form, ...> = eval
-//         // add two claims to transcript
-//         comm_claim_per_round.append_to_transcript(transcript);
-//         comm_eval.append_to_transcript(transcript);
-//         // produce two weights
-//         let w = transcript.challenge_vector(2);
-//         // compute a weighted sum of the RHS
-//         let target = w[0] * claim_per_round + w[1] * eval;
-//         let comm_target = GroupElement::vartime_multiscalar_mul(
-//           w.as_slice(),
-//           iter::once(&comm_claim_per_round)
-//             .chain(iter::once(&comm_eval))
-//             .map(|pt| GroupElement::decompress(pt).unwrap())
-//             .collect::<Vec<GroupElement>>()
-//             .as_slice(),
-//         )
-//         .compress();
-//         let blind = {
-//           let blind_sc = if j == 0 {
-//             blind_claim
-//           } else {
-//             &blinds_evals[j - 1]
-//           };
-//           let blind_eval = &blinds_evals[j];
-//           w[0] * blind_sc + w[1] * blind_eval
-//         };
-//         let res = target.commit(&blind, gens_1);
-//         assert_eq!(res.compress(), comm_target);
-//         let a = {
-//           // the vector to use to decommit for sum-check test
-//           let a_sc = {
-//             let mut a = vec![Scalar::one(); poly.degree() + 1];
-//             a[0] += Scalar::one();
-//             a
-//           };
-//           // the vector to use to decommit for evaluation
-//           let a_eval = {
-//             let mut a = vec![Scalar::one(); poly.degree() + 1];
-//             for j in 1..a.len() {
-//               a[j] = a[j - 1] * r_j;
-//             }
-//             a
-//           };
-//           // take weighted sum of the two vectors using w
-//           assert_eq!(a_sc.len(), a_eval.len());
-//           (0..a_sc.len())
-//             .map(|i| w[0] * a_sc[i] + w[1] * a_eval[i])
-//             .collect::<Vec<Scalar>>()
-//         };
-//         let (proof, _comm_poly, _comm_sc_eval) = DotProductProof::prove(
-//           gens_1,
-//           gens_n,
-//           transcript,
-//           random_tape,
-//           &poly.as_vec(),
-//           &blinds_poly[j],
-//           &a,
-//           &target,
-//           &blind,
-//         );
-//         (proof, eval, comm_eval)
-//       };
-//       proofs.push(proof);
-//       claim_per_round = claim_next_round;
-//       comm_claim_per_round = comm_claim_next_round;
-//       r.push(r_j);
-//       comm_evals.push(comm_claim_per_round.clone());
-//     }
-//     (
-//       ZKSumcheckInstanceProof::new(comm_polys, comm_evals, proofs),
-//       r,
-//       vec![poly_A[0], poly_B[0], poly_C[0], poly_D[0]],
-//       blinds_evals[num_rounds - 1],
-//     )
-//   }
-// }
diff --git a/src/transcript.rs b/src/transcript.rs
deleted file mode 100644
index 1932590f..00000000
--- a/src/transcript.rs
+++ /dev/null
@@ -1,69 +0,0 @@
-use super::scalar::Scalar;
-use crate::group::CompressedGroup;
-use ark_ff::{BigInteger, PrimeField};
-use ark_serialize::{CanonicalSerialize, Compress};
-use merlin::Transcript;
-
-pub trait ProofTranscript {
-  fn append_protocol_name(&mut self, protocol_name: &'static [u8]);
-  fn append_scalar(&mut self, label: &'static [u8], scalar: &Scalar);
-  fn append_point(&mut self, label: &'static [u8], point: &CompressedGroup);
-  fn challenge_scalar(&mut self, label: &'static [u8]) -> Scalar;
-  fn challenge_vector(&mut self, label: &'static [u8], len: usize) -> Vec<Scalar>;
-}
-
-impl ProofTranscript for Transcript {
-  fn append_protocol_name(&mut self, protocol_name: &'static [u8]) {
-    self.append_message(b"protocol-name", protocol_name);
-  }
-
-  fn append_scalar(&mut self, label: &'static [u8], scalar: &Scalar) {
-    self.append_message(label, scalar.into_bigint().to_bytes_le().as_slice());
-  }
-
-  fn append_point(&mut self, label: &'static [u8], point: &CompressedGroup) {
-    let mut point_encoded = Vec::new();
-    point
-      .serialize_with_mode(&mut point_encoded, Compress::Yes)
-      .unwrap();
-    self.append_message(label, point_encoded.as_slice());
-  }
-
-  fn challenge_scalar(&mut self, label: &'static [u8]) -> Scalar {
-    let mut buf = [0u8; 64];
-    self.challenge_bytes(label, &mut buf);
-    Scalar::from_le_bytes_mod_order(&buf)
-  }
-
-  fn challenge_vector(&mut self, label: &'static [u8], len: usize) -> Vec<Scalar> {
-    (0..len)
-      .map(|_i| self.challenge_scalar(label))
-      .collect::<Vec<Scalar>>()
-  }
-}
-
-pub trait AppendToTranscript {
-  fn append_to_transcript(&self, label: &'static [u8], transcript: &mut Transcript);
-}
-
-impl AppendToTranscript for Scalar {
-  fn append_to_transcript(&self, label: &'static [u8], transcript: &mut Transcript) {
-    transcript.append_scalar(label, self);
-  }
-}
-
-impl AppendToTranscript for [Scalar] {
-  fn append_to_transcript(&self, label: &'static [u8], transcript: &mut Transcript) {
-    transcript.append_message(label, b"begin_append_vector");
-    for item in self {
-      transcript.append_scalar(label, item);
-    }
-    transcript.append_message(label, b"end_append_vector");
-  }
-}
-
-impl AppendToTranscript for CompressedGroup {
-  fn append_to_transcript(&self, label: &'static [u8], transcript: &mut Transcript) {
-    transcript.append_point(label, self);
-  }
-}
diff --git a/src/unipoly.rs b/src/unipoly.rs
index d97b1892..93454a20 100644
--- a/src/unipoly.rs
+++ b/src/unipoly.rs
@@ -1,12 +1,7 @@
-use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};
-
-use super::commitments::{Commitments, MultiCommitGens};
-use super::group::GroupElement;
 use super::scalar::Scalar;
-use super::transcript::{AppendToTranscript, ProofTranscript};
+use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript};
 use ark_ff::Field;
 use ark_serialize::*;
-use merlin::Transcript;
 
 // ax^2 + bx + c stored as vec![c,b,a]
 // ax^3 + bx^2 + cx + d stored as vec![d,c,b,a]
 #[derive(Debug, CanonicalDeserialize, CanonicalSerialize, Clone)]
@@ -60,10 +55,6 @@ impl UniPoly {
     self.coeffs.len() - 1
   }
 
-  pub fn as_vec(&self) -> Vec<Scalar> {
-    self.coeffs.clone()
-  }
-
  pub fn eval_at_zero(&self) -> Scalar {
     self.coeffs[0]
   }
@@ -89,10 +80,6 @@ impl UniPoly {
       coeffs_except_linear_term,
     }
   }
-
-  pub fn commit(&self, gens: &MultiCommitGens, blind: &Scalar) -> GroupElement {
-    self.coeffs.commit(blind, gens)
-  }
 }
 
 impl CompressedUniPoly {
@@ -122,16 +109,6 @@ impl AppendToPoseidon for UniPoly {
   }
 }
 
-impl AppendToTranscript for UniPoly {
-  fn append_to_transcript(&self, label: &'static [u8], transcript: &mut Transcript) {
-    transcript.append_message(label, b"UniPoly_begin");
-    for i in 0..self.coeffs.len() {
-      transcript.append_scalar(b"coeff", &self.coeffs[i]);
-    }
-    transcript.append_message(label, b"UniPoly_end");
-  }
-}
-
 #[cfg(test)]
 mod tests {