Skip to content

Commit

Permalink
Merge branch 'snarkpack-integration' into feat/remove-useless
Browse files Browse the repository at this point in the history
  • Loading branch information
maramihali committed Feb 7, 2023
2 parents cdf8bd8 + 71e6762 commit 1a3c209
Show file tree
Hide file tree
Showing 17 changed files with 244 additions and 243 deletions.
52 changes: 21 additions & 31 deletions .github/workflows/testudo.yml
Original file line number Diff line number Diff line change
@@ -1,37 +1,27 @@
name: Build and Test Testudo

on:
push:
branches: [master]
pull_request:
branches: [master]
# The crate ark-ff uses the macro llvm_asm! when emitting asm which returns an
# error because it was deprecated in favour of asm!. We temporarily overcome
# this problem by setting the environment variable below (until the crate
# is updated).
env:
RUSTFLAGS: "--emit asm -C llvm-args=-x86-asm-syntax=intel"
on: [push, pull_request]

jobs:
build_nightly:
cargo-test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install
run: rustup default nightly
- name: Install rustfmt Components
run: rustup component add rustfmt
# - name: Install clippy
# run: rustup component add clippy
- name: Build
run: cargo build --verbose
- name: Run tests
run: cargo test --release --all-features --verbose
- name: Build examples
run: cargo build --examples --verbose
- name: Check Rustfmt Code Style
run: cargo fmt --all -- --check
# cargo clippy uses cargo check, which returns an error when asm is emitted;
# we want to emit asm for ark-ff operations, so we avoid using clippy for now
# - name: Check clippy warnings
# run: cargo clippy --all-targets --all-features
- name: Checkout sources
uses: actions/checkout@v2
with:
submodules: recursive

- name: Install toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
override: true

- uses: Swatinem/rust-cache@v2
with:
shared-key: cache-${{ hashFiles('**/Cargo.lock') }}
cache-on-failure: true

- name: cargo test
run: RUST_LOG=info cargo test --all --all-features -- --nocapture
30 changes: 14 additions & 16 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,20 +18,19 @@ itertools = "0.10.0"
colored = "2.0.0"
thiserror = "1.0"
json = "0.12.4"
ark-ff = { version = "^0.3.0", default-features = false }
ark-ec = { version = "^0.3.0", default-features = false }
ark-std = { version = "^0.3.0"}
ark-bls12-377 = { version = "^0.3.0", features = ["r1cs","curve"] }
ark-serialize = { version = "^0.3.0", features = ["derive"] }
ark-sponge = { version = "^0.3.0" , features = ["r1cs"] }
ark-crypto-primitives = { version = "^0.3.0", default-features = true }
ark-r1cs-std = { version = "^0.3.0", default-features = false }
ark-nonnative-field = { version = "0.3.0", default-features = false }
ark-relations = { version = "^0.3.0", default-features = false, optional = true }
ark-ff = { version = "^0.4.0", default-features = false }
ark-ec = { version = "^0.4.0", default-features = false }
ark-std = { version = "^0.4.0"}
ark-bls12-377 = { version = "^0.4.0", features = ["r1cs","curve"] }
ark-serialize = { version = "^0.4.0", features = ["derive"] }
ark-crypto-primitives = {version = "^0.4.0", features = ["sponge","r1cs","snark"] }
ark-r1cs-std = { version = "^0.4.0", default-features = false }
ark-relations = { version = "^0.4.0", default-features = false, optional = true }
ark-snark = { version = "^0.4.0", default-features = false }
ark-groth16 = { version = "^0.3.0", features = ["r1cs"] }
ark-bw6-761 = { version = "^0.3.0" }
ark-poly-commit = { version = "^0.3.0" }
ark-poly = {version = "^0.3.0"}
ark-bw6-761 = { version = "^0.4.0" }
ark-poly-commit = { version = "^0.4.0" }
ark-poly = {version = "^0.4.0"}


lazy_static = "1.4.0"
Expand Down Expand Up @@ -80,7 +79,6 @@ parallel = [ "std", "ark-ff/parallel", "ark-std/parallel", "ark-ec/parallel", "a
std = ["ark-ff/std", "ark-ec/std", "ark-std/std", "ark-relations/std", "ark-serialize/std"]

[patch.crates-io]
ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/", rev = "a2a5ac491ae005ba2afd03fd21b7d3160d794a83"}
ark-poly-commit = {git = "https://github.com/maramihali/poly-commit", branch="pst_g2"}

ark-poly-commit = {git = "https://github.com/cryptonetlab/ark-polycommit", branch="feat/pst_on_g2"}
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", features = ["r1cs"] }

18 changes: 9 additions & 9 deletions examples/cubic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ fn produce_r1cs() -> (
let mut B: Vec<(usize, usize, Vec<u8>)> = Vec::new();
let mut C: Vec<(usize, usize, Vec<u8>)> = Vec::new();

let one = Scalar::one().into_repr().to_bytes_le();
let one = Scalar::one().into_bigint().to_bytes_le();

// R1CS is a set of three sparse matrices A B C, where there is a row for every
// constraint and a column for every entry in z = (vars, 1, inputs)
Expand Down Expand Up @@ -67,7 +67,7 @@ fn produce_r1cs() -> (
// constraint 3 entries in (A,B,C)
// constraint 3 is (Z3 + 5) * 1 - I0 = 0.
A.push((3, 3, one.clone()));
A.push((3, num_vars, Scalar::from(5u32).into_repr().to_bytes_le()));
A.push((3, num_vars, Scalar::from(5u32).into_bigint().to_bytes_le()));
B.push((3, num_vars, one.clone()));
C.push((3, num_vars + 1, one));

Expand All @@ -82,16 +82,16 @@ fn produce_r1cs() -> (
let i0 = z3 + Scalar::from(5u32); // constraint 3

// create a VarsAssignment
let mut vars = vec![Scalar::zero().into_repr().to_bytes_le(); num_vars];
vars[0] = z0.into_repr().to_bytes_le();
vars[1] = z1.into_repr().to_bytes_le();
vars[2] = z2.into_repr().to_bytes_le();
vars[3] = z3.into_repr().to_bytes_le();
let mut vars = vec![Scalar::zero().into_bigint().to_bytes_le(); num_vars];
vars[0] = z0.into_bigint().to_bytes_le();
vars[1] = z1.into_bigint().to_bytes_le();
vars[2] = z2.into_bigint().to_bytes_le();
vars[3] = z3.into_bigint().to_bytes_le();
let assignment_vars = VarsAssignment::new(&vars).unwrap();

// create an InputsAssignment
let mut inputs = vec![Scalar::zero().into_repr().to_bytes_le(); num_inputs];
inputs[0] = i0.into_repr().to_bytes_le();
let mut inputs = vec![Scalar::zero().into_bigint().to_bytes_le(); num_inputs];
inputs[0] = i0.into_bigint().to_bytes_le();
let assignment_inputs = InputsAssignment::new(&inputs).unwrap();

// check if the instance we created is satisfiable
Expand Down
4 changes: 3 additions & 1 deletion profiler/nizk.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,9 @@ pub fn main() {
let proof = NIZK::prove(&inst, vars, &inputs, &gens, &mut prover_transcript);

let mut proof_encoded = Vec::new();
proof.serialize(&mut proof_encoded).unwrap();
proof
.serialize_with_mode(&mut proof_encoded, Compress::Yes)
.unwrap();
let msg_proof_len = format!("NIZK::proof_compressed_len {:?}", proof_encoded.len());
print(&msg_proof_len);

Expand Down
4 changes: 3 additions & 1 deletion profiler/snark.rs
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,9 @@ pub fn main() {
);

let mut proof_encoded = Vec::new();
proof.serialize(&mut proof_encoded).unwrap();
proof
.serialize_with_mode(&mut proof_encoded, Compress::Yes)
.unwrap();
let msg_proof_len = format!("SNARK::proof_compressed_len {:?}", proof_encoded.len());
print(&msg_proof_len);

Expand Down
15 changes: 8 additions & 7 deletions src/commitments.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,12 @@ use super::group::{GroupElement, GroupElementAffine, VartimeMultiscalarMul, GROU
use super::scalar::Scalar;
use crate::group::CompressGroupElement;
use crate::parameters::*;
use ark_ec::{AffineCurve, ProjectiveCurve};
use ark_ff::PrimeField;
use ark_ec::{AffineRepr};

use ark_sponge::poseidon::PoseidonSponge;
use ark_sponge::CryptographicSponge;
use std::ops::Mul;

use ark_crypto_primitives::sponge::poseidon::PoseidonSponge;
use ark_crypto_primitives::sponge::CryptographicSponge;

#[derive(Debug, Clone)]
pub struct MultiCommitGens {
Expand All @@ -29,7 +30,7 @@ impl MultiCommitGens {
let uniform_bytes = sponge.squeeze_bytes(64);
el_aff = GroupElementAffine::from_random_bytes(&uniform_bytes);
}
let el = el_aff.unwrap().mul_by_cofactor_to_projective();
let el = el_aff.unwrap().clear_cofactor().into_group();
gens.push(el);
}

Expand Down Expand Up @@ -80,13 +81,13 @@ impl Commitments for Scalar {
impl Commitments for Vec<Scalar> {
fn commit(&self, blind: &Scalar, gens_n: &MultiCommitGens) -> GroupElement {
assert_eq!(gens_n.n, self.len());
GroupElement::vartime_multiscalar_mul(self, &gens_n.G) + gens_n.h.mul(blind.into_repr())
GroupElement::vartime_multiscalar_mul(self, &gens_n.G) + gens_n.h.mul(blind)
}
}

impl Commitments for [Scalar] {
fn commit(&self, blind: &Scalar, gens_n: &MultiCommitGens) -> GroupElement {
assert_eq!(gens_n.n, self.len());
GroupElement::vartime_multiscalar_mul(self, &gens_n.G) + gens_n.h.mul(blind.into_repr())
GroupElement::vartime_multiscalar_mul(self, &gens_n.G) + gens_n.h.mul(blind)
}
}
29 changes: 12 additions & 17 deletions src/constraints.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,41 +10,36 @@ use crate::{

use ark_bls12_377::{constraints::PairingVar as IV, Bls12_377 as I, Fr};

use ark_crypto_primitives::{
snark::BooleanInputVar, CircuitSpecificSetupSNARK, SNARKGadget, SNARK,
};
use ark_crypto_primitives::snark::{BooleanInputVar, SNARKGadget};
use ark_snark::{CircuitSpecificSetupSNARK, SNARK};

use ark_ff::{BitIteratorLE, PrimeField, Zero};
use ark_groth16::{
constraints::{Groth16VerifierGadget, PreparedVerifyingKeyVar, ProofVar},
Groth16, PreparedVerifyingKey, Proof as GrothProof,
};

use ark_poly_commit::multilinear_pc::data_structures::Commitment;
use ark_crypto_primitives::sponge::{
constraints::CryptographicSpongeVar,
poseidon::{constraints::PoseidonSpongeVar, PoseidonConfig},
};
use ark_poly_commit::multilinear_pc::data_structures::{Commitment};
use ark_r1cs_std::{
alloc::{AllocVar, AllocationMode},
fields::fp::FpVar,
prelude::{Boolean, EqGadget, FieldVar},
};
use ark_relations::r1cs::{ConstraintSynthesizer, ConstraintSystemRef, Namespace, SynthesisError};
use ark_sponge::{
constraints::CryptographicSpongeVar,
poseidon::{constraints::PoseidonSpongeVar, PoseidonParameters},
};
use rand::{CryptoRng, Rng};

pub struct PoseidonTranscripVar {
pub cs: ConstraintSystemRef<Fr>,
pub sponge: PoseidonSpongeVar<Fr>,
pub params: PoseidonParameters<Fr>,
pub params: PoseidonConfig<Fr>,
}

impl PoseidonTranscripVar {
fn new(
cs: ConstraintSystemRef<Fr>,
params: &PoseidonParameters<Fr>,
challenge: Option<Fr>,
) -> Self {
fn new(cs: ConstraintSystemRef<Fr>, params: &PoseidonConfig<Fr>, challenge: Option<Fr>) -> Self {
let mut sponge = PoseidonSpongeVar::new(cs.clone(), params);

if let Some(c) = challenge {
Expand Down Expand Up @@ -245,7 +240,7 @@ pub struct R1CSVerificationCircuit {
pub input: Vec<Fr>,
pub input_as_sparse_poly: SparsePolynomial,
pub evals: (Fr, Fr, Fr),
pub params: PoseidonParameters<Fr>,
pub params: PoseidonConfig<Fr>,
pub prev_challenge: Fr,
pub claims_phase2: (Scalar, Scalar, Scalar, Scalar),
pub eval_vars_at_ry: Fr,
Expand Down Expand Up @@ -411,7 +406,7 @@ pub struct VerifierConfig {
pub input: Vec<Fr>,
pub input_as_sparse_poly: SparsePolynomial,
pub evals: (Fr, Fr, Fr),
pub params: PoseidonParameters<Fr>,
pub params: PoseidonConfig<Fr>,
pub prev_challenge: Fr,
pub claims_phase2: (Fr, Fr, Fr, Fr),
pub eval_vars_at_ry: Fr,
Expand Down Expand Up @@ -464,7 +459,7 @@ impl ConstraintSynthesizer<Fq> for VerifierCircuit {
let bits = pubs
.iter()
.map(|c| {
let bits: Vec<bool> = BitIteratorLE::new(c.into_repr().as_ref().to_vec()).collect();
let bits: Vec<bool> = BitIteratorLE::new(c.into_bigint().as_ref().to_vec()).collect();
Vec::new_witness(cs.clone(), || Ok(bits))
})
.collect::<Result<Vec<_>, _>>()?;
Expand Down
21 changes: 10 additions & 11 deletions src/group.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
use crate::errors::ProofVerifyError;
use ark_ec::msm::VariableBaseMSM;
use ark_ff::PrimeField;
use ark_ec::scalar_mul::variable_base::VariableBaseMSM;
use ark_ec::Group;

use lazy_static::lazy_static;

use super::scalar::Scalar;

use ark_ec::ProjectiveCurve;
use ark_ec::CurveGroup;
use ark_serialize::*;
use core::borrow::Borrow;

Expand All @@ -19,7 +19,7 @@ pub type Fr = ark_bls12_377::Fr;
pub struct CompressedGroup(pub Vec<u8>);

lazy_static! {
pub static ref GROUP_BASEPOINT: GroupElement = GroupElement::prime_subgroup_generator();
pub static ref GROUP_BASEPOINT: GroupElement = GroupElement::generator();
}

pub trait CompressGroupElement {
Expand All @@ -37,14 +37,16 @@ pub trait UnpackGroupElement {
impl CompressGroupElement for GroupElement {
fn compress(&self) -> CompressedGroup {
let mut point_encoding = Vec::new();
self.serialize(&mut point_encoding).unwrap();
self
.serialize_with_mode(&mut point_encoding, Compress::Yes)
.unwrap();
CompressedGroup(point_encoding)
}
}

impl DecompressGroupElement for GroupElement {
fn decompress(encoded: &CompressedGroup) -> Option<Self> {
let res = GroupElement::deserialize(&*encoded.0);
let res = GroupElement::deserialize_compressed(&*encoded.0);
if let Ok(r) = res {
Some(r)
} else {
Expand All @@ -67,14 +69,11 @@ pub trait VartimeMultiscalarMul {

impl VartimeMultiscalarMul for GroupElement {
fn vartime_multiscalar_mul(scalars: &[Scalar], points: &[GroupElement]) -> GroupElement {
let repr_scalars = scalars
.iter()
.map(|S| S.borrow().into_repr())
.collect::<Vec<<Scalar as PrimeField>::BigInt>>();
assert!(scalars.len() == points.len());
let aff_points = points
.iter()
.map(|P| P.borrow().into_affine())
.collect::<Vec<GroupElementAffine>>();
VariableBaseMSM::multi_scalar_mul(aff_points.as_slice(), repr_scalars.as_slice())
<Self as VariableBaseMSM>::msm_unchecked(aff_points.as_slice(), scalars)
}
}
Loading

0 comments on commit 1a3c209

Please sign in to comment.