This repository has been archived by the owner on Dec 18, 2023. It is now read-only.

Zcash-style serialization for BLS12-381 #129

Merged: 21 commits into master from zcash-deser on Oct 19, 2022
Changes shown are from 19 of the 21 commits.

Commits:
2c1f1a5
zcash-style serialization for BLS12-381
mmagician Oct 11, 2022
9e9a61a
Validate serialized points if validate flag == Yes
mmagician Oct 11, 2022
e8895b4
use the compress flag to determine the serialized size
mmagician Oct 11, 2022
ed2c973
If is_infinity flag is set, serialize the zero element
mmagician Oct 11, 2022
5c7051e
serialization with and without compression
mmagician Oct 11, 2022
0f9ddb1
Add deserialization tests
mmagician Oct 11, 2022
ee1abe9
Split out compressed and uncompressed deserialization methods
mmagician Oct 11, 2022
049c4b9
Fix compressed check for g1-uncompressed
mmagician Oct 12, 2022
ac2f057
Simplify test code, clean up comments
mmagician Oct 12, 2022
3e25413
Use the compress flag in g2
mmagician Oct 12, 2022
a546876
Use write_all instead of write
mmagician Oct 12, 2022
bcb0aaa
Should set the flags and write the whole bytes array, not just x_bytes
mmagician Oct 12, 2022
b1a816c
Apply suggestions from code review
mmagician Oct 16, 2022
cd97b6a
Move serialization helper structs and functions to a separate utils mod
mmagician Oct 16, 2022
fa02275
Replace the rest of `default` with `zero` to mean infinity
mmagician Oct 17, 2022
205096c
Move g1- & g2-serialization specific methods to mod util
mmagician Oct 17, 2022
c7a53c6
Import the full test vectors from https://github.com/zkcrypto/bls12_381
mmagician Oct 17, 2022
fd33b74
Merge branch 'master' into zcash-deser
mmagician Oct 17, 2022
f0ad760
Reorganise imports
mmagician Oct 17, 2022
d8b3338
consistent spelling of SERIALIZE
mmagician Oct 17, 2022
abdd204
Use `map_err`
Pratyush Oct 19, 2022
3 changes: 2 additions & 1 deletion bls12_381/Cargo.toml
@@ -16,11 +16,12 @@ edition = "2021"
 ark-ff = { version="^0.3.0", default-features = false }
 ark-ec = { version="^0.3.0", default-features = false }
 ark-std = { version="^0.3.0", default-features = false }
+ark-serialize = { version="^0.3.0", default-features = false }
 
 [dev-dependencies]
-ark-serialize = { version="^0.3.0", default-features = false }
 ark-algebra-test-templates = { version="^0.3.0", default-features = false }
 ark-algebra-bench-templates = { version = "^0.3.0", default-features = false }
+hex = "^0.4.0"
 
 [features]
 default = [ "curve" ]
66 changes: 66 additions & 0 deletions bls12_381/src/curves/g1.rs
@@ -7,7 +7,13 @@ use ark_ec::{
AffineRepr, Group,
};
use ark_ff::{Field, MontFp, PrimeField, Zero};
use ark_serialize::{Compress, SerializationError};
use ark_std::{ops::Neg, One};

use crate::util::{
read_g1_compressed, read_g1_uncompressed, serialise_fq, EncodingFlags, G1_SERIALISED_SIZE,
};

pub type G1Affine = bls12::G1Affine<crate::Parameters>;
pub type G1Projective = bls12::G1Projective<crate::Parameters>;

@@ -70,6 +76,66 @@ impl SWCurveConfig for Parameters {
let h_eff = one_minus_x().into_bigint();
Parameters::mul_affine(&p, h_eff.as_ref()).into()
}

fn deserialize_with_mode<R: ark_serialize::Read>(
mut reader: R,
compress: ark_serialize::Compress,
validate: ark_serialize::Validate,
) -> Result<Affine<Self>, ark_serialize::SerializationError> {
let p = if compress == ark_serialize::Compress::Yes {
read_g1_compressed(&mut reader)?
} else {
read_g1_uncompressed(&mut reader)?
};

if validate == ark_serialize::Validate::Yes && !p.is_in_correct_subgroup_assuming_on_curve()
{
return Err(SerializationError::InvalidData);
}
Ok(p)
}

fn serialize_with_mode<W: ark_serialize::Write>(
item: &Affine<Self>,
mut writer: W,
compress: ark_serialize::Compress,
) -> Result<(), SerializationError> {
let encoding = EncodingFlags {
is_compressed: compress == ark_serialize::Compress::Yes,
is_infinity: item.is_zero(),
is_lexographically_largest: item.y > -item.y,
};
let mut p = *item;
if encoding.is_infinity {
p = G1Affine::zero();
}
// Access the `x` field directly: `xy()` returns `None` for the point at
// infinity, but we still need its (zeroed) coordinate bytes here.
let x_bytes = serialise_fq(p.x);
if encoding.is_compressed {
let mut bytes: [u8; G1_SERIALISED_SIZE] = x_bytes;

encoding.encode_flags(&mut bytes);
writer.write_all(&bytes)?;
} else {
let mut bytes = [0u8; 2 * G1_SERIALISED_SIZE];
bytes[0..G1_SERIALISED_SIZE].copy_from_slice(&x_bytes[..]);
bytes[G1_SERIALISED_SIZE..].copy_from_slice(&serialise_fq(p.y)[..]);

encoding.encode_flags(&mut bytes);
writer.write_all(&bytes)?;
};

Ok(())
}

fn serialized_size(compress: Compress) -> usize {
if compress == Compress::Yes {
G1_SERIALISED_SIZE
} else {
G1_SERIALISED_SIZE * 2
}
}
}

daira (Contributor) commented on `serialized_size`:

Note that Zcash always uses compressed point encodings, so the uncompressed version should not be referred to as Zcash-style encoding in documentation.

mmagician (Member, Author) replied:

@daira Noted, thanks. So far it's not mentioned in the docs, but will go into the CHANGELOG. I've just reworded it there, I hope it's fine like that?

fn one_minus_x() -> Fr {
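The G1 hunks above call into `EncodingFlags`, `encode_flags`, `serialise_fq` and the `read_g1_*` helpers from the new `util` module, which is not part of the diff shown here. As a rough sketch of what the flag helper must do, assuming the zkcrypto/Zcash bit layout (struct and field names are taken from the call sites above; the bodies are illustrative, not the PR's code):

pub struct EncodingFlags {
    pub is_compressed: bool,
    pub is_infinity: bool,
    pub is_lexographically_largest: bool,
}

impl EncodingFlags {
    /// Pack the flags into the three most significant bits of byte 0:
    /// bit 7: compression flag, bit 6: infinity flag, bit 5: sort flag
    /// (only meaningful for compressed, non-infinity points).
    pub fn encode_flags(&self, bytes: &mut [u8]) {
        if self.is_compressed {
            bytes[0] |= 1 << 7;
        }
        if self.is_infinity {
            bytes[0] |= 1 << 6;
        }
        if self.is_compressed && !self.is_infinity && self.is_lexographically_largest {
            bytes[0] |= 1 << 5;
        }
    }
}

On the read side, a compressed decoder has to strip these bits, rebuild x, and recover y from the curve equation, picking the root selected by the sort flag. A hypothetical sketch for G1 (y^2 = x^3 + 4 on BLS12-381), using the same `>` comparison on Fq that `serialize_with_mode` uses:

fn g1_y_from_x(x: Fq, lexicographically_largest: bool) -> Option<Fq> {
    let y2 = x * x * x + Fq::from(4u64); // b = 4 for G1
    y2.sqrt().map(|y| {
        let neg_y = -y;
        if (y > neg_y) == lexicographically_largest {
            y
        } else {
            neg_y
        }
    })
}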
76 changes: 75 additions & 1 deletion bls12_381/src/curves/g2.rs
@@ -8,8 +8,13 @@ use ark_ec::{
AffineRepr, CurveGroup, Group,
};
use ark_ff::{Field, MontFp, Zero};
use ark_serialize::{Compress, SerializationError};

-use crate::*;
+use super::util::{serialise_fq, EncodingFlags, G2_SERIALISED_SIZE};
+use crate::{
+    util::{read_g2_compressed, read_g2_uncompressed},
+    *,
+};

pub type G2Affine = bls12::G2Affine<crate::Parameters>;
pub type G2Projective = bls12::G2Projective<crate::Parameters>;
@@ -105,6 +110,75 @@ impl SWCurveConfig for Parameters {
psi2_p2 += &-psi_p;
(psi2_p2 - p_projective).into_affine()
}

fn deserialize_with_mode<R: ark_serialize::Read>(
mut reader: R,
compress: ark_serialize::Compress,
validate: ark_serialize::Validate,
) -> Result<Affine<Self>, ark_serialize::SerializationError> {
let p = if compress == ark_serialize::Compress::Yes {
read_g2_compressed(&mut reader)?
} else {
read_g2_uncompressed(&mut reader)?
};

if validate == ark_serialize::Validate::Yes && !p.is_in_correct_subgroup_assuming_on_curve()
{
return Err(SerializationError::InvalidData);
}
Ok(p)
}

fn serialize_with_mode<W: ark_serialize::Write>(
item: &Affine<Self>,
mut writer: W,
compress: ark_serialize::Compress,
) -> Result<(), SerializationError> {
let encoding = EncodingFlags {
is_compressed: compress == ark_serialize::Compress::Yes,
is_infinity: item.is_zero(),
is_lexographically_largest: item.y > -item.y,
};
let mut p = *item;
if encoding.is_infinity {
p = G2Affine::zero();
}

let mut x_bytes = [0u8; G2_SERIALISED_SIZE];
let c1_bytes = serialise_fq(p.x.c1);
let c0_bytes = serialise_fq(p.x.c0);
x_bytes[0..48].copy_from_slice(&c1_bytes[..]);
x_bytes[48..96].copy_from_slice(&c0_bytes[..]);
if encoding.is_compressed {
let mut bytes: [u8; G2_SERIALISED_SIZE] = x_bytes;

encoding.encode_flags(&mut bytes);
writer.write_all(&bytes)?;
} else {
let mut bytes = [0u8; 2 * G2_SERIALISED_SIZE];

let mut y_bytes = [0u8; G2_SERIALISED_SIZE];
let c1_bytes = serialise_fq(p.y.c1);
let c0_bytes = serialise_fq(p.y.c0);
y_bytes[0..48].copy_from_slice(&c1_bytes[..]);
y_bytes[48..96].copy_from_slice(&c0_bytes[..]);
bytes[0..G2_SERIALISED_SIZE].copy_from_slice(&x_bytes);
bytes[G2_SERIALISED_SIZE..].copy_from_slice(&y_bytes);

encoding.encode_flags(&mut bytes);
writer.write_all(&bytes)?;
};

Ok(())
}

fn serialized_size(compress: ark_serialize::Compress) -> usize {
if compress == Compress::Yes {
G2_SERIALISED_SIZE
} else {
2 * G2_SERIALISED_SIZE
}
}
}

pub const G2_GENERATOR_X: Fq2 = Fq2::new(G2_GENERATOR_X_C0, G2_GENERATOR_X_C1);
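For reference: with a 381-bit base field, one Fq element packs into 48 bytes, so G1_SERIALISED_SIZE is 48 and G2_SERIALISED_SIZE is 96, and `serialized_size` yields 48/96 bytes for G1 and 96/192 bytes for G2 in compressed/uncompressed mode. A minimal round-trip sketch through the ark-serialize API, assuming the crate is consumed as `ark_bls12_381`:

use ark_bls12_381::G2Affine;
use ark_ec::AffineRepr;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Validate};

fn round_trip() -> Result<(), SerializationError> {
    let p = G2Affine::generator();

    // Compressed: 96 bytes (x only; flags live in the top bits of byte 0).
    let mut buf = vec![0u8; p.serialized_size(Compress::Yes)];
    p.serialize_with_mode(buf.as_mut_slice(), Compress::Yes)?;

    // Validate::Yes additionally runs the subgroup check on the decoded point.
    let q = G2Affine::deserialize_with_mode(buf.as_slice(), Compress::Yes, Validate::Yes)?;
    assert_eq!(p, q);
    Ok(())
}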
1 change: 1 addition & 0 deletions bls12_381/src/curves/mod.rs
@@ -4,6 +4,7 @@ use crate::{Fq, Fq12Config, Fq2Config, Fq6Config};

pub mod g1;
pub mod g2;
pub(crate) mod util;

#[cfg(test)]
mod tests;
62 changes: 0 additions & 62 deletions bls12_381/src/curves/tests.rs

This file was deleted.

4 binary files added (contents not shown): bls12_381/src/curves/tests/g1_compressed_valid_test_vectors.dat, g1_uncompressed_valid_test_vectors.dat, g2_compressed_valid_test_vectors.dat and g2_uncompressed_valid_test_vectors.dat.
119 changes: 119 additions & 0 deletions bls12_381/src/curves/tests/mod.rs
@@ -0,0 +1,119 @@
use ark_algebra_test_templates::*;
use ark_ec::{AffineRepr, CurveGroup, Group};
use ark_ff::{fields::Field, One, UniformRand, Zero};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};
use ark_std::{rand::Rng, test_rng, vec};

use crate::{Bls12_381, Fq, Fq2, Fr, G1Affine, G1Projective, G2Affine, G2Projective};

test_group!(g1; G1Projective; sw);
test_group!(g2; G2Projective; sw);
test_group!(pairing_output; ark_ec::pairing::PairingOutput<Bls12_381>; msm);
test_pairing!(pairing; crate::Bls12_381);

#[test]
fn test_g1_endomorphism_beta() {
assert!(crate::g1::BETA.pow(&[3u64]).is_one());
}

#[test]
fn test_g1_subgroup_membership_via_endomorphism() {
let mut rng = test_rng();
let generator = G1Projective::rand(&mut rng).into_affine();
assert!(generator.is_in_correct_subgroup_assuming_on_curve());
}

#[test]
fn test_g1_subgroup_non_membership_via_endomorphism() {
let mut rng = test_rng();
loop {
let x = Fq::rand(&mut rng);
let greatest = rng.gen();

if let Some(p) = G1Affine::get_point_from_x_unchecked(x, greatest) {
if !p.mul_bigint(Fr::characteristic()).is_zero() {
assert!(!p.is_in_correct_subgroup_assuming_on_curve());
return;
}
}
}
}

#[test]
fn test_g2_subgroup_membership_via_endomorphism() {
let mut rng = test_rng();
let generator = G2Projective::rand(&mut rng).into_affine();
assert!(generator.is_in_correct_subgroup_assuming_on_curve());
}

#[test]
fn test_g2_subgroup_non_membership_via_endomorphism() {
let mut rng = test_rng();
loop {
let x = Fq2::rand(&mut rng);
let greatest = rng.gen();

if let Some(p) = G2Affine::get_point_from_x_unchecked(x, greatest) {
if !p.mul_bigint(Fr::characteristic()).is_zero() {
assert!(!p.is_in_correct_subgroup_assuming_on_curve());
return;
}
}
}
}

// Test vectors and macro adapted from https://github.com/zkcrypto/bls12_381/blob/e224ad4ea1babfc582ccd751c2bf128611d10936/src/tests/mod.rs
macro_rules! test_vectors {
($projective:ident, $affine:ident, $compress:expr, $expected:ident) => {
let mut e = $projective::zero();

let mut v = vec![];
{
let mut expected = $expected;
for _ in 0..1000 {
let e_affine = $affine::from(e);
let mut serialized = vec![0u8; e.serialized_size($compress)];
e_affine
.serialize_with_mode(serialized.as_mut_slice(), $compress)
.unwrap();
v.extend_from_slice(&serialized[..]);

let mut decoded = serialized;
let len_of_encoding = decoded.len();
(&mut decoded[..]).copy_from_slice(&expected[0..len_of_encoding]);
expected = &expected[len_of_encoding..];
let decoded =
$affine::deserialize_with_mode(&decoded[..], $compress, Validate::Yes).unwrap();
assert_eq!(e_affine, decoded);

e += &$projective::generator();
}
}

assert_eq!(&v[..], $expected);
};
}

#[test]
fn g1_compressed_valid_test_vectors() {
let bytes: &'static [u8] = include_bytes!("g1_compressed_valid_test_vectors.dat");
test_vectors!(G1Projective, G1Affine, Compress::Yes, bytes);
}

#[test]
fn g1_uncompressed_valid_test_vectors() {
let bytes: &'static [u8] = include_bytes!("g1_uncompressed_valid_test_vectors.dat");
test_vectors!(G1Projective, G1Affine, Compress::No, bytes);
}

#[test]
fn g2_compressed_valid_test_vectors() {
let bytes: &'static [u8] = include_bytes!("g2_compressed_valid_test_vectors.dat");
test_vectors!(G2Projective, G2Affine, Compress::Yes, bytes);
}

#[test]
fn g2_uncompressed_valid_test_vectors() {
let bytes: &'static [u8] = include_bytes!("g2_uncompressed_valid_test_vectors.dat");
test_vectors!(G2Projective, G2Affine, Compress::No, bytes);
}
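Each .dat file is just the concatenation of the encodings of k·G for k = 0..999, which is why the macro walks `expected` slice by slice. A hypothetical generator for the G1 vectors, reusing this test module's imports (not part of the PR; the checked-in files come from zkcrypto/bls12_381):

fn generate_g1_vectors(compress: Compress) -> Vec<u8> {
    let mut e = G1Projective::zero();
    let mut out = vec![];
    for _ in 0..1000 {
        let affine = G1Affine::from(e);
        let mut buf = vec![0u8; affine.serialized_size(compress)];
        affine
            .serialize_with_mode(buf.as_mut_slice(), compress)
            .unwrap();
        out.extend_from_slice(&buf);
        e += &G1Projective::generator();
    }
    out
}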