Skip to content

Commit

Permalink
Tests for interoperability of arkworks and group libraries.
Browse files Browse the repository at this point in the history
  • Loading branch information
mmaker committed Jan 31, 2024
1 parent 2be52a0 commit b0b9560
Show file tree
Hide file tree
Showing 10 changed files with 1,603 additions and 173 deletions.
1,610 changes: 1,496 additions & 114 deletions Cargo.lock

Large diffs are not rendered by default.

12 changes: 9 additions & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "nimue"
version = "0.0.1-beta11"
version = "0.0.1-beta12"
authors = ["Michele Orrù <[email protected]>"]
description = "A library for Fiat-Shamir transcripts."
edition = "2021"
Expand All @@ -14,6 +14,8 @@ ark-ff = {git = "https://github.com/arkworks-rs/algebra"}
ark-serialize = {git = "https://github.com/arkworks-rs/algebra"}
ark-bls12-381 = {git = "https://github.com/arkworks-rs/algebra"}
ark-curve25519 = {git = "https://github.com/arkworks-rs/algebra"}
ark-pallas = {git = "https://github.com/arkworks-rs/algebra"}
ark-vesta = {git = "https://github.com/arkworks-rs/algebra"}


[dependencies]
Expand Down Expand Up @@ -46,9 +48,13 @@ hex = "0.4.3"
curve25519-dalek = {version="4.0.0", features=["group"]}
ark-curve25519 = "0.4.0"
# test algebraic hashers
ark-bls12-381 = {version="0.4.0"}

ark-bls12-381 = "0.4.0"
bls12_381 = "0.8.0"
anyhow = { version = "1.0.75", features = ["backtrace"] }
ark-pallas = { version = "0.4.0", features = ["std"] }
pallas = "0.22.0"
pasta_curves = "0.5.1"
ark-vesta = { version = "0.4.0", features = ["std"] }

[package.metadata.docs.rs]
rustdoc-args = [
Expand Down
23 changes: 18 additions & 5 deletions examples/schnorr.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,12 @@ use rand::rngs::OsRng;

/// Extend the IO pattern with the Schnorr protocol.
/// Extension trait adding the Schnorr protocol to an IO pattern.
trait SchnorrIOPattern<G: CurveGroup> {
/// Shortcut: create a new schnorr proof with statement + proof.
fn new_schnorr_proof(domsep: &str) -> Self;

/// Add the statement of the Schnorr proof (generator and public key).
fn add_schnorr_statement(self) -> Self;
/// Add the Schnorr protocol interactions (commitment, challenge, response) to the IO pattern.
fn add_schnorr_io(self) -> Self;
}

Expand All @@ -35,11 +40,20 @@ where
H: DuplexHash,
IOPattern<H>: GroupIOPattern<G> + FieldIOPattern<G::ScalarField>,
{
fn add_schnorr_io(self) -> Self {
fn new_schnorr_proof(domsep: &str) -> Self {
IOPattern::new(domsep)
.add_schnorr_statement()
.add_schnorr_io()
}

fn add_schnorr_statement(self) -> Self {
self.add_points(1, "generator (P)")
.add_points(1, "public key (X)")
.ratchet()
.add_points(1, "commitment (K)")
}

fn add_schnorr_io(self) -> Self {
self.add_points(1, "commitment (K)")
.challenge_scalars(1, "challenge (c)")
.add_scalars(1, "response (r)")
}
Expand Down Expand Up @@ -147,8 +161,7 @@ fn main() {
// type H = nimue::hash::legacy::DigestBridge<sha2::Sha256>;

// Set up the IO for the protocol transcript with domain separator "nimue::example"
let io = IOPattern::<H>::new("nimue::examples::schnorr");
let io = SchnorrIOPattern::<G>::add_schnorr_io(io);
let io: IOPattern<H> = SchnorrIOPattern::<G>::new_schnorr_proof("nimue::example");

// Set up the elements to prove
let P = G::generator();
Expand Down
8 changes: 3 additions & 5 deletions src/plugins/ark/poseidon.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
use ark_ff::PrimeField;

use crate::hash::index::impl_indexing;
use crate::hash::sponge::Sponge;
use crate::hash::Unit;
use crate::hash::index::impl_indexing;

#[derive(Clone)]
pub struct PoseidonSponge<F: PrimeField, const R: usize, const N: usize> {
Expand All @@ -25,7 +25,6 @@ pub struct PoseidonSponge<F: PrimeField, const R: usize, const N: usize> {
// Indexing over PoseidonSponge is just forwarded to indexing on the state.
impl_indexing!(PoseidonSponge, state, Output = F, Params = [F: PrimeField], Constants = [R, N]);


impl<F: PrimeField, const R: usize, const N: usize> PoseidonSponge<F, R, N> {
fn apply_s_box(&self, state: &mut [F], is_full_round: bool) {
// Full rounds apply the S Box (x^alpha) to every element of state
Expand Down Expand Up @@ -60,7 +59,6 @@ impl<F: PrimeField, const R: usize, const N: usize> PoseidonSponge<F, R, N> {
}
}


impl<F: PrimeField, const R: usize, const N: usize> zeroize::Zeroize for PoseidonSponge<F, R, N> {
fn zeroize(&mut self) {
self.state.zeroize();
Expand Down Expand Up @@ -98,8 +96,8 @@ where
self.apply_mds(&mut state);
}

for i in (full_rounds_over_2 + self.partial_rounds)
..(self.partial_rounds + self.full_rounds)
for i in
(full_rounds_over_2 + self.partial_rounds)..(self.partial_rounds + self.full_rounds)
{
self.apply_ark(&mut state, i);
self.apply_s_box(&mut state, true);
Expand Down
9 changes: 4 additions & 5 deletions src/plugins/ark/tests.rs
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
use crate::{hash::sponge::DuplexSponge, UnitTranscript, IOPattern};
use crate::{hash::sponge::DuplexSponge, IOPattern, UnitTranscript};
use ark_bls12_381::Fr;
use ark_ff::{MontFp, Zero};

use super::poseidon::{PoseidonSponge};
use super::poseidon::PoseidonSponge;

type H = DuplexSponge<PoseidonSponge<Fr, 2, 3>>;
type F = Fr;

#[test]
fn test_poseidon_basic() {
let io = IOPattern::<H, F>::new("test")
.absorb(1, "in")
.squeeze(10, "out");
.absorb(1, "in")
.squeeze(10, "out");
let mut arthur = io.to_arthur();
arthur.add_units(&[F::from(0x42)]).unwrap();

Expand All @@ -21,7 +21,6 @@ fn test_poseidon_basic() {
for challenge in challenges {
assert_ne!(challenge, F::from(0));
}

}

/// Generate default parameters (bls381-fr-only) for alpha = 17, state-size = 8
Expand Down
8 changes: 5 additions & 3 deletions src/plugins/group/iopattern.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,14 @@ where
}
}

impl<G, H, const N: usize> GroupIOPattern<G> for IOPattern<H>
impl<G, H> GroupIOPattern<G> for IOPattern<H>
where
G: Group + GroupEncoding<Repr = [u8; N]>,
G: Group + GroupEncoding,
G::Repr: AsRef<[u8]>,
H: DuplexHash,
{
fn add_points(self, count: usize, label: &str) -> Self {
self.add_bytes(count * N, label)
let n = G::Repr::default().as_ref().len();
self.add_bytes(count * n, label)
}
}
12 changes: 7 additions & 5 deletions src/plugins/group/writer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,26 +17,28 @@ where
}
}

impl<G, H, R, const N: usize> GroupPublic<G> for Arthur<H, u8, R>
impl<G, H, R> GroupPublic<G> for Arthur<H, u8, R>
where
G: Group + GroupEncoding<Repr = [u8; N]>,
G: Group + GroupEncoding,
G::Repr: AsRef<[u8]>,
H: DuplexHash,
R: RngCore + CryptoRng,
{
type Repr = Vec<u8>;
fn public_points(&mut self, input: &[G]) -> crate::ProofResult<Self::Repr> {
let mut buf = Vec::new();
for p in input.iter() {
buf.extend_from_slice(&<G as GroupEncoding>::to_bytes(p));
buf.extend_from_slice(&<G as GroupEncoding>::to_bytes(p).as_ref());
}
self.add_bytes(&buf)?;
Ok(buf)
}
}

impl<G, H, R, const N: usize> GroupWriter<G> for Arthur<H, u8, R>
impl<G, H, R> GroupWriter<G> for Arthur<H, u8, R>
where
G: Group + GroupEncoding<Repr = [u8; N]>,
G: Group + GroupEncoding,
G::Repr: AsRef<[u8]>,
H: DuplexHash,
R: RngCore + CryptoRng,
{
Expand Down
2 changes: 1 addition & 1 deletion src/plugins/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,4 +23,4 @@ pub(super) const fn bytes_modp(modulus_bits: u32) -> usize {

/// Unit-tests for inter-operability among libraries.
#[cfg(all(test, feature = "ark", feature = "group"))]
mod tests;
mod tests;
91 changes: 60 additions & 31 deletions src/plugins/tests.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use ark_ec::PrimeGroup;
use ark_ec::CurveGroup;
use ark_serialize::CanonicalSerialize;
use group::{Group, GroupEncoding};
use group::GroupEncoding;

use crate::hash::Keccak;
use crate::{plugins, ByteIOPattern};
Expand Down Expand Up @@ -38,73 +38,102 @@ where
.challenge_scalars(1, "chal")
}

// Check that the transcripts generated using the Group trait can be compatible with transcripts generated using dalek.
// Check that the transcripts generated using the Group trait can be compatible with transcripts generated using group.
#[ignore = "Change sign of generator to make this work"]
#[test]
fn test_compatible_ark_dalek() {
fn test_compatible_curve25519() {
type ArkG = ark_curve25519::EdwardsProjective;
type ArkF = ark_curve25519::Fr;

type GroupG = curve25519_dalek::edwards::EdwardsPoint;
type GroupF = curve25519_dalek::scalar::Scalar;
let ark_scalar = ArkF::from(0x42);
let dalek_scalar = GroupF::from(0x42u64);
compatible_groups::<ArkG, GroupG>();
}

#[test]
fn test_compatible_bls12_381() {
type ArkG = ark_bls12_381::G1Projective;
type GroupG = bls12_381::G1Projective;
compatible_groups::<ArkG, GroupG>();
}

#[ignore = "Looks like there's a length mismatch in the encoded values."]
#[test]
fn test_compatible_pallas() {
type ArkG = ark_vesta::Projective;
type GroupG = pasta_curves::vesta::Point;
compatible_groups::<ArkG, GroupG>();

// type ArkG = ark_pallas::Projective;
// type GroupG = pasta_curves::pallas::Point;
// compatible_groups::<ArkG, GroupG>();
}

// Check that the transcripts generated using the Group trait can be compatible with transcripts generated using group.
fn compatible_groups<ArkG, GroupG>()
where
ArkG: CurveGroup,
GroupG: group::Group + GroupEncoding,
GroupG::Repr: AsRef<[u8]>,
{
use group::ff::PrimeField;

let ark_scalar = ArkG::ScalarField::from(0x42);
let group_scalar = GroupG::Scalar::from(0x42u64);
// ***IMPORTANT***
// Looks like dalek and arkworks use different generator points.
// Looks like group and arkworks use different generator points.
let ark_generator = ArkG::generator();
let dalek_generator = -GroupG::generator();
let group_generator = GroupG::generator();

// **basic checks**
// Check point encoding is the same in both libraries.
let mut ark_generator_bytes = Vec::new();
ark_generator
.serialize_compressed(&mut ark_generator_bytes)
.unwrap();
let dalek_generator_bytes = <GroupG as GroupEncoding>::to_bytes(&dalek_generator);
assert_eq!(&ark_generator_bytes, &dalek_generator_bytes);
let group_generator_bytes = <GroupG as GroupEncoding>::to_bytes(&group_generator);
assert_eq!(&ark_generator_bytes, &group_generator_bytes.as_ref());
// Check scalar encoding is the same in both libraries.
let mut ark_scalar_bytes = Vec::new();
ark_scalar
.serialize_compressed(&mut ark_scalar_bytes)
.unwrap();
let dalek_scalar_bytes = dalek_scalar.to_bytes();
assert_eq!(&ark_scalar_bytes, &dalek_scalar_bytes);
let group_scalar_bytes = group_scalar.to_repr();
assert_eq!(&ark_scalar_bytes, group_scalar_bytes.as_ref());

let ark_point = ark_generator * ark_scalar;
let dalek_point = dalek_generator * dalek_scalar;
let group_point = group_generator * group_scalar;

let ark_io = ark_iopattern::<ArkG, Keccak>();
let dalek_io = group_iopattern::<GroupG, Keccak>();
let group_io = group_iopattern::<GroupG, Keccak>();
let mut ark_chal = [0u8; 16];
let mut dalek_chal = [0u8; 16];
let mut group_chal = [0u8; 16];

// Check that the IO Patterns are the same.
let mut ark_prover = ark_io.to_arthur();
let mut dalek_prover = dalek_io.to_arthur();
assert_eq!(ark_io.as_bytes(), dalek_io.as_bytes());
let mut group_prover = group_io.to_arthur();
assert_eq!(ark_io.as_bytes(), group_io.as_bytes());

// Check that scalars absorption leads to the same transcript.
plugins::ark::FieldWriter::add_scalars(&mut ark_prover, &[ark_scalar]).unwrap();
ark_prover.fill_challenge_bytes(&mut ark_chal).unwrap();
plugins::group::FieldWriter::add_scalars(&mut dalek_prover, &[dalek_scalar]).unwrap();
dalek_prover.fill_challenge_bytes(&mut dalek_chal).unwrap();
assert_eq!(ark_chal, dalek_chal);
plugins::group::FieldWriter::add_scalars(&mut group_prover, &[group_scalar]).unwrap();
group_prover.fill_challenge_bytes(&mut group_chal).unwrap();
assert_eq!(ark_chal, group_chal);

// Check that points absorption leads to the same transcript.
plugins::ark::GroupWriter::add_points(&mut ark_prover, &[ark_point]).unwrap();
ark_prover.fill_challenge_bytes(&mut ark_chal).unwrap();
plugins::group::GroupWriter::add_points(&mut dalek_prover, &[dalek_point]).unwrap();
dalek_prover.fill_challenge_bytes(&mut dalek_chal).unwrap();
assert_eq!(ark_chal, dalek_chal);
plugins::group::GroupWriter::add_points(&mut group_prover, &[group_point]).unwrap();
group_prover.fill_challenge_bytes(&mut group_chal).unwrap();
assert_eq!(ark_chal, group_chal);

// Check that scalars challenges are interpreted in the same way from bytes.
let [ark_chal_scalar]: [ArkF; 1] =
let [ark_chal_scalar]: [ArkG::ScalarField; 1] =
plugins::ark::FieldChallenges::challenge_scalars(&mut ark_prover).unwrap();
let [dalek_chal_scalar]: [GroupF; 1] =
plugins::group::FieldChallenges::challenge_scalars(&mut dalek_prover).unwrap();
let [group_chal_scalar]: [GroupG::Scalar; 1] =
plugins::group::FieldChallenges::challenge_scalars(&mut group_prover).unwrap();
let mut ark_scalar_bytes = Vec::new();
ark_chal_scalar
.serialize_compressed(&mut ark_scalar_bytes)
.unwrap();
let dalek_scalar_bytes = dalek_chal_scalar.to_bytes();
assert_eq!(&ark_scalar_bytes, &dalek_scalar_bytes);
let group_scalar_bytes = group_chal_scalar.to_repr();
assert_eq!(&ark_scalar_bytes, group_scalar_bytes.as_ref());
}
1 change: 0 additions & 1 deletion src/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ type Sha2 = DigestBridge<sha2::Sha256>;
type Blake2b512 = DigestBridge<blake2::Blake2b512>;
type Blake2s256 = DigestBridge<blake2::Blake2s256>;


/// How should a protocol without IOPattern be handled?
#[test]
fn test_iopattern() {
Expand Down

0 comments on commit b0b9560

Please sign in to comment.