cl: restructure cl into a workspace and integrate nullifier proof
davidrusu committed Jul 12, 2024 · 1 parent 3d24590 · commit 8876712
Showing 32 changed files with 551 additions and 351 deletions.
28 changes: 9 additions & 19 deletions cl/Cargo.toml
@@ -1,21 +1,11 @@
-[package]
-name = "cl"
-version = "0.1.0"
-edition = "2021"
+[workspace]
+resolver = "2"
+members = [ "cl", "ledger", "proof_statements", "risc0_proofs"]
 
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
-[dependencies]
-serde = {version="1.0", features = ["derive"]}
-bincode = "1.3.3"
-risc0-groth16 = "1.0.1"
-blake2 = "0.10.6"
-# jubjub = "0.10.0"
-group = "0.13.0"
-rand_core = "0.6.0"
-rand_chacha = "0.3.1"
-lazy_static = "1.4.0"
-hex = "0.4.3"
-curve25519-dalek = {version = "4.1", features = ["serde", "digest", "rand_core"]}
-sha2 = "0.10"
+# Always optimize; building and running the risc0_proofs takes much longer without optimization.
+[profile.dev]
+opt-level = 3
 
+[profile.release]
+debug = 1
+lto = true
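The root manifest stops being a package and becomes a pure workspace. For orientation, the layout implied by the members list looks roughly like this (the nesting is inferred from the members list above and the cl/src → cl/cl/src renames below; the contents of the other member directories are not shown in this commit):

cl/
├── Cargo.toml            # workspace root (this file)
├── cl/                   # core crate; sources move from cl/src/ to cl/cl/src/
│   ├── Cargo.toml
│   └── src/
├── ledger/
├── proof_statements/
└── risc0_proofs/

Member crates can then depend on the core crate by path (e.g. cl = { path = "../cl" }).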
20 changes: 20 additions & 0 deletions cl/cl/Cargo.toml
@@ -0,0 +1,20 @@
+[package]
+name = "cl"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+serde = {version="1.0", features = ["derive"]}
+bincode = "1.3.3"
+risc0-groth16 = "1.0.1"
+blake2 = "0.10.6"
+# jubjub = "0.10.0"
+group = "0.13.0"
+rand_core = "0.6.0"
+rand_chacha = "0.3.1"
+lazy_static = "1.4.0"
+hex = "0.4.3"
+curve25519-dalek = {version = "4.1", features = ["serde", "digest", "rand_core"]}
+sha2 = "0.10"
File renamed without changes.
112 changes: 53 additions & 59 deletions cl/src/bundle.rs → cl/cl/src/bundle.rs
@@ -1,14 +1,8 @@
-use std::collections::BTreeSet;
-
 use serde::{Deserialize, Serialize};
 
 use curve25519_dalek::{constants::RISTRETTO_BASEPOINT_POINT, ristretto::RistrettoPoint, Scalar};
 
-use crate::{
-    error::Error,
-    note::NoteCommitment,
-    partial_tx::{PartialTx, PartialTxProof},
-};
+use crate::partial_tx::PartialTx;
 
 /// The transaction bundle is a collection of partial transactions.
 /// The goal in bundling transactions is to produce a set of partial transactions
@@ -24,11 +18,11 @@ pub struct BundleWitness {
     pub balance_blinding: Scalar,
 }
 
-#[derive(Debug)]
-pub struct BundleProof {
-    pub partials: Vec<PartialTxProof>,
-    pub balance_blinding: Scalar,
-}
+// #[derive(Debug)]
+// pub struct BundleProof {
+//     pub partials: Vec<PartialTxProof>,
+//     pub balance_blinding: Scalar,
+// }
 
 impl Bundle {
     pub fn balance(&self) -> RistrettoPoint {
@@ -40,53 +34,53 @@ impl Bundle {
             == crate::balance::balance(0, RISTRETTO_BASEPOINT_POINT, balance_blinding_witness)
     }
 
-    pub fn prove(
-        &self,
-        w: BundleWitness,
-        ptx_proofs: Vec<PartialTxProof>,
-    ) -> Result<BundleProof, Error> {
-        if ptx_proofs.len() == self.partials.len() {
-            return Err(Error::ProofFailed);
-        }
-        let input_notes: Vec<NoteCommitment> = self
-            .partials
-            .iter()
-            .flat_map(|ptx| ptx.inputs.iter().map(|i| i.note_comm))
-            .collect();
-        if input_notes.len() != BTreeSet::from_iter(input_notes.iter()).len() {
-            return Err(Error::ProofFailed);
-        }
-
-        let output_notes: Vec<NoteCommitment> = self
-            .partials
-            .iter()
-            .flat_map(|ptx| ptx.outputs.iter().map(|o| o.note_comm))
-            .collect();
-        if output_notes.len() != BTreeSet::from_iter(output_notes.iter()).len() {
-            return Err(Error::ProofFailed);
-        }
-
-        if self.balance()
-            != crate::balance::balance(0, RISTRETTO_BASEPOINT_POINT, w.balance_blinding)
-        {
-            return Err(Error::ProofFailed);
-        }
-
-        Ok(BundleProof {
-            partials: ptx_proofs,
-            balance_blinding: w.balance_blinding,
-        })
-    }
-
-    pub fn verify(&self, proof: BundleProof) -> bool {
-        proof.partials.len() == self.partials.len()
-            && self.is_balanced(proof.balance_blinding)
-            && self
-                .partials
-                .iter()
-                .zip(&proof.partials)
-                .all(|(p, p_proof)| p.verify(p_proof))
-    }
+    // pub fn prove(
+    //     &self,
+    //     w: BundleWitness,
+    //     ptx_proofs: Vec<PartialTxProof>,
+    // ) -> Result<BundleProof, Error> {
+    //     if ptx_proofs.len() == self.partials.len() {
+    //         return Err(Error::ProofFailed);
+    //     }
+    //     let input_notes: Vec<NoteCommitment> = self
+    //         .partials
+    //         .iter()
+    //         .flat_map(|ptx| ptx.inputs.iter().map(|i| i.note_comm))
+    //         .collect();
+    //     if input_notes.len() != BTreeSet::from_iter(input_notes.iter()).len() {
+    //         return Err(Error::ProofFailed);
+    //     }
+
+    //     let output_notes: Vec<NoteCommitment> = self
+    //         .partials
+    //         .iter()
+    //         .flat_map(|ptx| ptx.outputs.iter().map(|o| o.note_comm))
+    //         .collect();
+    //     if output_notes.len() != BTreeSet::from_iter(output_notes.iter()).len() {
+    //         return Err(Error::ProofFailed);
+    //     }
+
+    //     if self.balance()
+    //         != crate::balance::balance(0, RISTRETTO_BASEPOINT_POINT, w.balance_blinding)
+    //     {
+    //         return Err(Error::ProofFailed);
+    //     }
+
+    //     Ok(BundleProof {
+    //         partials: ptx_proofs,
+    //         balance_blinding: w.balance_blinding,
+    //     })
+    // }
+
+    // pub fn verify(&self, proof: BundleProof) -> bool {
+    //     proof.partials.len() == self.partials.len()
+    //         && self.is_balanced(proof.balance_blinding)
+    //         && self
+    //             .partials
+    //             .iter()
+    //             .zip(&proof.partials)
+    //             .all(|(p, p_proof)| p.verify(p_proof))
+    // }
 }
 
 #[cfg(test)]
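What survives in bundle.rs is the balance check itself: a bundle is valid when the sum of its partial transactions' balance commitments equals a commitment to zero under a known blinding factor, while proving and verification move out of this module to make room for the risc0-backed proofs this commit introduces. A minimal sketch of that relation, assuming a Pedersen-style commitment like the crate::balance::balance call above suggests (the helper names here are hypothetical, not the crate's API):

use curve25519_dalek::{constants::RISTRETTO_BASEPOINT_POINT, ristretto::RistrettoPoint, Scalar};

// Pedersen-style balance commitment: value * unit_point + blinding * G.
fn balance_commitment(value: u64, unit_point: RistrettoPoint, blinding: Scalar) -> RistrettoPoint {
    Scalar::from(value) * unit_point + blinding * RISTRETTO_BASEPOINT_POINT
}

// A bundle balances when input and output values cancel, so the summed
// commitment reduces to 0 * unit + r * G for a known blinding witness r.
fn is_balanced(summed_balance: RistrettoPoint, blinding_witness: Scalar) -> bool {
    summed_balance == balance_commitment(0, RISTRETTO_BASEPOINT_POINT, blinding_witness)
}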
File renamed without changes.
File renamed without changes.
118 changes: 58 additions & 60 deletions cl/src/input.rs → cl/cl/src/input.rs
@@ -4,10 +4,8 @@
 /// which on their own may not balance (i.e. \sum inputs != \sum outputs)
 use crate::{
     balance::Balance,
-    error::Error,
     note::{NoteCommitment, NoteWitness},
     nullifier::{Nullifier, NullifierNonce, NullifierSecret},
-    partial_tx::PtxRoot,
 };
 use rand_core::RngCore;
 // use risc0_groth16::{PublicInputsJson, Verifier};
@@ -20,7 +18,7 @@ pub struct Input {
     pub balance: Balance,
 }
 
-#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct InputWitness {
     pub note: NoteWitness,
     pub nf_sk: NullifierSecret,
@@ -43,67 +41,67 @@ impl InputWitness {
             balance: self.note.balance(),
         }
     }
-}
 
-// as we don't have SNARKS hooked up yet, the witness will be our proof
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct InputProof {
-    input: InputWitness,
-    ptx_root: PtxRoot,
-    death_proof: Vec<u8>,
-}
-
-impl Input {
-    pub fn prove(
-        &self,
-        w: &InputWitness,
-        ptx_root: PtxRoot,
-        death_proof: Vec<u8>,
-    ) -> Result<InputProof, Error> {
-        if bincode::serialize(&w.commit()).unwrap() != bincode::serialize(&self).unwrap() {
-            Err(Error::ProofFailed)
-        } else {
-            Ok(InputProof {
-                input: w.clone(),
-                ptx_root,
-                death_proof,
-            })
+    pub fn to_output_witness(&self) -> crate::OutputWitness {
+        crate::OutputWitness {
+            note: self.note.clone(),
+            nf_pk: self.nf_sk.commit(),
+            nonce: self.nonce,
         }
     }
+}
 
-    pub fn verify(&self, ptx_root: PtxRoot, proof: &InputProof) -> bool {
-        // verification checks the relation
-        //  - nf_pk == hash(nf_sk)
-        //  - note_comm == commit(note || nf_pk)
-        //  - nullifier == hash(nf_sk || nonce)
-        //  - balance == v * hash_to_curve(Unit) + blinding * H
-        //  - ptx_root is the same one that was used in proving.
-
-        let witness = &proof.input;
-
-        let nf_pk = witness.nf_sk.commit();
-
-        // let death_constraint_was_committed_to =
-        //     witness.note.death_constraint == bincode::serialize(&death_constraint).unwrap();
-
-        // let death_constraint_is_satisfied: bool = Verifier::from_json(
-        //     bincode::deserialize(&proof.death_proof).unwrap(),
-        //     PublicInputsJson {
-        //         values: vec![ptx_root.hex()],
-        //     },
-        //     bincode::deserialize(&witness.note.death_constraint).unwrap(),
-        // )
-        // .unwrap()
-        // .verify()
-        // .is_ok();
-        let death_constraint_is_satisfied = true;
-        self.note_comm == witness.note.commit(nf_pk, witness.nonce)
-            && self.nullifier == Nullifier::new(witness.nf_sk, witness.nonce)
-            && self.balance == witness.note.balance()
-            && ptx_root == proof.ptx_root
-            // && death_constraint_was_committed_to
-            && death_constraint_is_satisfied
-    }
+impl Input {
+    // pub fn prove(
+    //     &self,
+    //     w: &InputWitness,
+    //     ptx_root: PtxRoot,
+    //     death_proof: Vec<u8>,
+    // ) -> Result<InputProof, Error> {
+    //     if bincode::serialize(&w.commit()).unwrap() != bincode::serialize(&self).unwrap() {
+    //         Err(Error::ProofFailed)
+    //     } else {
+    //         Ok(InputProof {
+    //             input: w.clone(),
+    //             ptx_root,
+    //             death_proof,
+    //         })
+    //     }
+    // }

+    // pub fn verify(&self, ptx_root: PtxRoot, proof: &InputProof) -> bool {
+    //     // verification checks the relation
+    //     //  - nf_pk == hash(nf_sk)
+    //     //  - note_comm == commit(note || nf_pk)
+    //     //  - nullifier == hash(nf_sk || nonce)
+    //     //  - balance == v * hash_to_curve(Unit) + blinding * H
+    //     //  - ptx_root is the same one that was used in proving.

+    //     let witness = &proof.input;

+    //     let nf_pk = witness.nf_sk.commit();

+    //     // let death_constraint_was_committed_to =
+    //     //     witness.note.death_constraint == bincode::serialize(&death_constraint).unwrap();

+    //     // let death_constraint_is_satisfied: bool = Verifier::from_json(
+    //     //     bincode::deserialize(&proof.death_proof).unwrap(),
+    //     //     PublicInputsJson {
+    //     //         values: vec![ptx_root.hex()],
+    //     //     },
+    //     //     bincode::deserialize(&witness.note.death_constraint).unwrap(),
+    //     // )
+    //     // .unwrap()
+    //     // .verify()
+    //     // .is_ok();
+    //     let death_constraint_is_satisfied = true;
+    //     self.note_comm == witness.note.commit(nf_pk, witness.nonce)
+    //         && self.nullifier == Nullifier::new(witness.nf_sk, witness.nonce)
+    //         && self.balance == witness.note.balance()
+    //         && ptx_root == proof.ptx_root
+    //         // && death_constraint_was_committed_to
+    //         && death_constraint_is_satisfied
+    // }
 
     pub fn to_bytes(&self) -> [u8; 96] {
         let mut bytes = [0u8; 96];
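The relations listed in the commented-out verify() are exactly what the nullifier proof this commit integrates must enforce in-circuit: nf_pk == hash(nf_sk), nullifier == hash(nf_sk || nonce), note_comm == commit(note || nf_pk), and balance == v * hash_to_curve(Unit) + blinding * H. A minimal out-of-circuit sketch of the two hash relations, using Blake2s (already a dependency of the crate); the byte widths and domain-separation tags here are assumptions, not the crate's actual encoding:

use blake2::{Blake2s256, Digest};

// nf_pk == hash(nf_sk): the public key is a hash commitment to the secret.
fn nf_pk(nf_sk: &[u8; 32]) -> [u8; 32] {
    let mut h = Blake2s256::new();
    h.update(b"NF_PK"); // hypothetical domain tag
    h.update(nf_sk);
    h.finalize().into()
}

// nullifier == hash(nf_sk || nonce): spending reveals the nullifier, which
// cannot be linked back to the note commitment without knowing nf_sk.
fn nullifier(nf_sk: &[u8; 32], nonce: &[u8; 32]) -> [u8; 32] {
    let mut h = Blake2s256::new();
    h.update(b"NULLIFIER"); // hypothetical domain tag
    h.update(nf_sk);
    h.update(nonce);
    h.finalize().into()
}

Moving these checks behind a risc0 proof lets a verifier confirm them against the public nullifier and note commitment without ever seeing nf_sk.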
File renamed without changes.