diff --git a/cl/Cargo.toml b/cl/Cargo.toml index c42d6203..c90b7d9e 100644 --- a/cl/Cargo.toml +++ b/cl/Cargo.toml @@ -1,21 +1,11 @@ -[package] -name = "cl" -version = "0.1.0" -edition = "2021" +[workspace] +resolver = "2" +members = [ "cl", "ledger", "proof_statements", "risc0_proofs"] -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -serde = {version="1.0", features = ["derive"]} -bincode = "1.3.3" -risc0-groth16 = "1.0.1" -blake2 = "0.10.6" -# jubjub = "0.10.0" -group = "0.13.0" -rand_core = "0.6.0" -rand_chacha = "0.3.1" -lazy_static = "1.4.0" -hex = "0.4.3" -curve25519-dalek = {version = "4.1", features = ["serde", "digest", "rand_core"]} -sha2 = "0.10" +# Always optimize; building and running the risc0_proofs takes much longer without optimization. +[profile.dev] +opt-level = 3 +[profile.release] +debug = 1 +lto = true diff --git a/cl/cl/Cargo.toml b/cl/cl/Cargo.toml new file mode 100644 index 00000000..30ee6784 --- /dev/null +++ b/cl/cl/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "cl" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +serde = {version="1.0", features = ["derive"]} +bincode = "1.3.3" +risc0-groth16 = "1.0.1" +blake2 = "0.10.6" +# jubjub = "0.10.0" +group = "0.13.0" +rand_core = "0.6.0" +rand_chacha = "0.3.1" +lazy_static = "1.4.0" +hex = "0.4.3" +curve25519-dalek = {version = "4.1", features = ["serde", "digest", "rand_core"]} +sha2 = "0.10" \ No newline at end of file diff --git a/cl/src/balance.rs b/cl/cl/src/balance.rs similarity index 100% rename from cl/src/balance.rs rename to cl/cl/src/balance.rs diff --git a/cl/src/bundle.rs b/cl/cl/src/bundle.rs similarity index 69% rename from cl/src/bundle.rs rename to cl/cl/src/bundle.rs index db999ee2..bfb0030a 100644 --- a/cl/src/bundle.rs +++ b/cl/cl/src/bundle.rs @@ -1,14 +1,8 @@ -use std::collections::BTreeSet; - use serde::{Deserialize, Serialize}; use curve25519_dalek::{constants::RISTRETTO_BASEPOINT_POINT, ristretto::RistrettoPoint, Scalar}; -use crate::{ - error::Error, - note::NoteCommitment, - partial_tx::{PartialTx, PartialTxProof}, -}; +use crate::partial_tx::PartialTx; /// The transaction bundle is a collection of partial transactions. 
/// The goal in bundling transactions is to produce a set of partial transactions @@ -24,11 +18,11 @@ pub struct BundleWitness { pub balance_blinding: Scalar, } -#[derive(Debug)] -pub struct BundleProof { - pub partials: Vec, - pub balance_blinding: Scalar, -} +// #[derive(Debug)] +// pub struct BundleProof { +// pub partials: Vec, +// pub balance_blinding: Scalar, +// } impl Bundle { pub fn balance(&self) -> RistrettoPoint { @@ -40,53 +34,53 @@ impl Bundle { == crate::balance::balance(0, RISTRETTO_BASEPOINT_POINT, balance_blinding_witness) } - pub fn prove( - &self, - w: BundleWitness, - ptx_proofs: Vec, - ) -> Result { - if ptx_proofs.len() == self.partials.len() { - return Err(Error::ProofFailed); - } - let input_notes: Vec = self - .partials - .iter() - .flat_map(|ptx| ptx.inputs.iter().map(|i| i.note_comm)) - .collect(); - if input_notes.len() != BTreeSet::from_iter(input_notes.iter()).len() { - return Err(Error::ProofFailed); - } - - let output_notes: Vec = self - .partials - .iter() - .flat_map(|ptx| ptx.outputs.iter().map(|o| o.note_comm)) - .collect(); - if output_notes.len() != BTreeSet::from_iter(output_notes.iter()).len() { - return Err(Error::ProofFailed); - } - - if self.balance() - != crate::balance::balance(0, RISTRETTO_BASEPOINT_POINT, w.balance_blinding) - { - return Err(Error::ProofFailed); - } - - Ok(BundleProof { - partials: ptx_proofs, - balance_blinding: w.balance_blinding, - }) - } - - pub fn verify(&self, proof: BundleProof) -> bool { - proof.partials.len() == self.partials.len() - && self.is_balanced(proof.balance_blinding) - && self - .partials - .iter() - .zip(&proof.partials) - .all(|(p, p_proof)| p.verify(p_proof)) - } + // pub fn prove( + // &self, + // w: BundleWitness, + // ptx_proofs: Vec, + // ) -> Result { + // if ptx_proofs.len() == self.partials.len() { + // return Err(Error::ProofFailed); + // } + // let input_notes: Vec = self + // .partials + // .iter() + // .flat_map(|ptx| ptx.inputs.iter().map(|i| i.note_comm)) + // .collect(); + // if input_notes.len() != BTreeSet::from_iter(input_notes.iter()).len() { + // return Err(Error::ProofFailed); + // } + + // let output_notes: Vec = self + // .partials + // .iter() + // .flat_map(|ptx| ptx.outputs.iter().map(|o| o.note_comm)) + // .collect(); + // if output_notes.len() != BTreeSet::from_iter(output_notes.iter()).len() { + // return Err(Error::ProofFailed); + // } + + // if self.balance() + // != crate::balance::balance(0, RISTRETTO_BASEPOINT_POINT, w.balance_blinding) + // { + // return Err(Error::ProofFailed); + // } + + // Ok(BundleProof { + // partials: ptx_proofs, + // balance_blinding: w.balance_blinding, + // }) + // } + + // pub fn verify(&self, proof: BundleProof) -> bool { + // proof.partials.len() == self.partials.len() + // && self.is_balanced(proof.balance_blinding) + // && self + // .partials + // .iter() + // .zip(&proof.partials) + // .all(|(p, p_proof)| p.verify(p_proof)) + // } } #[cfg(test)] diff --git a/cl/src/crypto.rs b/cl/cl/src/crypto.rs similarity index 100% rename from cl/src/crypto.rs rename to cl/cl/src/crypto.rs diff --git a/cl/src/error.rs b/cl/cl/src/error.rs similarity index 100% rename from cl/src/error.rs rename to cl/cl/src/error.rs diff --git a/cl/src/input.rs b/cl/cl/src/input.rs similarity index 63% rename from cl/src/input.rs rename to cl/cl/src/input.rs index a8bde4cd..132085ce 100644 --- a/cl/src/input.rs +++ b/cl/cl/src/input.rs @@ -4,10 +4,8 @@ /// which on their own may not balance (i.e. 
\sum inputs != \sum outputs) use crate::{ balance::Balance, - error::Error, note::{NoteCommitment, NoteWitness}, nullifier::{Nullifier, NullifierNonce, NullifierSecret}, - partial_tx::PtxRoot, }; use rand_core::RngCore; // use risc0_groth16::{PublicInputsJson, Verifier}; @@ -20,7 +18,7 @@ pub struct Input { pub balance: Balance, } -#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub struct InputWitness { pub note: NoteWitness, pub nf_sk: NullifierSecret, @@ -43,67 +41,67 @@ impl InputWitness { balance: self.note.balance(), } } -} - -// as we don't have SNARKS hooked up yet, the witness will be our proof -#[derive(Debug, PartialEq, Eq, Clone)] -pub struct InputProof { - input: InputWitness, - ptx_root: PtxRoot, - death_proof: Vec, -} -impl Input { - pub fn prove( - &self, - w: &InputWitness, - ptx_root: PtxRoot, - death_proof: Vec, - ) -> Result { - if bincode::serialize(&w.commit()).unwrap() != bincode::serialize(&self).unwrap() { - Err(Error::ProofFailed) - } else { - Ok(InputProof { - input: w.clone(), - ptx_root, - death_proof, - }) + pub fn to_output_witness(&self) -> crate::OutputWitness { + crate::OutputWitness { + note: self.note.clone(), + nf_pk: self.nf_sk.commit(), + nonce: self.nonce, } } +} - pub fn verify(&self, ptx_root: PtxRoot, proof: &InputProof) -> bool { - // verification checks the relation - // - nf_pk == hash(nf_sk) - // - note_comm == commit(note || nf_pk) - // - nullifier == hash(nf_sk || nonce) - // - balance == v * hash_to_curve(Unit) + blinding * H - // - ptx_root is the same one that was used in proving. - - let witness = &proof.input; - - let nf_pk = witness.nf_sk.commit(); - - // let death_constraint_was_committed_to = - // witness.note.death_constraint == bincode::serialize(&death_constraint).unwrap(); - - // let death_constraint_is_satisfied: bool = Verifier::from_json( - // bincode::deserialize(&proof.death_proof).unwrap(), - // PublicInputsJson { - // values: vec![ptx_root.hex()], - // }, - // bincode::deserialize(&witness.note.death_constraint).unwrap(), - // ) - // .unwrap() - // .verify() - // .is_ok(); - let death_constraint_is_satisfied = true; - self.note_comm == witness.note.commit(nf_pk, witness.nonce) - && self.nullifier == Nullifier::new(witness.nf_sk, witness.nonce) - && self.balance == witness.note.balance() - && ptx_root == proof.ptx_root - // && death_constraint_was_committed_to - && death_constraint_is_satisfied - } +impl Input { + // pub fn prove( + // &self, + // w: &InputWitness, + // ptx_root: PtxRoot, + // death_proof: Vec, + // ) -> Result { + // if bincode::serialize(&w.commit()).unwrap() != bincode::serialize(&self).unwrap() { + // Err(Error::ProofFailed) + // } else { + // Ok(InputProof { + // input: w.clone(), + // ptx_root, + // death_proof, + // }) + // } + // } + + // pub fn verify(&self, ptx_root: PtxRoot, proof: &InputProof) -> bool { + // // verification checks the relation + // // - nf_pk == hash(nf_sk) + // // - note_comm == commit(note || nf_pk) + // // - nullifier == hash(nf_sk || nonce) + // // - balance == v * hash_to_curve(Unit) + blinding * H + // // - ptx_root is the same one that was used in proving. 
+ + // let witness = &proof.input; + + // let nf_pk = witness.nf_sk.commit(); + + // // let death_constraint_was_committed_to = + // // witness.note.death_constraint == bincode::serialize(&death_constraint).unwrap(); + + // // let death_constraint_is_satisfied: bool = Verifier::from_json( + // // bincode::deserialize(&proof.death_proof).unwrap(), + // // PublicInputsJson { + // // values: vec![ptx_root.hex()], + // // }, + // // bincode::deserialize(&witness.note.death_constraint).unwrap(), + // // ) + // // .unwrap() + // // .verify() + // // .is_ok(); + // let death_constraint_is_satisfied = true; + // self.note_comm == witness.note.commit(nf_pk, witness.nonce) + // && self.nullifier == Nullifier::new(witness.nf_sk, witness.nonce) + // && self.balance == witness.note.balance() + // && ptx_root == proof.ptx_root + // // && death_constraint_was_committed_to + // && death_constraint_is_satisfied + // } pub fn to_bytes(&self) -> [u8; 96] { let mut bytes = [0u8; 96]; diff --git a/cl/src/lib.rs b/cl/cl/src/lib.rs similarity index 100% rename from cl/src/lib.rs rename to cl/cl/src/lib.rs diff --git a/cl/src/merkle.rs b/cl/cl/src/merkle.rs similarity index 92% rename from cl/src/merkle.rs rename to cl/cl/src/merkle.rs index a5c8edaa..a51ae1d5 100644 --- a/cl/src/merkle.rs +++ b/cl/cl/src/merkle.rs @@ -49,7 +49,7 @@ pub enum PathNode { Right([u8; 32]), } -pub fn verify_path(leaf: [u8; 32], path: &[PathNode], root: [u8; 32]) -> bool { +pub fn path_root(leaf: [u8; 32], path: &[PathNode]) -> [u8; 32] { let mut computed_hash = leaf; for path_node in path { @@ -63,7 +63,7 @@ pub fn verify_path(leaf: [u8; 32], path: &[PathNode], root: [u8; 32]) -> bool { } } - computed_hash == root + computed_hash } pub fn path(leaves: [[u8; 32]; N], idx: usize) -> Vec { @@ -163,7 +163,7 @@ mod test { let p = path::<1>(leaves, 0); let expected = vec![]; assert_eq!(p, expected); - assert!(verify_path(leaf(b"desert"), &p, r)); + assert_eq!(path_root(leaf(b"desert"), &p), r); } #[test] @@ -176,14 +176,14 @@ let p0 = path(leaves, 0); let expected0 = vec![PathNode::Right(leaf(b"sand"))]; assert_eq!(p0, expected0); - assert!(verify_path(leaf(b"desert"), &p0, r)); + assert_eq!(path_root(leaf(b"desert"), &p0), r); // --- proof for element at idx 1 let p1 = path(leaves, 1); let expected1 = vec![PathNode::Left(leaf(b"desert"))]; assert_eq!(p1, expected1); - assert!(verify_path(leaf(b"sand"), &p1, r)); + assert_eq!(path_root(leaf(b"sand"), &p1), r); } #[test] @@ -204,7 +204,7 @@ PathNode::Right(node(leaf(b"feels"), leaf(b"warm"))), ]; assert_eq!(p0, expected0); - assert!(verify_path(leaf(b"desert"), &p0, r)); + assert_eq!(path_root(leaf(b"desert"), &p0), r); // --- proof for element at idx 1 @@ -214,7 +214,7 @@ PathNode::Right(node(leaf(b"feels"), leaf(b"warm"))), ]; assert_eq!(p1, expected1); - assert!(verify_path(leaf(b"sand"), &p1, r)); + assert_eq!(path_root(leaf(b"sand"), &p1), r); // --- proof for element at idx 2 @@ -224,7 +224,7 @@ PathNode::Left(node(leaf(b"desert"), leaf(b"sand"))), ]; assert_eq!(p2, expected2); - assert!(verify_path(leaf(b"feels"), &p2, r)); + assert_eq!(path_root(leaf(b"feels"), &p2), r); // --- proof for element at idx 3 @@ -234,6 +234,6 @@ PathNode::Left(node(leaf(b"desert"), leaf(b"sand"))), ]; assert_eq!(p3, expected3); - assert!(verify_path(leaf(b"warm"), &p3, r)); + assert_eq!(path_root(leaf(b"warm"), &p3), r); } } diff --git a/cl/src/note.rs b/cl/cl/src/note.rs similarity index 100% rename from cl/src/note.rs rename to cl/cl/src/note.rs diff
--git a/cl/src/nullifier.rs b/cl/cl/src/nullifier.rs similarity index 100% rename from cl/src/nullifier.rs rename to cl/cl/src/nullifier.rs diff --git a/cl/src/output.rs b/cl/cl/src/output.rs similarity index 98% rename from cl/src/output.rs rename to cl/cl/src/output.rs index d4840160..be052493 100644 --- a/cl/src/output.rs +++ b/cl/cl/src/output.rs @@ -14,7 +14,7 @@ pub struct Output { pub balance: Balance, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub struct OutputWitness { pub note: NoteWitness, pub nf_pk: NullifierCommitment, diff --git a/cl/src/partial_tx.rs b/cl/cl/src/partial_tx.rs similarity index 70% rename from cl/src/partial_tx.rs rename to cl/cl/src/partial_tx.rs index 70fa58eb..61e5e060 100644 --- a/cl/src/partial_tx.rs +++ b/cl/cl/src/partial_tx.rs @@ -1,14 +1,11 @@ -use std::collections::BTreeSet; - use rand_core::RngCore; // use risc0_groth16::ProofJson; use curve25519_dalek::ristretto::RistrettoPoint; use serde::{Deserialize, Serialize}; -use crate::error::Error; -use crate::input::{Input, InputProof, InputWitness}; +use crate::input::{Input, InputWitness}; use crate::merkle; -use crate::output::{Output, OutputProof, OutputWitness}; +use crate::output::{Output, OutputWitness}; const MAX_INPUTS: usize = 8; const MAX_OUTPUTS: usize = 8; @@ -42,11 +39,11 @@ pub struct PartialTxWitness { pub outputs: Vec, } -#[derive(Debug)] -pub struct PartialTxProof { - pub inputs: Vec, - pub outputs: Vec, -} +// #[derive(Debug)] +// pub struct PartialTxProof { +// pub inputs: Vec, +// pub outputs: Vec, +// } impl PartialTx { pub fn from_witness(w: PartialTxWitness) -> Self { @@ -99,63 +96,63 @@ impl PartialTx { PtxRoot(root) } - pub fn prove( - &self, - w: PartialTxWitness, - death_proofs: Vec>, - ) -> Result { - if bincode::serialize(&Self::from_witness(w.clone())).unwrap() - != bincode::serialize(&self).unwrap() - { - return Err(Error::ProofFailed); - } - let input_note_comms = BTreeSet::from_iter(self.inputs.iter().map(|i| i.note_comm)); - let output_note_comms = BTreeSet::from_iter(self.outputs.iter().map(|o| o.note_comm)); - - if input_note_comms.len() != self.inputs.len() - || output_note_comms.len() != self.outputs.len() - { - return Err(Error::ProofFailed); - } - - let ptx_root = self.root(); - - let input_proofs: Vec = Result::from_iter( - self.inputs - .iter() - .zip(&w.inputs) - .zip(death_proofs.into_iter()) - .map(|((i, i_w), death_p)| i.prove(i_w, ptx_root, death_p)), - )?; - - let output_proofs: Vec = Result::from_iter( - self.outputs - .iter() - .zip(&w.outputs) - .map(|(o, o_w)| o.prove(o_w)), - )?; - - Ok(PartialTxProof { - inputs: input_proofs, - outputs: output_proofs, - }) - } - - pub fn verify(&self, proof: &PartialTxProof) -> bool { - let ptx_root = self.root(); - self.inputs.len() == proof.inputs.len() - && self.outputs.len() == proof.outputs.len() - && self - .inputs - .iter() - .zip(&proof.inputs) - .all(|(i, p)| i.verify(ptx_root, p)) - && self - .outputs - .iter() - .zip(&proof.outputs) - .all(|(o, p)| o.verify(p)) - } + // pub fn prove( + // &self, + // w: PartialTxWitness, + // death_proofs: Vec>, + // ) -> Result { + // if bincode::serialize(&Self::from_witness(w.clone())).unwrap() + // != bincode::serialize(&self).unwrap() + // { + // return Err(Error::ProofFailed); + // } + // let input_note_comms = BTreeSet::from_iter(self.inputs.iter().map(|i| i.note_comm)); + // let output_note_comms = BTreeSet::from_iter(self.outputs.iter().map(|o| o.note_comm)); + + // if input_note_comms.len() != 
self.inputs.len() + // || output_note_comms.len() != self.outputs.len() + // { + // return Err(Error::ProofFailed); + // } + + // let ptx_root = self.root(); + + // let input_proofs: Vec = Result::from_iter( + // self.inputs + // .iter() + // .zip(&w.inputs) + // .zip(death_proofs.into_iter()) + // .map(|((i, i_w), death_p)| i.prove(i_w, ptx_root, death_p)), + // )?; + + // let output_proofs: Vec = Result::from_iter( + // self.outputs + // .iter() + // .zip(&w.outputs) + // .map(|(o, o_w)| o.prove(o_w)), + // )?; + + // Ok(PartialTxProof { + // inputs: input_proofs, + // outputs: output_proofs, + // }) + // } + + // pub fn verify(&self, proof: &PartialTxProof) -> bool { + // let ptx_root = self.root(); + // self.inputs.len() == proof.inputs.len() + // && self.outputs.len() == proof.outputs.len() + // && self + // .inputs + // .iter() + // .zip(&proof.inputs) + // .all(|(i, p)| i.verify(ptx_root, p)) + // && self + // .outputs + // .iter() + // .zip(&proof.outputs) + // .all(|(o, p)| o.verify(p)) + // } pub fn balance(&self) -> RistrettoPoint { let in_sum: RistrettoPoint = self.inputs.iter().map(|i| i.balance.0).sum(); diff --git a/cl/src/test_util.rs b/cl/cl/src/test_util.rs similarity index 100% rename from cl/src/test_util.rs rename to cl/cl/src/test_util.rs diff --git a/cl/ledger/Cargo.toml b/cl/ledger/Cargo.toml new file mode 100644 index 00000000..a0767f22 --- /dev/null +++ b/cl/ledger/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "ledger" +version = "0.1.0" +edition = "2021" + +[dependencies] +cl = { path = "../cl" } +proof_statements = { path = "../proof_statements" } +nomos_cl_risc0_proofs = { path = "../risc0_proofs" } +risc0-zkvm = { version = "1.0", features = ["prove", "metal"] } +risc0-groth16 = { version = "1.0" } +rand = "0.8.5" +thiserror = "1.0.62" diff --git a/cl/ledger/src/error.rs b/cl/ledger/src/error.rs new file mode 100644 index 00000000..3204a721 --- /dev/null +++ b/cl/ledger/src/error.rs @@ -0,0 +1,9 @@ +use thiserror::Error; + +pub type Result = core::result::Result; + +#[derive(Error, Debug)] +pub enum Error { + #[error("risc0 failed to serde")] + Risc0Serde(#[from] risc0_zkvm::serde::Error), +} diff --git a/cl/ledger/src/input.rs b/cl/ledger/src/input.rs new file mode 100644 index 00000000..d4533f79 --- /dev/null +++ b/cl/ledger/src/input.rs @@ -0,0 +1,125 @@ +use proof_statements::nullifier::{NullifierPrivate, NullifierPublic}; + +use crate::error::Result; + +const MAX_NOTE_COMMS: usize = 2usize.pow(8); + +#[derive(Debug, Clone)] +pub struct InputNullifierProof { + receipt: risc0_zkvm::Receipt, +} + +impl InputNullifierProof { + pub fn public(&self) -> Result { + Ok(self.receipt.journal.decode()?) 
+ } + + pub fn verify(&self, expected_public_inputs: NullifierPublic) -> bool { + let Ok(public_inputs) = self.public() else { + return false; + }; + + public_inputs == expected_public_inputs + && self + .receipt + .verify(nomos_cl_risc0_proofs::NULLIFIER_ID) + .is_ok() + } +} + +pub fn prove_input_nullifier( + input: &cl::InputWitness, + note_commitments: &[cl::NoteCommitment], +) -> InputNullifierProof { + let output = input.to_output_witness(); + let cm_leaves = note_commitment_leaves(note_commitments); + let output_cm = output.commit_note(); + let cm_idx = note_commitments + .iter() + .position(|c| c == &output_cm) + .unwrap(); + let cm_path = cl::merkle::path(cm_leaves, cm_idx); + + let secrets = NullifierPrivate { + nf_sk: input.nf_sk, + output, + cm_path, + }; + + let env = risc0_zkvm::ExecutorEnv::builder() + .write(&secrets) + .unwrap() + .build() + .unwrap(); + + // Obtain the default prover. + let prover = risc0_zkvm::default_prover(); + + use std::time::Instant; + let start_t = Instant::now(); + + // Proof information by proving the specified ELF binary. + // This struct contains the receipt along with statistics about execution of the guest + let opts = risc0_zkvm::ProverOpts::succinct(); + let prove_info = prover + .prove_with_opts(env, nomos_cl_risc0_proofs::NULLIFIER_ELF, &opts) + .unwrap(); + + println!( + "STARK prover time: {:.2?}, total_cycles: {}", + start_t.elapsed(), + prove_info.stats.total_cycles + ); + // extract the receipt. + let receipt = prove_info.receipt; + InputNullifierProof { receipt } +} + +fn note_commitment_leaves(note_commitments: &[cl::NoteCommitment]) -> [[u8; 32]; MAX_NOTE_COMMS] { + let note_comm_bytes = Vec::from_iter(note_commitments.iter().map(|c| c.as_bytes().to_vec())); + let cm_leaves = cl::merkle::padded_leaves::(¬e_comm_bytes); + cm_leaves +} + +#[cfg(test)] +mod test { + use proof_statements::nullifier::NullifierPublic; + use rand::thread_rng; + + use super::{note_commitment_leaves, prove_input_nullifier}; + + #[test] + fn test_input_nullifier_prover() { + let mut rng = thread_rng(); + let input = cl::InputWitness { + note: cl::NoteWitness { + balance: cl::BalanceWitness::random(32, "NMO", &mut rng), + death_constraint: vec![], + state: [0u8; 32], + }, + nf_sk: cl::NullifierSecret::random(&mut rng), + nonce: cl::NullifierNonce::random(&mut rng), + }; + + let notes = vec![input.to_output_witness().commit_note()]; + + let proof = prove_input_nullifier(&input, ¬es); + + let expected_public_inputs = NullifierPublic { + cm_root: cl::merkle::root(note_commitment_leaves(¬es)), + nf: input.commit().nullifier, + }; + + assert!(proof.verify(expected_public_inputs)); + + let wrong_public_inputs = NullifierPublic { + cm_root: cl::merkle::root(note_commitment_leaves(¬es)), + nf: cl::Nullifier::new( + cl::NullifierSecret::random(&mut rng), + cl::NullifierNonce::random(&mut rng), + ), + }; + + assert!(!proof.verify(wrong_public_inputs)); + } +} diff --git a/cl/ledger/src/lib.rs b/cl/ledger/src/lib.rs new file mode 100644 index 00000000..b4582dfe --- /dev/null +++ b/cl/ledger/src/lib.rs @@ -0,0 +1,17 @@ +pub mod error; +pub mod input; + +pub fn add(left: usize, right: usize) -> usize { + left + right +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn it_works() { + let result = add(2, 2); + assert_eq!(result, 4); + } +} diff --git a/cl/proof_statements/Cargo.toml b/cl/proof_statements/Cargo.toml new file mode 100644 index 00000000..b0a65e34 --- /dev/null +++ b/cl/proof_statements/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = 
"proof_statements" +version = "0.1.0" +edition = "2021" + +[dependencies] +cl = { path = "../cl" } +serde = { version = "1.0", features = ["derive"] } \ No newline at end of file diff --git a/cl/proof_statements/src/lib.rs b/cl/proof_statements/src/lib.rs new file mode 100644 index 00000000..1d68566d --- /dev/null +++ b/cl/proof_statements/src/lib.rs @@ -0,0 +1 @@ +pub mod nullifier; diff --git a/cl/proof_statements/src/nullifier.rs b/cl/proof_statements/src/nullifier.rs new file mode 100644 index 00000000..709aa9d1 --- /dev/null +++ b/cl/proof_statements/src/nullifier.rs @@ -0,0 +1,22 @@ +use serde::{Deserialize, Serialize}; + +/// for public input `nf` (nullifier) and `root_cm` (root of merkle tree over commitment set). +/// the prover has knowledge of `output = (note, nf_pk, nonce)`, `nf` and `path` s.t. that the following constraints hold +/// 0. nf_pk = hash(nf_sk) +/// 1. nf = hash(nonce||nf_sk) +/// 2. note_cm = output_commitment(output) +/// 3. verify_merkle_path(note_cm, root, path) + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct NullifierPublic { + pub cm_root: [u8; 32], + pub nf: cl::Nullifier, + // TODO: we need a way to link this statement to a particular input. i.e. prove that the nullifier is actually derived from the input note. +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct NullifierPrivate { + pub nf_sk: cl::NullifierSecret, + pub output: cl::OutputWitness, + pub cm_path: Vec, +} diff --git a/cl/risc0_proofs/Cargo.toml b/cl/risc0_proofs/Cargo.toml new file mode 100644 index 00000000..18a61830 --- /dev/null +++ b/cl/risc0_proofs/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "nomos_cl_risc0_proofs" +version = "0.1.0" +edition = "2021" + +[build-dependencies] +risc0-build = { version = "1.0" } + +[package.metadata.risc0] +methods = ["nullifier"] + diff --git a/cl/risc0_proofs/build.rs b/cl/risc0_proofs/build.rs new file mode 100644 index 00000000..08a8a4eb --- /dev/null +++ b/cl/risc0_proofs/build.rs @@ -0,0 +1,3 @@ +fn main() { + risc0_build::embed_methods(); +} diff --git a/goas/methods/nullifier/Cargo.toml b/cl/risc0_proofs/nullifier/Cargo.toml similarity index 62% rename from goas/methods/nullifier/Cargo.toml rename to cl/risc0_proofs/nullifier/Cargo.toml index 6297c029..7a93f841 100644 --- a/goas/methods/nullifier/Cargo.toml +++ b/cl/risc0_proofs/nullifier/Cargo.toml @@ -8,13 +8,12 @@ edition = "2021" [dependencies] risc0-zkvm = { version = "1.0", default-features = false, features = ['std'] } serde = { version = "1.0", features = ["derive"] } -bincode = "1" -cl = { path = "../../../cl" } +cl = { path = "../../cl" } +proof_statements = { path = "../../proof_statements" } + [patch.crates-io] -# Placing these patch statement in the workspace Cargo.toml will add RISC Zero SHA-256 and bigint -# multiplication accelerator support for all downstream usages of the following crates. +# add RISC Zero accelerator support for all downstream usages of the following crates. 
sha2 = { git = "https://github.com/risc0/RustCrypto-hashes", tag = "sha2-v0.10.8-risczero.0" } -# k256 = { git = "https://github.com/risc0/RustCrypto-elliptic-curves", tag = "k256/v0.13.3-risczero.0" } crypto-bigint = { git = "https://github.com/risc0/RustCrypto-crypto-bigint", tag = "v0.5.5-risczero.0" } curve25519-dalek = { git = "https://github.com/risc0/curve25519-dalek", tag = "curve25519-4.1.2-risczero.0" } diff --git a/cl/risc0_proofs/nullifier/src/main.rs b/cl/risc0_proofs/nullifier/src/main.rs new file mode 100644 index 00000000..9c5c15e7 --- /dev/null +++ b/cl/risc0_proofs/nullifier/src/main.rs @@ -0,0 +1,28 @@ +/// Nullifier Proof +/// +/// Our goal: prove the nullifier nf was derived from a note that had previously been committed to. +/// +/// More formally, the nullifier statement says: +/// for public input `nf` (nullifier) and `root_cm` (root of merkle tree over commitment set). +/// the prover has knowledge of `output = (note, nf_pk, nonce)`, `nf` and `path` s.t. the following constraints hold +/// 0. nf_pk = hash(nf_sk) +/// 1. nf = hash(nonce||nf_sk) +/// 2. note_cm = output_commitment(output) +/// 3. verify_merkle_path(note_cm, root, path) +use cl::merkle; +use cl::nullifier::Nullifier; +use proof_statements::nullifier::{NullifierPrivate, NullifierPublic}; +use risc0_zkvm::guest::env; + +fn main() { + let secret: NullifierPrivate = env::read(); + assert_eq!(secret.output.nf_pk, secret.nf_sk.commit()); + + let cm_out = secret.output.commit_note(); + let cm_leaf = merkle::leaf(cm_out.as_bytes()); + let cm_root = merkle::path_root(cm_leaf, &secret.cm_path); + + let nf = Nullifier::new(secret.nf_sk, secret.output.nonce); + + env::commit(&NullifierPublic { cm_root, nf }); +} diff --git a/cl/risc0_proofs/src/lib.rs b/cl/risc0_proofs/src/lib.rs new file mode 100644 index 00000000..1bdb3085 --- /dev/null +++ b/cl/risc0_proofs/src/lib.rs @@ -0,0 +1 @@ +include!(concat!(env!("OUT_DIR"), "/methods.rs")); diff --git a/goas/host/Cargo.toml b/goas/host/Cargo.toml index b05a77bf..28647257 100644 --- a/goas/host/Cargo.toml +++ b/goas/host/Cargo.toml @@ -6,8 +6,8 @@ default-run = "host" [dependencies] methods = { path = "../methods" } -risc0-zkvm = { version = "1.0.1", features = ["prove"] } -risc0-groth16 = { version = "1.0.1" } +risc0-zkvm = { version = "1.0", features = ["prove", "metal"] } +risc0-groth16 = { version = "1.0" } tracing-subscriber = { version = "0.3", features = ["env-filter"] } serde = "1.0" blake2 = "0.10" @@ -16,4 +16,4 @@ common = { path = "../common" } tempfile = "3" clap = { version = "4", features = ["derive"] } rand = "0.8.5" -cl = { path = "../../cl" } +cl = { path = "../../cl/cl" } diff --git a/goas/host/src/main.rs b/goas/host/src/main.rs index ef54db8d..a1e9cc47 100644 --- a/goas/host/src/main.rs +++ b/goas/host/src/main.rs @@ -10,7 +10,8 @@ use clap::Parser; #[command(version, about, long_about = None)] enum Action { Stf, - Nullifier, + // Nullifier, + Transfer, } fn stf_prove_stark() { @@ -101,64 +102,66 @@ fn stf_prove_stark() { receipt.verify(methods::METHOD_ID).unwrap(); } -fn nf_prove_stark() { - let mut rng = rand::thread_rng(); - - let nf_sk = cl::NullifierSecret::random(&mut rng); - - let output = cl::OutputWitness { - note: cl::NoteWitness { - balance: cl::BalanceWitness::random(10, "NMO", &mut rng), - death_constraint: vec![], - state: [0u8; 32], - }, - nf_pk: nf_sk.commit(), - nonce: cl::NullifierNonce::random(&mut rng), - }; - let output_cm = output.commit_note().as_bytes().to_vec(); - let cm_set = cl::merkle::padded_leaves::<64>(&[output_cm]);
- let cm_root = cl::merkle::root(cm_set); - let cm_path = cl::merkle::path(cm_set, 0); - let nf = cl::Nullifier::new(nf_sk, output.nonce); - - let env = ExecutorEnv::builder() - .write(&cm_root) - .unwrap() - .write(&nf) - .unwrap() - .write(&nf_sk) - .unwrap() - .write(&output) - .unwrap() - .write(&cm_path) - .unwrap() - .build() - .unwrap(); - - // Obtain the default prover. - let prover = default_prover(); - - use std::time::Instant; - let start_t = Instant::now(); - - // Proof information by proving the specified ELF binary. - // This struct contains the receipt along with statistics about execution of the guest - let opts = risc0_zkvm::ProverOpts::succinct(); - let prove_info = prover - .prove_with_opts(env, methods::NULLIFIER_ELF, &opts) - .unwrap(); - - println!("STARK prover time: {:.2?}", start_t.elapsed()); - // extract the receipt. - let receipt = prove_info.receipt; - - // TODO: Implement code for retrieving receipt journal here. - - std::fs::write("proof.stark", bincode::serialize(&receipt).unwrap()).unwrap(); - // The receipt was verified at the end of proving, but the below code is an - // example of how someone else could verify this receipt. - receipt.verify(methods::NULLIFIER_ID).unwrap(); -} +// fn nf_prove_stark() { +// let mut rng = rand::thread_rng(); + +// let nf_sk = cl::NullifierSecret::random(&mut rng); + +// let output = cl::OutputWitness { +// note: cl::NoteWitness { +// balance: cl::BalanceWitness::random(10, "NMO", &mut rng), +// death_constraint: vec![], +// state: [0u8; 32], +// }, +// nf_pk: nf_sk.commit(), +// nonce: cl::NullifierNonce::random(&mut rng), +// }; +// let output_cm = output.commit_note().as_bytes().to_vec(); +// let cm_set = cl::merkle::padded_leaves::<64>(&[output_cm]); +// let cm_root = cl::merkle::root(cm_set); +// let cm_path = cl::merkle::path(cm_set, 0); +// let nf = cl::Nullifier::new(nf_sk, output.nonce); + +// let env = ExecutorEnv::builder() +// .write(&cm_root) +// .unwrap() +// .write(&nf) +// .unwrap() +// .write(&nf_sk) +// .unwrap() +// .write(&output) +// .unwrap() +// .write(&cm_path) +// .unwrap() +// .build() +// .unwrap(); + +// // Obtain the default prover. +// let prover = default_prover(); + +// use std::time::Instant; +// let start_t = Instant::now(); + +// // Proof information by proving the specified ELF binary. +// // This struct contains the receipt along with statistics about execution of the guest +// let opts = risc0_zkvm::ProverOpts::succinct(); +// let prove_info = prover +// .prove_with_opts(env, methods::NULLIFIER_ELF, &opts) +// .unwrap(); + +// println!("STARK prover time: {:.2?}", start_t.elapsed()); +// // extract the receipt. +// let receipt = prove_info.receipt; + +// // TODO: Implement code for retrieving receipt journal here. + +// std::fs::write("proof.stark", bincode::serialize(&receipt).unwrap()).unwrap(); +// // The receipt was verified at the end of proving, but the below code is an +// // example of how someone else could verify this receipt. +// receipt.verify(methods::NULLIFIER_ID).unwrap(); +// } + +fn transfer_prove_stark() {} fn main() { // Initialize tracing. 
In order to view logs, run `RUST_LOG=info cargo run` @@ -170,7 +173,8 @@ fn main() { match action { Action::Stf => stf_prove_stark(), - Action::Nullifier => nf_prove_stark(), + // Action::Nullifier => nf_prove_stark(), + Action::Transfer => transfer_prove_stark(), } } diff --git a/goas/methods/Cargo.toml b/goas/methods/Cargo.toml index 71609506..0a943cf6 100644 --- a/goas/methods/Cargo.toml +++ b/goas/methods/Cargo.toml @@ -7,4 +7,4 @@ edition = "2021" risc0-build = { version = "1.0" } [package.metadata.risc0] -methods = ["guest", "nullifier"] +methods = ["guest"] diff --git a/goas/methods/guest/Cargo.toml b/goas/methods/guest/Cargo.toml index 24458779..c8f97849 100644 --- a/goas/methods/guest/Cargo.toml +++ b/goas/methods/guest/Cargo.toml @@ -13,7 +13,7 @@ blake2 = "0.10" serde = { version = "1.0", features = ["derive"] } bincode = "1" common = { path = "../../common" } -cl = { path = "../../../cl" } +cl = { path = "../../../cl/cl" } [patch.crates-io] # Placing these patch statement in the workspace Cargo.toml will add RISC Zero SHA-256 and bigint diff --git a/goas/methods/guest/src/main.rs b/goas/methods/guest/src/main.rs index 21e9da98..1465c866 100644 --- a/goas/methods/guest/src/main.rs +++ b/goas/methods/guest/src/main.rs @@ -1,9 +1,9 @@ use blake2::{Blake2s256, Digest}; -use risc0_zkvm::guest::env; -use common::*; -use cl::merkle; use cl::input::InputWitness; +use cl::merkle; use cl::output::OutputWitness; +use common::*; +use risc0_zkvm::guest::env; /// Public Inputs: /// * ptx_root: the root of the partial tx merkle tree of inputs/outputs @@ -31,8 +31,11 @@ fn execute( // a transfer is included as part of the same transaction in the cl let in_comm = in_note.commit().to_bytes(); eprintln!("input comm: {}", env::cycle_count()); - - assert!(merkle::verify_path(merkle::leaf(&in_comm), &in_ptx_path, input_root)); + + assert_eq!( + merkle::path_root(merkle::leaf(&in_comm), &in_ptx_path), + input_root + ); eprintln!("input merkle path: {}", env::cycle_count()); // check the commitments match the actual data @@ -61,8 +64,11 @@ fn execute( // (this is done in the death condition to disallow burning) let out_comm = out_note.commit().to_bytes(); eprintln!("output comm: {}", env::cycle_count()); - - assert!(merkle::verify_path(merkle::leaf(&out_comm), &out_ptx_path, output_root)); + + assert_eq!( + merkle::path_root(merkle::leaf(&out_comm), &out_ptx_path), + output_root + ); eprintln!("out merkle proof: {}", env::cycle_count()); } @@ -81,8 +87,19 @@ fn main() { let state: State = env::read(); let journal: Journal = env::read(); - eprintln!("parse input: {}", env::cycle_count()); - execute(ptx_root, input_root, output_root, in_ptx_path, out_ptx_path, in_note, out_note, input, state, journal); + eprintln!("parse input: {}", env::cycle_count()); + execute( + ptx_root, + input_root, + output_root, + in_ptx_path, + out_ptx_path, + in_note, + out_note, + input, + state, + journal, + ); } fn calculate_state_hash(state: &State) -> [u8; 32] { diff --git a/goas/methods/nullifier/src/main.rs b/goas/methods/nullifier/src/main.rs deleted file mode 100644 index 4ff79da4..00000000 --- a/goas/methods/nullifier/src/main.rs +++ /dev/null @@ -1,57 +0,0 @@ -/// Nullifier Proof -/// -/// Our goal: prove the nullifier nf was derived from a note that had previously been committed to. -/// -/// More formally, nullifier statement says: -/// for public input `nf` (nullifier) and `root_cm` (root of merkle tree over commitment set). -/// the prover has knowledge of `output = (note, nf_pk, nonce)`, `nf` and `path` s.t. 
that the following constraints hold -/// 0. nf_pk = hash(nf_sk) -/// 1. nf = hash(nonce||nf_sk) -/// 2. note_cm = output_commitment(output) -/// 3. verify_merkle_path(note_cm, root, path) -use cl::merkle; -use cl::nullifier::{Nullifier, NullifierSecret}; -use cl::output::OutputWitness; -use risc0_zkvm::guest::env; - -fn execute( - // public - cm_root: [u8; 32], - nf: Nullifier, - // private - nf_sk: NullifierSecret, - output: OutputWitness, - cm_path: Vec, -) { - eprintln!("start exec: {}", env::cycle_count()); - - assert_eq!(output.nf_pk, nf_sk.commit()); - eprintln!("output nullifier: {}", env::cycle_count()); - - assert_eq!(nf, Nullifier::new(nf_sk, output.nonce)); - eprintln!("nullifier: {}", env::cycle_count()); - - let cm_out = output.commit_note(); - eprintln!("out_cm: {}", env::cycle_count()); - - assert!(merkle::verify_path( - merkle::leaf(cm_out.as_bytes()), - &cm_path, - cm_root - )); - eprintln!("nullifier merkle path: {}", env::cycle_count()); -} - -fn main() { - // public input - let cm_root: [u8; 32] = env::read(); - let nf: Nullifier = env::read(); - - // private input - let nf_sk: NullifierSecret = env::read(); - let output: OutputWitness = env::read(); - let cm_path: Vec = env::read(); - - eprintln!("parse input: {}", env::cycle_count()); - execute(cm_root, nf, nf_sk, output, cm_path); -}
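
A note on the merkle API change in cl/cl/src/merkle.rs: `verify_path(leaf, path, root)` is gone, and callers (the updated tests, the cl/risc0_proofs nullifier guest, and the goas guest) now recompute the root with `path_root(leaf, path)` and compare it against a root they already trust. Below is a minimal sketch of that calling pattern, written against the `cl::merkle` API as it stands after this diff; the helper name `note_is_in_commitment_set` is illustrative only, and it assumes `merkle::leaf` accepts a byte slice as the call sites above do.

use cl::merkle;

// Recompute the root implied by `cm_path` and compare it with a trusted
// commitment-set root: the same check the nullifier guest performs before
// committing `cm_root` to the journal.
fn note_is_in_commitment_set(
    note_cm_bytes: &[u8],
    cm_path: &[merkle::PathNode],
    trusted_cm_root: [u8; 32],
) -> bool {
    let leaf = merkle::leaf(note_cm_bytes);
    merkle::path_root(leaf, cm_path) == trusted_cm_root
}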