diff --git a/bellman/src/lib.rs b/bellman/src/lib.rs
index d3ce4c1e5..f86630272 100644
--- a/bellman/src/lib.rs
+++ b/bellman/src/lib.rs
@@ -26,22 +26,22 @@ mod multiexp;
 #[cfg(test)]
 mod tests;
 
-cfg_if! {
-    if #[cfg(feature = "multicore")] {
-        #[cfg(feature = "wasm")]
-        compile_error!("Multicore feature is not yet compatible with wasm target arch");
-
-        pub mod multicore;
-        mod worker {
-            pub use crate::multicore::*;
-        }
-    } else {
+// cfg_if! {
+    // if #[cfg(feature = "multicore")] {
+        // #[cfg(feature = "wasm")]
+        // compile_error!("Multicore feature is not yet compatible with wasm target arch");
+
+        // pub mod multicore;
+        // mod worker {
+            // pub use crate::multicore::*;
+        // }
+    // } else {
         pub mod singlecore;
         mod worker {
             pub use crate::singlecore::*;
         }
-    }
-}
+    // }
+// }
 
 mod cs;
 pub use self::cs::*;
diff --git a/phase2/.gitignore b/phase2/.gitignore
index 9fdddcfa0..66f6b07f4 100644
--- a/phase2/.gitignore
+++ b/phase2/.gitignore
@@ -6,4 +6,5 @@ phase1radix2m*
 /*.json
 /*.bin
 /*.params
-/verifier.sol
\ No newline at end of file
+/verifier.sol
+data
\ No newline at end of file
diff --git a/phase2/package.json b/phase2/package.json
index f1c5be4cd..f4f49553d 100644
--- a/phase2/package.json
+++ b/phase2/package.json
@@ -10,6 +10,7 @@
   "license": "ISC",
   "dependencies": {
     "circom": "0.0.35",
-    "snarkjs": "git+https://github.com/kobigurk/snarkjs.git"
+    "snarkjs": "git+https://github.com/kobigurk/snarkjs.git",
+    "circomlib": "^2.0.5"
   }
 }
diff --git a/phase2/src/bin/contribute.rs b/phase2/src/bin/contribute.rs
index a15fc8d68..c5f3e678d 100644
--- a/phase2/src/bin/contribute.rs
+++ b/phase2/src/bin/contribute.rs
@@ -16,7 +16,8 @@ use phase2::parameters::MPCParameters;
 
 fn main() {
     let args: Vec<String> = std::env::args().collect();
-    if args.len() != 4 && args.len() != 6 {
+
+    if args.len() != 3 && args.len() != 5 {
         println!("Usage: \n<in_params.params> <out_params.params> <entropy>");
         std::process::exit(exitcode::USAGE);
     }
@@ -26,7 +27,9 @@ fn main() {
     }
     let in_params_filename = &args[1];
     let out_params_filename = &args[2];
-    let entropy = &args[3];
+
+    let entropy = if args.len() > 3 { args[3].clone() } else { "".to_string() };
+
     let print_progress = args.len() == 6 && args[4] == "-v";
 
     let disallow_points_at_infinity = false;
@@ -52,7 +55,10 @@ fn main() {
     }
 
         // Hash it all up to make a seed
-        h.input(&entropy.as_bytes());
+        if entropy.len() > 0 {
+            h.input(&entropy.as_bytes());
+        }
+
         h.result()
     };
diff --git a/phase2/src/parameters.rs b/phase2/src/parameters.rs
index 396358d9c..31817d558 100644
--- a/phase2/src/parameters.rs
+++ b/phase2/src/parameters.rs
@@ -4,10 +4,10 @@ extern crate byteorder;
 extern crate num_cpus;
 extern crate crossbeam;
 
-#[cfg(feature = "wasm")]
+// #[cfg(feature = "wasm")]
 use bellman_ce::singlecore::Worker;
-#[cfg(not(feature = "wasm"))]
-use bellman_ce::multicore::Worker;
+// #[cfg(not(feature = "wasm"))]
+// use bellman_ce::multicore::Worker;
 
 use byteorder::{
     BigEndian,
@@ -103,6 +103,7 @@ impl MPCParameters {
     ) -> Result<MPCParameters, SynthesisError>
         where C: Circuit<Bn256>
     {
+        println!("MPCParameters::new()");
         let mut assembly = KeypairAssembly {
             num_inputs: 0,
             num_aux: 0,
@@ -144,6 +145,8 @@ impl MPCParameters {
             }
         }
 
+        println!("MPCParameters::try to load phase1radix2m");
+
        // Try to load "radix_directory/phase1radix2m{}"
        let f = match File::open(format!("{}/phase1radix2m{}", radix_directory, exp)) {
            Ok(f) => f,
@@ -153,6 +156,9 @@ impl MPCParameters {
        };
        let f = &mut BufReader::with_capacity(1024 * 1024, f);
+
+        println!("MPCParameters::read_g1");
+
        let read_g1 = |reader: &mut BufReader<File>| -> io::Result<G1Affine> {
            let mut repr = G1Uncompressed::empty();
            reader.read_exact(repr.as_mut())?;
@@ -166,6 +172,8 @@ impl MPCParameters {
            })
        };
 
+        println!("MPCParameters::read_g2");
+
        let read_g2 = |reader: &mut BufReader<File>| -> io::Result<G2Affine> {
            let mut repr = G2Uncompressed::empty();
            reader.read_exact(repr.as_mut())?;
@@ -211,6 +219,8 @@ impl MPCParameters {
        let alpha_coeffs_g1 = Arc::new(alpha_coeffs_g1);
        let beta_coeffs_g1 = Arc::new(beta_coeffs_g1);
 
+        println!("MPCParameters::h");
+
        let mut h = Vec::with_capacity(m-1);
        for _ in 0..m-1 {
            h.push(read_g1(f)?);
        }
@@ -222,6 +232,12 @@ impl MPCParameters {
        let mut b_g1 = vec![G1::zero(); assembly.num_inputs + assembly.num_aux];
        let mut b_g2 = vec![G2::zero(); assembly.num_inputs + assembly.num_aux];
 
+        println!("MPCParameters::eval1");
+
+        println!("MPCParameters::worker (start)");
+        let worker = Worker::new();
+        println!("MPCParameters::worker (end)");
+
        fn eval(
            // Lagrange coefficients for tau
            coeffs_g1: Arc<Vec<G1Affine>>,
@@ -244,6 +260,7 @@
            worker: &Worker
        )
        {
+            println!("MPCParameters::sanitycheck");
            // Sanity check
            assert_eq!(a_g1.len(), at.len());
            assert_eq!(a_g1.len(), bt.len());
@@ -252,8 +269,10 @@
            assert_eq!(a_g1.len(), b_g2.len());
            assert_eq!(a_g1.len(), ext.len());
 
+            println!("MPCParameters::worker.scope (enter)");
            // Evaluate polynomials in multiple threads
            worker.scope(a_g1.len(), |scope, chunk| {
+                println!("MPCParameters::worker.scope (inside)");
                for ((((((a_g1, b_g1), b_g2), ext), at), bt), ct) in
                    a_g1.chunks_mut(chunk)
                        .zip(b_g1.chunks_mut(chunk))
@@ -269,6 +288,7 @@
                    let beta_coeffs_g1 = beta_coeffs_g1.clone();
 
                    scope.spawn(move |_| {
+                        println!("MPCParameters::scope.spawn");
                        for ((((((a_g1, b_g1), b_g2), ext), at), bt), ct) in
                            a_g1.iter_mut()
                                .zip(b_g1.iter_mut())
@@ -304,8 +324,8 @@
            });
        }
 
-        let worker = Worker::new();
+        println!("MPCParameters::eval2");
 
        // Evaluate for inputs.
        eval(
            coeffs_g1.clone(),
@@ -322,6 +342,7 @@
            &worker
        );
 
+        println!("MPCParameters::eval3");
        // Evaluate for auxillary variables.
        eval(
            coeffs_g1.clone(),
@@ -338,6 +359,8 @@
            &worker
        );
 
+        println!("MPCParameters::for");
+
        // Don't allow any elements be unconstrained, so that
        // the L query is always fully dense.
        for e in l.iter() {
@@ -417,9 +440,11 @@
        progress_update_interval: &u32
    ) -> [u8; 64]
    {
+        println!("MPCParameters::contribute()");
        // Generate a keypair
        let (pubkey, privkey) = keypair(rng, self);
 
+        println!("MPCParameters::batch_exp1()");
        #[cfg(not(feature = "wasm"))]
        fn batch_exp<C: CurveAffine>(bases: &mut [C], coeff: C::Scalar, progress_update_interval: &u32, total_exps: &u32) {
            let coeff = coeff.into_repr();
@@ -469,6 +494,7 @@
            }
        }
 
+        println!("MPCParameters::batch_exp2()");
        #[cfg(feature = "wasm")]
        fn batch_exp<C: CurveAffine>(bases: &mut [C], coeff: C::Scalar, progress_update_interval: &u32, total_exps: &u32) {
            let coeff = coeff.into_repr();
@@ -495,6 +521,7 @@
            }
        }
 
+        println!("MPCParameters::delta_inv");
        let delta_inv = privkey.delta.inverse().expect("nonzero");
        let mut l = (&self.params.l[..]).to_vec();
        let mut h = (&self.params.h[..]).to_vec();
diff --git a/phase2/test.sh b/phase2/test.sh
index 141a77404..899ddaa7d 100755
--- a/phase2/test.sh
+++ b/phase2/test.sh
@@ -13,21 +13,75 @@ cp ../powersoftau/phase1radix* .
 npm install
 
 # compile circuit
+
+circom ./circuits/circuit_transaction_10x2.circom -o ./circuits/circuit_transaction_10x2.json
+
 npx circom circuit.circom -o circuit.json && npx snarkjs info -c circuit.json
 # npx snarkjs info -c circuit.json
+
+
 # initialize ceremony
-cargo run --release --bin new circuit.json circom1.params ./
+# cargo run --release --bin new circuit.json circom1.params ./
+cargo run --release --bin new circuit-by-circom-2.json circom1.params ./
+cargo run --release --bin new transaction_2x2.json transaction_2x2_circom1.params ./
+cargo run --release --bin new transaction_3x2.json transaction_3x2_circom1.params ./
+cargo run --release --bin new transaction_4x2.json transaction_4x2_circom1.params ./
+cargo run --release --bin new transaction_5x2.json transaction_5x2_circom1.params ./
+cargo run --release --bin new transaction_6x2.json transaction_6x2_circom1.params ./
+cargo run --release --bin new transaction_7x2.json transaction_7x2_circom1.params ./
+cargo run --release --bin new transaction_8x2.json transaction_8x2_circom1.params ./
+# cargo run --release --bin new circuit_constraints.json circom1.params ./
+
+cargo run --release --bin contribute circom1.params circom2.params
+cargo run --release --bin contribute transaction_8x2_circom2.params transaction_8x2_circom3.params
+cargo run --release --bin contribute transaction_8x2_circom3.params transaction_8x2_circom4.params
+cargo run --release --bin contribute transaction_8x2_circom4.params transaction_8x2_circom5.params
+cargo run --release --bin contribute transaction_8x2_circom5.params transaction_8x2_circom6.params
+cargo run --release --bin contribute transaction_8x2_circom6.params transaction_8x2_circom7.params
+cargo run --release --bin contribute transaction_8x2_circom7.params transaction_8x2_circom8.params
+cargo run --release --bin contribute transaction_8x2_circom8.params transaction_8x2_circom9.params
+cargo run --release --bin contribute transaction_8x2_circom9.params transaction_8x2_circom10.params
 
-cargo run --release --bin contribute circom1.params circom2.params asdajdzixcjlzxjczxlkcjzxlkcj
-cargo run --release --bin verify_contribution circuit.json circom1.params circom2.params ./
+# cargo run --release --bin verify_contribution circuit.json circom1.params circom2.params ./
+cargo run --release --bin verify_contribution transaction_1x2.json transaction_2x2_circom1.params transaction_2x2_circom2.params ./
 
-cargo run --release --bin contribute circom2.params circom3.params dsfjkshdfakjhsdf
-cargo run --release --bin verify_contribution circuit.json circom2.params circom3.params ./
+cargo run --release --bin contribute circom2.params circom3.params
+# cargo run --release --bin verify_contribution circuit.json circom2.params circom3.params ./
+cargo run --release --bin verify_contribution transaction_1x2.json circom2.params circom3.params ./
 
-cargo run --release --bin contribute circom3.params circom4.params askldfjklasdf
+cargo run --release --bin contribute circom3.params circom4.params
 cargo run --release --bin verify_contribution circuit.json circom3.params circom4.params ./
+
+cp ../powersoftau/phase1radix* .
+cargo run --release --bin new circuit.json circom1.params ./
+cargo run --release --bin contribute circom1.params circom2.params
+cargo run --release --bin contribute circom2.params circom3.params
+cargo run --release --bin contribute circom3.params circom4.params
+cargo run --release --bin contribute circom4.params circom5.params
+cargo run --release --bin contribute circom5.params circom6.params
+cargo run --release --bin contribute circom6.params circom7.params
+cargo run --release --bin contribute circom7.params circom8.params
+cargo run --release --bin contribute circom8.params circom9.params
+cargo run --release --bin contribute circom9.params circom10.params
+cargo run --release --bin contribute circom10.params circom11.params
+cargo run --release --bin contribute circom11.params circom12.params
+cargo run --release --bin contribute circom12.params circom13.params
+cargo run --release --bin contribute circom13.params circom14.params
+cargo run --release --bin contribute circom14.params circom15.params
+cargo run --release --bin contribute circom15.params circom16.params
+cargo run --release --bin contribute circom16.params circom17.params
+cargo run --release --bin contribute circom17.params circom18.params
+cargo run --release --bin contribute circom18.params circom19.params
+cargo run --release --bin contribute circom19.params circom20.params
+
+
+
+npx snarkjs groth16 setup circuit.r1cs circom4.params circuit_0000.zkey
+cargo run --release --bin copy_json transaction_0001_2x2.zkey pk.json transformed_pk.json
+
+
 # create dummy keys in circom format
 echo "Generating dummy key files..."
 npx snarkjs setup --protocol groth
@@ -41,5 +95,9 @@ cargo run --release --bin generate_verifier circom4.params verifier.sol
 
 # try to generate and verify proof
 npx snarkjs calculatewitness
-cargo run --release --bin prove circuit.json witness.json circom4.params proof.json public.json
+cargo run --release --bin prove circuit-by-circom-2.json witness.json circom4.params proof.json public.json
 npx snarkjs verify --vk vk.json --proof proof.json
+
+
+snarkjs wc circuit.wasm input.json witness.wtns
+snarkjs wej witness.wtns witness.json
\ No newline at end of file
diff --git a/powersoftau/src/bin/beacon_constrained.rs b/powersoftau/src/bin/beacon_constrained.rs
index e42b50ab4..b87dd3381 100644
--- a/powersoftau/src/bin/beacon_constrained.rs
+++ b/powersoftau/src/bin/beacon_constrained.rs
@@ -9,6 +9,9 @@ use powersoftau::{
 use bellman_ce::pairing::bn256::Bn256;
 use memmap::MmapOptions;
 use std::fs::OpenOptions;
+use memmap::MmapMut;
+use std::io::BufWriter;
+use std::fs::File;
 use std::io::Write;
 
 extern crate hex_literal;
@@ -129,27 +132,16 @@ fn main() {
     };
 
     // Create response file in this directory
-    let writer = OpenOptions::new()
-        .read(true)
-        .write(true)
-        .create_new(true)
-        .open(response_filename)
-        .expect("unable to create response file in this directory");
+    let mut response_writer = BufWriter::new(
+        File::create(response_filename).expect("unable to create response file")
+    );
 
     let required_output_length = match COMPRESS_THE_OUTPUT {
         UseCompression::Yes => parameters.contribution_size,
         UseCompression::No => parameters.accumulator_size + parameters.public_key_size,
     };
 
-    writer
-        .set_len(required_output_length as u64)
-        .expect("must make output file large enough");
-
-    let mut writable_map = unsafe {
-        MmapOptions::new()
-            .map_mut(&writer)
-            .expect("unable to create a memory map for output")
-    };
+    let mut writable_map = vec![0; required_output_length];
 
     println!("Calculating previous contribution hash...");
 
@@ -183,29 +175,38 @@ fn main() {
     // Perform the transformation
     println!("Computing and writing your contribution, this could take a while...");
 
+    let mmap_mut_ptr: *mut MmapMut = &mut writable_map as *mut Vec<u8> as *mut MmapMut;
+
     // this computes a transformation and writes it
-    BatchedAccumulator::transform(
-        &readable_map,
-        &mut writable_map,
-        INPUT_IS_COMPRESSED,
-        COMPRESS_THE_OUTPUT,
-        CHECK_INPUT_CORRECTNESS,
-        &privkey,
-        &parameters,
-    )
-    .expect("must transform with the key");
+    unsafe {
+        BatchedAccumulator::transform(
+            &readable_map,
+            &mut *mmap_mut_ptr,
+            INPUT_IS_COMPRESSED,
+            COMPRESS_THE_OUTPUT,
+            CHECK_INPUT_CORRECTNESS,
+            &privkey,
+            &parameters,
+        )
+        .expect("must transform with the key");
+    }
+
     println!("Finishing writing your contribution to response file...");
 
     // Write the public key
-    pubkey
-        .write(&mut writable_map, COMPRESS_THE_OUTPUT, &parameters)
-        .expect("unable to write public key");
+    unsafe {
+        pubkey
+            .write(&mut *mmap_mut_ptr, COMPRESS_THE_OUTPUT, &parameters)
+            .expect("unable to write public key");
+    }
 
-    // Get the hash of the contribution, so the user can compare later
-    let output_readonly = writable_map
-        .make_read_only()
-        .expect("must make a map readonly");
-    let contribution_hash = calculate_hash(&output_readonly);
+    // Write the processed data to the response file
+    response_writer
+        .write_all(&writable_map)
+        .expect("unable to write to response file");
+    response_writer
+        .flush()
+        .expect("unable to flush response file");
 
@@ -213,16 +214,5 @@ fn main() {
     print!(
         "Done!\n\n\
         Your contribution has been written to response file\n\n\
        The BLAKE2b hash of response file is:\n"
     );
 
-    for line in contribution_hash.as_slice().chunks(16) {
-        print!("\t");
-        for section in line.chunks(4) {
-            for b in section {
-                print!("{:02x}", b);
-            }
-            print!(" ");
-        }
-        println!();
-    }
-
     println!("Thank you for your participation, much appreciated! :)");
 }
diff --git a/powersoftau/src/bin/compute_constrained.rs b/powersoftau/src/bin/compute_constrained.rs
index 379b8c9cb..fbbb16b3a 100644
--- a/powersoftau/src/bin/compute_constrained.rs
+++ b/powersoftau/src/bin/compute_constrained.rs
@@ -6,10 +6,10 @@ use powersoftau::{
 };
 
 use bellman_ce::pairing::bn256::Bn256;
-use memmap::*;
-use std::fs::OpenOptions;
-
-use std::io::{Read, Write};
+use std::fs::{File, OpenOptions};
+use std::io::{BufWriter, Read, Write};
+use memmap::MmapOptions;
+use memmap::MmapMut;
 
 const INPUT_IS_COMPRESSED: UseCompression = UseCompression::No;
 const COMPRESS_THE_OUTPUT: UseCompression = UseCompression::Yes;
@@ -54,15 +54,6 @@ fn main() {
            h.input(&[r]);
        }
 
-        // Ask the user to provide some information for additional entropy
-        let mut user_input = String::new();
-        println!("Type some random text and press [ENTER] to provide additional entropy...");
-        std::io::stdin()
-            .read_line(&mut user_input)
-            .expect("expected to read some random text from the user");
-
-        // Hash it all up to make a seed
-        h.input(&user_input.as_bytes());
        h.result()
    };
@@ -109,27 +100,16 @@ fn main() {
     };
 
     // Create response file in this directory
-    let writer = OpenOptions::new()
-        .read(true)
-        .write(true)
-        .create_new(true)
-        .open(response_filename)
-        .expect("unable to create response file")
+    let mut response_writer = BufWriter::new(
+        File::create(response_filename).expect("unable to create response file")
+    );
 
     let required_output_length = match COMPRESS_THE_OUTPUT {
         UseCompression::Yes => parameters.contribution_size,
         UseCompression::No => parameters.accumulator_size + parameters.public_key_size,
     };
 
-    writer
-        .set_len(required_output_length as u64)
-        .expect("must make output file large enough");
-
-    let mut writable_map = unsafe {
-        MmapOptions::new()
-            .map_mut(&writer)
-            .expect("unable to create a memory map for output")
-    };
+    let mut writable_map = vec![0; required_output_length];
 
     println!("Calculating previous contribution hash...");
 
@@ -189,49 +169,43 @@ fn main() {
     // Perform the transformation
     println!("Computing and writing your contribution, this could take a while...");
 
+    let mmap_mut_ptr: *mut MmapMut = &mut writable_map as *mut Vec<u8> as *mut MmapMut;
+
     // this computes a transformation and writes it
-    BatchedAccumulator::transform(
-        &readable_map,
-        &mut writable_map,
-        INPUT_IS_COMPRESSED,
-        COMPRESS_THE_OUTPUT,
-        CHECK_INPUT_CORRECTNESS,
-        &privkey,
-        &parameters,
-    )
-    .expect("must transform with the key");
+    unsafe {
+        BatchedAccumulator::transform(
+            &readable_map,
+            &mut *mmap_mut_ptr,
+            INPUT_IS_COMPRESSED,
+            COMPRESS_THE_OUTPUT,
+            CHECK_INPUT_CORRECTNESS,
+            &privkey,
+            &parameters,
+        )
+        .expect("must transform with the key");
+    }
 
     println!("Finishing writing your contribution to response file...");
 
     // Write the public key
-    pubkey
-        .write(&mut writable_map, COMPRESS_THE_OUTPUT, &parameters)
-        .expect("unable to write public key");
-
-    writable_map.flush().expect("must flush a memory map");
+    unsafe {
+        pubkey
+            .write(&mut *mmap_mut_ptr, COMPRESS_THE_OUTPUT, &parameters)
+            .expect("unable to write public key");
+    }
 
-    // Get the hash of the contribution, so the user can compare later
-    let output_readonly = writable_map
-        .make_read_only()
-        .expect("must make a map readonly");
-    let contribution_hash = calculate_hash(&output_readonly);
+    // Write the processed data to the response file
+    response_writer
+        .write_all(&writable_map)
+        .expect("unable to write to response file");
+    response_writer
+        .flush()
+        .expect("unable to flush response file");
 
     print!(
         "Done!\n\n\
-        Your contribution has been written to response file\n\n\
-        The BLAKE2b hash of response file is:\n"
+        Your contribution has been written to response file\n"
     );
 
-    for line in contribution_hash.as_slice().chunks(16) {
-        print!("\t");
-        for section in line.chunks(4) {
-            for b in section {
-                print!("{:02x}", b);
-            }
-            print!(" ");
-        }
-        println!();
-    }
-
-    println!("Thank you for your participation, much appreciated! :)");
 }
diff --git a/powersoftau/src/bin/new_constrained.rs b/powersoftau/src/bin/new_constrained.rs
index 3953e0d43..0038cf95a 100644
--- a/powersoftau/src/bin/new_constrained.rs
+++ b/powersoftau/src/bin/new_constrained.rs
@@ -1,13 +1,11 @@
 use powersoftau::batched_accumulator::BatchedAccumulator;
-use powersoftau::parameters::UseCompression;
-use powersoftau::utils::{blank_hash, calculate_hash};
+use powersoftau::parameters::{CeremonyParams, UseCompression};
+use powersoftau::utils::{blank_hash};
 
 use bellman_ce::pairing::bn256::Bn256;
-use memmap::*;
 use std::fs::OpenOptions;
 use std::io::Write;
-
-use powersoftau::parameters::CeremonyParams;
+use memmap::MmapMut;
 
 const COMPRESS_NEW_CHALLENGE: UseCompression = UseCompression::No;
@@ -32,7 +30,7 @@ fn main() {
         parameters.powers_g1_length
     );
 
-    let file = OpenOptions::new()
+    let mut file = OpenOptions::new()
         .read(true)
         .write(true)
         .create_new(true)
@@ -47,20 +45,11 @@ fn main() {
     file.set_len(expected_challenge_length as u64)
         .expect("unable to allocate large enough file");
 
-    let mut writable_map = unsafe {
-        MmapOptions::new()
-            .map_mut(&file)
-            .expect("unable to create a memory map")
-    };
+    let mut writable_vec = vec![0; expected_challenge_length];
 
     // Write a blank BLAKE2b hash:
     let hash = blank_hash();
-    (&mut writable_map[0..])
-        .write_all(hash.as_slice())
-        .expect("unable to write a default hash to mmap");
-    writable_map
-        .flush()
-        .expect("unable to write blank hash to challenge file");
+    writable_vec[0..hash.len()].copy_from_slice(hash.as_slice());
 
     println!("Blank hash for an empty challenge:");
     for line in hash.as_slice().chunks(16) {
@@ -74,30 +63,15 @@ fn main() {
         println!();
     }
 
-    BatchedAccumulator::generate_initial(&mut writable_map, COMPRESS_NEW_CHALLENGE, &parameters)
-        .expect("generation of initial accumulator is successful");
-    writable_map
-        .flush()
-        .expect("unable to flush memmap to disk");
-
-    // Get the hash of the contribution, so the user can compare later
-    let output_readonly = writable_map
-        .make_read_only()
-        .expect("must make a map readonly");
-    let contribution_hash = calculate_hash(&output_readonly);
-
-    println!("Empty contribution is formed with a hash:");
+    let mmap_mut_ptr: *mut MmapMut = &mut writable_vec as *mut Vec<u8> as *mut MmapMut;
 
-    for line in contribution_hash.as_slice().chunks(16) {
-        print!("\t");
-        for section in line.chunks(4) {
-            for b in section {
-                print!("{:02x}", b);
-            }
-            print!(" ");
-        }
-        println!();
+    unsafe {
+        BatchedAccumulator::generate_initial(&mut *mmap_mut_ptr, COMPRESS_NEW_CHALLENGE, &parameters)
+            .expect("generation of initial accumulator is successful");
     }
+
+    file.write_all(&writable_vec)
+        .expect("unable to write to challenge file");
+
+    println!("Wrote a fresh accumulator to challenge file");
 }
diff --git a/powersoftau/src/bin/verify_transform_constrained.rs b/powersoftau/src/bin/verify_transform_constrained.rs
index ce5578a1b..87f605863 100644
--- a/powersoftau/src/bin/verify_transform_constrained.rs
+++ b/powersoftau/src/bin/verify_transform_constrained.rs
@@ -8,8 +8,9 @@ use powersoftau::{
 
 use bellman_ce::pairing::bn256::Bn256;
 use memmap::*;
 use std::fs::OpenOptions;
-
 use std::io::{Read, Write};
+use std::io::BufWriter;
+use std::fs::File;
 
 const PREVIOUS_CHALLENGE_IS_COMPRESSED: UseCompression = UseCompression::No;
 const CONTRIBUTION_IS_COMPRESSED: UseCompression = UseCompression::Yes;
@@ -95,7 +96,6 @@ fn main() {
     println!("Calculating previous challenge hash...");
 
     // Check that contribution is correct
-
     let current_accumulator_hash = calculate_hash(&challenge_readable_map);
 
     println!("Hash of the `challenge` file for verification:");
@@ -160,7 +160,6 @@ fn main() {
         .expect("wasn't able to deserialize the response file's public key");
 
     // check that it follows the protocol
-
     println!(
         "Verifying a contribution to contain proper powers and correspond to the public key..."
     );
@@ -192,63 +191,33 @@ fn main() {
     println!("Verification succeeded! Writing to new challenge file...");
 
     // Create new challenge file in this directory
-    let writer = OpenOptions::new()
-        .read(true)
-        .write(true)
-        .create_new(true)
-        .open(new_challenge_filename)
-        .expect("unable to create new challenge file in this directory");
+    let mut file = BufWriter::new(File::create(new_challenge_filename).expect("unable to create challenge file"));
 
     // Recomputation strips the public key and uses hashing to link with the previous contribution after decompression
-    writer
-        .set_len(parameters.accumulator_size as u64)
-        .expect("must make output file large enough");
-
-    let mut writable_map = unsafe {
-        MmapOptions::new()
-            .map_mut(&writer)
-            .expect("unable to create a memory map for output")
-    };
-
-    {
-        (&mut writable_map[0..])
-            .write_all(response_hash.as_slice())
-            .expect("unable to write a default hash to mmap");
-
-        writable_map
-            .flush()
-            .expect("unable to write hash to new challenge file");
-    }
-
-    BatchedAccumulator::decompress(
-        &response_readable_map,
-        &mut writable_map,
-        CheckForCorrectness::No,
-        &parameters,
-    )
-    .expect("must decompress a response for a new challenge");
-
-    writable_map.flush().expect("must flush the memory map");
-
-    let new_challenge_readable_map = writable_map
-        .make_read_only()
-        .expect("must make a map readonly");
-
-    let recompressed_hash = calculate_hash(&new_challenge_readable_map);
-
-    println!("Here's the BLAKE2b hash of the decompressed participant's response as new_challenge file:");
-
-    for line in recompressed_hash.as_slice().chunks(16) {
-        print!("\t");
-        for section in line.chunks(4) {
-            for b in section {
-                print!("{:02x}", b);
-            }
-            print!(" ");
-        }
-        println!();
+    let mut writable_map = vec![0; parameters.accumulator_size];
+
+    (&mut writable_map[0..])
+        .write_all(response_hash.as_slice())
+        .expect("unable to write a default hash to mmap");
+
+    writable_map
+        .flush()
+        .expect("unable to write hash to new challenge file");
+
+    let temp_map: *mut MmapMut = &mut writable_map as *mut _ as *mut MmapMut;
+    unsafe {
+        BatchedAccumulator::decompress(
+            &response_readable_map,
+            &mut *temp_map,
+            CheckForCorrectness::No,
+            &parameters,
+        )
+        .expect("must decompress a response for a new challenge");
     }
 
+    file.write_all(&writable_map).expect("unable to write to response file");
+    file.flush().expect("unable to write hash to new challenge file");
+
     println!("Done! new challenge file contains the new challenge file. The other files");
     println!("were left alone.");
 }
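
Note on the buffer casts above: reinterpreting a `*mut Vec<u8>` as a `*mut MmapMut` (as `mmap_mut_ptr` and `temp_map` do) is undefined behavior, since the two types have unrelated layouts. A cast-free sketch of the same write path follows; it is untested and assumes memmap 0.7's `MmapMut::map_anon` plus the surrounding bindings from compute_constrained.rs, with the `BatchedAccumulator::transform` signature unchanged:

    // An anonymous memory map is a genuine MmapMut backed by RAM rather than
    // a file, so it satisfies the &mut MmapMut parameter without any casting.
    let mut writable_map = MmapMut::map_anon(required_output_length)
        .expect("unable to create an anonymous memory map");

    // Same transform call as in the patch, minus the unsafe block.
    BatchedAccumulator::transform(
        &readable_map,
        &mut writable_map,
        INPUT_IS_COMPRESSED,
        COMPRESS_THE_OUTPUT,
        CHECK_INPUT_CORRECTNESS,
        &privkey,
        &parameters,
    )
    .expect("must transform with the key");

    pubkey
        .write(&mut writable_map, COMPRESS_THE_OUTPUT, &parameters)
        .expect("unable to write public key");

    // MmapMut derefs to [u8], so the buffered file write stays unchanged.
    response_writer
        .write_all(&writable_map[..])
        .expect("unable to write to response file");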
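
Separately, beacon_constrained.rs still prints "The BLAKE2b hash of response file is:" even though the patch removes the loop that printed it. If the hash should still be reported, it can be recomputed from the in-memory buffer before it is written out — a sketch reusing the removed loop verbatim, assuming `calculate_hash` accepts any `&[u8]` as it does when called on memory maps:

    // Recompute the BLAKE2b contribution hash from the output buffer.
    let contribution_hash = calculate_hash(&writable_map);
    for line in contribution_hash.as_slice().chunks(16) {
        print!("\t");
        for section in line.chunks(4) {
            for b in section {
                print!("{:02x}", b);
            }
            print!(" ");
        }
        println!();
    }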