From 4b70df7a830360e710929e1b3964cae5664266b7 Mon Sep 17 00:00:00 2001 From: ControlCplusControlV <44706811+ControlCplusControlV@users.noreply.github.com> Date: Mon, 9 Jan 2023 06:40:43 -0700 Subject: [PATCH] Error Refactor Done (#68) * Controlc/fixed everything (#1) * Code cleanup (#57) Co-authored-by: ControlC ControlV * More Code Cleanup (#59) * Add in Verkle Proof Benchmark (#60) * fixed some unsafe unwraps * fixed some unsafe unwraps * some progress? * fixes * nits * mostly cleanup * clippy * yml fix * wtf is yml * sorry * verkle-trie refactors * Error Refactoring Done????: * Clippy working now Co-authored-by: ControlC ControlV * fix * addressed comments Co-authored-by: ControlC ControlV --- .gitignore | 1 + verkle-spec/src/code.rs | 2 +- verkle-spec/src/util.rs | 5 +- verkle-trie/benches/benchmarks/insert_10k.rs | 1 + verkle-trie/benches/benchmarks/proof_10k.rs | 7 +-- verkle-trie/benches/benchmarks/util.rs | 1 + verkle-trie/src/committer.rs | 5 +- verkle-trie/src/committer/precompute.rs | 4 +- verkle-trie/src/committer/test.rs | 2 +- verkle-trie/src/config.rs | 21 ++++----- verkle-trie/src/errors.rs | 48 ++++++++++++-------- verkle-trie/src/lib.rs | 10 ++-- verkle-trie/src/proof.rs | 39 ++++++++-------- verkle-trie/src/proof/prover.rs | 22 +++++---- verkle-trie/src/proof/stateless_updater.rs | 28 ++++++------ verkle-trie/src/trie.rs | 6 +-- 16 files changed, 109 insertions(+), 93 deletions(-) diff --git a/.gitignore b/.gitignore index c2ffe5b..7d8a336 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ Cargo.lock .DS_Store notes.md +precomputed_points.bin \ No newline at end of file diff --git a/verkle-spec/src/code.rs b/verkle-spec/src/code.rs index 9d8765f..316ef0b 100644 --- a/verkle-spec/src/code.rs +++ b/verkle-spec/src/code.rs @@ -209,7 +209,7 @@ fn simple_chunkify_test() { 23, 24, 25, 26, 27, 28, 29, 30, ], ]; - let code = code.clone().into_iter().flatten().collect(); + let code = code.into_iter().flatten().collect(); let chunked_code = 
chunkify_code(code); diff --git a/verkle-spec/src/util.rs b/verkle-spec/src/util.rs index 693a83e..65a6756 100644 --- a/verkle-spec/src/util.rs +++ b/verkle-spec/src/util.rs @@ -80,7 +80,6 @@ pub(crate) fn chunk_bytes(input: &[u8]) -> Vec { // Specialised version of `chunk_bytes` for 64 bytes without the padding pub(crate) fn chunk64(bytes64: [u8; 64]) -> [u128; 5] { const INPUT_LEN: u128 = 64; - debug_assert!(bytes64.len() as u128 == INPUT_LEN); let mut chunked_input = [[0u8; 16]; 5]; @@ -107,8 +106,10 @@ pub(crate) fn zero_align_bytes(mut bytes: Vec, alignment: usize) -> Vec if bytes.len() % alignment == 0 { return bytes; } + let pad_by = alignment - bytes.len() % alignment; bytes.extend(vec![0u8; pad_by]); + bytes } @@ -119,7 +120,7 @@ fn swap_byte() { let hash = H256::repeat_byte(2); let got = swap_last_byte(hash, U256::from(replacement_byte)); - let mut expected = hash.as_fixed_bytes().clone(); + let mut expected = *hash.as_fixed_bytes(); *expected.last_mut().unwrap() = replacement_byte; assert_eq!(*got.as_fixed_bytes(), expected) diff --git a/verkle-trie/benches/benchmarks/insert_10k.rs b/verkle-trie/benches/benchmarks/insert_10k.rs index 4ab497f..718da4e 100644 --- a/verkle-trie/benches/benchmarks/insert_10k.rs +++ b/verkle-trie/benches/benchmarks/insert_10k.rs @@ -28,6 +28,7 @@ fn insert_10k_from_10mil_step(c: &mut Criterion) { |mut trie| { // Insert different keys let key_vals = KEYS_10K.iter().map(|key_bytes| (*key_bytes, *key_bytes)); + #[allow(clippy::unit_arg)] black_box(trie.insert(key_vals)) }, BatchSize::SmallInput, diff --git a/verkle-trie/benches/benchmarks/proof_10k.rs b/verkle-trie/benches/benchmarks/proof_10k.rs index 3aa58de..58f3d8d 100644 --- a/verkle-trie/benches/benchmarks/proof_10k.rs +++ b/verkle-trie/benches/benchmarks/proof_10k.rs @@ -13,7 +13,8 @@ fn proof_10k_from_10mil_step(c: &mut Criterion) { let config = TestConfig::new(db); let mut trie = Trie::new(config); // Initial set of keys - let keys = generate_set_of_keys(1_000_000); + 
let _keys = generate_set_of_keys(1_000_000); + let key_vals = KEYS_10K.iter().map(|key_bytes| (*key_bytes, *key_bytes)); trie.insert(key_vals); @@ -24,9 +25,9 @@ fn proof_10k_from_10mil_step(c: &mut Criterion) { |b, _| { b.iter_batched( || trie.clone(), - |mut trie| { + |trie| { // Insert different keys - let key_vals = KEYS_10K.iter().map(|bytes| *bytes); + let key_vals = KEYS_10K.iter().copied(); black_box(trie.create_verkle_proof(key_vals)) }, BatchSize::SmallInput, diff --git a/verkle-trie/benches/benchmarks/util.rs b/verkle-trie/benches/benchmarks/util.rs index fd8499c..13513f4 100644 --- a/verkle-trie/benches/benchmarks/util.rs +++ b/verkle-trie/benches/benchmarks/util.rs @@ -6,6 +6,7 @@ use sha2::{Digest, Sha256}; pub static KEYS_10K: Lazy> = Lazy::new(|| generate_diff_set_of_keys(10_000).collect()); +#[allow(dead_code)] pub static SAME_KEYS_10K: Lazy> = Lazy::new(|| generate_set_of_keys(10_000).collect()); diff --git a/verkle-trie/src/committer.rs b/verkle-trie/src/committer.rs index 852395e..877b66f 100644 --- a/verkle-trie/src/committer.rs +++ b/verkle-trie/src/committer.rs @@ -8,9 +8,8 @@ pub mod test; // This is being done in the config file automatically pub trait Committer { // Commit to a lagrange polynomial, evaluations.len() must equal the size of the SRS at the moment - //TODO: We can make this &[Fr;256] since we have committed to 256, this would force the caller - // to handle the size of the slice - fn commit_lagrange(&self, evaluations: &[Fr]) -> Element; + fn commit_lagrange(&self, evaluations: &[Fr; 256]) -> Element; + // compute value * G for a specific generator in the SRS fn scalar_mul(&self, value: Fr, lagrange_index: usize) -> Element; diff --git a/verkle-trie/src/committer/precompute.rs b/verkle-trie/src/committer/precompute.rs index f6e1f56..7d31413 100644 --- a/verkle-trie/src/committer/precompute.rs +++ b/verkle-trie/src/committer/precompute.rs @@ -14,7 +14,7 @@ pub struct PrecomputeLagrange { impl<'a> Committer for &'a 
PrecomputeLagrange { // If compute these points at compile time, we can // dictate that evaluations should be an array - fn commit_lagrange(&self, evaluations: &[Fr]) -> Element { + fn commit_lagrange(&self, evaluations: &[Fr; 256]) -> Element { if evaluations.len() != self.num_points { panic!("wrong number of points") } @@ -59,7 +59,7 @@ impl<'a> Committer for &'a PrecomputeLagrange { } } impl Committer for PrecomputeLagrange { - fn commit_lagrange(&self, evaluations: &[Fr]) -> Element { + fn commit_lagrange(&self, evaluations: &[Fr; 256]) -> Element { (&self).commit_lagrange(evaluations) } diff --git a/verkle-trie/src/committer/test.rs b/verkle-trie/src/committer/test.rs index 07c4987..a48e63b 100644 --- a/verkle-trie/src/committer/test.rs +++ b/verkle-trie/src/committer/test.rs @@ -6,7 +6,7 @@ use banderwagon::{Element, Fr}; #[derive(Debug, Clone, Copy)] pub struct TestCommitter; impl Committer for TestCommitter { - fn commit_lagrange(&self, evaluations: &[Fr]) -> Element { + fn commit_lagrange(&self, evaluations: &[Fr; 256]) -> Element { let mut res = Element::zero(); for (val, point) in evaluations.iter().zip(CRS.G.iter()) { res += point * val; diff --git a/verkle-trie/src/config.rs b/verkle-trie/src/config.rs index 66340c5..aff6aa6 100644 --- a/verkle-trie/src/config.rs +++ b/verkle-trie/src/config.rs @@ -1,7 +1,7 @@ use crate::{ committer::{precompute::PrecomputeLagrange, test::TestCommitter}, constants::CRS, - errors::VerkleError, + errors::ConfigError, }; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use std::fs::File; @@ -17,47 +17,46 @@ pub struct Config { // them. It is possible to use this for tests too, one should ensure that the file exists // before running the tests; which are ran in parallel. 
const PRECOMPUTED_POINTS_PATH: &str = "precomputed_points.bin"; -// TODO: These two functions return Strings, when they should return a result with an enum variant ideally -// TODO: This is an API change and will be done in the API refactor phase. pub type VerkleConfig = Config; impl VerkleConfig { - pub fn new(db: Storage) -> Result { + pub fn new(db: Storage) -> Result { let file_exists = std::path::Path::new(PRECOMPUTED_POINTS_PATH).exists(); + if file_exists { - return Err(VerkleError::PrecomputedPointsFileExists); + return Err(ConfigError::PrecomputedPointsFileExists); } // File is not already precomputed, so we pre-compute the points and store them let mut file = match File::create(PRECOMPUTED_POINTS_PATH) { Ok(v) => v, - Err(e) => return Err(VerkleError::FileError(e)), + Err(e) => return Err(ConfigError::FileError(e)), }; let committer = PrecomputeLagrange::precompute(&CRS.G); let serialization_result = committer.serialize_unchecked(&mut file); if let Err(e) = serialization_result { - return Err(VerkleError::SerializationError(e)); + return Err(ConfigError::SerializationError(e)); } Ok(Config { db, committer }) } - pub fn open(db: Storage) -> Result { + pub fn open(db: Storage) -> Result { let file_exists = std::path::Path::new(PRECOMPUTED_POINTS_PATH).exists(); if !file_exists { - return Err(VerkleError::PrecomputedPointsNotFound); + return Err(ConfigError::PrecomputedPointsNotFound); } let mut file = match File::create(PRECOMPUTED_POINTS_PATH) { Ok(v) => v, - Err(e) => return Err(VerkleError::FileError(e)), + Err(e) => return Err(ConfigError::FileError(e)), }; let committer: PrecomputeLagrange = match CanonicalDeserialize::deserialize_unchecked(&mut file) { Ok(v) => v, - Err(e) => return Err(VerkleError::SerializationError(e)), + Err(e) => return Err(ConfigError::SerializationError(e)), }; Ok(Config { db, committer }) diff --git a/verkle-trie/src/errors.rs b/verkle-trie/src/errors.rs index 0d18ea6..70f756a 100644 --- a/verkle-trie/src/errors.rs +++ 
b/verkle-trie/src/errors.rs @@ -1,23 +1,17 @@ use ark_serialize::SerializationError; use thiserror::Error; -// A Wrapper Type for all errors that can occur within the Verkle Library -// Provides Single Error Enum for consumers of the library to match against // Right now there are lots of unwraps which are immediately switched to Results, but in the future // We likely be moved back to unwraps with safety comments -#[derive(Error, Debug)] -pub enum VerkleError { - #[error("Issue Occured Converting Type to Bytes")] - SerializationError(#[from] SerializationError), - #[error("Precomputed Points Exist Already")] - PrecomputedPointsFileExists, - #[error("Unable to Create Precomputed Points File")] - CannotCreatePrecomputedPoints(std::io::Error), - #[error("Precomputed Lagrage Points File Couldn't not be found")] - PrecomputedPointsNotFound, - #[error("Issue opening PrecomputedPointsFile")] - FileError(std::io::Error), +#[derive(Debug, Error)] +pub enum HintError { + #[error("General IO Error")] + IoError(#[from] std::io::Error), +} + +#[derive(Debug, Error)] +pub enum VerificationError { #[error("Invalid proof supplied")] InvalidProof, #[error("Invalid Length for Updated Values")] @@ -28,12 +22,26 @@ pub enum VerkleError { DuplicateKeys, #[error("Since the extension was not present in the trie, the suffix cannot have any previous values")] OldValueIsPopulated, - #[error("Prefix Cannot be Empty")] EmptyPrefix, +} + +#[derive(Debug, Error)] +pub enum ConfigError { + #[error("Precomputed Points Exist Already")] + PrecomputedPointsFileExists, + #[error("Issue opening PrecomputedPointsFile")] + FileError(std::io::Error), + #[error("Precomputed Lagrange Points File Couldn't be found")] + PrecomputedPointsNotFound, + #[error("Serialization Either Failed or Data is Invalid")] + SerializationError(#[from] SerializationError), +} - #[error("Child Branch is Empty/Doesn't exist")] - NoChildBranch, - #[error("BranchMeta was not serialised properly")] - 
BranchMetaSerializedFaulty(String), -} // TODO group erros by assosiation, and break out into sub error enums +#[derive(Debug, Error)] +pub enum ProofCreationError { + #[error("Empty Key Set")] + EmptyKeySet, + #[error("Expected to have at least one query, which will be against the root")] + ExpectedOneQueryAgainstRoot, +} diff --git a/verkle-trie/src/lib.rs b/verkle-trie/src/lib.rs index a56c405..0862fba 100644 --- a/verkle-trie/src/lib.rs +++ b/verkle-trie/src/lib.rs @@ -9,6 +9,7 @@ pub mod proof; pub mod trie; pub use config::*; +use errors::ProofCreationError; pub use trie::Trie; pub use banderwagon::{Element, Fr}; @@ -29,6 +30,7 @@ pub trait TrieTrait { fn insert_single(&mut self, key: Key, value: Value) { self.insert(vec![(key, value)].into_iter()) } + /// Gets the value at the `Key` if it exists /// Returns an error if it does not exist /// TODO: Find out if this method is ever needed @@ -41,8 +43,10 @@ pub trait TrieTrait { fn root_commitment(&self) -> Element; /// Creates a verkle proof over many keys - /// TODO: This will return a Result in the future - fn create_verkle_proof(&self, key: impl Iterator) -> proof::VerkleProof; + fn create_verkle_proof( + &self, + key: impl Iterator, + ) -> Result; } // Note: This is a 2 to 1 map, but the two preimages are identified to be the same @@ -80,6 +84,6 @@ mod tests { group_to_field(&generator) .serialize(&mut bytes[..]) .unwrap(); - assert_eq!(hex::encode(&bytes), expected); + assert_eq!(hex::encode(bytes), expected); } } diff --git a/verkle-trie/src/proof.rs b/verkle-trie/src/proof.rs index d812cbb..4ede153 100644 --- a/verkle-trie/src/proof.rs +++ b/verkle-trie/src/proof.rs @@ -1,17 +1,14 @@ -use crate::constants::{CRS, PRECOMPUTED_WEIGHTS}; +use crate::{ + constants::{CRS, PRECOMPUTED_WEIGHTS}, + errors::HintError, +}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use banderwagon::Element; use ipa_multipoint::multiproof::MultiPointProof; use ipa_multipoint::transcript::Transcript; use 
std::collections::{BTreeMap, BTreeSet}; - -use std::io::{Error, ErrorKind, Read, Result, Write}; - -// TODO: We use the IO Result while we do not have a dedicated Error enum -type IOResult = Result; -type IOError = Error; -type IOErrorKind = ErrorKind; +use std::io::{Read, Write}; mod key_path_finder; mod opening_data; @@ -71,7 +68,7 @@ impl VerificationHint { // We need the number of keys because we do not serialise the length of // the ext_status|| depth. This is equal to the number of keys in the proof, which // we assume the user knows. - pub fn read(mut reader: R) -> IOResult { + pub fn read(mut reader: R) -> Result { // First extract the stems with no values opened for them let mut num_stems = [0u8; 4]; reader.read_exact(&mut num_stems)?; @@ -117,7 +114,7 @@ impl VerificationHint { diff_stem_no_proof, }) } - pub fn write(&self, writer: &mut W) -> IOResult<()> { + pub fn write(&self, writer: &mut W) -> Result<(), HintError> { // Encode the number of stems with no value openings let num_stems = self.diff_stem_no_proof.len() as u32; writer.write_all(&num_stems.to_le_bytes())?; @@ -182,7 +179,7 @@ pub struct VerkleProof { } impl VerkleProof { - pub fn read(mut reader: R) -> IOResult { + pub fn read(mut reader: R) -> Result { let verification_hint = VerificationHint::read(&mut reader)?; let mut num_comms = [0u8; 4]; @@ -191,8 +188,9 @@ impl VerkleProof { let mut comms_sorted = Vec::new(); for _ in 0..num_comms { - let point: Element = CanonicalDeserialize::deserialize(&mut reader) - .map_err(|_| IOError::from(IOErrorKind::InvalidData))?; + let point: Element = CanonicalDeserialize::deserialize(&mut reader).map_err(|_| { + HintError::from(std::io::Error::from(std::io::ErrorKind::InvalidData)) + })?; comms_sorted.push(point); } @@ -207,7 +205,7 @@ impl VerkleProof { }) } - pub fn write(&self, mut writer: W) -> IOResult<()> { + pub fn write(&self, mut writer: W) -> Result<(), HintError> { // Errors are handled via anyhow because they are generic IO errors, not 
Verkle-specific self.verification_hint.write(&mut writer)?; @@ -216,8 +214,9 @@ impl VerkleProof { for comm in &self.comms_sorted { let mut comm_serialised = [0u8; 32]; - comm.serialize(&mut comm_serialised[..]) - .map_err(|_| IOError::from(IOErrorKind::InvalidInput))?; + comm.serialize(&mut comm_serialised[..]).map_err(|_| { + HintError::from(std::io::Error::from(std::io::ErrorKind::InvalidInput)) + })?; writer.write_all(&comm_serialised)?; } @@ -300,7 +299,7 @@ mod test { let root = vec![]; let meta = trie.storage.get_branch_meta(&root).unwrap(); - let proof = prover::create_verkle_proof(&trie.storage, keys.clone()); + let proof = prover::create_verkle_proof(&trie.storage, keys.clone()).unwrap(); let values: Vec<_> = keys.iter().map(|val| Some(*val)).collect(); let (ok, _) = proof.check(keys, values, meta.commitment); assert!(ok); @@ -316,7 +315,7 @@ mod test { let root = vec![]; let meta = trie.storage.get_branch_meta(&root).unwrap(); - let proof = prover::create_verkle_proof(&trie.storage, absent_keys.clone()); + let proof = prover::create_verkle_proof(&trie.storage, absent_keys.clone()).unwrap(); let (ok, _) = proof.check(absent_keys, absent_values, meta.commitment); assert!(ok); @@ -338,7 +337,7 @@ mod test { let meta = trie.storage.get_branch_meta(&root).unwrap(); let (pq, _) = prover::create_prover_queries(&trie.storage, keys.clone()); - let proof = prover::create_verkle_proof(&trie.storage, keys.clone()); + let proof = prover::create_verkle_proof(&trie.storage, keys.clone()).unwrap(); let values: Vec<_> = keys.iter().map(|val| Some(*val)).collect(); let (vq, _) = @@ -366,7 +365,7 @@ mod test { let root = vec![]; let _meta = trie.storage.get_branch_meta(&root).unwrap(); - let proof = prover::create_verkle_proof(&trie.storage, keys.clone()); + let proof = prover::create_verkle_proof(&trie.storage, keys.clone()).unwrap(); let mut bytes = Vec::new(); proof.write(&mut bytes).unwrap(); diff --git a/verkle-trie/src/proof/prover.rs 
b/verkle-trie/src/proof/prover.rs index 179d26c..eab9b84 100644 --- a/verkle-trie/src/proof/prover.rs +++ b/verkle-trie/src/proof/prover.rs @@ -2,6 +2,7 @@ use super::{VerificationHint, VerkleProof}; use crate::{ constants::{CRS, PRECOMPUTED_WEIGHTS}, database::ReadOnlyHigherDb, + errors::ProofCreationError, proof::opening_data::{OpeningData, Openings}, }; use ipa_multipoint::{ @@ -14,17 +15,20 @@ use std::collections::BTreeSet; pub fn create_verkle_proof( storage: &Storage, keys: Vec<[u8; 32]>, -) -> VerkleProof { - assert!(!keys.is_empty(), "cannot create a proof with no keys"); - +) -> Result { + if keys.is_empty() { + return Err(ProofCreationError::EmptyKeySet); + } let (queries, verification_hint) = create_prover_queries(storage, keys); // Commitments without duplicates and without the root, (implicitly) sorted by path, since the queries were // processed by path order - let root_comm = queries - .first() - .expect("expected to have at least one query. The first query will be against the root") - .commitment; + let root_query = match queries.first() { + Some(query) => query, + None => return Err(ProofCreationError::ExpectedOneQueryAgainstRoot), + }; + + let root_comm = root_query.commitment; let comms_sorted: Vec<_> = queries .iter() @@ -39,11 +43,11 @@ pub fn create_verkle_proof( let mut transcript = Transcript::new(b"vt"); let proof = MultiPoint::open(CRS.clone(), &PRECOMPUTED_WEIGHTS, &mut transcript, queries); - VerkleProof { + Ok(VerkleProof { comms_sorted, verification_hint, proof, - } + }) } // First we need to produce all of the key paths for a key diff --git a/verkle-trie/src/proof/stateless_updater.rs b/verkle-trie/src/proof/stateless_updater.rs index d3e46d5..7c8e47f 100644 --- a/verkle-trie/src/proof/stateless_updater.rs +++ b/verkle-trie/src/proof/stateless_updater.rs @@ -1,5 +1,5 @@ use crate::constants::TWO_POW_128; -use crate::{committer::Committer, errors::VerkleError, group_to_field, proof::ExtPresent}; +use crate::{committer::Committer, 
errors::VerificationError, group_to_field, proof::ExtPresent}; use ark_ff::{One, PrimeField, Zero}; use banderwagon::{Element, Fr}; use std::collections::{BTreeMap, HashSet}; @@ -16,11 +16,11 @@ pub fn verify_and_update( values: Vec>, updated_values: Vec>, commiter: C, -) -> Result { +) -> Result { // TODO: replace Clone with references if possible let (ok, update_hint) = proof.check(keys.clone(), values.clone(), root); if !ok { - return Err(VerkleError::InvalidProof); + return Err(VerificationError::InvalidProof); } let update_hint = @@ -37,23 +37,22 @@ pub(crate) fn update_root( updated_values: Vec>, root: Element, committer: C, -) -> Result { +) -> Result { if !values.len() == updated_values.len() { - return Err(VerkleError::UnexpectedUpdatedLength( + return Err(VerificationError::UnexpectedUpdatedLength( values.len(), updated_values.len(), )); } if !keys.len() == updated_values.len() { - return Err(VerkleError::MismatchedKeyLength); + return Err(VerificationError::MismatchedKeyLength); } // check that keys are unique // Since this is the main place this is used, make sure to exit early as soon as 2 keys are the same let keys_unique = has_unique_elements(keys.iter()); - // TODO return an error instead of panic if !keys_unique { - return Err(VerkleError::DuplicateKeys); + return Err(VerificationError::DuplicateKeys); } // TODO Check root against the root in commitments by path @@ -201,7 +200,7 @@ pub(crate) fn update_root( let mut c_2 = Element::zero(); for (suffix, (old_value, new_value)) in suffix_update { if old_value.is_some() { - return Err(VerkleError::OldValueIsPopulated); + return Err(VerificationError::OldValueIsPopulated); } // Split values into low_16 and high_16 let new_value_low_16 = new_value[0..16].to_vec(); @@ -523,10 +522,9 @@ impl SparseVerkleTree { mut prefix: Vec, old_value: Fr, new_value: Fr, - ) -> Result<(), VerkleError> { - // TODO check edge case, when prefix.is_empty is passed in + ) -> Result<(), VerificationError> { if 
prefix.is_empty() { - return Err(VerkleError::EmptyPrefix); + return Err(VerificationError::EmptyPrefix); } // First lets compute the delta between the old_value and the new value let mut delta = new_value - old_value; @@ -602,7 +600,7 @@ mod test { let root = vec![]; let meta = trie.storage.get_branch_meta(&root).unwrap(); - let proof = prover::create_verkle_proof(&trie.storage, keys.clone()); + let proof = prover::create_verkle_proof(&trie.storage, keys.clone()).unwrap(); let values: Vec<_> = keys.iter().map(|val| Some(*val)).collect(); let (ok, updated_hint) = proof.check(keys.clone(), values.clone(), meta.commitment); assert!(ok); @@ -648,7 +646,7 @@ mod test { let root = vec![]; let meta = trie.storage.get_branch_meta(&root).unwrap(); - let proof = prover::create_verkle_proof(&trie.storage, keys.clone()); + let proof = prover::create_verkle_proof(&trie.storage, keys.clone()).unwrap(); let (ok, updated_hint) = proof.check(keys.clone(), values.clone(), meta.commitment); assert!(ok); @@ -702,7 +700,7 @@ mod test { let meta = trie.storage.get_branch_meta(&root).unwrap(); - let proof = prover::create_verkle_proof(&trie.storage, keys.clone()); + let proof = prover::create_verkle_proof(&trie.storage, keys.clone()).unwrap(); let (ok, updated_hint) = proof.check(keys.clone(), values.clone(), meta.commitment); assert!(ok); diff --git a/verkle-trie/src/trie.rs b/verkle-trie/src/trie.rs index ef2a49c..589edf1 100644 --- a/verkle-trie/src/trie.rs +++ b/verkle-trie/src/trie.rs @@ -40,7 +40,7 @@ impl TrieTrait for Trie { fn create_verkle_proof( &self, keys: impl Iterator, - ) -> crate::proof::VerkleProof { + ) -> Result { use crate::proof::prover; prover::create_verkle_proof(&self.storage, keys.collect()) } @@ -1029,7 +1029,7 @@ mod tests { let mut byts = [0u8; 32]; trie.root_hash().serialize(&mut byts[..]).unwrap(); assert_eq!( - hex::encode(&byts), + hex::encode(byts), "fe2e17833b90719eddcad493c352ccd491730643ecee39060c7c1fff5fcc621a" ); } @@ -1060,7 +1060,7 @@ mod tests { 
let mut byts = [0u8; 32]; trie.root_hash().serialize(&mut byts[..]).unwrap(); assert_eq!( - hex::encode(&byts), + hex::encode(byts), "74ff8821eca20188de49340124f249dac94404efdb3838bb6b4d298e483cc20e" ); }