Error Refactor Done (#68)
* Controlc/fixed everything (#1)

* Code cleanup (#57)

Co-authored-by: ControlC ControlV <[email protected]>

* More Code Cleanup (#59)

* Add in Verkle Proof Benchmark (#60)

* fixed some unsafe unwraps

* fixed some unsafe unwraps

* some progress?

* fixes

* nits

* mostly cleanup

* clippy

* yml fix

* wtf is yml

* sorry

* verkle-trie refactors

* Error Refactoring Done????:

* Clippy working now

Co-authored-by: ControlC ControlV <[email protected]>

* fix

* addressed comments

Co-authored-by: ControlC ControlV <[email protected]>
ControlCplusControlV and ControlC ControlV authored Jan 9, 2023
1 parent 40dd7b9 commit 4b70df7
Showing 16 changed files with 109 additions and 93 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -2,3 +2,4 @@
 Cargo.lock
 .DS_Store
 notes.md
+precomputed_points.bin
2 changes: 1 addition & 1 deletion verkle-spec/src/code.rs
@@ -209,7 +209,7 @@ fn simple_chunkify_test() {
            23, 24, 25, 26, 27, 28, 29, 30,
        ],
    ];
-    let code = code.clone().into_iter().flatten().collect();
+    let code = code.into_iter().flatten().collect();
 
     let chunked_code = chunkify_code(code);
5 changes: 3 additions & 2 deletions verkle-spec/src/util.rs
@@ -80,7 +80,6 @@ pub(crate) fn chunk_bytes(input: &[u8]) -> Vec<u128> {
 // Specialised version of `chunk_bytes` for 64 bytes without the padding
 pub(crate) fn chunk64(bytes64: [u8; 64]) -> [u128; 5] {
     const INPUT_LEN: u128 = 64;
-    debug_assert!(bytes64.len() as u128 == INPUT_LEN);
 
     let mut chunked_input = [[0u8; 16]; 5];
 
@@ -107,8 +106,10 @@ pub(crate) fn zero_align_bytes(mut bytes: Vec<u8>, alignment: usize) -> Vec<u8>
     if bytes.len() % alignment == 0 {
         return bytes;
     }
+
     let pad_by = alignment - bytes.len() % alignment;
     bytes.extend(vec![0u8; pad_by]);
+
     bytes
 }
 
@@ -119,7 +120,7 @@ fn swap_byte() {
     let hash = H256::repeat_byte(2);
     let got = swap_last_byte(hash, U256::from(replacement_byte));
 
-    let mut expected = hash.as_fixed_bytes().clone();
+    let mut expected = *hash.as_fixed_bytes();
     *expected.last_mut().unwrap() = replacement_byte;
 
     assert_eq!(*got.as_fixed_bytes(), expected)
1 change: 1 addition & 0 deletions verkle-trie/benches/benchmarks/insert_10k.rs
@@ -28,6 +28,7 @@ fn insert_10k_from_10mil_step(c: &mut Criterion) {
             |mut trie| {
                 // Insert different keys
                 let key_vals = KEYS_10K.iter().map(|key_bytes| (*key_bytes, *key_bytes));
+                #[allow(clippy::unit_arg)]
                 black_box(trie.insert(key_vals))
             },
             BatchSize::SmallInput,
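
For readers wondering about the new `#[allow(clippy::unit_arg)]`: `trie.insert` returns `()`, and handing a unit value to `black_box` trips Clippy's `unit_arg` lint even though the call is deliberate in a benchmark. A standalone reproduction (hypothetical code, not from this repo):

    fn returns_unit() {}

    fn main() {
        // Clippy's `unit_arg` lint fires here: a `()` value is being passed
        // as a function argument. In a benchmark this is intentional, since
        // `black_box` only exists to keep the call from being optimized away.
        #[allow(clippy::unit_arg)]
        std::hint::black_box(returns_unit());
    }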
7 changes: 4 additions & 3 deletions verkle-trie/benches/benchmarks/proof_10k.rs
@@ -13,7 +13,8 @@ fn proof_10k_from_10mil_step(c: &mut Criterion) {
     let config = TestConfig::new(db);
     let mut trie = Trie::new(config);
     // Initial set of keys
-    let keys = generate_set_of_keys(1_000_000);
+    let _keys = generate_set_of_keys(1_000_000);
+
     let key_vals = KEYS_10K.iter().map(|key_bytes| (*key_bytes, *key_bytes));
     trie.insert(key_vals);
 
@@ -24,9 +25,9 @@
         |b, _| {
             b.iter_batched(
                 || trie.clone(),
-                |mut trie| {
+                |trie| {
                     // Insert different keys
-                    let key_vals = KEYS_10K.iter().map(|bytes| *bytes);
+                    let key_vals = KEYS_10K.iter().copied();
                     black_box(trie.create_verkle_proof(key_vals))
                 },
                 BatchSize::SmallInput,
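
An aside on the `.copied()` change: for iterators over references to `Copy` types, `.copied()` is the idiomatic equivalent of `.map(|x| *x)`, and it is the form Clippy suggests. A minimal standalone sketch:

    fn main() {
        let keys: Vec<[u8; 32]> = vec![[0u8; 32], [1u8; 32]];
        // Both iterators yield the same owned arrays; `copied()` states the
        // intent directly instead of dereferencing inside a closure.
        let a: Vec<[u8; 32]> = keys.iter().map(|k| *k).collect();
        let b: Vec<[u8; 32]> = keys.iter().copied().collect();
        assert_eq!(a, b);
    }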
1 change: 1 addition & 0 deletions verkle-trie/benches/benchmarks/util.rs
@@ -6,6 +6,7 @@ use sha2::{Digest, Sha256};
 
 pub static KEYS_10K: Lazy<Vec<[u8; 32]>> =
     Lazy::new(|| generate_diff_set_of_keys(10_000).collect());
+#[allow(dead_code)]
 pub static SAME_KEYS_10K: Lazy<Vec<[u8; 32]>> =
     Lazy::new(|| generate_set_of_keys(10_000).collect());
 
5 changes: 2 additions & 3 deletions verkle-trie/src/committer.rs
@@ -8,9 +8,8 @@ pub mod test;
 // This is being done in the config file automatically
 pub trait Committer {
     // Commit to a lagrange polynomial, evaluations.len() must equal the size of the SRS at the moment
-    //TODO: We can make this &[Fr;256] since we have committed to 256, this would force the caller
-    // to handle the size of the slice
-    fn commit_lagrange(&self, evaluations: &[Fr]) -> Element;
+    fn commit_lagrange(&self, evaluations: &[Fr; 256]) -> Element;
 
     // compute value * G for a specific generator in the SRS
     fn scalar_mul(&self, value: Fr, lagrange_index: usize) -> Element;
 
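
The signature change from `&[Fr]` to `&[Fr; 256]` does exactly what the deleted TODO asked for: the 256-element requirement moves from a runtime invariant into the type, so a wrong-sized input becomes a compile error. A hedged sketch of the difference, using `u64` in place of `Fr` so it stands alone:

    // Slice version: any length compiles; the length must be checked at runtime.
    fn commit_slice(evaluations: &[u64]) -> u64 {
        assert_eq!(evaluations.len(), 4, "wrong number of points");
        evaluations.iter().sum()
    }

    // Fixed-size version: the length is part of the type, so no runtime check.
    fn commit_array(evaluations: &[u64; 4]) -> u64 {
        evaluations.iter().sum()
    }

    fn main() {
        let evals = [1u64, 2, 3, 4];
        assert_eq!(commit_slice(&evals), commit_array(&evals));
        // commit_array(&[1u64, 2, 3]); // rejected at compile time: expected `&[u64; 4]`
    }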
4 changes: 2 additions & 2 deletions verkle-trie/src/committer/precompute.rs
@@ -14,7 +14,7 @@ pub struct PrecomputeLagrange {
 impl<'a> Committer for &'a PrecomputeLagrange {
     // If compute these points at compile time, we can
     // dictate that evaluations should be an array
-    fn commit_lagrange(&self, evaluations: &[Fr]) -> Element {
+    fn commit_lagrange(&self, evaluations: &[Fr; 256]) -> Element {
         if evaluations.len() != self.num_points {
             panic!("wrong number of points")
         }
@@ -59,7 +59,7 @@ impl<'a> Committer for &'a PrecomputeLagrange {
     }
 }
 impl Committer for PrecomputeLagrange {
-    fn commit_lagrange(&self, evaluations: &[Fr]) -> Element {
+    fn commit_lagrange(&self, evaluations: &[Fr; 256]) -> Element {
         (&self).commit_lagrange(evaluations)
     }
 
2 changes: 1 addition & 1 deletion verkle-trie/src/committer/test.rs
@@ -6,7 +6,7 @@ use banderwagon::{Element, Fr};
 #[derive(Debug, Clone, Copy)]
 pub struct TestCommitter;
 impl Committer for TestCommitter {
-    fn commit_lagrange(&self, evaluations: &[Fr]) -> Element {
+    fn commit_lagrange(&self, evaluations: &[Fr; 256]) -> Element {
         let mut res = Element::zero();
         for (val, point) in evaluations.iter().zip(CRS.G.iter()) {
             res += point * val;
21 changes: 10 additions & 11 deletions verkle-trie/src/config.rs
@@ -1,7 +1,7 @@
 use crate::{
     committer::{precompute::PrecomputeLagrange, test::TestCommitter},
     constants::CRS,
-    errors::VerkleError,
+    errors::ConfigError,
 };
 use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
 use std::fs::File;
@@ -17,47 +17,46 @@ pub struct Config<Storage, PolyCommit> {
 // them. It is possible to use this for tests too, one should ensure that the file exists
 // before running the tests; which are ran in parallel.
 const PRECOMPUTED_POINTS_PATH: &str = "precomputed_points.bin";
-// TODO: These two functions return Strings, when they should return a result with an enum variant ideally
-// TODO: This is an API change and will be done in the API refactor phase.
 
 pub type VerkleConfig<Storage> = Config<Storage, PrecomputeLagrange>;
 
 impl<Storage> VerkleConfig<Storage> {
-    pub fn new(db: Storage) -> Result<Self, VerkleError> {
+    pub fn new(db: Storage) -> Result<Self, ConfigError> {
         let file_exists = std::path::Path::new(PRECOMPUTED_POINTS_PATH).exists();
 
         if file_exists {
-            return Err(VerkleError::PrecomputedPointsFileExists);
+            return Err(ConfigError::PrecomputedPointsFileExists);
         }
 
         // File is not already precomputed, so we pre-compute the points and store them
         let mut file = match File::create(PRECOMPUTED_POINTS_PATH) {
            Ok(v) => v,
-            Err(e) => return Err(VerkleError::FileError(e)),
+            Err(e) => return Err(ConfigError::FileError(e)),
        };
 
         let committer = PrecomputeLagrange::precompute(&CRS.G);
         let serialization_result = committer.serialize_unchecked(&mut file);
         if let Err(e) = serialization_result {
-            return Err(VerkleError::SerializationError(e));
+            return Err(ConfigError::SerializationError(e));
        }
 
         Ok(Config { db, committer })
     }
 
-    pub fn open(db: Storage) -> Result<Self, VerkleError> {
+    pub fn open(db: Storage) -> Result<Self, ConfigError> {
         let file_exists = std::path::Path::new(PRECOMPUTED_POINTS_PATH).exists();
         if !file_exists {
-            return Err(VerkleError::PrecomputedPointsNotFound);
+            return Err(ConfigError::PrecomputedPointsNotFound);
        }
         let mut file = match File::create(PRECOMPUTED_POINTS_PATH) {
            Ok(v) => v,
-            Err(e) => return Err(VerkleError::FileError(e)),
+            Err(e) => return Err(ConfigError::FileError(e)),
        };
 
         let committer: PrecomputeLagrange =
            match CanonicalDeserialize::deserialize_unchecked(&mut file) {
                Ok(v) => v,
-                Err(e) => return Err(VerkleError::SerializationError(e)),
+                Err(e) => return Err(ConfigError::SerializationError(e)),
            };
 
         Ok(Config { db, committer })
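
With `new` and `open` returning `Result<Self, ConfigError>` instead of a stringly-typed error, callers can branch on the specific failure. A sketch of what that enables (the helper below is hypothetical, and the `verkle_trie::errors` path assumes the module layout shown in this diff):

    use verkle_trie::errors::ConfigError;

    // Hypothetical helper mapping each variant to advice for the caller.
    fn describe(err: &ConfigError) -> &'static str {
        match err {
            ConfigError::PrecomputedPointsFileExists => {
                "points file already on disk; use VerkleConfig::open instead of new"
            }
            ConfigError::PrecomputedPointsNotFound => {
                "no points file yet; run VerkleConfig::new first to generate it"
            }
            ConfigError::FileError(_) => "could not create or open the points file",
            ConfigError::SerializationError(_) => "points file is corrupt or truncated",
        }
    }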
48 changes: 28 additions & 20 deletions verkle-trie/src/errors.rs
@@ -1,23 +1,17 @@
 use ark_serialize::SerializationError;
 use thiserror::Error;
 
-// A Wrapper Type for all errors that can occur within the Verkle Library
-// Provides Single Error Enum for consumers of the library to match against
-// Right now there are lots of unwraps which are immediately switched to Results, but in the future
-// We likely be moved back to unwraps with safety comments
-#[derive(Error, Debug)]
-pub enum VerkleError {
-    #[error("Issue Occured Converting Type to Bytes")]
-    SerializationError(#[from] SerializationError),
-    #[error("Precomputed Points Exist Already")]
-    PrecomputedPointsFileExists,
-    #[error("Unable to Create Precomputed Points File")]
-    CannotCreatePrecomputedPoints(std::io::Error),
-    #[error("Precomputed Lagrage Points File Couldn't not be found")]
-    PrecomputedPointsNotFound,
-    #[error("Issue opening PrecomputedPointsFile")]
-    FileError(std::io::Error),
-
+#[derive(Debug, Error)]
+pub enum HintError {
+    #[error("General IO Error")]
+    IoError(#[from] std::io::Error),
+}
+
+#[derive(Debug, Error)]
+pub enum VerificationError {
     #[error("Invalid proof supplied")]
     InvalidProof,
     #[error("Invalid Length for Updated Values")]
@@ -28,12 +22,26 @@
     DuplicateKeys,
     #[error("Since the extension was not present in the trie, the suffix cannot have any previous values")]
     OldValueIsPopulated,
-
     #[error("Prefix Cannot be Empty")]
     EmptyPrefix,
-
-    #[error("Child Branch is Empty/Doesn't exist")]
-    NoChildBranch,
-    #[error("BranchMeta was not serialised properly")]
-    BranchMetaSerializedFaulty(String),
-} // TODO group erros by assosiation, and break out into sub error enums
+}
+
+#[derive(Debug, Error)]
+pub enum ConfigError {
+    #[error("Precomputed Points Exist Already")]
+    PrecomputedPointsFileExists,
+    #[error("Issue opening PrecomputedPointsFile")]
+    FileError(std::io::Error),
+    #[error("Precomputed Lagrange Points File Couldn't be found")]
+    PrecomputedPointsNotFound,
+    #[error("Serialization Either Failed or Data is Invalid")]
+    SerializationError(#[from] SerializationError),
+}
+
+#[derive(Debug, Error)]
+pub enum ProofCreationError {
+    #[error("Empty Key Set")]
+    EmptyKeySet,
+    #[error("Expected to have at least one query, which will be against the root")]
+    ExpectedOneQueryAgainstRoot,
+}
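
The net effect of the refactor is that each domain (hints, verification, config, proof creation) gets its own enum, answering the deleted TODO about grouping errors by association. Because `thiserror`'s `#[from]` derives a `From` impl, `?` still converts underlying errors automatically; a minimal standalone sketch using the `HintError` enum exactly as added above (the `read_hint` helper is hypothetical):

    use thiserror::Error;

    #[derive(Debug, Error)]
    pub enum HintError {
        #[error("General IO Error")]
        IoError(#[from] std::io::Error),
    }

    // `?` converts the `std::io::Error` from `fs::read` into
    // `HintError::IoError` via the derived `From` impl.
    fn read_hint(path: &str) -> Result<Vec<u8>, HintError> {
        Ok(std::fs::read(path)?)
    }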
10 changes: 7 additions & 3 deletions verkle-trie/src/lib.rs
@@ -9,6 +9,7 @@ pub mod proof;
 pub mod trie;
 
 pub use config::*;
+use errors::ProofCreationError;
 pub use trie::Trie;
 
 pub use banderwagon::{Element, Fr};
@@ -29,6 +30,7 @@ pub trait TrieTrait {
     fn insert_single(&mut self, key: Key, value: Value) {
         self.insert(vec![(key, value)].into_iter())
     }
+
     /// Gets the value at the `Key` if it exists
     /// Returns an error if it does not exist
     /// TODO: Find out if this method is ever needed
@@ -41,8 +43,10 @@ pub trait TrieTrait {
     fn root_commitment(&self) -> Element;
 
     /// Creates a verkle proof over many keys
-    /// TODO: This will return a Result in the future
-    fn create_verkle_proof(&self, key: impl Iterator<Item = Key>) -> proof::VerkleProof;
+    fn create_verkle_proof(
+        &self,
+        key: impl Iterator<Item = Key>,
+    ) -> Result<proof::VerkleProof, ProofCreationError>;
 }
 
 // Note: This is a 2 to 1 map, but the two preimages are identified to be the same
@@ -80,6 +84,6 @@ mod tests {
         group_to_field(&generator)
            .serialize(&mut bytes[..])
            .unwrap();
-        assert_eq!(hex::encode(&bytes), expected);
+        assert_eq!(hex::encode(bytes), expected);
     }
 }
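
Since `create_verkle_proof` now returns a `Result`, call sites must handle `ProofCreationError` rather than assume success. A hedged usage sketch (`T` is any `TrieTrait` implementor; the 32-byte key type is an assumption matching the keys used in the benchmarks above):

    use verkle_trie::{errors::ProofCreationError, TrieTrait};

    // Hypothetical caller: build a proof and report the failure mode.
    fn prove<T: TrieTrait>(trie: &T, keys: Vec<[u8; 32]>) {
        match trie.create_verkle_proof(keys.into_iter()) {
            Ok(_proof) => println!("proof created"),
            Err(ProofCreationError::EmptyKeySet) => eprintln!("no keys supplied"),
            Err(e) => eprintln!("proof creation failed: {e}"),
        }
    }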