Commit

fix: tests fix proving worker
Arun Jangra authored and Arun Jangra committed Jun 20, 2024
1 parent 9c9f438 commit bf2540a
Showing 5 changed files with 192 additions and 168 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions crates/orchestrator/Cargo.toml
@@ -45,6 +45,7 @@ tracing-subscriber = { workspace = true, features = ["env-filter"] }
url = { workspace = true }
utils = { workspace = true }
uuid = { workspace = true, features = ["v4", "serde"] }
log = "0.4.21"

[features]
default = ["ethereum", "with_mongodb", "with_sqs"]
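The dependency added above is the `log` facade crate. As a hedged illustration only (not taken from this diff; the function name and messages are hypothetical), its macros are used like this, with a concrete logger implementation initialized elsewhere in the binary:

use log::{info, warn};

// Hypothetical example of the `log` facade macros; a logger backend
// (e.g. env_logger, or a tracing bridge) must be set up separately.
fn report_job_status(job_id: &str, succeeded: bool) {
    if succeeded {
        info!("job {job_id} completed");
    } else {
        warn!("job {job_id} failed, scheduling retry");
    }
}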
314 changes: 157 additions & 157 deletions crates/orchestrator/src/jobs/da_job/mod.rs
@@ -313,160 +313,160 @@ fn da_word(class_flag: bool, nonce_change: Option<FieldElement>, num_changes: u6
FieldElement::from_dec_str(&decimal_string).expect("issue while converting to fieldElement")
}
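
The test vectors in `da_word_works` further down pin `da_word` to concrete decimal outputs. As a hedged sketch (not part of this commit; `da_word_reference` and the use of `num_bigint` are assumptions for illustration), those expected values are consistent with packing the class flag at bit 128, the nonce into bits 64..127, and the number of storage changes into the low 64 bits:

use num_bigint::BigUint;

// Hypothetical reference packing inferred from the expected values in `da_word_works`:
// word = class_flag * 2^128 + nonce * 2^64 + num_changes
fn da_word_reference(class_flag: bool, nonce: u64, num_changes: u64) -> BigUint {
    let mut word = BigUint::from(num_changes);
    word += BigUint::from(nonce) << 64usize;
    if class_flag {
        word += BigUint::from(1u8) << 128usize;
    }
    word
}

// Worked examples matching the cases below:
//   (false, nonce = 1, changes = 1) -> 2^64 + 1     = 18446744073709551617
//   (false, nonce = 1, changes = 0) -> 2^64         = 18446744073709551616
//   (false, nonce = 0, changes = 6) -> 6
//   (true,  nonce = 1, changes = 0) -> 2^128 + 2^64 = 340282366920938463481821351505477763072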

// #[cfg(test)]
// mod tests {
// use std::fs;
// use std::fs::File;
// use std::io::Read;
//
// use ::serde::{Deserialize, Serialize};
// use httpmock::prelude::*;
// use majin_blob_core::blob;
// use majin_blob_types::serde;
// use majin_blob_types::state_diffs::UnorderedEq;
// // use majin_blob_types::serde;
// use rstest::rstest;
// use serde_json::json;
//
// use super::*;
// use crate::tests::common::init_config;
//
// #[rstest]
// #[case(false, 1, 1, "18446744073709551617")]
// #[case(false, 1, 0, "18446744073709551616")]
// #[case(false, 0, 6, "6")]
// #[case(true, 1, 0, "340282366920938463481821351505477763072")]
// fn da_word_works(
// #[case] class_flag: bool,
// #[case] new_nonce: u64,
// #[case] num_changes: u64,
// #[case] expected: String,
// ) {
// let new_nonce = if new_nonce > 0 { Some(FieldElement::from(new_nonce)) } else { None };
// let da_word = da_word(class_flag, new_nonce, num_changes);
// let expected = FieldElement::from_dec_str(expected.as_str()).unwrap();
// assert_eq!(da_word, expected);
// }
//
// #[rstest]
// #[case(
// 631861,
// "src/jobs/da_job/test_data/state_update_from_block_631861.txt",
// "src/jobs/da_job/test_data/test_blob_631861.txt",
// "src/jobs/da_job/test_data/nonces_from_block_631861.txt"
// )]
// #[case(
// 638353,
// "src/jobs/da_job/test_data/state_update_from_block_638353.txt",
// "src/jobs/da_job/test_data/test_blob_638353.txt",
// "src/jobs/da_job/test_data/nonces_from_block_638353.txt"
// )]
// #[case(
// 640641,
// "src/jobs/da_job/test_data/state_update_from_block_640641.txt",
// "src/jobs/da_job/test_data/test_blob_640641.txt",
// "src/jobs/da_job/test_data/nonces_from_block_640641.txt"
// )]
// #[tokio::test]
// async fn test_state_update_to_blob_data(
// #[case] block_no: u64,
// #[case] state_update_file_path: &str,
// #[case] file_path: &str,
// #[case] nonce_file_path: &str,
// ) {
// let server = MockServer::start();
//
// let config = init_config(Some(format!("http://localhost:{}", server.port())), None, None, None, None).await;
//
// get_nonce_attached(&server, nonce_file_path);
//
// let state_update = read_state_update_from_file(state_update_file_path).expect("issue while reading");
// let blob_data = state_update_to_blob_data(block_no, state_update, &config)
// .await
// .expect("issue while converting state update to blob data");
//
// let blob_data_biguint = convert_to_biguint(blob_data);
//
// let block_data_state_diffs = serde::parse_state_diffs(blob_data_biguint.as_slice());
//
// let original_blob_data = serde::parse_file_to_blob_data(file_path);
// // converting the data to its original format
// let recovered_blob_data = blob::recover(original_blob_data.clone());
// let blob_data_state_diffs = serde::parse_state_diffs(recovered_blob_data.as_slice());
//
// assert!(block_data_state_diffs.unordered_eq(&blob_data_state_diffs), "value of data json should be identical");
// }
//
// #[rstest]
// #[case("src/jobs/da_job/test_data/test_blob_631861.txt")]
// #[case("src/jobs/da_job/test_data/test_blob_638353.txt")]
// #[case("src/jobs/da_job/test_data/test_blob_639404.txt")]
// #[case("src/jobs/da_job/test_data/test_blob_640641.txt")]
// #[case("src/jobs/da_job/test_data/test_blob_640644.txt")]
// #[case("src/jobs/da_job/test_data/test_blob_640646.txt")]
// #[case("src/jobs/da_job/test_data/test_blob_640647.txt")]
// fn test_fft_transformation(#[case] file_to_check: &str) {
// // parsing the blob hex into BigUints
// let original_blob_data = serde::parse_file_to_blob_data(file_to_check);
// // converting the data to its original format
// let ifft_blob_data = blob::recover(original_blob_data.clone());
// // applying the fft function again on the original format
// let fft_blob_data = fft_transformation(ifft_blob_data);
//
// // ideally, the data after the fft transformation should match the data before the ifft.
// assert_eq!(fft_blob_data, original_blob_data);
// }
//
// pub fn read_state_update_from_file(file_path: &str) -> Result<StateUpdate> {
// // let file_path = format!("state_update_block_no_{}.txt", block_no);
// let mut file = File::open(file_path)?;
// let mut json = String::new();
// file.read_to_string(&mut json)?;
// let state_update: StateUpdate = serde_json::from_str(&json)?;
// Ok(state_update)
// }
//
// #[derive(Serialize, Deserialize, Debug)]
// struct NonceAddress {
// nonce: String,
// address: String,
// }
//
// pub fn get_nonce_attached(server: &MockServer, file_path: &str) {
// // Read the file
// let file_content = fs::read_to_string(file_path).expect("Unable to read file");
//
// // Parse the JSON content into a vector of NonceAddress
// let nonce_addresses: Vec<NonceAddress> =
// serde_json::from_str(&file_content).expect("JSON was not well-formatted");
//
// // Set up mocks for each entry
// for entry in nonce_addresses {
// let address = entry.address.clone();
// let nonce = entry.nonce.clone();
// let response = json!({ "id": 1,"jsonrpc":"2.0","result": nonce });
// let field_element =
// FieldElement::from_dec_str(&address).expect("issue while converting the hex to field").to_bytes_be();
// let hex_field_element = vec_u8_to_hex_string(&field_element);
//
// server.mock(|when, then| {
// when.path("/").body_contains("starknet_getNonce").body_contains(hex_field_element);
// then.status(200).body(serde_json::to_vec(&response).unwrap());
// });
// }
// }
//
// fn vec_u8_to_hex_string(data: &[u8]) -> String {
// let hex_chars: Vec<String> = data.iter().map(|byte| format!("{:02x}", byte)).collect();
//
// let mut new_hex_chars = hex_chars.join("");
// new_hex_chars = new_hex_chars.trim_start_matches('0').to_string();
//
// // Handle the case where the trimmed string is empty (e.g., data was all zeros)
// if new_hex_chars.is_empty() {
// "0x0".to_string()
// } else {
// format!("0x{}", new_hex_chars)
// }
// }
// }
#[cfg(test)]
mod tests {
use std::fs;
use std::fs::File;
use std::io::Read;

use ::serde::{Deserialize, Serialize};
use httpmock::prelude::*;
use majin_blob_core::blob;
use majin_blob_types::serde;
use majin_blob_types::state_diffs::UnorderedEq;
// use majin_blob_types::serde;
use rstest::rstest;
use serde_json::json;

use super::*;
use crate::tests::common::init_config;

#[rstest]
#[case(false, 1, 1, "18446744073709551617")]
#[case(false, 1, 0, "18446744073709551616")]
#[case(false, 0, 6, "6")]
#[case(true, 1, 0, "340282366920938463481821351505477763072")]
fn da_word_works(
#[case] class_flag: bool,
#[case] new_nonce: u64,
#[case] num_changes: u64,
#[case] expected: String,
) {
let new_nonce = if new_nonce > 0 { Some(FieldElement::from(new_nonce)) } else { None };
let da_word = da_word(class_flag, new_nonce, num_changes);
let expected = FieldElement::from_dec_str(expected.as_str()).unwrap();
assert_eq!(da_word, expected);
}

#[rstest]
#[case(
631861,
"src/jobs/da_job/test_data/state_update_from_block_631861.txt",
"src/jobs/da_job/test_data/test_blob_631861.txt",
"src/jobs/da_job/test_data/nonces_from_block_631861.txt"
)]
#[case(
638353,
"src/jobs/da_job/test_data/state_update_from_block_638353.txt",
"src/jobs/da_job/test_data/test_blob_638353.txt",
"src/jobs/da_job/test_data/nonces_from_block_638353.txt"
)]
#[case(
640641,
"src/jobs/da_job/test_data/state_update_from_block_640641.txt",
"src/jobs/da_job/test_data/test_blob_640641.txt",
"src/jobs/da_job/test_data/nonces_from_block_640641.txt"
)]
#[tokio::test]
async fn test_state_update_to_blob_data(
#[case] block_no: u64,
#[case] state_update_file_path: &str,
#[case] file_path: &str,
#[case] nonce_file_path: &str,
) {
let server = MockServer::start();

let config = init_config(Some(format!("http://localhost:{}", server.port())), None, None, None, None).await;

get_nonce_attached(&server, nonce_file_path);

let state_update = read_state_update_from_file(state_update_file_path).expect("issue while reading");
let blob_data = state_update_to_blob_data(block_no, state_update, &config)
.await
.expect("issue while converting state update to blob data");

let blob_data_biguint = convert_to_biguint(blob_data);

let block_data_state_diffs = serde::parse_state_diffs(blob_data_biguint.as_slice());

let original_blob_data = serde::parse_file_to_blob_data(file_path);
// converting the data to its original format
let recovered_blob_data = blob::recover(original_blob_data.clone());
let blob_data_state_diffs = serde::parse_state_diffs(recovered_blob_data.as_slice());

assert!(block_data_state_diffs.unordered_eq(&blob_data_state_diffs), "value of data json should be identical");
}

#[rstest]
#[case("src/jobs/da_job/test_data/test_blob_631861.txt")]
#[case("src/jobs/da_job/test_data/test_blob_638353.txt")]
#[case("src/jobs/da_job/test_data/test_blob_639404.txt")]
#[case("src/jobs/da_job/test_data/test_blob_640641.txt")]
#[case("src/jobs/da_job/test_data/test_blob_640644.txt")]
#[case("src/jobs/da_job/test_data/test_blob_640646.txt")]
#[case("src/jobs/da_job/test_data/test_blob_640647.txt")]
fn test_fft_transformation(#[case] file_to_check: &str) {
// parsing the blob hex into BigUints
let original_blob_data = serde::parse_file_to_blob_data(file_to_check);
// converting the data to its original format
let ifft_blob_data = blob::recover(original_blob_data.clone());
// applying the fft function again on the original format
let fft_blob_data = fft_transformation(ifft_blob_data);

// ideally, the data after the fft transformation should match the data before the ifft.
assert_eq!(fft_blob_data, original_blob_data);
}

pub fn read_state_update_from_file(file_path: &str) -> Result<StateUpdate> {
// let file_path = format!("state_update_block_no_{}.txt", block_no);
let mut file = File::open(file_path)?;
let mut json = String::new();
file.read_to_string(&mut json)?;
let state_update: StateUpdate = serde_json::from_str(&json)?;
Ok(state_update)
}

#[derive(Serialize, Deserialize, Debug)]
struct NonceAddress {
nonce: String,
address: String,
}

pub fn get_nonce_attached(server: &MockServer, file_path: &str) {
// Read the file
let file_content = fs::read_to_string(file_path).expect("Unable to read file");

// Parse the JSON content into a vector of NonceAddress
let nonce_addresses: Vec<NonceAddress> =
serde_json::from_str(&file_content).expect("JSON was not well-formatted");

// Set up mocks for each entry
for entry in nonce_addresses {
let address = entry.address.clone();
let nonce = entry.nonce.clone();
let response = json!({ "id": 1,"jsonrpc":"2.0","result": nonce });
let field_element =
FieldElement::from_dec_str(&address).expect("issue while converting the hex to field").to_bytes_be();
let hex_field_element = vec_u8_to_hex_string(&field_element);

server.mock(|when, then| {
when.path("/").body_contains("starknet_getNonce").body_contains(hex_field_element);
then.status(200).body(serde_json::to_vec(&response).unwrap());
});
}
}

fn vec_u8_to_hex_string(data: &[u8]) -> String {
let hex_chars: Vec<String> = data.iter().map(|byte| format!("{:02x}", byte)).collect();

let mut new_hex_chars = hex_chars.join("");
new_hex_chars = new_hex_chars.trim_start_matches('0').to_string();

// Handle the case where the trimmed string is empty (e.g., data was all zeros)
if new_hex_chars.is_empty() {
"0x0".to_string()
} else {
format!("0x{}", new_hex_chars)
}
}
}