[paused] feat: snos job #50

Closed · wants to merge 25 commits
Changes from all commits · 25 commits
412aeec
feat(snos_job): Snos job draft - overall first structure
akhercha Jul 20, 2024
f41eeb9
Merge branch 'main' into feat/snos_job
akhercha Jul 20, 2024
7d898cd
feat(snos_job): Dummy code compiling 👍
akhercha Jul 20, 2024
3676b1b
feat(snos_job): Added module comment for dummy_state
akhercha Jul 22, 2024
b2ede88
feat(snos_job): Filled snos config from snos input
akhercha Jul 22, 2024
82b04d4
feat(snos_job): get_block_number as its function
akhercha Jul 22, 2024
2f1dab2
feat(snos_job): hotfix - get block nbr returns Result
akhercha Jul 22, 2024
799ce7e
Merge branch 'main' into feat/snos_job
akhercha Jul 22, 2024
b50c5df
feat(snos_job): Updated CHANGELOG.md
akhercha Jul 22, 2024
0a99bdb
feat(snos_job): Block number fetch + Dummy test
akhercha Jul 22, 2024
e9c3df5
feat(snos_job): Dummy RPC request code
akhercha Jul 22, 2024
2eab368
feat(snos_job): RPC request with json! macro + job config
akhercha Jul 22, 2024
9ee833e
feat(snos_job): Added http_client
akhercha Jul 22, 2024
b9e4569
feat(snos_job): Documentation + rename
akhercha Jul 22, 2024
9705039
feat(snos_job): RPC requests
akhercha Jul 22, 2024
e8ababa
feat(snos_job): Sync with main (merge)
akhercha Jul 22, 2024
a57698e
feat(snos_job): Sync with main
akhercha Jul 22, 2024
2f3b1d8
feat(snos_job): More concise conversions test
akhercha Jul 22, 2024
708991d
feat(snos_job): Storing outputs to S3
akhercha Jul 22, 2024
8919ed0
feat(snos_job): Quick renaming
akhercha Jul 22, 2024
5ade4f7
feat(snos_job): Organization update
akhercha Jul 22, 2024
8d2a124
feat(snos_job): Filled internal_id
akhercha Jul 22, 2024
98b3d51
feat(snos_job): Just naming update
akhercha Jul 22, 2024
4e82e2f
feat(snos_job): Updated TODOs & notes
akhercha Jul 23, 2024
df2f0d6
feat(snos_job): Comments + Naming
akhercha Jul 23, 2024
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -6,6 +6,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).

## Added

- `SnosJob` implementation
- Function to calculate the kzg proof of x_0.
- Tests for updating the state.
- Function to update the state and publish blob on ethereum in state update job.
12 changes: 8 additions & 4 deletions Cargo.lock

Some generated files are not rendered by default.

18 changes: 12 additions & 6 deletions Cargo.toml
@@ -32,9 +32,10 @@ color-eyre = "0.6.2"
c-kzg = "1.0.0"
dotenvy = "0.15.7"
futures = "0.3.30"
indexmap = "2.1.0"
mongodb = { version = "2.8.1" }
omniqueue = { version = "0.2.0" }
reqwest = { version = "0.11.24" }
reqwest = { version = "0.12", features = ["json"] }
rstest = "0.18.2"
serde = { version = "1.0.197" }
serde_json = "1.0.114"
@@ -59,17 +60,22 @@ itertools = "0.13.0"
mockall = "0.12.1"
testcontainers = "0.18.0"

# Blockifier, using the same version as Madara
blockifier = { git = "https://github.com/Moonsong-Labs/blockifier", branch = "msl/derive-clone", features = [
"testing",
] }

# Cairo VM
cairo-vm = { git = "https://github.com/lambdaclass/cairo-vm", features = [
"extensive_hints",
"cairo-1-hints",
] }

# TODO: we currently use the Moonsong fork & the os-output-serde branch so we
# can deserialize our snos input json into a StarknetOsInput struct.
# TODO: update back to the main repo once it's merged
# Sharp (Starkware)
snos = { git = "https://github.com/keep-starknet-strange/snos" }
# Snos
snos = { git = "https://github.com/keep-starknet-strange/snos", branch = "main" }

# Starknet API
starknet_api = { version = "=0.10", features = ["testing"] }

# Madara prover API
madara-prover-common = { git = "https://github.com/Moonsong-Labs/madara-prover-api", branch = "od/use-latest-cairo-vm" }
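The workspace switch to reqwest 0.12 with the `json` feature is what the later "RPC request with `json!` macro" commits rely on. A minimal sketch of that request pattern, assuming a local node URL and simplified error handling (`starknet_blockNumber` is a standard Starknet JSON-RPC method, not code from this PR):

```rust
use serde_json::{json, Value};

// Hedged sketch: the URL below is a placeholder for the Madara RPC endpoint.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let client = reqwest::Client::new();
    let body = json!({
        "jsonrpc": "2.0",
        "id": 1,
        "method": "starknet_blockNumber",
        "params": []
    });

    // `.json(&body)` serializes the request body and `.json::<Value>()` parses
    // the response; both require reqwest's `json` feature enabled above.
    let response: Value = client
        .post("http://localhost:9944")
        .json(&body)
        .send()
        .await?
        .json()
        .await?;

    println!("latest block: {}", response["result"]);
    Ok(())
}
```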
4 changes: 4 additions & 0 deletions crates/orchestrator/Cargo.toml
@@ -21,6 +21,7 @@ aws-sdk-s3 = { version = "1.38.0", features = ["behavior-version-latest"] }
axum = { workspace = true, features = ["macros"] }
axum-macros = { workspace = true }
bincode = { workspace = true }
blockifier = { workspace = true }
bytes = "1.6.0"
c-kzg = { workspace = true }
cairo-vm = { workspace = true }
@@ -31,6 +32,7 @@ ethereum-da-client = { workspace = true, optional = true }
ethereum-settlement-client = { workspace = true }
futures = { workspace = true }
hex = { workspace = true }
indexmap = { workspace = true }
lazy_static = { workspace = true }
log = "0.4.21"
majin-blob-core = { git = "https://github.com/AbdelStark/majin-blob", branch = "main" }
@@ -42,6 +44,7 @@ num-bigint = { workspace = true }
num-traits = { workspace = true }
omniqueue = { workspace = true, optional = true }
prover-client-interface = { workspace = true }
reqwest = { workspace = true }
rstest = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
@@ -51,6 +54,7 @@ snos = { workspace = true }
starknet = { workspace = true }
starknet-core = "0.9.0"
starknet-settlement-client = { workspace = true }
starknet_api = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["sync", "macros", "rt-multi-thread"] }
tracing = { workspace = true }
30 changes: 25 additions & 5 deletions crates/orchestrator/src/config.rs
@@ -24,12 +24,15 @@ use crate::database::mongodb::MongoDb;
use crate::database::{Database, DatabaseConfig};
use crate::queue::sqs::SqsQueue;
use crate::queue::QueueProvider;
use crate::rpc::{config::HttpRpcConfig, HttpRpcClient};

/// The app config. It can be accessed from anywhere inside the service
/// by calling the `config` function.
pub struct Config {
/// The starknet client to get data from the node
starknet_client: Arc<JsonRpcClient<HttpTransport>>,
/// An HTTP client used for RPC requests to L1/L2 nodes
http_rpc_client: Arc<HttpRpcClient>,
/// The DA client to interact with the DA layer
da_client: Box<dyn DaClient>,
/// The service that produces proof and registers it onchain
@@ -52,6 +55,8 @@ pub async fn init_config() -> Config {
let provider = JsonRpcClient::new(HttpTransport::new(
Url::parse(get_env_var_or_panic("MADARA_RPC_URL").as_str()).expect("Failed to parse URL"),
));
// init http client for rpc requests
let http_rpc_client = HttpRpcClient::new(HttpRpcConfig::new_from_env());

// init database
let database = Box::new(MongoDb::new(MongoDbConfig::new_from_env()).await);
@@ -64,24 +69,34 @@
let settings_provider = DefaultSettingsProvider {};
let settlement_client = build_settlement_client(&settings_provider).await;
let prover_client = build_prover_service(&settings_provider);

let storage_client = build_storage_client().await;

Config::new(Arc::new(provider), da_client, prover_client, settlement_client, database, queue, storage_client)
let storage = build_storage_client().await;

Config::new(
Arc::new(provider),
Arc::new(http_rpc_client),
da_client,
prover_client,
settlement_client,
database,
queue,
storage,
)
}

impl Config {
/// Create a new config
#[allow(clippy::too_many_arguments)]
pub fn new(
starknet_client: Arc<JsonRpcClient<HttpTransport>>,
http_rpc_client: Arc<HttpRpcClient>,
da_client: Box<dyn DaClient>,
prover_client: Box<dyn ProverClient>,
settlement_client: Box<dyn SettlementClient>,
database: Box<dyn Database>,
queue: Box<dyn QueueProvider>,
storage: Box<dyn DataStorage>,
) -> Self {
Self { starknet_client, da_client, prover_client, settlement_client, database, queue, storage }
Self { starknet_client, http_rpc_client, da_client, prover_client, settlement_client, database, queue, storage }
}

/// Returns the starknet client
@@ -104,6 +119,11 @@ impl Config {
self.settlement_client.as_ref()
}

/// Returns the http client
pub fn http_rpc_client(&self) -> &Arc<HttpRpcClient> {
&self.http_rpc_client
}

/// Returns the database client
pub fn database(&self) -> &dyn Database {
self.database.as_ref()
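The config now wires in `HttpRpcClient` and `HttpRpcConfig` from a new `crate::rpc` module that is not part of this changeset. A hypothetical sketch of what such a wrapper around `reqwest::Client` could look like; every name, field, and method below (including `call` and the single `MADARA_RPC_URL` variable) is an assumption rather than the PR's actual implementation:

```rust
use reqwest::Client;
use serde_json::{json, Value};

/// Assumed config shape: a single JSON-RPC URL read from the environment.
pub struct HttpRpcConfig {
    pub rpc_url: String,
}

impl HttpRpcConfig {
    pub fn new_from_env() -> Self {
        Self { rpc_url: std::env::var("MADARA_RPC_URL").expect("MADARA_RPC_URL must be set") }
    }
}

/// Thin wrapper over `reqwest::Client` for JSON-RPC 2.0 requests to L1/L2 nodes.
pub struct HttpRpcClient {
    client: Client,
    config: HttpRpcConfig,
}

impl HttpRpcClient {
    pub fn new(config: HttpRpcConfig) -> Self {
        Self { client: Client::new(), config }
    }

    /// Sends a JSON-RPC request and returns the raw `result` field.
    pub async fn call(&self, method: &str, params: Value) -> Result<Value, reqwest::Error> {
        let body = json!({ "jsonrpc": "2.0", "id": 1, "method": method, "params": params });
        let response: Value = self
            .client
            .post(self.config.rpc_url.as_str())
            .json(&body)
            .send()
            .await?
            .json()
            .await?;
        Ok(response["result"].clone())
    }
}
```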
1 change: 1 addition & 0 deletions crates/orchestrator/src/constants.rs
@@ -1,2 +1,3 @@
pub const BLOB_DATA_FILE_NAME: &str = "blob_data.txt";
pub const SNOS_OUTPUT_FILE_NAME: &str = "snos_output.json";
pub const CAIRO_PIE_FILE_NAME: &str = "cairo_pie.json";
1 change: 0 additions & 1 deletion crates/orchestrator/src/data_storage/aws_s3/mod.rs
@@ -22,7 +22,6 @@ pub struct AWSS3 {
impl AWSS3 {
/// Initializes a new AWS S3 client by passing the config
/// and returning it.
#[allow(dead_code)]
pub async fn new(config: AWSS3Config) -> Self {
// AWS cred building
let credentials = Credentials::new(
1 change: 1 addition & 0 deletions crates/orchestrator/src/data_storage/mod.rs
@@ -10,6 +10,7 @@ use mockall::automock;
/// the cloud provider storage.
/// The proposed storage format is :
/// ----<block_number>
/// ----<cairo_pie.json> (stored during the SNOS job)
/// ----<snos_output.json> (stored during the SNOS job)
/// ----<blob_data.txt> (stored during the DA job)
#[automock]
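A brief sketch of how the documented layout maps to object keys, using the file-name constants added in `constants.rs`; the `DataStorage` trait's actual method names are not shown in this diff, so only key construction is illustrated:

```rust
// Assumed constants, mirroring crates/orchestrator/src/constants.rs in this PR.
const SNOS_OUTPUT_FILE_NAME: &str = "snos_output.json";
const CAIRO_PIE_FILE_NAME: &str = "cairo_pie.json";

/// Builds the object key for a file stored under a block-number prefix,
/// following the `<block_number>/<file>` layout described above.
fn block_scoped_key(block_number: u64, file_name: &str) -> String {
    format!("{block_number}/{file_name}")
}

fn main() {
    // For block 1234, the SNOS job would write two objects:
    assert_eq!(block_scoped_key(1234, CAIRO_PIE_FILE_NAME), "1234/cairo_pie.json");
    assert_eq!(block_scoped_key(1234, SNOS_OUTPUT_FILE_NAME), "1234/snos_output.json");
}
```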
2 changes: 2 additions & 0 deletions crates/orchestrator/src/jobs/constants.rs
@@ -8,3 +8,5 @@ pub const JOB_METADATA_STATE_UPDATE_BLOCKS_TO_SETTLE_KEY: &str = "blocks_number_
pub const JOB_METADATA_STATE_UPDATE_FETCH_FROM_TESTS: &str = "fetch_from_test_data";
pub const JOB_METADATA_STATE_UPDATE_ATTEMPT_PREFIX: &str = "attempt_tx_hashes_";
pub const JOB_METADATA_STATE_UPDATE_LAST_FAILED_BLOCK_NO: &str = "last_failed_block_no";

pub const JOB_METADATA_SNOS_BLOCK: &str = "block_number_to_run";
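A sketch of how a SNOS job might read this key, assuming the job metadata is a `HashMap<String, String>` (the metadata container type is not confirmed by this diff):

```rust
use std::collections::HashMap;

pub const JOB_METADATA_SNOS_BLOCK: &str = "block_number_to_run";

/// Extracts and parses the block number a SNOS job should run for.
/// Hypothetical helper; names and error type are illustrative only.
fn block_number_from_metadata(metadata: &HashMap<String, String>) -> Result<u64, String> {
    metadata
        .get(JOB_METADATA_SNOS_BLOCK)
        .ok_or_else(|| format!("missing metadata key `{JOB_METADATA_SNOS_BLOCK}`"))?
        .parse::<u64>()
        .map_err(|e| format!("invalid block number: {e}"))
}

fn main() {
    let metadata = HashMap::from([(JOB_METADATA_SNOS_BLOCK.to_string(), "1234".to_string())]);
    assert_eq!(block_number_from_metadata(&metadata), Ok(1234));
}
```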
86 changes: 86 additions & 0 deletions crates/orchestrator/src/jobs/snos_job/dummy_state.rs
@@ -0,0 +1,86 @@
//! A Dummy state that does nothing.
//! It just implements the State and StateReader traits provided by Blockifier.
//!
//! This module should be deleted as soon as we can import the
//! [BlockifierStateAdapter] structure from the Madara code (currently we have
//! version conflicts between snos <=> deoxys <=> cairo-vm),
//! OR
//! if it turns out not to be needed at all after the Snos code update. That
//! update should make running the OS easier to integrate with Madara, so it
//! may no longer be necessary to pass a State object.

use std::collections::HashSet;

use blockifier::execution::contract_class::{ContractClass, ContractClassV0};
use blockifier::state::cached_state::CommitmentStateDiff;
use blockifier::state::state_api::{State, StateReader, StateResult};
use indexmap::IndexMap;
use starknet_api::core::{ClassHash, CompiledClassHash, ContractAddress, Nonce};
use starknet_api::hash::StarkFelt;
use starknet_api::state::StorageKey;

pub struct DummyState;

impl StateReader for DummyState {
fn get_storage_at(&mut self, _contract_address: ContractAddress, _key: StorageKey) -> StateResult<StarkFelt> {
Ok(StarkFelt::ZERO)
}

fn get_nonce_at(&mut self, _contract_address: ContractAddress) -> StateResult<Nonce> {
Ok(Nonce::default())
}

fn get_class_hash_at(&mut self, _contract_address: ContractAddress) -> StateResult<ClassHash> {
Ok(ClassHash::default())
}

fn get_compiled_contract_class(&mut self, _class_hash: ClassHash) -> StateResult<ContractClass> {
Ok(ContractClass::V0(ContractClassV0::default()))
}

fn get_compiled_class_hash(&mut self, _class_hash: ClassHash) -> StateResult<CompiledClassHash> {
Ok(CompiledClassHash::default())
}
}

impl State for DummyState {
fn set_storage_at(
&mut self,
_contract_address: ContractAddress,
_key: StorageKey,
_value: StarkFelt,
) -> StateResult<()> {
Ok(())
}

fn increment_nonce(&mut self, _contract_address: ContractAddress) -> StateResult<()> {
Ok(())
}

fn set_class_hash_at(&mut self, _contract_address: ContractAddress, _class_hash: ClassHash) -> StateResult<()> {
Ok(())
}

fn set_contract_class(&mut self, _class_hash: ClassHash, _contract_class: ContractClass) -> StateResult<()> {
Ok(())
}

fn set_compiled_class_hash(
&mut self,
_class_hash: ClassHash,
_compiled_class_hash: CompiledClassHash,
) -> StateResult<()> {
Ok(())
}

fn to_state_diff(&mut self) -> CommitmentStateDiff {
CommitmentStateDiff {
address_to_class_hash: IndexMap::default(),
address_to_nonce: IndexMap::default(),
storage_updates: IndexMap::default(),
class_hash_to_compiled_class_hash: IndexMap::default(),
}
}

fn add_visited_pcs(&mut self, _class_hash: ClassHash, _pcs: &HashSet<usize>) {}
}
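A usage sketch (not part of the PR, and assuming the blockifier version pinned by this branch): every read on `DummyState` returns a default value and every write is a no-op, so the reported state diff is always empty.

```rust
use blockifier::state::state_api::State;

fn main() {
    let mut state = DummyState;
    // Writes are ignored, so the "diff" of a dummy state never records updates.
    let diff = state.to_state_diff();
    assert!(diff.address_to_class_hash.is_empty());
    assert!(diff.storage_updates.is_empty());
}
```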