diff --git a/Cargo.lock b/Cargo.lock
index 3bce76653d4..03ffce831a8 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2957,7 +2957,6 @@ dependencies = [
  "error-stack",
  "eyre",
  "iroha",
- "iroha_config_base",
  "iroha_primitives",
  "json5",
  "serde",
@@ -2974,14 +2973,10 @@ dependencies = [
  "clap",
  "colored",
  "eyre",
- "iroha_crypto",
  "iroha_data_model",
- "iroha_executor_data_model",
  "iroha_genesis",
- "iroha_primitives",
  "iroha_schema",
  "iroha_schema_gen",
- "iroha_version",
  "parity-scale-codec",
  "serde",
  "serde_json",
diff --git a/Cargo.toml b/Cargo.toml
index 6028d2e1b92..551fc1728f6 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -67,7 +67,6 @@ parking_lot = { version = "0.12.3" }
 
 tempfile = "3.10.1"
 path-absolutize = "3.1.1"
-pathdiff = "0.2.1"
 bytes = "1.6.1"
 
 vergen = { version = "8.3.1", default-features = false }
@@ -84,7 +83,6 @@ prometheus = { version = "0.13.4", default-features = false }
 clap = "4.5.11"
 owo-colors = "4.1.0"
 supports-color = "2.1.0"
-inquire = "0.6.2"
 spinoff = "0.8.0"
 
 criterion = "0.5.1"
@@ -116,7 +114,6 @@ rustc-hash = "1.1.0"
 
 serde = { version = "1.0.204", default-features = false }
 serde_json = { version = "1.0.121", default-features = false }
-serde_yaml = "0.9.34"
 serde_with = { version = "3.9.0", default-features = false }
 parity-scale-codec = { version = "3.6.12", default-features = false }
 json5 = "0.4.1"
diff --git a/crates/iroha/tests/asset.rs b/crates/iroha/tests/asset.rs
index faf5f71fcc0..47db4c2550c 100644
--- a/crates/iroha/tests/asset.rs
+++ b/crates/iroha/tests/asset.rs
@@ -3,7 +3,7 @@ use iroha::{
     crypto::KeyPair,
     data_model::{
         asset::{AssetId, AssetType, AssetValue},
-        isi::error::{InstructionEvaluationError, InstructionExecutionError, Mismatch, TypeError},
+        isi::error::{InstructionEvaluationError, InstructionExecutionError, TypeError},
         prelude::*,
         transaction::error::TransactionRejectionReason,
     },
@@ -400,16 +400,21 @@ fn fail_if_dont_satisfy_spec() {
         .downcast_ref::<TransactionRejectionReason>()
         .unwrap_or_else(|| panic!("Error {err} is not TransactionRejectionReason"));
 
+    let TransactionRejectionReason::Validation(ValidationFail::InstructionFailed(
+        InstructionExecutionError::Evaluate(InstructionEvaluationError::Type(
+            TypeError::AssetType(rejection_reason),
+        )),
+    )) = rejection_reason
+    else {
+        panic!("Wrong rejection reason");
+    };
+    assert_eq!(
+        *rejection_reason.expected(),
+        AssetType::Numeric(NumericSpec::integer()),
+    );
     assert_eq!(
-        rejection_reason,
-        &TransactionRejectionReason::Validation(ValidationFail::InstructionFailed(
-            InstructionExecutionError::Evaluate(InstructionEvaluationError::Type(
-                TypeError::from(Mismatch {
-                    expected: AssetType::Numeric(NumericSpec::integer()),
-                    actual: AssetType::Numeric(NumericSpec::fractional(2))
-                })
-            ))
-        ))
+        *rejection_reason.actual(),
+        AssetType::Numeric(NumericSpec::fractional(2))
     );
 }
 
diff --git a/crates/iroha/tests/events/pipeline.rs b/crates/iroha/tests/events/pipeline.rs
index f0c990ad440..7bb4e1bd97f 100644
--- a/crates/iroha/tests/events/pipeline.rs
+++ b/crates/iroha/tests/events/pipeline.rs
@@ -1,6 +1,5 @@
 use std::time::Duration;
 
-use assert_matches::assert_matches;
 use eyre::Result;
 use futures_util::StreamExt;
 use iroha::data_model::{
@@ -54,20 +53,20 @@ async fn test_with_instruction_and_status(
 
     // Then
     timeout(Duration::from_secs(5), async move {
-        assert_matches!(
-            events.next().await.unwrap().unwrap(),
-            EventBox::Pipeline(PipelineEventBox::Transaction(TransactionEvent {
-                status: TransactionStatus::Queued,
-                ..
-            }))
-        );
-        assert_matches!(
-            events.next().await.unwrap().unwrap(),
-            EventBox::Pipeline(PipelineEventBox::Transaction(TransactionEvent {
-                status,
-                ..
-            })) if status == *should_be
-        );
+        let EventBox::Pipeline(PipelineEventBox::Transaction(event)) =
+            events.next().await.unwrap().unwrap()
+        else {
+            panic!("Expected transaction event");
+        };
+        assert_eq!(*event.status(), TransactionStatus::Queued);
+
+        let EventBox::Pipeline(PipelineEventBox::Transaction(event)) =
+            events.next().await.unwrap().unwrap()
+        else {
+            panic!("Expected transaction event");
+        };
+
+        assert_eq!(event.status(), should_be);
     })
     .await?;
 
diff --git a/crates/iroha/tests/extra_functional/multiple_blocks_created.rs b/crates/iroha/tests/extra_functional/multiple_blocks_created.rs
index 96e45f4185d..8dc7a00086c 100644
--- a/crates/iroha/tests/extra_functional/multiple_blocks_created.rs
+++ b/crates/iroha/tests/extra_functional/multiple_blocks_created.rs
@@ -26,7 +26,7 @@ async fn multiple_blocks_created() -> Result<()> {
     // Given
     let network = NetworkBuilder::new()
         .with_peers(4)
-        .with_genesis_instruction(SetParameter(Parameter::Block(
+        .with_genesis_instruction(SetParameter::new(Parameter::Block(
             BlockParameter::MaxTransactions(NonZero::new(N_MAX_TXS_PER_BLOCK).expect("valid")),
         )))
         .with_pipeline_time(Duration::from_secs(1))
diff --git a/crates/iroha/tests/pagination.rs b/crates/iroha/tests/pagination.rs
index b0be0fb713e..2c8b40405d9 100644
--- a/crates/iroha/tests/pagination.rs
+++ b/crates/iroha/tests/pagination.rs
@@ -15,10 +15,7 @@ fn limits_should_work() -> Result<()> {
 
     let vec = client
         .query(FindAssetsDefinitions::new())
-        .with_pagination(Pagination {
-            limit: Some(nonzero!(7_u64)),
-            offset: 1,
-        })
+        .with_pagination(Pagination::new(Some(nonzero!(7_u64)), 1))
         .execute_all()?;
     assert_eq!(vec.len(), 7);
     Ok(())
@@ -33,10 +30,7 @@ fn reported_length_should_be_accurate() -> Result<()> {
 
     let mut iter = client
         .query(FindAssetsDefinitions::new())
-        .with_pagination(Pagination {
-            limit: Some(nonzero!(7_u64)),
-            offset: 1,
-        })
+        .with_pagination(Pagination::new(Some(nonzero!(7_u64)), 1))
         .with_fetch_size(FetchSize::new(Some(nonzero!(3_u64))))
         .execute()?;
 
@@ -68,10 +62,7 @@ fn fetch_size_should_work() -> Result<()> {
     let query = QueryWithParams::new(
         QueryWithFilter::new(FindAssetsDefinitions::new(), CompoundPredicate::PASS).into(),
         QueryParams::new(
-            Pagination {
-                limit: Some(nonzero!(7_u64)),
-                offset: 1,
-            },
+            Pagination::new(Some(nonzero!(7_u64)), 1),
             Sorting::default(),
             FetchSize::new(Some(nonzero!(3_u64))),
         ),
diff --git a/crates/iroha/tests/queries/asset.rs b/crates/iroha/tests/queries/asset.rs
index a249df880cb..b02761a6221 100644
--- a/crates/iroha/tests/queries/asset.rs
+++ b/crates/iroha/tests/queries/asset.rs
@@ -78,7 +78,7 @@ fn find_asset_total_quantity() -> Result<()> {
             .query(FindAssetsDefinitions::new())
            .filter_with(|asset_definition| asset_definition.id.eq(definition_id.clone()))
             .execute_single()?
-            .total_quantity)
+            .total_quantity())
     };
 
     // Assert that initial total quantity before any registrations and unregistrations is zero
@@ -154,7 +154,7 @@ where
             .query(FindAssetsDefinitions::new())
             .filter_with(|asset_definition| asset_definition.id.eq(definition_id.clone()))
             .execute_single()?
-            .total_quantity)
+            .total_quantity())
     };
 
     // Assert that initial total quantity before any burns and mints is zero
diff --git a/crates/iroha/tests/set_parameter.rs b/crates/iroha/tests/set_parameter.rs
index f06fc3c704a..6d38ffee885 100644
--- a/crates/iroha/tests/set_parameter.rs
+++ b/crates/iroha/tests/set_parameter.rs
@@ -9,22 +9,22 @@ use nonzero_ext::nonzero;
 #[test]
 fn can_change_parameter_value() -> Result<()> {
     let (network, _rt) = NetworkBuilder::new()
-        .with_genesis_instruction(SetParameter(Parameter::Block(
+        .with_genesis_instruction(SetParameter::new(Parameter::Block(
             BlockParameter::MaxTransactions(nonzero!(16u64)),
         )))
         .start_blocking()?;
     let test_client = network.client();
 
     let old_params: Parameters = test_client.query_single(FindParameters::new())?;
-    assert_eq!(old_params.block.max_transactions, nonzero!(16u64));
+    assert_eq!(old_params.block().max_transactions(), nonzero!(16u64));
 
     let new_value = nonzero!(32u64);
-    test_client.submit_blocking(SetParameter(Parameter::Block(
+    test_client.submit_blocking(SetParameter::new(Parameter::Block(
         BlockParameter::MaxTransactions(new_value),
     )))?;
 
     let params = test_client.query_single(FindParameters::new())?;
-    assert_eq!(params.block.max_transactions, new_value);
+    assert_eq!(params.block().max_transactions(), new_value);
 
     Ok(())
 }
diff --git a/crates/iroha/tests/sorting.rs b/crates/iroha/tests/sorting.rs
index 9921a42a218..a5983b99dc1 100644
--- a/crates/iroha/tests/sorting.rs
+++ b/crates/iroha/tests/sorting.rs
@@ -22,10 +22,7 @@ fn correct_pagination_assets_after_creating_new_one() {
     const N_ASSETS: usize = 12;
     // 0 < pagination.start < missing_idx < pagination.end < N_ASSETS
     let missing_indices = vec![N_ASSETS / 2];
-    let pagination = Pagination {
-        limit: Some(nonzero!(N_ASSETS as u64 / 3)),
-        offset: N_ASSETS as u64 / 3,
-    };
+    let pagination = Pagination::new(Some(nonzero!(N_ASSETS as u64 / 3)), N_ASSETS as u64 / 3);
     let xor_filter =
         AssetPredicateBox::build(|asset| asset.id.definition_id.name.starts_with("xor"));
 
diff --git a/crates/iroha/tests/triggers/by_call_trigger.rs b/crates/iroha/tests/triggers/by_call_trigger.rs
index 8c537be33ae..81dd9adcfee 100644
--- a/crates/iroha/tests/triggers/by_call_trigger.rs
+++ b/crates/iroha/tests/triggers/by_call_trigger.rs
@@ -341,7 +341,7 @@ fn only_account_with_permission_can_register_trigger() -> Result<()> {
         .filter_with(|trigger| trigger.id.eq(trigger_id.clone()))
         .execute_single()?;
 
-    assert_eq!(found_trigger.id, trigger_id);
+    assert_eq!(*found_trigger.id(), trigger_id);
 
     Ok(())
 }
@@ -374,17 +374,17 @@ fn unregister_trigger() -> Result<()> {
         .query(FindTriggers::new())
         .filter_with(|trigger| trigger.id.eq(trigger_id.clone()))
         .execute_single()?;
-    let found_action = found_trigger.action;
-    let Executable::Instructions(found_instructions) = found_action.executable else {
+    let found_action = found_trigger.action();
+    let Executable::Instructions(found_instructions) = found_action.executable() else {
         panic!("Expected instructions");
     };
     let found_trigger = Trigger::new(
-        found_trigger.id,
+        found_trigger.id().clone(),
         Action::new(
-            Executable::Instructions(found_instructions),
-            found_action.repeats,
-            found_action.authority,
-            found_action.filter,
+            Executable::Instructions(found_instructions.to_owned()),
+            found_action.repeats(),
+            found_action.authority().clone(),
+            found_action.filter().clone(),
         ),
     );
     assert_eq!(found_trigger, trigger);
diff --git a/crates/iroha/tests/triggers/orphans.rs b/crates/iroha/tests/triggers/orphans.rs
index 725007dbc64..8f8e018391c 100644
--- a/crates/iroha/tests/triggers/orphans.rs
+++ b/crates/iroha/tests/triggers/orphans.rs
@@ -5,13 +5,12 @@ use iroha::{
 use iroha_test_network::*;
 use iroha_test_samples::gen_account_in;
 
-fn find_trigger(iroha: &Client, trigger_id: &TriggerId) -> Option<TriggerId> {
+fn find_trigger(iroha: &Client, trigger_id: &TriggerId) -> Option<Trigger> {
     iroha
         .query(FindTriggers::new())
         .filter_with(|trigger| trigger.id.eq(trigger_id.clone()))
         .execute_single()
         .ok()
-        .map(|trigger| trigger.id)
 }
 
 fn set_up_trigger(iroha: &Client) -> eyre::Result<(DomainId, AccountId, TriggerId)> {
@@ -45,9 +44,10 @@ fn trigger_must_be_removed_on_action_authority_account_removal() -> eyre::Result<()> {
     let (network, _rt) = NetworkBuilder::new().start_blocking()?;
     let iroha = network.client();
     let (_, the_one_who_fails, fail_on_account_events) = set_up_trigger(&iroha)?;
+    let trigger = find_trigger(&iroha, &fail_on_account_events);
     assert_eq!(
-        find_trigger(&iroha, &fail_on_account_events),
-        Some(fail_on_account_events.clone())
+        trigger.as_ref().map(Identifiable::id),
+        Some(&fail_on_account_events.clone())
     );
     iroha.submit_blocking(Unregister::account(the_one_who_fails.clone()))?;
     assert_eq!(find_trigger(&iroha, &fail_on_account_events), None);
@@ -59,9 +59,10 @@ fn trigger_must_be_removed_on_action_authority_domain_removal() -> eyre::Result<()> {
     let (network, _rt) = NetworkBuilder::new().start_blocking()?;
     let iroha = network.client();
     let (failand, _, fail_on_account_events) = set_up_trigger(&iroha)?;
+    let trigger = find_trigger(&iroha, &fail_on_account_events);
     assert_eq!(
-        find_trigger(&iroha, &fail_on_account_events),
-        Some(fail_on_account_events.clone())
+        trigger.as_ref().map(Identifiable::id),
+        Some(&fail_on_account_events.clone())
     );
     iroha.submit_blocking(Unregister::domain(failand.clone()))?;
     assert_eq!(find_trigger(&iroha, &fail_on_account_events), None);
diff --git a/crates/iroha/tests/triggers/time_trigger.rs b/crates/iroha/tests/triggers/time_trigger.rs
index 2d09fe72e24..03c7418b740 100644
--- a/crates/iroha/tests/triggers/time_trigger.rs
+++ b/crates/iroha/tests/triggers/time_trigger.rs
@@ -40,9 +40,7 @@ fn mint_asset_after_3_sec() -> Result<()> {
     let account_id = ALICE_ID.clone();
     let asset_id = AssetId::new(asset_definition_id.clone(), account_id.clone());
 
-    let init_quantity = test_client.query_single(FindAssetQuantityById {
-        id: asset_id.clone(),
-    })?;
+    let init_quantity = test_client.query_single(FindAssetQuantityById::new(asset_id.clone()))?;
 
     let start_time = curr_time();
     assert!(
@@ -64,18 +62,16 @@ fn mint_asset_after_3_sec() -> Result<()> {
 
     // Schedule start is in the future so trigger isn't executed after creating a new block
     test_client.submit_blocking(Log::new(Level::DEBUG, "Just to create block".to_string()))?;
-    let after_registration_quantity = test_client.query_single(FindAssetQuantityById {
-        id: asset_id.clone(),
-    })?;
+    let after_registration_quantity =
+        test_client.query_single(FindAssetQuantityById::new(asset_id.clone()))?;
     assert_eq!(init_quantity, after_registration_quantity);
 
     // Sleep long enough that trigger start is in the past
     std::thread::sleep(network.pipeline_time());
     test_client.submit_blocking(Log::new(Level::DEBUG, "Just to create block".to_string()))?;
-    let after_wait_quantity = test_client.query_single(FindAssetQuantityById {
-        id: asset_id.clone(),
-    })?;
+    let after_wait_quantity =
+        test_client.query_single(FindAssetQuantityById::new(asset_id.clone()))?;
 
     // Schedule is in the past now so trigger is executed
     assert_eq!(
         init_quantity.checked_add(1u32.into()).unwrap(),
@@ -168,7 +164,7 @@ fn mint_nft_for_every_user_every_1_sec() -> Result<()> {
 
     let start_time = curr_time() + offset;
     let schedule = TimeSchedule::starting_at(start_time).with_period(TRIGGER_PERIOD);
-    let filter = TimeEventFilter(ExecutionTime::Schedule(schedule));
+    let filter = TimeEventFilter::new(ExecutionTime::Schedule(schedule));
     let register_trigger = Register::trigger(Trigger::new(
         "mint_nft_for_all".parse()?,
         Action::new(
diff --git a/crates/iroha/tests/tx_history.rs b/crates/iroha/tests/tx_history.rs
index 2c002a5231c..265ff3fcc3a 100644
--- a/crates/iroha/tests/tx_history.rs
+++ b/crates/iroha/tests/tx_history.rs
@@ -41,10 +41,7 @@ fn client_has_rejected_and_accepted_txs_should_return_tx_history() -> Result<()> {
     let transactions = client
         .query(FindTransactions::new())
         .filter_with(|tx| tx.transaction.value.authority.eq(account_id.clone()))
-        .with_pagination(Pagination {
-            limit: Some(nonzero!(50_u64)),
-            offset: 1,
-        })
+        .with_pagination(Pagination::new(Some(nonzero!(50_u64)), 1))
         .execute_all()?;
     assert_eq!(transactions.len(), 50);
 
diff --git a/crates/iroha/tests/upgrade.rs b/crates/iroha/tests/upgrade.rs
index 4e456510dec..c6f2c04f995 100644
--- a/crates/iroha/tests/upgrade.rs
+++ b/crates/iroha/tests/upgrade.rs
@@ -146,10 +146,9 @@ fn executor_upgrade_should_revoke_removed_permissions() -> Result<()> {
        .query(FindRoles::new())
         .execute_all()?
         .into_iter()
-        .find(|role| role.id == test_role_id)
+        .find(|role| *role.id() == test_role_id)
         .expect("Failed to find Role")
-        .permissions
-        .iter()
+        .permissions()
         .any(|permission| {
             CanUnregisterDomain::try_from(permission)
                 .is_ok_and(|permission| permission == can_unregister_domain)
         })
@@ -179,10 +178,9 @@ fn executor_upgrade_should_revoke_removed_permissions() -> Result<()> {
         .query(FindRoles::new())
         .execute_all()?
         .into_iter()
-        .find(|role| role.id == test_role_id)
+        .find(|role| *role.id() == test_role_id)
         .expect("Failed to find Role")
-        .permissions
-        .iter()
+        .permissions()
         .any(|permission| {
             CanUnregisterDomain::try_from(permission)
                 .is_ok_and(|permission| permission == can_unregister_domain)
         })
@@ -343,11 +341,10 @@ fn migration_should_cause_upgrade_event() {
             .await
             .unwrap();
         while let Some(event) = stream.try_next().await.unwrap() {
-            if let EventBox::Data(DataEvent::Executor(ExecutorEvent::Upgraded(ExecutorUpgrade {
-                new_data_model,
-            }))) = event
+            if let EventBox::Data(DataEvent::Executor(ExecutorEvent::Upgraded(executor_upgrade))) =
+                event
             {
-                assert!(!new_data_model.permissions.is_empty());
+                assert!(!executor_upgrade.new_data_model().permissions().is_empty());
                 break;
             }
         }
diff --git a/crates/iroha_cli/Cargo.toml b/crates/iroha_cli/Cargo.toml
index 019db3bcd9b..bf4dbce600c 100644
--- a/crates/iroha_cli/Cargo.toml
+++ b/crates/iroha_cli/Cargo.toml
@@ -29,7 +29,6 @@ path = "src/main.rs"
 [dependencies]
 iroha = { workspace = true }
 iroha_primitives = { workspace = true }
-iroha_config_base = { workspace = true }
 
 thiserror = { workspace = true }
 error-stack = { workspace = true, features = ["eyre"] }
diff --git a/crates/iroha_cli/src/main.rs b/crates/iroha_cli/src/main.rs
index 7da333bf27e..f0e59471057 100644
--- a/crates/iroha_cli/src/main.rs
+++ b/crates/iroha_cli/src/main.rs
@@ -17,8 +17,6 @@ use thiserror::Error;
 /// Re-usable clap `--metadata <PATH>` (`-m`) argument.
 /// Should be combined with `#[command(flatten)]` attr.
 #[derive(clap::Args, Debug, Clone)]
-// FIXME: `pub` is needed because Rust complains about "leaking private types"
-// when this type is used inside of modules. I don't know how to fix it.
 pub struct MetadataArgs {
     /// The JSON/JSON5 file with key-value metadata pairs
     #[arg(short, long, value_name("PATH"), value_hint(clap::ValueHint::FilePath))]
diff --git a/crates/iroha_codec/Cargo.toml b/crates/iroha_codec/Cargo.toml
index fad92c53da8..fd1bac9b464 100644
--- a/crates/iroha_codec/Cargo.toml
+++ b/crates/iroha_codec/Cargo.toml
@@ -11,13 +11,8 @@ license.workspace = true
 workspace = true
 
 [dependencies]
-iroha_data_model = { workspace = true, features = ["http"] }
-iroha_executor_data_model = { workspace = true }
-iroha_primitives = { workspace = true }
 iroha_schema = { workspace = true }
 iroha_schema_gen = { workspace = true }
-iroha_crypto = { workspace = true }
-iroha_version = { workspace = true }
 iroha_genesis = { workspace = true }
 
 clap = { workspace = true, features = ["derive", "cargo", "env", "string"] }
@@ -28,6 +23,9 @@ serde_json = { workspace = true, features = ["std"]}
 serde = { workspace = true }
 supports-color = { workspace = true }
 
+[dev-dependencies]
+iroha_data_model = { workspace = true }
+
 [build-dependencies]
 iroha_data_model = { workspace = true }
 
diff --git a/crates/iroha_data_model/src/asset.rs b/crates/iroha_data_model/src/asset.rs
index f0edc5dc68e..89dd37fbebb 100644
--- a/crates/iroha_data_model/src/asset.rs
+++ b/crates/iroha_data_model/src/asset.rs
@@ -130,7 +130,7 @@ mod model {
         /// The total amount of this asset in existence.
         ///
         /// For numeric assets - it is the sum of all asset values. For store assets - it is the count of all assets.
-        #[getset(get = "pub")]
+        #[getset(get_copy = "pub")]
         pub total_quantity: Numeric,
     }
 
diff --git a/crates/iroha_data_model/src/isi.rs b/crates/iroha_data_model/src/isi.rs
index 4bf8ec295a3..340626d3493 100644
--- a/crates/iroha_data_model/src/isi.rs
+++ b/crates/iroha_data_model/src/isi.rs
@@ -1232,6 +1232,7 @@ pub mod error {
 
     #[model]
     mod model {
+        use getset::Getters;
         use serde::{Deserialize, Serialize};
 
         use super::*;
@@ -1469,6 +1470,7 @@
             Eq,
             PartialOrd,
             Ord,
+            Getters,
             Deserialize,
             Serialize,
             Decode,
@@ -1479,12 +1481,27 @@
         #[ffi_type]
         pub struct RepetitionError {
             /// Instruction type
+            #[getset(get = "pub")]
             pub instruction: InstructionType,
             /// Id of the object being repeated
             pub id: IdBox,
         }
     }
 
+    impl<T> Mismatch<T> {
+        /// The value that is needed for normal execution
+        pub fn expected(&self) -> &T {
+            &self.expected
+        }
+    }
+
+    impl<T> Mismatch<T> {
+        /// The value that caused the error
+        pub fn actual(&self) -> &T {
+            &self.actual
+        }
+    }
+
     impl From<TypeError> for InstructionExecutionError {
         fn from(err: TypeError) -> Self {
             Self::Evaluate(InstructionEvaluationError::Type(err))
diff --git a/crates/iroha_data_model/src/trigger.rs b/crates/iroha_data_model/src/trigger.rs
index 34b6825fc01..a14e7ae85f4 100644
--- a/crates/iroha_data_model/src/trigger.rs
+++ b/crates/iroha_data_model/src/trigger.rs
@@ -160,8 +160,8 @@ pub mod action {
         /// The repeating scheme of the action. It's kept as part of the
         /// action and not inside the [`Trigger`] type, so that further
         /// sanity checking can be done.
-        pub fn repeats(&self) -> &Repeats {
-            &self.repeats
+        pub fn repeats(&self) -> Repeats {
+            self.repeats
         }
         /// Account executing this action
         pub fn authority(&self) -> &AccountId {
diff --git a/crates/iroha_data_model_derive/Cargo.toml b/crates/iroha_data_model_derive/Cargo.toml
index be1b79f628e..5b51fd9a2bc 100644
--- a/crates/iroha_data_model_derive/Cargo.toml
+++ b/crates/iroha_data_model_derive/Cargo.toml
@@ -22,7 +22,7 @@ manyhow = { workspace = true }
 iroha_macro_utils = { workspace = true }
 
 [dev-dependencies]
-iroha_data_model = { workspace = true, features = ["http"] }
+iroha_data_model = { workspace = true }
 iroha_schema = { workspace = true }
 parity-scale-codec = { workspace = true }
 derive_more = { workspace = true }
diff --git a/crates/iroha_genesis/Cargo.toml b/crates/iroha_genesis/Cargo.toml
index 3167e2d7975..ef04688fe56 100644
--- a/crates/iroha_genesis/Cargo.toml
+++ b/crates/iroha_genesis/Cargo.toml
@@ -13,7 +13,7 @@ workspace = true
 [dependencies]
 iroha_crypto = { workspace = true }
 iroha_schema = { workspace = true }
-iroha_data_model = { workspace = true, features = ["http"] }
+iroha_data_model = { workspace = true, features = ["std"] }
 derive_more = { workspace = true, features = ["deref"] }
 serde = { workspace = true, features = ["derive"] }
 
diff --git a/crates/iroha_swarm/Cargo.toml b/crates/iroha_swarm/Cargo.toml
index 92f47d97e0b..8831c10bd73 100644
--- a/crates/iroha_swarm/Cargo.toml
+++ b/crates/iroha_swarm/Cargo.toml
@@ -15,14 +15,14 @@ iroha_data_model.workspace = true
 iroha_primitives.workspace = true
 color-eyre.workspace = true
 path-absolutize.workspace = true
-pathdiff.workspace = true
 owo-colors = { workspace = true, features = ["supports-colors"] }
 serde = { workspace = true, features = ["derive"] }
 clap = { workspace = true, features = ["derive"] }
-serde_yaml.workspace = true
 serde_with = { workspace = true, features = ["json", "macros", "hex"] }
-inquire.workspace = true
 displaydoc.workspace = true
+serde_yaml = "0.9.34"
+pathdiff = "0.2.1"
+inquire = "0.6.2"
 
 [dev-dependencies]
 serde_json.workspace = true
diff --git a/crates/iroha_test_network/src/lib.rs b/crates/iroha_test_network/src/lib.rs
index 01276976b44..a03e70561b9 100644
--- a/crates/iroha_test_network/src/lib.rs
+++ b/crates/iroha_test_network/src/lib.rs
@@ -327,10 +327,10 @@ impl NetworkBuilder {
             block_time = duration / 3;
             commit_time = duration / 2;
             extra_isi.extend([
-                InstructionBox::SetParameter(SetParameter(Parameter::Sumeragi(
+                InstructionBox::SetParameter(SetParameter::new(Parameter::Sumeragi(
                     SumeragiParameter::BlockTimeMs(block_time.as_millis() as u64),
                 ))),
-                InstructionBox::SetParameter(SetParameter(Parameter::Sumeragi(
+                InstructionBox::SetParameter(SetParameter::new(Parameter::Sumeragi(
                     SumeragiParameter::CommitTimeMs(commit_time.as_millis() as u64),
                 ))),
             ]);
@@ -341,10 +341,10 @@ impl NetworkBuilder {
 
         let genesis = config::genesis(
             [
-                InstructionBox::SetParameter(SetParameter(Parameter::Sumeragi(
+                InstructionBox::SetParameter(SetParameter::new(Parameter::Sumeragi(
                     SumeragiParameter::BlockTimeMs(block_time.as_millis() as u64),
                 ))),
-                InstructionBox::SetParameter(SetParameter(Parameter::Sumeragi(
+                InstructionBox::SetParameter(SetParameter::new(Parameter::Sumeragi(
                     SumeragiParameter::CommitTimeMs(commit_time.as_millis() as u64),
                 ))),
             ]
diff --git a/crates/irohad/Cargo.toml b/crates/irohad/Cargo.toml
index 9e76ed838d7..ee75417b35d 100644
--- a/crates/irohad/Cargo.toml
+++ b/crates/irohad/Cargo.toml
@@ -40,7 +40,7 @@ maintenance = { status = "actively-developed" }
 iroha_core = { workspace = true }
 iroha_logger = { workspace = true }
 iroha_futures = { workspace = true }
-iroha_data_model = { workspace = true, features = ["http"] }
+iroha_data_model = { workspace = true }
 iroha_primitives = { workspace = true }
 iroha_telemetry = { workspace = true, optional = true }
 iroha_config = { workspace = true }
diff --git a/wasm_samples/executor_custom_instructions_complex/src/lib.rs b/wasm_samples/executor_custom_instructions_complex/src/lib.rs
index 58dc40b82ef..549c7e3c162 100644
--- a/wasm_samples/executor_custom_instructions_complex/src/lib.rs
+++ b/wasm_samples/executor_custom_instructions_complex/src/lib.rs
@@ -87,7 +87,7 @@ impl executor_custom_data_model::complex_isi::Context for Context<'_> {
                 _ => unreachable!(),
             })?;
 
-            Ok(*asset_definition.total_quantity())
+            Ok(asset_definition.total_quantity())
         }
     };
 
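Below is a minimal usage sketch (not part of the change set) of the constructor/getter style this diff migrates client code to. `Pagination::new`, `SetParameter::new`, `FindAssetsDefinitions`, `FindParameters`, and the `block().max_transactions()` getter are taken from the hunks above; the import paths, the `Client` handle, and the surrounding function are assumptions for illustration only.

```rust
// Sketch, assuming the client API as exercised by the tests touched in this diff.
// Import paths and the function itself are hypothetical; the constructors and
// getters mirror the hunks above.
use eyre::Result;
use iroha::{client::Client, data_model::prelude::*};
use nonzero_ext::nonzero;

fn new_style_usage(client: &Client) -> Result<()> {
    // Struct-literal `Pagination { limit, offset }` becomes `Pagination::new(limit, offset)`.
    let definitions = client
        .query(FindAssetsDefinitions::new())
        .with_pagination(Pagination::new(Some(nonzero!(7_u64)), 1))
        .execute_all()?;
    println!("fetched {} asset definitions", definitions.len());

    // Tuple-struct construction `SetParameter(..)` becomes `SetParameter::new(..)`.
    client.submit_blocking(SetParameter::new(Parameter::Block(
        BlockParameter::MaxTransactions(nonzero!(16_u64)),
    )))?;

    // Public fields are now read through getters instead of direct field access.
    let params: Parameters = client.query_single(FindParameters::new())?;
    println!("max txs per block: {}", params.block().max_transactions());
    Ok(())
}
```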