diff --git a/pallets/common/src/benchmarking.rs b/pallets/common/src/benchmarking.rs index c7837be160..6694314f44 100644 --- a/pallets/common/src/benchmarking.rs +++ b/pallets/common/src/benchmarking.rs @@ -17,14 +17,13 @@ #![allow(missing_docs)] use sp_std::vec::Vec; -use crate::{Config, CollectionHandle, Pallet}; +use crate::{Config, CollectionHandle, Pallet, BenchmarkPropertyWriter}; use pallet_evm::account::CrossAccountId; use frame_benchmarking::{benchmarks, account}; use up_data_structs::{ CollectionMode, CreateCollectionData, CollectionId, Property, PropertyKey, PropertyValue, - CollectionPermissions, NestingPermissions, AccessMode, PropertiesPermissionMap, - MAX_COLLECTION_NAME_LENGTH, MAX_COLLECTION_DESCRIPTION_LENGTH, MAX_TOKEN_PREFIX_LENGTH, - MAX_PROPERTIES_PER_ITEM, + CollectionPermissions, NestingPermissions, AccessMode, MAX_COLLECTION_NAME_LENGTH, + MAX_COLLECTION_DESCRIPTION_LENGTH, MAX_TOKEN_PREFIX_LENGTH, MAX_PROPERTIES_PER_ITEM, }; use frame_support::{ traits::{Get, fungible::Balanced, Imbalance, tokens::Precision}, @@ -124,16 +123,6 @@ fn create_collection( ) } -pub fn load_is_admin_and_property_permissions( - collection: &CollectionHandle, - sender: &T::CrossAccountId, -) -> (bool, PropertiesPermissionMap) { - ( - collection.is_owner_or_admin(sender), - >::property_permissions(collection.id), - ) -} - /// Helper macros, which handles all benchmarking preparation in semi-declarative way /// /// `name` is a substrate account @@ -227,11 +216,11 @@ benchmarks! { }: {collection_handle.check_allowlist(&sender)?;} - init_token_properties_common { + property_writer_load_collection_info { bench_init!{ owner: sub; collection: collection(owner); sender: sub; sender: cross_from_sub(sender); }; - }: {load_is_admin_and_property_permissions(&collection, &sender);} + }: {>::load_collection_info(&&collection, &sender);} } diff --git a/pallets/common/src/lib.rs b/pallets/common/src/lib.rs index fbbd29048a..a3346ab9fe 100644 --- a/pallets/common/src/lib.rs +++ b/pallets/common/src/lib.rs @@ -870,7 +870,7 @@ pub mod pallet { } /// Value representation with delayed initialization time. -pub struct LazyValue T> { +pub struct LazyValue { value: Option, f: Option, } @@ -2372,160 +2372,45 @@ impl From for Error { } } -/// A marker structure that enables the writer implementation -/// to provide the interface to write properties to **newly created** tokens. -pub struct NewTokenPropertyWriter; - -/// A marker structure that enables the writer implementation -/// to provide the interface to write properties to **already existing** tokens. -pub struct ExistingTokenPropertyWriter; - /// The type-safe interface for writing properties (setting or deleting) to tokens. /// It has two distinct implementations for newly created tokens and existing ones. /// /// This type utilizes the lazy evaluation to avoid repeating the computation /// of several performance-heavy or PoV-heavy tasks, /// such as checking the indirect ownership or reading the token property permissions. 
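The lazy evaluation described above is built on the crate's `LazyValue` type. Below is a minimal standalone sketch of that pattern, assuming nothing beyond the `new`/`value`/`has_value` surface the diff itself uses; the internals and messages are illustrative, not the pallet's exact implementation. The point is that the closure runs at most once, on first access, so an expensive read such as `is_owner_or_admin` or `property_permissions` is only paid for when a property check actually needs it.

struct LazyValue<T, F: FnOnce() -> T> {
    value: Option<T>,
    f: Option<F>,
}

impl<T, F: FnOnce() -> T> LazyValue<T, F> {
    fn new(f: F) -> Self {
        Self { value: None, f: Some(f) }
    }

    /// Compute the value on first access, then return a reference to the cached result.
    fn value(&mut self) -> &T {
        if self.value.is_none() {
            // The initializer is consumed exactly once; later calls reuse the cache.
            let f = self.f.take().expect("initializer present before first use");
            self.value = Some(f());
        }
        self.value.as_ref().expect("value was just initialized")
    }

    /// Whether the value has been computed, i.e. whether its cost was actually paid.
    fn has_value(&self) -> bool {
        self.value.is_some()
    }
}

fn main() {
    let mut is_admin = LazyValue::new(|| {
        println!("expensive storage read happens once");
        true
    });
    assert!(!is_admin.has_value());
    assert!(*is_admin.value()); // first access runs the closure
    assert!(*is_admin.value()); // cached; the closure does not run again
    assert!(is_admin.has_value());
}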
-pub struct PropertyWriter< - 'a, - T, - Handle, - WriterVariant, - FIsAdmin, - FPropertyPermissions, - FCheckTokenExist, - FGetProperties, -> where - T: Config, - FIsAdmin: FnOnce() -> bool, - FPropertyPermissions: FnOnce() -> PropertiesPermissionMap, -{ +pub struct PropertyWriter<'a, WriterVariant, T, Handle, FIsAdmin, FPropertyPermissions> { collection: &'a Handle, - is_collection_admin: LazyValue, - property_permissions: LazyValue, - check_token_exist: FCheckTokenExist, - get_properties: FGetProperties, + collection_lazy_info: PropertyWriterLazyCollectionInfo, _phantom: PhantomData<(T, WriterVariant)>, } -impl<'a, T, Handle, FIsAdmin, FPropertyPermissions, FCheckTokenExist, FGetProperties> - PropertyWriter< - 'a, - T, - Handle, - NewTokenPropertyWriter, - FIsAdmin, - FPropertyPermissions, - FCheckTokenExist, - FGetProperties, - > where - T: Config, - Handle: CommonCollectionOperations + Deref>, - FIsAdmin: FnOnce() -> bool, - FPropertyPermissions: FnOnce() -> PropertiesPermissionMap, - FCheckTokenExist: Copy + FnOnce(TokenId) -> bool, - FGetProperties: Copy + FnOnce(TokenId) -> TokenProperties, -{ - /// A function to write properties to a **newly created** token. - pub fn write_token_properties( - &mut self, - mint_target_is_sender: bool, - token_id: TokenId, - properties_updates: impl Iterator, - log: evm_coder::ethereum::Log, - ) -> DispatchResult { - self.internal_write_token_properties( - token_id, - properties_updates.map(|p| (p.key, Some(p.value))), - |_| Ok(mint_target_is_sender), - log, - ) - } -} - -impl<'a, T, Handle, FIsAdmin, FPropertyPermissions, FCheckTokenExist, FGetProperties> - PropertyWriter< - 'a, - T, - Handle, - ExistingTokenPropertyWriter, - FIsAdmin, - FPropertyPermissions, - FCheckTokenExist, - FGetProperties, - > where - T: Config, - Handle: CommonCollectionOperations + Deref>, - FIsAdmin: FnOnce() -> bool, - FPropertyPermissions: FnOnce() -> PropertiesPermissionMap, - FCheckTokenExist: Copy + FnOnce(TokenId) -> bool, - FGetProperties: Copy + FnOnce(TokenId) -> TokenProperties, -{ - /// A function to write properties to an **already existing** token. 
- pub fn write_token_properties( - &mut self, - sender: &T::CrossAccountId, - token_id: TokenId, - properties_updates: impl Iterator)>, - nesting_budget: &dyn Budget, - log: evm_coder::ethereum::Log, - ) -> DispatchResult { - self.internal_write_token_properties( - token_id, - properties_updates, - |collection| collection.check_token_indirect_owner(token_id, sender, nesting_budget), - log, - ) - } -} - -impl< - 'a, - T, - Handle, - WriterVariant, - FIsAdmin, - FPropertyPermissions, - FCheckTokenExist, - FGetProperties, - > - PropertyWriter< - 'a, - T, - Handle, - WriterVariant, - FIsAdmin, - FPropertyPermissions, - FCheckTokenExist, - FGetProperties, - > where +impl<'a, T, Handle, WriterVariant, FIsAdmin, FPropertyPermissions> + PropertyWriter<'a, WriterVariant, T, Handle, FIsAdmin, FPropertyPermissions> +where T: Config, Handle: CommonCollectionOperations + Deref>, FIsAdmin: FnOnce() -> bool, FPropertyPermissions: FnOnce() -> PropertiesPermissionMap, - FCheckTokenExist: Copy + FnOnce(TokenId) -> bool, - FGetProperties: Copy + FnOnce(TokenId) -> TokenProperties, { - fn internal_write_token_properties( + fn internal_write_token_properties( &mut self, token_id: TokenId, + mut token_lazy_info: PropertyWriterLazyTokenInfo< + FCheckTokenExist, + FCheckTokenOwner, + FGetProperties, + >, properties_updates: impl Iterator)>, - check_token_owner: FCheckTokenOwner, log: evm_coder::ethereum::Log, ) -> DispatchResult where - FCheckTokenOwner: FnOnce(&Handle) -> Result, + FCheckTokenExist: FnOnce() -> bool, + FCheckTokenOwner: FnOnce() -> Result, + FGetProperties: FnOnce() -> TokenProperties, { - let get_properties = self.get_properties; - let mut stored_properties = LazyValue::new(move || get_properties(token_id)); - - let mut is_token_owner = LazyValue::new(|| check_token_owner(self.collection)); - - let check_token_exist = self.check_token_exist; - let mut is_token_exist = LazyValue::new(move || check_token_exist(token_id)); - for (key, value) in properties_updates { let permission = self + .collection_lazy_info .property_permissions .value() .get(&key) @@ -2534,7 +2419,11 @@ impl< match permission { PropertyPermission { mutable: false, .. } - if stored_properties.value().get(&key).is_some() => + if token_lazy_info + .stored_properties + .value() + .get(&key) + .is_some() => { return Err(>::NoPermission.into()); } @@ -2546,15 +2435,16 @@ impl< } => check_token_permissions::( collection_admin, token_owner, - &mut self.is_collection_admin, - &mut is_token_owner, - &mut is_token_exist, + &mut self.collection_lazy_info.is_collection_admin, + &mut token_lazy_info.is_token_owner, + &mut token_lazy_info.is_token_exist, )?, } match value { Some(value) => { - stored_properties + token_lazy_info + .stored_properties .value_mut() .try_set(key.clone(), value) .map_err(>::from)?; @@ -2566,7 +2456,8 @@ impl< )); } None => { - stored_properties + token_lazy_info + .stored_properties .value_mut() .remove(&key) .map_err(>::from)?; @@ -2580,142 +2471,330 @@ impl< } } - let properties_changed = stored_properties.has_value(); + let properties_changed = token_lazy_info.stored_properties.has_value(); if properties_changed { >::deposit_log(log); self.collection - .set_token_properties_raw(token_id, stored_properties.into_inner()); + .set_token_properties_raw(token_id, token_lazy_info.stored_properties.into_inner()); } Ok(()) } } -/// Create a [`PropertyWriter`] for newly created tokens. 
-pub fn property_writer_for_new_token<'a, T, Handle>( - collection: &'a Handle, - sender: &'a T::CrossAccountId, -) -> PropertyWriter< - 'a, - T, - Handle, - NewTokenPropertyWriter, - impl FnOnce() -> bool + 'a, - impl FnOnce() -> PropertiesPermissionMap + 'a, - impl Copy + FnOnce(TokenId) -> bool + 'a, - impl Copy + FnOnce(TokenId) -> TokenProperties + 'a, -> +/// A helper structure for the [`PropertyWriter`] that holds +/// the collection-related info. The info is loaded using lazy evaluation. +/// This info is common for any token for which we write properties. +pub struct PropertyWriterLazyCollectionInfo { + is_collection_admin: LazyValue, + property_permissions: LazyValue, +} + +/// A helper structure for the [`PropertyWriter`] that holds +/// the token-related info. The info is loaded using lazy evaluation. +pub struct PropertyWriterLazyTokenInfo { + is_token_exist: LazyValue, + is_token_owner: LazyValue, FCheckTokenOwner>, + stored_properties: LazyValue, +} + +impl + PropertyWriterLazyTokenInfo +where + FCheckTokenExist: FnOnce() -> bool, + FCheckTokenOwner: FnOnce() -> Result, + FGetProperties: FnOnce() -> TokenProperties, +{ + /// Create a lazy token info. + pub fn new( + check_token_exist: FCheckTokenExist, + check_token_owner: FCheckTokenOwner, + get_token_properties: FGetProperties, + ) -> Self { + Self { + is_token_exist: LazyValue::new(check_token_exist), + is_token_owner: LazyValue::new(check_token_owner), + stored_properties: LazyValue::new(get_token_properties), + } + } +} + +/// A marker structure that enables the writer implementation +/// to provide the interface to write properties to **newly created** tokens. +pub struct NewTokenPropertyWriter(PhantomData); +impl NewTokenPropertyWriter { + /// Creates a [`PropertyWriter`] for **newly created** tokens. + pub fn new<'a, Handle>( + collection: &'a Handle, + sender: &'a T::CrossAccountId, + ) -> PropertyWriter< + 'a, + Self, + T, + Handle, + impl FnOnce() -> bool + 'a, + impl FnOnce() -> PropertiesPermissionMap + 'a, + > + where + T: Config, + Handle: CommonCollectionOperations + Deref>, + { + PropertyWriter { + collection, + collection_lazy_info: PropertyWriterLazyCollectionInfo { + is_collection_admin: LazyValue::new(|| collection.is_owner_or_admin(sender)), + property_permissions: LazyValue::new(|| { + >::property_permissions(collection.id) + }), + }, + _phantom: PhantomData, + } + } +} + +impl<'a, T, Handle, FIsAdmin, FPropertyPermissions> + PropertyWriter<'a, NewTokenPropertyWriter, T, Handle, FIsAdmin, FPropertyPermissions> where T: Config, Handle: CommonCollectionOperations + Deref>, + FIsAdmin: FnOnce() -> bool, + FPropertyPermissions: FnOnce() -> PropertiesPermissionMap, { - PropertyWriter { - collection, - is_collection_admin: LazyValue::new(|| collection.is_owner_or_admin(sender)), - property_permissions: LazyValue::new(|| >::property_permissions(collection.id)), - check_token_exist: |token_id| { - debug_assert!(collection.token_exists(token_id)); + /// A function to write properties to a **newly created** token. 
+ pub fn write_token_properties( + &mut self, + mint_target_is_sender: bool, + token_id: TokenId, + properties_updates: impl Iterator, + log: evm_coder::ethereum::Log, + ) -> DispatchResult { + let check_token_exist = || { + debug_assert!(self.collection.token_exists(token_id)); true - }, - get_properties: |token_id| { - debug_assert!(collection.get_token_properties_raw(token_id).is_none()); + }; + + let check_token_owner = || Ok(mint_target_is_sender); + + let get_token_properties = || { + debug_assert!(self.collection.get_token_properties_raw(token_id).is_none()); TokenProperties::new() - }, - _phantom: PhantomData, + }; + + self.internal_write_token_properties( + token_id, + PropertyWriterLazyTokenInfo::new( + check_token_exist, + check_token_owner, + get_token_properties, + ), + properties_updates.map(|p| (p.key, Some(p.value))), + log, + ) } } -#[cfg(feature = "runtime-benchmarks")] -/// Create a `PropertyWriter` with preloaded `is_collection_admin` and `property_permissions. -/// Also: -/// * it will return `true` for the token ownership check. -/// * it will return empty stored properties without reading them from the storage. -pub fn collection_info_loaded_property_writer( - collection: &Handle, - is_collection_admin: bool, - property_permissions: PropertiesPermissionMap, -) -> PropertyWriter< - T, - Handle, - NewTokenPropertyWriter, - impl FnOnce() -> bool, - impl FnOnce() -> PropertiesPermissionMap, - impl Copy + FnOnce(TokenId) -> bool, - impl Copy + FnOnce(TokenId) -> TokenProperties, -> +/// A marker structure that enables the writer implementation +/// to provide the interface to write properties to **already existing** tokens. +pub struct ExistingTokenPropertyWriter(PhantomData); +impl ExistingTokenPropertyWriter { + /// Creates a [`PropertyWriter`] for **already existing** tokens. + pub fn new<'a, Handle>( + collection: &'a Handle, + sender: &'a T::CrossAccountId, + ) -> PropertyWriter< + 'a, + Self, + T, + Handle, + impl FnOnce() -> bool + 'a, + impl FnOnce() -> PropertiesPermissionMap + 'a, + > + where + Handle: CommonCollectionOperations + Deref>, + { + PropertyWriter { + collection, + collection_lazy_info: PropertyWriterLazyCollectionInfo { + is_collection_admin: LazyValue::new(|| collection.is_owner_or_admin(sender)), + property_permissions: LazyValue::new(|| { + >::property_permissions(collection.id) + }), + }, + _phantom: PhantomData, + } + } +} + +impl<'a, T, Handle, FIsAdmin, FPropertyPermissions> + PropertyWriter<'a, ExistingTokenPropertyWriter, T, Handle, FIsAdmin, FPropertyPermissions> where T: Config, Handle: CommonCollectionOperations + Deref>, + FIsAdmin: FnOnce() -> bool, + FPropertyPermissions: FnOnce() -> PropertiesPermissionMap, { - PropertyWriter { - collection, - is_collection_admin: LazyValue::new(move || is_collection_admin), - property_permissions: LazyValue::new(move || property_permissions), - check_token_exist: |_token_id| true, - get_properties: |_token_id| TokenProperties::new(), - _phantom: PhantomData, + /// A function to write properties to an **already existing** token. 
+ pub fn write_token_properties( + &mut self, + sender: &T::CrossAccountId, + token_id: TokenId, + properties_updates: impl Iterator)>, + nesting_budget: &dyn Budget, + log: evm_coder::ethereum::Log, + ) -> DispatchResult { + let check_token_exist = || self.collection.token_exists(token_id); + let check_token_owner = || { + self.collection + .check_token_indirect_owner(token_id, sender, nesting_budget) + }; + let get_token_properties = || { + self.collection + .get_token_properties_raw(token_id) + .unwrap_or_default() + }; + + self.internal_write_token_properties( + token_id, + PropertyWriterLazyTokenInfo::new( + check_token_exist, + check_token_owner, + get_token_properties, + ), + properties_updates, + log, + ) } } -/// Create a [`PropertyWriter`] for already existing tokens. -pub fn property_writer_for_existing_token<'a, T, Handle>( - collection: &'a Handle, - sender: &'a T::CrossAccountId, -) -> PropertyWriter< - 'a, - T, - Handle, - ExistingTokenPropertyWriter, - impl FnOnce() -> bool + 'a, - impl FnOnce() -> PropertiesPermissionMap + 'a, - impl Copy + FnOnce(TokenId) -> bool + 'a, - impl Copy + FnOnce(TokenId) -> TokenProperties + 'a, -> +/// A marker structure that enables the writer implementation +/// to benchmark the token properties writing. +#[cfg(feature = "runtime-benchmarks")] +pub struct BenchmarkPropertyWriter(PhantomData); + +#[cfg(feature = "runtime-benchmarks")] +impl BenchmarkPropertyWriter { + /// Creates a [`PropertyWriter`] for benchmarking tokens properties writing. + pub fn new<'a, Handle, FIsAdmin, FPropertyPermissions>( + collection: &Handle, + collection_lazy_info: PropertyWriterLazyCollectionInfo, + ) -> PropertyWriter + where + Handle: CommonCollectionOperations + Deref>, + FIsAdmin: FnOnce() -> bool, + FPropertyPermissions: FnOnce() -> PropertiesPermissionMap, + { + PropertyWriter { + collection, + collection_lazy_info, + _phantom: PhantomData, + } + } + + /// Load the [`PropertyWriterLazyCollectionInfo`] from the storage. + pub fn load_collection_info( + collection_handle: &Handle, + sender: &T::CrossAccountId, + ) -> PropertyWriterLazyCollectionInfo< + impl FnOnce() -> bool, + impl FnOnce() -> PropertiesPermissionMap, + > + where + Handle: Deref>, + { + let is_collection_admin = collection_handle.is_owner_or_admin(sender); + let property_permissions = >::property_permissions(collection_handle.id); + + PropertyWriterLazyCollectionInfo { + is_collection_admin: LazyValue::new(move || is_collection_admin), + property_permissions: LazyValue::new(move || property_permissions), + } + } + + /// Load the [`PropertyWriterLazyTokenInfo`] with token properties from the storage. 
+ pub fn load_token_properties( + collection: &Handle, + token_id: TokenId, + ) -> PropertyWriterLazyTokenInfo< + impl FnOnce() -> bool, + impl FnOnce() -> Result, + impl FnOnce() -> TokenProperties, + > + where + Handle: CommonCollectionOperations, + { + let stored_properties = collection + .get_token_properties_raw(token_id) + .unwrap_or_default(); + + PropertyWriterLazyTokenInfo { + is_token_exist: LazyValue::new(|| true), + is_token_owner: LazyValue::new(|| Ok(true)), + stored_properties: LazyValue::new(move || stored_properties), + } + } +} + +#[cfg(feature = "runtime-benchmarks")] +impl<'a, T, Handle, FIsAdmin, FPropertyPermissions> + PropertyWriter<'a, BenchmarkPropertyWriter, T, Handle, FIsAdmin, FPropertyPermissions> where T: Config, Handle: CommonCollectionOperations + Deref>, + FIsAdmin: FnOnce() -> bool, + FPropertyPermissions: FnOnce() -> PropertiesPermissionMap, { - PropertyWriter { - collection, - is_collection_admin: LazyValue::new(|| collection.is_owner_or_admin(sender)), - property_permissions: LazyValue::new(|| >::property_permissions(collection.id)), - check_token_exist: |token_id| collection.token_exists(token_id), - get_properties: |token_id| { - collection - .get_token_properties_raw(token_id) - .unwrap_or_default() - }, - _phantom: PhantomData, + /// A function to benchmark the writing of token properties. + pub fn write_token_properties( + &mut self, + token_id: TokenId, + properties_updates: impl Iterator, + log: evm_coder::ethereum::Log, + ) -> DispatchResult { + let check_token_exist = || true; + let check_token_owner = || Ok(true); + let get_token_properties = || TokenProperties::new(); + + self.internal_write_token_properties( + token_id, + PropertyWriterLazyTokenInfo::new( + check_token_exist, + check_token_owner, + get_token_properties, + ), + properties_updates.map(|p| (p.key, Some(p.value))), + log, + ) } } -/// Computes the weight delta for newly created tokens with properties. +/// Computes the weight of writing properties to tokens. /// * `properties_nums` - The properties num of each created token. -/// * `init_token_properties` - The function to obtain the weight from a token's properties num. -pub fn init_token_properties_delta Weight>( +/// * `per_token_weight_weight` - The function to obtain the weight +/// of writing properties from a token's properties num. +pub fn write_token_properties_total_weight Weight>( properties_nums: impl Iterator, - init_token_properties: I, + per_token_weight: I, ) -> Weight { - let mut delta = properties_nums + let mut weight = properties_nums .filter_map(|properties_num| { if properties_num > 0 { - Some(init_token_properties(properties_num)) + Some(per_token_weight(properties_num)) } else { None } }) .fold(Weight::zero(), |a, b| a.saturating_add(b)); - // If at least once the `init_token_properties` was called, - // it means at least one newly created token has properties. - // Becuase of that, some common collection data also was loaded and we need to add this weight. - // However, these common data was loaded only once which is guaranteed by the `PropertyWriter`. - if !delta.is_zero() { - delta = delta.saturating_add(>::init_token_properties_common()) + if !weight.is_zero() { + // If we are here, it means the token properties were written at least once. + // Because of that, some common collection data was also loaded; we must add this weight. + // However, this common data was loaded only once, which is guaranteed by the `PropertyWriter`. 
+ + weight = weight.saturating_add(>::property_writer_load_collection_info()); } - delta + weight } #[cfg(any(feature = "tests", test))] diff --git a/pallets/nonfungible/Cargo.toml b/pallets/nonfungible/Cargo.toml index d1311cd899..4bc861a80d 100644 --- a/pallets/nonfungible/Cargo.toml +++ b/pallets/nonfungible/Cargo.toml @@ -22,6 +22,7 @@ sp-runtime = { workspace = true } sp-std = { workspace = true } struct-versioning = { workspace = true } up-data-structs = { workspace = true } +up-common = { workspace = true } [features] default = ["std"] @@ -30,6 +31,7 @@ runtime-benchmarks = [ 'frame-support/runtime-benchmarks', 'frame-system/runtime-benchmarks', 'up-data-structs/runtime-benchmarks', + 'up-common/std', ] std = [ "evm-coder/std", diff --git a/pallets/nonfungible/src/benchmarking.rs b/pallets/nonfungible/src/benchmarking.rs index 65367feead..83aa397c59 100644 --- a/pallets/nonfungible/src/benchmarking.rs +++ b/pallets/nonfungible/src/benchmarking.rs @@ -20,9 +20,7 @@ use crate::{Pallet, Config, NonfungibleHandle}; use frame_benchmarking::{benchmarks, account}; use pallet_common::{ bench_init, - benchmarking::{ - create_collection_raw, property_key, property_value, load_is_admin_and_property_permissions, - }, + benchmarking::{create_collection_raw, property_key, property_value}, CommonCollectionOperations, }; use sp_std::prelude::*; @@ -202,7 +200,21 @@ benchmarks! { let item = create_max_item(&collection, &owner, owner.clone())?; }: {>::set_token_properties(&collection, &owner, item, props.into_iter(), &Unlimited)?} - init_token_properties { + load_token_properties { + bench_init!{ + owner: sub; collection: collection(owner); + owner: cross_from_sub; + }; + + let item = create_max_item(&collection, &owner, owner.clone())?; + }: { + pallet_common::BenchmarkPropertyWriter::::load_token_properties( + &collection, + item, + ) + } + + write_token_properties { let b in 0..MAX_PROPERTIES_PER_ITEM; bench_init!{ owner: sub; collection: collection(owner); @@ -224,16 +236,14 @@ benchmarks! 
{ }).collect::>(); let item = create_max_item(&collection, &owner, owner.clone())?; - let (is_collection_admin, property_permissions) = load_is_admin_and_property_permissions(&collection, &owner); - }: { - let mut property_writer = pallet_common::collection_info_loaded_property_writer( + let lazy_collection_info = pallet_common::BenchmarkPropertyWriter::::load_collection_info( &collection, - is_collection_admin, - property_permissions, + &owner, ); + }: { + let mut property_writer = pallet_common::BenchmarkPropertyWriter::new(&collection, lazy_collection_info); property_writer.write_token_properties( - true, item, props.into_iter(), crate::erc::ERC721TokenEvent::TokenChanged { diff --git a/pallets/nonfungible/src/common.rs b/pallets/nonfungible/src/common.rs index 4854cdf9bd..7893e77478 100644 --- a/pallets/nonfungible/src/common.rs +++ b/pallets/nonfungible/src/common.rs @@ -17,13 +17,15 @@ use core::marker::PhantomData; use frame_support::{dispatch::DispatchResultWithPostInfo, ensure, fail, weights::Weight}; +use up_common::constants::MAX_NESTING_BUDGET; use up_data_structs::{ TokenId, CreateItemExData, CollectionId, budget::Budget, Property, PropertyKey, PropertyKeyPermission, PropertyValue, TokenOwnerError, }; use pallet_common::{ CommonCollectionOperations, CommonWeightInfo, RefungibleExtensions, with_weight, - weights::WeightInfo as _, SelfWeightOf as PalletCommonWeightOf, init_token_properties_delta, + weights::WeightInfo as _, SelfWeightOf as PalletCommonWeightOf, + write_token_properties_total_weight, }; use pallet_structure::Pallet as PalletStructure; use sp_runtime::DispatchError; @@ -39,9 +41,9 @@ impl CommonWeightInfo for CommonWeights { fn create_multiple_items_ex(data: &CreateItemExData) -> Weight { match data { CreateItemExData::NFT(t) => >::create_multiple_items_ex(t.len() as u32) - .saturating_add(init_token_properties_delta::( + .saturating_add(write_token_properties_total_weight::( t.iter().map(|t| t.properties.len() as u32), - >::init_token_properties, + >::write_token_properties, )), _ => Weight::zero(), } @@ -49,12 +51,12 @@ impl CommonWeightInfo for CommonWeights { fn create_multiple_items(data: &[up_data_structs::CreateItemData]) -> Weight { >::create_multiple_items(data.len() as u32).saturating_add( - init_token_properties_delta::( + write_token_properties_total_weight::( data.iter().map(|t| match t { up_data_structs::CreateItemData::NFT(n) => n.properties.len() as u32, _ => 0, }), - >::init_token_properties, + >::write_token_properties, ), ) } @@ -72,11 +74,15 @@ impl CommonWeightInfo for CommonWeights { } fn set_token_properties(amount: u32) -> Weight { - >::set_token_properties(amount) + write_token_properties_total_weight::([amount].into_iter(), |amount| { + >::load_token_properties() + + >::find_parent_weight(MAX_NESTING_BUDGET) + + >::write_token_properties(amount) + }) } fn delete_token_properties(amount: u32) -> Weight { - >::delete_token_properties(amount) + Self::set_token_properties(amount) } fn set_token_property_permissions(amount: u32) -> Weight { diff --git a/pallets/nonfungible/src/lib.rs b/pallets/nonfungible/src/lib.rs index 02ad2165f0..6c3845a13d 100644 --- a/pallets/nonfungible/src/lib.rs +++ b/pallets/nonfungible/src/lib.rs @@ -569,7 +569,7 @@ impl Pallet { nesting_budget: &dyn Budget, ) -> DispatchResult { let mut property_writer = - pallet_common::property_writer_for_existing_token(collection, sender); + pallet_common::ExistingTokenPropertyWriter::new(collection, sender); property_writer.write_token_properties( sender, @@ -916,7 +916,7 @@ 
impl Pallet { // ========= - let mut property_writer = pallet_common::property_writer_for_new_token(collection, sender); + let mut property_writer = pallet_common::NewTokenPropertyWriter::new(collection, sender); with_transaction(|| { for (i, data) in data.iter().enumerate() { diff --git a/pallets/refungible/Cargo.toml b/pallets/refungible/Cargo.toml index 980c5384c3..d6446f3028 100644 --- a/pallets/refungible/Cargo.toml +++ b/pallets/refungible/Cargo.toml @@ -23,6 +23,7 @@ sp-runtime = { workspace = true } sp-std = { workspace = true } up-data-structs = { workspace = true } +up-common = { workspace = true } [features] default = ["std"] @@ -31,6 +32,7 @@ runtime-benchmarks = [ 'frame-support/runtime-benchmarks', 'frame-system/runtime-benchmarks', 'up-data-structs/runtime-benchmarks', + 'up-common/std', ] std = [ "evm-coder/std", diff --git a/pallets/refungible/src/benchmarking.rs b/pallets/refungible/src/benchmarking.rs index 630f4782cf..ae44572634 100644 --- a/pallets/refungible/src/benchmarking.rs +++ b/pallets/refungible/src/benchmarking.rs @@ -22,9 +22,7 @@ use core::iter::IntoIterator; use frame_benchmarking::{benchmarks, account}; use pallet_common::{ bench_init, - benchmarking::{ - create_collection_raw, property_key, property_value, load_is_admin_and_property_permissions, - }, + benchmarking::{create_collection_raw, property_key, property_value}, }; use sp_std::prelude::*; use up_data_structs::{ @@ -259,7 +257,21 @@ benchmarks! { let item = create_max_item(&collection, &owner, [(owner.clone(), 200)])?; }: {>::set_token_properties(&collection, &owner, item, props.into_iter(), &Unlimited)?} - init_token_properties { + load_token_properties { + bench_init!{ + owner: sub; collection: collection(owner); + owner: cross_from_sub; + }; + + let item = create_max_item(&collection, &owner, [(owner.clone(), 200)])?; + }: { + pallet_common::BenchmarkPropertyWriter::::load_token_properties( + &collection, + item, + ) + } + + write_token_properties { let b in 0..MAX_PROPERTIES_PER_ITEM; bench_init!{ owner: sub; collection: collection(owner); @@ -281,16 +293,14 @@ benchmarks! 
{ }).collect::>(); let item = create_max_item(&collection, &owner, [(owner.clone(), 200)])?; - let (is_collection_admin, property_permissions) = load_is_admin_and_property_permissions(&collection, &owner); - }: { - let mut property_writer = pallet_common::collection_info_loaded_property_writer( + let lazy_collection_info = pallet_common::BenchmarkPropertyWriter::::load_collection_info( &collection, - is_collection_admin, - property_permissions, + &owner, ); + }: { + let mut property_writer = pallet_common::BenchmarkPropertyWriter::new(&collection, lazy_collection_info); property_writer.write_token_properties( - true, item, props.into_iter(), crate::erc::ERC721TokenEvent::TokenChanged { diff --git a/pallets/refungible/src/common.rs b/pallets/refungible/src/common.rs index 0d64f8deb8..8803ae0db5 100644 --- a/pallets/refungible/src/common.rs +++ b/pallets/refungible/src/common.rs @@ -18,6 +18,7 @@ use core::marker::PhantomData; use sp_std::collections::btree_map::BTreeMap; use frame_support::{dispatch::DispatchResultWithPostInfo, ensure, fail, weights::Weight, traits::Get}; +use up_common::constants::MAX_NESTING_BUDGET; use up_data_structs::{ CollectionId, TokenId, CreateItemExData, budget::Budget, Property, PropertyKey, PropertyValue, PropertyKeyPermission, CreateRefungibleExMultipleOwners, CreateRefungibleExSingleOwner, @@ -25,7 +26,7 @@ use up_data_structs::{ }; use pallet_common::{ CommonCollectionOperations, CommonWeightInfo, RefungibleExtensions, with_weight, - weights::WeightInfo as _, init_token_properties_delta, + weights::WeightInfo as _, write_token_properties_total_weight, }; use pallet_structure::{Pallet as PalletStructure, Error as StructureError}; use sp_runtime::{DispatchError}; @@ -49,14 +50,14 @@ pub struct CommonWeights(PhantomData); impl CommonWeightInfo for CommonWeights { fn create_multiple_items(data: &[up_data_structs::CreateItemData]) -> Weight { >::create_multiple_items(data.len() as u32).saturating_add( - init_token_properties_delta::( + write_token_properties_total_weight::( data.iter().map(|data| match data { up_data_structs::CreateItemData::ReFungible(rft_data) => { rft_data.properties.len() as u32 } _ => 0, }), - >::init_token_properties, + >::write_token_properties, ), ) } @@ -65,16 +66,16 @@ impl CommonWeightInfo for CommonWeights { match call { CreateItemExData::RefungibleMultipleOwners(i) => { >::create_multiple_items_ex_multiple_owners(i.users.len() as u32) - .saturating_add(init_token_properties_delta::( + .saturating_add(write_token_properties_total_weight::( [i.properties.len() as u32].into_iter(), - >::init_token_properties, + >::write_token_properties, )) } CreateItemExData::RefungibleMultipleItems(i) => { >::create_multiple_items_ex_multiple_items(i.len() as u32) - .saturating_add(init_token_properties_delta::( + .saturating_add(write_token_properties_total_weight::( i.iter().map(|d| d.properties.len() as u32), - >::init_token_properties, + >::write_token_properties, )) } _ => Weight::zero(), @@ -94,11 +95,15 @@ impl CommonWeightInfo for CommonWeights { } fn set_token_properties(amount: u32) -> Weight { - >::set_token_properties(amount) + write_token_properties_total_weight::([amount].into_iter(), |amount| { + >::load_token_properties() + + >::find_parent_weight(MAX_NESTING_BUDGET) + + >::write_token_properties(amount) + }) } fn delete_token_properties(amount: u32) -> Weight { - >::delete_token_properties(amount) + Self::set_token_properties(amount) } fn set_token_property_permissions(amount: u32) -> Weight { diff --git a/pallets/refungible/src/lib.rs 
b/pallets/refungible/src/lib.rs index e9f4f9575e..7270402cd7 100644 --- a/pallets/refungible/src/lib.rs +++ b/pallets/refungible/src/lib.rs @@ -508,7 +508,7 @@ impl Pallet { nesting_budget: &dyn Budget, ) -> DispatchResult { let mut property_writer = - pallet_common::property_writer_for_existing_token(collection, sender); + pallet_common::ExistingTokenPropertyWriter::new(collection, sender); property_writer.write_token_properties( sender, @@ -859,7 +859,7 @@ impl Pallet { // ========= - let mut property_writer = pallet_common::property_writer_for_new_token(collection, sender); + let mut property_writer = pallet_common::NewTokenPropertyWriter::new(collection, sender); with_transaction(|| { for (i, data) in data.iter().enumerate() { diff --git a/pallets/structure/src/lib.rs b/pallets/structure/src/lib.rs index 3374ccdd65..425c45df74 100644 --- a/pallets/structure/src/lib.rs +++ b/pallets/structure/src/lib.rs @@ -53,6 +53,7 @@ #![cfg_attr(not(feature = "std"), no_std)] +use frame_support::weights::Weight; use pallet_common::CommonCollectionOperations; use pallet_common::{erc::CrossAccountId, eth::is_collection}; use sp_std::collections::btree_set::BTreeSet; @@ -64,6 +65,7 @@ use pallet_common::{dispatch::CollectionDispatch}; use up_data_structs::{ CollectionId, TokenId, mapping::TokenAddressMapping, budget::Budget, TokenOwnerError, }; +use weights::WeightInfo; #[cfg(feature = "runtime-benchmarks")] pub mod benchmarking; @@ -409,4 +411,8 @@ impl Pallet { action(dispatch, token) } + + pub fn find_parent_weight(chain_length: u32) -> Weight { + >::find_parent().saturating_mul(chain_length as u64) + } } diff --git a/pallets/unique/Cargo.toml b/pallets/unique/Cargo.toml index 3807d5da5a..b1d4de99e2 100644 --- a/pallets/unique/Cargo.toml +++ b/pallets/unique/Cargo.toml @@ -31,6 +31,7 @@ std = [ 'pallet-nonfungible/std', 'sp-runtime/std', 'sp-std/std', + 'up-common/std', 'up-data-structs/std', ] stubgen = ["evm-coder/stubgen", "pallet-common/stubgen"] @@ -59,4 +60,5 @@ sp-core = { workspace = true } sp-io = { workspace = true } sp-runtime = { workspace = true } sp-std = { workspace = true } +up-common = { workspace = true } up-data-structs = { workspace = true } diff --git a/pallets/unique/src/lib.rs b/pallets/unique/src/lib.rs index 0b37621bf8..e3c41d10cf 100644 --- a/pallets/unique/src/lib.rs +++ b/pallets/unique/src/lib.rs @@ -90,6 +90,7 @@ pub mod pallet { use scale_info::TypeInfo; use frame_system::{ensure_signed, ensure_root}; use sp_std::{vec, vec::Vec}; + use up_common::constants::MAX_NESTING_BUDGET; use up_data_structs::{ MAX_COLLECTION_NAME_LENGTH, MAX_COLLECTION_DESCRIPTION_LENGTH, MAX_TOKEN_PREFIX_LENGTH, MAX_PROPERTIES_PER_ITEM, MAX_PROPERTY_KEY_LENGTH, MAX_PROPERTY_VALUE_LENGTH, @@ -105,9 +106,6 @@ pub mod pallet { }; use weights::WeightInfo; - /// A maximum number of levels of depth in the token nesting tree. - pub const NESTING_BUDGET: u32 = 5; - /// Errors for the common Unique transactions. #[pallet::error] pub enum Error { @@ -264,7 +262,7 @@ pub mod pallet { impl Pallet { /// A maximum number of levels of depth in the token nesting tree. fn nesting_budget() -> u32 { - NESTING_BUDGET + MAX_NESTING_BUDGET } /// Maximal length of a collection name. 
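The new `find_parent_weight` helper and the shared `MAX_NESTING_BUDGET` constant are what the reworked `set_token_properties` weights above are built from: checking indirect ownership may walk up to `MAX_NESTING_BUDGET` parents, so that component is the single `find_parent` weight multiplied by the chain length. A rough, self-contained model follows; the numeric costs are hypothetical stand-ins, not benchmark results, and only the multiplication mirrors the helper added to pallet-structure.

const MAX_NESTING_BUDGET: u32 = 5;

#[derive(Clone, Copy, Debug, PartialEq)]
struct Weight(u64);

impl Weight {
    fn saturating_add(self, other: Weight) -> Weight {
        Weight(self.0.saturating_add(other.0))
    }
    fn saturating_mul(self, n: u64) -> Weight {
        Weight(self.0.saturating_mul(n))
    }
}

// Hypothetical stand-ins for benchmarked weights.
fn find_parent() -> Weight { Weight(10_000) }            // one nesting-tree lookup
fn load_token_properties() -> Weight { Weight(20_000) }  // read stored token properties
fn write_token_properties(amount: u32) -> Weight { Weight(5_000 * amount as u64) }

// Mirrors the helper added to pallet-structure in this change.
fn find_parent_weight(chain_length: u32) -> Weight {
    find_parent().saturating_mul(chain_length as u64)
}

fn main() {
    // Modelled per-token cost of writing `amount` properties to an existing token.
    let amount = 3;
    let per_token = load_token_properties()
        .saturating_add(find_parent_weight(MAX_NESTING_BUDGET))
        .saturating_add(write_token_properties(amount));
    assert_eq!(per_token, Weight(20_000 + 50_000 + 15_000));
    println!("modelled set_token_properties weight: {:?}", per_token);
}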
@@ -674,7 +672,7 @@ pub mod pallet { data: CreateItemData, ) -> DispatchResultWithPostInfo { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); - let budget = budget::Value::new(NESTING_BUDGET); + let budget = budget::Value::new(Self::nesting_budget()); dispatch_tx::(collection_id, |d| { d.create_item(sender, owner, data, &budget) @@ -709,7 +707,7 @@ pub mod pallet { ) -> DispatchResultWithPostInfo { ensure!(!items_data.is_empty(), Error::::EmptyArgument); let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); - let budget = budget::Value::new(NESTING_BUDGET); + let budget = budget::Value::new(Self::nesting_budget()); dispatch_tx::(collection_id, |d| { d.create_multiple_items(sender, owner, items_data, &budget) @@ -801,7 +799,7 @@ pub mod pallet { ensure!(!properties.is_empty(), Error::::EmptyArgument); let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); - let budget = budget::Value::new(NESTING_BUDGET); + let budget = budget::Value::new(Self::nesting_budget()); dispatch_tx::(collection_id, |d| { d.set_token_properties(sender, token_id, properties, &budget) @@ -834,7 +832,7 @@ pub mod pallet { ensure!(!property_keys.is_empty(), Error::::EmptyArgument); let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); - let budget = budget::Value::new(NESTING_BUDGET); + let budget = budget::Value::new(Self::nesting_budget()); dispatch_tx::(collection_id, |d| { d.delete_token_properties(sender, token_id, property_keys, &budget) @@ -895,7 +893,7 @@ pub mod pallet { data: CreateItemExData, ) -> DispatchResultWithPostInfo { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); - let budget = budget::Value::new(NESTING_BUDGET); + let budget = budget::Value::new(Self::nesting_budget()); dispatch_tx::(collection_id, |d| { d.create_multiple_items_ex(sender, data, &budget) @@ -1004,7 +1002,7 @@ pub mod pallet { value: u128, ) -> DispatchResultWithPostInfo { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); - let budget = budget::Value::new(NESTING_BUDGET); + let budget = budget::Value::new(Self::nesting_budget()); dispatch_tx::(collection_id, |d| { d.burn_from(sender, from, item_id, value, &budget) @@ -1042,7 +1040,7 @@ pub mod pallet { value: u128, ) -> DispatchResultWithPostInfo { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); - let budget = budget::Value::new(NESTING_BUDGET); + let budget = budget::Value::new(Self::nesting_budget()); dispatch_tx::(collection_id, |d| { d.transfer(sender, recipient, item_id, value, &budget) @@ -1148,7 +1146,7 @@ pub mod pallet { value: u128, ) -> DispatchResultWithPostInfo { let sender = T::CrossAccountId::from_sub(ensure_signed(origin)?); - let budget = budget::Value::new(NESTING_BUDGET); + let budget = budget::Value::new(Self::nesting_budget()); dispatch_tx::(collection_id, |d| { d.transfer_from(sender, from, recipient, item_id, value, &budget) diff --git a/primitives/common/src/constants.rs b/primitives/common/src/constants.rs index 390184190d..7f175d81c6 100644 --- a/primitives/common/src/constants.rs +++ b/primitives/common/src/constants.rs @@ -44,6 +44,10 @@ pub const UNIQUE: Balance = 100 * CENTIUNIQUE; /// Minimum balance required to create or keep an account open. pub const EXISTENTIAL_DEPOSIT: u128 = 0; + +/// A maximum number of levels of depth in the token nesting tree. +pub const MAX_NESTING_BUDGET: u32 = 5; + /// Amount of Balance reserved for candidate registration. 
pub const GENESIS_LICENSE_BOND: u128 = 1_000_000_000_000 * UNIQUE;

/// Amount of maximum collators for Collator Selection.
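To close, a simplified model of the accounting rule behind `write_token_properties_total_weight`: every token that actually has properties pays its per-token cost, and the one-time cost of loading the collection info (the admin flag and the property permissions) is charged exactly once, matching the single lazy load performed by `PropertyWriter`. Plain `u64` values stand in for `Weight` here; this is an illustrative sketch, not the pallet code.

fn write_token_properties_total_weight(
    properties_nums: impl Iterator<Item = u32>,
    per_token_weight: impl Fn(u32) -> u64,
    load_collection_info_weight: u64,
) -> u64 {
    let mut weight: u64 = properties_nums
        .filter(|&n| n > 0)
        .map(|n| per_token_weight(n))
        .fold(0, |a, b| a.saturating_add(b));

    if weight != 0 {
        // At least one token had properties written, so the collection info
        // was loaded (once) and must be paid for (once).
        weight = weight.saturating_add(load_collection_info_weight);
    }
    weight
}

fn main() {
    // Three minted tokens: two with properties, one without.
    let per_token = |n: u32| 1_000u64.saturating_mul(n as u64);
    let total = write_token_properties_total_weight([2, 0, 3].into_iter(), per_token, 7_000);
    // 2_000 + 3_000 for the two tokens, plus 7_000 once for the collection info.
    assert_eq!(total, 12_000);

    // No token has properties: nothing extra is charged at all.
    assert_eq!(write_token_properties_total_weight([0, 0].into_iter(), per_token, 7_000), 0);
}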