[refactor] #3874: Remove IsAssetDefinitionOwner #3979

Merged: 1 commit, Oct 11, 2023
Binary file modified configs/peer/validator.wasm
Binary file not shown.
21 changes: 3 additions & 18 deletions core/src/smartcontracts/isi/asset.rs
@@ -422,7 +422,9 @@ pub mod query {
use eyre::{Result, WrapErr as _};
use iroha_data_model::{
asset::{Asset, AssetDefinition},
query::{asset::IsAssetDefinitionOwner, error::QueryExecutionFail as Error, MetadataValue},
query::{
asset::FindAssetDefinitionById, error::QueryExecutionFail as Error, MetadataValue,
},
};

use super::*;
@@ -698,21 +700,4 @@ pub mod query {
.map(Into::into)
}
}

impl ValidQuery for IsAssetDefinitionOwner {
#[metrics("is_asset_definition_owner")]
fn execute(&self, wsv: &WorldStateView) -> Result<bool, Error> {
let asset_definition_id = wsv
.evaluate(&self.asset_definition_id)
.wrap_err("Failed to get asset definition id")
.map_err(|e| Error::Evaluate(e.to_string()))?;
let account_id = wsv
.evaluate(&self.account_id)
.wrap_err("Failed to get account id")
.map_err(|e| Error::Evaluate(e.to_string()))?;

let entry = wsv.asset_definition(&asset_definition_id)?;
Ok(entry.owned_by == account_id)
}
}
}
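
With the `ValidQuery` impl above gone there is no dedicated ownership query any more; the check it performed (`entry.owned_by == account_id`) can instead be done by the caller on top of `FindAssetDefinitionById`, which this hunk pulls into the imports. A minimal client-side sketch; `iroha_client::Client::request` and an `owned_by()` accessor on `AssetDefinition` are assumptions about the surrounding API, not something introduced by this PR:

```rust
// Sketch only: a client-side replacement for the removed `IsAssetDefinitionOwner`
// query. `Client::request` and `AssetDefinition::owned_by()` are assumed to exist
// in the surrounding crates; adjust the names to the actual API of your version.
use iroha_client::client::Client;
use iroha_data_model::prelude::*;

fn is_asset_definition_owner(
    client: &Client,
    definition_id: AssetDefinitionId,
    account_id: &AccountId,
) -> eyre::Result<bool> {
    // Fetch the definition and compare its recorded owner locally, mirroring
    // the `entry.owned_by == account_id` comparison from the deleted impl.
    let definition = client.request(FindAssetDefinitionById::new(definition_id))?;
    Ok(definition.owned_by() == account_id)
}
```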
1 change: 0 additions & 1 deletion core/src/smartcontracts/isi/query.rs
@@ -127,7 +127,6 @@ impl ValidQuery for QueryBox {
FindAssetDefinitionById,
FindAssetQuantityById,
FindTotalAssetQuantityByAssetDefinitionId,
IsAssetDefinitionOwner,
FindDomainById,
FindBlockHeaderByHash,
FindTransactionByHash,
15 changes: 12 additions & 3 deletions data_model/derive/src/filter.rs
@@ -35,18 +35,27 @@ enum EventVariant {
impl FromVariant for EventVariant {
fn from_variant(variant: &Variant) -> darling::Result<Self> {
let syn2::Fields::Unnamed(fields) = &variant.fields else {
return Err(darling::Error::custom("Expected an enum with unnamed fields").with_span(&variant.fields));
return Err(
darling::Error::custom("Expected an enum with unnamed fields")
.with_span(&variant.fields),
);
};
// note: actually, we have only one field in the event variants
// this is not enforced by this macro, but by `IntoSchema`
let Some(first_field_ty) = fields.unnamed.first().map(|v| &v.ty) else {
return Err(darling::Error::custom("Expected at least one field").with_span(&fields));
};
let syn2::Type::Path(path) = first_field_ty else {
return Err(darling::Error::custom("Only identifiers supported as event types").with_span(first_field_ty));
return Err(
darling::Error::custom("Only identifiers supported as event types")
.with_span(first_field_ty),
);
};
let Some(first_field_ty_name) = path.path.get_ident() else {
return Err(darling::Error::custom("Only identifiers supported as event types").with_span(first_field_ty));
return Err(
darling::Error::custom("Only identifiers supported as event types")
.with_span(first_field_ty),
);
};

// What clippy suggests is much less readable in this case
4 changes: 3 additions & 1 deletion data_model/derive/src/has_origin.rs
@@ -31,7 +31,9 @@ impl FromDeriveInput for HasOriginEnum {
let ident = input.ident.clone();
let generics = input.generics.clone();

let Some(variants) = darling::ast::Data::<HasOriginVariant, ()>::try_from(&input.data)?.take_enum() else {
let Some(variants) =
darling::ast::Data::<HasOriginVariant, ()>::try_from(&input.data)?.take_enum()
else {
return Err(darling::Error::custom("Expected enum"));
};

2 changes: 1 addition & 1 deletion data_model/derive/src/lib.rs
@@ -602,7 +602,7 @@ pub fn has_origin_derive(input: TokenStream) -> TokenStream {
let mut emitter = Emitter::new();

let Some(input) = emitter.handle(syn2::parse2(input)) else {
return emitter.finish_token_stream()
return emitter.finish_token_stream();
};

let result = has_origin::impl_has_origin(&mut emitter, &input);
1 change: 0 additions & 1 deletion data_model/src/lib.rs
@@ -137,7 +137,6 @@ mod seal {
FindAssetsByDomainIdAndAssetDefinitionId,
FindAssetQuantityById,
FindTotalAssetQuantityByAssetDefinitionId,
IsAssetDefinitionOwner,
FindAssetKeyValueByIdAndKey,
FindAssetDefinitionKeyValueByIdAndKey,
FindAllDomains,
30 changes: 1 addition & 29 deletions data_model/src/query/mod.rs
@@ -110,7 +110,6 @@ pub mod model {
FindAssetsByDomainIdAndAssetDefinitionId(FindAssetsByDomainIdAndAssetDefinitionId),
FindAssetQuantityById(FindAssetQuantityById),
FindTotalAssetQuantityByAssetDefinitionId(FindTotalAssetQuantityByAssetDefinitionId),
IsAssetDefinitionOwner(IsAssetDefinitionOwner),
FindAssetKeyValueByIdAndKey(FindAssetKeyValueByIdAndKey),
FindAssetDefinitionKeyValueByIdAndKey(FindAssetDefinitionKeyValueByIdAndKey),
FindAllDomains(FindAllDomains),
@@ -672,16 +671,6 @@ pub mod asset {
pub key: EvaluatesTo<Name>,
}

/// [`IsAssetDefinitionOwner`] Iroha Query checks if provided account is the asset definition owner.
#[derive(Display)]
#[display(fmt = "Check if `{account_id}` is creator of `{asset_definition_id}` asset")]
#[ffi_type]
pub struct IsAssetDefinitionOwner {
/// `Id` of an [`AssetDefinition`] to check.
pub asset_definition_id: EvaluatesTo<AssetDefinitionId>,
/// `Id` of a possible owner [`Account`].
pub account_id: EvaluatesTo<AccountId>,
}
}
impl Query for FindAllAssets {
type Output = Vec<Asset>;
@@ -735,10 +724,6 @@ pub mod asset {
type Output = MetadataValue;
}

impl Query for IsAssetDefinitionOwner {
type Output = bool;
}

impl FindAssetById {
/// Construct [`FindAssetById`].
pub fn new(id: impl Into<EvaluatesTo<AssetId>>) -> Self {
@@ -837,27 +822,14 @@ pub mod asset {
}
}

impl IsAssetDefinitionOwner {
/// Construct [`IsAssetDefinitionOwner`].
pub fn new(
asset_definition_id: impl Into<EvaluatesTo<AssetDefinitionId>>,
account_id: impl Into<EvaluatesTo<AccountId>>,
) -> Self {
Self {
asset_definition_id: asset_definition_id.into(),
account_id: account_id.into(),
}
}
}

/// The prelude re-exports most commonly used traits, structs and macros from this crate.
pub mod prelude {
pub use super::{
FindAllAssets, FindAllAssetsDefinitions, FindAssetById, FindAssetDefinitionById,
FindAssetDefinitionKeyValueByIdAndKey, FindAssetKeyValueByIdAndKey,
FindAssetQuantityById, FindAssetsByAccountId, FindAssetsByAssetDefinitionId,
FindAssetsByDomainId, FindAssetsByDomainIdAndAssetDefinitionId, FindAssetsByName,
FindTotalAssetQuantityByAssetDefinitionId, IsAssetDefinitionOwner,
FindTotalAssetQuantityByAssetDefinitionId,
};
}
}
3 changes: 0 additions & 3 deletions data_model/src/visit.rs
@@ -106,7 +106,6 @@ pub trait Visit: ExpressionEvaluator {
visit_find_trigger_by_id(&FindTriggerById),
visit_find_trigger_key_value_by_id_and_key(&FindTriggerKeyValueByIdAndKey),
visit_find_triggers_by_domain_id(&FindTriggersByDomainId),
visit_is_asset_definition_owner(&IsAssetDefinitionOwner),

// Visit RegisterExpr
visit_register_peer(Register<Peer>),
@@ -242,7 +241,6 @@ pub fn visit_query<V: Visit + ?Sized>(visitor: &mut V, authority: &AccountId, qu
visit_find_trigger_by_id(FindTriggerById),
visit_find_trigger_key_value_by_id_and_key(FindTriggerKeyValueByIdAndKey),
visit_find_triggers_by_domain_id(FindTriggersByDomainId),
visit_is_asset_definition_owner(IsAssetDefinitionOwner),
}
}

@@ -782,5 +780,4 @@ leaf_visitors! {
visit_find_trigger_by_id(&FindTriggerById),
visit_find_trigger_key_value_by_id_and_key(&FindTriggerKeyValueByIdAndKey),
visit_find_triggers_by_domain_id(&FindTriggersByDomainId),
visit_is_asset_definition_owner(&IsAssetDefinitionOwner),
}
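
The same name disappears from three macro lists because the `Visit` trait, the `visit_query` dispatcher, and the `leaf_visitors!` block are all generated from them, one entry per query variant. Below is roughly what those pieces expand to for a single query, heavily simplified and not the literal macro output (the real trait also requires `ExpressionEvaluator`):

```rust
// Illustrative expansion for one query type; the real macros in
// `data_model/src/visit.rs` cover every variant and differ in detail.
use iroha_data_model::prelude::*;

pub trait Visit {
    // Trait hook with a default body that forwards to the free function below,
    // so implementors only override the hooks they care about.
    fn visit_find_all_domains(&mut self, authority: &AccountId, query: &FindAllDomains) {
        visit_find_all_domains(self, authority, query)
    }
}

// The dispatcher matches on the `QueryBox` variant and calls the matching hook,
// which is why a removed variant must also leave this list.
pub fn visit_query<V: Visit + ?Sized>(visitor: &mut V, authority: &AccountId, query: &QueryBox) {
    match query {
        QueryBox::FindAllDomains(q) => visitor.visit_find_all_domains(authority, q),
        _ => { /* one arm per remaining variant in the real code */ }
    }
}

// The leaf visitor itself is a no-op hook.
pub fn visit_find_all_domains<V: Visit + ?Sized>(
    _visitor: &mut V,
    _authority: &AccountId,
    _query: &FindAllDomains,
) {
}
```

Since the leaf visitor defaults to doing nothing, dropping the `IsAssetDefinitionOwner` entries only affects downstream code that explicitly overrode `visit_is_asset_definition_owner`.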
63 changes: 23 additions & 40 deletions docs/source/references/schema.json
@@ -2643,18 +2643,6 @@
"Ipv4Predicate": "Array<Interval<u8>, 4>",
"Ipv6Addr": "Array<u16, 8>",
"Ipv6Predicate": "Array<Interval<u16>, 8>",
"IsAssetDefinitionOwner": {
"Struct": [
{
"name": "asset_definition_id",
"type": "EvaluatesTo<AssetDefinitionId>"
},
{
"name": "account_id",
"type": "EvaluatesTo<AccountId>"
}
]
},
"LengthLimits": {
"Struct": [
{
@@ -3507,124 +3495,119 @@
"discriminant": 16,
"type": "FindTotalAssetQuantityByAssetDefinitionId"
},
{
"tag": "IsAssetDefinitionOwner",
"discriminant": 17,
"type": "IsAssetDefinitionOwner"
},
{
"tag": "FindAssetKeyValueByIdAndKey",
"discriminant": 18,
"discriminant": 17,
"type": "FindAssetKeyValueByIdAndKey"
},
{
"tag": "FindAssetDefinitionKeyValueByIdAndKey",
"discriminant": 19,
"discriminant": 18,
"type": "FindAssetDefinitionKeyValueByIdAndKey"
},
{
"tag": "FindAllDomains",
"discriminant": 20,
"discriminant": 19,
"type": "FindAllDomains"
},
{
"tag": "FindDomainById",
"discriminant": 21,
"discriminant": 20,
"type": "FindDomainById"
},
{
"tag": "FindDomainKeyValueByIdAndKey",
"discriminant": 22,
"discriminant": 21,
"type": "FindDomainKeyValueByIdAndKey"
},
{
"tag": "FindAllPeers",
"discriminant": 23,
"discriminant": 22,
"type": "FindAllPeers"
},
{
"tag": "FindAllBlocks",
"discriminant": 24,
"discriminant": 23,
"type": "FindAllBlocks"
},
{
"tag": "FindAllBlockHeaders",
"discriminant": 25,
"discriminant": 24,
"type": "FindAllBlockHeaders"
},
{
"tag": "FindBlockHeaderByHash",
"discriminant": 26,
"discriminant": 25,
"type": "FindBlockHeaderByHash"
},
{
"tag": "FindAllTransactions",
"discriminant": 27,
"discriminant": 26,
"type": "FindAllTransactions"
},
{
"tag": "FindTransactionsByAccountId",
"discriminant": 28,
"discriminant": 27,
"type": "FindTransactionsByAccountId"
},
{
"tag": "FindTransactionByHash",
"discriminant": 29,
"discriminant": 28,
"type": "FindTransactionByHash"
},
{
"tag": "FindPermissionTokensByAccountId",
"discriminant": 30,
"discriminant": 29,
"type": "FindPermissionTokensByAccountId"
},
{
"tag": "FindPermissionTokenSchema",
"discriminant": 31,
"discriminant": 30,
"type": "FindPermissionTokenSchema"
},
{
"tag": "FindAllActiveTriggerIds",
"discriminant": 32,
"discriminant": 31,
"type": "FindAllActiveTriggerIds"
},
{
"tag": "FindTriggerById",
"discriminant": 33,
"discriminant": 32,
"type": "FindTriggerById"
},
{
"tag": "FindTriggerKeyValueByIdAndKey",
"discriminant": 34,
"discriminant": 33,
"type": "FindTriggerKeyValueByIdAndKey"
},
{
"tag": "FindTriggersByDomainId",
"discriminant": 35,
"discriminant": 34,
"type": "FindTriggersByDomainId"
},
{
"tag": "FindAllRoles",
"discriminant": 36,
"discriminant": 35,
"type": "FindAllRoles"
},
{
"tag": "FindAllRoleIds",
"discriminant": 37,
"discriminant": 36,
"type": "FindAllRoleIds"
},
{
"tag": "FindRoleByRoleId",
"discriminant": 38,
"discriminant": 37,
"type": "FindRoleByRoleId"
},
{
"tag": "FindRolesByAccountId",
"discriminant": 39,
"discriminant": 38,
"type": "FindRolesByAccountId"
},
{
"tag": "FindAllParameters",
"discriminant": 40,
"discriminant": 39,
"type": "FindAllParameters"
}
]
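
The cascade of discriminant changes above is the mechanical fallout of deleting variant 17: `QueryBox` discriminants follow declaration order (as the schema diff itself shows), so every variant after `IsAssetDefinitionOwner` shifts down by one, a wire-format break for anything still speaking the old schema. A toy enum in plain Rust, not the actual data-model type, showing the same effect:

```rust
// Toy illustration of the discriminant shift; the real `QueryBox` gets its
// discriminants from variant order in the same way, per the schema diff above.
#[allow(dead_code)]
enum QueryTag {
    FindTotalAssetQuantityByAssetDefinitionId = 16,
    // IsAssetDefinitionOwner = 17,           // removed by this PR
    FindAssetKeyValueByIdAndKey,               // was 18, now 17
    FindAssetDefinitionKeyValueByIdAndKey,     // was 19, now 18
}

fn main() {
    // Implicit discriminants continue from the previous variant, so deleting a
    // middle variant renumbers everything after it -- the same shift visible in
    // the schema diff above.
    assert_eq!(QueryTag::FindAssetKeyValueByIdAndKey as u8, 17);
    assert_eq!(QueryTag::FindAssetDefinitionKeyValueByIdAndKey as u8, 18);
}
```

This renumbering is presumably also why the prebuilt `configs/peer/validator.wasm` is rebuilt in the same commit.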
10 changes: 6 additions & 4 deletions ffi/derive/src/attr_parse/derive.rs
@@ -70,13 +70,15 @@ impl FromAttributes for DeriveAttrs {

for attr in attrs {
if attr.path().is_ident("derive") {
let Some(list) = accumulator.handle(attr.meta.require_list().map_err(Into::into)) else {
continue
let Some(list) = accumulator.handle(attr.meta.require_list().map_err(Into::into))
else {
continue;
};
let Some(paths) = accumulator.handle(
list.parse_args_with(Punctuated::<syn2::Path, Token![,]>::parse_terminated).map_err(Into::into)
list.parse_args_with(Punctuated::<syn2::Path, Token![,]>::parse_terminated)
.map_err(Into::into),
) else {
continue
continue;
};

for path in paths {
15 changes: 11 additions & 4 deletions ffi/derive/src/attr_parse/getset.rs
@@ -211,10 +211,17 @@ impl GetSetRawFieldAttr {
// iroha doesn't use the latter form, so it is not supported by `iroha_ffi_derive`
if attr.path().is_ident("getset") {
let Some(list) = accumulator.handle(attr.meta.require_list().map_err(Into::into))
else { continue };
let Some(tokens): Option<Punctuated<SpannedGetSetAttrToken, Token![,]>>
= accumulator.handle(list.parse_args_with(Punctuated::parse_terminated).map_err(Into::into))
else { continue };
else {
continue;
};
let Some(tokens): Option<Punctuated<SpannedGetSetAttrToken, Token![,]>> =
accumulator.handle(
list.parse_args_with(Punctuated::parse_terminated)
.map_err(Into::into),
)
else {
continue;
};

for token in tokens {
match token.token {