
Commit

fix: remove transparent_api feature from iroha dependencies
Signed-off-by: Marin Veršić <[email protected]>
mversic committed Oct 22, 2024
1 parent cba76c3 commit 007be63
Showing 18 changed files with 55 additions and 68 deletions.
3 changes: 0 additions & 3 deletions Cargo.lock

Some generated files are not rendered by default.

3 changes: 1 addition & 2 deletions crates/iroha/Cargo.toml
@@ -51,8 +51,7 @@ tls-rustls-webpki-roots = [
iroha_config = { workspace = true }
iroha_config_base = { workspace = true }
iroha_crypto = { workspace = true }
# FIXME: should remove `transparent_api` feature. Check the other FIXME comment in dev-dependencies
iroha_data_model = { workspace = true, features = ["http", "transparent_api"] }
iroha_data_model = { workspace = true, features = ["http"] }
iroha_primitives = { workspace = true }
iroha_logger = { workspace = true }
iroha_telemetry = { workspace = true }
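For context, a hypothetical sketch of the kind of gate a Cargo feature such as `transparent_api` typically controls; the type and field names below are illustrative stand-ins, not the actual `iroha_data_model` definitions. With the feature off, downstream crates reach internals only through accessors, which is why the call sites later in this commit change shape.

    // Illustrative only -- not the real iroha_data_model code.
    pub struct PeerId {
        address: String,    // private unless exposed below
        public_key: String,
    }

    impl PeerId {
        pub fn new(address: String, public_key: String) -> Self {
            Self { address, public_key }
        }

        // Stable accessors available to every downstream crate.
        pub fn address(&self) -> &String {
            &self.address
        }

        pub fn public_key(&self) -> &String {
            &self.public_key
        }
    }

    // Internals-exposing helpers compiled in only when the feature is enabled,
    // e.g. via `features = ["transparent_api"]` in a dependent Cargo.toml.
    #[cfg(feature = "transparent_api")]
    impl PeerId {
        pub fn into_parts(self) -> (String, String) {
            (self.address, self.public_key)
        }
    }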
1 change: 1 addition & 0 deletions crates/iroha_config_base/src/toml.rs
@@ -289,6 +289,7 @@ impl<'a> From<&'a mut Table> for Writer<'a> {
/// Extension trait to implement writing with [`Writer`] directly into [`Table`] in a chained manner.
pub trait WriteExt: Sized {
/// See [`Writer::write`].
#[must_use]
fn write<P: WritePath, T: Serialize>(self, path: P, value: T) -> Self;
}

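A note on the `#[must_use]` added above: for a chained writer that consumes and returns `Self`, the attribute makes the compiler warn when the returned value is dropped, which would otherwise silently lose the write. A minimal sketch with a hypothetical builder, not the `iroha_config_base` types:

    struct Writer(Vec<String>);

    impl Writer {
        #[must_use]
        fn write(mut self, value: &str) -> Self {
            self.0.push(value.to_owned());
            self
        }
    }

    fn main() {
        let w = Writer(Vec::new());
        // A bare `w.write("lost");` would now trigger an unused-result warning.
        let w = w.write("chain").write("threads");
        assert_eq!(w.0.len(), 2);
    }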
5 changes: 1 addition & 4 deletions crates/iroha_p2p/Cargo.toml
@@ -14,9 +14,7 @@ workspace = true
[dependencies]
iroha_logger = { workspace = true }
iroha_crypto = { workspace = true, default-features = true }
iroha_data_model = { workspace = true, default-features = true, features = [
"transparent_api",
] }
iroha_data_model = { workspace = true, default-features = true }
iroha_primitives = { workspace = true }
iroha_config = { workspace = true }
iroha_futures = { workspace = true }
@@ -39,4 +37,3 @@ bytes = { workspace = true }

[dev-dependencies]
iroha_config_base = { workspace = true }
iroha_test_network = { workspace = true }
18 changes: 9 additions & 9 deletions crates/iroha_p2p/src/network.rs
@@ -332,7 +332,7 @@ impl<T: Pload, K: Kex, E: Enc> NetworkBase<T, K, E> {
.iter()
// Peer is not connected but should
.filter_map(|(peer, is_active)| {
(!self.peers.contains_key(&peer.public_key)
(!self.peers.contains_key(peer.public_key())
&& !self
.connecting_peers
.values()
@@ -362,7 +362,7 @@ impl<T: Pload, K: Kex, E: Enc> NetworkBase<T, K, E> {

fn connect_peer(&mut self, peer: &PeerId) {
iroha_logger::trace!(
listen_addr = %self.listen_addr, peer.id.address = %peer.address,
listen_addr = %self.listen_addr, peer.id.address = %peer.address(),
"Creating new peer actor",
);

@@ -372,7 +372,7 @@ impl<T: Pload, K: Kex, E: Enc> NetworkBase<T, K, E> {
let service_message_sender = self.service_message_sender.clone();
connecting::<T, K, E>(
// NOTE: we intentionally use peer's address and our public key, it's used during handshake
peer.address.clone(),
peer.address().clone(),
self.key_pair.clone(),
conn_id,
service_message_sender,
@@ -410,7 +410,7 @@ impl<T: Pload, K: Kex, E: Enc> NetworkBase<T, K, E> {
}

// Insert peer if peer not in peers yet or replace peer if it's disambiguator value is smaller than new one (simultaneous connections resolution rule)
match self.peers.get(&peer_id.public_key) {
match self.peers.get(peer_id.public_key()) {
Some(peer) if peer.disambiguator > disambiguator => {
iroha_logger::debug!(
"Peer is disconnected due to simultaneous connection resolution policy"
@@ -428,7 +428,7 @@ impl<T: Pload, K: Kex, E: Enc> NetworkBase<T, K, E> {
let ref_peer = RefPeer {
handle: ready_peer_handle,
conn_id: connection_id,
p2p_addr: peer_id.address.clone(),
p2p_addr: peer_id.address().clone(),
disambiguator,
};
let _ = peer_message_sender.send(self.peer_message_sender.clone());
@@ -439,10 +439,10 @@ impl<T: Pload, K: Kex, E: Enc> NetworkBase<T, K, E> {
fn peer_terminated(&mut self, Terminated { peer_id, conn_id }: Terminated) {
self.connecting_peers.remove(&conn_id);
if let Some(peer_id) = peer_id {
if let Some(peer) = self.peers.get(&peer_id.public_key) {
if let Some(peer) = self.peers.get(peer_id.public_key()) {
if peer.conn_id == conn_id {
iroha_logger::debug!(conn_id, peer=%peer_id, "Peer terminated");
self.peers.remove(&peer_id.public_key);
self.peers.remove(peer_id.public_key());
Self::remove_online_peer(&self.online_peers_sender, &peer_id);
}
}
@@ -451,11 +451,11 @@ impl<T: Pload, K: Kex, E: Enc> NetworkBase<T, K, E> {

fn post(&mut self, Post { data, peer_id }: Post<T>) {
iroha_logger::trace!(peer=%peer_id, "Post message");
match self.peers.get(&peer_id.public_key) {
match self.peers.get(peer_id.public_key()) {
Some(peer) => {
if peer.handle.post(data).is_err() {
iroha_logger::error!(peer=%peer_id, "Failed to send message to peer");
self.peers.remove(&peer_id.public_key);
self.peers.remove(peer_id.public_key());
Self::remove_online_peer(&self.online_peers_sender, &peer_id);
}
}
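Why the lookups above change from `&peer_id.public_key` to `peer_id.public_key()`: the getter already returns a reference, so it can be passed to `HashMap::get` directly. A self-contained sketch with stand-in types (not the real iroha ones):

    use std::collections::HashMap;

    #[derive(PartialEq, Eq, Hash, Clone)]
    struct PublicKey(String);

    struct PeerId {
        public_key: PublicKey,
    }

    impl PeerId {
        fn public_key(&self) -> &PublicKey {
            &self.public_key
        }
    }

    fn main() {
        let peer = PeerId { public_key: PublicKey("ed0120".into()) };
        let mut peers: HashMap<PublicKey, u32> = HashMap::new();
        peers.insert(peer.public_key().clone(), 1);

        // Field access needed an explicit borrow: peers.get(&peer.public_key)
        // The accessor already yields &PublicKey:
        assert_eq!(peers.get(peer.public_key()), Some(&1));
    }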
8 changes: 4 additions & 4 deletions crates/iroha_p2p/tests/integration/p2p.rs
@@ -301,12 +301,12 @@ async fn start_network(
barrier: Arc<Barrier>,
shutdown_signal: ShutdownSignal,
) -> (PeerId, NetworkHandle<TestMessage>) {
info!(peer_addr = %peer.address, "Starting network");
info!(peer_addr = %peer.address(), "Starting network");

// This actor will get the messages from other peers and increment the counter
let actor = TestActor::start(messages);

let PeerId { address, .. } = peer.clone();
let address = peer.address().clone();
let idle_timeout = Duration::from_secs(60);
let config = Config {
address: WithOrigin::inline(address),
@@ -329,14 +329,14 @@ tokio::time::timeout(Duration::from_millis(10_000), async {
tokio::time::timeout(Duration::from_millis(10_000), async {
let mut connections = network.wait_online_peers_update(HashSet::len).await;
while conn_count != connections {
info!(peer_addr = %peer.address, %connections);
info!(peer_addr = %peer.address(), %connections);
connections = network.wait_online_peers_update(HashSet::len).await;
}
})
.await
.expect("Failed to get all connections");

info!(peer_addr = %peer.address, %conn_count, "Got all connections!");
info!(peer_addr = %peer.address(), %conn_count, "Got all connections!");

(peer, network)
}
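The test above can no longer destructure `PeerId { address, .. }` once the struct's fields are private outside their crate, so it clones through the accessor instead. A minimal sketch with stand-in types:

    mod data_model {
        #[derive(Clone)]
        pub struct PeerId {
            address: String, // private to this module
        }

        impl PeerId {
            pub fn new(address: String) -> Self {
                Self { address }
            }
            pub fn address(&self) -> &String {
                &self.address
            }
        }
    }

    fn main() {
        let peer = data_model::PeerId::new("127.0.0.1:1337".to_owned());
        // `let data_model::PeerId { address, .. } = peer.clone();` no longer
        // compiles here because the field is not visible; the accessor works:
        let address = peer.address().clone();
        assert_eq!(address, "127.0.0.1:1337");
    }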
6 changes: 3 additions & 3 deletions crates/iroha_schema_derive/src/lib.rs
@@ -221,15 +221,15 @@ pub fn schema_derive(input: TokenStream) -> TokenStream {
);

// add trait bounds on field types using the same algorithm that parity scale codec uses
emitter.handle(trait_bounds::add(
trait_bounds::add(
&input.ident,
&mut input.generics,
&input.data,
syn::parse_quote!(iroha_schema::IntoSchema),
&syn::parse_quote!(iroha_schema::IntoSchema),
None,
false,
&syn::parse_quote!(iroha_schema),
));
);

let impl_type_id = impl_type_id(&mut syn::parse2(original_input).unwrap());

53 changes: 23 additions & 30 deletions crates/iroha_schema_derive/src/trait_bounds.rs
@@ -1,14 +1,14 @@
//! Algorithm for generating trait bounds in IntoSchema derive
//! Algorithm for generating trait bounds in `IntoSchema` derive
//!
//! Based on https://github.com/paritytech/parity-scale-codec/blob/2c61d4ab70dfa157556430546441cd2deb5031f2/derive/src/trait_bounds.rs
//! Based on <https://github.com/paritytech/parity-scale-codec/blob/2c61d4ab70dfa157556430546441cd2deb5031f2/derive/src/trait_bounds.rs>
use std::iter;

use proc_macro2::Ident;
use syn::{
parse_quote,
visit::{self, Visit},
Generics, Result, Type, TypePath,
Generics, Type, TypePath,
};

use crate::{IntoSchemaData, IntoSchemaField};
@@ -119,18 +119,18 @@ pub fn add(
generics: &mut Generics,
data: &IntoSchemaData,
// custom_trait_bound: Option<CustomTraitBound<N>>,
codec_bound: syn::Path,
codec_skip_bound: Option<syn::Path>,
codec_bound: &syn::Path,
codec_skip_bound: Option<&syn::Path>,
dumb_trait_bounds: bool,
crate_path: &syn::Path,
) -> Result<()> {
) {
let skip_type_params = Vec::<Ident>::new();
// NOTE: not implementing custom trait bounds for now
// can be implemented later if needed
// = match custom_trait_bound {
// Some(CustomTraitBound::SpecifiedBounds { bounds, .. }) => {
// generics.make_where_clause().predicates.extend(bounds);
// return Ok(());
// return;
// }
// Some(CustomTraitBound::SkipTypeParams { type_names, .. }) => {
// type_names.into_iter().collect::<Vec<_>>()
@@ -144,21 +144,21 @@ pub fn add(
.map(|tp| tp.ident.clone())
.collect::<Vec<_>>();
if ty_params.is_empty() {
return Ok(());
return;
}

let codec_types =
get_types_to_add_trait_bound(input_ident, data, &ty_params, dumb_trait_bounds)?;
get_types_to_add_trait_bound(input_ident, data, &ty_params, dumb_trait_bounds);

let compact_types = collect_types(data, |t| t.codec_attrs.compact)?
let compact_types = collect_types(data, |t| t.codec_attrs.compact)
.into_iter()
// Only add a bound if the type uses a generic
.filter(|ty| type_contain_idents(ty, &ty_params))
.collect::<Vec<_>>();

let skip_types = if codec_skip_bound.is_some() {
let needs_default_bound = |f: &IntoSchemaField| f.codec_attrs.skip;
collect_types(data, needs_default_bound)?
collect_types(data, needs_default_bound)
.into_iter()
// Only add a bound if the type uses a generic
.filter(|ty| type_contain_idents(ty, &ty_params))
@@ -170,27 +170,25 @@ pub fn add(
if !codec_types.is_empty() || !compact_types.is_empty() || !skip_types.is_empty() {
let where_clause = generics.make_where_clause();

codec_types.into_iter().for_each(|ty| {
for ty in codec_types {
where_clause
.predicates
.push(parse_quote!(#ty : #codec_bound))
});
}

compact_types.into_iter().for_each(|ty| {
for ty in compact_types {
where_clause
.predicates
.push(parse_quote!(#crate_path::Compact<#ty> : #codec_bound))
});
}

skip_types.into_iter().for_each(|ty| {
for ty in skip_types {
let codec_skip_bound = codec_skip_bound.as_ref();
where_clause
.predicates
.push(parse_quote!(#ty : #codec_skip_bound))
});
}
}

Ok(())
}

/// Returns all types that must be added to the where clause with the respective trait bound.
@@ -199,16 +197,16 @@ fn get_types_to_add_trait_bound(
data: &IntoSchemaData,
ty_params: &[Ident],
dumb_trait_bound: bool,
) -> Result<Vec<Type>> {
) -> Vec<Type> {
if dumb_trait_bound {
Ok(ty_params.iter().map(|t| parse_quote!( #t )).collect())
ty_params.iter().map(|t| parse_quote!( #t )).collect()
} else {
let needs_codec_bound = |f: &IntoSchemaField| {
!f.codec_attrs.compact
&& true // utils::get_encoded_as_type(f).is_none()
&& !f.codec_attrs.skip
};
let res = collect_types(data, needs_codec_bound)?
collect_types(data, needs_codec_bound)
.into_iter()
// Only add a bound if the type uses a generic
.filter(|ty| type_contain_idents(ty, ty_params))
@@ -226,16 +224,11 @@ fn get_types_to_add_trait_bound(
// Remove all remaining types that start/contain the input ident to not have them in the
// where clause.
.filter(|ty| !type_or_sub_type_path_starts_with_ident(ty, input_ident))
.collect();

Ok(res)
.collect()
}
}

fn collect_types(
data: &IntoSchemaData,
type_filter: fn(&IntoSchemaField) -> bool,
) -> Result<Vec<Type>> {
fn collect_types(data: &IntoSchemaData, type_filter: fn(&IntoSchemaField) -> bool) -> Vec<Type> {
let types = match *data {
IntoSchemaData::Struct(ref data) => data
.fields
@@ -258,5 +251,5 @@ fn collect_types(
.collect(),
};

Ok(types)
types
}
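The pattern behind the `Result` removals above (and the matching caller change in lib.rs): when a function has no real error path, returning the value directly removes the `?` and `Ok(())` plumbing at every call site. A generic sketch, not the derive-macro code itself:

    // Before: fallible in signature only -- the body never errors.
    fn collect_even_fallible(upto: u32) -> Result<Vec<u32>, String> {
        Ok((0..upto).filter(|n| n % 2 == 0).collect())
    }

    // After: the signature matches what the body can actually do.
    fn collect_even(upto: u32) -> Vec<u32> {
        (0..upto).filter(|n| n % 2 == 0).collect()
    }

    fn main() {
        let old = collect_even_fallible(10).expect("never fails");
        let new = collect_even(10);
        assert_eq!(old, new);
    }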
1 change: 1 addition & 0 deletions crates/iroha_schema_gen/Cargo.toml
@@ -11,6 +11,7 @@ license.workspace = true
workspace = true

[dependencies]
# TODO: `transparent_api` feature shouldn't be activated/required here
iroha_data_model = { workspace = true, features = ["http", "transparent_api"] }
iroha_executor_data_model = { workspace = true }

2 changes: 0 additions & 2 deletions crates/iroha_telemetry_derive/Cargo.toml
@@ -30,6 +30,4 @@ manyhow = { workspace = true }
iroha_macro_utils = { workspace = true }

[dev-dependencies]
iroha_core = { workspace = true }

trybuild = { workspace = true }
3 changes: 2 additions & 1 deletion crates/iroha_telemetry_derive/tests/ui_fail/not_execute.rs
@@ -1,6 +1,7 @@
use iroha_core::state::StateTransaction;
use iroha_telemetry_derive::metrics;

struct StateTransaction;

#[metrics(+"test_query", "another_test_query_without_timing")]
fn exequte(_state_transaction: &StateTransaction) -> Result<(), ()> {
Ok(())
crates/iroha_telemetry_derive/tests/ui_fail/not_execute.stderr
@@ -1,5 +1,5 @@
error: Function should be an `impl execute`
--> tests/ui_fail/not_execute.rs:5:4
--> tests/ui_fail/not_execute.rs:6:4
|
5 | fn exequte(_state_transaction: &StateTransaction) -> Result<(), ()> {
6 | fn exequte(_state_transaction: &StateTransaction) -> Result<(), ()> {
| ^^^^^^^
crates/iroha_telemetry_derive/tests/ui_fail/not_return_result.rs
@@ -1,8 +1,9 @@
use iroha_telemetry_derive::metrics;
use iroha_core::state::StateTransaction;

type MyNotResult = Option<i32>;

struct StateTransaction;

#[metrics(+"test_query", "another_test_query_without_timing")]
fn execute(_state_transaction: &StateTransaction) -> MyNotResult {
None
crates/iroha_telemetry_derive/tests/ui_fail/not_return_result.stderr
@@ -1,5 +1,5 @@
error: Should return `Result`. Found MyNotResult
--> tests/ui_fail/not_return_result.rs:7:54
--> tests/ui_fail/not_return_result.rs:8:54
|
7 | fn execute(_state_transaction: &StateTransaction) -> MyNotResult {
8 | fn execute(_state_transaction: &StateTransaction) -> MyNotResult {
| ^^^^^^^^^^^
crates/iroha_telemetry_derive/tests/ui_fail/return_nothing.rs
@@ -1,6 +1,7 @@
use iroha_core::state::StateTransaction;
use iroha_telemetry_derive::metrics;

struct StateTransaction;

#[metrics(+"test_query", "another_test_query_without_timing")]
fn execute(_state_transaction: &StateTransaction) {
Ok::<(), ()>(());
crates/iroha_telemetry_derive/tests/ui_fail/return_nothing.stderr
@@ -1,7 +1,7 @@
error: `Fn` must return `Result`. Returns nothing instead.
--> tests/ui_fail/return_nothing.rs:4:1
--> tests/ui_fail/return_nothing.rs:5:1
|
4 | #[metrics(+"test_query", "another_test_query_without_timing")]
5 | #[metrics(+"test_query", "another_test_query_without_timing")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: this error originates in the attribute macro `metrics` (in Nightly builds, run with -Z macro-backtrace for more info)
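The `.stderr` updates above exist because these fixtures are compile-fail tests: the expected output has to match the compiler's diagnostics exactly, so adding the `struct StateTransaction;` stub shifts every reported line by one. A sketch of the usual trybuild driver, assuming that is how this crate runs them (the `trybuild` dev-dependency is kept above):

    // Assumed test harness for the ui_fail fixtures.
    #[test]
    fn ui_fail() {
        let t = trybuild::TestCases::new();
        t.compile_fail("tests/ui_fail/*.rs");
    }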