From a518336be2798fa1fe2068b27adf5d0070544627 Mon Sep 17 00:00:00 2001 From: Thomas Marchand Date: Tue, 5 Dec 2023 17:29:03 +0000 Subject: [PATCH 01/18] feat: comment what to do --- src/endpoints/addr_to_domain.rs | 1 + src/endpoints/domain_to_addr.rs | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/src/endpoints/addr_to_domain.rs b/src/endpoints/addr_to_domain.rs index 89d4ad0..8c11a82 100644 --- a/src/endpoints/addr_to_domain.rs +++ b/src/endpoints/addr_to_domain.rs @@ -30,6 +30,7 @@ pub async fn handler( ) -> impl IntoResponse { let domains = state.starknetid_db.collection::("domains"); let hex_addr = to_hex(&query.addr); + // todo: if not found or 0, check main id let document = domains .find_one( doc! { diff --git a/src/endpoints/domain_to_addr.rs b/src/endpoints/domain_to_addr.rs index 978a10d..dcddfa3 100644 --- a/src/endpoints/domain_to_addr.rs +++ b/src/endpoints/domain_to_addr.rs @@ -64,6 +64,7 @@ pub async fn handler( let domains = state .starknetid_db .collection::("domains"); + // also return the data written on starknetid let document = domains .find_one( doc! 
{ @@ -73,7 +74,8 @@ pub async fn handler( None, ) .await; - + + // if not legacy_address, read the starknetid field match document { Ok(doc) => { if let Some(doc) = doc { From 1d1b0456ef0a9bcd0292151528aa45a3e765ba00 Mon Sep 17 00:00:00 2001 From: Thomas Marchand Date: Thu, 7 Dec 2023 16:49:57 +0000 Subject: [PATCH 02/18] feat: add new domain_to_addr algo --- src/endpoints/domain_to_addr.rs | 78 +++++++++++++++++++++++---------- 1 file changed, 54 insertions(+), 24 deletions(-) diff --git a/src/endpoints/domain_to_addr.rs b/src/endpoints/domain_to_addr.rs index dcddfa3..756dc3a 100644 --- a/src/endpoints/domain_to_addr.rs +++ b/src/endpoints/domain_to_addr.rs @@ -7,7 +7,8 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; -use mongodb::bson::doc; +use futures::StreamExt; +use mongodb::{bson::doc, options::AggregateOptions}; use serde::{Deserialize, Serialize}; use std::sync::Arc; @@ -64,33 +65,62 @@ pub async fn handler( let domains = state .starknetid_db .collection::("domains"); - // also return the data written on starknetid - let document = domains - .find_one( - doc! { - "domain": &query.domain, - "_cursor.to": null, + + let pipeline = vec![ + doc! { "$match": { "_cursor.to": null, "domain": query.domain.clone() } }, + doc! { "$lookup": { + "from": "id_user_data", + "let": { "userId": "$id" }, + "pipeline": [ + doc! { "$match": { + "_cursor.to": { "$exists": false }, + "field": "0x000000000000000000000000000000000000000000000000737461726b6e6574", + "$expr": { "$eq": ["$id", "$$userId"] } + } } + ], + "as": "ownerData" + }}, + doc! { "$unwind": { "path": "$ownerData", "preserveNullAndEmptyArrays": true } }, + doc! 
{ "$project": { + "addr": { + "$cond": { + "if": { "$and": [ + { "$ne": [{ "$type": "$legacy_address" }, "missing"] }, + { "$ne": ["$legacy_address", "0x00"] } + ] }, + "then": "$legacy_address", + "else": "$ownerData.data" + } }, - None, - ) - .await; - - // if not legacy_address, read the starknetid field - match document { - Ok(doc) => { - if let Some(doc) = doc { - let addr = doc.get_str("legacy_address").unwrap_or_default().to_owned(); - let domain_expiry = doc.get_i64("expiry").ok(); - let data = DomainToAddrData { - addr, - domain_expiry, + "domain_expiry" : "$expiry" + }}, + ]; + + // Execute the aggregation pipeline + let cursor: Result, &str> = domains + .aggregate(pipeline, AggregateOptions::default()) + .await + .map_err(|_| "Error while executing aggregation pipeline"); + + match cursor { + Ok(mut cursor) => { + while let Some(result) = cursor.next().await { + return match result { + Ok(doc) => { + let addr = doc.get_str("addr").unwrap_or_default().to_owned(); + let domain_expiry = doc.get_i64("domain_expiry").ok(); + let data = DomainToAddrData { + addr, + domain_expiry, + }; + (StatusCode::OK, Json(data)).into_response() + } + Err(e) => get_error(format!("Error calling the db: {}", e)), }; - (StatusCode::OK, headers, Json(data)).into_response() - } else { - get_error("no address found".to_string()) } + return get_error("No document found for the given domain".to_string()); } - Err(_) => get_error("Error while fetching from database".to_string()), + Err(e) => return get_error(format!("Error accessing the database: {}", e)), } } } From 02349730082d5590623b3da41abf52ae459a63ef Mon Sep 17 00:00:00 2001 From: Thomas Marchand Date: Thu, 7 Dec 2023 18:03:34 +0000 Subject: [PATCH 03/18] feat: add priority reverse domain --- src/endpoints/addr_to_domain.rs | 91 +++++++++++++++++++++++---------- 1 file changed, 65 insertions(+), 26 deletions(-) diff --git a/src/endpoints/addr_to_domain.rs b/src/endpoints/addr_to_domain.rs index 8c11a82..c57a395 100644 --- 
a/src/endpoints/addr_to_domain.rs +++ b/src/endpoints/addr_to_domain.rs @@ -8,7 +8,11 @@ use axum::{ response::IntoResponse, Json, }; -use mongodb::bson::{doc, Bson}; +use futures::StreamExt; +use mongodb::{ + bson::{doc, Bson}, + options::AggregateOptions, +}; use serde::{Deserialize, Serialize}; use starknet::core::types::FieldElement; use std::sync::Arc; @@ -16,7 +20,7 @@ use std::sync::Arc; #[derive(Serialize)] pub struct AddrToDomainData { domain: String, - domain_expiry: i64, + domain_expiry: Option, } #[derive(Deserialize)] @@ -28,34 +32,69 @@ pub async fn handler( State(state): State>, Query(query): Query, ) -> impl IntoResponse { - let domains = state.starknetid_db.collection::("domains"); + let domains = state + .starknetid_db + .collection::("domains"); let hex_addr = to_hex(&query.addr); - // todo: if not found or 0, check main id - let document = domains - .find_one( - doc! { - "legacy_address": &hex_addr, - "rev_address": &hex_addr, - "_cursor.to": Bson::Null, - }, - None, - ) - .await; - match document { - Ok(doc) => { - if let Some(doc) = doc { - let domain = doc.get_str("domain").unwrap_or_default().to_owned(); - let expiry = doc.get_i64("expiry").unwrap_or_default(); - let data = AddrToDomainData { - domain, - domain_expiry: expiry, + let domains = state + .starknetid_db + .collection::("domains"); + + let pipeline = vec![ + doc! { "$match": { "_cursor.to": null, "rev_address": hex_addr } }, + doc! { "$lookup": { + "from": "id_owners", + "localField": "rev_address", + "foreignField": "owner", + "as": "identity" + }}, + doc! { "$unwind": "$identity" }, + doc! { "$lookup": { + "from": "id_user_data", + "let": { "id": "$identity.id" }, + "pipeline": [ + doc! { "$match": { + "_cursor.to": { "$exists": false }, + "field": "0x000000000000000000000000000000000000000000000000737461726b6e6574", + "$expr": { "$eq": ["$id", "$$id"] } + } } + ], + "as": "starknet_data" + }}, + doc! { "$unwind": "$starknet_data" }, + doc! 
{ "$match": { + "$expr": { "$eq": ["$rev_address", "$starknet_data.data"] } + }}, + doc! { "$project": { + "domain": 1, + "domain_expiry" : "$expiry" + }}, + ]; + + let cursor: Result, &str> = domains + .aggregate(pipeline, AggregateOptions::default()) + .await + .map_err(|_| "Error while executing aggregation pipeline"); + + match cursor { + Ok(mut cursor) => { + while let Some(result) = cursor.next().await { + return match result { + Ok(doc) => { + let domain = doc.get_str("domain").unwrap_or_default().to_owned(); + let domain_expiry = doc.get_i64("domain_expiry").ok(); + let data = AddrToDomainData { + domain, + domain_expiry, + }; + (StatusCode::OK, Json(data)).into_response() + } + Err(e) => get_error(format!("Error calling the db: {}", e)), }; - (StatusCode::OK, Json(data)).into_response() - } else { - get_error("No domain found".to_string()) } + return get_error("No document found for the given address".to_string()); } - Err(_) => get_error("Error while fetching from database".to_string()), + Err(e) => return get_error(format!("Error accessing the database: {}", e)), } } From e5f461fa1d67af7deb9ca542d5afa2cca4f74fbf Mon Sep 17 00:00:00 2001 From: Thomas Marchand Date: Fri, 8 Dec 2023 13:07:22 +0000 Subject: [PATCH 04/18] feat: add main id support in api.starknet.id --- src/endpoints/addr_to_domain.rs | 80 ++++++++++++++++++++++++++------- 1 file changed, 65 insertions(+), 15 deletions(-) diff --git a/src/endpoints/addr_to_domain.rs b/src/endpoints/addr_to_domain.rs index c57a395..2d5668d 100644 --- a/src/endpoints/addr_to_domain.rs +++ b/src/endpoints/addr_to_domain.rs @@ -9,10 +9,7 @@ use axum::{ Json, }; use futures::StreamExt; -use mongodb::{ - bson::{doc, Bson}, - options::AggregateOptions, -}; +use mongodb::{bson::doc, options::AggregateOptions}; use serde::{Deserialize, Serialize}; use starknet::core::types::FieldElement; use std::sync::Arc; @@ -32,21 +29,25 @@ pub async fn handler( State(state): State>, Query(query): Query, ) -> impl IntoResponse { - 
let domains = state - .starknetid_db - .collection::("domains"); let hex_addr = to_hex(&query.addr); let domains = state .starknetid_db .collection::("domains"); - - let pipeline = vec![ - doc! { "$match": { "_cursor.to": null, "rev_address": hex_addr } }, + let legacy_pipeline = vec![ + doc! { "$match": { "_cursor.to": null, "rev_address": &hex_addr } }, doc! { "$lookup": { "from": "id_owners", - "localField": "rev_address", - "foreignField": "owner", + "let": { "rev_address": "$rev_address" }, + "pipeline": [ + { "$match": { + "$or": [ + { "_cursor.to": null }, + { "_cursor.to": { "$exists": false } } + ], + "$expr": { "$eq": ["$owner", "$$rev_address"] } + } } + ], "as": "identity" }}, doc! { "$unwind": "$identity" }, @@ -72,8 +73,31 @@ pub async fn handler( }}, ]; + let id_owners = state + .starknetid_db + .collection::("id_owners"); + let main_id_pipeline = vec![ + doc! { "$match": { "_cursor.to": null, "owner": hex_addr, "main": true } }, + doc! { "$lookup": { + "from": "domains", + "let": { "id": "$id" }, + "pipeline": [ + doc! { "$match": { + "_cursor.to": { "$exists": false }, + "$expr": { "$eq": ["$id", "$$id"] } + } } + ], + "as": "domain_data" + }}, + doc! { "$unwind": "$domain_data" }, + doc! 
{ "$project": { + "domain": "$domain_data.domain", + "domain_expiry" : "$domain_data.expiry" + }}, + ]; + let cursor: Result, &str> = domains - .aggregate(pipeline, AggregateOptions::default()) + .aggregate(legacy_pipeline, AggregateOptions::default()) .await .map_err(|_| "Error while executing aggregation pipeline"); @@ -93,8 +117,34 @@ pub async fn handler( Err(e) => get_error(format!("Error calling the db: {}", e)), }; } - return get_error("No document found for the given address".to_string()); + + // now trying default pipeline + let cursor: Result, &str> = id_owners + .aggregate(main_id_pipeline, AggregateOptions::default()) + .await + .map_err(|_| "Error while executing aggregation pipeline"); + + match cursor { + Ok(mut cursor) => { + while let Some(result) = cursor.next().await { + return match result { + Ok(doc) => { + let domain = doc.get_str("domain").unwrap_or_default().to_owned(); + let domain_expiry = doc.get_i64("domain_expiry").ok(); + let data = AddrToDomainData { + domain, + domain_expiry, + }; + (StatusCode::OK, Json(data)).into_response() + } + Err(e) => get_error(format!("Error calling the db: {}", e)), + }; + } + return get_error("No document found for the given address".to_string()); + } + Err(e) => get_error(format!("Error calling the db: {}", e)), + } } - Err(e) => return get_error(format!("Error accessing the database: {}", e)), + Err(e) => get_error(format!("Error accessing the database: {}", e)), } } From 603a63467c75944c6ea5561c7da48ff396c8f3d3 Mon Sep 17 00:00:00 2001 From: Thomas Marchand Date: Fri, 8 Dec 2023 16:34:44 +0000 Subject: [PATCH 05/18] feat: add main id support in multiple addrs --- src/endpoints/addrs_to_domains.rs | 159 ++++++++++++++++++++++-------- 1 file changed, 118 insertions(+), 41 deletions(-) diff --git a/src/endpoints/addrs_to_domains.rs b/src/endpoints/addrs_to_domains.rs index e04e427..3dde35d 100644 --- a/src/endpoints/addrs_to_domains.rs +++ b/src/endpoints/addrs_to_domains.rs @@ -1,7 +1,4 @@ -use crate::{ - 
models::AppState, - utils::{get_error, to_hex}, -}; +use crate::{models::AppState, utils::to_hex}; use axum::{ extract::{Json, State}, http::StatusCode, @@ -23,61 +20,141 @@ pub struct AddrToDomainData { pub struct AddrToDomainsQuery { addresses: Vec, } - pub async fn handler( State(state): State>, Json(query): Json, ) -> impl IntoResponse { - let domains = state.starknetid_db.collection::("domains"); + let domains_collection = state + .starknetid_db + .collection::("domains"); + let id_owners_collection = state + .starknetid_db + .collection::("id_owners"); let addresses: Vec = query.addresses.iter().map(|addr| to_hex(addr)).collect(); + println!("addresses: {:?}", addresses); + + // Initialize results with all addresses set to domain: None + let mut results: Vec = addresses + .iter() + .map(|addr| AddrToDomainData { + domain: None, + address: addr.clone(), + }) + .collect(); + + // Primary Query (Legacy) + let legacy_pipeline = vec![ + doc! { "$match": { "_cursor.to": null, "rev_address": { "$in": &addresses } } }, + doc! { "$lookup": { + "from": "id_owners", + "let": { "rev_address": "$rev_address" }, + "pipeline": [ + { "$match": { + "$or": [ + { "_cursor.to": null }, + { "_cursor.to": { "$exists": false } } + ], + "$expr": { "$eq": ["$owner", "$$rev_address"] } + } } + ], + "as": "identity" + }}, + doc! { "$unwind": "$identity" }, + doc! { "$lookup": { + "from": "id_user_data", + "let": { "id": "$identity.id" }, + "pipeline": [ + doc! { "$match": { + "_cursor.to": { "$exists": false }, + "field": "0x000000000000000000000000000000000000000000000000737461726b6e6574", + "$expr": { "$eq": ["$id", "$$id"] } + } } + ], + "as": "starknet_data" + }}, + doc! { "$unwind": "$starknet_data" }, + doc! { "$match": { + "$expr": { "$eq": ["$rev_address", "$starknet_data.data"] } + }}, + doc! 
{ "$project": { + "domain": 1, + "address" : "$rev_address", + }}, + ]; + let cursor = domains_collection + .aggregate(legacy_pipeline, AggregateOptions::default()) + .await; + + if let Ok(mut cursor) = cursor { + while let Some(doc) = cursor.next().await { + if let Ok(doc) = doc { + if let (Ok(domain), Ok(address)) = (doc.get_str("domain"), doc.get_str("address")) { + // Find the corresponding address in results and update its domain + if let Some(result) = results.iter_mut().find(|data| data.address == address) { + result.domain = Some(domain.to_string()); + } + } + } + } + } - let pipeline = vec![ + // Fallback Query + let fallback_addresses: Vec = results + .iter() + .filter_map(|data| { + if data.domain.is_none() { + Some(data.address.clone()) + } else { + None + } + }) + .collect(); + let fallback_pipeline = vec![ doc! { "$match": { - "legacy_address": { "$in": addresses.clone() }, "_cursor.to": null, - "$expr": { "$eq": ["$legacy_address", "$rev_address"] }, - }, + "owner": { "$in": fallback_addresses.clone() }, + "main": true + } }, + doc! { + "$lookup": { + "from": "domains", + "let": { "id": "$id" }, + "pipeline": [ + doc! { "$match": { + "_cursor.to": { "$exists": false }, + "$expr": { "$eq": ["$id", "$$id"] } + } } + ], + "as": "domain_data" + } + }, + doc! { "$unwind": "$domain_data" }, doc! 
{ "$project": { "_id": 0, - "domain": 1, - "address": "$legacy_address", - }, + "domain": "$domain_data.domain", + "address": "$owner", + } }, ]; - - let aggregate_options = AggregateOptions::default(); - let cursor = domains.aggregate(pipeline, aggregate_options).await; - - match cursor { - Ok(mut cursor) => { - let mut results = Vec::new(); - while let Some(doc) = cursor.next().await { - if let Ok(doc) = doc { - let domain = doc.get_str("domain").map(|s| s.to_string()).ok(); - let address = doc.get_str("address").unwrap().to_string(); - let data = AddrToDomainData { domain, address }; - results.push(data); + let cursor = id_owners_collection + .aggregate(fallback_pipeline, AggregateOptions::default()) + .await; + if let Ok(mut cursor) = cursor { + while let Some(doc) = cursor.next().await { + if let Ok(doc) = doc { + if let (Ok(domain), Ok(address)) = (doc.get_str("domain"), doc.get_str("address")) { + // Find the corresponding address in results and update its domain + if let Some(result) = results.iter_mut().find(|data| data.address == address) { + result.domain = Some(domain.to_string()); + } } } - - for address in &addresses { - if !results - .iter() - .any(|data| data.address.to_string() == *address) - { - results.push(AddrToDomainData { - domain: None, - address: address.clone(), - }); - } - } - - (StatusCode::OK, Json(results)).into_response() } - Err(_) => get_error("Error while fetching from database".to_string()), } + + (StatusCode::OK, Json(results)).into_response() } From 26a0b5784cfaf0581dca1463ca06b6803683fa15 Mon Sep 17 00:00:00 2001 From: Thomas Marchand Date: Tue, 12 Dec 2023 11:45:09 +0000 Subject: [PATCH 06/18] feat: add support for legacy resolving --- Cargo.toml | 2 + src/endpoints/addr_to_domain.rs | 137 +++++++++++++++++--------------- src/endpoints/domain_to_addr.rs | 2 +- 3 files changed, 75 insertions(+), 66 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 762814c..a4692ec 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,3 +19,5 @@ 
chrono = "0.4.24" reqwest = "0.11.20" ark-ff = "0.4.2" hex = "0.4.3" +error-stack = "0.4.1" +anyhow = "1.0.75" diff --git a/src/endpoints/addr_to_domain.rs b/src/endpoints/addr_to_domain.rs index 2d5668d..1b61f75 100644 --- a/src/endpoints/addr_to_domain.rs +++ b/src/endpoints/addr_to_domain.rs @@ -2,6 +2,7 @@ use crate::{ models::AppState, utils::{get_error, to_hex}, }; +use anyhow::{bail, Result}; use axum::{ extract::{Query, State}, http::StatusCode, @@ -9,7 +10,11 @@ use axum::{ Json, }; use futures::StreamExt; -use mongodb::{bson::doc, options::AggregateOptions}; +use mongodb::{ + bson::{doc, Document}, + options::AggregateOptions, + Cursor, +}; use serde::{Deserialize, Serialize}; use starknet::core::types::FieldElement; use std::sync::Arc; @@ -25,17 +30,72 @@ pub struct AddrToDomainQuery { addr: FieldElement, } +async fn read_cursor(mut cursor: Cursor) -> Result { + while let Some(result) = cursor.next().await { + let doc = result?; + let domain = doc.get_str("domain").unwrap_or_default().to_owned(); + let domain_expiry = doc.get_i64("domain_expiry").ok(); + return Ok(AddrToDomainData { + domain, + domain_expiry, + }); + } + bail!("No document found for the given address") +} + +async fn aggregate_data( + collection: mongodb::Collection, + pipeline: Vec, +) -> Result { + let cursor = collection + .aggregate(pipeline, AggregateOptions::default()) + .await?; + read_cursor(cursor).await +} + pub async fn handler( State(state): State>, Query(query): Query, ) -> impl IntoResponse { let hex_addr = to_hex(&query.addr); + let domains_collection = state.starknetid_db.collection::("domains"); + let id_owners_collection = state.starknetid_db.collection::("id_owners"); - let domains = state - .starknetid_db - .collection::("domains"); - let legacy_pipeline = vec![ - doc! 
{ "$match": { "_cursor.to": null, "rev_address": &hex_addr } }, + let legacy_pipeline = create_legacy_pipeline(&hex_addr); + let normal_pipeline = create_normal_pipeline(&hex_addr); + let main_id_pipeline = create_main_id_pipeline(&hex_addr); + + let results = [ + aggregate_data(domains_collection.clone(), legacy_pipeline), + aggregate_data(domains_collection.clone(), normal_pipeline), + aggregate_data(id_owners_collection, main_id_pipeline), + ]; + + for result in results { + match result.await { + Ok(data) => return (StatusCode::OK, Json(data)).into_response(), + Err(_) => continue, + } + } + + get_error("No data found for the given address".to_string()) +} + +fn create_legacy_pipeline(address: &String) -> Vec { + vec![ + doc! { "$match": { "_cursor.to": null, "rev_address": address, "$expr": { + "$eq": ["$rev_address", "$legacy_address"] + } } }, + doc! { "$project": { + "domain": 1, + "domain_expiry" : "$expiry" + }}, + ] +} + +fn create_normal_pipeline(address: &String) -> Vec { + vec![ + doc! { "$match": { "_cursor.to": null, "rev_address": address } }, doc! { "$lookup": { "from": "id_owners", "let": { "rev_address": "$rev_address" }, @@ -71,13 +131,12 @@ pub async fn handler( "domain": 1, "domain_expiry" : "$expiry" }}, - ]; + ] +} - let id_owners = state - .starknetid_db - .collection::("id_owners"); - let main_id_pipeline = vec![ - doc! { "$match": { "_cursor.to": null, "owner": hex_addr, "main": true } }, +fn create_main_id_pipeline(address: &String) -> Vec { + vec![ + doc! { "$match": { "_cursor.to": null, "owner": address, "main": true } }, doc! 
{ "$lookup": { "from": "domains", "let": { "id": "$id" }, @@ -94,57 +153,5 @@ pub async fn handler( "domain": "$domain_data.domain", "domain_expiry" : "$domain_data.expiry" }}, - ]; - - let cursor: Result, &str> = domains - .aggregate(legacy_pipeline, AggregateOptions::default()) - .await - .map_err(|_| "Error while executing aggregation pipeline"); - - match cursor { - Ok(mut cursor) => { - while let Some(result) = cursor.next().await { - return match result { - Ok(doc) => { - let domain = doc.get_str("domain").unwrap_or_default().to_owned(); - let domain_expiry = doc.get_i64("domain_expiry").ok(); - let data = AddrToDomainData { - domain, - domain_expiry, - }; - (StatusCode::OK, Json(data)).into_response() - } - Err(e) => get_error(format!("Error calling the db: {}", e)), - }; - } - - // now trying default pipeline - let cursor: Result, &str> = id_owners - .aggregate(main_id_pipeline, AggregateOptions::default()) - .await - .map_err(|_| "Error while executing aggregation pipeline"); - - match cursor { - Ok(mut cursor) => { - while let Some(result) = cursor.next().await { - return match result { - Ok(doc) => { - let domain = doc.get_str("domain").unwrap_or_default().to_owned(); - let domain_expiry = doc.get_i64("domain_expiry").ok(); - let data = AddrToDomainData { - domain, - domain_expiry, - }; - (StatusCode::OK, Json(data)).into_response() - } - Err(e) => get_error(format!("Error calling the db: {}", e)), - }; - } - return get_error("No document found for the given address".to_string()); - } - Err(e) => get_error(format!("Error calling the db: {}", e)), - } - } - Err(e) => get_error(format!("Error accessing the database: {}", e)), - } + ] } diff --git a/src/endpoints/domain_to_addr.rs b/src/endpoints/domain_to_addr.rs index 756dc3a..da5df88 100644 --- a/src/endpoints/domain_to_addr.rs +++ b/src/endpoints/domain_to_addr.rs @@ -86,7 +86,7 @@ pub async fn handler( "$cond": { "if": { "$and": [ { "$ne": [{ "$type": "$legacy_address" }, "missing"] }, - { "$ne": 
["$legacy_address", "0x00"] } + { "$ne": ["$legacy_address", "0x0000000000000000000000000000000000000000000000000000000000000000"] } ] }, "then": "$legacy_address", "else": "$ownerData.data" From c8bafefe590be75ee514cd53f35a92539ee39e00 Mon Sep 17 00:00:00 2001 From: Thomas Marchand Date: Tue, 12 Dec 2023 13:49:41 +0000 Subject: [PATCH 07/18] feat: add support for legacy resolving in multiple addrs_to_domains --- src/endpoints/addrs_to_domains.rs | 156 +++++++++++++++++++----------- 1 file changed, 98 insertions(+), 58 deletions(-) diff --git a/src/endpoints/addrs_to_domains.rs b/src/endpoints/addrs_to_domains.rs index 3dde35d..833ce64 100644 --- a/src/endpoints/addrs_to_domains.rs +++ b/src/endpoints/addrs_to_domains.rs @@ -1,17 +1,22 @@ use crate::{models::AppState, utils::to_hex}; +use anyhow::{Context, Result}; use axum::{ extract::{Json, State}, http::StatusCode, response::IntoResponse, }; use futures::stream::StreamExt; -use mongodb::{bson::doc, options::AggregateOptions}; +use mongodb::{ + bson::{doc, Document}, + options::AggregateOptions, + Cursor, +}; use serde::{Deserialize, Serialize}; use starknet::core::types::FieldElement; use std::sync::Arc; #[derive(Serialize)] -pub struct AddrToDomainData { +struct AddrToDomainData { domain: Option, address: String, } @@ -20,6 +25,37 @@ pub struct AddrToDomainData { pub struct AddrToDomainsQuery { addresses: Vec, } + +async fn process_cursor( + mut cursor: Cursor, + results: &mut Vec, +) -> Result<()> { + while let Some(result) = cursor.next().await { + let doc = result.context("Failed to retrieve document from cursor")?; + if let (Ok(domain), Ok(address)) = (doc.get_str("domain"), doc.get_str("address")) { + if let Some(data) = results.iter_mut().find(|d| d.address == address) { + if data.domain == None { + data.domain = Some(domain.to_string()); + } + } + } + } + Ok(()) +} + +async fn run_aggregation_pipeline( + collection: mongodb::Collection, + pipeline: Vec, + results: &mut Vec, +) -> Result<()> { + let 
cursor = collection + .aggregate(pipeline, AggregateOptions::default()) + .await + .context("Failed to execute aggregation pipeline")?; + + process_cursor(cursor, results).await +} + pub async fn handler( State(state): State>, Json(query): Json, @@ -31,21 +67,67 @@ pub async fn handler( .starknetid_db .collection::("id_owners"); - let addresses: Vec = query.addresses.iter().map(|addr| to_hex(addr)).collect(); - println!("addresses: {:?}", addresses); + let addresses: Vec = query.addresses.iter().map(to_hex).collect(); - // Initialize results with all addresses set to domain: None - let mut results: Vec = addresses + let mut results = addresses .iter() .map(|addr| AddrToDomainData { domain: None, address: addr.clone(), }) - .collect(); + .collect::>(); + + let legacy_pipeline = create_legacy_pipeline(&addresses); + if let Err(e) = + run_aggregation_pipeline(domains_collection.clone(), legacy_pipeline, &mut results).await + { + return (StatusCode::INTERNAL_SERVER_ERROR, Json(e.to_string())).into_response(); + } + + let normal_pipeline = create_normal_pipeline(&addresses); + if let Err(e) = + run_aggregation_pipeline(domains_collection.clone(), normal_pipeline, &mut results).await + { + return (StatusCode::INTERNAL_SERVER_ERROR, Json(e.to_string())).into_response(); + } + + let fallback_addresses = results + .iter() + .filter_map(|data| data.domain.is_none().then(|| data.address.clone())) + .collect::>(); + + let fallback_pipeline = create_fallback_pipeline(&fallback_addresses); + if let Err(e) = + run_aggregation_pipeline(id_owners_collection, fallback_pipeline, &mut results).await + { + return (StatusCode::INTERNAL_SERVER_ERROR, Json(e.to_string())).into_response(); + } + + (StatusCode::OK, Json(results)).into_response() +} + +fn create_legacy_pipeline(addresses: &[String]) -> Vec { + vec![ + doc! { + "$match": { + "legacy_address": { "$in": addresses.clone() }, + "_cursor.to": null, + "$expr": { "$eq": ["$legacy_address", "$rev_address"] }, + }, + }, + doc! 
{ + "$project": { + "_id": 0, + "domain": 1, + "address": "$legacy_address", + }, + }, + ] +} - // Primary Query (Legacy) - let legacy_pipeline = vec![ - doc! { "$match": { "_cursor.to": null, "rev_address": { "$in": &addresses } } }, +fn create_normal_pipeline(addresses: &[String]) -> Vec { + vec![ + doc! { "$match": { "_cursor.to": null, "rev_address": { "$in": addresses } } }, doc! { "$lookup": { "from": "id_owners", "let": { "rev_address": "$rev_address" }, @@ -81,40 +163,15 @@ pub async fn handler( "domain": 1, "address" : "$rev_address", }}, - ]; - let cursor = domains_collection - .aggregate(legacy_pipeline, AggregateOptions::default()) - .await; - - if let Ok(mut cursor) = cursor { - while let Some(doc) = cursor.next().await { - if let Ok(doc) = doc { - if let (Ok(domain), Ok(address)) = (doc.get_str("domain"), doc.get_str("address")) { - // Find the corresponding address in results and update its domain - if let Some(result) = results.iter_mut().find(|data| data.address == address) { - result.domain = Some(domain.to_string()); - } - } - } - } - } + ] +} - // Fallback Query - let fallback_addresses: Vec = results - .iter() - .filter_map(|data| { - if data.domain.is_none() { - Some(data.address.clone()) - } else { - None - } - }) - .collect(); - let fallback_pipeline = vec![ +fn create_fallback_pipeline(fallback_addresses: &[String]) -> Vec { + vec![ doc! 
{
            "$match": {
                "_cursor.to": null,
-                "owner": { "$in": fallback_addresses.clone() },
+                "owner": { "$in": fallback_addresses },
                "main": true
            }
        },
@@ -139,22 +196,5 @@ pub async fn handler(
                 "address": "$owner",
             }
         },
-    ];
-    let cursor = id_owners_collection
-        .aggregate(fallback_pipeline, AggregateOptions::default())
-        .await;
-    if let Ok(mut cursor) = cursor {
-        while let Some(doc) = cursor.next().await {
-            if let Ok(doc) = doc {
-                if let (Ok(domain), Ok(address)) = (doc.get_str("domain"), doc.get_str("address")) {
-                    // Find the corresponding address in results and update its domain
-                    if let Some(result) = results.iter_mut().find(|data| data.address == address) {
-                        result.domain = Some(domain.to_string());
-                    }
-                }
-            }
-        }
-    }
-
-    (StatusCode::OK, Json(results)).into_response()
+    ]
}

From 4de68f8f195ea56973bc26062581f9743a28a165 Mon Sep 17 00:00:00 2001
From: Thomas Marchand
Date: Tue, 12 Dec 2023 14:43:44 +0000
Subject: [PATCH 08/18] fix: improve missing address to zero

---
 src/endpoints/domain_to_addr.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/endpoints/domain_to_addr.rs b/src/endpoints/domain_to_addr.rs
index da5df88..997ac68 100644
--- a/src/endpoints/domain_to_addr.rs
+++ b/src/endpoints/domain_to_addr.rs
@@ -85,7 +85,7 @@ pub async fn handler(
             "addr": {
                 "$cond": {
                     "if": { "$and": [
-                        { "$ne": [{ "$type": "$legacy_address" }, "missing"] },
+                        { "$ne": [{ "$type": "$legacy_address" }, "missing"] },
                         { "$ne": ["$legacy_address", "0x0000000000000000000000000000000000000000000000000000000000000000"] }
                     ] },
                     "then": "$legacy_address",
                     "else": "$ownerData.data"

From a5539d9ae44d5295391207400fe680c0139f262f Mon Sep 17 00:00:00 2001
From: Thomas Marchand
Date: Tue, 12 Dec 2023 16:46:30 +0000
Subject: [PATCH 09/18] feat: new response for to_data

---
 src/endpoints/domain_to_data.rs | 406 +++++++++---------------------
 src/endpoints/id_to_data.rs     | 422 ++++++++------------------------
 src/models.rs                   |  98 +++++---
 3 
files changed, 295 insertions(+), 631 deletions(-) diff --git a/src/endpoints/domain_to_data.rs b/src/endpoints/domain_to_data.rs index 34919b6..2497711 100644 --- a/src/endpoints/domain_to_data.rs +++ b/src/endpoints/domain_to_data.rs @@ -1,7 +1,6 @@ use crate::{ - models::{AppState, Data}, - resolving::get_custom_resolver, - utils::{fetch_img_url, get_error, to_hex, to_u256}, + models::{AppState, IdentityData}, + utils::get_error, }; use axum::{ extract::{Query, State}, @@ -9,27 +8,15 @@ use axum::{ response::{IntoResponse, Json}, }; use futures::StreamExt; -use mongodb::bson::{doc, Document}; +use mongodb::bson::{doc, from_bson, Bson, Document}; use serde::Deserialize; -use starknet::core::types::FieldElement; -use std::{collections::HashMap, sync::Arc}; +use std::sync::Arc; #[derive(Deserialize)] pub struct DomainQuery { domain: String, } -#[derive(Debug)] -pub struct VerifierData { - data: Option, - extended_data: Option>, -} - -const NFT_PP_CONTRACT: &'static str = - "0x00000000000000000000000000000000006e66745f70705f636f6e7472616374"; -const NFT_PP_ID: &'static str = - "0x00000000000000000000000000000000000000000000006e66745f70705f6964"; - pub async fn handler( State(state): State>, Query(query): Query, @@ -37,296 +24,139 @@ pub async fn handler( let mut headers = HeaderMap::new(); headers.insert("Cache-Control", HeaderValue::from_static("max-age=30")); - let domains = state - .starknetid_db - .collection::("domains"); - match get_custom_resolver(&domains, &query.domain).await { - None => {} - Some(res) => { - // todo: add support for argent and braavos here - return get_error(format!("custom resolver {} is not supported yet", res)); - } - } - - let starknet_ids = state - .starknetid_db - .collection::("id_owners"); - - let domain_document = domains - .find_one( - doc! 
{ - "domain": &query.domain, - "_cursor.to": null, - }, - None, - ) - .await; + let collection = state.starknetid_db.collection::("id_owners"); - let (domain, addr, expiry, starknet_id) = match domain_document { - Ok(Some(doc)) => { - let domain = doc.get_str("domain").unwrap_or_default().to_owned(); - let addr = doc.get_str("legacy_address").ok().map(String::from); - let expiry = doc.get_i64("expiry").ok(); - let id = doc.get_str("id").unwrap_or_default().to_owned(); - (domain, addr, expiry, id) + let mut cursor = match collection.aggregate(get_pipeline(query.domain), None).await { + Ok(cursor) => cursor, + Err(_) => { + return ( + StatusCode::INTERNAL_SERVER_ERROR, + headers, + "Failed to retrieve data".to_string(), + ) + .into_response(); } - _ => return get_error("Error while fetching from database".to_string()), }; - let owner_document = starknet_ids - .find_one( - doc! { - "id": &starknet_id, - "_cursor.to": null, - }, - None, - ) - .await; - let owner_addr = match owner_document { - Ok(Some(doc)) => doc.get_str("owner").ok().map(String::from).unwrap(), - _ => return get_error("Error while fetching starknet-id from database".to_string()), + // The aggregation returns a single document + return if let Some(result) = cursor.next().await { + match result { + Ok(doc) => ( + StatusCode::OK, + headers, + Json(from_bson::(Bson::Document(doc)).expect("Malformed document")), + ) + .into_response(), + Err(err) => get_error(format!("Unexpected error: {}", err)), + } + } else { + get_error("Identity not found".to_string()) }; - let current_social_verifiers = state - .conf - .contracts - .verifiers - .clone() - .into_iter() - .map(|x| to_hex(&x)) - .collect::>(); - let mut all_social_verifiers = current_social_verifiers.clone(); - all_social_verifiers.extend(vec![to_hex(&state.conf.contracts.old_verifier)]); +} - let pipeline = vec![ +fn get_pipeline(domain: String) -> Vec { + vec![ doc! 
{ "$match": { - "$or": [ - { - "field": { - "$in": ["0x0000000000000000000000000000000000000000000000000000676974687562", "0x0000000000000000000000000000000000000000000000000074776974746572", "0x00000000000000000000000000000000000000000000000000646973636f7264"] - }, - "verifier": { "$in": all_social_verifiers } // modified this to accommodate all social verifiers - }, - { - "field": "0x0000000000000000000000000070726f6f665f6f665f706572736f6e686f6f64", - "verifier": to_hex(&state.conf.contracts.pop_verifier) - }, - { - "field": { - // nft_pp_contract, nft_pp_id - "$in": ["0x00000000000000000000000000000000006e66745f70705f636f6e7472616374", "0x00000000000000000000000000000000000000000000006e66745f70705f6964", "0x00000000000000000000000000000000000000000000000000646973636f7264"] - }, - "verifier": to_hex(&state.conf.contracts.pp_verifier) - }, - ], - "id": &starknet_id, "_cursor.to": null, + "domain": domain } }, doc! { - "$sort": doc! { - "_cursor.from": 1 + "$lookup": { + "from": "id_owners", + "let": { + "id": "$id" + }, + "pipeline": [ + doc! { + "$match": { + "$or": [ + { "_cursor.to": null }, + { "_cursor.to": { "$exists": false } } + ], + "$expr": { "$eq": ["$id", "$$id"] } + } + } + ], + "as": "id_data" } }, + doc! { "$unwind": "$id_data" }, doc! { - "$group": { - "_id": { "field": "$field", "verifier": "$verifier" }, // group by both field and verifier - "data": { "$first": "$data" }, - "extended_data": { "$first": "$extended_data" } + "$lookup": { + "from": "id_user_data", + "let": { + "id": "$id" + }, + "pipeline": [ + doc! { + "$match": { + "$or": [ + { "_cursor.to": null }, + { "_cursor.to": { "$exists": false } } + ], + "$expr": { "$eq": ["$id", "$$id"] } + } + }, + doc! 
{ + "$project": { + "_id": 0, + "field": 1, + "data": 1 + } + } + ], + "as": "user_data" } }, - ]; - - let starknet_ids_data = state - .starknetid_db - .collection::("id_verifier_data"); - let results = starknet_ids_data.aggregate(pipeline, None).await; - - let mut verifier_data_by_field: HashMap<(String, String), VerifierData> = HashMap::new(); - if let Ok(mut cursor) = results { - while let Some(result) = cursor.next().await { - if let Ok(doc) = result { - match doc.get_document("_id") { - Ok(inner_doc) => { - if let (Ok(verifier), Ok(field)) = - (inner_doc.get_str("verifier"), inner_doc.get_str("field")) - { - let data = doc.get_str("data").ok().map(String::from); - let extended_data = doc - .get_array("extended_data") - .ok() - .map(|bson_array| { - bson_array - .iter() - .filter_map(|bson| bson.as_str().map(String::from)) - .collect() - }) - .filter(|v: &Vec| !v.is_empty()); - verifier_data_by_field.insert( - (verifier.to_string(), field.to_string()), - VerifierData { - data, - extended_data, - }, - ); + doc! { + "$lookup": { + "from": "id_verifier_data", + "let": { + "id": "$id" + }, + "pipeline": [ + doc! { + "$match": { + "$or": [ + { "_cursor.to": null }, + { "_cursor.to": { "$exists": false } } + ], + "$expr": { "$eq": ["$id", "$$id"] } + } + }, + doc! 
{ + "$project": { + "_id": 0, + "field": 1, + "data": 1, + "verifier": 1 } } - Err(_) => {} - } - } - } - } - - let mut github = None; - for verifier in current_social_verifiers.to_owned() { - match verifier_data_by_field.get(&( - verifier, - "0x0000000000000000000000000000000000000000000000000000676974687562".to_string(), - )) { - Some(verifier_data) => { - github = verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }); - } - None => {} - } - } - - let old_github = match verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.old_verifier), - "0x0000000000000000000000000000000000000000000000000000676974687562".to_string(), - )) { - Some(verifier_data) => verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }), - None => None, - }; - - let mut twitter = None; - for verifier in current_social_verifiers.to_owned() { - match verifier_data_by_field.get(&( - verifier, - "0x0000000000000000000000000000000000000000000000000074776974746572".to_string(), - )) { - Some(verifier_data) => { - twitter = verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }) + ], + "as": "verifier_data" } - None => {} - } - } - - let old_twitter = match verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.old_verifier), - "0x0000000000000000000000000000000000000000000000000074776974746572".to_string(), - )) { - Some(verifier_data) => verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }), - None => None, - }; - - let mut discord: Option = None; - for verifier in current_social_verifiers.to_owned() { - match verifier_data_by_field.get(&( - verifier, - "0x00000000000000000000000000000000000000000000000000646973636f7264".to_string(), - )) { - Some(verifier_data) => { - discord = 
verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }) + }, + doc! { + "$project": { + "_id": 0, + "id": 1, + "owner": "$id_data.owner", + "main": "$id_data.main", + "creation_date": "$id_data.creation_date", + "domain": { + "domain": "$domain", + "root": "$root", + "creation_date": "$creation_date", + "expiry": "$expiry", + "resolver": "$resolver", + "legacy_address": "$legacy_address", + "rev_address": "$rev_address" + }, + "user_data": 1, + "verifier_data": 1 } - None => {} - } - } - - let old_discord = match verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.old_verifier), - "0x00000000000000000000000000000000000000000000000000646973636f7264".to_string(), - )) { - Some(verifier_data) => verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }), - None => None, - }; - - let proof_of_personhood = match verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.pop_verifier), - "0x0000000000000000000000000070726f6f665f6f665f706572736f6e686f6f64".to_string(), - )) { - Some(verifier_data) => verifier_data.data.to_owned().and_then(|data| { - Some(data == "0x0000000000000000000000000000000000000000000000000000000000000001") - }), - None => None, - }; - - let img_url = match ( - verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.pp_verifier), - NFT_PP_CONTRACT.to_string(), - )), - verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.pp_verifier), - NFT_PP_ID.to_string(), - )), - ) { - (Option::Some(data_contract), Option::Some(data_id)) => { - let id_felts = data_id.to_owned().extended_data.as_ref().unwrap(); - let id = to_u256(id_felts.get(0).unwrap(), id_felts.get(1).unwrap()); - fetch_img_url( - &state.conf.starkscan.api_url, - &state.conf.starkscan.api_key, - data_contract.data.to_owned().unwrap(), - id.to_string(), - ) - .await - } - _ => None, - }; - - let is_owner_main_document = domains - .find_one( 
- doc! { - "domain": &domain, - "legacy_address": &owner_addr, - "rev_address": &owner_addr, - "_cursor.to": null, - }, - None, - ) - .await; - let is_owner_main = is_owner_main_document.is_ok() && is_owner_main_document.unwrap().is_some(); - - let data = Data { - domain: Some(domain), - addr, - domain_expiry: expiry, - is_owner_main, - owner_addr, - github, - old_github, // added this field - twitter, - old_twitter, // added this field - discord, - old_discord, // added this field - proof_of_personhood, - starknet_id: FieldElement::from_hex_be(&starknet_id).unwrap().to_string(), - img_url, - }; - - (StatusCode::OK, headers, Json(data)).into_response() + }, + ] } diff --git a/src/endpoints/id_to_data.rs b/src/endpoints/id_to_data.rs index 6295fec..6f48176 100644 --- a/src/endpoints/id_to_data.rs +++ b/src/endpoints/id_to_data.rs @@ -1,7 +1,6 @@ use crate::{ - models::{AppState, Data}, - resolving::get_custom_resolver, - utils::{fetch_img_url, get_error, to_hex, to_u256}, + models::{AppState, IdentityData}, + utils::{get_error, to_hex}, }; use axum::{ extract::{Query, State}, @@ -9,27 +8,16 @@ use axum::{ response::{IntoResponse, Json}, }; use futures::StreamExt; -use mongodb::bson::{doc, Bson, Document}; +use mongodb::bson::{doc, from_bson, Bson, Document}; use serde::Deserialize; use starknet::core::types::FieldElement; -use std::{collections::HashMap, sync::Arc}; +use std::sync::Arc; #[derive(Deserialize)] pub struct IdQuery { id: FieldElement, } -#[derive(Debug)] -pub struct VerifierData { - data: Option, - extended_data: Option>, -} - -const NFT_PP_CONTRACT: &'static str = - "0x00000000000000000000000000000000006e66745f70705f636f6e7472616374"; -const NFT_PP_ID: &'static str = - "0x00000000000000000000000000000000000000000000006e66745f70705f6964"; - pub async fn handler( State(state): State>, Query(query): Query, @@ -37,324 +25,128 @@ pub async fn handler( let mut headers = HeaderMap::new(); headers.insert("Cache-Control", 
HeaderValue::from_static("max-age=30")); - let domains = state - .starknetid_db - .collection::("domains"); - let starknet_ids = state - .starknetid_db - .collection::("id_owners"); - - let hex_id = to_hex(&query.id); - - let domain_document = domains - .find_one( - doc! { - "id": &hex_id, - "_cursor.to": Bson::Null, - }, - None, - ) - .await; - - let domain_data = match domain_document { - Ok(doc) => { - if let Some(doc) = doc { - let domain = doc.get_str("domain").unwrap_or_default().to_owned(); - if get_custom_resolver(&domains, &domain).await.is_none() { - let addr = doc.get_str("legacy_address").ok().map(String::from); - let expiry = doc.get_i64("expiry").ok(); - Some((domain, addr, expiry)) - } else { - // we don't handle subdomains, todo: add support for braavos and argent - None - } - } else { - None - } + let collection = state.starknetid_db.collection::("id_owners"); + + let mut cursor = match collection + .aggregate(get_pipeline(to_hex(&query.id)), None) + .await + { + Ok(cursor) => cursor, + Err(_) => { + return ( + StatusCode::INTERNAL_SERVER_ERROR, + headers, + "Failed to retrieve data".to_string(), + ) + .into_response(); } - Err(_) => return get_error("Error while fetching from database".to_string()), }; - let owner_document = starknet_ids - .find_one( - doc! 
{ - "id": &hex_id, - "_cursor.to": null, - }, - None, - ) - .await; - - let owner = match owner_document { - Ok(doc) => doc.and_then(|doc| doc.get_str("owner").ok().map(String::from)), - Err(_) => return get_error("Error while fetching from database".to_string()), + // The aggregation returns a single document + return if let Some(result) = cursor.next().await { + match result { + Ok(doc) => ( + StatusCode::OK, + headers, + Json(from_bson::(Bson::Document(doc)).expect("Malformed document")), + ) + .into_response(), + Err(err) => get_error(format!("Unexpected error: {}", err)), + } + } else { + get_error("Identity not found".to_string()) }; +} - if owner.is_none() { - return get_error("starknet id not found".to_string()); - } - - let current_social_verifiers = state - .conf - .contracts - .verifiers - .clone() - .into_iter() - .map(|x| to_hex(&x)) - .collect::>(); - let mut all_social_verifiers = current_social_verifiers.clone(); - all_social_verifiers.extend(vec![to_hex(&state.conf.contracts.old_verifier)]); - let owner = owner.unwrap(); - let pipeline = vec![ +fn get_pipeline(id: String) -> Vec { + vec![ doc! 
{ "$match": { - "$or": [ - { - "field": { - "$in": ["0x0000000000000000000000000000000000000000000000000000676974687562", "0x0000000000000000000000000000000000000000000000000074776974746572", "0x00000000000000000000000000000000000000000000000000646973636f7264"] - }, - "verifier": { "$in": all_social_verifiers } // modified this to accommodate both verifiers - }, - { - "field": "0x0000000000000000000000000070726f6f665f6f665f706572736f6e686f6f64", - "verifier": to_hex(&state.conf.contracts.pop_verifier) - }, - { - "field": { - // nft_pp_contract, nft_pp_id - "$in": ["0x00000000000000000000000000000000006e66745f70705f636f6e7472616374", "0x00000000000000000000000000000000000000000000006e66745f70705f6964", "0x00000000000000000000000000000000000000000000000000646973636f7264"] - }, - "verifier": to_hex(&state.conf.contracts.pp_verifier) - }, - ], - "id": &hex_id, "_cursor.to": null, + "id": id } }, doc! { - "$sort": doc! { - "_cursor.from": 1 + "$lookup": { + "from": "domains", + "let": {"id": "$id"}, + "pipeline": [ + doc! { + "$match": { + "$or": [ + {"_cursor.to": null}, + {"_cursor.to": {"$exists": false}} + ], + "$expr": {"$eq": ["$id", "$$id"]}, + } + } + ], + "as": "domain_data" } }, doc! 
{ - "$group": { - "_id": { "field": "$field", "verifier": "$verifier" }, // group by both field and verifier - "data": { "$first": "$data" }, - "extended_data": { "$first": "$extended_data" } - } - }, - ]; - - let starknet_ids_data = state - .starknetid_db - .collection::("id_verifier_data"); - let results = starknet_ids_data.aggregate(pipeline, None).await; - - let mut verifier_data_by_field: HashMap<(String, String), VerifierData> = HashMap::new(); - if let Ok(mut cursor) = results { - while let Some(result) = cursor.next().await { - if let Ok(doc) = result { - match doc.get_document("_id") { - Ok(inner_doc) => { - if let (Ok(verifier), Ok(field)) = - (inner_doc.get_str("verifier"), inner_doc.get_str("field")) - { - let data = doc.get_str("data").ok().map(String::from); - let extended_data = doc - .get_array("extended_data") - .ok() - .map(|bson_array| { - bson_array - .iter() - .filter_map(|bson| bson.as_str().map(String::from)) - .collect() - }) - .filter(|v: &Vec| !v.is_empty()); - verifier_data_by_field.insert( - (verifier.to_string(), field.to_string()), - VerifierData { - data, - extended_data, - }, - ); + "$lookup": { + "from": "id_user_data", + "let": {"id": "$id"}, + "pipeline": [ + doc! { + "$match": { + "$or": [ + {"_cursor.to": null}, + {"_cursor.to": {"$exists": false}} + ], + "$expr": {"$eq": ["$id", "$$id"]}, + "data": { "$ne": null } } + }, + doc! 
{ + "$project": {"_id": 0, "field": 1, "data": 1} } - Err(_) => {} - } - } - } - } - - let mut github = None; - for verifier in current_social_verifiers.to_owned() { - match verifier_data_by_field.get(&( - verifier, - "0x0000000000000000000000000000000000000000000000000000676974687562".to_string(), - )) { - Some(verifier_data) => { - github = verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }); - } - None => {} - } - } - - let old_github = match verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.old_verifier), - "0x0000000000000000000000000000000000000000000000000000676974687562".to_string(), - )) { - Some(verifier_data) => verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }), - None => None, - }; - - let mut twitter = None; - for verifier in current_social_verifiers.to_owned() { - match verifier_data_by_field.get(&( - verifier, - "0x0000000000000000000000000000000000000000000000000074776974746572".to_string(), - )) { - Some(verifier_data) => { - twitter = verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }) - } - None => {} - } - } - - let old_twitter = match verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.old_verifier), - "0x0000000000000000000000000000000000000000000000000074776974746572".to_string(), - )) { - Some(verifier_data) => verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }), - None => None, - }; - - let mut discord: Option = None; - for verifier in current_social_verifiers.to_owned() { - match verifier_data_by_field.get(&( - verifier, - "0x00000000000000000000000000000000000000000000000000646973636f7264".to_string(), - )) { - Some(verifier_data) => { - discord = verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - 
.map(|fe| fe.to_string()) - .ok() - }) + ], + "as": "user_data" } - None => {} - } - } - - let old_discord = match verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.old_verifier), - "0x00000000000000000000000000000000000000000000000000646973636f7264".to_string(), - )) { - Some(verifier_data) => verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }), - None => None, - }; - - let proof_of_personhood = match verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.pop_verifier), - "0x0000000000000000000000000070726f6f665f6f665f706572736f6e686f6f64".to_string(), - )) { - Some(verifier_data) => verifier_data.data.to_owned().and_then(|data| { - Some(data == "0x0000000000000000000000000000000000000000000000000000000000000001") - }), - None => None, - }; - - let img_url = match ( - verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.pp_verifier), - NFT_PP_CONTRACT.to_string(), - )), - verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.pp_verifier), - NFT_PP_ID.to_string(), - )), - ) { - (Option::Some(data_contract), Option::Some(data_id)) => { - let id_felts = data_id.to_owned().extended_data.as_ref().unwrap(); - let id = to_u256(id_felts.get(0).unwrap(), id_felts.get(1).unwrap()); - fetch_img_url( - &state.conf.starkscan.api_url, - &state.conf.starkscan.api_key, - data_contract.data.to_owned().unwrap(), - id.to_string(), - ) - .await - } - _ => None, - }; - - let data = match domain_data { - None => Data { - domain: None, - addr: None, - domain_expiry: None, - is_owner_main: false, - owner_addr: owner, - github, - twitter, - discord, - proof_of_personhood, - old_github, - old_twitter, - old_discord, - starknet_id: query.id.to_string(), - img_url, }, - Some((domain, addr, expiry)) => { - let is_owner_main_document = domains - .find_one( + doc! { + "$lookup": { + "from": "id_verifier_data", + "let": {"id": "$id"}, + "pipeline": [ doc! 
{ - "domain": &domain, - "legacy_address": &owner, - "rev_address": &owner, - "_cursor.to": null, + "$match": { + "$or": [ + {"_cursor.to": null}, + {"_cursor.to": {"$exists": false}} + ], + "$expr": {"$eq": ["$id", "$$id"]}, + "data": { "$ne": null } + } }, - None, - ) - .await; - let is_owner_main = - is_owner_main_document.is_ok() && is_owner_main_document.unwrap().is_some(); - Data { - domain: Some(domain), - addr, - domain_expiry: expiry, - is_owner_main, - owner_addr: owner, - github, - twitter, - discord, - proof_of_personhood, - old_github, - old_twitter, - old_discord, - starknet_id: query.id.to_string(), - img_url, + doc! { + "$project": {"_id": 0, "field": 1, "data": 1, "verifier": 1} + } + ], + "as": "verifier_data" } - } - }; - - (StatusCode::OK, headers, Json(data)).into_response() + }, + doc! { + "$project": { + "_id": 0, + "id": 1, + "owner": 1, + "main": 1, + "creation_date": 1, + "domain": { + "domain": {"$arrayElemAt": ["$domain_data.domain", 0]}, + "root": {"$arrayElemAt": ["$domain_data.root", 0]}, + "creation_date": {"$arrayElemAt": ["$domain_data.creation_date", 0]}, + "expiry": {"$arrayElemAt": ["$domain_data.expiry", 0]}, + "resolver": {"$arrayElemAt": ["$domain_data.resolver", 0]}, + "legacy_address": {"$arrayElemAt": ["$domain_data.legacy_address", 0]}, + "rev_address": {"$arrayElemAt": ["$domain_data.rev_address", 0]} + }, + "user_data": 1, + "verifier_data": 1 + } + }, + ] } diff --git a/src/models.rs b/src/models.rs index a52ebb3..b002bab 100644 --- a/src/models.rs +++ b/src/models.rs @@ -1,7 +1,8 @@ use mongodb::Database; +use starknet::core::types::FieldElement; -use crate::config::Config; -use serde::{Deserialize, Serialize}; +use crate::{config::Config, utils::to_hex}; +use serde::{Deserialize, Serialize, Serializer}; use std::collections::HashMap; pub struct AppState { @@ -11,32 +12,73 @@ pub struct AppState { pub states: States, } -#[derive(Serialize)] -pub struct Data { - #[serde(skip_serializing_if = "Option::is_none")] - pub 
domain: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub addr: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub domain_expiry: Option, - pub is_owner_main: bool, - pub owner_addr: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub github: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub twitter: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub discord: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub old_github: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub old_twitter: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub old_discord: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub proof_of_personhood: Option, - pub starknet_id: String, - pub img_url: Option, +fn serialize_felt(field_element: &FieldElement, serializer: S) -> Result +where + S: Serializer, +{ + let hex_string = to_hex(field_element); + serializer.serialize_str(&hex_string) +} + +fn serialize_opt_felt( + field_element: &Option, + serializer: S, +) -> Result +where + S: Serializer, +{ + match field_element { + Some(fe) => { + let hex_string = to_hex(fe); + serializer.serialize_str(&hex_string) + } + None => serializer.serialize_none(), + } +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct IdentityData { + #[serde(serialize_with = "serialize_felt")] + pub id: FieldElement, + #[serde(serialize_with = "serialize_felt")] + pub owner: FieldElement, + pub main: bool, + pub creation_date: u64, + pub domain: Option, + pub user_data: Vec, + pub verifier_data: Vec, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct Domain { + pub domain: String, + pub root: bool, + pub creation_date: u64, + pub expiry: Option, + #[serde(serialize_with = "serialize_opt_felt")] + pub resolver: Option, + #[serde(serialize_with = "serialize_opt_felt")] + pub legacy_address: Option, + #[serde(serialize_with = "serialize_opt_felt")] + pub rev_address: Option, +} + 
+#[derive(Serialize, Deserialize, Debug)] +pub struct UserData { + #[serde(serialize_with = "serialize_felt")] + pub field: FieldElement, + #[serde(serialize_with = "serialize_felt")] + pub data: FieldElement, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct VerifierData { + #[serde(serialize_with = "serialize_felt")] + pub verifier: FieldElement, + #[serde(serialize_with = "serialize_felt")] + pub field: FieldElement, + #[serde(serialize_with = "serialize_felt")] + pub data: FieldElement, } #[derive(Deserialize, Debug)] From 319d4443f3a83bca62255685a9ba231f1ad284df Mon Sep 17 00:00:00 2001 From: Thomas Marchand Date: Tue, 12 Dec 2023 16:54:37 +0000 Subject: [PATCH 10/18] update: starknet-rs --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index a4692ec..910dbf0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -starknet = { git = "https://github.com/Th0rgal/starknet-rs.git", branch = "feat/starknet-id" } +starknet = { git = "https://github.com/xJonathanLEI/starknet-rs", rev = "0df9ad3417a5f10d486348737fe75659ca4bcfdc" } axum = "0.6.18" futures = "0.3.28" mongodb = "2.5.0" From c7f057c334af6e8702af9dc46883725b702ef02f Mon Sep 17 00:00:00 2001 From: Thomas Marchand Date: Wed, 13 Dec 2023 16:29:49 +0000 Subject: [PATCH 11/18] feat: simple endpoint to know if user as legacy rev address --- src/endpoints/addr_has_rev.rs | 54 +++++++++++++++++++++++++++++++++++ src/endpoints/mod.rs | 1 + src/main.rs | 1 + 3 files changed, 56 insertions(+) create mode 100644 src/endpoints/addr_has_rev.rs diff --git a/src/endpoints/addr_has_rev.rs b/src/endpoints/addr_has_rev.rs new file mode 100644 index 0000000..88abcc9 --- /dev/null +++ b/src/endpoints/addr_has_rev.rs @@ -0,0 +1,54 @@ +use crate::{ + models::AppState, + utils::{get_error, to_hex}, +}; +use axum::{ + extract::{Query, 
State}, + http::StatusCode, + response::IntoResponse, + Json, +}; +use mongodb::bson::doc; +use serde::{Deserialize, Serialize}; +use starknet::core::types::FieldElement; +use std::sync::Arc; + +#[derive(Serialize)] +pub struct AddrToDomainData { + has_rev: bool, +} + +#[derive(Deserialize)] +pub struct AddrHasRevQuery { + addr: FieldElement, +} + +pub async fn handler( + State(state): State>, + Query(query): Query, +) -> impl IntoResponse { + let domains = state + .starknetid_db + .collection::("domains"); + let hex_addr = to_hex(&query.addr); + let document = domains + .find_one( + doc! { + "_cursor.to" : null, + "rev_address" : hex_addr + }, + None, + ) + .await; + + match document { + Ok(doc) => ( + StatusCode::OK, + Json(AddrToDomainData { + has_rev: doc.is_some(), + }), + ) + .into_response(), + Err(_) => get_error("Error while fetching from database".to_string()), + } +} diff --git a/src/endpoints/mod.rs b/src/endpoints/mod.rs index fb4b532..e073672 100644 --- a/src/endpoints/mod.rs +++ b/src/endpoints/mod.rs @@ -1,3 +1,4 @@ +pub mod addr_has_rev; pub mod addr_to_available_ids; pub mod addr_to_domain; pub mod addr_to_external_domains; diff --git a/src/main.rs b/src/main.rs index ec0d9ab..39127db 100644 --- a/src/main.rs +++ b/src/main.rs @@ -84,6 +84,7 @@ async fn main() { .route("/data_to_ids", get(endpoints::data_to_ids::handler)) .route("/domain_to_addr", get(endpoints::domain_to_addr::handler)) .route("/domain_to_data", get(endpoints::domain_to_data::handler)) + .route("/addr_has_rev", get(endpoints::addr_has_rev::handler)) .route("/id_to_data", get(endpoints::id_to_data::handler)) .route("/uri", get(endpoints::uri::handler)) .route( From b8f80a5b407ec447a6b35d11026a4fa70e8ecd18 Mon Sep 17 00:00:00 2001 From: Thomas Marchand Date: Thu, 21 Dec 2023 14:42:14 +0000 Subject: [PATCH 12/18] feat: improve performances reading user token ids --- src/endpoints/addr_to_available_ids.rs | 11 +++++++++-- src/endpoints/addr_to_full_ids.rs | 3 +++ 
src/endpoints/addrs_to_domains.rs | 3 +++ 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/src/endpoints/addr_to_available_ids.rs b/src/endpoints/addr_to_available_ids.rs index e13bf95..c920216 100644 --- a/src/endpoints/addr_to_available_ids.rs +++ b/src/endpoints/addr_to_available_ids.rs @@ -29,13 +29,20 @@ pub async fn handler( State(state): State>, Query(query): Query, ) -> impl IntoResponse { - let starknet_ids = state.starknetid_db.collection::("id_owners"); - let domains = state.starknetid_db.collection::("domains"); + let starknet_ids = state + .starknetid_db + .collection::("id_owners"); + let domains = state + .starknetid_db + .collection::("domains"); let addr = to_hex(&query.addr); let documents = starknet_ids .find( doc! { "owner": &addr, + "id" : { + "$ne" : null + }, "_cursor.to": null, }, None, diff --git a/src/endpoints/addr_to_full_ids.rs b/src/endpoints/addr_to_full_ids.rs index 7d692e9..0bf7dc2 100644 --- a/src/endpoints/addr_to_full_ids.rs +++ b/src/endpoints/addr_to_full_ids.rs @@ -64,6 +64,9 @@ pub async fn handler( doc! { "$match": doc! 
{ "owner": to_hex(&query.addr), + "id" : { + "$ne" : null + }, "_cursor.to": Bson::Null } }, diff --git a/src/endpoints/addrs_to_domains.rs b/src/endpoints/addrs_to_domains.rs index 833ce64..71e46ce 100644 --- a/src/endpoints/addrs_to_domains.rs +++ b/src/endpoints/addrs_to_domains.rs @@ -133,6 +133,9 @@ fn create_normal_pipeline(addresses: &[String]) -> Vec { "let": { "rev_address": "$rev_address" }, "pipeline": [ { "$match": { + "id" : { + "$ne" : null + }, "$or": [ { "_cursor.to": null }, { "_cursor.to": { "$exists": false } } From 7bcb6c6edbc466842ef51114875bdd0eee70df72 Mon Sep 17 00:00:00 2001 From: Thomas Marchand Date: Thu, 21 Dec 2023 16:23:33 +0000 Subject: [PATCH 13/18] fix: domain_to_data panicking --- src/endpoints/domain_to_data.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/endpoints/domain_to_data.rs b/src/endpoints/domain_to_data.rs index 2497711..cb4e786 100644 --- a/src/endpoints/domain_to_data.rs +++ b/src/endpoints/domain_to_data.rs @@ -24,7 +24,7 @@ pub async fn handler( let mut headers = HeaderMap::new(); headers.insert("Cache-Control", HeaderValue::from_static("max-age=30")); - let collection = state.starknetid_db.collection::("id_owners"); + let collection = state.starknetid_db.collection::("domains"); let mut cursor = match collection.aggregate(get_pipeline(query.domain), None).await { Ok(cursor) => cursor, @@ -123,7 +123,8 @@ fn get_pipeline(domain: String) -> Vec { { "_cursor.to": null }, { "_cursor.to": { "$exists": false } } ], - "$expr": { "$eq": ["$id", "$$id"] } + "$expr": { "$eq": ["$id", "$$id"] }, + "data": { "$ne": null } } }, doc! 
{ From d4ad2f52eb49389b9e0a4770a04be205da997563 Mon Sep 17 00:00:00 2001 From: Thomas Marchand Date: Sat, 23 Dec 2023 10:16:35 +0100 Subject: [PATCH 14/18] feat: add migrated field --- src/models.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/models.rs b/src/models.rs index b002bab..0659360 100644 --- a/src/models.rs +++ b/src/models.rs @@ -52,6 +52,7 @@ pub struct IdentityData { #[derive(Serialize, Deserialize, Debug)] pub struct Domain { pub domain: String, + pub migrated: bool, pub root: bool, pub creation_date: u64, pub expiry: Option, From 56ab9e7401302f39560668c8ead950c90e749e0c Mon Sep 17 00:00:00 2001 From: Thomas Marchand Date: Sat, 23 Dec 2023 10:36:54 +0100 Subject: [PATCH 15/18] fix: missing field migrated --- src/endpoints/domain_to_data.rs | 1 + src/endpoints/id_to_data.rs | 1 + 2 files changed, 2 insertions(+) diff --git a/src/endpoints/domain_to_data.rs b/src/endpoints/domain_to_data.rs index cb4e786..2f9bf91 100644 --- a/src/endpoints/domain_to_data.rs +++ b/src/endpoints/domain_to_data.rs @@ -148,6 +148,7 @@ fn get_pipeline(domain: String) -> Vec { "creation_date": "$id_data.creation_date", "domain": { "domain": "$domain", + "migrated" : "$migrated", "root": "$root", "creation_date": "$creation_date", "expiry": "$expiry", diff --git a/src/endpoints/id_to_data.rs b/src/endpoints/id_to_data.rs index 6f48176..9a313c7 100644 --- a/src/endpoints/id_to_data.rs +++ b/src/endpoints/id_to_data.rs @@ -138,6 +138,7 @@ fn get_pipeline(id: String) -> Vec { "domain": { "domain": {"$arrayElemAt": ["$domain_data.domain", 0]}, "root": {"$arrayElemAt": ["$domain_data.root", 0]}, + "migrated" : {"$arrayElemAt": ["$domain_data.migrated", 0]}, "creation_date": {"$arrayElemAt": ["$domain_data.creation_date", 0]}, "expiry": {"$arrayElemAt": ["$domain_data.expiry", 0]}, "resolver": {"$arrayElemAt": ["$domain_data.resolver", 0]}, From c6c66cf9a5be75704c1bc668cabb1aca6e68180b Mon Sep 17 00:00:00 2001 From: Thomas Marchand Date: Wed, 3 Jan 2024 11:39:02 +0000 
Subject: [PATCH 16/18] feat: add extended verifier data in IdentityData --- src/endpoints/domain_to_data.rs | 32 +++++++++++++++++++++++++++++- src/endpoints/id_to_data.rs | 25 ++++++++++++++++++++++- src/models.rs | 35 ++++++++++++++++++++++++++++++++- 3 files changed, 89 insertions(+), 3 deletions(-) diff --git a/src/endpoints/domain_to_data.rs b/src/endpoints/domain_to_data.rs index 2f9bf91..98ad2cd 100644 --- a/src/endpoints/domain_to_data.rs +++ b/src/endpoints/domain_to_data.rs @@ -139,6 +139,35 @@ fn get_pipeline(domain: String) -> Vec { "as": "verifier_data" } }, + doc! { + "$lookup": { + "from": "id_verifier_data", + "let": { + "id": "$id" + }, + "pipeline": [ + doc! { + "$match": { + "$or": [ + { "_cursor.to": null }, + { "_cursor.to": { "$exists": false } } + ], + "$expr": { "$eq": ["$id", "$$id"] }, + "extended_data": { "$ne": null } + } + }, + doc! { + "$project": { + "_id": 0, + "field": 1, + "extended_data": 1, + "verifier": 1 + } + } + ], + "as": "extended_verifier_data" + } + }, doc! { "$project": { "_id": 0, @@ -157,7 +186,8 @@ fn get_pipeline(domain: String) -> Vec { "rev_address": "$rev_address" }, "user_data": 1, - "verifier_data": 1 + "verifier_data": 1, + "extended_verifier_data" : 1 } }, ] diff --git a/src/endpoints/id_to_data.rs b/src/endpoints/id_to_data.rs index 9a313c7..1e1e0ec 100644 --- a/src/endpoints/id_to_data.rs +++ b/src/endpoints/id_to_data.rs @@ -128,6 +128,28 @@ fn get_pipeline(id: String) -> Vec { "as": "verifier_data" } }, + doc! { + "$lookup": { + "from": "id_verifier_data", + "let": {"id": "$id"}, + "pipeline": [ + doc! { + "$match": { + "$or": [ + {"_cursor.to": null}, + {"_cursor.to": {"$exists": false}} + ], + "$expr": {"$eq": ["$id", "$$id"]}, + "extended_data": { "$ne": null } + } + }, + doc! { + "$project": {"_id": 0, "field": 1, "extended_data": 1, "verifier": 1} + } + ], + "as": "extended_verifier_data" + } + }, doc! 
{ "$project": { "_id": 0, @@ -146,7 +168,8 @@ fn get_pipeline(id: String) -> Vec { "rev_address": {"$arrayElemAt": ["$domain_data.rev_address", 0]} }, "user_data": 1, - "verifier_data": 1 + "verifier_data": 1, + "extended_verifier_data": 1 } }, ] diff --git a/src/models.rs b/src/models.rs index 0659360..3a1153c 100644 --- a/src/models.rs +++ b/src/models.rs @@ -2,7 +2,7 @@ use mongodb::Database; use starknet::core::types::FieldElement; use crate::{config::Config, utils::to_hex}; -use serde::{Deserialize, Serialize, Serializer}; +use serde::{ser::SerializeSeq, Deserialize, Serialize, Serializer}; use std::collections::HashMap; pub struct AppState { @@ -36,6 +36,28 @@ where } } +fn serialize_vec_felt(vec: &Vec, serializer: S) -> Result +where + S: Serializer, +{ + let mut seq = serializer.serialize_seq(Some(vec.len()))?; + for element in vec { + seq.serialize_element(&SerializedFelt(element))?; + } + seq.end() +} + +struct SerializedFelt<'a>(&'a FieldElement); + +impl<'a> Serialize for SerializedFelt<'a> { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serialize_felt(self.0, serializer) + } +} + #[derive(Serialize, Deserialize, Debug)] pub struct IdentityData { #[serde(serialize_with = "serialize_felt")] @@ -47,6 +69,7 @@ pub struct IdentityData { pub domain: Option, pub user_data: Vec, pub verifier_data: Vec, + pub extended_verifier_data: Vec, } #[derive(Serialize, Deserialize, Debug)] @@ -82,6 +105,16 @@ pub struct VerifierData { pub data: FieldElement, } +#[derive(Serialize, Deserialize, Debug)] +pub struct ExtendedVerifierData { + #[serde(serialize_with = "serialize_felt")] + pub verifier: FieldElement, + #[serde(serialize_with = "serialize_felt")] + pub field: FieldElement, + #[serde(serialize_with = "serialize_vec_felt")] + pub extended_data: Vec, +} + #[derive(Deserialize, Debug)] pub struct State { pub rate: f32, From bfa943f2fb72de9df6e147460a7d22ad74cf9d5d Mon Sep 17 00:00:00 2001 From: Thomas Marchand Date: Wed, 3 Jan 2024 
14:25:55 +0000 Subject: [PATCH 17/18] fix: building --- Cargo.toml | 5 +++-- src/endpoints/crosschain/solana/claim.rs | 10 ++++------ 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 8ab91b6..165e5a0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,7 +6,8 @@ edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -starknet = { git = "https://github.com/xJonathanLEI/starknet-rs", rev = "0df9ad3417a5f10d486348737fe75659ca4bcfdc" } +starknet = { git = "https://github.com/xJonathanLEI/starknet-rs", rev = "c974e5cb42e8d8344cee910b76005ec46b4dd3ed" } +starknet-id = { git = "https://github.com/starknet-id/starknetid.rs", rev = "2b30c2453b96789a628c86d2edebb1023fa2e77d" } axum = "0.6.18" futures = "0.3.28" mongodb = "2.5.0" @@ -16,7 +17,7 @@ tokio = { version = "1.28.1", features = ["macros", "rt-multi-thread"] } toml = "0.7.4" tower-http = { version = "0.4.0", features = ["cors"] } chrono = "0.4.24" -reqwest = "0.11.20" +reqwest = { version = "0.11.20", features = ["json"] } ark-ff = "0.4.2" hex = "0.4.3" error-stack = "0.4.1" diff --git a/src/endpoints/crosschain/solana/claim.rs b/src/endpoints/crosschain/solana/claim.rs index d9f8f6d..d592531 100644 --- a/src/endpoints/crosschain/solana/claim.rs +++ b/src/endpoints/crosschain/solana/claim.rs @@ -13,13 +13,11 @@ use ed25519_dalek::{Signature, Verifier, VerifyingKey}; use mongodb::bson::doc; use serde::{Deserialize, Serialize}; use serde_json::json; -use starknet::{ - core::{ - crypto::{ecdsa_sign, pedersen_hash}, - types::FieldElement, - }, - id::encode, +use starknet::core::{ + crypto::{ecdsa_sign, pedersen_hash}, + types::FieldElement, }; +use starknet_id::encode; #[derive(Deserialize, Debug, Clone)] pub struct SigQuery { From d3ec41eac6a24e0e18d122c22f996e8603b538c5 Mon Sep 17 00:00:00 2001 From: Thomas Marchand Date: Fri, 19 Jan 2024 11:55:30 +0000 Subject: [PATCH 18/18] refactor: use axum_auto_routes --- 
Cargo.toml | 2 + src/endpoints/addr_has_rev.rs | 2 + src/endpoints/addr_to_available_ids.rs | 2 + src/endpoints/addr_to_domain.rs | 2 + src/endpoints/addr_to_external_domains.rs | 6 + src/endpoints/addr_to_full_ids.rs | 2 + src/endpoints/addr_to_token_id.rs | 2 + src/endpoints/addrs_to_domains.rs | 2 + src/endpoints/crosschain/solana/claim.rs | 6 + src/endpoints/data_to_ids.rs | 2 + src/endpoints/domain_to_addr.rs | 2 + src/endpoints/domain_to_data.rs | 2 + src/endpoints/galxe/verify.rs | 2 + src/endpoints/id_to_data.rs | 2 + src/endpoints/referral/add_click.rs | 2 + src/endpoints/referral/click_count.rs | 2 + src/endpoints/referral/revenue.rs | 2 + src/endpoints/referral/sales_count.rs | 2 + src/endpoints/renewal/get_metahash.rs | 2 + .../renewal/get_non_subscribed_domains.rs | 6 + src/endpoints/renewal/get_renewal_data.rs | 6 + src/endpoints/starkscan/fetch_nfts.rs | 2 + src/endpoints/stats/count_addrs.rs | 2 + src/endpoints/stats/count_club_domains.rs | 2 + src/endpoints/stats/count_created.rs | 2 + src/endpoints/stats/count_domains.rs | 2 + src/endpoints/stats/count_ids.rs | 2 + src/endpoints/stats/count_renewed.rs | 2 + src/endpoints/stats/expired_club_domains.rs | 6 + src/endpoints/uri.rs | 2 + src/main.rs | 118 +++--------------- src/utils.rs | 29 ++++- 32 files changed, 125 insertions(+), 102 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 165e5a0..90eb708 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,6 +8,7 @@ edition = "2021" [dependencies] starknet = { git = "https://github.com/xJonathanLEI/starknet-rs", rev = "c974e5cb42e8d8344cee910b76005ec46b4dd3ed" } starknet-id = { git = "https://github.com/starknet-id/starknetid.rs", rev = "2b30c2453b96789a628c86d2edebb1023fa2e77d" } +axum_auto_routes = { git = "https://github.com/Th0rgal/axum_auto_routes.git", rev = "f9e1d2083e887cd264642359c4aa851938da6f09" } axum = "0.6.18" futures = "0.3.28" mongodb = "2.5.0" @@ -26,3 +27,4 @@ lazy_static = "1.4.0" regex = "1.10.2" bs58 = "0.5.0" ed25519-dalek = 
"2.1.0" +ctor = "0.2.6" diff --git a/src/endpoints/addr_has_rev.rs b/src/endpoints/addr_has_rev.rs index 88abcc9..cddb10b 100644 --- a/src/endpoints/addr_has_rev.rs +++ b/src/endpoints/addr_has_rev.rs @@ -8,6 +8,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use mongodb::bson::doc; use serde::{Deserialize, Serialize}; use starknet::core::types::FieldElement; @@ -23,6 +24,7 @@ pub struct AddrHasRevQuery { addr: FieldElement, } +#[route(get, "/addr_has_rev", crate::endpoints::addr_has_rev)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/addr_to_available_ids.rs b/src/endpoints/addr_to_available_ids.rs index c920216..8fc6e26 100644 --- a/src/endpoints/addr_to_available_ids.rs +++ b/src/endpoints/addr_to_available_ids.rs @@ -9,6 +9,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::bson::doc; use serde::{Deserialize, Serialize}; @@ -25,6 +26,7 @@ pub struct AddrQuery { addr: FieldElement, } +#[route(get, "/addr_to_available_ids", crate::endpoints::addr_to_available_ids)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/addr_to_domain.rs b/src/endpoints/addr_to_domain.rs index 1b61f75..e172fc4 100644 --- a/src/endpoints/addr_to_domain.rs +++ b/src/endpoints/addr_to_domain.rs @@ -9,6 +9,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::{ bson::{doc, Document}, @@ -53,6 +54,7 @@ async fn aggregate_data( read_cursor(cursor).await } +#[route(get, "/addr_to_domain", crate::endpoints::addr_to_domain)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/addr_to_external_domains.rs b/src/endpoints/addr_to_external_domains.rs index 0a953e4..637f1c3 100644 --- a/src/endpoints/addr_to_external_domains.rs +++ b/src/endpoints/addr_to_external_domains.rs @@ -7,6 +7,7 @@ use axum::{ 
http::{HeaderMap, HeaderValue, StatusCode}, response::IntoResponse, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::bson::doc; use serde::{Deserialize, Serialize}; @@ -23,6 +24,11 @@ pub struct DomainQuery { addr: FieldElement, } +#[route( + get, + "/addr_to_external_domains", + crate::endpoints::addr_to_external_domains +)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/addr_to_full_ids.rs b/src/endpoints/addr_to_full_ids.rs index 0bf7dc2..7002295 100644 --- a/src/endpoints/addr_to_full_ids.rs +++ b/src/endpoints/addr_to_full_ids.rs @@ -8,6 +8,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use futures::future::join_all; use futures::stream::StreamExt; use mongodb::{ @@ -52,6 +53,7 @@ pub struct FullIdResponse { full_ids: Vec, } +#[route(get, "/addr_to_full_ids", crate::endpoints::addr_to_full_ids)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/addr_to_token_id.rs b/src/endpoints/addr_to_token_id.rs index 67f4465..c94de63 100644 --- a/src/endpoints/addr_to_token_id.rs +++ b/src/endpoints/addr_to_token_id.rs @@ -7,6 +7,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use mongodb::bson::{doc, Bson}; use serde::{Deserialize, Serialize}; use starknet::core::types::FieldElement; @@ -22,6 +23,7 @@ pub struct TokenIdQuery { addr: FieldElement, } +#[route(get, "/addr_to_token_id", crate::endpoints::addr_to_token_id)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/addrs_to_domains.rs b/src/endpoints/addrs_to_domains.rs index 71e46ce..192e12b 100644 --- a/src/endpoints/addrs_to_domains.rs +++ b/src/endpoints/addrs_to_domains.rs @@ -5,6 +5,7 @@ use axum::{ http::StatusCode, response::IntoResponse, }; +use axum_auto_routes::route; use futures::stream::StreamExt; use mongodb::{ bson::{doc, Document}, @@ -56,6 +57,7 
@@ async fn run_aggregation_pipeline( process_cursor(cursor, results).await } +#[route(post, "/addrs_to_domains", crate::endpoints::addrs_to_domains)] pub async fn handler( State(state): State>, Json(query): Json, diff --git a/src/endpoints/crosschain/solana/claim.rs b/src/endpoints/crosschain/solana/claim.rs index d592531..4b00e9c 100644 --- a/src/endpoints/crosschain/solana/claim.rs +++ b/src/endpoints/crosschain/solana/claim.rs @@ -8,6 +8,7 @@ use crate::{ utils::{get_error, to_hex}, }; use axum::{extract::State, http::StatusCode, response::IntoResponse, Json}; +use axum_auto_routes::route; use chrono::{Duration, Utc}; use ed25519_dalek::{Signature, Verifier, VerifyingKey}; use mongodb::bson::doc; @@ -64,6 +65,11 @@ lazy_static::lazy_static! { static ref SOL_SUBDOMAIN_STR: FieldElement = FieldElement::from_dec_str("9145722242464647959622012987758").unwrap(); } +#[route( + post, + "/crosschain/solana/claim", + crate::endpoints::crosschain::solana::claim +)] pub async fn handler( State(state): State>, Json(query): Json, diff --git a/src/endpoints/data_to_ids.rs b/src/endpoints/data_to_ids.rs index 2e3612f..8aeead0 100644 --- a/src/endpoints/data_to_ids.rs +++ b/src/endpoints/data_to_ids.rs @@ -7,6 +7,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use mongodb::bson::doc; use serde::{Deserialize, Serialize}; use starknet::core::types::FieldElement; @@ -24,6 +25,7 @@ pub struct StarknetIdQuery { data: FieldElement, } +#[route(get, "/data_to_ids", crate::endpoints::data_to_ids)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/domain_to_addr.rs b/src/endpoints/domain_to_addr.rs index 997ac68..63c67cc 100644 --- a/src/endpoints/domain_to_addr.rs +++ b/src/endpoints/domain_to_addr.rs @@ -7,6 +7,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use futures::StreamExt; use 
mongodb::{bson::doc, options::AggregateOptions}; use serde::{Deserialize, Serialize}; @@ -23,6 +24,7 @@ pub struct DomainQuery { domain: String, } +#[route(get, "/domain_to_addr", crate::endpoints::domain_to_addr)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/domain_to_data.rs b/src/endpoints/domain_to_data.rs index 98ad2cd..655f26f 100644 --- a/src/endpoints/domain_to_data.rs +++ b/src/endpoints/domain_to_data.rs @@ -7,6 +7,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::bson::{doc, from_bson, Bson, Document}; use serde::Deserialize; @@ -17,6 +18,7 @@ pub struct DomainQuery { domain: String, } +#[route(get, "/domain_to_data", crate::endpoints::domain_to_data)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/galxe/verify.rs b/src/endpoints/galxe/verify.rs index c972732..c3dcc9b 100644 --- a/src/endpoints/galxe/verify.rs +++ b/src/endpoints/galxe/verify.rs @@ -2,6 +2,7 @@ use std::sync::Arc; use crate::models::AppState; use axum::{extract::State, http::StatusCode, response::IntoResponse, Json}; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::{bson::doc, bson::Document}; use serde::{Deserialize, Serialize}; @@ -16,6 +17,7 @@ pub struct SimpleResponse { result: &'static str, } +#[route(post, "/galxe/verify", crate::endpoints::galxe::verify)] pub async fn handler( State(state): State>, Json(query): Json, diff --git a/src/endpoints/id_to_data.rs b/src/endpoints/id_to_data.rs index 1e1e0ec..8339409 100644 --- a/src/endpoints/id_to_data.rs +++ b/src/endpoints/id_to_data.rs @@ -7,6 +7,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::bson::{doc, from_bson, Bson, Document}; use serde::Deserialize; @@ -18,6 +19,7 @@ pub struct IdQuery { id: 
FieldElement, } +#[route(get, "/id_to_data", crate::endpoints::id_to_data)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/referral/add_click.rs b/src/endpoints/referral/add_click.rs index f73244b..4ec6cb5 100644 --- a/src/endpoints/referral/add_click.rs +++ b/src/endpoints/referral/add_click.rs @@ -4,6 +4,7 @@ use axum::{ http::StatusCode, response::IntoResponse, }; +use axum_auto_routes::route; use chrono::Utc; use mongodb::{ bson::{doc, DateTime as BsonDateTime}, @@ -18,6 +19,7 @@ pub struct AddClickQuery { sponsor_addr: FieldElement, } +#[route(post, "/referral/add_click", crate::endpoints::referral::add_click)] pub async fn handler( State(state): State>, Json(query): Json, diff --git a/src/endpoints/referral/click_count.rs b/src/endpoints/referral/click_count.rs index 4ff7b01..c457091 100644 --- a/src/endpoints/referral/click_count.rs +++ b/src/endpoints/referral/click_count.rs @@ -4,6 +4,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use chrono::{DateTime, NaiveDateTime, Utc}; use futures::StreamExt; use mongodb::bson::{doc, Bson, DateTime as BsonDateTime}; @@ -22,6 +23,7 @@ pub struct IdQuery { spacing: i64, } +#[route(get, "/referral/click_count", crate::endpoints::referral::click_count)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/referral/revenue.rs b/src/endpoints/referral/revenue.rs index 35dd284..680f11f 100644 --- a/src/endpoints/referral/revenue.rs +++ b/src/endpoints/referral/revenue.rs @@ -4,6 +4,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use chrono::{DateTime, NaiveDateTime, Utc}; use futures::StreamExt; use mongodb::bson::{doc, Bson, DateTime as BsonDateTime}; @@ -22,6 +23,7 @@ pub struct IdQuery { spacing: i64, } +#[route(get, "/referral/revenue", crate::endpoints::referral::revenue)] pub async 
fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/referral/sales_count.rs b/src/endpoints/referral/sales_count.rs index 2e84d56..f2fe2c9 100644 --- a/src/endpoints/referral/sales_count.rs +++ b/src/endpoints/referral/sales_count.rs @@ -4,6 +4,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use chrono::{DateTime, NaiveDateTime, Utc}; use futures::StreamExt; use mongodb::bson::{doc, Bson, DateTime as BsonDateTime}; @@ -22,6 +23,7 @@ pub struct IdQuery { spacing: i64, } +#[route(get, "/referral/sales_count", crate::endpoints::referral::sales_count)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/renewal/get_metahash.rs b/src/endpoints/renewal/get_metahash.rs index b8a5dc3..c571076 100644 --- a/src/endpoints/renewal/get_metahash.rs +++ b/src/endpoints/renewal/get_metahash.rs @@ -7,6 +7,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use futures::TryStreamExt; use mongodb::bson::{doc, Bson}; use serde::{Deserialize, Serialize}; @@ -24,6 +25,7 @@ pub struct GetMetaHashQuery { addr: FieldElement, } +#[route(get, "/renewal/get_metahash", crate::endpoints::renewal::get_metahash)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/renewal/get_non_subscribed_domains.rs b/src/endpoints/renewal/get_non_subscribed_domains.rs index dc7205e..3edaeb0 100644 --- a/src/endpoints/renewal/get_non_subscribed_domains.rs +++ b/src/endpoints/renewal/get_non_subscribed_domains.rs @@ -7,6 +7,7 @@ use axum::{ http::StatusCode, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::{bson::doc, options::AggregateOptions}; use regex::Regex; @@ -23,6 +24,11 @@ lazy_static::lazy_static! 
{ static ref DOMAIN_REGEX: Regex = Regex::new(r"^[^.]+\.stark$").unwrap(); } +#[route( + get, + "/renewal/get_non_subscribed_domains", + crate::endpoints::renewal::get_non_subscribed_domains +)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/renewal/get_renewal_data.rs b/src/endpoints/renewal/get_renewal_data.rs index 10ef5f7..e669035 100644 --- a/src/endpoints/renewal/get_renewal_data.rs +++ b/src/endpoints/renewal/get_renewal_data.rs @@ -7,6 +7,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::{bson::doc, options::FindOptions}; use serde::{Deserialize, Serialize}; @@ -24,6 +25,11 @@ pub struct StarknetIdQuery { domain: String, } +#[route( + get, + "/renewal/get_renewal_data", + crate::endpoints::renewal::get_renewal_data +)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/starkscan/fetch_nfts.rs b/src/endpoints/starkscan/fetch_nfts.rs index 8a6c378..a034014 100644 --- a/src/endpoints/starkscan/fetch_nfts.rs +++ b/src/endpoints/starkscan/fetch_nfts.rs @@ -7,6 +7,7 @@ use axum::{ http::StatusCode, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use mongodb::bson::doc; use serde::{Deserialize, Serialize}; use serde_json::Value; @@ -44,6 +45,7 @@ pub struct StarkscanNftProps { minted_at_timestamp: i64, } +#[route(get, "/starkscan/fetch_nfts", crate::endpoints::starkscan::fetch_nfts)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/stats/count_addrs.rs b/src/endpoints/stats/count_addrs.rs index 31a4605..a11e2c4 100644 --- a/src/endpoints/stats/count_addrs.rs +++ b/src/endpoints/stats/count_addrs.rs @@ -5,6 +5,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::bson::{doc, Bson}; use serde::{Deserialize, Serialize}; @@ -20,6 +21,7 @@ pub struct 
CountAddrsQuery { since: i64, } +#[route(get, "/stats/count_addrs", crate::endpoints::stats::count_addrs)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/stats/count_club_domains.rs b/src/endpoints/stats/count_club_domains.rs index 70062ff..87278da 100644 --- a/src/endpoints/stats/count_club_domains.rs +++ b/src/endpoints/stats/count_club_domains.rs @@ -5,6 +5,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use futures::TryStreamExt; use mongodb::bson::{self, doc, Bson}; use serde::{Deserialize, Serialize}; @@ -22,6 +23,7 @@ pub struct CountClubDomainsQuery { since: i64, } +#[route(get, "/stats/count_club_domains", crate::endpoints::stats::count_club_domains)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/stats/count_created.rs b/src/endpoints/stats/count_created.rs index 1b89b14..5c85b12 100644 --- a/src/endpoints/stats/count_created.rs +++ b/src/endpoints/stats/count_created.rs @@ -5,6 +5,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::bson::{doc, Bson}; use serde::{Deserialize, Serialize}; @@ -23,6 +24,7 @@ pub struct CountCreatedQuery { segments: i64, } +#[route(get, "/stats/count_created", crate::endpoints::stats::count_created)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/stats/count_domains.rs b/src/endpoints/stats/count_domains.rs index 93ed665..6ca89f0 100644 --- a/src/endpoints/stats/count_domains.rs +++ b/src/endpoints/stats/count_domains.rs @@ -5,6 +5,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use mongodb::bson::{doc, Bson}; use serde::{Deserialize, Serialize}; use std::sync::Arc; @@ -19,6 +20,7 @@ pub struct CountDomainsQuery { since: i64, } +#[route(get, "/stats/count_domains", crate::endpoints::stats::count_domains)] pub async fn handler( State(state): State>, Query(query): 
Query, diff --git a/src/endpoints/stats/count_ids.rs b/src/endpoints/stats/count_ids.rs index 93ed665..381410e 100644 --- a/src/endpoints/stats/count_ids.rs +++ b/src/endpoints/stats/count_ids.rs @@ -5,6 +5,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use mongodb::bson::{doc, Bson}; use serde::{Deserialize, Serialize}; use std::sync::Arc; @@ -19,6 +20,7 @@ pub struct CountDomainsQuery { since: i64, } +#[route(get, "/stats/count_ids", crate::endpoints::stats::count_ids)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/stats/count_renewed.rs b/src/endpoints/stats/count_renewed.rs index c905199..46ec8fc 100644 --- a/src/endpoints/stats/count_renewed.rs +++ b/src/endpoints/stats/count_renewed.rs @@ -5,6 +5,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::bson::{doc, Bson}; use serde::{Deserialize, Serialize}; @@ -23,6 +24,7 @@ pub struct CountRenewedQuery { segments: i64, } +#[route(get, "/stats/count_renewed", crate::endpoints::stats::count_renewed)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/stats/expired_club_domains.rs b/src/endpoints/stats/expired_club_domains.rs index 39fb3b0..74bf647 100644 --- a/src/endpoints/stats/expired_club_domains.rs +++ b/src/endpoints/stats/expired_club_domains.rs @@ -5,6 +5,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::{ bson::{doc, Bson}, @@ -19,6 +20,11 @@ pub struct CountClubDomainsData { count: i32, } +#[route( + get, + "/stats/expired_club_domains", + crate::endpoints::stats::expired_club_domains +)] pub async fn handler(State(state): State>) -> impl IntoResponse { let mut headers = HeaderMap::new(); headers.insert("Cache-Control", HeaderValue::from_static("max-age=60")); diff --git a/src/endpoints/uri.rs b/src/endpoints/uri.rs index 7536b91..9a11d71 100644 --- 
a/src/endpoints/uri.rs +++ b/src/endpoints/uri.rs @@ -7,6 +7,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use chrono::NaiveDateTime; use futures::StreamExt; use mongodb::bson::doc; @@ -47,6 +48,7 @@ const NFT_PP_CONTRACT: &'static str = const NFT_PP_ID: &'static str = "0x00000000000000000000000000000000000000000000006e66745f70705f6964"; +#[route(get, "/uri", crate::endpoints::uri)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/main.rs b/src/main.rs index 054eabb..f240bd3 100644 --- a/src/main.rs +++ b/src/main.rs @@ -6,17 +6,19 @@ mod models; mod resolving; mod tax; mod utils; -use axum::{ - http::StatusCode, - routing::{get, post}, - Router, -}; +use axum::{http::StatusCode, Router}; +use axum_auto_routes::route; use mongodb::{bson::doc, options::ClientOptions, Client}; -use std::net::SocketAddr; use std::sync::Arc; +use std::{net::SocketAddr, sync::Mutex}; +use utils::WithState; use tower_http::cors::{Any, CorsLayer}; +lazy_static::lazy_static! 
{ + pub static ref ROUTE_REGISTRY: Mutex>> = Mutex::new(Vec::new()); +} + #[tokio::main] async fn main() { println!("starknetid_server: starting v{}", env!("CARGO_PKG_VERSION")); @@ -58,101 +60,14 @@ async fn main() { } let cors = CorsLayer::new().allow_headers(Any).allow_origin(Any); - let app = Router::new() - .route("/", get(root)) - .route( - "/addr_to_available_ids", - get(endpoints::addr_to_available_ids::handler), - ) - .route("/addr_to_domain", get(endpoints::addr_to_domain::handler)) - .route( - "/addr_to_external_domains", - get(endpoints::addr_to_external_domains::handler), - ) - .route( - "/addr_to_full_ids", - get(endpoints::addr_to_full_ids::handler), - ) - .route( - "/addr_to_token_id", - get(endpoints::addr_to_token_id::handler), - ) - .route( - "/addrs_to_domains", - post(endpoints::addrs_to_domains::handler), - ) - .route("/data_to_ids", get(endpoints::data_to_ids::handler)) - .route("/domain_to_addr", get(endpoints::domain_to_addr::handler)) - .route("/domain_to_data", get(endpoints::domain_to_data::handler)) - .route("/addr_has_rev", get(endpoints::addr_has_rev::handler)) - .route("/id_to_data", get(endpoints::id_to_data::handler)) - .route("/uri", get(endpoints::uri::handler)) - .route( - "/referral/add_click", - post(endpoints::referral::add_click::handler), - ) - .route( - "/referral/revenue", - get(endpoints::referral::revenue::handler), - ) - .route( - "/referral/sales_count", - get(endpoints::referral::sales_count::handler), - ) - .route( - "/referral/click_count", - get(endpoints::referral::click_count::handler), - ) - .route( - "/stats/count_addrs", - get(endpoints::stats::count_addrs::handler), - ) - .route( - "/stats/count_club_domains", - get(endpoints::stats::count_club_domains::handler), - ) - .route( - "/stats/count_domains", - get(endpoints::stats::count_domains::handler), - ) - .route( - "/stats/count_ids", - get(endpoints::stats::count_ids::handler), - ) - .route( - "/stats/count_created", - 
get(endpoints::stats::count_created::handler), - ) - .route( - "/stats/expired_club_domains", - get(endpoints::stats::expired_club_domains::handler), - ) - .route( - "/stats/count_renewed", - get(endpoints::stats::count_renewed::handler), - ) - .route( - "/starkscan/fetch_nfts", - get(endpoints::starkscan::fetch_nfts::handler), - ) - .route( - "/renewal/get_renewal_data", - get(endpoints::renewal::get_renewal_data::handler), - ) - .route( - "/renewal/get_metahash", - get(endpoints::renewal::get_metahash::handler), - ) - .route( - "/renewal/get_non_subscribed_domains", - get(endpoints::renewal::get_non_subscribed_domains::handler), - ) - .route("/galxe/verify", post(endpoints::galxe::verify::handler)) - .route( - "/crosschain/solana/claim", - post(endpoints::crosschain::solana::claim::handler), - ) - .with_state(shared_state) + let app = ROUTE_REGISTRY + .lock() + .unwrap() + .clone() + .into_iter() + .fold(Router::new().with_state(shared_state.clone()), |acc, r| { + acc.merge(r.to_router(shared_state.clone())) + }) .layer(cors); let addr = SocketAddr::from(([0, 0, 0, 0], conf.server.port)); @@ -163,6 +78,7 @@ async fn main() { .unwrap(); } +#[route(get, "/")] async fn root() -> (StatusCode, String) { ( StatusCode::ACCEPTED, diff --git a/src/utils.rs b/src/utils.rs index 7820a01..01e3bfa 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1,12 +1,16 @@ use ark_ff::{biginteger::BigInteger256, BigInteger}; use axum::{ + body::Body, http::StatusCode, response::{IntoResponse, Response}, + Router, }; use serde::Serialize; use serde_json::Value; use starknet::core::types::FieldElement; -use std::fmt::Write; +use std::{fmt::Write, sync::Arc}; + +use crate::models::AppState; #[derive(Serialize)] pub struct ErrorMessage { @@ -102,3 +106,26 @@ pub async fn fetch_img_url( json.get("image_url") .and_then(|v| v.as_str().map(ToString::to_string)) } + +// required for axum_auto_routes +pub trait WithState: Send { + fn to_router(self: Box, shared_state: Arc) -> Router; + + fn 
box_clone(&self) -> Box; +} + +impl WithState for Router, Body> { + fn to_router(self: Box, shared_state: Arc) -> Router { + self.with_state(shared_state) + } + + fn box_clone(&self) -> Box { + Box::new((*self).clone()) + } +} + +impl Clone for Box { + fn clone(&self) -> Box { + self.box_clone() + } +}