diff --git a/Cargo.toml b/Cargo.toml index f1fb8a5..90eb708 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,7 +6,9 @@ edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -starknet = { git = "https://github.com/Th0rgal/starknet-rs.git", branch = "feat/starknet-id" } +starknet = { git = "https://github.com/xJonathanLEI/starknet-rs", rev = "c974e5cb42e8d8344cee910b76005ec46b4dd3ed" } +starknet-id = { git = "https://github.com/starknet-id/starknetid.rs", rev = "2b30c2453b96789a628c86d2edebb1023fa2e77d" } +axum_auto_routes = { git = "https://github.com/Th0rgal/axum_auto_routes.git", rev = "f9e1d2083e887cd264642359c4aa851938da6f09" } axum = "0.6.18" futures = "0.3.28" mongodb = "2.5.0" @@ -16,10 +18,13 @@ tokio = { version = "1.28.1", features = ["macros", "rt-multi-thread"] } toml = "0.7.4" tower-http = { version = "0.4.0", features = ["cors"] } chrono = "0.4.24" -reqwest = "0.11.20" +reqwest = { version = "0.11.20", features = ["json"] } ark-ff = "0.4.2" hex = "0.4.3" +error-stack = "0.4.1" +anyhow = "1.0.75" lazy_static = "1.4.0" regex = "1.10.2" bs58 = "0.5.0" ed25519-dalek = "2.1.0" +ctor = "0.2.6" diff --git a/src/endpoints/addr_has_rev.rs b/src/endpoints/addr_has_rev.rs new file mode 100644 index 0000000..cddb10b --- /dev/null +++ b/src/endpoints/addr_has_rev.rs @@ -0,0 +1,56 @@ +use crate::{ + models::AppState, + utils::{get_error, to_hex}, +}; +use axum::{ + extract::{Query, State}, + http::StatusCode, + response::IntoResponse, + Json, +}; +use axum_auto_routes::route; +use mongodb::bson::doc; +use serde::{Deserialize, Serialize}; +use starknet::core::types::FieldElement; +use std::sync::Arc; + +#[derive(Serialize)] +pub struct AddrToDomainData { + has_rev: bool, +} + +#[derive(Deserialize)] +pub struct AddrHasRevQuery { + addr: FieldElement, +} + +#[route(get, "/addr_has_rev", crate::endpoints::addr_has_rev)] +pub async fn handler( + State(state): State<Arc<AppState>>, + Query(query): Query<AddrHasRevQuery>, +) -> impl IntoResponse { + let domains = state + .starknetid_db + .collection::<mongodb::bson::Document>("domains"); + let hex_addr = to_hex(&query.addr); + let document = domains + .find_one( + doc! { + "_cursor.to" : null, + "rev_address" : hex_addr + }, + None, + ) + .await; + + match document { + Ok(doc) => ( + StatusCode::OK, + Json(AddrToDomainData { + has_rev: doc.is_some(), + }), + ) + .into_response(), + Err(_) => get_error("Error while fetching from database".to_string()), + } +} diff --git a/src/endpoints/addr_to_available_ids.rs b/src/endpoints/addr_to_available_ids.rs index e13bf95..8fc6e26 100644 --- a/src/endpoints/addr_to_available_ids.rs +++ b/src/endpoints/addr_to_available_ids.rs @@ -9,6 +9,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::bson::doc; use serde::{Deserialize, Serialize}; @@ -25,17 +26,25 @@ pub struct AddrQuery { addr: FieldElement, } +#[route(get, "/addr_to_available_ids", crate::endpoints::addr_to_available_ids)] pub async fn handler( State(state): State<Arc<AppState>>, Query(query): Query<AddrQuery>, ) -> impl IntoResponse { - let starknet_ids = state.starknetid_db.collection::<mongodb::bson::Document>("id_owners"); - let domains = state.starknetid_db.collection::<mongodb::bson::Document>("domains"); + let starknet_ids = state + .starknetid_db + .collection::<mongodb::bson::Document>("id_owners"); + let domains = state + .starknetid_db + .collection::<mongodb::bson::Document>("domains"); let addr = to_hex(&query.addr); let documents = starknet_ids .find( doc!
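Note: with reqwest's `json` feature enabled above, the new `/addr_has_rev` endpoint can be smoke-tested with a few lines of client code. A minimal sketch; the base URL and address are placeholders, not values from this diff:

```rust
use serde::Deserialize;

#[derive(Deserialize)]
struct AddrHasRevResponse {
    has_rev: bool,
}

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    // hypothetical local instance of the server built from this PR
    let res: AddrHasRevResponse = reqwest::Client::new()
        .get("http://localhost:8080/addr_has_rev")
        .query(&[("addr", "0x123")])
        .send()
        .await?
        .json() // requires the `json` feature added to reqwest above
        .await?;
    println!("has reverse record: {}", res.has_rev);
    Ok(())
}
```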
{ "owner": &addr, + "id" : { + "$ne" : null + }, "_cursor.to": null, }, None, diff --git a/src/endpoints/addr_to_domain.rs b/src/endpoints/addr_to_domain.rs index 89d4ad0..e172fc4 100644 --- a/src/endpoints/addr_to_domain.rs +++ b/src/endpoints/addr_to_domain.rs @@ -2,13 +2,20 @@ use crate::{ models::AppState, utils::{get_error, to_hex}, }; +use anyhow::{bail, Result}; use axum::{ extract::{Query, State}, http::StatusCode, response::IntoResponse, Json, }; -use mongodb::bson::{doc, Bson}; +use axum_auto_routes::route; +use futures::StreamExt; +use mongodb::{ + bson::{doc, Document}, + options::AggregateOptions, + Cursor, +}; use serde::{Deserialize, Serialize}; use starknet::core::types::FieldElement; use std::sync::Arc; @@ -16,7 +23,7 @@ use std::sync::Arc; #[derive(Serialize)] pub struct AddrToDomainData { domain: String, - domain_expiry: i64, + domain_expiry: Option, } #[derive(Deserialize)] @@ -24,37 +31,129 @@ pub struct AddrToDomainQuery { addr: FieldElement, } +async fn read_cursor(mut cursor: Cursor) -> Result { + while let Some(result) = cursor.next().await { + let doc = result?; + let domain = doc.get_str("domain").unwrap_or_default().to_owned(); + let domain_expiry = doc.get_i64("domain_expiry").ok(); + return Ok(AddrToDomainData { + domain, + domain_expiry, + }); + } + bail!("No document found for the given address") +} + +async fn aggregate_data( + collection: mongodb::Collection, + pipeline: Vec, +) -> Result { + let cursor = collection + .aggregate(pipeline, AggregateOptions::default()) + .await?; + read_cursor(cursor).await +} + +#[route(get, "/addr_to_domain", crate::endpoints::addr_to_domain)] pub async fn handler( State(state): State>, Query(query): Query, ) -> impl IntoResponse { - let domains = state.starknetid_db.collection::("domains"); let hex_addr = to_hex(&query.addr); - let document = domains - .find_one( - doc! { - "legacy_address": &hex_addr, - "rev_address": &hex_addr, - "_cursor.to": Bson::Null, - }, - None, - ) - .await; - - match document { - Ok(doc) => { - if let Some(doc) = doc { - let domain = doc.get_str("domain").unwrap_or_default().to_owned(); - let expiry = doc.get_i64("expiry").unwrap_or_default(); - let data = AddrToDomainData { - domain, - domain_expiry: expiry, - }; - (StatusCode::OK, Json(data)).into_response() - } else { - get_error("No domain found".to_string()) - } + let domains_collection = state.starknetid_db.collection::("domains"); + let id_owners_collection = state.starknetid_db.collection::("id_owners"); + + let legacy_pipeline = create_legacy_pipeline(&hex_addr); + let normal_pipeline = create_normal_pipeline(&hex_addr); + let main_id_pipeline = create_main_id_pipeline(&hex_addr); + + let results = [ + aggregate_data(domains_collection.clone(), legacy_pipeline), + aggregate_data(domains_collection.clone(), normal_pipeline), + aggregate_data(id_owners_collection, main_id_pipeline), + ]; + + for result in results { + match result.await { + Ok(data) => return (StatusCode::OK, Json(data)).into_response(), + Err(_) => continue, } - Err(_) => get_error("Error while fetching from database".to_string()), } + + get_error("No data found for the given address".to_string()) +} + +fn create_legacy_pipeline(address: &String) -> Vec { + vec![ + doc! { "$match": { "_cursor.to": null, "rev_address": address, "$expr": { + "$eq": ["$rev_address", "$legacy_address"] + } } }, + doc! { "$project": { + "domain": 1, + "domain_expiry" : "$expiry" + }}, + ] +} + +fn create_normal_pipeline(address: &String) -> Vec { + vec![ + doc! 
{ "$match": { "_cursor.to": null, "rev_address": address } }, + doc! { "$lookup": { + "from": "id_owners", + "let": { "rev_address": "$rev_address" }, + "pipeline": [ + { "$match": { + "$or": [ + { "_cursor.to": null }, + { "_cursor.to": { "$exists": false } } + ], + "$expr": { "$eq": ["$owner", "$$rev_address"] } + } } + ], + "as": "identity" + }}, + doc! { "$unwind": "$identity" }, + doc! { "$lookup": { + "from": "id_user_data", + "let": { "id": "$identity.id" }, + "pipeline": [ + doc! { "$match": { + "_cursor.to": { "$exists": false }, + "field": "0x000000000000000000000000000000000000000000000000737461726b6e6574", + "$expr": { "$eq": ["$id", "$$id"] } + } } + ], + "as": "starknet_data" + }}, + doc! { "$unwind": "$starknet_data" }, + doc! { "$match": { + "$expr": { "$eq": ["$rev_address", "$starknet_data.data"] } + }}, + doc! { "$project": { + "domain": 1, + "domain_expiry" : "$expiry" + }}, + ] +} + +fn create_main_id_pipeline(address: &String) -> Vec { + vec![ + doc! { "$match": { "_cursor.to": null, "owner": address, "main": true } }, + doc! { "$lookup": { + "from": "domains", + "let": { "id": "$id" }, + "pipeline": [ + doc! { "$match": { + "_cursor.to": { "$exists": false }, + "$expr": { "$eq": ["$id", "$$id"] } + } } + ], + "as": "domain_data" + }}, + doc! { "$unwind": "$domain_data" }, + doc! { "$project": { + "domain": "$domain_data.domain", + "domain_expiry" : "$domain_data.expiry" + }}, + ] } diff --git a/src/endpoints/addr_to_external_domains.rs b/src/endpoints/addr_to_external_domains.rs index 0a953e4..637f1c3 100644 --- a/src/endpoints/addr_to_external_domains.rs +++ b/src/endpoints/addr_to_external_domains.rs @@ -7,6 +7,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::IntoResponse, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::bson::doc; use serde::{Deserialize, Serialize}; @@ -23,6 +24,11 @@ pub struct DomainQuery { addr: FieldElement, } +#[route( + get, + "/addr_to_external_domains", + crate::endpoints::addr_to_external_domains +)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/addr_to_full_ids.rs b/src/endpoints/addr_to_full_ids.rs index 7d692e9..7002295 100644 --- a/src/endpoints/addr_to_full_ids.rs +++ b/src/endpoints/addr_to_full_ids.rs @@ -8,6 +8,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use futures::future::join_all; use futures::stream::StreamExt; use mongodb::{ @@ -52,6 +53,7 @@ pub struct FullIdResponse { full_ids: Vec, } +#[route(get, "/addr_to_full_ids", crate::endpoints::addr_to_full_ids)] pub async fn handler( State(state): State>, Query(query): Query, @@ -64,6 +66,9 @@ pub async fn handler( doc! { "$match": doc! 
{ "owner": to_hex(&query.addr), + "id" : { + "$ne" : null + }, "_cursor.to": Bson::Null } }, diff --git a/src/endpoints/addr_to_token_id.rs b/src/endpoints/addr_to_token_id.rs index 67f4465..c94de63 100644 --- a/src/endpoints/addr_to_token_id.rs +++ b/src/endpoints/addr_to_token_id.rs @@ -7,6 +7,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use mongodb::bson::{doc, Bson}; use serde::{Deserialize, Serialize}; use starknet::core::types::FieldElement; @@ -22,6 +23,7 @@ pub struct TokenIdQuery { addr: FieldElement, } +#[route(get, "/addr_to_token_id", crate::endpoints::addr_to_token_id)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/addrs_to_domains.rs b/src/endpoints/addrs_to_domains.rs index e04e427..192e12b 100644 --- a/src/endpoints/addrs_to_domains.rs +++ b/src/endpoints/addrs_to_domains.rs @@ -1,20 +1,23 @@ -use crate::{ - models::AppState, - utils::{get_error, to_hex}, -}; +use crate::{models::AppState, utils::to_hex}; +use anyhow::{Context, Result}; use axum::{ extract::{Json, State}, http::StatusCode, response::IntoResponse, }; +use axum_auto_routes::route; use futures::stream::StreamExt; -use mongodb::{bson::doc, options::AggregateOptions}; +use mongodb::{ + bson::{doc, Document}, + options::AggregateOptions, + Cursor, +}; use serde::{Deserialize, Serialize}; use starknet::core::types::FieldElement; use std::sync::Arc; #[derive(Serialize)] -pub struct AddrToDomainData { +struct AddrToDomainData { domain: Option, address: String, } @@ -24,15 +27,89 @@ pub struct AddrToDomainsQuery { addresses: Vec, } +async fn process_cursor( + mut cursor: Cursor, + results: &mut Vec, +) -> Result<()> { + while let Some(result) = cursor.next().await { + let doc = result.context("Failed to retrieve document from cursor")?; + if let (Ok(domain), Ok(address)) = (doc.get_str("domain"), doc.get_str("address")) { + if let Some(data) = results.iter_mut().find(|d| d.address == address) { + if data.domain == None { + data.domain = Some(domain.to_string()); + } + } + } + } + Ok(()) +} + +async fn run_aggregation_pipeline( + collection: mongodb::Collection, + pipeline: Vec, + results: &mut Vec, +) -> Result<()> { + let cursor = collection + .aggregate(pipeline, AggregateOptions::default()) + .await + .context("Failed to execute aggregation pipeline")?; + + process_cursor(cursor, results).await +} + +#[route(post, "/addrs_to_domains", crate::endpoints::addrs_to_domains)] pub async fn handler( State(state): State>, Json(query): Json, ) -> impl IntoResponse { - let domains = state.starknetid_db.collection::("domains"); + let domains_collection = state + .starknetid_db + .collection::("domains"); + let id_owners_collection = state + .starknetid_db + .collection::("id_owners"); - let addresses: Vec = query.addresses.iter().map(|addr| to_hex(addr)).collect(); + let addresses: Vec = query.addresses.iter().map(to_hex).collect(); + + let mut results = addresses + .iter() + .map(|addr| AddrToDomainData { + domain: None, + address: addr.clone(), + }) + .collect::>(); + + let legacy_pipeline = create_legacy_pipeline(&addresses); + if let Err(e) = + run_aggregation_pipeline(domains_collection.clone(), legacy_pipeline, &mut results).await + { + return (StatusCode::INTERNAL_SERVER_ERROR, Json(e.to_string())).into_response(); + } + + let normal_pipeline = create_normal_pipeline(&addresses); + if let Err(e) = + run_aggregation_pipeline(domains_collection.clone(), normal_pipeline, &mut results).await + { + 
return (StatusCode::INTERNAL_SERVER_ERROR, Json(e.to_string())).into_response(); + } + + let fallback_addresses = results + .iter() + .filter_map(|data| data.domain.is_none().then(|| data.address.clone())) + .collect::>(); + + let fallback_pipeline = create_fallback_pipeline(&fallback_addresses); + if let Err(e) = + run_aggregation_pipeline(id_owners_collection, fallback_pipeline, &mut results).await + { + return (StatusCode::INTERNAL_SERVER_ERROR, Json(e.to_string())).into_response(); + } + + (StatusCode::OK, Json(results)).into_response() +} - let pipeline = vec![ +fn create_legacy_pipeline(addresses: &[String]) -> Vec { + vec![ doc! { "$match": { "legacy_address": { "$in": addresses.clone() }, @@ -47,37 +124,82 @@ pub async fn handler( "address": "$legacy_address", }, }, - ]; - - let aggregate_options = AggregateOptions::default(); - let cursor = domains.aggregate(pipeline, aggregate_options).await; - - match cursor { - Ok(mut cursor) => { - let mut results = Vec::new(); - while let Some(doc) = cursor.next().await { - if let Ok(doc) = doc { - let domain = doc.get_str("domain").map(|s| s.to_string()).ok(); - let address = doc.get_str("address").unwrap().to_string(); - let data = AddrToDomainData { domain, address }; - results.push(data); - } - } + ] +} - for address in &addresses { - if !results - .iter() - .any(|data| data.address.to_string() == *address) - { - results.push(AddrToDomainData { - domain: None, - address: address.clone(), - }); - } - } +fn create_normal_pipeline(addresses: &[String]) -> Vec { + vec![ + doc! { "$match": { "_cursor.to": null, "rev_address": { "$in": addresses } } }, + doc! { "$lookup": { + "from": "id_owners", + "let": { "rev_address": "$rev_address" }, + "pipeline": [ + { "$match": { + "id" : { + "$ne" : null + }, + "$or": [ + { "_cursor.to": null }, + { "_cursor.to": { "$exists": false } } + ], + "$expr": { "$eq": ["$owner", "$$rev_address"] } + } } + ], + "as": "identity" + }}, + doc! { "$unwind": "$identity" }, + doc! { "$lookup": { + "from": "id_user_data", + "let": { "id": "$identity.id" }, + "pipeline": [ + doc! { "$match": { + "_cursor.to": { "$exists": false }, + "field": "0x000000000000000000000000000000000000000000000000737461726b6e6574", + "$expr": { "$eq": ["$id", "$$id"] } + } } + ], + "as": "starknet_data" + }}, + doc! { "$unwind": "$starknet_data" }, + doc! { "$match": { + "$expr": { "$eq": ["$rev_address", "$starknet_data.data"] } + }}, + doc! { "$project": { + "domain": 1, + "address" : "$rev_address", + }}, + ] +} - (StatusCode::OK, Json(results)).into_response() - } - Err(_) => get_error("Error while fetching from database".to_string()), - } +fn create_fallback_pipeline(fallback_addresses: &[String]) -> Vec { + vec![ + doc! { + "$match": { + "_cursor.to": null, + "owner": { "$in": fallback_addresses }, + "main": true + } + }, + doc! { + "$lookup": { + "from": "domains", + "let": { "id": "$id" }, + "pipeline": [ + doc! { "$match": { + "_cursor.to": { "$exists": false }, + "$expr": { "$eq": ["$id", "$$id"] } + } } + ], + "as": "domain_data" + } + }, + doc! { "$unwind": "$domain_data" }, + doc! 
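Note: the `fallback_addresses` filter above leans on `bool::then`, which maps `true` to `Some(value)` and `false` to `None`, so `filter_map` keeps only the still-unresolved addresses in a single pass. The idiom in isolation:

```rust
fn main() {
    let pairs = vec![("0xaa", Some("a.stark")), ("0xbb", None)];
    let unresolved: Vec<&str> = pairs
        .iter()
        // keep the address only when no domain was resolved for it
        .filter_map(|(addr, domain)| domain.is_none().then(|| *addr))
        .collect();
    assert_eq!(unresolved, vec!["0xbb"]);
}
```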
{ + "$project": { + "_id": 0, + "domain": "$domain_data.domain", + "address": "$owner", + } + }, + ] } diff --git a/src/endpoints/crosschain/solana/claim.rs b/src/endpoints/crosschain/solana/claim.rs index d9f8f6d..4b00e9c 100644 --- a/src/endpoints/crosschain/solana/claim.rs +++ b/src/endpoints/crosschain/solana/claim.rs @@ -8,18 +8,17 @@ use crate::{ utils::{get_error, to_hex}, }; use axum::{extract::State, http::StatusCode, response::IntoResponse, Json}; +use axum_auto_routes::route; use chrono::{Duration, Utc}; use ed25519_dalek::{Signature, Verifier, VerifyingKey}; use mongodb::bson::doc; use serde::{Deserialize, Serialize}; use serde_json::json; -use starknet::{ - core::{ - crypto::{ecdsa_sign, pedersen_hash}, - types::FieldElement, - }, - id::encode, +use starknet::core::{ + crypto::{ecdsa_sign, pedersen_hash}, + types::FieldElement, }; +use starknet_id::encode; #[derive(Deserialize, Debug, Clone)] pub struct SigQuery { @@ -66,6 +65,11 @@ lazy_static::lazy_static! { static ref SOL_SUBDOMAIN_STR: FieldElement = FieldElement::from_dec_str("9145722242464647959622012987758").unwrap(); } +#[route( + post, + "/crosschain/solana/claim", + crate::endpoints::crosschain::solana::claim +)] pub async fn handler( State(state): State>, Json(query): Json, diff --git a/src/endpoints/data_to_ids.rs b/src/endpoints/data_to_ids.rs index 2e3612f..8aeead0 100644 --- a/src/endpoints/data_to_ids.rs +++ b/src/endpoints/data_to_ids.rs @@ -7,6 +7,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use mongodb::bson::doc; use serde::{Deserialize, Serialize}; use starknet::core::types::FieldElement; @@ -24,6 +25,7 @@ pub struct StarknetIdQuery { data: FieldElement, } +#[route(get, "/data_to_ids", crate::endpoints::data_to_ids)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/domain_to_addr.rs b/src/endpoints/domain_to_addr.rs index 978a10d..63c67cc 100644 --- a/src/endpoints/domain_to_addr.rs +++ b/src/endpoints/domain_to_addr.rs @@ -7,7 +7,9 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; -use mongodb::bson::doc; +use axum_auto_routes::route; +use futures::StreamExt; +use mongodb::{bson::doc, options::AggregateOptions}; use serde::{Deserialize, Serialize}; use std::sync::Arc; @@ -22,6 +24,7 @@ pub struct DomainQuery { domain: String, } +#[route(get, "/domain_to_addr", crate::endpoints::domain_to_addr)] pub async fn handler( State(state): State>, Query(query): Query, @@ -64,31 +67,62 @@ pub async fn handler( let domains = state .starknetid_db .collection::("domains"); - let document = domains - .find_one( - doc! { - "domain": &query.domain, - "_cursor.to": null, + + let pipeline = vec![ + doc! { "$match": { "_cursor.to": null, "domain": query.domain.clone() } }, + doc! { "$lookup": { + "from": "id_user_data", + "let": { "userId": "$id" }, + "pipeline": [ + doc! { "$match": { + "_cursor.to": { "$exists": false }, + "field": "0x000000000000000000000000000000000000000000000000737461726b6e6574", + "$expr": { "$eq": ["$id", "$$userId"] } + } } + ], + "as": "ownerData" + }}, + doc! { "$unwind": { "path": "$ownerData", "preserveNullAndEmptyArrays": true } }, + doc! 
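Note: the Solana claim endpoint now pulls `encode` from the dedicated `starknet-id` crate and, per the imports above, verifies the user's Ed25519 signature with ed25519-dalek 2.x. A standalone sketch of that verification step; the key, message, and signature bytes are placeholders for whatever the endpoint actually receives:

```rust
use ed25519_dalek::{Signature, Verifier, VerifyingKey};

fn signature_is_valid(pubkey: &[u8; 32], msg: &[u8], sig: &[u8; 64]) -> bool {
    // reject malformed public keys instead of panicking
    let Ok(key) = VerifyingKey::from_bytes(pubkey) else {
        return false;
    };
    // Signature::from_bytes is infallible in ed25519-dalek 2.x
    key.verify(msg, &Signature::from_bytes(sig)).is_ok()
}
```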
{ "$project": { + "addr": { + "$cond": { + "if": { "$and": [ + { "$ne": [{ "$type": "$legacy_address" }, "0x0000000000000000000000000000000000000000000000000000000000000000"] }, + { "$ne": ["$legacy_address", "0x0000000000000000000000000000000000000000000000000000000000000000"] } + ] }, + "then": "$legacy_address", + "else": "$ownerData.data" + } }, - None, - ) - .await; + "domain_expiry" : "$expiry" + }}, + ]; + + // Execute the aggregation pipeline + let cursor: Result, &str> = domains + .aggregate(pipeline, AggregateOptions::default()) + .await + .map_err(|_| "Error while executing aggregation pipeline"); - match document { - Ok(doc) => { - if let Some(doc) = doc { - let addr = doc.get_str("legacy_address").unwrap_or_default().to_owned(); - let domain_expiry = doc.get_i64("expiry").ok(); - let data = DomainToAddrData { - addr, - domain_expiry, + match cursor { + Ok(mut cursor) => { + while let Some(result) = cursor.next().await { + return match result { + Ok(doc) => { + let addr = doc.get_str("addr").unwrap_or_default().to_owned(); + let domain_expiry = doc.get_i64("domain_expiry").ok(); + let data = DomainToAddrData { + addr, + domain_expiry, + }; + (StatusCode::OK, Json(data)).into_response() + } + Err(e) => get_error(format!("Error calling the db: {}", e)), }; - (StatusCode::OK, headers, Json(data)).into_response() - } else { - get_error("no address found".to_string()) } + return get_error("No document found for the given domain".to_string()); } - Err(_) => get_error("Error while fetching from database".to_string()), + Err(e) => return get_error(format!("Error accessing the database: {}", e)), } } } diff --git a/src/endpoints/domain_to_data.rs b/src/endpoints/domain_to_data.rs index 34919b6..655f26f 100644 --- a/src/endpoints/domain_to_data.rs +++ b/src/endpoints/domain_to_data.rs @@ -1,35 +1,24 @@ use crate::{ - models::{AppState, Data}, - resolving::get_custom_resolver, - utils::{fetch_img_url, get_error, to_hex, to_u256}, + models::{AppState, IdentityData}, + utils::get_error, }; use axum::{ extract::{Query, State}, http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use futures::StreamExt; -use mongodb::bson::{doc, Document}; +use mongodb::bson::{doc, from_bson, Bson, Document}; use serde::Deserialize; -use starknet::core::types::FieldElement; -use std::{collections::HashMap, sync::Arc}; +use std::sync::Arc; #[derive(Deserialize)] pub struct DomainQuery { domain: String, } -#[derive(Debug)] -pub struct VerifierData { - data: Option, - extended_data: Option>, -} - -const NFT_PP_CONTRACT: &'static str = - "0x00000000000000000000000000000000006e66745f70705f636f6e7472616374"; -const NFT_PP_ID: &'static str = - "0x00000000000000000000000000000000000000000000006e66745f70705f6964"; - +#[route(get, "/domain_to_data", crate::endpoints::domain_to_data)] pub async fn handler( State(state): State>, Query(query): Query, @@ -37,296 +26,171 @@ pub async fn handler( let mut headers = HeaderMap::new(); headers.insert("Cache-Control", HeaderValue::from_static("max-age=30")); - let domains = state - .starknetid_db - .collection::("domains"); - match get_custom_resolver(&domains, &query.domain).await { - None => {} - Some(res) => { - // todo: add support for argent and braavos here - return get_error(format!("custom resolver {} is not supported yet", res)); - } - } - - let starknet_ids = state - .starknetid_db - .collection::("id_owners"); + let collection = state.starknetid_db.collection::("domains"); - let domain_document = domains - 
.find_one( - doc! { - "domain": &query.domain, - "_cursor.to": null, - }, - None, - ) - .await; - - let (domain, addr, expiry, starknet_id) = match domain_document { - Ok(Some(doc)) => { - let domain = doc.get_str("domain").unwrap_or_default().to_owned(); - let addr = doc.get_str("legacy_address").ok().map(String::from); - let expiry = doc.get_i64("expiry").ok(); - let id = doc.get_str("id").unwrap_or_default().to_owned(); - (domain, addr, expiry, id) + let mut cursor = match collection.aggregate(get_pipeline(query.domain), None).await { + Ok(cursor) => cursor, + Err(_) => { + return ( + StatusCode::INTERNAL_SERVER_ERROR, + headers, + "Failed to retrieve data".to_string(), + ) + .into_response(); } - _ => return get_error("Error while fetching from database".to_string()), }; - let owner_document = starknet_ids - .find_one( - doc! { - "id": &starknet_id, - "_cursor.to": null, - }, - None, - ) - .await; - let owner_addr = match owner_document { - Ok(Some(doc)) => doc.get_str("owner").ok().map(String::from).unwrap(), - _ => return get_error("Error while fetching starknet-id from database".to_string()), + // The aggregation returns a single document + return if let Some(result) = cursor.next().await { + match result { + Ok(doc) => ( + StatusCode::OK, + headers, + Json(from_bson::(Bson::Document(doc)).expect("Malformed document")), + ) + .into_response(), + Err(err) => get_error(format!("Unexpected error: {}", err)), + } + } else { + get_error("Identity not found".to_string()) }; - let current_social_verifiers = state - .conf - .contracts - .verifiers - .clone() - .into_iter() - .map(|x| to_hex(&x)) - .collect::>(); - let mut all_social_verifiers = current_social_verifiers.clone(); - all_social_verifiers.extend(vec![to_hex(&state.conf.contracts.old_verifier)]); +} - let pipeline = vec![ +fn get_pipeline(domain: String) -> Vec { + vec![ doc! { "$match": { - "$or": [ - { - "field": { - "$in": ["0x0000000000000000000000000000000000000000000000000000676974687562", "0x0000000000000000000000000000000000000000000000000074776974746572", "0x00000000000000000000000000000000000000000000000000646973636f7264"] - }, - "verifier": { "$in": all_social_verifiers } // modified this to accommodate all social verifiers - }, - { - "field": "0x0000000000000000000000000070726f6f665f6f665f706572736f6e686f6f64", - "verifier": to_hex(&state.conf.contracts.pop_verifier) - }, - { - "field": { - // nft_pp_contract, nft_pp_id - "$in": ["0x00000000000000000000000000000000006e66745f70705f636f6e7472616374", "0x00000000000000000000000000000000000000000000006e66745f70705f6964", "0x00000000000000000000000000000000000000000000000000646973636f7264"] - }, - "verifier": to_hex(&state.conf.contracts.pp_verifier) - }, - ], - "id": &starknet_id, "_cursor.to": null, + "domain": domain } }, doc! { - "$sort": doc! { - "_cursor.from": 1 + "$lookup": { + "from": "id_owners", + "let": { + "id": "$id" + }, + "pipeline": [ + doc! { + "$match": { + "$or": [ + { "_cursor.to": null }, + { "_cursor.to": { "$exists": false } } + ], + "$expr": { "$eq": ["$id", "$$id"] } + } + } + ], + "as": "id_data" } }, + doc! { "$unwind": "$id_data" }, doc! { - "$group": { - "_id": { "field": "$field", "verifier": "$verifier" }, // group by both field and verifier - "data": { "$first": "$data" }, - "extended_data": { "$first": "$extended_data" } + "$lookup": { + "from": "id_user_data", + "let": { + "id": "$id" + }, + "pipeline": [ + doc! 
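Note: instead of hand-assembling a `Data` struct field by field, the new handler deserializes the aggregation output directly into `IdentityData` via `from_bson`. The same mechanism in miniature:

```rust
use mongodb::bson::{doc, from_bson, Bson};
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct Mini {
    id: String,
    main: bool,
}

fn main() {
    // stands in for a document produced by the aggregation pipeline
    let raw = doc! { "id": "0x1", "main": true };
    let parsed: Mini = from_bson(Bson::Document(raw)).expect("malformed document");
    println!("{parsed:?}");
}
```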
{ + "$match": { + "$or": [ + { "_cursor.to": null }, + { "_cursor.to": { "$exists": false } } + ], + "$expr": { "$eq": ["$id", "$$id"] } + } + }, + doc! { + "$project": { + "_id": 0, + "field": 1, + "data": 1 + } + } + ], + "as": "user_data" } }, - ]; - - let starknet_ids_data = state - .starknetid_db - .collection::("id_verifier_data"); - let results = starknet_ids_data.aggregate(pipeline, None).await; - - let mut verifier_data_by_field: HashMap<(String, String), VerifierData> = HashMap::new(); - if let Ok(mut cursor) = results { - while let Some(result) = cursor.next().await { - if let Ok(doc) = result { - match doc.get_document("_id") { - Ok(inner_doc) => { - if let (Ok(verifier), Ok(field)) = - (inner_doc.get_str("verifier"), inner_doc.get_str("field")) - { - let data = doc.get_str("data").ok().map(String::from); - let extended_data = doc - .get_array("extended_data") - .ok() - .map(|bson_array| { - bson_array - .iter() - .filter_map(|bson| bson.as_str().map(String::from)) - .collect() - }) - .filter(|v: &Vec| !v.is_empty()); - verifier_data_by_field.insert( - (verifier.to_string(), field.to_string()), - VerifierData { - data, - extended_data, - }, - ); + doc! { + "$lookup": { + "from": "id_verifier_data", + "let": { + "id": "$id" + }, + "pipeline": [ + doc! { + "$match": { + "$or": [ + { "_cursor.to": null }, + { "_cursor.to": { "$exists": false } } + ], + "$expr": { "$eq": ["$id", "$$id"] }, + "data": { "$ne": null } + } + }, + doc! { + "$project": { + "_id": 0, + "field": 1, + "data": 1, + "verifier": 1 } } - Err(_) => {} - } - } - } - } - - let mut github = None; - for verifier in current_social_verifiers.to_owned() { - match verifier_data_by_field.get(&( - verifier, - "0x0000000000000000000000000000000000000000000000000000676974687562".to_string(), - )) { - Some(verifier_data) => { - github = verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }); + ], + "as": "verifier_data" } - None => {} - } - } - - let old_github = match verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.old_verifier), - "0x0000000000000000000000000000000000000000000000000000676974687562".to_string(), - )) { - Some(verifier_data) => verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }), - None => None, - }; - - let mut twitter = None; - for verifier in current_social_verifiers.to_owned() { - match verifier_data_by_field.get(&( - verifier, - "0x0000000000000000000000000000000000000000000000000074776974746572".to_string(), - )) { - Some(verifier_data) => { - twitter = verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }) + }, + doc! { + "$lookup": { + "from": "id_verifier_data", + "let": { + "id": "$id" + }, + "pipeline": [ + doc! { + "$match": { + "$or": [ + { "_cursor.to": null }, + { "_cursor.to": { "$exists": false } } + ], + "$expr": { "$eq": ["$id", "$$id"] }, + "extended_data": { "$ne": null } + } + }, + doc! 
{ + "$project": { + "_id": 0, + "field": 1, + "extended_data": 1, + "verifier": 1 + } + } + ], + "as": "extended_verifier_data" } - None => {} - } - } - - let old_twitter = match verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.old_verifier), - "0x0000000000000000000000000000000000000000000000000074776974746572".to_string(), - )) { - Some(verifier_data) => verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }), - None => None, - }; - - let mut discord: Option = None; - for verifier in current_social_verifiers.to_owned() { - match verifier_data_by_field.get(&( - verifier, - "0x00000000000000000000000000000000000000000000000000646973636f7264".to_string(), - )) { - Some(verifier_data) => { - discord = verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }) + }, + doc! { + "$project": { + "_id": 0, + "id": 1, + "owner": "$id_data.owner", + "main": "$id_data.main", + "creation_date": "$id_data.creation_date", + "domain": { + "domain": "$domain", + "migrated" : "$migrated", + "root": "$root", + "creation_date": "$creation_date", + "expiry": "$expiry", + "resolver": "$resolver", + "legacy_address": "$legacy_address", + "rev_address": "$rev_address" + }, + "user_data": 1, + "verifier_data": 1, + "extended_verifier_data" : 1 } - None => {} - } - } - - let old_discord = match verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.old_verifier), - "0x00000000000000000000000000000000000000000000000000646973636f7264".to_string(), - )) { - Some(verifier_data) => verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }), - None => None, - }; - - let proof_of_personhood = match verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.pop_verifier), - "0x0000000000000000000000000070726f6f665f6f665f706572736f6e686f6f64".to_string(), - )) { - Some(verifier_data) => verifier_data.data.to_owned().and_then(|data| { - Some(data == "0x0000000000000000000000000000000000000000000000000000000000000001") - }), - None => None, - }; - - let img_url = match ( - verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.pp_verifier), - NFT_PP_CONTRACT.to_string(), - )), - verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.pp_verifier), - NFT_PP_ID.to_string(), - )), - ) { - (Option::Some(data_contract), Option::Some(data_id)) => { - let id_felts = data_id.to_owned().extended_data.as_ref().unwrap(); - let id = to_u256(id_felts.get(0).unwrap(), id_felts.get(1).unwrap()); - fetch_img_url( - &state.conf.starkscan.api_url, - &state.conf.starkscan.api_key, - data_contract.data.to_owned().unwrap(), - id.to_string(), - ) - .await - } - _ => None, - }; - - let is_owner_main_document = domains - .find_one( - doc! 
{ - "domain": &domain, - "legacy_address": &owner_addr, - "rev_address": &owner_addr, - "_cursor.to": null, - }, - None, - ) - .await; - let is_owner_main = is_owner_main_document.is_ok() && is_owner_main_document.unwrap().is_some(); - - let data = Data { - domain: Some(domain), - addr, - domain_expiry: expiry, - is_owner_main, - owner_addr, - github, - old_github, // added this field - twitter, - old_twitter, // added this field - discord, - old_discord, // added this field - proof_of_personhood, - starknet_id: FieldElement::from_hex_be(&starknet_id).unwrap().to_string(), - img_url, - }; - - (StatusCode::OK, headers, Json(data)).into_response() + }, + ] } diff --git a/src/endpoints/galxe/verify.rs b/src/endpoints/galxe/verify.rs index c972732..c3dcc9b 100644 --- a/src/endpoints/galxe/verify.rs +++ b/src/endpoints/galxe/verify.rs @@ -2,6 +2,7 @@ use std::sync::Arc; use crate::models::AppState; use axum::{extract::State, http::StatusCode, response::IntoResponse, Json}; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::{bson::doc, bson::Document}; use serde::{Deserialize, Serialize}; @@ -16,6 +17,7 @@ pub struct SimpleResponse { result: &'static str, } +#[route(post, "/galxe/verify", crate::endpoints::galxe::verify)] pub async fn handler( State(state): State>, Json(query): Json, diff --git a/src/endpoints/id_to_data.rs b/src/endpoints/id_to_data.rs index 6295fec..8339409 100644 --- a/src/endpoints/id_to_data.rs +++ b/src/endpoints/id_to_data.rs @@ -1,35 +1,25 @@ use crate::{ - models::{AppState, Data}, - resolving::get_custom_resolver, - utils::{fetch_img_url, get_error, to_hex, to_u256}, + models::{AppState, IdentityData}, + utils::{get_error, to_hex}, }; use axum::{ extract::{Query, State}, http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use futures::StreamExt; -use mongodb::bson::{doc, Bson, Document}; +use mongodb::bson::{doc, from_bson, Bson, Document}; use serde::Deserialize; use starknet::core::types::FieldElement; -use std::{collections::HashMap, sync::Arc}; +use std::sync::Arc; #[derive(Deserialize)] pub struct IdQuery { id: FieldElement, } -#[derive(Debug)] -pub struct VerifierData { - data: Option, - extended_data: Option>, -} - -const NFT_PP_CONTRACT: &'static str = - "0x00000000000000000000000000000000006e66745f70705f636f6e7472616374"; -const NFT_PP_ID: &'static str = - "0x00000000000000000000000000000000000000000000006e66745f70705f6964"; - +#[route(get, "/id_to_data", crate::endpoints::id_to_data)] pub async fn handler( State(state): State>, Query(query): Query, @@ -37,324 +27,152 @@ pub async fn handler( let mut headers = HeaderMap::new(); headers.insert("Cache-Control", HeaderValue::from_static("max-age=30")); - let domains = state - .starknetid_db - .collection::("domains"); - let starknet_ids = state - .starknetid_db - .collection::("id_owners"); - - let hex_id = to_hex(&query.id); - - let domain_document = domains - .find_one( - doc! 
{ - "id": &hex_id, - "_cursor.to": Bson::Null, - }, - None, - ) - .await; - - let domain_data = match domain_document { - Ok(doc) => { - if let Some(doc) = doc { - let domain = doc.get_str("domain").unwrap_or_default().to_owned(); - if get_custom_resolver(&domains, &domain).await.is_none() { - let addr = doc.get_str("legacy_address").ok().map(String::from); - let expiry = doc.get_i64("expiry").ok(); - Some((domain, addr, expiry)) - } else { - // we don't handle subdomains, todo: add support for braavos and argent - None - } - } else { - None - } + let collection = state.starknetid_db.collection::("id_owners"); + + let mut cursor = match collection + .aggregate(get_pipeline(to_hex(&query.id)), None) + .await + { + Ok(cursor) => cursor, + Err(_) => { + return ( + StatusCode::INTERNAL_SERVER_ERROR, + headers, + "Failed to retrieve data".to_string(), + ) + .into_response(); } - Err(_) => return get_error("Error while fetching from database".to_string()), }; - let owner_document = starknet_ids - .find_one( - doc! { - "id": &hex_id, - "_cursor.to": null, - }, - None, - ) - .await; - - let owner = match owner_document { - Ok(doc) => doc.and_then(|doc| doc.get_str("owner").ok().map(String::from)), - Err(_) => return get_error("Error while fetching from database".to_string()), + // The aggregation returns a single document + return if let Some(result) = cursor.next().await { + match result { + Ok(doc) => ( + StatusCode::OK, + headers, + Json(from_bson::(Bson::Document(doc)).expect("Malformed document")), + ) + .into_response(), + Err(err) => get_error(format!("Unexpected error: {}", err)), + } + } else { + get_error("Identity not found".to_string()) }; +} - if owner.is_none() { - return get_error("starknet id not found".to_string()); - } - - let current_social_verifiers = state - .conf - .contracts - .verifiers - .clone() - .into_iter() - .map(|x| to_hex(&x)) - .collect::>(); - let mut all_social_verifiers = current_social_verifiers.clone(); - all_social_verifiers.extend(vec![to_hex(&state.conf.contracts.old_verifier)]); - let owner = owner.unwrap(); - let pipeline = vec![ +fn get_pipeline(id: String) -> Vec { + vec![ doc! { "$match": { - "$or": [ - { - "field": { - "$in": ["0x0000000000000000000000000000000000000000000000000000676974687562", "0x0000000000000000000000000000000000000000000000000074776974746572", "0x00000000000000000000000000000000000000000000000000646973636f7264"] - }, - "verifier": { "$in": all_social_verifiers } // modified this to accommodate both verifiers - }, - { - "field": "0x0000000000000000000000000070726f6f665f6f665f706572736f6e686f6f64", - "verifier": to_hex(&state.conf.contracts.pop_verifier) - }, - { - "field": { - // nft_pp_contract, nft_pp_id - "$in": ["0x00000000000000000000000000000000006e66745f70705f636f6e7472616374", "0x00000000000000000000000000000000000000000000006e66745f70705f6964", "0x00000000000000000000000000000000000000000000000000646973636f7264"] - }, - "verifier": to_hex(&state.conf.contracts.pp_verifier) - }, - ], - "id": &hex_id, "_cursor.to": null, + "id": id } }, doc! { - "$sort": doc! { - "_cursor.from": 1 + "$lookup": { + "from": "domains", + "let": {"id": "$id"}, + "pipeline": [ + doc! { + "$match": { + "$or": [ + {"_cursor.to": null}, + {"_cursor.to": {"$exists": false}} + ], + "$expr": {"$eq": ["$id", "$$id"]}, + } + } + ], + "as": "domain_data" } }, doc! 
{ - "$group": { - "_id": { "field": "$field", "verifier": "$verifier" }, // group by both field and verifier - "data": { "$first": "$data" }, - "extended_data": { "$first": "$extended_data" } + "$lookup": { + "from": "id_user_data", + "let": {"id": "$id"}, + "pipeline": [ + doc! { + "$match": { + "$or": [ + {"_cursor.to": null}, + {"_cursor.to": {"$exists": false}} + ], + "$expr": {"$eq": ["$id", "$$id"]}, + "data": { "$ne": null } + } + }, + doc! { + "$project": {"_id": 0, "field": 1, "data": 1} + } + ], + "as": "user_data" } }, - ]; - - let starknet_ids_data = state - .starknetid_db - .collection::("id_verifier_data"); - let results = starknet_ids_data.aggregate(pipeline, None).await; - - let mut verifier_data_by_field: HashMap<(String, String), VerifierData> = HashMap::new(); - if let Ok(mut cursor) = results { - while let Some(result) = cursor.next().await { - if let Ok(doc) = result { - match doc.get_document("_id") { - Ok(inner_doc) => { - if let (Ok(verifier), Ok(field)) = - (inner_doc.get_str("verifier"), inner_doc.get_str("field")) - { - let data = doc.get_str("data").ok().map(String::from); - let extended_data = doc - .get_array("extended_data") - .ok() - .map(|bson_array| { - bson_array - .iter() - .filter_map(|bson| bson.as_str().map(String::from)) - .collect() - }) - .filter(|v: &Vec| !v.is_empty()); - verifier_data_by_field.insert( - (verifier.to_string(), field.to_string()), - VerifierData { - data, - extended_data, - }, - ); + doc! { + "$lookup": { + "from": "id_verifier_data", + "let": {"id": "$id"}, + "pipeline": [ + doc! { + "$match": { + "$or": [ + {"_cursor.to": null}, + {"_cursor.to": {"$exists": false}} + ], + "$expr": {"$eq": ["$id", "$$id"]}, + "data": { "$ne": null } } + }, + doc! { + "$project": {"_id": 0, "field": 1, "data": 1, "verifier": 1} } - Err(_) => {} - } - } - } - } - - let mut github = None; - for verifier in current_social_verifiers.to_owned() { - match verifier_data_by_field.get(&( - verifier, - "0x0000000000000000000000000000000000000000000000000000676974687562".to_string(), - )) { - Some(verifier_data) => { - github = verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }); - } - None => {} - } - } - - let old_github = match verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.old_verifier), - "0x0000000000000000000000000000000000000000000000000000676974687562".to_string(), - )) { - Some(verifier_data) => verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }), - None => None, - }; - - let mut twitter = None; - for verifier in current_social_verifiers.to_owned() { - match verifier_data_by_field.get(&( - verifier, - "0x0000000000000000000000000000000000000000000000000074776974746572".to_string(), - )) { - Some(verifier_data) => { - twitter = verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }) - } - None => {} - } - } - - let old_twitter = match verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.old_verifier), - "0x0000000000000000000000000000000000000000000000000074776974746572".to_string(), - )) { - Some(verifier_data) => verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }), - None => None, - }; - - let mut discord: Option = None; - for verifier in current_social_verifiers.to_owned() { - match verifier_data_by_field.get(&( - verifier, - 
"0x00000000000000000000000000000000000000000000000000646973636f7264".to_string(), - )) { - Some(verifier_data) => { - discord = verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }) + ], + "as": "verifier_data" } - None => {} - } - } - - let old_discord = match verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.old_verifier), - "0x00000000000000000000000000000000000000000000000000646973636f7264".to_string(), - )) { - Some(verifier_data) => verifier_data.data.to_owned().and_then(|data| { - FieldElement::from_hex_be(&data) - .map(|fe| fe.to_string()) - .ok() - }), - None => None, - }; - - let proof_of_personhood = match verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.pop_verifier), - "0x0000000000000000000000000070726f6f665f6f665f706572736f6e686f6f64".to_string(), - )) { - Some(verifier_data) => verifier_data.data.to_owned().and_then(|data| { - Some(data == "0x0000000000000000000000000000000000000000000000000000000000000001") - }), - None => None, - }; - - let img_url = match ( - verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.pp_verifier), - NFT_PP_CONTRACT.to_string(), - )), - verifier_data_by_field.get(&( - to_hex(&state.conf.contracts.pp_verifier), - NFT_PP_ID.to_string(), - )), - ) { - (Option::Some(data_contract), Option::Some(data_id)) => { - let id_felts = data_id.to_owned().extended_data.as_ref().unwrap(); - let id = to_u256(id_felts.get(0).unwrap(), id_felts.get(1).unwrap()); - fetch_img_url( - &state.conf.starkscan.api_url, - &state.conf.starkscan.api_key, - data_contract.data.to_owned().unwrap(), - id.to_string(), - ) - .await - } - _ => None, - }; - - let data = match domain_data { - None => Data { - domain: None, - addr: None, - domain_expiry: None, - is_owner_main: false, - owner_addr: owner, - github, - twitter, - discord, - proof_of_personhood, - old_github, - old_twitter, - old_discord, - starknet_id: query.id.to_string(), - img_url, }, - Some((domain, addr, expiry)) => { - let is_owner_main_document = domains - .find_one( + doc! { + "$lookup": { + "from": "id_verifier_data", + "let": {"id": "$id"}, + "pipeline": [ doc! { - "domain": &domain, - "legacy_address": &owner, - "rev_address": &owner, - "_cursor.to": null, + "$match": { + "$or": [ + {"_cursor.to": null}, + {"_cursor.to": {"$exists": false}} + ], + "$expr": {"$eq": ["$id", "$$id"]}, + "extended_data": { "$ne": null } + } }, - None, - ) - .await; - let is_owner_main = - is_owner_main_document.is_ok() && is_owner_main_document.unwrap().is_some(); - Data { - domain: Some(domain), - addr, - domain_expiry: expiry, - is_owner_main, - owner_addr: owner, - github, - twitter, - discord, - proof_of_personhood, - old_github, - old_twitter, - old_discord, - starknet_id: query.id.to_string(), - img_url, + doc! { + "$project": {"_id": 0, "field": 1, "extended_data": 1, "verifier": 1} + } + ], + "as": "extended_verifier_data" } - } - }; - - (StatusCode::OK, headers, Json(data)).into_response() + }, + doc! 
{ + "$project": { + "_id": 0, + "id": 1, + "owner": 1, + "main": 1, + "creation_date": 1, + "domain": { + "domain": {"$arrayElemAt": ["$domain_data.domain", 0]}, + "root": {"$arrayElemAt": ["$domain_data.root", 0]}, + "migrated" : {"$arrayElemAt": ["$domain_data.migrated", 0]}, + "creation_date": {"$arrayElemAt": ["$domain_data.creation_date", 0]}, + "expiry": {"$arrayElemAt": ["$domain_data.expiry", 0]}, + "resolver": {"$arrayElemAt": ["$domain_data.resolver", 0]}, + "legacy_address": {"$arrayElemAt": ["$domain_data.legacy_address", 0]}, + "rev_address": {"$arrayElemAt": ["$domain_data.rev_address", 0]} + }, + "user_data": 1, + "verifier_data": 1, + "extended_verifier_data": 1 + } + }, + ] } diff --git a/src/endpoints/mod.rs b/src/endpoints/mod.rs index 1097f2a..4adf95e 100644 --- a/src/endpoints/mod.rs +++ b/src/endpoints/mod.rs @@ -1,3 +1,4 @@ +pub mod addr_has_rev; pub mod addr_to_available_ids; pub mod addr_to_domain; pub mod addr_to_external_domains; diff --git a/src/endpoints/referral/add_click.rs b/src/endpoints/referral/add_click.rs index f73244b..4ec6cb5 100644 --- a/src/endpoints/referral/add_click.rs +++ b/src/endpoints/referral/add_click.rs @@ -4,6 +4,7 @@ use axum::{ http::StatusCode, response::IntoResponse, }; +use axum_auto_routes::route; use chrono::Utc; use mongodb::{ bson::{doc, DateTime as BsonDateTime}, @@ -18,6 +19,7 @@ pub struct AddClickQuery { sponsor_addr: FieldElement, } +#[route(post, "/referral/add_click", crate::endpoints::referral::add_click)] pub async fn handler( State(state): State>, Json(query): Json, diff --git a/src/endpoints/referral/click_count.rs b/src/endpoints/referral/click_count.rs index 4ff7b01..c457091 100644 --- a/src/endpoints/referral/click_count.rs +++ b/src/endpoints/referral/click_count.rs @@ -4,6 +4,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use chrono::{DateTime, NaiveDateTime, Utc}; use futures::StreamExt; use mongodb::bson::{doc, Bson, DateTime as BsonDateTime}; @@ -22,6 +23,7 @@ pub struct IdQuery { spacing: i64, } +#[route(get, "/referral/click_count", crate::endpoints::referral::click_count)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/referral/revenue.rs b/src/endpoints/referral/revenue.rs index 35dd284..680f11f 100644 --- a/src/endpoints/referral/revenue.rs +++ b/src/endpoints/referral/revenue.rs @@ -4,6 +4,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use chrono::{DateTime, NaiveDateTime, Utc}; use futures::StreamExt; use mongodb::bson::{doc, Bson, DateTime as BsonDateTime}; @@ -22,6 +23,7 @@ pub struct IdQuery { spacing: i64, } +#[route(get, "/referral/revenue", crate::endpoints::referral::revenue)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/referral/sales_count.rs b/src/endpoints/referral/sales_count.rs index 2e84d56..f2fe2c9 100644 --- a/src/endpoints/referral/sales_count.rs +++ b/src/endpoints/referral/sales_count.rs @@ -4,6 +4,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use chrono::{DateTime, NaiveDateTime, Utc}; use futures::StreamExt; use mongodb::bson::{doc, Bson, DateTime as BsonDateTime}; @@ -22,6 +23,7 @@ pub struct IdQuery { spacing: i64, } +#[route(get, "/referral/sales_count", crate::endpoints::referral::sales_count)] pub async fn handler( State(state): State>, Query(query): 
Query, diff --git a/src/endpoints/renewal/get_metahash.rs b/src/endpoints/renewal/get_metahash.rs index b8a5dc3..c571076 100644 --- a/src/endpoints/renewal/get_metahash.rs +++ b/src/endpoints/renewal/get_metahash.rs @@ -7,6 +7,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use futures::TryStreamExt; use mongodb::bson::{doc, Bson}; use serde::{Deserialize, Serialize}; @@ -24,6 +25,7 @@ pub struct GetMetaHashQuery { addr: FieldElement, } +#[route(get, "/renewal/get_metahash", crate::endpoints::renewal::get_metahash)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/renewal/get_non_subscribed_domains.rs b/src/endpoints/renewal/get_non_subscribed_domains.rs index dc7205e..3edaeb0 100644 --- a/src/endpoints/renewal/get_non_subscribed_domains.rs +++ b/src/endpoints/renewal/get_non_subscribed_domains.rs @@ -7,6 +7,7 @@ use axum::{ http::StatusCode, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::{bson::doc, options::AggregateOptions}; use regex::Regex; @@ -23,6 +24,11 @@ lazy_static::lazy_static! { static ref DOMAIN_REGEX: Regex = Regex::new(r"^[^.]+\.stark$").unwrap(); } +#[route( + get, + "/renewal/get_non_subscribed_domains", + crate::endpoints::renewal::get_non_subscribed_domains +)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/renewal/get_renewal_data.rs b/src/endpoints/renewal/get_renewal_data.rs index 10ef5f7..e669035 100644 --- a/src/endpoints/renewal/get_renewal_data.rs +++ b/src/endpoints/renewal/get_renewal_data.rs @@ -7,6 +7,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::{bson::doc, options::FindOptions}; use serde::{Deserialize, Serialize}; @@ -24,6 +25,11 @@ pub struct StarknetIdQuery { domain: String, } +#[route( + get, + "/renewal/get_renewal_data", + crate::endpoints::renewal::get_renewal_data +)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/starkscan/fetch_nfts.rs b/src/endpoints/starkscan/fetch_nfts.rs index 8a6c378..a034014 100644 --- a/src/endpoints/starkscan/fetch_nfts.rs +++ b/src/endpoints/starkscan/fetch_nfts.rs @@ -7,6 +7,7 @@ use axum::{ http::StatusCode, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use mongodb::bson::doc; use serde::{Deserialize, Serialize}; use serde_json::Value; @@ -44,6 +45,7 @@ pub struct StarkscanNftProps { minted_at_timestamp: i64, } +#[route(get, "/starkscan/fetch_nfts", crate::endpoints::starkscan::fetch_nfts)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/stats/count_addrs.rs b/src/endpoints/stats/count_addrs.rs index 31a4605..a11e2c4 100644 --- a/src/endpoints/stats/count_addrs.rs +++ b/src/endpoints/stats/count_addrs.rs @@ -5,6 +5,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::bson::{doc, Bson}; use serde::{Deserialize, Serialize}; @@ -20,6 +21,7 @@ pub struct CountAddrsQuery { since: i64, } +#[route(get, "/stats/count_addrs", crate::endpoints::stats::count_addrs)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/stats/count_club_domains.rs b/src/endpoints/stats/count_club_domains.rs index 70062ff..87278da 100644 --- a/src/endpoints/stats/count_club_domains.rs +++ 
b/src/endpoints/stats/count_club_domains.rs @@ -5,6 +5,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use futures::TryStreamExt; use mongodb::bson::{self, doc, Bson}; use serde::{Deserialize, Serialize}; @@ -22,6 +23,7 @@ pub struct CountClubDomainsQuery { since: i64, } +#[route(get, "/stats/count_club_domains", crate::endpoints::stats::count_club_domains)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/stats/count_created.rs b/src/endpoints/stats/count_created.rs index 1b89b14..5c85b12 100644 --- a/src/endpoints/stats/count_created.rs +++ b/src/endpoints/stats/count_created.rs @@ -5,6 +5,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::bson::{doc, Bson}; use serde::{Deserialize, Serialize}; @@ -23,6 +24,7 @@ pub struct CountCreatedQuery { segments: i64, } +#[route(get, "/stats/count_created", crate::endpoints::stats::count_created)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/stats/count_domains.rs b/src/endpoints/stats/count_domains.rs index 93ed665..6ca89f0 100644 --- a/src/endpoints/stats/count_domains.rs +++ b/src/endpoints/stats/count_domains.rs @@ -5,6 +5,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use mongodb::bson::{doc, Bson}; use serde::{Deserialize, Serialize}; use std::sync::Arc; @@ -19,6 +20,7 @@ pub struct CountDomainsQuery { since: i64, } +#[route(get, "/stats/count_domains", crate::endpoints::stats::count_domains)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/stats/count_ids.rs b/src/endpoints/stats/count_ids.rs index 93ed665..381410e 100644 --- a/src/endpoints/stats/count_ids.rs +++ b/src/endpoints/stats/count_ids.rs @@ -5,6 +5,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use mongodb::bson::{doc, Bson}; use serde::{Deserialize, Serialize}; use std::sync::Arc; @@ -19,6 +20,7 @@ pub struct CountDomainsQuery { since: i64, } +#[route(get, "/stats/count_ids", crate::endpoints::stats::count_ids)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/stats/count_renewed.rs b/src/endpoints/stats/count_renewed.rs index c905199..46ec8fc 100644 --- a/src/endpoints/stats/count_renewed.rs +++ b/src/endpoints/stats/count_renewed.rs @@ -5,6 +5,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::bson::{doc, Bson}; use serde::{Deserialize, Serialize}; @@ -23,6 +24,7 @@ pub struct CountRenewedQuery { segments: i64, } +#[route(get, "/stats/count_renewed", crate::endpoints::stats::count_renewed)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/endpoints/stats/expired_club_domains.rs b/src/endpoints/stats/expired_club_domains.rs index 39fb3b0..74bf647 100644 --- a/src/endpoints/stats/expired_club_domains.rs +++ b/src/endpoints/stats/expired_club_domains.rs @@ -5,6 +5,7 @@ use axum::{ response::IntoResponse, Json, }; +use axum_auto_routes::route; use futures::StreamExt; use mongodb::{ bson::{doc, Bson}, @@ -19,6 +20,11 @@ pub struct CountClubDomainsData { count: i32, } +#[route( + get, + "/stats/expired_club_domains", + crate::endpoints::stats::expired_club_domains +)] pub async fn handler(State(state): State>) -> impl IntoResponse { let mut headers = HeaderMap::new(); headers.insert("Cache-Control", HeaderValue::from_static("max-age=60")); diff --git 
a/src/endpoints/uri.rs b/src/endpoints/uri.rs index 7536b91..9a11d71 100644 --- a/src/endpoints/uri.rs +++ b/src/endpoints/uri.rs @@ -7,6 +7,7 @@ use axum::{ http::{HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Json}, }; +use axum_auto_routes::route; use chrono::NaiveDateTime; use futures::StreamExt; use mongodb::bson::doc; @@ -47,6 +48,7 @@ const NFT_PP_CONTRACT: &'static str = const NFT_PP_ID: &'static str = "0x00000000000000000000000000000000000000000000006e66745f70705f6964"; +#[route(get, "/uri", crate::endpoints::uri)] pub async fn handler( State(state): State>, Query(query): Query, diff --git a/src/main.rs b/src/main.rs index 024ff29..f240bd3 100644 --- a/src/main.rs +++ b/src/main.rs @@ -6,17 +6,19 @@ mod models; mod resolving; mod tax; mod utils; -use axum::{ - http::StatusCode, - routing::{get, post}, - Router, -}; +use axum::{http::StatusCode, Router}; +use axum_auto_routes::route; use mongodb::{bson::doc, options::ClientOptions, Client}; -use std::net::SocketAddr; use std::sync::Arc; +use std::{net::SocketAddr, sync::Mutex}; +use utils::WithState; use tower_http::cors::{Any, CorsLayer}; +lazy_static::lazy_static! { + pub static ref ROUTE_REGISTRY: Mutex>> = Mutex::new(Vec::new()); +} + #[tokio::main] async fn main() { println!("starknetid_server: starting v{}", env!("CARGO_PKG_VERSION")); @@ -58,100 +60,14 @@ async fn main() { } let cors = CorsLayer::new().allow_headers(Any).allow_origin(Any); - let app = Router::new() - .route("/", get(root)) - .route( - "/addr_to_available_ids", - get(endpoints::addr_to_available_ids::handler), - ) - .route("/addr_to_domain", get(endpoints::addr_to_domain::handler)) - .route( - "/addr_to_external_domains", - get(endpoints::addr_to_external_domains::handler), - ) - .route( - "/addr_to_full_ids", - get(endpoints::addr_to_full_ids::handler), - ) - .route( - "/addr_to_token_id", - get(endpoints::addr_to_token_id::handler), - ) - .route( - "/addrs_to_domains", - post(endpoints::addrs_to_domains::handler), - ) - .route("/data_to_ids", get(endpoints::data_to_ids::handler)) - .route("/domain_to_addr", get(endpoints::domain_to_addr::handler)) - .route("/domain_to_data", get(endpoints::domain_to_data::handler)) - .route("/id_to_data", get(endpoints::id_to_data::handler)) - .route("/uri", get(endpoints::uri::handler)) - .route( - "/referral/add_click", - post(endpoints::referral::add_click::handler), - ) - .route( - "/referral/revenue", - get(endpoints::referral::revenue::handler), - ) - .route( - "/referral/sales_count", - get(endpoints::referral::sales_count::handler), - ) - .route( - "/referral/click_count", - get(endpoints::referral::click_count::handler), - ) - .route( - "/stats/count_addrs", - get(endpoints::stats::count_addrs::handler), - ) - .route( - "/stats/count_club_domains", - get(endpoints::stats::count_club_domains::handler), - ) - .route( - "/stats/count_domains", - get(endpoints::stats::count_domains::handler), - ) - .route( - "/stats/count_ids", - get(endpoints::stats::count_ids::handler), - ) - .route( - "/stats/count_created", - get(endpoints::stats::count_created::handler), - ) - .route( - "/stats/expired_club_domains", - get(endpoints::stats::expired_club_domains::handler), - ) - .route( - "/stats/count_renewed", - get(endpoints::stats::count_renewed::handler), - ) - .route( - "/starkscan/fetch_nfts", - get(endpoints::starkscan::fetch_nfts::handler), - ) - .route( - "/renewal/get_renewal_data", - get(endpoints::renewal::get_renewal_data::handler), - ) - .route( - "/renewal/get_metahash", - 
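Note: with axum_auto_routes, each `#[route(...)]` attribute appears to register its handler into `ROUTE_REGISTRY` at startup (presumably via `ctor`, hence the new dependency), and `main` just folds the registry into one `Router`, replacing the dozens of manual `.route(...)` calls deleted below. The fold pattern in isolation; the real `to_router` comes from `utils::WithState`, which this diff does not show:

```rust
use axum::{routing::get, Router};

// merge many independently-built routers into a single app
fn build(routers: Vec<Router>) -> Router {
    routers.into_iter().fold(Router::new(), |acc, r| acc.merge(r))
}

#[tokio::main]
async fn main() {
    let app = build(vec![
        Router::new().route("/a", get(|| async { "a" })),
        Router::new().route("/b", get(|| async { "b" })),
    ]);
    // the real main serves this via axum::Server::bind(...)
    let _ = app;
}
```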
diff --git a/src/models.rs b/src/models.rs
index a52ebb3..3a1153c 100644
--- a/src/models.rs
+++ b/src/models.rs
@@ -1,7 +1,8 @@
 use mongodb::Database;
+use starknet::core::types::FieldElement;

-use crate::config::Config;
-use serde::{Deserialize, Serialize};
+use crate::{config::Config, utils::to_hex};
+use serde::{ser::SerializeSeq, Deserialize, Serialize, Serializer};
 use std::collections::HashMap;

 pub struct AppState {
@@ -11,32 +12,107 @@ pub struct AppState {
     pub states: States,
 }

-#[derive(Serialize)]
-pub struct Data {
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub domain: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub addr: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub domain_expiry: Option<i64>,
-    pub is_owner_main: bool,
-    pub owner_addr: String,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub github: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub twitter: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub discord: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub old_github: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub old_twitter: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub old_discord: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub proof_of_personhood: Option<bool>,
-    pub starknet_id: String,
-    pub img_url: Option<String>,
+fn serialize_felt<S>(field_element: &FieldElement, serializer: S) -> Result<S::Ok, S::Error>
+where
+    S: Serializer,
+{
+    let hex_string = to_hex(field_element);
+    serializer.serialize_str(&hex_string)
+}
+
+fn serialize_opt_felt<S>(
+    field_element: &Option<FieldElement>,
+    serializer: S,
+) -> Result<S::Ok, S::Error>
+where
+    S: Serializer,
+{
+    match field_element {
+        Some(fe) => {
+            let hex_string = to_hex(fe);
+            serializer.serialize_str(&hex_string)
+        }
+        None => serializer.serialize_none(),
+    }
+}
+
+fn serialize_vec_felt<S>(vec: &Vec<FieldElement>, serializer: S) -> Result<S::Ok, S::Error>
+where
+    S: Serializer,
+{
+    let mut seq = serializer.serialize_seq(Some(vec.len()))?;
+    for element in vec {
+        seq.serialize_element(&SerializedFelt(element))?;
+    }
+    seq.end()
+}
+
+struct SerializedFelt<'a>(&'a FieldElement);
+
+impl<'a> Serialize for SerializedFelt<'a> {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        serialize_felt(self.0, serializer)
+    }
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+pub struct IdentityData {
+    #[serde(serialize_with = "serialize_felt")]
+    pub id: FieldElement,
+    #[serde(serialize_with = "serialize_felt")]
+    pub owner: FieldElement,
+    pub main: bool,
+    pub creation_date: u64,
+    pub domain: Option<Domain>,
+    pub user_data: Vec<UserData>,
+    pub verifier_data: Vec<VerifierData>,
+    pub extended_verifier_data: Vec<ExtendedVerifierData>,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+pub struct Domain {
+    pub domain: String,
+    pub migrated: bool,
+    pub root: bool,
+    pub creation_date: u64,
+    pub expiry: Option<u64>,
+    #[serde(serialize_with = "serialize_opt_felt")]
+    pub resolver: Option<FieldElement>,
+    #[serde(serialize_with = "serialize_opt_felt")]
+    pub legacy_address: Option<FieldElement>,
+    #[serde(serialize_with = "serialize_opt_felt")]
+    pub rev_address: Option<FieldElement>,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+pub struct UserData {
+    #[serde(serialize_with = "serialize_felt")]
+    pub field: FieldElement,
+    #[serde(serialize_with = "serialize_felt")]
+    pub data: FieldElement,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+pub struct VerifierData {
+    #[serde(serialize_with = "serialize_felt")]
+    pub verifier: FieldElement,
+    #[serde(serialize_with = "serialize_felt")]
+    pub field: FieldElement,
+    #[serde(serialize_with = "serialize_felt")]
+    pub data: FieldElement,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+pub struct ExtendedVerifierData {
+    #[serde(serialize_with = "serialize_felt")]
+    pub verifier: FieldElement,
+    #[serde(serialize_with = "serialize_felt")]
+    pub field: FieldElement,
+    #[serde(serialize_with = "serialize_vec_felt")]
+    pub extended_data: Vec<FieldElement>,
 }

 #[derive(Deserialize, Debug)]
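The `serialize_with` hooks mean every `FieldElement` leaves the API as a `to_hex` string rather than serde's default representation for the type. A quick check of that behavior, written as a hypothetical unit test against the new `UserData` struct (the test module and assertions are illustrative, not part of this PR):

```rust
#[cfg(test)]
mod tests {
    use crate::models::UserData;
    use starknet::core::types::FieldElement;

    #[test]
    fn user_data_serializes_felts_as_hex_strings() {
        let data = UserData {
            field: FieldElement::from(1u64),
            data: FieldElement::from(7u64),
        };
        let json = serde_json::to_string(&data).unwrap();
        // Both felts should come out as 0x-prefixed strings (to_hex output),
        // e.g. {"field":"0x...1","data":"0x...7"}, never as raw numbers.
        assert!(json.contains("\"field\":\"0x"));
        assert!(json.contains("\"data\":\"0x"));
    }
}
```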
diff --git a/src/utils.rs b/src/utils.rs
index 7820a01..01e3bfa 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -1,12 +1,16 @@
 use ark_ff::{biginteger::BigInteger256, BigInteger};
 use axum::{
+    body::Body,
     http::StatusCode,
     response::{IntoResponse, Response},
+    Router,
 };
 use serde::Serialize;
 use serde_json::Value;
 use starknet::core::types::FieldElement;
-use std::fmt::Write;
+use std::{fmt::Write, sync::Arc};
+
+use crate::models::AppState;

 #[derive(Serialize)]
 pub struct ErrorMessage {
@@ -102,3 +106,26 @@ pub async fn fetch_img_url(
     json.get("image_url")
         .and_then(|v| v.as_str().map(ToString::to_string))
 }
+
+// required for axum_auto_routes
+pub trait WithState: Send {
+    fn to_router(self: Box<Self>, shared_state: Arc<AppState>) -> Router;
+
+    fn box_clone(&self) -> Box<dyn WithState>;
+}
+
+impl WithState for Router<Arc<AppState>, Body> {
+    fn to_router(self: Box<Self>, shared_state: Arc<AppState>) -> Router {
+        self.with_state(shared_state)
+    }
+
+    fn box_clone(&self) -> Box<dyn WithState> {
+        Box::new((*self).clone())
+    }
+}
+
+impl Clone for Box<dyn WithState> {
+    fn clone(&self) -> Box<dyn WithState> {
+        self.box_clone()
+    }
+}
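`WithState` exists because `Clone` is not object-safe (`Clone: Sized`), yet `main` needs to clone the boxed routers out of the registry before merging them. The `box_clone` idiom the trait uses, reduced to a self-contained example (the `Service`/`Echo` names are illustrative only):

```rust
// Clone cannot be a supertrait of a dyn-compatible trait, so the trait
// exposes a boxed clone and Box<dyn Trait> gets a blanket Clone impl.
trait Service: Send {
    fn box_clone(&self) -> Box<dyn Service>;
}

impl Clone for Box<dyn Service> {
    fn clone(&self) -> Box<dyn Service> {
        self.box_clone()
    }
}

#[derive(Clone)]
struct Echo;

impl Service for Echo {
    fn box_clone(&self) -> Box<dyn Service> {
        Box::new(self.clone())
    }
}

fn main() {
    let services: Vec<Box<dyn Service>> = vec![Box::new(Echo)];
    let _snapshot = services.clone(); // works via the blanket Clone impl
}
```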