From 14c9482b0db6cebbaff9cd30133283d7e3fb1a71 Mon Sep 17 00:00:00 2001
From: hopeyen
Date: Fri, 15 Dec 2023 11:27:48 -0600
Subject: [PATCH] refactor: crate structures

---
 subfile-exchange/benches/read_chunk.rs        |   2 +-
 subfile-exchange/benches/verify_chunk.rs      |  47 +----
 subfile-exchange/src/lib.rs                   |   4 -
 subfile-exchange/src/main.rs                  |   2 +-
 .../src/{publisher.rs => publisher/mod.rs}    |  15 +-
 .../src/{ => subfile}/file_hasher.rs          |   2 +-
 .../src/{ => subfile}/file_reader.rs          |   0
 subfile-exchange/src/{ => subfile}/ipfs.rs    |   0
 .../src/{subfile.rs => subfile/mod.rs}        |  13 +-
 .../src/{ => subfile}/subfile_reader.rs       |   2 +-
 subfile-exchange/src/subfile_client/mod.rs    |   8 +-
 subfile-exchange/src/subfile_finder/mod.rs    |   7 +-
 subfile-exchange/tests/discovery.rs           |   2 +-
 subfile-exchange/tests/file_transfer.rs       |   6 +-
 subfile-service/src/config.rs                 |   6 +-
 subfile-service/src/errors.rs                 |  75 -------
 subfile-service/src/main.rs                   |  15 +-
 subfile-service/src/subfile_server/admin.rs   |   4 +-
 subfile-service/src/subfile_server/mod.rs     | 195 +++---------------
 subfile-service/src/subfile_server/routes.rs  | 158 ++++++++++++++
 20 files changed, 225 insertions(+), 338 deletions(-)
 rename subfile-exchange/src/{publisher.rs => publisher/mod.rs} (93%)
 rename subfile-exchange/src/{ => subfile}/file_hasher.rs (98%)
 rename subfile-exchange/src/{ => subfile}/file_reader.rs (100%)
 rename subfile-exchange/src/{ => subfile}/ipfs.rs (100%)
 rename subfile-exchange/src/{subfile.rs => subfile/mod.rs} (96%)
 rename subfile-exchange/src/{ => subfile}/subfile_reader.rs (98%)
 delete mode 100644 subfile-service/src/errors.rs
 create mode 100644 subfile-service/src/subfile_server/routes.rs

diff --git a/subfile-exchange/benches/read_chunk.rs b/subfile-exchange/benches/read_chunk.rs
index 64d6713..e0b5cd5 100644
--- a/subfile-exchange/benches/read_chunk.rs
+++ b/subfile-exchange/benches/read_chunk.rs
@@ -1,6 +1,6 @@
 use criterion::{black_box, criterion_group, criterion_main, Criterion};
 use std::{fs::File, path::PathBuf};
-use subfile_exchange::file_reader::read_chunk;
+use subfile_exchange::subfile::file_reader::read_chunk;
 
 fn read_chunk_benchmark(c: &mut Criterion) {
     let file_path = black_box(PathBuf::from("../example-file/0017234600.dbin.zst"));
diff --git a/subfile-exchange/benches/verify_chunk.rs b/subfile-exchange/benches/verify_chunk.rs
index 8afa22d..ce3d437 100644
--- a/subfile-exchange/benches/verify_chunk.rs
+++ b/subfile-exchange/benches/verify_chunk.rs
@@ -4,32 +4,11 @@
 use criterion::black_box;
 use criterion::Criterion;
 use criterion::{criterion_group, criterion_main};
 
-use subfile_exchange::file_hasher::verify_chunk;
-use subfile_exchange::file_reader::read_chunk;
+use subfile_exchange::subfile::{file_hasher::verify_chunk, file_reader::read_chunk};
 use subfile_exchange::test_util::simple_chunk_file;
 
 fn verify_chunk_benchmark(c: &mut Criterion) {
     let file_path = black_box(PathBuf::from("../example-file/0017234600.dbin.zst"));
-    // let file = black_box(File::open(&file_path).unwrap());
-
-    // let file_size = black_box(file.metadata().map(|d| d.len()).unwrap());
-
-    // // Define different test ranges
-    // let ranges = black_box(vec![
-    //     (0, 999),                          // Small chunk from start
-    //     (file_size / 3, file_size / 2),    // partial chunk from middle
-    //     (file_size - 1000, file_size - 1), // Small chunk from end
-    // ]);
-
-    // c.bench_function("read_chunk", |b| {
-    //     b.iter(|| {
-    //         for &(start, end) in &ranges {
-    //             let data = black_box(read_chunk(&file_path, (start, end)).unwrap());
-
-    //         }
-    //     })
-    // });
-
     let chunk_file = black_box(simple_chunk_file());
     // read a chunk
     let (start, end) = black_box((
@@ -46,27 +25,3 @@
 
 criterion_group!(benches, verify_chunk_benchmark);
 criterion_main!(benches);
-
-// pub const SUBFILE_MANIFEST = r#"files:
-// - name: example-create-17686085.dbin
-//   hash: QmSgzLLsQzdRAQRA2d7X3wqLEUTBLSbRe2tqv9rJBy7Wqv
-// - name: 0017234500.dbin.zst
-//   hash: Qmexz4ZariJteKHHXMxsSeSjvyLZf7SUWz77bsvLUQG1Vn
-// - name: 0017234600.dbin.zst
-//   hash: QmadNB1AQnap3czUime3gEETBNUj7HHzww6hVh5F6w7Boo
-// - name: 0017686111-c1ed20dc4cffd7bd-ebfe6d2b6a25625a-17686021-default.dbin
-//   hash: QmSEDiCKax7rjxS3kvGJ3dPdHkm2bztFZkR5KDqfpgyuQw
-// - name: 0017686115-f8d105f60fa2e78d-7d23a3e458beaff1-17686021-default.dbin
-//   hash: QmVx3JX5TNrSqMKyP5xQJ2CYmcqG4VaBdPnbji3PuvUFx6
-// file_type: flatfiles
-// spec_version: 0.0.0
-// description: random flatfiles
-// chain_id: '0'
-// block_range:
-//   start_block: null
-//   end_block: null";
-
-// pub fn init_logger() {
-//     env::set_var("RUST_LOG", "warn,subfile_exchange=trace");
-//     init_tracing(String::from("pretty")).unwrap();
-// }
diff --git a/subfile-exchange/src/lib.rs b/subfile-exchange/src/lib.rs
index 383eac4..1d0ec60 100644
--- a/subfile-exchange/src/lib.rs
+++ b/subfile-exchange/src/lib.rs
@@ -1,12 +1,8 @@
 pub mod config;
 pub mod errors;
-pub mod file_hasher;
-pub mod file_reader;
-pub mod ipfs;
 pub mod publisher;
 pub mod subfile;
 pub mod subfile_client;
 pub mod subfile_finder;
-pub mod subfile_reader;
 pub mod test_util;
 pub mod util;
diff --git a/subfile-exchange/src/main.rs b/subfile-exchange/src/main.rs
index be887bc..0900fa2 100644
--- a/subfile-exchange/src/main.rs
+++ b/subfile-exchange/src/main.rs
@@ -2,8 +2,8 @@
 use dotenv::dotenv;
 
 use subfile_exchange::{
     config::{Cli, Role},
-    ipfs::IpfsClient,
     publisher::SubfilePublisher,
+    subfile::ipfs::IpfsClient,
     subfile_client::SubfileDownloader,
 };
diff --git a/subfile-exchange/src/publisher.rs b/subfile-exchange/src/publisher/mod.rs
similarity index 93%
rename from subfile-exchange/src/publisher.rs
rename to subfile-exchange/src/publisher/mod.rs
index 3c341e8..02996f6 100644
--- a/subfile-exchange/src/publisher.rs
+++ b/subfile-exchange/src/publisher/mod.rs
@@ -2,8 +2,10 @@ use serde_yaml::to_string;
 
 use crate::config::PublisherArgs;
 use crate::errors::Error;
-use crate::ipfs::{AddResponse, IpfsClient};
-use crate::subfile::{BlockRange, ChunkFile, FileMetaInfo, SubfileManifest};
+use crate::subfile::{
+    ipfs::{AddResponse, IpfsClient},
+    BlockRange, ChunkFile, FileMetaInfo, SubfileManifest,
+};
 
 pub struct SubfilePublisher {
     ipfs_client: IpfsClient,
@@ -87,10 +89,6 @@ impl SubfilePublisher {
     pub async fn publish(&self) -> Result<String, Error> {
         let meta_info = self.hash_and_publish_files().await?;
 
-        // {
-        //     Ok(added_hashes) => added_hashes,
-        //     Err(e) => return Err(e),
-        // };
 
         tracing::trace!(
             meta_info = tracing::field::debug(&meta_info),
@@ -105,11 +103,6 @@ impl SubfilePublisher {
                 );
                 Ok(ipfs_hash)
             }
-            // {
-            //     Ok(ipfs_hash) => {
-            //     }
-            //     Err(e) => Err(e),
-            // },
             Err(e) => Err(e),
         }
     }
diff --git a/subfile-exchange/src/file_hasher.rs b/subfile-exchange/src/subfile/file_hasher.rs
similarity index 98%
rename from subfile-exchange/src/file_hasher.rs
rename to subfile-exchange/src/subfile/file_hasher.rs
index 6a3e1f5..c4bf2b1 100644
--- a/subfile-exchange/src/file_hasher.rs
+++ b/subfile-exchange/src/subfile/file_hasher.rs
@@ -49,7 +49,7 @@ pub fn verify_chunk(data: &Bytes, chunk_hash: &str) -> bool {
 
 #[cfg(test)]
 mod tests {
-    use crate::{file_reader::chunk_file, subfile::ChunkFile, test_util::*};
+    use crate::{subfile::file_reader::chunk_file, subfile::ChunkFile, test_util::*};
     use std::path::Path;
 
     #[test]
diff --git a/subfile-exchange/src/file_reader.rs b/subfile-exchange/src/subfile/file_reader.rs
similarity index 100%
rename from subfile-exchange/src/file_reader.rs
rename to subfile-exchange/src/subfile/file_reader.rs
diff --git a/subfile-exchange/src/ipfs.rs b/subfile-exchange/src/subfile/ipfs.rs
similarity index 100%
rename from subfile-exchange/src/ipfs.rs
rename to subfile-exchange/src/subfile/ipfs.rs
diff --git a/subfile-exchange/src/subfile.rs b/subfile-exchange/src/subfile/mod.rs
similarity index 96%
rename from subfile-exchange/src/subfile.rs
rename to subfile-exchange/src/subfile/mod.rs
index ae13850..17e06cc 100644
--- a/subfile-exchange/src/subfile.rs
+++ b/subfile-exchange/src/subfile/mod.rs
@@ -1,3 +1,8 @@
+pub mod file_hasher;
+pub mod file_reader;
+pub mod ipfs;
+pub mod subfile_reader;
+
 use std::{
     path::{Path, PathBuf},
     str::FromStr,
@@ -7,9 +12,11 @@ use serde::{Deserialize, Serialize};
 
 use crate::{
     errors::Error,
-    file_hasher::{hash_chunk, verify_chunk},
-    file_reader::{chunk_file, format_path, read_chunk},
-    ipfs::is_valid_ipfs_hash,
+    subfile::{
+        file_hasher::{hash_chunk, verify_chunk},
+        file_reader::{chunk_file, format_path, read_chunk},
+        ipfs::is_valid_ipfs_hash,
+    },
 };
 
 /* Public Manifests */
diff --git a/subfile-exchange/src/subfile_reader.rs b/subfile-exchange/src/subfile/subfile_reader.rs
similarity index 98%
rename from subfile-exchange/src/subfile_reader.rs
rename to subfile-exchange/src/subfile/subfile_reader.rs
index 8eadbf7..debe2a0 100644
--- a/subfile-exchange/src/subfile_reader.rs
+++ b/subfile-exchange/src/subfile/subfile_reader.rs
@@ -4,7 +4,7 @@ use serde::de::DeserializeOwned;
 
 use crate::{
     errors::Error,
-    ipfs::IpfsClient,
+    subfile::ipfs::IpfsClient,
     subfile::{ChunkFile, ChunkFileMeta, Subfile, SubfileManifest},
 };
diff --git a/subfile-exchange/src/subfile_client/mod.rs b/subfile-exchange/src/subfile_client/mod.rs
index 2eea790..325b61f 100644
--- a/subfile-exchange/src/subfile_client/mod.rs
+++ b/subfile-exchange/src/subfile_client/mod.rs
@@ -16,11 +16,11 @@ use tokio::sync::Mutex;
 
 use crate::config::DownloaderArgs;
 use crate::errors::Error;
-use crate::file_hasher::verify_chunk;
-use crate::ipfs::IpfsClient;
-use crate::subfile::{ChunkFileMeta, Subfile};
+use crate::subfile::{
+    file_hasher::verify_chunk, ipfs::IpfsClient, subfile_reader::read_subfile, ChunkFileMeta,
+    Subfile,
+};
 use crate::subfile_finder::{IndexerEndpoint, SubfileFinder};
-use crate::subfile_reader::read_subfile;
 use crate::util::build_wallet;
 
 use self::signer::ReceiptSigner;
diff --git a/subfile-exchange/src/subfile_finder/mod.rs b/subfile-exchange/src/subfile_finder/mod.rs
index 2b694c0..fb4d189 100644
--- a/subfile-exchange/src/subfile_finder/mod.rs
+++ b/subfile-exchange/src/subfile_finder/mod.rs
@@ -10,9 +10,10 @@ use tokio::sync::Mutex;
 
 use crate::errors::Error;
 
-use crate::ipfs::IpfsClient;
-
-use crate::subfile_reader::{fetch_subfile_from_ipfs, read_subfile};
+use crate::subfile::{
+    ipfs::IpfsClient,
+    subfile_reader::{fetch_subfile_from_ipfs, read_subfile},
+};
 
 // Pair indexer operator address and indexer service endpoint (operator, indexer_url)
 // persumeably this should not be handled by clients themselves
diff --git a/subfile-exchange/tests/discovery.rs b/subfile-exchange/tests/discovery.rs
index c5a61fe..0d8bb4c 100644
--- a/subfile-exchange/tests/discovery.rs
+++ b/subfile-exchange/tests/discovery.rs
@@ -3,7 +3,7 @@ mod tests {
     use std::{process::Command, time::Duration};
 
     use subfile_exchange::{
-        ipfs::IpfsClient,
+        subfile::ipfs::IpfsClient,
         subfile_finder::{unavailable_files, FileAvailbilityMap, IndexerEndpoint, SubfileFinder},
         test_util::server_ready,
     };
diff --git a/subfile-exchange/tests/file_transfer.rs b/subfile-exchange/tests/file_transfer.rs
index 73b0926..821e59f 100644
--- a/subfile-exchange/tests/file_transfer.rs
+++ b/subfile-exchange/tests/file_transfer.rs
@@ -5,7 +5,7 @@ mod tests {
     use tokio::fs;
 
     use subfile_exchange::{
-        config::DownloaderArgs, ipfs::IpfsClient, subfile_client::SubfileDownloader,
+        config::DownloaderArgs, subfile::ipfs::IpfsClient, subfile_client::SubfileDownloader,
         test_util::server_ready,
     };
 
@@ -21,8 +21,8 @@ mod tests {
         let mut server_process = Command::new("cargo")
             .arg("run")
             .arg("-p")
-            .arg("subfile-exchange")
-            .arg("server")
+            .arg("subfile-service")
+            .arg("--")
             .arg("--mnemonic")
             .arg("sheriff obscure trick beauty army fat wink legal flee leader section suit")
             .arg("--subfiles")
diff --git a/subfile-service/src/config.rs b/subfile-service/src/config.rs
index d755540..cfd7bef 100644
--- a/subfile-service/src/config.rs
+++ b/subfile-service/src/config.rs
@@ -14,7 +14,7 @@ use tracing_subscriber::FmtSubscriber;
     author = "hopeyen"
 )]
 #[command(author, version, about, long_about = None, arg_required_else_help = true)]
-pub struct Cli {
+pub struct Config {
     #[command(flatten)]
     pub server: ServerArgs,
     #[arg(
@@ -36,10 +36,10 @@ pub struct Cli {
     pub log_format: LogFormat,
 }
 
-impl Cli {
+impl Config {
     /// Parse config arguments
     pub fn args() -> Self {
-        let config = Cli::parse();
+        let config = Config::parse();
         // Enables tracing under RUST_LOG variable
         init_tracing(config.log_format.to_string()).expect("Could not set up global default subscriber for logger, check environmental variable `RUST_LOG` or the CLI input `log-level`");
         config
diff --git a/subfile-service/src/errors.rs b/subfile-service/src/errors.rs
deleted file mode 100644
index 77ba109..0000000
--- a/subfile-service/src/errors.rs
+++ /dev/null
@@ -1,75 +0,0 @@
-use std::{error::Error as StdError, fmt};
-
-#[derive(Debug)]
-pub enum Error {
-    InvalidConfig(String),
-    FileIOError(std::io::Error),
-    InvalidRange(String),
-    IPFSError(reqwest::Error),
-    SubfileError(String),
-    Request(reqwest::Error),
-    DataUnavilable(String),
-    ChunkInvalid(String),
-    ServerError(ServerError),
-    JsonError(serde_json::Error),
-    YamlError(serde_yaml::Error),
-    InvalidPriceFormat(String),
-}
-
-impl fmt::Display for Error {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        match *self {
-            Error::InvalidConfig(ref msg) => write!(f, "Invalid configuration: {}", msg),
-            Error::FileIOError(ref err) => write!(f, "File IO error: {}", err),
-            Error::InvalidRange(ref msg) => write!(f, "Invalid range: {}", msg),
-            Error::IPFSError(ref err) => write!(f, "IPFS error: {}", err),
-            Error::SubfileError(ref msg) => write!(f, "Subfile error: {}", msg),
-            Error::Request(ref err) => write!(f, "Client error: {}", err),
-            Error::DataUnavilable(ref err) => write!(f, "Client error: {}", err),
-            Error::ChunkInvalid(ref err) => write!(f, "Chunk invalid error: {}", err),
-            Error::ServerError(ref err) => write!(f, "Server error: {}", err),
-            Error::JsonError(ref err) => write!(f, "JSON error: {}", err),
-            Error::YamlError(ref err) => write!(f, "YAML error: {}", err),
-            Error::InvalidPriceFormat(ref msg) => write!(f, "Price format error: {}", msg),
-        }
-    }
-}
-
-impl StdError for Error {
-    fn source(&self) -> Option<&(dyn StdError + 'static)> {
-        match *self {
-            Error::FileIOError(ref e) => Some(e),
-            Error::IPFSError(ref e) => Some(e),
-            Error::JsonError(ref e) => Some(e),
-            Error::YamlError(ref e) => Some(e),
-            Error::Request(ref e) => Some(e),
-            Error::ServerError(ref e) => e.source(),
-            _ => None,
-        }
-    }
-}
-
-impl fmt::Display for ServerError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        match *self {
-            ServerError::ContextError(ref msg) => write!(f, "Context error: {}", msg),
-            ServerError::RequestBodyError(ref msg) => write!(f, "Request body error: {}", msg),
-            ServerError::HeaderParseError(ref msg) => write!(f, "Header parse error: {}", msg),
-            ServerError::MethodParseError(ref msg) => write!(f, "Method parse error: {}", msg),
-            ServerError::ParamsParseError(ref msg) => write!(f, "Params parse error: {}", msg),
-            ServerError::BuildResponseError(ref msg) => write!(f, "Build response error: {}", msg),
-        }
-    }
-}
-
-impl StdError for ServerError {}
-
-#[derive(Debug)]
-pub enum ServerError {
-    ContextError(String),
-    RequestBodyError(String),
-    HeaderParseError(String),
-    MethodParseError(String),
-    ParamsParseError(String),
-    BuildResponseError(String),
-}
diff --git a/subfile-service/src/main.rs b/subfile-service/src/main.rs
index 41544b1..3ac5984 100644
--- a/subfile-service/src/main.rs
+++ b/subfile-service/src/main.rs
@@ -1,20 +1,11 @@
 use dotenv::dotenv;
 
-use subfile_exchange::ipfs::IpfsClient;
-use subfile_service::{config::Cli, subfile_server::init_server};
+use subfile_service::{config::Config, subfile_server::init_server};
 
 #[tokio::main]
 async fn main() {
     dotenv().ok();
-    let cli: Cli = Cli::args();
+    let config: Config = Config::args();
 
-    tracing::info!(cli = tracing::field::debug(&cli), "Running cli");
-
-    let client = if let Ok(client) = IpfsClient::new(&cli.ipfs_gateway) {
-        client
-    } else {
-        IpfsClient::localhost()
-    };
-
-    let _ = init_server(&client, cli.server).await;
+    let _ = init_server(config).await;
 }
diff --git a/subfile-service/src/subfile_server/admin.rs b/subfile-service/src/subfile_server/admin.rs
index d4a4036..cb888b2 100644
--- a/subfile-service/src/subfile_server/admin.rs
+++ b/subfile-service/src/subfile_server/admin.rs
@@ -3,8 +3,8 @@ use hyper::{Body, Request, Response, StatusCode};
 use serde_json::{json, Value};
 use subfile_exchange::errors::ServerError;
 use subfile_exchange::{
-    errors::Error, ipfs::is_valid_ipfs_hash, subfile::validate_subfile_entries,
-    subfile_reader::read_subfile,
+    errors::Error,
+    subfile::{ipfs::is_valid_ipfs_hash, subfile_reader::read_subfile, validate_subfile_entries},
 };
 
 use super::{create_error_response, ServerContext};
diff --git a/subfile-service/src/subfile_server/mod.rs b/subfile-service/src/subfile_server/mod.rs
index f9662c5..8dfaf48 100644
--- a/subfile-service/src/subfile_server/mod.rs
+++ b/subfile-service/src/subfile_server/mod.rs
@@ -1,30 +1,25 @@
 // #![cfg(feature = "acceptor")]
-use http::header::CONTENT_RANGE;
+
 use hyper::service::{make_service_fn, service_fn};
 use serde_json::json;
 use std::collections::HashMap;
 use std::sync::Arc;
 use tokio::sync::Mutex;
 
-use subfile_exchange::errors::{Error, ServerError};
-use subfile_exchange::ipfs::IpfsClient;
-use subfile_exchange::subfile::{validate_subfile_entries, Subfile};
-use subfile_exchange::subfile_reader::read_subfile;
-
-use crate::config::ServerArgs;
-use crate::subfile_server::{
-    admin::handle_admin_request,
-    util::{public_key, Health, Operator},
+use subfile_exchange::errors::Error;
+use subfile_exchange::subfile::{
+    ipfs::IpfsClient, subfile_reader::read_subfile, validate_subfile_entries, Subfile,
 };
+
+use crate::config::{Config, ServerArgs};
+use crate::subfile_server::{admin::handle_admin_request, util::public_key};
 
 // #![cfg(feature = "acceptor")]
 // use hyper_rustls::TlsAcceptor;
 use hyper::{Body, Request, Response, StatusCode};
 
-use self::range::{parse_range_header, serve_file, serve_file_range};
-use self::util::PackageVersion;
-
 pub mod admin;
 pub mod range;
+pub mod routes;
 pub mod util;
 
 // Define a struct for the server state
@@ -33,7 +28,7 @@ pub struct ServerState {
     pub client: IpfsClient,
     pub operator_public_key: String,
     pub subfiles: HashMap<String, Subfile>, // Keyed by IPFS hash
-    pub release: PackageVersion,
+    pub release: util::PackageVersion,
     pub free_query_auth_token: Option<String>, // Add bearer prefix
     pub admin_auth_token: Option<String>, // Add bearer prefix
     pub price_per_byte: f32,
@@ -41,13 +36,21 @@
 pub type ServerContext = Arc<Mutex<ServerState>>;
 
-pub async fn init_server(client: &IpfsClient, config: ServerArgs) {
+pub async fn init_server(config: Config) {
+    let client = if let Ok(client) = IpfsClient::new(&config.ipfs_gateway) {
+        client
+    } else {
+        IpfsClient::localhost()
+    };
+
+    let config = config.server;
+
     let port = config.port;
     let addr = format!("{}:{}", config.host, port)
         .parse()
         .expect("Invalid address");
 
-    let state = initialize_subfile_server_context(client, config)
+    let state = initialize_subfile_server_context(&client, config)
         .await
         .expect("Failed to initiate subfile server");
@@ -108,7 +111,7 @@ async fn initialize_subfile_server_context(
     let mut server_state = ServerState {
         client: client.clone(),
         subfiles: HashMap::new(),
-        release: PackageVersion::from(build_info()),
+        release: util::PackageVersion::from(build_info()),
         free_query_auth_token,
         admin_auth_token,
         operator_public_key: public_key(&config.mnemonic)
@@ -141,14 +144,16 @@ pub async fn handle_request(
             .status(StatusCode::OK)
             .body("Ready to roll!".into())
             .unwrap()),
-        "/operator" => operator_info(&context).await,
-        "/status" => status(&context).await,
-        "/health" => health().await,
-        "/version" => version(&context).await,
-        "/cost" => cost(&context).await,
+        "/operator" => routes::operator_info(&context).await,
+        "/status" => routes::status(&context).await,
+        "/health" => routes::health().await,
+        "/version" => routes::version(&context).await,
+        "/cost" => routes::cost(&context).await,
         "/admin" => handle_admin_request(req, &context).await,
         //TODO: consider routing through file level IPFS
-        path if path.starts_with("/subfiles/id/") => file_service(path, &req, &context).await,
+        path if path.starts_with("/subfiles/id/") => {
+            routes::file_service(path, &req, &context).await
+        }
         _ => Ok(Response::builder()
             .status(StatusCode::NOT_FOUND)
             .body("Route not found".into())
@@ -156,150 +161,6 @@ pub async fn handle_request(
     }
 }
 
-/// Endpoint for server health
-pub async fn health() -> Result<Response<Body>, Error> {
-    let health = Health { healthy: true };
-    let health_json = serde_json::to_string(&health).map_err(Error::JsonError)?;
-    Response::builder()
-        .status(StatusCode::OK)
-        .body(Body::from(health_json))
-        .map_err(|e| Error::ServerError(ServerError::BuildResponseError(e.to_string())))
-}
-
-/// Endpoint for package version
-pub async fn version(context: &ServerContext) -> Result<Response<Body>, Error> {
-    let version = context.lock().await.release.version.clone();
-    Response::builder()
-        .status(StatusCode::OK)
-        .body(Body::from(version))
-        .map_err(|e| Error::ServerError(ServerError::BuildResponseError(e.to_string())))
-}
-
-/// Endpoint for cost to download per byte
-pub async fn cost(context: &ServerContext) -> Result<Response<Body>, Error> {
-    let price = context.lock().await.price_per_byte.to_string();
-    Response::builder()
-        .status(StatusCode::OK)
-        .body(Body::from(price))
-        .map_err(|e| Error::ServerError(ServerError::BuildResponseError(e.to_string())))
-}
-
-/// Endpoint for status availability
-pub async fn status(context: &ServerContext) -> Result<Response<Body>, Error> {
-    let subfile_mapping = context.lock().await.subfiles.clone();
-    let subfile_ipfses: Vec<String> = subfile_mapping
-        .keys()
-        .map(|i| i.to_owned())
-        .collect::<Vec<String>>();
-    let json = serde_json::to_string(&subfile_ipfses).map_err(Error::JsonError)?;
-
-    tracing::debug!(json, "Serving status");
-    Response::builder()
-        .status(StatusCode::OK)
-        .body(Body::from(json))
-        .map_err(|e| Error::ServerError(ServerError::BuildResponseError(e.to_string())))
-}
-
-// Define a handler function for the `/info` route
-pub async fn operator_info(context: &ServerContext) -> Result<Response<Body>, Error> {
-    let public_key = context.lock().await.operator_public_key.clone();
-    let operator = Operator { public_key };
-    let json = serde_json::to_string(&operator).map_err(Error::JsonError)?;
-    tracing::debug!(json, "Operator info response");
-    Response::builder()
-        .status(StatusCode::OK)
-        .body(Body::from(json))
-        .map_err(|e| Error::ServerError(ServerError::BuildResponseError(e.to_string())))
-}
-
-// Serve file requests
-pub async fn file_service(
-    path: &str,
-    req: &Request<Body>,
-    context: &ServerContext,
-) -> Result<Response<Body>, Error> {
-    tracing::debug!("Received file range request");
-    let id = path.trim_start_matches("/subfiles/id/");
-
-    let context_ref = context.lock().await;
-    tracing::debug!(
-        subfiles = tracing::field::debug(&context_ref),
-        id,
-        "Received file range request"
-    );
-
-    // Validate the auth token
-    let auth_token = req
-        .headers()
-        .get(http::header::AUTHORIZATION)
-        .and_then(|t| t.to_str().ok());
-
-    let free = context_ref.free_query_auth_token.is_none()
-        || (auth_token.is_some()
-            && context_ref.free_query_auth_token.is_some()
-            && auth_token.unwrap() == context_ref.free_query_auth_token.as_deref().unwrap());
-
-    if !free {
-        tracing::warn!("Respond with unauthorized query");
-        return Ok(Response::builder()
-            .status(StatusCode::UNAUTHORIZED)
-            .body("Paid service is not implemented, need free query authentication".into())
-            .unwrap());
-    }
-
-    let requested_subfile = match context_ref.subfiles.get(id) {
-        Some(s) => s.clone(),
-        None => {
-            tracing::debug!(
-                server_context = tracing::field::debug(&context_ref),
-                id,
-                "Requested subfile is not served locally"
-            );
-            return Ok(Response::builder()
-                .status(StatusCode::NOT_FOUND)
-                .body("Subfile not found".into())
-                .unwrap());
-        }
-    };
-
-    match req.headers().get("file_hash") {
-        Some(hash) if hash.to_str().is_ok() => {
-            let mut file_path = requested_subfile.local_path.clone();
-            let chunk_file = match requested_subfile
-                .chunk_files
-                .iter()
-                .find(|file| file.meta_info.hash == hash.to_str().unwrap())
-            {
-                Some(c) => c,
-                None => {
-                    return Ok(Response::builder()
-                        .status(StatusCode::NOT_FOUND)
-                        .body("Chunk file not found".into())
-                        .unwrap())
-                }
-            };
-            file_path.push(chunk_file.meta_info.name.clone());
-            // Parse the range header to get the start and end bytes
-            match req.headers().get(CONTENT_RANGE) {
-                Some(r) => {
-                    tracing::debug!("Parse content range header");
-                    let range = parse_range_header(r)?;
-                    //TODO: validate receipt
-                    serve_file_range(&file_path, range).await
-                }
-                None => {
-                    tracing::info!("Serve file");
-                    serve_file(&file_path).await
-                }
-            }
-        }
-        _ => Ok(Response::builder()
-            .status(StatusCode::NOT_ACCEPTABLE)
-            .body("Missing required chunk_file_hash header".into())
-            .unwrap()),
-    }
-}
-
 /// Create an error response
 pub fn create_error_response(msg: &str, status_code: StatusCode) -> Response<Body> {
     let body = json!({ "error": msg }).to_string();
diff --git a/subfile-service/src/subfile_server/routes.rs b/subfile-service/src/subfile_server/routes.rs
new file mode 100644
index 0000000..4d6f419
--- /dev/null
+++ b/subfile-service/src/subfile_server/routes.rs
@@ -0,0 +1,158 @@
+// #![cfg(feature = "acceptor")]
+use http::header::CONTENT_RANGE;
+
+use subfile_exchange::errors::{Error, ServerError};
+
+use crate::subfile_server::util::{Health, Operator};
+// #![cfg(feature = "acceptor")]
+// use hyper_rustls::TlsAcceptor;
+use hyper::{Body, Request, Response, StatusCode};
+
+use super::{
+    range::{parse_range_header, serve_file, serve_file_range},
+    ServerContext,
+};
+
+/// Endpoint for server health
+pub async fn health() -> Result<Response<Body>, Error> {
+    let health = Health { healthy: true };
+    let health_json = serde_json::to_string(&health).map_err(Error::JsonError)?;
+    Response::builder()
+        .status(StatusCode::OK)
+        .body(Body::from(health_json))
+        .map_err(|e| Error::ServerError(ServerError::BuildResponseError(e.to_string())))
+}
+
+/// Endpoint for package version
+pub async fn version(context: &ServerContext) -> Result<Response<Body>, Error> {
+    let version = context.lock().await.release.version.clone();
+    Response::builder()
+        .status(StatusCode::OK)
+        .body(Body::from(version))
+        .map_err(|e| Error::ServerError(ServerError::BuildResponseError(e.to_string())))
+}
+
+/// Endpoint for cost to download per byte
+pub async fn cost(context: &ServerContext) -> Result<Response<Body>, Error> {
+    let price = context.lock().await.price_per_byte.to_string();
+    Response::builder()
+        .status(StatusCode::OK)
+        .body(Body::from(price))
+        .map_err(|e| Error::ServerError(ServerError::BuildResponseError(e.to_string())))
+}
+
+/// Endpoint for status availability
+pub async fn status(context: &ServerContext) -> Result<Response<Body>, Error> {
+    let subfile_mapping = context.lock().await.subfiles.clone();
+    let subfile_ipfses: Vec<String> = subfile_mapping
+        .keys()
+        .map(|i| i.to_owned())
+        .collect::<Vec<String>>();
+    let json = serde_json::to_string(&subfile_ipfses).map_err(Error::JsonError)?;
+
+    tracing::debug!(json, "Serving status");
+    Response::builder()
+        .status(StatusCode::OK)
+        .body(Body::from(json))
+        .map_err(|e| Error::ServerError(ServerError::BuildResponseError(e.to_string())))
+}
+
+// Define a handler function for the `/info` route
+pub async fn operator_info(context: &ServerContext) -> Result<Response<Body>, Error> {
+    let public_key = context.lock().await.operator_public_key.clone();
+    let operator = Operator { public_key };
+    let json = serde_json::to_string(&operator).map_err(Error::JsonError)?;
+    tracing::debug!(json, "Operator info response");
+    Response::builder()
+        .status(StatusCode::OK)
+        .body(Body::from(json))
+        .map_err(|e| Error::ServerError(ServerError::BuildResponseError(e.to_string())))
+}
+
+// Serve file requests
+pub async fn file_service(
+    path: &str,
+    req: &Request<Body>,
+    context: &ServerContext,
+) -> Result<Response<Body>, Error> {
+    tracing::debug!("Received file range request");
+    let id = path.trim_start_matches("/subfiles/id/");
+
+    let context_ref = context.lock().await;
+    tracing::debug!(
+        subfiles = tracing::field::debug(&context_ref),
+        id,
+        "Received file range request"
+    );
+
+    // Validate the auth token
+    let auth_token = req
+        .headers()
+        .get(http::header::AUTHORIZATION)
+        .and_then(|t| t.to_str().ok());
+
+    let free = context_ref.free_query_auth_token.is_none()
+        || (auth_token.is_some()
+            && context_ref.free_query_auth_token.is_some()
+            && auth_token.unwrap() == context_ref.free_query_auth_token.as_deref().unwrap());
+
+    if !free {
+        tracing::warn!("Respond with unauthorized query");
+        return Ok(Response::builder()
+            .status(StatusCode::UNAUTHORIZED)
+            .body("Paid service is not implemented, need free query authentication".into())
+            .unwrap());
+    }
+
+    let requested_subfile = match context_ref.subfiles.get(id) {
+        Some(s) => s.clone(),
+        None => {
+            tracing::debug!(
+                server_context = tracing::field::debug(&context_ref),
+                id,
+                "Requested subfile is not served locally"
+            );
+            return Ok(Response::builder()
+                .status(StatusCode::NOT_FOUND)
+                .body("Subfile not found".into())
+                .unwrap());
+        }
+    };
+
+    match req.headers().get("file_hash") {
+        Some(hash) if hash.to_str().is_ok() => {
+            let mut file_path = requested_subfile.local_path.clone();
+            let chunk_file = match requested_subfile
+                .chunk_files
+                .iter()
+                .find(|file| file.meta_info.hash == hash.to_str().unwrap())
+            {
+                Some(c) => c,
+                None => {
+                    return Ok(Response::builder()
+                        .status(StatusCode::NOT_FOUND)
+                        .body("Chunk file not found".into())
+                        .unwrap())
+                }
+            };
+            file_path.push(chunk_file.meta_info.name.clone());
+            // Parse the range header to get the start and end bytes
+            match req.headers().get(CONTENT_RANGE) {
+                Some(r) => {
+                    tracing::debug!("Parse content range header");
+                    let range = parse_range_header(r)?;
+                    //TODO: validate receipt
+                    serve_file_range(&file_path, range).await
+                }
+                None => {
+                    tracing::info!("Serve file");
+                    serve_file(&file_path).await
+                }
+            }
+        }
+        _ => Ok(Response::builder()
+            .status(StatusCode::NOT_ACCEPTABLE)
+            .body("Missing required chunk_file_hash header".into())
+            .unwrap()),
+    }
+}