Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

POST /hashed #59

Merged
merged 3 commits into from
Mar 4, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 1 addition & 4 deletions .github/workflows/rust.yml
Original file line number Diff line number Diff line change
@@ -1,10 +1,7 @@
name: Rust

on:
push:
branches: [ "main" ]
pull_request:
branches: [ "main" ]
- push

env:
CARGO_TERM_COLOR: always
Expand Down
21 changes: 11 additions & 10 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 3 additions & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,13 @@ axum-macros = "0.4.0"
bao = "0.12.1"
blake3 = "1.5.0"
bytes = "1.5.0"
carbonado = "0.4.0"
carbonado = "0.4.1"
chrono = "0.4.31"
clap = { version = "4.4.11", features = ["derive"] }
directories = "5.0.1"
file-format = { version = "0.24.0", features = ["reader-xml"] }
flexi_logger = { version = "0.27.3", features = ["colors"] }
flexi_syslog = { version = "0.5.2", git = "https://github.com/cryptoquick/flexi_syslog.git" }
flexi_logger = { version = "0.27.4", features = ["colors"] }
flexi_syslog = "0.5.3"
futures-util = "0.3.29"
hex = "0.4.3"
http = "1.0.0"
Expand Down
34 changes: 18 additions & 16 deletions src/backend/fs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ pub type FileStream = Pin<Box<dyn Stream<Item = Result<Bytes>> + Send>>;
pub async fn write_file<'a>(
pk: &Secp256k1PubKey,
file_stream: FileStream,
name: Option<String>,
file_name: FileName,
mime_type_receiver: watch::Receiver<String>,
) -> Result<Blake3Hash> {
trace!("write_file, create a shared secret using ECDH");
Expand All @@ -50,9 +50,10 @@ pub async fn write_file<'a>(
ensure_pk_dirs_exist(&write_pk_str).await?;

trace!("Initialize Blake3 keyed hasher");
let file_hasher = Arc::new(Mutex::new(blake3::Hasher::new_keyed(
&x_only_pk.serialize(),
)));
let file_hasher = Arc::new(Mutex::new(match file_name {
FileName::PubKeyed => blake3::Hasher::new_keyed(&x_only_pk.serialize()),
_ => blake3::Hasher::new(),
}));

trace!("Iterate through file body stream");
let thread_file_hasher = file_hasher.clone();
Expand Down Expand Up @@ -112,11 +113,17 @@ pub async fn write_file<'a>(
let final_mime_type_str = current_mime_type.lock().await.clone();
debug!(">>>>>>>>>> Mime_Type has Changed {final_mime_type_str:?}");

let write_pk_str = write_pk_str.to_owned();
let file_name = match file_name {
FileName::Named(name) => name,
FileName::PubKeyed => file_hash.to_string(),
FileName::Hashed => file_hash.to_string(),
};

trace!("Append each hash to its catalog");
write_catalog(
&write_pk_str,
&file_hash,
name,
write_pk_str,
file_name,
&final_mime_type_str,
&segment_hashes,
)
Expand Down Expand Up @@ -174,9 +181,8 @@ pub fn write_segment(sk: &[u8], pk: &[u8], encoded: &Encoded) -> Result<BaoHash>
}

pub async fn write_catalog(
write_pk_str: &str,
file_hash: &Blake3Hash,
name: Option<String>,
write_pk_str: String,
file_name: String,
mime_type: &str,
segment_hashes: &[BaoHash],
) -> Result<()> {
Expand All @@ -186,10 +192,6 @@ pub async fn write_catalog(
.flat_map(|bao_hash| bao_hash.to_bytes())
.collect();

let write_pk_str = write_pk_str.to_owned();
let file_hash = file_hash.to_string();
let name = name.unwrap_or(file_hash);

let date_utc = Utc.from_utc_datetime(&NaiveDateTime::from_timestamp_opt(61, 0).unwrap());
let date = date_utc.to_string();
let mime_type = mime_type.to_string();
Expand All @@ -198,7 +200,7 @@ pub async fn write_catalog(

// HEADER METADATA
let cat_data = CborData {
name: name.to_string(),
name: file_name.to_string(),
date,
mime_type,
};
Expand All @@ -212,7 +214,7 @@ pub async fn write_catalog(
let write_pk_str = write_pk_str.clone();

let contents = contents.clone();
let name = name.clone();
let name = file_name.clone();
let cbor_len = length as u8;
let cbor_data = cbor_data.clone();
let metadata = Some(cbor_data.clone());
Expand Down
45 changes: 29 additions & 16 deletions src/frontend/http.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,16 +25,13 @@ use crate::{
prelude::*,
};

async fn write_file_handler(pk: &str, body: Body, name: Option<String>) -> Result<String> {
async fn write_file_handler(pk: &str, body: Body, file_name: FileName) -> Result<String> {
let pk = &Secp256k1PubKey::try_from(pk)?;

let name_clone = name.clone().unwrap_or("Not Named".to_string());
let extension = name_clone.split('.').last().unwrap_or_default();
if extension.is_empty() {
debug!(">>>>>> NO EXTENSION FROM NAME {}", extension);
} else {
debug!(">>>>>> EXTENSION FROM NAME {}", extension);
}
// let extension = match file_name {
// FileName::Named(name) => Some(name.split('.').last().unwrap_or_default().to_owned()),
// _ => None,
// };

// Create a watch channel for MIME type updates
let (mime_type_sender, mime_type_receiver) = watch::channel("init_mime_type".to_string());
Expand Down Expand Up @@ -87,16 +84,31 @@ async fn write_file_handler(pk: &str, body: Body, name: Option<String>) -> Resul
.boxed();

// Call write_file with the receiver part of the channel
let Blake3Hash(hash) = write_file(pk, file_stream, name, mime_type_receiver).await?;
let Blake3Hash(hash) = write_file(pk, file_stream, file_name, mime_type_receiver).await?;

Ok(hash.to_hex().to_string())
}

#[axum_macros::debug_handler]
async fn post_file(Path(pk): Path<String>, body: Body) -> Result<impl IntoResponse, AppError> {
async fn post_file_hashed(
Path(pk): Path<String>,
body: Body,
) -> Result<impl IntoResponse, AppError> {
debug!("post_file called with {pk}");

let hash = write_file_handler(&pk, body, FileName::Hashed).await?;

Ok((StatusCode::OK, hash))
}

#[axum_macros::debug_handler]
async fn post_file_pubkeyed(
Path(pk): Path<String>,
body: Body,
) -> Result<impl IntoResponse, AppError> {
debug!("post_file called with {pk}");

let hash = write_file_handler(&pk, body, None).await?;
let hash = write_file_handler(&pk, body, FileName::PubKeyed).await?;

Ok((StatusCode::OK, hash))
}
Expand All @@ -115,7 +127,7 @@ async fn post_file_named(
return Err(AppError(StatusCode::BAD_REQUEST, anyhow!("Provided file name contains characters that have not been encoded. It should be: {reencoded}")));
}

let hash = write_file_handler(&pk, body, Some(name)).await?;
let hash = write_file_handler(&pk, body, FileName::Named(name)).await?;

Ok((StatusCode::OK, hash))
}
Expand Down Expand Up @@ -174,10 +186,11 @@ async fn key(Path(pk): Path<String>) -> Result<impl IntoResponse, AppError> {
pub async fn start() -> Result<()> {
let app = Router::new()
.route("/remove/:pk/:blake3_hash", delete(remove_file))
.route("/store/:pk", post(post_file))
.route("/store_named/:pk/:name", post(post_file_named))
.route("/retrieve/:pk/:blake3_hash", get(get_file))
.route("/retrieve_named/:pk/:name", get(get_file_named))
.route("/hashed/:pk", post(post_file_hashed))
.route("/pubkeyed/:pk", post(post_file_pubkeyed))
.route("/named/:pk/:name", post(post_file_named))
.route("/hashed/:pk/:blake3_hash", get(get_file))
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Here it seems "get(get_file))" retrieves both "hashed" and "pubkeyed"

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Correct, they're both hashes

.route("/named/:pk/:name", get(get_file_named))
.route("/key/:pk", get(key))
// .route("/catalog/:blake3_hash", get(get_catalog))
// .route("/raw/:bao_hash", get(get_raw))
Expand Down
7 changes: 7 additions & 0 deletions src/structs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -96,3 +96,10 @@ pub struct CborData {
pub mime_type: String,
// pub metadata: Option<Vec<u8>>,
}

/// How an uploaded file should be identified in its owner's catalog.
///
/// The variant also selects which Blake3 hasher `write_file` uses:
/// `PubKeyed` hashes with a key derived from the uploader's x-only
/// public key, while the other variants use a plain (unkeyed) hasher.
/// When no explicit name is given (`PubKeyed`/`Hashed`), the catalog
/// entry falls back to the file's Blake3 hash string.
pub enum FileName {
/// Caller supplied an explicit file name (expected to be percent-encoded
/// by the HTTP layer — see the re-encoding check in `post_file_named`).
Named(String),
/// No explicit name; file is addressed by its pubkey-keyed Blake3 hash.
PubKeyed,
/// No explicit name; file is addressed by its plain Blake3 hash.
Hashed,
// None,
}
8 changes: 4 additions & 4 deletions tests/file.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ use carbonado_node::{
backend::fs::{read_file, write_file, FileStream},
config::node_shared_secret,
prelude::SEGMENT_SIZE,
structs::{Hash, Lookup},
structs::{FileName, Hash, Lookup},
};

use futures_util::{stream, StreamExt, TryStreamExt};
Expand Down Expand Up @@ -58,7 +58,7 @@ async fn write_read() -> Result<()> {
let blake3_hash = write_file(
&Secp256k1PubKey::new(pk),
file_stream,
None,
FileName::PubKeyed,
mime_type_receiver,
)
.await?;
Expand Down Expand Up @@ -125,7 +125,7 @@ async fn read_write_delete_file() -> Result<()> {
let file_did_write = write_file(
&Secp256k1PubKey::new(pk),
file_stream,
None,
FileName::PubKeyed,
mime_type_receiver.clone(),
)
.await
Expand All @@ -150,7 +150,7 @@ async fn read_write_delete_file() -> Result<()> {
let blake3_hash = write_file(
&Secp256k1PubKey::new(pk),
file_stream,
None,
FileName::PubKeyed,
mime_type_receiver,
)
.await
Expand Down
Loading