Commit: backup_test

rustchain64 committed Oct 12, 2023
1 parent c8c139e commit 64fd3da
Showing 9 changed files with 243 additions and 68 deletions.
69 changes: 69 additions & 0 deletions .vscode/launch.json
@@ -0,0 +1,69 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "lldb",
"request": "launch",
"name": "Debug unit tests in library 'carbonado-node'",
"cargo": {
"args": ["test", "--no-run", "--lib", "--package=carbonado-node"],
"filter": {
"name": "carbonado-node",
"kind": "lib"
}
},
"args": [],
"cwd": "${workspaceFolder}"
},
{
"type": "lldb",
"request": "launch",
"name": "Debug executable 'carbonadod'",
"cargo": {
"args": ["build", "--bin=carbonadod", "--package=carbonado-node"],
"filter": {
"name": "carbonadod",
"kind": "bin"
}
},
"args": [],
"cwd": "${workspaceFolder}"
},
{
"type": "lldb",
"request": "launch",
"name": "Debug unit tests in executable 'carbonadod'",
"cargo": {
"args": [
"test",
"--no-run",
"--bin=carbonadod",
"--package=carbonado-node"
],
"filter": {
"name": "carbonadod",
"kind": "bin"
}
},
"args": [],
"cwd": "${workspaceFolder}"
},
{
"type": "lldb",
"request": "launch",
"name": "Debug integration test 'file'",
"cargo": {
"args": ["test", "--no-run", "--test=file", "--package=carbonado-node"],
"filter": {
"name": "file",
"kind": "test"
}
},
"args": [],
"cwd": "${workspaceFolder}"
}
]
}
32 changes: 2 additions & 30 deletions Cargo.lock

(Generated lockfile; diff not rendered.)

7 changes: 6 additions & 1 deletion Cargo.toml
@@ -3,6 +3,9 @@ name = "carbonado-node"
version = "0.1.0"
edition = "2021"

[profile.release]
debug = true

[[bin]]
name = "carbonadod"

@@ -30,7 +33,8 @@ hex = "0.4.3"
# human_bytes = "0.4.1"
infer = { version = "0.15.0", default-features = false }
log = "0.4.17"
magic = "0.13.0"
#magic = "0.13.0"
nom = "7.1.3"
once_cell = "1.17.1"
par-stream = { version = "0.10.2", features = ["runtime-tokio"] }
percent-encoding = { version = "2.2.0", git = "https://github.com/cryptoquick/rust-url.git", branch = "addl-percent-encode-sets" }
@@ -45,6 +49,7 @@ secp256k1 = { version = "0.27.0", features = [
serde = { version = "1.0.152", features = ["derive"] }
serde_cbor = "0.11"
syslog = "6.0.1"
thiserror = "1.0"
tokio = { version = "1.26.0", features = ["full"] }
toml = "0.7.2"
tower-http = { version = "0.4.0", features = ["cors"] }
88 changes: 75 additions & 13 deletions src/backend/fs.rs
@@ -1,7 +1,7 @@
use anyhow::{anyhow, Error, Result};
use axum::body::Bytes;
use bytes::BytesMut;
use carbonado::{constants::Format, file::Header, structs::Encoded};
use carbonado::{constants::Format, error::CarbonadoError, file::Header, structs::Encoded};
use chrono::{NaiveDateTime, TimeZone, Utc};
use futures_util::{stream, Stream, StreamExt, TryStreamExt};
use infer::{self};
@@ -13,6 +13,7 @@ use rayon::{
};
use secp256k1::{PublicKey, SecretKey};
use std::{
fmt::DebugTuple,
// GitHub Actions / lint failure: error: unused import: `fmt::DebugTuple` --> src/backend/fs.rs:16:5; note: `-D unused-imports` implied by `-D warnings`
fs::{self, OpenOptions},
io::{self, Read, Seek, Write},
path::{Path, PathBuf},
@@ -27,6 +28,20 @@ use crate::{
prelude::*,
};

use bao::Hash;
// mod structs {
// pub struct Hash {
// // ... your fields here, probably:
// bytes: [u8; 32],
// }

// impl From<[u8; 32]> for Hash {
// fn from(bytes: [u8; 32]) -> Self {
// Hash { bytes }
// }
// }
// }

pub type FileStream = Pin<Box<dyn Stream<Item = Result<Bytes>> + Send>>;

pub async fn write_file<'a>(
@@ -115,10 +130,17 @@ pub fn write_segment(sk: &[u8], pk: &[u8], encoded: &Encoded) -> Result<BaoHash>
metadata,
)?;
let header_bytes = header.try_to_vec()?;
// Name the Carbonado archive file using Header::file_name().
// The file name is generated here, but it is not stored as metadata.
let file_name = header.file_name();

let path = file_path(chunk_index, &hex::encode(pk), SEGMENT_DIR, &file_name)?;

// The path tells us where to write the segment, but it is not preserved.
// If we write it into the segment header, it won't be readable at the correct time.
// Does it need to be written to a file?

trace!("Write segment at {path:?}");
let mut file = OpenOptions::new()
.create_new(true)
@@ -180,6 +202,10 @@ pub async fn write_catalog(
let cbor_data = cbor_data.clone();
let metadata = Some(cbor_data.clone());

// Convert the metadata bytes to an Option<[u8; 8]>.
let my_array_option: Option<[u8; 8]> = metadata.and_then(vec_to_array);
let metadata = my_array_option;

move || {
let header = CatHeader { cbor_len, metadata };
let header_bytes = header.try_to_vec()?;
@@ -216,17 +242,18 @@
}

pub fn read_file(pk: &Secp256k1PubKey, lookup: &Lookup) -> Result<FileStream> {
debug!("Read file wiht lookup: {lookup}");
debug!(">>>>>>>>>>>>>>>>>> Read file with lookup: {lookup} <<<<<<<<<<<<<<<<<<<<<<<<<<");

trace!("Create a shared secret using ECDH");
trace!(">>>>>>>>>>> SEGMENTS Create a shared secret using ECDH");
let ss = node_shared_secret(&pk.into_inner())?.secret_bytes();
let write_pk = PublicKey::from_secret_key_global(&SecretKey::from_slice(&ss)?);
let write_pk_str = write_pk.to_string();

trace!("Read catalog file bytes, parse out each hash, plus the segment Carbonado format");
trace!(">>>>>>>>>>> Read catalog file bytes, parse out each hash, plus the segment Carbonado format");
let catalog_file = read_catalog(pk, lookup)?;

trace!("For each hash, read each chunk into a segment, then decode that segment");
trace!(">>>>>>>>>>> For each hash, read each chunk into a segment, then decode that segment");

let file_bytes: FileStream = stream::iter(catalog_file)
.par_then(None, move |segment_hash| {
let write_pk_str = write_pk_str.clone();
@@ -239,7 +266,20 @@ pub fn read_file(pk: &Secp256k1PubKey, lookup: &Lookup) -> Result<FileStream> {
&format!("{}.c{}", segment_hash, NODE_FORMAT),
)?;

// The segment hash is wrong, causing a lookup for a file name that does not exist.
debug!("243 > {}", segment_hash);
debug!("244 >");
debug!("245 >");

println!("Message here {}", write_pk_str);

let mut chunk_file = OpenOptions::new().read(true).open(chunk_path)?;

debug!(
">>>> >>>>>>>>>>> ############ try to read in from path :: chunk_file >>>>>"
);

// HEADER VERSUS CAT HEADER
let header = Header::try_from(&chunk_file)?;

let segment: Vec<u8> = if SYS_CFG.drive_redundancy > 1 {
@@ -250,18 +290,15 @@ pub fn read_file(pk: &Secp256k1PubKey, lookup: &Lookup) -> Result<FileStream> {
let write_pk_str = write_pk_str.clone();
let segment_hash = segment_hash.clone();
move || {
trace!("Get catalogs directory path");
debug!(" >>>>>>> ===== Get catalogs directory path");

let path = file_path(
volume_index,
&write_pk_str,
SEGMENT_DIR,
&segment_hash,
)?;

// let cookie = Cookie::open(magic::flags::MIME_TYPE)?;
// let mime_type = cookie.file(path)?;
// println!("====>>>>>> LIKE MAGIC :: MIME Type: {}", mime_type);

trace!("Read segment file at {path:?}");
let mut file = OpenOptions::new().read(true).open(path)?;

@@ -311,14 +348,19 @@ pub fn read_file(pk: &Secp256k1PubKey, lookup: &Lookup) -> Result<FileStream> {
}

pub fn read_catalog(pk: &Secp256k1PubKey, lookup: &Lookup) -> Result<Vec<BaoHash>> {
trace!("Create a shared secret using ECDH");
trace!("CATALOG Create a shared secret using ECDH");
let ss = node_shared_secret(&pk.into_inner())?.secret_bytes();
let write_pk = PublicKey::from_secret_key_global(&SecretKey::from_slice(&ss)?);
let write_pk_str = write_pk.to_string();

let path = file_path(0, &write_pk_str, CATALOG_DIR, &lookup.to_string())?;
let path = file_path(
0,
&write_pk_str,
CATALOG_DIR,
&format!("{}.cat", &lookup.to_string()),
)?;

trace!("Read catalog at {path:?}");
trace!("################# >>>> Read catalog at {path:?}");
let mut file = OpenOptions::new().read(true).open(path)?;

let mut bytes = vec![];
@@ -411,3 +453,23 @@ fn remove_dir_catalogs(path: PathBuf, file: PathBuf) -> io::Result<()> {
// }
// Ok(())
// }

fn vec_to_array(vec: Vec<u8>) -> Option<[u8; 8]> {
if vec.len() == 8 {
let mut array = [0u8; 8];
array.copy_from_slice(&vec);
Some(array)
} else {
None
}
}

/// Decodes a Bao hash from a slice of raw hash bytes.
pub fn decode_bao_hash(hash: &[u8]) -> Result<Hash, CarbonadoError> {
if hash.len() != bao::HASH_SIZE {
Err(CarbonadoError::HashDecodeError(bao::HASH_SIZE, hash.len()))
} else {
let hash_array: [u8; bao::HASH_SIZE] = hash[..].try_into()?;
Ok(hash_array.into())
}
}
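
A minimal usage sketch of the vec_to_array helper added above; the example function below is illustrative only and not part of the commit. It shows the Some/None behavior that write_catalog relies on when it feeds the CBOR metadata through metadata.and_then(vec_to_array).

// Illustrative only: vec_to_array refuses to truncate or pad,
// so anything other than exactly 8 bytes yields None.
fn vec_to_array_example() {
    // Wrong length: returns None rather than guessing.
    assert_eq!(vec_to_array(vec![1u8, 2, 3]), None);
    // Exactly 8 bytes: the Vec is copied into a fixed-size array.
    assert_eq!(
        vec_to_array(vec![0u8, 1, 2, 3, 4, 5, 6, 7]),
        Some([0u8, 1, 2, 3, 4, 5, 6, 7])
    );
}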
4 changes: 2 additions & 2 deletions src/constants.rs
@@ -3,5 +3,5 @@ pub const SEGMENT_SIZE: usize = 1024 * 1024;
pub const SEGMENT_DIR: &str = "segments";
pub const CATALOG_DIR: &str = "catalogs";

/// "Magic number" used by the Carbonado file format. 12 bytes: "CARBONADO", and a version, 00, plus a newline character
pub const MAGICNO: &[u8; 16] = b"CARBONADONODE00\n";
/// "Magic number" used by the Carbonado-node file format. 12 bytes: "CAT_MAGICNO", and a version, 00, plus a newline character
pub const CAT_MAGICNO: [u8; 16] = *b"CARBONADONODE00\n";
8 changes: 8 additions & 0 deletions src/errors.rs
@@ -0,0 +1,8 @@
use thiserror::Error;

#[derive(Error, Debug)]
pub enum CarbonadoNodeError {
/// Invalid magic number
#[error("File header lacks Carbonado magic number and may not be a proper Carbonado file. Magic number found was {0}.")]
CatInvalidMagicNumber(String),
}
(Remaining file diffs not loaded.)
