From 49f80e603d7dc2ef2a989e90064a0f9c90e4462e Mon Sep 17 00:00:00 2001 From: Ruediger Klaehn Date: Mon, 18 Nov 2024 11:13:33 +0200 Subject: [PATCH 1/8] Use blobs and tags cli from iroh-blobs crate --- Cargo.lock | 6 +- Cargo.toml | 2 +- iroh-cli/Cargo.toml | 1 + iroh-cli/src/commands.rs | 15 +- iroh-cli/src/commands/blobs.rs | 1221 -------------------------------- iroh-cli/src/commands/rpc.rs | 5 +- iroh-cli/src/commands/tags.rs | 45 -- 7 files changed, 17 insertions(+), 1278 deletions(-) delete mode 100644 iroh-cli/src/commands/blobs.rs delete mode 100644 iroh-cli/src/commands/tags.rs diff --git a/Cargo.lock b/Cargo.lock index f7cf0c2999..c75d1d9e9f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2705,13 +2705,15 @@ dependencies = [ [[package]] name = "iroh-blobs" version = "0.28.1" -source = "git+https://github.com/n0-computer/iroh-blobs?branch=main#290eb42cfd2d13202208264c2674ce423829f571" +source = "git+https://github.com/n0-computer/iroh-blobs?branch=cli#daad706ee0025e095e76c019c3f8d96d899a5bb3" dependencies = [ "anyhow", "async-channel", "bao-tree", "bytes", "chrono", + "clap", + "console", "derive_more", "futures-buffered", "futures-lite 2.4.0", @@ -2719,6 +2721,7 @@ dependencies = [ "genawaiter", "hashlink", "hex", + "indicatif", "iroh-base", "iroh-io", "iroh-metrics", @@ -2777,6 +2780,7 @@ dependencies = [ "human-time", "indicatif", "iroh", + "iroh-blobs", "iroh-docs", "iroh-gossip", "iroh-metrics", diff --git a/Cargo.toml b/Cargo.toml index 2f995ba8f6..6ca23e89a0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -57,4 +57,4 @@ iroh-router = { path = "./iroh-router" } iroh-gossip = { git = "https://github.com/n0-computer/iroh-gossip", branch = "main" } iroh-docs = { git = "https://github.com/n0-computer/iroh-docs", branch = "main" } -iroh-blobs = { git = "https://github.com/n0-computer/iroh-blobs", branch = "main" } +iroh-blobs = { git = "https://github.com/n0-computer/iroh-blobs", branch = "cli" } diff --git a/iroh-cli/Cargo.toml b/iroh-cli/Cargo.toml index d36cbe0111..de885624cb 100644 --- a/iroh-cli/Cargo.toml +++ b/iroh-cli/Cargo.toml @@ -44,6 +44,7 @@ iroh = { version = "0.28.1", path = "../iroh", features = ["metrics"] } iroh-gossip = "0.28.1" iroh-docs = { version = "0.28.0", features = ["rpc"]} iroh-metrics = { version = "0.28.0" } +iroh-blobs = { version = "0.28.1", features = ["cli"] } parking_lot = "0.12.1" pkarr = { version = "2.2.0", default-features = false } portable-atomic = "1" diff --git a/iroh-cli/src/commands.rs b/iroh-cli/src/commands.rs index 0b2812909f..628baaf967 100644 --- a/iroh-cli/src/commands.rs +++ b/iroh-cli/src/commands.rs @@ -6,16 +6,12 @@ use std::{ use anyhow::{ensure, Context, Result}; use clap::Parser; use iroh::client::Iroh; +use iroh_blobs::cli::{BlobAddOptions, BlobSource}; -use self::{ - blobs::{BlobAddOptions, BlobSource}, - rpc::RpcCommands, - start::RunType, -}; +use self::{rpc::RpcCommands, start::RunType}; use crate::config::{ConsoleEnv, NodeConfig}; pub(crate) mod authors; -pub(crate) mod blobs; pub(crate) mod console; pub(crate) mod docs; pub(crate) mod doctor; @@ -23,7 +19,7 @@ pub(crate) mod gossip; pub(crate) mod net; pub(crate) mod rpc; pub(crate) mod start; -pub(crate) mod tags; +pub use iroh_blobs::{cli as blobs, cli::tags}; /// iroh is a tool for building distributed apps. 
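// Hedged illustrative sketch, not part of this patch: the commands.rs hunk above swaps the
// locally defined blob/tag subcommand modules for re-exports from the library crate
// (`pub use iroh_blobs::{cli as blobs, cli::tags}`), so the binary only parses arguments and
// forwards to the library-owned implementation. The general shape of that pattern, with all
// names illustrative and the "library" module stubbed inline so the sketch compiles on its
// own (it assumes clap with the `derive` feature), is roughly:

use clap::{Parser, Subcommand};

/// Stand-in for a CLI module that a library crate would export (e.g. `iroh_blobs::cli`).
pub mod blobs_cli {
    use clap::Subcommand;

    #[derive(Subcommand, Debug, Clone)]
    pub enum BlobCommands {
        /// List blobs known to the node.
        List,
    }

    impl BlobCommands {
        /// The library owns the implementation; the binary only forwards to it.
        pub fn run(self) {
            match self {
                Self::List => println!("(would list blobs here)"),
            }
        }
    }
}

#[derive(Parser, Debug)]
struct Cli {
    #[clap(subcommand)]
    command: Commands,
}

#[derive(Subcommand, Debug)]
enum Commands {
    /// Blob subcommands, reused from the library crate instead of being defined locally.
    #[clap(subcommand)]
    Blobs(blobs_cli::BlobCommands),
}

fn main() {
    match Cli::parse().command {
        Commands::Blobs(cmd) => cmd.run(),
    }
}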
/// @@ -191,7 +187,10 @@ impl Cli { |client| async move { match add_command { None => Ok(()), - Some(command) => command.run(&client).await, + Some(command) => { + let node_addr = client.net().node_addr().await?; + command.run(&client.blobs(), node_addr).await + } } }, ) diff --git a/iroh-cli/src/commands/blobs.rs b/iroh-cli/src/commands/blobs.rs deleted file mode 100644 index a7f7b19f96..0000000000 --- a/iroh-cli/src/commands/blobs.rs +++ /dev/null @@ -1,1221 +0,0 @@ -//! Define blob-related commands. - -use std::{ - collections::{BTreeMap, HashMap}, - net::SocketAddr, - path::PathBuf, - time::Duration, -}; - -use anyhow::{anyhow, bail, ensure, Context, Result}; -use clap::Subcommand; -use console::{style, Emoji}; -use futures_lite::{Stream, StreamExt}; -use indicatif::{ - HumanBytes, HumanDuration, MultiProgress, ProgressBar, ProgressDrawTarget, ProgressState, - ProgressStyle, -}; -use iroh::{ - base::{node_addr::AddrInfoOptions, ticket::BlobTicket}, - blobs::{ - get::{db::DownloadProgress, progress::BlobProgress, Stats}, - net_protocol::DownloadMode, - provider::AddProgress, - store::{ - ConsistencyCheckProgress, ExportFormat, ExportMode, ReportLevel, ValidateProgress, - }, - util::SetTagOption, - BlobFormat, Hash, HashAndFormat, Tag, - }, - client::{ - blobs::{ - BlobInfo, BlobStatus, CollectionInfo, DownloadOptions, IncompleteBlobInfo, WrapOption, - }, - Iroh, - }, - net::{key::PublicKey, NodeAddr, RelayUrl}, -}; -use tokio::io::AsyncWriteExt; - -/// Subcommands for the blob command. -#[allow(clippy::large_enum_variant)] -#[derive(Subcommand, Debug, Clone)] -pub enum BlobCommands { - /// Add data from PATH to the running node. - Add { - /// Path to a file or folder. - /// - /// If set to `STDIN`, the data will be read from stdin. - source: BlobSource, - - #[clap(flatten)] - options: BlobAddOptions, - }, - /// Download data to the running node's database and provide it. - /// - /// In addition to downloading the data, you can also specify an optional output directory - /// where the data will be exported to after it has been downloaded. - Get { - /// Ticket or Hash to use. - #[clap(name = "TICKET OR HASH")] - ticket: TicketOrHash, - /// Additional socket address to use to contact the node. Can be used multiple times. - #[clap(long)] - address: Vec, - /// Override the relay URL to use to contact the node. - #[clap(long)] - relay_url: Option, - /// Override to treat the blob as a raw blob or a hash sequence. - #[clap(long)] - recursive: Option, - /// If set, the ticket's direct addresses will not be used. - #[clap(long)] - override_addresses: bool, - /// NodeId of the provider. - #[clap(long)] - node: Option, - /// Directory or file in which to save the file(s). - /// - /// If set to `STDOUT` the output will be redirected to stdout. - /// - /// If not specified, the data will only be stored internally. - #[clap(long, short)] - out: Option, - /// If set, the data will be moved to the output directory, and iroh will assume that it - /// will not change. - #[clap(long, default_value_t = false)] - stable: bool, - /// Tag to tag the data with. - #[clap(long)] - tag: Option, - /// If set, will queue the download in the download queue. - /// - /// Use this if you are doing many downloads in parallel and want to limit the number of - /// downloads running concurrently. - #[clap(long)] - queued: bool, - }, - /// Export a blob from the internal blob store to the local filesystem. - Export { - /// The hash to export. - hash: Hash, - /// Directory or file in which to save the file(s). 
- /// - /// If set to `STDOUT` the output will be redirected to stdout. - out: OutputTarget, - /// Set to true if the hash refers to a collection and you want to export all children of - /// the collection. - #[clap(long, default_value_t = false)] - recursive: bool, - /// If set, the data will be moved to the output directory, and iroh will assume that it - /// will not change. - #[clap(long, default_value_t = false)] - stable: bool, - }, - /// List available content on the node. - #[clap(subcommand)] - List(ListCommands), - /// Validate hashes on the running node. - Validate { - #[clap(short, long, action(clap::ArgAction::Count))] - verbose: u8, - /// Repair the store by removing invalid data - /// - /// Caution: this will remove data to make the store consistent, even - /// if the data might be salvageable. E.g. for an entry for which the - /// outboard data is missing, the entry will be removed, even if the - /// data is complete. - #[clap(long, default_value_t = false)] - repair: bool, - }, - /// Perform a database consistency check on the running node. - ConsistencyCheck { - #[clap(short, long, action(clap::ArgAction::Count))] - verbose: u8, - /// Repair the store by removing invalid data - /// - /// Caution: this will remove data to make the store consistent, even - /// if the data might be salvageable. E.g. for an entry for which the - /// outboard data is missing, the entry will be removed, even if the - /// data is complete. - #[clap(long, default_value_t = false)] - repair: bool, - }, - /// Delete content on the node. - #[clap(subcommand)] - Delete(DeleteCommands), - /// Get a ticket to share this blob. - Share { - /// Hash of the blob to share. - hash: Hash, - /// Options to configure the address information in the generated ticket. - /// - /// Use `relay-and-addresses` in networks with no internet connectivity. - #[clap(long, default_value_t = AddrInfoOptions::Id)] - addr_options: AddrInfoOptions, - /// If the blob is a collection, the requester will also fetch the listed blobs. - #[clap(long, default_value_t = false)] - recursive: bool, - /// Display the contents of this ticket too. - #[clap(long, hide = true)] - debug: bool, - }, -} - -/// Possible outcomes of an input. -#[derive(Debug, Clone, derive_more::Display)] -pub enum TicketOrHash { - Ticket(BlobTicket), - Hash(Hash), -} - -impl std::str::FromStr for TicketOrHash { - type Err = anyhow::Error; - - fn from_str(s: &str) -> std::result::Result { - if let Ok(ticket) = BlobTicket::from_str(s) { - return Ok(Self::Ticket(ticket)); - } - if let Ok(hash) = Hash::from_str(s) { - return Ok(Self::Hash(hash)); - } - Err(anyhow!("neither a valid ticket or hash")) - } -} - -impl BlobCommands { - /// Runs the blob command given the iroh client. 
- pub async fn run(self, iroh: &Iroh) -> Result<()> { - match self { - Self::Get { - ticket, - mut address, - relay_url, - recursive, - override_addresses, - node, - out, - stable, - tag, - queued, - } => { - let (node_addr, hash, format) = match ticket { - TicketOrHash::Ticket(ticket) => { - let (node_addr, hash, blob_format) = ticket.into_parts(); - - // create the node address with the appropriate overrides - let node_addr = { - let NodeAddr { node_id, info } = node_addr; - let addresses = if override_addresses { - // use only the cli supplied ones - address - } else { - // use both the cli supplied ones and the ticket ones - address.extend(info.direct_addresses); - address - }; - - // prefer direct arg over ticket - let relay_url = relay_url.or(info.relay_url); - - NodeAddr::from_parts(node_id, relay_url, addresses) - }; - - // check if the blob format has an override - let blob_format = match recursive { - Some(true) => BlobFormat::HashSeq, - Some(false) => BlobFormat::Raw, - None => blob_format, - }; - - (node_addr, hash, blob_format) - } - TicketOrHash::Hash(hash) => { - // check if the blob format has an override - let blob_format = match recursive { - Some(true) => BlobFormat::HashSeq, - Some(false) => BlobFormat::Raw, - None => BlobFormat::Raw, - }; - - let Some(node) = node else { - bail!("missing NodeId"); - }; - - let node_addr = NodeAddr::from_parts(node, relay_url, address); - (node_addr, hash, blob_format) - } - }; - - if format != BlobFormat::Raw && out == Some(OutputTarget::Stdout) { - return Err(anyhow::anyhow!("The input arguments refer to a collection of blobs and output is set to STDOUT. Only single blobs may be passed in this case.")); - } - - let tag = match tag { - Some(tag) => SetTagOption::Named(Tag::from(tag)), - None => SetTagOption::Auto, - }; - - let mode = match queued { - true => DownloadMode::Queued, - false => DownloadMode::Direct, - }; - - let mut stream = iroh - .blobs() - .download_with_opts( - hash, - DownloadOptions { - format, - nodes: vec![node_addr], - tag, - mode, - }, - ) - .await?; - - show_download_progress(hash, &mut stream).await?; - - match out { - None => {} - Some(OutputTarget::Stdout) => { - // we asserted above that `OutputTarget::Stdout` is only permitted if getting a - // single hash and not a hashseq. 
- let mut blob_read = iroh.blobs().read(hash).await?; - tokio::io::copy(&mut blob_read, &mut tokio::io::stdout()).await?; - } - Some(OutputTarget::Path(path)) => { - let absolute = std::env::current_dir()?.join(&path); - if matches!(format, BlobFormat::HashSeq) { - ensure!(!absolute.is_dir(), "output must not be a directory"); - } - let recursive = format == BlobFormat::HashSeq; - let mode = match stable { - true => ExportMode::TryReference, - false => ExportMode::Copy, - }; - let format = match recursive { - true => ExportFormat::Collection, - false => ExportFormat::Blob, - }; - tracing::info!("exporting to {} -> {}", path.display(), absolute.display()); - let stream = iroh.blobs().export(hash, absolute, format, mode).await?; - - // TODO: report export progress - stream.await?; - } - }; - - Ok(()) - } - Self::Export { - hash, - out, - recursive, - stable, - } => { - match out { - OutputTarget::Stdout => { - ensure!( - !recursive, - "Recursive option is not supported when exporting to STDOUT" - ); - let mut blob_read = iroh.blobs().read(hash).await?; - tokio::io::copy(&mut blob_read, &mut tokio::io::stdout()).await?; - } - OutputTarget::Path(path) => { - let absolute = std::env::current_dir()?.join(&path); - if !recursive { - ensure!(!absolute.is_dir(), "output must not be a directory"); - } - let mode = match stable { - true => ExportMode::TryReference, - false => ExportMode::Copy, - }; - let format = match recursive { - true => ExportFormat::Collection, - false => ExportFormat::Blob, - }; - tracing::info!( - "exporting {hash} to {} -> {}", - path.display(), - absolute.display() - ); - let stream = iroh.blobs().export(hash, absolute, format, mode).await?; - // TODO: report export progress - stream.await?; - } - }; - Ok(()) - } - Self::List(cmd) => cmd.run(iroh).await, - Self::Delete(cmd) => cmd.run(iroh).await, - Self::Validate { verbose, repair } => validate(iroh, verbose, repair).await, - Self::ConsistencyCheck { verbose, repair } => { - consistency_check(iroh, verbose, repair).await - } - Self::Add { - source: path, - options, - } => add_with_opts(iroh, path, options).await, - Self::Share { - hash, - addr_options, - recursive, - debug, - } => { - let format = if recursive { - BlobFormat::HashSeq - } else { - BlobFormat::Raw - }; - let status = iroh.blobs().status(hash).await?; - let mut addr: NodeAddr = iroh.net().node_addr().await?; - addr.apply_options(addr_options); - let ticket = BlobTicket::new(addr, hash, format)?; - - let (blob_status, size) = match (status, format) { - (BlobStatus::Complete { size }, BlobFormat::Raw) => ("blob", size), - (BlobStatus::Partial { size }, BlobFormat::Raw) => { - ("incomplete blob", size.value()) - } - (BlobStatus::Complete { size }, BlobFormat::HashSeq) => ("collection", size), - (BlobStatus::Partial { size }, BlobFormat::HashSeq) => { - ("incomplete collection", size.value()) - } - (BlobStatus::NotFound, _) => { - return Err(anyhow!("blob is missing")); - } - }; - println!( - "Ticket for {blob_status} {hash} ({})\n{ticket}", - HumanBytes(size) - ); - - if debug { - println!("{ticket:#?}") - } - Ok(()) - } - } - } -} - -/// Options for the `blob add` command. -#[derive(clap::Args, Debug, Clone)] -pub struct BlobAddOptions { - /// Add in place - /// - /// Set this to true only if you are sure that the data in its current location - /// will not change. - #[clap(long, default_value_t = false)] - pub in_place: bool, - - /// Tag to tag the data with. - #[clap(long)] - pub tag: Option, - - /// Wrap the added file or directory in a collection. 
- /// - /// When adding a single file, without `wrap` the file is added as a single blob and no - /// collection is created. When enabling `wrap` it also creates a collection with a - /// single entry, where the entry's name is the filename and the entry's content is blob. - /// - /// When adding a directory, a collection is always created. - /// Without `wrap`, the collection directly contains the entries from the added directory. - /// With `wrap`, the directory will be nested so that all names in the collection are - /// prefixed with the directory name, thus preserving the name of the directory. - /// - /// When adding content from STDIN and setting `wrap` you also need to set `filename` to name - /// the entry pointing to the content from STDIN. - #[clap(long, default_value_t = false)] - pub wrap: bool, - - /// Override the filename used for the entry in the created collection. - /// - /// Only supported `wrap` is set. - /// Required when adding content from STDIN and setting `wrap`. - #[clap(long, requires = "wrap")] - pub filename: Option, - - /// Do not print the all-in-one ticket to get the added data from this node. - #[clap(long)] - pub no_ticket: bool, -} - -/// Possible list subcommands. -#[derive(Subcommand, Debug, Clone)] -pub enum ListCommands { - /// List the available blobs on the running provider. - Blobs, - /// List the blobs on the running provider that are not full files. - IncompleteBlobs, - /// List the available collections on the running provider. - Collections, -} - -impl ListCommands { - /// Runs a list subcommand. - pub async fn run(self, iroh: &Iroh) -> Result<()> { - match self { - Self::Blobs => { - let mut response = iroh.blobs().list().await?; - while let Some(item) = response.next().await { - let BlobInfo { path, hash, size } = item?; - println!("{} {} ({})", path, hash, HumanBytes(size)); - } - } - Self::IncompleteBlobs => { - let mut response = iroh.blobs().list_incomplete().await?; - while let Some(item) = response.next().await { - let IncompleteBlobInfo { hash, size, .. } = item?; - println!("{} ({})", hash, HumanBytes(size)); - } - } - Self::Collections => { - let mut response = iroh.blobs().list_collections()?; - while let Some(item) = response.next().await { - let CollectionInfo { - tag, - hash, - total_blobs_count, - total_blobs_size, - } = item?; - let total_blobs_count = total_blobs_count.unwrap_or_default(); - let total_blobs_size = total_blobs_size.unwrap_or_default(); - println!( - "{}: {} {} {} ({})", - tag, - hash, - total_blobs_count, - if total_blobs_count > 1 { - "blobs" - } else { - "blob" - }, - HumanBytes(total_blobs_size), - ); - } - } - } - Ok(()) - } -} - -/// Possible delete subcommands. -#[derive(Subcommand, Debug, Clone)] -pub enum DeleteCommands { - /// Delete the given blobs - Blob { - /// Blobs to delete - #[arg(required = true)] - hash: Hash, - }, -} - -impl DeleteCommands { - /// Runs the delete command. - pub async fn run(self, iroh: &Iroh) -> Result<()> { - match self { - Self::Blob { hash } => { - let response = iroh.blobs().delete_blob(hash).await; - if let Err(e) = response { - eprintln!("Error: {}", e); - } - } - } - Ok(()) - } -} - -/// Returns the corresponding [`ReportLevel`] given the verbosity level. -fn get_report_level(verbose: u8) -> ReportLevel { - match verbose { - 0 => ReportLevel::Warn, - 1 => ReportLevel::Info, - _ => ReportLevel::Trace, - } -} - -/// Applies the report level to the given text. 
-fn apply_report_level(text: String, level: ReportLevel) -> console::StyledObject { - match level { - ReportLevel::Trace => style(text).dim(), - ReportLevel::Info => style(text), - ReportLevel::Warn => style(text).yellow(), - ReportLevel::Error => style(text).red(), - } -} - -/// Checks the consistency of the blobs on the running node, and repairs inconsistencies if instructed. -pub async fn consistency_check(iroh: &Iroh, verbose: u8, repair: bool) -> Result<()> { - let mut response = iroh.blobs().consistency_check(repair).await?; - let verbosity = get_report_level(verbose); - let print = |level: ReportLevel, entry: Option, message: String| { - if level < verbosity { - return; - } - let level_text = level.to_string().to_lowercase(); - let text = if let Some(hash) = entry { - format!("{}: {} ({})", level_text, message, hash.to_hex()) - } else { - format!("{}: {}", level_text, message) - }; - let styled = apply_report_level(text, level); - eprintln!("{}", styled); - }; - - while let Some(item) = response.next().await { - match item? { - ConsistencyCheckProgress::Start => { - eprintln!("Starting consistency check ..."); - } - ConsistencyCheckProgress::Update { - message, - entry, - level, - } => { - print(level, entry, message); - } - ConsistencyCheckProgress::Done { .. } => { - eprintln!("Consistency check done"); - } - ConsistencyCheckProgress::Abort(error) => { - eprintln!("Consistency check error {}", error); - break; - } - } - } - Ok(()) -} - -/// Checks the validity of the blobs on the running node, and repairs anything invalid if instructed. -pub async fn validate(iroh: &Iroh, verbose: u8, repair: bool) -> Result<()> { - let mut state = ValidateProgressState::new(); - let mut response = iroh.blobs().validate(repair).await?; - let verbosity = get_report_level(verbose); - let print = |level: ReportLevel, entry: Option, message: String| { - if level < verbosity { - return; - } - let level_text = level.to_string().to_lowercase(); - let text = if let Some(hash) = entry { - format!("{}: {} ({})", level_text, message, hash.to_hex()) - } else { - format!("{}: {}", level_text, message) - }; - let styled = apply_report_level(text, level); - eprintln!("{}", styled); - }; - - let mut partial = BTreeMap::new(); - - while let Some(item) = response.next().await { - match item? { - ValidateProgress::PartialEntry { - id, - hash, - path, - size, - } => { - partial.insert(id, hash); - print( - ReportLevel::Trace, - Some(hash), - format!( - "Validating partial entry {} {} {}", - id, - path.unwrap_or_default(), - size - ), - ); - } - ValidateProgress::PartialEntryProgress { id, offset } => { - let entry = partial.get(&id).cloned(); - print( - ReportLevel::Trace, - entry, - format!("Partial entry {} at {}", id, offset), - ); - } - ValidateProgress::PartialEntryDone { id, ranges } => { - let entry: Option = partial.remove(&id); - print( - ReportLevel::Info, - entry, - format!("Partial entry {} done {:?}", id, ranges.to_chunk_ranges()), - ); - } - ValidateProgress::Starting { total } => { - state.starting(total); - } - ValidateProgress::Entry { - id, - hash, - path, - size, - } => { - state.add_entry(id, hash, path, size); - } - ValidateProgress::EntryProgress { id, offset } => { - state.progress(id, offset); - } - ValidateProgress::EntryDone { id, error } => { - state.done(id, error); - } - ValidateProgress::Abort(error) => { - state.abort(error.to_string()); - break; - } - ValidateProgress::AllDone => { - break; - } - } - } - Ok(()) -} - -/// Collection of all the validation progress state. 
-struct ValidateProgressState { - mp: MultiProgress, - pbs: HashMap, - overall: ProgressBar, - total: u64, - errors: u64, - successes: u64, -} - -impl ValidateProgressState { - /// Creates a new validation progress state collection. - fn new() -> Self { - let mp = MultiProgress::new(); - let overall = mp.add(ProgressBar::new(0)); - overall.enable_steady_tick(Duration::from_millis(500)); - Self { - mp, - pbs: HashMap::new(), - overall, - total: 0, - errors: 0, - successes: 0, - } - } - - /// Sets the total number to the provided value and style the progress bar to starting. - fn starting(&mut self, total: u64) { - self.total = total; - self.errors = 0; - self.successes = 0; - self.overall.set_position(0); - self.overall.set_length(total); - self.overall.set_style( - ProgressStyle::default_bar() - .template("{spinner:.green} [{bar:60.cyan/blue}] {msg}") - .unwrap() - .progress_chars("=>-"), - ); - } - - /// Adds a message to the progress bar in the given `id`. - fn add_entry(&mut self, id: u64, hash: Hash, path: Option, size: u64) { - let pb = self.mp.insert_before(&self.overall, ProgressBar::new(size)); - pb.set_style(ProgressStyle::default_bar() - .template("{spinner:.green} [{bar:40.cyan/blue}] {msg} {bytes}/{total_bytes} ({bytes_per_sec}, eta {eta})").unwrap() - .progress_chars("=>-")); - let msg = if let Some(path) = path { - format!("{} {}", hash.to_hex(), path) - } else { - hash.to_hex().to_string() - }; - pb.set_message(msg); - pb.set_position(0); - pb.set_length(size); - pb.enable_steady_tick(Duration::from_millis(500)); - self.pbs.insert(id, pb); - } - - /// Progresses the progress bar with `id` by `progress` amount. - fn progress(&mut self, id: u64, progress: u64) { - if let Some(pb) = self.pbs.get_mut(&id) { - pb.set_position(progress); - } - } - - /// Set an error in the progress bar. Consumes the [`ValidateProgressState`]. - fn abort(self, error: String) { - let error_line = self.mp.add(ProgressBar::new(0)); - error_line.set_style(ProgressStyle::default_bar().template("{msg}").unwrap()); - error_line.set_message(error); - } - - /// Finishes a progress bar with a given error message. - fn done(&mut self, id: u64, error: Option) { - if let Some(pb) = self.pbs.remove(&id) { - let ok_char = style(Emoji("✔", "OK")).green(); - let fail_char = style(Emoji("✗", "Error")).red(); - let ok = error.is_none(); - let msg = match error { - Some(error) => format!("{} {} {}", pb.message(), fail_char, error), - None => format!("{} {}", pb.message(), ok_char), - }; - if ok { - self.successes += 1; - } else { - self.errors += 1; - } - self.overall.set_position(self.errors + self.successes); - self.overall.set_message(format!( - "Overall {} {}, {} {}", - self.errors, fail_char, self.successes, ok_char - )); - if ok { - pb.finish_and_clear(); - } else { - pb.set_style(ProgressStyle::default_bar().template("{msg}").unwrap()); - pb.finish_with_message(msg); - } - } - } -} - -/// Where the data should be read from. -#[derive(Debug, Clone, derive_more::Display, PartialEq, Eq)] -pub enum BlobSource { - /// Reads from stdin - #[display("STDIN")] - Stdin, - /// Reads from the provided path - #[display("{}", _0.display())] - Path(PathBuf), -} - -impl From for BlobSource { - fn from(s: String) -> Self { - if s == "STDIN" { - return BlobSource::Stdin; - } - - BlobSource::Path(s.into()) - } -} - -/// Data source for adding data to iroh. -#[derive(Debug, Clone)] -pub enum BlobSourceIroh { - /// A file or directory on the node's local file system. 
- LocalFs { path: PathBuf, in_place: bool }, - /// Data passed via STDIN. - Stdin, -} - -/// Whether to print an all-in-one ticket. -#[derive(Debug, Clone)] -pub enum TicketOption { - /// Do not print an all-in-one ticket - None, - /// Print an all-in-one ticket. - Print, -} - -/// Adds a [`BlobSource`] given some [`BlobAddOptions`]. -pub async fn add_with_opts( - client: &iroh::client::Iroh, - source: BlobSource, - opts: BlobAddOptions, -) -> Result<()> { - let tag = match opts.tag { - Some(tag) => SetTagOption::Named(Tag::from(tag)), - None => SetTagOption::Auto, - }; - let ticket = match opts.no_ticket { - true => TicketOption::None, - false => TicketOption::Print, - }; - let source = match source { - BlobSource::Stdin => BlobSourceIroh::Stdin, - BlobSource::Path(path) => BlobSourceIroh::LocalFs { - path, - in_place: opts.in_place, - }, - }; - let wrap = match (opts.wrap, opts.filename) { - (true, None) => WrapOption::Wrap { name: None }, - (true, Some(filename)) => WrapOption::Wrap { - name: Some(filename), - }, - (false, None) => WrapOption::NoWrap, - (false, Some(_)) => bail!("`--filename` may not be used without `--wrap`"), - }; - - add(client, source, tag, ticket, wrap).await -} - -/// Adds data to iroh, either from a path or, if path is `None`, from STDIN. -pub async fn add( - client: &iroh::client::Iroh, - source: BlobSourceIroh, - tag: SetTagOption, - ticket: TicketOption, - wrap: WrapOption, -) -> Result<()> { - let (hash, format, entries) = match source { - BlobSourceIroh::LocalFs { path, in_place } => { - let absolute = path.canonicalize()?; - println!("Adding {} as {}...", path.display(), absolute.display()); - - // tell the node to add the data - let stream = client - .blobs() - .add_from_path(absolute, in_place, tag, wrap) - .await?; - aggregate_add_response(stream).await? - } - BlobSourceIroh::Stdin => { - println!("Adding from STDIN..."); - // Store STDIN content into a temporary file - let (file, path) = tempfile::NamedTempFile::new()?.into_parts(); - let mut file = tokio::fs::File::from_std(file); - let path_buf = path.to_path_buf(); - // Copy from stdin to the file, until EOF - tokio::io::copy(&mut tokio::io::stdin(), &mut file).await?; - file.flush().await?; - drop(file); - - // tell the node to add the data - let stream = client - .blobs() - .add_from_path(path_buf, false, tag, wrap) - .await?; - aggregate_add_response(stream).await? - } - }; - - print_add_response(hash, format, entries); - if let TicketOption::Print = ticket { - let status = client.status().await?; - let ticket = BlobTicket::new(status.addr, hash, format)?; - println!("All-in-one ticket: {ticket}"); - } - Ok(()) -} - -/// Entry with a given name, size, and hash. -#[derive(Debug)] -pub struct ProvideResponseEntry { - pub name: String, - pub size: u64, - pub hash: Hash, -} - -/// Combines the [`AddProgress`] outputs from a [`Stream`] into a single tuple. -pub async fn aggregate_add_response( - mut stream: impl Stream> + Unpin, -) -> Result<(Hash, BlobFormat, Vec)> { - let mut hash_and_format = None; - let mut collections = BTreeMap::)>::new(); - let mut mp = Some(ProvideProgressState::new()); - while let Some(item) = stream.next().await { - match item? 
{ - AddProgress::Found { name, id, size } => { - tracing::trace!("Found({id},{name},{size})"); - if let Some(mp) = mp.as_mut() { - mp.found(name.clone(), id, size); - } - collections.insert(id, (name, size, None)); - } - AddProgress::Progress { id, offset } => { - tracing::trace!("Progress({id}, {offset})"); - if let Some(mp) = mp.as_mut() { - mp.progress(id, offset); - } - } - AddProgress::Done { hash, id } => { - tracing::trace!("Done({id},{hash:?})"); - if let Some(mp) = mp.as_mut() { - mp.done(id, hash); - } - match collections.get_mut(&id) { - Some((_, _, ref mut h)) => { - *h = Some(hash); - } - None => { - anyhow::bail!("Got Done for unknown collection id {id}"); - } - } - } - AddProgress::AllDone { hash, format, .. } => { - tracing::trace!("AllDone({hash:?})"); - if let Some(mp) = mp.take() { - mp.all_done(); - } - hash_and_format = Some(HashAndFormat { hash, format }); - break; - } - AddProgress::Abort(e) => { - if let Some(mp) = mp.take() { - mp.error(); - } - anyhow::bail!("Error while adding data: {e}"); - } - } - } - let HashAndFormat { hash, format } = - hash_and_format.context("Missing hash for collection or blob")?; - let entries = collections - .into_iter() - .map(|(_, (name, size, hash))| { - let hash = hash.context(format!("Missing hash for {name}"))?; - Ok(ProvideResponseEntry { name, size, hash }) - }) - .collect::>>()?; - Ok((hash, format, entries)) -} - -/// Prints out the add response. -pub fn print_add_response(hash: Hash, format: BlobFormat, entries: Vec) { - let mut total_size = 0; - for ProvideResponseEntry { name, size, hash } in entries { - total_size += size; - println!("- {}: {} {:#}", name, HumanBytes(size), hash); - } - println!("Total: {}", HumanBytes(total_size)); - println!(); - match format { - BlobFormat::Raw => println!("Blob: {}", hash), - BlobFormat::HashSeq => println!("Collection: {}", hash), - } -} - -/// Progress state for providing. -#[derive(Debug)] -pub struct ProvideProgressState { - mp: MultiProgress, - pbs: HashMap, -} - -impl ProvideProgressState { - /// Creates a new provide progress state. - fn new() -> Self { - Self { - mp: MultiProgress::new(), - pbs: HashMap::new(), - } - } - - /// Inserts a new progress bar with the given id, name, and size. - fn found(&mut self, name: String, id: u64, size: u64) { - let pb = self.mp.add(ProgressBar::new(size)); - pb.set_style(ProgressStyle::default_bar() - .template("{spinner:.green} [{bar:40.cyan/blue}] {msg} {bytes}/{total_bytes} ({bytes_per_sec}, eta {eta})").unwrap() - .progress_chars("=>-")); - pb.set_message(name); - pb.set_length(size); - pb.set_position(0); - pb.enable_steady_tick(Duration::from_millis(500)); - self.pbs.insert(id, pb); - } - - /// Adds some progress to the progress bar with the given id. - fn progress(&mut self, id: u64, progress: u64) { - if let Some(pb) = self.pbs.get_mut(&id) { - pb.set_position(progress); - } - } - - /// Sets the multiprogress bar with the given id as finished and clear it. - fn done(&mut self, id: u64, _hash: Hash) { - if let Some(pb) = self.pbs.remove(&id) { - pb.finish_and_clear(); - self.mp.remove(&pb); - } - } - - /// Sets the multiprogress bar as finished and clear them. - fn all_done(self) { - self.mp.clear().ok(); - } - - /// Clears the multiprogress bar. - fn error(self) { - self.mp.clear().ok(); - } -} - -/// Displays the download progress for a given stream. 
-pub async fn show_download_progress( - hash: Hash, - mut stream: impl Stream> + Unpin, -) -> Result<()> { - eprintln!("Fetching: {}", hash); - let mp = MultiProgress::new(); - mp.set_draw_target(ProgressDrawTarget::stderr()); - let op = mp.add(make_overall_progress()); - let ip = mp.add(make_individual_progress()); - op.set_message(format!("{} Connecting ...\n", style("[1/3]").bold().dim())); - let mut seq = false; - while let Some(x) = stream.next().await { - match x? { - DownloadProgress::InitialState(state) => { - if state.connected { - op.set_message(format!("{} Requesting ...\n", style("[2/3]").bold().dim())); - } - if let Some(count) = state.root.child_count { - op.set_message(format!( - "{} Downloading {} blob(s)\n", - style("[3/3]").bold().dim(), - count + 1, - )); - op.set_length(count + 1); - op.reset(); - op.set_position(state.current.map(u64::from).unwrap_or(0)); - seq = true; - } - if let Some(blob) = state.get_current() { - if let Some(size) = blob.size { - ip.set_length(size.value()); - ip.reset(); - match blob.progress { - BlobProgress::Pending => {} - BlobProgress::Progressing(offset) => ip.set_position(offset), - BlobProgress::Done => ip.finish_and_clear(), - } - if !seq { - op.finish_and_clear(); - } - } - } - } - DownloadProgress::FoundLocal { .. } => {} - DownloadProgress::Connected => { - op.set_message(format!("{} Requesting ...\n", style("[2/3]").bold().dim())); - } - DownloadProgress::FoundHashSeq { children, .. } => { - op.set_message(format!( - "{} Downloading {} blob(s)\n", - style("[3/3]").bold().dim(), - children + 1, - )); - op.set_length(children + 1); - op.reset(); - seq = true; - } - DownloadProgress::Found { size, child, .. } => { - if seq { - op.set_position(child.into()); - } else { - op.finish_and_clear(); - } - ip.set_length(size); - ip.reset(); - } - DownloadProgress::Progress { offset, .. } => { - ip.set_position(offset); - } - DownloadProgress::Done { .. } => { - ip.finish_and_clear(); - } - DownloadProgress::AllDone(Stats { - bytes_read, - elapsed, - .. - }) => { - op.finish_and_clear(); - eprintln!( - "Transferred {} in {}, {}/s", - HumanBytes(bytes_read), - HumanDuration(elapsed), - HumanBytes((bytes_read as f64 / elapsed.as_secs_f64()) as u64) - ); - break; - } - DownloadProgress::Abort(e) => { - bail!("download aborted: {}", e); - } - } - } - Ok(()) -} - -/// Where the data should be stored. -#[derive(Debug, Clone, derive_more::Display, PartialEq, Eq)] -pub enum OutputTarget { - /// Writes to stdout - #[display("STDOUT")] - Stdout, - /// Writes to the provided path - #[display("{}", _0.display())] - Path(PathBuf), -} - -impl From for OutputTarget { - fn from(s: String) -> Self { - if s == "STDOUT" { - return OutputTarget::Stdout; - } - - OutputTarget::Path(s.into()) - } -} - -/// Creates a [`ProgressBar`] with some defaults for the overall progress. -fn make_overall_progress() -> ProgressBar { - let pb = ProgressBar::hidden(); - pb.enable_steady_tick(std::time::Duration::from_millis(100)); - pb.set_style( - ProgressStyle::with_template( - "{msg}{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {pos}/{len}", - ) - .unwrap() - .progress_chars("#>-"), - ); - pb -} - -/// Creates a [`ProgressBar`] with some defaults for the individual progress. 
-fn make_individual_progress() -> ProgressBar { - let pb = ProgressBar::hidden(); - pb.enable_steady_tick(std::time::Duration::from_millis(100)); - pb.set_style( - ProgressStyle::with_template("{msg}{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {bytes}/{total_bytes} ({eta})") - .unwrap() - .with_key( - "eta", - |state: &ProgressState, w: &mut dyn std::fmt::Write| { - write!(w, "{:.1}s", state.eta().as_secs_f64()).unwrap() - }, - ) - .progress_chars("#>-"), - ); - pb -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_blob_source() { - assert_eq!( - BlobSource::from(BlobSource::Stdin.to_string()), - BlobSource::Stdin - ); - - assert_eq!( - BlobSource::from(BlobSource::Path("hello/world".into()).to_string()), - BlobSource::Path("hello/world".into()), - ); - } - - #[test] - fn test_output_target() { - assert_eq!( - OutputTarget::from(OutputTarget::Stdout.to_string()), - OutputTarget::Stdout - ); - - assert_eq!( - OutputTarget::from(OutputTarget::Path("hello/world".into()).to_string()), - OutputTarget::Path("hello/world".into()), - ); - } -} diff --git a/iroh-cli/src/commands/rpc.rs b/iroh-cli/src/commands/rpc.rs index 01bb523de3..9a39deeb56 100644 --- a/iroh-cli/src/commands/rpc.rs +++ b/iroh-cli/src/commands/rpc.rs @@ -92,12 +92,13 @@ pub enum RpcCommands { impl RpcCommands { /// Run the RPC command given the iroh client and the console environment. pub async fn run(self, iroh: &Iroh, env: &ConsoleEnv) -> Result<()> { + let node_id = || async move { iroh.net().node_addr().await }; match self { Self::Net { command } => command.run(iroh).await, - Self::Blobs { command } => command.run(iroh).await, + Self::Blobs { command } => command.run(&iroh.blobs(), node_id().await?).await, Self::Docs { command } => command.run(iroh, env).await, Self::Authors { command } => command.run(iroh, env).await, - Self::Tags { command } => command.run(iroh).await, + Self::Tags { command } => command.run(&iroh.tags()).await, Self::Gossip { command } => command.run(iroh).await, Self::Stats => { let stats = iroh.stats().await?; diff --git a/iroh-cli/src/commands/tags.rs b/iroh-cli/src/commands/tags.rs deleted file mode 100644 index 4789a26395..0000000000 --- a/iroh-cli/src/commands/tags.rs +++ /dev/null @@ -1,45 +0,0 @@ -//! Define the tags subcommand. - -use anyhow::Result; -use bytes::Bytes; -use clap::Subcommand; -use futures_lite::StreamExt; -use iroh::{blobs::Tag, client::Iroh}; - -/// Commands to manage tags. -#[derive(Subcommand, Debug, Clone)] -#[allow(clippy::large_enum_variant)] -pub enum TagCommands { - /// List all tags - List, - /// Delete a tag - Delete { - tag: String, - #[clap(long, default_value_t = false)] - hex: bool, - }, -} - -impl TagCommands { - /// Runs the tag command given the iroh client. 
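// Hedged illustrative sketch, not part of this patch: the rpc.rs hunk earlier in this patch
// changes the dispatch so each subcommand receives only the handle it needs (a blobs client
// plus the node address used for ticket creation, or a tags client) rather than the whole
// `Iroh` client. With illustrative stub types standing in for the real iroh clients, the
// shape is roughly:

struct BlobsClient;
struct TagsClient;
struct NodeAddr;

struct Iroh;

impl Iroh {
    fn blobs(&self) -> BlobsClient {
        BlobsClient
    }
    fn tags(&self) -> TagsClient {
        TagsClient
    }
    fn node_addr(&self) -> NodeAddr {
        NodeAddr
    }
}

enum RpcCommands {
    Blobs,
    Tags,
}

impl RpcCommands {
    fn run(self, iroh: &Iroh) {
        match self {
            // blob commands also need the node address, e.g. to build shareable tickets
            Self::Blobs => run_blobs(&iroh.blobs(), iroh.node_addr()),
            Self::Tags => run_tags(&iroh.tags()),
        }
    }
}

fn run_blobs(_blobs: &BlobsClient, _addr: NodeAddr) {
    // would forward to the blob CLI implementation in the library crate
}

fn run_tags(_tags: &TagsClient) {
    // would forward to the tag CLI implementation in the library crate
}

fn main() {
    RpcCommands::Tags.run(&Iroh);
}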
- pub async fn run(self, iroh: &Iroh) -> Result<()> { - match self { - Self::List => { - let mut response = iroh.tags().list().await?; - while let Some(res) = response.next().await { - let res = res?; - println!("{}: {} ({:?})", res.name, res.hash, res.format); - } - } - Self::Delete { tag, hex } => { - let tag = if hex { - Tag::from(Bytes::from(hex::decode(tag)?)) - } else { - Tag::from(tag) - }; - iroh.tags().delete(tag).await?; - } - } - Ok(()) - } -} From 45d80ecd6aab1fdfbe0c651f359689434efe1a16 Mon Sep 17 00:00:00 2001 From: Ruediger Klaehn Date: Mon, 18 Nov 2024 12:43:20 +0200 Subject: [PATCH 2/8] Remove docs cli and authors cli and replace it with the stuff from iroh-docs --- Cargo.lock | 9 +- Cargo.toml | 3 +- iroh-cli/Cargo.toml | 2 +- iroh-cli/src/commands.rs | 15 +- iroh-cli/src/commands/authors.rs | 106 ---- iroh-cli/src/commands/console.rs | 6 +- iroh-cli/src/commands/docs.rs | 1024 ------------------------------ iroh-cli/src/commands/rpc.rs | 6 +- iroh-cli/src/config.rs | 197 +----- 9 files changed, 26 insertions(+), 1342 deletions(-) delete mode 100644 iroh-cli/src/commands/authors.rs delete mode 100644 iroh-cli/src/commands/docs.rs diff --git a/Cargo.lock b/Cargo.lock index c75d1d9e9f..c6343b7539 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1166,7 +1166,9 @@ checksum = "658bce805d770f407bc62102fca7c2c64ceef2fbcb2b8bd19d2765ce093980de" dependencies = [ "console", "shell-words", + "tempfile", "thiserror 1.0.68", + "zeroize", ] [[package]] @@ -2868,17 +2870,21 @@ dependencies = [ [[package]] name = "iroh-docs" version = "0.28.0" -source = "git+https://github.com/n0-computer/iroh-docs?branch=main#6f1d8ebf7c344e31b2bcf77eb7e8dd72540c971b" dependencies = [ "anyhow", "async-channel", "bytes", + "clap", + "colored", + "console", "derive_more", + "dialoguer", "ed25519-dalek", "futures-buffered", "futures-lite 2.4.0", "futures-util", "hex", + "indicatif", "iroh-base", "iroh-blake3", "iroh-blobs", @@ -2899,6 +2905,7 @@ dependencies = [ "self_cell", "serde", "serde-error", + "shellexpand", "strum 0.26.3", "tempfile", "thiserror 2.0.3", diff --git a/Cargo.toml b/Cargo.toml index 6ca23e89a0..9ed150abc0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -56,5 +56,6 @@ iroh-test = { path = "./iroh-test" } iroh-router = { path = "./iroh-router" } iroh-gossip = { git = "https://github.com/n0-computer/iroh-gossip", branch = "main" } -iroh-docs = { git = "https://github.com/n0-computer/iroh-docs", branch = "main" } +# iroh-docs = { git = "https://github.com/n0-computer/iroh-docs", branch = "cli" } +iroh-docs = { path = "../iroh-docs" } iroh-blobs = { git = "https://github.com/n0-computer/iroh-blobs", branch = "cli" } diff --git a/iroh-cli/Cargo.toml b/iroh-cli/Cargo.toml index de885624cb..6fd4208499 100644 --- a/iroh-cli/Cargo.toml +++ b/iroh-cli/Cargo.toml @@ -42,7 +42,7 @@ human-time = "0.1.6" indicatif = { version = "0.17", features = ["tokio"] } iroh = { version = "0.28.1", path = "../iroh", features = ["metrics"] } iroh-gossip = "0.28.1" -iroh-docs = { version = "0.28.0", features = ["rpc"]} +iroh-docs = { version = "0.28.0", features = ["rpc", "cli"]} iroh-metrics = { version = "0.28.0" } iroh-blobs = { version = "0.28.1", features = ["cli"] } parking_lot = "0.12.1" diff --git a/iroh-cli/src/commands.rs b/iroh-cli/src/commands.rs index 628baaf967..2e3c231a52 100644 --- a/iroh-cli/src/commands.rs +++ b/iroh-cli/src/commands.rs @@ -7,19 +7,19 @@ use anyhow::{ensure, Context, Result}; use clap::Parser; use iroh::client::Iroh; use iroh_blobs::cli::{BlobAddOptions, BlobSource}; +use 
iroh_docs::cli::ConsoleEnv; use self::{rpc::RpcCommands, start::RunType}; -use crate::config::{ConsoleEnv, NodeConfig}; +use crate::config::NodeConfig; -pub(crate) mod authors; pub(crate) mod console; -pub(crate) mod docs; pub(crate) mod doctor; pub(crate) mod gossip; pub(crate) mod net; pub(crate) mod rpc; pub(crate) mod start; pub use iroh_blobs::{cli as blobs, cli::tags}; +pub use iroh_docs::{cli as docs, cli::authors}; /// iroh is a tool for building distributed apps. /// @@ -121,7 +121,8 @@ impl Cli { self.rpc_addr, RunType::SingleCommandNoAbort, |iroh| async move { - let env = ConsoleEnv::for_console(data_dir_owned, &iroh).await?; + let env = + ConsoleEnv::for_console(data_dir_owned, &iroh.authors()).await?; console::run(&iroh, &env).await }, ) @@ -133,7 +134,7 @@ impl Cli { } else { Iroh::connect_path(data_dir).await.context("rpc connect")? }; - let env = ConsoleEnv::for_console(data_dir_owned, &iroh).await?; + let env = ConsoleEnv::for_console(data_dir_owned, &iroh.authors()).await?; console::run(&iroh, &env).await } } @@ -147,7 +148,7 @@ impl Cli { self.rpc_addr, RunType::SingleCommandAbortable, move |iroh| async move { - let env = ConsoleEnv::for_cli(data_dir_owned, &iroh).await?; + let env = ConsoleEnv::for_cli(data_dir_owned, &iroh.authors()).await?; command.run(&iroh, &env).await }, ) @@ -159,7 +160,7 @@ impl Cli { } else { Iroh::connect_path(data_dir).await.context("rpc connect")? }; - let env = ConsoleEnv::for_cli(data_dir_owned, &iroh).await?; + let env = ConsoleEnv::for_cli(data_dir_owned, &iroh.authors()).await?; command.run(&iroh, &env).await } } diff --git a/iroh-cli/src/commands/authors.rs b/iroh-cli/src/commands/authors.rs deleted file mode 100644 index 586b223026..0000000000 --- a/iroh-cli/src/commands/authors.rs +++ /dev/null @@ -1,106 +0,0 @@ -//! Define the commands to manage authors. - -use anyhow::{bail, Result}; -use clap::Parser; -use derive_more::FromStr; -use futures_lite::StreamExt; -use iroh::{ - base::base32::fmt_short, - client::Iroh, - docs::{Author, AuthorId}, -}; - -use crate::config::ConsoleEnv; - -/// Commands to manage authors. -#[derive(Debug, Clone, Parser)] -pub enum AuthorCommands { - /// Set the active author (Note: only works within the Iroh console). - Switch { author: AuthorId }, - /// Create a new author. - Create { - /// Switch to the created author (Note: only works in the Iroh console). - #[clap(long)] - switch: bool, - }, - /// Delete an author. - Delete { author: AuthorId }, - /// Export an author. - Export { author: AuthorId }, - /// Import an author. - Import { author: String }, - /// Print the default author for this node. - Default { - /// Switch to the default author (Note: only works in the Iroh console). - #[clap(long)] - switch: bool, - }, - /// List authors. - #[clap(alias = "ls")] - List, -} - -impl AuthorCommands { - /// Runs the author command given an iroh client and console environment. - pub async fn run(self, iroh: &Iroh, env: &ConsoleEnv) -> Result<()> { - match self { - Self::Switch { author } => { - env.set_author(author)?; - println!("Active author is now {}", fmt_short(author.as_bytes())); - } - Self::List => { - let mut stream = iroh.authors().list().await?; - while let Some(author_id) = stream.try_next().await? 
{ - println!("{}", author_id); - } - } - Self::Default { switch } => { - if switch && !env.is_console() { - bail!("The --switch flag is only supported within the Iroh console."); - } - let author_id = iroh.authors().default().await?; - println!("{}", author_id); - if switch { - env.set_author(author_id)?; - println!("Active author is now {}", fmt_short(author_id.as_bytes())); - } - } - Self::Create { switch } => { - if switch && !env.is_console() { - bail!("The --switch flag is only supported within the Iroh console."); - } - - let author_id = iroh.authors().create().await?; - println!("{}", author_id); - - if switch { - env.set_author(author_id)?; - println!("Active author is now {}", fmt_short(author_id.as_bytes())); - } - } - Self::Delete { author } => { - iroh.authors().delete(author).await?; - println!("Deleted author {}", fmt_short(author.as_bytes())); - } - Self::Export { author } => match iroh.authors().export(author).await? { - Some(author) => { - println!("{}", author); - } - None => { - println!("No author found {}", fmt_short(author)); - } - }, - Self::Import { author } => match Author::from_str(&author) { - Ok(author) => { - let id = author.id(); - iroh.authors().import(author).await?; - println!("Imported {}", fmt_short(id)); - } - Err(err) => { - eprintln!("Invalid author key: {}", err); - } - }, - } - Ok(()) - } -} diff --git a/iroh-cli/src/commands/console.rs b/iroh-cli/src/commands/console.rs index 2aeb39f3b9..feebf58434 100644 --- a/iroh-cli/src/commands/console.rs +++ b/iroh-cli/src/commands/console.rs @@ -4,13 +4,11 @@ use anyhow::Result; use clap::{Parser, Subcommand}; use colored::Colorize; use iroh::{base::base32::fmt_short, client::Iroh}; +use iroh_docs::cli::ConsoleEnv; use rustyline::{error::ReadlineError, Config, DefaultEditor}; use tokio::sync::{mpsc, oneshot}; -use crate::{ - commands::rpc::RpcCommands, - config::{ConsoleEnv, ConsolePaths}, -}; +use crate::{commands::rpc::RpcCommands, config::ConsolePaths}; /// Runs the iroh console pub async fn run(iroh: &Iroh, env: &ConsoleEnv) -> Result<()> { diff --git a/iroh-cli/src/commands/docs.rs b/iroh-cli/src/commands/docs.rs deleted file mode 100644 index fc827890d6..0000000000 --- a/iroh-cli/src/commands/docs.rs +++ /dev/null @@ -1,1024 +0,0 @@ -//! Define commands for interacting with documents in Iroh. - -use std::{ - cell::RefCell, - collections::BTreeMap, - path::{Path, PathBuf}, - rc::Rc, - time::{Duration, Instant}, -}; - -use anyhow::{anyhow, bail, Context, Result}; -use clap::Parser; -use colored::Colorize; -use dialoguer::Confirm; -use futures_buffered::BufferedStreamExt; -use futures_lite::{Stream, StreamExt}; -use indicatif::{HumanBytes, HumanDuration, MultiProgress, ProgressBar, ProgressStyle}; -use iroh::{ - base::{base32::fmt_short, node_addr::AddrInfoOptions}, - blobs::{provider::AddProgress, util::SetTagOption, Hash, Tag}, - client::{blobs::WrapOption, Doc, Iroh}, - docs::{ - store::{DownloadPolicy, FilterKind, Query, SortDirection}, - AuthorId, DocTicket, NamespaceId, - }, - util::fs::{path_content_info, path_to_key, PathContent}, -}; -use iroh_docs::{ - engine::Origin, - rpc::client::docs::{Entry, LiveEvent, ShareMode}, -}; -use tokio::io::AsyncReadExt; - -use crate::config::ConsoleEnv; - -/// The maximum length of content to display before truncating. -const MAX_DISPLAY_CONTENT_LEN: u64 = 80; - -/// Different modes to display content. -#[derive(Debug, Clone, Copy, clap::ValueEnum)] -pub enum DisplayContentMode { - /// Displays the content if small enough, otherwise it displays the content hash. 
- Auto, - /// Display the content unconditionally. - Content, - /// Display the hash of the content. - Hash, - /// Display the shortened hash of the content. - ShortHash, -} - -/// General download policy for a document. -#[derive(Debug, Clone, Copy, clap::ValueEnum, derive_more::Display)] -pub enum FetchKind { - /// Download everything in this document. - Everything, - /// Download nothing in this document. - Nothing, -} - -/// Subcommands for the download policy command. -#[derive(Debug, Clone, clap::Subcommand)] -pub enum DlPolicyCmd { - Set { - /// Document to operate on. - /// - /// Required unless the document is set through the IROH_DOC environment variable. - /// Within the Iroh console, the active document can also set with `doc switch`. - #[clap(short, long)] - doc: Option, - /// Set the general download policy for this document. - kind: FetchKind, - /// Add an exception to the download policy. - /// An exception must be formatted as ::. - /// - /// - can be either `prefix` or `exact`. - /// - /// - can be either `utf8` or `hex`. - #[clap(short, long, value_name = "matching_kind>::, - }, - Get { - /// Document to operate on. - /// - /// Required unless the document is set through the IROH_DOC environment variable. - /// Within the Iroh console, the active document can also set with `doc switch`. - #[clap(short, long)] - doc: Option, - }, -} - -/// Possible `Document` commands. -#[derive(Debug, Clone, Parser)] -pub enum DocCommands { - /// Set the active document (only works within the Iroh console). - Switch { id: NamespaceId }, - /// Create a new document. - Create { - /// Switch to the created document (only in the Iroh console). - #[clap(long)] - switch: bool, - }, - /// Join a document from a ticket. - Join { - ticket: DocTicket, - /// Switch to the joined document (only in the Iroh console). - #[clap(long)] - switch: bool, - }, - /// List documents. - List, - /// Share a document with peers. - Share { - /// Document to operate on. - /// - /// Required unless the document is set through the IROH_DOC environment variable. - /// Within the Iroh console, the active document can also set with `doc switch`. - #[clap(short, long)] - doc: Option, - /// The sharing mode. - mode: ShareMode, - /// Options to configure the address information in the generated ticket. - /// - /// Use `relay-and-addresses` in networks with no internet connectivity. - #[clap(long, default_value_t = AddrInfoOptions::Id)] - addr_options: AddrInfoOptions, - }, - /// Set an entry in a document. - Set { - /// Document to operate on. - /// - /// Required unless the document is set through the IROH_DOC environment variable. - /// Within the Iroh console, the active document can also set with `doc switch`. - #[clap(short, long)] - doc: Option, - /// Author of the entry. - /// - /// Required unless the author is set through the IROH_AUTHOR environment variable. - /// Within the Iroh console, the active author can also set with `author switch`. - #[clap(long)] - author: Option, - /// Key to the entry (parsed as UTF-8 string). - key: String, - /// Content to store for this entry (parsed as UTF-8 string) - value: String, - }, - /// Set the download policies for a document. - #[clap(subcommand)] - DlPolicy(DlPolicyCmd), - /// Get entries in a document. - /// - /// Shows the author, content hash and content length for all entries for this key. - Get { - /// Document to operate on. - /// - /// Required unless the document is set through the IROH_DOC environment variable. 
- /// Within the Iroh console, the active document can also set with `doc switch`. - #[clap(short, long)] - doc: Option, - /// Key to the entry (parsed as UTF-8 string). - key: String, - /// If true, get all entries that start with KEY. - #[clap(short, long)] - prefix: bool, - /// Filter by author. - #[clap(long)] - author: Option, - /// How to show the contents of the key. - #[clap(short, long, value_enum, default_value_t=DisplayContentMode::Auto)] - mode: DisplayContentMode, - }, - /// Delete all entries below a key prefix. - Del { - /// Document to operate on. - /// - /// Required unless the document is set through the IROH_DOC environment variable. - /// Within the Iroh console, the active document can also set with `doc switch`. - #[clap(short, long)] - doc: Option, - /// Author of the entry. - /// - /// Required unless the author is set through the IROH_AUTHOR environment variable. - /// Within the Iroh console, the active author can also set with `author switch`. - #[clap(long)] - author: Option, - /// Prefix to delete. All entries whose key starts with or is equal to the prefix will be - /// deleted. - prefix: String, - }, - /// List all keys in a document. - #[clap(alias = "ls")] - Keys { - /// Document to operate on. - /// - /// Required unless the document is set through the IROH_DOC environment variable. - /// Within the Iroh console, the active document can also set with `doc switch`. - #[clap(short, long)] - doc: Option, - /// Filter by author. - #[clap(long)] - author: Option, - /// Optional key prefix (parsed as UTF-8 string) - prefix: Option, - /// How to sort the entries - #[clap(long, default_value_t=Sorting::Author)] - sort: Sorting, - /// Sort in descending order - #[clap(long)] - desc: bool, - /// How to show the contents of the keys. - #[clap(short, long, value_enum, default_value_t=DisplayContentMode::ShortHash)] - mode: DisplayContentMode, - }, - /// Import data into a document - Import { - /// Document to operate on. - /// - /// Required unless the document is set through the IROH_DOC environment variable. - /// Within the Iroh console, the active document can also be set with `doc switch`. - #[clap(short, long)] - doc: Option, - /// Author of the entry. - /// - /// Required unless the author is set through the IROH_AUTHOR environment variable. - /// Within the Iroh console, the active author can also be set with `author switch`. - #[clap(long)] - author: Option, - /// Prefix to add to imported entries (parsed as UTF-8 string). Defaults to no prefix - #[clap(long)] - prefix: Option, - /// Path to a local file or directory to import - /// - /// Pathnames will be used as the document key - path: String, - /// If true, don't copy the file into iroh, reference the existing file instead - /// - /// Moving a file imported with `in-place` will result in data corruption - #[clap(short, long)] - in_place: bool, - /// When true, you will not get a prompt to confirm you want to import the files - #[clap(long, default_value_t = false)] - no_prompt: bool, - }, - /// Export the most recent data for a key from a document - Export { - /// Document to operate on. - /// - /// Required unless the document is set through the IROH_DOC environment variable. - /// Within the Iroh console, the active document can also be set with `doc switch`. - #[clap(short, long)] - doc: Option, - /// Key to the entry (parsed as UTF-8 string) - /// - /// When just the key is present, will export the latest entry for that key. 
- key: String, - /// Path to export to - #[clap(short, long)] - out: String, - }, - /// Watch for changes and events on a document - Watch { - /// Document to operate on. - /// - /// Required unless the document is set through the IROH_DOC environment variable. - /// Within the Iroh console, the active document can also set with `doc switch`. - #[clap(short, long)] - doc: Option, - }, - /// Stop syncing a document. - Leave { - /// Document to operate on. - /// - /// Required unless the document is set through the IROH_DOC environment variable. - /// Within the Iroh console, the active document can also set with `doc switch`. - doc: Option, - }, - /// Delete a document from the local node. - /// - /// This is a destructive operation. Both the document secret key and all entries in the - /// document will be permanently deleted from the node's storage. Content blobs will be deleted - /// through garbage collection unless they are referenced from another document or tag. - Drop { - /// Document to operate on. - /// - /// Required unless the document is set through the IROH_DOC environment variable. - /// Within the Iroh console, the active document can also set with `doc switch`. - doc: Option, - }, -} - -/// How to sort. -#[derive(clap::ValueEnum, Clone, Debug, Default, strum::Display)] -#[strum(serialize_all = "kebab-case")] -pub enum Sorting { - /// Sort by author, then key - #[default] - Author, - /// Sort by key, then author - Key, -} - -impl From for iroh::docs::store::SortBy { - fn from(value: Sorting) -> Self { - match value { - Sorting::Author => Self::AuthorKey, - Sorting::Key => Self::KeyAuthor, - } - } -} - -impl DocCommands { - /// Runs the document command given the iroh client and the console environment. - pub async fn run(self, iroh: &Iroh, env: &ConsoleEnv) -> Result<()> { - match self { - Self::Switch { id: doc } => { - env.set_doc(doc)?; - println!("Active doc is now {}", fmt_short(doc.as_bytes())); - } - Self::Create { switch } => { - if switch && !env.is_console() { - bail!("The --switch flag is only supported within the Iroh console."); - } - - let doc = iroh.docs().create().await?; - println!("{}", doc.id()); - - if switch { - env.set_doc(doc.id())?; - println!("Active doc is now {}", fmt_short(doc.id().as_bytes())); - } - } - Self::Join { ticket, switch } => { - if switch && !env.is_console() { - bail!("The --switch flag is only supported within the Iroh console."); - } - - let doc = iroh.docs().import(ticket).await?; - println!("{}", doc.id()); - - if switch { - env.set_doc(doc.id())?; - println!("Active doc is now {}", fmt_short(doc.id().as_bytes())); - } - } - Self::List => { - let mut stream = iroh.docs().list().await?; - while let Some((id, kind)) = stream.try_next().await? { - println!("{id} {kind}") - } - } - Self::Share { - doc, - mode, - addr_options, - } => { - let doc = get_doc(iroh, env, doc).await?; - let ticket = doc.share(mode, addr_options).await?; - println!("{}", ticket); - } - Self::Set { - doc, - author, - key, - value, - } => { - let doc = get_doc(iroh, env, doc).await?; - let author = author.unwrap_or(env.author()); - let key = key.as_bytes().to_vec(); - let value = value.as_bytes().to_vec(); - let hash = doc.set_bytes(author, key, value).await?; - println!("{}", hash); - } - Self::Del { - doc, - author, - prefix, - } => { - let doc = get_doc(iroh, env, doc).await?; - let author = author.unwrap_or(env.author()); - let prompt = - format!("Deleting all entries whose key starts with {prefix}. 
Continue?"); - if Confirm::new() - .with_prompt(prompt) - .interact() - .unwrap_or(false) - { - let key = prefix.as_bytes().to_vec(); - let removed = doc.del(author, key).await?; - println!("Deleted {removed} entries."); - println!( - "Inserted an empty entry for author {} with key {prefix}.", - fmt_short(author) - ); - } else { - println!("Aborted.") - } - } - Self::Get { - doc, - key, - prefix, - author, - mode, - } => { - let doc = get_doc(iroh, env, doc).await?; - let key = key.as_bytes().to_vec(); - let query = Query::all(); - let query = match (author, prefix) { - (None, false) => query.key_exact(key), - (None, true) => query.key_prefix(key), - (Some(author), true) => query.author(author).key_prefix(key), - (Some(author), false) => query.author(author).key_exact(key), - }; - - let mut stream = doc.get_many(query).await?; - while let Some(entry) = stream.try_next().await? { - println!("{}", fmt_entry(&iroh.blobs(), &entry, mode).await); - } - } - Self::Keys { - doc, - prefix, - author, - mode, - sort, - desc, - } => { - let doc = get_doc(iroh, env, doc).await?; - let mut query = Query::all(); - if let Some(author) = author { - query = query.author(author); - } - if let Some(prefix) = prefix { - query = query.key_prefix(prefix); - } - let direction = match desc { - true => SortDirection::Desc, - false => SortDirection::Asc, - }; - query = query.sort_by(sort.into(), direction); - let mut stream = doc.get_many(query).await?; - while let Some(entry) = stream.try_next().await? { - println!("{}", fmt_entry(&iroh.blobs(), &entry, mode).await); - } - } - Self::Leave { doc } => { - let doc = get_doc(iroh, env, doc).await?; - doc.leave().await?; - println!("Doc {} is now inactive", fmt_short(doc.id())); - } - Self::Import { - doc, - author, - prefix, - path, - in_place, - no_prompt, - } => { - let doc = get_doc(iroh, env, doc).await?; - let author = author.unwrap_or(env.author()); - let mut prefix = prefix.unwrap_or_else(|| String::from("")); - - if prefix.ends_with('/') { - prefix.pop(); - } - let root = canonicalize_path(&path)?.canonicalize()?; - let tag = tag_from_file_name(&root)?; - - let root0 = root.clone(); - println!("Preparing import..."); - // get information about the directory or file we are trying to import - // and confirm with the user that they still want to import the file - let PathContent { size, files } = - tokio::task::spawn_blocking(|| path_content_info(root0)).await??; - if !no_prompt { - let prompt = format!("Import {files} files totaling {}?", HumanBytes(size)); - if !Confirm::new() - .with_prompt(prompt) - .interact() - .unwrap_or(false) - { - println!("Aborted."); - return Ok(()); - } else { - print!("\r"); - } - } - - let stream = iroh - .blobs() - .add_from_path( - root.clone(), - in_place, - SetTagOption::Named(tag.clone()), - WrapOption::NoWrap, - ) - .await?; - let root_prefix = match root.parent() { - Some(p) => p.to_path_buf(), - None => PathBuf::new(), - }; - let start = Instant::now(); - import_coordinator(doc, author, root_prefix, prefix, stream, size, files).await?; - println!("Success! ({})", HumanDuration(start.elapsed())); - } - Self::Export { doc, key, out } => { - let doc = get_doc(iroh, env, doc).await?; - let key_str = key.clone(); - let key = key.as_bytes().to_vec(); - let path: PathBuf = canonicalize_path(&out)?; - let mut stream = doc.get_many(Query::key_exact(key)).await?; - let entry = match stream.try_next().await? 
{ - None => { - println!(""); - return Ok(()); - } - Some(e) => e, - }; - match iroh.blobs().read(entry.content_hash()).await { - Ok(mut content) => { - if let Some(dir) = path.parent() { - if let Err(err) = std::fs::create_dir_all(dir) { - println!( - "", - path.display() - ); - } - }; - let pb = ProgressBar::new(content.size()); - pb.set_style(ProgressStyle::default_bar() - .template("{spinner:.green} [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({bytes_per_sec}, eta {eta})").unwrap() - .progress_chars("=>-")); - let file = tokio::fs::File::create(path.clone()).await?; - if let Err(err) = - tokio::io::copy(&mut content, &mut pb.wrap_async_write(file)).await - { - pb.finish_and_clear(); - println!("", path.display()) - } else { - pb.finish_and_clear(); - println!("wrote '{key_str}' to {}", path.display()); - } - } - Err(err) => println!(""), - } - } - Self::Watch { doc } => { - let doc = get_doc(iroh, env, doc).await?; - let mut stream = doc.subscribe().await?; - let blobs = iroh.blobs(); - while let Some(event) = stream.next().await { - let event = event?; - match event { - LiveEvent::InsertLocal { entry } => { - println!( - "local change: {}", - fmt_entry(&blobs, &entry, DisplayContentMode::Auto).await - ) - } - LiveEvent::InsertRemote { - entry, - from, - content_status, - } => { - let content = match content_status { - iroh::docs::ContentStatus::Complete => { - fmt_entry(&blobs, &entry, DisplayContentMode::Auto).await - } - iroh::docs::ContentStatus::Incomplete => { - let (Ok(content) | Err(content)) = - fmt_content(&blobs, &entry, DisplayContentMode::ShortHash) - .await; - format!("", content, human_len(&entry)) - } - iroh::docs::ContentStatus::Missing => { - let (Ok(content) | Err(content)) = - fmt_content(&blobs, &entry, DisplayContentMode::ShortHash) - .await; - format!("", content, human_len(&entry)) - } - }; - println!( - "remote change via @{}: {}", - fmt_short(from.as_bytes()), - content - ) - } - LiveEvent::ContentReady { hash } => { - println!("content ready: {}", fmt_short(hash.as_bytes())) - } - LiveEvent::SyncFinished(event) => { - let origin = match event.origin { - Origin::Accept => "they initiated", - Origin::Connect(_) => "we initiated", - }; - match event.result { - Ok(details) => { - println!( - "synced peer {} ({origin}, received {}, sent {}", - fmt_short(event.peer), - details.entries_received, - details.entries_sent - ) - } - Err(err) => println!( - "failed to sync with peer {} ({origin}): {err}", - fmt_short(event.peer) - ), - } - } - LiveEvent::NeighborUp(peer) => { - println!("neighbor peer up: {peer:?}"); - } - LiveEvent::NeighborDown(peer) => { - println!("neighbor peer down: {peer:?}"); - } - LiveEvent::PendingContentReady => { - println!("all pending content is now ready") - } - } - } - } - Self::Drop { doc } => { - let doc = get_doc(iroh, env, doc).await?; - println!( - "Deleting a document will permanently remove the document secret key, all document entries, \n\ - and all content blobs which are not referenced from other docs or tags." 
- );
- let prompt = format!("Delete document {}?", fmt_short(doc.id()));
- if Confirm::new()
- .with_prompt(prompt)
- .interact()
- .unwrap_or(false)
- {
- iroh.docs().drop_doc(doc.id()).await?;
- println!("Doc {} has been deleted.", fmt_short(doc.id()));
- } else {
- println!("Aborted.")
- }
- }
- Self::DlPolicy(DlPolicyCmd::Set { doc, kind, except }) => {
- let doc = get_doc(iroh, env, doc).await?;
- let download_policy = match kind {
- FetchKind::Everything => DownloadPolicy::EverythingExcept(except),
- FetchKind::Nothing => DownloadPolicy::NothingExcept(except),
- };
- if let Err(e) = doc.set_download_policy(download_policy).await {
- println!("Could not set the document's download policy. {e}")
- }
- }
- Self::DlPolicy(DlPolicyCmd::Get { doc }) => {
- let doc = get_doc(iroh, env, doc).await?;
- match doc.get_download_policy().await {
- Ok(dl_policy) => {
- let (kind, exceptions) = match dl_policy {
- DownloadPolicy::NothingExcept(exceptions) => {
- (FetchKind::Nothing, exceptions)
- }
- DownloadPolicy::EverythingExcept(exceptions) => {
- (FetchKind::Everything, exceptions)
- }
- };
- println!("Download {kind} in this document.");
- if !exceptions.is_empty() {
- println!("Exceptions:");
- for exception in exceptions {
- println!("{exception}")
- }
- }
- }
- Err(x) => {
- println!("Could not get the document's download policy: {x}")
- }
- }
- }
- }
- Ok(())
- }
-}
-
-/// Gets the document given the client, the environment (and maybe the [`NamespaceId`]).
-async fn get_doc(iroh: &Iroh, env: &ConsoleEnv, id: Option<NamespaceId>) -> anyhow::Result<Doc> {
- let doc_id = env.doc(id)?;
- iroh.docs()
- .open(doc_id)
- .await?
- .context("Document not found")
-}
-
-/// Formats the content. If an error occurs it's returned in a formatted, friendly way.
-async fn fmt_content(
- blobs: &iroh::client::blobs::Client,
- entry: &Entry,
- mode: DisplayContentMode,
-) -> Result<String, String> {
- let read_failed = |err: anyhow::Error| format!("<failed to get content: {err}>");
- let encode_hex = |err: std::string::FromUtf8Error| format!("0x{}", hex::encode(err.as_bytes()));
- let as_utf8 = |buf: Vec<u8>| String::from_utf8(buf).map(|repr| format!("\"{repr}\""));
-
- match mode {
- DisplayContentMode::Auto => {
- if entry.content_len() < MAX_DISPLAY_CONTENT_LEN {
- // small content: read fully as UTF-8
- let bytes = blobs
- .read_to_bytes(entry.content_hash())
- .await
- .map_err(read_failed)?;
- Ok(as_utf8(bytes.into()).unwrap_or_else(encode_hex))
- } else {
- // large content: read just the first part as UTF-8
- let mut blob_reader = blobs
- .read(entry.content_hash())
- .await
- .map_err(read_failed)?;
- let mut buf = Vec::with_capacity(MAX_DISPLAY_CONTENT_LEN as usize + 5);
-
- blob_reader
- .read_buf(&mut buf)
- .await
- .map_err(|io_err| read_failed(io_err.into()))?;
- let mut repr = as_utf8(buf).unwrap_or_else(encode_hex);
- // let users know this is not shown in full
- repr.push_str("...");
- Ok(repr)
- }
- }
- DisplayContentMode::Content => {
- // read fully as UTF-8
- let bytes = blobs
- .read_to_bytes(entry.content_hash())
- .await
- .map_err(read_failed)?;
- Ok(as_utf8(bytes.into()).unwrap_or_else(encode_hex))
- }
- DisplayContentMode::ShortHash => {
- let hash = entry.content_hash();
- Ok(fmt_short(hash.as_bytes()))
- }
- DisplayContentMode::Hash => {
- let hash = entry.content_hash();
- Ok(hash.to_string())
- }
- }
-}
-
-/// Converts the [`Entry`] to human-readable bytes.
-fn human_len(entry: &Entry) -> HumanBytes {
- HumanBytes(entry.content_len())
-}
-
-/// Formats an entry for display as a `String`.
-#[must_use = "this won't be printed, you need to print it yourself"]
-async fn fmt_entry(
- blobs: &iroh::client::blobs::Client,
- entry: &Entry,
- mode: DisplayContentMode,
-) -> String {
- let key = std::str::from_utf8(entry.key())
- .unwrap_or("<bad key>")
- .bold();
- let author = fmt_short(entry.author());
- let (Ok(content) | Err(content)) = fmt_content(blobs, entry, mode).await;
- let len = human_len(entry);
- format!("@{author}: {key} = {content} ({len})")
-}
-
-/// Converts a path to a canonical path.
-fn canonicalize_path(path: &str) -> anyhow::Result<PathBuf> {
- let path = PathBuf::from(shellexpand::tilde(&path).to_string());
- Ok(path)
-}
-
-/// Creates a [`Tag`] from a file name (given as a [`Path`]).
-fn tag_from_file_name(path: &Path) -> anyhow::Result<Tag> {
- match path.file_name() {
- Some(name) => name
- .to_os_string()
- .into_string()
- .map(|t| t.into())
- .map_err(|e| anyhow!("{e:?} contains invalid Unicode")),
- None => bail!("the given `path` does not have a proper directory or file name"),
- }
-}
-
-/// Takes the `BlobsClient::add_from_path` and coordinates adding blobs to a
-/// document via the hash of the blob. It also creates and powers the
-/// `ImportProgressBar`.
-#[tracing::instrument(skip_all)]
-async fn import_coordinator(
- doc: Doc,
- author_id: AuthorId,
- root: PathBuf,
- prefix: String,
- blob_add_progress: impl Stream<Item = Result<AddProgress>> + Send + Unpin + 'static,
- expected_size: u64,
- expected_entries: u64,
-) -> Result<()> {
- let imp = ImportProgressBar::new(
- &root.display().to_string(),
- doc.id(),
- expected_size,
- expected_entries,
- );
- let task_imp = imp.clone();
-
- let collections = Rc::new(RefCell::new(BTreeMap::<
- u64,
- (String, u64, Option<Hash>, u64),
- >::new()));
-
- let doc2 = doc.clone();
- let imp2 = task_imp.clone();
-
- let _stats: Vec<_> = blob_add_progress
- .filter_map(|item| {
- let item = match item.context("Error adding files") {
- Err(e) => return Some(Err(e)),
- Ok(item) => item,
- };
- match item {
- AddProgress::Found { name, id, size } => {
- tracing::info!("Found({id},{name},{size})");
- imp.add_found(name.clone(), size);
- collections.borrow_mut().insert(id, (name, size, None, 0));
- None
- }
- AddProgress::Progress { id, offset } => {
- tracing::info!("Progress({id}, {offset})");
- if let Some((_, size, _, last_val)) = collections.borrow_mut().get_mut(&id) {
- assert!(*last_val <= offset, "wtf");
- assert!(offset <= *size, "wtf2");
- imp.add_progress(offset - *last_val);
- *last_val = offset;
- }
- None
- }
- AddProgress::Done { hash, id } => {
- tracing::info!("Done({id},{hash:?})");
- match collections.borrow_mut().get_mut(&id) {
- Some((path_str, size, ref mut h, last_val)) => {
- imp.add_progress(*size - *last_val);
- imp.import_found(path_str.clone());
- let path = PathBuf::from(path_str.clone());
- *h = Some(hash);
- let key =
- match path_to_key(path, Some(prefix.clone()), Some(root.clone())) {
- Ok(k) => k.to_vec(),
- Err(e) => {
- tracing::info!(
- "error getting key from {}, id {id}",
- path_str
- );
- return Some(Err(anyhow::anyhow!(
- "Issue creating a key for entry {hash:?}: {e}"
- )));
- }
- };
- // send update to doc
- tracing::info!(
- "setting entry {} (id: {id}) to doc",
- String::from_utf8(key.clone()).unwrap()
- );
- Some(Ok((key, hash, *size)))
- }
- None => {
- tracing::info!(
- "error: got `AddProgress::Done` for unknown collection id {id}"
- );
- Some(Err(anyhow::anyhow!(
- "Received progress information on an unknown file."
- )))
- }
- }
- }
- AddProgress::AllDone { hash, ..
} => { - imp.add_done(); - tracing::info!("AddProgress::AllDone({hash:?})"); - None - } - AddProgress::Abort(e) => { - tracing::info!("Error while adding data: {e}"); - Some(Err(anyhow::anyhow!("Error while adding files: {e}"))) - } - } - }) - .map(move |res| { - let doc = doc2.clone(); - let imp = imp2.clone(); - async move { - match res { - Ok((key, hash, size)) => { - let doc = doc.clone(); - doc.set_hash(author_id, key, hash, size).await?; - imp.import_progress(); - Ok(size) - } - Err(err) => Err(err), - } - } - }) - .buffered_unordered(128) - .try_collect() - .await?; - - task_imp.all_done(); - Ok(()) -} - -/// Progress bar for importing files. -#[derive(Debug, Clone)] -struct ImportProgressBar { - mp: MultiProgress, - import: ProgressBar, - add: ProgressBar, -} - -impl ImportProgressBar { - /// Creates a new import progress bar. - fn new(source: &str, doc_id: NamespaceId, expected_size: u64, expected_entries: u64) -> Self { - let mp = MultiProgress::new(); - let add = mp.add(ProgressBar::new(0)); - add.set_style(ProgressStyle::default_bar() - .template("{msg}\n{spinner:.green} [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({bytes_per_sec}, eta {eta})").unwrap() - .progress_chars("=>-")); - add.set_message(format!("Importing from {source}...")); - add.set_length(expected_size); - add.set_position(0); - add.enable_steady_tick(Duration::from_millis(500)); - - let doc_id = fmt_short(doc_id.to_bytes()); - let import = mp.add(ProgressBar::new(0)); - import.set_style(ProgressStyle::default_bar() - .template("{msg}\n{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} ({per_sec}, eta {eta})").unwrap() - .progress_chars("=>-")); - import.set_message(format!("Adding to doc {doc_id}...")); - import.set_length(expected_entries); - import.set_position(0); - import.enable_steady_tick(Duration::from_millis(500)); - - Self { mp, import, add } - } - - fn add_found(&self, _name: String, _size: u64) {} - - fn import_found(&self, _name: String) {} - - /// Marks having made some progress to the progress bar. - fn add_progress(&self, size: u64) { - self.add.inc(size); - } - - /// Marks having made one unit of progress on the import progress bar. - fn import_progress(&self) { - self.import.inc(1); - } - - /// Sets the `add` progress bar as completed. - fn add_done(&self) { - self.add.set_position(self.add.length().unwrap_or_default()); - } - - /// Sets the all progress bars as done. 
- fn all_done(self) { - self.mp.clear().ok(); - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[tokio::test] - async fn test_doc_import() -> Result<()> { - let temp_dir = tempfile::tempdir().context("tempdir")?; - - tokio::fs::create_dir_all(temp_dir.path()) - .await - .context("create dir all")?; - - let foobar = temp_dir.path().join("foobar"); - tokio::fs::write(foobar, "foobar") - .await - .context("write foobar")?; - let foo = temp_dir.path().join("foo"); - tokio::fs::write(foo, "foo").await.context("write foo")?; - - let data_dir = tempfile::tempdir()?; - - let node = crate::commands::start::start_node(data_dir.path(), None, None).await?; - let client = node.client(); - let doc = client.docs().create().await.context("doc create")?; - let author = client.authors().create().await.context("author create")?; - - // set up command, getting iroh node - let cli = ConsoleEnv::for_console(data_dir.path().to_owned(), &node) - .await - .context("ConsoleEnv")?; - let iroh = iroh::client::Iroh::connect_path(data_dir.path()) - .await - .context("rpc connect")?; - - let command = DocCommands::Import { - doc: Some(doc.id()), - author: Some(author), - prefix: None, - path: temp_dir.path().to_string_lossy().into(), - in_place: false, - no_prompt: true, - }; - - command.run(&iroh, &cli).await.context("DocCommands run")?; - - let keys: Vec<_> = doc - .get_many(Query::all()) - .await - .context("doc get many")? - .try_collect() - .await?; - assert_eq!(2, keys.len()); - - iroh.shutdown(false).await?; - Ok(()) - } -} diff --git a/iroh-cli/src/commands/rpc.rs b/iroh-cli/src/commands/rpc.rs index 9a39deeb56..646a54a38b 100644 --- a/iroh-cli/src/commands/rpc.rs +++ b/iroh-cli/src/commands/rpc.rs @@ -3,12 +3,12 @@ use anyhow::Result; use clap::Subcommand; use iroh::client::Iroh; +use iroh_docs::cli::ConsoleEnv; use super::{ authors::AuthorCommands, blobs::BlobCommands, docs::DocCommands, gossip::GossipCommands, net::NetCommands, tags::TagCommands, }; -use crate::config::ConsoleEnv; /// Commands to manage the iroh RPC. #[derive(Subcommand, Debug, Clone)] @@ -96,8 +96,8 @@ impl RpcCommands { match self { Self::Net { command } => command.run(iroh).await, Self::Blobs { command } => command.run(&iroh.blobs(), node_id().await?).await, - Self::Docs { command } => command.run(iroh, env).await, - Self::Authors { command } => command.run(iroh, env).await, + Self::Docs { command } => command.run(&iroh.docs(), &iroh.blobs(), env).await, + Self::Authors { command } => command.run(&iroh.authors(), env).await, Self::Tags { command } => command.run(&iroh.tags()).await, Self::Gossip { command } => command.run(iroh).await, Self::Stats => { diff --git a/iroh-cli/src/config.rs b/iroh-cli/src/config.rs index 297b30a0dd..6b8b27639a 100644 --- a/iroh-cli/src/config.rs +++ b/iroh-cli/src/config.rs @@ -9,19 +9,13 @@ use std::{ time::Duration, }; -use anyhow::{anyhow, bail, Context, Result}; +use anyhow::{anyhow, Result}; use iroh::{ - client::Iroh, - docs::{AuthorId, NamespaceId}, net::{RelayMap, RelayNode}, node::GcPolicy, }; -use parking_lot::RwLock; -use serde::{Deserialize, Serialize}; -use tracing::warn; +use serde::Deserialize; -const ENV_AUTHOR: &str = "IROH_AUTHOR"; -const ENV_DOC: &str = "IROH_DOC"; const ENV_CONFIG_DIR: &str = "IROH_CONFIG_DIR"; const ENV_FILE_RUST_LOG: &str = "IROH_FILE_RUST_LOG"; @@ -150,193 +144,6 @@ impl From for GcPolicy { } } -/// Environment for CLI and REPL -/// -/// This is cheaply cloneable and has interior mutability. 
If not running in the console -/// environment, [Self::set_doc] and [Self::set_author] will lead to an error, as changing the -/// environment is only supported within the console. -#[derive(Clone, Debug)] -pub(crate) struct ConsoleEnv(Arc>); - -#[derive(PartialEq, Eq, Debug, Deserialize, Serialize, Clone)] -struct ConsoleEnvInner { - /// Active author. Read from IROH_AUTHOR env variable. - /// For console also read from/persisted to a file (see [`ConsolePaths::DefaultAuthor`]) - /// Defaults to the node's default author if both are empty. - author: AuthorId, - /// Active doc. Read from IROH_DOC env variable. Not persisted. - doc: Option, - is_console: bool, - iroh_data_dir: PathBuf, -} - -impl ConsoleEnv { - /// Read from environment variables and the console config file. - pub(crate) async fn for_console(iroh_data_dir: PathBuf, iroh: &Iroh) -> Result { - let console_data_dir = ConsolePaths::root(&iroh_data_dir); - tokio::fs::create_dir_all(&console_data_dir) - .await - .with_context(|| { - format!( - "failed to create console data directory at `{}`", - console_data_dir.to_string_lossy() - ) - })?; - - Self::migrate_console_files_016_017(&iroh_data_dir).await?; - - let configured_author = Self::get_console_default_author(&iroh_data_dir)?; - let author = env_author(configured_author, iroh).await?; - let env = ConsoleEnvInner { - author, - doc: env_doc()?, - is_console: true, - iroh_data_dir, - }; - Ok(Self(Arc::new(RwLock::new(env)))) - } - - /// Read only from environment variables. - pub(crate) async fn for_cli(iroh_data_dir: PathBuf, iroh: &Iroh) -> Result { - let author = env_author(None, iroh).await?; - let env = ConsoleEnvInner { - author, - doc: env_doc()?, - is_console: false, - iroh_data_dir, - }; - Ok(Self(Arc::new(RwLock::new(env)))) - } - - fn get_console_default_author(iroh_data_root: &Path) -> anyhow::Result> { - let author_path = ConsolePaths::CurrentAuthor.with_iroh_data_dir(iroh_data_root); - if let Ok(s) = std::fs::read_to_string(&author_path) { - let author = AuthorId::from_str(&s).with_context(|| { - format!( - "Failed to parse author file at {}", - author_path.to_string_lossy() - ) - })?; - Ok(Some(author)) - } else { - Ok(None) - } - } - - /// True if running in a Iroh console session, false for a CLI command - pub(crate) fn is_console(&self) -> bool { - self.0.read().is_console - } - - /// Return the iroh data directory - pub(crate) fn iroh_data_dir(&self) -> PathBuf { - self.0.read().iroh_data_dir.clone() - } - - /// Set the active author. - /// - /// Will error if not running in the Iroh console. - /// Will persist to a file in the Iroh data dir otherwise. - pub(crate) fn set_author(&self, author: AuthorId) -> anyhow::Result<()> { - let author_path = ConsolePaths::CurrentAuthor.with_iroh_data_dir(self.iroh_data_dir()); - let mut inner = self.0.write(); - if !inner.is_console { - bail!("Switching the author is only supported within the Iroh console, not on the command line"); - } - inner.author = author; - std::fs::write(author_path, author.to_string().as_bytes())?; - Ok(()) - } - - /// Set the active document. - /// - /// Will error if not running in the Iroh console. - /// Will not persist, only valid for the current console session. - pub(crate) fn set_doc(&self, doc: NamespaceId) -> anyhow::Result<()> { - let mut inner = self.0.write(); - if !inner.is_console { - bail!("Switching the document is only supported within the Iroh console, not on the command line"); - } - inner.doc = Some(doc); - Ok(()) - } - - /// Get the active document. 
- pub(crate) fn doc(&self, arg: Option) -> anyhow::Result { - let inner = self.0.read(); - let doc_id = arg.or(inner.doc).ok_or_else(|| { - anyhow!( - "Missing document id. Set the active document with the `IROH_DOC` environment variable or the `-d` option.\n\ - In the console, you can also set the active document with `doc switch`." - ) - })?; - Ok(doc_id) - } - - /// Get the active author. - /// - /// This is either the node's default author, or in the console optionally the author manually - /// switched to. - pub(crate) fn author(&self) -> AuthorId { - let inner = self.0.read(); - inner.author - } - - pub(crate) async fn migrate_console_files_016_017(iroh_data_dir: &Path) -> Result<()> { - // In iroh up to 0.16, we stored console settings directly in the data directory. Starting - // from 0.17, they live in a subdirectory and have new paths. - let old_current_author = iroh_data_dir.join("default_author.pubkey"); - if old_current_author.is_file() { - if let Err(err) = tokio::fs::rename( - &old_current_author, - ConsolePaths::CurrentAuthor.with_iroh_data_dir(iroh_data_dir), - ) - .await - { - warn!(path=%old_current_author.to_string_lossy(), "failed to migrate the console's current author file: {err}"); - } - } - let old_history = iroh_data_dir.join("history"); - if old_history.is_file() { - if let Err(err) = tokio::fs::rename( - &old_history, - ConsolePaths::History.with_iroh_data_dir(iroh_data_dir), - ) - .await - { - warn!(path=%old_history.to_string_lossy(), "failed to migrate the console's history file: {err}"); - } - } - Ok(()) - } -} - -async fn env_author(from_config: Option, iroh: &Iroh) -> Result { - if let Some(author) = env::var(ENV_AUTHOR) - .ok() - .map(|s| { - s.parse() - .context("Failed to parse IROH_AUTHOR environment variable") - }) - .transpose()? - .or(from_config) - { - Ok(author) - } else { - iroh.authors().default().await - } -} - -fn env_doc() -> Result> { - env::var(ENV_DOC) - .ok() - .map(|s| { - s.parse() - .context("Failed to parse IROH_DOC environment variable") - }) - .transpose() -} - /// Parse [`ENV_FILE_RUST_LOG`] as [`tracing_subscriber::EnvFilter`]. Returns `None` if not /// present. 
fn env_file_rust_log() -> Option> { From cd189625af931b06fc756b19f4d2ae537f3331ce Mon Sep 17 00:00:00 2001 From: Ruediger Klaehn Date: Mon, 18 Nov 2024 12:44:56 +0200 Subject: [PATCH 3/8] Use git dep again --- Cargo.lock | 1 + Cargo.toml | 3 +-- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c6343b7539..b1f36301ec 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2870,6 +2870,7 @@ dependencies = [ [[package]] name = "iroh-docs" version = "0.28.0" +source = "git+https://github.com/n0-computer/iroh-docs?branch=cli#c54e186c476fefd9f075374e200f632a7b0ca1f0" dependencies = [ "anyhow", "async-channel", diff --git a/Cargo.toml b/Cargo.toml index 9ed150abc0..8271ed2669 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -56,6 +56,5 @@ iroh-test = { path = "./iroh-test" } iroh-router = { path = "./iroh-router" } iroh-gossip = { git = "https://github.com/n0-computer/iroh-gossip", branch = "main" } -# iroh-docs = { git = "https://github.com/n0-computer/iroh-docs", branch = "cli" } -iroh-docs = { path = "../iroh-docs" } +iroh-docs = { git = "https://github.com/n0-computer/iroh-docs", branch = "cli" } iroh-blobs = { git = "https://github.com/n0-computer/iroh-blobs", branch = "cli" } From a3cfb8a7775f573484c827ac71fb5acb33644220 Mon Sep 17 00:00:00 2001 From: Ruediger Klaehn Date: Mon, 18 Nov 2024 13:02:59 +0200 Subject: [PATCH 4/8] use cli logic from iroh-gossip crate --- Cargo.lock | 4 +- Cargo.toml | 2 +- iroh-cli/src/commands.rs | 2 +- iroh-cli/src/commands/gossip.rs | 128 -------------------------------- iroh-cli/src/commands/rpc.rs | 2 +- 5 files changed, 6 insertions(+), 132 deletions(-) delete mode 100644 iroh-cli/src/commands/gossip.rs diff --git a/Cargo.lock b/Cargo.lock index b1f36301ec..b57fccfeb6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2919,16 +2919,18 @@ dependencies = [ [[package]] name = "iroh-gossip" version = "0.28.1" -source = "git+https://github.com/n0-computer/iroh-gossip?branch=main#ba0f6b0f54a740d8eae7ee6683f4aa1d8d8c8eb2" +source = "git+https://github.com/n0-computer/iroh-gossip?branch=cli#16f35050fe47534052e79dcbca42da4212dc6256" dependencies = [ "anyhow", "async-channel", "bytes", + "clap", "derive_more", "ed25519-dalek", "futures-concurrency", "futures-lite 2.4.0", "futures-util", + "hex", "indexmap 2.6.0", "iroh-base", "iroh-blake3", diff --git a/Cargo.toml b/Cargo.toml index 8271ed2669..b7323b2bbd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -55,6 +55,6 @@ iroh-metrics = { path = "./iroh-metrics" } iroh-test = { path = "./iroh-test" } iroh-router = { path = "./iroh-router" } -iroh-gossip = { git = "https://github.com/n0-computer/iroh-gossip", branch = "main" } +iroh-gossip = { git = "https://github.com/n0-computer/iroh-gossip", branch = "cli" } iroh-docs = { git = "https://github.com/n0-computer/iroh-docs", branch = "cli" } iroh-blobs = { git = "https://github.com/n0-computer/iroh-blobs", branch = "cli" } diff --git a/iroh-cli/src/commands.rs b/iroh-cli/src/commands.rs index 2e3c231a52..2bbf6b95ab 100644 --- a/iroh-cli/src/commands.rs +++ b/iroh-cli/src/commands.rs @@ -14,12 +14,12 @@ use crate::config::NodeConfig; pub(crate) mod console; pub(crate) mod doctor; -pub(crate) mod gossip; pub(crate) mod net; pub(crate) mod rpc; pub(crate) mod start; pub use iroh_blobs::{cli as blobs, cli::tags}; pub use iroh_docs::{cli as docs, cli::authors}; +pub use iroh_gossip::cli as gossip; /// iroh is a tool for building distributed apps. 
/// diff --git a/iroh-cli/src/commands/gossip.rs b/iroh-cli/src/commands/gossip.rs deleted file mode 100644 index a5f8fb964d..0000000000 --- a/iroh-cli/src/commands/gossip.rs +++ /dev/null @@ -1,128 +0,0 @@ -//! Define the gossiping subcommands. - -use std::str::FromStr as _; - -use anyhow::{Context, Result}; -use bao_tree::blake3; -use clap::{ArgGroup, Subcommand}; -use futures_lite::StreamExt; -use futures_util::SinkExt; -use iroh::{client::Iroh, net::NodeId}; -use iroh_gossip::rpc::client::SubscribeOpts; -use tokio::io::AsyncBufReadExt; - -/// Commands to manage gossiping. -#[derive(Subcommand, Debug, Clone)] -#[allow(clippy::large_enum_variant)] -pub enum GossipCommands { - /// Subscribe to a gossip topic - #[command( - long_about = r#"Subscribe to a gossip topic - -Example usage: - - $ iroh gossip subscribe --topic test --start - -This will print the current node's id. Open another terminal -or another machine and you can join the same topic: - - # on another machine/terminal - $ iroh gossip subscribe --topic test --start - -Any lines entered in stdin will be sent to the given topic -and received messages will be printed to stdout line-by-line. - -The process waits for Ctrl+C to exit."#, - group( - ArgGroup::new("input") - .required(true) - .args(&["topic", "raw_topic"]) - ) - )] - Subscribe { - /// The topic to subscribe to. - /// - /// This will be hashed with BLAKE3 to get the actual topic ID. - #[clap(long)] - topic: Option, - /// The raw topic to subscribe to as hex. Needs to be 32 bytes, i.e. 64 hex characters. - #[clap(long)] - raw_topic: Option, - /// The set of nodes that are also part of the gossip swarm to bootstrap with. - /// - /// If empty, this will bootstrap a new swarm. Running the command will print - /// the node's `NodeId`, which can be used as the bootstrap argument in other nodes. - bootstrap: Vec, - /// If enabled, all gossip events will be printed, including neighbor up/down events. - #[clap(long, short)] - verbose: bool, - }, -} - -impl GossipCommands { - /// Runs the gossip command given the iroh client. - pub async fn run(self, iroh: &Iroh) -> Result<()> { - match self { - Self::Subscribe { - topic, - raw_topic, - bootstrap, - verbose, - } => { - let bootstrap = bootstrap - .into_iter() - .map(|node_id| NodeId::from_str(&node_id).map_err(|e| { - anyhow::anyhow!("Failed to parse bootstrap node id \"{node_id}\": {e}\nMust be a valid base32-encoded iroh node id.") - })) - .collect::>()?; - - let topic = match (topic, raw_topic) { - (Some(topic), None) => blake3::hash(topic.as_bytes()).into(), - (None, Some(raw_topic)) => { - let mut slice = [0; 32]; - hex::decode_to_slice(raw_topic, &mut slice) - .context("failed to decode raw topic")?; - slice.into() - } - _ => anyhow::bail!("either topic or raw_topic must be provided"), - }; - - let opts = SubscribeOpts { - bootstrap, - subscription_capacity: 1024, - }; - - let (mut sink, mut stream) = iroh.gossip().subscribe_with_opts(topic, opts).await?; - let mut input_lines = tokio::io::BufReader::new(tokio::io::stdin()).lines(); - loop { - tokio::select! 
{ - line = input_lines.next_line() => { - let line = line.context("failed to read from stdin")?; - if let Some(line) = line { - sink.send(iroh_gossip::net::Command::Broadcast(line.into())).await?; - } else { - break; - } - } - res = stream.next() => { - let res = res.context("gossip stream ended")?.context("failed to read gossip stream")?; - match res { - iroh_gossip::net::Event::Gossip(event) => { - if verbose { - println!("{:?}", event); - } else if let iroh_gossip::net::GossipEvent::Received(iroh_gossip::net::Message { content, .. }) = event { - println!("{:?}", content); - } - } - iroh_gossip::net::Event::Lagged => { - anyhow::bail!("gossip stream lagged"); - } - }; - } - } - } - } - } - Ok(()) - } -} diff --git a/iroh-cli/src/commands/rpc.rs b/iroh-cli/src/commands/rpc.rs index 646a54a38b..c7c2782482 100644 --- a/iroh-cli/src/commands/rpc.rs +++ b/iroh-cli/src/commands/rpc.rs @@ -99,7 +99,7 @@ impl RpcCommands { Self::Docs { command } => command.run(&iroh.docs(), &iroh.blobs(), env).await, Self::Authors { command } => command.run(&iroh.authors(), env).await, Self::Tags { command } => command.run(&iroh.tags()).await, - Self::Gossip { command } => command.run(iroh).await, + Self::Gossip { command } => command.run(&iroh.gossip()).await, Self::Stats => { let stats = iroh.stats().await?; for (name, details) in stats.iter() { From aa68633a003ad0840143f274fa0963bfdc68a602 Mon Sep 17 00:00:00 2001 From: Ruediger Klaehn Date: Mon, 18 Nov 2024 15:25:25 +0200 Subject: [PATCH 5/8] no pub export it's a bin crate anyway... --- iroh-cli/src/commands.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/iroh-cli/src/commands.rs b/iroh-cli/src/commands.rs index 628baaf967..62ba66748f 100644 --- a/iroh-cli/src/commands.rs +++ b/iroh-cli/src/commands.rs @@ -19,7 +19,7 @@ pub(crate) mod gossip; pub(crate) mod net; pub(crate) mod rpc; pub(crate) mod start; -pub use iroh_blobs::{cli as blobs, cli::tags}; +pub(crate) use iroh_blobs::{cli as blobs, cli::tags}; /// iroh is a tool for building distributed apps. 
/// From 94624fd21bcfbdb33094e2bf8187879fc0d39df3 Mon Sep 17 00:00:00 2001 From: Ruediger Klaehn Date: Mon, 18 Nov 2024 15:39:58 +0200 Subject: [PATCH 6/8] use iroh-blobs/main now that cli is merged --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c75d1d9e9f..67f69b47d9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2705,7 +2705,7 @@ dependencies = [ [[package]] name = "iroh-blobs" version = "0.28.1" -source = "git+https://github.com/n0-computer/iroh-blobs?branch=cli#daad706ee0025e095e76c019c3f8d96d899a5bb3" +source = "git+https://github.com/n0-computer/iroh-blobs?branch=main#cdcb863d7c75cdd7258c8f5001154272df7460c0" dependencies = [ "anyhow", "async-channel", diff --git a/Cargo.toml b/Cargo.toml index 6ca23e89a0..2f995ba8f6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -57,4 +57,4 @@ iroh-router = { path = "./iroh-router" } iroh-gossip = { git = "https://github.com/n0-computer/iroh-gossip", branch = "main" } iroh-docs = { git = "https://github.com/n0-computer/iroh-docs", branch = "main" } -iroh-blobs = { git = "https://github.com/n0-computer/iroh-blobs", branch = "cli" } +iroh-blobs = { git = "https://github.com/n0-computer/iroh-blobs", branch = "main" } From c9d6d1a5236ec582d2676648b1c1001d3a943677 Mon Sep 17 00:00:00 2001 From: Ruediger Klaehn Date: Mon, 18 Nov 2024 17:07:18 +0200 Subject: [PATCH 7/8] require cli on iroh-gossip --- Cargo.lock | 2 +- iroh-cli/Cargo.toml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4b96ab97e7..804c13b25c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2919,7 +2919,7 @@ dependencies = [ [[package]] name = "iroh-gossip" version = "0.28.1" -source = "git+https://github.com/n0-computer/iroh-gossip?branch=cli#16f35050fe47534052e79dcbca42da4212dc6256" +source = "git+https://github.com/n0-computer/iroh-gossip?branch=main#7c90c3f351585e7a364bfbf3941d07592b20dec6" dependencies = [ "anyhow", "async-channel", diff --git a/iroh-cli/Cargo.toml b/iroh-cli/Cargo.toml index 6fd4208499..1127703c5b 100644 --- a/iroh-cli/Cargo.toml +++ b/iroh-cli/Cargo.toml @@ -41,10 +41,10 @@ hex = "0.4.3" human-time = "0.1.6" indicatif = { version = "0.17", features = ["tokio"] } iroh = { version = "0.28.1", path = "../iroh", features = ["metrics"] } -iroh-gossip = "0.28.1" -iroh-docs = { version = "0.28.0", features = ["rpc", "cli"]} -iroh-metrics = { version = "0.28.0" } iroh-blobs = { version = "0.28.1", features = ["cli"] } +iroh-gossip = { version = "0.28.1", features = ["cli"] } +iroh-docs = { version = "0.28.0", features = ["cli"] } +iroh-metrics = { version = "0.28.0" } parking_lot = "0.12.1" pkarr = { version = "2.2.0", default-features = false } portable-atomic = "1" From d245c20e9fb4b1d796e51bd3d2020b2ed03788e3 Mon Sep 17 00:00:00 2001 From: Ruediger Klaehn Date: Mon, 18 Nov 2024 18:41:40 +0200 Subject: [PATCH 8/8] fix cargo.toml --- iroh-cli/Cargo.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/iroh-cli/Cargo.toml b/iroh-cli/Cargo.toml index b56b4ad154..b1d797440d 100644 --- a/iroh-cli/Cargo.toml +++ b/iroh-cli/Cargo.toml @@ -42,10 +42,9 @@ human-time = "0.1.6" indicatif = { version = "0.17", features = ["tokio"] } iroh = { version = "0.28.1", path = "../iroh", features = ["metrics"] } iroh-blobs = { version = "0.28.1", features = ["cli"] } -iroh-gossip = { version = "0.28.1", features = ["cli"] } iroh-docs = { version = "0.28.0", features = ["cli"] } +iroh-gossip = { version = "0.28.1", features = ["cli"] } iroh-metrics 
= { version = "0.28.0" } -iroh-blobs = { version = "0.28.1", features = ["cli"] } parking_lot = "0.12.1" pkarr = { version = "2.2.0", default-features = false } portable-atomic = "1"