From efa24eb044ee428b9196e54189f6a82132a31284 Mon Sep 17 00:00:00 2001 From: Frando Date: Tue, 4 Nov 2025 12:15:07 +0100 Subject: [PATCH 01/12] update --- Cargo.lock | 94 ++++++++++++++++++++++++++++++++---------------------- Cargo.toml | 8 ++++- 2 files changed, 63 insertions(+), 39 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 76a37d68..4cd2b076 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1607,8 +1607,7 @@ dependencies = [ [[package]] name = "iroh" version = "0.94.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9428cef1eafd2eac584269986d1949e693877ac12065b401dfde69f664b07ac" +source = "git+https://github.com/n0-computer/iroh.git?branch=main#30c23e8dbaa02d17ab57ba41f0aa5271b0a411dc" dependencies = [ "aead", "backon", @@ -1630,10 +1629,9 @@ dependencies = [ "iroh-quinn-proto", "iroh-quinn-udp", "iroh-relay", + "n0-error", "n0-future", - "n0-snafu", "n0-watcher", - "nested_enum_utils", "netdev", "netwatch", "pin-project", @@ -1648,7 +1646,6 @@ dependencies = [ "rustls-webpki", "serde", "smallvec", - "snafu", "strum", "swarm-discovery", "time", @@ -1664,19 +1661,16 @@ dependencies = [ [[package]] name = "iroh-base" -version = "0.94.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db942f6f3d6fa9b475690c6e8e6684d60591dd886bf1bdfef4c60d89d502215c" +version = "0.94.1" +source = "git+https://github.com/n0-computer/iroh.git?branch=main#30c23e8dbaa02d17ab57ba41f0aa5271b0a411dc" dependencies = [ "curve25519-dalek", "data-encoding", "derive_more 2.0.1", "ed25519-dalek", - "n0-snafu", - "nested_enum_utils", + "n0-error", "rand_core 0.9.3", "serde", - "snafu", "url", "zeroize", "zeroize_derive", @@ -1750,24 +1744,24 @@ dependencies = [ [[package]] name = "iroh-metrics" -version = "0.36.1" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "090161e84532a0cb78ab13e70abb882b769ec67cf5a2d2dcea39bd002e1f7172" +checksum = "79e3381da7c93c12d353230c74bba26131d1c8bf3a4d8af0fec041546454582e" dependencies = [ "iroh-metrics-derive", "itoa", + "n0-error", "postcard", "ryu", "serde", - "snafu", "tracing", ] [[package]] name = "iroh-metrics-derive" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a39de3779d200dadde3a27b9fbdb34389a2af1b85ea445afca47bf4d7672573" +checksum = "d4e12bd0763fd16062f5cc5e8db15dd52d26e75a8af4c7fb57ccee3589b344b8" dependencies = [ "heck", "proc-macro2", @@ -1833,8 +1827,7 @@ dependencies = [ [[package]] name = "iroh-relay" version = "0.94.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "360e201ab1803201de9a125dd838f7a4d13e6ba3a79aeb46c7fbf023266c062e" +source = "git+https://github.com/n0-computer/iroh.git?branch=main#30c23e8dbaa02d17ab57ba41f0aa5271b0a411dc" dependencies = [ "blake3", "bytes", @@ -1852,9 +1845,8 @@ dependencies = [ "iroh-quinn", "iroh-quinn-proto", "lru 0.16.1", + "n0-error", "n0-future", - "n0-snafu", - "nested_enum_utils", "num_enum", "pin-project", "pkarr", @@ -1866,7 +1858,6 @@ dependencies = [ "serde", "serde_bytes", "sha1", - "snafu", "strum", "tokio", "tokio-rustls", @@ -1910,21 +1901,19 @@ dependencies = [ [[package]] name = "irpc" version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52cf44fdb253f2a3e22e5ecfa8efa466929f8b7cdd4fc0f958f655406e8cdab6" +source = "git+https://github.com/n0-computer/irpc.git?branch=matheus23%2Firpc-n0-error#d3df5515741f14aa68191bccbb4b5f614dfc6aa9" dependencies = [ - 
"anyhow", "futures-buffered", "futures-util", "iroh-quinn", "irpc-derive", + "n0-error", "n0-future", "postcard", "rcgen", "rustls", "serde", "smallvec", - "thiserror 2.0.12", "tokio", "tokio-util", "tracing", @@ -1933,8 +1922,7 @@ dependencies = [ [[package]] name = "irpc-derive" version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "969df6effc474e714fb7e738eb9859aa22f40dc2280cadeab245817075c7f273" +source = "git+https://github.com/n0-computer/irpc.git?branch=matheus23%2Firpc-n0-error#d3df5515741f14aa68191bccbb4b5f614dfc6aa9" dependencies = [ "proc-macro2", "quote", @@ -2138,6 +2126,27 @@ dependencies = [ "uuid", ] +[[package]] +name = "n0-error" +version = "0.1.0" +source = "git+https://github.com/n0-computer/n0-error.git?branch=Frando%2Fanyhow-ext#1a9790b89101cd91dcebad9da28c215e991f2947" +dependencies = [ + "derive_more 2.0.1", + "n0-error-macros", + "spez", +] + +[[package]] +name = "n0-error-macros" +version = "0.1.0" +source = "git+https://github.com/n0-computer/n0-error.git?branch=Frando%2Fanyhow-ext#1a9790b89101cd91dcebad9da28c215e991f2947" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.104", +] + [[package]] name = "n0-future" version = "0.3.0" @@ -2174,13 +2183,13 @@ dependencies = [ [[package]] name = "n0-watcher" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34c65e127e06e5a2781b28df6a33ea474a7bddc0ac0cfea888bd20c79a1b6516" +checksum = "38acf13c1ddafc60eb7316d52213467f8ccb70b6f02b65e7d97f7799b1f50be4" dependencies = [ "derive_more 2.0.1", + "n0-error", "n0-future", - "snafu", ] [[package]] @@ -2262,9 +2271,9 @@ dependencies = [ [[package]] name = "netwatch" -version = "0.11.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98d7ec7abdbfe67ee70af3f2002326491178419caea22254b9070e6ff0c83491" +checksum = "26f2acd376ef48b6c326abf3ba23c449e0cb8aa5c2511d189dd8a8a3bfac889b" dependencies = [ "atomic-waker", "bytes", @@ -2273,9 +2282,9 @@ dependencies = [ "iroh-quinn-udp", "js-sys", "libc", + "n0-error", "n0-future", "n0-watcher", - "nested_enum_utils", "netdev", "netlink-packet-core", "netlink-packet-route", @@ -2283,7 +2292,6 @@ dependencies = [ "netlink-sys", "pin-project-lite", "serde", - "snafu", "socket2 0.6.0", "time", "tokio", @@ -2548,9 +2556,9 @@ checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" [[package]] name = "portmapper" -version = "0.11.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d73aa9bd141e0ff6060fea89a5437883f3b9ceea1cda71c790b90e17d072a3b3" +checksum = "7b575f975dcf03e258b0c7ab3f81497d7124f508884c37da66a7314aa2a8d467" dependencies = [ "base64", "bytes", @@ -2561,13 +2569,12 @@ dependencies = [ "igd-next", "iroh-metrics", "libc", - "nested_enum_utils", + "n0-error", "netwatch", "num_enum", "rand 0.9.2", "serde", "smallvec", - "snafu", "socket2 0.6.0", "time", "tokio", @@ -3436,6 +3443,17 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "spez" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c87e960f4dca2788eeb86bbdde8dd246be8948790b7618d656e68f9b720a86e8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.104", +] + [[package]] name = "spin" version = "0.9.8" diff --git a/Cargo.toml b/Cargo.toml index ddeb8594..bed3a387 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -42,7 +42,7 @@ genawaiter = { version = "0.99.1", features = ["futures03"] 
} iroh-base = "0.94" iroh-tickets = "0.1" irpc = { version = "0.10.0", features = ["rpc", "quinn_endpoint_setup", "spans", "stream", "derive"], default-features = false } -iroh-metrics = { version = "0.36" } +iroh-metrics = { version = "0.37" } redb = { version = "2.6.3", optional = true } reflink-copy = { version = "0.1.24", optional = true } @@ -69,3 +69,9 @@ hide-proto-docs = [] metrics = [] default = ["hide-proto-docs", "fs-store"] fs-store = ["dep:redb", "dep:reflink-copy"] + +[patch.crates-io] +iroh = { git = "https://github.com/n0-computer/iroh.git", branch = "main" } +iroh-base = { git = "https://github.com/n0-computer/iroh.git", branch = "main" } +irpc = { git = "https://github.com/n0-computer/irpc.git", branch = "matheus23/irpc-n0-error" } +n0-error = { git = "https://github.com/n0-computer/n0-error.git", branch = "Frando/anyhow-ext" } From 8e0adef809d372fdcdbff7715a6fc2c8ad2fee70 Mon Sep 17 00:00:00 2001 From: Frando Date: Tue, 4 Nov 2025 14:42:43 +0100 Subject: [PATCH 02/12] refactor: chatgpt ports to n0-error --- Cargo.lock | 1 + Cargo.toml | 1 + src/api.rs | 134 +++++++++++------------------------- src/api/remote.rs | 104 ++++++++++++++-------------- src/get.rs | 5 +- src/get/error.rs | 59 +++++----------- src/get/request.rs | 11 ++- src/hash.rs | 20 ++---- src/protocol.rs | 18 ++--- src/provider.rs | 58 ++++++++-------- src/provider/events.rs | 46 ++++++------- src/store/fs.rs | 6 +- src/store/fs/bao_file.rs | 2 +- src/store/mem.rs | 2 +- src/store/readonly_mem.rs | 2 +- src/util.rs | 2 +- src/util/connection_pool.rs | 50 +++++++------- 17 files changed, 211 insertions(+), 310 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4cd2b076..8283597c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1702,6 +1702,7 @@ dependencies = [ "iroh-test", "iroh-tickets", "irpc", + "n0-error", "n0-future", "n0-snafu", "nested_enum_utils", diff --git a/Cargo.toml b/Cargo.toml index bed3a387..e06fda6e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -45,6 +45,7 @@ irpc = { version = "0.10.0", features = ["rpc", "quinn_endpoint_setup", "spans", iroh-metrics = { version = "0.37" } redb = { version = "2.6.3", optional = true } reflink-copy = { version = "0.1.24", optional = true } +n0-error = "0.1.0" [dev-dependencies] clap = { version = "4.5.31", features = ["derive"] } diff --git a/src/api.rs b/src/api.rs index 3abb13bd..813665d6 100644 --- a/src/api.rs +++ b/src/api.rs @@ -17,12 +17,12 @@ use std::{io, net::SocketAddr, ops::Deref}; use bao_tree::io::EncodeError; use iroh::Endpoint; use irpc::rpc::{listen, RemoteService}; -use n0_snafu::SpanTrace; +use n0_error::e; +use n0_error::stack_error; use nested_enum_utils::common_fields; use proto::{Request, ShutdownRequest, SyncDbRequest}; use ref_cast::RefCast; use serde::{Deserialize, Serialize}; -use snafu::{Backtrace, IntoError, Snafu}; use tags::Tags; pub mod blobs; @@ -35,75 +35,71 @@ pub use crate::{store::util::Tag, util::temp_tag::TempTag}; pub(crate) type ApiClient = irpc::Client; -#[common_fields({ - backtrace: Option, - #[snafu(implicit)] - span_trace: SpanTrace, -})] #[allow(missing_docs)] #[non_exhaustive] -#[derive(Debug, Snafu)] +#[stack_error(derive, add_meta)] pub enum RequestError { /// Request failed due to rpc error. - #[snafu(display("rpc error: {source}"))] + #[error("rpc error: {source}")] Rpc { source: irpc::Error }, /// Request failed due an actual error. 
- #[snafu(display("inner error: {source}"))] - Inner { source: Error }, + #[error("inner error: {source}")] + Inner { #[error(std_err)] source: Error }, } impl From for RequestError { fn from(value: irpc::Error) -> Self { - RpcSnafu.into_error(value) + e!(RequestError::Rpc, value) } } impl From for RequestError { fn from(value: Error) -> Self { - InnerSnafu.into_error(value) + e!(RequestError::Inner, value) } } impl From for RequestError { fn from(value: io::Error) -> Self { - InnerSnafu.into_error(value.into()) + e!(RequestError::Inner, value.into()) } } impl From for RequestError { fn from(value: irpc::channel::mpsc::RecvError) -> Self { - RpcSnafu.into_error(value.into()) + e!(RequestError::Rpc, value.into()) } } pub type RequestResult = std::result::Result; -#[common_fields({ - backtrace: Option, - #[snafu(implicit)] - span_trace: SpanTrace, -})] #[allow(missing_docs)] #[non_exhaustive] -#[derive(Debug, Snafu)] +#[stack_error(derive, add_meta, from_sources)] pub enum ExportBaoError { - #[snafu(display("send error: {source}"))] + #[error("send error")] Send { source: irpc::channel::SendError }, - #[snafu(display("mpsc recv error: {source}"))] + #[error("mpsc recv error")] MpscRecv { source: irpc::channel::mpsc::RecvError, }, - #[snafu(display("oneshot recv error: {source}"))] + #[error("oneshot recv error")] OneshotRecv { source: irpc::channel::oneshot::RecvError, }, - #[snafu(display("request error: {source}"))] + #[error("request error")] Request { source: irpc::RequestError }, - #[snafu(display("io error: {source}"))] - ExportBaoIo { source: io::Error }, - #[snafu(display("encode error: {source}"))] - ExportBaoInner { source: bao_tree::io::EncodeError }, - #[snafu(display("client error: {source}"))] + #[error("io error")] + ExportBaoIo { + #[error(std_err)] + source: io::Error, + }, + #[error("encode error")] + ExportBaoInner { + #[error(std_err)] + source: bao_tree::io::EncodeError, + }, + #[error("client error")] ClientError { source: ProgressError }, } @@ -124,57 +120,15 @@ impl From for Error { impl From for ExportBaoError { fn from(e: irpc::Error) -> Self { match e { - irpc::Error::MpscRecv(e) => MpscRecvSnafu.into_error(e), - irpc::Error::OneshotRecv(e) => OneshotRecvSnafu.into_error(e), - irpc::Error::Send(e) => SendSnafu.into_error(e), - irpc::Error::Request(e) => RequestSnafu.into_error(e), - irpc::Error::Write(e) => ExportBaoIoSnafu.into_error(e.into()), + irpc::Error::MpscRecv { source: e, .. } => e!(ExportBaoError::MpscRecv, e), + irpc::Error::OneshotRecv { source: e, .. } => e!(ExportBaoError::OneshotRecv, e), + irpc::Error::Send { source: e, .. } => e!(ExportBaoError::Send, e), + irpc::Error::Request { source: e, .. } => e!(ExportBaoError::Request, e), + irpc::Error::Write { source: e, .. 
} => e!(ExportBaoError::ExportBaoIo, e.into()), } } } -impl From for ExportBaoError { - fn from(value: io::Error) -> Self { - ExportBaoIoSnafu.into_error(value) - } -} - -impl From for ExportBaoError { - fn from(value: irpc::channel::mpsc::RecvError) -> Self { - MpscRecvSnafu.into_error(value) - } -} - -impl From for ExportBaoError { - fn from(value: irpc::channel::oneshot::RecvError) -> Self { - OneshotRecvSnafu.into_error(value) - } -} - -impl From for ExportBaoError { - fn from(value: irpc::channel::SendError) -> Self { - SendSnafu.into_error(value) - } -} - -impl From for ExportBaoError { - fn from(value: irpc::RequestError) -> Self { - RequestSnafu.into_error(value) - } -} - -impl From for ExportBaoError { - fn from(value: bao_tree::io::EncodeError) -> Self { - ExportBaoInnerSnafu.into_error(value) - } -} - -impl From for ExportBaoError { - fn from(value: ProgressError) -> Self { - ClientSnafu.into_error(value) - } -} - pub type ExportBaoResult = std::result::Result; #[derive(Debug, derive_more::Display, derive_more::From, Serialize, Deserialize)] @@ -196,13 +150,11 @@ impl Error { E: Into>, { Self::Io(io::Error::other(msg.into())) - } +} } impl From for Error { - fn from(e: irpc::Error) -> Self { - Self::Io(e.into()) - } + fn from(e: irpc::Error) -> Self { Self::Io(e.into()) } } impl From for Error { @@ -215,27 +167,19 @@ impl From for Error { } impl From for Error { - fn from(e: irpc::channel::mpsc::RecvError) -> Self { - Self::Io(e.into()) - } + fn from(e: irpc::channel::mpsc::RecvError) -> Self { Self::Io(e.into()) } } impl From for Error { - fn from(e: irpc::rpc::WriteError) -> Self { - Self::Io(e.into()) - } + fn from(e: irpc::rpc::WriteError) -> Self { Self::Io(e.into()) } } impl From for Error { - fn from(e: irpc::RequestError) -> Self { - Self::Io(e.into()) - } + fn from(e: irpc::RequestError) -> Self { Self::Io(e.into()) } } impl From for Error { - fn from(e: irpc::channel::SendError) -> Self { - Self::Io(e.into()) - } + fn from(e: irpc::channel::SendError) -> Self { Self::Io(e.into()) } } impl std::error::Error for Error { @@ -255,6 +199,10 @@ impl From for Error { } } +impl From for Error { + fn from(e: io::Error) -> Self { Self::Io(e) } +} + pub type Result = std::result::Result; /// The main entry point for the store API. 
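The api.rs hunks above establish the conversion pattern the rest of the series repeats: the `#[common_fields]` plus `Snafu` derives become a single `#[stack_error(derive, add_meta)]`, display strings move into `#[error(...)]` attributes on each variant, and snafu's context selectors (`FooSnafu.into_error(x)`) become `e!(Type::Variant, x)`. A minimal sketch of that shape, using only the macros exactly as they appear in this diff; `DemoError` and its variant are illustrative, not part of the patch:

use std::io;

use n0_error::{e, stack_error};

#[stack_error(derive, add_meta)]
pub enum DemoError {
    // std-library sources are tagged std_err, like RequestError::Inner above
    #[error("io error")]
    Io {
        #[error(std_err)]
        source: io::Error,
    },
}

impl From<io::Error> for DemoError {
    fn from(value: io::Error) -> Self {
        // where the snafu version wrote `IoSnafu.into_error(value)`
        e!(DemoError::Io, value)
    }
}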
diff --git a/src/api/remote.rs b/src/api/remote.rs index a71b5c00..fb34af00 100644 --- a/src/api/remote.rs +++ b/src/api/remote.rs @@ -16,10 +16,10 @@ use genawaiter::sync::{Co, Gen}; use iroh::endpoint::Connection; use irpc::util::{AsyncReadVarintExt, WriteVarintExt}; use n0_future::{io, Stream, StreamExt}; -use n0_snafu::SpanTrace; +use n0_error::stack_error; use nested_enum_utils::common_fields; use ref_cast::RefCast; -use snafu::{Backtrace, IntoError, ResultExt, Snafu}; +use n0_error::{e, StdResultExt}; use tracing::{debug, trace}; use super::blobs::{Bitfield, ExportBaoOptions}; @@ -34,7 +34,6 @@ use crate::{ AtBlobHeader, AtConnected, AtEndBlob, BlobContentNext, ConnectedNext, DecodeError, EndBlobNext, }, - get_error::{BadRequestSnafu, LocalFailureSnafu}, GetError, GetResult, Stats, StreamPair, }, hashseq::{HashSeq, HashSeqIter}, @@ -124,7 +123,7 @@ impl GetProgress { pub async fn complete(self) -> GetResult { just_result(self.stream()).await.unwrap_or_else(|| { - Err(LocalFailureSnafu.into_error(anyhow::anyhow!("stream closed without result"))) + Err(e!(GetError::LocalFailure, anyhow::anyhow!("stream closed without result"))) }) } } @@ -536,7 +535,7 @@ impl Remote { let local = self .local(content) .await - .map_err(|e: anyhow::Error| LocalFailureSnafu.into_error(e))?; + .map_err(|e: anyhow::Error| e!(GetError::LocalFailure, e))?; if local.is_complete() { return Ok(Default::default()); } @@ -681,7 +680,7 @@ impl Remote { let store = self.store(); let root = request.hash; let conn = conn.open_stream_pair().await.map_err(|e| { - LocalFailureSnafu.into_error(anyhow::anyhow!("failed to open stream pair: {e}")) + e!(GetError::LocalFailure, anyhow::anyhow!("failed to open stream pair: {e}")) })?; // I am cloning the connection, but it's fine because the original connection or ConnectionRef stays alive // for the duration of the operation. @@ -689,7 +688,11 @@ impl Remote { AtConnected::new(conn.t0, conn.recv, conn.send, request, Default::default()); trace!("Getting header"); // read the header - let next_child = match connected.next().await? { + let next_child = match connected + .next() + .await + .map_err(|e| e!(GetError::ConnectedNext, e))? 
+ { ConnectedNext::StartRoot(at_start_root) => { let header = at_start_root.next(); let end = get_blob_ranges_impl(header, root, store, &mut progress).await?; @@ -709,9 +712,9 @@ impl Remote { store .get_bytes(root) .await - .map_err(|e| LocalFailureSnafu.into_error(e.into()))?, + .map_err(|e| e!(GetError::LocalFailure, e.into()))?, ) - .context(BadRequestSnafu)?; + .map_err(|e| e!(GetError::BadRequest, e.into()))?; // let mut hash_seq = LazyHashSeq::new(store.blobs().clone(), root); loop { let at_start_child = match next_child { @@ -734,7 +737,10 @@ impl Remote { Err(at_closing) => at_closing, }; // read the rest, if any - let stats = at_closing.next().await?; + let stats = at_closing + .next() + .await + .map_err(|e| e!(GetError::AtClosingNext, e))?; trace!(?stats, "get hash seq done"); Ok(stats) } @@ -796,53 +802,41 @@ impl Remote { Err(at_closing) => at_closing, }; // read the rest, if any - let stats = at_closing.next().await?; + let stats = at_closing + .next() + .await + .map_err(|e| e!(GetError::AtClosingNext, e))?; trace!(?stats, "get hash seq done"); Ok(stats) } } /// Failures for a get operation -#[common_fields({ - backtrace: Option, - #[snafu(implicit)] - span_trace: SpanTrace, -})] #[allow(missing_docs)] #[non_exhaustive] -#[derive(Debug, Snafu)] +#[stack_error(derive, add_meta)] pub enum ExecuteError { /// Network or IO operation failed. - #[snafu(display("Unable to open bidi stream"))] - Connection { - source: iroh::endpoint::ConnectionError, - }, - #[snafu(display("Unable to read from the remote"))] + #[error("Unable to open bidi stream")] + Connection { source: iroh::endpoint::ConnectionError }, + #[error("Unable to read from the remote")] Read { source: iroh::endpoint::ReadError }, - #[snafu(display("Error sending the request"))] - Send { - source: crate::get::fsm::ConnectedNextError, - }, - #[snafu(display("Unable to read size"))] - Size { - source: crate::get::fsm::AtBlobHeaderNextError, - }, - #[snafu(display("Error while decoding the data"))] - Decode { - source: crate::get::fsm::DecodeError, - }, - #[snafu(display("Internal error while reading the hash sequence"))] + #[error("Error sending the request")] + Send { source: crate::get::fsm::ConnectedNextError }, + #[error("Unable to read size")] + Size { source: crate::get::fsm::AtBlobHeaderNextError }, + #[error("Error while decoding the data")] + Decode { source: crate::get::fsm::DecodeError }, + #[error("Internal error while reading the hash sequence")] ExportBao { source: api::ExportBaoError }, - #[snafu(display("Hash sequence has an invalid length"))] - InvalidHashSeq { source: anyhow::Error }, - #[snafu(display("Internal error importing the data"))] + #[error("Hash sequence has an invalid length")] + InvalidHashSeq { #[error(std_err)] source: anyhow::Error }, + #[error("Internal error importing the data")] ImportBao { source: crate::api::RequestError }, - #[snafu(display("Error sending download progress - receiver closed"))] + #[error("Error sending download progress - receiver closed")] SendDownloadProgress { source: irpc::channel::SendError }, - #[snafu(display("Internal error importing the data"))] - MpscSend { - source: tokio::sync::mpsc::error::SendError, - }, + #[error("Internal error importing the data")] + MpscSend { #[error(std_err)] source: tokio::sync::mpsc::error::SendError }, } pub trait GetStreamPair: Send + 'static { @@ -877,13 +871,16 @@ async fn get_blob_ranges_impl( store: &Store, mut progress: impl Sink, ) -> GetResult> { - let (mut content, size) = header.next().await?; + let (mut content, 
size) = header + .next() + .await + .map_err(|e| e!(GetError::AtBlobHeaderNext, e))?; let Some(size) = NonZeroU64::new(size) else { return if hash == Hash::EMPTY { - let end = content.drain().await?; + let end = content.drain().await.map_err(|e| e!(GetError::Decode, e))?; Ok(end) } else { - Err(DecodeError::leaf_hash_mismatch(ChunkNum(0)).into()) + Err(e!(GetError::Decode, DecodeError::leaf_hash_mismatch(ChunkNum(0)))) }; }; let buffer_size = get_buffer_size(size); @@ -891,17 +888,20 @@ async fn get_blob_ranges_impl( let handle = store .import_bao(hash, size, buffer_size) .await - .map_err(|e| LocalFailureSnafu.into_error(e.into()))?; + .map_err(|e| e!(GetError::LocalFailure, e.into()))?; let write = async move { GetResult::Ok(loop { match content.next().await { BlobContentNext::More((next, res)) => { - let item = res?; + let item = res.map_err(|e| e!(GetError::Decode, e))?; progress .send(next.stats().payload_bytes_read) .await - .map_err(|e| LocalFailureSnafu.into_error(e.into()))?; - handle.tx.send(item).await?; + .map_err(|e| e!(GetError::LocalFailure, e.into()))?; + handle.tx + .send(item) + .await + .map_err(|e| e!(GetError::IrpcSend, e))?; content = next; } BlobContentNext::Done(end) => { @@ -913,7 +913,7 @@ async fn get_blob_ranges_impl( }; let complete = async move { handle.rx.await.map_err(|e| { - LocalFailureSnafu.into_error(anyhow::anyhow!("error reading from import stream: {e}")) + e!(GetError::LocalFailure, anyhow::anyhow!("error reading from import stream: {e}")) }) }; let (_, end) = tokio::try_join!(complete, write)?; @@ -1053,7 +1053,7 @@ where self.sender .send(self.payload_bytes_sent) .await - .map_err(|e| ProgressError::Internal { source: e.into() })?; + .map_err(|e| n0_error::e!(ProgressError::Internal, e.into()))?; Ok(()) } diff --git a/src/get.rs b/src/get.rs index 15f40ea1..d46ca899 100644 --- a/src/get.rs +++ b/src/get.rs @@ -39,7 +39,6 @@ use crate::{ mod error; pub mod request; -pub(crate) use error::get_error; pub use error::{GetError, GetResult}; type DefaultReader = iroh::endpoint::RecvStream; @@ -121,7 +120,7 @@ pub mod fsm { use super::*; use crate::{ - get::get_error::BadRequestSnafu, + protocol::{ GetManyRequest, GetRequest, NonEmptyRequestRangeSpecIter, Request, MAX_MESSAGE_SIZE, }, @@ -158,7 +157,7 @@ pub mod fsm { .map_err(|e| OpenSnafu.into_error(e.into()))?; let request = Request::GetMany(request); let request_bytes = postcard::to_stdvec(&request) - .map_err(|source| BadRequestSnafu.into_error(source.into()))?; + .map_err(|source| n0_error::e!(GetError::BadRequest, source.into()))?; writer .send_bytes(request_bytes.into()) .await diff --git a/src/get/error.rs b/src/get/error.rs index 5cc44e35..c60b7e0c 100644 --- a/src/get/error.rs +++ b/src/get/error.rs @@ -2,54 +2,33 @@ use std::io; use iroh::endpoint::{ConnectionError, ReadError, VarInt, WriteError}; -use n0_snafu::SpanTrace; +use n0_error::stack_error; use nested_enum_utils::common_fields; -use snafu::{Backtrace, Snafu}; +use n0_error::e; use crate::get::fsm::{ AtBlobHeaderNextError, AtClosingNextError, ConnectedNextError, DecodeError, InitialNextError, }; /// Failures for a get operation -#[common_fields({ - backtrace: Option, - #[snafu(implicit)] - span_trace: SpanTrace, -})] -#[derive(Debug, Snafu)] -#[snafu(visibility(pub(crate)))] -#[snafu(module)] +#[stack_error(derive, add_meta)] pub enum GetError { - #[snafu(transparent)] - InitialNext { - source: InitialNextError, - }, - #[snafu(transparent)] - ConnectedNext { - source: ConnectedNextError, - }, - #[snafu(transparent)] - AtBlobHeaderNext { 
- source: AtBlobHeaderNextError, - }, - #[snafu(transparent)] - Decode { - source: DecodeError, - }, - #[snafu(transparent)] - IrpcSend { - source: irpc::channel::SendError, - }, - #[snafu(transparent)] - AtClosingNext { - source: AtClosingNextError, - }, - LocalFailure { - source: anyhow::Error, - }, - BadRequest { - source: anyhow::Error, - }, + #[error(transparent)] + InitialNext { #[error(std_err)] source: InitialNextError }, + #[error(transparent)] + ConnectedNext { #[error(std_err)] source: ConnectedNextError }, + #[error(transparent)] + AtBlobHeaderNext { #[error(std_err)] source: AtBlobHeaderNextError }, + #[error(transparent)] + Decode { #[error(std_err)] source: DecodeError }, + #[error(transparent)] + IrpcSend { #[error(std_err)] source: irpc::channel::SendError }, + #[error(transparent)] + AtClosingNext { #[error(std_err)] source: AtClosingNextError }, + #[error("local failure: {source}")] + LocalFailure { #[error(std_err)] source: anyhow::Error }, + #[error("bad request: {source}")] + BadRequest { #[error(std_err)] source: anyhow::Error }, } impl GetError { diff --git a/src/get/request.rs b/src/get/request.rs index e55235cc..f607c4ca 100644 --- a/src/get/request.rs +++ b/src/get/request.rs @@ -20,12 +20,11 @@ use iroh::endpoint::Connection; use n0_future::{Stream, StreamExt}; use nested_enum_utils::enum_conversions; use rand::Rng; -use snafu::IntoError; +use n0_error::e; use tokio::sync::mpsc; use super::{fsm, GetError, GetResult, Stats}; use crate::{ - get::get_error::{BadRequestSnafu, LocalFailureSnafu}, hashseq::HashSeq, protocol::{ChunkRangesExt, ChunkRangesSeq, GetRequest}, Hash, HashAndFormat, @@ -58,7 +57,7 @@ impl GetBlobResult { let mut parts = Vec::new(); let stats = loop { let Some(item) = self.next().await else { - return Err(LocalFailureSnafu.into_error(anyhow::anyhow!("unexpected end"))); + return Err(e!(GetError::LocalFailure, anyhow::anyhow!("unexpected end"))); }; match item { GetBlobItem::Item(item) => { @@ -238,11 +237,11 @@ pub async fn get_hash_seq_and_sizes( let (at_blob_content, size) = at_start_root.next().await?; // check the size to avoid parsing a maliciously large hash seq if size > max_size { - return Err(BadRequestSnafu.into_error(anyhow::anyhow!("size too large"))); + return Err(e!(GetError::BadRequest, anyhow::anyhow!("size too large"))); } let (mut curr, hash_seq) = at_blob_content.concatenate_into_vec().await?; - let hash_seq = - HashSeq::try_from(Bytes::from(hash_seq)).map_err(|e| BadRequestSnafu.into_error(e))?; + let hash_seq = HashSeq::try_from(Bytes::from(hash_seq)) + .map_err(|e| e!(GetError::BadRequest, e.into()))?; let mut sizes = Vec::with_capacity(hash_seq.len()); let closing = loop { match curr.next() { diff --git a/src/hash.rs b/src/hash.rs index 22fe333d..3b9f5b47 100644 --- a/src/hash.rs +++ b/src/hash.rs @@ -4,11 +4,10 @@ use std::{borrow::Borrow, fmt, str::FromStr}; use arrayvec::ArrayString; use bao_tree::blake3; -use n0_snafu::SpanTrace; use nested_enum_utils::common_fields; use postcard::experimental::max_size::MaxSize; use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; -use snafu::{Backtrace, ResultExt, Snafu}; +use n0_error::{e, stack_error, StdResultExt}; use crate::store::util::DD; @@ -137,19 +136,14 @@ impl fmt::Display for Hash { } } -#[common_fields({ - backtrace: Option, - #[snafu(implicit)] - span_trace: SpanTrace, -})] #[allow(missing_docs)] #[non_exhaustive] -#[derive(Debug, Snafu)] +#[stack_error(derive, add_meta, std_sources)] pub enum HexOrBase32ParseError { - #[snafu(display("Invalid length"))] 
+ #[error("Invalid length")] DecodeInvalidLength {}, - #[snafu(display("Failed to decode {source}"))] - Decode { source: data_encoding::DecodeError }, + #[error("Failed to decode {source}")] + Decode { #[error(std_err)] source: data_encoding::DecodeError }, } impl FromStr for Hash { @@ -167,10 +161,10 @@ impl FromStr for Hash { match res { Ok(len) => { if len != 32 { - return Err(DecodeInvalidLengthSnafu.build()); + return Err(e!(HexOrBase32ParseError::DecodeInvalidLength)); } } - Err(partial) => return Err(partial.error).context(DecodeSnafu), + Err(partial) => return Err(e!(HexOrBase32ParseError::Decode, partial.error)), } Ok(Self(blake3::Hash::from_bytes(bytes))) } diff --git a/src/protocol.rs b/src/protocol.rs index db5faf06..e32ffae8 100644 --- a/src/protocol.rs +++ b/src/protocol.rs @@ -388,7 +388,7 @@ use serde::{Deserialize, Serialize}; mod range_spec; pub use bao_tree::ChunkRanges; pub use range_spec::{ChunkRangesSeq, NonEmptyRequestRangeSpecIter, RangeSpec}; -use snafu::{GenerateImplicitData, Snafu}; +use n0_error::stack_error; use crate::{api::blobs::Bitfield, util::RecvStreamExt, BlobFormat, Hash, HashAndFormat}; @@ -703,20 +703,10 @@ impl From for VarInt { } /// Unknown error_code, can not be converted into [`Closed`]. -#[derive(Debug, Snafu)] -#[snafu(display("Unknown error_code: {code}"))] +#[stack_error(derive, add_meta)] +#[error("Unknown error_code: {code}")] pub struct UnknownErrorCode { code: u64, - backtrace: Option, -} - -impl UnknownErrorCode { - pub(crate) fn new(code: u64) -> Self { - Self { - code, - backtrace: GenerateImplicitData::generate(), - } - } } impl TryFrom for Closed { @@ -727,7 +717,7 @@ impl TryFrom for Closed { 0 => Ok(Self::StreamDropped), 1 => Ok(Self::ProviderTerminating), 2 => Ok(Self::RequestReceived), - val => Err(UnknownErrorCode::new(val)), + val => Err(n0_error::e!(UnknownErrorCode { code: val })), } } } diff --git a/src/provider.rs b/src/provider.rs index 39025401..1409cd90 100644 --- a/src/provider.rs +++ b/src/provider.rs @@ -17,7 +17,7 @@ use iroh_io::{AsyncStreamReader, AsyncStreamWriter}; use n0_future::StreamExt; use quinn::ConnectionError; use serde::{Deserialize, Serialize}; -use snafu::Snafu; +use n0_error::stack_error; use tokio::select; use tracing::{debug, debug_span, Instrument}; @@ -388,15 +388,14 @@ pub async fn handle_stream( Ok(()) } -#[derive(Debug, Snafu)] -#[snafu(module)] +#[stack_error(derive, add_meta, std_sources)] pub enum HandleGetError { - #[snafu(transparent)] - ExportBao { - source: ExportBaoError, - }, - InvalidHashSeq, - InvalidOffset, + #[error(transparent)] + ExportBao { #[error(std_err)] source: ExportBaoError }, + #[error("Invalid hash sequence")] + InvalidHashSeq {}, + #[error("Invalid offset")] + InvalidOffset {}, } impl HasErrorCode for HandleGetError { @@ -404,9 +403,10 @@ impl HasErrorCode for HandleGetError { match self { HandleGetError::ExportBao { source: ExportBaoError::ClientError { source, .. }, + .. } => source.code(), - HandleGetError::InvalidHashSeq => ERR_INTERNAL, - HandleGetError::InvalidOffset => ERR_INTERNAL, + HandleGetError::InvalidHashSeq { .. } => ERR_INTERNAL, + HandleGetError::InvalidOffset { .. 
} => ERR_INTERNAL, _ => ERR_INTERNAL, } } @@ -471,10 +471,10 @@ pub async fn handle_get( Ok(()) } -#[derive(Debug, Snafu)] +#[stack_error(derive, add_meta, std_sources)] pub enum HandleGetManyError { - #[snafu(transparent)] - ExportBao { source: ExportBaoError }, + #[error(transparent)] + ExportBao { #[error(std_err)] source: ExportBaoError }, } impl HasErrorCode for HandleGetManyError { @@ -482,6 +482,7 @@ impl HasErrorCode for HandleGetManyError { match self { Self::ExportBao { source: ExportBaoError::ClientError { source, .. }, + .. } => source.code(), _ => ERR_INTERNAL, } @@ -519,19 +520,16 @@ pub async fn handle_get_many( Ok(()) } -#[derive(Debug, Snafu)] +#[stack_error(derive, add_meta, std_sources)] pub enum HandlePushError { - #[snafu(transparent)] - ExportBao { - source: ExportBaoError, - }, + #[error(transparent)] + ExportBao { #[error(std_err)] source: ExportBaoError }, - InvalidHashSeq, + #[error("Invalid hash sequence")] + InvalidHashSeq {}, - #[snafu(transparent)] - Request { - source: RequestError, - }, + #[error(transparent)] + Request { #[error(std_err)] source: RequestError }, } impl HasErrorCode for HandlePushError { @@ -539,6 +537,7 @@ impl HasErrorCode for HandlePushError { match self { Self::ExportBao { source: ExportBaoError::ClientError { source, .. }, + .. } => source.code(), _ => ERR_INTERNAL, } @@ -608,14 +607,13 @@ pub(crate) async fn send_blob( .await } -#[derive(Debug, Snafu)] +#[stack_error(derive, add_meta, std_sources)] pub enum HandleObserveError { - ObserveStreamClosed, + #[error("observe stream closed")] + ObserveStreamClosed {}, - #[snafu(transparent)] - RemoteClosed { - source: io::Error, - }, + #[error(transparent)] + RemoteClosed { #[error(std_err)] source: io::Error }, } impl HasErrorCode for HandleObserveError { diff --git a/src/provider/events.rs b/src/provider/events.rs index 932570e9..448e9f7f 100644 --- a/src/provider/events.rs +++ b/src/provider/events.rs @@ -5,7 +5,7 @@ use irpc::{ rpc_requests, Channels, WithChannels, }; use serde::{Deserialize, Serialize}; -use snafu::Snafu; +use n0_error::stack_error; use crate::{ protocol::{ @@ -85,22 +85,22 @@ pub enum AbortReason { } /// Errors that can occur when sending progress updates. -#[derive(Debug, Snafu)] +#[stack_error(derive, add_meta)] pub enum ProgressError { - Limit, - Permission, - #[snafu(transparent)] - Internal { - source: irpc::Error, - }, + #[error("limit")] + Limit {}, + #[error("permission")] + Permission {}, + #[error(transparent)] + Internal { source: irpc::Error }, } impl From for io::Error { fn from(value: ProgressError) -> Self { match value { - ProgressError::Limit => io::ErrorKind::QuotaExceeded.into(), - ProgressError::Permission => io::ErrorKind::PermissionDenied.into(), - ProgressError::Internal { source } => source.into(), + ProgressError::Limit { .. } => io::ErrorKind::QuotaExceeded.into(), + ProgressError::Permission { .. } => io::ErrorKind::PermissionDenied.into(), + ProgressError::Internal { source, .. } => source.into(), } } } @@ -112,8 +112,8 @@ pub trait HasErrorCode { impl HasErrorCode for ProgressError { fn code(&self) -> quinn::VarInt { match self { - ProgressError::Limit => ERR_LIMIT, - ProgressError::Permission => ERR_PERMISSION, + ProgressError::Limit { .. } => ERR_LIMIT, + ProgressError::Permission { .. } => ERR_PERMISSION, ProgressError::Internal { .. 
} => ERR_INTERNAL, } } @@ -122,8 +122,8 @@ impl HasErrorCode for ProgressError { impl ProgressError { pub fn reason(&self) -> &'static [u8] { match self { - ProgressError::Limit => b"limit", - ProgressError::Permission => b"permission", + ProgressError::Limit { .. } => b"limit", + ProgressError::Permission { .. } => b"permission", ProgressError::Internal { .. } => b"internal", } } @@ -132,33 +132,27 @@ impl ProgressError { impl From for ProgressError { fn from(value: AbortReason) -> Self { match value { - AbortReason::RateLimited => ProgressError::Limit, - AbortReason::Permission => ProgressError::Permission, + AbortReason::RateLimited => n0_error::e!(ProgressError::Limit), + AbortReason::Permission => n0_error::e!(ProgressError::Permission), } } } impl From for ProgressError { fn from(value: irpc::channel::mpsc::RecvError) -> Self { - ProgressError::Internal { - source: value.into(), - } + n0_error::e!(ProgressError::Internal, value.into()) } } impl From for ProgressError { fn from(value: irpc::channel::oneshot::RecvError) -> Self { - ProgressError::Internal { - source: value.into(), - } + n0_error::e!(ProgressError::Internal, value.into()) } } impl From for ProgressError { fn from(value: irpc::channel::SendError) -> Self { - ProgressError::Internal { - source: value.into(), - } + n0_error::e!(ProgressError::Internal, value.into()) } } diff --git a/src/store/fs.rs b/src/store/fs.rs index 8bf43f3d..f6293ae8 100644 --- a/src/store/fs.rs +++ b/src/store/fs.rs @@ -734,7 +734,7 @@ impl HashSpecificCommand for ExportPathMsg { _ => unreachable!(), }; self.tx - .send(ExportProgressItem::Error(api::Error::Io(err))) + .send(ExportProgressItem::Error(api::Error::from(err))) .await .ok(); } @@ -766,7 +766,7 @@ impl HashSpecificCommand for ExportRangesMsg { _ => unreachable!(), }; self.tx - .send(ExportRangesItem::Error(api::Error::Io(err))) + .send(ExportRangesItem::Error(api::Error::from(err))) .await .ok(); } @@ -781,7 +781,7 @@ impl HashSpecificCommand for ImportBaoMsg { SpawnArg::Dead => io::Error::other("entity is dead"), _ => unreachable!(), }; - self.tx.send(Err(api::Error::Io(err))).await.ok(); + self.tx.send(Err(api::Error::from(err))).await.ok(); } } impl HashSpecific for (TempTag, ImportEntryMsg) { diff --git a/src/store/fs/bao_file.rs b/src/store/fs/bao_file.rs index 3b09f8da..0502cead 100644 --- a/src/store/fs/bao_file.rs +++ b/src/store/fs/bao_file.rs @@ -740,7 +740,7 @@ impl BaoFileStorageSubscriber { tokio::select! { _ = tx.closed() => { // the sender is closed, we are done - Err(irpc::channel::SendError::ReceiverClosed.into()) + Err(n0_error::e!(irpc::channel::SendError::ReceiverClosed).into()) } e = self.receiver.changed() => Ok(e?), } diff --git a/src/store/mem.rs b/src/store/mem.rs index 76bc0e6e..76e23854 100644 --- a/src/store/mem.rs +++ b/src/store/mem.rs @@ -1069,7 +1069,7 @@ impl BaoFileStorageSubscriber { tokio::select! { _ = tx.closed() => { // the sender is closed, we are done - Err(irpc::channel::SendError::ReceiverClosed.into()) + Err(n0_error::e!(irpc::channel::SendError::ReceiverClosed).into()) } e = self.receiver.changed() => Ok(e?), } diff --git a/src/store/readonly_mem.rs b/src/store/readonly_mem.rs index cb46228c..004e1ee5 100644 --- a/src/store/readonly_mem.rs +++ b/src/store/readonly_mem.rs @@ -94,7 +94,7 @@ impl Actor { async fn handle_command(&mut self, cmd: Command) -> Option> { match cmd { Command::ImportBao(ImportBaoMsg { tx, .. 
}) => { - tx.send(Err(api::Error::Io(io::Error::other( + tx.send(Err(api::Error::from(io::Error::other( "import not supported", )))) .await diff --git a/src/util.rs b/src/util.rs index c0acfcaa..7606d759 100644 --- a/src/util.rs +++ b/src/util.rs @@ -446,7 +446,7 @@ pub(crate) mod sink { self.0 .send(value) .await - .map_err(|_| irpc::channel::SendError::ReceiverClosed) + .map_err(|_| n0_error::e!(irpc::channel::SendError::ReceiverClosed)) } } diff --git a/src/util/connection_pool.rs b/src/util/connection_pool.rs index e3c2d3a1..54f32533 100644 --- a/src/util/connection_pool.rs +++ b/src/util/connection_pool.rs @@ -27,7 +27,7 @@ use n0_future::{ future::{self}, FuturesUnordered, MaybeFuture, Stream, StreamExt, }; -use snafu::Snafu; +use n0_error::stack_error; use tokio::sync::{ mpsc::{self, error::SendError as TokioSendError}, oneshot, Notify, @@ -109,45 +109,41 @@ impl ConnectionRef { /// /// This includes the normal iroh connection errors as well as pool specific /// errors such as timeouts and connection limits. -#[derive(Debug, Clone, Snafu)] -#[snafu(module)] +#[stack_error(derive, add_meta)] pub enum PoolConnectError { /// Connection pool is shut down - Shutdown, + #[error("Connection pool is shut down")] + Shutdown {}, /// Timeout during connect - Timeout, + #[error("Timeout during connect")] + Timeout {}, /// Too many connections - TooManyConnections, + #[error("Too many connections")] + TooManyConnections {}, /// Error during connect - ConnectError { source: Arc }, + #[error(transparent)] + ConnectError { source: ConnectError }, /// Error during on_connect callback - OnConnectError { source: Arc }, + #[error(transparent)] + OnConnectError { #[error(std_err)] source: io::Error }, } impl From for PoolConnectError { - fn from(e: ConnectError) -> Self { - PoolConnectError::ConnectError { - source: Arc::new(e), - } - } + fn from(e: ConnectError) -> Self { n0_error::e!(PoolConnectError::ConnectError, e) } } impl From for PoolConnectError { - fn from(e: io::Error) -> Self { - PoolConnectError::OnConnectError { - source: Arc::new(e), - } - } + fn from(e: io::Error) -> Self { n0_error::e!(PoolConnectError::OnConnectError, e) } } /// Error when calling a fn on the [`ConnectionPool`]. /// /// The only thing that can go wrong is that the connection pool is shut down. -#[derive(Debug, Snafu)] -#[snafu(module)] +#[stack_error(derive, add_meta)] pub enum ConnectionPoolError { /// The connection pool has been shut down - Shutdown, + #[error("The connection pool has been shut down")] + Shutdown {}, } enum ActorMessage { @@ -362,7 +358,9 @@ impl Actor { trace!("removing oldest idle connection {}", idle); self.connections.remove(&idle); } else { - msg.tx.send(Err(PoolConnectError::TooManyConnections)).ok(); + msg.tx + .send(Err(n0_error::e!(PoolConnectError::TooManyConnections))) + .ok(); return; } } @@ -439,8 +437,8 @@ impl ConnectionPool { self.tx .send(ActorMessage::RequestRef(RequestRef { id, tx })) .await - .map_err(|_| PoolConnectError::Shutdown)?; - rx.await.map_err(|_| PoolConnectError::Shutdown)? + .map_err(|_| n0_error::e!(PoolConnectError::Shutdown))?; + rx.await.map_err(|_| n0_error::e!(PoolConnectError::Shutdown))? 
} /// Close an existing connection, if it exists @@ -451,7 +449,7 @@ impl ConnectionPool { self.tx .send(ActorMessage::ConnectionShutdown { id }) .await - .map_err(|_| ConnectionPoolError::Shutdown)?; + .map_err(|_| n0_error::e!(ConnectionPoolError::Shutdown))?; Ok(()) } @@ -465,7 +463,7 @@ impl ConnectionPool { self.tx .send(ActorMessage::ConnectionIdle { id }) .await - .map_err(|_| ConnectionPoolError::Shutdown)?; + .map_err(|_| n0_error::e!(ConnectionPoolError::Shutdown))?; Ok(()) } } From 8d0af3249f744e3ffdff0cf5063dd35aa9662798 Mon Sep 17 00:00:00 2001 From: Frando Date: Tue, 4 Nov 2025 16:10:37 +0100 Subject: [PATCH 03/12] fix fix fix fix --- Cargo.lock | 11 +- Cargo.toml | 5 +- README.md | 2 +- examples/common/mod.rs | 3 +- examples/compression.rs | 2 +- examples/custom-protocol.rs | 2 +- examples/expiring-tags.rs | 12 +- examples/get-blob.rs | 8 +- examples/limit.rs | 2 +- examples/mdns-discovery.rs | 4 +- examples/random_store.rs | 6 +- examples/transfer-collection.rs | 4 +- examples/transfer.rs | 2 +- src/api.rs | 45 ++++---- src/api/blobs.rs | 11 +- src/api/downloader.rs | 24 ++-- src/api/remote.rs | 98 ++++++++++------ src/format/collection.rs | 45 +++++--- src/get.rs | 160 ++++++++++++-------------- src/get/error.rs | 41 +++++-- src/get/request.rs | 16 ++- src/hash.rs | 18 +-- src/hashseq.rs | 5 +- src/net_protocol.rs | 2 +- src/provider.rs | 49 ++++---- src/provider/events.rs | 6 +- src/store/fs.rs | 19 ++-- src/store/fs/bao_file.rs | 9 +- src/store/fs/meta.rs | 194 ++++++++++++++++++++------------ src/store/fs/meta/proto.rs | 2 +- src/store/mem.rs | 19 ++-- src/store/util.rs | 6 +- src/test.rs | 4 +- src/ticket.rs | 2 +- src/util/connection_pool.rs | 34 +++--- 35 files changed, 492 insertions(+), 380 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8283597c..40bafa87 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -124,9 +124,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.98" +version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" dependencies = [ "backtrace", ] @@ -1704,8 +1704,6 @@ dependencies = [ "irpc", "n0-error", "n0-future", - "n0-snafu", - "nested_enum_utils", "postcard", "proptest", "rand 0.9.2", @@ -1718,7 +1716,6 @@ dependencies = [ "serde_json", "serde_test", "smallvec", - "snafu", "tempfile", "test-strategy", "testresult", @@ -2130,7 +2127,7 @@ dependencies = [ [[package]] name = "n0-error" version = "0.1.0" -source = "git+https://github.com/n0-computer/n0-error.git?branch=Frando%2Fanyhow-ext#1a9790b89101cd91dcebad9da28c215e991f2947" +source = "git+https://github.com/n0-computer/n0-error.git?branch=Frando%2Farc-stack#3be0e6140b9d7cbdda710f4b1bc56493a28bc323" dependencies = [ "derive_more 2.0.1", "n0-error-macros", @@ -2140,7 +2137,7 @@ dependencies = [ [[package]] name = "n0-error-macros" version = "0.1.0" -source = "git+https://github.com/n0-computer/n0-error.git?branch=Frando%2Fanyhow-ext#1a9790b89101cd91dcebad9da28c215e991f2947" +source = "git+https://github.com/n0-computer/n0-error.git?branch=Frando%2Farc-stack#3be0e6140b9d7cbdda710f4b1bc56493a28bc323" dependencies = [ "heck", "proc-macro2", diff --git a/Cargo.toml b/Cargo.toml index e06fda6e..5a3b868c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,10 +19,8 @@ derive_more = { version = "2.0.1", features = ["from", "try_from", "into", "debu futures-lite = "2.6.0" quinn = { package = "iroh-quinn", 
version = "0.14.0" } n0-future = "0.3.0" -n0-snafu = "0.2.2" range-collections = { version = "0.4.6", features = ["serde"] } smallvec = { version = "1", features = ["serde", "const_new"] } -snafu = "0.8.5" tokio = { version = "1.43.0", features = ["full"] } tokio-util = { version = "0.7.13", features = ["full"] } tracing = "0.1.41" @@ -33,7 +31,6 @@ serde = "1.0.217" postcard = { version = "1.1.1", features = ["experimental-derive", "use-std"] } data-encoding = "2.8.0" chrono = "0.4.39" -nested_enum_utils = "0.2.1" ref-cast = "1.0.24" arrayvec = "0.7.6" iroh = "0.94" @@ -75,4 +72,4 @@ fs-store = ["dep:redb", "dep:reflink-copy"] iroh = { git = "https://github.com/n0-computer/iroh.git", branch = "main" } iroh-base = { git = "https://github.com/n0-computer/iroh.git", branch = "main" } irpc = { git = "https://github.com/n0-computer/irpc.git", branch = "matheus23/irpc-n0-error" } -n0-error = { git = "https://github.com/n0-computer/n0-error.git", branch = "Frando/anyhow-ext" } +n0-error = { git = "https://github.com/n0-computer/n0-error.git", branch = "Frando/arc-stack" } diff --git a/README.md b/README.md index 0153f326..c3d87b98 100644 --- a/README.md +++ b/README.md @@ -37,7 +37,7 @@ use iroh::{protocol::Router, Endpoint}; use iroh_blobs::{store::mem::MemStore, BlobsProtocol, ticket::BlobTicket}; #[tokio::main] -async fn main() -> anyhow::Result<()> { +async fn main() -> n0_error::Result<()> { // create an iroh endpoint that includes the standard discovery mechanisms // we've built at number0 let endpoint = Endpoint::bind().await?; diff --git a/examples/common/mod.rs b/examples/common/mod.rs index 08f6c795..54a76ee7 100644 --- a/examples/common/mod.rs +++ b/examples/common/mod.rs @@ -1,6 +1,6 @@ #![allow(dead_code)] -use anyhow::Result; use iroh::SecretKey; +use n0_error::{Result, StackResultExt}; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter}; /// Gets a secret key from the IROH_SECRET environment variable or generates a new random one. 
@@ -8,7 +8,6 @@ use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilte pub fn get_or_generate_secret_key() -> Result { use std::{env, str::FromStr}; - use anyhow::Context; if let Ok(secret) = env::var("IROH_SECRET") { // Parse the secret key from string SecretKey::from_str(&secret).context("Invalid secret key format") diff --git a/examples/compression.rs b/examples/compression.rs index eb83f91d..c5ec7d9b 100644 --- a/examples/compression.rs +++ b/examples/compression.rs @@ -5,7 +5,7 @@ mod common; use std::{fmt::Debug, path::PathBuf}; -use anyhow::Result; +use n0_error::Result; use clap::Parser; use common::setup_logging; use iroh::protocol::ProtocolHandler; diff --git a/examples/custom-protocol.rs b/examples/custom-protocol.rs index 6d782f19..16a19f1b 100644 --- a/examples/custom-protocol.rs +++ b/examples/custom-protocol.rs @@ -40,7 +40,7 @@ use std::{ sync::{Arc, Mutex}, }; -use anyhow::Result; +use n0_error::Result; use clap::Parser; use iroh::{ discovery::pkarr::PkarrResolver, diff --git a/examples/expiring-tags.rs b/examples/expiring-tags.rs index e19771e8..ee7c1f6e 100644 --- a/examples/expiring-tags.rs +++ b/examples/expiring-tags.rs @@ -30,7 +30,7 @@ async fn create_expiring_tag( hashes: &[Hash], prefix: &str, expiry: SystemTime, -) -> anyhow::Result<()> { +) -> n0_error::Result<()> { let expiry = chrono::DateTime::::from(expiry); let expiry = expiry.to_rfc3339_opts(chrono::SecondsFormat::Secs, true); let tagname = format!("{prefix}-{expiry}"); @@ -53,7 +53,7 @@ async fn create_expiring_tag( Ok(()) } -async fn delete_expired_tags(blobs: &Store, prefix: &str, bulk: bool) -> anyhow::Result<()> { +async fn delete_expired_tags(blobs: &Store, prefix: &str, bulk: bool) -> n0_error::Result<()> { let prefix = format!("{prefix}-"); let now = chrono::Utc::now(); let end = format!( @@ -100,7 +100,7 @@ async fn delete_expired_tags(blobs: &Store, prefix: &str, bulk: bool) -> anyhow: Ok(()) } -async fn print_store_info(store: &Store) -> anyhow::Result<()> { +async fn print_store_info(store: &Store) -> n0_error::Result<()> { let now = chrono::Utc::now(); let mut tags = store.tags().list().await?; println!( @@ -121,7 +121,7 @@ async fn print_store_info(store: &Store) -> anyhow::Result<()> { Ok(()) } -async fn info_task(store: Store) -> anyhow::Result<()> { +async fn info_task(store: Store) -> n0_error::Result<()> { tokio::time::sleep(Duration::from_secs(1)).await; loop { print_store_info(&store).await?; @@ -129,7 +129,7 @@ async fn info_task(store: Store) -> anyhow::Result<()> { } } -async fn delete_expired_tags_task(store: Store, prefix: &str) -> anyhow::Result<()> { +async fn delete_expired_tags_task(store: Store, prefix: &str) -> n0_error::Result<()> { loop { delete_expired_tags(&store, prefix, false).await?; tokio::time::sleep(Duration::from_secs(5)).await; @@ -137,7 +137,7 @@ async fn delete_expired_tags_task(store: Store, prefix: &str) -> anyhow::Result< } #[tokio::main] -async fn main() -> anyhow::Result<()> { +async fn main() -> n0_error::Result<()> { tracing_subscriber::fmt::init(); let path = std::env::current_dir()?.join("blobs"); let options = Options { diff --git a/examples/get-blob.rs b/examples/get-blob.rs index bfaa409a..2bd9a955 100644 --- a/examples/get-blob.rs +++ b/examples/get-blob.rs @@ -25,7 +25,7 @@ pub struct Cli { } #[tokio::main] -async fn main() -> anyhow::Result<()> { +async fn main() -> n0_error::Result<()> { setup_logging(); let cli = Cli::parse(); let ticket = cli.ticket; @@ -33,7 +33,7 @@ async fn main() -> anyhow::Result<()> { 
.discovery(PkarrResolver::n0_dns()) .bind() .await?; - anyhow::ensure!( + n0_error::ensure!( ticket.format() == BlobFormat::Raw, "This example only supports raw blobs." ); @@ -54,10 +54,10 @@ async fn main() -> anyhow::Result<()> { break stats; } Some(GetBlobItem::Error(err)) => { - anyhow::bail!("Error while streaming blob: {err}"); + n0_error::bail!("Error while streaming blob: {err}"); } None => { - anyhow::bail!("Stream ended unexpectedly."); + n0_error::bail!("Stream ended unexpectedly."); } } } diff --git a/examples/limit.rs b/examples/limit.rs index 4a9a379e..dc74a1bd 100644 --- a/examples/limit.rs +++ b/examples/limit.rs @@ -18,7 +18,7 @@ use std::{ }, }; -use anyhow::Result; +use n0_error::Result; use clap::Parser; use common::setup_logging; use iroh::{protocol::Router, EndpointAddr, EndpointId, SecretKey}; diff --git a/examples/mdns-discovery.rs b/examples/mdns-discovery.rs index 638042ea..9240a87a 100644 --- a/examples/mdns-discovery.rs +++ b/examples/mdns-discovery.rs @@ -13,7 +13,7 @@ //! Run that command on another machine in the same local network, replacing [FILE_PATH] to the path on which you want to save the transferred content. use std::path::{Path, PathBuf}; -use anyhow::{ensure, Result}; +use n0_error::{ensure, Result}; use clap::{Parser, Subcommand}; use iroh::{ discovery::mdns::MdnsDiscovery, protocol::Router, Endpoint, PublicKey, RelayMode, SecretKey, @@ -127,7 +127,7 @@ async fn connect(node_id: PublicKey, hash: Hash, out: Option) -> Result } #[tokio::main] -async fn main() -> anyhow::Result<()> { +async fn main() -> n0_error::Result<()> { setup_logging(); let cli = Cli::parse(); diff --git a/examples/random_store.rs b/examples/random_store.rs index dd1dc6f0..ae296ab7 100644 --- a/examples/random_store.rs +++ b/examples/random_store.rs @@ -1,6 +1,6 @@ use std::{env, path::PathBuf, str::FromStr}; -use anyhow::{Context, Result}; +use n0_error::{Context, Result}; use clap::{Parser, Subcommand}; use iroh::{discovery::static_provider::StaticProvider, SecretKey}; use iroh_blobs::{ @@ -189,7 +189,7 @@ async fn main() -> Result<()> { } } -async fn provide(args: ProvideArgs) -> anyhow::Result<()> { +async fn provide(args: ProvideArgs) -> n0_error::Result<()> { println!("{args:?}"); let tempdir = if args.common.path.is_none() { Some(tempfile::tempdir_in(".").context("Failed to create temporary directory")?) @@ -252,7 +252,7 @@ async fn provide(args: ProvideArgs) -> anyhow::Result<()> { Ok(()) } -async fn request(args: RequestArgs) -> anyhow::Result<()> { +async fn request(args: RequestArgs) -> n0_error::Result<()> { println!("{args:?}"); let tempdir = if args.common.path.is_none() { Some(tempfile::tempdir_in(".").context("Failed to create temporary directory")?) diff --git a/examples/transfer-collection.rs b/examples/transfer-collection.rs index 73fea9cd..ab0e1983 100644 --- a/examples/transfer-collection.rs +++ b/examples/transfer-collection.rs @@ -7,7 +7,7 @@ //! $ cargo run --example transfer-collection use std::collections::HashMap; -use anyhow::{Context, Result}; +use n0_error::{Context, Result}; use iroh::{ discovery::static_provider::StaticProvider, protocol::Router, Endpoint, EndpointAddr, RelayMode, }; @@ -104,7 +104,7 @@ impl Node { } #[tokio::main] -async fn main() -> anyhow::Result<()> { +async fn main() -> n0_error::Result<()> { // create a local provider for nodes to discover each other. 
// outside of a development environment, production apps would // use `Endpoint::bind()` or a similar method diff --git a/examples/transfer.rs b/examples/transfer.rs index 65bc7db3..777ad96c 100644 --- a/examples/transfer.rs +++ b/examples/transfer.rs @@ -4,7 +4,7 @@ use iroh::{protocol::Router, Endpoint}; use iroh_blobs::{store::mem::MemStore, ticket::BlobTicket, BlobsProtocol}; #[tokio::main] -async fn main() -> anyhow::Result<()> { +async fn main() -> n0_error::Result<()> { // Create an endpoint, it allows creating and accepting // connections in the iroh p2p world let endpoint = Endpoint::bind().await?; diff --git a/src/api.rs b/src/api.rs index 813665d6..f6f165eb 100644 --- a/src/api.rs +++ b/src/api.rs @@ -19,7 +19,6 @@ use iroh::Endpoint; use irpc::rpc::{listen, RemoteService}; use n0_error::e; use n0_error::stack_error; -use nested_enum_utils::common_fields; use proto::{Request, ShutdownRequest, SyncDbRequest}; use ref_cast::RefCast; use serde::{Deserialize, Serialize}; @@ -44,7 +43,10 @@ pub enum RequestError { Rpc { source: irpc::Error }, /// Request failed due an actual error. #[error("inner error: {source}")] - Inner { #[error(std_err)] source: Error }, + Inner { + #[error(std_err)] + source: Error, + }, } impl From for RequestError { @@ -79,7 +81,7 @@ pub type RequestResult = std::result::Result; pub enum ExportBaoError { #[error("send error")] Send { source: irpc::channel::SendError }, - #[error("mpsc recv error")] + #[error("mpsc recv error")] MpscRecv { source: irpc::channel::mpsc::RecvError, }, @@ -131,10 +133,11 @@ impl From for ExportBaoError { pub type ExportBaoResult = std::result::Result; -#[derive(Debug, derive_more::Display, derive_more::From, Serialize, Deserialize)] +#[derive(Serialize, Deserialize)] +#[stack_error(derive, std_sources, from_sources)] pub enum Error { #[serde(with = "crate::util::serde::io_error_serde")] - Io(io::Error), + Io(#[error(source)] io::Error), } impl Error { @@ -150,11 +153,13 @@ impl Error { E: Into>, { Self::Io(io::Error::other(msg.into())) -} + } } impl From for Error { - fn from(e: irpc::Error) -> Self { Self::Io(e.into()) } + fn from(e: irpc::Error) -> Self { + Self::Io(e.into()) + } } impl From for Error { @@ -167,26 +172,26 @@ impl From for Error { - fn from(e: irpc::channel::mpsc::RecvError) -> Self { Self::Io(e.into()) } + fn from(e: irpc::channel::mpsc::RecvError) -> Self { + Self::Io(e.into()) + } } impl From for Error { - fn from(e: irpc::rpc::WriteError) -> Self { Self::Io(e.into()) } + fn from(e: irpc::rpc::WriteError) -> Self { + Self::Io(e.into()) + } } impl From for Error { - fn from(e: irpc::RequestError) -> Self { Self::Io(e.into()) } + fn from(e: irpc::RequestError) -> Self { + Self::Io(e.into()) + } } impl From for Error { - fn from(e: irpc::channel::SendError) -> Self { Self::Io(e.into()) } -} - -impl std::error::Error for Error { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { - match self { - Error::Io(e) => Some(e), - } + fn from(e: irpc::channel::SendError) -> Self { + Self::Io(e.into()) } } @@ -199,10 +204,6 @@ impl From for Error { } } -impl From for Error { - fn from(e: io::Error) -> Self { Self::Io(e) } -} - pub type Result = std::result::Result; /// The main entry point for the store API.
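With `Error` itself now a `stack_error` enum, the examples in this patch drop `anyhow` for the `n0_error` equivalents (`Result`, `ensure!`, `bail!`, `Context`), as the hunks above and below show. A sketch of the resulting call-site shape, assuming those re-exports from the pinned branch; `check_len` is illustrative, not part of the patch:

use n0_error::{ensure, Context, Result};

fn check_len(input: &str) -> Result<u64> {
    // `.context(...)` wraps a std error with a message, as in random_store.rs
    let len: u64 = input.parse().context("not a number")?;
    // `ensure!` stands in for anyhow::ensure!, as in get-blob.rs
    ensure!(len % 32 == 0, "length is not a multiple of 32");
    Ok(len)
}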
diff --git a/src/api/blobs.rs b/src/api/blobs.rs index 82233e71..a79395a7 100644 --- a/src/api/blobs.rs +++ b/src/api/blobs.rs @@ -25,6 +25,7 @@ use bytes::Bytes; use genawaiter::sync::Gen; use iroh_io::AsyncStreamWriter; use irpc::channel::{mpsc, oneshot}; +use n0_error::AnyError; use n0_future::{future, stream, Stream, StreamExt}; use range_collections::{range_set::RangeSetRange, RangeSet2}; use ref_cast::RefCast; @@ -113,7 +114,7 @@ impl Blobs { /// use iroh_blobs::{store::mem::MemStore, api::blobs::Blobs}; /// use tokio::io::AsyncReadExt; /// - /// # async fn example() -> anyhow::Result<()> { + /// # async fn example() -> n0_error::Result<()> { /// let store = MemStore::new(); /// let tag = store.add_slice(b"Hello, world!").await?; /// let mut reader = store.reader(tag.hash); @@ -291,7 +292,7 @@ impl Blobs { sender.send(ImportByteStreamUpdate::Bytes(item?)).await?; } sender.send(ImportByteStreamUpdate::Done).await?; - anyhow::Ok(()) + n0_error::Ok(()) }; let _ = tokio::join!(send, recv); }); @@ -972,14 +973,14 @@ impl ExportBaoProgress { /// to get all non-corrupted sections. pub fn hashes_with_index( self, - ) -> impl Stream> { + ) -> impl Stream> { let mut stream = self.stream(); Gen::new(|co| async move { while let Some(item) = stream.next().await { let leaf = match item { EncodedItem::Leaf(leaf) => leaf, EncodedItem::Error(e) => { - co.yield_(Err(e.into())).await; + co.yield_(Err(AnyError::from_std(e))).await; continue; } _ => continue, @@ -1000,7 +1001,7 @@ impl ExportBaoProgress { } /// Same as [`Self::hashes_with_index`], but without the indexes. - pub fn hashes(self) -> impl Stream> { + pub fn hashes(self) -> impl Stream> { self.hashes_with_index().map(|x| x.map(|(_, hash)| hash)) } diff --git a/src/api/downloader.rs b/src/api/downloader.rs index fffacc14..d9fdd6fd 100644 --- a/src/api/downloader.rs +++ b/src/api/downloader.rs @@ -6,10 +6,10 @@ use std::{ sync::Arc, }; -use anyhow::bail; use genawaiter::sync::Gen; use iroh::{Endpoint, EndpointId}; use irpc::{channel::mpsc, rpc_requests}; +use n0_error::{anyerr, Result}; use n0_future::{future, stream, BufferedStreamExt, Stream, StreamExt}; use rand::seq::SliceRandom; use serde::{de::Error, Deserialize, Serialize}; @@ -48,7 +48,7 @@ struct DownloaderActor { #[derive(Debug, Serialize, Deserialize)] pub enum DownloadProgressItem { #[serde(skip)] - Error(anyhow::Error), + Error(n0_error::AnyError), TryProvider { id: EndpointId, request: Arc, @@ -107,7 +107,7 @@ async fn handle_download_impl( pool: ConnectionPool, request: DownloadRequest, tx: &mut mpsc::Sender, -) -> anyhow::Result<()> { +) -> Result<()> { match request.strategy { SplitStrategy::Split => handle_download_split_impl(store, pool, request, tx).await?, SplitStrategy::None => match request.request { @@ -128,7 +128,7 @@ async fn handle_download_split_impl( pool: ConnectionPool, request: DownloadRequest, tx: &mut mpsc::Sender, -) -> anyhow::Result<()> { +) -> Result<()> { let providers = request.providers; let requests = split_request(&request.request, &providers, &pool, &store, Drain).await?; let (progress_tx, progress_rx) = tokio::sync::mpsc::channel(32); @@ -314,14 +314,16 @@ impl DownloadProgress { }))) } - async fn complete(self) -> anyhow::Result<()> { + async fn complete(self) -> Result<()> { let rx = self.fut.await?; let stream = rx.into_stream(); tokio::pin!(stream); while let Some(item) = stream.next().await { match item? 
{ DownloadProgressItem::Error(e) => Err(e)?, - DownloadProgressItem::DownloadError => anyhow::bail!("Download error"), + DownloadProgressItem::DownloadError => { + n0_error::bail_any!("Download error"); + } _ => {} } } @@ -330,7 +332,7 @@ impl DownloadProgress { } impl IntoFuture for DownloadProgress { - type Output = anyhow::Result<()>; + type Output = Result<()>; type IntoFuture = future::Boxed; fn into_future(self) -> Self::IntoFuture { @@ -373,7 +375,7 @@ async fn split_request<'a>( pool: &ConnectionPool, store: &Store, progress: impl Sink, -) -> anyhow::Result + Send + 'a>> { +) -> Result + Send + 'a>> { Ok(match request { FiniteRequest::Get(req) => { let Some(_first) = req.ranges.iter_infinite().next() else { @@ -382,7 +384,7 @@ async fn split_request<'a>( let first = GetRequest::blob(req.hash); execute_get(pool, Arc::new(first), providers, store, progress).await?; let size = store.observe(req.hash).await?.size(); - anyhow::ensure!(size % 32 == 0, "Size is not a multiple of 32"); + n0_error::ensure_any!(size % 32 == 0, "Size is not a multiple of 32"); let n = size / 32; Box::new( req.ranges @@ -429,7 +431,7 @@ async fn execute_get( providers: &Arc, store: &Store, mut progress: impl Sink, -) -> anyhow::Result<()> { +) -> Result<()> { let remote = store.remote(); let mut providers = providers.find_providers(request.content()); while let Some(provider) = providers.next().await { @@ -481,7 +483,7 @@ async fn execute_get( } } } - bail!("Unable to download {}", request.hash); + Err(anyerr!("Unable to download {}", request.hash)) } /// Trait for pluggable content discovery strategies. diff --git a/src/api/remote.rs b/src/api/remote.rs index fb34af00..f8a0c129 100644 --- a/src/api/remote.rs +++ b/src/api/remote.rs @@ -15,11 +15,10 @@ use bao_tree::{ use genawaiter::sync::{Co, Gen}; use iroh::endpoint::Connection; use irpc::util::{AsyncReadVarintExt, WriteVarintExt}; +use n0_error::{e, AnyError, StdResultExt}; +use n0_error::{stack_error, Result}; use n0_future::{io, Stream, StreamExt}; -use n0_error::stack_error; -use nested_enum_utils::common_fields; use ref_cast::RefCast; -use n0_error::{e, StdResultExt}; use tracing::{debug, trace}; use super::blobs::{Bitfield, ExportBaoOptions}; @@ -123,7 +122,10 @@ impl GetProgress { pub async fn complete(self) -> GetResult { just_result(self.stream()).await.unwrap_or_else(|| { - Err(e!(GetError::LocalFailure, anyhow::anyhow!("stream closed without result"))) + Err(e!( + GetError::LocalFailure, + n0_error::anyerr!("stream closed without result") + )) }) } } @@ -135,11 +137,11 @@ pub enum PushProgressItem { /// The request was completed. Done(Stats), /// The request was closed, but not completed. 
- Error(anyhow::Error), + Error(AnyError), } -impl From> for PushProgressItem { - fn from(res: anyhow::Result) -> Self { +impl From> for PushProgressItem { + fn from(res: Result) -> Self { match res { Ok(stats) => Self::Done(stats), Err(e) => Self::Error(e), @@ -147,7 +149,7 @@ impl From> for PushProgressItem { } } -impl TryFrom for anyhow::Result { +impl TryFrom for Result { type Error = &'static str; fn try_from(item: PushProgressItem) -> Result { @@ -165,7 +167,7 @@ pub struct PushProgress { } impl IntoFuture for PushProgress { - type Output = anyhow::Result; + type Output = Result; type IntoFuture = n0_future::boxed::BoxFuture; fn into_future(self) -> n0_future::boxed::BoxFuture { @@ -178,10 +180,10 @@ impl PushProgress { into_stream(self.rx, self.fut) } - pub async fn complete(self) -> anyhow::Result { + pub async fn complete(self) -> Result { just_result(self.stream()) .await - .unwrap_or_else(|| Err(anyhow::anyhow!("stream closed without result"))) + .unwrap_or_else(|| Err(n0_error::anyerr!("stream closed without result"))) } } @@ -440,7 +442,7 @@ impl Remote { pub async fn local_for_request( &self, request: impl Into>, - ) -> anyhow::Result { + ) -> Result { let request = request.into(); let root = request.hash; let bitfield = self.store().observe(root).await?; @@ -493,7 +495,7 @@ impl Remote { } /// Get the local info for a given blob or hash sequence, at the present time. - pub async fn local(&self, content: impl Into) -> anyhow::Result { + pub async fn local(&self, content: impl Into) -> Result { let request = GetRequest::from(content.into()); self.local_for_request(request).await } @@ -535,7 +537,7 @@ impl Remote { let local = self .local(content) .await - .map_err(|e: anyhow::Error| e!(GetError::LocalFailure, e))?; + .map_err(|e| e!(GetError::LocalFailure, e))?; if local.is_complete() { return Ok(Default::default()); } @@ -598,16 +600,16 @@ impl Remote { conn: Connection, request: PushRequest, progress: impl Sink, - ) -> anyhow::Result { + ) -> Result { let hash = request.hash; debug!(%hash, "pushing"); - let (mut send, mut recv) = conn.open_bi().await?; + let (mut send, mut recv) = conn.open_bi().await.anyerr()?; let mut context = StreamContext { payload_bytes_sent: 0, sender: progress, }; // we are not going to need this! - recv.stop(0u32.into())?; + recv.stop(0u32.into()).anyerr()?; // write the request. Unlike for reading, we can just serialize it sync using postcard. let request = write_push_request(request, &mut send).await?; let mut request_ranges = request.ranges.iter_infinite(); @@ -621,7 +623,7 @@ impl Remote { } if request.ranges.is_blob() { // we are done - send.finish()?; + send.finish().anyerr()?; return Ok(Default::default()); } let hash_seq = self.store().get_bytes(root).await?; @@ -636,7 +638,7 @@ impl Remote { .await?; } } - send.finish()?; + send.finish().anyerr()?; Ok(Default::default()) } @@ -680,7 +682,10 @@ impl Remote { let store = self.store(); let root = request.hash; let conn = conn.open_stream_pair().await.map_err(|e| { - e!(GetError::LocalFailure, anyhow::anyhow!("failed to open stream pair: {e}")) + e!( + GetError::LocalFailure, + n0_error::anyerr!("failed to open stream pair: {e}") + ) })?; // I am cloning the connection, but it's fine because the original connection or ConnectionRef stays alive // for the duration of the operation. @@ -818,25 +823,43 @@ impl Remote { pub enum ExecuteError { /// Network or IO operation failed. 
#[error("Unable to open bidi stream")] - Connection { source: iroh::endpoint::ConnectionError }, + Connection { + #[error(std_err)] + source: iroh::endpoint::ConnectionError, + }, #[error("Unable to read from the remote")] - Read { source: iroh::endpoint::ReadError }, + Read { + #[error(std_err)] + source: iroh::endpoint::ReadError, + }, #[error("Error sending the request")] - Send { source: crate::get::fsm::ConnectedNextError }, + Send { + #[error(std_err)] + source: crate::get::fsm::ConnectedNextError, + }, #[error("Unable to read size")] - Size { source: crate::get::fsm::AtBlobHeaderNextError }, + Size { + #[error(std_err)] + source: crate::get::fsm::AtBlobHeaderNextError, + }, #[error("Error while decoding the data")] - Decode { source: crate::get::fsm::DecodeError }, + Decode { + #[error(std_err)] + source: crate::get::fsm::DecodeError, + }, #[error("Internal error while reading the hash sequence")] ExportBao { source: api::ExportBaoError }, #[error("Hash sequence has an invalid length")] - InvalidHashSeq { #[error(std_err)] source: anyhow::Error }, + InvalidHashSeq { source: AnyError }, #[error("Internal error importing the data")] ImportBao { source: crate::api::RequestError }, #[error("Error sending download progress - receiver closed")] SendDownloadProgress { source: irpc::channel::SendError }, #[error("Internal error importing the data")] - MpscSend { #[error(std_err)] source: tokio::sync::mpsc::error::SendError }, + MpscSend { + #[error(std_err)] + source: tokio::sync::mpsc::error::SendError, + }, } pub trait GetStreamPair: Send + 'static { @@ -880,7 +903,10 @@ async fn get_blob_ranges_impl( let end = content.drain().await.map_err(|e| e!(GetError::Decode, e))?; Ok(end) } else { - Err(e!(GetError::Decode, DecodeError::leaf_hash_mismatch(ChunkNum(0)))) + Err(e!( + GetError::Decode, + DecodeError::leaf_hash_mismatch(ChunkNum(0)) + )) }; }; let buffer_size = get_buffer_size(size); @@ -898,7 +924,8 @@ async fn get_blob_ranges_impl( .send(next.stats().payload_bytes_read) .await .map_err(|e| e!(GetError::LocalFailure, e.into()))?; - handle.tx + handle + .tx .send(item) .await .map_err(|e| e!(GetError::IrpcSend, e))?; @@ -913,7 +940,10 @@ async fn get_blob_ranges_impl( }; let complete = async move { handle.rx.await.map_err(|e| { - e!(GetError::LocalFailure, anyhow::anyhow!("error reading from import stream: {e}")) + e!( + GetError::LocalFailure, + n0_error::anyerr!("error reading from import stream: {e}") + ) }) }; let (_, end) = tokio::try_join!(complete, write)?; @@ -936,7 +966,7 @@ pub(crate) struct HashSeqChunk { } impl TryFrom for HashSeqChunk { - type Error = anyhow::Error; + type Error = AnyError; fn try_from(leaf: Leaf) -> Result { let offset = leaf.offset; @@ -983,7 +1013,7 @@ impl LazyHashSeq { } #[allow(dead_code)] - pub async fn get_from_offset(&mut self, offset: u64) -> anyhow::Result> { + pub async fn get_from_offset(&mut self, offset: u64) -> Result> { if offset == 0 { Ok(Some(self.hash)) } else { @@ -992,7 +1022,7 @@ impl LazyHashSeq { } #[allow(dead_code)] - pub async fn get(&mut self, child_offset: u64) -> anyhow::Result> { + pub async fn get(&mut self, child_offset: u64) -> Result> { // check if we have the hash in the current chunk if let Some(chunk) = &self.current_chunk { if let Some(hash) = chunk.get(child_offset) { @@ -1015,7 +1045,7 @@ impl LazyHashSeq { async fn write_push_request( request: PushRequest, stream: &mut impl SendStream, -) -> anyhow::Result { +) -> Result { let mut request_bytes = Vec::new(); request_bytes.push(RequestType::Push as u8); 
request_bytes.write_length_prefixed(&request).unwrap(); diff --git a/src/format/collection.rs b/src/format/collection.rs index fd8884fd..b3cb5ec2 100644 --- a/src/format/collection.rs +++ b/src/format/collection.rs @@ -1,9 +1,10 @@ //! The collection type used by iroh use std::{collections::BTreeMap, future::Future}; -use anyhow::Context; +// n0_error::Context is no longer exported; use explicit mapping instead. use bao_tree::blake3; use bytes::Bytes; +use n0_error::{Result, StdResultExt}; use serde::{Deserialize, Serialize}; use crate::{ @@ -66,11 +67,11 @@ impl IntoIterator for Collection { /// A simple store trait for loading blobs pub trait SimpleStore { /// Load a blob from the store - fn load(&self, hash: Hash) -> impl Future> + Send + '_; + fn load(&self, hash: Hash) -> impl Future> + Send + '_; } impl SimpleStore for crate::api::Store { - async fn load(&self, hash: Hash) -> anyhow::Result { + async fn load(&self, hash: Hash) -> Result { Ok(self.get_bytes(hash).await?) } } @@ -115,23 +116,26 @@ impl Collection { /// the links array, and the collection. pub async fn read_fsm( fsm_at_start_root: fsm::AtStartRoot, - ) -> anyhow::Result<(fsm::EndBlobNext, HashSeq, Collection)> { + ) -> Result<(fsm::EndBlobNext, HashSeq, Collection)> { let (next, links) = { let curr = fsm_at_start_root.next(); let (curr, data) = curr.concatenate_into_vec().await?; - let links = HashSeq::new(data.into()).context("links could not be parsed")?; + let links = HashSeq::new(data.into()) + .ok_or_else(|| n0_error::anyerr!("links could not be parsed"))?; (curr.next(), links) }; let fsm::EndBlobNext::MoreChildren(at_meta) = next else { - anyhow::bail!("expected meta"); + n0_error::bail_any!("expected meta"); }; let (next, collection) = { let mut children = links.clone(); - let meta_link = children.pop_front().context("meta link not found")?; + let meta_link = children + .pop_front() + .ok_or_else(|| n0_error::anyerr!("meta link not found"))?; let curr = at_meta.next(meta_link); let (curr, names) = curr.concatenate_into_vec().await?; - let names = postcard::from_bytes::(&names)?; - anyhow::ensure!( + let names = postcard::from_bytes::(&names).anyerr()?; + n0_error::ensure_any!( names.header == *Self::HEADER, "expected header {:?}, got {:?}", Self::HEADER, @@ -148,7 +152,7 @@ impl Collection { /// Returns the collection, a map from blob offsets to bytes, and the stats. pub async fn read_fsm_all( fsm_at_start_root: crate::get::fsm::AtStartRoot, - ) -> anyhow::Result<(Collection, BTreeMap, Stats)> { + ) -> Result<(Collection, BTreeMap, Stats)> { let (next, links, collection) = Self::read_fsm(fsm_at_start_root).await?; let mut res = BTreeMap::new(); let mut curr = next; @@ -156,7 +160,7 @@ impl Collection { match curr { fsm::EndBlobNext::MoreChildren(more) => { let child_offset = more.offset() - 1; - let Some(hash) = links.get(usize::try_from(child_offset)?) else { + let Some(hash) = links.get(usize::try_from(child_offset).anyerr()?) else { break more.finish(); }; let header = more.next(hash); @@ -172,13 +176,16 @@ impl Collection { } /// Create a new collection from a hash sequence and metadata. 
- pub async fn load(root: Hash, store: &impl SimpleStore) -> anyhow::Result { + pub async fn load(root: Hash, store: &impl SimpleStore) -> Result { let hs = store.load(root).await?; let hs = HashSeq::try_from(hs)?; - let meta_hash = hs.iter().next().context("empty hash seq")?; + let meta_hash = hs + .iter() + .next() + .ok_or_else(|| n0_error::anyerr!("empty hash seq"))?; let meta = store.load(meta_hash).await?; - let meta: CollectionMeta = postcard::from_bytes(&meta)?; - anyhow::ensure!( + let meta: CollectionMeta = postcard::from_bytes(&meta).anyerr()?; + n0_error::ensure_any!( meta.names.len() + 1 == hs.len(), "names and links length mismatch" ); @@ -187,9 +194,9 @@ impl Collection { /// Store a collection in a store. returns the root hash of the collection /// as a TempTag. - pub async fn store(self, db: &Store) -> anyhow::Result { + pub async fn store(self, db: &Store) -> Result { let (links, meta) = self.into_parts(); - let meta_bytes = postcard::to_stdvec(&meta)?; + let meta_bytes = postcard::to_stdvec(&meta).anyerr()?; let meta_tag = db.add_bytes(meta_bytes).temp_tag().await?; let links_bytes = std::iter::once(meta_tag.hash()) .chain(links) @@ -257,6 +264,8 @@ impl Collection { #[cfg(test)] mod tests { + use n0_error::{Result, StackResultExt}; + use super::*; #[test] @@ -322,7 +331,7 @@ mod tests { } impl SimpleStore for TestStore { - async fn load(&self, hash: Hash) -> anyhow::Result { + async fn load(&self, hash: Hash) -> Result { self.0.get(&hash).cloned().context("not found") } } diff --git a/src/get.rs b/src/get.rs index d46ca899..5f5358fe 100644 --- a/src/get.rs +++ b/src/get.rs @@ -21,13 +21,10 @@ use std::{ time::{Duration, Instant}, }; -use anyhow::Result; use bao_tree::{io::fsm::BaoContentItem, ChunkNum}; use fsm::RequestCounters; -use n0_snafu::SpanTrace; -use nested_enum_utils::common_fields; +use n0_error::Result; use serde::{Deserialize, Serialize}; -use snafu::{Backtrace, IntoError, ResultExt, Snafu}; use tracing::{debug, error}; use crate::{ @@ -117,10 +114,10 @@ pub mod fsm { use derive_more::From; use iroh::endpoint::Connection; use iroh_io::AsyncSliceWriter; + use n0_error::{e, stack_error, AnyError}; use super::*; use crate::{ - protocol::{ GetManyRequest, GetRequest, NonEmptyRequestRangeSpecIter, Request, MAX_MESSAGE_SIZE, }, @@ -154,14 +151,14 @@ pub mod fsm { let (mut writer, reader) = connection .open_bi() .await - .map_err(|e| OpenSnafu.into_error(e.into()))?; + .map_err(|e| e!(InitialNextError::Open, e.into()))?; let request = Request::GetMany(request); let request_bytes = postcard::to_stdvec(&request) - .map_err(|source| n0_error::e!(GetError::BadRequest, source.into()))?; + .map_err(|source| e!(GetError::BadRequest, AnyError::from_std(source)))?; writer .send_bytes(request_bytes.into()) .await - .context(connected_next_error::WriteSnafu)?; + .map_err(|source| e!(ConnectedNextError::Write, source))?; let Request::GetMany(request) = request else { unreachable!(); }; @@ -245,7 +242,7 @@ pub mod fsm { .connection .open_bi() .await - .map_err(|e| OpenSnafu.into_error(e.into()))?; + .map_err(|e| e!(InitialNextError::Open, e.into()))?; Ok(AtConnected { start, reader, @@ -256,17 +253,14 @@ pub mod fsm { } } - /// Error that you can get from [`AtConnected::next`] - #[common_fields({ - backtrace: Option, - #[snafu(implicit)] - span_trace: SpanTrace, - })] - #[allow(missing_docs)] - #[derive(Debug, Snafu)] - #[non_exhaustive] + /// Error that you can get from [`AtInitial::next`] + #[stack_error(derive, add_meta, from_sources)] pub enum InitialNextError { - Open { 
source: io::Error }, + #[error("open: {source}")] + Open { + #[error(std_err)] + source: io::Error, + }, } /// State of the get response machine after the handshake has been sent @@ -291,25 +285,23 @@ pub mod fsm { } /// Error that you can get from [`AtConnected::next`] - #[common_fields({ - backtrace: Option, - #[snafu(implicit)] - span_trace: SpanTrace, - })] - #[allow(missing_docs)] - #[derive(Debug, Snafu)] - #[snafu(module)] - #[non_exhaustive] + #[stack_error(derive, add_meta)] pub enum ConnectedNextError { /// Error when serializing the request - #[snafu(display("postcard ser: {source}"))] - PostcardSer { source: postcard::Error }, + #[error("postcard ser: {source}")] + PostcardSer { + #[error(std_err)] + source: postcard::Error, + }, /// The serialized request is too long to be sent - #[snafu(display("request too big"))] + #[error("request too big")] RequestTooBig {}, /// Error when writing the request to the [`SendStream`]. - #[snafu(display("write: {source}"))] - Write { source: io::Error }, + #[error("write: {source}")] + Write { + #[error(std_err)] + source: io::Error, + }, } impl AtConnected { @@ -348,14 +340,14 @@ pub mod fsm { debug!("sending request"); let wrapped = Request::Get(request); let request_bytes = postcard::to_stdvec(&wrapped) - .context(connected_next_error::PostcardSerSnafu)?; + .map_err(|source| e!(ConnectedNextError::PostcardSer, source))?; let Request::Get(x) = wrapped else { unreachable!(); }; request = x; if request_bytes.len() > MAX_MESSAGE_SIZE { - return Err(connected_next_error::RequestTooBigSnafu.build()); + return Err(e!(ConnectedNextError::RequestTooBig)); } // write the request itself @@ -363,11 +355,11 @@ pub mod fsm { writer .send_bytes(request_bytes.into()) .await - .context(connected_next_error::WriteSnafu)?; + .map_err(|source| e!(ConnectedNextError::Write, source))?; writer .sync() .await - .context(connected_next_error::WriteSnafu)?; + .map_err(|source| e!(ConnectedNextError::Write, source))?; len }; @@ -501,23 +493,19 @@ pub mod fsm { } /// Error that you can get from [`AtBlobHeader::next`] - #[common_fields({ - backtrace: Option, - #[snafu(implicit)] - span_trace: SpanTrace, - })] - #[non_exhaustive] - #[derive(Debug, Snafu)] - #[snafu(module)] + #[stack_error(derive, add_meta)] pub enum AtBlobHeaderNextError { /// Eof when reading the size header /// /// This indicates that the provider does not have the requested data. - #[snafu(display("not found"))] + #[error("not found")] NotFound {}, /// Generic io error - #[snafu(display("io: {source}"))] - Read { source: io::Error }, + #[error("io: {source}")] + Read { + #[error(std_err)] + source: io::Error, + }, } impl From for io::Error { @@ -537,9 +525,9 @@ pub mod fsm { let mut size = [0; 8]; self.reader.recv_exact(&mut size).await.map_err(|cause| { if cause.kind() == io::ErrorKind::UnexpectedEof { - at_blob_header_next_error::NotFoundSnafu.build() + e!(AtBlobHeaderNextError::NotFound) } else { - at_blob_header_next_error::ReadSnafu.into_error(cause) + e!(AtBlobHeaderNextError::Read, cause) } })?; self.misc.other_bytes_read += 8; @@ -644,51 +632,49 @@ pub mod fsm { /// variants indicate that the provider has sent us invalid data. A well-behaved /// provider should never do this, so this is an indication that the provider is /// not behaving correctly. 
- #[common_fields({ - backtrace: Option, - #[snafu(implicit)] - span_trace: SpanTrace, - })] #[non_exhaustive] - #[derive(Debug, Snafu)] - #[snafu(module)] + #[stack_error(derive, add_meta)] pub enum DecodeError { /// A chunk was not found or invalid, so the provider stopped sending data - #[snafu(display("not found"))] + #[error("not found")] ChunkNotFound {}, /// A parent was not found or invalid, so the provider stopped sending data - #[snafu(display("parent not found {node:?}"))] + #[error("parent not found {node:?}")] ParentNotFound { node: TreeNode }, /// A parent was not found or invalid, so the provider stopped sending data - #[snafu(display("chunk not found {num}"))] + #[error("chunk not found {num}")] LeafNotFound { num: ChunkNum }, /// The hash of a parent did not match the expected hash - #[snafu(display("parent hash mismatch: {node:?}"))] + #[error("parent hash mismatch: {node:?}")] ParentHashMismatch { node: TreeNode }, /// The hash of a leaf did not match the expected hash - #[snafu(display("leaf hash mismatch: {num}"))] + #[error("leaf hash mismatch: {num}")] LeafHashMismatch { num: ChunkNum }, /// Error when reading from the stream - #[snafu(display("read: {source}"))] - Read { source: io::Error }, + #[error("read: {source}")] + Read { + #[error(std_err)] + source: io::Error, + }, /// A generic io error - #[snafu(display("io: {source}"))] - Write { source: io::Error }, + #[error("io: {source}")] + Write { + #[error(std_err)] + source: io::Error, + }, } impl DecodeError { pub(crate) fn leaf_hash_mismatch(num: ChunkNum) -> Self { - decode_error::LeafHashMismatchSnafu { num }.build() + e!(DecodeError::LeafHashMismatch { num }) } } impl From for DecodeError { fn from(cause: AtBlobHeaderNextError) -> Self { match cause { - AtBlobHeaderNextError::NotFound { .. } => decode_error::ChunkNotFoundSnafu.build(), - AtBlobHeaderNextError::Read { source, .. } => { - decode_error::ReadSnafu.into_error(source) - } + AtBlobHeaderNextError::NotFound { .. } => e!(DecodeError::ChunkNotFound), + AtBlobHeaderNextError::Read { source, .. 
} => e!(DecodeError::Read, source), } } } @@ -713,18 +699,18 @@ pub mod fsm { fn from(value: bao_tree::io::DecodeError) -> Self { match value { bao_tree::io::DecodeError::ParentNotFound(node) => { - decode_error::ParentNotFoundSnafu { node }.build() + e!(DecodeError::ParentNotFound { node }) } bao_tree::io::DecodeError::LeafNotFound(num) => { - decode_error::LeafNotFoundSnafu { num }.build() + e!(DecodeError::LeafNotFound { num }) } bao_tree::io::DecodeError::ParentHashMismatch(node) => { - decode_error::ParentHashMismatchSnafu { node }.build() + e!(DecodeError::ParentHashMismatch { node }) } bao_tree::io::DecodeError::LeafHashMismatch(num) => { - decode_error::LeafHashMismatchSnafu { num }.build() + e!(DecodeError::LeafHashMismatch { num }) } - bao_tree::io::DecodeError::Io(cause) => decode_error::ReadSnafu.into_error(cause), + bao_tree::io::DecodeError::Io(cause) => e!(DecodeError::Read, cause), } } } @@ -854,13 +840,13 @@ pub mod fsm { outboard .save(parent.node, &parent.pair) .await - .map_err(|e| decode_error::WriteSnafu.into_error(e))?; + .map_err(|e| e!(DecodeError::Write, e))?; } } BaoContentItem::Leaf(leaf) => { data.write_bytes_at(leaf.offset, leaf.data) .await - .map_err(|e| decode_error::WriteSnafu.into_error(e))?; + .map_err(|e| e!(DecodeError::Write, e))?; } } } @@ -886,7 +872,7 @@ pub mod fsm { BaoContentItem::Leaf(leaf) => { data.write_bytes_at(leaf.offset, leaf.data) .await - .map_err(|e| decode_error::WriteSnafu.into_error(e))?; + .map_err(|e| e!(DecodeError::Write, e))?; } } } @@ -971,18 +957,14 @@ pub mod fsm { } /// Error that you can get from [`AtBlobHeader::next`] - #[common_fields({ - backtrace: Option, - #[snafu(implicit)] - span_trace: SpanTrace, - })] - #[non_exhaustive] - #[derive(Debug, Snafu)] - #[snafu(module)] + #[stack_error(derive, add_meta, from_sources)] pub enum AtClosingNextError { /// Generic io error - #[snafu(transparent)] - Read { source: io::Error }, + #[error(transparent)] + Read { + #[error(std_err)] + source: io::Error, + }, } #[derive(Debug, Serialize, Deserialize, Default, Clone, Copy, PartialEq, Eq)] diff --git a/src/get/error.rs b/src/get/error.rs index c60b7e0c..33e79afe 100644 --- a/src/get/error.rs +++ b/src/get/error.rs @@ -3,8 +3,7 @@ use std::io; use iroh::endpoint::{ConnectionError, ReadError, VarInt, WriteError}; use n0_error::stack_error; -use nested_enum_utils::common_fields; -use n0_error::e; +use n0_error::AnyError; use crate::get::fsm::{ AtBlobHeaderNextError, AtClosingNextError, ConnectedNextError, DecodeError, InitialNextError, @@ -14,21 +13,39 @@ use crate::get::fsm::{ #[stack_error(derive, add_meta)] pub enum GetError { #[error(transparent)] - InitialNext { #[error(std_err)] source: InitialNextError }, + InitialNext { + #[error(from)] + source: InitialNextError, + }, #[error(transparent)] - ConnectedNext { #[error(std_err)] source: ConnectedNextError }, + ConnectedNext { + #[error(from)] + source: ConnectedNextError, + }, #[error(transparent)] - AtBlobHeaderNext { #[error(std_err)] source: AtBlobHeaderNextError }, + AtBlobHeaderNext { + #[error(from)] + source: AtBlobHeaderNextError, + }, #[error(transparent)] - Decode { #[error(std_err)] source: DecodeError }, + Decode { + #[error(from)] + source: DecodeError, + }, #[error(transparent)] - IrpcSend { #[error(std_err)] source: irpc::channel::SendError }, + IrpcSend { + #[error(from)] + source: irpc::channel::SendError, + }, #[error(transparent)] - AtClosingNext { #[error(std_err)] source: AtClosingNextError }, - #[error("local failure: {source}")] - LocalFailure { 
#[error(std_err)] source: anyhow::Error }, - #[error("bad request: {source}")] - BadRequest { #[error(std_err)] source: anyhow::Error }, + AtClosingNext { + #[error(from)] + source: AtClosingNextError, + }, + #[error("local failure")] + LocalFailure { source: AnyError }, + #[error("bad request")] + BadRequest { source: AnyError }, } impl GetError { diff --git a/src/get/request.rs b/src/get/request.rs index f607c4ca..7d3cfa5d 100644 --- a/src/get/request.rs +++ b/src/get/request.rs @@ -17,10 +17,10 @@ use bao_tree::{io::BaoContentItem, ChunkNum, ChunkRanges}; use bytes::Bytes; use genawaiter::sync::{Co, Gen}; use iroh::endpoint::Connection; +use n0_error::e; use n0_future::{Stream, StreamExt}; use nested_enum_utils::enum_conversions; use rand::Rng; -use n0_error::e; use tokio::sync::mpsc; use super::{fsm, GetError, GetResult, Stats}; @@ -57,7 +57,10 @@ impl GetBlobResult { let mut parts = Vec::new(); let stats = loop { let Some(item) = self.next().await else { - return Err(e!(GetError::LocalFailure, anyhow::anyhow!("unexpected end"))); + return Err(e!( + GetError::LocalFailure, + n0_error::anyerr!("unexpected end") + )); }; match item { GetBlobItem::Item(item) => { @@ -237,11 +240,14 @@ pub async fn get_hash_seq_and_sizes( let (at_blob_content, size) = at_start_root.next().await?; // check the size to avoid parsing a maliciously large hash seq if size > max_size { - return Err(e!(GetError::BadRequest, anyhow::anyhow!("size too large"))); + return Err(e!( + GetError::BadRequest, + n0_error::anyerr!("size too large") + )); } let (mut curr, hash_seq) = at_blob_content.concatenate_into_vec().await?; - let hash_seq = HashSeq::try_from(Bytes::from(hash_seq)) - .map_err(|e| e!(GetError::BadRequest, e.into()))?; + let hash_seq = + HashSeq::try_from(Bytes::from(hash_seq)).map_err(|e| e!(GetError::BadRequest, e.into()))?; let mut sizes = Vec::with_capacity(hash_seq.len()); let closing = loop { match curr.next() { diff --git a/src/hash.rs b/src/hash.rs index 3b9f5b47..88fc2d2d 100644 --- a/src/hash.rs +++ b/src/hash.rs @@ -4,10 +4,9 @@ use std::{borrow::Borrow, fmt, str::FromStr}; use arrayvec::ArrayString; use bao_tree::blake3; -use nested_enum_utils::common_fields; +use n0_error::{e, stack_error, StdResultExt}; use postcard::experimental::max_size::MaxSize; use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; -use n0_error::{e, stack_error, StdResultExt}; use crate::store::util::DD; @@ -143,7 +142,10 @@ pub enum HexOrBase32ParseError { #[error("Invalid length")] DecodeInvalidLength {}, #[error("Failed to decode {source}")] - Decode { #[error(std_err)] source: data_encoding::DecodeError }, + Decode { + #[error(std_err)] + source: data_encoding::DecodeError, + }, } impl FromStr for Hash { @@ -393,21 +395,23 @@ impl fmt::Display for HashAndFormat { } impl FromStr for HashAndFormat { - type Err = anyhow::Error; + type Err = n0_error::AnyError; fn from_str(s: &str) -> Result { let s = s.as_bytes(); let mut hash = [0u8; 32]; match s.len() { 64 => { - hex::decode_to_slice(s, &mut hash)?; + hex::decode_to_slice(s, &mut hash).anyerr()?; Ok(Self::raw(hash.into())) } 65 if s[0].eq_ignore_ascii_case(&b's') => { - hex::decode_to_slice(&s[1..], &mut hash)?; + hex::decode_to_slice(&s[1..], &mut hash).anyerr()?; Ok(Self::hash_seq(hash.into())) } - _ => anyhow::bail!("invalid hash and format"), + _ => { + n0_error::bail_any!("invalid hash and format"); + } } } } diff --git a/src/hashseq.rs b/src/hashseq.rs index 98d96e45..84aa950c 100644 --- a/src/hashseq.rs +++ b/src/hashseq.rs @@ -2,6 +2,7 @@ use 
std::fmt::Debug; use bytes::Bytes; +use n0_error::{anyerr, AnyError}; use crate::Hash; @@ -34,10 +35,10 @@ impl FromIterator for HashSeq { } impl TryFrom for HashSeq { - type Error = anyhow::Error; + type Error = AnyError; fn try_from(bytes: Bytes) -> Result { - Self::new(bytes).ok_or_else(|| anyhow::anyhow!("invalid hash sequence")) + Self::new(bytes).ok_or_else(|| anyerr!("invalid hash sequence")) } } diff --git a/src/net_protocol.rs b/src/net_protocol.rs index 4eb11265..99464dd3 100644 --- a/src/net_protocol.rs +++ b/src/net_protocol.rs @@ -5,7 +5,7 @@ //! # Example //! //! ```rust -//! # async fn example() -> anyhow::Result<()> { +//! # async fn example() -> n0_error::Result<()> { //! use iroh::{protocol::Router, Endpoint}; //! use iroh_blobs::{store, ticket::BlobTicket, BlobsProtocol}; //! diff --git a/src/provider.rs b/src/provider.rs index 1409cd90..ee61756f 100644 --- a/src/provider.rs +++ b/src/provider.rs @@ -10,14 +10,14 @@ use std::{ time::{Duration, Instant}, }; -use anyhow::Result; use bao_tree::ChunkRanges; use iroh::endpoint::{self, VarInt}; use iroh_io::{AsyncStreamReader, AsyncStreamWriter}; +use n0_error::Result; +use n0_error::{e, stack_error}; use n0_future::StreamExt; use quinn::ConnectionError; use serde::{Deserialize, Serialize}; -use n0_error::stack_error; use tokio::select; use tracing::{debug, debug_span, Instrument}; @@ -376,7 +376,7 @@ async fn handle_read_result( pub async fn handle_stream( mut pair: StreamPair, store: Store, -) -> anyhow::Result<()> { +) -> n0_error::Result<()> { let request = pair.read_request().await?; match request { Request::Get(request) => handle_get(pair, store, request).await?, @@ -388,10 +388,13 @@ pub async fn handle_stream( Ok(()) } -#[stack_error(derive, add_meta, std_sources)] +#[stack_error(derive, add_meta, from_sources)] pub enum HandleGetError { #[error(transparent)] - ExportBao { #[error(std_err)] source: ExportBaoError }, + ExportBao { + #[error(std_err)] + source: ExportBaoError, + }, #[error("Invalid hash sequence")] InvalidHashSeq {}, #[error("Invalid offset")] @@ -437,12 +440,12 @@ async fn handle_get_impl( None => { let bytes = store.get_bytes(hash).await?; let hs = - HashSeq::try_from(bytes).map_err(|_| HandleGetError::InvalidHashSeq)?; + HashSeq::try_from(bytes).map_err(|_| e!(HandleGetError::InvalidHashSeq))?; hash_seq = Some(hs); hash_seq.as_ref().unwrap() } }; - let o = usize::try_from(offset - 1).map_err(|_| HandleGetError::InvalidOffset)?; + let o = usize::try_from(offset - 1).map_err(|_| e!(HandleGetError::InvalidOffset))?; let Some(hash) = hash_seq.get(o) else { break; }; @@ -453,7 +456,7 @@ async fn handle_get_impl( .inner .sync() .await - .map_err(|e| HandleGetError::ExportBao { source: e.into() })?; + .map_err(|e| e!(HandleGetError::ExportBao, e.into()))?; Ok(()) } @@ -462,7 +465,7 @@ pub async fn handle_get( mut pair: StreamPair, store: Store, request: GetRequest, -) -> anyhow::Result<()> { +) -> n0_error::Result<()> { let res = pair.get_request(|| request.clone()).await; let tracker = handle_read_request_result(&mut pair, res).await?; let mut writer = pair.into_writer(tracker).await?; @@ -471,10 +474,10 @@ pub async fn handle_get( Ok(()) } -#[stack_error(derive, add_meta, std_sources)] +#[stack_error(derive, add_meta, from_sources)] pub enum HandleGetManyError { #[error(transparent)] - ExportBao { #[error(std_err)] source: ExportBaoError }, + ExportBao { source: ExportBaoError }, } impl HasErrorCode for HandleGetManyError { @@ -511,7 +514,7 @@ pub async fn handle_get_many( mut pair: StreamPair, store: 
Store, request: GetManyRequest, -) -> anyhow::Result<()> { +) -> n0_error::Result<()> { let res = pair.get_many_request(|| request.clone()).await; let tracker = handle_read_request_result(&mut pair, res).await?; let mut writer = pair.into_writer(tracker).await?; @@ -520,16 +523,16 @@ pub async fn handle_get_many( Ok(()) } -#[stack_error(derive, add_meta, std_sources)] +#[stack_error(derive, add_meta, from_sources)] pub enum HandlePushError { #[error(transparent)] - ExportBao { #[error(std_err)] source: ExportBaoError }, + ExportBao { source: ExportBaoError }, #[error("Invalid hash sequence")] InvalidHashSeq {}, #[error(transparent)] - Request { #[error(std_err)] source: RequestError }, + Request { source: RequestError }, } impl HasErrorCode for HandlePushError { @@ -568,7 +571,7 @@ async fn handle_push_impl( } // todo: we assume here that the hash sequence is complete. For some requests this might not be the case. We would need `LazyHashSeq` for that, but it is buggy as of now! let hash_seq = store.get_bytes(hash).await?; - let hash_seq = HashSeq::try_from(hash_seq).map_err(|_| HandlePushError::InvalidHashSeq)?; + let hash_seq = HashSeq::try_from(hash_seq).map_err(|_| e!(HandlePushError::InvalidHashSeq))?; for (child_hash, child_ranges) in hash_seq.into_iter().zip(request_ranges) { if child_ranges.is_empty() { continue; @@ -584,7 +587,7 @@ pub async fn handle_push( mut pair: StreamPair, store: Store, request: PushRequest, -) -> anyhow::Result<()> { +) -> n0_error::Result<()> { let res = pair.push_request(|| request.clone()).await; let tracker = handle_read_request_result(&mut pair, res).await?; let mut reader = pair.into_reader(tracker).await?; @@ -607,13 +610,13 @@ pub(crate) async fn send_blob( .await } -#[stack_error(derive, add_meta, std_sources)] +#[stack_error(derive, add_meta, std_sources, from_sources)] pub enum HandleObserveError { #[error("observe stream closed")] ObserveStreamClosed {}, #[error(transparent)] - RemoteClosed { #[error(std_err)] source: io::Error }, + RemoteClosed { source: io::Error }, } impl HasErrorCode for HandleObserveError { @@ -634,18 +637,18 @@ async fn handle_observe_impl( .observe(request.hash) .stream() .await - .map_err(|_| HandleObserveError::ObserveStreamClosed)?; + .map_err(|_| e!(HandleObserveError::ObserveStreamClosed))?; let mut old = stream .next() .await - .ok_or(HandleObserveError::ObserveStreamClosed)?; + .ok_or_else(|| e!(HandleObserveError::ObserveStreamClosed))?; // send the initial bitfield send_observe_item(writer, &old).await?; // send updates until the remote loses interest loop { select! 
{ new = stream.next() => { - let new = new.ok_or(HandleObserveError::ObserveStreamClosed)?; + let new = new.ok_or_else(|| e!(HandleObserveError::ObserveStreamClosed))?; let diff = old.diff(&new); if diff.is_empty() { continue; @@ -676,7 +679,7 @@ pub async fn handle_observe( mut pair: StreamPair, store: Store, request: ObserveRequest, -) -> anyhow::Result<()> { +) -> n0_error::Result<()> { let res = pair.observe_request(|| request.clone()).await; let tracker = handle_read_request_result(&mut pair, res).await?; let mut writer = pair.into_writer(tracker).await?; diff --git a/src/provider/events.rs b/src/provider/events.rs index 448e9f7f..af20fdc4 100644 --- a/src/provider/events.rs +++ b/src/provider/events.rs @@ -4,8 +4,8 @@ use irpc::{ channel::{mpsc, none::NoSender, oneshot}, rpc_requests, Channels, WithChannels, }; +use n0_error::{e, stack_error}; use serde::{Deserialize, Serialize}; -use n0_error::stack_error; use crate::{ protocol::{ @@ -85,7 +85,7 @@ pub enum AbortReason { } /// Errors that can occur when sending progress updates. -#[stack_error(derive, add_meta)] +#[stack_error(derive, add_meta, from_sources)] pub enum ProgressError { #[error("limit")] Limit {}, @@ -501,7 +501,7 @@ impl EventSender { RequestUpdates::Active(tx) } RequestMode::Disabled => { - return Err(ProgressError::Permission); + return Err(e!(ProgressError::Permission)); } _ => RequestUpdates::None, }, diff --git a/src/store/fs.rs b/src/store/fs.rs index f6293ae8..0cb040da 100644 --- a/src/store/fs.rs +++ b/src/store/fs.rs @@ -94,6 +94,7 @@ use entry_state::{DataLocation, OutboardLocation}; use import::{ImportEntry, ImportSource}; use irpc::{channel::mpsc, RpcMessage}; use meta::list_blobs; +use n0_error::{Result, StdResultExt}; use n0_future::{future::yield_now, io}; use nested_enum_utils::enum_conversions; use range_collections::range_set::RangeSetRange; @@ -638,7 +639,7 @@ impl Actor { fs_commands_rx: tokio::sync::mpsc::Receiver, fs_commands_tx: tokio::sync::mpsc::Sender, options: Arc, - ) -> anyhow::Result { + ) -> Result { trace!( "creating data directory: {}", options.path.data_path.display() @@ -1388,7 +1389,7 @@ async fn copy_with_progress( impl FsStore { /// Load or create a new store. - pub async fn load(root: impl AsRef) -> anyhow::Result { + pub async fn load(root: impl AsRef) -> Result { let path = root.as_ref(); let db_path = path.join("blobs.db"); let options = Options::new(path); @@ -1396,7 +1397,7 @@ impl FsStore { } /// Load or create a new store with custom options, returning an additional sender for file store specific commands. 
- pub async fn load_with_opts(db_path: PathBuf, options: Options) -> anyhow::Result { + pub async fn load_with_opts(db_path: PathBuf, options: Options) -> Result { static THREAD_NR: AtomicU64 = AtomicU64::new(0); let rt = tokio::runtime::Builder::new_multi_thread() .thread_name_fn(|| { @@ -1420,7 +1421,8 @@ impl FsStore { fs_commands_tx.clone(), Arc::new(options), )) - .await??; + .await + .anyerr()??; handle.spawn(actor.run()); let store = FsStore::new(commands_tx.into(), fs_commands_tx); if let Some(config) = gc_config { @@ -1476,7 +1478,7 @@ impl FsStore { } } - pub async fn dump(&self) -> anyhow::Result<()> { + pub async fn dump(&self) -> Result<()> { let (tx, rx) = oneshot::channel(); self.db .send( @@ -1486,8 +1488,9 @@ impl FsStore { } .into(), ) - .await?; - rx.await??; + .await + .anyerr()?; + rx.await.anyerr()??; Ok(()) } } @@ -1541,7 +1544,7 @@ pub mod tests { fn create_n0_bao_full( data: &[u8], ranges: &ChunkRanges, - ) -> anyhow::Result<(Hash, ChunkRanges, Vec)> { + ) -> n0_error::Result<(Hash, ChunkRanges, Vec)> { let ranges = round_up_request(data.len() as u64, ranges); let (hash, encoded) = create_n0_bao(data, &ranges)?; Ok((hash, ranges, encoded)) diff --git a/src/store/fs/bao_file.rs b/src/store/fs/bao_file.rs index 0502cead..72ab4801 100644 --- a/src/store/fs/bao_file.rs +++ b/src/store/fs/bao_file.rs @@ -19,6 +19,7 @@ use bao_tree::{ use bytes::{Bytes, BytesMut}; use derive_more::Debug; use irpc::channel::mpsc; +use n0_error::{Result, StdResultExt}; use tokio::sync::watch; use tracing::{debug, info, trace}; @@ -706,7 +707,7 @@ impl BaoFileStorageSubscriber { /// Forward observed *values* to the given sender /// /// Returns an error if sending fails, or if the last sender is dropped - pub async fn forward(mut self, mut tx: mpsc::Sender) -> anyhow::Result<()> { + pub async fn forward(mut self, mut tx: mpsc::Sender) -> Result<()> { let value = self.receiver.borrow().bitfield(); tx.send(value).await?; loop { @@ -720,7 +721,7 @@ impl BaoFileStorageSubscriber { /// /// Returns an error if sending fails, or if the last sender is dropped #[allow(dead_code)] - pub async fn forward_delta(mut self, mut tx: mpsc::Sender) -> anyhow::Result<()> { + pub async fn forward_delta(mut self, mut tx: mpsc::Sender) -> Result<()> { let value = self.receiver.borrow().bitfield(); let mut old = value.clone(); tx.send(value).await?; @@ -736,13 +737,13 @@ impl BaoFileStorageSubscriber { } } - async fn update_or_closed(&mut self, tx: &mut mpsc::Sender) -> anyhow::Result<()> { + async fn update_or_closed(&mut self, tx: &mut mpsc::Sender) -> Result<()> { tokio::select! { _ = tx.closed() => { // the sender is closed, we are done Err(n0_error::e!(irpc::channel::SendError::ReceiverClosed).into()) } - e = self.receiver.changed() => Ok(e?), + e = self.receiver.changed() => Ok(e.anyerr()?), } } } diff --git a/src/store/fs/meta.rs b/src/store/fs/meta.rs index aac43cb4..86cc8a39 100644 --- a/src/store/fs/meta.rs +++ b/src/store/fs/meta.rs @@ -11,10 +11,9 @@ use std::{ use bao_tree::BaoTree; use bytes::Bytes; use irpc::channel::mpsc; -use n0_snafu::SpanTrace; -use nested_enum_utils::common_fields; +use n0_error::{anyerr, AnyError}; +use n0_error::{e, stack_error}; use redb::{Database, DatabaseError, ReadableTable}; -use snafu::{Backtrace, ResultExt, Snafu}; use tokio::pin; use crate::{ @@ -51,27 +50,39 @@ use crate::store::IROH_BLOCK_SIZE; /// /// What can go wrong are various things with redb, as well as io errors related /// to files other than redb. 
-#[common_fields({ - backtrace: Option, - #[snafu(implicit)] - span_trace: SpanTrace, -})] #[allow(missing_docs)] #[non_exhaustive] -#[derive(Debug, Snafu)] +#[stack_error(derive, add_meta, from_sources)] pub enum ActorError { - #[snafu(display("table error: {source}"))] - Table { source: redb::TableError }, - #[snafu(display("database error: {source}"))] - Database { source: redb::DatabaseError }, - #[snafu(display("transaction error: {source}"))] - Transaction { source: redb::TransactionError }, - #[snafu(display("commit error: {source}"))] - Commit { source: redb::CommitError }, - #[snafu(display("storage error: {source}"))] - Storage { source: redb::StorageError }, - #[snafu(display("inconsistent database state: {msg}"))] + #[error("table error: {source}")] + Table { + #[error(std_err)] + source: redb::TableError, + }, + #[error("database error: {source}")] + Database { + #[error(std_err)] + source: redb::DatabaseError, + }, + #[error("transaction error: {source}")] + Transaction { + #[error(std_err)] + source: redb::TransactionError, + }, + #[error("commit error: {source}")] + Commit { + #[error(std_err)] + source: redb::CommitError, + }, + #[error("storage error: {source}")] + Storage { + #[error(std_err)] + source: redb::StorageError, + }, + #[error("inconsistent database state: {msg}")] Inconsistent { msg: String }, + #[error(transparent)] + Other { source: AnyError }, } impl From for io::Error { @@ -82,11 +93,11 @@ impl From for io::Error { impl ActorError { pub(super) fn inconsistent(msg: String) -> Self { - InconsistentSnafu { msg }.build() + e!(ActorError::Inconsistent { msg }) } } -pub type ActorResult = Result; +pub type ActorResult = std::result::Result; #[derive(Debug, Clone)] pub struct Db { @@ -195,7 +206,7 @@ impl Db { fn handle_get(cmd: Get, tables: &impl ReadableTables) -> ActorResult<()> { trace!("{cmd:?}"); let Get { hash, tx, .. } = cmd; - let Some(entry) = tables.blobs().get(hash).context(StorageSnafu)? else { + let Some(entry) = tables.blobs().get(hash)? else { tx.send(GetResult { state: Ok(None) }); return Ok(()); }; @@ -223,26 +234,42 @@ fn handle_get(cmd: Get, tables: &impl ReadableTables) -> ActorResult<()> { fn handle_dump(cmd: Dump, tables: &impl ReadableTables) -> ActorResult<()> { trace!("{cmd:?}"); trace!("dumping database"); - for e in tables.blobs().iter().context(StorageSnafu)? { - let (k, v) = e.context(StorageSnafu)?; + for e in tables + .blobs() + .iter() + .map_err(|e| e!(ActorError::Storage, e))? + { + let (k, v) = e.map_err(|e| e!(ActorError::Storage, e))?; let k = k.value(); let v = v.value(); println!("blobs: {} -> {:?}", k.to_hex(), v); } - for e in tables.tags().iter().context(StorageSnafu)? { - let (k, v) = e.context(StorageSnafu)?; + for e in tables + .tags() + .iter() + .map_err(|e| e!(ActorError::Storage, e))? + { + let (k, v) = e.map_err(|e| e!(ActorError::Storage, e))?; let k = k.value(); let v = v.value(); println!("tags: {k} -> {v:?}"); } - for e in tables.inline_data().iter().context(StorageSnafu)? { - let (k, v) = e.context(StorageSnafu)?; + for e in tables + .inline_data() + .iter() + .map_err(|e| e!(ActorError::Storage, e))? + { + let (k, v) = e.map_err(|e| e!(ActorError::Storage, e))?; let k = k.value(); let v = v.value(); println!("inline_data: {} -> {:?}", k.to_hex(), v.len()); } - for e in tables.inline_outboard().iter().context(StorageSnafu)? { - let (k, v) = e.context(StorageSnafu)?; + for e in tables + .inline_outboard() + .iter() + .map_err(|e| e!(ActorError::Storage, e))? 
+ { + let (k, v) = e.map_err(|e| e!(ActorError::Storage, e))?; let k = k.value(); let v = v.value(); println!("inline_outboard: {} -> {:?}", k.to_hex(), v.len()); @@ -271,11 +298,19 @@ async fn handle_get_blob_status( tx, .. } = msg; - let res = match tables.blobs().get(hash).context(StorageSnafu)? { + let res = match tables + .blobs() + .get(hash) + .map_err(|e| e!(ActorError::Storage, e))? + { Some(entry) => match entry.value() { EntryState::Complete { data_location, .. } => match data_location { DataLocation::Inline(_) => { - let Some(data) = tables.inline_data().get(hash).context(StorageSnafu)? else { + let Some(data) = tables + .inline_data() + .get(hash) + .map_err(|e| e!(ActorError::Storage, e))? + else { return Err(ActorError::inconsistent(format!( "inconsistent database state: {} not found", hash.to_hex() @@ -312,7 +347,11 @@ async fn handle_list_tags(msg: ListTagsMsg, tables: &impl ReadableTables) -> Act let from = from.map(Bound::Included).unwrap_or(Bound::Unbounded); let to = to.map(Bound::Excluded).unwrap_or(Bound::Unbounded); let mut res = Vec::new(); - for item in tables.tags().range((from, to)).context(StorageSnafu)? { + for item in tables + .tags() + .range((from, to)) + .map_err(|e| e!(ActorError::Storage, e))? + { match item { Ok((k, v)) => { let v = v.value(); @@ -325,9 +364,7 @@ async fn handle_list_tags(msg: ListTagsMsg, tables: &impl ReadableTables) -> Act res.push(crate::api::Result::Ok(info)); } } - Err(e) => { - res.push(Err(crate::api::Error::other(e))); - } + Err(e) => res.push(Err(crate::api::Error::other(e))), } } tx.send(res).await.ok(); @@ -345,11 +382,7 @@ fn handle_update( } = cmd; protected.insert(hash); trace!("updating hash {} to {}", hash.to_hex(), state.fmt_short()); - let old_entry_opt = tables - .blobs - .get(hash) - .context(StorageSnafu)? 
- .map(|e| e.value()); + let old_entry_opt = tables.blobs.get(hash)?.map(|e| e.value()); let (state, data, outboard): (_, Option, Option) = match state { EntryState::Complete { data_location, @@ -381,18 +414,21 @@ fn handle_update( } None => state, }; - tables.blobs.insert(hash, state).context(StorageSnafu)?; + tables + .blobs + .insert(hash, state) + .map_err(|e| e!(ActorError::Storage, e))?; if let Some(data) = data { tables .inline_data .insert(hash, data.as_ref()) - .context(StorageSnafu)?; + .map_err(|e| e!(ActorError::Storage, e))?; } if let Some(outboard) = outboard { tables .inline_outboard .insert(hash, outboard.as_ref()) - .context(StorageSnafu)?; + .map_err(|e| e!(ActorError::Storage, e))?; } if let Some(tx) = tx { tx.send(Ok(())); @@ -424,18 +460,21 @@ fn handle_set(cmd: Set, protected: &mut HashSet, tables: &mut Tables) -> A } EntryState::Partial { size } => (EntryState::Partial { size }, None, None), }; - tables.blobs.insert(hash, state).context(StorageSnafu)?; + tables + .blobs + .insert(hash, state) + .map_err(|e| e!(ActorError::Storage, e))?; if let Some(data) = data { tables .inline_data .insert(hash, data.as_ref()) - .context(StorageSnafu)?; + .map_err(|e| e!(ActorError::Storage, e))?; } if let Some(outboard) = outboard { tables .inline_outboard .insert(hash, outboard.as_ref()) - .context(StorageSnafu)?; + .map_err(|e| e!(ActorError::Storage, e))?; } tx.send(Ok(())); Ok(()) @@ -473,12 +512,12 @@ impl Actor { cmds: tokio::sync::mpsc::Receiver, mut ds: DeleteHandle, options: BatchOptions, - ) -> anyhow::Result { + ) -> Result { debug!("creating or opening meta database at {}", db_path.display()); let mut db = match redb::Database::create(db_path) { Ok(db) => db, Err(DatabaseError::UpgradeRequired(1)) => { - return Err(anyhow::anyhow!("migration from v1 no longer supported")); + return Err(anyerr!("migration from v1 no longer supported").into()); } Err(err) => return Err(err.into()), }; @@ -537,7 +576,7 @@ impl Actor { trace!("delete {hash}: skip (protected)"); continue; } - if let Some(entry) = tables.blobs.remove(hash).context(StorageSnafu)? { + if let Some(entry) = tables.blobs.remove(hash)? { match entry.value() { EntryState::Complete { data_location, @@ -546,7 +585,7 @@ impl Actor { trace!("delete {hash}: currently complete. 
will be deleted."); match data_location { DataLocation::Inline(_) => { - tables.inline_data.remove(hash).context(StorageSnafu)?; + tables.inline_data.remove(hash)?; } DataLocation::Owned(_) => { // mark the data for deletion @@ -556,7 +595,7 @@ impl Actor { } match outboard_location { OutboardLocation::Inline(_) => { - tables.inline_outboard.remove(hash).context(StorageSnafu)?; + tables.inline_outboard.remove(hash)?; } OutboardLocation::Owned => { // mark the outboard for deletion @@ -607,10 +646,7 @@ impl Actor { let tag = Tag::auto(SystemTime::now(), |x| { matches!(tables.tags.get(Tag(Bytes::copy_from_slice(x))), Ok(Some(_))) }); - tables - .tags - .insert(tag.clone(), value) - .context(StorageSnafu)?; + tables.tags.insert(tag.clone(), value)?; tag }; tx.send(Ok(tag.clone())).await.ok(); @@ -626,14 +662,11 @@ impl Actor { } = cmd; let from = from.map(Bound::Included).unwrap_or(Bound::Unbounded); let to = to.map(Bound::Excluded).unwrap_or(Bound::Unbounded); - let removing = tables - .tags - .extract_from_if((from, to), |_, _| true) - .context(StorageSnafu)?; + let removing = tables.tags.extract_from_if((from, to), |_, _| true)?; // drain the iterator to actually remove the tags let mut deleted = 0; for res in removing { - res.context(StorageSnafu)?; + res?; deleted += 1; } tx.send(Ok(deleted)).await.ok(); @@ -647,7 +680,7 @@ impl Actor { tx, .. } = cmd; - let value = match tables.tags.remove(from).context(StorageSnafu)? { + let value = match tables.tags.remove(from)? { Some(value) => value.value(), None => { tx.send(Err(api::Error::io( @@ -659,7 +692,7 @@ impl Actor { return Ok(()); } }; - tables.tags.insert(to, value).context(StorageSnafu)?; + tables.tags.insert(to, value)?; tx.send(Ok(())).await.ok(); Ok(()) } @@ -737,8 +770,10 @@ impl Actor { } TopLevelCommand::Snapshot(cmd) => { trace!("{cmd:?}"); - let txn = db.begin_read().context(TransactionSnafu)?; - let snapshot = ReadOnlyTables::new(&txn).context(TableSnafu)?; + let txn = db + .begin_read() + .map_err(|e| e!(ActorError::Transaction, e))?; + let snapshot = ReadOnlyTables::new(&txn).map_err(|e| e!(ActorError::Table, e))?; cmd.tx.send(snapshot).ok(); None } @@ -764,8 +799,10 @@ impl Actor { Command::ReadOnly(cmd) => { let op = TxnNum::Read(op); self.cmds.push_back(cmd.into()).ok(); - let tx = db.begin_read().context(TransactionSnafu)?; - let tables = ReadOnlyTables::new(&tx).context(TableSnafu)?; + let tx = db + .begin_read() + .map_err(|e| e!(ActorError::Transaction, e))?; + let tables = ReadOnlyTables::new(&tx).map_err(|e| e!(ActorError::Table, e))?; let timeout = tokio::time::sleep(self.options.max_read_duration); pin!(timeout); let mut n = 0; @@ -782,8 +819,11 @@ impl Actor { let op = TxnNum::Write(op); self.cmds.push_back(cmd.into()).ok(); let ftx = self.ds.begin_write(); - let tx = db.begin_write().context(TransactionSnafu)?; - let mut tables = Tables::new(&tx, &ftx).context(TableSnafu)?; + let tx = db + .begin_write() + .map_err(|e| e!(ActorError::Transaction, e))?; + let mut tables = + Tables::new(&tx, &ftx).map_err(|e| e!(ActorError::Table, e))?; let timeout = tokio::time::sleep(self.options.max_read_duration); pin!(timeout); let mut n = 0; @@ -800,7 +840,7 @@ impl Actor { } } drop(tables); - tx.commit().context(CommitSnafu)?; + tx.commit().map_err(|e| e!(ActorError::Commit, e))?; ftx.commit(); } } @@ -853,7 +893,11 @@ fn load_data( ) -> ActorResult> { Ok(match location { DataLocation::Inline(()) => { - let Some(data) = tables.inline_data().get(hash).context(StorageSnafu)? 
else { + let Some(data) = tables + .inline_data() + .get(hash) + .map_err(|e| e!(ActorError::Storage, e))? + else { return Err(ActorError::inconsistent(format!( "inconsistent database state: {} should have inline data but does not", hash.to_hex() @@ -874,7 +918,11 @@ fn load_outboard( Ok(match location { OutboardLocation::NotNeeded => OutboardLocation::NotNeeded, OutboardLocation::Inline(_) => { - let Some(outboard) = tables.inline_outboard().get(hash).context(StorageSnafu)? else { + let Some(outboard) = tables + .inline_outboard() + .get(hash) + .map_err(|e| e!(ActorError::Storage, e))? + else { return Err(ActorError::inconsistent(format!( "inconsistent database state: {} should have inline outboard but does not", hash.to_hex() diff --git a/src/store/fs/meta/proto.rs b/src/store/fs/meta/proto.rs index 6f4aaa6c..3b1f1953 100644 --- a/src/store/fs/meta/proto.rs +++ b/src/store/fs/meta/proto.rs @@ -45,7 +45,7 @@ pub struct GetResult { /// of the inline data and inline outboard tables if necessary. #[derive(Debug)] pub struct Dump { - pub tx: oneshot::Sender>, + pub tx: oneshot::Sender>, pub span: Span, } diff --git a/src/store/mem.rs b/src/store/mem.rs index 76e23854..9ffb22a9 100644 --- a/src/store/mem.rs +++ b/src/store/mem.rs @@ -29,6 +29,7 @@ use bao_tree::{ }; use bytes::Bytes; use irpc::channel::mpsc; +use n0_error::{Result, StdResultExt}; use n0_future::future::yield_now; use range_collections::range_set::RangeSetRange; use tokio::{ @@ -106,7 +107,7 @@ impl Default for MemStore { #[derive(derive_more::From)] enum TaskResult { Unit(()), - Import(anyhow::Result), + Import(Result), Scope(Scope), } @@ -446,7 +447,7 @@ impl Actor { tx.send(tt).await.ok(); } - async fn finish_import(&mut self, res: anyhow::Result) { + async fn finish_import(&mut self, res: Result) { let import_data = match res { Ok(entry) => entry, Err(e) => { @@ -710,7 +711,7 @@ async fn import_bytes( scope: Scope, format: BlobFormat, tx: mpsc::Sender, -) -> anyhow::Result { +) -> Result { tx.send(AddProgressItem::Size(data.len() as u64)).await?; tx.send(AddProgressItem::CopyDone).await?; let outboard = PreOrderMemOutboard::create(&data, IROH_BLOCK_SIZE); @@ -728,7 +729,7 @@ async fn import_byte_stream( format: BlobFormat, mut rx: mpsc::Receiver, tx: mpsc::Sender, -) -> anyhow::Result { +) -> Result { let mut res = Vec::new(); loop { match rx.recv().await { @@ -756,7 +757,7 @@ async fn import_byte_stream( } #[instrument(skip_all, fields(path = %cmd.path.display()))] -async fn import_path(cmd: ImportPathMsg) -> anyhow::Result { +async fn import_path(cmd: ImportPathMsg) -> Result { let ImportPathMsg { inner: ImportPathRequest { @@ -1035,7 +1036,7 @@ impl BaoFileStorageSubscriber { /// Forward observed *values* to the given sender /// /// Returns an error if sending fails, or if the last sender is dropped - pub async fn forward(mut self, mut tx: mpsc::Sender) -> anyhow::Result<()> { + pub async fn forward(mut self, mut tx: mpsc::Sender) -> Result<()> { let value = self.receiver.borrow().bitfield(); tx.send(value).await?; loop { @@ -1049,7 +1050,7 @@ impl BaoFileStorageSubscriber { /// /// Returns an error if sending fails, or if the last sender is dropped #[allow(dead_code)] - pub async fn forward_delta(mut self, mut tx: mpsc::Sender) -> anyhow::Result<()> { + pub async fn forward_delta(mut self, mut tx: mpsc::Sender) -> Result<()> { let value = self.receiver.borrow().bitfield(); let mut old = value.clone(); tx.send(value).await?; @@ -1065,13 +1066,13 @@ impl BaoFileStorageSubscriber { } } - async fn update_or_closed(&mut 
self, tx: &mut mpsc::Sender) -> anyhow::Result<()> { + async fn update_or_closed(&mut self, tx: &mut mpsc::Sender) -> Result<()> { tokio::select! { _ = tx.closed() => { // the sender is closed, we are done Err(n0_error::e!(irpc::channel::SendError::ReceiverClosed).into()) } - e = self.receiver.changed() => Ok(e?), + e = self.receiver.changed() => Ok(e.anyerr()?), } } } diff --git a/src/store/util.rs b/src/store/util.rs index 03be152b..e6d66d8b 100644 --- a/src/store/util.rs +++ b/src/store/util.rs @@ -409,17 +409,19 @@ impl bao_tree::io::mixed::Sender for BaoTreeSender { #[cfg(feature = "fs-store")] pub mod tests { use bao_tree::{io::outboard::PreOrderMemOutboard, ChunkRanges}; + use n0_error::{Result, StdResultExt}; use crate::{hash::Hash, store::IROH_BLOCK_SIZE}; /// Create n0 flavoured bao. Note that this can be used to request ranges below a chunk group size, /// which can not be exported via bao because we don't store hashes below the chunk group level. - pub fn create_n0_bao(data: &[u8], ranges: &ChunkRanges) -> anyhow::Result<(Hash, Vec)> { + pub fn create_n0_bao(data: &[u8], ranges: &ChunkRanges) -> Result<(Hash, Vec)> { let outboard = PreOrderMemOutboard::create(data, IROH_BLOCK_SIZE); let mut encoded = Vec::new(); let size = data.len() as u64; encoded.extend_from_slice(&size.to_le_bytes()); - bao_tree::io::sync::encode_ranges_validated(data, &outboard, ranges, &mut encoded)?; + bao_tree::io::sync::encode_ranges_validated(data, &outboard, ranges, &mut encoded) + .anyerr()?; Ok((outboard.root.into(), encoded)) } } diff --git a/src/test.rs b/src/test.rs index 3ecb1c87..f6c05f62 100644 --- a/src/test.rs +++ b/src/test.rs @@ -14,7 +14,7 @@ pub async fn create_random_blobs( num_blobs: usize, blob_size: impl Fn(usize, &mut R) -> usize, mut rand: R, -) -> anyhow::Result> { +) -> n0_error::Result> { // generate sizes and seeds, non-parrallelized so it is deterministic let sizes = (0..num_blobs) .map(|n| (blob_size(n, &mut rand), rand.random::())) @@ -39,7 +39,7 @@ pub async fn add_hash_sequences( num_seqs: usize, seq_size: impl Fn(usize, &mut R) -> usize, mut rand: R, -) -> anyhow::Result> { +) -> n0_error::Result> { let infos = stream::iter(0..num_seqs) .then(|n| { let size = seq_size(n, &mut rand); diff --git a/src/ticket.rs b/src/ticket.rs index 55ef00ae..95c94993 100644 --- a/src/ticket.rs +++ b/src/ticket.rs @@ -1,7 +1,7 @@ //! Tickets for blobs. use std::{collections::BTreeSet, net::SocketAddr, str::FromStr}; -use anyhow::Result; +use n0_error::Result; use iroh::{EndpointAddr, EndpointId, RelayUrl}; use iroh_tickets::{ParseError, Ticket}; use serde::{Deserialize, Serialize}; diff --git a/src/util/connection_pool.rs b/src/util/connection_pool.rs index 54f32533..020ca245 100644 --- a/src/util/connection_pool.rs +++ b/src/util/connection_pool.rs @@ -23,11 +23,11 @@ use iroh::{ endpoint::{ConnectError, Connection}, Endpoint, EndpointId, }; +use n0_error::{e, stack_error}; use n0_future::{ future::{self}, FuturesUnordered, MaybeFuture, Stream, StreamExt, }; -use n0_error::stack_error; use tokio::sync::{ mpsc::{self, error::SendError as TokioSendError}, oneshot, Notify, @@ -110,30 +110,38 @@ impl ConnectionRef { /// This includes the normal iroh connection errors as well as pool specific /// errors such as timeouts and connection limits. 
#[stack_error(derive, add_meta)] +#[derive(Clone)] pub enum PoolConnectError { /// Connection pool is shut down #[error("Connection pool is shut down")] Shutdown {}, /// Timeout during connect - #[error("Timeout during connect")] + #[error("Timeout during connect")] Timeout {}, /// Too many connections #[error("Too many connections")] TooManyConnections {}, /// Error during connect #[error(transparent)] - ConnectError { source: ConnectError }, + ConnectError { source: Arc }, /// Error during on_connect callback #[error(transparent)] - OnConnectError { #[error(std_err)] source: io::Error }, + OnConnectError { + #[error(std_err)] + source: Arc, + }, } impl From for PoolConnectError { - fn from(e: ConnectError) -> Self { n0_error::e!(PoolConnectError::ConnectError, e) } + fn from(e: ConnectError) -> Self { + e!(PoolConnectError::ConnectError, Arc::new(e)) + } } impl From for PoolConnectError { - fn from(e: io::Error) -> Self { n0_error::e!(PoolConnectError::OnConnectError, e) } + fn from(e: io::Error) -> Self { + e!(PoolConnectError::OnConnectError, Arc::new(e)) + } } /// Error when calling a fn on the [`ConnectionPool`]. @@ -193,7 +201,7 @@ impl Context { let state = conn_fut .timeout(context.options.connect_timeout) .await - .map_err(|_| PoolConnectError::Timeout) + .map_err(|_| e!(PoolConnectError::Timeout)) .and_then(|r| r); let conn_close = match &state { Ok(conn) => { @@ -359,7 +367,7 @@ impl Actor { self.connections.remove(&idle); } else { msg.tx - .send(Err(n0_error::e!(PoolConnectError::TooManyConnections))) + .send(Err(e!(PoolConnectError::TooManyConnections))) .ok(); return; } @@ -437,8 +445,8 @@ impl ConnectionPool { self.tx .send(ActorMessage::RequestRef(RequestRef { id, tx })) .await - .map_err(|_| n0_error::e!(PoolConnectError::Shutdown))?; - rx.await.map_err(|_| n0_error::e!(PoolConnectError::Shutdown))? + .map_err(|_| e!(PoolConnectError::Shutdown))?; + rx.await.map_err(|_| e!(PoolConnectError::Shutdown))? } /// Close an existing connection, if it exists @@ -449,7 +457,7 @@ impl ConnectionPool { self.tx .send(ActorMessage::ConnectionShutdown { id }) .await - .map_err(|_| n0_error::e!(ConnectionPoolError::Shutdown))?; + .map_err(|_| e!(ConnectionPoolError::Shutdown))?; Ok(()) } @@ -463,7 +471,7 @@ impl ConnectionPool { self.tx .send(ActorMessage::ConnectionIdle { id }) .await - .map_err(|_| n0_error::e!(ConnectionPoolError::Shutdown))?; + .map_err(|_| e!(ConnectionPoolError::Shutdown))?; Ok(()) } } @@ -685,7 +693,7 @@ mod tests { // trying to connect to an id for which we have info, but the other // end is not listening, will lead to a timeout. let res = client.echo(non_listening, b"Hello, world!".to_vec()).await; - assert!(matches!(res, Err(PoolConnectError::Timeout))); + assert!(matches!(res, Err(PoolConnectError::Timeout { .. 
}))); } Ok(()) } From 1bd52718f73962b7ea2e4a602666daaa5bc2a46f Mon Sep 17 00:00:00 2001 From: Frando Date: Tue, 4 Nov 2025 16:13:01 +0100 Subject: [PATCH 04/12] fixup --- Cargo.lock | 7 ++++--- Cargo.toml | 1 + 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 40bafa87..18ef1b9f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1704,6 +1704,7 @@ dependencies = [ "irpc", "n0-error", "n0-future", + "nested_enum_utils", "postcard", "proptest", "rand 0.9.2", @@ -2192,14 +2193,14 @@ dependencies = [ [[package]] name = "nested_enum_utils" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43fa9161ed44d30e9702fe42bd78693bceac0fed02f647da749f36109023d3a3" +checksum = "b1d5475271bdd36a4a2769eac1ef88df0f99428ea43e52dfd8b0ee5cb674695f" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.104", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 5a3b868c..a496bedc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -43,6 +43,7 @@ iroh-metrics = { version = "0.37" } redb = { version = "2.6.3", optional = true } reflink-copy = { version = "0.1.24", optional = true } n0-error = "0.1.0" +nested_enum_utils = "0.2.3" [dev-dependencies] clap = { version = "4.5.31", features = ["derive"] } From 384196942d42f6496f7eee5e7b8658ba67ef316f Mon Sep 17 00:00:00 2001 From: Frando Date: Tue, 4 Nov 2025 16:17:58 +0100 Subject: [PATCH 05/12] more fix fix fix --- examples/compression.rs | 6 +++--- examples/custom-protocol.rs | 14 +++++++------- examples/get-blob.rs | 5 +++-- examples/limit.rs | 10 +++++----- examples/mdns-discovery.rs | 6 +++--- examples/random_store.rs | 4 ++-- examples/transfer-collection.rs | 2 +- examples/transfer.rs | 9 +++++---- src/util/connection_pool.rs | 16 ++++++++-------- 9 files changed, 37 insertions(+), 35 deletions(-) diff --git a/examples/compression.rs b/examples/compression.rs index c5ec7d9b..82378e01 100644 --- a/examples/compression.rs +++ b/examples/compression.rs @@ -5,7 +5,6 @@ mod common; use std::{fmt::Debug, path::PathBuf}; -use n0_error::Result; use clap::Parser; use common::setup_logging; use iroh::protocol::ProtocolHandler; @@ -20,6 +19,7 @@ use iroh_blobs::{ store::mem::MemStore, ticket::BlobTicket, }; +use n0_error::{Result, StdResultExt}; use tracing::debug; use crate::common::get_or_generate_secret_key; @@ -200,7 +200,7 @@ async fn main() -> Result<()> { println!("Node is running. 
Press Ctrl-C to exit."); tokio::signal::ctrl_c().await?; println!("Shutting down."); - router.shutdown().await?; + router.shutdown().await.anyerr()?; } Args::Get { ticket, target } => { let store = MemStore::new(); @@ -208,7 +208,7 @@ async fn main() -> Result<()> { .connect(ticket.addr().clone(), lz4::Compression::ALPN) .await?; let connection_id = conn.stable_id() as u64; - let (send, recv) = conn.open_bi().await?; + let (send, recv) = conn.open_bi().await.anyerr()?; let send = compression.send_stream(send); let recv = compression.recv_stream(recv); let sp = StreamPair::new(connection_id, recv, send); diff --git a/examples/custom-protocol.rs b/examples/custom-protocol.rs index 16a19f1b..e5cc5bc0 100644 --- a/examples/custom-protocol.rs +++ b/examples/custom-protocol.rs @@ -40,7 +40,6 @@ use std::{ sync::{Arc, Mutex}, }; -use n0_error::Result; use clap::Parser; use iroh::{ discovery::pkarr::PkarrResolver, @@ -49,6 +48,7 @@ use iroh::{ Endpoint, EndpointId, }; use iroh_blobs::{api::Store, store::mem::MemStore, BlobsProtocol, Hash}; +use n0_error::{anyerr, Result, StdResultExt}; mod common; use common::{get_or_generate_secret_key, setup_logging}; @@ -110,7 +110,7 @@ async fn listen(text: Vec) -> Result<()> { // Wait for Ctrl-C to be pressed. tokio::signal::ctrl_c().await?; - node.shutdown().await?; + node.shutdown().await.anyerr()?; Ok(()) } @@ -275,14 +275,14 @@ pub async fn query_remote( let blobs_conn = endpoint.connect(endpoint_id, iroh_blobs::ALPN).await?; // Open a bi-directional in our connection. - let (mut send, mut recv) = conn.open_bi().await?; + let (mut send, mut recv) = conn.open_bi().await.anyerr()?; // Send our query. - send.write_all(query.as_bytes()).await?; + send.write_all(query.as_bytes()).await.anyerr()?; // Finish the send stream, signalling that no further data will be sent. // This makes the `read_to_end` call on the accepting side terminate. - send.finish()?; + send.finish().anyerr()?; // In this example, we simply collect all results into a vector. // For real protocols, you'd usually want to return a stream of results instead. @@ -298,7 +298,7 @@ pub async fn query_remote( // so in this case we break our loop. Err(iroh::endpoint::ReadExactError::FinishedEarly(_)) => break, // Other errors are connection errors, so we bail. - Err(err) => return Err(err.into()), + Err(err) => return Err(anyerr!(err)), Ok(_) => {} }; // Upcast the raw bytes to the `Hash` type. @@ -315,7 +315,7 @@ pub async fn query_remote( /// Read a blob from the local blob store and print it to STDOUT. 
async fn read_and_print(store: &Store, hash: Hash) -> Result<()> { let content = store.get_bytes(hash).await?; - let message = String::from_utf8(content.to_vec())?; + let message = String::from_utf8(content.to_vec()).anyerr()?; println!("{}: {message}", hash.fmt_short()); Ok(()) } diff --git a/examples/get-blob.rs b/examples/get-blob.rs index 2bd9a955..1582ba7d 100644 --- a/examples/get-blob.rs +++ b/examples/get-blob.rs @@ -5,6 +5,7 @@ use clap::Parser; use common::setup_logging; use iroh::discovery::pkarr::PkarrResolver; use iroh_blobs::{get::request::GetBlobItem, ticket::BlobTicket, BlobFormat}; +use n0_error::bail_any; use n0_future::StreamExt; use tokio::io::AsyncWriteExt; @@ -54,10 +55,10 @@ async fn main() -> n0_error::Result<()> { break stats; } Some(GetBlobItem::Error(err)) => { - n0_error::bail!("Error while streaming blob: {err}"); + bail_any!(err, "Error while streaming blob"); } None => { - n0_error::bail!("Stream ended unexpectedly."); + bail_any!("Stream ended unexpectedly."); } } } diff --git a/examples/limit.rs b/examples/limit.rs index dc74a1bd..773bfde7 100644 --- a/examples/limit.rs +++ b/examples/limit.rs @@ -18,7 +18,6 @@ use std::{ }, }; -use n0_error::Result; use clap::Parser; use common::setup_logging; use iroh::{protocol::Router, EndpointAddr, EndpointId, SecretKey}; @@ -31,6 +30,7 @@ use iroh_blobs::{ ticket::BlobTicket, BlobFormat, BlobsProtocol, Hash, }; +use n0_error::{Result, StdResultExt}; use rand::rng; use crate::common::get_or_generate_secret_key; @@ -276,7 +276,7 @@ async fn main() -> Result<()> { } tokio::signal::ctrl_c().await?; - router.shutdown().await?; + router.shutdown().await.anyerr()?; } Args::ByHash { paths } => { let store = MemStore::new(); @@ -304,7 +304,7 @@ async fn main() -> Result<()> { println!("{}: {ticket} ({permitted})", path.display()); } tokio::signal::ctrl_c().await?; - router.shutdown().await?; + router.shutdown().await.anyerr()?; } Args::Throttle { paths, delay_ms } => { let store = MemStore::new(); @@ -316,7 +316,7 @@ async fn main() -> Result<()> { println!("{}: {ticket}", path.display()); } tokio::signal::ctrl_c().await?; - router.shutdown().await?; + router.shutdown().await.anyerr()?; } Args::MaxConnections { paths, @@ -331,7 +331,7 @@ async fn main() -> Result<()> { println!("{}: {ticket}", path.display()); } tokio::signal::ctrl_c().await?; - router.shutdown().await?; + router.shutdown().await.anyerr()?; } } Ok(()) diff --git a/examples/mdns-discovery.rs b/examples/mdns-discovery.rs index 9240a87a..9c628930 100644 --- a/examples/mdns-discovery.rs +++ b/examples/mdns-discovery.rs @@ -13,12 +13,12 @@ //! Run that command on another machine in the same local network, replacing [FILE_PATH] to the path on which you want to save the transferred content. 
use std::path::{Path, PathBuf}; -use n0_error::{ensure, Result}; use clap::{Parser, Subcommand}; use iroh::{ discovery::mdns::MdnsDiscovery, protocol::Router, Endpoint, PublicKey, RelayMode, SecretKey, }; use iroh_blobs::{store::mem::MemStore, BlobsProtocol, Hash}; +use n0_error::{ensure, Result, StdResultExt}; mod common; use common::{get_or_generate_secret_key, setup_logging}; @@ -74,7 +74,7 @@ async fn accept(path: &Path) -> Result<()> { if !path.is_file() { println!("Content must be a file."); - node.shutdown().await?; + node.shutdown().await.anyerr()?; return Ok(()); } let absolute = path.canonicalize()?; @@ -82,7 +82,7 @@ async fn accept(path: &Path) -> Result<()> { let tag = store.add_path(absolute).await?; println!("To fetch the blob:\n\tcargo run --example mdns-discovery --features examples -- connect {} {} -o [FILE_PATH]", node.endpoint().id(), tag.hash); tokio::signal::ctrl_c().await?; - node.shutdown().await?; + node.shutdown().await.anyerr()?; Ok(()) } diff --git a/examples/random_store.rs b/examples/random_store.rs index ae296ab7..623cc887 100644 --- a/examples/random_store.rs +++ b/examples/random_store.rs @@ -1,6 +1,5 @@ use std::{env, path::PathBuf, str::FromStr}; -use n0_error::{Context, Result}; use clap::{Parser, Subcommand}; use iroh::{discovery::static_provider::StaticProvider, SecretKey}; use iroh_blobs::{ @@ -12,6 +11,7 @@ use iroh_blobs::{ }; use iroh_tickets::endpoint::EndpointTicket; use irpc::RpcMessage; +use n0_error::{Result, StackResultExt, StdResultExt}; use n0_future::StreamExt; use rand::{rngs::StdRng, Rng, SeedableRng}; use tokio::signal::ctrl_c; @@ -247,7 +247,7 @@ async fn provide(args: ProvideArgs) -> n0_error::Result<()> { println!("Node address: {addr:?}"); println!("ticket:\n{ticket}"); ctrl_c().await?; - router.shutdown().await?; + router.shutdown().await.anyerr()?; dump_task.abort(); Ok(()) } diff --git a/examples/transfer-collection.rs b/examples/transfer-collection.rs index ab0e1983..cc4d9956 100644 --- a/examples/transfer-collection.rs +++ b/examples/transfer-collection.rs @@ -7,7 +7,6 @@ //! $ cargo run --example transfer-collection use std::collections::HashMap; -use n0_error::{Context, Result}; use iroh::{ discovery::static_provider::StaticProvider, protocol::Router, Endpoint, EndpointAddr, RelayMode, }; @@ -17,6 +16,7 @@ use iroh_blobs::{ store::mem::MemStore, BlobsProtocol, Hash, HashAndFormat, }; +use n0_error::{Result, StackResultExt}; /// Node is something you'd define in your application. It can contain whatever /// shared state you'd want to couple with network operations. 
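Note on the example migrations in this patch: they all follow the same two-step pattern. The return type changes from `anyhow::Result` to `n0_error::Result`, and fallible calls whose error type only implements `std::error::Error` are adapted with `StdResultExt::anyerr()`, while calls that already return n0-error-aware results keep a plain `?`. A minimal sketch of that pattern, assuming only what the hunks above already show about `Result` and `.anyerr()` (the `parse_port` helper itself is hypothetical, not part of this series):

```rust
use n0_error::{Result, StdResultExt};

// Hypothetical helper showing the conversion: `str::parse` returns a
// `std::num::ParseIntError`, a plain std error, so `.anyerr()` lifts it
// into `n0_error::AnyError` before `?` propagates it via `n0_error::Result`.
fn parse_port(s: &str) -> Result<u16> {
    let port: u16 = s.parse().anyerr()?;
    Ok(port)
}
```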
diff --git a/examples/transfer.rs b/examples/transfer.rs index 777ad96c..417654ef 100644 --- a/examples/transfer.rs +++ b/examples/transfer.rs @@ -2,6 +2,7 @@ use std::path::PathBuf; use iroh::{protocol::Router, Endpoint}; use iroh_blobs::{store::mem::MemStore, ticket::BlobTicket, BlobsProtocol}; +use n0_error::StdResultExt; #[tokio::main] async fn main() -> n0_error::Result<()> { @@ -21,7 +22,7 @@ async fn main() -> n0_error::Result<()> { match arg_refs.as_slice() { ["send", filename] => { - let filename: PathBuf = filename.parse()?; + let filename: PathBuf = filename.parse().anyerr()?; let abs_path = std::path::absolute(&filename)?; println!("Hashing file."); @@ -49,12 +50,12 @@ async fn main() -> n0_error::Result<()> { // Gracefully shut down the node println!("Shutting down."); - router.shutdown().await?; + router.shutdown().await.anyerr()?; } ["receive", ticket, filename] => { - let filename: PathBuf = filename.parse()?; + let filename: PathBuf = filename.parse().anyerr()?; let abs_path = std::path::absolute(filename)?; - let ticket: BlobTicket = ticket.parse()?; + let ticket: BlobTicket = ticket.parse().anyerr()?; // For receiving files, we create a "downloader" that allows us to fetch files // from other nodes via iroh connections diff --git a/src/util/connection_pool.rs b/src/util/connection_pool.rs index 020ca245..8618d888 100644 --- a/src/util/connection_pool.rs +++ b/src/util/connection_pool.rs @@ -553,8 +553,8 @@ mod tests { protocol::{AcceptError, ProtocolHandler, Router}, Endpoint, EndpointAddr, EndpointId, RelayMode, SecretKey, TransportAddr, Watcher, }; + use n0_error::{AnyError, Result, StdResultExt}; use n0_future::{io, stream, BufferedStreamExt, StreamExt}; - use n0_snafu::ResultExt; use testresult::TestResult; use tracing::trace; @@ -588,14 +588,14 @@ mod tests { } } - async fn echo_client(conn: &Connection, text: &[u8]) -> n0_snafu::Result> { + async fn echo_client(conn: &Connection, text: &[u8]) -> Result> { let conn_id = conn.stable_id(); - let id = conn.remote_id().e()?; + let id = conn.remote_id().anyerr()?; trace!(%id, %conn_id, "Sending echo request"); - let (mut send, mut recv) = conn.open_bi().await.e()?; - send.write_all(text).await.e()?; - send.finish().e()?; - let response = recv.read_to_end(1000).await.e()?; + let (mut send, mut recv) = conn.open_bi().await.anyerr()?; + send.write_all(text).await.anyerr()?; + send.finish().anyerr()?; + let response = recv.read_to_end(1000).await.anyerr()?; trace!(%id, %conn_id, "Received echo response"); Ok(response) } @@ -653,7 +653,7 @@ mod tests { &self, id: EndpointId, text: Vec, - ) -> Result), n0_snafu::Error>, PoolConnectError> { + ) -> Result), AnyError>, PoolConnectError> { let conn = self.pool.get_or_connect(id).await?; let id = conn.stable_id(); match echo_client(&conn, &text).await { From 94b70328fd96a986064c25f4455b80d4bd577665 Mon Sep 17 00:00:00 2001 From: Frando Date: Tue, 4 Nov 2025 16:18:08 +0100 Subject: [PATCH 06/12] fmt --- src/api.rs | 3 +-- src/api/remote.rs | 3 +-- src/get/error.rs | 3 +-- src/protocol.rs | 2 +- src/provider.rs | 3 +-- src/store/fs/meta.rs | 3 +-- src/ticket.rs | 2 +- 7 files changed, 7 insertions(+), 12 deletions(-) diff --git a/src/api.rs b/src/api.rs index f6f165eb..74c1aaaf 100644 --- a/src/api.rs +++ b/src/api.rs @@ -17,8 +17,7 @@ use std::{io, net::SocketAddr, ops::Deref}; use bao_tree::io::EncodeError; use iroh::Endpoint; use irpc::rpc::{listen, RemoteService}; -use n0_error::e; -use n0_error::stack_error; +use n0_error::{e, stack_error}; use proto::{Request, ShutdownRequest, 
SyncDbRequest}; use ref_cast::RefCast; use serde::{Deserialize, Serialize}; diff --git a/src/api/remote.rs b/src/api/remote.rs index f8a0c129..d6069000 100644 --- a/src/api/remote.rs +++ b/src/api/remote.rs @@ -15,8 +15,7 @@ use bao_tree::{ use genawaiter::sync::{Co, Gen}; use iroh::endpoint::Connection; use irpc::util::{AsyncReadVarintExt, WriteVarintExt}; -use n0_error::{e, AnyError, StdResultExt}; -use n0_error::{stack_error, Result}; +use n0_error::{e, stack_error, AnyError, Result, StdResultExt}; use n0_future::{io, Stream, StreamExt}; use ref_cast::RefCast; use tracing::{debug, trace}; diff --git a/src/get/error.rs b/src/get/error.rs index 33e79afe..8fa7ed4a 100644 --- a/src/get/error.rs +++ b/src/get/error.rs @@ -2,8 +2,7 @@ use std::io; use iroh::endpoint::{ConnectionError, ReadError, VarInt, WriteError}; -use n0_error::stack_error; -use n0_error::AnyError; +use n0_error::{stack_error, AnyError}; use crate::get::fsm::{ AtBlobHeaderNextError, AtClosingNextError, ConnectedNextError, DecodeError, InitialNextError, diff --git a/src/protocol.rs b/src/protocol.rs index e32ffae8..c7552664 100644 --- a/src/protocol.rs +++ b/src/protocol.rs @@ -387,8 +387,8 @@ use range_collections::{range_set::RangeSetEntry, RangeSet2}; use serde::{Deserialize, Serialize}; mod range_spec; pub use bao_tree::ChunkRanges; -pub use range_spec::{ChunkRangesSeq, NonEmptyRequestRangeSpecIter, RangeSpec}; use n0_error::stack_error; +pub use range_spec::{ChunkRangesSeq, NonEmptyRequestRangeSpecIter, RangeSpec}; use crate::{api::blobs::Bitfield, util::RecvStreamExt, BlobFormat, Hash, HashAndFormat}; diff --git a/src/provider.rs b/src/provider.rs index ee61756f..bde65022 100644 --- a/src/provider.rs +++ b/src/provider.rs @@ -13,8 +13,7 @@ use std::{ use bao_tree::ChunkRanges; use iroh::endpoint::{self, VarInt}; use iroh_io::{AsyncStreamReader, AsyncStreamWriter}; -use n0_error::Result; -use n0_error::{e, stack_error}; +use n0_error::{e, stack_error, Result}; use n0_future::StreamExt; use quinn::ConnectionError; use serde::{Deserialize, Serialize}; diff --git a/src/store/fs/meta.rs b/src/store/fs/meta.rs index 86cc8a39..6f6b6f60 100644 --- a/src/store/fs/meta.rs +++ b/src/store/fs/meta.rs @@ -11,8 +11,7 @@ use std::{ use bao_tree::BaoTree; use bytes::Bytes; use irpc::channel::mpsc; -use n0_error::{anyerr, AnyError}; -use n0_error::{e, stack_error}; +use n0_error::{anyerr, e, stack_error, AnyError}; use redb::{Database, DatabaseError, ReadableTable}; use tokio::pin; diff --git a/src/ticket.rs b/src/ticket.rs index 95c94993..dbca0c7c 100644 --- a/src/ticket.rs +++ b/src/ticket.rs @@ -1,9 +1,9 @@ //! Tickets for blobs. 
use std::{collections::BTreeSet, net::SocketAddr, str::FromStr}; -use n0_error::Result; use iroh::{EndpointAddr, EndpointId, RelayUrl}; use iroh_tickets::{ParseError, Ticket}; +use n0_error::Result; use serde::{Deserialize, Serialize}; use crate::{BlobFormat, Hash, HashAndFormat}; From 3cfc77ce0fc3eb16522e06ac42f96636aa2562a7 Mon Sep 17 00:00:00 2001 From: Frando Date: Tue, 4 Nov 2025 16:22:17 +0100 Subject: [PATCH 07/12] fixup --- README.md | 9 +++++---- examples/get-blob.rs | 4 ++-- examples/mdns-discovery.rs | 4 ++-- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index c3d87b98..5cd65002 100644 --- a/README.md +++ b/README.md @@ -35,9 +35,10 @@ Here is a basic example of how to set up `iroh-blobs` with `iroh`: ```rust,no_run use iroh::{protocol::Router, Endpoint}; use iroh_blobs::{store::mem::MemStore, BlobsProtocol, ticket::BlobTicket}; +use n0_error::{Result, StdResultExt}; #[tokio::main] -async fn main() -> n0_error::Result<()> { +async fn main() -> Result<()> { // create an iroh endpoint that includes the standard discovery mechanisms // we've built at number0 let endpoint = Endpoint::bind().await?; @@ -45,7 +46,7 @@ async fn main() -> n0_error::Result<()> { // create a protocol handler using an in-memory blob store. let store = MemStore::new(); let tag = store.add_slice(b"Hello world").await?; - + let _ = endpoint.online().await; let addr = endpoint.addr(); let ticket = BlobTicket::new(addr, tag.hash, tag.format); @@ -62,7 +63,7 @@ async fn main() -> n0_error::Result<()> { tokio::signal::ctrl_c().await; // clean shutdown of router and store - router.shutdown().await?; + router.shutdown().await.anyerr()?; Ok(()) } ``` @@ -86,4 +87,4 @@ at your option. Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in this project by you, as defined in the Apache-2.0 license, -shall be dual licensed as above, without any additional terms or conditions. \ No newline at end of file +shall be dual licensed as above, without any additional terms or conditions. diff --git a/examples/get-blob.rs b/examples/get-blob.rs index 1582ba7d..2e7fff20 100644 --- a/examples/get-blob.rs +++ b/examples/get-blob.rs @@ -5,7 +5,7 @@ use clap::Parser; use common::setup_logging; use iroh::discovery::pkarr::PkarrResolver; use iroh_blobs::{get::request::GetBlobItem, ticket::BlobTicket, BlobFormat}; -use n0_error::bail_any; +use n0_error::{bail_any, ensure_any}; use n0_future::StreamExt; use tokio::io::AsyncWriteExt; @@ -34,7 +34,7 @@ async fn main() -> n0_error::Result<()> { .discovery(PkarrResolver::n0_dns()) .bind() .await?; - n0_error::ensure!( + ensure_any!( ticket.format() == BlobFormat::Raw, "This example only supports raw blobs." 
); diff --git a/examples/mdns-discovery.rs b/examples/mdns-discovery.rs index 9c628930..808d84fe 100644 --- a/examples/mdns-discovery.rs +++ b/examples/mdns-discovery.rs @@ -18,7 +18,7 @@ use iroh::{ discovery::mdns::MdnsDiscovery, protocol::Router, Endpoint, PublicKey, RelayMode, SecretKey, }; use iroh_blobs::{store::mem::MemStore, BlobsProtocol, Hash}; -use n0_error::{ensure, Result, StdResultExt}; +use n0_error::{ensure_any, Result, StdResultExt}; mod common; use common::{get_or_generate_secret_key, setup_logging}; @@ -109,7 +109,7 @@ async fn connect(node_id: PublicKey, hash: Hash, out: Option) -> Result ); if let Some(path) = out { let absolute = std::env::current_dir()?.join(&path); - ensure!(!absolute.is_dir(), "output must not be a directory"); + ensure_any!(!absolute.is_dir(), "output must not be a directory"); println!( "exporting {hash} to {} -> {}", path.display(), From 549c9d7b859b86f1262d6809f59c6e8803305d62 Mon Sep 17 00:00:00 2001 From: Frando Date: Tue, 4 Nov 2025 16:23:21 +0100 Subject: [PATCH 08/12] clippy --- src/api/remote.rs | 2 +- src/get/request.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/api/remote.rs b/src/api/remote.rs index d6069000..527f7588 100644 --- a/src/api/remote.rs +++ b/src/api/remote.rs @@ -718,7 +718,7 @@ impl Remote { .await .map_err(|e| e!(GetError::LocalFailure, e.into()))?, ) - .map_err(|e| e!(GetError::BadRequest, e.into()))?; + .map_err(|e| e!(GetError::BadRequest, e))?; // let mut hash_seq = LazyHashSeq::new(store.blobs().clone(), root); loop { let at_start_child = match next_child { diff --git a/src/get/request.rs b/src/get/request.rs index 7d3cfa5d..c4c15cdf 100644 --- a/src/get/request.rs +++ b/src/get/request.rs @@ -247,7 +247,7 @@ pub async fn get_hash_seq_and_sizes( } let (mut curr, hash_seq) = at_blob_content.concatenate_into_vec().await?; let hash_seq = - HashSeq::try_from(Bytes::from(hash_seq)).map_err(|e| e!(GetError::BadRequest, e.into()))?; + HashSeq::try_from(Bytes::from(hash_seq)).map_err(|e| e!(GetError::BadRequest, e))?; let mut sizes = Vec::with_capacity(hash_seq.len()); let closing = loop { match curr.next() { From 5d9783ebfbff5ab70c2cfca5088ba33ff85f0b57 Mon Sep 17 00:00:00 2001 From: Frando Date: Tue, 4 Nov 2025 16:26:48 +0100 Subject: [PATCH 09/12] deps: remove anyhow --- Cargo.lock | 1 - Cargo.toml | 1 - 2 files changed, 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 18ef1b9f..fff5ea26 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1680,7 +1680,6 @@ dependencies = [ name = "iroh-blobs" version = "0.96.0" dependencies = [ - "anyhow", "arrayvec", "async-compression", "atomic_refcell", diff --git a/Cargo.toml b/Cargo.toml index a496bedc..370143f5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,7 +12,6 @@ keywords = ["hashing", "quic", "blake3", "streaming"] rust-version = "1.85" [dependencies] -anyhow = "1.0.95" bao-tree = { version = "0.15.1", features = ["experimental-mixed", "tokio_fsm", "validate", "serde"], default-features = false } bytes = { version = "1", features = ["serde"] } derive_more = { version = "2.0.1", features = ["from", "try_from", "into", "debug", "display", "deref", "deref_mut"] } From b4a41e0552ce0337aebedb28a1db1921243c9b0e Mon Sep 17 00:00:00 2001 From: Frando Date: Wed, 12 Nov 2025 12:02:13 +0100 Subject: [PATCH 10/12] re-add anyhow as dev dependency --- Cargo.lock | 61 +++++++++++++++++++++++++++++++++++++++--------------- Cargo.toml | 14 +++++-------- 2 files changed, 49 insertions(+), 26 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 
fff5ea26..081da560 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1606,8 +1606,9 @@ dependencies = [ [[package]] name = "iroh" -version = "0.94.0" -source = "git+https://github.com/n0-computer/iroh.git?branch=main#30c23e8dbaa02d17ab57ba41f0aa5271b0a411dc" +version = "0.95.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2374ba3cdaac152dc6ada92d971f7328e6408286faab3b7350842b2ebbed4789" dependencies = [ "aead", "backon", @@ -1623,7 +1624,7 @@ dependencies = [ "http", "igd-next", "instant", - "iroh-base", + "iroh-base 0.95.1", "iroh-metrics", "iroh-quinn", "iroh-quinn-proto", @@ -1662,7 +1663,28 @@ dependencies = [ [[package]] name = "iroh-base" version = "0.94.1" -source = "git+https://github.com/n0-computer/iroh.git?branch=main#30c23e8dbaa02d17ab57ba41f0aa5271b0a411dc" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7db6dfffe81a58daae02b72c7784c20feef5b5d3849b190ed1c96a8fa0b3cae8" +dependencies = [ + "curve25519-dalek", + "data-encoding", + "derive_more 2.0.1", + "ed25519-dalek", + "n0-snafu", + "nested_enum_utils", + "rand_core 0.9.3", + "serde", + "snafu", + "url", + "zeroize", + "zeroize_derive", +] + +[[package]] +name = "iroh-base" +version = "0.95.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25a8c5fb1cc65589f0d7ab44269a76f615a8c4458356952c9b0ef1c93ea45ff8" dependencies = [ "curve25519-dalek", "data-encoding", @@ -1680,6 +1702,7 @@ dependencies = [ name = "iroh-blobs" version = "0.96.0" dependencies = [ + "anyhow", "arrayvec", "async-compression", "atomic_refcell", @@ -1694,7 +1717,7 @@ dependencies = [ "genawaiter", "hex", "iroh", - "iroh-base", + "iroh-base 0.95.1", "iroh-io", "iroh-metrics", "iroh-quinn", @@ -1824,8 +1847,9 @@ dependencies = [ [[package]] name = "iroh-relay" -version = "0.94.0" -source = "git+https://github.com/n0-computer/iroh.git?branch=main#30c23e8dbaa02d17ab57ba41f0aa5271b0a411dc" +version = "0.95.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43fbdf2aeffa7d6ede1a31f6570866c2199b1cee96a0b563994623795d1bac2c" dependencies = [ "blake3", "bytes", @@ -1838,7 +1862,7 @@ dependencies = [ "http-body-util", "hyper", "hyper-util", - "iroh-base", + "iroh-base 0.95.1", "iroh-metrics", "iroh-quinn", "iroh-quinn-proto", @@ -1888,7 +1912,7 @@ checksum = "7683c7819693eb8b3d61d1d45ffa92e2faeb07762eb0c3debb50ad795538d221" dependencies = [ "data-encoding", "derive_more 2.0.1", - "iroh-base", + "iroh-base 0.94.1", "n0-snafu", "nested_enum_utils", "postcard", @@ -1898,8 +1922,9 @@ dependencies = [ [[package]] name = "irpc" -version = "0.10.0" -source = "git+https://github.com/n0-computer/irpc.git?branch=matheus23%2Firpc-n0-error#d3df5515741f14aa68191bccbb4b5f614dfc6aa9" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bee97aaa18387c4f0aae61058195dc9f9dea3e41c0e272973fe3e9bf611563d" dependencies = [ "futures-buffered", "futures-util", @@ -1919,8 +1944,9 @@ dependencies = [ [[package]] name = "irpc-derive" -version = "0.8.0" -source = "git+https://github.com/n0-computer/irpc.git?branch=matheus23%2Firpc-n0-error#d3df5515741f14aa68191bccbb4b5f614dfc6aa9" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58148196d2230183c9679431ac99b57e172000326d664e8456fa2cd27af6505a" dependencies = [ "proc-macro2", "quote", @@ -2127,7 +2153,8 @@ dependencies = [ [[package]] name = "n0-error" version = "0.1.0" -source = 
"git+https://github.com/n0-computer/n0-error.git?branch=Frando%2Farc-stack#3be0e6140b9d7cbdda710f4b1bc56493a28bc323" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a4839a11b62f1fdd75be912ee20634053c734c2240e867ded41c7f50822c549" dependencies = [ "derive_more 2.0.1", "n0-error-macros", @@ -2136,10 +2163,10 @@ dependencies = [ [[package]] name = "n0-error-macros" -version = "0.1.0" -source = "git+https://github.com/n0-computer/n0-error.git?branch=Frando%2Farc-stack#3be0e6140b9d7cbdda710f4b1bc56493a28bc323" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f4b756a0dea61b57e698ae937be1fb60cd17050c3ddd9962c756e07af4288e9" dependencies = [ - "heck", "proc-macro2", "quote", "syn 2.0.104", diff --git a/Cargo.toml b/Cargo.toml index 370143f5..f22bc1f4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,12 +32,12 @@ data-encoding = "2.8.0" chrono = "0.4.39" ref-cast = "1.0.24" arrayvec = "0.7.6" -iroh = "0.94" +iroh = "0.95" self_cell = "1.1.0" genawaiter = { version = "0.99.1", features = ["futures03"] } -iroh-base = "0.94" +iroh-base = "0.95" iroh-tickets = "0.1" -irpc = { version = "0.10.0", features = ["rpc", "quinn_endpoint_setup", "spans", "stream", "derive"], default-features = false } +irpc = { version = "0.11.0", features = ["rpc", "quinn_endpoint_setup", "spans", "stream", "derive"], default-features = false } iroh-metrics = { version = "0.37" } redb = { version = "2.6.3", optional = true } reflink-copy = { version = "0.1.24", optional = true } @@ -58,9 +58,10 @@ tracing-subscriber = { version = "0.3.20", features = ["fmt"] } tracing-test = "0.2.5" walkdir = "2.5.0" atomic_refcell = "0.1.13" -iroh = { version = "0.94", features = ["discovery-local-network"]} +iroh = { version = "0.95", features = ["discovery-local-network"]} async-compression = { version = "0.4.30", features = ["lz4", "tokio"] } concat_const = "0.2.0" +anyhow = "1.0.100" [features] hide-proto-docs = [] @@ -68,8 +69,3 @@ metrics = [] default = ["hide-proto-docs", "fs-store"] fs-store = ["dep:redb", "dep:reflink-copy"] -[patch.crates-io] -iroh = { git = "https://github.com/n0-computer/iroh.git", branch = "main" } -iroh-base = { git = "https://github.com/n0-computer/iroh.git", branch = "main" } -irpc = { git = "https://github.com/n0-computer/irpc.git", branch = "matheus23/irpc-n0-error" } -n0-error = { git = "https://github.com/n0-computer/n0-error.git", branch = "Frando/arc-stack" } From d2dc3e088a63312396706dbeff396cac7c790f94 Mon Sep 17 00:00:00 2001 From: Frando Date: Wed, 12 Nov 2025 12:11:45 +0100 Subject: [PATCH 11/12] chore: fmt --- src/get.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/get.rs b/src/get.rs index 940f23db..25e74c66 100644 --- a/src/get.rs +++ b/src/get.rs @@ -23,8 +23,8 @@ use std::{ use bao_tree::{io::fsm::BaoContentItem, ChunkNum}; use fsm::RequestCounters; -use n0_future::time::Instant; use n0_error::Result; +use n0_future::time::Instant; use serde::{Deserialize, Serialize}; use tracing::{debug, error}; From 26a5bc758874bd587d47fcb7da84f806f95d0e90 Mon Sep 17 00:00:00 2001 From: Frando Date: Wed, 12 Nov 2025 12:15:37 +0100 Subject: [PATCH 12/12] fixup wasm --- src/store/mem.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/store/mem.rs b/src/store/mem.rs index 9c7b91ec..1d1583ac 100644 --- a/src/store/mem.rs +++ b/src/store/mem.rs @@ -756,9 +756,9 @@ async fn import_byte_stream( } #[cfg(wasm_browser)] -async fn import_path(cmd: ImportPathMsg) -> anyhow::Result { +async 
fn import_path(cmd: ImportPathMsg) -> Result { let _: ImportPathRequest = cmd.inner; - Err(anyhow::anyhow!( + Err(n0_error::anyerr!( "import_path is not supported in the browser" )) }
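Taken together, the library-side changes in this series follow a second pattern, distinct from the `.anyerr()` adaptation used in the examples: error enums are declared with `#[stack_error(derive, add_meta)]` and constructed through the `e!` macro, replacing the previous snafu `.context(...)` call sites. A minimal sketch of that construction, assuming the attribute and macro behave as in `src/util/connection_pool.rs` and `src/store/fs/meta.rs` above (the `LoadError` enum and `read_config` function are illustrative only):

```rust
use n0_error::{e, stack_error};

// Illustrative enum in the style of `ActorError` and `PoolConnectError`
// above; `LoadError` is not part of this series.
#[stack_error(derive, add_meta)]
pub enum LoadError {
    /// Reading the underlying file failed
    #[error("read failed")]
    Read {
        #[error(std_err)]
        source: std::io::Error,
    },
}

// Call sites construct variants with the `e!` macro instead of snafu's
// `.context(...)`, mirroring the `ActorError::Transaction` sites above.
fn read_config(path: &std::path::Path) -> Result<String, LoadError> {
    std::fs::read_to_string(path).map_err(|e| e!(LoadError::Read, e))
}
```

Variants without a source, such as `PoolConnectError::Timeout`, are built with `e!(Enum::Variant)` alone; the matching test assertions gain `{ .. }` patterns, presumably because `add_meta` injects metadata fields into each variant.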