diff --git a/Cargo.lock b/Cargo.lock index 885efcb97..e49a5fb59 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10,13 +10,19 @@ checksum = "2b7e4e8cf778db814365e46839949ca74df4efb10e87ba4913e6ec5967ef0285" [[package]] name = "addr2line" -version = "0.12.1" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a49806b9dadc843c61e7c97e72490ad7f7220ae249012fbda9ad0609457c0543" +checksum = "3e61f2b7f93d2c7d2b08263acaa4a363b3e276806c68af6134c44f523bf1aacd" dependencies = [ "gimli", ] +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + [[package]] name = "adler32" version = "1.0.4" @@ -50,6 +56,15 @@ dependencies = [ "winapi 0.3.8", ] +[[package]] +name = "anyhow" +version = "1.0.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595d3cfa7a60d4555cb5067b99f07142a08ea778de5cf993f7b75c7d8fabc486" +dependencies = [ + "backtrace", +] + [[package]] name = "arc-swap" version = "0.4.8" @@ -124,13 +139,15 @@ checksum = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d" [[package]] name = "backtrace" -version = "0.3.48" +version = "0.3.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0df2f85c8a2abbe3b7d7e748052fdd9b76a0458fdeb16ad4223f5eca78c7c130" +checksum = "e7a905d892734eea339e896738c14b9afce22b5318f64b951e70bf3844419b01" dependencies = [ "addr2line", - "cfg-if 0.1.10", + "cc", + "cfg-if 1.0.0", "libc", + "miniz_oxide 0.4.4", "object", "rustc-demangle", ] @@ -273,9 +290,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.54" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bbb73db36c1246e9034e307d0fba23f9a2e251faa47ade70c1bd252220c8311" +checksum = "e70cc2f62c6ce1868963827bd677764c62d07c3d9a3e1fb1177ee1a9ab199eb2" dependencies = [ "jobserver", ] @@ -723,6 +740,7 @@ checksum = "212d0f5754cb6769937f4501cc0e67f4f4483c8d2c3e1e922ee9edbe4ab4c7c0" name = "docs-rs" version = "0.6.0" dependencies = [ + "anyhow", "arc-swap", "backtrace", "badge", @@ -778,6 +796,7 @@ dependencies = [ "systemstat", "tempfile", "tera", + "thiserror", "thread_local", "time 0.1.43", "tokio", @@ -918,7 +937,7 @@ dependencies = [ "cfg-if 0.1.10", "crc32fast", "libc", - "miniz_oxide", + "miniz_oxide 0.3.6", ] [[package]] @@ -1167,9 +1186,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.21.0" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcc8e0c9bce37868955864dbecd2b1ab2bdf967e6f28066d65aaac620444b65c" +checksum = "f0a01e0497841a3b2db4f8afa483cce65f7e96a3498bd6c541734792aeac8fe7" [[package]] name = "git2" @@ -1607,9 +1626,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.91" +version = "0.2.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8916b1f6ca17130ec6568feccee27c156ad12037880833a3b842a823236502e7" +checksum = "320cfe77175da3a483efed4bc0adc1968ca050b098ce4f2f1c13a56626128790" [[package]] name = "libflate" @@ -1772,9 +1791,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "memchr" -version = "2.3.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400" +checksum = "b16bd47d9e329435e309c58469fe0791c2d0d1ba96ec0954152a5ae2b04387dc" [[package]] 
name = "memoffset" @@ -1831,6 +1850,16 @@ dependencies = [ "adler32", ] +[[package]] +name = "miniz_oxide" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" +dependencies = [ + "adler", + "autocfg 1.0.0", +] + [[package]] name = "mio" version = "0.6.22" @@ -2044,9 +2073,12 @@ dependencies = [ [[package]] name = "object" -version = "0.19.0" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cbca9424c482ee628fa549d9c812e2cd22f1180b9222c9200fdfa6eb31aecb2" +checksum = "c55827317fb4c08822499848a14237d2874d6f139828893017237e7ab93eb386" +dependencies = [ + "memchr", +] [[package]] name = "once_cell" @@ -3727,18 +3759,18 @@ checksum = "8eaa81235c7058867fa8c0e7314f33dcce9c215f535d1913822a2b3f5e289f3c" [[package]] name = "thiserror" -version = "1.0.20" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dfdd070ccd8ccb78f4ad66bf1982dc37f620ef696c6b5028fe2ed83dd3d0d08" +checksum = "93119e4feac1cbe6c798c34d3a53ea0026b0b1de6a120deef895137c0529bfe2" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.20" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd80fc12f73063ac132ac92aceea36734f04a1d93c1240c6944e23a3b8841793" +checksum = "060d69a0afe7796bf42e9e2ff91f5ee691fb15c53d38b4b62a9a53eb23164745" dependencies = [ "proc-macro2", "quote", diff --git a/Cargo.toml b/Cargo.toml index fadf47ffe..82a66efaf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -35,8 +35,10 @@ r2d2_postgres = "0.18" url = { version = "2.1.1", features = ["serde"] } badge = { path = "crates/badge" } docsrs-metadata = { path = "crates/metadata" } -backtrace = "0.3" -failure = { version = "0.1.3", features = ["backtrace"] } +anyhow = { version = "1.0.42", features = ["backtrace"]} +backtrace = "0.3.61" +failure = "0.1.8" +thiserror = "1.0.26" comrak = { version = "0.10.1", default-features = false } toml = "0.5" schemamama = "0.3" diff --git a/src/bin/cratesfyi.rs b/src/bin/cratesfyi.rs index deba7de8e..a40ffe7fb 100644 --- a/src/bin/cratesfyi.rs +++ b/src/bin/cratesfyi.rs @@ -3,6 +3,7 @@ use std::fmt::Write; use std::path::PathBuf; use std::sync::Arc; +use anyhow::{anyhow, Context as _, Error, Result}; use docs_rs::db::{self, add_path_into_database, Pool, PoolClient}; use docs_rs::repositories::RepositoryStatsUpdater; use docs_rs::utils::{remove_crate_priority, set_crate_priority}; @@ -10,7 +11,6 @@ use docs_rs::{ BuildQueue, Config, Context, DocBuilder, Index, Metrics, PackageKind, RustwideBuilder, Server, Storage, }; -use failure::{err_msg, Error, ResultExt}; use once_cell::sync::OnceCell; use structopt::StructOpt; use strum::VariantNames; @@ -21,12 +21,14 @@ pub fn main() { if let Err(err) = CommandLine::from_args().handle_args() { let mut msg = format!("Error: {}", err); - for cause in err.iter_causes() { + for cause in err.chain() { write!(msg, "\n\nCaused by:\n {}", cause).unwrap(); } eprintln!("{}", msg); - if !err.backtrace().is_empty() { - eprintln!("\nStack backtrace:\n{}", err.backtrace()); + + let backtrace = err.backtrace().to_string(); + if !backtrace.is_empty() { + eprintln!("\nStack backtrace:\n{}", backtrace); } std::process::exit(1); } @@ -108,7 +110,7 @@ enum CommandLine { } impl CommandLine { - pub fn handle_args(self) -> Result<(), Error> { + pub fn handle_args(self) -> Result<()> { let ctx = BinContext::new(); match self { @@ 
-166,7 +168,7 @@ enum QueueSubcommand { } impl QueueSubcommand { - pub fn handle_args(self, ctx: BinContext) -> Result<(), Error> { + pub fn handle_args(self, ctx: BinContext) -> Result<()> { match self { Self::Add { crate_name, @@ -205,7 +207,7 @@ enum PrioritySubcommand { } impl PrioritySubcommand { - pub fn handle_args(self, ctx: BinContext) -> Result<(), Error> { + pub fn handle_args(self, ctx: BinContext) -> Result<()> { match self { Self::Set { pattern, priority } => { set_crate_priority(&mut *ctx.conn()?, &pattern, priority) @@ -239,7 +241,7 @@ struct Build { } impl Build { - pub fn handle_args(self, ctx: BinContext) -> Result<(), Error> { + pub fn handle_args(self, ctx: BinContext) -> Result<()> { self.subcommand.handle_args(ctx, self.skip_if_exists) } } @@ -286,10 +288,10 @@ enum BuildSubcommand { } impl BuildSubcommand { - pub fn handle_args(self, ctx: BinContext, skip_if_exists: bool) -> Result<(), Error> { + pub fn handle_args(self, ctx: BinContext, skip_if_exists: bool) -> Result<()> { let docbuilder = DocBuilder::new(ctx.config()?, ctx.pool()?, ctx.build_queue()?); - let rustwide_builder = || -> Result<RustwideBuilder, Error> { + let rustwide_builder = || -> Result<RustwideBuilder> { let mut builder = RustwideBuilder::init(&ctx)?; builder.set_skip_build_if_exists(skip_if_exists); Ok(builder) @@ -317,9 +319,10 @@ impl BuildSubcommand { let registry_url = ctx.config()?.registry_url.clone(); builder .build_package( - &crate_name.ok_or_else(|| err_msg("must specify name if not local"))?, + &crate_name + .with_context(|| anyhow!("must specify name if not local"))?, &crate_version - .ok_or_else(|| err_msg("must specify version if not local"))?, + .with_context(|| anyhow!("must specify version if not local"))?, registry_url .as_ref() .map(|s| PackageKind::Registry(s.as_str())) @@ -412,7 +415,7 @@ enum DatabaseSubcommand { } impl DatabaseSubcommand { - pub fn handle_args(self, ctx: BinContext) -> Result<(), Error> { + pub fn handle_args(self, ctx: BinContext) -> Result<()> { match self { Self::Migrate { version } => { db::migrate(version, &mut *ctx.conn()?) @@ -482,7 +485,7 @@ enum BlacklistSubcommand { } impl BlacklistSubcommand { - fn handle_args(self, ctx: BinContext) -> Result<(), Error> { + fn handle_args(self, ctx: BinContext) -> Result<()> { let mut conn = &mut *ctx.conn()?; match self { Self::List => { @@ -545,14 +548,14 @@ impl BinContext { } } - fn conn(&self) -> Result<PoolClient, Error> { + fn conn(&self) -> Result<PoolClient> { Ok(self.pool()?.get()?) } } macro_rules! lazy { ( $(fn $name:ident($self:ident) -> $type:ty = $init:expr);+ $(;)? ) => { - $(fn $name(&$self) -> Result<Arc<$type>, Error> { + $(fn $name(&$self) -> Result<Arc<$type>> { Ok($self .$name .get_or_try_init::<_, Error>(|| Ok(Arc::new($init)))? @@ -591,7 +594,7 @@ impl Context for BinContext { }; } - fn pool(&self) -> Result<Pool, Error> { + fn pool(&self) -> Result<Pool> { Ok(self .pool .get_or_try_init::<_, Error>(|| Ok(Pool::new(&*self.config()?, self.metrics()?)?))?
diff --git a/src/build_queue.rs b/src/build_queue.rs index d650176f4..26e1b9099 100644 --- a/src/build_queue.rs +++ b/src/build_queue.rs @@ -170,7 +170,7 @@ mod tests { let assert_next_and_fail = |name| -> Result<()> { queue.process_next_crate(|krate| { assert_eq!(name, krate.name); - failure::bail!("simulate a failure"); + anyhow::bail!("simulate a failure"); })?; Ok(()) }; @@ -278,7 +278,7 @@ mod tests { assert_eq!(queue.failed_count()?, 0); queue.process_next_crate(|krate| { assert_eq!("foo", krate.name); - failure::bail!("this failed"); + anyhow::bail!("this failed"); })?; } assert_eq!(queue.failed_count()?, 1); diff --git a/src/config.rs b/src/config.rs index 532cc642d..72b19fdf6 100644 --- a/src/config.rs +++ b/src/config.rs @@ -1,7 +1,8 @@ use crate::storage::StorageKind; -use failure::{bail, format_err, Error, Fail, ResultExt}; +use anyhow::{anyhow, bail, Context, Result}; use rusoto_core::Region; use std::env::VarError; +use std::error::Error; use std::path::PathBuf; use std::str::FromStr; @@ -65,7 +66,7 @@ pub struct Config { } impl Config { - pub fn from_env() -> Result<Self, Error> { + pub fn from_env() -> Result<Self> { let old_vars = [ ("CRATESFYI_PREFIX", "DOCSRS_PREFIX"), ("CRATESFYI_DATABASE_URL", "DOCSRS_DATABASE_URL"), @@ -78,7 +79,7 @@ impl Config { ]; for (old_var, new_var) in old_vars { if std::env::var(old_var).is_ok() { - failure::bail!( + bail!( "env variable {} is no longer accepted; use {} instead", old_var, new_var @@ -138,36 +139,36 @@ impl Config { } } -fn env<T>(var: &str, default: T) -> Result<T, Error> +fn env<T>(var: &str, default: T) -> Result<T> where T: FromStr, - T::Err: Fail, + T::Err: Error + Send + Sync + 'static, { Ok(maybe_env(var)?.unwrap_or(default)) } -fn require_env<T>(var: &str) -> Result<T, Error> +fn require_env<T>(var: &str) -> Result<T> where T: FromStr, - T::Err: Fail, + <T as FromStr>::Err: Error + Send + Sync + 'static, { - maybe_env(var)?.ok_or_else(|| format_err!("configuration variable {} is missing", var)) + maybe_env(var)?.with_context(|| anyhow!("configuration variable {} is missing", var)) } -fn maybe_env<T>(var: &str) -> Result<Option<T>, Error> +fn maybe_env<T>(var: &str) -> Result<Option<T>> where T: FromStr, - T::Err: Fail, + T::Err: Error + Send + Sync + 'static, { match std::env::var(var) { Ok(content) => Ok(content .parse::<T>() .map(Some) - .with_context(|_| format!("failed to parse configuration variable {}", var))?), + .with_context(|| format!("failed to parse configuration variable {}", var))?), Err(VarError::NotPresent) => { log::trace!("optional configuration variable {} is not set", var); Ok(None) } - Err(VarError::NotUnicode(_)) => bail!("configuration variable {} is not UTF-8", var), + Err(VarError::NotUnicode(_)) => Err(anyhow!("configuration variable {} is not UTF-8", var)), } } diff --git a/src/context.rs b/src/context.rs index 104182bf2..a2a54bf32 100644 --- a/src/context.rs +++ b/src/context.rs @@ -1,15 +1,15 @@ use crate::db::Pool; +use crate::error::Result; use crate::repositories::RepositoryStatsUpdater; use crate::{BuildQueue, Config, Index, Metrics, Storage}; -use failure::Error; use std::sync::Arc; pub trait Context { - fn config(&self) -> Result<Arc<Config>, Error>; - fn build_queue(&self) -> Result<Arc<BuildQueue>, Error>; - fn storage(&self) -> Result<Arc<Storage>, Error>; - fn pool(&self) -> Result<Pool, Error>; - fn metrics(&self) -> Result<Arc<Metrics>, Error>; - fn index(&self) -> Result<Arc<Index>, Error>; - fn repository_stats_updater(&self) -> Result<Arc<RepositoryStatsUpdater>, Error>; + fn config(&self) -> Result<Arc<Config>>; + fn build_queue(&self) -> Result<Arc<BuildQueue>>; + fn storage(&self) -> Result<Arc<Storage>>; + fn pool(&self) -> Result<Pool>; + fn metrics(&self) -> Result<Arc<Metrics>>; + fn index(&self) -> Result<Arc<Index>>; + fn 
repository_stats_updater(&self) -> Result<Arc<RepositoryStatsUpdater>>; } diff --git a/src/db/blacklist.rs b/src/db/blacklist.rs index 56abfd031..37c713da9 100644 --- a/src/db/blacklist.rs +++ b/src/db/blacklist.rs @@ -1,17 +1,17 @@ -use failure::{Error, Fail}; +use crate::error::Result; use postgres::Client; -#[derive(Debug, Fail)] +#[derive(Debug, thiserror::Error)] enum BlacklistError { - #[fail(display = "crate {} is already on the blacklist", _0)] + #[error("crate {0} is already on the blacklist")] CrateAlreadyOnBlacklist(String), - #[fail(display = "crate {} is not on the blacklist", _0)] + #[error("crate {0} is not on the blacklist")] CrateNotOnBlacklist(String), } /// Returns whether the given name is blacklisted. -pub fn is_blacklisted(conn: &mut Client, name: &str) -> Result<bool, Error> { +pub fn is_blacklisted(conn: &mut Client, name: &str) -> Result<bool> { let rows = conn.query( "SELECT COUNT(*) FROM blacklisted_crates WHERE crate_name = $1;", &[&name], @@ -22,7 +22,7 @@ pub fn is_blacklisted(conn: &mut Client, name: &str) -> Result<bool, Error> { } /// Returns the crate names on the blacklist, sorted ascending. -pub fn list_crates(conn: &mut Client) -> Result<Vec<String>, Error> { +pub fn list_crates(conn: &mut Client) -> Result<Vec<String>> { let rows = conn.query( "SELECT crate_name FROM blacklisted_crates ORDER BY crate_name asc;", &[], @@ -32,7 +32,7 @@ pub fn list_crates(conn: &mut Client) -> Result<Vec<String>, Error> { } /// Adds a crate to the blacklist. -pub fn add_crate(conn: &mut Client, name: &str) -> Result<(), Error> { +pub fn add_crate(conn: &mut Client, name: &str) -> Result<()> { if is_blacklisted(conn, name)? { return Err(BlacklistError::CrateAlreadyOnBlacklist(name.into()).into()); } @@ -46,7 +46,7 @@ pub fn add_crate(conn: &mut Client, name: &str) -> Result<(), Error> { } /// Removes a crate from the blacklist. -pub fn remove_crate(conn: &mut Client, name: &str) -> Result<(), Error> { +pub fn remove_crate(conn: &mut Client, name: &str) -> Result<()> { if !is_blacklisted(conn, name)? { return Err(BlacklistError::CrateNotOnBlacklist(name.into()).into()); } diff --git a/src/db/delete.rs b/src/db/delete.rs index 6f7251975..b40dedc96 100644 --- a/src/db/delete.rs +++ b/src/db/delete.rs @@ -1,18 +1,18 @@ +use crate::error::Result; use crate::Storage; -use failure::{Error, Fail}; use postgres::Client; /// List of directories in docs.rs's underlying storage (either the database or S3) containing a /// subdirectory named after the crate. Those subdirectories will be deleted.
static STORAGE_PATHS_TO_DELETE: &[&str] = &["rustdoc", "sources"]; -#[derive(Debug, Fail)] +#[derive(Debug, thiserror::Error)] enum CrateDeletionError { - #[fail(display = "crate is missing: {}", _0)] + #[error("crate is missing: {0}")] MissingCrate(String), } -pub fn delete_crate(conn: &mut Client, storage: &Storage, name: &str) -> Result<(), Error> { +pub fn delete_crate(conn: &mut Client, storage: &Storage, name: &str) -> Result<()> { let crate_id = get_id(conn, name)?; delete_crate_from_database(conn, name, crate_id)?; @@ -28,7 +28,7 @@ pub fn delete_version( storage: &Storage, name: &str, version: &str, -) -> Result<(), Error> { +) -> Result<()> { delete_version_from_database(conn, name, version)?; for prefix in STORAGE_PATHS_TO_DELETE { @@ -38,7 +38,7 @@ pub fn delete_version( Ok(()) } -fn get_id(conn: &mut Client, name: &str) -> Result<i32, Error> { +fn get_id(conn: &mut Client, name: &str) -> Result<i32> { let crate_id_res = conn.query("SELECT id FROM crates WHERE name = $1", &[&name])?; if let Some(row) = crate_id_res.into_iter().next() { Ok(row.get("id")) @@ -56,7 +56,7 @@ const METADATA: &[(&str, &str)] = &[ ("doc_coverage", "release_id"), ]; -fn delete_version_from_database(conn: &mut Client, name: &str, version: &str) -> Result<(), Error> { +fn delete_version_from_database(conn: &mut Client, name: &str, version: &str) -> Result<()> { let crate_id = get_id(conn, name)?; let mut transaction = conn.transaction()?; for &(table, column) in METADATA { @@ -88,7 +88,7 @@ fn delete_version_from_database(conn: &mut Client, name: &str, version: &str) -> transaction.commit().map_err(Into::into) } -fn delete_crate_from_database(conn: &mut Client, name: &str, crate_id: i32) -> Result<(), Error> { +fn delete_crate_from_database(conn: &mut Client, name: &str, crate_id: i32) -> Result<()> { let mut transaction = conn.transaction()?; transaction.execute( @@ -120,16 +120,15 @@ mod tests { use super::*; use crate::index::api::CrateOwner; use crate::test::{assert_success, wrapper}; - use failure::Error; use postgres::Client; - fn crate_exists(conn: &mut Client, name: &str) -> Result<bool, Error> { + fn crate_exists(conn: &mut Client, name: &str) -> Result<bool> { Ok(!conn .query("SELECT * FROM crates WHERE name = $1;", &[&name])? .is_empty()) } - fn release_exists(conn: &mut Client, id: i32) -> Result<bool, Error> { + fn release_exists(conn: &mut Client, id: i32) -> Result<bool> { Ok(!conn .query("SELECT * FROM releases WHERE id = $1;", &[&id])?
.is_empty()) @@ -179,7 +178,7 @@ mod tests { #[test] fn test_delete_version() { wrapper(|env| { - fn owners(conn: &mut Client, crate_id: i32) -> Result<Vec<String>, Error> { + fn owners(conn: &mut Client, crate_id: i32) -> Result<Vec<String>> { Ok(conn .query( "SELECT name FROM owners diff --git a/src/db/pool.rs b/src/db/pool.rs index 4bef9599a..6b6e5c770 100644 --- a/src/db/pool.rs +++ b/src/db/pool.rs @@ -122,14 +122,14 @@ impl r2d2::CustomizeConnection<Client, postgres::Error> for SetSchema { } } -#[derive(Debug, failure::Fail)] +#[derive(Debug, thiserror::Error)] pub enum PoolError { - #[fail(display = "the provided database URL was not valid")] - InvalidDatabaseUrl(#[fail(cause)] postgres::Error), + #[error("the provided database URL was not valid")] + InvalidDatabaseUrl(#[from] postgres::Error), - #[fail(display = "failed to create the database connection pool")] - PoolCreationFailed(r2d2::Error), + #[error("failed to create the database connection pool")] + PoolCreationFailed(r2d2::Error), - #[fail(display = "failed to get a database connection")] - ClientError(#[fail(cause)] r2d2::Error), + #[error("failed to get a database connection")] + ClientError(r2d2::Error), } diff --git a/src/docbuilder/crates.rs b/src/docbuilder/crates.rs index fa02143ac..a5dd6461c 100644 --- a/src/docbuilder/crates.rs +++ b/src/docbuilder/crates.rs @@ -1,5 +1,5 @@ use crate::error::Result; -use failure::err_msg; +use anyhow::{ensure, Context}; use serde_json::Value; use std::io::prelude::*; use std::io::BufReader; @@ -29,17 +29,15 @@ where continue; }; - let obj = data .as_object() .ok_or_else(|| err_msg("Not a JSON object"))?; + let obj = data.as_object().context("Not a JSON object")?; let crate_name = obj .get("name") .and_then(|n| n.as_str()) - .ok_or_else(|| err_msg("`name` not found in JSON object"))?; + .context("`name` not found in JSON object")?; let vers = obj .get("vers") .and_then(|n| n.as_str()) - .ok_or_else(|| err_msg("`vers` not found in JSON object"))?; + .context("`vers` not found in JSON object")?; // Skip yanked crates if obj.get("yanked").and_then(|n| n.as_bool()).unwrap_or(false) { @@ -65,9 +63,7 @@ pub fn crates_from_path<F>(path: &Path, func: &mut F) -> Result<()> where F: FnMut(&str, &str), { - if !path.is_dir() { - return Err(err_msg("Not a directory")); - } + ensure!(path.is_dir(), "Not a directory"); for file in path.read_dir()?
{ let file = file?; diff --git a/src/docbuilder/rustwide_builder.rs b/src/docbuilder/rustwide_builder.rs index 95838db53..c4a3e5051 100644 --- a/src/docbuilder/rustwide_builder.rs +++ b/src/docbuilder/rustwide_builder.rs @@ -11,7 +11,9 @@ use crate::storage::CompressionAlgorithms; use crate::utils::{copy_dir_all, parse_rustc_version, CargoMetadata}; use crate::{db::blacklist::is_blacklisted, utils::MetadataPackage}; use crate::{Config, Context, Index, Metrics, Storage}; +use anyhow::{anyhow, bail, Error}; use docsrs_metadata::{Metadata, DEFAULT_TARGETS, HOST_TARGET}; +use failure::Error as FailureError; use log::{debug, info, warn, LevelFilter}; use postgres::Client; use rustwide::cmd::{Command, CommandError, SandboxBuilder, SandboxImage}; @@ -64,8 +66,10 @@ impl RustwideBuilder { builder = builder.fast_init(true); } - let workspace = builder.init()?; - workspace.purge_all_build_dirs()?; + let workspace = builder.init().map_err(FailureError::compat)?; + workspace + .purge_all_build_dirs() + .map_err(FailureError::compat)?; let toolchain = Toolchain::dist(&config.toolchain); @@ -109,7 +113,7 @@ impl RustwideBuilder { if let Some(&ToolchainError::NotInstalled) = err.downcast_ref::<ToolchainError>() { Vec::new() } else { - return Err(err); + return Err(err.compat().into()); } } }; @@ -128,14 +132,20 @@ impl RustwideBuilder { // and will not be reinstalled until explicitly requested by a crate. for target in installed_targets { if !targets_to_install.remove(&target) { - self.toolchain.remove_target(&self.workspace, &target)?; + self.toolchain + .remove_target(&self.workspace, &target) + .map_err(FailureError::compat)?; } } - self.toolchain.install(&self.workspace)?; + self.toolchain + .install(&self.workspace) + .map_err(FailureError::compat)?; for target in &targets_to_install { - self.toolchain.add_target(&self.workspace, target)?; + self.toolchain + .add_target(&self.workspace, target) + .map_err(FailureError::compat)?; } // NOTE: rustup will automatically refuse to update the toolchain // if `rustfmt` is not available in the newer version @@ -165,9 +175,7 @@ impl RustwideBuilder { info!("found rustc {}", line); Ok(line.clone()) } else { - Err(::failure::err_msg( - "invalid output returned by `rustc --version`", - )) + Err(anyhow!("invalid output returned by `rustc --version`",)) } } @@ -183,40 +191,47 @@ impl RustwideBuilder { let mut build_dir = self .workspace .build_dir(&format!("essential-files-{}", rustc_version)); - build_dir.purge()?; + build_dir.purge().map_err(FailureError::compat)?; // This is an empty library crate that is supposed to always build.
let krate = Crate::crates_io(DUMMY_CRATE_NAME, DUMMY_CRATE_VERSION); - krate.fetch(&self.workspace)?; + krate.fetch(&self.workspace).map_err(FailureError::compat)?; build_dir .build(&self.toolchain, &krate, self.prepare_sandbox(&limits)) .run(|build| { - let metadata = Metadata::from_crate_root(&build.host_source_dir())?; + (|| -> Result<()> { + let metadata = Metadata::from_crate_root(&build.host_source_dir())?; - let res = self.execute_build(HOST_TARGET, true, build, &limits, &metadata, true)?; - if !res.result.successful { - failure::bail!("failed to build dummy crate for {}", self.rustc_version); - } + let res = + self.execute_build(HOST_TARGET, true, build, &limits, &metadata, true)?; + if !res.result.successful { + bail!("failed to build dummy crate for {}", self.rustc_version); + } - info!("copying essential files for {}", self.rustc_version); - let source = build.host_target_dir().join("doc"); - let dest = tempfile::Builder::new() - .prefix("essential-files") - .tempdir()?; - copy_dir_all(source, &dest)?; - add_path_into_database(&self.storage, "", &dest)?; - conn.query( - "INSERT INTO config (name, value) VALUES ('rustc_version', $1) \ + info!("copying essential files for {}", self.rustc_version); + let source = build.host_target_dir().join("doc"); + let dest = tempfile::Builder::new() + .prefix("essential-files") + .tempdir()?; + copy_dir_all(source, &dest)?; + add_path_into_database(&self.storage, "", &dest)?; + conn.query( + "INSERT INTO config (name, value) VALUES ('rustc_version', $1) \ ON CONFLICT (name) DO UPDATE SET value = $1;", - &[&Value::String(self.rustc_version.clone())], - )?; + &[&Value::String(self.rustc_version.clone())], + )?; - Ok(()) - })?; + Ok(()) + })() + .map_err(|e| failure::Error::from_boxed_compat(e.into())) + }) + .map_err(|e| e.compat())?; - build_dir.purge()?; - krate.purge_from_cache(&self.workspace)?; + build_dir.purge().map_err(FailureError::compat)?; + krate + .purge_from_cache(&self.workspace) + .map_err(FailureError::compat)?; Ok(()) } @@ -270,14 +285,13 @@ impl RustwideBuilder { let limits = Limits::for_crate(&mut conn, name)?; #[cfg(target_os = "linux")] if !self.config.disable_memory_limit { - use failure::ResultExt; - + use anyhow::Context; let mem_info = procfs::Meminfo::new().context("failed to read /proc/meminfo")?; let available = mem_info .mem_available .expect("kernel version too old for determining memory limit"); if limits.memory() as u64 > available { - failure::bail!("not enough memory to build {} {}: needed {} MiB, have {} MiB\nhelp: set DOCSRS_DISABLE_MEMORY_LIMIT=true to force a build", + bail!("not enough memory to build {} {}: needed {} MiB, have {} MiB\nhelp: set DOCSRS_DISABLE_MEMORY_LIMIT=true to force a build", name, version, limits.memory() / 1024 / 1024, available / 1024 / 1024 ); } else { @@ -290,125 +304,133 @@ impl RustwideBuilder { } let mut build_dir = self.workspace.build_dir(&format!("{}-{}", name, version)); - build_dir.purge()?; + build_dir.purge().map_err(FailureError::compat)?; let krate = match kind { PackageKind::Local(path) => Crate::local(path), PackageKind::CratesIo => Crate::crates_io(name, version), PackageKind::Registry(registry) => Crate::registry(registry, name, version), }; - krate.fetch(&self.workspace)?; + krate.fetch(&self.workspace).map_err(FailureError::compat)?; let local_storage = tempfile::Builder::new().prefix("docsrs-docs").tempdir()?; let successful = build_dir .build(&self.toolchain, &krate, self.prepare_sandbox(&limits)) .run(|build| { - use docsrs_metadata::BuildTargets; - - let mut 
has_docs = false; - let mut successful_targets = Vec::new(); - let metadata = Metadata::from_crate_root(&build.host_source_dir())?; - let BuildTargets { - default_target, - other_targets, - } = metadata.targets(self.config.include_default_targets); - - // Perform an initial build - let res = - self.execute_build(default_target, true, build, &limits, &metadata, false)?; - if res.result.successful { - if let Some(name) = res.cargo_metadata.root().library_name() { - let host_target = build.host_target_dir(); - has_docs = host_target.join("doc").join(name).is_dir(); + (|| -> Result<bool> { + use docsrs_metadata::BuildTargets; + + let mut has_docs = false; + let mut successful_targets = Vec::new(); + let metadata = Metadata::from_crate_root(&build.host_source_dir())?; + let BuildTargets { + default_target, + other_targets, + } = metadata.targets(self.config.include_default_targets); + + // Perform an initial build + let res = + self.execute_build(default_target, true, build, &limits, &metadata, false)?; + if res.result.successful { + if let Some(name) = res.cargo_metadata.root().library_name() { + let host_target = build.host_target_dir(); + has_docs = host_target.join("doc").join(name).is_dir(); + } } - } - let mut algs = HashSet::new(); - if has_docs { - debug!("adding documentation for the default target to the database"); - self.copy_docs(&build.host_target_dir(), local_storage.path(), "", true)?; - - successful_targets.push(res.target.clone()); - - // Then build the documentation for all the targets - // Limit the number of targets so that no one can try to build all 200000 possible targets - for target in other_targets.into_iter().take(limits.targets()) { - debug!("building package {} {} for {}", name, version, target); - self.build_target( - target, - build, - &limits, - local_storage.path(), - &mut successful_targets, - &metadata, - )?; - } - let new_algs = self.upload_docs(name, version, local_storage.path())?; + let mut algs = HashSet::new(); + if has_docs { + debug!("adding documentation for the default target to the database"); + self.copy_docs(&build.host_target_dir(), local_storage.path(), "", true)?; + + successful_targets.push(res.target.clone()); + + // Then build the documentation for all the targets + // Limit the number of targets so that no one can try to build all 200000 possible targets + for target in other_targets.into_iter().take(limits.targets()) { + debug!("building package {} {} for {}", name, version, target); + self.build_target( + target, + build, + &limits, + local_storage.path(), + &mut successful_targets, + &metadata, + )?; + } + let new_algs = self.upload_docs(name, version, local_storage.path())?; + algs.extend(new_algs); + }; + + // Store the sources even if the build fails + debug!("adding sources into database"); + let prefix = format!("sources/{}/{}", name, version); + let (files_list, new_algs) = + add_path_into_database(&self.storage, &prefix, build.host_source_dir())?; algs.extend(new_algs); - - let has_examples = build.host_source_dir().join("examples").is_dir(); - if res.result.successful { - self.metrics.successful_builds.inc(); - } else if res.cargo_metadata.root().is_library() { - self.metrics.failed_builds.inc(); - } else { - self.metrics.non_library_builds.inc(); - } - 
let release_data = match self.index.api().get_release_data(name, version) { - Ok(data) => data, - Err(err) => { - warn!("{:#?}", err); - ReleaseData::default() + let has_examples = build.host_source_dir().join("examples").is_dir(); + if res.result.successful { + self.metrics.successful_builds.inc(); + } else if res.cargo_metadata.root().is_library() { + self.metrics.failed_builds.inc(); + } else { + self.metrics.non_library_builds.inc(); } - }; - - let cargo_metadata = res.cargo_metadata.root(); - let repository = self.get_repo(cargo_metadata)?; - - let release_id = add_package_into_database( - &mut conn, - cargo_metadata, - &build.host_source_dir(), - &res.result, - &res.target, - files_list, - successful_targets, - &release_data, - has_docs, - has_examples, - algs, - repository, - )?; - - if let Some(doc_coverage) = res.doc_coverage { - add_doc_coverage(&mut conn, release_id, doc_coverage)?; - } - let build_id = add_build_into_database(&mut conn, release_id, &res.result)?; - let build_log_path = format!("build-logs/{}/{}.txt", build_id, default_target); - self.storage.store_one(build_log_path, res.build_log)?; + let release_data = match self.index.api().get_release_data(name, version) { + Ok(data) => data, + Err(err) => { + warn!("{:#?}", err); + ReleaseData::default() + } + }; - // Some crates.io crate data is mutable, so we proactively update it during a release - match self.index.api().get_crate_data(name) { - Ok(crate_data) => update_crate_data_in_database(&mut conn, name, &crate_data)?, - Err(err) => warn!("{:#?}", err), - } + let cargo_metadata = res.cargo_metadata.root(); + let repository = self.get_repo(cargo_metadata)?; + + let release_id = add_package_into_database( + &mut conn, + cargo_metadata, + &build.host_source_dir(), + &res.result, + &res.target, + files_list, + successful_targets, + &release_data, + has_docs, + has_examples, + algs, + repository, + )?; - Ok(res.result.successful) - })?; + if let Some(doc_coverage) = res.doc_coverage { + add_doc_coverage(&mut conn, release_id, doc_coverage)?; + } + + let build_id = add_build_into_database(&mut conn, release_id, &res.result)?; + let build_log_path = format!("build-logs/{}/{}.txt", build_id, default_target); + self.storage.store_one(build_log_path, res.build_log)?; + + // Some crates.io crate data is mutable, so we proactively update it during a release + match self.index.api().get_crate_data(name) { + Ok(crate_data) => { + update_crate_data_in_database(&mut conn, name, &crate_data)? + } + Err(err) => warn!("{:#?}", err), + } + + Ok(res.result.successful) + })() + .map_err(|e| failure::Error::from_boxed_compat(e.into())) + }) + .map_err(|e| e.compat())?; - build_dir.purge()?; - krate.purge_from_cache(&self.workspace)?; + build_dir.purge().map_err(FailureError::compat)?; + krate + .purge_from_cache(&self.workspace) + .map_err(FailureError::compat)?; local_storage.close()?; Ok(successful) } @@ -531,7 +553,7 @@ impl RustwideBuilder { let successful = logging::capture(&storage, || { self.prepare_command(build, target, metadata, limits, rustdoc_flags) - .and_then(|command| command.run().map_err(failure::Error::from)) + .and_then(|command| command.run().map_err(Error::from)) .is_ok() }); @@ -573,7 +595,9 @@ impl RustwideBuilder { // If the explicit target is not a tier one target, we need to install it. if !docsrs_metadata::DEFAULT_TARGETS.contains(&target) { // This is a no-op if the target is already installed. 
- self.toolchain.add_target(&self.workspace, target)?; + self.toolchain + .add_target(&self.workspace, target) + .map_err(FailureError::compat)?; } // Add docs.rs specific arguments @@ -679,7 +703,9 @@ } fn get_repo(&self, metadata: &MetadataPackage) -> Result<Option<i32>> { - self.repository_stats_updater.load_repository(metadata) + self.repository_stats_updater + .load_repository(metadata) + .map_err(Into::into) } } diff --git a/src/error.rs b/src/error.rs index 45ad49bec..2c47bb94f 100644 --- a/src/error.rs +++ b/src/error.rs @@ -1,18 +1,7 @@ //! Errors used in docs.rs -use std::result::Result as StdResult; +pub(crate) use anyhow::Result; -pub(crate) use failure::Error; - -pub type Result<T> = StdResult<T, Error>; - -#[derive(Debug, Copy, Clone)] +#[derive(Debug, Copy, Clone, thiserror::Error)] +#[error("the size limit for the buffer was reached")] pub(crate) struct SizeLimitReached; - -impl std::error::Error for SizeLimitReached {} - -impl std::fmt::Display for SizeLimitReached { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "the size limit for the buffer was reached") - } -} diff --git a/src/index/api.rs b/src/index/api.rs index adc541742..4c6a57859 100644 --- a/src/index/api.rs +++ b/src/index/api.rs @@ -1,5 +1,5 @@ +use anyhow::{anyhow, Context}; use chrono::{DateTime, Utc}; -use failure::{err_msg, ResultExt}; use reqwest::header::{HeaderValue, ACCEPT, USER_AGENT}; use semver::Version; use serde::Deserialize; @@ -68,7 +68,7 @@ impl Api { fn api_base(&self) -> Result<Url> { self.api_base .clone() - .ok_or_else(|| err_msg("index is missing an api base url")) + .with_context(|| anyhow!("index is missing an api base url")) } pub fn get_crate_data(&self, name: &str) -> Result<CrateData> { @@ -100,7 +100,7 @@ impl Api { let url = { let mut url = self.api_base()?; url.path_segments_mut() - .map_err(|()| err_msg("Invalid API url"))? + .map_err(|()| anyhow!("Invalid API url"))? .extend(&["api", "v1", "crates", name, "versions"]); url }; @@ -128,7 +128,7 @@ impl Api { .versions .into_iter() .find(|data| data.num == version) - .ok_or_else(|| err_msg("Could not find version in response"))?; + .with_context(|| anyhow!("Could not find version in response"))?; Ok((version.created_at, version.yanked, version.downloads)) } @@ -138,7 +138,7 @@ impl Api { let url = { let mut url = self.api_base()?; url.path_segments_mut() - .map_err(|()| err_msg("Invalid API url"))? + .map_err(|()| anyhow!("Invalid API url"))?
.extend(&["api", "v1", "crates", name, "owners"]); url }; diff --git a/src/index/crates.rs b/src/index/crates.rs index f2d7f120e..664dd10d5 100644 --- a/src/index/crates.rs +++ b/src/index/crates.rs @@ -1,6 +1,5 @@ +use anyhow::Context; use crates_index::Crate; -use failure::ResultExt; - pub(crate) struct Crates { repo: git2::Repository, } @@ -10,7 +9,7 @@ impl Crates { Self { repo } } - pub(crate) fn walk(&self, mut f: impl FnMut(Crate)) -> Result<(), failure::Error> { + pub(crate) fn walk(&self, mut f: impl FnMut(Crate)) -> Result<(), anyhow::Error> { log::debug!("Walking crates in index"); let tree = self .repo @@ -28,9 +27,9 @@ impl Crates { log::warn!("not a crate '{}'", entry.name().unwrap()); } } - Result::<(), failure::Error>::Ok(()) + Result::<(), anyhow::Error>::Ok(()) })() - .with_context(|_| { + .with_context(|| { format!( "loading crate details from '{}'", entry.name().unwrap_or("") diff --git a/src/index/mod.rs b/src/index/mod.rs index b8a6c26d9..f95c5a528 100644 --- a/src/index/mod.rs +++ b/src/index/mod.rs @@ -1,10 +1,10 @@ use std::{path::PathBuf, process::Command}; +use anyhow::Context; use url::Url; use self::api::Api; use crate::error::Result; -use failure::ResultExt; pub(crate) mod api; #[cfg(feature = "consistency_check")] @@ -36,7 +36,7 @@ fn load_config(repo: &git2::Repository) -> Result { .tree()?; let file = tree .get_name("config.json") - .ok_or_else(|| failure::format_err!("registry index missing config"))?; + .with_context(|| anyhow::anyhow!("registry index missing config"))?; let config = serde_json::from_slice(repo.find_blob(file.id())?.content())?; Ok(config) } diff --git a/src/metrics/mod.rs b/src/metrics/mod.rs index d99c79b3b..f0591903b 100644 --- a/src/metrics/mod.rs +++ b/src/metrics/mod.rs @@ -5,8 +5,8 @@ use self::macros::MetricFromOpts; use crate::db::Pool; use crate::target::TargetAtom; use crate::BuildQueue; +use anyhow::Error; use dashmap::DashMap; -use failure::Error; use prometheus::proto::MetricFamily; use std::time::{Duration, Instant}; diff --git a/src/repositories/github.rs b/src/repositories/github.rs index e3c6a8a20..f913d235a 100644 --- a/src/repositories/github.rs +++ b/src/repositories/github.rs @@ -144,7 +144,7 @@ impl RepositoryForge for GitHub { ("RATE_LIMITED", []) => { return Err(RateLimitReached.into()); } - _ => failure::bail!("error updating repositories: {}", error.message), + _ => anyhow::bail!("error updating repositories: {}", error.message), } } @@ -258,6 +258,7 @@ struct GraphIssues { mod tests { use super::GitHub; use crate::repositories::updater::{repository_name, RepositoryForge}; + use crate::repositories::RateLimitReached; use mockito::mock; #[test] @@ -275,7 +276,7 @@ mod tests { .create(); match updater.fetch_repositories(&[String::new()]) { - Err(e) if format!("{:?}", e).contains("RateLimitReached") => {} + Err(e) if e.downcast_ref::().is_some() => {} x => panic!("Expected Err(RateLimitReached), found: {:?}", x), } Ok(()) @@ -295,7 +296,7 @@ mod tests { .create(); match updater.fetch_repositories(&[String::new()]) { - Err(e) if format!("{:?}", e).contains("RateLimitReached") => {} + Err(e) if e.downcast_ref::().is_some() => {} x => panic!("Expected Err(RateLimitReached), found: {:?}", x), } Ok(()) diff --git a/src/repositories/gitlab.rs b/src/repositories/gitlab.rs index 46c912bc9..e24076bb1 100644 --- a/src/repositories/gitlab.rs +++ b/src/repositories/gitlab.rs @@ -126,7 +126,7 @@ impl RepositoryForge for GitLab { if let Some(data) = response.data { if !response.errors.is_empty() { - failure::bail!("error 
updating repositories: {:?}", response.errors); + anyhow::bail!("error updating repositories: {:?}", response.errors); } for node in data.projects.nodes.into_iter().flatten() { let repo = Repository { @@ -154,7 +154,7 @@ impl RepositoryForge for GitLab { } else if rate_limit.map(|x| x < 1).unwrap_or(false) { Err(RateLimitReached.into()) } else { - failure::bail!("no data") + anyhow::bail!("no data") } } } @@ -248,6 +248,7 @@ struct GraphProject { mod tests { use super::GitLab; use crate::repositories::updater::{repository_name, RepositoryForge}; + use crate::repositories::RateLimitReached; use mockito::mock; #[test] @@ -263,11 +264,11 @@ mod tests { match updater.fetch_repository( &repository_name("https://gitlab.com/foo/bar").expect("repository_name failed"), ) { - Err(e) if format!("{:?}", e).contains("RateLimitReached") => {} + Err(e) if e.downcast_ref::().is_some() => {} x => panic!("Expected Err(RateLimitReached), found: {:?}", x), } match updater.fetch_repositories(&[String::new()]) { - Err(e) if format!("{:?}", e).contains("RateLimitReached") => {} + Err(e) if e.downcast_ref::().is_some() => {} x => panic!("Expected Err(RateLimitReached), found: {:?}", x), } } diff --git a/src/repositories/mod.rs b/src/repositories/mod.rs index 9a298eb9b..9a6932390 100644 --- a/src/repositories/mod.rs +++ b/src/repositories/mod.rs @@ -11,8 +11,8 @@ pub const APP_USER_AGENT: &str = concat!( include_str!(concat!(env!("OUT_DIR"), "/git_version")) ); -#[derive(Debug, failure::Fail)] -#[fail(display = "rate limit reached")] +#[derive(Debug, thiserror::Error)] +#[error("rate limit reached")] struct RateLimitReached; mod github; diff --git a/src/repositories/updater.rs b/src/repositories/updater.rs index df8425f40..a985350b7 100644 --- a/src/repositories/updater.rs +++ b/src/repositories/updater.rs @@ -119,7 +119,7 @@ impl RepositoryStatsUpdater { }; return match res { Ok(repo_id) => Ok(Some(repo_id)), - Err(err) => failure::bail!("failed to collect `{}` stats: {}", updater.host(), err), + Err(err) => anyhow::bail!("failed to collect `{}` stats: {}", updater.host(), err), }; } // It means that none of our updaters have a matching host. 
diff --git a/src/storage/compression.rs b/src/storage/compression.rs index b6452d795..0e50bfe5e 100644 --- a/src/storage/compression.rs +++ b/src/storage/compression.rs @@ -1,4 +1,4 @@ -use failure::Error; +use anyhow::Error; use std::{collections::HashSet, fmt, io::Read}; pub type CompressionAlgorithms = HashSet<CompressionAlgorithm>; diff --git a/src/storage/database.rs b/src/storage/database.rs index 577ccfa8a..7a427cadb 100644 --- a/src/storage/database.rs +++ b/src/storage/database.rs @@ -1,7 +1,7 @@ use super::{Blob, StorageTransaction}; use crate::db::Pool; +use crate::error::Result; use crate::Metrics; -use failure::Error; use postgres::Transaction; use std::sync::Arc; @@ -15,13 +15,13 @@ impl DatabaseBackend { Self { pool, metrics } } - pub(super) fn exists(&self, path: &str) -> Result<bool, Error> { + pub(super) fn exists(&self, path: &str) -> Result<bool> { let query = "SELECT COUNT(*) > 0 FROM files WHERE path = $1"; let mut conn = self.pool.get()?; Ok(conn.query(query, &[&path])?[0].get(0)) } - pub(super) fn get(&self, path: &str, max_size: usize) -> Result<Blob, Error> { + pub(super) fn get(&self, path: &str, max_size: usize) -> Result<Blob> { use std::convert::TryInto; // The maximum size for a BYTEA (the type used for `content`) is 1GB, so this cast is safe: @@ -67,7 +67,7 @@ impl DatabaseBackend { } } - pub(super) fn start_connection(&self) -> Result<DatabaseClient, Error> { + pub(super) fn start_connection(&self) -> Result<DatabaseClient> { Ok(DatabaseClient { conn: self.pool.get()?, metrics: self.metrics.clone(), @@ -81,9 +81,7 @@ pub(super) struct DatabaseClient { } impl DatabaseClient { - pub(super) fn start_storage_transaction( - &mut self, - ) -> Result<DatabaseStorageTransaction<'_>, Error> { + pub(super) fn start_storage_transaction(&mut self) -> Result<DatabaseStorageTransaction<'_>> { Ok(DatabaseStorageTransaction { transaction: self.conn.transaction()?, metrics: &self.metrics, @@ -97,7 +95,7 @@ pub(super) struct DatabaseStorageTransaction<'a> { } impl<'a> StorageTransaction for DatabaseStorageTransaction<'a> { - fn store_batch(&mut self, batch: Vec<Blob>) -> Result<(), Error> { + fn store_batch(&mut self, batch: Vec<Blob>) -> Result<()> { for blob in batch { let compression = blob.compression.map(|alg| alg as i32); self.transaction.query( @@ -112,7 +110,7 @@ impl<'a> StorageTransaction for DatabaseStorageTransaction<'a> { Ok(()) } - fn delete_prefix(&mut self, prefix: &str) -> Result<(), Error> { + fn delete_prefix(&mut self, prefix: &str) -> Result<()> { self.transaction.execute( "DELETE FROM files WHERE path LIKE $1;", &[&format!("{}%", prefix.replace('%', "\\%"))], @@ -120,7 +118,7 @@ impl<'a> StorageTransaction for DatabaseStorageTransaction<'a> { Ok(()) } - fn complete(self: Box<Self>) -> Result<(), Error> { + fn complete(self: Box<Self>) -> Result<()> { self.transaction.commit()?; Ok(()) } diff --git a/src/storage/mod.rs b/src/storage/mod.rs index c4d32b9c3..436e4f51f 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -5,9 +5,10 @@ mod s3; pub use self::compression::{compress, decompress, CompressionAlgorithm, CompressionAlgorithms}; use self::database::DatabaseBackend; use self::s3::S3Backend; +use crate::error::Result; use crate::{db::Pool, Config, Metrics}; +use anyhow::ensure; use chrono::{DateTime, Utc}; -use failure::{err_msg, Error}; use path_slash::PathExt; use std::{ collections::{HashMap, HashSet}, @@ -19,8 +20,8 @@ use std::{ const MAX_CONCURRENT_UPLOADS: usize = 1000; -#[derive(Debug, failure::Fail)] -#[fail(display = "path not found")] +#[derive(Debug, thiserror::Error)] +#[error("path not found")] pub(crate) struct PathNotFoundError; #[derive(Clone, Debug, PartialEq, Eq, Hash)] @@ -32,7 +33,7 @@ pub(crate) struct 
Blob { pub(crate) compression: Option<CompressionAlgorithm>, } -fn get_file_list_from_dir<P: AsRef<Path>>(path: P, files: &mut Vec<PathBuf>) -> Result<(), Error> { +fn get_file_list_from_dir<P: AsRef<Path>>(path: P, files: &mut Vec<PathBuf>) -> Result<()> { let path = path.as_ref(); for file in path.read_dir()? { @@ -48,13 +49,13 @@ fn get_file_list_from_dir<P: AsRef<Path>>(path: P, files: &mut Vec<PathBuf>) -> Ok(()) } -pub fn get_file_list<P: AsRef<Path>>(path: P) -> Result<Vec<PathBuf>, Error> { +pub fn get_file_list<P: AsRef<Path>>(path: P) -> Result<Vec<PathBuf>> { let path = path.as_ref(); let mut files = Vec::new(); - if !path.exists() { - return Err(err_msg("File not found")); - } else if path.is_file() { + ensure!(path.exists(), "File not found"); + + if path.is_file() { files.push(PathBuf::from(path.file_name().unwrap())); } else if path.is_dir() { get_file_list_from_dir(path, &mut files)?; @@ -67,8 +68,8 @@ pub fn get_file_list<P: AsRef<Path>>(path: P) -> Result<Vec<PathBuf>, Error> { Ok(files) } -#[derive(Debug, failure::Fail)] -#[fail(display = "invalid storage backend")] +#[derive(Debug, thiserror::Error)] +#[error("invalid storage backend")] pub(crate) struct InvalidStorageBackendError; #[derive(Debug)] @@ -99,7 +100,7 @@ pub struct Storage { } impl Storage { - pub fn new(pool: Pool, metrics: Arc<Metrics>, config: &Config) -> Result<Self, Error> { + pub fn new(pool: Pool, metrics: Arc<Metrics>, config: &Config) -> Result<Self> { Ok(Storage { backend: match config.storage_backend { StorageKind::Database => { @@ -110,14 +111,14 @@ }) } - pub(crate) fn exists(&self, path: &str) -> Result<bool, Error> { + pub(crate) fn exists(&self, path: &str) -> Result<bool> { match &self.backend { StorageBackend::Database(db) => db.exists(path), StorageBackend::S3(s3) => s3.exists(path), } } - pub(crate) fn get(&self, path: &str, max_size: usize) -> Result<Blob, Error> { + pub(crate) fn get(&self, path: &str, max_size: usize) -> Result<Blob> { let mut blob = match &self.backend { StorageBackend::Database(db) => db.get(path, max_size), StorageBackend::S3(s3) => s3.get(path, max_size), @@ -129,9 +130,9 @@ Ok(blob) } - fn transaction<T, F>(&self, f: F) -> Result<T, Error> + fn transaction<T, F>(&self, f: F) -> Result<T> where - F: FnOnce(&mut dyn StorageTransaction) -> Result<T, Error>, + F: FnOnce(&mut dyn StorageTransaction) -> Result<T>, { let mut conn; let mut trans: Box<dyn StorageTransaction> = match &self.backend { @@ -154,7 +155,7 @@ &self, prefix: &Path, root_dir: &Path, - ) -> Result<(HashMap<PathBuf, String>, HashSet<CompressionAlgorithm>), Error> { + ) -> Result<(HashMap<PathBuf, String>, HashSet<CompressionAlgorithm>)> { let mut file_paths_and_mimes = HashMap::new(); let mut algs = HashSet::with_capacity(1); @@ -168,7 +169,7 @@ .ok() .map(|file| (file_path, file)) }) - .map(|(file_path, file)| -> Result<_, Error> { + .map(|(file_path, file)| -> Result<_> { let alg = CompressionAlgorithm::default(); let content = compress(file, alg)?; let bucket_path = prefix.join(&file_path).to_slash().unwrap(); @@ -192,7 +193,7 @@ } #[cfg(test)] - pub(crate) fn store_blobs(&self, blobs: Vec<Blob>) -> Result<(), Error> { + pub(crate) fn store_blobs(&self, blobs: Vec<Blob>) -> Result<()> { self.store_inner(blobs.into_iter().map(Ok)) } @@ -202,7 +203,7 @@ &self, path: impl Into<String>, content: impl Into<Vec<u8>>, - ) -> Result<CompressionAlgorithm, Error> { + ) -> Result<CompressionAlgorithm> { let path = path.into(); let content = content.into(); let alg = CompressionAlgorithm::default(); @@ -221,17 +222,14 @@ Ok(alg) } - fn store_inner( - &self, - blobs: impl IntoIterator<Item = Result<Blob, Error>>, - ) -> Result<(), Error> { + fn store_inner(&self, blobs: impl IntoIterator<Item = Result<Blob>>) -> Result<()> { let mut blobs = blobs.into_iter(); self.transaction(|trans| { loop { let batch: Vec<_> = blobs .by_ref() .take(MAX_CONCURRENT_UPLOADS) - .collect::<Result<_, Error>>()?; + .collect::<Result<_>>()?; if batch.is_empty() { 
break; } @@ -241,7 +239,7 @@ }) } - pub(crate) fn delete_prefix(&self, prefix: &str) -> Result<(), Error> { + pub(crate) fn delete_prefix(&self, prefix: &str) -> Result<()> { self.transaction(|trans| trans.delete_prefix(prefix)) } @@ -249,7 +247,7 @@ // we leak the web server, and Drop isn't executed in that case (since the leaked web server // still holds a reference to the storage). #[cfg(test)] - pub(crate) fn cleanup_after_test(&self) -> Result<(), Error> { + pub(crate) fn cleanup_after_test(&self) -> Result<()> { if let StorageBackend::S3(s3) = &self.backend { s3.cleanup_after_test()?; } @@ -267,9 +265,9 @@ impl std::fmt::Debug for Storage { } } trait StorageTransaction { - fn store_batch(&mut self, batch: Vec<Blob>) -> Result<(), Error>; - fn delete_prefix(&mut self, prefix: &str) -> Result<(), Error>; - fn complete(self: Box<Self>) -> Result<(), Error>; + fn store_batch(&mut self, batch: Vec<Blob>) -> Result<()>; + fn delete_prefix(&mut self, prefix: &str) -> Result<()>; + fn complete(self: Box<Self>) -> Result<()>; } fn detect_mime(file_path: impl AsRef<Path>) -> &'static str { @@ -343,7 +341,7 @@ mod backend_tests { use super::*; use std::fs; - fn test_exists(storage: &Storage) -> Result<(), Error> { + fn test_exists(storage: &Storage) -> Result<()> { assert!(!storage.exists("path/to/file.txt").unwrap()); let blob = Blob { path: "path/to/file.txt".into(), @@ -358,7 +356,7 @@ Ok(()) } - fn test_get_object(storage: &Storage) -> Result<(), Error> { + fn test_get_object(storage: &Storage) -> Result<()> { let blob = Blob { path: "foo/bar.txt".into(), mime: "text/plain".into(), @@ -384,7 +382,7 @@ Ok(()) } - fn test_get_too_big(storage: &Storage) -> Result<(), Error> { + fn test_get_too_big(storage: &Storage) -> Result<()> { const MAX_SIZE: usize = 1024; let small_blob = Blob { @@ -418,7 +416,7 @@ Ok(()) } - fn test_store_blobs(storage: &Storage, metrics: &Metrics) -> Result<(), Error> { + fn test_store_blobs(storage: &Storage, metrics: &Metrics) -> Result<()> { const NAMES: &[&str] = &[ "a", "b", @@ -451,7 +449,7 @@ Ok(()) } - fn test_store_all(storage: &Storage, metrics: &Metrics) -> Result<(), Error> { + fn test_store_all(storage: &Storage, metrics: &Metrics) -> Result<()> { let dir = tempfile::Builder::new() .prefix("docs.rs-upload-test") .tempdir()?; @@ -498,7 +496,7 @@ Ok(()) } - fn test_batched_uploads(storage: &Storage) -> Result<(), Error> { + fn test_batched_uploads(storage: &Storage) -> Result<()> { let now = Utc::now(); let uploads: Vec<_> = (0..=MAX_CONCURRENT_UPLOADS + 1) .map(|i| { @@ -523,7 +521,7 @@ Ok(()) } - fn test_delete_prefix(storage: &Storage) -> Result<(), Error> { + fn test_delete_prefix(storage: &Storage) -> Result<()> { test_deletion( storage, "foo/bar/", @@ -539,7 +537,7 @@ ) } - fn test_delete_percent(storage: &Storage) -> Result<(), Error> { + fn test_delete_percent(storage: &Storage) -> Result<()> { // PostgreSQL treats "%" as a special char when deleting a prefix. Make sure any "%" in the // provided prefix is properly escaped.
test_deletion( @@ -557,7 +555,7 @@ mod backend_tests { start: &[&str], present: &[&str], missing: &[&str], - ) -> Result<(), Error> { + ) -> Result<()> { storage.store_blobs( start .iter() diff --git a/src/storage/s3.rs b/src/storage/s3.rs index c4802e731..14b418cc6 100644 --- a/src/storage/s3.rs +++ b/src/storage/s3.rs @@ -1,7 +1,7 @@ use super::{Blob, StorageTransaction}; use crate::{Config, Metrics}; +use anyhow::{anyhow, Context, Error}; use chrono::{DateTime, NaiveDateTime, Utc}; -use failure::Error; use futures_util::{ future::TryFutureExt, stream::{FuturesUnordered, StreamExt}, @@ -113,7 +113,7 @@ impl S3Backend { let mut body = res .body - .ok_or_else(|| failure::err_msg("Received a response from S3 with no body"))?; + .with_context(|| anyhow!("Received a response from S3 with no body"))?; while let Some(data) = body.next().await.transpose()? { content.write_all(data.as_ref())?; @@ -262,7 +262,7 @@ impl<'a> StorageTransaction for S3StorageTransaction<'a> { log::error!("error deleting file from s3: {:?}", err); } - failure::bail!("deleting from s3 failed"); + anyhow::bail!("deleting from s3 failed"); } continuation_token = list.next_continuation_token; diff --git a/src/test/fakes.rs b/src/test/fakes.rs index 5ff0cd4e6..e61757712 100644 --- a/src/test/fakes.rs +++ b/src/test/fakes.rs @@ -3,8 +3,8 @@ use crate::docbuilder::{BuildResult, DocCoverage}; use crate::index::api::{CrateData, CrateOwner, ReleaseData}; use crate::storage::Storage; use crate::utils::{Dependency, MetadataPackage, Target}; +use anyhow::{Context, Error}; use chrono::{DateTime, Utc}; -use failure::{Error, ResultExt}; use postgres::Client; use std::collections::HashMap; use std::sync::Arc; @@ -278,13 +278,13 @@ impl<'a> FakeRelease<'a> { for (path, data) in files { if path.starts_with('/') { - failure::bail!("absolute paths not supported"); + anyhow::bail!("absolute paths not supported"); } // allow `src/main.rs` if let Some(parent) = Path::new(path).parent() { let path = path_prefix.join(parent); fs::create_dir_all(&path) - .with_context(|_| format!("failed to create {}", path.display()))?; + .with_context(|| format!("failed to create {}", path.display()))?; } let file = path_prefix.join(&path); log::debug!("writing file {}", file.display()); diff --git a/src/test/mod.rs b/src/test/mod.rs index b965e96e0..8ebf26339 100644 --- a/src/test/mod.rs +++ b/src/test/mod.rs @@ -2,11 +2,11 @@ mod fakes; pub(crate) use self::fakes::FakeBuild; use crate::db::{Pool, PoolClient}; +use crate::error::Result; use crate::repositories::RepositoryStatsUpdater; use crate::storage::{Storage, StorageKind}; use crate::web::Server; use crate::{BuildQueue, Config, Context, Index, Metrics}; -use failure::Error; use log::error; use once_cell::unsync::OnceCell; use postgres::Client as Connection; @@ -17,7 +17,7 @@ use reqwest::{ use std::fs; use std::{panic, sync::Arc}; -pub(crate) fn wrapper(f: impl FnOnce(&TestEnvironment) -> Result<(), Error>) { +pub(crate) fn wrapper(f: impl FnOnce(&TestEnvironment) -> Result<()>) { let _ = dotenv::dotenv(); let env = TestEnvironment::new(); @@ -31,7 +31,7 @@ pub(crate) fn wrapper(f: impl FnOnce(&TestEnvironment) -> Result<(), Error>) { if let Err(err) = result { eprintln!("the test failed: {}", err); - for cause in err.iter_causes() { + for cause in err.chain() { eprintln!(" caused by: {}", cause); } @@ -42,25 +42,21 @@ pub(crate) fn wrapper(f: impl FnOnce(&TestEnvironment) -> Result<(), Error>) { } /// Make sure that a URL returns a status code between 200-299 -pub(crate) fn assert_success(path: &str, 
web: &TestFrontend) -> Result<(), Error> { +pub(crate) fn assert_success(path: &str, web: &TestFrontend) -> Result<()> { let status = web.get(path).send()?.status(); assert!(status.is_success(), "failed to GET {}: {}", path, status); Ok(()) } /// Make sure that a URL returns a 404 -pub(crate) fn assert_not_found(path: &str, web: &TestFrontend) -> Result<(), Error> { +pub(crate) fn assert_not_found(path: &str, web: &TestFrontend) -> Result<()> { let status = web.get(path).send()?.status(); assert_eq!(status, 404, "GET {} should have been a 404", path); Ok(()) } /// Make sure that a URL redirects to a specific page -pub(crate) fn assert_redirect( - path: &str, - expected_target: &str, - web: &TestFrontend, -) -> Result<(), Error> { +pub(crate) fn assert_redirect(path: &str, expected_target: &str, web: &TestFrontend) -> Result<()> { // Reqwest follows redirects automatically let response = web.get(path).send()?; let status = response.status(); @@ -249,31 +245,31 @@ impl TestEnvironment { } impl Context for TestEnvironment { - fn config(&self) -> Result<Arc<Config>, Error> { + fn config(&self) -> Result<Arc<Config>> { Ok(TestEnvironment::config(self)) } - fn build_queue(&self) -> Result<Arc<BuildQueue>, Error> { + fn build_queue(&self) -> Result<Arc<BuildQueue>> { Ok(TestEnvironment::build_queue(self)) } - fn storage(&self) -> Result<Arc<Storage>, Error> { + fn storage(&self) -> Result<Arc<Storage>> { Ok(TestEnvironment::storage(self)) } - fn pool(&self) -> Result<Pool, Error> { + fn pool(&self) -> Result<Pool> { Ok(self.db().pool()) } - fn metrics(&self) -> Result<Arc<Metrics>, Error> { + fn metrics(&self) -> Result<Arc<Metrics>> { Ok(self.metrics()) } - fn index(&self) -> Result<Arc<Index>, Error> { + fn index(&self) -> Result<Arc<Index>> { Ok(self.index()) } - fn repository_stats_updater(&self) -> Result<Arc<RepositoryStatsUpdater>, Error> { + fn repository_stats_updater(&self) -> Result<Arc<RepositoryStatsUpdater>> { Ok(self.repository_stats_updater()) } } @@ -285,7 +281,7 @@ pub(crate) struct TestDatabase { } impl TestDatabase { - fn new(config: &Config, metrics: Arc<Metrics>) -> Result<Self, Error> { + fn new(config: &Config, metrics: Arc<Metrics>) -> Result<Self> { // A random schema name is generated and used for the current connection. This allows each // test to create a fresh instance of the database to run within. let schema = format!("docs_rs_test_schema_{}", rand::random::()); diff --git a/src/utils/cargo_metadata.rs b/src/utils/cargo_metadata.rs index c84aceacf..e9088313f 100644 --- a/src/utils/cargo_metadata.rs +++ b/src/utils/cargo_metadata.rs @@ -1,4 +1,5 @@ use crate::error::Result; +use anyhow::bail; use rustwide::{cmd::Command, Toolchain, Workspace}; use serde::{Deserialize, Serialize}; use std::collections::HashMap; @@ -24,9 +25,7 @@ impl CargoMetadata { let metadata = if let (Some(serialized), None) = (iter.next(), iter.next()) { serde_json::from_str::(serialized)?
diff --git a/src/utils/cargo_metadata.rs b/src/utils/cargo_metadata.rs index c84aceacf..e9088313f 100644 --- a/src/utils/cargo_metadata.rs +++ b/src/utils/cargo_metadata.rs @@ -1,4 +1,5 @@ use crate::error::Result; +use anyhow::bail; use rustwide::{cmd::Command, Toolchain, Workspace}; use serde::{Deserialize, Serialize}; use std::collections::HashMap; @@ -24,9 +25,7 @@ impl CargoMetadata { let metadata = if let (Some(serialized), None) = (iter.next(), iter.next()) { serde_json::from_str::<DeserializedMetadata>(serialized)? } else { - return Err(::failure::err_msg( - "invalid output returned by `cargo metadata`", - )); + bail!("invalid output returned by `cargo metadata`"); }; let root = metadata.resolve.root;
diff --git a/src/utils/consistency/db.rs b/src/utils/consistency/db.rs index 52cc4df91..1df11ced4 100644 --- a/src/utils/consistency/db.rs +++ b/src/utils/consistency/db.rs @@ -1,7 +1,7 @@ use super::data::{Crate, CrateName, Data, Release, Version}; use std::collections::BTreeMap; -pub(crate) fn load(conn: &mut postgres::Client) -> Result<Data, failure::Error> { +pub(crate) fn load(conn: &mut postgres::Client) -> Result<Data, anyhow::Error> { let rows = conn.query( " SELECT
diff --git a/src/utils/consistency/index.rs b/src/utils/consistency/index.rs index 4b0a30360..3a0f83936 100644 --- a/src/utils/consistency/index.rs +++ b/src/utils/consistency/index.rs @@ -1,7 +1,7 @@ use super::data::{Crate, CrateName, Data, Release, Version}; use crate::Index; -pub(crate) fn load(index: &Index) -> Result<Data, failure::Error> { +pub(crate) fn load(index: &Index) -> Result<Data, anyhow::Error> { let mut data = Data::default(); index.crates()?.walk(|krate| {
diff --git a/src/utils/consistency/mod.rs b/src/utils/consistency/mod.rs index 5ee568b3b..da676c469 100644 --- a/src/utils/consistency/mod.rs +++ b/src/utils/consistency/mod.rs @@ -1,6 +1,5 @@ use self::diff::{Diff, Diffable}; use crate::Index; -use failure::ResultExt; mod data; mod db; @@ -11,9 +10,9 @@ pub fn run_check( conn: &mut postgres::Client, index: &Index, dry_run: bool, -) -> Result<(), failure::Error> { +) -> Result<(), anyhow::Error> { if !dry_run { - failure::bail!("TODO: only a --dry-run synchronization is supported currently"); + anyhow::bail!("TODO: only a --dry-run synchronization is supported currently"); } log::info!("Loading data from database...");
diff --git a/src/utils/daemon.rs b/src/utils/daemon.rs index f27770f1a..abf7c49d7 100644 --- a/src/utils/daemon.rs +++ b/src/utils/daemon.rs @@ -3,7 +3,7 @@ //! This daemon will start web server, track new packages and build them use crate::{utils::queue_builder, Context, DocBuilder, RustwideBuilder}; -use failure::Error; +use anyhow::Error; use log::{debug, error, info}; use std::thread; use std::time::{Duration, Instant}; @@ -87,11 +87,11 @@ pub fn start_daemon(context: &dyn Context, enable_registry_watcher: bool) -> Res )?; // Never returns; `server` blocks indefinitely when dropped - // NOTE: if a failure occurred earlier in `start_daemon`, the server will _not_ be joined - + // NOTE: if an error occurred earlier in `start_daemon`, the server will _not_ be joined - // instead it will get killed when the process exits. server_thread .join() - .map_err(|_| failure::err_msg("web server panicked")) + .map_err(|_| anyhow::anyhow!("web server panicked")) } pub(crate) fn cron<F>(name: &'static str, interval: Duration, exec: F) -> Result<(), Error>
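Note: `JoinHandle::join` fails with a `Box<dyn Any>` panic payload, which implements neither `std::error::Error` nor `Display`, so it cannot flow through `?` directly; mapping it to a fresh `anyhow::Error` is the usual bridge, as `start_daemon` does above. A self-contained sketch of the pattern (the `spawn_server` helper here is hypothetical):

```rust
use anyhow::{anyhow, Error};
use std::thread;

// Hypothetical stand-in for the real web server thread.
fn spawn_server() -> thread::JoinHandle<Result<(), Error>> {
    thread::spawn(|| Ok(()))
}

fn run() -> Result<(), Error> {
    // join() returns Err(Box<dyn Any>) if the thread panicked; the payload
    // is not an error type, so replace it with a fresh anyhow::Error.
    spawn_server()
        .join()
        .map_err(|_| anyhow!("web server panicked"))?
}
```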
diff --git a/src/utils/queue_builder.rs b/src/utils/queue_builder.rs index 2d0e5c4a0..f5b4dd0ef 100644 --- a/src/utils/queue_builder.rs +++ b/src/utils/queue_builder.rs @@ -1,5 +1,5 @@ use crate::{docbuilder::RustwideBuilder, utils::pubsubhubbub, BuildQueue, DocBuilder}; -use failure::Error; +use anyhow::Error; use log::{debug, error, info, warn}; use std::panic::{catch_unwind, AssertUnwindSafe}; use std::sync::Arc;
diff --git a/src/utils/rustc_version.rs b/src/utils/rustc_version.rs index afd854900..e5fb821e0 100644 --- a/src/utils/rustc_version.rs +++ b/src/utils/rustc_version.rs @@ -1,5 +1,5 @@ use crate::error::Result; -use failure::err_msg; +use anyhow::{anyhow, Context}; use regex::Regex; /// Parses rustc commit hash from rustc version string @@ -7,7 +7,7 @@ pub fn parse_rustc_version<S: AsRef<str>>(version: S) -> Result<String> { let version_regex = Regex::new(r" ([\w.-]+) \((\w+) (\d+)-(\d+)-(\d+)\)")?; let captures = version_regex .captures(version.as_ref()) - .ok_or_else(|| err_msg("Failed to parse rustc version"))?; + .with_context(|| anyhow!("Failed to parse rustc version"))?; Ok(format!( "{}{}{}-{}-{}",
diff --git a/src/web/crate_details.rs b/src/web/crate_details.rs index 9172904f2..b247a3ce0 100644 --- a/src/web/crate_details.rs +++ b/src/web/crate_details.rs @@ -309,7 +309,7 @@ mod tests { use super::*; use crate::index::api::CrateOwner; use crate::test::{wrapper, TestDatabase}; - use failure::Error; + use anyhow::{Context, Error}; use kuchiki::traits::TendrilSink; use std::collections::HashMap; @@ -325,7 +325,7 @@ mod tests { version, db.repository_stats_updater(), ) - .ok_or_else(|| failure::err_msg("could not fetch crate details"))?; + .with_context(|| anyhow::anyhow!("could not fetch crate details"))?; assert_eq!( details.last_successful_build,
diff --git a/src/web/error.rs b/src/web/error.rs index f06d0aa5e..048d8b7ef 100644 --- a/src/web/error.rs +++ b/src/web/error.rs @@ -2,37 +2,26 @@ use crate::{ db::PoolError, web::{page::WebPage, releases::Search, ErrorPage}, }; -use failure::Fail; use iron::{status::Status, Handler, IronError, IronResult, Request, Response}; -use std::{error::Error, fmt}; -#[derive(Debug, Copy, Clone)] +#[derive(Debug, Copy, Clone, thiserror::Error)] pub enum Nope { + #[error("Requested resource not found")] ResourceNotFound, + #[error("Requested build not found")] BuildNotFound, + #[error("Requested crate not found")] CrateNotFound, + #[error("Requested owner not found")] OwnerNotFound, + #[error("Requested crate does not have specified version")] VersionNotFound, + #[error("Search yielded no results")] NoResults, + #[error("Internal server error")] InternalServerError, } -impl fmt::Display for Nope { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str(match *self { - Nope::ResourceNotFound => "Requested resource not found", - Nope::BuildNotFound => "Requested build not found", - Nope::CrateNotFound => "Requested crate not found", - Nope::OwnerNotFound => "Requested owner not found", - Nope::VersionNotFound => "Requested crate does not have specified version", - Nope::NoResults => "Search yielded no results", - Nope::InternalServerError => "Internal server error", - }) - } -} - -impl Error for Nope {} - impl From<Nope> for IronError { fn from(err: Nope) -> IronError { use iron::status; @@ -139,7 +128,7 @@ impl Handler for Nope { impl From<PoolError> for IronError { fn from(err: PoolError) -> IronError { - IronError::new(err.compat(), Status::InternalServerError) + IronError::new(err, Status::InternalServerError) } }
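Note: the `thiserror` derive above replaces the hand-written `fmt::Display` and `std::error::Error` impls; each `#[error("...")]` attribute supplies that variant's `Display` output. The same pattern on a small hypothetical enum:

```rust
use thiserror::Error;

// The derive generates Display (from the #[error] strings) and the
// std::error::Error impl that previously had to be written by hand.
#[derive(Debug, Copy, Clone, Error)]
enum LookupError {
    #[error("Requested item not found")]
    NotFound,
    #[error("Internal server error")]
    Internal,
}

fn main() {
    assert_eq!(LookupError::NotFound.to_string(), "Requested item not found");
    assert_eq!(LookupError::Internal.to_string(), "Internal server error");
}
```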
diff --git a/src/web/extensions.rs b/src/web/extensions.rs index c8ac60120..709b694ed 100644 --- a/src/web/extensions.rs +++ b/src/web/extensions.rs @@ -2,7 +2,7 @@ use crate::web::page::TemplateData; use crate::{ db::Pool, repositories::RepositoryStatsUpdater, BuildQueue, Config, Context, Metrics, Storage, }; -use failure::Error; +use anyhow::Error; use iron::{BeforeMiddleware, IronResult, Request}; use std::sync::Arc;
diff --git a/src/web/mod.rs b/src/web/mod.rs index 9e2c2637d..c846d0589 100644 --- a/src/web/mod.rs +++ b/src/web/mod.rs @@ -94,11 +94,11 @@ mod source; mod statics; use crate::{impl_webpage, Context}; +use anyhow::Error; use chrono::{DateTime, Utc}; use csp::CspMiddleware; use error::Nope; use extensions::InjectExtensions; -use failure::Error; use iron::{ self, headers::{Expires, HttpDate},
diff --git a/src/web/page/templates.rs b/src/web/page/templates.rs index 2b0306bb0..be7a872ee 100644 --- a/src/web/page/templates.rs +++ b/src/web/page/templates.rs @@ -1,7 +1,7 @@ use crate::{db::Pool, error::Result}; +use anyhow::Context; use arc_swap::ArcSwap; use chrono::{DateTime, Utc}; -use failure::ResultExt; use notify::{watcher, RecursiveMode, Watcher}; use path_slash::PathExt; use postgres::Client; @@ -81,7 +81,7 @@ fn load_rustc_resource_suffix(conn: &mut Client) -> Result<String> { )?; if res.is_empty() { - failure::bail!("missing rustc version"); + anyhow::bail!("missing rustc version"); } if let Ok(vers) = res[0].try_get::<_, Value>("value") { @@ -90,7 +90,7 @@ fn load_rustc_resource_suffix(conn: &mut Client) -> Result<String> { } } - failure::bail!("failed to parse the rustc version"); + anyhow::bail!("failed to parse the rustc version"); } pub(super) fn load_templates(conn: &mut Client) -> Result<Tera> { @@ -105,13 +105,13 @@ pub(super) fn load_templates(conn: &mut Client) -> Result<Tera> { // // TODO: remove this when https://github.com/Gilnaa/globwalk/issues/29 is fixed let mut tera = Tera::default(); - let template_files = find_templates_in_filesystem(TEMPLATES_DIRECTORY).with_context(|_| { + let template_files = find_templates_in_filesystem(TEMPLATES_DIRECTORY).with_context(|| { format!( "failed to search {:?} for tera templates", TEMPLATES_DIRECTORY ) })?; - tera.add_template_files(template_files).with_context(|_| { + tera.add_template_files(template_files).with_context(|| { format!( "failed while loading tera templates in {:?}", TEMPLATES_DIRECTORY @@ -170,9 +170,9 @@ fn find_templates_in_filesystem(base: &str) -> Result
diff --git a/src/web/releases.rs b/src/web/releases.rs --- a/src/web/releases.rs +++ b/src/web/releases.rs -) -> Result<(i64, Vec<Release>), failure::Error> { +) -> Result<(i64, Vec<Release>), anyhow::Error> { query = query.trim(); if query.is_empty() { return Ok((0, Vec::new())); } @@ -696,8 +696,8 @@ mod tests { use super::*; use crate::index::api::CrateOwner; use crate::test::{assert_redirect, assert_success, wrapper, TestFrontend}; +use anyhow::Error; use chrono::{Duration, TimeZone}; -use failure::Error; use kuchiki::traits::TendrilSink; use std::collections::HashSet;
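Note: a recurring mechanical change in this migration is the `with_context` closure arity: failure's `ResultExt::with_context` passed the underlying error into the closure (`|_| ...`), while anyhow's `Context::with_context` takes a zero-argument closure that is evaluated lazily, only on the error path. A sketch of the anyhow form, using a hypothetical file read:

```rust
use anyhow::{Context, Result};
use std::fs;

fn read_template(path: &str) -> Result<String> {
    // The closure takes no arguments and only runs if read_to_string
    // fails, so the formatted message costs nothing on the happy path.
    fs::read_to_string(path).with_context(|| format!("failed to read {}", path))
}
```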
diff --git a/src/web/rustdoc.rs b/src/web/rustdoc.rs index bb32f6007..7e07c55df 100644 --- a/src/web/rustdoc.rs +++ b/src/web/rustdoc.rs @@ -229,10 +229,10 @@ impl RustdocPage { metrics.html_rewrite_ooms.inc(); let config = extension!(req, Config); - let err = failure::err_msg(format!( + let err = anyhow::anyhow!( "Failed to serve the rustdoc file '{}' because rewriting it surpassed the memory limit of {} bytes", file_path, config.max_parse_memory, - )); + ); ctry!(req, Err(err)) } @@ -696,6 +696,7 @@ impl Handler for SharedResourceHandler #[cfg(test)] mod test { use crate::test::*; + use anyhow::Context; use kuchiki::traits::TendrilSink; use reqwest::StatusCode; use std::collections::BTreeMap; @@ -703,7 +704,7 @@ mod test { fn try_latest_version_redirect( path: &str, web: &TestFrontend, - ) -> Result<Option<String>, failure::Error> { + ) -> Result<Option<String>, anyhow::Error> { assert_success(path, web)?; let data = web.get(path).send()?.text()?; log::info!("fetched path {} and got content {}\nhelp: if this is missing the header, remember to add ", path, data); @@ -722,9 +723,9 @@ mod test { } } - fn latest_version_redirect(path: &str, web: &TestFrontend) -> Result<String, failure::Error> { + fn latest_version_redirect(path: &str, web: &TestFrontend) -> Result<String, anyhow::Error> { try_latest_version_redirect(path, web)? - .ok_or_else(|| failure::format_err!("no redirect found for {}", path)) + .with_context(|| anyhow::anyhow!("no redirect found for {}", path)) } #[test] @@ -1002,7 +1003,7 @@ mod test { #[test] fn yanked_release_shows_warning_in_nav() { - fn has_yanked_warning(path: &str, web: &TestFrontend) -> Result<bool, failure::Error> { + fn has_yanked_warning(path: &str, web: &TestFrontend) -> Result<bool, anyhow::Error> { assert_success(path, web)?; let data = web.get(path).send()?.text()?; Ok(kuchiki::parse_html() @@ -1206,7 +1207,7 @@ mod test { fn get_platform_links( path: &str, web: &TestFrontend, - ) -> Result<Vec<(String, String, String)>, failure::Error> { + ) -> Result<Vec<(String, String, String)>, anyhow::Error> { assert_success(path, web)?; let data = web.get(path).send()?.text()?; let dom = kuchiki::parse_html().one(data); @@ -1227,7 +1228,7 @@ mod test { web: &TestFrontend, path: &str, links: &[(&str, &str)], - ) -> Result<(), failure::Error> { + ) -> Result<(), anyhow::Error> { let mut links: BTreeMap<_, _> = links.iter().copied().collect(); for (platform, link, rel) in get_platform_links(path, web)? { @@ -1587,7 +1588,7 @@ mod test { .create()?; let web = env.frontend(); - let status = |version| -> Result<_, failure::Error> { + let status = |version| -> Result<_, anyhow::Error> { let page = kuchiki::parse_html().one(web.get("/crate/hexponent/0.3.0").send()?.text()?); let selector = format!(r#"ul > li a[href="/crate/hexponent/{}"]"#, version);