diff --git a/src/controllers/category.rs b/src/controllers/category.rs
index 40fc5fa7418..015c12cef06 100644
--- a/src/controllers/category.rs
+++ b/src/controllers/category.rs
@@ -6,77 +6,86 @@ use crate::schema::categories;
 use crate::views::{EncodableCategory, EncodableCategoryWithSubcategories};
 
 /// Handles the `GET /categories` route.
-pub fn index(req: ConduitRequest) -> AppResult<Json<Value>> {
-    let query = req.query();
-    // FIXME: There are 69 categories, 47 top level. This isn't going to
-    // grow by an OoM. We need a limit for /summary, but we don't need
-    // to paginate this.
-    let options = PaginationOptions::builder().gather(&req)?;
-    let offset = options.offset().unwrap_or_default();
-    let sort = query.get("sort").map_or("alpha", String::as_str);
+pub async fn index(req: ConduitRequest) -> AppResult<Json<Value>> {
+    conduit_compat(move || {
+        let query = req.query();
+        // FIXME: There are 69 categories, 47 top level. This isn't going to
+        // grow by an OoM. We need a limit for /summary, but we don't need
+        // to paginate this.
+        let options = PaginationOptions::builder().gather(&req)?;
+        let offset = options.offset().unwrap_or_default();
+        let sort = query.get("sort").map_or("alpha", String::as_str);
 
-    let conn = req.app().db_read()?;
-    let categories =
-        Category::toplevel(&conn, sort, i64::from(options.per_page), i64::from(offset))?;
-    let categories = categories
-        .into_iter()
-        .map(Category::into)
-        .collect::<Vec<EncodableCategory>>();
+        let conn = req.app().db_read()?;
+        let categories =
+            Category::toplevel(&conn, sort, i64::from(options.per_page), i64::from(offset))?;
+        let categories = categories
+            .into_iter()
+            .map(Category::into)
+            .collect::<Vec<EncodableCategory>>();
 
-    // Query for the total count of categories
-    let total = Category::count_toplevel(&conn)?;
+        // Query for the total count of categories
+        let total = Category::count_toplevel(&conn)?;
 
-    Ok(Json(json!({
-        "categories": categories,
-        "meta": { "total": total },
-    })))
+        Ok(Json(json!({
+            "categories": categories,
+            "meta": { "total": total },
+        })))
+    })
+    .await
 }
 
 /// Handles the `GET /categories/:category_id` route.
-pub fn show(req: ConduitRequest) -> AppResult<Json<Value>> {
-    let slug = req.param("category_id").unwrap();
-    let conn = req.app().db_read()?;
-    let cat: Category = Category::by_slug(slug).first(&*conn)?;
-    let subcats = cat
-        .subcategories(&conn)?
-        .into_iter()
-        .map(Category::into)
-        .collect();
-    let parents = cat
-        .parent_categories(&conn)?
-        .into_iter()
-        .map(Category::into)
-        .collect();
+pub async fn show(req: ConduitRequest) -> AppResult<Json<Value>> {
+    conduit_compat(move || {
+        let slug = req.param("category_id").unwrap();
+        let conn = req.app().db_read()?;
+        let cat: Category = Category::by_slug(slug).first(&*conn)?;
+        let subcats = cat
+            .subcategories(&conn)?
+            .into_iter()
+            .map(Category::into)
+            .collect();
+        let parents = cat
+            .parent_categories(&conn)?
+            .into_iter()
+            .map(Category::into)
+            .collect();
 
-    let cat = EncodableCategory::from(cat);
-    let cat_with_subcats = EncodableCategoryWithSubcategories {
-        id: cat.id,
-        category: cat.category,
-        slug: cat.slug,
-        description: cat.description,
-        created_at: cat.created_at,
-        crates_cnt: cat.crates_cnt,
-        subcategories: subcats,
-        parent_categories: parents,
-    };
+        let cat = EncodableCategory::from(cat);
+        let cat_with_subcats = EncodableCategoryWithSubcategories {
+            id: cat.id,
+            category: cat.category,
+            slug: cat.slug,
+            description: cat.description,
+            created_at: cat.created_at,
+            crates_cnt: cat.crates_cnt,
+            subcategories: subcats,
+            parent_categories: parents,
+        };
 
-    Ok(Json(json!({ "category": cat_with_subcats })))
+        Ok(Json(json!({ "category": cat_with_subcats })))
+    })
+    .await
 }
 
 /// Handles the `GET /category_slugs` route.
-pub fn slugs(req: ConduitRequest) -> AppResult<Json<Value>> {
-    let conn = req.app().db_read()?;
-    let slugs: Vec<Slug> = categories::table
-        .select((categories::slug, categories::slug, categories::description))
-        .order(categories::slug)
-        .load(&*conn)?;
+pub async fn slugs(req: ConduitRequest) -> AppResult<Json<Value>> {
+    conduit_compat(move || {
+        let conn = req.app().db_read()?;
+        let slugs: Vec<Slug> = categories::table
+            .select((categories::slug, categories::slug, categories::description))
+            .order(categories::slug)
+            .load(&*conn)?;
 
-    #[derive(Serialize, Queryable)]
-    struct Slug {
-        id: String,
-        slug: String,
-        description: String,
-    }
+        #[derive(Serialize, Queryable)]
+        struct Slug {
+            id: String,
+            slug: String,
+            description: String,
+        }
 
-    Ok(Json(json!({ "category_slugs": slugs })))
+        Ok(Json(json!({ "category_slugs": slugs })))
+    })
+    .await
 }
diff --git a/src/controllers/crate_owner_invitation.rs b/src/controllers/crate_owner_invitation.rs
index 51cb13ab364..dcabd14dbf0 100644
--- a/src/controllers/crate_owner_invitation.rs
+++ b/src/controllers/crate_owner_invitation.rs
@@ -16,55 +16,61 @@ use indexmap::IndexMap;
 use std::collections::{HashMap, HashSet};
 
 /// Handles the `GET /api/v1/me/crate_owner_invitations` route.
-pub fn list(req: ConduitRequest) -> AppResult<Json<Value>> {
-    let auth = AuthCheck::only_cookie().check(&req)?;
-    let user_id = auth.user_id();
-
-    let PrivateListResponse {
-        invitations, users, ..
-    } = prepare_list(&req, auth, ListFilter::InviteeId(user_id))?;
-
-    // The schema for the private endpoints is converted to the schema used by v1 endpoints.
-    let crate_owner_invitations = invitations
-        .into_iter()
-        .map(|private| {
-            Ok(EncodableCrateOwnerInvitationV1 {
-                invited_by_username: users
-                    .iter()
-                    .find(|u| u.id == private.inviter_id)
-                    .ok_or_else(|| internal(&format!("missing user {}", private.inviter_id)))?
-                    .login
-                    .clone(),
-                invitee_id: private.invitee_id,
-                inviter_id: private.inviter_id,
-                crate_name: private.crate_name,
-                crate_id: private.crate_id,
-                created_at: private.created_at,
-                expires_at: private.expires_at,
+pub async fn list(req: ConduitRequest) -> AppResult<Json<Value>> {
+    conduit_compat(move || {
+        let auth = AuthCheck::only_cookie().check(&req)?;
+        let user_id = auth.user_id();
+
+        let PrivateListResponse {
+            invitations, users, ..
+        } = prepare_list(&req, auth, ListFilter::InviteeId(user_id))?;
+
+        // The schema for the private endpoints is converted to the schema used by v1 endpoints.
+        let crate_owner_invitations = invitations
+            .into_iter()
+            .map(|private| {
+                Ok(EncodableCrateOwnerInvitationV1 {
+                    invited_by_username: users
+                        .iter()
+                        .find(|u| u.id == private.inviter_id)
+                        .ok_or_else(|| internal(&format!("missing user {}", private.inviter_id)))?
+                        .login
+                        .clone(),
+                    invitee_id: private.invitee_id,
+                    inviter_id: private.inviter_id,
+                    crate_name: private.crate_name,
+                    crate_id: private.crate_id,
+                    created_at: private.created_at,
+                    expires_at: private.expires_at,
+                })
             })
-        })
-        .collect::<AppResult<Vec<_>>>()?;
+            .collect::<AppResult<Vec<_>>>()?;
 
-    Ok(Json(json!({
-        "crate_owner_invitations": crate_owner_invitations,
-        "users": users,
-    })))
+        Ok(Json(json!({
+            "crate_owner_invitations": crate_owner_invitations,
+            "users": users,
+        })))
+    })
+    .await
 }
 
 /// Handles the `GET /api/private/crate_owner_invitations` route.
-pub fn private_list(req: ConduitRequest) -> AppResult<Json<Value>> {
-    let auth = AuthCheck::only_cookie().check(&req)?;
-
-    let filter = if let Some(crate_name) = req.query().get("crate_name") {
-        ListFilter::CrateName(crate_name.clone())
-    } else if let Some(id) = req.query().get("invitee_id").and_then(|i| i.parse().ok()) {
-        ListFilter::InviteeId(id)
-    } else {
-        return Err(bad_request("missing or invalid filter"));
-    };
+pub async fn private_list(req: ConduitRequest) -> AppResult<Json<Value>> {
+    conduit_compat(move || {
+        let auth = AuthCheck::only_cookie().check(&req)?;
+
+        let filter = if let Some(crate_name) = req.query().get("crate_name") {
+            ListFilter::CrateName(crate_name.clone())
+        } else if let Some(id) = req.query().get("invitee_id").and_then(|i| i.parse().ok()) {
+            ListFilter::InviteeId(id)
+        } else {
+            return Err(bad_request("missing or invalid filter"));
+        };
 
-    let list = prepare_list(&req, auth, filter)?;
-    Ok(Json(list))
+        let list = prepare_list(&req, auth, filter)?;
+        Ok(Json(list))
+    })
+    .await
 }
 
 enum ListFilter {
@@ -250,45 +256,51 @@ struct OwnerInvitation {
 }
 
 /// Handles the `PUT /api/v1/me/crate_owner_invitations/:crate_id` route.
-pub fn handle_invite(mut req: ConduitRequest) -> AppResult<Json<Value>> {
-    let crate_invite: OwnerInvitation =
-        serde_json::from_reader(req.body_mut()).map_err(|_| bad_request("invalid json request"))?;
+pub async fn handle_invite(mut req: ConduitRequest) -> AppResult<Json<Value>> {
+    conduit_compat(move || {
+        let crate_invite: OwnerInvitation = serde_json::from_reader(req.body_mut())
+            .map_err(|_| bad_request("invalid json request"))?;
 
-    let crate_invite = crate_invite.crate_owner_invite;
+        let crate_invite = crate_invite.crate_owner_invite;
 
-    let auth = AuthCheck::default().check(&req)?;
-    let user_id = auth.user_id();
+        let auth = AuthCheck::default().check(&req)?;
+        let user_id = auth.user_id();
 
-    let state = req.app();
-    let conn = &*state.db_write()?;
-    let config = &state.config;
+        let state = req.app();
+        let conn = &*state.db_write()?;
+        let config = &state.config;
 
-    let invitation = CrateOwnerInvitation::find_by_id(user_id, crate_invite.crate_id, conn)?;
-    if crate_invite.accepted {
-        invitation.accept(conn, config)?;
-    } else {
-        invitation.decline(conn)?;
-    }
+        let invitation = CrateOwnerInvitation::find_by_id(user_id, crate_invite.crate_id, conn)?;
+        if crate_invite.accepted {
+            invitation.accept(conn, config)?;
+        } else {
+            invitation.decline(conn)?;
+        }
 
-    Ok(Json(json!({ "crate_owner_invitation": crate_invite })))
+        Ok(Json(json!({ "crate_owner_invitation": crate_invite })))
+    })
+    .await
 }
 
 /// Handles the `PUT /api/v1/me/crate_owner_invitations/accept/:token` route.
-pub fn handle_invite_with_token(req: ConduitRequest) -> AppResult> { - let state = req.app(); - let config = &state.config; - let conn = state.db_write()?; - - let req_token = req.param("token").unwrap(); - - let invitation = CrateOwnerInvitation::find_by_token(req_token, &conn)?; - let crate_id = invitation.crate_id; - invitation.accept(&conn, config)?; - - Ok(Json(json!({ - "crate_owner_invitation": { - "crate_id": crate_id, - "accepted": true, - }, - }))) +pub async fn handle_invite_with_token(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + let state = req.app(); + let config = &state.config; + let conn = state.db_write()?; + + let req_token = req.param("token").unwrap(); + + let invitation = CrateOwnerInvitation::find_by_token(req_token, &conn)?; + let crate_id = invitation.crate_id; + invitation.accept(&conn, config)?; + + Ok(Json(json!({ + "crate_owner_invitation": { + "crate_id": crate_id, + "accepted": true, + }, + }))) + }) + .await } diff --git a/src/controllers/github/secret_scanning.rs b/src/controllers/github/secret_scanning.rs index b8fd6b67b62..a52a68f5a8f 100644 --- a/src/controllers/github/secret_scanning.rs +++ b/src/controllers/github/secret_scanning.rs @@ -232,39 +232,42 @@ pub enum GitHubSecretAlertFeedbackLabel { } /// Handles the `POST /api/github/secret-scanning/verify` route. -pub fn verify(mut req: ConduitRequest) -> AppResult>> { - let max_size = 8192; - let length = req - .content_length() - .ok_or_else(|| bad_request("missing header: Content-Length"))?; - - if length > max_size { - return Err(bad_request(&format!("max content length is: {max_size}"))); - } - - let mut json = vec![0; length as usize]; - read_fill(req.body_mut(), &mut json)?; - - let state = req.app(); - verify_github_signature(req.headers(), state, &json) - .map_err(|e| bad_request(&format!("failed to verify request signature: {e:?}")))?; - - let alerts: Vec = json::from_slice(&json) - .map_err(|e| bad_request(&format!("invalid secret alert request: {e:?}")))?; +pub async fn verify(mut req: ConduitRequest) -> AppResult>> { + conduit_compat(move || { + let max_size = 8192; + let length = req + .content_length() + .ok_or_else(|| bad_request("missing header: Content-Length"))?; + + if length > max_size { + return Err(bad_request(&format!("max content length is: {max_size}"))); + } - let feedback = alerts - .into_iter() - .map(|alert| { - let label = alert_revoke_token(state, &alert)?; - Ok(GitHubSecretAlertFeedback { - token_raw: alert.token, - token_type: alert.r#type, - label, + let mut json = vec![0; length as usize]; + read_fill(req.body_mut(), &mut json)?; + + let state = req.app(); + verify_github_signature(req.headers(), state, &json) + .map_err(|e| bad_request(&format!("failed to verify request signature: {e:?}")))?; + + let alerts: Vec = json::from_slice(&json) + .map_err(|e| bad_request(&format!("invalid secret alert request: {e:?}")))?; + + let feedback = alerts + .into_iter() + .map(|alert| { + let label = alert_revoke_token(state, &alert)?; + Ok(GitHubSecretAlertFeedback { + token_raw: alert.token, + token_type: alert.r#type, + label, + }) }) - }) - .collect::>()?; + .collect::>()?; - Ok(Json(feedback)) + Ok(Json(feedback)) + }) + .await } #[cfg(test)] diff --git a/src/controllers/krate/downloads.rs b/src/controllers/krate/downloads.rs index 5e02b16ac1b..0529f97a2b7 100644 --- a/src/controllers/krate/downloads.rs +++ b/src/controllers/krate/downloads.rs @@ -13,47 +13,51 @@ use crate::sql::to_char; use crate::views::EncodableVersionDownload; /// Handles the `GET 
/crates/:crate_id/downloads` route. -pub fn downloads(req: ConduitRequest) -> AppResult> { - use diesel::dsl::*; - use diesel::sql_types::BigInt; - - let crate_name = req.param("crate_id").unwrap(); - let conn = req.app().db_read()?; - let krate: Crate = Crate::by_name(crate_name).first(&*conn)?; - - let mut versions: Vec = krate.all_versions().load(&*conn)?; - versions.sort_by_cached_key(|version| cmp::Reverse(semver::Version::parse(&version.num).ok())); - let (latest_five, rest) = versions.split_at(cmp::min(5, versions.len())); - - let downloads = VersionDownload::belonging_to(latest_five) - .filter(version_downloads::date.gt(date(now - 90.days()))) - .order(version_downloads::date.asc()) - .load(&*conn)? - .into_iter() - .map(VersionDownload::into) - .collect::>(); - - let sum_downloads = sql::("SUM(version_downloads.downloads)"); - let extra: Vec = VersionDownload::belonging_to(rest) - .select(( - to_char(version_downloads::date, "YYYY-MM-DD"), - sum_downloads, - )) - .filter(version_downloads::date.gt(date(now - 90.days()))) - .group_by(version_downloads::date) - .order(version_downloads::date.asc()) - .load(&*conn)?; - - #[derive(Serialize, Queryable)] - struct ExtraDownload { - date: String, - downloads: i64, - } - - Ok(Json(json!({ - "version_downloads": downloads, - "meta": { - "extra_downloads": extra, - }, - }))) +pub async fn downloads(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + use diesel::dsl::*; + use diesel::sql_types::BigInt; + + let crate_name = req.param("crate_id").unwrap(); + let conn = req.app().db_read()?; + let krate: Crate = Crate::by_name(crate_name).first(&*conn)?; + + let mut versions: Vec = krate.all_versions().load(&*conn)?; + versions + .sort_by_cached_key(|version| cmp::Reverse(semver::Version::parse(&version.num).ok())); + let (latest_five, rest) = versions.split_at(cmp::min(5, versions.len())); + + let downloads = VersionDownload::belonging_to(latest_five) + .filter(version_downloads::date.gt(date(now - 90.days()))) + .order(version_downloads::date.asc()) + .load(&*conn)? + .into_iter() + .map(VersionDownload::into) + .collect::>(); + + let sum_downloads = sql::("SUM(version_downloads.downloads)"); + let extra: Vec = VersionDownload::belonging_to(rest) + .select(( + to_char(version_downloads::date, "YYYY-MM-DD"), + sum_downloads, + )) + .filter(version_downloads::date.gt(date(now - 90.days()))) + .group_by(version_downloads::date) + .order(version_downloads::date.asc()) + .load(&*conn)?; + + #[derive(Serialize, Queryable)] + struct ExtraDownload { + date: String, + downloads: i64, + } + + Ok(Json(json!({ + "version_downloads": downloads, + "meta": { + "extra_downloads": extra, + }, + }))) + }) + .await } diff --git a/src/controllers/krate/follow.rs b/src/controllers/krate/follow.rs index 154085e90cd..ef9fcc579cb 100644 --- a/src/controllers/krate/follow.rs +++ b/src/controllers/krate/follow.rs @@ -21,37 +21,46 @@ fn follow_target( } /// Handles the `PUT /crates/:crate_id/follow` route. 
-pub fn follow(req: ConduitRequest) -> AppResult { - let user_id = AuthCheck::default().check(&req)?.user_id(); - let conn = req.app().db_write()?; - let follow = follow_target(&req, &conn, user_id)?; - diesel::insert_into(follows::table) - .values(&follow) - .on_conflict_do_nothing() - .execute(&*conn)?; - - ok_true() +pub async fn follow(req: ConduitRequest) -> AppResult { + conduit_compat(move || { + let user_id = AuthCheck::default().check(&req)?.user_id(); + let conn = req.app().db_write()?; + let follow = follow_target(&req, &conn, user_id)?; + diesel::insert_into(follows::table) + .values(&follow) + .on_conflict_do_nothing() + .execute(&*conn)?; + + ok_true() + }) + .await } /// Handles the `DELETE /crates/:crate_id/follow` route. -pub fn unfollow(req: ConduitRequest) -> AppResult { - let user_id = AuthCheck::default().check(&req)?.user_id(); - let conn = req.app().db_write()?; - let follow = follow_target(&req, &conn, user_id)?; - diesel::delete(&follow).execute(&*conn)?; +pub async fn unfollow(req: ConduitRequest) -> AppResult { + conduit_compat(move || { + let user_id = AuthCheck::default().check(&req)?.user_id(); + let conn = req.app().db_write()?; + let follow = follow_target(&req, &conn, user_id)?; + diesel::delete(&follow).execute(&*conn)?; - ok_true() + ok_true() + }) + .await } /// Handles the `GET /crates/:crate_id/following` route. -pub fn following(req: ConduitRequest) -> AppResult> { - use diesel::dsl::exists; +pub async fn following(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + use diesel::dsl::exists; - let user_id = AuthCheck::only_cookie().check(&req)?.user_id(); - let conn = req.app().db_read_prefer_primary()?; - let follow = follow_target(&req, &conn, user_id)?; - let following = - diesel::select(exists(follows::table.find(follow.id()))).get_result::(&*conn)?; + let user_id = AuthCheck::only_cookie().check(&req)?.user_id(); + let conn = req.app().db_read_prefer_primary()?; + let follow = follow_target(&req, &conn, user_id)?; + let following = + diesel::select(exists(follows::table.find(follow.id()))).get_result::(&*conn)?; - Ok(Json(json!({ "following": following }))) + Ok(Json(json!({ "following": following }))) + }) + .await } diff --git a/src/controllers/krate/metadata.rs b/src/controllers/krate/metadata.rs index e47157fbdfc..3882a44fd47 100644 --- a/src/controllers/krate/metadata.rs +++ b/src/controllers/krate/metadata.rs @@ -22,217 +22,225 @@ use crate::views::{ use crate::models::krate::ALL_COLUMNS; /// Handles the `GET /summary` route. 
-pub fn summary(req: ConduitRequest) -> AppResult> { - use crate::schema::crates::dsl::*; - use diesel::dsl::all; +pub async fn summary(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + use crate::schema::crates::dsl::*; + use diesel::dsl::all; - let state = req.app(); - let config = &state.config; + let state = req.app(); + let config = &state.config; - let conn = state.db_read()?; - let num_crates: i64 = crates.count().get_result(&*conn)?; - let num_downloads: i64 = metadata::table - .select(metadata::total_downloads) - .get_result(&*conn)?; + let conn = state.db_read()?; + let num_crates: i64 = crates.count().get_result(&*conn)?; + let num_downloads: i64 = metadata::table + .select(metadata::total_downloads) + .get_result(&*conn)?; - let encode_crates = |data: Vec<(Crate, Option)>| -> AppResult> { - let recent_downloads = data.iter().map(|&(_, s)| s).collect::>(); + let encode_crates = |data: Vec<(Crate, Option)>| -> AppResult> { + let recent_downloads = data.iter().map(|&(_, s)| s).collect::>(); - let krates = data.into_iter().map(|(c, _)| c).collect::>(); + let krates = data.into_iter().map(|(c, _)| c).collect::>(); - let versions: Vec = krates.versions().load(&*conn)?; - versions - .grouped_by(&krates) - .into_iter() - .map(TopVersions::from_versions) - .zip(krates) - .zip(recent_downloads) - .map(|((top_versions, krate), recent_downloads)| { - Ok(EncodableCrate::from_minimal( - krate, - Some(&top_versions), - None, - false, - recent_downloads, - )) - }) - .collect() - }; - - let selection = (ALL_COLUMNS, recent_crate_downloads::downloads.nullable()); - - let new_crates = crates - .left_join(recent_crate_downloads::table) - .order(created_at.desc()) - .select(selection) - .limit(10) - .load(&*conn)?; - let just_updated = crates - .left_join(recent_crate_downloads::table) - .filter(updated_at.ne(created_at)) - .order(updated_at.desc()) - .select(selection) - .limit(10) - .load(&*conn)?; - - let mut most_downloaded_query = crates.left_join(recent_crate_downloads::table).into_boxed(); - if !config.excluded_crate_names.is_empty() { - most_downloaded_query = - most_downloaded_query.filter(name.ne(all(&config.excluded_crate_names))); - } - let most_downloaded = most_downloaded_query - .then_order_by(downloads.desc()) - .select(selection) - .limit(10) - .load(&*conn)?; - - let mut most_recently_downloaded_query = crates - .inner_join(recent_crate_downloads::table) - .into_boxed(); - if !config.excluded_crate_names.is_empty() { - most_recently_downloaded_query = - most_recently_downloaded_query.filter(name.ne(all(&config.excluded_crate_names))); - } - let most_recently_downloaded = most_recently_downloaded_query - .then_order_by(recent_crate_downloads::downloads.desc()) - .select(selection) - .limit(10) - .load(&*conn)?; - - let popular_keywords = keywords::table - .order(keywords::crates_cnt.desc()) - .limit(10) - .load(&*conn)? - .into_iter() - .map(Keyword::into) - .collect::>(); - - let popular_categories = Category::toplevel(&conn, "crates", 10, 0)? 
- .into_iter() - .map(Category::into) - .collect::>(); - - Ok(Json(json!({ - "num_downloads": num_downloads, - "num_crates": num_crates, - "new_crates": encode_crates(new_crates)?, - "most_downloaded": encode_crates(most_downloaded)?, - "most_recently_downloaded": encode_crates(most_recently_downloaded)?, - "just_updated": encode_crates(just_updated)?, - "popular_keywords": popular_keywords, - "popular_categories": popular_categories, - }))) -} + let versions: Vec = krates.versions().load(&*conn)?; + versions + .grouped_by(&krates) + .into_iter() + .map(TopVersions::from_versions) + .zip(krates) + .zip(recent_downloads) + .map(|((top_versions, krate), recent_downloads)| { + Ok(EncodableCrate::from_minimal( + krate, + Some(&top_versions), + None, + false, + recent_downloads, + )) + }) + .collect() + }; -/// Handles the `GET /crates/:crate_id` route. -pub fn show(req: ConduitRequest) -> AppResult> { - let name = req.param("crate_id").unwrap(); - let include = req - .query() - .get("include") - .map(|mode| ShowIncludeMode::from_str(mode)) - .transpose()? - .unwrap_or_default(); - - let conn = req.app().db_read()?; - let krate: Crate = Crate::by_name(name).first(&*conn)?; - - let versions_publishers_and_audit_actions = if include.versions { - let mut versions_and_publishers: Vec<(Version, Option)> = krate - .all_versions() - .left_outer_join(users::table) - .select((versions::all_columns, users::all_columns.nullable())) + let selection = (ALL_COLUMNS, recent_crate_downloads::downloads.nullable()); + + let new_crates = crates + .left_join(recent_crate_downloads::table) + .order(created_at.desc()) + .select(selection) + .limit(10) + .load(&*conn)?; + let just_updated = crates + .left_join(recent_crate_downloads::table) + .filter(updated_at.ne(created_at)) + .order(updated_at.desc()) + .select(selection) + .limit(10) .load(&*conn)?; - versions_and_publishers - .sort_by_cached_key(|(version, _)| Reverse(semver::Version::parse(&version.num).ok())); - let versions = versions_and_publishers - .iter() - .map(|(v, _)| v) - .cloned() - .collect::>(); - Some( - versions_and_publishers - .into_iter() - .zip(VersionOwnerAction::for_versions(&conn, &versions)?.into_iter()) - .map(|((v, pb), aas)| (v, pb, aas)) - .collect::>(), - ) - } else { - None - }; - let ids = versions_publishers_and_audit_actions - .as_ref() - .map(|vps| vps.iter().map(|v| v.0.id).collect()); - - let kws = if include.keywords { - Some( - CrateKeyword::belonging_to(&krate) - .inner_join(keywords::table) - .select(keywords::all_columns) - .load(&*conn)?, - ) - } else { - None - }; - let cats = if include.categories { - Some( - CrateCategory::belonging_to(&krate) - .inner_join(categories::table) - .select(categories::all_columns) - .load(&*conn)?, - ) - } else { - None - }; - let recent_downloads = if include.downloads { - RecentCrateDownloads::belonging_to(&krate) - .select(recent_crate_downloads::downloads) - .get_result(&*conn) - .optional()? - } else { - None - }; - - let badges = if include.badges { Some(vec![]) } else { None }; - - let top_versions = if include.versions { - Some(krate.top_versions(&conn)?) 
- } else { - None - }; - - let encodable_crate = EncodableCrate::from( - krate.clone(), - top_versions.as_ref(), - ids, - kws.as_deref(), - cats.as_deref(), - badges, - false, - recent_downloads, - ); - let encodable_versions = versions_publishers_and_audit_actions.map(|vpa| { - vpa.into_iter() - .map(|(v, pb, aas)| EncodableVersion::from(v, &krate.name, pb, aas)) - .collect::>() - }); - let encodable_keywords = kws.map(|kws| { - kws.into_iter() + let mut most_downloaded_query = + crates.left_join(recent_crate_downloads::table).into_boxed(); + if !config.excluded_crate_names.is_empty() { + most_downloaded_query = + most_downloaded_query.filter(name.ne(all(&config.excluded_crate_names))); + } + let most_downloaded = most_downloaded_query + .then_order_by(downloads.desc()) + .select(selection) + .limit(10) + .load(&*conn)?; + + let mut most_recently_downloaded_query = crates + .inner_join(recent_crate_downloads::table) + .into_boxed(); + if !config.excluded_crate_names.is_empty() { + most_recently_downloaded_query = + most_recently_downloaded_query.filter(name.ne(all(&config.excluded_crate_names))); + } + let most_recently_downloaded = most_recently_downloaded_query + .then_order_by(recent_crate_downloads::downloads.desc()) + .select(selection) + .limit(10) + .load(&*conn)?; + + let popular_keywords = keywords::table + .order(keywords::crates_cnt.desc()) + .limit(10) + .load(&*conn)? + .into_iter() .map(Keyword::into) - .collect::>() - }); - let encodable_cats = cats.map(|cats| { - cats.into_iter() + .collect::>(); + + let popular_categories = Category::toplevel(&conn, "crates", 10, 0)? + .into_iter() .map(Category::into) - .collect::>() - }); - Ok(Json(json!({ - "crate": encodable_crate, - "versions": encodable_versions, - "keywords": encodable_keywords, - "categories": encodable_cats, - }))) + .collect::>(); + + Ok(Json(json!({ + "num_downloads": num_downloads, + "num_crates": num_crates, + "new_crates": encode_crates(new_crates)?, + "most_downloaded": encode_crates(most_downloaded)?, + "most_recently_downloaded": encode_crates(most_recently_downloaded)?, + "just_updated": encode_crates(just_updated)?, + "popular_keywords": popular_keywords, + "popular_categories": popular_categories, + }))) + }) + .await +} + +/// Handles the `GET /crates/:crate_id` route. +pub async fn show(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + let name = req.param("crate_id").unwrap(); + let include = req + .query() + .get("include") + .map(|mode| ShowIncludeMode::from_str(mode)) + .transpose()? 
+ .unwrap_or_default(); + + let conn = req.app().db_read()?; + let krate: Crate = Crate::by_name(name).first(&*conn)?; + + let versions_publishers_and_audit_actions = if include.versions { + let mut versions_and_publishers: Vec<(Version, Option)> = krate + .all_versions() + .left_outer_join(users::table) + .select((versions::all_columns, users::all_columns.nullable())) + .load(&*conn)?; + versions_and_publishers.sort_by_cached_key(|(version, _)| { + Reverse(semver::Version::parse(&version.num).ok()) + }); + + let versions = versions_and_publishers + .iter() + .map(|(v, _)| v) + .cloned() + .collect::>(); + Some( + versions_and_publishers + .into_iter() + .zip(VersionOwnerAction::for_versions(&conn, &versions)?.into_iter()) + .map(|((v, pb), aas)| (v, pb, aas)) + .collect::>(), + ) + } else { + None + }; + let ids = versions_publishers_and_audit_actions + .as_ref() + .map(|vps| vps.iter().map(|v| v.0.id).collect()); + + let kws = if include.keywords { + Some( + CrateKeyword::belonging_to(&krate) + .inner_join(keywords::table) + .select(keywords::all_columns) + .load(&*conn)?, + ) + } else { + None + }; + let cats = if include.categories { + Some( + CrateCategory::belonging_to(&krate) + .inner_join(categories::table) + .select(categories::all_columns) + .load(&*conn)?, + ) + } else { + None + }; + let recent_downloads = if include.downloads { + RecentCrateDownloads::belonging_to(&krate) + .select(recent_crate_downloads::downloads) + .get_result(&*conn) + .optional()? + } else { + None + }; + + let badges = if include.badges { Some(vec![]) } else { None }; + + let top_versions = if include.versions { + Some(krate.top_versions(&conn)?) + } else { + None + }; + + let encodable_crate = EncodableCrate::from( + krate.clone(), + top_versions.as_ref(), + ids, + kws.as_deref(), + cats.as_deref(), + badges, + false, + recent_downloads, + ); + let encodable_versions = versions_publishers_and_audit_actions.map(|vpa| { + vpa.into_iter() + .map(|(v, pb, aas)| EncodableVersion::from(v, &krate.name, pb, aas)) + .collect::>() + }); + let encodable_keywords = kws.map(|kws| { + kws.into_iter() + .map(Keyword::into) + .collect::>() + }); + let encodable_cats = cats.map(|cats| { + cats.into_iter() + .map(Category::into) + .collect::>() + }); + Ok(Json(json!({ + "crate": encodable_crate, + "versions": encodable_versions, + "keywords": encodable_keywords, + "categories": encodable_cats, + }))) + }) + .await } #[derive(Debug)] @@ -298,95 +306,104 @@ impl FromStr for ShowIncludeMode { } /// Handles the `GET /crates/:crate_id/:version/readme` route. -pub fn readme(req: ConduitRequest) -> AppResult { - let crate_name = req.param("crate_id").unwrap(); - let version = req.param("version").unwrap(); - - let redirect_url = req - .app() - .config - .uploader() - .readme_location(crate_name, version); - - if req.wants_json() { - Ok(Json(json!({ "url": redirect_url })).into_response()) - } else { - Ok(req.redirect(redirect_url)) - } +pub async fn readme(req: ConduitRequest) -> AppResult { + conduit_compat(move || { + let crate_name = req.param("crate_id").unwrap(); + let version = req.param("version").unwrap(); + + let redirect_url = req + .app() + .config + .uploader() + .readme_location(crate_name, version); + + if req.wants_json() { + Ok(Json(json!({ "url": redirect_url })).into_response()) + } else { + Ok(req.redirect(redirect_url)) + } + }) + .await } /// Handles the `GET /crates/:crate_id/versions` route. 
// FIXME: Not sure why this is necessary since /crates/:crate_id returns // this information already, but ember is definitely requesting it -pub fn versions(req: ConduitRequest) -> AppResult> { - let crate_name = req.param("crate_id").unwrap(); - let conn = req.app().db_read()?; - let krate: Crate = Crate::by_name(crate_name).first(&*conn)?; - let mut versions_and_publishers: Vec<(Version, Option)> = krate - .all_versions() - .left_outer_join(users::table) - .select((versions::all_columns, users::all_columns.nullable())) - .load(&*conn)?; - - versions_and_publishers - .sort_by_cached_key(|(version, _)| Reverse(semver::Version::parse(&version.num).ok())); - - let versions = versions_and_publishers - .iter() - .map(|(v, _)| v) - .cloned() - .collect::>(); - let versions = versions_and_publishers - .into_iter() - .zip(VersionOwnerAction::for_versions(&conn, &versions)?.into_iter()) - .map(|((v, pb), aas)| EncodableVersion::from(v, crate_name, pb, aas)) - .collect::>(); - - Ok(Json(json!({ "versions": versions }))) +pub async fn versions(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + let crate_name = req.param("crate_id").unwrap(); + let conn = req.app().db_read()?; + let krate: Crate = Crate::by_name(crate_name).first(&*conn)?; + let mut versions_and_publishers: Vec<(Version, Option)> = krate + .all_versions() + .left_outer_join(users::table) + .select((versions::all_columns, users::all_columns.nullable())) + .load(&*conn)?; + + versions_and_publishers + .sort_by_cached_key(|(version, _)| Reverse(semver::Version::parse(&version.num).ok())); + + let versions = versions_and_publishers + .iter() + .map(|(v, _)| v) + .cloned() + .collect::>(); + let versions = versions_and_publishers + .into_iter() + .zip(VersionOwnerAction::for_versions(&conn, &versions)?.into_iter()) + .map(|((v, pb), aas)| EncodableVersion::from(v, crate_name, pb, aas)) + .collect::>(); + + Ok(Json(json!({ "versions": versions }))) + }) + .await } /// Handles the `GET /crates/:crate_id/reverse_dependencies` route. 
-pub fn reverse_dependencies(req: ConduitRequest) -> AppResult> { - use diesel::dsl::any; - - let pagination_options = PaginationOptions::builder().gather(&req)?; - let name = req.param("crate_id").unwrap(); - let conn = req.app().db_read()?; - let krate: Crate = Crate::by_name(name).first(&*conn)?; - let (rev_deps, total) = krate.reverse_dependencies(&conn, pagination_options)?; - let rev_deps: Vec<_> = rev_deps - .into_iter() - .map(|dep| EncodableDependency::from_reverse_dep(dep, &krate.name)) - .collect(); - - let version_ids: Vec = rev_deps.iter().map(|dep| dep.version_id).collect(); - - let versions_and_publishers: Vec<(Version, String, Option)> = versions::table - .filter(versions::id.eq(any(version_ids))) - .inner_join(crates::table) - .left_outer_join(users::table) - .select(( - versions::all_columns, - crates::name, - users::all_columns.nullable(), - )) - .load(&*conn)?; - let versions = versions_and_publishers - .iter() - .map(|(v, _, _)| v) - .cloned() - .collect::>(); - let versions = versions_and_publishers - .into_iter() - .zip(VersionOwnerAction::for_versions(&conn, &versions)?.into_iter()) - .map(|((version, krate_name, published_by), actions)| { - EncodableVersion::from(version, &krate_name, published_by, actions) - }) - .collect::>(); - - Ok(Json(json!({ - "dependencies": rev_deps, - "versions": versions, - "meta": { "total": total }, - }))) +pub async fn reverse_dependencies(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + use diesel::dsl::any; + + let pagination_options = PaginationOptions::builder().gather(&req)?; + let name = req.param("crate_id").unwrap(); + let conn = req.app().db_read()?; + let krate: Crate = Crate::by_name(name).first(&*conn)?; + let (rev_deps, total) = krate.reverse_dependencies(&conn, pagination_options)?; + let rev_deps: Vec<_> = rev_deps + .into_iter() + .map(|dep| EncodableDependency::from_reverse_dep(dep, &krate.name)) + .collect(); + + let version_ids: Vec = rev_deps.iter().map(|dep| dep.version_id).collect(); + + let versions_and_publishers: Vec<(Version, String, Option)> = versions::table + .filter(versions::id.eq(any(version_ids))) + .inner_join(crates::table) + .left_outer_join(users::table) + .select(( + versions::all_columns, + crates::name, + users::all_columns.nullable(), + )) + .load(&*conn)?; + let versions = versions_and_publishers + .iter() + .map(|(v, _, _)| v) + .cloned() + .collect::>(); + let versions = versions_and_publishers + .into_iter() + .zip(VersionOwnerAction::for_versions(&conn, &versions)?.into_iter()) + .map(|((version, krate_name, published_by), actions)| { + EncodableVersion::from(version, &krate_name, published_by, actions) + }) + .collect::>(); + + Ok(Json(json!({ + "dependencies": rev_deps, + "versions": versions, + "meta": { "total": total }, + }))) + }) + .await } diff --git a/src/controllers/krate/owners.rs b/src/controllers/krate/owners.rs index 75a89da8e48..ef5e7f2fce0 100644 --- a/src/controllers/krate/owners.rs +++ b/src/controllers/krate/owners.rs @@ -7,53 +7,62 @@ use crate::models::{Crate, Owner, Rights, Team, User}; use crate::views::EncodableOwner; /// Handles the `GET /crates/:crate_id/owners` route. -pub fn owners(req: ConduitRequest) -> AppResult> { - let crate_name = req.param("crate_id").unwrap(); - let conn = req.app().db_read()?; - let krate: Crate = Crate::by_name(crate_name).first(&*conn)?; - let owners = krate - .owners(&conn)? 
- .into_iter() - .map(Owner::into) - .collect::>(); - - Ok(Json(json!({ "users": owners }))) +pub async fn owners(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + let crate_name = req.param("crate_id").unwrap(); + let conn = req.app().db_read()?; + let krate: Crate = Crate::by_name(crate_name).first(&*conn)?; + let owners = krate + .owners(&conn)? + .into_iter() + .map(Owner::into) + .collect::>(); + + Ok(Json(json!({ "users": owners }))) + }) + .await } /// Handles the `GET /crates/:crate_id/owner_team` route. -pub fn owner_team(req: ConduitRequest) -> AppResult> { - let crate_name = req.param("crate_id").unwrap(); - let conn = req.app().db_read()?; - let krate: Crate = Crate::by_name(crate_name).first(&*conn)?; - let owners = Team::owning(&krate, &conn)? - .into_iter() - .map(Owner::into) - .collect::>(); - - Ok(Json(json!({ "teams": owners }))) +pub async fn owner_team(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + let crate_name = req.param("crate_id").unwrap(); + let conn = req.app().db_read()?; + let krate: Crate = Crate::by_name(crate_name).first(&*conn)?; + let owners = Team::owning(&krate, &conn)? + .into_iter() + .map(Owner::into) + .collect::>(); + + Ok(Json(json!({ "teams": owners }))) + }) + .await } /// Handles the `GET /crates/:crate_id/owner_user` route. -pub fn owner_user(req: ConduitRequest) -> AppResult> { - let crate_name = req.param("crate_id").unwrap(); - let conn = req.app().db_read()?; - let krate: Crate = Crate::by_name(crate_name).first(&*conn)?; - let owners = User::owning(&krate, &conn)? - .into_iter() - .map(Owner::into) - .collect::>(); - - Ok(Json(json!({ "users": owners }))) +pub async fn owner_user(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + let crate_name = req.param("crate_id").unwrap(); + let conn = req.app().db_read()?; + let krate: Crate = Crate::by_name(crate_name).first(&*conn)?; + let owners = User::owning(&krate, &conn)? + .into_iter() + .map(Owner::into) + .collect::>(); + + Ok(Json(json!({ "users": owners }))) + }) + .await } /// Handles the `PUT /crates/:crate_id/owners` route. -pub fn add_owners(mut req: ConduitRequest) -> AppResult> { - modify_owners(&mut req, true) +pub async fn add_owners(mut req: ConduitRequest) -> AppResult> { + conduit_compat(move || modify_owners(&mut req, true)).await } /// Handles the `DELETE /crates/:crate_id/owners` route. -pub fn remove_owners(mut req: ConduitRequest) -> AppResult> { - modify_owners(&mut req, false) +pub async fn remove_owners(mut req: ConduitRequest) -> AppResult> { + conduit_compat(move || modify_owners(&mut req, false)).await } /// Parse the JSON request body of requests to modify the owners of a crate. diff --git a/src/controllers/krate/publish.rs b/src/controllers/krate/publish.rs index 9ee882ee2c5..06327394677 100644 --- a/src/controllers/krate/publish.rs +++ b/src/controllers/krate/publish.rs @@ -42,248 +42,251 @@ pub const WILDCARD_ERROR_MESSAGE: &str = "wildcard (`*`) dependency constraints /// Currently blocks the HTTP thread, perhaps some function calls can spawn new /// threads and return completion or error through other methods a `cargo publish /// --status` command, via crates.io's front end, or email. 
-pub fn publish(mut req: ConduitRequest) -> AppResult> { - let app = req.app().clone(); - - // The format of the req.body() of a publish request is as follows: - // - // metadata length - // metadata in JSON about the crate being published - // .crate tarball length - // .crate tarball file - // - // - The metadata is read and interpreted in the parse_new_headers function. - // - The .crate tarball length is read in this function in order to save the size of the file - // in the version record in the database. - // - Then the .crate tarball length is passed to the upload_crate function where the actual - // file is read and uploaded. - - let new_crate = parse_new_headers(&mut req)?; - - req.add_custom_metadata("crate_name", new_crate.name.to_string()); - req.add_custom_metadata("crate_version", new_crate.vers.to_string()); - - let conn = app.primary_database.get()?; - - // this query should only be used for the endpoint scope calculation - // since a race condition there would only cause `publish-new` instead of - // `publish-update` to be used. - let existing_crate = Crate::by_name(&new_crate.name) - .first::(&*conn) - .optional()?; - - let endpoint_scope = match existing_crate { - Some(_) => EndpointScope::PublishUpdate, - None => EndpointScope::PublishNew, - }; - - let auth = AuthCheck::default() - .with_endpoint_scope(endpoint_scope) - .for_crate(&new_crate.name) - .check(&req)?; - - let api_token_id = auth.api_token_id(); - let user = auth.user(); - - let verified_email_address = user.verified_email(&conn)?; - let verified_email_address = verified_email_address.ok_or_else(|| { - cargo_err(&format!( - "A verified email address is required to publish crates to crates.io. \ - Visit https://{}/me to set and verify your email address.", - app.config.domain_name, - )) - })?; - - // Create a transaction on the database, if there are no errors, - // commit the transactions to record a new or updated crate. - conn.transaction(|| { - let _ = &new_crate; - let name = new_crate.name; - let vers = &*new_crate.vers; - let links = new_crate.links; - let repo = new_crate.repository; - let features = new_crate - .features - .into_iter() - .map(|(k, v)| (k.0, v.into_iter().map(|v| v.0).collect())) - .collect(); - let keywords = new_crate - .keywords - .iter() - .map(|s| s.as_str()) - .collect::>(); - let categories = new_crate - .categories - .iter() - .map(|s| s.as_str()) - .collect::>(); - - // Persist the new crate, if it doesn't already exist - let persist = NewCrate { - name: &name, - description: new_crate.description.as_deref(), - homepage: new_crate.homepage.as_deref(), - documentation: new_crate.documentation.as_deref(), - readme: new_crate.readme.as_deref(), - repository: repo.as_deref(), - max_upload_size: None, +pub async fn publish(mut req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + let app = req.app().clone(); + + // The format of the req.body() of a publish request is as follows: + // + // metadata length + // metadata in JSON about the crate being published + // .crate tarball length + // .crate tarball file + // + // - The metadata is read and interpreted in the parse_new_headers function. + // - The .crate tarball length is read in this function in order to save the size of the file + // in the version record in the database. + // - Then the .crate tarball length is passed to the upload_crate function where the actual + // file is read and uploaded. 
+ + let new_crate = parse_new_headers(&mut req)?; + + req.add_custom_metadata("crate_name", new_crate.name.to_string()); + req.add_custom_metadata("crate_version", new_crate.vers.to_string()); + + let conn = app.primary_database.get()?; + + // this query should only be used for the endpoint scope calculation + // since a race condition there would only cause `publish-new` instead of + // `publish-update` to be used. + let existing_crate = Crate::by_name(&new_crate.name) + .first::(&*conn) + .optional()?; + + let endpoint_scope = match existing_crate { + Some(_) => EndpointScope::PublishUpdate, + None => EndpointScope::PublishNew, }; - let license_file = new_crate.license_file.as_deref(); - let krate = - persist.create_or_update(&conn, user.id, Some(&app.config.publish_rate_limit))?; + let auth = AuthCheck::default() + .with_endpoint_scope(endpoint_scope) + .for_crate(&new_crate.name) + .check(&req)?; - let owners = krate.owners(&conn)?; - if user.rights(&app, &owners)? < Rights::Publish { - return Err(cargo_err(MISSING_RIGHTS_ERROR_MESSAGE)); - } + let api_token_id = auth.api_token_id(); + let user = auth.user(); - if krate.name != *name { - return Err(cargo_err(&format_args!( - "crate was previously named `{}`", - krate.name - ))); - } + let verified_email_address = user.verified_email(&conn)?; + let verified_email_address = verified_email_address.ok_or_else(|| { + cargo_err(&format!( + "A verified email address is required to publish crates to crates.io. \ + Visit https://{}/me to set and verify your email address.", + app.config.domain_name, + )) + })?; - if let Some(daily_version_limit) = app.config.new_version_rate_limit { - let published_today = count_versions_published_today(krate.id, &conn)?; - if published_today >= daily_version_limit as i64 { - return Err(cargo_err( - "You have published too many versions of this crate in the last 24 hours", - )); - } - } + // Create a transaction on the database, if there are no errors, + // commit the transactions to record a new or updated crate. + conn.transaction(|| { + let _ = &new_crate; + let name = new_crate.name; + let vers = &*new_crate.vers; + let links = new_crate.links; + let repo = new_crate.repository; + let features = new_crate + .features + .into_iter() + .map(|(k, v)| (k.0, v.into_iter().map(|v| v.0).collect())) + .collect(); + let keywords = new_crate + .keywords + .iter() + .map(|s| s.as_str()) + .collect::>(); + let categories = new_crate + .categories + .iter() + .map(|s| s.as_str()) + .collect::>(); + + // Persist the new crate, if it doesn't already exist + let persist = NewCrate { + name: &name, + description: new_crate.description.as_deref(), + homepage: new_crate.homepage.as_deref(), + documentation: new_crate.documentation.as_deref(), + readme: new_crate.readme.as_deref(), + repository: repo.as_deref(), + max_upload_size: None, + }; - // Length of the .crate tarball, which appears after the metadata in the request body. - // TODO: Not sure why we're using the total content length (metadata + .crate file length) - // to compare against the max upload size... investigate that and perhaps change to use - // this file length. - let file_length = read_le_u32(req.body_mut())?; + let license_file = new_crate.license_file.as_deref(); + let krate = + persist.create_or_update(&conn, user.id, Some(&app.config.publish_rate_limit))?; - let content_length = req - .content_length() - .ok_or_else(|| cargo_err("missing header: Content-Length"))?; + let owners = krate.owners(&conn)?; + if user.rights(&app, &owners)? 
< Rights::Publish { + return Err(cargo_err(MISSING_RIGHTS_ERROR_MESSAGE)); + } - let maximums = Maximums::new( - krate.max_upload_size, - app.config.max_upload_size, - app.config.max_unpack_size, - ); + if krate.name != *name { + return Err(cargo_err(&format_args!( + "crate was previously named `{}`", + krate.name + ))); + } - if content_length > maximums.max_upload_size { - return Err(cargo_err(&format_args!( - "max upload size is: {}", - maximums.max_upload_size - ))); - } + if let Some(daily_version_limit) = app.config.new_version_rate_limit { + let published_today = count_versions_published_today(krate.id, &conn)?; + if published_today >= daily_version_limit as i64 { + return Err(cargo_err( + "You have published too many versions of this crate in the last 24 hours", + )); + } + } + + // Length of the .crate tarball, which appears after the metadata in the request body. + // TODO: Not sure why we're using the total content length (metadata + .crate file length) + // to compare against the max upload size... investigate that and perhaps change to use + // this file length. + let file_length = read_le_u32(req.body_mut())?; + + let content_length = req + .content_length() + .ok_or_else(|| cargo_err("missing header: Content-Length"))?; + + let maximums = Maximums::new( + krate.max_upload_size, + app.config.max_upload_size, + app.config.max_unpack_size, + ); + + if content_length > maximums.max_upload_size { + return Err(cargo_err(&format_args!( + "max upload size is: {}", + maximums.max_upload_size + ))); + } - // This is only redundant for now. Eventually the duplication will be removed. - let license = new_crate.license.clone(); - - // Read tarball from request - let mut tarball = Vec::new(); - LimitErrorReader::new(req.body_mut(), maximums.max_upload_size) - .read_to_end(&mut tarball)?; - let hex_cksum: String = Sha256::digest(&tarball).encode_hex(); - - // Persist the new version of this crate - let version = NewVersion::new( - krate.id, - vers, - &features, - license, - license_file, - // Downcast is okay because the file length must be less than the max upload size - // to get here, and max upload sizes are way less than i32 max - file_length as i32, - user.id, - hex_cksum.clone(), - links.clone(), - )? - .save(&conn, &verified_email_address)?; - - insert_version_owner_action( - &conn, - version.id, - user.id, - api_token_id, - VersionAction::Publish, - )?; - - // Link this new version to all dependencies - let git_deps = add_dependencies(&conn, &new_crate.deps, version.id)?; - - // Update all keywords for this crate - Keyword::update_crate(&conn, &krate, &keywords)?; - - // Update all categories for this crate, collecting any invalid categories - // in order to be able to warn about them - let ignored_invalid_categories = Category::update_crate(&conn, &krate, &categories)?; - - let top_versions = krate.top_versions(&conn)?; - - let pkg_name = format!("{}-{}", krate.name, vers); - let cargo_vcs_info = verify_tarball(&pkg_name, &tarball, maximums.max_unpack_size)?; - let pkg_path_in_vcs = cargo_vcs_info.map(|info| info.path_in_vcs); - - if let Some(readme) = new_crate.readme { - worker::render_and_upload_readme( + // This is only redundant for now. Eventually the duplication will be removed. 
+ let license = new_crate.license.clone(); + + // Read tarball from request + let mut tarball = Vec::new(); + LimitErrorReader::new(req.body_mut(), maximums.max_upload_size) + .read_to_end(&mut tarball)?; + let hex_cksum: String = Sha256::digest(&tarball).encode_hex(); + + // Persist the new version of this crate + let version = NewVersion::new( + krate.id, + vers, + &features, + license, + license_file, + // Downcast is okay because the file length must be less than the max upload size + // to get here, and max upload sizes are way less than i32 max + file_length as i32, + user.id, + hex_cksum.clone(), + links.clone(), + )? + .save(&conn, &verified_email_address)?; + + insert_version_owner_action( + &conn, version.id, - readme, - new_crate - .readme_file - .unwrap_or_else(|| String::from("README.md")), - repo, - pkg_path_in_vcs, - ) - .enqueue(&conn)?; - } + user.id, + api_token_id, + VersionAction::Publish, + )?; + + // Link this new version to all dependencies + let git_deps = add_dependencies(&conn, &new_crate.deps, version.id)?; + + // Update all keywords for this crate + Keyword::update_crate(&conn, &krate, &keywords)?; + + // Update all categories for this crate, collecting any invalid categories + // in order to be able to warn about them + let ignored_invalid_categories = Category::update_crate(&conn, &krate, &categories)?; + + let top_versions = krate.top_versions(&conn)?; + + let pkg_name = format!("{}-{}", krate.name, vers); + let cargo_vcs_info = verify_tarball(&pkg_name, &tarball, maximums.max_unpack_size)?; + let pkg_path_in_vcs = cargo_vcs_info.map(|info| info.path_in_vcs); + + if let Some(readme) = new_crate.readme { + worker::render_and_upload_readme( + version.id, + readme, + new_crate + .readme_file + .unwrap_or_else(|| String::from("README.md")), + repo, + pkg_path_in_vcs, + ) + .enqueue(&conn)?; + } - // Upload crate tarball - app.config - .uploader() - .upload_crate(app.http_client(), tarball, &krate, vers)?; - - let (features, features2): (BTreeMap<_, _>, BTreeMap<_, _>) = - features.into_iter().partition(|(_k, vals)| { - !vals - .iter() - .any(|v| v.starts_with("dep:") || v.contains("?/")) - }); - let (features2, v) = if features2.is_empty() { - (None, None) - } else { - (Some(features2), Some(2)) - }; + // Upload crate tarball + app.config + .uploader() + .upload_crate(app.http_client(), tarball, &krate, vers)?; + + let (features, features2): (BTreeMap<_, _>, BTreeMap<_, _>) = + features.into_iter().partition(|(_k, vals)| { + !vals + .iter() + .any(|v| v.starts_with("dep:") || v.contains("?/")) + }); + let (features2, v) = if features2.is_empty() { + (None, None) + } else { + (Some(features2), Some(2)) + }; - // Register this crate in our local git repo. - let git_crate = cargo_registry_index::Crate { - name: name.0, - vers: vers.to_string(), - cksum: hex_cksum, - features, - features2, - deps: git_deps, - yanked: Some(false), - links, - v, - }; - worker::add_crate(git_crate).enqueue(&conn)?; - - // The `other` field on `PublishWarnings` was introduced to handle a temporary warning - // that is no longer needed. As such, crates.io currently does not return any `other` - // warnings at this time, but if we need to, the field is available. - let warnings = PublishWarnings { - invalid_categories: ignored_invalid_categories, - invalid_badges: vec![], - other: vec![], - }; + // Register this crate in our local git repo. 
+ let git_crate = cargo_registry_index::Crate { + name: name.0, + vers: vers.to_string(), + cksum: hex_cksum, + features, + features2, + deps: git_deps, + yanked: Some(false), + links, + v, + }; + worker::add_crate(git_crate).enqueue(&conn)?; + + // The `other` field on `PublishWarnings` was introduced to handle a temporary warning + // that is no longer needed. As such, crates.io currently does not return any `other` + // warnings at this time, but if we need to, the field is available. + let warnings = PublishWarnings { + invalid_categories: ignored_invalid_categories, + invalid_badges: vec![], + other: vec![], + }; - Ok(Json(GoodCrate { - krate: EncodableCrate::from_minimal(krate, Some(&top_versions), None, false, None), - warnings, - })) + Ok(Json(GoodCrate { + krate: EncodableCrate::from_minimal(krate, Some(&top_versions), None, false, None), + warnings, + })) + }) }) + .await } /// Counts the number of versions for `krate_id` that were published within diff --git a/src/controllers/krate/search.rs b/src/controllers/krate/search.rs index f3fd2624a42..4fc90779960 100644 --- a/src/controllers/krate/search.rs +++ b/src/controllers/krate/search.rs @@ -38,312 +38,315 @@ use crate::sql::{array_agg, canon_crate_name, lower}; /// caused the break. In the future, we should look at splitting this /// function out to cover the different use cases, and create unit tests /// for them. -pub fn search(req: ConduitRequest) -> AppResult> { - use diesel::sql_types::{Bool, Text}; - - let params = req.query(); - let sort = params.get("sort").map(|s| &**s); - let include_yanked = params - .get("include_yanked") - .map(|s| s == "yes") - .unwrap_or(true); - - // Remove 0x00 characters from the query string because Postgres can not - // handle them and will return an error, which would cause us to throw - // an Internal Server Error ourselves. - let q_string = params.get("q").map(|q| q.replace('\u{0}', "")); - - let selection = ( - ALL_COLUMNS, - false.into_sql::(), - recent_crate_downloads::downloads.nullable(), - ); - let mut query = crates::table - .left_join(recent_crate_downloads::table) - .select(selection) - .into_boxed(); - - let mut supports_seek = true; - - if let Some(q_string) = &q_string { - // Searching with a query string always puts the exact match at the start of the results, - // so we can't support seek-based pagination with it. - supports_seek = false; - - if !q_string.is_empty() { - let sort = params.get("sort").map(|s| &**s).unwrap_or("relevance"); - - let q = sql::("plainto_tsquery('english', ") - .bind::(q_string) - .sql(")"); +pub async fn search(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + use diesel::sql_types::{Bool, Text}; + + let params = req.query(); + let sort = params.get("sort").map(|s| &**s); + let include_yanked = params + .get("include_yanked") + .map(|s| s == "yes") + .unwrap_or(true); + + // Remove 0x00 characters from the query string because Postgres can not + // handle them and will return an error, which would cause us to throw + // an Internal Server Error ourselves. 
+ let q_string = params.get("q").map(|q| q.replace('\u{0}', "")); + + let selection = ( + ALL_COLUMNS, + false.into_sql::(), + recent_crate_downloads::downloads.nullable(), + ); + let mut query = crates::table + .left_join(recent_crate_downloads::table) + .select(selection) + .into_boxed(); + + let mut supports_seek = true; + + if let Some(q_string) = &q_string { + // Searching with a query string always puts the exact match at the start of the results, + // so we can't support seek-based pagination with it. + supports_seek = false; + + if !q_string.is_empty() { + let sort = params.get("sort").map(|s| &**s).unwrap_or("relevance"); + + let q = sql::("plainto_tsquery('english', ") + .bind::(q_string) + .sql(")"); + query = query.filter( + q.clone() + .matches(crates::textsearchable_index_col) + .or(Crate::loosly_matches_name(q_string)), + ); + + query = query.select(( + ALL_COLUMNS, + Crate::with_name(q_string), + recent_crate_downloads::downloads.nullable(), + )); + query = query.order(Crate::with_name(q_string).desc()); + + if sort == "relevance" { + let rank = ts_rank_cd(crates::textsearchable_index_col, q); + query = query.then_order_by(rank.desc()) + } + } + } + + if let Some(cat) = params.get("category") { + // Calculating the total number of results with filters is not supported yet. + supports_seek = false; + query = query.filter( - q.clone() - .matches(crates::textsearchable_index_col) - .or(Crate::loosly_matches_name(q_string)), + crates::id.eq_any( + crates_categories::table + .select(crates_categories::crate_id) + .inner_join(categories::table) + .filter( + categories::slug + .eq(cat) + .or(categories::slug.like(format!("{cat}::%"))), + ), + ), ); + } - query = query.select(( - ALL_COLUMNS, - Crate::with_name(q_string), - recent_crate_downloads::downloads.nullable(), - )); - query = query.order(Crate::with_name(q_string).desc()); + if let Some(kws) = params.get("all_keywords") { + // Calculating the total number of results with filters is not supported yet. + supports_seek = false; - if sort == "relevance" { - let rank = ts_rank_cd(crates::textsearchable_index_col, q); - query = query.then_order_by(rank.desc()) - } + let names: Vec<_> = kws + .split_whitespace() + .map(|name| name.to_lowercase()) + .collect(); + + query = query.filter( + // FIXME: Just use `.contains` in Diesel 2.0 + // https://github.com/diesel-rs/diesel/issues/2066 + Contains::new( + crates_keywords::table + .inner_join(keywords::table) + .filter(crates_keywords::crate_id.eq(crates::id)) + .select(array_agg(keywords::keyword)) + .single_value(), + names.into_sql::>(), + ), + ); + } else if let Some(kw) = params.get("keyword") { + // Calculating the total number of results with filters is not supported yet. + supports_seek = false; + + query = query.filter( + crates::id.eq_any( + crates_keywords::table + .select(crates_keywords::crate_id) + .inner_join(keywords::table) + .filter(lower(keywords::keyword).eq(lower(kw))), + ), + ); + } else if let Some(letter) = params.get("letter") { + // Calculating the total number of results with filters is not supported yet. + supports_seek = false; + + let pattern = format!( + "{}%", + letter + .chars() + .next() + .ok_or_else(|| bad_request("letter value must contain 1 character"))? + .to_lowercase() + .collect::() + ); + query = query.filter(canon_crate_name(crates::name).like(pattern)); + } else if let Some(user_id) = params.get("user_id").and_then(|s| s.parse::().ok()) { + // Calculating the total number of results with filters is not supported yet. 
+ supports_seek = false; + + query = query.filter( + crates::id.eq_any( + CrateOwner::by_owner_kind(OwnerKind::User) + .select(crate_owners::crate_id) + .filter(crate_owners::owner_id.eq(user_id)), + ), + ); + } else if let Some(team_id) = params.get("team_id").and_then(|s| s.parse::().ok()) { + // Calculating the total number of results with filters is not supported yet. + supports_seek = false; + + query = query.filter( + crates::id.eq_any( + CrateOwner::by_owner_kind(OwnerKind::Team) + .select(crate_owners::crate_id) + .filter(crate_owners::owner_id.eq(team_id)), + ), + ); + } else if params.get("following").is_some() { + // Calculating the total number of results with filters is not supported yet. + supports_seek = false; + + let user_id = AuthCheck::default().check(&req)?.user_id(); + + query = query.filter( + crates::id.eq_any( + follows::table + .select(follows::crate_id) + .filter(follows::user_id.eq(user_id)), + ), + ); + } else if params.get("ids[]").is_some() { + // Calculating the total number of results with filters is not supported yet. + supports_seek = false; + + let query_bytes = req.uri().query().unwrap_or("").as_bytes(); + let ids: Vec<_> = url::form_urlencoded::parse(query_bytes) + .filter(|(key, _)| key == "ids[]") + .map(|(_, value)| value.to_string()) + .collect(); + + query = query.filter(crates::name.eq(any(ids))); } - } - - if let Some(cat) = params.get("category") { - // Calculating the total number of results with filters is not supported yet. - supports_seek = false; - - query = query.filter( - crates::id.eq_any( - crates_categories::table - .select(crates_categories::crate_id) - .inner_join(categories::table) - .filter( - categories::slug - .eq(cat) - .or(categories::slug.like(format!("{cat}::%"))), - ), - ), - ); - } - - if let Some(kws) = params.get("all_keywords") { - // Calculating the total number of results with filters is not supported yet. - supports_seek = false; - - let names: Vec<_> = kws - .split_whitespace() - .map(|name| name.to_lowercase()) - .collect(); - - query = query.filter( - // FIXME: Just use `.contains` in Diesel 2.0 - // https://github.com/diesel-rs/diesel/issues/2066 - Contains::new( - crates_keywords::table - .inner_join(keywords::table) - .filter(crates_keywords::crate_id.eq(crates::id)) - .select(array_agg(keywords::keyword)) - .single_value(), - names.into_sql::>(), - ), - ); - } else if let Some(kw) = params.get("keyword") { - // Calculating the total number of results with filters is not supported yet. - supports_seek = false; - - query = query.filter( - crates::id.eq_any( - crates_keywords::table - .select(crates_keywords::crate_id) - .inner_join(keywords::table) - .filter(lower(keywords::keyword).eq(lower(kw))), - ), - ); - } else if let Some(letter) = params.get("letter") { - // Calculating the total number of results with filters is not supported yet. - supports_seek = false; - - let pattern = format!( - "{}%", - letter - .chars() - .next() - .ok_or_else(|| bad_request("letter value must contain 1 character"))? - .to_lowercase() - .collect::() - ); - query = query.filter(canon_crate_name(crates::name).like(pattern)); - } else if let Some(user_id) = params.get("user_id").and_then(|s| s.parse::().ok()) { - // Calculating the total number of results with filters is not supported yet. 
- supports_seek = false; - - query = query.filter( - crates::id.eq_any( - CrateOwner::by_owner_kind(OwnerKind::User) - .select(crate_owners::crate_id) - .filter(crate_owners::owner_id.eq(user_id)), - ), - ); - } else if let Some(team_id) = params.get("team_id").and_then(|s| s.parse::().ok()) { - // Calculating the total number of results with filters is not supported yet. - supports_seek = false; - - query = query.filter( - crates::id.eq_any( - CrateOwner::by_owner_kind(OwnerKind::Team) - .select(crate_owners::crate_id) - .filter(crate_owners::owner_id.eq(team_id)), - ), - ); - } else if params.get("following").is_some() { - // Calculating the total number of results with filters is not supported yet. - supports_seek = false; - - let user_id = AuthCheck::default().check(&req)?.user_id(); - - query = query.filter( - crates::id.eq_any( - follows::table - .select(follows::crate_id) - .filter(follows::user_id.eq(user_id)), - ), - ); - } else if params.get("ids[]").is_some() { - // Calculating the total number of results with filters is not supported yet. - supports_seek = false; - - let query_bytes = req.uri().query().unwrap_or("").as_bytes(); - let ids: Vec<_> = url::form_urlencoded::parse(query_bytes) - .filter(|(key, _)| key == "ids[]") - .map(|(_, value)| value.to_string()) - .collect(); - - query = query.filter(crates::name.eq(any(ids))); - } - - if !include_yanked { - // Calculating the total number of results with filters is not supported yet. - supports_seek = false; - - query = query.filter(exists( - versions::table - .filter(versions::crate_id.eq(crates::id)) - .filter(versions::yanked.eq(false)), - )); - } - - // Any sort other than 'relevance' (default) would ignore exact crate name matches - if sort == Some("downloads") { - // Custom sorting is not supported yet with seek. - supports_seek = false; - - query = query.order(crates::downloads.desc()) - } else if sort == Some("recent-downloads") { - // Custom sorting is not supported yet with seek. - supports_seek = false; - - query = query.order(recent_crate_downloads::downloads.desc().nulls_last()) - } else if sort == Some("recent-updates") { - // Custom sorting is not supported yet with seek. - supports_seek = false; - - query = query.order(crates::updated_at.desc()); - } else if sort == Some("new") { - // Custom sorting is not supported yet with seek. - supports_seek = false; - - query = query.order(crates::created_at.desc()); - } else { - query = query.then_order_by(crates::name.asc()) - } - - let pagination: PaginationOptions = PaginationOptions::builder() - .limit_page_numbers() - .enable_seek(supports_seek) - .gather(&req)?; - let conn = req.app().db_read()?; - - let (explicit_page, seek) = match pagination.page.clone() { - Page::Numeric(_) => (true, None), - Page::Seek(s) => (false, Some(s.decode::()?)), - Page::Unspecified => (false, None), - }; - - // To avoid breaking existing users, seek-based pagination is only used if an explicit page has - // not been provided. This way clients relying on meta.next_page will use the faster seek-based - // paginations, while client hardcoding pages handling will use the slower offset-based code. 
- let (total, next_page, prev_page, data, conn) = if supports_seek && !explicit_page { - // Equivalent of: - // `WHERE name > (SELECT name FROM crates WHERE id = $1) LIMIT $2` - query = query.limit(pagination.per_page as i64); - if let Some(seek) = seek { - let crate_name: String = crates::table - .find(seek) - .select(crates::name) - .get_result(&*conn)?; - query = query.filter(crates::name.gt(crate_name)); + + if !include_yanked { + // Calculating the total number of results with filters is not supported yet. + supports_seek = false; + + query = query.filter(exists( + versions::table + .filter(versions::crate_id.eq(crates::id)) + .filter(versions::yanked.eq(false)), + )); } - // This does a full index-only scan over the crates table to gather how many crates were - // published. Unfortunately on PostgreSQL counting the rows in a table requires scanning - // the table, and the `total` field is part of the stable registries API. - // - // If this becomes a problem in the future the crates count could be denormalized, at least - // for the filterless happy path. - let total: i64 = crates::table.count().get_result(&*conn)?; - - let results: Vec<(Crate, bool, Option)> = query.load(&*conn)?; - - let next_page = if let Some(last) = results.last() { - let mut params = IndexMap::new(); - params.insert( - "seek".into(), - crate::controllers::helpers::pagination::encode_seek(last.0.id)?, - ); - Some(req.query_with_params(params)) + // Any sort other than 'relevance' (default) would ignore exact crate name matches + if sort == Some("downloads") { + // Custom sorting is not supported yet with seek. + supports_seek = false; + + query = query.order(crates::downloads.desc()) + } else if sort == Some("recent-downloads") { + // Custom sorting is not supported yet with seek. + supports_seek = false; + + query = query.order(recent_crate_downloads::downloads.desc().nulls_last()) + } else if sort == Some("recent-updates") { + // Custom sorting is not supported yet with seek. + supports_seek = false; + + query = query.order(crates::updated_at.desc()); + } else if sort == Some("new") { + // Custom sorting is not supported yet with seek. + supports_seek = false; + + query = query.order(crates::created_at.desc()); + } else { + query = query.then_order_by(crates::name.asc()) + } + + let pagination: PaginationOptions = PaginationOptions::builder() + .limit_page_numbers() + .enable_seek(supports_seek) + .gather(&req)?; + let conn = req.app().db_read()?; + + let (explicit_page, seek) = match pagination.page.clone() { + Page::Numeric(_) => (true, None), + Page::Seek(s) => (false, Some(s.decode::()?)), + Page::Unspecified => (false, None), + }; + + // To avoid breaking existing users, seek-based pagination is only used if an explicit page has + // not been provided. This way clients relying on meta.next_page will use the faster seek-based + // paginations, while client hardcoding pages handling will use the slower offset-based code. + let (total, next_page, prev_page, data, conn) = if supports_seek && !explicit_page { + // Equivalent of: + // `WHERE name > (SELECT name FROM crates WHERE id = $1) LIMIT $2` + query = query.limit(pagination.per_page as i64); + if let Some(seek) = seek { + let crate_name: String = crates::table + .find(seek) + .select(crates::name) + .get_result(&*conn)?; + query = query.filter(crates::name.gt(crate_name)); + } + + // This does a full index-only scan over the crates table to gather how many crates were + // published. 
Unfortunately on PostgreSQL counting the rows in a table requires scanning + // the table, and the `total` field is part of the stable registries API. + // + // If this becomes a problem in the future the crates count could be denormalized, at least + // for the filterless happy path. + let total: i64 = crates::table.count().get_result(&*conn)?; + + let results: Vec<(Crate, bool, Option)> = query.load(&*conn)?; + + let next_page = if let Some(last) = results.last() { + let mut params = IndexMap::new(); + params.insert( + "seek".into(), + crate::controllers::helpers::pagination::encode_seek(last.0.id)?, + ); + Some(req.query_with_params(params)) + } else { + None + }; + + (total, next_page, None, results, conn) } else { - None + let query = query.pages_pagination(pagination); + let data: Paginated<(Crate, bool, Option)> = query.load(&conn)?; + ( + data.total(), + data.next_page_params().map(|p| req.query_with_params(p)), + data.prev_page_params().map(|p| req.query_with_params(p)), + data.into_iter().collect::>(), + conn, + ) }; - (total, next_page, None, results, conn) - } else { - let query = query.pages_pagination(pagination); - let data: Paginated<(Crate, bool, Option)> = query.load(&conn)?; - ( - data.total(), - data.next_page_params().map(|p| req.query_with_params(p)), - data.prev_page_params().map(|p| req.query_with_params(p)), - data.into_iter().collect::>(), - conn, - ) - }; - - let perfect_matches = data.iter().map(|&(_, b, _)| b).collect::>(); - let recent_downloads = data - .iter() - .map(|&(_, _, s)| s.unwrap_or(0)) - .collect::>(); - let crates = data.into_iter().map(|(c, _, _)| c).collect::>(); - - let versions: Vec = crates.versions().load(&*conn)?; - let versions = versions - .grouped_by(&crates) - .into_iter() - .map(TopVersions::from_versions); - - let crates = versions - .zip(crates) - .zip(perfect_matches) - .zip(recent_downloads) - .map( - |(((max_version, krate), perfect_match), recent_downloads)| { - EncodableCrate::from_minimal( - krate, - Some(&max_version), - Some(vec![]), - perfect_match, - Some(recent_downloads), - ) + let perfect_matches = data.iter().map(|&(_, b, _)| b).collect::>(); + let recent_downloads = data + .iter() + .map(|&(_, _, s)| s.unwrap_or(0)) + .collect::>(); + let crates = data.into_iter().map(|(c, _, _)| c).collect::>(); + + let versions: Vec = crates.versions().load(&*conn)?; + let versions = versions + .grouped_by(&crates) + .into_iter() + .map(TopVersions::from_versions); + + let crates = versions + .zip(crates) + .zip(perfect_matches) + .zip(recent_downloads) + .map( + |(((max_version, krate), perfect_match), recent_downloads)| { + EncodableCrate::from_minimal( + krate, + Some(&max_version), + Some(vec![]), + perfect_match, + Some(recent_downloads), + ) + }, + ) + .collect::>(); + + Ok(Json(json!({ + "crates": crates, + "meta": { + "total": total, + "next_page": next_page, + "prev_page": prev_page, }, - ) - .collect::>(); - - Ok(Json(json!({ - "crates": crates, - "meta": { - "total": total, - "next_page": next_page, - "prev_page": prev_page, - }, - }))) + }))) + }) + .await } diesel_infix_operator!(Contains, "@>"); diff --git a/src/controllers/metrics.rs b/src/controllers/metrics.rs index fede67c00f0..68d88945ee6 100644 --- a/src/controllers/metrics.rs +++ b/src/controllers/metrics.rs @@ -4,37 +4,40 @@ use axum::response::IntoResponse; use prometheus::{Encoder, TextEncoder}; /// Handles the `GET /api/private/metrics/:kind` endpoint. 
-pub fn prometheus(req: ConduitRequest) -> AppResult { - let app = req.app(); +pub async fn prometheus(req: ConduitRequest) -> AppResult { + conduit_compat(move || { + let app = req.app(); - if let Some(expected_token) = &app.config.metrics_authorization_token { - let provided_token = req - .headers() - .get(header::AUTHORIZATION) - .and_then(|value| value.to_str().ok()) - .and_then(|value| value.strip_prefix("Bearer ")); + if let Some(expected_token) = &app.config.metrics_authorization_token { + let provided_token = req + .headers() + .get(header::AUTHORIZATION) + .and_then(|value| value.to_str().ok()) + .and_then(|value| value.strip_prefix("Bearer ")); - if provided_token != Some(expected_token.as_str()) { - return Err(forbidden()); + if provided_token != Some(expected_token.as_str()) { + return Err(forbidden()); + } + } else { + // To avoid accidentally leaking metrics if the environment variable is not set, prevent + // access to any metrics endpoint if the authorization token is not configured. + return Err(Box::new(MetricsDisabled)); } - } else { - // To avoid accidentally leaking metrics if the environment variable is not set, prevent - // access to any metrics endpoint if the authorization token is not configured. - return Err(Box::new(MetricsDisabled)); - } - let metrics = match req.param("kind").unwrap() { - "service" => app.service_metrics.gather(&*app.db_read()?)?, - "instance" => app.instance_metrics.gather(app)?, - _ => return Err(not_found()), - }; + let metrics = match req.param("kind").unwrap() { + "service" => app.service_metrics.gather(&*app.db_read()?)?, + "instance" => app.instance_metrics.gather(app)?, + _ => return Err(not_found()), + }; - let mut output = Vec::new(); - TextEncoder::new().encode(&metrics, &mut output)?; + let mut output = Vec::new(); + TextEncoder::new().encode(&metrics, &mut output)?; - Ok(( - [(header::CONTENT_TYPE, "text/plain; charset=utf-8")], - output, - ) - .into_response()) + Ok(( + [(header::CONTENT_TYPE, "text/plain; charset=utf-8")], + output, + ) + .into_response()) + }) + .await } diff --git a/src/controllers/team.rs b/src/controllers/team.rs index 4607e97a1d6..8912cfa5156 100644 --- a/src/controllers/team.rs +++ b/src/controllers/team.rs @@ -5,12 +5,15 @@ use crate::schema::teams; use crate::views::EncodableTeam; /// Handles the `GET /teams/:team_id` route. -pub fn show_team(req: ConduitRequest) -> AppResult> { - use self::teams::dsl::{login, teams}; +pub async fn show_team(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + use self::teams::dsl::{login, teams}; - let name = req.param("team_id").unwrap(); - let conn = req.app().db_read()?; - let team: Team = teams.filter(login.eq(name)).first(&*conn)?; + let name = req.param("team_id").unwrap(); + let conn = req.app().db_read()?; + let team: Team = teams.filter(login.eq(name)).first(&*conn)?; - Ok(Json(json!({ "team": EncodableTeam::from(team) }))) + Ok(Json(json!({ "team": EncodableTeam::from(team) }))) + }) + .await } diff --git a/src/controllers/token.rs b/src/controllers/token.rs index 85c1da4c9b2..e595bad80e2 100644 --- a/src/controllers/token.rs +++ b/src/controllers/token.rs @@ -9,103 +9,115 @@ use axum::response::IntoResponse; use serde_json as json; /// Handles the `GET /me/tokens` route. 
-pub fn list(req: ConduitRequest) -> AppResult> { - let auth = AuthCheck::only_cookie().check(&req)?; - let conn = req.app().db_read_prefer_primary()?; - let user = auth.user(); - - let tokens: Vec = ApiToken::belonging_to(&user) - .filter(api_tokens::revoked.eq(false)) - .order(api_tokens::created_at.desc()) - .load(&*conn)?; - - Ok(Json(json!({ "api_tokens": tokens }))) +pub async fn list(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + let auth = AuthCheck::only_cookie().check(&req)?; + let conn = req.app().db_read_prefer_primary()?; + let user = auth.user(); + + let tokens: Vec = ApiToken::belonging_to(&user) + .filter(api_tokens::revoked.eq(false)) + .order(api_tokens::created_at.desc()) + .load(&*conn)?; + + Ok(Json(json!({ "api_tokens": tokens }))) + }) + .await } /// Handles the `PUT /me/tokens` route. -pub fn new(mut req: ConduitRequest) -> AppResult> { - /// The incoming serialization format for the `ApiToken` model. - #[derive(Deserialize, Serialize)] - struct NewApiToken { - name: String, - } - - /// The incoming serialization format for the `ApiToken` model. - #[derive(Deserialize, Serialize)] - struct NewApiTokenRequest { - api_token: NewApiToken, - } - - let max_size = 2000; - let length = req - .content_length() - .ok_or_else(|| bad_request("missing header: Content-Length"))?; - - if length > max_size { - return Err(bad_request(&format!("max content length is: {max_size}"))); - } - - let new: NewApiTokenRequest = json::from_reader(req.body_mut()) - .map_err(|e| bad_request(&format!("invalid new token request: {e:?}")))?; - - let name = &new.api_token.name; - if name.is_empty() { - return Err(bad_request("name must have a value")); - } - - let auth = AuthCheck::default().check(&req)?; - if auth.api_token_id().is_some() { - return Err(bad_request( - "cannot use an API token to create a new API token", - )); - } - - let conn = req.app().db_write()?; - let user = auth.user(); - - let max_token_per_user = 500; - let count: i64 = ApiToken::belonging_to(&user).count().get_result(&*conn)?; - if count >= max_token_per_user { - return Err(bad_request(&format!( - "maximum tokens per user is: {max_token_per_user}" - ))); - } - - let api_token = ApiToken::insert(&conn, user.id, name)?; - let api_token = EncodableApiTokenWithToken::from(api_token); - - Ok(Json(json!({ "api_token": api_token }))) +pub async fn new(mut req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + /// The incoming serialization format for the `ApiToken` model. + #[derive(Deserialize, Serialize)] + struct NewApiToken { + name: String, + } + + /// The incoming serialization format for the `ApiToken` model. 
+ #[derive(Deserialize, Serialize)] + struct NewApiTokenRequest { + api_token: NewApiToken, + } + + let max_size = 2000; + let length = req + .content_length() + .ok_or_else(|| bad_request("missing header: Content-Length"))?; + + if length > max_size { + return Err(bad_request(&format!("max content length is: {max_size}"))); + } + + let new: NewApiTokenRequest = json::from_reader(req.body_mut()) + .map_err(|e| bad_request(&format!("invalid new token request: {e:?}")))?; + + let name = &new.api_token.name; + if name.is_empty() { + return Err(bad_request("name must have a value")); + } + + let auth = AuthCheck::default().check(&req)?; + if auth.api_token_id().is_some() { + return Err(bad_request( + "cannot use an API token to create a new API token", + )); + } + + let conn = req.app().db_write()?; + let user = auth.user(); + + let max_token_per_user = 500; + let count: i64 = ApiToken::belonging_to(&user).count().get_result(&*conn)?; + if count >= max_token_per_user { + return Err(bad_request(&format!( + "maximum tokens per user is: {max_token_per_user}" + ))); + } + + let api_token = ApiToken::insert(&conn, user.id, name)?; + let api_token = EncodableApiTokenWithToken::from(api_token); + + Ok(Json(json!({ "api_token": api_token }))) + }) + .await } /// Handles the `DELETE /me/tokens/:id` route. -pub fn revoke(req: ConduitRequest) -> AppResult> { - let id = req - .param("id") - .unwrap() - .parse::() - .map_err(|e| bad_request(&format!("invalid token id: {e:?}")))?; - - let auth = AuthCheck::default().check(&req)?; - let conn = req.app().db_write()?; - let user = auth.user(); - diesel::update(ApiToken::belonging_to(&user).find(id)) - .set(api_tokens::revoked.eq(true)) - .execute(&*conn)?; - - Ok(Json(json!({}))) +pub async fn revoke(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + let id = req + .param("id") + .unwrap() + .parse::() + .map_err(|e| bad_request(&format!("invalid token id: {e:?}")))?; + + let auth = AuthCheck::default().check(&req)?; + let conn = req.app().db_write()?; + let user = auth.user(); + diesel::update(ApiToken::belonging_to(&user).find(id)) + .set(api_tokens::revoked.eq(true)) + .execute(&*conn)?; + + Ok(Json(json!({}))) + }) + .await } /// Handles the `DELETE /tokens/current` route. -pub fn revoke_current(req: ConduitRequest) -> AppResult { - let auth = AuthCheck::default().check(&req)?; - let api_token_id = auth - .api_token_id() - .ok_or_else(|| bad_request("token not provided"))?; - - let conn = req.app().db_write()?; - diesel::update(api_tokens::table.filter(api_tokens::id.eq(api_token_id))) - .set(api_tokens::revoked.eq(true)) - .execute(&*conn)?; - - Ok(StatusCode::NO_CONTENT.into_response()) +pub async fn revoke_current(req: ConduitRequest) -> AppResult { + conduit_compat(move || { + let auth = AuthCheck::default().check(&req)?; + let api_token_id = auth + .api_token_id() + .ok_or_else(|| bad_request("token not provided"))?; + + let conn = req.app().db_write()?; + diesel::update(api_tokens::table.filter(api_tokens::id.eq(api_token_id))) + .set(api_tokens::revoked.eq(true)) + .execute(&*conn)?; + + Ok(StatusCode::NO_CONTENT.into_response()) + }) + .await } diff --git a/src/controllers/user/me.rs b/src/controllers/user/me.rs index 1993e8f42bf..7bb14968c3c 100644 --- a/src/controllers/user/me.rs +++ b/src/controllers/user/me.rs @@ -13,256 +13,274 @@ use crate::schema::{crate_owners, crates, emails, follows, users, versions}; use crate::views::{EncodableMe, EncodablePrivateUser, EncodableVersion, OwnedCrate}; /// Handles the `GET /me` route. 
-pub fn me(req: ConduitRequest) -> AppResult> { - let user_id = AuthCheck::only_cookie().check(&req)?.user_id(); - let conn = req.app().db_read_prefer_primary()?; - - let (user, verified, email, verification_sent): (User, Option, Option, bool) = - users::table - .find(user_id) - .left_join(emails::table) - .select(( - users::all_columns, - emails::verified.nullable(), - emails::email.nullable(), - emails::token_generated_at.nullable().is_not_null(), - )) - .first(&*conn)?; - - let owned_crates = CrateOwner::by_owner_kind(OwnerKind::User) - .inner_join(crates::table) - .filter(crate_owners::owner_id.eq(user_id)) - .select((crates::id, crates::name, crate_owners::email_notifications)) - .order(crates::name.asc()) - .load(&*conn)? - .into_iter() - .map(|(id, name, email_notifications)| OwnedCrate { - id, - name, - email_notifications, - }) - .collect(); - - let verified = verified.unwrap_or(false); - let verification_sent = verified || verification_sent; - Ok(Json(EncodableMe { - user: EncodablePrivateUser::from(user, email, verified, verification_sent), - owned_crates, - })) +pub async fn me(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + let user_id = AuthCheck::only_cookie().check(&req)?.user_id(); + let conn = req.app().db_read_prefer_primary()?; + + let (user, verified, email, verification_sent): (User, Option, Option, bool) = + users::table + .find(user_id) + .left_join(emails::table) + .select(( + users::all_columns, + emails::verified.nullable(), + emails::email.nullable(), + emails::token_generated_at.nullable().is_not_null(), + )) + .first(&*conn)?; + + let owned_crates = CrateOwner::by_owner_kind(OwnerKind::User) + .inner_join(crates::table) + .filter(crate_owners::owner_id.eq(user_id)) + .select((crates::id, crates::name, crate_owners::email_notifications)) + .order(crates::name.asc()) + .load(&*conn)? + .into_iter() + .map(|(id, name, email_notifications)| OwnedCrate { + id, + name, + email_notifications, + }) + .collect(); + + let verified = verified.unwrap_or(false); + let verification_sent = verified || verification_sent; + Ok(Json(EncodableMe { + user: EncodablePrivateUser::from(user, email, verified, verification_sent), + owned_crates, + })) + }) + .await } /// Handles the `GET /me/updates` route. 
-pub fn updates(req: ConduitRequest) -> AppResult> { - use diesel::dsl::any; - - let auth = AuthCheck::only_cookie().check(&req)?; - let user = auth.user(); - - let followed_crates = Follow::belonging_to(&user).select(follows::crate_id); - let query = versions::table - .inner_join(crates::table) - .left_outer_join(users::table) - .filter(crates::id.eq(any(followed_crates))) - .order(versions::created_at.desc()) - .select(( - versions::all_columns, - crates::name, - users::all_columns.nullable(), - )) - .pages_pagination(PaginationOptions::builder().gather(&req)?); - let conn = req.app().db_read_prefer_primary()?; - let data: Paginated<(Version, String, Option)> = query.load(&conn)?; - let more = data.next_page_params().is_some(); - let versions = data.iter().map(|(v, _, _)| v).cloned().collect::>(); - let data = data - .into_iter() - .zip(VersionOwnerAction::for_versions(&conn, &versions)?.into_iter()) - .map(|((v, cn, pb), voas)| (v, cn, pb, voas)); - - let versions = data - .into_iter() - .map(|(version, crate_name, published_by, actions)| { - EncodableVersion::from(version, &crate_name, published_by, actions) - }) - .collect::>(); - - Ok(Json(json!({ - "versions": versions, - "meta": { "more": more }, - }))) +pub async fn updates(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + use diesel::dsl::any; + + let auth = AuthCheck::only_cookie().check(&req)?; + let user = auth.user(); + + let followed_crates = Follow::belonging_to(&user).select(follows::crate_id); + let query = versions::table + .inner_join(crates::table) + .left_outer_join(users::table) + .filter(crates::id.eq(any(followed_crates))) + .order(versions::created_at.desc()) + .select(( + versions::all_columns, + crates::name, + users::all_columns.nullable(), + )) + .pages_pagination(PaginationOptions::builder().gather(&req)?); + let conn = req.app().db_read_prefer_primary()?; + let data: Paginated<(Version, String, Option)> = query.load(&conn)?; + let more = data.next_page_params().is_some(); + let versions = data.iter().map(|(v, _, _)| v).cloned().collect::>(); + let data = data + .into_iter() + .zip(VersionOwnerAction::for_versions(&conn, &versions)?.into_iter()) + .map(|((v, cn, pb), voas)| (v, cn, pb, voas)); + + let versions = data + .into_iter() + .map(|(version, crate_name, published_by, actions)| { + EncodableVersion::from(version, &crate_name, published_by, actions) + }) + .collect::>(); + + Ok(Json(json!({ + "versions": versions, + "meta": { "more": more }, + }))) + }) + .await } /// Handles the `PUT /users/:user_id` route. 
-pub fn update_user(mut req: ConduitRequest) -> AppResult { - use self::emails::user_id; - use diesel::insert_into; +pub async fn update_user(mut req: ConduitRequest) -> AppResult { + conduit_compat(move || { + use self::emails::user_id; + use diesel::insert_into; - let auth = AuthCheck::default().check(&req)?; + let auth = AuthCheck::default().check(&req)?; - let param_user_id = req.param("user_id").unwrap(); + let param_user_id = req.param("user_id").unwrap(); - let user = auth.user(); + let user = auth.user(); - // need to check if current user matches user to be updated - if user.id.to_string() != param_user_id { - return Err(bad_request("current user does not match requested user")); - } + // need to check if current user matches user to be updated + if user.id.to_string() != param_user_id { + return Err(bad_request("current user does not match requested user")); + } - #[derive(Deserialize)] - struct UserUpdate { - user: User, - } + #[derive(Deserialize)] + struct UserUpdate { + user: User, + } - #[derive(Deserialize)] - struct User { - email: Option, - } + #[derive(Deserialize)] + struct User { + email: Option, + } - let user_update: UserUpdate = - serde_json::from_reader(req.body_mut()).map_err(|_| bad_request("invalid json request"))?; + let user_update: UserUpdate = serde_json::from_reader(req.body_mut()) + .map_err(|_| bad_request("invalid json request"))?; - let user_email = match &user_update.user.email { - Some(email) => email.trim(), - None => return Err(bad_request("empty email rejected")), - }; - - if user_email.is_empty() { - return Err(bad_request("empty email rejected")); - } - - let state = req.app(); - let conn = state.db_write()?; - conn.transaction::<_, BoxedAppError, _>(|| { - let new_email = NewEmail { - user_id: user.id, - email: user_email, + let user_email = match &user_update.user.email { + Some(email) => email.trim(), + None => return Err(bad_request("empty email rejected")), }; - let token: String = insert_into(emails::table) - .values(&new_email) - .on_conflict(user_id) - .do_update() - .set(&new_email) - .returning(emails::token) - .get_result(&*conn) - .map_err(|_| server_error("Error in creating token"))?; - - // This swallows any errors that occur while attempting to send the email. Some users have - // an invalid email set in their GitHub profile, and we should let them sign in even though - // we're trying to silently use their invalid address during signup and can't send them an - // email. They'll then have to provide a valid email address. - let _ = state - .emails - .send_user_confirm(user_email, &user.gh_login, &token); - - Ok(()) - })?; - - ok_true() + if user_email.is_empty() { + return Err(bad_request("empty email rejected")); + } + + let state = req.app(); + let conn = state.db_write()?; + conn.transaction::<_, BoxedAppError, _>(|| { + let new_email = NewEmail { + user_id: user.id, + email: user_email, + }; + + let token: String = insert_into(emails::table) + .values(&new_email) + .on_conflict(user_id) + .do_update() + .set(&new_email) + .returning(emails::token) + .get_result(&*conn) + .map_err(|_| server_error("Error in creating token"))?; + + // This swallows any errors that occur while attempting to send the email. Some users have + // an invalid email set in their GitHub profile, and we should let them sign in even though + // we're trying to silently use their invalid address during signup and can't send them an + // email. They'll then have to provide a valid email address. 
+ let _ = state + .emails + .send_user_confirm(user_email, &user.gh_login, &token); + + Ok(()) + })?; + + ok_true() + }) + .await } /// Handles the `PUT /confirm/:email_token` route -pub fn confirm_user_email(req: ConduitRequest) -> AppResult { - use diesel::update; +pub async fn confirm_user_email(req: ConduitRequest) -> AppResult { + conduit_compat(move || { + use diesel::update; - let conn = req.app().db_write()?; - let req_token = req.param("email_token").unwrap(); + let conn = req.app().db_write()?; + let req_token = req.param("email_token").unwrap(); - let updated_rows = update(emails::table.filter(emails::token.eq(req_token))) - .set(emails::verified.eq(true)) - .execute(&*conn)?; + let updated_rows = update(emails::table.filter(emails::token.eq(req_token))) + .set(emails::verified.eq(true)) + .execute(&*conn)?; - if updated_rows == 0 { - return Err(bad_request("Email belonging to token not found.")); - } + if updated_rows == 0 { + return Err(bad_request("Email belonging to token not found.")); + } - ok_true() + ok_true() + }) + .await } /// Handles `PUT /user/:user_id/resend` route -pub fn regenerate_token_and_send(req: ConduitRequest) -> AppResult { - use diesel::dsl::sql; - use diesel::update; - - let param_user_id = req - .param("user_id") - .unwrap() - .parse::() - .map_err(|err| err.chain(bad_request("invalid user_id")))?; - - let auth = AuthCheck::default().check(&req)?; - - let state = req.app(); - let conn = state.db_write()?; - let user = auth.user(); - - // need to check if current user matches user to be updated - if user.id != param_user_id { - return Err(bad_request("current user does not match requested user")); - } - - conn.transaction(|| { - let email: Email = update(Email::belonging_to(&user)) - .set(emails::token.eq(sql("DEFAULT"))) - .get_result(&*conn) - .map_err(|_| bad_request("Email could not be found"))?; - - state - .emails - .send_user_confirm(&email.email, &user.gh_login, &email.token) - })?; - - ok_true() +pub async fn regenerate_token_and_send(req: ConduitRequest) -> AppResult { + conduit_compat(move || { + use diesel::dsl::sql; + use diesel::update; + + let param_user_id = req + .param("user_id") + .unwrap() + .parse::() + .map_err(|err| err.chain(bad_request("invalid user_id")))?; + + let auth = AuthCheck::default().check(&req)?; + + let state = req.app(); + let conn = state.db_write()?; + let user = auth.user(); + + // need to check if current user matches user to be updated + if user.id != param_user_id { + return Err(bad_request("current user does not match requested user")); + } + + conn.transaction(|| { + let email: Email = update(Email::belonging_to(&user)) + .set(emails::token.eq(sql("DEFAULT"))) + .get_result(&*conn) + .map_err(|_| bad_request("Email could not be found"))?; + + state + .emails + .send_user_confirm(&email.email, &user.gh_login, &email.token) + })?; + + ok_true() + }) + .await } /// Handles `PUT /me/email_notifications` route -pub fn update_email_notifications(mut req: ConduitRequest) -> AppResult { - use self::crate_owners::dsl::*; - use diesel::pg::upsert::excluded; - - #[derive(Deserialize)] - struct CrateEmailNotifications { - id: i32, - email_notifications: bool, - } - - let updates: HashMap = - serde_json::from_reader::<_, Vec>(req.body_mut()) - .map_err(|_| bad_request("invalid json request"))? 
- .iter() - .map(|c| (c.id, c.email_notifications)) - .collect(); +pub async fn update_email_notifications(mut req: ConduitRequest) -> AppResult { + conduit_compat(move || { + use self::crate_owners::dsl::*; + use diesel::pg::upsert::excluded; + + #[derive(Deserialize)] + struct CrateEmailNotifications { + id: i32, + email_notifications: bool, + } + + let updates: HashMap = + serde_json::from_reader::<_, Vec>(req.body_mut()) + .map_err(|_| bad_request("invalid json request"))? + .iter() + .map(|c| (c.id, c.email_notifications)) + .collect(); + + let user_id = AuthCheck::default().check(&req)?.user_id(); + let conn = req.app().db_write()?; + + // Build inserts from existing crates belonging to the current user + let to_insert = CrateOwner::by_owner_kind(OwnerKind::User) + .filter(owner_id.eq(user_id)) + .select((crate_id, owner_id, owner_kind, email_notifications)) + .load(&*conn)? + .into_iter() + // Remove records whose `email_notifications` will not change from their current value + .map( + |(c_id, o_id, o_kind, e_notifications): (i32, i32, i32, bool)| { + let current_e_notifications = *updates.get(&c_id).unwrap_or(&e_notifications); + ( + crate_id.eq(c_id), + owner_id.eq(o_id), + owner_kind.eq(o_kind), + email_notifications.eq(current_e_notifications), + ) + }, + ) + .collect::>(); + + // Upsert crate owners; this should only actually exectute updates + diesel::insert_into(crate_owners) + .values(&to_insert) + .on_conflict((crate_id, owner_id, owner_kind)) + .do_update() + .set(email_notifications.eq(excluded(email_notifications))) + .execute(&*conn)?; - let user_id = AuthCheck::default().check(&req)?.user_id(); - let conn = req.app().db_write()?; - - // Build inserts from existing crates belonging to the current user - let to_insert = CrateOwner::by_owner_kind(OwnerKind::User) - .filter(owner_id.eq(user_id)) - .select((crate_id, owner_id, owner_kind, email_notifications)) - .load(&*conn)? - .into_iter() - // Remove records whose `email_notifications` will not change from their current value - .map( - |(c_id, o_id, o_kind, e_notifications): (i32, i32, i32, bool)| { - let current_e_notifications = *updates.get(&c_id).unwrap_or(&e_notifications); - ( - crate_id.eq(c_id), - owner_id.eq(o_id), - owner_kind.eq(o_kind), - email_notifications.eq(current_e_notifications), - ) - }, - ) - .collect::>(); - - // Upsert crate owners; this should only actually exectute updates - diesel::insert_into(crate_owners) - .values(&to_insert) - .on_conflict((crate_id, owner_id, owner_kind)) - .do_update() - .set(email_notifications.eq(excluded(email_notifications))) - .execute(&*conn)?; - - ok_true() + ok_true() + }) + .await } diff --git a/src/controllers/user/other.rs b/src/controllers/user/other.rs index 2eec1ac4591..6be6911edea 100644 --- a/src/controllers/user/other.rs +++ b/src/controllers/user/other.rs @@ -6,36 +6,42 @@ use crate::sql::lower; use crate::views::EncodablePublicUser; /// Handles the `GET /users/:user_id` route. 
-pub fn show(req: ConduitRequest) -> AppResult> { - use self::users::dsl::{gh_login, id, users}; - - let name = lower(req.param("user_id").unwrap()); - let conn = req.app().db_read_prefer_primary()?; - let user: User = users - .filter(lower(gh_login).eq(name)) - .order(id.desc()) - .first(&*conn)?; - - Ok(Json(json!({ "user": EncodablePublicUser::from(user) }))) +pub async fn show(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + use self::users::dsl::{gh_login, id, users}; + + let name = lower(req.param("user_id").unwrap()); + let conn = req.app().db_read_prefer_primary()?; + let user: User = users + .filter(lower(gh_login).eq(name)) + .order(id.desc()) + .first(&*conn)?; + + Ok(Json(json!({ "user": EncodablePublicUser::from(user) }))) + }) + .await } /// Handles the `GET /users/:user_id/stats` route. -pub fn stats(req: ConduitRequest) -> AppResult> { - use diesel::dsl::sum; - - let user_id = req - .param("user_id") - .unwrap() - .parse::() - .map_err(|err| err.chain(bad_request("invalid user_id")))?; - let conn = req.app().db_read_prefer_primary()?; - - let data: i64 = CrateOwner::by_owner_kind(OwnerKind::User) - .inner_join(crates::table) - .filter(crate_owners::owner_id.eq(user_id)) - .select(sum(crates::downloads)) - .first::>(&*conn)? - .unwrap_or(0); - - Ok(Json(json!({ "total_downloads": data }))) +pub async fn stats(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + use diesel::dsl::sum; + + let user_id = req + .param("user_id") + .unwrap() + .parse::() + .map_err(|err| err.chain(bad_request("invalid user_id")))?; + let conn = req.app().db_read_prefer_primary()?; + + let data: i64 = CrateOwner::by_owner_kind(OwnerKind::User) + .inner_join(crates::table) + .filter(crate_owners::owner_id.eq(user_id)) + .select(sum(crates::downloads)) + .first::>(&*conn)? + .unwrap_or(0); + + Ok(Json(json!({ "total_downloads": data }))) + }) + .await } diff --git a/src/controllers/user/session.rs b/src/controllers/user/session.rs index aacf02faef1..da36e680384 100644 --- a/src/controllers/user/session.rs +++ b/src/controllers/user/session.rs @@ -26,17 +26,20 @@ use crate::views::EncodableMe; /// "url": "https://github.com/login/oauth/authorize?client_id=...&state=...&scope=read%3Aorg" /// } /// ``` -pub fn begin(mut req: ConduitRequest) -> AppResult> { - let (url, state) = req - .app() - .github_oauth - .authorize_url(oauth2::CsrfToken::new_random) - .add_scope(Scope::new("read:org".to_string())) - .url(); - let state = state.secret().to_string(); - req.session_insert("github_oauth_state".to_string(), state.clone()); - - Ok(Json(json!({ "url": url.to_string(), "state": state }))) +pub async fn begin(mut req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + let (url, state) = req + .app() + .github_oauth + .authorize_url(oauth2::CsrfToken::new_random) + .add_scope(Scope::new("read:org".to_string())) + .url(); + let state = state.secret().to_string(); + req.session_insert("github_oauth_state".to_string(), state.clone()); + + Ok(Json(json!({ "url": url.to_string(), "state": state }))) + }) + .await } /// Handles the `GET /api/private/session/authorize` route. 
@@ -67,41 +70,46 @@ pub fn begin(mut req: ConduitRequest) -> AppResult> { /// } /// } /// ``` -pub fn authorize(mut req: ConduitRequest) -> AppResult> { - // Parse the url query - let mut query = req.query(); - let code = query.remove("code").unwrap_or_default(); - let state = query.remove("state").unwrap_or_default(); - - // Make sure that the state we just got matches the session state that we - // should have issued earlier. - { - let session_state = req.session_remove("github_oauth_state"); - let session_state = session_state.as_deref(); - if Some(&state[..]) != session_state { - return Err(bad_request("invalid state parameter")); +pub async fn authorize(mut req: ConduitRequest) -> AppResult> { + let req = conduit_compat(move || { + // Parse the url query + let mut query = req.query(); + let code = query.remove("code").unwrap_or_default(); + let state = query.remove("state").unwrap_or_default(); + + // Make sure that the state we just got matches the session state that we + // should have issued earlier. + { + let session_state = req.session_remove("github_oauth_state"); + let session_state = session_state.as_deref(); + if Some(&state[..]) != session_state { + return Err(bad_request("invalid state parameter")); + } } - } - let app = req.app(); + let app = req.app(); - // Fetch the access token from GitHub using the code we just got - let code = AuthorizationCode::new(code); - let token = app - .github_oauth - .exchange_code(code) - .request(http_client) - .map_err(|err| err.chain(server_error("Error obtaining token")))?; - let token = token.access_token(); + // Fetch the access token from GitHub using the code we just got + let code = AuthorizationCode::new(code); + let token = app + .github_oauth + .exchange_code(code) + .request(http_client) + .map_err(|err| err.chain(server_error("Error obtaining token")))?; + let token = token.access_token(); - // Fetch the user info from GitHub using the access token we just got and create a user record - let ghuser = app.github.current_user(token)?; - let user = save_user_to_database(&ghuser, token.secret(), &app.emails, &*app.db_write()?)?; + // Fetch the user info from GitHub using the access token we just got and create a user record + let ghuser = app.github.current_user(token)?; + let user = save_user_to_database(&ghuser, token.secret(), &app.emails, &*app.db_write()?)?; - // Log in by setting a cookie and the middleware authentication - req.session_insert("user_id".to_string(), user.id.to_string()); + // Log in by setting a cookie and the middleware authentication + req.session_insert("user_id".to_string(), user.id.to_string()); + + Ok(req) + }) + .await?; - super::me::me(req) + super::me::me(req).await } fn save_user_to_database( @@ -135,7 +143,7 @@ fn save_user_to_database( } /// Handles the `DELETE /api/private/session` route. -pub fn logout(mut req: ConduitRequest) -> Json { +pub async fn logout(mut req: ConduitRequest) -> Json { req.session_remove("user_id"); Json(true) } diff --git a/src/controllers/version/deprecated.rs b/src/controllers/version/deprecated.rs index 2ddf521433a..aa2bb66a05b 100644 --- a/src/controllers/version/deprecated.rs +++ b/src/controllers/version/deprecated.rs @@ -12,61 +12,67 @@ use crate::schema::*; use crate::views::EncodableVersion; /// Handles the `GET /versions` route. 
-pub fn index(req: ConduitRequest) -> AppResult> { - use diesel::dsl::any; - let conn = req.app().db_read()?; +pub async fn index(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + use diesel::dsl::any; + let conn = req.app().db_read()?; - // Extract all ids requested. - let query = url::form_urlencoded::parse(req.uri().query().unwrap_or("").as_bytes()); - let ids = query - .filter_map(|(ref a, ref b)| if *a == "ids[]" { b.parse().ok() } else { None }) - .collect::>(); + // Extract all ids requested. + let query = url::form_urlencoded::parse(req.uri().query().unwrap_or("").as_bytes()); + let ids = query + .filter_map(|(ref a, ref b)| if *a == "ids[]" { b.parse().ok() } else { None }) + .collect::>(); - let versions_and_publishers: Vec<(Version, String, Option)> = versions::table - .inner_join(crates::table) - .left_outer_join(users::table) - .select(( - versions::all_columns, - crates::name, - users::all_columns.nullable(), - )) - .filter(versions::id.eq(any(ids))) - .load(&*conn)?; - let versions = versions_and_publishers - .iter() - .map(|(v, _, _)| v) - .cloned() - .collect::>(); - let versions = versions_and_publishers - .into_iter() - .zip(VersionOwnerAction::for_versions(&conn, &versions)?.into_iter()) - .map(|((version, crate_name, published_by), actions)| { - EncodableVersion::from(version, &crate_name, published_by, actions) - }) - .collect::>(); + let versions_and_publishers: Vec<(Version, String, Option)> = versions::table + .inner_join(crates::table) + .left_outer_join(users::table) + .select(( + versions::all_columns, + crates::name, + users::all_columns.nullable(), + )) + .filter(versions::id.eq(any(ids))) + .load(&*conn)?; + let versions = versions_and_publishers + .iter() + .map(|(v, _, _)| v) + .cloned() + .collect::>(); + let versions = versions_and_publishers + .into_iter() + .zip(VersionOwnerAction::for_versions(&conn, &versions)?.into_iter()) + .map(|((version, crate_name, published_by), actions)| { + EncodableVersion::from(version, &crate_name, published_by, actions) + }) + .collect::>(); - Ok(Json(json!({ "versions": versions }))) + Ok(Json(json!({ "versions": versions }))) + }) + .await } /// Handles the `GET /versions/:version_id` route. /// The frontend doesn't appear to hit this endpoint. Instead, the version information appears to /// be returned by `krate::show`. 
-pub fn show_by_id(req: ConduitRequest) -> AppResult> { - let id = req.param("version_id").unwrap(); - let id = id.parse().unwrap_or(0); - let conn = req.app().db_read()?; - let (version, krate, published_by): (Version, Crate, Option) = versions::table - .find(id) - .inner_join(crates::table) - .left_outer_join(users::table) - .select(( - versions::all_columns, - crate::models::krate::ALL_COLUMNS, - users::all_columns.nullable(), - )) - .first(&*conn)?; - let audit_actions = VersionOwnerAction::by_version(&conn, &version)?; +pub async fn show_by_id(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + let id = req.param("version_id").unwrap(); + let id = id.parse().unwrap_or(0); + let conn = req.app().db_read()?; + let (version, krate, published_by): (Version, Crate, Option) = versions::table + .find(id) + .inner_join(crates::table) + .left_outer_join(users::table) + .select(( + versions::all_columns, + crate::models::krate::ALL_COLUMNS, + users::all_columns.nullable(), + )) + .first(&*conn)?; + let audit_actions = VersionOwnerAction::by_version(&conn, &version)?; - let version = EncodableVersion::from(version, &krate.name, published_by, audit_actions); - Ok(Json(json!({ "version": version }))) + let version = EncodableVersion::from(version, &krate.name, published_by, audit_actions); + Ok(Json(json!({ "version": version }))) + }) + .await } diff --git a/src/controllers/version/downloads.rs b/src/controllers/version/downloads.rs index d5d7e8dae64..0af5f97dc3e 100644 --- a/src/controllers/version/downloads.rs +++ b/src/controllers/version/downloads.rs @@ -13,129 +13,135 @@ use chrono::{Duration, NaiveDate, Utc}; /// Handles the `GET /crates/:crate_id/:version/download` route. /// This returns a URL to the location where the crate is stored. -pub fn download(req: ConduitRequest) -> AppResult { - let app = req.app(); - - let mut crate_name = req.param("crate_id").unwrap().to_string(); - let version = req.param("version").unwrap(); - - let mut log_metadata = None; - - let cache_key = (crate_name.to_string(), version.to_string()); - if let Some(version_id) = app.version_id_cacher.get(&cache_key) { - app.instance_metrics.version_id_cache_hits.inc(); - - // The increment does not happen instantly, but it's deferred to be executed in a batch - // along with other downloads. See crate::downloads_counter for the implementation. - app.downloads_counter.increment(version_id); - } else { - app.instance_metrics.version_id_cache_misses.inc(); - - // When no database connection is ready unconditional redirects will be performed. This could - // happen if the pool is not healthy or if an operator manually configured the application to - // always perform unconditional redirects (for example as part of the mitigations for an - // outage). See the comments below for a description of what unconditional redirects do. - let conn = if app.config.force_unconditional_redirects { - None - } else { - match app.db_read_prefer_primary() { - Ok(conn) => Some(conn), - Err(PoolError::UnhealthyPool) => None, - Err(err) => return Err(err.into()), - } - }; +pub async fn download(req: ConduitRequest) -> AppResult { + conduit_compat(move || { + let app = req.app(); - if let Some(conn) = &conn { - use self::versions::dsl::*; - - // Returns the crate name as stored in the database, or an error if we could - // not load the version ID from the database. 
- let (version_id, canonical_crate_name) = app - .instance_metrics - .downloads_select_query_execution_time - .observe_closure_duration(|| { - versions - .inner_join(crates::table) - .select((id, crates::name)) - .filter(Crate::with_name(&crate_name)) - .filter(num.eq(version)) - .first::<(i32, String)>(&**conn) - })?; - - if canonical_crate_name != crate_name { - app.instance_metrics - .downloads_non_canonical_crate_name_total - .inc(); - log_metadata = Some(("bot", "dl")); - crate_name = canonical_crate_name; - } else { - // The version_id is only cached if the provided crate name was canonical. - // Non-canonical requests fallback to the "slow" path with a DB query, but - // we typically only get a few hundred non-canonical requests in a day anyway. - app.version_id_cacher.insert(cache_key, version_id); - } + let mut crate_name = req.param("crate_id").unwrap().to_string(); + let version = req.param("version").unwrap(); + + let mut log_metadata = None; + + let cache_key = (crate_name.to_string(), version.to_string()); + if let Some(version_id) = app.version_id_cacher.get(&cache_key) { + app.instance_metrics.version_id_cache_hits.inc(); // The increment does not happen instantly, but it's deferred to be executed in a batch // along with other downloads. See crate::downloads_counter for the implementation. app.downloads_counter.increment(version_id); } else { - // The download endpoint is the most critical route in the whole crates.io application, - // as it's relied upon by users and automations to download crates. Keeping it working - // is the most important thing for us. - // - // The endpoint relies on the database to fetch the canonical crate name (with the - // right capitalization and hyphenation), but that's only needed to serve clients who - // don't call the endpoint with the crate's canonical name. - // - // Thankfully Cargo always uses the right name when calling the endpoint, and we can - // keep it working during a full database outage by unconditionally redirecting without - // checking whether the crate exists or the rigth name is used. Non-Cargo clients might - // get a 404 response instead of a 500, but that's worth it. - // - // Without a working database we also can't count downloads, but that's also less - // critical than keeping Cargo downloads operational. - - app.instance_metrics - .downloads_unconditional_redirects_total - .inc(); - log_metadata = Some(("unconditional_redirect", "true")); - } - }; + app.instance_metrics.version_id_cache_misses.inc(); + + // When no database connection is ready unconditional redirects will be performed. This could + // happen if the pool is not healthy or if an operator manually configured the application to + // always perform unconditional redirects (for example as part of the mitigations for an + // outage). See the comments below for a description of what unconditional redirects do. + let conn = if app.config.force_unconditional_redirects { + None + } else { + match app.db_read_prefer_primary() { + Ok(conn) => Some(conn), + Err(PoolError::UnhealthyPool) => None, + Err(err) => return Err(err.into()), + } + }; + + if let Some(conn) = &conn { + use self::versions::dsl::*; + + // Returns the crate name as stored in the database, or an error if we could + // not load the version ID from the database. 
+ let (version_id, canonical_crate_name) = app + .instance_metrics + .downloads_select_query_execution_time + .observe_closure_duration(|| { + versions + .inner_join(crates::table) + .select((id, crates::name)) + .filter(Crate::with_name(&crate_name)) + .filter(num.eq(version)) + .first::<(i32, String)>(&**conn) + })?; + + if canonical_crate_name != crate_name { + app.instance_metrics + .downloads_non_canonical_crate_name_total + .inc(); + log_metadata = Some(("bot", "dl")); + crate_name = canonical_crate_name; + } else { + // The version_id is only cached if the provided crate name was canonical. + // Non-canonical requests fallback to the "slow" path with a DB query, but + // we typically only get a few hundred non-canonical requests in a day anyway. + app.version_id_cacher.insert(cache_key, version_id); + } + + // The increment does not happen instantly, but it's deferred to be executed in a batch + // along with other downloads. See crate::downloads_counter for the implementation. + app.downloads_counter.increment(version_id); + } else { + // The download endpoint is the most critical route in the whole crates.io application, + // as it's relied upon by users and automations to download crates. Keeping it working + // is the most important thing for us. + // + // The endpoint relies on the database to fetch the canonical crate name (with the + // right capitalization and hyphenation), but that's only needed to serve clients who + // don't call the endpoint with the crate's canonical name. + // + // Thankfully Cargo always uses the right name when calling the endpoint, and we can + // keep it working during a full database outage by unconditionally redirecting without + // checking whether the crate exists or the rigth name is used. Non-Cargo clients might + // get a 404 response instead of a 500, but that's worth it. + // + // Without a working database we also can't count downloads, but that's also less + // critical than keeping Cargo downloads operational. + + app.instance_metrics + .downloads_unconditional_redirects_total + .inc(); + log_metadata = Some(("unconditional_redirect", "true")); + } + }; - let redirect_url = app.config.uploader().crate_location(&crate_name, version); + let redirect_url = app.config.uploader().crate_location(&crate_name, version); - if let Some((key, value)) = log_metadata { - req.add_custom_metadata(key, value); - } + if let Some((key, value)) = log_metadata { + req.add_custom_metadata(key, value); + } - if req.wants_json() { - Ok(Json(json!({ "url": redirect_url })).into_response()) - } else { - Ok(req.redirect(redirect_url)) - } + if req.wants_json() { + Ok(Json(json!({ "url": redirect_url })).into_response()) + } else { + Ok(req.redirect(redirect_url)) + } + }) + .await } /// Handles the `GET /crates/:crate_id/:version/downloads` route. -pub fn downloads(req: ConduitRequest) -> AppResult> { - let (crate_name, semver) = extract_crate_name_and_semver(&req)?; - - let conn = req.app().db_read()?; - let (version, _) = version_and_crate(&conn, crate_name, semver)?; - - let cutoff_end_date = req - .query() - .get("before_date") - .and_then(|d| NaiveDate::parse_from_str(d, "%F").ok()) - .unwrap_or_else(|| Utc::now().date_naive()); - let cutoff_start_date = cutoff_end_date - Duration::days(89); - - let downloads = VersionDownload::belonging_to(&version) - .filter(version_downloads::date.between(cutoff_start_date, cutoff_end_date)) - .order(version_downloads::date) - .load(&*conn)? 
- .into_iter() - .map(VersionDownload::into) - .collect::>(); - - Ok(Json(json!({ "version_downloads": downloads }))) +pub async fn downloads(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + let (crate_name, semver) = extract_crate_name_and_semver(&req)?; + + let conn = req.app().db_read()?; + let (version, _) = version_and_crate(&conn, crate_name, semver)?; + + let cutoff_end_date = req + .query() + .get("before_date") + .and_then(|d| NaiveDate::parse_from_str(d, "%F").ok()) + .unwrap_or_else(|| Utc::now().date_naive()); + let cutoff_start_date = cutoff_end_date - Duration::days(89); + + let downloads = VersionDownload::belonging_to(&version) + .filter(version_downloads::date.between(cutoff_start_date, cutoff_end_date)) + .order(version_downloads::date) + .load(&*conn)? + .into_iter() + .map(VersionDownload::into) + .collect::>(); + + Ok(Json(json!({ "version_downloads": downloads }))) + }) + .await } diff --git a/src/controllers/version/metadata.rs b/src/controllers/version/metadata.rs index 2a3d8395542..c7613b1f37e 100644 --- a/src/controllers/version/metadata.rs +++ b/src/controllers/version/metadata.rs @@ -18,21 +18,24 @@ use super::{extract_crate_name_and_semver, version_and_crate}; /// In addition to returning cached data from the index, this returns /// fields for `id`, `version_id`, and `downloads` (which appears to always /// be 0) -pub fn dependencies(req: ConduitRequest) -> AppResult> { - let (crate_name, semver) = extract_crate_name_and_semver(&req)?; - let conn = req.app().db_read()?; - let (version, _) = version_and_crate(&conn, crate_name, semver)?; - let deps = version.dependencies(&conn)?; - let deps = deps - .into_iter() - .map(|(dep, crate_name)| EncodableDependency::from_dep(dep, &crate_name)) - .collect::>(); - - Ok(Json(json!({ "dependencies": deps }))) +pub async fn dependencies(req: ConduitRequest) -> AppResult> { + conduit_compat(move || { + let (crate_name, semver) = extract_crate_name_and_semver(&req)?; + let conn = req.app().db_read()?; + let (version, _) = version_and_crate(&conn, crate_name, semver)?; + let deps = version.dependencies(&conn)?; + let deps = deps + .into_iter() + .map(|(dep, crate_name)| EncodableDependency::from_dep(dep, &crate_name)) + .collect::>(); + + Ok(Json(json!({ "dependencies": deps }))) + }) + .await } /// Handles the `GET /crates/:crate_id/:version/authors` route. -pub fn authors(_req: ConduitRequest) -> Json { +pub async fn authors() -> Json { // Currently we return the empty list. // Because the API is not used anymore after RFC https://github.com/rust-lang/rfcs/pull/3052. @@ -46,13 +49,16 @@ pub fn authors(_req: ConduitRequest) -> Json { /// /// The frontend doesn't appear to hit this endpoint, but our tests do, and it seems to be a useful /// API route to have. 
diff --git a/src/controllers/version/metadata.rs b/src/controllers/version/metadata.rs
index 2a3d8395542..c7613b1f37e 100644
--- a/src/controllers/version/metadata.rs
+++ b/src/controllers/version/metadata.rs
@@ -18,21 +18,24 @@ use super::{extract_crate_name_and_semver, version_and_crate};
 /// In addition to returning cached data from the index, this returns
 /// fields for `id`, `version_id`, and `downloads` (which appears to always
 /// be 0)
-pub fn dependencies(req: ConduitRequest) -> AppResult<Json<Value>> {
-    let (crate_name, semver) = extract_crate_name_and_semver(&req)?;
-    let conn = req.app().db_read()?;
-    let (version, _) = version_and_crate(&conn, crate_name, semver)?;
-    let deps = version.dependencies(&conn)?;
-    let deps = deps
-        .into_iter()
-        .map(|(dep, crate_name)| EncodableDependency::from_dep(dep, &crate_name))
-        .collect::<Vec<EncodableDependency>>();
-
-    Ok(Json(json!({ "dependencies": deps })))
+pub async fn dependencies(req: ConduitRequest) -> AppResult<Json<Value>> {
+    conduit_compat(move || {
+        let (crate_name, semver) = extract_crate_name_and_semver(&req)?;
+        let conn = req.app().db_read()?;
+        let (version, _) = version_and_crate(&conn, crate_name, semver)?;
+        let deps = version.dependencies(&conn)?;
+        let deps = deps
+            .into_iter()
+            .map(|(dep, crate_name)| EncodableDependency::from_dep(dep, &crate_name))
+            .collect::<Vec<EncodableDependency>>();
+
+        Ok(Json(json!({ "dependencies": deps })))
+    })
+    .await
 }
 /// Handles the `GET /crates/:crate_id/:version/authors` route.
-pub fn authors(_req: ConduitRequest) -> Json<Value> {
+pub async fn authors() -> Json<Value> {
     // Currently we return the empty list.
     // Because the API is not used anymore after RFC https://github.com/rust-lang/rfcs/pull/3052.
@@ -46,13 +49,16 @@ pub fn authors(_req: ConduitRequest) -> Json<Value> {
 ///
 /// The frontend doesn't appear to hit this endpoint, but our tests do, and it seems to be a useful
 /// API route to have.
-pub fn show(req: ConduitRequest) -> AppResult<Json<Value>> {
-    let (crate_name, semver) = extract_crate_name_and_semver(&req)?;
-    let conn = req.app().db_read()?;
-    let (version, krate) = version_and_crate(&conn, crate_name, semver)?;
-    let published_by = version.published_by(&conn);
-    let actions = VersionOwnerAction::by_version(&conn, &version)?;
-
-    let version = EncodableVersion::from(version, &krate.name, published_by, actions);
-    Ok(Json(json!({ "version": version })))
+pub async fn show(req: ConduitRequest) -> AppResult<Json<Value>> {
+    conduit_compat(move || {
+        let (crate_name, semver) = extract_crate_name_and_semver(&req)?;
+        let conn = req.app().db_read()?;
+        let (version, krate) = version_and_crate(&conn, crate_name, semver)?;
+        let published_by = version.published_by(&conn);
+        let actions = VersionOwnerAction::by_version(&conn, &version)?;
+
+        let version = EncodableVersion::from(version, &krate.name, published_by, actions);
+        Ok(Json(json!({ "version": version })))
+    })
+    .await
 }
diff --git a/src/controllers/version/yank.rs b/src/controllers/version/yank.rs
index 1c0ff72c98e..12332f1c850 100644
--- a/src/controllers/version/yank.rs
+++ b/src/controllers/version/yank.rs
@@ -19,13 +19,13 @@ use crate::worker;
 /// Crate deletion is not implemented to avoid breaking builds,
 /// and the goal of yanking a crate is to prevent crates
 /// beginning to depend on the yanked crate version.
-pub fn yank(req: ConduitRequest) -> AppResult<Response> {
-    modify_yank(&req, true)
+pub async fn yank(req: ConduitRequest) -> AppResult<Response> {
+    conduit_compat(move || modify_yank(&req, true)).await
 }
 /// Handles the `PUT /crates/:crate_id/:version/unyank` route.
-pub fn unyank(req: ConduitRequest) -> AppResult<Response> {
-    modify_yank(&req, false)
+pub async fn unyank(req: ConduitRequest) -> AppResult<Response> {
+    conduit_compat(move || modify_yank(&req, false)).await
 }
 /// Changes `yanked` flag on a crate version record
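Note: because the controllers are now `async fn`s, axum can serve them directly (with `ConduitRequest` presumably acting as an extractor provided by conduit-axum), so the route table below drops the `conduit(...)` adapter and registers them as-is. A self-contained sketch of the registration pattern, with a hypothetical stub standing in for a real controller:

use axum::{routing::get, Json, Router};
use serde_json::{json, Value};

// Hypothetical stand-in for a controller such as category::slugs.
async fn category_slugs_stub() -> Json<Value> {
    Json(json!({ "category_slugs": [] }))
}

fn example_router() -> Router {
    // An async fn whose return type implements IntoResponse can be passed
    // straight to get()/put()/delete()/post() without any wrapper.
    Router::new().route("/api/v1/category_slugs", get(category_slugs_stub))
}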
.route("/api/v1/versions", get(version::deprecated::index)) .route( "/api/v1/versions/:version_id", - get(conduit(version::deprecated::show_by_id)), + get(version::deprecated::show_by_id), ) // Routes used by the frontend - .route( - "/api/v1/crates/:crate_id", - get(conduit(krate::metadata::show)), - ) + .route("/api/v1/crates/:crate_id", get(krate::metadata::show)) .route( "/api/v1/crates/:crate_id/:version", - get(conduit(version::metadata::show)), + get(version::metadata::show), ) .route( "/api/v1/crates/:crate_id/:version/readme", - get(conduit(krate::metadata::readme)), + get(krate::metadata::readme), ) .route( "/api/v1/crates/:crate_id/:version/dependencies", - get(conduit(version::metadata::dependencies)), + get(version::metadata::dependencies), ) .route( "/api/v1/crates/:crate_id/:version/downloads", - get(conduit(version::downloads::downloads)), + get(version::downloads::downloads), ) .route( "/api/v1/crates/:crate_id/:version/authors", - get(conduit(version::metadata::authors)), + get(version::metadata::authors), ) .route( "/api/v1/crates/:crate_id/downloads", - get(conduit(krate::downloads::downloads)), + get(krate::downloads::downloads), ) .route( "/api/v1/crates/:crate_id/versions", - get(conduit(krate::metadata::versions)), + get(krate::metadata::versions), ) .route( "/api/v1/crates/:crate_id/follow", - put(conduit(krate::follow::follow)).delete(conduit(krate::follow::unfollow)), + put(krate::follow::follow).delete(krate::follow::unfollow), ) .route( "/api/v1/crates/:crate_id/following", - get(conduit(krate::follow::following)), + get(krate::follow::following), ) .route( "/api/v1/crates/:crate_id/owner_team", - get(conduit(krate::owners::owner_team)), + get(krate::owners::owner_team), ) .route( "/api/v1/crates/:crate_id/owner_user", - get(conduit(krate::owners::owner_user)), + get(krate::owners::owner_user), ) .route( "/api/v1/crates/:crate_id/reverse_dependencies", - get(conduit(krate::metadata::reverse_dependencies)), + get(krate::metadata::reverse_dependencies), ) .route("/api/v1/keywords", get(keyword::index)) .route("/api/v1/keywords/:keyword_id", get(keyword::show)) - .route("/api/v1/categories", get(conduit(category::index))) - .route( - "/api/v1/categories/:category_id", - get(conduit(category::show)), - ) - .route("/api/v1/category_slugs", get(conduit(category::slugs))) + .route("/api/v1/categories", get(category::index)) + .route("/api/v1/categories/:category_id", get(category::show)) + .route("/api/v1/category_slugs", get(category::slugs)) .route( "/api/v1/users/:user_id", - get(conduit(user::other::show)).put(conduit(user::me::update_user)), - ) - .route( - "/api/v1/users/:user_id/stats", - get(conduit(user::other::stats)), - ) - .route("/api/v1/teams/:team_id", get(conduit(team::show_team))) - .route("/api/v1/me", get(conduit(user::me::me))) - .route("/api/v1/me/updates", get(conduit(user::me::updates))) - .route( - "/api/v1/me/tokens", - get(conduit(token::list)).put(conduit(token::new)), - ) - .route("/api/v1/me/tokens/:id", delete(conduit(token::revoke))) - .route( - "/api/v1/tokens/current", - delete(conduit(token::revoke_current)), + get(user::other::show).put(user::me::update_user), ) + .route("/api/v1/users/:user_id/stats", get(user::other::stats)) + .route("/api/v1/teams/:team_id", get(team::show_team)) + .route("/api/v1/me", get(user::me::me)) + .route("/api/v1/me/updates", get(user::me::updates)) + .route("/api/v1/me/tokens", get(token::list).put(token::new)) + .route("/api/v1/me/tokens/:id", delete(token::revoke)) + .route("/api/v1/tokens/current", 
         .route(
             "/api/v1/me/crate_owner_invitations",
-            get(conduit(crate_owner_invitation::list)),
+            get(crate_owner_invitation::list),
         )
         .route(
             "/api/v1/me/crate_owner_invitations/:crate_id",
-            put(conduit(crate_owner_invitation::handle_invite)),
+            put(crate_owner_invitation::handle_invite),
         )
         .route(
             "/api/v1/me/crate_owner_invitations/accept/:token",
-            put(conduit(crate_owner_invitation::handle_invite_with_token)),
+            put(crate_owner_invitation::handle_invite_with_token),
         )
         .route(
             "/api/v1/me/email_notifications",
-            put(conduit(user::me::update_email_notifications)),
+            put(user::me::update_email_notifications),
         )
-        .route("/api/v1/summary", get(conduit(krate::metadata::summary)))
+        .route("/api/v1/summary", get(krate::metadata::summary))
         .route(
             "/api/v1/confirm/:email_token",
-            put(conduit(user::me::confirm_user_email)),
+            put(user::me::confirm_user_email),
         )
         .route(
             "/api/v1/users/:user_id/resend",
-            put(conduit(user::me::regenerate_token_and_send)),
+            put(user::me::regenerate_token_and_send),
         )
         .route(
             "/api/v1/site_metadata",
             get(site_metadata::show_deployed_sha),
         )
         // Session management
-        .route(
-            "/api/private/session/begin",
-            get(conduit(user::session::begin)),
-        )
+        .route("/api/private/session/begin", get(user::session::begin))
         .route(
             "/api/private/session/authorize",
-            get(conduit(user::session::authorize)),
-        )
-        .route(
-            "/api/private/session",
-            delete(conduit(user::session::logout)),
+            get(user::session::authorize),
         )
+        .route("/api/private/session", delete(user::session::logout))
         // Metrics
-        .route(
-            "/api/private/metrics/:kind",
-            get(conduit(metrics::prometheus)),
-        )
+        .route("/api/private/metrics/:kind", get(metrics::prometheus))
         // Crate ownership invitations management in the frontend
         .route(
             "/api/private/crate_owner_invitations",
-            get(conduit(crate_owner_invitation::private_list)),
+            get(crate_owner_invitation::private_list),
         )
         // Alerts from GitHub scanning for exposed API tokens
         .route(
             "/api/github/secret-scanning/verify",
-            post(conduit(github::secret_scanning::verify)),
+            post(github::secret_scanning::verify),
         );
     // Only serve the local checkout of the git index in development mode.
@@ -192,12 +168,6 @@ pub fn build_axum_router(state: AppState) -> Router {
         .with_state(state)
 }
-fn conduit<R>(
-    handler: fn(ConduitRequest) -> R,
-) -> ConduitAxumHandler<C<R>> {
-    ConduitAxumHandler::wrap(C(handler))
-}
-
 struct C<R>(pub fn(ConduitRequest) -> R);
 impl Handler for C {
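Note: with the `conduit(...)` adapter removed, `build_axum_router` produces a plain axum `Router`, so individual routes can be exercised with tower's `oneshot` and no conduit-specific test harness. A self-contained sketch of that pattern (it uses a hypothetical stub route rather than `build_axum_router`, since constructing a full `AppState` is out of scope here):

use axum::{body::Body, http::{Request, StatusCode}, routing::get, Json, Router};
use serde_json::{json, Value};
use tower::ServiceExt; // for `oneshot`

// Hypothetical stub; the real tests would build the router via build_axum_router.
async fn summary_stub() -> Json<Value> {
    Json(json!({ "num_crates": 0 }))
}

#[tokio::test]
async fn summary_route_responds() {
    let router = Router::new().route("/api/v1/summary", get(summary_stub));

    let response = router
        .oneshot(Request::get("/api/v1/summary").body(Body::empty()).unwrap())
        .await
        .unwrap();

    assert_eq!(response.status(), StatusCode::OK);
}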