diff --git a/src/bin/background-worker.rs b/src/bin/background-worker.rs index ada1edfd123..2a503748c19 100644 --- a/src/bin/background-worker.rs +++ b/src/bin/background-worker.rs @@ -41,6 +41,7 @@ fn main() { let repository_config = RepositoryConfig::from_environment(); let repository = Repository::open(&repository_config).expect("Failed to clone index"); + println!("Index cloned"); let environment = Environment::new( repository, diff --git a/src/bin/enqueue-job.rs b/src/bin/enqueue-job.rs index f5df8b7cc3c..568efc72811 100644 --- a/src/bin/enqueue-job.rs +++ b/src/bin/enqueue-job.rs @@ -5,7 +5,11 @@ use diesel::PgConnection; fn main() -> AppResult<()> { let conn = db::connect_now()?; let mut args = std::env::args().skip(1); - match &*args.next().unwrap_or_default() { + + let job = args.next().unwrap_or_default(); + println!("Enqueueing background job: {}", job); + + match &*job { "update_downloads" => tasks::update_downloads().enqueue(&conn), "dump_db" => { let database_url = args.next().unwrap_or_else(|| env("DATABASE_URL")); diff --git a/src/tasks/dump_db.rs b/src/tasks/dump_db.rs index 8d64d1bf184..41d0e2a11d1 100644 --- a/src/tasks/dump_db.rs +++ b/src/tasks/dump_db.rs @@ -16,10 +16,16 @@ pub fn dump_db( target_name: String, ) -> Result<(), PerformError> { let directory = DumpDirectory::create()?; + + println!("Begin exporting database"); directory.populate(&database_url)?; + + println!("Creating tarball"); let tarball = DumpTarball::create(&directory.export_dir)?; - tarball.upload(&target_name, &env.uploader)?; - println!("Database dump uploaded to {}.", &target_name); + + println!("Uploading tarball"); + let size = tarball.upload(&target_name, &env.uploader)?; + println!("Database dump uploaded {} bytes to {}.", size, &target_name); Ok(()) } @@ -145,7 +151,7 @@ impl DumpTarball { Ok(result) } - fn upload(&self, target_name: &str, uploader: &Uploader) -> Result<(), PerformError> { + fn upload(&self, target_name: &str, uploader: &Uploader) -> Result<u64, PerformError> { let 
client = reqwest::Client::new(); let tarfile = File::open(&self.tarball_path)?; let content_length = tarfile.metadata()?.len(); @@ -160,7 +166,7 @@ impl DumpTarball { header::HeaderMap::new(), ) .map_err(std_error_no_send)?; - Ok(()) + Ok(content_length) } } diff --git a/src/tasks/update_downloads.rs b/src/tasks/update_downloads.rs index 6feb218b3dc..b59de4b032b 100644 --- a/src/tasks/update_downloads.rs +++ b/src/tasks/update_downloads.rs @@ -23,7 +23,10 @@ fn update(conn: &PgConnection) -> QueryResult<()> { .filter(processed.eq(false)) .filter(downloads.ne(counted)) .load(conn)?; + + println!("Updating {} versions", rows.len()); collect(conn, &rows)?; + println!("Finished updating versions"); // Anything older than 24 hours ago will be frozen and will not be queried // against again. @@ -33,17 +36,18 @@ fn update(conn: &PgConnection) -> QueryResult<()> { .filter(downloads.eq(counted)) .filter(processed.eq(false)) .execute(conn)?; + println!("Finished freezing old version_downloads"); no_arg_sql_function!(refresh_recent_crate_downloads, ()); select(refresh_recent_crate_downloads).execute(conn)?; + println!("Finished running refresh_recent_crate_downloads"); + Ok(()) } fn collect(conn: &PgConnection, rows: &[VersionDownload]) -> QueryResult<()> { use diesel::update; - println!("updating {} versions", rows.len()); - for download in rows { let amt = download.downloads - download.counted;