Skip to content

Commit 621312f

Browse files
authored
Move Job constructors into Job impl (#6360)
This makes it a little easier to understand that these functions are creating `Job` instances that need to be enqueued afterwards. It also means we have all the constructors in one place instead of being spread out across multiple files.
1 parent 76c6d63 commit 621312f

File tree

11 files changed

+72
-85
lines changed

11 files changed

+72
-85
lines changed

src/admin/enqueue_job.rs

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
1+
use crate::background_jobs::Job;
2+
use crate::db;
13
use crate::schema::background_jobs::dsl::*;
2-
use crate::{db, worker};
34
use anyhow::Result;
45
use diesel::prelude::*;
56

@@ -41,15 +42,15 @@ pub fn run(command: Command) -> Result<()> {
4142
println!("Did not enqueue update_downloads, existing job already in progress");
4243
Ok(())
4344
} else {
44-
Ok(worker::update_downloads().enqueue(conn)?)
45+
Ok(Job::update_downloads().enqueue(conn)?)
4546
}
4647
}
4748
Command::DumpDb {
4849
database_url,
4950
target_name,
50-
} => Ok(worker::dump_db(database_url, target_name).enqueue(conn)?),
51-
Command::DailyDbMaintenance => Ok(worker::daily_db_maintenance().enqueue(conn)?),
52-
Command::SquashIndex => Ok(worker::squash_index().enqueue(conn)?),
53-
Command::NormalizeIndex { dry_run } => Ok(worker::normalize_index(dry_run).enqueue(conn)?),
51+
} => Ok(Job::dump_db(database_url, target_name).enqueue(conn)?),
52+
Command::DailyDbMaintenance => Ok(Job::daily_db_maintenance().enqueue(conn)?),
53+
Command::SquashIndex => Ok(Job::squash_index().enqueue(conn)?),
54+
Command::NormalizeIndex { dry_run } => Ok(Job::normalize_index(dry_run).enqueue(conn)?),
5455
}
5556
}

src/admin/yank_version.rs

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -68,8 +68,6 @@ fn yank(opts: Opts, conn: &mut PgConnection) {
6868
if dotenv::var("FEATURE_INDEX_SYNC").is_ok() {
6969
Job::enqueue_sync_to_index(&krate.name, conn).unwrap();
7070
} else {
71-
crate::worker::sync_yanked(krate.name, v.num)
72-
.enqueue(conn)
73-
.unwrap();
71+
Job::sync_yanked(krate.name, v.num).enqueue(conn).unwrap();
7472
}
7573
}

src/background_jobs.rs

Lines changed: 55 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -78,18 +78,69 @@ impl Job {
7878
Ok(())
7979
}
8080

81-
pub fn sync_to_git_index<T: ToString>(krate: T) -> Job {
82-
Job::SyncToGitIndex(SyncToIndexJob {
81+
pub fn add_crate(krate: cargo_registry_index::Crate) -> Self {
82+
Self::IndexAddCrate(IndexAddCrateJob { krate })
83+
}
84+
85+
pub fn daily_db_maintenance() -> Self {
86+
Self::DailyDbMaintenance
87+
}
88+
89+
pub fn dump_db(database_url: String, target_name: String) -> Self {
90+
Self::DumpDb(DumpDbJob {
91+
database_url,
92+
target_name,
93+
})
94+
}
95+
96+
pub fn normalize_index(dry_run: bool) -> Self {
97+
Self::NormalizeIndex(NormalizeIndexJob { dry_run })
98+
}
99+
100+
pub fn render_and_upload_readme(
101+
version_id: i32,
102+
text: String,
103+
readme_path: String,
104+
base_url: Option<String>,
105+
pkg_path_in_vcs: Option<String>,
106+
) -> Self {
107+
Self::RenderAndUploadReadme(RenderAndUploadReadmeJob {
108+
version_id,
109+
text,
110+
readme_path,
111+
base_url,
112+
pkg_path_in_vcs,
113+
})
114+
}
115+
116+
pub fn squash_index() -> Self {
117+
Self::IndexSquash
118+
}
119+
120+
pub fn sync_to_git_index<T: ToString>(krate: T) -> Self {
121+
Self::SyncToGitIndex(SyncToIndexJob {
83122
krate: krate.to_string(),
84123
})
85124
}
86125

87-
pub fn sync_to_sparse_index<T: ToString>(krate: T) -> Job {
88-
Job::SyncToSparseIndex(SyncToIndexJob {
126+
pub fn sync_to_sparse_index<T: ToString>(krate: T) -> Self {
127+
Self::SyncToSparseIndex(SyncToIndexJob {
89128
krate: krate.to_string(),
90129
})
91130
}
92131

132+
pub fn sync_yanked(krate: String, version_num: String) -> Self {
133+
Self::IndexUpdateYanked(IndexUpdateYankedJob { krate, version_num })
134+
}
135+
136+
pub fn update_crate_index(crate_name: String) -> Self {
137+
Self::IndexSyncToHttp(IndexSyncToHttpJob { crate_name })
138+
}
139+
140+
pub fn update_downloads() -> Self {
141+
Self::UpdateDownloads
142+
}
143+
93144
fn as_type_str(&self) -> &'static str {
94145
match self {
95146
Job::DailyDbMaintenance => Self::DAILY_DB_MAINTENANCE,

src/controllers/krate/publish.rs

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@ use crate::models::{
1717
insert_version_owner_action, Category, Crate, DependencyKind, Keyword, NewCrate, NewVersion,
1818
Rights, VersionAction,
1919
};
20-
use crate::worker;
2120

2221
use crate::middleware::log_request::RequestLogExt;
2322
use crate::models::token::EndpointScope;
@@ -231,7 +230,7 @@ pub async fn publish(app: AppState, req: BytesRequest) -> AppResult<Json<GoodCra
231230
let pkg_path_in_vcs = cargo_vcs_info.map(|info| info.path_in_vcs);
232231

233232
if let Some(readme) = new_crate.readme {
234-
worker::render_and_upload_readme(
233+
Job::render_and_upload_readme(
235234
version.id,
236235
readme,
237236
new_crate
@@ -276,7 +275,7 @@ pub async fn publish(app: AppState, req: BytesRequest) -> AppResult<Json<GoodCra
276275
if app.config.feature_index_sync {
277276
Job::enqueue_sync_to_index(&krate.name, conn)?;
278277
} else {
279-
worker::add_crate(git_crate).enqueue(conn)?;
278+
Job::add_crate(git_crate).enqueue(conn)?;
280279
}
281280

282281
// The `other` field on `PublishWarnings` was introduced to handle a temporary warning

src/controllers/version/yank.rs

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@ use crate::models::token::EndpointScope;
99
use crate::models::Rights;
1010
use crate::models::{insert_version_owner_action, VersionAction};
1111
use crate::schema::versions;
12-
use crate::worker;
1312

1413
/// Handles the `DELETE /crates/:crate_id/:version/yank` route.
1514
/// This does not delete a crate version, it makes the crate
@@ -88,7 +87,7 @@ fn modify_yank(
8887
if state.config.feature_index_sync {
8988
Job::enqueue_sync_to_index(&krate.name, conn)?;
9089
} else {
91-
worker::sync_yanked(krate.name, version.num).enqueue(conn)?;
90+
Job::sync_yanked(krate.name, version.num).enqueue(conn)?;
9291
}
9392

9493
ok_true()

src/worker/daily_db_maintenance.rs

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
use crate::background_jobs::Job;
21
use crate::swirl::PerformError;
32
/// Run daily database maintenance tasks
43
///
@@ -17,7 +16,3 @@ pub(crate) fn perform_daily_db_maintenance(conn: &mut PgConnection) -> Result<()
1716
info!("Finished running VACUUM on version_downloads table");
1817
Ok(())
1918
}
20-
21-
pub fn daily_db_maintenance() -> Job {
22-
Job::DailyDbMaintenance
23-
}

src/worker/dump_db.rs

Lines changed: 2 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -5,9 +5,9 @@ use std::{
55
};
66

77
use self::configuration::VisibilityConfig;
8-
use crate::{background_jobs::DumpDbJob, swirl::PerformError};
8+
use crate::swirl::PerformError;
99
use crate::{
10-
background_jobs::{Environment, Job},
10+
background_jobs::Environment,
1111
uploaders::{UploadBucket, Uploader},
1212
};
1313
use reqwest::header;
@@ -33,13 +33,6 @@ pub fn perform_dump_db(
3333
Ok(())
3434
}
3535

36-
pub fn dump_db(database_url: String, target_name: String) -> Job {
37-
Job::DumpDb(DumpDbJob {
38-
database_url,
39-
target_name,
40-
})
41-
}
42-
4336
/// Manage the export directory.
4437
///
4538
/// Create the directory, populate it with the psql scripts and CSV dumps, and

src/worker/git.rs

Lines changed: 3 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,4 @@
1-
use crate::background_jobs::{
2-
Environment, IndexAddCrateJob, IndexSyncToHttpJob, IndexUpdateYankedJob, Job, NormalizeIndexJob,
3-
};
1+
use crate::background_jobs::{Environment, Job, NormalizeIndexJob};
42
use crate::models;
53
use crate::schema;
64
use crate::swirl::PerformError;
@@ -35,14 +33,10 @@ pub fn perform_index_add_crate(
3533
repo.commit_and_push(&message, &dst)?;
3634

3735
// Queue another background job to update the http-based index as well.
38-
update_crate_index(krate.name.clone()).enqueue(conn)?;
36+
Job::update_crate_index(krate.name.clone()).enqueue(conn)?;
3937
Ok(())
4038
}
4139

42-
pub fn add_crate(krate: Crate) -> Job {
43-
Job::IndexAddCrate(IndexAddCrateJob { krate })
44-
}
45-
4640
#[instrument(skip(env))]
4741
pub fn perform_index_sync_to_http(
4842
env: &Environment,
@@ -71,10 +65,6 @@ pub fn perform_index_sync_to_http(
7165
Ok(())
7266
}
7367

74-
pub fn update_crate_index(crate_name: String) -> Job {
75-
Job::IndexSyncToHttp(IndexSyncToHttpJob { crate_name })
76-
}
77-
7868
/// Regenerates or removes an index file for a single crate
7969
#[instrument(skip_all, fields(krate.name = ?krate))]
8070
pub fn sync_to_git_index(
@@ -222,15 +212,11 @@ pub fn perform_index_update_yanked(
222212
}
223213

224214
// Queue another background job to update the http-based index as well.
225-
update_crate_index(krate.to_string()).enqueue(conn)?;
215+
Job::update_crate_index(krate.to_string()).enqueue(conn)?;
226216

227217
Ok(())
228218
}
229219

230-
pub fn sync_yanked(krate: String, version_num: String) -> Job {
231-
Job::IndexUpdateYanked(IndexUpdateYankedJob { krate, version_num })
232-
}
233-
234220
/// Collapse the index into a single commit, archiving the current history in a snapshot branch.
235221
#[instrument(skip(env))]
236222
pub fn perform_index_squash(env: &Environment) -> Result<(), PerformError> {
@@ -268,10 +254,6 @@ pub fn perform_index_squash(env: &Environment) -> Result<(), PerformError> {
268254
Ok(())
269255
}
270256

271-
pub fn squash_index() -> Job {
272-
Job::IndexSquash
273-
}
274-
275257
pub fn perform_normalize_index(
276258
env: &Environment,
277259
args: NormalizeIndexJob,
@@ -341,7 +323,3 @@ pub fn perform_normalize_index(
341323

342324
Ok(())
343325
}
344-
345-
pub fn normalize_index(dry_run: bool) -> Job {
346-
Job::NormalizeIndex(NormalizeIndexJob { dry_run })
347-
}

src/worker/mod.rs

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -10,12 +10,6 @@ mod git;
1010
mod readmes;
1111
mod update_downloads;
1212

13-
pub use daily_db_maintenance::daily_db_maintenance;
14-
pub use dump_db::dump_db;
15-
pub use git::{add_crate, normalize_index, squash_index, sync_yanked};
16-
pub use readmes::render_and_upload_readme;
17-
pub use update_downloads::update_downloads;
18-
1913
pub(crate) use daily_db_maintenance::perform_daily_db_maintenance;
2014
pub(crate) use dump_db::perform_dump_db;
2115
pub(crate) use git::{

src/worker/readmes.rs

Lines changed: 1 addition & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ use crate::swirl::PerformError;
44
use cargo_registry_markdown::text_to_html;
55
use diesel::PgConnection;
66

7-
use crate::background_jobs::{Environment, Job, RenderAndUploadReadmeJob};
7+
use crate::background_jobs::Environment;
88
use crate::models::Version;
99

1010
pub fn perform_render_and_upload_readme(
@@ -33,19 +33,3 @@ pub fn perform_render_and_upload_readme(
3333
Ok(())
3434
})
3535
}
36-
37-
pub fn render_and_upload_readme(
38-
version_id: i32,
39-
text: String,
40-
readme_path: String,
41-
base_url: Option<String>,
42-
pkg_path_in_vcs: Option<String>,
43-
) -> Job {
44-
Job::RenderAndUploadReadme(RenderAndUploadReadmeJob {
45-
version_id,
46-
text,
47-
readme_path,
48-
base_url,
49-
pkg_path_in_vcs,
50-
})
51-
}

0 commit comments

Comments (0)