diff --git a/Cargo.lock b/Cargo.lock
index 71c270dfc4d..f7e968bb6a7 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -271,7 +271,6 @@ dependencies = [
  "futures-channel",
  "futures-util",
  "git2",
- "handlebars",
  "hex",
  "http",
  "hyper",
@@ -281,6 +280,7 @@ dependencies = [
  "lazy_static",
  "lettre",
  "license-exprs",
+ "minijinja",
  "oauth2",
  "parking_lot",
  "prometheus",
@@ -1037,20 +1037,6 @@ dependencies = [
  "tracing",
 ]
 
-[[package]]
-name = "handlebars"
-version = "4.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "66b09e2322d20d14bc2572401ce7c1d60b4748580a76c230ed9c1f8938f0c833"
-dependencies = [
- "log",
- "pest",
- "pest_derive",
- "quick-error",
- "serde",
- "serde_json",
-]
-
 [[package]]
 name = "hashbrown"
 version = "0.11.2"
@@ -1495,6 +1481,15 @@ version = "0.3.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d"
 
+[[package]]
+name = "minijinja"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2276f0485003f1f9835e9de1e9b42cbaf7282f97b7c12be18017259ade75f640"
+dependencies = [
+ "serde",
+]
+
 [[package]]
 name = "miniz_oxide"
 version = "0.4.4"
@@ -1908,12 +1903,6 @@ dependencies = [
  "thiserror",
 ]
 
-[[package]]
-name = "quick-error"
-version = "2.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3"
-
 [[package]]
 name = "quote"
 version = "1.0.9"
diff --git a/Cargo.toml b/Cargo.toml
index 88e00018e56..699e2fb3c60 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -58,7 +58,6 @@ flate2 = "1.0"
 futures-channel = { version = "0.3.1", default-features = false }
 futures-util = "0.3"
 git2 = "0.13.0"
-handlebars = "4.1.3"
 hex = "0.4"
 http = "0.2"
 hyper = { version = "0.14", features = ["client", "http1"] }
@@ -66,6 +65,7 @@ indexmap = { version = "1.0.2", features = ["serde-1"] }
 jemallocator = { version = "0.3", features = ['unprefixed_malloc_on_supported_platforms', 'profiling'] }
 lettre = { version = "0.10.0-beta.3", default-features = false, features = ["file-transport", "smtp-transport", "native-tls", "hostname", "builder"] }
 license-exprs = "1.6"
+minijinja = "0.6.0"
 oauth2 = { version = "4.0.0", default-features = false, features = ["reqwest"] }
 parking_lot = "0.11"
 prometheus = { version = "0.13.0", default-features = false }
diff --git a/src/tasks/dump_db/dump-export.sql.hbs b/src/tasks/dump_db/dump-export.sql.hbs
deleted file mode 100644
index 2b7c4765fa2..00000000000
--- a/src/tasks/dump_db/dump-export.sql.hbs
+++ /dev/null
@@ -1,9 +0,0 @@
-BEGIN ISOLATION LEVEL REPEATABLE READ, READ ONLY;
-{{#each tables}}
-{{#if this.filter}}
-    \copy (SELECT {{this.columns}} FROM "{{this.name}}" WHERE {{this.filter}}) TO 'data/{{this.name}}.csv' WITH CSV HEADER
-{{else}}
-    \copy "{{this.name}}" ({{this.columns}}) TO 'data/{{this.name}}.csv' WITH CSV HEADER
-{{/if}}
-{{/each}}
-COMMIT;
diff --git a/src/tasks/dump_db/dump-export.sql.j2 b/src/tasks/dump_db/dump-export.sql.j2
new file mode 100644
index 00000000000..5a620080051
--- /dev/null
+++ b/src/tasks/dump_db/dump-export.sql.j2
@@ -0,0 +1,9 @@
+BEGIN ISOLATION LEVEL REPEATABLE READ, READ ONLY;
+{% for table in tables %}
+{% if table.filter %}
+    \copy (SELECT {{table.columns}} FROM "{{table.name}}" WHERE {{table.filter}}) TO 'data/{{table.name}}.csv' WITH CSV HEADER
+{% else %}
+    \copy "{{table.name}}" ({{table.columns}}) TO 'data/{{table.name}}.csv' WITH CSV HEADER
+{% endif %}
+{% endfor %}
+COMMIT;
diff --git a/src/tasks/dump_db/dump-import.sql.hbs b/src/tasks/dump_db/dump-import.sql.hbs
deleted file mode 100644
index 4f9147772aa..00000000000
--- a/src/tasks/dump_db/dump-import.sql.hbs
+++ /dev/null
@@ -1,38 +0,0 @@
-BEGIN;
-    -- Disable triggers on each table.
-{{#each tables}}
-    ALTER TABLE "{{this.name}}" DISABLE TRIGGER ALL;
-{{/each}}
-
-    -- Set defaults for non-nullable columns not included in the dump.
-{{#each tables as |table|}}
-{{#each column_defaults}}
-    ALTER TABLE "{{table.name}}" ALTER COLUMN "{{@key}}" SET DEFAULT {{this}};
-{{/each}}
-{{/each}}
-
-    -- Truncate all tables.
-{{#each tables}}
-    TRUNCATE "{{this.name}}" RESTART IDENTITY CASCADE;
-{{/each}}
-
-    -- Enable this trigger so that `crates.textsearchable_index_col` can be excluded from the export
-    ALTER TABLE "crates" ENABLE TRIGGER "trigger_crates_tsvector_update";
-
-    -- Import the CSV data.
-{{#each tables}}
-    \copy "{{this.name}}" ({{this.columns}}) FROM 'data/{{this.name}}.csv' WITH CSV HEADER
-{{/each}}
-
-    -- Drop the defaults again.
-{{#each tables as |table|}}
-{{#each column_defaults}}
-    ALTER TABLE "{{table.name}}" ALTER COLUMN "{{@key}}" DROP DEFAULT;
-{{/each}}
-{{/each}}
-
-    -- Reenable triggers on each table.
-{{#each tables}}
-    ALTER TABLE "{{this.name}}" ENABLE TRIGGER ALL;
-{{/each}}
-COMMIT;
diff --git a/src/tasks/dump_db/dump-import.sql.j2 b/src/tasks/dump_db/dump-import.sql.j2
new file mode 100644
index 00000000000..5c7c49b68c8
--- /dev/null
+++ b/src/tasks/dump_db/dump-import.sql.j2
@@ -0,0 +1,38 @@
+BEGIN;
+    -- Disable triggers on each table.
+{% for table in tables %}
+    ALTER TABLE "{{table.name}}" DISABLE TRIGGER ALL;
+{% endfor %}
+
+    -- Set defaults for non-nullable columns not included in the dump.
+{% for table in tables %}
+{% for cd in table.column_defaults %}
+    ALTER TABLE "{{table.name}}" ALTER COLUMN "{{cd.column}}" SET DEFAULT {{cd.value}};
+{% endfor %}
+{% endfor %}
+
+    -- Truncate all tables.
+{% for table in tables %}
+    TRUNCATE "{{table.name}}" RESTART IDENTITY CASCADE;
+{% endfor %}
+
+    -- Enable this trigger so that `crates.textsearchable_index_col` can be excluded from the export
+    ALTER TABLE "crates" ENABLE TRIGGER "trigger_crates_tsvector_update";
+
+    -- Import the CSV data.
+{% for table in tables %}
+    \copy "{{table.name}}" ({{table.columns}}) FROM 'data/{{table.name}}.csv' WITH CSV HEADER
+{% endfor %}
+
+    -- Drop the defaults again.
+{% for table in tables %}
+{% for cd in table.column_defaults %}
+    ALTER TABLE "{{table.name}}" ALTER COLUMN "{{cd.column}}" DROP DEFAULT;
+{% endfor %}
+{% endfor %}
+
+    -- Reenable triggers on each table.
+{% for table in tables %}
+    ALTER TABLE "{{table.name}}" ENABLE TRIGGER ALL;
+{% endfor %}
+COMMIT;
diff --git a/src/tasks/dump_db/gen_scripts.rs b/src/tasks/dump_db/gen_scripts.rs
index 5b17114ba65..af0ddbf3a93 100644
--- a/src/tasks/dump_db/gen_scripts.rs
+++ b/src/tasks/dump_db/gen_scripts.rs
@@ -1,4 +1,4 @@
-use std::{collections::BTreeMap, fs::File, path::Path};
+use std::{fs::File, path::Path};
 
 use crate::tasks::dump_db::configuration::{ColumnVisibility, TableConfig, VisibilityConfig};
 use swirl::PerformError;
@@ -16,11 +16,17 @@ struct HandlebarsTableContext<'a> {
     name: &'a str,
     filter: Option<String>,
     columns: String,
-    column_defaults: BTreeMap<&'a str, &'a str>,
+    column_defaults: Vec<ColumnDefault<'a>>,
+}
+
+#[derive(Debug, Serialize)]
+struct ColumnDefault<'a> {
+    column: &'a str,
+    value: &'a str,
 }
 
 impl TableConfig {
-    fn handlebars_context<'a>(&'a self, name: &'a str) -> Option<HandlebarsTableContext<'a>> {
+    fn template_context<'a>(&'a self, name: &'a str) -> Option<HandlebarsTableContext<'a>> {
         let columns = self
             .columns
             .iter()
@@ -35,7 +41,10 @@ impl TableConfig {
         let column_defaults = self
             .column_defaults
             .iter()
-            .map(|(k, v)| (k.as_str(), v.as_str()))
+            .map(|(k, v)| ColumnDefault {
+                column: k.as_str(),
+                value: v.as_str(),
+            })
             .collect();
         Some(HandlebarsTableContext {
             name,
@@ -49,41 +58,53 @@ impl TableConfig {
 
 /// Subset of the configuration data to be passed on to the Handlbars template.
 #[derive(Debug, Serialize)]
-struct HandlebarsContext<'a> {
+struct TemplateContext<'a> {
     tables: Vec<HandlebarsTableContext<'a>>,
 }
 
 impl VisibilityConfig {
-    fn handlebars_context(&self) -> HandlebarsContext<'_> {
+    fn template_context(&self) -> TemplateContext<'_> {
         let tables = self
             .topological_sort()
             .into_iter()
-            .filter_map(|table| self.0[table].handlebars_context(table))
+            .filter_map(|table| self.0[table].template_context(table))
             .collect();
-        HandlebarsContext { tables }
+        TemplateContext { tables }
     }
 
-    fn gen_psql_scripts<W>(&self, export_sql: W, import_sql: W) -> Result<(), PerformError>
+    fn gen_psql_scripts<W>(
+        &self,
+        mut export_writer: W,
+        mut import_writer: W,
+    ) -> Result<(), PerformError>
     where
         W: std::io::Write,
     {
-        let context = self.handlebars_context();
-        let mut handlebars = handlebars::Handlebars::new();
-        handlebars.register_escape_fn(handlebars::no_escape);
+        use minijinja::Environment;
+
+        let mut env = Environment::new();
+        env.add_template("dump-export.sql", include_str!("dump-export.sql.j2"))?;
+        env.add_template("dump-import.sql", include_str!("dump-import.sql.j2"))?;
+
+        let context = self.template_context();
+
+        debug!("Rendering dump-export.sql file…");
+        let export_sql = env
+            .get_template("dump-export.sql")
+            .unwrap()
+            .render(&context)?;
+
+        debug!("Rendering dump-import.sql file…");
+        let import_sql = env
+            .get_template("dump-import.sql")
+            .unwrap()
+            .render(&context)?;
 
         debug!("Writing dump-export.sql file…");
-        handlebars.render_template_to_write(
-            include_str!("dump-export.sql.hbs"),
-            &context,
-            export_sql,
-        )?;
+        export_writer.write_all(export_sql.as_bytes())?;
 
         debug!("Writing dump-import.sql file…");
-        handlebars.render_template_to_write(
-            include_str!("dump-import.sql.hbs"),
-            &context,
-            import_sql,
-        )?;
+        import_writer.write_all(import_sql.as_bytes())?;
 
         Ok(())
     }