#![feature(concat_idents)]

use std::error::Error;
use std::fmt::Display;
use std::process::exit;

use clap::*;
use dotenv::dotenv;
use migration::schema_list::PostgreSQLSchema;
use migration::sea_orm::{ConnectOptions, Database, DbConn};
use sea_orm_migration::prelude::*;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::EnvFilter;

// const MIGRATION_DIR: &str = "./";

/// Runs the given migrator against the given schema, reusing the shared CLI options.
macro_rules! migrate_schema {
    ($cli: expr, $schema_name: ident, $migrator_name: ident) => {{
        run_cli(
            &mut $cli,
            PostgreSQLSchema::$schema_name,
            migration::$migrator_name,
        )
        .await;
    }};
}

#[async_std::main]
async fn main() {
    dotenv().ok();
    let mut cli: Cli = Cli::parse();
    init_logger(cli.verbose);

    migrate_schema!(cli, Public, PublicMigrator);
    migrate_schema!(cli, Jobs, JobsMigrator);
    migrate_schema!(cli, Carts, CartsMigrator);
    migrate_schema!(cli, Discounts, DiscountsMigrator);
}

/// Connects to the database with `schema` in the search path, creates the schema if it
/// does not exist yet, and runs the requested migration command. Failures panic in debug
/// builds and are reported via `handle_error` in release builds.
pub async fn run_cli<M>(cli: &mut Cli, schema: PostgreSQLSchema, migrator: M)
where
    M: MigratorTrait,
{
    let url = cli
        .database_url
        .as_ref()
        .expect("Environment variable 'DATABASE_URL' not set");
    let schema = schema.as_str().to_string();

    let connect_options = ConnectOptions::new(url.clone())
        .set_schema_search_path(schema.clone())
        .to_owned();
    let db = Database::connect(connect_options)
        .await
        .expect("Failed to acquire database connection");

    // Create the schema; the error is ignored if it already exists.
    db.execute_unprepared(&format!("CREATE SCHEMA {}", schema))
        .await
        .ok();
    db.execute_unprepared(&format!("SET search_path = '{}'", schema))
        .await
        .unwrap();

    let res = run_migrate(migrator, &db, cli.command.clone()).await;
    if cfg!(debug_assertions) {
        res.unwrap();
    } else {
        res.unwrap_or_else(handle_error);
    }
}

/// Dispatches the parsed subcommand to the corresponding `MigratorTrait` operation;
/// with no subcommand, all pending migrations are applied.
pub async fn run_migrate<M>(
    _: M,
    db: &DbConn,
    command: Option<MigrateSubcommands>,
) -> Result<(), Box<dyn Error>>
where
    M: MigratorTrait,
{
    match command {
        Some(MigrateSubcommands::Fresh) => M::fresh(db).await?,
        Some(MigrateSubcommands::Refresh) => M::refresh(db).await?,
        Some(MigrateSubcommands::Reset) => M::reset(db).await?,
        Some(MigrateSubcommands::Status) => M::status(db).await?,
        Some(MigrateSubcommands::Up { num }) => M::up(db, num).await?,
        Some(MigrateSubcommands::Down { num }) => M::down(db, Some(num)).await?,
        // Some(MigrateSubcommands::Init) => run_migrate_init(MIGRATION_DIR)?,
        // Some(MigrateSubcommands::Generate {
        //     migration_name,
        //     universal_time: _,
        //     local_time,
        // }) => run_migrate_generate(MIGRATION_DIR, &migration_name, !local_time)?,
        _ => M::up(db, None).await?,
    };
    Ok(())
}

/// Sets up tracing: full debug output when `--verbose` is passed, otherwise only
/// `sea_orm_migration` traces without timestamps.
fn init_logger(verbose: bool) {
    let filter = match verbose {
        true => "debug",
        false => "sea_orm_migration=trace",
    };
    let filter_layer = EnvFilter::try_new(filter).unwrap();
    if verbose {
        let fmt_layer = tracing_subscriber::fmt::layer();
        tracing_subscriber::registry()
            .with(filter_layer)
            .with(fmt_layer)
            .init()
    } else {
        let fmt_layer = tracing_subscriber::fmt::layer()
            .with_target(true)
            .with_level(true)
            .without_time();
        tracing_subscriber::registry()
            .with(filter_layer)
            .with(fmt_layer)
            .init()
    };
}

/// Prints the error and terminates the process (used in release builds).
fn handle_error<E>(error: E)
where
    E: Display,
{
    eprintln!("{error}");
    exit(1);
}

#[derive(Parser)]
#[clap(version)]
pub struct Cli {
    #[clap(action, short = 'v', long, global = true, help = "Show debug messages")]
    verbose: bool,

    #[clap(
        value_parser,
        global = true,
        short = 'u',
        long,
        env = "DATABASE_URL",
        help = "Database URL"
    )]
    database_url: Option<String>,

    #[clap(subcommand)]
    command: Option<MigrateSubcommands>,
}

#[derive(Subcommand, PartialEq, Eq, Debug)]
pub enum Commands {
    #[clap(
        about = "Codegen related commands",
        arg_required_else_help = true,
        display_order = 10
    )]
    Generate {
        #[clap(subcommand)]
        command: GenerateSubcommands,
    },

    #[clap(about = "Migration related commands", display_order = 20)]
    Migrate {
        #[clap(
            value_parser,
            global = true,
            short = 'd',
            long,
            help = "Migration script directory. If your migrations are in their own crate, you can provide the root of that crate. If your migrations are in a submodule of your app, you should provide the directory of that submodule.",
            default_value = "./migration"
        )]
        migration_dir: String,

        #[clap(
            value_parser,
            global = true,
            short = 'u',
            long,
            env = "DATABASE_URL",
            help = "Database URL"
        )]
        database_url: Option<String>,

        #[clap(subcommand)]
        command: Option<MigrateSubcommands>,
    },
}

#[derive(Subcommand, PartialEq, Eq, Debug, Clone)]
pub enum MigrateSubcommands {
    #[clap(about = "Initialize migration directory", display_order = 10)]
    Init,

    #[clap(about = "Generate a new, empty migration", display_order = 20)]
    Generate {
        #[clap(
            value_parser,
            required = true,
            takes_value = true,
            help = "Name of the new migration"
        )]
        migration_name: String,

        #[clap(
            action,
            long,
            default_value = "true",
            help = "Generate migration file based on Utc time",
            conflicts_with = "local-time",
            display_order = 1001
        )]
        universal_time: bool,

        #[clap(
            action,
            long,
            help = "Generate migration file based on Local time",
            conflicts_with = "universal-time",
            display_order = 1002
        )]
        local_time: bool,
    },

    #[clap(
        about = "Drop all tables from the database, then reapply all migrations",
        display_order = 30
    )]
    Fresh,

    #[clap(
        about = "Rollback all applied migrations, then reapply all migrations",
        display_order = 40
    )]
    Refresh,

    #[clap(about = "Rollback all applied migrations", display_order = 50)]
    Reset,

    #[clap(about = "Check the status of all migrations", display_order = 60)]
    Status,

    #[clap(about = "Apply pending migrations", display_order = 70)]
    Up {
        #[clap(
            value_parser,
            short,
            long,
            help = "Number of pending migrations to apply"
        )]
        num: Option<u32>,
    },

    #[clap(
        value_parser,
        about = "Rollback applied migrations",
        display_order = 80
    )]
    Down {
        #[clap(
            value_parser,
            short,
            long,
            default_value = "1",
            help = "Number of applied migrations to be rolled back",
            display_order = 90
        )]
        num: u32,
    },
}

#[derive(Subcommand, PartialEq, Eq, Debug)]
pub enum GenerateSubcommands {
    #[clap(about = "Generate entity")]
    #[clap(arg_required_else_help = true)]
    #[clap(group(ArgGroup::new("formats").args(&["compact-format", "expanded-format"])))]
    #[clap(group(ArgGroup::new("group-tables").args(&["tables", "include-hidden-tables"])))]
    Entity {
        #[clap(action, long, help = "Generate entity file of compact format")]
        compact_format: bool,

        #[clap(action, long, help = "Generate entity file of expanded format")]
        expanded_format: bool,

        #[clap(
            action,
            long,
            help = "Generate entity file for hidden tables (i.e. table name starts with an underscore)"
        )]
        include_hidden_tables: bool,

        #[clap(
            value_parser,
            short = 't',
            long,
            use_value_delimiter = true,
            takes_value = true,
            help = "Generate entity file for specified tables only (comma separated)"
        )]
        tables: Vec<String>,

        #[clap(
            value_parser,
            long,
            use_value_delimiter = true,
            takes_value = true,
            default_value = "seaql_migrations",
            help = "Skip generating entity file for specified tables (comma separated)"
        )]
        ignore_tables: Vec<String>,

        #[clap(
            value_parser,
            long,
            default_value = "1",
            help = "The maximum amount of connections to use when connecting to the database."
        )]
        max_connections: u32,

        #[clap(
            value_parser,
            short = 'o',
            long,
            default_value = "./",
            help = "Entity file output directory"
        )]
        output_dir: String,

        #[clap(
            value_parser,
            short = 's',
            long,
            env = "DATABASE_SCHEMA",
            default_value = "public",
            long_help = "Database schema\n \
                         - For MySQL, this argument is ignored.\n \
                         - For PostgreSQL, this argument is optional with default value 'public'."
        )]
        database_schema: String,

        #[clap(
            value_parser,
            short = 'u',
            long,
            env = "DATABASE_URL",
            help = "Database URL"
        )]
        database_url: String,

        #[clap(
            value_parser,
            long,
            default_value = "none",
            help = "Automatically derive serde Serialize / Deserialize traits for the entity (none, serialize, deserialize, both)"
        )]
        with_serde: String,

        #[clap(
            action,
            long,
            help = "Generate a serde field attribute, '#[serde(skip_deserializing)]', for the primary key fields to skip them during deserialization; this flag is effective only when '--with-serde' is 'both' or 'deserialize'"
        )]
        serde_skip_deserializing_primary_key: bool,

        #[clap(
            action,
            long,
            default_value = "false",
            help = "Opt in to adding skip attributes to hidden columns (i.e. when 'with-serde' is enabled and the column name starts with an underscore)"
        )]
        serde_skip_hidden_column: bool,

        #[clap(
            action,
            long,
            default_value = "false",
            long_help = "Automatically derive the Copy trait on generated enums.\n\
                         Enums generated from a database don't have associated data by default, \
                         and as such can derive Copy."
        )]
        with_copy_enums: bool,

        #[clap(
            arg_enum,
            value_parser,
            long,
            default_value = "chrono",
            help = "The datetime crate to use for generating entities."
        )]
        date_time_crate: DateTimeCrate,

        #[clap(
            action,
            long,
            short = 'l',
            default_value = "false",
            help = "Generate index file as `lib.rs` instead of `mod.rs`."
        )]
        lib: bool,

        #[clap(
            value_parser,
            long,
            use_value_delimiter = true,
            takes_value = true,
            help = "Add extra derive macros to generated model struct (comma separated), e.g. `--model-extra-derives 'ts_rs::Ts','CustomDerive'`"
        )]
        model_extra_derives: Vec<String>,

        #[clap(
            value_parser,
            long,
            use_value_delimiter = true,
            takes_value = true,
            help = r#"Add extra attributes to generated model struct, no need for `#[]` (comma separated), e.g. `--model-extra-attributes 'serde(rename_all = "camelCase")','ts(export)'`"#
        )]
        model_extra_attributes: Vec<String>,
    },
}

#[derive(ArgEnum, Copy, Clone, Debug, PartialEq, Eq)]
pub enum DateTimeCrate {
    Chrono,
    Time,
}
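
// A minimal parsing check, included only as an illustrative sketch of how the CLI surface
// above is exercised. It relies on `try_parse_from` from the derived `clap::Parser` impl;
// the database URL and migration count used here are made-up example values.
#[cfg(test)]
mod cli_parse_tests {
    use super::*;

    #[test]
    fn up_subcommand_parses_num() {
        // `-u` supplies the database URL explicitly so the test does not depend on
        // the DATABASE_URL environment variable being set.
        let cli = Cli::try_parse_from([
            "migration",
            "-u",
            "postgres://localhost/example",
            "up",
            "-n",
            "2",
        ])
        .expect("CLI arguments should parse");
        assert_eq!(cli.command, Some(MigrateSubcommands::Up { num: Some(2) }));
    }
}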