// sqlx/sqlx-cli/src/opt.rs
use std::env;
use std::ops::{Deref, Not};
use std::path::Path;

use anyhow::Context;
use chrono::Utc;
use clap::{Args, Parser};
#[cfg(feature = "completions")]
use clap_complete::Shell;
use sqlx::migrate::Migrator;

use crate::config::migrate::{DefaultMigrationType, DefaultVersioning};
use crate::config::Config;
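// Top-level argument parser for the `sqlx` / `cargo sqlx` command-line interface.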
#[derive(Parser, Debug)]
#[clap(version, about, author)]
pub struct Opt {
#[clap(subcommand)]
pub command: Command,
}
#[derive(Parser, Debug)]
pub enum Command {
#[clap(alias = "db")]
Database(DatabaseOpt),
/// Generate query metadata to support offline compile-time verification.
///
/// Saves metadata for all invocations of `query!` and related macros to a `.sqlx` directory
/// in the current directory (or workspace root with `--workspace`), overwriting if needed.
///
/// During project compilation, the absence of the `DATABASE_URL` environment variable or
/// the presence of `SQLX_OFFLINE` (with a value of `true` or `1`) will constrain the
/// compile-time verification to only read from the cached query metadata.
#[clap(alias = "prep")]
Prepare {
/// Run in 'check' mode. Exits with 0 if the query metadata is up-to-date. Exits with
/// 1 if the query metadata needs updating.
#[clap(long)]
check: bool,
/// Prepare query macros in dependencies that exist outside the current crate or workspace.
#[clap(long)]
all: bool,
/// Generate a single workspace-level `.sqlx` folder.
///
/// This option is intended for workspaces where multiple crates use SQLx. If there is only
/// one, it is better to run `cargo sqlx prepare` without this option inside that crate.
#[clap(long)]
workspace: bool,
/// Arguments to be passed to `cargo rustc ...`.
#[clap(last = true)]
args: Vec<String>,
#[clap(flatten)]
connect_opts: ConnectOpts,
},
#[clap(alias = "mig")]
Migrate(MigrateOpt),
#[cfg(feature = "completions")]
/// Generate shell completions for the specified shell
Completions { shell: Shell },
}
/// Group of commands for creating and dropping your database.
#[derive(Parser, Debug)]
pub struct DatabaseOpt {
#[clap(subcommand)]
pub command: DatabaseCommand,
}
#[derive(Parser, Debug)]
pub enum DatabaseCommand {
/// Creates the database specified in your DATABASE_URL.
Create {
#[clap(flatten)]
connect_opts: ConnectOpts,
},
/// Drops the database specified in your DATABASE_URL.
Drop {
#[clap(flatten)]
confirmation: Confirmation,
#[clap(flatten)]
connect_opts: ConnectOpts,
/// PostgreSQL only: force drops the database.
#[clap(long, short, default_value = "false")]
force: bool,
},
/// Drops the database specified in your DATABASE_URL, re-creates it, and runs any pending migrations.
Reset {
#[clap(flatten)]
confirmation: Confirmation,
#[clap(flatten)]
source: Source,
#[clap(flatten)]
connect_opts: ConnectOpts,
/// PostgreSQL only: force drops the database.
#[clap(long, short, default_value = "false")]
force: bool,
},
/// Creates the database specified in your DATABASE_URL and runs any pending migrations.
Setup {
#[clap(flatten)]
source: Source,
#[clap(flatten)]
connect_opts: ConnectOpts,
},
}
/// Group of commands for creating and running migrations.
#[derive(Parser, Debug)]
pub struct MigrateOpt {
#[clap(subcommand)]
pub command: MigrateCommand,
}
#[derive(Parser, Debug)]
pub enum MigrateCommand {
/// Create a new migration with the given description.
///
/// --------------------------------
///
/// Migrations may be either simple or reversible.
///
/// Reversible migrations can be reverted with `sqlx migrate revert`, simple migrations cannot.
///
/// Reversible migrations are created as a pair of two files with the same filename but
/// extensions `.up.sql` and `.down.sql` for the up-migration and down-migration, respectively.
///
/// The up-migration should contain the commands to be used when applying the migration,
/// while the down-migration should contain the commands to reverse the changes made by the
/// up-migration.
///
/// When writing down-migrations, care should be taken to ensure that they
/// do not leave the database in an inconsistent state.
///
/// Simple migrations have just `.sql` for their extension and represent an up-migration only.
///
/// Note that reverting a migration is **destructive** and will likely result in data loss.
/// Reverting a migration will not restore any data discarded by commands in the up-migration.
///
/// It is recommended to always back up the database before running migrations.
///
/// --------------------------------
///
/// For convenience, this command attempts to detect if reversible migrations are in-use.
///
/// If the latest existing migration is reversible, the new migration will also be reversible.
///
/// Otherwise, a simple migration is created.
///
/// This behavior can be overridden by `--reversible` or `--simple`, respectively.
///
/// The default type to use can also be set in `sqlx.toml`.
///
/// --------------------------------
///
/// A version number will be automatically assigned to the migration.
///
/// Migrations are applied in ascending order by version number.
/// Version numbers do not need to be strictly consecutive.
///
/// The migration process will abort if SQLx encounters a migration with a version number
/// lower than that of _any_ previously applied migration.
///
/// Migrations should only be created with increasing version numbers.
///
/// --------------------------------
///
/// For convenience, this command will attempt to detect if sequential versioning is in use,
/// and if so, continue the sequence.
///
/// Sequential versioning is inferred if:
///
/// * The version numbers of the last two migrations differ by exactly 1, or
///
/// * Only one migration exists and its version number is either 0 or 1.
///
/// Otherwise, timestamp versioning (`YYYYMMDDHHMMSS`) is assumed.
///
/// This behavior can be overridden by `--sequential` or `--timestamp`, respectively.
///
/// The default versioning to use can also be set in `sqlx.toml`.
Add(AddMigrationOpts),
/// Run all pending migrations.
Run {
#[clap(flatten)]
source: Source,
/// List all the migrations to be run without applying them.
#[clap(long)]
dry_run: bool,
#[clap(flatten)]
ignore_missing: IgnoreMissing,
#[clap(flatten)]
connect_opts: ConnectOpts,
/// Apply migrations up to the specified version. If unspecified, apply all
/// pending migrations. If already at the target version, then no-op.
#[clap(long)]
target_version: Option<i64>,
},
/// Revert the latest migration with a down file.
Revert {
#[clap(flatten)]
source: Source,
/// List the migration to be reverted without applying it.
#[clap(long)]
dry_run: bool,
#[clap(flatten)]
ignore_missing: IgnoreMissing,
#[clap(flatten)]
connect_opts: ConnectOpts,
/// Revert migrations down to the specified version. If unspecified, revert
/// only the last migration. Set to 0 to revert all migrations. If already
/// at the target version, then no-op.
#[clap(long)]
target_version: Option<i64>,
},
/// List all available migrations.
Info {
#[clap(flatten)]
source: Source,
#[clap(flatten)]
connect_opts: ConnectOpts,
},
/// Generate a `build.rs` to trigger recompilation when a new migration is added.
///
/// Must be run in a Cargo project root.
BuildScript {
#[clap(flatten)]
source: Source,
/// Overwrite the build script if it already exists.
#[clap(long)]
force: bool,
},
}
#[derive(Args, Debug)]
pub struct AddMigrationOpts {
pub description: String,
#[clap(flatten)]
pub source: Source,
/// If set, create an up-migration only. Conflicts with `--reversible`.
#[clap(long, conflicts_with = "reversible")]
simple: bool,
/// If set, create a pair of up- and down-migration files with the same version.
///
/// Conflicts with `--simple`.
#[clap(short, long, conflicts_with = "simple")]
reversible: bool,
/// If set, use timestamp versioning for the new migration. Conflicts with `--sequential`.
///
/// Timestamp format: `YYYYMMDDHHMMSS`
#[clap(short, long, conflicts_with = "sequential")]
timestamp: bool,
/// If set, use sequential versioning for the new migration. Conflicts with `--timestamp`.
#[clap(short, long, conflicts_with = "timestamp")]
sequential: bool,
}
/// Argument for the migration scripts source.
#[derive(Args, Debug)]
pub struct Source {
/// Path to folder containing migrations.
#[clap(long, default_value = "migrations")]
source: String,
}
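// Convenience impls so a `Source` can be used directly as a `String` or as a filesystem `Path`.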
impl Deref for Source {
type Target = String;
fn deref(&self) -> &Self::Target {
&self.source
}
}
impl AsRef<Path> for Source {
fn as_ref(&self) -> &Path {
Path::new(&self.source)
}
}
/// Argument for the database URL.
#[derive(Args, Debug)]
pub struct ConnectOpts {
/// Location of the database; by default this is read from the `DATABASE_URL` environment variable or `.env` files.
#[clap(long, short = 'D')]
pub database_url: Option<String>,
/// The maximum time, in seconds, to try connecting to the database server before
/// returning an error.
#[clap(long, default_value = "10")]
pub connect_timeout: u64,
/// Set whether to create SQLite databases in Write-Ahead Log (WAL) mode:
/// https://www.sqlite.org/wal.html
///
/// WAL mode is enabled by default for SQLite databases created by `sqlx-cli`.
///
/// However, if your application sets a `journal_mode` on `SqliteConnectOptions` to something
/// other than `Wal`, then it will have to take the database file out of WAL mode on connecting,
/// which requires an exclusive lock and may return a `database is locked` (`SQLITE_BUSY`) error.
#[cfg(feature = "_sqlite")]
#[clap(long, action = clap::ArgAction::Set, default_value = "true")]
pub sqlite_create_db_wal: bool,
}
impl ConnectOpts {
/// Require a database URL to be provided, otherwise
/// return an error.
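///
/// Expects [`Self::populate_db_url`] to have been called first; the `BUG:` message
/// flags a code path that skipped that step.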
pub fn expect_db_url(&self) -> anyhow::Result<&str> {
self.database_url.as_deref().context("BUG: database_url not populated")
}
/// Populate `database_url` from the environment, if not set.
pub fn populate_db_url(&mut self, config: &Config) -> anyhow::Result<()> {
if self.database_url.is_some() {
return Ok(());
}
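// The variable name defaults to `DATABASE_URL` but may be overridden
// via `common.database-url-var` in `sqlx.toml`.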
let var = config.common.database_url_var();
let context = if var != "DATABASE_URL" {
" (`common.database-url-var` in `sqlx.toml`)"
} else {
""
};
match env::var(var) {
Ok(url) => {
if !context.is_empty() {
eprintln!("Read database url from `{var}`{context}");
}
self.database_url = Some(url)
},
Err(env::VarError::NotPresent) => {
anyhow::bail!("`--database-url` or `{var}`{context} must be set")
}
Err(env::VarError::NotUnicode(_)) => {
anyhow::bail!("`{var}`{context} is not valid UTF-8");
}
}
Ok(())
}
}
/// Argument for automatic confirmation.
#[derive(Args, Copy, Clone, Debug)]
pub struct Confirmation {
/// Automatic confirmation. Without this option, you will be prompted before dropping
/// your database.
#[clap(short)]
pub yes: bool,
}
/// Argument for ignoring applied migrations that were not resolved.
#[derive(Args, Copy, Clone, Debug)]
pub struct IgnoreMissing {
/// Ignore applied migrations that are missing in the resolved migrations
#[clap(long)]
ignore_missing: bool,
}
impl Deref for IgnoreMissing {
type Target = bool;
fn deref(&self) -> &Self::Target {
&self.ignore_missing
}
}
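// Lets call sites write `!ignore_missing` without reaching into the inner field.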
impl Not for IgnoreMissing {
type Output = bool;
fn not(self) -> Self::Output {
!self.ignore_missing
}
}
impl AddMigrationOpts {
pub fn reversible(&self, config: &Config, migrator: &Migrator) -> bool {
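// Explicit flags take precedence, then the `sqlx.toml` default;
// otherwise, infer from the latest existing migration.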
if self.reversible { return true; }
if self.simple { return false; }
match config.migrate.defaults.migration_type {
DefaultMigrationType::Inferred => {
migrator
.iter()
.last()
.is_some_and(|m| m.migration_type.is_reversible())
}
DefaultMigrationType::Simple => false,
DefaultMigrationType::Reversible => true,
}
}
pub fn version_prefix(&self, config: &Config, migrator: &Migrator) -> String {
let default_versioning = &config.migrate.defaults.migration_versioning;
if self.timestamp || matches!(default_versioning, DefaultVersioning::Timestamp) {
return next_timestamp();
}
if self.sequential || matches!(default_versioning, DefaultVersioning::Sequential) {
return next_sequential(migrator)
.unwrap_or_else(|| fmt_sequential(1));
}
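// Neither flag was passed and the configured default is `Inferred`:
// continue an existing sequential scheme if one is detected, otherwise use a timestamp.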
next_sequential(migrator).unwrap_or_else(next_timestamp)
}
}
fn next_timestamp() -> String {
Utc::now().format("%Y%m%d%H%M%S").to_string()
}
fn next_sequential(migrator: &Migrator) -> Option<String> {
let next_version = migrator
.migrations
// Take the last two migrations, or the single migration if only one exists
// (`windows(2)` would never yield the single-migration case).
.rchunks(2)
.next()
.and_then(|migrations| {
match migrations {
[previous, latest] => {
// If the latest two versions differ by 1, infer sequential.
(latest.version - previous.version == 1)
.then_some(latest.version + 1)
},
[latest] => {
// If only one migration exists and its version is 0 or 1, infer sequential
matches!(latest.version, 0 | 1)
.then_some(latest.version + 1)
}
_ => unreachable!(),
}
});
next_version.map(fmt_sequential)
}
fn fmt_sequential(version: i64) -> String {
format!("{version:04}")
}
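// A minimal test sketch for the version-prefix helpers above; the module and test
// names are placeholders. `fmt_sequential` zero-pads to at least four digits, and
// `next_timestamp` renders the current UTC time as a 14-digit `YYYYMMDDHHMMSS` string.
#[cfg(test)]
mod version_prefix_tests {
    use super::*;

    #[test]
    fn sequential_versions_are_zero_padded() {
        // `{version:04}` pads with leading zeros up to a minimum width of four digits.
        assert_eq!(fmt_sequential(1), "0001");
        assert_eq!(fmt_sequential(42), "0042");
        // Versions wider than four digits are formatted as-is.
        assert_eq!(fmt_sequential(12345), "12345");
    }

    #[test]
    fn timestamps_are_fourteen_ascii_digits() {
        let ts = next_timestamp();
        assert_eq!(ts.len(), 14);
        assert!(ts.chars().all(|c| c.is_ascii_digit()));
    }
}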