feat: create sqlx.toml format (#3383)

* feat: create `sqlx.toml` format

* feat: add support for ignored_chars config to sqlx_core::migrate

* chore: test ignored_chars with `U+FEFF` (ZWNBSP/BOM)

https://en.wikipedia.org/wiki/Byte_order_mark

* refactor: make `Config` always compiled

simplifies usage while still making parsing optional for less generated code

* refactor: add origin information to `Column`

* feat(macros): implement `type_override` and `column_override` from `sqlx.toml`

* refactor(sqlx.toml): make all keys kebab-case, create `macros.preferred-crates`

* feat: make macros aware of `macros.preferred-crates`

* feat: make `sqlx-cli` aware of `database-url-var`

* feat: teach macros about `migrate.table-name`, `migrations-dir`

* feat: teach macros about `migrate.ignored-chars`

* chore: delete unused source file `sqlx-cli/src/migration.rs`

* feat: teach `sqlx-cli` about `migrate.defaults`

* feat: teach `sqlx-cli` about `migrate.migrations-dir`

* feat: teach `sqlx-cli` about `migrate.table-name`

* feat: introduce `migrate.create-schemas`

* WIP feat: create multi-tenant database example

* fix(postgres): don't fetch `ColumnOrigin` for transparently-prepared statements

* feat: progress on axum-multi-tenant example

* feat(config): better errors for mislabeled fields

* WIP feat: filling out axum-multi-tenant example

* feat: multi-tenant example

No longer Axum-based because filling out the request routes would have distracted from the purpose of the example.

* chore(ci): test multi-tenant example

* fixup after merge

* fix(ci): enable `sqlx-toml` in CLI build for examples

* fix: CI, README for `multi-tenant`

* fix: clippy warnings

* fix: multi-tenant README

* fix: sequential versioning inference for migrations

* fix: migration versioning with explicit overrides

* fix: only warn on ambiguous crates if the invocation relies on it

* fix: remove unused imports

* fix: doctest

* fix: `sqlx mig add` behavior and tests

* fix: restore original type-checking order

* fix: deprecation warning in `tests/postgres/macros.rs`

* feat: create postgres/multi-database example

* fix: examples/postgres/multi-database

* fix: cargo fmt

* chore: add tests for config `migrate.defaults`

* fix: sqlx-cli/tests/add.rs

* feat(cli): add `--config` override to all relevant commands

* chore: run `sqlx mig add` test with `RUST_BACKTRACE=1`

* fix: properly canonicalize config path for `sqlx mig add` test

* fix: get `sqlx mig add` test passing

* fix(cli): test `migrate.ignored-chars`, fix bugs

* feat: create `macros.preferred-crates` example

* fix(examples): use workspace `sqlx`

* fix: examples

* fix(sqlite): unexpected feature flags in `type_checking.rs`

* fix: run `cargo fmt`

* fix: more example fixes

* fix(ci): preferred-crates setup

* fix(examples): enable default-features for workspace `sqlx`

* fix(examples): issues in `preferred-crates`

* chore: adjust error message for missing param type in `query!()`

* doc: mention new `sqlx.toml` configuration

* chore: add `CHANGELOG` entry

Normally I generate these when cutting the release, but I wanted to take time to editorialize this one.

* doc: fix new example titles

* refactor: make `sqlx-toml` feature non-default, improve errors

* refactor: eliminate panics in `Config` read path

* chore: remove unused `axum` dependency from new examples

* fix(config): restore fallback to default config for macros

* chore(config): remove use of `once_cell` (to match `main`)
This commit is contained in:
Austin Bonander
2025-06-30 16:34:46 -07:00
committed by GitHub
parent 764ae2f702
commit 25cbeedab4
127 changed files with 6443 additions and 1138 deletions

View File

@@ -32,6 +32,14 @@ _tls-none = []
# support offline/decoupled building (enables serialization of `Describe`)
offline = ["serde", "either/serde"]
# Enable parsing of `sqlx.toml`.
# For simplicity, the `config` module is always enabled,
# but disabling this disables the `serde` derives and the `toml` crate,
# which is a good bit less code to compile if the feature isn't being used.
sqlx-toml = ["serde", "toml/parse"]
_unstable-doc = ["sqlx-toml"]
[dependencies]
# Runtimes
async-std = { workspace = true, optional = true }
@@ -71,6 +79,7 @@ percent-encoding = "2.1.0"
regex = { version = "1.5.5", optional = true }
serde = { version = "1.0.132", features = ["derive", "rc"], optional = true }
serde_json = { version = "1.0.73", features = ["raw_value"], optional = true }
toml = { version = "0.8.16", optional = true }
sha2 = { version = "0.10.0", default-features = false, optional = true }
#sqlformat = "0.2.0"
thiserror = "2.0.0"

View File

@@ -44,18 +44,44 @@ impl MigrateDatabase for Any {
}
impl Migrate for AnyConnection {
fn ensure_migrations_table(&mut self) -> BoxFuture<'_, Result<(), MigrateError>> {
Box::pin(async { self.get_migrate()?.ensure_migrations_table().await })
fn create_schema_if_not_exists<'e>(
&'e mut self,
schema_name: &'e str,
) -> BoxFuture<'e, Result<(), MigrateError>> {
Box::pin(async {
self.get_migrate()?
.create_schema_if_not_exists(schema_name)
.await
})
}
fn dirty_version(&mut self) -> BoxFuture<'_, Result<Option<i64>, MigrateError>> {
Box::pin(async { self.get_migrate()?.dirty_version().await })
fn ensure_migrations_table<'e>(
&'e mut self,
table_name: &'e str,
) -> BoxFuture<'e, Result<(), MigrateError>> {
Box::pin(async {
self.get_migrate()?
.ensure_migrations_table(table_name)
.await
})
}
fn list_applied_migrations(
&mut self,
) -> BoxFuture<'_, Result<Vec<AppliedMigration>, MigrateError>> {
Box::pin(async { self.get_migrate()?.list_applied_migrations().await })
fn dirty_version<'e>(
&'e mut self,
table_name: &'e str,
) -> BoxFuture<'e, Result<Option<i64>, MigrateError>> {
Box::pin(async { self.get_migrate()?.dirty_version(table_name).await })
}
fn list_applied_migrations<'e>(
&'e mut self,
table_name: &'e str,
) -> BoxFuture<'e, Result<Vec<AppliedMigration>, MigrateError>> {
Box::pin(async {
self.get_migrate()?
.list_applied_migrations(table_name)
.await
})
}
fn lock(&mut self) -> BoxFuture<'_, Result<(), MigrateError>> {
@@ -66,17 +92,19 @@ impl Migrate for AnyConnection {
Box::pin(async { self.get_migrate()?.unlock().await })
}
fn apply<'e: 'm, 'm>(
fn apply<'e>(
&'e mut self,
migration: &'m Migration,
) -> BoxFuture<'m, Result<Duration, MigrateError>> {
Box::pin(async { self.get_migrate()?.apply(migration).await })
table_name: &'e str,
migration: &'e Migration,
) -> BoxFuture<'e, Result<Duration, MigrateError>> {
Box::pin(async { self.get_migrate()?.apply(table_name, migration).await })
}
fn revert<'e: 'm, 'm>(
fn revert<'e>(
&'e mut self,
migration: &'m Migration,
) -> BoxFuture<'m, Result<Duration, MigrateError>> {
Box::pin(async { self.get_migrate()?.revert(migration).await })
table_name: &'e str,
migration: &'e Migration,
) -> BoxFuture<'e, Result<Duration, MigrateError>> {
Box::pin(async { self.get_migrate()?.revert(table_name, migration).await })
}
}

View File

@@ -2,6 +2,7 @@ use crate::database::Database;
use crate::error::Error;
use std::fmt::Debug;
use std::sync::Arc;
pub trait Column: 'static + Send + Sync + Debug {
type Database: Database<Column = Self>;
@@ -20,6 +21,61 @@ pub trait Column: 'static + Send + Sync + Debug {
/// Gets the type information for the column.
fn type_info(&self) -> &<Self::Database as Database>::TypeInfo;
/// If this column comes from a table, return the table and original column name.
///
/// Returns [`ColumnOrigin::Expression`] if the column is the result of an expression
/// or else the source table could not be determined.
///
/// Returns [`ColumnOrigin::Unknown`] if the database driver does not have that information,
/// or has not overridden this method.
// This method returns an owned value instead of a reference,
// to give the implementor more flexibility.
fn origin(&self) -> ColumnOrigin {
ColumnOrigin::Unknown
}
}
/// A [`Column`] that originates from a table.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "offline", derive(serde::Serialize, serde::Deserialize))]
pub struct TableColumn {
    /// The name of the table (optionally schema-qualified) that the column comes from.
    // `Arc<str>` makes clones of this metadata O(1) (refcount bump, no string copy).
    pub table: Arc<str>,
    /// The original name of the column.
    pub name: Arc<str>,
}
/// The possible statuses for our knowledge of the origin of a [`Column`].
#[derive(Debug, Clone, Default)]
#[cfg_attr(feature = "offline", derive(serde::Serialize, serde::Deserialize))]
pub enum ColumnOrigin {
    /// The column is known to originate from a table.
    ///
    /// Included is the table name and original column name.
    Table(TableColumn),
    /// The column originates from an expression, or else its origin could not be determined.
    Expression,
    /// The database driver does not know the column origin at this time.
    ///
    /// This may happen if:
    /// * The connection is in the middle of executing a query,
    ///   and cannot query the catalog to fetch this information.
    /// * The connection does not have access to the database catalog.
    /// * The implementation of [`Column`] did not override [`Column::origin()`].
    // `#[default]` keeps `Unknown` as the conservative fallback for drivers
    // that never report origin information.
    #[default]
    Unknown,
}
impl ColumnOrigin {
    /// Returns the true column origin, if known.
    pub fn table_column(&self) -> Option<&TableColumn> {
        // Exhaustive match: adding a new variant forces this method to be revisited.
        match self {
            Self::Table(table_column) => Some(table_column),
            Self::Expression | Self::Unknown => None,
        }
    }
}
/// A type that can be used to index into a [`Row`] or [`Statement`].

View File

@@ -0,0 +1,49 @@
/// Configuration shared by multiple components.
#[derive(Debug, Default)]
#[cfg_attr(
    feature = "sqlx-toml",
    derive(serde::Deserialize),
    serde(default, rename_all = "kebab-case", deny_unknown_fields)
)]
pub struct Config {
    /// Override the database URL environment variable.
    ///
    /// This is used by both the macros and `sqlx-cli`.
    ///
    /// Case-sensitive. Defaults to `DATABASE_URL`.
    ///
    /// Example: Multi-Database Project
    /// -------
    /// You can use multiple databases in the same project by breaking it up into multiple crates,
    /// then using a different environment variable for each.
    ///
    /// For example, with two crates in the workspace named `foo` and `bar`:
    ///
    /// #### `foo/sqlx.toml`
    /// ```toml
    /// [common]
    /// database-url-var = "FOO_DATABASE_URL"
    /// ```
    ///
    /// #### `bar/sqlx.toml`
    /// ```toml
    /// [common]
    /// database-url-var = "BAR_DATABASE_URL"
    /// ```
    ///
    /// #### `.env`
    /// ```text
    /// FOO_DATABASE_URL=postgres://postgres@localhost:5432/foo
    /// BAR_DATABASE_URL=postgres://postgres@localhost:5432/bar
    /// ```
    ///
    /// The query macros used in `foo` will use `FOO_DATABASE_URL`,
    /// and the ones used in `bar` will use `BAR_DATABASE_URL`.
    // `None` means "unset"; the `database_url_var()` accessor supplies the
    // `DATABASE_URL` fallback so consumers never see the `Option`.
    pub database_url_var: Option<String>,
}
impl Config {
    /// The name of the environment variable to read the database URL from.
    ///
    /// Returns the configured override, or `"DATABASE_URL"` if unset.
    pub fn database_url_var(&self) -> &str {
        match &self.database_url_var {
            Some(var) => var,
            None => "DATABASE_URL",
        }
    }
}

View File

@@ -0,0 +1,418 @@
use std::collections::BTreeMap;
/// Configuration for the `query!()` family of macros.
///
/// See also [`common::Config`][crate::config::common::Config] for renaming `DATABASE_URL`.
#[derive(Debug, Default)]
#[cfg_attr(
    feature = "sqlx-toml",
    derive(serde::Deserialize),
    serde(default, rename_all = "kebab-case", deny_unknown_fields)
)]
pub struct Config {
    /// Specify which crates' types to use when types from multiple crates apply.
    ///
    /// See [`PreferredCrates`] for details.
    pub preferred_crates: PreferredCrates,

    /// Specify global overrides for mapping SQL type names to Rust type names.
    ///
    /// Default type mappings are defined by the database driver.
    /// Refer to the `sqlx::types` module for details.
    ///
    /// ## Note: Case-Sensitive
    /// Currently, the case of the type name MUST match the name SQLx knows it by.
    /// Built-in types are spelled in all-uppercase to match SQL convention.
    ///
    /// However, user-created types in Postgres are all-lowercase unless quoted.
    ///
    /// ## Note: Orthogonal to Nullability
    /// These overrides do not affect whether `query!()` decides to wrap a column in `Option<_>`
    /// or not. They only override the inner type used.
    ///
    /// ## Note: Schema Qualification (Postgres)
    /// Type names may be schema-qualified in Postgres. If so, the schema should be part
    /// of the type string, e.g. `'foo.bar'` to reference type `bar` in schema `foo`.
    ///
    /// The schema and/or type name may additionally be quoted in the string
    /// for a quoted identifier (see next section).
    ///
    /// Schema qualification should not be used for types in the search path.
    ///
    /// ## Note: Quoted Identifiers (Postgres)
    /// Type names using [quoted identifiers in Postgres] must also be specified with quotes here.
    ///
    /// Note, however, that the TOML format parses away the outer pair of quotes,
    /// so for quoted names in Postgres, double-quoting is necessary,
    /// e.g. `'"Foo"'` for SQL type `"Foo"`.
    ///
    /// To reference a schema-qualified type with a quoted name, use double-quotes after the
    /// dot, e.g. `'foo."Bar"'` to reference type `"Bar"` of schema `foo`, and vice versa for
    /// quoted schema names.
    ///
    /// We recommend wrapping all type names in single quotes, as shown below,
    /// to avoid confusion.
    ///
    /// MySQL/MariaDB and SQLite do not support custom types, so quoting type names should
    /// never be necessary.
    ///
    /// [quoted identifiers in Postgres]: https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
    // Note: we wanted to be able to handle this intelligently,
    // but the `toml` crate authors weren't interested: https://github.com/toml-rs/toml/issues/761
    //
    // We decided to just encourage always quoting type names instead.
    /// Example: Custom Wrapper Types
    /// -------
    /// Does SQLx not support a type that you need? Do you want additional semantics not
    /// implemented on the built-in types? You can create a custom wrapper,
    /// or use an external crate.
    ///
    /// #### `sqlx.toml`
    /// ```toml
    /// [macros.type-overrides]
    /// # Override a built-in type
    /// 'UUID' = "crate::types::MyUuid"
    ///
    /// # Support an external or custom wrapper type (e.g. from the `isn` Postgres extension)
    /// # (NOTE: FOR DOCUMENTATION PURPOSES ONLY; THIS CRATE/TYPE DOES NOT EXIST AS OF WRITING)
    /// 'isbn13' = "isn_rs::sqlx::ISBN13"
    /// ```
    ///
    /// Example: Custom Types in Postgres
    /// -------
    /// If you have a custom type in Postgres that you want to map without needing to use
    /// the type override syntax in `sqlx::query!()` every time, you can specify a global
    /// override here.
    ///
    /// For example, a custom enum type `foo`:
    ///
    /// #### Migration or Setup SQL (e.g. `migrations/0_setup.sql`)
    /// ```sql
    /// CREATE TYPE foo AS ENUM ('Bar', 'Baz');
    /// ```
    ///
    /// #### `src/types.rs`
    /// ```rust,no_run
    /// #[derive(sqlx::Type)]
    /// pub enum Foo {
    ///     Bar,
    ///     Baz
    /// }
    /// ```
    ///
    /// If you're not using `PascalCase` in your enum variants then you'll want to use
    /// `#[sqlx(rename_all = "<strategy>")]` on your enum.
    /// See [`Type`][crate::type::Type] for details.
    ///
    /// #### `sqlx.toml`
    /// ```toml
    /// [macros.type-overrides]
    /// # Map SQL type `foo` to `crate::types::Foo`
    /// 'foo' = "crate::types::Foo"
    /// ```
    ///
    /// Example: Schema-Qualified Types
    /// -------
    /// (See `Note` section above for details.)
    ///
    /// ```toml
    /// [macros.type-overrides]
    /// # Map SQL type `foo.foo` to `crate::types::Foo`
    /// 'foo.foo' = "crate::types::Foo"
    /// ```
    ///
    /// Example: Quoted Identifiers
    /// -------
    /// If a type or schema uses quoted identifiers,
    /// it must be wrapped in quotes _twice_ for SQLx to know the difference:
    ///
    /// ```toml
    /// [macros.type-overrides]
    /// # `"Foo"` in SQLx
    /// '"Foo"' = "crate::types::Foo"
    /// # **NOT** `"Foo"` in SQLx (parses as just `Foo`)
    /// "Foo" = "crate::types::Foo"
    ///
    /// # Schema-qualified
    /// '"foo".foo' = "crate::types::Foo"
    /// 'foo."Foo"' = "crate::types::Foo"
    /// '"foo"."Foo"' = "crate::types::Foo"
    /// ```
    ///
    /// (See `Note` section above for details.)
    // TODO: allow specifying different types for input vs output
    // e.g. to accept `&[T]` on input but output `Vec<T>`
    pub type_overrides: BTreeMap<SqlType, RustType>,

    /// Specify per-table and per-column overrides for mapping SQL types to Rust types.
    ///
    /// Default type mappings are defined by the database driver.
    /// Refer to the `sqlx::types` module for details.
    ///
    /// The supported syntax is similar to [`type_overrides`][Self::type_overrides],
    /// (with the same caveat for quoted names!) but column names must be qualified
    /// by a separately quoted table name, which may optionally be schema-qualified.
    ///
    /// Multiple columns for the same SQL table may be written in the same table in TOML
    /// (see examples below).
    ///
    /// ## Note: Orthogonal to Nullability
    /// These overrides do not affect whether `query!()` decides to wrap a column in `Option<_>`
    /// or not. They only override the inner type used.
    ///
    /// ## Note: Schema Qualification
    /// Table names may be schema-qualified. If so, the schema should be part
    /// of the table name string, e.g. `'foo.bar'` to reference table `bar` in schema `foo`.
    ///
    /// The schema and/or type name may additionally be quoted in the string
    /// for a quoted identifier (see next section).
    ///
    /// Postgres users: schema qualification should not be used for tables in the search path.
    ///
    /// ## Note: Quoted Identifiers
    /// Schema, table, or column names using quoted identifiers ([MySQL], [Postgres], [SQLite])
    /// in SQL must also be specified with quotes here.
    ///
    /// Postgres and SQLite use double-quotes (`"Foo"`) while MySQL uses backticks (`\`Foo\`).
    ///
    /// Note, however, that the TOML format parses away the outer pair of quotes,
    /// so for quoted names in Postgres, double-quoting is necessary,
    /// e.g. `'"Foo"'` for SQL name `"Foo"`.
    ///
    /// To reference a schema-qualified table with a quoted name, use the appropriate quotation
    /// characters after the dot, e.g. `'foo."Bar"'` to reference table `"Bar"` of schema `foo`,
    /// and vice versa for quoted schema names.
    ///
    /// We recommend wrapping all table and column names in single quotes, as shown below,
    /// to avoid confusion.
    ///
    /// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/identifiers.html
    /// [Postgres]: https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
    /// [SQLite]: https://sqlite.org/lang_keywords.html
    // Note: we wanted to be able to handle this intelligently,
    // but the `toml` crate authors weren't interested: https://github.com/toml-rs/toml/issues/761
    //
    // We decided to just encourage always quoting type names instead.
    ///
    /// Example
    /// -------
    ///
    /// #### `sqlx.toml`
    /// ```toml
    /// [macros.table-overrides.'foo']
    /// # Map column `bar` of table `foo` to Rust type `crate::types::Foo`:
    /// 'bar' = "crate::types::Bar"
    ///
    /// # Quoted column name
    /// # Note: same quoting requirements as `macros.type_overrides`
    /// '"Bar"' = "crate::types::Bar"
    ///
    /// # Note: will NOT work (parses as `Bar`)
    /// # "Bar" = "crate::types::Bar"
    ///
    /// # Table name may be quoted (note the wrapping single-quotes)
    /// [macros.table-overrides.'"Foo"']
    /// 'bar' = "crate::types::Bar"
    /// '"Bar"' = "crate::types::Bar"
    ///
    /// # Table name may also be schema-qualified.
    /// # Note how the dot is inside the quotes.
    /// [macros.table-overrides.'my_schema.my_table']
    /// 'my_column' = "crate::types::MyType"
    ///
    /// # Quoted schema, table, and column names
    /// [macros.table-overrides.'"My Schema"."My Table"']
    /// '"My Column"' = "crate::types::MyType"
    /// ```
    pub table_overrides: BTreeMap<TableName, BTreeMap<ColumnName, RustType>>,
}
/// Specifies which crates' types to prefer when multiple type-mapping crates apply.
#[derive(Debug, Default)]
#[cfg_attr(
    feature = "sqlx-toml",
    derive(serde::Deserialize),
    serde(default, rename_all = "kebab-case")
)]
pub struct PreferredCrates {
    /// Specify the crate to use for mapping date/time types to Rust.
    ///
    /// The default behavior is to use whatever crate is enabled,
    /// [`chrono`] or [`time`] (the latter takes precedence).
    ///
    /// [`chrono`]: crate::types::chrono
    /// [`time`]: crate::types::time
    ///
    /// Example: Always Use Chrono
    /// -------
    /// Thanks to Cargo's [feature unification], a crate in the dependency graph may enable
    /// the `time` feature of SQLx which will force it on for all crates using SQLx,
    /// which will result in problems if your crate wants to use types from [`chrono`].
    ///
    /// You can use the type override syntax (see `sqlx::query!` for details),
    /// or you can force an override globally by setting this option.
    ///
    /// #### `sqlx.toml`
    /// ```toml
    /// [macros.preferred-crates]
    /// date-time = "chrono"
    /// ```
    ///
    /// [feature unification]: https://doc.rust-lang.org/cargo/reference/features.html#feature-unification
    pub date_time: DateTimeCrate,

    /// Specify the crate to use for mapping `NUMERIC` types to Rust.
    ///
    /// The default behavior is to use whatever crate is enabled,
    /// [`bigdecimal`] or [`rust_decimal`] (the latter takes precedence).
    ///
    /// [`bigdecimal`]: crate::types::bigdecimal
    /// [`rust_decimal`]: crate::types::rust_decimal
    ///
    /// Example: Always Use `bigdecimal`
    /// -------
    /// Thanks to Cargo's [feature unification], a crate in the dependency graph may enable
    /// the `rust_decimal` feature of SQLx which will force it on for all crates using SQLx,
    /// which will result in problems if your crate wants to use types from [`bigdecimal`].
    ///
    /// You can use the type override syntax (see `sqlx::query!` for details),
    /// or you can force an override globally by setting this option.
    ///
    /// #### `sqlx.toml`
    /// ```toml
    /// [macros.preferred-crates]
    /// numeric = "bigdecimal"
    /// ```
    ///
    /// [feature unification]: https://doc.rust-lang.org/cargo/reference/features.html#feature-unification
    pub numeric: NumericCrate,
}
/// The preferred crate to use for mapping date/time types to Rust.
#[derive(Debug, Default, PartialEq, Eq)]
#[cfg_attr(
    feature = "sqlx-toml",
    derive(serde::Deserialize),
    serde(rename_all = "snake_case")
)]
pub enum DateTimeCrate {
    /// Use whichever crate is enabled (`time` then `chrono`).
    #[default]
    Inferred,
    /// Always use types from [`chrono`][crate::types::chrono].
    ///
    /// ```toml
    /// [macros.preferred-crates]
    /// date-time = "chrono"
    /// ```
    Chrono,
    /// Always use types from [`time`][crate::types::time].
    ///
    /// ```toml
    /// [macros.preferred-crates]
    /// date-time = "time"
    /// ```
    Time,
}
/// The preferred crate to use for mapping `NUMERIC` types to Rust.
#[derive(Debug, Default, PartialEq, Eq)]
#[cfg_attr(
    feature = "sqlx-toml",
    derive(serde::Deserialize),
    serde(rename_all = "snake_case")
)]
pub enum NumericCrate {
    /// Use whichever crate is enabled (`rust_decimal` then `bigdecimal`).
    #[default]
    Inferred,
    /// Always use types from [`bigdecimal`][crate::types::bigdecimal].
    ///
    /// ```toml
    /// [macros.preferred-crates]
    /// numeric = "bigdecimal"
    /// ```
    // `snake_case` would render this variant as `big_decimal`;
    // the explicit rename matches the crate's actual name.
    #[cfg_attr(feature = "sqlx-toml", serde(rename = "bigdecimal"))]
    BigDecimal,
    /// Always use types from [`rust_decimal`][crate::types::rust_decimal].
    ///
    /// ```toml
    /// [macros.preferred-crates]
    /// numeric = "rust_decimal"
    /// ```
    RustDecimal,
}
// `Box<str>` instead of `String`: drops the capacity word, since these
// values are never grown after parsing.

/// A SQL type name; may optionally be schema-qualified.
///
/// See [`macros.type-overrides`][Config::type_overrides] for usages.
pub type SqlType = Box<str>;

/// A SQL table name; may optionally be schema-qualified.
///
/// See [`macros.table-overrides`][Config::table_overrides] for usages.
pub type TableName = Box<str>;

/// A column in a SQL table.
///
/// See [`macros.table-overrides`][Config::table_overrides] for usages.
pub type ColumnName = Box<str>;

/// A Rust type name or path.
///
/// Should be a global path (not relative).
pub type RustType = Box<str>;
/// Internal getter methods.
impl Config {
    /// Get the override for a given type name (optionally schema-qualified).
    pub fn type_override(&self, type_name: &str) -> Option<&str> {
        // TODO: make this case-insensitive
        let rust_type = self.type_overrides.get(type_name)?;
        Some(rust_type)
    }

    /// Get the override for a given column and table name (optionally schema-qualified).
    pub fn column_override(&self, table: &str, column: &str) -> Option<&str> {
        let columns = self.table_overrides.get(table)?;
        let rust_type = columns.get(column)?;
        Some(rust_type)
    }
}
impl DateTimeCrate {
    /// Returns `self == Self::Inferred`
    #[inline(always)]
    pub fn is_inferred(&self) -> bool {
        matches!(self, Self::Inferred)
    }

    /// The Cargo crate name this choice forces, or `None` for `Inferred`.
    #[inline(always)]
    pub fn crate_name(&self) -> Option<&str> {
        let name = match self {
            Self::Inferred => return None,
            Self::Chrono => "chrono",
            Self::Time => "time",
        };

        Some(name)
    }
}
impl NumericCrate {
    /// Returns `self == Self::Inferred`
    #[inline(always)]
    pub fn is_inferred(&self) -> bool {
        matches!(self, Self::Inferred)
    }

    /// The Cargo crate name this choice forces, or `None` for `Inferred`.
    #[inline(always)]
    pub fn crate_name(&self) -> Option<&str> {
        let name = match self {
            Self::Inferred => return None,
            Self::BigDecimal => "bigdecimal",
            Self::RustDecimal => "rust_decimal",
        };

        Some(name)
    }
}

View File

@@ -0,0 +1,212 @@
use std::collections::BTreeSet;
/// Configuration for migrations when executed using `sqlx::migrate!()` or through `sqlx-cli`.
///
/// ### Note
/// A manually constructed [`Migrator`][crate::migrate::Migrator] will not be aware of these
/// configuration options. We recommend using `sqlx::migrate!()` instead.
///
/// ### Warning: Potential Data Loss or Corruption!
/// Many of these options, if changed after migrations are set up,
/// can result in data loss or corruption of a production database
/// if the proper precautions are not taken.
///
/// Be sure you know what you are doing and that you read all relevant documentation _thoroughly_.
#[derive(Debug, Default)]
#[cfg_attr(
    feature = "sqlx-toml",
    derive(serde::Deserialize),
    serde(default, rename_all = "kebab-case", deny_unknown_fields)
)]
pub struct Config {
    /// Specify the names of schemas to create if they don't already exist.
    ///
    /// This is done before checking the existence of the migrations table
    /// (`_sqlx_migrations` or overridden `table_name` below) so that it may be placed in
    /// one of these schemas.
    ///
    /// ### Example
    /// `sqlx.toml`:
    /// ```toml
    /// [migrate]
    /// create-schemas = ["foo"]
    /// ```
    pub create_schemas: BTreeSet<Box<str>>,

    /// Override the name of the table used to track executed migrations.
    ///
    /// May be schema-qualified and/or contain quotes. Defaults to `_sqlx_migrations`.
    ///
    /// Potentially useful for multi-tenant databases.
    ///
    /// ### Warning: Potential Data Loss or Corruption!
    /// Changing this option for a production database will likely result in data loss or corruption
    /// as the migration machinery will no longer be aware of what migrations have been applied
    /// and will attempt to re-run them.
    ///
    /// You should create the new table as a copy of the existing migrations table (with contents!),
    /// and be sure all instances of your application have been migrated to the new
    /// table before deleting the old one.
    ///
    /// ### Example
    /// `sqlx.toml`:
    /// ```toml
    /// [migrate]
    /// # Put `_sqlx_migrations` in schema `foo`
    /// table-name = "foo._sqlx_migrations"
    /// ```
    pub table_name: Option<Box<str>>,

    /// Override the directory used for migrations files.
    ///
    /// Relative to the crate root for `sqlx::migrate!()`, or the current directory for `sqlx-cli`.
    pub migrations_dir: Option<Box<str>>,

    /// Specify characters that should be ignored when hashing migrations.
    ///
    /// Any characters contained in the given array will be dropped when a migration is hashed.
    ///
    /// ### Warning: May Change Hashes for Existing Migrations
    /// Changing the characters considered in hashing migrations will likely
    /// change the output of the hash.
    ///
    /// This may require manual rectification for deployed databases.
    ///
    /// ### Example: Ignore Carriage Return (`<CR>` | `\r`)
    /// Line ending differences between platforms can result in migrations having non-repeatable
    /// hashes. The most common culprit is the carriage return (`<CR>` | `\r`), which Windows
    /// uses in its line endings alongside line feed (`<LF>` | `\n`), often written `CRLF` or `\r\n`,
    /// whereas Linux and macOS use only line feeds.
    ///
    /// `sqlx.toml`:
    /// ```toml
    /// [migrate]
    /// ignored-chars = ["\r"]
    /// ```
    ///
    /// For projects using Git, this can also be addressed using [`.gitattributes`]:
    ///
    /// ```text
    /// # Force newlines in migrations to be line feeds on all platforms
    /// migrations/*.sql text eol=lf
    /// ```
    ///
    /// This may require resetting or re-checking out the migrations files to take effect.
    ///
    /// [`.gitattributes`]: https://git-scm.com/docs/gitattributes
    ///
    /// ### Example: Ignore all Whitespace Characters
    /// To make your migrations amenable to reformatting, you may wish to tell SQLx to ignore
    /// _all_ whitespace characters in migrations.
    ///
    /// ##### Warning: Beware Syntactically Significant Whitespace!
    /// If your migrations use string literals or quoted identifiers which contain whitespace,
    /// this configuration will cause the migration machinery to ignore some changes to these.
    /// This may result in a mismatch between the development and production versions of
    /// your database.
    ///
    /// `sqlx.toml`:
    /// ```toml
    /// [migrate]
    /// # Ignore common whitespace characters when hashing
    /// ignored-chars = [" ", "\t", "\r", "\n"] # Space, tab, CR, LF
    /// ```
    // Likely lower overhead for small sets than `HashSet`.
    pub ignored_chars: BTreeSet<char>,

    /// Specify default options for new migrations created with `sqlx migrate add`.
    pub defaults: MigrationDefaults,
}
/// Default options applied to new migrations created with `sqlx migrate add`.
#[derive(Debug, Default)]
#[cfg_attr(
    feature = "sqlx-toml",
    derive(serde::Deserialize),
    serde(default, rename_all = "kebab-case")
)]
pub struct MigrationDefaults {
    /// Specify the default type of migration that `sqlx migrate add` should create by default.
    ///
    /// ### Example: Use Reversible Migrations by Default
    /// `sqlx.toml`:
    /// ```toml
    /// [migrate.defaults]
    /// migration-type = "reversible"
    /// ```
    pub migration_type: DefaultMigrationType,

    /// Specify the default scheme that `sqlx migrate add` should use for version integers.
    ///
    /// ### Example: Use Sequential Versioning by Default
    /// `sqlx.toml`:
    /// ```toml
    /// [migrate.defaults]
    /// migration-versioning = "sequential"
    /// ```
    pub migration_versioning: DefaultVersioning,
}
/// The default type of migration that `sqlx migrate add` should create.
#[derive(Debug, Default, PartialEq, Eq)]
#[cfg_attr(
    feature = "sqlx-toml",
    derive(serde::Deserialize),
    serde(rename_all = "snake_case")
)]
pub enum DefaultMigrationType {
    /// Create the same migration type as that of the latest existing migration,
    /// or `Simple` otherwise.
    #[default]
    Inferred,
    /// Create non-reversible migrations (`<VERSION>_<DESCRIPTION>.sql`) by default.
    Simple,
    /// Create reversible migrations (`<VERSION>_<DESCRIPTION>.up.sql` and `[...].down.sql`) by default.
    Reversible,
}
/// The default scheme that `sqlx migrate add` should use for version integers.
#[derive(Debug, Default, PartialEq, Eq)]
#[cfg_attr(
    feature = "sqlx-toml",
    derive(serde::Deserialize),
    serde(rename_all = "snake_case")
)]
pub enum DefaultVersioning {
    /// Infer the versioning scheme from existing migrations:
    ///
    /// * If the versions of the last two migrations differ by `1`, infer `Sequential`.
    /// * If only one migration exists and has version `1`, infer `Sequential`.
    /// * Otherwise, infer `Timestamp`.
    #[default]
    Inferred,
    /// Use UTC timestamps for migration versions.
    ///
    /// This is the recommended versioning format as it's less likely to collide when multiple
    /// developers are creating migrations on different branches.
    ///
    /// The exact timestamp format is unspecified.
    Timestamp,
    /// Use sequential integers for migration versions.
    Sequential,
}
#[cfg(feature = "migrate")]
impl Config {
    /// Path of the migrations directory, or `"migrations"` if not overridden.
    pub fn migrations_dir(&self) -> &str {
        match &self.migrations_dir {
            Some(dir) => dir,
            None => "migrations",
        }
    }

    /// Name of the migrations tracking table, or `"_sqlx_migrations"` if not overridden.
    pub fn table_name(&self) -> &str {
        match &self.table_name {
            Some(name) => name,
            None => "_sqlx_migrations",
        }
    }

    /// Build a [`ResolveConfig`][crate::migrate::ResolveConfig] carrying `ignored_chars`.
    pub fn to_resolve_config(&self) -> crate::migrate::ResolveConfig {
        let mut resolve = crate::migrate::ResolveConfig::new();
        resolve.ignore_chars(self.ignored_chars.iter().copied());
        resolve
    }
}

207
sqlx-core/src/config/mod.rs Normal file
View File

@@ -0,0 +1,207 @@
//! (Exported for documentation only) Guide and reference for `sqlx.toml` files.
//!
//! To use, create a `sqlx.toml` file in your crate root (the same directory as your `Cargo.toml`).
//! The configuration in a `sqlx.toml` configures SQLx *only* for the current crate.
//!
//! Requires the `sqlx-toml` feature (not enabled by default).
//!
//! `sqlx-cli` will also read `sqlx.toml` when running migrations.
//!
//! See the [`Config`] type and its fields for individual configuration options.
//!
//! See the [reference][`_reference`] for the full `sqlx.toml` file.
use std::error::Error;
use std::fmt::Debug;
use std::io;
use std::path::{Path, PathBuf};
/// Configuration shared by multiple components.
///
/// See [`common::Config`] for details.
pub mod common;
/// Configuration for the `query!()` family of macros.
///
/// See [`macros::Config`] for details.
pub mod macros;
/// Configuration for migrations when executed using `sqlx::migrate!()` or through `sqlx-cli`.
///
/// See [`migrate::Config`] for details.
pub mod migrate;
/// Reference for `sqlx.toml` files
///
/// Source: `sqlx-core/src/config/reference.toml`
///
/// ```toml
#[doc = include_str!("reference.toml")]
/// ```
pub mod _reference {}
#[cfg(all(test, feature = "sqlx-toml"))]
mod tests;
/// The parsed structure of a `sqlx.toml` file.
///
/// Every section is optional (`serde(default)`), so a `sqlx.toml` may contain only the
/// tables the user wants to override. Unknown or misspelled keys are rejected outright
/// (`deny_unknown_fields`) rather than silently ignored.
#[derive(Debug, Default)]
#[cfg_attr(
    feature = "sqlx-toml",
    derive(serde::Deserialize),
    serde(default, rename_all = "kebab-case", deny_unknown_fields)
)]
pub struct Config {
    /// Configuration shared by multiple components.
    ///
    /// See [`common::Config`] for details.
    pub common: common::Config,
    /// Configuration for the `query!()` family of macros.
    ///
    /// See [`macros::Config`] for details.
    pub macros: macros::Config,
    /// Configuration for migrations when executed using `sqlx::migrate!()` or through `sqlx-cli`.
    ///
    /// See [`migrate::Config`] for details.
    pub migrate: migrate::Config,
}
/// Error returned from various methods of [`Config`].
#[derive(thiserror::Error, Debug)]
pub enum ConfigError {
    /// The loading method expected `CARGO_MANIFEST_DIR` to be set and it wasn't.
    ///
    /// This is necessary to locate the root of the crate currently being compiled.
    ///
    /// See [the "Environment Variables" page of the Cargo Book][cargo-env] for details.
    ///
    /// [cargo-env]: https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates
    #[error("environment variable `CARGO_MANIFEST_DIR` must be set and valid")]
    Env(
        #[from]
        #[source]
        std::env::VarError,
    ),
    /// No configuration file was found. Not necessarily fatal.
    ///
    /// See [`Self::not_found_path()`] for programmatic detection of this case.
    #[error("config file {path:?} not found")]
    NotFound { path: PathBuf },
    /// An I/O error occurred while attempting to read the config file at `path`.
    ///
    /// If the error is [`io::ErrorKind::NotFound`], [`Self::NotFound`] is returned instead.
    #[error("error reading config file {path:?}")]
    Io {
        path: PathBuf,
        #[source]
        error: io::Error,
    },
    /// An error in the TOML was encountered while parsing the config file at `path`.
    ///
    /// The error gives line numbers and context when printed with `Display`/`ToString`.
    ///
    /// Only returned if the `sqlx-toml` feature is enabled.
    #[error("error parsing config file {path:?}")]
    Parse {
        path: PathBuf,
        /// Type-erased [`toml::de::Error`].
        ///
        /// NOTE(review): boxing presumably keeps `toml` out of this enum's public
        /// signature so the dependency can stay optional — confirm before un-erasing.
        #[source]
        error: Box<dyn Error + Send + Sync + 'static>,
    },
    /// A `sqlx.toml` file was found or specified, but the `sqlx-toml` feature is not enabled.
    #[error("SQLx found config file at {path:?} but the `sqlx-toml` feature was not enabled")]
    ParseDisabled { path: PathBuf },
}
impl ConfigError {
    /// Convert an I/O error encountered while reading `path` into a [`ConfigError`].
    ///
    /// [`io::ErrorKind::NotFound`] maps to [`Self::NotFound`]; everything else to [`Self::Io`].
    pub fn from_io(path: impl Into<PathBuf>, error: io::Error) -> Self {
        let path = path.into();
        match error.kind() {
            io::ErrorKind::NotFound => Self::NotFound { path },
            _ => Self::Io { path, error },
        }
    }

    /// Return the attempted path if this error is [`Self::NotFound`], or `None` otherwise.
    pub fn not_found_path(&self) -> Option<&Path> {
        match self {
            Self::NotFound { path } => Some(path),
            _ => None,
        }
    }
}
/// Internal methods for loading a `Config`.
#[allow(clippy::result_large_err)]
impl Config {
    /// Read `$CARGO_MANIFEST_DIR/sqlx.toml`, falling back to `Config::default()`
    /// if the file does not exist.
    ///
    /// Errors if `CARGO_MANIFEST_DIR` is not set, or if an existing config file
    /// could not be read or parsed.
    ///
    /// NOTE(review): earlier docs claimed the result is "cached in a `static`", but no
    /// caching is visible in this impl — presumably the caller memoizes; confirm.
    pub fn try_from_crate_or_default() -> Result<Self, ConfigError> {
        Self::read_from(get_crate_path()?).or_else(|e| {
            // `NotFound` is the only error treated as non-fatal here.
            if let ConfigError::NotFound { .. } = e {
                Ok(Config::default())
            } else {
                Err(e)
            }
        })
    }

    /// Attempt to read the config from the path given.
    ///
    /// Errors if the config file does not exist, or could not be read or parsed.
    pub fn try_from_path(path: PathBuf) -> Result<Self, ConfigError> {
        Self::read_from(path)
    }

    /// Read and deserialize the TOML file at `path` (`sqlx-toml` feature enabled).
    #[cfg(feature = "sqlx-toml")]
    fn read_from(path: PathBuf) -> Result<Self, ConfigError> {
        // The `toml` crate doesn't provide an incremental reader.
        let toml_s = match std::fs::read_to_string(&path) {
            Ok(toml) => toml,
            Err(error) => {
                // `from_io` maps `NotFound` I/O errors to `ConfigError::NotFound`.
                return Err(ConfigError::from_io(path, error));
            }
        };
        // TODO: parse and lint TOML structure before deserializing
        // Motivation: https://github.com/toml-rs/toml/issues/761
        tracing::debug!("read config TOML from {path:?}:\n{toml_s}");
        toml::from_str(&toml_s).map_err(|error| ConfigError::Parse {
            path,
            error: Box::new(error),
        })
    }

    /// Fallback when the `sqlx-toml` feature is disabled: never parses, but still
    /// distinguishes "file exists but parsing is unavailable" (`ParseDisabled`)
    /// from "no file at all" (`NotFound`).
    #[cfg(not(feature = "sqlx-toml"))]
    fn read_from(path: PathBuf) -> Result<Self, ConfigError> {
        match path.try_exists() {
            Ok(true) => Err(ConfigError::ParseDisabled { path }),
            Ok(false) => Err(ConfigError::NotFound { path }),
            Err(e) => Err(ConfigError::from_io(path, e)),
        }
    }
}
fn get_crate_path() -> Result<PathBuf, ConfigError> {
let mut path = PathBuf::from(std::env::var("CARGO_MANIFEST_DIR")?);
path.push("sqlx.toml");
Ok(path)
}

View File

@@ -0,0 +1,194 @@
# `sqlx.toml` reference.
#
# Note: shown values are *not* defaults.
# They are explicitly set to non-default values to test parsing.
# Refer to the comment for a given option for its default value.
###############################################################################################
# Configuration shared by multiple components.
[common]
# Change the environment variable to get the database URL.
#
# This is used by both the macros and `sqlx-cli`.
#
# If not specified, defaults to `DATABASE_URL`
database-url-var = "FOO_DATABASE_URL"
###############################################################################################
# Configuration for the `query!()` family of macros.
[macros]
[macros.preferred-crates]
# Force the macros to use the `chrono` crate for date/time types, even if `time` is enabled.
#
# Defaults to "inferred": use whichever crate is enabled (`time` takes precedence over `chrono`).
date-time = "chrono"
# Or, ensure the macros always prefer `time`
# in case new date/time crates are added in the future:
# date-time = "time"
# Force the macros to use the `rust_decimal` crate for `NUMERIC`, even if `bigdecimal` is enabled.
#
# Defaults to "inferred": use whichever crate is enabled (`bigdecimal` takes precedence over `rust_decimal`).
numeric = "rust_decimal"
# Or, ensure the macros always prefer `bigdecimal`
# in case new decimal crates are added in the future:
# numeric = "bigdecimal"
# Set global overrides for mapping SQL types to Rust types.
#
# Default type mappings are defined by the database driver.
# Refer to the `sqlx::types` module for details.
#
# Postgres users: schema qualification should not be used for types in the search path.
#
# ### Note: Orthogonal to Nullability
# These overrides do not affect whether `query!()` decides to wrap a column in `Option<_>`
# or not. They only override the inner type used.
[macros.type-overrides]
# Override a built-in type (map all `UUID` columns to `crate::types::MyUuid`)
# Note: currently, the case of the type name MUST match.
# Built-in types are spelled in all-uppercase to match SQL convention.
'UUID' = "crate::types::MyUuid"
# Support an external or custom wrapper type (e.g. from the `isn` Postgres extension)
# (NOTE: FOR DOCUMENTATION PURPOSES ONLY; THIS CRATE/TYPE DOES NOT EXIST AS OF WRITING)
'isbn13' = "isn_rs::isbn::ISBN13"
# SQL type `foo` to Rust type `crate::types::Foo`:
'foo' = "crate::types::Foo"
# SQL type `"Bar"` to Rust type `crate::types::Bar`; notice the extra pair of quotes:
'"Bar"' = "crate::types::Bar"
# Will NOT work (the first pair of quotes are parsed by TOML)
# "Bar" = "crate::types::Bar"
# Schema qualified
'foo.bar' = "crate::types::Bar"
# Schema qualified and quoted
'foo."Bar"' = "crate::schema::foo::Bar"
# Quoted schema name
'"Foo".bar' = "crate::schema::foo::Bar"
# Quoted schema and type name
'"Foo"."Bar"' = "crate::schema::foo::Bar"
# Set per-table and per-column overrides for mapping SQL types to Rust types.
#
# Note: table name is required in the header.
#
# Postgres users: schema qualification should not be used for types in the search path.
#
# ### Note: Orthogonal to Nullability
# These overrides do not affect whether `query!()` decides to wrap a column in `Option<_>`
# or not. They only override the inner type used.
[macros.table-overrides.'foo']
# Map column `bar` of table `foo` to Rust type `crate::types::Foo`:
'bar' = "crate::types::Bar"
# Quoted column name
# Note: same quoting requirements as `macros.type-overrides`
'"Bar"' = "crate::types::Bar"
# Note: will NOT work (parses as `Bar`)
# "Bar" = "crate::types::Bar"
# Table name may be quoted (note the wrapping single-quotes)
[macros.table-overrides.'"Foo"']
'bar' = "crate::types::Bar"
'"Bar"' = "crate::types::Bar"
# Table name may also be schema-qualified.
# Note how the dot is inside the quotes.
[macros.table-overrides.'my_schema.my_table']
'my_column' = "crate::types::MyType"
# Quoted schema, table, and column names
[macros.table-overrides.'"My Schema"."My Table"']
'"My Column"' = "crate::types::MyType"
###############################################################################################
# Configuration for migrations when executed using `sqlx::migrate!()` or through `sqlx-cli`.
#
# ### Note
# A manually constructed [`Migrator`][crate::migrate::Migrator] will not be aware of these
# configuration options. We recommend using `sqlx::migrate!()` instead.
#
# ### Warning: Potential Data Loss or Corruption!
# Many of these options, if changed after migrations are set up,
# can result in data loss or corruption of a production database
# if the proper precautions are not taken.
#
# Be sure you know what you are doing and that you read all relevant documentation _thoroughly_.
[migrate]
# Override the name of the table used to track executed migrations.
#
# May be schema-qualified and/or contain quotes. Defaults to `_sqlx_migrations`.
#
# Potentially useful for multi-tenant databases.
#
# ### Warning: Potential Data Loss or Corruption!
# Changing this option for a production database will likely result in data loss or corruption
# as the migration machinery will no longer be aware of what migrations have been applied
# and will attempt to re-run them.
#
# You should create the new table as a copy of the existing migrations table (with contents!),
# and be sure all instances of your application have been migrated to the new
# table before deleting the old one.
table-name = "foo._sqlx_migrations"
# Override the directory used for migrations files.
#
# Relative to the crate root for `sqlx::migrate!()`, or the current directory for `sqlx-cli`.
migrations-dir = "foo/migrations"
# Specify characters that should be ignored when hashing migrations.
#
# Any characters contained in the given set will be dropped when a migration is hashed.
#
# Defaults to an empty array (don't drop any characters).
#
# ### Warning: May Change Hashes for Existing Migrations
# Changing the characters considered in hashing migrations will likely
# change the output of the hash.
#
# This may require manual rectification for deployed databases.
# ignored-chars = []
# Ignore Carriage Returns (`<CR>` | `\r`)
# Note that the TOML format requires double-quoted strings to process escapes.
# ignored-chars = ["\r"]
# Ignore common whitespace characters (beware syntactically significant whitespace!)
# Space, tab, CR, LF, zero-width non-breaking space (U+FEFF)
#
# U+FEFF is added by some editors as a magic number at the beginning of a text file indicating it is UTF-8 encoded,
# where it is known as a byte-order mark (BOM): https://en.wikipedia.org/wiki/Byte_order_mark
ignored-chars = [" ", "\t", "\r", "\n", "\uFEFF"]
# Set default options for new migrations.
[migrate.defaults]
# Specify reversible migrations by default (for `sqlx migrate create`).
#
# Defaults to "inferred": uses the type of the last migration, or "simple" otherwise.
migration-type = "reversible"
# Specify simple (non-reversible) migrations by default.
# migration-type = "simple"
# Specify sequential versioning by default (for `sqlx migrate create`).
#
# Defaults to "inferred": guesses the versioning scheme from the latest migrations,
# or "timestamp" otherwise.
migration-versioning = "sequential"
# Specify timestamp versioning by default.
# migration-versioning = "timestamp"

View File

@@ -0,0 +1,93 @@
use crate::config::{self, Config};
use std::collections::BTreeSet;
// Smoke-test that the shipped `reference.toml` deserializes into `Config`
// and that each section carries the expected (non-default) values.
#[test]
fn reference_parses_as_config() {
    let parsed = toml::from_str::<Config>(include_str!("reference.toml"));
    // The `Display` impl of `toml::Error` is *actually* more useful than `Debug`
    let config = match parsed {
        Ok(config) => config,
        Err(e) => panic!("expected reference.toml to parse as Config: {e}"),
    };
    assert_common_config(&config.common);
    assert_macros_config(&config.macros);
    assert_migrate_config(&config.migrate);
}
/// Canary assertion for the `[common]` table of `reference.toml`.
fn assert_common_config(config: &config::common::Config) {
    let var = config.database_url_var.as_deref();
    assert_eq!(var, Some("FOO_DATABASE_URL"));
}
/// Canary assertions for the `[macros]` table of `reference.toml`.
fn assert_macros_config(config: &config::macros::Config) {
    use config::macros::*;

    assert_eq!(config.preferred_crates.date_time, DateTimeCrate::Chrono);
    assert_eq!(config.preferred_crates.numeric, NumericCrate::RustDecimal);

    // Type overrides: a handful of important canaries rather than exhaustive coverage.
    let type_cases = [
        ("UUID", "crate::types::MyUuid"),
        ("foo", "crate::types::Foo"),
        (r#""Bar""#, "crate::types::Bar"),
        (r#""Foo".bar"#, "crate::schema::foo::Bar"),
        (r#""Foo"."Bar""#, "crate::schema::foo::Bar"),
    ];
    for (sql_type, rust_type) in type_cases {
        assert_eq!(config.type_override(sql_type), Some(rust_type));
    }

    // Column overrides, exercising quoting and schema qualification.
    let column_cases = [
        ("foo", "bar", "crate::types::Bar"),
        ("foo", r#""Bar""#, "crate::types::Bar"),
        (r#""Foo""#, "bar", "crate::types::Bar"),
        (r#""Foo""#, r#""Bar""#, "crate::types::Bar"),
        ("my_schema.my_table", "my_column", "crate::types::MyType"),
        (r#""My Schema"."My Table""#, r#""My Column""#, "crate::types::MyType"),
    ];
    for (table, column, rust_type) in column_cases {
        assert_eq!(config.column_override(table, column), Some(rust_type));
    }
}
/// Canary assertions for the `[migrate]` table of `reference.toml`.
fn assert_migrate_config(config: &config::migrate::Config) {
    use config::migrate::*;

    assert_eq!(config.table_name.as_deref(), Some("foo._sqlx_migrations"));
    assert_eq!(config.migrations_dir.as_deref(), Some("foo/migrations"));

    let expected_ignored: BTreeSet<char> =
        [' ', '\t', '\r', '\n', '\u{FEFF}'].into_iter().collect();
    assert_eq!(config.ignored_chars, expected_ignored);

    assert_eq!(
        config.defaults.migration_type,
        DefaultMigrationType::Reversible
    );
    assert_eq!(
        config.defaults.migration_versioning,
        DefaultVersioning::Sequential
    );
}

View File

@@ -91,6 +91,8 @@ pub mod any;
#[cfg(feature = "migrate")]
pub mod testing;
pub mod config;
pub use error::{Error, Result};
pub use either::Either;

View File

@@ -39,4 +39,7 @@ pub enum MigrateError {
"migration {0} is partially applied; fix and remove row from `_sqlx_migrations` table"
)]
Dirty(i64),
#[error("database driver does not support creation of schemas at migrate time: {0}")]
CreateSchemasNotSupported(String),
}

View File

@@ -25,18 +25,31 @@ pub trait MigrateDatabase {
// 'e = Executor
pub trait Migrate {
/// Create a database schema with the given name if it does not already exist.
fn create_schema_if_not_exists<'e>(
&'e mut self,
schema_name: &'e str,
) -> BoxFuture<'e, Result<(), MigrateError>>;
// ensure migrations table exists
// will create or migrate it if needed
fn ensure_migrations_table(&mut self) -> BoxFuture<'_, Result<(), MigrateError>>;
fn ensure_migrations_table<'e>(
&'e mut self,
table_name: &'e str,
) -> BoxFuture<'e, Result<(), MigrateError>>;
// Return the version on which the database is dirty or None otherwise.
// "dirty" means there is a partially applied migration that failed.
fn dirty_version(&mut self) -> BoxFuture<'_, Result<Option<i64>, MigrateError>>;
fn dirty_version<'e>(
&'e mut self,
table_name: &'e str,
) -> BoxFuture<'e, Result<Option<i64>, MigrateError>>;
// Return the ordered list of applied migrations
fn list_applied_migrations(
&mut self,
) -> BoxFuture<'_, Result<Vec<AppliedMigration>, MigrateError>>;
fn list_applied_migrations<'e>(
&'e mut self,
table_name: &'e str,
) -> BoxFuture<'e, Result<Vec<AppliedMigration>, MigrateError>>;
// Should acquire a database lock so that only one migration process
// can run at a time. [`Migrate`] will call this function before applying
@@ -50,16 +63,18 @@ pub trait Migrate {
// run SQL from migration in a DDL transaction
// insert new row to [_migrations] table on completion (success or failure)
// returns the time taking to run the migration SQL
fn apply<'e: 'm, 'm>(
fn apply<'e>(
&'e mut self,
migration: &'m Migration,
) -> BoxFuture<'m, Result<Duration, MigrateError>>;
table_name: &'e str,
migration: &'e Migration,
) -> BoxFuture<'e, Result<Duration, MigrateError>>;
// run a revert SQL from migration in a DDL transaction
// deletes the row in [_migrations] table with specified migration version on completion (success or failure)
// returns the time taking to run the migration SQL
fn revert<'e: 'm, 'm>(
fn revert<'e>(
&'e mut self,
migration: &'m Migration,
) -> BoxFuture<'m, Result<Duration, MigrateError>>;
table_name: &'e str,
migration: &'e Migration,
) -> BoxFuture<'e, Result<Duration, MigrateError>>;
}

View File

@@ -1,6 +1,5 @@
use std::borrow::Cow;
use sha2::{Digest, Sha384};
use std::borrow::Cow;
use super::MigrationType;
@@ -22,8 +21,26 @@ impl Migration {
sql: Cow<'static, str>,
no_tx: bool,
) -> Self {
let checksum = Cow::Owned(Vec::from(Sha384::digest(sql.as_bytes()).as_slice()));
let checksum = checksum(&sql);
Self::with_checksum(
version,
description,
migration_type,
sql,
checksum.into(),
no_tx,
)
}
pub(crate) fn with_checksum(
version: i64,
description: Cow<'static, str>,
migration_type: MigrationType,
sql: Cow<'static, str>,
checksum: Cow<'static, [u8]>,
no_tx: bool,
) -> Self {
Migration {
version,
description,
@@ -40,3 +57,39 @@ pub struct AppliedMigration {
pub version: i64,
pub checksum: Cow<'static, [u8]>,
}
/// Compute the SHA-384 checksum of a migration's SQL as raw bytes.
pub fn checksum(sql: &str) -> Vec<u8> {
    Sha384::digest(sql.as_bytes()).to_vec()
}
/// Compute the SHA-384 checksum of SQL supplied as a sequence of fragments.
///
/// Feeding the fragments one at a time produces the same digest as hashing their
/// concatenation in one call (see the test below).
pub fn checksum_fragments<'a>(fragments: impl Iterator<Item = &'a str>) -> Vec<u8> {
    let digest = fragments.fold(Sha384::new(), |mut digest, fragment| {
        digest.update(fragment);
        digest
    });
    digest.finalize().to_vec()
}
// Proves that hashing fragment-by-fragment yields the same digest as hashing the
// whole string at once — `checksum_with` relies on this equivalence when dropping
// ignored characters.
#[test]
fn fragments_checksum_equals_full_checksum() {
    // Copied from `examples/postgres/axum-social-with-tests/migrations/3_comment.sql`
    let sql = "\
    \u{FEFF}create table comment (\r\n\
    \tcomment_id uuid primary key default gen_random_uuid(),\r\n\
    \tpost_id uuid not null references post(post_id),\r\n\
    \tuser_id uuid not null references \"user\"(user_id),\r\n\
    \tcontent text not null,\r\n\
    \tcreated_at timestamptz not null default now()\r\n\
    );\r\n\
    \r\n\
    create index on comment(post_id, created_at);\r\n\
    ";
    // Should yield a string for each character.
    // (`str::split("")` also yields leading/trailing empty strings,
    // which are no-ops for the hasher.)
    let fragments_checksum = checksum_fragments(sql.split(""));
    let full_checksum = checksum(sql);
    assert_eq!(fragments_checksum, full_checksum);
}

View File

@@ -74,8 +74,9 @@ impl MigrationType {
}
}
#[deprecated = "unused"]
pub fn infer(migrator: &Migrator, reversible: bool) -> MigrationType {
match migrator.iter().next() {
match migrator.iter().last() {
Some(first_migration) => first_migration.migration_type,
None => {
if reversible {

View File

@@ -23,25 +23,11 @@ pub struct Migrator {
pub locking: bool,
#[doc(hidden)]
pub no_tx: bool,
}
#[doc(hidden)]
pub table_name: Cow<'static, str>,
fn validate_applied_migrations(
applied_migrations: &[AppliedMigration],
migrator: &Migrator,
) -> Result<(), MigrateError> {
if migrator.ignore_missing {
return Ok(());
}
let migrations: HashSet<_> = migrator.iter().map(|m| m.version).collect();
for applied_migration in applied_migrations {
if !migrations.contains(&applied_migration.version) {
return Err(MigrateError::VersionMissing(applied_migration.version));
}
}
Ok(())
#[doc(hidden)]
pub create_schemas: Cow<'static, [Cow<'static, str>]>,
}
impl Migrator {
@@ -51,6 +37,8 @@ impl Migrator {
ignore_missing: false,
no_tx: false,
locking: true,
table_name: Cow::Borrowed("_sqlx_migrations"),
create_schemas: Cow::Borrowed(&[]),
};
/// Creates a new instance with the given source.
@@ -81,6 +69,38 @@ impl Migrator {
})
}
/// Override the name of the table used to track executed migrations.
///
/// May be schema-qualified and/or contain quotes. Defaults to `_sqlx_migrations`.
///
/// Potentially useful for multi-tenant databases.
///
/// ### Warning: Potential Data Loss or Corruption!
/// Changing this option for a production database will likely result in data loss or corruption
/// as the migration machinery will no longer be aware of what migrations have been applied
/// and will attempt to re-run them.
///
/// You should create the new table as a copy of the existing migrations table (with contents!),
/// and be sure all instances of your application have been migrated to the new
/// table before deleting the old one.
pub fn dangerous_set_table_name(&mut self, table_name: impl Into<Cow<'static, str>>) -> &Self {
self.table_name = table_name.into();
self
}
/// Add a schema name to be created if it does not already exist.
///
/// May be used with [`Self::dangerous_set_table_name()`] to place the migrations table
/// in a new schema without requiring it to exist first.
///
/// ### Note: Support Depends on Database
/// SQLite cannot create new schemas without attaching them to a database file,
/// the path of which must be specified separately in an [`ATTACH DATABASE`](https://www.sqlite.org/lang_attach.html) command.
pub fn create_schema(&mut self, schema_name: impl Into<Cow<'static, str>>) -> &Self {
self.create_schemas.to_mut().push(schema_name.into());
self
}
/// Specify whether applied migrations that are missing from the resolved migrations should be ignored.
pub fn set_ignore_missing(&mut self, ignore_missing: bool) -> &Self {
self.ignore_missing = ignore_missing;
@@ -134,12 +154,21 @@ impl Migrator {
<A::Connection as Deref>::Target: Migrate,
{
let mut conn = migrator.acquire().await?;
self.run_direct(&mut *conn).await
self.run_direct(None, &mut *conn).await
}
pub async fn run_to<'a, A>(&self, target: i64, migrator: A) -> Result<(), MigrateError>
where
A: Acquire<'a>,
<A::Connection as Deref>::Target: Migrate,
{
let mut conn = migrator.acquire().await?;
self.run_direct(Some(target), &mut *conn).await
}
// Getting around the annoying "implementation of `Acquire` is not general enough" error
#[doc(hidden)]
pub async fn run_direct<C>(&self, conn: &mut C) -> Result<(), MigrateError>
pub async fn run_direct<C>(&self, target: Option<i64>, conn: &mut C) -> Result<(), MigrateError>
where
C: Migrate,
{
@@ -148,16 +177,20 @@ impl Migrator {
conn.lock().await?;
}
for schema_name in self.create_schemas.iter() {
conn.create_schema_if_not_exists(schema_name).await?;
}
// creates [_migrations] table only if needed
// eventually this will likely migrate previous versions of the table
conn.ensure_migrations_table().await?;
conn.ensure_migrations_table(&self.table_name).await?;
let version = conn.dirty_version().await?;
let version = conn.dirty_version(&self.table_name).await?;
if let Some(version) = version {
return Err(MigrateError::Dirty(version));
}
let applied_migrations = conn.list_applied_migrations().await?;
let applied_migrations = conn.list_applied_migrations(&self.table_name).await?;
validate_applied_migrations(&applied_migrations, self)?;
let applied_migrations: HashMap<_, _> = applied_migrations
@@ -166,6 +199,11 @@ impl Migrator {
.collect();
for migration in self.iter() {
if target.is_some_and(|target| target < migration.version) {
// Target version reached
break;
}
if migration.migration_type.is_down_migration() {
continue;
}
@@ -177,7 +215,7 @@ impl Migrator {
}
}
None => {
conn.apply(migration).await?;
conn.apply(&self.table_name, migration).await?;
}
}
}
@@ -222,14 +260,14 @@ impl Migrator {
// creates [_migrations] table only if needed
// eventually this will likely migrate previous versions of the table
conn.ensure_migrations_table().await?;
conn.ensure_migrations_table(&self.table_name).await?;
let version = conn.dirty_version().await?;
let version = conn.dirty_version(&self.table_name).await?;
if let Some(version) = version {
return Err(MigrateError::Dirty(version));
}
let applied_migrations = conn.list_applied_migrations().await?;
let applied_migrations = conn.list_applied_migrations(&self.table_name).await?;
validate_applied_migrations(&applied_migrations, self)?;
let applied_migrations: HashMap<_, _> = applied_migrations
@@ -244,7 +282,7 @@ impl Migrator {
.filter(|m| applied_migrations.contains_key(&m.version))
.filter(|m| m.version > target)
{
conn.revert(migration).await?;
conn.revert(&self.table_name, migration).await?;
}
// unlock the migrator to allow other migrators to run
@@ -256,3 +294,22 @@ impl Migrator {
Ok(())
}
}
/// Error if any applied migration is missing from the migrator's resolved set,
/// unless `ignore_missing` was requested.
fn validate_applied_migrations(
    applied_migrations: &[AppliedMigration],
    migrator: &Migrator,
) -> Result<(), MigrateError> {
    if migrator.ignore_missing {
        return Ok(());
    }

    let known_versions: HashSet<_> = migrator.iter().map(|m| m.version).collect();

    match applied_migrations
        .iter()
        .find(|applied| !known_versions.contains(&applied.version))
    {
        Some(missing) => Err(MigrateError::VersionMissing(missing.version)),
        None => Ok(()),
    }
}

View File

@@ -11,7 +11,7 @@ pub use migrate::{Migrate, MigrateDatabase};
pub use migration::{AppliedMigration, Migration};
pub use migration_type::MigrationType;
pub use migrator::Migrator;
pub use source::MigrationSource;
pub use source::{MigrationSource, ResolveConfig, ResolveWith};
#[doc(hidden)]
pub use source::resolve_blocking;
pub use source::{resolve_blocking, resolve_blocking_with_config};

View File

@@ -1,8 +1,9 @@
use crate::error::BoxDynError;
use crate::migrate::{Migration, MigrationType};
use crate::migrate::{migration, Migration, MigrationType};
use futures_core::future::BoxFuture;
use std::borrow::Cow;
use std::collections::BTreeSet;
use std::fmt::Debug;
use std::fs;
use std::io;
@@ -28,19 +29,48 @@ pub trait MigrationSource<'s>: Debug {
impl<'s> MigrationSource<'s> for &'s Path {
fn resolve(self) -> BoxFuture<'s, Result<Vec<Migration>, BoxDynError>> {
Box::pin(async move {
let canonical = self.canonicalize()?;
let migrations_with_paths =
crate::rt::spawn_blocking(move || resolve_blocking(&canonical)).await?;
Ok(migrations_with_paths.into_iter().map(|(m, _p)| m).collect())
})
// Behavior changed from previous because `canonicalize()` is potentially blocking
// since it might require going to disk to fetch filesystem data.
self.to_owned().resolve()
}
}
impl MigrationSource<'static> for PathBuf {
fn resolve(self) -> BoxFuture<'static, Result<Vec<Migration>, BoxDynError>> {
Box::pin(async move { self.as_path().resolve().await })
// Technically this could just be `Box::pin(spawn_blocking(...))`
// but that would actually be a breaking behavior change because it would call
// `spawn_blocking()` on the current thread
Box::pin(async move {
crate::rt::spawn_blocking(move || {
let migrations_with_paths = resolve_blocking(&self)?;
Ok(migrations_with_paths.into_iter().map(|(m, _p)| m).collect())
})
.await
})
}
}
/// A [`MigrationSource`] implementation with configurable resolution.
///
/// `S` may be `PathBuf`, `&Path` or any type that implements `Into<PathBuf>`.
///
/// See [`ResolveConfig`] for details.
#[derive(Debug)]
pub struct ResolveWith<S>(pub S, pub ResolveConfig);

impl<'s, S: Debug + Into<PathBuf> + Send + 's> MigrationSource<'s> for ResolveWith<S> {
    fn resolve(self) -> BoxFuture<'s, Result<Vec<Migration>, BoxDynError>> {
        Box::pin(async move {
            let ResolveWith(source, config) = self;
            let path = source.into();

            // Resolution hits the filesystem, so run it on a blocking thread.
            let resolved =
                crate::rt::spawn_blocking(move || resolve_blocking_with_config(&path, &config))
                    .await?;

            // Callers of this trait only want the migrations, not their paths.
            Ok(resolved.into_iter().map(|(migration, _path)| migration).collect())
        })
    }
}
@@ -52,11 +82,87 @@ pub struct ResolveError {
source: Option<io::Error>,
}
/// Configuration for migration resolution using [`ResolveWith`].
#[derive(Debug, Default)]
pub struct ResolveConfig {
    // `BTreeSet` keeps the set deduplicated and iteration order deterministic.
    ignored_chars: BTreeSet<char>,
}

impl ResolveConfig {
    /// Return a default, empty configuration.
    pub fn new() -> Self {
        Self::default()
    }

    /// Ignore a character when hashing migrations.
    ///
    /// The migration SQL string itself will still contain the character,
    /// but it will not be included when calculating the checksum.
    ///
    /// This can be used to ignore whitespace characters so changing formatting
    /// does not change the checksum.
    ///
    /// Adding the same `char` more than once is a no-op.
    ///
    /// ### Note: Changes Migration Checksum
    /// This will change the checksum of resolved migrations,
    /// which may cause problems with existing deployments.
    ///
    /// **Use at your own risk.**
    pub fn ignore_char(&mut self, c: char) -> &mut Self {
        // `Option<char>` is an iterator of zero or one chars; delegate to the bulk method.
        self.ignore_chars(Some(c))
    }

    /// Ignore one or more characters when hashing migrations.
    ///
    /// The migration SQL string itself will still contain these characters,
    /// but they will not be included when calculating the checksum.
    ///
    /// This can be used to ignore whitespace characters so changing formatting
    /// does not change the checksum.
    ///
    /// Adding the same `char` more than once is a no-op.
    ///
    /// ### Note: Changes Migration Checksum
    /// This will change the checksum of resolved migrations,
    /// which may cause problems with existing deployments.
    ///
    /// **Use at your own risk.**
    pub fn ignore_chars(&mut self, chars: impl IntoIterator<Item = char>) -> &mut Self {
        self.ignored_chars.extend(chars);
        self
    }

    /// Iterate over the set of ignored characters.
    ///
    /// Duplicate `char`s are not included.
    pub fn ignored_chars(&self) -> impl Iterator<Item = char> + '_ {
        self.ignored_chars.iter().copied()
    }
}
// FIXME: paths should just be part of `Migration` but we can't add a field backwards compatibly
// since it's `#[non_exhaustive]`.
#[doc(hidden)]
pub fn resolve_blocking(path: &Path) -> Result<Vec<(Migration, PathBuf)>, ResolveError> {
let s = fs::read_dir(path).map_err(|e| ResolveError {
message: format!("error reading migration directory {}: {e}", path.display()),
resolve_blocking_with_config(path, &ResolveConfig::new())
}
#[doc(hidden)]
// Resolve all migrations under `path`, honoring `config` (e.g. ignored characters
// for checksumming). Returns each migration paired with its source file path.
pub fn resolve_blocking_with_config(
    path: &Path,
    config: &ResolveConfig,
) -> Result<Vec<(Migration, PathBuf)>, ResolveError> {
    // Canonicalize up front so the returned `PathBuf`s are absolute.
    let path = path.canonicalize().map_err(|e| ResolveError {
        message: format!("error canonicalizing path {}", path.display()),
        source: Some(e),
    })?;
    let s = fs::read_dir(&path).map_err(|e| ResolveError {
        message: format!("error reading migration directory {}", path.display()),
        source: Some(e),
    })?;
@@ -65,7 +171,7 @@ pub fn resolve_blocking(path: &Path) -> Result<Vec<(Migration, PathBuf)>, Resolv
    for res in s {
        let entry = res.map_err(|e| ResolveError {
            message: format!(
                // NOTE(review): the next two lines look like unresolved diff residue
                // (both the old `{e}`-interpolating message and the new one are
                // present); only one format string should survive — confirm upstream.
                "error reading contents of migration directory {}: {e}",
                "error reading contents of migration directory {}",
                path.display()
            ),
            source: Some(e),
@@ -126,12 +232,15 @@ pub fn resolve_blocking(path: &Path) -> Result<Vec<(Migration, PathBuf)>, Resolv
            // opt-out of migration transaction
            let no_tx = sql.starts_with("-- no-transaction");

            // Checksum respects `config.ignored_chars` so cosmetic edits
            // (BOM, line endings, whitespace) don't invalidate deployed migrations.
            let checksum = checksum_with(&sql, &config.ignored_chars);

            migrations.push((
                // NOTE(review): `Migration::new(` below appears to be a leftover
                // removed line from the diff; `Migration::with_checksum(` is the
                // intended constructor — confirm upstream.
                Migration::new(
                Migration::with_checksum(
                    version,
                    Cow::Owned(description),
                    migration_type,
                    Cow::Owned(sql),
                    checksum.into(),
                    no_tx,
                ),
                entry_path,
@@ -143,3 +252,47 @@ pub fn resolve_blocking(path: &Path) -> Result<Vec<(Migration, PathBuf)>, Resolv

    Ok(migrations)
}
/// Compute the checksum of `sql`, skipping every character in `ignored_chars`.
///
/// With an empty set this falls through to the plain byte-wise checksum, which
/// avoids UTF-8 decoding entirely.
fn checksum_with(sql: &str, ignored_chars: &BTreeSet<char>) -> Vec<u8> {
    if !ignored_chars.is_empty() {
        // Split on each ignored character and hash only the fragments between them.
        return migration::checksum_fragments(sql.split(|c| ignored_chars.contains(&c)));
    }

    // Fast path: no characters to ignore.
    migration::checksum(sql)
}
#[test]
fn checksum_with_ignored_chars() {
    // `checksum_with` over the raw SQL must produce exactly the digest of the
    // same SQL with every ignored character physically stripped out.
    let ignored = [
        ' ', '\t', '\r', '\n',
        // U+FEFF: zero-width non-breaking space (ZWNBSP), often written at the
        // start of UTF-8 files as a byte-order mark (BOM):
        // https://en.wikipedia.org/wiki/Byte_order_mark
        '\u{FEFF}',
    ];

    // Copied from `examples/postgres/axum-social-with-tests/migrations/3_comment.sql`
    let sql = "\
\u{FEFF}create table comment (\r\n\
\tcomment_id uuid primary key default gen_random_uuid(),\r\n\
\tpost_id uuid not null references post(post_id),\r\n\
\tuser_id uuid not null references \"user\"(user_id),\r\n\
\tcontent text not null,\r\n\
\tcreated_at timestamptz not null default now()\r\n\
);\r\n\
\r\n\
create index on comment(post_id, created_at);\r\n\
";

    let stripped = sql.replace(&ignored[..], "");

    let actual = checksum_with(sql, &BTreeSet::from(ignored));
    let expected = migration::checksum(&stripped);

    assert_eq!(actual, expected);
}

View File

@@ -256,7 +256,7 @@ async fn setup_test_db<DB: Database>(
if let Some(migrator) = args.migrator {
migrator
.run_direct(&mut conn)
.run_direct(None, &mut conn)
.await
.expect("failed to apply migrations");
}

View File

@@ -1,3 +1,4 @@
use crate::config::macros::PreferredCrates;
use crate::database::Database;
use crate::decode::Decode;
use crate::type_info::TypeInfo;
@@ -26,12 +27,18 @@ pub trait TypeChecking: Database {
///
/// If the type has a borrowed equivalent suitable for query parameters,
/// this is that borrowed type.
fn param_type_for_id(id: &Self::TypeInfo) -> Option<&'static str>;
fn param_type_for_id(
id: &Self::TypeInfo,
preferred_crates: &PreferredCrates,
) -> Result<&'static str, Error>;
/// Get the full path of the Rust type that corresponds to the given `TypeInfo`, if applicable.
///
/// Always returns the owned version of the type, suitable for decoding from `Row`.
fn return_type_for_id(id: &Self::TypeInfo) -> Option<&'static str>;
fn return_type_for_id(
id: &Self::TypeInfo,
preferred_crates: &PreferredCrates,
) -> Result<&'static str, Error>;
/// Get the name of the Cargo feature gate that must be enabled to process the given `TypeInfo`,
/// if applicable.
@@ -43,6 +50,22 @@ pub trait TypeChecking: Database {
fn fmt_value_debug(value: &<Self as Database>::Value) -> FmtValue<'_, Self>;
}
/// Result alias for type-checking operations, defaulting the error to [`Error`].
pub type Result<T, E = Error> = std::result::Result<T, E>;

/// Errors that can occur while mapping a SQL type to a Rust type in the macros.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    /// No built-in Rust type corresponds to the SQL type being checked.
    #[error("no built-in mapping found for SQL type; a type override may be required")]
    NoMappingFound,
    /// `macros.preferred-crates.date-time` names a crate whose Cargo feature is off.
    #[error("Cargo feature for configured `macros.preferred-crates.date-time` not enabled")]
    DateTimeCrateFeatureNotEnabled,
    /// `macros.preferred-crates.numeric` names a crate whose Cargo feature is off.
    #[error("Cargo feature for configured `macros.preferred-crates.numeric` not enabled")]
    NumericCrateFeatureNotEnabled,
    /// More than one enabled date-time crate could map this type; `fallback` is
    /// the type that would be chosen by default.
    #[error("multiple date-time types are possible; falling back to `{fallback}`")]
    AmbiguousDateTimeType { fallback: &'static str },
    /// More than one enabled numeric crate could map this type; `fallback` is
    /// the type that would be chosen by default.
    #[error("multiple numeric types are possible; falling back to `{fallback}`")]
    AmbiguousNumericType { fallback: &'static str },
}
/// An adapter for [`Value`] which attempts to decode the value and format it when printed using [`Debug`].
pub struct FmtValue<'v, DB>
where
@@ -140,36 +163,304 @@ macro_rules! impl_type_checking {
},
ParamChecking::$param_checking:ident,
feature-types: $ty_info:ident => $get_gate:expr,
datetime-types: {
chrono: {
$($chrono_ty:ty $(| $chrono_input:ty)?),*$(,)?
},
time: {
$($time_ty:ty $(| $time_input:ty)?),*$(,)?
},
},
numeric-types: {
bigdecimal: {
$($bigdecimal_ty:ty $(| $bigdecimal_input:ty)?),*$(,)?
},
rust_decimal: {
$($rust_decimal_ty:ty $(| $rust_decimal_input:ty)?),*$(,)?
},
},
) => {
impl $crate::type_checking::TypeChecking for $database {
const PARAM_CHECKING: $crate::type_checking::ParamChecking = $crate::type_checking::ParamChecking::$param_checking;
fn param_type_for_id(info: &Self::TypeInfo) -> Option<&'static str> {
match () {
fn param_type_for_id(
info: &Self::TypeInfo,
preferred_crates: &$crate::config::macros::PreferredCrates,
) -> Result<&'static str, $crate::type_checking::Error> {
use $crate::config::macros::{DateTimeCrate, NumericCrate};
use $crate::type_checking::Error;
// Check non-special types
// ---------------------
$(
$(#[$meta])?
if <$ty as sqlx_core::types::Type<$database>>::type_info() == *info {
return Ok($crate::select_input_type!($ty $(, $input)?));
}
)*
$(
$(#[$meta])?
if <$ty as sqlx_core::types::Type<$database>>::compatible(info) {
return Ok($crate::select_input_type!($ty $(, $input)?));
}
)*
// Check `macros.preferred-crates.date-time`
//
// Due to legacy reasons, `time` takes precedent over `chrono` if both are enabled.
// Any crates added later should be _lower_ priority than `chrono` to avoid breakages.
// ----------------------------------------
#[cfg(feature = "time")]
if matches!(preferred_crates.date_time, DateTimeCrate::Time | DateTimeCrate::Inferred) {
$(
$(#[$meta])?
_ if <$ty as sqlx_core::types::Type<$database>>::type_info() == *info => Some($crate::select_input_type!($ty $(, $input)?)),
if <$time_ty as sqlx_core::types::Type<$database>>::type_info() == *info {
if cfg!(feature = "chrono") {
return Err($crate::type_checking::Error::AmbiguousDateTimeType {
fallback: $crate::select_input_type!($time_ty $(, $time_input)?),
});
}
return Ok($crate::select_input_type!($time_ty $(, $time_input)?));
}
)*
$(
$(#[$meta])?
_ if <$ty as sqlx_core::types::Type<$database>>::compatible(info) => Some($crate::select_input_type!($ty $(, $input)?)),
if <$time_ty as sqlx_core::types::Type<$database>>::compatible(info) {
if cfg!(feature = "chrono") {
return Err($crate::type_checking::Error::AmbiguousDateTimeType {
fallback: $crate::select_input_type!($time_ty $(, $time_input)?),
});
}
return Ok($crate::select_input_type!($time_ty $(, $time_input)?));
}
)*
_ => None
}
#[cfg(not(feature = "time"))]
if preferred_crates.date_time == DateTimeCrate::Time {
return Err(Error::DateTimeCrateFeatureNotEnabled);
}
#[cfg(feature = "chrono")]
if matches!(preferred_crates.date_time, DateTimeCrate::Chrono | DateTimeCrate::Inferred) {
$(
if <$chrono_ty as sqlx_core::types::Type<$database>>::type_info() == *info {
return Ok($crate::select_input_type!($chrono_ty $(, $chrono_input)?));
}
)*
$(
if <$chrono_ty as sqlx_core::types::Type<$database>>::compatible(info) {
return Ok($crate::select_input_type!($chrono_ty $(, $chrono_input)?));
}
)*
}
#[cfg(not(feature = "chrono"))]
if preferred_crates.date_time == DateTimeCrate::Chrono {
return Err(Error::DateTimeCrateFeatureNotEnabled);
}
// Check `macros.preferred-crates.numeric`
//
// Due to legacy reasons, `bigdecimal` takes precedent over `rust_decimal` if
// both are enabled.
// ----------------------------------------
#[cfg(feature = "bigdecimal")]
if matches!(preferred_crates.numeric, NumericCrate::BigDecimal | NumericCrate::Inferred) {
$(
if <$bigdecimal_ty as sqlx_core::types::Type<$database>>::type_info() == *info {
if cfg!(feature = "rust_decimal") {
return Err($crate::type_checking::Error::AmbiguousNumericType {
fallback: $crate::select_input_type!($bigdecimal_ty $(, $bigdecimal_input)?),
});
}
return Ok($crate::select_input_type!($bigdecimal_ty $(, $bigdecimal_input)?));
}
)*
$(
if <$bigdecimal_ty as sqlx_core::types::Type<$database>>::compatible(info) {
if cfg!(feature = "rust_decimal") {
return Err($crate::type_checking::Error::AmbiguousNumericType {
fallback: $crate::select_input_type!($bigdecimal_ty $(, $bigdecimal_input)?),
});
}
return Ok($crate::select_input_type!($bigdecimal_ty $(, $bigdecimal_input)?));
}
)*
}
#[cfg(not(feature = "bigdecimal"))]
if preferred_crates.numeric == NumericCrate::BigDecimal {
return Err(Error::NumericCrateFeatureNotEnabled);
}
#[cfg(feature = "rust_decimal")]
if matches!(preferred_crates.numeric, NumericCrate::RustDecimal | NumericCrate::Inferred) {
$(
if <$rust_decimal_ty as sqlx_core::types::Type<$database>>::type_info() == *info {
return Ok($crate::select_input_type!($rust_decimal_ty $(, $rust_decimal_input)?));
}
)*
$(
if <$rust_decimal_ty as sqlx_core::types::Type<$database>>::compatible(info) {
return Ok($crate::select_input_type!($rust_decimal_ty $(, $rust_decimal_input)?));
}
)*
}
#[cfg(not(feature = "rust_decimal"))]
if preferred_crates.numeric == NumericCrate::RustDecimal {
return Err(Error::NumericCrateFeatureNotEnabled);
}
Err(Error::NoMappingFound)
}
fn return_type_for_id(info: &Self::TypeInfo) -> Option<&'static str> {
match () {
fn return_type_for_id(
info: &Self::TypeInfo,
preferred_crates: &$crate::config::macros::PreferredCrates,
) -> Result<&'static str, $crate::type_checking::Error> {
use $crate::config::macros::{DateTimeCrate, NumericCrate};
use $crate::type_checking::Error;
// Check non-special types
// ---------------------
$(
$(#[$meta])?
if <$ty as sqlx_core::types::Type<$database>>::type_info() == *info {
return Ok(stringify!($ty));
}
)*
$(
$(#[$meta])?
if <$ty as sqlx_core::types::Type<$database>>::compatible(info) {
return Ok(stringify!($ty));
}
)*
// Check `macros.preferred-crates.date-time`
//
// Due to legacy reasons, `time` takes precedent over `chrono` if both are enabled.
// Any crates added later should be _lower_ priority than `chrono` to avoid breakages.
// ----------------------------------------
#[cfg(feature = "time")]
if matches!(preferred_crates.date_time, DateTimeCrate::Time | DateTimeCrate::Inferred) {
$(
$(#[$meta])?
_ if <$ty as sqlx_core::types::Type<$database>>::type_info() == *info => Some(stringify!($ty)),
if <$time_ty as sqlx_core::types::Type<$database>>::type_info() == *info {
if cfg!(feature = "chrono") {
return Err($crate::type_checking::Error::AmbiguousDateTimeType {
fallback: stringify!($time_ty),
});
}
return Ok(stringify!($time_ty));
}
)*
$(
$(#[$meta])?
_ if <$ty as sqlx_core::types::Type<$database>>::compatible(info) => Some(stringify!($ty)),
if <$time_ty as sqlx_core::types::Type<$database>>::compatible(info) {
if cfg!(feature = "chrono") {
return Err($crate::type_checking::Error::AmbiguousDateTimeType {
fallback: stringify!($time_ty),
});
}
return Ok(stringify!($time_ty));
}
)*
_ => None
}
#[cfg(not(feature = "time"))]
if preferred_crates.date_time == DateTimeCrate::Time {
return Err(Error::DateTimeCrateFeatureNotEnabled);
}
#[cfg(feature = "chrono")]
if matches!(preferred_crates.date_time, DateTimeCrate::Chrono | DateTimeCrate::Inferred) {
$(
if <$chrono_ty as sqlx_core::types::Type<$database>>::type_info() == *info {
return Ok(stringify!($chrono_ty));
}
)*
$(
if <$chrono_ty as sqlx_core::types::Type<$database>>::compatible(info) {
return Ok(stringify!($chrono_ty));
}
)*
}
#[cfg(not(feature = "chrono"))]
if preferred_crates.date_time == DateTimeCrate::Chrono {
return Err(Error::DateTimeCrateFeatureNotEnabled);
}
// Check `macros.preferred-crates.numeric`
//
// Due to legacy reasons, `bigdecimal` takes precedent over `rust_decimal` if
// both are enabled.
// ----------------------------------------
#[cfg(feature = "bigdecimal")]
if matches!(preferred_crates.numeric, NumericCrate::BigDecimal | NumericCrate::Inferred) {
$(
if <$bigdecimal_ty as sqlx_core::types::Type<$database>>::type_info() == *info {
if cfg!(feature = "rust_decimal") {
return Err($crate::type_checking::Error::AmbiguousNumericType {
fallback: stringify!($bigdecimal_ty),
});
}
return Ok(stringify!($bigdecimal_ty));
}
)*
$(
if <$bigdecimal_ty as sqlx_core::types::Type<$database>>::compatible(info) {
if cfg!(feature = "rust_decimal") {
return Err($crate::type_checking::Error::AmbiguousNumericType {
fallback: stringify!($bigdecimal_ty),
});
}
return Ok(stringify!($bigdecimal_ty));
}
)*
}
#[cfg(not(feature = "bigdecimal"))]
if preferred_crates.numeric == NumericCrate::BigDecimal {
return Err(Error::NumericCrateFeatureNotEnabled);
}
#[cfg(feature = "rust_decimal")]
if matches!(preferred_crates.numeric, NumericCrate::RustDecimal | NumericCrate::Inferred) {
$(
if <$rust_decimal_ty as sqlx_core::types::Type<$database>>::type_info() == *info {
return Ok($crate::select_input_type!($rust_decimal_ty $(, $rust_decimal_input)?));
}
)*
$(
if <$rust_decimal_ty as sqlx_core::types::Type<$database>>::compatible(info) {
return Ok($crate::select_input_type!($rust_decimal_ty $(, $rust_decimal_input)?));
}
)*
}
#[cfg(not(feature = "rust_decimal"))]
if preferred_crates.numeric == NumericCrate::RustDecimal {
return Err(Error::NumericCrateFeatureNotEnabled);
}
Err(Error::NoMappingFound)
}
fn get_feature_gate($ty_info: &Self::TypeInfo) -> Option<&'static str> {
@@ -181,13 +472,50 @@ macro_rules! impl_type_checking {
let info = value.type_info();
match () {
#[cfg(feature = "time")]
{
$(
$(#[$meta])?
_ if <$ty as sqlx_core::types::Type<$database>>::compatible(&info) => $crate::type_checking::FmtValue::debug::<$ty>(value),
if <$time_ty as sqlx_core::types::Type<$database>>::compatible(&info) {
return $crate::type_checking::FmtValue::debug::<$time_ty>(value);
}
)*
_ => $crate::type_checking::FmtValue::unknown(value),
}
#[cfg(feature = "chrono")]
{
$(
if <$chrono_ty as sqlx_core::types::Type<$database>>::compatible(&info) {
return $crate::type_checking::FmtValue::debug::<$chrono_ty>(value);
}
)*
}
#[cfg(feature = "bigdecimal")]
{
$(
if <$bigdecimal_ty as sqlx_core::types::Type<$database>>::compatible(&info) {
return $crate::type_checking::FmtValue::debug::<$bigdecimal_ty>(value);
}
)*
}
#[cfg(feature = "rust_decimal")]
{
$(
if <$rust_decimal_ty as sqlx_core::types::Type<$database>>::compatible(&info) {
return $crate::type_checking::FmtValue::debug::<$rust_decimal_ty>(value);
}
)*
}
$(
$(#[$meta])?
if <$ty as sqlx_core::types::Type<$database>>::compatible(&info) {
return $crate::type_checking::FmtValue::debug::<$ty>(value);
}
)*
$crate::type_checking::FmtValue::unknown(value)
}
}
};