Merge remote-tracking branch 'utter-step/time-rs'

Ryan Leckey 2020-03-21 01:24:48 -07:00
commit 4a98a51a19
18 changed files with 1209 additions and 22 deletions


@ -39,33 +39,33 @@ jobs:
# check w/deny warnings in sqlx-core: async-std
- working-directory: sqlx-core
run: cargo rustc --no-default-features --features 'bigdecimal_bigint ipnetwork chrono uuid postgres mysql tls runtime-async-std' -- -D warnings --emit=metadata
run: cargo rustc --no-default-features --features 'bigdecimal_bigint ipnetwork chrono time uuid postgres mysql tls runtime-async-std' -- -D warnings --emit=metadata
# check w/deny warnings in sqlx-core: tokio
# `cargo rustc -p sqlx-core` ignores `--no-default-features` and builds with `runtime-async-std` anyway
# https://github.com/rust-lang/cargo/issues/5364
- working-directory: sqlx-core
run: cargo rustc --no-default-features --features 'bigdecimal_bigint ipnetwork chrono uuid postgres mysql tls runtime-tokio' -- -D warnings --emit=metadata
run: cargo rustc --no-default-features --features 'bigdecimal_bigint ipnetwork chrono time uuid postgres mysql tls runtime-tokio' -- -D warnings --emit=metadata
# check w/deny warnings: async-std
- run: cargo rustc --no-default-features --features 'bigdecimal ipnetwork chrono uuid postgres mysql macros tls runtime-async-std' -- -D warnings --emit=metadata
- run: cargo rustc --no-default-features --features 'bigdecimal ipnetwork chrono time uuid postgres mysql macros tls runtime-async-std' -- -D warnings --emit=metadata
# check w/deny warnings: tokio
- run: cargo rustc --no-default-features --features 'bigdecimal ipnetwork chrono uuid postgres mysql macros tls runtime-tokio' -- -D warnings --emit=metadata
- run: cargo rustc --no-default-features --features 'bigdecimal ipnetwork chrono time uuid postgres mysql macros tls runtime-tokio' -- -D warnings --emit=metadata
# unit test: async-std
- run: cargo test --manifest-path sqlx-core/Cargo.toml --no-default-features --features 'bigdecimal ipnetwork chrono uuid postgres mysql tls runtime-async-std'
- run: cargo test --manifest-path sqlx-core/Cargo.toml --no-default-features --features 'bigdecimal ipnetwork chrono time uuid postgres mysql tls runtime-async-std'
# unit test: tokio
- run: cargo test --manifest-path sqlx-core/Cargo.toml --no-default-features --features 'bigdecimal ipnetwork chrono uuid postgres mysql tls runtime-tokio'
- run: cargo test --manifest-path sqlx-core/Cargo.toml --no-default-features --features 'bigdecimal ipnetwork chrono time uuid postgres mysql tls runtime-tokio'
# integration test: sqlite + async-std
- run: cargo test --no-default-features --features 'runtime-async-std sqlite macros uuid chrono tls'
- run: cargo test --no-default-features --features 'runtime-async-std sqlite macros tls'
env:
DATABASE_URL: "sqlite://tests/fixtures/sqlite.sqlite"
# integration test: sqlite + tokio
- run: cargo test --no-default-features --features 'runtime-tokio sqlite macros uuid chrono tls'
- run: cargo test --no-default-features --features 'runtime-tokio sqlite macros tls'
env:
DATABASE_URL: "sqlite://tests/fixtures/sqlite.sqlite"
@ -120,16 +120,36 @@ jobs:
# -----------------------------------------------------
# integration test: async-std
# integration test: async-std (chrono)
- run: cargo test --no-default-features --features 'runtime-async-std postgres macros uuid chrono bigdecimal ipnetwork tls'
env:
DATABASE_URL: postgres://postgres:postgres@localhost:${{ job.services.postgres.ports[5432] }}/postgres
# integration test: tokio
# integration test: async-std (time)
- run: cargo test --no-default-features --features 'runtime-async-std postgres macros uuid time bigdecimal ipnetwork tls'
env:
DATABASE_URL: postgres://postgres:postgres@localhost:${{ job.services.postgres.ports[5432] }}/postgres
# integration test: async-std (time + chrono)
- run: cargo test --no-default-features --features 'runtime-async-std postgres macros uuid chrono time bigdecimal ipnetwork tls'
env:
DATABASE_URL: postgres://postgres:postgres@localhost:${{ job.services.postgres.ports[5432] }}/postgres
# integration test: tokio (chrono)
- run: cargo test --no-default-features --features 'runtime-tokio postgres macros uuid chrono bigdecimal ipnetwork tls'
env:
DATABASE_URL: postgres://postgres:postgres@localhost:${{ job.services.postgres.ports[5432] }}/postgres
# integration test: tokio (time)
- run: cargo test --no-default-features --features 'runtime-tokio postgres macros uuid time bigdecimal ipnetwork tls'
env:
DATABASE_URL: postgres://postgres:postgres@localhost:${{ job.services.postgres.ports[5432] }}/postgres
# integration test: tokio (time + chrono)
- run: cargo test --no-default-features --features 'runtime-tokio postgres macros uuid chrono time bigdecimal ipnetwork tls'
env:
DATABASE_URL: postgres://postgres:postgres@localhost:${{ job.services.postgres.ports[5432] }}/postgres
# UI feature gate tests: async-std
- run: cargo test --no-default-features --features 'runtime-async-std postgres macros bigdecimal ipnetwork tls'
env:
@ -182,20 +202,48 @@ jobs:
# -----------------------------------------------------
# integration test: async-std
# integration test: async-std (chrono)
- run: cargo test --no-default-features --features 'runtime-async-std mysql macros uuid chrono tls'
env:
# pass the path to the CA that the MySQL service generated
# NOTE: Github Actions' YML parser doesn't handle multiline strings correctly
DATABASE_URL: mysql://root:password@localhost:${{ job.services.mysql.ports[3306] }}/sqlx?ssl-mode=VERIFY_CA&ssl-ca=%2Fdata%2Fmysql%2Fca.pem
# integration test: tokio
# integration test: async-std (time)
- run: cargo test --no-default-features --features 'runtime-async-std mysql macros uuid time tls'
env:
# pass the path to the CA that the MySQL service generated
# NOTE: Github Actions' YML parser doesn't handle multiline strings correctly
DATABASE_URL: mysql://root:password@localhost:${{ job.services.mysql.ports[3306] }}/sqlx?ssl-mode=VERIFY_CA&ssl-ca=%2Fdata%2Fmysql%2Fca.pem
# integration test: async-std (time + chrono)
- run: cargo test --no-default-features --features 'runtime-async-std mysql macros uuid time chrono tls'
env:
# pass the path to the CA that the MySQL service generated
# NOTE: Github Actions' YML parser doesn't handle multiline strings correctly
DATABASE_URL: mysql://root:password@localhost:${{ job.services.mysql.ports[3306] }}/sqlx?ssl-mode=VERIFY_CA&ssl-ca=%2Fdata%2Fmysql%2Fca.pem
# integration test: tokio (chrono)
- run: cargo test --no-default-features --features 'runtime-tokio mysql macros uuid chrono tls'
env:
# pass the path to the CA that the MySQL service generated
# NOTE: Github Actions' YML parser doesn't handle multiline strings correctly
DATABASE_URL: mysql://root:password@localhost:${{ job.services.mysql.ports[3306] }}/sqlx?ssl-mode=VERIFY_CA&ssl-ca=%2Fdata%2Fmysql%2Fca.pem
# integration test: tokio (time)
- run: cargo test --no-default-features --features 'runtime-tokio mysql macros uuid time tls'
env:
# pass the path to the CA that the MySQL service generated
# NOTE: Github Actions' YML parser doesn't handle multiline strings correctly
DATABASE_URL: mysql://root:password@localhost:${{ job.services.mysql.ports[3306] }}/sqlx?ssl-mode=VERIFY_CA&ssl-ca=%2Fdata%2Fmysql%2Fca.pem
# integration test: tokio (time + chrono)
- run: cargo test --no-default-features --features 'runtime-tokio mysql macros uuid chrono time tls'
env:
# pass the path to the CA that the MySQL service generated
# NOTE: Github Actions' YML parser doesn't handle multiline strings correctly
DATABASE_URL: mysql://root:password@localhost:${{ job.services.mysql.ports[3306] }}/sqlx?ssl-mode=VERIFY_CA&ssl-ca=%2Fdata%2Fmysql%2Fca.pem
# UI feature gate tests: async-std
- run: cargo test --no-default-features --features 'runtime-async-std mysql macros tls' --test ui-tests
env:
@ -251,16 +299,36 @@ jobs:
# -----------------------------------------------------
# integration test: async-std
# integration test: async-std (chrono)
- run: cargo test --no-default-features --features 'runtime-async-std mysql macros uuid chrono tls'
env:
DATABASE_URL: mariadb://root:password@localhost:${{ job.services.mariadb.ports[3306] }}/sqlx
# integration test: tokio
# integration test: async-std (time)
- run: cargo test --no-default-features --features 'runtime-async-std mysql macros uuid time tls'
env:
DATABASE_URL: mariadb://root:password@localhost:${{ job.services.mariadb.ports[3306] }}/sqlx
# integration test: async-std (time + chrono)
- run: cargo test --no-default-features --features 'runtime-async-std mysql macros uuid time chrono tls'
env:
DATABASE_URL: mariadb://root:password@localhost:${{ job.services.mariadb.ports[3306] }}/sqlx
# integration test: tokio (chrono)
- run: cargo test --no-default-features --features 'runtime-tokio mysql macros uuid chrono tls'
env:
DATABASE_URL: mariadb://root:password@localhost:${{ job.services.mariadb.ports[3306] }}/sqlx
# integration test: tokio (time)
- run: cargo test --no-default-features --features 'runtime-tokio mysql macros uuid time tls'
env:
DATABASE_URL: mariadb://root:password@localhost:${{ job.services.mariadb.ports[3306] }}/sqlx
# integration test: tokio (time + chrono)
- run: cargo test --no-default-features --features 'runtime-tokio mysql macros uuid time chrono tls'
env:
DATABASE_URL: mariadb://root:password@localhost:${{ job.services.mariadb.ports[3306] }}/sqlx
# UI feature gate tests: async-std
- run: cargo test --no-default-features --features 'runtime-async-std mysql macros tls'
env:

Cargo.lock

@ -295,7 +295,7 @@ checksum = "80094f509cf8b5ae86a4966a39b3ff66cd7e2a3e594accec3743ff3fabeab5b2"
dependencies = [
"num-integer",
"num-traits",
"time",
"time 0.1.42",
]
[[package]]
@ -788,7 +788,7 @@ dependencies = [
"log",
"net2",
"rustc_version",
"time",
"time 0.1.42",
"tokio 0.1.22",
"tokio-buf",
"tokio-executor",
@ -1461,6 +1461,17 @@ dependencies = [
"semver",
]
[[package]]
name = "rustversion"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3bba175698996010c4f6dce5e7f173b6eb781fce25d2cfc45e27091ce0b79f6"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "ryu"
version = "1.0.3"
@ -1728,6 +1739,7 @@ dependencies = [
"serde_json",
"sha-1",
"sha2",
"time 0.2.9",
"tokio 0.2.13",
"url",
"uuid",
@ -1803,6 +1815,12 @@ dependencies = [
"tokio 0.2.13",
]
[[package]]
name = "standback"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4edf667ea8f60afc06d6aeec079d20d5800351109addec1faea678a8663da4e1"
[[package]]
name = "string"
version = "0.2.1"
@ -1962,6 +1980,40 @@ dependencies = [
"winapi 0.3.8",
]
[[package]]
name = "time"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6329a7835505d46f5f3a9a2c237f8d6bf5ca6f0015decb3698ba57fcdbb609ba"
dependencies = [
"cfg-if",
"rustversion",
"standback",
"time-macros",
]
[[package]]
name = "time-macros"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ae9b6e9f095bc105e183e3cd493d72579be3181ad4004fceb01adbe9eecab2d"
dependencies = [
"proc-macro-hack",
"time-macros-impl",
]
[[package]]
name = "time-macros-impl"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e987cfe0537f575b5fc99909de6185f6c19c3ad8889e2275e686a873d0869ba1"
dependencies = [
"proc-macro-hack",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "todos-postgres"
version = "0.1.0"


@ -28,7 +28,7 @@ authors = [
]
[package.metadata.docs.rs]
features = [ "tls", "postgres", "mysql", "uuid", "chrono", "json" ]
features = [ "tls", "postgres", "mysql", "uuid", "chrono", "json", "time" ]
rustdoc-args = ["--cfg", "docsrs"]
[features]
@ -51,6 +51,7 @@ chrono = [ "sqlx-core/chrono", "sqlx-macros/chrono" ]
ipnetwork = [ "sqlx-core/ipnetwork", "sqlx-macros/ipnetwork" ]
uuid = [ "sqlx-core/uuid", "sqlx-macros/uuid" ]
json = [ "sqlx-core/json" ]
time = [ "sqlx-core/time", "sqlx-macros/time" ]
[dependencies]
sqlx-core = { version = "0.3.0-alpha.1", path = "sqlx-core", default-features = false }


@ -84,6 +84,8 @@ sqlx = { version = "0.2", default-features = false, features = [ "runtime-tokio"
* `chrono`: Add support for date and time types from `chrono`.
* `time`: Add support for date and time types from the `time` crate (an alternative to `chrono`; preferred by the `query!` macro if both are enabled; see the sketch below)
* `tls`: Add support for TLS connections.
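For illustration only (not part of this diff): a minimal sketch of reading a timestamp as a `time` type, following the connection and query pattern of the integration tests added below. The `SELECT now()` query and the surrounding function are assumptions, not an official sqlx example.

use sqlx::types::time::OffsetDateTime;
use sqlx::{Connection, PgConnection, Row};

async fn server_time() -> anyhow::Result<()> {
    // connect the same way the new integration tests do
    let mut conn = PgConnection::open(dotenv::var("DATABASE_URL")?).await?;
    // with the `time` feature enabled, TIMESTAMPTZ decodes to OffsetDateTime
    let row = sqlx::query("SELECT now()").fetch_one(&mut conn).await?;
    let now: OffsetDateTime = row.get(0);
    println!("server time: {}", now);
    Ok(())
}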
## Examples


@ -58,6 +58,7 @@ tokio = { version = "0.2.13", default-features = false, features = [ "dns", "fs"
url = { version = "2.1.1", default-features = false }
uuid = { version = "0.8.1", default-features = false, optional = true, features = [ "std" ] }
serde = { version = "1.0", features = [ "derive" ], optional = true }
time = { version = "0.2.7", default-features = false, optional = true }
serde_json = { version = "1.0", features = [ "raw_value" ], optional = true }
# <https://github.com/jgallagher/rusqlite/tree/master/libsqlite3-sys>


@ -8,6 +8,9 @@ mod uint;
#[cfg(feature = "chrono")]
mod chrono;
#[cfg(feature = "time")]
mod time;
use std::fmt::{self, Debug, Display};
use crate::decode::Decode;


@ -0,0 +1,259 @@
use std::convert::TryFrom;
use byteorder::{ByteOrder, LittleEndian};
use time::{Date, OffsetDateTime, PrimitiveDateTime, Time, UtcOffset};
use crate::decode::{Decode, DecodeError};
use crate::encode::Encode;
use crate::io::{Buf, BufMut};
use crate::mysql::protocol::TypeId;
use crate::mysql::types::MySqlTypeInfo;
use crate::mysql::MySql;
use crate::types::HasSqlType;
impl HasSqlType<OffsetDateTime> for MySql {
fn type_info() -> MySqlTypeInfo {
MySqlTypeInfo::new(TypeId::TIMESTAMP)
}
}
impl Encode<MySql> for OffsetDateTime {
fn encode(&self, buf: &mut Vec<u8>) {
let utc_dt = self.to_offset(UtcOffset::UTC);
let primitive_dt = PrimitiveDateTime::new(utc_dt.date(), utc_dt.time());
Encode::<MySql>::encode(&primitive_dt, buf);
}
}
impl Decode<MySql> for OffsetDateTime {
fn decode(buf: &[u8]) -> Result<Self, DecodeError> {
let primitive: PrimitiveDateTime = Decode::<MySql>::decode(buf)?;
Ok(primitive.assume_utc())
}
}
impl HasSqlType<Time> for MySql {
fn type_info() -> MySqlTypeInfo {
MySqlTypeInfo::new(TypeId::TIME)
}
}
impl Encode<MySql> for Time {
fn encode(&self, buf: &mut Vec<u8>) {
let len = Encode::<MySql>::size_hint(self) - 1;
buf.push(len as u8);
// Time is not negative
buf.push(0);
// "date on 4 bytes little-endian format" (?)
// https://mariadb.com/kb/en/resultset-row/#timestamp-binary-encoding
buf.advance(4);
encode_time(self, len > 9, buf);
}
fn size_hint(&self) -> usize {
if self.nanosecond() == 0 {
// if micro_seconds is 0, length is 8 and micro_seconds is not sent
9
} else {
// otherwise length is 12
13
}
}
}
impl Decode<MySql> for Time {
fn decode(mut buf: &[u8]) -> Result<Self, DecodeError> {
// data length, expecting 8 or 12 (fractional seconds)
let len = buf.get_u8()?;
// is negative : int<1>
let is_negative = buf.get_u8()?;
assert_eq!(is_negative, 0, "Negative dates/times are not supported");
// "date on 4 bytes little-endian format" (?)
// https://mariadb.com/kb/en/resultset-row/#timestamp-binary-encoding
buf.advance(4);
decode_time(len - 5, buf)
}
}
impl HasSqlType<Date> for MySql {
fn type_info() -> MySqlTypeInfo {
MySqlTypeInfo::new(TypeId::DATE)
}
}
impl Encode<MySql> for Date {
fn encode(&self, buf: &mut Vec<u8>) {
buf.push(4);
encode_date(self, buf);
}
fn size_hint(&self) -> usize {
5
}
}
impl Decode<MySql> for Date {
fn decode(buf: &[u8]) -> Result<Self, DecodeError> {
decode_date(&buf[1..])
}
}
impl HasSqlType<PrimitiveDateTime> for MySql {
fn type_info() -> MySqlTypeInfo {
MySqlTypeInfo::new(TypeId::DATETIME)
}
}
impl Encode<MySql> for PrimitiveDateTime {
fn encode(&self, buf: &mut Vec<u8>) {
let len = Encode::<MySql>::size_hint(self) - 1;
buf.push(len as u8);
encode_date(&self.date(), buf);
if len > 4 {
encode_time(&self.time(), len > 8, buf);
}
}
fn size_hint(&self) -> usize {
// to save space the packet can be compressed:
match (self.hour(), self.minute(), self.second(), self.nanosecond()) {
// if hour, minutes, seconds and micro_seconds are all 0,
// length is 4 and no other field is sent
(0, 0, 0, 0) => 5,
// if micro_seconds is 0, length is 7
// and micro_seconds is not sent
(_, _, _, 0) => 8,
// otherwise length is 11
(_, _, _, _) => 12,
}
}
}
impl Decode<MySql> for PrimitiveDateTime {
fn decode(buf: &[u8]) -> Result<Self, DecodeError> {
let len = buf[0];
let date = decode_date(&buf[1..])?;
let dt = if len > 4 {
date.with_time(decode_time(len - 4, &buf[5..])?)
} else {
date.midnight()
};
Ok(dt)
}
}
fn encode_date(date: &Date, buf: &mut Vec<u8>) {
// MySQL supports years from 1000 - 9999
let year = u16::try_from(date.year())
.unwrap_or_else(|_| panic!("Date out of range for Mysql: {}", date));
buf.extend_from_slice(&year.to_le_bytes());
buf.push(date.month());
buf.push(date.day());
}
fn decode_date(buf: &[u8]) -> Result<Date, DecodeError> {
Date::try_from_ymd(
LittleEndian::read_u16(buf) as i32,
buf[2] as u8,
buf[3] as u8,
)
.map_err(|e| DecodeError::Message(Box::new(format!("Error while decoding Date: {}", e))))
}
fn encode_time(time: &Time, include_micros: bool, buf: &mut Vec<u8>) {
buf.push(time.hour());
buf.push(time.minute());
buf.push(time.second());
if include_micros {
buf.put_u32::<LittleEndian>((time.nanosecond() / 1000) as u32);
}
}
fn decode_time(len: u8, mut buf: &[u8]) -> Result<Time, DecodeError> {
let hour = buf.get_u8()?;
let minute = buf.get_u8()?;
let seconds = buf.get_u8()?;
let micros = if len > 3 {
// microseconds : int<EOF>
buf.get_uint::<LittleEndian>(buf.len())?
} else {
0
};
Time::try_from_hms_micro(hour, minute, seconds, micros as u32)
.map_err(|e| DecodeError::Message(Box::new(format!("Time out of range for MySQL: {}", e))))
}
#[cfg(test)]
use time::{date, time};
#[test]
fn test_encode_date_time() {
let mut buf = Vec::new();
// test values from https://dev.mysql.com/doc/internals/en/binary-protocol-value.html
let date = PrimitiveDateTime::new(date!(2010 - 10 - 17), time!(19:27:30.000001));
Encode::<MySql>::encode(&date, &mut buf);
assert_eq!(*buf, [11, 218, 7, 10, 17, 19, 27, 30, 1, 0, 0, 0]);
buf.clear();
let date = PrimitiveDateTime::new(date!(2010 - 10 - 17), time!(19:27:30));
Encode::<MySql>::encode(&date, &mut buf);
assert_eq!(*buf, [7, 218, 7, 10, 17, 19, 27, 30]);
buf.clear();
let date = PrimitiveDateTime::new(date!(2010 - 10 - 17), time!(00:00:00));
Encode::<MySql>::encode(&date, &mut buf);
assert_eq!(*buf, [4, 218, 7, 10, 17]);
}
#[test]
fn test_decode_date_time() {
// test values from https://dev.mysql.com/doc/internals/en/binary-protocol-value.html
let buf = [11, 218, 7, 10, 17, 19, 27, 30, 1, 0, 0, 0];
let date1 = <PrimitiveDateTime as Decode<MySql>>::decode(&buf).unwrap();
assert_eq!(date1.to_string(), "2010-10-17 19:27:30.000001");
let buf = [7, 218, 7, 10, 17, 19, 27, 30];
let date2 = <PrimitiveDateTime as Decode<MySql>>::decode(&buf).unwrap();
assert_eq!(date2.to_string(), "2010-10-17 19:27:30");
let buf = [4, 218, 7, 10, 17];
let date3 = <PrimitiveDateTime as Decode<MySql>>::decode(&buf).unwrap();
assert_eq!(date3.to_string(), "2010-10-17 0:00");
}
#[test]
fn test_encode_date() {
let mut buf = Vec::new();
let date: Date = date!(2010 - 10 - 17);
Encode::<MySql>::encode(&date, &mut buf);
assert_eq!(*buf, [4, 218, 7, 10, 17]);
}
#[test]
fn test_decode_date() {
let buf = [4, 218, 7, 10, 17];
let date = <Date as Decode<MySql>>::decode(&buf).unwrap();
assert_eq!(date, date!(2010 - 10 - 17));
}


@ -237,6 +237,49 @@ fn postgres_epoch() -> DateTime<Utc> {
Utc.ymd(2000, 1, 1).and_hms(0, 0, 0)
}
#[test]
fn test_encode_time() {
let mut buf = Vec::new();
Encode::<Postgres>::encode(&NaiveTime::from_hms(0, 0, 0), &mut buf);
assert_eq!(buf, [0; 8]);
buf.clear();
// one second
Encode::<Postgres>::encode(&NaiveTime::from_hms(0, 0, 1), &mut buf);
assert_eq!(buf, 1_000_000i64.to_be_bytes());
buf.clear();
// two hours
Encode::<Postgres>::encode(&NaiveTime::from_hms(2, 0, 0), &mut buf);
let expected = 1_000_000i64 * 60 * 60 * 2;
assert_eq!(buf, expected.to_be_bytes());
buf.clear();
// 3:14:15.000001
Encode::<Postgres>::encode(&NaiveTime::from_hms_micro(3, 14, 15, 1), &mut buf);
let expected = 1_000_000i64 * 60 * 60 * 3 + 1_000_000i64 * 60 * 14 + 1_000_000i64 * 15 + 1;
assert_eq!(buf, expected.to_be_bytes());
buf.clear();
}
#[test]
fn test_decode_time() {
let buf = [0u8; 8];
let time: NaiveTime = Decode::<Postgres>::decode(&buf).unwrap();
assert_eq!(time, NaiveTime::from_hms(0, 0, 0),);
// half an hour
let buf = (1_000_000i64 * 60 * 30).to_be_bytes();
let time: NaiveTime = Decode::<Postgres>::decode(&buf).unwrap();
assert_eq!(time, NaiveTime::from_hms(0, 30, 0),);
// 12:53:05.125305
let buf = (1_000_000i64 * 60 * 60 * 12 + 1_000_000i64 * 60 * 53 + 1_000_000i64 * 5 + 125305)
.to_be_bytes();
let time: NaiveTime = Decode::<Postgres>::decode(&buf).unwrap();
assert_eq!(time, NaiveTime::from_hms_micro(12, 53, 5, 125305),);
}
#[test]
fn test_encode_datetime() {
let mut buf = Vec::new();


@ -77,6 +77,9 @@ mod bigdecimal;
#[cfg(feature = "chrono")]
mod chrono;
#[cfg(feature = "time")]
mod time;
#[cfg(feature = "uuid")]
mod uuid;


@ -0,0 +1,345 @@
use std::convert::TryInto;
use std::mem;
use time::{date, offset, Date, NumericalDuration, OffsetDateTime, PrimitiveDateTime, Time};
use crate::decode::{Decode, DecodeError};
use crate::encode::Encode;
use crate::postgres::protocol::TypeId;
use crate::postgres::types::PgTypeInfo;
use crate::postgres::Postgres;
use crate::types::HasSqlType;
const POSTGRES_EPOCH: PrimitiveDateTime = date!(2000 - 1 - 1).midnight();
impl HasSqlType<Time> for Postgres {
fn type_info() -> PgTypeInfo {
PgTypeInfo::new(TypeId::TIME)
}
}
impl HasSqlType<Date> for Postgres {
fn type_info() -> PgTypeInfo {
PgTypeInfo::new(TypeId::DATE)
}
}
impl HasSqlType<PrimitiveDateTime> for Postgres {
fn type_info() -> PgTypeInfo {
PgTypeInfo::new(TypeId::TIMESTAMP)
}
}
impl HasSqlType<OffsetDateTime> for Postgres {
fn type_info() -> PgTypeInfo {
PgTypeInfo::new(TypeId::TIMESTAMPTZ)
}
}
impl HasSqlType<[Time]> for Postgres {
fn type_info() -> PgTypeInfo {
PgTypeInfo::new(TypeId::ARRAY_TIME)
}
}
impl HasSqlType<[Date]> for Postgres {
fn type_info() -> PgTypeInfo {
PgTypeInfo::new(TypeId::ARRAY_DATE)
}
}
impl HasSqlType<[PrimitiveDateTime]> for Postgres {
fn type_info() -> PgTypeInfo {
PgTypeInfo::new(TypeId::ARRAY_TIMESTAMP)
}
}
impl HasSqlType<[OffsetDateTime]> for Postgres {
fn type_info() -> PgTypeInfo {
PgTypeInfo::new(TypeId::ARRAY_TIMESTAMPTZ)
}
}
fn microseconds_since_midnight(time: Time) -> i64 {
time.hour() as i64 * 60 * 60 * 1_000_000
+ time.minute() as i64 * 60 * 1_000_000
+ time.second() as i64 * 1_000_000
+ time.microsecond() as i64
}
fn from_microseconds_since_midnight(mut microsecond: u64) -> Result<Time, DecodeError> {
#![allow(clippy::cast_possible_truncation)]
microsecond %= 86_400 * 1_000_000;
Time::try_from_hms_micro(
(microsecond / 1_000_000 / 60 / 60) as u8,
(microsecond / 1_000_000 / 60 % 60) as u8,
(microsecond / 1_000_000 % 60) as u8,
(microsecond % 1_000_000) as u32,
)
.map_err(|e| DecodeError::Message(Box::new(format!("Time out of range for Postgres: {}", e))))
}
impl Decode<Postgres> for Time {
fn decode(raw: &[u8]) -> Result<Self, DecodeError> {
let micros: i64 = Decode::<Postgres>::decode(raw)?;
from_microseconds_since_midnight(micros as u64)
}
}
impl Encode<Postgres> for Time {
fn encode(&self, buf: &mut Vec<u8>) {
let micros = microseconds_since_midnight(*self);
Encode::<Postgres>::encode(&micros, buf);
}
fn size_hint(&self) -> usize {
mem::size_of::<u64>()
}
}
impl Decode<Postgres> for Date {
fn decode(raw: &[u8]) -> Result<Self, DecodeError> {
let n: i32 = Decode::<Postgres>::decode(raw)?;
Ok(date!(2000 - 1 - 1) + n.days())
}
}
impl Encode<Postgres> for Date {
fn encode(&self, buf: &mut Vec<u8>) {
let days: i32 = (*self - date!(2000 - 1 - 1))
.whole_days()
.try_into()
// TODO: How does Diesel handle this?
.unwrap_or_else(|_| panic!("Date out of range for Postgres: {:?}", self));
Encode::<Postgres>::encode(&days, buf)
}
fn size_hint(&self) -> usize {
mem::size_of::<i32>()
}
}
impl Decode<Postgres> for PrimitiveDateTime {
fn decode(raw: &[u8]) -> Result<Self, DecodeError> {
let n: i64 = Decode::<Postgres>::decode(raw)?;
Ok(POSTGRES_EPOCH + n.microseconds())
}
}
impl Encode<Postgres> for PrimitiveDateTime {
fn encode(&self, buf: &mut Vec<u8>) {
let micros: i64 = (*self - POSTGRES_EPOCH)
.whole_microseconds()
.try_into()
.unwrap_or_else(|_| panic!("PrimitiveDateTime out of range for Postgres: {:?}", self));
Encode::<Postgres>::encode(&micros, buf);
}
fn size_hint(&self) -> usize {
mem::size_of::<i64>()
}
}
impl Decode<Postgres> for OffsetDateTime {
fn decode(raw: &[u8]) -> Result<Self, DecodeError> {
let date_time: PrimitiveDateTime = Decode::<Postgres>::decode(raw)?;
Ok(date_time.assume_utc())
}
}
impl Encode<Postgres> for OffsetDateTime {
fn encode(&self, buf: &mut Vec<u8>) {
let utc_dt = self.to_offset(offset!(UTC));
let primitive_dt = PrimitiveDateTime::new(utc_dt.date(), utc_dt.time());
Encode::<Postgres>::encode(&primitive_dt, buf);
}
fn size_hint(&self) -> usize {
mem::size_of::<i64>()
}
}
#[cfg(test)]
use time::time;
#[test]
fn test_encode_time() {
let mut buf = Vec::new();
Encode::<Postgres>::encode(&time!(0:00), &mut buf);
assert_eq!(buf, [0; 8]);
buf.clear();
// one second
Encode::<Postgres>::encode(&time!(0:00:01), &mut buf);
assert_eq!(buf, 1_000_000i64.to_be_bytes());
buf.clear();
// two hours
Encode::<Postgres>::encode(&time!(2:00), &mut buf);
let expected = 1_000_000i64 * 60 * 60 * 2;
assert_eq!(buf, expected.to_be_bytes());
buf.clear();
// 3:14:15.000001
Encode::<Postgres>::encode(&time!(3:14:15.000001), &mut buf);
let expected = 1_000_000i64 * 60 * 60 * 3 + 1_000_000i64 * 60 * 14 + 1_000_000i64 * 15 + 1;
assert_eq!(buf, expected.to_be_bytes());
buf.clear();
}
#[test]
fn test_decode_time() {
let buf = [0u8; 8];
let time: Time = Decode::<Postgres>::decode(&buf).unwrap();
assert_eq!(time, time!(0:00));
// half an hour
let buf = (1_000_000i64 * 60 * 30).to_be_bytes();
let time: Time = Decode::<Postgres>::decode(&buf).unwrap();
assert_eq!(time, time!(0:30));
// 12:53:05.125305
let buf = (1_000_000i64 * 60 * 60 * 12 + 1_000_000i64 * 60 * 53 + 1_000_000i64 * 5 + 125305)
.to_be_bytes();
let time: Time = Decode::<Postgres>::decode(&buf).unwrap();
assert_eq!(time, time!(12:53:05.125305));
}
#[test]
fn test_encode_datetime() {
let mut buf = Vec::new();
Encode::<Postgres>::encode(&POSTGRES_EPOCH, &mut buf);
assert_eq!(buf, [0; 8]);
buf.clear();
// one hour past epoch
let date = POSTGRES_EPOCH + 1.hours();
Encode::<Postgres>::encode(&date, &mut buf);
assert_eq!(buf, 3_600_000_000i64.to_be_bytes());
buf.clear();
// some random date
let date = PrimitiveDateTime::new(date!(2019 - 12 - 11), time!(11:01:05));
let expected = (date - POSTGRES_EPOCH).whole_microseconds() as i64;
Encode::<Postgres>::encode(&date, &mut buf);
assert_eq!(buf, expected.to_be_bytes());
buf.clear();
}
#[test]
fn test_decode_datetime() {
let buf = [0u8; 8];
let date: PrimitiveDateTime = Decode::<Postgres>::decode(&buf).unwrap();
assert_eq!(
date,
PrimitiveDateTime::new(date!(2000 - 01 - 01), time!(00:00:00))
);
let buf = 3_600_000_000i64.to_be_bytes();
let date: PrimitiveDateTime = Decode::<Postgres>::decode(&buf).unwrap();
assert_eq!(
date,
PrimitiveDateTime::new(date!(2000 - 01 - 01), time!(01:00:00))
);
let buf = 629_377_265_000_000i64.to_be_bytes();
let date: PrimitiveDateTime = Decode::<Postgres>::decode(&buf).unwrap();
assert_eq!(
date,
PrimitiveDateTime::new(date!(2019 - 12 - 11), time!(11:01:05))
);
}
#[test]
fn test_encode_offsetdatetime() {
let mut buf = Vec::new();
Encode::<Postgres>::encode(&POSTGRES_EPOCH.assume_utc(), &mut buf);
assert_eq!(buf, [0; 8]);
buf.clear();
// one hour past epoch in MSK (2 hours before epoch in UTC)
let date = (POSTGRES_EPOCH + 1.hours()).assume_offset(offset!(+3));
Encode::<Postgres>::encode(&date, &mut buf);
assert_eq!(buf, (-7_200_000_000i64).to_be_bytes());
buf.clear();
// some random date in MSK
let date =
PrimitiveDateTime::new(date!(2019 - 12 - 11), time!(11:01:05)).assume_offset(offset!(+3));
let expected = (date - POSTGRES_EPOCH.assume_utc()).whole_microseconds() as i64;
Encode::<Postgres>::encode(&date, &mut buf);
assert_eq!(buf, expected.to_be_bytes());
buf.clear();
}
#[test]
fn test_decode_offsetdatetime() {
let buf = [0u8; 8];
let date: OffsetDateTime = Decode::<Postgres>::decode(&buf).unwrap();
assert_eq!(
date,
PrimitiveDateTime::new(date!(2000 - 01 - 01), time!(00:00:00)).assume_utc()
);
let buf = 3_600_000_000i64.to_be_bytes();
let date: OffsetDateTime = Decode::<Postgres>::decode(&buf).unwrap();
assert_eq!(
date,
PrimitiveDateTime::new(date!(2000 - 01 - 01), time!(01:00:00)).assume_utc()
);
let buf = 629_377_265_000_000i64.to_be_bytes();
let date: OffsetDateTime = Decode::<Postgres>::decode(&buf).unwrap();
assert_eq!(
date,
PrimitiveDateTime::new(date!(2019 - 12 - 11), time!(11:01:05)).assume_utc()
);
}
#[test]
fn test_encode_date() {
let mut buf = Vec::new();
let date = date!(2000 - 1 - 1);
Encode::<Postgres>::encode(&date, &mut buf);
assert_eq!(buf, [0; 4]);
buf.clear();
let date = date!(2001 - 1 - 1);
Encode::<Postgres>::encode(&date, &mut buf);
// 2000 was a leap year
assert_eq!(buf, 366i32.to_be_bytes());
buf.clear();
let date = date!(2019 - 12 - 11);
Encode::<Postgres>::encode(&date, &mut buf);
assert_eq!(buf, 7284i32.to_be_bytes());
buf.clear();
}
#[test]
fn test_decode_date() {
let buf = [0; 4];
let date: Date = Decode::<Postgres>::decode(&buf).unwrap();
assert_eq!(date, date!(2000 - 01 - 01));
let buf = 366i32.to_be_bytes();
let date: Date = Decode::<Postgres>::decode(&buf).unwrap();
assert_eq!(date, date!(2001 - 01 - 01));
let buf = 7284i32.to_be_bytes();
let date: Date = Decode::<Postgres>::decode(&buf).unwrap();
assert_eq!(date, date!(2019 - 12 - 11));
}


@ -14,6 +14,12 @@ pub mod chrono {
pub use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc};
}
#[cfg(feature = "time")]
#[cfg_attr(docsrs, doc(cfg(feature = "time")))]
pub mod time {
pub use time::{Date, OffsetDateTime, PrimitiveDateTime, Time, UtcOffset};
}
#[cfg(feature = "bigdecimal")]
#[cfg_attr(docsrs, doc(cfg(feature = "bigdecimal")))]
pub use bigdecimal::BigDecimal;


@ -29,6 +29,7 @@ sqlite = [ "sqlx/sqlite" ]
# type
bigdecimal = [ "sqlx/bigdecimal_bigint" ]
chrono = [ "sqlx/chrono" ]
time = [ "sqlx/time" ]
ipnetwork = [ "sqlx/ipnetwork" ]
uuid = [ "sqlx/uuid" ]


@ -17,17 +17,29 @@ impl_database_ext! {
// BINARY, VAR_BINARY, BLOB
Vec<u8>,
#[cfg(feature = "chrono")]
#[cfg(all(feature = "chrono", not(feature = "time")))]
sqlx::types::chrono::NaiveTime,
#[cfg(feature = "chrono")]
#[cfg(all(feature = "chrono", not(feature = "time")))]
sqlx::types::chrono::NaiveDate,
#[cfg(feature = "chrono")]
#[cfg(all(feature = "chrono", not(feature = "time")))]
sqlx::types::chrono::NaiveDateTime,
#[cfg(feature = "chrono")]
#[cfg(all(feature = "chrono", not(feature = "time")))]
sqlx::types::chrono::DateTime<sqlx::types::chrono::Utc>,
#[cfg(feature = "time")]
sqlx::types::time::Time,
#[cfg(feature = "time")]
sqlx::types::time::Date,
#[cfg(feature = "time")]
sqlx::types::time::PrimitiveDateTime,
#[cfg(feature = "time")]
sqlx::types::time::OffsetDateTime,
},
ParamChecking::Weak,
feature-types: info => info.type_feature_gate(),


@ -8,7 +8,6 @@ impl_database_ext! {
f32,
f64,
// BYTEA
Vec<u8> | &[u8],
#[cfg(feature = "uuid")]
@ -26,6 +25,18 @@ impl_database_ext! {
#[cfg(feature = "chrono")]
sqlx::types::chrono::DateTime<sqlx::types::chrono::Utc> | sqlx::types::chrono::DateTime<_>,
#[cfg(feature = "time")]
sqlx::types::time::Time,
#[cfg(feature = "time")]
sqlx::types::time::Date,
#[cfg(feature = "time")]
sqlx::types::time::PrimitiveDateTime,
#[cfg(feature = "time")]
sqlx::types::time::OffsetDateTime,
#[cfg(feature = "bigdecimal")]
sqlx::types::BigDecimal,


@ -0,0 +1,88 @@
use sqlx::types::chrono::{DateTime, NaiveDate, NaiveTime, Utc};
use sqlx::{mysql::MySqlConnection, Connection, Row};
async fn connect() -> anyhow::Result<MySqlConnection> {
Ok(MySqlConnection::open(dotenv::var("DATABASE_URL")?).await?)
}
#[cfg(all(feature = "chrono", not(feature = "time")))]
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn mysql_chrono_date() -> anyhow::Result<()> {
let mut conn = connect().await?;
let value = NaiveDate::from_ymd(2019, 1, 2);
let row = sqlx::query!(
"SELECT (DATE '2019-01-02' = ?) as _1, CAST(? AS DATE) as _2",
value,
value
)
.fetch_one(&mut conn)
.await?;
assert!(row._1 != 0);
assert_eq!(value, row._2);
Ok(())
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn mysql_chrono_date_time() -> anyhow::Result<()> {
let mut conn = connect().await?;
let value = NaiveDate::from_ymd(2019, 1, 2).and_hms(5, 10, 20);
let row = sqlx::query("SELECT '2019-01-02 05:10:20' = ?, ?")
.bind(&value)
.bind(&value)
.fetch_one(&mut conn)
.await?;
assert!(row.get::<bool, _>(0));
assert_eq!(value, row.get(1));
Ok(())
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn mysql_chrono_time() -> anyhow::Result<()> {
let mut conn = connect().await?;
let value = NaiveTime::from_hms_micro(5, 10, 20, 115100);
let row = sqlx::query("SELECT TIME '05:10:20.115100' = ?, TIME '05:10:20.115100'")
.bind(&value)
.fetch_one(&mut conn)
.await?;
assert!(row.get::<bool, _>(0));
assert_eq!(value, row.get(1));
Ok(())
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn mysql_chrono_timestamp() -> anyhow::Result<()> {
let mut conn = connect().await?;
let value = DateTime::<Utc>::from_utc(
NaiveDate::from_ymd(2019, 1, 2).and_hms_micro(5, 10, 20, 115100),
Utc,
);
let row = sqlx::query(
"SELECT TIMESTAMP '2019-01-02 05:10:20.115100' = ?, TIMESTAMP '2019-01-02 05:10:20.115100'",
)
.bind(&value)
.fetch_one(&mut conn)
.await?;
assert!(row.get::<bool, _>(0));
assert_eq!(value, row.get(1));
Ok(())
}

tests/mysql-types-time.rs

@ -0,0 +1,92 @@
use sqlx::types::time::{Date, OffsetDateTime, Time, UtcOffset};
use sqlx::{mysql::MySqlConnection, Connection, Row};
async fn connect() -> anyhow::Result<MySqlConnection> {
Ok(MySqlConnection::open(dotenv::var("DATABASE_URL")?).await?)
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn mysql_timers_date() -> anyhow::Result<()> {
let mut conn = connect().await?;
// TODO: maybe use macro here? but is it OK to include `time` as test dependency?
let value = Date::try_from_ymd(2019, 1, 2).unwrap();
let row = sqlx::query!(
"SELECT (DATE '2019-01-02' = ?) as _1, CAST(? AS DATE) as _2",
value,
value
)
.fetch_one(&mut conn)
.await?;
assert!(row._1 != 0);
assert_eq!(value, row._2);
Ok(())
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn mysql_timers_date_time() -> anyhow::Result<()> {
let mut conn = connect().await?;
let value = Date::try_from_ymd(2019, 1, 2)
.unwrap()
.try_with_hms(5, 10, 20)
.unwrap();
let row = sqlx::query("SELECT '2019-01-02 05:10:20' = ?, ?")
.bind(&value)
.bind(&value)
.fetch_one(&mut conn)
.await?;
assert!(row.get::<bool, _>(0));
assert_eq!(value, row.get(1));
Ok(())
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn mysql_timers_time() -> anyhow::Result<()> {
let mut conn = connect().await?;
let value = Time::try_from_hms_micro(5, 10, 20, 115100).unwrap();
let row = sqlx::query("SELECT TIME '05:10:20.115100' = ?, TIME '05:10:20.115100'")
.bind(&value)
.fetch_one(&mut conn)
.await?;
assert!(row.get::<bool, _>(0));
assert_eq!(value, row.get(1));
Ok(())
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn mysql_timers_timestamp() -> anyhow::Result<()> {
let mut conn = connect().await?;
let value = Date::try_from_ymd(2019, 1, 2)
.unwrap()
.try_with_hms_micro(5, 10, 20, 115100)
.unwrap()
.assume_utc();
let row = sqlx::query(
"SELECT TIMESTAMP '2019-01-02 05:10:20.115100' = ?, TIMESTAMP '2019-01-02 05:10:20.115100'",
)
.bind(&value)
.fetch_one(&mut conn)
.await?;
assert!(row.get::<bool, _>(0));
assert_eq!(value, row.get(1));
Ok(())
}


@ -0,0 +1,91 @@
use sqlx::types::chrono::{DateTime, NaiveDate, Utc};
use sqlx::{Connection, PgConnection, Row};
#[cfg(all(feature = "chrono", not(feature = "time")))]
use sqlx::types::chrono::NaiveTime;
async fn connect() -> anyhow::Result<PgConnection> {
Ok(PgConnection::open(dotenv::var("DATABASE_URL")?).await?)
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn postgres_chrono_date() -> anyhow::Result<()> {
let mut conn = connect().await?;
let value = NaiveDate::from_ymd(2019, 1, 2);
let row = sqlx::query("SELECT DATE '2019-01-02' = $1, $1")
.bind(&value)
.fetch_one(&mut conn)
.await?;
assert!(row.get::<bool, _>(0));
assert_eq!(value, row.get(1));
Ok(())
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn postgres_chrono_date_time() -> anyhow::Result<()> {
let mut conn = connect().await?;
let value = NaiveDate::from_ymd(2019, 1, 2).and_hms(5, 10, 20);
let row = sqlx::query("SELECT '2019-01-02 05:10:20' = $1, $1")
.bind(&value)
.fetch_one(&mut conn)
.await?;
assert!(row.get::<bool, _>(0));
assert_eq!(value, row.get(1));
Ok(())
}
#[cfg(all(feature = "chrono", not(feature = "time")))]
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn postgres_chrono_time() -> anyhow::Result<()> {
let mut conn = connect().await?;
let value = NaiveTime::from_hms_micro(5, 10, 20, 115100);
let row = sqlx::query!(
"SELECT TIME '05:10:20.115100' = $1 AS equality, TIME '05:10:20.115100' AS time",
value,
)
.fetch_one(&mut conn)
.await?;
assert!(row.equality);
assert_eq!(value, row.time);
Ok(())
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn postgres_chrono_timestamp_tz() -> anyhow::Result<()> {
let mut conn = connect().await?;
let value = DateTime::<Utc>::from_utc(
NaiveDate::from_ymd(2019, 1, 2).and_hms_micro(5, 10, 20, 115100),
Utc,
);
let row = sqlx::query(
"SELECT TIMESTAMPTZ '2019-01-02 05:10:20.115100' = $1, TIMESTAMPTZ '2019-01-02 05:10:20.115100'",
)
.bind(&value)
.fetch_one(&mut conn)
.await?;
assert!(row.get::<bool, _>(0));
let out: DateTime<Utc> = row.get(1);
assert_eq!(value, out);
Ok(())
}


@ -0,0 +1,109 @@
use sqlx::types::time::{Date, OffsetDateTime, Time, UtcOffset};
use sqlx::{Connection, PgConnection, Row};
async fn connect() -> anyhow::Result<PgConnection> {
Ok(PgConnection::open(dotenv::var("DATABASE_URL")?).await?)
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn postgres_timers_date() -> anyhow::Result<()> {
let mut conn = connect().await?;
let value = Date::try_from_ymd(2019, 1, 2).unwrap();
let row = sqlx::query("SELECT DATE '2019-01-02' = $1, $1")
.bind(&value)
.fetch_one(&mut conn)
.await?;
assert!(row.get::<bool, _>(0));
assert_eq!(value, row.get(1));
Ok(())
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn postgres_timers_date_time() -> anyhow::Result<()> {
let mut conn = connect().await?;
let value = Date::try_from_ymd(2019, 1, 2)
.unwrap()
.try_with_hms(5, 10, 20)
.unwrap();
let row = sqlx::query("SELECT '2019-01-02 05:10:20' = $1, $1")
.bind(&value)
.fetch_one(&mut conn)
.await?;
assert!(row.get::<bool, _>(0));
assert_eq!(value, row.get(1));
Ok(())
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn postgres_timers_time() -> anyhow::Result<()> {
let mut conn = connect().await?;
let value = Time::try_from_hms_micro(5, 10, 20, 115100).unwrap();
let row = sqlx::query!(
"SELECT TIME '05:10:20.115100' = $1 AS equality, TIME '05:10:20.115100' AS time",
value
)
.fetch_one(&mut conn)
.await?;
assert!(row.equality);
assert_eq!(value, row.time);
Ok(())
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn postgres_timers_timestamp_tz() -> anyhow::Result<()> {
let mut conn = connect().await?;
let value = Date::try_from_ymd(2019, 1, 2)
.unwrap()
.try_with_hms_micro(5, 10, 20, 115100)
.unwrap()
.assume_utc();
let row = sqlx::query(
"SELECT TIMESTAMPTZ '2019-01-02 05:10:20.115100' = $1, TIMESTAMPTZ '2019-01-02 05:10:20.115100'",
)
.bind(&value)
.fetch_one(&mut conn)
.await?;
assert!(row.get::<bool, _>(0));
let out: OffsetDateTime = row.get(1);
assert_eq!(value, out);
let value = Date::try_from_ymd(2019, 1, 2)
.unwrap()
.try_with_hms_micro(5, 10, 20, 115100)
.unwrap()
.assume_offset(UtcOffset::east_hours(3));
let row = sqlx::query(
"SELECT TIMESTAMPTZ '2019-01-02 02:10:20.115100' = $1, TIMESTAMPTZ '2019-01-02 02:10:20.115100'",
)
.bind(&value)
.fetch_one(&mut conn)
.await?;
assert!(row.get::<bool, _>(0));
let out: OffsetDateTime = row.get(1);
assert_eq!(value, out);
Ok(())
}