Merge remote-tracking branch 'origin/main' into sqlx-toml

# Conflicts:
#	Cargo.lock
#	Cargo.toml
#	sqlx-cli/src/database.rs
#	sqlx-cli/src/lib.rs
#	sqlx-mysql/src/connection/executor.rs
This commit is contained in:
Austin Bonander
2025-02-27 17:04:34 -08:00
61 changed files with 1984 additions and 995 deletions

View File

@@ -5,7 +5,7 @@ use crate::{
use futures_core::future::BoxFuture;
use futures_core::stream::BoxStream;
use futures_util::{stream, StreamExt, TryFutureExt, TryStreamExt};
use std::future;
use std::{future, pin::pin};
use sqlx_core::any::{
Any, AnyArguments, AnyColumn, AnyConnectOptions, AnyConnectionBackend, AnyQueryResult, AnyRow,
@@ -115,8 +115,7 @@ impl AnyConnectionBackend for PgConnection {
Box::pin(async move {
let arguments = arguments?;
let stream = self.run(query, arguments, 1, persistent, None).await?;
futures_util::pin_mut!(stream);
let mut stream = pin!(self.run(query, arguments, 1, persistent, None).await?);
if let Some(Either::Right(row)) = stream.try_next().await? {
return Ok(Some(AnyRow::try_from(&row)?));

View File

@@ -22,7 +22,7 @@ use sqlx_core::error::BoxDynError;
// that has a patch, we then apply the patch which should write to &mut Vec<u8>,
// backtrack and update the prefixed-len, then write until the next patch offset
#[derive(Default)]
#[derive(Default, Debug, Clone)]
pub struct PgArgumentBuffer {
buffer: Vec<u8>,
@@ -46,20 +46,32 @@ pub struct PgArgumentBuffer {
type_holes: Vec<(usize, HoleKind)>, // Vec<{ offset, type_name }>
}
#[derive(Debug, Clone)]
enum HoleKind {
Type { name: UStr },
Array(Arc<PgArrayOf>),
}
#[derive(Clone)]
struct Patch {
buf_offset: usize,
arg_index: usize,
#[allow(clippy::type_complexity)]
callback: Box<dyn Fn(&mut [u8], &PgTypeInfo) + 'static + Send + Sync>,
callback: Arc<dyn Fn(&mut [u8], &PgTypeInfo) + 'static + Send + Sync>,
}
impl fmt::Debug for Patch {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Patch")
.field("buf_offset", &self.buf_offset)
.field("arg_index", &self.arg_index)
.field("callback", &"<callback>")
.finish()
}
}
/// Implementation of [`Arguments`] for PostgreSQL.
#[derive(Default)]
#[derive(Default, Debug, Clone)]
pub struct PgArguments {
// Types of each bind parameter
pub(crate) types: Vec<PgTypeInfo>,
@@ -194,7 +206,7 @@ impl PgArgumentBuffer {
self.patches.push(Patch {
buf_offset: offset,
arg_index,
callback: Box::new(callback),
callback: Arc::new(callback),
});
}

View File

@@ -15,10 +15,10 @@ use crate::{
use futures_core::future::BoxFuture;
use futures_core::stream::BoxStream;
use futures_core::Stream;
use futures_util::{pin_mut, TryStreamExt};
use futures_util::TryStreamExt;
use sqlx_core::arguments::Arguments;
use sqlx_core::Either;
use std::{borrow::Cow, sync::Arc};
use std::{borrow::Cow, pin::pin, sync::Arc};
async fn prepare(
conn: &mut PgConnection,
@@ -395,8 +395,7 @@ impl<'c> Executor<'c> for &'c mut PgConnection {
Box::pin(try_stream! {
let arguments = arguments?;
let s = self.run(sql, arguments, 0, persistent, metadata).await?;
pin_mut!(s);
let mut s = pin!(self.run(sql, arguments, 0, persistent, metadata).await?);
while let Some(v) = s.try_next().await? {
r#yield!(v);
@@ -422,8 +421,7 @@ impl<'c> Executor<'c> for &'c mut PgConnection {
Box::pin(async move {
let arguments = arguments?;
let s = self.run(sql, arguments, 1, persistent, metadata).await?;
pin_mut!(s);
let mut s = pin!(self.run(sql, arguments, 1, persistent, metadata).await?);
// With deferred constraints we need to check all responses as we
// could get a OK response (with uncommitted data), only to get an

View File

@@ -129,6 +129,9 @@ impl PgPoolCopyExt for Pool<Postgres> {
}
}
// (1 GiB - 1) - 1 - length prefix (4 bytes)
pub const PG_COPY_MAX_DATA_LEN: usize = 0x3fffffff - 1 - 4;
/// A connection in streaming `COPY FROM STDIN` mode.
///
/// Created by [PgConnection::copy_in_raw] or [Pool::copy_out_raw].
@@ -186,15 +189,20 @@ impl<C: DerefMut<Target = PgConnection>> PgCopyIn<C> {
/// Send a chunk of `COPY` data.
///
/// The data is sent in chunks if it exceeds the maximum length of a `CopyData` message (1 GiB - 6
/// bytes) and may be partially sent if this call is cancelled.
///
/// If you're copying data from an `AsyncRead`, maybe consider [Self::read_from] instead.
pub async fn send(&mut self, data: impl Deref<Target = [u8]>) -> Result<&mut Self> {
self.conn
.as_deref_mut()
.expect("send_data: conn taken")
.inner
.stream
.send(CopyData(data))
.await?;
for chunk in data.deref().chunks(PG_COPY_MAX_DATA_LEN) {
self.conn
.as_deref_mut()
.expect("send_data: conn taken")
.inner
.stream
.send(CopyData(chunk))
.await?;
}
Ok(self)
}
@@ -230,10 +238,10 @@ impl<C: DerefMut<Target = PgConnection>> PgCopyIn<C> {
}
// Write the length
let read32 = u32::try_from(read)
.map_err(|_| err_protocol!("number of bytes read exceeds 2^32: {}", read))?;
let read32 = i32::try_from(read)
.map_err(|_| err_protocol!("number of bytes read exceeds 2^31 - 1: {}", read))?;
(&mut buf.get_mut()[1..]).put_u32(read32 + 4);
(&mut buf.get_mut()[1..]).put_i32(read32 + 4);
conn.inner.stream.flush().await?;
}

View File

@@ -34,6 +34,9 @@ mod value;
#[doc(hidden)]
pub mod any;
#[doc(hidden)]
pub use copy::PG_COPY_MAX_DATA_LEN;
#[cfg(feature = "migrate")]
mod migrate;

View File

@@ -1,20 +1,18 @@
use std::fmt::Write;
use std::ops::Deref;
use std::str::FromStr;
use std::sync::atomic::{AtomicBool, Ordering};
use std::time::{Duration, SystemTime};
use std::time::Duration;
use futures_core::future::BoxFuture;
use once_cell::sync::OnceCell;
use crate::connection::Connection;
use sqlx_core::connection::Connection;
use sqlx_core::query_scalar::query_scalar;
use crate::error::Error;
use crate::executor::Executor;
use crate::pool::{Pool, PoolOptions};
use crate::query::query;
use crate::query_scalar::query_scalar;
use crate::{PgConnectOptions, PgConnection, Postgres};
pub(crate) use sqlx_core::testing::*;
@@ -22,7 +20,6 @@ pub(crate) use sqlx_core::testing::*;
// Using a blocking `OnceCell` here because the critical sections are short.
static MASTER_POOL: OnceCell<Pool<Postgres>> = OnceCell::new();
// Automatically delete any databases created before the start of the test binary.
static DO_CLEANUP: AtomicBool = AtomicBool::new(true);
impl TestSupport for Postgres {
fn test_context(args: &TestArgs) -> BoxFuture<'_, Result<TestContext<Self>, Error>> {
@@ -33,19 +30,11 @@ impl TestSupport for Postgres {
Box::pin(async move {
let mut conn = MASTER_POOL
.get()
.expect("cleanup_test() invoked outside `#[sqlx::test]")
.expect("cleanup_test() invoked outside `#[sqlx::test]`")
.acquire()
.await?;
conn.execute(&format!("drop database if exists {db_name:?};")[..])
.await?;
query("delete from _sqlx_test.databases where db_name = $1")
.bind(db_name)
.execute(&mut *conn)
.await?;
Ok(())
do_cleanup(&mut conn, db_name).await
})
}
@@ -55,13 +44,42 @@ impl TestSupport for Postgres {
let mut conn = PgConnection::connect(&url).await?;
let now = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap();
let delete_db_names: Vec<String> =
query_scalar("select db_name from _sqlx_test.databases")
.fetch_all(&mut conn)
.await?;
if delete_db_names.is_empty() {
return Ok(None);
}
let mut deleted_db_names = Vec::with_capacity(delete_db_names.len());
let mut command = String::new();
for db_name in &delete_db_names {
command.clear();
writeln!(command, "drop database if exists {db_name:?};").ok();
match conn.execute(&*command).await {
Ok(_deleted) => {
deleted_db_names.push(db_name);
}
// Assume a database error just means the DB is still in use.
Err(Error::Database(dbe)) => {
eprintln!("could not clean test database {db_name:?}: {dbe}")
}
// Bubble up other errors
Err(e) => return Err(e),
}
}
query("delete from _sqlx_test.databases where db_name = any($1::text[])")
.bind(&deleted_db_names)
.execute(&mut conn)
.await?;
let num_deleted = do_cleanup(&mut conn, now).await?;
let _ = conn.close().await;
Ok(Some(num_deleted))
Ok(Some(delete_db_names.len()))
})
}
@@ -116,8 +134,9 @@ async fn test_context(args: &TestArgs) -> Result<TestContext<Postgres>, Error> {
// I couldn't find a bug on the mailing list for `CREATE SCHEMA` specifically,
// but a clearly related bug with `CREATE TABLE` has been known since 2007:
// https://www.postgresql.org/message-id/200710222037.l9MKbCJZ098744%40wwwmaster.postgresql.org
// magic constant 8318549251334697844 is just 8 ascii bytes 'sqlxtest'.
r#"
lock table pg_catalog.pg_namespace in share row exclusive mode;
select pg_advisory_xact_lock(8318549251334697844);
create schema if not exists _sqlx_test;
@@ -135,31 +154,22 @@ async fn test_context(args: &TestArgs) -> Result<TestContext<Postgres>, Error> {
)
.await?;
// Record the current time _before_ we acquire the `DO_CLEANUP` permit. This
// prevents the first test thread from accidentally deleting new test dbs
// created by other test threads if we're a bit slow.
let now = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap();
let db_name = Postgres::db_name(args);
do_cleanup(&mut conn, &db_name).await?;
// Only run cleanup if the test binary just started.
if DO_CLEANUP.swap(false, Ordering::SeqCst) {
do_cleanup(&mut conn, now).await?;
}
let new_db_name: String = query_scalar(
query(
r#"
insert into _sqlx_test.databases(db_name, test_path)
select '_sqlx_test_' || nextval('_sqlx_test.database_ids'), $1
returning db_name
insert into _sqlx_test.databases(db_name, test_path) values ($1, $2)
"#,
)
.bind(&db_name)
.bind(args.test_path)
.fetch_one(&mut *conn)
.execute(&mut *conn)
.await?;
conn.execute(&format!("create database {new_db_name:?}")[..])
.await?;
let create_command = format!("create database {db_name:?}");
debug_assert!(create_command.starts_with("create database \""));
conn.execute(&(create_command)[..]).await?;
Ok(TestContext {
pool_opts: PoolOptions::new()
@@ -174,52 +184,18 @@ async fn test_context(args: &TestArgs) -> Result<TestContext<Postgres>, Error> {
.connect_options()
.deref()
.clone()
.database(&new_db_name),
db_name: new_db_name,
.database(&db_name),
db_name,
})
}
async fn do_cleanup(conn: &mut PgConnection, created_before: Duration) -> Result<usize, Error> {
// since SystemTime is not monotonic we added a little margin here to avoid race conditions with other threads
let created_before = i64::try_from(created_before.as_secs()).unwrap() - 2;
let delete_db_names: Vec<String> = query_scalar(
"select db_name from _sqlx_test.databases \
where created_at < (to_timestamp($1) at time zone 'UTC')",
)
.bind(created_before)
.fetch_all(&mut *conn)
.await?;
if delete_db_names.is_empty() {
return Ok(0);
}
let mut deleted_db_names = Vec::with_capacity(delete_db_names.len());
let delete_db_names = delete_db_names.into_iter();
let mut command = String::new();
for db_name in delete_db_names {
command.clear();
writeln!(command, "drop database if exists {db_name:?};").ok();
match conn.execute(&*command).await {
Ok(_deleted) => {
deleted_db_names.push(db_name);
}
// Assume a database error just means the DB is still in use.
Err(Error::Database(dbe)) => {
eprintln!("could not clean test database {db_name:?}: {dbe}")
}
// Bubble up other errors
Err(e) => return Err(e),
}
}
query("delete from _sqlx_test.databases where db_name = any($1::text[])")
.bind(&deleted_db_names)
async fn do_cleanup(conn: &mut PgConnection, db_name: &str) -> Result<(), Error> {
let delete_db_command = format!("drop database if exists {db_name:?};");
conn.execute(&*delete_db_command).await?;
query("delete from _sqlx_test.databases where db_name = $1::text")
.bind(db_name)
.execute(&mut *conn)
.await?;
Ok(deleted_db_names.len())
Ok(())
}

View File

@@ -36,6 +36,10 @@ impl_type_checking!(
sqlx::postgres::types::PgLine,
sqlx::postgres::types::PgLSeg,
sqlx::postgres::types::PgBox,
#[cfg(feature = "uuid")]
sqlx::types::Uuid,

View File

@@ -185,7 +185,7 @@ pub enum PgTypeKind {
Range(PgTypeInfo),
}
#[derive(Debug)]
#[derive(Debug, Clone)]
#[cfg_attr(feature = "offline", derive(serde::Serialize, serde::Deserialize))]
pub struct PgArrayOf {
pub(crate) elem_name: UStr,

View File

@@ -20,7 +20,7 @@ const IS_POINT_FLAG: u32 = 1 << 31;
#[derive(Debug, Clone, PartialEq)]
pub enum PgCube {
/// A one-dimensional point.
// FIXME: `Point1D(f64)
// FIXME: `Point1D(f64)`
Point(f64),
/// An N-dimensional point ("represented internally as a zero-volume cube").
// FIXME: `PointND(f64)`
@@ -32,7 +32,7 @@ pub enum PgCube {
// FIXME: add `Cube3D { lower_left: [f64; 3], upper_right: [f64; 3] }`?
/// An N-dimensional cube with points representing lower-left and upper-right corners, respectively.
// FIXME: CubeND { lower_left: Vec<f64>, upper_right: Vec<f64> }`
// FIXME: `CubeND { lower_left: Vec<f64>, upper_right: Vec<f64> }`
MultiDimension(Vec<Vec<f64>>),
}

View File

@@ -0,0 +1,321 @@
use crate::decode::Decode;
use crate::encode::{Encode, IsNull};
use crate::error::BoxDynError;
use crate::types::Type;
use crate::{PgArgumentBuffer, PgHasArrayType, PgTypeInfo, PgValueFormat, PgValueRef, Postgres};
use sqlx_core::bytes::Buf;
use std::str::FromStr;
// Shared prefix for all `PgBox` decode-error messages (asserted verbatim in tests below).
const ERROR: &str = "error decoding BOX";

/// ## Postgres Geometric Box type
///
/// Description: Rectangular box
/// Representation: `((upper_right_x,upper_right_y),(lower_left_x,lower_left_y))`
///
/// Boxes are represented by pairs of points that are opposite corners of the box. Values of type box are specified using any of the following syntaxes:
///
/// ```text
/// ( ( upper_right_x , upper_right_y ) , ( lower_left_x , lower_left_y ) )
/// ( upper_right_x , upper_right_y ) , ( lower_left_x , lower_left_y )
/// upper_right_x , upper_right_y , lower_left_x , lower_left_y
/// ```
/// where `(upper_right_x,upper_right_y) and (lower_left_x,lower_left_y)` are any two opposite corners of the box.
/// Any two opposite corners can be supplied on input, but the values will be reordered as needed to store the upper right and lower left corners, in that order.
///
/// See <https://www.postgresql.org/docs/16/datatype-geometric.html#DATATYPE-GEOMETRIC-BOXES>
#[derive(Debug, Clone, PartialEq)]
pub struct PgBox {
    pub upper_right_x: f64,
    pub upper_right_y: f64,
    pub lower_left_x: f64,
    pub lower_left_y: f64,
}
impl Type<Postgres> for PgBox {
    fn type_info() -> PgTypeInfo {
        // `box` is Postgres' built-in geometric rectangle type; resolved by name,
        // not by a fixed OID.
        PgTypeInfo::with_name("box")
    }
}

impl PgHasArrayType for PgBox {
    fn array_type_info() -> PgTypeInfo {
        // Postgres names the array type of `T` as `_T`.
        PgTypeInfo::with_name("_box")
    }
}

impl<'r> Decode<'r, Postgres> for PgBox {
    fn decode(value: PgValueRef<'r>) -> Result<Self, Box<dyn std::error::Error + Send + Sync>> {
        // Dispatch on the wire format the server used for this value:
        // text syntax goes through `FromStr`, binary through `from_bytes`.
        match value.format() {
            PgValueFormat::Text => Ok(PgBox::from_str(value.as_str()?)?),
            PgValueFormat::Binary => Ok(PgBox::from_bytes(value.as_bytes()?)?),
        }
    }
}

impl<'q> Encode<'q, Postgres> for PgBox {
    fn produces(&self) -> Option<PgTypeInfo> {
        Some(PgTypeInfo::with_name("box"))
    }

    fn encode_by_ref(&self, buf: &mut PgArgumentBuffer) -> Result<IsNull, BoxDynError> {
        // `serialize` writes the 32-byte binary representation directly into `buf`.
        self.serialize(buf)?;

        Ok(IsNull::No)
    }
}
impl FromStr for PgBox {
type Err = BoxDynError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let sanitised = s.replace(['(', ')', '[', ']', ' '], "");
let mut parts = sanitised.split(',');
let upper_right_x = parts
.next()
.and_then(|s| s.parse::<f64>().ok())
.ok_or_else(|| format!("{}: could not get upper_right_x from {}", ERROR, s))?;
let upper_right_y = parts
.next()
.and_then(|s| s.parse::<f64>().ok())
.ok_or_else(|| format!("{}: could not get upper_right_y from {}", ERROR, s))?;
let lower_left_x = parts
.next()
.and_then(|s| s.parse::<f64>().ok())
.ok_or_else(|| format!("{}: could not get lower_left_x from {}", ERROR, s))?;
let lower_left_y = parts
.next()
.and_then(|s| s.parse::<f64>().ok())
.ok_or_else(|| format!("{}: could not get lower_left_y from {}", ERROR, s))?;
if parts.next().is_some() {
return Err(format!("{}: too many numbers inputted in {}", ERROR, s).into());
}
Ok(PgBox {
upper_right_x,
upper_right_y,
lower_left_x,
lower_left_y,
})
}
}
impl PgBox {
fn from_bytes(mut bytes: &[u8]) -> Result<PgBox, BoxDynError> {
let upper_right_x = bytes.get_f64();
let upper_right_y = bytes.get_f64();
let lower_left_x = bytes.get_f64();
let lower_left_y = bytes.get_f64();
Ok(PgBox {
upper_right_x,
upper_right_y,
lower_left_x,
lower_left_y,
})
}
fn serialize(&self, buff: &mut PgArgumentBuffer) -> Result<(), String> {
let min_x = &self.upper_right_x.min(self.lower_left_x);
let min_y = &self.upper_right_y.min(self.lower_left_y);
let max_x = &self.upper_right_x.max(self.lower_left_x);
let max_y = &self.upper_right_y.max(self.lower_left_y);
buff.extend_from_slice(&max_x.to_be_bytes());
buff.extend_from_slice(&max_y.to_be_bytes());
buff.extend_from_slice(&min_x.to_be_bytes());
buff.extend_from_slice(&min_y.to_be_bytes());
Ok(())
}
#[cfg(test)]
fn serialize_to_vec(&self) -> Vec<u8> {
let mut buff = PgArgumentBuffer::default();
self.serialize(&mut buff).unwrap();
buff.to_vec()
}
}
#[cfg(test)]
mod box_tests {

    use std::str::FromStr;

    use super::PgBox;

    // Binary encoding of the box ((2,2),(-2,-2)): four big-endian `f64`s.
    const BOX_BYTES: &[u8] = &[
        64, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 192, 0, 0, 0, 0, 0, 0, 0, 192, 0, 0, 0,
        0, 0, 0, 0,
    ];

    #[test]
    fn can_deserialise_box_type_bytes_in_order() {
        let pg_box = PgBox::from_bytes(BOX_BYTES).unwrap();
        assert_eq!(
            pg_box,
            PgBox {
                upper_right_x: 2.,
                upper_right_y: 2.,
                lower_left_x: -2.,
                lower_left_y: -2.
            }
        )
    }

    // Text syntax 1: `[ ( x1 , y1 ) , ( x2 , y2 ) ]`.
    #[test]
    fn can_deserialise_box_type_str_first_syntax() {
        let pg_box = PgBox::from_str("[( 1, 2), (3, 4 )]").unwrap();
        assert_eq!(
            pg_box,
            PgBox {
                upper_right_x: 1.,
                upper_right_y: 2.,
                lower_left_x: 3.,
                lower_left_y: 4.
            }
        );
    }

    // Text syntax 2: `( ( x1 , y1 ) , ( x2 , y2 ) )`.
    #[test]
    fn can_deserialise_box_type_str_second_syntax() {
        let pg_box = PgBox::from_str("(( 1, 2), (3, 4 ))").unwrap();
        assert_eq!(
            pg_box,
            PgBox {
                upper_right_x: 1.,
                upper_right_y: 2.,
                lower_left_x: 3.,
                lower_left_y: 4.
            }
        );
    }

    // Text syntax 3: `( x1 , y1 ) , ( x2 , y2 )`.
    #[test]
    fn can_deserialise_box_type_str_third_syntax() {
        let pg_box = PgBox::from_str("(1, 2), (3, 4 )").unwrap();
        assert_eq!(
            pg_box,
            PgBox {
                upper_right_x: 1.,
                upper_right_y: 2.,
                lower_left_x: 3.,
                lower_left_y: 4.
            }
        );
    }

    // Text syntax 4: bare `x1 , y1 , x2 , y2`.
    #[test]
    fn can_deserialise_box_type_str_fourth_syntax() {
        let pg_box = PgBox::from_str("1, 2, 3, 4").unwrap();
        assert_eq!(
            pg_box,
            PgBox {
                upper_right_x: 1.,
                upper_right_y: 2.,
                lower_left_x: 3.,
                lower_left_y: 4.
            }
        );
    }

    #[test]
    fn cannot_deserialise_too_many_numbers() {
        let input_str = "1, 2, 3, 4, 5";
        let pg_box = PgBox::from_str(input_str);
        assert!(pg_box.is_err());
        if let Err(err) = pg_box {
            assert_eq!(
                err.to_string(),
                format!("error decoding BOX: too many numbers inputted in {input_str}")
            )
        }
    }

    #[test]
    fn cannot_deserialise_too_few_numbers() {
        let input_str = "1, 2, 3 ";
        let pg_box = PgBox::from_str(input_str);
        assert!(pg_box.is_err());
        if let Err(err) = pg_box {
            assert_eq!(
                err.to_string(),
                format!("error decoding BOX: could not get lower_left_y from {input_str}")
            )
        }
    }

    #[test]
    fn cannot_deserialise_invalid_numbers() {
        let input_str = "1, 2, 3, FOUR";
        let pg_box = PgBox::from_str(input_str);
        assert!(pg_box.is_err());
        if let Err(err) = pg_box {
            assert_eq!(
                err.to_string(),
                format!("error decoding BOX: could not get lower_left_y from {input_str}")
            )
        }
    }

    #[test]
    fn can_deserialise_box_type_str_float() {
        let pg_box = PgBox::from_str("(1.1, 2.2), (3.3, 4.4)").unwrap();
        assert_eq!(
            pg_box,
            PgBox {
                upper_right_x: 1.1,
                upper_right_y: 2.2,
                lower_left_x: 3.3,
                lower_left_y: 4.4
            }
        );
    }

    // Serialization re-orders corners to (max, max), (min, min), so both of the
    // following inputs must produce the same canonical bytes.
    #[test]
    fn can_serialise_box_type_in_order() {
        let pg_box = PgBox {
            upper_right_x: 2.,
            lower_left_x: -2.,
            upper_right_y: -2.,
            lower_left_y: 2.,
        };
        assert_eq!(pg_box.serialize_to_vec(), BOX_BYTES,)
    }

    #[test]
    fn can_serialise_box_type_out_of_order() {
        let pg_box = PgBox {
            upper_right_x: -2.,
            lower_left_x: 2.,
            upper_right_y: 2.,
            lower_left_y: -2.,
        };
        assert_eq!(pg_box.serialize_to_vec(), BOX_BYTES,)
    }

    // Round-trip: serialize re-orders the corners, so decoding gives the canonical box.
    #[test]
    fn can_order_box() {
        let pg_box = PgBox {
            upper_right_x: -2.,
            lower_left_x: 2.,
            upper_right_y: 2.,
            lower_left_y: -2.,
        };
        let bytes = pg_box.serialize_to_vec();
        let pg_box = PgBox::from_bytes(&bytes).unwrap();
        assert_eq!(
            pg_box,
            PgBox {
                upper_right_x: 2.,
                upper_right_y: 2.,
                lower_left_x: -2.,
                lower_left_y: -2.
            }
        )
    }
}

View File

@@ -0,0 +1,283 @@
use crate::decode::Decode;
use crate::encode::{Encode, IsNull};
use crate::error::BoxDynError;
use crate::types::Type;
use crate::{PgArgumentBuffer, PgHasArrayType, PgTypeInfo, PgValueFormat, PgValueRef, Postgres};
use sqlx_core::bytes::Buf;
use std::str::FromStr;
// Shared prefix for all `PgLSeg` decode-error messages (asserted verbatim in tests below).
const ERROR: &str = "error decoding LSEG";

/// ## Postgres Geometric Line Segment type
///
/// Description: Finite line segment
/// Representation: `((start_x,start_y),(end_x,end_y))`
///
/// Line segments are represented by pairs of points that are the endpoints of the segment. Values of type lseg are specified using any of the following syntaxes:
/// ```text
/// [ ( start_x , start_y ) , ( end_x , end_y ) ]
/// ( ( start_x , start_y ) , ( end_x , end_y ) )
/// ( start_x , start_y ) , ( end_x , end_y )
/// start_x , start_y , end_x , end_y
/// ```
/// where `(start_x,start_y) and (end_x,end_y)` are the end points of the line segment.
///
/// See <https://www.postgresql.org/docs/16/datatype-geometric.html#DATATYPE-LSEG>
#[doc(alias = "line segment")]
#[derive(Debug, Clone, PartialEq)]
pub struct PgLSeg {
    pub start_x: f64,
    pub start_y: f64,
    pub end_x: f64,
    pub end_y: f64,
}
impl Type<Postgres> for PgLSeg {
    fn type_info() -> PgTypeInfo {
        // `lseg` is Postgres' built-in finite line segment type; resolved by name,
        // not by a fixed OID.
        PgTypeInfo::with_name("lseg")
    }
}

impl PgHasArrayType for PgLSeg {
    fn array_type_info() -> PgTypeInfo {
        // Postgres names the array type of `T` as `_T`.
        PgTypeInfo::with_name("_lseg")
    }
}

impl<'r> Decode<'r, Postgres> for PgLSeg {
    fn decode(value: PgValueRef<'r>) -> Result<Self, Box<dyn std::error::Error + Send + Sync>> {
        // Dispatch on the wire format the server used for this value:
        // text syntax goes through `FromStr`, binary through `from_bytes`.
        match value.format() {
            PgValueFormat::Text => Ok(PgLSeg::from_str(value.as_str()?)?),
            PgValueFormat::Binary => Ok(PgLSeg::from_bytes(value.as_bytes()?)?),
        }
    }
}

impl<'q> Encode<'q, Postgres> for PgLSeg {
    fn produces(&self) -> Option<PgTypeInfo> {
        Some(PgTypeInfo::with_name("lseg"))
    }

    fn encode_by_ref(&self, buf: &mut PgArgumentBuffer) -> Result<IsNull, BoxDynError> {
        // `serialize` writes the 32-byte binary representation directly into `buf`.
        self.serialize(buf)?;

        Ok(IsNull::No)
    }
}
impl FromStr for PgLSeg {
type Err = BoxDynError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let sanitised = s.replace(['(', ')', '[', ']', ' '], "");
let mut parts = sanitised.split(',');
let start_x = parts
.next()
.and_then(|s| s.parse::<f64>().ok())
.ok_or_else(|| format!("{}: could not get start_x from {}", ERROR, s))?;
let start_y = parts
.next()
.and_then(|s| s.parse::<f64>().ok())
.ok_or_else(|| format!("{}: could not get start_y from {}", ERROR, s))?;
let end_x = parts
.next()
.and_then(|s| s.parse::<f64>().ok())
.ok_or_else(|| format!("{}: could not get end_x from {}", ERROR, s))?;
let end_y = parts
.next()
.and_then(|s| s.parse::<f64>().ok())
.ok_or_else(|| format!("{}: could not get end_y from {}", ERROR, s))?;
if parts.next().is_some() {
return Err(format!("{}: too many numbers inputted in {}", ERROR, s).into());
}
Ok(PgLSeg {
start_x,
start_y,
end_x,
end_y,
})
}
}
impl PgLSeg {
fn from_bytes(mut bytes: &[u8]) -> Result<PgLSeg, BoxDynError> {
let start_x = bytes.get_f64();
let start_y = bytes.get_f64();
let end_x = bytes.get_f64();
let end_y = bytes.get_f64();
Ok(PgLSeg {
start_x,
start_y,
end_x,
end_y,
})
}
fn serialize(&self, buff: &mut PgArgumentBuffer) -> Result<(), BoxDynError> {
buff.extend_from_slice(&self.start_x.to_be_bytes());
buff.extend_from_slice(&self.start_y.to_be_bytes());
buff.extend_from_slice(&self.end_x.to_be_bytes());
buff.extend_from_slice(&self.end_y.to_be_bytes());
Ok(())
}
#[cfg(test)]
fn serialize_to_vec(&self) -> Vec<u8> {
let mut buff = PgArgumentBuffer::default();
self.serialize(&mut buff).unwrap();
buff.to_vec()
}
}
#[cfg(test)]
mod lseg_tests {

    use std::str::FromStr;

    use super::PgLSeg;

    // Binary encoding of the segment (1.1, 2.2) -> (3.3, 4.4): four big-endian `f64`s.
    const LINE_SEGMENT_BYTES: &[u8] = &[
        63, 241, 153, 153, 153, 153, 153, 154, 64, 1, 153, 153, 153, 153, 153, 154, 64, 10, 102,
        102, 102, 102, 102, 102, 64, 17, 153, 153, 153, 153, 153, 154,
    ];

    #[test]
    fn can_deserialise_lseg_type_bytes() {
        let lseg = PgLSeg::from_bytes(LINE_SEGMENT_BYTES).unwrap();
        assert_eq!(
            lseg,
            PgLSeg {
                start_x: 1.1,
                start_y: 2.2,
                end_x: 3.3,
                end_y: 4.4
            }
        )
    }

    // Text syntax 1: `[ ( start_x , start_y ) , ( end_x , end_y ) ]`.
    #[test]
    fn can_deserialise_lseg_type_str_first_syntax() {
        let lseg = PgLSeg::from_str("[( 1, 2), (3, 4 )]").unwrap();
        assert_eq!(
            lseg,
            PgLSeg {
                start_x: 1.,
                start_y: 2.,
                end_x: 3.,
                end_y: 4.
            }
        );
    }

    // Text syntax 2: `( ( start_x , start_y ) , ( end_x , end_y ) )`.
    #[test]
    fn can_deserialise_lseg_type_str_second_syntax() {
        let lseg = PgLSeg::from_str("(( 1, 2), (3, 4 ))").unwrap();
        assert_eq!(
            lseg,
            PgLSeg {
                start_x: 1.,
                start_y: 2.,
                end_x: 3.,
                end_y: 4.
            }
        );
    }

    // Text syntax 3: `( start_x , start_y ) , ( end_x , end_y )`.
    #[test]
    fn can_deserialise_lseg_type_str_third_syntax() {
        let lseg = PgLSeg::from_str("(1, 2), (3, 4 )").unwrap();
        assert_eq!(
            lseg,
            PgLSeg {
                start_x: 1.,
                start_y: 2.,
                end_x: 3.,
                end_y: 4.
            }
        );
    }

    // Text syntax 4: bare `start_x , start_y , end_x , end_y`.
    #[test]
    fn can_deserialise_lseg_type_str_fourth_syntax() {
        let lseg = PgLSeg::from_str("1, 2, 3, 4").unwrap();
        assert_eq!(
            lseg,
            PgLSeg {
                start_x: 1.,
                start_y: 2.,
                end_x: 3.,
                end_y: 4.
            }
        );
    }

    // Renamed from `can_deserialise_too_many_numbers`: the test asserts a parse
    // *failure*, matching the `cannot_…` naming used in `box_tests`.
    #[test]
    fn cannot_deserialise_too_many_numbers() {
        let input_str = "1, 2, 3, 4, 5";
        let lseg = PgLSeg::from_str(input_str);
        assert!(lseg.is_err());
        if let Err(err) = lseg {
            assert_eq!(
                err.to_string(),
                format!("error decoding LSEG: too many numbers inputted in {input_str}")
            )
        }
    }

    // Renamed from `can_deserialise_too_few_numbers` (asserts failure).
    #[test]
    fn cannot_deserialise_too_few_numbers() {
        let input_str = "1, 2, 3";
        let lseg = PgLSeg::from_str(input_str);
        assert!(lseg.is_err());
        if let Err(err) = lseg {
            assert_eq!(
                err.to_string(),
                format!("error decoding LSEG: could not get end_y from {input_str}")
            )
        }
    }

    // Renamed from `can_deserialise_invalid_numbers` (asserts failure).
    #[test]
    fn cannot_deserialise_invalid_numbers() {
        let input_str = "1, 2, 3, FOUR";
        let lseg = PgLSeg::from_str(input_str);
        assert!(lseg.is_err());
        if let Err(err) = lseg {
            assert_eq!(
                err.to_string(),
                format!("error decoding LSEG: could not get end_y from {input_str}")
            )
        }
    }

    #[test]
    fn can_deserialise_lseg_type_str_float() {
        let lseg = PgLSeg::from_str("(1.1, 2.2), (3.3, 4.4)").unwrap();
        assert_eq!(
            lseg,
            PgLSeg {
                start_x: 1.1,
                start_y: 2.2,
                end_x: 3.3,
                end_y: 4.4
            }
        );
    }

    #[test]
    fn can_serialise_lseg_type() {
        let lseg = PgLSeg {
            start_x: 1.1,
            start_y: 2.2,
            end_x: 3.3,
            end_y: 4.4,
        };
        assert_eq!(lseg.serialize_to_vec(), LINE_SEGMENT_BYTES,)
    }
}

View File

@@ -1,2 +1,4 @@
// Postgres geometric types; `box` is a reserved keyword in Rust, hence the raw identifier.
pub mod r#box;
pub mod line;
pub mod line_segment;
pub mod point;

View File

@@ -21,8 +21,10 @@
//! | [`PgLQuery`] | LQUERY |
//! | [`PgCiText`] | CITEXT<sup>1</sup> |
//! | [`PgCube`] | CUBE |
//! | [`PgPoint] | POINT |
//! | [`PgLine] | LINE |
//! | [`PgPoint`] | POINT |
//! | [`PgLine`] | LINE |
//! | [`PgLSeg`] | LSEG |
//! | [`PgBox`] | BOX |
//! | [`PgHstore`] | HSTORE |
//!
//! <sup>1</sup> SQLx generally considers `CITEXT` to be compatible with `String`, `&str`, etc.,
@@ -259,7 +261,9 @@ pub use array::PgHasArrayType;
pub use citext::PgCiText;
pub use cube::PgCube;
pub use geometry::line::PgLine;
pub use geometry::line_segment::PgLSeg;
pub use geometry::point::PgPoint;
pub use geometry::r#box::PgBox;
pub use hstore::PgHstore;
pub use interval::PgInterval;
pub use lquery::PgLQuery;

View File

@@ -41,13 +41,13 @@ impl<'a> PgRecordEncoder<'a> {
{
let ty = value.produces().unwrap_or_else(T::type_info);
if let PgType::DeclareWithName(name) = ty.0 {
match ty.0 {
// push a hole for this type ID
// to be filled in on query execution
self.buf.patch_type_by_name(&name);
} else {
PgType::DeclareWithName(name) => self.buf.patch_type_by_name(&name),
PgType::DeclareArrayOf(array) => self.buf.patch_array_type(array),
// write type id
self.buf.extend(&ty.0.oid().0.to_be_bytes());
pg_type => self.buf.extend(&pg_type.oid().0.to_be_bytes()),
}
self.buf.encode(value)?;