Extract a PgSequenceDecoder from ArrayDecoder and RecordDecoder and improve it to support mutual nesting

Ryan Leckey 2020-03-20 19:56:25 -07:00
parent 13997ce957
commit e42a995fd9
8 changed files with 694 additions and 370 deletions
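In short: arrays and records now share one sequence decoder, so a record containing an array (and an array containing records) decodes in both the text and binary formats. A minimal sketch of what that enables, adapted from the `it_decodes_text_nested_sequence` test added in this commit; the test name here is illustrative, and `PgSequenceDecoder` is crate-internal, so this only compiles inside the crate's own test module:

#[test]
fn nested_record_and_array_text() -> crate::Result<()> {
    // select ((1,array[false,true]),array[(1,4),(5,2)]);
    let data = r#"("(1,""{f,t}"")","{""(1,4)"",""(5,2)""}")"#;
    let mut decoder = PgSequenceDecoder::from(data);

    // a record field that is itself an array of booleans...
    assert_eq!(
        decoder.decode::<(i32, Vec<bool>)>()?,
        Some((1, vec![false, true]))
    );

    // ...followed by a field that is an array of records
    assert_eq!(
        decoder.decode::<Vec<(i32, i32)>>()?,
        Some(vec![(1, 4), (5, 2)])
    );

    Ok(())
}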


@@ -1,202 +1,69 @@
/// Encoding and decoding of Postgres arrays. Documentation of the byte format can be found [here](https://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/include/utils/array.h;h=7f7e744cb12bc872f628f90dad99dfdf074eb314;hb=master#l6)
use crate::decode::Decode;
use crate::decode::DecodeError;
//! Encoding and decoding of Postgres arrays.
use crate::database::Database;
use crate::decode::{Decode, DecodeOwned};
use crate::encode::Encode;
use crate::io::{Buf, BufMut};
use crate::postgres::database::Postgres;
use crate::types::HasSqlType;
use std::marker::PhantomData;
use crate::postgres::types::raw::{PgArrayDecoder, PgArrayEncoder};
use crate::postgres::PgValue;
use crate::types::Type;
impl<T> Encode<Postgres> for [T]
where
T: Encode<Postgres>,
Postgres: HasSqlType<T>,
T: Type<Postgres>,
{
fn encode(&self, buf: &mut Vec<u8>) {
let mut encoder = ArrayEncoder::new(buf);
let mut encoder = PgArrayEncoder::new(buf);
for item in self {
encoder.push(item);
encoder.encode(item);
}
encoder.finish();
}
}
impl<T> Encode<Postgres> for Vec<T>
where
[T]: Encode<Postgres>,
Postgres: HasSqlType<T>,
T: Encode<Postgres>,
T: Type<Postgres>,
{
fn encode(&self, buf: &mut Vec<u8>) {
self.as_slice().encode(buf)
}
}
impl<T> Decode<Postgres> for Vec<T>
impl<'de, T> Decode<'de, Postgres> for Vec<T>
where
T: Decode<Postgres>,
Postgres: HasSqlType<T>,
T: 'de,
T: DecodeOwned<Postgres>,
[T]: Type<Postgres>,
T: Type<Postgres>,
{
fn decode(buf: &[u8]) -> Result<Self, DecodeError> {
let decoder = ArrayDecoder::<T>::new(buf)?;
decoder.collect()
fn decode(value: Option<PgValue<'de>>) -> crate::Result<Self> {
PgArrayDecoder::<T>::new(value)?.collect()
}
}
type Order = byteorder::BigEndian;
struct ArrayDecoder<'a, T>
impl<T, DB> Type<DB> for Vec<Option<T>>
where
T: Decode<Postgres>,
Postgres: HasSqlType<T>,
DB: Database,
[T]: Type<DB>,
{
left: usize,
did_error: bool,
buf: &'a [u8],
phantom: PhantomData<T>,
}
impl<T> ArrayDecoder<'_, T>
where
T: Decode<Postgres>,
Postgres: HasSqlType<T>,
{
fn new(mut buf: &[u8]) -> Result<ArrayDecoder<T>, DecodeError> {
let ndim = buf.get_i32::<Order>()?;
let dataoffset = buf.get_i32::<Order>()?;
let elemtype = buf.get_i32::<Order>()?;
if ndim == 0 {
return Ok(ArrayDecoder {
left: 0,
did_error: false,
buf,
phantom: PhantomData,
});
}
assert_eq!(ndim, 1, "only arrays of dimension 1 are supported");
let dimensions = buf.get_i32::<Order>()?;
let lower_bnds = buf.get_i32::<Order>()?;
assert_eq!(dataoffset, 0, "arrays with [null bitmap] are not supported");
assert_eq!(
elemtype,
<Postgres as HasSqlType<T>>::type_info().id.0 as i32,
"mismatched array element type"
);
assert_eq!(lower_bnds, 1);
Ok(ArrayDecoder {
left: dimensions as usize,
did_error: false,
buf,
phantom: PhantomData,
})
}
/// Decodes the next element without worrying about how many are left, or whether a previous call errored
fn decode_next_element(&mut self) -> Result<T, DecodeError> {
let len = self.buf.get_i32::<Order>()?;
let bytes = self.buf.get_bytes(len as usize)?;
Decode::decode(bytes)
#[inline]
fn type_info() -> DB::TypeInfo {
<[T] as Type<DB>>::type_info()
}
}
impl<T> Iterator for ArrayDecoder<'_, T>
impl<T, DB> Type<DB> for [Option<T>]
where
T: Decode<Postgres>,
Postgres: HasSqlType<T>,
DB: Database,
[T]: Type<DB>,
{
type Item = Result<T, DecodeError>;
fn next(&mut self) -> Option<Result<T, DecodeError>> {
if self.did_error || self.left == 0 {
return None;
}
self.left -= 1;
let decoded = self.decode_next_element();
self.did_error = decoded.is_err();
Some(decoded)
}
}
struct ArrayEncoder<'a, T>
where
T: Encode<Postgres>,
Postgres: HasSqlType<T>,
{
count: usize,
len_start_index: usize,
buf: &'a mut Vec<u8>,
phantom: PhantomData<T>,
}
impl<T> ArrayEncoder<'_, T>
where
T: Encode<Postgres>,
Postgres: HasSqlType<T>,
{
fn new(buf: &mut Vec<u8>) -> ArrayEncoder<T> {
let ty = <Postgres as HasSqlType<T>>::type_info();
// ndim
buf.put_i32::<Order>(1);
// dataoffset
buf.put_i32::<Order>(0);
// elemtype
buf.put_i32::<Order>(ty.id.0 as i32);
let len_start_index = buf.len();
// dimensions
buf.put_i32::<Order>(0);
// lower_bnds
buf.put_i32::<Order>(1);
ArrayEncoder {
count: 0,
len_start_index,
buf,
phantom: PhantomData,
}
}
fn push(&mut self, item: &T) {
// Allocate space for the length of the encoded element up front
let el_len_index = self.buf.len();
self.buf.put_i32::<Order>(0);
// Allocate the element itself
let el_start = self.buf.len();
Encode::encode(item, self.buf);
let el_end = self.buf.len();
// Now we know the actual length of the encoded element
let el_len = el_end - el_start;
// And we can now go back and update the length
self.buf[el_len_index..el_start].copy_from_slice(&(el_len as i32).to_be_bytes());
self.count += 1;
}
fn update_len(&mut self) {
const I32_SIZE: usize = std::mem::size_of::<i32>();
let size_bytes = (self.count as i32).to_be_bytes();
self.buf[self.len_start_index..self.len_start_index + I32_SIZE]
.copy_from_slice(&size_bytes);
}
}
impl<T> Drop for ArrayEncoder<'_, T>
where
T: Encode<Postgres>,
Postgres: HasSqlType<T>,
{
fn drop(&mut self) {
self.update_len();
#[inline]
fn type_info() -> DB::TypeInfo {
<[T] as Type<DB>>::type_info()
}
}


@@ -64,9 +64,10 @@ mod bool;
mod bytes;
mod float;
mod int;
mod record;
mod str;
// types we want to integration test but don't want to stabilize
// internal types used by other types to encode or decode related formats
#[doc(hidden)]
pub mod raw;


@@ -0,0 +1,251 @@
use crate::decode::DecodeOwned;
use crate::encode::{Encode, IsNull};
use crate::io::{Buf, BufMut};
use crate::postgres::types::raw::sequence::PgSequenceDecoder;
use crate::postgres::{PgValue, Postgres};
use crate::types::Type;
use byteorder::BE;
use std::convert::TryInto;
use std::marker::PhantomData;
// https://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/include/utils/array.h;h=7f7e744cb12bc872f628f90dad99dfdf074eb314;hb=master#l6
// https://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/backend/utils/adt/arrayfuncs.c;h=7a4a5aaa86dc1c8cffa2d899c89511dc317d485b;hb=master#l1547
pub(crate) struct PgArrayEncoder<'enc, T> {
count: usize,
len_start_index: usize,
buf: &'enc mut Vec<u8>,
phantom: PhantomData<T>,
}
impl<'enc, T> PgArrayEncoder<'enc, T>
where
T: Encode<Postgres>,
T: Type<Postgres>,
{
pub(crate) fn new(buf: &'enc mut Vec<u8>) -> Self {
let ty = <T as Type<Postgres>>::type_info();
// ndim
buf.put_i32::<BE>(1);
// dataoffset
buf.put_i32::<BE>(0);
// elemtype
buf.put_i32::<BE>(ty.id.0 as i32);
let len_start_index = buf.len();
// dimensions
buf.put_i32::<BE>(0);
// lower_bnds
buf.put_i32::<BE>(1);
Self {
count: 0,
len_start_index,
buf,
phantom: PhantomData,
}
}
pub(crate) fn encode(&mut self, item: T) {
// Allocate space for the length of the encoded element up front
let el_len_index = self.buf.len();
self.buf.put_i32::<BE>(0);
// Allocate the element itself
let el_start = self.buf.len();
if let IsNull::Yes = Encode::<Postgres>::encode_nullable(&item, self.buf) {
self.buf[el_len_index..el_start].copy_from_slice(&(-1_i32).to_be_bytes());
} else {
let el_end = self.buf.len();
// Now we know the actual length of the encoded element
let el_len = el_end - el_start;
// And we can now go back and update the length
self.buf[el_len_index..el_start].copy_from_slice(&(el_len as i32).to_be_bytes());
}
self.count += 1;
}
pub(crate) fn finish(&mut self) {
const I32_SIZE: usize = std::mem::size_of::<i32>();
let size_bytes = (self.count as i32).to_be_bytes();
self.buf[self.len_start_index..self.len_start_index + I32_SIZE]
.copy_from_slice(&size_bytes);
}
}
pub(crate) struct PgArrayDecoder<'de, T> {
inner: PgSequenceDecoder<'de>,
phantom: PhantomData<T>,
}
impl<'de, T> PgArrayDecoder<'de, T>
where
T: DecodeOwned<Postgres>,
T: Type<Postgres>,
{
pub(crate) fn new(value: Option<PgValue<'de>>) -> crate::Result<Self> {
let mut value = value.try_into()?;
match value {
PgValue::Binary(ref mut buf) => {
// number of dimensions of the array
let ndim = buf.get_i32::<BE>()?;
if ndim == 0 {
return Ok(Self {
inner: PgSequenceDecoder::new(PgValue::Binary(&[]), false),
phantom: PhantomData,
});
}
if ndim != 1 {
return Err(decode_err!(
"encountered an array of {} dimensions; only one-dimensional arrays are supported",
ndim
));
}
// offset to stored data
// this doesn't matter, as the data always follows the header
let _dataoffset = buf.get_i32::<BE>()?;
// TODO: Validate element type with whatever framework is put in place to do so
// As a reminder, we have no way to do this yet while still accounting for [compatible]
// types.
// element type OID
let _elemtype = buf.get_i32::<BE>()?;
// length of each array axis
let _dimensions = buf.get_i32::<BE>()?;
// lower boundary of each dimension
let lower_bnds = buf.get_i32::<BE>()?;
if lower_bnds != 1 {
return Err(decode_err!(
"encountered an array with a lower bound of {} in the first dimension; only arrays starting at one are supported",
lower_bnds
));
}
}
PgValue::Text(_) => {}
}
Ok(Self {
inner: PgSequenceDecoder::new(value, false),
phantom: PhantomData,
})
}
fn decode(&mut self) -> crate::Result<Option<T>> {
self.inner.decode()
}
}
impl<'de, T> Iterator for PgArrayDecoder<'de, T>
where
T: 'de,
T: DecodeOwned<Postgres>,
T: Type<Postgres>,
{
type Item = crate::Result<T>;
#[inline]
fn next(&mut self) -> Option<crate::Result<T>> {
self.decode().transpose()
}
}
#[cfg(test)]
mod tests {
use super::PgArrayDecoder;
use super::PgArrayEncoder;
use crate::postgres::PgValue;
const BUF_BINARY_I32: &[u8] = b"\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x17\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00\x04\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x04";
#[test]
fn it_encodes_i32() {
let mut buf = Vec::new();
let mut encoder = PgArrayEncoder::new(&mut buf);
for val in &[1_i32, 2, 3, 4] {
encoder.encode(*val);
}
encoder.finish();
assert_eq!(buf, BUF_BINARY_I32);
}
#[test]
fn it_decodes_text_i32() -> crate::Result<()> {
let s = "{1,152,-12412}";
let mut decoder = PgArrayDecoder::<i32>::new(Some(PgValue::Text(s)))?;
assert_eq!(decoder.decode()?, Some(1));
assert_eq!(decoder.decode()?, Some(152));
assert_eq!(decoder.decode()?, Some(-12412));
assert_eq!(decoder.decode()?, None);
Ok(())
}
#[test]
fn it_decodes_text_str() -> crate::Result<()> {
let s = "{\"\",\"\\\"\"}";
let mut decoder = PgArrayDecoder::<String>::new(Some(PgValue::Text(s)))?;
assert_eq!(decoder.decode()?, Some("".to_string()));
assert_eq!(decoder.decode()?, Some("\"".to_string()));
assert_eq!(decoder.decode()?, None);
Ok(())
}
#[test]
fn it_decodes_binary_nulls() -> crate::Result<()> {
let mut decoder = PgArrayDecoder::<Option<bool>>::new(Some(PgValue::Binary(
b"\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x10\x00\x00\x00\x04\x00\x00\x00\x01\xff\xff\xff\xff\x00\x00\x00\x01\x01\xff\xff\xff\xff\x00\x00\x00\x01\x00"
)))?;
assert_eq!(decoder.decode()?, Some(None));
assert_eq!(decoder.decode()?, Some(Some(true)));
assert_eq!(decoder.decode()?, Some(None));
assert_eq!(decoder.decode()?, Some(Some(false)));
Ok(())
}
#[test]
fn it_decodes_binary_i32() -> crate::Result<()> {
let mut decoder = PgArrayDecoder::<i32>::new(Some(PgValue::Binary(BUF_BINARY_I32)))?;
let val_1 = decoder.decode()?;
let val_2 = decoder.decode()?;
let val_3 = decoder.decode()?;
let val_4 = decoder.decode()?;
assert_eq!(val_1, Some(1));
assert_eq!(val_2, Some(2));
assert_eq!(val_3, Some(3));
assert_eq!(val_4, Some(4));
assert!(decoder.decode()?.is_none());
Ok(())
}
}
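For reference, here is how I read `BUF_BINARY_I32` above against the header fields that `PgArrayEncoder` writes; the OID annotation (23 = `int4`) comes from the Postgres catalog, not from this diff:

// Field-by-field reading of BUF_BINARY_I32 (all values are big-endian i32):
//
//   00 00 00 01   ndim       = 1    one-dimensional array
//   00 00 00 00   dataoffset = 0    no null bitmap
//   00 00 00 17   elemtype   = 23   element OID (int4)
//   00 00 00 04   dimensions = 4    element count, patched in by finish()
//   00 00 00 01   lower_bnds = 1    the array is 1-based
//
// then one (length, value) pair per element:
//
//   00 00 00 04  00 00 00 01   len = 4, value = 1_i32
//   00 00 00 04  00 00 00 02   len = 4, value = 2_i32
//   ... and so on for 3 and 4.
//
// A NULL element is written as length -1 (ff ff ff ff) with no value bytes,
// which is what it_decodes_binary_nulls exercises.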


@@ -1,5 +1,12 @@
mod array;
mod numeric;
mod record;
mod sequence;
pub(crate) use array::{PgArrayDecoder, PgArrayEncoder};
// Used in integration tests
pub use numeric::{PgNumeric, PgNumericSign};
// Used in #[derive(Type)] for structs
pub use record::{PgRecordDecoder, PgRecordEncoder};


@@ -1,8 +1,8 @@
use crate::decode::Decode;
use crate::decode::DecodeOwned;
use crate::encode::{Encode, IsNull};
use crate::io::Buf;
use crate::postgres::protocol::TypeId;
use crate::postgres::{PgTypeInfo, PgValue, Postgres};
use crate::postgres::types::raw::sequence::PgSequenceDecoder;
use crate::postgres::{PgValue, Postgres};
use crate::types::Type;
use byteorder::BigEndian;
use std::convert::TryInto;
@@ -43,7 +43,6 @@ impl<'a> PgRecordEncoder<'a> {
let start = self.buf.len();
if let IsNull::Yes = value.encode_nullable(self.buf) {
// replaces zeros with actual length
self.buf[start - 4..start].copy_from_slice(&(-1_i32).to_be_bytes());
} else {
let end = self.buf.len();
@@ -60,216 +59,34 @@
}
}
pub struct PgRecordDecoder<'de> {
value: PgValue<'de>,
}
pub struct PgRecordDecoder<'de>(PgSequenceDecoder<'de>);
impl<'de> PgRecordDecoder<'de> {
pub fn new(value: Option<PgValue<'de>>) -> crate::Result<Self> {
let mut value: PgValue = value.try_into()?;
match value {
PgValue::Text(_) => {}
PgValue::Binary(ref mut buf) => {
let _expected_len = buf.get_u32::<BigEndian>()?;
}
PgValue::Text(ref mut s) => {
// remove outer ( ... )
*s = &s[1..(s.len() - 1)];
}
}
Ok(Self { value })
Ok(Self(PgSequenceDecoder::new(value, true)))
}
#[inline]
pub fn decode<T>(&mut self) -> crate::Result<T>
where
T: Decode<'de, Postgres>,
T: DecodeOwned<Postgres>,
T: Type<Postgres>,
{
match self.value {
PgValue::Binary(ref mut buf) => {
// TODO: We should fail if this type is not _compatible_; but
// I want to make sure we handle this _and_ the outer level
// type mismatch errors at the same time
let _oid = buf.get_u32::<BigEndian>()?;
let len = buf.get_i32::<BigEndian>()? as isize;
let value = if len < 0 {
T::decode(None)?
} else {
let value_buf = &buf[..(len as usize)];
*buf = &buf[(len as usize)..];
T::decode(Some(PgValue::Binary(value_buf)))?
};
Ok(value)
}
PgValue::Text(ref mut s) => {
let mut in_quotes = false;
let mut in_escape = false;
let mut is_quoted = false;
let mut prev_ch = '\0';
let mut eos = false;
let mut prev_index = 0;
let mut value = String::new();
let index = 'outer: loop {
let mut iter = s.char_indices();
while let Some((index, ch)) = iter.next() {
match ch {
',' if !in_quotes => {
break 'outer Some(prev_index);
}
',' if prev_ch == '\0' => {
break 'outer None;
}
'"' if prev_ch == '"' && index != 1 => {
// Quotes are escaped with another quote
in_quotes = false;
value.push('"');
}
'"' if in_quotes => {
in_quotes = false;
}
'\'' if in_escape => {
in_escape = false;
value.push('\'');
}
'"' if in_escape => {
in_escape = false;
value.push('"');
}
'\\' if in_escape => {
in_escape = false;
value.push('\\');
}
'\\' => {
in_escape = true;
}
'"' => {
is_quoted = true;
in_quotes = true;
}
ch => {
value.push(ch);
}
}
prev_index = index;
prev_ch = ch;
}
eos = true;
break 'outer if prev_ch == '\0' {
// NULL values have zero characters
// Empty strings are ""
None
} else {
Some(prev_index)
};
};
let value = index.map(|index| {
let mut s = &s[..=index];
if is_quoted {
s = &s[1..s.len() - 1];
}
PgValue::Text(s)
});
let value = T::decode(value)?;
if !eos {
*s = &s[index.unwrap_or(0) + 2..];
} else {
*s = "";
}
Ok(value)
}
}
self.0
.decode()?
.ok_or_else(|| decode_err!("no field `{0}` on {0}-element record", self.0.len()))
}
}
macro_rules! impl_pg_record_for_tuple {
($( $idx:ident : $T:ident ),+) => {
impl<$($T,)+> Type<Postgres> for ($($T,)+) {
#[inline]
fn type_info() -> PgTypeInfo {
PgTypeInfo {
id: TypeId(2249),
name: Some("RECORD".into()),
}
}
}
impl<'de, $($T,)+> Decode<'de, Postgres> for ($($T,)+)
where
$($T: crate::types::Type<Postgres>,)+
$($T: crate::decode::Decode<'de, Postgres>,)+
{
fn decode(value: Option<PgValue<'de>>) -> crate::Result<Self> {
let mut decoder = PgRecordDecoder::new(value)?;
$(let $idx: $T = decoder.decode()?;)+
Ok(($($idx,)+))
}
}
};
}
impl_pg_record_for_tuple!(_1: T1);
impl_pg_record_for_tuple!(_1: T1, _2: T2);
impl_pg_record_for_tuple!(_1: T1, _2: T2, _3: T3);
impl_pg_record_for_tuple!(_1: T1, _2: T2, _3: T3, _4: T4);
impl_pg_record_for_tuple!(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5);
impl_pg_record_for_tuple!(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6);
impl_pg_record_for_tuple!(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7);
impl_pg_record_for_tuple!(
_1: T1,
_2: T2,
_3: T3,
_4: T4,
_5: T5,
_6: T6,
_7: T7,
_8: T8
);
impl_pg_record_for_tuple!(
_1: T1,
_2: T2,
_3: T3,
_4: T4,
_5: T5,
_6: T6,
_7: T7,
_8: T8,
_9: T9
);
#[test]
fn test_encode_field() {
let value = "Foo Bar";
@@ -301,7 +118,7 @@ fn test_decode_field() {
let mut encoder = PgRecordEncoder::new(&mut buf);
encoder.encode(&value);
let mut buf = buf.as_slice();
let buf = buf.as_slice();
let mut decoder = PgRecordDecoder::new(Some(PgValue::Binary(buf))).unwrap();
let value_decoded: String = decoder.decode().unwrap();


@@ -0,0 +1,201 @@
use crate::decode::DecodeOwned;
use crate::io::Buf;
use crate::postgres::{PgValue, Postgres};
use crate::types::Type;
use byteorder::BigEndian;
pub(crate) struct PgSequenceDecoder<'de> {
value: PgValue<'de>,
len: usize,
mixed: bool,
}
impl<'de> PgSequenceDecoder<'de> {
pub(crate) fn new(mut value: PgValue<'de>, mixed: bool) -> Self {
match value {
PgValue::Binary(_) => {
// assume that this has already been adjusted by the caller, as
// tuples and arrays have very different headers
}
PgValue::Text(ref mut s) => {
// remove the outer ( ... ) or { ... }
*s = &s[1..(s.len() - 1)];
}
}
Self {
value,
mixed,
len: 0,
}
}
pub(crate) fn len(&self) -> usize {
self.len
}
pub(crate) fn decode<T>(&mut self) -> crate::Result<Option<T>>
where
T: DecodeOwned<Postgres>,
T: Type<Postgres>,
{
match self.value {
PgValue::Binary(ref mut buf) => {
if buf.is_empty() {
return Ok(None);
}
// mixed sequences can contain values of many different types
// the OID of the type is encoded next to each value
if self.mixed {
// TODO: We should fail if this type is not _compatible_; but
// I want to make sure we handle this _and_ the outer level
// type mismatch errors at the same time
let _oid = buf.get_u32::<BigEndian>()?;
}
let len = buf.get_i32::<BigEndian>()? as isize;
let value = if len < 0 {
T::decode(None)?
} else {
let value_buf = &buf[..(len as usize)];
*buf = &buf[(len as usize)..];
T::decode(Some(PgValue::Binary(value_buf)))?
};
self.len += 1;
Ok(Some(value))
}
PgValue::Text(ref mut s) => {
if s.is_empty() {
return Ok(None);
}
let mut value = String::new();
let mut in_quotes = false;
let mut in_escape = false;
let mut in_maybe_quote_escape = false;
let end: Option<usize> = 'outer: loop {
let mut iter = s.char_indices().peekable();
while let Some((index, ch)) = iter.next() {
if in_maybe_quote_escape {
if ch == '"' {
// double quote escape
value.push('"');
in_maybe_quote_escape = false;
continue;
} else {
// that was actually a quote
in_quotes = !in_quotes;
}
}
match ch {
',' if !in_quotes => break 'outer Some(index),
'\\' if !in_escape => {
in_escape = true;
}
_ if in_escape => {
// special escape sequences only matter for string parsing
// we will only ever receive stuff like "\\b", which we translate
// to "\b"
value.push(ch);
// the escape sequence has been resolved; move on to the next character
in_escape = false;
continue;
}
'"' if in_quotes => {
in_maybe_quote_escape = true;
}
'"' => {
in_quotes = !in_quotes;
}
_ => value.push(ch),
}
}
// Reached the end of the string
break None;
};
let value = T::decode(if end == Some(0) {
None
} else if !self.mixed && value == "NULL" {
// Yes, in arrays the text encoding of a NULL is just NULL
None
} else {
Some(PgValue::Text(&value))
})?;
*s = if let Some(end) = end {
&s[end + 1..]
} else {
""
};
self.len += 1;
Ok(Some(value))
}
}
}
}
impl<'de> From<&'de str> for PgSequenceDecoder<'de> {
fn from(s: &'de str) -> Self {
Self::new(PgValue::Text(s), false)
}
}
#[cfg(test)]
mod tests {
use super::PgSequenceDecoder;
#[test]
fn it_decodes_text_number() -> crate::Result<()> {
// select (10,20,-220);
let data = "(10,20,-220)";
let mut decoder = PgSequenceDecoder::from(data);
assert_eq!(decoder.decode::<i32>()?, Some(10_i32));
assert_eq!(decoder.decode::<i32>()?, Some(20_i32));
assert_eq!(decoder.decode::<i32>()?, Some(-220_i32));
assert_eq!(decoder.decode::<i32>()?, None);
Ok(())
}
#[test]
fn it_decodes_text_nested_sequence() -> crate::Result<()> {
// select ((1,array[false,true]),array[(1,4),(5,2)]);
let data = r#"("(1,""{f,t}"")","{""(1,4)"",""(5,2)""}")"#;
let mut decoder = PgSequenceDecoder::from(data);
assert_eq!(
decoder.decode::<(i32, Vec<bool>)>()?,
Some((1, vec![false, true]))
);
assert_eq!(
decoder.decode::<Vec<(i32, i32)>>()?,
Some(vec![(1_i32, 4_i32), (5_i32, 2_i32),])
);
Ok(())
}
}
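To restate the two code paths above in one place: in the binary branch each element is an i32 length (-1 meaning SQL NULL) followed by that many bytes, with a leading element OID only when `mixed` (record) decoding is in effect; the text branch splits on commas outside quotes and unescapes as it goes. A rough worked example of the text rules, with illustrative input that is not taken from the tests:

// Assume new() has already stripped the outer `{ ... }` / `( ... )`.
//
//   input (array form, mixed == false):   1,"a,b",\",NULL
//
//   `1`       unquoted                                -> decoded from "1"
//   `"a,b"`   a comma inside quotes does not split    -> "a,b"
//   `\"`      backslash escape                        -> a literal `"`
//   `NULL`    bare NULL in a non-mixed sequence       -> None (SQL NULL)
//
// Inside quotes, a doubled quote ("") also yields a literal `"` via the
// in_maybe_quote_escape path. An empty element, e.g. the first field of
// `(,1)` in record form, maps to NULL through the `end == Some(0)` check.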


@@ -0,0 +1,91 @@
use crate::decode::{Decode, DecodeOwned};
use crate::postgres::protocol::TypeId;
use crate::postgres::row::PgValue;
use crate::postgres::types::raw::PgRecordDecoder;
use crate::postgres::types::PgTypeInfo;
use crate::postgres::Postgres;
use crate::types::Type;
macro_rules! impl_pg_record_for_tuple {
($( $idx:ident : $T:ident ),+) => {
impl<$($T,)+> Type<Postgres> for ($($T,)+) {
#[inline]
fn type_info() -> PgTypeInfo {
PgTypeInfo {
id: TypeId(2249),
name: Some("RECORD".into()),
}
}
}
impl<$($T,)+> Type<Postgres> for [($($T,)+)] {
#[inline]
fn type_info() -> PgTypeInfo {
PgTypeInfo {
id: TypeId(2287),
name: Some("RECORD[]".into()),
}
}
}
impl<$($T,)+> Type<Postgres> for Vec<($($T,)+)> {
#[inline]
fn type_info() -> PgTypeInfo {
<[($($T,)+)] as Type<Postgres>>::type_info()
}
}
impl<'de, $($T,)+> Decode<'de, Postgres> for ($($T,)+)
where
$($T: 'de,)+
$($T: Type<Postgres>,)+
$($T: DecodeOwned<Postgres>,)+
{
fn decode(value: Option<PgValue<'de>>) -> crate::Result<Self> {
let mut decoder = PgRecordDecoder::new(value)?;
$(let $idx: $T = decoder.decode()?;)+
Ok(($($idx,)+))
}
}
};
}
impl_pg_record_for_tuple!(_1: T1);
impl_pg_record_for_tuple!(_1: T1, _2: T2);
impl_pg_record_for_tuple!(_1: T1, _2: T2, _3: T3);
impl_pg_record_for_tuple!(_1: T1, _2: T2, _3: T3, _4: T4);
impl_pg_record_for_tuple!(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5);
impl_pg_record_for_tuple!(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6);
impl_pg_record_for_tuple!(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7);
impl_pg_record_for_tuple!(
_1: T1,
_2: T2,
_3: T3,
_4: T4,
_5: T5,
_6: T6,
_7: T7,
_8: T8
);
impl_pg_record_for_tuple!(
_1: T1,
_2: T2,
_3: T3,
_4: T4,
_5: T5,
_6: T6,
_7: T7,
_8: T8,
_9: T9
);


@@ -282,11 +282,100 @@ async fn test_unprepared_anonymous_record() -> anyhow::Result<()> {
Ok(())
}
test!(postgres_int_vec: Vec<i32>: "ARRAY[1, 2, 3]::int[]" == vec![1, 2, 3i32], "ARRAY[3, 292, 15, 2, 3]::int[]" == vec![3, 292, 15, 2, 3], "ARRAY[7, 6, 5, 4, 3, 2, 1]::int[]" == vec![7, 6, 5, 4, 3, 2, 1], "ARRAY[]::int[]" == vec![] as Vec<i32>);
test!(postgres_string_vec: Vec<String>: "ARRAY['Hello', 'world', 'friend']::text[]" == vec!["Hello", "world", "friend"]);
test!(postgres_bool_vec: Vec<bool>: "ARRAY[true, true, false, true]::bool[]" == vec![true, true, false, true]);
test!(postgres_real_vec: Vec<f32>: "ARRAY[0.0, 1.0, 3.14, 1.234, -0.002, 100000.0]::real[]" == vec![0.0, 1.0, 3.14, 1.234, -0.002, 100000.0_f32]);
test!(postgres_double_vec: Vec<f64>: "ARRAY[0.0, 1.0, 3.14, 1.234, -0.002, 100000.0]::double precision[]" == vec![0.0, 1.0, 3.14, 1.234, -0.002, 100000.0_f64]);
// This is trying to break the decoder, given my complete lack of understanding of null bitmaps
// for array/record decoding. The docs in pg are either wrong or I'm reading the wrong docs.
test_type!(lots_of_nulls_vec(Postgres, Vec<Option<bool>>,
"ARRAY[NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, true]::bool[]" == {
vec![None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, Some(true)]
},
));
test_type!(bool_vec(Postgres, Vec<bool>,
"ARRAY[true, true, false, true]::bool[]" == vec![true, true, false, true],
));
test_type!(bool_opt_vec(Postgres, Vec<Option<bool>>,
"ARRAY[NULL, true, NULL, false]::bool[]" == vec![None, Some(true), None, Some(false)],
));
test_type!(f32_vec(Postgres, Vec<f32>,
"ARRAY[0.0, 1.0, 3.14, 1.234, -0.002, 100000.0]::real[]" == vec![0.0_f32, 1.0, 3.14, 1.234, -0.002, 100000.0],
));
test_type!(f64_vec(Postgres, Vec<f64>,
"ARRAY[0.0, 1.0, 3.14, 1.234, -0.002, 100000.0]::double precision[]" == vec![0.0_f64, 1.0, 3.14, 1.234, -0.002, 100000.0],
));
test_type!(i16_vec(Postgres, Vec<i16>,
"ARRAY[1, 152, -12412]::smallint[]" == vec![1_i16, 152, -12412],
"ARRAY[]::smallint[]" == Vec::<i16>::new(),
"ARRAY[0]::smallint[]" == vec![0_i16]
));
test_type!(string_vec(Postgres, Vec<String>,
"ARRAY['', '\"']::text[]"
== vec!["".to_string(), "\"".to_string()],
"ARRAY['Hello, World', '', 'Goodbye']::text[]"
== vec!["Hello, World".to_string(), "".to_string(), "Goodbye".to_string()],
));
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn test_unprepared_anonymous_record_arrays() -> anyhow::Result<()> {
let mut conn = new::<Postgres>().await?;
// record of arrays
let mut cursor = conn.fetch("SELECT (ARRAY['', '\"']::text[], false)");
let row = cursor.next().await?.unwrap();
let rec: (Vec<String>, bool) = row.get(0);
assert_eq!(rec, (vec!["".to_string(), "\"".to_string()], false));
// array of records
let mut cursor = conn.fetch("SELECT ARRAY[('','\"'), (NULL,'')]::record[]");
let row = cursor.next().await?.unwrap();
let rec: Vec<(Option<String>, String)> = row.get(0);
assert_eq!(
rec,
vec![
(Some(String::from("")), String::from("\"")),
(None, String::from(""))
]
);
Ok(())
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn test_prepared_anonymous_record_arrays() -> anyhow::Result<()> {
let mut conn = new::<Postgres>().await?;
// record of arrays
let rec: ((Vec<String>, bool),) = sqlx::query_as("SELECT (ARRAY['', '\"']::text[], false)")
.fetch_one(&mut conn)
.await?;
assert_eq!(rec.0, (vec!["".to_string(), "\"".to_string()], false));
// array of records
let rec: (Vec<(Option<String>, String)>,) =
sqlx::query_as("SELECT ARRAY[('','\"'), (NULL,'')]::record[]")
.fetch_one(&mut conn)
.await?;
assert_eq!(
rec.0,
vec![
(Some(String::from("")), String::from("\"")),
(None, String::from(""))
]
);
Ok(())
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]