Finish up JSON/JSONB support for Postgres

* implement for &serde_json::RawValue

 * sqlx::types::Json<T> is a common type that all JSON-compatible databases can implement for,
   postgres implements Json<T> as JSONB

 * sqlx::postgres::types::PgJson<T> resolves to JSON

 * sqlx::postgres::types::PgJsonb<T> resolves to JSONB
This commit is contained in:
Ryan Leckey
2020-03-21 01:16:34 -07:00
parent b7e53e885a
commit 05d8ac2747
13 changed files with 324 additions and 174 deletions

1
Cargo.lock generated
View File

@@ -1657,6 +1657,7 @@ dependencies = [
"futures 0.3.4",
"paste",
"serde",
"serde_json",
"sqlx-core 0.3.0-alpha.1",
"sqlx-macros 0.3.0-alpha.1",
"sqlx-test",

View File

@@ -67,6 +67,7 @@ trybuild = "1.0.24"
sqlx-test = { path = "./sqlx-test" }
paste = "0.1.7"
serde = { version = "1.0", features = [ "derive" ] }
serde_json = "1.0.48"
[[test]]
name = "postgres-macros"

View File

@@ -58,7 +58,7 @@ tokio = { version = "0.2.13", default-features = false, features = [ "dns", "fs"
url = { version = "2.1.1", default-features = false }
uuid = { version = "0.8.1", default-features = false, optional = true, features = [ "std" ] }
serde = { version = "1.0", features = [ "derive" ], optional = true }
serde_json = { version = "1.0", optional = true }
serde_json = { version = "1.0", features = [ "raw_value" ], optional = true }
# <https://github.com/jgallagher/rusqlite/tree/master/libsqlite3-sys>
[dependencies.libsqlite3-sys]

View File

@@ -9,9 +9,6 @@ pub use listen::{PgListener, PgNotification};
pub use row::{PgRow, PgValue};
pub use types::PgTypeInfo;
#[cfg(feature = "json")]
pub use types::{Json, Jsonb};
mod arguments;
mod connection;
mod cursor;

View File

@@ -1,38 +1,57 @@
use crate::decode::{Decode, DecodeError};
use crate::decode::Decode;
use crate::encode::Encode;
use crate::io::{Buf, BufMut};
use crate::postgres::protocol::TypeId;
use crate::postgres::types::PgTypeInfo;
use crate::postgres::Postgres;
use crate::types::HasSqlType;
use crate::postgres::types::{PgJsonb, PgTypeInfo};
use crate::postgres::{PgValue, Postgres};
use crate::types::{Json, Type};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use serde_json::value::RawValue as JsonRawValue;
use serde_json::Value as JsonValue;
impl HasSqlType<Value> for Postgres {
// <https://www.postgresql.org/docs/12/datatype-json.html>
// In general, most applications should prefer to store JSON data as jsonb,
// unless there are quite specialized needs, such as legacy assumptions
// about ordering of object keys.
impl Type<Postgres> for JsonValue {
fn type_info() -> PgTypeInfo {
PgTypeInfo::new(TypeId::JSON)
<PgJsonb<Self> as Type<Postgres>>::type_info()
}
}
impl Encode<Postgres> for Value {
impl Encode<Postgres> for JsonValue {
fn encode(&self, buf: &mut Vec<u8>) {
Json(self).encode(buf)
PgJsonb(self).encode(buf)
}
}
impl Decode<Postgres> for Value {
fn decode(buf: &[u8]) -> Result<Self, DecodeError> {
let Json(item) = Decode::decode(buf)?;
Ok(item)
impl<'de> Decode<'de, Postgres> for JsonValue {
fn decode(value: Option<PgValue<'de>>) -> crate::Result<Postgres, Self> {
<PgJsonb<Self> as Decode<Postgres>>::decode(value).map(|item| item.0)
}
}
#[derive(Debug, PartialEq)]
pub struct Json<T>(pub T);
impl<T> HasSqlType<Json<T>> for Postgres {
impl Type<Postgres> for &'_ JsonRawValue {
fn type_info() -> PgTypeInfo {
PgTypeInfo::new(TypeId::JSON)
<PgJsonb<Self> as Type<Postgres>>::type_info()
}
}
impl Encode<Postgres> for &'_ JsonRawValue {
fn encode(&self, buf: &mut Vec<u8>) {
PgJsonb(self).encode(buf)
}
}
impl<'de> Decode<'de, Postgres> for &'de JsonRawValue {
fn decode(value: Option<PgValue<'de>>) -> crate::Result<Postgres, Self> {
<PgJsonb<Self> as Decode<Postgres>>::decode(value).map(|item| item.0)
}
}
impl<T> Type<Postgres> for Json<T> {
fn type_info() -> PgTypeInfo {
<PgJsonb<T> as Type<Postgres>>::type_info()
}
}
@@ -41,53 +60,16 @@ where
T: Serialize,
{
fn encode(&self, buf: &mut Vec<u8>) {
serde_json::to_writer(buf, &self.0)
.expect("failed to serialize json for encoding to database");
PgJsonb(&self.0).encode(buf)
}
}
impl<T> Decode<Postgres> for Json<T>
impl<'de, T> Decode<'de, Postgres> for Json<T>
where
T: for<'a> Deserialize<'a>,
T: 'de,
T: Deserialize<'de>,
{
fn decode(buf: &[u8]) -> Result<Self, DecodeError> {
let item = serde_json::from_slice(buf)?;
Ok(Json(item))
}
}
#[derive(Debug, PartialEq)]
pub struct Jsonb<T>(pub T);
impl<T> HasSqlType<Jsonb<T>> for Postgres {
fn type_info() -> PgTypeInfo {
PgTypeInfo::new(TypeId::JSONB)
}
}
impl<T> Encode<Postgres> for Jsonb<T>
where
T: Serialize,
{
fn encode(&self, buf: &mut Vec<u8>) {
// TODO: I haven't been able to figure out what this byte is, but it is required or else we get the error:
// Error: unsupported jsonb version number 34
buf.put_u8(1);
serde_json::to_writer(buf, &self.0)
.expect("failed to serialize json for encoding to database");
}
}
impl<T> Decode<Postgres> for Jsonb<T>
where
T: for<'a> Deserialize<'a>,
{
fn decode(mut buf: &[u8]) -> Result<Self, DecodeError> {
// TODO: I don't know what this byte is, similarly to Encode
let _ = buf.get_u8()?;
let item = serde_json::from_slice(buf)?;
Ok(Jsonb(item))
fn decode(value: Option<PgValue<'de>>) -> crate::Result<Postgres, Self> {
<PgJsonb<T> as Decode<Postgres>>::decode(value).map(|item| Self(item.0))
}
}

View File

@@ -87,7 +87,7 @@ pub mod json;
mod ipnetwork;
#[cfg(feature = "json")]
pub use json::{Json, Jsonb};
pub use raw::{PgJson, PgJsonb};
/// Type information for a Postgres SQL type.
#[derive(Debug, Clone)]

View File

@@ -0,0 +1,92 @@
use crate::decode::Decode;
use crate::encode::Encode;
use crate::io::{Buf, BufMut};
use crate::postgres::protocol::TypeId;
use crate::postgres::types::PgTypeInfo;
use crate::postgres::{PgValue, Postgres};
use crate::types::Type;
use serde::{Deserialize, Serialize};
use std::convert::TryInto;
#[derive(Debug, PartialEq)]
pub struct PgJson<T>(pub T);
impl<T> Type<Postgres> for PgJson<T> {
fn type_info() -> PgTypeInfo {
PgTypeInfo::new(TypeId::JSON, "JSON")
}
}
impl<T> Encode<Postgres> for PgJson<T>
where
T: Serialize,
{
fn encode(&self, buf: &mut Vec<u8>) {
serde_json::to_writer(buf, &self.0)
.expect("failed to serialize json for encoding to database");
}
}
impl<'de, T> Decode<'de, Postgres> for PgJson<T>
where
T: 'de,
T: Deserialize<'de>,
{
fn decode(value: Option<PgValue<'de>>) -> crate::Result<Postgres, Self> {
(match value.try_into()? {
PgValue::Text(s) => serde_json::from_str(s),
PgValue::Binary(buf) => serde_json::from_slice(buf),
})
.map(PgJson)
.map_err(crate::Error::decode)
}
}
// This type has the Pg prefix as it is a postgres-only extension
// unlike the normal Json<T> wrapper
#[derive(Debug, PartialEq)]
pub struct PgJsonb<T>(pub T);
impl<T> Type<Postgres> for PgJsonb<T> {
fn type_info() -> PgTypeInfo {
PgTypeInfo::new(TypeId::JSONB, "JSONB")
}
}
impl<T> Encode<Postgres> for PgJsonb<T>
where
T: Serialize,
{
fn encode(&self, buf: &mut Vec<u8>) {
// JSONB version (as of 2020-03-20)
buf.put_u8(1);
serde_json::to_writer(buf, &self.0)
.expect("failed to serialize json for encoding to database");
}
}
impl<'de, T> Decode<'de, Postgres> for PgJsonb<T>
where
T: 'de,
T: Deserialize<'de>,
{
fn decode(value: Option<PgValue<'de>>) -> crate::Result<Postgres, Self> {
(match value.try_into()? {
PgValue::Text(s) => serde_json::from_str(s),
PgValue::Binary(mut buf) => {
let version = buf.get_u8()?;
assert_eq!(
version, 1,
"unsupported JSONB format version {}; please open an issue",
version
);
serde_json::from_slice(buf)
}
})
.map(PgJsonb)
.map_err(crate::Error::decode)
}
}

View File

@@ -3,6 +3,12 @@ mod numeric;
mod record;
mod sequence;
#[cfg(feature = "json")]
mod json;
#[cfg(feature = "json")]
pub use json::{PgJson, PgJsonb};
pub(crate) use array::{PgArrayDecoder, PgArrayEncoder};
// Used in integration tests

View File

@@ -70,6 +70,7 @@ macro_rules! impl_from_row_for_tuple {
($db:ident, $r:ident; $( ($idx:tt) -> $T:ident );+;) => {
impl<'c, $($T,)+> crate::row::FromRow<'c, $r<'c>> for ($($T,)+)
where
$($T: 'c,)+
$($T: crate::types::Type<$db>,)+
$($T: for<'r> crate::decode::Decode<'r, $db>,)+
{

View File

@@ -24,6 +24,11 @@ pub mod ipnetwork {
pub use ipnetwork::{IpNetwork, Ipv4Network, Ipv6Network};
}
#[cfg(feature = "json")]
#[cfg_attr(docsrs, doc(cfg(feature = "json")))]
#[derive(Debug, PartialEq)]
pub struct Json<T>(pub T);
pub trait TypeInfo: Debug + Display + Clone {
/// Compares type information to determine if `other` is compatible at the Rust level
/// with `self`.

View File

@@ -19,10 +19,15 @@ where
// Test type encoding and decoding
#[macro_export]
macro_rules! test_type {
($name:ident($db:ident, $ty:ty, $sql:literal, $($text:literal == $value:expr),+ $(,)?)) => {
$crate::test_prepared_type!($name($db, $ty, $sql, $($text == $value),+));
$crate::test_unprepared_type!($name($db, $ty, $($text == $value),+));
};
($name:ident($db:ident, $ty:ty, $($text:literal == $value:expr),+ $(,)?)) => {
$crate::test_prepared_type!($name($db, $ty, $($text == $value),+));
$crate::test_unprepared_type!($name($db, $ty, $($text == $value),+));
}
};
}
// Test type decoding for the simple (unprepared) query API
@@ -52,9 +57,56 @@ macro_rules! test_unprepared_type {
}
}
// TODO: This macro is cursed. Needs a good re-factor.
// Test type encoding and decoding for the prepared query API
#[macro_export]
macro_rules! test_prepared_type {
($name:ident($db:ident, $ty:ty, $sql:literal, $($text:literal == $value:expr),+ $(,)?)) => {
paste::item! {
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn [< test_prepared_type_ $name >] () -> anyhow::Result<()> {
use sqlx::prelude::*;
let mut conn = sqlx_test::new::<$db>().await?;
$(
let query = format!($sql, $text);
let rec: (bool, Option<String>, $ty, $ty) = sqlx::query_as(&query)
.bind($value)
.bind($value)
.bind($value)
.fetch_one(&mut conn)
.await?;
assert!(rec.0,
"[1] DB value mismatch; given value: {:?}\n\
as received: {:?}\n\
as returned: {:?}\n\
round-trip: {:?}",
$value, rec.1, rec.2, rec.3);
assert_eq!($value, rec.2,
"[2] DB value mismatch; given value: {:?}\n\
as received: {:?}\n\
as returned: {:?}\n\
round-trip: {:?}",
$value, rec.1, rec.2, rec.3);
assert_eq!($value, rec.3,
"[3] DB value mismatch; given value: {:?}\n\
as received: {:?}\n\
as returned: {:?}\n\
round-trip: {:?}",
$value, rec.1, rec.2, rec.3);
)+
Ok(())
}
}
};
($name:ident($db:ident, $ty:ty, $($text:literal == $value:expr),+ $(,)?)) => {
paste::item! {
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
@@ -99,7 +151,7 @@ macro_rules! test_prepared_type {
Ok(())
}
}
}
};
}
#[macro_export]

View File

@@ -1,66 +0,0 @@
use sqlx::{postgres::{PgConnection, Json, Jsonb}, Connection as _, Row};
use serde::{Deserialize, Serialize};
async fn connect() -> anyhow::Result<PgConnection> {
Ok(PgConnection::open(dotenv::var("DATABASE_URL")?).await?)
}
macro_rules! test {
($name:ident: $ty:ty: $($text:literal == $value:expr),+) => {
mod $name {
use super::*;
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn json () -> anyhow::Result<()> {
let mut conn = connect().await?;
// Always use jsonb for the comparison, as json does not support equality
$(
let row = sqlx::query(&format!("SELECT {}::json::jsonb = $1::jsonb, $1 as _1", $text))
.bind(Json($value))
.fetch_one(&mut conn)
.await?;
assert!(row.get::<bool, _>(0));
assert!(Json($value) == row.get::<Json<$ty>, _>("_1"));
)+
Ok(())
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn jsonb () -> anyhow::Result<()> {
let mut conn = connect().await?;
$(
let row = sqlx::query(&format!("SELECT {}::jsonb = $1::jsonb, $1 as _1", $text))
.bind(Jsonb($value))
.fetch_one(&mut conn)
.await?;
assert!(row.get::<bool, _>(0));
assert!(Jsonb($value) == row.get::<Jsonb<$ty>, _>("_1"));
)+
Ok(())
}
}
}
}
test!(postgres_json_string: String: "'\"Hello, World!\"'" == "Hello, World!".to_string());
test!(postgres_json_emoji_simple: String: "'\"😎\"'" == "😎".to_string());
test!(postgres_json_emoji_multi: String: "'\"🙋‍♀️\"'" == "🙋‍♀️".to_string());
test!(postgres_json_vec: Vec<String>: "'[\"Hello\", \"World!\"]'" == vec!["Hello".to_string(), "World!".to_string()]);
#[derive(Deserialize, Serialize, Debug, PartialEq)]
struct Friend {
name: String,
age: u32,
}
test!(postgres_json_struct: Friend: "'{\"name\":\"Joe\",\"age\":33}'" == Friend { name: "Joe".to_string(), age: 33 });

View File

@@ -200,6 +200,50 @@ mod chrono {
));
}
// This is trying to break my complete lack of understanding of null bitmaps for array/record
// decoding. The docs in pg are either wrong or I'm reading the wrong docs.
test_type!(lots_of_nulls_vec(Postgres, Vec<Option<bool>>,
"ARRAY[NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, true]::bool[]" == {
vec![None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, Some(true)]
},
));
test_type!(bool_vec(Postgres, Vec<bool>,
"ARRAY[true, true, false, true]::bool[]" == vec![true, true, false, true],
));
test_type!(bool_opt_vec(Postgres, Vec<Option<bool>>,
"ARRAY[NULL, true, NULL, false]::bool[]" == vec![None, Some(true), None, Some(false)],
));
test_type!(f32_vec(Postgres, Vec<f32>,
"ARRAY[0.0, 1.0, 3.14, 1.234, -0.002, 100000.0]::real[]" == vec![0.0_f32, 1.0, 3.14, 1.234, -0.002, 100000.0],
));
test_type!(f64_vec(Postgres, Vec<f64>,
"ARRAY[0.0, 1.0, 3.14, 1.234, -0.002, 100000.0]::double precision[]" == vec![0.0_f64, 1.0, 3.14, 1.234, -0.002, 100000.0],
));
test_type!(i16_vec(Postgres, Vec<i16>,
"ARRAY[1, 152, -12412]::smallint[]" == vec![1_i16, 152, -12412],
"ARRAY[]::smallint[]" == Vec::<i16>::new(),
"ARRAY[0]::smallint[]" == vec![0_i16]
));
test_type!(string_vec(Postgres, Vec<String>,
"ARRAY['', '\"']::text[]"
== vec!["".to_string(), "\"".to_string()],
"ARRAY['Hello, World', '', 'Goodbye']::text[]"
== vec!["Hello, World".to_string(), "".to_string(), "Goodbye".to_string()],
));
//
// These require some annoyingly different tests as anonymous records cannot be read from the
// database. If someone enterprising comes along and wants to try to adjust the macro to handle
// this, that would be super awesome.
//
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn test_prepared_anonymous_record() -> anyhow::Result<()> {
@@ -282,44 +326,6 @@ async fn test_unprepared_anonymous_record() -> anyhow::Result<()> {
Ok(())
}
// This is trying to break my complete lack of understanding of null bitmaps for array/record
// decoding. The docs in pg are either wrong or I'm reading the wrong docs.
test_type!(lots_of_nulls_vec(Postgres, Vec<Option<bool>>,
"ARRAY[NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, true]::bool[]" == {
vec![None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, Some(true)]
},
));
test_type!(bool_vec(Postgres, Vec<bool>,
"ARRAY[true, true, false, true]::bool[]" == vec![true, true, false, true],
));
test_type!(bool_opt_vec(Postgres, Vec<Option<bool>>,
"ARRAY[NULL, true, NULL, false]::bool[]" == vec![None, Some(true), None, Some(false)],
));
test_type!(f32_vec(Postgres, Vec<f32>,
"ARRAY[0.0, 1.0, 3.14, 1.234, -0.002, 100000.0]::real[]" == vec![0.0_f32, 1.0, 3.14, 1.234, -0.002, 100000.0],
));
test_type!(f64_vec(Postgres, Vec<f64>,
"ARRAY[0.0, 1.0, 3.14, 1.234, -0.002, 100000.0]::double precision[]" == vec![0.0_f64, 1.0, 3.14, 1.234, -0.002, 100000.0],
));
test_type!(i16_vec(Postgres, Vec<i16>,
"ARRAY[1, 152, -12412]::smallint[]" == vec![1_i16, 152, -12412],
"ARRAY[]::smallint[]" == Vec::<i16>::new(),
"ARRAY[0]::smallint[]" == vec![0_i16]
));
test_type!(string_vec(Postgres, Vec<String>,
"ARRAY['', '\"']::text[]"
== vec!["".to_string(), "\"".to_string()],
"ARRAY['Hello, World', '', 'Goodbye']::text[]"
== vec!["Hello, World".to_string(), "".to_string(), "Goodbye".to_string()],
));
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn test_unprepared_anonymous_record_arrays() -> anyhow::Result<()> {
@@ -479,3 +485,76 @@ END $$;
Ok(())
}
//
// JSON
//
#[cfg(feature = "json")]
mod json {
use super::*;
use serde_json::value::RawValue;
use serde_json::{json, Value as JsonValue};
use sqlx::postgres::types::PgJson;
use sqlx::postgres::PgRow;
use sqlx::types::Json;
use sqlx::Row;
// When testing JSON, coerce to JSONB for `=` comparison as `JSON = JSON` is not
// supported in PostgreSQL
test_type!(json(
Postgres,
PgJson<JsonValue>,
"SELECT {0}::jsonb is not distinct from $1::jsonb, $2::text as _1, {0} as _2, $3 as _3",
"'\"Hello, World\"'::json" == PgJson(json!("Hello, World")),
"'\"😎\"'::json" == PgJson(json!("😎")),
"'\"🙋‍♀️\"'::json" == PgJson(json!("🙋‍♀️")),
"'[\"Hello\", \"World!\"]'::json" == PgJson(json!(["Hello", "World!"]))
));
test_type!(jsonb(
Postgres,
JsonValue,
"'\"Hello, World\"'::jsonb" == json!("Hello, World"),
"'\"😎\"'::jsonb" == json!("😎"),
"'\"🙋‍♀️\"'::jsonb" == json!("🙋‍♀️"),
"'[\"Hello\", \"World!\"]'::jsonb" == json!(["Hello", "World!"])
));
#[derive(serde::Deserialize, serde::Serialize, Debug, PartialEq)]
struct Friend {
name: String,
age: u32,
}
// The default JSON type that SQLx chooses is JSONB
// sqlx::types::Json -> JSONB
// sqlx::postgres::types::PgJson -> JSON
// sqlx::postgres::types::PgJsonb -> JSONB
test_type!(jsonb_struct(Postgres, Json<Friend>,
"'{\"name\":\"Joe\",\"age\":33}'::jsonb" == Json(Friend { name: "Joe".to_string(), age: 33 })
));
test_type!(json_struct(
Postgres,
PgJson<Friend>,
"SELECT {0}::jsonb is not distinct from $1::jsonb, $2::text as _1, {0} as _2, $3 as _3",
"'{\"name\":\"Joe\",\"age\":33}'::json" == PgJson(Friend { name: "Joe".to_string(), age: 33 })
));
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
async fn test_prepared_jsonb_raw_value() -> anyhow::Result<()> {
let mut conn = new::<Postgres>().await?;
let mut cursor = sqlx::query("SELECT '{\"hello\": \"world\"}'::jsonb").fetch(&mut conn);
let row: PgRow = cursor.next().await?.unwrap();
let value: &RawValue = row.get::<&RawValue, usize>(0_usize);
assert_eq!(value.get(), "{\"hello\": \"world\"}");
Ok(())
}
}