Don't panic in serde_test on running out of tokens

David Tolnay 2023-05-04 16:37:04 -07:00
parent 8f4d37c7ec
commit 6326ceec3f

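The diff below replaces serde_test's internal `end_of_tokens!` panic with an error-returning helper, so a Deserialize impl that asks for more tokens than the test supplied now gets an ordinary deserialization error propagated with `?` instead of a panic from inside the deserializer. As a minimal standalone sketch of that pattern (toy token and error types for illustration, not the actual serde_test internals):

    // Toy error type standing in for serde_test's Error.
    #[derive(Debug)]
    struct Error(String);

    // Build an error instead of panicking, mirroring the new end_of_tokens() in the diff.
    fn end_of_tokens() -> Error {
        Error("ran out of tokens to deserialize".to_owned())
    }

    // Toy token stream; the real one holds serde_test::Token values.
    struct Tokens<'a> {
        tokens: &'a [u32],
    }

    impl<'a> Tokens<'a> {
        // Peek without panicking: an exhausted list becomes Err via ok_or_else.
        fn peek_token(&self) -> Result<u32, Error> {
            self.tokens.first().cloned().ok_or_else(end_of_tokens)
        }

        // Pop the next token, or return an error the caller can `?` upward.
        fn next_token(&mut self) -> Result<u32, Error> {
            let (&first, rest) = self.tokens.split_first().ok_or_else(end_of_tokens)?;
            self.tokens = rest;
            Ok(first)
        }
    }

    fn main() {
        let mut empty = Tokens { tokens: &[] };
        assert!(empty.peek_token().is_err());
        assert!(empty.next_token().is_err());
    }

Callers accordingly change from `self.next_token()` to `self.next_token()?`, which accounts for the bulk of the diff.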

@@ -33,10 +33,8 @@ fn unexpected(token: Token) -> Error {
     ))
 }
 
-macro_rules! end_of_tokens {
-    () => {
-        panic!("ran out of tokens to deserialize")
-    };
+fn end_of_tokens() -> Error {
+    de::Error::custom("ran out of tokens to deserialize")
 }
 
 impl<'de> Deserializer<'de> {
@@ -48,11 +46,8 @@ impl<'de> Deserializer<'de> {
         self.tokens.first().cloned()
     }
 
-    fn peek_token(&self) -> Token {
-        match self.peek_token_opt() {
-            Some(token) => token,
-            None => end_of_tokens!(),
-        }
+    fn peek_token(&self) -> Result<Token, Error> {
+        self.peek_token_opt().ok_or_else(end_of_tokens)
     }
 
     pub fn next_token_opt(&mut self) -> Option<Token> {
@@ -65,14 +60,10 @@ impl<'de> Deserializer<'de> {
         }
     }
 
-    fn next_token(&mut self) -> Token {
-        match self.tokens.split_first() {
-            Some((&first, rest)) => {
-                self.tokens = rest;
-                first
-            }
-            None => end_of_tokens!(),
-        }
+    fn next_token(&mut self) -> Result<Token, Error> {
+        let (&first, rest) = self.tokens.split_first().ok_or_else(end_of_tokens)?;
+        self.tokens = rest;
+        Ok(first)
     }
 
     pub fn remaining(&self) -> usize {
@@ -128,7 +119,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
     where
         V: Visitor<'de>,
     {
-        let token = self.next_token();
+        let token = self.next_token()?;
         match token {
             Token::Bool(v) => visitor.visit_bool(v),
             Token::I8(v) => visitor.visit_i8(v),
@@ -160,47 +151,47 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
             Token::Map { len } => self.visit_map(len, Token::MapEnd, visitor),
             Token::Struct { len, .. } => self.visit_map(Some(len), Token::StructEnd, visitor),
             Token::Enum { .. } => {
-                let variant = self.next_token();
-                let next = self.peek_token();
+                let variant = self.next_token()?;
+                let next = self.peek_token()?;
                 match (variant, next) {
                     (Token::Str(variant), Token::Unit) => {
-                        self.next_token();
+                        self.next_token()?;
                         visitor.visit_str(variant)
                     }
                     (Token::BorrowedStr(variant), Token::Unit) => {
-                        self.next_token();
+                        self.next_token()?;
                         visitor.visit_borrowed_str(variant)
                     }
                     (Token::String(variant), Token::Unit) => {
-                        self.next_token();
+                        self.next_token()?;
                         visitor.visit_string(variant.to_string())
                     }
                     (Token::Bytes(variant), Token::Unit) => {
-                        self.next_token();
+                        self.next_token()?;
                         visitor.visit_bytes(variant)
                     }
                     (Token::BorrowedBytes(variant), Token::Unit) => {
-                        self.next_token();
+                        self.next_token()?;
                         visitor.visit_borrowed_bytes(variant)
                     }
                     (Token::ByteBuf(variant), Token::Unit) => {
-                        self.next_token();
+                        self.next_token()?;
                         visitor.visit_byte_buf(variant.to_vec())
                     }
                     (Token::U8(variant), Token::Unit) => {
-                        self.next_token();
+                        self.next_token()?;
                         visitor.visit_u8(variant)
                     }
                     (Token::U16(variant), Token::Unit) => {
-                        self.next_token();
+                        self.next_token()?;
                         visitor.visit_u16(variant)
                     }
                     (Token::U32(variant), Token::Unit) => {
-                        self.next_token();
+                        self.next_token()?;
                         visitor.visit_u32(variant)
                     }
                     (Token::U64(variant), Token::Unit) => {
-                        self.next_token();
+                        self.next_token()?;
                         visitor.visit_u64(variant)
                     }
                     (variant, Token::Unit) => Err(unexpected(variant)),
@@ -239,13 +230,13 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
     where
         V: Visitor<'de>,
     {
-        match self.peek_token() {
+        match self.peek_token()? {
             Token::Unit | Token::None => {
-                self.next_token();
+                self.next_token()?;
                 visitor.visit_none()
             }
             Token::Some => {
-                self.next_token();
+                self.next_token()?;
                 visitor.visit_some(self)
             }
             _ => self.deserialize_any(visitor),
@@ -261,9 +252,9 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
     where
         V: Visitor<'de>,
     {
-        match self.peek_token() {
+        match self.peek_token()? {
             Token::Enum { name: n } if name == n => {
-                self.next_token();
+                self.next_token()?;
                 visitor.visit_enum(DeserializerEnumVisitor { de: self })
             }
@@ -283,7 +274,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
     where
         V: Visitor<'de>,
     {
-        match self.peek_token() {
+        match self.peek_token()? {
             Token::UnitStruct { .. } => {
                 assert_next_token(self, Token::UnitStruct { name: name })?;
                 visitor.visit_unit()
@@ -300,7 +291,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
     where
         V: Visitor<'de>,
     {
-        match self.peek_token() {
+        match self.peek_token()? {
             Token::NewtypeStruct { .. } => {
                 assert_next_token(self, Token::NewtypeStruct { name: name })?;
                 visitor.visit_newtype_struct(self)
@@ -313,21 +304,21 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
     where
         V: Visitor<'de>,
     {
-        match self.peek_token() {
+        match self.peek_token()? {
             Token::Unit | Token::UnitStruct { .. } => {
-                self.next_token();
+                self.next_token()?;
                 visitor.visit_unit()
             }
             Token::Seq { .. } => {
-                self.next_token();
+                self.next_token()?;
                 self.visit_seq(Some(len), Token::SeqEnd, visitor)
             }
             Token::Tuple { .. } => {
-                self.next_token();
+                self.next_token()?;
                 self.visit_seq(Some(len), Token::TupleEnd, visitor)
             }
             Token::TupleStruct { .. } => {
-                self.next_token();
+                self.next_token()?;
                 self.visit_seq(Some(len), Token::TupleStructEnd, visitor)
             }
             _ => self.deserialize_any(visitor),
@@ -343,9 +334,9 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
     where
         V: Visitor<'de>,
     {
-        match self.peek_token() {
+        match self.peek_token()? {
             Token::Unit => {
-                self.next_token();
+                self.next_token()?;
                 visitor.visit_unit()
             }
             Token::UnitStruct { .. } => {
@@ -353,11 +344,11 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
                 visitor.visit_unit()
             }
             Token::Seq { .. } => {
-                self.next_token();
+                self.next_token()?;
                 self.visit_seq(Some(len), Token::SeqEnd, visitor)
             }
             Token::Tuple { .. } => {
-                self.next_token();
+                self.next_token()?;
                 self.visit_seq(Some(len), Token::TupleEnd, visitor)
             }
             Token::TupleStruct { len: n, .. } => {
@@ -377,13 +368,13 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
     where
         V: Visitor<'de>,
     {
-        match self.peek_token() {
+        match self.peek_token()? {
             Token::Struct { len: n, .. } => {
                 assert_next_token(self, Token::Struct { name: name, len: n })?;
                 self.visit_map(Some(fields.len()), Token::StructEnd, visitor)
             }
             Token::Map { .. } => {
-                self.next_token();
+                self.next_token()?;
                 self.visit_map(Some(fields.len()), Token::MapEnd, visitor)
             }
             _ => self.deserialize_any(visitor),
@@ -473,7 +464,7 @@ impl<'de, 'a> EnumAccess<'de> for DeserializerEnumVisitor<'a, 'de> {
     where
         V: DeserializeSeed<'de>,
     {
-        match self.de.peek_token() {
+        match self.de.peek_token()? {
             Token::UnitVariant { variant: v, .. }
             | Token::NewtypeVariant { variant: v, .. }
             | Token::TupleVariant { variant: v, .. }
@@ -494,9 +485,9 @@ impl<'de, 'a> VariantAccess<'de> for DeserializerEnumVisitor<'a, 'de> {
     type Error = Error;
 
     fn unit_variant(self) -> Result<(), Error> {
-        match self.de.peek_token() {
+        match self.de.peek_token()? {
             Token::UnitVariant { .. } => {
-                self.de.next_token();
+                self.de.next_token()?;
                 Ok(())
             }
             _ => Deserialize::deserialize(self.de),
@@ -507,9 +498,9 @@ impl<'de, 'a> VariantAccess<'de> for DeserializerEnumVisitor<'a, 'de> {
     where
         T: DeserializeSeed<'de>,
     {
-        match self.de.peek_token() {
+        match self.de.peek_token()? {
             Token::NewtypeVariant { .. } => {
-                self.de.next_token();
+                self.de.next_token()?;
                 seed.deserialize(self.de)
             }
             _ => seed.deserialize(self.de),
@@ -520,9 +511,9 @@ impl<'de, 'a> VariantAccess<'de> for DeserializerEnumVisitor<'a, 'de> {
     where
         V: Visitor<'de>,
     {
-        match self.de.peek_token() {
+        match self.de.peek_token()? {
             Token::TupleVariant { len: enum_len, .. } => {
-                let token = self.de.next_token();
+                let token = self.de.next_token()?;
 
                 if len == enum_len {
                     self.de
@@ -534,7 +525,7 @@ impl<'de, 'a> VariantAccess<'de> for DeserializerEnumVisitor<'a, 'de> {
             Token::Seq {
                 len: Some(enum_len),
             } => {
-                let token = self.de.next_token();
+                let token = self.de.next_token()?;
 
                 if len == enum_len {
                     self.de.visit_seq(Some(len), Token::SeqEnd, visitor)
@@ -554,9 +545,9 @@ impl<'de, 'a> VariantAccess<'de> for DeserializerEnumVisitor<'a, 'de> {
     where
         V: Visitor<'de>,
     {
-        match self.de.peek_token() {
+        match self.de.peek_token()? {
             Token::StructVariant { len: enum_len, .. } => {
-                let token = self.de.next_token();
+                let token = self.de.next_token()?;
 
                 if fields.len() == enum_len {
                     self.de
@@ -568,7 +559,7 @@ impl<'de, 'a> VariantAccess<'de> for DeserializerEnumVisitor<'a, 'de> {
             Token::Map {
                 len: Some(enum_len),
            } => {
-                let token = self.de.next_token();
+                let token = self.de.next_token()?;
 
                 if fields.len() == enum_len {
                     self.de