From b6605b57e04b28f0a1cdbcbd65f21a9e7f6550f1 Mon Sep 17 00:00:00 2001
From: David Tolnay
Date: Wed, 5 Apr 2017 10:26:34 -0700
Subject: [PATCH 1/3] Modernize serde_test Token names

---
 serde_test/src/token.rs | 50 +++++++++++++++++++----------------------
 1 file changed, 23 insertions(+), 27 deletions(-)

diff --git a/serde_test/src/token.rs b/serde_test/src/token.rs
index 5e9cead0..a9d01a33 100644
--- a/serde_test/src/token.rs
+++ b/serde_test/src/token.rs
@@ -54,11 +54,13 @@ pub enum Token {
     /// A serialized `ByteBuf`
     ByteBuf(&'static [u8]),
 
-    /// The header to a serialized `Option`.
+    /// The header to a serialized `Option` containing some value.
     ///
-    /// `None` is serialized as `Option(false)`, while `Some` is serialized as `Option(true)`, then
-    /// the value contained in the option.
-    Option(bool),
+    /// The tokens of the value follow after this header.
+    Some,
+
+    /// A serialized `Option` containing none.
+    None,
 
     /// A serialized `()`.
     Unit,
@@ -70,52 +72,46 @@ pub enum Token {
     ///
     /// Newtype structs are serialized with this header, followed by the value contained in the
     /// newtype struct.
-    StructNewType(&'static str),
+    NewtypeStruct(&'static str),
 
     /// The header to an enum of the given name.
-    ///
-    /// This token is only used for deserializers, and ensures that the following tokens are read as
-    /// an enum. Because this is never emitted by serializers, calling `assert_ser_tokens` or
-    /// `assert_tokens` will fail if this token is used.
-    ///
-    /// TODO: Trash this.
-    EnumStart(&'static str),
+    Enum(&'static str),
 
     /// A unit variant of an enum of the given name, of the given name.
     ///
     /// The first string represents the name of the enum, and the second represents the name of the
     /// variant.
-    EnumUnit(&'static str, &'static str),
+    UnitVariant(&'static str, &'static str),
 
     /// The header to a newtype variant of an enum of the given name, of the given name.
     ///
     /// The first string represents the name of the enum, and the second represents the name of the
     /// variant. The value contained within this enum works the same as `StructNewType`.
-    EnumNewType(&'static str, &'static str),
+    NewtypeVariant(&'static str, &'static str),
 
     /// The header to a sequence of the given length.
     ///
     /// These are serialized via `serialize_seq`, which takes an optional length. After this
     /// header is a list of elements, followed by `SeqEnd`.
-    SeqStart(Option<usize>),
+    Seq(Option<usize>),
 
     /// The header to an array of the given length.
     ///
     /// These are serialized via `serialize_seq_fixed_size`, which requires a length. After this
     /// header is a list of elements, followed by `SeqEnd`.
-    SeqArrayStart(usize),
+    SeqFixedSize(usize),
 
     /// An indicator of the end of a sequence.
     SeqEnd,
 
-    /// The header to a tuple of the given length, similar to `SeqArrayStart`.
-    TupleStart(usize),
+    /// The header to a tuple of the given length, similar to `SeqFixedSize`.
+    Tuple(usize),
 
     /// An indicator of the end of a tuple, similar to `SeqEnd`.
     TupleEnd,
 
     /// The header to a tuple struct of the given name and length.
-    TupleStructStart(&'static str, usize),
+    TupleStruct(&'static str, usize),
 
     /// An indicator of the end of a tuple struct, similar to `TupleEnd`.
     TupleStructEnd,
@@ -124,27 +120,27 @@ pub enum Token {
     ///
     /// These are serialized via `serialize_map`, which takes an optional length. After this header
     /// is a list of key-value pairs, followed by `MapEnd`.
-    MapStart(Option<usize>),
+    Map(Option<usize>),
 
     /// An indicator of the end of a map.
MapEnd, - /// The header of a struct of the given name and length, similar to `MapStart`. - StructStart(&'static str, usize), + /// The header of a struct of the given name and length, similar to `Map`. + Struct(&'static str, usize), /// An indicator of the end of a struct, similar to `MapEnd`. StructEnd, /// The header to a tuple variant of an enum of the given name, of the given name and length. - EnumSeqStart(&'static str, &'static str, usize), + TupleVariant(&'static str, &'static str, usize), /// An indicator of the end of a tuple variant, similar to `TupleEnd`. - EnumSeqEnd, + TupleVariantEnd, /// The header of a struct variant of an enum of the given name, of the given name and length, - /// similar to `StructStart`. - EnumMapStart(&'static str, &'static str, usize), + /// similar to `Struct`. + StructVariant(&'static str, &'static str, usize), /// An indicator of the end of a struct, similar to `StructEnd`. - EnumMapEnd, + StructVariantEnd, } From 826b53f691eb523436d1e4f0b28c5bc0aec52ecd Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 5 Apr 2017 10:30:46 -0700 Subject: [PATCH 2/3] Update token names in serde_test --- serde_test/src/de.rs | 102 +++++++++++++++++++++--------------------- serde_test/src/ser.rs | 34 +++++++------- 2 files changed, 68 insertions(+), 68 deletions(-) diff --git a/serde_test/src/de.rs b/serde_test/src/de.rs index 8bfb8f70..fcbb762b 100644 --- a/serde_test/src/de.rs +++ b/serde_test/src/de.rs @@ -104,32 +104,32 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> { Token::Bytes(v) => visitor.visit_bytes(v), Token::BorrowedBytes(v) => visitor.visit_borrowed_bytes(v), Token::ByteBuf(v) => visitor.visit_byte_buf(v.to_vec()), - Token::Option(false) => visitor.visit_none(), - Token::Option(true) => visitor.visit_some(self), + Token::None => visitor.visit_none(), + Token::Some => visitor.visit_some(self), Token::Unit => visitor.visit_unit(), Token::UnitStruct(_name) => visitor.visit_unit(), - Token::StructNewType(_name) => visitor.visit_newtype_struct(self), - Token::SeqStart(len) => { + Token::NewtypeStruct(_name) => visitor.visit_newtype_struct(self), + Token::Seq(len) => { self.visit_seq(len, Token::SeqEnd, visitor) } - Token::SeqArrayStart(len) => { + Token::SeqFixedSize(len) => { self.visit_seq(Some(len), Token::SeqEnd, visitor) } - Token::TupleStart(len) => { + Token::Tuple(len) => { self.visit_seq(Some(len), Token::TupleEnd, visitor) } - Token::TupleStructStart(_, len) => { + Token::TupleStruct(_, len) => { self.visit_seq(Some(len), Token::TupleStructEnd, visitor) } - Token::MapStart(len) => { + Token::Map(len) => { self.visit_map(len, Token::MapEnd, visitor) } - Token::StructStart(_, len) => { + Token::Struct(_, len) => { self.visit_map(Some(len), Token::StructEnd, visitor) } - Token::EnumStart(_) => { + Token::Enum(_) => { let variant = self.next_token().ok_or(Error::EndOfTokens)?; let next = *self.tokens.first().ok_or(Error::EndOfTokens)?; match (variant, next) { @@ -153,14 +153,14 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> { } } } - Token::EnumUnit(_, variant) => visitor.visit_str(variant), - Token::EnumNewType(_, variant) => { + Token::UnitVariant(_, variant) => visitor.visit_str(variant), + Token::NewtypeVariant(_, variant) => { visitor.visit_map(EnumMapVisitor::new(self, Token::Str(variant), EnumFormat::Any)) } - Token::EnumSeqStart(_, variant, _) => { + Token::TupleVariant(_, variant, _) => { visitor.visit_map(EnumMapVisitor::new(self, Token::Str(variant), EnumFormat::Seq)) } - Token::EnumMapStart(_, 
variant, _) => { + Token::StructVariant(_, variant, _) => { visitor.visit_map(EnumMapVisitor::new(self, Token::Str(variant), EnumFormat::Map)) } Token::SeqEnd | @@ -168,8 +168,8 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> { Token::TupleStructEnd | Token::MapEnd | Token::StructEnd | - Token::EnumSeqEnd | - Token::EnumMapEnd => Err(Error::UnexpectedToken(token)), + Token::TupleVariantEnd | + Token::StructVariantEnd => Err(Error::UnexpectedToken(token)), } } @@ -180,11 +180,11 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> { { match self.tokens.first() { Some(&Token::Unit) | - Some(&Token::Option(false)) => { + Some(&Token::None) => { self.next_token(); visitor.visit_none() } - Some(&Token::Option(true)) => { + Some(&Token::Some) => { self.next_token(); visitor.visit_some(self) } @@ -201,15 +201,15 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> { where V: Visitor<'de> { match self.tokens.first() { - Some(&Token::EnumStart(n)) if name == n => { + Some(&Token::Enum(n)) if name == n => { self.next_token(); visitor.visit_enum(DeserializerEnumVisitor { de: self }) } - Some(&Token::EnumUnit(n, _)) | - Some(&Token::EnumNewType(n, _)) | - Some(&Token::EnumSeqStart(n, _, _)) | - Some(&Token::EnumMapStart(n, _, _)) if name == n => { + Some(&Token::UnitVariant(n, _)) | + Some(&Token::NewtypeVariant(n, _)) | + Some(&Token::TupleVariant(n, _, _)) | + Some(&Token::StructVariant(n, _, _)) if name == n => { visitor.visit_enum(DeserializerEnumVisitor { de: self }) } Some(_) => { @@ -241,7 +241,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> { where V: Visitor<'de> { match self.tokens.first() { - Some(&Token::StructNewType(n)) => { + Some(&Token::NewtypeStruct(n)) => { self.next_token(); if name == n { visitor.visit_newtype_struct(self) @@ -258,7 +258,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> { where V: Visitor<'de> { match self.tokens.first() { - Some(&Token::SeqArrayStart(_)) => { + Some(&Token::SeqFixedSize(_)) => { self.next_token(); self.visit_seq(Some(len), Token::SeqEnd, visitor) } @@ -276,19 +276,19 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> { self.next_token(); visitor.visit_unit() } - Some(&Token::SeqStart(_)) => { + Some(&Token::Seq(_)) => { self.next_token(); self.visit_seq(Some(len), Token::SeqEnd, visitor) } - Some(&Token::SeqArrayStart(_)) => { + Some(&Token::SeqFixedSize(_)) => { self.next_token(); self.visit_seq(Some(len), Token::SeqEnd, visitor) } - Some(&Token::TupleStart(_)) => { + Some(&Token::Tuple(_)) => { self.next_token(); self.visit_seq(Some(len), Token::TupleEnd, visitor) } - Some(&Token::TupleStructStart(_, _)) => { + Some(&Token::TupleStruct(_, _)) => { self.next_token(); self.visit_seq(Some(len), Token::TupleStructEnd, @@ -319,19 +319,19 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> { Err(Error::InvalidName(n)) } } - Some(&Token::SeqStart(_)) => { + Some(&Token::Seq(_)) => { self.next_token(); self.visit_seq(Some(len), Token::SeqEnd, visitor) } - Some(&Token::SeqArrayStart(_)) => { + Some(&Token::SeqFixedSize(_)) => { self.next_token(); self.visit_seq(Some(len), Token::SeqEnd, visitor) } - Some(&Token::TupleStart(_)) => { + Some(&Token::Tuple(_)) => { self.next_token(); self.visit_seq(Some(len), Token::TupleEnd, visitor) } - Some(&Token::TupleStructStart(n, _)) => { + Some(&Token::TupleStruct(n, _)) => { self.next_token(); if name == n { self.visit_seq(Some(len), @@ -354,7 +354,7 @@ impl<'de, 'a> 
de::Deserializer<'de> for &'a mut Deserializer<'de> { where V: Visitor<'de> { match self.tokens.first() { - Some(&Token::StructStart(n, _)) => { + Some(&Token::Struct(n, _)) => { self.next_token(); if name == n { self.visit_map(Some(fields.len()), @@ -364,7 +364,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> { Err(Error::InvalidName(n)) } } - Some(&Token::MapStart(_)) => { + Some(&Token::Map(_)) => { self.next_token(); self.visit_map(Some(fields.len()), Token::MapEnd, visitor) } @@ -448,10 +448,10 @@ impl<'de, 'a> EnumVisitor<'de> for DeserializerEnumVisitor<'a, 'de> { where V: DeserializeSeed<'de> { match self.de.tokens.first() { - Some(&Token::EnumUnit(_, v)) | - Some(&Token::EnumNewType(_, v)) | - Some(&Token::EnumSeqStart(_, v, _)) | - Some(&Token::EnumMapStart(_, v, _)) => { + Some(&Token::UnitVariant(_, v)) | + Some(&Token::NewtypeVariant(_, v)) | + Some(&Token::TupleVariant(_, v, _)) | + Some(&Token::StructVariant(_, v, _)) => { let de = v.into_deserializer(); let value = try!(seed.deserialize(de)); Ok((value, self)) @@ -470,7 +470,7 @@ impl<'de, 'a> VariantVisitor<'de> for DeserializerEnumVisitor<'a, 'de> { fn visit_unit(self) -> Result<(), Error> { match self.de.tokens.first() { - Some(&Token::EnumUnit(_, _)) => { + Some(&Token::UnitVariant(_, _)) => { self.de.next_token(); Ok(()) } @@ -483,7 +483,7 @@ impl<'de, 'a> VariantVisitor<'de> for DeserializerEnumVisitor<'a, 'de> { where T: DeserializeSeed<'de> { match self.de.tokens.first() { - Some(&Token::EnumNewType(_, _)) => { + Some(&Token::NewtypeVariant(_, _)) => { self.de.next_token(); seed.deserialize(self.de) } @@ -496,16 +496,16 @@ impl<'de, 'a> VariantVisitor<'de> for DeserializerEnumVisitor<'a, 'de> { where V: Visitor<'de> { match self.de.tokens.first() { - Some(&Token::EnumSeqStart(_, _, enum_len)) => { + Some(&Token::TupleVariant(_, _, enum_len)) => { let token = self.de.next_token().unwrap(); if len == enum_len { - self.de.visit_seq(Some(len), Token::EnumSeqEnd, visitor) + self.de.visit_seq(Some(len), Token::TupleVariantEnd, visitor) } else { Err(Error::UnexpectedToken(token)) } } - Some(&Token::SeqStart(Some(enum_len))) => { + Some(&Token::Seq(Some(enum_len))) => { let token = self.de.next_token().unwrap(); if len == enum_len { @@ -523,18 +523,18 @@ impl<'de, 'a> VariantVisitor<'de> for DeserializerEnumVisitor<'a, 'de> { where V: Visitor<'de> { match self.de.tokens.first() { - Some(&Token::EnumMapStart(_, _, enum_len)) => { + Some(&Token::StructVariant(_, _, enum_len)) => { let token = self.de.next_token().unwrap(); if fields.len() == enum_len { self.de.visit_map(Some(fields.len()), - Token::EnumMapEnd, + Token::StructVariantEnd, visitor) } else { Err(Error::UnexpectedToken(token)) } } - Some(&Token::MapStart(Some(enum_len))) => { + Some(&Token::Map(Some(enum_len))) => { let token = self.de.next_token().unwrap(); if fields.len() == enum_len { @@ -605,11 +605,11 @@ impl<'de, 'a> MapVisitor<'de> for EnumMapVisitor<'a, 'de> { let visitor = DeserializerSeqVisitor { de: self.de, len: None, - end: Token::EnumSeqEnd, + end: Token::TupleVariantEnd, }; try!(seed.deserialize(SeqVisitorDeserializer::new(visitor))) }; - try!(self.de.expect_token(Token::EnumSeqEnd)); + try!(self.de.expect_token(Token::TupleVariantEnd)); Ok(value) } EnumFormat::Map => { @@ -617,11 +617,11 @@ impl<'de, 'a> MapVisitor<'de> for EnumMapVisitor<'a, 'de> { let visitor = DeserializerMapVisitor { de: self.de, len: None, - end: Token::EnumMapEnd, + end: Token::StructVariantEnd, }; 
try!(seed.deserialize(MapVisitorDeserializer::new(visitor))) }; - try!(self.de.expect_token(Token::EnumMapEnd)); + try!(self.de.expect_token(Token::StructVariantEnd)); Ok(value) } EnumFormat::Any => seed.deserialize(&mut *self.de), diff --git a/serde_test/src/ser.rs b/serde_test/src/ser.rs index 919565b1..2a82970f 100644 --- a/serde_test/src/ser.rs +++ b/serde_test/src/ser.rs @@ -149,12 +149,12 @@ impl<'s, 'a> ser::Serializer for &'s mut Serializer<'a> { _variant_index: usize, variant: &'static str) -> Result<(), Error> { - if self.tokens.first() == Some(&Token::EnumStart(name)) { + if self.tokens.first() == Some(&Token::Enum(name)) { self.next_token(); assert_next_token!(self, Str(variant)); assert_next_token!(self, Unit); } else { - assert_next_token!(self, EnumUnit(name, variant)); + assert_next_token!(self, UnitVariant(name, variant)); } Ok(()) } @@ -162,7 +162,7 @@ impl<'s, 'a> ser::Serializer for &'s mut Serializer<'a> { fn serialize_newtype_struct(self, name: &'static str, value: &T) -> Result<(), Error> where T: Serialize { - assert_next_token!(self, StructNewType(name)); + assert_next_token!(self, NewtypeStruct(name)); value.serialize(self) } @@ -174,44 +174,44 @@ impl<'s, 'a> ser::Serializer for &'s mut Serializer<'a> { -> Result<(), Error> where T: Serialize { - if self.tokens.first() == Some(&Token::EnumStart(name)) { + if self.tokens.first() == Some(&Token::Enum(name)) { self.next_token(); assert_next_token!(self, Str(variant)); } else { - assert_next_token!(self, EnumNewType(name, variant)); + assert_next_token!(self, NewtypeVariant(name, variant)); } value.serialize(self) } fn serialize_none(self) -> Result<(), Error> { - assert_next_token!(self, Option(false)); + assert_next_token!(self, None); Ok(()) } fn serialize_some(self, value: &T) -> Result<(), Error> where T: Serialize { - assert_next_token!(self, Option(true)); + assert_next_token!(self, Some); value.serialize(self) } fn serialize_seq(self, len: Option) -> Result { - assert_next_token!(self, SeqStart(len)); + assert_next_token!(self, Seq(len)); Ok(self) } fn serialize_seq_fixed_size(self, len: usize) -> Result { - assert_next_token!(self, SeqArrayStart(len)); + assert_next_token!(self, SeqFixedSize(len)); Ok(self) } fn serialize_tuple(self, len: usize) -> Result { - assert_next_token!(self, TupleStart(len)); + assert_next_token!(self, Tuple(len)); Ok(self) } fn serialize_tuple_struct(self, name: &'static str, len: usize) -> Result { - assert_next_token!(self, TupleStructStart(name, len)); + assert_next_token!(self, TupleStruct(name, len)); Ok(self) } @@ -221,17 +221,17 @@ impl<'s, 'a> ser::Serializer for &'s mut Serializer<'a> { variant: &'static str, len: usize) -> Result { - assert_next_token!(self, EnumSeqStart(name, variant, len)); + assert_next_token!(self, TupleVariant(name, variant, len)); Ok(self) } fn serialize_map(self, len: Option) -> Result { - assert_next_token!(self, MapStart(len)); + assert_next_token!(self, Map(len)); Ok(self) } fn serialize_struct(self, name: &'static str, len: usize) -> Result { - assert_next_token!(self, StructStart(name, len)); + assert_next_token!(self, Struct(name, len)); Ok(self) } @@ -241,7 +241,7 @@ impl<'s, 'a> ser::Serializer for &'s mut Serializer<'a> { variant: &'static str, len: usize) -> Result { - assert_next_token!(self, EnumMapStart(name, variant, len)); + assert_next_token!(self, StructVariant(name, variant, len)); Ok(self) } } @@ -305,7 +305,7 @@ impl<'s, 'a> ser::SerializeTupleVariant for &'s mut Serializer<'a> { } fn end(self) -> Result<(), Error> { - 
assert_next_token!(self, EnumSeqEnd); + assert_next_token!(self, TupleVariantEnd); Ok(()) } } @@ -367,7 +367,7 @@ impl<'s, 'a> ser::SerializeStructVariant for &'s mut Serializer<'a> { } fn end(self) -> Result<(), Self::Error> { - assert_next_token!(self, EnumMapEnd); + assert_next_token!(self, StructVariantEnd); Ok(()) } } From 51d3fb1ebc89727bb4a722452ed976e8b8439323 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 5 Apr 2017 10:40:14 -0700 Subject: [PATCH 3/3] Update token names in test suite --- test_suite/tests/test_annotations.rs | 124 ++++++++--------- test_suite/tests/test_borrow.rs | 6 +- test_suite/tests/test_bytes.rs | 8 +- test_suite/tests/test_de.rs | 200 +++++++++++++-------------- test_suite/tests/test_macros.rs | 158 ++++++++++----------- test_suite/tests/test_ser.rs | 74 +++++----- 6 files changed, 285 insertions(+), 285 deletions(-) diff --git a/test_suite/tests/test_annotations.rs b/test_suite/tests/test_annotations.rs index 1eef647c..490fb724 100644 --- a/test_suite/tests/test_annotations.rs +++ b/test_suite/tests/test_annotations.rs @@ -85,7 +85,7 @@ fn test_default_struct() { assert_de_tokens( &DefaultStruct { a1: 1, a2: 2, a3: 3, a4: 0, a5: 123 }, &[ - Token::StructStart("DefaultStruct", 3), + Token::Struct("DefaultStruct", 3), Token::Str("a1"), Token::I32(1), @@ -109,7 +109,7 @@ fn test_default_struct() { assert_de_tokens( &DefaultStruct { a1: 1, a2: 0, a3: 123, a4: 0, a5: 123 }, &[ - Token::StructStart("DefaultStruct", 1), + Token::Struct("DefaultStruct", 1), Token::Str("a1"), Token::I32(1), @@ -142,7 +142,7 @@ fn test_default_enum() { assert_de_tokens( &DefaultEnum::Struct { a1: 1, a2: 2, a3: 3, a4: 0, a5: 123 }, &[ - Token::EnumMapStart("DefaultEnum", "Struct", 3), + Token::StructVariant("DefaultEnum", "Struct", 3), Token::Str("a1"), Token::I32(1), @@ -159,19 +159,19 @@ fn test_default_enum() { Token::Str("a5"), Token::I32(5), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); assert_de_tokens( &DefaultEnum::Struct { a1: 1, a2: 0, a3: 123, a4: 0, a5: 123 }, &[ - Token::EnumMapStart("DefaultEnum", "Struct", 3), + Token::StructVariant("DefaultEnum", "Struct", 3), Token::Str("a1"), Token::I32(1), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); } @@ -199,7 +199,7 @@ fn test_no_std_default() { assert_de_tokens( &ContainsNoStdDefault { a: NoStdDefault(123) }, &[ - Token::StructStart("ContainsNoStdDefault", 1), + Token::Struct("ContainsNoStdDefault", 1), Token::StructEnd, ] ); @@ -207,10 +207,10 @@ fn test_no_std_default() { assert_de_tokens( &ContainsNoStdDefault { a: NoStdDefault(8) }, &[ - Token::StructStart("ContainsNoStdDefault", 1), + Token::Struct("ContainsNoStdDefault", 1), Token::Str("a"), - Token::StructNewType("NoStdDefault"), + Token::NewtypeStruct("NoStdDefault"), Token::I8(8), Token::StructEnd, @@ -271,7 +271,7 @@ fn test_elt_not_deserialize() { e: NotDeserializeEnum::Trouble, }, &[ - Token::StructStart("ContainsNotDeserialize", 3), + Token::Struct("ContainsNotDeserialize", 3), Token::StructEnd, ] ); @@ -289,7 +289,7 @@ fn test_ignore_unknown() { assert_de_tokens( &DefaultStruct { a1: 1, a2: 2, a3: 3, a4: 0, a5: 123 }, &[ - Token::StructStart("DefaultStruct", 5), + Token::Struct("DefaultStruct", 5), Token::Str("whoops1"), Token::I32(2), @@ -298,7 +298,7 @@ fn test_ignore_unknown() { Token::I32(1), Token::Str("whoops2"), - Token::SeqStart(Some(1)), + Token::Seq(Some(1)), Token::I32(2), Token::SeqEnd, @@ -317,7 +317,7 @@ fn test_ignore_unknown() { assert_de_tokens_error::( &[ - Token::StructStart("DenyUnknown", 2), + Token::Struct("DenyUnknown", 
2), Token::Str("a1"), Token::I32(1), @@ -349,7 +349,7 @@ fn test_rename_struct() { assert_tokens( &RenameStruct { a1: 1, a2: 2 }, &[ - Token::StructStart("Superhero", 2), + Token::Struct("Superhero", 2), Token::Str("a1"), Token::I32(1), @@ -364,7 +364,7 @@ fn test_rename_struct() { assert_ser_tokens( &RenameStructSerializeDeserialize { a1: 1, a2: 2 }, &[ - Token::StructStart("SuperheroSer", 2), + Token::Struct("SuperheroSer", 2), Token::Str("a1"), Token::I32(1), @@ -379,7 +379,7 @@ fn test_rename_struct() { assert_de_tokens( &RenameStructSerializeDeserialize { a1: 1, a2: 2 }, &[ - Token::StructStart("SuperheroDe", 2), + Token::Struct("SuperheroDe", 2), Token::Str("a1"), Token::I32(1), @@ -425,14 +425,14 @@ fn test_rename_enum() { assert_tokens( &RenameEnum::Batman, &[ - Token::EnumUnit("Superhero", "bruce_wayne"), + Token::UnitVariant("Superhero", "bruce_wayne"), ] ); assert_tokens( &RenameEnum::Superman(0), &[ - Token::EnumNewType("Superhero", "clark_kent"), + Token::NewtypeVariant("Superhero", "clark_kent"), Token::I8(0), ] ); @@ -440,22 +440,22 @@ fn test_rename_enum() { assert_tokens( &RenameEnum::WonderWoman(0, 1), &[ - Token::EnumSeqStart("Superhero", "diana_prince", 2), + Token::TupleVariant("Superhero", "diana_prince", 2), Token::I8(0), Token::I8(1), - Token::EnumSeqEnd, + Token::TupleVariantEnd, ] ); assert_tokens( &RenameEnum::Flash { a: 1 }, &[ - Token::EnumMapStart("Superhero", "barry_allan", 1), + Token::StructVariant("Superhero", "barry_allan", 1), Token::Str("b"), Token::I32(1), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); @@ -465,7 +465,7 @@ fn test_rename_enum() { b: String::new(), }, &[ - Token::EnumMapStart("SuperheroSer", "dick_grayson", 2), + Token::StructVariant("SuperheroSer", "dick_grayson", 2), Token::Str("a"), Token::I8(0), @@ -473,7 +473,7 @@ fn test_rename_enum() { Token::Str("c"), Token::Str(""), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); @@ -483,7 +483,7 @@ fn test_rename_enum() { b: String::new(), }, &[ - Token::EnumMapStart("SuperheroDe", "jason_todd", 2), + Token::StructVariant("SuperheroDe", "jason_todd", 2), Token::Str("a"), Token::I8(0), @@ -491,7 +491,7 @@ fn test_rename_enum() { Token::Str("d"), Token::Str(""), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); } @@ -515,7 +515,7 @@ fn test_skip_serializing_struct() { c: 3, }, &[ - Token::StructStart("SkipSerializingStruct", 2), + Token::Struct("SkipSerializingStruct", 2), Token::Str("a"), Token::I8(1), @@ -534,7 +534,7 @@ fn test_skip_serializing_struct() { c: 123, }, &[ - Token::StructStart("SkipSerializingStruct", 1), + Token::Struct("SkipSerializingStruct", 1), Token::Str("a"), Token::I8(1), @@ -565,7 +565,7 @@ fn test_skip_serializing_enum() { c: 3, }, &[ - Token::EnumMapStart("SkipSerializingEnum", "Struct", 2), + Token::StructVariant("SkipSerializingEnum", "Struct", 2), Token::Str("a"), Token::I8(1), @@ -573,7 +573,7 @@ fn test_skip_serializing_enum() { Token::Str("c"), Token::I32(3), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); @@ -584,12 +584,12 @@ fn test_skip_serializing_enum() { c: 123, }, &[ - Token::EnumMapStart("SkipSerializingEnum", "Struct", 1), + Token::StructVariant("SkipSerializingEnum", "Struct", 1), Token::Str("a"), Token::I8(1), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); } @@ -630,10 +630,10 @@ fn test_elt_not_serialize() { d: NotSerializeEnum::Trouble, }, &[ - Token::StructStart("ContainsNotSerialize", 2), + Token::Struct("ContainsNotSerialize", 2), Token::Str("a"), - Token::Option(true), + Token::Some, Token::I8(1), Token::Str("d"), @@ 
-660,7 +660,7 @@ fn test_serialize_with_struct() { b: 2, }, &[ - Token::StructStart("SerializeWithStruct", 2), + Token::Struct("SerializeWithStruct", 2), Token::Str("a"), Token::I8(1), @@ -678,7 +678,7 @@ fn test_serialize_with_struct() { b: 123, }, &[ - Token::StructStart("SerializeWithStruct", 2), + Token::Struct("SerializeWithStruct", 2), Token::Str("a"), Token::I8(1), @@ -709,7 +709,7 @@ fn test_serialize_with_enum() { b: 2, }, &[ - Token::EnumMapStart("SerializeWithEnum", "Struct", 2), + Token::StructVariant("SerializeWithEnum", "Struct", 2), Token::Str("a"), Token::I8(1), @@ -717,7 +717,7 @@ fn test_serialize_with_enum() { Token::Str("b"), Token::Bool(false), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); @@ -727,7 +727,7 @@ fn test_serialize_with_enum() { b: 123, }, &[ - Token::EnumMapStart("SerializeWithEnum", "Struct", 2), + Token::StructVariant("SerializeWithEnum", "Struct", 2), Token::Str("a"), Token::I8(1), @@ -735,7 +735,7 @@ fn test_serialize_with_enum() { Token::Str("b"), Token::Bool(true), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); } @@ -755,7 +755,7 @@ fn test_deserialize_with_struct() { b: 2, }, &[ - Token::StructStart("DeserializeWithStruct", 2), + Token::Struct("DeserializeWithStruct", 2), Token::Str("a"), Token::I8(1), @@ -773,7 +773,7 @@ fn test_deserialize_with_struct() { b: 123, }, &[ - Token::StructStart("DeserializeWithStruct", 2), + Token::Struct("DeserializeWithStruct", 2), Token::Str("a"), Token::I8(1), @@ -803,7 +803,7 @@ fn test_deserialize_with_enum() { b: 2, }, &[ - Token::EnumMapStart("DeserializeWithEnum", "Struct", 2), + Token::StructVariant("DeserializeWithEnum", "Struct", 2), Token::Str("a"), Token::I8(1), @@ -811,7 +811,7 @@ fn test_deserialize_with_enum() { Token::Str("b"), Token::Bool(false), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); @@ -821,7 +821,7 @@ fn test_deserialize_with_enum() { b: 123, }, &[ - Token::EnumMapStart("DeserializeWithEnum", "Struct", 2), + Token::StructVariant("DeserializeWithEnum", "Struct", 2), Token::Str("a"), Token::I8(1), @@ -829,7 +829,7 @@ fn test_deserialize_with_enum() { Token::Str("b"), Token::Bool(true), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); } @@ -838,7 +838,7 @@ fn test_deserialize_with_enum() { fn test_missing_renamed_field_struct() { assert_de_tokens_error::( &[ - Token::StructStart("Superhero", 2), + Token::Struct("Superhero", 2), Token::Str("a1"), Token::I32(1), @@ -850,7 +850,7 @@ fn test_missing_renamed_field_struct() { assert_de_tokens_error::( &[ - Token::StructStart("SuperheroDe", 2), + Token::Struct("SuperheroDe", 2), Token::Str("a1"), Token::I32(1), @@ -865,21 +865,21 @@ fn test_missing_renamed_field_struct() { fn test_missing_renamed_field_enum() { assert_de_tokens_error::( &[ - Token::EnumMapStart("Superhero", "barry_allan", 1), + Token::StructVariant("Superhero", "barry_allan", 1), - Token::EnumMapEnd, + Token::StructVariantEnd, ], Error::Message("missing field `b`".to_owned()), ); assert_de_tokens_error::>( &[ - Token::EnumMapStart("SuperheroDe", "jason_todd", 2), + Token::StructVariant("SuperheroDe", "jason_todd", 2), Token::Str("a"), Token::I8(0), - Token::EnumMapEnd, + Token::StructVariantEnd, ], Error::Message("missing field `d`".to_owned()), ); @@ -895,17 +895,17 @@ enum InvalidLengthEnum { fn test_invalid_length_enum() { assert_de_tokens_error::( &[ - Token::EnumSeqStart("InvalidLengthEnum", "A", 3), + Token::TupleVariant("InvalidLengthEnum", "A", 3), Token::I32(1), - Token::EnumSeqEnd, + Token::TupleVariantEnd, ], Error::Message("invalid length 1, 
expected tuple of 3 elements".to_owned()), ); assert_de_tokens_error::( &[ - Token::EnumSeqStart("InvalidLengthEnum", "B", 3), + Token::TupleVariant("InvalidLengthEnum", "B", 3), Token::I32(1), - Token::EnumSeqEnd, + Token::TupleVariantEnd, ], Error::Message("invalid length 1, expected tuple of 2 elements".to_owned()), ); @@ -966,26 +966,26 @@ impl From> for EnumToU32 { #[test] fn test_from_into_traits() { assert_ser_tokens::(&EnumToU32::One, - &[Token::Option(true), + &[Token::Some, Token::U32(1) ] ); assert_ser_tokens::(&EnumToU32::Nothing, - &[Token::Option(false)] + &[Token::None] ); assert_de_tokens::(&EnumToU32::Two, - &[Token::Option(true), + &[Token::Some, Token::U32(2) ] ); assert_ser_tokens::(&StructFromEnum(Some(5)), - &[Token::Option(false)] + &[Token::None] ); assert_ser_tokens::(&StructFromEnum(None), - &[Token::Option(false)] + &[Token::None] ); assert_de_tokens::(&StructFromEnum(Some(2)), - &[Token::Option(true), + &[Token::Some, Token::U32(2) ] ); diff --git a/test_suite/tests/test_borrow.rs b/test_suite/tests/test_borrow.rs index c12bcdcd..b440a380 100644 --- a/test_suite/tests/test_borrow.rs +++ b/test_suite/tests/test_borrow.rs @@ -89,7 +89,7 @@ fn test_tuple() { assert_de_tokens( &("str", &b"bytes"[..]), &[ - Token::TupleStart(2), + Token::Tuple(2), Token::BorrowedStr("str"), Token::BorrowedBytes(b"bytes"), Token::TupleEnd, @@ -108,7 +108,7 @@ fn test_struct() { assert_de_tokens( &Borrowing { bs: "str", bb: b"bytes" }, &[ - Token::StructStart("Borrowing", 2), + Token::Struct("Borrowing", 2), Token::BorrowedStr("bs"), Token::BorrowedStr("str"), @@ -132,7 +132,7 @@ fn test_cow() { } let tokens = &[ - Token::StructStart("Cows", 2), + Token::Struct("Cows", 2), Token::Str("copied"), Token::BorrowedStr("copied"), diff --git a/test_suite/tests/test_bytes.rs b/test_suite/tests/test_bytes.rs index 3cc941d3..4da64e1c 100644 --- a/test_suite/tests/test_bytes.rs +++ b/test_suite/tests/test_bytes.rs @@ -22,11 +22,11 @@ fn test_byte_buf() { assert_de_tokens(&empty, &[Token::Str("")]); assert_de_tokens(&empty, &[Token::String("")]); assert_de_tokens(&empty, &[ - Token::SeqStart(None), + Token::Seq(None), Token::SeqEnd, ]); assert_de_tokens(&empty, &[ - Token::SeqStart(Some(0)), + Token::Seq(Some(0)), Token::SeqEnd, ]); @@ -36,14 +36,14 @@ fn test_byte_buf() { assert_de_tokens(&buf, &[Token::Str("ABC")]); assert_de_tokens(&buf, &[Token::String("ABC")]); assert_de_tokens(&buf, &[ - Token::SeqStart(None), + Token::Seq(None), Token::U8(65), Token::U8(66), Token::U8(67), Token::SeqEnd, ]); assert_de_tokens(&buf, &[ - Token::SeqStart(Some(3)), + Token::Seq(Some(3)), Token::U8(65), Token::U8(66), Token::U8(67), diff --git a/test_suite/tests/test_de.rs b/test_suite/tests/test_de.rs index 5fe900e1..eb8a9705 100644 --- a/test_suite/tests/test_de.rs +++ b/test_suite/tests/test_de.rs @@ -151,7 +151,7 @@ fn assert_de_tokens_ignore(ignorable_tokens: &[Token]) { // Embed the tokens to be ignored in the normal token // stream for an IgnoreBase type let concated_tokens : Vec = vec![ - Token::MapStart(Some(2)), + Token::Map(Some(2)), Token::Str("a"), Token::I32(1), @@ -218,20 +218,20 @@ declare_tests! 
{ } test_option { None:: => &[Token::Unit], - None:: => &[Token::Option(false)], + None:: => &[Token::None], Some(1) => &[ - Token::Option(true), + Token::Some, Token::I32(1), ], } test_result { Ok::(0) => &[ - Token::EnumStart("Result"), + Token::Enum("Result"), Token::Str("Ok"), Token::I32(0), ], Err::(1) => &[ - Token::EnumStart("Result"), + Token::Enum("Result"), Token::Str("Err"), Token::I32(1), ], @@ -245,44 +245,44 @@ declare_tests! { Token::UnitStruct("UnitStruct"), ], UnitStruct => &[ - Token::SeqStart(Some(0)), + Token::Seq(Some(0)), Token::SeqEnd, ], UnitStruct => &[ - Token::SeqStart(None), + Token::Seq(None), Token::SeqEnd, ], } test_newtype_struct { NewtypeStruct(1) => &[ - Token::StructNewType("NewtypeStruct"), + Token::NewtypeStruct("NewtypeStruct"), Token::I32(1), ], } test_tuple_struct { TupleStruct(1, 2, 3) => &[ - Token::SeqStart(Some(3)), + Token::Seq(Some(3)), Token::I32(1), Token::I32(2), Token::I32(3), Token::SeqEnd, ], TupleStruct(1, 2, 3) => &[ - Token::SeqStart(None), + Token::Seq(None), Token::I32(1), Token::I32(2), Token::I32(3), Token::SeqEnd, ], TupleStruct(1, 2, 3) => &[ - Token::TupleStructStart("TupleStruct", 3), + Token::TupleStruct("TupleStruct", 3), Token::I32(1), Token::I32(2), Token::I32(3), Token::TupleStructEnd, ], TupleStruct(1, 2, 3) => &[ - Token::TupleStructStart("TupleStruct", 3), + Token::TupleStruct("TupleStruct", 3), Token::I32(1), Token::I32(2), Token::I32(3), @@ -291,47 +291,47 @@ declare_tests! { } test_btreeset { BTreeSet::::new() => &[ - Token::SeqStart(Some(0)), + Token::Seq(Some(0)), Token::SeqEnd, ], btreeset![btreeset![], btreeset![1], btreeset![2, 3]] => &[ - Token::SeqStart(Some(3)), - Token::SeqStart(Some(0)), + Token::Seq(Some(3)), + Token::Seq(Some(0)), Token::SeqEnd, - Token::SeqStart(Some(1)), + Token::Seq(Some(1)), Token::I32(1), Token::SeqEnd, - Token::SeqStart(Some(2)), + Token::Seq(Some(2)), Token::I32(2), Token::I32(3), Token::SeqEnd, Token::SeqEnd, ], BTreeSet::::new() => &[ - Token::TupleStructStart("Anything", 0), + Token::TupleStruct("Anything", 0), Token::TupleStructEnd, ], } test_hashset { HashSet::::new() => &[ - Token::SeqStart(Some(0)), + Token::Seq(Some(0)), Token::SeqEnd, ], hashset![1, 2, 3] => &[ - Token::SeqStart(Some(3)), + Token::Seq(Some(3)), Token::I32(1), Token::I32(2), Token::I32(3), Token::SeqEnd, ], HashSet::::new() => &[ - Token::TupleStructStart("Anything", 0), + Token::TupleStruct("Anything", 0), Token::TupleStructEnd, ], hashset![FnvHasher @ 1, 2, 3] => &[ - Token::SeqStart(Some(3)), + Token::Seq(Some(3)), Token::I32(1), Token::I32(2), Token::I32(3), @@ -340,93 +340,93 @@ declare_tests! 
{ } test_vec { Vec::::new() => &[ - Token::SeqStart(Some(0)), + Token::Seq(Some(0)), Token::SeqEnd, ], vec![vec![], vec![1], vec![2, 3]] => &[ - Token::SeqStart(Some(3)), - Token::SeqStart(Some(0)), + Token::Seq(Some(3)), + Token::Seq(Some(0)), Token::SeqEnd, - Token::SeqStart(Some(1)), + Token::Seq(Some(1)), Token::I32(1), Token::SeqEnd, - Token::SeqStart(Some(2)), + Token::Seq(Some(2)), Token::I32(2), Token::I32(3), Token::SeqEnd, Token::SeqEnd, ], Vec::::new() => &[ - Token::TupleStructStart("Anything", 0), + Token::TupleStruct("Anything", 0), Token::TupleStructEnd, ], } test_array { [0; 0] => &[ - Token::SeqStart(Some(0)), + Token::Seq(Some(0)), Token::SeqEnd, ], [0; 0] => &[ - Token::SeqArrayStart(0), + Token::SeqFixedSize(0), Token::SeqEnd, ], ([0; 0], [1], [2, 3]) => &[ - Token::SeqStart(Some(3)), - Token::SeqStart(Some(0)), + Token::Seq(Some(3)), + Token::Seq(Some(0)), Token::SeqEnd, - Token::SeqStart(Some(1)), + Token::Seq(Some(1)), Token::I32(1), Token::SeqEnd, - Token::SeqStart(Some(2)), + Token::Seq(Some(2)), Token::I32(2), Token::I32(3), Token::SeqEnd, Token::SeqEnd, ], ([0; 0], [1], [2, 3]) => &[ - Token::SeqArrayStart(3), - Token::SeqArrayStart(0), + Token::SeqFixedSize(3), + Token::SeqFixedSize(0), Token::SeqEnd, - Token::SeqArrayStart(1), + Token::SeqFixedSize(1), Token::I32(1), Token::SeqEnd, - Token::SeqArrayStart(2), + Token::SeqFixedSize(2), Token::I32(2), Token::I32(3), Token::SeqEnd, Token::SeqEnd, ], [0; 0] => &[ - Token::TupleStructStart("Anything", 0), + Token::TupleStruct("Anything", 0), Token::TupleStructEnd, ], } test_tuple { (1,) => &[ - Token::SeqStart(Some(1)), + Token::Seq(Some(1)), Token::I32(1), Token::SeqEnd, ], (1, 2, 3) => &[ - Token::SeqStart(Some(3)), + Token::Seq(Some(3)), Token::I32(1), Token::I32(2), Token::I32(3), Token::SeqEnd, ], (1,) => &[ - Token::TupleStart(1), + Token::Tuple(1), Token::I32(1), Token::TupleEnd, ], (1, 2, 3) => &[ - Token::TupleStart(3), + Token::Tuple(3), Token::I32(1), Token::I32(2), Token::I32(3), @@ -435,17 +435,17 @@ declare_tests! { } test_btreemap { BTreeMap::::new() => &[ - Token::MapStart(Some(0)), + Token::Map(Some(0)), Token::MapEnd, ], btreemap![1 => 2] => &[ - Token::MapStart(Some(1)), + Token::Map(Some(1)), Token::I32(1), Token::I32(2), Token::MapEnd, ], btreemap![1 => 2, 3 => 4] => &[ - Token::MapStart(Some(2)), + Token::Map(Some(2)), Token::I32(1), Token::I32(2), @@ -454,13 +454,13 @@ declare_tests! { Token::MapEnd, ], btreemap![1 => btreemap![], 2 => btreemap![3 => 4, 5 => 6]] => &[ - Token::MapStart(Some(2)), + Token::Map(Some(2)), Token::I32(1), - Token::MapStart(Some(0)), + Token::Map(Some(0)), Token::MapEnd, Token::I32(2), - Token::MapStart(Some(2)), + Token::Map(Some(2)), Token::I32(3), Token::I32(4), @@ -470,23 +470,23 @@ declare_tests! { Token::MapEnd, ], BTreeMap::::new() => &[ - Token::StructStart("Anything", 0), + Token::Struct("Anything", 0), Token::StructEnd, ], } test_hashmap { HashMap::::new() => &[ - Token::MapStart(Some(0)), + Token::Map(Some(0)), Token::MapEnd, ], hashmap![1 => 2] => &[ - Token::MapStart(Some(1)), + Token::Map(Some(1)), Token::I32(1), Token::I32(2), Token::MapEnd, ], hashmap![1 => 2, 3 => 4] => &[ - Token::MapStart(Some(2)), + Token::Map(Some(2)), Token::I32(1), Token::I32(2), @@ -495,13 +495,13 @@ declare_tests! 
{ Token::MapEnd, ], hashmap![1 => hashmap![], 2 => hashmap![3 => 4, 5 => 6]] => &[ - Token::MapStart(Some(2)), + Token::Map(Some(2)), Token::I32(1), - Token::MapStart(Some(0)), + Token::Map(Some(0)), Token::MapEnd, Token::I32(2), - Token::MapStart(Some(2)), + Token::Map(Some(2)), Token::I32(3), Token::I32(4), @@ -511,11 +511,11 @@ declare_tests! { Token::MapEnd, ], HashMap::::new() => &[ - Token::StructStart("Anything", 0), + Token::Struct("Anything", 0), Token::StructEnd, ], hashmap![FnvHasher @ 1 => 2, 3 => 4] => &[ - Token::MapStart(Some(2)), + Token::Map(Some(2)), Token::I32(1), Token::I32(2), @@ -526,7 +526,7 @@ declare_tests! { } test_struct { Struct { a: 1, b: 2, c: 0 } => &[ - Token::MapStart(Some(3)), + Token::Map(Some(3)), Token::Str("a"), Token::I32(1), @@ -535,7 +535,7 @@ declare_tests! { Token::MapEnd, ], Struct { a: 1, b: 2, c: 0 } => &[ - Token::StructStart("Struct", 3), + Token::Struct("Struct", 3), Token::Str("a"), Token::I32(1), @@ -544,7 +544,7 @@ declare_tests! { Token::StructEnd, ], Struct { a: 1, b: 2, c: 0 } => &[ - Token::SeqStart(Some(3)), + Token::Seq(Some(3)), Token::I32(1), Token::I32(2), Token::SeqEnd, @@ -552,7 +552,7 @@ declare_tests! { } test_struct_with_skip { Struct { a: 1, b: 2, c: 0 } => &[ - Token::MapStart(Some(3)), + Token::Map(Some(3)), Token::Str("a"), Token::I32(1), @@ -567,7 +567,7 @@ declare_tests! { Token::MapEnd, ], Struct { a: 1, b: 2, c: 0 } => &[ - Token::StructStart("Struct", 3), + Token::Struct("Struct", 3), Token::Str("a"), Token::I32(1), @@ -584,11 +584,11 @@ declare_tests! { } test_struct_skip_all { StructSkipAll { a: 0 } => &[ - Token::StructStart("StructSkipAll", 0), + Token::Struct("StructSkipAll", 0), Token::StructEnd, ], StructSkipAll { a: 0 } => &[ - Token::StructStart("StructSkipAll", 1), + Token::Struct("StructSkipAll", 1), Token::Str("a"), Token::I32(1), @@ -599,13 +599,13 @@ declare_tests! { } test_struct_skip_all_deny_unknown { StructSkipAllDenyUnknown { a: 0 } => &[ - Token::StructStart("StructSkipAllDenyUnknown", 0), + Token::Struct("StructSkipAllDenyUnknown", 0), Token::StructEnd, ], } test_struct_default { StructDefault { a: 50, b: "overwritten".to_string() } => &[ - Token::StructStart("StructDefault", 1), + Token::Struct("StructDefault", 1), Token::Str("a"), Token::I32(50), @@ -614,33 +614,33 @@ declare_tests! { Token::StructEnd, ], StructDefault { a: 100, b: "default".to_string() } => &[ - Token::StructStart("StructDefault", 0), + Token::Struct("StructDefault", 0), Token::StructEnd, ], } test_enum_unit { Enum::Unit => &[ - Token::EnumUnit("Enum", "Unit"), + Token::UnitVariant("Enum", "Unit"), ], } test_enum_simple { Enum::Simple(1) => &[ - Token::EnumNewType("Enum", "Simple"), + Token::NewtypeVariant("Enum", "Simple"), Token::I32(1), ], } test_enum_seq { Enum::Seq(1, 2, 3) => &[ - Token::EnumSeqStart("Enum", "Seq", 3), + Token::TupleVariant("Enum", "Seq", 3), Token::I32(1), Token::I32(2), Token::I32(3), - Token::EnumSeqEnd, + Token::TupleVariantEnd, ], } test_enum_map { Enum::Map { a: 1, b: 2, c: 3 } => &[ - Token::EnumMapStart("Enum", "Map", 3), + Token::StructVariant("Enum", "Map", 3), Token::Str("a"), Token::I32(1), @@ -649,19 +649,19 @@ declare_tests! 
{ Token::Str("c"), Token::I32(3), - Token::EnumMapEnd, + Token::StructVariantEnd, ], } test_enum_unit_usize { Enum::Unit => &[ - Token::EnumStart("Enum"), + Token::Enum("Enum"), Token::U32(0), Token::Unit, ], } test_enum_unit_bytes { Enum::Unit => &[ - Token::EnumStart("Enum"), + Token::Enum("Enum"), Token::Bytes(b"Unit"), Token::Unit, ], @@ -671,7 +671,7 @@ declare_tests! { } test_boxed_slice { Box::new([0, 1, 2]) => &[ - Token::SeqStart(Some(3)), + Token::Seq(Some(3)), Token::I32(0), Token::I32(1), Token::I32(2), @@ -680,7 +680,7 @@ declare_tests! { } test_duration { Duration::new(1, 2) => &[ - Token::StructStart("Duration", 2), + Token::Struct("Duration", 2), Token::Str("secs"), Token::U64(1), @@ -689,7 +689,7 @@ declare_tests! { Token::StructEnd, ], Duration::new(1, 2) => &[ - Token::SeqStart(Some(2)), + Token::Seq(Some(2)), Token::I64(1), Token::I64(2), Token::SeqEnd, @@ -697,7 +697,7 @@ declare_tests! { } test_range { 1u32..2u32 => &[ - Token::StructStart("Range", 2), + Token::Struct("Range", 2), Token::Str("start"), Token::U32(1), @@ -706,7 +706,7 @@ declare_tests! { Token::StructEnd, ], 1u32..2u32 => &[ - Token::SeqStart(Some(2)), + Token::Seq(Some(2)), Token::U64(1), Token::U64(2), Token::SeqEnd, @@ -742,9 +742,9 @@ fn test_osstring() { let value = OsString::from_vec(vec![1, 2, 3]); let tokens = [ - Token::EnumStart("OsString"), + Token::Enum("OsString"), Token::Str("Unix"), - Token::SeqStart(Some(2)), + Token::Seq(Some(2)), Token::U8(1), Token::U8(2), Token::U8(3), @@ -762,9 +762,9 @@ fn test_osstring() { let value = OsString::from_wide(&[1, 2, 3]); let tokens = [ - Token::EnumStart("OsString"), + Token::Enum("OsString"), Token::Str("Windows"), - Token::SeqStart(Some(2)), + Token::Seq(Some(2)), Token::U16(1), Token::U16(2), Token::U16(3), @@ -816,7 +816,7 @@ fn test_cstr_internal_null_end() { declare_error_tests! { test_unknown_field { &[ - Token::StructStart("StructDenyUnknown", 2), + Token::Struct("StructDenyUnknown", 2), Token::Str("a"), Token::I32(0), @@ -826,39 +826,39 @@ declare_error_tests! { } test_skipped_field_is_unknown { &[ - Token::StructStart("StructDenyUnknown", 2), + Token::Struct("StructDenyUnknown", 2), Token::Str("b"), ], Error::Message("unknown field `b`, expected `a`".to_owned()), } test_skip_all_deny_unknown { &[ - Token::StructStart("StructSkipAllDenyUnknown", 1), + Token::Struct("StructSkipAllDenyUnknown", 1), Token::Str("a"), ], Error::Message("unknown field `a`, there are no fields".to_owned()), } test_unknown_variant { &[ - Token::EnumUnit("Enum", "Foo"), + Token::UnitVariant("Enum", "Foo"), ], Error::Message("unknown variant `Foo`, expected one of `Unit`, `Simple`, `Seq`, `Map`".to_owned()), } test_enum_skipped_variant { &[ - Token::EnumUnit("Enum", "Skipped"), + Token::UnitVariant("Enum", "Skipped"), ], Error::Message("unknown variant `Skipped`, expected one of `Unit`, `Simple`, `Seq`, `Map`".to_owned()), } test_enum_skip_all { &[ - Token::EnumUnit("EnumSkipAll", "Skipped"), + Token::UnitVariant("EnumSkipAll", "Skipped"), ], Error::Message("unknown variant `Skipped`, there are no variants".to_owned()), } test_struct_seq_too_long { &[ - Token::SeqStart(Some(4)), + Token::Seq(Some(4)), Token::I32(1), Token::I32(2), Token::I32(3), @@ -867,7 +867,7 @@ declare_error_tests! { } test_duplicate_field_struct { &[ - Token::MapStart(Some(3)), + Token::Map(Some(3)), Token::Str("a"), Token::I32(1), @@ -877,7 +877,7 @@ declare_error_tests! 
{ } test_duplicate_field_enum { &[ - Token::EnumMapStart("Enum", "Map", 3), + Token::StructVariant("Enum", "Map", 3), Token::Str("a"), Token::I32(1), @@ -887,7 +887,7 @@ declare_error_tests! { } test_enum_out_of_range { &[ - Token::EnumStart("Enum"), + Token::Enum("Enum"), Token::U32(4), Token::Unit, ], @@ -895,7 +895,7 @@ declare_error_tests! { } test_short_tuple<(u8, u8, u8)> { &[ - Token::TupleStart(1), + Token::Tuple(1), Token::U8(1), Token::TupleEnd, ], @@ -903,7 +903,7 @@ declare_error_tests! { } test_short_array<[u8; 3]> { &[ - Token::SeqStart(Some(1)), + Token::Seq(Some(1)), Token::U8(1), Token::SeqEnd, ], @@ -923,21 +923,21 @@ declare_error_tests! { } test_unit_from_empty_seq<()> { &[ - Token::SeqStart(Some(0)), + Token::Seq(Some(0)), Token::SeqEnd, ], Error::Message("invalid type: sequence, expected unit".into()), } test_unit_from_empty_seq_without_len<()> { &[ - Token::SeqStart(None), + Token::Seq(None), Token::SeqEnd, ], Error::Message("invalid type: sequence, expected unit".into()), } test_unit_from_tuple_struct<()> { &[ - Token::TupleStructStart("Anything", 0), + Token::TupleStruct("Anything", 0), Token::TupleStructEnd, ], Error::Message("invalid type: sequence, expected unit".into()), diff --git a/test_suite/tests/test_macros.rs b/test_suite/tests/test_macros.rs index f739e860..344b5832 100644 --- a/test_suite/tests/test_macros.rs +++ b/test_suite/tests/test_macros.rs @@ -164,7 +164,7 @@ fn test_ser_named_tuple() { assert_ser_tokens( &SerNamedTuple(&a, &mut b, c), &[ - Token::TupleStructStart("SerNamedTuple", 3), + Token::TupleStruct("SerNamedTuple", 3), Token::I32(5), Token::I32(6), Token::I32(7), @@ -178,7 +178,7 @@ fn test_de_named_tuple() { assert_de_tokens( &DeNamedTuple(5, 6, 7), &[ - Token::SeqStart(Some(3)), + Token::Seq(Some(3)), Token::I32(5), Token::I32(6), Token::I32(7), @@ -189,7 +189,7 @@ fn test_de_named_tuple() { assert_de_tokens( &DeNamedTuple(5, 6, 7), &[ - Token::TupleStructStart("DeNamedTuple", 3), + Token::TupleStruct("DeNamedTuple", 3), Token::I32(5), Token::I32(6), Token::I32(7), @@ -211,7 +211,7 @@ fn test_ser_named_map() { c: c, }, &[ - Token::StructStart("SerNamedMap", 3), + Token::Struct("SerNamedMap", 3), Token::Str("a"), Token::I32(5), @@ -236,7 +236,7 @@ fn test_de_named_map() { c: 7, }, &[ - Token::StructStart("DeNamedMap", 3), + Token::Struct("DeNamedMap", 3), Token::Str("a"), Token::I32(5), @@ -257,7 +257,7 @@ fn test_ser_enum_unit() { assert_ser_tokens( &SerEnum::Unit::, &[ - Token::EnumUnit("SerEnum", "Unit"), + Token::UnitVariant("SerEnum", "Unit"), ] ); } @@ -277,12 +277,12 @@ fn test_ser_enum_seq() { &mut d, ), &[ - Token::EnumSeqStart("SerEnum", "Seq", 4), + Token::TupleVariant("SerEnum", "Seq", 4), Token::I8(1), Token::I32(2), Token::I32(3), Token::I32(4), - Token::EnumSeqEnd, + Token::TupleVariantEnd, ], ); } @@ -302,7 +302,7 @@ fn test_ser_enum_map() { d: &mut d, }, &[ - Token::EnumMapStart("SerEnum", "Map", 4), + Token::StructVariant("SerEnum", "Map", 4), Token::Str("a"), Token::I8(1), @@ -316,7 +316,7 @@ fn test_ser_enum_map() { Token::Str("d"), Token::I32(4), - Token::EnumMapEnd, + Token::StructVariantEnd, ], ); } @@ -326,7 +326,7 @@ fn test_de_enum_unit() { assert_tokens( &DeEnum::Unit::, &[ - Token::EnumUnit("DeEnum", "Unit"), + Token::UnitVariant("DeEnum", "Unit"), ], ); } @@ -346,12 +346,12 @@ fn test_de_enum_seq() { d, ), &[ - Token::EnumSeqStart("DeEnum", "Seq", 4), + Token::TupleVariant("DeEnum", "Seq", 4), Token::I8(1), Token::I32(2), Token::I32(3), Token::I32(4), - Token::EnumSeqEnd, + Token::TupleVariantEnd, ], ); } @@ 
-371,7 +371,7 @@ fn test_de_enum_map() { d: d, }, &[ - Token::EnumMapStart("DeEnum", "Map", 4), + Token::StructVariant("DeEnum", "Map", 4), Token::Str("a"), Token::I8(1), @@ -385,7 +385,7 @@ fn test_de_enum_map() { Token::Str("d"), Token::I32(4), - Token::EnumMapEnd, + Token::StructVariantEnd, ], ); } @@ -397,7 +397,7 @@ fn test_lifetimes() { assert_ser_tokens( &Lifetimes::LifetimeSeq(&value), &[ - Token::EnumNewType("Lifetimes", "LifetimeSeq"), + Token::NewtypeVariant("Lifetimes", "LifetimeSeq"), Token::I32(5), ] ); @@ -405,7 +405,7 @@ fn test_lifetimes() { assert_ser_tokens( &Lifetimes::NoLifetimeSeq(5), &[ - Token::EnumNewType("Lifetimes", "NoLifetimeSeq"), + Token::NewtypeVariant("Lifetimes", "NoLifetimeSeq"), Token::I32(5), ] ); @@ -413,24 +413,24 @@ fn test_lifetimes() { assert_ser_tokens( &Lifetimes::LifetimeMap { a: &value }, &[ - Token::EnumMapStart("Lifetimes", "LifetimeMap", 1), + Token::StructVariant("Lifetimes", "LifetimeMap", 1), Token::Str("a"), Token::I32(5), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); assert_ser_tokens( &Lifetimes::NoLifetimeMap { a: 5 }, &[ - Token::EnumMapStart("Lifetimes", "NoLifetimeMap", 1), + Token::StructVariant("Lifetimes", "NoLifetimeMap", 1), Token::Str("a"), Token::I32(5), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); } @@ -440,7 +440,7 @@ fn test_generic_struct() { assert_tokens( &GenericStruct { x: 5u32 }, &[ - Token::StructStart("GenericStruct", 1), + Token::Struct("GenericStruct", 1), Token::Str("x"), Token::U32(5), @@ -455,7 +455,7 @@ fn test_generic_newtype_struct() { assert_tokens( &GenericNewTypeStruct(5u32), &[ - Token::StructNewType("GenericNewTypeStruct"), + Token::NewtypeStruct("GenericNewTypeStruct"), Token::U32(5), ] ); @@ -466,7 +466,7 @@ fn test_generic_tuple_struct() { assert_tokens( &GenericTupleStruct(5u32, 6u32), &[ - Token::TupleStructStart("GenericTupleStruct", 2), + Token::TupleStruct("GenericTupleStruct", 2), Token::U32(5), Token::U32(6), Token::TupleStructEnd, @@ -479,7 +479,7 @@ fn test_generic_enum_unit() { assert_tokens( &GenericEnum::Unit::, &[ - Token::EnumUnit("GenericEnum", "Unit"), + Token::UnitVariant("GenericEnum", "Unit"), ] ); } @@ -489,7 +489,7 @@ fn test_generic_enum_newtype() { assert_tokens( &GenericEnum::NewType::(5), &[ - Token::EnumNewType("GenericEnum", "NewType"), + Token::NewtypeVariant("GenericEnum", "NewType"), Token::U32(5), ] ); @@ -500,10 +500,10 @@ fn test_generic_enum_seq() { assert_tokens( &GenericEnum::Seq::(5, 6), &[ - Token::EnumSeqStart("GenericEnum", "Seq", 2), + Token::TupleVariant("GenericEnum", "Seq", 2), Token::U32(5), Token::U32(6), - Token::EnumSeqEnd, + Token::TupleVariantEnd, ] ); } @@ -513,7 +513,7 @@ fn test_generic_enum_map() { assert_tokens( &GenericEnum::Map:: { x: 5, y: 6 }, &[ - Token::EnumMapStart("GenericEnum", "Map", 2), + Token::StructVariant("GenericEnum", "Map", 2), Token::Str("x"), Token::U32(5), @@ -521,7 +521,7 @@ fn test_generic_enum_map() { Token::Str("y"), Token::U32(6), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); } @@ -531,7 +531,7 @@ fn test_default_ty_param() { assert_tokens( &DefaultTyParam:: { phantom: PhantomData }, &[ - Token::StructStart("DefaultTyParam", 1), + Token::Struct("DefaultTyParam", 1), Token::Str("phantom"), Token::UnitStruct("PhantomData"), @@ -551,7 +551,7 @@ fn test_enum_state_field() { assert_tokens( &SomeEnum::Key { key: 'a', state: true }, &[ - Token::EnumMapStart("SomeEnum", "Key", 2), + Token::StructVariant("SomeEnum", "Key", 2), Token::Str("key"), Token::Char('a'), @@ -559,7 +559,7 @@ fn 
test_enum_state_field() { Token::Str("state"), Token::Bool(true), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); } @@ -584,7 +584,7 @@ fn test_untagged_enum() { assert_tokens( &Untagged::A { a: 1 }, &[ - Token::StructStart("Untagged", 1), + Token::Struct("Untagged", 1), Token::Str("a"), Token::U8(1), @@ -596,7 +596,7 @@ fn test_untagged_enum() { assert_tokens( &Untagged::B { b: 2 }, &[ - Token::StructStart("Untagged", 1), + Token::Struct("Untagged", 1), Token::Str("b"), Token::U8(2), @@ -628,7 +628,7 @@ fn test_untagged_enum() { assert_tokens( &Untagged::F(1, 2), &[ - Token::TupleStart(2), + Token::Tuple(2), Token::U8(1), Token::U8(2), Token::TupleEnd, @@ -637,14 +637,14 @@ fn test_untagged_enum() { assert_de_tokens_error::( &[ - Token::Option(false), + Token::None, ], Error::Message("data did not match any variant of untagged enum Untagged".to_owned()), ); assert_de_tokens_error::( &[ - Token::TupleStart(1), + Token::Tuple(1), Token::U8(1), Token::TupleEnd, ], @@ -653,7 +653,7 @@ fn test_untagged_enum() { assert_de_tokens_error::( &[ - Token::TupleStart(3), + Token::Tuple(3), Token::U8(1), Token::U8(2), Token::U8(3), @@ -691,7 +691,7 @@ fn test_internally_tagged_enum() { assert_tokens( &InternallyTagged::A { a: 1 }, &[ - Token::StructStart("InternallyTagged", 2), + Token::Struct("InternallyTagged", 2), Token::Str("type"), Token::Str("A"), @@ -706,7 +706,7 @@ fn test_internally_tagged_enum() { assert_tokens( &InternallyTagged::B { b: 2 }, &[ - Token::StructStart("InternallyTagged", 2), + Token::Struct("InternallyTagged", 2), Token::Str("type"), Token::Str("B"), @@ -721,7 +721,7 @@ fn test_internally_tagged_enum() { assert_tokens( &InternallyTagged::C, &[ - Token::StructStart("InternallyTagged", 1), + Token::Struct("InternallyTagged", 1), Token::Str("type"), Token::Str("C"), @@ -733,7 +733,7 @@ fn test_internally_tagged_enum() { assert_tokens( &InternallyTagged::D(BTreeMap::new()), &[ - Token::MapStart(Some(1)), + Token::Map(Some(1)), Token::Str("type"), Token::Str("D"), @@ -745,7 +745,7 @@ fn test_internally_tagged_enum() { assert_tokens( &InternallyTagged::E(Newtype(BTreeMap::new())), &[ - Token::MapStart(Some(1)), + Token::Map(Some(1)), Token::Str("type"), Token::Str("E"), @@ -757,7 +757,7 @@ fn test_internally_tagged_enum() { assert_tokens( &InternallyTagged::F(Struct { f: 6 }), &[ - Token::StructStart("Struct", 2), + Token::Struct("Struct", 2), Token::Str("type"), Token::Str("F"), @@ -771,7 +771,7 @@ fn test_internally_tagged_enum() { assert_de_tokens_error::( &[ - Token::MapStart(Some(0)), + Token::Map(Some(0)), Token::MapEnd, ], Error::Message("missing field `type`".to_owned()), @@ -779,7 +779,7 @@ fn test_internally_tagged_enum() { assert_de_tokens_error::( &[ - Token::MapStart(Some(1)), + Token::Map(Some(1)), Token::Str("type"), Token::Str("Z"), @@ -805,7 +805,7 @@ fn test_adjacently_tagged_enum() { assert_tokens( &AdjacentlyTagged::Unit::, &[ - Token::StructStart("AdjacentlyTagged", 1), + Token::Struct("AdjacentlyTagged", 1), Token::Str("t"), Token::Str("Unit"), @@ -818,7 +818,7 @@ fn test_adjacently_tagged_enum() { assert_de_tokens( &AdjacentlyTagged::Unit::, &[ - Token::StructStart("AdjacentlyTagged", 1), + Token::Struct("AdjacentlyTagged", 1), Token::Str("t"), Token::Str("Unit"), @@ -834,7 +834,7 @@ fn test_adjacently_tagged_enum() { assert_de_tokens( &AdjacentlyTagged::Unit::, &[ - Token::StructStart("AdjacentlyTagged", 1), + Token::Struct("AdjacentlyTagged", 1), Token::Str("c"), Token::Unit, @@ -850,7 +850,7 @@ fn test_adjacently_tagged_enum() { assert_tokens( 
&AdjacentlyTagged::Newtype::(1), &[ - Token::StructStart("AdjacentlyTagged", 2), + Token::Struct("AdjacentlyTagged", 2), Token::Str("t"), Token::Str("Newtype"), @@ -866,7 +866,7 @@ fn test_adjacently_tagged_enum() { assert_de_tokens( &AdjacentlyTagged::Newtype::(1), &[ - Token::StructStart("AdjacentlyTagged", 2), + Token::Struct("AdjacentlyTagged", 2), Token::Str("c"), Token::U8(1), @@ -882,13 +882,13 @@ fn test_adjacently_tagged_enum() { assert_tokens( &AdjacentlyTagged::Tuple::(1, 1), &[ - Token::StructStart("AdjacentlyTagged", 2), + Token::Struct("AdjacentlyTagged", 2), Token::Str("t"), Token::Str("Tuple"), Token::Str("c"), - Token::TupleStart(2), + Token::Tuple(2), Token::U8(1), Token::U8(1), Token::TupleEnd, @@ -901,10 +901,10 @@ fn test_adjacently_tagged_enum() { assert_de_tokens( &AdjacentlyTagged::Tuple::(1, 1), &[ - Token::StructStart("AdjacentlyTagged", 2), + Token::Struct("AdjacentlyTagged", 2), Token::Str("c"), - Token::TupleStart(2), + Token::Tuple(2), Token::U8(1), Token::U8(1), Token::TupleEnd, @@ -920,13 +920,13 @@ fn test_adjacently_tagged_enum() { assert_tokens( &AdjacentlyTagged::Struct:: { f: 1 }, &[ - Token::StructStart("AdjacentlyTagged", 2), + Token::Struct("AdjacentlyTagged", 2), Token::Str("t"), Token::Str("Struct"), Token::Str("c"), - Token::StructStart("Struct", 1), + Token::Struct("Struct", 1), Token::Str("f"), Token::U8(1), Token::StructEnd, @@ -939,10 +939,10 @@ fn test_adjacently_tagged_enum() { assert_de_tokens( &AdjacentlyTagged::Struct:: { f: 1 }, &[ - Token::StructStart("AdjacentlyTagged", 2), + Token::Struct("AdjacentlyTagged", 2), Token::Str("c"), - Token::StructStart("Struct", 1), + Token::Struct("Struct", 1), Token::Str("f"), Token::U8(1), Token::StructEnd, @@ -974,7 +974,7 @@ fn test_enum_in_internally_tagged_enum() { assert_tokens( &Outer::Inner(Inner::Unit), &[ - Token::MapStart(Some(2)), + Token::Map(Some(2)), Token::Str("type"), Token::Str("Inner"), @@ -989,7 +989,7 @@ fn test_enum_in_internally_tagged_enum() { assert_tokens( &Outer::Inner(Inner::Newtype(1)), &[ - Token::MapStart(Some(2)), + Token::Map(Some(2)), Token::Str("type"), Token::Str("Inner"), @@ -1004,13 +1004,13 @@ fn test_enum_in_internally_tagged_enum() { assert_tokens( &Outer::Inner(Inner::Tuple(1, 1)), &[ - Token::MapStart(Some(2)), + Token::Map(Some(2)), Token::Str("type"), Token::Str("Inner"), Token::Str("Tuple"), - Token::TupleStructStart("Tuple", 2), + Token::TupleStruct("Tuple", 2), Token::U8(1), Token::U8(1), Token::TupleStructEnd, @@ -1022,13 +1022,13 @@ fn test_enum_in_internally_tagged_enum() { assert_tokens( &Outer::Inner(Inner::Struct { f: 1 }), &[ - Token::MapStart(Some(2)), + Token::Map(Some(2)), Token::Str("type"), Token::Str("Inner"), Token::Str("Struct"), - Token::StructStart("Struct", 1), + Token::Struct("Struct", 1), Token::Str("f"), Token::U8(1), Token::StructEnd, @@ -1057,14 +1057,14 @@ fn test_enum_in_untagged_enum() { assert_tokens( &Outer::Inner(Inner::Unit), &[ - Token::EnumUnit("Inner", "Unit"), + Token::UnitVariant("Inner", "Unit"), ] ); assert_tokens( &Outer::Inner(Inner::Newtype(1)), &[ - Token::EnumNewType("Inner", "Newtype"), + Token::NewtypeVariant("Inner", "Newtype"), Token::U8(1), ] ); @@ -1072,22 +1072,22 @@ fn test_enum_in_untagged_enum() { assert_tokens( &Outer::Inner(Inner::Tuple(1, 1)), &[ - Token::EnumSeqStart("Inner", "Tuple", 2), + Token::TupleVariant("Inner", "Tuple", 2), Token::U8(1), Token::U8(1), - Token::EnumSeqEnd, + Token::TupleVariantEnd, ] ); assert_tokens( &Outer::Inner(Inner::Struct { f: 1 }), &[ - Token::EnumMapStart("Inner", 
"Struct", 1), + Token::StructVariant("Inner", "Struct", 1), Token::Str("f"), Token::U8(1), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); } @@ -1124,43 +1124,43 @@ fn test_rename_all() { assert_tokens( &E::Serialize { serialize: true, serialize_seq: true }, &[ - Token::EnumMapStart("E", "serialize", 2), + Token::StructVariant("E", "serialize", 2), Token::Str("serialize"), Token::Bool(true), Token::Str("serializeSeq"), Token::Bool(true), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); assert_tokens( &E::SerializeSeq { serialize: true, serialize_seq: true }, &[ - Token::EnumMapStart("E", "serialize_seq", 2), + Token::StructVariant("E", "serialize_seq", 2), Token::Str("serialize"), Token::Bool(true), Token::Str("serialize-seq"), Token::Bool(true), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); assert_tokens( &E::SerializeMap { serialize: true, serialize_seq: true }, &[ - Token::EnumMapStart("E", "serialize_map", 2), + Token::StructVariant("E", "serialize_map", 2), Token::Str("SERIALIZE"), Token::Bool(true), Token::Str("SERIALIZE_SEQ"), Token::Bool(true), - Token::EnumMapEnd, + Token::StructVariantEnd, ] ); assert_tokens( &S { serialize: true, serialize_seq: true }, &[ - Token::StructStart("S", 2), + Token::Struct("S", 2), Token::Str("Serialize"), Token::Bool(true), Token::Str("SerializeSeq"), diff --git a/test_suite/tests/test_ser.rs b/test_suite/tests/test_ser.rs index 89df7851..3c5b4385 100644 --- a/test_suite/tests/test_ser.rs +++ b/test_suite/tests/test_ser.rs @@ -102,29 +102,29 @@ declare_tests! { "abc".to_owned() => &[Token::Str("abc")], } test_option { - None:: => &[Token::Option(false)], + None:: => &[Token::None], Some(1) => &[ - Token::Option(true), + Token::Some, Token::I32(1), ], } test_result { Ok::(0) => &[ - Token::EnumNewType("Result", "Ok"), + Token::NewtypeVariant("Result", "Ok"), Token::I32(0), ], Err::(1) => &[ - Token::EnumNewType("Result", "Err"), + Token::NewtypeVariant("Result", "Err"), Token::I32(1), ], } test_slice { &[0][..0] => &[ - Token::SeqStart(Some(0)), + Token::Seq(Some(0)), Token::SeqEnd, ], &[1, 2, 3][..] => &[ - Token::SeqStart(Some(3)), + Token::Seq(Some(3)), Token::I32(1), Token::I32(2), Token::I32(3), @@ -133,11 +133,11 @@ declare_tests! { } test_array { [0; 0] => &[ - Token::SeqArrayStart(0), + Token::SeqFixedSize(0), Token::SeqEnd, ], [1, 2, 3] => &[ - Token::SeqArrayStart(3), + Token::SeqFixedSize(3), Token::I32(1), Token::I32(2), Token::I32(3), @@ -146,19 +146,19 @@ declare_tests! { } test_vec { Vec::::new() => &[ - Token::SeqStart(Some(0)), + Token::Seq(Some(0)), Token::SeqEnd, ], vec![vec![], vec![1], vec![2, 3]] => &[ - Token::SeqStart(Some(3)), - Token::SeqStart(Some(0)), + Token::Seq(Some(3)), + Token::Seq(Some(0)), Token::SeqEnd, - Token::SeqStart(Some(1)), + Token::Seq(Some(1)), Token::I32(1), Token::SeqEnd, - Token::SeqStart(Some(2)), + Token::Seq(Some(2)), Token::I32(2), Token::I32(3), Token::SeqEnd, @@ -167,28 +167,28 @@ declare_tests! { } test_hashset { HashSet::::new() => &[ - Token::SeqStart(Some(0)), + Token::Seq(Some(0)), Token::SeqEnd, ], hashset![1] => &[ - Token::SeqStart(Some(1)), + Token::Seq(Some(1)), Token::I32(1), Token::SeqEnd, ], hashset![FnvHasher @ 1] => &[ - Token::SeqStart(Some(1)), + Token::Seq(Some(1)), Token::I32(1), Token::SeqEnd, ], } test_tuple { (1,) => &[ - Token::TupleStart(1), + Token::Tuple(1), Token::I32(1), Token::TupleEnd, ], (1, 2, 3) => &[ - Token::TupleStart(3), + Token::Tuple(3), Token::I32(1), Token::I32(2), Token::I32(3), @@ -197,13 +197,13 @@ declare_tests! 
{ } test_btreemap { btreemap![1 => 2] => &[ - Token::MapStart(Some(1)), + Token::Map(Some(1)), Token::I32(1), Token::I32(2), Token::MapEnd, ], btreemap![1 => 2, 3 => 4] => &[ - Token::MapStart(Some(2)), + Token::Map(Some(2)), Token::I32(1), Token::I32(2), @@ -212,13 +212,13 @@ declare_tests! { Token::MapEnd, ], btreemap![1 => btreemap![], 2 => btreemap![3 => 4, 5 => 6]] => &[ - Token::MapStart(Some(2)), + Token::Map(Some(2)), Token::I32(1), - Token::MapStart(Some(0)), + Token::Map(Some(0)), Token::MapEnd, Token::I32(2), - Token::MapStart(Some(2)), + Token::Map(Some(2)), Token::I32(3), Token::I32(4), @@ -230,17 +230,17 @@ declare_tests! { } test_hashmap { HashMap::::new() => &[ - Token::MapStart(Some(0)), + Token::Map(Some(0)), Token::MapEnd, ], hashmap![1 => 2] => &[ - Token::MapStart(Some(1)), + Token::Map(Some(1)), Token::I32(1), Token::I32(2), Token::MapEnd, ], hashmap![FnvHasher @ 1 => 2] => &[ - Token::MapStart(Some(1)), + Token::Map(Some(1)), Token::I32(1), Token::I32(2), Token::MapEnd, @@ -251,7 +251,7 @@ declare_tests! { } test_tuple_struct { TupleStruct(1, 2, 3) => &[ - Token::TupleStructStart("TupleStruct", 3), + Token::TupleStruct("TupleStruct", 3), Token::I32(1), Token::I32(2), Token::I32(3), @@ -260,7 +260,7 @@ declare_tests! { } test_struct { Struct { a: 1, b: 2, c: 3 } => &[ - Token::StructStart("Struct", 3), + Token::Struct("Struct", 3), Token::Str("a"), Token::I32(1), @@ -273,22 +273,22 @@ declare_tests! { ], } test_enum { - Enum::Unit => &[Token::EnumUnit("Enum", "Unit")], - Enum::One(42) => &[Token::EnumNewType("Enum", "One"), Token::I32(42)], + Enum::Unit => &[Token::UnitVariant("Enum", "Unit")], + Enum::One(42) => &[Token::NewtypeVariant("Enum", "One"), Token::I32(42)], Enum::Seq(1, 2) => &[ - Token::EnumSeqStart("Enum", "Seq", 2), + Token::TupleVariant("Enum", "Seq", 2), Token::I32(1), Token::I32(2), - Token::EnumSeqEnd, + Token::TupleVariantEnd, ], Enum::Map { a: 1, b: 2 } => &[ - Token::EnumMapStart("Enum", "Map", 2), + Token::StructVariant("Enum", "Map", 2), Token::Str("a"), Token::I32(1), Token::Str("b"), Token::I32(2), - Token::EnumMapEnd, + Token::StructVariantEnd, ], } test_box { @@ -296,7 +296,7 @@ declare_tests! { } test_boxed_slice { Box::new([0, 1, 2]) => &[ - Token::SeqArrayStart(3), + Token::SeqFixedSize(3), Token::I32(0), Token::I32(1), Token::I32(2), @@ -305,7 +305,7 @@ declare_tests! { } test_duration { Duration::new(1, 2) => &[ - Token::StructStart("Duration", 2), + Token::Struct("Duration", 2), Token::Str("secs"), Token::U64(1), @@ -316,7 +316,7 @@ declare_tests! { } test_range { 1u32..2u32 => &[ - Token::StructStart("Range", 2), + Token::Struct("Range", 2), Token::Str("start"), Token::U32(1),
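
For reference, here is a minimal sketch of what a test reads like once these renames land. The `Example` enum, the `example_tokens` test name, and the crate setup are hypothetical and are not part of this series; only the `Token` spellings are taken from the hunks above.

    // Hypothetical round-trip test written against the renamed serde_test tokens.
    #[macro_use]
    extern crate serde_derive;
    extern crate serde_test;

    use serde_test::{assert_tokens, Token};

    #[derive(Serialize, Deserialize, PartialEq, Debug)]
    enum Example {
        Unit,
        Newtype(u8),
        Tuple(u8, u8),
        Struct { a: u8 },
    }

    #[test]
    fn example_tokens() {
        // Unit and newtype variants are a single header token
        // (plus the inner value for the newtype case).
        assert_tokens(&Example::Unit, &[Token::UnitVariant("Example", "Unit")]);
        assert_tokens(
            &Example::Newtype(1),
            &[Token::NewtypeVariant("Example", "Newtype"), Token::U8(1)],
        );

        // Tuple and struct variants are bracketed by matching
        // *Variant / *VariantEnd tokens.
        assert_tokens(
            &Example::Tuple(1, 2),
            &[
                Token::TupleVariant("Example", "Tuple", 2),
                Token::U8(1),
                Token::U8(2),
                Token::TupleVariantEnd,
            ],
        );
        assert_tokens(
            &Example::Struct { a: 1 },
            &[
                Token::StructVariant("Example", "Struct", 1),
                Token::Str("a"),
                Token::U8(1),
                Token::StructVariantEnd,
            ],
        );
    }

The pattern is the same throughout the series: container headers drop the `Start` suffix (`Seq`, `Map`, `Tuple`, `Struct`, ...), and enum tokens are named after the variant kind (`UnitVariant`, `NewtypeVariant`, `TupleVariant`/`TupleVariantEnd`, `StructVariant`/`StructVariantEnd`) instead of the old `Enum*` spellings.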