Mirror of https://github.com/serde-rs/serde.git, synced 2025-10-02 07:21:12 +00:00
rewrite deserialization to allow for copy-less struct fields
This commit is contained in:
parent 292a820e3a
commit 475fd50564
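
In outline: the generated struct deserialization no longer calls `expect_struct_field(name)` once per field in declaration order, pulling each value eagerly; instead the macro emits a `static FIELDS` slice of serialized field names, loops on the new `expect_struct_field_or_end(FIELDS)` to get the index of the next key, fills one `Option` per field, and skips unknown keys. The sketch below is a hand-expanded version of that loop, modeled directly on the `Inner` test updated in `src/de.rs` further down; it is written in the pre-1.0 Rust dialect of this commit, and `d` and `MyStruct` are placeholders, so treat it as illustrative rather than something to compile.

    // Roughly what #[deriving(Deserializable)] now expands to for a struct
    // with fields a, b, c (placeholder names; surrounding fn omitted).
    static FIELDS: &'static [&'static str] = &["a", "b", "c"];

    let mut a = None;
    let mut b = None;
    let mut c = None;

    loop {
        // Outer Option: None means the struct/map has ended.
        let idx = match try!(d.expect_struct_field_or_end(FIELDS)) {
            Some(idx) => idx,
            None => { break; }
        };

        // Inner Option: position of the key in FIELDS, or None for a key we
        // do not know, whose value is drained with IgnoreTokens.
        match idx {
            Some(0) => { a = Some(try!(d.expect_struct_value())); }
            Some(1) => { b = Some(try!(d.expect_struct_value())); }
            Some(2) => { c = Some(try!(d.expect_struct_value())); }
            Some(_) => unreachable!(),
            None => { let _: IgnoreTokens = try!(Deserializable::deserialize(d)); }
        }
    }

    // The test code below simply unwraps; the macro-generated version reports
    // a missing field through missing_field() instead.
    Ok(MyStruct { a: a.unwrap(), b: b.unwrap(), c: c.unwrap() })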
@@ -288,128 +288,57 @@ fn deserialize_struct(
     deserializer: P<ast::Expr>,
     token: P<ast::Expr>
 ) -> P<ast::Expr> {
-    let serial_names: Vec<Option<token::InternedString>> =
-        definitions.iter().map(|def|
-            find_serial_name(def.node.attrs.iter())
-        ).collect();
-
-    let struct_block = deserialize_struct_from_struct(
-        cx,
-        span,
-        type_ident,
-        serial_names.as_slice(),
-        fields,
-        deserializer.clone()
-    );
-
-    let map_block = deserialize_struct_from_map(
-        cx,
-        span,
-        type_ident,
-        serial_names.as_slice(),
-        fields,
-        deserializer.clone()
-    );
-
-    quote_expr!(
-        cx,
-        match $token {
-            ::serde::de::StructStart(_, _) => $struct_block,
-            ::serde::de::MapStart(_) => $map_block,
-            token => {
-                let expected_tokens = [
-                    ::serde::de::StructStartKind,
-                    ::serde::de::MapStartKind,
-                ];
-                Err($deserializer.syntax_error(token, expected_tokens))
-            }
-        }
-    )
-}
-
-fn deserialize_struct_from_struct(
-    cx: &ExtCtxt,
-    span: Span,
-    type_ident: Ident,
-    serial_names: &[Option<token::InternedString>],
-    fields: &StaticFields,
-    deserializer: P<ast::Expr>
-) -> P<ast::Expr> {
-    //let expect_struct_field = cx.ident_of("expect_struct_field");
-
-    let call = deserializable_static_fields(
-        cx,
-        span,
-        type_ident,
-        serial_names.as_slice(),
-        fields,
-        |cx, span, name| {
-            let name = cx.expr_str(span, name);
-            quote_expr!(
-                cx,
-                try!($deserializer.expect_struct_field($name))
-            )
-        }
-    );
-
-    quote_expr!(cx, {
-        let result = $call;
-        try!($deserializer.expect_struct_end());
-        Ok(result)
-    })
-}
-
-fn deserialize_struct_from_map(
-    cx: &ExtCtxt,
-    span: Span,
-    type_ident: Ident,
-    serial_names: &[Option<token::InternedString>],
-    fields: &StaticFields,
-    deserializer: P<ast::Expr>
-) -> P<ast::Expr> {
+    let type_name_str = cx.expr_str(span, token::get_ident(type_ident));
+
     let fields = match *fields {
         Unnamed(_) => fail!(),
         Named(ref fields) => fields.as_slice(),
     };

-    // Declare each field.
-    let let_fields: Vec<P<ast::Stmt>> = fields.iter()
-        .map(|&(name, _)| {
-            quote_stmt!(cx, let mut $name = None)
+    // Convert each field into a unique ident.
+    let field_idents: Vec<ast::Ident> = fields.iter()
+        .enumerate()
+        .map(|(idx, _)| {
+            cx.ident_of(format!("field{}", idx).as_slice())
         })
         .collect();

+    // Convert each field into their string.
+    let field_strs: Vec<P<ast::Expr>> = fields.iter()
+        .zip(definitions.iter())
+        .map(|(&(name, _), def)| {
+            match find_serial_name(def.node.attrs.iter()) {
+                Some(serial) => cx.expr_str(span, serial),
+                None => cx.expr_str(span, token::get_ident(name)),
+            }
+        })
+        .collect();
+
+    // Declare the static vec slice of field names.
+    let static_fields = cx.expr_vec_slice(span, field_strs.clone());
+
+    // Declare each field.
+    let let_fields: Vec<P<ast::Stmt>> = field_idents.iter()
+        .map(|ident| quote_stmt!(cx, let mut $ident = None))
+        .collect();
+
     // Declare key arms.
-    let key_arms: Vec<ast::Arm> = serial_names.iter()
-        .zip(fields.iter())
-        .map(|(serial, &(name, span))| {
-            let serial_name = match serial {
-                &Some(ref string) => string.clone(),
-                &None => token::get_ident(name),
-            };
-            let s = cx.expr_str(span, serial_name);
+    let idx_arms: Vec<ast::Arm> = field_idents.iter()
+        .enumerate()
+        .map(|(idx, ident)| {
             quote_arm!(cx,
-                $s => {
-                    $name = Some(
-                        try!(::serde::de::Deserializable::deserialize($deserializer))
-                    );
-                    continue;
-                })
+                Some($idx) => { $ident = Some(try!($deserializer.expect_struct_value())); }
+            )
         })
         .collect();

-    let extract_fields: Vec<P<ast::Stmt>> = serial_names.iter()
-        .zip(fields.iter())
-        .map(|(serial, &(name, span))| {
-            let serial_name = match serial {
-                &Some(ref string) => string.clone(),
-                &None => token::get_ident(name),
-            };
-            let name_str = cx.expr_str(span, serial_name);
+    let extract_fields: Vec<P<ast::Stmt>> = field_idents.iter()
+        .zip(field_strs.iter())
+        .map(|(ident, field_str)| {
             quote_stmt!(cx,
-                let $name = match $name {
-                    Some($name) => $name,
-                    None => try!($deserializer.missing_field($name_str)),
+                let $ident = match $ident {
+                    Some($ident) => $ident,
+                    None => try!($deserializer.missing_field($field_str)),
                 };
             )
         })
@@ -419,41 +348,34 @@ fn deserialize_struct_from_map(
         span,
         type_ident,
         fields.iter()
-            .map(|&(name, span)| {
-                cx.field_imm(span, name, cx.expr_ident(span, name))
+            .zip(field_idents.iter())
+            .map(|(&(name, _), ident)| {
+                cx.field_imm(span, name, cx.expr_ident(span, *ident))
             })
             .collect()
     );

     quote_expr!(cx, {
+        try!($deserializer.expect_struct_start($token, $type_name_str));
+
+        static FIELDS: &'static [&'static str] = $static_fields;
         $let_fields

         loop {
-            let token = match try!($deserializer.expect_token()) {
-                ::serde::de::End => { break; }
-                token => token,
+            let idx = match try!($deserializer.expect_struct_field_or_end(FIELDS)) {
+                Some(idx) => idx,
+                None => { break; }
             };

-            {
-                let key = match token {
-                    ::serde::de::Str(s) => s,
-                    ::serde::de::String(ref s) => s.as_slice(),
-                    token => {
-                        let expected_tokens = [
-                            ::serde::de::StrKind,
-                            ::serde::de::StringKind,
-                        ];
-                        return Err($deserializer.syntax_error(token, expected_tokens));
-                    }
-                };
-
-                match key {
-                    $key_arms
-                    _ => { }
+            match idx {
+                $idx_arms
+                Some(_) => unreachable!(),
+                None => {
+                    let _: ::serde::de::IgnoreTokens =
+                        try!(::serde::de::Deserializable::deserialize($deserializer));
                 }
             }
-
-            try!($deserializer.ignore_field(token))
+            //try!($deserializer.ignore_field(token))
         }

         $extract_fields
77 src/de.rs
@@ -356,25 +356,29 @@ pub trait Deserializer<E>: Iterator<Result<Token, E>> {
     }

     #[inline]
-    fn expect_struct_field<
-        T: Deserializable<Self, E>
-    >(&mut self, name: &str) -> Result<T, E> {
+    fn expect_struct_field_or_end(&mut self,
+        fields: &'static [&'static str]
+    ) -> Result<option::Option<option::Option<uint>>, E> {
         match try!(self.expect_token()) {
-            Str(n) => {
-                if name != n {
-                    return Err(self.unexpected_name_error(Str(n)));
+            End => {
+                Ok(None)
             }
+            Str(n) => {
+                Ok(Some(fields.iter().position(|field| **field == n)))
             }
             String(n) => {
-                if name != n.as_slice() {
-                    return Err(self.unexpected_name_error(String(n)));
-                }
+                Ok(Some(fields.iter().position(|field| **field == n.as_slice())))
             }
             token => {
-                return Err(self.syntax_error(token, STR_TOKEN_KINDS));
+                Err(self.syntax_error(token, STR_TOKEN_KINDS))
+            }
         }
     }

+    #[inline]
+    fn expect_struct_value<
+        T: Deserializable<Self, E>
+    >(&mut self) -> Result<T, E> {
         Deserializable::deserialize(self)
     }

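The nested options in `Result<Option<Option<uint>>, E>` carry two separate signals. A minimal sketch of the three cases a caller has to handle, in the same pre-1.0 dialect (`d` and `FIELDS` are placeholders):

    match try!(d.expect_struct_field_or_end(FIELDS)) {
        None => { /* End token: the struct or map is finished */ }
        Some(None) => { /* a key that is not in FIELDS: skip its value */ }
        Some(Some(idx)) => { /* the next value belongs to FIELDS[idx] */ }
    }

`expect_struct_value` is only a thin default that defers to `Deserializable::deserialize`, presumably so that format backends can override how a single field value is read.
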
@@ -982,7 +986,7 @@ mod tests {
     use std::{option, string};
     use serialize::Decoder;

-    use super::{Deserializer, Deserializable, Token, TokenKind};
+    use super::{Deserializer, Deserializable, Token, TokenKind, IgnoreTokens};
     use super::{
         Null,
         Bool,
@@ -1034,11 +1038,29 @@ mod tests {
         #[inline]
         fn deserialize_token(d: &mut D, token: Token) -> Result<Inner, E> {
             try!(d.expect_struct_start(token, "Inner"));
-            let a = try!(d.expect_struct_field("a"));
-            let b = try!(d.expect_struct_field("b"));
-            let c = try!(d.expect_struct_field("c"));
-            try!(d.expect_struct_end());
-            Ok(Inner { a: a, b: b, c: c })
+
+            let mut a = None;
+            let mut b = None;
+            let mut c = None;
+
+            static FIELDS: &'static [&'static str] = &["a", "b", "c"];
+
+            loop {
+                let idx = match try!(d.expect_struct_field_or_end(FIELDS)) {
+                    Some(idx) => idx,
+                    None => { break; }
+                };
+
+                match idx {
+                    Some(0) => { a = Some(try!(d.expect_struct_value())); }
+                    Some(1) => { b = Some(try!(d.expect_struct_value())); }
+                    Some(2) => { c = Some(try!(d.expect_struct_value())); }
+                    Some(_) => unreachable!(),
+                    None => { let _: IgnoreTokens = try!(Deserializable::deserialize(d)); }
+                }
+            }
+
+            Ok(Inner { a: a.unwrap(), b: b.unwrap(), c: c.unwrap() })
         }
     }

@@ -1053,9 +1075,25 @@ mod tests {
         #[inline]
         fn deserialize_token(d: &mut D, token: Token) -> Result<Outer, E> {
             try!(d.expect_struct_start(token, "Outer"));
-            let inner = try!(d.expect_struct_field("inner"));
-            try!(d.expect_struct_end());
-            Ok(Outer { inner: inner })
+
+            static FIELDS: &'static [&'static str] = ["inner"];
+
+            let mut inner = None;
+
+            loop {
+                let idx = match try!(d.expect_struct_field_or_end(FIELDS)) {
+                    Some(idx) => idx,
+                    None => { break; }
+                };
+
+                match idx {
+                    Some(0) => { inner = Some(try!(d.expect_struct_value())); }
+                    Some(_) => unreachable!(),
+                    None => { let _: IgnoreTokens = try!(Deserializable::deserialize(d)); }
+                }
+            }
+
+            Ok(Outer { inner: inner.unwrap() })
         }
     }

@@ -1094,7 +1132,6 @@ mod tests {
         SyntaxError(Vec<TokenKind>),
         UnexpectedName,
         ConversionError,
-        IncompleteValue,
         MissingField(&'static str),
     }

148 src/json/mod.rs
@@ -742,6 +742,14 @@ impl de::Deserializer<ParserError> for JsonDeserializer {
             None => Err(UnknownVariantError(variant)),
         }
     }
+
+    #[inline]
+    fn expect_struct_start(&mut self, token: de::Token, _name: &str) -> Result<(), ParserError> {
+        match token {
+            de::MapStart(_) => Ok(()),
+            _ => Err(self.syntax_error(token, [de::MapStartKind])),
+        }
+    }
 }

 /// The failed expectation of InvalidSyntax
@@ -1644,6 +1652,7 @@ pub struct Parser<Iter> {
     col: uint,
     // A state machine is kept to make it possible to interupt and resume parsing.
     state_stack: Vec<ParserState>,
+    buf: string::String,
 }

 impl<Iter: Iterator<char>> Iterator<Result<de::Token, ParserError>> for Parser<Iter> {
@@ -1669,8 +1678,20 @@ impl<Iter: Iterator<char>> Iterator<Result<de::Token, ParserError>> for Parser<I
             ParseValue => Some(self.parse_value()),
             ParseListStart => Some(self.parse_list_start()),
             ParseListCommaOrEnd => Some(self.parse_list_comma_or_end()),
-            ParseObjectStart => Some(self.parse_object_start()),
-            ParseObjectCommaOrEnd => Some(self.parse_object_comma_or_end()),
+            ParseObjectStart => {
+                match self.parse_object_start() {
+                    Ok(Some(s)) => Some(Ok(de::String(s.to_string()))),
+                    Ok(None) => Some(Ok(de::End)),
+                    Err(err) => Some(Err(err)),
+                }
+            }
+            ParseObjectCommaOrEnd => {
+                match self.parse_object_comma_or_end() {
+                    Ok(Some(s)) => Some(Ok(de::String(s.to_string()))),
+                    Ok(None) => Some(Ok(de::End)),
+                    Err(err) => Some(Err(err)),
+                }
+            }
             //ParseObjectKey => Some(self.parse_object_key()),
             ParseObjectValue => Some(self.parse_object_value()),
         }
@@ -1679,6 +1700,7 @@ impl<Iter: Iterator<char>> Iterator<Result<de::Token, ParserError>> for Parser<I

 impl<Iter: Iterator<char>> Parser<Iter> {
     /// Creates the JSON parser.
+    #[inline]
     pub fn new(rdr: Iter) -> Parser<Iter> {
         let mut p = Parser {
             rdr: rdr,
@@ -1686,6 +1708,7 @@ impl<Iter: Iterator<char>> Parser<Iter> {
             line: 1,
             col: 0,
             state_stack: vec!(ParseValue),
+            buf: string::String::with_capacity(100),
         };
         p.bump();
         return p;
@@ -1782,6 +1805,7 @@ impl<Iter: Iterator<char>> Parser<Iter> {
         Ok(res)
     }

+    #[inline]
     fn parse_decimal(&mut self, res: f64) -> Result<f64, ParserError> {
         self.bump();

@@ -1874,27 +1898,30 @@ impl<Iter: Iterator<char>> Parser<Iter> {
         Ok(n)
     }

-    fn parse_string(&mut self) -> Result<string::String, ParserError> {
+    fn parse_string(&mut self) -> Result<&str, ParserError> {
+        self.buf.clear();
+
         let mut escape = false;
-        let mut res = string::String::new();

         loop {
-            self.bump();
-            if self.eof() {
-                return self.error(EOFWhileParsingString);
-            }
+            let ch = match self.next_char() {
+                Some(ch) => ch,
+                None => { return self.error(EOFWhileParsingString); }
+            };

             if escape {
-                match self.ch_or_null() {
-                    '"' => res.push('"'),
-                    '\\' => res.push('\\'),
-                    '/' => res.push('/'),
-                    'b' => res.push('\x08'),
-                    'f' => res.push('\x0c'),
-                    'n' => res.push('\n'),
-                    'r' => res.push('\r'),
-                    't' => res.push('\t'),
-                    'u' => match try!(self.decode_hex_escape()) {
+                match ch {
+                    '"' => self.buf.push('"'),
+                    '\\' => self.buf.push('\\'),
+                    '/' => self.buf.push('/'),
+                    'b' => self.buf.push('\x08'),
+                    'f' => self.buf.push('\x0c'),
+                    'n' => self.buf.push('\n'),
+                    'r' => self.buf.push('\r'),
+                    't' => self.buf.push('\t'),
+                    'u' => {
+                        let c = match try!(self.decode_hex_escape()) {
                         0xDC00 ... 0xDFFF => return self.error(LoneLeadingSurrogateInHexEscape),

                         // Non-BMP characters are encoded as a sequence of
@@ -1909,29 +1936,34 @@ impl<Iter: Iterator<char>> Parser<Iter> {

                         let buf = [n1, try!(self.decode_hex_escape())];
                         match str::utf16_items(buf.as_slice()).next() {
-                            Some(ScalarValue(c)) => res.push(c),
+                            Some(ScalarValue(c)) => c,
                             _ => return self.error(LoneLeadingSurrogateInHexEscape),
                         }
                     }

                     n => match char::from_u32(n as u32) {
-                        Some(c) => res.push(c),
+                        Some(c) => c,
                         None => return self.error(InvalidUnicodeCodePoint),
-                    },
-                },
+                        }
+                        };
+
+                        self.buf.push(c);
+                    }
                     _ => return self.error(InvalidEscape),
                 }
                 escape = false;
-            } else if self.ch_is('\\') {
-                escape = true;
             } else {
-                match self.ch {
-                    Some('"') => {
+                match ch {
+                    '"' => {
                         self.bump();
-                        return Ok(res);
-                    },
-                    Some(c) => res.push(c),
-                    None => unreachable!()
+                        return Ok(self.buf.as_slice());
+                    }
+                    '\\' => {
+                        escape = true;
+                    }
+                    ch => {
+                        self.buf.push(ch);
+                    }
                 }
             }
         }
     }
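The allocation the commit title targets on the JSON side is the fresh `string::String` the old `parse_string` built for every string value and every object key. The parser now carries one reusable `buf`, and `parse_string` hands back a `&str` borrowed from it; only callers that need an owned value (the `de::String(...)` token paths) pay for a `.to_string()`. A self-contained sketch of the same reuse pattern, written in current Rust and independent of the serde types (all names here are illustrative):

    /// A scanner that amortizes one buffer allocation over many reads.
    struct Scanner {
        buf: String,
    }

    impl Scanner {
        fn new() -> Scanner {
            Scanner { buf: String::with_capacity(100) }
        }

        /// Reads one whitespace-delimited word. The returned slice borrows the
        /// internal buffer, so it is only valid until the next call; callers
        /// that must keep it copy it out with `.to_string()`.
        fn read_word<'a, I: Iterator<Item = char>>(&'a mut self, input: &mut I) -> &'a str {
            self.buf.clear();
            for ch in input {
                if ch.is_whitespace() {
                    break;
                }
                self.buf.push(ch);
            }
            self.buf.as_str()
        }
    }

    fn main() {
        let mut scanner = Scanner::new();
        let mut chars = "alpha beta".chars();
        assert_eq!(scanner.read_word(&mut chars), "alpha");
        assert_eq!(scanner.read_word(&mut chars), "beta");
    }

The same trade-off applies to the parser here: the borrowed key is fine for matching against `FIELDS`, and copying only happens when a `de::String` token genuinely has to own its contents.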
@@ -1966,26 +1998,26 @@ impl<Iter: Iterator<char>> Parser<Iter> {
         }
     }

-    fn parse_object_start(&mut self) -> Result<de::Token, ParserError> {
+    fn parse_object_start(&mut self) -> Result<Option<&str>, ParserError> {
         self.parse_whitespace();

         if self.ch_is('}') {
             self.bump();
-            Ok(de::End)
+            Ok(None)
         } else {
-            self.parse_object_key()
+            Ok(Some(try!(self.parse_object_key())))
         }
     }

-    fn parse_object_comma_or_end(&mut self) -> Result<de::Token, ParserError> {
+    fn parse_object_comma_or_end(&mut self) -> Result<Option<&str>, ParserError> {
         self.parse_whitespace();

         if self.ch_is(',') {
             self.bump();
-            self.parse_object_key()
+            Ok(Some(try!(self.parse_object_key())))
         } else if self.ch_is('}') {
             self.bump();
-            Ok(de::End)
+            Ok(None)
         } else if self.eof() {
             self.error_event(EOFWhileParsingObject)
         } else {
@@ -1993,19 +2025,18 @@ impl<Iter: Iterator<char>> Parser<Iter> {
         }
     }

-    fn parse_object_key(&mut self) -> Result<de::Token, ParserError> {
+    fn parse_object_key(&mut self) -> Result<&str, ParserError> {
         self.parse_whitespace();

-        self.state_stack.push(ParseObjectValue);
-
         if self.eof() {
             return self.error_event(EOFWhileParsingString);
         }

         match self.ch_or_null() {
             '"' => {
-                let s = try!(self.parse_string());
-                Ok(de::String(s))
+                self.state_stack.push(ParseObjectValue);
+
+                Ok(try!(self.parse_string()))
             }
             _ => self.error_event(KeyMustBeAString),
         }
@@ -2038,8 +2069,7 @@ impl<Iter: Iterator<char>> Parser<Iter> {
             'f' => self.parse_ident("alse", de::Bool(false)),
             '0' ... '9' | '-' => self.parse_number(),
             '"' => {
-                let s = try!(self.parse_string());
-                Ok(de::String(s))
+                Ok(de::String(try!(self.parse_string()).to_string()))
             }
             '[' => {
                 self.bump();
@@ -2066,7 +2096,7 @@ impl<Iter: Iterator<char>> Parser<Iter> {
         }
     }

-    fn error_event(&mut self, reason: ErrorCode) -> Result<de::Token, ParserError> {
+    fn error_event<T>(&mut self, reason: ErrorCode) -> Result<T, ParserError> {
         self.state_stack.clear();
         Err(SyntaxError(reason, self.line, self.col))
     }
@@ -2153,6 +2183,36 @@ impl<Iter: Iterator<char>> de::Deserializer<ParserError> for Parser<Iter> {
             _ => self.error(InvalidSyntax(EnumEnd)),
         }
     }
+
+    #[inline]
+    fn expect_struct_start(&mut self, token: de::Token, _name: &str) -> Result<(), ParserError> {
+        match token {
+            de::MapStart(_) => Ok(()),
+            _ => Err(self.syntax_error(token, [de::MapStartKind])),
+        }
+    }
+
+    #[inline]
+    fn expect_struct_field_or_end(&mut self,
+        fields: &'static [&'static str]
+    ) -> Result<Option<Option<uint>>, ParserError> {
+        let result = match self.state_stack.pop() {
+            Some(ParseObjectStart) => {
+                try!(self.parse_object_start())
+            }
+            Some(ParseObjectCommaOrEnd) => {
+                try!(self.parse_object_comma_or_end())
+            }
+            _ => fail!("invalid internal state"),
+        };

+        let s = match result {
+            Some(s) => s,
+            None => { return Ok(None); }
+        };
+
+        Ok(Some(fields.iter().position(|field| **field == s.as_slice())))
+    }
 }

 /// Decodes a json value from an `Iterator<Char>`.
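
With those pieces in place, the JSON parser satisfies the field protocol without materializing key tokens: `expect_struct_field_or_end` pops the pending `ParseObjectStart`/`ParseObjectCommaOrEnd` state itself, receives the key as a borrowed `&str`, and maps it to a position in `FIELDS`; an unknown key comes back as `Some(None)` and the generated code drains its value with `IgnoreTokens`. A rough end-to-end sketch, again in the dialect of this commit; the struct, the input, and the surrounding `try!` context are illustrative:

    // Illustrative only: what the new code path does for
    //     {"a": 1, "b": 2, "extra": true, "c": 3}
    // against FIELDS = ["a", "b", "c"] of a derived struct MyStruct.
    let mut parser = Parser::new(r#"{"a": 1, "b": 2, "extra": true, "c": 3}"#.chars());

    // The derived impl drives the loop:
    //   "a", "b", "c" -> Ok(Some(Some(0 | 1 | 2))), value read via expect_struct_value()
    //   "extra"       -> Ok(Some(None)), value drained by IgnoreTokens
    //   closing '}'   -> Ok(None), loop breaks, fields are unwrapped
    let value: MyStruct = try!(::serde::de::Deserializable::deserialize(&mut parser));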