From 3d5141a2f1531ed7583a0c3e8762d60d1f7b3e96 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Sun, 20 May 2018 19:34:52 -0700 Subject: [PATCH] Update to proc-macro2 0.4 --- serde_derive/Cargo.toml | 6 +- serde_derive/src/bound.rs | 22 +++--- serde_derive/src/de.rs | 100 +++++++++++++-------------- serde_derive/src/fragment.rs | 17 ++--- serde_derive/src/internals/ast.rs | 6 +- serde_derive/src/internals/attr.rs | 105 +++++++++++++++-------------- serde_derive/src/pretend.rs | 19 +++--- serde_derive/src/ser.rs | 51 +++++++------- serde_derive/src/try.rs | 7 +- serde_derive_internals/Cargo.toml | 4 +- 10 files changed, 168 insertions(+), 169 deletions(-) diff --git a/serde_derive/Cargo.toml b/serde_derive/Cargo.toml index f0e8e538..b288a74d 100644 --- a/serde_derive/Cargo.toml +++ b/serde_derive/Cargo.toml @@ -23,9 +23,9 @@ name = "serde_derive" proc-macro = true [dependencies] -proc-macro2 = "0.3" -quote = "0.5.2" -syn = { version = "0.13", features = ["visit"] } +proc-macro2 = "0.4" +quote = "0.6" +syn = { version = "0.14", features = ["visit"] } [dev-dependencies] serde = { version = "1.0", path = "../serde" } diff --git a/serde_derive/src/bound.rs b/serde_derive/src/bound.rs index 88061f3f..080cf7f0 100644 --- a/serde_derive/src/bound.rs +++ b/serde_derive/src/bound.rs @@ -140,9 +140,9 @@ pub fn with_bound( } } if path.leading_colon.is_none() && path.segments.len() == 1 { - let id = path.segments[0].ident; - if self.all_type_params.contains(&id) { - self.relevant_type_params.insert(id); + let id = &path.segments[0].ident; + if self.all_type_params.contains(id) { + self.relevant_type_params.insert(id.clone()); } } visit::visit_path(self, path); @@ -157,7 +157,7 @@ pub fn with_bound( fn visit_macro(&mut self, _mac: &'ast syn::Macro) {} } - let all_type_params = generics.type_params().map(|param| param.ident).collect(); + let all_type_params = generics.type_params().map(|param| param.ident.clone()).collect(); let mut visitor = FindTyParams { all_type_params: all_type_params, @@ -185,7 +185,7 @@ pub fn with_bound( let associated_type_usage = visitor.associated_type_usage; let new_predicates = generics .type_params() - .map(|param| param.ident) + .map(|param| param.ident.clone()) .filter(|id| relevant_type_params.contains(id)) .map(|id| syn::TypePath { qself: None, @@ -247,7 +247,7 @@ pub fn with_lifetime_bound(generics: &syn::Generics, lifetime: &str) -> syn::Gen let bound = syn::Lifetime::new(lifetime, Span::call_site()); let def = syn::LifetimeDef { attrs: Vec::new(), - lifetime: bound, + lifetime: bound.clone(), colon_token: None, bounds: Punctuated::new(), }; @@ -257,10 +257,10 @@ pub fn with_lifetime_bound(generics: &syn::Generics, lifetime: &str) -> syn::Gen .chain(generics.params.iter().cloned().map(|mut param| { match param { syn::GenericParam::Lifetime(ref mut param) => { - param.bounds.push(bound); + param.bounds.push(bound.clone()); } syn::GenericParam::Type(ref mut param) => { - param.bounds.push(syn::TypeParamBound::Lifetime(bound)); + param.bounds.push(syn::TypeParamBound::Lifetime(bound.clone())); } syn::GenericParam::Const(_) => {} } @@ -280,7 +280,7 @@ fn type_of_item(cont: &Container) -> syn::Type { path: syn::Path { leading_colon: None, segments: vec![syn::PathSegment { - ident: cont.ident, + ident: cont.ident.clone(), arguments: syn::PathArguments::AngleBracketed( syn::AngleBracketedGenericArguments { colon2_token: None, @@ -293,11 +293,11 @@ fn type_of_item(cont: &Container) -> syn::Type { syn::GenericParam::Type(ref param) => { 
syn::GenericArgument::Type(syn::Type::Path(syn::TypePath { qself: None, - path: param.ident.into(), + path: param.ident.clone().into(), })) } syn::GenericParam::Lifetime(ref param) => { - syn::GenericArgument::Lifetime(param.lifetime) + syn::GenericArgument::Lifetime(param.lifetime.clone()) } syn::GenericParam::Const(_) => { panic!("Serde does not support const generics yet"); diff --git a/serde_derive/src/de.rs b/serde_derive/src/de.rs index eaee536c..875c941a 100644 --- a/serde_derive/src/de.rs +++ b/serde_derive/src/de.rs @@ -6,8 +6,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use proc_macro2::{Literal, Span}; -use quote::{ToTokens, Tokens}; +use proc_macro2::{Literal, Span, TokenStream}; +use quote::ToTokens; use syn::punctuated::Punctuated; use syn::spanned::Spanned; use syn::{self, Ident, Index, Member}; @@ -21,13 +21,13 @@ use try; use std::collections::BTreeSet; -pub fn expand_derive_deserialize(input: &syn::DeriveInput) -> Result<Tokens, String> { +pub fn expand_derive_deserialize(input: &syn::DeriveInput) -> Result<TokenStream, String> { let ctxt = Ctxt::new(); let cont = Container::from_ast(&ctxt, input); precondition(&ctxt, &cont); try!(ctxt.check()); - let ident = cont.ident; + let ident = &cont.ident; let params = Parameters::new(&cont); let (de_impl_generics, _, ty_generics, where_clause) = split_with_de_lifetime(&params); let dummy_const = Ident::new( @@ -130,10 +130,10 @@ struct Parameters { impl Parameters { fn new(cont: &Container) -> Self { - let local = cont.ident; + let local = cont.ident.clone(); let this = match cont.attrs.remote() { Some(remote) => remote.clone(), - None => cont.ident.into(), + None => cont.ident.clone().into(), }; let borrowed = borrowed_lifetimes(cont); let generics = build_generics(cont, &borrowed); @@ -150,8 +150,8 @@ impl Parameters { /// Type name to use in error messages and `&'static str` arguments to /// various Deserializer methods. - fn type_name(&self) -> &str { - self.this.segments.last().unwrap().value().ident.as_ref() + fn type_name(&self) -> String { + self.this.segments.last().unwrap().value().ident.to_string() } } @@ -382,11 +382,11 @@ fn deserialize_unit_struct(params: &Parameters, cattrs: &attr::Container) -> Fra } fn deserialize_tuple( - variant_ident: Option<syn::Ident>, + variant_ident: Option<&syn::Ident>, params: &Parameters, fields: &[Field], cattrs: &attr::Container, - deserializer: Option<Tokens>, + deserializer: Option<TokenStream>, ) -> Fragment { let this = &params.this; let (de_impl_generics, de_ty_generics, ty_generics, where_clause) = @@ -399,7 +399,7 @@ fn deserialize_tuple( // and use an `Into` conversion to get the remote type. If there are no // getters then construct the target type directly.
let construct = if params.has_getter { - let local = params.local; + let local = &params.local; quote!(#local) } else { quote!(#this) }; @@ -407,7 +407,7 @@ fn deserialize_tuple( let is_enum = variant_ident.is_some(); let type_path = match variant_ident { - Some(variant_ident) => quote!(#construct::#variant_ident), + Some(ref variant_ident) => quote!(#construct::#variant_ident), None => construct, }; let expecting = match variant_ident { @@ -486,7 +486,7 @@ fn deserialize_tuple_in_place( params: &Parameters, fields: &[Field], cattrs: &attr::Container, - deserializer: Option<Tokens>, + deserializer: Option<TokenStream>, ) -> Fragment { let this = &params.this; let (de_impl_generics, de_ty_generics, ty_generics, where_clause) = @@ -570,7 +570,7 @@ fn deserialize_tuple_in_place( } fn deserialize_seq( - type_path: &Tokens, + type_path: &TokenStream, params: &Parameters, fields: &[Field], is_struct: bool, @@ -750,7 +750,7 @@ fn deserialize_seq_in_place( } } -fn deserialize_newtype_struct(type_path: &Tokens, params: &Parameters, field: &Field) -> Tokens { +fn deserialize_newtype_struct(type_path: &TokenStream, params: &Parameters, field: &Field) -> TokenStream { let delife = params.borrowed.de_lifetime(); let field_ty = field.ty; @@ -788,7 +788,7 @@ fn deserialize_newtype_struct(type_path: &Tokens, params: &Parameters, field: &F } #[cfg(feature = "deserialize_in_place")] -fn deserialize_newtype_struct_in_place(params: &Parameters, field: &Field) -> Tokens { +fn deserialize_newtype_struct_in_place(params: &Parameters, field: &Field) -> TokenStream { // We do not generate deserialize_in_place if every field has a deserialize_with. assert!(field.attrs.deserialize_with().is_none()); @@ -811,11 +811,11 @@ enum Untagged { } fn deserialize_struct( - variant_ident: Option<syn::Ident>, + variant_ident: Option<&syn::Ident>, params: &Parameters, fields: &[Field], cattrs: &attr::Container, - deserializer: Option<Tokens>, + deserializer: Option<TokenStream>, untagged: &Untagged, ) -> Fragment { let is_enum = variant_ident.is_some(); @@ -829,14 +829,14 @@ fn deserialize_struct( // and use an `Into` conversion to get the remote type. If there are no // getters then construct the target type directly. let construct = if params.has_getter { - let local = params.local; + let local = &params.local; quote!(#local) } else { quote!(#this) }; let type_path = match variant_ident { - Some(variant_ident) => quote!(#construct::#variant_ident), + Some(ref variant_ident) => quote!(#construct::#variant_ident), None => construct, }; let expecting = match variant_ident { @@ -965,7 +965,7 @@ fn deserialize_struct_in_place( params: &Parameters, fields: &[Field], cattrs: &attr::Container, - deserializer: Option<Tokens>, + deserializer: Option<TokenStream>, ) -> Option<Fragment> { let is_enum = variant_ident.is_some(); @@ -1350,7 +1350,7 @@ fn deserialize_adjacently_tagged_enum( .filter(|&(_, variant)| !variant.attrs.skip_deserializing() && is_unit(variant)) .map(|(i, variant)| { let variant_index = field_i(i); - let variant_ident = variant.ident; + let variant_ident = &variant.ident; quote!
{ __Field::#variant_index => _serde::export::Ok(#this::#variant_ident), } @@ -1614,7 +1614,7 @@ fn deserialize_externally_tagged_variant( }; } - let variant_ident = variant.ident; + let variant_ident = &variant.ident; match variant.style { Style::Unit => { @@ -1645,19 +1645,19 @@ fn deserialize_internally_tagged_variant( params: &Parameters, variant: &Variant, cattrs: &attr::Container, - deserializer: Tokens, + deserializer: TokenStream, ) -> Fragment { if variant.attrs.deserialize_with().is_some() { return deserialize_untagged_variant(params, variant, cattrs, deserializer); } - let variant_ident = variant.ident; + let variant_ident = &variant.ident; match variant.style { Style::Unit => { let this = &params.this; let type_name = params.type_name(); - let variant_name = variant.ident.as_ref(); + let variant_name = variant.ident.to_string(); quote_block! { try!(_serde::Deserializer::deserialize_any(#deserializer, _serde::private::de::InternallyTaggedUnitVisitor::new(#type_name, #variant_name))); _serde::export::Ok(#this::#variant_ident) @@ -1685,7 +1685,7 @@ fn deserialize_untagged_variant( params: &Parameters, variant: &Variant, cattrs: &attr::Container, - deserializer: Tokens, + deserializer: TokenStream, ) -> Fragment { if let Some(path) = variant.attrs.deserialize_with() { let (wrapper, wrapper_ty, unwrap_fn) = wrap_deserialize_variant_with(params, variant, path); @@ -1696,13 +1696,13 @@ fn deserialize_untagged_variant( }; } - let variant_ident = variant.ident; + let variant_ident = &variant.ident; match variant.style { Style::Unit => { let this = &params.this; let type_name = params.type_name(); - let variant_name = variant.ident.as_ref(); + let variant_name = variant.ident.to_string(); quote_expr! { match _serde::Deserializer::deserialize_any( #deserializer, @@ -1738,7 +1738,7 @@ fn deserialize_untagged_variant( } fn deserialize_externally_tagged_newtype_variant( - variant_ident: syn::Ident, + variant_ident: &syn::Ident, params: &Parameters, field: &Field, ) -> Fragment { @@ -1765,10 +1765,10 @@ fn deserialize_externally_tagged_newtype_variant( } fn deserialize_untagged_newtype_variant( - variant_ident: syn::Ident, + variant_ident: &syn::Ident, params: &Parameters, field: &Field, - deserializer: &Tokens, + deserializer: &TokenStream, ) -> Fragment { let this = &params.this; let field_ty = field.ty; @@ -1865,7 +1865,7 @@ fn deserialize_custom_identifier( let this = quote!(#this); let (ordinary, fallthrough) = if let Some(last) = variants.last() { - let last_ident = last.ident; + let last_ident = &last.ident; if last.attrs.other() { let ordinary = &variants[..variants.len() - 1]; let fallthrough = quote!(_serde::export::Ok(#this::#last_ident)); @@ -1888,7 +1888,7 @@ fn deserialize_custom_identifier( let names_idents: Vec<_> = ordinary .iter() - .map(|variant| (variant.attrs.name().deserialize_name(), variant.ident)) + .map(|variant| (variant.attrs.name().deserialize_name(), variant.ident.clone())) .collect(); let names = names_idents.iter().map(|&(ref name, _)| name); @@ -1941,10 +1941,10 @@ fn deserialize_custom_identifier( } fn deserialize_identifier( - this: &Tokens, + this: &TokenStream, fields: &[(String, Ident)], is_variant: bool, - fallthrough: Option<Tokens>, + fallthrough: Option<TokenStream>, collect_other_fields: bool, ) -> Fragment { let field_strs = fields.iter().map(|&(ref name, _)| name); @@ -2198,7 +2198,7 @@ fn deserialize_identifier( } fn deserialize_struct_as_struct_visitor( - struct_path: &Tokens, + struct_path: &TokenStream, params: &Parameters, fields: &[Field], cattrs: &attr::Container, @@ -2227,7
+2227,7 @@ fn deserialize_struct_as_struct_visitor( } fn deserialize_struct_as_map_visitor( - struct_path: &Tokens, + struct_path: &TokenStream, params: &Parameters, fields: &[Field], cattrs: &attr::Container, @@ -2247,7 +2247,7 @@ fn deserialize_struct_as_map_visitor( } fn deserialize_map( - struct_path: &Tokens, + struct_path: &TokenStream, params: &Parameters, fields: &[Field], cattrs: &attr::Container, @@ -2635,9 +2635,9 @@ fn field_i(i: usize) -> Ident { /// in a trait to prevent it from accessing the internal `Deserialize` state. fn wrap_deserialize_with( params: &Parameters, - value_ty: &Tokens, + value_ty: &TokenStream, deserialize_with: &syn::ExprPath, -) -> (Tokens, Tokens) { +) -> (TokenStream, TokenStream) { let this = &params.this; let (de_impl_generics, de_ty_generics, ty_generics, where_clause) = split_with_de_lifetime(params); @@ -2673,7 +2673,7 @@ fn wrap_deserialize_field_with( params: &Parameters, field_ty: &syn::Type, deserialize_with: &syn::ExprPath, -) -> (Tokens, Tokens) { +) -> (TokenStream, TokenStream) { wrap_deserialize_with(params, &quote!(#field_ty), deserialize_with) } @@ -2681,9 +2681,9 @@ fn wrap_deserialize_variant_with( params: &Parameters, variant: &Variant, deserialize_with: &syn::ExprPath, -) -> (Tokens, Tokens, Tokens) { +) -> (TokenStream, TokenStream, TokenStream) { let this = &params.this; - let variant_ident = variant.ident; + let variant_ident = &variant.ident; let field_tys = variant.fields.iter().map(|field| field.ty); let (wrapper, wrapper_ty) = @@ -2763,7 +2763,7 @@ struct DeImplGenerics<'a>(&'a Parameters); struct InPlaceImplGenerics<'a>(&'a Parameters); impl<'a> ToTokens for DeImplGenerics<'a> { - fn to_tokens(&self, tokens: &mut Tokens) { + fn to_tokens(&self, tokens: &mut TokenStream) { let mut generics = self.0.generics.clone(); if let Some(de_lifetime) = self.0.borrowed.de_lifetime_def() { generics.params = Some(syn::GenericParam::Lifetime(de_lifetime)) @@ -2778,7 +2778,7 @@ impl<'a> ToTokens for DeImplGenerics<'a> { #[cfg(feature = "deserialize_in_place")] impl<'a> ToTokens for InPlaceImplGenerics<'a> { - fn to_tokens(&self, tokens: &mut Tokens) { + fn to_tokens(&self, tokens: &mut TokenStream) { let place_lifetime = place_lifetime(); let mut generics = self.0.generics.clone(); @@ -2786,12 +2786,12 @@ impl<'a> ToTokens for InPlaceImplGenerics<'a> { for param in &mut generics.params { match *param { syn::GenericParam::Lifetime(ref mut param) => { - param.bounds.push(place_lifetime.lifetime); + param.bounds.push(place_lifetime.lifetime.clone()); } syn::GenericParam::Type(ref mut param) => { param .bounds - .push(syn::TypeParamBound::Lifetime(place_lifetime.lifetime)); + .push(syn::TypeParamBound::Lifetime(place_lifetime.lifetime.clone())); } syn::GenericParam::Const(_) => {} } @@ -2823,7 +2823,7 @@ struct DeTypeGenerics<'a>(&'a Parameters); struct InPlaceTypeGenerics<'a>(&'a Parameters); impl<'a> ToTokens for DeTypeGenerics<'a> { - fn to_tokens(&self, tokens: &mut Tokens) { + fn to_tokens(&self, tokens: &mut TokenStream) { let mut generics = self.0.generics.clone(); if self.0.borrowed.de_lifetime_def().is_some() { let def = syn::LifetimeDef { @@ -2844,7 +2844,7 @@ impl<'a> ToTokens for DeTypeGenerics<'a> { #[cfg(feature = "deserialize_in_place")] impl<'a> ToTokens for InPlaceTypeGenerics<'a> { - fn to_tokens(&self, tokens: &mut Tokens) { + fn to_tokens(&self, tokens: &mut TokenStream) { let mut generics = self.0.generics.clone(); generics.params = Some(syn::GenericParam::Lifetime(place_lifetime())) .into_iter() diff --git
a/serde_derive/src/fragment.rs b/serde_derive/src/fragment.rs index 1241ab68..769d2552 100644 --- a/serde_derive/src/fragment.rs +++ b/serde_derive/src/fragment.rs @@ -6,15 +6,16 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use quote::{ToTokens, Tokens}; +use quote::ToTokens; use syn::token; +use proc_macro2::TokenStream; pub enum Fragment { /// Tokens that can be used as an expression. - Expr(Tokens), + Expr(TokenStream), /// Tokens that can be used inside a block. The surrounding curly braces are /// not part of these tokens. - Block(Tokens), + Block(TokenStream), } macro_rules! quote_expr { @@ -33,7 +34,7 @@ macro_rules! quote_block { /// Block fragments in curly braces. pub struct Expr(pub Fragment); impl ToTokens for Expr { - fn to_tokens(&self, out: &mut Tokens) { + fn to_tokens(&self, out: &mut TokenStream) { match self.0 { Fragment::Expr(ref expr) => expr.to_tokens(out), Fragment::Block(ref block) => { @@ -46,7 +47,7 @@ impl ToTokens for Expr { /// Interpolate a fragment as the statements of a block. pub struct Stmts(pub Fragment); impl ToTokens for Stmts { - fn to_tokens(&self, out: &mut Tokens) { + fn to_tokens(&self, out: &mut TokenStream) { match self.0 { Fragment::Expr(ref expr) => expr.to_tokens(out), Fragment::Block(ref block) => block.to_tokens(out), @@ -58,7 +59,7 @@ impl ToTokens for Stmts { /// involves putting a comma after expressions and curly braces around blocks. pub struct Match(pub Fragment); impl ToTokens for Match { - fn to_tokens(&self, out: &mut Tokens) { + fn to_tokens(&self, out: &mut TokenStream) { match self.0 { Fragment::Expr(ref expr) => { expr.to_tokens(out); @@ -71,8 +72,8 @@ impl ToTokens for Match { } } -impl AsRef<Tokens> for Fragment { - fn as_ref(&self) -> &Tokens { +impl AsRef<TokenStream> for Fragment { + fn as_ref(&self) -> &TokenStream { match *self { Fragment::Expr(ref expr) => expr, Fragment::Block(ref block) => block, diff --git a/serde_derive/src/internals/ast.rs b/serde_derive/src/internals/ast.rs index 1013be2c..093910d7 100644 --- a/serde_derive/src/internals/ast.rs +++ b/serde_derive/src/internals/ast.rs @@ -87,7 +87,7 @@ impl<'a> Container<'a> { } let item = Container { - ident: item.ident, + ident: item.ident.clone(), attrs: attrs, data: data, generics: &item.generics, @@ -124,7 +124,7 @@ fn enum_from_ast<'a>( let (style, fields) = struct_from_ast(cx, &variant.fields, Some(&attrs), container_default); Variant { - ident: variant.ident, + ident: variant.ident.clone(), attrs: attrs, style: style, fields: fields, @@ -167,7 +167,7 @@ fn fields_from_ast<'a>( .enumerate() .map(|(i, field)| Field { member: match field.ident { - Some(ident) => syn::Member::Named(ident), + Some(ref ident) => syn::Member::Named(ident.clone()), None => syn::Member::Unnamed(i.into()), }, attrs: attr::Field::from_ast(cx, i, field, attrs, container_default), diff --git a/serde_derive/src/internals/attr.rs b/serde_derive/src/internals/attr.rs index f79c906c..43c689a0 100644 --- a/serde_derive/src/internals/attr.rs +++ b/serde_derive/src/internals/attr.rs @@ -200,7 +200,7 @@ impl Container { match meta_item { // Parse `#[serde(rename = "foo")]` Meta(NameValue(ref m)) if m.ident == "rename" => { - if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) { + if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) { ser_name.set(s.value()); de_name.set(s.value()); } @@ -216,7 +216,7 @@ impl Container { // Parse `#[serde(rename_all = "foo")]` Meta(NameValue(ref m)) if m.ident == "rename_all" => { - if let Ok(s)
= get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) { + if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) { match RenameRule::from_str(&s.value()) { Ok(rename_rule) => rename_all.set(rename_rule), Err(()) => cx.error(format!( @@ -229,12 +229,12 @@ impl Container { } // Parse `#[serde(deny_unknown_fields)]` - Meta(Word(word)) if word == "deny_unknown_fields" => { + Meta(Word(ref word)) if word == "deny_unknown_fields" => { deny_unknown_fields.set_true(); } // Parse `#[serde(default)]` - Meta(Word(word)) if word == "default" => match item.data { + Meta(Word(ref word)) if word == "default" => match item.data { syn::Data::Struct(syn::DataStruct { fields: syn::Fields::Named(_), .. @@ -249,7 +249,7 @@ impl Container { // Parse `#[serde(default = "...")]` Meta(NameValue(ref m)) if m.ident == "default" => { - if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { + if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) { match item.data { syn::Data::Struct(syn::DataStruct { fields: syn::Fields::Named(_), @@ -268,7 +268,7 @@ impl Container { // Parse `#[serde(bound = "D: Serialize")]` Meta(NameValue(ref m)) if m.ident == "bound" => { if let Ok(where_predicates) = - parse_lit_into_where(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) + parse_lit_into_where(cx, &m.ident, &m.ident, &m.lit) { ser_bound.set(where_predicates.clone()); de_bound.set(where_predicates); @@ -284,7 +284,7 @@ impl Container { } // Parse `#[serde(untagged)]` - Meta(Word(word)) if word == "untagged" => match item.data { + Meta(Word(ref word)) if word == "untagged" => match item.data { syn::Data::Enum(_) => { untagged.set_true(); } @@ -295,7 +295,7 @@ impl Container { // Parse `#[serde(tag = "type")]` Meta(NameValue(ref m)) if m.ident == "tag" => { - if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) { + if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) { match item.data { syn::Data::Enum(_) => { internal_tag.set(s.value()); @@ -309,7 +309,7 @@ impl Container { // Parse `#[serde(content = "c")]` Meta(NameValue(ref m)) if m.ident == "content" => { - if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) { + if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) { match item.data { syn::Data::Enum(_) => { content.set(s.value()); @@ -324,23 +324,23 @@ impl Container { // Parse `#[serde(from = "Type")] Meta(NameValue(ref m)) if m.ident == "from" => { - if let Ok(from_ty) = parse_lit_into_ty(cx, m.ident.as_ref(), &m.lit) { + if let Ok(from_ty) = parse_lit_into_ty(cx, &m.ident, &m.lit) { type_from.set_opt(Some(from_ty)); } } // Parse `#[serde(into = "Type")] Meta(NameValue(ref m)) if m.ident == "into" => { - if let Ok(into_ty) = parse_lit_into_ty(cx, m.ident.as_ref(), &m.lit) { + if let Ok(into_ty) = parse_lit_into_ty(cx, &m.ident, &m.lit) { type_into.set_opt(Some(into_ty)); } } // Parse `#[serde(remote = "...")]` Meta(NameValue(ref m)) if m.ident == "remote" => { - if let Ok(path) = parse_lit_into_path(cx, m.ident.as_ref(), &m.lit) { + if let Ok(path) = parse_lit_into_path(cx, &m.ident, &m.lit) { if is_primitive_path(&path, "Self") { - remote.set(item.ident.into()); + remote.set(item.ident.clone().into()); } else { remote.set(path); } @@ -348,12 +348,12 @@ impl Container { } // Parse `#[serde(field_identifier)]` - Meta(Word(word)) if word == "field_identifier" => { + Meta(Word(ref word)) if word == "field_identifier" => { field_identifier.set_true(); } // Parse `#[serde(variant_identifier)]` - Meta(Word(word)) if word == 
"variant_identifier" => { + Meta(Word(ref word)) if word == "variant_identifier" => { variant_identifier.set_true(); } @@ -556,7 +556,7 @@ impl Variant { match meta_item { // Parse `#[serde(rename = "foo")]` Meta(NameValue(ref m)) if m.ident == "rename" => { - if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) { + if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) { ser_name.set(s.value()); de_name.set(s.value()); } @@ -572,7 +572,7 @@ impl Variant { // Parse `#[serde(rename_all = "foo")]` Meta(NameValue(ref m)) if m.ident == "rename_all" => { - if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) { + if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) { match RenameRule::from_str(&s.value()) { Ok(rename_rule) => rename_all.set(rename_rule), Err(()) => cx.error(format!( @@ -585,30 +585,30 @@ impl Variant { } // Parse `#[serde(skip)]` - Meta(Word(word)) if word == "skip" => { + Meta(Word(ref word)) if word == "skip" => { skip_serializing.set_true(); skip_deserializing.set_true(); } // Parse `#[serde(skip_deserializing)]` - Meta(Word(word)) if word == "skip_deserializing" => { + Meta(Word(ref word)) if word == "skip_deserializing" => { skip_deserializing.set_true(); } // Parse `#[serde(skip_serializing)]` - Meta(Word(word)) if word == "skip_serializing" => { + Meta(Word(ref word)) if word == "skip_serializing" => { skip_serializing.set_true(); } // Parse `#[serde(other)]` - Meta(Word(word)) if word == "other" => { + Meta(Word(ref word)) if word == "other" => { other.set_true(); } // Parse `#[serde(bound = "D: Serialize")]` Meta(NameValue(ref m)) if m.ident == "bound" => { if let Ok(where_predicates) = - parse_lit_into_where(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) + parse_lit_into_where(cx, &m.ident, &m.ident, &m.lit) { ser_bound.set(where_predicates.clone()); de_bound.set(where_predicates); @@ -625,7 +625,7 @@ impl Variant { // Parse `#[serde(with = "...")]` Meta(NameValue(ref m)) if m.ident == "with" => { - if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { + if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) { let mut ser_path = path.clone(); ser_path .path @@ -643,14 +643,14 @@ impl Variant { // Parse `#[serde(serialize_with = "...")]` Meta(NameValue(ref m)) if m.ident == "serialize_with" => { - if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { + if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) { serialize_with.set(path); } } // Parse `#[serde(deserialize_with = "...")]` Meta(NameValue(ref m)) if m.ident == "deserialize_with" => { - if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { + if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) { deserialize_with.set(path); } } @@ -828,7 +828,7 @@ impl Field { match meta_item { // Parse `#[serde(rename = "foo")]` Meta(NameValue(ref m)) if m.ident == "rename" => { - if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) { + if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) { ser_name.set(s.value()); de_name.set(s.value()); } @@ -843,57 +843,57 @@ impl Field { } // Parse `#[serde(default)]` - Meta(Word(word)) if word == "default" => { + Meta(Word(ref word)) if word == "default" => { default.set(Default::Default); } // Parse `#[serde(default = "...")]` Meta(NameValue(ref m)) if m.ident == "default" => { - if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { + if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, 
&m.lit) { default.set(Default::Path(path)); } } // Parse `#[serde(skip_serializing)]` - Meta(Word(word)) if word == "skip_serializing" => { + Meta(Word(ref word)) if word == "skip_serializing" => { skip_serializing.set_true(); } // Parse `#[serde(skip_deserializing)]` - Meta(Word(word)) if word == "skip_deserializing" => { + Meta(Word(ref word)) if word == "skip_deserializing" => { skip_deserializing.set_true(); } // Parse `#[serde(skip)]` - Meta(Word(word)) if word == "skip" => { + Meta(Word(ref word)) if word == "skip" => { skip_serializing.set_true(); skip_deserializing.set_true(); } // Parse `#[serde(skip_serializing_if = "...")]` Meta(NameValue(ref m)) if m.ident == "skip_serializing_if" => { - if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { + if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) { skip_serializing_if.set(path); } } // Parse `#[serde(serialize_with = "...")]` Meta(NameValue(ref m)) if m.ident == "serialize_with" => { - if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { + if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) { serialize_with.set(path); } } // Parse `#[serde(deserialize_with = "...")]` Meta(NameValue(ref m)) if m.ident == "deserialize_with" => { - if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { + if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) { deserialize_with.set(path); } } // Parse `#[serde(with = "...")]` Meta(NameValue(ref m)) if m.ident == "with" => { - if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { + if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) { let mut ser_path = path.clone(); ser_path .path .segments @@ -912,7 +912,7 @@ impl Field { // Parse `#[serde(bound = "D: Serialize")]` Meta(NameValue(ref m)) if m.ident == "bound" => { if let Ok(where_predicates) = - parse_lit_into_where(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) + parse_lit_into_where(cx, &m.ident, &m.ident, &m.lit) { ser_bound.set(where_predicates.clone()); de_bound.set(where_predicates); @@ -928,7 +928,7 @@ impl Field { } // Parse `#[serde(borrow)]` - Meta(Word(word)) if word == "borrow" => { + Meta(Word(ref word)) if word == "borrow" => { if let Ok(borrowable) = borrowable_lifetimes(cx, &ident, &field.ty) { borrowed_lifetimes.set(borrowable); } @@ -937,7 +937,7 @@ impl Field { // Parse `#[serde(borrow = "'a + 'b")]` Meta(NameValue(ref m)) if m.ident == "borrow" => { if let Ok(lifetimes) = - parse_lit_into_lifetimes(cx, m.ident.as_ref(), &m.lit) + parse_lit_into_lifetimes(cx, &m.ident, &m.lit) { if let Ok(borrowable) = borrowable_lifetimes(cx, &ident, &field.ty) { for lifetime in &lifetimes { @@ -955,13 +955,13 @@ impl Field { // Parse `#[serde(getter = "...")]` Meta(NameValue(ref m)) if m.ident == "getter" => { - if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) { + if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) { getter.set(path); } } // Parse `#[serde(flatten)]` - Meta(Word(word)) if word == "flatten" => { + Meta(Word(ref word)) if word == "flatten" => { flatten.set_true(); } @@ -1137,21 +1137,22 @@ fn get_ser_and_de<'a, T, F>( ) -> Result<SerAndDe<T>, ()> where T: 'a, - F: Fn(&Ctxt, &str, &str, &'a syn::Lit) -> Result<T, ()>, + F: Fn(&Ctxt, &Ident, &Ident, &'a syn::Lit) -> Result<T, ()>, { let mut ser_meta = Attr::none(cx, attr_name); let mut de_meta = Attr::none(cx, attr_name); + let attr_name = Ident::new(attr_name, Span::call_site()); for meta in metas { match *meta { Meta(NameValue(ref meta)) if
meta.ident == "serialize" => { - if let Ok(v) = f(cx, attr_name, meta.ident.as_ref(), &meta.lit) { + if let Ok(v) = f(cx, &attr_name, &meta.ident, &meta.lit) { ser_meta.set(v); } } Meta(NameValue(ref meta)) if meta.ident == "deserialize" => { - if let Ok(v) = f(cx, attr_name, meta.ident.as_ref(), &meta.lit) { + if let Ok(v) = f(cx, &attr_name, &meta.ident, &meta.lit) { de_meta.set(v); } } @@ -1200,8 +1201,8 @@ pub fn get_serde_meta_items(attr: &syn::Attribute) -> Option( cx: &Ctxt, - attr_name: &str, - meta_item_name: &str, + attr_name: &Ident, + meta_item_name: &Ident, lit: &'a syn::Lit, ) -> Result<&'a syn::LitStr, ()> { if let syn::Lit::Str(ref lit) = *lit { @@ -1215,7 +1216,7 @@ fn get_lit_str<'a>( } } -fn parse_lit_into_path(cx: &Ctxt, attr_name: &str, lit: &syn::Lit) -> Result { +fn parse_lit_into_path(cx: &Ctxt, attr_name: &Ident, lit: &syn::Lit) -> Result { let string = try!(get_lit_str(cx, attr_name, attr_name, lit)); parse_lit_str(string) .map_err(|_| cx.error(format!("failed to parse path: {:?}", string.value()))) @@ -1223,7 +1224,7 @@ fn parse_lit_into_path(cx: &Ctxt, attr_name: &str, lit: &syn::Lit) -> Result Result { let string = try!(get_lit_str(cx, attr_name, attr_name, lit)); @@ -1233,8 +1234,8 @@ fn parse_lit_into_expr_path( fn parse_lit_into_where( cx: &Ctxt, - attr_name: &str, - meta_item_name: &str, + attr_name: &Ident, + meta_item_name: &Ident, lit: &syn::Lit, ) -> Result, ()> { let string = try!(get_lit_str(cx, attr_name, meta_item_name, lit)); @@ -1249,7 +1250,7 @@ fn parse_lit_into_where( .map_err(|err| cx.error(err)) } -fn parse_lit_into_ty(cx: &Ctxt, attr_name: &str, lit: &syn::Lit) -> Result { +fn parse_lit_into_ty(cx: &Ctxt, attr_name: &Ident, lit: &syn::Lit) -> Result { let string = try!(get_lit_str(cx, attr_name, attr_name, lit)); parse_lit_str(string).map_err(|_| { @@ -1265,7 +1266,7 @@ fn parse_lit_into_ty(cx: &Ctxt, attr_name: &str, lit: &syn::Lit) -> Result Result, ()> { let string = try!(get_lit_str(cx, attr_name, attr_name, lit)); @@ -1286,7 +1287,7 @@ fn parse_lit_into_lifetimes( if let Ok(BorrowedLifetimes(lifetimes)) = parse_lit_str(string) { let mut set = BTreeSet::new(); for lifetime in lifetimes { - if !set.insert(lifetime) { + if !set.insert(lifetime.clone()) { cx.error(format!("duplicate borrowed lifetime `{}`", lifetime)); } } diff --git a/serde_derive/src/pretend.rs b/serde_derive/src/pretend.rs index cd51b4dd..c4ffe12d 100644 --- a/serde_derive/src/pretend.rs +++ b/serde_derive/src/pretend.rs @@ -1,5 +1,4 @@ -use proc_macro2::Span; -use quote::Tokens; +use proc_macro2::{Span, TokenStream}; use syn::Ident; use internals::ast::{Container, Data, Field, Style}; @@ -21,7 +20,7 @@ use internals::ast::{Container, Data, Field, Style}; // 8 | enum EnumDef { V } // | ^ // -pub fn pretend_used(cont: &Container) -> Tokens { +pub fn pretend_used(cont: &Container) -> TokenStream { let pretend_fields = pretend_fields_used(cont); let pretend_variants = pretend_variants_used(cont); @@ -49,8 +48,8 @@ pub fn pretend_used(cont: &Container) -> Tokens { // The `ref` is important in case the user has written a Drop impl on their // type. Rust does not allow destructuring a struct or enum that has a Drop // impl. 
-fn pretend_fields_used(cont: &Container) -> Tokens { - let type_ident = cont.ident; +fn pretend_fields_used(cont: &Container) -> TokenStream { + let type_ident = &cont.ident; let (_, ty_generics, _) = cont.generics.split_for_impl(); let patterns = match cont.data { @@ -58,7 +57,7 @@ fn pretend_fields_used(cont: &Container) -> Tokens { .iter() .filter_map(|variant| match variant.style { Style::Struct => { - let variant_ident = variant.ident; + let variant_ident = &variant.ident; let pat = struct_pattern(&variant.fields); Some(quote!(#type_ident::#variant_ident #pat)) } @@ -93,7 +92,7 @@ fn pretend_fields_used(cont: &Container) -> Tokens { // _ => {} // } // -fn pretend_variants_used(cont: &Container) -> Tokens { +fn pretend_variants_used(cont: &Container) -> TokenStream { let variants = match cont.data { Data::Enum(ref variants) => variants, Data::Struct(_, _) => { @@ -101,12 +100,12 @@ fn pretend_variants_used(cont: &Container) -> Tokens { } }; - let type_ident = cont.ident; + let type_ident = &cont.ident; let (_, ty_generics, _) = cont.generics.split_for_impl(); let turbofish = ty_generics.as_turbofish(); let cases = variants.iter().map(|variant| { - let variant_ident = variant.ident; + let variant_ident = &variant.ident; let placeholders = &(0..variant.fields.len()) .map(|i| Ident::new(&format!("__v{}", i), Span::call_site())) .collect::<Vec<_>>(); @@ -133,7 +132,7 @@ fn pretend_variants_used(cont: &Container) -> Tokens { quote!(#(#cases)*) } -fn struct_pattern(fields: &[Field]) -> Tokens { +fn struct_pattern(fields: &[Field]) -> TokenStream { let members = fields.iter().map(|field| &field.member); let placeholders = (0..fields.len()).map(|i| Ident::new(&format!("__v{}", i), Span::call_site())); diff --git a/serde_derive/src/ser.rs b/serde_derive/src/ser.rs index 710cc69e..11e5b483 100644 --- a/serde_derive/src/ser.rs +++ b/serde_derive/src/ser.rs @@ -6,8 +6,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use proc_macro2::Span; -use quote::Tokens; +use proc_macro2::{Span, TokenStream}; use syn::spanned::Spanned; use syn::{self, Ident, Index, Member}; @@ -18,13 +17,13 @@ use internals::{attr, Ctxt}; use pretend; use try; -pub fn expand_derive_serialize(input: &syn::DeriveInput) -> Result<Tokens, String> { +pub fn expand_derive_serialize(input: &syn::DeriveInput) -> Result<TokenStream, String> { let ctxt = Ctxt::new(); let cont = Container::from_ast(&ctxt, input); precondition(&ctxt, &cont); try!(ctxt.check()); - let ident = cont.ident; + let ident = &cont.ident; let params = Parameters::new(&cont); let (impl_generics, ty_generics, where_clause) = params.generics.split_for_impl(); let dummy_const = Ident::new(&format!("_IMPL_SERIALIZE_FOR_{}", ident), Span::call_site()); @@ -110,7 +109,7 @@ impl Parameters { let this = match cont.attrs.remote() { Some(remote) => remote.clone(), - None => cont.ident.into(), + None => cont.ident.clone().into(), }; let generics = build_generics(cont); @@ -125,8 +124,8 @@ impl Parameters { /// Type name to use in error messages and `&'static str` arguments to /// various Serializer methods. - fn type_name(&self) -> &str { - self.this.segments.last().unwrap().value().ident.as_ref() + fn type_name(&self) -> String { + self.this.segments.last().unwrap().value().ident.to_string() } } @@ -187,7 +186,7 @@ fn serialize_body(cont: &Container, params: &Parameters) -> Fragment { } fn serialize_into(params: &Parameters, type_into: &syn::Type) -> Fragment { - let self_var = params.self_var; + let self_var = &params.self_var; quote_block!
{ _serde::Serialize::serialize( &_serde::export::Into::<#type_into>::into(_serde::export::Clone::clone(#self_var)), @@ -352,7 +351,7 @@ fn serialize_struct_as_map( fn serialize_enum(params: &Parameters, variants: &[Variant], cattrs: &attr::Container) -> Fragment { assert!(variants.len() as u64 <= u64::from(u32::max_value())); - let self_var = params.self_var; + let self_var = &params.self_var; let arms: Vec<_> = variants .iter() @@ -374,9 +373,9 @@ fn serialize_variant( variant: &Variant, variant_index: u32, cattrs: &attr::Container, -) -> Tokens { +) -> TokenStream { let this = &params.this; - let variant_ident = variant.ident; + let variant_ident = &variant.ident; if variant.attrs.skip_serializing() { let skipped_msg = format!( @@ -524,7 +523,7 @@ fn serialize_internally_tagged_variant( let variant_name = variant.attrs.name().serialize_name(); let enum_ident_str = params.type_name(); - let variant_ident_str = variant.ident.as_ref(); + let variant_ident_str = variant.ident.to_string(); if let Some(path) = variant.attrs.serialize_with() { let ser = wrap_serialize_variant_with(params, path, variant); @@ -983,7 +982,7 @@ fn serialize_tuple_struct_visitor( params: &Parameters, is_enum: bool, tuple_trait: &TupleTrait, -) -> Vec<Tokens> { +) -> Vec<TokenStream> { fields .iter() .enumerate() @@ -1031,7 +1030,7 @@ fn serialize_struct_visitor( params: &Parameters, is_enum: bool, struct_trait: &StructTrait, -) -> Vec<Tokens> { +) -> Vec<TokenStream> { fields .iter() .filter(|&field| !field.attrs.skip_serializing()) @@ -1095,8 +1094,8 @@ fn wrap_serialize_field_with( params: &Parameters, field_ty: &syn::Type, serialize_with: &syn::ExprPath, - field_expr: &Tokens, -) -> Tokens { + field_expr: &TokenStream, +) -> TokenStream { wrap_serialize_with(params, serialize_with, &[field_ty], &[quote!(#field_expr)]) } @@ -1104,14 +1103,14 @@ fn wrap_serialize_variant_with( params: &Parameters, serialize_with: &syn::ExprPath, variant: &Variant, -) -> Tokens { +) -> TokenStream { let field_tys: Vec<_> = variant.fields.iter().map(|field| field.ty).collect(); let field_exprs: Vec<_> = variant .fields .iter() .map(|field| { let id = match field.member { - Member::Named(ident) => ident, + Member::Named(ref ident) => ident.clone(), Member::Unnamed(ref member) => { Ident::new(&format!("__field{}", member.index), Span::call_site()) } @@ -1131,8 +1130,8 @@ fn wrap_serialize_with( params: &Parameters, serialize_with: &syn::ExprPath, field_tys: &[&syn::Type], - field_exprs: &[Tokens], -) -> Tokens { + field_exprs: &[TokenStream], +) -> TokenStream { let this = &params.this; let (_, ty_generics, where_clause) = params.generics.split_for_impl(); @@ -1178,7 +1177,7 @@ fn wrap_serialize_with( // _serde::ser::SerializeStruct::end(__serde_state) // // where we want to omit the `mut` to avoid a warning.
-fn mut_if(is_mut: bool) -> Option<Tokens> { +fn mut_if(is_mut: bool) -> Option<TokenStream> { if is_mut { Some(quote!(mut)) } else { @@ -1186,8 +1185,8 @@ fn mut_if(is_mut: bool) -> Option<Tokens> { } } -fn get_member(params: &Parameters, field: &Field, member: &Member) -> Tokens { - let self_var = params.self_var; +fn get_member(params: &Parameters, field: &Field, member: &Member) -> TokenStream { + let self_var = &params.self_var; match (params.is_remote, field.attrs.getter()) { (false, None) => quote!(&#self_var.#member), (true, None) => { @@ -1212,7 +1211,7 @@ enum StructTrait { } impl StructTrait { - fn serialize_field(&self, span: Span) -> Tokens { + fn serialize_field(&self, span: Span) -> TokenStream { match *self { StructTrait::SerializeMap => { quote_spanned!(span=> _serde::ser::SerializeMap::serialize_entry) @@ -1226,7 +1225,7 @@ impl StructTrait { } } - fn skip_field(&self, span: Span) -> Option<Tokens> { + fn skip_field(&self, span: Span) -> Option<TokenStream> { match *self { StructTrait::SerializeMap => None, StructTrait::SerializeStruct => { @@ -1246,7 +1245,7 @@ enum TupleTrait { } impl TupleTrait { - fn serialize_element(&self, span: Span) -> Tokens { + fn serialize_element(&self, span: Span) -> TokenStream { match *self { TupleTrait::SerializeTuple => { quote_spanned!(span=> _serde::ser::SerializeTuple::serialize_element) diff --git a/serde_derive/src/try.rs b/serde_derive/src/try.rs index 74dd277f..48829b63 100644 --- a/serde_derive/src/try.rs +++ b/serde_derive/src/try.rs @@ -1,13 +1,12 @@ -use proc_macro2::{Op, Spacing}; -use quote::Tokens; +use proc_macro2::{Punct, Spacing, TokenStream}; // None of our generated code requires the `From::from` error conversion // performed by the standard library's `try!` macro. With this simplified macro // we see a significant improvement in type checking and borrow checking time of // the generated code and a slight improvement in binary size. -pub fn replacement() -> Tokens { +pub fn replacement() -> TokenStream { // Cannot pass `$expr` to `quote!` prior to Rust 1.17.0 so interpolate it. - let dollar = Op::new('$', Spacing::Alone); + let dollar = Punct::new('$', Spacing::Alone); quote! { #[allow(unused_macros)] diff --git a/serde_derive_internals/Cargo.toml b/serde_derive_internals/Cargo.toml index a782149a..0375e019 100644 --- a/serde_derive_internals/Cargo.toml +++ b/serde_derive_internals/Cargo.toml @@ -15,8 +15,8 @@ include = ["Cargo.toml", "lib.rs", "src/**/*.rs", "README.md", "LICENSE-APACHE", path = "lib.rs" [dependencies] -proc-macro2 = "0.3" -syn = { version = "0.13", default-features = false, features = ["derive", "parsing", "clone-impls"] } +proc-macro2 = "0.4" +syn = { version = "0.14", default-features = false, features = ["derive", "parsing", "clone-impls"] } [badges] travis-ci = { repository = "serde-rs/serde" }
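A minimal, self-contained sketch of the API change this patch tracks, assuming proc-macro2 0.4, quote 0.6, and syn 0.14 as pinned above. It is not part of the patch; the function name `expand` below is illustrative only and does not appear in serde_derive.

    // lib.rs of a hypothetical proc-macro helper crate built against the same
    // dependency versions the patch moves to (2015-edition style imports).
    #[macro_use]
    extern crate quote;
    extern crate proc_macro2;
    extern crate syn;

    use proc_macro2::TokenStream; // replaces quote::Tokens from quote 0.5 / proc-macro2 0.3

    // Identifiers are no longer Copy in proc-macro2 0.4 / syn 0.14, which is why
    // the patch borrows or clones `ident` fields throughout instead of moving them.
    fn expand(input: &syn::DeriveInput) -> TokenStream {
        let ident = &input.ident;
        quote! {
            impl #ident {
                fn type_name() -> &'static str { stringify!(#ident) }
            }
        }
    }

The same pattern accounts for most of the mechanical changes above: `Tokens` becomes `TokenStream`, `Ident::as_ref()` gives way to `to_string()`, and owned identifiers are cloned or borrowed rather than copied.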