Replace qualified uses of proc_macro2::TokenStream

Jonas Platte 2021-02-04 13:47:00 +01:00 committed by Ryan Leckey
parent 1803020650
commit cf97315c74
7 changed files with 33 additions and 34 deletions
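
The change is mechanical and identical across the crate: add `use proc_macro2::TokenStream;` once per module and drop the `proc_macro2::` prefix at every use site. A minimal before/after sketch of the pattern (module and function names here are illustrative, not taken from this commit):

mod before {
    use quote::quote;
    use syn::DeriveInput;

    // The type is fully qualified at every mention.
    pub fn expand(_input: &DeriveInput) -> syn::Result<proc_macro2::TokenStream> {
        let mut tts = proc_macro2::TokenStream::new();
        tts.extend(quote!(/* generated impl tokens */));
        Ok(tts)
    }
}

mod after {
    use proc_macro2::TokenStream;
    use quote::quote;
    use syn::DeriveInput;

    // One import, shorter signatures and bodies.
    pub fn expand(_input: &DeriveInput) -> syn::Result<TokenStream> {
        let mut tts = TokenStream::new();
        tts.extend(quote!(/* generated impl tokens */));
        Ok(tts)
    }
}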

View File

@@ -3,6 +3,7 @@ use super::attributes::{
check_weak_enum_attributes, parse_child_attributes, parse_container_attributes,
};
use super::rename_all;
+use proc_macro2::TokenStream;
use quote::quote;
use syn::punctuated::Punctuated;
use syn::token::Comma;
@@ -11,7 +12,7 @@ use syn::{
FieldsUnnamed, Stmt, Variant,
};
-pub fn expand_derive_decode(input: &DeriveInput) -> syn::Result<proc_macro2::TokenStream> {
+pub fn expand_derive_decode(input: &DeriveInput) -> syn::Result<TokenStream> {
let attrs = parse_container_attributes(&input.attrs)?;
match &input.data {
Data::Struct(DataStruct {
@@ -49,7 +50,7 @@ pub fn expand_derive_decode(input: &DeriveInput) -> syn::Result<proc_macro2::Tok
fn expand_derive_decode_transparent(
input: &DeriveInput,
field: &Field,
-) -> syn::Result<proc_macro2::TokenStream> {
+) -> syn::Result<TokenStream> {
check_transparent_attributes(input, field)?;
let ident = &input.ident;
@@ -85,7 +86,7 @@ fn expand_derive_decode_transparent(
fn expand_derive_decode_weak_enum(
input: &DeriveInput,
variants: &Punctuated<Variant, Comma>,
-) -> syn::Result<proc_macro2::TokenStream> {
+) -> syn::Result<TokenStream> {
let attr = check_weak_enum_attributes(input, &variants)?;
let repr = attr.repr.unwrap();
@@ -118,7 +119,7 @@ fn expand_derive_decode_weak_enum(
fn expand_derive_decode_strong_enum(
input: &DeriveInput,
variants: &Punctuated<Variant, Comma>,
-) -> syn::Result<proc_macro2::TokenStream> {
+) -> syn::Result<TokenStream> {
let cattr = check_strong_enum_attributes(input, &variants)?;
let ident = &input.ident;
@@ -148,7 +149,7 @@ fn expand_derive_decode_strong_enum(
}
};
-let mut tts = proc_macro2::TokenStream::new();
+let mut tts = TokenStream::new();
if cfg!(feature = "mysql") {
tts.extend(quote!(
@@ -192,10 +193,10 @@ fn expand_derive_decode_strong_enum(
fn expand_derive_decode_struct(
input: &DeriveInput,
fields: &Punctuated<Field, Comma>,
-) -> syn::Result<proc_macro2::TokenStream> {
+) -> syn::Result<TokenStream> {
check_struct_attributes(input, fields)?;
-let mut tts = proc_macro2::TokenStream::new();
+let mut tts = TokenStream::new();
if cfg!(feature = "postgres") {
let ident = &input.ident;

View File

@@ -3,7 +3,7 @@ use super::attributes::{
check_weak_enum_attributes, parse_child_attributes, parse_container_attributes,
};
use super::rename_all;
-use proc_macro2::Span;
+use proc_macro2::{Span, TokenStream};
use quote::quote;
use syn::punctuated::Punctuated;
use syn::token::Comma;
@@ -12,7 +12,7 @@ use syn::{
FieldsUnnamed, Lifetime, LifetimeDef, Stmt, Variant,
};
-pub fn expand_derive_encode(input: &DeriveInput) -> syn::Result<proc_macro2::TokenStream> {
+pub fn expand_derive_encode(input: &DeriveInput) -> syn::Result<TokenStream> {
let args = parse_container_attributes(&input.attrs)?;
match &input.data {
@@ -51,7 +51,7 @@ pub fn expand_derive_encode(input: &DeriveInput) -> syn::Result<proc_macro2::Tok
fn expand_derive_encode_transparent(
input: &DeriveInput,
field: &Field,
-) -> syn::Result<proc_macro2::TokenStream> {
+) -> syn::Result<TokenStream> {
check_transparent_attributes(input, field)?;
let ident = &input.ident;
@@ -97,7 +97,7 @@ fn expand_derive_encode_transparent(
fn expand_derive_encode_weak_enum(
input: &DeriveInput,
variants: &Punctuated<Variant, Comma>,
-) -> syn::Result<proc_macro2::TokenStream> {
+) -> syn::Result<TokenStream> {
let attr = check_weak_enum_attributes(input, &variants)?;
let repr = attr.repr.unwrap();
let ident = &input.ident;
@@ -129,7 +129,7 @@ fn expand_derive_encode_weak_enum(
fn expand_derive_encode_strong_enum(
input: &DeriveInput,
variants: &Punctuated<Variant, Comma>,
-) -> syn::Result<proc_macro2::TokenStream> {
+) -> syn::Result<TokenStream> {
let cattr = check_strong_enum_attributes(input, &variants)?;
let ident = &input.ident;
@@ -176,10 +176,10 @@ fn expand_derive_encode_strong_enum(
fn expand_derive_encode_struct(
input: &DeriveInput,
fields: &Punctuated<Field, Comma>,
-) -> syn::Result<proc_macro2::TokenStream> {
+) -> syn::Result<TokenStream> {
check_struct_attributes(input, &fields)?;
-let mut tts = proc_macro2::TokenStream::new();
+let mut tts = TokenStream::new();
if cfg!(feature = "postgres") {
let ident = &input.ident;

View File

@@ -11,19 +11,16 @@ pub(crate) use row::expand_derive_from_row;
use self::attributes::RenameAll;
use heck::{CamelCase, KebabCase, MixedCase, ShoutySnakeCase, SnakeCase};
+use proc_macro2::TokenStream;
use std::iter::FromIterator;
use syn::DeriveInput;
-pub(crate) fn expand_derive_type_encode_decode(
-input: &DeriveInput,
-) -> syn::Result<proc_macro2::TokenStream> {
+pub(crate) fn expand_derive_type_encode_decode(input: &DeriveInput) -> syn::Result<TokenStream> {
let encode_tts = expand_derive_encode(input)?;
let decode_tts = expand_derive_decode(input)?;
let type_tts = expand_derive_type(input)?;
-let combined = proc_macro2::TokenStream::from_iter(
-encode_tts.into_iter().chain(decode_tts).chain(type_tts),
-);
+let combined = TokenStream::from_iter(encode_tts.into_iter().chain(decode_tts).chain(type_tts));
Ok(combined)
}
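
As an aside on the hunk above: `proc_macro2::TokenStream` implements `FromIterator` for both `TokenTree`s and whole `TokenStream`s, which is what lets the three derive outputs be flattened into a single stream on one line. A standalone sketch of the same combination, with placeholder contents (not taken from this commit):

use proc_macro2::TokenStream;
use quote::quote;
use std::iter::FromIterator; // explicit import needed on the 2018 edition

fn combine() -> TokenStream {
    let encode_tts: TokenStream = quote!(struct Encode;);
    let decode_tts: TokenStream = quote!(struct Decode;);
    let type_tts: TokenStream = quote!(struct Type;);
    // `into_iter()` yields `TokenTree`s; `chain` accepts the other streams
    // because `TokenStream: IntoIterator<Item = TokenTree>`.
    TokenStream::from_iter(encode_tts.into_iter().chain(decode_tts).chain(type_tts))
}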

View File

@@ -1,4 +1,4 @@
-use proc_macro2::Span;
+use proc_macro2::{Span, TokenStream};
use quote::quote;
use syn::{
parse_quote, punctuated::Punctuated, token::Comma, Data, DataStruct, DeriveInput, Field,
@@ -10,7 +10,7 @@ use super::{
rename_all,
};
-pub fn expand_derive_from_row(input: &DeriveInput) -> syn::Result<proc_macro2::TokenStream> {
+pub fn expand_derive_from_row(input: &DeriveInput) -> syn::Result<TokenStream> {
match &input.data {
Data::Struct(DataStruct {
fields: Fields::Named(FieldsNamed { named, .. }),
@@ -39,7 +39,7 @@ pub fn expand_derive_from_row(input: &DeriveInput) -> syn::Result<proc_macro2::T
fn expand_derive_from_row_struct(
input: &DeriveInput,
fields: &Punctuated<Field, Comma>,
-) -> syn::Result<proc_macro2::TokenStream> {
+) -> syn::Result<TokenStream> {
let ident = &input.ident;
let generics = &input.generics;
@@ -122,7 +122,7 @@ fn expand_derive_from_row_struct(
fn expand_derive_from_row_struct_unnamed(
input: &DeriveInput,
fields: &Punctuated<Field, Comma>,
-) -> syn::Result<proc_macro2::TokenStream> {
+) -> syn::Result<TokenStream> {
let ident = &input.ident;
let generics = &input.generics;

View File

@@ -2,6 +2,7 @@ use super::attributes::{
check_strong_enum_attributes, check_struct_attributes, check_transparent_attributes,
check_weak_enum_attributes, parse_container_attributes,
};
+use proc_macro2::TokenStream;
use quote::quote;
use syn::punctuated::Punctuated;
use syn::token::Comma;
@@ -10,7 +11,7 @@ use syn::{
FieldsUnnamed, Variant,
};
-pub fn expand_derive_type(input: &DeriveInput) -> syn::Result<proc_macro2::TokenStream> {
+pub fn expand_derive_type(input: &DeriveInput) -> syn::Result<TokenStream> {
let attrs = parse_container_attributes(&input.attrs)?;
match &input.data {
Data::Struct(DataStruct {
@@ -48,7 +49,7 @@ pub fn expand_derive_type(input: &DeriveInput) -> syn::Result<proc_macro2::Token
fn expand_derive_has_sql_type_transparent(
input: &DeriveInput,
field: &Field,
-) -> syn::Result<proc_macro2::TokenStream> {
+) -> syn::Result<TokenStream> {
let attr = check_transparent_attributes(input, field)?;
let ident = &input.ident;
@@ -82,7 +83,7 @@ fn expand_derive_has_sql_type_transparent(
));
}
-let mut tts = proc_macro2::TokenStream::new();
+let mut tts = TokenStream::new();
if cfg!(feature = "postgres") {
let ty_name = attr
@@ -105,7 +106,7 @@ fn expand_derive_has_sql_type_transparent(
fn expand_derive_has_sql_type_weak_enum(
input: &DeriveInput,
variants: &Punctuated<Variant, Comma>,
-) -> syn::Result<proc_macro2::TokenStream> {
+) -> syn::Result<TokenStream> {
let attr = check_weak_enum_attributes(input, variants)?;
let repr = attr.repr.unwrap();
let ident = &input.ident;
@@ -126,11 +127,11 @@ fn expand_derive_has_sql_type_weak_enum(
fn expand_derive_has_sql_type_strong_enum(
input: &DeriveInput,
variants: &Punctuated<Variant, Comma>,
-) -> syn::Result<proc_macro2::TokenStream> {
+) -> syn::Result<TokenStream> {
let attributes = check_strong_enum_attributes(input, variants)?;
let ident = &input.ident;
-let mut tts = proc_macro2::TokenStream::new();
+let mut tts = TokenStream::new();
if cfg!(feature = "mysql") {
tts.extend(quote!(
@@ -181,11 +182,11 @@ fn expand_derive_has_sql_type_strong_enum(
fn expand_derive_has_sql_type_struct(
input: &DeriveInput,
fields: &Punctuated<Field, Comma>,
-) -> syn::Result<proc_macro2::TokenStream> {
+) -> syn::Result<TokenStream> {
let attributes = check_struct_attributes(input, fields)?;
let ident = &input.ident;
-let mut tts = proc_macro2::TokenStream::new();
+let mut tts = TokenStream::new();
if cfg!(feature = "postgres") {
let ty_name = attributes

View File

@@ -55,7 +55,7 @@ impl ToTokens for QuotedMigration {
}
// mostly copied from sqlx-core/src/migrate/source.rs
-pub(crate) fn expand_migrator_from_dir(dir: LitStr) -> crate::Result<proc_macro2::TokenStream> {
+pub(crate) fn expand_migrator_from_dir(dir: LitStr) -> crate::Result<TokenStream> {
let path = crate::common::resolve_path(&dir.value(), dir.span())?;
let mut migrations = Vec::new();

View File

@@ -67,7 +67,7 @@ pub fn quote_args<DB: DatabaseExt>(
format!("unsupported type {} for param #{}", param_ty, i + 1)
}
})?
-.parse::<proc_macro2::TokenStream>()
+.parse::<TokenStream>()
.map_err(|_| format!("Rust type mapping for {} not parsable", param_ty))?
}