Mirror of https://gitlab.com/msrd0/gotham-restful.git, synced 2025-02-23 04:52:28 +00:00

simplify derive/macro code

Dominic 2020-05-04 19:08:22 +02:00
parent 7ef964b0a0
commit 110ef2be7a
Signed by: msrd0
GPG key ID: DCC8C247452E98F9
8 changed files with 108 additions and 145 deletions
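
The heart of the change shows up in the second file of the diff below: each derive entry point used to parse its own TokenStream and convert errors into compile_error!() itself; this commit moves that plumbing into shared expand_derive/expand_macro helpers so every proc-macro function becomes a one-liner. A minimal, self-contained sketch of the derive half of that pattern follows (helper and macro names are taken from the diff; the expand_from_body body here is a placeholder for illustration, not the real expansion):

// Sketch only: assumes a proc-macro crate with syn, quote and proc-macro2
// as dependencies (proc-macro = true in Cargo.toml).
use proc_macro::TokenStream;
use proc_macro2::TokenStream as TokenStream2;
use quote::quote;
use syn::{parse_macro_input, DeriveInput, Result};

// Shared plumbing: parse the derive input once, run the expander, and turn
// any syn::Error into a compile_error!() invocation.
fn expand_derive<F>(input : TokenStream, expand : F) -> TokenStream
where
	F : FnOnce(DeriveInput) -> Result<TokenStream2>
{
	expand(parse_macro_input!(input))
		.unwrap_or_else(|err| err.to_compile_error())
		.into()
}

// A per-macro expander only sees DeriveInput and returns syn::Result.
// NOTE: placeholder body, not the real FromBody expansion.
fn expand_from_body(input : DeriveInput) -> Result<TokenStream2>
{
	let ident = input.ident;
	Ok(quote!(impl #ident {}))
}

// The #[proc_macro_derive] entry point shrinks to a one-liner.
#[proc_macro_derive(FromBody)]
pub fn derive_from_body(input : TokenStream) -> TokenStream
{
	expand_derive(input, expand_from_body)
}

The attribute macros follow the same shape via expand_macro, which parses both the attribute arguments and the annotated item before delegating to expand_method.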


@@ -1,5 +1,4 @@
-use proc_macro::TokenStream;
-use proc_macro2::TokenStream as TokenStream2;
+use proc_macro2::TokenStream;
 use quote::{format_ident, quote};
 use std::cmp::min;
 use syn::{
@@ -10,16 +9,10 @@ use syn::{
 	Field,
 	Fields,
 	Ident,
+	Result,
 	Type
 };
 
-pub fn expand_from_body(tokens : TokenStream) -> TokenStream
-{
-	expand(tokens)
-		.unwrap_or_else(|err| err.to_compile_error())
-		.into()
-}
-
 struct ParsedFields
 {
 	fields : Vec<(Ident, Type)>,
@@ -28,7 +21,7 @@ struct ParsedFields
 
 impl ParsedFields
 {
-	fn from_named<I>(fields : I) -> Result<Self, Error>
+	fn from_named<I>(fields : I) -> Result<Self>
 	where
 		I : Iterator<Item = Field>
 	{
@@ -36,7 +29,7 @@ impl ParsedFields
 		Ok(Self { fields, named: true })
 	}
 
-	fn from_unnamed<I>(fields : I) -> Result<Self, Error>
+	fn from_unnamed<I>(fields : I) -> Result<Self>
 	where
 		I : Iterator<Item = Field>
 	{
@@ -44,16 +37,15 @@ impl ParsedFields
 		Ok(Self { fields, named: false })
 	}
 
-	fn from_unit() -> Result<Self, Error>
+	fn from_unit() -> Result<Self>
 	{
 		Ok(Self { fields: Vec::new(), named: false })
 	}
 }
 
-fn expand(tokens : TokenStream) -> Result<TokenStream2, Error>
+pub fn expand_from_body(input : DeriveInput) -> Result<TokenStream>
 {
 	let krate = super::krate();
-	let input : DeriveInput = syn::parse(tokens)?;
 	let ident = input.ident;
 	let generics = input.generics;


@@ -1,6 +1,7 @@
 use proc_macro::TokenStream;
 use proc_macro2::TokenStream as TokenStream2;
 use quote::quote;
+use syn::{parse_macro_input, parse_macro_input::ParseMacroInput, DeriveInput, Result};
 
 mod util;
@@ -16,95 +17,118 @@ mod resource_error;
 use resource_error::expand_resource_error;
 #[cfg(feature = "openapi")]
 mod openapi_type;
+#[cfg(feature = "openapi")]
+use openapi_type::expand_openapi_type;
 
 #[inline]
-fn print_tokens(tokens : TokenStream) -> TokenStream
+fn print_tokens(tokens : TokenStream2) -> TokenStream
 {
 	//eprintln!("{}", tokens);
-	tokens
+	tokens.into()
 }
 
+#[inline]
+fn expand_derive<F>(input : TokenStream, expand : F) -> TokenStream
+where
+	F : FnOnce(DeriveInput) -> Result<TokenStream2>
+{
+	print_tokens(expand(parse_macro_input!(input))
+		.unwrap_or_else(|err| err.to_compile_error()))
+}
+
+#[inline]
+fn expand_macro<F, A, I>(attrs : TokenStream, item : TokenStream, expand : F) -> TokenStream
+where
+	F : FnOnce(A, I) -> Result<TokenStream2>,
+	A : ParseMacroInput,
+	I : ParseMacroInput
+{
+	print_tokens(expand(parse_macro_input!(attrs), parse_macro_input!(item))
+		.unwrap_or_else(|err| err.to_compile_error()))
+}
+
+#[inline]
 fn krate() -> TokenStream2
 {
 	quote!(::gotham_restful)
 }
 
 #[proc_macro_derive(FromBody)]
-pub fn derive_from_body(tokens : TokenStream) -> TokenStream
+pub fn derive_from_body(input : TokenStream) -> TokenStream
 {
-	print_tokens(expand_from_body(tokens))
+	expand_derive(input, expand_from_body)
 }
 
 #[cfg(feature = "openapi")]
 #[proc_macro_derive(OpenapiType, attributes(openapi))]
-pub fn derive_openapi_type(tokens : TokenStream) -> TokenStream
+pub fn derive_openapi_type(input : TokenStream) -> TokenStream
 {
-	print_tokens(openapi_type::expand(tokens))
+	expand_derive(input, expand_openapi_type)
 }
 
 #[proc_macro_derive(RequestBody, attributes(supported_types))]
-pub fn derive_request_body(tokens : TokenStream) -> TokenStream
+pub fn derive_request_body(input : TokenStream) -> TokenStream
 {
-	print_tokens(expand_request_body(tokens))
+	expand_derive(input, expand_request_body)
 }
 
 #[proc_macro_derive(Resource, attributes(rest_resource))]
-pub fn derive_resource(tokens : TokenStream) -> TokenStream
+pub fn derive_resource(input : TokenStream) -> TokenStream
 {
-	print_tokens(expand_resource(tokens))
+	expand_derive(input, expand_resource)
 }
 
 #[proc_macro_derive(ResourceError, attributes(display, from, status))]
-pub fn derive_resource_error(tokens : TokenStream) -> TokenStream
+pub fn derive_resource_error(input : TokenStream) -> TokenStream
 {
-	print_tokens(expand_resource_error(tokens))
+	expand_derive(input, expand_resource_error)
 }
 
 #[proc_macro_attribute]
 pub fn rest_read_all(attr : TokenStream, item : TokenStream) -> TokenStream
 {
-	print_tokens(expand_method(Method::ReadAll, attr, item))
+	expand_macro(attr, item, |attr, item| expand_method(Method::ReadAll, attr, item))
 }
 
 #[proc_macro_attribute]
 pub fn rest_read(attr : TokenStream, item : TokenStream) -> TokenStream
 {
-	print_tokens(expand_method(Method::Read, attr, item))
+	expand_macro(attr, item, |attr, item| expand_method(Method::Read, attr, item))
 }
 
 #[proc_macro_attribute]
 pub fn rest_search(attr : TokenStream, item : TokenStream) -> TokenStream
 {
-	print_tokens(expand_method(Method::Search, attr, item))
+	expand_macro(attr, item, |attr, item| expand_method(Method::Search, attr, item))
 }
 
 #[proc_macro_attribute]
 pub fn rest_create(attr : TokenStream, item : TokenStream) -> TokenStream
 {
-	print_tokens(expand_method(Method::Create, attr, item))
+	expand_macro(attr, item, |attr, item| expand_method(Method::Create, attr, item))
 }
 
 #[proc_macro_attribute]
 pub fn rest_update_all(attr : TokenStream, item : TokenStream) -> TokenStream
 {
-	print_tokens(expand_method(Method::UpdateAll, attr, item))
+	expand_macro(attr, item, |attr, item| expand_method(Method::UpdateAll, attr, item))
 }
 
 #[proc_macro_attribute]
 pub fn rest_update(attr : TokenStream, item : TokenStream) -> TokenStream
 {
-	print_tokens(expand_method(Method::Update, attr, item))
+	expand_macro(attr, item, |attr, item| expand_method(Method::Update, attr, item))
 }
 
 #[proc_macro_attribute]
 pub fn rest_delete_all(attr : TokenStream, item : TokenStream) -> TokenStream
 {
-	print_tokens(expand_method(Method::DeleteAll, attr, item))
+	expand_macro(attr, item, |attr, item| expand_method(Method::DeleteAll, attr, item))
 }
 
 #[proc_macro_attribute]
 pub fn rest_delete(attr : TokenStream, item : TokenStream) -> TokenStream
 {
-	print_tokens(expand_method(Method::Delete, attr, item))
+	expand_macro(attr, item, |attr, item| expand_method(Method::Delete, attr, item))
 }


@@ -1,10 +1,8 @@
 use crate::util::CollectToResult;
 use heck::{CamelCase, SnakeCase};
-use proc_macro::TokenStream;
-use proc_macro2::{Ident, Span, TokenStream as TokenStream2};
+use proc_macro2::{Ident, Span, TokenStream};
 use quote::{format_ident, quote};
 use syn::{
-	parse_macro_input,
 	spanned::Spanned,
 	Attribute,
 	AttributeArgs,
@@ -16,6 +14,7 @@ use syn::{
 	Meta,
 	NestedMeta,
 	PatType,
+	Result,
 	ReturnType,
 	Type
 };
@@ -35,8 +34,9 @@ pub enum Method
 
 impl FromStr for Method
 {
-	type Err = String;
-	fn from_str(str : &str) -> Result<Self, Self::Err>
+	type Err = Error;
+
+	fn from_str(str : &str) -> Result<Self>
 	{
 		match str {
 			"ReadAll" | "read_all" => Ok(Self::ReadAll),
@@ -47,7 +47,7 @@ impl FromStr for Method
 			"Update" | "update" => Ok(Self::Update),
 			"DeleteAll" | "delete_all" => Ok(Self::DeleteAll),
 			"Delete" | "delete" => Ok(Self::Delete),
-			_ => Err(format!("Unknown method: `{}'", str))
+			_ => Err(Error::new(Span::call_site(), format!("Unknown method: `{}'", str)))
 		}
 	}
 }
@@ -148,7 +148,7 @@ impl MethodArgumentType
 		matches!(self, Self::AuthStatus(_) | Self::AuthStatusRef(_))
 	}
 
-	fn quote_ty(&self) -> Option<TokenStream2>
+	fn quote_ty(&self) -> Option<TokenStream>
 	{
 		match self {
 			Self::MethodArg(ty) | Self::DatabaseConnection(ty) | Self::AuthStatus(ty) | Self::AuthStatusRef(ty) => Some(quote!(#ty)),
@@ -172,7 +172,7 @@ impl Spanned for MethodArgument
 	}
 }
 
-fn interpret_arg_ty(attrs : &[Attribute], name : &str, ty : Type) -> Result<MethodArgumentType, Error>
+fn interpret_arg_ty(attrs : &[Attribute], name : &str, ty : Type) -> Result<MethodArgumentType>
 {
 	let attr = attrs.iter()
 		.find(|arg| arg.path.segments.iter().any(|path| &path.ident.to_string() == "rest_arg"))
@@ -206,7 +206,7 @@ fn interpret_arg_ty(attrs : &[Attribute], name : &str, ty : Type) -> Result<Meth
 	Ok(MethodArgumentType::MethodArg(ty))
 }
 
-fn interpret_arg(index : usize, arg : &PatType) -> Result<MethodArgument, Error>
+fn interpret_arg(index : usize, arg : &PatType) -> Result<MethodArgument>
 {
 	let pat = &arg.pat;
 	let ident = format_ident!("arg{}", index);
@@ -217,7 +217,7 @@ fn interpret_arg(index : usize, arg : &PatType) -> Result<MethodArgument, Error>
 }
 
 #[cfg(feature = "openapi")]
-fn expand_operation_id(attrs : &[NestedMeta]) -> TokenStream2
+fn expand_operation_id(attrs : &[NestedMeta]) -> TokenStream
 {
 	let mut operation_id : Option<&Lit> = None;
 	for meta in attrs
@@ -243,12 +243,12 @@ fn expand_operation_id(attrs : &[NestedMeta]) -> TokenStream2
 }
 
 #[cfg(not(feature = "openapi"))]
-fn expand_operation_id(_ : &[NestedMeta]) -> TokenStream2
+fn expand_operation_id(_ : &[NestedMeta]) -> TokenStream
 {
 	quote!()
 }
 
-fn expand_wants_auth(attrs : &[NestedMeta], default : bool) -> TokenStream2
+fn expand_wants_auth(attrs : &[NestedMeta], default : bool) -> TokenStream
 {
 	let default_lit = Lit::Bool(LitBool { value: default, span: Span::call_site() });
 	let mut wants_auth = &default_lit;
@@ -272,21 +272,18 @@ fn expand_wants_auth(attrs : &[NestedMeta], default : bool) -> TokenStream2
 
 #[allow(clippy::comparison_chain)]
-fn expand(method : Method, attrs : TokenStream, item : TokenStream) -> Result<TokenStream2, Error>
+pub fn expand_method(method : Method, mut attrs : AttributeArgs, fun : ItemFn) -> Result<TokenStream>
 {
 	let krate = super::krate();
 
 	// parse attributes
-	// TODO this is not public api but syn currently doesn't offer another convenient way to parse AttributeArgs
-	let mut method_attrs : AttributeArgs = parse_macro_input::parse(attrs)?;
-	let resource_path = match method_attrs.remove(0) {
+	let resource_path = match attrs.remove(0) {
 		NestedMeta::Meta(Meta::Path(path)) => path,
 		p => return Err(Error::new(p.span(), "Expected name of the Resource struct this method belongs to"))
 	};
 
 	let resource_name = resource_path.segments.last().map(|s| s.ident.to_string())
 		.ok_or_else(|| Error::new(resource_path.span(), "Resource name must not be empty"))?;
 
-	let fun : ItemFn = syn::parse(item)?;
 	let fun_ident = &fun.sig.ident;
 	let fun_vis = &fun.vis;
 	let fun_is_async = fun.sig.asyncness.is_some();
@@ -337,7 +334,7 @@ fn expand(method : Method, attrs : TokenStream, item : TokenStream) -> Result<To
 	{
 		return Err(Error::new(fun_ident.span(), "Too few arguments"));
 	}
-	let generics : Vec<TokenStream2> = generics_args.iter()
+	let generics : Vec<TokenStream> = generics_args.iter()
 		.map(|arg| arg.ty.quote_ty().unwrap())
 		.zip(ty_names)
 		.map(|(arg, name)| {
@@ -347,7 +344,7 @@ fn expand(method : Method, attrs : TokenStream, item : TokenStream) -> Result<To
 		.collect();
 
 	// extract the definition of our method
-	let mut args_def : Vec<TokenStream2> = args.iter()
+	let mut args_def : Vec<TokenStream> = args.iter()
 		.filter(|arg| (*arg).ty.is_method_arg())
 		.map(|arg| {
 			let ident = &arg.ident;
@@ -357,7 +354,7 @@ fn expand(method : Method, attrs : TokenStream, item : TokenStream) -> Result<To
 	args_def.insert(0, quote!(mut #state_ident : #krate::State));
 
 	// extract the arguments to pass over to the supplied method
-	let args_pass : Vec<TokenStream2> = args.iter().map(|arg| match (&arg.ty, &arg.ident) {
+	let args_pass : Vec<TokenStream> = args.iter().map(|arg| match (&arg.ty, &arg.ident) {
 		(MethodArgumentType::StateRef, _) => quote!(&#state_ident),
 		(MethodArgumentType::StateMutRef, _) => quote!(&mut #state_ident),
 		(MethodArgumentType::MethodArg(_), ident) => quote!(#ident),
@@ -417,8 +414,8 @@ fn expand(method : Method, attrs : TokenStream, item : TokenStream) -> Result<To
 	}
 
 	// attribute generated code
-	let operation_id = expand_operation_id(&method_attrs);
-	let wants_auth = expand_wants_auth(&method_attrs, args.iter().any(|arg| (*arg).ty.is_auth_status()));
+	let operation_id = expand_operation_id(&attrs);
+	let wants_auth = expand_wants_auth(&attrs, args.iter().any(|arg| (*arg).ty.is_auth_status()));
 
 	// put everything together
 	Ok(quote! {
@@ -466,10 +463,3 @@ fn expand(method : Method, attrs : TokenStream, item : TokenStream) -> Result<To
 		}
 	})
 }
-
-pub fn expand_method(method : Method, attrs : TokenStream, item : TokenStream) -> TokenStream
-{
-	expand(method, attrs, item)
-		.unwrap_or_else(|err| err.to_compile_error())
-		.into()
-}


@@ -1,6 +1,5 @@
 use crate::util::{CollectToResult, remove_parens};
-use proc_macro::TokenStream;
-use proc_macro2::TokenStream as TokenStream2;
+use proc_macro2::{Ident, TokenStream};
 use quote::quote;
 use syn::{
-	parse_macro_input,
@@ -16,29 +15,23 @@ use syn::{
 	Fields,
 	Generics,
 	GenericParam,
-	Ident,
 	Lit,
 	Meta,
 	NestedMeta,
+	Result,
 	Variant
 };
 
-pub fn expand(tokens : TokenStream) -> TokenStream
+pub fn expand_openapi_type(input : DeriveInput) -> Result<TokenStream>
 {
-	let input = parse_macro_input!(tokens as DeriveInput);
-	let output = match (input.ident, input.generics, input.attrs, input.data) {
+	match (input.ident, input.generics, input.attrs, input.data) {
 		(ident, generics, attrs, Data::Enum(inum)) => expand_enum(ident, generics, attrs, inum),
 		(ident, generics, attrs, Data::Struct(strukt)) => expand_struct(ident, generics, attrs, strukt),
 		(_, _, _, Data::Union(uni)) => Err(Error::new(uni.union_token.span(), "#[derive(OpenapiType)] only works for structs and enums"))
-	};
-	output
-		.unwrap_or_else(|err| err.to_compile_error())
-		.into()
+	}
 }
 
-fn expand_where(generics : &Generics) -> TokenStream2
+fn expand_where(generics : &Generics) -> TokenStream
 {
 	if generics.params.is_empty()
 	{
@@ -66,7 +59,7 @@ struct Attrs
 	rename : Option<String>
 }
 
-fn to_string(lit : &Lit) -> Result<String, Error>
+fn to_string(lit : &Lit) -> Result<String>
 {
 	match lit {
 		Lit::Str(str) => Ok(str.value()),
@@ -74,7 +67,7 @@ fn to_string(lit : &Lit) -> Result<String, Error>
 	}
 }
 
-fn to_bool(lit : &Lit) -> Result<bool, Error>
+fn to_bool(lit : &Lit) -> Result<bool>
 {
 	match lit {
 		Lit::Bool(bool) => Ok(bool.value),
@@ -82,7 +75,7 @@ fn to_bool(lit : &Lit) -> Result<bool, Error>
 	}
 }
 
-fn parse_attributes(input : &[Attribute]) -> Result<Attrs, Error>
+fn parse_attributes(input : &[Attribute]) -> Result<Attrs>
 {
 	let mut parsed = Attrs::default();
 	for attr in input
@@ -111,7 +104,7 @@ fn parse_attributes(input : &[Attribute]) -> Result<Attrs, Error>
 	Ok(parsed)
 }
 
-fn expand_variant(variant : &Variant) -> Result<TokenStream2, Error>
+fn expand_variant(variant : &Variant) -> Result<TokenStream>
 {
 	if variant.fields != Fields::Unit
 	{
@@ -131,7 +124,7 @@ fn expand_variant(variant : &Variant) -> Result<TokenStream2, Error>
 	})
 }
 
-fn expand_enum(ident : Ident, generics : Generics, attrs : Vec<Attribute>, input : DataEnum) -> Result<TokenStream2, Error>
+fn expand_enum(ident : Ident, generics : Generics, attrs : Vec<Attribute>, input : DataEnum) -> Result<TokenStream>
 {
 	let krate = super::krate();
 	let where_clause = expand_where(&generics);
@@ -176,7 +169,7 @@ fn expand_enum(ident : Ident, generics : Generics, attrs : Vec<Attribute>, input
 	})
 }
 
-fn expand_field(field : &Field) -> Result<TokenStream2, Error>
+fn expand_field(field : &Field) -> Result<TokenStream>
 {
 	let ident = match &field.ident {
 		Some(ident) => ident,
@@ -231,7 +224,7 @@ fn expand_field(field : &Field) -> Result<TokenStream2, Error>
 	}})
 }
 
-pub fn expand_struct(ident : Ident, generics : Generics, attrs : Vec<Attribute>, input : DataStruct) -> Result<TokenStream2, Error>
+fn expand_struct(ident : Ident, generics : Generics, attrs : Vec<Attribute>, input : DataStruct) -> Result<TokenStream>
 {
 	let krate = super::krate();
 	let where_clause = expand_where(&generics);
@@ -243,7 +236,7 @@ pub fn expand_struct(ident : Ident, generics : Generics, attrs : Vec<Attribute>,
 		None => ident.to_string()
 	};
 
-	let fields : Vec<TokenStream2> = match input.fields {
+	let fields : Vec<TokenStream> = match input.fields {
 		Fields::Named(named_fields) => {
 			named_fields.named.iter()
 				.map(expand_field)


@@ -1,17 +1,15 @@
 use crate::util::CollectToResult;
-use proc_macro::TokenStream;
-use proc_macro2::TokenStream as TokenStream2;
+use proc_macro2::{Ident, TokenStream};
 use quote::quote;
 use std::iter;
 use syn::{
 	parenthesized,
-	parse::{Parse, ParseStream, Result as SynResult},
+	parse::{Parse, ParseStream},
 	punctuated::Punctuated,
 	DeriveInput,
-	Error,
 	Generics,
-	Ident,
 	Path,
+	Result,
 	Token
 };
@@ -19,7 +17,7 @@ struct MimeList(Punctuated<Path, Token![,]>);
 
 impl Parse for MimeList
 {
-	fn parse(input: ParseStream) -> SynResult<Self>
+	fn parse(input: ParseStream) -> Result<Self>
 	{
 		let content;
 		let _paren = parenthesized!(content in input);
@@ -29,13 +27,13 @@ impl Parse for MimeList
 }
 
 #[cfg(not(feature = "openapi"))]
-fn impl_openapi_type(_ident : &Ident, _generics : &Generics) -> TokenStream2
+fn impl_openapi_type(_ident : &Ident, _generics : &Generics) -> TokenStream
 {
 	quote!()
 }
 
 #[cfg(feature = "openapi")]
-fn impl_openapi_type(ident : &Ident, generics : &Generics) -> TokenStream2
+fn impl_openapi_type(ident : &Ident, generics : &Generics) -> TokenStream
 {
 	let krate = super::krate();
@@ -55,10 +53,9 @@ fn impl_openapi_type(ident : &Ident, generics : &Generics) -> TokenStream2
 	}
 }
 
-fn expand(tokens : TokenStream) -> Result<TokenStream2, Error>
+pub fn expand_request_body(input : DeriveInput) -> Result<TokenStream>
 {
 	let krate = super::krate();
-	let input : DeriveInput = syn::parse(tokens)?;
 	let ident = input.ident;
 	let generics = input.generics;
@@ -66,7 +63,7 @@ fn expand(tokens : TokenStream) -> Result<TokenStream2, Error>
 		.filter(|attr| attr.path.segments.iter().last().map(|segment| segment.ident.to_string()) == Some("supported_types".to_string()))
 		.flat_map(|attr|
 			syn::parse2::<MimeList>(attr.tokens)
-				.map(|list| Box::new(list.0.into_iter().map(Ok)) as Box<dyn Iterator<Item = Result<Path, Error>>>)
+				.map(|list| Box::new(list.0.into_iter().map(Ok)) as Box<dyn Iterator<Item = Result<Path>>>)
 				.unwrap_or_else(|err| Box::new(iter::once(Err(err)))))
 		.collect_to_result()?;
@@ -90,10 +87,3 @@ fn expand(tokens : TokenStream) -> Result<TokenStream2, Error>
 		#impl_openapi_type
 	})
 }
-
-pub fn expand_request_body(tokens : TokenStream) -> TokenStream
-{
-	expand(tokens)
-		.unwrap_or_else(|err| err.to_compile_error())
-		.into()
-}


@@ -1,9 +1,5 @@
-use crate::{
-	method::Method,
-	util::CollectToResult
-};
-use proc_macro::TokenStream;
-use proc_macro2::TokenStream as TokenStream2;
+use crate::{method::Method, util::CollectToResult};
+use proc_macro2::{Ident, TokenStream};
 use quote::quote;
 use syn::{
 	parenthesized,
@@ -11,7 +7,7 @@ use syn::{
 	punctuated::Punctuated,
 	DeriveInput,
 	Error,
-	Ident,
+	Result,
 	Token
 };
 use std::{iter, str::FromStr};
@@ -20,7 +16,7 @@ struct MethodList(Punctuated<Ident, Token![,]>);
 
 impl Parse for MethodList
 {
-	fn parse(input: ParseStream) -> Result<Self, Error>
+	fn parse(input: ParseStream) -> Result<Self>
 	{
 		let content;
 		let _paren = parenthesized!(content in input);
@@ -29,10 +25,9 @@ impl Parse for MethodList
 	}
 }
 
-fn expand(tokens : TokenStream) -> Result<TokenStream2, Error>
+pub fn expand_resource(input : DeriveInput) -> Result<TokenStream>
 {
 	let krate = super::krate();
-	let input : DeriveInput = syn::parse(tokens)?;
 	let ident = input.ident;
 	let name = ident.to_string();
@@ -46,7 +41,7 @@ fn expand(tokens : TokenStream) -> Result<TokenStream2, Error>
 			let mod_ident = method.mod_ident(&name);
 			let ident = method.setup_ident(&name);
 			Ok(quote!(#mod_ident::#ident(&mut route);))
-		})) as Box<dyn Iterator<Item = Result<TokenStream2, Error>>>,
+		})) as Box<dyn Iterator<Item = Result<TokenStream>>>,
 		Err(err) => Box::new(iter::once(Err(err)))
 	}).collect_to_result()?;
@@ -65,10 +60,3 @@ fn expand(tokens : TokenStream) -> Result<TokenStream2, Error>
 		}
 	})
 }
-
-pub fn expand_resource(tokens : TokenStream) -> TokenStream
-{
-	expand(tokens)
-		.unwrap_or_else(|err| err.to_compile_error())
-		.into()
-}


@@ -1,12 +1,10 @@
 use crate::util::{CollectToResult, remove_parens};
 use lazy_static::lazy_static;
-use proc_macro::TokenStream;
-use proc_macro2::TokenStream as TokenStream2;
+use proc_macro2::{Ident, TokenStream};
 use quote::{format_ident, quote};
 use regex::Regex;
 use std::iter;
 use syn::{
-	parse_macro_input,
 	spanned::Spanned,
 	Attribute,
 	Data,
@@ -14,10 +12,10 @@ use syn::{
 	Error,
 	Fields,
 	GenericParam,
-	Ident,
 	LitStr,
 	Path,
 	PathSegment,
+	Result,
 	Type,
 	Variant
 };
@@ -40,7 +38,7 @@ struct ErrorVariant
 	display : Option<LitStr>
 }
 
-fn process_variant(variant : Variant) -> Result<ErrorVariant, Error>
+fn process_variant(variant : Variant) -> Result<ErrorVariant>
 {
 	let status = match variant.attrs.iter()
 		.find(|attr| attr.path.segments.iter().last().map(|segment| segment.ident.to_string()) == Some("status".to_string()))
@@ -114,7 +112,7 @@ lazy_static! {
 
 impl ErrorVariant
 {
-	fn fields_pat(&self) -> TokenStream2
+	fn fields_pat(&self) -> TokenStream
 	{
 		let mut fields = self.fields.iter().map(|field| &field.ident).peekable();
 		if fields.peek().is_none() {
@@ -126,7 +124,7 @@ impl ErrorVariant
 		}
 	}
 
-	fn to_display_match_arm(&self, formatter_ident : &Ident, enum_ident : &Ident) -> Result<TokenStream2, Error>
+	fn to_display_match_arm(&self, formatter_ident : &Ident, enum_ident : &Ident) -> Result<TokenStream>
 	{
 		let ident = &self.ident;
 		let display = self.display.as_ref().ok_or_else(|| Error::new(self.ident.span(), "Missing display string for this variant"))?;
@@ -142,7 +140,7 @@ impl ErrorVariant
 		})
 	}
 
-	fn into_match_arm(self, krate : &TokenStream2, enum_ident : &Ident) -> TokenStream2
+	fn into_match_arm(self, krate : &TokenStream, enum_ident : &Ident) -> TokenStream
 	{
 		let ident = &self.ident;
 		let fields_pat = self.fields_pat();
@@ -177,7 +175,7 @@ impl ErrorVariant
 		}
 	}
 
-	fn were(&self) -> Option<TokenStream2>
+	fn were(&self) -> Option<TokenStream>
 	{
 		match self.from_ty.as_ref() {
 			Some((_, ty)) => Some(quote!( #ty : ::std::error::Error )),
@@ -186,10 +184,9 @@
 	}
 }
 
-fn expand(tokens : TokenStream) -> Result<TokenStream2, Error>
+pub fn expand_resource_error(input : DeriveInput) -> Result<TokenStream>
 {
 	let krate = super::krate();
-	let input = parse_macro_input::parse::<DeriveInput>(tokens)?;
 	let ident = input.ident;
 	let generics = input.generics;
@@ -228,7 +225,7 @@ fn expand(tokens : TokenStream) -> Result<TokenStream2, Error>
 		})
 	};
 
-	let mut from_impls : Vec<TokenStream2> = Vec::new();
+	let mut from_impls : Vec<TokenStream> = Vec::new();
 	for var in &variants
 	{
@@ -290,10 +287,3 @@ fn expand(tokens : TokenStream) -> Result<TokenStream2, Error>
 		#( #from_impls )*
 	})
 }
-
-pub fn expand_resource_error(tokens : TokenStream) -> TokenStream
-{
-	expand(tokens)
-		.unwrap_or_else(|err| err.to_compile_error())
-		.into()
-}


@@ -1,8 +1,4 @@
-use proc_macro2::{
-	Delimiter,
-	TokenStream as TokenStream2,
-	TokenTree
-};
+use proc_macro2::{Delimiter, TokenStream, TokenTree};
 use std::iter;
 use syn::Error;
@@ -33,7 +29,7 @@ where
 }
 
-pub fn remove_parens(input : TokenStream2) -> TokenStream2
+pub fn remove_parens(input : TokenStream) -> TokenStream
 {
 	let iter = input.into_iter().flat_map(|tt| {
 		if let TokenTree::Group(group) = &tt