diff --git a/Cargo.lock b/Cargo.lock index 0df7fbe..4e5ce2b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -976,7 +976,6 @@ dependencies = [ name = "tengri_proc" version = "0.13.0" dependencies = [ - "heck", "proc-macro2", "quote", "syn", diff --git a/proc/src/lib.rs b/proc/src/lib.rs index 38c8b15..ac176da 100644 --- a/proc/src/lib.rs +++ b/proc/src/lib.rs @@ -2,7 +2,6 @@ extern crate proc_macro; pub(crate) use std::collections::{BTreeMap, BTreeSet}; pub(crate) use std::cmp::Ordering; -pub(crate) use std::sync::Arc; pub(crate) use proc_macro::TokenStream; pub(crate) use proc_macro2::{ TokenStream as TokenStream2, TokenTree, @@ -11,48 +10,39 @@ pub(crate) use proc_macro2::{ pub(crate) use syn::{ parse, parse_macro_input, parse_quote as pq, braced, bracketed, parenthesized, Token, - Arm, Expr, Attribute, Meta, MetaList, Path, PathSegment, PathArguments, - ImplItem, ImplItemFn, LitStr, Type, ItemImpl, ReturnType, Signature, FnArg, PatType, + Arm, Expr, Attribute, Meta, MetaList, Path, PathSegment, PathArguments, ImplItem, LitStr, Type, parse::{Parse, ParseStream, Result}, token::{PathSep, Brace}, punctuated::Punctuated, }; pub(crate) use quote::{quote, TokenStreamExt, ToTokens}; -pub(crate) use heck::{AsKebabCase, AsUpperCamelCase}; mod proc_view; mod proc_expose; -mod proc_command; #[proc_macro_attribute] pub fn view (meta: TokenStream, item: TokenStream) -> TokenStream { - use self::proc_view::{ViewDef, ViewMeta, ViewImpl}; - write_macro(ViewDef( + use self::proc_view::{ViewDefinition, ViewMeta, ViewImpl}; + write_macro(ViewDefinition( parse_macro_input!(meta as ViewMeta), - parse_macro_input!(item as ViewImpl), + parse_macro_input!(item as ViewImpl), )) } #[proc_macro_attribute] pub fn expose (meta: TokenStream, item: TokenStream) -> TokenStream { - use self::proc_expose::{ExposeDef, ExposeMeta, ExposeImpl}; - write_macro(ExposeDef( + use self::proc_expose::{ExposeDefinition, ExposeMeta, ExposeImpl}; + write_macro(ExposeDefinition( parse_macro_input!(meta as 
ExposeMeta), - parse_macro_input!(item as ExposeImpl), - )) -} - -#[proc_macro_attribute] -pub fn command (meta: TokenStream, item: TokenStream) -> TokenStream { - use self::proc_command::{CommandDef, CommandMeta, CommandImpl}; - write_macro(CommandDef( - parse_macro_input!(meta as CommandMeta), - parse_macro_input!(item as CommandImpl), + parse_macro_input!(item as ExposeImpl), )) } fn write_macro (t: T) -> TokenStream { let mut out = TokenStream2::new(); t.to_tokens(&mut out); out.into() } diff --git a/proc/src/proc_command.rs b/proc/src/proc_command.rs deleted file mode 100644 index ac1e2a9..0000000 --- a/proc/src/proc_command.rs +++ /dev/null @@ -1,144 +0,0 @@ -use crate::*; - -#[derive(Debug, Clone)] -pub(crate) struct CommandDef(pub(crate) CommandMeta, pub(crate) CommandImpl); - -#[derive(Debug, Clone)] -pub(crate) struct CommandMeta { - target: Ident, -} - -#[derive(Debug, Clone)] -pub(crate) struct CommandImpl(ItemImpl, BTreeMap, CommandArm>); - -#[derive(Debug, Clone)] -struct CommandVariant(Ident, Vec); - -#[derive(Debug, Clone)] -struct CommandArm(Arc, Ident, Vec, ReturnType); - -impl Parse for CommandMeta { - fn parse (input: ParseStream) -> Result { - Ok(Self { - target: input.parse::()?, - }) - } -} - -impl Parse for CommandImpl { - fn parse (input: ParseStream) -> Result { - let block = input.parse::()?; - let mut exposed: BTreeMap, CommandArm> = Default::default(); - for item in block.items.iter() { - if let ImplItem::Fn(ImplItemFn { - sig: Signature { ident, inputs, output, .. }, .. 
- }) = item { - let key: Arc = - format!("{}", AsKebabCase(format!("{}", &ident))).into(); - let variant: Arc = - format!("{}", AsUpperCamelCase(format!("{}", &ident))).into(); - if exposed.contains_key(&key) { - return Err(input.error(format!("already defined: {ident}"))); - } - exposed.insert(key, CommandArm( - variant, - ident.clone(), - inputs.iter().map(|x|x.clone()).collect(), - output.clone(), - )); - } - } - Ok(Self(block, exposed)) - } -} - -impl ToTokens for CommandDef { - fn to_tokens (&self, out: &mut TokenStream2) { - let Self(CommandMeta { target }, CommandImpl(block, exposed)) = self; - let enumeration = &block.self_ty; - let definitions = exposed.values().map(|x|CommandVariant( - x.1.clone(), - x.2.clone(), - )); - let implementations = exposed.values().map(|x|CommandArm( - x.0.clone(), - x.1.clone(), - x.2.clone(), - x.3.clone(), - )); - for token in quote! { - #block - enum #enumeration { - #(#definitions)* - } - impl Command<#target> for #enumeration { - fn execute (self, state: &mut #target) -> Perhaps { - match self { - #(#implementations)* - } - } - } - } { - out.append(token) - } - } -} - -impl ToTokens for CommandVariant { - fn to_tokens (&self, out: &mut TokenStream2) { - let Self(ident, args) = self; - out.append(LitStr::new(&format!("{}", ident), Span::call_site()) - .token()); - out.append(Group::new(Delimiter::Parenthesis, { - let mut out = TokenStream2::new(); - for arg in args.iter() { - if let FnArg::Typed(PatType { attrs, pat, colon_token, ty }) = arg { - out.append(LitStr::new( - &format!("{}", quote! 
{ #ty }), - Span::call_site() - ).token()); - out.append(Punct::new(',', Alone)); - } - } - out - })); - out.append(Punct::new(',', Alone)); - } -} - -impl ToTokens for CommandArm { - fn to_tokens (&self, out: &mut TokenStream2) { - let Self(symbol, ident, args, returnType) = self; - out.append(Punct::new(':', Joint)); - out.append(Punct::new(':', Alone)); - out.append(Ident::new("tengri", Span::call_site())); - out.append(Punct::new(':', Joint)); - out.append(Punct::new(':', Alone)); - out.append(Ident::new("dsl", Span::call_site())); - out.append(Punct::new(':', Joint)); - out.append(Punct::new(':', Alone)); - out.append(Ident::new("Value", Span::call_site())); - out.append(Punct::new(':', Joint)); - out.append(Punct::new(':', Alone)); - out.append(Ident::new("Sym", Span::call_site())); - out.append(Group::new(Delimiter::Parenthesis, { - let mut out = TokenStream2::new(); - for arg in args.iter() { - out.append(LitStr::new(&symbol, Span::call_site()).token()); - } - out - })); - out.append(Punct::new('=', Joint)); - out.append(Punct::new('>', Alone)); - out.append(LitStr::new(&format!("{}", ident), Span::call_site()).token()); - out.append(Group::new(Delimiter::Parenthesis, { - let mut out = TokenStream2::new(); - for arg in args.iter() { - // TODO - //out.append(LitStr::new(&symbol, Span::call_site()).token()); - } - out - })); - out.append(Punct::new(',', Alone)); - } -} diff --git a/proc/src/proc_expose.rs b/proc/src/proc_expose.rs index bf7ae58..e153fa6 100644 --- a/proc/src/proc_expose.rs +++ b/proc/src/proc_expose.rs @@ -1,7 +1,8 @@ use crate::*; +use syn::parse::discouraged::Speculative; #[derive(Debug, Clone)] -pub(crate) struct ExposeDef(pub(crate) ExposeMeta, pub(crate) ExposeImpl); +pub(crate) struct ExposeDefinition(pub(crate) ExposeMeta, pub(crate) ExposeImpl); #[derive(Debug, Clone)] pub(crate) struct ExposeMeta; @@ -9,11 +10,14 @@ pub(crate) struct ExposeMeta; #[derive(Debug, Clone)] pub(crate) struct ExposeImpl { block: ItemImpl, - exposed: 
BTreeMap>, + exposed: BTreeMap>, } #[derive(Debug, Clone)] -struct ExposeArm(String, Ident); +struct ExposeArm { + key: ExposeSym, + value: Expr +} #[derive(Debug, Clone)] struct ExposeSym(LitStr); @@ -30,17 +34,17 @@ impl Parse for ExposeMeta { impl Parse for ExposeImpl { fn parse (input: ParseStream) -> Result { let block = input.parse::()?; - let mut exposed: BTreeMap> = Default::default(); + let mut exposed: BTreeMap> = Default::default(); for item in block.items.iter() { if let ImplItem::Fn(ImplItemFn { sig: Signature { ident, output, .. }, .. }) = item { if let ReturnType::Type(_, return_type) = output { let return_type = ExposeType(return_type.clone()); - if !exposed.contains_key(&return_type) { - exposed.insert(return_type.clone(), Default::default()); + if !exposed.contains_key(return_type) { + exposed.insert(return_type.clone(), Default::default()) } let values = exposed.get_mut(&return_type).unwrap(); - let key = format!(":{}", AsKebabCase(format!("{}", &ident))); - if values.contains_key(&key) { + let key = format!(":{}", AsKebabCase(&ident)); + if values.contains_key(key) { return Err(input.error(format!("already defined: {key}"))) } values.insert(key, ident.clone()); @@ -53,9 +57,9 @@ impl Parse for ExposeImpl { } } -impl ToTokens for ExposeDef { +impl ToTokens for ExposeDefinition { fn to_tokens (&self, out: &mut TokenStream2) { - let Self(meta, data) = self; + let Self { meta, data } = self; for token in quote! { #data } { out.append(token) } @@ -64,12 +68,11 @@ impl ToTokens for ExposeDef { impl ToTokens for ExposeImpl { fn to_tokens (&self, out: &mut TokenStream2) { - let Self { block, exposed } = self; - let target = &self.block.self_ty; - for token in quote! { #block } { + let Self { target, items, types } = self; + for token in quote! { impl #target { #(#items)* } } { out.append(token); } - for (t, variants) in exposed.iter() { + for (t, variants) in types.iter() { let predefined = match format!("{}", quote! 
{ #t }).as_str() { "bool" => vec![ quote! { ::tengri::dsl::Value::Sym(":true") => true }, @@ -102,7 +105,7 @@ impl ToTokens for ExposeImpl { impl ToTokens for ExposeArm { fn to_tokens (&self, out: &mut TokenStream2) { - let Self(key, value) = self; + let Self { key, value } = self; out.append(Punct::new(':', Joint)); out.append(Punct::new(':', Alone)); out.append(Ident::new("tengri", Span::call_site())); @@ -117,7 +120,7 @@ impl ToTokens for ExposeArm { out.append(Ident::new("Sym", Span::call_site())); out.append(Group::new(Delimiter::Parenthesis, { let mut out = TokenStream2::new(); - out.append(LitStr::new(&key, Span::call_site()).token()); + out.append(key.0.token()); out })); out.append(Punct::new('=', Joint)); @@ -162,7 +165,7 @@ impl Eq for ExposeSym {} impl From for ExposeType { fn from (this: Type) -> Self { - Self(Box::new(this)) + Self(this) } } diff --git a/proc/src/proc_view.rs b/proc/src/proc_view.rs index c70cb31..bb805e9 100644 --- a/proc/src/proc_view.rs +++ b/proc/src/proc_view.rs @@ -1,7 +1,7 @@ use crate::*; #[derive(Debug, Clone)] -pub(crate) struct ViewDef(pub(crate) ViewMeta, pub(crate) ViewImpl); +pub(crate) struct ViewDefinition(pub(crate) ViewMeta, pub(crate) ViewImpl); #[derive(Debug, Clone)] pub(crate) struct ViewMeta { @@ -11,11 +11,9 @@ pub(crate) struct ViewMeta { #[derive(Debug, Clone)] pub(crate) struct ViewImpl { block: ItemImpl, - exposed: BTreeMap, + exposed: BTreeSet, } -struct ViewArm(String, Ident); - impl Parse for ViewMeta { fn parse (input: ParseStream) -> Result { Ok(Self { @@ -26,39 +24,55 @@ impl Parse for ViewMeta { impl Parse for ViewImpl { fn parse (input: ParseStream) -> Result { - let block = input.parse::()?; - let mut exposed: BTreeMap = Default::default(); - for item in block.items.iter() { - if let ImplItem::Fn(ImplItemFn { sig: Signature { ident, .. }, .. 
}) = item { - let key = format!(":{}", AsKebabCase(format!("{}", &ident))); - if exposed.contains_key(&key) { - return Err(input.error(format!("already defined: {ident}"))); + let _ = input.parse::()?; + let mut syms = vec![]; + Ok(Self { + target: input.parse::()?, + items: { + let group; + let brace = braced!(group in input); + let mut items = vec![]; + while !group.is_empty() { + let item = group.parse::()?; + if let Some(expose) = &item.expose { + if let ImplItem::Fn(ref item) = item.item { + let symbol = expose.clone(); + let name = item.sig.ident.clone(); + syms.push(ViewSym { symbol, name }) + } else { + return Err( + input.error("only fn items can be exposed to #[tengri::view]") + ) + } + } + items.push(item); } - exposed.insert(key, ident.clone()); - } - } - Ok(Self { block, exposed }) + items + }, + syms, + }) } } -impl ToTokens for ViewDef { +impl ToTokens for ViewDefinition { fn to_tokens (&self, out: &mut TokenStream2) { - let Self(ViewMeta { output }, ViewImpl { block, exposed }) = self; - let ident = &block.self_ty; - let exposed: Vec<_> = exposed.iter().map(|(k,v)|ViewArm(k.clone(), v.clone())).collect(); + let Self(ViewMeta { output }, ViewImpl { target, syms, items }) = self; for token in quote! { - #block + /// Augmented by [tengri_proc]. + impl #target { + #(#items)* + } /// Generated by [tengri_proc]. - impl ::tengri::output::Content<#output> for #ident { + impl ::tengri::output::Content<#output> for #target { fn content (&self) -> impl Render<#output> { self.size.of(::tengri::output::View(self, self.config.view)) } } /// Generated by [tengri_proc]. 
- impl<'a> ::tengri::output::ViewContext<'a, #output> for #ident { + impl<'a> ::tengri::output::ViewContext<'a, #output> for #target { fn get_content_sym (&'a self, value: &Value<'a>) -> Option> { match value { - #(#exposed)* + #(#syms)* _ => panic!("expected Sym(content), got: {value:?}") } } @@ -69,9 +83,8 @@ impl ToTokens for ViewDef { } } -impl ToTokens for ViewArm { +impl ToTokens for ViewSym { fn to_tokens (&self, out: &mut TokenStream2) { - let Self(key, value) = self; out.append(Punct::new(':', Joint)); out.append(Punct::new(':', Alone)); out.append(Ident::new("tengri", Span::call_site())); @@ -86,7 +99,7 @@ impl ToTokens for ViewArm { out.append(Ident::new("Sym", Span::call_site())); out.append(Group::new(Delimiter::Parenthesis, { let mut out = TokenStream2::new(); - out.append(LitStr::new(key, Span::call_site()).token()); + out.append(self.symbol.clone()); out })); out.append(Punct::new('=', Joint)); @@ -96,7 +109,7 @@ impl ToTokens for ViewArm { let mut out = TokenStream2::new(); out.append(Ident::new("self", Span::call_site())); out.append(Punct::new('.', Alone)); - out.append(value.clone()); + out.append(self.name.clone()); out.append(Group::new(Delimiter::Parenthesis, TokenStream2::new())); out.append(Punct::new('.', Alone)); out.append(Ident::new("boxed", Span::call_site())); @@ -107,43 +120,6 @@ impl ToTokens for ViewArm { } } -//impl ToTokens for ViewSym { - //fn to_tokens (&self, out: &mut TokenStream2) { - //out.append(Punct::new(':', Joint)); - //out.append(Punct::new(':', Alone)); - //out.append(Ident::new("tengri", Span::call_site())); - //out.append(Punct::new(':', Joint)); - //out.append(Punct::new(':', Alone)); - //out.append(Ident::new("dsl", Span::call_site())); - //out.append(Punct::new(':', Joint)); - //out.append(Punct::new(':', Alone)); - //out.append(Ident::new("Value", Span::call_site())); - //out.append(Punct::new(':', Joint)); - //out.append(Punct::new(':', Alone)); - //out.append(Ident::new("Sym", Span::call_site())); - 
//out.append(Group::new(Delimiter::Parenthesis, { - //let mut out = TokenStream2::new(); - //out.append(self.symbol.clone()); - //out - //})); - //out.append(Punct::new('=', Joint)); - //out.append(Punct::new('>', Alone)); - //out.append(Ident::new("Some", Span::call_site())); - //out.append(Group::new(Delimiter::Parenthesis, { - //let mut out = TokenStream2::new(); - //out.append(Ident::new("self", Span::call_site())); - //out.append(Punct::new('.', Alone)); - //out.append(self.name.clone()); - //out.append(Group::new(Delimiter::Parenthesis, TokenStream2::new())); - //out.append(Punct::new('.', Alone)); - //out.append(Ident::new("boxed", Span::call_site())); - //out.append(Group::new(Delimiter::Parenthesis, TokenStream2::new())); - //out - //})); - //out.append(Punct::new(',', Alone)); - //} -//} - fn nth_segment_is (segments: &Punctuated, n: usize, x: &str) -> bool { if let Some(PathSegment { arguments: PathArguments::None, ident, .. }) = segments.get(n) { if format!("{ident}") == x { @@ -153,17 +129,17 @@ fn nth_segment_is (segments: &Punctuated, n: usize, x: &st return false } -//impl std::cmp::PartialEq for ViewItem { - //fn eq (&self, other: &Self) -> bool { - //self.item == other.item && (format!("{:?}", self.expose) == format!("{:?}", other.expose)) - //} -//} +impl std::cmp::PartialEq for ViewItem { + fn eq (&self, other: &Self) -> bool { + self.item == other.item && (format!("{:?}", self.expose) == format!("{:?}", other.expose)) + } +} -//impl std::cmp::PartialEq for ViewSym { - //fn eq (&self, other: &Self) -> bool { - //self.name == other.name && (format!("{}", self.symbol) == format!("{}", other.symbol)) - //} -//} +impl std::cmp::PartialEq for ViewSym { + fn eq (&self, other: &Self) -> bool { + self.name == other.name && (format!("{}", self.symbol) == format!("{}", other.symbol)) + } +} #[cfg(test)] #[test] fn test_view_meta () { let x: ViewMeta = pq! 
{ SomeOutput }; @@ -172,7 +148,6 @@ fn nth_segment_is (segments: &Punctuated, n: usize, x: &st } #[cfg(test)] #[test] fn test_view_impl () { - // TODO let x: ViewImpl = pq! { impl Foo { /// docstring1 @@ -184,20 +159,20 @@ fn nth_segment_is (segments: &Punctuated, n: usize, x: &st } }; let expected_target: Ident = pq! { Foo }; - //assert_eq!(x.target, expected_target); - //assert_eq!(x.items.len(), 2); - //assert_eq!(x.items[0].item, pq! { - ///// docstring1 - //#[bar] fn a_view () {} - //}); - //assert_eq!(x.items[1].item, pq! { - //#[baz] - ///// docstring2 - //#[baz] fn is_not_view () {} - //}); - //assert_eq!(x.syms, vec![ - //ViewArm( { symbol: pq! { ":view1" }, name: pq! { a_view }, }, - //]); + assert_eq!(x.target, expected_target); + assert_eq!(x.items.len(), 2); + assert_eq!(x.items[0].item, pq! { + /// docstring1 + #[bar] fn a_view () {} + }); + assert_eq!(x.items[1].item, pq! { + #[baz] + /// docstring2 + #[baz] fn is_not_view () {} + }); + assert_eq!(x.syms, vec![ + ViewSym { symbol: pq! { ":view1" }, name: pq! { a_view }, }, + ]); } #[cfg(test)] #[test] fn test_view_definition () {