From 7570aefcc218ce0e7d2a6a6a45d696675c87ec63 Mon Sep 17 00:00:00 2001
From: unspeaker
Date: Tue, 6 May 2025 00:48:14 +0300
Subject: [PATCH 1/2] proc: simplify expose macro

---
 Cargo.lock              |   1 +
 proc/Cargo.toml         |   5 +-
 proc/src/lib.rs         |  24 ++-
 proc/src/proc_expose.rs | 393 ++++++++++++++++------------------
 proc/src/proc_view.rs   | 270 ++++++++++++---------------
 5 files changed, 288 insertions(+), 405 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 4e5ce2b..0df7fbe 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -976,6 +976,7 @@ dependencies = [
 name = "tengri_proc"
 version = "0.13.0"
 dependencies = [
+ "heck",
  "proc-macro2",
  "quote",
  "syn",
diff --git a/proc/Cargo.toml b/proc/Cargo.toml
index aa2cd24..6001947 100644
--- a/proc/Cargo.toml
+++ b/proc/Cargo.toml
@@ -8,6 +8,7 @@ edition = { workspace = true }
 proc-macro = true
 
 [dependencies]
-syn = { version = "2", features = ["full", "extra-traits"] }
-quote = { version = "1" }
+syn = { version = "2", features = ["full", "extra-traits"] }
+quote = { version = "1" }
 proc-macro2 = { version = "1", features = ["span-locations"] }
+heck = { version = "0.5" }
diff --git a/proc/src/lib.rs b/proc/src/lib.rs
index 52c8814..16dd8f0 100644
--- a/proc/src/lib.rs
+++ b/proc/src/lib.rs
@@ -1,6 +1,6 @@
 extern crate proc_macro;
 
-pub(crate) use std::collections::{HashMap, BTreeMap};
+pub(crate) use std::collections::{BTreeMap, BTreeSet};
 pub(crate) use std::cmp::Ordering;
 pub(crate) use proc_macro::TokenStream;
 pub(crate) use proc_macro2::{
     TokenStream as TokenStream2, TokenTree,
@@ -10,22 +10,38 @@ pub(crate) use proc_macro2::{
 pub(crate) use syn::{
     parse, parse_macro_input, parse_quote as pq, braced, bracketed, parenthesized, Token,
-    Arm, Expr, Attribute, Meta, MetaList, Path, PathSegment, PathArguments, ImplItem, LitStr, Type,
+    Arm, Expr, Attribute, Meta, MetaList, Path, PathSegment, PathArguments,
+    ImplItem, ImplItemFn, LitStr, Type, ItemImpl, ReturnType, Signature,
     parse::{Parse, ParseStream, Result},
     token::{PathSep, Brace},
     punctuated::Punctuated,
 };
 pub(crate) use quote::{quote, TokenStreamExt, ToTokens};
+pub(crate) use heck::AsKebabCase;
 
 mod proc_view;
 mod proc_expose;
 
 #[proc_macro_attribute]
 pub fn view (meta: TokenStream, item: TokenStream) -> TokenStream {
-    self::proc_view::view_impl(meta.into(), item.into()).into()
+    use self::proc_view::{ViewDef, ViewMeta, ViewImpl};
+    write_macro(ViewDef(
+        parse_macro_input!(meta as ViewMeta),
+        parse_macro_input!(item as ViewImpl),
+    ))
 }
 
 #[proc_macro_attribute]
 pub fn expose (meta: TokenStream, item: TokenStream) -> TokenStream {
-    self::proc_expose::expose_impl(meta.into(), item.into()).into()
+    use self::proc_expose::{ExposeDef, ExposeMeta, ExposeImpl};
+    write_macro(ExposeDef(
+        parse_macro_input!(meta as ExposeMeta),
+        parse_macro_input!(item as ExposeImpl),
+    ))
+}
+
+fn write_macro <T: ToTokens> (t: T) -> TokenStream {
+    let mut out = TokenStream2::new();
+    t.to_tokens(&mut out);
+    out.into()
 }
diff --git a/proc/src/proc_expose.rs b/proc/src/proc_expose.rs
index 2c34b14..bf7ae58 100644
--- a/proc/src/proc_expose.rs
+++ b/proc/src/proc_expose.rs
@@ -1,103 +1,76 @@
 use crate::*;
-use syn::parse::discouraged::Speculative;
 
-pub(crate) fn expose_impl (meta: TokenStream, data: TokenStream) -> TokenStream {
-    let mut out = TokenStream2::new();
-    ExposeDefinition {
-        meta: parse_macro_input!(meta as ExposeMeta),
-        data: parse_macro_input!(data as ExposeImpl),
-    }.to_tokens(&mut out);
-    out.into()
+#[derive(Debug, Clone)]
+pub(crate) struct ExposeDef(pub(crate) ExposeMeta, pub(crate) ExposeImpl);
+
+#[derive(Debug, Clone)]
+pub(crate) struct ExposeMeta;
+
+#[derive(Debug, Clone)]
+pub(crate) struct ExposeImpl {
+    block: ItemImpl,
+    exposed: BTreeMap<ExposeType, BTreeMap<String, Ident>>,
 }
 
 #[derive(Debug, Clone)]
-struct ExposeDefinition {
-    meta: ExposeMeta,
-    data: ExposeImpl,
-}
+struct ExposeArm(String, Ident);
 
-impl Parse for ExposeDefinition {
+#[derive(Debug, Clone)]
+struct ExposeSym(LitStr);
+
+#[derive(Debug, Clone)]
+struct ExposeType(Box<Type>);
+
+impl Parse for ExposeMeta {
     fn parse (input: ParseStream) -> Result<Self> {
-        Ok(Self {
-            meta: input.parse::<ExposeMeta>()?,
-            data: input.parse::<ExposeImpl>()?,
-        })
+        Ok(Self)
     }
 }
 
-impl ToTokens for ExposeDefinition {
+impl Parse for ExposeImpl {
+    fn parse (input: ParseStream) -> Result<Self> {
+        let block = input.parse::<ItemImpl>()?;
+        let mut exposed: BTreeMap<ExposeType, BTreeMap<String, Ident>> = Default::default();
+        for item in block.items.iter() {
+            if let ImplItem::Fn(ImplItemFn { sig: Signature { ident, output, .. }, .. }) = item {
+                if let ReturnType::Type(_, return_type) = output {
+                    let return_type = ExposeType(return_type.clone());
+                    if !exposed.contains_key(&return_type) {
+                        exposed.insert(return_type.clone(), Default::default());
+                    }
+                    let values = exposed.get_mut(&return_type).unwrap();
+                    let key = format!(":{}", AsKebabCase(format!("{}", &ident)));
+                    if values.contains_key(&key) {
+                        return Err(input.error(format!("already defined: {key}")))
+                    }
+                    values.insert(key, ident.clone());
+                } else {
+                    return Err(input.error("output type must be specified"))
+                }
+            }
+        }
+        Ok(Self { block, exposed })
+    }
+}
+
+impl ToTokens for ExposeDef {
     fn to_tokens (&self, out: &mut TokenStream2) {
-        let Self { meta, data } = self;
+        let Self(meta, data) = self;
         for token in quote! { #data } {
             out.append(token)
         }
     }
 }
 
-#[derive(Debug, Clone)]
-struct ExposeMeta {}
-
-impl Parse for ExposeMeta {
-    fn parse (input: ParseStream) -> Result<Self> {
-        Ok(Self {})
-    }
-}
-
-#[derive(Debug, Clone)]
-struct ExposeImpl {
-    target: Ident,
-    items: Vec<ImplItem>,
-    types: BTreeMap<ExposeType, BTreeMap<ExposeSym, ExposeArm>>,
-}
-
-impl Parse for ExposeImpl {
-    fn parse (input: ParseStream) -> Result<Self> {
-        let _impl = input.parse::<Token![impl]>()?;
-        let target = input.parse::<Ident>()?;
-        let group;
-        let brace = braced!(group in input);
-        let mut items = vec![];
-        let mut types: BTreeMap<ExposeType, BTreeMap<ExposeSym, ExposeArm>> = Default::default();
-        while !group.is_empty() {
-            let fork = group.fork();
-            if let Ok(block) = fork.parse::<ExposeBlock>() {
-                let t = block.type_.into();
-                if let Some(values) = types.get_mut(&t) {
-                    for (key, value) in block.values.into_iter() {
-                        if values.contains_key(&key) {
-                            return Err(input.error(format!("{key:?} ({t:?}): already exists")))
-                        } else {
-                            values.insert(key, value);
-                        }
-                    }
-                } else {
-                    types.insert(t, block.values);
-                }
-                group.advance_to(&fork);
-                continue
-            }
-            let fork = group.fork();
-            if let Ok(item) = fork.parse::<ImplItem>() {
-                items.push(item);
-                group.advance_to(&fork);
-                continue
-            }
-            return Err(input.error(
-                "expected either item or #[tengri::expose(type)] { \":key\" => value }"
-            ));
-        }
-        Ok(Self { target, items, types })
-    }
-}
-
 impl ToTokens for ExposeImpl {
     fn to_tokens (&self, out: &mut TokenStream2) {
-        let Self { target, items, types } = self;
-        for token in quote! { impl #target { #(#items)* } } {
+        let Self { block, exposed } = self;
+        let target = &self.block.self_ty;
+        for token in quote! { #block } {
             out.append(token);
         }
-        for (t, variants) in types.iter() {
-            let predef = match format!("{}", quote! { #t }).as_str() {
+        for (t, variants) in exposed.iter() {
+            let predefined = match format!("{}", quote! { #t }).as_str() {
                 "bool" => vec![
                     quote! { ::tengri::dsl::Value::Sym(":true") => true },
                    quote! { ::tengri::dsl::Value::Sym(":false") => false },
@@ -113,7 +86,7 @@ impl ToTokens for ExposeImpl {
                 impl ::tengri::dsl::Context<#t> for #target {
                     fn get (&self, dsl: &::tengri::dsl::Value) -> Option<#t> {
                         Some(match dsl {
-                            #(#predef,)*
+                            #(#predefined,)*
                            #(#values,)*
                             _ => return None
                         })
@@ -127,61 +100,9 @@ impl ToTokens for ExposeImpl {
         }
     }
 }
 
-#[derive(Debug, Clone)]
-struct ExposeBlock {
-    type_: Type,
-    values: BTreeMap<ExposeSym, ExposeArm>,
-}
-
-impl Parse for ExposeBlock {
-    fn parse (input: ParseStream) -> Result<Self> {
-        let _ = input.parse::<Token![#]>()?;
-
-        let group;
-        let bracket = bracketed!(group in input);
-        let path = group.parse::<Path>()?;
-        let type_ = if
-            path.segments.get(0).map(|x|x.ident.to_string()) == Some("tengri".to_string()) &&
-            path.segments.get(1).map(|x|x.ident.to_string()) == Some("expose".to_string())
-        {
-            let token;
-            let paren = parenthesized!(token in group);
-            token.parse::<Type>()?
-        } else {
-            return Err(input.error("expected #[tengri::expose(type)]"))
-        };
-
-        let group;
-        let brace = braced!(group in input);
-        let mut values = BTreeMap::new();
-        while !group.is_empty() {
-            let arm = group.parse::<ExposeArm>()?;
-            values.insert(arm.key.clone(), arm);
-            let _ = group.parse::<Token![,]>()?;
-        }
-        Ok(Self { type_, values })
-    }
-}
-
-#[derive(Debug, Clone)]
-struct ExposeArm {
-    key: ExposeSym,
-    value: Expr
-}
-
-impl Parse for ExposeArm {
-    fn parse (input: ParseStream) -> Result<Self> {
-        let key = input.parse::<LitStr>()?.into();
-        let _ = input.parse::<Token![=]>()?;
-        let _ = input.parse::<Token![>]>()?;
-        let value = input.parse::<Expr>()?;
-        Ok(Self { key, value })
-    }
-}
-
 impl ToTokens for ExposeArm {
     fn to_tokens (&self, out: &mut TokenStream2) {
-        let Self { key, value } = self;
+        let Self(key, value) = self;
         out.append(Punct::new(':', Joint));
         out.append(Punct::new(':', Alone));
         out.append(Ident::new("tengri", Span::call_site()));
@@ -196,7 +117,7 @@ impl ToTokens for ExposeArm {
         out.append(Ident::new("Sym", Span::call_site()));
         out.append(Group::new(Delimiter::Parenthesis, {
             let mut out = TokenStream2::new();
-            out.append(key.0.token());
+            out.append(LitStr::new(&key, Span::call_site()).token());
             out
         }));
         out.append(Punct::new('=', Joint));
@@ -207,9 +128,6 @@ impl ToTokens for ExposeArm {
     }
 }
 
-#[derive(Debug, Clone)]
-struct ExposeSym(LitStr);
-
 impl From<LitStr> for ExposeSym {
     fn from (this: LitStr) -> Self {
         Self(this)
     }
 }
@@ -242,12 +160,9 @@ impl PartialEq for ExposeSym {
 impl Eq for ExposeSym {}
 
-#[derive(Debug, Clone)]
-struct ExposeType(Type);
-
 impl From<Type> for ExposeType {
     fn from (this: Type) -> Self {
-        Self(this)
+        Self(Box::new(this))
     }
 }
@@ -284,109 +199,107 @@ impl ToTokens for ExposeType {
 }
 
 #[cfg(test)] #[test] fn test_expose_definition () {
-    let parsed: ExposeImpl = pq! {
-        //#[tengri_proc::expose]
-        impl Something {
-            #[tengri::expose(bool)] {
-                ":bool1" => true || false,
-            }
-            fn something () {}
-        }
-    };
-    // FIXME:
-    //assert_eq!(
-        //format!("{}", quote! { #parsed }),
-        //format!("{}", quote! {
-            //impl Something {
-                //fn something () {}
-            //}
-            //impl ::tengri::Context<bool> for Something {
-                //fn get (&self, dsl: &::tengri::Value) -> Option<bool> {
-                    //Some(match dsl {
-                        //::tengri::Value::Sym(":true") => true,
-                        //::tengri::Value::Sym(":false") => false,
-                        //::tengri::Value::Sym(":bool1") => true || false,
-                        //_ => return None
-                    //})
-                //}
-            //}
-        //})
-    //);
+    // TODO
+    //let parsed: ExposeImpl = pq! {
+        ////#[tengri_proc::expose]
+        //impl Something {
+            //fn something () -> bool {}
+        //}
+    //};
+    //// FIXME:
+    ////assert_eq!(
+        ////format!("{}", quote! { #parsed }),
+        ////format!("{}", quote! 
{ + ////impl Something { + ////fn something () {} + ////} + ////impl ::tengri::Context for Something { + ////fn get (&self, dsl: &::tengri::Value) -> Option { + ////Some(match dsl { + ////::tengri::Value::Sym(":true") => true, + ////::tengri::Value::Sym(":false") => false, + ////::tengri::Value::Sym(":bool1") => true || false, + ////_ => return None + ////}) + ////} + ////} + ////}) + ////); - let parsed: ExposeImpl = pq! { - //#[tengri_proc::expose] - impl Something { - #[tengri::expose(bool)] { - ":bool1" => true || false, - } - #[tengri::expose(u16)] { - ":u161" => 0 + 1, - } - #[tengri::expose(usize)] { - ":usize1" => 1 + 2, - } - #[tengri::expose(Arc)] { - ":arcstr1" => "foo".into(), - } - #[tengri::expose(Option>)] { - ":optarcstr1" => Some("bar".into()), - ":optarcstr2" => Some("baz".into()), - } - fn something () {} - } - }; - // FIXME: - //assert_eq!( - //format!("{}", quote! { #parsed }), - //format!("{}", quote! { - //impl Something { - //fn something () {} + //let parsed: ExposeImpl = pq! { + ////#[tengri_proc::expose] + //impl Something { + //#[tengri::expose(bool)] { + //":bool1" => true || false, //} - //impl ::tengri::Context> for Something { - //fn get (&self, dsl: &::tengri::Value) -> Option> { - //Some(match dsl { - //::tengri::Value::Sym(":arcstr1") => "foo".into(), - //_ => return None - //}) - //} + //#[tengri::expose(u16)] { + //":u161" => 0 + 1, //} - //impl ::tengri::Context>> for Something { - //fn get (&self, dsl: &::tengri::Value) -> Option>> { - //Some(match dsl { - //::tengri::Value::Sym(":optarcstr1") => Some("bar".into()), - //::tengri::Value::Sym(":optarcstr2") => Some("baz".into()), - //_ => return None - //}) - //} + //#[tengri::expose(usize)] { + //":usize1" => 1 + 2, //} - //impl ::tengri::Context for Something { - //fn get (&self, dsl: &::tengri::Value) -> Option { - //Some(match dsl { - //::tengri::Value::Sym(":true") => true, - //::tengri::Value::Sym(":false") => false, - //::tengri::Value::Sym(":bool1") => true || false, - //_ => return None - //}) - //} + //#[tengri::expose(Arc)] { + //":arcstr1" => "foo".into(), //} - //impl ::tengri::Context for Something { - //fn get (&self, dsl: &::tengri::Value) -> Option { - //Some(match dsl { - //::tengri::Value::Num(n) => *n as u16, - //::tengri::Value::Sym(":u161") => 0 + 1, - //_ => return None - //}) - //} + //#[tengri::expose(Option>)] { + //":optarcstr1" => Some("bar".into()), + //":optarcstr2" => Some("baz".into()), //} - //impl ::tengri::Context for Something { - //fn get (&self, dsl: &::tengri::Value) -> Option { - //Some(match dsl { - //::tengri::Value::Num(n) => *n as usize, - //::tengri::Value::Sym(":usize1") => 1 + 2, - //_ => return None - //}) - //} - //} - //}) - //) + //fn something () {} + //} + //}; + //// FIXME: + ////assert_eq!( + ////format!("{}", quote! { #parsed }), + ////format!("{}", quote! 
{ + ////impl Something { + ////fn something () {} + ////} + ////impl ::tengri::Context> for Something { + ////fn get (&self, dsl: &::tengri::Value) -> Option> { + ////Some(match dsl { + ////::tengri::Value::Sym(":arcstr1") => "foo".into(), + ////_ => return None + ////}) + ////} + ////} + ////impl ::tengri::Context>> for Something { + ////fn get (&self, dsl: &::tengri::Value) -> Option>> { + ////Some(match dsl { + ////::tengri::Value::Sym(":optarcstr1") => Some("bar".into()), + ////::tengri::Value::Sym(":optarcstr2") => Some("baz".into()), + ////_ => return None + ////}) + ////} + ////} + ////impl ::tengri::Context for Something { + ////fn get (&self, dsl: &::tengri::Value) -> Option { + ////Some(match dsl { + ////::tengri::Value::Sym(":true") => true, + ////::tengri::Value::Sym(":false") => false, + ////::tengri::Value::Sym(":bool1") => true || false, + ////_ => return None + ////}) + ////} + ////} + ////impl ::tengri::Context for Something { + ////fn get (&self, dsl: &::tengri::Value) -> Option { + ////Some(match dsl { + ////::tengri::Value::Num(n) => *n as u16, + ////::tengri::Value::Sym(":u161") => 0 + 1, + ////_ => return None + ////}) + ////} + ////} + ////impl ::tengri::Context for Something { + ////fn get (&self, dsl: &::tengri::Value) -> Option { + ////Some(match dsl { + ////::tengri::Value::Num(n) => *n as usize, + ////::tengri::Value::Sym(":usize1") => 1 + 2, + ////_ => return None + ////}) + ////} + ////} + ////}) + ////) } diff --git a/proc/src/proc_view.rs b/proc/src/proc_view.rs index 2a32d27..c70cb31 100644 --- a/proc/src/proc_view.rs +++ b/proc/src/proc_view.rs @@ -1,52 +1,20 @@ use crate::*; -pub(crate) fn view_impl (meta: TokenStream, data: TokenStream) -> TokenStream { - let mut out = TokenStream2::new(); - ViewDefinition { - meta: parse_macro_input!(meta as ViewMeta), - data: parse_macro_input!(data as ViewImpl), - }.to_tokens(&mut out); - out.into() -} +#[derive(Debug, Clone)] +pub(crate) struct ViewDef(pub(crate) ViewMeta, pub(crate) ViewImpl); #[derive(Debug, Clone)] -struct ViewDefinition { - meta: ViewMeta, - data: ViewImpl, -} - -#[derive(Debug, Clone)] -struct ViewMeta { +pub(crate) struct ViewMeta { output: Ident, } #[derive(Debug, Clone)] -struct ViewImpl { - target: Ident, - items: Vec, - syms: Vec, +pub(crate) struct ViewImpl { + block: ItemImpl, + exposed: BTreeMap, } -#[derive(Debug, Clone)] -struct ViewItem { - item: ImplItem, - expose: Option, -} - -#[derive(Debug, Clone)] -struct ViewSym { - symbol: Literal, - name: Ident, -} - -impl Parse for ViewDefinition { - fn parse (input: ParseStream) -> Result { - Ok(Self { - meta: input.parse::()?, - data: input.parse::()?, - }) - } -} +struct ViewArm(String, Ident); impl Parse for ViewMeta { fn parse (input: ParseStream) -> Result { @@ -58,68 +26,52 @@ impl Parse for ViewMeta { impl Parse for ViewImpl { fn parse (input: ParseStream) -> Result { - let _ = input.parse::()?; - let mut syms = vec![]; - Ok(Self { - target: input.parse::()?, - items: { - let group; - let brace = braced!(group in input); - let mut items = vec![]; - while !group.is_empty() { - let item = group.parse::()?; - if let Some(expose) = &item.expose { - if let ImplItem::Fn(ref item) = item.item { - let symbol = expose.clone(); - let name = item.sig.ident.clone(); - syms.push(ViewSym { symbol, name }) - } else { - return Err( - input.error("only fn items can be exposed to #[tengri::view]") - ) - } - } - items.push(item); + let block = input.parse::()?; + let mut exposed: BTreeMap = Default::default(); + for item in block.items.iter() { + if 
let ImplItem::Fn(ImplItemFn { sig: Signature { ident, .. }, .. }) = item {
+                let key = format!(":{}", AsKebabCase(format!("{}", &ident)));
+                if exposed.contains_key(&key) {
+                    return Err(input.error(format!("already defined: {ident}")));
                 }
+                exposed.insert(key, ident.clone());
+            }
+        }
+        Ok(Self { block, exposed })
     }
 }
-
-impl Parse for ViewItem {
-    fn parse (input: ParseStream) -> Result<Self> {
-        let mut expose = None;
-        Ok(Self {
-            item: {
-                let mut item = input.parse::<ImplItem>()?;
-                if let ImplItem::Fn(ref mut item) = item {
-                    item.attrs = item.attrs.iter().filter(|attr| {
-                        if let Attribute {
-                            meta: Meta::List(MetaList { path, tokens, .. }), ..
-                        } = attr
-                        && path.segments.len() == 2
-                        && nth_segment_is(&path.segments, 0, "tengri")
-                        && nth_segment_is(&path.segments, 1, "view")
-                        && let Some(TokenTree::Literal(name)) = tokens.clone().into_iter().next()
-                        {
-                            expose = Some(name);
-                            return false
-                        }
-                        true
-                    }).map(|x|x.clone()).collect();
-                };
-                item
-            },
-            expose,
-        })
-    }
-}
-
-impl ToTokens for ViewSym {
+impl ToTokens for ViewDef {
     fn to_tokens (&self, out: &mut TokenStream2) {
+        let Self(ViewMeta { output }, ViewImpl { block, exposed }) = self;
+        let ident = &block.self_ty;
+        let exposed: Vec<_> = exposed.iter().map(|(k,v)|ViewArm(k.clone(), v.clone())).collect();
+        for token in quote! {
+            #block
+            /// Generated by [tengri_proc].
+            impl ::tengri::output::Content<#output> for #ident {
+                fn content (&self) -> impl Render<#output> {
+                    self.size.of(::tengri::output::View(self, self.config.view))
+                }
+            }
+            /// Generated by [tengri_proc].
+            impl<'a> ::tengri::output::ViewContext<'a, #output> for #ident {
+                fn get_content_sym (&'a self, value: &Value<'a>) -> Option> {
+                    match value {
+                        #(#exposed)*
+                        _ => panic!("expected Sym(content), got: {value:?}")
+                    }
+                }
+            }
+        } {
+            out.append(token)
+        }
+    }
+}
+
+impl ToTokens for ViewArm {
+    fn to_tokens (&self, out: &mut TokenStream2) {
+        let Self(key, value) = self;
         out.append(Punct::new(':', Joint));
         out.append(Punct::new(':', Alone));
         out.append(Ident::new("tengri", Span::call_site()));
@@ -134,7 +86,7 @@ impl ToTokens for ViewSym {
         out.append(Ident::new("Sym", Span::call_site()));
         out.append(Group::new(Delimiter::Parenthesis, {
             let mut out = TokenStream2::new();
-            out.append(self.symbol.clone());
+            out.append(LitStr::new(key, Span::call_site()).token());
             out
         }));
         out.append(Punct::new('=', Joint));
@@ -144,7 +96,7 @@ impl ToTokens for ViewSym {
             let mut out = TokenStream2::new();
             out.append(Ident::new("self", Span::call_site()));
             out.append(Punct::new('.', Alone));
-            out.append(self.name.clone());
+            out.append(value.clone());
             out.append(Group::new(Delimiter::Parenthesis, TokenStream2::new()));
             out.append(Punct::new('.', Alone));
             out.append(Ident::new("boxed", Span::call_site()));
@@ -155,43 +107,42 @@ impl ToTokens for ViewSym {
     }
 }
 
-impl ToTokens for ViewItem {
-    fn to_tokens (&self, out: &mut TokenStream2) {
-        self.item.to_tokens(out)
-    }
-}
-
-impl ToTokens for ViewDefinition {
-    fn to_tokens (&self, out: &mut TokenStream2) {
-        let Self {
-            meta: ViewMeta { output },
-            data: ViewImpl { target, syms, items },
-        } = self;
-        for token in quote! {
-            /// Augmented by [tengri_proc].
-            impl #target {
-                #(#items)*
-            }
-            /// Generated by [tengri_proc].
-            impl ::tengri::output::Content<#output> for #target {
-                fn content (&self) -> impl Render<#output> {
-                    self.size.of(::tengri::output::View(self, self.config.view))
-                }
-            }
-            /// Generated by [tengri_proc].
-            impl<'a> ::tengri::output::ViewContext<'a, #output> for #target {
-                fn get_content_sym (&'a self, value: &Value<'a>) -> Option> {
-                    match value {
-                        #(#syms)*
-                        _ => panic!("expected Sym(content), got: {value:?}")
-                    }
-                }
-            }
-        } {
-            out.append(token)
-        }
-    }
-}
+//impl ToTokens for ViewSym {
+    //fn to_tokens (&self, out: &mut TokenStream2) {
+        //out.append(Punct::new(':', Joint));
+        //out.append(Punct::new(':', Alone));
+        //out.append(Ident::new("tengri", Span::call_site()));
+        //out.append(Punct::new(':', Joint));
+        //out.append(Punct::new(':', Alone));
+        //out.append(Ident::new("dsl", Span::call_site()));
+        //out.append(Punct::new(':', Joint));
+        //out.append(Punct::new(':', Alone));
+        //out.append(Ident::new("Value", Span::call_site()));
+        //out.append(Punct::new(':', Joint));
+        //out.append(Punct::new(':', Alone));
+        //out.append(Ident::new("Sym", Span::call_site()));
+        //out.append(Group::new(Delimiter::Parenthesis, {
+            //let mut out = TokenStream2::new();
+            //out.append(self.symbol.clone());
+            //out
+        //}));
+        //out.append(Punct::new('=', Joint));
+        //out.append(Punct::new('>', Alone));
+        //out.append(Ident::new("Some", Span::call_site()));
+        //out.append(Group::new(Delimiter::Parenthesis, {
+            //let mut out = TokenStream2::new();
+            //out.append(Ident::new("self", Span::call_site()));
+            //out.append(Punct::new('.', Alone));
+            //out.append(self.name.clone());
+            //out.append(Group::new(Delimiter::Parenthesis, TokenStream2::new()));
+            //out.append(Punct::new('.', Alone));
+            //out.append(Ident::new("boxed", Span::call_site()));
+            //out.append(Group::new(Delimiter::Parenthesis, TokenStream2::new()));
+            //out
+        //}));
+        //out.append(Punct::new(',', Alone));
+    //}
+//}
 
 fn nth_segment_is (segments: &Punctuated<PathSegment, PathSep>, n: usize, x: &str) -> bool {
     if let Some(PathSegment { arguments: PathArguments::None, ident, .. }) = segments.get(n) {
@@ -202,17 +153,17 @@ fn nth_segment_is (segments: &Punctuated, n: usize, x: &st
     return false
 }
 
-impl std::cmp::PartialEq for ViewItem {
-    fn eq (&self, other: &Self) -> bool {
-        self.item == other.item && (format!("{:?}", self.expose) == format!("{:?}", other.expose))
-    }
-}
+//impl std::cmp::PartialEq for ViewItem {
+    //fn eq (&self, other: &Self) -> bool {
+        //self.item == other.item && (format!("{:?}", self.expose) == format!("{:?}", other.expose))
+    //}
+//}
 
-impl std::cmp::PartialEq for ViewSym {
-    fn eq (&self, other: &Self) -> bool {
-        self.name == other.name && (format!("{}", self.symbol) == format!("{}", other.symbol))
-    }
-}
+//impl std::cmp::PartialEq for ViewSym {
+    //fn eq (&self, other: &Self) -> bool {
+        //self.name == other.name && (format!("{}", self.symbol) == format!("{}", other.symbol))
+    //}
+//}
 
 #[cfg(test)] #[test] fn test_view_meta () {
     let x: ViewMeta = pq! { SomeOutput };
@@ -221,6 +172,7 @@ impl std::cmp::PartialEq for ViewSym {
 }
 
 #[cfg(test)] #[test] fn test_view_impl () {
+    // TODO
     let x: ViewImpl = pq! {
         impl Foo {
             /// docstring1
@@ -232,20 +184,20 @@ impl std::cmp::PartialEq for ViewSym {
         }
     };
     let expected_target: Ident = pq! { Foo };
-    assert_eq!(x.target, expected_target);
-    assert_eq!(x.items.len(), 2);
-    assert_eq!(x.items[0].item, pq! {
-        /// docstring1
-        #[bar] fn a_view () {}
-    });
-    assert_eq!(x.items[1].item, pq! {
-        #[baz]
-        /// docstring2
-        #[baz] fn is_not_view () {}
-    });
-    assert_eq!(x.syms, vec![
-        ViewSym { symbol: pq! { ":view1" }, name: pq! { a_view }, },
-    ]);
+    //assert_eq!(x.target, expected_target);
+    //assert_eq!(x.items.len(), 2);
+    //assert_eq!(x.items[0].item, pq! {
+        ///// docstring1
+        //#[bar] fn a_view () {}
+    //});
+    //assert_eq!(x.items[1].item, pq! {
+        //#[baz]
+        ///// docstring2
+        //#[baz] fn is_not_view () {}
+    //});
+    //assert_eq!(x.syms, vec![
+        //ViewArm( { symbol: pq! { ":view1" }, name: pq! { a_view }, },
+    //]);
 }
 
 #[cfg(test)] #[test] fn test_view_definition () {

From 7df7cb839c14c0e010ce36519c75ffacc0e76c18 Mon Sep 17 00:00:00 2001
From: unspeaker
Date: Tue, 6 May 2025 21:33:53 +0300
Subject: [PATCH 2/2] wip: proc: command macro

---
 proc/src/lib.rs          |  15 +++-
 proc/src/proc_command.rs | 144 +++++++++++++++++++++++++++++++++++++++
 2 files changed, 157 insertions(+), 2 deletions(-)
 create mode 100644 proc/src/proc_command.rs

diff --git a/proc/src/lib.rs b/proc/src/lib.rs
index 16dd8f0..38c8b15 100644
--- a/proc/src/lib.rs
+++ b/proc/src/lib.rs
@@ -2,6 +2,7 @@ extern crate proc_macro;
 
 pub(crate) use std::collections::{BTreeMap, BTreeSet};
 pub(crate) use std::cmp::Ordering;
+pub(crate) use std::sync::Arc;
 pub(crate) use proc_macro::TokenStream;
 pub(crate) use proc_macro2::{
     TokenStream as TokenStream2, TokenTree,
@@ -11,16 +12,17 @@ pub(crate) use syn::{
     parse, parse_macro_input, parse_quote as pq, braced, bracketed, parenthesized, Token,
     Arm, Expr, Attribute, Meta, MetaList, Path, PathSegment, PathArguments,
-    ImplItem, ImplItemFn, LitStr, Type, ItemImpl, ReturnType, Signature,
+    ImplItem, ImplItemFn, LitStr, Type, ItemImpl, ReturnType, Signature, FnArg, PatType,
     parse::{Parse, ParseStream, Result},
     token::{PathSep, Brace},
     punctuated::Punctuated,
 };
 pub(crate) use quote::{quote, TokenStreamExt, ToTokens};
-pub(crate) use heck::AsKebabCase;
+pub(crate) use heck::{AsKebabCase, AsUpperCamelCase};
 
 mod proc_view;
 mod proc_expose;
+mod proc_command;
 
 #[proc_macro_attribute]
 pub fn view (meta: TokenStream, item: TokenStream) -> TokenStream {
@@ -40,6 +42,15 @@ pub fn expose (meta: TokenStream, item: TokenStream) -> TokenStream {
     ))
 }
 
+#[proc_macro_attribute]
+pub fn command (meta: TokenStream, item: TokenStream) -> TokenStream {
+    use self::proc_command::{CommandDef, CommandMeta, CommandImpl};
+    write_macro(CommandDef(
+        parse_macro_input!(meta as CommandMeta),
+        parse_macro_input!(item as CommandImpl),
+    ))
+}
+
 fn write_macro <T: ToTokens> (t: T) -> TokenStream {
     let mut out = TokenStream2::new();
     t.to_tokens(&mut out);
     out.into()
diff --git a/proc/src/proc_command.rs b/proc/src/proc_command.rs
new file mode 100644
index 0000000..ac1e2a9
--- /dev/null
+++ b/proc/src/proc_command.rs
@@ -0,0 +1,144 @@
+use crate::*;
+
+#[derive(Debug, Clone)]
+pub(crate) struct CommandDef(pub(crate) CommandMeta, pub(crate) CommandImpl);
+
+#[derive(Debug, Clone)]
+pub(crate) struct CommandMeta {
+    target: Ident,
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct CommandImpl(ItemImpl, BTreeMap<Arc<str>, CommandArm>);
+
+#[derive(Debug, Clone)]
+struct CommandVariant(Ident, Vec<FnArg>);
+
+#[derive(Debug, Clone)]
+struct CommandArm(Arc<str>, Ident, Vec<FnArg>, ReturnType);
+
+impl Parse for CommandMeta {
+    fn parse (input: ParseStream) -> Result<Self> {
+        Ok(Self {
+            target: input.parse::<Ident>()?,
+        })
+    }
+}
+
+impl Parse for CommandImpl {
+    fn parse (input: ParseStream) -> Result<Self> {
+        let block = input.parse::<ItemImpl>()?;
+        let mut exposed: BTreeMap<Arc<str>, CommandArm> = Default::default();
+        for item in block.items.iter() {
+            if let ImplItem::Fn(ImplItemFn {
+                sig: Signature { ident, inputs, output, .. }, ..
+            }) = item {
+                let key: Arc<str> =
+                    format!("{}", AsKebabCase(format!("{}", &ident))).into();
+                let variant: Arc<str> =
+                    format!("{}", AsUpperCamelCase(format!("{}", &ident))).into();
+                if exposed.contains_key(&key) {
+                    return Err(input.error(format!("already defined: {ident}")));
+                }
+                exposed.insert(key, CommandArm(
+                    variant,
+                    ident.clone(),
+                    inputs.iter().map(|x|x.clone()).collect(),
+                    output.clone(),
+                ));
+            }
+        }
+        Ok(Self(block, exposed))
+    }
+}
+
+impl ToTokens for CommandDef {
+    fn to_tokens (&self, out: &mut TokenStream2) {
+        let Self(CommandMeta { target }, CommandImpl(block, exposed)) = self;
+        let enumeration = &block.self_ty;
+        let definitions = exposed.values().map(|x|CommandVariant(
+            x.1.clone(),
+            x.2.clone(),
+        ));
+        let implementations = exposed.values().map(|x|CommandArm(
+            x.0.clone(),
+            x.1.clone(),
+            x.2.clone(),
+            x.3.clone(),
+        ));
+        for token in quote! {
+            #block
+            enum #enumeration {
+                #(#definitions)*
+            }
+            impl Command<#target> for #enumeration {
+                fn execute (self, state: &mut #target) -> Perhaps {
+                    match self {
+                        #(#implementations)*
+                    }
+                }
+            }
+        } {
+            out.append(token)
+        }
+    }
+}
+
+impl ToTokens for CommandVariant {
+    fn to_tokens (&self, out: &mut TokenStream2) {
+        let Self(ident, args) = self;
+        out.append(LitStr::new(&format!("{}", ident), Span::call_site())
+            .token());
+        out.append(Group::new(Delimiter::Parenthesis, {
+            let mut out = TokenStream2::new();
+            for arg in args.iter() {
+                if let FnArg::Typed(PatType { attrs, pat, colon_token, ty }) = arg {
+                    out.append(LitStr::new(
+                        &format!("{}", quote! { #ty }),
+                        Span::call_site()
+                    ).token());
+                    out.append(Punct::new(',', Alone));
+                }
+            }
+            out
+        }));
+        out.append(Punct::new(',', Alone));
+    }
+}
+
+impl ToTokens for CommandArm {
+    fn to_tokens (&self, out: &mut TokenStream2) {
+        let Self(symbol, ident, args, returnType) = self;
+        out.append(Punct::new(':', Joint));
+        out.append(Punct::new(':', Alone));
+        out.append(Ident::new("tengri", Span::call_site()));
+        out.append(Punct::new(':', Joint));
+        out.append(Punct::new(':', Alone));
+        out.append(Ident::new("dsl", Span::call_site()));
+        out.append(Punct::new(':', Joint));
+        out.append(Punct::new(':', Alone));
+        out.append(Ident::new("Value", Span::call_site()));
+        out.append(Punct::new(':', Joint));
+        out.append(Punct::new(':', Alone));
+        out.append(Ident::new("Sym", Span::call_site()));
+        out.append(Group::new(Delimiter::Parenthesis, {
+            let mut out = TokenStream2::new();
+            for arg in args.iter() {
+                out.append(LitStr::new(&symbol, Span::call_site()).token());
+            }
+            out
+        }));
+        out.append(Punct::new('=', Joint));
+        out.append(Punct::new('>', Alone));
+        out.append(LitStr::new(&format!("{}", ident), Span::call_site()).token());
+        out.append(Group::new(Delimiter::Parenthesis, {
+            let mut out = TokenStream2::new();
+            for arg in args.iter() {
+                // TODO
+                //out.append(LitStr::new(&symbol, Span::call_site()).token());
+            }
+            out
+        }));
+        out.append(Punct::new(',', Alone));
+    }
+}
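
Usage sketch (not part of either patch): ExposeImpl::parse in patch 1 reads an ordinary impl block, kebab-cases each method name into a ":symbol" key, and groups methods by return type so that one ::tengri::dsl::Context<T> impl is emitted per type. The type and method names below (Something, flag_enabled, retry_count) are hypothetical, and how the generated arm evaluates each method depends on unchanged parts of ExposeImpl::to_tokens that this diff does not show; the expansion described in the comments follows the commented-out test expectations above and is an assumption, not a verified output.

    // Hypothetical input to the simplified macro, assuming it is used as
    // #[tengri_proc::expose] on an inherent impl block.
    #[tengri_proc::expose]
    impl Something {
        fn flag_enabled (&self) -> bool { true }   // expected to be reachable as Value::Sym(":flag-enabled")
        fn retry_count (&self) -> usize { 3 }      // expected to be reachable as Value::Sym(":retry-count")
    }
    // Expected expansion, roughly: the impl block itself, followed by
    // `impl ::tengri::dsl::Context<bool> for Something` and
    // `impl ::tengri::dsl::Context<usize> for Something`, whose get() matches the
    // predefined symbols (":true"/":false" for bool, numeric values per the
    // commented-out test expectations) plus the generated per-method arms.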