Compare commits


2 commits

SHA1        Message                       Date
7df7cb839c  wip: proc: command macro      2025-05-06 23:23:03 +03:00
7570aefcc2  proc: simplify expose macro   2025-05-06 21:33:46 +03:00
5 changed files with 272 additions and 95 deletions

Cargo.lock (generated)

@@ -976,6 +976,7 @@ dependencies = [
 name = "tengri_proc"
 version = "0.13.0"
 dependencies = [
+ "heck",
  "proc-macro2",
  "quote",
  "syn",

proc/src/lib.rs

@@ -2,6 +2,7 @@ extern crate proc_macro;
 pub(crate) use std::collections::{BTreeMap, BTreeSet};
 pub(crate) use std::cmp::Ordering;
+pub(crate) use std::sync::Arc;
 pub(crate) use proc_macro::TokenStream;
 pub(crate) use proc_macro2::{
     TokenStream as TokenStream2, TokenTree,
@@ -10,39 +11,48 @@ pub(crate) use proc_macro2::{
 pub(crate) use syn::{
     parse, parse_macro_input, parse_quote as pq,
     braced, bracketed, parenthesized, Token,
-    Arm, Expr, Attribute, Meta, MetaList, Path, PathSegment, PathArguments, ImplItem, LitStr, Type,
+    Arm, Expr, Attribute, Meta, MetaList, Path, PathSegment, PathArguments,
+    ImplItem, ImplItemFn, LitStr, Type, ItemImpl, ReturnType, Signature, FnArg, PatType,
     parse::{Parse, ParseStream, Result},
     token::{PathSep, Brace},
     punctuated::Punctuated,
 };
 pub(crate) use quote::{quote, TokenStreamExt, ToTokens};
+pub(crate) use heck::{AsKebabCase, AsUpperCamelCase};
 mod proc_view;
 mod proc_expose;
+mod proc_command;
 #[proc_macro_attribute]
 pub fn view (meta: TokenStream, item: TokenStream) -> TokenStream {
-    use self::proc_view::{ViewDefinition, ViewMeta, ViewImpl};
-    write_macro(ViewDefinition(
+    use self::proc_view::{ViewDef, ViewMeta, ViewImpl};
+    write_macro(ViewDef(
         parse_macro_input!(meta as ViewMeta),
-        parse_macro_input!(data as ViewImpl),
+        parse_macro_input!(item as ViewImpl),
     ))
 }
 #[proc_macro_attribute]
 pub fn expose (meta: TokenStream, item: TokenStream) -> TokenStream {
-    use self::proc_view::{ExposeDefinition, ExposeMeta, ExposeImpl};
-    write_macro(ExposeDefinition(
+    use self::proc_expose::{ExposeDef, ExposeMeta, ExposeImpl};
+    write_macro(ExposeDef(
         parse_macro_input!(meta as ExposeMeta),
-        parse_macro_input!(data as ExposeImpl),
+        parse_macro_input!(item as ExposeImpl),
+    ))
+}
+#[proc_macro_attribute]
+pub fn command (meta: TokenStream, item: TokenStream) -> TokenStream {
+    use self::proc_command::{CommandDef, CommandMeta, CommandImpl};
+    write_macro(CommandDef(
+        parse_macro_input!(meta as CommandMeta),
+        parse_macro_input!(item as CommandImpl),
     ))
 }
 fn write_macro <T: ToTokens> (t: T) -> TokenStream {
     let mut out = TokenStream2::new();
-    self::proc_expose::ExposeDefinition(
-        parse_macro_input!(meta as ExposeMeta),
-        parse_macro_input!(data as ExposeImpl),
-    ).to_tokens(&mut out);
+    t.to_tokens(&mut out);
     out.into()
 }
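
Editor's note, not part of the diff: the heck wrappers imported above drive the naming scheme used by the new code. Assuming heck 0.4 or later, where AsKebabCase and AsUpperCamelCase implement Display, they behave roughly like this:

    // Minimal sketch (assumes the heck crate; not code from this repository):
    use heck::{AsKebabCase, AsUpperCamelCase};

    fn main () {
        // a method ident such as `move_left` becomes the kebab-case key "move-left"
        // (prefixed with ':' in the view/expose macros) ...
        assert_eq!(format!("{}", AsKebabCase("move_left")), "move-left");
        // ... and the UpperCamelCase enum variant name `MoveLeft`
        assert_eq!(format!("{}", AsUpperCamelCase("move_left")), "MoveLeft");
    }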

proc/src/proc_command.rs (new file)

@@ -0,0 +1,144 @@
+use crate::*;
+#[derive(Debug, Clone)]
+pub(crate) struct CommandDef(pub(crate) CommandMeta, pub(crate) CommandImpl);
+#[derive(Debug, Clone)]
+pub(crate) struct CommandMeta {
+    target: Ident,
+}
+#[derive(Debug, Clone)]
+pub(crate) struct CommandImpl(ItemImpl, BTreeMap<Arc<str>, CommandArm>);
+#[derive(Debug, Clone)]
+struct CommandVariant(Ident, Vec<FnArg>);
+#[derive(Debug, Clone)]
+struct CommandArm(Arc<str>, Ident, Vec<FnArg>, ReturnType);
+impl Parse for CommandMeta {
+    fn parse (input: ParseStream) -> Result<Self> {
+        Ok(Self {
+            target: input.parse::<Ident>()?,
+        })
+    }
+}
+impl Parse for CommandImpl {
+    fn parse (input: ParseStream) -> Result<Self> {
+        let block = input.parse::<ItemImpl>()?;
+        let mut exposed: BTreeMap<Arc<str>, CommandArm> = Default::default();
+        for item in block.items.iter() {
+            if let ImplItem::Fn(ImplItemFn {
+                sig: Signature { ident, inputs, output, .. }, ..
+            }) = item {
+                let key: Arc<str> =
+                    format!("{}", AsKebabCase(format!("{}", &ident))).into();
+                let variant: Arc<str> =
+                    format!("{}", AsUpperCamelCase(format!("{}", &ident))).into();
+                if exposed.contains_key(&key) {
+                    return Err(input.error(format!("already defined: {ident}")));
+                }
+                exposed.insert(key, CommandArm(
+                    variant,
+                    ident.clone(),
+                    inputs.iter().map(|x|x.clone()).collect(),
+                    output.clone(),
+                ));
+            }
+        }
+        Ok(Self(block, exposed))
+    }
+}
+impl ToTokens for CommandDef {
+    fn to_tokens (&self, out: &mut TokenStream2) {
+        let Self(CommandMeta { target }, CommandImpl(block, exposed)) = self;
+        let enumeration = &block.self_ty;
+        let definitions = exposed.values().map(|x|CommandVariant(
+            x.1.clone(),
+            x.2.clone(),
+        ));
+        let implementations = exposed.values().map(|x|CommandArm(
+            x.0.clone(),
+            x.1.clone(),
+            x.2.clone(),
+            x.3.clone(),
+        ));
+        for token in quote! {
+            #block
+            enum #enumeration {
+                #(#definitions)*
+            }
+            impl Command<#target> for #enumeration {
+                fn execute (self, state: &mut #target) -> Perhaps<Self> {
+                    match self {
+                        #(#implementations)*
+                    }
+                }
+            }
+        } {
+            out.append(token)
+        }
+    }
+}
+impl ToTokens for CommandVariant {
+    fn to_tokens (&self, out: &mut TokenStream2) {
+        let Self(ident, args) = self;
+        out.append(LitStr::new(&format!("{}", ident), Span::call_site())
+            .token());
+        out.append(Group::new(Delimiter::Parenthesis, {
+            let mut out = TokenStream2::new();
+            for arg in args.iter() {
+                if let FnArg::Typed(PatType { attrs, pat, colon_token, ty }) = arg {
+                    out.append(LitStr::new(
+                        &format!("{}", quote! { #ty }),
+                        Span::call_site()
+                    ).token());
+                    out.append(Punct::new(',', Alone));
+                }
+            }
+            out
+        }));
+        out.append(Punct::new(',', Alone));
+    }
+}
+impl ToTokens for CommandArm {
+    fn to_tokens (&self, out: &mut TokenStream2) {
+        let Self(symbol, ident, args, returnType) = self;
+        out.append(Punct::new(':', Joint));
+        out.append(Punct::new(':', Alone));
+        out.append(Ident::new("tengri", Span::call_site()));
+        out.append(Punct::new(':', Joint));
+        out.append(Punct::new(':', Alone));
+        out.append(Ident::new("dsl", Span::call_site()));
+        out.append(Punct::new(':', Joint));
+        out.append(Punct::new(':', Alone));
+        out.append(Ident::new("Value", Span::call_site()));
+        out.append(Punct::new(':', Joint));
+        out.append(Punct::new(':', Alone));
+        out.append(Ident::new("Sym", Span::call_site()));
+        out.append(Group::new(Delimiter::Parenthesis, {
+            let mut out = TokenStream2::new();
+            for arg in args.iter() {
+                out.append(LitStr::new(&symbol, Span::call_site()).token());
+            }
+            out
+        }));
+        out.append(Punct::new('=', Joint));
+        out.append(Punct::new('>', Alone));
+        out.append(LitStr::new(&format!("{}", ident), Span::call_site()).token());
+        out.append(Group::new(Delimiter::Parenthesis, {
+            let mut out = TokenStream2::new();
+            for arg in args.iter() {
+                // TODO
+                //out.append(LitStr::new(&symbol, Span::call_site()).token());
+            }
+            out
+        }));
+        out.append(Punct::new(',', Alone));
+    }
+}
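
Editor's sketch, not part of the commit: judging from the quote! block in CommandDef::to_tokens, the #[command] attribute appears to aim at turning each method of the annotated impl into an enum variant plus a dispatch arm keyed by its kebab-case symbol. A hand-written approximation of that target shape follows; Command, Perhaps, Editor, and EditorCommand are illustrative stand-ins, since the diff does not show tengri's actual trait or type definitions.

    // Hedged, self-contained approximation of the intended expansion; all names
    // below are assumptions standing in for tengri's real items.
    type Perhaps<T> = Result<Option<T>, Box<dyn std::error::Error>>;

    trait Command<S> {
        fn execute (self, state: &mut S) -> Perhaps<Self> where Self: Sized;
    }

    struct Editor { cursor: usize }

    enum EditorCommand { MoveLeft, MoveRight }

    impl Command<Editor> for EditorCommand {
        fn execute (self, state: &mut Editor) -> Perhaps<Self> {
            match self {
                // in the macro, the arm for a method `move_left` would be keyed
                // by the kebab-case symbol "move-left"
                Self::MoveLeft  => { state.cursor = state.cursor.saturating_sub(1); Ok(None) }
                Self::MoveRight => { state.cursor += 1; Ok(None) }
            }
        }
    }

    fn main () {
        let mut editor = Editor { cursor: 1 };
        let _ = EditorCommand::MoveLeft.execute(&mut editor);
        assert_eq!(editor.cursor, 0);
    }

As committed, CommandVariant and CommandArm still emit string literals where identifiers and patterns belong, so the generated enum and match arms do not compile yet, which is consistent with the "wip" commit message.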

proc/src/proc_expose.rs

@@ -1,8 +1,7 @@
 use crate::*;
-use syn::parse::discouraged::Speculative;
 #[derive(Debug, Clone)]
-pub(crate) struct ExposeDefinition(pub(crate) ExposeMeta, pub(crate) ExposeImpl);
+pub(crate) struct ExposeDef(pub(crate) ExposeMeta, pub(crate) ExposeImpl);
 #[derive(Debug, Clone)]
 pub(crate) struct ExposeMeta;
@@ -10,14 +9,11 @@ pub(crate) struct ExposeMeta;
 #[derive(Debug, Clone)]
 pub(crate) struct ExposeImpl {
     block: ItemImpl,
-    exposed: BTreeMap<ExposeType, BTreeMap<ExposeSym, ExposeArm>>,
+    exposed: BTreeMap<ExposeType, BTreeMap<String, Ident>>,
 }
 #[derive(Debug, Clone)]
-struct ExposeArm {
-    key: ExposeSym,
-    value: Expr
-}
+struct ExposeArm(String, Ident);
 #[derive(Debug, Clone)]
 struct ExposeSym(LitStr);
@@ -34,17 +30,17 @@ impl Parse for ExposeMeta {
 impl Parse for ExposeImpl {
     fn parse (input: ParseStream) -> Result<Self> {
         let block = input.parse::<ItemImpl>()?;
-        let mut exposed: BTreeMap<ExposeType, BTreeMap<ExposeSym, ExposeArm>> = Default::default();
+        let mut exposed: BTreeMap<ExposeType, BTreeMap<String, Ident>> = Default::default();
         for item in block.items.iter() {
             if let ImplItem::Fn(ImplItemFn { sig: Signature { ident, output, .. }, .. }) = item {
                 if let ReturnType::Type(_, return_type) = output {
                     let return_type = ExposeType(return_type.clone());
-                    if !exposed.contains_key(return_type) {
-                        exposed.insert(return_type.clone(), Default::default())
+                    if !exposed.contains_key(&return_type) {
+                        exposed.insert(return_type.clone(), Default::default());
                     }
                     let values = exposed.get_mut(&return_type).unwrap();
-                    let key = format!(":{}", AsKebabCase(&ident));
-                    if values.contains_key(key) {
+                    let key = format!(":{}", AsKebabCase(format!("{}", &ident)));
+                    if values.contains_key(&key) {
                         return Err(input.error(format!("already defined: {key}")))
                     }
                     values.insert(key, ident.clone());
@@ -57,9 +53,9 @@ impl Parse for ExposeImpl {
     }
 }
-impl ToTokens for ExposeDefinition {
+impl ToTokens for ExposeDef {
     fn to_tokens (&self, out: &mut TokenStream2) {
-        let Self { meta, data } = self;
+        let Self(meta, data) = self;
         for token in quote! { #data } {
             out.append(token)
         }
@@ -68,11 +64,12 @@ impl ToTokens for ExposeDefinition {
 impl ToTokens for ExposeImpl {
     fn to_tokens (&self, out: &mut TokenStream2) {
-        let Self { target, items, types } = self;
-        for token in quote! { impl #target { #(#items)* } } {
+        let Self { block, exposed } = self;
+        let target = &self.block.self_ty;
+        for token in quote! { #block } {
             out.append(token);
         }
-        for (t, variants) in types.iter() {
+        for (t, variants) in exposed.iter() {
             let predefined = match format!("{}", quote! { #t }).as_str() {
                 "bool" => vec![
                     quote! { ::tengri::dsl::Value::Sym(":true") => true },
@@ -105,7 +102,7 @@ impl ToTokens for ExposeImpl {
 impl ToTokens for ExposeArm {
     fn to_tokens (&self, out: &mut TokenStream2) {
-        let Self { key, value } = self;
+        let Self(key, value) = self;
         out.append(Punct::new(':', Joint));
         out.append(Punct::new(':', Alone));
         out.append(Ident::new("tengri", Span::call_site()));
@@ -120,7 +117,7 @@ impl ToTokens for ExposeArm {
         out.append(Ident::new("Sym", Span::call_site()));
         out.append(Group::new(Delimiter::Parenthesis, {
             let mut out = TokenStream2::new();
-            out.append(key.0.token());
+            out.append(LitStr::new(&key, Span::call_site()).token());
             out
         }));
         out.append(Punct::new('=', Joint));
@@ -165,7 +162,7 @@ impl Eq for ExposeSym {}
 impl From<Type> for ExposeType {
     fn from (this: Type) -> Self {
-        Self(this)
+        Self(Box::new(this))
     }
 }
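
Editor's sketch, not part of the diff: the reworked ExposeImpl parser groups exposed methods by return type and rejects duplicate kebab-case keys. In plain std terms, with strings standing in for syn's Type and Ident, the bookkeeping looks roughly like this:

    use std::collections::BTreeMap;

    // Plain-std sketch of the grouping above; names and signature are illustrative.
    fn group (methods: &[(&str, &str)]) -> Result<BTreeMap<String, BTreeMap<String, String>>, String> {
        let mut exposed: BTreeMap<String, BTreeMap<String, String>> = Default::default();
        for (ident, return_type) in methods {
            if !exposed.contains_key(*return_type) {
                exposed.insert(return_type.to_string(), Default::default());
            }
            let values = exposed.get_mut(*return_type).unwrap();
            // roughly what format!(":{}", AsKebabCase(..)) yields for snake_case idents
            let key = format!(":{}", ident.replace('_', "-"));
            if values.contains_key(&key) {
                return Err(format!("already defined: {key}"));
            }
            values.insert(key, ident.to_string());
        }
        Ok(exposed)
    }

    fn main () {
        let grouped = group(&[("is_enabled", "bool"), ("item_count", "usize")]).unwrap();
        assert_eq!(grouped["bool"][":is-enabled"], "is_enabled");
        assert_eq!(grouped["usize"][":item-count"], "item_count");
    }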

proc/src/proc_view.rs

@@ -1,7 +1,7 @@
 use crate::*;
 #[derive(Debug, Clone)]
-pub(crate) struct ViewDefinition(pub(crate) ViewMeta, pub(crate) ViewImpl);
+pub(crate) struct ViewDef(pub(crate) ViewMeta, pub(crate) ViewImpl);
 #[derive(Debug, Clone)]
 pub(crate) struct ViewMeta {
@@ -11,9 +11,11 @@ pub(crate) struct ViewMeta {
 #[derive(Debug, Clone)]
 pub(crate) struct ViewImpl {
     block: ItemImpl,
-    exposed: BTreeSet<Ident>,
+    exposed: BTreeMap<String, Ident>,
 }
+struct ViewArm(String, Ident);
 impl Parse for ViewMeta {
     fn parse (input: ParseStream) -> Result<Self> {
         Ok(Self {
@@ -24,55 +26,39 @@ impl Parse for ViewMeta {
 impl Parse for ViewImpl {
     fn parse (input: ParseStream) -> Result<Self> {
-        let _ = input.parse::<Token![impl]>()?;
-        let mut syms = vec![];
-        Ok(Self {
-            target: input.parse::<Ident>()?,
-            items: {
-                let group;
-                let brace = braced!(group in input);
-                let mut items = vec![];
-                while !group.is_empty() {
-                    let item = group.parse::<ViewItem>()?;
-                    if let Some(expose) = &item.expose {
-                        if let ImplItem::Fn(ref item) = item.item {
-                            let symbol = expose.clone();
-                            let name = item.sig.ident.clone();
-                            syms.push(ViewSym { symbol, name })
-                        } else {
-                            return Err(
-                                input.error("only fn items can be exposed to #[tengri::view]")
-                            )
-                        }
-                    }
-                    items.push(item);
-                }
-                items
-            },
-            syms,
-        })
+        let block = input.parse::<ItemImpl>()?;
+        let mut exposed: BTreeMap<String, Ident> = Default::default();
+        for item in block.items.iter() {
+            if let ImplItem::Fn(ImplItemFn { sig: Signature { ident, .. }, .. }) = item {
+                let key = format!(":{}", AsKebabCase(format!("{}", &ident)));
+                if exposed.contains_key(&key) {
+                    return Err(input.error(format!("already defined: {ident}")));
+                }
+                exposed.insert(key, ident.clone());
+            }
+        }
+        Ok(Self { block, exposed })
     }
 }
-impl ToTokens for ViewDefinition {
+impl ToTokens for ViewDef {
     fn to_tokens (&self, out: &mut TokenStream2) {
-        let Self(ViewMeta { output }, ViewImpl { target, syms, items }) = self;
+        let Self(ViewMeta { output }, ViewImpl { block, exposed }) = self;
+        let ident = &block.self_ty;
+        let exposed: Vec<_> = exposed.iter().map(|(k,v)|ViewArm(k.clone(), v.clone())).collect();
         for token in quote! {
-            /// Augmented by [tengri_proc].
-            impl #target {
-                #(#items)*
-            }
+            #block
             /// Generated by [tengri_proc].
-            impl ::tengri::output::Content<#output> for #target {
+            impl ::tengri::output::Content<#output> for #ident {
                 fn content (&self) -> impl Render<#output> {
                     self.size.of(::tengri::output::View(self, self.config.view))
                 }
             }
             /// Generated by [tengri_proc].
-            impl<'a> ::tengri::output::ViewContext<'a, #output> for #target {
+            impl<'a> ::tengri::output::ViewContext<'a, #output> for #ident {
                 fn get_content_sym (&'a self, value: &Value<'a>) -> Option<RenderBox<'a, #output>> {
                     match value {
-                        #(#syms)*
+                        #(#exposed)*
                         _ => panic!("expected Sym(content), got: {value:?}")
                     }
                 }
@@ -83,8 +69,9 @@ impl ToTokens for ViewDefinition {
     }
 }
-impl ToTokens for ViewSym {
+impl ToTokens for ViewArm {
     fn to_tokens (&self, out: &mut TokenStream2) {
+        let Self(key, value) = self;
         out.append(Punct::new(':', Joint));
         out.append(Punct::new(':', Alone));
         out.append(Ident::new("tengri", Span::call_site()));
@@ -99,7 +86,7 @@ impl ToTokens for ViewSym {
         out.append(Ident::new("Sym", Span::call_site()));
         out.append(Group::new(Delimiter::Parenthesis, {
             let mut out = TokenStream2::new();
-            out.append(self.symbol.clone());
+            out.append(LitStr::new(key, Span::call_site()).token());
             out
         }));
         out.append(Punct::new('=', Joint));
@@ -109,7 +96,7 @@ impl ToTokens for ViewSym {
             let mut out = TokenStream2::new();
             out.append(Ident::new("self", Span::call_site()));
             out.append(Punct::new('.', Alone));
-            out.append(self.name.clone());
+            out.append(value.clone());
             out.append(Group::new(Delimiter::Parenthesis, TokenStream2::new()));
             out.append(Punct::new('.', Alone));
             out.append(Ident::new("boxed", Span::call_site()));
@@ -120,6 +107,43 @@ impl ToTokens for ViewSym {
     }
 }
+//impl ToTokens for ViewSym {
+//fn to_tokens (&self, out: &mut TokenStream2) {
+//out.append(Punct::new(':', Joint));
+//out.append(Punct::new(':', Alone));
+//out.append(Ident::new("tengri", Span::call_site()));
+//out.append(Punct::new(':', Joint));
+//out.append(Punct::new(':', Alone));
+//out.append(Ident::new("dsl", Span::call_site()));
+//out.append(Punct::new(':', Joint));
+//out.append(Punct::new(':', Alone));
+//out.append(Ident::new("Value", Span::call_site()));
+//out.append(Punct::new(':', Joint));
+//out.append(Punct::new(':', Alone));
+//out.append(Ident::new("Sym", Span::call_site()));
+//out.append(Group::new(Delimiter::Parenthesis, {
+//let mut out = TokenStream2::new();
+//out.append(self.symbol.clone());
+//out
+//}));
+//out.append(Punct::new('=', Joint));
+//out.append(Punct::new('>', Alone));
+//out.append(Ident::new("Some", Span::call_site()));
+//out.append(Group::new(Delimiter::Parenthesis, {
+//let mut out = TokenStream2::new();
+//out.append(Ident::new("self", Span::call_site()));
+//out.append(Punct::new('.', Alone));
+//out.append(self.name.clone());
+//out.append(Group::new(Delimiter::Parenthesis, TokenStream2::new()));
+//out.append(Punct::new('.', Alone));
+//out.append(Ident::new("boxed", Span::call_site()));
+//out.append(Group::new(Delimiter::Parenthesis, TokenStream2::new()));
+//out
+//}));
+//out.append(Punct::new(',', Alone));
+//}
+//}
 fn nth_segment_is (segments: &Punctuated<PathSegment, PathSep>, n: usize, x: &str) -> bool {
     if let Some(PathSegment { arguments: PathArguments::None, ident, .. }) = segments.get(n) {
         if format!("{ident}") == x {
@@ -129,17 +153,17 @@ fn nth_segment_is (segments: &Punctuated<PathSegment, PathSep>, n: usize, x: &st
     return false
 }
-impl std::cmp::PartialEq for ViewItem {
-    fn eq (&self, other: &Self) -> bool {
-        self.item == other.item && (format!("{:?}", self.expose) == format!("{:?}", other.expose))
-    }
-}
+//impl std::cmp::PartialEq for ViewItem {
+//fn eq (&self, other: &Self) -> bool {
+//self.item == other.item && (format!("{:?}", self.expose) == format!("{:?}", other.expose))
+//}
+//}
-impl std::cmp::PartialEq for ViewSym {
-    fn eq (&self, other: &Self) -> bool {
-        self.name == other.name && (format!("{}", self.symbol) == format!("{}", other.symbol))
-    }
-}
+//impl std::cmp::PartialEq for ViewSym {
+//fn eq (&self, other: &Self) -> bool {
+//self.name == other.name && (format!("{}", self.symbol) == format!("{}", other.symbol))
+//}
+//}
 #[cfg(test)] #[test] fn test_view_meta () {
     let x: ViewMeta = pq! { SomeOutput };
@@ -148,6 +172,7 @@ impl std::cmp::PartialEq for ViewSym {
 }
 #[cfg(test)] #[test] fn test_view_impl () {
+    // TODO
     let x: ViewImpl = pq! {
         impl Foo {
             /// docstring1
@@ -159,20 +184,20 @@
         }
     };
     let expected_target: Ident = pq! { Foo };
-    assert_eq!(x.target, expected_target);
-    assert_eq!(x.items.len(), 2);
-    assert_eq!(x.items[0].item, pq! {
-        /// docstring1
-        #[bar] fn a_view () {}
-    });
-    assert_eq!(x.items[1].item, pq! {
-        #[baz]
-        /// docstring2
-        #[baz] fn is_not_view () {}
-    });
-    assert_eq!(x.syms, vec![
-        ViewSym { symbol: pq! { ":view1" }, name: pq! { a_view }, },
-    ]);
+    //assert_eq!(x.target, expected_target);
+    //assert_eq!(x.items.len(), 2);
+    //assert_eq!(x.items[0].item, pq! {
+        ///// docstring1
+        //#[bar] fn a_view () {}
+    //});
+    //assert_eq!(x.items[1].item, pq! {
+        //#[baz]
+        ///// docstring2
+        //#[baz] fn is_not_view () {}
+    //});
+    //assert_eq!(x.syms, vec![
+        //ViewArm( { symbol: pq! { ":view1" }, name: pq! { a_view }, },
+    //]);
 }
 #[cfg(test)] #[test] fn test_view_definition () {