mirror of https://codeberg.org/unspeaker/tengri.git
synced 2025-12-06 19:56:44 +01:00

Compare commits: b827a5c640...7df7cb839c (2 commits: 7570aefcc2, 7df7cb839c)
5 changed files with 272 additions and 95 deletions
Cargo.lock (generated, 1 line changed)

@@ -976,6 +976,7 @@ dependencies = [
 name = "tengri_proc"
 version = "0.13.0"
 dependencies = [
  "heck",
  "proc-macro2",
  "quote",
  "syn",
@@ -2,6 +2,7 @@ extern crate proc_macro;
 pub(crate) use std::collections::{BTreeMap, BTreeSet};
 pub(crate) use std::cmp::Ordering;
 pub(crate) use std::sync::Arc;
 pub(crate) use proc_macro::TokenStream;
 pub(crate) use proc_macro2::{
     TokenStream as TokenStream2, TokenTree,

@@ -10,39 +11,48 @@ pub(crate) use proc_macro2::{
 pub(crate) use syn::{
     parse, parse_macro_input, parse_quote as pq,
     braced, bracketed, parenthesized, Token,
-    Arm, Expr, Attribute, Meta, MetaList, Path, PathSegment, PathArguments, ImplItem, LitStr, Type,
+    Arm, Expr, Attribute, Meta, MetaList, Path, PathSegment, PathArguments,
+    ImplItem, ImplItemFn, LitStr, Type, ItemImpl, ReturnType, Signature, FnArg, PatType,
     parse::{Parse, ParseStream, Result},
     token::{PathSep, Brace},
     punctuated::Punctuated,
 };
 pub(crate) use quote::{quote, TokenStreamExt, ToTokens};
 pub(crate) use heck::{AsKebabCase, AsUpperCamelCase};

 mod proc_view;
 mod proc_expose;
+mod proc_command;

 #[proc_macro_attribute]
 pub fn view (meta: TokenStream, item: TokenStream) -> TokenStream {
-    use self::proc_view::{ViewDefinition, ViewMeta, ViewImpl};
-    write_macro(ViewDefinition(
+    use self::proc_view::{ViewDef, ViewMeta, ViewImpl};
+    write_macro(ViewDef(
         parse_macro_input!(meta as ViewMeta),
-        parse_macro_input!(data as ViewImpl),
+        parse_macro_input!(item as ViewImpl),
     ))
 }

 #[proc_macro_attribute]
 pub fn expose (meta: TokenStream, item: TokenStream) -> TokenStream {
-    use self::proc_view::{ExposeDefinition, ExposeMeta, ExposeImpl};
-    write_macro(ExposeDefinition(
+    use self::proc_expose::{ExposeDef, ExposeMeta, ExposeImpl};
+    write_macro(ExposeDef(
         parse_macro_input!(meta as ExposeMeta),
-        parse_macro_input!(data as ExposeImpl),
+        parse_macro_input!(item as ExposeImpl),
     ))
 }

+#[proc_macro_attribute]
+pub fn command (meta: TokenStream, item: TokenStream) -> TokenStream {
+    use self::proc_command::{CommandDef, CommandMeta, CommandImpl};
+    write_macro(CommandDef(
+        parse_macro_input!(meta as CommandMeta),
+        parse_macro_input!(item as CommandImpl),
+    ))
+}

 fn write_macro <T: ToTokens> (t: T) -> TokenStream {
     let mut out = TokenStream2::new();
-    self::proc_expose::ExposeDefinition(
-        parse_macro_input!(meta as ExposeMeta),
-        parse_macro_input!(data as ExposeImpl),
-    ).to_tokens(&mut out);
+    t.to_tokens(&mut out);
     out.into()
 }
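The hunks above appear to be the proc-macro crate's lib.rs: ViewDefinition/ExposeDefinition are renamed to ViewDef/ExposeDef, a third attribute macro #[command] is registered alongside #[view] and #[expose], and the previous write_macro body (which referenced meta/data identifiers that are not in its scope) is reduced to forwarding any ToTokens value. Below is a minimal sketch of that pattern, kept outside a proc-macro crate so it compiles on its own: it uses proc_macro2::TokenStream instead of proc_macro::TokenStream, and Def and Generated are invented names rather than tengri items.

    // rough stand-in for ViewDef/ExposeDef/CommandDef: parsed meta plus the original item
    use proc_macro2::TokenStream as TokenStream2;
    use quote::{quote, ToTokens};

    struct Def(TokenStream2 /* meta */, TokenStream2 /* item */);

    impl ToTokens for Def {
        fn to_tokens(&self, out: &mut TokenStream2) {
            let Def(meta, item) = self;
            // re-emit the original item unchanged, then append generated code
            // derived from the meta argument, as the three Def types do
            quote! {
                #item
                impl Generated for #meta {}
            }
            .to_tokens(out);
        }
    }

    // same shape as write_macro above, minus the final .into() to proc_macro::TokenStream
    fn write_macro<T: ToTokens>(t: T) -> TokenStream2 {
        let mut out = TokenStream2::new();
        t.to_tokens(&mut out);
        out
    }

    fn main() {
        let def = Def(quote! { Target }, quote! { impl Foo { fn bar() {} } });
        println!("{}", write_macro(def));
    }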
proc/src/proc_command.rs (new file, 144 lines)

@@ -0,0 +1,144 @@
use crate::*;

#[derive(Debug, Clone)]
pub(crate) struct CommandDef(pub(crate) CommandMeta, pub(crate) CommandImpl);

#[derive(Debug, Clone)]
pub(crate) struct CommandMeta {
    target: Ident,
}

#[derive(Debug, Clone)]
pub(crate) struct CommandImpl(ItemImpl, BTreeMap<Arc<str>, CommandArm>);

#[derive(Debug, Clone)]
struct CommandVariant(Ident, Vec<FnArg>);

#[derive(Debug, Clone)]
struct CommandArm(Arc<str>, Ident, Vec<FnArg>, ReturnType);

impl Parse for CommandMeta {
    fn parse (input: ParseStream) -> Result<Self> {
        Ok(Self {
            target: input.parse::<Ident>()?,
        })
    }
}

impl Parse for CommandImpl {
    fn parse (input: ParseStream) -> Result<Self> {
        let block = input.parse::<ItemImpl>()?;
        let mut exposed: BTreeMap<Arc<str>, CommandArm> = Default::default();
        for item in block.items.iter() {
            if let ImplItem::Fn(ImplItemFn {
                sig: Signature { ident, inputs, output, .. }, ..
            }) = item {
                let key: Arc<str> =
                    format!("{}", AsKebabCase(format!("{}", &ident))).into();
                let variant: Arc<str> =
                    format!("{}", AsUpperCamelCase(format!("{}", &ident))).into();
                if exposed.contains_key(&key) {
                    return Err(input.error(format!("already defined: {ident}")));
                }
                exposed.insert(key, CommandArm(
                    variant,
                    ident.clone(),
                    inputs.iter().map(|x|x.clone()).collect(),
                    output.clone(),
                ));
            }
        }
        Ok(Self(block, exposed))
    }
}

impl ToTokens for CommandDef {
    fn to_tokens (&self, out: &mut TokenStream2) {
        let Self(CommandMeta { target }, CommandImpl(block, exposed)) = self;
        let enumeration = &block.self_ty;
        let definitions = exposed.values().map(|x|CommandVariant(
            x.1.clone(),
            x.2.clone(),
        ));
        let implementations = exposed.values().map(|x|CommandArm(
            x.0.clone(),
            x.1.clone(),
            x.2.clone(),
            x.3.clone(),
        ));
        for token in quote! {
            #block
            enum #enumeration {
                #(#definitions)*
            }
            impl Command<#target> for #enumeration {
                fn execute (self, state: &mut #target) -> Perhaps<Self> {
                    match self {
                        #(#implementations)*
                    }
                }
            }
        } {
            out.append(token)
        }
    }
}

impl ToTokens for CommandVariant {
    fn to_tokens (&self, out: &mut TokenStream2) {
        let Self(ident, args) = self;
        out.append(LitStr::new(&format!("{}", ident), Span::call_site())
            .token());
        out.append(Group::new(Delimiter::Parenthesis, {
            let mut out = TokenStream2::new();
            for arg in args.iter() {
                if let FnArg::Typed(PatType { attrs, pat, colon_token, ty }) = arg {
                    out.append(LitStr::new(
                        &format!("{}", quote! { #ty }),
                        Span::call_site()
                    ).token());
                    out.append(Punct::new(',', Alone));
                }
            }
            out
        }));
        out.append(Punct::new(',', Alone));
    }
}

impl ToTokens for CommandArm {
    fn to_tokens (&self, out: &mut TokenStream2) {
        let Self(symbol, ident, args, returnType) = self;
        out.append(Punct::new(':', Joint));
        out.append(Punct::new(':', Alone));
        out.append(Ident::new("tengri", Span::call_site()));
        out.append(Punct::new(':', Joint));
        out.append(Punct::new(':', Alone));
        out.append(Ident::new("dsl", Span::call_site()));
        out.append(Punct::new(':', Joint));
        out.append(Punct::new(':', Alone));
        out.append(Ident::new("Value", Span::call_site()));
        out.append(Punct::new(':', Joint));
        out.append(Punct::new(':', Alone));
        out.append(Ident::new("Sym", Span::call_site()));
        out.append(Group::new(Delimiter::Parenthesis, {
            let mut out = TokenStream2::new();
            for arg in args.iter() {
                out.append(LitStr::new(&symbol, Span::call_site()).token());
            }
            out
        }));
        out.append(Punct::new('=', Joint));
        out.append(Punct::new('>', Alone));
        out.append(LitStr::new(&format!("{}", ident), Span::call_site()).token());
        out.append(Group::new(Delimiter::Parenthesis, {
            let mut out = TokenStream2::new();
            for arg in args.iter() {
                // TODO
                //out.append(LitStr::new(&symbol, Span::call_site()).token());
            }
            out
        }));
        out.append(Punct::new(',', Alone));
    }
}
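proc_command.rs is new: #[command] parses the annotated impl block, records one CommandArm per method (kebab-case key, UpperCamelCase variant name, argument list, return type), and emits the original block plus an enum and a Command<Target> implementation whose execute dispatches on the variants. Below is a hand-written sketch of the shape that quote! block targets; the Command trait and Perhaps alias are assumed stand-ins for the tengri versions, the Sampler/SamplerCommand names are invented, and variant payloads (the fn arguments) are omitted for brevity.

    // assumed shape of tengri's Perhaps alias; not taken from the source
    type Perhaps<T> = Result<Option<T>, Box<dyn std::error::Error>>;

    // assumed stand-in for the Command trait referenced in the quote! block above
    trait Command<S>: Sized {
        fn execute(self, state: &mut S) -> Perhaps<Self>;
    }

    struct Sampler { volume: u8 }

    // roughly what the macro would derive from an impl block:
    // one UpperCamelCase variant per method
    enum SamplerCommand { VolumeUp, VolumeDown }

    impl Command<Sampler> for SamplerCommand {
        fn execute(self, state: &mut Sampler) -> Perhaps<Self> {
            match self {
                SamplerCommand::VolumeUp   => { state.volume = state.volume.saturating_add(1); Ok(None) }
                SamplerCommand::VolumeDown => { state.volume = state.volume.saturating_sub(1); Ok(None) }
            }
        }
    }

    fn main() {
        let mut sampler = Sampler { volume: 64 };
        SamplerCommand::VolumeUp.execute(&mut sampler).unwrap();
        assert_eq!(sampler.volume, 65);
    }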
@@ -1,8 +1,7 @@
 use crate::*;
 use syn::parse::discouraged::Speculative;

 #[derive(Debug, Clone)]
-pub(crate) struct ExposeDefinition(pub(crate) ExposeMeta, pub(crate) ExposeImpl);
+pub(crate) struct ExposeDef(pub(crate) ExposeMeta, pub(crate) ExposeImpl);

 #[derive(Debug, Clone)]
 pub(crate) struct ExposeMeta;

@@ -10,14 +9,11 @@ pub(crate) struct ExposeMeta;
 #[derive(Debug, Clone)]
 pub(crate) struct ExposeImpl {
     block: ItemImpl,
-    exposed: BTreeMap<ExposeType, BTreeMap<ExposeSym, ExposeArm>>,
+    exposed: BTreeMap<ExposeType, BTreeMap<String, Ident>>,
 }

 #[derive(Debug, Clone)]
-struct ExposeArm {
-    key: ExposeSym,
-    value: Expr
-}
+struct ExposeArm(String, Ident);

 #[derive(Debug, Clone)]
 struct ExposeSym(LitStr);

@@ -34,17 +30,17 @@ impl Parse for ExposeMeta {
 impl Parse for ExposeImpl {
     fn parse (input: ParseStream) -> Result<Self> {
         let block = input.parse::<ItemImpl>()?;
-        let mut exposed: BTreeMap<ExposeType, BTreeMap<ExposeSym, ExposeArm>> = Default::default();
+        let mut exposed: BTreeMap<ExposeType, BTreeMap<String, Ident>> = Default::default();
         for item in block.items.iter() {
             if let ImplItem::Fn(ImplItemFn { sig: Signature { ident, output, .. }, .. }) = item {
                 if let ReturnType::Type(_, return_type) = output {
                     let return_type = ExposeType(return_type.clone());
-                    if !exposed.contains_key(return_type) {
-                        exposed.insert(return_type.clone(), Default::default())
+                    if !exposed.contains_key(&return_type) {
+                        exposed.insert(return_type.clone(), Default::default());
                     }
                     let values = exposed.get_mut(&return_type).unwrap();
-                    let key = format!(":{}", AsKebabCase(&ident));
-                    if values.contains_key(key) {
+                    let key = format!(":{}", AsKebabCase(format!("{}", &ident)));
+                    if values.contains_key(&key) {
                         return Err(input.error(format!("already defined: {key}")))
                     }
                     values.insert(key, ident.clone());

@@ -57,9 +53,9 @@ impl Parse for ExposeImpl {
     }
 }

-impl ToTokens for ExposeDefinition {
+impl ToTokens for ExposeDef {
     fn to_tokens (&self, out: &mut TokenStream2) {
-        let Self { meta, data } = self;
+        let Self(meta, data) = self;
         for token in quote! { #data } {
             out.append(token)
         }

@@ -68,11 +64,12 @@ impl ToTokens for ExposeDefinition {

 impl ToTokens for ExposeImpl {
     fn to_tokens (&self, out: &mut TokenStream2) {
-        let Self { target, items, types } = self;
-        for token in quote! { impl #target { #(#items)* } } {
+        let Self { block, exposed } = self;
+        let target = &self.block.self_ty;
+        for token in quote! { #block } {
             out.append(token);
         }
-        for (t, variants) in types.iter() {
+        for (t, variants) in exposed.iter() {
             let predefined = match format!("{}", quote! { #t }).as_str() {
                 "bool" => vec![
                     quote! { ::tengri::dsl::Value::Sym(":true") => true },

@@ -105,7 +102,7 @@ impl ToTokens for ExposeImpl {

 impl ToTokens for ExposeArm {
     fn to_tokens (&self, out: &mut TokenStream2) {
-        let Self { key, value } = self;
+        let Self(key, value) = self;
         out.append(Punct::new(':', Joint));
         out.append(Punct::new(':', Alone));
         out.append(Ident::new("tengri", Span::call_site()));

@@ -120,7 +117,7 @@ impl ToTokens for ExposeArm {
         out.append(Ident::new("Sym", Span::call_site()));
         out.append(Group::new(Delimiter::Parenthesis, {
             let mut out = TokenStream2::new();
-            out.append(key.0.token());
+            out.append(LitStr::new(&key, Span::call_site()).token());
             out
         }));
         out.append(Punct::new('=', Joint));

@@ -165,7 +162,7 @@ impl Eq for ExposeSym {}

 impl From<Type> for ExposeType {
     fn from (this: Type) -> Self {
-        Self(this)
+        Self(Box::new(this))
     }
 }
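In the expose module (presumably proc/src/proc_expose.rs), ExposeDefinition becomes ExposeDef, ExposeArm collapses to a (String, Ident) pair, and ExposeImpl now re-emits the whole parsed ItemImpl while bucketing methods by return type under ":kebab-case" keys. Below is a standalone rehearsal of that grouping, keyed by the rendered return type instead of the crate-internal ExposeType wrapper; it assumes syn with the "full" feature plus proc-macro2, quote and heck, and the Mixer impl block is invented.

    use std::collections::BTreeMap;
    use heck::AsKebabCase;
    use quote::quote;
    use syn::{parse_quote, ImplItem, ImplItemFn, ItemImpl, ReturnType, Signature};

    fn main() {
        let block: ItemImpl = parse_quote! {
            impl Mixer {
                fn is_muted(&self) -> bool { false }
                fn is_soloed(&self) -> bool { false }
                fn track_count(&self) -> usize { 8 }
            }
        };
        // return type (rendered to a string) -> (":kebab-case" key -> method name)
        let mut exposed: BTreeMap<String, BTreeMap<String, String>> = BTreeMap::new();
        for item in block.items.iter() {
            if let ImplItem::Fn(ImplItemFn { sig: Signature { ident, output, .. }, .. }) = item {
                if let ReturnType::Type(_, return_type) = output {
                    let type_key = format!("{}", quote! { #return_type });
                    let key = format!(":{}", AsKebabCase(format!("{}", ident)));
                    exposed.entry(type_key).or_default().insert(key, ident.to_string());
                }
            }
        }
        // prints: bool: [":is-muted", ":is-soloed"] / usize: [":track-count"]
        for (ty, methods) in &exposed {
            println!("{ty}: {:?}", methods.keys().collect::<Vec<_>>());
        }
    }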
@@ -1,7 +1,7 @@
 use crate::*;

 #[derive(Debug, Clone)]
-pub(crate) struct ViewDefinition(pub(crate) ViewMeta, pub(crate) ViewImpl);
+pub(crate) struct ViewDef(pub(crate) ViewMeta, pub(crate) ViewImpl);

 #[derive(Debug, Clone)]
 pub(crate) struct ViewMeta {

@@ -11,9 +11,11 @@ pub(crate) struct ViewMeta {
 #[derive(Debug, Clone)]
 pub(crate) struct ViewImpl {
     block: ItemImpl,
-    exposed: BTreeSet<Ident>,
+    exposed: BTreeMap<String, Ident>,
 }

+struct ViewArm(String, Ident);
+
 impl Parse for ViewMeta {
     fn parse (input: ParseStream) -> Result<Self> {
         Ok(Self {

@@ -24,55 +26,39 @@ impl Parse for ViewMeta {
 impl Parse for ViewImpl {
     fn parse (input: ParseStream) -> Result<Self> {
-        let _ = input.parse::<Token![impl]>()?;
-        let mut syms = vec![];
-        Ok(Self {
-            target: input.parse::<Ident>()?,
-            items: {
-                let group;
-                let brace = braced!(group in input);
-                let mut items = vec![];
-                while !group.is_empty() {
-                    let item = group.parse::<ViewItem>()?;
-                    if let Some(expose) = &item.expose {
-                        if let ImplItem::Fn(ref item) = item.item {
-                            let symbol = expose.clone();
-                            let name = item.sig.ident.clone();
-                            syms.push(ViewSym { symbol, name })
-                        } else {
-                            return Err(
-                                input.error("only fn items can be exposed to #[tengri::view]")
-                            )
-                        }
-                    }
-                    items.push(item);
-                }
-                items
-            },
-            syms,
-        })
+        let block = input.parse::<ItemImpl>()?;
+        let mut exposed: BTreeMap<String, Ident> = Default::default();
+        for item in block.items.iter() {
+            if let ImplItem::Fn(ImplItemFn { sig: Signature { ident, .. }, .. }) = item {
+                let key = format!(":{}", AsKebabCase(format!("{}", &ident)));
+                if exposed.contains_key(&key) {
+                    return Err(input.error(format!("already defined: {ident}")));
+                }
+                exposed.insert(key, ident.clone());
+            }
+        }
+        Ok(Self { block, exposed })
     }
 }

-impl ToTokens for ViewDefinition {
+impl ToTokens for ViewDef {
     fn to_tokens (&self, out: &mut TokenStream2) {
-        let Self(ViewMeta { output }, ViewImpl { target, syms, items }) = self;
+        let Self(ViewMeta { output }, ViewImpl { block, exposed }) = self;
+        let ident = &block.self_ty;
+        let exposed: Vec<_> = exposed.iter().map(|(k,v)|ViewArm(k.clone(), v.clone())).collect();
         for token in quote! {
-            /// Augmented by [tengri_proc].
-            impl #target {
-                #(#items)*
-            }
+            #block
             /// Generated by [tengri_proc].
-            impl ::tengri::output::Content<#output> for #target {
+            impl ::tengri::output::Content<#output> for #ident {
                 fn content (&self) -> impl Render<#output> {
                     self.size.of(::tengri::output::View(self, self.config.view))
                 }
             }
             /// Generated by [tengri_proc].
-            impl<'a> ::tengri::output::ViewContext<'a, #output> for #target {
+            impl<'a> ::tengri::output::ViewContext<'a, #output> for #ident {
                 fn get_content_sym (&'a self, value: &Value<'a>) -> Option<RenderBox<'a, #output>> {
                     match value {
-                        #(#syms)*
+                        #(#exposed)*
                         _ => panic!("expected Sym(content), got: {value:?}")
                     }
                 }
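The view module now parses the annotated block as a syn::ItemImpl, maps each method to a ":kebab-case" symbol, and interpolates one match arm per exposed method into the generated ViewContext impl. The sketch below reproduces that expansion step with quote! alone and just prints the resulting tokens; it assumes only the proc-macro2 and quote crates, Editor, TuiOut and the method names are invented, and the ::tengri paths are emitted as tokens without being compiled against.

    use proc_macro2::TokenStream as TokenStream2;
    use quote::{format_ident, quote};

    fn main() {
        let ident = format_ident!("Editor");   // stands in for block.self_ty
        let output = format_ident!("TuiOut");  // stands in for ViewMeta's output type
        // one (":kebab-case" symbol, method name) pair per exposed fn, like ViewArm
        let exposed = [(":status-bar", "status_bar"), (":track-list", "track_list")];
        let arms = exposed.iter().map(|(key, name)| {
            let method = format_ident!("{}", name);
            quote! { ::tengri::dsl::Value::Sym(#key) => Some(self.#method().boxed()), }
        });
        let generated: TokenStream2 = quote! {
            impl<'a> ::tengri::output::ViewContext<'a, #output> for #ident {
                fn get_content_sym (&'a self, value: &Value<'a>) -> Option<RenderBox<'a, #output>> {
                    match value {
                        #(#arms)*
                        _ => panic!("expected Sym(content), got: {value:?}")
                    }
                }
            }
        };
        println!("{generated}");
    }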
@@ -83,8 +69,9 @@ impl ToTokens for ViewDefinition {
     }
 }

-impl ToTokens for ViewSym {
+impl ToTokens for ViewArm {
     fn to_tokens (&self, out: &mut TokenStream2) {
+        let Self(key, value) = self;
         out.append(Punct::new(':', Joint));
         out.append(Punct::new(':', Alone));
         out.append(Ident::new("tengri", Span::call_site()));

@@ -99,7 +86,7 @@ impl ToTokens for ViewSym {
         out.append(Ident::new("Sym", Span::call_site()));
         out.append(Group::new(Delimiter::Parenthesis, {
             let mut out = TokenStream2::new();
-            out.append(self.symbol.clone());
+            out.append(LitStr::new(key, Span::call_site()).token());
             out
         }));
         out.append(Punct::new('=', Joint));

@@ -109,7 +96,7 @@ impl ToTokens for ViewSym {
             let mut out = TokenStream2::new();
             out.append(Ident::new("self", Span::call_site()));
             out.append(Punct::new('.', Alone));
-            out.append(self.name.clone());
+            out.append(value.clone());
             out.append(Group::new(Delimiter::Parenthesis, TokenStream2::new()));
             out.append(Punct::new('.', Alone));
             out.append(Ident::new("boxed", Span::call_site()));

@@ -120,6 +107,43 @@ impl ToTokens for ViewSym {
     }
 }

+//impl ToTokens for ViewSym {
+    //fn to_tokens (&self, out: &mut TokenStream2) {
+        //out.append(Punct::new(':', Joint));
+        //out.append(Punct::new(':', Alone));
+        //out.append(Ident::new("tengri", Span::call_site()));
+        //out.append(Punct::new(':', Joint));
+        //out.append(Punct::new(':', Alone));
+        //out.append(Ident::new("dsl", Span::call_site()));
+        //out.append(Punct::new(':', Joint));
+        //out.append(Punct::new(':', Alone));
+        //out.append(Ident::new("Value", Span::call_site()));
+        //out.append(Punct::new(':', Joint));
+        //out.append(Punct::new(':', Alone));
+        //out.append(Ident::new("Sym", Span::call_site()));
+        //out.append(Group::new(Delimiter::Parenthesis, {
+            //let mut out = TokenStream2::new();
+            //out.append(self.symbol.clone());
+            //out
+        //}));
+        //out.append(Punct::new('=', Joint));
+        //out.append(Punct::new('>', Alone));
+        //out.append(Ident::new("Some", Span::call_site()));
+        //out.append(Group::new(Delimiter::Parenthesis, {
+            //let mut out = TokenStream2::new();
+            //out.append(Ident::new("self", Span::call_site()));
+            //out.append(Punct::new('.', Alone));
+            //out.append(self.name.clone());
+            //out.append(Group::new(Delimiter::Parenthesis, TokenStream2::new()));
+            //out.append(Punct::new('.', Alone));
+            //out.append(Ident::new("boxed", Span::call_site()));
+            //out.append(Group::new(Delimiter::Parenthesis, TokenStream2::new()));
+            //out
+        //}));
+        //out.append(Punct::new(',', Alone));
+    //}
+//}

 fn nth_segment_is (segments: &Punctuated<PathSegment, PathSep>, n: usize, x: &str) -> bool {
     if let Some(PathSegment { arguments: PathArguments::None, ident, .. }) = segments.get(n) {
         if format!("{ident}") == x {
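nth_segment_is, shown only as context above (the hunk cuts the body off), checks whether the n-th segment of a path equals a given name. Below is a freestanding version of that check against syn::Path; it uses iter().nth in place of the original's segments.get, and the truncated tail is assumed to return true. The "tengri::view" path is only an illustrative input.

    use syn::{punctuated::Punctuated, token::PathSep, Path, PathArguments, PathSegment};

    fn nth_segment_is(segments: &Punctuated<PathSegment, PathSep>, n: usize, x: &str) -> bool {
        if let Some(PathSegment { arguments: PathArguments::None, ident, .. }) = segments.iter().nth(n) {
            if format!("{ident}") == x {
                return true; // assumed: the hunk above truncates before this branch
            }
        }
        false
    }

    fn main() {
        let path: Path = syn::parse_str("tengri::view").unwrap();
        assert!(nth_segment_is(&path.segments, 0, "tengri"));
        assert!(nth_segment_is(&path.segments, 1, "view"));
        assert!(!nth_segment_is(&path.segments, 1, "expose"));
        println!("segment checks pass");
    }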
@@ -129,17 +153,17 @@ fn nth_segment_is (segments: &Punctuated<PathSegment, PathSep>, n: usize, x: &st
     return false
 }

-impl std::cmp::PartialEq for ViewItem {
-    fn eq (&self, other: &Self) -> bool {
-        self.item == other.item && (format!("{:?}", self.expose) == format!("{:?}", other.expose))
-    }
-}
+//impl std::cmp::PartialEq for ViewItem {
+    //fn eq (&self, other: &Self) -> bool {
+        //self.item == other.item && (format!("{:?}", self.expose) == format!("{:?}", other.expose))
+    //}
+//}

-impl std::cmp::PartialEq for ViewSym {
-    fn eq (&self, other: &Self) -> bool {
-        self.name == other.name && (format!("{}", self.symbol) == format!("{}", other.symbol))
-    }
-}
+//impl std::cmp::PartialEq for ViewSym {
+    //fn eq (&self, other: &Self) -> bool {
+        //self.name == other.name && (format!("{}", self.symbol) == format!("{}", other.symbol))
+    //}
+//}

 #[cfg(test)] #[test] fn test_view_meta () {
     let x: ViewMeta = pq! { SomeOutput };

@@ -148,6 +172,7 @@ impl std::cmp::PartialEq for ViewSym {
 }

 #[cfg(test)] #[test] fn test_view_impl () {
+    // TODO
     let x: ViewImpl = pq! {
         impl Foo {
             /// docstring1

@@ -159,20 +184,20 @@ impl std::cmp::PartialEq for ViewSym {
         }
     };
     let expected_target: Ident = pq! { Foo };
-    assert_eq!(x.target, expected_target);
-    assert_eq!(x.items.len(), 2);
-    assert_eq!(x.items[0].item, pq! {
-        /// docstring1
-        #[bar] fn a_view () {}
-    });
-    assert_eq!(x.items[1].item, pq! {
-        #[baz]
-        /// docstring2
-        #[baz] fn is_not_view () {}
-    });
-    assert_eq!(x.syms, vec![
-        ViewSym { symbol: pq! { ":view1" }, name: pq! { a_view }, },
-    ]);
+    //assert_eq!(x.target, expected_target);
+    //assert_eq!(x.items.len(), 2);
+    //assert_eq!(x.items[0].item, pq! {
+        ///// docstring1
+        //#[bar] fn a_view () {}
+    //});
+    //assert_eq!(x.items[1].item, pq! {
+        //#[baz]
+        ///// docstring2
+        //#[baz] fn is_not_view () {}
+    //});
+    //assert_eq!(x.syms, vec![
+        //ViewArm( { symbol: pq! { ":view1" }, name: pq! { a_view }, },
+    //]);
 }

 #[cfg(test)] #[test] fn test_view_definition () {
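With ViewImpl reshaped around ItemImpl, the old field-level assertions in test_view_impl are commented out pending a rewrite. Below is a freestanding analogue of what such a test can assert by going through syn directly; it assumes syn's "full" feature, and the Foo impl block mirrors the fixture used in the test above.

    use syn::{parse_quote, ImplItem, ItemImpl};

    #[test]
    fn collects_method_idents() {
        // parse_quote plays the role of the pq! alias used in the tests above
        let block: ItemImpl = parse_quote! {
            impl Foo {
                /// docstring1
                fn a_view (&self) {}
                /// docstring2
                fn is_not_view (&self) {}
            }
        };
        let idents: Vec<String> = block.items.iter()
            .filter_map(|item| match item {
                ImplItem::Fn(f) => Some(f.sig.ident.to_string()),
                _ => None,
            })
            .collect();
        assert_eq!(idents, ["a_view", "is_not_view"]);
    }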