From ab07fd2b4303e1c91325aa3f5bc0014ec5a82961 Mon Sep 17 00:00:00 2001
From: unspeaker
Date: Fri, 9 May 2025 19:45:25 +0300
Subject: [PATCH] dsl: compact

---
 dsl/src/dsl.rs              | 324 ++++++++++++++++++++++++++++++++++++
 dsl/src/dsl_context.rs      |  40 -----
 dsl/src/dsl_error.rs        |  15 --
 dsl/src/dsl_iter.rs         | 157 -----------------
 dsl/src/dsl_macros.rs       |  46 -----
 dsl/src/dsl_token.rs        | 120 -------------
 dsl/src/lib.rs              |  39 ++++-
 output/src/ops/transform.rs |   6 +-
 8 files changed, 363 insertions(+), 384 deletions(-)
 create mode 100644 dsl/src/dsl.rs
 delete mode 100644 dsl/src/dsl_context.rs
 delete mode 100644 dsl/src/dsl_error.rs
 delete mode 100644 dsl/src/dsl_iter.rs
 delete mode 100644 dsl/src/dsl_macros.rs
 delete mode 100644 dsl/src/dsl_token.rs

diff --git a/dsl/src/dsl.rs b/dsl/src/dsl.rs
new file mode 100644
index 0000000..b3d9448
--- /dev/null
+++ b/dsl/src/dsl.rs
@@ -0,0 +1,324 @@
+use crate::*;
+use thiserror::Error;
+
+pub type ParseResult<T> = Result<T, ParseError>;
+
+#[derive(Error, Debug, Copy, Clone, PartialEq)] pub enum ParseError {
+    #[error("parse failed: not implemented")]
+    Unimplemented,
+    #[error("parse failed: empty")]
+    Empty,
+    #[error("parse failed: incomplete")]
+    Incomplete,
+    #[error("parse failed: unexpected character '{0}'")]
+    Unexpected(char),
+    #[error("parse failed: error #{0}")]
+    Code(u8),
+}
+
+pub trait TryFromDsl<'state, T>: Sized {
+    fn try_from_expr <'source: 'state> (
+        _state: &'state T, _iter: TokenIter<'source>
+    ) -> Option<Self> {
+        None
+    }
+    fn try_from_atom <'source: 'state> (
+        state: &'state T, value: Value<'source>
+    ) -> Option<Self> {
+        if let Exp(0, iter) = value {
+            return Self::try_from_expr(state, iter.clone())
+        }
+        None
+    }
+}
+
+pub trait TryIntoDsl: Sized {
+    fn try_into_atom (&self) -> Option;
+}
+
+/// Map EDN tokens to parameters of a given type for a given context
+pub trait Context<'state, U>: Sized {
+    fn get <'source> (&'state self, _iter: &mut TokenIter<'source>) -> Option<U> {
+        None
+    }
+}
+
+impl<'state, T: Context<'state, U>, U> Context<'state, U> for &T {
+    fn get <'source> (&'state self, iter: &mut TokenIter<'source>) -> Option<U> {
+        (*self).get(iter)
+    }
+}
+
+impl<'state, T: Context<'state, U>, U> Context<'state, U> for Option<T> {
+    fn get <'source> (&'state self, iter: &mut TokenIter<'source>) -> Option<U> {
+        self.as_ref().map(|s|s.get(iter)).flatten()
+    }
+}
+
+/// Implement the const iterator pattern.
+#[macro_export] macro_rules! const_iter {
+    ($(<$l:lifetime>)?|$self:ident: $Struct:ty| => $Item:ty => $expr:expr) => {
+        impl$(<$l>)? Iterator for $Struct {
+            type Item = $Item;
+            fn next (&mut $self) -> Option<$Item> { $expr }
+        }
+        impl$(<$l>)? ConstIntoIter for $Struct {
+            type Kind = IsIteratorKind;
+            type Item = $Item;
+            type IntoIter = Self;
+        }
+    }
+}
+
+/// Provides a native [Iterator] API over the [ConstIntoIter] [SourceIter].
+/// [TokenIter::next] returns just the [Token] and mutates `self`,
+/// instead of returning an updated version of the struct as [SourceIter::next] does.
+#[derive(Copy, Clone, Debug, Default, PartialEq)]
+pub struct TokenIter<'a>(
+    pub SourceIter<'a>
+);
+
+impl<'a> TokenIter<'a> {
+    pub const fn new (source: &'a str) -> Self {
+        Self(SourceIter::new(source))
+    }
+    pub const fn peek (&self) -> Option<Token<'a>> {
+        self.0.peek()
+    }
+}
+
+impl<'a> Iterator for TokenIter<'a> {
+    type Item = Token<'a>;
+    fn next (&mut self) -> Option<Token<'a>> {
+        self.0.next().map(|(item, rest)|{self.0 = rest; item})
+    }
+}
+
+impl<'a> From<&'a str> for TokenIter<'a> {
+    fn from (source: &'a str) -> Self{
+        Self(SourceIter(source))
+    }
+}
+
+impl<'a> From<SourceIter<'a>> for TokenIter<'a> {
+    fn from (source: SourceIter<'a>) -> Self{
+        Self(source)
+    }
+}
+
+/// Owns a reference to the source text.
+/// [SourceIter::next] emits subsequent pairs of:
+/// * a [Token] and
+/// * the source text remaining
+/// * [ ] TODO: maybe [SourceIter::next] should wrap the remaining source in `Self` ?
+#[derive(Copy, Clone, Debug, Default, PartialEq)]
+pub struct SourceIter<'a>(pub &'a str);
+
+const_iter!(<'a>|self: SourceIter<'a>| => Token<'a> => self.next_mut().map(|(result, _)|result));
+
+impl<'a> From<&'a str> for SourceIter<'a> {
+    fn from (source: &'a str) -> Self{
+        Self::new(source)
+    }
+}
+
+impl<'a> SourceIter<'a> {
+    pub const fn new (source: &'a str) -> Self {
+        Self(source)
+    }
+    pub const fn chomp (&self, index: usize) -> Self {
+        Self(split_at(self.0, index).1)
+    }
+    pub const fn next (mut self) -> Option<(Token<'a>, Self)> {
+        Self::next_mut(&mut self)
+    }
+    pub const fn peek (&self) -> Option<Token<'a>> {
+        peek_src(self.0)
+    }
+    pub const fn next_mut (&mut self) -> Option<(Token<'a>, Self)> {
+        match self.peek() {
+            Some(token) => Some((token, self.chomp(token.end()))),
+            None => None
+        }
+    }
+}
+
+/// Static iteration helper.
+#[macro_export] macro_rules! iterate {
+    ($expr:expr => $arg: pat => $body:expr) => {
+        let mut iter = $expr;
+        while let Some(($arg, next)) = iter.next() {
+            $body;
+            iter = next;
+        }
+    }
+}
+
+pub const fn peek_src <'a> (source: &'a str) -> Option<Token<'a>> {
+    let mut token: Token<'a> = Token::new(source, 0, 0, Nil);
+    iterate!(char_indices(source) => (start, c) => token = match token.value() {
+        Err(_) => return Some(token),
+        Nil => match c {
+            ' '|'\n'|'\r'|'\t' =>
+                token.grow(),
+            '(' =>
+                Token::new(source, start, 1, Exp(1, TokenIter::new(str_range(source, start, start + 1)))),
+            '"' =>
+                Token::new(source, start, 1, Str(str_range(source, start, start + 1))),
+            ':'|'@' =>
+                Token::new(source, start, 1, Sym(str_range(source, start, start + 1))),
+            '/'|'a'..='z' =>
+                Token::new(source, start, 1, Key(str_range(source, start, start + 1))),
+            '0'..='9' =>
+                Token::new(source, start, 1, match to_digit(c) {
+                    Ok(c) => Value::Num(c),
+                    Result::Err(e) => Value::Err(e)
+                }),
+            _ => token.error(Unexpected(c))
+        },
+        Str(_) => match c {
+            '"' => return Some(token),
+            _ => token.grow_str(),
+        },
+        Num(n) => match c {
+            '0'..='9' => token.grow_num(n, c),
+            ' '|'\n'|'\r'|'\t'|')' => return Some(token),
+            _ => token.error(Unexpected(c))
+        },
+        Sym(_) => match c {
+            'a'..='z'|'A'..='Z'|'0'..='9'|'-' => token.grow_sym(),
+            ' '|'\n'|'\r'|'\t'|')' => return Some(token),
+            _ => token.error(Unexpected(c))
+        },
+        Key(_) => match c {
+            'a'..='z'|'0'..='9'|'-'|'/' => token.grow_key(),
+            ' '|'\n'|'\r'|'\t'|')' => return Some(token),
+            _ => token.error(Unexpected(c))
+        },
+        Exp(depth, _) => match depth {
+            0 => return Some(token.grow_exp()),
+            _ => match c {
+                ')' => token.grow_out(),
+                '(' => token.grow_in(),
+                _ => token.grow_exp(),
+            }
+        },
+    });
+    match token.value() {
+        Nil => None,
+        _ => Some(token),
+    }
+}
+
+pub const fn to_number (digits: &str) -> Result<usize, ParseError> {
+    let mut value = 0;
+    iterate!(char_indices(digits) => (_, c) => match to_digit(c) {
+        Ok(digit) => value = 10 * value + digit,
+        Result::Err(e) => return Result::Err(e)
+    });
+    Ok(value)
+}
+
+pub const fn to_digit (c: char) -> Result<usize, ParseError> {
+    Ok(match c {
+        '0' => 0, '1' => 1, '2' => 2, '3' => 3, '4' => 4,
+        '5' => 5, '6' => 6, '7' => 7, '8' => 8, '9' => 9,
+        _ => return Result::Err(Unexpected(c))
+    })
+}
+
+#[derive(Debug, Copy, Clone, Default, PartialEq)] pub struct Token<'source> {
+    pub source: &'source str,
+    pub start: usize,
+    pub length: usize,
+    pub value: Value<'source>,
+}
+
+#[derive(Debug, Copy, Clone, Default, PartialEq)] pub enum Value<'source> {
+    #[default] Nil,
+    Err(ParseError),
+    Num(usize),
+    Sym(&'source str),
+    Key(&'source str),
+    Str(&'source str),
+    Exp(usize, TokenIter<'source>),
+}
+
+impl<'source> Token<'source> {
+    pub const fn new (source: &'source str, start: usize, length: usize, value: Value<'source>) -> Self {
+        Self { source, start, length, value }
+    }
+    pub const fn end (&self) -> usize {
+        self.start.saturating_add(self.length)
+    }
+    pub const fn slice (&'source self) -> &'source str {
+        self.slice_source(self.source)
+        //str_range(self.source, self.start, self.end())
+    }
+    pub const fn slice_source <'range> (&'source self, source: &'range str) -> &'range str {
+        str_range(source, self.start, self.end())
+    }
+    pub const fn slice_source_exp <'range> (&'source self, source: &'range str) -> &'range str {
+        str_range(source, self.start.saturating_add(1), self.end())
+    }
+    pub const fn value (&self) -> Value {
+        self.value
+    }
+    pub const fn error (self, error: ParseError) -> Self {
+        Self { value: Value::Err(error), ..self }
+    }
+    pub const fn grow (self) -> Self {
+        Self { length: self.length.saturating_add(1), ..self }
+    }
+    pub const fn grow_num (self, m: usize, c: char) -> Self {
+        match to_digit(c) {
+            Ok(n) => Self { value: Num(10*m+n), ..self.grow() },
+            Result::Err(e) => Self { value: Err(e), ..self.grow() },
+        }
+    }
+    pub const fn grow_key (self) -> Self {
+        let mut token = self.grow();
+        token.value = Key(token.slice_source(self.source));
+        token
+    }
+    pub const fn grow_sym (self) -> Self {
+        let mut token = self.grow();
+        token.value = Sym(token.slice_source(self.source));
+        token
+    }
+    pub const fn grow_str (self) -> Self {
+        let mut token = self.grow();
+        token.value = Str(token.slice_source(self.source));
+        token
+    }
+    pub const fn grow_exp (self) -> Self {
+        let mut token = self.grow();
+        if let Exp(depth, _) = token.value {
+            token.value = Exp(depth, TokenIter::new(token.slice_source_exp(self.source)));
+        } else {
+            unreachable!()
+        }
+        token
+    }
+    pub const fn grow_in (self) -> Self {
+        let mut token = self.grow_exp();
+        if let Value::Exp(depth, source) = token.value {
+            token.value = Value::Exp(depth.saturating_add(1), source)
+        } else {
+            unreachable!()
+        }
+        token
+    }
+    pub const fn grow_out (self) -> Self {
+        let mut token = self.grow_exp();
+        if let Value::Exp(depth, source) = token.value {
+            if depth > 0 {
+                token.value = Value::Exp(depth - 1, source)
+            } else {
+                return self.error(Unexpected(')'))
+            }
+        } else {
+            unreachable!()
+        }
+        token
+    }
+}
diff --git a/dsl/src/dsl_context.rs b/dsl/src/dsl_context.rs
deleted file mode 100644
index 4acbf63..0000000
--- a/dsl/src/dsl_context.rs
+++ /dev/null
@@ -1,40 +0,0 @@
-use crate::*;
-
-pub trait TryFromDsl<'state, T>: Sized {
-    fn try_from_expr <'source: 'state> (
-        _state: &'state T, _iter: TokenIter<'source>
-    ) -> Option<Self> {
-        None
-    }
-    fn try_from_atom <'source: 'state> (
-        state: &'state T, value: Value<'source>
-    ) -> Option<Self> {
-        if let Exp(0, iter) = value {
-            return Self::try_from_expr(state, iter.clone())
-        }
-        None
-    }
-}
-
-pub trait TryIntoDsl: Sized {
-    fn try_into_atom (&self) -> Option;
-}
-
-/// Map EDN tokens to parameters of a given type for a given context
-pub trait Context<'state, U>: Sized {
-    fn get <'source> (&'state self, _iter: &mut TokenIter<'source>) -> Option<U> {
-        None
-    }
-}
-
-impl<'state, T: Context<'state, U>, U> Context<'state, U> for &T {
-    fn get <'source> (&'state self, iter: &mut TokenIter<'source>) -> Option<U> {
-        (*self).get(iter)
-    }
-}
-
-impl<'state, T: Context<'state, U>, U> Context<'state, U> for Option<T> {
-    fn get <'source> (&'state self, iter: &mut TokenIter<'source>) -> Option<U> {
-        self.as_ref().map(|s|s.get(iter)).flatten()
-    }
-}
diff --git a/dsl/src/dsl_error.rs b/dsl/src/dsl_error.rs
deleted file mode 100644
index 40b687d..0000000
--- a/dsl/src/dsl_error.rs
+++ /dev/null
@@ -1,15 +0,0 @@
-use crate::*;
-use thiserror::Error;
-pub type ParseResult<T> = Result<T, ParseError>;
-#[derive(Error, Debug, Copy, Clone, PartialEq)] pub enum ParseError {
-    #[error("parse failed: not implemented")]
-    Unimplemented,
-    #[error("parse failed: empty")]
-    Empty,
-    #[error("parse failed: incomplete")]
-    Incomplete,
-    #[error("parse failed: unexpected character '{0}'")]
-    Unexpected(char),
-    #[error("parse failed: error #{0}")]
-    Code(u8),
-}
diff --git a/dsl/src/dsl_iter.rs b/dsl/src/dsl_iter.rs
deleted file mode 100644
index acdbe4b..0000000
--- a/dsl/src/dsl_iter.rs
+++ /dev/null
@@ -1,157 +0,0 @@
-//! The token iterator [TokenIter] allows you to get the
-//! general-purpose syntactic [Token]s represented by the source text.
-//!
-//! 
Both iterators are `peek`able: -//! -//! ``` -//! let src = include_str!("../test.edn"); -//! let mut view = tengri_dsl::TokenIter::new(src); -//! assert_eq!(view.0.0, src); -//! assert_eq!(view.peek(), view.0.peek()) -//! ``` -use crate::*; - -/// Provides a native [Iterator] API over the [ConstIntoIter] [SourceIter] -/// [TokenIter::next] returns just the [Token] and mutates `self`, -/// instead of returning an updated version of the struct as [SourceIter::next] does. -#[derive(Copy, Clone, Debug, Default, PartialEq)] pub struct TokenIter<'a>( - pub SourceIter<'a> -); - -impl<'a> TokenIter<'a> { - pub const fn new (source: &'a str) -> Self { - Self(SourceIter::new(source)) - } - pub const fn peek (&self) -> Option> { - self.0.peek() - } -} - -impl<'a> Iterator for TokenIter<'a> { - type Item = Token<'a>; - fn next (&mut self) -> Option> { - self.0.next().map(|(item, rest)|{self.0 = rest; item}) - } -} - -impl<'a> From<&'a str> for TokenIter<'a> { - fn from (source: &'a str) -> Self{ - Self(SourceIter(source)) - } -} - -impl<'a> From> for TokenIter<'a> { - fn from (source: SourceIter<'a>) -> Self{ - Self(source) - } -} - -/// Owns a reference to the source text. -/// [SourceIter::next] emits subsequent pairs of: -/// * a [Token] and -/// * the source text remaining -/// * [ ] TODO: maybe [SourceIter::next] should wrap the remaining source in `Self` ? -#[derive(Copy, Clone, Debug, Default, PartialEq)] -pub struct SourceIter<'a>(pub &'a str); - -const_iter!(<'a>|self: SourceIter<'a>| => Token<'a> => self.next_mut().map(|(result, _)|result)); - -impl<'a> From<&'a str> for SourceIter<'a> { - fn from (source: &'a str) -> Self{ - Self::new(source) - } -} - -impl<'a> SourceIter<'a> { - pub const fn new (source: &'a str) -> Self { - Self(source) - } - pub const fn chomp (&self, index: usize) -> Self { - Self(split_at(self.0, index).1) - } - pub const fn next (mut self) -> Option<(Token<'a>, Self)> { - Self::next_mut(&mut self) - } - pub const fn peek (&self) -> Option> { - peek_src(self.0) - } - pub const fn next_mut (&mut self) -> Option<(Token<'a>, Self)> { - match self.peek() { - Some(token) => Some((token, self.chomp(token.end()))), - None => None - } - } -} - -pub const fn peek_src <'a> (source: &'a str) -> Option> { - let mut token: Token<'a> = Token::new(source, 0, 0, Nil); - iterate!(char_indices(source) => (start, c) => token = match token.value() { - Err(_) => return Some(token), - Nil => match c { - ' '|'\n'|'\r'|'\t' => - token.grow(), - '(' => - Token::new(source, start, 1, Exp(1, TokenIter::new(str_range(source, start, start + 1)))), - '"' => - Token::new(source, start, 1, Str(str_range(source, start, start + 1))), - ':'|'@' => - Token::new(source, start, 1, Sym(str_range(source, start, start + 1))), - '/'|'a'..='z' => - Token::new(source, start, 1, Key(str_range(source, start, start + 1))), - '0'..='9' => - Token::new(source, start, 1, match to_digit(c) { - Ok(c) => Value::Num(c), - Result::Err(e) => Value::Err(e) - }), - _ => token.error(Unexpected(c)) - }, - Str(_) => match c { - '"' => return Some(token), - _ => token.grow_str(), - }, - Num(n) => match c { - '0'..='9' => token.grow_num(n, c), - ' '|'\n'|'\r'|'\t'|')' => return Some(token), - _ => token.error(Unexpected(c)) - }, - Sym(_) => match c { - 'a'..='z'|'A'..='Z'|'0'..='9'|'-' => token.grow_sym(), - ' '|'\n'|'\r'|'\t'|')' => return Some(token), - _ => token.error(Unexpected(c)) - }, - Key(_) => match c { - 'a'..='z'|'0'..='9'|'-'|'/' => token.grow_key(), - ' '|'\n'|'\r'|'\t'|')' => return Some(token), - _ => 
token.error(Unexpected(c)) - }, - Exp(depth, _) => match depth { - 0 => return Some(token.grow_exp()), - _ => match c { - ')' => token.grow_out(), - '(' => token.grow_in(), - _ => token.grow_exp(), - } - }, - }); - match token.value() { - Nil => None, - _ => Some(token), - } -} - -pub const fn to_number (digits: &str) -> Result { - let mut value = 0; - iterate!(char_indices(digits) => (_, c) => match to_digit(c) { - Ok(digit) => value = 10 * value + digit, - Result::Err(e) => return Result::Err(e) - }); - Ok(value) -} - -pub const fn to_digit (c: char) -> Result { - Ok(match c { - '0' => 0, '1' => 1, '2' => 2, '3' => 3, '4' => 4, - '5' => 5, '6' => 6, '7' => 7, '8' => 8, '9' => 9, - _ => return Result::Err(Unexpected(c)) - }) -} diff --git a/dsl/src/dsl_macros.rs b/dsl/src/dsl_macros.rs deleted file mode 100644 index 93b2cea..0000000 --- a/dsl/src/dsl_macros.rs +++ /dev/null @@ -1,46 +0,0 @@ -/// Static iteration helper. -#[macro_export] macro_rules! iterate { - ($expr:expr => $arg: pat => $body:expr) => { - let mut iter = $expr; - while let Some(($arg, next)) = iter.next() { - $body; - iter = next; - } - } -} - -/// Implement the const iterator pattern. -#[macro_export] macro_rules! const_iter { - ($(<$l:lifetime>)?|$self:ident: $Struct:ty| => $Item:ty => $expr:expr) => { - impl$(<$l>)? Iterator for $Struct { - type Item = $Item; - fn next (&mut $self) -> Option<$Item> { $expr } - } - impl$(<$l>)? ConstIntoIter for $Struct { - type Kind = IsIteratorKind; - type Item = $Item; - type IntoIter = Self; - } - } -} - -#[macro_export] macro_rules! get_value { - ($state:expr => $token:expr) => { - if let Some(value) = $state.get(&$token.value) { - value - } else { - panic!("no value corresponding to {:?}", &$token.value); - } - } -} - -#[macro_export] macro_rules! get_content { - ($state:expr => $token:expr) => { - if let Some(content) = $state.get_content(&$token.value) { - content - } else { - panic!("no content corresponding to {:?}", &$token.value); - } - } -} - diff --git a/dsl/src/dsl_token.rs b/dsl/src/dsl_token.rs deleted file mode 100644 index e26ee93..0000000 --- a/dsl/src/dsl_token.rs +++ /dev/null @@ -1,120 +0,0 @@ -//! [Token]s are parsed substrings with an associated [Value]. -//! -//! * [ ] FIXME: Value may be [Err] which may shadow [Result::Err] -//! * [Value::Exp] wraps an expression depth and a [SourceIter] -//! with the remaining part of the expression. -//! * expression depth other that 0 mean unclosed parenthesis. -//! * closing and unopened parenthesis panics during reading. -//! * [ ] TODO: signed depth might be interesting -//! * [Value::Sym] and [Value::Key] are stringish literals -//! with slightly different parsing rules. -//! * [Value::Num] is an unsigned integer literal. -//!``` -//! use tengri_dsl::{*, Value::*}; -//! let source = include_str!("../test.edn"); -//! let mut view = TokenIter::new(source); -//! assert_eq!(view.peek(), Some(Token { -//! source, -//! start: 0, -//! length: source.len(), -//! value: Exp(0, TokenIter::new(&source[1..])) -//! 
})); -//!``` -use crate::*; - -#[derive(Debug, Copy, Clone, Default, PartialEq)] pub struct Token<'source> { - pub source: &'source str, - pub start: usize, - pub length: usize, - pub value: Value<'source>, -} - -#[derive(Debug, Copy, Clone, Default, PartialEq)] pub enum Value<'source> { - #[default] Nil, - Err(ParseError), - Num(usize), - Sym(&'source str), - Key(&'source str), - Str(&'source str), - Exp(usize, TokenIter<'source>), -} - -impl<'source> Token<'source> { - pub const fn new (source: &'source str, start: usize, length: usize, value: Value<'source>) -> Self { - Self { source, start, length, value } - } - pub const fn end (&self) -> usize { - self.start.saturating_add(self.length) - } - pub const fn slice (&'source self) -> &'source str { - self.slice_source(self.source) - //str_range(self.source, self.start, self.end()) - } - pub const fn slice_source <'range> (&'source self, source: &'range str) -> &'range str { - str_range(source, self.start, self.end()) - } - pub const fn slice_source_exp <'range> (&'source self, source: &'range str) -> &'range str { - str_range(source, self.start.saturating_add(1), self.end()) - } - pub const fn value (&self) -> Value { - self.value - } - pub const fn error (self, error: ParseError) -> Self { - Self { value: Value::Err(error), ..self } - } - pub const fn grow (self) -> Self { - Self { length: self.length.saturating_add(1), ..self } - } - pub const fn grow_num (self, m: usize, c: char) -> Self { - match to_digit(c) { - Ok(n) => Self { value: Num(10*m+n), ..self.grow() }, - Result::Err(e) => Self { value: Err(e), ..self.grow() }, - } - } - pub const fn grow_key (self) -> Self { - let mut token = self.grow(); - token.value = Key(token.slice_source(self.source)); - token - } - pub const fn grow_sym (self) -> Self { - let mut token = self.grow(); - token.value = Sym(token.slice_source(self.source)); - token - } - pub const fn grow_str (self) -> Self { - let mut token = self.grow(); - token.value = Str(token.slice_source(self.source)); - token - } - pub const fn grow_exp (self) -> Self { - let mut token = self.grow(); - if let Exp(depth, _) = token.value { - token.value = Exp(depth, TokenIter::new(token.slice_source_exp(self.source))); - } else { - unreachable!() - } - token - } - pub const fn grow_in (self) -> Self { - let mut token = self.grow_exp(); - if let Value::Exp(depth, source) = token.value { - token.value = Value::Exp(depth.saturating_add(1), source) - } else { - unreachable!() - } - token - } - pub const fn grow_out (self) -> Self { - let mut token = self.grow_exp(); - if let Value::Exp(depth, source) = token.value { - if depth > 0 { - token.value = Value::Exp(depth - 1, source) - } else { - return self.error(Unexpected(')')) - } - } else { - unreachable!() - } - token - } -} diff --git a/dsl/src/lib.rs b/dsl/src/lib.rs index 411974a..52f8281 100644 --- a/dsl/src/lib.rs +++ b/dsl/src/lib.rs @@ -1,3 +1,36 @@ +//! [Token]s are parsed substrings with an associated [Value]. +//! +//! * [ ] FIXME: Value may be [Err] which may shadow [Result::Err] +//! * [Value::Exp] wraps an expression depth and a [SourceIter] +//! with the remaining part of the expression. +//! * expression depth other that 0 mean unclosed parenthesis. +//! * closing and unopened parenthesis panics during reading. +//! * [ ] TODO: signed depth might be interesting +//! * [Value::Sym] and [Value::Key] are stringish literals +//! with slightly different parsing rules. +//! * [Value::Num] is an unsigned integer literal. +//!``` +//! use tengri_dsl::{*, Value::*}; +//! 
let source = include_str!("../test.edn");
+//! let mut view = TokenIter::new(source);
+//! assert_eq!(view.peek(), Some(Token {
+//!     source,
+//!     start: 0,
+//!     length: source.len(),
+//!     value: Exp(0, TokenIter::new(&source[1..]))
+//! }));
+//!```
+//! The token iterator [TokenIter] allows you to get the
+//! general-purpose syntactic [Token]s represented by the source text.
+//!
+//! Both iterators are `peek`able:
+//!
+//! ```
+//! let src = include_str!("../test.edn");
+//! let mut view = tengri_dsl::TokenIter::new(src);
+//! assert_eq!(view.0.0, src);
+//! assert_eq!(view.peek(), view.0.peek())
+//! ```
 #![feature(adt_const_params)]
 #![feature(type_alias_impl_trait)]
 #![feature(impl_trait_in_fn_trait_return)]
@@ -8,11 +41,7 @@
 pub(crate) use konst::iter::{ConstIntoIter, IsIteratorKind};
 pub(crate) use konst::string::{split_at, str_range, char_indices};
 pub(crate) use std::fmt::Debug;
-mod dsl_error; pub use self::dsl_error::*;
-mod dsl_token; pub use self::dsl_token::*;
-mod dsl_iter; pub use self::dsl_iter::*;
-mod dsl_context; pub use self::dsl_context::*;
-mod dsl_macros;
+mod dsl; pub use self::dsl::*;
 
 #[cfg(test)] mod test_token_iter {
     use crate::*;
diff --git a/output/src/ops/transform.rs b/output/src/ops/transform.rs
index 0dd2264..611d22b 100644
--- a/output/src/ops/transform.rs
+++ b/output/src/ops/transform.rs
@@ -43,7 +43,11 @@ macro_rules! transform_xy {
         if k == $x || k == $y || k == $xy {
             let _ = iter.next().unwrap();
             let token = iter.next().expect("no content specified");
-            let content = get_content!(state => token);
+            let content = if let Some(content) = state.get_content(&token.value) {
+                content
+            } else {
+                panic!("no content corresponding to {:?}", &token.value);
+            };
             return Some(match k {
                 $x => Self::x(content),
                 $y => Self::y(content),