mirror of https://codeberg.org/unspeaker/tengri.git (synced 2025-12-06 19:56:44 +01:00)

dsl: compact

parent 5e09f5a4bb
commit ab07fd2b43

8 changed files with 363 additions and 384 deletions
dsl/src/dsl.rs (new file, 324 lines)
@@ -0,0 +1,324 @@
use crate::*;
use thiserror::Error;

pub type ParseResult<T> = Result<T, ParseError>;

#[derive(Error, Debug, Copy, Clone, PartialEq)] pub enum ParseError {
    #[error("parse failed: not implemented")]
    Unimplemented,
    #[error("parse failed: empty")]
    Empty,
    #[error("parse failed: incomplete")]
    Incomplete,
    #[error("parse failed: unexpected character '{0}'")]
    Unexpected(char),
    #[error("parse failed: error #{0}")]
    Code(u8),
}

pub trait TryFromDsl<'state, T>: Sized {
    fn try_from_expr <'source: 'state> (
        _state: &'state T, _iter: TokenIter<'source>
    ) -> Option<Self> {
        None
    }
    fn try_from_atom <'source: 'state> (
        state: &'state T, value: Value<'source>
    ) -> Option<Self> {
        if let Exp(0, iter) = value {
            return Self::try_from_expr(state, iter.clone())
        }
        None
    }
}
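
For illustration, a minimal sketch (not part of this commit) of a parameter type read from a DSL atom; `AppState` and `Bpm` are hypothetical names, and only `TryFromDsl` and `Value` come from this crate:

use tengri_dsl::{TryFromDsl, Value};

struct AppState;
struct Bpm(usize);

impl<'state> TryFromDsl<'state, AppState> for Bpm {
    // Accept a bare numeric atom such as `120`; everything else is rejected.
    fn try_from_atom <'source: 'state> (
        _state: &'state AppState, value: Value<'source>
    ) -> Option<Self> {
        if let Value::Num(n) = value { Some(Bpm(n)) } else { None }
    }
}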

pub trait TryIntoDsl<T>: Sized {
    fn try_into_atom (&self) -> Option<Value>;
}

/// Map EDN tokens to parameters of a given type for a given context
pub trait Context<'state, U>: Sized {
    fn get <'source> (&'state self, _iter: &mut TokenIter<'source>) -> Option<U> {
        None
    }
}

impl<'state, T: Context<'state, U>, U> Context<'state, U> for &T {
    fn get <'source> (&'state self, iter: &mut TokenIter<'source>) -> Option<U> {
        (*self).get(iter)
    }
}

impl<'state, T: Context<'state, U>, U> Context<'state, U> for Option<T> {
    fn get <'source> (&'state self, iter: &mut TokenIter<'source>) -> Option<U> {
        self.as_ref().map(|s|s.get(iter)).flatten()
    }
}
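
A minimal sketch (not part of this commit) of a user-defined context that resolves a `:bpm` symbol or a bare number to a `u16`; `Registers` is a hypothetical name:

use tengri_dsl::{Context, TokenIter, Value};

struct Registers { bpm: u16 }

impl<'state> Context<'state, u16> for Registers {
    fn get <'source> (&'state self, iter: &mut TokenIter<'source>) -> Option<u16> {
        // Read the next token and map its value to a parameter.
        match iter.next().map(|token|token.value) {
            Some(Value::Sym(":bpm")) => Some(self.bpm),
            Some(Value::Num(n)) => u16::try_from(n).ok(),
            _ => None,
        }
    }
}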

/// Implement the const iterator pattern.
#[macro_export] macro_rules! const_iter {
    ($(<$l:lifetime>)?|$self:ident: $Struct:ty| => $Item:ty => $expr:expr) => {
        impl$(<$l>)? Iterator for $Struct {
            type Item = $Item;
            fn next (&mut $self) -> Option<$Item> { $expr }
        }
        impl$(<$l>)? ConstIntoIter for $Struct {
            type Kind = IsIteratorKind;
            type Item = $Item;
            type IntoIter = Self;
        }
    }
}
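
The macro's calling convention is easiest to see on a toy type. A hypothetical sketch (not part of this commit), assuming the caller also depends on `konst` so that `ConstIntoIter` and `IsIteratorKind` resolve at the call site:

use konst::iter::{ConstIntoIter, IsIteratorKind};
use tengri_dsl::const_iter;

// Counts down from the initial value to 1; Countdown(3) yields 3, 2, 1.
#[derive(Copy, Clone)] struct Countdown(usize);

const_iter!(|self: Countdown| => usize => {
    if self.0 == 0 { None } else { self.0 -= 1; Some(self.0 + 1) }
});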

/// Provides a native [Iterator] API over the [ConstIntoIter] [SourceIter].
/// [TokenIter::next] returns just the [Token] and mutates `self`,
/// instead of returning an updated version of the struct as [SourceIter::next] does.
#[derive(Copy, Clone, Debug, Default, PartialEq)]
pub struct TokenIter<'a>(
    pub SourceIter<'a>
);

impl<'a> TokenIter<'a> {
    pub const fn new (source: &'a str) -> Self {
        Self(SourceIter::new(source))
    }
    pub const fn peek (&self) -> Option<Token<'a>> {
        self.0.peek()
    }
}

impl<'a> Iterator for TokenIter<'a> {
    type Item = Token<'a>;
    fn next (&mut self) -> Option<Token<'a>> {
        self.0.next().map(|(item, rest)|{self.0 = rest; item})
    }
}

impl<'a> From<&'a str> for TokenIter<'a> {
    fn from (source: &'a str) -> Self {
        Self(SourceIter(source))
    }
}

impl<'a> From<SourceIter<'a>> for TokenIter<'a> {
    fn from (source: SourceIter<'a>) -> Self {
        Self(source)
    }
}
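
A minimal usage sketch (not part of this commit): since `TokenIter` implements [Iterator], the usual adapters work. Tracing the rules in `peek_src` below, an input such as "foo 42" should yield a `Key` token followed by a `Num` token.

use tengri_dsl::{TokenIter, Value};

fn leading_values<'a> (source: &'a str) -> Vec<Value<'a>> {
    // Collect the values of the first two tokens, if any.
    TokenIter::new(source).take(2).map(|token| token.value).collect()
}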

/// Owns a reference to the source text.
/// [SourceIter::next] emits subsequent pairs of:
/// * a [Token] and
/// * the source text remaining
/// * [ ] TODO: maybe [SourceIter::next] should wrap the remaining source in `Self`?
#[derive(Copy, Clone, Debug, Default, PartialEq)]
pub struct SourceIter<'a>(pub &'a str);

const_iter!(<'a>|self: SourceIter<'a>| => Token<'a> => self.next_mut().map(|(result, _)|result));

impl<'a> From<&'a str> for SourceIter<'a> {
    fn from (source: &'a str) -> Self {
        Self::new(source)
    }
}

impl<'a> SourceIter<'a> {
    pub const fn new (source: &'a str) -> Self {
        Self(source)
    }
    pub const fn chomp (&self, index: usize) -> Self {
        Self(split_at(self.0, index).1)
    }
    pub const fn next (mut self) -> Option<(Token<'a>, Self)> {
        Self::next_mut(&mut self)
    }
    pub const fn peek (&self) -> Option<Token<'a>> {
        peek_src(self.0)
    }
    pub const fn next_mut (&mut self) -> Option<(Token<'a>, Self)> {
        match self.peek() {
            Some(token) => Some((token, self.chomp(token.end()))),
            None => None
        }
    }
}
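
A minimal sketch (not part of this commit) of the const-style pairwise API: each `SourceIter::next` call hands back the token together with an iterator over the remaining source, so it can be chained inside a `const fn`:

use tengri_dsl::SourceIter;

const fn first_two_lengths (source: &str) -> Option<(usize, usize)> {
    match SourceIter::new(source).next() {
        Some((first, rest)) => match rest.next() {
            Some((second, _)) => Some((first.length, second.length)),
            None => None,
        },
        None => None,
    }
}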

/// Static iteration helper.
#[macro_export] macro_rules! iterate {
    ($expr:expr => $arg: pat => $body:expr) => {
        let mut iter = $expr;
        while let Some(($arg, next)) = iter.next() {
            $body;
            iter = next;
        }
    }
}
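
A minimal sketch (not part of this commit) of `iterate!` driving the pairwise `SourceIter::next` outside of const code; the macro rebinds the iterator to the remaining-source half of each pair:

use tengri_dsl::{iterate, SourceIter};

fn token_lengths (source: &str) -> Vec<usize> {
    let mut lengths = Vec::new();
    iterate!(SourceIter::new(source) => token => lengths.push(token.length));
    lengths
}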

pub const fn peek_src <'a> (source: &'a str) -> Option<Token<'a>> {
    let mut token: Token<'a> = Token::new(source, 0, 0, Nil);
    iterate!(char_indices(source) => (start, c) => token = match token.value() {
        Err(_) => return Some(token),
        Nil => match c {
            ' '|'\n'|'\r'|'\t' =>
                token.grow(),
            '(' =>
                Token::new(source, start, 1, Exp(1, TokenIter::new(str_range(source, start, start + 1)))),
            '"' =>
                Token::new(source, start, 1, Str(str_range(source, start, start + 1))),
            ':'|'@' =>
                Token::new(source, start, 1, Sym(str_range(source, start, start + 1))),
            '/'|'a'..='z' =>
                Token::new(source, start, 1, Key(str_range(source, start, start + 1))),
            '0'..='9' =>
                Token::new(source, start, 1, match to_digit(c) {
                    Ok(c) => Value::Num(c),
                    Result::Err(e) => Value::Err(e)
                }),
            _ => token.error(Unexpected(c))
        },
        Str(_) => match c {
            '"' => return Some(token),
            _ => token.grow_str(),
        },
        Num(n) => match c {
            '0'..='9' => token.grow_num(n, c),
            ' '|'\n'|'\r'|'\t'|')' => return Some(token),
            _ => token.error(Unexpected(c))
        },
        Sym(_) => match c {
            'a'..='z'|'A'..='Z'|'0'..='9'|'-' => token.grow_sym(),
            ' '|'\n'|'\r'|'\t'|')' => return Some(token),
            _ => token.error(Unexpected(c))
        },
        Key(_) => match c {
            'a'..='z'|'0'..='9'|'-'|'/' => token.grow_key(),
            ' '|'\n'|'\r'|'\t'|')' => return Some(token),
            _ => token.error(Unexpected(c))
        },
        Exp(depth, _) => match depth {
            0 => return Some(token.grow_exp()),
            _ => match c {
                ')' => token.grow_out(),
                '(' => token.grow_in(),
                _ => token.grow_exp(),
            }
        },
    });
    match token.value() {
        Nil => None,
        _ => Some(token),
    }
}

pub const fn to_number (digits: &str) -> Result<usize, ParseError> {
    let mut value = 0;
    iterate!(char_indices(digits) => (_, c) => match to_digit(c) {
        Ok(digit) => value = 10 * value + digit,
        Result::Err(e) => return Result::Err(e)
    });
    Ok(value)
}

pub const fn to_digit (c: char) -> Result<usize, ParseError> {
    Ok(match c {
        '0' => 0, '1' => 1, '2' => 2, '3' => 3, '4' => 4,
        '5' => 5, '6' => 6, '7' => 7, '8' => 8, '9' => 9,
        _ => return Result::Err(Unexpected(c))
    })
}
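
A small worked check (not part of this commit) of the digit helpers, following the match arms above; it could sit in the crate's test module:

#[test] fn digit_helpers () {
    assert_eq!(to_digit('9'), Ok(9));
    assert_eq!(to_number("42"), Ok(42));
    // A non-digit character aborts the conversion with Unexpected.
    assert_eq!(to_number("7x"), Result::Err(ParseError::Unexpected('x')));
}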

#[derive(Debug, Copy, Clone, Default, PartialEq)] pub struct Token<'source> {
    pub source: &'source str,
    pub start: usize,
    pub length: usize,
    pub value: Value<'source>,
}

#[derive(Debug, Copy, Clone, Default, PartialEq)] pub enum Value<'source> {
    #[default] Nil,
    Err(ParseError),
    Num(usize),
    Sym(&'source str),
    Key(&'source str),
    Str(&'source str),
    Exp(usize, TokenIter<'source>),
}

impl<'source> Token<'source> {
    pub const fn new (source: &'source str, start: usize, length: usize, value: Value<'source>) -> Self {
        Self { source, start, length, value }
    }
    pub const fn end (&self) -> usize {
        self.start.saturating_add(self.length)
    }
    pub const fn slice (&'source self) -> &'source str {
        self.slice_source(self.source)
        //str_range(self.source, self.start, self.end())
    }
    pub const fn slice_source <'range> (&'source self, source: &'range str) -> &'range str {
        str_range(source, self.start, self.end())
    }
    pub const fn slice_source_exp <'range> (&'source self, source: &'range str) -> &'range str {
        str_range(source, self.start.saturating_add(1), self.end())
    }
    pub const fn value (&self) -> Value {
        self.value
    }
    pub const fn error (self, error: ParseError) -> Self {
        Self { value: Value::Err(error), ..self }
    }
    pub const fn grow (self) -> Self {
        Self { length: self.length.saturating_add(1), ..self }
    }
    pub const fn grow_num (self, m: usize, c: char) -> Self {
        match to_digit(c) {
            Ok(n) => Self { value: Num(10*m+n), ..self.grow() },
            Result::Err(e) => Self { value: Err(e), ..self.grow() },
        }
    }
    pub const fn grow_key (self) -> Self {
        let mut token = self.grow();
        token.value = Key(token.slice_source(self.source));
        token
    }
    pub const fn grow_sym (self) -> Self {
        let mut token = self.grow();
        token.value = Sym(token.slice_source(self.source));
        token
    }
    pub const fn grow_str (self) -> Self {
        let mut token = self.grow();
        token.value = Str(token.slice_source(self.source));
        token
    }
    pub const fn grow_exp (self) -> Self {
        let mut token = self.grow();
        if let Exp(depth, _) = token.value {
            token.value = Exp(depth, TokenIter::new(token.slice_source_exp(self.source)));
        } else {
            unreachable!()
        }
        token
    }
    pub const fn grow_in (self) -> Self {
        let mut token = self.grow_exp();
        if let Value::Exp(depth, source) = token.value {
            token.value = Value::Exp(depth.saturating_add(1), source)
        } else {
            unreachable!()
        }
        token
    }
    pub const fn grow_out (self) -> Self {
        let mut token = self.grow_exp();
        if let Value::Exp(depth, source) = token.value {
            if depth > 0 {
                token.value = Value::Exp(depth - 1, source)
            } else {
                return self.error(Unexpected(')'))
            }
        } else {
            unreachable!()
        }
        token
    }
}

(deleted file)
@@ -1,40 +0,0 @@
use crate::*;

pub trait TryFromDsl<'state, T>: Sized {
    fn try_from_expr <'source: 'state> (
        _state: &'state T, _iter: TokenIter<'source>
    ) -> Option<Self> {
        None
    }
    fn try_from_atom <'source: 'state> (
        state: &'state T, value: Value<'source>
    ) -> Option<Self> {
        if let Exp(0, iter) = value {
            return Self::try_from_expr(state, iter.clone())
        }
        None
    }
}

pub trait TryIntoDsl<T>: Sized {
    fn try_into_atom (&self) -> Option<Value>;
}

/// Map EDN tokens to parameters of a given type for a given context
pub trait Context<'state, U>: Sized {
    fn get <'source> (&'state self, _iter: &mut TokenIter<'source>) -> Option<U> {
        None
    }
}

impl<'state, T: Context<'state, U>, U> Context<'state, U> for &T {
    fn get <'source> (&'state self, iter: &mut TokenIter<'source>) -> Option<U> {
        (*self).get(iter)
    }
}

impl<'state, T: Context<'state, U>, U> Context<'state, U> for Option<T> {
    fn get <'source> (&'state self, iter: &mut TokenIter<'source>) -> Option<U> {
        self.as_ref().map(|s|s.get(iter)).flatten()
    }
}

(deleted file)
@@ -1,15 +0,0 @@
use crate::*;
use thiserror::Error;
pub type ParseResult<T> = Result<T, ParseError>;
#[derive(Error, Debug, Copy, Clone, PartialEq)] pub enum ParseError {
    #[error("parse failed: not implemented")]
    Unimplemented,
    #[error("parse failed: empty")]
    Empty,
    #[error("parse failed: incomplete")]
    Incomplete,
    #[error("parse failed: unexpected character '{0}'")]
    Unexpected(char),
    #[error("parse failed: error #{0}")]
    Code(u8),
}

(deleted file)
@@ -1,157 +0,0 @@
//! The token iterator [TokenIter] allows you to get the
//! general-purpose syntactic [Token]s represented by the source text.
//!
//! Both iterators are `peek`able:
//!
//! ```
//! let src = include_str!("../test.edn");
//! let mut view = tengri_dsl::TokenIter::new(src);
//! assert_eq!(view.0.0, src);
//! assert_eq!(view.peek(), view.0.peek())
//! ```
use crate::*;

/// Provides a native [Iterator] API over the [ConstIntoIter] [SourceIter].
/// [TokenIter::next] returns just the [Token] and mutates `self`,
/// instead of returning an updated version of the struct as [SourceIter::next] does.
#[derive(Copy, Clone, Debug, Default, PartialEq)] pub struct TokenIter<'a>(
    pub SourceIter<'a>
);

impl<'a> TokenIter<'a> {
    pub const fn new (source: &'a str) -> Self {
        Self(SourceIter::new(source))
    }
    pub const fn peek (&self) -> Option<Token<'a>> {
        self.0.peek()
    }
}

impl<'a> Iterator for TokenIter<'a> {
    type Item = Token<'a>;
    fn next (&mut self) -> Option<Token<'a>> {
        self.0.next().map(|(item, rest)|{self.0 = rest; item})
    }
}

impl<'a> From<&'a str> for TokenIter<'a> {
    fn from (source: &'a str) -> Self {
        Self(SourceIter(source))
    }
}

impl<'a> From<SourceIter<'a>> for TokenIter<'a> {
    fn from (source: SourceIter<'a>) -> Self {
        Self(source)
    }
}

/// Owns a reference to the source text.
/// [SourceIter::next] emits subsequent pairs of:
/// * a [Token] and
/// * the source text remaining
/// * [ ] TODO: maybe [SourceIter::next] should wrap the remaining source in `Self`?
#[derive(Copy, Clone, Debug, Default, PartialEq)]
pub struct SourceIter<'a>(pub &'a str);

const_iter!(<'a>|self: SourceIter<'a>| => Token<'a> => self.next_mut().map(|(result, _)|result));

impl<'a> From<&'a str> for SourceIter<'a> {
    fn from (source: &'a str) -> Self {
        Self::new(source)
    }
}

impl<'a> SourceIter<'a> {
    pub const fn new (source: &'a str) -> Self {
        Self(source)
    }
    pub const fn chomp (&self, index: usize) -> Self {
        Self(split_at(self.0, index).1)
    }
    pub const fn next (mut self) -> Option<(Token<'a>, Self)> {
        Self::next_mut(&mut self)
    }
    pub const fn peek (&self) -> Option<Token<'a>> {
        peek_src(self.0)
    }
    pub const fn next_mut (&mut self) -> Option<(Token<'a>, Self)> {
        match self.peek() {
            Some(token) => Some((token, self.chomp(token.end()))),
            None => None
        }
    }
}

pub const fn peek_src <'a> (source: &'a str) -> Option<Token<'a>> {
    let mut token: Token<'a> = Token::new(source, 0, 0, Nil);
    iterate!(char_indices(source) => (start, c) => token = match token.value() {
        Err(_) => return Some(token),
        Nil => match c {
            ' '|'\n'|'\r'|'\t' =>
                token.grow(),
            '(' =>
                Token::new(source, start, 1, Exp(1, TokenIter::new(str_range(source, start, start + 1)))),
            '"' =>
                Token::new(source, start, 1, Str(str_range(source, start, start + 1))),
            ':'|'@' =>
                Token::new(source, start, 1, Sym(str_range(source, start, start + 1))),
            '/'|'a'..='z' =>
                Token::new(source, start, 1, Key(str_range(source, start, start + 1))),
            '0'..='9' =>
                Token::new(source, start, 1, match to_digit(c) {
                    Ok(c) => Value::Num(c),
                    Result::Err(e) => Value::Err(e)
                }),
            _ => token.error(Unexpected(c))
        },
        Str(_) => match c {
            '"' => return Some(token),
            _ => token.grow_str(),
        },
        Num(n) => match c {
            '0'..='9' => token.grow_num(n, c),
            ' '|'\n'|'\r'|'\t'|')' => return Some(token),
            _ => token.error(Unexpected(c))
        },
        Sym(_) => match c {
            'a'..='z'|'A'..='Z'|'0'..='9'|'-' => token.grow_sym(),
            ' '|'\n'|'\r'|'\t'|')' => return Some(token),
            _ => token.error(Unexpected(c))
        },
        Key(_) => match c {
            'a'..='z'|'0'..='9'|'-'|'/' => token.grow_key(),
            ' '|'\n'|'\r'|'\t'|')' => return Some(token),
            _ => token.error(Unexpected(c))
        },
        Exp(depth, _) => match depth {
            0 => return Some(token.grow_exp()),
            _ => match c {
                ')' => token.grow_out(),
                '(' => token.grow_in(),
                _ => token.grow_exp(),
            }
        },
    });
    match token.value() {
        Nil => None,
        _ => Some(token),
    }
}

pub const fn to_number (digits: &str) -> Result<usize, ParseError> {
    let mut value = 0;
    iterate!(char_indices(digits) => (_, c) => match to_digit(c) {
        Ok(digit) => value = 10 * value + digit,
        Result::Err(e) => return Result::Err(e)
    });
    Ok(value)
}

pub const fn to_digit (c: char) -> Result<usize, ParseError> {
    Ok(match c {
        '0' => 0, '1' => 1, '2' => 2, '3' => 3, '4' => 4,
        '5' => 5, '6' => 6, '7' => 7, '8' => 8, '9' => 9,
        _ => return Result::Err(Unexpected(c))
    })
}

(deleted file)
@@ -1,46 +0,0 @@
/// Static iteration helper.
#[macro_export] macro_rules! iterate {
    ($expr:expr => $arg: pat => $body:expr) => {
        let mut iter = $expr;
        while let Some(($arg, next)) = iter.next() {
            $body;
            iter = next;
        }
    }
}

/// Implement the const iterator pattern.
#[macro_export] macro_rules! const_iter {
    ($(<$l:lifetime>)?|$self:ident: $Struct:ty| => $Item:ty => $expr:expr) => {
        impl$(<$l>)? Iterator for $Struct {
            type Item = $Item;
            fn next (&mut $self) -> Option<$Item> { $expr }
        }
        impl$(<$l>)? ConstIntoIter for $Struct {
            type Kind = IsIteratorKind;
            type Item = $Item;
            type IntoIter = Self;
        }
    }
}

#[macro_export] macro_rules! get_value {
    ($state:expr => $token:expr) => {
        if let Some(value) = $state.get(&$token.value) {
            value
        } else {
            panic!("no value corresponding to {:?}", &$token.value);
        }
    }
}

#[macro_export] macro_rules! get_content {
    ($state:expr => $token:expr) => {
        if let Some(content) = $state.get_content(&$token.value) {
            content
        } else {
            panic!("no content corresponding to {:?}", &$token.value);
        }
    }
}

(deleted file)
@@ -1,120 +0,0 @@
//! [Token]s are parsed substrings with an associated [Value].
//!
//! * [ ] FIXME: Value may be [Err] which may shadow [Result::Err]
//! * [Value::Exp] wraps an expression depth and a [SourceIter]
//!   with the remaining part of the expression.
//!   * expression depth other than 0 means an unclosed parenthesis.
//!   * closing an unopened parenthesis panics during reading.
//!   * [ ] TODO: signed depth might be interesting
//! * [Value::Sym] and [Value::Key] are stringish literals
//!   with slightly different parsing rules.
//! * [Value::Num] is an unsigned integer literal.
//!```
//! use tengri_dsl::{*, Value::*};
//! let source = include_str!("../test.edn");
//! let mut view = TokenIter::new(source);
//! assert_eq!(view.peek(), Some(Token {
//!     source,
//!     start: 0,
//!     length: source.len(),
//!     value: Exp(0, TokenIter::new(&source[1..]))
//! }));
//!```
use crate::*;

#[derive(Debug, Copy, Clone, Default, PartialEq)] pub struct Token<'source> {
    pub source: &'source str,
    pub start: usize,
    pub length: usize,
    pub value: Value<'source>,
}

#[derive(Debug, Copy, Clone, Default, PartialEq)] pub enum Value<'source> {
    #[default] Nil,
    Err(ParseError),
    Num(usize),
    Sym(&'source str),
    Key(&'source str),
    Str(&'source str),
    Exp(usize, TokenIter<'source>),
}

impl<'source> Token<'source> {
    pub const fn new (source: &'source str, start: usize, length: usize, value: Value<'source>) -> Self {
        Self { source, start, length, value }
    }
    pub const fn end (&self) -> usize {
        self.start.saturating_add(self.length)
    }
    pub const fn slice (&'source self) -> &'source str {
        self.slice_source(self.source)
        //str_range(self.source, self.start, self.end())
    }
    pub const fn slice_source <'range> (&'source self, source: &'range str) -> &'range str {
        str_range(source, self.start, self.end())
    }
    pub const fn slice_source_exp <'range> (&'source self, source: &'range str) -> &'range str {
        str_range(source, self.start.saturating_add(1), self.end())
    }
    pub const fn value (&self) -> Value {
        self.value
    }
    pub const fn error (self, error: ParseError) -> Self {
        Self { value: Value::Err(error), ..self }
    }
    pub const fn grow (self) -> Self {
        Self { length: self.length.saturating_add(1), ..self }
    }
    pub const fn grow_num (self, m: usize, c: char) -> Self {
        match to_digit(c) {
            Ok(n) => Self { value: Num(10*m+n), ..self.grow() },
            Result::Err(e) => Self { value: Err(e), ..self.grow() },
        }
    }
    pub const fn grow_key (self) -> Self {
        let mut token = self.grow();
        token.value = Key(token.slice_source(self.source));
        token
    }
    pub const fn grow_sym (self) -> Self {
        let mut token = self.grow();
        token.value = Sym(token.slice_source(self.source));
        token
    }
    pub const fn grow_str (self) -> Self {
        let mut token = self.grow();
        token.value = Str(token.slice_source(self.source));
        token
    }
    pub const fn grow_exp (self) -> Self {
        let mut token = self.grow();
        if let Exp(depth, _) = token.value {
            token.value = Exp(depth, TokenIter::new(token.slice_source_exp(self.source)));
        } else {
            unreachable!()
        }
        token
    }
    pub const fn grow_in (self) -> Self {
        let mut token = self.grow_exp();
        if let Value::Exp(depth, source) = token.value {
            token.value = Value::Exp(depth.saturating_add(1), source)
        } else {
            unreachable!()
        }
        token
    }
    pub const fn grow_out (self) -> Self {
        let mut token = self.grow_exp();
        if let Value::Exp(depth, source) = token.value {
            if depth > 0 {
                token.value = Value::Exp(depth - 1, source)
            } else {
                return self.error(Unexpected(')'))
            }
        } else {
            unreachable!()
        }
        token
    }
}

(modified file)
@@ -1,3 +1,36 @@
//! [Token]s are parsed substrings with an associated [Value].
//!
//! * [ ] FIXME: Value may be [Err] which may shadow [Result::Err]
//! * [Value::Exp] wraps an expression depth and a [SourceIter]
//!   with the remaining part of the expression.
//!   * expression depth other than 0 means an unclosed parenthesis.
//!   * closing an unopened parenthesis panics during reading.
//!   * [ ] TODO: signed depth might be interesting
//! * [Value::Sym] and [Value::Key] are stringish literals
//!   with slightly different parsing rules.
//! * [Value::Num] is an unsigned integer literal.
//!```
//! use tengri_dsl::{*, Value::*};
//! let source = include_str!("../test.edn");
//! let mut view = TokenIter::new(source);
//! assert_eq!(view.peek(), Some(Token {
//!     source,
//!     start: 0,
//!     length: source.len(),
//!     value: Exp(0, TokenIter::new(&source[1..]))
//! }));
//!```
//! The token iterator [TokenIter] allows you to get the
//! general-purpose syntactic [Token]s represented by the source text.
//!
//! Both iterators are `peek`able:
//!
//! ```
//! let src = include_str!("../test.edn");
//! let mut view = tengri_dsl::TokenIter::new(src);
//! assert_eq!(view.0.0, src);
//! assert_eq!(view.peek(), view.0.peek())
//! ```
#![feature(adt_const_params)]
#![feature(type_alias_impl_trait)]
#![feature(impl_trait_in_fn_trait_return)]

@@ -8,11 +41,7 @@ pub(crate) use konst::iter::{ConstIntoIter, IsIteratorKind};
pub(crate) use konst::string::{split_at, str_range, char_indices};
pub(crate) use std::fmt::Debug;

mod dsl_error; pub use self::dsl_error::*;
mod dsl_token; pub use self::dsl_token::*;
mod dsl_iter; pub use self::dsl_iter::*;
mod dsl_context; pub use self::dsl_context::*;
mod dsl_macros;
mod dsl; pub use self::dsl::*;

#[cfg(test)] mod test_token_iter {
    use crate::*;

Loading…