This commit is contained in:
Alain Emilia Anna Zscheile 2024-05-26 13:48:25 +02:00
parent 80b80f4760
commit 0e3f680f89
9 changed files with 1043 additions and 3 deletions

Cargo.lock

@@ -441,8 +441,6 @@ version = "0.1.0"
dependencies = [
"miette",
"thiserror",
"unicode-ident",
"unicode-normalization",
"yz-string-utils",
"zxtw2-literal",
]

@@ -0,0 +1,14 @@
[package]
name = "zxtw2-syntax"
version = "0.1.0"
edition = "2021"
license = "Apache-2.0"

[dependencies]
miette = "7.2"
thiserror = "1.0"
zxtw2-literal.path = "../zxtw2-literal"

[dependencies.yz-string-utils]
version = "0.4"
features = ["consume-ident"]

@@ -0,0 +1,168 @@
/*
* SPDX-FileCopyrightText: 2023 Alain Zscheile <fogti+devel@ytrizja.de>
*
* SPDX-License-Identifier: Apache-2.0
*/
use core::{fmt, result::Result as CoreResult};
use miette::Diagnostic;
use std::sync::Arc;
use crate::EvEqSourceSpan;
#[derive(Clone, Debug)]
pub struct Error {
pub span: EvEqSourceSpan,
pub kind: ErrorKind,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if f.alternate() {
write!(f, "{}: ", self.span)?;
}
fmt::Display::fmt(&self.kind, f)
}
}
impl std::error::Error for Error {
#[inline(always)]
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
self.kind.source()
}
}
pub type Result<T> = CoreResult<T, Error>;
#[derive(Debug)]
pub struct FullError<S: miette::SourceCode + 'static> {
pub span: EvEqSourceSpan,
pub kind: ErrorKind,
pub code: miette::NamedSource<S>,
}
impl Error {
#[inline]
pub fn with_code<S: miette::SourceCode + 'static>(
self,
code: miette::NamedSource<S>,
) -> FullError<S> {
FullError {
span: self.span,
kind: self.kind,
code,
}
}
}
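// A minimal usage sketch (illustrative only; `src` and "input.xtw2" are placeholder
// names, and the error-reporting setup may differ in practice):
//
//     let named = miette::NamedSource::new("input.xtw2", src.to_string());
//     let report = miette::Report::new(err.with_code(named));
//     eprintln!("{:?}", report);
//
// Attaching the named source lets miette render the offending span against the
// original input text.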
impl<S: miette::SourceCode + 'static> fmt::Display for FullError<S> {
#[inline(always)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.kind, f)
}
}
impl<S: miette::SourceCode + fmt::Debug + 'static> std::error::Error for FullError<S> {
#[inline(always)]
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
self.kind.source()
}
}
impl<S: miette::SourceCode + fmt::Debug + 'static> miette::Diagnostic for FullError<S> {
#[inline(always)]
fn code<'a>(&'a self) -> Option<Box<dyn fmt::Display + 'a>> {
self.kind.code()
}
#[inline(always)]
fn severity(&self) -> Option<miette::Severity> {
None
}
#[inline(always)]
fn source_code(&self) -> Option<&dyn miette::SourceCode> {
Some(&self.code)
}
#[inline(always)]
fn labels(&self) -> Option<Box<dyn Iterator<Item = miette::LabeledSpan> + '_>> {
use miette::LabeledSpan as Lsp;
Some(Box::new(
Some(Lsp::new(None, self.span.0.offset(), self.span.0.len())).into_iter(),
))
}
}
#[derive(Clone, Debug, Diagnostic, thiserror::Error)]
pub enum ErrorKind {
#[error("expected {0}")]
#[diagnostic(code(yanais::parser::expected))]
Expected(ErrorCtx),
#[error("end of file encountered inside {0}")]
#[diagnostic(code(yanais::parser::unexpected_eof))]
UnexpectedEof(ErrorCtx),
#[error("unexpected token in context {0:?}: {1:?}")]
UnexpectedToken(ErrorCtx, crate::lex::TokenKind),
#[error("unhandled character '{0}'")]
#[diagnostic(code(yanais::lexer::unhandled_char))]
UnhandledChar(char),
#[error(transparent)]
#[diagnostic(code(yanais::parser::invalid_int))]
InvalidInt(#[from] core::num::ParseIntError),
#[error("comment nesting overflowed")]
#[diagnostic(code(yanais::lexer::comment_nest_overflow))]
CommentNestOverflow,
#[error("duplicated record field identifier {0:?}")]
#[diagnostic(code(yanais::parser::record_dup_ident))]
RecordDupIdent(Arc<str>),
#[error("duplicated pattern field identifier {0:?}")]
#[diagnostic(code(yanais::parser::pattern_dup_ident))]
PatternDupIdent(Arc<str>),
#[error("unknown identifer {0:?}")]
#[diagnostic(code(yanais::parser::unknown_ident))]
UnknownIdent(Arc<str>),
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum ErrorCtx {
Comment,
Expression,
Lambda,
Let,
Literal,
Parentheses,
Pattern,
Record,
RefOf,
Select,
String,
Ident,
}
impl fmt::Display for ErrorCtx {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(match self {
ErrorCtx::Comment => "comment",
ErrorCtx::Expression => "expression",
ErrorCtx::Lambda => "(ty)lambda",
ErrorCtx::Let => "let expression",
ErrorCtx::Literal => "literal",
ErrorCtx::Parentheses => "parentheses",
ErrorCtx::Pattern => "pattern",
ErrorCtx::Record => "record",
ErrorCtx::RefOf => "ref-of",
ErrorCtx::Select => "selection",
ErrorCtx::String => "string",
ErrorCtx::Ident => "identifier",
})
}
}

@@ -0,0 +1,182 @@
use crate::lex::{Token, TokenKind as Tok};
use crate::{
pat::PatternMT, record::Record, Env as ParseEnv, Error, ErrorCtx as Ectx, ErrorKind as Pek,
EvEqSourceSpan, Parse, Result, SelIdent,
};
use zxtw2_literal::Literal;
#[derive(Clone, Debug)]
pub struct Lambda {
pub pat: PatternMT,
pub body_span: EvEqSourceSpan,
pub body: Box<Expr>,
}
impl Parse for Lambda {
fn parse(env: &mut ParseEnv<'_>) -> Result<Self> {
let pat = PatternMT::parse(env)?;
env.lxr.expect(Tok::RArr, Ectx::Lambda)?;
let body_start = env.lxr.offset();
let body = Box::new(Expr::parse(env)?);
let body_end = env.lxr.offset();
Ok(Self {
pat,
body_span: (body_start..body_end).into(),
body,
})
}
}
#[derive(Clone, Debug)]
pub enum Expr {
Infer,
Literal(Literal),
Quote(usize),
Use(SelIdent),
RefUse(SelIdent),
Lambda(Lambda),
TyLambda { binds: Box<Expr>, lam: Lambda },
Apply(Box<Expr>, Vec<(EvEqSourceSpan, Expr)>),
Record(Record<Expr>),
TyRecord(Record<Expr>),
}
/// parse a "minimal expression", meaning it can be a lambda or argument for an apply expression
/// so it shouldn't "snap up" stuff like an apply expression
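/// (e.g. while parsing `f a b`, this function is called once each for `f`, `a`
/// and `b`, and `Expr::parse` then folds them into a single `Expr::Apply` node)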
fn parse_minexpr(env: &mut ParseEnv<'_>) -> Result<Expr> {
let Token {
span: fi_span,
kind: fi_kind,
} = env.lxr.next_in_noeof(Ectx::Expression)?;
match fi_kind {
Tok::Ident(i) => {
if let Some(x) = env.lookup(&i) {
Ok(Expr::Use(SelIdent {
span: fi_span,
dbidx: x,
}))
} else {
Err(Error {
span: fi_span,
kind: Pek::UnknownIdent(i),
})
}
}
Tok::RefOf => {
let Token { span, kind } = env.lxr.next_in_noeof(Ectx::RefOf)?;
match kind {
Tok::Ident(i) => {
if let Some(x) = env.lookup(&i) {
Ok(Expr::RefUse(SelIdent { span, dbidx: x }))
} else {
Err(Error {
span,
kind: Pek::UnknownIdent(i),
})
}
}
_ => Err(Error {
span,
kind: Pek::UnexpectedToken(Ectx::RefOf, kind),
}),
}
}
Tok::Infer => Ok(Expr::Infer),
Tok::Literal(lit) => Ok(Expr::Literal(lit)),
Tok::Lambda => Lambda::parse(env).map(Expr::Lambda),
Tok::TyLambda => {
env.lxr.expect(Tok::LParen, Ectx::Parentheses)?;
let binds = Box::new(Expr::parse(env)?);
env.lxr.expect(Tok::RParen, Ectx::Parentheses)?;
let lam = Lambda::parse(env)?;
Ok(Expr::TyLambda { binds, lam })
}
Tok::LParen => {
let inner = Expr::parse(env)?;
env.lxr.expect(Tok::RParen, Ectx::Parentheses)?;
Ok(inner)
}
_ => Err(Error {
span: fi_span,
kind: Pek::UnexpectedToken(Ectx::Expression, fi_kind),
}),
}
// TODO: support record destructuring when dealing with references to records...
}
impl Parse for Expr {
fn parse(env: &mut ParseEnv<'_>) -> Result<Self> {
fn parse_inner(env: &mut ParseEnv<'_>) -> Result<Expr> {
use crate::pat::PatternTrait;
let mut letbinds = Vec::<(_, EvEqSourceSpan, _, usize)>::new();
while env.lxr.got(Tok::Let).is_some() {
let key = PatternMT::parse(env)?;
env.lxr.expect(Tok::Assign, Ectx::Let)?;
let value_start = env.lxr.offset();
let value = Expr::parse(env)?;
let value_end = env.lxr.offset();
key.push_to_penv(env);
env.lxr.expect(Tok::SemiColon, Ectx::Let)?;
let next_start = env.lxr.peek_span().offset();
letbinds.push((key, (value_start..value_end).into(), value, next_start));
}
let mut args = Vec::new();
let mut base = parse_minexpr(env)?;
let reside_knamcnt = env.names.len();
loop {
let lxrbak = env.lxr.clone();
let arg_start = env.lxr.offset();
let arg = parse_minexpr(env);
let arg_end = env.lxr.offset();
assert_eq!(env.names.len(), reside_knamcnt);
match arg {
Ok(x) => args.push(((arg_start..arg_end).into(), x)),
Err(_) => {
// the argument failed to parse: restore the lexer so its tokens aren't
// silently eaten, then stop collecting arguments
env.lxr = lxrbak;
break;
}
}
}
if !args.is_empty() {
base = Expr::Apply(Box::new(base), args);
}
if !letbinds.is_empty() {
// turn let bindings into lambda invocations...
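// e.g. `let $x = v; body` (with `=` standing for the `Assign` token) becomes
// `(λ $x → body) v`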
let body_span_end = env.lxr.offset();
for (pat, value_span, value, next_start) in letbinds.into_iter().rev() {
base = Expr::Apply(
Box::new(Expr::Lambda(Lambda {
pat,
body_span: (next_start..body_span_end).into(),
body: Box::new(base),
})),
vec![(value_span, value)],
);
}
}
Ok(base)
}
// make sure we don't leak names
// (the assert_eq checks serve the same purpose)
let knamcnt = env.names.len();
let res = parse_inner(env);
assert!(env.names.len() >= knamcnt);
env.names.truncate(knamcnt);
res
}
}

@@ -0,0 +1,263 @@
/*
* SPDX-FileCopyrightText: 2023 Alain Zscheile <fogti+devel@ytrizja.de>
*
* SPDX-License-Identifier: Apache-2.0
*/
use core::fmt;
use std::sync::Arc;
use super::{Error, ErrorCtx, ErrorKind, EvEqSourceSpan, Result};
pub use yz_string_utils::StrLexerBase;
use zxtw2_literal::Literal;
#[derive(Clone)]
pub struct Lexer<'a> {
inner: StrLexerBase<'a>,
}
#[derive(Clone, Debug, PartialEq)]
pub struct Token {
pub kind: TokenKind,
pub span: EvEqSourceSpan,
}
impl fmt::Display for Token {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "@ {}: {:?}", self.span, self.kind)
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum TokenKind {
Ident(Arc<str>),
PatOut(Arc<str>),
DotIdent(Arc<str>),
String(Box<str>),
Literal(Literal),
Caret,
LParen,
RParen,
LBrace,
RBrace,
LArr,
RArr,
Dot,
RefOf,
DubColon,
SemiColon,
Assign,
// keywords
Lambda,
TyLambda,
Infer,
Let,
}
impl crate::MaybeParse for (EvEqSourceSpan, Literal) {
const DFL_CTX: ErrorCtx = ErrorCtx::Literal;
fn maybe_parse(env: &mut crate::Env<'_>) -> Result<Option<Self>> {
let mut nxtlxr = env.lxr.clone();
let Token {
kind,
span: tok_span,
} = match nxtlxr.next().transpose()? {
None => return Ok(None),
Some(x) => x,
};
use TokenKind as Tk;
Ok(if let Tk::Literal(lit) = kind {
env.lxr = nxtlxr;
Some((tok_span, lit))
} else {
None
})
}
}
impl<'a> Lexer<'a> {
#[inline]
pub fn new(inp: &'a str) -> Self {
Self {
inner: StrLexerBase { inp, offset: 0 },
}
}
#[inline(always)]
pub fn offset(&self) -> usize {
self.inner.offset
}
pub fn peek(&self) -> Option<Result<Token>> {
self.clone().next()
}
pub fn peek_span(&self) -> EvEqSourceSpan {
let mut this = self.clone();
match this.next() {
Some(Ok(Token { span, .. })) => span,
Some(Err(Error { span, .. })) => span,
None => (this.inner.offset..this.inner.offset).into(),
}
}
// handle EOF as error
pub fn next_in_noeof(&mut self, ctx: ErrorCtx) -> Result<Token> {
let offset = self.offset();
self.next().unwrap_or_else(|| {
Err(Error {
span: (offset..offset).into(),
kind: ErrorKind::UnexpectedEof(ctx),
})
})
}
// consume token if it is expected (for optional tokens)
pub fn got(&mut self, xkind: TokenKind) -> Option<EvEqSourceSpan> {
let mut nxt = self.clone();
match nxt.next() {
Some(Ok(Token { span, kind })) if xkind == kind => {
*self = nxt;
Some(span)
}
_ => None,
}
}
// like `got`, but produce a proper error message if it is not there
pub fn expect(&mut self, xkind: TokenKind, ctx: ErrorCtx) -> Result<EvEqSourceSpan> {
let mut nxt = self.clone();
let Token { span, kind } = nxt.next_in_noeof(ctx)?;
if xkind == kind {
*self = nxt;
Ok(span)
} else {
Err(Error {
span,
kind: ErrorKind::Expected(ctx),
})
}
}
}
impl Iterator for Lexer<'_> {
type Item = Result<Token>;
fn next(&mut self) -> Option<Result<Token>> {
use TokenKind as Tk;
let slb = &mut self.inner;
let mut offset;
let tmp = 'lvl: loop {
// handle whitespace
slb.consume_select(|i| i.is_whitespace());
if slb.inp.is_empty() {
return None;
}
offset = slb.offset;
break match slb.inp.chars().next()? {
'0'..='9' => {
let s = slb.consume_select(|i| i.is_ascii_digit());
debug_assert!(!s.is_empty());
s.parse()
.map(|i| TokenKind::Literal(Literal::Natural(i)))
.map_err(|e| e.into())
}
'"' => {
let mut escape = false;
let mut it = slb.inp.chars().peekable();
let mut res = String::new();
loop {
let x = match it.next() {
None => break 'lvl Err(ErrorKind::UnexpectedEof(ErrorCtx::String)),
Some(x) => x,
};
slb.consume(x.len_utf8());
if escape {
escape = false;
// TODO: parse escape codes...
res.push(x);
} else {
match x {
'"' => break,
'\\' => escape = true,
_ => res.push(x),
}
}
}
Ok(Tk::String(res.into_boxed_str()))
}
c => {
if let Some(s) = slb.try_consume_ident() {
// identifier
let s: Arc<str> = s.into();
// handle keywords
break Ok(match &*s {
"λ" => Tk::Lambda,
"Λ" => Tk::TyLambda,
"infer" => Tk::Infer,
"let" => Tk::Let,
_ => Tk::Ident(s),
});
}
slb.consume(c.len_utf8());
Ok(match c {
'.' => Tk::DotIdent(slb.try_consume_ident().map_or_else(|| Arc::from(""), Arc::from)),
'$' => Tk::PatOut(slb.try_consume_ident().map_or_else(|| Arc::from(""), Arc::from)),
'&' => Tk::RefOf,
';' => Tk::SemiColon,
'^' => Tk::Caret,
'←' => Tk::LArr,
'→' => Tk::RArr,
'{' => Tk::LBrace,
'}' => Tk::RBrace,
'(' /* ')' */ => {
if slb.inp.starts_with('*') {
// comment
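// `(*` .. `*)` comments may nest, e.g. `(* outer (* inner *) outer *)`,
// hence the nesting counter below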
let mut lvl = 1u32;
let mut it = slb.inp.chars().peekable();
while lvl > 0 {
let c = match it.next() {
Some(c) => c,
None => break 'lvl Err(ErrorKind::UnexpectedEof(ErrorCtx::Comment)),
};
slb.consume(c.len_utf8());
let c2 = it.peek().copied();
match (c, c2) {
('(', Some('*')) => lvl = match lvl.checked_add(1) {
Some(x) => x,
None => break 'lvl Err(ErrorKind::CommentNestOverflow),
},
('*', Some(')')) => {
lvl -= 1;
it.next();
slb.consume(1);
},
_ => {}
}
}
continue;
} else {
Tk::LParen
}
}
/* '(' */ ')' => Tk::RParen,
_ => break 'lvl Err(ErrorKind::UnhandledChar(c)),
})
}
};
};
let span = (offset..slb.offset).into();
Some(
tmp.map(|kind| Token { span, kind })
.map_err(|kind| Error { span, kind }),
)
}
}
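// A minimal usage sketch (the input string is just an example):
//
//     for tok in Lexer::new("λ $x → x") {
//         match tok {
//             Ok(t) => println!("{t}"),
//             Err(e) => eprintln!("lex error: {e}"),
//         }
//     }
//
// Each item is a `Result<Token>`, so a lexing error is reported for the
// offending token instead of silently ending the stream.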

@@ -0,0 +1,199 @@
/*
* SPDX-FileCopyrightText: 2023 Alain Zscheile <fogti+devel@ytrizja.de>
*
* SPDX-License-Identifier: Apache-2.0
*/
use core::{cmp, fmt};
use miette::SourceSpan;
use std::sync::Arc;
mod error;
pub use error::{Error, ErrorCtx, ErrorKind, FullError, Result};
pub mod expr;
pub mod lex;
pub mod pat;
pub mod record;
pub use zxtw2_literal::Literal;
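/// Return `Ok(None)` early from a `maybe_parse`-style function if the given
/// `Option` is `None`, otherwise unwrap it.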
#[macro_export]
macro_rules! none_up {
($x:expr) => {
match $x {
None => return Ok(None),
Some(x) => x,
}
};
}
/// A SourceSpan which is always equal to all other SourceSpans
/// (to be used to ignore span differences in expression comparisons)
#[derive(Clone, Copy)]
pub struct EvEqSourceSpan(pub SourceSpan);
impl EvEqSourceSpan {
#[inline(always)]
pub fn offset(&self) -> usize {
self.0.offset()
}
#[inline(always)]
pub fn len(&self) -> usize {
self.0.len()
}
#[inline(always)]
pub fn is_empty(&self) -> bool {
self.0.len() == 0
}
}
impl cmp::PartialEq for EvEqSourceSpan {
#[inline(always)]
fn eq(&self, _: &Self) -> bool {
true
}
}
impl fmt::Display for EvEqSourceSpan {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}..+{}", self.0.offset(), self.0.len())
}
}
impl fmt::Debug for EvEqSourceSpan {
#[inline(always)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
<Self as fmt::Display>::fmt(self, f)
}
}
impl From<(usize, usize)> for EvEqSourceSpan {
#[inline(always)]
fn from(x: (usize, usize)) -> Self {
Self(x.into())
}
}
impl From<core::ops::Range<usize>> for EvEqSourceSpan {
#[inline(always)]
fn from(x: core::ops::Range<usize>) -> Self {
Self(x.into())
}
}
impl From<EvEqSourceSpan> for SourceSpan {
#[inline(always)]
fn from(x: EvEqSourceSpan) -> SourceSpan {
x.0
}
}
impl<'a> From<&'a EvEqSourceSpan> for SourceSpan {
#[inline(always)]
fn from(x: &'a EvEqSourceSpan) -> SourceSpan {
x.0
}
}
#[derive(Clone)]
pub struct Env<'a> {
pub lxr: lex::Lexer<'a>,
pub names: Vec<Arc<str>>,
}
impl<'a> Env<'a> {
pub fn new(lxr: lex::Lexer<'a>) -> Self {
Self {
lxr,
names: Vec::new(),
}
}
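/// Resolve a name to its de Bruijn index: 0 refers to the innermost
/// (most recently pushed) binding, counting outwards.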
pub fn lookup(&self, name: &str) -> Option<usize> {
self.names.iter().rev().enumerate().find_map(
|(n, i)| {
if &**i == name {
Some(n)
} else {
None
}
},
)
}
}
pub trait Parse: Sized {
fn parse(env: &mut Env<'_>) -> Result<Self>;
}
pub trait MaybeParse: Sized {
const DFL_CTX: ErrorCtx;
/// this function makes it possible to clearly differentiate between
/// recoverable failures (`Ok(None)`) and unrecoverable ones (`Err(_)`).
fn maybe_parse(env: &mut Env<'_>) -> Result<Option<Self>>;
}
impl<T: MaybeParse> Parse for T {
fn parse(env: &mut Env<'_>) -> Result<Self> {
let knamcnt = env.names.len();
let span = env.lxr.peek_span();
let mres = T::maybe_parse(env);
assert_eq!(env.names.len(), knamcnt);
mres?.ok_or_else(|| Error {
span,
kind: ErrorKind::Expected(<T as MaybeParse>::DFL_CTX),
})
}
}
#[derive(Clone, Debug, PartialEq)]
pub struct TaggedIdent {
pub span: EvEqSourceSpan,
pub name: Arc<str>,
}
#[derive(Clone, Debug)]
pub struct SelIdent {
pub span: EvEqSourceSpan,
// de-Bruijn index
pub dbidx: usize,
}
impl MaybeParse for TaggedIdent {
const DFL_CTX: ErrorCtx = ErrorCtx::Ident;
fn maybe_parse(env: &mut Env<'_>) -> Result<Option<Self>> {
let lxrbak = env.lxr.clone();
let lex::Token { span, kind } = none_up!(env.lxr.next())?;
Ok(if let lex::TokenKind::Ident(name) = kind {
Some(TaggedIdent { span, name })
} else {
env.lxr = lxrbak;
None
})
}
}
impl MaybeParse for SelIdent {
const DFL_CTX: ErrorCtx = ErrorCtx::Ident;
fn maybe_parse(env: &mut Env<'_>) -> Result<Option<Self>> {
let lxrbak = env.lxr.clone();
let lex::Token { span, kind } = none_up!(env.lxr.next())?;
if let lex::TokenKind::Ident(name) = kind {
match env.lookup(&name) {
Some(dbidx) => Ok(Some(SelIdent { span, dbidx })),
None => Err(Error {
span,
kind: ErrorKind::UnknownIdent(name),
}),
}
} else {
env.lxr = lxrbak;
Ok(None)
}
}
}

@@ -0,0 +1,148 @@
/*
* SPDX-FileCopyrightText: 2023 Alain Zscheile <fogti+devel@ytrizja.de>
*
* SPDX-License-Identifier: Apache-2.0
*/
use core::convert::Infallible;
use std::sync::Arc;
use crate::lex::{self, TokenKind as Tok};
use crate::{
expr::Expr, none_up, record::Record, Env as ParseEnv, Error, ErrorCtx, ErrorKind as Pek,
EvEqSourceSpan, MaybeParse, Parse, TaggedIdent,
};
// infallible (irrefutable) patterns: matching binds names but cannot fail
pub trait PatternTrait {
fn foreach_exports<'i, E, F>(&'i self, f: &mut F) -> Result<(), E>
where
F: FnMut(&'i TaggedIdent) -> Result<(), E>;
fn push_to_penv(&self, env: &mut ParseEnv<'_>) {
self.foreach_exports::<Infallible, _>(&mut |i| {
env.names.push(Arc::clone(&i.name));
Ok(())
})
.unwrap();
}
fn count_exports(&self) -> usize {
let mut counter = 0;
self.foreach_exports::<Infallible, _>(&mut |_| {
counter += 1;
Ok(())
})
.unwrap();
counter
}
fn pop_from_penv(&self, env: &mut ParseEnv<'_>) {
let counter = self.count_exports();
let enl = env.names.len();
env.names.truncate(enl - counter);
}
fn extract_exports(&self, outp: &mut Vec<Arc<str>>) -> Result<(), Error> {
self.foreach_exports::<Error, _>(&mut |i| {
let i2 = Arc::clone(&i.name);
if outp.iter().any(|j| i.name == *j) {
Err(Error {
span: i.span,
kind: Pek::PatternDupIdent(i2),
})
} else {
outp.push(i2);
Ok(())
}
})
}
}
#[derive(Clone, Debug, PartialEq)]
pub enum Pattern {
Ignore(EvEqSourceSpan),
Name(TaggedIdent),
Record(Record<Pattern>),
}
impl PatternTrait for Pattern {
fn foreach_exports<'i, E, F>(&'i self, f: &mut F) -> Result<(), E>
where
F: FnMut(&'i TaggedIdent) -> Result<(), E>,
{
match self {
Pattern::Ignore(_) => Ok(()),
Pattern::Name(i) => f(i),
Pattern::Record(xs) => xs.0[..].iter().try_for_each(|i| i.2.foreach_exports(f)),
}
}
}
impl MaybeParse for Pattern {
const DFL_CTX: ErrorCtx = ErrorCtx::Pattern;
fn maybe_parse(env: &mut ParseEnv<'_>) -> Result<Option<Self>, Error> {
let mut nxtlxr = env.lxr.clone();
let lex::Token {
kind,
span: tok_span,
} = match nxtlxr.next().transpose()? {
None => return Ok(None),
Some(x) => x,
};
use lex::TokenKind as Tk;
let ret = match kind {
Tk::PatOut(nam) if nam.is_empty() => Pattern::Ignore(tok_span),
Tk::PatOut(name) => Pattern::Name(TaggedIdent {
span: tok_span,
name,
}),
Tk::LBrace => return Record::parse(env).map(|i| Some(Pattern::Record(i))),
_ => return Ok(None),
};
env.lxr = nxtlxr;
Ok(Some(ret))
}
}
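/// A pattern plus an optional type annotation (`pty`, introduced by the
/// `DubColon` token).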
#[derive(Clone, Debug)]
pub struct PatternMT {
pub pat: Pattern,
pub pty: Option<Box<(EvEqSourceSpan, Expr)>>,
}
impl PatternTrait for PatternMT {
#[inline(always)]
fn foreach_exports<'i, E, F>(&'i self, f: &mut F) -> Result<(), E>
where
F: FnMut(&'i TaggedIdent) -> Result<(), E>,
{
self.pat.foreach_exports(f)
}
}
impl MaybeParse for PatternMT {
const DFL_CTX: ErrorCtx = ErrorCtx::Pattern;
fn maybe_parse(env: &mut ParseEnv<'_>) -> Result<Option<Self>, Error> {
let pat = none_up!(Pattern::maybe_parse(env)?);
// check for name collisions
pat.extract_exports(&mut Vec::new())?;
let pty = if env.lxr.got(Tok::DubColon).is_some() {
let span_start = env.lxr.offset();
let ptyx = Expr::parse(env)?;
let span_end = env.lxr.offset();
Some(Box::new(((span_start..span_end).into(), ptyx)))
} else {
None
};
Ok(Some(PatternMT { pat, pty }))
}
}

@@ -0,0 +1,68 @@
/*
* SPDX-FileCopyrightText: 2023 Alain Zscheile <fogti+devel@ytrizja.de>
*
* SPDX-License-Identifier: Apache-2.0
*/
use std::sync::Arc;
use crate::{
lex::{Token, TokenKind as Tok},
none_up, Env as ParseEnv, Error as Perr, ErrorCtx as PeCtx, ErrorKind as Pek, EvEqSourceSpan,
MaybeParse, Parse, Result as Pres,
};
#[derive(Clone, Debug, PartialEq)]
pub struct Record<V>(pub Vec<(EvEqSourceSpan, Option<Arc<str>>, V)>);
impl<V: Parse> MaybeParse for Record<V> {
const DFL_CTX: PeCtx = PeCtx::Record;
fn maybe_parse(env: &mut ParseEnv<'_>) -> Pres<Option<Self>> {
none_up!(env.lxr.got(Tok::LBrace));
let mut fields: Vec<(_, Option<Arc<str>>, _)> = Vec::new();
// warning: this code executes in O(|fields|²)
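// (every named field is checked against all previously parsed field names)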
loop {
let span_start;
let name = {
let mut lxrnxt = env.lxr.clone();
let Token { kind, span } = match lxrnxt.next_in_noeof(PeCtx::Record) {
Err(e) => {
env.lxr = lxrnxt;
return Err(e);
}
Ok(x) => x,
};
span_start = span.offset();
match kind {
Tok::DotIdent(i) => {
if lxrnxt.expect(Tok::Assign, PeCtx::Record).is_ok() {
if fields.iter().any(|(_, j, _)| j.as_ref() == Some(&i)) {
return Err(Perr {
span,
kind: Pek::RecordDupIdent(i),
});
}
// "fronttrack"
env.lxr = lxrnxt;
Some(i)
} else {
None
}
}
Tok::RBrace => break,
_ => None,
}
};
let expr = V::parse(env)?;
let span_end = env.lxr.offset();
env.lxr.expect(Tok::SemiColon, PeCtx::Record)?;
fields.push(((span_start..span_end).into(), name, expr));
}
env.lxr.expect(Tok::RBrace, PeCtx::Record)?;
Ok(Some(Record(fields)))
}
}

@@ -1,5 +1,5 @@
let option = import "./option.xtw2";
λ $inner : Type → μ $stack → .{
λ $lay : Layout → λ $inner : lay → μ $stack → .{
t = ^{
peek : $inner;
pop : option stack.t;