Mirror of https://github.com/Noratrieb/ub.git (synced 2026-01-14 16:45:05 +01:00)

something works at least

Commit b1756c7c21 (parent aa4da62e2c)
7 changed files with 234 additions and 64 deletions

@@ -6,6 +6,7 @@ edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
+ariadne = "0.1.5"
 chumsky = "0.8.0"
 logos = "0.12.0"
 

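Note (commentary, not part of the diff): the three dependencies split the work in this commit — logos derives the lexer from the annotated Token enum, chumsky provides the parser combinators in the parser module, and ariadne renders the error reports printed by the reworked test() further down. A minimal, self-contained logos 0.12 sketch of the derive pattern that Token follows (the names here are illustrative, not the repo's):

use logos::Logos;

#[derive(Logos, Debug, Clone, PartialEq, Eq)]
enum Tok<'a> {
    #[token("{")]
    BraceO,
    #[token("}")]
    BraceC,
    // The callback stores the matched slice in the variant's field.
    #[regex("[a-zA-Z_][a-zA-Z0-9_]*", |lex| lex.slice())]
    Ident(&'a str),
    // logos 0.12 requires an explicit error variant; whitespace is matched and skipped here.
    #[error]
    #[regex(r"[ \t\r\n]+", logos::skip)]
    Error,
}

fn main() {
    let mut lex = Tok::lexer("foo {");
    assert_eq!(lex.next(), Some(Tok::Ident("foo")));
    assert_eq!(lex.next(), Some(Tok::BraceO));
    assert_eq!(lex.next(), None);
}
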
@@ -1,3 +1,5 @@
+use std::fmt::{Debug, Display, Formatter};
+
 use logos::Logos;
 
 #[derive(Logos, Debug, Clone, Hash, PartialEq, Eq)]

@@ -91,6 +93,52 @@ pub enum Token<'a> {
     Error,
 }
 
+impl<'a> Display for Token<'a> {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Token::Comment => f.write_str("comment"),
+            Token::BraceO => f.write_str("{"),
+            Token::BraceC => f.write_str("}"),
+            Token::BracketO => f.write_str("["),
+            Token::BracketC => f.write_str("]"),
+            Token::ParenO => f.write_str("("),
+            Token::ParenC => f.write_str(")"),
+            Token::Dot => f.write_str("."),
+            Token::Comma => f.write_str(","),
+            Token::Semi => f.write_str(";"),
+            Token::Eq => f.write_str("="),
+            Token::EqEq => f.write_str("=="),
+            Token::Bang => f.write_str("!"),
+            Token::BangEq => f.write_str("!="),
+            Token::Greater => f.write_str(">"),
+            Token::Less => f.write_str("<"),
+            Token::GreaterEq => f.write_str(">="),
+            Token::LessEq => f.write_str("<="),
+            Token::Asterisk => f.write_str("*"),
+            Token::Slash => f.write_str("/"),
+            Token::Plus => f.write_str("+"),
+            Token::Minus => f.write_str("-"),
+            Token::Or => f.write_str("|"),
+            Token::And => f.write_str("&"),
+            Token::OrOr => f.write_str("||"),
+            Token::AndAnd => f.write_str("&&"),
+            Token::Caret => f.write_str("^"),
+            Token::Arrow => f.write_str("->"),
+            Token::Colon => f.write_str(":"),
+            Token::Struct => f.write_str("struct"),
+            Token::Fn => f.write_str("fn"),
+            Token::If => f.write_str("if"),
+            Token::Else => f.write_str("else"),
+            Token::While => f.write_str("while"),
+            Token::Loop => f.write_str("loop"),
+            Token::Ident(ident) => write!(f, "identifier `{ident}`"),
+            Token::String(str) => write!(f, "\"{str}\""),
+            Token::Integer(int) => write!(f, "{int}"),
+            Token::Error => f.write_str("error"),
+        }
+    }
+}
+
 pub fn lex<'src>(code: &'src str) -> logos::Lexer<'_, Token<'src>> {
     Token::lexer(code)
 }

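Commentary, not part of the diff: the new error-reporting code further down converts chumsky's Simple<Token> errors into Simple<String> via e.map(|c| c.to_string()), and to_string() goes through exactly this Display impl, so a diagnostic can read "expected ;" rather than showing the Token::Semi debug form. A hypothetical unit test (not in the commit) for that round trip:

#[cfg(test)]
mod display_tests {
    use super::Token;

    #[test]
    fn tokens_display_as_their_source_form() {
        // `to_string` comes from the blanket ToString impl over Display.
        assert_eq!(Token::Semi.to_string(), ";");
        assert_eq!(Token::GreaterEq.to_string(), ">=");
    }
}
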
@@ -3,6 +3,7 @@
 
 use std::path::PathBuf;
 
+use ariadne::{Color, Fmt, Label, Report, ReportKind, Source};
 use logos::Logos;
 
 use crate::lexer::Token;

@@ -16,16 +17,89 @@ pub fn parse(_str: &str, _file_name: PathBuf) -> Result<ast::File, ()> {
 }
 
 pub fn test() {
-    let lexer = Token::lexer(
-        "
+    let src = "
 fn main() {
-    if 1 { 5 + 5; }
+    // if 1 { 5 + 5; }
+    u64 hello = 5;
 }
-",
-    );
+";
+
+    let lexer = Token::lexer(src);
     let len = lexer.source().len();
 
-    let r = parser::parse(lexer.spanned(), len, "test_file".into());
+    let (file, errors) = parser::parse(lexer.spanned(), len, "test_file".into());
 
-    println!("{r:#?}");
+    if let Some(file) = file {
+        println!("AST: {file:#?}");
+    }
+
+    errors
+        .into_iter()
+        .map(|e| e.map(|c| c.to_string()))
+        .for_each(|e| {
+            let report = Report::build(ReportKind::Error, (), e.span().start);
+
+            let report = match e.reason() {
+                chumsky::error::SimpleReason::Unclosed { span, delimiter } => report
+                    .with_message(format!(
+                        "Unclosed delimiter {}",
+                        delimiter.fg(Color::Yellow)
+                    ))
+                    .with_label(
+                        Label::new(span.clone())
+                            .with_message(format!(
+                                "Unclosed delimiter {}",
+                                delimiter.fg(Color::Yellow)
+                            ))
+                            .with_color(Color::Yellow),
+                    )
+                    .with_label(
+                        Label::new(e.span())
+                            .with_message(format!(
+                                "Must be closed before this {}",
+                                e.found()
+                                    .unwrap_or(&"end of file".to_string())
+                                    .fg(Color::Red)
+                            ))
+                            .with_color(Color::Red),
+                    ),
+                chumsky::error::SimpleReason::Unexpected => report
+                    .with_message(format!(
+                        "{}, expected {}",
+                        if e.found().is_some() {
+                            "Unexpected token in input"
+                        } else {
+                            "Unexpected end of input"
+                        },
+                        if e.expected().len() == 0 {
+                            "something else".to_string()
+                        } else {
+                            e.expected()
+                                .map(|expected| match expected {
+                                    Some(expected) => expected.to_string(),
+                                    None => "end of input".to_string(),
+                                })
+                                .collect::<Vec<_>>()
+                                .join(", ")
+                        }
+                    ))
+                    .with_label(
+                        Label::new(e.span())
+                            .with_message(format!(
+                                "Unexpected token {}",
+                                e.found()
+                                    .unwrap_or(&"end of file".to_string())
+                                    .fg(Color::Red)
+                            ))
+                            .with_color(Color::Red),
+                    ),
+                chumsky::error::SimpleReason::Custom(msg) => report.with_message(msg).with_label(
+                    Label::new(e.span())
+                        .with_message(format!("{}", msg.fg(Color::Red)))
+                        .with_color(Color::Red),
+                ),
+            };
+
+            report.finish().print(Source::from(&src)).unwrap();
+        });
 }

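parser::parse itself is not shown in this commit, but its call shape — parser::parse(lexer.spanned(), len, ...) returning an (Option<File>, Vec<errors>) pair — matches the usual way a logos lexer is bridged into chumsky 0.8: wrap the spanned token iterator in a Stream with an end-of-input span and run parse_recovery. A self-contained sketch under that assumption (toy token set and grammar, not the repo's code):

use chumsky::prelude::*;
use chumsky::Stream;
use logos::Logos;

#[derive(Logos, Debug, Clone, PartialEq, Eq, Hash)]
enum Tok {
    #[token("+")]
    Plus,
    // Parse the matched slice into the variant's u64 payload.
    #[regex("[0-9]+", |lex| lex.slice().parse())]
    Int(u64),
    #[error]
    #[regex(r"[ \t\n]+", logos::skip)]
    Error,
}

fn main() {
    let src = "1 + 2 + 39";
    let lexer = Tok::lexer(src);
    let len = src.len();

    // chumsky consumes (token, span) pairs; logos' `spanned()` yields exactly that,
    // and `len..len + 1` is the span attributed to an unexpected end of input.
    let stream = Stream::from_iter(len..len + 1, lexer.spanned());

    // Toy grammar: integer ("+" integer)*, folded into a sum.
    let int = filter_map(|span, tok| match tok {
        Tok::Int(n) => Ok(n),
        other => Err(Simple::expected_input_found(span, Vec::new(), Some(other))),
    });
    let sum = int
        .clone()
        .then(just(Tok::Plus).ignore_then(int).repeated())
        .map(|(first, rest)| first + rest.into_iter().sum::<u64>());

    // Like the (file, errors) pair above: an optional output plus accumulated errors.
    let (value, errors) = sum.parse_recovery(stream);
    println!("value = {value:?}, errors = {errors:?}");
}
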
@@ -156,35 +156,38 @@ fn statement_parser<'src>() -> impl Parser<Token<'src>, Stmt, Error = Error<'src
             })
         });
 
-        let if_stmt = recursive(|if_stmt| {
-            just(Token::If)
-                .ignore_then(expr_parser())
-                .then(
-                    stmt.clone()
-                        .repeated()
-                        .delimited_by(just(Token::BraceO), just(Token::BraceC)),
-                )
-                .then(
-                    just(Token::Else).ignore_then(
-                        if_stmt
-                            .map(|if_stmt| ElsePart::ElseIf(Box::new(if_stmt)))
-                            .or(stmt
-                                .clone()
-                                .repeated()
-                                .delimited_by(just(Token::BraceO), just(Token::BraceC))
-                                .map_with_span(ElsePart::Else))
-                            .or_not(),
-                    ),
-                )
-                .map_with_span(|((cond, body), else_part), span| IfStmt {
-                    cond,
-                    body,
-                    else_part,
-                    span,
-                })
-        })
-        .map(Stmt::IfStmt);
-        choice((var_decl, assignment, if_stmt, expr_parser().map(Stmt::Expr)))
+        // let if_stmt = recursive(|if_stmt| {
+        //     just(Token::If)
+        //         .ignore_then(expr_parser())
+        //         .then(
+        //             stmt.clone()
+        //                 .repeated()
+        //                 .delimited_by(just(Token::BraceO), just(Token::BraceC)),
+        //         )
+        //         .then(
+        //             just(Token::Else).ignore_then(
+        //                 if_stmt
+        //                     .map(|if_stmt| ElsePart::ElseIf(Box::new(if_stmt)))
+        //                     .or(stmt
+        //                         .clone()
+        //                         .repeated()
+        //                         .delimited_by(just(Token::BraceO), just(Token::BraceC))
+        //                         .map_with_span(ElsePart::Else))
+        //                     .or_not(),
+        //             ),
+        //         )
+        //         .map_with_span(|((cond, body), else_part), span| IfStmt {
+        //             cond,
+        //             body,
+        //             else_part,
+        //             span,
+        //         })
+        // })
+        // .map(Stmt::IfStmt);
+
+        var_decl
+            .or(assignment)
+            .or(expr_parser().map(Stmt::Expr))
+            .then_ignore(just(Token::Semi))
     })
     .labelled("statement")

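Commentary, not part of the diff: the block that was commented out uses chumsky's recursive combinator so that an if statement can nest another if inside its else branch; for now the commit falls back to a plain var_decl / assignment / expression chain terminated by a semicolon. A minimal, self-contained illustration of recursive in chumsky 0.8 (toy grammar, unrelated to the repo's AST):

use chumsky::prelude::*;

// Counts the nesting depth of balanced parentheses, e.g. "((()))" -> 3.
fn depth_parser() -> impl Parser<char, usize, Error = Simple<char>> {
    recursive(|inner| {
        inner
            // In chumsky 0.8, `delimited_by` takes two parsers, as with BraceO/BraceC above.
            .delimited_by(just('('), just(')'))
            .or_not()
            // A "(...)" pair adds one level; no parentheses at all is depth 0.
            .map(|inner_depth: Option<usize>| inner_depth.map_or(0, |d| d + 1))
    })
}

fn main() {
    let depth = depth_parser().parse("((()))").unwrap();
    assert_eq!(depth, 3);
}
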
@@ -257,10 +260,10 @@ fn file_parser<'src>(
     file_name: PathBuf,
 ) -> impl Parser<Token<'src>, File, Error = Error<'src>> + Clone {
     item_parser()
-        .repeated()
+        // .repeated()
         .map(move |items| File {
             name: file_name.clone(),
-            items,
+            items: vec![items],
         })
         .labelled("file")
 }

@@ -301,30 +304,24 @@ mod tests {
     #[test]
     fn expression() {
         let r = parse("fn main() { (4 / hallo()) + 5; }");
-        insta::assert_debug_snapshot!(r)
+        insta::assert_debug_snapshot!(r);
     }
 
     #[test]
     fn function() {
         let r = parse("fn foo() -> u64 { 1 + 5; }");
-        insta::assert_debug_snapshot!(r)
+        insta::assert_debug_snapshot!(r);
     }
 
-    //#[test]
-    //fn nested_function() {
-    //    let r = parse("fn foo() { fn foo2() {} fn foo3() {} }");
-    //    insta::assert_debug_snapshot!(r)
-    //}
-
     #[test]
-    fn nested_function2() {
-        let r = parse("fn foo() { fn foo2() {} 1 + 5; }");
-        insta::assert_debug_snapshot!(r)
+    fn var_decl() {
+        let r = parse("fn foo() -> u64 { u64 hello = 5; }");
+        insta::assert_debug_snapshot!(r);
     }
 
     #[test]
     fn struct_() {
         let r = parse("struct X { y: u64, x: u64 }");
-        insta::assert_debug_snapshot!(r)
+        insta::assert_debug_snapshot!(r);
     }
 }

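Note (commentary, not from the diff): these tests use insta, so each assert_debug_snapshot! call owns a .snap file — which is presumably why one snapshot file is deleted below and parser__parser__tests__var_decl.snap is added. A tiny, hypothetical example of the mechanism (not code from the repo):

#[test]
fn snapshot_example() {
    // On the first run insta writes a `.snap` file containing the Debug output;
    // later runs fail if the output drifts, and `cargo insta review` (from the
    // cargo-insta tool) is the usual way to accept an intentional change.
    let value = (Some(vec!["u64", "hello"]), Vec::<u32>::new());
    insta::assert_debug_snapshot!(value);
}
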
@@ -1,14 +0,0 @@
----
-source: parser/src/parser.rs
-assertion_line: 330
-expression: r
----
-(
-    Some(
-        File {
-            name: "parser__parser__tests",
-            items: [],
-        },
-    ),
-    [],
-)

parser/src/snapshots/parser__parser__tests__var_decl.snap (new file, 48 lines)
@@ -0,0 +1,48 @@
+---
+source: parser/src/parser.rs
+assertion_line: 319
+expression: r
+---
+(
+    Some(
+        File {
+            name: "parser__parser__tests",
+            items: [
+                FnDecl(
+                    FnDecl {
+                        name: "foo",
+                        params: [],
+                        ret_ty: Some(
+                            Ty {
+                                span: 12..15,
+                                kind: U64,
+                            },
+                        ),
+                        span: 0..2,
+                        body: [
+                            VarDecl(
+                                VarDecl {
+                                    name: "hello",
+                                    ty: Ty {
+                                        span: 18..21,
+                                        kind: U64,
+                                    },
+                                    rhs: Some(
+                                        Literal(
+                                            Integer(
+                                                5,
+                                                30..31,
+                                            ),
+                                        ),
+                                    ),
+                                    span: 0..0,
+                                },
+                            ),
+                        ],
+                    },
+                ),
+            ],
+        },
+    ),
+    [],
+)