first working code

```
int main() {
    long a = 0;
}
```

codegens correctly.
This commit is contained in:
nora 2023-05-28 18:52:51 +02:00
parent 92243712e7
commit d0be270de2
14 changed files with 235 additions and 160 deletions

17
Cargo.lock generated
View file

@ -51,6 +51,16 @@ dependencies = [
"smallvec", "smallvec",
] ]
[[package]]
name = "ariadne"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "367fd0ad87307588d087544707bc5fbf4805ded96c7db922b70d368fa1cb5702"
dependencies = [
"unicode-width",
"yansi",
]
[[package]] [[package]]
name = "autocfg" name = "autocfg"
version = "1.1.0" version = "1.1.0"
@ -685,6 +695,7 @@ name = "uwucc"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"analysis", "analysis",
"ariadne",
"bumpalo", "bumpalo",
"codegen", "codegen",
"dbg-pls", "dbg-pls",
@ -759,3 +770,9 @@ checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
dependencies = [ dependencies = [
"linked-hash-map", "linked-hash-map",
] ]
[[package]]
name = "yansi"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"

View file

@ -14,3 +14,4 @@ analysis = { path = "./analysis" }
codegen = { path = "./codegen" } codegen = { path = "./codegen" }
parser = { path = "./parser" } parser = { path = "./parser" }
dbg-pls = { version = "0.3.2", features = ["derive", "colors"] } dbg-pls = { version = "0.3.2", features = ["derive", "colors"] }
ariadne = "0.2.0"

View file

@ -138,12 +138,14 @@ pub enum StatementKind {
Store { Store {
ptr: Operand, ptr: Operand,
value: Operand, value: Operand,
/// Number of bytes to store.
size: Operand, size: Operand,
align: Operand, align: Operand,
}, },
Load { Load {
result: Register, result: Register,
ptr: Operand, ptr: Operand,
/// Number of bytes to load.
size: Operand, size: Operand,
align: Operand, align: Operand,
}, },

View file

@ -8,43 +8,3 @@ pub mod ty;
pub use ctxt::LoweringCx; pub use ctxt::LoweringCx;
pub use lower::lower_translation_unit; pub use lower::lower_translation_unit;
use parser::Span;
#[derive(Debug)]
pub struct Error {
msg: String,
span: Option<Span>,
notes: Vec<Note>,
}
#[derive(Debug)]
struct Note {
msg: String,
span: Option<Span>,
}
impl Error {
pub fn new(msg: impl Into<String>, span: Span) -> Self {
Self {
msg: msg.into(),
span: Some(span),
notes: Vec::new(),
}
}
pub fn new_without_span(msg: impl Into<String>) -> Self {
Self {
msg: msg.into(),
span: None,
notes: Vec::new(),
}
}
pub fn note_spanned(mut self, msg: impl Into<String>, span: Span) -> Self {
self.notes.push(Note {
msg: msg.into(),
span: Some(span),
});
self
}
}

View file

@ -3,7 +3,7 @@ mod typeck;
use parser::{ use parser::{
ast::{self, ExprBinary}, ast::{self, ExprBinary},
Span, Symbol, Error, Span, Symbol,
}; };
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
@ -15,7 +15,6 @@ use crate::{
VariableInfo, VariableInfoKind, VariableInfo, VariableInfoKind,
}, },
ty::{Ty, TyKind}, ty::{Ty, TyKind},
Error,
}; };
type Result<T, E = Error> = std::result::Result<T, E>; type Result<T, E = Error> = std::result::Result<T, E>;

View file

@ -1,14 +1,11 @@
use parser::{ use parser::{
ast::{IntSign, IntTy, IntTyKind}, ast::{IntSign, IntTy, IntTyKind},
Span, Error, Span,
}; };
use smallvec::{smallvec, SmallVec}; use smallvec::{smallvec, SmallVec};
use super::{FnLoweringCtxt, Result}; use super::{FnLoweringCtxt, Result};
use crate::{ use crate::ty::{Ty, TyKind};
ty::{Ty, TyKind},
Error,
};
pub(super) type Coercions<'cx> = SmallVec<[(Coercion, Ty<'cx>); 2]>; pub(super) type Coercions<'cx> = SmallVec<[(Coercion, Ty<'cx>); 2]>;

View file

@ -1,5 +1,3 @@
#![allow(unused)]
mod x86_64; mod x86_64;
use std::process::Stdio; use std::process::Stdio;
@ -9,8 +7,9 @@ use object::{
elf, elf,
write::{Object, Symbol}, write::{Object, Symbol},
}; };
use parser::Error;
type Result<T, E = analysis::Error> = std::result::Result<T, E>; type Result<T, E = Error> = std::result::Result<T, E>;
pub fn generate<'cx>(lcx: &'cx LoweringCx<'cx>, ir: &Ir<'cx>) -> Result<()> { pub fn generate<'cx>(lcx: &'cx LoweringCx<'cx>, ir: &Ir<'cx>) -> Result<()> {
let mut obj = Object::new( let mut obj = Object::new(
@ -43,25 +42,24 @@ pub fn generate<'cx>(lcx: &'cx LoweringCx<'cx>, ir: &Ir<'cx>) -> Result<()> {
obj.add_symbol(sym); obj.add_symbol(sym);
} }
let object_file = obj.write().map_err(|err| { let object_file = obj
analysis::Error::new_without_span(format!("failed to create object file: {err}")) .write()
})?; .map_err(|err| Error::new_without_span(format!("failed to create object file: {err}")))?;
std::fs::write("main.o", object_file).map_err(|err| { std::fs::write("main.o", object_file).map_err(|err| {
analysis::Error::new_without_span(format!("failed to write object file main.o: {err}")) Error::new_without_span(format!("failed to write object file main.o: {err}"))
})?; })?;
let output = std::process::Command::new("cc") let output = std::process::Command::new("cc")
.arg("main.o") .arg("main.o")
.arg("-g")
.stdout(Stdio::inherit()) .stdout(Stdio::inherit())
.stderr(Stdio::inherit()) .stderr(Stdio::inherit())
.output() .output()
.map_err(|err| analysis::Error::new_without_span(format!("failed to spawn `cc`: {err}")))?; .map_err(|err| Error::new_without_span(format!("failed to spawn `cc`: {err}")))?;
if !output.status.success() { if !output.status.success() {
return Err(analysis::Error::new_without_span(format!( return Err(Error::new_without_span(format!("linking with `cc` failed")));
"linking with `cc` failed"
)));
} else { } else {
// std::fs::remove_file("main.o").map_err(|err| { // std::fs::remove_file("main.o").map_err(|err| {
// analysis::Error::new_without_span(format!( // analysis::Error::new_without_span(format!(

View file

@ -65,29 +65,31 @@
//! | %r12-r14 | callee-saved registers | Yes | //! | %r12-r14 | callee-saved registers | Yes |
//! | %r15 | callee-saved register; optionally used as GOT base pointer | Yes | //! | %r15 | callee-saved register; optionally used as GOT base pointer | Yes |
#![allow(unused_variables, dead_code)]
use analysis::{ use analysis::{
ir::{self, BbIdx, Func, Location, Operand, Register, Statement, StatementKind}, ir::{self, BbIdx, Branch, Func, Location, Operand, Register, Statement, StatementKind},
LoweringCx, LoweringCx,
}; };
use iced_x86::{ use iced_x86::{
code_asm::{self as x, CodeAssembler}, code_asm::{self as x, CodeAssembler},
IcedError, Formatter, IcedError, NasmFormatter,
}; };
use parser::Span; use parser::{Error, Span};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use crate::Result; use crate::Result;
trait IcedErrExt { trait IcedErrExt {
type T; type T;
fn sp(self, span: Span) -> Result<Self::T, analysis::Error>; fn sp(self, span: Span) -> Result<Self::T, Error>;
} }
impl<T> IcedErrExt for Result<T, IcedError> { impl<T> IcedErrExt for Result<T, IcedError> {
type T = T; type T = T;
fn sp(self, span: Span) -> Result<Self::T, analysis::Error> { fn sp(self, span: Span) -> Result<Self::T, Error> {
self.map_err(|e| analysis::Error::new(e.to_string(), span)) self.map_err(|e| Error::new(e.to_string(), span))
} }
} }
@ -97,9 +99,13 @@ struct MachineReg(usize);
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
enum RegValue { enum RegValue {
/// The SSA register contains an address on the stack.
/// The offset is measured from the start of the function.
StackRelative { offset: u64 },
/// The SSA register resides on the stack as it has been spilled. /// The SSA register resides on the stack as it has been spilled.
Stack { offset: u64 }, /// This should be rather rare in practice.
/// The SSA register resides in a machine register Spilled { offset: u64 },
/// The SSA register resides in a machine register.
MachineReg(MachineReg), MachineReg(MachineReg),
} }
@ -143,7 +149,8 @@ impl<'cx> AsmCtxt<'cx> {
fn generate_func(&mut self, func: &Func<'cx>) -> Result<()> { fn generate_func(&mut self, func: &Func<'cx>) -> Result<()> {
// TODO: Prologue // TODO: Prologue
self.a.push(x::rbx); self.a.push(x::rbx).sp(func.def_span)?;
self.a.push(x::rsp).sp(func.def_span)?;
loop { loop {
let bb = &func.bbs[self.bb_idx.as_usize()]; let bb = &func.bbs[self.bb_idx.as_usize()];
@ -159,6 +166,7 @@ impl<'cx> AsmCtxt<'cx> {
size, size,
align: _, align: _,
} => { } => {
// For alloca, we allocate some space on the stack by subtracting from RSP.
// TODO: Align // TODO: Align
match size { match size {
Operand::Const(c) => { Operand::Const(c) => {
@ -166,11 +174,13 @@ impl<'cx> AsmCtxt<'cx> {
self.a.sub(x::rsp, offset).sp(st_sp)?; self.a.sub(x::rsp, offset).sp(st_sp)?;
self.current_stack_offset += offset as u64; self.current_stack_offset += offset as u64;
} }
Operand::Reg(_) => todo!("dynamic alloca is not supported"), Operand::Reg(_) => {
todo!("dynamic alloca is not supported. get a better computer")
}
}; };
self.reg_map.insert( self.reg_map.insert(
reg, reg,
RegValue::Stack { RegValue::StackRelative {
offset: self.current_stack_offset, offset: self.current_stack_offset,
}, },
); );
@ -180,26 +190,44 @@ impl<'cx> AsmCtxt<'cx> {
value, value,
size, size,
align, align,
} => match ptr { } => {
Operand::Const(_) => todo!("const stores not implemented"), let Operand::Const(size) = size else {
Operand::Reg(reg) => { todo!("non const size");
let value = self.reg_map[&reg]; };
let stack_offset = match value { if size.as_i32() != 8 {
RegValue::Stack { offset } => offset, todo!("stores of less or more than 8 bytes: {size}");
RegValue::MachineReg(_) => todo!("machine reg"),
};
//let rhs = match value {
// Operand::Const(c) => {}
// Operand::Reg(reg) => {}
//};
// mov [rbp + OFFSET], RHS
//self.a.add_instruction(Instruction::with2(Code::Mov, op0, op1))
self.a.mov(x::ptr(x::rax), x::rbx);
} }
}, match ptr {
Operand::Const(_) => todo!("const stores not implemented"),
Operand::Reg(reg) => {
let ptr_value = self.reg_map[&reg];
match (ptr_value, value) {
(RegValue::StackRelative { offset }, Operand::Const(c)) => {
let offset_from_cur = self.current_stack_offset - offset;
dbg!(offset_from_cur, c);
self.a
.mov(x::qword_ptr(x::rsp + offset_from_cur), c.as_i32())
.sp(st_sp)?;
}
(RegValue::StackRelative { offset }, Operand::Reg(value)) => {
todo!("stack relative ptr + reg value")
}
(RegValue::Spilled { .. }, _) => todo!("spilled"),
(RegValue::MachineReg(_), _) => todo!("machine reg"),
};
//let rhs = match value {
// Operand::Const(c) => {}
// Operand::Reg(reg) => {}
//};
// mov [rsp + OFFSET], RHS
//self.a.add_instruction(Instruction::with2(Code::Mov, op0, op1))
// self.a.mov(x::ptr(x::rax), x::rbx).sp(st_sp);
}
}
}
StatementKind::Load { StatementKind::Load {
result, result,
ptr, ptr,
@ -228,7 +256,20 @@ impl<'cx> AsmCtxt<'cx> {
} }
} }
todo!("next bbs"); match bb.term {
Branch::Ret(_) => {
self.a
.add(x::rsp, i32::try_from(self.current_stack_offset).unwrap())
.sp(func.def_span)?;
self.a.pop(x::rsp).sp(func.def_span)?;
self.a.pop(x::rbx).sp(func.def_span)?;
self.a.mov(x::rax, 0_u64).sp(func.def_span)?;
self.a.ret().sp(func.def_span)?;
break;
}
Branch::Switch { .. } => todo!("switch"),
Branch::Goto(_) => todo!("goto"),
}
} }
Ok(()) Ok(())
@ -239,7 +280,7 @@ pub fn generate_func<'cx>(lcx: &'cx LoweringCx<'cx>, func: &Func<'cx>) -> Result
assert_eq!(func.arity, 0, "arguments??? in MY uwucc????"); assert_eq!(func.arity, 0, "arguments??? in MY uwucc????");
let fn_sp = func.def_span; let fn_sp = func.def_span;
let mut a = CodeAssembler::new(64).sp(fn_sp)?; let a = CodeAssembler::new(64).sp(fn_sp)?;
let mut cx = AsmCtxt { let mut cx = AsmCtxt {
lcx, lcx,
@ -255,5 +296,14 @@ pub fn generate_func<'cx>(lcx: &'cx LoweringCx<'cx>, func: &Func<'cx>) -> Result
let code = cx.a.assemble(0x4000).sp(fn_sp)?; let code = cx.a.assemble(0x4000).sp(fn_sp)?;
print!("{}:\n---", func.name);
let mut output = String::new();
let mut formatter = NasmFormatter::new();
for instr in cx.a.instructions() {
output.push('\n');
formatter.format(instr, &mut output);
}
println!("{output}\n---");
Ok(code) Ok(code)
} }

View file

@ -27,6 +27,7 @@
llvmPackages_16.bintools llvmPackages_16.bintools
rustup rustup
cargo-insta cargo-insta
gdb
]; ];
shellHook = '' shellHook = ''

View file

@ -4,8 +4,8 @@
use std::fmt::Debug; use std::fmt::Debug;
use ast::TranslationUnit; use ast::TranslationUnit;
use dbg_pls::DebugPls;
use self::parser::ParserError;
use crate::token::Token; use crate::token::Token;
pub mod ast; pub mod ast;
@ -63,12 +63,63 @@ impl Debug for Span {
} }
} }
#[derive(Debug)]
pub struct Error {
pub msg: String,
pub span: Option<Span>,
pub notes: Vec<Note>,
}
#[derive(Debug)]
pub struct Note {
pub msg: String,
pub span: Option<Span>,
}
impl Error {
pub fn new(msg: impl Into<String>, span: Span) -> Self {
Self {
msg: msg.into(),
span: Some(span),
notes: Vec::new(),
}
}
pub fn new_without_span(msg: impl Into<String>) -> Self {
Self {
msg: msg.into(),
span: None,
notes: Vec::new(),
}
}
pub fn note_spanned(mut self, msg: impl Into<String>, span: Span) -> Self {
self.notes.push(Note {
msg: msg.into(),
span: Some(span),
});
self
}
}
impl DebugPls for Error {
fn fmt(&self, f: dbg_pls::Formatter<'_>) {
f.debug_struct("Error")
.field("span", &self.span)
.field("msg", &self.msg)
.finish();
}
}
fn lex_and_pre(src: &str) -> impl Iterator<Item = (Token<'_>, Span)> + '_ { fn lex_and_pre(src: &str) -> impl Iterator<Item = (Token<'_>, Span)> + '_ {
let pre_tokens = pre::preprocess_tokens(src); let pre_tokens = pre::preprocess_tokens(src);
token::pre_tokens_to_tokens(pre_tokens) token::pre_tokens_to_tokens(pre_tokens)
} }
pub fn parse_file(src: &str) -> Result<TranslationUnit, ParserError> { pub fn parse_file(src: &str) -> Result<TranslationUnit, Error> {
let lexer = lex_and_pre(src); let lexer = lex_and_pre(src);
parser::parse_declarations(lexer) parser::parse_declarations(lexer)
} }

View file

@ -1,4 +1,3 @@
use dbg_pls::{DebugPls, Formatter};
use peekmore::PeekMoreIterator; use peekmore::PeekMoreIterator;
use crate::{ use crate::{
@ -10,41 +9,22 @@ use crate::{
pre::Punctuator as P, pre::Punctuator as P,
sym::Symbol, sym::Symbol,
token::{Keyword as Kw, Token as Tok}, token::{Keyword as Kw, Token as Tok},
Span, Spanned, Error, Span, Spanned,
}; };
mod expr; mod expr;
#[derive(Debug)] impl Error {
pub struct ParserError {
span: Span,
message: String,
}
impl ParserError {
fn new(span: Span, message: String) -> Self {
Self { span, message }
}
fn eof() -> Self { fn eof() -> Self {
Self::new(Span::default(), "unexpected end of file".to_string()) Self::new("unexpected end of file", Span::default())
} }
fn unsupported(span: Span, token: &Tok<'_>) -> Self { fn unsupported(span: Span, token: &Tok<'_>) -> Self {
Self::new(span, format!("`{token}` is not supported")) Self::new(format!("`{token}` is not supported"), span)
} }
} }
impl DebugPls for ParserError { type Result<T, E = Error> = std::result::Result<T, E>;
fn fmt(&self, f: Formatter<'_>) {
f.debug_struct("ParserError")
.field("span", &self.span)
.field("message", &self.message)
.finish();
}
}
type Result<T, E = ParserError> = std::result::Result<T, E>;
pub struct Parser<'src, I> pub struct Parser<'src, I>
where where
@ -59,12 +39,12 @@ macro_rules! expect_parser {
match $self.next_t()? { match $self.next_t()? {
($pat, span) => span, ($pat, span) => span,
(token, span) => { (token, span) => {
return Err(ParserError::new( return Err(Error::new(
span,
format!( format!(
concat!("expected `", stringify!($pat), "`, found {}"), concat!("expected `", stringify!($pat), "`, found {}"),
token token
), ),
span,
)) ))
} }
} }
@ -126,23 +106,23 @@ where
} }
fn next_t(&mut self) -> Result<(Tok<'src>, Span)> { fn next_t(&mut self) -> Result<(Tok<'src>, Span)> {
self.lex.next().ok_or_else(ParserError::eof) self.lex.next().ok_or_else(Error::eof)
} }
fn peek_t(&mut self) -> Result<&(Tok<'src>, Span)> { fn peek_t(&mut self) -> Result<&(Tok<'src>, Span)> {
self.lex.peek().ok_or_else(ParserError::eof) self.lex.peek().ok_or_else(Error::eof)
} }
fn peek_t_n(&mut self, n: usize) -> Result<&(Tok<'src>, Span)> { fn peek_t_n(&mut self, n: usize) -> Result<&(Tok<'src>, Span)> {
self.lex.peek_nth(n).ok_or_else(ParserError::eof) self.lex.peek_nth(n).ok_or_else(Error::eof)
} }
fn ident(&mut self) -> Result<Ident> { fn ident(&mut self) -> Result<Ident> {
match self.next_t()? { match self.next_t()? {
(Tok::Ident(ident), span) => Ok((Symbol::intern(ident), span)), (Tok::Ident(ident), span) => Ok((Symbol::intern(ident), span)),
(tok, span) => Err(ParserError::new( (tok, span) => Err(Error::new(
span,
format!("expected identifier, found `{tok}`"), format!("expected identifier, found `{tok}`"),
span,
)), )),
} }
} }
@ -176,7 +156,7 @@ where
/// This does NOT eat the semicolon! /// This does NOT eat the semicolon!
fn declaration(&mut self) -> Result<Spanned<Decl>> { fn declaration(&mut self) -> Result<Spanned<Decl>> {
if let Some((tok, span)) = eat!(self, Tok::Kw(Kw::StaticAssert)) { if let Some((tok, span)) = eat!(self, Tok::Kw(Kw::StaticAssert)) {
return Err(ParserError::unsupported(span, &tok)); return Err(Error::unsupported(span, &tok));
} }
let (decl_spec, span) = self.decl_specifiers()?; let (decl_spec, span) = self.decl_specifiers()?;
@ -264,7 +244,7 @@ where
// (6.7.5) alignment-specifier: // (6.7.5) alignment-specifier:
Tok::Kw(Kw::Alignas) => { Tok::Kw(Kw::Alignas) => {
let (token, span) = self.next_t()?; let (token, span) = self.next_t()?;
return Err(ParserError::unsupported(span, &token)); return Err(Error::unsupported(span, &token));
} }
// if it's neither of the above, it has to be a type-specifier // if it's neither of the above, it has to be a type-specifier
_ => { _ => {
@ -319,10 +299,7 @@ where
} }
Tok::Kw(Kw::Signed) => { Tok::Kw(Kw::Signed) => {
if signedness.is_some() { if signedness.is_some() {
return Err(ParserError::new( return Err(Error::new("cannot specify signedness twice", span));
span,
"cannot specify signedness twice".to_string(),
));
} }
if let Ok((Tok::Kw(Kw::Char | Kw::Short | Kw::Int | Kw::Long), _)) = if let Ok((Tok::Kw(Kw::Char | Kw::Short | Kw::Int | Kw::Long), _)) =
self.peek_t() self.peek_t()
@ -335,10 +312,7 @@ where
} }
Tok::Kw(Kw::Unsigned) => { Tok::Kw(Kw::Unsigned) => {
if signedness.is_some() { if signedness.is_some() {
return Err(ParserError::new( return Err(Error::new("cannot specify signedness twice", span));
span,
"cannot specify signedness twice".to_string(),
));
} }
if let Ok((Tok::Kw(Kw::Char | Kw::Short | Kw::Int | Kw::Long), _)) = if let Ok((Tok::Kw(Kw::Char | Kw::Short | Kw::Int | Kw::Long), _)) =
self.peek_t() self.peek_t()
@ -355,12 +329,9 @@ where
TypeSpecifier::Integer(IntTy(IntSign::Unsigned, IntTyKind::Bool)) TypeSpecifier::Integer(IntTy(IntSign::Unsigned, IntTyKind::Bool))
} }
Tok::Kw(Kw::Complex) => { Tok::Kw(Kw::Complex) => {
return Err(ParserError::new( return Err(Error::new("tf are you doing with complex numbers", span))
span,
"tf are you doing with complex numbers".to_string(),
))
} }
tok => return Err(ParserError::new(span, format!("Invalid token: `{tok}`"))), tok => return Err(Error::new(format!("Invalid token: `{tok}`"), span)),
}; };
break Ok((ty, span)); break Ok((ty, span));
@ -413,7 +384,7 @@ where
// the wrong way around because borrowing // the wrong way around because borrowing
if let (Tok::Punct(P::ParenClose), _) = self.peek_t_n(1)? { if let (Tok::Punct(P::ParenClose), _) = self.peek_t_n(1)? {
if let &(ref tok @ Tok::Kw(Kw::Void), span) = self.peek_t()? { if let &(ref tok @ Tok::Kw(Kw::Void), span) = self.peek_t()? {
return Err(ParserError::unsupported(span, tok)); return Err(Error::unsupported(span, tok));
} }
} }
} }

View file

@ -7,7 +7,7 @@ use crate::{
ArithOpKind, Atom, BinaryOp, ComparisonKind, Expr, ExprBinary, ExprPostfix, ExprUnary, ArithOpKind, Atom, BinaryOp, ComparisonKind, Expr, ExprBinary, ExprPostfix, ExprUnary,
PostfixOp, UnaryOp, PostfixOp, UnaryOp,
}, },
parser::{eat, expect, Parser, ParserError, Result}, parser::{eat, expect, Error, Parser, Result},
pre::Punctuator as P, pre::Punctuator as P,
sym::Symbol, sym::Symbol,
token::{Constant, Token as Tok}, token::{Constant, Token as Tok},
@ -40,7 +40,7 @@ where
} }
&(Tok::Punct(punct), span) => { &(Tok::Punct(punct), span) => {
let r_bp = prefix_binding_power(&Tok::Punct(punct)).ok_or_else(|| { let r_bp = prefix_binding_power(&Tok::Punct(punct)).ok_or_else(|| {
ParserError::new(span, format!("expected expression, found {punct}")) Error::new(format!("expected expression, found {punct}"), span)
})?; })?;
let Some(op) = unary_op_from_token(&Tok::Punct(punct)) else { panic!() }; let Some(op) = unary_op_from_token(&Tok::Punct(punct)) else { panic!() };
let rhs = self.expr_bp(r_bp)?; let rhs = self.expr_bp(r_bp)?;
@ -56,9 +56,9 @@ where
)); ));
} }
(tok, span) => { (tok, span) => {
return Err(ParserError::new( return Err(Error::new(
*span,
format!("expected expression, found {tok}"), format!("expected expression, found {tok}"),
*span,
)); ));
} }
}; };

View file

@ -1,12 +1,12 @@
use super::Tok; use super::Tok;
use crate::{ast::ExternalDecl, parser::ParserError, Span, Spanned}; use crate::{ast::ExternalDecl, parser::Error, Span, Spanned};
fn lex_and_pre(src: &str) -> impl Iterator<Item = (Tok<'_>, Span)> + '_ { fn lex_and_pre(src: &str) -> impl Iterator<Item = (Tok<'_>, Span)> + '_ {
let pre_tokens = crate::pre::preprocess_tokens(src); let pre_tokens = crate::pre::preprocess_tokens(src);
crate::token::pre_tokens_to_tokens(pre_tokens) crate::token::pre_tokens_to_tokens(pre_tokens)
} }
fn pretty_print(ast: &Result<Vec<Spanned<ExternalDecl>>, ParserError>) -> String { fn pretty_print(ast: &Result<Vec<Spanned<ExternalDecl>>, Error>) -> String {
let mut vec = Vec::new(); let mut vec = Vec::new();
match ast { match ast {

View file

@ -1,4 +1,5 @@
use analysis::LoweringCx; use analysis::LoweringCx;
use parser::Error;
fn main() { fn main() {
let input_file = std::env::args().nth(1).expect("first argument"); let input_file = std::env::args().nth(1).expect("first argument");
@ -8,25 +9,52 @@ fn main() {
}); });
let ast = parser::parse_file(&src); let ast = parser::parse_file(&src);
dbg_pls::color!(&ast); // dbg_pls::color!(&ast);
let Ok(ast) = ast else { let ast = ast.unwrap_or_else(|err| report_fatal(&input_file, &src, err));
std::process::exit(1);
};
let mut printer = parser::pretty::PrettyPrinter::new(std::io::stdout().lock(), false); let mut printer = parser::pretty::PrettyPrinter::new(std::io::stdout().lock(), false);
println!("// START CODE -------------------"); println!("-------- AST pretty");
printer.translation_unit(&ast).unwrap(); printer.translation_unit(&ast).unwrap();
println!("// END CODE -------------------");
let arena = bumpalo::Bump::new(); let arena = bumpalo::Bump::new();
let mut lcx = LoweringCx::new(&arena); let mut lcx = LoweringCx::new(&arena);
let ir = analysis::lower_translation_unit(&mut lcx, &ast).unwrap_or_else(|err| { println!("-------- IR");
dbg!(err); let ir = analysis::lower_translation_unit(&mut lcx, &ast)
std::process::exit(1); .unwrap_or_else(|err| report_fatal(&input_file, &src, err));
});
codegen::generate(&lcx, &ir).unwrap_or_else(|err| { println!("-------- ASM");
dbg!(err); codegen::generate(&lcx, &ir).unwrap_or_else(|err| report_fatal(&input_file, &src, err));
std::process::exit(1); }
});
fn report_fatal(filename: &str, source: &str, error: Error) -> ! {
use ariadne::{Label, Report, ReportKind, Source};
let line = match error.span {
Some(span) => {
let mut line = 0;
source.char_indices().find(|(i, c)| {
if *c == '\n' {
line += 1;
}
// exit if we have found the start
*i >= span.start
});
line
}
None => 0,
};
let mut rep = Report::build(ReportKind::Error, filename, line).with_message(&error.msg);
if let Some(span) = error.span {
rep = rep
.with_label(Label::new((filename, span.start..span.end)))
.with_message(&error.msg);
}
rep.finish()
.eprint((filename, Source::from(source)))
.unwrap();
std::process::exit(1);
} }