Mirror of https://github.com/Noratrieb/dilaria.git (synced 2026-01-14 17:35:03 +01:00)

lex string literals, with bug
parent 52d740af9e
commit 70a35e2a10

3 changed files with 47 additions and 3 deletions
src/lex.rs (48 changed lines)

@@ -182,7 +182,7 @@ impl<'code> Iterator for Lexer<'code> {
                             kind: TokenType::BangEqual,
                         };
                     } else {
-                        return Some(Err(LexError));
+                        return Some(Err(LexError("Expected '=' after '!'".to_string())));
                     };
                 }
                 '>' => {
@@ -201,10 +201,30 @@ impl<'code> Iterator for Lexer<'code> {
                         start,
                     );
                 }
+                '"' => {
+                    let mut escaped = false;
+                    let end = loop {
+                        match self.code.next() {
+                            Some((end, '"')) if !escaped => break end,
+                            Some((_, '\\')) if !escaped => escaped = true,
+                            Some((_, _)) => escaped = false,
+                            None => {
+                                return Some(Err(LexError(
+                                    "reached EOF expecting '\"'".to_string(),
+                                )))
+                            }
+                        }
+                    };
+                    break Token::new(
+                        Span::new(start, end - start),
+                        TokenType::String(&self.src[start + 1..end]),
+                    );
+                }
                 char => {
                     if char.is_ascii_digit() {
                         let mut had_dot = false;
                         let end = loop {
                             // peek here because the character signaling the end should not be consumed
                             match self.code.peek() {
                                 Some((_, '.')) if !had_dot => {
                                     let _ = self.code.next();
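
Note on the string arm added above: the scan pulls (byte offset, char) pairs from `self.code`, tracking a single `escaped` flag so an escaped quote does not terminate the literal, and the resulting token borrows the raw source slice between the two quotes. A minimal standalone sketch of the same scan, assuming the iterator behaves like `str::char_indices`; the function name and inputs below are illustrative, not from the repository:

/// Given the byte offset of an opening `"`, return the byte offset of the
/// matching closing `"`, honouring backslash escapes. Mirrors the loop in the
/// hunk above; returns None if the literal is unterminated.
fn find_closing_quote(src: &str, start: usize) -> Option<usize> {
    let mut escaped = false;
    for (offset, c) in src[start + 1..].char_indices() {
        match c {
            '"' if !escaped => return Some(start + 1 + offset),
            '\\' if !escaped => escaped = true,
            _ => escaped = false,
        }
    }
    None // reached EOF while still inside the string literal
}

fn main() {
    let src = r#"( "no \\ u" )"#;
    let start = src.find('"').unwrap();
    let end = find_closing_quote(src, start).unwrap();
    // Like the lexer above, this slices the raw source between the quotes,
    // so the `\\` escape is still two characters; nothing has been unescaped.
    assert_eq!(&src[start + 1..end], r"no \\ u");
}
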
@@ -218,7 +238,9 @@ impl<'code> Iterator for Lexer<'code> {
                             }
                         };
                         let number_str = &self.src[start..end];
-                        let number = number_str.parse().map_err(|_| LexError);
+                        let number = number_str
+                            .parse::<f64>()
+                            .map_err(|err| LexError(err.to_string()));
                         match number {
                             Ok(number) => {
                                 break Token::new(
@@ -246,7 +268,7 @@ fn is_valid_ident_start(char: char) -> bool {
 }
 
 #[derive(Debug)]
-pub struct LexError;
+pub struct LexError(String);
 
 #[cfg(test)]
 mod test {
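
`LexError` changes from a unit struct to a tuple struct carrying a message, which is what the new `LexError("...".to_string())` call sites construct. A sketch of how such a message-carrying error is typically made printable; the `Display` and `std::error::Error` impls below are illustrative assumptions, not part of this commit:

use std::fmt;

// Same shape as in the diff; the trait impls are illustrative additions.
#[derive(Debug)]
pub struct LexError(String);

impl fmt::Display for LexError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "lex error: {}", self.0)
    }
}

impl std::error::Error for LexError {}

fn main() {
    let err = LexError("Expected '=' after '!'".to_string());
    println!("{err}");
}
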
@@ -353,4 +375,24 @@ mod test {
             ],
         )
     }
+
+    #[test]
+    fn string() {
+        lex_test(r#""uwu""#, vec![String("uwu")])
+    }
+
+    #[test]
+    fn strings() {
+        lex_test(
+            r#"( "hi" "uwu" "\"uwu\"" "no \\ u" )"#,
+            vec![
+                ParenO,
+                String("hi"),
+                String("uwu"),
+                String("\"uwu\""),
+                String("no \\ u"),
+                ParenC,
+            ],
+        )
+    }
 }
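
A note on the new `strings` test: its expected values are ordinary Rust string literals, so their escapes are already processed, while the `'"'` arm above hands back the raw slice between the quotes with the backslashes still in place. That mismatch may be the bug the commit title mentions. A quick standalone check of what the test's literals actually contain (plain assertions, no lexer involved):

fn main() {
    // The expected token text "\"uwu\"" is five characters: "uwu" with the
    // surrounding quotes and no backslashes.
    assert_eq!("\"uwu\"", r#""uwu""#);
    // "no \\ u" contains a single backslash once Rust unescapes it.
    assert_eq!("no \\ u", r"no \ u");
    // The raw input literal, by contrast, still contains backslash-quote
    // sequences, which is what &self.src[start + 1..end] would yield.
    let input = r#"( "hi" "uwu" "\"uwu\"" "no \\ u" )"#;
    assert!(input.contains(r#"\"uwu\""#));
}
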
@@ -1,5 +1,6 @@
 mod lex;
 mod parse;
+mod string;
 
 pub fn run_program(program: &str) {
     let lexer = lex::Lexer::lex(program);

src/string.rs (new file, 1 line)

@@ -0,0 +1 @@
+pub struct StringInterner;
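
`src/string.rs` only introduces an empty `StringInterner` placeholder, presumably groundwork for interning the string tokens lexed above. For orientation, a minimal sketch of the shape such an interner commonly takes, deduplicating strings behind small integer ids; everything beyond the struct name is an assumption, not code from this repository:

use std::collections::HashMap;

// Hypothetical fleshed-out interner; the commit itself only adds the empty
// `pub struct StringInterner;` placeholder.
#[derive(Default)]
pub struct StringInterner {
    map: HashMap<String, u32>,
    strings: Vec<String>,
}

impl StringInterner {
    /// Return the id for `s`, inserting it if it has not been seen before.
    pub fn intern(&mut self, s: &str) -> u32 {
        if let Some(&id) = self.map.get(s) {
            return id;
        }
        let id = self.strings.len() as u32;
        self.strings.push(s.to_string());
        self.map.insert(s.to_string(), id);
        id
    }

    /// Look up the original string for a previously returned id.
    pub fn resolve(&self, id: u32) -> &str {
        &self.strings[id as usize]
    }
}

fn main() {
    let mut interner = StringInterner::default();
    let a = interner.intern("uwu");
    let b = interner.intern("uwu");
    assert_eq!(a, b); // identical strings share one id
    assert_eq!(interner.resolve(a), "uwu");
}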