tests that work!

nora 2022-06-21 13:29:20 +02:00
parent a5d063b944
commit 7fc10f3b6c
5 changed files with 301 additions and 10 deletions

@@ -136,6 +136,7 @@ where
         let mut ident = vec![c];
         while let Some((span, c)) = self.src.peek() {
+            println!("uwu {c}");
             let (span, c) = (*span, *c);
             if c.is_c_identifier() {
                 self.src.next();
@@ -257,14 +258,18 @@ where
     fn next(&mut self) -> Option<Self::Item> {
         use PToken::Punctuator as TokP;
-        let (start_span, char1) = self.src.next()?;
-        let char2 = self.src.peek().map(|(_, c)| *c);
-        let char3 = self.src.peek_nth(2).map(|(_, c)| *c);
+        let mut start_span;
         let (token, end_span) = loop {
+            let (span, char1) = self.src.next()?;
+            start_span = span;
+            let char2 = self.src.peek().map(|(_, c)| *c);
+            let char3 = self.src.peek_nth(2).map(|(_, c)| *c);
            match (char1, char2, char3) {
                 // IDENTIFIER
                 (c, _, _) if c.is_c_identifier_nondigit() => {
+                    println!("AA");
                     break self.identifier(c, start_span);
                 }
                 // NUMBER
@@ -354,20 +359,28 @@ pub fn preprocess_tokens(
 #[cfg(test)]
 mod tests {
-    fn lex_test(str: &str) {
-        let bytes = str.bytes().enumerate();
-        let tokens = super::preprocess_tokens(bytes);
-        let tokens = tokens.collect::<Vec<_>>();
-        insta::assert_debug_snapshot!(tokens);
+    macro_rules! lex_test {
+        ($str:expr) => {
+            let bytes = $str.bytes().enumerate();
+            let tokens = super::preprocess_tokens(bytes);
+            let tokens = tokens.collect::<Vec<_>>();
+            insta::assert_debug_snapshot!(tokens);
+        };
+    }
+
+    #[test]
+    fn identifiers() {
+        let src = r#"AAAA BBBB CCCC"#;
+        lex_test!(src);
     }
 
     #[test]
     fn hello_world() {
-        let src = r#"\
+        let src = r#"
 int main() {
     puts("Hello, World!");
 }
 "#;
-        lex_test(src);
+        lex_test!(src);
     }
 }
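
The four files below are the snapshot files that insta recorded for the new identifiers and hello_world tests: each entry is a token variant with its raw bytes and its source span. For reference, here is a rough, hand-inlined sketch of what a lex_test!(src) call expands to; it is not the committed code, and it uses only names that already appear in the diff above:

    #[test]
    fn identifiers() {
        let src = r#"AAAA BBBB CCCC"#;
        // lex_test!(src); expands to the macro body with $str replaced by src:
        let bytes = src.bytes().enumerate();           // (byte index, u8) pairs
        let tokens = super::preprocess_tokens(bytes);  // the pre-lexer under test
        let tokens = tokens.collect::<Vec<_>>();
        insta::assert_debug_snapshot!(tokens);         // compared against the stored snapshot
    }

Pending snapshots are typically reviewed and accepted with cargo insta review (or cargo insta accept) before being committed like the files below.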

@@ -0,0 +1,100 @@
---
source: parser/src/pre/lexer.rs
expression: tokens
---
[
(
Identifier(
[
105,
110,
116,
],
),
1..4,
),
(
Identifier(
[
109,
97,
105,
110,
],
),
5..9,
),
(
Punctuator(
ParenOpen,
),
9..10,
),
(
Punctuator(
ParenClose,
),
10..11,
),
(
Punctuator(
BraceOpen,
),
12..13,
),
(
Identifier(
[
112,
117,
116,
115,
],
),
18..22,
),
(
Punctuator(
ParenOpen,
),
22..23,
),
(
StringLiteral(
[
72,
101,
108,
108,
111,
44,
32,
87,
111,
114,
108,
100,
33,
],
),
23..37,
),
(
Punctuator(
ParenClose,
),
38..39,
),
(
Punctuator(
Semicolon,
),
39..40,
),
(
Punctuator(
BraceClose,
),
41..42,
),
]

@@ -0,0 +1,39 @@
---
source: parser/src/pre/lexer.rs
expression: tokens
---
[
(
Identifier(
[
65,
65,
65,
65,
],
),
0..4,
),
(
Identifier(
[
66,
66,
66,
66,
],
),
5..9,
),
(
Identifier(
[
67,
67,
67,
67,
],
),
10..14,
),
]

@@ -0,0 +1,39 @@
---
source: parser/src/pre/lexer.rs
expression: tokens
---
[
(
Identifier(
[
65,
65,
65,
65,
],
),
0..4,
),
(
Identifier(
[
66,
66,
66,
66,
],
),
5..9,
),
(
Identifier(
[
67,
67,
67,
67,
],
),
10..14,
),
]

@@ -0,0 +1,100 @@
---
source: parser/src/pre/lexer.rs
expression: tokens
---
[
(
Identifier(
[
105,
110,
116,
],
),
1..4,
),
(
Identifier(
[
109,
97,
105,
110,
],
),
5..9,
),
(
Punctuator(
ParenOpen,
),
9..10,
),
(
Punctuator(
ParenClose,
),
10..11,
),
(
Punctuator(
BraceOpen,
),
12..13,
),
(
Identifier(
[
112,
117,
116,
115,
],
),
18..22,
),
(
Punctuator(
ParenOpen,
),
22..23,
),
(
StringLiteral(
[
72,
101,
108,
108,
111,
44,
32,
87,
111,
114,
108,
100,
33,
],
),
23..37,
),
(
Punctuator(
ParenClose,
),
38..39,
),
(
Punctuator(
Semicolon,
),
39..40,
),
(
Punctuator(
BraceClose,
),
41..42,
),
]