Mirror of https://github.com/Noratrieb/dilaria.git, synced 2026-01-14 17:35:03 +01:00
improve code (some clippy::pedantic lints)

commit 5699ab190a (parent a42cec3075)
7 changed files with 33 additions and 36 deletions
src/ast.rs (15 lines changed)
@@ -76,8 +76,7 @@ pub enum ElsePart<'ast> {
 impl ElsePart<'_> {
     pub fn span(&self) -> Span {
         match self {
-            ElsePart::Else(_, span) => *span,
-            ElsePart::ElseIf(_, span) => *span,
+            ElsePart::Else(_, span) | ElsePart::ElseIf(_, span) => *span,
         }
     }
 }
@@ -123,12 +122,12 @@ pub enum Literal<'ast> {
 impl Literal<'_> {
     pub fn span(&self) -> Span {
         match self {
-            Literal::String(_, span) => *span,
-            Literal::Number(_, span) => *span,
-            Literal::Array(_, span) => *span,
-            Literal::Object(span) => *span,
-            Literal::Boolean(_, span) => *span,
-            Literal::Null(span) => *span,
+            Literal::String(_, span)
+            | Literal::Number(_, span)
+            | Literal::Array(_, span)
+            | Literal::Object(span)
+            | Literal::Boolean(_, span)
+            | Literal::Null(span) => *span,
         }
     }
 }
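Both src/ast.rs hunks collapse several match arms that share an identical body into a single arm with an or-pattern, which is presumably what the pedantic `clippy::match_same_arms` lint flagged. A minimal standalone sketch of the rewrite, using a made-up Literal enum rather than dilaria's own types:

#[derive(Clone, Copy, Debug, PartialEq)]
struct Span {
    start: usize,
    end: usize,
}

enum Literal {
    Number(f64, Span),
    Boolean(bool, Span),
    Null(Span),
}

impl Literal {
    fn span(&self) -> Span {
        // One arm with an or-pattern replaces three arms with the same body.
        match self {
            Literal::Number(_, span) | Literal::Boolean(_, span) | Literal::Null(span) => *span,
        }
    }
}

fn main() {
    let lits = [
        Literal::Number(1.0, Span { start: 0, end: 1 }),
        Literal::Boolean(true, Span { start: 1, end: 2 }),
        Literal::Null(Span { start: 3, end: 7 }),
    ];
    assert_eq!(lits[2].span(), Span { start: 3, end: 7 });
}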
@@ -28,8 +28,7 @@ impl Env<'_> {
         env.locals.get(&name.sym).copied().or_else(|| {
             env.outer
                 .as_ref()
-                .map(|outer| lookup_inner(&outer.borrow(), name))
-                .flatten()
+                .and_then(|outer| lookup_inner(&outer.borrow(), name))
         })
     }
 
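Mapping over an `Option` and immediately flattening produces an `Option<Option<T>>` that `and_then` expresses in one step; this is likely the pedantic `clippy::map_flatten` lint. A hypothetical two-level scope lookup with the same shape as `Env::lookup` (names here are stand-ins, not dilaria's):

use std::collections::HashMap;

// `.map(|outer| ...).flatten()` over an Option folds into `.and_then(|outer| ...)`.
fn lookup(local: &HashMap<&str, u32>, outer: Option<&HashMap<&str, u32>>, name: &str) -> Option<u32> {
    local
        .get(name)
        .copied()
        .or_else(|| outer.and_then(|scope| scope.get(name).copied()))
}

fn main() {
    let mut outer = HashMap::new();
    outer.insert("x", 1);
    let local = HashMap::new();
    assert_eq!(lookup(&local, Some(&outer), "x"), Some(1));
    assert_eq!(lookup(&local, None, "x"), None);
}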
@@ -68,7 +67,7 @@ pub fn compile<'ast, 'bc, 'gc>(
         blocks: Vec::new_in(bytecode_bump),
         current_block: 0,
         bump: bytecode_bump,
-        env: Rc::new(RefCell::new(Default::default())),
+        env: Rc::new(RefCell::new(Env::default())),
         rt,
     };
 
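Spelling out `Env::default()` instead of `Default::default()` names the concrete type being constructed, which is what the pedantic `clippy::default_trait_access` lint asks for. A small sketch with a hypothetical `Scope` type standing in for `Env`:

// The flagged form would be `let scope: Scope = Default::default();`.
#[derive(Default, Debug)]
struct Scope {
    depth: u32,
}

fn main() {
    let scope = Scope::default();
    println!("fresh scope at depth {}", scope.depth);
}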
@@ -156,7 +156,7 @@ impl Drop for RtAlloc {
         for str in &self.symbols {
             let raw = str.0.as_ptr();
             // SAFETY: No one has free these, see `Gc<T>`
-            let _ = unsafe { Box::from_raw(raw) };
+            drop(unsafe { Box::from_raw(raw) });
         }
     }
 }
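An explicit `drop(...)` states that the reconstructed `Box` is freed on the spot, whereas `let _ = ...` merely discards the value; clippy has had a `let_underscore_drop` lint for this pattern. A self-contained sketch of the same round trip through a raw pointer (not the crate's own allocator code):

fn main() {
    let boxed: Box<[u8; 16]> = Box::new([0u8; 16]);
    let raw = Box::into_raw(boxed);
    // SAFETY: `raw` was just produced by `Box::into_raw` and is reclaimed exactly once here.
    drop(unsafe { Box::from_raw(raw) });
}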
@@ -171,7 +171,7 @@ impl Symbol {
     }
 
     pub fn as_str(&self) -> &str {
-        self.gc.deref()
+        &*self.gc
     }
 }
 
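Using deref syntax instead of calling `.deref()` directly is likely the pedantic `clippy::explicit_deref_methods` lint. A sketch with a `Box<str>` standing in for the crate's garbage-collected string handle:

struct Symbol {
    gc: Box<str>, // stand-in for the GC'd string the real `Symbol` holds
}

impl Symbol {
    fn as_str(&self) -> &str {
        // Deref syntax rather than an explicit `self.gc.deref()` call.
        &*self.gc
    }
}

fn main() {
    let sym = Symbol { gc: "uwu".into() };
    assert_eq!(sym.as_str(), "uwu");
}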
src/lex.rs (33 lines changed)
@@ -124,8 +124,7 @@ impl<'code, 'gc> Lexer<'code, 'gc> {
     fn expect(&mut self, expected: char) -> bool {
         self.code
             .peek()
-            .map(|(_, char)| *char == expected)
-            .unwrap_or(false)
+            .map_or(false, |(_, char)| *char == expected)
     }
 
     fn maybe_next_char(
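`.map(..).unwrap_or(false)` folds into a single `.map_or(false, ..)`, presumably via the pedantic `clippy::map_unwrap_or` lint. A hypothetical helper with the same shape as `Lexer::expect`, using a plain `&str` instead of the lexer's internal iterator:

// Peek at the next character and compare it, defaulting to `false` when there is none.
fn next_is(code: &str, expected: char) -> bool {
    code.chars().next().map_or(false, |c| c == expected)
}

fn main() {
    assert!(next_is("abc", 'a'));
    assert!(!next_is("", 'a'));
}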
@@ -372,17 +371,17 @@ mod test {
 
     #[test]
     fn smiley_face() {
-        lex_test(">>.<<")
+        lex_test(">>.<<");
     }
 
     #[test]
     fn greater_than_less_than_equal() {
-        lex_test(">= <= == < < >=")
+        lex_test(">= <= == < < >=");
     }
 
     #[test]
     fn no_no_no() {
-        lex_test("!= != = !=")
+        lex_test("!= != = !=");
     }
 
     #[test]
@@ -401,7 +400,7 @@ mod test {
 
     #[test]
     fn fancy_stuff() {
-        lex_test(". ,- * -, .")
+        lex_test(". ,- * -, .");
     }
 
     #[test]
@@ -422,7 +421,7 @@ pls :) o(* ̄▽ ̄*)ブ
 
 i like the indentation here ngl | sneak for -> ## for ## <- sneak for
 ## and",
-        )
+        );
     }
 
     #[test]
@@ -437,42 +436,42 @@ pls :) o(* ̄▽ ̄*)ブ
 # # and
 ## or
 ",
-        )
+        );
     }
 
     #[test]
     fn greeting() {
-        lex_test("-.- /%")
+        lex_test("-.- /%");
     }
 
     #[test]
     fn countdown() {
-        lex_test("3 . . 2 . . 1 . . 0")
+        lex_test("3 . . 2 . . 1 . . 0");
     }
 
     #[test]
     fn larger_numbers() {
-        lex_test("123456789, 123456789.1234, 64785903")
+        lex_test("123456789, 123456789.1234, 64785903");
     }
 
     #[test]
     fn string() {
-        lex_test(r#""uwu""#)
+        lex_test(r#""uwu""#);
     }
 
     #[test]
     fn strings() {
-        lex_test(r#"( "hi" "uwu" "\"uwu\"" "no \\ u" )"#)
+        lex_test(r#"( "hi" "uwu" "\"uwu\"" "no \\ u" )"#);
     }
 
     #[test]
     fn keywords() {
-        lex_test("let fn if else loop while break for true false null and not or print")
+        lex_test("let fn if else loop while break for true false null and not or print");
     }
 
     #[test]
     fn keyword_and_ident() {
-        lex_test("let variable be a loop if false is true")
+        lex_test("let variable be a loop if false is true");
     }
 
     #[test]
@@ -501,7 +500,7 @@ pls :) o(* ̄▽ ̄*)ブ
             .map(|word| format!("{} ", word))
             .collect::<StdString>();
 
-        lex_test(&sentences)
+        lex_test(&sentences);
     }
 
     #[test]
@@ -515,6 +514,6 @@ pls :) o(* ̄▽ ̄*)ブ
 println("Hello \\ World!")
 }
 }"#,
-        )
+        );
     }
 }
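All of the test changes above add a trailing semicolon to a final unit-valued call, which matches the pedantic `clippy::semicolon_if_nothing_returned` lint. A tiny sketch with a hypothetical stand-in for the `lex_test` helper:

// Stand-in for the real `lex_test`; it returns `()`.
fn lex_test(input: &str) {
    assert!(!input.is_empty());
}

fn smiley_face() {
    // With the trailing `;`, the unit-valued call reads as a statement.
    lex_test(">>.<<");
}

fn main() {
    smiley_face();
}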
@@ -46,19 +46,19 @@ pub fn run_program(program: &str, cfg: &mut Config) {
     let ast = parse::parse(lexer, &ast_alloc);
 
     match ast {
-        Ok(ast) => process_ast(program, ast, runtime, cfg),
+        Ok(ast) => process_ast(program, &ast, runtime, cfg),
         Err(err) => errors::display_error(program, err),
     }
 }
 
-fn process_ast(program: &str, ast: Program, mut runtime: RtAlloc, cfg: &mut Config<'_>) {
+fn process_ast(program: &str, ast: &Program, mut runtime: RtAlloc, cfg: &mut Config<'_>) {
     if cfg.debug {
         println!("AST:\n{:?}\n", ast);
     }
 
     let bytecode_alloc = Bump::new();
 
-    let bytecode = compile::compile(&ast, &bytecode_alloc, &mut runtime);
+    let bytecode = compile::compile(ast, &bytecode_alloc, &mut runtime);
 
     match bytecode {
         Ok(code) => {
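`process_ast` now borrows the program instead of taking it by value, since it only reads it; this is the usual fix for the pedantic `clippy::needless_pass_by_value` lint. A sketch with a hypothetical `Program` type (not dilaria's actual AST):

#[derive(Debug)]
struct Program {
    statements: Vec<String>,
}

// Borrowing is enough because the function only inspects the AST.
fn process_ast(program: &str, ast: &Program) {
    println!("{program}: {} statements", ast.statements.len());
}

fn main() {
    let ast = Program { statements: vec!["print 1".to_string()] };
    // The caller keeps ownership and passes a reference.
    process_ast("demo", &ast);
}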
@@ -107,5 +107,5 @@ pub fn _fuzz_lex(program: &str) {
     // SAFETY: Just this scope
     let mut runtime = unsafe { RtAlloc::new() };
     let lexer = lex::Lexer::new(program, &mut runtime);
-    for _ in lexer {}
+    for _token in lexer {}
 }
@@ -29,6 +29,6 @@ fn main() {
             }
         }
     } else {
-        eprintln!("Usage: <filename>")
+        eprintln!("Usage: <filename>");
     }
 }
@@ -217,7 +217,7 @@ where
         Ok(IfStmt {
             span: keyword_span
                 .extend(body.span)
-                .option_extend(else_part.as_ref().map(|part| part.span())),
+                .option_extend(else_part.as_ref().map(ElsePart::span)),
             cond,
             body,
             else_part: else_part.map(|part| &*self.bump.alloc(part)),
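Passing the method path `ElsePart::span` instead of a closure that only forwards to it is likely the pedantic `clippy::redundant_closure_for_method_calls` lint. A self-contained sketch with simplified stand-in types:

#[derive(Clone, Copy)]
struct Span {
    start: usize,
    end: usize,
}

struct ElsePart {
    span: Span,
}

impl ElsePart {
    fn span(&self) -> Span {
        self.span
    }
}

fn main() {
    let else_part = Some(ElsePart { span: Span { start: 1, end: 4 } });
    // The method path replaces `.map(|part| part.span())`.
    let span = else_part.as_ref().map(ElsePart::span);
    assert_eq!(span.map(|s| (s.start, s.end)), Some((1, 4)));
}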