Mirror of https://github.com/Noratrieb/riverdelta.git, synced 2026-01-14 16:35:03 +01:00

Add assignments
parent 6d2a2fe474
commit 0bf9aed35e

6 changed files with 113 additions and 32 deletions

src/ast.ts (19 changed lines)

@@ -72,9 +72,18 @@ export type ExprLet = {
   type?: Type;
   rhs: Expr;
   // IMPORTANT: This is (sadly) shared with ExprBlock.
+  // TODO: Stop this sharing and just store the stack of blocks in typeck.
   local?: LocalInfo;
 };
 
+// A bit like ExprBinary except there are restrictions
+// on the LHS and precedence is unrestricted.
+export type ExprAssign = {
+  kind: "assign";
+  lhs: Expr;
+  rhs: Expr;
+};
+
 export type ExprBlock = {
   kind: "block";
   exprs: Expr[];

@@ -128,6 +137,7 @@ export type ExprBreak = {
   /**
    * The break target block.
    * May be any control flow block, labelled from inside out.
+   * TODO: This is not a good solution at all and pretty broken.
    */
   target?: number;
 };

@@ -141,6 +151,7 @@ export type ExprStructLiteral = {
 export type ExprKind =
   | ExprEmpty
   | ExprLet
+  | ExprAssign
   | ExprBlock
   | ExprLiteral
   | ExprIdent

@@ -482,6 +493,14 @@ export function superFoldExpr(expr: Expr, folder: Folder): Expr {
         rhs: folder.expr(expr.rhs),
       };
     }
+    case "assign": {
+      return {
+        ...expr,
+        kind: "assign",
+        lhs: folder.expr(expr.lhs),
+        rhs: folder.expr(expr.rhs),
+      };
+    }
     case "block": {
       return {
         ...expr,
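
For illustration, this is the kind of node the new ExprAssign variant describes, built by hand for `i = i + 1`. This is a sketch only: spans are omitted and the exact ExprIdent/ExprLiteral payloads are assumptions, not taken from the repo.

// Hypothetical hand-built AST node for `i = i + 1`.
const ident = (name: string) => ({ kind: "ident", value: { name } });
const assignNode = {
  kind: "assign",
  lhs: ident("i"),
  rhs: {
    kind: "binary",
    binaryKind: "+",
    lhs: ident("i"),
    rhs: { kind: "literal", value: 1 }, // exact literal payload assumed
  },
};

The new "assign" arm in superFoldExpr lets a Folder rebuild such a node by folding both sides, mirroring the existing binary case.
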
src/index.ts (32 changed lines)

@@ -10,32 +10,14 @@ import fs from "fs";
 import { exec } from "child_process";
 
 const input = `
-import("wasi_snapshot_preview1" "fd_write")
-  fd_write(a: I32, b: I32, c: I32, d: I32): I32;
-
-function coolerPrint(a: String) = (
-  let ptr = __string_ptr(a);
-  let len = __string_len(a);
-
-  let mem = 1024_I32;
-
-  __i32_store(mem + 4_I32, ptr);
-  __i32_store(mem + 8_I32, 2_I32);
-
-  fd_write(
-    // stdout
-    1_I32,
-    // iovec
-    mem + 4_I32,
-    // len
-    len,
-    // return value
-    mem,
-  );
-);
-
 function main() = (
-  coolerPrint("uwu\\n");
+  let i = 0;
+  loop (
+    if i > 10 then break;
+
+    print("uwu\\n");
+    i = i + 1;
+  );
 );
 `;
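
The new driver program exercises assignment inside a loop. Below is a rough TypeScript paraphrase of what `main` is expected to do, under the assumption that `loop`, `break`, and `print` behave like a plain unconditional loop with early exit and a line written to stdout; it is an illustration, not repo code.

// TypeScript paraphrase of the toy-language `main` above; not repo code.
let i = 0;
while (true) {
  if (i > 10) break;   // `if i > 10 then break;`
  console.log("uwu");  // `print("uwu\n");`
  i = i + 1;           // the new assignment expression being exercised
}

Under these assumptions the program prints "uwu" eleven times, for i = 0 through 10.
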
src/lower.ts (39 changed lines)

@@ -272,6 +272,36 @@ function lowerExpr(fcx: FuncContext, instrs: wasm.Instr[], expr: Expr) {
 
       break;
     }
+    case "assign": {
+      lowerExpr(fcx, instrs, expr.rhs);
+      const { lhs } = expr;
+      switch (lhs.kind) {
+        case "ident": {
+          const res = lhs.value.res!;
+
+          switch (res.kind) {
+            case "local": {
+              const location =
+                fcx.varLocations[fcx.varLocations.length - 1 - res.index];
+              storeVariable(instrs, location);
+              break;
+            }
+            case "item": {
+              throw new Error("cannot store to item");
+            }
+            case "builtin": {
+              throw new Error("cannot store to builtin");
+            }
+          }
+          break;
+        }
+        default: {
+          throw new Error("invalid lhs side of assignment");
+        }
+      }
+
+      break;
+    }
     case "block": {
       const prevVarLocationLengths = fcx.varLocations.length;
 

@@ -610,6 +640,15 @@ function loadVariable(instrs: wasm.Instr[], loc: VarLocation) {
   });
 }
 
+function storeVariable(instrs: wasm.Instr[], loc: VarLocation) {
+  // Stores are just like loads, just the other way around.
+  const types = loc.types.map((_, i) => i);
+  types.reverse();
+  types.forEach((i) => {
+    instrs.push({ kind: "local.set", imm: loc.localIdx + i });
+  });
+}
+
 function computeAbi(ty: TyFn): FnAbi {
   function argRetAbi(param: Ty): ArgRetAbi {
     switch (param.kind) {
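
storeVariable mirrors loadVariable: it pops one value per slot of the variable, and it does so in reverse slot order because the value of the last slot sits on top of the wasm stack after the right-hand side has been lowered. A small self-contained sketch (the VarLocation shape is inferred from the calls above; the two-slot example is hypothetical):

type Instr = { kind: string; imm: number };

function storeVariableSketch(
  instrs: Instr[],
  loc: { localIdx: number; types: string[] }
) {
  const slots = loc.types.map((_, i) => i);
  slots.reverse(); // the top of the stack belongs to the highest slot, so set it first
  slots.forEach((i) => instrs.push({ kind: "local.set", imm: loc.localIdx + i }));
}

const instrs: Instr[] = [];
storeVariableSketch(instrs, { localIdx: 3, types: ["i32", "i32"] });
// instrs is now [{ kind: "local.set", imm: 4 }, { kind: "local.set", imm: 3 }]

For a single-slot I32 local like `i`, this degenerates to one local.set emitted directly after the lowered right-hand side.
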
@@ -174,13 +174,15 @@ function parseFunctionSig(t: Token[]): [Token[], FunctionSig] {
 
 function parseExpr(t: Token[]): [Token[], Expr] {
   /*
-  EXPR = COMPARISON
+  EXPR = ASSIGNMENT
 
   LET = "let" NAME { ":" TYPE } "=" EXPR "in" EXPR
   IF = "if" EXPR "then" EXPR { "else" EXPR }
   LOOP = "loop" EXPR
   BREAK = "break"
 
+  ASSIGNMENT = COMPARISON { "=" ASSIGNMENT }
+
   // The precende here is pretty arbitrary since we forbid mixing of operators
   // with different precedence classes anyways.
   COMPARISON = LOGICAL { ( ">" | "<" | "==" | "<=" | ">=" | "!=" ) COMPARISON }

@@ -199,12 +201,17 @@ function parseExpr(t: Token[]): [Token[], Expr] {
   STRUCT_INIT = "{" { NAME ":" EXPR } { "," NAME ":" EXPR } { "," } "}"
   EXPR_LIST = { EXPR { "," EXPR } { "," } }
   */
-  return parseExprComparison(t);
+  return parseExprAssignment(t);
+}
+
+function mkBinaryExpr(lhs: Expr, rhs: Expr, span: Span, kind: string): Expr {
+  return { kind: "binary", binaryKind: kind as BinaryKind, lhs, rhs, span };
 }
 
 function mkParserExprBinary(
   lower: Parser<Expr>,
-  kinds: string[]
+  kinds: string[],
+  mkExpr = mkBinaryExpr
 ): Parser<Expr> {
   function parser(t: Token[]): [Token[], Expr] {
     let lhs;

@@ -216,10 +223,7 @@ function mkParserExprBinary(
       let rhs;
       [t, rhs] = parser(t);
       const span = spanMerge(lhs.span, rhs.span);
-      return [
-        t,
-        { kind: "binary", binaryKind: peek.kind as BinaryKind, lhs, rhs, span },
-      ];
+      return [t, mkExpr(lhs, rhs, span, peek.kind)];
     }
 
     return [t, lhs];

@@ -245,6 +249,12 @@ const parseExprComparison = mkParserExprBinary(
   COMPARISON_KINDS
 );
 
+const parseExprAssignment = mkParserExprBinary(
+  parseExprComparison,
+  ["="],
+  (lhs, rhs, span) => ({ kind: "assign", lhs, rhs, span })
+);
+
 function parseExprUnary(t: Token[]): [Token[], Expr] {
   const [, peak] = next(t);
   if (peak.kind in UNARY_KINDS) {
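
Because the new rule is right-recursive (ASSIGNMENT = COMPARISON { "=" ASSIGNMENT }) and the generic binary parser recurses into itself for its right-hand side, chained assignments associate to the right. A hypothetical sketch of the tree produced for `a = b = 1` (spans omitted; ident and literal payloads are simplified assumptions):

// `a = b = 1` parses as `a = (b = 1)`.
const chained = {
  kind: "assign",
  lhs: { kind: "ident", value: { name: "a" } },
  rhs: {
    kind: "assign",
    lhs: { kind: "ident", value: { name: "b" } },
    rhs: { kind: "literal", value: 1 },
  },
};

Passing mkExpr as an optional factory keeps mkParserExprBinary reusable: the comparison operators keep building "binary" nodes via mkBinaryExpr, while the assignment parser swaps in a factory that builds "assign" nodes instead.
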
@@ -78,6 +78,9 @@ function printExpr(expr: Expr, indent: number): string {
         indent + 1
       )}`;
     }
+    case "assign": {
+      return `${printExpr(expr.lhs, indent)} = ${printExpr(expr.rhs, indent)}`;
+    }
     case "block": {
       const exprs = expr.exprs.map((expr) => printExpr(expr, indent + 1));
 

@@ -557,6 +557,34 @@ export function checkBody(
           span: expr.span,
         };
       }
+      case "assign": {
+        const lhs = this.expr(expr.lhs);
+        const rhs = this.expr(expr.rhs);
+
+        infcx.assign(lhs.ty!, rhs.ty!, expr.span);
+
+        switch (lhs.kind) {
+          case "ident":
+            if (lhs.value.res!.kind !== "local") {
+              throw new CompilerError("cannot assign to items", expr.span);
+            }
+            break;
+          default: {
+            throw new CompilerError(
+              "invalid left-hand side of assignment",
+              lhs.span
+            );
+          }
+        }
+
+        return {
+          ...expr,
+          kind: "assign",
+          lhs,
+          rhs,
+          ty: TY_UNIT,
+        };
+      }
       case "block": {
         currentNestingDepth++;
         const prevLocalTysLen = localTys.length;
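
The type checker unifies both sides with infcx.assign, restricts the left-hand side to local variables, and gives the whole expression unit type. A couple of hypothetical programs in the toy language, written in the same style as the `input` string in src/index.ts; the quoted diagnostics are the ones added above.

// Should be accepted: the left-hand side is a local and both sides unify.
const accepted = `
function main() = (
  let i = 0;
  i = i + 1;
);
`;

// Should be rejected by the new checks (diagnostics quoted from the code above).
const rejected = `
function main() = (
  main = 0;     // "cannot assign to items"
  1 + 1 = 2;    // "invalid left-hand side of assignment"
);
`;

Because the checked node carries ty: TY_UNIT, an assignment evaluates to unit, so chained assignment like `a = b = 1` only unifies if `a` itself has unit type.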