reference counting

nora 2023-08-02 23:19:10 +02:00
parent d9ab81bed1
commit 9ece18a48a
18 changed files with 477 additions and 159 deletions


@ -1 +1,2 @@
/target
/target
/ui-tests/target

.prettierrc.json (new file)

@ -0,0 +1,3 @@
{
"trailingComma": "all"
}


@ -448,6 +448,7 @@ export const BUILTINS = [
"__memory_size",
"__memory_grow",
"__i32_extend_to_i64_u",
"___transmute",
] as const;
export type BuiltinName = (typeof BUILTINS)[number];
@ -565,7 +566,7 @@ const ITEM_DEFAULT = Symbol("item must not be overriden");
export function mkDefaultFolder<
From extends Phase,
To extends Phase
To extends Phase,
>(): ItemFolder<From, To> {
const folder: ItemFolder<From, To> = {
newItemsById: new ComplexMap(),
@ -585,7 +586,7 @@ export function mkDefaultFolder<
export function foldAst<From extends Phase, To extends Phase>(
ast: Crate<From>,
folder: Folder<From, To>
folder: Folder<From, To>,
): Crate<To> {
if ((folder.item as any)[ITEM_DEFAULT] !== ITEM_DEFAULT) {
throw new Error("must not override `item` on folders");
@ -603,7 +604,7 @@ export function foldAst<From extends Phase, To extends Phase>(
export function superFoldItem<From extends Phase, To extends Phase>(
item: Item<From>,
folder: Folder<From, To>
folder: Folder<From, To>,
): Item<To> {
switch (item.kind) {
case "function": {
@ -683,7 +684,7 @@ export function superFoldItem<From extends Phase, To extends Phase>(
export function superFoldExpr<From extends Phase, To extends Phase>(
expr: Expr<From>,
folder: Folder<From, To>
folder: Folder<From, To>,
): Expr<To> {
const span = expr.span;
switch (expr.kind) {
@ -800,7 +801,7 @@ export function superFoldExpr<From extends Phase, To extends Phase>(
export function superFoldType<From extends Phase, To extends Phase>(
type: Type<From>,
folder: Folder<From, To>
folder: Folder<From, To>,
): Type<To> {
const span = type.span;
switch (type.kind) {


@ -44,3 +44,33 @@ it("should compute struct layout correctly", () => {
}
`);
});
it("should compute single field struct layout correctly", () => {
const ty: TyStruct = {
kind: "struct",
name: "",
fields: [["owo", TY_INT]],
};
const layout = layoutOfStruct(ty);
expect(layout).toMatchInlineSnapshot(`
{
"align": 8,
"fields": [
{
"ty": {
"kind": "int",
},
"types": [
{
"offset": 4,
"type": "i64",
},
],
},
],
"size": 8,
}
`);
});


@ -37,6 +37,7 @@ const WASM_PAGE = 65536;
const DUMMY_IDX = 9999999;
const ALLOCATE_ITEM: string[] = ["std", "rt", "allocateItem"];
const DEALLOCATE_ITEM: string[] = ["std", "rt", "deallocateItem"];
type RelocationKind =
| {
@ -111,10 +112,10 @@ function appendData(cx: Context, newData: Uint8Array): number {
}
}
const KNOWN_DEF_PATHS = [ALLOCATE_ITEM];
const KNOWN_DEF_PATHS = [ALLOCATE_ITEM, DEALLOCATE_ITEM];
function getKnownDefPaths(
crates: Crate<Typecked>[]
crates: Crate<Typecked>[],
): ComplexMap<string[], ItemId> {
const knows = new ComplexMap<string[], ItemId>();
@ -141,7 +142,7 @@ function getKnownDefPaths(
};
crates.forEach((crate) =>
crate.rootItems.forEach((item) => folder.item(item))
crate.rootItems.forEach((item) => folder.item(item)),
);
return knows;
@ -231,7 +232,7 @@ export function lower(gcx: GlobalContext): wasm.Module {
const idx = cx.funcIndices.get(rel.res);
if (idx === undefined) {
throw new Error(
`no function found for relocation '${JSON.stringify(rel.res)}'`
`no function found for relocation '${JSON.stringify(rel.res)}'`,
);
}
rel.instr.func = idx.kind === "func" ? offset + idx.idx : idx.idx;
@ -241,7 +242,7 @@ export function lower(gcx: GlobalContext): wasm.Module {
const idx = cx.globalIndices.get(rel.res);
if (idx === undefined) {
throw new Error(
`no global found for relocation '${JSON.stringify(rel.res)}'`
`no global found for relocation '${JSON.stringify(rel.res)}'`,
);
}
rel.instr.imm = idx;
@ -260,10 +261,10 @@ export function lower(gcx: GlobalContext): wasm.Module {
function lowerImport(
cx: Context,
item: Item<Typecked>,
def: ImportDef<Typecked>
def: ImportDef<Typecked>,
) {
const existing = cx.mod.imports.findIndex(
(imp) => imp.module === def.module.value && imp.name === def.func.value
(imp) => imp.module === def.module.value && imp.name === def.func.value,
);
let idx;
@ -271,7 +272,7 @@ function lowerImport(
idx = existing;
} else {
const abi = computeAbi(def.ty!);
const { type: wasmType } = wasmTypeForAbi(abi);
const { type: wasmType } = wasmTypeForAbi(abi, def.ty!);
const type = internFuncType(cx, wasmType);
idx = cx.mod.imports.length;
@ -291,7 +292,7 @@ function lowerImport(
function lowerGlobal(
cx: Context,
item: Item<Typecked>,
def: GlobalItem<Typecked>
def: GlobalItem<Typecked>,
) {
const globalIdx = cx.mod.globals.length;
@ -334,13 +335,14 @@ type FuncContext = {
varLocations: VarLocation[];
loopDepths: Map<LoopId, number>;
currentBlockDepth: number;
scratchLocals: Map<wasm.ValType, wasm.LocalIdx[]>;
};
type FnAbi = { params: ArgRetAbi[]; ret: ArgRetAbi };
type ArgRetAbi = wasm.ValType[];
type VarLocation = { localIdx: number; types: wasm.ValType[] };
type VarLocation = { localIdx: number; types: wasm.ValType[]; ty: Ty };
type StructFieldLayout = {
types: { offset: number; type: wasm.ValType }[];
@ -356,10 +358,10 @@ type StructLayout = {
function lowerFunc(
cx: Context,
item: Item<Typecked>,
func: FunctionDef<Typecked>
func: FunctionDef<Typecked>,
) {
const abi = computeAbi(func.ty!);
const { type: wasmType, paramLocations } = wasmTypeForAbi(abi);
const { type: wasmType, paramLocations } = wasmTypeForAbi(abi, func.ty!);
const type = internFuncType(cx, wasmType);
const wasmFunc: wasm.Func = {
@ -378,16 +380,26 @@ function lowerFunc(
varLocations: paramLocations,
loopDepths: new Map(),
currentBlockDepth: 0,
scratchLocals: new Map(),
};
lowerExpr(fcx, wasmFunc.body, fcx.func.body);
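// Function epilogue: refcounted parameters go out of scope here, so drop the
// reference held for each of them ("decrement all params" from the refcounting rules).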
paramLocations.forEach((local) => {
const refcount = needsRefcount(local.ty);
if (refcount !== undefined) {
// TODO: correctly deal with tuples
loadVariable(wasmFunc.body, local);
subRefcount(fcx, wasmFunc.body, refcount);
}
});
const idx = fcx.cx.mod.funcs.length;
fcx.cx.mod.funcs.push(wasmFunc);
fcx.cx.funcIndices.set(
{ kind: "item", id: fcx.item.id },
{ kind: "func", idx }
{ kind: "func", idx },
);
}
@ -399,7 +411,7 @@ Expression lowering.
function lowerExpr(
fcx: FuncContext,
instrs: wasm.Instr[],
expr: Expr<Typecked>
expr: Expr<Typecked>,
) {
const ty = expr.ty;
@ -420,7 +432,7 @@ function lowerExpr(
instrs.push({ kind: "local.set", imm: local + i });
});
fcx.varLocations.push({ localIdx: local, types });
fcx.varLocations.push({ localIdx: local, types, ty: expr.rhs.ty });
break;
}
@ -475,7 +487,7 @@ function lowerExpr(
} else {
const instr: wasm.Instr = {
kind: "block",
instrs: lowerExprBlockBody(fcx, expr),
instrs: lowerExprBlockBody(fcx, expr, prevVarLocationLengths),
type: blockTypeForBody(fcx.cx, expr.ty),
};
@ -518,6 +530,17 @@ function lowerExpr(
const location =
fcx.varLocations[fcx.varLocations.length - 1 - res.index];
loadVariable(instrs, location);
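// Loading a local by name is a copy (see the refcounting rules), so the loaded value gains a reference here.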
const refcount = needsRefcount(expr.ty);
if (refcount !== undefined) {
addRefcount(
fcx,
instrs,
refcount === "string" ? "string" : "struct",
);
}
break;
}
case "item": {
@ -694,7 +717,7 @@ function lowerExpr(
case "__i32_load": {
assertArgs(1);
lowerExpr(fcx, instrs, expr.args[0]);
instrs.push({ kind: "i64.load", imm: {} });
instrs.push({ kind: "i32.load", imm: {} });
break exprKind;
}
case "__i64_load": {
@ -752,6 +775,11 @@ function lowerExpr(
instrs.push({ kind: "i64.extend_i32_u" });
break exprKind;
}
case "___transmute": {
expr.args.map((arg) => lowerExpr(fcx, instrs, arg));
// don't do anything
break exprKind;
}
}
}
@ -791,7 +819,7 @@ function lowerExpr(
const resultSize = resultAbi.length;
const wasmIdx = wasmTypeIdxForTupleField(
expr.lhs.ty,
expr.field.fieldIdx!
expr.field.fieldIdx!,
);
// lhsSize=5, resultSize=2, wasmIdx=2
@ -808,13 +836,13 @@ function lowerExpr(
if (expr.field.fieldIdx! > 0) {
// Keep the result in scratch space.
storeVariable(instrs, { localIdx, types: resultAbi });
storeVariable(instrs, { localIdx, types: resultAbi, ty: expr.ty });
Array(wasmIdx)
.fill(0)
.forEach(() => instrs.push({ kind: "drop" }));
loadVariable(instrs, { localIdx, types: resultAbi });
loadVariable(instrs, { localIdx, types: resultAbi, ty: expr.ty });
}
break;
@ -824,9 +852,7 @@ function lowerExpr(
const layout = layoutOfStruct(ty);
const field = layout.fields[expr.field.fieldIdx!];
// TODO: SCRATCH LOCALS
const ptrLocal = fcx.wasmType.params.length + fcx.wasm.locals.length;
fcx.wasm.locals.push("i32");
const ptrLocal = getScratchLocals(fcx, "i32", 1)[0];
// We save the local for getting it later for all the field parts.
instrs.push({
@ -860,7 +886,7 @@ function lowerExpr(
break;
default: {
throw new Error(
`unsupported struct content type: ${fieldPart.type}`
`unsupported struct content type: ${fieldPart.type}`,
);
}
}
@ -958,13 +984,12 @@ function lowerExpr(
res: { kind: "item", id: allocateItemId },
});
instrs.push(allocate);
// TODO: scratch locals...
const ptrLocal = fcx.wasmType.params.length + fcx.wasm.locals.length;
fcx.wasm.locals.push("i32");
const ptrLocal = getScratchLocals(fcx, "i32", 1)[0];
instrs.push({ kind: "local.tee", imm: ptrLocal });
// Store the refcount
instrs.push({ kind: "i32.const", imm: 0n });
instrs.push({ kind: "i32.const", imm: 1n });
instrs.push({ kind: "i32.store", imm: { align: 4 } });
// Now, set all fields.
@ -996,7 +1021,7 @@ function lowerExpr(
break;
default: {
throw new Error(
`unsupported struct content type: ${fieldPart.type}`
`unsupported struct content type: ${fieldPart.type}`,
);
}
}
@ -1012,8 +1037,6 @@ function lowerExpr(
expr.fields.forEach((field) => lowerExpr(fcx, instrs, field));
break;
}
case "refcount":
todo("refcount");
default: {
const _: never = expr;
}
@ -1027,29 +1050,89 @@ function lowerExpr(
function lowerExprBlockBody(
fcx: FuncContext,
expr: ExprBlock<Typecked> & Expr<Typecked>
expr: ExprBlock<Typecked> & Expr<Typecked>,
prevVarLocationLength: number,
): wasm.Instr[] {
fcx.currentBlockDepth++;
const innerInstrs: wasm.Instr[] = [];
const instrs: wasm.Instr[] = [];
const headExprs = expr.exprs.slice(0, -1);
const tailExpr = expr.exprs[expr.exprs.length - 1];
for (const inner of headExprs) {
lowerExpr(fcx, innerInstrs, inner);
lowerExpr(fcx, instrs, inner);
if (inner.ty.kind === "never") {
// The rest of the block is unreachable, so we don't bother codegening it.
break;
}
const types = wasmTypeForBody(inner.ty);
types.forEach(() => innerInstrs.push({ kind: "drop" }));
const refcount = needsRefcount(inner.ty);
if (refcount !== undefined) {
subRefcount(fcx, instrs, refcount);
} else {
// TODO: correctly deal with tuples
types.forEach(() => instrs.push({ kind: "drop" }));
}
}
lowerExpr(fcx, innerInstrs, tailExpr);
lowerExpr(fcx, instrs, tailExpr);
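// Locals declared in this block go out of scope now; release the reference each refcounted local still holds.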
const thisBlockLocals = fcx.varLocations.slice(prevVarLocationLength);
thisBlockLocals.forEach((local) => {
const refcount = needsRefcount(local.ty);
if (refcount !== undefined) {
// TODO: correctly deal with tuples
loadVariable(instrs, local);
subRefcount(fcx, instrs, refcount);
}
});
fcx.currentBlockDepth--;
return innerInstrs;
return instrs;
}
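// Hands out `amount` reusable scratch locals of the given type, growing the
// function's local list only when earlier requests have not already created enough.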
function getScratchLocals(
fcx: FuncContext,
type: wasm.ValType,
amount: number,
): wasm.LocalIdx[] {
function addLocals(fcx: FuncContext, type: wasm.ValType[]): wasm.LocalIdx[] {
const local = fcx.wasm.locals.length + fcx.wasmType.params.length;
fcx.wasm.locals.push(...type);
return type.map((_, i) => local + i);
}
const existing = fcx.scratchLocals.get(type);
if (!existing) {
const locals = addLocals(
fcx,
Array(amount)
.fill(0)
.map(() => type),
);
fcx.scratchLocals.set(type, locals);
return locals;
} else {
const toAdd = amount - existing.length;
if (toAdd > 0) {
const locals = addLocals(
fcx,
Array(toAdd)
.fill(0)
.map(() => type),
);
existing.push(...locals);
return existing;
}
return existing.slice(0, amount);
}
}
function loadVariable(instrs: wasm.Instr[], loc: VarLocation) {
@ -1105,17 +1188,21 @@ function computeAbi(ty: TyFn): FnAbi {
return { params, ret };
}
function wasmTypeForAbi(abi: FnAbi): {
function wasmTypeForAbi(
abi: FnAbi,
ty: TyFn,
): {
type: wasm.FuncType;
paramLocations: VarLocation[];
} {
const params: wasm.ValType[] = [];
const paramLocations: VarLocation[] = [];
abi.params.forEach((arg) => {
abi.params.forEach((arg, i) => {
paramLocations.push({
localIdx: params.length,
types: arg,
ty: ty.params[i],
});
params.push(...arg);
});
@ -1168,7 +1255,7 @@ export function layoutOfStruct(ty: TyStruct): StructLayout {
// TODO: Use the max alignment instead.
const align = fieldWasmTys.some((field) =>
field.some((type) => type === "i64")
field.some((type) => type === "i64"),
)
? 8
: 4;
@ -1185,7 +1272,8 @@ export function layoutOfStruct(ty: TyStruct): StructLayout {
const types = field.map((type) => {
const size = sizeOfValtype(type);
if (size === 8 && offset % 8 !== 0) {
// we don't want padding for the first field as the allocator takes care of that.
if (offset !== 4 && size === 8 && offset % 8 !== 0) {
// padding.
offset += 4;
}
@ -1203,6 +1291,9 @@ export function layoutOfStruct(ty: TyStruct): StructLayout {
return value;
});
// we ignore the refcount for struct size.
offset -= 4;
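// Field offsets count from the start of the allocation, whose first 4 bytes are
// the refcount; only the reported size leaves the header out (cf. the
// single-field layout test: offset 4, size 8).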
if (align === 8 && offset % 8 !== 0) {
offset += 4;
}
@ -1230,6 +1321,169 @@ function wasmTypeIdxForTupleField(ty: TyTuple, idx: number): number {
return head.reduce((a, b) => a + b.length, 0);
}
// Refcounts:
/*
* Injects `refcount` expressions into the code to make sure
* that no memory is leaked and no memory is freed too early.
*
* When do we need to adjust the refcount?
*
* When looking at reference counts, we need to distinguish between moves
* and copies of a struct. When a struct is moved, no reference count has
* to be changed. When it is copied, we need to increment the reference count.
*
* ```
* let a = S {};
* foo(a); // COPY
* ```
* ```
* let a = identity(S {}); // MOVE
* ```
*
* Due to the way the language is structured, this analysis is fairly simple:
* Most expressions are considered moves, but identifiers like `a` are considered
* copies. This is sound because the only way to refer to a value twice is to bind
* it to a variable. So whenever we load a variable of struct type, we need to bump
* the refcount.
*
* Then we just need to decrement all the locals' (and params'!) refcounts when they go
* out of scope.
*
* This leaves us with the following rules:
* - when loading an identifier, add an increment
* - when the end of a block is reached, decrement all locals
* - when the end of a function is reached, decrement all params
* - when an expression value is ignored, decrement
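*
* As a concrete example, the `uwu` function from the main.ts test input ends up
* with adjustments roughly like this:
*
* ```
* function uwu() = (
*   let a = A { a: 100 };
*   eat(a);       // `a` is loaded by name: +1
*   A { a: 100 }; // the value is ignored: -1, freeing it right away
*   // end of the block: -1 for the local `a`
* );
* ```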
*/
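// Decides how a value of `ty` is refcounted: structs return their layout,
// strings are not handled yet and return undefined (see the TODO), and
// everything else needs no refcounting.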
function needsRefcount(ty: Ty): StructLayout | "string" | undefined {
switch (ty.kind) {
case "string":
// TODO: deal with strings
return undefined;
case "struct":
return layoutOfStruct(ty);
case "list":
todo("no lists yet");
case "var":
varUnreachable();
default:
return undefined;
}
}
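// Emits a block that increments the refcount stored at the pointer on the stack
// (under the length, for strings) and leaves the stack exactly as it found it.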
function addRefcount(
fcx: FuncContext,
instrs: wasm.Instr[],
kind: "struct" | "string",
) {
const layout: wasm.ValType[] = kind === "string" ? ["i32", "i32"] : ["i32"];
const [ptr, len] = getScratchLocals(fcx, "i32", layout.length);
// stack: PTR, {LEN}
const innerInstrs: wasm.Instr[] = [];
if (kind === "string") {
innerInstrs.push({ kind: "local.set", imm: len }); // stack: PTR
}
innerInstrs.push({ kind: "local.tee", imm: ptr }); // stack: PTR
innerInstrs.push({ kind: "local.get", imm: ptr }); // stack: PTR, PTR
innerInstrs.push({ kind: "local.get", imm: ptr }); // stack: PTR, PTR, PTR
innerInstrs.push({ kind: "i32.load", imm: { align: 4 } }); // stack: PTR, PTR, cnt
innerInstrs.push({ kind: "i32.const", imm: 1n }); // stack: PTR, PTR, cnt, 1
innerInstrs.push({ kind: "i32.add" }); // stack: PTR, PTR, cnt
innerInstrs.push({ kind: "i32.store", imm: { align: 4 } }); // stack: PTR
if (kind === "string") {
innerInstrs.push({ kind: "local.get", imm: len }); // stack: PTR, {LEN}
}
// stack: PTR, {LEN}
instrs.push({
kind: "block",
instrs: innerInstrs,
type: {
kind: "typeidx",
idx: internFuncType(fcx.cx, { params: layout, returns: layout }),
},
});
}
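// Emits a block that consumes the pointer (plus the length, for strings) from
// the stack and decrements the object's refcount; when it drops to zero, the
// count is not written back and std.rt.deallocateItem is called with the
// pointer and the object's size.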
function subRefcount(
fcx: FuncContext,
instrs: wasm.Instr[],
kind: StructLayout | "string",
) {
const deallocateItemId = fcx.cx.knownDefPaths.get(DEALLOCATE_ITEM);
if (!deallocateItemId) {
throw new Error("std.rt.deallocateItem not found");
}
const layout: wasm.ValType[] = kind === "string" ? ["i32", "i32"] : ["i32"];
const [ptr, len] = getScratchLocals(fcx, "i32", layout.length);
const count = ptr;
const innerInstrs: wasm.Instr[] = [];
// stack: PTR, {LEN}
if (kind === "string") {
innerInstrs.push({ kind: "local.set", imm: len }); // stack: PTR
}
innerInstrs.push({ kind: "local.tee", imm: ptr }); // stack: PTR
innerInstrs.push({ kind: "local.get", imm: ptr }); // stack: PTR, PTR
innerInstrs.push({ kind: "i32.load", imm: { align: 4 } }); // stack: PTR, cnt
innerInstrs.push({ kind: "i32.const", imm: 1n }); // stack: PTR, cnt, 1
innerInstrs.push({ kind: "i32.sub" }); // stack: PTR, cnt
innerInstrs.push({ kind: "local.tee", imm: count }); // stack: PTR, cnt
innerInstrs.push({
kind: "if",
then: [
// stack: PTR
{ kind: "local.get", imm: count }, // stack: PTR, cnt
{ kind: "i32.store", imm: { align: 4 } }, // stack:
],
else: (() => {
// stack: PTR
const instrs: wasm.Instr[] = [];
if (kind === "string") {
instrs.push({ kind: "local.get", imm: len }); // stack: PTR, len
} else {
instrs.push({ kind: "i32.const", imm: BigInt(kind.size) }); // stack: PTR, len
}
const deallocateCall: wasm.Instr = { kind: "call", func: DUMMY_IDX };
fcx.cx.relocations.push({
kind: "funccall",
instr: deallocateCall,
res: { kind: "item", id: deallocateItemId },
});
instrs.push(deallocateCall); // stack:
return instrs;
})(),
type: {
kind: "typeidx",
idx: internFuncType(fcx.cx, { params: ["i32"], returns: [] }),
},
});
instrs.push({
kind: "block",
instrs: innerInstrs,
type: {
kind: "typeidx",
idx: internFuncType(fcx.cx, { params: layout, returns: [] }),
},
});
// stack:
}
function todo(msg: string): never {
throw new Error(`TODO: ${msg}`);
}
@ -1304,7 +1558,7 @@ function addRt(cx: Context, crates: Crate<Typecked>[]) {
cx.funcIndices.set(
{ kind: "builtin", name: "print" },
{ kind: "func", idx: printIdx }
{ kind: "func", idx: printIdx },
);
mod.exports.push({


@ -7,7 +7,7 @@ import path from "path";
export type CrateLoader = (
gcx: GlobalContext,
name: string,
span: Span
span: Span,
) => DepCrate;
/**
@ -27,12 +27,12 @@ export class GlobalContext {
public findItem<P extends Phase>(
id: ItemId,
localCrate?: Crate<P>
localCrate?: Crate<P>,
): Item<P | Final> {
const crate = unwrap(
[...(localCrate ? [localCrate] : []), ...this.finalizedCrates].find(
(crate) => crate.id === id.crateId
)
(crate) => crate.id === id.crateId,
),
);
if (id.itemIdx === 0) {
@ -73,7 +73,7 @@ export function parseArgs(hardcodedInput: string): Options {
console.error(process.argv);
console.error(
`error: filename must have \`.nil\` extension: \`${filename}\``
`error: filename must have \`.nil\` extension: \`${filename}\``,
);
process.exit(1);
}
@ -99,7 +99,6 @@ export function parseArgs(hardcodedInput: string): Options {
"parsed",
"resolved",
"typecked",
"wat",
"wasm-validate",
]);
}


@ -5,7 +5,7 @@ it("should extract lines correctly", () => {
const lineSpans = lines({ content: input });
const lineContents = lineSpans.map(({ start, end }) =>
input.slice(start, end)
input.slice(start, end),
);
expect(lineContents).toStrictEqual(["AAA", "meow", ":3", "", ""]);


@ -7,7 +7,7 @@ export class Span {
constructor(
public start: number,
public end: number,
public file: LoadedFile
public file: LoadedFile,
) {}
public merge(b: Span): Span {
@ -18,7 +18,7 @@ export class Span {
return new Span(
Math.min(this.start, b.start),
Math.max(this.end, b.end),
this.file
this.file,
);
}
@ -46,7 +46,7 @@ export class CompilerError extends Error {
export function withErrorPrinter<R>(
f: () => R,
afterError: (e: CompilerError) => R
afterError: (e: CompilerError) => R,
): R {
try {
return f();
@ -69,7 +69,7 @@ function renderError(e: CompilerError) {
span.start === Number.MAX_SAFE_INTEGER
? lineSpans[lineSpans.length - 1]
: lineSpans.find(
(line) => line.start <= span.start && line.end >= span.start
(line) => line.start <= span.start && line.end >= span.start,
);
if (!line) {
throw Error(`Span out of bounds: ${span.start}..${span.end}`);
@ -90,8 +90,8 @@ function renderError(e: CompilerError) {
console.error(
`${" ".repeat(String(lineNo).length)} ${" ".repeat(
startRelLine
)}${"^".repeat(spanLength)}`
startRelLine,
)}${"^".repeat(spanLength)}`,
);
}


@ -1,6 +1,6 @@
import { LoadedFile, Span, withErrorPrinter } from "./error";
import { isValidIdent, tokenize } from "./lexer";
import { lower as lowerToWasm } from "./lower";
import { lower as lowerToWasm } from "./codegen";
import { ParseState, parse } from "./parser";
import { printAst } from "./printer";
import { resolve } from "./resolve";
@ -16,14 +16,19 @@ const INPUT = `
type A = { a: Int };
function main() = (
let a = A { a: 100 };
printA(a);
uwu();
);
function printA(a: A) = (
print("ABCDEFGH\\n");
print("ABCDEFGH\\n");
function uwu() = (
let a = A { a: 100 };
eat(a /*+1*/);
A { a: 100 };
/*-1*/
);
function eat(a: A) = ;
`;
function main() {
@ -32,7 +37,7 @@ function main() {
if (!isValidIdent(packageName)) {
console.error(
`error: package name \`${packageName}\` is not a valid identifer`
`error: package name \`${packageName}\` is not a valid identifer`,
);
process.exit(1);
}
@ -123,7 +128,7 @@ function main() {
});
}
},
() => process.exit(1)
() => process.exit(1),
);
}


@ -206,7 +206,7 @@ export function tokenize(file: LoadedFile): Token[] {
default:
throw new CompilerError(
`invalid escape character: ${input[i]}`,
new Span(span.end - 1, span.end, file)
new Span(span.end - 1, span.end, file),
);
}
continue;
@ -231,7 +231,7 @@ export function tokenize(file: LoadedFile): Token[] {
const int = parseInt(digit, 10);
if (Number.isNaN(int)) {
throw new Error(
`\`${digit}\` was tokenized to a number even though it is not`
`\`${digit}\` was tokenized to a number even though it is not`,
);
}


@ -11,7 +11,7 @@ import { typeck } from "./typeck";
export function loadModuleFile(
relativeTo: string,
moduleName: string,
span: Span
span: Span,
): LoadedFile {
let searchDir: string;
if (relativeTo.endsWith(".mod.nil")) {
@ -20,7 +20,7 @@ export function loadModuleFile(
} else if (relativeTo.endsWith(".nil")) {
throw new CompilerError(
`.nil files cannot have submodules. use .mod.nil in a subdirectory`,
span
span,
);
} else {
searchDir = relativeTo;
@ -43,7 +43,7 @@ export function loadModuleFile(
if (content === undefined || filePath === undefined) {
throw new CompilerError(
`failed to load ${moduleName}, could not find ${options.join(" or ")}`,
span
span,
);
}
@ -53,13 +53,13 @@ export function loadModuleFile(
export const loadCrate: CrateLoader = (
gcx: GlobalContext,
name: string,
span: Span
span: Span,
): DepCrate => {
// We really, really want a good algorithm for finding crates.
// But right now we just look for files in the CWD.
const existing = gcx.finalizedCrates.find(
(crate) => crate.packageName === name
(crate) => crate.packageName === name,
);
if (existing) {
return existing;
@ -75,7 +75,6 @@ export const loadCrate: CrateLoader = (
const parseState: ParseState = { tokens, file };
const ast = parse(name, parseState, crateId);
const resolved = resolve(gcx, ast);
console.log(resolved);
const typecked = typeck(gcx, resolved);
@ -85,8 +84,8 @@ export const loadCrate: CrateLoader = (
() => {
throw new CompilerError(
`failed to load crate ${name}: crate contains errors`,
span
span,
);
}
},
);
};


@ -51,7 +51,7 @@ type Parser<T> = (t: State) => [State, T];
export function parse(
packageName: string,
t: State,
crateId: number
crateId: number,
): Crate<Built> {
const [, items] = parseItems(t);
@ -195,7 +195,7 @@ function parseItem(t: State): [State, Item<Parsed>] {
if (name.span.file.path === undefined) {
throw new CompilerError(
`no known source file for statement, cannot load file relative to it`,
name.span
name.span,
);
}
const file = loadModuleFile(name.span.file.path, name.ident, name.span);
@ -304,7 +304,7 @@ function mkBinaryExpr(
lhs: Expr<Parsed>,
rhs: Expr<Parsed>,
span: Span,
kind: string
kind: string,
): Expr<Parsed> {
return { kind: "binary", binaryKind: kind as BinaryKind, lhs, rhs, span };
}
@ -312,7 +312,7 @@ function mkBinaryExpr(
function mkParserExprBinary(
lower: Parser<Expr<Parsed>>,
kinds: string[],
mkExpr = mkBinaryExpr
mkExpr = mkBinaryExpr,
): Parser<Expr<Parsed>> {
function parser(t: State): [State, Expr<Parsed>] {
let lhs;
@ -337,25 +337,25 @@ function mkParserExprBinary(
const parseExprArithFactor = mkParserExprBinary(
parseExprUnary,
ARITH_FACTOR_KINDS
ARITH_FACTOR_KINDS,
);
const parseExprArithTerm = mkParserExprBinary(
parseExprArithFactor,
ARITH_TERM_KINDS
ARITH_TERM_KINDS,
);
const parseExprLogical = mkParserExprBinary(parseExprArithTerm, LOGICAL_KINDS);
const parseExprComparison = mkParserExprBinary(
parseExprLogical,
COMPARISON_KINDS
COMPARISON_KINDS,
);
const parseExprAssignment = mkParserExprBinary(
parseExprComparison,
["="],
(lhs, rhs, span) => ({ kind: "assign", lhs, rhs, span })
(lhs, rhs, span) => ({ kind: "assign", lhs, rhs, span }),
);
function parseExprUnary(t: State): [State, Expr<Parsed>] {
@ -566,7 +566,7 @@ function parseExprAtom(startT: State): [State, Expr<Parsed>] {
}
function parseStructInit(
t: State
t: State,
): [State, ExprStructLiteral<Parsed>["fields"]] {
[t] = expectNext(t, "{");
@ -582,7 +582,7 @@ function parseStructInit(
[t, expr] = parseExpr(t);
return [t, { name: { name: name.ident, span: name.span }, expr }];
}
},
);
return [t, fields];
@ -640,7 +640,7 @@ function parseType(t: State): [State, Type<Parsed>] {
default: {
throw new CompilerError(
`unexpected token: \`${tok.kind}\`, expected type`,
span
span,
);
}
}
@ -651,7 +651,7 @@ function parseType(t: State): [State, Type<Parsed>] {
function parseCommaSeparatedList<R>(
t: State,
terminator: Token["kind"],
parser: Parser<R>
parser: Parser<R>,
): [State, R[]] {
const items: R[] = [];
@ -682,7 +682,7 @@ function parseCommaSeparatedList<R>(
function eat<T extends BaseToken>(
t: State,
kind: T["kind"]
kind: T["kind"],
): [State, T | undefined] {
if (peekKind(t) === kind) {
return expectNext(t, kind);
@ -696,20 +696,20 @@ function peekKind(t: State): Token["kind"] | undefined {
function expectNext<T extends BaseToken>(
t: State,
kind: T["kind"]
kind: T["kind"],
): [State, T & Token] {
let tok;
[t, tok] = maybeNextT(t);
if (!tok) {
throw new CompilerError(
`expected \`${kind}\`, found end of file`,
Span.eof(t.file)
Span.eof(t.file),
);
}
if (tok.kind !== kind) {
throw new CompilerError(
`expected \`${kind}\`, found \`${tok.kind}\``,
tok.span
tok.span,
);
}
return [t, tok as unknown as T & Token];
@ -742,7 +742,7 @@ function validateAst(ast: Crate<Built>) {
itemInner(item: Item<Built>): Item<Built> {
if (seenItemIds.has(item.id)) {
throw new Error(
`duplicate item id: ${item.id.toString()} for ${item.node.name}`
`duplicate item id: ${item.id.toString()} for ${item.node.name}`,
);
}
seenItemIds.add(item.id);
@ -772,7 +772,7 @@ function validateAst(ast: Crate<Built>) {
if (ourClass !== innerClass) {
throw new CompilerError(
`mixing operators without parentheses is not allowed. ${side} is ${inner.binaryKind}, which is different from ${expr.binaryKind}`,
expr.span
expr.span,
);
}
}
@ -801,7 +801,7 @@ function buildCrate(
packageName: string,
rootItems: Item<Parsed>[],
crateId: number,
rootFile: LoadedFile
rootFile: LoadedFile,
): Crate<Built> {
const itemId = new Ids();
itemId.next(); // crate root ID


@ -47,7 +47,7 @@ function printItem(item: Item<AnyPhase>): string {
id +
`global ${item.node.name}: ${printType(item.node.type)} = ${printExpr(
item.node.init,
0
0,
)};`
);
}
@ -64,7 +64,7 @@ function printFunction(func: FunctionDef<AnyPhase>): string {
function printTypeDef(type: TypeDef<AnyPhase>): string {
const fields = type.fields.map(
({ name, type }) => `${ind(1)}${name.name}: ${printType(type)},`
({ name, type }) => `${ind(1)}${name.name}: ${printType(type)},`,
);
const fieldPart =
@ -80,7 +80,7 @@ function printImportDef(def: ImportDef<AnyPhase>): string {
const ret = def.returnType ? `: ${printType(def.returnType)}` : "";
return `import ${printStringLiteral(def.module)} ${printStringLiteral(
def.func
def.func,
)}(${args})${ret};`;
}
@ -98,7 +98,7 @@ function printExpr(expr: Expr<AnyPhase>, indent: number): string {
return `let ${expr.name.name}${type} = ${printExpr(
expr.rhs,
indent + 1
indent + 1,
)}`;
}
case "assign": {
@ -146,7 +146,7 @@ function printExpr(expr: Expr<AnyPhase>, indent: number): string {
case "binary": {
return `${printExpr(expr.lhs, indent)} ${expr.binaryKind} ${printExpr(
expr.rhs,
indent
indent,
)}`;
}
case "unary": {
@ -175,7 +175,7 @@ function printExpr(expr: Expr<AnyPhase>, indent: number): string {
: "";
return `if ${printExpr(expr.cond, indent + 1)} then ${printExpr(
expr.then,
indent + 1
indent + 1,
)}${elsePart}`;
}
case "loop": {


@ -35,7 +35,7 @@ function loadCrate(cx: Context, name: string, span: Span): Map<string, ItemId> {
const loadedCrate = cx.gcx.crateLoader(cx.gcx, name, span);
const contents = new Map(
loadedCrate.rootItems.map((item) => [item.node.name, item.id])
loadedCrate.rootItems.map((item) => [item.node.name, item.id]),
);
return contents;
@ -45,7 +45,7 @@ function resolveModItem(
cx: Context,
mod: ModItem<Built> | ExternItem,
item: Item<Built>,
name: string
name: string,
): ItemId | undefined {
const cachedContents = cx.modContentsCache.get(item.id);
if (cachedContents) {
@ -66,7 +66,7 @@ function resolveModItem(
export function resolve(
gcx: GlobalContext,
ast: Crate<Built>
ast: Crate<Built>,
): Crate<Resolved> {
const cx: Context = {
ast,
@ -88,7 +88,7 @@ export function resolve(
function resolveModule(
cx: Context,
modName: string[],
contents: Item<Built>[]
contents: Item<Built>[],
): Item<Resolved>[] {
const items = new Map<string, ItemId>();
@ -97,7 +97,7 @@ function resolveModule(
if (existing !== undefined) {
throw new CompilerError(
`item \`${item.node.name}\` has already been declared`,
item.span
item.span,
);
}
items.set(item.node.name, item.id);
@ -109,7 +109,7 @@ function resolveModule(
const popped = scopes.pop();
if (popped !== expected) {
throw new Error(
`Scopes corrupted, wanted to pop ${expected} but popped ${popped}`
`Scopes corrupted, wanted to pop ${expected} but popped ${popped}`,
);
}
};
@ -224,7 +224,7 @@ function resolveModule(
blockLocals.push([]);
const exprs = expr.exprs.map<Expr<Resolved>>((inner) =>
this.expr(inner)
this.expr(inner),
);
scopes.length = prevScopeLength;
@ -271,7 +271,7 @@ function resolveModule(
if (typeof expr.field.value === "number") {
throw new CompilerError(
"module contents cannot be indexed with a number",
expr.field.span
expr.field.span,
);
}
@ -279,12 +279,12 @@ function resolveModule(
cx,
module.node,
module,
expr.field.value
expr.field.value,
);
if (pathResItem === undefined) {
throw new CompilerError(
`module ${module.node.name} has no item ${expr.field.value}`,
expr.field.span
expr.field.span,
);
}


@ -2,7 +2,7 @@ import { TY_INT, TY_STRING, TY_UNIT } from "./ast";
import { Span } from "./error";
import { InferContext } from "./typeck";
const SPAN: Span = Span.startOfFile({content: ""});
const SPAN: Span = Span.startOfFile({ content: "" });
it("should infer types across assignments", () => {
const infcx = new InferContext();


@ -29,6 +29,7 @@ import {
TyStruct,
Item,
StructLiteralField,
superFoldExpr,
} from "./ast";
import { GlobalContext } from "./context";
import { CompilerError, Span } from "./error";
@ -96,7 +97,7 @@ function typeOfBuiltinValue(name: BuiltinName, span: Span): Ty {
function lowerAstTyBase(
type: Type<Resolved>,
lowerIdentTy: (ident: IdentWithRes<Resolved>) => Ty,
typeOfItem: (itemId: ItemId, cause: Span) => Ty
typeOfItem: (itemId: ItemId, cause: Span) => Ty,
): Ty {
switch (type.kind) {
case "ident": {
@ -112,7 +113,7 @@ function lowerAstTyBase(
return {
kind: "tuple",
elems: type.elems.map((type) =>
lowerAstTyBase(type, lowerIdentTy, typeOfItem)
lowerAstTyBase(type, lowerIdentTy, typeOfItem),
),
};
}
@ -124,7 +125,7 @@ function lowerAstTyBase(
export function typeck(
gcx: GlobalContext,
ast: Crate<Resolved>
ast: Crate<Resolved>,
): Crate<Typecked> {
const itemTys = new ComplexMap<ItemId, Ty | null>();
@ -141,13 +142,13 @@ export function typeck(
case "mod": {
throw new CompilerError(
`module ${item.node.name} cannot be used as a type or value`,
cause
cause,
);
}
case "extern": {
throw new CompilerError(
`extern declaration ${item.node.name} cannot be used as a type or value`,
cause
cause,
);
}
}
@ -161,7 +162,7 @@ export function typeck(
if (ty === null) {
throw new CompilerError(
`cycle computing type of #G${itemId.toString()}`,
item.span
item.span,
);
}
itemTys.set(itemId, null);
@ -199,13 +200,13 @@ export function typeck(
case "mod": {
throw new CompilerError(
`module ${item.node.name} cannot be used as a type or value`,
cause
cause,
);
}
case "extern": {
throw new CompilerError(
`extern declaration ${item.node.name} cannot be used as a type or value`,
cause
cause,
);
}
case "global": {
@ -233,7 +234,7 @@ export function typeck(
}
}
},
typeOfItem
typeOfItem,
);
}
@ -274,7 +275,7 @@ export function typeck(
default: {
throw new CompilerError(
`import parameters must be I32 or Int`,
item.node.params[i].span
item.node.params[i].span,
);
}
}
@ -288,7 +289,7 @@ export function typeck(
default: {
throw new CompilerError(
`import return must be I32 or Int`,
item.node.returnType!.span
item.node.returnType!.span,
);
}
}
@ -321,7 +322,7 @@ export function typeck(
if (fieldNames.has(name)) {
throw new CompilerError(
`type ${item.node.name} has a duplicate field: ${name.name}`,
name.span
name.span,
);
}
fieldNames.add(name);
@ -366,7 +367,7 @@ export function typeck(
if (init.kind !== "literal" || init.value.kind !== "int") {
throw new CompilerError(
"globals must be initialized with an integer literal",
init.span
init.span,
);
}
@ -406,7 +407,7 @@ export function typeck(
if (ty.kind !== "tuple" || ty.elems.length !== 0) {
throw new CompilerError(
`\`main\` has an invalid signature. main takes no arguments and returns nothing`,
item.span
item.span,
);
}
}
@ -421,7 +422,7 @@ export function typeck(
if (!main) {
throw new CompilerError(
`\`main\` function not found`,
Span.startOfFile(ast.rootFile)
Span.startOfFile(ast.rootFile),
);
}
@ -586,7 +587,7 @@ export class InferContext {
throw new CompilerError(
`cannot assign ${printTy(rhs)} to ${printTy(lhs)}`,
span
span,
);
}
}
@ -596,7 +597,7 @@ export function checkBody(
ast: Crate<Resolved>,
body: Expr<Resolved>,
fnTy: TyFn,
typeOfItem: (itemId: ItemId, cause: Span) => Ty
typeOfItem: (itemId: ItemId, cause: Span) => Ty,
): Expr<Typecked> {
const localTys = [...fnTy.params];
const loopState: { hasBreak: boolean; loopId: LoopId }[] = [];
@ -634,13 +635,13 @@ export function checkBody(
return builtinAsTy(res.name, ident.span);
}
},
typeOfItem
typeOfItem,
);
}
const checker: Folder<Resolved, Typecked> = {
...mkDefaultFolder(),
expr(expr) {
expr(expr): Expr<Typecked> {
switch (expr.kind) {
case "empty": {
return { ...expr, ty: TY_UNIT };
@ -695,7 +696,7 @@ export function checkBody(
case "builtin":
throw new CompilerError(
"cannot assign to builtins",
expr.span
expr.span,
);
}
break;
@ -703,7 +704,7 @@ export function checkBody(
default: {
throw new CompilerError(
"invalid left-hand side of assignment",
lhs.span
lhs.span,
);
}
}
@ -777,13 +778,30 @@ export function checkBody(
return checkUnary(expr, rhs);
}
case "call": {
if (
expr.lhs.kind === "ident" &&
expr.lhs.value.res.kind === "builtin" &&
expr.lhs.value.res.name === "___transmute"
) {
const ty = infcx.newVar();
const args = expr.args.map((arg) => this.expr(arg));
const ret: Expr<Typecked> = {
...expr,
lhs: { ...expr.lhs, ty: TY_UNIT },
args,
ty,
};
return ret;
}
const lhs = this.expr(expr.lhs);
lhs.ty = infcx.resolveIfPossible(lhs.ty);
const lhsTy = lhs.ty;
if (lhsTy.kind !== "fn") {
throw new CompilerError(
`expression of type ${printTy(lhsTy)} is not callable`,
lhs.span
lhs.span,
);
}
@ -793,7 +811,7 @@ export function checkBody(
if (args.length <= i) {
throw new CompilerError(
`missing argument of type ${printTy(param)}`,
expr.span
expr.span,
);
}
const arg = checker.expr(args[i]);
@ -804,7 +822,7 @@ export function checkBody(
if (args.length > lhsTy.params.length) {
throw new CompilerError(
`too many arguments passed, expected ${lhsTy.params.length}, found ${args.length}`,
expr.span
expr.span,
);
}
@ -827,13 +845,13 @@ export function checkBody(
} else {
throw new CompilerError(
`tuple with ${elems.length} elements cannot be indexed with ${field.value}`,
field.span
field.span,
);
}
} else {
throw new CompilerError(
"tuple fields must be accessed with numbers",
field.span
field.span,
);
}
break;
@ -841,14 +859,14 @@ export function checkBody(
case "struct": {
if (typeof field.value === "string") {
const idx = lhs.ty.fields.findIndex(
([name]) => name === field.value
([name]) => name === field.value,
);
if (idx === -1) {
throw new CompilerError(
`field \`${field.value}\` does not exist on ${printTy(
lhs.ty
lhs.ty,
)}`,
field.span
field.span,
);
}
@ -857,7 +875,7 @@ export function checkBody(
} else {
throw new CompilerError(
"struct fields must be accessed with their name",
field.span
field.span,
);
}
break;
@ -865,9 +883,9 @@ export function checkBody(
default: {
throw new CompilerError(
`cannot access field \`${field.value}\` on type \`${printTy(
lhs.ty
lhs.ty,
)}\``,
expr.span
expr.span,
);
}
}
@ -933,7 +951,7 @@ export function checkBody(
}
case "structLiteral": {
const fields = expr.fields.map<StructLiteralField<Typecked>>(
({ name, expr }) => ({ name, expr: this.expr(expr) })
({ name, expr }) => ({ name, expr: this.expr(expr) }),
);
const structTy = typeOf(expr.name.res, expr.name.span);
@ -941,7 +959,7 @@ export function checkBody(
if (structTy.kind !== "struct") {
throw new CompilerError(
`struct literal is only allowed for struct types`,
expr.span
expr.span,
);
}
@ -949,12 +967,12 @@ export function checkBody(
fields.forEach(({ name, expr: field }, i) => {
const fieldIdx = structTy.fields.findIndex(
(def) => def[0] === name.name
(def) => def[0] === name.name,
);
if (fieldIdx == -1) {
throw new CompilerError(
`field ${name.name} doesn't exist on type ${expr.name.name}`,
name.span
name.span,
);
}
const fieldTy = structTy.fields[fieldIdx];
@ -972,7 +990,7 @@ export function checkBody(
if (missing.length > 0) {
throw new CompilerError(
`missing fields in literal: ${missing.join(", ")}`,
expr.span
expr.span,
);
}
@ -1025,7 +1043,9 @@ export function checkBody(
});
}
return { ...expr, ty };
const innerExpr = superFoldExpr(expr, this);
return { ...innerExpr, ty };
},
type(type) {
return type;
@ -1043,7 +1063,7 @@ export function checkBody(
function checkBinary(
expr: Expr<Resolved> & ExprBinary<Resolved>,
lhs: Expr<Typecked>,
rhs: Expr<Typecked>
rhs: Expr<Typecked>,
): Expr<Typecked> {
const lhsTy = lhs.ty;
const rhsTy = rhs.ty;
@ -1085,13 +1105,13 @@ function checkBinary(
`invalid types for binary operation: ${printTy(lhs.ty)} ${
expr.binaryKind
} ${printTy(rhs.ty)}`,
expr.span
expr.span,
);
}
function checkUnary(
expr: Expr<Resolved> & ExprUnary<Resolved>,
rhs: Expr<Typecked>
rhs: Expr<Typecked>,
): Expr<Typecked> {
const rhsTy = rhs.ty;
@ -1108,6 +1128,6 @@ function checkUnary(
throw new CompilerError(
`invalid types for unary operation: ${expr.unaryKind} ${printTy(rhs.ty)}`,
expr.span
expr.span,
);
}


@ -56,7 +56,7 @@ class FmtCtx {
this.indentation--;
if (this.indentation < 0) {
throw new Error(
"Cannot dedent from 0 indents, there are more dedents than indents"
"Cannot dedent from 0 indents, there are more dedents than indents",
);
}
this.linebreak();
@ -92,7 +92,7 @@ class FmtCtx {
word(
word: string | number | bigint,
color: (s: string) => string = identity
color: (s: string) => string = identity,
) {
const last = this.wordsInSexpr.length - 1;
if (this.wordsInSexpr[last] > 0 && !this.freshLinebreak) {


@ -34,5 +34,11 @@ function allocateItem(objSize: I32, align: I32): I32 = (
);
);
HEAD_PTR = newHeadPtr;
actualObjPtr
);
function deallocateItem(ptr: I32, objSize: I32) = (
std.println("uwu deawwocate :3");
);