mirror of https://github.com/Noratrieb/riverdelta.git
synced 2026-01-14 16:35:03 +01:00

commit 9ece18a48a (parent d9ab81bed1): reference counting

18 changed files with 477 additions and 159 deletions
@@ -1 +1,2 @@
 /target
+/ui-tests/target
.prettierrc.json (new file, 3 changes)
@@ -0,0 +1,3 @@
+{
+  "trailingComma": "all"
+}
src/ast.ts (11 changes)
@@ -448,6 +448,7 @@ export const BUILTINS = [
   "__memory_size",
   "__memory_grow",
   "__i32_extend_to_i64_u",
+  "___transmute",
 ] as const;
 
 export type BuiltinName = (typeof BUILTINS)[number];
@@ -565,7 +566,7 @@ const ITEM_DEFAULT = Symbol("item must not be overriden");
 
 export function mkDefaultFolder<
   From extends Phase,
-  To extends Phase
+  To extends Phase,
 >(): ItemFolder<From, To> {
   const folder: ItemFolder<From, To> = {
     newItemsById: new ComplexMap(),
@@ -585,7 +586,7 @@ export function mkDefaultFolder<
 
 export function foldAst<From extends Phase, To extends Phase>(
   ast: Crate<From>,
-  folder: Folder<From, To>
+  folder: Folder<From, To>,
 ): Crate<To> {
   if ((folder.item as any)[ITEM_DEFAULT] !== ITEM_DEFAULT) {
     throw new Error("must not override `item` on folders");
@@ -603,7 +604,7 @@ export function foldAst<From extends Phase, To extends Phase>(
 
 export function superFoldItem<From extends Phase, To extends Phase>(
   item: Item<From>,
-  folder: Folder<From, To>
+  folder: Folder<From, To>,
 ): Item<To> {
   switch (item.kind) {
     case "function": {
@@ -683,7 +684,7 @@ export function superFoldItem<From extends Phase, To extends Phase>(
 
 export function superFoldExpr<From extends Phase, To extends Phase>(
   expr: Expr<From>,
-  folder: Folder<From, To>
+  folder: Folder<From, To>,
 ): Expr<To> {
   const span = expr.span;
   switch (expr.kind) {
@@ -800,7 +801,7 @@ export function superFoldExpr<From extends Phase, To extends Phase>(
 
 export function superFoldType<From extends Phase, To extends Phase>(
   type: Type<From>,
-  folder: Folder<From, To>
+  folder: Folder<From, To>,
 ): Type<To> {
   const span = type.span;
   switch (type.kind) {
@@ -44,3 +44,33 @@ it("should compute struct layout correctly", () => {
     }
   `);
 });
+
+it("should compute single field struct layout correctly", () => {
+  const ty: TyStruct = {
+    kind: "struct",
+    name: "",
+    fields: [["owo", TY_INT]],
+  };
+
+  const layout = layoutOfStruct(ty);
+
+  expect(layout).toMatchInlineSnapshot(`
+    {
+      "align": 8,
+      "fields": [
+        {
+          "ty": {
+            "kind": "int",
+          },
+          "types": [
+            {
+              "offset": 4,
+              "type": "i64",
+            },
+          ],
+        },
+      ],
+      "size": 8,
+    }
+  `);
+});
src/codegen.ts (338 changes)
@@ -37,6 +37,7 @@ const WASM_PAGE = 65536;
 const DUMMY_IDX = 9999999;
 
 const ALLOCATE_ITEM: string[] = ["std", "rt", "allocateItem"];
+const DEALLOCATE_ITEM: string[] = ["std", "rt", "deallocateItem"];
 
 type RelocationKind =
   | {
@@ -111,10 +112,10 @@ function appendData(cx: Context, newData: Uint8Array): number {
   }
 }
 
-const KNOWN_DEF_PATHS = [ALLOCATE_ITEM];
+const KNOWN_DEF_PATHS = [ALLOCATE_ITEM, DEALLOCATE_ITEM];
 
 function getKnownDefPaths(
-  crates: Crate<Typecked>[]
+  crates: Crate<Typecked>[],
 ): ComplexMap<string[], ItemId> {
   const knows = new ComplexMap<string[], ItemId>();
 
@@ -141,7 +142,7 @@ function getKnownDefPaths(
   };
 
   crates.forEach((crate) =>
-    crate.rootItems.forEach((item) => folder.item(item))
+    crate.rootItems.forEach((item) => folder.item(item)),
   );
 
   return knows;
@@ -231,7 +232,7 @@ export function lower(gcx: GlobalContext): wasm.Module {
       const idx = cx.funcIndices.get(rel.res);
       if (idx === undefined) {
         throw new Error(
-          `no function found for relocation '${JSON.stringify(rel.res)}'`
+          `no function found for relocation '${JSON.stringify(rel.res)}'`,
         );
       }
       rel.instr.func = idx.kind === "func" ? offset + idx.idx : idx.idx;
@@ -241,7 +242,7 @@ export function lower(gcx: GlobalContext): wasm.Module {
      const idx = cx.globalIndices.get(rel.res);
      if (idx === undefined) {
        throw new Error(
-          `no global found for relocation '${JSON.stringify(rel.res)}'`
+          `no global found for relocation '${JSON.stringify(rel.res)}'`,
        );
      }
      rel.instr.imm = idx;
@@ -260,10 +261,10 @@ export function lower(gcx: GlobalContext): wasm.Module {
 function lowerImport(
   cx: Context,
   item: Item<Typecked>,
-  def: ImportDef<Typecked>
+  def: ImportDef<Typecked>,
 ) {
   const existing = cx.mod.imports.findIndex(
-    (imp) => imp.module === def.module.value && imp.name === def.func.value
+    (imp) => imp.module === def.module.value && imp.name === def.func.value,
   );
 
   let idx;
@@ -271,7 +272,7 @@ function lowerImport(
     idx = existing;
   } else {
     const abi = computeAbi(def.ty!);
-    const { type: wasmType } = wasmTypeForAbi(abi);
+    const { type: wasmType } = wasmTypeForAbi(abi, def.ty!);
     const type = internFuncType(cx, wasmType);
 
     idx = cx.mod.imports.length;
@@ -291,7 +292,7 @@ function lowerImport(
 function lowerGlobal(
   cx: Context,
   item: Item<Typecked>,
-  def: GlobalItem<Typecked>
+  def: GlobalItem<Typecked>,
 ) {
   const globalIdx = cx.mod.globals.length;
 
@@ -334,13 +335,14 @@ type FuncContext = {
   varLocations: VarLocation[];
   loopDepths: Map<LoopId, number>;
   currentBlockDepth: number;
+  scratchLocals: Map<wasm.ValType, wasm.LocalIdx[]>;
 };
 
 type FnAbi = { params: ArgRetAbi[]; ret: ArgRetAbi };
 
 type ArgRetAbi = wasm.ValType[];
 
-type VarLocation = { localIdx: number; types: wasm.ValType[] };
+type VarLocation = { localIdx: number; types: wasm.ValType[]; ty: Ty };
 
 type StructFieldLayout = {
   types: { offset: number; type: wasm.ValType }[];
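The new `ty` field on VarLocation is what lets scope-exit code decide whether a local needs a refcount decrement. A minimal sketch of the pattern this commit applies in lowerFunc and lowerExprBlockBody (the helpers needsRefcount and subRefcount are introduced further down in this diff; the function name dropScopeLocals here is hypothetical, not from the repo):

```ts
// Hypothetical helper illustrating why VarLocation now carries the nil-level
// type: at the end of a scope, each refcounted local is loaded back onto the
// wasm stack and its refcount is decremented (freeing the value at zero).
function dropScopeLocals(
  fcx: FuncContext,
  instrs: wasm.Instr[],
  locals: VarLocation[],
) {
  for (const local of locals) {
    const refcount = needsRefcount(local.ty);
    if (refcount !== undefined) {
      loadVariable(instrs, local); // push the pointer (and length, for strings)
      subRefcount(fcx, instrs, refcount); // decrement; deallocates when it hits zero
    }
  }
}
```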
@@ -356,10 +358,10 @@ type StructLayout = {
 function lowerFunc(
   cx: Context,
   item: Item<Typecked>,
-  func: FunctionDef<Typecked>
+  func: FunctionDef<Typecked>,
 ) {
   const abi = computeAbi(func.ty!);
-  const { type: wasmType, paramLocations } = wasmTypeForAbi(abi);
+  const { type: wasmType, paramLocations } = wasmTypeForAbi(abi, func.ty!);
   const type = internFuncType(cx, wasmType);
 
   const wasmFunc: wasm.Func = {
@@ -378,16 +380,26 @@ function lowerFunc(
     varLocations: paramLocations,
     loopDepths: new Map(),
     currentBlockDepth: 0,
+    scratchLocals: new Map(),
   };
 
   lowerExpr(fcx, wasmFunc.body, fcx.func.body);
 
+  paramLocations.forEach((local) => {
+    const refcount = needsRefcount(local.ty);
+    if (refcount !== undefined) {
+      // TODO: correctly deal with tuples
+      loadVariable(wasmFunc.body, local);
+      subRefcount(fcx, wasmFunc.body, refcount);
+    }
+  });
+
   const idx = fcx.cx.mod.funcs.length;
   fcx.cx.mod.funcs.push(wasmFunc);
 
   fcx.cx.funcIndices.set(
     { kind: "item", id: fcx.item.id },
-    { kind: "func", idx }
+    { kind: "func", idx },
   );
 }
 
@@ -399,7 +411,7 @@ Expression lowering.
 function lowerExpr(
   fcx: FuncContext,
   instrs: wasm.Instr[],
-  expr: Expr<Typecked>
+  expr: Expr<Typecked>,
 ) {
   const ty = expr.ty;
 
@@ -420,7 +432,7 @@ function lowerExpr(
         instrs.push({ kind: "local.set", imm: local + i });
       });
 
-      fcx.varLocations.push({ localIdx: local, types });
+      fcx.varLocations.push({ localIdx: local, types, ty: expr.rhs.ty });
 
       break;
     }
@@ -475,7 +487,7 @@ function lowerExpr(
       } else {
         const instr: wasm.Instr = {
           kind: "block",
-          instrs: lowerExprBlockBody(fcx, expr),
+          instrs: lowerExprBlockBody(fcx, expr, prevVarLocationLengths),
           type: blockTypeForBody(fcx.cx, expr.ty),
         };
 
@@ -518,6 +530,17 @@ function lowerExpr(
           const location =
             fcx.varLocations[fcx.varLocations.length - 1 - res.index];
           loadVariable(instrs, location);
+
+          const refcount = needsRefcount(expr.ty);
+
+          if (refcount !== undefined) {
+            addRefcount(
+              fcx,
+              instrs,
+              refcount === "string" ? "string" : "struct",
+            );
+          }
+
           break;
         }
         case "item": {
@@ -694,7 +717,7 @@ function lowerExpr(
         case "__i32_load": {
           assertArgs(1);
           lowerExpr(fcx, instrs, expr.args[0]);
-          instrs.push({ kind: "i64.load", imm: {} });
+          instrs.push({ kind: "i32.load", imm: {} });
           break exprKind;
         }
         case "__i64_load": {
@@ -752,6 +775,11 @@ function lowerExpr(
           instrs.push({ kind: "i64.extend_i32_u" });
           break exprKind;
         }
+        case "___transmute": {
+          expr.args.map((arg) => lowerExpr(fcx, instrs, arg));
+          // don't do anything
+          break exprKind;
+        }
       }
     }
 
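A short reading of the new `___transmute` case above (an illustration, not code from the repo): the builtin only lowers its arguments and emits nothing else, so the operand's bits stay on the wasm stack unchanged and are reinterpreted at whatever type the surrounding code expects.

```ts
// Sketch: lowering `___transmute(x)` produces exactly the same instruction
// sequence as lowering `x` by itself; the "conversion" is purely type-level.
const direct: wasm.Instr[] = [];
lowerExpr(fcx, direct, expr.args[0]); // instructions for `x`
// ...the ___transmute case pushes an identical sequence into `instrs`.
```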
@@ -791,7 +819,7 @@ function lowerExpr(
           const resultSize = resultAbi.length;
           const wasmIdx = wasmTypeIdxForTupleField(
             expr.lhs.ty,
-            expr.field.fieldIdx!
+            expr.field.fieldIdx!,
           );
 
           // lhsSize=5, resultSize=2, wasmIdx=2
@@ -808,13 +836,13 @@ function lowerExpr(
 
           if (expr.field.fieldIdx! > 0) {
             // Keep the result in scratch space.
-            storeVariable(instrs, { localIdx, types: resultAbi });
+            storeVariable(instrs, { localIdx, types: resultAbi, ty: expr.ty });
 
             Array(wasmIdx)
               .fill(0)
               .forEach(() => instrs.push({ kind: "drop" }));
 
-            loadVariable(instrs, { localIdx, types: resultAbi });
+            loadVariable(instrs, { localIdx, types: resultAbi, ty: expr.ty });
           }
 
           break;
@@ -824,9 +852,7 @@ function lowerExpr(
           const layout = layoutOfStruct(ty);
           const field = layout.fields[expr.field.fieldIdx!];
 
-          // TODO: SCRATCH LOCALS
-          const ptrLocal = fcx.wasmType.params.length + fcx.wasm.locals.length;
-          fcx.wasm.locals.push("i32");
+          const ptrLocal = getScratchLocals(fcx, "i32", 1)[0];
 
           // We save the local for getting it later for all the field parts.
           instrs.push({
@@ -860,7 +886,7 @@ function lowerExpr(
               break;
             default: {
               throw new Error(
-                `unsupported struct content type: ${fieldPart.type}`
+                `unsupported struct content type: ${fieldPart.type}`,
               );
             }
           }
@@ -958,13 +984,12 @@ function lowerExpr(
         res: { kind: "item", id: allocateItemId },
       });
       instrs.push(allocate);
-      // TODO: scratch locals...
-      const ptrLocal = fcx.wasmType.params.length + fcx.wasm.locals.length;
-      fcx.wasm.locals.push("i32");
+      const ptrLocal = getScratchLocals(fcx, "i32", 1)[0];
       instrs.push({ kind: "local.tee", imm: ptrLocal });
 
       // Store the refcount
-      instrs.push({ kind: "i32.const", imm: 0n });
+      instrs.push({ kind: "i32.const", imm: 1n });
       instrs.push({ kind: "i32.store", imm: { align: 4 } });
 
       // Now, set all fields.
@@ -996,7 +1021,7 @@ function lowerExpr(
             break;
           default: {
             throw new Error(
-              `unsupported struct content type: ${fieldPart.type}`
+              `unsupported struct content type: ${fieldPart.type}`,
             );
           }
         }
@@ -1012,8 +1037,6 @@ function lowerExpr(
       expr.fields.forEach((field) => lowerExpr(fcx, instrs, field));
       break;
     }
-    case "refcount":
-      todo("refcount");
     default: {
       const _: never = expr;
     }
@@ -1027,29 +1050,89 @@ function lowerExpr(
 
 function lowerExprBlockBody(
   fcx: FuncContext,
-  expr: ExprBlock<Typecked> & Expr<Typecked>
+  expr: ExprBlock<Typecked> & Expr<Typecked>,
+  prevVarLocationLength: number,
 ): wasm.Instr[] {
   fcx.currentBlockDepth++;
-  const innerInstrs: wasm.Instr[] = [];
+  const instrs: wasm.Instr[] = [];
 
   const headExprs = expr.exprs.slice(0, -1);
   const tailExpr = expr.exprs[expr.exprs.length - 1];
 
   for (const inner of headExprs) {
-    lowerExpr(fcx, innerInstrs, inner);
+    lowerExpr(fcx, instrs, inner);
     if (inner.ty.kind === "never") {
       // The rest of the block is unreachable, so we don't bother codegening it.
       break;
     }
     const types = wasmTypeForBody(inner.ty);
-    types.forEach(() => innerInstrs.push({ kind: "drop" }));
+
+    const refcount = needsRefcount(inner.ty);
+    if (refcount !== undefined) {
+      subRefcount(fcx, instrs, refcount);
+    } else {
+      // TODO: correctly deal with tuples
+      types.forEach(() => instrs.push({ kind: "drop" }));
+    }
   }
 
-  lowerExpr(fcx, innerInstrs, tailExpr);
+  lowerExpr(fcx, instrs, tailExpr);
+
+  const thisBlockLocals = fcx.varLocations.slice(prevVarLocationLength);
+
+  thisBlockLocals.forEach((local) => {
+    const refcount = needsRefcount(local.ty);
+    if (refcount !== undefined) {
+      // TODO: correctly deal with tuples
+      loadVariable(instrs, local);
+      subRefcount(fcx, instrs, refcount);
+    }
+  });
 
   fcx.currentBlockDepth--;
 
-  return innerInstrs;
+  return instrs;
+}
+
+function getScratchLocals(
+  fcx: FuncContext,
+  type: wasm.ValType,
+  amount: number,
+): wasm.LocalIdx[] {
+  function addLocals(fcx: FuncContext, type: wasm.ValType[]): wasm.LocalIdx[] {
+    const local = fcx.wasm.locals.length + fcx.wasmType.params.length;
+
+    fcx.wasm.locals.push(...type);
+
+    return type.map((_, i) => local + i);
+  }
+
+  const existing = fcx.scratchLocals.get(type);
+  if (!existing) {
+    const locals = addLocals(
+      fcx,
+      Array(amount)
+        .fill(0)
+        .map(() => type),
+    );
+
+    fcx.scratchLocals.set(type, locals);
+    return locals;
+  } else {
+    const toAdd = amount - existing.length;
+    if (toAdd > 0) {
+      const locals = addLocals(
+        fcx,
+        Array(toAdd)
+          .fill(0)
+          .map(() => type),
      );

      existing.push(...locals);
      return existing;
    }
    return existing.slice(0, amount);
  }
 }
 
 function loadVariable(instrs: wasm.Instr[], loc: VarLocation) {
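The scratch-local helper above replaces the earlier pattern of pushing a brand-new wasm local at every use site (the removed `// TODO: SCRATCH LOCALS` blocks earlier in this diff). A hypothetical call site, mirroring how the struct-literal and field-access lowering use it:

```ts
// Hypothetical use: reserve one scratch i32 local for a pointer. Repeated
// requests for the same value type reuse the slots cached in fcx.scratchLocals
// instead of growing fcx.wasm.locals on every lowering step.
const [ptrLocal] = getScratchLocals(fcx, "i32", 1);
instrs.push({ kind: "local.tee", imm: ptrLocal }); // stash the pointer, keep it on the stack
// ...later reads simply reload the same scratch local:
instrs.push({ kind: "local.get", imm: ptrLocal });
```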
@@ -1105,17 +1188,21 @@ function computeAbi(ty: TyFn): FnAbi {
   return { params, ret };
 }
 
-function wasmTypeForAbi(abi: FnAbi): {
+function wasmTypeForAbi(
+  abi: FnAbi,
+  ty: TyFn,
+): {
   type: wasm.FuncType;
   paramLocations: VarLocation[];
 } {
   const params: wasm.ValType[] = [];
   const paramLocations: VarLocation[] = [];
 
-  abi.params.forEach((arg) => {
+  abi.params.forEach((arg, i) => {
     paramLocations.push({
       localIdx: params.length,
       types: arg,
+      ty: ty.params[i],
     });
     params.push(...arg);
   });
 
@@ -1168,7 +1255,7 @@ export function layoutOfStruct(ty: TyStruct): StructLayout {
 
   // TODO: Use the max alignment instead.
   const align = fieldWasmTys.some((field) =>
-    field.some((type) => type === "i64")
+    field.some((type) => type === "i64"),
   )
     ? 8
     : 4;
@@ -1185,7 +1272,8 @@ export function layoutOfStruct(ty: TyStruct): StructLayout {
     const types = field.map((type) => {
       const size = sizeOfValtype(type);
 
-      if (size === 8 && offset % 8 !== 0) {
+      // we don't want padding for the first field as the allocator takes care of that.
+      if (offset !== 4 && size === 8 && offset % 8 !== 0) {
         // padding.
         offset += 4;
       }
@@ -1203,6 +1291,9 @@ export function layoutOfStruct(ty: TyStruct): StructLayout {
       return value;
     });
 
+  // we ignore the refcount for struct size.
+  offset -= 4;
+
   if (align === 8 && offset % 8 !== 0) {
     offset += 4;
   }
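These two layout tweaks are what produce the snapshot in the new single-field struct test near the top of this diff. A worked sketch of the numbers (my reading of the code, assuming the running offset starts at 4 so the 4-byte refcount header comes first):

```ts
// Layout of a struct with a single Int field under the new rules:
//   offset = 4                 // field bytes start after the 4-byte refcount header
//   Int lowers to one i64      // size 8; `offset !== 4` skips the old padding step,
//                              // so the field is recorded at offset 4 and the
//                              // allocator is trusted to hand out suitably aligned memory
//   offset = 4 + 8 = 12
//   offset -= 4                // "we ignore the refcount for struct size" -> 8
//   align = 8, 8 % 8 === 0     // no tail padding needed
// Result: { align: 8, size: 8, field at offset 4 as "i64" }, matching the snapshot.
```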
@@ -1230,6 +1321,169 @@ function wasmTypeIdxForTupleField(ty: TyTuple, idx: number): number {
   return head.reduce((a, b) => a + b.length, 0);
 }
 
+// Refcounts:
+
+/*
+ * Injects `refcount` expressions into the code to make sure
+ * that no memory is leaked and no memory is freed too early.
+ *
+ * When do we need to adjust the refcount?
+ *
+ * When looking at reference counts, we need to distinguish between moves
+ * and copies of a struct. When a struct is moved, no reference count has
+ * to be changed. When it is copied, we need to increment the reference count.
+ *
+ * ```
+ * let a = S {};
+ * foo(a); // COPY
+ * ```
+ * ```
+ * let a = identity(S {}); // MOVE
+ * ```
+ *
+ * Due to the way the language is structured, this analysis is fairly simple:
+ * Most expressions are considered moves, but identifiers like `a` are considered
+ * copies. This is sound because the only way to refer to a value twice is to bind
+ * it to a variable. So whenever we load a variable of type struct, we need to bump
+ * the refcount.
+ *
+ * Then we just need to decrement all the locals (and params!) refcounts when they go
+ * out of scope.
+ *
+ * This leaves us with the following rules:
+ * - when loading an identifier, add an increment
+ * - when the end of a block is reached, decrement all locals
+ * - when the end of a function is reached, decrement all params
+ * - when an expression value is ignored, decrement
+ */
+
+function needsRefcount(ty: Ty): StructLayout | "string" | undefined {
+  switch (ty.kind) {
+    case "string":
+      // TODO: deal with strings
+      return undefined;
+    case "struct":
+      return layoutOfStruct(ty);
+    case "list":
+      todo("no lists yet");
+    case "var":
+      varUnreachable();
+    default:
+      return undefined;
+  }
+}
+
+function addRefcount(
+  fcx: FuncContext,
+  instrs: wasm.Instr[],
+  kind: "struct" | "string",
+) {
+  const layout: wasm.ValType[] = kind === "string" ? ["i32", "i32"] : ["i32"];
+
+  const [ptr, len] = getScratchLocals(fcx, "i32", layout.length);
+
+  // stack: PTR, {LEN}
+  const innerInstrs: wasm.Instr[] = [];
+
+  if (kind === "string") {
+    innerInstrs.push({ kind: "local.set", imm: len }); // stack: PTR
+  }
+
+  innerInstrs.push({ kind: "local.tee", imm: ptr }); // stack: PTR
+  innerInstrs.push({ kind: "local.get", imm: ptr }); // stack: PTR, PTR
+  innerInstrs.push({ kind: "local.get", imm: ptr }); // stack: PTR, PTR, PTR
+  innerInstrs.push({ kind: "i32.load", imm: { align: 4 } }); // stack: PTR, PTR, cnt
+  innerInstrs.push({ kind: "i32.const", imm: 1n }); // stack: PTR, PTR, cnt, 1
+  innerInstrs.push({ kind: "i32.add" }); // stack: PTR, PTR, cnt
+  innerInstrs.push({ kind: "i32.store", imm: { align: 4 } }); // stack: PTR
+
+  if (kind === "string") {
+    innerInstrs.push({ kind: "local.get", imm: len }); // stack: PTR, {LEN}
+  }
+  // stack: PTR, {LEN}
+
+  instrs.push({
+    kind: "block",
+    instrs: innerInstrs,
+    type: {
+      kind: "typeidx",
+      idx: internFuncType(fcx.cx, { params: layout, returns: layout }),
+    },
+  });
+}
+
+function subRefcount(
+  fcx: FuncContext,
+  instrs: wasm.Instr[],
+  kind: StructLayout | "string",
+) {
+  const deallocateItemId = fcx.cx.knownDefPaths.get(DEALLOCATE_ITEM);
+  if (!deallocateItemId) {
+    throw new Error("std.rt.deallocateItem not found");
+  }
+
+  const layout: wasm.ValType[] = kind === "string" ? ["i32", "i32"] : ["i32"];
+
+  const [ptr, len] = getScratchLocals(fcx, "i32", layout.length);
+  const count = ptr;
+
+  const innerInstrs: wasm.Instr[] = [];
+
+  // stack: PTR, {LEN}
+  if (kind === "string") {
+    innerInstrs.push({ kind: "local.set", imm: len }); // stack: PTR
+  }
+  innerInstrs.push({ kind: "local.tee", imm: ptr }); // stack: PTR
+  innerInstrs.push({ kind: "local.get", imm: ptr }); // stack: PTR, PTR
+  innerInstrs.push({ kind: "i32.load", imm: { align: 4 } }); // stack: PTR, cnt
+  innerInstrs.push({ kind: "i32.const", imm: 1n }); // stack: PTR, cnt, 1
+  innerInstrs.push({ kind: "i32.sub" }); // stack: PTR, cnt
+  innerInstrs.push({ kind: "local.tee", imm: count }); // stack: PTR, cnt
+  innerInstrs.push({
+    kind: "if",
+    then: [
+      // stack: PTR
+      { kind: "local.get", imm: count }, // stack: PTR, cnt
+      { kind: "i32.store", imm: { align: 4 } }, // stack:
+    ],
+    else: (() => {
+      // stack: PTR
+      const instrs: wasm.Instr[] = [];
+
+      if (kind === "string") {
+        instrs.push({ kind: "local.get", imm: len }); // stack: PTR, len
+      } else {
+        instrs.push({ kind: "i32.const", imm: BigInt(kind.size) }); // stack: PTR, len
+      }
+
+      const deallocateCall: wasm.Instr = { kind: "call", func: DUMMY_IDX };
+      fcx.cx.relocations.push({
+        kind: "funccall",
+        instr: deallocateCall,
+        res: { kind: "item", id: deallocateItemId },
+      });
+      instrs.push(deallocateCall); // stack:
+
+      return instrs;
+    })(),
+    type: {
+      kind: "typeidx",
+      idx: internFuncType(fcx.cx, { params: ["i32"], returns: [] }),
+    },
+  });
+
+  instrs.push({
+    kind: "block",
+    instrs: innerInstrs,
+    type: {
+      kind: "typeidx",
+      idx: internFuncType(fcx.cx, { params: layout, returns: [] }),
+    },
+  });
+
+  // stack:
+}
+
 function todo(msg: string): never {
   throw new Error(`TODO: ${msg}`);
 }
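The rules in the comment above are exactly what the updated INPUT program in src/index.ts (further down in this diff) exercises; the `/*+1*/` and `/*-1*/` markers there appear to annotate the expected adjustments. A hand-written trace of the struct bound to `a` in `uwu()` (my interpretation, not compiler output):

```ts
// function uwu() = (
//   let a = A { a: 100 };   // the literal is stored with refcount 1 (see the i32.const 1n change)
//   eat(a /*+1*/);          // loading the identifier `a` is a copy: 1 -> 2;
//                           // `eat` decrements its parameter on return: 2 -> 1
//   A { a: 100 };           // a second, ignored literal: created at 1, then the
//                           // "value is ignored" rule decrements it to 0 and it is freed
//   /*-1*/                  // end of block: local `a` is decremented: 1 -> 0, freed
// );
```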
@@ -1304,7 +1558,7 @@ function addRt(cx: Context, crates: Crate<Typecked>[]) {
 
   cx.funcIndices.set(
     { kind: "builtin", name: "print" },
-    { kind: "func", idx: printIdx }
+    { kind: "func", idx: printIdx },
   );
 
   mod.exports.push({
@@ -7,7 +7,7 @@ import path from "path";
 export type CrateLoader = (
   gcx: GlobalContext,
   name: string,
-  span: Span
+  span: Span,
 ) => DepCrate;
 
 /**
@@ -27,12 +27,12 @@ export class GlobalContext {
 
   public findItem<P extends Phase>(
     id: ItemId,
-    localCrate?: Crate<P>
+    localCrate?: Crate<P>,
   ): Item<P | Final> {
     const crate = unwrap(
       [...(localCrate ? [localCrate] : []), ...this.finalizedCrates].find(
-        (crate) => crate.id === id.crateId
-      )
+        (crate) => crate.id === id.crateId,
+      ),
     );
 
     if (id.itemIdx === 0) {
@@ -73,7 +73,7 @@ export function parseArgs(hardcodedInput: string): Options {
     console.error(process.argv);
 
     console.error(
-      `error: filename must have \`.nil\` extension: \`${filename}\``
+      `error: filename must have \`.nil\` extension: \`${filename}\``,
     );
     process.exit(1);
   }
@@ -99,7 +99,6 @@ export function parseArgs(hardcodedInput: string): Options {
     "parsed",
     "resolved",
     "typecked",
-    "wat",
     "wasm-validate",
   ]);
 }
@@ -5,7 +5,7 @@ it("should extract lines correctly", () => {
 
   const lineSpans = lines({ content: input });
   const lineContents = lineSpans.map(({ start, end }) =>
-    input.slice(start, end)
+    input.slice(start, end),
   );
 
   expect(lineContents).toStrictEqual(["AAA", "meow", ":3", "", ""]);
src/error.ts (12 changes)
@@ -7,7 +7,7 @@ export class Span {
   constructor(
     public start: number,
     public end: number,
-    public file: LoadedFile
+    public file: LoadedFile,
   ) {}
 
   public merge(b: Span): Span {
@@ -18,7 +18,7 @@ export class Span {
     return new Span(
       Math.min(this.start, b.start),
       Math.max(this.end, b.end),
-      this.file
+      this.file,
     );
   }
 
@@ -46,7 +46,7 @@ export class CompilerError extends Error {
 
 export function withErrorPrinter<R>(
   f: () => R,
-  afterError: (e: CompilerError) => R
+  afterError: (e: CompilerError) => R,
 ): R {
   try {
     return f();
@@ -69,7 +69,7 @@ function renderError(e: CompilerError) {
     span.start === Number.MAX_SAFE_INTEGER
       ? lineSpans[lineSpans.length - 1]
       : lineSpans.find(
-          (line) => line.start <= span.start && line.end >= span.start
+          (line) => line.start <= span.start && line.end >= span.start,
         );
   if (!line) {
     throw Error(`Span out of bounds: ${span.start}..${span.end}`);
@@ -90,8 +90,8 @@ function renderError(e: CompilerError) {
 
   console.error(
     `${" ".repeat(String(lineNo).length)} ${" ".repeat(
-      startRelLine
-    )}${"^".repeat(spanLength)}`
+      startRelLine,
+    )}${"^".repeat(spanLength)}`,
   );
 }
src/index.ts (21 changes)
@@ -1,6 +1,6 @@
 import { LoadedFile, Span, withErrorPrinter } from "./error";
 import { isValidIdent, tokenize } from "./lexer";
-import { lower as lowerToWasm } from "./lower";
+import { lower as lowerToWasm } from "./codegen";
 import { ParseState, parse } from "./parser";
 import { printAst } from "./printer";
 import { resolve } from "./resolve";
@@ -16,14 +16,19 @@ const INPUT = `
 type A = { a: Int };
 
 function main() = (
-  let a = A { a: 100 };
-  printA(a);
+  uwu();
 );
 
-function printA(a: A) = (
-  print("ABCDEFGH\\n");
-  print("ABCDEFGH\\n");
+function uwu() = (
+  let a = A { a: 100 };
+  eat(a /*+1*/);
+
+  A { a: 100 };
+
+  /*-1*/
 );
+
+function eat(a: A) = ;
 `;
 
 function main() {
@@ -32,7 +37,7 @@ function main() {
 
   if (!isValidIdent(packageName)) {
     console.error(
-      `error: package name \`${packageName}\` is not a valid identifer`
+      `error: package name \`${packageName}\` is not a valid identifer`,
     );
     process.exit(1);
   }
@@ -123,7 +128,7 @@ function main() {
         });
       }
     },
-    () => process.exit(1)
+    () => process.exit(1),
   );
 }
@@ -206,7 +206,7 @@ export function tokenize(file: LoadedFile): Token[] {
           default:
             throw new CompilerError(
               `invalid escape character: ${input[i]}`,
-              new Span(span.end - 1, span.end, file)
+              new Span(span.end - 1, span.end, file),
             );
         }
         continue;
@@ -231,7 +231,7 @@ export function tokenize(file: LoadedFile): Token[] {
       const int = parseInt(digit, 10);
       if (Number.isNaN(int)) {
         throw new Error(
-          `\`${digit}\` was tokenized to a number even though it is not`
+          `\`${digit}\` was tokenized to a number even though it is not`,
         );
       }
 
@@ -11,7 +11,7 @@ import { typeck } from "./typeck";
 export function loadModuleFile(
   relativeTo: string,
   moduleName: string,
-  span: Span
+  span: Span,
 ): LoadedFile {
   let searchDir: string;
   if (relativeTo.endsWith(".mod.nil")) {
@@ -20,7 +20,7 @@ export function loadModuleFile(
   } else if (relativeTo.endsWith(".nil")) {
     throw new CompilerError(
       `.nil files cannot have submodules. use .mod.nil in a subdirectory`,
-      span
+      span,
     );
   } else {
     searchDir = relativeTo;
@@ -43,7 +43,7 @@ export function loadModuleFile(
   if (content === undefined || filePath === undefined) {
     throw new CompilerError(
       `failed to load ${moduleName}, could not find ${options.join(" or ")}`,
-      span
+      span,
     );
   }
 
@@ -53,13 +53,13 @@ export function loadModuleFile(
 export const loadCrate: CrateLoader = (
   gcx: GlobalContext,
   name: string,
-  span: Span
+  span: Span,
 ): DepCrate => {
   // We really, really want a good algorithm for finding crates.
   // But right now we just look for files in the CWD.
 
   const existing = gcx.finalizedCrates.find(
-    (crate) => crate.packageName === name
+    (crate) => crate.packageName === name,
   );
   if (existing) {
     return existing;
@@ -75,7 +75,6 @@ export const loadCrate: CrateLoader = (
   const parseState: ParseState = { tokens, file };
   const ast = parse(name, parseState, crateId);
   const resolved = resolve(gcx, ast);
-  console.log(resolved);
 
   const typecked = typeck(gcx, resolved);
 
@@ -85,8 +84,8 @@ export const loadCrate: CrateLoader = (
     () => {
       throw new CompilerError(
         `failed to load crate ${name}: crate contains errors`,
-        span
+        span,
       );
-    }
+    },
   );
 };
@@ -51,7 +51,7 @@ type Parser<T> = (t: State) => [State, T];
 export function parse(
   packageName: string,
   t: State,
-  crateId: number
+  crateId: number,
 ): Crate<Built> {
   const [, items] = parseItems(t);
 
@@ -195,7 +195,7 @@ function parseItem(t: State): [State, Item<Parsed>] {
     if (name.span.file.path === undefined) {
       throw new CompilerError(
         `no known source file for statement, cannot load file relative to it`,
-        name.span
+        name.span,
       );
     }
     const file = loadModuleFile(name.span.file.path, name.ident, name.span);
@@ -304,7 +304,7 @@ function mkBinaryExpr(
   lhs: Expr<Parsed>,
   rhs: Expr<Parsed>,
   span: Span,
-  kind: string
+  kind: string,
 ): Expr<Parsed> {
   return { kind: "binary", binaryKind: kind as BinaryKind, lhs, rhs, span };
 }
@@ -312,7 +312,7 @@ function mkBinaryExpr(
 function mkParserExprBinary(
   lower: Parser<Expr<Parsed>>,
   kinds: string[],
-  mkExpr = mkBinaryExpr
+  mkExpr = mkBinaryExpr,
 ): Parser<Expr<Parsed>> {
   function parser(t: State): [State, Expr<Parsed>] {
     let lhs;
@@ -337,25 +337,25 @@ function mkParserExprBinary(
 
 const parseExprArithFactor = mkParserExprBinary(
   parseExprUnary,
-  ARITH_FACTOR_KINDS
+  ARITH_FACTOR_KINDS,
 );
 
 const parseExprArithTerm = mkParserExprBinary(
   parseExprArithFactor,
-  ARITH_TERM_KINDS
+  ARITH_TERM_KINDS,
 );
 
 const parseExprLogical = mkParserExprBinary(parseExprArithTerm, LOGICAL_KINDS);
 
 const parseExprComparison = mkParserExprBinary(
   parseExprLogical,
-  COMPARISON_KINDS
+  COMPARISON_KINDS,
 );
 
 const parseExprAssignment = mkParserExprBinary(
   parseExprComparison,
   ["="],
-  (lhs, rhs, span) => ({ kind: "assign", lhs, rhs, span })
+  (lhs, rhs, span) => ({ kind: "assign", lhs, rhs, span }),
 );
 
 function parseExprUnary(t: State): [State, Expr<Parsed>] {
@@ -566,7 +566,7 @@ function parseExprAtom(startT: State): [State, Expr<Parsed>] {
 }
 
 function parseStructInit(
-  t: State
+  t: State,
 ): [State, ExprStructLiteral<Parsed>["fields"]] {
   [t] = expectNext(t, "{");
 
@@ -582,7 +582,7 @@ function parseStructInit(
       [t, expr] = parseExpr(t);
 
       return [t, { name: { name: name.ident, span: name.span }, expr }];
-    }
+    },
   );
 
   return [t, fields];
@@ -640,7 +640,7 @@ function parseType(t: State): [State, Type<Parsed>] {
     default: {
       throw new CompilerError(
         `unexpected token: \`${tok.kind}\`, expected type`,
-        span
+        span,
       );
     }
   }
@@ -651,7 +651,7 @@ function parseType(t: State): [State, Type<Parsed>] {
 function parseCommaSeparatedList<R>(
   t: State,
   terminator: Token["kind"],
-  parser: Parser<R>
+  parser: Parser<R>,
 ): [State, R[]] {
   const items: R[] = [];
 
@@ -682,7 +682,7 @@ function parseCommaSeparatedList<R>(
 
 function eat<T extends BaseToken>(
   t: State,
-  kind: T["kind"]
+  kind: T["kind"],
 ): [State, T | undefined] {
   if (peekKind(t) === kind) {
     return expectNext(t, kind);
@@ -696,20 +696,20 @@ function peekKind(t: State): Token["kind"] | undefined {
 
 function expectNext<T extends BaseToken>(
   t: State,
-  kind: T["kind"]
+  kind: T["kind"],
 ): [State, T & Token] {
   let tok;
   [t, tok] = maybeNextT(t);
   if (!tok) {
     throw new CompilerError(
       `expected \`${kind}\`, found end of file`,
-      Span.eof(t.file)
+      Span.eof(t.file),
     );
   }
   if (tok.kind !== kind) {
     throw new CompilerError(
       `expected \`${kind}\`, found \`${tok.kind}\``,
-      tok.span
+      tok.span,
     );
   }
   return [t, tok as unknown as T & Token];
@@ -742,7 +742,7 @@ function validateAst(ast: Crate<Built>) {
     itemInner(item: Item<Built>): Item<Built> {
       if (seenItemIds.has(item.id)) {
         throw new Error(
-          `duplicate item id: ${item.id.toString()} for ${item.node.name}`
+          `duplicate item id: ${item.id.toString()} for ${item.node.name}`,
         );
       }
       seenItemIds.add(item.id);
@@ -772,7 +772,7 @@ function validateAst(ast: Crate<Built>) {
       if (ourClass !== innerClass) {
         throw new CompilerError(
           `mixing operators without parentheses is not allowed. ${side} is ${inner.binaryKind}, which is different from ${expr.binaryKind}`,
-          expr.span
+          expr.span,
         );
       }
     }
@@ -801,7 +801,7 @@ function buildCrate(
   packageName: string,
   rootItems: Item<Parsed>[],
   crateId: number,
-  rootFile: LoadedFile
+  rootFile: LoadedFile,
 ): Crate<Built> {
   const itemId = new Ids();
   itemId.next(); // crate root ID
@@ -47,7 +47,7 @@ function printItem(item: Item<AnyPhase>): string {
         id +
         `global ${item.node.name}: ${printType(item.node.type)} = ${printExpr(
           item.node.init,
-          0
+          0,
         )};`
       );
     }
@@ -64,7 +64,7 @@ function printFunction(func: FunctionDef<AnyPhase>): string {
 
 function printTypeDef(type: TypeDef<AnyPhase>): string {
   const fields = type.fields.map(
-    ({ name, type }) => `${ind(1)}${name.name}: ${printType(type)},`
+    ({ name, type }) => `${ind(1)}${name.name}: ${printType(type)},`,
   );
 
   const fieldPart =
@@ -80,7 +80,7 @@ function printImportDef(def: ImportDef<AnyPhase>): string {
   const ret = def.returnType ? `: ${printType(def.returnType)}` : "";
 
   return `import ${printStringLiteral(def.module)} ${printStringLiteral(
-    def.func
+    def.func,
   )}(${args})${ret};`;
 }
 
@@ -98,7 +98,7 @@ function printExpr(expr: Expr<AnyPhase>, indent: number): string {
 
       return `let ${expr.name.name}${type} = ${printExpr(
         expr.rhs,
-        indent + 1
+        indent + 1,
       )}`;
     }
     case "assign": {
@@ -146,7 +146,7 @@ function printExpr(expr: Expr<AnyPhase>, indent: number): string {
     case "binary": {
       return `${printExpr(expr.lhs, indent)} ${expr.binaryKind} ${printExpr(
         expr.rhs,
-        indent
+        indent,
       )}`;
     }
     case "unary": {
@@ -175,7 +175,7 @@ function printExpr(expr: Expr<AnyPhase>, indent: number): string {
         : "";
       return `if ${printExpr(expr.cond, indent + 1)} then ${printExpr(
         expr.then,
-        indent + 1
+        indent + 1,
       )}${elsePart}`;
     }
     case "loop": {
@@ -35,7 +35,7 @@ function loadCrate(cx: Context, name: string, span: Span): Map<string, ItemId> {
   const loadedCrate = cx.gcx.crateLoader(cx.gcx, name, span);
 
   const contents = new Map(
-    loadedCrate.rootItems.map((item) => [item.node.name, item.id])
+    loadedCrate.rootItems.map((item) => [item.node.name, item.id]),
   );
 
   return contents;
@@ -45,7 +45,7 @@ function resolveModItem(
   cx: Context,
   mod: ModItem<Built> | ExternItem,
   item: Item<Built>,
-  name: string
+  name: string,
 ): ItemId | undefined {
   const cachedContents = cx.modContentsCache.get(item.id);
   if (cachedContents) {
@@ -66,7 +66,7 @@ function resolveModItem(
 
 export function resolve(
   gcx: GlobalContext,
-  ast: Crate<Built>
+  ast: Crate<Built>,
 ): Crate<Resolved> {
   const cx: Context = {
     ast,
@@ -88,7 +88,7 @@ export function resolve(
 function resolveModule(
   cx: Context,
   modName: string[],
-  contents: Item<Built>[]
+  contents: Item<Built>[],
 ): Item<Resolved>[] {
   const items = new Map<string, ItemId>();
 
@@ -97,7 +97,7 @@ function resolveModule(
     if (existing !== undefined) {
       throw new CompilerError(
         `item \`${item.node.name}\` has already been declared`,
-        item.span
+        item.span,
       );
     }
     items.set(item.node.name, item.id);
@@ -109,7 +109,7 @@ function resolveModule(
     const popped = scopes.pop();
     if (popped !== expected) {
       throw new Error(
-        `Scopes corrupted, wanted to pop ${expected} but popped ${popped}`
+        `Scopes corrupted, wanted to pop ${expected} but popped ${popped}`,
       );
     }
   };
@@ -224,7 +224,7 @@ function resolveModule(
       blockLocals.push([]);
 
       const exprs = expr.exprs.map<Expr<Resolved>>((inner) =>
-        this.expr(inner)
+        this.expr(inner),
      );

      scopes.length = prevScopeLength;
@@ -271,7 +271,7 @@ function resolveModule(
       if (typeof expr.field.value === "number") {
         throw new CompilerError(
           "module contents cannot be indexed with a number",
-          expr.field.span
+          expr.field.span,
         );
       }
 
@@ -279,12 +279,12 @@ function resolveModule(
         cx,
         module.node,
         module,
-        expr.field.value
+        expr.field.value,
       );
       if (pathResItem === undefined) {
         throw new CompilerError(
           `module ${module.node.name} has no item ${expr.field.value}`,
-          expr.field.span
+          expr.field.span,
         );
       }
 
@@ -2,7 +2,7 @@ import { TY_INT, TY_STRING, TY_UNIT } from "./ast";
 import { Span } from "./error";
 import { InferContext } from "./typeck";
 
-const SPAN: Span = Span.startOfFile({content: ""});
+const SPAN: Span = Span.startOfFile({ content: "" });
 
 it("should infer types across assignments", () => {
   const infcx = new InferContext();
104
src/typeck.ts
104
src/typeck.ts
|
|
@@ -29,6 +29,7 @@ import {
   TyStruct,
   Item,
   StructLiteralField,
+  superFoldExpr,
 } from "./ast";
 import { GlobalContext } from "./context";
 import { CompilerError, Span } from "./error";
@@ -96,7 +97,7 @@ function typeOfBuiltinValue(name: BuiltinName, span: Span): Ty {
 function lowerAstTyBase(
   type: Type<Resolved>,
   lowerIdentTy: (ident: IdentWithRes<Resolved>) => Ty,
-  typeOfItem: (itemId: ItemId, cause: Span) => Ty
+  typeOfItem: (itemId: ItemId, cause: Span) => Ty,
 ): Ty {
   switch (type.kind) {
     case "ident": {
@@ -112,7 +113,7 @@ function lowerAstTyBase(
       return {
         kind: "tuple",
         elems: type.elems.map((type) =>
-          lowerAstTyBase(type, lowerIdentTy, typeOfItem)
+          lowerAstTyBase(type, lowerIdentTy, typeOfItem),
         ),
       };
     }
@@ -124,7 +125,7 @@ function lowerAstTyBase(

 export function typeck(
   gcx: GlobalContext,
-  ast: Crate<Resolved>
+  ast: Crate<Resolved>,
 ): Crate<Typecked> {
   const itemTys = new ComplexMap<ItemId, Ty | null>();

@@ -141,13 +142,13 @@ export function typeck(
       case "mod": {
         throw new CompilerError(
           `module ${item.node.name} cannot be used as a type or value`,
-          cause
+          cause,
         );
       }
       case "extern": {
         throw new CompilerError(
           `extern declaration ${item.node.name} cannot be used as a type or value`,
-          cause
+          cause,
         );
       }
     }
@@ -161,7 +162,7 @@ export function typeck(
     if (ty === null) {
       throw new CompilerError(
         `cycle computing type of #G${itemId.toString()}`,
-        item.span
+        item.span,
       );
     }
     itemTys.set(itemId, null);
@@ -199,13 +200,13 @@ export function typeck(
       case "mod": {
         throw new CompilerError(
           `module ${item.node.name} cannot be used as a type or value`,
-          cause
+          cause,
         );
       }
       case "extern": {
         throw new CompilerError(
           `extern declaration ${item.node.name} cannot be used as a type or value`,
-          cause
+          cause,
         );
       }
       case "global": {
@@ -233,7 +234,7 @@ export function typeck(
         }
       }
     },
-    typeOfItem
+    typeOfItem,
   );
 }

@@ -274,7 +275,7 @@ export function typeck(
           default: {
             throw new CompilerError(
               `import parameters must be I32 or Int`,
-              item.node.params[i].span
+              item.node.params[i].span,
             );
           }
         }
@@ -288,7 +289,7 @@ export function typeck(
           default: {
             throw new CompilerError(
               `import return must be I32 or Int`,
-              item.node.returnType!.span
+              item.node.returnType!.span,
             );
           }
         }
@@ -321,7 +322,7 @@ export function typeck(
         if (fieldNames.has(name)) {
           throw new CompilerError(
             `type ${item.node.name} has a duplicate field: ${name.name}`,
-            name.span
+            name.span,
           );
         }
         fieldNames.add(name);
@@ -366,7 +367,7 @@ export function typeck(
       if (init.kind !== "literal" || init.value.kind !== "int") {
         throw new CompilerError(
           "globals must be initialized with an integer literal",
-          init.span
+          init.span,
         );
       }

@@ -406,7 +407,7 @@ export function typeck(
     if (ty.kind !== "tuple" || ty.elems.length !== 0) {
       throw new CompilerError(
         `\`main\` has an invalid signature. main takes no arguments and returns nothing`,
-        item.span
+        item.span,
       );
     }
   }
@@ -421,7 +422,7 @@ export function typeck(
   if (!main) {
     throw new CompilerError(
       `\`main\` function not found`,
-      Span.startOfFile(ast.rootFile)
+      Span.startOfFile(ast.rootFile),
     );
   }

@@ -586,7 +587,7 @@ export class InferContext {

     throw new CompilerError(
       `cannot assign ${printTy(rhs)} to ${printTy(lhs)}`,
-      span
+      span,
     );
   }
 }
@@ -596,7 +597,7 @@ export function checkBody(
   ast: Crate<Resolved>,
   body: Expr<Resolved>,
   fnTy: TyFn,
-  typeOfItem: (itemId: ItemId, cause: Span) => Ty
+  typeOfItem: (itemId: ItemId, cause: Span) => Ty,
 ): Expr<Typecked> {
   const localTys = [...fnTy.params];
   const loopState: { hasBreak: boolean; loopId: LoopId }[] = [];
@@ -634,13 +635,13 @@ export function checkBody(
         return builtinAsTy(res.name, ident.span);
       }
     },
-    typeOfItem
+    typeOfItem,
   );
 }

 const checker: Folder<Resolved, Typecked> = {
   ...mkDefaultFolder(),
-  expr(expr) {
+  expr(expr): Expr<Typecked> {
     switch (expr.kind) {
       case "empty": {
         return { ...expr, ty: TY_UNIT };
@@ -695,7 +696,7 @@ export function checkBody(
           case "builtin":
             throw new CompilerError(
               "cannot assign to builtins",
-              expr.span
+              expr.span,
             );
         }
         break;
@@ -703,7 +704,7 @@ export function checkBody(
       default: {
         throw new CompilerError(
           "invalid left-hand side of assignment",
-          lhs.span
+          lhs.span,
         );
       }
     }
@@ -777,13 +778,30 @@ export function checkBody(
         return checkUnary(expr, rhs);
       }
       case "call": {
+        if (
+          expr.lhs.kind === "ident" &&
+          expr.lhs.value.res.kind === "builtin" &&
+          expr.lhs.value.res.name === "___transmute"
+        ) {
+          const ty = infcx.newVar();
+          const args = expr.args.map((arg) => this.expr(arg));
+          const ret: Expr<Typecked> = {
+            ...expr,
+            lhs: { ...expr.lhs, ty: TY_UNIT },
+            args,
+            ty,
+          };
+
+          return ret;
+        }
+
         const lhs = this.expr(expr.lhs);
         lhs.ty = infcx.resolveIfPossible(lhs.ty);
         const lhsTy = lhs.ty;
         if (lhsTy.kind !== "fn") {
           throw new CompilerError(
             `expression of type ${printTy(lhsTy)} is not callable`,
-            lhs.span
+            lhs.span,
           );
         }

@@ -793,7 +811,7 @@ export function checkBody(
           if (args.length <= i) {
             throw new CompilerError(
               `missing argument of type ${printTy(param)}`,
-              expr.span
+              expr.span,
             );
           }
           const arg = checker.expr(args[i]);
@@ -804,7 +822,7 @@ export function checkBody(
         if (args.length > lhsTy.params.length) {
           throw new CompilerError(
             `too many arguments passed, expected ${lhsTy.params.length}, found ${args.length}`,
-            expr.span
+            expr.span,
           );
         }

@@ -827,13 +845,13 @@ export function checkBody(
             } else {
               throw new CompilerError(
                 `tuple with ${elems.length} elements cannot be indexed with ${field.value}`,
-                field.span
+                field.span,
               );
             }
           } else {
             throw new CompilerError(
               "tuple fields must be accessed with numbers",
-              field.span
+              field.span,
             );
           }
           break;
@@ -841,14 +859,14 @@ export function checkBody(
           case "struct": {
             if (typeof field.value === "string") {
               const idx = lhs.ty.fields.findIndex(
-                ([name]) => name === field.value
+                ([name]) => name === field.value,
               );
               if (idx === -1) {
                 throw new CompilerError(
                   `field \`${field.value}\` does not exist on ${printTy(
-                    lhs.ty
+                    lhs.ty,
                   )}`,
-                  field.span
+                  field.span,
                 );
               }

@@ -857,7 +875,7 @@ export function checkBody(
             } else {
               throw new CompilerError(
                 "struct fields must be accessed with their name",
-                field.span
+                field.span,
               );
             }
             break;
@@ -865,9 +883,9 @@ export function checkBody(
           default: {
             throw new CompilerError(
               `cannot access field \`${field.value}\` on type \`${printTy(
-                lhs.ty
+                lhs.ty,
               )}\``,
-              expr.span
+              expr.span,
             );
           }
         }
@@ -933,7 +951,7 @@ export function checkBody(
       }
       case "structLiteral": {
         const fields = expr.fields.map<StructLiteralField<Typecked>>(
-          ({ name, expr }) => ({ name, expr: this.expr(expr) })
+          ({ name, expr }) => ({ name, expr: this.expr(expr) }),
         );

         const structTy = typeOf(expr.name.res, expr.name.span);
@@ -941,7 +959,7 @@ export function checkBody(
         if (structTy.kind !== "struct") {
           throw new CompilerError(
             `struct literal is only allowed for struct types`,
-            expr.span
+            expr.span,
           );
         }

@@ -949,12 +967,12 @@ export function checkBody(

         fields.forEach(({ name, expr: field }, i) => {
           const fieldIdx = structTy.fields.findIndex(
-            (def) => def[0] === name.name
+            (def) => def[0] === name.name,
          );
           if (fieldIdx == -1) {
             throw new CompilerError(
               `field ${name.name} doesn't exist on type ${expr.name.name}`,
-              name.span
+              name.span,
             );
           }
           const fieldTy = structTy.fields[fieldIdx];
@@ -972,7 +990,7 @@ export function checkBody(
           if (missing.length > 0) {
             throw new CompilerError(
               `missing fields in literal: ${missing.join(", ")}`,
-              expr.span
+              expr.span,
             );
           }

@@ -1025,7 +1043,9 @@ export function checkBody(
           });
         }

-        return { ...expr, ty };
+        const innerExpr = superFoldExpr(expr, this);
+
+        return { ...innerExpr, ty };
       },
       type(type) {
         return type;
@@ -1043,7 +1063,7 @@ export function checkBody(
 function checkBinary(
   expr: Expr<Resolved> & ExprBinary<Resolved>,
   lhs: Expr<Typecked>,
-  rhs: Expr<Typecked>
+  rhs: Expr<Typecked>,
 ): Expr<Typecked> {
   const lhsTy = lhs.ty;
   const rhsTy = rhs.ty;
@@ -1085,13 +1105,13 @@ function checkBinary(
     `invalid types for binary operation: ${printTy(lhs.ty)} ${
       expr.binaryKind
     } ${printTy(rhs.ty)}`,
-    expr.span
+    expr.span,
   );
 }

 function checkUnary(
   expr: Expr<Resolved> & ExprUnary<Resolved>,
-  rhs: Expr<Typecked>
+  rhs: Expr<Typecked>,
 ): Expr<Typecked> {
   const rhsTy = rhs.ty;

@@ -1108,6 +1128,6 @@ function checkUnary(

   throw new CompilerError(
     `invalid types for unary operation: ${expr.unaryKind} ${printTy(rhs.ty)}`,
-    expr.span
+    expr.span,
   );
 }
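Note: the `___transmute` special case above typechecks the arguments as usual but assigns the whole call a fresh inference variable as its result type, so the value can later unify with whatever type the surrounding code expects. A minimal standalone sketch of that idea follows; the names `TinyInfer`, `newVar`, and `assign` are illustrative only, not the compiler's actual API.

type Ty =
  | { kind: "int" }
  | { kind: "string" }
  | { kind: "var"; index: number; resolved?: Ty };

class TinyInfer {
  private vars: { kind: "var"; index: number; resolved?: Ty }[] = [];

  // Create a fresh, unconstrained type variable.
  newVar(): Ty {
    const v = { kind: "var" as const, index: this.vars.length };
    this.vars.push(v);
    return v;
  }

  // Constrain a type against an expected type: an unresolved variable simply
  // takes on the expected type, anything else must match exactly.
  assign(expected: Ty, actual: Ty): void {
    if (actual.kind === "var" && actual.resolved === undefined) {
      actual.resolved = expected;
      return;
    }
    if (expected.kind !== actual.kind) {
      throw new Error("type mismatch");
    }
  }
}

// A transmute-style call: the argument is checked normally, but the result is
// a fresh variable, so the call site decides what type comes out.
function typeTransmuteCall(infcx: TinyInfer, argTy: Ty): Ty {
  void argTy; // the argument's type places no constraint on the result
  return infcx.newVar();
}

const infcx = new TinyInfer();
const result = typeTransmuteCall(infcx, { kind: "int" });
infcx.assign({ kind: "string" }, result); // later context picks the type
console.log(result);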
|
|
|
|
|
@@ -56,7 +56,7 @@ class FmtCtx {
     this.indentation--;
     if (this.indentation < 0) {
       throw new Error(
-        "Cannot dedent from 0 indents, there are more dedents than indents"
+        "Cannot dedent from 0 indents, there are more dedents than indents",
       );
     }
     this.linebreak();
@@ -92,7 +92,7 @@ class FmtCtx {

   word(
     word: string | number | bigint,
-    color: (s: string) => string = identity
+    color: (s: string) => string = identity,
   ) {
     const last = this.wordsInSexpr.length - 1;
     if (this.wordsInSexpr[last] > 0 && !this.freshLinebreak) {
|
|
@@ -34,5 +34,11 @@ function allocateItem(objSize: I32, align: I32): I32 = (
     );
   );

+  HEAD_PTR = newHeadPtr;
+
   actualObjPtr
 );
+
+function deallocateItem(ptr: I32, objSize: I32) = (
+  std.println("uwu deawwocate :3");
+);
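Note: in the allocator change above, `allocateItem` now advances the head pointer (`HEAD_PTR = newHeadPtr;`) before yielding `actualObjPtr`, while `deallocateItem` is still a stub that only prints. A rough TypeScript sketch of that shape, under the assumption that allocation is a simple bump over a linear memory region; all names below are illustrative, not the runtime's actual API.

// Illustrative only: a bump allocator whose whole state is one head pointer.
let headPtr = 0; // next free offset in a linear memory region

function alignUp(value: number, alignment: number): number {
  return Math.ceil(value / alignment) * alignment;
}

function allocateItem(objSize: number, alignment: number): number {
  const actualObjPtr = alignUp(headPtr, alignment); // aligned start of the object
  const newHeadPtr = actualObjPtr + objSize; // bump past the object
  headPtr = newHeadPtr; // advance the head pointer
  return actualObjPtr; // the object's address is the result
}

function deallocateItem(ptr: number, objSize: number): void {
  // Stub, like the source version: nothing is reclaimed yet.
  console.log(`deallocate ${objSize} bytes at ${ptr}`);
}

// Usage: allocate two items, then release the first one.
const a = allocateItem(16, 8);
const b = allocateItem(24, 8);
deallocateItem(a, 16);
console.log(a, b, headPtr);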