Mirror of https://github.com/Noratrieb/riverdelta.git, synced 2026-01-14 08:25:02 +01:00
error recovery!

commit ef04f21100 (parent c0c08488ba)
18 changed files with 799 additions and 366 deletions
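
The commit threads explicit error recovery through every phase: `throw new CompilerError(...)` call sites become `gcx.error.emit(...)`, which returns an `ErrorEmitted` token, and the AST and type layers gain `kind: "error"` variants that carry that token instead of aborting compilation. A minimal sketch of the pattern (simplified, with assumed helper names, not the repository's exact code; the real definitions are in the src/error.ts and src/ast.ts hunks below):

```ts
type Span = { start: number; end: number };

class CompilerError {
  constructor(public msg: string, public span: Span) {}
}

// Emitting an error yields a proof token; error nodes must store it.
const ERROR_EMITTED = Symbol("ErrorEmitted");
type ErrorEmitted = typeof ERROR_EMITTED;

class ErrorHandler {
  private errors: CompilerError[] = [];
  constructor(private emitter: (msg: string) => void = (msg) => console.error(msg)) {}

  emit(err: CompilerError): ErrorEmitted {
    this.emitter(`error: ${err.msg}`);
    this.errors.push(err);
    return ERROR_EMITTED;
  }

  hasErrors(): boolean {
    return this.errors.length > 0;
  }
}

// A "recovered" node, mirroring the new ExprError/TyError/ItemKindError variants.
type Expr =
  | { kind: "literal"; value: number; span: Span }
  | { kind: "error"; err: ErrorEmitted; span: Span };

function parseNumber(handler: ErrorHandler, src: string, span: Span): Expr {
  const value = Number(src);
  if (Number.isNaN(value)) {
    // Recover instead of throwing: the error node carries proof that the
    // problem was already reported, so later phases can skip or propagate it.
    return {
      kind: "error",
      err: handler.emit(new CompilerError(`not a number: ${src}`, span)),
      span,
    };
  }
  return { kind: "literal", value, span };
}
```

Because `ErrorEmitted` is a `unique symbol` type, an error node can only be constructed after something was actually reported, which is what lets codegen assert `unreachable("codegen should never see errors")` later in the diff.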
@@ -1,2 +1,2 @@
 /target
-/ui-tests/target
+/ui-harness/target
src/ast.ts (53 lines changed)
@@ -1,4 +1,4 @@
-import { LoadedFile, Span } from "./error";
+import { ErrorEmitted, LoadedFile, Span, unreachable } from "./error";
 import { LitIntType } from "./lexer";
 import { ComplexMap } from "./utils";
 
@@ -58,6 +58,7 @@ export type Crate<P extends Phase> = {
   itemsById: ComplexMap<ItemId, Item<P>>;
   packageName: string;
   rootFile: LoadedFile;
+  fatalError: ErrorEmitted | undefined;
 } & P["typeckResults"];
 
 export type DepCrate = Crate<Final>;
@@ -103,7 +104,8 @@ export type ItemKind<P extends Phase> =
   | ItemKindImport<P>
   | ItemKindMod<P>
   | ItemKindExtern
-  | ItemKindGlobal<P>;
+  | ItemKindGlobal<P>
+  | ItemKindError;
 
 type ItemVariant<Variant, P extends Phase> = Variant & Item<P>;
 
@@ -113,6 +115,7 @@ export type ItemImport<P extends Phase> = ItemVariant<ItemKindImport<P>, P>;
 export type ItemMod<P extends Phase> = ItemVariant<ItemKindMod<P>, P>;
 export type ItemExtern<P extends Phase> = ItemVariant<ItemKindExtern, P>;
 export type ItemGlobal<P extends Phase> = ItemVariant<ItemKindGlobal<P>, P>;
+export type ItemError<P extends Phase> = ItemVariant<ItemKindError, P>;
 
 export type Item<P extends Phase> = ItemKind<P> & {
   span: Span;
@@ -178,6 +181,11 @@ export type ItemKindGlobal<P extends Phase> = {
   ty?: Ty;
 };
 
+export type ItemKindError = {
+  kind: "error";
+  err: ErrorEmitted;
+};
+
 export type ExprEmpty = { kind: "empty" };
 
 export type ExprLet<P extends Phase> = {
@@ -294,11 +302,16 @@ export type StructLiteralField<P extends Phase> = {
   fieldIdx?: number;
 };
 
-export type TupleLiteral<P extends Phase> = {
+export type ExprTupleLiteral<P extends Phase> = {
   kind: "tupleLiteral";
   fields: Expr<P>[];
 };
 
+export type ExprError = {
+  kind: "error";
+  err: ErrorEmitted;
+};
+
 export type ExprKind<P extends Phase> =
   | ExprEmpty
   | ExprLet<P>
@@ -315,7 +328,8 @@ export type ExprKind<P extends Phase> =
   | ExprLoop<P>
   | ExprBreak
   | ExprStructLiteral<P>
-  | TupleLiteral<P>;
+  | ExprTupleLiteral<P>
+  | ExprError;
 
 export type Expr<P extends Phase> = ExprKind<P> & {
   span: Span;
@@ -382,7 +396,7 @@ const BINARY_KIND_PREC_CLASS = new Map<BinaryKind, number>([
 export function binaryExprPrecedenceClass(k: BinaryKind): number {
   const cls = BINARY_KIND_PREC_CLASS.get(k);
   if (cls === undefined) {
-    throw new Error(`Invalid binary kind: '${k}'`);
+    unreachable(`Invalid binary kind: '${k}'`);
   }
   return cls;
 }
@ -407,7 +421,8 @@ export type TypeKind<P extends Phase> =
|
|||
kind: "rawptr";
|
||||
inner: Type<P>;
|
||||
}
|
||||
| { kind: "never" };
|
||||
| { kind: "never" }
|
||||
| { kind: "error"; err: ErrorEmitted };
|
||||
|
||||
export type Type<P extends Phase> = TypeKind<P> & {
|
||||
span: Span;
|
||||
|
|
@ -437,7 +452,8 @@ export type Resolution =
|
|||
| {
|
||||
kind: "builtin";
|
||||
name: BuiltinName;
|
||||
};
|
||||
}
|
||||
| { kind: "error"; err: ErrorEmitted };
|
||||
|
||||
export const BUILTINS = [
|
||||
"print",
|
||||
|
|
@ -527,6 +543,11 @@ export type TyNever = {
|
|||
kind: "never";
|
||||
};
|
||||
|
||||
export type TyError = {
|
||||
kind: "error";
|
||||
err: ErrorEmitted;
|
||||
};
|
||||
|
||||
export type Ty =
|
||||
| TyString
|
||||
| TyInt
|
||||
|
|
@ -538,7 +559,8 @@ export type Ty =
|
|||
| TyVar
|
||||
| TyStruct
|
||||
| TyRawPtr
|
||||
| TyNever;
|
||||
| TyNever
|
||||
| TyError;
|
||||
|
||||
export function tyIsUnit(ty: Ty): ty is TyUnit {
|
||||
return ty.kind === "tuple" && ty.elems.length === 0;
|
||||
|
|
@ -591,7 +613,7 @@ export function mkDefaultFolder<
|
|||
return newItem;
|
||||
},
|
||||
itemInner(_item) {
|
||||
throw new Error("unimplemented");
|
||||
unreachable("unimplemented");
|
||||
},
|
||||
};
|
||||
(folder.item as any)[ITEM_DEFAULT] = ITEM_DEFAULT;
|
||||
|
|
@ -604,7 +626,7 @@ export function foldAst<From extends Phase, To extends Phase>(
|
|||
folder: Folder<From, To>,
|
||||
): Crate<To> {
|
||||
if ((folder.item as any)[ITEM_DEFAULT] !== ITEM_DEFAULT) {
|
||||
throw new Error("must not override `item` on folders");
|
||||
unreachable("must not override `item` on folders");
|
||||
}
|
||||
|
||||
return {
|
||||
|
|
@ -614,6 +636,7 @@ export function foldAst<From extends Phase, To extends Phase>(
|
|||
typeckResults: "typeckResults" in ast ? ast.typeckResults : undefined,
|
||||
packageName: ast.packageName,
|
||||
rootFile: ast.rootFile,
|
||||
fatalError: ast.fatalError,
|
||||
};
|
||||
}
|
||||
|
||||
|
|
@ -701,6 +724,9 @@ export function superFoldItem<From extends Phase, To extends Phase>(
|
|||
init: folder.expr(item.init),
|
||||
};
|
||||
}
|
||||
case "error": {
|
||||
return { ...item };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -818,6 +844,9 @@ export function superFoldExpr<From extends Phase, To extends Phase>(
|
|||
fields: expr.fields.map(folder.expr.bind(folder)),
|
||||
};
|
||||
}
|
||||
case "error": {
|
||||
return { ...expr };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -857,9 +886,11 @@ export function superFoldType<From extends Phase, To extends Phase>(
|
|||
case "never": {
|
||||
return { ...type, kind: "never" };
|
||||
}
|
||||
case "error":
|
||||
return { ...type };
|
||||
}
|
||||
}
|
||||
|
||||
export function varUnreachable(): never {
|
||||
throw new Error("Type variables must not occur after type checking");
|
||||
unreachable("Type variables must not occur after type checking");
|
||||
}
|
||||
|
|
|
|||
|
|
@ -210,6 +210,8 @@ export function lower(gcx: GlobalContext): wasm.Module {
|
|||
case "extern":
|
||||
case "type":
|
||||
break;
|
||||
case "error":
|
||||
unreachable("codegen should never see errors");
|
||||
default: {
|
||||
const _: never = item;
|
||||
}
|
||||
|
|
@ -233,7 +235,7 @@ export function lower(gcx: GlobalContext): wasm.Module {
|
|||
case "funccall": {
|
||||
const idx = cx.funcIndices.get(rel.res);
|
||||
if (idx === undefined) {
|
||||
throw new Error(
|
||||
unreachable(
|
||||
`no function found for relocation '${JSON.stringify(rel.res)}'`,
|
||||
);
|
||||
}
|
||||
|
|
@ -243,7 +245,7 @@ export function lower(gcx: GlobalContext): wasm.Module {
|
|||
case "globalref": {
|
||||
const idx = cx.globalIndices.get(rel.res);
|
||||
if (idx === undefined) {
|
||||
throw new Error(
|
||||
unreachable(
|
||||
`no global found for relocation '${JSON.stringify(rel.res)}'`,
|
||||
);
|
||||
}
|
||||
|
|
@ -299,11 +301,11 @@ function lowerGlobal(cx: Context, def: ItemGlobal<Typecked>) {
|
|||
valtype = "i64";
|
||||
break;
|
||||
default:
|
||||
throw new Error(`invalid global ty: ${printTy(def.init.ty)}`);
|
||||
unreachable(`invalid global ty: ${printTy(def.init.ty)}`);
|
||||
}
|
||||
|
||||
if (def.init.kind !== "literal" || def.init.value.kind !== "int") {
|
||||
throw new Error(`invalid global init: ${JSON.stringify(def)}`);
|
||||
unreachable(`invalid global init: ${JSON.stringify(def)}`);
|
||||
}
|
||||
|
||||
const init: wasm.Instr = {
|
||||
|
|
@ -464,7 +466,7 @@ function tryLowerLValue(
|
|||
case "item": {
|
||||
const item = fcx.cx.gcx.findItem(res.id);
|
||||
if (item.kind !== "global") {
|
||||
throw new Error("cannot store to non-global item");
|
||||
unreachable("cannot store to non-global item");
|
||||
}
|
||||
|
||||
return {
|
||||
|
|
@ -473,9 +475,11 @@ function tryLowerLValue(
|
|||
};
|
||||
}
|
||||
case "builtin": {
|
||||
throw new Error("cannot store to builtin");
|
||||
unreachable("cannot store to builtin");
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
case "fieldAccess": {
|
||||
// Field access lvalues (or rather, lvalues in general) are made of two important parts:
|
||||
|
|
@ -666,7 +670,7 @@ function lowerExpr(
|
|||
case "print":
|
||||
todo("print function");
|
||||
default: {
|
||||
throw new Error(`${res.name}#B is not a value`);
|
||||
unreachable(`${res.name}#B is not a value`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -758,7 +762,7 @@ function lowerExpr(
|
|||
case "*":
|
||||
case "/":
|
||||
case "%":
|
||||
throw new Error(`Invalid bool binary expr: ${expr.binaryKind}`);
|
||||
unreachable(`Invalid bool binary expr: ${expr.binaryKind}`);
|
||||
}
|
||||
|
||||
instrs.push({ kind });
|
||||
|
|
@ -785,7 +789,7 @@ function lowerExpr(
|
|||
instrs.push({ kind: "i32.const", imm: -1n });
|
||||
instrs.push({ kind: "i32.xor" });
|
||||
} else {
|
||||
throw new Error("invalid type for !");
|
||||
unreachable("invalid type for !");
|
||||
}
|
||||
break;
|
||||
case "-":
|
||||
|
|
@ -801,7 +805,7 @@ function lowerExpr(
|
|||
const { res } = expr.lhs.value;
|
||||
if (res.kind === "builtin") {
|
||||
const assertArgs = (n: number) => {
|
||||
if (expr.args.length !== n) throw new Error("nope");
|
||||
if (expr.args.length !== n) unreachable("nope");
|
||||
};
|
||||
switch (res.name) {
|
||||
case "trap": {
|
||||
|
|
@ -951,7 +955,7 @@ function lowerExpr(
|
|||
});
|
||||
break;
|
||||
default: {
|
||||
throw new Error(
|
||||
unreachable(
|
||||
`unsupported struct content type: ${fieldPart.type}`,
|
||||
);
|
||||
}
|
||||
|
|
@ -961,7 +965,7 @@ function lowerExpr(
|
|||
break;
|
||||
}
|
||||
default:
|
||||
throw new Error("invalid field access lhs");
|
||||
unreachable("invalid field access lhs");
|
||||
}
|
||||
|
||||
break;
|
||||
|
|
@ -1042,7 +1046,7 @@ function lowerExpr(
|
|||
const allocate: wasm.Instr = { kind: "call", func: DUMMY_IDX };
|
||||
const allocateItemId = fcx.cx.knownDefPaths.get(ALLOCATE_ITEM);
|
||||
if (!allocateItemId) {
|
||||
throw new Error("std.rt.allocateItem not found");
|
||||
unreachable("std.rt.allocateItem not found");
|
||||
}
|
||||
fcx.cx.relocations.push({
|
||||
kind: "funccall",
|
||||
|
|
@ -1074,6 +1078,8 @@ function lowerExpr(
|
|||
expr.fields.forEach((field) => lowerExpr(fcx, instrs, field));
|
||||
break;
|
||||
}
|
||||
case "error":
|
||||
unreachable("codegen should never see errors");
|
||||
default: {
|
||||
const _: never = expr;
|
||||
}
|
||||
|
|
@ -1247,6 +1253,8 @@ function argRetAbi(param: Ty): ArgRetAbi {
|
|||
return [];
|
||||
case "var":
|
||||
varUnreachable();
|
||||
case "error":
|
||||
unreachable("codegen should not see errors");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1302,6 +1310,8 @@ function wasmTypeForBody(ty: Ty): wasm.ValType[] {
|
|||
return [];
|
||||
case "var":
|
||||
varUnreachable();
|
||||
case "error":
|
||||
unreachable("codegen should not see errors");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1316,7 +1326,7 @@ function sizeOfValtype(type: wasm.ValType): number {
|
|||
case "v128":
|
||||
case "funcref":
|
||||
case "externref":
|
||||
throw new Error("types not emitted");
|
||||
unreachable("types not emitted");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1490,7 +1500,7 @@ function subRefcount(
|
|||
) {
|
||||
const deallocateItemId = fcx.cx.knownDefPaths.get(DEALLOCATE_ITEM);
|
||||
if (!deallocateItemId) {
|
||||
throw new Error("std.rt.deallocateItem not found");
|
||||
unreachable("std.rt.deallocateItem not found");
|
||||
}
|
||||
|
||||
const layout: wasm.ValType[] = kind === "string" ? ["i32", "i32"] : ["i32"];
|
||||
|
|
@ -1556,7 +1566,7 @@ function subRefcount(
|
|||
}
|
||||
|
||||
function todo(msg: string): never {
|
||||
throw new Error(`TODO: ${msg}`);
|
||||
unreachable(`TODO: ${msg}`);
|
||||
}
|
||||
|
||||
// Make the program runnable using wasi-preview-1
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import { Crate, DepCrate, Final, Item, ItemId, Phase } from "./ast";
|
||||
import { Span } from "./error";
|
||||
import { ErrorHandler, Span } from "./error";
|
||||
import { Ids, unwrap } from "./utils";
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
|
|
@ -20,6 +20,7 @@ export type CrateLoader = (
|
|||
* dependencies (which also use the same context) do not care about that.
|
||||
*/
|
||||
export class GlobalContext {
|
||||
public error: ErrorHandler = new ErrorHandler();
|
||||
public finalizedCrates: Crate<Final>[] = [];
|
||||
public crateId: Ids = new Ids();
|
||||
|
||||
|
|
@ -38,6 +39,17 @@ export class GlobalContext {
|
|||
allCrates.find((crate) => crate.id === id.crateId),
|
||||
);
|
||||
|
||||
if (crate.fatalError) {
|
||||
return {
|
||||
kind: "error",
|
||||
defPath: [],
|
||||
err: crate.fatalError,
|
||||
id: new ItemId(crate.id, 0),
|
||||
name: "",
|
||||
span: Span.startOfFile(crate.rootFile),
|
||||
};
|
||||
}
|
||||
|
||||
if (id.itemIdx === 0) {
|
||||
const contents: Item<P>[] | Item<Final>[] = crate.rootItems;
|
||||
// Typescript does not seem to be able to understand this here.
|
||||
|
|
|
|||
src/error.ts (56 lines changed)
@@ -1,3 +1,5 @@
+import chalk from "chalk";
+
 export type LoadedFile = {
   path?: string;
   content: string;
@ -33,34 +35,44 @@ export class Span {
|
|||
public static DUMMY: Span = new Span(0, 0, { content: "" });
|
||||
}
|
||||
|
||||
export class CompilerError extends Error {
|
||||
export type Emitter = (string: string) => void;
|
||||
|
||||
export class ErrorHandler {
|
||||
private errors: CompilerError[] = [];
|
||||
|
||||
constructor(
|
||||
private emitter = (msg: string) => globalThis.console.error(msg),
|
||||
) {}
|
||||
|
||||
public emit(err: CompilerError): ErrorEmitted {
|
||||
renderError(this.emitter, err);
|
||||
this.errors.push(err);
|
||||
return ERROR_EMITTED;
|
||||
}
|
||||
|
||||
public hasErrors(): boolean {
|
||||
return this.errors.length > 0;
|
||||
}
|
||||
}
|
||||
|
||||
const ERROR_EMITTED = Symbol();
|
||||
export type ErrorEmitted = typeof ERROR_EMITTED;
|
||||
|
||||
export class CompilerError {
|
||||
msg: string;
|
||||
span: Span;
|
||||
|
||||
constructor(msg: string, span: Span) {
|
||||
super(msg);
|
||||
this.msg = msg;
|
||||
this.span = span;
|
||||
}
|
||||
}
|
||||
|
||||
export function withErrorPrinter<R>(
|
||||
f: () => R,
|
||||
afterError: (e: CompilerError) => R,
|
||||
): R {
|
||||
try {
|
||||
return f();
|
||||
} catch (e) {
|
||||
if (e instanceof CompilerError) {
|
||||
renderError(e);
|
||||
return afterError(e);
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
// Shadow console.
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const console = {};
|
||||
|
||||
function renderError(e: CompilerError) {
|
||||
function renderError(emitter: Emitter, e: CompilerError) {
|
||||
const { span } = e;
|
||||
const { content } = span.file;
|
||||
|
||||
|
|
@ -76,10 +88,10 @@ function renderError(e: CompilerError) {
|
|||
}
|
||||
const lineIdx = lineSpans.indexOf(line);
|
||||
const lineNo = lineIdx + 1;
|
||||
console.error(`error: ${e.message}`);
|
||||
console.error(` --> ${span.file.path ?? "<unknown>"}:${lineNo}`);
|
||||
emitter(chalk.red(`error: ${e.msg}`));
|
||||
emitter(` --> ${span.file.path ?? "<unknown>"}:${lineNo}`);
|
||||
|
||||
console.error(`${lineNo} | ${spanToSnippet(content, line)}`);
|
||||
emitter(`${lineNo} | ${spanToSnippet(content, line)}`);
|
||||
const startRelLine =
|
||||
span.start === Number.MAX_SAFE_INTEGER ? 0 : span.start - line.start;
|
||||
|
||||
|
|
@ -88,7 +100,7 @@ function renderError(e: CompilerError) {
|
|||
? 1
|
||||
: min(span.end, line.end) - span.start;
|
||||
|
||||
console.error(
|
||||
emitter(
|
||||
`${" ".repeat(String(lineNo).length)} ${" ".repeat(
|
||||
startRelLine,
|
||||
)}${"^".repeat(spanLength)}`,
|
||||
|
|
|
|||
src/index.ts (176 lines changed)
@ -1,4 +1,4 @@
|
|||
import { LoadedFile, Span, withErrorPrinter } from "./error";
|
||||
import { LoadedFile, Span } from "./error";
|
||||
import { isValidIdent, tokenize } from "./lexer";
|
||||
import { lower as lowerToWasm } from "./codegen";
|
||||
import { ParseState, parse } from "./parser";
|
||||
|
|
@ -16,9 +16,9 @@ const INPUT = `
|
|||
type A = struct { a: Int };
|
||||
|
||||
function main() = (
|
||||
let a = A { a: 0 };
|
||||
rawr(___transmute(a));
|
||||
std.printInt(a.a);
|
||||
let a: Int = "";
|
||||
let b: Int = "";
|
||||
c;
|
||||
);
|
||||
|
||||
function rawr(a: *A) = (
|
||||
|
|
@ -42,91 +42,95 @@ function main() {
|
|||
const gcx = new GlobalContext(opts, loadCrate);
|
||||
const mainCrate = gcx.crateId.next();
|
||||
|
||||
withErrorPrinter(
|
||||
() => {
|
||||
const start = Date.now();
|
||||
const start = Date.now();
|
||||
|
||||
if (packageName !== "std") {
|
||||
gcx.crateLoader(gcx, "std", Span.startOfFile(file));
|
||||
if (packageName !== "std") {
|
||||
gcx.crateLoader(gcx, "std", Span.startOfFile(file));
|
||||
}
|
||||
|
||||
const tokens = tokenize(gcx.error, file);
|
||||
// We treat lexer errors as fatal.
|
||||
if (!tokens.ok) {
|
||||
process.exit(1);
|
||||
}
|
||||
if (debug.has("tokens")) {
|
||||
console.log("-----TOKENS------------");
|
||||
console.log(tokens);
|
||||
}
|
||||
|
||||
const parseState: ParseState = { tokens: tokens.tokens, gcx, file };
|
||||
|
||||
const ast: Crate<Built> = parse(packageName, parseState, mainCrate);
|
||||
if (debug.has("ast")) {
|
||||
console.log("-----AST---------------");
|
||||
|
||||
console.dir(ast.rootItems, { depth: 50 });
|
||||
|
||||
console.log("-----AST pretty--------");
|
||||
const printed = printAst(ast);
|
||||
console.log(printed);
|
||||
}
|
||||
|
||||
if (debug.has("resolved")) {
|
||||
console.log("-----AST resolved------");
|
||||
}
|
||||
const resolved = resolve(gcx, ast);
|
||||
if (debug.has("resolved")) {
|
||||
const resolvedPrinted = printAst(resolved);
|
||||
console.log(resolvedPrinted);
|
||||
}
|
||||
|
||||
if (debug.has("typecked")) {
|
||||
console.log("-----AST typecked------");
|
||||
}
|
||||
const typecked: Crate<Typecked> = typeck(gcx, resolved);
|
||||
if (debug.has("typecked")) {
|
||||
const typeckPrinted = printAst(typecked);
|
||||
console.log(typeckPrinted);
|
||||
}
|
||||
|
||||
if (debug.has("wat")) {
|
||||
console.log("-----wasm--------------");
|
||||
}
|
||||
|
||||
// Codegen should never handle errornous code.
|
||||
if (gcx.error.hasErrors()) {
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
gcx.finalizedCrates.push(typecked);
|
||||
const wasmModule = lowerToWasm(gcx);
|
||||
const moduleStringColor = writeModuleWatToString(wasmModule, true);
|
||||
const moduleString = writeModuleWatToString(wasmModule);
|
||||
|
||||
if (debug.has("wat")) {
|
||||
console.log(moduleStringColor);
|
||||
}
|
||||
|
||||
if (!opts.noOutput) {
|
||||
fs.writeFileSync("out.wat", moduleString);
|
||||
}
|
||||
|
||||
if (debug.has("wasm-validate")) {
|
||||
console.log("--validate wasm-tools--");
|
||||
|
||||
exec("wasm-tools validate out.wat", (error, stdout, stderr) => {
|
||||
if (error && error.code === 1) {
|
||||
console.log(stderr);
|
||||
} else if (error) {
|
||||
console.error(`failed to spawn wasm-tools: ${error.message}`);
|
||||
} else {
|
||||
if (stderr) {
|
||||
console.log(stderr);
|
||||
}
|
||||
if (stdout) {
|
||||
console.log(stdout);
|
||||
}
|
||||
}
|
||||
|
||||
const tokens = tokenize(file);
|
||||
if (debug.has("tokens")) {
|
||||
console.log("-----TOKENS------------");
|
||||
console.log(tokens);
|
||||
}
|
||||
|
||||
const parseState: ParseState = { tokens, file };
|
||||
|
||||
const ast: Crate<Built> = parse(packageName, parseState, mainCrate);
|
||||
if (debug.has("ast")) {
|
||||
console.log("-----AST---------------");
|
||||
|
||||
console.dir(ast.rootItems, { depth: 50 });
|
||||
|
||||
console.log("-----AST pretty--------");
|
||||
const printed = printAst(ast);
|
||||
console.log(printed);
|
||||
}
|
||||
|
||||
if (debug.has("resolved")) {
|
||||
console.log("-----AST resolved------");
|
||||
}
|
||||
const resolved = resolve(gcx, ast);
|
||||
if (debug.has("resolved")) {
|
||||
const resolvedPrinted = printAst(resolved);
|
||||
console.log(resolvedPrinted);
|
||||
}
|
||||
|
||||
if (debug.has("typecked")) {
|
||||
console.log("-----AST typecked------");
|
||||
}
|
||||
const typecked: Crate<Typecked> = typeck(gcx, resolved);
|
||||
if (debug.has("typecked")) {
|
||||
const typeckPrinted = printAst(typecked);
|
||||
console.log(typeckPrinted);
|
||||
}
|
||||
|
||||
if (debug.has("wat")) {
|
||||
console.log("-----wasm--------------");
|
||||
}
|
||||
|
||||
gcx.finalizedCrates.push(typecked);
|
||||
const wasmModule = lowerToWasm(gcx);
|
||||
const moduleStringColor = writeModuleWatToString(wasmModule, true);
|
||||
const moduleString = writeModuleWatToString(wasmModule);
|
||||
|
||||
if (debug.has("wat")) {
|
||||
console.log(moduleStringColor);
|
||||
}
|
||||
|
||||
if (!opts.noOutput) {
|
||||
fs.writeFileSync("out.wat", moduleString);
|
||||
}
|
||||
|
||||
if (debug.has("wasm-validate")) {
|
||||
console.log("--validate wasm-tools--");
|
||||
|
||||
exec("wasm-tools validate out.wat", (error, stdout, stderr) => {
|
||||
if (error && error.code === 1) {
|
||||
console.log(stderr);
|
||||
} else if (error) {
|
||||
console.error(`failed to spawn wasm-tools: ${error.message}`);
|
||||
} else {
|
||||
if (stderr) {
|
||||
console.log(stderr);
|
||||
}
|
||||
if (stdout) {
|
||||
console.log(stdout);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`finished in ${Date.now() - start}ms`);
|
||||
});
|
||||
}
|
||||
},
|
||||
() => process.exit(1),
|
||||
);
|
||||
console.log(`finished in ${Date.now() - start}ms`);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
|
|
|
|||
|
|
@ -1,17 +1,20 @@
|
|||
import { ErrorHandler, unreachable } from "./error";
|
||||
import { tokenize } from "./lexer";
|
||||
|
||||
it("should tokenize an emtpy function", () => {
|
||||
const input = `function hello() = ;`;
|
||||
|
||||
const tokens = tokenize({ content: input });
|
||||
const tokens = tokenize(new ErrorHandler(), { content: input });
|
||||
if (!tokens.ok) unreachable("lexer error");
|
||||
|
||||
expect(tokens).toMatchSnapshot();
|
||||
expect(tokens.tokens).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it("should tokenize hello world", () => {
|
||||
const input = `print("hello world")`;
|
||||
|
||||
const tokens = tokenize({ content: input });
|
||||
const tokens = tokenize(new ErrorHandler(), { content: input });
|
||||
if (!tokens.ok) unreachable("lexer error");
|
||||
|
||||
expect(tokens).toMatchSnapshot();
|
||||
expect(tokens.tokens).toMatchSnapshot();
|
||||
});
|
||||
|
|
|
|||
src/lexer.ts (51 lines changed)
@ -1,4 +1,10 @@
|
|||
import { CompilerError, LoadedFile, Span } from "./error";
|
||||
import {
|
||||
CompilerError,
|
||||
ErrorEmitted,
|
||||
ErrorHandler,
|
||||
LoadedFile,
|
||||
Span,
|
||||
} from "./error";
|
||||
|
||||
export type DatalessToken =
|
||||
| "function"
|
||||
|
|
@ -86,7 +92,40 @@ const SINGLE_PUNCT: string[] = [
|
|||
"%",
|
||||
];
|
||||
|
||||
export function tokenize(file: LoadedFile): Token[] {
|
||||
class LexerError extends Error {
|
||||
constructor(public inner: CompilerError) {
|
||||
super("lexer error");
|
||||
}
|
||||
}
|
||||
|
||||
export type LexerResult =
|
||||
| {
|
||||
ok: true;
|
||||
tokens: Token[];
|
||||
}
|
||||
| {
|
||||
ok: false;
|
||||
|
||||
err: ErrorEmitted;
|
||||
};
|
||||
|
||||
export function tokenize(handler: ErrorHandler, file: LoadedFile): LexerResult {
|
||||
try {
|
||||
return { ok: true, tokens: tokenizeInner(file) };
|
||||
} catch (e) {
|
||||
if (e instanceof LexerError) {
|
||||
const err: ErrorEmitted = handler.emit(e.inner);
|
||||
return { ok: false, err };
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function tokenizeInner(file: LoadedFile): Token[] {
|
||||
const err = (msg: string, span: Span) =>
|
||||
new LexerError(new CompilerError(msg, span));
|
||||
|
||||
const { content: input } = file;
|
||||
const tokens: Token[] = [];
|
||||
let i = 0;
|
||||
|
|
@ -109,7 +148,7 @@ export function tokenize(file: LoadedFile): Token[] {
|
|||
while (input[i] !== "*" && input[i + 1] !== "/") {
|
||||
i++;
|
||||
if (input[i] === undefined) {
|
||||
throw new CompilerError("unterminated block comment", span);
|
||||
throw err("unterminated block comment", span);
|
||||
}
|
||||
}
|
||||
i++;
|
||||
|
|
@ -205,7 +244,7 @@ export function tokenize(file: LoadedFile): Token[] {
|
|||
result.push("\x19");
|
||||
break;
|
||||
default:
|
||||
throw new CompilerError(
|
||||
throw err(
|
||||
`invalid escape character: ${input[i]}`,
|
||||
new Span(span.end - 1, span.end, file),
|
||||
);
|
||||
|
|
@ -215,7 +254,7 @@ export function tokenize(file: LoadedFile): Token[] {
|
|||
|
||||
result.push(next);
|
||||
if (next === undefined) {
|
||||
throw new CompilerError(`Unterminated string literal`, span);
|
||||
throw err(`Unterminated string literal`, span);
|
||||
}
|
||||
}
|
||||
const value = result.join("");
|
||||
|
|
@ -263,7 +302,7 @@ export function tokenize(file: LoadedFile): Token[] {
|
|||
} else if (isWhitespace(next)) {
|
||||
// ignore
|
||||
} else {
|
||||
throw new CompilerError(`invalid character: \`${next}\``, span);
|
||||
throw err(`invalid character: \`${next}\``, span);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
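
Where mid-stream recovery is impractical, the lexer hunks above wrap internal throws at the public entry point instead: the lexer throws a private `LexerError`, and `tokenize()` catches it, reports it once through the handler, and returns a result union. A self-contained sketch of that shape (assumed, simplified names, not the repository's exact code):

```ts
type Span = { start: number; end: number };
class CompilerError {
  constructor(public msg: string, public span: Span) {}
}
const ERROR_EMITTED = Symbol("ErrorEmitted");
type ErrorEmitted = typeof ERROR_EMITTED;
class ErrorHandler {
  emit(err: CompilerError): ErrorEmitted {
    console.error(`error: ${err.msg}`);
    return ERROR_EMITTED;
  }
}

// Internal marker exception: never escapes tokenize().
class LexerError extends Error {
  constructor(public inner: CompilerError) {
    super("lexer error");
  }
}

type LexerResult =
  | { ok: true; tokens: string[] }
  | { ok: false; err: ErrorEmitted };

function tokenize(handler: ErrorHandler, input: string): LexerResult {
  try {
    return { ok: true, tokens: tokenizeInner(input) };
  } catch (e) {
    if (e instanceof LexerError) {
      // Report once, then hand the caller a recoverable result.
      return { ok: false, err: handler.emit(e.inner) };
    }
    throw e; // genuine bugs still crash loudly
  }
}

function tokenizeInner(input: string): string[] {
  if (input.includes("\u0000")) {
    throw new LexerError(new CompilerError("invalid character", { start: 0, end: 1 }));
  }
  return input.split(/\s+/).filter((t) => t.length > 0);
}
```

Callers such as the parser and the crate loader then treat `ok: false` as a fatal-but-reported condition, as the loader and parser hunks below show.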
|
|||
|
|
@ -1,27 +1,41 @@
|
|||
import { DepCrate } from "./ast";
|
||||
import { CrateId, DepCrate } from "./ast";
|
||||
import { CrateLoader, GlobalContext } from "./context";
|
||||
import { CompilerError, LoadedFile, Span, withErrorPrinter } from "./error";
|
||||
import { CompilerError, ErrorEmitted, LoadedFile, Span } from "./error";
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
import { tokenize } from "./lexer";
|
||||
import { ParseState, parse } from "./parser";
|
||||
import { resolve } from "./resolve";
|
||||
import { typeck } from "./typeck";
|
||||
import { ComplexMap } from "./utils";
|
||||
|
||||
export type LoadResult<T> =
|
||||
| {
|
||||
ok: true;
|
||||
value: T;
|
||||
}
|
||||
| {
|
||||
ok: false;
|
||||
err: CompilerError;
|
||||
};
|
||||
|
||||
export function loadModuleFile(
|
||||
relativeTo: string,
|
||||
moduleName: string,
|
||||
span: Span,
|
||||
): LoadedFile {
|
||||
): LoadResult<LoadedFile> {
|
||||
let searchDir: string;
|
||||
if (relativeTo.endsWith(".mod.nil")) {
|
||||
// x/uwu.mod.nil searches in x/
|
||||
searchDir = path.dirname(relativeTo);
|
||||
} else if (relativeTo.endsWith(".nil")) {
|
||||
throw new CompilerError(
|
||||
`.nil files cannot have submodules. use .mod.nil in a subdirectory`,
|
||||
span,
|
||||
);
|
||||
return {
|
||||
ok: false,
|
||||
err: new CompilerError(
|
||||
`.nil files cannot have submodules. use .mod.nil in a subdirectory`,
|
||||
span,
|
||||
),
|
||||
};
|
||||
} else {
|
||||
searchDir = relativeTo;
|
||||
}
|
||||
|
|
@ -41,13 +55,34 @@ export function loadModuleFile(
|
|||
});
|
||||
|
||||
if (content === undefined || filePath === undefined) {
|
||||
throw new CompilerError(
|
||||
`failed to load ${moduleName}, could not find ${options.join(" or ")}`,
|
||||
span,
|
||||
);
|
||||
return {
|
||||
ok: false,
|
||||
err: new CompilerError(
|
||||
`failed to load ${moduleName}, could not find ${options.join(" or ")}`,
|
||||
span,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
return { content, path: filePath };
|
||||
return { ok: true, value: { content, path: filePath } };
|
||||
}
|
||||
|
||||
function dummyErrorCrate(
|
||||
id: CrateId,
|
||||
packageName: string,
|
||||
emitted: ErrorEmitted,
|
||||
): DepCrate {
|
||||
return {
|
||||
id,
|
||||
packageName,
|
||||
rootItems: [],
|
||||
itemsById: new ComplexMap(),
|
||||
rootFile: { content: "<dummy>" },
|
||||
fatalError: emitted,
|
||||
typeckResults: {
|
||||
main: undefined,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export const loadCrate: CrateLoader = (
|
||||
|
|
@ -65,27 +100,27 @@ export const loadCrate: CrateLoader = (
|
|||
return existing;
|
||||
}
|
||||
|
||||
return withErrorPrinter(
|
||||
(): DepCrate => {
|
||||
const file = loadModuleFile(".", name, span);
|
||||
const crateId = gcx.crateId.next();
|
||||
|
||||
const crateId = gcx.crateId.next();
|
||||
const file = loadModuleFile(".", name, span);
|
||||
if (!file.ok) {
|
||||
return dummyErrorCrate(crateId, name, gcx.error.emit(file.err));
|
||||
}
|
||||
|
||||
const tokens = tokenize(file);
|
||||
const parseState: ParseState = { tokens, file };
|
||||
const ast = parse(name, parseState, crateId);
|
||||
const resolved = resolve(gcx, ast);
|
||||
const tokens = tokenize(gcx.error, file.value);
|
||||
if (!tokens.ok) {
|
||||
return dummyErrorCrate(crateId, name, tokens.err);
|
||||
}
|
||||
const parseState: ParseState = {
|
||||
tokens: tokens.tokens,
|
||||
file: file.value,
|
||||
gcx,
|
||||
};
|
||||
const ast = parse(name, parseState, crateId);
|
||||
const resolved = resolve(gcx, ast);
|
||||
|
||||
const typecked = typeck(gcx, resolved);
|
||||
const typecked = typeck(gcx, resolved);
|
||||
|
||||
gcx.finalizedCrates.push(typecked);
|
||||
return typecked;
|
||||
},
|
||||
() => {
|
||||
throw new CompilerError(
|
||||
`failed to load crate ${name}: crate contains errors`,
|
||||
span,
|
||||
);
|
||||
},
|
||||
);
|
||||
gcx.finalizedCrates.push(typecked);
|
||||
return typecked;
|
||||
};
|
||||
|
|
|
|||
src/parser.ts (138 lines changed)
@ -28,7 +28,8 @@ import {
|
|||
StructLiteralField,
|
||||
TypeDefKind,
|
||||
} from "./ast";
|
||||
import { CompilerError, LoadedFile, Span } from "./error";
|
||||
import { GlobalContext } from "./context";
|
||||
import { CompilerError, ErrorEmitted, LoadedFile, Span } from "./error";
|
||||
import {
|
||||
BaseToken,
|
||||
Token,
|
||||
|
|
@ -39,21 +40,48 @@ import {
|
|||
import { loadModuleFile } from "./loader";
|
||||
import { ComplexMap, ComplexSet, Ids } from "./utils";
|
||||
|
||||
export type ParseState = { tokens: Token[]; file: LoadedFile };
|
||||
export type ParseState = {
|
||||
tokens: Token[];
|
||||
file: LoadedFile;
|
||||
gcx: GlobalContext;
|
||||
};
|
||||
type State = ParseState;
|
||||
|
||||
type Parser<T> = (t: State) => [State, T];
|
||||
|
||||
class FatalParseError extends Error {
|
||||
constructor(public inner: ErrorEmitted) {
|
||||
super("fatal parser error");
|
||||
}
|
||||
}
|
||||
|
||||
export function parse(
|
||||
packageName: string,
|
||||
t: State,
|
||||
crateId: number,
|
||||
): Crate<Built> {
|
||||
const [, items] = parseItems(t);
|
||||
let items: Item<Parsed>[];
|
||||
let fatalError: ErrorEmitted | undefined = undefined;
|
||||
try {
|
||||
[, items] = parseItems(t);
|
||||
} catch (e) {
|
||||
if (e instanceof FatalParseError) {
|
||||
items = [];
|
||||
fatalError = e.inner;
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
const ast: Crate<Built> = buildCrate(packageName, items, crateId, t.file);
|
||||
const ast: Crate<Built> = buildCrate(
|
||||
packageName,
|
||||
items,
|
||||
crateId,
|
||||
t.file,
|
||||
fatalError,
|
||||
);
|
||||
|
||||
validateAst(ast);
|
||||
validateAst(ast, t.gcx);
|
||||
|
||||
return ast;
|
||||
}
|
||||
|
|
@ -200,15 +228,32 @@ function parseItem(t: State): [State, Item<Parsed>] {
|
|||
[t] = expectNext(t, ")");
|
||||
} else {
|
||||
if (name.span.file.path === undefined) {
|
||||
throw new CompilerError(
|
||||
`no known source file for statement, cannot load file relative to it`,
|
||||
name.span,
|
||||
t.gcx.error.emit(
|
||||
new CompilerError(
|
||||
`no known source file for statement, cannot load file relative to it`,
|
||||
name.span,
|
||||
),
|
||||
);
|
||||
}
|
||||
const file = loadModuleFile(name.span.file.path, name.ident, name.span);
|
||||
|
||||
const tokens = tokenize(file);
|
||||
[, contents] = parseItems({ file, tokens });
|
||||
contents = [];
|
||||
} else {
|
||||
const file = loadModuleFile(name.span.file.path, name.ident, name.span);
|
||||
|
||||
if (!file.ok) {
|
||||
t.gcx.error.emit(file.err);
|
||||
contents = [];
|
||||
} else {
|
||||
const tokens = tokenize(t.gcx.error, file.value);
|
||||
if (!tokens.ok) {
|
||||
throw new FatalParseError(tokens.err);
|
||||
}
|
||||
[, contents] = parseItems({
|
||||
file: file.value,
|
||||
tokens: tokens.tokens,
|
||||
gcx: t.gcx,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[t] = expectNext(t, ";");
|
||||
|
|
@ -244,7 +289,7 @@ function parseItem(t: State): [State, Item<Parsed>] {
|
|||
};
|
||||
return [t, global];
|
||||
} else {
|
||||
unexpectedToken(tok, "item");
|
||||
unexpectedToken(t, tok, "item");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -413,7 +458,7 @@ function parseExprCall(t: State): [State, Expr<Parsed>] {
|
|||
} else if (access.kind === "lit_int") {
|
||||
value = access.value;
|
||||
} else {
|
||||
unexpectedToken(access, "identifier or integer");
|
||||
unexpectedToken(t, access, "identifier or integer");
|
||||
}
|
||||
|
||||
lhs = {
|
||||
|
|
@ -467,7 +512,7 @@ function parseExprAtom(startT: State): [State, Expr<Parsed>] {
|
|||
|
||||
return [t, { kind: "tupleLiteral", span, fields: [expr, ...rest] }];
|
||||
}
|
||||
unexpectedToken(peek, "`,`, `;` or `)`");
|
||||
unexpectedToken(t, peek, "`,`, `;` or `)`");
|
||||
}
|
||||
|
||||
if (tok.kind === "lit_string") {
|
||||
|
|
@ -657,9 +702,13 @@ function parseType(t: State): [State, Type<Parsed>] {
|
|||
return [t, { kind: "rawptr", inner, span }];
|
||||
}
|
||||
default: {
|
||||
throw new CompilerError(
|
||||
`unexpected token: \`${tok.kind}\`, expected type`,
|
||||
span,
|
||||
throw new FatalParseError(
|
||||
t.gcx.error.emit(
|
||||
new CompilerError(
|
||||
`unexpected token: \`${tok.kind}\`, expected type`,
|
||||
span,
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@ -688,7 +737,7 @@ function parseCommaSeparatedList<R>(
|
|||
// No comma? Fine, you don't like trailing commas.
|
||||
// But this better be the end.
|
||||
if (peekKind(t) !== terminator) {
|
||||
unexpectedToken(next(t)[1], `, or ${terminator}`);
|
||||
unexpectedToken(t, next(t)[1], `, or ${terminator}`);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
|
@ -720,15 +769,23 @@ function expectNext<T extends BaseToken>(
|
|||
let tok;
|
||||
[t, tok] = maybeNextT(t);
|
||||
if (!tok) {
|
||||
throw new CompilerError(
|
||||
`expected \`${kind}\`, found end of file`,
|
||||
Span.eof(t.file),
|
||||
throw new FatalParseError(
|
||||
t.gcx.error.emit(
|
||||
new CompilerError(
|
||||
`expected \`${kind}\`, found end of file`,
|
||||
Span.eof(t.file),
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
if (tok.kind !== kind) {
|
||||
throw new CompilerError(
|
||||
`expected \`${kind}\`, found \`${tok.kind}\``,
|
||||
tok.span,
|
||||
throw new FatalParseError(
|
||||
t.gcx.error.emit(
|
||||
new CompilerError(
|
||||
`expected \`${kind}\`, found \`${tok.kind}\``,
|
||||
tok.span,
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
return [t, tok as unknown as T & Token];
|
||||
|
|
@ -737,7 +794,11 @@ function expectNext<T extends BaseToken>(
|
|||
function next(t: State): [State, Token] {
|
||||
const [rest, next] = maybeNextT(t);
|
||||
if (!next) {
|
||||
throw new CompilerError("unexpected end of file", Span.eof(t.file));
|
||||
throw new FatalParseError(
|
||||
t.gcx.error.emit(
|
||||
new CompilerError("unexpected end of file", Span.eof(t.file)),
|
||||
),
|
||||
);
|
||||
}
|
||||
return [rest, next];
|
||||
}
|
||||
|
|
@ -749,11 +810,15 @@ function maybeNextT(t: State): [State, Token | undefined] {
|
|||
return [{ ...t, tokens: rest }, next];
|
||||
}
|
||||
|
||||
function unexpectedToken(token: Token, expected: string): never {
|
||||
throw new CompilerError(`unexpected token, expected ${expected}`, token.span);
|
||||
function unexpectedToken(t: ParseState, token: Token, expected: string): never {
|
||||
throw new FatalParseError(
|
||||
t.gcx.error.emit(
|
||||
new CompilerError(`unexpected token, expected ${expected}`, token.span),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
function validateAst(ast: Crate<Built>) {
|
||||
function validateAst(ast: Crate<Built>, gcx: GlobalContext) {
|
||||
const seenItemIds = new ComplexSet();
|
||||
|
||||
const validator: Folder<Built, Built> = {
|
||||
|
|
@ -781,7 +846,10 @@ function validateAst(ast: Crate<Built>) {
|
|||
});
|
||||
return expr;
|
||||
} else if (expr.kind === "let") {
|
||||
throw new CompilerError("let is only allowed in blocks", expr.span);
|
||||
gcx.error.emit(
|
||||
new CompilerError("let is only allowed in blocks", expr.span),
|
||||
);
|
||||
return superFoldExpr(expr, this);
|
||||
} else if (expr.kind === "binary") {
|
||||
const checkPrecedence = (inner: Expr<Built>, side: string) => {
|
||||
if (inner.kind === "binary") {
|
||||
|
|
@ -789,9 +857,11 @@ function validateAst(ast: Crate<Built>) {
|
|||
const innerClass = binaryExprPrecedenceClass(inner.binaryKind);
|
||||
|
||||
if (ourClass !== innerClass) {
|
||||
throw new CompilerError(
|
||||
`mixing operators without parentheses is not allowed. ${side} is ${inner.binaryKind}, which is different from ${expr.binaryKind}`,
|
||||
expr.span,
|
||||
gcx.error.emit(
|
||||
new CompilerError(
|
||||
`mixing operators without parentheses is not allowed. ${side} is ${inner.binaryKind}, which is different from ${expr.binaryKind}`,
|
||||
expr.span,
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@ -821,6 +891,7 @@ function buildCrate(
|
|||
rootItems: Item<Parsed>[],
|
||||
crateId: number,
|
||||
rootFile: LoadedFile,
|
||||
fatalError: ErrorEmitted | undefined,
|
||||
): Crate<Built> {
|
||||
const itemId = new Ids();
|
||||
itemId.next(); // crate root ID
|
||||
|
|
@ -832,6 +903,7 @@ function buildCrate(
|
|||
itemsById: new ComplexMap(),
|
||||
packageName,
|
||||
rootFile,
|
||||
fatalError,
|
||||
};
|
||||
|
||||
const assigner: Folder<Parsed, Built> = {
|
||||
|
|
|
|||
|
|
@ -51,6 +51,8 @@ function printItem(item: Item<AnyPhase>): string {
|
|||
)};`
|
||||
);
|
||||
}
|
||||
case "error":
|
||||
return "<ERROR>";
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -203,6 +205,8 @@ function printExpr(expr: Expr<AnyPhase>, indent: number): string {
|
|||
.map((expr) => printExpr(expr, indent))
|
||||
.join(", ")})`;
|
||||
}
|
||||
case "error":
|
||||
return "<ERROR>";
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -218,6 +222,8 @@ function printType(type: Type<AnyPhase>): string {
|
|||
return `*${printType(type.inner)}`;
|
||||
case "never":
|
||||
return "!";
|
||||
case "error":
|
||||
return "<ERROR>";
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -230,6 +236,9 @@ function printRes(res: Resolution): string {
|
|||
case "builtin": {
|
||||
return `#B`;
|
||||
}
|
||||
case "error": {
|
||||
return "#E";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -274,6 +283,8 @@ export function printTy(ty: Ty): string {
|
|||
case "never": {
|
||||
return "!";
|
||||
}
|
||||
case "error":
|
||||
return "<ERROR>";
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ import {
|
|||
ItemExtern,
|
||||
} from "./ast";
|
||||
import { GlobalContext } from "./context";
|
||||
import { CompilerError, Span } from "./error";
|
||||
import { CompilerError, ErrorEmitted, Span } from "./error";
|
||||
import { ComplexMap } from "./utils";
|
||||
|
||||
const BUILTIN_SET = new Set<string>(BUILTINS);
|
||||
|
|
@ -81,6 +81,7 @@ export function resolve(
|
|||
rootItems,
|
||||
packageName: ast.packageName,
|
||||
rootFile: ast.rootFile,
|
||||
fatalError: ast.fatalError,
|
||||
};
|
||||
}
|
||||
|
||||
|
|
@ -94,12 +95,15 @@ function resolveModule(
|
|||
contents.forEach((item) => {
|
||||
const existing = items.get(item.name);
|
||||
if (existing !== undefined) {
|
||||
throw new CompilerError(
|
||||
`item \`${item.name}\` has already been declared`,
|
||||
item.span,
|
||||
cx.gcx.error.emit(
|
||||
new CompilerError(
|
||||
`item \`${item.name}\` has already been declared`,
|
||||
item.span,
|
||||
),
|
||||
);
|
||||
} else {
|
||||
items.set(item.name, item.id);
|
||||
}
|
||||
items.set(item.name, item.id);
|
||||
});
|
||||
|
||||
const scopes: string[] = [];
|
||||
|
|
@ -148,7 +152,12 @@ function resolveModule(
|
|||
return { kind: "builtin", name: ident.name as BuiltinName };
|
||||
}
|
||||
|
||||
throw new CompilerError(`cannot find ${ident.name}`, ident.span);
|
||||
return {
|
||||
kind: "error",
|
||||
err: cx.gcx.error.emit(
|
||||
new CompilerError(`cannot find ${ident.name}`, ident.span),
|
||||
),
|
||||
};
|
||||
};
|
||||
|
||||
const blockLocals: LocalInfo[][] = [];
|
||||
|
|
@ -260,30 +269,39 @@ function resolveModule(
|
|||
const module = cx.gcx.findItem(res.id, cx.ast);
|
||||
|
||||
if (module.kind === "mod" || module.kind === "extern") {
|
||||
let pathRes: Resolution;
|
||||
|
||||
if (typeof expr.field.value === "number") {
|
||||
throw new CompilerError(
|
||||
"module contents cannot be indexed with a number",
|
||||
expr.field.span,
|
||||
const err: ErrorEmitted = cx.gcx.error.emit(
|
||||
new CompilerError(
|
||||
"module contents cannot be indexed with a number",
|
||||
expr.field.span,
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
const pathResItem = resolveModItem(
|
||||
cx,
|
||||
module,
|
||||
expr.field.value,
|
||||
);
|
||||
if (pathResItem === undefined) {
|
||||
throw new CompilerError(
|
||||
`module ${module.name} has no item ${expr.field.value}`,
|
||||
expr.field.span,
|
||||
pathRes = { kind: "error", err };
|
||||
} else {
|
||||
const pathResItem = resolveModItem(
|
||||
cx,
|
||||
module,
|
||||
expr.field.value,
|
||||
);
|
||||
}
|
||||
|
||||
const pathRes: Resolution = { kind: "item", id: pathResItem };
|
||||
if (pathResItem === undefined) {
|
||||
const err: ErrorEmitted = cx.gcx.error.emit(
|
||||
new CompilerError(
|
||||
`module ${module.name} has no item ${expr.field.value}`,
|
||||
expr.field.span,
|
||||
),
|
||||
);
|
||||
pathRes = { kind: "error", err };
|
||||
} else {
|
||||
pathRes = { kind: "item", id: pathResItem };
|
||||
}
|
||||
}
|
||||
const span = lhs.span.merge(expr.field.span);
|
||||
return {
|
||||
kind: "path",
|
||||
segments: [...segments, expr.field.value],
|
||||
segments: [...segments, String(expr.field.value)],
|
||||
value: { res: pathRes, span },
|
||||
span,
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,11 +1,13 @@
|
|||
import { TY_INT, TY_STRING, TY_UNIT } from "./ast";
|
||||
import { Span } from "./error";
|
||||
import { Emitter, ErrorHandler, Span } from "./error";
|
||||
import { InferContext } from "./typeck";
|
||||
|
||||
const SPAN: Span = Span.startOfFile({ content: "" });
|
||||
|
||||
const dummyEmitter: Emitter = () => {};
|
||||
|
||||
it("should infer types across assignments", () => {
|
||||
const infcx = new InferContext();
|
||||
const infcx = new InferContext(new ErrorHandler(dummyEmitter));
|
||||
|
||||
const a = infcx.newVar();
|
||||
const b = infcx.newVar();
|
||||
|
|
@ -26,7 +28,9 @@ it("should infer types across assignments", () => {
|
|||
});
|
||||
|
||||
it("should conflict assignments to resolvable type vars", () => {
|
||||
const infcx = new InferContext();
|
||||
let errorLines = 0;
|
||||
const emitter = () => (errorLines += 1);
|
||||
const infcx = new InferContext(new ErrorHandler(emitter));
|
||||
|
||||
const a = infcx.newVar();
|
||||
const b = infcx.newVar();
|
||||
|
|
@ -34,11 +38,15 @@ it("should conflict assignments to resolvable type vars", () => {
|
|||
infcx.assign(a, b, SPAN);
|
||||
infcx.assign(b, TY_INT, SPAN);
|
||||
|
||||
expect(() => infcx.assign(a, TY_STRING, SPAN)).toThrow();
|
||||
expect(errorLines).toEqual(0);
|
||||
|
||||
infcx.assign(a, TY_STRING, SPAN);
|
||||
|
||||
expect(errorLines).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it("should not cycle", () => {
|
||||
const infcx = new InferContext();
|
||||
const infcx = new InferContext(new ErrorHandler(dummyEmitter));
|
||||
|
||||
const a = infcx.newVar();
|
||||
const b = infcx.newVar();
|
||||
|
|
|
|||
src/typeck.ts (404 lines changed)
@ -32,7 +32,13 @@ import {
|
|||
ExprCall,
|
||||
} from "./ast";
|
||||
import { GlobalContext } from "./context";
|
||||
import { CompilerError, Span, unreachable } from "./error";
|
||||
import {
|
||||
CompilerError,
|
||||
ErrorEmitted,
|
||||
ErrorHandler,
|
||||
Span,
|
||||
unreachable,
|
||||
} from "./error";
|
||||
import { printTy } from "./printer";
|
||||
import { ComplexMap } from "./utils";
|
||||
|
||||
|
|
@ -52,7 +58,22 @@ function mkTyFn(params: Ty[], returnTy: Ty): Ty {
|
|||
return { kind: "fn", params, returnTy };
|
||||
}
|
||||
|
||||
function builtinAsTy(name: string, span: Span): Ty {
|
||||
function tyError(cx: TypeckCtx, err: CompilerError): Ty {
|
||||
return {
|
||||
kind: "error",
|
||||
err: emitError(cx, err),
|
||||
};
|
||||
}
|
||||
|
||||
function tyErrorFrom(prev: { err: ErrorEmitted }): Ty {
|
||||
return { kind: "error", err: prev.err };
|
||||
}
|
||||
|
||||
function emitError(cx: TypeckCtx, err: CompilerError): ErrorEmitted {
|
||||
return cx.gcx.error.emit(err);
|
||||
}
|
||||
|
||||
function builtinAsTy(cx: TypeckCtx, name: string, span: Span): Ty {
|
||||
switch (name) {
|
||||
case "String": {
|
||||
return TY_STRING;
|
||||
|
|
@ -67,12 +88,12 @@ function builtinAsTy(name: string, span: Span): Ty {
|
|||
return TY_BOOL;
|
||||
}
|
||||
default: {
|
||||
throw new CompilerError(`\`${name}\` is not a type`, span);
|
||||
return tyError(cx, new CompilerError(`\`${name}\` is not a type`, span));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function typeOfBuiltinValue(name: BuiltinName, span: Span): Ty {
|
||||
function typeOfBuiltinValue(cx: TypeckCtx, name: BuiltinName, span: Span): Ty {
|
||||
switch (name) {
|
||||
case "false":
|
||||
case "true":
|
||||
|
|
@ -96,7 +117,10 @@ function typeOfBuiltinValue(name: BuiltinName, span: Span): Ty {
|
|||
case "__i32_extend_to_i64_u":
|
||||
return mkTyFn([TY_I32], TY_INT);
|
||||
default: {
|
||||
throw new CompilerError(`\`${name}\` cannot be used as a value`, span);
|
||||
return tyError(
|
||||
cx,
|
||||
new CompilerError(`\`${name}\` cannot be used as a value`, span),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -115,7 +139,10 @@ function lowerAstTy(cx: TypeckCtx, type: Type<Resolved>): Ty {
|
|||
return typeOfItem(cx, res.id, type.span);
|
||||
}
|
||||
case "builtin": {
|
||||
return builtinAsTy(res.name, ident.span);
|
||||
return builtinAsTy(cx, res.name, ident.span);
|
||||
}
|
||||
case "error": {
|
||||
return tyErrorFrom(res);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -134,9 +161,9 @@ function lowerAstTy(cx: TypeckCtx, type: Type<Resolved>): Ty {
|
|||
case "rawptr": {
|
||||
const inner = lowerAstTy(cx, type.inner);
|
||||
if (inner.kind !== "struct") {
|
||||
throw new CompilerError(
|
||||
"raw pointers must point to structs",
|
||||
type.span,
|
||||
return tyError(
|
||||
cx,
|
||||
new CompilerError("raw pointers must point to structs", type.span),
|
||||
);
|
||||
}
|
||||
|
||||
|
|
@ -145,6 +172,9 @@ function lowerAstTy(cx: TypeckCtx, type: Type<Resolved>): Ty {
|
|||
case "never": {
|
||||
return TY_NEVER;
|
||||
}
|
||||
case "error": {
|
||||
return tyErrorFrom(type);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -161,15 +191,21 @@ function typeOfItem(cx: TypeckCtx, itemId: ItemId, cause: Span): Ty {
|
|||
case "global":
|
||||
return item.ty!;
|
||||
case "mod": {
|
||||
throw new CompilerError(
|
||||
`module ${item.name} cannot be used as a type or value`,
|
||||
cause,
|
||||
return tyError(
|
||||
cx,
|
||||
new CompilerError(
|
||||
`module ${item.name} cannot be used as a type or value`,
|
||||
cause,
|
||||
),
|
||||
);
|
||||
}
|
||||
case "extern": {
|
||||
throw new CompilerError(
|
||||
`extern declaration ${item.name} cannot be used as a type or value`,
|
||||
cause,
|
||||
return tyError(
|
||||
cx,
|
||||
new CompilerError(
|
||||
`extern declaration ${item.name} cannot be used as a type or value`,
|
||||
cause,
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@ -181,9 +217,12 @@ function typeOfItem(cx: TypeckCtx, itemId: ItemId, cause: Span): Ty {
|
|||
return cachedTy;
|
||||
}
|
||||
if (cachedTy === null) {
|
||||
throw new CompilerError(
|
||||
`cycle computing type of #G${itemId.toString()}`,
|
||||
item.span,
|
||||
return tyError(
|
||||
cx,
|
||||
new CompilerError(
|
||||
`cycle computing type of #G${itemId.toString()}`,
|
||||
item.span,
|
||||
),
|
||||
);
|
||||
}
|
||||
cx.itemTys.set(itemId, null);
|
||||
|
|
@ -230,21 +269,30 @@ function typeOfItem(cx: TypeckCtx, itemId: ItemId, cause: Span): Ty {
|
|||
break;
|
||||
}
|
||||
case "mod": {
|
||||
throw new CompilerError(
|
||||
`module ${item.name} cannot be used as a type or value`,
|
||||
cause,
|
||||
return tyError(
|
||||
cx,
|
||||
new CompilerError(
|
||||
`module ${item.name} cannot be used as a type or value`,
|
||||
cause,
|
||||
),
|
||||
);
|
||||
}
|
||||
case "extern": {
|
||||
throw new CompilerError(
|
||||
`extern declaration ${item.name} cannot be used as a type or value`,
|
||||
cause,
|
||||
return tyError(
|
||||
cx,
|
||||
new CompilerError(
|
||||
`extern declaration ${item.name} cannot be used as a type or value`,
|
||||
cause,
|
||||
),
|
||||
);
|
||||
}
|
||||
case "global": {
|
||||
ty = lowerAstTy(cx, item.type);
|
||||
break;
|
||||
}
|
||||
case "error": {
|
||||
return tyErrorFrom(item);
|
||||
}
|
||||
}
|
||||
|
||||
cx.itemTys.set(item.id, ty);
|
||||
|
|
@ -286,9 +334,12 @@ export function typeck(
|
|||
case "i32":
|
||||
break;
|
||||
default: {
|
||||
throw new CompilerError(
|
||||
`import parameters must be I32 or Int`,
|
||||
item.params[i].span,
|
||||
emitError(
|
||||
cx,
|
||||
new CompilerError(
|
||||
`import parameters must be I32 or Int`,
|
||||
item.params[i].span,
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@ -300,9 +351,12 @@ export function typeck(
|
|||
case "i32":
|
||||
break;
|
||||
default: {
|
||||
throw new CompilerError(
|
||||
`import return must be I32, Int or ()`,
|
||||
item.returnType!.span,
|
||||
emitError(
|
||||
cx,
|
||||
new CompilerError(
|
||||
`import return must be I32, Int or ()`,
|
||||
item.returnType!.span,
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@ -323,12 +377,16 @@ export function typeck(
|
|||
const fieldNames = new Set();
|
||||
item.type.fields.forEach(({ name }) => {
|
||||
if (fieldNames.has(name)) {
|
||||
throw new CompilerError(
|
||||
`type ${item.name} has a duplicate field: ${name.name}`,
|
||||
name.span,
|
||||
emitError(
|
||||
cx,
|
||||
new CompilerError(
|
||||
`type ${item.name} has a duplicate field: ${name.name}`,
|
||||
name.span,
|
||||
),
|
||||
);
|
||||
} else {
|
||||
fieldNames.add(name);
|
||||
}
|
||||
fieldNames.add(name);
|
||||
});
|
||||
|
||||
return {
|
||||
|
|
@ -363,23 +421,32 @@ export function typeck(
|
|||
const ty = typeOfItem(cx, item.id, item.span);
|
||||
const { init } = item;
|
||||
|
||||
let initChecked: Expr<Typecked>;
|
||||
if (init.kind !== "literal" || init.value.kind !== "int") {
|
||||
throw new CompilerError(
|
||||
"globals must be initialized with an integer literal",
|
||||
init.span,
|
||||
const err: ErrorEmitted = emitError(
|
||||
cx,
|
||||
new CompilerError(
|
||||
"globals must be initialized with an integer literal",
|
||||
init.span,
|
||||
),
|
||||
);
|
||||
initChecked = exprError(err, init.span);
|
||||
} else {
|
||||
const initTy = init.value.type === "I32" ? TY_I32 : TY_INT;
|
||||
const infcx = new InferContext(cx.gcx.error);
|
||||
infcx.assign(ty, initTy, init.span);
|
||||
initChecked = { ...init, ty };
|
||||
}
|
||||
|
||||
const initTy = init.value.type === "I32" ? TY_I32 : TY_INT;
|
||||
const infcx = new InferContext();
|
||||
infcx.assign(ty, initTy, init.span);
|
||||
|
||||
return {
|
||||
...item,
|
||||
ty,
|
||||
init: { ...init, ty },
|
||||
init: initChecked,
|
||||
};
|
||||
}
|
||||
case "error": {
|
||||
return { ...item };
|
||||
}
|
||||
}
|
||||
},
|
||||
expr(_expr) {
|
||||
|
|
@ -398,9 +465,12 @@ export function typeck(
|
|||
const main = typecked.rootItems.find((item) => {
|
||||
if (item.kind === "function" && item.name === "main") {
|
||||
if (!tyIsUnit(item.ty!.returnTy)) {
|
||||
throw new CompilerError(
|
||||
`\`main\` has an invalid signature. main takes no arguments and returns nothing`,
|
||||
item.span,
|
||||
emitError(
|
||||
cx,
|
||||
new CompilerError(
|
||||
`\`main\` has an invalid signature. main takes no arguments and returns nothing`,
|
||||
item.span,
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
|
|
@ -412,15 +482,19 @@ export function typeck(
|
|||
if (ast.id === 0) {
|
||||
// Only the final id=0 crate needs and cares about main.
|
||||
if (!main) {
|
||||
throw new CompilerError(
|
||||
`\`main\` function not found`,
|
||||
Span.startOfFile(ast.rootFile),
|
||||
emitError(
|
||||
cx,
|
||||
new CompilerError(
|
||||
`\`main\` function not found`,
|
||||
Span.startOfFile(ast.rootFile),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
typecked.typeckResults = {
|
||||
main: { kind: "item", id: main.id },
|
||||
};
|
||||
typecked.typeckResults = { main: undefined };
|
||||
if (main) {
|
||||
typecked.typeckResults.main = { kind: "item", id: main.id };
|
||||
}
|
||||
}
|
||||
|
||||
return typecked;
|
||||
|
|
@ -442,6 +516,8 @@ type TyVarRes =
|
|||
export class InferContext {
|
||||
tyVars: TyVarRes[] = [];
|
||||
|
||||
constructor(public error: ErrorHandler) {}
|
||||
|
||||
public newVar(): Ty {
|
||||
const index = this.tyVars.length;
|
||||
this.tyVars.push({ kind: "unknown" });
|
||||
|
|
@ -524,7 +600,13 @@ export class InferContext {
|
|||
return;
|
||||
}
|
||||
|
||||
if (lhs.kind === "error" || rhs.kind === "error") {
|
||||
// This Is Fine 🐶🔥.
|
||||
return;
|
||||
}
|
||||
|
||||
if (rhs.kind === "never") {
|
||||
// not sure whether this is entirely correct wrt inference.. it will work out, probably.
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
@ -585,9 +667,11 @@ export class InferContext {
|
|||
}
|
||||
}
|
||||
|
||||
throw new CompilerError(
|
||||
`cannot assign ${printTy(rhs)} to ${printTy(lhs)}`,
|
||||
span,
|
||||
this.error.emit(
|
||||
new CompilerError(
|
||||
`cannot assign ${printTy(rhs)} to ${printTy(lhs)}`,
|
||||
span,
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@ -612,17 +696,28 @@ function typeOfValue(fcx: FuncCtx, res: Resolution, span: Span): Ty {
|
|||
return typeOfItem(fcx.cx, res.id, span);
|
||||
}
|
||||
case "builtin":
|
||||
return typeOfBuiltinValue(res.name, span);
|
||||
return typeOfBuiltinValue(fcx.cx, res.name, span);
|
||||
case "error":
|
||||
return tyErrorFrom(res);
|
||||
}
|
||||
}
|
||||
|
||||
function exprError(err: ErrorEmitted, span: Span): Expr<Typecked> {
|
||||
return {
|
||||
kind: "error",
|
||||
err,
|
||||
span,
|
||||
ty: tyErrorFrom({ err }),
|
||||
};
|
||||
}
|
||||
|
||||
export function checkBody(
|
||||
cx: TypeckCtx,
|
||||
ast: Crate<Resolved>,
|
||||
body: Expr<Resolved>,
|
||||
fnTy: TyFn,
|
||||
): Expr<Typecked> {
|
||||
const infcx = new InferContext();
|
||||
const infcx = new InferContext(cx.gcx.error);
|
||||
|
||||
const fcx: FuncCtx = {
|
||||
cx,
|
||||
|
|
@ -683,26 +778,32 @@ export function checkBody(
|
|||
case "item": {
|
||||
const item = cx.gcx.findItem(res.id, ast);
|
||||
if (item.kind !== "global") {
|
||||
throw new CompilerError("cannot assign to item", expr.span);
|
||||
emitError(
|
||||
fcx.cx,
|
||||
new CompilerError("cannot assign to item", expr.span),
|
||||
);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case "builtin":
|
||||
throw new CompilerError(
|
||||
"cannot assign to builtins",
|
||||
expr.span,
|
||||
emitError(
|
||||
fcx.cx,
|
||||
new CompilerError("cannot assign to builtins", expr.span),
|
||||
);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case "fieldAccess": {
|
||||
checkLValue(lhs);
|
||||
checkLValue(cx, lhs);
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
throw new CompilerError(
|
||||
"invalid left-hand side of assignment",
|
||||
lhs.span,
|
||||
emitError(
|
||||
fcx.cx,
|
||||
new CompilerError(
|
||||
"invalid left-hand side of assignment",
|
||||
lhs.span,
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@@ -764,7 +865,7 @@ export function checkBody(
case "unary": {
const rhs = this.expr(expr.rhs);
rhs.ty = infcx.resolveIfPossible(rhs.ty);
return checkUnary(expr, rhs);
return checkUnary(fcx, expr, rhs);
}
case "call": {
return checkCall(fcx, expr);

@@ -775,7 +876,7 @@ export function checkBody(

const { field } = expr;
let ty: Ty;
let fieldIdx: number;
let fieldIdx: number | undefined;
switch (lhs.ty.kind) {
case "tuple": {
const { elems } = lhs.ty;

@@ -784,15 +885,21 @@ export function checkBody(
ty = elems[field.value];
fieldIdx = field.value;
} else {
throw new CompilerError(
`tuple with ${elems.length} elements cannot be indexed with ${field.value}`,
field.span,
ty = tyError(
fcx.cx,
new CompilerError(
`tuple with ${elems.length} elements cannot be indexed with ${field.value}`,
field.span,
),
);
}
} else {
throw new CompilerError(
"tuple fields must be accessed with numbers",
field.span,
ty = tyError(
fcx.cx,
new CompilerError(
"tuple fields must be accessed with numbers",
field.span,
),
);
}
break;

@@ -804,30 +911,40 @@ export function checkBody(
if (typeof field.value === "string") {
const idx = fields.findIndex(([name]) => name === field.value);
if (idx === -1) {
throw new CompilerError(
`field \`${field.value}\` does not exist on ${printTy(
lhs.ty,
)}`,
field.span,
ty = tyError(
fcx.cx,
new CompilerError(
`field \`${field.value}\` does not exist on ${printTy(
lhs.ty,
)}`,
field.span,
),
);
break;
}

ty = fields[idx][1];
fieldIdx = idx;
} else {
throw new CompilerError(
"struct fields must be accessed with their name",
field.span,
ty = tyError(
fcx.cx,
new CompilerError(
"struct fields must be accessed with their name",
field.span,
),
);
}
break;
}
default: {
throw new CompilerError(
`cannot access field \`${field.value}\` on type \`${printTy(
lhs.ty,
)}\``,
expr.span,
ty = tyError(
fcx.cx,
new CompilerError(
`cannot access field \`${field.value}\` on type \`${printTy(
lhs.ty,
)}\``,
expr.span,
),
);
}
}
@@ -881,7 +998,11 @@ export function checkBody(
case "break": {
const loopStateLength = fcx.loopState.length;
if (loopStateLength === 0) {
throw new CompilerError("break outside loop", expr.span);
const err: ErrorEmitted = emitError(
fcx.cx,
new CompilerError("break outside loop", expr.span),
);
return exprError(err, expr.span);
}
const target = fcx.loopState[loopStateLength - 1].loopId;
fcx.loopState[loopStateLength - 1].hasBreak = true;

@@ -900,10 +1021,14 @@ export function checkBody(
const structTy = typeOfValue(fcx, expr.name.res, expr.name.span);

if (structTy.kind !== "struct") {
throw new CompilerError(
`struct literal is only allowed for struct types`,
expr.span,
const err: ErrorEmitted = emitError(
fcx.cx,
new CompilerError(
`struct literal is only allowed for struct types`,
expr.span,
),
);
return exprError(err, expr.span);
}

const assignedFields = new Set();

@@ -913,9 +1038,12 @@ export function checkBody(
(def) => def[0] === name.name,
);
if (fieldIdx == -1) {
throw new CompilerError(
`field ${name.name} doesn't exist on type ${expr.name.name}`,
name.span,
emitError(
fcx.cx,
new CompilerError(
`field ${name.name} doesn't exist on type ${expr.name.name}`,
name.span,
),
);
}
const fieldTy = structTy.fields[fieldIdx];

@@ -931,9 +1059,12 @@ export function checkBody(
}
});
if (missing.length > 0) {
throw new CompilerError(
`missing fields in literal: ${missing.join(", ")}`,
expr.span,
emitError(
fcx.cx,
new CompilerError(
`missing fields in literal: ${missing.join(", ")}`,
expr.span,
),
);
}

@@ -949,6 +1080,9 @@ export function checkBody(

return { ...expr, fields, ty };
}
case "error": {
return { ...expr, ty: tyErrorFrom(expr) };
}
}
},
itemInner(_item) {
@@ -968,23 +1102,23 @@ export function checkBody(

infcx.assign(fnTy.returnTy, checked.ty, body.span);

const resolved = resolveBody(infcx, checked);
const resolved = resolveBody(fcx, checked);

return resolved;
}

function checkLValue(expr: Expr<Typecked>) {
function checkLValue(cx: TypeckCtx, expr: Expr<Typecked>) {
switch (expr.kind) {
case "ident":
case "path":
break;
case "fieldAccess":
checkLValue(expr.lhs);
checkLValue(cx, expr.lhs);
break;
default:
throw new CompilerError(
"invalid left-hand side of assignment",
expr.span,
emitError(
cx,
new CompilerError("invalid left-hand side of assignment", expr.span),
);
}
}

@@ -1035,15 +1169,20 @@ function checkBinary(
}
}

throw new CompilerError(
`invalid types for binary operation: ${printTy(lhs.ty)} ${
expr.binaryKind
} ${printTy(rhs.ty)}`,
expr.span,
const ty = tyError(
fcx.cx,
new CompilerError(
`invalid types for binary operation: ${printTy(lhs.ty)} ${
expr.binaryKind
} ${printTy(rhs.ty)}`,
expr.span,
),
);
return { ...expr, lhs, rhs, ty };
}

function checkUnary(
fcx: FuncCtx,
expr: Expr<Resolved> & ExprUnary<Resolved>,
rhs: Expr<Typecked>,
): Expr<Typecked> {

@@ -1060,10 +1199,14 @@ function checkUnary(
// Negating an unsigned integer is a bad idea.
}

throw new CompilerError(
`invalid types for unary operation: ${expr.unaryKind} ${printTy(rhs.ty)}`,
expr.span,
const ty = tyError(
fcx.cx,
new CompilerError(
`invalid types for unary operation: ${expr.unaryKind} ${printTy(rhs.ty)}`,
expr.span,
),
);
return { ...expr, rhs, ty };
}

function checkCall(
@@ -1089,21 +1232,30 @@ function checkCall(

const lhs = fcx.checkExpr(expr.lhs);
lhs.ty = fcx.infcx.resolveIfPossible(lhs.ty);

// check args before checking the lhs.
const args = expr.args.map((arg) => fcx.checkExpr(arg));

const lhsTy = lhs.ty;
if (lhsTy.kind !== "fn") {
throw new CompilerError(
`expression of type ${printTy(lhsTy)} is not callable`,
lhs.span,
const ty = tyError(
fcx.cx,
new CompilerError(
`expression of type ${printTy(lhsTy)} is not callable`,
lhs.span,
),
);
return { ...expr, lhs, args, ty };
}

const args = expr.args.map((arg) => fcx.checkExpr(arg));

lhsTy.params.forEach((param, i) => {
if (args.length <= i) {
throw new CompilerError(
`missing argument of type ${printTy(param)}`,
expr.span,
emitError(
fcx.cx,
new CompilerError(
`missing argument of type ${printTy(param)}`,
expr.span,
),
);
}
@@ -1111,24 +1263,24 @@ function checkCall(
});

if (args.length > lhsTy.params.length) {
throw new CompilerError(
`too many arguments passed, expected ${lhsTy.params.length}, found ${args.length}`,
expr.span,
emitError(
fcx.cx,
new CompilerError(
`too many arguments passed, expected ${lhsTy.params.length}, found ${args.length}`,
expr.span,
),
);
}

return { ...expr, lhs, args, ty: lhsTy.returnTy };
}

function resolveBody(
infcx: InferContext,
checked: Expr<Typecked>,
): Expr<Typecked> {
function resolveBody(fcx: FuncCtx, checked: Expr<Typecked>): Expr<Typecked> {
const resolveTy = (ty: Ty, span: Span) => {
const resTy = infcx.resolveIfPossible(ty);
const resTy = fcx.infcx.resolveIfPossible(ty);
// TODO: When doing deep resolution, we need to check for _any_ vars.
if (resTy.kind === "var") {
throw new CompilerError("cannot infer type", span);
return tyError(fcx.cx, new CompilerError("cannot infer type", span));
}
return resTy;
};
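Taken together, the typechecker now swaps every hard failure for either an emitted diagnostic or an `error` type/expression node, so the rest of the body still gets checked and later errors are not hidden. A rough sketch of how a downstream pass can cope with those nodes (simplified stand-in types, not the crate's actual AST; in practice a driver would bail out after type checking if anything was emitted):

// Sketch only: a later pass can treat `error` nodes as "already reported"
// and simply produce nothing for them, instead of crashing.
type ErrorEmitted = { readonly errorEmitted: true };

type TypedExpr =
  | { kind: "literal"; value: number }
  | { kind: "binary"; lhs: TypedExpr; rhs: TypedExpr }
  | { kind: "error"; err: ErrorEmitted };

function lowerExpr(expr: TypedExpr, out: string[]): void {
  switch (expr.kind) {
    case "literal":
      out.push(`const ${expr.value}`);
      break;
    case "binary":
      lowerExpr(expr.lhs, out);
      lowerExpr(expr.rhs, out);
      out.push("add");
      break;
    case "error":
      // The diagnostic was emitted when this node was created, so there is
      // nothing left to report and nothing meaningful to generate here.
      break;
  }
}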
7
test.nil
@@ -1,6 +1,11 @@
type A = struct { a: Int };

function main() = (
let a = A { a: 0 };
let a: Int = "";
let b: Int = "";
c;
);

function rawr(a: *A) = (
a.a = 1;
);
5
ui-tests/basic_recovery.nil
Normal file
@@ -0,0 +1,5 @@
function main() = (
let a: Int = "";
let b: Int = "";
c;
);
12
ui-tests/basic_recovery.stderr
Normal file
@@ -0,0 +1,12 @@
error: cannot find c
--> $DIR/basic_recovery.nil:4
4 | c;
^
error: cannot assign String to Int
--> $DIR/basic_recovery.nil:2
2 | let a: Int = "";
^
error: cannot assign String to Int
--> $DIR/basic_recovery.nil:3
3 | let b: Int = "";
^
@@ -2,3 +2,7 @@ error: unexpected end of file
--> $DIR/mismatched_parens.nil:2
2 |
^
error: `main` function not found
--> $DIR/mismatched_parens.nil:1
1 | function main() = (
^