refactorings

nora 2023-08-02 19:18:52 +02:00
parent 7eeaf548d0
commit 2f1f4a9798
12 changed files with 85 additions and 100 deletions

View file

@@ -1,6 +1,6 @@
-import { DUMMY_SPAN, LoadedFile, Span } from "./error";
+import { LoadedFile, Span } from "./error";
 import { LitIntType } from "./lexer";
-import { ComplexMap, unwrap } from "./utils";
+import { ComplexMap } from "./utils";
 
 export type Phase = {
   res: unknown;
@@ -57,7 +57,7 @@ export type Crate<P extends Phase> = {
   rootItems: Item<P>[];
   itemsById: ComplexMap<ItemId, Item<P>>;
   packageName: string;
-  rootFile: LoadedFile,
+  rootFile: LoadedFile;
 } & P["typeckResults"];
 
 export type DepCrate = Crate<Final>;
@@ -85,6 +85,10 @@ export class ItemId {
     return new ItemId(999999, 999999);
   }
 
+  static crateRoot(crate: CrateId): ItemId {
+    return new ItemId(crate, 0);
+  }
+
   toString(): string {
     if (this.crateId === 0) {
       return `${this.itemIdx}`;
@@ -535,29 +539,6 @@ export type TypeckResults = {
   main: Resolution | undefined;
 };
 
-export function findCrateItem<P extends Phase>(
-  crate: Crate<P>,
-  id: ItemId
-): Item<P> {
-  if (id.crateId !== crate.id) {
-    throw new Error("trying to get item from the wrong crate");
-  }
-  if (id.itemIdx === 0) {
-    // Return a synthetic module representing the crate root.
-    return {
-      kind: "mod",
-      node: {
-        contents: crate.rootItems,
-        name: crate.packageName,
-      },
-      span: DUMMY_SPAN,
-      id,
-    };
-  }
-  return unwrap(crate.itemsById.get(id));
-}
-
 // folders
 export type FoldFn<From, To> = (value: From) => To;

View file

@@ -1,5 +1,5 @@
 import { Crate, DepCrate, Final, Item, ItemId, Phase } from "./ast";
-import { DUMMY_SPAN, Span } from "./error";
+import { Span } from "./error";
 import { Ids, unwrap } from "./utils";
 import fs from "fs";
 import path from "path";
@@ -20,17 +20,19 @@ export type CrateLoader = (
  * dependencies (which also use the same context) do not care about that.
  */
 export class GlobalContext {
-  public depCrates: Crate<Final>[] = [];
+  public finalizedCrates: Crate<Final>[] = [];
   public crateId: Ids = new Ids();
 
   constructor(public opts: Options, public crateLoader: CrateLoader) {}
 
   public findItem<P extends Phase>(
     id: ItemId,
-    localCrate: Crate<P>
+    localCrate?: Crate<P>
   ): Item<P | Final> {
     const crate = unwrap(
-      [localCrate, ...this.depCrates].find((crate) => crate.id === id.crateId)
+      [...(localCrate ? [localCrate] : []), ...this.finalizedCrates].find(
+        (crate) => crate.id === id.crateId
+      )
     );
 
     if (id.itemIdx === 0) {
@@ -41,7 +43,7 @@ export class GlobalContext {
         contents: crate.rootItems,
         name: crate.packageName,
       },
-      span: DUMMY_SPAN,
+      span: Span.startOfFile(crate.rootFile),
       id,
     };
   }
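
Note: a minimal sketch of the new lookup shape, assuming the repo's ./ast and ./context modules (the helper name is illustrative and CrateId is assumed to be a plain numeric id):

import { ItemId } from "./ast";
import { GlobalContext } from "./context";

// The removed findCrateItem(crate, id) is subsumed by gcx.findItem: it checks the
// optional local crate first, then gcx.finalizedCrates, and for itemIdx === 0 it
// synthesizes a "mod" item whose span starts at the crate's root file.
function lookupCrateRoot(gcx: GlobalContext, crateId: number) {
  return gcx.findItem(ItemId.crateRoot(crateId));
}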

View file

@@ -3,30 +3,35 @@ export type LoadedFile = {
   content: string;
 };
 
-export type Span = {
-  start: number;
-  end: number;
-  file: LoadedFile;
-};
-
-export function spanMerge(a: Span, b: Span): Span {
-  if (a.file !== b.file) {
-    throw new Error("cannot merge spans from different files");
-  }
-  return {
-    start: Math.min(a.start, b.start),
-    end: Math.max(a.end, b.end),
-    file: a.file,
-  };
-}
-
-export const DUMMY_SPAN: Span = { start: 0, end: 0, file: { content: "" } };
-
-export const eofSpan = (file: LoadedFile): Span => ({
-  start: Number.MAX_SAFE_INTEGER,
-  end: Number.MAX_SAFE_INTEGER,
-  file,
-});
+export class Span {
+  constructor(
+    public start: number,
+    public end: number,
+    public file: LoadedFile
+  ) {}
+
+  public merge(b: Span): Span {
+    if (this.file !== b.file) {
+      throw new Error("cannot merge spans from different files");
+    }
+    return new Span(
+      Math.min(this.start, b.start),
+      Math.max(this.end, b.end),
+      this.file
+    );
+  }
+
+  public static eof(file: LoadedFile): Span {
+    return new Span(Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, file);
+  }
+
+  public static startOfFile(file: LoadedFile): Span {
+    return new Span(0, 1, file);
+  }
+
+  public static DUMMY: Span = new Span(0, 0, { content: "" });
+}
 
 export class CompilerError extends Error {
   msg: string;
@@ -98,11 +103,11 @@ function spanToSnippet(input: string, span: Span): string {
 }
 
 export function lines(file: LoadedFile): Span[] {
-  const lines: Span[] = [{ start: 0, end: 0, file }];
+  const lines: Span[] = [new Span(0, 0, file)];
   for (let i = 0; i < file.content.length; i++) {
     if (file.content[i] === "\n") {
-      lines.push({ start: i + 1, end: i + 1, file });
+      lines.push(new Span(i + 1, i + 1, file));
     } else {
       lines[lines.length - 1].end++;
     }
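
Note: a small usage sketch of the new Span API, assuming only what this diff introduces (the old spanMerge/eofSpan/DUMMY_SPAN helpers become a method and statics; file contents and variable names here are made up):

import { LoadedFile, Span } from "./error";

const file: LoadedFile = { content: "function main() = ();" };

const a = new Span(0, 8, file);          // replaces the old { start, end, file } literal
const b = new Span(9, 13, file);
const merged = a.merge(b);               // was spanMerge(a, b)
const atEof = Span.eof(file);            // was eofSpan(file)
const atStart = Span.startOfFile(file);  // new helper covering offsets 0..1
const dummy = Span.DUMMY;                // was DUMMY_SPAN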

View file

@@ -1,4 +1,4 @@
-import { LoadedFile, withErrorPrinter } from "./error";
+import { LoadedFile, Span, withErrorPrinter } from "./error";
 import { isValidIdent, tokenize } from "./lexer";
 import { lower as lowerToWasm } from "./lower";
 import { ParseState, parse } from "./parser";
@@ -13,8 +13,6 @@ import { GlobalContext, parseArgs } from "./context";
 import { loadCrate } from "./loader";
 
 const INPUT = `
-extern mod std;
-
 type A = { a: Int };
 
 function main() = (
@@ -49,6 +47,8 @@ function main() {
   const gcx = new GlobalContext(opts, loadCrate);
   const mainCrate = gcx.crateId.next();
 
+  gcx.crateLoader(gcx, "std", Span.startOfFile(file));
+
   withErrorPrinter(
     () => {
       const start = Date.now();
@@ -93,7 +93,9 @@ function main() {
       if (debug.has("wat")) {
        console.log("-----wasm--------------");
       }
-      const wasmModule = lowerToWasm([typecked, ...gcx.depCrates]);
+
+      gcx.finalizedCrates.push(typecked);
+      const wasmModule = lowerToWasm(gcx);
       const moduleStringColor = writeModuleWatToString(wasmModule, true);
       const moduleString = writeModuleWatToString(wasmModule);

View file

@@ -3,7 +3,7 @@ import { tokenize } from "./lexer";
 it("should tokenize an emtpy function", () => {
   const input = `function hello() = ;`;
-  const tokens = tokenize(input);
+  const tokens = tokenize({ content: input });
 
   expect(tokens).toMatchSnapshot();
 });
@@ -11,7 +11,7 @@ it("should tokenize an emtpy function", () => {
 it("should tokenize hello world", () => {
   const input = `print("hello world")`;
-  const tokens = tokenize(input);
+  const tokens = tokenize({ content: input });
 
   expect(tokens).toMatchSnapshot();
 });

View file

@@ -92,7 +92,7 @@ export function tokenize(file: LoadedFile): Token[] {
 
   finish: while (i < input.length) {
     const next = input[i];
-    const span: Span = { start: i, end: i + 1, file };
+    const span: Span = new Span(i, i + 1, file);
 
     if (next === "/" && input[i + 1] === "/") {
       while (input[i] !== "\n") {
@@ -206,7 +206,7 @@ export function tokenize(file: LoadedFile): Token[] {
           default:
             throw new CompilerError(
               `invalid escape character: ${input[i]}`,
-              { start: span.end - 1, end: span.end, file }
+              new Span(span.end - 1, span.end, file)
             );
         }
         continue;

View file

@@ -45,7 +45,7 @@ export const loadCrate: CrateLoader = (
   // We really, really want a good algorithm for finding crates.
   // But right now we just look for files in the CWD.
-  const existing = gcx.depCrates.find((crate) => crate.packageName === name);
+  const existing = gcx.finalizedCrates.find((crate) => crate.packageName === name);
   if (existing) {
     return existing;
   }
@@ -64,7 +64,7 @@ export const loadCrate: CrateLoader = (
       const typecked = typeck(gcx, resolved);
 
-      gcx.depCrates.push(typecked);
+      gcx.finalizedCrates.push(typecked);
 
       return typecked;
     },
     () => {

View file

@@ -2,7 +2,6 @@ import {
   Crate,
   Expr,
   ExprBlock,
-  Final,
   Folder,
   FunctionDef,
   GlobalItem,
@@ -16,12 +15,12 @@ import {
   TyStruct,
   TyTuple,
   Typecked,
-  findCrateItem,
   mkDefaultFolder,
   superFoldExpr,
   superFoldItem,
   varUnreachable,
 } from "./ast";
+import { GlobalContext } from "./context";
 import { printTy } from "./printer";
 import { ComplexMap, encodeUtf8, unwrap } from "./utils";
 import * as wasm from "./wasm/defs";
@@ -61,7 +60,7 @@ export type Context = {
   reservedHeapMemoryStart: number;
   funcIndices: ComplexMap<Resolution, FuncOrImport>;
   globalIndices: ComplexMap<Resolution, wasm.GlobalIdx>;
-  crates: Crate<Final>[];
+  gcx: GlobalContext;
   relocations: Relocation[];
   knownDefPaths: ComplexMap<string[], ItemId>;
 };
@@ -112,13 +111,6 @@ function appendData(cx: Context, newData: Uint8Array): number {
   }
 }
 
-function findItem(cx: Context, id: ItemId): Item<Typecked> {
-  return findCrateItem(
-    unwrap(cx.crates.find((crate) => crate.id === id.crateId)),
-    id
-  );
-}
-
 const KNOWN_DEF_PATHS = [ALLOCATE_ITEM];
 
 function getKnownDefPaths(
@@ -155,8 +147,8 @@ function getKnownDefPaths(
   return knows;
 }
 
-export function lower(crates: Crate<Final>[]): wasm.Module {
-  const knownDefPaths = getKnownDefPaths(crates);
+export function lower(gcx: GlobalContext): wasm.Module {
+  const knownDefPaths = getKnownDefPaths(gcx.finalizedCrates);
 
   const mod: wasm.Module = {
     types: [],
@@ -183,12 +175,12 @@ export function lower(crates: Crate<Final>[]): wasm.Module {
   });
 
   const cx: Context = {
+    gcx,
     mod,
     funcTypes: new ComplexMap(),
     funcIndices: new ComplexMap(),
     globalIndices: new ComplexMap(),
     reservedHeapMemoryStart: 0,
-    crates,
     relocations: [],
     knownDefPaths,
   };
@@ -221,7 +213,7 @@ export function lower(crates: Crate<Final>[]): wasm.Module {
      }
    });
  }
 
-  crates.forEach((ast) => lowerMod(ast.rootItems));
+  gcx.finalizedCrates.forEach((ast) => lowerMod(ast.rootItems));
 
   const HEAP_ALIGN = 0x08;
   cx.reservedHeapMemoryStart =
@@ -229,7 +221,7 @@ export function lower(crates: Crate<Final>[]): wasm.Module {
       ? (mod.datas[0].init.length + (HEAP_ALIGN - 1)) & ~(HEAP_ALIGN - 1)
       : 0;
 
-  addRt(cx, crates);
+  addRt(cx, gcx.finalizedCrates);
 
   // THE LINKER
   const offset = cx.mod.imports.length;
@@ -447,7 +439,7 @@ function lowerExpr(
       break;
     }
     case "item": {
-      const item = findItem(fcx.cx, res.id);
+      const item = fcx.cx.gcx.findItem(res.id);
       if (item.kind !== "global") {
         throw new Error("cannot store to non-global item");
       }
@@ -529,7 +521,7 @@ function lowerExpr(
       break;
     }
     case "item": {
-      const item = findItem(fcx.cx, res.id);
+      const item = fcx.cx.gcx.findItem(res.id);
       switch (item.kind) {
         case "global": {
           const instr: wasm.Instr = { kind: "global.get", imm: DUMMY_IDX };

View file

@@ -32,7 +32,7 @@ import {
   GlobalItem,
   StructLiteralField,
 } from "./ast";
-import { CompilerError, eofSpan, LoadedFile, Span, spanMerge } from "./error";
+import { CompilerError, LoadedFile, Span } from "./error";
 import { BaseToken, Token, TokenIdent, TokenLitString } from "./lexer";
 import { ComplexMap, ComplexSet, Ids } from "./utils";
@@ -297,7 +297,7 @@ function mkParserExprBinary(
     [t, tok] = next(t);
     let rhs;
     [t, rhs] = parser(t);
-    const span = spanMerge(lhs.span, rhs.span);
+    const span = lhs.span.merge(rhs.span);
 
     return [t, mkExpr(lhs, rhs, span, tok.kind)];
   }
@@ -380,7 +380,7 @@ function parseExprCall(t: State): [State, Expr<Parsed>] {
         kind: "fieldAccess",
         lhs,
         field: { span: access.span, value },
-        span: spanMerge(lhs.span, access.span),
+        span: lhs.span.merge(access.span),
       };
     }
   }
@@ -676,7 +676,7 @@ function expectNext<T extends BaseToken>(
   if (!tok) {
     throw new CompilerError(
       `expected \`${kind}\`, found end of file`,
-      eofSpan(t.file)
+      Span.eof(t.file)
     );
   }
   if (tok.kind !== kind) {
@@ -691,7 +691,7 @@ function expectNext<T extends BaseToken>(
 function next(t: State): [State, Token] {
   const [rest, next] = maybeNextT(t);
   if (!next) {
-    throw new CompilerError("unexpected end of file", eofSpan(t.file));
+    throw new CompilerError("unexpected end of file", Span.eof(t.file));
   }
   return [rest, next];
 }

View file

@@ -19,7 +19,7 @@ import {
   ExternItem,
 } from "./ast";
 import { GlobalContext } from "./context";
-import { CompilerError, spanMerge } from "./error";
+import { CompilerError } from "./error";
 import { ComplexMap } from "./utils";
 
 const BUILTIN_SET = new Set<string>(BUILTINS);
@@ -128,11 +128,14 @@ function resolveModule(
     };
   }
 
-  if (ident.name === cx.ast.packageName) {
-    return {
-      kind: "item",
-      id: new ItemId(cx.ast.id, 0),
-    };
+  // All loaded crates are in scope.
+  for (const crate of [cx.ast, ...cx.gcx.finalizedCrates]) {
+    if (ident.name === crate.packageName) {
+      return {
+        kind: "item",
+        id: ItemId.crateRoot(crate.id),
+      };
+    }
   }
 
   if (BUILTIN_SET.has(ident.name)) {
@@ -278,7 +281,7 @@ function resolveModule(
       kind: "path",
       segments: [...segments, expr.field.value],
       res: pathRes,
-      span: spanMerge(lhs.span, expr.field.span),
+      span: lhs.span.merge(expr.field.span),
     };
   }
 }
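
Note: together with the entry point now calling gcx.crateLoader(gcx, "std", Span.startOfFile(file)) before compilation, this loop makes the root module of every finalized crate reachable by its package name, which appears to be why the explicit `extern mod std;` line could be dropped from the INPUT program.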

View file

@@ -1,7 +1,9 @@
 import { TY_INT, TY_STRING, TY_UNIT } from "./ast";
-import { DUMMY_SPAN as SPAN } from "./error";
+import { Span } from "./error";
 import { InferContext } from "./typeck";
 
+const SPAN: Span = Span.startOfFile({content: ""});
+
 it("should infer types across assignments", () => {
   const infcx = new InferContext();

View file

@@ -28,7 +28,6 @@ import {
   Typecked,
   TyStruct,
   Item,
-  findCrateItem,
   StructLiteralField,
 } from "./ast";
 import { GlobalContext } from "./context";
@@ -154,7 +153,7 @@ export function typeck(
     }
   }
 
-  const item = findCrateItem(ast, itemId);
+  const item = gcx.findItem(itemId, ast);
   const ty = itemTys.get(itemId);
   if (ty) {
     return ty;
@@ -420,11 +419,10 @@ export function typeck(
   if (ast.id === 0) {
     // Only the final id=0 crate needs and cares about main.
     if (!main) {
-      throw new CompilerError(`\`main\` function not found`, {
-        start: 0,
-        end: 1,
-        file: ast.rootFile,
-      });
+      throw new CompilerError(
+        `\`main\` function not found`,
+        Span.startOfFile(ast.rootFile)
+      );
     }
 
     typecked.typeckResults = {