mirror of https://github.com/Noratrieb/riverdelta.git (synced 2026-01-14 08:25:02 +01:00)

commit e951cd5ee1: avoid wasm symbol conflicts
parent 6bdbf14ecb

8 changed files with 188 additions and 99 deletions

@@ -5,6 +5,7 @@ export type Ast = {
   rootItems: Item[];
   typeckResults?: TypeckResults;
   itemsById: Map<ItemId, Item>;
+  packageName: string;
 };
 
 export type Identifier = {

@@ -36,6 +37,7 @@ export type ItemKind =
 export type Item = ItemKind & {
   span: Span;
   id: ItemId;
+  defPath?: string[];
 };
 
 export type FunctionDef = {

@@ -493,6 +495,7 @@ export function foldAst(ast: Ast, folder: Folder): Ast {
     rootItems: ast.rootItems.map((item) => folder.item(item)),
     itemsById: ast.itemsById,
     typeckResults: ast.typeckResults,
+    packageName: ast.packageName,
   };
 }

src/index.ts (32 changed lines)

@@ -1,5 +1,5 @@
 import { withErrorHandler } from "./error";
-import { tokenize } from "./lexer";
+import { isValidIdent, tokenize } from "./lexer";
 import { lower as lowerToWasm } from "./lower";
 import { parse } from "./parser";
 import { printAst } from "./printer";

@@ -7,28 +7,50 @@ import { resolve } from "./resolve";
 import { typeck } from "./typeck";
 import { writeModuleWatToString } from "./wasm/wat";
 import fs from "fs";
+import path from "path";
 import { exec } from "child_process";
 
 const INPUT = `
 function main() = (
   owo.uwu.meow();
   owo.uwu.main();
   owo.owo();
 );
 
 mod owo (
   mod uwu (
     function meow() =;
     function main() =;
   );
   function owo() = ;
 );
 `;
 
 function main() {
   let input: string;
+  let packageName: string;
   if (process.argv.length > 2) {
-    input = fs.readFileSync(process.argv[2], { encoding: "utf-8" });
+    const filename = process.argv[2];
+    if (path.extname(filename) !== ".nil") {
+      console.error(
+        `error: filename must have \`.nil\` extension: \`${filename}\``
+      );
+      process.exit(1);
+    }
+
+    input = fs.readFileSync(filename, { encoding: "utf-8" });
+    packageName = path.basename(filename, ".nil");
   } else {
     input = INPUT;
+    packageName = "test";
   }
 
+  if (!isValidIdent(packageName)) {
+    console.error(`error: package name \`${packageName}\` is not a valid identifier`);
+    process.exit(1);
+  }
+
+  console.log(`package name: '${packageName}'`);
 
   withErrorHandler(input, () => {
     const start = Date.now();

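For a quick usage illustration of the new CLI handling (file name invented, not from the repo): the package name is the input file's base name with the `.nil` extension stripped, and it must be a valid identifier because it later becomes the leading part of mangled wasm symbol names.

    // Hypothetical sketch of the package-name derivation shown above.
    import path from "path";

    const filename = "examples/hello.nil"; // assumed example argument
    if (path.extname(filename) !== ".nil") {
      throw new Error(`not a .nil file: ${filename}`);
    }
    const packageName = path.basename(filename, ".nil");
    console.log(packageName); // "hello"
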
@@ -36,7 +58,7 @@ function main() {
   console.log("-----TOKENS------------");
   console.log(tokens);
 
-  const ast = parse(tokens);
+  const ast = parse(packageName, tokens);
   console.log("-----AST---------------");
 
   console.dir(ast.rootItems, { depth: 50 });

src/lexer.ts (13 changed lines)

@@ -272,6 +272,19 @@ export function tokenize(input: string): Token[] {
   return tokens;
 }
 
+export function isValidIdent(ident: string): boolean {
+  if (!isIdentStart(ident[0])) {
+    return false;
+  }
+  for (let i = 1; i < ident.length; i++) {
+    const char = ident[i];
+    if (!isIdentContinue(char)) {
+      return false;
+    }
+  }
+  return true;
+}
+
 function isIdentStart(char: string): boolean {
   return (
     (char <= "Z" && char >= "A") || (char <= "z" && char >= "a") || char === "_"

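A small usage sketch (inputs invented for illustration): this is the check index.ts now applies to package names before they can end up inside mangled symbol names.

    // Assumes isValidIdent is exported from src/lexer.ts as in the hunk above.
    import { isValidIdent } from "./lexer";

    console.log(isValidIdent("my_package")); // true
    console.log(isValidIdent("2fast"));      // false: an identifier cannot start with a digit
    console.log(isValidIdent("my-package")); // false, assuming isIdentContinue rejects "-"
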
src/lower.ts (100 changed lines)

@@ -12,7 +12,7 @@ import {
   TyTuple,
   varUnreachable,
 } from "./ast";
-import { encodeUtf8, unwrap } from "./utils";
+import { ComplexMap, encodeUtf8, unwrap } from "./utils";
 import * as wasm from "./wasm/defs";
 
 const USIZE: wasm.ValType = "i32";

@@ -29,43 +29,37 @@ type Relocation = {
   instr: wasm.Instr & { func: wasm.FuncIdx };
 } & { res: Resolution };
 
-type StringifiedMap<K, V> = { _map: Map<string, V> };
-
-function setMap<K, V>(map: StringifiedMap<K, V>, key: K, value: V) {
-  map._map.set(JSON.stringify(key), value);
-}
-
-function getMap<K, V>(map: StringifiedMap<K, V>, key: K): V | undefined {
-  return map._map.get(JSON.stringify(key));
-}
-
 type FuncOrImport =
   | { kind: "func"; idx: wasm.FuncIdx }
   | { kind: "import"; idx: number };
 
 export type Context = {
   mod: wasm.Module;
-  funcTypes: StringifiedMap<wasm.FuncType, wasm.TypeIdx>;
+  funcTypes: ComplexMap<wasm.FuncType, wasm.TypeIdx>;
   reservedHeapMemoryStart: number;
-  funcIndices: StringifiedMap<Resolution, FuncOrImport>;
+  funcIndices: ComplexMap<Resolution, FuncOrImport>;
   ast: Ast;
   relocations: Relocation[];
 };
 
+function mangleDefPath(defPath: string[]): string {
+  return `nil__${defPath.map(escapeIdentName).join("__")}`;
+}
+
 function escapeIdentName(name: string): string {
   // This allows the implementation to use 2 leading underscores
-  // for any names and it will not conflict.
-  return name.startsWith("__") ? `_${name}` : name;
+  // to separate ident parts in a leading position and avoid conflicts.
+  return name.replace(/__/g, "___");
 }
 
 function internFuncType(cx: Context, type: wasm.FuncType): wasm.TypeIdx {
-  const existing = getMap(cx.funcTypes, type);
+  const existing = cx.funcTypes.get(type);
   if (existing !== undefined) {
     return existing;
   }
   const idx = cx.mod.types.length;
   cx.mod.types.push(type);
-  setMap(cx.funcTypes, type, idx);
+  cx.funcTypes.set(type, idx);
   return idx;
 }

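To make the conflict avoidance concrete, here is a standalone sketch of the scheme (the def paths below are invented for illustration): path parts are joined with "__", and any "__" already inside a part is escaped to "___", so two different def paths can no longer mangle to the same wasm symbol name.

    // Standalone copy of the two helpers above, for illustration only.
    function escapeIdentName(name: string): string {
      return name.replace(/__/g, "___");
    }

    function mangleDefPath(defPath: string[]): string {
      return `nil__${defPath.map(escapeIdentName).join("__")}`;
    }

    // Without the escaping, both of these would collapse to "nil__pkg__a__b":
    console.log(mangleDefPath(["pkg", "a", "b"])); // nil__pkg__a__b
    console.log(mangleDefPath(["pkg", "a__b"]));   // nil__pkg__a___b
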
@@ -121,25 +115,34 @@ export function lower(ast: Ast): wasm.Module {
 
   const cx: Context = {
     mod,
-    funcTypes: { _map: new Map() },
-    funcIndices: { _map: new Map() },
+    funcTypes: new ComplexMap(),
+    funcIndices: new ComplexMap(),
     reservedHeapMemoryStart: 0,
     ast,
     relocations: [],
   };
 
-  ast.rootItems.forEach((item) => {
-    switch (item.kind) {
-      case "function": {
-        lowerFunc(cx, item, item.node);
-        break;
-      }
-      case "import": {
-        lowerImport(cx, item, item.node);
-        break;
-      }
-    }
-  });
+  function lowerMod(items: Item[]) {
+    items.forEach((item) => {
+      switch (item.kind) {
+        case "function": {
+          lowerFunc(cx, item, item.node);
+          break;
+        }
+        case "import": {
+          lowerImport(cx, item, item.node);
+          break;
+        }
+        case "mod": {
+          if (item.node.modKind.kind === "inline") {
+            lowerMod(item.node.modKind.contents);
+          }
+        }
+      }
+    });
+  }
+
+  lowerMod(ast.rootItems);
 
   const HEAP_ALIGN = 0x08;
   cx.reservedHeapMemoryStart =

@@ -154,7 +157,7 @@ export function lower(ast: Ast): wasm.Module {
   cx.relocations.forEach((rel) => {
     switch (rel.kind) {
       case "funccall": {
-        const idx = getMap<Resolution, FuncOrImport>(cx.funcIndices, rel.res);
+        const idx = cx.funcIndices.get(rel.res);
         if (idx === undefined) {
           throw new Error(
             `no function found for relocation '${JSON.stringify(rel.res)}'`

@@ -193,11 +196,7 @@ function lowerImport(cx: Context, item: Item, def: ImportDef) {
   });
   }
 
-  setMap<Resolution, FuncOrImport>(
-    cx.funcIndices,
-    { kind: "item", id: item.id },
-    { kind: "import", idx }
-  );
+  cx.funcIndices.set({ kind: "item", id: item.id }, { kind: "import", idx });
 }
 
 type FuncContext = {

@@ -223,7 +222,7 @@ function lowerFunc(cx: Context, item: Item, func: FunctionDef) {
   const type = internFuncType(cx, wasmType);
 
   const wasmFunc: wasm.Func = {
-    _name: escapeIdentName(func.name),
+    _name: mangleDefPath(item.defPath!),
     type,
     locals: [],
     body: [],

@@ -244,8 +243,8 @@ function lowerFunc(cx: Context, item: Item, func: FunctionDef) {
 
   const idx = fcx.cx.mod.funcs.length;
   fcx.cx.mod.funcs.push(wasmFunc);
-  setMap<Resolution, FuncOrImport>(
-    fcx.cx.funcIndices,
+
+  fcx.cx.funcIndices.set(
     { kind: "item", id: fcx.item.id },
     { kind: "func", idx }
   );

@@ -353,8 +352,10 @@ function lowerExpr(fcx: FuncContext, instrs: wasm.Instr[], expr: Expr) {
       }
       break;
     }
+    case "path":
     case "ident": {
-      const res = expr.value.res!;
+      const res = expr.kind === "ident" ? expr.value.res! : expr.res;
+
       switch (res.kind) {
         case "local": {
           const location =

@@ -382,9 +383,6 @@ function lowerExpr(fcx: FuncContext, instrs: wasm.Instr[], expr: Expr) {
 
       break;
     }
-    case "path": {
-      todo("path");
-    }
     case "binary": {
       // By evaluating the LHS first, the RHS is on top, which
       // is correct as it's popped first. Evaluating the LHS first

@@ -498,12 +496,15 @@ function lowerExpr(fcx: FuncContext, instrs: wasm.Instr[], expr: Expr) {
       break;
     }
     case "call": {
-      if (expr.lhs.kind !== "ident") {
+      if (expr.lhs.kind !== "ident" && expr.lhs.kind !== "path") {
         todo("non constant calls");
       }
 
-      if (expr.lhs.value.res!.kind === "builtin") {
-        switch (expr.lhs.value.res!.name) {
+      const res =
+        expr.lhs.kind === "ident" ? expr.lhs.value.res! : expr.lhs.res;
+
+      if (res.kind === "builtin") {
+        switch (res.name) {
           case "trap": {
             instrs.push({ kind: "unreachable" });
             break exprKind;

@@ -553,7 +554,7 @@ function lowerExpr(fcx: FuncContext, instrs: wasm.Instr[], expr: Expr) {
       fcx.cx.relocations.push({
         kind: "funccall",
         instr: callInstr,
-        res: expr.lhs.value.res!,
+        res,
       });
 
       expr.args.forEach((arg) => {

@@ -893,7 +894,7 @@ function addRt(cx: Context, ast: Ast) {
   const iovecArray = reserveMemory(8);
 
   const print: wasm.Func = {
-    _name: "___print",
+    _name: "nil__print",
     locals: [],
     type: internFuncType(cx, { params: [POINTER, USIZE], returns: [] }),
     body: [

@@ -917,8 +918,7 @@ function addRt(cx: Context, ast: Ast) {
   const printIdx = cx.mod.funcs.length;
   cx.mod.funcs.push(print);
 
-  setMap(
-    cx.funcIndices,
+  cx.funcIndices.set(
     { kind: "builtin", name: "print" },
     { kind: "func", idx: printIdx }
   );

@@ -32,7 +32,7 @@ import { Ids } from "./utils";
 
 type Parser<T> = (t: Token[]) => [Token[], T];
 
-export function parse(t: Token[]): Ast {
+export function parse(packageName: string, t: Token[]): Ast {
   const items: Item[] = [];
 
   while (t.length > 0) {

@@ -41,7 +41,7 @@ export function parse(t: Token[]): Ast {
     items.push(item);
   }
 
-  const ast = assignIds(items);
+  const ast = buildAst(packageName, items);
 
   validateAst(ast);

@@ -711,13 +711,14 @@ function validateAst(ast: Ast) {
   foldAst(ast, validator);
 }
 
-function assignIds(rootItems: Item[]): Ast {
+function buildAst(packageName: string, rootItems: Item[]): Ast {
   const itemId = new Ids();
   const loopId = new Ids();
 
   const ast: Ast = {
     rootItems,
     itemsById: new Map(),
+    packageName,
   };
 
   const assigner: Folder = {

@@ -39,7 +39,7 @@ function resolveModItem(
     case "inline": {
       const contents = new Map(
         mod.modKind.contents.map((item) => [item.node.name, item.id])
-      );
+      );
       cx.modContentsCache.set(modId, contents);
       return contents.get(name);
     }

@@ -52,11 +52,15 @@
 export function resolve(ast: Ast): Ast {
   const cx: Context = { ast, modContentsCache: new Map() };
 
-  const rootItems = resolveModule(cx, ast.rootItems);
+  const rootItems = resolveModule(cx, [ast.packageName], ast.rootItems);
   return { ...ast, rootItems };
 }
 
-function resolveModule(cx: Context, contents: Item[]): Item[] {
+function resolveModule(
+  cx: Context,
+  modName: string[],
+  contents: Item[]
+): Item[] {
   const items = new Map<string, number>();
 
   contents.forEach((item) => {

@@ -117,6 +121,8 @@ function resolveModule(cx: Context, contents: Item[]): Item[] {
       return cx.ast;
     },
     itemInner(item) {
+      const defPath = [...modName, item.node.name];
+
       switch (item.kind) {
         case "function": {
           const params = item.node.params.map(({ name, span, type }) => ({

@@ -143,22 +149,28 @@ function resolveModule(cx: Context, contents: Item[]): Item[] {
               body,
             },
             id: item.id,
+            defPath,
           };
         }
         case "mod": {
           if (item.node.modKind.kind === "inline") {
-            const contents = resolveModule(cx, item.node.modKind.contents);
+            const contents = resolveModule(
+              cx,
+              defPath,
+              item.node.modKind.contents
+            );
             return {
               ...item,
               kind: "mod",
               node: { ...item.node, modKind: { kind: "inline", contents } },
+              defPath,
             };
           }
           break;
         }
       }
 
-      return superFoldItem(item, this);
+      return { ...superFoldItem(item, this), defPath };
     },
     expr(expr) {
       switch (expr.kind) {

@@ -195,9 +207,16 @@ function resolveModule(cx: Context, contents: Item[]): Item[] {
           };
         }
         case "fieldAccess": {
-          if (expr.lhs.kind === "ident") {
-            // If the lhs is a module we need to convert this into a path.
-            const res = resolveIdent(expr.lhs.value);
+          // We convert field accesses to paths if the lhs refers to a module.
+
+          const lhs = this.expr(expr.lhs);
+
+          if (lhs.kind === "ident" || lhs.kind === "path") {
+            const res =
+              lhs.kind === "ident" ? resolveIdent(lhs.value) : lhs.res;
+            const segments =
+              lhs.kind === "ident" ? [lhs.value.name] : lhs.segments;
+
             if (res.kind === "item") {
               const module = unwrap(cx.ast.itemsById.get(res.id));
               if (module.kind === "mod") {

@@ -225,9 +244,9 @@ function resolveModule(cx: Context, contents: Item[]): Item[] {
 
                 return {
                   kind: "path",
-                  segments: [expr.lhs.value.name, expr.field.value],
+                  segments: [...segments, expr.field.value],
                   res: pathRes,
-                  span: spanMerge(expr.lhs.span, expr.field.span),
+                  span: spanMerge(lhs.span, expr.field.span),
                 };
               }
             }

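For intuition, a rough sketch of the shape this produces (types simplified and invented for illustration, not the repo's exact Expr definitions): for the test input's owo.uwu.meow, the inner owo.uwu field access becomes a path with segments ["owo", "uwu"], and the outer access appends "meow".

    // Simplified illustration only.
    type PathExpr = { kind: "path"; segments: string[] };

    const inner: PathExpr = { kind: "path", segments: ["owo", "uwu"] };
    const outer: PathExpr = { kind: "path", segments: [...inner.segments, "meow"] };
    console.log(outer.segments.join(".")); // owo.uwu.meow
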
src/utils.ts (23 changed lines)

@@ -16,3 +16,26 @@ export function unwrap<T>(value: T | undefined): T {
   }
   return value;
 }
+
+/**
+ * A `Map` that can have arbitrarily complex keys.
+ * It uses JSON+string equality instead of reference equality.
+ */
+export class ComplexMap<K, V> {
+  inner: Map<string | number, V> = new Map();
+
+  public get(key: K): V | undefined {
+    return this.inner.get(this.mangleKey(key));
+  }
+
+  public set(key: K, value: V) {
+    this.inner.set(this.mangleKey(key), value);
+  }
+
+  private mangleKey(key: K): string | number {
+    if (typeof key === "string" || typeof key === "number") {
+      return key;
+    }
+    return JSON.stringify(key);
+  }
+}

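A brief usage sketch (the key shape here is invented for illustration): because non-primitive keys are serialized with JSON.stringify, two structurally equal objects address the same entry, which is what the lowering pass relies on when it indexes functions by Resolution values instead of object identity.

    // Assumes ComplexMap is imported from src/utils.ts as added above.
    import { ComplexMap } from "./utils";

    const funcIndices = new ComplexMap<{ kind: string; id: number }, number>();
    funcIndices.set({ kind: "item", id: 3 }, 7);

    // A fresh but structurally equal key finds the same entry.
    console.log(funcIndices.get({ kind: "item", id: 3 })); // 7
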
@@ -26,19 +26,21 @@ import {
 
 const identity = (s: string) => s;
 
-class Formatter {
+class FmtCtx {
   print: (chunk: string) => void;
   indentation: number;
   wordsInSexpr: number[];
   freshLinebreak: boolean;
   color: boolean;
+  mod: Module;
 
-  constructor(print: (chunk: string) => void, color = true) {
+  constructor(print: (chunk: string) => void, mod: Module, color = true) {
     this.print = print;
     this.indentation = 0;
     this.wordsInSexpr = [];
     this.freshLinebreak = false;
     this.color = color;
+    this.mod = mod;
   }
 
   linebreak() {

@@ -103,6 +105,10 @@
     this.wordsInSexpr[last]++;
   }
 
+  comment(content: string | number) {
+    this.word(`(;${content};)`, chalk.gray);
+  }
+
   startSexpr() {
     this.word("(");
     this.wordsInSexpr.push(0);

@@ -121,21 +127,18 @@
 export function writeModuleWatToString(module: Module, color = false): string {
   const parts: string[] = [];
   const writer = (s: string) => parts.push(s);
-  printModule(module, new Formatter(writer, color));
+  printModule(module, new FmtCtx(writer, module, color));
   return parts.join("");
 }
 
-export function writeModuleWat(module: Module, f: Formatter) {
-  printModule(module, f);
-}
 // base
 
-function printString(s: string, f: Formatter) {
+function printString(s: string, f: FmtCtx) {
   // TODO: escaping
   f.word(`"${s}"`);
 }
 
-function printBinaryString(buf: Uint8Array, f: Formatter) {
+function printBinaryString(buf: Uint8Array, f: FmtCtx) {
   const parts: string[] = [];
   for (let i = 0; i < buf.length; i++) {
     const byte = buf[i];

@@ -151,7 +154,7 @@ function printBinaryString(buf: Uint8Array, f: Formatter) {
   f.word(`"${parts.join("")}"`);
 }
 
-function printId(id: string | undefined, f: Formatter) {
+function printId(id: string | undefined, f: FmtCtx) {
   if (id) {
     f.word(`$${id}`);
   }

@@ -159,11 +162,11 @@ function printId(id: string | undefined, f: Formatter) {
 
 // types
 
-function printValType(type: ValType, f: Formatter) {
+function printValType(type: ValType, f: FmtCtx) {
   f.type(type);
 }
 
-function printFuncType(type: FuncType, f: Formatter) {
+function printFuncType(type: FuncType, f: FmtCtx) {
   f.sexpr(() => {
     f.keyword("func");
     f.sexpr(() => {

@@ -177,17 +180,17 @@ function printFuncType(type: FuncType, f: Formatter) {
   });
 }
 
-function printLimits(limits: Limits, f: Formatter) {
+function printLimits(limits: Limits, f: FmtCtx) {
   f.word(limits.min);
   f.word(limits.max);
 }
 
-function printTableType(type: TableType, f: Formatter) {
+function printTableType(type: TableType, f: FmtCtx) {
   printLimits(type.limits, f);
   printValType(type.reftype, f);
 }
 
-function printGlobalType(type: GlobalType, f: Formatter) {
+function printGlobalType(type: GlobalType, f: FmtCtx) {
   if (type.mut === "const") {
     printValType(type.type, f);
   } else {

@@ -200,7 +203,7 @@ function printGlobalType(type: GlobalType, f: Formatter) {
 
 // instrs
 
-function printBlockType(type: Blocktype, f: Formatter) {
+function printBlockType(type: Blocktype, f: FmtCtx) {
   f.sexpr(() => {
     f.keyword("type");
     if (type.kind === "typeidx") {

@@ -211,7 +214,7 @@ function printBlockType(type: Blocktype, f: Formatter) {
   });
 }
 
-function printMemarg(arg: MemArg, f: Formatter) {
+function printMemarg(arg: MemArg, f: FmtCtx) {
   if (arg.offset /*0->false*/) {
     f.word(`offset=${arg.offset}`);
   }

@@ -225,7 +228,7 @@ function printMemarg(arg: MemArg, f: Formatter) {
  * Start: indented start of first instr
  * End: start of next line
  */
-function printInstrBlock(instrs: Instr[], f: Formatter) {
+function printInstrBlock(instrs: Instr[], f: FmtCtx) {
   instrs.forEach((nested, i) => {
     printInstr(nested, f);
     if (i !== instrs.length - 1) {

@@ -235,7 +238,7 @@ function printInstrBlock(instrs: Instr[], f: Formatter) {
   f.breakDedent();
 }
 
-function printInstr(instr: Instr, f: Formatter) {
+function printInstr(instr: Instr, f: FmtCtx) {
   switch (instr.kind) {
     case "block":
     case "loop":

@@ -430,10 +433,15 @@ function printInstr(instr: Instr, f: Formatter) {
       instr.labels.forEach((label) => f.word(label));
       f.word(instr.label);
       break;
-    case "call":
+    case "call": {
       f.controlFlow(instr.kind);
       f.word(instr.func);
+      const name = f.mod.funcs[instr.func]?._name;
+      if (name) {
+        f.comment(name);
+      }
       break;
+    }
     case "call_indirect":
       f.controlFlow(instr.kind);
       f.word(instr.table);

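The printed text might then look roughly like the sketch below (index and name invented for illustration): the numeric function index the instruction encodes stays authoritative, and the callee's mangled _name is only attached as a human-readable hint.

    // Hypothetical illustration of the effect on the printed WAT output:
    //
    //   call 3 (;nil__test__owo__uwu__meow;)
    //
    // The annotation uses WAT's (; ... ;) block-comment syntax, so tools that
    // parse the text simply ignore it.
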
@@ -504,14 +512,14 @@ function printInstr(instr: Instr, f: Formatter) {
 
 // modules
 
-function printType(type: FuncType, f: Formatter) {
+function printType(type: FuncType, f: FmtCtx) {
   f.sexpr(() => {
     f.keyword("type");
     printFuncType(type, f);
   });
 }
 
-function printImport(import_: Import, f: Formatter) {
+function printImport(import_: Import, f: FmtCtx) {
   f.sexpr(() => {
     f.keyword("import");
     printString(import_.module, f);

@@ -541,12 +549,12 @@ function printImport(import_: Import, f: Formatter) {
   });
 }
 
-function printFunction(func: Func, idx: number, f: Formatter) {
+function printFunction(func: Func, idx: number, f: FmtCtx) {
   f.sexpr(() => {
     f.keyword("func");
     printId(func._name, f);
 
-    f.word(`(;${idx};)`, chalk.gray);
+    f.comment(idx);
 
     f.sexpr(() => {
       f.keyword("type");

@@ -571,7 +579,7 @@ function printFunction(func: Func, idx: number, f: Formatter) {
   });
 }
 
-function printTable(table: Table, f: Formatter) {
+function printTable(table: Table, f: FmtCtx) {
   f.sexpr(() => {
     f.keyword("table");
     printId(table._name, f);

@@ -579,7 +587,7 @@ function printTable(table: Table, f: Formatter) {
   });
 }
 
-function printMem(mem: Mem, f: Formatter) {
+function printMem(mem: Mem, f: FmtCtx) {
   f.sexpr(() => {
     f.keyword("memory");
     printId(mem._name, f);

@@ -588,7 +596,7 @@ function printMem(mem: Mem, f: Formatter) {
   });
 }
 
-function printGlobal(global: Global, f: Formatter) {
+function printGlobal(global: Global, f: FmtCtx) {
   f.sexpr(() => {
     f.keyword("global");
     printId(global._name, f);

@@ -600,7 +608,7 @@ function printGlobal(global: Global, f: Formatter) {
   });
 }
 
-function printExport(export_: Export, f: Formatter) {
+function printExport(export_: Export, f: FmtCtx) {
   const desc = export_.desc;
 
   f.sexpr(() => {

@@ -614,18 +622,18 @@ function printExport(export_: Export, f: Formatter) {
   });
 }
 
-function printStart(start: Start, f: Formatter) {
+function printStart(start: Start, f: FmtCtx) {
   f.sexpr(() => {
     f.keyword("start");
     f.word(start.func);
   });
 }
 
-function printElem(_elem: Elem, f: Formatter) {
+function printElem(_elem: Elem, f: FmtCtx) {
   todo();
 }
 
-function printData(data: Data, f: Formatter) {
+function printData(data: Data, f: FmtCtx) {
   let mode = data.mode;
 
   f.sexpr(() => {

@@ -656,7 +664,7 @@ function printData(data: Data, f: Formatter) {
   });
 }
 
-function printModule(module: Module, f: Formatter) {
+function printModule(module: Module, f: FmtCtx) {
   f.sexpr(() => {
     f.keyword("module");
     printId(module._name, f);