import dbg from "debug"; import * as FS from "fs"; import Color from "chalk"; import * as Path from "path"; import * as Https from "https"; import * as Http from "http"; import tokenize, { TokenizerError } from "./tokenizer"; import parse, { Parsed, ParserError } from "./parser"; import get_ir, { IR, IRError } from "./ir"; import compile, { CompileTarget } from "./compile"; import { ESMTypescriptTarget, NodeJSTypescriptTarget, } from "./targets/typescript"; import { CSharpTarget } from "./targets/csharp"; import { RustTarget } from "./targets/rust"; import { ZIGTarget } from "./targets/zig"; import { DartTarget } from "./targets/dart"; import { URL } from "url"; class CatchedError extends Error {} const log = dbg("app"); export const Targets = new Map(); Targets.set("ts-esm", ESMTypescriptTarget); Targets.set("ts-node", NodeJSTypescriptTarget); Targets.set("c#", CSharpTarget as typeof CompileTarget); Targets.set("rust", RustTarget as typeof CompileTarget); Targets.set("zig", ZIGTarget as typeof CompileTarget); Targets.set("dart", DartTarget as typeof CompileTarget); function indexToLineAndCol(src: string, index: number) { let line = 1; let col = 1; for (let i = 0; i < index; i++) { if (src.charAt(i) === "\n") { line++; col = 1; } else { col++; } } return { line, col }; } function resolve(base: string, ...parts: string[]) { if (base.startsWith("http://") || base.startsWith("https://")) { let u = new URL(base); for (const part of parts) { u = new URL(part, u); } return u.href; } else { return Path.resolve(base, ...parts); } } async function fetchFileFromURL(url: string) { return new Promise((yes, no) => { const makeReq = url.startsWith("https://") ? Https.request : Http.request; const req = makeReq(url, (res) => { let chunks: Buffer[] = []; res.on("data", (chunk) => { chunks.push(Buffer.from(chunk)); }); res.on("error", no); res.on("end", () => yes(Buffer.concat(chunks).toString("utf-8"))); }); req.on("error", no); req.end(); }); } const fileCache = new Map(); async function getFile(name: string) { if (fileCache.has(name)) return fileCache.get(name); else { try { if (name.startsWith("http://") || name.startsWith("https://")) { const data = await fetchFileFromURL(name); fileCache.set(name, data); return data; } else { const data = FS.readFileSync(name, "utf-8"); fileCache.set(name, data); return data; } } catch (err) { log(err); await printError(new Error(`Cannot open file ${name};`), null, 0); } } return undefined; } async function printError(err: Error, file: string | null, idx: number) { let loc = { line: 0, col: 0 }; if (file != null) { const data = getFile(file); if (data) loc = indexToLineAndCol(await data, idx); } console.error(`${Color.red("ERROR: at")} ${file}:${loc.line}:${loc.col}`); console.error(" ", err.message); log(err.stack); } export type Target = { type: string; output: string; }; export type CompileOptions = { input: string; targets: Target[]; emitDefinitions?: string; }; type ProcessContext = { options: CompileOptions; processedFiles: Set; }; async function processFile( ctx: ProcessContext, file: string, root = false ): Promise { file = resolve(file); if (ctx.processedFiles.has(file)) { log("Skipping file %s since it has already been processed", file); return null; } ctx.processedFiles.add(file); log("Processing file %s", file); const content = await getFile(file); if (content == undefined) throw new Error("Could not open file " + file); try { log("Tokenizing %s", file); const tokens = tokenize(content); log("Parsing %s", file); const parsed = parse(tokens, file); 
log("Resolving imports of %s", file); let resolved: Parsed = []; for (const statement of parsed) { if (statement.type == "import") { let res: string; if (file.startsWith("http://") || file.startsWith("https://")) { res = resolve(file, statement.path + ".jrpc"); } else { const base = Path.dirname(file); res = resolve(base, statement.path + ".jrpc"); } resolved.push(...((await processFile(ctx, res)) || [])); } else { resolved.push(statement); } } return resolved.filter((e) => !!e).flat(1) as Parsed; } catch (err) { if (err instanceof TokenizerError) { await printError(err, file, err.index); if (!root) throw new CatchedError(); } else if (err instanceof ParserError) { await printError(err, file, err.token.startIdx); if (!root) throw new CatchedError(); } else if (root && err instanceof CatchedError) { return null; } else { throw err; } } } export default async function startCompile(options: CompileOptions) { const ctx = { options, processedFiles: new Set(), } as ProcessContext; let ir: IR | undefined = undefined; if (options.input.endsWith(".json")) { ir = JSON.parse(FS.readFileSync(options.input, "utf-8")); } else { const parsed = await processFile(ctx, options.input, true); if (!parsed) process.exit(1); // Errors should have already been emitted try { ir = get_ir(parsed); } catch (err) { if (err instanceof IRError) { await printError( err, err.statement.location.file, err.statement.location.idx ); process.exit(1); } else { throw err; } } } if (!ir) throw new Error("Error compiling: Cannot get IR"); if (options.emitDefinitions) { FS.writeFileSync( options.emitDefinitions, JSON.stringify(ir, undefined, 3) ); } if (options.targets.length <= 0) { console.log(Color.yellow("WARNING:"), "No targets selected!"); } options.targets.forEach((target) => { const tg = Targets.get(target.type) as any; if (!tg) { console.log(Color.red("ERROR:"), "Target not supported!"); return; } compile(ir, new tg(target.output, ir.options)); }); }