import dbg from "debug"; import * as FS from "fs"; import Color from "chalk"; import * as Path from "path"; import tokenize, { TokenizerError } from "./tokenizer"; import parse, { Parsed, ParserError } from "./parser"; import get_ir, { IR, IRError } from "./ir"; import compile, { CompileTarget } from "./compile"; import { ESMTypescriptTarget, NodeJSTypescriptTarget, } from "./targets/typescript"; import { CSharpTarget } from "./targets/csharp"; class CatchedError extends Error {} const log = dbg("app"); const Targets = new Map(); Targets.set("ts-esm", ESMTypescriptTarget); Targets.set("ts-node", NodeJSTypescriptTarget); Targets.set("c#", CSharpTarget as typeof CompileTarget); function indexToLineAndCol(src: string, index: number) { let line = 1; let col = 1; for (let i = 0; i < index; i++) { if (src.charAt(i) === "\n") { line++; col = 1; } else { col++; } } return { line, col }; } const fileCache = new Map(); function getFile(name: string) { if (fileCache.has(name)) return fileCache.get(name); else { try { const data = FS.readFileSync(name, "utf-8"); fileCache.set(name, data); return data; } catch (err) { printError(new Error(`Cannot open file ${name};`), null, 0); } } return undefined; } function printError(err: Error, file: string | null, idx: number) { let loc = { line: 0, col: 0 }; if (file != null) { const data = getFile(file); if (data) loc = indexToLineAndCol(data, idx); } console.error(`${Color.red("ERROR: at")} ${file}:${loc.line}:${loc.col}`); console.error(" ", err.message); log(err.stack); } export type Target = { type: string; output: string; }; export type CompileOptions = { input: string; targets: Target[]; emitDefinitions?: string; }; type ProcessContext = { options: CompileOptions; processedFiles: Set; }; function processFile( ctx: ProcessContext, file: string, root = false ): Parsed | null { file = Path.resolve(file); if (ctx.processedFiles.has(file)) { log("Skipping file %s since it has already be processed", file); return null; } ctx.processedFiles.add(file); log("Processing file %s", file); const content = getFile(file); if (!content) throw new Error("Could not open file " + file); try { log("Tokenizing %s", file); const tokens = tokenize(content); log("Parsing %s", file); const parsed = parse(tokens, file); log("Resolving imports of %s", file); let resolved = parsed .map((statement) => { if (statement.type == "import") { const base = Path.dirname(file); const resolved = Path.resolve( Path.join(base, statement.path + ".jrpc") ); return processFile(ctx, resolved); } else { return statement; } }) .filter((e) => !!e) .flat(1) as Parsed; return resolved; } catch (err) { if (err instanceof TokenizerError) { printError(err, file, err.index); if (!root) throw new CatchedError(); } else if (err instanceof ParserError) { printError(err, file, err.token.startIdx); if (!root) throw new CatchedError(); } else if (root && err instanceof CatchedError) { return null; } else { throw err; } } } export default function startCompile(options: CompileOptions) { const ctx = { options, processedFiles: new Set(), } as ProcessContext; let ir: IR | undefined = undefined; if (options.input.endsWith(".json")) { ir = JSON.parse(FS.readFileSync(options.input, "utf-8")); } else { const parsed = processFile(ctx, options.input, true); if (!parsed) process.exit(1); // Errors should have already been emitted try { ir = get_ir(parsed); } catch (err) { if (err instanceof IRError) { printError( err, err.statement.location.file, err.statement.location.idx ); process.exit(1); } else { throw err; } } } if 
(!ir) throw new Error("Error compiling: Cannot get IR"); if (options.emitDefinitions) { FS.writeFileSync( options.emitDefinitions, JSON.stringify(ir, undefined, 3) ); } if (options.targets.length <= 0) { console.log(Color.yellow("WARNING:"), "No targets selected!"); } options.targets.forEach((target) => { const tg = Targets.get(target.type) as any; if (!tg) { console.log(Color.red("ERROR:"), "Target not supported!"); return; } compile(ir, new tg(target.output, ir.options)); }); }
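
// Usage sketch (hypothetical values; the file paths and output directories are
// illustrative, not part of this module). startCompile is the default export,
// so a CLI wrapper could call it roughly like this to compile a .jrpc schema to
// an ESM TypeScript target while also emitting the IR as JSON:
//
// startCompile({
//   input: "./service.jrpc",
//   targets: [{ type: "ts-esm", output: "./generated/ts" }],
//   emitDefinitions: "./generated/definitions.json",
// });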