add http support

Fabian Stamm 2022-07-30 10:16:40 +00:00
parent 1774306b06
commit 98bc2d4148
3 changed files with 81 additions and 33 deletions

View File

@@ -1,6 +1,6 @@
 {
   "name": "@hibas123/jrpcgen",
-  "version": "1.1.4",
+  "version": "1.1.5",
   "main": "lib/index.js",
   "license": "MIT",
   "packageManager": "yarn@3.1.1",
@@ -24,6 +24,9 @@
   "templates/CSharp/*.csproj",
   "templates/Rust/Cargo.toml",
   "templates/Rust/src/lib.rs",
+  "templates/Dart/service_client.dart",
+  "templates/Dart/base.dart",
+  "templates/Dart/pubspec.yaml",
   "examples/*.jrpcproj",
   "src/**",
   "tsconfig.json"

View File

@@ -316,7 +316,10 @@ export default function get_ir(parsed: Parsed): IR {
         builtin.push("bytes");
       }
     } else {
-      throw new IRError(statement, "Invalid statement!");
+      throw new IRError(
+        statement,
+        "Invalid statement: " + (statement as any).type
+      );
     }
   });

View File

@@ -2,6 +2,8 @@ import dbg from "debug";
 import * as FS from "fs";
 import Color from "chalk";
 import * as Path from "path";
+import * as Https from "https";
+import * as Http from "http";
 import tokenize, { TokenizerError } from "./tokenizer";
 import parse, { Parsed, ParserError } from "./parser";
 import get_ir, { IR, IRError } from "./ir";
@@ -14,6 +16,7 @@ import { CSharpTarget } from "./targets/csharp";
 import { RustTarget } from "./targets/rust";
 import { ZIGTarget } from "./targets/zig";
 import { DartTarget } from "./targets/dart";
+import { URL } from "url";
 
 class CatchedError extends Error {}
@@ -43,26 +46,63 @@ function indexToLineAndCol(src: string, index: number) {
   return { line, col };
 }
 
+function resolve(base: string, ...parts: string[]) {
+  if (base.startsWith("http://") || base.startsWith("https://")) {
+    let u = new URL(base);
+    for (const part of parts) {
+      u = new URL(part, u);
+    }
+    return u.href;
+  } else {
+    return Path.resolve(base, ...parts);
+  }
+}
+
+async function fetchFileFromURL(url: string) {
+  return new Promise<string>((yes, no) => {
+    const makeReq = url.startsWith("https://") ? Https.request : Http.request;
+    const req = makeReq(url, (res) => {
+      let chunks: Buffer[] = [];
+      res.on("data", (chunk) => {
+        chunks.push(Buffer.from(chunk));
+      });
+      res.on("error", no);
+      res.on("end", () => yes(Buffer.concat(chunks).toString("utf-8")));
+    });
+    req.on("error", no);
+    req.end();
+  });
+}
+
 const fileCache = new Map<string, string>();
-function getFile(name: string) {
+async function getFile(name: string) {
   if (fileCache.has(name)) return fileCache.get(name);
   else {
     try {
-      const data = FS.readFileSync(name, "utf-8");
-      fileCache.set(name, data);
-      return data;
+      if (name.startsWith("http://") || name.startsWith("https://")) {
+        const data = await fetchFileFromURL(name);
+        fileCache.set(name, data);
+        return data;
+      } else {
+        const data = FS.readFileSync(name, "utf-8");
+        fileCache.set(name, data);
+        return data;
+      }
     } catch (err) {
-      printError(new Error(`Cannot open file ${name};`), null, 0);
+      log(err);
+      await printError(new Error(`Cannot open file ${name};`), null, 0);
     }
   }
   return undefined;
 }
 
-function printError(err: Error, file: string | null, idx: number) {
+async function printError(err: Error, file: string | null, idx: number) {
   let loc = { line: 0, col: 0 };
   if (file != null) {
     const data = getFile(file);
-    if (data) loc = indexToLineAndCol(data, idx);
+    if (data) loc = indexToLineAndCol(await data, idx);
   }
 
   console.error(`${Color.red("ERROR: at")} ${file}:${loc.line}:${loc.col}`);
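As a side note on the new helpers above: the HTTP branch of resolve relies on WHATWG URL relative resolution, so an imported path is resolved against the URL of the file that imports it, while local files keep the old Path.resolve behaviour. A small illustrative sketch (the host and paths below are made up, not taken from this repository):

    import { URL } from "url";
    import * as Path from "path";

    // Relative resolution against an HTTP(S) base (hypothetical URLs):
    // the relative part replaces the last path segment of the base.
    console.log(new URL("common.jrpc", "https://example.com/schemas/root.jrpc").href);
    // -> https://example.com/schemas/common.jrpc

    // The filesystem branch keeps the existing Path.resolve semantics:
    console.log(Path.resolve("/schemas", "common.jrpc"));
    // -> /schemas/common.jrpc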
@@ -86,21 +126,21 @@ type ProcessContext = {
   processedFiles: Set<string>;
 };
 
-function processFile(
+async function processFile(
   ctx: ProcessContext,
   file: string,
   root = false
-): Parsed | null {
-  file = Path.resolve(file);
+): Promise<Parsed | null> {
+  file = resolve(file);
   if (ctx.processedFiles.has(file)) {
-    log("Skipping file %s since it has already be processed", file);
+    log("Skipping file %s since it has already been processed", file);
     return null;
   }
   ctx.processedFiles.add(file);
 
   log("Processing file %s", file);
-  const content = getFile(file);
-  if (!content) throw new Error("Could not open file " + file);
+  const content = await getFile(file);
+  if (content == undefined) throw new Error("Could not open file " + file);
   try {
     log("Tokenizing %s", file);
     const tokens = tokenize(content);
@@ -108,27 +148,29 @@ function processFile(
     const parsed = parse(tokens, file);
 
     log("Resolving imports of %s", file);
-    let resolved = parsed
-      .map((statement) => {
-        if (statement.type == "import") {
-          const base = Path.dirname(file);
-          const resolved = Path.resolve(
-            Path.join(base, statement.path + ".jrpc")
-          );
-          return processFile(ctx, resolved);
-        } else {
-          return statement;
-        }
-      })
-      .filter((e) => !!e)
-      .flat(1) as Parsed;
-    return resolved;
+    let resolved: Parsed = [];
+    for (const statement of parsed) {
+      if (statement.type == "import") {
+        let res: string;
+        if (file.startsWith("http://") || file.startsWith("https://")) {
+          res = resolve(file, statement.path + ".jrpc");
+        } else {
+          const base = Path.dirname(file);
+          res = resolve(base, statement.path + ".jrpc");
+        }
+        resolved.push(...((await processFile(ctx, res)) || []));
+      } else {
+        resolved.push(statement);
+      }
+    }
+    return resolved.filter((e) => !!e).flat(1) as Parsed;
   } catch (err) {
     if (err instanceof TokenizerError) {
-      printError(err, file, err.index);
+      await printError(err, file, err.index);
       if (!root) throw new CatchedError();
     } else if (err instanceof ParserError) {
-      printError(err, file, err.token.startIdx);
+      await printError(err, file, err.token.startIdx);
       if (!root) throw new CatchedError();
     } else if (root && err instanceof CatchedError) {
       return null;
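A brief aside on the rewritten import resolution above: Array.prototype.map with an async callback returns an array of Promises rather than parsed statements, so the old .map()/.flat() chain cannot simply be awaited once processFile is async; the commit switches to a sequential for..of loop instead. A generic, self-contained demonstration of that behaviour (not jrpcgen code):

    // Minimal demonstration that map with an async callback yields Promises,
    // not values -- the reason the .map() chain was replaced by an explicit
    // for..of loop with await.
    async function demo() {
      const values = [1, 2, 3];
      const mapped = values.map(async (v) => v * 2); // Promise<number>[]
      const awaited = await Promise.all(mapped);     // number[]
      console.log(mapped.length, awaited);           // 3 [ 2, 4, 6 ]
    }
    demo();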
@@ -138,7 +180,7 @@ function processFile(
   }
 }
 
-export default function startCompile(options: CompileOptions) {
+export default async function startCompile(options: CompileOptions) {
   const ctx = {
     options,
     processedFiles: new Set(),
@@ -148,14 +190,14 @@ export default function startCompile(options: CompileOptions) {
   if (options.input.endsWith(".json")) {
     ir = JSON.parse(FS.readFileSync(options.input, "utf-8"));
   } else {
-    const parsed = processFile(ctx, options.input, true);
+    const parsed = await processFile(ctx, options.input, true);
     if (!parsed) process.exit(1); // Errors should have already been emitted
     try {
       ir = get_ir(parsed);
     } catch (err) {
       if (err instanceof IRError) {
-        printError(
+        await printError(
           err,
           err.statement.location.file,
           err.statement.location.idx
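For completeness, a rough usage sketch of what this commit enables: the compiler entry point can now be pointed at an http(s) URL instead of a local file. The module path and any CompileOptions fields beyond input are assumptions here, not taken from the diff:

    // Hypothetical invocation; only the `input` field is visible in the diff,
    // so the remaining CompileOptions fields are omitted via a cast.
    import startCompile from "./process"; // assumed module path

    startCompile({ input: "https://example.com/specs/api.jrpc" } as any)
      .then(() => console.log("done"))
      .catch((err) => console.error(err));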