First Commit
Yes, that is what I am doing on 31.12.2021 at 22:39 local time...

src/compile.ts (new file, 74 lines)
import * as FS from "fs";
import * as Path from "path";
import {
   EnumDefinition,
   IR,
   ServiceDefinition,
   Step,
   TypeDefinition,
} from "./ir";

export abstract class CompileTarget {
   abstract name: string;
   constructor(private outputFolder: string) {
      if (!FS.existsSync(outputFolder)) {
         FS.mkdirSync(outputFolder, {
            recursive: true,
         });
      }
   }

   abstract start(): void;

   abstract generateType(definition: TypeDefinition): void;

   abstract generateEnum(definition: EnumDefinition): void;

   abstract generateService(definition: ServiceDefinition): void;

   abstract finalize(steps: Step[]): void;

   protected writeFile(name: string, content: string | Promise<string>) {
      if (content instanceof Promise) {
         content.then((res) =>
            FS.writeFileSync(Path.join(this.outputFolder, name), res)
         );
      } else {
         FS.writeFileSync(Path.join(this.outputFolder, name), content);
      }
   }

   protected getTemplate(name: string): string {
      let path = Path.join(__dirname, "../templates/" + name);
      let file = FS.readFileSync(path, "utf-8");

      const splitted = file.split("\n");
      let res = [];
      let ignore = false;
      for (const line of splitted) {
         if (ignore) {
            ignore = false;
         } else if (line.trim().startsWith("//@template-ignore")) {
            ignore = true;
         } else {
            res.push(line);
         }
      }

      return res.join("\n");
   }
}

export default function compile(ir: IR, target: CompileTarget) {
   // Types are verified. They are now ready to be compiled to targets

   // setState("Building for target: " + target.name);
   ir.forEach((step) => {
      const [type, def] = step;
      if (type == "type") target.generateType(def as TypeDefinition);
      else if (type == "enum") target.generateEnum(def as EnumDefinition);
      else if (type == "service")
         target.generateService(def as ServiceDefinition);
   });
   if (target.finalize) target.finalize(ir);
}
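For orientation: a concrete target only has to implement the abstract hooks above, and compile() then drives them step by step (one generate* call per IR step, finalize at the end). A minimal sketch of a hypothetical target follows; the DebugTarget name and its JSON-dumping behaviour are illustrative only and not part of this commit.

import compile, { CompileTarget } from "./compile";
import { EnumDefinition, IR, ServiceDefinition, Step, TypeDefinition } from "./ir";

// Hypothetical target that just dumps every definition it receives as JSON.
class DebugTarget extends CompileTarget {
   name = "debug";
   start() {}
   generateType(def: TypeDefinition) { this.writeFile(def.name + ".json", JSON.stringify(def)); }
   generateEnum(def: EnumDefinition) { this.writeFile(def.name + ".json", JSON.stringify(def)); }
   generateService(def: ServiceDefinition) { this.writeFile(def.name + ".json", JSON.stringify(def)); }
   finalize(steps: Step[]) { this.writeFile("steps.json", JSON.stringify(steps)); }
}

// compile() walks the IR and calls the matching generate* hook for each step:
// const ir: IR = ...;
// compile(ir, new DebugTarget("out/debug"));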

src/index.ts (new file, 62 lines)
import yargs from "yargs";
import { hideBin } from "yargs/helpers";
import startCompile, { Target } from "./process";

import dbg from "debug";
const log = dbg("app");

dbg.disable();

yargs(hideBin(process.argv))
   .version("1.0.0")
   .command(
      "compile <input>",
      "Compile source",
      (yargs) => {
         return yargs
            .positional("input", {
               describe: "Input file",
               type: "string",
               demandOption: true,
            })
            .option("definition", {
               type: "string",
               describe: "Emit definition json at specified location",
            })
            .option("output", {
               type: "string",
               describe: "Output lang and location 'ts:out/' 'c:/test'",
               alias: "o",
               coerce: (arg: string | string[] | undefined) => {
                  if (!arg) return [];
                  if (!Array.isArray(arg)) arg = [arg];
                  return arg.map((input) => {
                     const [type, output] = input.split(":", 2);
                     return {
                        type,
                        output,
                     } as Target;
                  });
               },
               array: true,
            });
      },
      (argv) => {
         if (argv.verbose) {
            dbg.enable("app");
         }
         log("Received compile command with args", argv);

         startCompile({
            input: argv.input,
            targets: argv.output as any,
            emitDefinitions: argv.definition,
         });
      }
   )
   .option("verbose", {
      alias: "v",
      type: "boolean",
      describe: "Adds additional outputs",
   })
   .strictCommands()
   .demandCommand()
   .parse();
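Assuming the package is built to lib/ and the schema file is called api.jrpc (both placeholder names, not defined in this commit), an invocation could look like the line below. The coerce handler splits each --output value at the first ':', so "ts-node:out/" becomes { type: "ts-node", output: "out/" }; the recognised type strings are the ones registered in src/process.ts.

node lib/index.js compile api.jrpc --definition api.ir.json -o ts-node:out/ -v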

src/ir.ts (new file, 232 lines)
import type { Parsed, StatementNode } from "./parser";
import dbg from "debug";
const log = dbg("app");

const builtin = ["number", "string", "boolean"];

export class IRError extends Error {
   constructor(public statement: StatementNode, message: string) {
      super("Error Compiling: " + message);
   }
}

export interface TypeFieldDefinition {
   name: string;
   type: string;
   array: boolean;
   map?: string;
}

export interface TypeDefinition {
   name: string;
   depends: string[];
   fields: TypeFieldDefinition[];
}

export interface EnumValueDefinition {
   name: string;
   value: number;
}

export interface EnumDefinition {
   name: string;
   values: EnumValueDefinition[];
}

export interface ServiceFunctionParamsDefinition {
   name: string;
   inputs: { type: string; name: string }[];
   return: string | undefined;
}
export type ServiceFunctionDefinition = ServiceFunctionParamsDefinition;

export interface ServiceDefinition {
   name: string;
   depends: string[];
   functions: ServiceFunctionDefinition[];
}

export type Step = [
   "type" | "enum" | "service",
   TypeDefinition | EnumDefinition | ServiceDefinition
];

export type IR = Step[];

export default function get_ir(parsed: Parsed): IR {
   log("Generating IR from parse output");
   let defined: string[] = [];
   let types: string[] = [];
   let enums: string[] = [];

   // Verify statements and generate steps

   let steps: Step[] = [];

   parsed.forEach((statement) => {
      log("Working on statement of type %s", statement.type);
      if (statement.type == "import")
         throw new IRError(
            statement,
            "Import statements are invalid at this step!"
         );

      if (statement.type === "type") {
         if (defined.indexOf(statement.name) >= 0) {
            throw new IRError(
               statement,
               `Type ${statement.name} already defined!`
            );
         }

         let depends: string[] = [];
         const fields = statement.fields.map<TypeFieldDefinition>((field) => {
            if (field.type !== "type_field") {
               throw new IRError(field, "Invalid statement!");
            }

            if (defined.indexOf(field.fieldtype) < 0) {
               if (builtin.indexOf(field.fieldtype) < 0) {
                  throw new IRError(
                     field,
                     `Type ${field.fieldtype} is not defined!`
                  );
               }
            } else {
               if (depends.indexOf(field.fieldtype) < 0)
                  depends.push(field.fieldtype);
            }

            if (
               field.map &&
               field.map !== "number" &&
               field.map !== "string"
            ) {
               throw new IRError(
                  field,
                  `Type ${field.map} is not valid as map key!`
               );
            }

            return {
               name: field.name,
               type: field.fieldtype,
               array: field.array,
               map: field.map,
            };
         });
         steps.push([
            statement.type,
            {
               name: statement.name,
               depends,
               fields,
            },
         ]);
         defined.push(statement.name);
         types.push(statement.name);
      } else if (statement.type === "enum") {
         if (defined.indexOf(statement.name) >= 0) {
            throw new IRError(
               statement,
               `Type ${statement.name} already defined!`
            );
         }

         let last = -1;
         let values = statement.values.map<EnumValueDefinition>((valueS) => {
            let value = last + 1;
            if (valueS.value) {
               if (valueS.value <= last) {
                  throw new IRError(
                     statement,
                     "Enum value must be larger than the previous one!"
                  );
               } else {
                  value = valueS.value;
               }
            }

            last = value;

            return {
               name: valueS.name,
               value,
            };
         });
         steps.push([
            "enum",
            {
               name: statement.name,
               values,
            } as EnumDefinition,
         ]);
         defined.push(statement.name);
         enums.push(statement.name);
      } else if (statement.type === "service") {
         if (defined.indexOf(statement.name) >= 0) {
            throw new IRError(
               statement,
               `Type ${statement.name} already defined!`
            );
         }

         let depends: string[] = [];
         let alreadyFoundFunctions = new Set<string>();
         let functions = statement.functions.map((fnc) => {
            if (alreadyFoundFunctions.has(fnc.name))
               throw new IRError(
                  fnc,
                  `Function with name ${fnc.name} already defined!`
               );
            alreadyFoundFunctions.add(fnc.name);
            if (fnc.return_type) {
               if (defined.indexOf(fnc.return_type) >= 0) {
                  if (!depends.some((a) => a === fnc.return_type))
                     depends.push(fnc.return_type);
               } else {
                  if (builtin.indexOf(fnc.return_type) < 0) {
                     throw new IRError(
                        fnc,
                        `Type ${fnc.return_type} is not defined`
                     );
                  }
               }
            }

            for (const input of fnc.inputs) {
               if (defined.indexOf(input.type) >= 0) {
                  if (!depends.some((a) => a === input.type))
                     depends.push(input.type);
               } else {
                  if (builtin.indexOf(input.type) < 0) {
                     throw new IRError(
                        fnc,
                        `Type ${input.type} is not defined`
                     );
                  }
               }
            }

            return {
               name: fnc.name,
               inputs: fnc.inputs,
               return: fnc.return_type,
            } as ServiceFunctionDefinition;
         });

         steps.push([
            "service",
            {
               name: statement.name,
               depends,
               functions,
            } as ServiceDefinition,
         ]);
      } else {
         throw new IRError(statement, "Invalid statement!");
      }
   });

   return steps;
}
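As a rough, hand-written illustration of the mapping (not output captured from this commit): a parsed type statement with one builtin field becomes a single "type" step.

// Input (one Parsed statement, location objects abbreviated):
// { type: "type", name: "User", fields: [{ type: "type_field", name: "age",
//   fieldtype: "number", array: false, location: ... }], location: ... }
//
// Resulting IR step from get_ir():
// ["type", { name: "User", depends: [], fields: [
//    { name: "age", type: "number", array: false, map: undefined } ] }]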

src/parser.ts (new file, 381 lines)
import type { Token } from "./tokenizer";

export interface DefinitionNode {
   type: string;
   location: {
      file: string;
      idx: number;
   };
}

export interface ImportStatement extends DefinitionNode {
   type: "import";
   path: string;
}

export interface TypeFieldStatement extends DefinitionNode {
   type: "type_field";
   name: string;
   fieldtype: string;
   array: boolean;
   map?: string;
}

export interface EnumValueStatement extends DefinitionNode {
   type: "enum_value";
   name: string;
   value?: number;
}

export interface EnumStatement extends DefinitionNode {
   type: "enum";
   name: string;
   values: EnumValueStatement[];
}

export interface TypeStatement extends DefinitionNode {
   type: "type";
   name: string;
   fields: TypeFieldStatement[];
}

export interface IServiceFunctionInput {
   name: string;
   type: string;
}

export interface ServiceFunctionStatement extends DefinitionNode {
   type: "service_function";
   inputs: IServiceFunctionInput[];
   name: string;
   return_type: string | undefined; // Makes it a notification
}

export interface ServiceStatement extends DefinitionNode {
   type: "service";
   name: string;
   functions: ServiceFunctionStatement[];
}

export type RootStatementNode =
   | ImportStatement
   | TypeStatement
   | ServiceStatement
   | EnumStatement;
export type StatementNode =
   | RootStatementNode
   | TypeFieldStatement
   | ServiceFunctionStatement
   | EnumValueStatement;

export type Parsed = RootStatementNode[];

export class ParserError extends Error {
   token: Token;
   constructor(message: string, token: Token) {
      super(message);
      this.token = token;
   }
}

export default function parse(tokens: Token[], file: string): Parsed {
   const tokenIterator = tokens[Symbol.iterator]();
   let currentToken: Token = tokenIterator.next().value;
   let nextToken: Token = tokenIterator.next().value;

   const eatToken = (value?: string) => {
      if (value && value !== currentToken.value) {
         throw new ParserError(
            `Unexpected token value, expected '${value}', received '${currentToken.value}'`,
            currentToken
         );
      }
      let idx = currentToken.startIdx;
      currentToken = nextToken;
      nextToken = tokenIterator.next().value;
      return idx;
   };

   const eatText = (): [string, number] => {
      checkTypes("text");
      let val = currentToken.value;
      let idx = currentToken.startIdx;
      eatToken();
      return [val, idx];
   };
   const eatNumber = (): number => {
      checkTypes("number");
      let val = Number(currentToken.value);
      if (Number.isNaN(val)) {
         throw new ParserError(
            `Value cannot be parsed as number! ${currentToken.value}`,
            currentToken
         );
      }
      eatToken();
      return val;
   };

   const checkTypes = (...types: string[]) => {
      if (types.indexOf(currentToken.type) < 0) {
         throw new ParserError(
            `Unexpected token value, expected ${types.join(" | ")}, received '${
               currentToken.value
            }'`,
            currentToken
         );
      }
   };

   // const parseUnionField = (): UnionFieldStatement => {
   //    let idx = currentToken.startIdx;
   //    let name = currentToken.value;
   //    eatToken();
   //    eatToken(":");
   //    let [type] = eatText();
   //    eatToken("=");
   //    let label = eatNumber();
   //    eatToken(";");

   //    return {
   //       type: "union_field",
   //       name,
   //       label,
   //       fieldtype: type,
   //       location: { file, idx },
   //    };
   // };

   const parseTypeField = (): TypeFieldStatement => {
      const idx = currentToken.startIdx;
      let name = currentToken.value;
      eatToken();
      eatToken(":");

      let array = false;
      let type: string;
      let mapKey: string | undefined = undefined;

      if (currentToken.type === "curly_open") {
         eatToken("{");
         [mapKey] = eatText();
         eatToken(",");
         [type] = eatText();
         eatToken("}");
      } else {
         [type] = eatText();
         if (currentToken.type === "array") {
            array = true;
            eatToken("[]");
         }
      }

      eatToken(";");
      return {
         type: "type_field",
         name,
         fieldtype: type,
         array,
         map: mapKey,
         location: { file, idx },
      };
   };

   const parseTypeStatement = (): TypeStatement => {
      const idx = eatToken("type");
      let [name] = eatText();
      eatToken("{");
      let fields: TypeFieldStatement[] = [];
      while (currentToken.type === "text" || currentToken.type === "keyword") {
         // Keywords can also be field names
         fields.push(parseTypeField());
      }

      eatToken("}");

      return {
         type: "type",
         name,
         fields,
         location: { file, idx },
      };
   };

   // const parseUnionStatement = (): UnionStatement => {
   //    const idx = eatToken("union");
   //    let [name] = eatText();
   //    eatToken("{");
   //    let fields: UnionFieldStatement[] = [];
   //    while (currentToken.type === "text") {
   //       fields.push(parseUnionField());
   //    }

   //    eatToken("}");

   //    return {
   //       type: "union",
   //       name,
   //       fields,
   //       location: { file, idx },
   //    };
   // };

   const parseImportStatement = (): ImportStatement => {
      const idx = eatToken("import");
      checkTypes("text", "string");
      let path = currentToken.value;
      if (currentToken.type === "string") {
         path = path.substring(1, path.length - 1);
      }

      eatToken();
      eatToken(";");
      return {
         type: "import",
         path,
         location: { file, idx },
      };
   };

   const parseEnumValue = (): EnumValueStatement => {
      let [name, idx] = eatText();
      let value = undefined;
      if (currentToken.type === "equals") {
         eatToken("=");
         value = eatNumber();
      }
      return {
         type: "enum_value",
         name,
         value,
         location: { file, idx },
      };
   };

   const parseEnumStatement = (): EnumStatement => {
      let idx = eatToken("enum");
      let [name] = eatText();
      eatToken("{");
      let values: EnumValueStatement[] = [];
      let next = currentToken.type === "text";
      while (next) {
         values.push(parseEnumValue());
         if (currentToken.type === "comma") {
            eatToken(",");
            next = true;
         } else {
            next = false;
         }
      }
      eatToken("}");

      return {
         type: "enum",
         name: name,
         values: values,
         location: { file, idx },
      };
   };

   const parseServiceFunction = (
      notification?: boolean
   ): ServiceFunctionStatement => {
      const [name, idx] = eatText();

      eatToken("(");

      let input_streaming: string | undefined = undefined;
      let inputs = [];

      if (currentToken.value !== ")") {
         while (true) {
            const [name] = eatText();
            eatToken(":");
            const [type] = eatText();
            inputs.push({ name, type });
            if (currentToken.value !== ",") break;
            eatToken(",");
         }
      }

      eatToken(")");

      let return_type = undefined;
      if (!notification) {
         eatToken(":");

         return_type = eatText()[0];
      }

      eatToken(";");

      return {
         type: "service_function",
         name,
         location: {
            file,
            idx,
         },
         inputs,
         return_type,
      };
   };

   const parseServiceStatement = (): ServiceStatement => {
      let idx = eatToken("service");
      let [name] = eatText();
      eatToken("{");
      let functions: ServiceFunctionStatement[] = [];

      while (currentToken.type === "text") {
         let notification = false;
         if (currentToken.value == "notification") {
            eatText();
            notification = true;
         }
         functions.push(parseServiceFunction(notification));
      }
      eatToken("}");

      return {
         type: "service",
         name: name,
         functions,
         location: { file, idx },
      };
   };

   const parseStatement = () => {
      if (currentToken.type === "keyword") {
         switch (currentToken.value) {
            case "type":
               return parseTypeStatement();
            // case "union":
            //    return parseUnionStatement();
            case "import":
               return parseImportStatement();
            case "enum":
               return parseEnumStatement();
            case "service":
               return parseServiceStatement();
            default:
               throw new ParserError(
                  `Unknown keyword ${currentToken.value}`,
                  currentToken
               );
         }
      } else {
         throw new ParserError(
            `Invalid statement! ${currentToken.value}`,
            currentToken
         );
      }
   };

   const nodes: RootStatementNode[] = [];
   while (currentToken) {
      nodes.push(parseStatement());
   }

   return nodes;
}
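Read together, the parse functions above accept a small schema language. The following is a sketch of a source file they would accept; the syntax is inferred from the parser in this commit (the file name in the import is a placeholder), it is not a shipped example.

import "./other.jrpc";

enum Color {
   Red,
   Green = 5
}

type User {
   name: string;
   tags: string[];
   scores: { string, number };
}

service UserService {
   GetUser(id: number): User;
   notification Ping();
}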

src/process.ts (new file, 154 lines)
import dbg from "debug";
import * as FS from "fs";
import Color from "chalk";
import * as Path from "path";
import tokenize, { TokenizerError } from "./tokenizer";
import parse, { Parsed, ParserError } from "./parser";
import get_ir, { IR } from "./ir";
import compile, { CompileTarget } from "./compile";
import { ESMTypescriptTarget, NodeJSTypescriptTarget } from "./targets/typescript";

const log = dbg("app");

const Targets = new Map<string, typeof CompileTarget>();

Targets.set("ts-esm", ESMTypescriptTarget);
Targets.set("ts-node", NodeJSTypescriptTarget);

function indexToLineAndCol(src: string, index: number) {
   let line = 1;
   let col = 1;
   for (let i = 0; i < index; i++) {
      if (src.charAt(i) === "\n") {
         line++;
         col = 1;
      } else {
         col++;
      }
   }

   return { line, col };
}

const fileCache = new Map<string, string>();
function getFile(name: string) {
   if (fileCache.has(name)) return fileCache.get(name);
   else {
      try {
         const data = FS.readFileSync(name, "utf-8");
         fileCache.set(name, data);
         return data;
      } catch (err) {
         printError(new Error(`Cannot open file ${name};`), null, 0);
      }
   }
   return undefined;
}

function printError(err: Error, file: string | null, idx: number) {
   let loc = { line: 0, col: 0 };
   if (file != null) {
      const data = getFile(file);
      if (data) loc = indexToLineAndCol(data, idx);
   }

   console.error(`${Color.red("ERROR: at")} ${file}:${loc.line}:${loc.col}`);
   console.error(" ", err.message);
   log(err.stack);
}

export type Target = {
   type: string;
   output: string;
};

export type CompileOptions = {
   input: string;
   targets: Target[];
   emitDefinitions?: string;
};

type ProcessContext = {
   options: CompileOptions;
   processedFiles: Set<string>;
};

function processFile(ctx: ProcessContext, file: string): Parsed | null {
   file = Path.resolve(file);
   if (ctx.processedFiles.has(file)) {
      log("Skipping file %s since it has already been processed", file);
      return null;
   }
   log("Processing file %s", file);

   const content = getFile(file);
   if (!content) throw new Error("Could not open file " + file);
   try {
      log("Tokenizing %s", file);
      const tokens = tokenize(content);
      log("Parsing %s", file);
      const parsed = parse(tokens, file);

      log("Resolving imports of %s", file);
      let resolved = parsed
         .map((statement) => {
            if (statement.type == "import") {
               const base = Path.dirname(file);
               const resolved = Path.resolve(Path.join(base, statement.path));
               return processFile(ctx, resolved);
            } else {
               return statement;
            }
         })
         .filter((e) => !!e)
         .flat(1) as Parsed;
      return resolved;
   } catch (err) {
      if (err instanceof TokenizerError) {
         printError(err, file, err.index);
      } else if (err instanceof ParserError) {
         printError(err, file, err.token.startIdx);
      } else {
         throw err;
      }

      return null;
   }
}

export default function startCompile(options: CompileOptions) {
   const ctx = {
      options,
      processedFiles: new Set(),
   } as ProcessContext;

   let ir: IR | undefined = undefined;
   if (options.input.endsWith(".json")) {
      ir = JSON.parse(FS.readFileSync(options.input, "utf-8"));
   } else {
      const parsed = processFile(ctx, options.input);
      // console.log(([...parsed].pop() as any).functions)
      if (!parsed)
         throw new Error("Error compiling: Parse output is undefined!");

      ir = get_ir(parsed);
   }

   if (!ir) throw new Error("Error compiling: Cannot get IR");

   if (options.emitDefinitions) {
      FS.writeFileSync(options.emitDefinitions, JSON.stringify(ir));
   }

   options.targets.forEach((target) => {
      const tg = Targets.get(target.type) as any;
      if (!tg) {
         console.log(Color.red("ERROR:"), "Target not supported!");
         return;
      }
      compile(ir, new tg(target.output)); //TODO: implement
   });
}
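A hypothetical programmatic call of this entry point (file names are placeholders); this mirrors what the CLI handler in src/index.ts does after parsing its arguments:

import startCompile from "./process";

startCompile({
   input: "api.jrpc", // schema source, or a previously emitted .json IR file
   emitDefinitions: "api.ir.json", // optional: dump the IR to disk
   targets: [{ type: "ts-node", output: "out/" }],
});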

src/targets/typescript.ts (new file, 426 lines)
import {
   TypeDefinition,
   ServiceDefinition,
   EnumDefinition,
   TypeFieldDefinition,
   Step,
} from "../ir";

import { CompileTarget } from "../compile";

type lineAppender = (ind: number, line: string | string[]) => void;

const conversion = {
   boolean: "boolean",
   number: "number",
   string: "string",
};

function toJSType(type: string): string {
   return (conversion as any)[type] || type;
}

export class TypescriptTarget extends CompileTarget {
   name = "Typescript";

   flavour: "esm" | "node" = "node";

   start() {}

   private generateImport(imports: string, path: string) {
      return `import ${imports} from "${
         path + (this.flavour === "esm" ? ".ts" : "")
      }";\n`;
   }

   private generateImports(
      a: lineAppender,
      def: TypeDefinition | ServiceDefinition
   ) {
      a(
         0,
         def.depends.map((dep) =>
            this.generateImport(`${dep}, { verify_${dep} }`, "./" + dep)
         )
      );
   }

   private getFileName(typename: string) {
      return typename + ".ts";
   }

   private writeFormattedFile(file: string, code: string) {
      this.writeFile(file, code);
      //TODO: Add Prettier back
      // const formatted = format(code, {
      //    parser: "typescript",
      //    tabWidth: 3,
      // });

      // this.writeFile(file, formatted);
   }

   generateType(def: TypeDefinition) {
      let lines: string[] = [];
      const a: lineAppender = (i, t) => {
         if (!Array.isArray(t)) {
            t = [t];
         }
         t.forEach((l) => lines.push(" ".repeat(i) + l.trim()));
      };

      this.generateImports(a, def);
      a(0, `export default class ${def.name} {`);
      a(
         1,
         def.fields.map((field) => {
            let type = "";
            if (field.array) {
               type = toJSType(field.type) + "[]";
            } else if (field.map) {
               type = `Map<${toJSType(field.map)}, ${toJSType(field.type)}>`;
            } else {
               type = toJSType(field.type);
            }
            return `${field.name}?: ${type}; `;
         })
      );

      a(0, ``);
      a(1, `constructor(init?:Partial<${def.name}>){`);
      a(2, `if(init){`);
      def.fields.forEach((field) => {
         a(3, `if(init["${field.name}"])`);
         a(4, `this.${field.name} = init["${field.name}"];`);
      });
      a(2, `}`);
      a(1, `}`);

      a(0, ``);

      a(0, ``);

      a(1, `static verify(data: ${def.name}){`);
      a(2, `return verify_${def.name}(data);`);
      a(1, `}`);
      a(0, "}");

      a(0, `export function verify_${def.name}(data: ${def.name}): boolean {`);
      {
         def.fields.forEach((field) => {
            a(
               1,
               `if(data["${field.name}"] !== null && data["${field.name}"] !== undefined ) {`
            );

            const ap: lineAppender = (i, l) => a(i + 2, l);
            const verifyType = () => {};
            a(2, "// TODO: Implement");
            //TODO: Build verification
            // if (field.array) {
            //    a(2, `if(!Array.isArray(data["${field.name}"])) return false`);
            //    a(2, `if(!(data["${field.name}"].some(e=>))) return false`)
            //    serializeArray(field, `data["${field.name}"]`, ap);
            // } else if (field.map) {
            //    serializeMap(field, `data["${field.name}"]`, ap);
            // } else {
            //    serializeType(field, `data["${field.name}"]`, true, ap);
            // }
            a(1, "}");
            a(0, ``);
         });
         a(1, `return true`);
      }
      a(0, `}`);

      this.writeFormattedFile(this.getFileName(def.name), lines.join("\n"));
   }

   generateEnum(def: EnumDefinition) {
      let lines: string[] = [];
      const a: lineAppender = (i, t) => {
         if (!Array.isArray(t)) {
            t = [t];
         }
         t.forEach((l) => lines.push(" ".repeat(i) + l.trim()));
      };

      a(0, `enum ${def.name} {`);
      for (const value of def.values) {
         a(1, `${value.name}=${value.value},`);
      }
      a(0, `}`);

      a(0, `export default ${def.name}`);

      a(
         0,
         `export function verify_${def.name} (data: ${def.name}): boolean {`
      );
      a(1, `return ${def.name}[data] != undefined`);
      a(0, "}");

      this.writeFormattedFile(this.getFileName(def.name), lines.join("\n"));
   }

   generateServiceClient(def: ServiceDefinition) {
      let lines: string[] = [];
      const a: lineAppender = (i, t) => {
         if (!Array.isArray(t)) {
            t = [t];
         }
         t.forEach((l) => lines.push(" ".repeat(i) + l.trim()));
      };

      this.generateImports(a, def);

      a(0, `export type {`);
      def.depends.forEach((dep) => {
         a(1, `${dep},`);
      });
      a(0, `}`);

      this.writeFile(
         "service_client.ts",
         this.generateImport(
            "{ RequestObject, ResponseObject, ErrorCodes, Logging }",
            "./service_base"
         ) +
            "\n\n" +
            this.getTemplate("ts_service_client.ts")
      );

      a(
         0,
         this.generateImport(
            "{ Service, ServiceProvider, getRandomID }",
            "./service_client"
         )
      );

      a(0, `export class ${def.name} extends Service {`);
      a(1, `constructor(provider: ServiceProvider){`);
      a(2, `super(provider, "${def.name}");`);
      a(1, `}`);

      for (const fnc of def.functions) {
         const params = fnc.inputs
            .map((e) => `${e.name}: ${toJSType(e.type)}`)
            .join(",");
         //TODO: Prio 1 : Verify response!
         //TODO: Prio 2 : Add optional parameters to this and the declaration file
         //TODO: Prio 3 : Maybe verify params? But the server will do this regardless so... Maybe not?
         if (!fnc.return) {
            a(1, `${fnc.name}(${params}): void {`);
            a(2, `this._provider.sendMessage({`);
            a(3, `jsonrpc: "2.0",`);
            a(3, `method: "${def.name}.${fnc.name}",`);
            a(3, `params: [...arguments]`);
            a(2, `});`);
            a(1, `}`);
         } else {
            const retType = fnc.return ? toJSType(fnc.return) : "void";
            a(1, `${fnc.name}(${params}): Promise<${retType}> {`);
            a(2, `return new Promise<${retType}>((ok, err) => {`);
            a(3, `this._provider.sendMessage({`);
            a(4, `jsonrpc: "2.0",`);
            a(4, `id: getRandomID(16),`);
            a(4, `method: "${def.name}.${fnc.name}",`);
            a(4, `params: [...arguments]`);
            a(3, `}, {`);
            a(4, `ok, err`);
            a(3, `});`);
            a(2, `});`);
            a(1, `}`);
         }
         a(0, ``);
      }

      a(0, `}`);

      this.writeFormattedFile(
         this.getFileName(def.name + "_client"),
         lines.join("\n")
      );
   }

   generateServiceServer(def: ServiceDefinition) {
      let lines: string[] = [];
      const a: lineAppender = (i, t) => {
         if (!Array.isArray(t)) {
            t = [t];
         }
         t.forEach((l) => lines.push(" ".repeat(i) + l.trim()));
      };

      this.writeFile(
         "service_server.ts",
         this.generateImport(
            "{ RequestObject, ResponseObject, ErrorCodes, Logging }",
            "./service_base"
         ) +
            "\n\n" +
            this.getTemplate("ts_service_server.ts")
      );

      this.generateImports(a, def);

      a(0, `export type {`);
      def.depends.forEach((dep) => {
         a(1, `${dep},`);
      });
      a(0, `}`);

      a(0, this.generateImport("{ Service }", "./service_server"));

      a(0, `export abstract class ${def.name}<T> extends Service<T> {`);
      a(1, `public name = "${def.name}";`);
      a(1, `constructor(){`);
      a(2, `super();`);
      for (const fnc of def.functions) {
         a(2, `this.functions.add("${fnc.name}")`);
      }
      a(1, `}`);
      a(0, ``);

      for (const fnc of def.functions) {
         const params = [
            ...fnc.inputs.map((e) => `${e.name}: ${toJSType(e.type)}`),
            `ctx: T`,
         ].join(", ");
         const retVal = fnc.return ? `Promise<${toJSType(fnc.return)}>` : `void`;
         a(1, `abstract ${fnc.name}(${params}): ${retVal};`);

         // a(0, ``);

         a(1, `_${fnc.name}(params: any[] | any, ctx: T): ${retVal} {`);
         a(2, `let p: any[] = [];`);
         a(2, `if(Array.isArray(params)){`);
         //TODO: Verify params!
         a(3, `p = params;`);
         a(2, `} else {`);
         for (const param of fnc.inputs) {
            a(3, `p.push(params["${param.name}"])`);
         }
         a(2, `}`);
         a(2, `p.push(ctx);`); //TODO: Either this or [...p, ctx] but idk
         a(2, `return this.${fnc.name}.call(this, ...p);`);
         a(1, `}`);
         a(0, ``);
      }

      a(0, `}`);

      this.writeFormattedFile(
         this.getFileName(def.name + "_server"),
         lines.join("\n")
      );
   }

   generateService(def: ServiceDefinition) {
      this.writeFile("service_base.ts", this.getTemplate("ts_service_base.ts"));
      this.generateServiceClient(def);
      this.generateServiceServer(def);
   }

   finalize(steps: Step[]) {
      let linesClient: string[] = [];
      let linesServer: string[] = [];

      const ac: lineAppender = (i, t) => {
         if (!Array.isArray(t)) {
            t = [t];
         }
         t.forEach((l) => linesClient.push(" ".repeat(i) + l.trim()));
      };

      const as: lineAppender = (i, t) => {
         if (!Array.isArray(t)) {
            t = [t];
         }
         t.forEach((l) => linesServer.push(" ".repeat(i) + l.trim()));
      };

      let lines: string[] = [];
      const a: lineAppender = (i, t) => {
         if (!Array.isArray(t)) {
            t = [t];
         }
         t.forEach((l) => lines.push(" ".repeat(i) + l.trim()));
      };

      let hasService = false;
      steps.forEach(([type, def]) => {
         switch (type) {
            case "type":
               a(0, this.generateImport(`${def.name}, { verify_${def.name} }`, "./" + def.name));

               a(0, `export { verify_${def.name} }`);
               a(0, `export type { ${def.name} }`);
               a(0, ``);
               break;
            case "enum":
               a(0, this.generateImport(`${def.name}, { verify_${def.name} }`, "./" + def.name));
               a(0, `export { ${def.name}, verify_${def.name} }`);
               a(0, ``);
               break;

            case "service":
               let ext = this.flavour == "esm" ? ".ts" : "";
               if (!hasService) {
                  hasService = true;
                  ac(0, `export * from "./service_client${ext}"`);
                  ac(0, ``);

                  as(0, `export * from "./service_server${ext}"`);
                  as(0, ``);

                  a(0, `export * as Client from "./index_client${ext}"`);
                  a(0, `export * as Server from "./index_server${ext}"`);
                  a(0, `export { Logging } from "./service_base${ext}"`);
                  a(0, ``);
                  //TODO: Export service globals
               }

               ac(0, `export { ${def.name} } from "./${def.name}_client${ext}"`);
               as(0, `export { ${def.name} } from "./${def.name}_server${ext}"`);
               ac(0, ``);
               as(0, ``);
               break;
         }
      });

      this.writeFormattedFile(this.getFileName("index"), lines.join("\n"));

      this.writeFormattedFile(
         this.getFileName("index_client"),
         linesClient.join("\n")
      );

      this.writeFormattedFile(
         this.getFileName("index_server"),
         linesServer.join("\n")
      );
   }
}

export class ESMTypescriptTarget extends TypescriptTarget {
   name = "ts-esm";
   flavour: "esm" = "esm";
}

export class NodeJSTypescriptTarget extends TypescriptTarget {
   name = "ts-node";
   flavour: "node" = "node";
}
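To make the template strings above concrete: for a hypothetical EnumDefinition { name: "Color", values: [{ name: "Red", value: 0 }, { name: "Green", value: 1 }] }, generateEnum() would write a Color.ts roughly like the following (exact indentation depends on the lineAppender's indent string):

enum Color {
 Red=0,
 Green=1,
}
export default Color
export function verify_Color (data: Color): boolean {
 return Color[data] != undefined
}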

src/tokenizer.ts (new file, 93 lines)
export type TokenTypes =
   | "space"
   | "comment"
   | "string"
   | "keyword"
   | "colon"
   | "semicolon"
   | "comma"
   | "equals"
   | "curly_open"
   | "curly_close"
   | "bracket_open"
   | "bracket_close"
   | "array"
   | "questionmark"
   | "number"
   | "text";

export type Token = {
   type: TokenTypes;
   value: string;
   startIdx: number;
   endIdx: number;
};

type Matcher = (input: string, index: number) => undefined | Token;

export class TokenizerError extends Error {
   index: number;
   constructor(message: string, index: number) {
      super(message);
      this.index = index;
   }
}

function regexMatcher(regex: string | RegExp, type: TokenTypes): Matcher {
   if (typeof regex === "string") regex = new RegExp(regex);

   return (input: string, index: number) => {
      let matches = input.substring(index).match(regex as RegExp);
      if (!matches || matches.length <= 0) return undefined;

      return {
         type,
         value: matches[0],
         startIdx: index,
         endIdx: index + matches[0].length,
      } as Token;
   };
}

const matcher = [
   regexMatcher(/^\s+/, "space"),
   regexMatcher(/^(\/\*)(.|\s)*?(\*\/)/g, "comment"),
   regexMatcher(/^\/\/.+/, "comment"),
   regexMatcher(/^#.+/, "comment"),
   regexMatcher(/^".*?"/, "string"),
   // regexMatcher(/(?<=^")(.*?)(?=")/, "string"),
   regexMatcher(/^(type|enum|import|service)\b/, "keyword"),
   regexMatcher(/^\:/, "colon"),
   regexMatcher(/^\;/, "semicolon"),
   regexMatcher(/^\,/, "comma"),
   regexMatcher(/^\=/, "equals"),
   regexMatcher(/^{/, "curly_open"),
   regexMatcher(/^}/, "curly_close"),
   regexMatcher(/^\(/, "bracket_open"),
   regexMatcher(/^\)/, "bracket_close"),
   regexMatcher(/^\[\]/, "array"),
   regexMatcher(/^\?/, "questionmark"),
   regexMatcher(/^[\.0-9]+/, "number"),
   regexMatcher(/^[a-zA-Z_]([a-zA-Z0-9_]?)+/, "text"),
];

export default function tokenize(input: string) {
   let index = 0;
   let tokens: Token[] = [];
   while (index < input.length) {
      const matches = matcher.map((m) => m(input, index)).filter((e) => !!e);
      let match = matches[0];
      if (match) {
         if (match.type !== "space" && match.type !== "comment") {
            tokens.push(match);
         }
         index += match.value.length;
      } else {
         throw new TokenizerError(
            `Unexpected token '${input.substring(index, index + 1)}'`,
            index
         );
      }
   }
   return tokens;
}
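A small sanity check of the matcher list (the input string and the shown values are illustrative): the first matcher in the array that matches at the current index wins, and whitespace and comments are matched but dropped before the tokens are returned.

import tokenize from "./tokenizer";

const tokens = tokenize('type User { name: string; } // demo');
// → [ { type: "keyword", value: "type", startIdx: 0, endIdx: 4 },
//     { type: "text", value: "User", ... }, { type: "curly_open", value: "{", ... },
//     { type: "text", value: "name", ... }, { type: "colon", value: ":", ... },
//     { type: "text", value: "string", ... }, { type: "semicolon", value: ";", ... },
//     { type: "curly_close", value: "}", ... } ]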