Compare commits: 46aff0c61b ... 137a3659b7

4 Commits:

- 137a3659b7
- b3b202c9f9
- 6e947bde57
- a291851b5a

README.md
@@ -6,6 +6,9 @@ Type/Service definition language and code generator for json-rpc 2.0. Currently
 | ------- | --------------------------------- |
 | ts-node | Typescript for NodeJS             |
 | ts-esm  | Typescript in ESM format for Deno |
+| rust    | Rust                              |
+| dart    | Dart                              |
+| c#      | C#                                |
 
 ## Usage
 
@@ -23,7 +26,7 @@ enum TestEnum {
 type Test {
     testen: TestEnum;
     someString: string;
-    someNumber: number;
+    someNumber?: number;
     array: string[];
     map: {number, TestEnum};
 }
@@ -44,4 +47,8 @@ Then run the generator like this `jrpc compile test.jrpc -o=ts-node:output/`.
 
 This will generate the Client and Server code in the specified folder.
 
-//TODO: Make Documentation better
+## TODOS
+
+1. Documentation
+2. Null Checks/Enforcements in all languages
+3. More and better tests

@@ -1,5 +1,5 @@
 type TestAtom {
-    val_number: float;
-    val_boolean: boolean;
-    val_string: string;
+    val_number?: float;
+    val_boolean?: boolean;
+    val_string?: string;
 }

@@ -1,6 +1,6 @@
 {
   "name": "@hibas123/jrpcgen",
-  "version": "1.2.0",
+  "version": "1.2.6",
   "main": "lib/index.js",
   "license": "MIT",
   "packageManager": "yarn@3.1.1",

@@ -4,13 +4,15 @@ import yargs from "yargs";
 import { hideBin } from "yargs/helpers";
 import startCompile, { Target, Targets } from "./process";
 
+const pkg = require("../package.json");
+
 import dbg from "debug";
 const log = dbg("app");
 
 dbg.disable();
 
 yargs(hideBin(process.argv))
-    .version("1.0.0")
+    .version(pkg.version)
     .command(
         "compile <input>",
         "Compile source",
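
The `--version` output of the CLI now comes from package.json instead of a hard-coded string, so it can no longer drift from the published package version. A minimal sketch of the pattern (the `.parse()` call and the trimmed-down setup are illustrative; the real CLI registers a `compile` command as shown above):

```ts
import yargs from "yargs";
import { hideBin } from "yargs/helpers";

// Read the version once from the package manifest so `--version` always matches it.
const pkg = require("../package.json");

yargs(hideBin(process.argv))
    .version(pkg.version) // previously hard-coded as "1.0.0"
    .parse();
```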

@@ -14,6 +14,7 @@ export interface TypeFieldDefinition {
     name: string;
     type: string;
     array: boolean;
+    optional: boolean;
     map?: string;
 }
 
@@ -127,6 +128,7 @@ export default function get_ir(parsed: Parsed): IR {
                 type: field.fieldtype,
                 array: field.array,
                 map: field.map,
+                optional: field.optional,
             };
         });
         steps.push([
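
A field in the intermediate representation now carries the `optional` flag alongside `array` and `map`. A minimal sketch of such an IR entry; the import path and the concrete values are assumptions for illustration, only the shape follows the `TypeFieldDefinition` interface shown above:

```ts
import type { TypeFieldDefinition } from "./ir"; // import path assumed; adjust to the actual module layout

// Hypothetical IR entry for the README example field `someNumber?: number;`
const exampleField: TypeFieldDefinition = {
    name: "someNumber",
    type: "number",
    array: false,
    optional: true, // the new flag, propagated from the parser into the IR
    map: undefined,
};
```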

@@ -16,6 +16,7 @@ export interface ImportStatement extends DefinitionNode {
 export interface TypeFieldStatement extends DefinitionNode {
     type: "type_field";
     name: string;
+    optional: boolean;
     fieldtype: string;
     array: boolean;
     map?: string;
@@ -139,8 +140,7 @@ export default function parse(tokens: Token[], file: string): Parsed {
     const checkTypes = (...types: string[]) => {
         if (types.indexOf(currentToken.type) < 0) {
             throw new ParserError(
-                `Unexpected token value, expected ${types.join(" | ")}, received '${
-                    currentToken.value
+                `Unexpected token value, expected ${types.join(" | ")}, received '${currentToken.value
                 }'`,
                 currentToken
             );
@@ -170,6 +170,11 @@ export default function parse(tokens: Token[], file: string): Parsed {
         const idx = currentToken.startIdx;
         let name = currentToken.value;
         eatToken();
+        let optional = false;
+        if (currentToken.type === "questionmark") {
+            eatToken("?");
+            optional = true;
+        }
         eatToken(":");
 
         let array = false;
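
The hunk above is the core of the feature on the parsing side: after reading the field name, the parser peeks at the current token, consumes a `?` if one is present, and records the result in an `optional` flag that later ends up on the field statement. A condensed, self-contained sketch of the same idea; the `Token` shape, `parseFieldHead`, and the cursor object are simplified stand-ins, not the real parser API:

```ts
// Simplified stand-in for the parser's currentToken/eatToken machinery; illustrative only.
type Token = { type: string; value: string };

function parseFieldHead(tokens: Token[], pos: { i: number }) {
    const name = tokens[pos.i++].value; // field name, e.g. "someNumber"

    let optional = false;
    if (tokens[pos.i]?.type === "questionmark") {
        pos.i++; // consume the trailing "?"
        optional = true;
    }

    if (tokens[pos.i++].value !== ":") throw new Error("expected ':' after field name");
    const fieldtype = tokens[pos.i++].value; // e.g. "number"

    return { name, fieldtype, optional };
}

// parseFieldHead(
//     [{ type: "text", value: "someNumber" }, { type: "questionmark", value: "?" },
//      { type: "colon", value: ":" }, { type: "text", value: "number" }], { i: 0 })
//   -> { name: "someNumber", fieldtype: "number", optional: true }
```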
@@ -198,6 +203,7 @@ export default function parse(tokens: Token[], file: string): Parsed {
             array,
             map: mapKey,
             location: { file, idx },
+            optional
         };
     };
 
@@ -303,9 +309,9 @@ export default function parse(tokens: Token[], file: string): Parsed {
     eatToken("(");
     let args: string[] = [];
     let first = true;
-    while(currentToken.value !== ")") {
-        if(first) {
-            first= false;
+    while (currentToken.value !== ")") {
+        if (first) {
+            first = false;
         } else {
             eatToken(",");
         }
@@ -399,8 +405,8 @@ export default function parse(tokens: Token[], file: string): Parsed {
     let functions: ServiceFunctionStatement[] = [];
 
     while (currentToken.type !== "curly_close") {
-        let decorators:Decorators = new Map;
-        while(currentToken.type == "at") {
+        let decorators: Decorators = new Map;
+        while (currentToken.type == "at") {
             parseFunctionDecorator(decorators);
         }
 
@@ -427,8 +433,8 @@ export default function parse(tokens: Token[], file: string): Parsed {
 
     let [key] = eatText()
     let value: string = undefined;
-    if(currentToken.type == "string") {
-        value = currentToken.value.slice(1,-1);
+    if (currentToken.type == "string") {
+        value = currentToken.value.slice(1, -1);
         eatToken();
     } else {
         [value] = eatText()
@@ -437,8 +443,8 @@ export default function parse(tokens: Token[], file: string): Parsed {
     eatToken(";");
 
     return {
-        type :"define",
-        location: {file, idx},
+        type: "define",
+        location: { file, idx },
         key,
         value
     }

@@ -18,7 +18,7 @@ import { ZIGTarget } from "./targets/zig";
 import { DartTarget } from "./targets/dart";
 import { URL } from "url";
 
-class CatchedError extends Error {}
+class CatchedError extends Error { }
 
 const log = dbg("app");
 

@@ -1,6 +1,6 @@
 import chalk from "chalk";
 import { CompileTarget } from "../compile";
-import { TypeDefinition, EnumDefinition, ServiceDefinition, Step } from "../ir";
+import { TypeDefinition, EnumDefinition, ServiceDefinition, Step, IR } from "../ir";
 import { lineAppender, LineAppender } from "../utils";
 
 const conversion = {
@@ -71,17 +71,36 @@ export class RustTarget extends CompileTarget<{ rust_crate: string }> {
         a(0, `#[derive(Clone, Debug, Serialize, Deserialize)]`);
         a(0, `pub struct ${definition.name} {`);
         for (const field of definition.fields) {
+            a(1, `#[allow(non_snake_case)]`);
+
+            let fn = `pub ${field.name}:`;
+            if (field.name == "type") {
+                // TODO: Add other keywords as well!
+                console.log(
+                    chalk.yellow("[RUST] WARNING:"),
+                    "Field name 'type' is not allowed in Rust. Renaming to 'type_'"
+                );
+                fn = `pub type_:`;
+                a(1, `#[serde(rename = "type")]`);
+            }
+            let opts = "";
+            let opte = "";
+            if (field.optional) {
+                opts = "Option<";
+                opte = ">";
+            }
+
             if (field.array) {
-                a(1, `pub ${field.name}: Vec<${toRustType(field.type)}>,`);
+                a(1, `${fn} ${opts}Vec<${toRustType(field.type)}>${opte},`);
             } else if (field.map) {
                 a(
                     1,
-                    `pub ${field.name}: HashMap<${toRustType(
+                    `${fn} ${opts}HashMap<${toRustType(
                         field.map
-                    )}, ${toRustType(field.type)}>,`
+                    )}, ${toRustType(field.type)}>${opte},`
                 );
             } else {
-                a(1, `pub ${field.name}: ${toRustType(field.type)},`);
+                a(1, `${fn} ${opts}${toRustType(field.type)}${opte},`);
            }
         }
         a(0, `}`);
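
To make the `opts`/`opte` wrapping concrete, here is a small standalone sketch (not the generator itself) that builds one struct field the same way and shows, in the comment, the Rust line it would emit for an optional field. The field data and the `toRustType` stub are illustrative; the generator's real conversion table may differ:

```ts
// Stand-in conversion table, illustrative only.
const toRustType = (t: string): string =>
    (({ number: "f64", string: "String", boolean: "bool" } as Record<string, string>)[t] ?? t);

function rustField(field: { name: string; type: string; array: boolean; optional: boolean }): string {
    const head = field.name === "type" ? "pub type_:" : `pub ${field.name}:`;
    const [opts, opte] = field.optional ? ["Option<", ">"] : ["", ""];
    const inner = field.array ? `Vec<${toRustType(field.type)}>` : toRustType(field.type);
    return `${head} ${opts}${inner}${opte},`;
}

// rustField({ name: "someNumber", type: "number", array: false, optional: true })
//   -> "pub someNumber: Option<f64>,"
```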
@@ -101,7 +120,7 @@ export class RustTarget extends CompileTarget<{ rust_crate: string }> {
         a(0, `#[repr(i64)]`);
         a(
             0,
-            "#[derive(Clone, Copy, Debug, Eq, PartialEq, IntEnum, Deserialize, Serialize)]"
+            "#[derive(Clone, Copy, Debug, Eq, PartialEq, IntEnum)]"
         );
         a(0, `pub enum ${definition.name} {`);
         for (const field of definition.values) {
@@ -142,6 +161,7 @@ export class RustTarget extends CompileTarget<{ rust_crate: string }> {
             let ret = fnc.return
                 ? typeToRust(fnc.return.type, fnc.return.array)
                 : "()";
+            a(1, `#[allow(non_snake_case)]`);
             a(1, `pub async fn ${fnc.name}(&self, ${params}) -> Result<${ret}> {`);
             a(2, `let l_req = JRPCRequest {`);
             a(3, `jsonrpc: "2.0".to_owned(),`);
@@ -206,6 +226,7 @@ export class RustTarget extends CompileTarget<{ rust_crate: string }> {
             let ret = fnc.return
                 ? typeToRust(fnc.return.type, fnc.return.array)
                 : "()";
+            a(1, `#[allow(non_snake_case)]`);
             a(1, `async fn ${fnc.name}(&self, ${params}) -> Result<${ret}>;`);
         }
         a(0, `}`);
@@ -216,6 +237,7 @@ export class RustTarget extends CompileTarget<{ rust_crate: string }> {
         a(0, `}`);
         a(0, ``);
         a(0, `impl ${definition.name}Handler {`);
+        //TODO: Maybe add a new definition like, pub fn new2<T>(implementation: T) where T: ${definition.name} + Sync + Send + 'static {}
         a(
             1,
             `pub fn new(implementation: Box<dyn ${definition.name} + Sync + Send + 'static>) -> Arc<Self> {`
@@ -235,6 +257,7 @@ export class RustTarget extends CompileTarget<{ rust_crate: string }> {
         a(1, `fn get_id(&self) -> String { "${definition.name}".to_owned() }`);
         a(0, ``);
 
+        a(1, `#[allow(non_snake_case)]`);
         a(
             1,
             `async fn handle(&self, msg: &JRPCRequest, function: &str) -> Result<(bool, Value)> {`

@@ -33,8 +33,7 @@ export class TypescriptTarget extends CompileTarget {
     }
 
     private generateImport(imports: string, path: string) {
-        return `import ${imports} from "${
-            path + (this.flavour === "esm" ? ".ts" : "")
+        return `import ${imports} from "${path + (this.flavour === "esm" ? ".ts" : "")
         }";\n`;
     }
 
@@ -89,7 +88,7 @@ export class TypescriptTarget extends CompileTarget {
                 } else {
                     type = toJSType(field.type);
                 }
-                return `${field.name}?: ${type}; `;
+                return `${field.name}${field.optional ? "?" : ""}: ${type}; `;
             })
         );
 
@@ -125,10 +124,18 @@ export class TypescriptTarget extends CompileTarget {
         );
         a(1, `let res = new ${def.name}() as any;`);
         def.fields.forEach((field) => {
+            if (field.optional) {
             a(
                 1,
                 `if(data["${field.name}"] !== null && data["${field.name}"] !== undefined) {`
             );
+            } else {
+                a(
+                    1,
+                    `if(data["${field.name}"] === null || data["${field.name}"] === undefined) throw new VerificationError("${def.name}", "${field.name}", data["${field.name}"]);`
+                );
+                a(1, `else {`);
+            }
             if (field.array) {
                 a(
                     2,
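
The branch above means the generated `apply_*` functions now treat optional fields as skippable while still rejecting missing required fields. A hand-written approximation of the output for one required and one optional field from the README's `Test` type; this is not the literal generator output, the `String`/`Number` conversions are simplified, and the `VerificationError` stand-in is trimmed down (the real class appears in the diff further down):

```ts
// Trimmed stand-in for the runtime helper class; see the utils diff below.
class VerificationError extends Error {
    constructor(public type?: string, public field?: string, public value?: any) {
        super("Parameter verification failed!");
    }
}

// Approximate shape of the generated verification code for `type Test` (illustrative only).
function apply_Test_sketch(data: any) {
    const res: any = {};

    // required field: missing/null is a hard error
    if (data["someString"] === null || data["someString"] === undefined)
        throw new VerificationError("Test", "someString", data["someString"]);
    else {
        res["someString"] = String(data["someString"]);
    }

    // optional field: only converted when a value is actually present
    if (data["someNumber"] !== null && data["someNumber"] !== undefined) {
        res["someNumber"] = Number(data["someNumber"]);
    }

    return res;
}
```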
@@ -335,8 +342,7 @@ export class TypescriptTarget extends CompileTarget {
             `ctx: T`,
         ].join(", ");
         const retVal = fnc.return
-            ? `Promise<${
-                  toJSType(fnc.return.type) + (fnc.return.array ? "[]" : "")
+            ? `Promise<${toJSType(fnc.return.type) + (fnc.return.array ? "[]" : "")
             }>`
             : `void`;
         a(1, `abstract ${fnc.name}(${params}): ${retVal};`);
@@ -381,8 +387,7 @@ export class TypescriptTarget extends CompileTarget {
                 2,
                 `return this.${fnc.name}.call(this, ...p)` + //TODO: Refactor. This line is way to compicated for anyone to understand, including me
                     (fnc.return
-                        ? `.then(${
-                              fnc.return?.array
+                        ? `.then(${fnc.return?.array
                             ? `res => res.map(e => apply_${fnc.return.type}(e))`
                             : `res => apply_${fnc.return.type}(res)`
                         });`

@@ -6,11 +6,10 @@ edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-int-enum = "0.5.0"
+int-enum = { version ="0.5.0", features = ["serde", "convert"] }
 serde = { version = "1.0.147", features = ["derive"] }
 serde_json = "1.0.88"
 nanoid = "0.4.0"
 tokio = { version = "1.22.0", features = ["full"] }
 log = "0.4.17"
-simple_logger = { version = "4.0.0", features = ["threads", "colored", "timestamps", "stderr"] }
 async-trait = "0.1.59"

@@ -1,14 +1,24 @@
+import { isGeneratorFunction } from "util/types";
+
+function form_verficiation_error_message(type?: string, field?: string) {
+    let msg = "Parameter verification failed! ";
+    if (type && field) {
+        msg += `At ${type}.${field}! `;
+    } else if (type) {
+        msg += `At type ${type}! `;
+    } else if (field) {
+        msg += `At field ${field}! `;
+    }
+    return msg;
+}
+
 export class VerificationError extends Error {
     constructor(
         public readonly type?: string,
         public readonly field?: string,
         public readonly value?: any
     ) {
-        super(
-            "Parameter verification failed! " +
-                (type ? "Expected " + type + "! " : "") +
-                (field ? "At: " + field + "! " : "")
-        );
+        super(form_verficiation_error_message(type, field));
     }
 }
 
@@ -33,4 +43,4 @@ export function apply_boolean(data: any) {
     return Boolean(data);
 }
 
-export function apply_void(data: any) {}
+export function apply_void(data: any) { }
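
The new `form_verficiation_error_message` helper only varies the error text by which arguments are present. A small runnable usage example; the function body is copied from the hunk above (reflowed onto single-line branches) so the snippet stands alone, and the expected strings follow directly from its template literals:

```ts
// Copied from the diff above so the example runs standalone.
function form_verficiation_error_message(type?: string, field?: string) {
    let msg = "Parameter verification failed! ";
    if (type && field) msg += `At ${type}.${field}! `;
    else if (type) msg += `At type ${type}! `;
    else if (field) msg += `At field ${field}! `;
    return msg;
}

console.log(form_verficiation_error_message("Test", "someNumber"));
// "Parameter verification failed! At Test.someNumber! "
console.log(form_verficiation_error_message("Test"));
// "Parameter verification failed! At type Test! "
console.log(form_verficiation_error_message(undefined, "someNumber"));
// "Parameter verification failed! At field someNumber! "
```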