11 Commits

17 changed files with 208 additions and 107 deletions

View File

@@ -6,6 +6,9 @@ Type/Service definition language and code generator for json-rpc 2.0. Currently
 | ------- | --------------------------------- |
 | ts-node | Typescript for NodeJS             |
 | ts-esm  | Typescript in ESM format for Deno |
+| rust    | Rust                              |
+| dart    | Dart                              |
+| c#      | C#                                |
 
 ## Usage
@@ -23,7 +26,7 @@ enum TestEnum {
 type Test {
     testen: TestEnum;
     someString: string;
-    someNumber: number;
+    someNumber?: number;
     array: string[];
     map: {number, TestEnum};
 }
@@ -44,4 +47,8 @@ Then run the generator like this `jrpc compile test.jrpc -o=ts-node:output/`.
 This will generate the Client and Server code in the specified folder.
 
-//TODO: Make Documentation better
+## TODOS
+
+1. Documentation
+2. Null Checks/Enforcements in all languages
+3. More and better tests
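The table above now lists rust, dart and c# as additional targets. Following the ts-node example in the usage section, selecting one of the new targets presumably only swaps the key before the output path; a minimal sketch, assuming the first column of the table gives the accepted `-o` target keys (the output directories are arbitrary):

```sh
# Hypothetical invocations for the newly listed targets
jrpc compile test.jrpc -o=rust:output_rust/
jrpc compile test.jrpc -o=c#:output_csharp/
```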

View File

@@ -41,6 +41,10 @@ class TestSrvimpl : Example.TestServiceServer<int>
         throw new Exception("This is a remote error :)");
     }
+    public override Task<double> FunctionWithKeywords(double type, double static_, double event_, int ctx)
+    {
+        throw new NotImplementedException();
+    }
 }
 
 class CopyTransportS2 : Example.JRpcTransport

View File

@@ -52,6 +52,8 @@ service TestService {
     FunctionWithArrayAsParamAndReturn(values1: float[], values2: float[]): float[];
+
+    FunctionWithKeywords(type: float, static: float, event: float): float;
 }
@@ -59,6 +61,12 @@ type Test2 {
     age: int;
 }
 
+type TestKeywords {
+    type: float;
+    static: float;
+    event: float;
+}
+
 service SimpleTestService {
     @Description("asdasdasd")
     GetTest(name: string, age: int): Test2;

View File

@@ -1,5 +1,5 @@
 type TestAtom {
-    val_number: float;
-    val_boolean: boolean;
-    val_string: string;
+    val_number?: float;
+    val_boolean?: boolean;
+    val_string?: string;
 }

View File

@@ -1,6 +1,6 @@
 {
     "name": "@hibas123/jrpcgen",
-    "version": "1.2.0",
+    "version": "1.2.17",
     "main": "lib/index.js",
     "license": "MIT",
     "packageManager": "yarn@3.1.1",
@@ -48,4 +48,4 @@
     "dependencies": {
         "fs-extra": "^10.0.0"
     }
 }

View File

@@ -4,13 +4,15 @@ import yargs from "yargs";
 import { hideBin } from "yargs/helpers";
 import startCompile, { Target, Targets } from "./process";
+const pkg = require("../package.json");
+
 import dbg from "debug";
 const log = dbg("app");
 dbg.disable();
 
 yargs(hideBin(process.argv))
-    .version("1.0.0")
+    .version(pkg.version)
     .command(
         "compile <input>",
         "Compile source",

View File

@@ -14,6 +14,7 @@ export interface TypeFieldDefinition {
     name: string;
     type: string;
     array: boolean;
+    optional: boolean;
     map?: string;
 }
@@ -127,6 +128,7 @@ export default function get_ir(parsed: Parsed): IR {
                 type: field.fieldtype,
                 array: field.array,
                 map: field.map,
+                optional: field.optional,
             };
         });
         steps.push([

View File

@@ -16,6 +16,7 @@ export interface ImportStatement extends DefinitionNode {
 export interface TypeFieldStatement extends DefinitionNode {
     type: "type_field";
     name: string;
+    optional: boolean;
     fieldtype: string;
     array: boolean;
     map?: string;
@@ -114,8 +115,8 @@ export default function parse(tokens: Token[], file: string): Parsed {
         return idx;
     };
 
-    const eatText = (): [string, number] => {
-        checkTypes("text");
+    const eatText = (allowKeyword?: boolean): [string, number] => {
+        checkTypes(...(allowKeyword ? ["text", "keyword"] : ["text"]));
         let val = currentToken.value;
         let idx = currentToken.startIdx;
         eatToken();
@@ -134,13 +135,12 @@ export default function parse(tokens: Token[], file: string): Parsed {
         return val;
     };
 
     const checkTypes = (...types: string[]) => {
         if (types.indexOf(currentToken.type) < 0) {
             throw new ParserError(
-                `Unexpected token value, expected ${types.join(" | ")}, received '${
-                    currentToken.value
-                }'`,
+                `Unexpected token value, expected ${types.join(" | ")}, received '${currentToken.value
+                }'`,
                 currentToken
             );
@@ -170,6 +170,11 @@ export default function parse(tokens: Token[], file: string): Parsed {
         const idx = currentToken.startIdx;
         let name = currentToken.value;
         eatToken();
+        let optional = false;
+        if (currentToken.type === "questionmark") {
+            eatToken("?");
+            optional = true;
+        }
         eatToken(":");
 
         let array = false;
@@ -198,6 +203,7 @@ export default function parse(tokens: Token[], file: string): Parsed {
             array,
             map: mapKey,
             location: { file, idx },
+            optional
         };
     };
@@ -303,9 +309,9 @@ export default function parse(tokens: Token[], file: string): Parsed {
         eatToken("(");
         let args: string[] = [];
         let first = true;
-        while(currentToken.value !== ")") {
-            if(first) {
-                first= false;
+        while (currentToken.value !== ")") {
+            if (first) {
+                first = false;
             } else {
                 eatToken(",");
             }
@@ -345,7 +351,7 @@ export default function parse(tokens: Token[], file: string): Parsed {
         if (currentToken.value !== ")") {
             while (true) {
-                const [name] = eatText();
+                const [name] = eatText(true);
                 eatToken(":");
                 const [type] = eatText();
                 let array = false;
@@ -399,8 +405,8 @@ export default function parse(tokens: Token[], file: string): Parsed {
         let functions: ServiceFunctionStatement[] = [];
         while (currentToken.type !== "curly_close") {
-            let decorators:Decorators = new Map;
-            while(currentToken.type == "at") {
+            let decorators: Decorators = new Map;
+            while (currentToken.type == "at") {
                 parseFunctionDecorator(decorators);
             }
@@ -427,8 +433,8 @@ export default function parse(tokens: Token[], file: string): Parsed {
         let [key] = eatText()
         let value: string = undefined;
-        if(currentToken.type == "string") {
-            value = currentToken.value.slice(1,-1);
+        if (currentToken.type == "string") {
+            value = currentToken.value.slice(1, -1);
             eatToken();
         } else {
             [value] = eatText()
@@ -437,8 +443,8 @@ export default function parse(tokens: Token[], file: string): Parsed {
         eatToken(";");
         return {
-            type :"define",
-            location: {file, idx},
+            type: "define",
+            location: { file, idx },
             key,
             value
         }

View File

@@ -18,7 +18,7 @@ import { ZIGTarget } from "./targets/zig";
 import { DartTarget } from "./targets/dart";
 import { URL } from "url";
 
-class CatchedError extends Error {}
+class CatchedError extends Error { }
 
 const log = dbg("app");
@@ -46,15 +46,22 @@ function indexToLineAndCol(src: string, index: number) {
     return { line, col };
 }
 
-function resolve(base: string, ...parts: string[]) {
-    if (base.startsWith("http://") || base.startsWith("https://")) {
+function resolve(base: string, sub?: string) {
+    if (sub && (sub.startsWith("http://") || sub.startsWith("https://"))) {
+        let u = new URL(sub);
+        return u.href;
+    } else if (base.startsWith("http://") || base.startsWith("https://")) {
         let u = new URL(base);
-        for (const part of parts) {
-            u = new URL(part, u);
+        if (sub) {
+            if (!sub.endsWith(".jrpc")) {
+                sub += ".jrpc";
+            }
+            u = new URL(sub, u);
         }
         return u.href;
     } else {
-        return Path.resolve(base, ...parts);
+        if (!sub) return Path.resolve(base);
+        else return Path.resolve(Path.dirname(base), sub + ".jrpc");
     }
 }
@@ -151,13 +158,7 @@ async function processFile(
     let resolved: Parsed = [];
     for (const statement of parsed) {
         if (statement.type == "import") {
-            let res: string;
-            if (file.startsWith("http://") || file.startsWith("https://")) {
-                res = resolve(file, statement.path + ".jrpc");
-            } else {
-                const base = Path.dirname(file);
-                res = resolve(base, statement.path + ".jrpc");
-            }
+            let res = resolve(file, statement.path);
             resolved.push(...((await processFile(ctx, res)) || []));
         } else {
             resolved.push(statement);

View File

@@ -8,6 +8,7 @@ import {
 import { CompileTarget } from "../compile";
 import { LineAppender } from "../utils";
+import chalk from "chalk";
 
 const conversion = {
     boolean: "bool",
@@ -22,6 +23,16 @@ function toCSharpType(type: string): string {
     return (conversion as any)[type] || type;
 }
 
+// TODO: Add other keywords as well!
+const keywords = new Set(["event", "internal", "public", "private", "static"]);
+const fixKeywordName = (name: string) => {
+    if (keywords.has(name)) {
+        return `${name}_`;
+    }
+    return name;
+}
+
 export class CSharpTarget extends CompileTarget<{ csharp_namespace: string }> {
     name: string = "c#";
@@ -61,11 +72,20 @@ export class CSharpTarget extends CompileTarget<{ csharp_namespace: string }> {
         a(0, ``);
         a(0, `public class ${definition.name} {`);
         for (const field of definition.fields) {
+            let fn = field.name;
+            if (keywords.has(field.name)) {
+                console.log(
+                    chalk.yellow("[C#] WARNING:"),
+                    `Field name '${fn}' is not allowed in C#. Renaming to '${fn}_'`
+                );
+                fn = fixKeywordName(fn);
+                a(1, `[JsonPropertyName("${field.name}")]`);
+            }
             if (field.array) {
                 a(
                     1,
-                    `public IList<${toCSharpType(field.type)}>? ${
-                        field.name
-                    } { get; set; }`
+                    `public IList<${toCSharpType(field.type)}>? ${fn
+                    } { get; set; }`
                 );
             } else if (field.map) {
@@ -73,12 +93,12 @@ export class CSharpTarget extends CompileTarget<{ csharp_namespace: string }> {
                     1,
                     `public Dictionary<${toCSharpType(field.map)}, ${toCSharpType(
                         field.type
-                    )}>? ${field.name} { get; set; }`
+                    )}>? ${fn} { get; set; }`
                 );
             } else {
                 a(
                     1,
-                    `public ${toCSharpType(field.type)}? ${field.name} { get; set; }`
+                    `public ${toCSharpType(field.type)}? ${fn} { get; set; }`
                 );
             }
         }
@@ -127,10 +147,12 @@ export class CSharpTarget extends CompileTarget<{ csharp_namespace: string }> {
         for (const fnc of definition.functions) {
             let params = fnc.inputs
                 .map((inp) => {
+                    let name = fixKeywordName(inp.name);
                     if (inp.array) {
-                        return `List<${toCSharpType(inp.type)}> ${inp.name}`;
+                        return `List<${toCSharpType(inp.type)}> ${name}`;
                     } else {
-                        return `${toCSharpType(inp.type)} ${inp.name}`;
+                        return `${toCSharpType(inp.type)} ${name}`;
                     }
                 })
                 .join(", ");
@@ -139,7 +161,7 @@ export class CSharpTarget extends CompileTarget<{ csharp_namespace: string }> {
             a(
                 2,
                 `var param = new JsonArray(${fnc.inputs
-                    .map((e) => `JsonSerializer.SerializeToNode(${e.name})`)
+                    .map((e) => `JsonSerializer.SerializeToNode(${fixKeywordName(e.name)})`)
                     .join(", ")});`
             );
@@ -219,10 +241,11 @@ export class CSharpTarget extends CompileTarget<{ csharp_namespace: string }> {
         for (const fnc of definition.functions) {
             let params = [
                 ...fnc.inputs.map((inp) => {
+                    let name = fixKeywordName(inp.name)
                     if (inp.array) {
-                        return `List<${toCSharpType(inp.type)}> ${inp.name}`;
+                        return `List<${toCSharpType(inp.type)}> ${name}`;
                     } else {
-                        return `${toCSharpType(inp.type)} ${inp.name}`;
+                        return `${toCSharpType(inp.type)} ${name}`;
                     }
                 }),
                 "TContext ctx",
@@ -272,15 +295,15 @@ export class CSharpTarget extends CompileTarget<{ csharp_namespace: string }> {
                 a(
                     4,
                     pref +
                         `this.${fnc.name}(${[
                             ...fnc.inputs.map((inp, idx) => {
                                 let type = inp.array
                                     ? `List<${toCSharpType(inp.type)}>`
                                     : `${toCSharpType(inp.type)}`;
                                 return `param[${idx}]!.Deserialize<${type}>()`;
                             }),
                             "context",
                         ].join(", ")});`
                 );
                 if (fnc.return && fnc.return.type != "void") {
@@ -314,5 +337,5 @@ export class CSharpTarget extends CompileTarget<{ csharp_namespace: string }> {
         this.generateServiceServer(definition);
     }
 
-    finalize(steps: Step[]): void {}
+    finalize(steps: Step[]): void { }
 }

View File

@ -1,6 +1,6 @@
import chalk from "chalk"; import chalk from "chalk";
import { CompileTarget } from "../compile"; import { CompileTarget } from "../compile";
import { TypeDefinition, EnumDefinition, ServiceDefinition, Step } from "../ir"; import { TypeDefinition, EnumDefinition, ServiceDefinition, Step, IR } from "../ir";
import { lineAppender, LineAppender } from "../utils"; import { lineAppender, LineAppender } from "../utils";
const conversion = { const conversion = {
@ -23,6 +23,16 @@ function toSnake(input: string) {
); );
} }
// TODO: Add other keywords as well!
const keywords = new Set(["type", "static"]);
const fixKeywordName = (name: string) => {
if (keywords.has(name)) {
return `${name}_`;
}
return name;
}
export class RustTarget extends CompileTarget<{ rust_crate: string }> { export class RustTarget extends CompileTarget<{ rust_crate: string }> {
name: string = "rust"; name: string = "rust";
@ -71,17 +81,36 @@ export class RustTarget extends CompileTarget<{ rust_crate: string }> {
a(0, `#[derive(Clone, Debug, Serialize, Deserialize)]`); a(0, `#[derive(Clone, Debug, Serialize, Deserialize)]`);
a(0, `pub struct ${definition.name} {`); a(0, `pub struct ${definition.name} {`);
for (const field of definition.fields) { for (const field of definition.fields) {
a(1, `#[allow(non_snake_case)]`);
let fn = `pub ${field.name}:`;
if (keywords.has(field.name)) {
// TODO: Add other keywords as well!
console.log(
chalk.yellow("[RUST] WARNING:"),
`Field name '${field.name}' is not allowed in Rust. Renaming to '${field.name}_'`
);
fn = `pub ${fixKeywordName(field.name)}:`;
a(1, `#[serde(rename = "${field.name}")]`);
}
let opts = "";
let opte = "";
if (field.optional) {
opts = "Option<";
opte = ">";
}
if (field.array) { if (field.array) {
a(1, `pub ${field.name}: Vec<${toRustType(field.type)}>,`); a(1, `${fn} ${opts}Vec<${toRustType(field.type)}>${opte},`);
} else if (field.map) { } else if (field.map) {
a( a(
1, 1,
`pub ${field.name}: HashMap<${toRustType( `${fn} ${opts}HashMap<${toRustType(
field.map field.map
)}, ${toRustType(field.type)}>,` )}, ${toRustType(field.type)}>${opte},`
); );
} else { } else {
a(1, `pub ${field.name}: ${toRustType(field.type)},`); a(1, `${fn} ${opts}${toRustType(field.type)}${opte},`);
} }
} }
a(0, `}`); a(0, `}`);
@ -101,7 +130,7 @@ export class RustTarget extends CompileTarget<{ rust_crate: string }> {
a(0, `#[repr(i64)]`); a(0, `#[repr(i64)]`);
a( a(
0, 0,
"#[derive(Clone, Copy, Debug, Eq, PartialEq, IntEnum, Deserialize, Serialize)]" "#[derive(Clone, Copy, Debug, Eq, PartialEq, IntEnum)]"
); );
a(0, `pub enum ${definition.name} {`); a(0, `pub enum ${definition.name} {`);
for (const field of definition.values) { for (const field of definition.values) {
@ -137,17 +166,18 @@ export class RustTarget extends CompileTarget<{ rust_crate: string }> {
a(0, ``); a(0, ``);
for (const fnc of definition.functions) { for (const fnc of definition.functions) {
let params = fnc.inputs let params = fnc.inputs
.map((i) => i.name + ": " + typeToRust(i.type, i.array)) .map((i) => fixKeywordName(i.name) + ": " + typeToRust(i.type, i.array))
.join(", "); .join(", ");
let ret = fnc.return let ret = fnc.return
? typeToRust(fnc.return.type, fnc.return.array) ? typeToRust(fnc.return.type, fnc.return.array)
: "()"; : "()";
a(1, `#[allow(non_snake_case)]`);
a(1, `pub async fn ${fnc.name}(&self, ${params}) -> Result<${ret}> {`); a(1, `pub async fn ${fnc.name}(&self, ${params}) -> Result<${ret}> {`);
a(2, `let l_req = JRPCRequest {`); a(2, `let l_req = JRPCRequest {`);
a(3, `jsonrpc: "2.0".to_owned(),`); a(3, `jsonrpc: "2.0".to_owned(),`);
a(3, `id: None, // 'id' will be set by the send_request function`); a(3, `id: None, // 'id' will be set by the send_request function`);
a(3, `method: "${definition.name}.${fnc.name}".to_owned(),`); a(3, `method: "${definition.name}.${fnc.name}".to_owned(),`);
a(3, `params: json!([${fnc.inputs.map((e) => e.name)}])`); a(3, `params: json!([${fnc.inputs.map((e) => fixKeywordName(e.name))}])`);
a(2, `};`); a(2, `};`);
a(2, ``); a(2, ``);
if (fnc.return) { if (fnc.return) {
@ -200,12 +230,13 @@ export class RustTarget extends CompileTarget<{ rust_crate: string }> {
let params = let params =
fnc.inputs.length > 0 fnc.inputs.length > 0
? fnc.inputs ? fnc.inputs
.map((i) => i.name + ": " + typeToRust(i.type, i.array)) .map((i) => fixKeywordName(i.name) + ": " + typeToRust(i.type, i.array))
.join(", ") .join(", ")
: ""; : "";
let ret = fnc.return let ret = fnc.return
? typeToRust(fnc.return.type, fnc.return.array) ? typeToRust(fnc.return.type, fnc.return.array)
: "()"; : "()";
a(1, `#[allow(non_snake_case)]`);
a(1, `async fn ${fnc.name}(&self, ${params}) -> Result<${ret}>;`); a(1, `async fn ${fnc.name}(&self, ${params}) -> Result<${ret}>;`);
} }
a(0, `}`); a(0, `}`);
@ -216,6 +247,7 @@ export class RustTarget extends CompileTarget<{ rust_crate: string }> {
a(0, `}`); a(0, `}`);
a(0, ``); a(0, ``);
a(0, `impl ${definition.name}Handler {`); a(0, `impl ${definition.name}Handler {`);
//TODO: Maybe add a new definition like, pub fn new2<T>(implementation: T) where T: ${definition.name} + Sync + Send + 'static {}
a( a(
1, 1,
`pub fn new(implementation: Box<dyn ${definition.name} + Sync + Send + 'static>) -> Arc<Self> {` `pub fn new(implementation: Box<dyn ${definition.name} + Sync + Send + 'static>) -> Arc<Self> {`
@ -235,6 +267,7 @@ export class RustTarget extends CompileTarget<{ rust_crate: string }> {
a(1, `fn get_id(&self) -> String { "${definition.name}".to_owned() }`); a(1, `fn get_id(&self) -> String { "${definition.name}".to_owned() }`);
a(0, ``); a(0, ``);
a(1, `#[allow(non_snake_case)]`);
a( a(
1, 1,
`async fn handle(&self, msg: &JRPCRequest, function: &str) -> Result<(bool, Value)> {` `async fn handle(&self, msg: &JRPCRequest, function: &str) -> Result<(bool, Value)> {`
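To make the struct-generation changes above concrete, here is a rough sketch of the Rust the target would now emit for a type with a keyword-named field and an optional field. The input type is hypothetical, and the float-to-f64 / string-to-String mappings are assumptions (the target's conversion table is not part of this hunk):

```rust
// Hypothetical input:  type Sample { type: float; label?: string; }
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Sample {
    #[allow(non_snake_case)]
    #[serde(rename = "type")]  // keep the original name on the wire
    pub type_: f64,            // `type` is a Rust keyword, so the field is renamed
    #[allow(non_snake_case)]
    pub label: Option<String>, // optional fields are wrapped in Option<...>
}
```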

View File

@@ -33,9 +33,8 @@ export class TypescriptTarget extends CompileTarget {
     }
 
     private generateImport(imports: string, path: string) {
-        return `import ${imports} from "${
-            path + (this.flavour === "esm" ? ".ts" : "")
-        }";\n`;
+        return `import ${imports} from "${path + (this.flavour === "esm" ? ".js" : "")
+            }";\n`;
     }
 
     private generateImports(
@@ -89,7 +88,7 @@ export class TypescriptTarget extends CompileTarget {
                 } else {
                     type = toJSType(field.type);
                 }
-                return `${field.name}?: ${type}; `;
+                return `${field.name}${field.optional ? "?" : ""}: ${type}; `;
             })
         );
@@ -125,10 +124,18 @@ export class TypescriptTarget extends CompileTarget {
         );
         a(1, `let res = new ${def.name}() as any;`);
         def.fields.forEach((field) => {
-            a(
-                1,
-                `if(data["${field.name}"] !== null && data["${field.name}"] !== undefined) {`
-            );
+            if (field.optional) {
+                a(
+                    1,
+                    `if(data["${field.name}"] !== null && data["${field.name}"] !== undefined) {`
+                );
+            } else {
+                a(
+                    1,
+                    `if(data["${field.name}"] === null || data["${field.name}"] === undefined) throw new VerificationError("${def.name}", "${field.name}", data["${field.name}"]);`
+                );
+                a(1, `else {`);
+            }
             if (field.array) {
                 a(
                     2,
@@ -200,12 +207,12 @@ export class TypescriptTarget extends CompileTarget {
         this.writeFormattedFile(
             "service_client.ts",
             this.generateImport(
-                "{ RequestObject, ResponseObject, ErrorCodes, Logging }",
+                "{ type RequestObject, type ResponseObject, ErrorCodes, Logging }",
                 "./service_base"
             ) +
                 this.generateImport(" { VerificationError }", "./ts_base") +
                 "\n\n" +
                 this.getTemplate("ts_service_client.ts")
         );
 
         const { a, getResult } = LineAppender();
@@ -297,12 +304,12 @@ export class TypescriptTarget extends CompileTarget {
         this.writeFormattedFile(
             "service_server.ts",
             this.generateImport(
-                "{ RequestObject, ResponseObject, ErrorCodes, Logging }",
+                "{ type RequestObject, type ResponseObject, ErrorCodes, Logging }",
                 "./service_base"
             ) +
                 this.generateImport(" { VerificationError }", "./ts_base") +
                 "\n\n" +
                 this.getTemplate("ts_service_server.ts")
         );
 
         this.generateImports(a, def);
@@ -335,9 +342,8 @@ export class TypescriptTarget extends CompileTarget {
             `ctx: T`,
         ].join(", ");
         const retVal = fnc.return
-            ? `Promise<${
-                  toJSType(fnc.return.type) + (fnc.return.array ? "[]" : "")
-              }>`
+            ? `Promise<${toJSType(fnc.return.type) + (fnc.return.array ? "[]" : "")
+            }>`
             : `void`;
 
         a(1, `abstract ${fnc.name}(${params}): ${retVal};`);
@@ -380,13 +386,12 @@ export class TypescriptTarget extends CompileTarget {
             a(
                 2,
                 `return this.${fnc.name}.call(this, ...p)` + //TODO: Refactor. This line is way too complicated for anyone to understand, including me
                     (fnc.return
-                        ? `.then(${
-                            fnc.return?.array
-                                ? `res => res.map(e => apply_${fnc.return.type}(e))`
-                                : `res => apply_${fnc.return.type}(res)`
-                        });`
+                        ? `.then(${fnc.return?.array
+                            ? `res => res.map(e => apply_${fnc.return.type}(e))`
+                            : `res => apply_${fnc.return.type}(res)`
+                        });`
                         : "")
             );
             a(1, `}`);
             a(0, ``);
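The hunks above change the per-field check written into the generated apply helpers. A rough sketch of what the emitted lines might look like for one required and one optional field; the type and field names are hypothetical, and the assignment logic inside the braces comes from parts of the generator not shown in this diff:

```ts
// Required field `id` on a type `Test`: missing values now throw (sketch)
if (data["id"] === null || data["id"] === undefined) throw new VerificationError("Test", "id", data["id"]);
else {
    // ...assignment / conversion emitted by the unchanged code...
}

// Optional field `nickname`: the old "skip if missing" behaviour is kept (sketch)
if (data["nickname"] !== null && data["nickname"] !== undefined) {
    // ...assignment / conversion...
}
```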

View File

@@ -6,11 +6,10 @@ edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-int-enum = "0.5.0"
+int-enum = { version ="0.5.0", features = ["serde", "convert"] }
 serde = { version = "1.0.147", features = ["derive"] }
 serde_json = "1.0.88"
 nanoid = "0.4.0"
 tokio = { version = "1.22.0", features = ["full"] }
 log = "0.4.17"
-simple_logger = { version = "4.0.0", features = ["threads", "colored", "timestamps", "stderr"] }
 async-trait = "0.1.59"

View File

@@ -40,7 +40,7 @@ pub struct JRPCError {
 pub struct JRPCResult {
     pub jsonrpc: String,
     pub id: String,
-    pub result: Value,
+    pub result: Option<Value>,
     pub error: Option<JRPCError>,
 }
@@ -76,8 +76,11 @@ impl JRPCClient {
         if let Some(result) = result {
             if let Some(error) = result.error {
                 return Err(format!("Error while receiving result: {}", error.message).into());
+            } else if let Some(result) = result.result {
+                return Ok(result);
             } else {
-                return Ok(result.result);
+                return Ok(Value::Null);
+                // return Err(format!("No result received").into());
             }
         } else {
             return Err("Error while receiving result".into());
@@ -133,7 +136,7 @@ impl JRPCSession {
         let result = JRPCResult {
             jsonrpc: "2.0".to_string(),
             id: request_id,
-            result: Value::Null,
+            result: None,
             error: Some(error),
         };
@@ -169,7 +172,7 @@ impl JRPCSession {
             .send(JRPCResult {
                 jsonrpc: "2.0".to_string(),
                 id: request.id.unwrap(),
-                result,
+                result: Some(result),
                 error: None,
             })
             .await;

View File

@@ -1,14 +1,22 @@
+function form_verification_error_message(type?: string, field?: string) {
+    let msg = "Parameter verification failed! ";
+    if (type && field) {
+        msg += `At ${type}.${field}! `;
+    } else if (type) {
+        msg += `At type ${type}! `;
+    } else if (field) {
+        msg += `At field ${field}! `;
+    }
+    return msg;
+}
+
 export class VerificationError extends Error {
     constructor(
         public readonly type?: string,
         public readonly field?: string,
         public readonly value?: any
     ) {
-        super(
-            "Parameter verification failed! " +
-                (type ? "Expected " + type + "! " : "") +
-                (field ? "At: " + field + "! " : "")
-        );
+        super(form_verification_error_message(type, field));
     }
 }
@@ -33,4 +41,4 @@ export function apply_boolean(data: any) {
     return Boolean(data);
 }
 
-export function apply_void(data: any) {}
+export function apply_void(data: any) { }

View File

@@ -1,7 +1,7 @@
 //@template-ignore
 import { VerificationError } from "./ts_base";
 //@template-ignore
-import { RequestObject, ResponseObject, ErrorCodes, Logging } from "./ts_service_base";
+import { type RequestObject, type ResponseObject, ErrorCodes, Logging } from "./ts_service_base";
 
 export type IMessageCallback = (data: any) => void;

View File

@@ -1,14 +1,14 @@
 //@template-ignore
 import { VerificationError } from "./ts_base";
 //@template-ignore
-import { RequestObject, ResponseObject, ErrorCodes, Logging } from "./ts_service_base";
+import { type RequestObject, type ResponseObject, ErrorCodes, Logging } from "./ts_service_base";
 
 export class Service<T> {
     public name: string = null as any;
     public functions = new Set<string>();
 
-    constructor() {}
+    constructor() { }
 }
 
 type ISendMessageCB = (data: any, catchedErr?: Error) => void;
@@ -37,7 +37,7 @@ class Session<T> {
         this.ctx = ctx || {};
     }
 
-    send(data: any, catchedErr?:Error) {
+    send(data: any, catchedErr?: Error) {
         Logging.log("SERVER: Sending Message", data)
         this._send(data, catchedErr);
     }
@@ -95,7 +95,7 @@ class Session<T> {
             }
             let result = await (service as any)["_" + fncName](data.params, this.ctx);
-            if(data.id) { //Request
+            if (data.id) { //Request
                 this.send({
                     jsonrpc: "2.0",
                     id: data.id,