Support enums
parent 6e947bde57
commit b3b202c9f9
README.md
@@ -6,6 +6,9 @@ Type/Service definition language and code generator for json-rpc 2.0. Currently
 | ------- | --------------------------------- |
 | ts-node | Typescript for NodeJS             |
 | ts-esm  | Typescript in ESM format for Deno |
+| rust    | Rust                              |
+| dart    | Dart                              |
+| c#      | C#                                |

 ## Usage

@@ -23,7 +26,7 @@ enum TestEnum {
 type Test {
     testen: TestEnum;
     someString: string;
-    someNumber: number;
+    someNumber?: number;
     array: string[];
     map: {number, TestEnum};
 }
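The `?` marker added to `someNumber` is the user-facing piece of this commit: fields marked optional may be omitted. As a rough sketch of what that implies for the TypeScript target (illustrative only, not verbatim generator output, and assuming a `Record`-style rendering for map fields):

```typescript
// Illustrative sketch of the generated shape for the `Test` type above.
enum TestEnum { A, B }                 // stand-in for the README's TestEnum values

interface Test {
    testen: TestEnum;                  // required: emitted without `?` after this commit
    someString: string;
    someNumber?: number;               // `someNumber?` in the .jrpc becomes an optional field
    array: string[];
    map: Record<number, TestEnum>;     // assumed rendering of `{number, TestEnum}`
}
```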
@@ -44,4 +47,8 @@ Then run the generator like this `jrpc compile test.jrpc -o=ts-node:output/`.

 This will generate the Client and Server code in the specified folder.

-//TODO: Make Documentation better
+## TODOS
+
+1. Documentation
+2. Null Checks/Enforcements in all languages
+3. More and better tests
@@ -1,5 +1,5 @@
 type TestAtom {
-    val_number: float;
-    val_boolean: boolean;
-    val_string: string;
+    val_number?: float;
+    val_boolean?: boolean;
+    val_string?: string;
 }
@@ -1,6 +1,6 @@
 {
     "name": "@hibas123/jrpcgen",
-    "version": "1.2.3",
+    "version": "1.2.5",
     "main": "lib/index.js",
     "license": "MIT",
     "packageManager": "yarn@3.1.1",
@@ -14,6 +14,7 @@ export interface TypeFieldDefinition {
     name: string;
     type: string;
     array: boolean;
+    optional: boolean;
     map?: string;
 }

@@ -127,6 +128,7 @@ export default function get_ir(parsed: Parsed): IR {
                 type: field.fieldtype,
                 array: field.array,
                 map: field.map,
+                optional: field.optional,
             };
         });
         steps.push([
@@ -16,6 +16,7 @@ export interface ImportStatement extends DefinitionNode {
 export interface TypeFieldStatement extends DefinitionNode {
     type: "type_field";
     name: string;
+    optional: boolean;
     fieldtype: string;
     array: boolean;
     map?: string;
@@ -134,13 +135,12 @@ export default function parse(tokens: Token[], file: string): Parsed {
         return val;
     };



     const checkTypes = (...types: string[]) => {
         if (types.indexOf(currentToken.type) < 0) {
             throw new ParserError(
-                `Unexpected token value, expected ${types.join(" | ")}, received '${
-                    currentToken.value
+                `Unexpected token value, expected ${types.join(" | ")}, received '${currentToken.value
                 }'`,
                 currentToken
             );
@@ -170,6 +170,11 @@ export default function parse(tokens: Token[], file: string): Parsed {
         const idx = currentToken.startIdx;
         let name = currentToken.value;
         eatToken();
+        let optional = false;
+        if (currentToken.type === "questionmark") {
+            eatToken("?");
+            optional = true;
+        }
         eatToken(":");

         let array = false;
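The five `+` lines are the heart of the parser change: after the field name, a one-token lookahead consumes an optional `?` before requiring the `:`. The same technique in a self-contained sketch (the `Token` shape and `eat` helper are simplified stand-ins for the parser's real `eatToken`):

```typescript
// Minimal stand-ins; the real parser's Token/eatToken carry more state.
type Token = { type: string; value: string };

function parseFieldHeader(tokens: Token[]): { name: string; optional: boolean } {
    let pos = 0;
    const eat = (expected?: string): Token => {
        const t = tokens[pos++];
        if (expected !== undefined && t.value !== expected)
            throw new Error(`expected '${expected}', got '${t.value}'`);
        return t;
    };
    const name = eat().value;                    // field name
    let optional = false;
    if (tokens[pos]?.type === "questionmark") {  // one-token lookahead for `?`
        eat("?");
        optional = true;
    }
    eat(":");                                    // the type annotation follows
    return { name, optional };
}

// `someNumber?: ...` tokenizes to something like:
console.log(parseFieldHeader([
    { type: "text", value: "someNumber" },
    { type: "questionmark", value: "?" },
    { type: "colon", value: ":" },
]));  // -> { name: "someNumber", optional: true }
```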
@@ -198,6 +203,7 @@ export default function parse(tokens: Token[], file: string): Parsed {
             array,
             map: mapKey,
             location: { file, idx },
+            optional
         };
     };

@@ -303,9 +309,9 @@ export default function parse(tokens: Token[], file: string): Parsed {
         eatToken("(");
         let args: string[] = [];
         let first = true;
-        while(currentToken.value !== ")") {
-            if(first) {
-                first= false;
+        while (currentToken.value !== ")") {
+            if (first) {
+                first = false;
             } else {
                 eatToken(",");
             }
@@ -399,8 +405,8 @@ export default function parse(tokens: Token[], file: string): Parsed {
     let functions: ServiceFunctionStatement[] = [];

     while (currentToken.type !== "curly_close") {
-        let decorators:Decorators = new Map;
-        while(currentToken.type == "at") {
+        let decorators: Decorators = new Map;
+        while (currentToken.type == "at") {
             parseFunctionDecorator(decorators);
         }

@@ -427,8 +433,8 @@ export default function parse(tokens: Token[], file: string): Parsed {

     let [key] = eatText()
     let value: string = undefined;
-    if(currentToken.type == "string") {
-        value = currentToken.value.slice(1,-1);
+    if (currentToken.type == "string") {
+        value = currentToken.value.slice(1, -1);
         eatToken();
     } else {
         [value] = eatText()
@@ -437,8 +443,8 @@ export default function parse(tokens: Token[], file: string): Parsed {
     eatToken(";");

     return {
-        type :"define",
-        location: {file, idx},
+        type: "define",
+        location: { file, idx },
         key,
         value
     }
@@ -83,17 +83,24 @@ export class RustTarget extends CompileTarget<{ rust_crate: string }> {
                 fn = `pub type_:`;
                 a(1, `#[serde(rename = "type")]`);
             }
+            let opts = "";
+            let opte = "";
+            if (field.optional) {
+                opts = "Option<";
+                opte = ">";
+            }
+
             if (field.array) {
-                a(1, `${fn} Vec<${toRustType(field.type)}>,`);
+                a(1, `${fn} ${opts}Vec<${toRustType(field.type)}>${opte},`);
             } else if (field.map) {
                 a(
                     1,
-                    `${fn} HashMap<${toRustType(
+                    `${fn} ${opts}HashMap<${toRustType(
                         field.map
-                    )}, ${toRustType(field.type)}>,`
+                    )}, ${toRustType(field.type)}>${opte},`
                 );
             } else {
-                a(1, `${fn} ${toRustType(field.type)},`);
+                a(1, `${fn} ${opts}${toRustType(field.type)}${opte},`);
             }
         }
         a(0, `}`);
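The Rust target builds `Option<...>` by string-pasting an `opts` prefix and `opte` suffix around whichever inner type the field resolves to. The same decision table as a small pure function, for readability (a sketch, with `FieldDef` and `toRustType` simplified from what this diff shows):

```typescript
interface FieldDef {
    type: string;
    array: boolean;
    map?: string;
    optional: boolean;
}

// Mirrors the branch structure above: array -> Vec, map -> HashMap, else scalar,
// each wrapped in Option<...> when the field is optional.
function rustFieldType(field: FieldDef, toRustType: (t: string) => string): string {
    let inner: string;
    if (field.array) inner = `Vec<${toRustType(field.type)}>`;
    else if (field.map) inner = `HashMap<${toRustType(field.map)}, ${toRustType(field.type)}>`;
    else inner = toRustType(field.type);
    return field.optional ? `Option<${inner}>` : inner;
}

// rustFieldType({ type: "float", array: false, optional: true }, t => "f64")
// -> "Option<f64>"
```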
@@ -113,7 +120,7 @@ export class RustTarget extends CompileTarget<{ rust_crate: string }> {
         a(0, `#[repr(i64)]`);
         a(
             0,
-            "#[derive(Clone, Copy, Debug, Eq, PartialEq, IntEnum, Deserialize, Serialize)]"
+            "#[derive(Clone, Copy, Debug, Eq, PartialEq, IntEnum)]"
         );
         a(0, `pub enum ${definition.name} {`);
         for (const field of definition.values) {
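This ties into the Cargo.toml change at the bottom of the diff: `Deserialize, Serialize` drop out of the generated derive list, presumably because `int-enum`'s newly enabled `serde` feature (plus `convert`) now supplies the serde integration for these enums.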
@@ -33,9 +33,8 @@ export class TypescriptTarget extends CompileTarget {
     }

     private generateImport(imports: string, path: string) {
-        return `import ${imports} from "${
-            path + (this.flavour === "esm" ? ".ts" : "")
-        }";\n`;
+        return `import ${imports} from "${path + (this.flavour === "esm" ? ".ts" : "")
+            }";\n`;
     }

     private generateImports(
@@ -89,7 +88,7 @@ export class TypescriptTarget extends CompileTarget {
                 } else {
                     type = toJSType(field.type);
                 }
-                return `${field.name}?: ${type}; `;
+                return `${field.name}${field.optional ? "?" : ""}: ${type}; `;
             })
         );

@@ -125,10 +124,18 @@ export class TypescriptTarget extends CompileTarget {
         );
         a(1, `let res = new ${def.name}() as any;`);
         def.fields.forEach((field) => {
-            a(
-                1,
-                `if(data["${field.name}"] !== null && data["${field.name}"] !== undefined) {`
-            );
+            if (field.optional) {
+                a(
+                    1,
+                    `if(data["${field.name}"] !== null && data["${field.name}"] !== undefined) {`
+                );
+            } else {
+                a(
+                    1,
+                    `if(data["${field.name}"] === null || data["${field.name}"] === undefined) throw new VerificationError("${def.name}", "${field.name}", data["${field.name}"]);`
+                );
+                a(1, `else {`);
+            }
             if (field.array) {
                 a(
                     2,
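Spelled out for one optional and one required field, the emitted verifier now behaves roughly as below. `VerificationError` is the helper these files import from `./ts_base`; its constructor signature here is an assumption:

```typescript
// Sketch of the emitted guards, assuming a (type, field, value) error signature.
class VerificationError extends Error {
    constructor(type: string, field: string, value: unknown) {
        super(`${type}.${field} failed verification, got: ${value}`);
    }
}

function apply_Test(data: any) {
    // optional field: run the per-type checks only when a value is present
    if (data["someNumber"] !== null && data["someNumber"] !== undefined) {
        /* ...per-type checks... */
    }
    // required field: a missing value is now a hard error instead of a skip
    if (data["someString"] === null || data["someString"] === undefined)
        throw new VerificationError("Test", "someString", data["someString"]);
    else {
        /* ...per-type checks... */
    }
    return data;
}
```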
@@ -203,9 +210,9 @@ export class TypescriptTarget extends CompileTarget {
                 "{ RequestObject, ResponseObject, ErrorCodes, Logging }",
                 "./service_base"
             ) +
                 this.generateImport(" { VerificationError }", "./ts_base") +
                 "\n\n" +
                 this.getTemplate("ts_service_client.ts")
         );

         const { a, getResult } = LineAppender();
@@ -300,9 +307,9 @@ export class TypescriptTarget extends CompileTarget {
                 "{ RequestObject, ResponseObject, ErrorCodes, Logging }",
                 "./service_base"
             ) +
                 this.generateImport(" { VerificationError }", "./ts_base") +
                 "\n\n" +
                 this.getTemplate("ts_service_server.ts")
         );

         this.generateImports(a, def);
@@ -335,9 +342,8 @@ export class TypescriptTarget extends CompileTarget {
             `ctx: T`,
         ].join(", ");
         const retVal = fnc.return
-            ? `Promise<${
-                  toJSType(fnc.return.type) + (fnc.return.array ? "[]" : "")
-              }>`
+            ? `Promise<${toJSType(fnc.return.type) + (fnc.return.array ? "[]" : "")
+            }>`
             : `void`;
         a(1, `abstract ${fnc.name}(${params}): ${retVal};`);

@@ -380,13 +386,12 @@ export class TypescriptTarget extends CompileTarget {
                 a(
                     2,
                     `return this.${fnc.name}.call(this, ...p)` + //TODO: Refactor. This line is way to compicated for anyone to understand, including me
                         (fnc.return
-                            ? `.then(${
-                                  fnc.return?.array
-                                      ? `res => res.map(e => apply_${fnc.return.type}(e))`
-                                      : `res => apply_${fnc.return.type}(res)`
-                              });`
+                            ? `.then(${fnc.return?.array
+                                ? `res => res.map(e => apply_${fnc.return.type}(e))`
+                                : `res => apply_${fnc.return.type}(res)`
+                            });`
                             : "")
                 );
                 a(1, `}`);
                 a(0, ``);
@@ -6,7 +6,7 @@ edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-int-enum = "0.5.0"
+int-enum = { version ="0.5.0", features = ["serde", "convert"] }
 serde = { version = "1.0.147", features = ["derive"] }
 serde_json = "1.0.88"
 nanoid = "0.4.0"