Restructure and start working on CLI

This commit is contained in:
Fabian Stamm
2025-05-26 16:43:40 +02:00
parent 883b6da7eb
commit b61518de00
38 changed files with 134 additions and 8 deletions

111
libjrpc/src/compile.rs Normal file
View File

@ -0,0 +1,111 @@
use std::{collections::HashMap, path::PathBuf};
use anyhow::{Context, Result};
use crate::{
ir::{EnumDefinition, ServiceDefinition, TypeDefinition},
IR,
};
/// Interface every code-generation target implements.
///
/// Driven by `targets::compile`: `start` is called once, then one
/// `generate_*` call per IR step (in declaration order), then `finalize`.
pub trait Compile {
    /// Builds the target from the schema's `define` options.
    fn new(options: &HashMap<String, String>) -> Result<Self>
    where
        Self: Sized;
    /// Human-readable name of the target.
    fn name(&self) -> String;
    /// Called once before any definitions are generated.
    fn start(&mut self, ctx: &mut CompileContext) -> Result<()>;
    /// Emits code for a `type` definition.
    fn generate_type(
        &mut self,
        ctx: &mut CompileContext,
        definition: &TypeDefinition,
    ) -> Result<()>;
    /// Emits code for an `enum` definition.
    fn generate_enum(
        &mut self,
        ctx: &mut CompileContext,
        definition: &EnumDefinition,
    ) -> Result<()>;
    /// Emits code for a `service` definition.
    fn generate_service(
        &mut self,
        ctx: &mut CompileContext,
        definition: &ServiceDefinition,
    ) -> Result<()>;
    /// Called once after all steps, with the whole IR available.
    fn finalize(&mut self, ctx: &mut CompileContext, ir: &IR) -> Result<()>;
}
/// Shared state handed to every [`Compile`] implementation: the output
/// directory that all generated files are written below.
pub struct CompileContext {
    // Root directory all `write_file` paths are resolved against.
    output_folder: PathBuf,
}

impl CompileContext {
    /// Creates a context writing below `output_folder`.
    pub fn new(output_folder: &str) -> Self {
        CompileContext {
            output_folder: output_folder.into(),
        }
    }

    /// Writes `content` to `filename` (relative to the output folder),
    /// creating missing parent directories first.
    pub fn write_file(&self, filename: &str, content: String) -> Result<()> {
        // `Path::join` borrows — no need to clone the PathBuf first.
        let res_path = self.output_folder.join(filename);
        let res_dir = res_path.parent().context("Path has no parent!")?;
        std::fs::create_dir_all(res_dir)?;
        std::fs::write(res_path, content)?;
        Ok(())
    }
}
/// Collects the lines of a generated source file and joins them with
/// `\n` on demand.
pub struct FileGenerator {
    // Accumulated lines, each without a trailing newline.
    content: Vec<String>,
}

// `new` takes no arguments, so provide `Default` as well
// (clippy::new_without_default).
impl Default for FileGenerator {
    fn default() -> Self {
        Self::new()
    }
}

impl FileGenerator {
    /// Creates an empty generator.
    pub fn new() -> Self {
        FileGenerator {
            content: Vec::new(),
        }
    }
    /// Appends `content` as one line, prefixed by `indent` copies of the
    /// indent unit.
    pub fn a<T: ToString>(&mut self, indent: usize, content: T) {
        let line = " ".repeat(indent) + &content.to_string();
        self.content.push(line);
    }
    /// Appends a line at indent level 0.
    pub fn a0<T: ToString>(&mut self, content: T) {
        self.a(0, content);
    }
    /// Appends a line at indent level 1.
    pub fn a1<T: ToString>(&mut self, content: T) {
        self.a(1, content);
    }
    /// Appends a line at indent level 2.
    pub fn a2<T: ToString>(&mut self, content: T) {
        self.a(2, content);
    }
    /// Appends a line at indent level 3.
    pub fn a3<T: ToString>(&mut self, content: T) {
        self.a(3, content);
    }
    /// Appends a line at indent level 4.
    pub fn a4<T: ToString>(&mut self, content: T) {
        self.a(4, content);
    }
    /// Appends a line at indent level 5.
    pub fn a5<T: ToString>(&mut self, content: T) {
        self.a(5, content);
    }
    /// Appends a line at indent level 6.
    pub fn a6<T: ToString>(&mut self, content: T) {
        self.a(6, content);
    }
    /// Appends a pre-formatted line verbatim.
    pub fn add_line(&mut self, line: &str) {
        self.content.push(line.to_string());
    }
    /// Joins all collected lines with `\n` (no trailing newline).
    pub fn get_content(&self) -> String {
        self.content.join("\n")
    }
    /// Consumes the generator, returning the joined content.
    pub fn into_content(self) -> String {
        self.get_content()
    }
}

490
libjrpc/src/ir.rs Normal file
View File

@ -0,0 +1,490 @@
use std::{
collections::{HashMap, HashSet},
error::Error,
fmt::Display,
hash::{Hash, Hasher},
};
use anyhow::Result;
use crate::parser::{
EnumStatement, Node, ParserPosition, RootNode, ServiceStatement, TypeStatement,
};
static BUILT_INS: [&str; 6] = ["int", "float", "string", "boolean", "bytes", "void"];
/// Anything in the IR that carries a source location.
pub trait Definition {
    fn get_position(&self) -> ParserPosition;
}
/// The fully-lowered intermediate representation of a schema.
#[derive(Debug, Clone)]
pub struct IR {
    /// Options collected from `define` statements.
    pub options: HashMap<String, String>,
    /// Code-generation steps, in declaration order.
    pub steps: Vec<Step>,
}
/// One unit of work for a code generator.
#[derive(Debug, Clone)]
pub enum Step {
    Type(TypeDefinition),
    Enum(EnumDefinition),
    Service(ServiceDefinition),
}
/// The primitive and user-defined types known to the IR.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum Type {
    Int,
    Float,
    String,
    Bool,
    Bytes,
    Void,
    /// A user-declared type or enum, referenced by name.
    Custom(String),
}
// Implementing `Display` (instead of `ToString` directly) provides
// `to_string()` through the blanket impl and integrates with format
// macros (clippy::inherent_to_string / to_string_trait_impl).
impl Display for Type {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Type::Int => "int",
            Type::Float => "float",
            Type::String => "string",
            Type::Bool => "bool",
            Type::Bytes => "bytes",
            Type::Void => "void",
            Type::Custom(name) => name.as_str(),
        };
        f.write_str(name)
    }
}
// Hash by the rendered name; consistent with the derived `Eq` because
// equal values always render identically.
impl Hash for Type {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.to_string().hash(state);
    }
}
impl From<&String> for Type {
    fn from(value: &String) -> Self {
        Self::from(value.as_str())
    }
}
impl From<String> for Type {
    fn from(value: String) -> Self {
        Self::from(value.as_str())
    }
}
impl From<&str> for Type {
    fn from(s: &str) -> Self {
        match s {
            "int" => Type::Int,
            "float" => Type::Float,
            "string" => Type::String,
            // Both spellings are accepted on input; output is "bool".
            "bool" => Type::Bool,
            "boolean" => Type::Bool,
            "bytes" => Type::Bytes,
            "void" => Type::Void,
            _ => Type::Custom(s.to_string()),
        }
    }
}
/// A lowered record type.
#[derive(Debug, Clone)]
pub struct TypeDefinition {
    pub name: String,
    /// Every type referenced by the fields (built-ins included).
    pub depends: HashSet<Type>,
    pub fields: Vec<Field>,
    pub position: ParserPosition,
}
impl Definition for TypeDefinition {
    fn get_position(&self) -> ParserPosition {
        self.position.clone()
    }
}
/// One field of a [`TypeDefinition`].
#[derive(Debug, Clone)]
pub struct Field {
    pub name: String,
    /// Value type of the field (for maps: the value side).
    pub typ: Type,
    pub array: bool,
    pub optional: bool,
    /// For map fields: the key type (restricted to `string` or `int`).
    pub map: Option<Type>,
}
/// A lowered enum.
#[derive(Debug, Clone)]
pub struct EnumDefinition {
    pub name: String,
    pub values: Vec<EnumField>,
    pub position: ParserPosition,
}
impl Definition for EnumDefinition {
    fn get_position(&self) -> ParserPosition {
        self.position.clone()
    }
}
/// A single named enum value.
#[derive(Debug, Clone)]
pub struct EnumField {
    pub name: String,
    /// Explicit or auto-incremented value; strictly increasing.
    pub value: i32,
}
/// A lowered service (a collection of RPC methods).
#[derive(Debug, Clone)]
pub struct ServiceDefinition {
    pub name: String,
    /// Types referenced by method signatures (void returns excluded).
    pub depends: HashSet<Type>,
    pub methods: Vec<Method>,
    pub position: ParserPosition,
}
impl Definition for ServiceDefinition {
    fn get_position(&self) -> ParserPosition {
        self.position.clone()
    }
}
/// One RPC method of a service.
#[derive(Debug, Clone)]
pub struct Method {
    pub name: String,
    pub inputs: Vec<MethodInput>,
    /// `None` for notification methods (no return value).
    pub output: Option<MethodOutput>,
    pub decorators: MethodDecorators,
}
/// A method parameter.
#[derive(Debug, Clone)]
pub struct MethodInput {
    pub name: String,
    pub typ: Type,
    pub array: bool,
    pub optional: bool,
}
/// A method return type.
#[derive(Debug, Clone)]
pub struct MethodOutput {
    pub typ: Type,
    pub array: bool,
}
/// Documentation attached via `@Description`/`@Param`/`@Returns`.
#[derive(Debug, Clone)]
pub struct MethodDecorators {
    pub description: Option<String>,
    /// Keyed by parameter name.
    pub parameter_descriptions: HashMap<String, String>,
    pub return_description: Option<String>,
}
/// Lowers a `type` statement into a [`TypeDefinition`].
///
/// Records every referenced type as a dependency and validates that map
/// key types are either `string` or `int`.
fn build_type(stmt: &TypeStatement) -> Result<TypeDefinition> {
    let mut depends = HashSet::new();
    let mut fields = Vec::new();
    for field in &stmt.fields {
        let typ = Type::from(&field.fieldtype);
        depends.insert(typ.clone());
        if let Some(maptype) = &field.map {
            // Only these two key types are representable across targets.
            if maptype != "string" && maptype != "int" {
                return Err(IRError::new("Map type must be string or int", field).into());
            }
        }
        fields.push(Field {
            name: field.name.clone(),
            typ,
            array: field.array,
            optional: field.optional,
            map: field.map.as_ref().map(Type::from),
        });
    }
    Ok(TypeDefinition {
        position: stmt.position.clone(),
        name: stmt.name.clone(),
        depends,
        fields,
    })
}
/// Lowers an `enum` statement into an [`EnumDefinition`].
///
/// Explicit values must fit `i32` and be strictly increasing; omitted
/// values continue counting from the previous one (starting at 0).
fn build_enum(stmt: &EnumStatement) -> Result<EnumDefinition> {
    let mut enumdef = EnumDefinition {
        position: stmt.position.clone(),
        name: stmt.name.clone(),
        values: Vec::new(),
    };
    // Last assigned value; -1 so the first implicit value becomes 0.
    let mut last = -1i32;
    for field in &stmt.values {
        let value = if let Some(value) = field.value {
            // `try_from` rejects values outside the i32 range in both
            // directions — the old `> i32::MAX` check let large negative
            // i64 values wrap silently.
            let value = match i32::try_from(value) {
                Ok(v) => v,
                Err(_) => return Err(IRError::new("Enum value too large", field).into()),
            };
            if value <= last {
                return Err(IRError::new("Enum values must be increasing", field).into());
            }
            last = value;
            value
        } else {
            last += 1;
            last
        };
        enumdef.values.push(EnumField {
            name: field.name.clone(),
            value,
        });
    }
    Ok(enumdef)
}
/// Lowers a `service` statement into a [`ServiceDefinition`].
///
/// Collects referenced types as dependencies, enforces that optional
/// inputs trail required ones, and validates the supported method
/// decorators (`Description`, `Returns`, `Param`).
fn build_service(stmt: &ServiceStatement) -> Result<ServiceDefinition> {
    let mut servdef = ServiceDefinition {
        position: stmt.position.clone(),
        name: stmt.name.clone(),
        depends: HashSet::new(),
        methods: Vec::new(),
    };
    for method in &stmt.methods {
        let mut methoddef = Method {
            name: method.name.clone(),
            inputs: Vec::new(),
            output: method.return_type.as_ref().map(|rt| {
                let typ = Type::from(&rt.fieldtype);
                // A `void` return is not a real dependency.
                if typ != Type::Void {
                    servdef.depends.insert(typ.clone());
                }
                MethodOutput {
                    typ,
                    array: rt.array,
                }
            }),
            decorators: MethodDecorators {
                description: None,
                parameter_descriptions: HashMap::new(),
                return_description: None,
            },
        };
        // Once one optional input appears, every later input must be
        // optional too (targets map them to trailing optionals).
        let mut optional_starts = false;
        for inp in &method.inputs {
            let typ = Type::from(&inp.fieldtype);
            servdef.depends.insert(typ.clone());
            if optional_starts && !inp.optional {
                return Err(
                    IRError::new("Optional fields must come after required fields", inp).into(),
                );
            }
            if inp.optional {
                optional_starts = true;
            }
            methoddef.inputs.push(MethodInput {
                name: inp.name.clone(),
                typ,
                array: inp.array,
                optional: inp.optional,
            });
        }
        // Each decorator may appear once; `Param` must name a declared input.
        for decorator in &method.decorators {
            match decorator.name.as_str() {
                "Description" => {
                    if methoddef.decorators.description.is_some() {
                        return Err(IRError::new("Duplicate description", decorator).into());
                    }
                    if decorator.args.len() != 1 {
                        return Err(IRError::new(
                            "Description must have exactly one argument",
                            decorator,
                        )
                        .into());
                    }
                    methoddef.decorators.description = Some(decorator.args[0].clone());
                }
                "Returns" => {
                    if methoddef.decorators.return_description.is_some() {
                        return Err(IRError::new("Duplicate return description", decorator).into());
                    }
                    if decorator.args.len() != 1 {
                        return Err(IRError::new(
                            "Returns must have exactly one argument",
                            decorator,
                        )
                        .into());
                    }
                    methoddef.decorators.return_description = Some(decorator.args[0].clone());
                }
                "Param" => {
                    if decorator.args.len() != 2 {
                        return Err(IRError::new(
                            "Param must have exactly two arguments",
                            decorator,
                        )
                        .into());
                    }
                    let name = decorator.args[0].clone();
                    let description = decorator.args[1].clone();
                    if methoddef
                        .decorators
                        .parameter_descriptions
                        .contains_key(&name)
                    {
                        return Err(
                            IRError::new("Duplicate parameter description", decorator).into()
                        );
                    }
                    if methoddef.inputs.iter().find(|i| i.name == name).is_none() {
                        return Err(IRError::new("Parameter not found", decorator).into());
                    }
                    methoddef
                        .decorators
                        .parameter_descriptions
                        .insert(name, description);
                }
                _ => {
                    return Err(IRError::new("Unknown decorator", decorator).into());
                }
            }
        }
        servdef.methods.push(methoddef);
    }
    Ok(servdef)
}
/// Lowers the parsed top-level nodes into the final [`IR`].
///
/// Collects `define` options, lowers types/enums/services in order,
/// rejects duplicate names, and verifies that every referenced custom
/// type exists. `RootNode::Import` must already be resolved away by the
/// file processor (it panics here).
///
/// Takes a slice instead of `&Vec` so any contiguous node collection can
/// be passed; existing `&Vec<RootNode>` call sites still coerce.
pub fn build_ir(root: &[RootNode]) -> Result<IR> {
    let mut options = HashMap::<String, String>::new();
    let mut steps = Vec::new();
    for node in root {
        match node {
            RootNode::Type(stmt) => steps.push(Step::Type(build_type(stmt)?)),
            RootNode::Enum(stmt) => steps.push(Step::Enum(build_enum(stmt)?)),
            RootNode::Service(stmt) => steps.push(Step::Service(build_service(stmt)?)),
            RootNode::Define(stmt) => {
                if options.contains_key(&stmt.key) {
                    return Err(IRError::new("Duplicate define", stmt).into());
                }
                // MessagePack transport implies byte support.
                if (stmt.key == "use_messagepack" || stmt.key == "allow_bytes")
                    && stmt.value == "true"
                {
                    options.insert("allow_bytes".to_owned(), "true".to_owned());
                }
                options.insert(stmt.key.clone(), stmt.value.clone());
            }
            RootNode::Import(_) => {
                panic!("Import not supported at this stage!");
            }
        }
    }
    // Duplicate-name detection: types and enums share one namespace
    // (seeded with the built-ins); services have their own.
    let mut all_types = HashSet::<String>::new();
    let mut serv_types = HashSet::<String>::new();
    for bi in &BUILT_INS {
        all_types.insert(bi.to_string());
    }
    for step in &steps {
        match step {
            Step::Type(typedef) => {
                if all_types.contains(&typedef.name) {
                    return Err(IRError::new_from_def("Duplicate type", typedef).into());
                }
                all_types.insert(typedef.name.clone());
            }
            Step::Enum(enumdef) => {
                if all_types.contains(&enumdef.name) {
                    return Err(IRError::new_from_def("Duplicate type", enumdef).into());
                }
                all_types.insert(enumdef.name.clone());
            }
            Step::Service(servdef) => {
                if serv_types.contains(&servdef.name) {
                    return Err(IRError::new_from_def("Duplicate type", servdef).into());
                }
                serv_types.insert(servdef.name.clone());
            }
        }
    }
    // Verify dependencies: every custom type referenced by a type or a
    // service must have been declared (enums reference no other types).
    for step in &steps {
        match step {
            Step::Type(typedef) => {
                for dep in &typedef.depends {
                    if let Type::Custom(dep) = dep {
                        if !all_types.contains(dep) {
                            return Err(IRError::new_from_def(
                                &format!("Type {} depends on unknown type {}", typedef.name, dep),
                                typedef,
                            )
                            .into());
                        }
                    }
                }
            }
            Step::Service(servdef) => {
                for dep in &servdef.depends {
                    if let Type::Custom(dep) = dep {
                        if !all_types.contains(dep) {
                            return Err(IRError::new_from_def(
                                &format!(
                                    "Service {} depends on unknown type {}",
                                    servdef.name, dep
                                ),
                                servdef,
                            )
                            .into());
                        }
                    }
                }
            }
            _ => {}
        }
    }
    Ok(IR { options, steps })
}
/// Semantic error produced while lowering the AST into IR.
#[derive(Debug, Clone)]
pub struct IRError {
    /// Human-readable description, usually suffixed with the node name.
    pub message: String,
    /// Source location the error refers to.
    pub position: ParserPosition,
}
impl IRError {
    /// Builds an error pointing at a parsed AST node.
    fn new(msg: &str, node: &impl Node) -> IRError {
        IRError {
            message: format!("{}: {}", msg, node.get_name()),
            position: node.get_position(),
        }
    }
    /// Builds an error pointing at an already-lowered definition.
    fn new_from_def(msg: &str, def: &impl Definition) -> IRError {
        IRError {
            message: msg.to_string(),
            position: def.get_position(),
        }
    }
}
impl Error for IRError {}
impl Display for IRError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Bug fix: this type is IRError, not ParserError — the label was
        // copy-pasted from parser.rs.
        write!(f, "IRError: {} at {:?}", self.message, self.position)
    }
}

43
libjrpc/src/lib.rs Normal file
View File

@ -0,0 +1,43 @@
mod compile;
mod ir;
mod parser;
mod process;
mod shared;
pub mod targets;
mod tokenizer;
pub use ir::IR;
pub use parser::{Parser, RootNode};
pub use process::FileProcessor;
pub use tokenizer::{tokenize, Token, TokenError, TokenPosition, TokenType};
#[cfg(test)]
mod test {
    use crate::{
        compile::{Compile, CompileContext},
        targets::{self, rust::RustCompiler},
    };
    // Initialize logging once before any test in this module runs.
    #[cfg(test)]
    #[ctor::ctor]
    fn init() {
        env_logger::init();
    }
    /// Smoke test for the fetch/parse/IR pipeline.
    ///
    /// NOTE(review): depends on an HTTP server at 127.0.0.1:7878 serving
    /// test.jrpc — fails anywhere else; the commented file-based variant
    /// is environment-independent. Confirm which one should be active.
    #[test]
    pub fn parse_jrpc() {
        let mut fp = crate::process::FileProcessor::new();
        // let ir = fp.start_compile("./test.jrpc").unwrap();
        let ir = fp.start_compile("http://127.0.0.1:7878/test.jrpc").unwrap();
        println!("{:?}", ir);
    }
    /// End-to-end: parse ./test.jrpc and emit Rust code to ./output/rust.
    #[test]
    pub fn generate_rust() {
        let mut fp = crate::process::FileProcessor::new();
        let ir = fp.start_compile("./test.jrpc").unwrap();
        targets::compile::<RustCompiler>(ir, "./output/rust").unwrap();
    }
}

683
libjrpc/src/parser.rs Normal file
View File

@ -0,0 +1,683 @@
use anyhow::Result;
use log::{debug, trace};
use std::{collections::HashMap, error::Error, fmt::Display, sync::Arc};
use crate::{Token, TokenPosition, TokenType};
/// Parser results carry a [`ParserError`] instead of a generic error.
pub type PResult<T> = Result<T, ParserError>;
/// A source location attached to every AST node.
#[derive(Debug, Clone)]
pub struct ParserPosition {
    // File path (or URL) the node came from.
    path: Arc<String>,
    // Start offset of the first token (from `TokenPosition::start`).
    position: usize,
    // The token span(s) backing this position.
    token_positions: Vec<TokenPosition>,
}
impl ParserPosition {
    /// Builds a position from a single token.
    pub fn from_token(token: &Token) -> Self {
        Self {
            path: token.2.path.clone(),
            position: token.2.start,
            token_positions: vec![token.2.clone()],
        }
    }
}
/// Common interface of all AST nodes: a location and a display name.
pub trait Node {
    fn get_position(&self) -> ParserPosition;
    /// Short human-readable name used in error messages; empty by default.
    fn get_name(&self) -> String {
        String::from("")
    }
}
/// A top-level statement of a `.jrpc` schema file.
#[derive(Debug, Clone)]
pub enum RootNode {
    Define(DefineStatement),
    Import(ImportStatement),
    Service(ServiceStatement),
    Type(TypeStatement),
    Enum(EnumStatement),
}
impl Node for RootNode {
    // Delegates to the wrapped statement.
    fn get_position(&self) -> ParserPosition {
        match self {
            RootNode::Define(stmt) => stmt.get_position(),
            RootNode::Import(stmt) => stmt.get_position(),
            RootNode::Service(stmt) => stmt.get_position(),
            RootNode::Type(stmt) => stmt.get_position(),
            RootNode::Enum(stmt) => stmt.get_position(),
        }
    }
    // Only the bare keyword; richer names come from the wrapped statements.
    fn get_name(&self) -> String {
        match self {
            RootNode::Define(_) => String::from("define"),
            RootNode::Import(_) => String::from("import"),
            RootNode::Service(_) => String::from("service"),
            RootNode::Type(_) => String::from("type"),
            RootNode::Enum(_) => String::from("enum"),
        }
    }
}
/// `define <key> <value>;` — a compiler option.
#[derive(Debug, Clone)]
pub struct DefineStatement {
    pub position: ParserPosition,
    pub key: String,
    pub value: String,
}
impl Node for DefineStatement {
    fn get_position(&self) -> ParserPosition {
        self.position.clone()
    }
    fn get_name(&self) -> String {
        format!("define {}", self.key)
    }
}
/// `import <path>;` — pulls in another schema file.
#[derive(Debug, Clone)]
pub struct ImportStatement {
    pub position: ParserPosition,
    pub path: String,
}
impl Node for ImportStatement {
    fn get_position(&self) -> ParserPosition {
        self.position.clone()
    }
    fn get_name(&self) -> String {
        format!("import {}", self.path)
    }
}
/// `type <Name> { ... }` — a record type declaration.
#[derive(Debug, Clone)]
pub struct TypeStatement {
    pub position: ParserPosition,
    pub name: String,
    pub fields: Vec<TypeFieldNode>,
}
impl Node for TypeStatement {
    fn get_position(&self) -> ParserPosition {
        self.position.clone()
    }
    fn get_name(&self) -> String {
        format!("type {}", self.name)
    }
}
/// One field inside a `type` declaration.
#[derive(Debug, Clone)]
pub struct TypeFieldNode {
    pub position: ParserPosition,
    pub name: String,
    /// Marked with `?` in the schema.
    pub optional: bool,
    /// The type name (for map fields: the value side).
    pub fieldtype: String,
    pub array: bool,
    /// For map fields: the key type name.
    pub map: Option<String>,
}
impl Node for TypeFieldNode {
    fn get_position(&self) -> ParserPosition {
        self.position.clone()
    }
    fn get_name(&self) -> String {
        format!("type_field {}", self.name)
    }
}
/// `enum <Name> { A, B = 5, ... }`
#[derive(Debug, Clone)]
pub struct EnumStatement {
    pub position: ParserPosition,
    pub name: String,
    pub values: Vec<EnumValueNode>,
}
impl Node for EnumStatement {
    fn get_position(&self) -> ParserPosition {
        self.position.clone()
    }
    fn get_name(&self) -> String {
        format!("enum {}", self.name)
    }
}
/// One enum member, optionally with an explicit value.
#[derive(Debug, Clone)]
pub struct EnumValueNode {
    pub position: ParserPosition,
    pub name: String,
    /// `None` means "previous value + 1" (resolved during IR lowering).
    pub value: Option<i64>,
}
impl Node for EnumValueNode {
    fn get_position(&self) -> ParserPosition {
        self.position.clone()
    }
    fn get_name(&self) -> String {
        format!("enum_field {}", self.name)
    }
}
/// `service <Name> { ... }` — a collection of RPC methods.
#[derive(Debug, Clone)]
pub struct ServiceStatement {
    pub position: ParserPosition,
    pub name: String,
    pub methods: Vec<ServiceMethodNode>,
}
impl Node for ServiceStatement {
    fn get_position(&self) -> ParserPosition {
        self.position.clone()
    }
    fn get_name(&self) -> String {
        format!("service {}", self.name)
    }
}
/// One method declaration inside a service.
#[derive(Debug, Clone)]
pub struct ServiceMethodNode {
    pub position: ParserPosition,
    pub name: String,
    pub inputs: Vec<ServiceMethodInputNode>,
    /// `None` for `notification` methods.
    pub return_type: Option<ServiceMethodReturnNode>,
    pub decorators: Vec<Decorator>,
}
impl Node for ServiceMethodNode {
    fn get_position(&self) -> ParserPosition {
        self.position.clone()
    }
    fn get_name(&self) -> String {
        format!("method {}", self.name)
    }
}
/// `@Name("arg", ...)` attached to a service method.
#[derive(Debug, Clone)]
pub struct Decorator {
    pub position: ParserPosition,
    pub name: String,
    /// Raw (unquoted) argument strings.
    pub args: Vec<String>,
}
impl Node for Decorator {
    fn get_position(&self) -> ParserPosition {
        self.position.clone()
    }
    fn get_name(&self) -> String {
        format!("decorator {}", self.name)
    }
}
/// One parameter of a service method.
#[derive(Debug, Clone)]
pub struct ServiceMethodInputNode {
    pub position: ParserPosition,
    pub name: String,
    pub fieldtype: String,
    pub optional: bool,
    pub array: bool,
}
impl Node for ServiceMethodInputNode {
    fn get_position(&self) -> ParserPosition {
        self.position.clone()
    }
    fn get_name(&self) -> String {
        format!("method_arg {}", self.name)
    }
}
/// The declared return type of a service method.
#[derive(Debug, Clone)]
pub struct ServiceMethodReturnNode {
    pub position: ParserPosition,
    pub fieldtype: String,
    pub array: bool,
}
impl Node for ServiceMethodReturnNode {
    // `get_name` keeps the trait's empty default.
    fn get_position(&self) -> ParserPosition {
        self.position.clone()
    }
}
/// Recursive-descent parser over the token stream.
#[derive(Debug, Clone)]
pub struct Parser {
    // Tokens with comments and whitespace already stripped (see `new`).
    tokens: Vec<Token>,
    // Index of the next token to consume.
    position: usize,
}
impl Parser {
/// Creates a parser over `tokens`, discarding trivia up front.
pub fn new(tokens: Vec<Token>) -> Parser {
    // Comments and whitespace carry no syntactic meaning past this point.
    let significant: Vec<Token> = tokens
        .into_iter()
        .filter(|t| !matches!(t.0, TokenType::Comment | TokenType::Space))
        .collect();
    Parser {
        tokens: significant,
        position: 0,
    }
}
/// True while there are unconsumed tokens.
fn has_current_token(&self) -> bool {
    self.position < self.tokens.len()
}
/// The token at the cursor.
///
/// NOTE(review): panics (index out of bounds) when called at end of
/// input — callers must check `has_current_token` first. Confirm
/// whether a `PResult` here would be safer.
fn current_token(&mut self) -> &Token {
    &self.tokens[self.position]
}
/// Succeeds when the current token's type is one of `token_types`;
/// consumes nothing.
fn assert_types(&mut self, token_types: &[TokenType]) -> PResult<()> {
    if token_types.iter().any(|t| *t == self.current_token().0) {
        Ok(())
    } else {
        Err(ParserError::new(
            // Typo fix in the error message: "on of" -> "one of".
            &format!("Unexpected token. Expected one of {:?}", token_types),
            self.current_token(),
        ))
    }
}
/// Consumes the current token unconditionally, returning its text and
/// position. Assumes a current token exists (see `current_token`).
fn eat_token(&mut self) -> PResult<(String, ParserPosition)> {
    debug!("Parser::eat_token()");
    let pos = ParserPosition::from_token(self.current_token());
    let value = self.current_token().1.clone();
    self.position += 1;
    Ok((value, pos))
}
/// Consumes the current token and requires its text to equal `value`.
///
/// Bug fix: the error used to reference `current_token()` *after*
/// `eat_token` had advanced the cursor, so it pointed at the token
/// following the mismatch (and could index out of bounds at EOF).
/// Report the consumed token instead.
fn eat_token_value(&mut self, value: &str) -> PResult<(String, ParserPosition)> {
    debug!("Parser::eat_token_value({})", value);
    let token = self.current_token().clone();
    let (val, pos) = self.eat_token()?;
    if val != value {
        return Err(ParserError::new(
            &format!("Expected token of value {}", value),
            &token,
        ));
    }
    Ok((val, pos))
}
/// Consumes the current token, requiring it to be of `token_type`.
/// On mismatch the token is left unconsumed.
fn eat_token_type(&mut self, token_type: TokenType) -> PResult<(String, ParserPosition)> {
    debug!("Parser::eat_token_type({:?})", token_type);
    if self.current_token().0 != token_type {
        return Err(ParserError::new(
            &format!("Expected token of type {:?}", token_type),
            self.current_token(),
        ));
    }
    self.eat_token()
}
/// Consumes a Text token.
fn eat_text(&mut self) -> PResult<(String, ParserPosition)> {
    debug!("Parser::eat_text()");
    self.eat_token_type(TokenType::Text)
}
/// Consumes a Text or Keyword token — used where identifiers are
/// allowed to collide with reserved words (e.g. field names).
fn eat_text_with_keywords(&mut self) -> PResult<(String, ParserPosition)> {
    debug!("Parser::eat_text_with_keywords()");
    self.assert_types(&[TokenType::Text, TokenType::Keyword])?;
    self.eat_token()
}
/// Consumes a String or Text token and returns its raw content; the
/// surrounding quote pair of a String token is stripped.
fn eat_string_or_text_as_raw(&mut self) -> PResult<String> {
    debug!("Parser::eat_string_or_text_as_raw()");
    self.assert_types(&[TokenType::String, TokenType::Text])?;
    let quoted = self.current_token().0 == TokenType::String;
    let (raw, _) = self.eat_token()?;
    if !quoted {
        return Ok(raw);
    }
    // Drop the first and last character (the quotes).
    let len = raw.chars().count();
    Ok(raw.chars().skip(1).take(len - 2).collect())
}
/// Consumes a Number token and parses it as `i64`.
///
/// Bug fix: on a parse failure the error used to reference the token
/// *after* the number (the cursor had already advanced), which also
/// panics at end of input. Keep the number token and report that.
fn eat_number(&mut self) -> PResult<i64> {
    debug!("Parser::eat_number()");
    self.assert_types(&[TokenType::Number])?;
    let token = self.current_token().clone();
    let (value, _) = self.eat_token()?;
    value
        .parse()
        .map_err(|_| ParserError::new("Invalid number", &token))
}
/// `define <key> <value>;`
fn parse_define(&mut self) -> PResult<DefineStatement> {
    debug!("Parser::parse_define()");
    let (_, position) = self.eat_token_value("define")?;
    let (key, _) = self.eat_token()?;
    trace!("Parser::parse_define()::key = {}", key);
    // The value may be quoted or a bare word.
    let value = self.eat_string_or_text_as_raw()?;
    self.eat_token_type(TokenType::Semicolon)?;
    Ok(DefineStatement { position, key, value })
}
/// `import <path>;`
fn parse_import(&mut self) -> PResult<ImportStatement> {
    debug!("Parser::parse_import()");
    let (_, position) = self.eat_token_value("import")?;
    let path = self.eat_string_or_text_as_raw()?;
    self.eat_token_type(TokenType::Semicolon)?;
    Ok(ImportStatement { position, path })
}
/// A single enum member: `Name` or `Name = <number>`.
fn parse_enum_value(&mut self) -> PResult<EnumValueNode> {
    debug!("Parser::parse_enum_value()");
    let (name, position) = self.eat_token()?;
    let mut value = None;
    if self.current_token().0 == TokenType::Equals {
        self.eat_token()?;
        value = Some(self.eat_number()?);
    }
    Ok(EnumValueNode { position, name, value })
}
/// `enum <Name> { A, B = 5, ... }`
fn parse_enum(&mut self) -> PResult<EnumStatement> {
    debug!("Parser::parse_enum()");
    let (_, position) = self.eat_token_value("enum")?;
    let (name, _) = self.eat_token()?;
    trace!("Parser::parse_enum()::name = {}", name);
    self.eat_token_type(TokenType::CurlyOpen)?;
    let mut values = Vec::new();
    while self.current_token().0 == TokenType::Text {
        values.push(self.parse_enum_value()?);
        // Commas between members are skipped but not required.
        // TODO: decide whether separators should be enforced.
        if self.current_token().0 == TokenType::Comma {
            self.eat_token()?;
        }
    }
    self.eat_token_type(TokenType::CurlyClose)?;
    Ok(EnumStatement { position, name, values })
}
/// One field inside a `type` block:
/// `name?: type;`, `name: type[];` or `name: { keytype, valuetype };`.
fn parse_type_field(&mut self) -> PResult<TypeFieldNode> {
    debug!("Parser::parse_type_field()");
    // Field names may also be keywords (e.g. a field literally named "type").
    let (name, pos) = self.eat_text_with_keywords()?;
    trace!("Parser::parse_type_field()::name = {}", name);
    let mut optional = false;
    let mut array = false;
    let mut map = None;
    if self.current_token().0 == TokenType::Questionmark {
        optional = true;
        self.eat_token()?;
    }
    // NOTE(review): the result is discarded, which silently makes the
    // colon optional (a missing colon is neither consumed nor reported)
    // — confirm whether this should be `?` instead.
    _ = self.eat_token_type(TokenType::Colon);
    // `{ key, value }` declares a map; otherwise a plain (possibly array)
    // type name follows.
    let fieldtype = if self.current_token().0 == TokenType::CurlyOpen {
        self.eat_token()?;
        let (key_type, _) = self.eat_text()?;
        self.eat_token_type(TokenType::Comma)?;
        let (value_type, _) = self.eat_text()?;
        self.eat_token_type(TokenType::CurlyClose)?;
        map = Some(key_type);
        value_type
    } else {
        let (type_name, _) = self.eat_text()?;
        if self.current_token().0 == TokenType::Array {
            array = true;
            self.eat_token()?;
        }
        type_name
    };
    self.eat_token_type(TokenType::Semicolon)?;
    Ok(TypeFieldNode {
        position: pos,
        name,
        optional,
        fieldtype,
        map,
        array,
    })
}
/// `type <Name> { field: type; ... }`
fn parse_type(&mut self) -> PResult<TypeStatement> {
    debug!("Parser::parse_type()");
    let (_, pos) = self.eat_token_value("type")?;
    let (name, _) = self.eat_text()?;
    // Typo fix in the trace label ("prase_type" -> "parse_type").
    trace!("Parser::parse_type()::name = {}", name);
    self.eat_token_type(TokenType::CurlyOpen)?;
    let mut fields = Vec::new();
    // Field names may be plain text or keywords.
    while self.current_token().0 == TokenType::Text
        || self.current_token().0 == TokenType::Keyword
    {
        fields.push(self.parse_type_field()?);
    }
    self.eat_token_type(TokenType::CurlyClose)?;
    Ok(TypeStatement {
        position: pos,
        name,
        fields,
    })
}
/// `@Name("arg1", "arg2", ...)` in front of a service method.
fn parse_decorator(&mut self) -> PResult<Decorator> {
    debug!("Parser::parse_decorator()");
    let (_, position) = self.eat_token_type(TokenType::At)?;
    let (name, _) = self.eat_text()?;
    trace!("Parser::parse_decorator()::name = {}", name);
    self.eat_token_type(TokenType::BracketOpen)?;
    let mut args = Vec::new();
    while self.current_token().0 != TokenType::BracketClose {
        // Every argument after the first is preceded by a comma.
        if !args.is_empty() {
            self.eat_token_type(TokenType::Comma)?;
        }
        args.push(self.eat_string_or_text_as_raw()?);
    }
    self.eat_token_type(TokenType::BracketClose)?;
    Ok(Decorator { name, args, position })
}
/// One service method:
/// `[@Decorator(...)] [notification] name(arg: type, ...)[: ret];`
///
/// `notification` methods carry no return type annotation.
fn parse_method(
    &mut self,
    decorators: Vec<Decorator>,
    notification: bool,
) -> PResult<ServiceMethodNode> {
    debug!(
        "Parser::parse_method({}, {})",
        decorators.len(),
        notification
    );
    let (name, pos) = self.eat_text()?;
    trace!("Parser::parse_method()::name = {}", name);
    self.eat_token_type(TokenType::BracketOpen)?;
    let mut inputs = Vec::new();
    while self.current_token().0 != TokenType::BracketClose {
        // Parameter: `name[?]: type[[]]`
        let (name, position) = self.eat_text_with_keywords()?;
        let mut optional = false;
        if self.current_token().0 == TokenType::Questionmark {
            optional = true;
            self.eat_token()?;
        }
        self.eat_token_type(TokenType::Colon)?;
        let (fieldtype, _) = self.eat_text()?;
        let mut array = false;
        if self.current_token().0 == TokenType::Array {
            array = true;
            self.eat_token()?;
        }
        inputs.push(ServiceMethodInputNode {
            name,
            fieldtype,
            array,
            optional,
            position,
        });
        // Separating commas are skipped but not required.
        if self.current_token().0 == TokenType::Comma {
            self.eat_token()?;
        }
        trace!(
            "Parser::parse_method()::params_next_token: {}",
            self.current_token().1
        );
    }
    self.eat_token_type(TokenType::BracketClose)?;
    let mut return_type = None;
    if !notification {
        // Non-notification methods require `: <type>[[]]`.
        self.eat_token_type(TokenType::Colon)?;
        let (fieldtype, position) = self.eat_text()?;
        let mut array = false;
        if self.current_token().0 == TokenType::Array {
            array = true;
            self.eat_token()?;
        }
        return_type = Some(ServiceMethodReturnNode {
            position,
            fieldtype,
            array,
        });
    }
    self.eat_token_type(TokenType::Semicolon)?;
    Ok(ServiceMethodNode {
        position: pos,
        name,
        inputs,
        return_type,
        decorators,
    })
}
/// `service <Name> { [@Decorator(...)] [notification] method(...); ... }`
fn parse_service(&mut self) -> PResult<ServiceStatement> {
    debug!("Parser::parse_service()");
    let (_, pos) = self.eat_token_value("service")?;
    let (name, _) = self.eat_text()?;
    trace!("Parser::parse_service()::name = {}", name);
    self.eat_token_type(TokenType::CurlyOpen)?;
    let mut methods = Vec::new();
    while self.current_token().0 != TokenType::CurlyClose {
        // Decorators accumulate in front of the method they belong to.
        let mut decorators = Vec::new();
        while self.current_token().0 == TokenType::At {
            decorators.push(self.parse_decorator()?);
        }
        // A leading `notification` marks a method without a return type.
        let mut notification = false;
        if self.current_token().1 == "notification" {
            self.eat_token()?;
            notification = true;
        }
        methods.push(self.parse_method(decorators, notification)?);
    }
    self.eat_token_type(TokenType::CurlyClose)?;
    Ok(ServiceStatement {
        position: pos,
        name,
        methods,
    })
}
/// Parses one top-level statement, dispatching on the leading keyword.
fn parse_statement(&mut self) -> PResult<RootNode> {
    debug!("Parser::parse_statement()");
    let token = self.current_token();
    if let TokenType::Keyword = token.0 {
        trace!("Parser::parse_statement()::type = {}", token.1);
        return match token.1.as_str() {
            "define" => Ok(RootNode::Define(self.parse_define()?)),
            "import" => Ok(RootNode::Import(self.parse_import()?)),
            "type" => Ok(RootNode::Type(self.parse_type()?)),
            "service" => Ok(RootNode::Service(self.parse_service()?)),
            "enum" => Ok(RootNode::Enum(self.parse_enum()?)),
            _ => Err(ParserError::new("Unknown keyword", token)),
        };
    } else {
        Err(ParserError::new("Expected keyword", token))
    }
}
/// Parses the entire token stream into a list of top-level nodes.
pub fn parse(&mut self) -> PResult<Vec<RootNode>> {
    debug!("Parser::parse()");
    let mut nodes = Vec::new();
    while self.has_current_token() {
        nodes.push(self.parse_statement()?);
    }
    Ok(nodes)
}
}
/// Syntax error raised while parsing the token stream.
#[derive(Debug, Clone)]
pub struct ParserError {
    /// Description, suffixed with the offending token's text.
    pub message: String,
    /// The token where parsing failed.
    pub token: Token,
}
impl ParserError {
    /// Builds an error referencing `token`.
    fn new(msg: &str, token: &Token) -> ParserError {
        ParserError {
            message: format!("{}: {}", msg, token.1),
            token: token.clone(),
        }
    }
}
impl Error for ParserError {}
impl Display for ParserError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "ParserError: {} at {:?}", self.message, self.token)
    }
}

135
libjrpc/src/process.rs Normal file
View File

@ -0,0 +1,135 @@
use std::{
collections::{HashMap, HashSet},
path::PathBuf,
sync::Arc,
};
use anyhow::Result;
use log::trace;
use url::Url;
use crate::{ir::IR, parser::RootNode};
/// Loads, caches and recursively parses `.jrpc` files (from disk, and —
/// with the `http` feature — from URLs).
pub struct FileProcessor {
    // Raw file contents keyed by resolved path/URL.
    file_cache: HashMap<String, String>,
    // Files already parsed once; deduplicates and breaks import cycles.
    processed_files: HashSet<String>,
}
impl FileProcessor {
/// Creates a processor with empty caches.
pub fn new() -> Self {
    trace!("FileProcessor::new()");
    Self {
        file_cache: HashMap::default(),
        processed_files: HashSet::default(),
    }
}
/// Returns true when `inp` looks like an HTTP(S) URL.
///
/// Without the `http` feature this is always false (and `inp` is
/// intentionally unused apart from the trace line).
fn is_url(inp: Option<&str>) -> bool {
    trace!("FileProcessor::is_url({:?})", inp);
    #[cfg(feature = "http")]
    if let Some(inp) = inp {
        if inp.starts_with("http://") || inp.starts_with("https://") {
            return true;
        }
    }
    false
}
fn resolve_path(input: &str, context: Option<&str>) -> String {
trace!("FileProcessor::resolve_path({}, {:?})", input, context);
let mut input = input.to_string();
#[cfg(feature = "http")]
if cfg!(feature = "http") && (Self::is_url(Some(&input)) || Self::is_url(context)) {
if Self::is_url(Some(&input)) {
input.to_string()
} else {
let url = Url::parse(context.unwrap()).unwrap();
if !input.ends_with(".jrpc") {
input = format!("{}.jrpc", input);
}
url.join(&input).unwrap().to_string()
}
} else {
if !input.ends_with(".jrpc") {
input = format!("{}.jrpc", input);
}
if let Some(context) = context {
let mut path = PathBuf::from(context);
path.pop();
path = path.join(input);
path.to_str().unwrap().to_string()
} else {
input
}
}
}
/// Fetches `url` over blocking HTTP and caches the body.
///
/// NOTE(review): not gated on the `http` feature even though it uses
/// `reqwest` — confirm the dependency is unconditional in Cargo.toml.
fn get_file_url(&mut self, url: &str) -> Result<String> {
    trace!("FileProcessor::get_file_url({})", url);
    let resp = reqwest::blocking::get(url)?;
    let body = resp.text()?;
    self.file_cache.insert(url.to_string(), body.clone());
    Ok(body)
}
/// Reads `path` from disk and caches the contents.
fn get_file_path(&mut self, path: &str) -> Result<String> {
    trace!("FileProcessor::get_file_path({})", path);
    let body = std::fs::read_to_string(path)?;
    self.file_cache.insert(path.to_string(), body.clone());
    Ok(body)
}
/// Returns the contents of `name`, consulting the cache first.
///
/// With the `http` feature enabled, `http(s)://` names are fetched over
/// the network; everything else is read from the filesystem.
fn get_file(&mut self, name: &str) -> Result<String> {
    trace!("FileProcessor::get_file({})", name);
    // Single map lookup instead of `contains_key` + `get().unwrap()`.
    if let Some(cached) = self.file_cache.get(name) {
        return Ok(cached.clone());
    }
    #[cfg(feature = "http")]
    if name.starts_with("http://") || name.starts_with("https://") {
        return self.get_file_url(name);
    }
    self.get_file_path(name)
}
/// Parses `file` (after path resolution) and splices in the contents of
/// any `import` statements, depth-first, returning a flat node list.
///
/// Already-processed files yield an empty list, which both deduplicates
/// repeated imports and breaks import cycles.
/// NOTE(review): `root` is currently only used in the trace output.
fn process_file(&mut self, file: &str, root: bool) -> Result<Vec<RootNode>> {
    let file = Self::resolve_path(file, None);
    trace!("FileProcessor::process_file({}, {})", file, root);
    if self.processed_files.contains(&file) {
        return Ok(vec![]);
    }
    self.processed_files.insert(file.clone());
    let content = self.get_file(&file)?;
    let tokens = crate::tokenizer::tokenize(Arc::new(file.to_string()), content)?;
    let parsed = crate::parser::Parser::new(tokens).parse()?;
    let mut result = Vec::new();
    for stmt in &parsed {
        if let crate::parser::RootNode::Import(stmt) = stmt {
            // Imports are resolved relative to the current file and
            // replaced inline by the imported nodes.
            let file = Self::resolve_path(&stmt.path, Some(&file));
            let s = self.process_file(&file, false)?;
            result.extend(s);
        } else {
            result.push(stmt.clone());
        }
    }
    Ok(result)
}
/// Entry point: parses `file` plus all transitive imports and lowers the
/// result into IR.
pub fn start_compile(&mut self, file: &str) -> Result<IR> {
    trace!("FileProcessor::start_compile({})", file);
    let nodes = self.process_file(file, true)?;
    crate::ir::build_ir(&nodes)
}
}

28
libjrpc/src/shared.rs Normal file
View File

@ -0,0 +1,28 @@
/// The reserved top-level keywords of the jrpc schema language.
pub enum Keywords {
    Type,
    Enum,
    Import,
    Service,
    Define,
}

impl Keywords {
    /// Returns true when `input` is one of the reserved keywords.
    pub fn is_keyword(input: &str) -> bool {
        // `matches!` replaces the manual true/false match
        // (clippy::match_like_matches_macro).
        matches!(input, "type" | "enum" | "import" | "service" | "define")
    }
}

// `Display` (instead of a direct `ToString` impl) provides `to_string()`
// through the blanket impl and integrates with format macros.
impl std::fmt::Display for Keywords {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let word = match self {
            Keywords::Type => "type",
            Keywords::Enum => "enum",
            Keywords::Import => "import",
            Keywords::Service => "service",
            Keywords::Define => "define",
        };
        f.write_str(word)
    }
}

View File

@ -0,0 +1,28 @@
use anyhow::Result;
use crate::{
compile::{Compile, CompileContext},
IR,
};
pub mod rust;
/// Runs compiler `T` over every step of `ir`, writing the generated
/// files below `output`.
pub fn compile<T: Compile>(ir: IR, output: &str) -> Result<()> {
    let mut ctx = CompileContext::new(output);
    let mut compiler = T::new(&ir.options)?;
    // One-time setup, then one generate call per step, then finalize.
    compiler.start(&mut ctx)?;
    for step in &ir.steps {
        match step {
            crate::ir::Step::Type(def) => compiler.generate_type(&mut ctx, def)?,
            crate::ir::Step::Enum(def) => compiler.generate_enum(&mut ctx, def)?,
            crate::ir::Step::Service(def) => compiler.generate_service(&mut ctx, def)?,
        }
    }
    compiler.finalize(&mut ctx, &ir)?;
    Ok(())
}

568
libjrpc/src/targets/rust.rs Normal file
View File

@ -0,0 +1,568 @@
use anyhow::Result;
use log::warn;
use std::collections::{HashMap, HashSet};
use crate::compile::{Compile, CompileContext, FileGenerator};
use crate::ir::{EnumDefinition, ServiceDefinition, Step, Type, TypeDefinition};
use crate::shared::Keywords;
use crate::IR;
/// Code-generation backend that emits a complete Rust crate for an IR.
pub struct RustCompiler {
    // Crate name substituted into the generated Cargo.toml
    // (taken from the `rust_crate` option in `new`).
    crate_name: String,
}
// Identifiers that would collide with Rust keywords; fix_keyword_name appends
// an underscore to these. NOTE(review): this covers only a handful of Rust's
// reserved words (e.g. `fn`, `impl`, `match`, `struct` are missing) — verify.
static RUST_KEYWORDS: [&'static str; 6] = ["type", "return", "static", "pub", "enum", "self"];
impl RustCompiler {
/// Maps a schema type to its Rust spelling; custom types keep their name.
fn type_to_rust(typ: &Type) -> String {
    let name = match typ {
        Type::String => "String",
        Type::Int => "i64",
        Type::Float => "f64",
        Type::Bool => "bool",
        Type::Bytes => "Vec<u8>",
        Type::Void => "()",
        Type::Custom(name) => return name.clone(),
    };
    name.to_string()
}
/// Like `type_to_rust`, additionally wrapping in `Option<…>` and/or `Vec<…>`.
/// Nesting order: Option is applied first, so an optional array yields
/// `Vec<Option<T>>`.
fn type_to_rust_ext(typ: &Type, optional: bool, array: bool) -> String {
    let base = Self::type_to_rust(typ);
    let inner = if optional {
        format!("Option<{}>", base)
    } else {
        base
    };
    if array {
        format!("Vec<{}>", inner)
    } else {
        inner
    }
}
/// Emits a `use crate::…;` line for every custom type this definition depends
/// on, followed by two blank separator lines.
fn add_dependencies(
    &mut self,
    file: &mut FileGenerator,
    depends: &HashSet<Type>,
) -> Result<()> {
    // NOTE(review): HashSet iteration order is unstable, so the import order
    // in generated files is nondeterministic — consider sorting for
    // reproducible output.
    for dep in depends {
        // Only user-defined types live in their own modules; primitives map
        // straight to Rust built-ins and need no `use`.
        if let Type::Custom(name) = dep {
            file.a0(format!("use crate::{};", name));
        }
    }
    file.a0("");
    file.a0("");
    Ok(())
}
/// Appends an underscore to identifiers that clash with Rust keywords
/// (see RUST_KEYWORDS); all other names pass through unchanged.
fn fix_keyword_name(name: &str) -> String {
    match RUST_KEYWORDS.contains(&name) {
        true => format!("{}_", name),
        false => name.to_owned(),
    }
}
/// Converts CamelCase to snake_case for module/file names.
/// Runs of consecutive uppercase letters are lowered without separators
/// ("HTTPServer" -> "httpserver", "getID" -> "get_id").
fn to_snake(name: &str) -> String {
    let mut out = String::with_capacity(name.len() + 4);
    let mut prev_was_upper = false;
    for ch in name.chars() {
        if ch.is_uppercase() {
            // Insert the separator only at a lower->upper transition.
            if !prev_was_upper && !out.is_empty() {
                out.push('_');
            }
            out.push(ch.to_ascii_lowercase());
            prev_was_upper = true;
        } else {
            out.push(ch);
            prev_was_upper = false;
        }
    }
    out
}
/// Writes the generated crate's module tree: `src/lib.rs` declares and
/// re-exports every type/enum module, while `src/server/mod.rs` and
/// `src/client/mod.rs` re-export the per-service modules.
fn generate_service_lib(ctx: &CompileContext, ir: &IR) -> Result<()> {
    // f -> src/lib.rs, fc -> src/client/mod.rs, fs -> src/server/mod.rs
    let mut f = FileGenerator::new();
    let mut fc = FileGenerator::new();
    let mut fs = FileGenerator::new();
    f.a0("pub mod base_lib;");
    f.a0("pub use base_lib::{JRPCServer, JRPCClient, Result};");
    for step in ir.steps.iter() {
        match step {
            Step::Type(def) => {
                f.a0(format!("mod {};", Self::to_snake(&def.name)));
                f.a(
                    0,
                    format!("pub use {}::{};", Self::to_snake(&def.name), def.name),
                );
            }
            Step::Enum(def) => {
                f.a0(format!("mod {};", Self::to_snake(&def.name)));
                f.a(
                    0,
                    format!("pub use {}::{};", Self::to_snake(&def.name), def.name),
                );
            }
            Step::Service(def) => {
                // The server side re-exports both the service trait and its
                // handler; the client side only the client struct.
                fs.a0(format!("mod {};", Self::to_snake(&def.name)));
                fs.a0(format!(
                    "pub use {}::{{ {}, {}Handler }};",
                    Self::to_snake(&def.name),
                    def.name,
                    def.name
                ));
                fc.a0(format!("mod {};", Self::to_snake(&def.name)));
                fc.a0(format!(
                    "pub use {}::{};",
                    Self::to_snake(&def.name),
                    def.name,
                ));
            }
        }
    }
    f.a0("pub mod server;");
    f.a0("pub mod client;");
    // NOTE(review): this uses get_content() while the other generators call
    // into_content() — confirm both accessors exist on FileGenerator and
    // produce the same output.
    ctx.write_file("src/lib.rs", f.get_content())?;
    ctx.write_file("src/server/mod.rs", fs.get_content())?;
    ctx.write_file("src/client/mod.rs", fc.get_content())?;
    Ok(())
}
/// Generates `src/server/<service>.rs`: an async trait the user implements
/// plus a `<Service>Handler` that dispatches incoming JSON-RPC requests
/// (positional or named parameters) to that implementation.
fn generate_service_server(
    &mut self,
    ctx: &mut CompileContext,
    definition: &ServiceDefinition,
) -> anyhow::Result<()> {
    let mut f = FileGenerator::new();
    self.add_dependencies(&mut f, &definition.depends)?;
    f.a0("use crate::base_lib::{JRPCServerService, JRPCRequest, Result};");
    f.a0("use serde_json::Value;");
    f.a0("use std::sync::Arc;");
    f.a0("use async_trait::async_trait;");
    // The user-facing trait: one async method per service method.
    f.a0("#[async_trait]");
    f.a0(format!("pub trait {} {{", definition.name));
    for method in definition.methods.iter() {
        let params = method
            .inputs
            .iter()
            .map(|arg| {
                format!(
                    "{}: {}",
                    Self::fix_keyword_name(&arg.name),
                    Self::type_to_rust_ext(&arg.typ, arg.optional, arg.array)
                )
            })
            .collect::<Vec<String>>()
            .join(", ");
        // Methods without a declared output return the unit type.
        let ret = method.output.as_ref().map_or_else(
            || "()".to_owned(),
            |r| Self::type_to_rust_ext(&r.typ, false, r.array),
        );
        f.a1("#[allow(non_snake_case)]");
        f.a(
            1,
            format!(
                "async fn {}(&self, {}) -> Result<{}>;",
                Self::fix_keyword_name(&method.name),
                params,
                ret
            ),
        );
    }
    f.a0("}");
    f.a0("");
    // Handler struct owning a boxed trait object of the implementation.
    f.a0(format!("pub struct {}Handler {{", definition.name));
    f.a1(format!(
        "implementation: Box<dyn {} + Sync + Send + 'static>,",
        definition.name
    ));
    f.a0("}");
    f.a0("");
    f.a0(format!("impl {}Handler {{", definition.name));
    f.a1(format!(
        "pub fn new(implementation: Box<dyn {} + Sync + Send + 'static>) -> Arc<Self> {{",
        definition.name,
    ));
    f.a2("Arc::from(Self { implementation })");
    f.a1("}");
    f.a0("}");
    f.a0("");
    // Dispatch: match on the wire-level function name, deserialize params
    // from either positional (array) or named (object) form, call through.
    f.a0("#[async_trait]");
    f.a0(format!(
        "impl JRPCServerService for {}Handler {{",
        definition.name
    ));
    f.a1(format!(
        "fn get_id(&self) -> String {{ \"{}\".to_owned() }} ",
        definition.name
    ));
    f.a1("");
    f.a1("#[allow(non_snake_case)]");
    f.a1(
        "async fn handle(&self, msg: &JRPCRequest, function: &str) -> Result<(bool, Value)> {",
    );
    f.a2("match function {");
    // TODO: Implement optional arguments!
    // NOTE(review): the match arm below is generated from
    // fix_keyword_name(&method.name), but clients send the *original* method
    // name on the wire; meanwhile the trait fn above was declared with the
    // fixed name yet the calls below use the raw method.name. Keyword-named
    // methods look broken in both directions — verify intended behavior.
    for method in &definition.methods {
        f.a3(format!(
            "\"{}\" => {{",
            Self::fix_keyword_name(&method.name)
        ));
        if method.inputs.len() < 1 {
            // No parameters: call the implementation directly.
            f.a5(format!(
                "let res = self.implementation.{}().await?;",
                method.name
            ));
            f.a5("Ok((true, serde_json::to_value(res)?))");
        } else {
            // Positional parameters: params sent as a JSON array.
            f.a4("if msg.params.is_array() {");
            if method.inputs.len() > 0 {
                f.a5(
                    "let arr = msg.params.as_array().unwrap(); //TODO: Check if this can fail.",
                );
            }
            f.a5(format!("let res = self.implementation.{}(", method.name));
            for (i, arg) in method.inputs.iter().enumerate() {
                f.a6(format!("serde_json::from_value(arr[{}].clone())", i));
                f.a(
                    7,
                    format!(
                        ".map_err(|_| \"Parameter for field '{}' should be of type '{}'!\")?{}",
                        arg.name,
                        arg.typ.to_string(),
                        if i < method.inputs.len() - 1 { "," } else { "" }
                    ),
                );
            }
            f.a5(").await?;");
            if let Some(_output) = &method.output {
                f.a5("Ok((true, serde_json::to_value(res)?))");
            } else {
                f.a5("_ = res;");
                f.a5("Ok((true, Value::Null))");
            }
            // Named parameters: params sent as a JSON object keyed by field name.
            f.a4("} else if msg.params.is_object() {");
            f.a5("let obj = msg.params.as_object().unwrap(); //TODO: Check if this can fail.");
            f.a5(format!("let res = self.implementation.{}(", method.name));
            for (i, arg) in method.inputs.iter().enumerate() {
                f.a6(format!(
                    "serde_json::from_value(obj.get(\"{}\").ok_or(\"Parameter of field '{}' missing!\")?.clone())",
                    arg.name,
                    arg.name
                ));
                f.a(
                    7,
                    format!(
                        ".map_err(|_| \"Parameter for field {} should be of type '{}'!\")?{}",
                        arg.name,
                        arg.typ.to_string(),
                        if i < method.inputs.len() - 1 { "," } else { "" }
                    ),
                );
            }
            f.a5(").await?;");
            if let Some(_output) = &method.output {
                f.a5("Ok((true, serde_json::to_value(res)?))");
            } else {
                // NOTE(review): the array branch above returns (true, Null)
                // for void methods, this branch returns (false, Null) —
                // confirm which flag is intended.
                f.a5("Ok((false, Value::Null))");
            }
            f.a4("} else {");
            f.a5("Err(Box::from(\"Invalid parameters??\".to_owned()))");
            f.a4("}");
        }
        f.a3("}");
    }
    f.a3("_ => { Err(Box::from(format!(\"Invalid function {}\", function).to_owned())) },");
    f.a2("}");
    f.a1("}");
    f.a0("}");
    ctx.write_file(
        &format!("src/server/{}.rs", Self::to_snake(&definition.name)),
        f.into_content(),
    )?;
    Ok(())
}
/// Generates `src/client/<service>.rs`: a thin client struct with one async
/// method per service method; arguments are always serialized positionally.
fn generate_service_client(
    &mut self,
    ctx: &mut CompileContext,
    definition: &ServiceDefinition,
) -> anyhow::Result<()> {
    let mut f = FileGenerator::new();
    self.add_dependencies(&mut f, &definition.depends)?;
    f.a0("use crate::base_lib::{JRPCClient, JRPCRequest, Result};");
    f.a0("use serde_json::json;");
    f.a0("");
    f.a0(format!("pub struct {} {{", definition.name));
    f.a1("client: JRPCClient,");
    f.a0("}");
    f.a0("");
    f.a0(format!("impl {} {{", definition.name));
    f.a1("pub fn new(client: JRPCClient) -> Self {");
    f.a2(format!("Self {{ client }}"));
    f.a1("}");
    f.a0("");
    for method in &definition.methods {
        let params = method
            .inputs
            .iter()
            .map(|arg| {
                format!(
                    "{}: {}",
                    Self::fix_keyword_name(&arg.name),
                    Self::type_to_rust_ext(&arg.typ, arg.optional, arg.array)
                )
            })
            .collect::<Vec<String>>()
            .join(", ");
        let ret = method.output.as_ref().map_or("()".to_string(), |output| {
            Self::type_to_rust_ext(&output.typ, false, output.array)
        });
        f.a1("#[allow(non_snake_case)]");
        f.a1(format!(
            "pub async fn {}(&self, {}) -> Result<{}> {{",
            method.name, params, ret
        ));
        // Build the JSON-RPC request; the method field is "Service.method".
        f.a2("let l_req = JRPCRequest {");
        f.a3("jsonrpc: \"2.0\".to_owned(),");
        f.a3("id: None, // 'id' will be set by the send_request function");
        f.a3(format!(
            "method: \"{}.{}\".to_owned(),",
            definition.name, method.name
        ));
        f.a3(format!(
            "params: json!([{}])",
            method
                .inputs
                .iter()
                .map(|e| Self::fix_keyword_name(&e.name))
                .collect::<Vec<String>>()
                .join(", ")
        ));
        f.a2("};");
        // Methods with a declared output use request/response; methods
        // without one are fire-and-forget notifications.
        if let Some(output) = &method.output {
            f.a2("let l_res = self.client.send_request(l_req).await;");
            f.a2("match l_res {");
            f.a3("Err(e) => Err(e),");
            if output.typ == Type::Void {
                f.a3("Ok(_) => Ok(())");
            } else {
                f.a3("Ok(o) => serde_json::from_value(o).map_err(|e| Box::from(e))");
            }
            f.a2("}");
        } else {
            f.a2("self.client.send_notification(l_req).await;");
            f.a2("Ok(())");
        }
        f.a1("}");
    }
    f.a0("}");
    ctx.write_file(
        &format!("src/client/{}.rs", Self::to_snake(&definition.name)),
        f.into_content(),
    )?;
    Ok(())
}
}
impl Compile for RustCompiler {
fn new(options: &HashMap<String, String>) -> anyhow::Result<Self> {
let crate_name = if let Some(crate_name) = options.get("rust_crate") {
crate_name.to_string()
} else {
anyhow::bail!("crate option is required for rust compiler");
};
if let Some(allow_bytes) = options.get("allow_bytes") {
if allow_bytes == "true" {
anyhow::bail!("allow_bytes option is not supported for rust compiler");
}
}
Ok(RustCompiler { crate_name })
}
/// Backend identifier used for target selection.
fn name(&self) -> String {
    String::from("rust")
}
/// Seeds the output crate with static templates: a Cargo.toml with the crate
/// name substituted for `__name__`, and the shared runtime support code
/// copied to src/base_lib.rs. Templates are embedded at compile time.
fn start(&mut self, ctx: &mut CompileContext) -> anyhow::Result<()> {
    ctx.write_file(
        "Cargo.toml",
        include_str!("../../templates/Rust/Cargo.toml")
            .to_owned()
            .replace("__name__", &self.crate_name),
    )?;
    ctx.write_file(
        "src/base_lib.rs",
        include_str!("../../templates/Rust/src/lib.rs").to_owned(),
    )?;
    Ok(())
}
/// Generates `src/<name>.rs` containing a serde-enabled Rust struct for the
/// IR type definition.
fn generate_type(
    &mut self,
    ctx: &mut CompileContext,
    definition: &TypeDefinition,
) -> anyhow::Result<()> {
    let mut f = FileGenerator::new();
    // HashMap import is only emitted when at least one field is a map.
    if definition.fields.iter().any(|e| e.map.is_some()) {
        f.a0("use std::collections::hash_map::HashMap;")
    }
    f.a0("use serde::{Deserialize, Serialize};");
    self.add_dependencies(&mut f, &definition.depends)?;
    f.a0("#[derive(Clone, Debug, Serialize, Deserialize)]");
    f.a0(format!("pub struct {} {{", definition.name));
    for field in definition.fields.iter() {
        f.a(1, "#[allow(non_snake_case)]");
        let func = format!("pub {}:", Self::fix_keyword_name(&field.name));
        // NOTE(review): the serde rename below triggers on *schema* keywords
        // (Keywords::is_keyword), while fix_keyword_name above renames on
        // *Rust* keywords (RUST_KEYWORDS). A field named e.g. "return" gets
        // renamed on the Rust side but receives no #[serde(rename)], changing
        // its wire name to "return_" — verify these lists are meant to differ.
        if Keywords::is_keyword(&field.name) {
            warn!(
                "[RUST] Warning: Field name '{}' is not allowed in Rust. Renaming to '{}_'",
                field.name, field.name
            );
            f.a(1, format!("#[serde(rename = \"{}\")]", field.name));
        }
        // Optional fields wrap the (possibly Vec/HashMap) type in Option<…>.
        let mut opts = String::new();
        let mut opte = String::new();
        if field.optional {
            opts = "Option<".to_string();
            opte = ">".to_string();
        }
        // Array takes precedence over map when both flags are set.
        if field.array {
            f.a(
                1,
                format!(
                    "{} {}Vec<{}>{},",
                    func,
                    opts,
                    Self::type_to_rust(&field.typ),
                    opte
                ),
            );
        } else if let Some(map) = &field.map {
            f.a(
                1,
                format!(
                    "{} {}HashMap<{}, {}>{},",
                    func,
                    opts,
                    Self::type_to_rust(map),
                    Self::type_to_rust(&field.typ),
                    opte
                ),
            );
        } else {
            f.a(
                1,
                format!(
                    "{} {}{}{},",
                    func,
                    opts,
                    Self::type_to_rust(&field.typ),
                    opte
                ),
            );
        }
    }
    f.a0("}");
    ctx.write_file(
        &format!("src/{}.rs", Self::to_snake(&definition.name)),
        f.into_content(),
    )?;
    Ok(())
}
/// Generates `src/<name>.rs` containing a `#[repr(i64)]` Rust enum with
/// IntEnum support for the IR enum definition.
fn generate_enum(
    &mut self,
    ctx: &mut CompileContext,
    definition: &EnumDefinition,
) -> anyhow::Result<()> {
    let mut file = FileGenerator::new();
    file.a0("use int_enum::IntEnum;");
    file.a0("");
    file.a0("");
    file.a0("#[repr(i64)]");
    file.a0("#[derive(Clone, Copy, Debug, Eq, PartialEq, IntEnum)]");
    file.a0(format!("pub enum {} {{", definition.name));
    for variant in definition.values.iter() {
        file.a1(format!("{} = {},", variant.name, variant.value));
    }
    file.a0("}");
    let path = format!("src/{}.rs", Self::to_snake(&definition.name));
    ctx.write_file(&path, file.into_content())?;
    Ok(())
}
/// Emits both halves of the RPC surface for one service: the client stub
/// first, then the server-side trait and handler.
fn generate_service(
    &mut self,
    ctx: &mut CompileContext,
    definition: &ServiceDefinition,
) -> anyhow::Result<()> {
    self.generate_service_client(ctx, definition)?;
    self.generate_service_server(ctx, definition)
}
/// Runs after all per-definition files are written: emits the module tree
/// (lib.rs and the server/client mod.rs files) covering every module.
fn finalize(&mut self, ctx: &mut CompileContext, ir: &IR) -> anyhow::Result<()> {
    Self::generate_service_lib(ctx, ir)
}
}

197
libjrpc/src/tokenizer.rs Normal file
View File

@ -0,0 +1,197 @@
use anyhow::Result;
use lazy_static::lazy_static;
use regex::Regex;
use std::{error::Error, fmt::Display, sync::Arc};
/// Tries `regex` against `input` starting at byte offset `index`.
/// Returns the matched text only when the match begins exactly at that
/// offset; any later match is rejected.
fn match_regex(regex: &Regex, input: &str, index: usize) -> Option<String> {
    // println!("Matching regex {:?} at index {}", regex, index);
    // `index` must lie on a char boundary; tokenize() only ever advances by
    // whole-match byte lengths, so this slice does not panic in practice.
    let tail = &input[index..];
    // `?` on the Option replaces the original let-else + unwrap chain.
    let capture = regex.captures(tail)?.get(0)?;
    if capture.start() != 0 {
        return None;
    }
    Some(capture.as_str().to_owned())
}
// A token pattern paired with the token type it produces.
struct TokenizerRegex(Regex, TokenType);
lazy_static! {
    // Ordered token patterns: the tokenizer takes the FIRST pattern that
    // matches at the current offset, so specific patterns (comments,
    // keywords) must precede the generic ones (number, text).
    static ref REGEXES: Vec<TokenizerRegex> = {
        let mut regexes = Vec::new();
        regexes.push(TokenizerRegex(
            Regex::new(r#"^\s+"#).unwrap(),
            TokenType::Space,
        ));
        // Block comments: `(?s)` lets `.` cross newlines and the lazy `.*?`
        // stops at the first closing `*/`. (The previous pattern required a
        // stray `/` *after* `*/`, so block comments never matched and fell
        // through to a tokenize error — this fixes the FIXME it carried.)
        regexes.push(TokenizerRegex(
            Regex::new(r#"(?s)^/\*.*?\*/"#).unwrap(),
            TokenType::Comment,
        ));
        regexes.push(TokenizerRegex(
            Regex::new(r#"^\/\/.+"#).unwrap(),
            TokenType::Comment,
        ));
        regexes.push(TokenizerRegex(
            Regex::new(r#"^#.+"#).unwrap(),
            TokenType::Comment,
        ));
        // Double-quoted strings, non-greedy so two strings on one line do
        // not merge. No escape-sequence handling.
        regexes.push(TokenizerRegex(
            Regex::new(r#"^".*?""#).unwrap(),
            TokenType::String,
        ));
        regexes.push(TokenizerRegex(
            Regex::new(r#"^(type|enum|import|service|define)\b"#).unwrap(),
            TokenType::Keyword,
        ));
        regexes.push(TokenizerRegex(Regex::new(r#"^\@"#).unwrap(), TokenType::At));
        regexes.push(TokenizerRegex(
            Regex::new(r#"^\:"#).unwrap(),
            TokenType::Colon,
        ));
        regexes.push(TokenizerRegex(
            Regex::new(r#"^\;"#).unwrap(),
            TokenType::Semicolon,
        ));
        regexes.push(TokenizerRegex(
            Regex::new(r#"^\,"#).unwrap(),
            TokenType::Comma,
        ));
        regexes.push(TokenizerRegex(
            Regex::new(r#"^\="#).unwrap(),
            TokenType::Equals,
        ));
        regexes.push(TokenizerRegex(
            Regex::new(r#"^\{"#).unwrap(),
            TokenType::CurlyOpen,
        ));
        regexes.push(TokenizerRegex(
            Regex::new(r#"^\}"#).unwrap(),
            TokenType::CurlyClose,
        ));
        regexes.push(TokenizerRegex(
            Regex::new(r#"^\("#).unwrap(),
            TokenType::BracketOpen,
        ));
        regexes.push(TokenizerRegex(
            Regex::new(r#"^\)"#).unwrap(),
            TokenType::BracketClose,
        ));
        regexes.push(TokenizerRegex(
            Regex::new(r#"^\[\]"#).unwrap(),
            TokenType::Array,
        ));
        regexes.push(TokenizerRegex(
            Regex::new(r#"^\?"#).unwrap(),
            TokenType::Questionmark,
        ));
        // NOTE(review): `[\.0-9]+` also accepts "..." or "1.2.3" — verify the
        // parser rejects malformed numbers.
        regexes.push(TokenizerRegex(
            Regex::new(r#"^[\.0-9]+"#).unwrap(),
            TokenType::Number,
        ));
        regexes.push(TokenizerRegex(
            Regex::new(r#"^[a-zA-Z_]([a-zA-Z0-9_]?)+"#).unwrap(),
            TokenType::Text,
        ));
        regexes
    };
}
/// Byte-offset span of a token within a source file.
#[derive(Debug, Clone)]
pub struct TokenPosition {
    // Source file path, shared (via Arc) between all tokens of one file.
    pub path: Arc<String>,
    // Start byte offset of the token.
    pub start: usize,
    // End byte offset (exclusive; start + matched length in `tokenize`).
    pub end: usize,
}
impl TokenPosition {
pub fn new(path: Arc<String>, start: usize, end: usize) -> Self {
Self { path, start, end }
}
}
impl Display for TokenPosition {
    // Renders as "path:start:end" using byte offsets, not line/column.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        //TODO: Map index to line and column
        write!(f, "{}:{}:{}", self.path, self.start, self.end)
    }
}
/// Classification of a lexeme produced by `tokenize`.
/// The ordering of `REGEXES` decides which type wins on overlap.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum TokenType {
    Space,
    Comment,
    String,
    // One of: type, enum, import, service, define.
    Keyword,
    At,
    Colon,
    Semicolon,
    Comma,
    Equals,
    CurlyOpen,
    CurlyClose,
    BracketOpen,
    BracketClose,
    // The two-character sequence "[]".
    Array,
    Questionmark,
    Number,
    // Identifiers: [a-zA-Z_] followed by word characters.
    Text,
}
/// Error raised when the tokenizer cannot match any pattern at the
/// current offset.
#[derive(Debug, Clone)]
pub struct TokenError {
    /// Human-readable description, including an excerpt of the input.
    pub message: String,
    /// Byte span in the source file where tokenization failed.
    pub position: TokenPosition,
}

impl Error for TokenError {}

impl Display for TokenError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Use TokenPosition's Display ("path:start:end") instead of the Debug
        // dump the original printed with {:?}.
        write!(f, "TokenError: {} at {}", self.message, self.position)
    }
}
/// A single lexeme: (token type, matched text, source span).
#[derive(Debug, Clone)]
pub struct Token(pub TokenType, pub String, pub TokenPosition);
/// Splits `input` into tokens, tagging each with its byte span in `path`.
///
/// Scans left to right, trying every pattern in `REGEXES` at the current
/// byte offset and taking the first match. Comment and whitespace tokens are
/// emitted too; filtering is left to later stages. Returns a `TokenError`
/// when no pattern matches.
pub fn tokenize(path: Arc<String>, input: String) -> Result<Vec<Token>> {
    let mut tokens = Vec::new();
    let mut index = 0;
    while index < input.len() {
        // First matching pattern wins; REGEXES is ordered accordingly.
        let matched = REGEXES
            .iter()
            .find_map(|regex| match_regex(&regex.0, &input, index).map(|m| (m, regex.1)));
        match matched {
            Some((value, token_type)) => {
                let value_len = value.len();
                tokens.push(Token(
                    token_type,
                    value,
                    TokenPosition::new(path.clone(), index, index + value_len),
                ));
                // Advance by the matched byte length, keeping `index` on a
                // char boundary.
                index += value_len;
            }
            None => {
                // `index` always sits on a char boundary (we only advance by
                // whole matches), so byte slicing is safe here. The original
                // used chars().skip(index), which treated the byte offset as
                // a char count and showed the wrong excerpt on non-ASCII input.
                return Err(TokenError {
                    message: format!(
                        "Unexpected character: {}",
                        input[index..].chars().take(4).collect::<String>()
                    ),
                    position: TokenPosition::new(path.clone(), index, index + 1),
                }
                .into());
            }
        }
    }
    Ok(tokens)
}