Start working on rust compile step

This commit is contained in:
Fabian Stamm
2024-09-29 23:46:29 +02:00
parent 2876eea11f
commit a3f5a396e5
26 changed files with 1534 additions and 58 deletions

View File

@ -0,0 +1,75 @@
use std::{collections::HashMap, path::PathBuf};
use anyhow::{Context, Result};
use crate::ir::{EnumDefinition, ServiceDefinition, TypeDefinition};
/// Contract for a code-generation backend. The driver calls `start` once,
/// then one `generate_*` call per IR definition, then `finalize`, with all
/// output written through the shared `CompileContext`.
pub trait Compile {
/// Identifier of this backend (e.g. "rust"); presumably used to select
/// the backend by name — TODO confirm against the caller.
fn name(&self) -> String;
/// Called once before any definitions. `options` carries backend-specific
/// key/value settings; backends may reject missing/invalid options with Err.
fn start(&mut self, ctx: &mut CompileContext, options: HashMap<String, String>) -> Result<()>;
/// Emit code for one struct-like type definition.
fn generate_type(&mut self, ctx: &mut CompileContext, definition: TypeDefinition)
-> Result<()>;
/// Emit code for one enum definition.
fn generate_enum(&mut self, ctx: &mut CompileContext, definition: EnumDefinition)
-> Result<()>;
/// Emit code for one service (RPC interface) definition.
fn generate_service(
&mut self,
ctx: &mut CompileContext,
definition: ServiceDefinition,
) -> Result<()>;
/// Called once after all definitions have been generated.
fn finalize(&mut self, ctx: &mut CompileContext) -> Result<()>;
}
/// Shared state handed to every `Compile` callback; owns the output
/// directory and provides the file-writing helper.
pub struct CompileContext {
    output_folder: PathBuf,
}

impl CompileContext {
    /// Create a context rooted at `output_folder`. The folder need not
    /// exist yet; directories are created lazily by `write_file`.
    pub fn new(output_folder: &str) -> Self {
        CompileContext {
            output_folder: output_folder.into(),
        }
    }

    /// Write `content` to `filename`, interpreted relative to the output
    /// folder, creating any missing parent directories first.
    pub fn write_file(&self, filename: &str, content: String) -> Result<()> {
        // `Path::join` borrows `self.output_folder`; the previous
        // `.clone()` before joining was a needless full PathBuf copy.
        let res_path = self.output_folder.join(filename);
        let res_dir = res_path.parent().context("Path has no parent!")?;
        std::fs::create_dir_all(res_dir)?;
        std::fs::write(res_path, content)?;
        Ok(())
    }
}
/// Incremental builder for a generated source file: accumulates lines of
/// text, each optionally prefixed with 4-space indentation levels.
pub struct FileGenerator {
    content: String,
}

impl FileGenerator {
    /// Create an empty generator.
    pub fn new() -> Self {
        FileGenerator {
            content: String::new(),
        }
    }

    /// Append one line at the given indent level (4 spaces per level),
    /// terminated with a newline. Accepts anything stringifiable.
    pub fn a<T: ToString>(&mut self, indent: u32, content: T) {
        for _ in 0..indent {
            self.content.push_str("    ");
        }
        self.content.push_str(&content.to_string());
        // push('\n') avoids the one-char push_str (clippy: single_char_add_str).
        self.content.push('\n');
    }

    /// Append one raw line (no indentation), terminated with a newline.
    pub fn add_line(&mut self, line: &str) {
        self.content.push_str(line);
        self.content.push('\n');
    }

    /// Return a copy of the accumulated content.
    /// Takes `&self` now: the method never mutates, and relaxing the
    /// receiver from `&mut self` is backward compatible for all callers.
    pub fn get_content(&self) -> String {
        self.content.clone()
    }

    /// Consume the generator and return the content without cloning.
    pub fn into_content(self) -> String {
        self.content
    }
}

View File

@ -11,7 +11,7 @@ use crate::parser::{
EnumStatement, Node, ParserPosition, RootNode, ServiceStatement, TypeStatement,
};
static BUILT_INS: [&str; 4] = ["int", "float", "string", "boolean"];
static BUILT_INS: [&str; 5] = ["int", "float", "string", "boolean", "bytes"];
pub trait Definition {
fn get_position(&self) -> ParserPosition;
@ -19,8 +19,8 @@ pub trait Definition {
#[derive(Debug, Clone)]
pub struct IR {
options: HashMap<String, String>,
steps: Vec<Step>,
pub options: HashMap<String, String>,
pub steps: Vec<Step>,
}
#[derive(Debug, Clone)]
@ -36,6 +36,7 @@ pub enum Type {
Float,
String,
Bool,
Bytes,
Custom(String),
}
impl Hash for Type {
@ -45,6 +46,7 @@ impl Hash for Type {
Type::Float => "float".hash(state),
Type::String => "string".hash(state),
Type::Bool => "bool".hash(state),
Type::Bytes => "bytes".hash(state),
Type::Custom(name) => name.hash(state),
}
}
@ -52,10 +54,10 @@ impl Hash for Type {
#[derive(Debug, Clone)]
pub struct TypeDefinition {
name: String,
depends: HashSet<Type>,
fields: Vec<Field>,
position: ParserPosition,
pub name: String,
pub depends: HashSet<Type>,
pub fields: Vec<Field>,
pub position: ParserPosition,
}
impl Definition for TypeDefinition {
@ -66,18 +68,18 @@ impl Definition for TypeDefinition {
#[derive(Debug, Clone)]
pub struct Field {
name: String,
typ: Type,
array: bool,
optional: bool,
map: Option<Type>,
pub name: String,
pub typ: Type,
pub array: bool,
pub optional: bool,
pub map: Option<Type>,
}
#[derive(Debug, Clone)]
pub struct EnumDefinition {
name: String,
values: Vec<EnumField>,
position: ParserPosition,
pub name: String,
pub values: Vec<EnumField>,
pub position: ParserPosition,
}
impl Definition for EnumDefinition {
@ -88,16 +90,16 @@ impl Definition for EnumDefinition {
#[derive(Debug, Clone)]
pub struct EnumField {
name: String,
value: i32,
pub name: String,
pub value: i32,
}
#[derive(Debug, Clone)]
pub struct ServiceDefinition {
name: String,
depends: HashSet<Type>,
methods: Vec<Method>,
position: ParserPosition,
pub name: String,
pub depends: HashSet<Type>,
pub methods: Vec<Method>,
pub position: ParserPosition,
}
impl Definition for ServiceDefinition {
@ -108,31 +110,31 @@ impl Definition for ServiceDefinition {
#[derive(Debug, Clone)]
pub struct Method {
name: String,
inputs: Vec<MethodInput>,
output: Option<MethodOutput>,
decorators: MethodDecorators,
pub name: String,
pub inputs: Vec<MethodInput>,
pub output: Option<MethodOutput>,
pub decorators: MethodDecorators,
}
#[derive(Debug, Clone)]
pub struct MethodInput {
name: String,
typ: Type,
array: bool,
optional: bool,
pub name: String,
pub typ: Type,
pub array: bool,
pub optional: bool,
}
#[derive(Debug, Clone)]
pub struct MethodOutput {
typ: Type,
array: bool,
pub typ: Type,
pub array: bool,
}
#[derive(Debug, Clone)]
pub struct MethodDecorators {
description: Option<String>,
parameter_descriptions: HashMap<String, String>,
return_description: Option<String>,
pub description: Option<String>,
pub parameter_descriptions: HashMap<String, String>,
pub return_description: Option<String>,
}
fn typename_to_type(name: &str) -> Type {
@ -221,7 +223,9 @@ fn build_service(stmt: &ServiceStatement) -> Result<ServiceDefinition> {
inputs: Vec::new(),
output: method.return_type.as_ref().map(|rt| {
let typ = typename_to_type(&rt.fieldtype);
servdef.depends.insert(typ.clone());
if typ != Type::Custom("void".to_string()) {
servdef.depends.insert(typ.clone());
}
MethodOutput {
typ,
array: rt.array,

View File

@ -1,15 +1,18 @@
mod compile;
mod ir;
mod parser;
mod process;
mod shared;
mod targets;
mod tokenizer;
pub use ir::IR;
pub use parser::{Parser, RootNode};
pub use process::FileProcessor;
pub use tokenizer::{tokenize, Token, TokenError, TokenPosition, TokenType};
#[cfg(test)]
mod test {
use std::sync::Arc;
#[cfg(test)]
#[ctor::ctor]
fn init() {
@ -18,22 +21,10 @@ mod test {
#[test]
pub fn parse_jrpc() {
let tokens = crate::tokenizer::tokenize(
Arc::new("../test.jrpc".to_owned()),
include_str!("../test.jrpc").to_owned(),
)
.unwrap();
let mut fp = crate::process::FileProcessor::new();
// let ir = fp.start_compile("./test.jrpc").unwrap();
let ir = fp.start_compile("http://127.0.0.1:7878/test.jrpc").unwrap();
let parsed = crate::parser::Parser::new(tokens).parse().unwrap();
// println!("Parsed: {:?}", parsed);
let ir = crate::ir::build_ir(&parsed).unwrap();
println!("IR: {:?}", ir);
// let result = crate::JRPCParser::parse(crate::Rule::root, );
// match result {
// Ok(result) => println!("{:?}", result),
// Err(err) => println!("{:?}", err),
// }
println!("{:?}", ir);
}
}

View File

@ -1,12 +1,14 @@
use std::{
collections::{HashMap, HashSet},
path::PathBuf,
sync::Arc,
};
use anyhow::Result;
use log::trace;
use url::Url;
use crate::ir::IR;
use crate::{ir::IR, parser::RootNode};
pub struct FileProcessor {
file_cache: HashMap<String, String>,
@ -15,6 +17,7 @@ pub struct FileProcessor {
impl FileProcessor {
pub fn new() -> Self {
trace!("FileProcessor::new()");
Self {
file_cache: HashMap::new(),
processed_files: HashSet::new(),
@ -22,6 +25,7 @@ impl FileProcessor {
}
fn is_url(inp: Option<&str>) -> bool {
trace!("FileProcessor::is_url({:?})", inp);
#[cfg(feature = "http")]
if let Some(inp) = inp {
if inp.starts_with("http://") || inp.starts_with("https://") {
@ -32,13 +36,14 @@ impl FileProcessor {
}
fn resolve_path(input: &str, context: Option<&str>) -> String {
trace!("FileProcessor::resolve_path({}, {:?})", input, context);
let mut input = input.to_string();
#[cfg(feature = "http")]
if cfg!(feature = "http") && (Self::is_url(Some(&input)) || Self::is_url(context)) {
if Self::is_url(Some(&input)) {
input.to_string()
} else {
let mut url = Url::parse(context.unwrap()).unwrap();
let url = Url::parse(context.unwrap()).unwrap();
if !input.ends_with(".jrpc") {
input = format!("{}.jrpc", input);
}
@ -50,6 +55,7 @@ impl FileProcessor {
}
if let Some(context) = context {
let mut path = PathBuf::from(context);
path.pop();
path = path.join(input);
path.to_str().unwrap().to_string()
} else {
@ -59,6 +65,7 @@ impl FileProcessor {
}
fn get_file_url(&mut self, url: &str) -> Result<String> {
trace!("FileProcessor::get_file_url({})", url);
let resp = reqwest::blocking::get(url)?;
let body = resp.text()?;
self.file_cache.insert(url.to_string(), body.clone());
@ -66,12 +73,14 @@ impl FileProcessor {
}
fn get_file_path(&mut self, path: &str) -> Result<String> {
trace!("FileProcessor::get_file_path({})", path);
let body = std::fs::read_to_string(path)?;
self.file_cache.insert(path.to_string(), body.clone());
Ok(body)
}
fn get_file(&mut self, name: &str) -> Result<String> {
trace!("FileProcessor::get_file({})", name);
if self.file_cache.contains_key(name) {
return Ok(self.file_cache.get(name).unwrap().clone());
} else {
@ -91,9 +100,36 @@ impl FileProcessor {
}
}
fn process_file(&mut self, file: &str, root: bool) {}
/// Recursively load, tokenize and parse `file`, splicing the contents of
/// each `import` statement in place of the import, and returning the
/// flattened list of root nodes. Files already present in
/// `processed_files` are skipped (empty Vec), so repeated/circular
/// imports are processed at most once.
/// NOTE(review): `root` is currently unused in the body — confirm intent.
fn process_file(&mut self, file: &str, root: bool) -> Result<Vec<RootNode>> {
// Normalize the path first so the de-duplication set keys are canonical.
let file = Self::resolve_path(file, None);
trace!("FileProcessor::process_file({}, {})", file, root);
if self.processed_files.contains(&file) {
return Ok(vec![]);
}
// Mark as processed before recursing to break import cycles.
self.processed_files.insert(file.clone());
let content = self.get_file(&file)?;
let tokens = crate::tokenizer::tokenize(Arc::new(file.to_string()), content)?;
let parsed = crate::parser::Parser::new(tokens).parse()?;
let mut result = Vec::new();
for stmt in &parsed {
if let crate::parser::RootNode::Import(stmt) = stmt {
// Import paths are resolved relative to the importing file.
let file = Self::resolve_path(&stmt.path, Some(&file));
let s = self.process_file(&file, false)?;
result.extend(s);
} else {
result.push(stmt.clone());
}
}
Ok(result)
}
pub fn start_compile(&mut self, file: &str) -> Result<IR> {
Err(anyhow::anyhow!("Not implemented"))
trace!("FileProcessor::start_compile({})", file);
let parsed = self.process_file(file, true)?;
let ir = crate::ir::build_ir(&parsed)?;
Ok(ir)
}
}

View File

@ -5,3 +5,24 @@ pub enum Keywords {
Service,
Define,
}
impl Keywords {
    /// Returns true if `input` is one of the language's reserved keywords.
    /// Must stay in sync with the `Keywords` variants.
    pub fn is_keyword(input: &str) -> bool {
        // `matches!` replaces the manual match-to-bool
        // (clippy: match_like_matches_macro).
        matches!(input, "type" | "enum" | "import" | "service" | "define")
    }
}
/// Implement `Display` rather than `ToString` directly: the standard
/// blanket impl `impl<T: Display> ToString for T` keeps every existing
/// `.to_string()` call working, and `Keywords` additionally becomes
/// usable with `format!`/`{}` without an intermediate allocation.
impl std::fmt::Display for Keywords {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Keywords::Type => "type",
            Keywords::Enum => "enum",
            Keywords::Import => "import",
            Keywords::Service => "service",
            Keywords::Define => "define",
        };
        write!(f, "{}", name)
    }
}

View File

@ -0,0 +1 @@
mod rust;

View File

@ -0,0 +1,214 @@
use anyhow::Result;
use log::warn;
use std::collections::{HashMap, HashSet};
use crate::compile::{Compile, CompileContext, FileGenerator};
use crate::ir::{EnumDefinition, ServiceDefinition, Type, TypeDefinition};
use crate::shared::Keywords;
/// `Compile` backend that emits a Rust crate (one module file per IR
/// definition).
pub struct RustCompiler {
    // Name of the generated crate; set from the required `crate` option
    // in `start`.
    crate_name: String,
}

impl RustCompiler {
    /// Map an IR type to the Rust type name used in generated code.
    fn type_to_rust(typ: Type) -> String {
        match typ {
            Type::String => "String".to_string(),
            Type::Int => "i64".to_string(),
            Type::Float => "f64".to_string(),
            Type::Bool => "bool".to_string(),
            Type::Bytes => "Vec<u8>".to_string(),
            Type::Custom(name) => name,
        }
    }

    /// Emit one `use crate::<name>;` line per custom-type dependency
    /// (built-in types need no import), followed by two blank separator lines.
    fn add_dependencies(
        &mut self,
        file: &mut FileGenerator,
        depends: &HashSet<Type>,
    ) -> Result<()> {
        for dep in depends {
            // Only custom types live in sibling modules; `if let` replaces
            // the match with an empty `_` arm (clippy: single_match).
            if let Type::Custom(name) = dep {
                file.a(0, format!("use crate::{};", name));
            }
        }
        file.a(0, "");
        file.a(0, "");
        Ok(())
    }

    /// Append `_` to identifiers that collide with jrpc keywords so the
    /// generated Rust identifier stays distinct from the keyword.
    fn fix_keyword_name(name: &str) -> String {
        if Keywords::is_keyword(name) {
            format!("{}_", name)
        } else {
            name.to_string()
        }
    }

    /// Convert a CamelCase identifier to snake_case for module file names.
    /// Runs of consecutive uppercase letters are lowercased without extra
    /// underscores (e.g. "HTTPServer" -> "httpserver").
    fn to_snake(name: &str) -> String {
        let mut result = String::new();
        let mut last_upper = false;
        for c in name.chars() {
            if c.is_uppercase() {
                if last_upper {
                    result.push(c.to_ascii_lowercase());
                } else {
                    if !result.is_empty() {
                        result.push('_');
                    }
                    result.push(c.to_ascii_lowercase());
                }
                last_upper = true;
            } else {
                result.push(c);
                last_upper = false;
            }
        }
        result
    }
}
impl Compile for RustCompiler {
    fn name(&self) -> String {
        "rust".to_string()
    }

    /// Validate options and lay down the static crate skeleton
    /// (Cargo.toml and the base lib file) from the bundled templates.
    fn start(
        &mut self,
        ctx: &mut CompileContext,
        options: HashMap<String, String>,
    ) -> anyhow::Result<()> {
        if let Some(crate_name) = options.get("crate") {
            self.crate_name = crate_name.to_string();
        } else {
            anyhow::bail!("crate option is required for rust compiler");
        }
        // NOTE(review): rejecting allow_bytes=true looks inverted, given
        // that Type::Bytes maps cleanly to Vec<u8> in type_to_rust —
        // TODO confirm the intended semantics of this option.
        if let Some(allow_bytes) = options.get("allow_bytes") {
            if allow_bytes == "true" {
                anyhow::bail!("allow_bytes option is not supported for rust compiler");
            }
        }
        ctx.write_file(
            "Cargo.toml",
            include_str!("../../templates/Rust/Cargo.toml")
                .to_owned()
                .replace("__name__", &self.crate_name),
        )?;
        ctx.write_file(
            "src/base_lib.rs",
            include_str!("../../templates/Rust/src/lib.rs").to_owned(),
        )?;
        // All setup work succeeded; the previous `todo!()` here made every
        // run panic even though start() had already done its job.
        Ok(())
    }

    /// Generate `src/<snake_name>.rs` containing a serde-derived struct
    /// for `definition`.
    fn generate_type(
        &mut self,
        ctx: &mut CompileContext,
        definition: TypeDefinition,
    ) -> anyhow::Result<()> {
        let mut f = FileGenerator::new();
        // HashMap import is only needed when at least one field is a map.
        if definition.fields.iter().any(|e| e.map.is_some()) {
            f.a(0, "use std::collections::hash_map::HashMap;")
        }
        f.a(0, "use serde::{Deserialize, Serialize};");
        self.add_dependencies(&mut f, &definition.depends)?;
        f.a(0, "#[derive(Clone, Debug, Serialize, Deserialize)]");
        f.a(0, format!("pub struct {} {{", definition.name));
        for field in definition.fields {
            f.a(1, "#[allow(non_snake_case)]");
            // `mut` removed: `func` is never reassigned after construction.
            let func = format!("pub {}: ", Self::fix_keyword_name(&field.name));
            if Keywords::is_keyword(&field.name) {
                warn!(
                    "[RUST] Warning: Field name '{}' is not allowed in Rust. Renaming to '{}_'",
                    field.name, field.name
                );
                // Keep the wire name stable: serde must (de)serialize the
                // renamed Rust field under its original jrpc name. The
                // rename target was previously hard-coded to "type", which
                // broke every other keyword-named field.
                f.a(1, format!("#[serde(rename = \"{}\")]", field.name));
            }
            // Optional fields wrap the whole emitted type (including
            // Vec/HashMap) in Option<...>.
            let mut opts = String::new();
            let mut opte = String::new();
            if field.optional {
                opts = "Option<".to_string();
                opte = ">".to_string();
            }
            if field.array {
                f.a(
                    1,
                    format!(
                        "{} {}Vec<{}>{},",
                        func,
                        opts,
                        Self::type_to_rust(field.typ),
                        opte
                    ),
                );
            } else if let Some(map) = field.map {
                // The `map` type is emitted as the HashMap key and
                // `field.typ` as the value.
                f.a(
                    1,
                    format!(
                        "{} {}HashMap<{},{}>{},",
                        func,
                        opts,
                        Self::type_to_rust(map),
                        Self::type_to_rust(field.typ),
                        opte
                    ),
                );
            } else {
                f.a(
                    1,
                    format!(
                        "{} {}{}{},",
                        func,
                        opts,
                        Self::type_to_rust(field.typ),
                        opte
                    ),
                );
            }
        }
        f.a(0, "}");
        ctx.write_file(
            &format!("src/{}.rs", Self::to_snake(&definition.name)),
            f.into_content(),
        )?;
        Ok(())
    }

    fn generate_enum(
        &mut self,
        ctx: &mut CompileContext,
        definition: EnumDefinition,
    ) -> anyhow::Result<()> {
        // Not implemented yet (work-in-progress backend).
        todo!()
    }

    fn generate_service(
        &mut self,
        ctx: &mut CompileContext,
        definition: ServiceDefinition,
    ) -> anyhow::Result<()> {
        // Not implemented yet (work-in-progress backend).
        todo!()
    }

    fn finalize(&mut self, ctx: &mut CompileContext) -> anyhow::Result<()> {
        // Not implemented yet (work-in-progress backend).
        todo!()
    }
}