From 8805072573e211f7bbb6d94d635f74026213eb63 Mon Sep 17 00:00:00 2001 From: Nikkuss Date: Thu, 3 Apr 2025 21:28:46 +0400 Subject: [PATCH] rewrite generator --- Cargo.toml | 3 + src/config.rs | 6 + src/generate.rs | 350 ------------------------------------- src/generator/column.rs | 32 ++++ src/generator/discover.rs | 150 ++++++++++++++++ src/generator/file.rs | 95 ++++++++++ src/generator/mod.rs | 213 ++++++++++++++++++++++ src/generator/table.rs | 46 +++++ src/main.rs | 21 ++- src/templates/model.rs.hbs | 12 ++ 10 files changed, 569 insertions(+), 359 deletions(-) delete mode 100644 src/generate.rs create mode 100644 src/generator/column.rs create mode 100644 src/generator/discover.rs create mode 100644 src/generator/file.rs create mode 100644 src/generator/mod.rs create mode 100644 src/generator/table.rs create mode 100644 src/templates/model.rs.hbs diff --git a/Cargo.toml b/Cargo.toml index 6eab654..f3281ab 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,7 +8,9 @@ clap = { version = "4.5.32", features = ["derive", "env"] } color-eyre = "0.6.3" comment-parser = "0.1.0" figment = { version = "0.10.19", features = ["yaml"] } +handlebars = "6.3.2" heck = "0.5.0" +include_dir = "0.7.4" indicatif = "0.17.11" inquire = "0.7.5" prettytable = "0.10.0" @@ -16,6 +18,7 @@ quote = "1.0.40" sea-orm-codegen = "1.1.8" sea-schema = { version = "0.16.1", features = ["sqlx-all"] } serde = { version = "1.0.219", features = ["derive"] } +serde_json = "1.0.140" serde_yaml = "0.9.34" sqlx = { version = "0.8.3", features = ["mysql", "postgres", "sqlite"] } syn = { version = "2.0.100", features = ["extra-traits", "full"] } diff --git a/src/config.rs b/src/config.rs index a475485..f1113b9 100644 --- a/src/config.rs +++ b/src/config.rs @@ -104,6 +104,7 @@ pub struct Config { pub db: DbConfig, pub sea_orm: SeaOrmConfig, pub output: OutputConfig, + pub templates: TemplateConfig, } #[derive(Deserialize, Serialize, Debug, Clone)] @@ -167,6 +168,10 @@ pub enum DateTimeCrate { Time, Chrono, } 
+#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct TemplateConfig { + pub model: Option, +} impl From for CodegenDateTimeCrate { fn from(date_time_crate: DateTimeCrate) -> CodegenDateTimeCrate { @@ -243,6 +248,7 @@ impl Default for Config { output: OutputConfig { path: PathBuf::from("./entities"), }, + templates: TemplateConfig { model: None }, } } } diff --git a/src/generate.rs b/src/generate.rs deleted file mode 100644 index f3b61c7..0000000 --- a/src/generate.rs +++ /dev/null @@ -1,350 +0,0 @@ -use core::time; - -const HEADER: &str = r#"== Schema Information"#; -const COMMENTHEAD: &str = r#"/*"#; -const COMMENTBODY: &str = r#" *"#; -const COMMENTTAIL: &str = r#"*/"#; -use crate::{config::DateTimeCrate, Config}; -use color_eyre::{ - eyre::{eyre, ContextCompat, Report}, - Result, -}; -use comment_parser::{CommentParser, Event}; -use prettytable::{format, row, Table}; -use sea_orm_codegen::OutputFile; -use sea_schema::sea_query::{self, ColumnSpec, ColumnType, StringLen, TableCreateStatement}; -use tokio::{fs, task::JoinSet}; -use url::Url; - -pub async fn get_tables( - database_url: String, - config: &Config, -) -> Result<(Option, Vec)> { - let url = Url::parse(&database_url)?; - - tracing::trace!(?url); - - let is_sqlite = url.scheme() == "sqlite"; - let filter_tables = config.sea_orm.entity.tables.get_filter(); - - // let filter_tables = |table: &String| -> bool { - // config.sea_orm.entity.table.only.is_empty() || config.sea_orm.table.only.contains(table) - // }; - // - // let filter_hidden_tables = |table: &str| -> bool { - // if false { - // true - // } else { - // !table.starts_with('_') - // } - // }; - // - // let filter_skip_tables = - // |table: &String| -> bool { !config.sea_orm.table.exclude.contains(table) }; - let database_name: &str = (if !is_sqlite { - let database_name = url - .path_segments() - .context("No database name as part of path")? 
- .next() - .context("No database name as part of path")?; - - if database_name.is_empty() { - return Err(eyre!("Database path name is empty")); - } - Ok::<&str, Report>(database_name) - } else { - Ok(Default::default()) - })?; - - let (schema_name, table_stmts) = match url.scheme() { - "mysql" => { - use sea_schema::mysql::discovery::SchemaDiscovery; - use sqlx::MySql; - - tracing::info!("Connecting to MySQL"); - let connection = sqlx_connect::( - config.db.max_connections, - config.db.acquire_timeout, - url.as_str(), - None, - ) - .await?; - - tracing::info!("Discovering schema"); - let schema_discovery = SchemaDiscovery::new(connection, database_name); - let schema = schema_discovery.discover().await?; - let table_stmts = schema - .tables - .into_iter() - .filter(|schema| filter_tables(&schema.info.name)) - // .filter(|schema| filter_hidden_tables(&schema.info.name)) - // .filter(|schema| filter_skip_tables(&schema.info.name)) - .map(|schema| schema.write()) - .collect(); - (None, table_stmts) - } - "sqlite" => { - use sea_schema::sqlite::discovery::SchemaDiscovery; - use sqlx::Sqlite; - - tracing::info!("Connecting to SQLite"); - let connection = sqlx_connect::( - config.db.max_connections, - config.db.acquire_timeout, - url.as_str(), - None, - ) - .await?; - - tracing::info!("Discovering schema"); - let schema_discovery = SchemaDiscovery::new(connection); - let schema = schema_discovery - .discover() - .await? 
- .merge_indexes_into_table(); - let table_stmts = schema - .tables - .into_iter() - .filter(|schema| filter_tables(&schema.name)) - // .filter(|schema| filter_hidden_tables(&schema.name)) - // .filter(|schema| filter_skip_tables(&schema.name)) - .map(|schema| schema.write()) - .collect(); - (None, table_stmts) - } - "postgres" | "potgresql" => { - use sea_schema::postgres::discovery::SchemaDiscovery; - use sqlx::Postgres; - - tracing::info!("Connecting to Postgres"); - let schema = &config.db.database_schema.as_deref().unwrap_or("public"); - let connection = sqlx_connect::( - config.db.max_connections, - config.db.acquire_timeout, - url.as_str(), - Some(schema), - ) - .await?; - tracing::info!("Discovering schema"); - let schema_discovery = SchemaDiscovery::new(connection, schema); - let schema = schema_discovery.discover().await?; - tracing::info!(?schema); - let table_stmts = schema - .tables - .into_iter() - .filter(|schema| filter_tables(&schema.info.name)) - // .filter(|schema| filter_hidden_tables(&schema.info.name)) - // .filter(|schema| filter_skip_tables(&schema.info.name)) - .map(|schema| schema.write()) - .collect(); - (config.db.database_schema.clone(), table_stmts) - } - _ => unimplemented!("{} is not supported", url.scheme()), - }; - tracing::info!("Schema discovered"); - - Ok((schema_name, table_stmts)) -} -async fn sqlx_connect( - max_connections: u32, - acquire_timeout: u64, - url: &str, - schema: Option<&str>, -) -> Result> -where - DB: sqlx::Database, - for<'a> &'a mut ::Connection: sqlx::Executor<'a>, -{ - let mut pool_options = sqlx::pool::PoolOptions::::new() - .max_connections(max_connections) - .acquire_timeout(time::Duration::from_secs(acquire_timeout)); - // Set search_path for Postgres, E.g. 
Some("public") by default - // MySQL & SQLite connection initialize with schema `None` - if let Some(schema) = schema { - let sql = format!("SET search_path = '{schema}'"); - pool_options = pool_options.after_connect(move |conn, _| { - let sql = sql.clone(); - Box::pin(async move { - sqlx::Executor::execute(conn, sql.as_str()) - .await - .map(|_| ()) - }) - }); - } - pool_options.connect(url).await.map_err(Into::into) -} - -pub async fn generate_models( - tables: Vec, - config: Config, -) -> Result> { - tracing::debug!(?tables); - let output_path = config.output.path.clone(); - let files = tables - .into_iter() - .map(|table| { - let output_path = output_path.clone(); - let config = config.clone(); - async move { - let table_name = match table.get_table_name() { - Some(table_ref) => match table_ref { - sea_query::TableRef::Table(t) - | sea_query::TableRef::SchemaTable(_, t) - | sea_query::TableRef::DatabaseSchemaTable(_, _, t) - | sea_query::TableRef::TableAlias(t, _) - | sea_query::TableRef::SchemaTableAlias(_, t, _) - | sea_query::TableRef::DatabaseSchemaTableAlias(_, _, t, _) => { - t.to_string() - } - _ => unimplemented!(), - }, - None => return Err(eyre!("Table name not found")), - }; - let table_str = generate_table(table, config.clone()).await?; - tracing::debug!(?table_str); - let filename = format!("{}.rs", table_name); - let file_path = output_path.join(&filename); - let exists = file_path.exists(); - let content = if exists { - let mut file_content = fs::read_to_string(file_path).await?; - let rules = comment_parser::get_syntax("rust").unwrap(); - let comments = CommentParser::new(&file_content, rules); - let mut found = false; - for comment in comments { - if let Event::BlockComment(a, b) = comment { - tracing::debug!(?a, ?b); - if b.contains(HEADER) { - found = true; - file_content = file_content.replace(a, &table_str); - tracing::debug!("Found header"); - break; - } - } - } - if found { - Ok::(file_content) - } else { - let merged_content = 
format!("{}\n\n{}", table_str, file_content); - Ok::(merged_content) - } - } else { - Ok::(table_str) - }?; - - Ok(OutputFile { - name: filename, - content, - }) - } - }) - .collect::>>(); - let files = files - .join_all() - .await - .into_iter() - .collect::>>()?; - Ok(files) -} - -async fn generate_table(table: TableCreateStatement, config: Config) -> Result { - let mut string = String::new(); - string.push_str(format!("{COMMENTHEAD} {HEADER}\n").as_str()); - - let table_name = match table.get_table_name() { - Some(table_ref) => match table_ref { - sea_query::TableRef::Table(t) - | sea_query::TableRef::SchemaTable(_, t) - | sea_query::TableRef::DatabaseSchemaTable(_, _, t) - | sea_query::TableRef::TableAlias(t, _) - | sea_query::TableRef::SchemaTableAlias(_, t, _) - | sea_query::TableRef::DatabaseSchemaTableAlias(_, _, t, _) => t.to_string(), - _ => unimplemented!(), - }, - None => return Err(eyre!("Table name not found")), - }; - let mut inner = String::new(); - inner.push_str(format!("{COMMENTBODY}\n Table name: {table_name}\n\n\n").as_str()); - inner = inner.strip_suffix('\n').unwrap().to_string(); - let mut ptable = Table::new(); - let format = format::FormatBuilder::new() - .column_separator(' ') - .borders(' ') - .separators( - &[ - format::LinePosition::Bottom, - format::LinePosition::Title, - // format::LinePosition::Top, - ], - format::LineSeparator::default(), - ) - .padding(1, 1) - .build(); - ptable.set_format(format); - ptable.set_titles(row!["Name", "Type", "RustType", "Attributes"]); - let indexes = table.get_indexes(); - tracing::info!(?indexes); - for column in table.get_columns() { - let name = column.get_column_name(); - if let Some(column_type) = column.get_column_type() { - let column_type_rust = - type_to_rust_string(column_type, config.sea_orm.entity.date_time_crate.clone()); - let column_type = - type_to_string(column_type, config.sea_orm.entity.date_time_crate.clone()); - let attrs = attrs_to_string(column.get_column_spec()); - 
ptable.add_row(row![name, column_type, column_type_rust, attrs]); - } - } - inner.push_str(ptable.to_string().as_str()); - - string.push_str( - inner - .replace("\n", format!("\n{} ", COMMENTBODY).as_str()) - .as_str(), - ); - string.push_str(format!("\n{COMMENTTAIL}\n").as_str()); - Ok(string) -} - -fn type_to_string(column: &ColumnType, date_time_create: DateTimeCrate) -> String { - tracing::debug!(?column, ?date_time_create); - match column { - ColumnType::Char(Some(max)) => format!("Char({}max)", max), - ColumnType::Char(None) => "Char".to_owned(), - ColumnType::String(StringLen::None) => "String".to_owned(), - ColumnType::String(StringLen::Max) => "String(Max)".to_owned(), - ColumnType::String(StringLen::N(len)) => format!("String({}max)", len), - ColumnType::Text => "Text".to_owned(), - ColumnType::Integer => "Integer".to_owned(), - - ColumnType::Custom(_) => "String".to_owned(), - _ => unimplemented!(), - } -} -fn type_to_rust_string(column: &ColumnType, date_time_create: DateTimeCrate) -> String { - tracing::debug!(?column, ?date_time_create); - match column { - ColumnType::Char(_) | ColumnType::String(_) | ColumnType::Text | ColumnType::Custom(_) => { - "String".to_owned() - } - ColumnType::Integer => "i32".to_owned(), - _ => unimplemented!(), - } -} - -fn attrs_to_string(column: &Vec) -> String { - tracing::debug!(?column); - column - .iter() - .filter_map(|c| match c { - ColumnSpec::PrimaryKey => Some("primary key"), - ColumnSpec::UniqueKey => Some("unique key"), - ColumnSpec::AutoIncrement => Some("autoincrement"), - ColumnSpec::NotNull => Some("not null"), - _ => None, - }) - .map(|s| s.to_string()) - .collect::>() - .join(", ") -} - -// diff --git a/src/generator/column.rs b/src/generator/column.rs new file mode 100644 index 0000000..cfbfd9d --- /dev/null +++ b/src/generator/column.rs @@ -0,0 +1,32 @@ +use color_eyre::{eyre::ContextCompat, Result}; +use sea_schema::sea_query::{ColumnDef, ColumnSpec, ColumnType, IndexCreateStatement}; +#[derive(Clone, 
Debug)] +pub struct Column { + pub name: String, + pub col_type: ColumnType, + pub attrs: Vec, +} + +impl Column { + pub fn new(column: ColumnDef, index: Option) -> Result { + let name = column.get_column_name(); + let col_type = column + .get_column_type() + .context("Unable to get column type")? + .clone(); + let mut attrs = column.get_column_spec().clone(); + if let Some(index) = index { + if index.is_unique_key() { + attrs.push(ColumnSpec::UniqueKey) + } + if index.is_primary_key() { + attrs.push(ColumnSpec::PrimaryKey); + } + } + Ok(Column { + name: name.to_string(), + col_type, + attrs: attrs.to_vec(), + }) + } +} diff --git a/src/generator/discover.rs b/src/generator/discover.rs new file mode 100644 index 0000000..ea494b9 --- /dev/null +++ b/src/generator/discover.rs @@ -0,0 +1,150 @@ +use core::time; + +use color_eyre::eyre::{eyre, Context, ContextCompat, Report, Result}; +use sea_schema::sea_query::TableCreateStatement; +use url::Url; + +use crate::config::Config; +pub async fn get_tables( + database_url: String, + config: &Config, +) -> Result<(Option, Vec)> { + let url = Url::parse(&database_url)?; + + tracing::trace!(?url); + + let is_sqlite = url.scheme() == "sqlite"; + let filter_tables = config.sea_orm.entity.tables.get_filter(); + + let database_name: &str = (if !is_sqlite { + let database_name = url + .path_segments() + .context("No database name as part of path")? 
+ .next() + .context("No database name as part of path")?; + + if database_name.is_empty() { + return Err(eyre!("Database path name is empty")); + } + Ok::<&str, Report>(database_name) + } else { + Ok(Default::default()) + })?; + + let (schema_name, table_stmts) = match url.scheme() { + "mysql" => { + use sea_schema::mysql::discovery::SchemaDiscovery; + use sqlx::MySql; + + tracing::info!("Connecting to MySQL"); + let connection = sqlx_connect::( + config.db.max_connections, + config.db.acquire_timeout, + url.as_str(), + None, + ) + .await?; + + tracing::info!("Discovering schema"); + let schema_discovery = SchemaDiscovery::new(connection, database_name); + let schema = schema_discovery.discover().await?; + let table_stmts = schema + .tables + .into_iter() + .filter(|schema| filter_tables(&schema.info.name)) + // .filter(|schema| filter_hidden_tables(&schema.info.name)) + // .filter(|schema| filter_skip_tables(&schema.info.name)) + .map(|schema| schema.write()) + .collect(); + (None, table_stmts) + } + "sqlite" => { + use sea_schema::sqlite::discovery::SchemaDiscovery; + use sqlx::Sqlite; + + tracing::info!("Connecting to SQLite"); + let connection = sqlx_connect::( + config.db.max_connections, + config.db.acquire_timeout, + url.as_str(), + None, + ) + .await?; + + tracing::info!("Discovering schema"); + let schema_discovery = SchemaDiscovery::new(connection); + let schema = schema_discovery + .discover() + .await? 
+ .merge_indexes_into_table(); + let table_stmts = schema + .tables + .into_iter() + .filter(|schema| filter_tables(&schema.name)) + // .filter(|schema| filter_hidden_tables(&schema.name)) + // .filter(|schema| filter_skip_tables(&schema.name)) + .map(|schema| schema.write()) + .collect(); + (None, table_stmts) + } + "postgres" | "postgresql" => { + use sea_schema::postgres::discovery::SchemaDiscovery; + use sqlx::Postgres; + + tracing::info!("Connecting to Postgres"); + let schema = &config.db.database_schema.as_deref().unwrap_or("public"); + let connection = sqlx_connect::( + config.db.max_connections, + config.db.acquire_timeout, + url.as_str(), + Some(schema), + ) + .await?; + tracing::info!("Discovering schema"); + let schema_discovery = SchemaDiscovery::new(connection, schema); + let schema = schema_discovery.discover().await?; + tracing::info!(?schema); + let table_stmts = schema + .tables + .into_iter() + .filter(|schema| filter_tables(&schema.info.name)) + // .filter(|schema| filter_hidden_tables(&schema.info.name)) + // .filter(|schema| filter_skip_tables(&schema.info.name)) + .map(|schema| schema.write()) + .collect(); + (config.db.database_schema.clone(), table_stmts) + } + _ => unimplemented!("{} is not supported", url.scheme()), + }; + tracing::info!("Schema discovered"); + + Ok((schema_name, table_stmts)) +} +async fn sqlx_connect( + max_connections: u32, + acquire_timeout: u64, + url: &str, + schema: Option<&str>, +) -> Result> +where + DB: sqlx::Database, + for<'a> &'a mut ::Connection: sqlx::Executor<'a>, +{ + let mut pool_options = sqlx::pool::PoolOptions::::new() + .max_connections(max_connections) + .acquire_timeout(time::Duration::from_secs(acquire_timeout)); + // Set search_path for Postgres, E.g.
Some("public") by default + // MySQL & SQLite connection initialize with schema `None` + if let Some(schema) = schema { + let sql = format!("SET search_path = '{schema}'"); + pool_options = pool_options.after_connect(move |conn, _| { + let sql = sql.clone(); + Box::pin(async move { + sqlx::Executor::execute(conn, sql.as_str()) + .await + .map(|_| ()) + }) + }); + } + pool_options.connect(url).await.map_err(Into::into) +} diff --git a/src/generator/file.rs b/src/generator/file.rs new file mode 100644 index 0000000..f0e1610 --- /dev/null +++ b/src/generator/file.rs @@ -0,0 +1,95 @@ +use std::path::PathBuf; + +use crate::config::Config; + +use super::table::Table; +use color_eyre::Result; +use handlebars::Handlebars; +use prettytable::{format, row, Table as PTable}; +use sea_orm_codegen::OutputFile; +use serde::Serialize; +use tokio::fs; +const HEADER: &str = r#"== Schema Information"#; +const COMMENTHEAD: &str = r#"/*"#; +const COMMENTBODY: &str = r#" *"#; +const COMMENTTAIL: &str = r#"*/"#; + +#[derive(Debug, Clone)] +pub struct FileGenerator { + filename: String, + table: Table, +} +#[derive(Debug, Clone, Serialize)] +pub struct FileContext { + has_prelude: bool, + prelude: String, +} + +impl FileGenerator { + pub fn new(table: Table) -> Result { + let filename = format!("{}.rs", table.name); + Ok(FileGenerator { table, filename }) + } + pub async fn build_file<'a>( + &self, + config: &Config, + handlebars: &'a Handlebars<'a>, + ) -> Result { + let filepath = config.output.path.join(&self.filename); + Ok(OutputFile { + name: filepath.to_str().unwrap().to_string(), + content: self.generate_file(config, handlebars).await?, + }) + } + pub async fn generate_file<'a>( + &self, + config: &Config, + handlebars: &'a Handlebars<'a>, + ) -> Result { + let filepath = config.output.path.join(&self.filename); + let file_context = FileContext { + has_prelude: false, + prelude: String::new(), + }; + let generated_header = self.generate_header(config).await?; + if 
filepath.exists() { + let mut file_content = fs::read_to_string(filepath).await?; + Ok(file_content) + } else { + let content = handlebars.render("model", &file_context)?; + Ok(format!("{}{}", generated_header, content)) + } + } + pub async fn generate_header(&self, config: &Config) -> Result { + let mut column_info_table = PTable::new(); + let format = format::FormatBuilder::new() + .column_separator(' ') + .borders(' ') + .separators( + &[ + format::LinePosition::Bottom, + format::LinePosition::Title, + // format::LinePosition::Top, + ], + format::LineSeparator::default(), + ) + .padding(1, 1) + .build(); + column_info_table.set_format(format); + column_info_table.set_titles(row!["Name", "Type", "RustType", "Attributes"]); + // let indexes = table.get_indexes(); + // tracing::info!(?indexes); + // for column in table.get_columns() { + // let name = column.get_column_name(); + // if let Some(column_type) = column.get_column_type() { + // let column_type_rust = + // type_to_rust_string(column_type, config.sea_orm.entity.date_time_crate.clone()); + // let column_type = + // type_to_string(column_type, config.sea_orm.entity.date_time_crate.clone()); + // let attrs = attrs_to_string(column.get_column_spec()); + // ptable.add_row(row![name, column_type, column_type_rust, attrs]); + // } + // } + Ok(String::new()) + } +} diff --git a/src/generator/mod.rs b/src/generator/mod.rs new file mode 100644 index 0000000..efe40d9 --- /dev/null +++ b/src/generator/mod.rs @@ -0,0 +1,213 @@ +pub mod column; +pub mod discover; +pub mod file; +pub mod table; + +use core::time; + +use crate::{config::DateTimeCrate, Config}; +use color_eyre::{ + eyre::{eyre, ContextCompat, Report}, + Result, +}; +use comment_parser::{CommentParser, Event}; +use handlebars::Handlebars; +use prettytable::{format, row}; +use sea_orm_codegen::OutputFile; +use sea_schema::sea_query::{self, ColumnSpec, ColumnType, StringLen, TableCreateStatement}; +use table::Table; +use tokio::{fs, task::JoinSet}; +use 
url::Url; + +pub async fn update_files<'a>( + tables: Vec, + config: Config, + handlebars: &'a Handlebars<'a>, +) -> Result> { + let tables = tables + .into_iter() + .map(|table| Table::new(table.clone(), config.clone())) + .collect::>>()?; + let mut files = Vec::new(); + for table in tables { + let comment = file::FileGenerator::new(table)?; + let file = comment.build_file(&config, &handlebars).await?; + files.push(file); + } + Ok(files) +} + +// pub async fn generate_models( +// tables: Vec, +// config: Config, +// ) -> Result> { +// tracing::debug!(?tables); +// let output_path = config.output.path.clone(); +// let files = tables +// .into_iter() +// .map(|table| { +// let output_path = output_path.clone(); +// let config = config.clone(); +// async move { +// let table_name = match table.get_table_name() { +// Some(table_ref) => match table_ref { +// sea_query::TableRef::Table(t) +// | sea_query::TableRef::SchemaTable(_, t) +// | sea_query::TableRef::DatabaseSchemaTable(_, _, t) +// | sea_query::TableRef::TableAlias(t, _) +// | sea_query::TableRef::SchemaTableAlias(_, t, _) +// | sea_query::TableRef::DatabaseSchemaTableAlias(_, _, t, _) => { +// t.to_string() +// } +// _ => unimplemented!(), +// }, +// None => return Err(eyre!("Table name not found")), +// }; +// let table_str = generate_table(table, config.clone()).await?; +// tracing::debug!(?table_str); +// let filename = format!("{}.rs", table_name); +// let file_path = output_path.join(&filename); +// let exists = file_path.exists(); +// let content = if exists { +// let mut file_content = fs::read_to_string(file_path).await?; +// let rules = comment_parser::get_syntax("rust").unwrap(); +// let comments = CommentParser::new(&file_content, rules); +// let mut found = false; +// for comment in comments { +// if let Event::BlockComment(a, b) = comment { +// tracing::debug!(?a, ?b); +// if b.contains(HEADER) { +// found = true; +// file_content = file_content.replace(a, &table_str); +// 
tracing::debug!("Found header"); +// break; +// } +// } +// } +// if found { +// Ok::(file_content) +// } else { +// let merged_content = format!("{}\n\n{}", table_str, file_content); +// Ok::(merged_content) +// } +// } else { +// Ok::(table_str) +// }?; +// +// Ok(OutputFile { +// name: filename, +// content, +// }) +// } +// }) +// .collect::>>(); +// let files = files +// .join_all() +// .await +// .into_iter() +// .collect::>>()?; +// Ok(files) +// } + +// async fn generate_table(table: TableCreateStatement, config: Config) -> Result { +// let mut string = String::new(); +// string.push_str(format!("{COMMENTHEAD} {HEADER}\n").as_str()); +// +// let table_name = match table.get_table_name() { +// Some(table_ref) => match table_ref { +// sea_query::TableRef::Table(t) +// | sea_query::TableRef::SchemaTable(_, t) +// | sea_query::TableRef::DatabaseSchemaTable(_, _, t) +// | sea_query::TableRef::TableAlias(t, _) +// | sea_query::TableRef::SchemaTableAlias(_, t, _) +// | sea_query::TableRef::DatabaseSchemaTableAlias(_, _, t, _) => t.to_string(), +// _ => unimplemented!(), +// }, +// None => return Err(eyre!("Table name not found")), +// }; +// let mut inner = String::new(); +// inner.push_str(format!("{COMMENTBODY}\n Table name: {table_name}\n\n\n").as_str()); +// inner = inner.strip_suffix('\n').unwrap().to_string(); +// let mut ptable = Table::new(); +// let format = format::FormatBuilder::new() +// .column_separator(' ') +// .borders(' ') +// .separators( +// &[ +// format::LinePosition::Bottom, +// format::LinePosition::Title, +// // format::LinePosition::Top, +// ], +// format::LineSeparator::default(), +// ) +// .padding(1, 1) +// .build(); +// ptable.set_format(format); +// ptable.set_titles(row!["Name", "Type", "RustType", "Attributes"]); +// let indexes = table.get_indexes(); +// tracing::info!(?indexes); +// for column in table.get_columns() { +// let name = column.get_column_name(); +// if let Some(column_type) = column.get_column_type() { +// let 
column_type_rust = +// type_to_rust_string(column_type, config.sea_orm.entity.date_time_crate.clone()); +// let column_type = +// type_to_string(column_type, config.sea_orm.entity.date_time_crate.clone()); +// let attrs = attrs_to_string(column.get_column_spec()); +// ptable.add_row(row![name, column_type, column_type_rust, attrs]); +// } +// } +// inner.push_str(ptable.to_string().as_str()); +// +// string.push_str( +// inner +// .replace("\n", format!("\n{} ", COMMENTBODY).as_str()) +// .as_str(), +// ); +// string.push_str(format!("\n{COMMENTTAIL}\n").as_str()); +// Ok(string) +// } +// +// fn type_to_string(column: &ColumnType, date_time_create: DateTimeCrate) -> String { +// tracing::debug!(?column, ?date_time_create); +// match column { +// ColumnType::Char(Some(max)) => format!("Char({}max)", max), +// ColumnType::Char(None) => "Char".to_owned(), +// ColumnType::String(StringLen::None) => "String".to_owned(), +// ColumnType::String(StringLen::Max) => "String(Max)".to_owned(), +// ColumnType::String(StringLen::N(len)) => format!("String({}max)", len), +// ColumnType::Text => "Text".to_owned(), +// ColumnType::Integer => "Integer".to_owned(), +// +// ColumnType::Custom(v) => v.to_string(), +// _ => unimplemented!(), +// } +// } +// fn type_to_rust_string(column: &ColumnType, date_time_create: DateTimeCrate) -> String { +// tracing::debug!(?column, ?date_time_create); +// match column { +// ColumnType::Char(_) | ColumnType::String(_) | ColumnType::Text | ColumnType::Custom(_) => { +// "String".to_owned() +// } +// ColumnType::Integer => "i32".to_owned(), +// _ => unimplemented!(), +// } +// } +// +// fn attrs_to_string(column: &Vec) -> String { +// tracing::debug!(?column); +// column +// .iter() +// .filter_map(|c| match c { +// ColumnSpec::PrimaryKey => Some("primary key"), +// ColumnSpec::UniqueKey => Some("unique key"), +// ColumnSpec::AutoIncrement => Some("autoincrement"), +// ColumnSpec::NotNull => Some("not null"), +// _ => None, +// }) +// .map(|s| 
s.to_string()) +// .collect::>() +// .join(", ") +// } +// +// // diff --git a/src/generator/table.rs b/src/generator/table.rs new file mode 100644 index 0000000..cf18926 --- /dev/null +++ b/src/generator/table.rs @@ -0,0 +1,46 @@ +use super::column::Column; +use crate::config::Config; +use color_eyre::{eyre::eyre, Result}; +use sea_schema::sea_query::{self, ColumnDef, IndexCreateStatement, TableCreateStatement}; +use tracing::instrument; + +#[derive(Debug, Clone)] +pub struct Table { + pub name: String, + pub columns: Vec, +} + +impl Table { + pub fn new(statement: TableCreateStatement, config: Config) -> Result { + let table_name = match statement.get_table_name() { + Some(table_ref) => match table_ref { + sea_query::TableRef::Table(t) + | sea_query::TableRef::SchemaTable(_, t) + | sea_query::TableRef::DatabaseSchemaTable(_, _, t) + | sea_query::TableRef::TableAlias(t, _) + | sea_query::TableRef::SchemaTableAlias(_, t, _) + | sea_query::TableRef::DatabaseSchemaTableAlias(_, _, t, _) => t.to_string(), + _ => unimplemented!(), + }, + None => return Err(eyre!("Table name not found")), + }; + tracing::debug!(?table_name); + let columns_raw = statement.get_columns(); + let indexes = statement.get_indexes(); + let columns = columns_raw + .iter() + .map(|column| { + let name = column.get_column_name(); + let index = indexes + .iter() + .find(|index| index.get_index_spec().get_column_names().contains(&name)); + Column::new(column.clone(), index.cloned()) + }) + .collect::>>()?; + tracing::debug!(?columns); + Ok(Table { + columns, + name: table_name, + }) + } +} diff --git a/src/main.rs b/src/main.rs index f8c90af..00c1e61 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,6 +1,5 @@ mod config; -mod generate; -use std::{path::PathBuf, str::FromStr}; +mod generator; use clap::Parser; use color_eyre::{eyre::eyre, Report, Result}; @@ -36,9 +35,12 @@ async fn main() -> Result<()> { .extract()?; tracing::info!(?config); tracing::info!(?args); + + let mut handlebars = 
Handlebars::new(); + let output_dir = &config.output.path; let output_internal_entities = output_dir.join("_entities"); - let (_, table_stmts) = generate::get_tables(args.database_url, &config).await?; + let (_, table_stmts) = generator::discover::get_tables(args.database_url, &config).await?; let writer_context = config.clone().into(); let output = EntityTransformer::transform(table_stmts.clone())?.generate(&writer_context); let mut files = output @@ -46,12 +48,13 @@ async fn main() -> Result<()> { .into_iter() .map(|OutputFile { name, content }| (output_internal_entities.join(name), content)) .collect::>(); - let generate_files = generate::generate_models(table_stmts, config.clone()) - .await? - .into_iter() - .map(|OutputFile { name, content }| (output_dir.join(name), content)) - .collect::>(); - files.extend(generate_files); + generator::update_files(table_stmts, config.clone(), &handlebars).await?; + // let generate_files = generator::generate_models(table_stmts, config.clone()) + // .await? + // .into_iter() + // .map(|OutputFile { name, content }| (output_dir.join(name), content)) + // .collect::>(); + // files.extend(generate_files); tracing::info!("Generated {} files", files.len()); fs::create_dir_all(&output_internal_entities).await?; let progress_bar = ProgressBar::new((files.len() * 2) as u64) diff --git a/src/templates/model.rs.hbs b/src/templates/model.rs.hbs new file mode 100644 index 0000000..0b85b03 --- /dev/null +++ b/src/templates/model.rs.hbs @@ -0,0 +1,12 @@ +use {{entities_path}}::{ActiveModel, Model, Entity}; +{{#if has_prelude}} +use {{prelude}}; +{{/if}} + +impl Model { + +} + +impl ActiveModel { + +}