finish up annotate and cleanup project

2025-04-16 14:49:47 +04:00
parent 1a745ff17f
commit 2b668ba89f
18 changed files with 161 additions and 793 deletions

View File

@@ -28,6 +28,7 @@ pub fn pathbuf_to_rust_path(path: PathBuf) -> String {
path
}
#[derive(Debug, Clone)]
#[allow(unused)]
pub enum InsertPoint {
Start,
Replace(String),
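
A hypothetical sketch of how these two insert modes behave when the annotate module applies a generated chunk to existing content; the real FileManager is not part of this diff, and only the variants visible in this hunk are modeled.

#[derive(Debug, Clone)]
enum InsertPoint {
    Start,
    Replace(String),
}

// Illustrative application of a chunk to existing file content.
fn apply(content: &str, chunk: &str, point: &InsertPoint) -> String {
    match point {
        // Prepend the chunk (used when the model file has no schema comment yet).
        InsertPoint::Start => format!("{chunk}\n{content}"),
        // Swap an existing block for the freshly generated one.
        InsertPoint::Replace(old) => content.replace(old, chunk),
    }
}

fn main() {
    let start = apply("pub struct Model;", "/* == Schema Information */", &InsertPoint::Start);
    assert!(start.starts_with("/* == Schema Information */\n"));

    let replaced = apply(
        "/* stale comment */\npub struct Model;",
        "/* fresh comment */",
        &InsertPoint::Replace("/* stale comment */".to_string()),
    );
    assert_eq!(replaced, "/* fresh comment */\npub struct Model;");
}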

View File

@@ -19,15 +19,5 @@ pub async fn generate(database_url: &str, root_config: DocumentMut) -> Result<()
.write_files()
.await?;
// let db_filter = config.sea_orm.entity.tables.get_filter();
// let (table_stmts, db_type) =
// discover::get_tables(database_url.to_owned(), db_filter, &config.db).await?;
// let tables = table_stmts
// .into_iter()
// .map(Table::new)
// .collect::<Result<Vec<Table>>>()?;
//
// let model_outputs = modules::models::generate_models(database_url, config, handlebars).await?;
// files.extend(model_outputs);
Ok(())
}

View File

@@ -1,5 +1,6 @@
use color_eyre::Result;
use minijinja::Environment;
use serde::Serialize;
use crate::generator::modules::{
discovery::{db::DbType, table::Table},
@@ -7,13 +8,26 @@ use crate::generator::modules::{
};
use comfy_table::{Cell, ContentArrangement, Table as CTable};
use super::{AnnotateCommentConfig, COMMENTBODY, COMMENTHEAD, COMMENTTAIL};
use super::{
AnnotateCommentConfig, COMMENTBODY, COMMENTHEAD, COMMENTTAIL, HEADER, SETTINGSDELIMITER,
};
#[derive(Debug, Serialize)]
struct CommentContext<'a> {
pub table_name: &'a str,
pub config: &'a AnnotateCommentConfig,
pub column_info_table: String,
pub comment_config: Option<String>,
pub config_delimiter: &'a str,
}
pub fn generate_comment(
table: &Table,
config: &AnnotateCommentConfig,
environment: &Environment<'static>,
db_type: &DbType,
date_time_crate: &DateTimeCrate,
comment_config: Option<AnnotateCommentConfig>,
) -> Result<String> {
let mut column_info_table = CTable::new();
let mut header = Vec::new();
@@ -50,38 +64,35 @@ pub fn generate_comment(
row.push(Cell::new(column_type));
}
if config.column_attributes.unwrap() {
let attrs_string = column.attrs_to_string();
let exclude = config.column_exclude_attributes.clone().unwrap();
let filter: Box<dyn Fn(&String) -> bool> = Box::new(move |f| {
let exclude = exclude.clone();
!exclude.contains(f)
});
let attrs_string = column.attrs_to_string(Some(filter));
row.push(Cell::new(attrs_string));
}
column_info_table.add_row(row);
}
// column_info_table.to_string()
// let config_part = match parsed_settings {
// Some(settings) => {
// let settings_str = serde_yaml::to_string(&settings)?;
// let settings_str = settings_str
// .lines()
// .map(|line| format!(" {}", line))
// .collect::<Vec<_>>()
// .join("\n");
// format!(
// "{SETTINGSDELIMITER}\n{}\n{SETTINGSDELIMITER}\n\n",
// settings_str
// )
// }
// None => String::new(),
// };
let context = CommentContext {
table_name: &table.name,
config,
column_info_table: column_info_table.to_string(),
comment_config: comment_config
.and_then(|f| toml::to_string_pretty(&f).ok())
.map(|s| {
s.lines()
.map(|line| format!(" {}", line))
.collect::<Vec<_>>()
.join("\n")
}),
config_delimiter: SETTINGSDELIMITER,
};
let template = environment.get_template("annotate.comment")?;
let rendered_data = template.render(&context)?;
// let table_name = &table.name;
// let table_name_str = if config.table_name {
// format!("Table: {}\n", table_name)
// } else {
// String::new()
// };
// let string = format!("{HEADER}\n{config_part}{table_name_str}\n{column_info_table}");
// let padded_string = Self::pad_comment(&string);
Ok(String::new())
Ok(pad_comment(&rendered_data))
}
pub fn pad_comment(s: &str) -> String {
@@ -90,7 +101,7 @@ pub fn pad_comment(s: &str) -> String {
for (index, part) in parts.iter().enumerate() {
let first = index == 0;
let comment = match first {
true => COMMENTHEAD.to_string(),
true => format!("{} {}\n{}", COMMENTHEAD, HEADER, COMMENTBODY),
false => COMMENTBODY.to_string(),
};
let padded_part = format!("{} {}\n", comment, part);
@@ -99,3 +110,12 @@ pub fn pad_comment(s: &str) -> String {
padded.push_str(COMMENTTAIL);
padded
}
pub fn find_settings_block(file_content: &str) -> Option<String> {
let delimiter_length = SETTINGSDELIMITER.len();
let start_pos = file_content.find(SETTINGSDELIMITER)?;
let end_pos = file_content[start_pos + delimiter_length..].find(SETTINGSDELIMITER)?;
// end_pos is relative to the slice that starts after the opening delimiter
let content =
&file_content[start_pos + delimiter_length..start_pos + delimiter_length + end_pos];
let content = content.replace(&format!("\n{COMMENTBODY}"), "\n");
Some(content)
}
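
For reference, a standalone copy of the extraction above run against a comment shaped like the user.rs example at the end of this diff; the constants are duplicated so the snippet compiles on its own, and the input string is illustrative.

const SETTINGSDELIMITER: &str = "```";
const COMMENTBODY: &str = " *";

// Same logic as find_settings_block above, copied here so the example is self-contained.
fn find_settings_block(file_content: &str) -> Option<String> {
    let delimiter_length = SETTINGSDELIMITER.len();
    let start_pos = file_content.find(SETTINGSDELIMITER)?;
    let rest = &file_content[start_pos + delimiter_length..];
    let end_pos = rest.find(SETTINGSDELIMITER)?;
    // Strip the leading " *" that pad_comment added to every line of the block.
    Some(rest[..end_pos].replace(&format!("\n{COMMENTBODY}"), "\n"))
}

fn main() {
    let file = "/* == Schema Information\n * ```\n * column_db_type = false\n * ```\n *\n * Table: user\n */";
    let settings = find_settings_block(file).expect("settings block present");
    assert_eq!(settings.trim(), "column_db_type = false");
}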

View File

@@ -1,12 +1,18 @@
pub mod comment;
use std::path::PathBuf;
use crate::generator::file::InsertPoint;
use super::{
discovery::DiscoveredSchema, models::ModelsConfig, sea_orm::SeaOrmConfig, Module,
ModulesContext,
};
use color_eyre::Result;
use comment_parser::{CommentParser, Event};
use minijinja::Environment;
use serde::{Deserialize, Serialize};
use tokio::fs;
const HEADER: &str = r#"== Schema Information"#;
const COMMENTHEAD: &str = r#"/*"#;
const COMMENTBODY: &str = r#" *"#;
@@ -23,6 +29,8 @@ pub struct AnnotateConfig {
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct AnnotateCommentConfig {
#[serde(skip_serializing_if = "Option::is_none")]
pub enable: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub max_wdith: Option<u16>,
#[serde(skip_serializing_if = "Option::is_none")]
@@ -42,6 +50,7 @@ pub struct AnnotateCommentConfig {
impl Default for AnnotateCommentConfig {
fn default() -> Self {
Self {
enable: Some(true),
max_wdith: Some(80),
table_name: Some(true),
column_name: Some(true),
@@ -74,6 +83,7 @@ impl Module for AnnotateModule {
async fn execute(&mut self, ctx: &mut ModulesContext) -> Result<()> {
let map = ctx.get_anymap();
let file_manager = ctx.get_file_manager();
let mut file_chunks: Vec<(PathBuf, String, Option<InsertPoint>)> = Vec::new();
if let (Some(config), Some(environment), Some(schema)) = (
map.get::<AnnotateConfig>(),
map.get::<Environment<'static>>(),
@@ -95,13 +105,67 @@ impl Module for AnnotateModule {
environment,
&schema.database_type,
&sea_orm_config.entity.date_time_crate,
);
None,
)?;
file_chunks.push((path, comment, Some(InsertPoint::Start)))
} else {
// the file must already exist, therefore read and process it
let content = fs::read_to_string(&path).await?;
let rules = comment_parser::get_syntax("rust").unwrap();
let parser = CommentParser::new(&content, rules);
for comment in parser {
if let Event::BlockComment(body, _) = comment {
if body.contains(HEADER) {
tracing::debug!("Found header");
let mut comment_config = config.comment.clone();
let mut file_comment_config = None;
if let Some(parsed_settings) =
comment::find_settings_block(&content)
{
match toml::from_str::<AnnotateCommentConfig>(
&parsed_settings,
) {
Ok(s) => {
file_comment_config = Some(s.clone());
comment_config =
serde_merge::omerge(&config.comment, s)?;
}
Err(e) => {
return Err(e.into());
// if !settings.ignore_errors {
// return Err(e.into());
// }
}
}
}
tracing::info!(?comment_config, ?file_comment_config);
if comment_config.enable.unwrap() {
let comment = comment::generate_comment(
table,
&comment_config,
environment,
&schema.database_type,
&sea_orm_config.entity.date_time_crate,
file_comment_config,
)?;
file_chunks.push((path.clone(), content.clone(), None));
file_chunks.push((
path,
comment,
Some(InsertPoint::Replace(body.to_string())),
));
break;
}
}
}
}
}
}
}
}
let file_manager = ctx.get_file_manager_mut();
for file in file_chunks {
file_manager.insert(&file.0, &file.1, file.2)?;
}
Ok(())
} else {
Ok(())
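
A hedged sketch of the override flow in execute() above: settings parsed out of a file's schema comment win over the global comment config. The module itself delegates to serde_merge::omerge; here the merge is hand-rolled on a cut-down struct so the sketch runs on its own.

use serde::Deserialize;

// A reduced stand-in for AnnotateCommentConfig with only two illustrative fields.
#[derive(Debug, Deserialize)]
struct PartialCommentConfig {
    enable: Option<bool>,
    column_db_type: Option<bool>,
}

// Per-file values take precedence; global values fill the gaps.
fn merge(global: &PartialCommentConfig, file: PartialCommentConfig) -> PartialCommentConfig {
    PartialCommentConfig {
        enable: file.enable.or(global.enable),
        column_db_type: file.column_db_type.or(global.column_db_type),
    }
}

fn main() -> Result<(), toml::de::Error> {
    let global = PartialCommentConfig { enable: Some(true), column_db_type: Some(true) };
    // What find_settings_block would return for the user.rs example at the end of this diff.
    let file_settings: PartialCommentConfig = toml::from_str("column_db_type = false")?;
    let merged = merge(&global, file_settings);
    assert_eq!(merged.enable, Some(true));
    assert_eq!(merged.column_db_type, Some(false));
    Ok(())
}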

View File

@@ -1,7 +1,7 @@
use color_eyre::{eyre::ContextCompat, Result};
use heck::ToUpperCamelCase;
use sea_schema::sea_query::{ColumnDef, ColumnSpec, ColumnType, IndexCreateStatement};
use serde::{Deserialize, Serialize};
use serde::Serialize;
use crate::generator::modules::sea_orm::config::DateTimeCrate;
@@ -37,30 +37,18 @@ impl Column {
attrs: attrs.to_vec(),
})
}
// pub fn get_info_row(&self, config: &ModelConfig) -> Result<Vec<Cell>> {
// let column_type_rust = self.get_rs_type(&config.comment.date_time_crate);
// let column_type = self.get_db_type(&config.db_type);
// let attrs = self.attrs_to_string();
// let mut cols = Vec::new();
// if config.comment.column_name {
// cols.push(Cell::new(self.name.clone()))
// }
// if config.comment.column_name {
// cols.push(Cell::new(column_type.clone()))
// }
// if config.comment.column_rust_type {
// cols.push(Cell::new(column_type_rust.clone()))
// }
// if config.comment.column_attributes {
// cols.push(Cell::new(attrs.clone()));
// }
// Ok(cols)
// }
pub fn attrs_to_string(&self) -> String {
#[allow(clippy::type_complexity)]
pub fn attrs_to_string(&self, filter: Option<Box<dyn Fn(&String) -> bool>>) -> String {
self.attrs
.iter()
.filter_map(Self::get_addr_type)
.map(|s| s.to_string())
.filter(|s| {
if let Some(filter) = &filter {
return filter(s);
};
true
})
.collect::<Vec<String>>()
.join(", ")
}
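
A self-contained sketch of the boxed exclusion filter introduced here; the attribute strings are illustrative, and the function mirrors attrs_to_string above on a plain slice.

// Joins attribute strings, dropping any that the optional filter rejects.
fn attrs_to_string(attrs: &[String], filter: Option<Box<dyn Fn(&String) -> bool>>) -> String {
    attrs
        .iter()
        .cloned()
        .filter(|s| {
            if let Some(filter) = &filter {
                return filter(s);
            }
            true
        })
        .collect::<Vec<String>>()
        .join(", ")
}

fn main() {
    let attrs = vec![
        "autoincrement".to_string(),
        "not null".to_string(),
        "primary key".to_string(),
    ];
    // Exclusion list as it would come from column_exclude_attributes.
    let exclude = vec!["autoincrement".to_string()];
    let filter: Box<dyn Fn(&String) -> bool> = Box::new(move |a| !exclude.contains(a));
    assert_eq!(attrs_to_string(&attrs, Some(filter)), "not null, primary key");
    assert_eq!(attrs_to_string(&attrs, None), "autoincrement, not null, primary key");
}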

View File

@@ -1,7 +1,7 @@
use super::column::Column;
use color_eyre::{eyre::eyre, Result};
use sea_schema::sea_query::{self, TableCreateStatement};
use serde::{Deserialize, Serialize};
use serde::Serialize;
#[derive(Debug, Clone, Serialize)]
pub struct Table {

View File

@@ -109,7 +109,7 @@ impl ModuleManager {
Box::new(DiscoveryModule),
Box::new(SeaOrmModule),
Box::new(ModelsModule),
Box::new(AnnotateModule::default()),
Box::new(AnnotateModule),
];
Self {
modules,

View File

@@ -1,236 +0,0 @@
use color_eyre::{eyre::ContextCompat, Result};
use comfy_table::Cell;
use heck::ToUpperCamelCase;
use sea_schema::sea_query::{ColumnDef, ColumnSpec, ColumnType, IndexCreateStatement};
use super::{discover::DbType, ModelConfig};
use crate::config::sea_orm_config::DateTimeCrate;
#[derive(Clone, Debug)]
pub struct Column {
pub name: String,
pub col_type: ColumnType,
pub attrs: Vec<ColumnSpec>,
}
impl Column {
pub fn new(column: ColumnDef, index: Option<IndexCreateStatement>) -> Result<Self> {
let name = column.get_column_name();
let col_type = column
.get_column_type()
.context("Unable to get column type")?
.clone();
let mut attrs = column.get_column_spec().clone();
if let Some(index) = index {
if index.is_unique_key() {
attrs.push(ColumnSpec::UniqueKey)
}
if index.is_primary_key() {
attrs.push(ColumnSpec::PrimaryKey);
}
}
Ok(Column {
name: name.to_string(),
col_type,
attrs: attrs.to_vec(),
})
}
pub fn get_info_row(&self, config: &ModelConfig) -> Result<Vec<Cell>> {
let column_type_rust = self.get_rs_type(&config.comment.date_time_crate);
let column_type = self.get_db_type(&config.db_type);
let attrs = self.attrs_to_string();
let mut cols = Vec::new();
if config.comment.column_name {
cols.push(Cell::new(self.name.clone()))
}
if config.comment.column_name {
cols.push(Cell::new(column_type.clone()))
}
if config.comment.column_rust_type {
cols.push(Cell::new(column_type_rust.clone()))
}
if config.comment.column_attributes {
cols.push(Cell::new(attrs.clone()));
}
Ok(cols)
}
pub fn attrs_to_string(&self) -> String {
self.attrs
.iter()
.filter_map(Self::get_addr_type)
.map(|s| s.to_string())
.collect::<Vec<String>>()
.join(", ")
}
pub fn get_addr_type(attr: &ColumnSpec) -> Option<String> {
match attr {
ColumnSpec::PrimaryKey => Some("primary key".to_owned()),
ColumnSpec::Null => unimplemented!(),
ColumnSpec::NotNull => Some("not null".to_owned()),
ColumnSpec::Default(_) => unimplemented!(),
ColumnSpec::AutoIncrement => Some("autoincrement".to_owned()),
ColumnSpec::UniqueKey => Some("unique key".to_owned()),
ColumnSpec::Check(_) => unimplemented!(),
ColumnSpec::Generated { .. } => unimplemented!(),
ColumnSpec::Extra(_) => unimplemented!(),
ColumnSpec::Comment(_) => unimplemented!(),
ColumnSpec::Using(_) => unimplemented!(),
}
}
pub fn get_db_type(&self, db_type: &DbType) -> String {
fn write_db_type(col_type: &ColumnType, db_type: &DbType) -> String {
#[allow(unreachable_patterns)]
match (col_type, db_type) {
(ColumnType::Char(_), _) => "char".to_owned(),
(ColumnType::String(_), _) => "varchar".to_owned(),
(ColumnType::Text, _) => "text".to_owned(),
(ColumnType::TinyInteger, DbType::MySql | DbType::Sqlite) => "tinyint".to_owned(),
(ColumnType::TinyInteger, DbType::Postgres) => "smallint".to_owned(),
(ColumnType::SmallInteger, _) => "smallint".to_owned(),
(ColumnType::Integer, DbType::MySql) => "int".to_owned(),
(ColumnType::Integer, _) => "integer".to_owned(),
(ColumnType::BigInteger, DbType::MySql | DbType::Postgres) => "bigint".to_owned(),
(ColumnType::BigInteger, DbType::Sqlite) => "integer".to_owned(),
(ColumnType::TinyUnsigned, DbType::MySql) => "tinyint unsigned".to_owned(),
(ColumnType::TinyUnsigned, DbType::Postgres) => "smallint".to_owned(),
(ColumnType::TinyUnsigned, DbType::Sqlite) => "tinyint".to_owned(),
(ColumnType::SmallUnsigned, DbType::MySql) => "smallint unsigned".to_owned(),
(ColumnType::SmallUnsigned, DbType::Postgres | DbType::Sqlite) => {
"smallint".to_owned()
}
(ColumnType::Unsigned, DbType::MySql) => "int unsigned".to_owned(),
(ColumnType::Unsigned, DbType::Postgres | DbType::Sqlite) => "integer".to_owned(),
(ColumnType::BigUnsigned, DbType::MySql) => "bigint unsigned".to_owned(),
(ColumnType::BigUnsigned, DbType::Postgres) => "bigint".to_owned(),
(ColumnType::BigUnsigned, DbType::Sqlite) => "integer".to_owned(),
(ColumnType::Float, DbType::MySql | DbType::Sqlite) => "float".to_owned(),
(ColumnType::Float, DbType::Postgres) => "real".to_owned(),
(ColumnType::Double, DbType::MySql | DbType::Sqlite) => "double".to_owned(),
(ColumnType::Double, DbType::Postgres) => "double precision".to_owned(),
(ColumnType::Decimal(_), DbType::MySql | DbType::Postgres) => "decimal".to_owned(),
(ColumnType::Decimal(_), DbType::Sqlite) => "real".to_owned(),
(ColumnType::DateTime, DbType::MySql) => "datetime".to_owned(),
(ColumnType::DateTime, DbType::Postgres) => "timestamp w/o tz".to_owned(),
(ColumnType::DateTime, DbType::Sqlite) => "datetime_text".to_owned(),
(ColumnType::Timestamp, DbType::MySql | DbType::Postgres) => "timestamp".to_owned(),
(ColumnType::Timestamp, DbType::Sqlite) => "timestamp_text".to_owned(),
(ColumnType::TimestampWithTimeZone, DbType::MySql) => "timestamp".to_owned(),
(ColumnType::TimestampWithTimeZone, DbType::Postgres) => {
"timestamp w tz".to_owned()
}
(ColumnType::TimestampWithTimeZone, DbType::Sqlite) => {
"timestamp_with_timezone_text".to_owned()
}
(ColumnType::Time, DbType::MySql | DbType::Postgres) => "time".to_owned(),
(ColumnType::Time, DbType::Sqlite) => "time_text".to_owned(),
(ColumnType::Date, DbType::MySql | DbType::Postgres) => "date".to_owned(),
(ColumnType::Date, DbType::Sqlite) => "date_text".to_owned(),
(ColumnType::Year, DbType::MySql) => "year".to_owned(),
(ColumnType::Interval(_, _), DbType::Postgres) => "interval".to_owned(),
(ColumnType::Blob, DbType::MySql | DbType::Sqlite) => "blob".to_owned(),
(ColumnType::Blob, DbType::Postgres) => "bytea".to_owned(),
(ColumnType::Binary(_), DbType::MySql) => "binary".to_owned(),
(ColumnType::Binary(_), DbType::Postgres) => "bytea".to_owned(),
(ColumnType::Binary(_), DbType::Sqlite) => "blob".to_owned(),
(ColumnType::VarBinary(_), DbType::MySql) => "varbinary".to_owned(),
(ColumnType::VarBinary(_), DbType::Postgres) => "bytea".to_owned(),
(ColumnType::VarBinary(_), DbType::Sqlite) => "varbinary_blob".to_owned(),
(ColumnType::Bit(_), DbType::MySql | DbType::Postgres) => "bit".to_owned(),
(ColumnType::VarBit(_), DbType::MySql) => "bit".to_owned(),
(ColumnType::VarBit(_), DbType::Postgres) => "varbit".to_owned(),
(ColumnType::Boolean, DbType::MySql | DbType::Postgres) => "bool".to_owned(),
(ColumnType::Boolean, DbType::Sqlite) => "boolean".to_owned(),
(ColumnType::Money(_), DbType::MySql) => "decimal".to_owned(),
(ColumnType::Money(_), DbType::Postgres) => "money".to_owned(),
(ColumnType::Money(_), DbType::Sqlite) => "real_money".to_owned(),
(ColumnType::Json, DbType::MySql | DbType::Postgres) => "json".to_owned(),
(ColumnType::Json, DbType::Sqlite) => "json_text".to_owned(),
(ColumnType::JsonBinary, DbType::MySql) => "json".to_owned(),
(ColumnType::JsonBinary, DbType::Postgres) => "jsonb".to_owned(),
(ColumnType::JsonBinary, DbType::Sqlite) => "jsonb_text".to_owned(),
(ColumnType::Uuid, DbType::MySql) => "binary(16)".to_owned(),
(ColumnType::Uuid, DbType::Postgres) => "uuid".to_owned(),
(ColumnType::Uuid, DbType::Sqlite) => "uuid_text".to_owned(),
(ColumnType::Enum { name, .. }, DbType::MySql) => {
format!("ENUM({})", name.to_string().to_upper_camel_case())
}
(ColumnType::Enum { name, .. }, DbType::Postgres) => {
name.to_string().to_uppercase()
}
(ColumnType::Enum { .. }, DbType::Sqlite) => "enum_text".to_owned(),
(ColumnType::Array(column_type), DbType::Postgres) => {
format!("{}[]", write_db_type(column_type, db_type)).to_uppercase()
}
(ColumnType::Vector(_), DbType::Postgres) => "vector".to_owned(),
(ColumnType::Cidr, DbType::Postgres) => "cidr".to_owned(),
(ColumnType::Inet, DbType::Postgres) => "inet".to_owned(),
(ColumnType::MacAddr, DbType::Postgres) => "macaddr".to_owned(),
(ColumnType::LTree, DbType::Postgres) => "ltree".to_owned(),
_ => unimplemented!(),
}
}
write_db_type(&self.col_type, db_type)
}
pub fn get_rs_type(&self, date_time_crate: &DateTimeCrate) -> String {
fn write_rs_type(col_type: &ColumnType, date_time_crate: &DateTimeCrate) -> String {
#[allow(unreachable_patterns)]
match col_type {
ColumnType::Char(_)
| ColumnType::String(_)
| ColumnType::Text
| ColumnType::Custom(_) => "String".to_owned(),
ColumnType::TinyInteger => "i8".to_owned(),
ColumnType::SmallInteger => "i16".to_owned(),
ColumnType::Integer => "i32".to_owned(),
ColumnType::BigInteger => "i64".to_owned(),
ColumnType::TinyUnsigned => "u8".to_owned(),
ColumnType::SmallUnsigned => "u16".to_owned(),
ColumnType::Unsigned => "u32".to_owned(),
ColumnType::BigUnsigned => "u64".to_owned(),
ColumnType::Float => "f32".to_owned(),
ColumnType::Double => "f64".to_owned(),
ColumnType::Json | ColumnType::JsonBinary => "Json".to_owned(),
ColumnType::Date => match date_time_crate {
DateTimeCrate::Chrono => "Date".to_owned(),
DateTimeCrate::Time => "TimeDate".to_owned(),
},
ColumnType::Time => match date_time_crate {
DateTimeCrate::Chrono => "Time".to_owned(),
DateTimeCrate::Time => "TimeTime".to_owned(),
},
ColumnType::DateTime => match date_time_crate {
DateTimeCrate::Chrono => "DateTime".to_owned(),
DateTimeCrate::Time => "TimeDateTime".to_owned(),
},
ColumnType::Timestamp => match date_time_crate {
DateTimeCrate::Chrono => "DateTimeUtc".to_owned(),
DateTimeCrate::Time => "TimeDateTime".to_owned(),
},
ColumnType::TimestampWithTimeZone => match date_time_crate {
DateTimeCrate::Chrono => "DateTimeWithTimeZone".to_owned(),
DateTimeCrate::Time => "TimeDateTimeWithTimeZone".to_owned(),
},
ColumnType::Decimal(_) | ColumnType::Money(_) => "Decimal".to_owned(),
ColumnType::Uuid => "Uuid".to_owned(),
ColumnType::Binary(_) | ColumnType::VarBinary(_) | ColumnType::Blob => {
"Vec<u8>".to_owned()
}
ColumnType::Boolean => "bool".to_owned(),
ColumnType::Enum { name, .. } => name.to_string().to_upper_camel_case(),
ColumnType::Array(column_type) => {
format!("Vec<{}>", write_rs_type(column_type, date_time_crate))
}
ColumnType::Vector(_) => "::pgvector::Vector".to_owned(),
ColumnType::Bit(None | Some(1)) => "bool".to_owned(),
ColumnType::Bit(_) | ColumnType::VarBit(_) => "Vec<u8>".to_owned(),
ColumnType::Year => "i32".to_owned(),
ColumnType::Cidr | ColumnType::Inet => "IpNetwork".to_owned(),
ColumnType::Interval(_, _) | ColumnType::MacAddr | ColumnType::LTree => {
"String".to_owned()
}
_ => unimplemented!(),
}
}
write_rs_type(&self.col_type, date_time_crate)
}
}

View File

@@ -1,147 +0,0 @@
use super::{table::Table, CommentConfig, CommentConfigSerde, ModelConfig};
use color_eyre::Result;
use comfy_table::{ContentArrangement, Table as CTable};
use comment_parser::{CommentParser, Event};
const HEADER: &str = r#"== Schema Information"#;
const COMMENTHEAD: &str = r#"/*"#;
const COMMENTBODY: &str = r#" *"#;
const COMMENTTAIL: &str = r#"*/"#;
const SETTINGSDELIMITER: &str = r#"```"#;
pub struct ModelCommentGenerator {}
impl ModelCommentGenerator {
pub fn find_settings_block(file_content: &str) -> Option<String> {
let delimiter_length = SETTINGSDELIMITER.len();
let start_pos = file_content.find(SETTINGSDELIMITER)?;
let end_pos = file_content[start_pos + delimiter_length..].find(SETTINGSDELIMITER)?;
let content = &file_content[start_pos + delimiter_length..start_pos + end_pos];
let content = content.replace(&format!("\n{COMMENTBODY}"), "\n");
Some(content)
}
pub fn generate_comment(
table: Table,
file_content: &str,
config: &ModelConfig,
) -> Result<String> {
let rules = comment_parser::get_syntax("rust").unwrap();
let parser = CommentParser::new(file_content, rules);
for comment in parser {
if let Event::BlockComment(body, _) = comment {
if body.contains(HEADER) {
tracing::debug!("Found header");
let mut settings = config.comment.clone();
let mut new_settings = None;
if let Some(parsed_settings) = Self::find_settings_block(file_content) {
tracing::info!(?new_settings);
match serde_yaml::from_str::<CommentConfigSerde>(&parsed_settings) {
Ok(s) => {
new_settings = Some(s.clone());
settings = s.merge(&settings);
tracing::info!(?settings);
}
Err(e) => {
if !settings.ignore_errors {
return Err(e.into());
}
}
}
}
tracing::debug!(?table.name, ?settings);
if settings.enable {
let comment =
Self::generate_comment_content(table, config, &settings, new_settings)?;
return Ok(file_content.replace(body, &comment));
}
}
}
}
let comment = Self::generate_comment_content(table, config, &config.comment, None)?;
Ok(format!("{}\n{}", comment, file_content))
}
pub fn generate_comment_content(
table: Table,
model_config: &ModelConfig,
config: &CommentConfig,
parsed_settings: Option<CommentConfigSerde>,
) -> Result<String> {
let mut model_config = model_config.clone();
model_config.comment = config.clone();
let column_info_table = if config.column_info {
let mut column_info_table = CTable::new();
let mut header = Vec::new();
if config.column_name {
header.push("Name");
}
if config.column_db_type {
header.push("DbType");
}
if config.column_rust_type {
header.push("RsType");
}
if config.column_attributes {
header.push("Attrs");
}
column_info_table
.load_preset(" -+=++ + ++")
.set_content_arrangement(ContentArrangement::Dynamic)
.set_header(header);
if let Some(width) = config.max_width {
column_info_table.set_width(width);
}
for column in &table.columns {
column_info_table.add_row(column.get_info_row(&model_config)?);
}
column_info_table.to_string()
} else {
String::new()
};
let config_part = match parsed_settings {
Some(settings) => {
let settings_str = serde_yaml::to_string(&settings)?;
let settings_str = settings_str
.lines()
.map(|line| format!(" {}", line))
.collect::<Vec<_>>()
.join("\n");
format!(
"{SETTINGSDELIMITER}\n{}\n{SETTINGSDELIMITER}\n\n",
settings_str
)
}
None => String::new(),
};
let table_name = &table.name;
let table_name_str = if config.table_name {
format!("Table: {}\n", table_name)
} else {
String::new()
};
let string = format!("{HEADER}\n{config_part}{table_name_str}\n{column_info_table}");
let padded_string = Self::pad_comment(&string);
Ok(padded_string)
}
pub fn pad_comment(s: &str) -> String {
let parts = s.split('\n').collect::<Vec<_>>();
let mut padded = String::new();
for (index, part) in parts.iter().enumerate() {
let first = index == 0;
let comment = match first {
true => COMMENTHEAD.to_string(),
false => COMMENTBODY.to_string(),
};
let padded_part = format!("{} {}\n", comment, part);
padded.push_str(&padded_part);
}
padded.push_str(COMMENTTAIL);
padded
}
// pub async fn generate_header(&self, config: &Config, db_type: &DbType) -> Result<String> {
//
// }
}

View File

@@ -1,95 +0,0 @@
use std::path::PathBuf;
use crate::generator::file::GeneratedFileChunk;
use super::{comment::ModelCommentGenerator, table::Table, ModelConfig};
use color_eyre::Result;
use handlebars::Handlebars;
use heck::ToPascalCase;
use serde::Serialize;
use tokio::fs;
#[derive(Debug, Clone)]
pub struct FileGenerator;
#[derive(Debug, Clone, Serialize)]
pub struct ModelContext {
entities_path: String,
model_path: String,
model_name: String,
active_model_name: String,
prelude_path: Option<String>,
}
impl FileGenerator {
pub async fn generate_file<'a>(
table: Table,
config: &ModelConfig,
handlebars: &'a Handlebars<'a>,
) -> Result<Vec<GeneratedFileChunk>> {
let mut file_chunks = Vec::new();
file_chunks.push(GeneratedFileChunk {
path: config.models_path.join("mod.rs"),
content: format!("pub mod {};", table.name),
priority: 0,
});
let filepath = config.models_path.join(format!("{}.rs", table.name));
tracing::debug!(?filepath, "Generating file");
if filepath.exists() {
file_chunks
.extend(Self::handle_existing_file(table, &filepath, config, handlebars).await?);
} else {
let model_name = format!("{}Model", table.name).to_pascal_case();
let active_model_name = format!("{}ActiveModel", table.name).to_pascal_case();
let context = ModelContext {
entities_path: format!("super::{}", config.relative_entities_path),
model_path: table.name.clone(),
model_name,
active_model_name,
prelude_path: Some("super::prelude".to_string()),
};
tracing::info!(?context, "Generating new file",);
let content = if config.prelude {
file_chunks.push(GeneratedFileChunk {
path: config.models_path.join("prelude.rs"),
content: format!(
"pub use super::{}::{}::{{Model as {},ActiveModel as {},Entity as {}}};",
config.relative_entities_path,
table.name,
context.model_name,
context.active_model_name,
table.name.clone().to_pascal_case()
),
priority: 0,
});
handlebars.render("modelprelude", &context)?
} else {
handlebars.render("model", &context)?
};
file_chunks.push(GeneratedFileChunk {
path: filepath,
content,
priority: 0,
});
}
Ok(file_chunks)
}
async fn handle_existing_file<'a>(
table: Table,
filepath: &PathBuf,
config: &ModelConfig,
_handlebars: &'a Handlebars<'a>,
) -> Result<Vec<GeneratedFileChunk>> {
let mut file_chunks = Vec::new();
let mut file_content = fs::read_to_string(filepath).await?;
if config.comment.enable {
file_content = ModelCommentGenerator::generate_comment(table, &file_content, config)?;
}
file_chunks.push(GeneratedFileChunk {
path: filepath.clone(),
content: file_content,
priority: 0,
});
Ok(file_chunks)
}
}

View File

@@ -12,7 +12,6 @@ use color_eyre::{
eyre::{eyre, Context, ContextCompat},
Result,
};
use heck::ToPascalCase;
use minijinja::Environment;
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Deserialize, Serialize)]
@@ -153,11 +152,11 @@ impl Module for ModelsModule {
config.clone(),
);
// files.push((mod_path.clone(), format!("pub mod {};", table.name)));
// if path.exists() {
// tracing::debug!(?path, "Model file already exists");
// continue;
// }
files.push((mod_path.clone(), format!("pub mod {};", table.name)));
if path.exists() {
tracing::debug!(?path, "Model file already exists");
continue;
}
let content = templates
.get_template("model")?
@@ -176,168 +175,3 @@ impl Module for ModelsModule {
Ok(())
}
}
//
//
// use crate::{
// config::{sea_orm_config::DateTimeCrate, Config},
// generator::{
// discover::{self, DbType},
// file::GeneratedFileChunk,
// },
// };
// use color_eyre::Result;
// use file::FileGenerator;
// use handlebars::Handlebars;
// use sea_orm_codegen::{EntityTransformer, EntityWriterContext, OutputFile};
// use sea_schema::sea_query::TableCreateStatement;
// use serde::{Deserialize, Serialize};
// use std::path::PathBuf;
// use table::Table;
//
// pub mod column;
// pub mod comment;
// pub mod file;
// pub mod table;
// #[derive(Debug, Clone)]
// pub struct ModelConfig {
// pub models_path: PathBuf,
// pub prelude: bool,
// pub entities_path: PathBuf,
// pub relative_entities_path: String,
// pub enable: bool,
// pub comment: CommentConfig,
// pub db_type: DbType,
// }
// #[derive(Debug, Clone)]
// pub struct CommentConfig {
// pub max_width: Option<u16>,
// pub enable: bool,
// pub table_name: bool,
// pub column_info: bool,
// pub column_name: bool,
// pub column_rust_type: bool,
// pub column_db_type: bool,
// pub column_attributes: bool,
// pub ignore_errors: bool,
// pub date_time_crate: DateTimeCrate,
// }
// #[derive(Debug, Clone, Serialize, Deserialize)]
// pub struct CommentConfigSerde {
// #[serde(skip_serializing_if = "Option::is_none")]
// pub max_width: Option<u16>,
// #[serde(skip_serializing_if = "Option::is_none")]
// pub enable: Option<bool>,
// #[serde(skip_serializing_if = "Option::is_none")]
// pub table_name: Option<bool>,
// #[serde(skip_serializing_if = "Option::is_none")]
// pub name: Option<bool>,
// #[serde(skip_serializing_if = "Option::is_none")]
// pub info: Option<bool>,
// #[serde(skip_serializing_if = "Option::is_none")]
// pub rust_type: Option<bool>,
// #[serde(skip_serializing_if = "Option::is_none")]
// pub db_type: Option<bool>,
// #[serde(skip_serializing_if = "Option::is_none")]
// pub attributes: Option<bool>,
// }
// impl CommentConfigSerde {
// pub fn merge(&self, config: &CommentConfig) -> CommentConfig {
// CommentConfig {
// max_width: self.max_width.or(config.max_width),
// table_name: self.table_name.unwrap_or(config.table_name),
// column_name: self.name.unwrap_or(config.column_name),
// column_info: self.info.unwrap_or(config.column_info),
// column_rust_type: self.rust_type.unwrap_or(config.column_rust_type),
// column_db_type: self.db_type.unwrap_or(config.column_db_type),
// column_attributes: self.attributes.unwrap_or(config.column_attributes),
// ignore_errors: config.ignore_errors,
// enable: self.enable.unwrap_or(config.enable),
// date_time_crate: config.date_time_crate.clone(),
// }
// }
// }
//
// impl ModelConfig {
// pub fn new(config: Config, db_type: DbType) -> Self {
// let models_path = config.output.path.join(&config.output.models.path);
// let entities_path = models_path.join(&config.output.models.entities);
// ModelConfig {
// db_type,
// prelude: config.output.models.prelude,
// entities_path,
// models_path,
// relative_entities_path: config.output.models.entities.clone(),
// enable: config.output.models.enable,
// comment: CommentConfig {
// max_width: config.output.models.comment.max_width,
// enable: config.output.models.comment.enable,
// table_name: config.output.models.comment.table_name,
// column_name: config.output.models.comment.column_name,
// column_info: config.output.models.comment.column_info,
// column_rust_type: config.output.models.comment.column_rust_type,
// column_db_type: config.output.models.comment.column_db_type,
// column_attributes: config.output.models.comment.column_attributes,
// ignore_errors: config.output.models.comment.ignore_errors,
// date_time_crate: config.sea_orm.entity.date_time_crate,
// },
// }
// }
// }
//
// pub async fn generate_models<'a>(
// database_url: &str,
// config: &Config,
// handlebars: &'a Handlebars<'a>,
// ) -> Result<Vec<GeneratedFileChunk>> {
// let mut files = Vec::new();
// let db_filter = config.sea_orm.entity.tables.get_filter();
// let (table_stmts, db_type) =
// discover::get_tables(database_url.to_owned(), db_filter, &config.db).await?;
// let model_config = ModelConfig::new(config.clone(), db_type);
//
// let writer_context = config.clone().into();
// files.extend(
// generate_entities(table_stmts.clone(), model_config.clone(), writer_context).await?,
// );
//
// files.push(GeneratedFileChunk {
// path: model_config.models_path.join("mod.rs"),
// content: format!("pub mod {};", model_config.relative_entities_path),
// priority: 0,
// });
// let tables = table_stmts
// .into_iter()
// .map(Table::new)
// .collect::<Result<Vec<Table>>>()?;
//
// if model_config.enable {
// for table in tables {
// files.extend(FileGenerator::generate_file(table, &model_config, handlebars).await?);
// }
// if model_config.prelude {
// files.push(GeneratedFileChunk {
// path: model_config.models_path.join("mod.rs"),
// content: String::from("pub mod prelude;"),
// priority: 0,
// })
// }
// }
// Ok(files)
// }
//
// pub async fn generate_entities(
// table_statements: Vec<TableCreateStatement>,
// config: ModelConfig,
// writer_context: EntityWriterContext,
// ) -> Result<Vec<GeneratedFileChunk>> {
// let output = EntityTransformer::transform(table_statements)?.generate(&writer_context);
// Ok(output
// .files
// .into_iter()
// .map(|OutputFile { name, content }| GeneratedFileChunk {
// path: config.entities_path.join(name),
// content,
// priority: 0,
// })
// .collect::<Vec<_>>())
// }

View File

@@ -1,50 +0,0 @@
use super::column::Column;
use color_eyre::{eyre::eyre, Result};
use sea_schema::sea_query::{self, TableCreateStatement};
#[derive(Debug, Clone)]
pub struct Table {
pub name: String,
pub columns: Vec<Column>,
}
impl Table {
pub fn new(statement: TableCreateStatement) -> Result<Table> {
let table_name = match statement.get_table_name() {
Some(table_ref) => match table_ref {
sea_query::TableRef::Table(t)
| sea_query::TableRef::SchemaTable(_, t)
| sea_query::TableRef::DatabaseSchemaTable(_, _, t)
| sea_query::TableRef::TableAlias(t, _)
| sea_query::TableRef::SchemaTableAlias(_, t, _)
| sea_query::TableRef::DatabaseSchemaTableAlias(_, _, t, _) => t.to_string(),
_ => unimplemented!(),
},
None => return Err(eyre!("Table name not found")),
};
tracing::debug!(?table_name);
let columns_raw = statement.get_columns();
let indexes = statement.get_indexes();
for column in columns_raw {
tracing::debug!(?column);
}
for index in indexes {
tracing::debug!(?index);
}
let columns = columns_raw
.iter()
.map(|column| {
let name = column.get_column_name();
let index = indexes
.iter()
.find(|index| index.get_index_spec().get_column_names().contains(&name));
Column::new(column.clone(), index.cloned())
})
.collect::<Result<Vec<Column>>>()?;
tracing::debug!(?columns);
Ok(Table {
columns,
name: table_name,
})
}
}

View File

@@ -0,0 +1,9 @@
{% if comment_config %}{{config_delimiter}}
{{comment_config}}
{{config_delimiter}}{% endif %}
Table: {{table_name}}
Columns:
{{column_info_table}}
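
A sketch of rendering the annotate.comment template above with minijinja, using a context shaped like CommentContext; the concrete values are illustrative.

use minijinja::{context, Environment};

// Inline copy of the template file above so the example is self-contained.
const TEMPLATE: &str = r#"{% if comment_config %}{{config_delimiter}}
{{comment_config}}
{{config_delimiter}}{% endif %}
Table: {{table_name}}
Columns:
{{column_info_table}}"#;

fn main() -> Result<(), minijinja::Error> {
    let mut env = Environment::new();
    env.add_template("annotate.comment", TEMPLATE)?;
    let rendered = env.get_template("annotate.comment")?.render(context! {
        table_name => "user",
        comment_config => "  column_db_type = false",
        config_delimiter => "```",
        column_info_table => "id  i32  not null, primary key",
    })?;
    // pad_comment would then wrap this output in a /* ... */ block.
    println!("{rendered}");
    Ok(())
}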

View File

@@ -1,9 +0,0 @@
use {{entities_path}}::{{model_path}}::{ActiveModel, Model, Entity};
use sea_orm::ActiveModelBehavior;
#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}
impl Model {}
impl ActiveModel {}

View File

@@ -1,9 +0,0 @@
use {{prelude_path}}::*;
use sea_orm::ActiveModelBehavior;
#[async_trait::async_trait]
impl ActiveModelBehavior for {{active_model_name}} {}
impl {{model_name}} {}
impl {{active_model_name}} {}

View File

@@ -1,3 +0,0 @@
{{#each tables}}
pub use {{entities_path}}::{{model_name}}::{ActiveModel as {{active_model_name}}, Model as {{model_name}}, Entity as {{entity_name}}};
{{/each}}

View File

@@ -6,3 +6,4 @@ pub mod _entities;
pub mod user;
pub mod user;

View File

@@ -1,10 +1,20 @@
/* == Schema Information
* ```
* column_db_type = false
* ```
*
* Table: user
*
* Columns:
*
* Name RsType Attrs
* +==========+========+======================================+
* id i32 autoincrement, not null, primary key
* username String not null, unique key
* email String not null, unique key
* password String not null
* test String not null, unique key
* +----------+--------+--------------------------------------+
*/
use super::prelude::*;
use sea_orm::ActiveModelBehavior;
#[async_trait::async_trait]
impl ActiveModelBehavior for UserActiveModel {}
impl UserModel {}
impl UserActiveModel {}
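The column table embedded in the comment above is produced with comfy_table; a minimal sketch that approximates its layout follows. The preset string is the one the removed generator used, since the new code's preset sits outside the visible hunks, and the rows are illustrative.

use comfy_table::{Cell, ContentArrangement, Table};

fn main() {
    let mut table = Table::new();
    table
        // Borderless-style preset taken from the deleted ModelCommentGenerator.
        .load_preset(" -+=++ + ++")
        .set_content_arrangement(ContentArrangement::Dynamic)
        .set_header(vec!["Name", "RsType", "Attrs"]);
    table.set_width(80);
    table.add_row(vec![
        Cell::new("id"),
        Cell::new("i32"),
        Cell::new("autoincrement, not null, primary key"),
    ]);
    table.add_row(vec![
        Cell::new("username"),
        Cell::new("String"),
        Cell::new("not null, unique key"),
    ]);
    println!("{table}");
}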