rewrite generator

This commit is contained in:
2025-04-03 21:28:46 +04:00
parent 556ae626ae
commit 8805072573
10 changed files with 569 additions and 359 deletions

View File

@@ -8,7 +8,9 @@ clap = { version = "4.5.32", features = ["derive", "env"] }
color-eyre = "0.6.3"
comment-parser = "0.1.0"
figment = { version = "0.10.19", features = ["yaml"] }
handlebars = "6.3.2"
heck = "0.5.0"
include_dir = "0.7.4"
indicatif = "0.17.11"
inquire = "0.7.5"
prettytable = "0.10.0"
@@ -16,6 +18,7 @@ quote = "1.0.40"
sea-orm-codegen = "1.1.8"
sea-schema = { version = "0.16.1", features = ["sqlx-all"] }
serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.140"
serde_yaml = "0.9.34"
sqlx = { version = "0.8.3", features = ["mysql", "postgres", "sqlite"] }
syn = { version = "2.0.100", features = ["extra-traits", "full"] }

View File

@@ -104,6 +104,7 @@ pub struct Config {
pub db: DbConfig,
pub sea_orm: SeaOrmConfig,
pub output: OutputConfig,
pub templates: TemplateConfig,
}
#[derive(Deserialize, Serialize, Debug, Clone)]
@@ -167,6 +168,10 @@ pub enum DateTimeCrate {
Time,
Chrono,
}
/// User-supplied Handlebars template overrides; `None` falls back to the
/// built-in template (default is `model: None`).
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct TemplateConfig {
    // Override for the `model` template — presumably a path or inline template
    // source; TODO confirm how it is registered with Handlebars.
    pub model: Option<String>,
}
impl From<DateTimeCrate> for CodegenDateTimeCrate {
fn from(date_time_crate: DateTimeCrate) -> CodegenDateTimeCrate {
@@ -243,6 +248,7 @@ impl Default for Config {
output: OutputConfig {
path: PathBuf::from("./entities"),
},
templates: TemplateConfig { model: None },
}
}
}

View File

@@ -1,350 +0,0 @@
use core::time;
const HEADER: &str = r#"== Schema Information"#;
const COMMENTHEAD: &str = r#"/*"#;
const COMMENTBODY: &str = r#" *"#;
const COMMENTTAIL: &str = r#"*/"#;
use crate::{config::DateTimeCrate, Config};
use color_eyre::{
eyre::{eyre, ContextCompat, Report},
Result,
};
use comment_parser::{CommentParser, Event};
use prettytable::{format, row, Table};
use sea_orm_codegen::OutputFile;
use sea_schema::sea_query::{self, ColumnSpec, ColumnType, StringLen, TableCreateStatement};
use tokio::{fs, task::JoinSet};
use url::Url;
/// Discover the database schema behind `database_url` and return the optional
/// schema name plus one `TableCreateStatement` per (filtered) table.
///
/// Supported URL schemes: `mysql`, `sqlite`, `postgres`/`postgresql`.
///
/// # Errors
/// Fails when the URL is unparsable, when a non-SQLite URL carries no
/// database name in its path, or when connection/discovery fails.
pub async fn get_tables(
    database_url: String,
    config: &Config,
) -> Result<(Option<String>, Vec<TableCreateStatement>)> {
    let url = Url::parse(&database_url)?;
    tracing::trace!(?url);
    let is_sqlite = url.scheme() == "sqlite";
    // Table-name predicate derived from the include/exclude config.
    let filter_tables = config.sea_orm.entity.tables.get_filter();
    // SQLite URLs point at a file; every other backend needs a database name
    // as the first path segment.
    let database_name: &str = (if !is_sqlite {
        let database_name = url
            .path_segments()
            .context("No database name as part of path")?
            .next()
            .context("No database name as part of path")?;
        if database_name.is_empty() {
            return Err(eyre!("Database path name is empty"));
        }
        Ok::<&str, Report>(database_name)
    } else {
        Ok(Default::default())
    })?;
    let (schema_name, table_stmts) = match url.scheme() {
        "mysql" => {
            use sea_schema::mysql::discovery::SchemaDiscovery;
            use sqlx::MySql;
            tracing::info!("Connecting to MySQL");
            let connection = sqlx_connect::<MySql>(
                config.db.max_connections,
                config.db.acquire_timeout,
                url.as_str(),
                None,
            )
            .await?;
            tracing::info!("Discovering schema");
            let schema_discovery = SchemaDiscovery::new(connection, database_name);
            let schema = schema_discovery.discover().await?;
            let table_stmts = schema
                .tables
                .into_iter()
                .filter(|schema| filter_tables(&schema.info.name))
                .map(|schema| schema.write())
                .collect();
            (None, table_stmts)
        }
        "sqlite" => {
            use sea_schema::sqlite::discovery::SchemaDiscovery;
            use sqlx::Sqlite;
            tracing::info!("Connecting to SQLite");
            let connection = sqlx_connect::<Sqlite>(
                config.db.max_connections,
                config.db.acquire_timeout,
                url.as_str(),
                None,
            )
            .await?;
            tracing::info!("Discovering schema");
            let schema_discovery = SchemaDiscovery::new(connection);
            let schema = schema_discovery
                .discover()
                .await?
                .merge_indexes_into_table();
            let table_stmts = schema
                .tables
                .into_iter()
                .filter(|schema| filter_tables(&schema.name))
                .map(|schema| schema.write())
                .collect();
            (None, table_stmts)
        }
        // BUG FIX: this arm read "potgresql" (typo), so URLs using the common
        // "postgresql" scheme fell through to unimplemented!().
        "postgres" | "postgresql" => {
            use sea_schema::postgres::discovery::SchemaDiscovery;
            use sqlx::Postgres;
            tracing::info!("Connecting to Postgres");
            let schema = &config.db.database_schema.as_deref().unwrap_or("public");
            let connection = sqlx_connect::<Postgres>(
                config.db.max_connections,
                config.db.acquire_timeout,
                url.as_str(),
                Some(schema),
            )
            .await?;
            tracing::info!("Discovering schema");
            let schema_discovery = SchemaDiscovery::new(connection, schema);
            let schema = schema_discovery.discover().await?;
            tracing::info!(?schema);
            let table_stmts = schema
                .tables
                .into_iter()
                .filter(|schema| filter_tables(&schema.info.name))
                .map(|schema| schema.write())
                .collect();
            (config.db.database_schema.clone(), table_stmts)
        }
        _ => unimplemented!("{} is not supported", url.scheme()),
    };
    tracing::info!("Schema discovered");
    Ok((schema_name, table_stmts))
}
/// Build a sqlx connection pool for any backend `DB`.
///
/// * `acquire_timeout` is in seconds (fed to `Duration::from_secs`).
/// * `schema` — Postgres-only: `search_path` to set on every new pooled
///   connection; pass `None` for MySQL/SQLite.
async fn sqlx_connect<DB>(
    max_connections: u32,
    acquire_timeout: u64,
    url: &str,
    schema: Option<&str>,
) -> Result<sqlx::Pool<DB>>
where
    DB: sqlx::Database,
    for<'a> &'a mut <DB as sqlx::Database>::Connection: sqlx::Executor<'a>,
{
    let mut pool_options = sqlx::pool::PoolOptions::<DB>::new()
        .max_connections(max_connections)
        .acquire_timeout(time::Duration::from_secs(acquire_timeout));
    // Set search_path for Postgres, E.g. Some("public") by default
    // MySQL & SQLite connection initialize with schema `None`
    if let Some(schema) = schema {
        let sql = format!("SET search_path = '{schema}'");
        pool_options = pool_options.after_connect(move |conn, _| {
            // The hook runs once per new connection; clone the statement into
            // each spawned future.
            let sql = sql.clone();
            Box::pin(async move {
                sqlx::Executor::execute(conn, sql.as_str())
                    .await
                    .map(|_| ())
            })
        });
    }
    pool_options.connect(url).await.map_err(Into::into)
}
/// Generate one annotated model file per table, running all tables
/// concurrently in a `JoinSet`.
///
/// Per table: render the schema-information header; if `<table>.rs` already
/// exists under the output path, replace a previously generated header block
/// (detected by `HEADER`) or prepend the new one; otherwise the file content
/// is just the header.
pub async fn generate_models(
    tables: Vec<TableCreateStatement>,
    config: Config,
) -> Result<Vec<OutputFile>> {
    tracing::debug!(?tables);
    let output_path = config.output.path.clone();
    let files = tables
        .into_iter()
        .map(|table| {
            // Each spawned task needs owned copies.
            let output_path = output_path.clone();
            let config = config.clone();
            async move {
                // Extract the bare table identifier from any TableRef variant.
                let table_name = match table.get_table_name() {
                    Some(table_ref) => match table_ref {
                        sea_query::TableRef::Table(t)
                        | sea_query::TableRef::SchemaTable(_, t)
                        | sea_query::TableRef::DatabaseSchemaTable(_, _, t)
                        | sea_query::TableRef::TableAlias(t, _)
                        | sea_query::TableRef::SchemaTableAlias(_, t, _)
                        | sea_query::TableRef::DatabaseSchemaTableAlias(_, _, t, _) => {
                            t.to_string()
                        }
                        _ => unimplemented!(),
                    },
                    None => return Err(eyre!("Table name not found")),
                };
                let table_str = generate_table(table, config.clone()).await?;
                tracing::debug!(?table_str);
                let filename = format!("{}.rs", table_name);
                let file_path = output_path.join(&filename);
                let exists = file_path.exists();
                let content = if exists {
                    let mut file_content = fs::read_to_string(file_path).await?;
                    // Walk the file's block comments looking for a previously
                    // generated schema header.
                    let rules = comment_parser::get_syntax("rust").unwrap();
                    let comments = CommentParser::new(&file_content, rules);
                    let mut found = false;
                    for comment in comments {
                        if let Event::BlockComment(a, b) = comment {
                            tracing::debug!(?a, ?b);
                            if b.contains(HEADER) {
                                found = true;
                                // `a` is the raw comment text; swap it for the
                                // freshly rendered header.
                                file_content = file_content.replace(a, &table_str);
                                tracing::debug!("Found header");
                                break;
                            }
                        }
                    }
                    if found {
                        Ok::<String, Report>(file_content)
                    } else {
                        // No previous header: prepend the new one.
                        let merged_content = format!("{}\n\n{}", table_str, file_content);
                        Ok::<String, Report>(merged_content)
                    }
                } else {
                    Ok::<String, Report>(table_str)
                }?;
                Ok(OutputFile {
                    name: filename,
                    content,
                })
            }
        })
        .collect::<JoinSet<Result<OutputFile>>>();
    // Await every task; the collect short-circuits on the first error.
    let files = files
        .join_all()
        .await
        .into_iter()
        .collect::<Result<Vec<OutputFile>>>()?;
    Ok(files)
}
/// Render the "Schema Information" block comment for one table: the header
/// line, the table name, and a column table (Name/Type/RustType/Attributes)
/// wrapped in `/* ... */` comment markers.
async fn generate_table(table: TableCreateStatement, config: Config) -> Result<String> {
    let mut string = String::new();
    string.push_str(format!("{COMMENTHEAD} {HEADER}\n").as_str());
    // Extract the bare table identifier from any TableRef variant.
    let table_name = match table.get_table_name() {
        Some(table_ref) => match table_ref {
            sea_query::TableRef::Table(t)
            | sea_query::TableRef::SchemaTable(_, t)
            | sea_query::TableRef::DatabaseSchemaTable(_, _, t)
            | sea_query::TableRef::TableAlias(t, _)
            | sea_query::TableRef::SchemaTableAlias(_, t, _)
            | sea_query::TableRef::DatabaseSchemaTableAlias(_, _, t, _) => t.to_string(),
            _ => unimplemented!(),
        },
        None => return Err(eyre!("Table name not found")),
    };
    let mut inner = String::new();
    inner.push_str(format!("{COMMENTBODY}\n Table name: {table_name}\n\n\n").as_str());
    // unwrap() is safe: the string pushed above always ends with '\n'.
    inner = inner.strip_suffix('\n').unwrap().to_string();
    let mut ptable = Table::new();
    // Borderless layout: separators only under the title row and at the bottom.
    let format = format::FormatBuilder::new()
        .column_separator(' ')
        .borders(' ')
        .separators(
            &[
                format::LinePosition::Bottom,
                format::LinePosition::Title,
                // format::LinePosition::Top,
            ],
            format::LineSeparator::default(),
        )
        .padding(1, 1)
        .build();
    ptable.set_format(format);
    ptable.set_titles(row!["Name", "Type", "RustType", "Attributes"]);
    let indexes = table.get_indexes();
    tracing::info!(?indexes);
    for column in table.get_columns() {
        let name = column.get_column_name();
        // Columns with no resolvable type are silently skipped.
        if let Some(column_type) = column.get_column_type() {
            let column_type_rust =
                type_to_rust_string(column_type, config.sea_orm.entity.date_time_crate.clone());
            let column_type =
                type_to_string(column_type, config.sea_orm.entity.date_time_crate.clone());
            let attrs = attrs_to_string(column.get_column_spec());
            ptable.add_row(row![name, column_type, column_type_rust, attrs]);
        }
    }
    inner.push_str(ptable.to_string().as_str());
    // Prefix every body line with the " *" comment marker.
    string.push_str(
        inner
            .replace("\n", format!("\n{} ", COMMENTBODY).as_str())
            .as_str(),
    );
    string.push_str(format!("\n{COMMENTTAIL}\n").as_str());
    Ok(string)
}
/// Human-readable SQL type label for the schema-information table.
///
/// `date_time_crate` is only logged for now; date/time column types are not
/// handled yet and hit the `unimplemented!` arm. (Parameter renamed from the
/// misspelled `date_time_create` for consistency with `DateTimeCrate`.)
fn type_to_string(column: &ColumnType, date_time_crate: DateTimeCrate) -> String {
    tracing::debug!(?column, ?date_time_crate);
    match column {
        // NOTE(review): "Char({n}max)" renders e.g. "Char(5max)" — confirm the
        // intended format.
        ColumnType::Char(Some(max)) => format!("Char({}max)", max),
        ColumnType::Char(None) => "Char".to_owned(),
        ColumnType::String(StringLen::None) => "String".to_owned(),
        ColumnType::String(StringLen::Max) => "String(Max)".to_owned(),
        ColumnType::String(StringLen::N(len)) => format!("String({}max)", len),
        ColumnType::Text => "Text".to_owned(),
        ColumnType::Integer => "Integer".to_owned(),
        ColumnType::Custom(_) => "String".to_owned(),
        _ => unimplemented!(),
    }
}
/// Rust-side type label for a SQL column type.
///
/// `date_time_crate` is only logged for now; date/time types are not handled
/// yet. (Parameter renamed from the misspelled `date_time_create`.)
fn type_to_rust_string(column: &ColumnType, date_time_crate: DateTimeCrate) -> String {
    tracing::debug!(?column, ?date_time_crate);
    match column {
        ColumnType::Char(_) | ColumnType::String(_) | ColumnType::Text | ColumnType::Custom(_) => {
            "String".to_owned()
        }
        ColumnType::Integer => "i32".to_owned(),
        _ => unimplemented!(),
    }
}
/// Join the column specs shown in the header (keys, autoincrement,
/// nullability) into a comma-separated list, e.g. "primary key, not null".
/// Other specs (defaults, comments, ...) are ignored.
///
/// Takes `&[ColumnSpec]` instead of `&Vec<ColumnSpec>` (idiomatic; existing
/// `&Vec` call sites still coerce).
fn attrs_to_string(column: &[ColumnSpec]) -> String {
    tracing::debug!(?column);
    column
        .iter()
        .filter_map(|c| match c {
            ColumnSpec::PrimaryKey => Some("primary key"),
            ColumnSpec::UniqueKey => Some("unique key"),
            ColumnSpec::AutoIncrement => Some("autoincrement"),
            ColumnSpec::NotNull => Some("not null"),
            _ => None,
        })
        .map(|s| s.to_string())
        .collect::<Vec<String>>()
        .join(", ")
}
//

32
src/generator/column.rs Normal file
View File

@@ -0,0 +1,32 @@
use color_eyre::{eyre::ContextCompat, Result};
use sea_schema::sea_query::{ColumnDef, ColumnSpec, ColumnType, IndexCreateStatement};
/// A single table column with its resolved type and attribute specs.
#[derive(Clone, Debug)]
pub struct Column {
    // Column identifier as reported by schema discovery.
    pub name: String,
    // SQL column type (sea_query representation).
    pub col_type: ColumnType,
    // Column specs (not null, primary key, ...), including flags merged in
    // from a covering index.
    pub attrs: Vec<ColumnSpec>,
}
impl Column {
    /// Build a `Column` from a raw `ColumnDef`, merging `UniqueKey` /
    /// `PrimaryKey` flags from `index` (the index covering this column, if
    /// any).
    ///
    /// # Errors
    /// Fails when the column definition carries no type.
    pub fn new(column: ColumnDef, index: Option<IndexCreateStatement>) -> Result<Self> {
        let name = column.get_column_name();
        let col_type = column
            .get_column_type()
            .context("Unable to get column type")?
            .clone();
        let mut attrs = column.get_column_spec().clone();
        if let Some(index) = index {
            if index.is_unique_key() {
                attrs.push(ColumnSpec::UniqueKey)
            }
            if index.is_primary_key() {
                attrs.push(ColumnSpec::PrimaryKey);
            }
        }
        Ok(Column {
            name: name.to_string(),
            col_type,
            // `attrs` is already an owned Vec; the previous `attrs.to_vec()`
            // cloned it a second time for nothing.
            attrs,
        })
    }
}

150
src/generator/discover.rs Normal file
View File

@@ -0,0 +1,150 @@
use core::time;
use color_eyre::eyre::{eyre, Context, ContextCompat, Report, Result};
use sea_schema::sea_query::TableCreateStatement;
use url::Url;
use crate::config::Config;
/// Discover the database schema behind `database_url` and return the optional
/// schema name plus one `TableCreateStatement` per (filtered) table.
///
/// Supported URL schemes: `mysql`, `sqlite`, `postgres`/`postgresql`.
///
/// # Errors
/// Fails when the URL is unparsable, when a non-SQLite URL carries no
/// database name in its path, or when connection/discovery fails.
pub async fn get_tables(
    database_url: String,
    config: &Config,
) -> Result<(Option<String>, Vec<TableCreateStatement>)> {
    let url = Url::parse(&database_url)?;
    tracing::trace!(?url);
    let is_sqlite = url.scheme() == "sqlite";
    // Table-name predicate derived from the include/exclude config.
    let filter_tables = config.sea_orm.entity.tables.get_filter();
    // SQLite URLs point at a file; every other backend needs a database name
    // as the first path segment.
    let database_name: &str = (if !is_sqlite {
        let database_name = url
            .path_segments()
            .context("No database name as part of path")?
            .next()
            .context("No database name as part of path")?;
        if database_name.is_empty() {
            return Err(eyre!("Database path name is empty"));
        }
        Ok::<&str, Report>(database_name)
    } else {
        Ok(Default::default())
    })?;
    let (schema_name, table_stmts) = match url.scheme() {
        "mysql" => {
            use sea_schema::mysql::discovery::SchemaDiscovery;
            use sqlx::MySql;
            tracing::info!("Connecting to MySQL");
            let connection = sqlx_connect::<MySql>(
                config.db.max_connections,
                config.db.acquire_timeout,
                url.as_str(),
                None,
            )
            .await?;
            tracing::info!("Discovering schema");
            let schema_discovery = SchemaDiscovery::new(connection, database_name);
            let schema = schema_discovery.discover().await?;
            let table_stmts = schema
                .tables
                .into_iter()
                .filter(|schema| filter_tables(&schema.info.name))
                .map(|schema| schema.write())
                .collect();
            (None, table_stmts)
        }
        "sqlite" => {
            use sea_schema::sqlite::discovery::SchemaDiscovery;
            use sqlx::Sqlite;
            tracing::info!("Connecting to SQLite");
            let connection = sqlx_connect::<Sqlite>(
                config.db.max_connections,
                config.db.acquire_timeout,
                url.as_str(),
                None,
            )
            .await?;
            tracing::info!("Discovering schema");
            let schema_discovery = SchemaDiscovery::new(connection);
            let schema = schema_discovery
                .discover()
                .await?
                .merge_indexes_into_table();
            let table_stmts = schema
                .tables
                .into_iter()
                .filter(|schema| filter_tables(&schema.name))
                .map(|schema| schema.write())
                .collect();
            (None, table_stmts)
        }
        // BUG FIX: this arm read "potgresql" (typo), so URLs using the common
        // "postgresql" scheme fell through to unimplemented!().
        "postgres" | "postgresql" => {
            use sea_schema::postgres::discovery::SchemaDiscovery;
            use sqlx::Postgres;
            tracing::info!("Connecting to Postgres");
            let schema = &config.db.database_schema.as_deref().unwrap_or("public");
            let connection = sqlx_connect::<Postgres>(
                config.db.max_connections,
                config.db.acquire_timeout,
                url.as_str(),
                Some(schema),
            )
            .await?;
            tracing::info!("Discovering schema");
            let schema_discovery = SchemaDiscovery::new(connection, schema);
            let schema = schema_discovery.discover().await?;
            tracing::info!(?schema);
            let table_stmts = schema
                .tables
                .into_iter()
                .filter(|schema| filter_tables(&schema.info.name))
                .map(|schema| schema.write())
                .collect();
            (config.db.database_schema.clone(), table_stmts)
        }
        _ => unimplemented!("{} is not supported", url.scheme()),
    };
    tracing::info!("Schema discovered");
    Ok((schema_name, table_stmts))
}
/// Build a sqlx connection pool for any backend `DB`.
///
/// * `acquire_timeout` is in seconds (fed to `Duration::from_secs`).
/// * `schema` — Postgres-only: `search_path` to set on every new pooled
///   connection; pass `None` for MySQL/SQLite.
async fn sqlx_connect<DB>(
    max_connections: u32,
    acquire_timeout: u64,
    url: &str,
    schema: Option<&str>,
) -> Result<sqlx::Pool<DB>>
where
    DB: sqlx::Database,
    for<'a> &'a mut <DB as sqlx::Database>::Connection: sqlx::Executor<'a>,
{
    let mut pool_options = sqlx::pool::PoolOptions::<DB>::new()
        .max_connections(max_connections)
        .acquire_timeout(time::Duration::from_secs(acquire_timeout));
    // Set search_path for Postgres, E.g. Some("public") by default
    // MySQL & SQLite connection initialize with schema `None`
    if let Some(schema) = schema {
        let sql = format!("SET search_path = '{schema}'");
        pool_options = pool_options.after_connect(move |conn, _| {
            // The hook runs once per new connection; clone the statement into
            // each spawned future.
            let sql = sql.clone();
            Box::pin(async move {
                sqlx::Executor::execute(conn, sql.as_str())
                    .await
                    .map(|_| ())
            })
        });
    }
    pool_options.connect(url).await.map_err(Into::into)
}

95
src/generator/file.rs Normal file
View File

@@ -0,0 +1,95 @@
use std::path::PathBuf;
use crate::config::Config;
use super::table::Table;
use color_eyre::Result;
use handlebars::Handlebars;
use prettytable::{format, row, Table as PTable};
use sea_orm_codegen::OutputFile;
use serde::Serialize;
use tokio::fs;
const HEADER: &str = r#"== Schema Information"#;
const COMMENTHEAD: &str = r#"/*"#;
const COMMENTBODY: &str = r#" *"#;
const COMMENTTAIL: &str = r#"*/"#;
/// Generates one annotated entity source file for a single discovered table.
#[derive(Debug, Clone)]
pub struct FileGenerator {
    // Output file name, "<table>.rs".
    filename: String,
    // Parsed table (name + columns) this generator renders.
    table: Table,
}
/// Variables handed to the Handlebars `model` template.
#[derive(Debug, Clone, Serialize)]
pub struct FileContext {
    // Whether the template should emit a `use {{prelude}};` line.
    has_prelude: bool,
    // Import path rendered when `has_prelude` is true.
    prelude: String,
}
impl FileGenerator {
    /// Wrap a discovered `Table`, deriving the output filename `<table>.rs`.
    pub fn new(table: Table) -> Result<FileGenerator> {
        let filename = format!("{}.rs", table.name);
        Ok(FileGenerator { table, filename })
    }

    /// Produce the `OutputFile` for this table under `config.output.path`.
    pub async fn build_file<'a>(
        &self,
        config: &Config,
        handlebars: &'a Handlebars<'a>,
    ) -> Result<OutputFile> {
        let filepath = config.output.path.join(&self.filename);
        Ok(OutputFile {
            // to_string_lossy instead of to_str().unwrap(): never panics on a
            // non-UTF-8 path.
            name: filepath.to_string_lossy().into_owned(),
            content: self.generate_file(config, handlebars).await?,
        })
    }

    /// Render the file content. For a new file: the schema header followed by
    /// the rendered `model` template. For an existing file the current
    /// content is returned unchanged — header merging is not implemented yet.
    pub async fn generate_file<'a>(
        &self,
        config: &Config,
        handlebars: &'a Handlebars<'a>,
    ) -> Result<String> {
        let filepath = config.output.path.join(&self.filename);
        if filepath.exists() {
            // TODO: locate and refresh a previously generated schema header
            // inside the existing file instead of passing it through as-is.
            Ok(fs::read_to_string(filepath).await?)
        } else {
            // The header is only needed on the fresh-file path, so build it
            // here rather than unconditionally up front.
            let generated_header = self.generate_header(config).await?;
            let file_context = FileContext {
                has_prelude: false,
                prelude: String::new(),
            };
            let content = handlebars.render("model", &file_context)?;
            Ok(format!("{}{}", generated_header, content))
        }
    }

    /// Build the "Schema Information" comment header.
    ///
    /// Still a stub: the column table layout is configured but no rows are
    /// added and an empty string is returned. `_config` is reserved for type
    /// rendering options (date/time crate etc.).
    pub async fn generate_header(&self, _config: &Config) -> Result<String> {
        let mut column_info_table = PTable::new();
        // Borderless layout: separators only under the title and at the bottom.
        let format = format::FormatBuilder::new()
            .column_separator(' ')
            .borders(' ')
            .separators(
                &[
                    format::LinePosition::Bottom,
                    format::LinePosition::Title,
                ],
                format::LineSeparator::default(),
            )
            .padding(1, 1)
            .build();
        column_info_table.set_format(format);
        column_info_table.set_titles(row!["Name", "Type", "RustType", "Attributes"]);
        // TODO: add one row per `self.table.columns` entry (name, SQL type,
        // Rust type, attributes) and wrap the rendered table in the
        // COMMENTHEAD/COMMENTBODY/COMMENTTAIL markers.
        Ok(String::new())
    }
}

213
src/generator/mod.rs Normal file
View File

@@ -0,0 +1,213 @@
pub mod column;
pub mod discover;
pub mod file;
pub mod table;
use core::time;
use crate::{config::DateTimeCrate, Config};
use color_eyre::{
eyre::{eyre, ContextCompat, Report},
Result,
};
use comment_parser::{CommentParser, Event};
use handlebars::Handlebars;
use prettytable::{format, row};
use sea_orm_codegen::OutputFile;
use sea_schema::sea_query::{self, ColumnSpec, ColumnType, StringLen, TableCreateStatement};
use table::Table;
use tokio::{fs, task::JoinSet};
use url::Url;
/// Convert discovered `TableCreateStatement`s into per-table output files,
/// rendering each through the registered `model` Handlebars template.
///
/// # Errors
/// Fails when a table statement cannot be parsed into a `Table` or when file
/// generation/rendering fails.
pub async fn update_files<'a>(
    tables: Vec<TableCreateStatement>,
    config: Config,
    handlebars: &'a Handlebars<'a>,
) -> Result<Vec<OutputFile>> {
    let tables = tables
        .into_iter()
        // `table` is owned here; the previous `table.clone()` was a pointless
        // extra copy.
        .map(|table| Table::new(table, config.clone()))
        .collect::<Result<Vec<Table>>>()?;
    let mut files = Vec::with_capacity(tables.len());
    for table in tables {
        let generator = file::FileGenerator::new(table)?;
        files.push(generator.build_file(&config, handlebars).await?);
    }
    // BUG FIX: previously returned `Ok(Vec::new())`, silently discarding every
    // generated file.
    Ok(files)
}
// pub async fn generate_models(
// tables: Vec<TableCreateStatement>,
// config: Config,
// ) -> Result<Vec<OutputFile>> {
// tracing::debug!(?tables);
// let output_path = config.output.path.clone();
// let files = tables
// .into_iter()
// .map(|table| {
// let output_path = output_path.clone();
// let config = config.clone();
// async move {
// let table_name = match table.get_table_name() {
// Some(table_ref) => match table_ref {
// sea_query::TableRef::Table(t)
// | sea_query::TableRef::SchemaTable(_, t)
// | sea_query::TableRef::DatabaseSchemaTable(_, _, t)
// | sea_query::TableRef::TableAlias(t, _)
// | sea_query::TableRef::SchemaTableAlias(_, t, _)
// | sea_query::TableRef::DatabaseSchemaTableAlias(_, _, t, _) => {
// t.to_string()
// }
// _ => unimplemented!(),
// },
// None => return Err(eyre!("Table name not found")),
// };
// let table_str = generate_table(table, config.clone()).await?;
// tracing::debug!(?table_str);
// let filename = format!("{}.rs", table_name);
// let file_path = output_path.join(&filename);
// let exists = file_path.exists();
// let content = if exists {
// let mut file_content = fs::read_to_string(file_path).await?;
// let rules = comment_parser::get_syntax("rust").unwrap();
// let comments = CommentParser::new(&file_content, rules);
// let mut found = false;
// for comment in comments {
// if let Event::BlockComment(a, b) = comment {
// tracing::debug!(?a, ?b);
// if b.contains(HEADER) {
// found = true;
// file_content = file_content.replace(a, &table_str);
// tracing::debug!("Found header");
// break;
// }
// }
// }
// if found {
// Ok::<String, Report>(file_content)
// } else {
// let merged_content = format!("{}\n\n{}", table_str, file_content);
// Ok::<String, Report>(merged_content)
// }
// } else {
// Ok::<String, Report>(table_str)
// }?;
//
// Ok(OutputFile {
// name: filename,
// content,
// })
// }
// })
// .collect::<JoinSet<Result<OutputFile>>>();
// let files = files
// .join_all()
// .await
// .into_iter()
// .collect::<Result<Vec<OutputFile>>>()?;
// Ok(files)
// }
// async fn generate_table(table: TableCreateStatement, config: Config) -> Result<String> {
// let mut string = String::new();
// string.push_str(format!("{COMMENTHEAD} {HEADER}\n").as_str());
//
// let table_name = match table.get_table_name() {
// Some(table_ref) => match table_ref {
// sea_query::TableRef::Table(t)
// | sea_query::TableRef::SchemaTable(_, t)
// | sea_query::TableRef::DatabaseSchemaTable(_, _, t)
// | sea_query::TableRef::TableAlias(t, _)
// | sea_query::TableRef::SchemaTableAlias(_, t, _)
// | sea_query::TableRef::DatabaseSchemaTableAlias(_, _, t, _) => t.to_string(),
// _ => unimplemented!(),
// },
// None => return Err(eyre!("Table name not found")),
// };
// let mut inner = String::new();
// inner.push_str(format!("{COMMENTBODY}\n Table name: {table_name}\n\n\n").as_str());
// inner = inner.strip_suffix('\n').unwrap().to_string();
// let mut ptable = Table::new();
// let format = format::FormatBuilder::new()
// .column_separator(' ')
// .borders(' ')
// .separators(
// &[
// format::LinePosition::Bottom,
// format::LinePosition::Title,
// // format::LinePosition::Top,
// ],
// format::LineSeparator::default(),
// )
// .padding(1, 1)
// .build();
// ptable.set_format(format);
// ptable.set_titles(row!["Name", "Type", "RustType", "Attributes"]);
// let indexes = table.get_indexes();
// tracing::info!(?indexes);
// for column in table.get_columns() {
// let name = column.get_column_name();
// if let Some(column_type) = column.get_column_type() {
// let column_type_rust =
// type_to_rust_string(column_type, config.sea_orm.entity.date_time_crate.clone());
// let column_type =
// type_to_string(column_type, config.sea_orm.entity.date_time_crate.clone());
// let attrs = attrs_to_string(column.get_column_spec());
// ptable.add_row(row![name, column_type, column_type_rust, attrs]);
// }
// }
// inner.push_str(ptable.to_string().as_str());
//
// string.push_str(
// inner
// .replace("\n", format!("\n{} ", COMMENTBODY).as_str())
// .as_str(),
// );
// string.push_str(format!("\n{COMMENTTAIL}\n").as_str());
// Ok(string)
// }
//
// fn type_to_string(column: &ColumnType, date_time_create: DateTimeCrate) -> String {
// tracing::debug!(?column, ?date_time_create);
// match column {
// ColumnType::Char(Some(max)) => format!("Char({}max)", max),
// ColumnType::Char(None) => "Char".to_owned(),
// ColumnType::String(StringLen::None) => "String".to_owned(),
// ColumnType::String(StringLen::Max) => "String(Max)".to_owned(),
// ColumnType::String(StringLen::N(len)) => format!("String({}max)", len),
// ColumnType::Text => "Text".to_owned(),
// ColumnType::Integer => "Integer".to_owned(),
//
// ColumnType::Custom(v) => v.to_string(),
// _ => unimplemented!(),
// }
// }
// fn type_to_rust_string(column: &ColumnType, date_time_create: DateTimeCrate) -> String {
// tracing::debug!(?column, ?date_time_create);
// match column {
// ColumnType::Char(_) | ColumnType::String(_) | ColumnType::Text | ColumnType::Custom(_) => {
// "String".to_owned()
// }
// ColumnType::Integer => "i32".to_owned(),
// _ => unimplemented!(),
// }
// }
//
// fn attrs_to_string(column: &Vec<ColumnSpec>) -> String {
// tracing::debug!(?column);
// column
// .iter()
// .filter_map(|c| match c {
// ColumnSpec::PrimaryKey => Some("primary key"),
// ColumnSpec::UniqueKey => Some("unique key"),
// ColumnSpec::AutoIncrement => Some("autoincrement"),
// ColumnSpec::NotNull => Some("not null"),
// _ => None,
// })
// .map(|s| s.to_string())
// .collect::<Vec<String>>()
// .join(", ")
// }
//
// //

46
src/generator/table.rs Normal file
View File

@@ -0,0 +1,46 @@
use super::column::Column;
use crate::config::Config;
use color_eyre::{eyre::eyre, Result};
use sea_schema::sea_query::{self, ColumnDef, IndexCreateStatement, TableCreateStatement};
use tracing::instrument;
/// A discovered table: its name plus the parsed columns.
#[derive(Debug, Clone)]
pub struct Table {
    // Bare table identifier (no schema/database qualifier).
    pub name: String,
    // Parsed columns with index-derived key flags merged in.
    pub columns: Vec<Column>,
}
impl Table {
    /// Build a `Table` from a discovered `TableCreateStatement`, pairing each
    /// column with the first index that covers it (so key/uniqueness flags can
    /// be merged into the column specs).
    ///
    /// `_config` is currently unused (renamed to silence the warning); kept in
    /// the signature for upcoming per-table options.
    ///
    /// # Errors
    /// Fails when the statement has no table name or a column has no type.
    pub fn new(statement: TableCreateStatement, _config: Config) -> Result<Table> {
        // Extract the bare table identifier from any TableRef variant.
        let table_name = match statement.get_table_name() {
            Some(table_ref) => match table_ref {
                sea_query::TableRef::Table(t)
                | sea_query::TableRef::SchemaTable(_, t)
                | sea_query::TableRef::DatabaseSchemaTable(_, _, t)
                | sea_query::TableRef::TableAlias(t, _)
                | sea_query::TableRef::SchemaTableAlias(_, t, _)
                | sea_query::TableRef::DatabaseSchemaTableAlias(_, _, t, _) => t.to_string(),
                _ => unimplemented!(),
            },
            None => return Err(eyre!("Table name not found")),
        };
        tracing::debug!(?table_name);
        let columns_raw = statement.get_columns();
        let indexes = statement.get_indexes();
        let columns = columns_raw
            .iter()
            .map(|column| {
                let name = column.get_column_name();
                // First index whose column list mentions this column, if any.
                let index = indexes
                    .iter()
                    .find(|index| index.get_index_spec().get_column_names().contains(&name));
                Column::new(column.clone(), index.cloned())
            })
            .collect::<Result<Vec<Column>>>()?;
        tracing::debug!(?columns);
        Ok(Table {
            columns,
            name: table_name,
        })
    }
}

View File

@@ -1,6 +1,5 @@
mod config;
mod generate;
use std::{path::PathBuf, str::FromStr};
mod generator;
use clap::Parser;
use color_eyre::{eyre::eyre, Report, Result};
@@ -36,9 +35,12 @@ async fn main() -> Result<()> {
.extract()?;
tracing::info!(?config);
tracing::info!(?args);
let mut handlebars = Handlebars::new();
let output_dir = &config.output.path;
let output_internal_entities = output_dir.join("_entities");
let (_, table_stmts) = generate::get_tables(args.database_url, &config).await?;
let (_, table_stmts) = generator::discover::get_tables(args.database_url, &config).await?;
let writer_context = config.clone().into();
let output = EntityTransformer::transform(table_stmts.clone())?.generate(&writer_context);
let mut files = output
@@ -46,12 +48,13 @@ async fn main() -> Result<()> {
.into_iter()
.map(|OutputFile { name, content }| (output_internal_entities.join(name), content))
.collect::<Vec<_>>();
let generate_files = generate::generate_models(table_stmts, config.clone())
.await?
.into_iter()
.map(|OutputFile { name, content }| (output_dir.join(name), content))
.collect::<Vec<_>>();
files.extend(generate_files);
generator::update_files(table_stmts, config.clone(), &handlebars).await?;
// let generate_files = generator::generate_models(table_stmts, config.clone())
// .await?
// .into_iter()
// .map(|OutputFile { name, content }| (output_dir.join(name), content))
// .collect::<Vec<_>>();
// files.extend(generate_files);
tracing::info!("Generated {} files", files.len());
fs::create_dir_all(&output_internal_entities).await?;
let progress_bar = ProgressBar::new((files.len() * 2) as u64)

View File

@@ -0,0 +1,12 @@
use {{entities_path}}::{ActiveModel, Model, Entity};
{{#if has_prelude}}
use {{prelude}};
{{/if}}

impl Model {
}

impl ActiveModel {
}