restructure
@@ -1,6 +1,6 @@
use core::time;

use color_eyre::eyre::{eyre, Context, ContextCompat, Report, Result};
use color_eyre::eyre::{eyre, ContextCompat, Report, Result};
use sea_schema::sea_query::TableCreateStatement;
use url::Url;

@@ -59,8 +59,6 @@ pub async fn get_tables(
.tables
.into_iter()
.filter(|schema| filter(&schema.info.name))
// .filter(|schema| filter_hidden_tables(&schema.info.name))
// .filter(|schema| filter_skip_tables(&schema.info.name))
.map(|schema| schema.write())
.collect();
(table_stmts, DbType::MySql)
@@ -88,8 +86,6 @@ pub async fn get_tables(
.tables
.into_iter()
.filter(|schema| filter(&schema.name))
// .filter(|schema| filter_hidden_tables(&schema.name))
// .filter(|schema| filter_skip_tables(&schema.name))
.map(|schema| schema.write())
.collect();
(table_stmts, DbType::Sqlite)
@@ -118,8 +114,6 @@ pub async fn get_tables(
.tables
.into_iter()
.filter(|schema| filter(&schema.info.name))
// .filter(|schema| filter_hidden_tables(&schema.info.name))
// .filter(|schema| filter_skip_tables(&schema.info.name))
.map(|schema| schema.write())
.collect();
(table_stmts, DbType::Postgres)
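Editor's note: in all three branches above, `filter` is the table-name predicate returned by `config.sea_orm.entity.tables.get_filter()` (see the `generate` hunk further down). The predicate's implementation is not part of this diff; the following is only a minimal sketch, assuming a hypothetical include/exclude config:

// Hypothetical config type; the real one in this repo may differ.
pub struct TablesFilterConfig {
    pub include: Vec<String>, // empty means "include everything"
    pub exclude: Vec<String>,
}

impl TablesFilterConfig {
    // Returns a predicate usable as `.filter(|schema| filter(&schema.info.name))`.
    pub fn get_filter(&self) -> impl Fn(&str) -> bool + '_ {
        move |name: &str| {
            let included = self.include.is_empty() || self.include.iter().any(|t| t.as_str() == name);
            let excluded = self.exclude.iter().any(|t| t.as_str() == name);
            included && !excluded
        }
    }
}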
@@ -36,3 +36,47 @@ pub fn combine_chunks(chunks: Vec<GeneratedFileChunk>) -> Result<Vec<GeneratedFi
}
Ok(files)
}

pub fn pathbuf_to_rust_path(path: &PathBuf) -> String {
let mut components = path.components();
let mut path = String::new();
for component in components {
match component {
std::path::Component::Prefix(_) => {
// Handle Windows-specific prefixes if necessary
}
std::path::Component::RootDir => {
// Ignore root directory component
}
std::path::Component::CurDir => {
// Ignore current directory component
}
std::path::Component::ParentDir => {
if path.is_empty() || path.ends_with("::") {
path.push_str("super::");
} else {
path.push_str("::super::");
}
}
std::path::Component::Normal(name) => {
if !path.is_empty() && !path.ends_with("::") {
path.push_str("::");
}
path.push_str(name.to_str().unwrap());
}
}
}
path
}

mod test {
use super::*;
use std::path::PathBuf;

#[test]
fn test_pathbuf_to_rust_path() {
let path = PathBuf::from("src/models/user.rs");
let rust_path = pathbuf_to_rust_path(&path);
assert_eq!(rust_path, "src::models::user.rs");
}
}
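Editor's note: the test above only exercises plain path components. The extra cases below are not part of the commit; they just illustrate how the `ParentDir` and `CurDir` branches behave and would sit next to the existing test:

#[test]
fn test_pathbuf_to_rust_path_relative_components() {
    // `..` becomes `super::`, `.` is dropped.
    assert_eq!(
        pathbuf_to_rust_path(&PathBuf::from("../shared/types")),
        "super::shared::types"
    );
    assert_eq!(
        pathbuf_to_rust_path(&PathBuf::from("./models/user")),
        "models::user"
    );
    // As in the test above, a trailing file extension is carried through verbatim.
}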
@@ -1,30 +1,11 @@
use file::{GeneratedFile, GeneratedFileChunk};

// pub mod column;
// pub mod discover;
use file::GeneratedFileChunk;
use modules::models::table::Table;
pub mod discover;
pub mod file;
pub mod models;
// pub mod table;
//
// use core::time;
//
// use crate::{config::DateTimeCrate, Config};
use color_eyre::{
eyre::{eyre, ContextCompat, Report},
Result,
};
use handlebars::Handlebars;
use models::ModelConfig;

pub mod modules;
use crate::config::Config;
// use comment_parser::{CommentParser, Event};
// use discover::DbType;
// use handlebars::Handlebars;
// use sea_orm_codegen::OutputFile;
// use sea_schema::sea_query::{self, ColumnSpec, ColumnType, StringLen, TableCreateStatement};
// use table::Table;
// use tokio::{fs, task::JoinSet};
// use url::Url;
use color_eyre::Result;
use handlebars::Handlebars;

pub async fn generate<'a>(
database_url: &str,
@@ -32,222 +13,16 @@ pub async fn generate<'a>(
handlebars: &'a Handlebars<'a>,
) -> Result<Vec<GeneratedFileChunk>> {
let mut files = Vec::new();
let model_outputs = models::generate_models(database_url, config, handlebars).await?;

let db_filter = config.sea_orm.entity.tables.get_filter();
let (table_stmts, db_type) =
discover::get_tables(database_url.to_owned(), db_filter, &config.db).await?;
let tables = table_stmts
.into_iter()
.map(Table::new)
.collect::<Result<Vec<Table>>>()?;

let model_outputs = modules::models::generate_models(database_url, config, handlebars).await?;
files.extend(model_outputs);
Ok(files)
}

// pub async fn update_files<'a>(
// tables: Vec<TableCreateStatement>,
// config: Config,
// handlebars: &'a Handlebars<'a>,
// db_type: DbType,
// ) -> Result<Vec<OutputFile>> {
// let mut files = Vec::new();
// let entities_path = &config.output.models.entities.replace("/", "::");
// let entities_path_split = entities_path.split("::").collect::<Vec<&str>>();
//
// let mut mod_file_content = String::new();
// mod_file_content
// .push_str(format!("pub mod {};\n", entities_path_split.first().unwrap_or(&"")).as_str());
// for table in tables {
// mod_file_content.push_str(format!("pub mod {};\n", table.name).as_str());
// let entities_path = format!("super::{}", entities_path);
// let comment = file::FileGenerator::new(table, entities_path)?;
// let file = comment.build_file(&config, handlebars, &db_type).await?;
// tracing::info!(?file.name, file.content.len = file.content.len());
// files.push(file);
// }
// if entities_path_split.len() > 1 {
// for index in 0..entities_path_split.len() - 1 {
// let entity = entities_path_split[index];
// let next = entities_path_split[index + 1];
// files.push(OutputFile {
// name: format!("{}/mod.rs", entity),
// content: format!("pub mod {};\n", next),
// });
// }
// }
//
// files.push(OutputFile {
// name: "mod.rs".to_string(),
// content: mod_file_content,
// });
//
// Ok(files)
// }
//
// pub async fn generate_models(
// tables: Vec<TableCreateStatement>,
// config: Config,
// ) -> Result<Vec<OutputFile>> {
// tracing::debug!(?tables);
// let output_path = config.output.path.clone();
// let files = tables
// .into_iter()
// .map(|table| {
// let output_path = output_path.clone();
// let config = config.clone();
// async move {
// let table_name = match table.get_table_name() {
// Some(table_ref) => match table_ref {
// sea_query::TableRef::Table(t)
// | sea_query::TableRef::SchemaTable(_, t)
// | sea_query::TableRef::DatabaseSchemaTable(_, _, t)
// | sea_query::TableRef::TableAlias(t, _)
// | sea_query::TableRef::SchemaTableAlias(_, t, _)
// | sea_query::TableRef::DatabaseSchemaTableAlias(_, _, t, _) => {
// t.to_string()
// }
// _ => unimplemented!(),
// },
// None => return Err(eyre!("Table name not found")),
// };
// let table_str = generate_table(table, config.clone()).await?;
// tracing::debug!(?table_str);
// let filename = format!("{}.rs", table_name);
// let file_path = output_path.join(&filename);
// let exists = file_path.exists();
// let content = if exists {
// let mut file_content = fs::read_to_string(file_path).await?;
// let rules = comment_parser::get_syntax("rust").unwrap();
// let comments = CommentParser::new(&file_content, rules);
// let mut found = false;
// for comment in comments {
// if let Event::BlockComment(a, b) = comment {
// tracing::debug!(?a, ?b);
// if b.contains(HEADER) {
// found = true;
// file_content = file_content.replace(a, &table_str);
// tracing::debug!("Found header");
// break;
// }
// }
// }
// if found {
// Ok::<String, Report>(file_content)
// } else {
// let merged_content = format!("{}\n\n{}", table_str, file_content);
// Ok::<String, Report>(merged_content)
// }
// } else {
// Ok::<String, Report>(table_str)
// }?;
//
// Ok(OutputFile {
// name: filename,
// content,
// })
// }
// })
// .collect::<JoinSet<Result<OutputFile>>>();
// let files = files
// .join_all()
// .await
// .into_iter()
// .collect::<Result<Vec<OutputFile>>>()?;
// Ok(files)
// }

// async fn generate_table(table: TableCreateStatement, config: Config) -> Result<String> {
// let mut string = String::new();
// string.push_str(format!("{COMMENTHEAD} {HEADER}\n").as_str());
//
// let table_name = match table.get_table_name() {
// Some(table_ref) => match table_ref {
// sea_query::TableRef::Table(t)
// | sea_query::TableRef::SchemaTable(_, t)
// | sea_query::TableRef::DatabaseSchemaTable(_, _, t)
// | sea_query::TableRef::TableAlias(t, _)
// | sea_query::TableRef::SchemaTableAlias(_, t, _)
// | sea_query::TableRef::DatabaseSchemaTableAlias(_, _, t, _) => t.to_string(),
// _ => unimplemented!(),
// },
// None => return Err(eyre!("Table name not found")),
// };
// let mut inner = String::new();
// inner.push_str(format!("{COMMENTBODY}\n Table name: {table_name}\n\n\n").as_str());
// inner = inner.strip_suffix('\n').unwrap().to_string();
// let mut ptable = Table::new();
// let format = format::FormatBuilder::new()
// .column_separator(' ')
// .borders(' ')
// .separators(
// &[
// format::LinePosition::Bottom,
// format::LinePosition::Title,
// // format::LinePosition::Top,
// ],
// format::LineSeparator::default(),
// )
// .padding(1, 1)
// .build();
// ptable.set_format(format);
// ptable.set_titles(row!["Name", "Type", "RustType", "Attributes"]);
// let indexes = table.get_indexes();
// tracing::info!(?indexes);
// for column in table.get_columns() {
// let name = column.get_column_name();
// if let Some(column_type) = column.get_column_type() {
// let column_type_rust =
// type_to_rust_string(column_type, config.sea_orm.entity.date_time_crate.clone());
// let column_type =
// type_to_string(column_type, config.sea_orm.entity.date_time_crate.clone());
// let attrs = attrs_to_string(column.get_column_spec());
// ptable.add_row(row![name, column_type, column_type_rust, attrs]);
// }
// }
// inner.push_str(ptable.to_string().as_str());
//
// string.push_str(
// inner
// .replace("\n", format!("\n{} ", COMMENTBODY).as_str())
// .as_str(),
// );
// string.push_str(format!("\n{COMMENTTAIL}\n").as_str());
// Ok(string)
// }
//
// fn type_to_string(column: &ColumnType, date_time_create: DateTimeCrate) -> String {
// tracing::debug!(?column, ?date_time_create);
// match column {
// ColumnType::Char(Some(max)) => format!("Char({}max)", max),
// ColumnType::Char(None) => "Char".to_owned(),
// ColumnType::String(StringLen::None) => "String".to_owned(),
// ColumnType::String(StringLen::Max) => "String(Max)".to_owned(),
// ColumnType::String(StringLen::N(len)) => format!("String({}max)", len),
// ColumnType::Text => "Text".to_owned(),
// ColumnType::Integer => "Integer".to_owned(),
//
// ColumnType::Custom(v) => v.to_string(),
// _ => unimplemented!(),
// }
// }
// fn type_to_rust_string(column: &ColumnType, date_time_create: DateTimeCrate) -> String {
// tracing::debug!(?column, ?date_time_create);
// match column {
// ColumnType::Char(_) | ColumnType::String(_) | ColumnType::Text | ColumnType::Custom(_) => {
// "String".to_owned()
// }
// ColumnType::Integer => "i32".to_owned(),
// _ => unimplemented!(),
// }
// }
//
// fn attrs_to_string(column: &Vec<ColumnSpec>) -> String {
// tracing::debug!(?column);
// column
// .iter()
// .filter_map(|c| match c {
// ColumnSpec::PrimaryKey => Some("primary key"),
// ColumnSpec::UniqueKey => Some("unique key"),
// ColumnSpec::AutoIncrement => Some("autoincrement"),
// ColumnSpec::NotNull => Some("not null"),
// _ => None,
// })
// .map(|s| s.to_string())
// .collect::<Vec<String>>()
// .join(", ")
// }
//
// //
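Editor's note: `generate` now returns `GeneratedFileChunk`s, which `combine_chunks` (its tail is shown in an earlier hunk) merges into whole files before `main` writes them to disk. The chunk fields and merge rule are not visible in this diff, so the sketch below rests on assumptions: `path`/`content`/`priority` fields, grouped per path, ordered by priority, then concatenated.

use std::collections::BTreeMap;
use std::path::PathBuf;

// Assumed field layout; the real structs in src/generator/file.rs may differ.
pub struct GeneratedFileChunk {
    pub path: PathBuf,
    pub content: String,
    pub priority: i32,
}

pub struct GeneratedFile {
    pub path: PathBuf,
    pub content: String,
}

// Group chunks by target path, order them by priority, and join their content.
// (The real combine_chunks returns a color_eyre::Result; omitted here for brevity.)
pub fn combine_chunks(chunks: Vec<GeneratedFileChunk>) -> Vec<GeneratedFile> {
    let mut grouped: BTreeMap<PathBuf, Vec<GeneratedFileChunk>> = BTreeMap::new();
    for chunk in chunks {
        grouped.entry(chunk.path.clone()).or_default().push(chunk);
    }
    grouped
        .into_iter()
        .map(|(path, mut chunks)| {
            chunks.sort_by_key(|c| c.priority);
            let content = chunks
                .iter()
                .map(|c| c.content.as_str())
                .collect::<Vec<_>>()
                .join("\n");
            GeneratedFile { path, content }
        })
        .collect()
}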
10 src/generator/modules/mod.rs Normal file
@@ -0,0 +1,10 @@
use models::table::Table;

use super::discover::DbType;

pub mod models;

pub struct ModulesContext {
pub tables: Vec<Table>,
pub db_type: DbType,
}
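Editor's note: nothing in this commit constructs `ModulesContext` yet. A hypothetical wiring of the discover step into it, reusing the `discover::get_tables` and `Table::new` calls from `generate` above (module paths and the `Config` fields are assumed):

use color_eyre::Result;

use crate::config::Config; // module paths below are assumed, not confirmed by this diff
use crate::generator::discover;
use crate::generator::modules::{models::table::Table, ModulesContext};

// Hypothetical helper, not part of the commit.
pub async fn build_context(database_url: &str, config: &Config) -> Result<ModulesContext> {
    let db_filter = config.sea_orm.entity.tables.get_filter();
    let (table_stmts, db_type) =
        discover::get_tables(database_url.to_owned(), db_filter, &config.db).await?;
    let tables = table_stmts
        .into_iter()
        .map(Table::new)
        .collect::<Result<Vec<Table>>>()?;
    Ok(ModulesContext { tables, db_type })
}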
@@ -3,7 +3,7 @@ use comfy_table::Cell;
use heck::ToUpperCamelCase;
use sea_schema::sea_query::{ColumnDef, ColumnSpec, ColumnType, IndexCreateStatement};

use crate::config::{sea_orm_config::DateTimeCrate, Config};
use crate::config::sea_orm_config::DateTimeCrate;

use super::{discover::DbType, ModelConfig};
#[derive(Clone, Debug)]
@@ -65,15 +65,15 @@ impl Column {
pub fn get_addr_type(attr: &ColumnSpec) -> Option<String> {
match attr {
ColumnSpec::PrimaryKey => Some("primary key".to_owned()),
ColumnSpec::Null => todo!(),
ColumnSpec::Null => unimplemented!(),
ColumnSpec::NotNull => Some("not null".to_owned()),
ColumnSpec::Default(simple_expr) => todo!(),
ColumnSpec::Default(_) => unimplemented!(),
ColumnSpec::AutoIncrement => Some("autoincrement".to_owned()),
ColumnSpec::UniqueKey => Some("unique key".to_owned()),
ColumnSpec::Check(simple_expr) => todo!(),
ColumnSpec::Generated { expr, stored } => todo!(),
ColumnSpec::Extra(_) => todo!(),
ColumnSpec::Comment(_) => todo!(),
ColumnSpec::Check(_) => unimplemented!(),
ColumnSpec::Generated { .. } => unimplemented!(),
ColumnSpec::Extra(_) => unimplemented!(),
ColumnSpec::Comment(_) => unimplemented!(),
}
}
pub fn get_db_type(&self, db_type: &DbType) -> String {
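Editor's note: `get_addr_type` produces one label per `ColumnSpec`, much like the commented-out `attrs_to_string` removed further up, which joined the labels with ", ". A hypothetical helper along those lines (the `Column` import path is assumed, and the arms switched to `unimplemented!()` above would panic if such a spec is encountered):

use sea_schema::sea_query::ColumnSpec;

use crate::generator::models::column::Column; // module path assumed

// Hypothetical helper, not part of the commit: collapse the per-spec labels
// into a single "Attributes" cell string.
fn attrs_to_string(specs: &[ColumnSpec]) -> String {
    specs
        .iter()
        .filter_map(Column::get_addr_type)
        .collect::<Vec<_>>()
        .join(", ")
}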
@@ -1,7 +1,5 @@
use crate::generator::models::{CommentConfig, CommentConfigSerde};

use super::{table::Table, ModelConfig};
use color_eyre::{eyre, owo_colors::colors::White, Result};
use super::{table::Table, CommentConfig, CommentConfigSerde, ModelConfig};
use color_eyre::Result;
use comfy_table::{ContentArrangement, Table as CTable};
use comment_parser::{CommentParser, Event};

@@ -28,7 +26,7 @@ impl ModelCommentGenerator {
config: &ModelConfig,
) -> Result<String> {
let rules = comment_parser::get_syntax("rust").unwrap();
let parser = CommentParser::new(&file_content, rules);
let parser = CommentParser::new(file_content, rules);
for comment in parser {
if let Event::BlockComment(body, _) = comment {
if body.contains(HEADER) {
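Editor's note: the loop above is the same header-detection pattern used by the deleted `update_files` code: scan block comments, look for the HEADER marker, and swap the previously generated block in place. A self-contained sketch of that pattern (the function name and the prepend fallback are assumptions, not this repo's API):

use comment_parser::{CommentParser, Event};

// Hypothetical helper, not part of the commit.
fn splice_generated_header(file_content: &str, generated: &str, header_marker: &str) -> String {
    let rules = comment_parser::get_syntax("rust").unwrap();
    for event in CommentParser::new(file_content, rules) {
        if let Event::BlockComment(raw, text) = event {
            // Marker found: replace only the previously generated block.
            if text.contains(header_marker) {
                return file_content.replace(raw, generated);
            }
        }
    }
    // No marker: prepend the generated block, as the removed code did.
    format!("{generated}\n\n{file_content}")
}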
@@ -1,23 +1,15 @@
use std::path::PathBuf;

use crate::{config::Config, generator::file::GeneratedFileChunk};
use crate::generator::file::GeneratedFileChunk;

use super::{
comment::ModelCommentGenerator, discover::DbType, table::Table, CommentConfig, ModelConfig,
};
use super::{comment::ModelCommentGenerator, table::Table, ModelConfig};
use color_eyre::Result;
use comfy_table::{ContentArrangement, Table as CTable};
use comment_parser::{CommentParser, Event};
use handlebars::Handlebars;
use heck::ToPascalCase;
use sea_orm_codegen::OutputFile;
use serde::Serialize;
use tokio::fs;
#[derive(Debug, Clone)]
pub struct FileGenerator {
filename: String,
table: Table,
}
pub struct FileGenerator;
#[derive(Debug, Clone, Serialize)]
pub struct ModelContext {
entities_path: String,
@@ -80,46 +72,13 @@ impl FileGenerator {
priority: 0,
});
}

// let filepath = config.output.path.join(&self.filename);
// let file_context = FileContext {
// entities_path: self.entities_path.clone(),
// model_name: self.table.name.clone(),
// };
// let generated_header = self.generate_header(config, db_type).await?;
// if filepath.exists() {
// let mut file_content = fs::read_to_string(filepath).await?;
// if !config.output.models.comment.enable {
// return Ok(file_content);
// }
// let rules = comment_parser::get_syntax("rust").unwrap();
// let parser = CommentParser::new(&file_content, rules);
// for comment in parser {
// if let Event::BlockComment(body, _) = comment {
// if body.contains(HEADER) {
// tracing::debug!("Found header");
// file_content = file_content.replace(body, &generated_header);
// return Ok(file_content);
// }
// }
// }
// Ok(format!("{}\n{}", generated_header, file_content))
// } else {
// let content = handlebars.render("model", &file_context)?;
// Ok(format!("{}{}", generated_header, content))
// }

// Ok(OutputFile {
// name: self.filename.clone(),
// content: self.generate_file(config, handlebars, db_type).await?,
// })
Ok(file_chunks)
}
async fn handle_existing_file<'a>(
table: Table,
filepath: &PathBuf,
config: &ModelConfig,
handlebars: &'a Handlebars<'a>,
_handlebars: &'a Handlebars<'a>,
) -> Result<Vec<GeneratedFileChunk>> {
let mut file_chunks = Vec::new();
let mut file_content = fs::read_to_string(filepath).await?;
@@ -1,6 +1,11 @@
use crate::config::{sea_orm_config::DateTimeCrate, Config};
use crate::{
config::{sea_orm_config::DateTimeCrate, Config},
generator::{
discover::{self, DbType},
file::GeneratedFileChunk,
},
};
use color_eyre::Result;
use discover::DbType;
use file::FileGenerator;
use handlebars::Handlebars;
use sea_orm_codegen::{EntityTransformer, EntityWriterContext, OutputFile};
@@ -9,10 +14,8 @@ use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use table::Table;

use super::file::GeneratedFileChunk;
pub mod column;
pub mod comment;
pub mod discover;
pub mod file;
pub mod table;
#[derive(Debug, Clone)]
@@ -1,8 +1,6 @@
use super::column::Column;
use crate::config::Config;
use color_eyre::{eyre::eyre, Result};
use sea_schema::sea_query::{self, ColumnDef, IndexCreateStatement, TableCreateStatement};
use tracing::instrument;
use sea_schema::sea_query::{self, TableCreateStatement};

#[derive(Debug, Clone)]
pub struct Table {
55 src/main.rs
@@ -3,19 +3,13 @@ mod generator;
mod templates;

use clap::Parser;
use color_eyre::{eyre::eyre, Report, Result};
use color_eyre::{eyre::eyre, Result};
use config::Config;
use figment::{
providers::{Format, Serialized, Yaml},
Figment,
};
use handlebars::Handlebars;
use indicatif::{ProgressBar, ProgressStyle};
use sea_orm_codegen::{
DateTimeCrate as CodegenDateTimeCrate, EntityTransformer, EntityWriterContext, OutputFile,
WithPrelude, WithSerde,
};
use serde::{Deserialize, Serialize};
use tokio::{fs, io::AsyncWriteExt, process::Command};

#[derive(Parser, Debug)]
@@ -25,10 +19,11 @@ struct Args {
#[clap(short, long, env = "DATABASE_URL")]
database_url: String,
}

#[tokio::main]
async fn main() -> Result<()> {
color_eyre::install()?;
tracing_subscriber::fmt::init();
tracing_subscriber::fmt().init();
let args = Args::parse();

let config: Config = Figment::new()
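Editor's note: the hunk stops mid-statement, so the rest of the `Figment` chain is not shown. With the providers imported above, a typical layering inside main looks like the sketch below (assumes `Config` implements `Default` and `Deserialize`; the YAML file name is a guess, not this repo's path):

// Sketch only; the actual provider chain is not visible in this diff.
let config: Config = Figment::new()
    .merge(Serialized::defaults(Config::default()))
    .merge(Yaml::file("config.yaml"))
    .extract()?;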
@@ -52,7 +47,7 @@ async fn main() -> Result<()> {

let merged_outputs = generator::file::combine_chunks(outputs)?;
for output in merged_outputs.iter() {
tracing::info!(?output, "Merged file");
tracing::info!(?output.path, "Merged file");
let parent = output.path.parent().unwrap();
if !parent.exists() {
fs::create_dir_all(parent).await?;
@@ -61,51 +56,11 @@ async fn main() -> Result<()> {
file.write_all(output.content.as_bytes()).await?;
}
for output in merged_outputs.iter() {
tracing::info!(?output, "Running rustfmt");
tracing::info!(?output.path, "Running rustfmt");
let exit_status = Command::new("rustfmt").arg(&output.path).status().await?;
if !exit_status.success() {
return Err(eyre!("Failed to run rustfmt"));
}
}

//
// let output_dir = &config.output.path;
//
// let output_models_dir = output_dir.join(&config.output.models.path);
//
// let output_internal_entities = output_models_dir.join(&config.output.models.entities);
// let (_, table_stmts, db_type) =
// generator::discover::get_tables(args.database_url, &config).await?;
// let writer_context = config.clone().into();
// let output = EntityTransformer::transform(table_stmts.clone())?.generate(&writer_context);
// let mut files = output
// .files
// .into_iter()
// .map(|OutputFile { name, content }| (output_internal_entities.join(name), content))
// .collect::<Vec<_>>();
// // generator::update_files(table_stmts, config.clone(), &handlebars).await?;
// let generate_files = generator::update_files(table_stmts, config.clone(), &handlebars, db_type)
// .await?
// .into_iter()
// .map(|OutputFile { name, content }| (output_models_dir.join(name), content))
// .collect::<Vec<_>>();
// files.extend(generate_files);
// tracing::info!("Generated {} files", files.len());
// fs::create_dir_all(&output_internal_entities).await?;
//
// for (file_path, content) in files.iter() {
// tracing::info!(?file_path, "Writing file");
// let mut file = fs::File::create(&file_path).await?;
// file.write_all(content.as_bytes()).await?;
// }
//
// for (file_path, ..) in files.iter() {
// tracing::info!(?file_path, "Running rustfmt");
// let exit_status = Command::new("rustfmt").arg(file_path).status().await?; // Get the status code
// if !exit_status.success() {
// // Propagate the error if any
// return Err(eyre!("Failed to run rustfmt"));
// }
// }
Ok(())
}
@@ -1,7 +1,7 @@
use crate::config::Config;
use color_eyre::eyre::{Context, ContextCompat, Result};
use color_eyre::eyre::{ContextCompat, Result};
use handlebars::Handlebars;
use include_dir::{include_dir, Dir, DirEntry, File};
use include_dir::{include_dir, Dir, DirEntry};
use serde_yaml::Value;
use std::path::PathBuf;
use tokio::fs;
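Editor's note: this final hunk only trims unused imports in the templates module. For context, `include_dir` and `Handlebars` are typically combined roughly as sketched below; the embedded directory path and the function itself are assumptions, not this repo's code:

use color_eyre::eyre::{ContextCompat, Result};
use handlebars::Handlebars;
use include_dir::{include_dir, Dir, DirEntry};

// Assumed location of the embedded templates.
static TEMPLATES: Dir<'static> = include_dir!("$CARGO_MANIFEST_DIR/templates");

// Register every top-level embedded template under its file stem.
pub fn register_templates(handlebars: &mut Handlebars<'_>) -> Result<()> {
    for entry in TEMPLATES.entries() {
        if let DirEntry::File(file) = entry {
            let name = file
                .path()
                .file_stem()
                .and_then(|s| s.to_str())
                .context("template file name is not valid UTF-8")?;
            let source = file
                .contents_utf8()
                .context("template is not valid UTF-8")?;
            handlebars.register_template_string(name, source)?;
        }
    }
    Ok(())
}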