basic generator

This commit is contained in:
2025-04-08 12:18:39 +04:00
parent 17b1994b28
commit 3ed3e079f2
14 changed files with 3115 additions and 97 deletions

3
.gitignore vendored
View File

@@ -1,3 +1,4 @@
/target
**/target
/.compose
/.direnv
/local

View File

@@ -7,9 +7,4 @@ skip_seaql_migrations = true
[modules.sea_orm]
enable = true
prelude = true
[modules.sea_orm.serde]
enable = true
skip_deserializing_primary_key = false
skip_hidden_column = false
[modules.sea_orm.entity]
path = "./tests/src/models/_entities"

View File

@@ -3,7 +3,7 @@
processes = {
frontend = {
command = ''
RUST_LOG=debug ${pkgs.cargo-watch}/bin/cargo-watch -x 'run'
RUST_LOG=debug,sqlx=warn ${pkgs.cargo-watch}/bin/cargo-watch -x 'run'
'';
};
};

View File

@@ -2,42 +2,6 @@ use color_eyre::Result;
use path_clean::PathClean;
use std::{collections::HashMap, path::PathBuf};
/// One fragment of a generated file. Fragments targeting the same `path`
/// are concatenated by [`combine_chunks`] in ascending `priority` order.
#[derive(Debug, Clone)]
pub struct GeneratedFileChunk {
    pub path: PathBuf,
    pub content: String,
    /// Lower values are emitted first when chunks are combined.
    pub priority: i32,
}

/// A fully assembled output file: the concatenation of all chunks for `path`.
#[derive(Debug, Clone)]
pub struct GeneratedFile {
    pub path: PathBuf,
    pub content: String,
}

/// Groups `chunks` by path, orders each group by `priority` (ascending;
/// insertion order is preserved for equal priorities), and concatenates the
/// contents into one [`GeneratedFile`] per path.
///
/// Note: the order of the returned files is unspecified (HashMap iteration).
pub fn combine_chunks(chunks: Vec<GeneratedFileChunk>) -> Result<Vec<GeneratedFile>> {
    let mut table: HashMap<PathBuf, Vec<GeneratedFileChunk>> = HashMap::new();
    for chunk in chunks {
        // Entry API: one hash lookup instead of get_mut-then-insert.
        table.entry(chunk.path.clone()).or_default().push(chunk);
    }
    let mut files = Vec::with_capacity(table.len());
    for (path, mut chunks) in table {
        // Stable sort keeps insertion order for equal priorities, matching
        // the original `sort_by` on priority.
        chunks.sort_by_key(|c| c.priority);
        let mut content = String::new();
        for chunk in chunks {
            content.push_str(&chunk.content);
        }
        files.push(GeneratedFile { path, content });
    }
    Ok(files)
}
pub fn pathbuf_to_rust_path(path: PathBuf) -> String {
let clean_path = path.clean();
let components = clean_path.components();
@@ -62,10 +26,49 @@ pub fn pathbuf_to_rust_path(path: PathBuf) -> String {
}
path
}
/// Where to splice new content into an existing file's buffer.
#[derive(Debug, Clone)]
pub enum InsertPoint {
    Start,
    End,
}

/// In-memory contents of a single generated file.
#[derive(Debug, Clone)]
pub struct FileContent {
    pub content: String,
}

/// Accumulates generated file contents in memory, keyed by output path.
#[derive(Debug, Clone)]
pub struct FileManager {
    pub files: HashMap<PathBuf, FileContent>,
}

impl FileManager {
    /// Creates an empty manager.
    pub fn new() -> Self {
        Self {
            files: HashMap::new(),
        }
    }

    /// Inserts `content` into the file at `file`, creating it if absent.
    ///
    /// For an existing file, `insert_point` selects where the content goes;
    /// `None` behaves like `Some(InsertPoint::End)`. For a new file the
    /// insert point is irrelevant: the content becomes the whole file.
    pub fn insert_file(
        &mut self,
        file: PathBuf,
        content: String,
        insert_point: Option<InsertPoint>,
    ) -> Result<()> {
        if let Some(existing) = self.files.get_mut(&file) {
            match insert_point {
                Some(InsertPoint::Start) => existing.content.insert_str(0, &content),
                // End and unspecified both append.
                Some(InsertPoint::End) | None => existing.content.push_str(&content),
            }
        } else {
            // `file` is still owned here — no clone needed for the insert.
            self.files.insert(file, FileContent { content });
        }
        Ok(())
    }
}
#[cfg(test)]
mod test {
use crate::generator::file::combine_chunks;
use crate::generator::file::{pathbuf_to_rust_path, GeneratedFileChunk};
use crate::generator::file::pathbuf_to_rust_path;
use std::path::PathBuf;
#[test]
fn test_pathbuf_to_rust_path() {
@@ -87,36 +90,4 @@ mod test {
let rust_path = pathbuf_to_rust_path(path);
assert_eq!(rust_path, "");
}
#[test]
fn test_combine_chunks() {
let chunks = vec![
GeneratedFileChunk {
path: PathBuf::from("test.rs"),
content: "test".to_string(),
priority: 1,
},
GeneratedFileChunk {
path: PathBuf::from("test.rs"),
content: "test".to_string(),
priority: 1,
},
GeneratedFileChunk {
path: PathBuf::from("test.rs"),
content: "testpre".to_string(),
priority: -1,
},
GeneratedFileChunk {
path: PathBuf::from("test2.rs"),
content: "test".to_string(),
priority: 1,
},
];
let mut files = combine_chunks(chunks).unwrap();
assert_eq!(files.len(), 2);
files.sort_by(|a, b| a.path.cmp(&b.path));
assert_eq!(files[0].path, PathBuf::from("test.rs"));
assert_eq!(files[0].content, "testpretesttest");
assert_eq!(files[1].path, PathBuf::from("test2.rs"));
assert_eq!(files[1].content, "test");
}
}

View File

@@ -1,4 +1,3 @@
use file::GeneratedFileChunk;
pub mod discover;
pub mod file;
pub mod modules;
@@ -8,12 +7,7 @@ use toml_edit::DocumentMut;
#[derive(Clone, Debug)]
pub struct DatabaseUrl(String);
pub async fn generate(
database_url: &str,
root_config: DocumentMut,
) -> Result<Vec<GeneratedFileChunk>> {
let mut files = Vec::new();
pub async fn generate(database_url: &str, root_config: DocumentMut) -> Result<()> {
let mut module_manager = modules::ModuleManager::new(root_config);
module_manager.init()?;
let ctx = module_manager.get_context_mut();
@@ -32,5 +26,5 @@ pub async fn generate(
//
// let model_outputs = modules::models::generate_models(database_url, config, handlebars).await?;
// files.extend(model_outputs);
Ok(files)
Ok(())
}

View File

@@ -6,6 +6,7 @@ use crate::generator::DatabaseUrl;
use super::{Module, ModulesContext};
use color_eyre::Result;
use db::DbType;
use sea_schema::sea_query::TableCreateStatement;
use serde::Deserialize;
use serde_inline_default::serde_inline_default;
use table::Table;
@@ -93,6 +94,10 @@ pub struct DiscoveredSchema {
pub database_type: DbType,
}
#[derive(Debug, Clone)]
pub struct RawDiscoveredStatements {
pub statements: Vec<TableCreateStatement>,
}
#[derive(Debug)]
pub struct DiscoveryModule;
@@ -121,6 +126,7 @@ impl Module for DiscoveryModule {
let (stmts, db_type) = db::get_tables(url.0, config).await?;
let tables = stmts
.clone()
.into_iter()
.map(Table::new)
.collect::<Result<Vec<Table>>>()?;
@@ -129,7 +135,9 @@ impl Module for DiscoveryModule {
tables,
database_type: db_type,
};
let raw_discovered = RawDiscoveredStatements { statements: stmts };
ctx.get_anymap_mut().insert(discovered);
ctx.get_anymap_mut().insert(raw_discovered);
// db::generate(ctx).await?;
}
Ok(())

View File

@@ -0,0 +1,191 @@
use serde::{Deserialize, Deserializer, Serialize};
use serde_yaml::Value;
use sea_orm_codegen::{DateTimeCrate as CodegenDateTimeCrate, WithPrelude, WithSerde};
/// Entity file format flag forwarded to sea-orm-codegen
/// (`expanded` vs `compact`; see `EntityFormat::is_expanded`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum EntityFormat {
    Expanded,
    Compact,
}

/// Table selection: either an explicit allow-list (`specific`) or a
/// deny-list (`exclude`). `untagged`, so the variant is chosen by which
/// key appears in the config.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[serde(untagged)]
pub enum TableConfig {
    Specific { specific: Vec<String> },
    Exclude { exclude: Vec<String> },
}

/// Which serde derives to emit on generated models. Uses the custom
/// (de)serialization below: booleans or "serialize"/"deserialize" strings.
#[derive(Debug, Clone)]
pub enum SerdeEnable {
    Both,
    Serialize,
    Deserialize,
    None,
}

/// Prelude generation mode. Uses the custom (de)serialization below:
/// booleans or the string "allow_unused_imports".
#[derive(Debug, Clone)]
pub enum Prelude {
    Enabled,
    Disabled,
    AllowUnusedImports,
}
impl<'de> Deserialize<'de> for SerdeEnable {
    /// Accepts `true`/`false` (Both/None) or the strings
    /// `"serialize"`/`"deserialize"`; anything else is an error.
    fn deserialize<D>(deserializer: D) -> Result<SerdeEnable, D::Error>
    where
        D: Deserializer<'de>,
    {
        match Value::deserialize(deserializer)? {
            Value::Bool(true) => Ok(SerdeEnable::Both),
            Value::Bool(false) => Ok(SerdeEnable::None),
            Value::String(text) => match text.as_str() {
                "serialize" => Ok(SerdeEnable::Serialize),
                "deserialize" => Ok(SerdeEnable::Deserialize),
                _ => Err(serde::de::Error::custom(
                    "expected 'serialize', 'deserialize', 'true' or 'false'",
                )),
            },
            _ => Err(serde::de::Error::custom(
                "expected 'serialize', 'deserialize', 'true' or 'false'",
            )),
        }
    }
}
/// Serializes to the same external representation `Deserialize` accepts:
/// booleans for Both/None, strings for Serialize/Deserialize.
impl Serialize for SerdeEnable {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        match self {
            SerdeEnable::Both => serializer.serialize_bool(true),
            SerdeEnable::Serialize => serializer.serialize_str("serialize"),
            SerdeEnable::Deserialize => serializer.serialize_str("deserialize"),
            SerdeEnable::None => serializer.serialize_bool(false),
        }
    }
}
impl<'de> Deserialize<'de> for Prelude {
fn deserialize<D>(deserializer: D) -> Result<Prelude, D::Error>
where
D: Deserializer<'de>,
{
let value = Value::deserialize(deserializer)?;
match value {
Value::Bool(true) => Ok(Prelude::Enabled),
Value::Bool(false) => Ok(Prelude::Disabled),
Value::String(s) if s == "allow_unused_imports" => Ok(Prelude::AllowUnusedImports),
_ => Err(serde::de::Error::custom(
"expected 'true', 'false', or 'allow_unused_imports'",
)),
}
}
}
/// Serializes to the same external representation `Deserialize` accepts:
/// booleans for Enabled/Disabled, a string for AllowUnusedImports.
impl Serialize for Prelude {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        match self {
            Prelude::Enabled => serializer.serialize_bool(true),
            Prelude::Disabled => serializer.serialize_bool(false),
            Prelude::AllowUnusedImports => serializer.serialize_str("allow_unused_imports"),
        }
    }
}
/// Serde-related options for generated entities; corresponds to the
/// `[modules.sea_orm.serde]` config section. All fields default via
/// `Default` when omitted (`#[serde(default)]`).
#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(default)]
pub struct SeaOrmSerdeConfig {
    /// Which serde derives to add to generated models (default: none).
    pub enable: SerdeEnable,
    pub skip_deserializing_primary_key: bool,
    pub skip_hidden_column: bool,
}

impl Default for SeaOrmSerdeConfig {
    fn default() -> Self {
        Self {
            enable: SerdeEnable::None,
            skip_deserializing_primary_key: false,
            skip_hidden_column: false,
        }
    }
}
/// Entity-generation options; corresponds to the
/// `[modules.sea_orm.entity]` config section. Fields fall back to
/// `Default` when omitted (`#[serde(default)]`).
#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(default)]
pub struct SeaOrmEntityConfig {
    pub format: EntityFormat,
    pub extra_derives: SeaOrmExtraDerivesConfig,
    pub extra_attributes: SeaOrmExtraAttributesConfig,
    pub date_time_crate: DateTimeCrate,
    // presumably maps to sea-orm-codegen's with-copy-enums flag — TODO
    // confirm where this is consumed (not visible in this file's usage).
    pub with_copy_enums: bool,
}

impl Default for SeaOrmEntityConfig {
    fn default() -> Self {
        Self {
            format: EntityFormat::Compact,
            extra_derives: SeaOrmExtraDerivesConfig::default(),
            extra_attributes: SeaOrmExtraAttributesConfig::default(),
            date_time_crate: DateTimeCrate::Chrono,
            with_copy_enums: false,
        }
    }
}
/// Extra `#[derive(...)]` entries to add to generated models and enums.
#[derive(Deserialize, Serialize, Debug, Clone, Default)]
#[serde(default)]
pub struct SeaOrmExtraDerivesConfig {
    pub model: Vec<String>,
    // Serialized as "enum"; the field is `eenum` to dodge the Rust keyword.
    #[serde(rename = "enum")]
    pub eenum: Vec<String>,
}

/// Extra attributes to add to generated models and enums.
#[derive(Deserialize, Serialize, Debug, Clone, Default)]
#[serde(default)]
pub struct SeaOrmExtraAttributesConfig {
    pub model: Vec<String>,
    // Serialized as "enum"; the field is `eenum` to dodge the Rust keyword.
    #[serde(rename = "enum")]
    pub eenum: Vec<String>,
}
/// Which date/time crate the generated entities should use for temporal
/// columns; serialized lowercase ("time"/"chrono").
#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(rename_all = "lowercase")]
pub enum DateTimeCrate {
    Time,
    Chrono,
}

/// One-to-one mapping onto sea-orm-codegen's equivalent enum.
impl From<DateTimeCrate> for CodegenDateTimeCrate {
    fn from(date_time_crate: DateTimeCrate) -> CodegenDateTimeCrate {
        match date_time_crate {
            DateTimeCrate::Chrono => CodegenDateTimeCrate::Chrono,
            DateTimeCrate::Time => CodegenDateTimeCrate::Time,
        }
    }
}
impl EntityFormat {
    /// Returns `true` for the expanded entity format, `false` for compact.
    pub fn is_expanded(&self) -> bool {
        match self {
            EntityFormat::Expanded => true,
            EntityFormat::Compact => false,
        }
    }
}
/// Maps the config-level prelude mode onto sea-orm-codegen's flag.
impl From<Prelude> for WithPrelude {
    fn from(val: Prelude) -> Self {
        match val {
            Prelude::Enabled => WithPrelude::All,
            Prelude::Disabled => WithPrelude::None,
            Prelude::AllowUnusedImports => WithPrelude::AllAllowUnusedImports,
        }
    }
}
/// Maps the config-level serde mode onto sea-orm-codegen's flag.
impl From<SerdeEnable> for WithSerde {
    fn from(val: SerdeEnable) -> Self {
        match val {
            SerdeEnable::Both => WithSerde::Both,
            SerdeEnable::Serialize => WithSerde::Serialize,
            SerdeEnable::Deserialize => WithSerde::Deserialize,
            SerdeEnable::None => WithSerde::None,
        }
    }
}

View File

@@ -1,22 +1,39 @@
use std::path::PathBuf;
use crate::generator::DatabaseUrl;
pub mod config;
use super::{discovery::DiscoveryConfig, Module, ModulesContext};
use super::{
discovery::{DiscoveryConfig, RawDiscoveredStatements},
Module, ModulesContext,
};
use color_eyre::{eyre::eyre, Result};
use config::{Prelude, SeaOrmEntityConfig, SeaOrmSerdeConfig};
use sea_orm_codegen::{EntityTransformer, EntityWriterContext};
use serde::Deserialize;
use serde_inline_default::serde_inline_default;
#[serde_inline_default]
#[derive(Debug, Clone, Deserialize)]
#[serde(default)]
pub struct SeaOrmConfig {
#[serde_inline_default(false)]
pub enable: bool,
#[serde_inline_default(None)]
pub database_schema: Option<String>,
#[serde_inline_default(10)]
pub max_connections: u32,
#[serde_inline_default(30)]
pub acquire_timeout: u32,
pub path: Option<PathBuf>,
pub prelude: Prelude,
pub serde: SeaOrmSerdeConfig,
pub entity: SeaOrmEntityConfig,
}
impl Default for SeaOrmConfig {
fn default() -> Self {
Self {
enable: false,
path: None,
prelude: Prelude::Disabled,
serde: SeaOrmSerdeConfig::default(),
entity: SeaOrmEntityConfig::default(),
}
}
}
#[derive(Debug)]
pub struct SeaOrmModule;
@@ -37,13 +54,52 @@ impl Module for SeaOrmModule {
if config_sea_orm.enable && !config_discovery_config.enable {
return Err(eyre!("\"modules.discovery.enable\" must be enabled to use \"modules.sea_orm.enable\""));
}
if config_sea_orm.enable && config_sea_orm.path.is_none() {
return Err(eyre!(
"\"modules.sea_orm.path\" must be set to use \"modules.sea_orm.enable\""
));
}
Ok(config_sea_orm.enable && config_discovery_config.enable)
} else {
// One or both keys are missing
Ok(false)
}
}
    /// Generates sea-orm entity sources from the table statements that the
    /// discovery module stored in the shared context.
    async fn execute(&mut self, ctx: &mut ModulesContext) -> Result<()> {
        let map = ctx.get_anymap();
        // All three inputs must have been placed in the anymap by earlier
        // modules; if any is missing this silently does nothing.
        if let (Some(statements), Some(config), Some(discovery_config)) = (
            map.get::<RawDiscoveredStatements>(),
            map.get::<SeaOrmConfig>(),
            map.get::<DiscoveryConfig>(),
        ) {
            // NOTE(review): several positional bool arguments here — verify
            // each against EntityWriterContext::new's parameter order for
            // the pinned sea-orm-codegen version.
            let writer_context = EntityWriterContext::new(
                config.entity.format.is_expanded(),
                config.prelude.clone().into(),
                config.serde.enable.clone().into(),
                false,
                config.entity.date_time_crate.clone().into(),
                discovery_config.database_schema.clone(),
                false,
                config.serde.skip_deserializing_primary_key,
                config.serde.skip_hidden_column,
                config.entity.extra_derives.model.clone(),
                config.entity.extra_attributes.model.clone(),
                config.entity.extra_derives.eenum.clone(),
                config.entity.extra_attributes.eenum.clone(),
                false,
                true,
            );
            let output = EntityTransformer::transform(statements.statements.clone())?
                .generate(&writer_context);
            for file in output.files {
                // can_run() rejects an enabled module without `path`, so the
                // unwrap_or_default() fallback should not normally trigger.
                let file_path = config.path.clone().unwrap_or_default();
                let file_path = file_path.join(file.name);
                tracing::info!(?file_path, "Generating file");
                // Writing to disk is not implemented yet — files are only
                // logged for now.
                // let mut file_generator = crate::generator::file::FileGenerator::new(file_path);
                // file_generator.write(file.content)?;
            }
        }
        Ok(())
    }
}

2653
tests/migration/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,22 @@
[package]
name = "migration"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
name = "migration"
path = "src/lib.rs"
[dependencies]
async-std = { version = "1", features = ["attributes", "tokio1"] }
[dependencies.sea-orm-migration]
version = "1.1.0"
features = [
# Enable at least one `ASYNC_RUNTIME` and `DATABASE_DRIVER` feature if you want to run migration via CLI.
# View the list of supported features at https://www.sea-ql.org/SeaORM/docs/install-and-config/database-and-async-runtime.
# e.g.
"runtime-tokio-rustls", # `ASYNC_RUNTIME` feature
"sqlx-postgres", # `DATABASE_DRIVER` feature
]

41
tests/migration/README.md Normal file
View File

@@ -0,0 +1,41 @@
# Running Migrator CLI
- Generate a new migration file
```sh
cargo run -- generate MIGRATION_NAME
```
- Apply all pending migrations
```sh
cargo run
```
```sh
cargo run -- up
```
- Apply first 10 pending migrations
```sh
cargo run -- up -n 10
```
- Rollback last applied migrations
```sh
cargo run -- down
```
- Rollback last 10 applied migrations
```sh
cargo run -- down -n 10
```
- Drop all tables from the database, then reapply all migrations
```sh
cargo run -- fresh
```
- Rollback all applied migrations, then reapply all migrations
```sh
cargo run -- refresh
```
- Rollback all applied migrations
```sh
cargo run -- reset
```
- Check the status of all migrations
```sh
cargo run -- status
```

View File

@@ -0,0 +1,12 @@
pub use sea_orm_migration::prelude::*;

mod m20250318_045009_create_user;

/// Aggregates all migrations for the test database.
pub struct Migrator;

#[async_trait::async_trait]
impl MigratorTrait for Migrator {
    /// Returns the migrations in the order they should be applied.
    fn migrations() -> Vec<Box<dyn MigrationTrait>> {
        vec![Box::new(m20250318_045009_create_user::Migration)]
    }
}

View File

@@ -0,0 +1,68 @@
use sea_orm_migration::{prelude::*, schema::*};
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Creates the `user` table and a unique index on its `test` column.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(User::Table)
                    .if_not_exists()
                    .col(pk_auto(User::Id))
                    .col(string(User::Username).unique_key().not_null())
                    .col(string(User::Email).unique_key().not_null())
                    .col(string(User::Password).not_null())
                    .col(string(User::Test))
                    .to_owned(),
            )
            .await?;
        manager
            .create_index(
                Index::create()
                    .table(User::Table)
                    .name("idx_test")
                    .col(User::Test)
                    .unique()
                    .to_owned(),
            )
            .await
    }

    /// Reverts `up`: removes the index, then the table.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Drop the index BEFORE the table. Dropping the table first also
        // removes its indexes, so the subsequent DROP INDEX would fail with
        // "index does not exist".
        manager
            .drop_index(Index::drop().table(User::Table).name("idx_test").to_owned())
            .await?;
        manager
            .drop_table(Table::drop().table(User::Table).to_owned())
            .await
    }
}

/// Identifier names for the `user` table and its columns.
#[derive(DeriveIden)]
enum User {
    Table,
    Id,
    Username,
    Email,
    Password,
    Test,
}

View File

@@ -0,0 +1,6 @@
use sea_orm_migration::prelude::*;

/// Entry point for the migrator CLI: delegates argument parsing and
/// execution (up/down/fresh/refresh/reset/status) to sea-orm-migration.
#[async_std::main]
async fn main() {
    cli::run_cli(migration::Migrator).await;
}