major rework

This commit is contained in:
2025-04-07 19:58:52 +04:00
parent 1f8f45af8e
commit a5fa9ca080
20 changed files with 3788 additions and 576 deletions

7
.cargo/audit.toml Normal file
View File

@@ -0,0 +1,7 @@
[advisories]
# RUSTSEC-2023-0071 (Marvin timing side-channel in the `rsa` crate) has no fixed
# release yet, so it cannot be resolved by upgrading; ignore until a fix ships.
ignore = ["RUSTSEC-2023-0071"]
[yanked]
enabled = false # Warn for yanked crates in Cargo.lock (default: true)
update_index = false # Auto-update the crates.io index (default: true)

1
.envrc Normal file
View File

@@ -0,0 +1 @@
use flake

2
.gitignore vendored
View File

@@ -1 +1,3 @@
/target
/.compose
/.direnv

2895
Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,9 +1,11 @@
[package]
name = "generator"
name = "sea-orm-generator"
version = "0.1.0"
edition = "2021"
[dependencies]
anymap = "0.12.1"
async-trait = "0.1.88"
clap = { version = "4.5.32", features = ["derive", "env"] }
color-eyre = "0.6.3"
comfy-table = { version = "7.1.4", default-features = false }
@@ -14,15 +16,18 @@ heck = "0.5.0"
include_dir = "0.7.4"
indicatif = "0.17.11"
inquire = "0.7.5"
path-clean = "1.0.1"
quote = "1.0.40"
sea-orm-codegen = "1.1.8"
sea-schema = { version = "0.16.1", features = ["sqlx-all"] }
serde = { version = "1.0.219", features = ["derive"] }
serde-inline-default = "0.2.3"
serde_json = "1.0.140"
serde_yaml = "0.9.34"
sqlx = { version = "0.8.3", features = ["mysql", "postgres", "sqlite"] }
syn = { version = "2.0.100", features = ["extra-traits", "full"] }
tokio = { version = "1.44.1", features = ["full"] }
toml_edit = { version = "0.22.24", features = ["serde"] }
tracing = "0.1.41"
tracing-subscriber = "0.3.19"
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
url = "2.5.4"

132
flake.lock generated Normal file
View File

@@ -0,0 +1,132 @@
{
"nodes": {
"advisory-db": {
"flake": false,
"locked": {
"lastModified": 1744011916,
"narHash": "sha256-vZIug2BsukcfdNIH8Kto6iUGJM4PgaE8sPIKZDy8MT0=",
"owner": "rustsec",
"repo": "advisory-db",
"rev": "b3d5d51745076cac459a298838d6bec9f4b052f3",
"type": "github"
},
"original": {
"owner": "rustsec",
"repo": "advisory-db",
"type": "github"
}
},
"crane": {
"locked": {
"lastModified": 1743908961,
"narHash": "sha256-e1idZdpnnHWuosI3KsBgAgrhMR05T2oqskXCmNzGPq0=",
"owner": "ipetkov",
"repo": "crane",
"rev": "80ceeec0dc94ef967c371dcdc56adb280328f591",
"type": "github"
},
"original": {
"owner": "ipetkov",
"repo": "crane",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1743827369,
"narHash": "sha256-rpqepOZ8Eo1zg+KJeWoq1HAOgoMCDloqv5r2EAa9TSA=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "42a1c966be226125b48c384171c44c651c236c22",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"process-compose-wrapper": {
"locked": {
"lastModified": 1743615288,
"narHash": "sha256-X7beZH4dxMDkFqOJRB5daCkTMdgo90nZ62VwbFol55M=",
"ref": "dev",
"rev": "c0f01d143e56669d4ea174ddc882d8c6ca85f55c",
"revCount": 85,
"type": "git",
"url": "https://gitlab.scug.io/Nikkuss/process-compose-wrapper.git"
},
"original": {
"ref": "dev",
"type": "git",
"url": "https://gitlab.scug.io/Nikkuss/process-compose-wrapper.git"
}
},
"root": {
"inputs": {
"advisory-db": "advisory-db",
"crane": "crane",
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs",
"process-compose-wrapper": "process-compose-wrapper",
"rust-overlay": "rust-overlay"
}
},
"rust-overlay": {
"inputs": {
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1743993291,
"narHash": "sha256-u8GHvduU1gCtoFXvTS/wGjH1ouv5S/GRGq6MAT+sG/k=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "0cb3c8979c65dc6a5812dfe67499a8c7b8b4325b",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

159
flake.nix Normal file
View File

@@ -0,0 +1,159 @@
{
description = "A very basic flake";
inputs = {
nixpkgs.url = "github:nixos/nixpkgs?ref=nixos-unstable";
crane.url = "github:ipetkov/crane";
flake-utils.url = "github:numtide/flake-utils";
process-compose-wrapper = {
url = "git+https://gitlab.scug.io/Nikkuss/process-compose-wrapper.git?ref=dev";
};
rust-overlay = {
url = "github:oxalica/rust-overlay";
inputs.nixpkgs.follows = "nixpkgs";
};
advisory-db = {
url = "github:rustsec/advisory-db";
flake = false;
};
};
outputs =
{
self,
nixpkgs,
crane,
flake-utils,
process-compose-wrapper,
rust-overlay,
advisory-db,
...
}:
flake-utils.lib.eachDefaultSystem (
system:
let
pkgs = import nixpkgs {
inherit system;
overlays = [ (import rust-overlay) ];
};
process-compose = process-compose-wrapper.lib.mkLib pkgs;
inherit (pkgs) lib;
craneLib = (crane.mkLib pkgs).overrideToolchain (
p:
p.rust-bin.stable.latest.default.override {
extensions = [ "llvm-tools-preview" ];
}
);
unfilteredRoot = ./.; # The original, unfiltered source
src = lib.fileset.toSource {
root = unfilteredRoot;
fileset = lib.fileset.unions [
# Default files from crane (Rust and cargo files)
(craneLib.fileset.commonCargoSources unfilteredRoot)
./.cargo/audit.toml
./templates
];
};
commonArgs = {
inherit src;
strictDeps = true;
doCheck = false;
};
cargoArtifacts = craneLib.buildDepsOnly commonArgs;
generator = craneLib.buildPackage (
commonArgs
// {
inherit cargoArtifacts;
}
);
in
rec {
checks = {
# inherit generator;
clippy = craneLib.cargoClippy (
commonArgs
// {
inherit cargoArtifacts;
cargoClippyExtraArgs = "--all-targets -- --deny warnings";
}
);
fmt = craneLib.cargoFmt {
inherit src;
};
audit = craneLib.cargoAudit {
inherit src advisory-db;
};
nextest = craneLib.cargoNextest (
commonArgs
// {
inherit cargoArtifacts;
partitions = 1;
partitionType = "count";
cargoNextestPartitionsExtraArgs = "--no-tests=pass";
}
);
crateCoverage = craneLib.cargoTarpaulin (
commonArgs
// {
inherit cargoArtifacts;
}
);
};
packages = {
process-compose = process-compose.mkWrapper {
name = "process-compose";
config = (import ./process-compose.nix { inherit pkgs; });
# enableTui = true;
# modules = [
# (process-compose.mkPostgres {
# name = "postgres";
# initialDatabases = [
# {
# name = "db";
# user = "root";
# password = "root";
# }
# ];
# })
# (process-compose.mkRedis {
# name = "redis";
# })
# ];
};
llvm-coverage = craneLib.cargoLlvmCov (
commonArgs
// {
inherit cargoArtifacts;
}
);
inherit generator;
};
devShells.default = craneLib.devShell {
checks = self.checks.${system};
DATABASE_URL = "postgres://root:root@localhost/db";
REDIS_HOST = "localhost:6379";
MIGRATION_DIR = "crates/server/migration";
RUSTFLAGS = "-Clinker=clang -Clink-arg=-fuse-ld=mold";
packages = with pkgs; [
sea-orm-cli
cargo-watch
llvmPackages.clang
llvmPackages.bintools
mold
cargo-nextest
cargo-llvm-cov
cargo-audit
cargo-tarpaulin
];
};
}
);
}

6
generator.toml Normal file
View File

@@ -0,0 +1,6 @@
# This file is used to configure the SeaORM generator.
[modules.discovery]
enable = true
[modules.sea_orm]
enable = true

6
generator.yml Normal file
View File

@@ -0,0 +1,6 @@
modules:
discovery:
enable: true
database_schema: "private"
max_connections: 10
acquire_timeout: 5

10
process-compose.nix Normal file
View File

@@ -0,0 +1,10 @@
{ pkgs, ... }:
{
processes = {
frontend = {
command = ''
RUST_LOG=debug ${pkgs.cargo-watch}/bin/cargo-watch -x 'run'
'';
};
};
}

View File

@@ -1,341 +0,0 @@
use std::path::PathBuf;
use color_eyre::Report;
use sea_orm_codegen::{
DateTimeCrate as CodegenDateTimeCrate, EntityWriterContext, WithPrelude, WithSerde,
};
use serde::{Deserialize, Deserializer, Serialize};
use serde_yaml::{Mapping, Value};
use tracing::instrument;
/// Layout of the generated entity code, mirroring sea-orm-codegen's
/// compact vs. expanded entity formats.
///
/// Serialized lowercase in config files: `"expanded"` / `"compact"`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum EntityFormat {
    Expanded,
    Compact,
}
/// Table selection: either an allow-list (`specific`) or a deny-list
/// (`exclude`). `#[serde(untagged)]` means the variant is chosen by which
/// key appears in the config.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[serde(untagged)]
pub enum TableConfig {
    /// Generate only these tables.
    Specific { specific: Vec<String> },
    /// Generate everything except these tables.
    Exclude { exclude: Vec<String> },
}
/// Which serde derives generated models receive. (De)serialized via the
/// hand-written impls in this file: booleans map to `Both`/`None`, the
/// strings `"serialize"`/`"deserialize"` to the one-sided variants.
#[derive(Debug, Clone)]
pub enum SerdeEnable {
    Both,
    Serialize,
    Deserialize,
    None,
}
/// Prelude-generation mode; maps onto sea-orm-codegen's `WithPrelude`.
/// Config values: `true`, `false`, or `"allow_unused_imports"` (see the
/// hand-written serde impls in this file).
#[derive(Debug, Clone)]
pub enum Prelude {
    Enabled,
    Disabled,
    AllowUnusedImports,
}
impl<'de> Deserialize<'de> for SerdeEnable {
    /// Accepts `true` / `false` or the strings `"serialize"` /
    /// `"deserialize"`; anything else is a deserialization error.
    fn deserialize<D>(deserializer: D) -> Result<SerdeEnable, D::Error>
    where
        D: Deserializer<'de>,
    {
        match Value::deserialize(deserializer)? {
            Value::Bool(b) => Ok(if b { SerdeEnable::Both } else { SerdeEnable::None }),
            Value::String(s) => match s.as_str() {
                "serialize" => Ok(SerdeEnable::Serialize),
                "deserialize" => Ok(SerdeEnable::Deserialize),
                _ => Err(serde::de::Error::custom(
                    "expected 'serialize', 'deserialize', 'true' or 'false'",
                )),
            },
            _ => Err(serde::de::Error::custom(
                "expected 'serialize', 'deserialize', 'true' or 'false'",
            )),
        }
    }
}
impl Serialize for SerdeEnable {
    /// Inverse of the custom `Deserialize` impl: booleans for `Both`/`None`,
    /// strings for the one-sided variants.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        match self {
            SerdeEnable::Serialize => serializer.serialize_str("serialize"),
            SerdeEnable::Deserialize => serializer.serialize_str("deserialize"),
            SerdeEnable::Both | SerdeEnable::None => {
                serializer.serialize_bool(matches!(self, SerdeEnable::Both))
            }
        }
    }
}
impl<'de> Deserialize<'de> for Prelude {
    /// Accepts `true`, `false`, or the string `"allow_unused_imports"`.
    fn deserialize<D>(deserializer: D) -> Result<Prelude, D::Error>
    where
        D: Deserializer<'de>,
    {
        match Value::deserialize(deserializer)? {
            Value::Bool(true) => Ok(Prelude::Enabled),
            Value::Bool(false) => Ok(Prelude::Disabled),
            Value::String(ref s) if s == "allow_unused_imports" => {
                Ok(Prelude::AllowUnusedImports)
            }
            _ => Err(serde::de::Error::custom(
                "expected 'true', 'false', or 'allow_unused_imports'",
            )),
        }
    }
}
impl Serialize for Prelude {
    /// Inverse of the custom `Deserialize` impl: a boolean for
    /// `Enabled`/`Disabled`, a string for `AllowUnusedImports`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        match self {
            Prelude::AllowUnusedImports => serializer.serialize_str("allow_unused_imports"),
            Prelude::Enabled | Prelude::Disabled => {
                serializer.serialize_bool(matches!(self, Prelude::Enabled))
            }
        }
    }
}
/// Top-level generator configuration, loaded from the config file.
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct Config {
    /// Database connection/discovery settings.
    pub db: DbConfig,
    /// Options forwarded to sea-orm-codegen.
    pub sea_orm: SeaOrmConfig,
    /// Where and how generated files are written.
    pub output: OutputConfig,
    /// Inline template overrides — presumably keyed by template name;
    /// TODO confirm against the template loader.
    pub templates: Option<Mapping>,
    /// Optional directory of template files overriding the built-ins.
    pub templates_dir: Option<PathBuf>,
}
/// Output locations for generated code.
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct OutputConfig {
    /// Root directory generated files are written under.
    pub path: PathBuf,
    /// Model-specific output settings (paths are relative to `path`).
    pub models: OutputModelConfig,
}
/// Controls the comment header emitted above each generated model.
/// Field meanings are inferred from their names — the comment generator
/// is authoritative.
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct OutputCommentConfig {
    /// Master switch for comment generation.
    pub enable: bool,
    /// Optional wrap width for the comment block.
    pub max_width: Option<u16>,
    /// Include the table name.
    pub table_name: bool,
    /// Include the per-column info section.
    pub column_info: bool,
    /// Include each column's name.
    pub column_name: bool,
    /// Include each column's database-side type.
    pub column_db_type: bool,
    /// Include each column's Rust-side type.
    pub column_rust_type: bool,
    /// Include column attributes.
    pub column_attributes: bool,
    /// Attribute names to leave out of the comment.
    pub column_exclude_attributes: Vec<String>,
}
/// Layout of the generated model modules.
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct OutputModelConfig {
    /// Whether to emit a `prelude` module for the models.
    pub prelude: bool,
    /// Models directory, relative to `OutputConfig::path`.
    pub path: PathBuf,
    /// Comment settings for generated models.
    pub comment: OutputCommentConfig,
    /// Name of the submodule holding the raw sea-orm entities.
    pub entities: String,
}
/// Database connection settings used during schema discovery.
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct DbConfig {
    /// Schema to introspect; `None` falls back to the driver default.
    pub database_schema: Option<String>,
    /// Connection-pool size cap.
    pub max_connections: u32,
    /// Pool acquire timeout — presumably seconds; confirm at the pool
    /// setup site.
    pub acquire_timeout: u64,
}
/// Options forwarded to sea-orm-codegen.
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct SeaOrmConfig {
    /// Prelude-generation mode.
    pub prelude: Prelude,
    /// serde derive options for generated types.
    pub serde: SeaOrmSerdeConfig,
    /// Entity layout and filtering options.
    pub entity: SeaOrmEntityConfig,
}
/// serde options for generated entities; fields mirror sea-orm-codegen
/// flags of the same names.
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct SeaOrmSerdeConfig {
    /// Which serde derives to add.
    pub enable: SerdeEnable,
    /// Skip deserializing primary-key fields.
    pub skip_deserializing_primary_key: bool,
    /// Skip hidden columns in serde output.
    pub skip_hidden_column: bool,
}
/// Entity-generation options.
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct SeaOrmEntityConfig {
    /// Compact vs. expanded entity files.
    pub format: EntityFormat,
    /// Which tables to generate entities for.
    pub tables: SeaOrmTableConfig,
    /// Extra derives appended to generated items.
    pub extra_derives: SeaOrmExtraDerivesConfig,
    /// Extra attributes appended to generated items.
    pub extra_attributes: SeaOrmExtraAttributesConfig,
    /// Date/time crate used for temporal columns.
    pub date_time_crate: DateTimeCrate,
    /// Derive `Copy` on generated enums when possible.
    pub with_copy_enums: bool,
}
/// Table filtering options; see `get_filter` for how they combine.
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct SeaOrmTableConfig {
    /// Also include tables whose names start with `_`.
    pub include_hidden: bool,
    /// Drop the `seaql_migrations` bookkeeping table. Note: ignored when
    /// an explicit allow/deny list is present (see `get_filter`).
    pub skip_seaql_migrations: bool,
    /// Optional allow (`specific`) or deny (`exclude`) list, flattened
    /// into this section of the config.
    #[serde(flatten)]
    pub table_config: Option<TableConfig>,
}
/// Extra `#[derive(...)]` items appended to generated types.
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct SeaOrmExtraDerivesConfig {
    /// Derives added to model structs.
    pub model: Vec<String>,
    /// Derives added to generated enums. Named `eenum` because `enum` is
    /// a Rust keyword; serialized as `enum` in the config.
    #[serde(rename = "enum")]
    pub eenum: Vec<String>,
}
/// Extra attributes appended to generated types.
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct SeaOrmExtraAttributesConfig {
    /// Attributes added to model structs.
    pub model: Vec<String>,
    /// Attributes added to generated enums. Named `eenum` because `enum`
    /// is a Rust keyword; serialized as `enum` in the config.
    #[serde(rename = "enum")]
    pub eenum: Vec<String>,
}
/// Which date/time crate generated entities should use for temporal
/// columns. Serialized lowercase: `"time"` or `"chrono"`.
#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(rename_all = "lowercase")]
pub enum DateTimeCrate {
    Time,
    Chrono,
}
/// Per-template override; currently only the model template is settable.
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct TemplateConfig {
    /// Replacement model template source — presumably Handlebars text;
    /// confirm at the template registration site.
    pub model: Option<String>,
}
impl From<DateTimeCrate> for CodegenDateTimeCrate {
    /// Bridge the config-level enum to sea-orm-codegen's equivalent.
    fn from(date_time_crate: DateTimeCrate) -> CodegenDateTimeCrate {
        match date_time_crate {
            DateTimeCrate::Time => CodegenDateTimeCrate::Time,
            DateTimeCrate::Chrono => CodegenDateTimeCrate::Chrono,
        }
    }
}
impl SeaOrmTableConfig {
    /// Builds the table-name predicate used during schema discovery.
    ///
    /// Hidden tables (leading `_`) are dropped unless `include_hidden` is
    /// set. On top of that, an explicit allow/deny list applies when
    /// present; otherwise `skip_seaql_migrations` filters names starting
    /// with `seaql_migrations`. NOTE(review): when `table_config` is set,
    /// `skip_seaql_migrations` has no effect — confirm this is intended.
    pub fn get_filter(&self) -> Box<dyn Fn(&String) -> bool> {
        let include_hidden = self.include_hidden;
        // Base visibility check shared by every branch (closure is Copy:
        // it only captures a bool).
        let visible = move |table: &String| include_hidden || !table.starts_with('_');
        match &self.table_config {
            Some(TableConfig::Specific { specific }) => {
                let allow = specific.clone();
                Box::new(move |table: &String| visible(table) && allow.contains(table))
            }
            Some(TableConfig::Exclude { exclude }) => {
                let deny = exclude.clone();
                Box::new(move |table: &String| visible(table) && !deny.contains(table))
            }
            None if self.skip_seaql_migrations => Box::new(move |table: &String| {
                visible(table) && !table.starts_with("seaql_migrations")
            }),
            None => Box::new(visible),
        }
    }
}
impl Default for Config {
    /// Baseline configuration: compact entities with the chrono crate, no
    /// serde derives, hidden tables and `seaql_migrations` skipped, and
    /// output under `./src/models` with an `_entities` submodule.
    fn default() -> Self {
        Self {
            db: DbConfig {
                database_schema: None,
                max_connections: 10,
                acquire_timeout: 5,
            },
            sea_orm: SeaOrmConfig {
                prelude: Prelude::Enabled,
                serde: SeaOrmSerdeConfig {
                    enable: SerdeEnable::None,
                    skip_deserializing_primary_key: false,
                    skip_hidden_column: false,
                },
                entity: SeaOrmEntityConfig {
                    format: EntityFormat::Compact,
                    tables: SeaOrmTableConfig {
                        include_hidden: false,
                        skip_seaql_migrations: true,
                        table_config: None,
                    },
                    extra_derives: SeaOrmExtraDerivesConfig {
                        model: Vec::new(),
                        eenum: Vec::new(),
                    },
                    extra_attributes: SeaOrmExtraAttributesConfig {
                        model: Vec::new(),
                        eenum: Vec::new(),
                    },
                    date_time_crate: DateTimeCrate::Chrono,
                    with_copy_enums: false,
                },
            },
            output: OutputConfig {
                path: PathBuf::from("./src/"),
                models: OutputModelConfig {
                    // All comment sections on by default, no width limit.
                    comment: OutputCommentConfig {
                        max_width: None,
                        table_name: true,
                        column_name: true,
                        column_db_type: true,
                        column_rust_type: true,
                        column_attributes: true,
                        column_exclude_attributes: Vec::new(),
                        enable: true,
                        column_info: true,
                    },
                    entities: String::from("_entities"),
                    prelude: true,
                    path: PathBuf::from("./models"),
                },
            },
            templates: None,
            templates_dir: None,
        }
    }
}
impl EntityFormat {
    /// `true` when the expanded entity layout is selected.
    pub fn is_expanded(&self) -> bool {
        match self {
            EntityFormat::Expanded => true,
            EntityFormat::Compact => false,
        }
    }
}
impl From<Prelude> for WithPrelude {
    /// Bridge the config-level enum to sea-orm-codegen's `WithPrelude`.
    fn from(val: Prelude) -> Self {
        match val {
            Prelude::AllowUnusedImports => WithPrelude::AllAllowUnusedImports,
            Prelude::Enabled => WithPrelude::All,
            Prelude::Disabled => WithPrelude::None,
        }
    }
}
impl From<SerdeEnable> for WithSerde {
    /// Bridge the config-level enum to sea-orm-codegen's `WithSerde`.
    fn from(val: SerdeEnable) -> Self {
        match val {
            SerdeEnable::None => WithSerde::None,
            SerdeEnable::Serialize => WithSerde::Serialize,
            SerdeEnable::Deserialize => WithSerde::Deserialize,
            SerdeEnable::Both => WithSerde::Both,
        }
    }
}
impl From<Config> for EntityWriterContext {
    /// Flattens the config tree into sea-orm-codegen's positional
    /// `EntityWriterContext::new` arguments. Argument roles below are
    /// inferred from the field names — confirm against the
    /// sea-orm-codegen signature when bumping that dependency.
    fn from(val: Config) -> Self {
        EntityWriterContext::new(
            val.sea_orm.entity.format.is_expanded(),
            val.sea_orm.prelude.into(),
            val.sea_orm.serde.enable.into(),
            val.sea_orm.entity.with_copy_enums,
            val.sea_orm.entity.date_time_crate.into(),
            val.db.database_schema,
            false, // NOTE(review): hardcoded flag — purpose unconfirmed
            val.sea_orm.serde.skip_deserializing_primary_key,
            val.sea_orm.serde.skip_hidden_column,
            val.sea_orm.entity.extra_derives.model,
            val.sea_orm.entity.extra_attributes.model,
            val.sea_orm.entity.extra_derives.eenum,
            val.sea_orm.entity.extra_attributes.eenum,
            false, // NOTE(review): hardcoded flag — purpose unconfirmed
            false, // NOTE(review): hardcoded flag — purpose unconfirmed
        )
    }
}

View File

@@ -1,4 +1,5 @@
use color_eyre::Result;
use path_clean::PathClean;
use std::{collections::HashMap, path::PathBuf};
#[derive(Debug, Clone)]
@@ -37,25 +38,15 @@ pub fn combine_chunks(chunks: Vec<GeneratedFileChunk>) -> Result<Vec<GeneratedFi
Ok(files)
}
pub fn pathbuf_to_rust_path(path: &PathBuf) -> String {
let mut components = path.components();
pub fn pathbuf_to_rust_path(path: PathBuf) -> String {
let clean_path = path.clean();
let components = clean_path.components();
let mut path = String::new();
for component in components {
match component {
std::path::Component::Prefix(_) => {
// Handle Windows-specific prefixes if necessary
}
std::path::Component::RootDir => {
// Ignore root directory component
}
std::path::Component::CurDir => {
// Ignore current directory component
}
std::path::Component::ParentDir => {
if path.is_empty() || path.ends_with("::") {
path.push_str("super::");
} else {
path.push_str("::super::");
}
}
std::path::Component::Normal(name) => {
@@ -64,19 +55,68 @@ pub fn pathbuf_to_rust_path(path: &PathBuf) -> String {
}
path.push_str(name.to_str().unwrap());
}
// ignored
_ => (),
}
}
path
}
#[cfg(test)]
mod test {
use super::*;
use crate::generator::file::combine_chunks;
use crate::generator::file::{pathbuf_to_rust_path, GeneratedFileChunk};
use std::path::PathBuf;
#[test]
fn test_pathbuf_to_rust_path() {
let path = PathBuf::from("src/models/user.rs");
let rust_path = pathbuf_to_rust_path(&path);
assert_eq!(rust_path, "src::models::user.rs");
let path = PathBuf::from("src/models/../user.rs");
let rust_path = pathbuf_to_rust_path(path);
assert_eq!(rust_path, "src::user.rs");
}
#[test]
fn test_pathbuf_to_rust_path_backwards() {
let path = PathBuf::from("../../../test");
let rust_path = pathbuf_to_rust_path(path);
assert_eq!(rust_path, "super::super::super::test");
}
#[test]
fn test_pathbuf_to_rust_path_blank() {
let path = PathBuf::from("/");
let rust_path = pathbuf_to_rust_path(path);
assert_eq!(rust_path, "");
}
#[test]
fn test_combine_chunks() {
let chunks = vec![
GeneratedFileChunk {
path: PathBuf::from("test.rs"),
content: "test".to_string(),
priority: 1,
},
GeneratedFileChunk {
path: PathBuf::from("test.rs"),
content: "test".to_string(),
priority: 1,
},
GeneratedFileChunk {
path: PathBuf::from("test.rs"),
content: "testpre".to_string(),
priority: -1,
},
GeneratedFileChunk {
path: PathBuf::from("test2.rs"),
content: "test".to_string(),
priority: 1,
},
];
let mut files = combine_chunks(chunks).unwrap();
assert_eq!(files.len(), 2);
files.sort_by(|a, b| a.path.cmp(&b.path));
assert_eq!(files[0].path, PathBuf::from("test.rs"));
assert_eq!(files[0].content, "testpretesttest");
assert_eq!(files[1].path, PathBuf::from("test2.rs"));
assert_eq!(files[1].content, "test");
}
}

View File

@@ -1,28 +1,35 @@
use file::GeneratedFileChunk;
use modules::models::table::Table;
pub mod discover;
pub mod file;
pub mod modules;
use crate::config::Config;
use color_eyre::Result;
use handlebars::Handlebars;
use toml_edit::DocumentMut;
#[derive(Clone, Debug)]
pub struct DatabaseUrl(String);
pub async fn generate<'a>(
pub async fn generate(
database_url: &str,
config: &Config,
handlebars: &'a Handlebars<'a>,
root_config: DocumentMut,
) -> Result<Vec<GeneratedFileChunk>> {
let mut files = Vec::new();
let db_filter = config.sea_orm.entity.tables.get_filter();
let (table_stmts, db_type) =
discover::get_tables(database_url.to_owned(), db_filter, &config.db).await?;
let tables = table_stmts
.into_iter()
.map(Table::new)
.collect::<Result<Vec<Table>>>()?;
let mut module_manager = modules::ModuleManager::new(root_config);
module_manager.init()?;
let ctx = module_manager.get_context_mut();
ctx.get_anymap_mut()
.insert(DatabaseUrl(database_url.to_owned()));
module_manager.validate().await?;
let model_outputs = modules::models::generate_models(database_url, config, handlebars).await?;
files.extend(model_outputs);
// let db_filter = config.sea_orm.entity.tables.get_filter();
// let (table_stmts, db_type) =
// discover::get_tables(database_url.to_owned(), db_filter, &config.db).await?;
// let tables = table_stmts
// .into_iter()
// .map(Table::new)
// .collect::<Result<Vec<Table>>>()?;
//
// let model_outputs = modules::models::generate_models(database_url, config, handlebars).await?;
// files.extend(model_outputs);
Ok(files)
}

View File

@@ -0,0 +1,39 @@
use crate::generator::DatabaseUrl;
use super::{Module, ModulesContext};
use color_eyre::Result;
use serde::Deserialize;
use serde_inline_default::serde_inline_default;
/// Configuration for the discovery module, read from `modules.discovery`
/// in the root config. `#[serde_inline_default]` supplies per-field
/// defaults so every key is optional.
#[serde_inline_default]
#[derive(Debug, Clone, Deserialize)]
pub struct DiscoveryConfig {
    /// Whether discovery runs at all (default: disabled).
    #[serde_inline_default(false)]
    pub enable: bool,
    /// Schema to introspect; `None` uses the driver default.
    #[serde_inline_default(None)]
    pub database_schema: Option<String>,
    /// Connection-pool size cap.
    #[serde_inline_default(10)]
    pub max_connections: u32,
    /// Pool acquire timeout — presumably seconds; confirm where the pool
    /// is built. Default here (30) differs from `DbConfig`'s 5.
    #[serde_inline_default(30)]
    pub acquire_timeout: u32,
}
/// Module wrapping database schema discovery; active only when its config
/// section is parsed and a `DatabaseUrl` has been provided.
#[derive(Debug)]
pub struct DiscoveryModule;

#[async_trait::async_trait]
impl Module for DiscoveryModule {
    /// Parse `modules.discovery` into the context's type-keyed map.
    fn init(&self, ctx: &mut ModulesContext) -> Result<()> {
        ctx.get_config_auto::<DiscoveryConfig>("modules.discovery")?;
        Ok(())
    }

    /// Enabled only when both the parsed config and a database URL are
    /// present in the context.
    async fn validate(&self, ctx: &mut ModulesContext) -> Result<bool> {
        let map = ctx.get_anymap();
        match (map.get::<DiscoveryConfig>(), map.get::<DatabaseUrl>()) {
            (Some(config), Some(_)) => Ok(config.enable),
            _ => Ok(false),
        }
    }
}

View File

@@ -1,10 +1,137 @@
use models::table::Table;
use std::{
fmt::Debug,
sync::{Arc, MutexGuard},
};
use super::discover::DbType;
use anymap::{
any::{Any, CloneAny},
Map,
};
use color_eyre::{eyre::eyre, Result};
use discovery::DiscoveryModule;
use sea_orm::{SeaOrmConfig, SeaOrmModule};
// use models::ModelsModule;
use serde::{de::IntoDeserializer, Deserialize};
use std::sync::Mutex;
use templates::TemplateModule;
use toml_edit::{de::ValueDeserializer, DocumentMut, Item, Value};
// use models::table::Table;
//
// use super::discover::DbType;
type AnyCloneMap = Map<dyn CloneAny + Send>;
pub mod discovery;
pub mod models;
pub mod sea_orm;
pub mod templates;
pub struct ModulesContext {
pub tables: Vec<Table>,
pub db_type: DbType,
pub anymap: AnyCloneMap,
pub root_config: DocumentMut,
}
impl ModulesContext {
    /// New context holding the parsed root config and an empty type-keyed
    /// map for inter-module state.
    pub fn new(root_config: DocumentMut) -> Self {
        Self {
            anymap: AnyCloneMap::new(),
            root_config,
        }
    }

    /// Resolve a dot-separated `path` (e.g. `"modules.discovery"`) to the
    /// raw TOML item, erroring if any segment is missing.
    pub fn get_config_raw(&self, path: &str) -> Result<&Item> {
        let mut item: Option<&Item> = None;
        let path = path.split('.').collect::<Vec<_>>();
        for i in path {
            if let Some(item) = &mut item {
                // Descend one level from the previously resolved item.
                if let Some(v) = item.get(i) {
                    *item = v;
                } else {
                    return Err(eyre!("Config not found"));
                }
            } else if let Some(v) = self.root_config.get(i) {
                // First segment: start from the document root.
                item = Some(v);
            } else {
                return Err(eyre!("Config not found"));
            }
        }
        if let Some(v) = item {
            Ok(v)
        } else {
            // Defensive: split('.') always yields at least one segment,
            // so in practice `item` is set by the time we get here.
            Err(eyre!("Config not found"))
        }
    }

    /// Deserialize the config value at `path` into `V`.
    pub fn get_config<'a, V: Deserialize<'a>>(&self, path: &str) -> Result<V> {
        let item = self.get_config_raw(path)?;
        let value = item
            .clone()
            .into_value()
            .map_err(|_| eyre!("Config not found"))?;
        let deserializer = value.into_deserializer();
        let config = V::deserialize(deserializer)?;
        Ok(config)
    }

    /// Deserialize the config at `path` and stash it in the type-keyed
    /// map so modules can later fetch it by type.
    pub fn get_config_auto<'a, V: Deserialize<'a> + Clone + Send + 'static>(
        &mut self,
        path: &str,
    ) -> Result<()> {
        let value: V = self.get_config::<V>(path)?;
        self.get_anymap_mut().insert(value);
        Ok(())
    }

    /// Shared view of the inter-module state map.
    pub fn get_anymap(&self) -> &AnyCloneMap {
        &self.anymap
    }

    /// Mutable view of the inter-module state map.
    pub fn get_anymap_mut(&mut self) -> &mut AnyCloneMap {
        &mut self.anymap
    }
}
/// A pluggable generator step.
#[async_trait::async_trait]
pub trait Module: Debug {
    /// Parse this module's config section into the context.
    fn init(&self, ctx: &mut ModulesContext) -> Result<()>;
    /// Report whether the module should stay active for this run.
    async fn validate(&self, ctx: &mut ModulesContext) -> Result<bool>;
}
/// Owns the shared context and the list of registered modules, and
/// drives their init/validate lifecycle.
pub struct ModuleManager {
    ctx: ModulesContext,
    modules: Vec<Box<dyn Module>>,
}
impl ModuleManager {
    /// Registers the built-in modules against the given root config.
    pub fn new(root_config: DocumentMut) -> Self {
        let modules: Vec<Box<dyn Module>> = vec![
            Box::new(TemplateModule),
            Box::new(DiscoveryModule),
            Box::new(SeaOrmModule),
            // Box::new(ModelsModule),
        ];
        Self {
            modules,
            ctx: ModulesContext::new(root_config),
        }
    }

    /// Mutable access to the shared context (e.g. to inject state before
    /// validation).
    pub fn get_context_mut(&mut self) -> &mut ModulesContext {
        &mut self.ctx
    }

    /// Run every module's `init` so each can parse its config section.
    pub fn init(&mut self) -> Result<()> {
        for module in &self.modules {
            module.init(&mut self.ctx)?;
        }
        Ok(())
    }

    /// Ask each module whether it is enabled and retain only the enabled
    /// ones (a manual, async-compatible `Vec::retain`: enabled modules
    /// are compacted to the front, then the tail is truncated).
    pub async fn validate(&mut self) -> Result<()> {
        let mut index_wr = 0usize;
        for index in 0..self.modules.len() {
            let module = &self.modules[index];
            let enabled = module.validate(&mut self.ctx).await?;
            tracing::info!(?module, ?enabled);
            // BUG FIX: the previous condition was `if !enabled`, which
            // kept the *disabled* modules and truncated away every
            // enabled one.
            if enabled {
                self.modules.swap(index_wr, index);
                index_wr += 1;
            }
        }
        self.modules.truncate(index_wr);
        Ok(())
    }
}

View File

@@ -3,9 +3,8 @@ use comfy_table::Cell;
use heck::ToUpperCamelCase;
use sea_schema::sea_query::{ColumnDef, ColumnSpec, ColumnType, IndexCreateStatement};
use crate::config::sea_orm_config::DateTimeCrate;
use super::{discover::DbType, ModelConfig};
use crate::config::sea_orm_config::DateTimeCrate;
#[derive(Clone, Debug)]
pub struct Column {
pub name: String,
@@ -74,6 +73,7 @@ impl Column {
ColumnSpec::Generated { .. } => unimplemented!(),
ColumnSpec::Extra(_) => unimplemented!(),
ColumnSpec::Comment(_) => unimplemented!(),
ColumnSpec::Using(_) => unimplemented!(),
}
}
pub fn get_db_type(&self, db_type: &DbType) -> String {

View File

@@ -1,163 +1,189 @@
use crate::{
config::{sea_orm_config::DateTimeCrate, Config},
generator::{
discover::{self, DbType},
file::GeneratedFileChunk,
},
};
use super::{Module, ModulesContext};
use color_eyre::Result;
use file::FileGenerator;
use handlebars::Handlebars;
use sea_orm_codegen::{EntityTransformer, EntityWriterContext, OutputFile};
use sea_schema::sea_query::TableCreateStatement;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use table::Table;
pub mod column;
pub mod comment;
pub mod file;
pub mod table;
#[derive(Debug, Clone)]
pub struct ModelConfig {
pub models_path: PathBuf,
pub prelude: bool,
pub entities_path: PathBuf,
pub relative_entities_path: String,
use serde::Deserialize;
#[derive(Debug, Clone, Deserialize)]
pub struct ModelsConfig {
pub enable: bool,
pub comment: CommentConfig,
pub db_type: DbType,
}
#[derive(Debug, Clone)]
pub struct CommentConfig {
pub max_width: Option<u16>,
pub enable: bool,
pub table_name: bool,
pub column_info: bool,
pub column_name: bool,
pub column_rust_type: bool,
pub column_db_type: bool,
pub column_attributes: bool,
pub ignore_errors: bool,
pub date_time_crate: DateTimeCrate,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CommentConfigSerde {
#[serde(skip_serializing_if = "Option::is_none")]
pub max_width: Option<u16>,
#[serde(skip_serializing_if = "Option::is_none")]
pub enable: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub table_name: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub info: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub rust_type: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub db_type: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub attributes: Option<bool>,
}
impl CommentConfigSerde {
pub fn merge(&self, config: &CommentConfig) -> CommentConfig {
CommentConfig {
max_width: self.max_width.or(config.max_width),
table_name: self.table_name.unwrap_or(config.table_name),
column_name: self.name.unwrap_or(config.column_name),
column_info: self.info.unwrap_or(config.column_info),
column_rust_type: self.rust_type.unwrap_or(config.column_rust_type),
column_db_type: self.db_type.unwrap_or(config.column_db_type),
column_attributes: self.attributes.unwrap_or(config.column_attributes),
ignore_errors: config.ignore_errors,
enable: self.enable.unwrap_or(config.enable),
date_time_crate: config.date_time_crate.clone(),
}
}
pub database_schema: String,
pub max_connections: u32,
pub acquire_timeout: u32,
}
impl ModelConfig {
pub fn new(config: Config, db_type: DbType) -> Self {
let models_path = config.output.path.join(&config.output.models.path);
let entities_path = models_path.join(&config.output.models.entities);
ModelConfig {
db_type,
prelude: config.output.models.prelude,
entities_path,
models_path,
relative_entities_path: config.output.models.entities.clone(),
enable: config.output.models.enable,
comment: CommentConfig {
max_width: config.output.models.comment.max_width,
enable: config.output.models.comment.enable,
table_name: config.output.models.comment.table_name,
column_name: config.output.models.comment.column_name,
column_info: config.output.models.comment.column_info,
column_rust_type: config.output.models.comment.column_rust_type,
column_db_type: config.output.models.comment.column_db_type,
column_attributes: config.output.models.comment.column_attributes,
ignore_errors: config.output.models.comment.ignore_errors,
date_time_crate: config.sea_orm.entity.date_time_crate,
},
}
}
}
pub async fn generate_models<'a>(
database_url: &str,
config: &Config,
handlebars: &'a Handlebars<'a>,
) -> Result<Vec<GeneratedFileChunk>> {
let mut files = Vec::new();
let db_filter = config.sea_orm.entity.tables.get_filter();
let (table_stmts, db_type) =
discover::get_tables(database_url.to_owned(), db_filter, &config.db).await?;
let model_config = ModelConfig::new(config.clone(), db_type);
let writer_context = config.clone().into();
files.extend(
generate_entities(table_stmts.clone(), model_config.clone(), writer_context).await?,
);
files.push(GeneratedFileChunk {
path: model_config.models_path.join("mod.rs"),
content: format!("pub mod {};", model_config.relative_entities_path),
priority: 0,
});
let tables = table_stmts
.into_iter()
.map(Table::new)
.collect::<Result<Vec<Table>>>()?;
if model_config.enable {
for table in tables {
files.extend(FileGenerator::generate_file(table, &model_config, handlebars).await?);
}
if model_config.prelude {
files.push(GeneratedFileChunk {
path: model_config.models_path.join("mod.rs"),
content: String::from("pub mod prelude;"),
priority: 0,
})
}
}
Ok(files)
}
pub async fn generate_entities(
table_statements: Vec<TableCreateStatement>,
config: ModelConfig,
writer_context: EntityWriterContext,
) -> Result<Vec<GeneratedFileChunk>> {
let output = EntityTransformer::transform(table_statements)?.generate(&writer_context);
Ok(output
.files
.into_iter()
.map(|OutputFile { name, content }| GeneratedFileChunk {
path: config.entities_path.join(name),
content,
priority: 0,
})
.collect::<Vec<_>>())
}
// #[derive(Debug)]
// pub struct ModelsModule;
//
// #[async_trait::async_trait]
// impl Module for ModelsModule {
// fn init(&self, ctx: &mut ModulesContext) -> Result<()> {
// Ok(())
// }
//
// async fn validate(&self, ctx: &mut ModulesContext) -> Result<bool> {
// Ok(false)
// }
// }
//
//
// use crate::{
// config::{sea_orm_config::DateTimeCrate, Config},
// generator::{
// discover::{self, DbType},
// file::GeneratedFileChunk,
// },
// };
// use color_eyre::Result;
// use file::FileGenerator;
// use handlebars::Handlebars;
// use sea_orm_codegen::{EntityTransformer, EntityWriterContext, OutputFile};
// use sea_schema::sea_query::TableCreateStatement;
// use serde::{Deserialize, Serialize};
// use std::path::PathBuf;
// use table::Table;
//
// pub mod column;
// pub mod comment;
// pub mod file;
// pub mod table;
// #[derive(Debug, Clone)]
// pub struct ModelConfig {
// pub models_path: PathBuf,
// pub prelude: bool,
// pub entities_path: PathBuf,
// pub relative_entities_path: String,
// pub enable: bool,
// pub comment: CommentConfig,
// pub db_type: DbType,
// }
// #[derive(Debug, Clone)]
// pub struct CommentConfig {
// pub max_width: Option<u16>,
// pub enable: bool,
// pub table_name: bool,
// pub column_info: bool,
// pub column_name: bool,
// pub column_rust_type: bool,
// pub column_db_type: bool,
// pub column_attributes: bool,
// pub ignore_errors: bool,
// pub date_time_crate: DateTimeCrate,
// }
// #[derive(Debug, Clone, Serialize, Deserialize)]
// pub struct CommentConfigSerde {
// #[serde(skip_serializing_if = "Option::is_none")]
// pub max_width: Option<u16>,
// #[serde(skip_serializing_if = "Option::is_none")]
// pub enable: Option<bool>,
// #[serde(skip_serializing_if = "Option::is_none")]
// pub table_name: Option<bool>,
// #[serde(skip_serializing_if = "Option::is_none")]
// pub name: Option<bool>,
// #[serde(skip_serializing_if = "Option::is_none")]
// pub info: Option<bool>,
// #[serde(skip_serializing_if = "Option::is_none")]
// pub rust_type: Option<bool>,
// #[serde(skip_serializing_if = "Option::is_none")]
// pub db_type: Option<bool>,
// #[serde(skip_serializing_if = "Option::is_none")]
// pub attributes: Option<bool>,
// }
// impl CommentConfigSerde {
// pub fn merge(&self, config: &CommentConfig) -> CommentConfig {
// CommentConfig {
// max_width: self.max_width.or(config.max_width),
// table_name: self.table_name.unwrap_or(config.table_name),
// column_name: self.name.unwrap_or(config.column_name),
// column_info: self.info.unwrap_or(config.column_info),
// column_rust_type: self.rust_type.unwrap_or(config.column_rust_type),
// column_db_type: self.db_type.unwrap_or(config.column_db_type),
// column_attributes: self.attributes.unwrap_or(config.column_attributes),
// ignore_errors: config.ignore_errors,
// enable: self.enable.unwrap_or(config.enable),
// date_time_crate: config.date_time_crate.clone(),
// }
// }
// }
//
// impl ModelConfig {
// pub fn new(config: Config, db_type: DbType) -> Self {
// let models_path = config.output.path.join(&config.output.models.path);
// let entities_path = models_path.join(&config.output.models.entities);
// ModelConfig {
// db_type,
// prelude: config.output.models.prelude,
// entities_path,
// models_path,
// relative_entities_path: config.output.models.entities.clone(),
// enable: config.output.models.enable,
// comment: CommentConfig {
// max_width: config.output.models.comment.max_width,
// enable: config.output.models.comment.enable,
// table_name: config.output.models.comment.table_name,
// column_name: config.output.models.comment.column_name,
// column_info: config.output.models.comment.column_info,
// column_rust_type: config.output.models.comment.column_rust_type,
// column_db_type: config.output.models.comment.column_db_type,
// column_attributes: config.output.models.comment.column_attributes,
// ignore_errors: config.output.models.comment.ignore_errors,
// date_time_crate: config.sea_orm.entity.date_time_crate,
// },
// }
// }
// }
//
// pub async fn generate_models<'a>(
// database_url: &str,
// config: &Config,
// handlebars: &'a Handlebars<'a>,
// ) -> Result<Vec<GeneratedFileChunk>> {
// let mut files = Vec::new();
// let db_filter = config.sea_orm.entity.tables.get_filter();
// let (table_stmts, db_type) =
// discover::get_tables(database_url.to_owned(), db_filter, &config.db).await?;
// let model_config = ModelConfig::new(config.clone(), db_type);
//
// let writer_context = config.clone().into();
// files.extend(
// generate_entities(table_stmts.clone(), model_config.clone(), writer_context).await?,
// );
//
// files.push(GeneratedFileChunk {
// path: model_config.models_path.join("mod.rs"),
// content: format!("pub mod {};", model_config.relative_entities_path),
// priority: 0,
// });
// let tables = table_stmts
// .into_iter()
// .map(Table::new)
// .collect::<Result<Vec<Table>>>()?;
//
// if model_config.enable {
// for table in tables {
// files.extend(FileGenerator::generate_file(table, &model_config, handlebars).await?);
// }
// if model_config.prelude {
// files.push(GeneratedFileChunk {
// path: model_config.models_path.join("mod.rs"),
// content: String::from("pub mod prelude;"),
// priority: 0,
// })
// }
// }
// Ok(files)
// }
//
// pub async fn generate_entities(
// table_statements: Vec<TableCreateStatement>,
// config: ModelConfig,
// writer_context: EntityWriterContext,
// ) -> Result<Vec<GeneratedFileChunk>> {
// let output = EntityTransformer::transform(table_statements)?.generate(&writer_context);
// Ok(output
// .files
// .into_iter()
// .map(|OutputFile { name, content }| GeneratedFileChunk {
// path: config.entities_path.join(name),
// content,
// priority: 0,
// })
// .collect::<Vec<_>>())
// }

View File

@@ -0,0 +1,46 @@
use crate::generator::DatabaseUrl;
use super::{discovery::DiscoveryConfig, Module, ModulesContext};
use color_eyre::{eyre::eyre, Result};
use serde::Deserialize;
use serde_inline_default::serde_inline_default;
/// Configuration for the sea-orm entity-generation module, deserialized from
/// the `modules.sea_orm` section of the config file. `#[serde_inline_default]`
/// supplies the per-field defaults when a key is absent.
#[serde_inline_default]
#[derive(Debug, Clone, Deserialize)]
pub struct SeaOrmConfig {
/// Whether sea-orm entity generation runs at all (default: `false`).
#[serde_inline_default(false)]
pub enable: bool,
/// Optional database schema to restrict generation to (default: none).
#[serde_inline_default(None)]
pub database_schema: Option<String>,
/// Maximum number of pooled database connections (default: 10).
#[serde_inline_default(10)]
pub max_connections: u32,
/// Connection acquire timeout (default: 30).
/// NOTE(review): unit is presumably seconds, but the pool construction is
/// not visible here — confirm where this value is consumed.
#[serde_inline_default(30)]
pub acquire_timeout: u32,
}
/// Module wiring sea-orm entity generation into the module pipeline.
/// Stateless unit struct: all shared state lives in `ModulesContext`.
#[derive(Debug)]
pub struct SeaOrmModule;
#[async_trait::async_trait]
impl Module for SeaOrmModule {
    /// Parses the `modules.sea_orm` config section into a [`SeaOrmConfig`]
    /// and registers it in the shared context.
    fn init(&self, ctx: &mut ModulesContext) -> Result<()> {
        ctx.get_config_auto::<SeaOrmConfig>("modules.sea_orm")?;
        Ok(())
    }

    /// Decides whether this module should run.
    ///
    /// Requires the discovery config, a database URL, and the sea-orm config
    /// to all be present in the context; if any is missing the module stays
    /// disabled. Enabling sea-orm without discovery is a configuration error.
    async fn validate(&self, ctx: &mut ModulesContext) -> Result<bool> {
        let registry = ctx.get_anymap();

        // Guard clause: all three prerequisites must already be registered.
        let (Some(discovery), Some(_), Some(sea_orm)) = (
            registry.get::<DiscoveryConfig>(),
            registry.get::<DatabaseUrl>(),
            registry.get::<SeaOrmConfig>(),
        ) else {
            return Ok(false);
        };

        if sea_orm.enable && !discovery.enable {
            return Err(eyre!("\"modules.discovery.enable\" must be enabled to use \"modules.sea_orm.enable\""));
        }
        Ok(sea_orm.enable && discovery.enable)
    }
}

View File

@@ -0,0 +1,37 @@
use crate::generator::DatabaseUrl;
use super::{Module, ModulesContext};
use color_eyre::Result;
use handlebars::Handlebars;
use serde::Deserialize;
use serde_inline_default::serde_inline_default;
/// Configuration for the Handlebars template module, deserialized from its
/// config section. `#[serde_inline_default]` supplies the default when the
/// key is absent.
#[serde_inline_default]
#[derive(Debug, Clone, Deserialize)]
pub struct TemplateConfig {
/// Whether the template module is enabled (default: `false`).
#[serde_inline_default(false)]
pub enable: bool,
}
/// Module that owns the Handlebars template registry.
/// Stateless unit struct: the registry itself is stored in `ModulesContext`.
#[derive(Debug)]
pub struct TemplateModule;
#[async_trait::async_trait]
impl Module for TemplateModule {
    /// Constructs an empty `Handlebars<'static>` registry and makes it
    /// available to other modules via the shared anymap.
    fn init(&self, ctx: &mut ModulesContext) -> Result<()> {
        ctx.get_anymap_mut().insert(Handlebars::<'static>::new());
        Ok(())
    }

    /// Activation criteria for this module are not wired up yet, so it
    /// currently never runs.
    async fn validate(&self, _ctx: &mut ModulesContext) -> Result<bool> {
        Ok(false)
    }
}

View File

@@ -11,10 +11,12 @@ use figment::{
};
use handlebars::Handlebars;
use tokio::{fs, io::AsyncWriteExt, process::Command};
use toml_edit::DocumentMut;
use tracing_subscriber::{fmt, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};
#[derive(Parser, Debug)]
struct Args {
#[clap(short, long, default_value = "generator.yml")]
#[clap(short, long, default_value = "generator.toml")]
config: String,
#[clap(short, long, env = "DATABASE_URL")]
database_url: String,
@@ -23,44 +25,50 @@ struct Args {
#[tokio::main]
async fn main() -> Result<()> {
color_eyre::install()?;
tracing_subscriber::fmt().init();
tracing_subscriber::registry()
.with(fmt::layer())
.with(EnvFilter::from_default_env())
.init();
let args = Args::parse();
let config: Config = Figment::new()
.merge(Serialized::defaults(Config::default()))
.merge(Yaml::file(&args.config))
.extract()?;
tracing::info!(?config);
// let config: Config = Figment::new()
// .merge(Serialized::defaults(Config::default()))
// .merge(Yaml::file(&args.config))
// .extract()?;
// tracing::info!(?config);
tracing::info!(?args);
let mut handlebars = Handlebars::new();
templates::register_templates(&mut handlebars, &config).await?;
// let mut handlebars: Registry = ;
// templates::register_templates(&mut handlebars, &config).await?;
let config = fs::read_to_string(args.config).await?;
let root_config = config.parse::<DocumentMut>()?;
let outputs = generator::generate(&args.database_url, &config, &handlebars).await?;
// tracing::info!(?outputs, "Generated files");
for output in outputs.iter() {
tracing::info!(?output, "Generated chunk");
// let mut file = fs::File::create(&output.path).await?;
// file.write_all(output.content.as_bytes()).await?;
}
let merged_outputs = generator::file::combine_chunks(outputs)?;
for output in merged_outputs.iter() {
tracing::info!(?output.path, "Merged file");
let parent = output.path.parent().unwrap();
if !parent.exists() {
fs::create_dir_all(parent).await?;
}
let mut file = fs::File::create(&output.path).await?;
file.write_all(output.content.as_bytes()).await?;
}
for output in merged_outputs.iter() {
tracing::info!(?output.path, "Running rustfmt");
let exit_status = Command::new("rustfmt").arg(&output.path).status().await?;
if !exit_status.success() {
return Err(eyre!("Failed to run rustfmt"));
}
}
let outputs = generator::generate(&args.database_url, root_config).await?;
//
// // tracing::info!(?outputs, "Generated files");
// for output in outputs.iter() {
// tracing::info!(?output, "Generated chunk");
// // let mut file = fs::File::create(&output.path).await?;
// // file.write_all(output.content.as_bytes()).await?;
// }
//
// let merged_outputs = generator::file::combine_chunks(outputs)?;
// for output in merged_outputs.iter() {
// tracing::info!(?output.path, "Merged file");
// let parent = output.path.parent().unwrap();
// if !parent.exists() {
// fs::create_dir_all(parent).await?;
// }
// let mut file = fs::File::create(&output.path).await?;
// file.write_all(output.content.as_bytes()).await?;
// }
// for output in merged_outputs.iter() {
// tracing::info!(?output.path, "Running rustfmt");
// let exit_status = Command::new("rustfmt").arg(&output.path).status().await?;
// if !exit_status.success() {
// return Err(eyre!("Failed to run rustfmt"));
// }
// }
Ok(())
}