Compare commits: 3ed3e079f2...main (14 commits)

| SHA1 |
|---|
| b9dd0399ed |
| 67b0033158 |
| b1948d3061 |
| 3ac0fdeae2 |
| 64e0b4e536 |
| a94f8e2bfd |
| 2b668ba89f |
| 1a745ff17f |
| ed640ad20d |
| 8f509b1e2c |
| 3c58ffbb36 |
| 68d77a23e4 |
| fe423a199b |
| 8fd390fe18 |
.gitea/workflows/check.yml (new file, 11 lines)
@@ -0,0 +1,11 @@
+name: checks
+on:
+  push:
+    branches:
+      - main
+jobs:
+  checks:
+    runs-on: nix
+    steps:
+      - uses: actions/checkout@v4
+      - run: nix flake check -L
Cargo.lock (generated, 1072 changed lines): file diff suppressed because it is too large.
Cargo.toml (38 changed lines)
@@ -1,3 +1,4 @@
+[workspace]
 [package]
 name = "sea-orm-generator"
 version = "0.1.0"
@@ -5,29 +6,28 @@ edition = "2021"
 [dependencies]
 anymap = "0.12.1"
-async-trait = "0.1.88"
-clap = { version = "4.5.32", features = ["derive", "env"] }
-color-eyre = "0.6.3"
-comfy-table = { version = "7.1.4", default-features = false }
+async-trait = "0.1.89"
+clap = { version = "4.5.48", features = ["derive", "env"] }
+color-eyre = "0.6.5"
+comfy-table = { version = "7.2.1", default-features = false }
 comment-parser = "0.1.0"
 figment = { version = "0.10.19", features = ["yaml"] }
 handlebars = "6.3.2"
 heck = "0.5.0"
 include_dir = "0.7.4"
 indicatif = "0.17.11"
 inquire = "0.7.5"
 minijinja = { version = "2.12.0", features = ["loader"] }
-path-clean = "1.0.1"
-quote = "1.0.40"
-sea-orm-codegen = "1.1.8"
-sea-schema = { version = "0.16.1", features = ["sqlx-all"] }
-serde = { version = "1.0.219", features = ["derive"] }
-serde-inline-default = "0.2.3"
-serde_json = "1.0.140"
-serde_yaml = "0.9.34"
-sqlx = { version = "0.8.3", features = ["mysql", "postgres", "sqlite", "runtime-tokio"] }
-syn = { version = "2.0.100", features = ["extra-traits", "full"] }
-tokio = { version = "1.44.1", features = ["full"] }
-toml_edit = { version = "0.22.24", features = ["serde"] }
+pathdiff = "0.2.3"
+quote = "1.0.41"
+sea-orm-codegen = "1.1.16"
+sea-schema = { version = "0.16.2", features = ["sqlx-all"] }
+serde = { version = "1.0.228", features = ["derive"] }
+serde_merge = "0.1.3"
+sqlx = { version = "0.8.6", features = ["mysql", "postgres", "sqlite", "runtime-tokio"] }
+syn = { version = "2.0.106", features = ["extra-traits", "full"] }
+tokio = { version = "1.47.1", features = ["full"] }
+toml = "0.8.23"
+toml_edit = { version = "0.22.27", features = ["serde"] }
 tracing = "0.1.41"
-tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
-url = "2.5.4"
+tracing-subscriber = { version = "0.3.20", features = ["env-filter"] }
+url = "2.5.7"
flake.lock (generated, 36 changed lines)
@@ -3,11 +3,11 @@
     "advisory-db": {
      "flake": false,
      "locked": {
-        "lastModified": 1744011916,
-        "narHash": "sha256-vZIug2BsukcfdNIH8Kto6iUGJM4PgaE8sPIKZDy8MT0=",
+        "lastModified": 1759226657,
+        "narHash": "sha256-msIjIH46R32kIF72PbU0+Hxqeu6yQFo8FsANKgs/iP0=",
        "owner": "rustsec",
        "repo": "advisory-db",
-        "rev": "b3d5d51745076cac459a298838d6bec9f4b052f3",
+        "rev": "f71b77f025d1c2afcd2b07a32e7127a5d138ef4a",
        "type": "github"
      },
      "original": {
@@ -18,11 +18,11 @@
    },
    "crane": {
      "locked": {
-        "lastModified": 1743908961,
-        "narHash": "sha256-e1idZdpnnHWuosI3KsBgAgrhMR05T2oqskXCmNzGPq0=",
+        "lastModified": 1758758545,
+        "narHash": "sha256-NU5WaEdfwF6i8faJ2Yh+jcK9vVFrofLcwlD/mP65JrI=",
        "owner": "ipetkov",
        "repo": "crane",
-        "rev": "80ceeec0dc94ef967c371dcdc56adb280328f591",
+        "rev": "95d528a5f54eaba0d12102249ce42f4d01f4e364",
        "type": "github"
      },
      "original": {
@@ -51,11 +51,11 @@
    },
    "nixpkgs": {
      "locked": {
-        "lastModified": 1743827369,
-        "narHash": "sha256-rpqepOZ8Eo1zg+KJeWoq1HAOgoMCDloqv5r2EAa9TSA=",
+        "lastModified": 1759036355,
+        "narHash": "sha256-0m27AKv6ka+q270dw48KflE0LwQYrO7Fm4/2//KCVWg=",
        "owner": "nixos",
        "repo": "nixpkgs",
-        "rev": "42a1c966be226125b48c384171c44c651c236c22",
+        "rev": "e9f00bd893984bc8ce46c895c3bf7cac95331127",
        "type": "github"
      },
      "original": {
@@ -67,18 +67,18 @@
    },
    "process-compose-wrapper": {
      "locked": {
-        "lastModified": 1743615288,
-        "narHash": "sha256-X7beZH4dxMDkFqOJRB5daCkTMdgo90nZ62VwbFol55M=",
+        "lastModified": 1747144888,
+        "narHash": "sha256-qxIPqNf4JS9Gz138MP+UOSk7PAsIniDhW0NvOeaC/Ek=",
        "ref": "dev",
-        "rev": "c0f01d143e56669d4ea174ddc882d8c6ca85f55c",
-        "revCount": 85,
+        "rev": "948180a09c429d24648d283212a09ff0f50b2815",
+        "revCount": 86,
        "type": "git",
-        "url": "https://gitlab.scug.io/Nikkuss/process-compose-wrapper.git"
+        "url": "https://git.scug.io/nikkuss/process-compose-wrapper.git"
      },
      "original": {
        "ref": "dev",
        "type": "git",
-        "url": "https://gitlab.scug.io/Nikkuss/process-compose-wrapper.git"
+        "url": "https://git.scug.io/nikkuss/process-compose-wrapper.git"
      }
    },
    "root": {
@@ -98,11 +98,11 @@
      ]
    },
    "locked": {
-      "lastModified": 1743993291,
-      "narHash": "sha256-u8GHvduU1gCtoFXvTS/wGjH1ouv5S/GRGq6MAT+sG/k=",
+      "lastModified": 1759286284,
+      "narHash": "sha256-JLdGGc4XDutzSD1L65Ni6Ye+oTm8kWfm0KTPMcyl7Y4=",
      "owner": "oxalica",
      "repo": "rust-overlay",
-      "rev": "0cb3c8979c65dc6a5812dfe67499a8c7b8b4325b",
+      "rev": "f6f2da475176bb7cff51faae8b3fe879cd393545",
      "type": "github"
    },
    "original": {
flake.nix (11 changed lines)
@@ -8,7 +8,7 @@
     flake-utils.url = "github:numtide/flake-utils";

     process-compose-wrapper = {
-      url = "git+https://gitlab.scug.io/Nikkuss/process-compose-wrapper.git?ref=dev";
+      url = "git+https://git.scug.io/nikkuss/process-compose-wrapper.git?ref=dev";
     };
     rust-overlay = {
       url = "github:oxalica/rust-overlay";
@@ -43,8 +43,12 @@
         inherit (pkgs) lib;
         craneLib = (crane.mkLib pkgs).overrideToolchain (
           p:
-          p.rust-bin.stable.latest.default.override {
-            extensions = [ "llvm-tools-preview" ];
+          p.rust-bin.nightly.latest.default.override {
+            extensions = [
+              "llvm-tools-preview"
+              "rust-analyzer"
+              "rust-src"
+            ];
           }
         );

@@ -150,6 +154,7 @@
             cargo-llvm-cov
             cargo-audit
             cargo-tarpaulin
+            cargo-udeps
           ];
         };

@@ -1,10 +1,19 @@
# This file is used to configure the SeaORM generator.
[modules.discovery]
enable = true
[modules.discovery.filter]
include_hidden = false
skip_seaql_migrations = true

[modules.sea_orm]
enable = true
path = "./tests/src/models/_entities"

[modules.template]
enable = true
[modules.template.tables]

[modules.model]
enable = true
prelude = true
path = "./tests/src/models"

[modules.annotate]
enable = true
@@ -3,7 +3,7 @@
   processes = {
     frontend = {
       command = ''
-        RUST_LOG=debug,sqlx=warn ${pkgs.cargo-watch}/bin/cargo-watch -x 'run'
+        RUST_LOG=debug,sqlx=warn ${pkgs.cargo-watch}/bin/cargo-watch -i tests/src -x 'run && cat tests/src/models/user.rs'
       '';
     };
   };
@@ -1,8 +0,0 @@
-use serde::{Deserialize, Serialize};
-
-#[derive(Deserialize, Serialize, Debug, Clone)]
-pub struct DbConfig {
-    pub database_schema: Option<String>,
-    pub max_connections: u32,
-    pub acquire_timeout: u64,
-}
@@ -1,86 +0,0 @@
|
||||
pub mod db;
|
||||
pub mod output;
|
||||
pub mod sea_orm_config;
|
||||
pub mod template;
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
use db::DbConfig;
|
||||
use output::{OutputCommentConfig, OutputConfig, OutputModelConfig};
|
||||
use sea_orm_config::{
|
||||
DateTimeCrate, EntityFormat, Prelude, SeaOrmConfig, SeaOrmEntityConfig,
|
||||
SeaOrmExtraAttributesConfig, SeaOrmExtraDerivesConfig, SeaOrmSerdeConfig, SeaOrmTableConfig,
|
||||
SerdeEnable,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_yaml::Mapping;
|
||||
|
||||
#[derive(Deserialize, Serialize, Debug, Clone)]
|
||||
pub struct Config {
|
||||
pub db: DbConfig,
|
||||
pub sea_orm: SeaOrmConfig,
|
||||
pub output: OutputConfig,
|
||||
pub templates: Option<Mapping>,
|
||||
pub templates_dir: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl Default for Config {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
db: DbConfig {
|
||||
database_schema: None,
|
||||
max_connections: 10,
|
||||
acquire_timeout: 5,
|
||||
},
|
||||
sea_orm: SeaOrmConfig {
|
||||
prelude: Prelude::Enabled,
|
||||
serde: SeaOrmSerdeConfig {
|
||||
enable: SerdeEnable::None,
|
||||
skip_deserializing_primary_key: false,
|
||||
skip_hidden_column: false,
|
||||
},
|
||||
entity: SeaOrmEntityConfig {
|
||||
format: EntityFormat::Compact,
|
||||
tables: SeaOrmTableConfig {
|
||||
include_hidden: false,
|
||||
skip_seaql_migrations: true,
|
||||
table_config: None,
|
||||
},
|
||||
extra_derives: SeaOrmExtraDerivesConfig {
|
||||
model: Vec::new(),
|
||||
eenum: Vec::new(),
|
||||
},
|
||||
extra_attributes: SeaOrmExtraAttributesConfig {
|
||||
model: Vec::new(),
|
||||
eenum: Vec::new(),
|
||||
},
|
||||
date_time_crate: DateTimeCrate::Chrono,
|
||||
with_copy_enums: false,
|
||||
},
|
||||
},
|
||||
output: OutputConfig {
|
||||
path: PathBuf::from("./src/"),
|
||||
models: OutputModelConfig {
|
||||
comment: OutputCommentConfig {
|
||||
max_width: None,
|
||||
table_name: true,
|
||||
column_name: true,
|
||||
column_db_type: true,
|
||||
column_rust_type: true,
|
||||
column_attributes: true,
|
||||
column_exclude_attributes: Vec::new(),
|
||||
enable: true,
|
||||
column_info: true,
|
||||
ignore_errors: false,
|
||||
},
|
||||
enable: true,
|
||||
entities: String::from("_entities"),
|
||||
prelude: true,
|
||||
path: PathBuf::from("./models"),
|
||||
},
|
||||
},
|
||||
templates: None,
|
||||
templates_dir: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,32 +0,0 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Deserialize, Serialize, Debug, Clone)]
|
||||
pub struct OutputConfig {
|
||||
pub path: PathBuf,
|
||||
pub models: OutputModelConfig,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, Debug, Clone)]
|
||||
pub struct OutputModelConfig {
|
||||
pub prelude: bool,
|
||||
pub enable: bool,
|
||||
pub path: PathBuf,
|
||||
pub comment: OutputCommentConfig,
|
||||
pub entities: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, Debug, Clone)]
|
||||
pub struct OutputCommentConfig {
|
||||
pub enable: bool,
|
||||
pub max_width: Option<u16>,
|
||||
pub table_name: bool,
|
||||
pub column_info: bool,
|
||||
pub column_name: bool,
|
||||
pub column_db_type: bool,
|
||||
pub column_rust_type: bool,
|
||||
pub column_attributes: bool,
|
||||
pub column_exclude_attributes: Vec<String>,
|
||||
pub ignore_errors: bool,
|
||||
}
|
||||
@@ -1,233 +0,0 @@
|
||||
use serde::{Deserialize, Deserializer, Serialize};
|
||||
use serde_yaml::Value;
|
||||
|
||||
use sea_orm_codegen::{
|
||||
DateTimeCrate as CodegenDateTimeCrate, EntityWriterContext, WithPrelude, WithSerde,
|
||||
};
|
||||
|
||||
use super::Config;
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum EntityFormat {
|
||||
Expanded,
|
||||
Compact,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[serde(untagged)]
|
||||
pub enum TableConfig {
|
||||
Specific { specific: Vec<String> },
|
||||
Exclude { exclude: Vec<String> },
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum SerdeEnable {
|
||||
Both,
|
||||
Serialize,
|
||||
Deserialize,
|
||||
None,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Prelude {
|
||||
Enabled,
|
||||
Disabled,
|
||||
AllowUnusedImports,
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for SerdeEnable {
|
||||
fn deserialize<D>(deserializer: D) -> Result<SerdeEnable, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let value = Value::deserialize(deserializer)?;
|
||||
|
||||
match value {
|
||||
Value::String(s) if s == "serialize" => Ok(SerdeEnable::Serialize),
|
||||
Value::String(s) if s == "deserialize" => Ok(SerdeEnable::Deserialize),
|
||||
Value::Bool(true) => Ok(SerdeEnable::Both),
|
||||
Value::Bool(false) => Ok(SerdeEnable::None),
|
||||
_ => Err(serde::de::Error::custom(
|
||||
"expected 'serialize', 'deserialize', 'true' or 'false'",
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl Serialize for SerdeEnable {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
match self {
|
||||
SerdeEnable::Both => serializer.serialize_bool(true),
|
||||
SerdeEnable::Serialize => serializer.serialize_str("serialize"),
|
||||
SerdeEnable::Deserialize => serializer.serialize_str("deserialize"),
|
||||
SerdeEnable::None => serializer.serialize_bool(false),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl<'de> Deserialize<'de> for Prelude {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Prelude, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let value = Value::deserialize(deserializer)?;
|
||||
|
||||
match value {
|
||||
Value::Bool(true) => Ok(Prelude::Enabled),
|
||||
Value::Bool(false) => Ok(Prelude::Disabled),
|
||||
Value::String(s) if s == "allow_unused_imports" => Ok(Prelude::AllowUnusedImports),
|
||||
_ => Err(serde::de::Error::custom(
|
||||
"expected 'true', 'false', or 'allow_unused_imports'",
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl Serialize for Prelude {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
match self {
|
||||
Prelude::Enabled => serializer.serialize_bool(true),
|
||||
Prelude::Disabled => serializer.serialize_bool(false),
|
||||
Prelude::AllowUnusedImports => serializer.serialize_str("allow_unused_imports"),
|
||||
}
|
||||
}
|
||||
}
|
||||
#[derive(Deserialize, Serialize, Debug, Clone)]
|
||||
pub struct SeaOrmConfig {
|
||||
pub prelude: Prelude,
|
||||
pub serde: SeaOrmSerdeConfig,
|
||||
pub entity: SeaOrmEntityConfig,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, Debug, Clone)]
|
||||
pub struct SeaOrmSerdeConfig {
|
||||
pub enable: SerdeEnable,
|
||||
pub skip_deserializing_primary_key: bool,
|
||||
pub skip_hidden_column: bool,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, Debug, Clone)]
|
||||
pub struct SeaOrmEntityConfig {
|
||||
pub format: EntityFormat,
|
||||
pub tables: SeaOrmTableConfig,
|
||||
pub extra_derives: SeaOrmExtraDerivesConfig,
|
||||
pub extra_attributes: SeaOrmExtraAttributesConfig,
|
||||
pub date_time_crate: DateTimeCrate,
|
||||
pub with_copy_enums: bool,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, Debug, Clone)]
|
||||
pub struct SeaOrmTableConfig {
|
||||
pub include_hidden: bool,
|
||||
pub skip_seaql_migrations: bool,
|
||||
#[serde(flatten)]
|
||||
pub table_config: Option<TableConfig>,
|
||||
}
|
||||
#[derive(Deserialize, Serialize, Debug, Clone)]
|
||||
pub struct SeaOrmExtraDerivesConfig {
|
||||
pub model: Vec<String>,
|
||||
#[serde(rename = "enum")]
|
||||
pub eenum: Vec<String>,
|
||||
}
|
||||
#[derive(Deserialize, Serialize, Debug, Clone)]
|
||||
pub struct SeaOrmExtraAttributesConfig {
|
||||
pub model: Vec<String>,
|
||||
#[serde(rename = "enum")]
|
||||
pub eenum: Vec<String>,
|
||||
}
|
||||
#[derive(Deserialize, Serialize, Debug, Clone)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum DateTimeCrate {
|
||||
Time,
|
||||
Chrono,
|
||||
}
|
||||
|
||||
impl From<DateTimeCrate> for CodegenDateTimeCrate {
|
||||
fn from(date_time_crate: DateTimeCrate) -> CodegenDateTimeCrate {
|
||||
match date_time_crate {
|
||||
DateTimeCrate::Chrono => CodegenDateTimeCrate::Chrono,
|
||||
DateTimeCrate::Time => CodegenDateTimeCrate::Time,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl SeaOrmTableConfig {
|
||||
pub fn get_filter(&self) -> Box<dyn Fn(&String) -> bool> {
|
||||
let include_hidden = self.include_hidden;
|
||||
if let Some(table) = &self.table_config {
|
||||
match table {
|
||||
TableConfig::Specific { specific } => {
|
||||
let specific = specific.clone();
|
||||
Box::new(move |table: &String| {
|
||||
(include_hidden || !table.starts_with('_')) && specific.contains(table)
|
||||
})
|
||||
}
|
||||
TableConfig::Exclude { exclude } => {
|
||||
let exclude = exclude.clone();
|
||||
Box::new(move |table: &String| {
|
||||
(include_hidden || !table.starts_with('_')) && !exclude.contains(table)
|
||||
})
|
||||
}
|
||||
}
|
||||
} else if self.skip_seaql_migrations {
|
||||
Box::new(move |table: &String| {
|
||||
(include_hidden || !table.starts_with('_'))
|
||||
&& !table.starts_with("seaql_migrations")
|
||||
})
|
||||
} else {
|
||||
Box::new(move |table: &String| (include_hidden || !table.starts_with('_')))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl EntityFormat {
|
||||
pub fn is_expanded(&self) -> bool {
|
||||
matches!(self, EntityFormat::Expanded)
|
||||
}
|
||||
}
|
||||
impl From<Prelude> for WithPrelude {
|
||||
fn from(val: Prelude) -> Self {
|
||||
match val {
|
||||
Prelude::Enabled => WithPrelude::All,
|
||||
|
||||
Prelude::Disabled => WithPrelude::None,
|
||||
Prelude::AllowUnusedImports => WithPrelude::AllAllowUnusedImports,
|
||||
}
|
||||
}
|
||||
}
|
||||
impl From<SerdeEnable> for WithSerde {
|
||||
fn from(val: SerdeEnable) -> Self {
|
||||
match val {
|
||||
SerdeEnable::Both => WithSerde::Both,
|
||||
SerdeEnable::Serialize => WithSerde::Serialize,
|
||||
SerdeEnable::Deserialize => WithSerde::Deserialize,
|
||||
SerdeEnable::None => WithSerde::None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Config> for EntityWriterContext {
|
||||
fn from(val: Config) -> Self {
|
||||
EntityWriterContext::new(
|
||||
val.sea_orm.entity.format.is_expanded(),
|
||||
val.sea_orm.prelude.into(),
|
||||
val.sea_orm.serde.enable.into(),
|
||||
val.sea_orm.entity.with_copy_enums,
|
||||
val.sea_orm.entity.date_time_crate.into(),
|
||||
val.db.database_schema,
|
||||
false,
|
||||
val.sea_orm.serde.skip_deserializing_primary_key,
|
||||
val.sea_orm.serde.skip_hidden_column,
|
||||
val.sea_orm.entity.extra_derives.model,
|
||||
val.sea_orm.entity.extra_attributes.model,
|
||||
val.sea_orm.entity.extra_derives.eenum,
|
||||
val.sea_orm.entity.extra_attributes.eenum,
|
||||
false,
|
||||
false,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
|
||||
@@ -1,154 +0,0 @@
|
||||
use core::time;
|
||||
|
||||
use color_eyre::eyre::{eyre, ContextCompat, Report, Result};
|
||||
use sea_schema::sea_query::TableCreateStatement;
|
||||
use url::Url;
|
||||
|
||||
use crate::config::db::DbConfig;
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum DbType {
|
||||
MySql,
|
||||
Postgres,
|
||||
Sqlite,
|
||||
}
|
||||
|
||||
pub async fn get_tables(
|
||||
database_url: String,
|
||||
filter: Box<dyn Fn(&String) -> bool>,
|
||||
database_config: &DbConfig,
|
||||
) -> Result<(Vec<TableCreateStatement>, DbType)> {
|
||||
let url = Url::parse(&database_url)?;
|
||||
|
||||
tracing::trace!(?url);
|
||||
|
||||
let is_sqlite = url.scheme() == "sqlite";
|
||||
|
||||
let database_name: &str = (if !is_sqlite {
|
||||
let database_name = url
|
||||
.path_segments()
|
||||
.context("No database name as part of path")?
|
||||
.next()
|
||||
.context("No database name as part of path")?;
|
||||
|
||||
if database_name.is_empty() {
|
||||
return Err(eyre!("Database path name is empty"));
|
||||
}
|
||||
Ok::<&str, Report>(database_name)
|
||||
} else {
|
||||
Ok(Default::default())
|
||||
})?;
|
||||
|
||||
let (table_stmts, db_type) = match url.scheme() {
|
||||
"mysql" => {
|
||||
use sea_schema::mysql::discovery::SchemaDiscovery;
|
||||
use sqlx::MySql;
|
||||
|
||||
tracing::info!("Connecting to MySQL");
|
||||
let connection = sqlx_connect::<MySql>(
|
||||
database_config.max_connections,
|
||||
database_config.acquire_timeout,
|
||||
url.as_str(),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
||||
tracing::info!("Discovering schema");
|
||||
let schema_discovery = SchemaDiscovery::new(connection, database_name);
|
||||
let schema = schema_discovery.discover().await?;
|
||||
let table_stmts = schema
|
||||
.tables
|
||||
.into_iter()
|
||||
.filter(|schema| filter(&schema.info.name))
|
||||
.map(|schema| schema.write())
|
||||
.collect();
|
||||
(table_stmts, DbType::MySql)
|
||||
}
|
||||
"sqlite" => {
|
||||
use sea_schema::sqlite::discovery::SchemaDiscovery;
|
||||
use sqlx::Sqlite;
|
||||
|
||||
tracing::info!("Connecting to SQLite");
|
||||
let connection = sqlx_connect::<Sqlite>(
|
||||
database_config.max_connections,
|
||||
database_config.acquire_timeout,
|
||||
url.as_str(),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
||||
tracing::info!("Discovering schema");
|
||||
let schema_discovery = SchemaDiscovery::new(connection);
|
||||
let schema = schema_discovery
|
||||
.discover()
|
||||
.await?
|
||||
.merge_indexes_into_table();
|
||||
let table_stmts = schema
|
||||
.tables
|
||||
.into_iter()
|
||||
.filter(|schema| filter(&schema.name))
|
||||
.map(|schema| schema.write())
|
||||
.collect();
|
||||
(table_stmts, DbType::Sqlite)
|
||||
}
|
||||
"postgres" | "potgresql" => {
|
||||
use sea_schema::postgres::discovery::SchemaDiscovery;
|
||||
use sqlx::Postgres;
|
||||
|
||||
tracing::info!("Connecting to Postgres");
|
||||
let schema = &database_config
|
||||
.database_schema
|
||||
.as_deref()
|
||||
.unwrap_or("public");
|
||||
let connection = sqlx_connect::<Postgres>(
|
||||
database_config.max_connections,
|
||||
database_config.acquire_timeout,
|
||||
url.as_str(),
|
||||
Some(schema),
|
||||
)
|
||||
.await?;
|
||||
tracing::info!("Discovering schema");
|
||||
let schema_discovery = SchemaDiscovery::new(connection, schema);
|
||||
let schema = schema_discovery.discover().await?;
|
||||
tracing::info!(?schema);
|
||||
let table_stmts = schema
|
||||
.tables
|
||||
.into_iter()
|
||||
.filter(|schema| filter(&schema.info.name))
|
||||
.map(|schema| schema.write())
|
||||
.collect();
|
||||
(table_stmts, DbType::Postgres)
|
||||
}
|
||||
_ => unimplemented!("{} is not supported", url.scheme()),
|
||||
};
|
||||
tracing::info!("Schema discovered");
|
||||
|
||||
Ok((table_stmts, db_type))
|
||||
}
|
||||
async fn sqlx_connect<DB>(
|
||||
max_connections: u32,
|
||||
acquire_timeout: u64,
|
||||
url: &str,
|
||||
schema: Option<&str>,
|
||||
) -> Result<sqlx::Pool<DB>>
|
||||
where
|
||||
DB: sqlx::Database,
|
||||
for<'a> &'a mut <DB as sqlx::Database>::Connection: sqlx::Executor<'a>,
|
||||
{
|
||||
let mut pool_options = sqlx::pool::PoolOptions::<DB>::new()
|
||||
.max_connections(max_connections)
|
||||
.acquire_timeout(time::Duration::from_secs(acquire_timeout));
|
||||
// Set search_path for Postgres, E.g. Some("public") by default
|
||||
// MySQL & SQLite connection initialize with schema `None`
|
||||
if let Some(schema) = schema {
|
||||
let sql = format!("SET search_path = '{schema}'");
|
||||
pool_options = pool_options.after_connect(move |conn, _| {
|
||||
let sql = sql.clone();
|
||||
Box::pin(async move {
|
||||
sqlx::Executor::execute(conn, sql.as_str())
|
||||
.await
|
||||
.map(|_| ())
|
||||
})
|
||||
});
|
||||
}
|
||||
pool_options.connect(url).await.map_err(Into::into)
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
use color_eyre::Result;
|
||||
use path_clean::PathClean;
|
||||
use std::{collections::HashMap, path::PathBuf};
|
||||
use tokio::{fs::File, io::AsyncWriteExt};
|
||||
|
||||
pub fn pathbuf_to_rust_path(path: PathBuf) -> String {
|
||||
let clean_path = path.clean();
|
||||
@@ -27,8 +28,10 @@ pub fn pathbuf_to_rust_path(path: PathBuf) -> String {
|
||||
path
|
||||
}
|
||||
#[derive(Debug, Clone)]
|
||||
#[allow(unused)]
|
||||
pub enum InsertPoint {
|
||||
Start,
|
||||
Replace(String),
|
||||
End,
|
||||
}
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -47,20 +50,54 @@ impl FileManager {
|
||||
files: HashMap::new(),
|
||||
}
|
||||
}
|
||||
pub fn insert_file(
|
||||
pub fn insert(
|
||||
&mut self,
|
||||
file: PathBuf,
|
||||
content: String,
|
||||
file: &PathBuf,
|
||||
content: &str,
|
||||
insert_point: Option<InsertPoint>,
|
||||
) -> Result<()> {
|
||||
if let Some(file) = self.files.get_mut(&file) {
|
||||
if let Some(file) = self.files.get_mut(file) {
|
||||
match insert_point {
|
||||
Some(InsertPoint::Start) => file.content.insert_str(0, &content),
|
||||
Some(InsertPoint::End) => file.content.push_str(&content),
|
||||
None => file.content.push_str(&content),
|
||||
Some(InsertPoint::Start) => file.content.insert_str(0, content),
|
||||
Some(InsertPoint::End) => file.content.push_str(content),
|
||||
None => file.content.push_str(content),
|
||||
Some(InsertPoint::Replace(replace)) => {
|
||||
let content = file.content.replace(&replace, content);
|
||||
file.content = content;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
self.files.insert(file.clone(), FileContent { content });
|
||||
self.files.insert(
|
||||
file.clone(),
|
||||
FileContent {
|
||||
content: content.to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
pub fn get(&self, file: &PathBuf) -> Option<&FileContent> {
|
||||
self.files.get(file)
|
||||
}
|
||||
pub async fn write_files(&self) -> Result<()> {
|
||||
for (file, content) in &self.files {
|
||||
tracing::info!(?file, "Writing file");
|
||||
let parent = file.parent().unwrap();
|
||||
if !parent.exists() {
|
||||
tokio::fs::create_dir_all(parent).await?;
|
||||
}
|
||||
let mut opened_file = File::create(file).await?;
|
||||
opened_file.write_all(content.content.as_bytes()).await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
pub async fn format_files(&self) -> Result<()> {
|
||||
for file in self.files.keys() {
|
||||
tracing::info!(?file, "Formatting file");
|
||||
tokio::process::Command::new("rustfmt")
|
||||
.arg(file)
|
||||
.output()
|
||||
.await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -68,7 +105,7 @@ impl FileManager {
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use crate::generator::file::pathbuf_to_rust_path;
|
||||
use crate::generator::file::{pathbuf_to_rust_path, FileManager, InsertPoint};
|
||||
use std::path::PathBuf;
|
||||
#[test]
|
||||
fn test_pathbuf_to_rust_path() {
|
||||
@@ -90,4 +127,49 @@ mod test {
|
||||
let rust_path = pathbuf_to_rust_path(path);
|
||||
assert_eq!(rust_path, "");
|
||||
}
|
||||
#[test]
|
||||
fn test_fildmanager_insert() {
|
||||
let mut file_manager = FileManager::new();
|
||||
let file_path = PathBuf::from("test.rs");
|
||||
file_manager.insert(&file_path, "test", None).unwrap();
|
||||
file_manager.insert(&file_path, "test1", None).unwrap();
|
||||
assert_eq!(file_manager.get(&file_path).unwrap().content, "testtest1");
|
||||
}
|
||||
#[test]
|
||||
fn test_fildmanager_insert_start() {
|
||||
let mut file_manager = FileManager::new();
|
||||
let file_path = PathBuf::from("test.rs");
|
||||
file_manager.insert(&file_path, "test", None).unwrap();
|
||||
file_manager
|
||||
.insert(&file_path, "teststart", Some(InsertPoint::Start))
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
file_manager.get(&file_path).unwrap().content,
|
||||
"teststarttest"
|
||||
);
|
||||
}
|
||||
#[test]
|
||||
fn test_fildmanager_insert_end() {
|
||||
let mut file_manager = FileManager::new();
|
||||
let file_path = PathBuf::from("test.rs");
|
||||
file_manager.insert(&file_path, "test", None).unwrap();
|
||||
file_manager
|
||||
.insert(&file_path, "testend", Some(InsertPoint::End))
|
||||
.unwrap();
|
||||
assert_eq!(file_manager.get(&file_path).unwrap().content, "testtestend");
|
||||
}
|
||||
#[test]
|
||||
fn test_fildmanager_insert_replace() {
|
||||
let mut file_manager = FileManager::new();
|
||||
let file_path = PathBuf::from("test.rs");
|
||||
file_manager.insert(&file_path, "test", None).unwrap();
|
||||
file_manager
|
||||
.insert(
|
||||
&file_path,
|
||||
"testreplace",
|
||||
Some(InsertPoint::Replace("test".to_string())),
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(file_manager.get(&file_path).unwrap().content, "testreplace");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,30 +1,32 @@
|
||||
pub mod discover;
|
||||
pub mod file;
|
||||
pub mod modules;
|
||||
use color_eyre::Result;
|
||||
use handlebars::Handlebars;
|
||||
use toml_edit::DocumentMut;
|
||||
|
||||
use crate::Args;
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct DatabaseUrl(String);
|
||||
|
||||
pub async fn generate(database_url: &str, root_config: DocumentMut) -> Result<()> {
|
||||
pub async fn generate(args: Args, root_config: DocumentMut) -> Result<()> {
|
||||
let mut module_manager = modules::ModuleManager::new(root_config);
|
||||
module_manager.init()?;
|
||||
let ctx = module_manager.get_context_mut();
|
||||
ctx.get_anymap_mut()
|
||||
.insert(DatabaseUrl(database_url.to_owned()));
|
||||
.insert(DatabaseUrl(args.database_url.to_owned()));
|
||||
module_manager.validate().await?;
|
||||
module_manager.execute().await?;
|
||||
module_manager
|
||||
.get_context_mut()
|
||||
.get_file_manager()
|
||||
.write_files()
|
||||
.await?;
|
||||
if args.rustfmt {
|
||||
module_manager
|
||||
.get_context_mut()
|
||||
.get_file_manager()
|
||||
.format_files()
|
||||
.await?;
|
||||
}
|
||||
|
||||
// let db_filter = config.sea_orm.entity.tables.get_filter();
|
||||
// let (table_stmts, db_type) =
|
||||
// discover::get_tables(database_url.to_owned(), db_filter, &config.db).await?;
|
||||
// let tables = table_stmts
|
||||
// .into_iter()
|
||||
// .map(Table::new)
|
||||
// .collect::<Result<Vec<Table>>>()?;
|
||||
//
|
||||
// let model_outputs = modules::models::generate_models(database_url, config, handlebars).await?;
|
||||
// files.extend(model_outputs);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
src/generator/modules/annotate/comment.rs (new file, 121 lines)
@@ -0,0 +1,121 @@
|
||||
use color_eyre::Result;
|
||||
use minijinja::Environment;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::generator::modules::{
|
||||
discovery::{db::DbType, table::Table},
|
||||
sea_orm::config::DateTimeCrate,
|
||||
};
|
||||
use comfy_table::{Cell, ContentArrangement, Table as CTable};
|
||||
|
||||
use super::{
|
||||
AnnotateCommentConfig, COMMENTBODY, COMMENTHEAD, COMMENTTAIL, HEADER, SETTINGSDELIMITER,
|
||||
};
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct CommentContext<'a> {
|
||||
pub table_name: &'a str,
|
||||
pub config: &'a AnnotateCommentConfig,
|
||||
pub column_info_table: String,
|
||||
pub comment_config: Option<String>,
|
||||
pub config_delimiter: &'a str,
|
||||
}
|
||||
|
||||
pub fn generate_comment(
|
||||
table: &Table,
|
||||
config: &AnnotateCommentConfig,
|
||||
environment: &Environment<'static>,
|
||||
db_type: &DbType,
|
||||
date_time_crate: &DateTimeCrate,
|
||||
comment_config: Option<AnnotateCommentConfig>,
|
||||
) -> Result<String> {
|
||||
let mut column_info_table = CTable::new();
|
||||
let mut header = Vec::new();
|
||||
if config.column_name.unwrap() {
|
||||
header.push("Name");
|
||||
}
|
||||
if config.column_db_type.unwrap() {
|
||||
header.push("DbType");
|
||||
}
|
||||
if config.column_rust_type.unwrap() {
|
||||
header.push("RsType");
|
||||
}
|
||||
if config.column_attributes.unwrap() {
|
||||
header.push("Attrs");
|
||||
}
|
||||
column_info_table
|
||||
.load_preset("|| -+=++ + ++")
|
||||
.set_content_arrangement(ContentArrangement::Dynamic)
|
||||
.set_header(header);
|
||||
if let Some(width) = config.max_wdith {
|
||||
column_info_table.set_width(width);
|
||||
}
|
||||
for column in &table.columns {
|
||||
let mut row = Vec::new();
|
||||
if config.column_name.unwrap() {
|
||||
row.push(Cell::new(column.name.clone()))
|
||||
}
|
||||
if config.column_db_type.unwrap() {
|
||||
let column_type = column.get_db_type(db_type);
|
||||
row.push(Cell::new(column_type));
|
||||
}
|
||||
if config.column_rust_type.unwrap() {
|
||||
let column_type = column.get_rust_type(date_time_crate);
|
||||
row.push(Cell::new(column_type));
|
||||
}
|
||||
if config.column_attributes.unwrap() {
|
||||
let exclude = config.column_exclude_attributes.clone().unwrap();
|
||||
let filter: Box<dyn Fn(&String) -> bool> = Box::new(move |f| {
|
||||
let exclude = exclude.clone();
|
||||
!exclude.contains(f)
|
||||
});
|
||||
let attrs_string = column.attrs_to_string(Some(filter));
|
||||
|
||||
row.push(Cell::new(attrs_string));
|
||||
}
|
||||
column_info_table.add_row(row);
|
||||
}
|
||||
let context = CommentContext {
|
||||
table_name: &table.name,
|
||||
config,
|
||||
column_info_table: column_info_table.to_string(),
|
||||
comment_config: comment_config
|
||||
.and_then(|f| toml::to_string_pretty(&f).ok())
|
||||
.map(|s| {
|
||||
s.lines()
|
||||
.map(|line| format!(" {}", line))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n")
|
||||
}),
|
||||
config_delimiter: SETTINGSDELIMITER,
|
||||
};
|
||||
let template = environment.get_template("annotate.comment")?;
|
||||
let rendered_data = template.render(&context)?;
|
||||
|
||||
Ok(pad_comment(&rendered_data))
|
||||
}
|
||||
|
||||
pub fn pad_comment(s: &str) -> String {
|
||||
let parts = s.split('\n').collect::<Vec<_>>();
|
||||
let mut padded = String::new();
|
||||
for (index, part) in parts.iter().enumerate() {
|
||||
let first = index == 0;
|
||||
let comment = match first {
|
||||
true => format!("{} {}\n{}", COMMENTHEAD, HEADER, COMMENTBODY),
|
||||
false => COMMENTBODY.to_string(),
|
||||
};
|
||||
let padded_part = format!("{} {}\n", comment, part);
|
||||
padded.push_str(&padded_part);
|
||||
}
|
||||
padded.push_str(COMMENTTAIL);
|
||||
padded
|
||||
}
|
||||
|
||||
pub fn find_settings_block(file_content: &str) -> Option<String> {
|
||||
let delimiter_length = SETTINGSDELIMITER.len();
|
||||
let start_pos = file_content.find(SETTINGSDELIMITER)?;
|
||||
let end_pos = file_content[start_pos + delimiter_length..].find(SETTINGSDELIMITER)?;
|
||||
let content = &file_content[start_pos + delimiter_length..start_pos + end_pos];
|
||||
let content = content.replace(&format!("\n{COMMENTBODY}"), "\n");
|
||||
Some(content)
|
||||
}
|
||||
src/generator/modules/annotate/mod.rs (new file, 174 lines)
@@ -0,0 +1,174 @@
|
||||
pub mod comment;
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::generator::file::InsertPoint;
|
||||
|
||||
use super::{
|
||||
discovery::DiscoveredSchema, models::ModelsConfig, sea_orm::SeaOrmConfig, Module,
|
||||
ModulesContext,
|
||||
};
|
||||
use color_eyre::Result;
|
||||
use comment_parser::{CommentParser, Event};
|
||||
use minijinja::Environment;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::fs;
|
||||
const HEADER: &str = r#"== Schema Information"#;
|
||||
const COMMENTHEAD: &str = r#"/*"#;
|
||||
const COMMENTBODY: &str = r#" *"#;
|
||||
const COMMENTTAIL: &str = r#"*/"#;
|
||||
const SETTINGSDELIMITER: &str = r#"```"#;
|
||||
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
#[serde(default)]
|
||||
#[derive(Default)]
|
||||
pub struct AnnotateConfig {
|
||||
pub enable: bool,
|
||||
pub comment: AnnotateCommentConfig,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct AnnotateCommentConfig {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub enable: Option<bool>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub max_wdith: Option<u16>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub table_name: Option<bool>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub column_name: Option<bool>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub column_db_type: Option<bool>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub column_rust_type: Option<bool>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub column_attributes: Option<bool>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub column_exclude_attributes: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
impl Default for AnnotateCommentConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
enable: Some(true),
|
||||
max_wdith: Some(80),
|
||||
table_name: Some(true),
|
||||
column_name: Some(true),
|
||||
column_db_type: Some(true),
|
||||
column_rust_type: Some(true),
|
||||
column_attributes: Some(true),
|
||||
column_exclude_attributes: Some(vec![]),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct AnnotateModule;
|
||||
#[async_trait::async_trait]
|
||||
impl Module for AnnotateModule {
|
||||
fn init(&mut self, ctx: &mut ModulesContext) -> Result<()> {
|
||||
ctx.get_config_auto::<AnnotateConfig>("modules.annotate")?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn validate(&mut self, ctx: &mut ModulesContext) -> Result<bool> {
|
||||
let map = ctx.get_anymap();
|
||||
|
||||
if let Some(config) = map.get::<AnnotateConfig>() {
|
||||
Ok(config.enable)
|
||||
} else {
|
||||
Ok(false)
|
||||
}
|
||||
}
|
||||
async fn execute(&mut self, ctx: &mut ModulesContext) -> Result<()> {
|
||||
let map = ctx.get_anymap();
|
||||
let file_manager = ctx.get_file_manager();
|
||||
let mut file_chunks: Vec<(PathBuf, String, Option<InsertPoint>)> = Vec::new();
|
||||
if let (Some(config), Some(environment), Some(schema)) = (
|
||||
map.get::<AnnotateConfig>(),
|
||||
map.get::<Environment<'static>>(),
|
||||
map.get::<DiscoveredSchema>(),
|
||||
) {
|
||||
if let (Some(models_config), Some(sea_orm_config)) =
|
||||
(map.get::<ModelsConfig>(), map.get::<SeaOrmConfig>())
|
||||
{
|
||||
if models_config.enable {
|
||||
let path = models_config.path.clone().unwrap();
|
||||
for table in &schema.tables {
|
||||
let path = path.join(format!("{}.rs", table.name));
|
||||
let content = file_manager.get(&path);
|
||||
if content.is_some() {
|
||||
// generate default comment and insert
|
||||
let comment = comment::generate_comment(
|
||||
table,
|
||||
&config.comment,
|
||||
environment,
|
||||
&schema.database_type,
|
||||
&sea_orm_config.entity.date_time_crate,
|
||||
None,
|
||||
)?;
|
||||
file_chunks.push((path, comment, Some(InsertPoint::Start)))
|
||||
} else {
|
||||
let content = fs::read_to_string(&path).await?;
|
||||
let rules = comment_parser::get_syntax("rust").unwrap();
|
||||
let parser = CommentParser::new(&content, rules);
|
||||
for comment in parser {
|
||||
if let Event::BlockComment(body, _) = comment {
|
||||
if body.contains(HEADER) {
|
||||
tracing::debug!("Found header");
|
||||
let mut comment_config = config.comment.clone();
|
||||
let mut file_comment_config = None;
|
||||
if let Some(parsed_settings) =
|
||||
comment::find_settings_block(&content)
|
||||
{
|
||||
match toml::from_str::<AnnotateCommentConfig>(
|
||||
&parsed_settings,
|
||||
) {
|
||||
Ok(s) => {
|
||||
file_comment_config = Some(s.clone());
|
||||
comment_config =
|
||||
serde_merge::omerge(&config.comment, s)?;
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(e.into());
|
||||
// if !settings.ignore_errors {
|
||||
// return Err(e.into());
|
||||
// }
|
||||
}
|
||||
}
|
||||
}
|
||||
tracing::info!(?comment_config, ?file_comment_config);
|
||||
if comment_config.enable.unwrap() {
|
||||
let comment = comment::generate_comment(
|
||||
table,
|
||||
&comment_config,
|
||||
environment,
|
||||
&schema.database_type,
|
||||
&sea_orm_config.entity.date_time_crate,
|
||||
file_comment_config,
|
||||
)?;
|
||||
file_chunks.push((path.clone(), content.clone(), None));
|
||||
file_chunks.push((
|
||||
path,
|
||||
comment,
|
||||
Some(InsertPoint::Replace(body.to_string())),
|
||||
));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let file_manager = ctx.get_file_manager_mut();
|
||||
for file in file_chunks {
|
||||
file_manager.insert(&file.0, &file.1, file.2)?;
|
||||
}
|
||||
Ok(())
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,14 +1,17 @@
|
||||
use color_eyre::{eyre::ContextCompat, Result};
|
||||
use heck::ToUpperCamelCase;
|
||||
use sea_schema::sea_query::{ColumnDef, ColumnSpec, ColumnType, IndexCreateStatement};
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::config::sea_orm_config::DateTimeCrate;
|
||||
use crate::generator::modules::sea_orm::config::DateTimeCrate;
|
||||
|
||||
use super::db::DbType;
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct Column {
|
||||
pub name: String,
|
||||
#[serde(skip_serializing)]
|
||||
pub col_type: ColumnType,
|
||||
#[serde(skip_serializing)]
|
||||
pub attrs: Vec<ColumnSpec>,
|
||||
}
|
||||
|
||||
@@ -34,30 +37,18 @@ impl Column {
|
||||
attrs: attrs.to_vec(),
|
||||
})
|
||||
}
|
||||
// pub fn get_info_row(&self, config: &ModelConfig) -> Result<Vec<Cell>> {
|
||||
// let column_type_rust = self.get_rs_type(&config.comment.date_time_crate);
|
||||
// let column_type = self.get_db_type(&config.db_type);
|
||||
// let attrs = self.attrs_to_string();
|
||||
// let mut cols = Vec::new();
|
||||
// if config.comment.column_name {
|
||||
// cols.push(Cell::new(self.name.clone()))
|
||||
// }
|
||||
// if config.comment.column_name {
|
||||
// cols.push(Cell::new(column_type.clone()))
|
||||
// }
|
||||
// if config.comment.column_rust_type {
|
||||
// cols.push(Cell::new(column_type_rust.clone()))
|
||||
// }
|
||||
// if config.comment.column_attributes {
|
||||
// cols.push(Cell::new(attrs.clone()));
|
||||
// }
|
||||
// Ok(cols)
|
||||
// }
|
||||
pub fn attrs_to_string(&self) -> String {
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub fn attrs_to_string(&self, filter: Option<Box<dyn Fn(&String) -> bool>>) -> String {
|
||||
self.attrs
|
||||
.iter()
|
||||
.filter_map(Self::get_addr_type)
|
||||
.map(|s| s.to_string())
|
||||
.filter(|s| {
|
||||
if let Some(filter) = &filter {
|
||||
return filter(s);
|
||||
};
|
||||
true
|
||||
})
|
||||
.collect::<Vec<String>>()
|
||||
.join(", ")
|
||||
}
|
||||
@@ -71,7 +62,7 @@ impl Column {
|
||||
ColumnSpec::UniqueKey => Some("unique key".to_owned()),
|
||||
ColumnSpec::Check(_) => unimplemented!(),
|
||||
ColumnSpec::Generated { .. } => unimplemented!(),
|
||||
ColumnSpec::Extra(_) => unimplemented!(),
|
||||
ColumnSpec::Extra(e) => Some(e.to_string()),
|
||||
ColumnSpec::Comment(_) => unimplemented!(),
|
||||
ColumnSpec::Using(_) => unimplemented!(),
|
||||
}
|
||||
@@ -171,7 +162,7 @@ impl Column {
|
||||
}
|
||||
write_db_type(&self.col_type, db_type)
|
||||
}
|
||||
pub fn get_rs_type(&self, date_time_crate: &DateTimeCrate) -> String {
|
||||
pub fn get_rust_type(&self, date_time_crate: &DateTimeCrate) -> String {
|
||||
fn write_rs_type(col_type: &ColumnType, date_time_crate: &DateTimeCrate) -> String {
|
||||
#[allow(unreachable_patterns)]
|
||||
match col_type {
|
||||
|
||||
@@ -4,8 +4,6 @@ use color_eyre::eyre::{eyre, ContextCompat, Report, Result};
|
||||
use sea_schema::sea_query::TableCreateStatement;
|
||||
use url::Url;
|
||||
|
||||
use crate::config::db::DbConfig;
|
||||
|
||||
use super::DiscoveryConfig;
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum DbType {
|
||||
|
||||
@@ -8,7 +8,6 @@ use color_eyre::Result;
|
||||
use db::DbType;
|
||||
use sea_schema::sea_query::TableCreateStatement;
|
||||
use serde::Deserialize;
|
||||
use serde_inline_default::serde_inline_default;
|
||||
use table::Table;
|
||||
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
@@ -83,7 +82,7 @@ impl DiscoveryFilterConfig {
|
||||
&& !table.starts_with("seaql_migrations")
|
||||
})
|
||||
} else {
|
||||
Box::new(move |table: &String| (include_hidden || !table.starts_with('_')))
|
||||
Box::new(move |table: &String| include_hidden || !table.starts_with('_'))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -113,7 +112,6 @@ impl Module for DiscoveryModule {
|
||||
if let (Some(config), Some(_)) = (map.get::<DiscoveryConfig>(), map.get::<DatabaseUrl>()) {
|
||||
Ok(config.enable)
|
||||
} else {
|
||||
// One or both keys are missing
|
||||
Ok(false)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
use super::column::Column;
|
||||
use color_eyre::{eyre::eyre, Result};
|
||||
use sea_schema::sea_query::{self, TableCreateStatement};
|
||||
use serde::Serialize;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug, Clone, Serialize)]
|
||||
pub struct Table {
|
||||
pub name: String,
|
||||
pub columns: Vec<Column>,
|
||||
|
||||
@@ -1,25 +1,18 @@
|
||||
use std::{
|
||||
fmt::Debug,
|
||||
sync::{Arc, MutexGuard},
|
||||
};
|
||||
use std::fmt::Debug;
|
||||
|
||||
use anymap::{
|
||||
any::{Any, CloneAny},
|
||||
Map,
|
||||
};
|
||||
use annotate::AnnotateModule;
|
||||
use anymap::{any::CloneAny, Map};
|
||||
use color_eyre::{eyre::eyre, Result};
|
||||
use discovery::DiscoveryModule;
|
||||
use sea_orm::{SeaOrmConfig, SeaOrmModule};
|
||||
// use models::ModelsModule;
|
||||
use models::ModelsModule;
|
||||
use sea_orm::SeaOrmModule;
|
||||
use serde::{de::IntoDeserializer, Deserialize};
|
||||
use std::sync::Mutex;
|
||||
use templates::TemplateModule;
|
||||
use toml_edit::{de::ValueDeserializer, DocumentMut, Item, Value};
|
||||
// use models::table::Table;
|
||||
//
|
||||
// use super::discover::DbType;
|
||||
use toml_edit::{DocumentMut, Item};
|
||||
|
||||
use super::file::FileManager;
|
||||
type AnyCloneMap = Map<dyn CloneAny + Send>;
|
||||
pub mod annotate;
|
||||
pub mod discovery;
|
||||
pub mod models;
|
||||
pub mod sea_orm;
|
||||
@@ -28,12 +21,14 @@ pub mod templates;
|
||||
pub struct ModulesContext {
|
||||
pub anymap: AnyCloneMap,
|
||||
pub root_config: DocumentMut,
|
||||
pub file_manager: FileManager,
|
||||
}
|
||||
impl ModulesContext {
|
||||
pub fn new(root_config: DocumentMut) -> Self {
|
||||
Self {
|
||||
anymap: AnyCloneMap::new(),
|
||||
root_config,
|
||||
file_manager: FileManager::new(),
|
||||
}
|
||||
}
|
||||
pub fn get_config_raw(&self, path: &str) -> Result<&Item> {
|
||||
@@ -44,12 +39,12 @@ impl ModulesContext {
|
||||
if let Some(v) = item.get(i) {
|
||||
*item = v;
|
||||
} else {
|
||||
return Err(eyre!("Config not found"));
|
||||
return Err(eyre!("Config not found \"{i}\""));
|
||||
}
|
||||
} else if let Some(v) = self.root_config.get(i) {
|
||||
item = Some(v);
|
||||
} else {
|
||||
return Err(eyre!("Config not found"));
|
||||
return Err(eyre!("Config not found \"{i}\""));
|
||||
}
|
||||
}
|
||||
if let Some(v) = item {
|
||||
@@ -58,8 +53,10 @@ impl ModulesContext {
|
||||
Err(eyre!("Config not found"))
|
||||
}
|
||||
}
|
||||
pub fn get_config<'a, V: Deserialize<'a> + Debug>(&self, path: &str) -> Result<V> {
|
||||
let item = self.get_config_raw(path)?;
|
||||
pub fn get_config<'a, V: Deserialize<'a> + Debug>(&self, path: &str) -> Result<Option<V>> {
|
||||
let Ok(item) = self.get_config_raw(path) else {
|
||||
return Ok(None);
|
||||
};
|
||||
let value = item
|
||||
.clone()
|
||||
.into_value()
|
||||
@@ -67,26 +64,31 @@ impl ModulesContext {
|
||||
let deserializer = value.into_deserializer();
|
||||
let config = V::deserialize(deserializer)?;
|
||||
tracing::debug!(?config, "{}", path);
|
||||
Ok(config)
|
||||
Ok(Some(config))
|
||||
}
|
||||
pub fn get_config_auto<'a, V: Deserialize<'a> + Clone + Send + Debug + 'static>(
|
||||
pub fn get_config_auto<'a, V: Deserialize<'a> + Clone + Send + Debug + Default + 'static>(
|
||||
&mut self,
|
||||
path: &str,
|
||||
) -> Result<()> {
|
||||
let value: V = self.get_config::<V>(path)?;
|
||||
self.get_anymap_mut().insert(value);
|
||||
let value: Option<V> = self.get_config::<V>(path)?;
|
||||
if value.is_none() {
|
||||
tracing::warn!(?path, "Config not found, using default");
|
||||
}
|
||||
self.get_anymap_mut().insert(value.unwrap_or_default());
|
||||
Ok(())
|
||||
}
|
||||
// pub fn get_anymap(&self) -> MutexGuard<Map> {
|
||||
// let v = self.anymap.lock().unwrap();
|
||||
// v
|
||||
// }
|
||||
pub fn get_anymap(&self) -> &AnyCloneMap {
|
||||
&self.anymap
|
||||
}
|
||||
pub fn get_anymap_mut(&mut self) -> &mut AnyCloneMap {
|
||||
&mut self.anymap
|
||||
}
|
||||
pub fn get_file_manager(&self) -> &FileManager {
|
||||
&self.file_manager
|
||||
}
|
||||
pub fn get_file_manager_mut(&mut self) -> &mut FileManager {
|
||||
&mut self.file_manager
|
||||
}
|
||||
}
|
||||
#[async_trait::async_trait]
|
||||
pub trait Module: Debug {
|
||||
@@ -105,7 +107,9 @@ impl ModuleManager {
|
||||
let modules: Vec<Box<dyn Module>> = vec![
|
||||
Box::new(TemplateModule),
|
||||
Box::new(DiscoveryModule),
|
||||
Box::new(SeaOrmModule), //Box::new(ModelsModule)
|
||||
Box::new(SeaOrmModule),
|
||||
Box::new(ModelsModule),
|
||||
Box::new(AnnotateModule),
|
||||
];
|
||||
Self {
|
||||
modules,
|
||||
|
||||
@@ -1,236 +0,0 @@
|
||||
use color_eyre::{eyre::ContextCompat, Result};
|
||||
use comfy_table::Cell;
|
||||
use heck::ToUpperCamelCase;
|
||||
use sea_schema::sea_query::{ColumnDef, ColumnSpec, ColumnType, IndexCreateStatement};
|
||||
|
||||
use super::{discover::DbType, ModelConfig};
|
||||
use crate::config::sea_orm_config::DateTimeCrate;
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Column {
|
||||
pub name: String,
|
||||
pub col_type: ColumnType,
|
||||
pub attrs: Vec<ColumnSpec>,
|
||||
}
|
||||
|
||||
impl Column {
|
||||
pub fn new(column: ColumnDef, index: Option<IndexCreateStatement>) -> Result<Self> {
|
||||
let name = column.get_column_name();
|
||||
let col_type = column
|
||||
.get_column_type()
|
||||
.context("Unable to get column type")?
|
||||
.clone();
|
||||
let mut attrs = column.get_column_spec().clone();
|
||||
if let Some(index) = index {
|
||||
if index.is_unique_key() {
|
||||
attrs.push(ColumnSpec::UniqueKey)
|
||||
}
|
||||
if index.is_primary_key() {
|
||||
attrs.push(ColumnSpec::PrimaryKey);
|
||||
}
|
||||
}
|
||||
Ok(Column {
|
||||
name: name.to_string(),
|
||||
col_type,
|
||||
attrs: attrs.to_vec(),
|
||||
})
|
||||
}
|
||||
pub fn get_info_row(&self, config: &ModelConfig) -> Result<Vec<Cell>> {
|
||||
let column_type_rust = self.get_rs_type(&config.comment.date_time_crate);
|
||||
let column_type = self.get_db_type(&config.db_type);
|
||||
let attrs = self.attrs_to_string();
|
||||
let mut cols = Vec::new();
|
||||
if config.comment.column_name {
|
||||
cols.push(Cell::new(self.name.clone()))
|
||||
}
|
||||
if config.comment.column_name {
|
||||
cols.push(Cell::new(column_type.clone()))
|
||||
}
|
||||
if config.comment.column_rust_type {
|
||||
cols.push(Cell::new(column_type_rust.clone()))
|
||||
}
|
||||
if config.comment.column_attributes {
|
||||
cols.push(Cell::new(attrs.clone()));
|
||||
}
|
||||
Ok(cols)
|
||||
}
|
||||
pub fn attrs_to_string(&self) -> String {
|
||||
self.attrs
|
||||
.iter()
|
||||
.filter_map(Self::get_addr_type)
|
||||
.map(|s| s.to_string())
|
||||
.collect::<Vec<String>>()
|
||||
.join(", ")
|
||||
}
|
||||
pub fn get_addr_type(attr: &ColumnSpec) -> Option<String> {
|
||||
match attr {
|
||||
ColumnSpec::PrimaryKey => Some("primary key".to_owned()),
|
||||
ColumnSpec::Null => unimplemented!(),
|
||||
ColumnSpec::NotNull => Some("not null".to_owned()),
|
||||
ColumnSpec::Default(_) => unimplemented!(),
|
||||
ColumnSpec::AutoIncrement => Some("autoincrement".to_owned()),
|
||||
ColumnSpec::UniqueKey => Some("unique key".to_owned()),
|
||||
ColumnSpec::Check(_) => unimplemented!(),
|
||||
ColumnSpec::Generated { .. } => unimplemented!(),
|
||||
ColumnSpec::Extra(_) => unimplemented!(),
|
||||
ColumnSpec::Comment(_) => unimplemented!(),
|
||||
ColumnSpec::Using(_) => unimplemented!(),
|
||||
}
|
||||
}
|
||||
pub fn get_db_type(&self, db_type: &DbType) -> String {
|
||||
fn write_db_type(col_type: &ColumnType, db_type: &DbType) -> String {
|
||||
#[allow(unreachable_patterns)]
|
||||
match (col_type, db_type) {
|
||||
(ColumnType::Char(_), _) => "char".to_owned(),
|
||||
(ColumnType::String(_), _) => "varchar".to_owned(),
|
||||
(ColumnType::Text, _) => "text".to_owned(),
|
||||
(ColumnType::TinyInteger, DbType::MySql | DbType::Sqlite) => "tinyint".to_owned(),
|
||||
(ColumnType::TinyInteger, DbType::Postgres) => "smallint".to_owned(),
|
||||
(ColumnType::SmallInteger, _) => "smallint".to_owned(),
|
||||
(ColumnType::Integer, DbType::MySql) => "int".to_owned(),
|
||||
(ColumnType::Integer, _) => "integer".to_owned(),
|
||||
(ColumnType::BigInteger, DbType::MySql | DbType::Postgres) => "bigint".to_owned(),
|
||||
(ColumnType::BigInteger, DbType::Sqlite) => "integer".to_owned(),
|
||||
(ColumnType::TinyUnsigned, DbType::MySql) => "tinyint unsigned".to_owned(),
|
||||
(ColumnType::TinyUnsigned, DbType::Postgres) => "smallint".to_owned(),
|
||||
(ColumnType::TinyUnsigned, DbType::Sqlite) => "tinyint".to_owned(),
(ColumnType::SmallUnsigned, DbType::MySql) => "smallint unsigned".to_owned(),
(ColumnType::SmallUnsigned, DbType::Postgres | DbType::Sqlite) => {
"smallint".to_owned()
}
(ColumnType::Unsigned, DbType::MySql) => "int unsigned".to_owned(),
(ColumnType::Unsigned, DbType::Postgres | DbType::Sqlite) => "integer".to_owned(),
(ColumnType::BigUnsigned, DbType::MySql) => "bigint unsigned".to_owned(),
(ColumnType::BigUnsigned, DbType::Postgres) => "bigint".to_owned(),
(ColumnType::BigUnsigned, DbType::Sqlite) => "integer".to_owned(),
(ColumnType::Float, DbType::MySql | DbType::Sqlite) => "float".to_owned(),
(ColumnType::Float, DbType::Postgres) => "real".to_owned(),
(ColumnType::Double, DbType::MySql | DbType::Sqlite) => "double".to_owned(),
(ColumnType::Double, DbType::Postgres) => "double precision".to_owned(),
(ColumnType::Decimal(_), DbType::MySql | DbType::Postgres) => "decimal".to_owned(),
(ColumnType::Decimal(_), DbType::Sqlite) => "real".to_owned(),
(ColumnType::DateTime, DbType::MySql) => "datetime".to_owned(),
(ColumnType::DateTime, DbType::Postgres) => "timestamp w/o tz".to_owned(),
(ColumnType::DateTime, DbType::Sqlite) => "datetime_text".to_owned(),
(ColumnType::Timestamp, DbType::MySql | DbType::Postgres) => "timestamp".to_owned(),
(ColumnType::Timestamp, DbType::Sqlite) => "timestamp_text".to_owned(),
(ColumnType::TimestampWithTimeZone, DbType::MySql) => "timestamp".to_owned(),
(ColumnType::TimestampWithTimeZone, DbType::Postgres) => {
"timestamp w tz".to_owned()
}
(ColumnType::TimestampWithTimeZone, DbType::Sqlite) => {
"timestamp_with_timezone_text".to_owned()
}
(ColumnType::Time, DbType::MySql | DbType::Postgres) => "time".to_owned(),
(ColumnType::Time, DbType::Sqlite) => "time_text".to_owned(),
(ColumnType::Date, DbType::MySql | DbType::Postgres) => "date".to_owned(),
(ColumnType::Date, DbType::Sqlite) => "date_text".to_owned(),
(ColumnType::Year, DbType::MySql) => "year".to_owned(),
(ColumnType::Interval(_, _), DbType::Postgres) => "interval".to_owned(),
(ColumnType::Blob, DbType::MySql | DbType::Sqlite) => "blob".to_owned(),
(ColumnType::Blob, DbType::Postgres) => "bytea".to_owned(),
(ColumnType::Binary(_), DbType::MySql) => "binary".to_owned(),
(ColumnType::Binary(_), DbType::Postgres) => "bytea".to_owned(),
(ColumnType::Binary(_), DbType::Sqlite) => "blob".to_owned(),
(ColumnType::VarBinary(_), DbType::MySql) => "varbinary".to_owned(),
(ColumnType::VarBinary(_), DbType::Postgres) => "bytea".to_owned(),
(ColumnType::VarBinary(_), DbType::Sqlite) => "varbinary_blob".to_owned(),
(ColumnType::Bit(_), DbType::MySql | DbType::Postgres) => "bit".to_owned(),
(ColumnType::VarBit(_), DbType::MySql) => "bit".to_owned(),
(ColumnType::VarBit(_), DbType::Postgres) => "varbit".to_owned(),
(ColumnType::Boolean, DbType::MySql | DbType::Postgres) => "bool".to_owned(),
(ColumnType::Boolean, DbType::Sqlite) => "boolean".to_owned(),
(ColumnType::Money(_), DbType::MySql) => "decimal".to_owned(),
(ColumnType::Money(_), DbType::Postgres) => "money".to_owned(),
(ColumnType::Money(_), DbType::Sqlite) => "real_money".to_owned(),
(ColumnType::Json, DbType::MySql | DbType::Postgres) => "json".to_owned(),
(ColumnType::Json, DbType::Sqlite) => "json_text".to_owned(),
(ColumnType::JsonBinary, DbType::MySql) => "json".to_owned(),
(ColumnType::JsonBinary, DbType::Postgres) => "jsonb".to_owned(),
(ColumnType::JsonBinary, DbType::Sqlite) => "jsonb_text".to_owned(),
(ColumnType::Uuid, DbType::MySql) => "binary(16)".to_owned(),
(ColumnType::Uuid, DbType::Postgres) => "uuid".to_owned(),
(ColumnType::Uuid, DbType::Sqlite) => "uuid_text".to_owned(),
(ColumnType::Enum { name, .. }, DbType::MySql) => {
format!("ENUM({})", name.to_string().to_upper_camel_case())
}
(ColumnType::Enum { name, .. }, DbType::Postgres) => {
name.to_string().to_uppercase()
}
(ColumnType::Enum { .. }, DbType::Sqlite) => "enum_text".to_owned(),
(ColumnType::Array(column_type), DbType::Postgres) => {
format!("{}[]", write_db_type(column_type, db_type)).to_uppercase()
}
(ColumnType::Vector(_), DbType::Postgres) => "vector".to_owned(),
(ColumnType::Cidr, DbType::Postgres) => "cidr".to_owned(),
(ColumnType::Inet, DbType::Postgres) => "inet".to_owned(),
(ColumnType::MacAddr, DbType::Postgres) => "macaddr".to_owned(),
(ColumnType::LTree, DbType::Postgres) => "ltree".to_owned(),

_ => unimplemented!(),
}
}
write_db_type(&self.col_type, db_type)
}
pub fn get_rs_type(&self, date_time_crate: &DateTimeCrate) -> String {
fn write_rs_type(col_type: &ColumnType, date_time_crate: &DateTimeCrate) -> String {
#[allow(unreachable_patterns)]
match col_type {
ColumnType::Char(_)
| ColumnType::String(_)
| ColumnType::Text
| ColumnType::Custom(_) => "String".to_owned(),
ColumnType::TinyInteger => "i8".to_owned(),
ColumnType::SmallInteger => "i16".to_owned(),
ColumnType::Integer => "i32".to_owned(),
ColumnType::BigInteger => "i64".to_owned(),
ColumnType::TinyUnsigned => "u8".to_owned(),
ColumnType::SmallUnsigned => "u16".to_owned(),
ColumnType::Unsigned => "u32".to_owned(),
ColumnType::BigUnsigned => "u64".to_owned(),
ColumnType::Float => "f32".to_owned(),
ColumnType::Double => "f64".to_owned(),
ColumnType::Json | ColumnType::JsonBinary => "Json".to_owned(),
ColumnType::Date => match date_time_crate {
DateTimeCrate::Chrono => "Date".to_owned(),
DateTimeCrate::Time => "TimeDate".to_owned(),
},
ColumnType::Time => match date_time_crate {
DateTimeCrate::Chrono => "Time".to_owned(),
DateTimeCrate::Time => "TimeTime".to_owned(),
},
ColumnType::DateTime => match date_time_crate {
DateTimeCrate::Chrono => "DateTime".to_owned(),
DateTimeCrate::Time => "TimeDateTime".to_owned(),
},
ColumnType::Timestamp => match date_time_crate {
DateTimeCrate::Chrono => "DateTimeUtc".to_owned(),
DateTimeCrate::Time => "TimeDateTime".to_owned(),
},
ColumnType::TimestampWithTimeZone => match date_time_crate {
DateTimeCrate::Chrono => "DateTimeWithTimeZone".to_owned(),
DateTimeCrate::Time => "TimeDateTimeWithTimeZone".to_owned(),
},
ColumnType::Decimal(_) | ColumnType::Money(_) => "Decimal".to_owned(),
ColumnType::Uuid => "Uuid".to_owned(),
ColumnType::Binary(_) | ColumnType::VarBinary(_) | ColumnType::Blob => {
"Vec<u8>".to_owned()
}
ColumnType::Boolean => "bool".to_owned(),
ColumnType::Enum { name, .. } => name.to_string().to_upper_camel_case(),
ColumnType::Array(column_type) => {
format!("Vec<{}>", write_rs_type(column_type, date_time_crate))
}
ColumnType::Vector(_) => "::pgvector::Vector".to_owned(),
ColumnType::Bit(None | Some(1)) => "bool".to_owned(),
ColumnType::Bit(_) | ColumnType::VarBit(_) => "Vec<u8>".to_owned(),
ColumnType::Year => "i32".to_owned(),
ColumnType::Cidr | ColumnType::Inet => "IpNetwork".to_owned(),
ColumnType::Interval(_, _) | ColumnType::MacAddr | ColumnType::LTree => {
"String".to_owned()
}
_ => unimplemented!(),
}
}
write_rs_type(&self.col_type, date_time_crate)
}
}
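Note that both helpers above are plain recursive matches: the inner function re-enters itself for the Array case so nested element types map cleanly. A minimal, self-contained sketch of the same idea, with illustrative names only (none of these come from this crate):

// Illustrative sketch: recursive mapping from an abstract column type to a Rust
// type name, mirroring how write_rs_type recurses on Array element types.
#[derive(Debug)]
enum ColType {
    Integer,
    Boolean,
    Array(Box<ColType>),
}

fn rust_type(t: &ColType) -> String {
    match t {
        ColType::Integer => "i32".to_owned(),
        ColType::Boolean => "bool".to_owned(),
        // The Array arm recurses, so Array(Array(Integer)) becomes "Vec<Vec<i32>>".
        ColType::Array(inner) => format!("Vec<{}>", rust_type(inner)),
    }
}

fn main() {
    assert_eq!(rust_type(&ColType::Array(Box::new(ColType::Integer))), "Vec<i32>");
}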
@@ -1,147 +0,0 @@
use super::{table::Table, CommentConfig, CommentConfigSerde, ModelConfig};
use color_eyre::Result;
use comfy_table::{ContentArrangement, Table as CTable};
use comment_parser::{CommentParser, Event};

const HEADER: &str = r#"== Schema Information"#;
const COMMENTHEAD: &str = r#"/*"#;
const COMMENTBODY: &str = r#" *"#;
const COMMENTTAIL: &str = r#"*/"#;
const SETTINGSDELIMITER: &str = r#"```"#;

pub struct ModelCommentGenerator {}

impl ModelCommentGenerator {
pub fn find_settings_block(file_content: &str) -> Option<String> {
let delimiter_length = SETTINGSDELIMITER.len();
let start_pos = file_content.find(SETTINGSDELIMITER)?;
let end_pos = file_content[start_pos + delimiter_length..].find(SETTINGSDELIMITER)?;
let content = &file_content[start_pos + delimiter_length..start_pos + end_pos];
let content = content.replace(&format!("\n{COMMENTBODY}"), "\n");
Some(content)
}
pub fn generate_comment(
table: Table,
file_content: &str,
config: &ModelConfig,
) -> Result<String> {
let rules = comment_parser::get_syntax("rust").unwrap();
let parser = CommentParser::new(file_content, rules);
for comment in parser {
if let Event::BlockComment(body, _) = comment {
if body.contains(HEADER) {
tracing::debug!("Found header");
let mut settings = config.comment.clone();
let mut new_settings = None;
if let Some(parsed_settings) = Self::find_settings_block(file_content) {
tracing::info!(?new_settings);
match serde_yaml::from_str::<CommentConfigSerde>(&parsed_settings) {
Ok(s) => {
new_settings = Some(s.clone());
settings = s.merge(&settings);
tracing::info!(?settings);
}
Err(e) => {
if !settings.ignore_errors {
return Err(e.into());
}
}
}
}
tracing::debug!(?table.name, ?settings);
if settings.enable {
let comment =
Self::generate_comment_content(table, config, &settings, new_settings)?;
return Ok(file_content.replace(body, &comment));
}
}
}
}

let comment = Self::generate_comment_content(table, config, &config.comment, None)?;
Ok(format!("{}\n{}", comment, file_content))
}
pub fn generate_comment_content(
table: Table,
model_config: &ModelConfig,
config: &CommentConfig,
parsed_settings: Option<CommentConfigSerde>,
) -> Result<String> {
let mut model_config = model_config.clone();
model_config.comment = config.clone();
let column_info_table = if config.column_info {
let mut column_info_table = CTable::new();
let mut header = Vec::new();
if config.column_name {
header.push("Name");
}
if config.column_db_type {
header.push("DbType");
}
if config.column_rust_type {
header.push("RsType");
}
if config.column_attributes {
header.push("Attrs");
}
column_info_table
.load_preset(" -+=++ + ++")
.set_content_arrangement(ContentArrangement::Dynamic)
.set_header(header);
if let Some(width) = config.max_width {
column_info_table.set_width(width);
}
for column in &table.columns {
column_info_table.add_row(column.get_info_row(&model_config)?);
}
column_info_table.to_string()
} else {
String::new()
};
let config_part = match parsed_settings {
Some(settings) => {
let settings_str = serde_yaml::to_string(&settings)?;
let settings_str = settings_str
.lines()
.map(|line| format!(" {}", line))
.collect::<Vec<_>>()
.join("\n");
format!(
"{SETTINGSDELIMITER}\n{}\n{SETTINGSDELIMITER}\n\n",
settings_str
)
}
None => String::new(),
};

let table_name = &table.name;
let table_name_str = if config.table_name {
format!("Table: {}\n", table_name)
} else {
String::new()
};
let string = format!("{HEADER}\n{config_part}{table_name_str}\n{column_info_table}");

let padded_string = Self::pad_comment(&string);
Ok(padded_string)
}

pub fn pad_comment(s: &str) -> String {
let parts = s.split('\n').collect::<Vec<_>>();
let mut padded = String::new();
for (index, part) in parts.iter().enumerate() {
let first = index == 0;
let comment = match first {
true => COMMENTHEAD.to_string(),
false => COMMENTBODY.to_string(),
};
let padded_part = format!("{} {}\n", comment, part);
padded.push_str(&padded_part);
}
padded.push_str(COMMENTTAIL);
padded
}
// pub async fn generate_header(&self, config: &Config, db_type: &DbType) -> Result<String> {
//
// }
}
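For reference, the padding scheme removed above turns a multi-line string into a block comment by prefixing the first line with "/*", every following line with " *", and appending "*/". A self-contained restatement of just that rule (constants copied from the file; otherwise illustrative only):

const COMMENTHEAD: &str = "/*";
const COMMENTBODY: &str = " *";
const COMMENTTAIL: &str = "*/";

// Same padding rule as ModelCommentGenerator::pad_comment above, standalone.
fn pad_comment(s: &str) -> String {
    let mut padded = String::new();
    for (index, part) in s.split('\n').enumerate() {
        let prefix = if index == 0 { COMMENTHEAD } else { COMMENTBODY };
        padded.push_str(&format!("{} {}\n", prefix, part));
    }
    padded.push_str(COMMENTTAIL);
    padded
}

fn main() {
    let out = pad_comment("== Schema Information\nTable: user");
    assert_eq!(out, "/* == Schema Information\n * Table: user\n*/");
}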
@@ -1,95 +0,0 @@
use std::path::PathBuf;

use crate::generator::file::GeneratedFileChunk;

use super::{comment::ModelCommentGenerator, table::Table, ModelConfig};
use color_eyre::Result;
use handlebars::Handlebars;
use heck::ToPascalCase;
use serde::Serialize;
use tokio::fs;
#[derive(Debug, Clone)]
pub struct FileGenerator;
#[derive(Debug, Clone, Serialize)]
pub struct ModelContext {
entities_path: String,
model_path: String,
model_name: String,
active_model_name: String,
prelude_path: Option<String>,
}

impl FileGenerator {
pub async fn generate_file<'a>(
table: Table,
config: &ModelConfig,
handlebars: &'a Handlebars<'a>,
) -> Result<Vec<GeneratedFileChunk>> {
let mut file_chunks = Vec::new();
file_chunks.push(GeneratedFileChunk {
path: config.models_path.join("mod.rs"),
content: format!("pub mod {};", table.name),
priority: 0,
});
let filepath = config.models_path.join(format!("{}.rs", table.name));
tracing::debug!(?filepath, "Generating file");
if filepath.exists() {
file_chunks
.extend(Self::handle_existing_file(table, &filepath, config, handlebars).await?);
} else {
let model_name = format!("{}Model", table.name).to_pascal_case();
let active_model_name = format!("{}ActiveModel", table.name).to_pascal_case();
let context = ModelContext {
entities_path: format!("super::{}", config.relative_entities_path),
model_path: table.name.clone(),
model_name,
active_model_name,
prelude_path: Some("super::prelude".to_string()),
};
tracing::info!(?context, "Generating new file",);

let content = if config.prelude {
file_chunks.push(GeneratedFileChunk {
path: config.models_path.join("prelude.rs"),
content: format!(
"pub use super::{}::{}::{{Model as {},ActiveModel as {},Entity as {}}};",
config.relative_entities_path,
table.name,
context.model_name,
context.active_model_name,
table.name.clone().to_pascal_case()
),
priority: 0,
});
handlebars.render("modelprelude", &context)?
} else {
handlebars.render("model", &context)?
};

file_chunks.push(GeneratedFileChunk {
path: filepath,
content,
priority: 0,
});
}
Ok(file_chunks)
}
async fn handle_existing_file<'a>(
table: Table,
filepath: &PathBuf,
config: &ModelConfig,
_handlebars: &'a Handlebars<'a>,
) -> Result<Vec<GeneratedFileChunk>> {
let mut file_chunks = Vec::new();
let mut file_content = fs::read_to_string(filepath).await?;
if config.comment.enable {
file_content = ModelCommentGenerator::generate_comment(table, &file_content, config)?;
}
file_chunks.push(GeneratedFileChunk {
path: filepath.clone(),
content: file_content,
priority: 0,
});
Ok(file_chunks)
}
}
@@ -1,189 +1,176 @@
use super::{Module, ModulesContext};
use color_eyre::Result;
use serde::Deserialize;
#[derive(Debug, Clone, Deserialize)]
use std::path::PathBuf;

use crate::generator::file::pathbuf_to_rust_path;

use super::{
discovery::{table::Table, DiscoveredSchema},
sea_orm::SeaOrmConfig,
templates::TemplateConfig,
Module, ModulesContext,
};
use color_eyre::{
eyre::{eyre, Context, ContextCompat},
Result,
};
use minijinja::Environment;
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(default)]
pub struct ModelsConfig {
pub enable: bool,
pub database_schema: String,
pub max_connections: u32,
pub acquire_timeout: u32,
pub path: Option<PathBuf>,
pub prelude: bool,
}

// #[derive(Debug)]
|
||||
// pub struct ModelsModule;
|
||||
//
|
||||
// #[async_trait::async_trait]
|
||||
// impl Module for ModelsModule {
|
||||
// fn init(&self, ctx: &mut ModulesContext) -> Result<()> {
|
||||
// Ok(())
|
||||
// }
|
||||
//
|
||||
// async fn validate(&self, ctx: &mut ModulesContext) -> Result<bool> {
|
||||
// Ok(false)
|
||||
// }
|
||||
// }
|
||||
//
|
||||
//
|
||||
// use crate::{
|
||||
// config::{sea_orm_config::DateTimeCrate, Config},
|
||||
// generator::{
|
||||
// discover::{self, DbType},
|
||||
// file::GeneratedFileChunk,
|
||||
// },
|
||||
// };
|
||||
// use color_eyre::Result;
|
||||
// use file::FileGenerator;
|
||||
// use handlebars::Handlebars;
|
||||
// use sea_orm_codegen::{EntityTransformer, EntityWriterContext, OutputFile};
|
||||
// use sea_schema::sea_query::TableCreateStatement;
|
||||
// use serde::{Deserialize, Serialize};
|
||||
// use std::path::PathBuf;
|
||||
// use table::Table;
|
||||
//
|
||||
// pub mod column;
|
||||
// pub mod comment;
|
||||
// pub mod file;
|
||||
// pub mod table;
|
||||
// #[derive(Debug, Clone)]
|
||||
// pub struct ModelConfig {
|
||||
// pub models_path: PathBuf,
|
||||
// pub prelude: bool,
|
||||
// pub entities_path: PathBuf,
|
||||
// pub relative_entities_path: String,
|
||||
// pub enable: bool,
|
||||
// pub comment: CommentConfig,
|
||||
// pub db_type: DbType,
|
||||
// }
|
||||
// #[derive(Debug, Clone)]
|
||||
// pub struct CommentConfig {
|
||||
// pub max_width: Option<u16>,
|
||||
// pub enable: bool,
|
||||
// pub table_name: bool,
|
||||
// pub column_info: bool,
|
||||
// pub column_name: bool,
|
||||
// pub column_rust_type: bool,
|
||||
// pub column_db_type: bool,
|
||||
// pub column_attributes: bool,
|
||||
// pub ignore_errors: bool,
|
||||
// pub date_time_crate: DateTimeCrate,
|
||||
// }
|
||||
// #[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
// pub struct CommentConfigSerde {
|
||||
// #[serde(skip_serializing_if = "Option::is_none")]
|
||||
// pub max_width: Option<u16>,
|
||||
// #[serde(skip_serializing_if = "Option::is_none")]
|
||||
// pub enable: Option<bool>,
|
||||
// #[serde(skip_serializing_if = "Option::is_none")]
|
||||
// pub table_name: Option<bool>,
|
||||
// #[serde(skip_serializing_if = "Option::is_none")]
|
||||
// pub name: Option<bool>,
|
||||
// #[serde(skip_serializing_if = "Option::is_none")]
|
||||
// pub info: Option<bool>,
|
||||
// #[serde(skip_serializing_if = "Option::is_none")]
|
||||
// pub rust_type: Option<bool>,
|
||||
// #[serde(skip_serializing_if = "Option::is_none")]
|
||||
// pub db_type: Option<bool>,
|
||||
// #[serde(skip_serializing_if = "Option::is_none")]
|
||||
// pub attributes: Option<bool>,
|
||||
// }
|
||||
// impl CommentConfigSerde {
|
||||
// pub fn merge(&self, config: &CommentConfig) -> CommentConfig {
|
||||
// CommentConfig {
|
||||
// max_width: self.max_width.or(config.max_width),
|
||||
// table_name: self.table_name.unwrap_or(config.table_name),
|
||||
// column_name: self.name.unwrap_or(config.column_name),
|
||||
// column_info: self.info.unwrap_or(config.column_info),
|
||||
// column_rust_type: self.rust_type.unwrap_or(config.column_rust_type),
|
||||
// column_db_type: self.db_type.unwrap_or(config.column_db_type),
|
||||
// column_attributes: self.attributes.unwrap_or(config.column_attributes),
|
||||
// ignore_errors: config.ignore_errors,
|
||||
// enable: self.enable.unwrap_or(config.enable),
|
||||
// date_time_crate: config.date_time_crate.clone(),
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// impl ModelConfig {
|
||||
// pub fn new(config: Config, db_type: DbType) -> Self {
|
||||
// let models_path = config.output.path.join(&config.output.models.path);
|
||||
// let entities_path = models_path.join(&config.output.models.entities);
|
||||
// ModelConfig {
|
||||
// db_type,
|
||||
// prelude: config.output.models.prelude,
|
||||
// entities_path,
|
||||
// models_path,
|
||||
// relative_entities_path: config.output.models.entities.clone(),
|
||||
// enable: config.output.models.enable,
|
||||
// comment: CommentConfig {
|
||||
// max_width: config.output.models.comment.max_width,
|
||||
// enable: config.output.models.comment.enable,
|
||||
// table_name: config.output.models.comment.table_name,
|
||||
// column_name: config.output.models.comment.column_name,
|
||||
// column_info: config.output.models.comment.column_info,
|
||||
// column_rust_type: config.output.models.comment.column_rust_type,
|
||||
// column_db_type: config.output.models.comment.column_db_type,
|
||||
// column_attributes: config.output.models.comment.column_attributes,
|
||||
// ignore_errors: config.output.models.comment.ignore_errors,
|
||||
// date_time_crate: config.sea_orm.entity.date_time_crate,
|
||||
// },
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// pub async fn generate_models<'a>(
|
||||
// database_url: &str,
|
||||
// config: &Config,
|
||||
// handlebars: &'a Handlebars<'a>,
|
||||
// ) -> Result<Vec<GeneratedFileChunk>> {
|
||||
// let mut files = Vec::new();
|
||||
// let db_filter = config.sea_orm.entity.tables.get_filter();
|
||||
// let (table_stmts, db_type) =
|
||||
// discover::get_tables(database_url.to_owned(), db_filter, &config.db).await?;
|
||||
// let model_config = ModelConfig::new(config.clone(), db_type);
|
||||
//
|
||||
// let writer_context = config.clone().into();
|
||||
// files.extend(
|
||||
// generate_entities(table_stmts.clone(), model_config.clone(), writer_context).await?,
|
||||
// );
|
||||
//
|
||||
// files.push(GeneratedFileChunk {
|
||||
// path: model_config.models_path.join("mod.rs"),
|
||||
// content: format!("pub mod {};", model_config.relative_entities_path),
|
||||
// priority: 0,
|
||||
// });
|
||||
// let tables = table_stmts
|
||||
// .into_iter()
|
||||
// .map(Table::new)
|
||||
// .collect::<Result<Vec<Table>>>()?;
|
||||
//
|
||||
// if model_config.enable {
|
||||
// for table in tables {
|
||||
// files.extend(FileGenerator::generate_file(table, &model_config, handlebars).await?);
|
||||
// }
|
||||
// if model_config.prelude {
|
||||
// files.push(GeneratedFileChunk {
|
||||
// path: model_config.models_path.join("mod.rs"),
|
||||
// content: String::from("pub mod prelude;"),
|
||||
// priority: 0,
|
||||
// })
|
||||
// }
|
||||
// }
|
||||
// Ok(files)
|
||||
// }
|
||||
//
|
||||
// pub async fn generate_entities(
|
||||
// table_statements: Vec<TableCreateStatement>,
|
||||
// config: ModelConfig,
|
||||
// writer_context: EntityWriterContext,
|
||||
// ) -> Result<Vec<GeneratedFileChunk>> {
|
||||
// let output = EntityTransformer::transform(table_statements)?.generate(&writer_context);
|
||||
// Ok(output
|
||||
// .files
|
||||
// .into_iter()
|
||||
// .map(|OutputFile { name, content }| GeneratedFileChunk {
|
||||
// path: config.entities_path.join(name),
|
||||
// content,
|
||||
// priority: 0,
|
||||
// })
|
||||
// .collect::<Vec<_>>())
|
||||
// }
|
||||
impl Default for ModelsConfig {
fn default() -> Self {
Self {
enable: false,
path: None,
prelude: true,
}
}
}

#[derive(Debug, Clone, Serialize)]
pub struct ModelTemplateContext {
entities_path: Option<String>,
tables: Option<Vec<Table>>,
prelude_path: Option<String>,
table_name: Option<String>,
config: ModelsConfig,
}

impl ModelTemplateContext {
pub fn new(
entities_path: Option<String>,
prelude_path: Option<String>,
tables: Option<Vec<Table>>,
table_name: Option<String>,
config: ModelsConfig,
) -> Self {
Self {
entities_path,
tables,
prelude_path,
table_name,
config,
}
}
}

#[derive(Debug)]
pub struct ModelsModule;

#[async_trait::async_trait]
impl Module for ModelsModule {
fn init(&mut self, ctx: &mut ModulesContext) -> Result<()> {
ctx.get_config_auto::<ModelsConfig>("modules.model")?;
Ok(())
}

async fn validate(&mut self, ctx: &mut ModulesContext) -> Result<bool> {
let map = ctx.get_anymap();

if let (Some(config), Some(template_config), Some(sea_orm_config)) = (
map.get::<ModelsConfig>(),
map.get::<TemplateConfig>(),
map.get::<SeaOrmConfig>(),
) {
if config.enable && !template_config.enable {
return Err(eyre!(
"\"modules.template.enable\" must be enabled to use \"modules.model.enable\""
));
}
if config.enable && !sea_orm_config.enable {
return Err(eyre!(
"\"modules.sea_orm.enable\" must be enabled to use \"modules.model.enable\""
));
}
if config.enable && config.path.is_none() {
return Err(eyre!(
"\"modules.model.path\" must be set to use \"modules.model.enable\""
));
}
Ok(config.enable && template_config.enable)
} else {
// One or both keys are missing
Ok(false)
}
}
async fn execute(&mut self, ctx: &mut ModulesContext) -> Result<()> {
let mut files: Vec<(PathBuf, String)> = Vec::new();
let map = ctx.get_anymap();

if let (Some(config), Some(templates), Some(sea_orm_config), Some(schema)) = (
map.get::<ModelsConfig>(),
map.get::<Environment<'static>>(),
map.get::<SeaOrmConfig>(),
map.get::<DiscoveredSchema>(),
) {
let models_path = config.path.clone().unwrap();
tracing::info!(?models_path, "Models path");
let entities_path = sea_orm_config.path.clone().unwrap();
let mod_path = models_path.join("mod.rs");

let relative_entities_path = pathdiff::diff_paths(&entities_path, &mod_path)
.context("Failed to calculate relative path")?;
let relative_entities_rust_path = pathbuf_to_rust_path(relative_entities_path);
let context = ModelTemplateContext::new(
Some(relative_entities_rust_path.clone()),
None,
Some(schema.tables.clone()),
None,
config.clone(),
);
if config.prelude {
let prelude = templates
.get_template("model_prelude")?
.render(&context)
.context("Failed to render model prelude part")?;
files.push((models_path.join("prelude.rs"), prelude));
}

let mod_content = templates
.get_template("model_mod")?
.render(&context)
.context("Failed to render model mod")?;
files.push((mod_path.clone(), mod_content));
for table in &schema.tables {
tracing::debug!(?table, "Generating model for table");
let path = models_path.join(format!("{}.rs", table.name));

let context = ModelTemplateContext::new(
Some(relative_entities_rust_path.clone()),
if config.prelude {
Some("super::prelude".to_string())
} else {
None
},
None,
Some(table.name.clone()),
config.clone(),
);

if path.exists() {
tracing::debug!(?path, "Model file already exists");
continue;
}

let content = templates
.get_template("model")?
.render(&context)
.context("Failed to render model")?;
files.push((path.clone(), content.clone()));
}
} else {
// One or both keys are missing
}
tracing::info!(?files, "Generated model files");
let file_manager = ctx.get_file_manager_mut();
for (output_path, content) in files {
file_manager.insert(&output_path, &content, None)?;
}
Ok(())
}
}

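The module resolves the entities directory relative to the models mod.rs with pathdiff::diff_paths and then converts that filesystem path into a Rust module path via pathbuf_to_rust_path, which is not shown in this diff. One plausible reading of that conversion, purely as an illustration and not the crate's actual implementation:

use std::path::{Component, PathBuf};

// Hypothetical sketch: map "../_entities" to "super::_entities". The real
// pathbuf_to_rust_path lives in crate::generator::file and may differ.
fn pathbuf_to_rust_path(path: PathBuf) -> String {
    path.components()
        .filter_map(|c| match c {
            Component::ParentDir => Some("super".to_string()),
            Component::Normal(s) => Some(s.to_string_lossy().into_owned()),
            _ => None,
        })
        .collect::<Vec<_>>()
        .join("::")
}

fn main() {
    assert_eq!(pathbuf_to_rust_path(PathBuf::from("../_entities")), "super::_entities");
}

This is consistent with the generated tests/src/models files further down, where the entities path renders as super::_entities.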
@@ -1,50 +0,0 @@
use super::column::Column;
use color_eyre::{eyre::eyre, Result};
use sea_schema::sea_query::{self, TableCreateStatement};

#[derive(Debug, Clone)]
pub struct Table {
pub name: String,
pub columns: Vec<Column>,
}

impl Table {
pub fn new(statement: TableCreateStatement) -> Result<Table> {
let table_name = match statement.get_table_name() {
Some(table_ref) => match table_ref {
sea_query::TableRef::Table(t)
| sea_query::TableRef::SchemaTable(_, t)
| sea_query::TableRef::DatabaseSchemaTable(_, _, t)
| sea_query::TableRef::TableAlias(t, _)
| sea_query::TableRef::SchemaTableAlias(_, t, _)
| sea_query::TableRef::DatabaseSchemaTableAlias(_, _, t, _) => t.to_string(),
_ => unimplemented!(),
},
None => return Err(eyre!("Table name not found")),
};
tracing::debug!(?table_name);
let columns_raw = statement.get_columns();
let indexes = statement.get_indexes();
for column in columns_raw {
tracing::debug!(?column);
}
for index in indexes {
tracing::debug!(?index);
}
let columns = columns_raw
.iter()
.map(|column| {
let name = column.get_column_name();
let index = indexes
.iter()
.find(|index| index.get_index_spec().get_column_names().contains(&name));
Column::new(column.clone(), index.cloned())
})
.collect::<Result<Vec<Column>>>()?;
tracing::debug!(?columns);
Ok(Table {
columns,
name: table_name,
})
}
}
@@ -1,5 +1,5 @@
use serde::{Deserialize, Deserializer, Serialize};
use serde_yaml::Value;
use toml::Value;

use sea_orm_codegen::{DateTimeCrate as CodegenDateTimeCrate, WithPrelude, WithSerde};

@@ -10,13 +10,13 @@ pub enum EntityFormat {
Compact,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[serde(untagged)]
pub enum TableConfig {
Specific { specific: Vec<String> },
Exclude { exclude: Vec<String> },
}
// #[derive(Debug, Clone, Serialize, Deserialize)]
// #[serde(rename_all = "snake_case")]
// #[serde(untagged)]
// pub enum TableConfig {
// Specific { specific: Vec<String> },
// Exclude { exclude: Vec<String> },
// }

#[derive(Debug, Clone)]
pub enum SerdeEnable {
@@ -43,8 +43,8 @@ impl<'de> Deserialize<'de> for SerdeEnable {
match value {
Value::String(s) if s == "serialize" => Ok(SerdeEnable::Serialize),
Value::String(s) if s == "deserialize" => Ok(SerdeEnable::Deserialize),
Value::Bool(true) => Ok(SerdeEnable::Both),
Value::Bool(false) => Ok(SerdeEnable::None),
Value::Boolean(true) => Ok(SerdeEnable::Both),
Value::Boolean(false) => Ok(SerdeEnable::None),
_ => Err(serde::de::Error::custom(
"expected 'serialize', 'deserialize', 'true' or 'false'",
)),
@@ -72,8 +72,8 @@ impl<'de> Deserialize<'de> for Prelude {
let value = Value::deserialize(deserializer)?;

match value {
Value::Bool(true) => Ok(Prelude::Enabled),
Value::Bool(false) => Ok(Prelude::Disabled),
Value::Boolean(true) => Ok(Prelude::Enabled),
Value::Boolean(false) => Ok(Prelude::Disabled),
Value::String(s) if s == "allow_unused_imports" => Ok(Prelude::AllowUnusedImports),
_ => Err(serde::de::Error::custom(
"expected 'true', 'false', or 'allow_unused_imports'",

@@ -66,6 +66,7 @@ impl Module for SeaOrmModule {
}
async fn execute(&mut self, ctx: &mut ModulesContext) -> Result<()> {
let map = ctx.get_anymap();
let mut outputs = vec![];
if let (Some(statements), Some(config), Some(discovery_config)) = (
map.get::<RawDiscoveredStatements>(),
map.get::<SeaOrmConfig>(),
@@ -73,6 +74,7 @@ impl Module for SeaOrmModule {
) {
let writer_context = EntityWriterContext::new(
config.entity.format.is_expanded(),
true,
config.prelude.clone().into(),
config.serde.enable.clone().into(),
false,
@@ -86,18 +88,20 @@ impl Module for SeaOrmModule {
config.entity.extra_derives.eenum.clone(),
config.entity.extra_attributes.eenum.clone(),
false,
true,
false,
);
let output = EntityTransformer::transform(statements.statements.clone())?
.generate(&writer_context);
for file in output.files {
outputs.extend(output.files.into_iter().map(|file| {
let file_path = config.path.clone().unwrap_or_default();
let file_path = file_path.join(file.name);
tracing::info!(?file_path, "Generating file");

// let mut file_generator = crate::generator::file::FileGenerator::new(file_path);
// file_generator.write(file.content)?;
(file_path, file.content)
}));
}

let file_manager = ctx.get_file_manager_mut();
for (output_path, content) in outputs {
file_manager.insert(&output_path, &content, None)?;
}

Ok(())

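Modules in this design communicate through a type-keyed map on ModulesContext: init deserializes config into a typed struct and inserts it, and later phases fetch it back with get::<T>(). A minimal standalone sketch of that pattern, std-only and purely illustrative (the crate uses the anymap crate and its own ModulesContext type):

use std::any::{Any, TypeId};
use std::collections::HashMap;

// Illustrative type-keyed store, not the crate's ModulesContext.
#[derive(Default)]
struct Ctx {
    map: HashMap<TypeId, Box<dyn Any>>,
}

impl Ctx {
    fn insert<T: Any>(&mut self, value: T) {
        self.map.insert(TypeId::of::<T>(), Box::new(value));
    }
    fn get<T: Any>(&self) -> Option<&T> {
        self.map.get(&TypeId::of::<T>()).and_then(|b| b.downcast_ref::<T>())
    }
}

struct SeaOrmConfig { enable: bool }

fn main() {
    let mut ctx = Ctx::default();
    ctx.insert(SeaOrmConfig { enable: true });
    // A later module phase can retrieve the typed config without knowing who inserted it.
    assert!(ctx.get::<SeaOrmConfig>().map(|c| c.enable).unwrap_or(false));
}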
@@ -1,16 +1,84 @@
use crate::generator::DatabaseUrl;

use super::{Module, ModulesContext};
use color_eyre::Result;
use handlebars::Handlebars;
use color_eyre::{
eyre::{eyre, ContextCompat},
Result,
};
use heck::ToPascalCase;
use minijinja::Environment;
use serde::Deserialize;
use serde_inline_default::serde_inline_default;
use std::{collections::HashMap, path::PathBuf};

use include_dir::{include_dir, Dir, DirEntry};
use tokio::fs;

static TEMPLATE_DIR: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/templates");

#[serde_inline_default]
#[derive(Debug, Clone, Deserialize)]
#[serde(default)]
pub struct TemplateConfig {
#[serde_inline_default(false)]
pub enable: bool,
pub path: Option<PathBuf>,
#[serde(flatten)]
pub tables: HashMap<String, MapString>,
}
impl TemplateConfig {
pub fn to_paths(&self) -> Vec<(String, PathBuf)> {
let mut paths = Vec::new();
for (key, value) in &self.tables {
let map_string = value.clone();
paths.extend(map_string.into_paths(key.clone()));
}
let root = self.path.clone().unwrap_or_default();

paths
.into_iter()
.map(|(key, path)| {
let new_path = root.clone().join(path);
(key, new_path)
})
.collect::<Vec<_>>()
}
}
#[derive(Debug, Clone, Deserialize, Eq, PartialEq)]
#[serde(untagged)]
pub enum MapString {
Map(HashMap<String, MapString>),
PathBuf(PathBuf),
}
impl MapString {
pub fn into_paths(self, prefix: String) -> Vec<(String, PathBuf)> {
fn write_path(prefix: String, string: MapString) -> Vec<(String, PathBuf)> {
let mut strings = Vec::new();
match string {
MapString::Map(inner) => {
for (key, value) in inner {
let new_prefix = if prefix.is_empty() {
key
} else {
format!("{}.{}", prefix, key)
};
strings.extend(write_path(new_prefix, value));
}
}
MapString::PathBuf(pathbuf) => {
strings.push((prefix, pathbuf));
}
}
strings.sort();
strings
}
write_path(prefix, self)
}
}

impl Default for TemplateConfig {
fn default() -> Self {
Self {
enable: true,
tables: HashMap::new(),
path: None,
}
}
}
#[derive(Debug)]
pub struct TemplateModule;
@@ -18,22 +86,79 @@ pub struct TemplateModule;
#[async_trait::async_trait]
impl Module for TemplateModule {
fn init(&mut self, ctx: &mut ModulesContext) -> Result<()> {
let registry: Handlebars<'static> = Handlebars::new();
ctx.get_anymap_mut().insert(registry);
ctx.get_config_auto::<TemplateConfig>("modules.template")?;
Ok(())
}
async fn validate(&mut self, ctx: &mut ModulesContext) -> Result<bool> {
// let map = ctx.get_anymap();
let map = ctx.get_anymap_mut();
//
// if let (Some(config), Some(_)) = (map.get::<DiscoveryConfig>(), map.get::<DatabaseUrl>()) {
// Ok(config.enable)
// } else {
// // One or both keys are missing
// Ok(false)
// }
Ok(true)
if let Some(config) = map.get::<TemplateConfig>() {
if config.enable {
for templates in config.to_paths() {
let path = templates.1;
if !path.exists() {
return Err(eyre!("Template path does not exist: {}", path.display()));
}
}
}
Ok(config.enable)
} else {
Ok(false)
}
}
async fn execute(&mut self, ctx: &mut ModulesContext) -> Result<()> {
let mut env: Environment<'static> = Environment::new();
env.add_function("pascalCase", |f: String| f.to_pascal_case());
// registry.set_strict_mode(true);
if let Some(config) = ctx.get_anymap().get::<TemplateConfig>() {
for (template, path) in config.to_paths() {
tracing::debug!(?template, ?path, "Registering template");
let content = fs::read_to_string(path).await?;
env.add_template_owned(template, content)?;
}
Self::register_default_templates(TEMPLATE_DIR.entries(), &mut env).await?;
}
ctx.get_anymap_mut().insert(env);
Ok(())
}
}

impl TemplateModule {
async fn register_default_templates<'a>(
entries: &[DirEntry<'a>],
env: &mut Environment<'a>,
) -> Result<()> {
for entry in entries.iter().filter(|file| {
file.path()
.extension()
.is_some_and(|f| f.to_str().is_some_and(|f| f == "jinja"))
|| file.as_dir().is_some()
}) {
match entry {
DirEntry::File(file) => {
let path = file.path().with_extension("");
let name = path
.to_str()
.context("Failed to convert path to str")?
.replace("/", ".");
let content = file
.contents_utf8()
.context(format!("Template {} failed to parse", name))?
.to_owned();

tracing::debug!(?name, "Registering template");
if env.get_template(name.as_str()).is_err() {
env.add_template_owned(name, content)?;
} else {
tracing::debug!(?name, "Template already registered, skipping");
}
}
DirEntry::Dir(dir) => {
Box::pin(Self::register_default_templates(dir.entries(), env)).await?;
}
}
}

Ok(())
}
}

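The flattened [modules.template] keys deserialize into nested MapString values; into_paths joins nested keys with dots (mirroring how bundled templates such as templates/annotate/comment.jinja register as "annotate.comment") and to_paths then joins each path onto the configured root. A standalone illustration of just the flattening step, with illustrative names rather than the real MapString/TemplateConfig types:

use std::collections::HashMap;
use std::path::PathBuf;

// Illustrative stand-in for MapString: either a nested map or a leaf path.
enum Node {
    Map(HashMap<String, Node>),
    Path(PathBuf),
}

// Recursively flatten nested keys into dotted template names.
fn flatten(prefix: String, node: Node, out: &mut Vec<(String, PathBuf)>) {
    match node {
        Node::Path(p) => out.push((prefix, p)),
        Node::Map(m) => {
            for (k, v) in m {
                let key = if prefix.is_empty() { k } else { format!("{}.{}", prefix, k) };
                flatten(key, v, out);
            }
        }
    }
}

fn main() {
    let mut inner = HashMap::new();
    inner.insert("comment".to_string(), Node::Path(PathBuf::from("annotate/comment.jinja")));
    let mut out = Vec::new();
    flatten("annotate".to_string(), Node::Map(inner), &mut out);
    // => [("annotate.comment", "annotate/comment.jinja")], later joined onto modules.template.path.
    assert_eq!(out[0].0, "annotate.comment");
}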
60
src/main.rs
60
src/main.rs
@@ -1,25 +1,22 @@
mod config;
// mod config;
mod generator;
mod templates;

use clap::Parser;
use color_eyre::{eyre::eyre, Result};
use config::Config;
use figment::{
providers::{Format, Serialized, Yaml},
Figment,
};
use handlebars::Handlebars;
use tokio::{fs, io::AsyncWriteExt, process::Command};
use color_eyre::Result;
use tokio::fs;
use toml_edit::DocumentMut;
use tracing_subscriber::{fmt, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};

#[derive(Parser, Debug)]
struct Args {
pub struct Args {
#[clap(short, long, default_value = "generator.toml")]
config: String,
#[clap(short, long, env = "DATABASE_URL")]
database_url: String,
#[clap(short, long)]
workdir: Option<String>,
#[clap(short, long, default_value = "true")]
rustfmt: bool,
}

#[tokio::main]
@@ -32,43 +29,14 @@ async fn main() -> Result<()> {
.init();
let args = Args::parse();

// let config: Config = Figment::new()
// .merge(Serialized::defaults(Config::default()))
// .merge(Yaml::file(&args.config))
// .extract()?;
// tracing::info!(?config);
tracing::info!(?args);

// let mut handlebars: Registry = ;
// templates::register_templates(&mut handlebars, &config).await?;
let config = fs::read_to_string(args.config).await?;
// change workdir
if let Some(workdir) = &args.workdir {
std::env::set_current_dir(workdir)?;
}
let config = fs::read_to_string(&args.config).await?;
let root_config = config.parse::<DocumentMut>()?;

let outputs = generator::generate(&args.database_url, root_config).await?;
//
// // tracing::info!(?outputs, "Generated files");
// for output in outputs.iter() {
// tracing::info!(?output, "Generated chunk");
// // let mut file = fs::File::create(&output.path).await?;
// // file.write_all(output.content.as_bytes()).await?;
// }
//
// let merged_outputs = generator::file::combine_chunks(outputs)?;
// for output in merged_outputs.iter() {
// tracing::info!(?output.path, "Merged file");
// let parent = output.path.parent().unwrap();
// if !parent.exists() {
// fs::create_dir_all(parent).await?;
// }
// let mut file = fs::File::create(&output.path).await?;
// file.write_all(output.content.as_bytes()).await?;
// }
// for output in merged_outputs.iter() {
// tracing::info!(?output.path, "Running rustfmt");
// let exit_status = Command::new("rustfmt").arg(&output.path).status().await?;
// if !exit_status.success() {
// return Err(eyre!("Failed to run rustfmt"));
// }
// }
generator::generate(args, root_config).await?;
Ok(())
}

@@ -1,67 +0,0 @@
use crate::config::Config;
use color_eyre::eyre::{ContextCompat, Result};
use handlebars::Handlebars;
use include_dir::{include_dir, Dir, DirEntry};
use serde_yaml::Value;
use std::path::PathBuf;
use tokio::fs;

static TEMPLATE_DIR: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/templates");

async fn handle_direntries<'a>(
entries: &[DirEntry<'a>],
handlebars: &mut Handlebars<'a>,
) -> Result<()> {
for entry in entries.iter().filter(|file| {
file.path()
.extension()
.is_some_and(|f| f.to_str().is_some_and(|f| f == "hbs"))
|| file.as_dir().is_some()
}) {
match entry {
DirEntry::File(file) => {
let path = file.path().with_extension("");
let name = path
.to_str()
.context("Failed to convert path to str")?
.replace("/", ".");
let content = file
.contents_utf8()
.context(format!("Template {} failed to parse", name))?;

tracing::debug!(?name, "Registering template");
if !handlebars.has_template(&name) {
handlebars.register_template_string(&name, content)?;
} else {
tracing::debug!(?name, "Template already registered, skipping");
}
}
DirEntry::Dir(dir) => {
Box::pin(handle_direntries(dir.entries(), handlebars)).await?;
}
}
}

Ok(())
}
pub async fn register_templates(handlebars: &mut Handlebars<'_>, config: &Config) -> Result<()> {
if let Some(templates) = &config.templates {
for (name, value) in templates.iter() {
let Value::String(name) = name else {
return Err(color_eyre::eyre::eyre!("Invalid template name"));
};
let Value::String(path) = value else {
return Err(color_eyre::eyre::eyre!("Invalid template value"));
};
let mut path = PathBuf::from(path);
if let Some(templates_dir) = &config.templates_dir {
path = templates_dir.join(path);
}
tracing::info!(?name, ?path, "Registering template");
let content = fs::read_to_string(path).await?;
handlebars.register_template_string(name, content)?;
}
}
handle_direntries(TEMPLATE_DIR.entries(), handlebars).await?;
Ok(())
}
9
templates/annotate/comment.jinja
Normal file
9
templates/annotate/comment.jinja
Normal file
@@ -0,0 +1,9 @@
{% if comment_config %}{{config_delimiter}}
{{comment_config}}
{{config_delimiter}}{% endif %}

Table: {{table_name}}

Columns:

{{column_info_table}}
@@ -1,9 +0,0 @@
use {{entities_path}}::{{model_path}}::{ActiveModel, Model, Entity};
use sea_orm::ActiveModelBehavior;

#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}

impl Model {}

impl ActiveModel {}
20
templates/model.jinja
Normal file
20
templates/model.jinja
Normal file
@@ -0,0 +1,20 @@
{% if config.prelude %}
use {{prelude_path}}::*;
use sea_orm::ActiveModelBehavior;

#[async_trait::async_trait]
impl ActiveModelBehavior for {{pascalCase(table_name)}}ActiveModel {}

impl {{pascalCase(table_name)}}Model {}

impl {{pascalCase(table_name)}}ActiveModel {}
{% else %}
use {{entities_path}}::{{table_name}}::{ActiveModel, Model, Entity};

#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {}

impl Model {}

impl ActiveModel {}
{% endif %}
9
templates/model_mod.jinja
Normal file
9
templates/model_mod.jinja
Normal file
@@ -0,0 +1,9 @@
{% if config.prelude %}
pub mod prelude;
{% endif%}
{% if entities_path == "super::_entities" %}
pub mod _entities;
{% endif %}
{% for table in tables %}
pub mod {{table.name}};
{% endfor %}
7
templates/model_prelude.jinja
Normal file
7
templates/model_prelude.jinja
Normal file
@@ -0,0 +1,7 @@
{% for table in tables %}
pub use {{ entities_path }}::{{ table.name }}::{ActiveModel as {{pascalCase(table.name)}}ActiveModel, Model as {{pascalCase(table.name)}}Model, Entity as {{pascalCase(table.name)}}, Column as {{pascalCase(table.name)}}Column};
{% endfor %}

{% for table in tables %}
{# pub use {{entities_path}}::{{table.name}}::{ActiveModel as {% call upperCamelCase(table.name) %}ActiveModel, Model as {% call upperCamelCase(table.name) %}Model, Entity as {% call upperCamelCase(table.name) %}}; #}
{% endfor %}
@@ -1,9 +0,0 @@
use {{prelude_path}}::*;
use sea_orm::ActiveModelBehavior;

#[async_trait::async_trait]
impl ActiveModelBehavior for {{active_model_name}} {}

impl {{model_name}} {}

impl {{active_model_name}} {}
3
tests/src/models/_entities/mod.rs
Normal file
3
tests/src/models/_entities/mod.rs
Normal file
@@ -0,0 +1,3 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.10

pub mod user ;
9
tests/src/models/_entities/user.rs
Normal file
9
tests/src/models/_entities/user.rs
Normal file
@@ -0,0 +1,9 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.10

use sea_orm :: entity :: prelude :: * ;

# [derive (Clone , Debug , PartialEq , DeriveEntityModel , Eq)] # [sea_orm (table_name = "user")] pub struct Model { # [sea_orm (primary_key)] pub id : i32 , # [sea_orm (unique)] pub username : String , # [sea_orm (unique)] pub email : String , pub password : String , # [sea_orm (unique)] pub test : String , }

# [derive (Copy , Clone , Debug , EnumIter , DeriveRelation)] pub enum Relation { }
9
tests/src/models/mod.rs
Normal file
9
tests/src/models/mod.rs
Normal file
@@ -0,0 +1,9 @@

pub mod prelude;


pub mod _entities;


pub mod user;
pub mod user;
3
tests/src/models/prelude.rs
Normal file
3
tests/src/models/prelude.rs
Normal file
@@ -0,0 +1,3 @@

pub use super::_entities::user::{ActiveModel as UserActiveModel, Model as UserModel, Entity as User};

20
tests/src/models/user.rs
Normal file
20
tests/src/models/user.rs
Normal file
@@ -0,0 +1,20 @@
/* == Schema Information
 * ```
 * column_db_type = false
 * ```
 *
 * Table: user
 *
 * Columns:
 *
 * Name       RsType   Attrs
 * +==========+========+======================================+
 * id         i32      autoincrement, not null, primary key
 * username   String   not null, unique key
 * email      String   not null, unique key
 * password   String   not null
 * test       String   not null, unique key
 * +----------+--------+--------------------------------------+
 */

sdsfsdfsdfsdf