From d205d722aa8f8e5c0e92157ebd49304098b0f00d Mon Sep 17 00:00:00 2001 From: Matthieu Bessat Date: Tue, 13 Jan 2026 20:47:19 +0100 Subject: [PATCH 1/3] style: apply cargo fmt --- lib/sandbox/src/db.rs | 10 +- lib/sandbox/src/lib.rs | 2 +- lib/sandbox/src/main.rs | 80 +++---- lib/sandbox/src/models/user.rs | 25 +-- .../src/repositories/user_repository.rs | 28 +-- .../src/repositories/user_token_repository.rs | 31 ++- lib/sandbox/tests/test_user_repository.rs | 66 +++--- lib/sqlxgentools_attrs/src/lib.rs | 3 +- .../src/generators/migrations.rs | 30 ++- lib/sqlxgentools_cli/src/generators/mod.rs | 5 +- .../src/generators/repositories/base.rs | 208 +++++++++++------- .../src/generators/repositories/mod.rs | 7 +- .../src/generators/repositories/relations.rs | 13 +- lib/sqlxgentools_cli/src/main.rs | 43 ++-- lib/sqlxgentools_cli/src/models.rs | 9 +- lib/sqlxgentools_cli/src/parse_models.rs | 124 +++++++---- lib/sqlxgentools_misc/src/lib.rs | 21 +- 17 files changed, 390 insertions(+), 315 deletions(-) diff --git a/lib/sandbox/src/db.rs b/lib/sandbox/src/db.rs index 32c9d8e..dbf076c 100644 --- a/lib/sandbox/src/db.rs +++ b/lib/sandbox/src/db.rs @@ -1,11 +1,12 @@ use anyhow::Context; -use std::str::FromStr; -use std::path::PathBuf; use anyhow::Result; +use std::path::PathBuf; +use std::str::FromStr; use fully_pub::fully_pub; use sqlx::{ - Pool, Sqlite, sqlite::{SqliteConnectOptions, SqlitePoolOptions}, + sqlite::{SqliteConnectOptions, SqlitePoolOptions}, + Pool, Sqlite, }; /// database storage interface @@ -13,7 +14,6 @@ use sqlx::{ #[derive(Clone, Debug)] struct Database(Pool); - /// Initialize database pub async fn provide_database(sqlite_db_path: &str) -> Result { let path = PathBuf::from(sqlite_db_path); @@ -37,5 +37,3 @@ pub async fn provide_database(sqlite_db_path: &str) -> Result { Ok(Database(pool)) } - - diff --git a/lib/sandbox/src/lib.rs b/lib/sandbox/src/lib.rs index 0bfa17e..5918116 100644 --- a/lib/sandbox/src/lib.rs +++ b/lib/sandbox/src/lib.rs @@ -1,3 +1,3 @@ -pub mod repositories; pub mod db; pub mod models; +pub mod repositories; diff --git a/lib/sandbox/src/main.rs b/lib/sandbox/src/main.rs index d198fb3..8ec9bba 100644 --- a/lib/sandbox/src/main.rs +++ b/lib/sandbox/src/main.rs @@ -4,12 +4,15 @@ use chrono::Utc; use sqlx::types::Json; use sqlxgentools_misc::ForeignRef; -use crate::{db::provide_database, models::user::{User, UserToken}, repositories::user_token_repository::UserTokenRepository}; +use crate::{ + db::provide_database, + models::user::{User, UserToken}, + repositories::user_token_repository::UserTokenRepository, +}; +pub mod db; pub mod models; pub mod repositories; -pub mod db; - #[tokio::main] async fn main() -> Result<()> { @@ -24,7 +27,7 @@ async fn main() -> Result<()> { last_login_at: None, status: models::user::UserStatus::Invited, groups: Json(vec![]), - avatar_bytes: None + avatar_bytes: None, }, User { id: "idu2".into(), @@ -34,7 +37,7 @@ async fn main() -> Result<()> { last_login_at: None, status: models::user::UserStatus::Invited, groups: Json(vec![]), - avatar_bytes: None + avatar_bytes: None, }, User { id: "idu3".into(), @@ -44,8 +47,8 @@ async fn main() -> Result<()> { last_login_at: None, status: models::user::UserStatus::Invited, groups: Json(vec![]), - avatar_bytes: None - } + avatar_bytes: None, + }, ]; let user_token = UserToken { id: "idtoken1".into(), @@ -53,43 +56,44 @@ async fn main() -> Result<()> { last_use_time: None, creation_time: Utc::now(), expiration_time: Utc::now(), - user_id: ForeignRef::new(&users.get(0).unwrap()) + user_id: 
ForeignRef::new(&users.get(0).unwrap()), }; let db = provide_database("tmp/db.db").await?; let user_token_repo = UserTokenRepository::new(db); - user_token_repo.insert_many(&vec![ - UserToken { - id: "idtoken2".into(), - secret: "4LP5A3F3XBV5NM8VXRGZG3QDXO9PNAC0".into(), - last_use_time: None, - creation_time: Utc::now(), - expiration_time: Utc::now(), - user_id: ForeignRef::new(&users.get(0).unwrap()) - }, - UserToken { - id: "idtoken3".into(), - secret: "CBHR6G41KSEMR1AI".into(), - last_use_time: None, - creation_time: Utc::now(), - expiration_time: Utc::now(), - user_id: ForeignRef::new(&users.get(1).unwrap()) - }, - UserToken { - id: "idtoken4".into(), - secret: "CBHR6G41KSEMR1AI".into(), - last_use_time: None, - creation_time: Utc::now(), - expiration_time: Utc::now(), - user_id: ForeignRef::new(&users.get(1).unwrap()) - } - ]).await?; - let user_tokens = user_token_repo.get_many_user_tokens_by_usersss( - vec!["idu2".into()] - ).await?; + user_token_repo + .insert_many(&vec![ + UserToken { + id: "idtoken2".into(), + secret: "4LP5A3F3XBV5NM8VXRGZG3QDXO9PNAC0".into(), + last_use_time: None, + creation_time: Utc::now(), + expiration_time: Utc::now(), + user_id: ForeignRef::new(&users.get(0).unwrap()), + }, + UserToken { + id: "idtoken3".into(), + secret: "CBHR6G41KSEMR1AI".into(), + last_use_time: None, + creation_time: Utc::now(), + expiration_time: Utc::now(), + user_id: ForeignRef::new(&users.get(1).unwrap()), + }, + UserToken { + id: "idtoken4".into(), + secret: "CBHR6G41KSEMR1AI".into(), + last_use_time: None, + creation_time: Utc::now(), + expiration_time: Utc::now(), + user_id: ForeignRef::new(&users.get(1).unwrap()), + }, + ]) + .await?; + let user_tokens = user_token_repo + .get_many_user_tokens_by_usersss(vec!["idu2".into()]) + .await?; dbg!(&user_tokens); Ok(()) } - diff --git a/lib/sandbox/src/models/user.rs b/lib/sandbox/src/models/user.rs index 7f70cb8..02fc9bd 100644 --- a/lib/sandbox/src/models/user.rs +++ b/lib/sandbox/src/models/user.rs @@ -1,8 +1,8 @@ use chrono::{DateTime, Utc}; -use sqlx::types::Json; use fully_pub::fully_pub; +use sqlx::types::Json; -use sqlxgentools_attrs::{SqlGeneratorDerive, SqlGeneratorModelWithId, sql_generator_model}; +use sqlxgentools_attrs::{sql_generator_model, SqlGeneratorDerive, SqlGeneratorModelWithId}; use sqlxgentools_misc::{DatabaseLine, ForeignRef}; #[derive(sqlx::Type, Clone, Debug, PartialEq)] @@ -11,37 +11,36 @@ enum UserStatus { Disabled, Invited, Active, - Archived + Archived, } #[derive(SqlGeneratorDerive, SqlGeneratorModelWithId, sqlx::FromRow, Debug, Clone)] -#[sql_generator_model(table_name="usersss")] +#[sql_generator_model(table_name = "usersss")] #[fully_pub] struct User { - #[sql_generator_field(is_primary=true)] + #[sql_generator_field(is_primary = true)] id: String, - #[sql_generator_field(is_unique=true)] + #[sql_generator_field(is_unique = true)] handle: String, full_name: Option, prefered_color: Option, last_login_at: Option>, status: UserStatus, groups: Json>, - avatar_bytes: Option> + avatar_bytes: Option>, } - #[derive(SqlGeneratorDerive, SqlGeneratorModelWithId, sqlx::FromRow, Debug, Clone)] -#[sql_generator_model(table_name="user_tokens")] +#[sql_generator_model(table_name = "user_tokens")] #[fully_pub] struct UserToken { - #[sql_generator_field(is_primary=true)] + #[sql_generator_field(is_primary = true)] id: String, secret: String, last_use_time: Option>, creation_time: DateTime, expiration_time: DateTime, - #[sql_generator_field(reverse_relation_name="user_tokens")] // to generate 
get_user_tokens_of_user(&user_id) - user_id: ForeignRef + #[sql_generator_field(reverse_relation_name = "user_tokens")] + // to generate get_user_tokens_of_user(&user_id) + user_id: ForeignRef, } - diff --git a/lib/sandbox/src/repositories/user_repository.rs b/lib/sandbox/src/repositories/user_repository.rs index de25a85..7951733 100644 --- a/lib/sandbox/src/repositories/user_repository.rs +++ b/lib/sandbox/src/repositories/user_repository.rs @@ -1,5 +1,5 @@ -use crate::models::user::User; use crate::db::Database; +use crate::models::user::User; pub struct UserRepository { db: Database, } @@ -8,7 +8,9 @@ impl UserRepository { UserRepository { db } } pub async fn get_all(&self) -> Result, sqlx::Error> { - sqlx::query_as::<_, User>("SELECT * FROM usersss").fetch_all(&self.db.0).await + sqlx::query_as::<_, User>("SELECT * FROM usersss") + .fetch_all(&self.db.0) + .await } pub async fn get_by_id(&self, item_id: &str) -> Result { sqlx::query_as::<_, User>("SELECT * FROM usersss WHERE id = $1") @@ -16,10 +18,7 @@ impl UserRepository { .fetch_one(&self.db.0) .await } - pub async fn get_many_by_id( - &self, - items_ids: &[&str], - ) -> Result, sqlx::Error> { + pub async fn get_many_by_id(&self, items_ids: &[&str]) -> Result, sqlx::Error> { if items_ids.is_empty() { return Ok(vec![]); } @@ -27,9 +26,7 @@ impl UserRepository { .map(|i| format!("${}", i)) .collect::>() .join(","); - let query_sql = format!( - "SELECT * FROM usersss WHERE id IN ({})", placeholder_params - ); + let query_sql = format!("SELECT * FROM usersss WHERE id IN ({})", placeholder_params); let mut query = sqlx::query_as::<_, User>(&query_sql); for id in items_ids { query = query.bind(id); @@ -59,8 +56,11 @@ impl UserRepository { .map(|c| c.to_vec()) .map(|x| { format!( - "({})", x.iter().map(| i | format!("${}", i)).collect:: < Vec < - String >> ().join(", ") + "({})", + x.iter() + .map(|i| format!("${}", i)) + .collect::>() + .join(", ") ) }) .collect::>() @@ -84,11 +84,7 @@ impl UserRepository { query.execute(&self.db.0).await?; Ok(()) } - pub async fn update_by_id( - &self, - item_id: &str, - entity: &User, - ) -> Result<(), sqlx::Error> { + pub async fn update_by_id(&self, item_id: &str, entity: &User) -> Result<(), sqlx::Error> { sqlx::query( "UPDATE usersss SET id = $2, handle = $3, full_name = $4, prefered_color = $5, last_login_at = $6, status = $7, groups = $8, avatar_bytes = $9 WHERE id = $1", ) diff --git a/lib/sandbox/src/repositories/user_token_repository.rs b/lib/sandbox/src/repositories/user_token_repository.rs index 1962898..10c554e 100644 --- a/lib/sandbox/src/repositories/user_token_repository.rs +++ b/lib/sandbox/src/repositories/user_token_repository.rs @@ -1,5 +1,5 @@ -use crate::models::user::UserToken; use crate::db::Database; +use crate::models::user::UserToken; pub struct UserTokenRepository { db: Database, } @@ -18,10 +18,7 @@ impl UserTokenRepository { .fetch_one(&self.db.0) .await } - pub async fn get_many_by_id( - &self, - items_ids: &[&str], - ) -> Result, sqlx::Error> { + pub async fn get_many_by_id(&self, items_ids: &[&str]) -> Result, sqlx::Error> { if items_ids.is_empty() { return Ok(vec![]); } @@ -30,7 +27,8 @@ impl UserTokenRepository { .collect::>() .join(","); let query_sql = format!( - "SELECT * FROM user_tokens WHERE id IN ({})", placeholder_params + "SELECT * FROM user_tokens WHERE id IN ({})", + placeholder_params ); let mut query = sqlx::query_as::<_, UserToken>(&query_sql); for id in items_ids { @@ -52,18 +50,18 @@ impl UserTokenRepository { .await?; Ok(()) } - pub async fn 
insert_many( - &self, - entities: &Vec, - ) -> Result<(), sqlx::Error> { + pub async fn insert_many(&self, entities: &Vec) -> Result<(), sqlx::Error> { let values_templates: String = (1..(6usize * entities.len() + 1)) .collect::>() .chunks(6usize) .map(|c| c.to_vec()) .map(|x| { format!( - "({})", x.iter().map(| i | format!("${}", i)).collect:: < Vec < - String >> ().join(", ") + "({})", + x.iter() + .map(|i| format!("${}", i)) + .collect::>() + .join(", ") ) }) .collect::>() @@ -85,11 +83,7 @@ impl UserTokenRepository { query.execute(&self.db.0).await?; Ok(()) } - pub async fn update_by_id( - &self, - item_id: &str, - entity: &UserToken, - ) -> Result<(), sqlx::Error> { + pub async fn update_by_id(&self, item_id: &str, entity: &UserToken) -> Result<(), sqlx::Error> { sqlx::query( "UPDATE user_tokens SET id = $2, secret = $3, last_use_time = $4, creation_time = $5, expiration_time = $6, user_id = $7 WHERE id = $1", ) @@ -132,7 +126,8 @@ impl UserTokenRepository { .collect::>() .join(","); let query_tmpl = format!( - "SELECT * FROM user_tokens WHERE user_id IN ({})", placeholder_params + "SELECT * FROM user_tokens WHERE user_id IN ({})", + placeholder_params ); let mut query = sqlx::query_as::<_, UserToken>(&query_tmpl); for id in items_ids { diff --git a/lib/sandbox/tests/test_user_repository.rs b/lib/sandbox/tests/test_user_repository.rs index 3328f9a..a81958d 100644 --- a/lib/sandbox/tests/test_user_repository.rs +++ b/lib/sandbox/tests/test_user_repository.rs @@ -5,7 +5,10 @@ use std::assert_matches::assert_matches; use chrono::Utc; -use sandbox::{models::user::{User, UserStatus}, repositories::user_repository::UserRepository}; +use sandbox::{ + models::user::{User, UserStatus}, + repositories::user_repository::UserRepository, +}; use sqlx::{types::Json, Pool, Sqlite}; #[sqlx::test(fixtures("../src/migrations/all.sql"))] @@ -22,43 +25,40 @@ async fn test_user_repository_create_read_update_delete(pool: Pool) -> s last_login_at: Some(Utc::now()), status: UserStatus::Invited, groups: Json(vec!["artists".into()]), - avatar_bytes: vec![0x00] + avatar_bytes: vec![0x00], }; + assert_matches!(user_repo.insert(&new_user).await, Ok(())); assert_matches!( - user_repo.insert(&new_user).await, - Ok(()) - ); - assert_matches!( - user_repo.get_by_id("ffffffff-0000-4000-0000-0000000000c9".into()).await, + user_repo + .get_by_id("ffffffff-0000-4000-0000-0000000000c9".into()) + .await, Ok(User { .. }) ); assert_matches!( - user_repo.get_by_id("ffffffff-0000-4040-0000-000000000000".into()).await, + user_repo + .get_by_id("ffffffff-0000-4040-0000-000000000000".into()) + .await, Err(sqlx::Error::RowNotFound) ); // Insert Many - let bunch_of_users: Vec = (0..10).map(|pid| User { - id: format!("ffffffff-0000-4000-0010-{:0>8}", pid), - handle: format!("user num {}", pid), - full_name: None, - prefered_color: None, - last_login_at: None, - status: UserStatus::Invited, - groups: Json(vec![]), - avatar_bytes: vec![] - }).collect(); - assert_matches!( - user_repo.insert_many(&bunch_of_users).await, - Ok(()) - ); + let bunch_of_users: Vec = (0..10) + .map(|pid| User { + id: format!("ffffffff-0000-4000-0010-{:0>8}", pid), + handle: format!("user num {}", pid), + full_name: None, + prefered_color: None, + last_login_at: None, + status: UserStatus::Invited, + groups: Json(vec![]), + avatar_bytes: vec![], + }) + .collect(); + assert_matches!(user_repo.insert_many(&bunch_of_users).await, Ok(())); // Read many all let read_all_res = user_repo.get_all().await; - assert_matches!( - read_all_res, - Ok(..) 
- ); + assert_matches!(read_all_res, Ok(..)); let all_users = read_all_res.unwrap(); assert_eq!(all_users.len(), 11); @@ -69,16 +69,18 @@ async fn test_user_repository_create_read_update_delete(pool: Pool) -> s user_repo.update_by_id(&new_user.id, &updated_user).await, Ok(()) ); - let user_from_db = user_repo.get_by_id("ffffffff-0000-4000-0000-0000000000c9".into()).await.unwrap(); + let user_from_db = user_repo + .get_by_id("ffffffff-0000-4000-0000-0000000000c9".into()) + .await + .unwrap(); assert_eq!(user_from_db.status, UserStatus::Disabled); // Delete + assert_matches!(user_repo.delete_by_id(&new_user.id).await, Ok(())); assert_matches!( - user_repo.delete_by_id(&new_user.id).await, - Ok(()) - ); - assert_matches!( - user_repo.get_by_id("ffffffff-0000-4000-0000-0000000000c9".into()).await, + user_repo + .get_by_id("ffffffff-0000-4000-0000-0000000000c9".into()) + .await, Err(sqlx::Error::RowNotFound) ); diff --git a/lib/sqlxgentools_attrs/src/lib.rs b/lib/sqlxgentools_attrs/src/lib.rs index f88bf77..c3d1c51 100644 --- a/lib/sqlxgentools_attrs/src/lib.rs +++ b/lib/sqlxgentools_attrs/src/lib.rs @@ -1,6 +1,6 @@ use proc_macro::TokenStream; use quote::quote; -use syn::{DeriveInput, Fields, parse_macro_input}; +use syn::{parse_macro_input, DeriveInput, Fields}; #[proc_macro_attribute] pub fn sql_generator_model(_attr: TokenStream, item: TokenStream) -> TokenStream { @@ -38,4 +38,3 @@ pub fn derive_sql_generator_model_with_id(input: TokenStream) -> TokenStream { // If `id` field is not found, return an error panic!("Expected struct with a named field `id` of type String") } - diff --git a/lib/sqlxgentools_cli/src/generators/migrations.rs b/lib/sqlxgentools_cli/src/generators/migrations.rs index fbc2dc6..97b5d1f 100644 --- a/lib/sqlxgentools_cli/src/generators/migrations.rs +++ b/lib/sqlxgentools_cli/src/generators/migrations.rs @@ -1,8 +1,7 @@ -use anyhow::{Result, anyhow}; +use anyhow::{anyhow, Result}; use crate::models::{Field, Model}; - // Implementations impl Field { /// return sqlite type @@ -21,7 +20,7 @@ impl Field { "DateTime" => Some("DATETIME".into()), "Json" => Some("TEXT".into()), "Vec" => Some("BLOB".into()), - _ => Some("TEXT".into()) + _ => Some("TEXT".into()), } } } @@ -35,8 +34,10 @@ pub fn generate_create_table_sql(models: &[Model]) -> Result { let mut fields_sql: Vec = vec![]; for field in model.fields.iter() { let mut additions: String = "".into(); - let sql_type = field.sql_type() - .ok_or(anyhow!(format!("Could not find SQL type for field {}", field.name)))?; + let sql_type = field.sql_type().ok_or(anyhow!(format!( + "Could not find SQL type for field {}", + field.name + )))?; if !field.is_nullable { additions.push_str(" NOT NULL"); } @@ -46,20 +47,15 @@ pub fn generate_create_table_sql(models: &[Model]) -> Result { if field.is_primary { additions.push_str(" PRIMARY KEY"); } - fields_sql.push( - format!("\t{: <#18}\t{}{}", field.name, sql_type, additions) - ); + fields_sql.push(format!("\t{: <#18}\t{}{}", field.name, sql_type, additions)); } - sql_code.push_str( - &format!( - "CREATE TABLE {} (\n{}\n);\n", - model.table_name, - fields_sql.join(",\n") - ) - ); + sql_code.push_str(&format!( + "CREATE TABLE {} (\n{}\n);\n", + model.table_name, + fields_sql.join(",\n") + )); } - + Ok(sql_code) } - diff --git a/lib/sqlxgentools_cli/src/generators/mod.rs b/lib/sqlxgentools_cli/src/generators/mod.rs index ae0ab5f..7b3c823 100644 --- a/lib/sqlxgentools_cli/src/generators/mod.rs +++ b/lib/sqlxgentools_cli/src/generators/mod.rs @@ -8,13 +8,12 @@ pub mod repositories; #[fully_pub] 
enum SourceNode { File(String), - Directory(Vec) + Directory(Vec), } #[derive(Serialize, Debug)] #[fully_pub] struct SourceNodeContainer { name: String, - inner: SourceNode + inner: SourceNode, } - diff --git a/lib/sqlxgentools_cli/src/generators/repositories/base.rs b/lib/sqlxgentools_cli/src/generators/repositories/base.rs index d2f3609..0482ab8 100644 --- a/lib/sqlxgentools_cli/src/generators/repositories/base.rs +++ b/lib/sqlxgentools_cli/src/generators/repositories/base.rs @@ -1,12 +1,14 @@ use anyhow::Result; -use proc_macro2::{TokenStream, Ident}; +use heck::ToSnakeCase; +use proc_macro2::{Ident, TokenStream}; use quote::{format_ident, quote}; use syn::File; -use heck::ToSnakeCase; -use crate::{generators::repositories::relations::gen_get_many_of_related_entity_method, models::{Field, FieldForeignMode, Model}}; use crate::generators::{SourceNode, SourceNodeContainer}; - +use crate::{ + generators::repositories::relations::gen_get_many_of_related_entity_method, + models::{Field, FieldForeignMode, Model}, +}; fn gen_get_all_method(model: &Model) -> TokenStream { let resource_ident = format_ident!("{}", &model.name); @@ -23,7 +25,10 @@ fn gen_get_all_method(model: &Model) -> TokenStream { fn gen_get_by_field_method(model: &Model, query_field: &Field) -> TokenStream { let resource_ident = format_ident!("{}", &model.name); - let select_query = format!("SELECT * FROM {} WHERE {} = $1", model.table_name, query_field.name); + let select_query = format!( + "SELECT * FROM {} WHERE {} = $1", + model.table_name, query_field.name + ); let func_name_ident = format_ident!("get_by_{}", query_field.name); @@ -40,7 +45,10 @@ fn gen_get_by_field_method(model: &Model, query_field: &Field) -> TokenStream { fn gen_get_many_by_field_method(model: &Model, query_field: &Field) -> TokenStream { let resource_ident = format_ident!("{}", &model.name); - let select_query_tmpl = format!("SELECT * FROM {} WHERE {} IN ({{}})", model.table_name, query_field.name); + let select_query_tmpl = format!( + "SELECT * FROM {} WHERE {} IN ({{}})", + model.table_name, query_field.name + ); let func_name_ident = format_ident!("get_many_by_{}", query_field.name); @@ -66,21 +74,41 @@ fn gen_get_many_by_field_method(model: &Model, query_field: &Field) -> TokenStre } fn get_mutation_fields(model: &Model) -> (Vec<&Field>, Vec<&Field>) { - let normal_field_names: Vec<&Field> = model.fields.iter() - .filter(|f| match f.foreign_mode { FieldForeignMode::NotRef => true, FieldForeignMode::ForeignRef(_) => false }) + let normal_field_names: Vec<&Field> = model + .fields + .iter() + .filter(|f| match f.foreign_mode { + FieldForeignMode::NotRef => true, + FieldForeignMode::ForeignRef(_) => false, + }) .collect(); - let foreign_keys_field_names: Vec<&Field> = model.fields.iter() - .filter(|f| match f.foreign_mode { FieldForeignMode::NotRef => false, FieldForeignMode::ForeignRef(_) => true }) + let foreign_keys_field_names: Vec<&Field> = model + .fields + .iter() + .filter(|f| match f.foreign_mode { + FieldForeignMode::NotRef => false, + FieldForeignMode::ForeignRef(_) => true, + }) .collect(); (normal_field_names, foreign_keys_field_names) } fn get_mutation_fields_ident(model: &Model) -> (Vec<&Field>, Vec<&Field>) { - let normal_field_names: Vec<&Field> = model.fields.iter() - .filter(|f| match f.foreign_mode { FieldForeignMode::NotRef => true, FieldForeignMode::ForeignRef(_) => false }) + let normal_field_names: Vec<&Field> = model + .fields + .iter() + .filter(|f| match f.foreign_mode { + FieldForeignMode::NotRef => true, + 
FieldForeignMode::ForeignRef(_) => false, + }) .collect(); - let foreign_keys_field_names: Vec<&Field> = model.fields.iter() - .filter(|f| match f.foreign_mode { FieldForeignMode::NotRef => false, FieldForeignMode::ForeignRef(_) => true }) + let foreign_keys_field_names: Vec<&Field> = model + .fields + .iter() + .filter(|f| match f.foreign_mode { + FieldForeignMode::NotRef => false, + FieldForeignMode::ForeignRef(_) => true, + }) .collect(); (normal_field_names, foreign_keys_field_names) } @@ -88,26 +116,31 @@ fn get_mutation_fields_ident(model: &Model) -> (Vec<&Field>, Vec<&Field>) { fn gen_insert_method(model: &Model) -> TokenStream { let resource_ident = format_ident!("{}", &model.name); - let value_templates = (1..(model.fields.len()+1)) + let value_templates = (1..(model.fields.len() + 1)) .map(|i| format!("${}", i)) .collect::>() .join(", "); let (normal_fields, foreign_keys_fields) = get_mutation_fields(model); let (normal_field_idents, foreign_keys_field_idents) = ( - normal_fields.iter().map(|f| format_ident!("{}", &f.name)).collect::>(), - foreign_keys_fields.iter().map(|f| format_ident!("{}", &f.name)).collect::>() + normal_fields + .iter() + .map(|f| format_ident!("{}", &f.name)) + .collect::>(), + foreign_keys_fields + .iter() + .map(|f| format_ident!("{}", &f.name)) + .collect::>(), ); - let sql_columns = [normal_fields, foreign_keys_fields].concat() + let sql_columns = [normal_fields, foreign_keys_fields] + .concat() .iter() .map(|f| f.name.clone()) .collect::>() .join(", "); let insert_query = format!( "INSERT INTO {} ({}) VALUES ({})", - model.table_name, - sql_columns, - value_templates + model.table_name, sql_columns, value_templates ); // foreign keys must be inserted first, we sort the columns so that foreign keys are first @@ -126,19 +159,26 @@ fn gen_insert_method(model: &Model) -> TokenStream { fn gen_insert_many_method(model: &Model) -> TokenStream { let resource_ident = format_ident!("{}", &model.name); - let sql_columns = model.fields.iter() + let sql_columns = model + .fields + .iter() .map(|f| f.name.clone()) .collect::>() .join(", "); let base_insert_query = format!( "INSERT INTO {} ({}) VALUES {{}} ON CONFLICT DO NOTHING", - model.table_name, - sql_columns + model.table_name, sql_columns ); let (normal_fields, foreign_keys_fields) = get_mutation_fields(model); let (normal_field_idents, foreign_keys_field_idents) = ( - normal_fields.iter().map(|f| format_ident!("{}", &f.name)).collect::>(), - foreign_keys_fields.iter().map(|f| format_ident!("{}", &f.name)).collect::>() + normal_fields + .iter() + .map(|f| format_ident!("{}", &f.name)) + .collect::>(), + foreign_keys_fields + .iter() + .map(|f| format_ident!("{}", &f.name)) + .collect::>(), ); let fields_count = model.fields.len(); @@ -174,32 +214,39 @@ fn gen_insert_many_method(model: &Model) -> TokenStream { } } - fn gen_update_by_id_method(model: &Model) -> TokenStream { let resource_ident = format_ident!("{}", &model.name); - let primary_key = &model.fields.iter() + let primary_key = &model + .fields + .iter() .find(|f| f.is_primary) .expect("A model must have at least one primary key") .name; let (normal_fields, foreign_keys_fields) = get_mutation_fields(model); let (normal_field_idents, foreign_keys_field_idents) = ( - normal_fields.iter().map(|f| format_ident!("{}", &f.name)).collect::>(), - foreign_keys_fields.iter().map(|f| format_ident!("{}", &f.name)).collect::>() + normal_fields + .iter() + .map(|f| format_ident!("{}", &f.name)) + .collect::>(), + foreign_keys_fields + .iter() + .map(|f| 
format_ident!("{}", &f.name)) + .collect::>(), ); - let sql_columns = [normal_fields, foreign_keys_fields].concat() + let sql_columns = [normal_fields, foreign_keys_fields] + .concat() .iter() .map(|f| f.name.clone()) .collect::>(); - let set_statements = sql_columns.iter() + let set_statements = sql_columns + .iter() .enumerate() - .map(|(i, column_name)| format!("{} = ${}", column_name, i+2)) + .map(|(i, column_name)| format!("{} = ${}", column_name, i + 2)) .collect::>() .join(", "); let update_query = format!( "UPDATE {} SET {} WHERE {} = $1", - model.table_name, - set_statements, - primary_key + model.table_name, set_statements, primary_key ); let func_name_ident = format_ident!("update_by_{}", primary_key); @@ -218,7 +265,9 @@ fn gen_update_by_id_method(model: &Model) -> TokenStream { } fn gen_delete_by_id_method(model: &Model) -> TokenStream { - let primary_key = &model.fields.iter() + let primary_key = &model + .fields + .iter() .find(|f| f.is_primary) .expect("A model must have at least one primary key") .name; @@ -226,8 +275,7 @@ fn gen_delete_by_id_method(model: &Model) -> TokenStream { let func_name_ident = format_ident!("delete_by_{}", primary_key); let query = format!( "DELETE FROM {} WHERE {} = $1", - model.table_name, - primary_key + model.table_name, primary_key ); quote! { @@ -243,7 +291,9 @@ fn gen_delete_by_id_method(model: &Model) -> TokenStream { } fn gen_delete_many_by_id_method(model: &Model) -> TokenStream { - let primary_key = &model.fields.iter() + let primary_key = &model + .fields + .iter() .find(|f| f.is_primary) .expect("A model must have at least one primary key") .name; @@ -251,8 +301,7 @@ fn gen_delete_many_by_id_method(model: &Model) -> TokenStream { let func_name_ident = format_ident!("delete_many_by_{}", primary_key); let delete_query_tmpl = format!( "DELETE FROM {} WHERE {} IN ({{}})", - model.table_name, - primary_key + model.table_name, primary_key ); quote! 
{ @@ -278,8 +327,10 @@ fn gen_delete_many_by_id_method(model: &Model) -> TokenStream { } } - -pub fn generate_repository_file(all_models: &[Model], model: &Model) -> Result { +pub fn generate_repository_file( + all_models: &[Model], + model: &Model, +) -> Result { let resource_name = model.name.clone(); let resource_module_ident = format_ident!("{}", &model.module_path.first().unwrap()); @@ -290,15 +341,19 @@ pub fn generate_repository_file(all_models: &[Model], model: &Model) -> Result Result = model + .fields + .iter() + .filter(|f| f.is_query_entrypoint) + .map(|field| gen_get_by_field_method(model, &field)) + .collect(); + let query_many_by_field_methods: Vec = model + .fields + .iter() + .filter(|f| f.is_query_entrypoint) + .map(|field| gen_get_many_by_field_method(model, &field)) + .collect(); - let query_by_field_methods: Vec = - model.fields.iter() - .filter(|f| f.is_query_entrypoint) - .map(|field| - gen_get_by_field_method( - model, - &field - ) - ) - .collect(); - let query_many_by_field_methods: Vec = - model.fields.iter() - .filter(|f| f.is_query_entrypoint) - .map(|field| - gen_get_many_by_field_method( - model, - &field - ) - ) - .collect(); - - let fields_with_foreign_refs: Vec<&Field> = model.fields.iter().filter(|f| - match f.foreign_mode { FieldForeignMode::ForeignRef(_) => true, FieldForeignMode::NotRef => false } - ).collect(); - let related_entity_methods_codes: Vec = fields_with_foreign_refs.iter().map(|field| - gen_get_many_of_related_entity_method(model, &field) - ).collect(); + let fields_with_foreign_refs: Vec<&Field> = model + .fields + .iter() + .filter(|f| match f.foreign_mode { + FieldForeignMode::ForeignRef(_) => true, + FieldForeignMode::NotRef => false, + }) + .collect(); + let related_entity_methods_codes: Vec = fields_with_foreign_refs + .iter() + .map(|field| gen_get_many_of_related_entity_method(model, &field)) + .collect(); // TODO: add import line let base_repository_code: TokenStream = quote! 
{ use crate::models::#resource_module_ident::#resource_ident; use crate::db::Database; - + pub struct #repository_ident { db: Database } @@ -370,7 +422,7 @@ pub fn generate_repository_file(all_models: &[Model], model: &Model) -> Result Result Result Result TokenStream { +pub fn gen_get_many_of_related_entity_method( + model: &Model, + foreign_key_field: &Field, +) -> TokenStream { let resource_ident = format_ident!("{}", &model.name); let foreign_ref_params = match &foreign_key_field.foreign_mode { FieldForeignMode::ForeignRef(params) => params, FieldForeignMode::NotRef => { panic!("Expected foreign key"); - } + } }; - let select_query = format!("SELECT * FROM {} WHERE {} = $1", model.table_name, foreign_key_field.name); + let select_query = format!( + "SELECT * FROM {} WHERE {} = $1", + model.table_name, foreign_key_field.name + ); let func_name_ident = format_ident!("get_many_of_{}", foreign_ref_params.target_resource_name); @@ -28,4 +34,3 @@ pub fn gen_get_many_of_related_entity_method(model: &Model, foreign_key_field: & } } } - diff --git a/lib/sqlxgentools_cli/src/main.rs b/lib/sqlxgentools_cli/src/main.rs index d6c1423..c0a2591 100644 --- a/lib/sqlxgentools_cli/src/main.rs +++ b/lib/sqlxgentools_cli/src/main.rs @@ -1,22 +1,19 @@ -use std::{ffi::OsStr, path::Path}; use attribute_derive::FromAttr; +use std::{ffi::OsStr, path::Path}; +use anyhow::{anyhow, Result}; use argh::FromArgs; -use anyhow::{Result, anyhow}; use crate::generators::{SourceNode, SourceNodeContainer}; -// use gen_migrations::generate_create_table_sql; -// use gen_repositories::{generate_repositories_source_files, SourceNodeContainer}; - +pub mod generators; pub mod models; pub mod parse_models; -pub mod generators; #[derive(FromAttr, PartialEq, Debug, Default)] #[attribute(ident = sql_generator_model)] pub struct SqlGeneratorModelAttr { - table_name: Option + table_name: Option, } #[derive(FromAttr, PartialEq, Debug, Default)] @@ -25,20 +22,19 @@ pub struct SqlGeneratorFieldAttr { is_primary: Option, is_unique: Option, reverse_relation_name: Option, - + /// to indicate that this field will be used to obtains entities /// our framework will generate methods for all fields that is an entrypoint - is_query_entrypoint: Option + is_query_entrypoint: Option, } - #[derive(FromArgs, PartialEq, Debug)] /// Generate SQL CREATE TABLE migrations #[argh(subcommand, name = "gen-migrations")] struct GenerateMigration { /// path of file where to write all in one generated SQL migration #[argh(option, short = 'o')] - output: Option + output: Option, } #[derive(FromArgs, PartialEq, Debug)] @@ -47,7 +43,7 @@ struct GenerateMigration { struct GenerateRepositories { /// path of the directory that contains repositories #[argh(option, short = 'o')] - output: Option + output: Option, } #[derive(FromArgs, PartialEq, Debug)] @@ -67,9 +63,9 @@ struct GeneratorArgs { /// path of the directory containing models #[argh(option, short = 'm')] models_path: Option, - + #[argh(subcommand)] - nested: GeneratorArgsSubCommands + nested: GeneratorArgsSubCommands, } fn write_source_code(base_path: &Path, snc: SourceNodeContainer) -> Result<()> { @@ -78,7 +74,7 @@ fn write_source_code(base_path: &Path, snc: SourceNodeContainer) -> Result<()> { SourceNode::File(code) => { println!("writing file {:?}", path); std::fs::write(path, code)?; - }, + } SourceNode::Directory(dir) => { for node in dir { write_source_code(&path, node)?; @@ -92,11 +88,14 @@ pub fn main() -> Result<()> { let args: GeneratorArgs = argh::from_env(); let project_root = 
&args.project_root.unwrap_or(".".to_string()); let project_root_path = Path::new(&project_root); - eprintln!("Using project root at: {:?}", &project_root_path.canonicalize()?); + eprintln!( + "Using project root at: {:?}", + &project_root_path.canonicalize()? + ); if !project_root_path.exists() { return Err(anyhow!("Could not resolve project root path.")); } - + // check Cargo.toml let main_manifest_location = "Cargo.toml"; let main_manifest_path = project_root_path.join(main_manifest_location); @@ -117,13 +116,17 @@ pub fn main() -> Result<()> { if !models_mod_path.exists() { return Err(anyhow!("Could not resolve models modules.")); } - if models_mod_path.file_name().map(|x| x == OsStr::new("mod.rs")).unwrap_or(false) { + if models_mod_path + .file_name() + .map(|x| x == OsStr::new("mod.rs")) + .unwrap_or(false) + { models_mod_path.pop(); } eprintln!("Found models in project, parsing models"); let models = parse_models::parse_models_from_module(&models_mod_path)?; dbg!(&models); - + match args.nested { GeneratorArgsSubCommands::GenerateRepositories(opts) => { eprintln!("Generating repositories…"); @@ -136,7 +139,7 @@ pub fn main() -> Result<()> { let snc = generators::repositories::generate_repositories_source_files(&models)?; dbg!(&snc); write_source_code(&repositories_mod_path, snc)?; - }, + } GeneratorArgsSubCommands::GenerateMigration(opts) => { eprintln!("Generating migrations…"); let sql_code = generators::migrations::generate_create_table_sql(&models)?; diff --git a/lib/sqlxgentools_cli/src/models.rs b/lib/sqlxgentools_cli/src/models.rs index 8be8dac..31559f9 100644 --- a/lib/sqlxgentools_cli/src/models.rs +++ b/lib/sqlxgentools_cli/src/models.rs @@ -7,7 +7,7 @@ struct Model { module_path: Vec, name: String, table_name: String, - fields: Vec + fields: Vec, } impl Model { @@ -29,7 +29,6 @@ impl Model { // } } - #[derive(Debug, Clone)] #[fully_pub] struct ForeignRefParams { @@ -41,12 +40,11 @@ struct ForeignRefParams { // target_resource_name_plural: String } - #[derive(Debug, Clone)] #[fully_pub] enum FieldForeignMode { ForeignRef(ForeignRefParams), - NotRef + NotRef, } #[derive(Debug, Clone)] @@ -58,6 +56,5 @@ struct Field { is_unique: bool, is_primary: bool, is_query_entrypoint: bool, - foreign_mode: FieldForeignMode + foreign_mode: FieldForeignMode, } - diff --git a/lib/sqlxgentools_cli/src/parse_models.rs b/lib/sqlxgentools_cli/src/parse_models.rs index bea7a43..88ca4c8 100644 --- a/lib/sqlxgentools_cli/src/parse_models.rs +++ b/lib/sqlxgentools_cli/src/parse_models.rs @@ -1,11 +1,14 @@ -use std::{fs, path::Path}; use attribute_derive::FromAttr; +use std::{fs, path::Path}; -use anyhow::{Result, anyhow}; +use anyhow::{anyhow, Result}; use convert_case::{Case, Casing}; use syn::{GenericArgument, PathArguments, Type}; -use crate::{SqlGeneratorFieldAttr, SqlGeneratorModelAttr, models::{Field, FieldForeignMode, ForeignRefParams, Model}}; +use crate::{ + models::{Field, FieldForeignMode, ForeignRefParams, Model}, + SqlGeneratorFieldAttr, SqlGeneratorModelAttr, +}; fn extract_generic_type(base_segments: Vec, ty: &Type) -> Option<&Type> { // If it is not `TypePath`, it is not possible to be `Option`, return `None` @@ -52,24 +55,38 @@ fn extract_generic_type(base_segments: Vec, ty: &Type) -> Option<&Type> fn get_type_first_ident(inp: &Type) -> Option { match inp { - Type::Path(field_type_path) => { - Some(field_type_path.path.segments.get(0).unwrap().ident.to_string()) - }, - _ => { - None - } + Type::Path(field_type_path) => Some( + field_type_path + .path + .segments + .get(0) + 
.unwrap() + .ident + .to_string(), + ), + _ => None, } } fn get_first_generic_arg_type_ident(inp: &Type) -> Option { if let Type::Path(field_type_path) = inp { - if let PathArguments::AngleBracketed(args) = &field_type_path.path.segments.get(0).unwrap().arguments { + if let PathArguments::AngleBracketed(args) = + &field_type_path.path.segments.get(0).unwrap().arguments + { if args.args.is_empty() { None } else { if let GenericArgument::Type(arg_type) = args.args.get(0).unwrap() { if let Type::Path(arg_type_path) = arg_type { - Some(arg_type_path.path.segments.get(0).unwrap().ident.to_string()) + Some( + arg_type_path + .path + .segments + .get(0) + .unwrap() + .ident + .to_string(), + ) } else { None } @@ -85,7 +102,6 @@ fn get_first_generic_arg_type_ident(inp: &Type) -> Option { } } - fn parse_model_attribute(item: &syn::ItemStruct) -> Result> { for attr in item.attrs.iter() { let attr_ident = match attr.path().get_ident() { @@ -101,9 +117,12 @@ fn parse_model_attribute(item: &syn::ItemStruct) -> Result { return Ok(Some(v)); - }, + } Err(err) => { - return Err(anyhow!("Failed to parse sql_generator_model attribute macro: {}", err)); + return Err(anyhow!( + "Failed to parse sql_generator_model attribute macro: {}", + err + )); } }; } @@ -125,9 +144,13 @@ fn parse_field_attribute(field: &syn::Field) -> Result { return Ok(Some(v)); - }, + } Err(err) => { - return Err(anyhow!("Failed to parse sql_generator_field attribute macro on field {:?}, {}", field, err)); + return Err(anyhow!( + "Failed to parse sql_generator_field attribute macro on field {:?}, {}", + field, + err + )); } }; } @@ -136,10 +159,7 @@ fn parse_field_attribute(field: &syn::Field) -> Result String { - format!( - "{}s", - struct_name.to_case(Case::Snake) - ) + format!("{}s", struct_name.to_case(Case::Snake)) } /// Scan for models struct in a rust file and return a struct representing the model @@ -166,7 +186,7 @@ pub fn parse_models(source_code_path: &Path) -> Result> { let field_name = field.ident.clone().unwrap().to_string(); let field_type = field.ty.clone(); println!("field {} {:?}", field_name, field_type); - + let mut output_field = Field { name: field_name, rust_type: "Unknown".into(), @@ -174,7 +194,7 @@ pub fn parse_models(source_code_path: &Path) -> Result> { is_primary: false, is_unique: false, is_query_entrypoint: false, - foreign_mode: FieldForeignMode::NotRef + foreign_mode: FieldForeignMode::NotRef, }; let first_type: String = match get_type_first_ident(&field_type) { @@ -187,8 +207,12 @@ pub fn parse_models(source_code_path: &Path) -> Result> { if first_type == "Option" { output_field.is_nullable = true; let inner_type = match extract_generic_type( - vec!["Option".into(), "std:option:Option".into(), "core:option:Option".into()], - &field_type + vec![ + "Option".into(), + "std:option:Option".into(), + "core:option:Option".into(), + ], + &field_type, ) { Some(v) => v, None => { @@ -198,15 +222,15 @@ pub fn parse_models(source_code_path: &Path) -> Result> { final_type = match get_type_first_ident(inner_type) { Some(v) => v, None => { - return Err(anyhow!("Could not extract ident from Option inner type")); + return Err(anyhow!( + "Could not extract ident from Option inner type" + )); } } } if first_type == "Vec" { - let inner_type = match extract_generic_type( - vec!["Vec".into()], - &field_type - ) { + let inner_type = match extract_generic_type(vec!["Vec".into()], &field_type) + { Some(v) => v, None => { return Err(anyhow!("Could not extract type from Vec")); @@ -221,13 +245,14 @@ pub fn 
parse_models(source_code_path: &Path) -> Result> { } output_field.rust_type = final_type; - let field_attrs_opt = parse_field_attribute(field)?; if first_type == "ForeignRef" { let attrs = match &field_attrs_opt { Some(attrs) => attrs, None => { - return Err(anyhow!("Found a ForeignRef type but did not found attributes.")) + return Err(anyhow!( + "Found a ForeignRef type but did not found attributes." + )) } }; let rrn = match &attrs.reverse_relation_name { @@ -237,39 +262,48 @@ pub fn parse_models(source_code_path: &Path) -> Result> { } }; - let extract_res = extract_generic_type(vec!["ForeignRef".into()], &field_type) - .and_then(|t| get_type_first_ident(t)); + let extract_res = + extract_generic_type(vec!["ForeignRef".into()], &field_type) + .and_then(|t| get_type_first_ident(t)); let target_type_name = match extract_res { Some(v) => v, None => { - return Err(anyhow!("Could not extract inner type from ForeignRef.")); + return Err(anyhow!( + "Could not extract inner type from ForeignRef." + )); } }; - output_field.foreign_mode = FieldForeignMode::ForeignRef( - ForeignRefParams { + output_field.foreign_mode = + FieldForeignMode::ForeignRef(ForeignRefParams { reverse_relation_name: rrn, - target_resource_name: target_type_name.to_case(Case::Snake) - } - ); + target_resource_name: target_type_name.to_case(Case::Snake), + }); } // parse attribute if let Some(field_attr) = field_attrs_opt { output_field.is_primary = field_attr.is_primary.unwrap_or_default(); output_field.is_unique = field_attr.is_unique.unwrap_or_default(); - output_field.is_query_entrypoint = field_attr.is_query_entrypoint.unwrap_or_default(); + output_field.is_query_entrypoint = + field_attr.is_query_entrypoint.unwrap_or_default(); } fields.push(output_field); } models.push(Model { - module_path: vec![source_code_path.file_stem().unwrap().to_str().unwrap().to_string()], + module_path: vec![source_code_path + .file_stem() + .unwrap() + .to_str() + .unwrap() + .to_string()], name: model_name.clone(), - table_name: model_attrs.table_name + table_name: model_attrs + .table_name .unwrap_or(generate_table_name_from_struct_name(&model_name)), - fields + fields, }) - }, + } _ => {} } } @@ -304,7 +338,7 @@ fn parse_models_from_module_inner(module_path: &Path) -> Result> { // match original_field.foreign_mode { // FieldForeignMode::NotRef => {}, // FieldForeignMode::ForeignRef(ref_params) => { - + // } // } diff --git a/lib/sqlxgentools_misc/src/lib.rs b/lib/sqlxgentools_misc/src/lib.rs index f20a305..eee9ed5 100644 --- a/lib/sqlxgentools_misc/src/lib.rs +++ b/lib/sqlxgentools_misc/src/lib.rs @@ -12,23 +12,20 @@ use sqlx_core::error::BoxDynError; use sqlx_core::types::Type; use sqlx_sqlite::{Sqlite, SqliteArgumentValue}; - #[fully_pub] trait DatabaseLine { fn id(&self) -> String; } - /// Wrapper to mark a model field as foreign /// You can use a generic argument inside ForeignRef to point to the target model #[derive(Clone, Debug)] #[fully_pub] struct ForeignRef { pub target_type: PhantomData, - pub target_id: String + pub target_id: String, } - // Implement serde Serialize for ForeignRef impl Serialize for ForeignRef { fn serialize(&self, serializer: S) -> Result @@ -40,22 +37,20 @@ impl Serialize for ForeignRef { } } - impl ForeignRef { - pub fn new(entity: &T) -> ForeignRef { + pub fn new(entity: &T) -> ForeignRef { ForeignRef { target_type: PhantomData, - target_id: entity.id() + target_id: entity.id(), } } } - impl<'r, DB: Database, T: Sized + DatabaseLine> Decode<'r, DB> for ForeignRef where // we want to delegate some of the 
work to string decoding so let's make sure strings // are supported by the database - &'r str: Decode<'r, DB> + &'r str: Decode<'r, DB>, { fn decode( value: ::ValueRef<'r>, @@ -66,7 +61,7 @@ where Ok(ForeignRef:: { target_type: PhantomData, - target_id: ref_val + target_id: ref_val, }) } } @@ -84,9 +79,11 @@ impl Type for ForeignRef { } impl Encode<'_, Sqlite> for ForeignRef { - fn encode_by_ref(&self, args: &mut Vec>) -> Result { + fn encode_by_ref( + &self, + args: &mut Vec>, + ) -> Result { args.push(SqliteArgumentValue::Text(self.target_id.clone().into())); Ok(IsNull::No) } } - From 8f7d11226fa04e72bd4017e1bacb91a39d6c534b Mon Sep 17 00:00:00 2001 From: Matthieu Bessat Date: Tue, 13 Jan 2026 21:15:27 +0100 Subject: [PATCH 2/3] refactor: apply clippy rules --- lib/sandbox/src/db.rs | 12 - lib/sandbox/src/main.rs | 14 +- .../src/repositories/user_repository.rs | 12 +- .../src/repositories/user_token_repository.rs | 18 +- lib/sqlxgentools_attrs/src/lib.rs | 2 +- .../src/generators/repositories/base.rs | 32 +- lib/sqlxgentools_cli/src/main.rs | 12 +- lib/sqlxgentools_cli/src/parse_models.rs | 294 +++++++----------- lib/sqlxgentools_misc/Cargo.toml | 2 +- 9 files changed, 153 insertions(+), 245 deletions(-) diff --git a/lib/sandbox/src/db.rs b/lib/sandbox/src/db.rs index dbf076c..89ad2c0 100644 --- a/lib/sandbox/src/db.rs +++ b/lib/sandbox/src/db.rs @@ -1,6 +1,5 @@ use anyhow::Context; use anyhow::Result; -use std::path::PathBuf; use std::str::FromStr; use fully_pub::fully_pub; @@ -16,14 +15,6 @@ struct Database(Pool); /// Initialize database pub async fn provide_database(sqlite_db_path: &str) -> Result { - let path = PathBuf::from(sqlite_db_path); - let is_db_initialization = !path.exists(); - // // database does not exists, trying to create it - // if path - // .parent() - // .filter(|pp| pp.exists()) - // Err(anyhow!("Could not find parent directory of the db location."))); - let conn_str = format!("sqlite://{sqlite_db_path}"); let pool = SqlitePoolOptions::new() @@ -31,9 +22,6 @@ pub async fn provide_database(sqlite_db_path: &str) -> Result { .connect_with(SqliteConnectOptions::from_str(&conn_str)?.create_if_missing(true)) .await .context("could not connect to database_url")?; - // if is_db_initialization { - // initialize_db(Database(pool.clone())).await?; - // } Ok(Database(pool)) } diff --git a/lib/sandbox/src/main.rs b/lib/sandbox/src/main.rs index 8ec9bba..421a797 100644 --- a/lib/sandbox/src/main.rs +++ b/lib/sandbox/src/main.rs @@ -1,4 +1,4 @@ -use anyhow::{Context, Result}; +use anyhow::Result; use chrono::Utc; use sqlx::types::Json; @@ -18,7 +18,7 @@ pub mod repositories; async fn main() -> Result<()> { println!("Sandbox"); - let users = vec![ + let users = [ User { id: "idu1".into(), handle: "john.doe".into(), @@ -50,13 +50,13 @@ async fn main() -> Result<()> { avatar_bytes: None, }, ]; - let user_token = UserToken { + let _user_token = UserToken { id: "idtoken1".into(), secret: "4LP5A3F3XBV5NM8VXRGZG3QDXO9PNAC0".into(), last_use_time: None, creation_time: Utc::now(), expiration_time: Utc::now(), - user_id: ForeignRef::new(&users.get(0).unwrap()), + user_id: ForeignRef::new(users.first().unwrap()), }; let db = provide_database("tmp/db.db").await?; @@ -70,7 +70,7 @@ async fn main() -> Result<()> { last_use_time: None, creation_time: Utc::now(), expiration_time: Utc::now(), - user_id: ForeignRef::new(&users.get(0).unwrap()), + user_id: ForeignRef::new(users.first().unwrap()), }, UserToken { id: "idtoken3".into(), @@ -78,7 +78,7 @@ async fn main() -> Result<()> { last_use_time: 
None, creation_time: Utc::now(), expiration_time: Utc::now(), - user_id: ForeignRef::new(&users.get(1).unwrap()), + user_id: ForeignRef::new(users.get(1).unwrap()), }, UserToken { id: "idtoken4".into(), @@ -86,7 +86,7 @@ async fn main() -> Result<()> { last_use_time: None, creation_time: Utc::now(), expiration_time: Utc::now(), - user_id: ForeignRef::new(&users.get(1).unwrap()), + user_id: ForeignRef::new(users.get(1).unwrap()), }, ]) .await?; diff --git a/lib/sandbox/src/repositories/user_repository.rs b/lib/sandbox/src/repositories/user_repository.rs index 7951733..317f3ef 100644 --- a/lib/sandbox/src/repositories/user_repository.rs +++ b/lib/sandbox/src/repositories/user_repository.rs @@ -40,8 +40,8 @@ impl UserRepository { .bind(&entity.id) .bind(&entity.handle) .bind(&entity.full_name) - .bind(&entity.prefered_color) - .bind(&entity.last_login_at) + .bind(entity.prefered_color) + .bind(entity.last_login_at) .bind(&entity.status) .bind(&entity.groups) .bind(&entity.avatar_bytes) @@ -75,8 +75,8 @@ impl UserRepository { .bind(&entity.id) .bind(&entity.handle) .bind(&entity.full_name) - .bind(&entity.prefered_color) - .bind(&entity.last_login_at) + .bind(entity.prefered_color) + .bind(entity.last_login_at) .bind(&entity.status) .bind(&entity.groups) .bind(&entity.avatar_bytes); @@ -92,8 +92,8 @@ impl UserRepository { .bind(&entity.id) .bind(&entity.handle) .bind(&entity.full_name) - .bind(&entity.prefered_color) - .bind(&entity.last_login_at) + .bind(entity.prefered_color) + .bind(entity.last_login_at) .bind(&entity.status) .bind(&entity.groups) .bind(&entity.avatar_bytes) diff --git a/lib/sandbox/src/repositories/user_token_repository.rs b/lib/sandbox/src/repositories/user_token_repository.rs index 10c554e..b65ec6e 100644 --- a/lib/sandbox/src/repositories/user_token_repository.rs +++ b/lib/sandbox/src/repositories/user_token_repository.rs @@ -42,9 +42,9 @@ impl UserTokenRepository { ) .bind(&entity.id) .bind(&entity.secret) - .bind(&entity.last_use_time) - .bind(&entity.creation_time) - .bind(&entity.expiration_time) + .bind(entity.last_use_time) + .bind(entity.creation_time) + .bind(entity.expiration_time) .bind(&entity.user_id.target_id) .execute(&self.db.0) .await?; @@ -75,9 +75,9 @@ impl UserTokenRepository { query = query .bind(&entity.id) .bind(&entity.secret) - .bind(&entity.last_use_time) - .bind(&entity.creation_time) - .bind(&entity.expiration_time) + .bind(entity.last_use_time) + .bind(entity.creation_time) + .bind(entity.expiration_time) .bind(&entity.user_id.target_id); } query.execute(&self.db.0).await?; @@ -90,9 +90,9 @@ impl UserTokenRepository { .bind(item_id) .bind(&entity.id) .bind(&entity.secret) - .bind(&entity.last_use_time) - .bind(&entity.creation_time) - .bind(&entity.expiration_time) + .bind(entity.last_use_time) + .bind(entity.creation_time) + .bind(entity.expiration_time) .bind(&entity.user_id.target_id) .execute(&self.db.0) .await?; diff --git a/lib/sqlxgentools_attrs/src/lib.rs b/lib/sqlxgentools_attrs/src/lib.rs index c3d1c51..4607474 100644 --- a/lib/sqlxgentools_attrs/src/lib.rs +++ b/lib/sqlxgentools_attrs/src/lib.rs @@ -21,7 +21,7 @@ pub fn derive_sql_generator_model_with_id(input: TokenStream) -> TokenStream { if let syn::Data::Struct(data) = input.data { if let Fields::Named(fields) = data.fields { for field in fields.named { - if field.ident.as_ref().map_or(false, |ident| ident == "id") { + if field.ident.as_ref().is_some_and(|ident| ident == "id") { let expanded = quote! 
{ impl DatabaseLine for #name { fn id(&self) -> String { diff --git a/lib/sqlxgentools_cli/src/generators/repositories/base.rs b/lib/sqlxgentools_cli/src/generators/repositories/base.rs index 0482ab8..26ebd41 100644 --- a/lib/sqlxgentools_cli/src/generators/repositories/base.rs +++ b/lib/sqlxgentools_cli/src/generators/repositories/base.rs @@ -93,26 +93,6 @@ fn get_mutation_fields(model: &Model) -> (Vec<&Field>, Vec<&Field>) { (normal_field_names, foreign_keys_field_names) } -fn get_mutation_fields_ident(model: &Model) -> (Vec<&Field>, Vec<&Field>) { - let normal_field_names: Vec<&Field> = model - .fields - .iter() - .filter(|f| match f.foreign_mode { - FieldForeignMode::NotRef => true, - FieldForeignMode::ForeignRef(_) => false, - }) - .collect(); - let foreign_keys_field_names: Vec<&Field> = model - .fields - .iter() - .filter(|f| match f.foreign_mode { - FieldForeignMode::NotRef => false, - FieldForeignMode::ForeignRef(_) => true, - }) - .collect(); - (normal_field_names, foreign_keys_field_names) -} - fn gen_insert_method(model: &Model) -> TokenStream { let resource_ident = format_ident!("{}", &model.name); @@ -328,7 +308,7 @@ fn gen_delete_many_by_id_method(model: &Model) -> TokenStream { } pub fn generate_repository_file( - all_models: &[Model], + _all_models: &[Model], model: &Model, ) -> Result { let resource_name = model.name.clone(); @@ -344,7 +324,7 @@ pub fn generate_repository_file( model .fields .iter() - .find(|f| f.is_primary == true) + .find(|f| f.is_primary) .expect("Expected at least one primary key on the model."), ); let get_many_by_id_method_code = gen_get_many_by_field_method( @@ -352,7 +332,7 @@ pub fn generate_repository_file( model .fields .iter() - .find(|f| f.is_primary == true) + .find(|f| f.is_primary) .expect("Expected at least one primary key on the model."), ); let insert_method_code = gen_insert_method(model); @@ -365,13 +345,13 @@ pub fn generate_repository_file( .fields .iter() .filter(|f| f.is_query_entrypoint) - .map(|field| gen_get_by_field_method(model, &field)) + .map(|field| gen_get_by_field_method(model, field)) .collect(); let query_many_by_field_methods: Vec = model .fields .iter() .filter(|f| f.is_query_entrypoint) - .map(|field| gen_get_many_by_field_method(model, &field)) + .map(|field| gen_get_many_by_field_method(model, field)) .collect(); let fields_with_foreign_refs: Vec<&Field> = model @@ -384,7 +364,7 @@ pub fn generate_repository_file( .collect(); let related_entity_methods_codes: Vec = fields_with_foreign_refs .iter() - .map(|field| gen_get_many_of_related_entity_method(model, &field)) + .map(|field| gen_get_many_of_related_entity_method(model, field)) .collect(); // TODO: add import line diff --git a/lib/sqlxgentools_cli/src/main.rs b/lib/sqlxgentools_cli/src/main.rs index c0a2591..7de6696 100644 --- a/lib/sqlxgentools_cli/src/main.rs +++ b/lib/sqlxgentools_cli/src/main.rs @@ -72,7 +72,7 @@ fn write_source_code(base_path: &Path, snc: SourceNodeContainer) -> Result<()> { let path = base_path.join(snc.name); match snc.inner { SourceNode::File(code) => { - println!("writing file {:?}", path); + println!("Writing file {:?}.", path); std::fs::write(path, code)?; } SourceNode::Directory(dir) => { @@ -125,7 +125,10 @@ pub fn main() -> Result<()> { } eprintln!("Found models in project, parsing models"); let models = parse_models::parse_models_from_module(&models_mod_path)?; - dbg!(&models); + eprintln!( + "Found and parsed a grand total of {} sqlxgentools compatible models.", + models.len() + ); match args.nested { 
GeneratorArgsSubCommands::GenerateRepositories(opts) => { @@ -137,7 +140,6 @@ pub fn main() -> Result<()> { return Err(anyhow!("Could not resolve repositories modules.")); } let snc = generators::repositories::generate_repositories_source_files(&models)?; - dbg!(&snc); write_source_code(&repositories_mod_path, snc)?; } GeneratorArgsSubCommands::GenerateMigration(opts) => { @@ -145,8 +147,8 @@ pub fn main() -> Result<()> { let sql_code = generators::migrations::generate_create_table_sql(&models)?; if let Some(out_location) = opts.output { let output_path = Path::new(&out_location); - let write_res = std::fs::write(output_path, sql_code); - eprintln!("{:?}", write_res); + let _write_res = std::fs::write(output_path, sql_code); + // TODO: check if write result is an error and return error message. } else { println!("{}", sql_code); } diff --git a/lib/sqlxgentools_cli/src/parse_models.rs b/lib/sqlxgentools_cli/src/parse_models.rs index 88ca4c8..dbe5648 100644 --- a/lib/sqlxgentools_cli/src/parse_models.rs +++ b/lib/sqlxgentools_cli/src/parse_models.rs @@ -3,7 +3,7 @@ use std::{fs, path::Path}; use anyhow::{anyhow, Result}; use convert_case::{Case, Casing}; -use syn::{GenericArgument, PathArguments, Type}; +use syn::{GenericArgument, Type}; use crate::{ models::{Field, FieldForeignMode, ForeignRefParams, Model}, @@ -68,40 +68,6 @@ fn get_type_first_ident(inp: &Type) -> Option { } } -fn get_first_generic_arg_type_ident(inp: &Type) -> Option { - if let Type::Path(field_type_path) = inp { - if let PathArguments::AngleBracketed(args) = - &field_type_path.path.segments.get(0).unwrap().arguments - { - if args.args.is_empty() { - None - } else { - if let GenericArgument::Type(arg_type) = args.args.get(0).unwrap() { - if let Type::Path(arg_type_path) = arg_type { - Some( - arg_type_path - .path - .segments - .get(0) - .unwrap() - .ident - .to_string(), - ) - } else { - None - } - } else { - None - } - } - } else { - None - } - } else { - None - } -} - fn parse_model_attribute(item: &syn::ItemStruct) -> Result> { for attr in item.attrs.iter() { let attr_ident = match attr.path().get_ident() { @@ -170,141 +136,130 @@ pub fn parse_models(source_code_path: &Path) -> Result> { let mut models: Vec = vec![]; for item in parsed_file.items { - match item { - syn::Item::Struct(itemval) => { - let model_name = itemval.ident.to_string(); - let model_attrs = match parse_model_attribute(&itemval)? { - Some(v) => v, - None => { - // we require model struct to have the `sql_generator_model` attribute - continue; - } + if let syn::Item::Struct(itemval) = item { + let model_name = itemval.ident.to_string(); + let model_attrs = match parse_model_attribute(&itemval)? 
{ + Some(v) => v, + None => { + // we require model struct to have the `sql_generator_model` attribute + continue; + } + }; + + let mut fields: Vec = vec![]; + for field in itemval.fields.iter() { + let field_name = field.ident.clone().unwrap().to_string(); + let field_type = field.ty.clone(); + + let mut output_field = Field { + name: field_name, + rust_type: "Unknown".into(), + is_nullable: false, + is_primary: false, + is_unique: false, + is_query_entrypoint: false, + foreign_mode: FieldForeignMode::NotRef, }; - let mut fields: Vec = vec![]; - for field in itemval.fields.iter() { - let field_name = field.ident.clone().unwrap().to_string(); - let field_type = field.ty.clone(); - println!("field {} {:?}", field_name, field_type); - - let mut output_field = Field { - name: field_name, - rust_type: "Unknown".into(), - is_nullable: false, - is_primary: false, - is_unique: false, - is_query_entrypoint: false, - foreign_mode: FieldForeignMode::NotRef, + let first_type: String = match get_type_first_ident(&field_type) { + Some(v) => v, + None => { + return Err(anyhow!("Could not extract ident from Option inner type")); + } + }; + let mut final_type = first_type.clone(); + if first_type == "Option" { + output_field.is_nullable = true; + let inner_type = match extract_generic_type( + vec![ + "Option".into(), + "std:option:Option".into(), + "core:option:Option".into(), + ], + &field_type, + ) { + Some(v) => v, + None => { + return Err(anyhow!("Could not extract type from Option")); + } }; - - let first_type: String = match get_type_first_ident(&field_type) { + final_type = match get_type_first_ident(inner_type) { Some(v) => v, None => { return Err(anyhow!("Could not extract ident from Option inner type")); } - }; - let mut final_type = first_type.clone(); - if first_type == "Option" { - output_field.is_nullable = true; - let inner_type = match extract_generic_type( - vec![ - "Option".into(), - "std:option:Option".into(), - "core:option:Option".into(), - ], - &field_type, - ) { - Some(v) => v, - None => { - return Err(anyhow!("Could not extract type from Option")); - } - }; - final_type = match get_type_first_ident(inner_type) { - Some(v) => v, - None => { - return Err(anyhow!( - "Could not extract ident from Option inner type" - )); - } - } } - if first_type == "Vec" { - let inner_type = match extract_generic_type(vec!["Vec".into()], &field_type) - { - Some(v) => v, - None => { - return Err(anyhow!("Could not extract type from Vec")); - } - }; - final_type = match get_type_first_ident(inner_type) { - Some(v) => format!("Vec<{}>", v), - None => { - return Err(anyhow!("Could not extract ident from Vec inner type")); - } - } - } - output_field.rust_type = final_type; - - let field_attrs_opt = parse_field_attribute(field)?; - if first_type == "ForeignRef" { - let attrs = match &field_attrs_opt { - Some(attrs) => attrs, - None => { - return Err(anyhow!( - "Found a ForeignRef type but did not found attributes." - )) - } - }; - let rrn = match &attrs.reverse_relation_name { - Some(rrn) => rrn.clone(), - None => { - return Err(anyhow!("Found a ForeignRef type but did not found reverse_relation_name attribute.")) - } - }; - - let extract_res = - extract_generic_type(vec!["ForeignRef".into()], &field_type) - .and_then(|t| get_type_first_ident(t)); - let target_type_name = match extract_res { - Some(v) => v, - None => { - return Err(anyhow!( - "Could not extract inner type from ForeignRef." 
- )); - } - }; - output_field.foreign_mode = - FieldForeignMode::ForeignRef(ForeignRefParams { - reverse_relation_name: rrn, - target_resource_name: target_type_name.to_case(Case::Snake), - }); - } - - // parse attribute - if let Some(field_attr) = field_attrs_opt { - output_field.is_primary = field_attr.is_primary.unwrap_or_default(); - output_field.is_unique = field_attr.is_unique.unwrap_or_default(); - output_field.is_query_entrypoint = - field_attr.is_query_entrypoint.unwrap_or_default(); - } - - fields.push(output_field); } - models.push(Model { - module_path: vec![source_code_path - .file_stem() - .unwrap() - .to_str() - .unwrap() - .to_string()], - name: model_name.clone(), - table_name: model_attrs - .table_name - .unwrap_or(generate_table_name_from_struct_name(&model_name)), - fields, - }) + if first_type == "Vec" { + let inner_type = match extract_generic_type(vec!["Vec".into()], &field_type) { + Some(v) => v, + None => { + return Err(anyhow!("Could not extract type from Vec")); + } + }; + final_type = match get_type_first_ident(inner_type) { + Some(v) => format!("Vec<{}>", v), + None => { + return Err(anyhow!("Could not extract ident from Vec inner type")); + } + } + } + output_field.rust_type = final_type; + + let field_attrs_opt = parse_field_attribute(field)?; + if first_type == "ForeignRef" { + let attrs = match &field_attrs_opt { + Some(attrs) => attrs, + None => { + return Err(anyhow!( + "Found a ForeignRef type but did not found attributes." + )) + } + }; + let rrn = match &attrs.reverse_relation_name { + Some(rrn) => rrn.clone(), + None => { + return Err(anyhow!("Found a ForeignRef type but did not found reverse_relation_name attribute.")) + } + }; + + let extract_res = extract_generic_type(vec!["ForeignRef".into()], &field_type) + .and_then(get_type_first_ident); + let target_type_name = match extract_res { + Some(v) => v, + None => { + return Err(anyhow!("Could not extract inner type from ForeignRef.")); + } + }; + output_field.foreign_mode = FieldForeignMode::ForeignRef(ForeignRefParams { + reverse_relation_name: rrn, + target_resource_name: target_type_name.to_case(Case::Snake), + }); + } + + // parse attribute + if let Some(field_attr) = field_attrs_opt { + output_field.is_primary = field_attr.is_primary.unwrap_or_default(); + output_field.is_unique = field_attr.is_unique.unwrap_or_default(); + output_field.is_query_entrypoint = + field_attr.is_query_entrypoint.unwrap_or_default(); + } + + fields.push(output_field); } - _ => {} + models.push(Model { + module_path: vec![source_code_path + .file_stem() + .unwrap() + .to_str() + .unwrap() + .to_string()], + name: model_name.clone(), + table_name: model_attrs + .table_name + .unwrap_or(generate_table_name_from_struct_name(&model_name)), + fields, + }) } } Ok(models) @@ -315,7 +270,7 @@ fn parse_models_from_module_inner(module_path: &Path) -> Result> { let mut models: Vec = vec![]; if module_path.is_file() { - println!("Parsing models from path {:?}.", module_path); + println!("Looking for models to parse from path {:?}.", module_path); models.extend(parse_models(module_path)?); return Ok(models); } @@ -330,23 +285,6 @@ fn parse_models_from_module_inner(module_path: &Path) -> Result> { Ok(models) } -// fn complete_models(original_models: Vec) -> Result> { -// let mut new_models: Vec = vec![]; -// for model in original_models { -// for original_field in model.fields { -// let mut field = original_field -// match original_field.foreign_mode { -// FieldForeignMode::NotRef => {}, -// FieldForeignMode::ForeignRef(ref_params) 
=> {
-
-//                 }
-//             }
-
-//         }
-//     }
-//     Ok(new_models)
-// }
-
 /// Scan for models struct in a rust file and return a struct representing the model
 pub fn parse_models_from_module(module_path: &Path) -> Result<Vec<Model>> {
     let models = parse_models_from_module_inner(module_path)?;
diff --git a/lib/sqlxgentools_misc/Cargo.toml b/lib/sqlxgentools_misc/Cargo.toml
index dfbfcb6..193791a 100644
--- a/lib/sqlxgentools_misc/Cargo.toml
+++ b/lib/sqlxgentools_misc/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "sqlxgentools_misc"
-description = "Various misc class to use in applications that use sqlxgentools"
+description = "Various data types and traits to use in a sqlxgentools-enabled codebase."
 publish = true
 edition.workspace = true
 authors.workspace = true

From d3aae47d2c0ba4d6c8b6b6088904a8c812cf485f Mon Sep 17 00:00:00 2001
From: Matthieu Bessat
Date: Tue, 13 Jan 2026 21:41:03 +0100
Subject: [PATCH 3/3] docs: still WIP

---
 README.md                       | 42 +++++++++++++++------------------
 docs/concepts/architecture.md   |  0
 docs/how-tos/debug_models.md    |  0
 docs/references/cli.md          |  0
 docs/references/repositories.md |  0
 docs/tutorials/quick_start.md   | 31 ++++++++++++++++++++++++
 docs/tutorials/relations.md     |  1 +
 7 files changed, 51 insertions(+), 23 deletions(-)
 create mode 100644 docs/concepts/architecture.md
 create mode 100644 docs/how-tos/debug_models.md
 create mode 100644 docs/references/cli.md
 create mode 100644 docs/references/repositories.md
 create mode 100644 docs/tutorials/relations.md

diff --git a/README.md b/README.md
index f862881..23697a6 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,12 @@
 # [WIP] sqlxgentools
 
-Tools to generate SQL migrations and Rust SQLx repositories code from models structs to use with a SQLite database.
+A little tool to generate SQLite migration files and Rust SQLx repository code, all from model structs.
 
-Will be used in [minauthator](https://forge.lefuturiste.fr/mbess/minauthator).
+Still very much a work in progress, but it can already be used in your next Rust app if you don't mind a few limitations, like the lack of incremental migrations and some quirks here and there.
+
+## Getting started
+
+- [Quick start tutorial](./docs/tutorials/quick_start.md)
 
 ## Project context
 
@@ -18,10 +22,19 @@
 
 - Provide a full a ORM interface
 
+## Included crates
+
+This project is split into three published crates.
+
+- [`sqlxgentools_cli`](https://crates.io/crates/sqlxgentools_cli), which parses models and generates migrations and repositories.
+- [`sqlxgentools_attrs`](https://crates.io/crates/sqlxgentools_attrs), which provides the proc macros.
+- [`sqlxgentools_misc`](https://crates.io/crates/sqlxgentools_misc), which provides data types and traits (optional).
+
 ## Features
 
 - [x] generate migrations
   - [x] from scratch
+  - [ ] incremental migration
   - [ ] up migration
   - [ ] down migration
 - [x] generate repositories
@@ -29,27 +42,10 @@
   - [x] get_by_id
   - [x] insert
   - [x] insert_many
-  - [ ] generate custom by
-  - [x] co-exist with custom repository
+  - [x] custom get_by, get_many_by
+  - [x] get_many_of (from one-to-many relations)
 
-## Usage
+## Contributions
 
-### Generate initial CREATE TABLE sqlite migration
+Questions, remarks and contributions are very much welcome.
 
-    cargo run --bin sqlx-generator -- ./path/to/project generate-create-migrations > migrations/all.sql
-
-    sqlx-generator \
-        -m path/to/models \
-        gen-repositories \
-        -o path/to/repositories
-
-    sqlx-generator \
-        -m path/to/models \
-        gen-migrations \
-        -o path/to/migrations/all.sql
-
-### Generate repositories code
-
-not implemented yet
-
-    cargo run --bin sqlx-generator -- ./path/to/project generate-repositories
diff --git a/docs/concepts/architecture.md b/docs/concepts/architecture.md
new file mode 100644
index 0000000..e69de29
diff --git a/docs/how-tos/debug_models.md b/docs/how-tos/debug_models.md
new file mode 100644
index 0000000..e69de29
diff --git a/docs/references/cli.md b/docs/references/cli.md
new file mode 100644
index 0000000..e69de29
diff --git a/docs/references/repositories.md b/docs/references/repositories.md
new file mode 100644
index 0000000..e69de29
diff --git a/docs/tutorials/quick_start.md b/docs/tutorials/quick_start.md
index c94322a..976eda5 100644
--- a/docs/tutorials/quick_start.md
+++ b/docs/tutorials/quick_start.md
@@ -6,3 +6,34 @@ Steps:
 - Generate migrations
 - Generate repositories
 - Use repositories in your code
+
+### CLI installation
+
+The [sqlxgentools_cli crate](https://crates.io/crates/sqlxgentools_cli) provides the CLI.
+It can be installed globally on your machine (or at least for your user).
+
+    cargo install sqlxgentools_cli
+
+### Project installation
+
+Install the `sqlxgentools_attrs` crate in your project.
+
+### Declare your models
+
+TODO
+
+### Generate migrations
+
+Change directory into your project root.
+
+    sqlx-generator -m path/to/models_module gen-migrations -o path/to/migrations/all.sql
+
+### Generate repositories
+
+Change directory into your project root.
+
+    sqlx-generator -m path/to/models_module gen-repositories -o path/to/repositories_module
+
+### Use the repositories
+
+TODO
diff --git a/docs/tutorials/relations.md b/docs/tutorials/relations.md
new file mode 100644
index 0000000..1333ed7
--- /dev/null
+++ b/docs/tutorials/relations.md
@@ -0,0 +1 @@
+TODO