refactor: apply clippy rules

This commit is contained in:
Matthieu Bessat 2026-01-13 21:15:27 +01:00
parent d205d722aa
commit 8f7d11226f
9 changed files with 153 additions and 245 deletions

View file

@@ -1,6 +1,5 @@
use anyhow::Context;
use anyhow::Result;
use std::path::PathBuf;
use std::str::FromStr;
use fully_pub::fully_pub;
@@ -16,14 +15,6 @@ struct Database(Pool<Sqlite>);
/// Initialize database
pub async fn provide_database(sqlite_db_path: &str) -> Result<Database> {
let path = PathBuf::from(sqlite_db_path);
let is_db_initialization = !path.exists();
// // database does not exists, trying to create it
// if path
// .parent()
// .filter(|pp| pp.exists())
// Err(anyhow!("Could not find parent directory of the db location.")));
let conn_str = format!("sqlite://{sqlite_db_path}");
let pool = SqlitePoolOptions::new()
@@ -31,9 +22,6 @@ pub async fn provide_database(sqlite_db_path: &str) -> Result<Database> {
.connect_with(SqliteConnectOptions::from_str(&conn_str)?.create_if_missing(true))
.await
.context("could not connect to database_url")?;
// if is_db_initialization {
// initialize_db(Database(pool.clone())).await?;
// }
Ok(Database(pool))
}

View file

@@ -1,4 +1,4 @@
use anyhow::{Context, Result};
use anyhow::Result;
use chrono::Utc;
use sqlx::types::Json;
@@ -18,7 +18,7 @@ pub mod repositories;
async fn main() -> Result<()> {
println!("Sandbox");
let users = vec![
let users = [
User {
id: "idu1".into(),
handle: "john.doe".into(),
@@ -50,13 +50,13 @@ async fn main() -> Result<()> {
avatar_bytes: None,
},
];
let user_token = UserToken {
let _user_token = UserToken {
id: "idtoken1".into(),
secret: "4LP5A3F3XBV5NM8VXRGZG3QDXO9PNAC0".into(),
last_use_time: None,
creation_time: Utc::now(),
expiration_time: Utc::now(),
user_id: ForeignRef::new(&users.get(0).unwrap()),
user_id: ForeignRef::new(users.first().unwrap()),
};
let db = provide_database("tmp/db.db").await?;
@@ -70,7 +70,7 @@ async fn main() -> Result<()> {
last_use_time: None,
creation_time: Utc::now(),
expiration_time: Utc::now(),
user_id: ForeignRef::new(&users.get(0).unwrap()),
user_id: ForeignRef::new(users.first().unwrap()),
},
UserToken {
id: "idtoken3".into(),
@@ -78,7 +78,7 @@ async fn main() -> Result<()> {
last_use_time: None,
creation_time: Utc::now(),
expiration_time: Utc::now(),
user_id: ForeignRef::new(&users.get(1).unwrap()),
user_id: ForeignRef::new(users.get(1).unwrap()),
},
UserToken {
id: "idtoken4".into(),
@@ -86,7 +86,7 @@ async fn main() -> Result<()> {
last_use_time: None,
creation_time: Utc::now(),
expiration_time: Utc::now(),
user_id: ForeignRef::new(&users.get(1).unwrap()),
user_id: ForeignRef::new(users.get(1).unwrap()),
},
])
.await?;

View file

@@ -40,8 +40,8 @@ impl UserRepository {
.bind(&entity.id)
.bind(&entity.handle)
.bind(&entity.full_name)
.bind(&entity.prefered_color)
.bind(&entity.last_login_at)
.bind(entity.prefered_color)
.bind(entity.last_login_at)
.bind(&entity.status)
.bind(&entity.groups)
.bind(&entity.avatar_bytes)
@@ -75,8 +75,8 @@ impl UserRepository {
.bind(&entity.id)
.bind(&entity.handle)
.bind(&entity.full_name)
.bind(&entity.prefered_color)
.bind(&entity.last_login_at)
.bind(entity.prefered_color)
.bind(entity.last_login_at)
.bind(&entity.status)
.bind(&entity.groups)
.bind(&entity.avatar_bytes);
@@ -92,8 +92,8 @@ impl UserRepository {
.bind(&entity.id)
.bind(&entity.handle)
.bind(&entity.full_name)
.bind(&entity.prefered_color)
.bind(&entity.last_login_at)
.bind(entity.prefered_color)
.bind(entity.last_login_at)
.bind(&entity.status)
.bind(&entity.groups)
.bind(&entity.avatar_bytes)

View file

@@ -42,9 +42,9 @@ impl UserTokenRepository {
)
.bind(&entity.id)
.bind(&entity.secret)
.bind(&entity.last_use_time)
.bind(&entity.creation_time)
.bind(&entity.expiration_time)
.bind(entity.last_use_time)
.bind(entity.creation_time)
.bind(entity.expiration_time)
.bind(&entity.user_id.target_id)
.execute(&self.db.0)
.await?;
@@ -75,9 +75,9 @@ impl UserTokenRepository {
query = query
.bind(&entity.id)
.bind(&entity.secret)
.bind(&entity.last_use_time)
.bind(&entity.creation_time)
.bind(&entity.expiration_time)
.bind(entity.last_use_time)
.bind(entity.creation_time)
.bind(entity.expiration_time)
.bind(&entity.user_id.target_id);
}
query.execute(&self.db.0).await?;
@@ -90,9 +90,9 @@ impl UserTokenRepository {
.bind(item_id)
.bind(&entity.id)
.bind(&entity.secret)
.bind(&entity.last_use_time)
.bind(&entity.creation_time)
.bind(&entity.expiration_time)
.bind(entity.last_use_time)
.bind(entity.creation_time)
.bind(entity.expiration_time)
.bind(&entity.user_id.target_id)
.execute(&self.db.0)
.await?;

View file

@@ -21,7 +21,7 @@ pub fn derive_sql_generator_model_with_id(input: TokenStream) -> TokenStream {
if let syn::Data::Struct(data) = input.data {
if let Fields::Named(fields) = data.fields {
for field in fields.named {
if field.ident.as_ref().map_or(false, |ident| ident == "id") {
if field.ident.as_ref().is_some_and(|ident| ident == "id") {
let expanded = quote! {
impl DatabaseLine for #name {
fn id(&self) -> String {

View file

@@ -93,26 +93,6 @@ fn get_mutation_fields(model: &Model) -> (Vec<&Field>, Vec<&Field>) {
(normal_field_names, foreign_keys_field_names)
}
fn get_mutation_fields_ident(model: &Model) -> (Vec<&Field>, Vec<&Field>) {
let normal_field_names: Vec<&Field> = model
.fields
.iter()
.filter(|f| match f.foreign_mode {
FieldForeignMode::NotRef => true,
FieldForeignMode::ForeignRef(_) => false,
})
.collect();
let foreign_keys_field_names: Vec<&Field> = model
.fields
.iter()
.filter(|f| match f.foreign_mode {
FieldForeignMode::NotRef => false,
FieldForeignMode::ForeignRef(_) => true,
})
.collect();
(normal_field_names, foreign_keys_field_names)
}
fn gen_insert_method(model: &Model) -> TokenStream {
let resource_ident = format_ident!("{}", &model.name);
@@ -328,7 +308,7 @@ fn gen_delete_many_by_id_method(model: &Model) -> TokenStream {
}
pub fn generate_repository_file(
all_models: &[Model],
_all_models: &[Model],
model: &Model,
) -> Result<SourceNodeContainer> {
let resource_name = model.name.clone();
@@ -344,7 +324,7 @@ pub fn generate_repository_file(
model
.fields
.iter()
.find(|f| f.is_primary == true)
.find(|f| f.is_primary)
.expect("Expected at least one primary key on the model."),
);
let get_many_by_id_method_code = gen_get_many_by_field_method(
@ -352,7 +332,7 @@ pub fn generate_repository_file(
model
.fields
.iter()
.find(|f| f.is_primary == true)
.find(|f| f.is_primary)
.expect("Expected at least one primary key on the model."),
);
let insert_method_code = gen_insert_method(model);
@@ -365,13 +345,13 @@ pub fn generate_repository_file(
.fields
.iter()
.filter(|f| f.is_query_entrypoint)
.map(|field| gen_get_by_field_method(model, &field))
.map(|field| gen_get_by_field_method(model, field))
.collect();
let query_many_by_field_methods: Vec<TokenStream> = model
.fields
.iter()
.filter(|f| f.is_query_entrypoint)
.map(|field| gen_get_many_by_field_method(model, &field))
.map(|field| gen_get_many_by_field_method(model, field))
.collect();
let fields_with_foreign_refs: Vec<&Field> = model
@@ -384,7 +364,7 @@ pub fn generate_repository_file(
.collect();
let related_entity_methods_codes: Vec<TokenStream> = fields_with_foreign_refs
.iter()
.map(|field| gen_get_many_of_related_entity_method(model, &field))
.map(|field| gen_get_many_of_related_entity_method(model, field))
.collect();
// TODO: add import line

View file

@@ -72,7 +72,7 @@ fn write_source_code(base_path: &Path, snc: SourceNodeContainer) -> Result<()> {
let path = base_path.join(snc.name);
match snc.inner {
SourceNode::File(code) => {
println!("writing file {:?}", path);
println!("Writing file {:?}.", path);
std::fs::write(path, code)?;
}
SourceNode::Directory(dir) => {
@@ -125,7 +125,10 @@ pub fn main() -> Result<()> {
}
eprintln!("Found models in project, parsing models");
let models = parse_models::parse_models_from_module(&models_mod_path)?;
dbg!(&models);
eprintln!(
"Found and parsed a grand total of {} sqlxgentools compatible models.",
models.len()
);
match args.nested {
GeneratorArgsSubCommands::GenerateRepositories(opts) => {
@@ -137,7 +140,6 @@ pub fn main() -> Result<()> {
return Err(anyhow!("Could not resolve repositories modules."));
}
let snc = generators::repositories::generate_repositories_source_files(&models)?;
dbg!(&snc);
write_source_code(&repositories_mod_path, snc)?;
}
GeneratorArgsSubCommands::GenerateMigration(opts) => {
@@ -145,8 +147,8 @@ pub fn main() -> Result<()> {
let sql_code = generators::migrations::generate_create_table_sql(&models)?;
if let Some(out_location) = opts.output {
let output_path = Path::new(&out_location);
let write_res = std::fs::write(output_path, sql_code);
eprintln!("{:?}", write_res);
let _write_res = std::fs::write(output_path, sql_code);
// TODO: check if write result is an error and return error message.
} else {
println!("{}", sql_code);
}

View file

@@ -3,7 +3,7 @@ use std::{fs, path::Path};
use anyhow::{anyhow, Result};
use convert_case::{Case, Casing};
use syn::{GenericArgument, PathArguments, Type};
use syn::{GenericArgument, Type};
use crate::{
models::{Field, FieldForeignMode, ForeignRefParams, Model},
@@ -68,40 +68,6 @@ fn get_type_first_ident(inp: &Type) -> Option<String> {
}
}
fn get_first_generic_arg_type_ident(inp: &Type) -> Option<String> {
if let Type::Path(field_type_path) = inp {
if let PathArguments::AngleBracketed(args) =
&field_type_path.path.segments.get(0).unwrap().arguments
{
if args.args.is_empty() {
None
} else {
if let GenericArgument::Type(arg_type) = args.args.get(0).unwrap() {
if let Type::Path(arg_type_path) = arg_type {
Some(
arg_type_path
.path
.segments
.get(0)
.unwrap()
.ident
.to_string(),
)
} else {
None
}
} else {
None
}
}
} else {
None
}
} else {
None
}
}
fn parse_model_attribute(item: &syn::ItemStruct) -> Result<Option<SqlGeneratorModelAttr>> {
for attr in item.attrs.iter() {
let attr_ident = match attr.path().get_ident() {
@@ -170,8 +136,7 @@ pub fn parse_models(source_code_path: &Path) -> Result<Vec<Model>> {
let mut models: Vec<Model> = vec![];
for item in parsed_file.items {
match item {
syn::Item::Struct(itemval) => {
if let syn::Item::Struct(itemval) = item {
let model_name = itemval.ident.to_string();
let model_attrs = match parse_model_attribute(&itemval)? {
Some(v) => v,
@@ -185,7 +150,6 @@ pub fn parse_models(source_code_path: &Path) -> Result<Vec<Model>> {
for field in itemval.fields.iter() {
let field_name = field.ident.clone().unwrap().to_string();
let field_type = field.ty.clone();
println!("field {} {:?}", field_name, field_type);
let mut output_field = Field {
name: field_name,
@@ -222,15 +186,12 @@ pub fn parse_models(source_code_path: &Path) -> Result<Vec<Model>> {
final_type = match get_type_first_ident(inner_type) {
Some(v) => v,
None => {
return Err(anyhow!(
"Could not extract ident from Option inner type"
));
return Err(anyhow!("Could not extract ident from Option inner type"));
}
}
}
if first_type == "Vec" {
let inner_type = match extract_generic_type(vec!["Vec".into()], &field_type)
{
let inner_type = match extract_generic_type(vec!["Vec".into()], &field_type) {
Some(v) => v,
None => {
return Err(anyhow!("Could not extract type from Vec"));
@@ -262,19 +223,15 @@ pub fn parse_models(source_code_path: &Path) -> Result<Vec<Model>> {
}
};
let extract_res =
extract_generic_type(vec!["ForeignRef".into()], &field_type)
.and_then(|t| get_type_first_ident(t));
let extract_res = extract_generic_type(vec!["ForeignRef".into()], &field_type)
.and_then(get_type_first_ident);
let target_type_name = match extract_res {
Some(v) => v,
None => {
return Err(anyhow!(
"Could not extract inner type from ForeignRef."
));
return Err(anyhow!("Could not extract inner type from ForeignRef."));
}
};
output_field.foreign_mode =
FieldForeignMode::ForeignRef(ForeignRefParams {
output_field.foreign_mode = FieldForeignMode::ForeignRef(ForeignRefParams {
reverse_relation_name: rrn,
target_resource_name: target_type_name.to_case(Case::Snake),
});
@@ -304,8 +261,6 @@ pub fn parse_models(source_code_path: &Path) -> Result<Vec<Model>> {
fields,
})
}
_ => {}
}
}
Ok(models)
}
@@ -315,7 +270,7 @@ fn parse_models_from_module_inner(module_path: &Path) -> Result<Vec<Model>> {
let mut models: Vec<Model> = vec![];
if module_path.is_file() {
println!("Parsing models from path {:?}.", module_path);
println!("Looking for models to parse from path {:?}.", module_path);
models.extend(parse_models(module_path)?);
return Ok(models);
}
@@ -330,23 +285,6 @@ fn parse_models_from_module_inner(module_path: &Path) -> Result<Vec<Model>> {
Ok(models)
}
// fn complete_models(original_models: Vec<Model>) -> Result<Vec<Model>> {
// let mut new_models: Vec<Model> = vec![];
// for model in original_models {
// for original_field in model.fields {
// let mut field = original_field
// match original_field.foreign_mode {
// FieldForeignMode::NotRef => {},
// FieldForeignMode::ForeignRef(ref_params) => {
// }
// }
// }
// }
// Ok(new_models)
// }
/// Scan for models struct in a rust file and return a struct representing the model
pub fn parse_models_from_module(module_path: &Path) -> Result<Vec<Model>> {
let models = parse_models_from_module_inner(module_path)?;

View file

@@ -1,6 +1,6 @@
[package]
name = "sqlxgentools_misc"
description = "Various misc class to use in applications that use sqlxgentools"
description = "Various data types and traits to use in a sqlxgentools-enabled codebase."
publish = true
edition.workspace = true
authors.workspace = true