initial commit
commit 912d00f2d3
12 changed files with 2635 additions and 0 deletions

.gitignore  (vendored, new file, 3 lines)
/target
/tmp

.rgignore  (new file, 1 line)
target

Cargo.lock  (generated, new file, 2121 lines)
File diff suppressed because it is too large.

Cargo.toml  (new file, 8 lines)
[workspace]
resolver = "2"
members = [
    "lib/generator_cli",
    "lib/sandbox"
]

TODO.md  (new file, 11 lines)
# TODO

- CREATE TABLE migration
- add unit tests
- add custom table name attribute
- add automagically generated CHECK constraints on enum type fields
- support foreign key CHECK constraints

- Repositories generation
  - Create get_all
  - Create get_by_id
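
As a hedged illustration of the CHECK-constraint item above (not something this commit implements): for an enum-typed field such as `status: UserStatus` in lib/sandbox/src/models.rs, the generator could emit a column definition along the lines of

    status    TEXT NOT NULL CHECK (status IN ('Disabled', 'Invited', 'Active', 'Archived'))

where the allowed values are the enum's variant names; the exact encoding is an assumption.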

lib/generator_attr/Cargo.toml  (new file, 10 lines)
[package]
name = "generator_attr"
edition = "2021"

[dependencies]
attribute-derive = "0.10.3"
proc-macro2 = "1.0.92"

[lib]
proc-macro = true

lib/generator_attr/src/lib.rs  (new file, 13 lines)
use proc_macro::TokenStream;

// Marker attribute: leaves the annotated item untouched; its metadata is read
// by the generator CLI (which parses the source with syn), not expanded here.
#[proc_macro_attribute]
pub fn sql_generator_model(_attr: TokenStream, item: TokenStream) -> TokenStream {
    item
}

// Marker derive: emits nothing; it only registers the `sql_generator_field`
// helper attribute so it can be placed on struct fields.
#[proc_macro_derive(SqlGeneratorDerive, attributes(sql_generator_field))]
pub fn sql_generator_field(_item: TokenStream) -> TokenStream {
    TokenStream::new()
}

lib/generator_cli/Cargo.toml  (new file, 20 lines)
[package]
name = "generator_cli"
edition = "2021"

[[bin]]
name = "sqlx-generator"
path = "src/main.rs"

[dependencies]
anyhow = "1.0.95"
argh = "0.1.13"
attribute-derive = "0.10.3"
convert_case = "0.6.0"
fully_pub = "0.1.4"
prettyplease = "0.2.25"
proc-macro2 = "1.0.92"
quote = "1.0.38"
serde = "1.0.216"
structmeta = "0.3.0"
syn = { version = "2.0.92", features = ["extra-traits", "full", "parsing"] }

lib/generator_cli/src/main.rs  (new file, 396 lines)
use std::{fs, path::Path};
use attribute_derive::FromAttr;

use argh::FromArgs;
use anyhow::{Result, anyhow};
use convert_case::{Case, Casing};
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
use syn::{File, Type};

// BASE MODELS

#[derive(Debug)]
struct Model {
    name: String,
    table_name: String,
    fields: Vec<Field>
}

#[derive(Debug)]
struct Field {
    name: String,
    rust_type: String,
    is_nullable: bool,
    is_unique: bool,
    is_primary: bool,
    default: Option<String>
}

#[derive(FromAttr, PartialEq, Debug, Default)]
#[attribute(ident = sql_generator_model)]
pub struct SqlGeneratorModelAttr {
    table_name: Option<String>
}

#[derive(FromAttr, PartialEq, Debug, Default)]
#[attribute(ident = sql_generator_field)]
pub struct SqlGeneratorFieldAttr {
    is_primary: Option<bool>,
    is_unique: Option<bool>
}

// Implementations

fn generate_repository_code() -> Result<()> {
    let resource_name = "User";
    let resource_ident = format_ident!("{}", &resource_name);
    let repository_ident = format_ident!("{}Repository", resource_ident);
    let error_msg = format!("Failed to fetch resource {:?} by id", resource_name);
    let token_stream: TokenStream = quote! {
        struct #repository_ident {}

        impl #repository_ident {
            pub async fn get_by_id(storage: &Storage, id: &str) -> Result<#resource_ident> {
                sqlx::query_as::<_, #resource_ident>("SELECT * FROM users WHERE id = $1")
                    .bind(id)
                    .fetch_one(&storage.0)
                    .await
                    .context(#error_msg)
            }
        }
    };
    // convert the TokenStream into Rust code as a pretty-printed string
    dbg!(&token_stream);
    let parse_res: syn::Result<File> = syn::parse2(token_stream);
    let pretty = prettyplease::unparse(&parse_res?);
    println!("{}", pretty);
    Ok(())
}
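
// Aside (not part of this commit): roughly what generate_repository_code() prints
// after prettyplease formatting, assuming the template above; `Storage`, `Result`
// and `.context` are placeholders the target crate would have to provide
// (e.g. via anyhow::Context).
//
//     struct UserRepository {}
//     impl UserRepository {
//         pub async fn get_by_id(storage: &Storage, id: &str) -> Result<User> {
//             sqlx::query_as::<_, User>("SELECT * FROM users WHERE id = $1")
//                 .bind(id)
//                 .fetch_one(&storage.0)
//                 .await
//                 .context("Failed to fetch resource \"User\" by id")
//         }
//     }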

impl Field {
    /// return sqlite type
    fn sql_type(&self) -> Option<String> {
        // for now, we just match against the rust type string representation
        match self.rust_type.as_str() {
            "u64" => Some("INTEGER".into()),
            "u32" => Some("INTEGER".into()),
            "i32" => Some("INTEGER".into()),
            "i64" => Some("INTEGER".into()),
            "f64" => Some("REAL".into()),
            "f32" => Some("REAL".into()),
            "String" => Some("TEXT".into()),
            "DateTime" => Some("DATETIME".into()),
            "Json" => Some("TEXT".into()),
            "Vec<u8>" => Some("BLOB".into()),
            _ => Some("TEXT".into())
        }
    }
}

/// Take the struct name, convert it to snake case and pluralize it with an "s"
/// (e.g. "UserProfile" -> "user_profiles")
fn generate_table_name_from_struct_name(struct_name: &str) -> String {
    format!("{}s", struct_name.to_case(Case::Snake))
}

fn extract_generic_type(base_segments: Vec<String>, ty: &syn::Type) -> Option<&syn::Type> {
    // If it is not a `TypePath`, it cannot be `Option<T>` / `Vec<T>`, so return `None`
    if let syn::Type::Path(syn::TypePath { qself: None, path }) = ty {
        // The accepted spellings of the wrapper type are listed in `base_segments`
        // (e.g. `Option`, `std:option:Option`, `core:option:Option`), so it is enough
        // to join the top-level `PathSegment` idents into a string and compare that
        // against the candidates.
        let segments_str = &path
            .segments
            .iter()
            .map(|segment| segment.ident.to_string())
            .collect::<Vec<_>>()
            .join(":");
        // Compare the joined segments against the candidates and take the last
        // `PathSegment`, i.e. the one that carries the generic arguments.
        let option_segment = base_segments
            .iter()
            .find(|s| segments_str == *s)
            .and_then(|_| path.segments.last());
        let inner_type = option_segment
            // Take the generic arguments of that `PathSegment`;
            // if it has none, it cannot be `Option<T>`, so return `None`
            .and_then(|path_seg| match &path_seg.arguments {
                syn::PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments {
                    args,
                    ..
                }) => args.first(),
                _ => None,
            })
            // Take the type carried by that generic argument;
            // if it is not a type, it cannot be `Option<T>`, so return `None`
            .and_then(|generic_arg| match generic_arg {
                syn::GenericArgument::Type(ty) => Some(ty),
                _ => None,
            });
        // Return the `T` in `Option<T>`
        return inner_type;
    }
    None
}
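
// Aside (not part of this commit): a hedged sketch of how extract_generic_type
// could be exercised in a unit test (cf. the "add unit tests" TODO), using
// syn::parse_quote with the feature set declared in Cargo.toml. Illustrative only.
//
//     let ty: syn::Type = syn::parse_quote!(Option<String>);
//     let inner = extract_generic_type(vec!["Option".into()], &ty);
//     // `inner` now holds the `String` type extracted from `Option<String>`
//     assert!(inner.is_some());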

fn get_type_first_ident(inp: &Type) -> Option<String> {
    match inp {
        syn::Type::Path(field_type_path) => {
            Some(field_type_path.path.segments.get(0).unwrap().ident.to_string())
        },
        _ => None
    }
}

fn parse_model_attribute(item: &syn::ItemStruct) -> Result<Option<SqlGeneratorModelAttr>> {
    for attr in item.attrs.iter() {
        let attr_ident = match attr.path().get_ident() {
            Some(v) => v,
            None => {
                continue;
            }
        };
        if attr_ident.to_string() != "sql_generator_model" {
            continue;
        }

        match SqlGeneratorModelAttr::from_attribute(attr) {
            Ok(v) => {
                return Ok(Some(v));
            },
            Err(err) => {
                return Err(anyhow!("Failed to parse sql_generator_model attribute macro: {}", err));
            }
        };
    }
    Ok(None)
}

fn parse_field_attribute(field: &syn::Field) -> Result<Option<SqlGeneratorFieldAttr>> {
    for attr in field.attrs.iter() {
        let attr_ident = match attr.path().get_ident() {
            Some(v) => v,
            None => {
                continue;
            }
        };
        if attr_ident.to_string() != "sql_generator_field" {
            continue;
        }

        match SqlGeneratorFieldAttr::from_attribute(attr) {
            Ok(v) => {
                return Ok(Some(v));
            },
            Err(err) => {
                return Err(anyhow!("Failed to parse sql_generator_field attribute macro: {}", err));
            }
        };
    }
    Ok(None)
}

/// Scan a Rust module for model structs and return their parsed representation
fn parse_models(models_mod_path: &Path) -> Result<Vec<Model>> {
    let models_code = fs::read_to_string(models_mod_path)?;
    let parsed_file = syn::parse_file(&models_code)?;

    let mut models: Vec<Model> = vec![];

    for item in parsed_file.items {
        match item {
            syn::Item::Struct(itemval) => {
                let model_name = itemval.ident.to_string();
                let model_attrs = match parse_model_attribute(&itemval)? {
                    Some(v) => v,
                    None => {
                        // we require model structs to have the `sql_generator_model` attribute
                        continue;
                    }
                };

                let mut fields: Vec<Field> = vec![];
                for field in itemval.fields.iter() {
                    let field_name = field.ident.clone().unwrap().to_string();
                    let field_type = field.ty.clone();
                    // println!("field {}", field_name);

                    let mut output_field = Field {
                        name: field_name,
                        rust_type: "Unknown".into(),
                        default: Some("".into()),
                        is_nullable: false,
                        is_primary: false,
                        is_unique: false
                    };

                    let first_type = match get_type_first_ident(&field_type) {
                        Some(v) => v,
                        None => {
                            return Err(anyhow!("Could not extract ident from field type"));
                        }
                    };
                    let mut final_type = first_type.clone();
                    if first_type == "Option" {
                        output_field.is_nullable = true;
                        let inner_type = match extract_generic_type(
                            vec!["Option".into(), "std:option:Option".into(), "core:option:Option".into()],
                            &field_type
                        ) {
                            Some(v) => v,
                            None => {
                                return Err(anyhow!("Could not extract type from Option"));
                            }
                        };
                        final_type = match get_type_first_ident(inner_type) {
                            Some(v) => v,
                            None => {
                                return Err(anyhow!("Could not extract ident from Option inner type"));
                            }
                        }
                    }
                    if first_type == "Vec" {
                        let inner_type = match extract_generic_type(
                            vec!["Vec".into()],
                            &field_type
                        ) {
                            Some(v) => v,
                            None => {
                                return Err(anyhow!("Could not extract type from Vec"));
                            }
                        };
                        final_type = match get_type_first_ident(inner_type) {
                            Some(v) => format!("Vec<{}>", v),
                            None => {
                                return Err(anyhow!("Could not extract ident from Vec inner type"));
                            }
                        }
                    }
                    output_field.rust_type = final_type;

                    // parse field-level attribute
                    if let Some(field_attr) = parse_field_attribute(field)? {
                        output_field.is_primary = field_attr.is_primary.unwrap_or_default();
                        output_field.is_unique = field_attr.is_unique.unwrap_or_default();
                    }

                    fields.push(output_field);
                }
                models.push(Model {
                    name: model_name.clone(),
                    table_name: model_attrs.table_name
                        .unwrap_or(generate_table_name_from_struct_name(&model_name)),
                    fields
                })
            },
            _ => {}
        }
    }
    Ok(models)
}

/// Generate CREATE TABLE statements from the parsed models
fn generate_create_table_sql(models: &Vec<Model>) -> Result<String> {

    let mut sql_code: String = "".into();
    for model in models.iter() {
        let mut fields_sql: Vec<String> = vec![];
        for field in model.fields.iter() {
            let mut additions: String = "".into();
            let sql_type = field.sql_type()
                .ok_or(anyhow!(format!("Could not find SQL type for field {}", field.name)))?;
            if !field.is_nullable {
                additions.push_str(" NOT NULL");
            }
            if field.is_unique {
                additions.push_str(" UNIQUE");
            }
            if field.is_primary {
                additions.push_str(" PRIMARY KEY");
            }
            fields_sql.push(
                format!("\t{: <#18}\t{}{}", field.name, sql_type, additions)
            );
        }

        sql_code.push_str(
            &format!(
                "CREATE TABLE {} (\n{}\n);",
                model.table_name,
                fields_sql.join(",\n")
            )
        );
    }

    Ok(sql_code)
}
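
// Aside (not part of this commit): given the `User` model in
// lib/sandbox/src/models.rs further down, generate_create_table_sql() should
// produce roughly the following (column padding approximate):
//
//     CREATE TABLE usersss (
//         id                TEXT NOT NULL PRIMARY KEY,
//         handle            TEXT NOT NULL UNIQUE,
//         full_name         TEXT,
//         prefered_color    INTEGER,
//         last_login_at     DATETIME,
//         status            TEXT NOT NULL,
//         groups            TEXT NOT NULL,
//         avatar_bytes      BLOB NOT NULL
//     );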


#[derive(FromArgs, PartialEq, Debug)]
/// Generate SQL CREATE TABLE migrations
#[argh(subcommand, name = "generate-create-migrations")]
struct GenerateCreateMigration {}

#[derive(FromArgs, PartialEq, Debug)]
/// Generate Rust SQLx repositories code
#[argh(subcommand, name = "generate-repositories")]
struct GenerateRepositories {}

#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand)]
enum GeneratorArgsSubCommands {
    GenerateCreateMigration(GenerateCreateMigration),
    GenerateRepositories(GenerateRepositories),
}

#[derive(FromArgs)]
/// SQLX Generator args
struct GeneratorArgs {
    /// whether or not to debug
    #[argh(switch, short = 'd')]
    debug: bool,

    #[argh(positional)]
    project_root: Option<String>,

    #[argh(subcommand)]
    nested: GeneratorArgsSubCommands
}

pub fn main() -> Result<()> {
    let args: GeneratorArgs = argh::from_env();
    let project_root = &args.project_root.unwrap_or(".".to_string());
    let project_root_path = Path::new(&project_root);
    eprintln!("Using project root at: {:?}", &project_root_path.canonicalize()?);
    if !project_root_path.exists() {
        return Err(anyhow!("Could not resolve project root path."));
    }
    // search for a models module
    let models_mod_location = "src/models.rs";
    let models_mod_path = project_root_path.join(models_mod_location);
    if !models_mod_path.exists() {
        return Err(anyhow!("Could not resolve models module."));
    }
    let models = parse_models(&models_mod_path)?;

    match args.nested {
        GeneratorArgsSubCommands::GenerateRepositories(_opts) => {
            println!("Generate repositories");
            todo!();
        },
        GeneratorArgsSubCommands::GenerateCreateMigration(_opts) => {
            let sql_code = generate_create_table_sql(&models)?;
            println!("{}", sql_code);
        }
    }
    Ok(())
}
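
As a hedged usage sketch (not part of this commit), the CLI could then be run against the sandbox crate roughly like this, assuming the workspace root as the working directory:

    cargo run --bin sqlx-generator -- lib/sandbox generate-create-migrations

which would print the generated CREATE TABLE statements for the models found in lib/sandbox/src/models.rs.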

lib/sandbox/Cargo.toml  (new file, 14 lines)
[package]
name = "sandbox"
edition = "2021"

[[bin]]
name = "sandbox"
path = "src/main.rs"

[dependencies]
chrono = "0.4.39"
fully_pub = "0.1.4"
serde = "1.0.216"
sqlx = { version = "0.8.2", features = ["chrono", "uuid", "sqlite"] }
generator_attr = { path = "../generator_attr" }

lib/sandbox/src/main.rs  (new file, 5 lines)
pub mod models;

fn main() {
    println!("Sandbox")
}

lib/sandbox/src/models.rs  (new file, 33 lines)
use chrono::{DateTime, Utc};
use sqlx::types::Json;
use fully_pub::fully_pub;

use generator_attr::{sql_generator_model, SqlGeneratorDerive};

#[derive(sqlx::Type, Clone, Debug, PartialEq)]
enum UserStatus {
    Disabled,
    Invited,
    Active,
    Archived
}

struct RandomStruct {}

#[derive(SqlGeneratorDerive, sqlx::FromRow, Debug, Clone)]
#[sql_generator_model(table_name="usersss")]
#[fully_pub]
struct User {
    #[sql_generator_field(is_primary=true)]
    id: String,
    #[sql_generator_field(is_unique=true)]
    handle: String,
    full_name: Option<String>,
    prefered_color: Option<i64>,
    last_login_at: Option<DateTime<Utc>>,
    status: UserStatus,
    groups: Json<Vec<String>>,
    avatar_bytes: Vec<u8>
}