Compare commits

4 commits: e3ce642226 ... 2da8721778

| Author | SHA1 | Date |
|---|---|---|
| | 2da8721778 | |
| | 5e0ffe67c3 | |
| | cbe60d1bd2 | |
| | dc77c71f68 | |
29 changed files with 1335 additions and 401 deletions
Cargo.lock · generated · 96 lines changed

The regenerated lock file touches many small hunks. The recoverable changes: `anyhow` moves from 1.0.95 to 1.0.100 (checksum updated accordingly); every `"syn"` entry in dependency lists is rewritten as `"syn 2.0.92"`, and a separate `syn` 1.0.109 package entry now sits next to the `syn` 2.0.92 entry; the `sandbox` package's dependency list now reads anyhow, chrono, fully_pub, serde, sqlx, sqlxgentools_attrs, sqlxgentools_misc, tokio; `sqlxgentools_attrs` and `sqlxgentools_cli` move from version 0.0.0 to 0.1.0, with `sqlxgentools_attrs` now listing `syn 1.0.109` among its dependencies; a new `sqlxgentools_misc` 0.1.0 entry is added (fully_pub, serde, sqlx-core, sqlx-sqlite); and a `tokio-macros` 2.5.0 entry (proc-macro2, quote, syn 2.0.92) is added, with `tokio`'s dependency list now including socket2 and tokio-macros.
Cargo.toml · 13 lines changed

@@ -3,6 +3,19 @@ resolver = "2"
members = [
    "lib/sqlxgentools_attrs",
    "lib/sqlxgentools_cli",
    "lib/sqlxgentools_misc",
    "lib/sandbox"
]

[workspace.package]
edition = "2021"
rust-version = "1.8"
license = "MIT OR Apache-2.0"
authors = [
    "Matthieu Bessat <rust-dev@mbess.net>"
]
keywords = ["orm", "migrations", "repositories", "code-generation"]
categories = ["database"]
repository = "https://forge.lefuturiste.fr/mbess/sqlxgentools"
readme = "README.md"
version = "0.1.0"
DRAFT.md · new file · 39 lines

@@ -0,0 +1,39 @@
# Design draft

This document is intended for the developers of sqlxgentools.

## Implementing basic relationships

### Issues

Problems with struct non-flexibility

### Turning the problem around: Views

### hasMany / belongsTo relationship

So we can implement a method:

```rs
use repositories::impls::post::RelationShips;

let post = PostRepository::new(db).get_one_by_id("id_machin")?;
post.first_name // OK
let authors = post.get_authors()?; // we need to require the implementation
```

.relations() => gives you a RelationFetcherBuilder
.of(entity) => gives you a RelationFetcher
.author

```rs
let post_repo = PostRepository::new(db);
let author: User = post_repo.relations()
    .of(post)
    .author().ok_or(Err)?;
let comments: Vec<Comment> = post_repo.relations()
    .of(post)
    .comments().ok_or(Err)?;
```
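As an editor's aside on the draft above: the notes name a RelationFetcherBuilder and a RelationFetcher but never spell out their shape. The sketch below only illustrates how that chain could be typed; every type and signature in it is an assumption extrapolated from the draft, and none of it exists in the crates yet.

```rs
// Editor's sketch only: RelationFetcherBuilder / RelationFetcher are the names
// used in the draft, but all types and signatures here are assumptions.
struct Post { id: String, author_id: String }
struct User { id: String }

struct PostRepository; // stand-in for the generated repository type

struct RelationFetcherBuilder<'a>(&'a PostRepository);
struct RelationFetcher<'a>(&'a PostRepository, &'a Post);

impl PostRepository {
    fn relations(&self) -> RelationFetcherBuilder<'_> {
        RelationFetcherBuilder(self)
    }
}

impl<'a> RelationFetcherBuilder<'a> {
    fn of(self, post: &'a Post) -> RelationFetcher<'a> {
        RelationFetcher(self.0, post)
    }
}

impl<'a> RelationFetcher<'a> {
    // belongsTo: a real implementation would SELECT the user whose id equals post.author_id
    fn author(&self) -> Option<User> {
        Some(User { id: self.1.author_id.clone() })
    }
}

fn main() {
    let repo = PostRepository;
    let post = Post { id: "post1".into(), author_id: "user1".into() };
    let author = repo.relations().of(&post).author();
    println!("{} was written by {:?}", post.id, author.map(|u| u.id));
}
```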
LICENSE-APACHE · new file · 201 lines

The standard Apache License, Version 2.0 text (http://www.apache.org/licenses/), including the appendix on how to apply the license, with the boilerplate notice filled in as "Copyright 2025 Matthieu Bessat".
LICENSE-MIT · new file · 25 lines

The standard MIT license text, with the copyright line "Copyright (c) 2025 Matthieu Bessat".
TODO.md · 2 lines changed

@@ -14,7 +14,7 @@
- insert
- update
- delete_by_id
- custom queries
- [ ] delete_many

- [ ] Config file for project
- configure models path
docs/tutorials/quick_start.md · new file · 8 lines

@@ -0,0 +1,8 @@
# Quick start with sqlxgentools

Steps:
- Install the crate
- Declare your models
- Generate migrations
- Generate repositories
- Use repositories in your code
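The quick-start file stops at the step list. For a concrete picture of the "declare your models" step, here is an abbreviated model taken from lib/sandbox/src/models/user.rs in this same change set; the attribute and derive names are the real ones added by this change, but the field list is shortened for illustration.

```rs
// Editor's illustration: abbreviated from lib/sandbox/src/models/user.rs in this diff.
use fully_pub::fully_pub;
use sqlxgentools_attrs::{sql_generator_model, SqlGeneratorDerive, SqlGeneratorModelWithId};
use sqlxgentools_misc::DatabaseLine; // trait implemented by SqlGeneratorModelWithId

#[derive(SqlGeneratorDerive, SqlGeneratorModelWithId, sqlx::FromRow, Debug, Clone)]
#[sql_generator_model(table_name = "usersss")]
#[fully_pub]
struct User {
    #[sql_generator_field(is_primary = true)]
    id: String,
    handle: String,
}
```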
lib/sandbox/.gitignore · new file (vendored) · 1 line

@@ -0,0 +1 @@
tmp/
lib/sandbox/Cargo.toml

@@ -1,14 +1,19 @@
[package]
name = "sandbox"
edition = "2021"
publish = false

[[bin]]
name = "sandbox"
path = "src/main.rs"

[dependencies]
anyhow = "1.0.100"
chrono = "0.4.39"
fully_pub = "0.1.4"
serde = "1.0.216"
tokio = { version = "1.40.0", features = ["rt-multi-thread", "macros"] }
sqlx = { version = "0.8.6", features = ["sqlite", "runtime-tokio", "chrono", "uuid", "migrate"] }
sqlxgentools_attrs = { path = "../sqlxgentools_attrs" }
sqlxgentools_misc = { path = "../sqlxgentools_misc" }
lib/sandbox/justfile

@@ -1,8 +1,12 @@
# Children justfile

reset-db *args:
    rm sandbox.db
    sqlite3 {{args}} sandbox.db < src/migrations/all.sql
    touch tmp/db.db
    rm tmp/db.db
    sqlite3 {{args}} tmp/db.db < src/migrations/all.sql

seed-db:
    sqlite3 tmp/db.db < src/migrations/all.sql

gen-sqlx:
    ../../target/release/sqlx-generator \
lib/sandbox/src/db.rs

@@ -1,6 +1,11 @@
use anyhow::Context;
use std::str::FromStr;
use std::path::PathBuf;
use anyhow::Result;

use fully_pub::fully_pub;
use sqlx::{
    Pool, Sqlite,
    Pool, Sqlite, sqlite::{SqliteConnectOptions, SqlitePoolOptions},
};

/// database storage interface

@@ -8,3 +13,29 @@ use sqlx::{
#[derive(Clone, Debug)]
struct Database(Pool<Sqlite>);


/// Initialize database
pub async fn provide_database(sqlite_db_path: &str) -> Result<Database> {
    let path = PathBuf::from(sqlite_db_path);
    let is_db_initialization = !path.exists();
    // // database does not exist, trying to create it
    // if path
    //     .parent()
    //     .filter(|pp| pp.exists())
    //     Err(anyhow!("Could not find parent directory of the db location.")));

    let conn_str = format!("sqlite://{sqlite_db_path}");

    let pool = SqlitePoolOptions::new()
        .max_connections(50)
        .connect_with(SqliteConnectOptions::from_str(&conn_str)?.create_if_missing(true))
        .await
        .context("could not connect to database_url")?;
    // if is_db_initialization {
    //     initialize_db(Database(pool.clone())).await?;
    // }

    Ok(Database(pool))
}
lib/sandbox/src/main.rs

@@ -1,7 +1,95 @@
use anyhow::{Context, Result};

use chrono::Utc;
use sqlx::types::Json;
use sqlxgentools_misc::ForeignRef;

use crate::{db::provide_database, models::user::{User, UserToken}, repositories::user_token_repository::UserTokenRepository};

pub mod models;
pub mod repositories;
pub mod db;

fn main() {
    println!("Sandbox")
#[tokio::main]
async fn main() -> Result<()> {
    println!("Sandbox");

    let users = vec![
        User {
            id: "idu1".into(),
            handle: "john.doe".into(),
            full_name: None,
            prefered_color: None,
            last_login_at: None,
            status: models::user::UserStatus::Invited,
            groups: Json(vec![]),
            avatar_bytes: None
        },
        User {
            id: "idu2".into(),
            handle: "richard".into(),
            full_name: None,
            prefered_color: None,
            last_login_at: None,
            status: models::user::UserStatus::Invited,
            groups: Json(vec![]),
            avatar_bytes: None
        },
        User {
            id: "idu3".into(),
            handle: "manned".into(),
            full_name: None,
            prefered_color: None,
            last_login_at: None,
            status: models::user::UserStatus::Invited,
            groups: Json(vec![]),
            avatar_bytes: None
        }
    ];
    let user_token = UserToken {
        id: "idtoken1".into(),
        secret: "4LP5A3F3XBV5NM8VXRGZG3QDXO9PNAC0".into(),
        last_use_time: None,
        creation_time: Utc::now(),
        expiration_time: Utc::now(),
        user_id: ForeignRef::new(&users.get(0).unwrap())
    };

    let db = provide_database("tmp/db.db").await?;

    let user_token_repo = UserTokenRepository::new(db);
    user_token_repo.insert_many(&vec![
        UserToken {
            id: "idtoken2".into(),
            secret: "4LP5A3F3XBV5NM8VXRGZG3QDXO9PNAC0".into(),
            last_use_time: None,
            creation_time: Utc::now(),
            expiration_time: Utc::now(),
            user_id: ForeignRef::new(&users.get(0).unwrap())
        },
        UserToken {
            id: "idtoken3".into(),
            secret: "CBHR6G41KSEMR1AI".into(),
            last_use_time: None,
            creation_time: Utc::now(),
            expiration_time: Utc::now(),
            user_id: ForeignRef::new(&users.get(1).unwrap())
        },
        UserToken {
            id: "idtoken4".into(),
            secret: "CBHR6G41KSEMR1AI".into(),
            last_use_time: None,
            creation_time: Utc::now(),
            expiration_time: Utc::now(),
            user_id: ForeignRef::new(&users.get(1).unwrap())
        }
    ]).await?;
    let user_tokens = user_token_repo.get_many_user_tokens_by_usersss(
        vec!["idu2".into()]
    ).await?;
    dbg!(&user_tokens);

    Ok(())
}
lib/sandbox/src/migrations/all.sql

@@ -1,3 +1,5 @@
-- DO NOT EDIT THIS FILE.
-- Generated by sqlxgentools from models files.
CREATE TABLE usersss (
    id TEXT NOT NULL PRIMARY KEY,
    handle TEXT NOT NULL UNIQUE,

@@ -6,13 +8,13 @@ CREATE TABLE usersss (
    last_login_at DATETIME,
    status TEXT NOT NULL,
    groups TEXT NOT NULL,
    avatar_bytes BLOB NOT NULL
    avatar_bytes TEXT
);
CREATE TABLE user_tokens (
    id TEXT NOT NULL PRIMARY KEY,
    user_id TEXT NOT NULL,
    secret TEXT NOT NULL,
    last_use_time DATETIME,
    creation_time DATETIME NOT NULL,
    expiration_time DATETIME NOT NULL
    expiration_time DATETIME NOT NULL,
    user_id TEXT NOT NULL
);
lib/sandbox/src/models/user.rs

@@ -2,7 +2,8 @@ use chrono::{DateTime, Utc};
use sqlx::types::Json;
use fully_pub::fully_pub;

use sqlxgentools_attrs::{sql_generator_model, SqlGeneratorDerive};
use sqlxgentools_attrs::{SqlGeneratorDerive, SqlGeneratorModelWithId, sql_generator_model};
use sqlxgentools_misc::{DatabaseLine, ForeignRef};

#[derive(sqlx::Type, Clone, Debug, PartialEq)]
#[fully_pub]

@@ -13,7 +14,7 @@ enum UserStatus {
    Archived
}

#[derive(SqlGeneratorDerive, sqlx::FromRow, Debug, Clone)]
#[derive(SqlGeneratorDerive, SqlGeneratorModelWithId, sqlx::FromRow, Debug, Clone)]
#[sql_generator_model(table_name="usersss")]
#[fully_pub]
struct User {

@@ -26,20 +27,21 @@ struct User {
    last_login_at: Option<DateTime<Utc>>,
    status: UserStatus,
    groups: Json<Vec<String>>,
    avatar_bytes: Vec<u8>
    avatar_bytes: Option<Vec<u8>>
}

#[derive(SqlGeneratorDerive, sqlx::FromRow, Debug, Clone)]

#[derive(SqlGeneratorDerive, SqlGeneratorModelWithId, sqlx::FromRow, Debug, Clone)]
#[sql_generator_model(table_name="user_tokens")]
#[fully_pub]
struct UserToken {
    #[sql_generator_field(is_primary=true)]
    id: String,
    // #[sql_generator_field(foreign_key=Relation::BelongsTo(User))]
    user_id: String,
    secret: String,
    last_use_time: Option<DateTime<Utc>>,
    creation_time: DateTime<Utc>,
    expiration_time: DateTime<Utc>
    expiration_time: DateTime<Utc>,
    #[sql_generator_field(reverse_relation_name="user_tokens")] // to generate get_user_tokens_of_user(&user_id)
    user_id: ForeignRef<User>
}
lib/sandbox/src/repositories/user_token_repository.rs

@@ -40,14 +40,14 @@ impl UserTokenRepository {
    }
    pub async fn insert(&self, entity: &UserToken) -> Result<(), sqlx::Error> {
        sqlx::query(
            "INSERT INTO user_tokens (id, user_id, secret, last_use_time, creation_time, expiration_time) VALUES ($1, $2, $3, $4, $5, $6)",
            "INSERT INTO user_tokens (id, secret, last_use_time, creation_time, expiration_time, user_id) VALUES ($1, $2, $3, $4, $5, $6)",
        )
        .bind(&entity.id)
        .bind(&entity.user_id)
        .bind(&entity.secret)
        .bind(&entity.last_use_time)
        .bind(&entity.creation_time)
        .bind(&entity.expiration_time)
        .bind(&entity.user_id.target_id)
        .execute(&self.db.0)
        .await?;
        Ok(())

@@ -69,18 +69,18 @@ impl UserTokenRepository {
            .collect::<Vec<String>>()
            .join(", ");
        let query_sql = format!(
            "INSERT INTO user_tokens (id, user_id, secret, last_use_time, creation_time, expiration_time) VALUES {} ON CONFLICT DO NOTHING",
            "INSERT INTO user_tokens (id, secret, last_use_time, creation_time, expiration_time, user_id) VALUES {} ON CONFLICT DO NOTHING",
            values_templates
        );
        let mut query = sqlx::query(&query_sql);
        for entity in entities {
            query = query
                .bind(&entity.id)
                .bind(&entity.user_id)
                .bind(&entity.secret)
                .bind(&entity.last_use_time)
                .bind(&entity.creation_time)
                .bind(&entity.expiration_time);
                .bind(&entity.expiration_time)
                .bind(&entity.user_id.target_id);
        }
        query.execute(&self.db.0).await?;
        Ok(())

@@ -91,15 +91,15 @@ impl UserTokenRepository {
        entity: &UserToken,
    ) -> Result<(), sqlx::Error> {
        sqlx::query(
            "UPDATE user_tokens SET id = $2, user_id = $3, secret = $4, last_use_time = $5, creation_time = $6, expiration_time = $7 WHERE id = $1",
            "UPDATE user_tokens SET id = $2, secret = $3, last_use_time = $4, creation_time = $5, expiration_time = $6, user_id = $7 WHERE id = $1",
        )
        .bind(item_id)
        .bind(&entity.id)
        .bind(&entity.user_id)
        .bind(&entity.secret)
        .bind(&entity.last_use_time)
        .bind(&entity.creation_time)
        .bind(&entity.expiration_time)
        .bind(&entity.user_id.target_id)
        .execute(&self.db.0)
        .await?;
        Ok(())

@@ -111,4 +111,33 @@ impl UserTokenRepository {
        .await?;
        Ok(())
    }
    pub async fn get_many_user_tokens_by_user(
        &self,
        item_id: &str,
    ) -> Result<Vec<UserToken>, sqlx::Error> {
        sqlx::query_as::<_, UserToken>("SELECT * FROM user_tokens WHERE user_id = $1")
            .bind(item_id)
            .fetch_all(&self.db.0)
            .await
    }
    pub async fn get_many_user_tokens_by_usersss(
        &self,
        items_ids: Vec<String>,
    ) -> Result<Vec<UserToken>, sqlx::Error> {
        if items_ids.is_empty() {
            return Ok(vec![]);
        }
        let placeholder_params: String = (1..=(items_ids.len()))
            .map(|i| format!("${i}"))
            .collect::<Vec<String>>()
            .join(",");
        let query_tmpl = format!(
            "SELECT * FROM user_tokens WHERE user_id IN ({})", placeholder_params
        );
        let mut query = sqlx::query_as::<_, UserToken>(&query_tmpl);
        for id in items_ids {
            query = query.bind(id);
        }
        query.fetch_all(&self.db.0).await
    }
}
lib/sqlxgentools_attrs/Cargo.toml

@@ -1,10 +1,18 @@
[package]
name = "sqlxgentools_attrs"
edition = "2021"
description = "Proc-macros to allow automatic migrations and repositories generation from models by the sqlxgentools CLI tools."
publish = true
edition.workspace = true
authors.workspace = true
version.workspace = true
license.workspace = true
repository.workspace = true

[dependencies]
attribute-derive = "0.10.3"
proc-macro2 = "1.0.92"
quote = "1.0"
syn = { version = "1.0" }

[lib]
proc-macro = true
lib/sqlxgentools_attrs/src/lib.rs

@@ -1,4 +1,6 @@
use proc_macro::TokenStream;
use quote::quote;
use syn::{DeriveInput, Fields, parse_macro_input};

#[proc_macro_attribute]
pub fn sql_generator_model(_attr: TokenStream, item: TokenStream) -> TokenStream {

@@ -6,8 +8,34 @@ pub fn sql_generator_model(_attr: TokenStream, item: TokenStream) -> TokenStream
}

#[proc_macro_derive(SqlGeneratorDerive, attributes(sql_generator_field))]
pub fn sql_generator_field(_item: TokenStream) -> TokenStream {
pub fn derive_sql_generator_model(_input: TokenStream) -> TokenStream {
    TokenStream::new()
}

#[proc_macro_derive(SqlGeneratorModelWithId)]
pub fn derive_sql_generator_model_with_id(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = input.ident;

    // Extract the fields of the struct
    if let syn::Data::Struct(data) = input.data {
        if let Fields::Named(fields) = data.fields {
            for field in fields.named {
                if field.ident.as_ref().map_or(false, |ident| ident == "id") {
                    let expanded = quote! {
                        impl DatabaseLine for #name {
                            fn id(&self) -> String {
                                self.id.clone()
                            }
                        }
                    };
                    return TokenStream::from(expanded);
                }
            }
        }
    }

    // If `id` field is not found, return an error
    panic!("Expected struct with a named field `id` of type String")
}
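For orientation, the SqlGeneratorModelWithId derive above only ever emits an impl of a DatabaseLine trait; the real trait lives in sqlxgentools_misc, but it is re-declared locally in the sketch below so the snippet stands alone. This is an editor's illustration of the expansion, not additional crate code.

```rs
// Editor's sketch of what #[derive(SqlGeneratorModelWithId)] expands to for a
// struct that has an `id: String` field. DatabaseLine is re-declared here so
// the snippet compiles on its own; the real trait lives in sqlxgentools_misc.
trait DatabaseLine {
    fn id(&self) -> String;
}

struct Post {
    id: String,
    title: String,
}

// Hand-written equivalent of the impl the derive macro emits:
impl DatabaseLine for Post {
    fn id(&self) -> String {
        self.id.clone()
    }
}

fn main() {
    let post = Post { id: "p1".into(), title: "hello".into() };
    // Generic code can now address any derived model by its primary key.
    println!("{} ({})", post.id(), post.title);
}
```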
lib/sqlxgentools_cli/Cargo.toml

@@ -1,25 +1,31 @@
[package]
name = "sqlxgentools_cli"
edition = "2021"
description = "CLI to generate SQL migrations and repositories code that use sqlx from models definition."
publish = true
edition.workspace = true
authors.workspace = true
version.workspace = true
license.workspace = true
repository.workspace = true

[[bin]]
name = "sqlx-generator"
path = "src/main.rs"

[dependencies]
anyhow = "1.0.95"
argh = "0.1.13"
attribute-derive = "0.10.3"
convert_case = "0.6.0"
fully_pub = "0.1.4"
heck = "0.5.0"
prettyplease = "0.2.25"
proc-macro2 = "1.0.92"
quote = "1.0.38"
serde = "1.0.216"
serde_json = "1.0.134"
structmeta = "0.3.0"
syn = { version = "2.0.92", features = ["extra-traits", "full", "parsing"] }
anyhow = "1.0"
argh = "0.1"
attribute-derive = "0.10"
convert_case = "0.6"
fully_pub = "0.1"
heck = "0.5"
prettyplease = "0.2"
proc-macro2 = "1.0"
quote = "1.0"
serde = "1.0"
serde_json = "1.0"
structmeta = "0.3"
syn = { version = "2.0", features = ["extra-traits", "full", "parsing"] }

[lints.clippy]
uninlined_format_args = "allow"
Deleted file · 311 lines removed (the file's path is not shown in the capture)

@@ -1,311 +0,0 @@

The previous repository-generator module is removed wholesale. It contained gen_get_all_method, gen_get_by_id_method, gen_get_many_by_id_method, gen_insert_method, gen_insert_many_method, gen_update_by_id_method and gen_delete_by_id_method, along with generate_repository_file, the SourceNode / SourceNodeContainer types and generate_repositories_source_files. The same functionality reappears, reworked to handle ForeignRef foreign-key fields, in the new lib/sqlxgentools_cli/src/generators/ modules further down.
Migrations generator (file name not shown in the capture)

@@ -29,6 +29,8 @@ impl Field {
/// Generate CREATE TABLE statement from parsed model
pub fn generate_create_table_sql(models: &[Model]) -> Result<String> {
    let mut sql_code: String = "".into();
    sql_code.push_str("-- DO NOT EDIT THIS FILE.\n");
    sql_code.push_str("-- Generated by sqlxgentools from models files.\n");
    for model in models.iter() {
        let mut fields_sql: Vec<String> = vec![];
        for field in model.fields.iter() {
lib/sqlxgentools_cli/src/generators/mod.rs · new file · 20 lines

@@ -0,0 +1,20 @@
use fully_pub::fully_pub;
use serde::Serialize;

pub mod migrations;
pub mod repositories;

#[derive(Serialize, Debug)]
#[fully_pub]
enum SourceNode {
    File(String),
    Directory(Vec<SourceNodeContainer>)
}

#[derive(Serialize, Debug)]
#[fully_pub]
struct SourceNodeContainer {
    name: String,
    inner: SourceNode
}
385
lib/sqlxgentools_cli/src/generators/repositories/base.rs
Normal file
385
lib/sqlxgentools_cli/src/generators/repositories/base.rs
Normal file
|
|
@ -0,0 +1,385 @@
|
|||
use anyhow::Result;
|
||||
use proc_macro2::{TokenStream, Ident};
|
||||
use quote::{format_ident, quote};
|
||||
use syn::File;
|
||||
use heck::ToSnakeCase;
|
||||
|
||||
use crate::{generators::repositories::relations::gen_get_many_of_related_entity_method, models::{Field, FieldForeignMode, Model}};
|
||||
use crate::generators::{SourceNode, SourceNodeContainer};
|
||||
|
||||
|
||||
fn gen_get_all_method(model: &Model) -> TokenStream {
|
||||
let resource_ident = format_ident!("{}", &model.name);
|
||||
let select_query = format!("SELECT * FROM {}", model.table_name);
|
||||
|
||||
quote! {
|
||||
pub async fn get_all(&self) -> Result<Vec<#resource_ident>, sqlx::Error> {
|
||||
sqlx::query_as::<_, #resource_ident>(#select_query)
|
||||
.fetch_all(&self.db.0)
|
||||
.await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn gen_get_by_field_method(model: &Model, query_field: &Field) -> TokenStream {
|
||||
let resource_ident = format_ident!("{}", &model.name);
|
||||
let select_query = format!("SELECT * FROM {} WHERE {} = $1", model.table_name, query_field.name);
|
||||
|
||||
let func_name_ident = format_ident!("get_by_{}", query_field.name);
|
||||
|
||||
quote! {
|
||||
// FIXME: Value is not necesssarly a string, it can be an int or a bool
|
||||
pub async fn #func_name_ident(&self, value: &str) -> Result<#resource_ident, sqlx::Error> {
|
||||
sqlx::query_as::<_, #resource_ident>(#select_query)
|
||||
.bind(value)
|
||||
.fetch_one(&self.db.0)
|
||||
.await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn gen_get_many_by_field_method(model: &Model, query_field: &Field) -> TokenStream {
|
||||
let resource_ident = format_ident!("{}", &model.name);
|
||||
let select_query_tmpl = format!("SELECT * FROM {} WHERE {} IN ({{}})", model.table_name, query_field.name);
|
||||
|
||||
let func_name_ident = format_ident!("get_many_by_{}", query_field.name);
|
||||
|
||||
quote! {
|
||||
pub async fn #func_name_ident(&self, values: &[&str]) -> Result<Vec<#resource_ident>, sqlx::Error> {
|
||||
if values.is_empty() {
|
||||
return Ok(vec![])
|
||||
}
|
||||
let placeholder_params: String = (1..=(values.len()))
|
||||
.map(|i| format!("${}", i))
|
||||
.collect::<Vec<String>>()
|
||||
.join(",");
|
||||
let query_sql = format!(#select_query_tmpl, placeholder_params);
|
||||
let mut query = sqlx::query_as::<_, #resource_ident>(&query_sql);
|
||||
for value in values {
|
||||
query = query.bind(value)
|
||||
}
|
||||
query
|
||||
.fetch_all(&self.db.0)
|
||||
.await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn get_mutation_fields(model: &Model) -> (Vec<&Field>, Vec<&Field>) {
|
||||
let normal_field_names: Vec<&Field> = model.fields.iter()
|
||||
.filter(|f| match f.foreign_mode { FieldForeignMode::NotRef => true, FieldForeignMode::ForeignRef(_) => false })
|
||||
.collect();
|
||||
let foreign_keys_field_names: Vec<&Field> = model.fields.iter()
|
||||
.filter(|f| match f.foreign_mode { FieldForeignMode::NotRef => false, FieldForeignMode::ForeignRef(_) => true })
|
||||
.collect();
|
||||
(normal_field_names, foreign_keys_field_names)
|
||||
}
|
||||
|
||||
fn get_mutation_fields_ident(model: &Model) -> (Vec<&Field>, Vec<&Field>) {
|
||||
let normal_field_names: Vec<&Field> = model.fields.iter()
|
||||
.filter(|f| match f.foreign_mode { FieldForeignMode::NotRef => true, FieldForeignMode::ForeignRef(_) => false })
|
||||
.collect();
|
||||
let foreign_keys_field_names: Vec<&Field> = model.fields.iter()
|
||||
.filter(|f| match f.foreign_mode { FieldForeignMode::NotRef => false, FieldForeignMode::ForeignRef(_) => true })
|
||||
.collect();
|
||||
(normal_field_names, foreign_keys_field_names)
|
||||
}
|
||||
|
||||
fn gen_insert_method(model: &Model) -> TokenStream {
|
||||
let resource_ident = format_ident!("{}", &model.name);
|
||||
|
||||
let value_templates = (1..(model.fields.len()+1))
|
||||
.map(|i| format!("${}", i))
|
||||
.collect::<Vec<String>>()
|
||||
.join(", ");
|
||||
let (normal_fields, foreign_keys_fields) = get_mutation_fields(model);
|
||||
let (normal_field_idents, foreign_keys_field_idents) = (
|
||||
normal_fields.iter().map(|f| format_ident!("{}", &f.name)).collect::<Vec<Ident>>(),
|
||||
foreign_keys_fields.iter().map(|f| format_ident!("{}", &f.name)).collect::<Vec<Ident>>()
|
||||
);
|
||||
|
||||
let sql_columns = [normal_fields, foreign_keys_fields].concat()
|
||||
.iter()
|
||||
.map(|f| f.name.clone())
|
||||
.collect::<Vec<String>>()
|
||||
.join(", ");
|
||||
let insert_query = format!(
|
||||
"INSERT INTO {} ({}) VALUES ({})",
|
||||
model.table_name,
|
||||
sql_columns,
|
||||
value_templates
|
||||
);
|
||||
// foreign keys must be inserted first, we sort the columns so that foreign keys are first
|
||||
|
||||
quote! {
|
||||
pub async fn insert(&self, entity: &#resource_ident) -> Result<(), sqlx::Error> {
|
||||
sqlx::query(#insert_query)
|
||||
#( .bind( &entity.#normal_field_idents ) )*
|
||||
#( .bind( &entity.#foreign_keys_field_idents.target_id) )*
|
||||
.execute(&self.db.0)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn gen_insert_many_method(model: &Model) -> TokenStream {
|
||||
let resource_ident = format_ident!("{}", &model.name);
|
||||
let sql_columns = model.fields.iter()
|
||||
.map(|f| f.name.clone())
|
||||
.collect::<Vec<String>>()
|
||||
.join(", ");
|
||||
let base_insert_query = format!(
|
||||
"INSERT INTO {} ({}) VALUES {{}} ON CONFLICT DO NOTHING",
|
||||
model.table_name,
|
||||
sql_columns
|
||||
);
|
||||
let (normal_fields, foreign_keys_fields) = get_mutation_fields(model);
|
||||
let (normal_field_idents, foreign_keys_field_idents) = (
|
||||
normal_fields.iter().map(|f| format_ident!("{}", &f.name)).collect::<Vec<Ident>>(),
|
||||
foreign_keys_fields.iter().map(|f| format_ident!("{}", &f.name)).collect::<Vec<Ident>>()
|
||||
);
|
||||
let fields_count = model.fields.len();
|
||||
|
||||
quote! {
|
||||
pub async fn insert_many(&self, entities: &Vec<#resource_ident>) -> Result<(), sqlx::Error> {
|
||||
let values_templates: String = (1..(#fields_count*entities.len()+1))
|
||||
.collect::<Vec<usize>>()
|
||||
.chunks(#fields_count)
|
||||
.map(|c| c.to_vec())
|
||||
.map(|x| format!(
|
||||
"({})",
|
||||
x.iter()
|
||||
.map(|i| format!("${}", i))
|
||||
.collect::<Vec<String>>()
|
||||
.join(", ")
|
||||
))
|
||||
.collect::<Vec<String>>()
|
||||
.join(", ");
|
||||
let query_sql = format!(#base_insert_query, values_templates);
|
||||
|
||||
let mut query = sqlx::query(&query_sql);
|
||||
for entity in entities {
|
||||
query = query
|
||||
#( .bind( &entity.#normal_field_idents ) )*
|
||||
#( .bind( &entity.#foreign_keys_field_idents.target_id) )*;
|
||||
}
|
||||
query
|
||||
.execute(&self.db.0)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}


fn gen_update_by_id_method(model: &Model) -> TokenStream {
    let resource_ident = format_ident!("{}", &model.name);
    let primary_key = &model.fields.iter()
        .find(|f| f.is_primary)
        .expect("A model must have at least one primary key")
        .name;
    let (normal_fields, foreign_keys_fields) = get_mutation_fields(model);
    let (normal_field_idents, foreign_keys_field_idents) = (
        normal_fields.iter().map(|f| format_ident!("{}", &f.name)).collect::<Vec<Ident>>(),
        foreign_keys_fields.iter().map(|f| format_ident!("{}", &f.name)).collect::<Vec<Ident>>()
    );
    let sql_columns = [normal_fields, foreign_keys_fields].concat()
        .iter()
        .map(|f| f.name.clone())
        .collect::<Vec<String>>();
    let set_statements = sql_columns.iter()
        .enumerate()
        .map(|(i, column_name)| format!("{} = ${}", column_name, i+2))
        .collect::<Vec<String>>()
        .join(", ");
    let update_query = format!(
        "UPDATE {} SET {} WHERE {} = $1",
        model.table_name,
        set_statements,
        primary_key
    );
    let func_name_ident = format_ident!("update_by_{}", primary_key);

    quote! {
        pub async fn #func_name_ident(&self, item_id: &str, entity: &#resource_ident) -> Result<(), sqlx::Error> {
            sqlx::query(#update_query)
                .bind(item_id)
                #( .bind( &entity.#normal_field_idents ) )*
                #( .bind( &entity.#foreign_keys_field_idents.target_id) )*
                .execute(&self.db.0)
                .await?;

            Ok(())
        }
    }
}
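The SET placeholders start at $2 because $1 is reserved for the primary key in the WHERE clause. For two assumed column names the numbering works out as in this small sketch:

// Worked example of the set_statements numbering, for assumed columns ["name", "team"].
fn demo_set_statements() {
    let set_statements = ["name", "team"].iter()
        .enumerate()
        .map(|(i, column_name)| format!("{} = ${}", column_name, i + 2))
        .collect::<Vec<String>>()
        .join(", ");
    assert_eq!(set_statements, "name = $2, team = $3");
}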

fn gen_delete_by_id_method(model: &Model) -> TokenStream {
    let primary_key = &model.fields.iter()
        .find(|f| f.is_primary)
        .expect("A model must have at least one primary key")
        .name;

    let func_name_ident = format_ident!("delete_by_{}", primary_key);
    let query = format!(
        "DELETE FROM {} WHERE {} = $1",
        model.table_name,
        primary_key
    );

    quote! {
        pub async fn #func_name_ident(&self, item_id: &str) -> Result<(), sqlx::Error> {
            sqlx::query(#query)
                .bind(item_id)
                .execute(&self.db.0)
                .await?;

            Ok(())
        }
    }
}

fn gen_delete_many_by_id_method(model: &Model) -> TokenStream {
    let primary_key = &model.fields.iter()
        .find(|f| f.is_primary)
        .expect("A model must have at least one primary key")
        .name;

    let func_name_ident = format_ident!("delete_many_by_{}", primary_key);
    let delete_query_tmpl = format!(
        "DELETE FROM {} WHERE {} IN ({{}})",
        model.table_name,
        primary_key
    );

    quote! {
        pub async fn #func_name_ident(&self, ids: &[&str]) -> Result<(), sqlx::Error> {
            if ids.is_empty() {
                return Ok(())
            }
            let placeholder_params: String = (1..=(ids.len()))
                .map(|i| format!("${}", i))
                .collect::<Vec<String>>()
                .join(",");
            let query_sql = format!(#delete_query_tmpl, placeholder_params);
            let mut query = sqlx::query(&query_sql);
            for item_id in ids {
                query = query.bind(item_id)
            }
            query
                .execute(&self.db.0)
                .await?;

            Ok(())
        }
    }
}


pub fn generate_repository_file(all_models: &[Model], model: &Model) -> Result<SourceNodeContainer> {
    let resource_name = model.name.clone();

    let resource_module_ident = format_ident!("{}", &model.module_path.first().unwrap());

    let resource_ident = format_ident!("{}", &resource_name);
    let repository_ident = format_ident!("{}Repository", resource_ident);

    let get_all_method_code = gen_get_all_method(model);
    let get_by_id_method_code = gen_get_by_field_method(
        model,
        model.fields.iter()
            .find(|f| f.is_primary)
            .expect("Expected at least one primary key on the model.")
    );
    let get_many_by_id_method_code = gen_get_many_by_field_method(
        model,
        model.fields.iter()
            .find(|f| f.is_primary)
            .expect("Expected at least one primary key on the model.")
    );
    let insert_method_code = gen_insert_method(model);
    let insert_many_method_code = gen_insert_many_method(model);
    let update_by_id_method_code = gen_update_by_id_method(model);
    let delete_by_id_method_code = gen_delete_by_id_method(model);
    let delete_many_by_id_method_code = gen_delete_many_by_id_method(model);

    let query_by_field_methods: Vec<TokenStream> =
        model.fields.iter()
            .filter(|f| f.is_query_entrypoint)
            .map(|field|
                gen_get_by_field_method(
                    model,
                    &field
                )
            )
            .collect();
    let query_many_by_field_methods: Vec<TokenStream> =
        model.fields.iter()
            .filter(|f| f.is_query_entrypoint)
            .map(|field|
                gen_get_many_by_field_method(
                    model,
                    &field
                )
            )
            .collect();

    let fields_with_foreign_refs: Vec<&Field> = model.fields.iter().filter(|f|
        match f.foreign_mode { FieldForeignMode::ForeignRef(_) => true, FieldForeignMode::NotRef => false }
    ).collect();
    let related_entity_methods_codes: Vec<TokenStream> = fields_with_foreign_refs.iter().map(|field|
        gen_get_many_of_related_entity_method(model, &field)
    ).collect();

    // TODO: add import line
    let base_repository_code: TokenStream = quote! {
        use crate::models::#resource_module_ident::#resource_ident;
        use crate::db::Database;

        pub struct #repository_ident {
            db: Database
        }

        impl #repository_ident {
            pub fn new(db: Database) -> Self {
                #repository_ident {
                    db
                }
            }

            #get_all_method_code

            #get_by_id_method_code

            #get_many_by_id_method_code

            #insert_method_code

            #insert_many_method_code

            #update_by_id_method_code

            #delete_by_id_method_code

            #delete_many_by_id_method_code

            #(#query_by_field_methods)*

            #(#query_many_by_field_methods)*

            #(#related_entity_methods_codes)*
        }
    };
    // convert the TokenStream into Rust source code as a string
    let parse_res: syn::Result<File> = syn::parse2(base_repository_code);
    let pretty = prettyplease::unparse(&parse_res?);

    Ok(SourceNodeContainer {
        name: format!("{}_repository.rs", model.name.to_snake_case()),
        inner: SourceNode::File(pretty)
    })
}
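For a hypothetical `User` model living in a `user` module (both names assumed for illustration), the generated user_repository.rs starts from a skeleton along these lines, with the per-method bodies spliced in by the generators above:

// Sketch of the generated repository skeleton for an assumed `User` model.
use crate::models::user::User;
use crate::db::Database;

pub struct UserRepository {
    db: Database
}

impl UserRepository {
    pub fn new(db: Database) -> Self {
        UserRepository { db }
    }

    // get_all, get_by_id, insert, insert_many, update_by_id, delete_by_id,
    // delete_many_by_id, the query-by-field methods and the related-entity
    // methods are inserted here by the generator.
}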
31 lib/sqlxgentools_cli/src/generators/repositories/mod.rs Normal file
@@ -0,0 +1,31 @@
pub mod base;
pub mod relations;

use anyhow::Result;

use crate::generators::{SourceNode, SourceNodeContainer};
use crate::models::Model;

/// Generate base repositories for all models
pub fn generate_repositories_source_files(models: &[Model]) -> Result<SourceNodeContainer> {
    let mut nodes: Vec<SourceNodeContainer> = vec![];
    for model in models.iter() {
        nodes.push(base::generate_repository_file(models, model)?);
        // nodes.push(relations::generate_repository_file(model)?);
    }

    let mut mod_index_code: String = String::new();
    for node in &nodes {
        let module_name = node.name.replace(".rs", "");
        mod_index_code.push_str(&format!("pub mod {module_name};\n"));
    }
    nodes.push(SourceNodeContainer {
        name: "mod.rs".into(),
        inner: SourceNode::File(mod_index_code.to_string())
    });
    Ok(SourceNodeContainer {
        name: "".into(),
        inner: SourceNode::Directory(nodes)
    })
}
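The generated mod.rs index simply declares one module per repository file; for two hypothetical models (`User` and `Post`, assumed here) it would contain:

// Sketch of the generated repositories/mod.rs index for two assumed models.
pub mod user_repository;
pub mod post_repository;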
@@ -0,0 +1,31 @@
use proc_macro2::TokenStream;
use quote::{format_ident, quote};

use crate::models::{Field, FieldForeignMode, Model};

/// Method used to retrieve a list of entities of type X that are the children of a parent of type Y
/// e.g. get all comments of a post
pub fn gen_get_many_of_related_entity_method(model: &Model, foreign_key_field: &Field) -> TokenStream {
    let resource_ident = format_ident!("{}", &model.name);

    let foreign_ref_params = match &foreign_key_field.foreign_mode {
        FieldForeignMode::ForeignRef(params) => params,
        FieldForeignMode::NotRef => {
            panic!("Expected foreign key");
        }
    };

    let select_query = format!("SELECT * FROM {} WHERE {} = $1", model.table_name, foreign_key_field.name);

    let func_name_ident = format_ident!("get_many_of_{}", foreign_ref_params.target_resource_name);

    quote! {
        pub async fn #func_name_ident(&self, item_id: &str) -> Result<Vec<#resource_ident>, sqlx::Error> {
            sqlx::query_as::<_, #resource_ident>(#select_query)
                .bind(item_id)
                .fetch_all(&self.db.0)
                .await
        }
    }
}
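For example, for a hypothetical `Comment` model whose `post` field is a ForeignRef to a `Post` model (all names assumed for illustration), the expansion would be roughly:

// Hypothetical expansion for an assumed Comment -> Post relation.
pub async fn get_many_of_post(&self, item_id: &str) -> Result<Vec<Comment>, sqlx::Error> {
    sqlx::query_as::<_, Comment>("SELECT * FROM comments WHERE post = $1")
        .bind(item_id)
        .fetch_all(&self.db.0)
        .await
}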

@@ -3,13 +3,15 @@ use attribute_derive::FromAttr;

use argh::FromArgs;
use anyhow::{Result, anyhow};
use gen_migrations::generate_create_table_sql;
use gen_repositories::{generate_repositories_source_files, SourceNodeContainer};

use crate::generators::{SourceNode, SourceNodeContainer};

// use gen_migrations::generate_create_table_sql;
// use gen_repositories::{generate_repositories_source_files, SourceNodeContainer};

pub mod models;
pub mod parse_models;
pub mod gen_migrations;
pub mod gen_repositories;
pub mod generators;

#[derive(FromAttr, PartialEq, Debug, Default)]
#[attribute(ident = sql_generator_model)]
@@ -21,7 +23,12 @@ pub struct SqlGeneratorModelAttr {
#[attribute(ident = sql_generator_field)]
pub struct SqlGeneratorFieldAttr {
    is_primary: Option<bool>,
    is_unique: Option<bool>
    is_unique: Option<bool>,
    reverse_relation_name: Option<String>,

    /// Indicates that this field can be used to look up entities;
    /// the framework generates query methods for every field marked as an entrypoint.
    is_query_entrypoint: Option<bool>
}

@@ -68,11 +75,11 @@ struct GeneratorArgs {
fn write_source_code(base_path: &Path, snc: SourceNodeContainer) -> Result<()> {
    let path = base_path.join(snc.name);
    match snc.inner {
        gen_repositories::SourceNode::File(code) => {
        SourceNode::File(code) => {
            println!("writing file {:?}", path);
            std::fs::write(path, code)?;
        },
        gen_repositories::SourceNode::Directory(dir) => {
        SourceNode::Directory(dir) => {
            for node in dir {
                write_source_code(&path, node)?;
            }

@@ -126,13 +133,13 @@ pub fn main() -> Result<()> {
        if !repositories_mod_path.exists() {
            return Err(anyhow!("Could not resolve repositories modules."));
        }
        let snc = generate_repositories_source_files(&models)?;
        let snc = generators::repositories::generate_repositories_source_files(&models)?;
        dbg!(&snc);
        write_source_code(&repositories_mod_path, snc)?;
    },
    GeneratorArgsSubCommands::GenerateMigration(opts) => {
        eprintln!("Generating migrations…");
        let sql_code = generate_create_table_sql(&models)?;
        let sql_code = generators::migrations::generate_create_table_sql(&models)?;
        if let Some(out_location) = opts.output {
            let output_path = Path::new(&out_location);
            let write_res = std::fs::write(output_path, sql_code);

@@ -10,12 +10,54 @@ struct Model {
    fields: Vec<Field>
}

#[derive(Debug)]
impl Model {
    // pub fn concrete_fields(&self) -> Vec<Field> {
    //     self.fields.iter().map(|f| {
    //         if f.is_foreign_ref {
    //             Field {
    //                 name: f.name.clone(),
    //                 rust_type: "String".into(),
    //                 is_nullable: f.is_nullable.clone(),
    //                 is_unique: f.is_unique.clone(),
    //                 is_primary: false,
    //                 is_foreign_ref: false
    //             }
    //         } else {
    //             f.clone()
    //         }
    //     }).collect()
    // }
}


#[derive(Debug, Clone)]
#[fully_pub]
struct ForeignRefParams {
    /// eg. "tokens"
    reverse_relation_name: String,
    /// eg. "user"
    target_resource_name: String,
    // /// eg. "users"
    // target_resource_name_plural: String
}


#[derive(Debug, Clone)]
#[fully_pub]
enum FieldForeignMode {
    ForeignRef(ForeignRefParams),
    NotRef
}

#[derive(Debug, Clone)]
#[fully_pub]
struct Field {
    name: String,
    rust_type: String,
    is_nullable: bool,
    is_unique: bool,
    is_primary: bool
    is_primary: bool,
    is_query_entrypoint: bool,
    foreign_mode: FieldForeignMode
}

@@ -3,13 +3,13 @@ use attribute_derive::FromAttr;

use anyhow::{Result, anyhow};
use convert_case::{Case, Casing};
use syn::Type;
use syn::{GenericArgument, PathArguments, Type};

use crate::{models::{Field, Model}, SqlGeneratorFieldAttr, SqlGeneratorModelAttr};
use crate::{SqlGeneratorFieldAttr, SqlGeneratorModelAttr, models::{Field, FieldForeignMode, ForeignRefParams, Model}};

fn extract_generic_type(base_segments: Vec<String>, ty: &syn::Type) -> Option<&syn::Type> {
fn extract_generic_type(base_segments: Vec<String>, ty: &Type) -> Option<&Type> {
    // If it is not a `TypePath`, it cannot be `Option<T>`, so return `None`
    if let syn::Type::Path(syn::TypePath { qself: None, path }) = ty {
    if let Type::Path(syn::TypePath { qself: None, path }) = ty {
        // We limit ourselves to the 5 supported ways of writing `Option`; after `Option`
        // there will be no `PathSegment` at the same level, so we only need to take the
        // top-level `PathSegment`s and join them into a string

@@ -41,7 +41,7 @@ fn extract_generic_type(base_segments: Vec<String>, ty: &syn::Type) -> Option<&s
        // If it is not a type, it cannot be `Option<T>`, so return `None`
        // (though this situation should not normally occur)
        .and_then(|generic_arg| match generic_arg {
            syn::GenericArgument::Type(ty) => Some(ty),
            GenericArgument::Type(ty) => Some(ty),
            _ => None,
        });
    // Return the `T` in `Option<T>`

@@ -52,7 +52,7 @@ fn extract_generic_type(base_segments: Vec<String>, ty: &syn::Type) -> Option<&s

fn get_type_first_ident(inp: &Type) -> Option<String> {
    match inp {
        syn::Type::Path(field_type_path) => {
        Type::Path(field_type_path) => {
            Some(field_type_path.path.segments.get(0).unwrap().ident.to_string())
        },
        _ => {

@@ -61,6 +61,31 @@ fn get_type_first_ident(inp: &Type) -> Option<String> {
    }
}

fn get_first_generic_arg_type_ident(inp: &Type) -> Option<String> {
    if let Type::Path(field_type_path) = inp {
        if let PathArguments::AngleBracketed(args) = &field_type_path.path.segments.get(0).unwrap().arguments {
            if args.args.is_empty() {
                None
            } else {
                if let GenericArgument::Type(arg_type) = args.args.get(0).unwrap() {
                    if let Type::Path(arg_type_path) = arg_type {
                        Some(arg_type_path.path.segments.get(0).unwrap().ident.to_string())
                    } else {
                        None
                    }
                } else {
                    None
                }
            }
        } else {
            None
        }
    } else {
        None
    }
}
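As a rough usage sketch (assuming syn's default parsing features are available), these helpers pull the inner type name out of a wrapper such as `ForeignRef<User>`:

// Illustrative check only; the type string is an assumption for the example.
fn demo_extract_inner_ident() {
    let ty: syn::Type = syn::parse_str("ForeignRef<User>").unwrap();
    assert_eq!(get_first_generic_arg_type_ident(&ty), Some("User".to_string()));
}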


fn parse_model_attribute(item: &syn::ItemStruct) -> Result<Option<SqlGeneratorModelAttr>> {
    for attr in item.attrs.iter() {
        let attr_ident = match attr.path().get_ident() {

@@ -102,7 +127,7 @@ fn parse_field_attribute(field: &syn::Field) -> Result<Option<SqlGeneratorFieldA
                return Ok(Some(v));
            },
            Err(err) => {
                return Err(anyhow!("Failed to parse sql_generator_field attribute macro: {}", err));
                return Err(anyhow!("Failed to parse sql_generator_field attribute macro on field {:?}, {}", field, err));
            }
        };
    }

@@ -140,14 +165,16 @@ pub fn parse_models(source_code_path: &Path) -> Result<Vec<Model>> {
        for field in itemval.fields.iter() {
            let field_name = field.ident.clone().unwrap().to_string();
            let field_type = field.ty.clone();
            // println!("field {}", field_name);
            println!("field {} {:?}", field_name, field_type);

            let mut output_field = Field {
                name: field_name,
                rust_type: "Unknown".into(),
                is_nullable: false,
                is_primary: false,
                is_unique: false
                is_unique: false,
                is_query_entrypoint: false,
                foreign_mode: FieldForeignMode::NotRef
            };

            let first_type: String = match get_type_first_ident(&field_type) {

@@ -194,10 +221,43 @@ pub fn parse_models(source_code_path: &Path) -> Result<Vec<Model>> {
            }
            output_field.rust_type = final_type;


            let field_attrs_opt = parse_field_attribute(field)?;
            if first_type == "ForeignRef" {
                let attrs = match &field_attrs_opt {
                    Some(attrs) => attrs,
                    None => {
                        return Err(anyhow!("Found a ForeignRef type but did not find any attributes."))
                    }
                };
                let rrn = match &attrs.reverse_relation_name {
                    Some(rrn) => rrn.clone(),
                    None => {
                        return Err(anyhow!("Found a ForeignRef type but did not find a reverse_relation_name attribute."))
                    }
                };

                let extract_res = extract_generic_type(vec!["ForeignRef".into()], &field_type)
                    .and_then(|t| get_type_first_ident(t));
                let target_type_name = match extract_res {
                    Some(v) => v,
                    None => {
                        return Err(anyhow!("Could not extract inner type from ForeignRef."));
                    }
                };
                output_field.foreign_mode = FieldForeignMode::ForeignRef(
                    ForeignRefParams {
                        reverse_relation_name: rrn,
                        target_resource_name: target_type_name.to_case(Case::Snake)
                    }
                );
            }

            // parse attribute
            if let Some(field_attr) = parse_field_attribute(field)? {
            if let Some(field_attr) = field_attrs_opt {
                output_field.is_primary = field_attr.is_primary.unwrap_or_default();
                output_field.is_unique = field_attr.is_unique.unwrap_or_default();
                output_field.is_query_entrypoint = field_attr.is_query_entrypoint.unwrap_or_default();
            }

            fields.push(output_field);

@@ -217,10 +277,11 @@ pub fn parse_models(source_code_path: &Path) -> Result<Vec<Model>> {
}

/// Scan for model structs in a Rust file and return a struct representing each model
pub fn parse_models_from_module(module_path: &Path) -> Result<Vec<Model>> {
fn parse_models_from_module_inner(module_path: &Path) -> Result<Vec<Model>> {
    let mut models: Vec<Model> = vec![];

    if module_path.is_file() {
        println!("Parsing models from path {:?}.", module_path);
        models.extend(parse_models(module_path)?);
        return Ok(models);
    }

@@ -234,3 +295,29 @@ pub fn parse_models_from_module(module_path: &Path) -> Result<Vec<Model>> {

    Ok(models)
}

// fn complete_models(original_models: Vec<Model>) -> Result<Vec<Model>> {
//     let mut new_models: Vec<Model> = vec![];
//     for model in original_models {
//         for original_field in model.fields {
//             let mut field = original_field
//             match original_field.foreign_mode {
//                 FieldForeignMode::NotRef => {},
//                 FieldForeignMode::ForeignRef(ref_params) => {

//                 }
//             }

//         }
//     }
//     Ok(new_models)
// }

/// Scan for model structs in a Rust file and return a struct representing each model
pub fn parse_models_from_module(module_path: &Path) -> Result<Vec<Model>> {
    let models = parse_models_from_module_inner(module_path)?;

    // let models = complete_models(models)?;

    Ok(models)
}

20 lib/sqlxgentools_misc/Cargo.toml Normal file
@@ -0,0 +1,20 @@
[package]
name = "sqlxgentools_misc"
description = "Various misc classes to use in applications that use sqlxgentools"
publish = true
edition.workspace = true
authors.workspace = true
version.workspace = true
license.workspace = true
repository.workspace = true

[dependencies]
sqlx-core = { version = "=0.8.6" }
sqlx-sqlite = { version = "=0.8.6", features = ["offline"] }
fully_pub = "0.1"
serde = { version = "1.0", features = ["derive"] }

[lib]

[lints.clippy]
uninlined_format_args = "allow"
92 lib/sqlxgentools_misc/src/lib.rs Normal file
@@ -0,0 +1,92 @@
use std::error::Error;
use std::marker::PhantomData;

use fully_pub::fully_pub;

use serde::{Serialize, Serializer};
use sqlx_core::any::{Any, AnyArgumentBuffer};
use sqlx_core::database::Database;
use sqlx_core::decode::Decode;
use sqlx_core::encode::{Encode, IsNull};
use sqlx_core::error::BoxDynError;
use sqlx_core::types::Type;
use sqlx_sqlite::{Sqlite, SqliteArgumentValue};


#[fully_pub]
trait DatabaseLine {
    fn id(&self) -> String;
}


/// Wrapper to mark a model field as foreign
/// You can use a generic argument inside ForeignRef to point to the target model
#[derive(Clone, Debug)]
#[fully_pub]
struct ForeignRef<T: Sized + DatabaseLine> {
    pub target_type: PhantomData<T>,
    pub target_id: String
}


// Implement serde Serialize for ForeignRef
impl<T: Sized + DatabaseLine> Serialize for ForeignRef<T> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Serialize only the target_id as a string
        serializer.serialize_str(&self.target_id)
    }
}


impl<T: Sized + DatabaseLine> ForeignRef<T> {
    pub fn new(entity: &T) -> ForeignRef<T> {
        ForeignRef {
            target_type: PhantomData,
            target_id: entity.id()
        }
    }
}


impl<'r, DB: Database, T: Sized + DatabaseLine> Decode<'r, DB> for ForeignRef<T>
where
    // we want to delegate some of the work to string decoding so let's make sure strings
    // are supported by the database
    &'r str: Decode<'r, DB>
{
    fn decode(
        value: <DB as Database>::ValueRef<'r>,
    ) -> Result<ForeignRef<T>, Box<dyn Error + 'static + Send + Sync>> {
        let value = <&str as Decode<DB>>::decode(value)?;

        let ref_val: String = value.parse()?;

        Ok(ForeignRef::<T> {
            target_type: PhantomData,
            target_id: ref_val
        })
    }
}

impl<T: DatabaseLine + Sized> Encode<'_, Any> for ForeignRef<T> {
    fn encode_by_ref(&self, buf: &mut AnyArgumentBuffer) -> Result<IsNull, BoxDynError> {
        <String as Encode<'_, Any>>::encode_by_ref(&self.target_id.to_string(), buf)
    }
}

impl<T: DatabaseLine + Sized> Type<Sqlite> for ForeignRef<T> {
    fn type_info() -> <Sqlite as Database>::TypeInfo {
        <String as Type<Sqlite>>::type_info()
    }
}

impl<T: DatabaseLine + Sized> Encode<'_, Sqlite> for ForeignRef<T> {
    fn encode_by_ref(&self, args: &mut Vec<SqliteArgumentValue<'_>>) -> Result<IsNull, BoxDynError> {
        args.push(SqliteArgumentValue::Text(self.target_id.clone().into()));
        Ok(IsNull::No)
    }
}
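A minimal usage sketch of ForeignRef, assuming a hypothetical `User` model (the struct and its fields are illustrative, not part of this change):

// Assumed model implementing DatabaseLine so it can be referenced.
#[derive(Clone, Debug)]
struct User {
    id: String,
    name: String,
}

impl DatabaseLine for User {
    fn id(&self) -> String {
        self.id.clone()
    }
}

// Another model can then hold a field such as `owner: ForeignRef<User>`;
// the wrapper stores only the id while PhantomData keeps the target type.
fn example(owner: &User) -> ForeignRef<User> {
    ForeignRef::new(owner)
}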