This commit is contained in:
Matthieu Bessat 2023-01-23 10:07:56 +01:00
parent 8a001fbb14
commit 34bf3275d3
26 changed files with 2307 additions and 998 deletions

Cargo.lock generated

File diff suppressed because it is too large.

Cargo.toml

@@ -8,12 +8,28 @@ edition = "2021"
[dependencies]
actix-web = "^4.2"
-sea-orm = { version = "^0", features = [ "sqlx-sqlite", "runtime-actix-native-tls", "macros" ] }
+#sea-orm = { version = "^0", features = [ "sqlx-sqlite", "runtime-actix-native-tls", "macros" ] }
+rust-argon2 = "1.0" # password hashing
+jsonwebtoken = "8"
serde = { version = "^1", features = [ "derive" ] }
toml = { version = "^0.5" }
+serde_yaml = "^0.9"
chrono = { version = "^0.4", features = [ "unstable-locales", "clock" ] }
git2 = { version = "0.15" }
log = "^0.4"
env_logger = "^0.9"
+futures = "0.3"
+futures-util = "0.3"
+rand = "0.8.5"
+sha2 = "0.10.6"
+slug = "0.1.4"
+actix-rt = "2.2.0"
+background-jobs = "0.14.0"
+anyhow = "1.0"
+crossbeam-channel = "0.5"

README.md Normal file

@@ -0,0 +1,88 @@
# Hugotator
Web Content Management System on top of Hugo and git, for non-git and non-markdown folks.
The goal is to replicate the ease of use of a classic blog CMS like Grav or WordPress.
## Features
## Philosophy
- Avoid too many dependencies
## API
### Auth
Auth is done via JWT (see the example exchange below the endpoint list).
- `POST /login`
- `GET /me`
- `GET /sites`
- `GET /sites/{site_slug}`
- `GET /sites/{site_slug}/postings/{posting_slug}`
- `POST /sites/{site_slug}/postings`
- `DELETE /sites/{site_slug}/postings`
- `PUT /sites/{site_slug}/postings/{posting_slug}`
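
For example, a login exchange could look like this (the JSON bodies map to the `LoginInput` and `LoginOutput` structs in `src/web_api/login.rs`; the token value is illustrative):

```
POST /login
{"username": "john", "password": "correct horse battery staple"}

200 OK
{"jwt": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9..."}
```

A wrong username or password yields `401 Unauthorized`; subsequent requests send the token in an `Authorization: Bearer <jwt>` header, which is what the auth middleware parses.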
## Technical details of auth
Auth is done by generating argon2 password hashes and storing them in the config file:
[[users]]
username = "admin"
password = "$argon2hash"
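
A hash in that format can be generated with the `rust-argon2` crate added by this commit; here is a minimal sketch mirroring the commented-out snippet in `src/main.rs` (the password and salt are placeholders, and a real setup should use a random salt):

```rust
fn main() {
    let password = b"passwords can be phrases";
    let salt = b"is this really random"; // placeholder: use a random salt in practice
    let config = argon2::Config::default();
    // prints an encoded "$argon2i$v=19$..." string, ready to paste as the user's password
    let hash = argon2::hash_encoded(password, salt, &config).unwrap();
    println!("{hash}");
    // the server later checks login attempts against it:
    assert!(argon2::verify_encoded(&hash, password).unwrap());
}
```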
## Details of git
### Debounced auto-push
Criticality: Low
- we can afford the network traffic (especially if it is our own git server hosted locally)
- even so, it is good to avoid making too many pushes in one go
- the trade-off is that the site would not be immediately online
### Debouncing commits
Criticality: Low
In order to track the changes made in the git repository, we need to commit each time an "update" is made to the content of our website.
We could, as a starting point, commit on each change. This would work, but it would probably create too many commits, because one of the ways the user will use the admin interface is in a rapid succession of actions: e.g. there is a mistake in a keyword, so the writer changes the title, clicks the save button, and then notices that a word in the description of the article should also change. Another example is a user who wants to save their work to the server regularly to be safe.
We could use `git commit --amend` to commit a change in a file that was committed recently.
We could also have a debouncer system: on an article change, we set a timer to commit in 60 seconds for that site, but if there is another change, the timer is reset.
This is tricky to implement, because what if, for some reason, we make a small change in an article and then a very big change in quick succession? It probably makes no sense to bundle those changes together.
We could try to detect and separate a big change with a smart algorithm, but that is too much complexity.
An easy solution would be to not bundle commits at all for now, as it's probably not a critical feature.
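
A minimal sketch of that timer-reset loop, using the same `crossbeam-channel` `recv_timeout` pattern as `src/background_jobs.rs` (which uses a 30-second window; the function signature and window length here are illustrative):

```rust
use crossbeam_channel::{Receiver, RecvTimeoutError};
use std::time::Duration;

// Each content change sends a () on the channel; the commit only happens
// once the channel has stayed quiet for a full window.
fn debounce_commits(changes: Receiver<()>, window: Duration) {
    loop {
        match changes.recv_timeout(window) {
            // another change arrived before the timer fired: reset the window
            Ok(()) => continue,
            // no change during the whole window: commit now
            Err(RecvTimeoutError::Timeout) => break,
            // all senders dropped: nothing left to debounce
            Err(RecvTimeoutError::Disconnected) => return,
        }
    }
    // git commit would happen here
}
```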
## Merge resolution
Criticality: Medium
For now, we can assume that only a handful of people make changes to a site repository, so we expect very few merge conflicts.
Picture this scenario:
- User A edits article A on the hugotator web platform.
- User B edits the same article A, but in her own setup with git and vim.
- User A does her work, commits, and pushes.
- User B does her work and commits, but when pushing there is a conflict. In this case, the conflict is managed by User B.
But what if User A pushes *after* User B? In this case, as the git interface is hidden from the Hugotator user, we need to provide some kind of merge resolution.
We can try an automatic git resolution, but if it fails (and it will, even in simple cases), the next best thing is to warn the user and offer a choice (accept theirs or yours).
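
Detecting whether an automatic resolution is even possible could look roughly like this with the `git2` crate the project already uses (a sketch, not part of this commit; the `origin`/`main` names are assumptions, and `merge_analysis` only classifies the incoming branch before any merge is attempted):

```rust
use git2::Repository;

// Sketch: fetch from origin, then ask libgit2 how our HEAD relates to the fetched head.
fn can_auto_merge(repo: &Repository) -> Result<bool, git2::Error> {
    repo.find_remote("origin")?.fetch(&["main"], None, None)?;
    let fetch_head = repo.find_reference("FETCH_HEAD")?;
    let incoming = repo.reference_to_annotated_commit(&fetch_head)?;
    let (analysis, _prefs) = repo.merge_analysis(&[&incoming])?;
    // up-to-date or fast-forward: no conflict possible;
    // a "normal" merge may still conflict and would need user input
    Ok(analysis.is_up_to_date() || analysis.is_fast_forward())
}
```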
## Auto-deploy
It must be separate from hugotator, as hugotator is just meant to be a simple editor.
The deployment is downstream of the git repository, and hugotator is upstream.
We can use something like GitHub Actions, GitLab CI jobs, or similar with Gitea/Forgejo.
It's just a script that will `git pull` and run the hugo build, as well as empty the cache.

config.example.toml

@@ -1,12 +1,21 @@
+secret = "Uw3Nr4mrIjWlTW08Ps4Bj4TYoLJNUtPWNTrgQN7yltkaJdMN"
+
[server]
host = "0.0.0.0"

+[[users]]
+username = "john"
+password = "$argon2i$v=19$m=4096,t=3,p=1$aXMgdGhpcyByZWFsbHkgcmFuZG9t$V9HA8SSXKd/dQPVKEl2mEB/zhvbRpqkjQm0djDwDr70"
+
[[sites]]
slug = "etoiledebethleem"
+name = "Étoile de Bethléem"
git_remote_url = "https://forge.lefuturiste.fr/etoiledebethleem/website.git"
content_path = "content"
+allowed_users = ["john"]

[[sites.content_bindings]]
+name = "Actualités"
slug = "actualites"
posting_kind = "Article"

src/__web_api.rs Normal file

@@ -0,0 +1,280 @@
use actix_web::{get, post, web, App, HttpResponse, HttpServer, Responder, Result as WebResult};
use super::front_matter::scan_posting_detailled;
use serde::{Serialize, Deserialize};
use super::AppState;
use jsonwebtoken;
use chrono::{Duration, Utc};
#[derive(Debug, Serialize, Deserialize)]
struct TokenClaims {
sub: String,
exp: usize,
}
#[derive(Serialize)]
struct CurrentUserOutput {
username: String
}
#[get("/me")]
async fn get_current_user(app_state: web::Data<AppState>) -> impl Responder {
HttpResponse::Ok().json(CurrentUserOutput {
username: "hello".to_string()
})
}
#[derive(Deserialize, Debug)]
struct LoginInput {
username: String,
password: String
}
#[derive(Serialize)]
struct LoginOutput {
jwt: String
}
#[post("/login")]
async fn login(
app_state: web::Data<AppState>,
// TODO: customize Actix deserialization error to have json instead of text/plain
// content type
input: web::Json<LoginInput>
) -> impl Responder {
let user_opt = app_state
.users
.iter()
.find(|user| user.username == input.username);
const DUMMY_HASH: &str = "$argon2i$v=19$m=4096,t=3,p=1$aXMgdGhpcyByZWFsbHkgcmFuZG9t$V9HA8SSXKd/dQPVKEl2mEB/zhvbRpqkjQm0djDwDr70";
let password_hash = match user_opt {
Some(user) => &user.password,
None => DUMMY_HASH
};
let password_verified = match argon2::verify_encoded(
password_hash,
input.password.as_bytes()
) {
Ok(res) => res,
Err(e) => {
error!("argon2::verify_encoded failed {:?}", e);
return HttpResponse::InternalServerError()
.reason("cannot verify password")
.finish();
}
};
if user_opt.is_none() || !password_verified {
return HttpResponse::Unauthorized()
.finish();
}
let expiration_timestamp = Utc::now()
.checked_add_signed(Duration::seconds(5))
.expect("invalid timestamp")
.timestamp();
let token_claims = TokenClaims {
sub: "User".to_string(),
exp: expiration_timestamp as usize
};
let token = match jsonwebtoken::encode(
&jsonwebtoken::Header::default(),
&token_claims,
&jsonwebtoken::EncodingKey::from_secret(app_state.secret.as_ref())
) {
Ok(res) => res,
Err(err) => {
error!("Failed to create a JWT {:?}", err);
return HttpResponse::InternalServerError()
.reason("failed to create a token")
.finish()
}
};
// TODO: Handle token creation errors as 500
HttpResponse::Ok()
.json(LoginOutput {
jwt: token
})
}
#[derive(Serialize)]
struct HomeResponseBody {
version: String
}
#[get("/")]
async fn home(app_state: web::Data<AppState>) -> WebResult<impl Responder> {
Ok(web::Json(HomeResponseBody {
version: String::from(&app_state.version)
}))
}
#[derive(Serialize)]
struct SiteSummaryOutput {
slug: String,
name: String
}
#[get("/sites")]
async fn get_many_sites(app_state: web::Data<AppState>) -> WebResult<impl Responder> {
let res: Vec<SiteSummaryOutput> = app_state
.sites
.iter()
.map(|site| {
SiteSummaryOutput {
slug: site.slug.clone(),
name: site.name.clone(),
}
})
.collect();
Ok(web::Json(res))
}
#[derive(Serialize)]
struct SectionSummaryOutput {
slug: String,
name: String
}
#[derive(Serialize)]
struct SiteDetailledOutput {
slug: String,
name: String,
sections: Vec<SectionSummaryOutput>
}
#[get("/sites/{slug}")]
async fn get_one_site(path: web::Path<(String, )>, app_state: web::Data<AppState>) -> impl Responder {
let site = match app_state
.sites
.iter()
.find(|site| {
site.slug == path.0
}) {
Some(site) => site,
None => return HttpResponse::NotFound().json("not found")
};
HttpResponse::Ok().json(
SiteDetailledOutput {
slug: site.slug.clone(),
name: site.name.clone(),
sections: site.sections
.iter().map(|s| {
SectionSummaryOutput {
slug: s.slug.clone(),
name: s.name.clone()
}
}).collect()
}
)
}
#[derive(Serialize)]
struct PostSummaryOutput {
slug: String,
title: String,
created_at: String
}
#[get("/posts")]
async fn get_many_posts(app_state: web::Data<AppState>) -> WebResult<impl Responder> {
let res: Vec<PostSummaryOutput> = app_state
.sites.first().unwrap()
.sections.first().unwrap().postings
.iter()
.map(|post| {
PostSummaryOutput {
slug: post.slug.clone(),
title: post.title.clone(),
created_at: post.created_at.to_rfc3339_opts(chrono::SecondsFormat::Secs, true)
}
})
.collect();
Ok(web::Json(res))
}
#[derive(Serialize)]
struct PostDetailledOutput {
slug: String,
title: String,
created_at: String,
content: String
}
#[get("/posts/{id}")]
async fn get_one_post(path: web::Path<(String, )>, app_state: web::Data<AppState>) -> impl Responder {
let post = match app_state
.sites.first().unwrap()
.sections.first().unwrap().postings
.iter()
.find(|post| post.slug == path.0) {
Some(post) => post,
None => return HttpResponse::NotFound().json("not found")
};
// actually read the file on disk
let scan_out = match scan_posting_detailled(&post.path) {
Ok(res) => res,
Err(err) => {
error!("Could not scan posting details {:?}", err);
return HttpResponse::InternalServerError().json("Whoops")
}
};
let res = PostDetailledOutput {
slug: post.slug.clone(),
title: post.title.clone(),
created_at: post.created_at.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
content: scan_out.content
};
HttpResponse::Ok()
.json(res)
}
#[derive(Deserialize)]
struct CreatePostInput {
slug: String,
title: String,
created_at: String,
content: String
}
#[derive(Serialize)]
struct CreatePostOutput {
id: String
}
// #[post("/posts/{id}")]
// async fn create_post(app_state: web::Data<AppState>, body: CreatePostInput) -> impl Responder {
// todo!();
// // check that the post slug does not already exists
// // create a new folder with the slug
// // create an index.md inside with the initial content
// }
#[post("/echo")]
async fn echo(req_body: String) -> impl Responder {
HttpResponse::Ok().body(req_body)
}
#[actix_web::main]
pub async fn actix_web_main(
bind_config: (String, u16),
app_data: AppState
) -> std::io::Result<()> {
info!("Starting a web server on {:?}", bind_config);
HttpServer::new(move || {
App::new()
.app_data(web::Data::new(app_data.clone()))
.service(home)
.service(echo)
.service(get_many_sites)
.service(get_one_site)
.service(get_many_posts)
.service(get_one_post)
.service(login)
.service(get_current_user)
// .service(get_sites)
})
.bind(bind_config)?
.run()
.await
}

src/actions/mod.rs Normal file

@@ -0,0 +1 @@
pub mod posts;

src/actions/posts.rs Normal file

@@ -0,0 +1,198 @@
use std::sync::Arc;
use crate::Store;
use crate::models::{Id, Post, SiteSection, PostingKind, Message};
use crate::utils::{pub_fields, generate_id, unwrap_or_return, unwrap_opt_or_return};
use chrono::{DateTime, Utc};
// use std::fs::{File, create_dir, remove_dir_all, rename};
use std::fs;
use std::io::Write;
use crate::front_matter::PostFrontMatterHeader;
use crate::git_helper::{GitSyncError, commit_changes};
use crate::repositories::posts as posts_repository;
#[derive(Debug)]
pub enum PostSyncError {
CannotSerialize,
BrokenIo
}
pub fn sync_to_disk(post: &Post, content: String) -> Result<(), PostSyncError> {
if !post.path.exists() {
fs::create_dir(&post.path).unwrap();
}
let index_path = post.path.join("index.md");
let mut file = unwrap_or_return!(
if !index_path.exists() {
fs::File::create(index_path)
} else {
fs::File::options()
.write(true)
.truncate(true)
.open(index_path)
},
|_| PostSyncError::BrokenIo
);
let header = PostFrontMatterHeader {
id: Some(post.id.clone().to_string()),
title: post.title.clone(),
date: post.created_at.format("%Y-%m-%d %H:%M").to_string()
};
let yaml_header = unwrap_or_return!(
serde_yaml::to_string(&header),
|_| PostSyncError::CannotSerialize
);
let markdown_content: String = format!("---\n{yaml_header}---\n\n{0}\n", content);
unwrap_or_return!(
file.write_all(markdown_content.as_bytes()),
|e| {
error!("Cannot write file: {:?}", e);
PostSyncError::BrokenIo
}
);
Ok(())
}
pub enum PostCreateError {
SlugNotUnique,
SyncError(PostSyncError)
}
pub fn create(
store: &Arc<Store>,
section: &SiteSection,
title: String,
content: String,
created_at: DateTime<Utc>
) -> Result<Id, PostCreateError> {
let mut posts = store.posts.write().unwrap();
let slug = slug::slugify(&title);
if posts
.iter()
.any(|post_cand| post_cand.slug == slug) {
return Err(PostCreateError::SlugNotUnique)
}
// check that the post slug does not already exists
// create a new folder with the slug
// create an index.md inside with the initial content
let id = generate_id();
let post = Post {
id: id.clone(),
path: section.path.join(&slug),
section_id: section.id.clone(),
title,
kind: PostingKind::Article,
slug,
created_at
};
// sync to disk
unwrap_or_return!(
sync_to_disk(&post, content),
|x| PostCreateError::SyncError(x)
);
// send event to side channel
posts.push(post);
store.channel_sender.send(Message::NewPost { post_id: id.clone() });
Ok(id)
}
pub enum PostUpdateError {
SlugNotUnique,
SyncError(PostSyncError),
CannotMoveDirectory,
PositionNotFound,
GitSyncError(GitSyncError),
CannotGetAssociatedSite
}
pub fn update(
store: &Arc<Store>,
post: &Post,
title: String,
content: String,
created_at: DateTime<Utc>
) -> Result<(), PostUpdateError> {
let mut posts = store.posts.write().unwrap();
let post_position = match posts
.iter()
.position(|cand| cand.id == post.id) {
Some(pos) => pos,
None => return Err(PostUpdateError::PositionNotFound)
};
let mut new_post = post.clone();
let new_slug = slug::slugify(&title);
if new_slug != post.slug {
if posts
.iter()
.any(|post_cand| post_cand.slug == new_slug && post_cand.id != post.id) {
return Err(PostUpdateError::SlugNotUnique)
}
new_post.slug = new_slug;
new_post.path = post.path.parent().unwrap().join(&new_post.slug);
// move the old folder
unwrap_or_return!(
fs::rename(&post.path, &new_post.path),
|_| PostUpdateError::CannotMoveDirectory
)
}
new_post.created_at = created_at;
new_post.title = title;
// sync to disk
unwrap_or_return!(
sync_to_disk(&new_post, content),
|x| PostUpdateError::SyncError(x)
);
// let site = unwrap_opt_or_return!(
// posts_repository::get_post_site(store, post),
// PostUpdateError::CannotGetAssociatedSite
// );
// unwrap_or_return!(
// commit_changes(&site),
// |x| PostUpdateError::GitSyncError(x)
// );
// for now replace and push
posts.push(new_post);
posts.remove(post_position);
store.channel_sender.send(Message::UpdatedPost { post_id: post.id.clone() });
Ok(())
}
pub enum PostRemoveError {
PositionNotFound,
CannotSyncFiles
}
pub fn remove(store: &Arc<Store>, post: &Post) -> Result<(), PostRemoveError> {
let mut posts = store.posts.write().unwrap();
let post_position = match posts
.iter()
.position(|cand| cand.id == post.id) {
Some(pos) => pos,
None => return Err(PostRemoveError::PositionNotFound)
};
unwrap_or_return!(fs::remove_dir_all(&post.path), |_| PostRemoveError::CannotSyncFiles);
posts.swap_remove(post_position);
// TODO: sync the repository in an async task
Ok(())
}

src/background_jobs.rs Normal file

@@ -0,0 +1,84 @@
use std::fs;
use std::process::ExitCode;
use crate::models::{Config, Site, SiteSection, User, Post, Id};
use std::collections::hash_map::HashMap;
use crossbeam_channel::{unbounded, Sender, Receiver, RecvTimeoutError};
use crate::models::Message;
use std::thread;
use crate::Store;
use std::sync::{Arc, Mutex};
use crate::utils::{unwrap_or_return, unwrap_opt_or_return};
use crate::repositories::posts as posts_repository;
use crate::repositories::sites as sites_repository;
use crate::git_helper;
use std::time::Duration;
fn run_debouncer(store: Arc<Store>, site_id: Id, new_channel_receiver: Receiver<()>) {
loop {
match new_channel_receiver.recv_timeout(Duration::from_secs(30)) {
Ok(_) => {
println!("New signal received in the debouncer, continuing the loop");
continue;
},
Err(RecvTimeoutError::Timeout) => {
println!("> Debouncer, I will actually do the thing {:?}", site_id);
let site = sites_repository::get_by_id(&store, site_id).expect("To be able to get the site from an ID");
git_helper::commit_changes(&site).unwrap();
return;
},
Err(_) => panic!()
}
}
}
pub fn run_background_processor(store: Arc<Store>) {
// the per-site debouncer senders are shared with spawned threads, hence Arc<Mutex<..>>
let threads: Arc<Mutex<HashMap<Id, Sender<()>>>> = Arc::new(Mutex::new(HashMap::new()));
loop {
let msg: Message = store.channel_receiver.recv().unwrap();
match msg {
Message::DummyMessage { x, y } => {
println!("Received dummy message {} {}", x, y);
},
Message::UpdatedPost { post_id } => {
handle_updated_post(
store.clone(),
threads.clone(),
post_id
);
},
_ => {
println!("Received message");
}
}
}
}
fn handle_updated_post(store: Arc<Store>, threads_ref: Arc<Mutex<HashMap<Id, Sender<()>>>>, post_id: Id) {
println!("received UpdatedPost message");
let post = posts_repository::get_by_id(&store, post_id).unwrap();
let site_id = posts_repository::get_post_site_id(&store, &post).unwrap();
let mut local_threads = threads_ref.lock().unwrap();
match local_threads.get(&site_id) {
Some(sender) => {
println!("Sending signal to existing debouncer thread");
sender.send(()).expect("To be able to send to the site debouncer thread.")
},
None => {
println!("Spawning new site debouncer thread");
let (new_channel_sender, new_channel_receiver) = unbounded();
local_threads.insert(site_id.clone(), new_channel_sender);
let store_arg = store.clone();
let threads_arg = Arc::clone(&threads_ref);
// drop to unlock the mutex
drop(local_threads);
thread::spawn(move || {
run_debouncer(store_arg, site_id.clone(), new_channel_receiver);
// remove the debouncer thread
threads_arg.lock().unwrap().remove(&site_id.clone());
});
}
}
}

src/front_matter.rs Normal file

@@ -0,0 +1,85 @@
use std::io;
use std::path::Path;
use std::fs;
use serde::{Serialize, Deserialize};
use crate::utils::pub_fields;
pub_fields! {
#[derive(Debug, Deserialize, Serialize)]
struct PostFrontMatterHeader {
id: Option<String>,
title: String,
date: String
}
}
pub_fields! {
#[derive(Debug, Deserialize)]
struct ScanPostingDetailledOutput {
summary: PostFrontMatterHeader,
content: String
}
}
#[derive(Debug)]
pub enum ScanPostingFileError {
IoError(io::Error),
HeaderParseError,
InvalidHeader
}
pub const INDEX_PATH: &str = "index.md";
/// post_path must be the path to the post directory containing index.md
pub fn scan_posting_summary(post_path: &Path) -> Result<(PostFrontMatterHeader, String), ScanPostingFileError> {
// TODO: use buffered IO to prevent reading all the string
let index_path = post_path.join(INDEX_PATH);
let file_str: String = match fs::read_to_string(&index_path) {
Ok(res) => res,
Err(err) => {
error!("Cannot read path {:?} (expected index.md)", &index_path);
return Err(ScanPostingFileError::IoError(err));
}
};
let after_header_start_delimiter = match file_str.split_once("---") {
Some((delimiter, following)) => {
if !delimiter.trim().is_empty() {
error!("Unexpected shit before front matter header");
return Err(ScanPostingFileError::HeaderParseError);
}
following
},
None => {
error!("Did not find front matter YAML starting delimiter.");
return Err(ScanPostingFileError::HeaderParseError)
}
};
let (header_content, following) = match after_header_start_delimiter.split_once("---") {
Some(components) => components,
None => {
error!("Did not find front matter YAML end delimiter.");
return Err(ScanPostingFileError::HeaderParseError)
}
};
Ok((
match serde_yaml::from_str(&header_content) {
Ok(res) => res,
Err(err) => {
error!("Failed to parse front matter YAML header {:?}", err);
return Err(ScanPostingFileError::InvalidHeader)
}
},
following.to_string()
))
}
pub fn scan_posting_detailled(file_path: &Path)
-> Result<ScanPostingDetailledOutput, ScanPostingFileError>
{
let (summary, following) = scan_posting_summary(file_path)?;
Ok(ScanPostingDetailledOutput {
summary,
content: following.trim_start().to_string()
})
}
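
For reference, this is the `index.md` layout the scanner expects and that `sync_to_disk` in `src/actions/posts.rs` writes back (values illustrative; `id` is the post's u64 id in hex, and `date` must match one of the formats accepted by `parse_human_date` in `src/utils.rs`):

```markdown
---
id: 9f3c2a1b0e8d4c6a
title: Some post title
date: 2023-01-20 10:30
---

The markdown body of the post follows the second `---` delimiter.
```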

src/git_helper.rs Normal file

@@ -0,0 +1,88 @@
// all functions that interact with the git repository belong here
use crate::models::{Post, Site};
use std::path::Path;
use std::sync::Arc;
use crate::Store;
use git2::{Repository, Error as Git2Error, Remote, RepositoryState, IndexEntry};
use crate::repositories::posts as posts_repository;
use crate::utils::{unwrap_or_return, unwrap_opt_or_return};
#[derive(Debug)]
pub enum GitSyncError {
Other,
BrokenRepository,
CannotOpenRepository,
CannotOpenHead,
CannotGetIndex
}
/// commit all the changes made on that site
pub fn commit_changes(site: &Site) -> Result<(), GitSyncError> {
// get the site from the post
// get the repo path
// open repository
// add all the content/ folder to the staging index (make the files go from WT_NEW to
// INDEX_NEW)
// commit
// add to the push unbounced loop
let repo = unwrap_or_return!(
Repository::open(&site.repository_path),
|_| GitSyncError::CannotOpenRepository
);
// TODO: optionally give insight about what operations have been done (e.g. create, update, delete
// a post)
let commit_message = String::from("hugotator changes sync");
// get the head
let head = unwrap_or_return!(repo.head(), |err| {
error!("It looks like this repository has no commits yet, got {:?}", err);
GitSyncError::CannotOpenHead
});
let mut index = unwrap_or_return!(
repo.index(),
|_| GitSyncError::CannotGetIndex
);
// let mut opts = git2::StatusOptions::new();
// opts.include_untracked(true);
// opts.include_ignored(true);
// let stat = repo.statuses(None);
// stat.iter().for_each(|x| {
// println!("---");
// x.iter().for_each(|x| {
// dbg!(x.status(), x.path().unwrap());
// });
// println!("---");
// });
let paths: Vec<&str> = vec!["."];
let cb = &mut |path: &Path, _matched_spec: &[u8]| -> i32 {
let stat = repo.status_file(path).unwrap();
dbg!(("add_cb", stat, path));
// returning 0 from the callback confirms adding this path to the staging index (non-zero skips it)
0
};
let cb_ref = Some(cb as &mut git2::IndexMatchedPath);
index.add_all(paths, git2::IndexAddOption::DEFAULT, cb_ref);
index.write().unwrap();
let sig = git2::Signature::now("Hugotator", "auto-commit@hugotator.org").unwrap();
let tree_id = index.write_tree().unwrap();
let parent = repo.head().ok().and_then(|h| h.target()).unwrap();
let parent = repo.find_commit(parent).unwrap();
let commit_res = repo.commit(
Some("HEAD"),
&sig,
&sig,
&commit_message,
&repo.find_tree(tree_id).unwrap(),
&[&parent],
);
dbg!(commit_res);
Ok(())
}

src/init_site.rs

@@ -1,25 +1,24 @@
use git2::{Repository, Error as Git2Error, Remote, RepositoryState};
use git2::string_array::StringArray;
-use super::models::{SiteConfig, SiteContentBindingConfig, Site, SiteSection, Posting};
+use crate::Store;
+use super::models::{SiteConfig, SiteContentBindingConfig, Site, SiteSection, Post, Id};
use std::path::{PathBuf, Path};
-use std::ffi::{OsString, CString};
use std::fs;
use std::io;
-use chrono::prelude::{Utc};
+use chrono::prelude::{DateTime, Utc, NaiveDate, NaiveDateTime};
+use super::front_matter::scan_posting_summary;
+use crate::REPOSITORIES_CONTAINER_PATH;
+use super::utils::{generate_id, hash_to_u64, parse_human_date};
+use sha2::{Sha256, Digest};
+use crate::utils::parse_hex_id;
use crate::utils::{
    unwrap_opt_or_return,
    unwrap_or_return,
    os_str_to_str,
-    bytes2path
+    bytes2path,
};
-use log::error;
-use log::info;
-use log::warn;
-const REPOSITORIES_CONTAINER_PATH: &str = "tmp/repositories";

#[derive(Debug)]
struct DetailledRemote {
    slug: String, // expected to be origin

@@ -128,16 +127,22 @@ fn ls_files_repository(repo_handle: &Repository) -> Result<Vec<GitFile>, LsFilesErr> {
    Ok(files)
}

-fn scan_section_dir(content_dir_path: &Path, section_conf: &SiteContentBindingConfig) -> Result<SiteSection, InitSiteErr> {
-    let section_dir = content_dir_path.join(&section_conf.slug);
-    debug!("Scanning section dir {:?}...", &section_dir);
+fn scan_section_dir(
+    store: &mut Store,
+    section: &mut SiteSection,
+    section_path: &Path,
+    section_conf: &SiteContentBindingConfig
+) -> Result<(), InitSiteErr> {
+    // get the first 64 bits from the hash of the site slug and
+    // section slug
+    debug!("Scanning section dir {:?}...", &section_path);
    // check that directory exists
-    if !section_dir.exists() {
+    if !section_path.exists() {
        // note: converting PathBuf into String can fail if there is a non-unicode char
        error!(
            "Invalid binding: cannot find section directory {}",
-            os_str_to_str!(section_dir, InitSiteErr::NonUnicodePath)
+            os_str_to_str!(section_path, InitSiteErr::NonUnicodePath)
        );
        return Err(InitSiteErr::InvalidContentBinding);
    }

@@ -147,8 +152,7 @@ fn scan_section_dir(...)
    // then look inside each dir and check for index.md file
    // then parse the yaml frontmatter header
-    let mut postings: Vec<Posting> = vec![];
-    let entries = unwrap_or_return!(fs::read_dir(section_dir), |e| InitSiteErr::IoError(e));
+    let entries = unwrap_or_return!(fs::read_dir(section_path), |e| InitSiteErr::IoError(e));

    for entry_res in entries {
        let entry: fs::DirEntry = unwrap_or_return!(entry_res, |e| InitSiteErr::IoError(e));

@@ -158,27 +162,61 @@ fn scan_section_dir(...)
        }
        let slug = entry.path();

-        postings.push(Posting {
+        let post_dir_path = entry.path();
+        let index_path = post_dir_path.join("index.md");
+        if !index_path.exists() {
+            error!("In section entry {:?}: did not find index.md", slug);
+            return Err(InitSiteErr::InvalidPosting);
+        }
+        let summary = match scan_posting_summary(&post_dir_path) {
+            Ok((summary, _following)) => summary,
+            Err(_err) => {
+                error!("Failed to scan posting file for section entry {:?}", slug);
+                return Err(InitSiteErr::InvalidPosting);
+            }
+        };
+        let created_at = match parse_human_date(&summary.date) {
+            Some(res) => res,
+            None => {
+                error!("Failed to parse datetime {:?}", summary.date);
+                return Err(InitSiteErr::InvalidPosting);
+            }
+        };
+        let id = match summary.id {
+            Some(hex_id) => match parse_hex_id(&hex_id) {
+                Some(id) => id,
+                None => {
+                    error!("Malformed id in front matter header");
+                    return Err(InitSiteErr::InvalidPosting);
+                }
+            },
+            None => generate_id()
+        };
+        let post = Post {
+            id,
+            path: post_dir_path,
            kind: section_conf.posting_kind.clone(),
            slug: os_str_to_str!(
                slug.file_name().unwrap(),
                InitSiteErr::NonUnicodePath
            ),
-            title: "title".to_string(),
-            created_at: Utc::now()
-        })
+            title: summary.title,
+            section_id: section.id.clone(),
+            created_at
+        };
+        store.posts.write().unwrap().push(post);
    }
-    return Ok(SiteSection {
-        slug: section_conf.slug.clone(),
-        postings
-    })
+
+    Ok(())
}

#[derive(Debug)]
pub enum InitSiteErr {
    RepositoryCloneErr(Git2Error),
-    ExistingRepositoryInvalid(Git2Error),
    CannotIndexRepository(LsFilesErr),
    CannotGetRemotes(DetailledRemotesErr),
    InvalidRemoteLayout,

@@ -189,10 +227,11 @@ pub enum InitSiteErr {
    RepositoryNotClean,
    InvalidContentBinding,
    IoError(io::Error),
-    NonUnicodePath
+    NonUnicodePath,
+    InvalidPosting
}

-pub fn init_site(site_conf: &SiteConfig) -> Result<Site, InitSiteErr> {
+pub fn init_site(store: &mut Store, site_conf: &SiteConfig) -> Result<Site, InitSiteErr> {
    let remote_url = &site_conf.git_remote_url;

    // check if the path exists

@@ -202,10 +241,12 @@ pub fn init_site(...)
    // try to find the binding in the `content` directory provided by site config
    // scan the existing content for markdown files and call the scan_markdown function
    // construct the struct that represent the current state of the site
+    // TODO: add whitelist CORS support

    let clone_destination: String = format!("{REPOSITORIES_CONTAINER_PATH}/{0}", site_conf.slug);
-    let repo_path = Path::new(&clone_destination);
-    let repo = if !repo_path.exists() {
+    let site_repo_path = Path::new(&clone_destination);
+    // TODO: move out the function that interacts with the git repository
+    let repo = if !site_repo_path.exists() {
        // do a narrow clone
        match Repository::clone(&remote_url, &clone_destination) {
            Ok(repo) => repo,

@@ -250,7 +291,7 @@ pub fn init_site(...)
    if files_list.is_empty() {
        return Err(InitSiteErr::EmptyRepository);
    }
-    dbg!(&files_list);
+    // dbg!(&files_list);

    // if !files_list.iter().any(|f| f.path.ends_with("config.toml")) {
    //     return Err(InitSiteErr::HugoConfigFileNotFound);

@@ -264,10 +305,27 @@ pub fn init_site(...)
        Err(VerifyRepositoryPathErr::CannotGetIndex(e)) => return Err(InitSiteErr::InvalidExistingRepository(e))
    };

-    let mut sections: Vec<SiteSection> = vec![];
-    let content_dir = repo_path.join("content");
+    // let mut sections: Vec<SiteSection> = vec![];
+    let mut sections_ids: Vec<Id> = vec![];
+    let content_base_path = site_repo_path.join("content");
    for content_binding in &site_conf.content_bindings {
-        sections.push(scan_section_dir(&content_dir, content_binding)?);
+        // generate section_id
+        let mut hasher: Sha256 = Sha256::new();
+        hasher.update(&site_conf.slug);
+        hasher.update(&content_binding.slug);
+        let section_id = Id(hash_to_u64(hasher));
+        let section_path = content_base_path.join(&content_binding.slug);
+        let mut section = SiteSection {
+            id: section_id.clone(),
+            name: content_binding.name.clone(),
+            slug: content_binding.slug.clone(),
+            path: section_path.clone()
+        };
+        scan_section_dir(store, &mut section, &section_path, content_binding)?;
+        store.sections.write().unwrap().push(section);
+        sections_ids.push(section_id);
+
        // match scan_section_dir(&content_dir, content_binding) {
        //     Ok(section) => {
        //         sections.push(section);

@@ -278,7 +336,13 @@ pub fn init_site(...)
    Ok(Site {
-        sections
+        id: generate_id(),
+        slug: site_conf.slug.clone(),
+        name: site_conf.name.clone(),
+        repository_path: site_repo_path.to_path_buf(),
+        sections: sections_ids,
+        allowed_users: vec![],
+        // allowed_origins: vec![]
    })
}

src/jobs.rs Normal file

@@ -0,0 +1,32 @@
use background_jobs::Job;
use anyhow::Error;
use std::future::{ready, Ready};
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
pub struct MyJob {
some_usize: usize,
other_usize: usize,
}
impl MyJob {
pub fn new(some_usize: usize, other_usize: usize) -> Self {
MyJob {
some_usize,
other_usize,
}
}
}
impl Job for MyJob {
type State = ();
type Future = Ready<Result<(), Error>>;
const NAME: &'static str = "MyJob";
fn run(self, _: Self::State) -> Self::Future {
info!("args: {:?}", self);
ready(Ok(()))
}
}

src/main.rs

@@ -1,80 +1,175 @@
+#![allow(unused)]
#[macro_use]
extern crate log;

+use std::sync::{RwLock, Arc};
+
mod models;
mod init_site;
-mod post;
+mod front_matter;
mod utils;
+mod web_api;
+mod repositories;
+mod actions;
+mod git_helper;
+mod background_jobs;

-use actix_web::{get, post, web, App, HttpResponse, HttpServer, Responder};
+use init_site::InitSiteErr;
use toml;
use std::fs;
use std::process::ExitCode;
-use models::{Config};
+use models::{Config, Site, SiteSection, User, Post, Id};
+use std::collections::hash_set::HashSet;
+use crossbeam_channel::{unbounded, Sender, Receiver};
+use models::Message;
+use std::thread;
+
+const REPOSITORIES_CONTAINER_PATH: &str = "tmp/repositories";

-#[get("/")]
-async fn hello() -> impl Responder {
-    HttpResponse::Ok().body("Ignition sequence started")
-}
-
-#[get("/posts")]
-async fn get_posts() -> impl Responder {
-    HttpResponse::Ok().body("test: get posts")
-}
-
-#[post("/echo")]
-async fn echo(req_body: String) -> impl Responder {
-    HttpResponse::Ok().body(req_body)
-}
-
-#[derive(Debug, Clone)]
-struct AppState {
-}
+#[derive(Debug)]
+pub struct Store {
+    version: String,
+    secret: String,
+    sites: RwLock<Vec<Site>>,
+    posts: RwLock<Vec<Post>>,
+    users: RwLock<Vec<User>>,
+    sections: RwLock<Vec<SiteSection>>,
+    sandbox: RwLock<u64>,
+    channel_sender: Sender<Message>,
+    channel_receiver: Receiver<Message>
+}

fn main() -> ExitCode {
+    // let password = b"passwords can be phrases";
+    // let salt = b"is this really random";
+    // let config = argon2::Config::default();
+    // let hash = argon2::hash_encoded(password, salt, &config).unwrap();
+    // dbg!(&hash);
+    // let matches = argon2::verify_encoded(&hash, password).unwrap();
+    // assert!(matches);
+    return start_server();
+}
+
+#[derive(Debug)]
+enum InitAppDataErr {
+    InitSiteErr(InitSiteErr),
+    DuplicatedSite
+}
+
+fn init_store(config: &Config) -> Result<Store, InitAppDataErr> {
+    let mut sites_slugs: HashSet<String> = HashSet::new();
+    let channel = unbounded();
+    let channel_sender: Sender<Message> = channel.0;
+    let channel_receiver: Receiver<Message> = channel.1;
+    let mut store = Store {
+        version: "v1.0.0".to_string(),
+        secret: config.secret.clone(),
+        sites: RwLock::new(vec![]),
+        sections: RwLock::new(vec![]),
+        users: RwLock::new(vec![]),
+        posts: RwLock::new(vec![]),
+        sandbox: RwLock::new(0),
+        channel_sender,
+        channel_receiver
+    };
+    // initialize sites
+    for site_conf in &config.sites {
+        info!("Initializing site {:?}..", &site_conf.slug);
+        if sites_slugs.contains(&site_conf.slug) {
+            error!("Found duplicated site {:?}", &site_conf.slug);
+            return Err(InitAppDataErr::DuplicatedSite);
+        }
+        let site_initialized = match init_site::init_site(&mut store, &site_conf) {
+            Ok(res) => res,
+            Err(e) => {
+                error!("Cannot initialize site");
+                debug!("{:?}", e);
+                return Err(InitAppDataErr::InitSiteErr(e))
+            }
+        };
+        info!("Site {:?} initialized.", &site_conf.slug);
+        debug!("{:#?}", site_initialized);
+        store.sites.write().unwrap().push(site_initialized);
+        sites_slugs.insert(site_conf.slug.clone());
+    }
+    // initialize users
+    store.users.write().unwrap().extend(
+        config.users
+            .iter()
+            .map(|user_config| {
+                User {
+                    username: user_config.username.clone(),
+                    password: user_config.password.clone()
+                }
+            })
+    );
+    Ok(store)
+}
+
+fn start_server() -> ExitCode {
    env_logger::init();
    info!("Starting a hugotator instance...");

    let config_str = match fs::read_to_string("./config.example.toml") {
        Ok(res) => res,
-        Err(e) => {
+        Err(err) => {
            error!("Cannot read the server config file.");
+            error!("{:?}", err);
            return ExitCode::FAILURE;
        }
    };
    let config: Config = match toml::from_str(&config_str) {
        Ok(res) => res,
-        Err(e) => {
+        Err(err) => {
            error!("Invalid TOML server config file.");
-            error!("{:?}", e);
+            error!("{:?}", err);
            return ExitCode::FAILURE;
        }
    };
    dbg!(&config);

-    // initialize all the sites
-    for site_conf in config.sites {
-        info!("Initializing site {:?}..", &site_conf.slug);
-        let site_initialized = match init_site::init_site(&site_conf) {
+    let store = Arc::new(
+        match init_store(&config) {
            Ok(res) => res,
-            Err(e) => {
-                error!("Cannot initialize site");
-                debug!("{:?}", e);
-                return ExitCode::FAILURE
+            Err(_err) => {
+                error!("Failed to initialize app store");
+                return ExitCode::FAILURE;
            }
-        };
-        info!("Site {:?} initialized.", &site_conf.slug);
-        debug!("{:#?}", site_initialized);
-    }
+        }
+    );
+    // debug!("store = {:#?}", &store);
+    debug!("Finished init sequence");
+
+    let shared_store = Arc::clone(&store);
+    thread::spawn(move || {
+        background_jobs::run_background_processor(shared_store);
+    });

    let bind_config = (
        config.server.as_ref().and_then(|sc| sc.host.clone()).unwrap_or_else(|| "127.0.0.1".to_string()),
        config.server.as_ref().and_then(|sc| sc.port).unwrap_or(6968),
    );
-    match actix_web_main(bind_config) {
+    // test area
+    // {
+    //     use crate::git_helper::commit_changes;
+    //     commit_changes(store.sites.read().unwrap().get(0).unwrap());
+    //     dbg!("end of commit changes");
+    // }
+    // return ExitCode::SUCCESS;
+    match web_api::actix_web_main(bind_config, store) {
        Ok(_) => (),
        Err(e) => {
            error!("Failed to start actix web main: {:?}", e);

@@ -85,17 +180,3 @@ fn main() -> ExitCode {
    return ExitCode::SUCCESS;
}
-
-#[actix_web::main]
-async fn actix_web_main(bind_config: (String, u16)) -> std::io::Result<()> {
-    HttpServer::new(|| {
-        App::new()
-            .service(hello)
-            .service(echo)
-            .service(get_posts)
-    })
-    .bind(bind_config)?
-    .run()
-    .await
-}

src/models.rs

@@ -1,41 +1,103 @@
-use serde::Deserialize;
-use chrono::prelude::{DateTime, Utc};
use crate::utils::pub_fields;
+use serde::{Serialize, Deserialize};
+use chrono::prelude::{DateTime, Utc};
+use std::path::PathBuf;

-pub_fields! {
-    #[derive(Debug)]
-    struct Post {
-        id: u64,
-        title: String,
-        description: String,
-        content: String,
-        created_at: DateTime<Utc>
-    }
-}
-
-pub_fields! {
-    #[derive(Default, Clone, Deserialize, Debug)]
-    struct ServerConfig {
-        host: Option<String>,
-        port: Option<u16>
-    }
-}
+// pub_fields! {
+//     #[derive(Debug)]
+//     struct Post {
+//         id: u64,
+//         title: String,
+//         description: String,
+//         content: String,
+//         date: DateTime<Utc>,
+//         kind: PostingKind
+//     }
+// }

#[derive(Clone, Deserialize, Debug)]
pub enum PostingKind {
    /// a micro-blogging kind of post (less than 80 words)
    Micro,
    /// a full article (more than 80 words)
    Article,
}

+#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq, Hash)]
+pub struct Id(pub u64);
+
+impl Id {
+    pub fn to_string(&self) -> String {
+        format!("{:x}", self.0)
+    }
+}
+
+impl Into<String> for Id {
+    fn into(self) -> String {
+        format!("{:x}", self.0)
+    }
+}
+
+// TODO: add get_section() and get_site() method on the Post model
+// TODO: remove use "Post" instead
+pub_fields! {
+    #[derive(Debug, Clone)]
+    struct Post {
+        id: Id,
+        path: PathBuf,
+        kind: PostingKind,
+        slug: String,
+        title: String,
+        created_at: DateTime<Utc>,
+        section_id: Id
+    }
+}
+
+pub_fields! {
+    #[derive(Debug, Clone)]
+    struct SiteSection {
+        id: Id, // generated from the slug
+        slug: String,
+        name: String,
+        path: PathBuf // path to the folder holding the posts
+    }
+}
+
+#[derive(Debug, Clone)]
+pub enum SiteMutation {
+    PostCreated,
+    PostUpdated,
+    PostDeleted
+}
+
+pub_fields! {
+    #[derive(Debug, Clone)]
+    struct Site {
+        id: Id,
+        slug: String,
+        name: String,
+        repository_path: PathBuf,
+        sections: Vec<Id>,
+        allowed_users: Vec<User>,
+        // actions: Vec<SiteMutation> // list of actions to include in the git commit
+    }
+}
+
+pub_fields! {
+    #[derive(Debug, Clone)]
+    struct User {
+        username: String,
+        password: String
+    }
+}
+
+// configuration types
pub_fields! {
    /// A hugo directory under `content`
    #[derive(Clone, Deserialize, Debug)]
    struct SiteContentBindingConfig {
+        name: String,
        slug: String,
        posting_kind: PostingKind
    }

@@ -45,41 +107,52 @@ pub_fields! {
    #[derive(Clone, Deserialize, Debug)]
    struct SiteConfig {
        slug: String, // for example "werobot_blog"
+        name: String, // for example "Le blog de We Robot"
        git_remote_url: String,
        content_path: String,
-        content_bindings: Vec<SiteContentBindingConfig>
+        content_bindings: Vec<SiteContentBindingConfig>,
+        allowed_users: Vec<String> // list of username allowed to manage this website
    }
}

pub_fields! {
-    #[derive(Debug)]
-    struct Posting {
-        kind: PostingKind,
-        slug: String,
-        title: String,
-        created_at: DateTime<Utc>
+    /// A hugo directory under `content`
+    #[derive(Clone, Deserialize, Debug)]
+    struct UserConfig {
+        username: String,
+        password: String
    }
}

pub_fields! {
-    #[derive(Debug)]
-    struct SiteSection {
-        slug: String,
-        postings: Vec<Posting>
-    }
-}
-
-pub_fields! {
-    #[derive(Debug)]
-    struct Site {
-        sections: Vec<SiteSection>
-    }
+    #[derive(Default, Clone, Deserialize, Debug)]
+    struct ServerConfig {
+        host: Option<String>,
+        port: Option<u16>
+    }
}

pub_fields! {
    #[derive(Clone, Deserialize, Debug)]
    struct Config {
+        secret: String,
        server: Option<ServerConfig>,
-        sites: Vec<SiteConfig>
+        sites: Vec<SiteConfig>,
+        users: Vec<UserConfig>
    }
}
+
+pub_fields! {
+    #[derive(Debug, Serialize, Deserialize)]
+    struct JsonWebTokenClaims {
+        sub: String, // the username
+        exp: usize,
+    }
+}
+
+pub enum Message {
+    UpdatedPost { post_id: Id },
+    NewPost { post_id: Id },
+    DummyMessage { x: f32, y: f32 }
+}


src/posts_repository.rs Normal file

@@ -0,0 +1,19 @@
use crate::REPOSITORIES_CONTAINER_PATH;
use std::fs;
use super::models::{Site, Post};
/// create post on disk and return a Post struct
pub fn create_post(site: Site, post: Post) {
// get the category (content-section) from the Post user-input struct
// then search if this section is binded in the site config
// if not, error
// get the binded content and call the path method to construct the path
// then check if the folder that match this entry slug is already here
// if yes, error
// create a new folder entry and add a index.md file
// write the yaml frontmatter header
// call a commit changes procedure
// trigger the push changes unbounced
}

src/repositories/mod.rs Normal file

@@ -0,0 +1,6 @@
pub mod posts;
pub mod sites;
pub enum RepositoryGenericErr {
NotFound
}

src/repositories/posts.rs Normal file

@@ -0,0 +1,46 @@
// the posts repository
// it defines an interface to manage entities in the in-memory database
// it must not be called directly as an API because it only manages in-memory entities
// if we want to create a post we need to use the "actions" module that abstracts the end-user
// actions
use std::sync::Arc;
use crate::models::{Id, Post, SiteSection, Site};
// use crate::utils::{unwrap_opt_or_return, unwrap_or_return};
// use super::RepositoryGenericErr;
use crate::Store;
// use crate::front_matter::scan_posting_detailled;
#[derive(Debug)]
pub enum ReadPostErr {
NotFound
}
pub fn get_by_id(store: &Arc<Store>, post_id: Id) -> Result<Post, ReadPostErr> {
let posts = store.posts.read().unwrap();
let post = match posts
.iter()
.find(|cand| cand.id == post_id) {
Some(post) => post,
None => return Err(ReadPostErr::NotFound)
};
Ok(post.clone())
}
pub fn get_post_section(store: &Arc<Store>, post: &Post) -> Option<SiteSection> {
let sections = store.sections.read().unwrap();
Some(sections.iter().find(|x| x.id == post.section_id)?.clone())
}
pub fn get_post_site(store: &Arc<Store>, post: &Post) -> Option<Site> {
let section = get_post_section(store, post)?;
let sites = store.sites.read().unwrap();
Some(sites.iter().find(|site| site.sections.contains(&section.id))?.clone())
}
pub fn get_post_site_id(store: &Arc<Store>, post: &Post) -> Option<Id> {
let section = get_post_section(store, post)?;
let sites = store.sites.read().unwrap();
Some(sites.iter().find(|site| site.sections.contains(&section.id))?.id.clone())
}
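
To illustrate the layering described in the header comment, a caller resolves the entity through the repository, then routes the mutation through the actions layer (hypothetical sketch; `rename_post` is not part of this commit):

```rust
use std::sync::Arc;
use crate::Store;
use crate::models::Id;
use crate::actions::posts as posts_actions;
use crate::repositories::posts as posts_repository;

// Hypothetical caller: read through the repository, mutate through actions.
fn rename_post(store: &Arc<Store>, post_id: Id, new_title: String, content: String) -> bool {
    match posts_repository::get_by_id(store, post_id) {
        // keep the original creation date; only title/content change
        Ok(post) => posts_actions::update(store, &post, new_title, content, post.created_at).is_ok(),
        Err(_) => false,
    }
}
```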

src/repositories/sites.rs Normal file

@@ -0,0 +1,19 @@
use std::sync::Arc;
use crate::models::{Id, Site};
use crate::Store;
#[derive(Debug)]
pub enum ReadSiteErr {
NotFound
}
pub fn get_by_id(store: &Arc<Store>, site_id: Id) -> Result<Site, ReadSiteErr> {
let sites = store.sites.read().unwrap();
let site = match sites
.iter()
.find(|cand| cand.id == site_id) {
Some(site) => site,
None => return Err(ReadSiteErr::NotFound)
};
Ok(site.clone())
}

src/utils.rs

@@ -1,6 +1,8 @@
-use std::path::{Component, Path, PathBuf};
-use std::ffi::{CString, OsStr};
+use std::path::Path;
+use std::ffi::OsStr;
+use crate::models::Id;

+// 'ident' is short for identifier token
macro_rules! pub_fields {
    {
        $(#[doc = $doc:expr])?

@@ -79,3 +81,74 @@ pub fn bytes2path(b: &[u8]) -> &Path {
    Path::new(str::from_utf8(b).unwrap())
}
use rand::{thread_rng, Rng};
use rand::distributions::Alphanumeric;
pub fn random_string(n: usize) -> String {
// TODO: find a better way to do this, as it collects and then re-iterates
thread_rng()
.sample_iter(&Alphanumeric)
.take(n)
.map(char::from)
.collect::<String>()
.to_uppercase()
}
pub fn generate_id() -> Id {
Id(thread_rng().gen())
}
pub fn parse_hex_id(id_hex: &String) -> Option<Id> {
Some(Id(match u64::from_str_radix(&id_hex, 16) {
Ok(res) => res,
Err(err) => return None
}))
}
pub fn compare_id(target_id_hex: &String, id_candidate: &Id) -> bool {
let target_id: Id = Id(u64::from_str_radix(&target_id_hex, 16).unwrap_or(0));
id_candidate == &target_id
}
use sha2::{Sha256, Digest};
pub fn hash_to_u64(hasher: Sha256) -> u64 {
let res = hasher.finalize();
let mut bytes: [u8; 8] = [0; 8];
bytes.copy_from_slice(&res[0..8]);
let res_u64: u64 = u64::from_le_bytes(bytes);
res_u64
// let mut word_array = [0u8; 8];
// word_array.copy_from_slice(&digest[0..8]);
// u64::from_le_bytes(word_array)
}
use chrono::prelude::{DateTime, Utc, NaiveDateTime, NaiveDate};
pub fn parse_human_date(human_date: &str) -> Option<DateTime<Utc>> {
match NaiveDate::parse_from_str(human_date, "%Y-%m-%d") {
Ok(naive_date) => {
let naive_datetime: NaiveDateTime = naive_date.and_hms_opt(0, 0, 0).unwrap();
return Some(DateTime::<Utc>::from_utc(naive_datetime, Utc));
},
Err(err) => {
debug!("Failed to parse naive date time: {:?}", err);
}
};
match NaiveDateTime::parse_from_str(human_date, "%Y-%m-%d %H:%M") {
Ok(naive_datetime) => {
return Some(DateTime::<Utc>::from_utc(naive_datetime, Utc));
},
Err(err) => {
debug!("Failed to parse naive date time: {:?}", err);
}
};
match NaiveDateTime::parse_from_str(human_date, "%Y-%m-%d %H-%M-%s") {
Ok(naive_datetime) => {
return Some(DateTime::<Utc>::from_utc(naive_datetime, Utc));
},
Err(_e) => return None
};
}

src/web_api/login.rs Normal file

@@ -0,0 +1,99 @@
use actix_web::{get, post, web, HttpResponse, Responder};
use serde::{Serialize, Deserialize};
use crate::Store;
use jsonwebtoken;
use chrono::{Duration, Utc};
use crate::models::JsonWebTokenClaims;
use super::middlewares::auth::AuthorizationService;
use super::WebState;
#[derive(Deserialize, Debug)]
struct LoginInput {
username: String,
password: String
}
#[derive(Serialize)]
struct LoginOutput {
jwt: String
}
#[post("/login")]
async fn login(
app_state: WebState,
// TODO: customize Actix deserialization error to have json instead of text/plain
// content type
input: web::Json<LoginInput>
) -> impl Responder {
let users = app_state.users.read().unwrap();
let user_opt = users
.iter()
.find(|user| user.username == input.username);
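// verifying against a dummy hash when the username is unknown keeps failed-login timing
// similar for existing and non-existing users (limits user enumeration)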
const DUMMY_HASH: &str = "$argon2i$v=19$m=4096,t=3,p=1$aXMgdGhpcyByZWFsbHkgcmFuZG9t$V9HA8SSXKd/dQPVKEl2mEB/zhvbRpqkjQm0djDwDr70";
let password_hash = match &user_opt {
Some(user) => &user.password,
None => DUMMY_HASH
};
let password_verified = match argon2::verify_encoded(
&password_hash,
input.password.as_bytes()
) {
Ok(res) => res,
Err(e) => {
error!("argon2::verify_encoded failed {:?}", e);
return HttpResponse::InternalServerError()
.reason("cannot verify password")
.finish();
}
};
if user_opt.is_none() || !password_verified {
return HttpResponse::Unauthorized()
.finish();
}
let expiration_timestamp = Utc::now()
.checked_add_signed(Duration::days(15))
.expect("invalid timestamp")
.timestamp();
let token_claims = JsonWebTokenClaims {
sub: user_opt.unwrap().username.clone(),
exp: expiration_timestamp as usize
};
let token = match jsonwebtoken::encode(
&jsonwebtoken::Header::default(),
&token_claims,
&jsonwebtoken::EncodingKey::from_secret(app_state.secret.as_ref())
) {
Ok(res) => res,
Err(err) => {
error!("Failed to create a JWT {:?}", err);
return HttpResponse::InternalServerError()
.reason("failed to create a token")
.finish()
}
};
// TODO: Handle token creation errors as 500
HttpResponse::Ok()
.json(LoginOutput {
jwt: token
})
}
#[derive(Serialize)]
struct CurrentUserOutput {
username: String
}
#[get("/me")]
async fn get_current_user(
// app_state: web::Data<AppState>,
auth: AuthorizationService
) -> impl Responder {
HttpResponse::Ok().json(CurrentUserOutput {
username: auth.user.username
})
}

src/web_api/middlewares/auth.rs Normal file

@@ -0,0 +1,64 @@
use crate::models::{JsonWebTokenClaims, User};
use actix_web::error::ErrorUnauthorized;
use actix_web::{dev, Error, FromRequest, HttpRequest, web};
use futures::future::{err, ok, Ready};
use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation};
use super::super::WebState;
use crate::utils::pub_fields;
pub_fields! {
struct AuthorizationService {
user: User
}
}
impl FromRequest for AuthorizationService {
type Error = Error;
type Future = Ready<Result<AuthorizationService, Error>>;
fn from_request(
req: &HttpRequest,
_payload: &mut dev::Payload
) -> Self::Future {
let authorization_header = req.headers().get("Authorization");
let app_data = req.app_data::<WebState>().unwrap();
let header_value = match authorization_header {
Some(val) => val,
None => {
return err(ErrorUnauthorized("Missing Authorization header"))
}
};
let split: Vec<&str> = match header_value.to_str() {
Ok(res) => res.split("Bearer").collect(),
Err(_) => return err(ErrorUnauthorized("Malformed token"))
};
let token = match split.get(1) {
Some(res) => res.trim(),
None => return err(ErrorUnauthorized("Malformed token"))
};
match decode::<JsonWebTokenClaims>(
token,
&DecodingKey::from_secret(app_data.secret.as_bytes()),
&Validation::new(Algorithm::HS256),
) {
Ok(parsed_jwt) => ok(AuthorizationService {
user: match app_data.users.read().unwrap()
.iter()
.find(|user| user.username == parsed_jwt.claims.sub) {
Some(u) => u.clone(),
None => {
error!("User not found in JWT");
return err(ErrorUnauthorized("invalid token!"))
}
}
}),
Err(decode_err) => {
dbg!(decode_err);
err(ErrorUnauthorized("invalid token!"))
},
}
}
}

src/web_api/middlewares/mod.rs Normal file

@@ -0,0 +1 @@
pub mod auth;

src/web_api/mod.rs Normal file

@@ -0,0 +1,153 @@
use actix_web::{
get, web, App, HttpServer, Responder,
web::Data, middleware::Logger};
use std::sync::{RwLock, Arc};
use serde::Serialize;
use super::Store;
mod posts;
mod sites;
mod middlewares;
mod login;
use super::models::Message;
use actix_web::dev::{Service, ServiceRequest};
use futures::FutureExt;
pub type WebState = web::Data<Arc<Store>>;
// Home page
#[derive(Serialize)]
struct HomeResponseBody {
version: String,
sandbox: u64
}
#[get("/")]
async fn home(state: WebState) -> impl Responder {
HttpResponse::Ok().json(HomeResponseBody {
version: String::from(&state.version),
sandbox: *state.sandbox.read().unwrap()
})
}
// #[get("/read")]
// async fn test_read(app_state: WebState) -> impl Responder {
// let s = app_state.read().unwrap();
// for _ in 0..3 {
// std::thread::sleep(std::time::Duration::from_secs(1));
// }
// HttpResponse::Ok().json(HomeResponseBody {
// version: String::from(""),
// sandbox: s.sandbox
// })
// }
// #[get("/mutation")]
// async fn test_mutation(app_state: web::Data<Arc<Store>>) -> impl Responder {
// let mut s = app_state.write().unwrap();
// for _ in 0..3 {
// s.sandbox += 1;
// std::thread::sleep(std::time::Duration::from_secs(1));
// }
// HttpResponse::Ok().json(HomeResponseBody {
// version: String::from(""),
// sandbox: 0
// })
// }
// #[get("/short_mutation")]
// async fn test_short_mutation(app_state: web::Data<RwLock<Store>>) -> impl Responder {
// let mut s = app_state.write().unwrap();
// s.sandbox += 1;
// HttpResponse::Ok().json(HomeResponseBody {
// version: String::from(""),
// sandbox: 0
// })
// }
#[get("/long")]
async fn long_task(app_state: WebState) -> impl Responder {
// this works
let hand = std::thread::spawn(|| {
std::thread::sleep(std::time::Duration::from_secs(2));
println!("Thread sleepy");
});
HttpResponse::Ok().finish()
}
#[get("/send_msg")]
async fn send_message(app_state: WebState) -> impl Responder {
app_state.channel_sender.send(Message::DummyMessage {
x: 10.0,
y: 5.0
});
HttpResponse::Ok().finish()
}
use actix_web::{
body::{BoxBody, MessageBody},
dev::{ResponseHead, ServiceResponse},
http::{header, StatusCode},
HttpRequest, HttpResponse, ResponseError,
};
use futures::prelude::*;
use actix_web::http::header::{HeaderName, HeaderValue};
#[actix_web::main]
pub async fn actix_web_main(
bind_config: (String, u16),
store: Arc<Store>
) -> std::io::Result<()> {
info!("Starting a web server on {:?}", bind_config);
let app_data = Data::new(store);
/// Middleware that adds a permissive CORS header to every response
fn cors_middleware<S, B>(
req: ServiceRequest,
srv: &S,
) -> impl Future<Output = actix_web::Result<ServiceResponse>> + 'static
where
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = actix_web::Error>,
B: MessageBody + 'static,
S::Future: 'static,
{
let fut = srv.call(req);
async {
let mut res = fut.await?.map_into_boxed_body();
dbg!(res.headers_mut());
res.headers_mut().insert(header::ACCESS_CONTROL_ALLOW_ORIGIN, HeaderValue::from_static("*"));
Ok(res)
}
}
HttpServer::new(move || {
App::new()
.app_data(Data::clone(&app_data))
.wrap_fn(cors_middleware)
// .wrap_fn(|req, srv| {
// srv.call(req).map(|res| cors_wrapper(req, res))
// })
.service(home)
.service(long_task)
.service(send_message)
// sites and sections
.service(sites::get_many_sites)
.service(sites::get_one_site)
.service(sites::get_one_section)
// auth
.service(login::login)
.service(login::get_current_user)
// posts
.service(posts::get_many_posts)
.service(posts::get_one_post)
.service(posts::create_post)
.service(posts::update_post)
.service(posts::delete_one_post)
.wrap(Logger::new("%a %{User-Agent}i"))
})
.bind(bind_config)?
.run()
.await
}

src/web_api/posts.rs Normal file

@@ -0,0 +1,221 @@
use actix_web::{get, put, post, delete, web, HttpResponse, Responder};
use serde::{Serialize, Deserialize};
use crate::front_matter::scan_posting_detailled;
use super::middlewares::auth::AuthorizationService;
use super::super::utils::{unwrap_opt_or_return, pub_fields, generate_id, parse_human_date};
use crate::models::{PostingKind, Post};
use std::path::PathBuf;
use chrono::prelude::{DateTime, Utc};
use crate::Store;
use crate::web_api::WebState;
use crate::actions::posts as posts_actions;
use crate::repositories::RepositoryGenericErr;
use crate::repositories::posts as posts_repository;
use crate::utils::parse_hex_id;
use crate::repositories::posts::ReadPostErr;
pub_fields! {
#[derive(Serialize)]
struct PostSummaryOutput {
id: String,
slug: String,
title: String,
created_at: String
}
}
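// GET /posts: list every known post as a lightweight summary.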
#[get("/posts")]
async fn get_many_posts(
app_state: WebState
) -> impl Responder {
let posts = app_state.posts.read().unwrap();
let res: Vec<PostSummaryOutput> = posts
.iter()
.map(|post| {
PostSummaryOutput {
id: post.id.to_string(),
slug: post.slug.clone(),
title: post.title.clone(),
created_at: post.created_at.to_rfc3339_opts(chrono::SecondsFormat::Secs, true)
}
})
.collect();
HttpResponse::Ok().json(res)
}
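// Detailed representation of a post, including the body content read from the file on disk.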
#[derive(Serialize)]
struct PostDetailledOutput {
id: String,
slug: String,
title: String,
created_at: String,
content: String
}
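// GET /posts/{id}: fetch one post by hex id and load its full content from disk.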
#[get("/posts/{id}")]
async fn get_one_post(
path: web::Path<(String, )>,
app_state: WebState
) -> impl Responder {
let post = match posts_repository::get_by_id(
&app_state,
match parse_hex_id(&path.0) {
Some(id) => id,
None => return HttpResponse::BadRequest().json("Invalid id url param")
}
) {
Ok(p) => p,
Err(ReadPostErr::NotFound) => return HttpResponse::NotFound().json("Post not found"),
Err(_) => return HttpResponse::InternalServerError().finish()
};
// actually read the file on disk
let scan_out = match scan_posting_detailled(&post.path) {
Ok(res) => res,
Err(err) => {
error!("Could not scan posting details {:?}", err);
return HttpResponse::InternalServerError().json("Whoops")
}
};
let res = PostDetailledOutput {
id: post.id.to_string(),
slug: post.slug.clone(),
title: post.title.clone(),
created_at: post.created_at.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
content: scan_out.content
};
HttpResponse::Ok()
.json(res)
}
#[derive(Deserialize)]
struct CreatePostInput {
title: String,
created_at: String,
content: String,
section_id: String
}
#[derive(Serialize)]
struct CreatePostOutput {
id: String
}
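// POST /posts: create a post inside an existing section; requires a valid JWT.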
#[post("/posts")]
async fn create_post(
app_state: WebState,
body: web::Json<CreatePostInput>,
_auth: AuthorizationService
) -> impl Responder {
// get section
let sections = app_state.sections.read().unwrap();
let section = match sections
.iter()
.find(|cand| crate::utils::compare_id(&body.section_id, &cand.id))
{
Some(section) => section.clone(),
None => return HttpResponse::NotFound().json("section not found")
};
use posts_actions::PostCreateError;
let created_at = match parse_human_date(&body.created_at) {
Some(r) => r,
None => return HttpResponse::BadRequest().json("invalid created_at format")
};
match posts_actions::create(
&app_state,
&section,
body.title.clone(),
body.content.clone(),
created_at
) {
Err(PostCreateError::SlugNotUnique) => {
HttpResponse::BadRequest().json("title provided not unique when sluggified")
},
Err(_) => {
HttpResponse::BadRequest().json("cannot create post")
},
Ok(post_id) => {
HttpResponse::Ok().json(CreatePostOutput {
id: post_id.to_string()
})
}
}
}
#[derive(Deserialize)]
struct UpdatePostInput {
title: String,
created_at: String,
content: String
}
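// PUT /posts/{id}: update the title, creation date and content of an existing post; requires a valid JWT.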
#[put("/posts/{id}")]
async fn update_post(
app_state: WebState,
path: web::Path<(String, )>,
body: web::Json<UpdatePostInput>,
_auth: AuthorizationService
) -> impl Responder {
let post = match posts_repository::get_by_id(
&app_state,
match parse_hex_id(&path.0) {
Some(id) => id,
None => return HttpResponse::BadRequest().json("Invalid id url param")
}
) {
Ok(p) => p,
Err(ReadPostErr::NotFound) => return HttpResponse::NotFound().json("Post not found"),
Err(_) => return HttpResponse::InternalServerError().finish()
};
use posts_actions::PostUpdateError;
let created_at = match parse_human_date(&body.created_at) {
Some(r) => r,
None => return HttpResponse::BadRequest().json("invalid created_at format")
};
match posts_actions::update(
&app_state,
&post,
body.title.clone(),
body.content.clone(),
created_at
) {
Ok(()) => (),
Err(PostUpdateError::SlugNotUnique) => {
return HttpResponse::BadRequest().json("title provided not unique when sluggified")
},
Err(_) => {
return HttpResponse::BadRequest().json("cannot create post")
}
}
HttpResponse::NoContent().finish()
}
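// DELETE /posts/{id}: resolve a post by hex id and remove it; requires a valid JWT
// like the other mutating endpoints.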
#[delete("/posts/{id}")]
async fn delete_one_post(
path: web::Path<(String, )>,
app_state: WebState,
_auth: AuthorizationService
) -> impl Responder {
let post = match posts_repository::get_by_id(
&app_state,
match parse_hex_id(&path.0) {
Some(id) => id,
None => return HttpResponse::BadRequest().json("Invalid id url param")
}
) {
Ok(p) => p,
Err(ReadPostErr::NotFound) => return HttpResponse::NotFound().json("Post not found"),
Err(_) => return HttpResponse::InternalServerError().finish()
};
match posts_actions::remove(&app_state, &post) {
Ok(_) => (),
Err(_) => return HttpResponse::InternalServerError().finish()
}
HttpResponse::NoContent().finish()
}

127 src/web_api/sites.rs Normal file
@@ -0,0 +1,127 @@
use actix_web::{get, web, HttpResponse, Responder};
use serde::Serialize;
use crate::models::{SiteSection, Post};
use super::WebState;
#[derive(Serialize)]
struct SiteSummaryOutput {
slug: String,
name: String
}
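// GET /sites: list every configured site as a slug/name pair.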
#[get("/sites")]
async fn get_many_sites(
app_state: WebState
) -> impl Responder {
let res: Vec<SiteSummaryOutput> = app_state
.sites.read().unwrap()
.iter()
.map(|site| {
SiteSummaryOutput {
slug: site.slug.clone(),
name: site.name.clone(),
}
})
.collect();
HttpResponse::Ok().json(res)
}
#[derive(Serialize)]
struct SectionSummaryOutput {
id: String,
slug: String,
name: String
}
#[derive(Serialize)]
struct SiteDetailledOutput {
slug: String,
name: String,
sections: Vec<SectionSummaryOutput>
}
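// GET /sites/{slug}: resolve one site by slug and embed summaries of its sections.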
#[get("/sites/{slug}")]
async fn get_one_site(
path: web::Path<(String, )>,
app_state: WebState
) -> impl Responder {
let sites = app_state.sites.read().unwrap();
let sections = app_state.sections.read().unwrap();
let site = match sites
.iter()
.find(|site| {
site.slug == path.0
}) {
Some(site) => site.clone(),
None => return HttpResponse::NotFound().json("not found")
};
let site_sections: Vec<&SiteSection> = sections
.iter()
.filter(|section_candidate|
site.sections.contains(&section_candidate.id)
)
.collect();
HttpResponse::Ok().json(
SiteDetailledOutput {
slug: site.slug.clone(),
name: site.name.clone(),
sections: site_sections
.iter().map(|s| {
SectionSummaryOutput {
id: s.id.to_string(),
slug: s.slug.clone(),
name: s.name.clone()
}
}).collect()
}
)
}
use super::posts::PostSummaryOutput;
#[derive(Serialize)]
struct SectionDetailledOutput {
id: String,
slug: String,
name: String,
posts: Vec<PostSummaryOutput>
}
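// GET /sections/{id}: resolve one section by hex id and list the posts that belong to it.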
#[get("/sections/{id}")]
async fn get_one_section(
path: web::Path<(String, )>,
app_state: WebState
) -> impl Responder {
let sections = app_state.sections.read().unwrap();
let posts = app_state.posts.read().unwrap();
let section = match sections
.iter()
.find(|cand| crate::utils::compare_id(&path.0, &cand.id))
{
Some(section) => section.clone(),
None => return HttpResponse::NotFound().json("not found")
};
let section_posts: Vec<&Post> = posts
.iter()
.filter(|post_candidate|
post_candidate.section_id == section.id
)
.collect();
HttpResponse::Ok().json(
SectionDetailledOutput {
id: section.id.to_string(),
slug: section.slug,
name: section.name,
posts: section_posts
.iter().map(|p| {
PostSummaryOutput {
id: p.id.to_string(),
slug: p.slug.clone(),
title: p.title.clone(),
created_at: p.created_at.to_rfc3339_opts(chrono::SecondsFormat::Secs, true)
}
}).collect()
}
)
}