Compare commits

16 commits:
c7f1f912f0, eb55d13c01, 182b17c47f, a5240fea57,
754e717a58, 77970da8b3, b9bc57c1e0, 744857d685,
1209b0eb36, 40cd5bdca5, 9dde91f8ed, 47570bf9e3,
8e1b036386, 8b5c128bfd, 7b1fb7dae3, 58c1b8a21a

23 changed files with 742 additions and 606 deletions

Cargo.lock (generated)

@@ -557,9 +557,9 @@ dependencies = [
 [[package]]
 name = "cached"
-version = "0.53.1"
+version = "0.54.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4d73155ae6b28cf5de4cfc29aeb02b8a1c6dab883cb015d15cd514e42766846"
+checksum = "9718806c4a2fe9e8a56fd736f97b340dd10ed1be8ed733ed50449f351dc33cae"
 dependencies = [
  "ahash 0.8.11",
  "async-trait",

@@ -12,7 +12,7 @@ license = "AGPL-3.0-or-later"
 [dependencies]
 actix-web = { version = "4.9", default-features = false, features = ["macros", "compress-brotli"] }
 actix-files = "0.6"
-cached = { version = "0.53", features = ["async", "ahash"] }
+cached = { version = "0.54", features = ["async", "ahash"] }
 ramhorns = "1.0"
 toml = "0.8"
 serde = { version = "1.0", features = ["derive"] }

@@ -35,3 +35,7 @@ cyborgtime = "2.1.1"
 
 [lints.clippy]
 pedantic = "warn"
+
+[profile.release]
+strip = "symbols"
+lto = "thin"

@@ -13,7 +13,7 @@ use crate::routes::{
 mod config;
 mod template;
 
-mod misc;
+mod utils;
 mod routes;
 
 #[actix_web::main]

@@ -1,4 +1,4 @@
-use crate::{config::Config, misc::utils::get_url, template::InfosPage};
+use crate::{config::Config, utils::misc::get_url, template::InfosPage};
 use actix_web::{get, http::header::ContentType, routes, web, HttpResponse, Responder};
 use cached::proc_macro::once;
 use ramhorns::Content;

@@ -1,33 +1,17 @@
-use std::{
-    collections::hash_map::DefaultHasher,
-    hash::{Hash, Hasher},
-};
-
-use ::rss::{
-    extension::atom::{AtomExtension, Link},
-    Category, Channel, Guid, Image, Item,
-};
 use actix_web::{get, http::header::ContentType, routes, web, HttpResponse, Responder};
 use cached::proc_macro::once;
-use chrono::{DateTime, Datelike, Local, NaiveDateTime, Utc};
-use chrono_tz::Europe;
-use comrak::{parse_document, Arena};
 use ramhorns::Content;
 
 use crate::{
     config::Config,
-    misc::{
-        date::Date,
-        markdown::{get_metadata, get_options, File, FileMetadataBlog, TypeFileMetadata},
-        utils::{get_url, make_kw, read_file, Html},
-    },
     template::{InfosPage, NavBar},
+    utils::{
+        markdown::{File, TypeFileMetadata},
+        misc::{make_kw, read_file, Html},
+        routes::blog::{build_rss, get_post, get_posts, Post, BLOG_DIR, MIME_TYPE_RSS, POST_DIR},
+    },
 };
-
-const MIME_TYPE_RSS: &str = "application/rss+xml";
-const BLOG_DIR: &str = "blog";
-const POST_DIR: &str = "posts";
 
 #[get("/blog")]
 pub async fn index(config: web::Data<Config>) -> impl Responder {
     Html(build_index(config.get_ref().to_owned()))

@@ -47,8 +31,7 @@ fn build_index(config: Config) -> String {
     let mut posts = get_posts(&format!("{blog_dir}/{POST_DIR}"));
 
     // Get about
-    let about: Option<File> =
-        read_file(&format!("{blog_dir}/about.md"), &TypeFileMetadata::Generic);
+    let about: Option<File> = read_file(format!("{blog_dir}/about.md"), TypeFileMetadata::Generic);
 
     // Sort from newest to oldest
     posts.sort_by_cached_key(|p| (p.date.year, p.date.month, p.date.day));

@@ -76,112 +59,6 @@ fn build_index(config: Config) -> String {
     )
 }
 
-(106 removed lines: the `Post` struct, `impl Post`, `impl Hash for Post` and `get_posts()`; this code moved to the new file src/utils/routes/blog.rs, listed in full below)
 #[derive(Content, Debug)]
 struct BlogPostTemplate {
     navbar: NavBar,

@@ -220,64 +97,6 @@ fn build_post(file: &str, config: Config) -> String {
     )
 }
 
-(58 removed lines: `get_post()`; moved to src/utils/routes/blog.rs, listed below)
 #[routes]
 #[get("/blog/blog.rss")]
 #[get("/blog/rss")]

@@ -286,110 +105,3 @@ pub async fn rss(config: web::Data<Config>) -> impl Responder {
         .content_type(ContentType(MIME_TYPE_RSS.parse().unwrap()))
         .body(build_rss(config.get_ref().to_owned()))
 }
-
-(106 removed lines: `build_rss()`; moved to src/utils/routes/blog.rs, listed below)

@@ -2,15 +2,15 @@ use actix_web::{get, routes, web, HttpRequest, Responder};
 use cached::proc_macro::once;
 use glob::glob;
 use ramhorns::Content;
-use std::fs::read_to_string;
 
 use crate::{
     config::Config,
-    misc::{
-        markdown::{File, TypeFileMetadata},
-        utils::{make_kw, read_file, Html},
-    },
     template::{InfosPage, NavBar},
+    utils::{
+        markdown::{File, TypeFileMetadata},
+        misc::{make_kw, read_file, Html},
+        routes::contact::{find_links, remove_paragraphs},
+    },
 };
 
 const CONTACT_DIR: &str = "contacts";

@@ -32,47 +32,6 @@ async fn page(config: web::Data<Config>) -> impl Responder {
     Html(build_page(config.get_ref().to_owned()))
 }
 
-(41 removed lines: the `ContactLink` struct and `find_links()`; moved to src/utils/routes/contact.rs, as `Link` and `find_links`, listed below)
 #[routes]
 #[get("/{service}")]
 #[get("/{service}/{scope}")]

@@ -92,7 +51,7 @@ async fn service_redirection(config: web::Data<Config>, req: HttpRequest) -> impl Responder {
             _ => false,
         })
         // Returns the link
-        .map(|data| data.link.clone())
+        .map(|data| data.url.clone())
         .collect::<Vec<String>>();
 
     // This shouldn't be more than one link here

@@ -119,11 +78,6 @@ struct NetworksTemplate {
     others: Vec<File>,
 }
 
-fn remove_paragraphs(list: &mut [File]) {
-    list.iter_mut()
-        .for_each(|file| file.content = file.content.replace("<p>", "").replace("</p>", ""));
-}
-
 #[once(time = 60)]
 fn build_page(config: Config) -> String {
     let contacts_dir = format!("{}/{}", config.locations.data_dir, CONTACT_DIR);

@@ -131,26 +85,44 @@ fn build_page(config: Config) -> String {
 
     // Get about
     let about = read_file(
-        &format!("{contacts_dir}/about.md"),
-        &TypeFileMetadata::Generic,
+        format!("{contacts_dir}/about.md"),
+        TypeFileMetadata::Generic,
     );
 
     let socials_dir = "socials";
     let mut socials = glob(&format!("{contacts_dir}/{socials_dir}/*{ext}"))
         .unwrap()
-        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
+        .map(|e| {
+            read_file(
+                e.unwrap().to_string_lossy().to_string(),
+                TypeFileMetadata::Contact,
+            )
+            .unwrap()
+        })
         .collect::<Vec<File>>();
 
     let forges_dir = "forges";
     let mut forges = glob(&format!("{contacts_dir}/{forges_dir}/*{ext}"))
         .unwrap()
-        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
+        .map(|e| {
+            read_file(
+                e.unwrap().to_string_lossy().to_string(),
+                TypeFileMetadata::Contact,
+            )
+            .unwrap()
+        })
         .collect::<Vec<File>>();
 
     let others_dir = "others";
     let mut others = glob(&format!("{contacts_dir}/{others_dir}/*{ext}"))
         .unwrap()
-        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
+        .map(|e| {
+            read_file(
+                e.unwrap().to_string_lossy().to_string(),
+                TypeFileMetadata::Contact,
+            )
+            .unwrap()
+        })
         .collect::<Vec<File>>();
 
     // Remove paragraphs in custom statements

@@ -1,12 +1,10 @@
-use std::collections::HashMap;
-
 use crate::{
     config::Config,
-    misc::{
-        github::{fetch_pr, ProjectState},
-        utils::{make_kw, Html},
-    },
     template::{InfosPage, NavBar},
+    utils::{
+        misc::{make_kw, Html},
+        routes::contrib::{fetch, Project},
+    },
 };
 use actix_web::{get, web, Responder};
 use cached::proc_macro::once;

@@ -26,24 +24,6 @@ struct PortfolioTemplate {
     closed: Option<Vec<Project>>,
 }
 
-#[derive(Clone, Content, Debug)]
-struct Project {
-    name: String,
-    url: String,
-    pulls_merged: Vec<Pull>,
-    pulls_open: Vec<Pull>,
-    pulls_closed: Vec<Pull>,
-}
-
-#[derive(Clone, Content, Debug)]
-struct Pull {
-    url: String,
-    id: u32,
-    name_repo: String,
-    title: String,
-    state: u8,
-}
-
 #[once(time = 600)] // 10min
 async fn build_page(config: Config) -> String {
     let navbar = NavBar {

@@ -52,66 +32,8 @@ async fn build_page(config: Config) -> String {
     };
 
     // Fetch latest data from github
-    let data = match fetch_pr().await {
-        Ok(projects) => {
-            (57 removed lines: grouping of PRs by project, distribution into merged/open/closed, name capitalisation and sorting; this logic moved into fetch() in src/utils/routes/contrib.rs, listed below)
-            PortfolioTemplate {
+    let data = match fetch().await {
+        Ok(data) => PortfolioTemplate {
             navbar,
             error: false,
             projects: Some(

@@ -132,8 +54,7 @@ async fn build_page(config: Config) -> String {
                     .cloned()
                     .collect(),
             ),
-            }
-        }
+        },
         Err(e) => {
             eprintln!("{e}");

@@ -1,18 +1,17 @@
-use std::path::Path;
-
 use actix_web::{get, web, Responder};
 use cached::proc_macro::cached;
 use ramhorns::Content;
 use regex::Regex;
-use serde::{Deserialize, Serialize};
+use serde::Deserialize;
 
 use crate::{
     config::Config,
-    misc::{
-        markdown::{File, TypeFileMetadata},
-        utils::{make_kw, read_file, Html},
-    },
     template::{InfosPage, NavBar},
+    utils::{
+        markdown::{File, TypeFileMetadata},
+        misc::{make_kw, read_file, Html},
+        routes::cours::{excluded, get_filetree},
+    },
 };
 
 #[derive(Debug, Deserialize)]

@@ -32,13 +31,6 @@ struct CoursTemplate {
     content: Option<File>,
 }
 
-#[derive(Clone, Debug, Serialize)]
-struct FileNode {
-    name: String,
-    is_dir: bool,
-    children: Vec<FileNode>,
-}
-
 #[cached]
 fn compile_patterns(exclusion_list: Vec<String>) -> Vec<Regex> {
     exclusion_list

@@ -47,76 +39,42 @@ fn compile_patterns(exclusion_list: Vec<String>) -> Vec<Regex> {
         .collect()
 }
 
-fn get_filetree(dir_path: &str, exclusion_patterns: &Vec<Regex>) -> FileNode {
-    let children = std::fs::read_dir(dir_path)
-        .unwrap()
-        .filter_map(Result::ok)
-        .filter_map(|entry| {
-            let entry_path = entry.path();
-            let entry_name = entry_path.file_name()?.to_string_lossy().to_string();
-
-            // Exclude element with the exclusion_list
-            if exclusion_patterns.iter().any(|re| re.is_match(&entry_name)) {
-                return None;
-            }
-
-            if entry_path.is_file() {
-                Some(FileNode {
-                    name: entry_name,
-                    is_dir: false,
-                    children: vec![],
-                })
-            } else {
-                // Exclude empty directories
-                let children_of_children =
-                    get_filetree(entry_path.to_str().unwrap(), exclusion_patterns);
-                if children_of_children.is_dir && children_of_children.children.is_empty() {
-                    None
-                } else {
-                    Some(children_of_children)
-                }
-            }
-        })
-        .collect();
-
-    FileNode {
-        name: Path::new(dir_path)
-            .file_name()
-            .unwrap()
-            .to_string_lossy()
-            .to_string(),
-        is_dir: true,
-        children,
-    }
-}
-
 /// Get a page content
 fn get_content(
     cours_dir: &str,
     path: &web::Query<PathRequest>,
     exclusion_list: &[String],
+    exclusion_patterns: &[Regex],
 ) -> Option<File> {
     let filename = path.q.as_ref().map_or("index.md", |q| q);
 
-    // We should support regex?
-    if exclusion_list
-        .iter()
-        .any(|excluded_term| filename.contains(excluded_term.as_str()))
-    {
+    // Exclusion checks
+    if excluded(filename, exclusion_list, exclusion_patterns) {
         return None;
     }
 
-    read_file(
-        &format!("{cours_dir}/{filename}"),
-        &TypeFileMetadata::Generic,
-    )
+    read_file(format!("{cours_dir}/{filename}"), TypeFileMetadata::Generic)
 }
 
 fn build_page(info: &web::Query<PathRequest>, config: Config) -> String {
     let cours_dir = "data/cours";
-    let exclusion_list = config.fc.exclude_courses.unwrap();
-    let exclusion_patterns = compile_patterns(exclusion_list.clone());
-    let filetree = get_filetree(cours_dir, &exclusion_patterns);
+    let (ep, el): (_, Vec<String>) = config
+        .fc
+        .exclude_courses
+        .unwrap()
+        .into_iter()
+        .partition(|item| item.starts_with('/'));
+
+    let exclusion_list = {
+        let mut base = vec!["../".to_owned()];
+        base.extend(el);
+        base
+    };
+    let exclusion_patterns: Vec<Regex> =
+        compile_patterns(ep.iter().map(|r| r[1..r.len() - 1].to_owned()).collect());
+
+    let filetree = get_filetree(cours_dir, &exclusion_list, &exclusion_patterns);
 
     config.tmpl.render(
         "cours.html",

@@ -126,7 +84,7 @@ fn build_page(info: &web::Query<PathRequest>, config: Config) -> String {
                 ..NavBar::default()
             },
             filetree: serde_json::to_string(&filetree).unwrap(),
-            content: get_content(cours_dir, info, &exclusion_list),
+            content: get_content(cours_dir, info, &exclusion_list, &exclusion_patterns),
         },
         InfosPage {
             title: Some("Cours".into()),
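
Note: the `excluded` and three-argument `get_filetree` helpers imported above live in the new src/utils/routes/cours.rs, which is not part of this compare view. Below is a minimal sketch of what the `excluded` call site suggests — a plain-substring check plus the pre-compiled regexes. The exact implementation is an assumption, not taken from the diff.

    use regex::Regex;

    // Hypothetical sketch of excluded(): true when the requested filename matches
    // an excluded substring or one of the compiled exclusion patterns.
    pub fn excluded(filename: &str, exclusion_list: &[String], exclusion_patterns: &[Regex]) -> bool {
        exclusion_list
            .iter()
            .any(|term| filename.contains(term.as_str()))
            || exclusion_patterns.iter().any(|re| re.is_match(filename))
    }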

@@ -4,11 +4,11 @@ use ramhorns::Content;
 
 use crate::{
     config::Config,
-    misc::{
-        markdown::{File, TypeFileMetadata},
-        utils::{make_kw, read_file, Html},
-    },
     template::{InfosPage, NavBar},
+    utils::{
+        markdown::{File, TypeFileMetadata},
+        misc::{make_kw, read_file, Html},
+    },
 };
 
 #[get("/")]

@@ -36,8 +36,8 @@ struct StyleAvatar {
 #[once(time = 60)]
 fn build_page(config: Config) -> String {
     let mut file = read_file(
-        &format!("{}/index.md", config.locations.data_dir),
-        &TypeFileMetadata::Index,
+        format!("{}/index.md", config.locations.data_dir),
+        TypeFileMetadata::Index,
     );
 
     // Default values

@@ -67,7 +67,7 @@ fn build_page(config: Config) -> String {
             }
         }
     } else {
-        file = read_file("README.md", &TypeFileMetadata::Generic);
+        file = read_file("README.md".to_string(), TypeFileMetadata::Generic);
     }
 
     config.tmpl.render(

@@ -4,7 +4,7 @@ use ramhorns::Content;
 
 use crate::{
     config::Config,
-    misc::utils::{get_url, Html},
+    utils::misc::{get_url, Html},
     template::{InfosPage, NavBar},
 };
 

@@ -5,11 +5,11 @@ use ramhorns::Content;
 
 use crate::{
     config::Config,
-    misc::{
-        markdown::{File, TypeFileMetadata},
-        utils::{make_kw, read_file, Html},
-    },
     template::{InfosPage, NavBar},
+    utils::{
+        markdown::{File, TypeFileMetadata},
+        misc::{make_kw, read_file, Html},
+    },
 };
 
 #[get("/portfolio")]

@@ -36,14 +36,20 @@ fn build_page(config: Config) -> String {
 
     // Get about
     let about = read_file(
-        &format!("{projects_dir}/about.md"),
-        &TypeFileMetadata::Generic,
+        format!("{projects_dir}/about.md"),
+        TypeFileMetadata::Generic,
     );
 
     // Get apps
     let apps = glob(&format!("{apps_dir}/*{ext}"))
         .unwrap()
-        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Portfolio).unwrap())
+        .map(|e| {
+            read_file(
+                e.unwrap().to_string_lossy().to_string(),
+                TypeFileMetadata::Portfolio,
+            )
+            .unwrap()
+        })
         .collect::<Vec<File>>();
 
     let appdata = if apps.is_empty() {

@@ -55,7 +61,13 @@ fn build_page(config: Config) -> String {
     // Get archived apps
     let archived_apps = glob(&format!("{apps_dir}/archive/*{ext}"))
         .unwrap()
-        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Portfolio).unwrap())
+        .map(|e| {
+            read_file(
+                e.unwrap().to_string_lossy().to_string(),
+                TypeFileMetadata::Portfolio,
+            )
+            .unwrap()
+        })
         .collect::<Vec<File>>();
 
     let archived_appdata = if archived_apps.is_empty() {

@@ -3,7 +3,7 @@ use cached::proc_macro::once;
 
 use crate::{
     config::Config,
-    misc::utils::{make_kw, Html},
+    utils::misc::{make_kw, Html},
     template::InfosPage,
 };
 

@@ -2,7 +2,7 @@ use chrono::{Datelike, NaiveDate};
 use ramhorns::Content;
 use serde::{Deserialize, Deserializer};
 
-#[derive(Content, Default, Debug)]
+#[derive(Content, Clone, Default, Debug)]
 pub struct Date {
     pub day: u32,
     pub month: u32,

@@ -1,7 +1,7 @@
 use reqwest::{header::ACCEPT, Error};
 use serde::Deserialize;
 
-use crate::misc::utils::get_reqwest_client;
+use crate::utils::misc::get_reqwest_client;
 
 #[derive(Debug, Deserialize)]
 struct GithubResponse {

@@ -1,4 +1,4 @@
-use crate::misc::date::Date;
+use crate::utils::date::Date;
 use base64::engine::general_purpose;
 use base64::Engine;
 use comrak::nodes::{AstNode, NodeValue};

@@ -12,8 +12,9 @@ use std::fs;
 use std::path::Path;
 
 /// Metadata for blog posts
-#[derive(Content, Debug, Default, Deserialize)]
+#[derive(Content, Clone, Debug, Default, Deserialize)]
 pub struct FileMetadataBlog {
+    pub hardbreaks: Option<bool>,
     pub title: Option<String>,
     pub date: Option<Date>,
     pub description: Option<String>,

@@ -44,7 +45,7 @@ impl<'a> Deserialize<'a> for Tag {
 }
 
 /// Metadata for contact entry
-#[derive(Content, Debug, Default, Deserialize)]
+#[derive(Content, Debug, Default, Deserialize, Clone)]
 pub struct FileMetadataContact {
     pub title: String,
     pub custom: Option<bool>,

@@ -55,7 +56,7 @@ pub struct FileMetadataContact {
 }
 
 /// Metadata for index page
-#[derive(Content, Debug, Default, Deserialize)]
+#[derive(Content, Debug, Default, Deserialize, Clone)]
 pub struct FileMetadataIndex {
     pub name: Option<String>,
     pub pronouns: Option<String>,

@@ -65,7 +66,7 @@ pub struct FileMetadataIndex {
 }
 
 /// Metadata for portfolio cards
-#[derive(Content, Debug, Default, Deserialize)]
+#[derive(Content, Debug, Default, Deserialize, Clone)]
 pub struct FileMetadataPortfolio {
     pub title: Option<String>,
     pub link: Option<String>,

@@ -74,6 +75,7 @@ pub struct FileMetadataPortfolio {
 }
 
 /// List of available metadata types
+#[derive(Hash, PartialEq, Eq, Clone, Copy)]
 pub enum TypeFileMetadata {
     Blog,
     Contact,

@@ -84,8 +86,9 @@ pub enum TypeFileMetadata {
 
 /// Structure who holds all the metadata the file have
 /// Usually all fields are None except one
-#[derive(Content, Debug, Default, Deserialize)]
+#[derive(Content, Debug, Default, Deserialize, Clone)]
 pub struct FileMetadata {
+    pub hardbreaks: bool,
     pub blog: Option<FileMetadataBlog>,
     pub contact: Option<FileMetadataContact>,
     pub index: Option<FileMetadataIndex>,

@@ -94,7 +97,7 @@ pub struct FileMetadata {
 
 #[allow(clippy::struct_excessive_bools)]
 /// Global metadata
-#[derive(Content, Debug)]
+#[derive(Content, Debug, Clone)]
 pub struct Metadata {
     pub info: FileMetadata,
     pub math: bool,

@@ -113,7 +116,7 @@ impl Metadata {
 }
 
 /// File description
-#[derive(Content, Debug)]
+#[derive(Content, Debug, Clone)]
 pub struct File {
     pub metadata: Metadata,
     pub content: String,

@@ -151,7 +154,7 @@ pub fn get_options<'a>() -> ComrakOptions<'a> {
     // options.render.broken_link_callback = ...;
 
     // Renderer
-    options.render.hardbreaks = true;
+    options.render.hardbreaks = false;
     options.render.github_pre_lang = false;
     options.render.full_info_string = true;
     options.render.width = 0; // 0 mean disabled?

@@ -245,12 +248,8 @@ fn fix_images_and_integration(path: &str, html: &str) -> (String, Metadata) {
             if mime == "text/markdown" {
                 let mut options = get_options();
                 options.extension.footnotes = false;
-                let data = read_md(
-                    &img_path,
-                    &file,
-                    &TypeFileMetadata::Generic,
-                    Some(options),
-                );
+                let data =
+                    read_md(&img_path, &file, TypeFileMetadata::Generic, Some(options));
                 el.replace(&data.content, ContentType::Html);
 
                 // Store the metadata for later merging

@@ -281,7 +280,7 @@ fn fix_images_and_integration(path: &str, html: &str) -> (String, Metadata) {
 pub fn read_md(
     path: &str,
     raw_text: &str,
-    metadata_type: &TypeFileMetadata,
+    metadata_type: TypeFileMetadata,
     options: Option<Options>,
 ) -> File {
     let arena = Arena::new();

@@ -292,14 +291,12 @@ pub fn read_md(
     // Find metadata
     let metadata = get_metadata(root, metadata_type);
 
+    // Update comrak render properties
+    opt.render.hardbreaks = metadata.hardbreaks;
+
     let mermaid_name = "mermaid";
     hljs_replace(root, mermaid_name);
 
-    if let TypeFileMetadata::Blog = metadata_type {
-        // Change by metadata could be good for compatibility
-        opt.render.hardbreaks = true;
-    }
-
     // Convert to HTML
     let mut html = vec![];
     format_html(root, &opt, &mut html).unwrap();

@@ -333,29 +330,37 @@ fn deserialize_metadata<T: Default + serde::de::DeserializeOwned>(text: &str) -> T {
 }
 
 /// Fetch metadata from AST
-pub fn get_metadata<'a>(root: &'a AstNode<'a>, mtype: &TypeFileMetadata) -> FileMetadata {
+pub fn get_metadata<'a>(root: &'a AstNode<'a>, mtype: TypeFileMetadata) -> FileMetadata {
     root.children()
-        .find_map(|node| match &node.data.borrow().value {
-            // Extract metadata from frontmatter
-            NodeValue::FrontMatter(text) => Some(match mtype {
-                TypeFileMetadata::Blog => FileMetadata {
-                    blog: Some(deserialize_metadata(text)),
-                    ..FileMetadata::default()
-                },
+        .map(|node| {
+            let generic = FileMetadata {
+                hardbreaks: true,
+                ..FileMetadata::default()
+            };
+
+            match &node.data.borrow().value {
+                // Extract metadata from frontmatter
+                NodeValue::FrontMatter(text) => match mtype {
+                    TypeFileMetadata::Blog => {
+                        let metadata: FileMetadataBlog = deserialize_metadata(text);
+                        FileMetadata {
+                            blog: Some(metadata.clone()),
+                            hardbreaks: metadata.hardbreaks.unwrap_or_default(),
+                            ..FileMetadata::default()
+                        }
+                    }
                 TypeFileMetadata::Contact => {
                     let mut metadata: FileMetadataContact = deserialize_metadata(text);
 
                     // Trim descriptions
                     if let Some(desc) = &mut metadata.description {
                         desc.clone_from(&desc.trim().into());
                     }
 
                     FileMetadata {
                         contact: Some(metadata),
                         ..FileMetadata::default()
                     }
                 }
-                TypeFileMetadata::Generic => FileMetadata::default(),
+                TypeFileMetadata::Generic => generic,
                 TypeFileMetadata::Index => FileMetadata {
                     index: Some(deserialize_metadata(text)),
                     ..FileMetadata::default()

@@ -364,9 +369,11 @@ pub fn get_metadata<'a>(root: &'a AstNode<'a>, mtype: &TypeFileMetadata) -> FileMetadata {
                     portfolio: Some(deserialize_metadata(text)),
                     ..FileMetadata::default()
                 },
-            }),
-            _ => None,
+                },
+                _ => generic,
+            }
         })
+        .next()
         .map_or_else(
             || match mtype {
                 TypeFileMetadata::Blog => FileMetadata {
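
Note on the hardbreaks change, which spans three of the hunks above: `options.render.hardbreaks` now defaults to `false`, a `hardbreaks: Option<bool>` field was added to the blog frontmatter metadata, and `get_metadata` copies it into `FileMetadata::hardbreaks` — generic content keeps hard breaks on, while typed files get them only when their frontmatter asks for it. `read_md` then applies the per-file value before rendering, as shown in the diff:

    let metadata = get_metadata(root, metadata_type);
    opt.render.hardbreaks = metadata.hardbreaks;

This replaces the previous behaviour where hard line breaks were forced on for every blog post.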

@@ -53,13 +53,14 @@ impl Responder for Html {
 }
 
 /// Read a file
-pub fn read_file(filename: &str, expected_file: &TypeFileMetadata) -> Option<File> {
-    Path::new(filename)
+#[cached]
+pub fn read_file(filename: String, expected_file: TypeFileMetadata) -> Option<File> {
+    Path::new(&filename.clone())
         .extension()
         .and_then(|ext| match ext.to_str().unwrap() {
             "pdf" => fs::read(filename).map_or(None, |bytes| Some(read_pdf(bytes))),
-            _ => fs::read_to_string(filename).map_or(None, |text| {
-                Some(read_md(filename, &text, expected_file, None))
+            _ => fs::read_to_string(&filename).map_or(None, |text| {
+                Some(read_md(&filename, &text, expected_file, None))
             }),
         })
 }
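
Note on the `#[cached]` attribute added to `read_file` above: with the `cached` crate, the default memoisation keys the cache on the argument values, so arguments must be owned values implementing `Clone + Hash + Eq` and the return type must be `Clone`. That is presumably what motivates the switch from `&str`/`&TypeFileMetadata` to owned `String`/`TypeFileMetadata` here, the `Hash`/`Eq`/`Clone`/`Copy` derives on `TypeFileMetadata`, and the `Clone` derives on `File`, `Metadata` and the metadata structs elsewhere in this diff. A minimal illustration of the same pattern (illustrative only, not code from the repository):

    use cached::proc_macro::cached;

    // The argument is cloned into the cache key; the result is cloned out on a hit.
    #[cached]
    fn read_cached(path: String) -> Option<String> {
        std::fs::read_to_string(&path).ok()
    }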
@ -1,4 +1,5 @@
|
||||||
pub mod date;
|
pub mod date;
|
||||||
pub mod github;
|
pub mod github;
|
||||||
pub mod markdown;
|
pub mod markdown;
|
||||||
pub mod utils;
|
pub mod misc;
|
||||||
|
pub mod routes;
|
298
src/utils/routes/blog.rs
Normal file
298
src/utils/routes/blog.rs
Normal file
|
@ -0,0 +1,298 @@
|
||||||
|
use std::{
|
||||||
|
collections::hash_map::DefaultHasher,
|
||||||
|
hash::{Hash, Hasher},
|
||||||
|
};
|
||||||
|
|
||||||
|
use ::rss::{
|
||||||
|
extension::atom::{AtomExtension, Link},
|
||||||
|
Category, Channel, Guid, Image, Item,
|
||||||
|
};
|
||||||
|
use cached::proc_macro::once;
|
||||||
|
use chrono::{DateTime, Datelike, Local, NaiveDateTime, Utc};
|
||||||
|
use chrono_tz::Europe;
|
||||||
|
use comrak::{parse_document, Arena};
|
||||||
|
use ramhorns::Content;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
config::Config,
|
||||||
|
template::InfosPage,
|
||||||
|
utils::{
|
||||||
|
date::Date,
|
||||||
|
markdown::{get_metadata, get_options, File, FileMetadataBlog, TypeFileMetadata},
|
||||||
|
misc::{get_url, make_kw, read_file},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const MIME_TYPE_RSS: &str = "application/rss+xml";
|
||||||
|
pub const BLOG_DIR: &str = "blog";
|
||||||
|
pub const POST_DIR: &str = "posts";
|
||||||
|
|
||||||
|
#[derive(Content, Debug)]
|
||||||
|
pub struct Post {
|
||||||
|
title: String,
|
||||||
|
pub date: Date,
|
||||||
|
pub url: String,
|
||||||
|
desc: Option<String>,
|
||||||
|
content: Option<String>,
|
||||||
|
tags: Vec<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Post {
|
||||||
|
// Fetch the file content
|
||||||
|
fn fetch_content(&mut self, data_dir: &str) {
|
||||||
|
let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
|
||||||
|
let ext = ".md";
|
||||||
|
|
||||||
|
if let Some(file) = read_file(
|
||||||
|
format!("{blog_dir}/{}{ext}", self.url),
|
||||||
|
TypeFileMetadata::Blog,
|
||||||
|
) {
|
||||||
|
self.content = Some(file.content);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Hash for Post {
|
||||||
|
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||||
|
if let Some(content) = &self.content {
|
||||||
|
content.hash(state);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_posts(location: &str) -> Vec<Post> {
|
||||||
|
let entries = std::fs::read_dir(location).map_or_else(
|
||||||
|
|_| vec![],
|
||||||
|
|res| {
|
||||||
|
res.flatten()
|
||||||
|
.filter(|f| f.path().extension().map_or(false, |ext| ext == "md"))
|
||||||
|
.collect::<Vec<std::fs::DirEntry>>()
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
entries
|
||||||
|
.iter()
|
||||||
|
.filter_map(|f| {
|
||||||
|
let fname = f.file_name();
|
||||||
|
let filename = fname.to_string_lossy();
|
||||||
|
let file_without_ext = filename.split_at(filename.len() - 3).0;
|
||||||
|
|
||||||
|
let file_metadata = std::fs::read_to_string(format!("{location}/{filename}"))
|
||||||
|
.map_or_else(
|
||||||
|
|_| FileMetadataBlog {
|
||||||
|
title: Some(file_without_ext.into()),
|
||||||
|
..FileMetadataBlog::default()
|
||||||
|
},
|
||||||
|
|text| {
|
||||||
|
let arena = Arena::new();
|
||||||
|
|
||||||
|
let options = get_options();
|
||||||
|
let root = parse_document(&arena, &text, &options);
|
||||||
|
let mut metadata = get_metadata(root, TypeFileMetadata::Blog).blog.unwrap();
|
||||||
|
|
||||||
|
// Always have a title
|
||||||
|
metadata.title = metadata
|
||||||
|
.title
|
||||||
|
.map_or_else(|| Some(file_without_ext.into()), Some);
|
||||||
|
|
||||||
|
metadata
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
if file_metadata.publish == Some(true) {
|
||||||
|
Some(Post {
|
||||||
|
url: file_without_ext.into(),
|
||||||
|
title: file_metadata.title.unwrap(),
|
||||||
|
date: file_metadata.date.unwrap_or({
|
||||||
|
let m = f.metadata().unwrap();
|
||||||
|
let date = std::convert::Into::<DateTime<Utc>>::into(
|
||||||
|
m.modified().unwrap_or_else(|_| m.created().unwrap()),
|
||||||
|
)
|
||||||
|
.date_naive();
|
||||||
|
|
||||||
|
Date {
|
||||||
|
day: date.day(),
|
||||||
|
month: date.month(),
|
||||||
|
year: date.year(),
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
desc: file_metadata.description,
|
||||||
|
content: None,
|
||||||
|
tags: file_metadata
|
||||||
|
.tags
|
||||||
|
.unwrap_or_default()
|
||||||
|
.iter()
|
||||||
|
.map(|t| t.name.clone())
|
||||||
|
.collect(),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect::<Vec<Post>>()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_post(
|
||||||
|
post: &mut Option<File>,
|
||||||
|
filename: &str,
|
||||||
|
name: &str,
|
||||||
|
data_dir: &str,
|
||||||
|
) -> (InfosPage, String) {
|
||||||
|
let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
|
||||||
|
let ext = ".md";
|
||||||
|
|
||||||
|
*post = read_file(
|
||||||
|
format!("{blog_dir}/{filename}{ext}"),
|
||||||
|
TypeFileMetadata::Blog,
|
||||||
|
);
|
||||||
|
|
||||||
|
let default = (
|
||||||
|
filename,
|
||||||
|
&format!("Blog d'{name}"),
|
||||||
|
Vec::new(),
|
||||||
|
String::new(),
|
||||||
|
);
|
||||||
|
let (title, desc, tags, toc) = match post {
|
||||||
|
Some(data) => (
|
||||||
|
match &data.metadata.info.blog.as_ref().unwrap().title {
|
||||||
|
Some(text) => text,
|
||||||
|
None => default.0,
|
||||||
|
},
|
||||||
|
match &data.metadata.info.blog.as_ref().unwrap().description {
|
||||||
|
Some(desc) => desc,
|
||||||
|
None => default.1,
|
||||||
|
},
|
||||||
|
match &data.metadata.info.blog.as_ref().unwrap().tags {
|
||||||
|
Some(tags) => tags.clone(),
|
||||||
|
None => default.2,
|
||||||
|
},
|
||||||
|
match &data.metadata.info.blog.as_ref().unwrap().toc {
|
||||||
|
// TODO: Generate TOC
|
||||||
|
Some(true) => String::new(),
|
||||||
|
_ => default.3,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
None => default,
|
||||||
|
};
|
||||||
|
|
||||||
|
(
|
||||||
|
InfosPage {
|
||||||
|
title: Some(format!("Post: {title}")),
|
||||||
|
desc: Some(desc.clone()),
|
||||||
|
kw: Some(make_kw(
|
||||||
|
&["blog", "blogging", "write", "writing"]
|
||||||
|
.into_iter()
|
||||||
|
.chain(tags.iter().map(|t| t.name.as_str()))
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
)),
|
||||||
|
},
|
||||||
|
toc,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[once(time = 10800)] // 3h
|
||||||
|
pub fn build_rss(config: Config) -> String {
|
||||||
|
let mut posts = get_posts(&format!(
|
||||||
|
"{}/{}/{}",
|
||||||
|
config.locations.data_dir, BLOG_DIR, POST_DIR
|
||||||
|
));
|
||||||
|
|
||||||
|
// Sort from newest to oldest
|
||||||
|
posts.sort_by_cached_key(|p| (p.date.year, p.date.month, p.date.day));
|
||||||
|
posts.reverse();
|
||||||
|
|
||||||
|
// Only the 20 newest
|
||||||
|
let max = 20;
|
||||||
|
if posts.len() > max {
|
||||||
|
posts.drain(max..);
|
||||||
|
}
|
||||||
|
|
||||||
|
let link_to_site = get_url(config.fc.clone());
|
||||||
|
let author = if let (Some(mail), Some(name)) = (config.fc.mail, config.fc.fullname.clone()) {
|
||||||
|
Some(format!("{mail} ({name})"))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
let title = format!("Blog d'{}", config.fc.name.unwrap_or_default());
|
||||||
|
let lang = "fr";
|
||||||
|
let channel = Channel {
|
||||||
|
title: title.clone(),
|
||||||
|
link: link_to_site.clone(),
|
||||||
|
description: "Un fil qui parle d'informatique notamment".into(),
|
||||||
|
language: Some(lang.into()),
|
||||||
|
managing_editor: author.clone(),
|
||||||
|
webmaster: author,
|
||||||
|
pub_date: Some(Local::now().to_rfc2822()),
|
||||||
|
categories: ["blog", "blogging", "write", "writing"]
|
||||||
|
.iter()
|
||||||
|
.map(|&c| Category {
|
||||||
|
name: c.into(),
|
||||||
|
..Category::default()
|
||||||
|
})
|
||||||
|
.collect(),
|
||||||
|
generator: Some("ewp with rss crate".into()),
|
||||||
|
docs: Some("https://www.rssboard.org/rss-specification".into()),
|
||||||
|
image: Some(Image {
|
||||||
|
url: format!("{link_to_site}/icons/favicon-32x32.png"),
|
||||||
|
title: title.clone(),
|
||||||
|
link: link_to_site.clone(),
|
||||||
|
..Image::default()
|
||||||
|
}),
|
||||||
|
items: posts
|
||||||
|
.iter_mut()
|
||||||
|
.map(|p| {
|
||||||
|
// Get post data
|
||||||
|
p.fetch_content(&config.locations.data_dir);
|
||||||
|
|
||||||
|
// Build item
|
||||||
|
Item {
|
||||||
|
title: Some(p.title.clone()),
|
||||||
|
link: Some(format!("{}/blog/p/{}", link_to_site, p.url)),
|
||||||
|
description: p.content.clone(),
|
||||||
|
categories: p
|
||||||
|
.tags
|
||||||
|
.iter()
|
||||||
|
.map(|c| Category {
|
||||||
|
name: c.to_owned(),
|
||||||
|
..Category::default()
|
||||||
|
})
|
||||||
|
.collect(),
|
||||||
|
guid: Some(Guid {
|
||||||
|
value: format!("urn:hash:{}", {
|
||||||
|
let mut hasher = DefaultHasher::new();
|
||||||
|
p.hash(&mut hasher);
|
||||||
|
hasher.finish()
|
||||||
|
}),
|
||||||
|
permalink: false,
|
||||||
|
}),
|
||||||
|
pub_date: Some(
|
||||||
|
NaiveDateTime::parse_from_str(
|
||||||
|
&format!("{}-{}-{} 13:12:00", p.date.day, p.date.month, p.date.year),
|
||||||
|
"%d-%m-%Y %H:%M:%S",
|
||||||
|
)
|
||||||
|
.unwrap()
|
||||||
|
.and_local_timezone(Europe::Paris)
|
||||||
|
.unwrap()
|
||||||
|
.to_rfc2822(),
|
||||||
|
),
|
||||||
|
..Item::default()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect(),
|
||||||
|
atom_ext: Some(AtomExtension {
|
||||||
|
links: vec![Link {
|
||||||
|
href: format!("{link_to_site}/blog/rss"),
|
||||||
|
rel: "self".into(),
|
||||||
|
hreflang: Some(lang.into()),
|
||||||
|
mime_type: Some(MIME_TYPE_RSS.into()),
|
||||||
|
title: Some(title),
|
||||||
|
length: None,
|
||||||
|
}],
|
||||||
|
}),
|
||||||
|
..Channel::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
std::str::from_utf8(&channel.write_to(Vec::new()).unwrap())
|
||||||
|
.unwrap()
|
||||||
|
.into()
|
||||||
|
}
|
50
src/utils/routes/contact.rs
Normal file
50
src/utils/routes/contact.rs
Normal file
|
@ -0,0 +1,50 @@
use cached::proc_macro::once;
use std::fs::read_to_string;

use crate::utils::markdown::File;

/// Contact node
#[derive(Clone, Debug)]
pub struct Link {
    pub service: String,
    pub scope: Option<String>,
    pub url: String,
}

#[once(time = 60)]
pub fn find_links(directory: String) -> Vec<Link> {
    // TOML filename
    let toml_file = "links.toml";

    // Read the TOML file and parse it
    let toml_str = read_to_string(format!("{directory}/{toml_file}")).unwrap_or_default();

    let mut redirections = vec![];
    match toml::de::from_str::<toml::Value>(&toml_str) {
        Ok(data) => {
            if let Some(section) = data.as_table() {
                section.iter().for_each(|(key, value)| {
                    // Scopes are delimited with `/`
                    let (service, scope) = match key.split_once('/') {
                        Some((service, scope)) => (service.to_owned(), Some(scope.to_owned())),
                        None => (key.to_owned(), None),
                    };

                    redirections.push(Link {
                        service,
                        scope,
                        url: value.as_str().unwrap().to_owned(),
                    });
                });
            }
        }
        Err(_) => return vec![],
    }

    redirections
}

pub fn remove_paragraphs(list: &mut [File]) {
    list.iter_mut()
        .for_each(|file| file.content = file.content.replace("<p>", "").replace("</p>", ""));
}
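Judging from the `split_once('/')` above, `find_links` expects a flat `links.toml` where a plain key names a service and a `service/scope` key carries an extra scope. A small sketch of that parsing path with invented entries (the file content and names below are assumptions, not taken from the repository):

    use toml::Value;

    fn main() {
        // Hypothetical links.toml content for illustration only.
        let toml_str = r#"
            github = "https://github.com/example"
            "git/perso" = "https://git.example.org/example"
        "#;

        let data: Value = toml::de::from_str(toml_str).unwrap();
        for (key, value) in data.as_table().unwrap() {
            // Same convention as find_links: "service/scope" splits on the first '/'.
            let (service, scope) = match key.split_once('/') {
                Some((s, sc)) => (s, Some(sc)),
                None => (key.as_str(), None),
            };
            println!("{service} (scope {scope:?}) -> {}", value.as_str().unwrap());
        }
    }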
src/utils/routes/contrib.rs (new file, 90 lines)
@@ -0,0 +1,90 @@
@ -0,0 +1,90 @@
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use ramhorns::Content;
|
||||||
|
use reqwest::Error;
|
||||||
|
|
||||||
|
use crate::utils::github::{fetch_pr, ProjectState};
|
||||||
|
|
||||||
|
#[derive(Clone, Content, Debug)]
|
||||||
|
pub struct Project {
|
||||||
|
name: String,
|
||||||
|
url: String,
|
||||||
|
pub pulls_merged: Vec<Pull>,
|
||||||
|
pub pulls_open: Vec<Pull>,
|
||||||
|
pub pulls_closed: Vec<Pull>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Content, Debug)]
|
||||||
|
pub struct Pull {
|
||||||
|
url: String,
|
||||||
|
id: u32,
|
||||||
|
name_repo: String,
|
||||||
|
title: String,
|
||||||
|
state: u8,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn fetch() -> Result<Vec<Project>, Error> {
|
||||||
|
match fetch_pr().await {
|
||||||
|
Ok(projects) => {
|
||||||
|
let mut data: Vec<Project> = Vec::new();
|
||||||
|
|
||||||
|
// Grouping PRs by projects
|
||||||
|
let mut map: HashMap<&str, Vec<Pull>> = HashMap::new();
|
||||||
|
for p in &projects {
|
||||||
|
let project = Pull {
|
||||||
|
url: p.contrib_url.clone(),
|
||||||
|
id: p.id,
|
||||||
|
name_repo: p.name.clone(),
|
||||||
|
title: p.title.clone(),
|
||||||
|
state: p.status as u8,
|
||||||
|
};
|
||||||
|
let project_name = p.name.as_str();
|
||||||
|
if map.contains_key(project_name) {
|
||||||
|
map.entry(project_name).and_modify(|v| v.push(project));
|
||||||
|
} else {
|
||||||
|
data.push(Project {
|
||||||
|
name: project_name.into(),
|
||||||
|
url: p.url.clone(),
|
||||||
|
pulls_merged: Vec::new(),
|
||||||
|
pulls_closed: Vec::new(),
|
||||||
|
pulls_open: Vec::new(),
|
||||||
|
});
|
||||||
|
map.insert(project_name, vec![project]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Distributes each PR in the right vector
|
||||||
|
for d in &mut data {
|
||||||
|
map.get(d.name.as_str()).unwrap().iter().for_each(|p| {
|
||||||
|
let state = p.state.try_into().unwrap();
|
||||||
|
match state {
|
||||||
|
ProjectState::Closed => d.pulls_closed.push(p.to_owned()),
|
||||||
|
ProjectState::Merged => d.pulls_merged.push(p.to_owned()),
|
||||||
|
ProjectState::Open => d.pulls_open.push(p.to_owned()),
|
||||||
|
}
|
||||||
|
});
|
||||||
|
let mut name: Vec<char> = d.name.replace('-', " ").chars().collect();
|
||||||
|
name[0] = name[0].to_uppercase().next().unwrap();
|
||||||
|
d.name = name.into_iter().collect();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ascending order by pulls IDs
|
||||||
|
for d in &mut data {
|
||||||
|
d.pulls_closed.reverse();
|
||||||
|
d.pulls_merged.reverse();
|
||||||
|
d.pulls_open.reverse();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ascending order by number of pulls
|
||||||
|
data.sort_by(|a, b| {
|
||||||
|
b.pulls_merged
|
||||||
|
.len()
|
||||||
|
.partial_cmp(&a.pulls_merged.len())
|
||||||
|
.unwrap()
|
||||||
|
});
|
||||||
|
|
||||||
|
Ok(data)
|
||||||
|
}
|
||||||
|
Err(e) => Err(e),
|
||||||
|
}
|
||||||
|
}
|
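The first loop above grows `data` and `map` in lockstep, checking `contains_key` before inserting. The grouping idea itself is easier to see in isolation with the standard entry API; a minimal sketch under invented types, where a `(repo_name, pr_title)` pair stands in for the real PR record:

    use std::collections::HashMap;

    /// Group pull requests by repository name.
    /// The `(repo_name, pr_title)` pairs are hypothetical inputs for this sketch.
    fn group_by_repo(prs: &[(String, String)]) -> HashMap<&str, Vec<&str>> {
        let mut by_repo: HashMap<&str, Vec<&str>> = HashMap::new();
        for (repo, title) in prs {
            // entry() creates the Vec the first time a repo is seen, then pushes into it.
            by_repo.entry(repo.as_str()).or_default().push(title.as_str());
        }
        by_repo
    }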
src/utils/routes/cours.rs (new file, 106 lines)
@@ -0,0 +1,106 @@
@ -0,0 +1,106 @@
|
||||||
|
use std::{cmp::Ordering, path::Path};
|
||||||
|
|
||||||
|
use cached::proc_macro::once;
|
||||||
|
use regex::Regex;
|
||||||
|
use serde::Serialize;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, Serialize, PartialEq, Eq)]
|
||||||
|
pub struct FileNode {
|
||||||
|
name: String,
|
||||||
|
is_dir: bool,
|
||||||
|
children: Vec<FileNode>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Ord for FileNode {
|
||||||
|
fn cmp(&self, other: &Self) -> Ordering {
|
||||||
|
match (self.is_dir, other.is_dir) {
|
||||||
|
// If both are directories or both are files, compare names
|
||||||
|
(true, true) | (false, false) => self.name.cmp(&other.name),
|
||||||
|
// If self is directory and other is file, self comes first
|
||||||
|
(true, false) => Ordering::Less,
|
||||||
|
// If self is file and other is directory, other comes first
|
||||||
|
(false, true) => Ordering::Greater,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialOrd for FileNode {
|
||||||
|
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||||
|
Some(self.cmp(other))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[once(time = 120)]
|
||||||
|
pub fn get_filetree(
|
||||||
|
initial_dir: &str,
|
||||||
|
exclusion_list: &[String],
|
||||||
|
exclusion_patterns: &[Regex],
|
||||||
|
) -> FileNode {
|
||||||
|
gen_filetree(initial_dir, exclusion_list, exclusion_patterns)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn gen_filetree(
|
||||||
|
dir_path: &str,
|
||||||
|
exclusion_list: &[String],
|
||||||
|
exclusion_patterns: &[Regex],
|
||||||
|
) -> FileNode {
|
||||||
|
let mut children: Vec<FileNode> = std::fs::read_dir(dir_path)
|
||||||
|
.unwrap()
|
||||||
|
.filter_map(Result::ok)
|
||||||
|
.filter_map(|entry| {
|
||||||
|
let entry_path = entry.path();
|
||||||
|
let entry_name = entry_path.file_name()?.to_string_lossy().to_string();
|
||||||
|
|
||||||
|
// Exclusion checks
|
||||||
|
if excluded(&entry_name, exclusion_list, exclusion_patterns) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
if entry_path.is_file() {
|
||||||
|
Some(FileNode {
|
||||||
|
name: entry_name,
|
||||||
|
is_dir: false,
|
||||||
|
children: vec![],
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
// Exclude empty directories
|
||||||
|
let children_of_children = gen_filetree(
|
||||||
|
entry_path.to_str().unwrap(),
|
||||||
|
exclusion_list,
|
||||||
|
exclusion_patterns,
|
||||||
|
);
|
||||||
|
if children_of_children.is_dir && children_of_children.children.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(children_of_children)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
children.sort();
|
||||||
|
|
||||||
|
FileNode {
|
||||||
|
name: Path::new(dir_path)
|
||||||
|
.file_name()
|
||||||
|
.unwrap()
|
||||||
|
.to_string_lossy()
|
||||||
|
.to_string(),
|
||||||
|
is_dir: true,
|
||||||
|
children,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn excluded(element: &str, exclusion_list: &[String], exclusion_patterns: &[Regex]) -> bool {
|
||||||
|
if exclusion_list
|
||||||
|
.iter()
|
||||||
|
.any(|excluded_term| element.contains(excluded_term))
|
||||||
|
{
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
if exclusion_patterns.iter().any(|re| re.is_match(element)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
false
|
||||||
|
}
|
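Because `FileNode`'s `Ord` puts directories before files and otherwise falls back to name order, the `children.sort()` call yields the usual "folders first, then files, both alphabetical" listing. A small sketch of a test that could sit in the same module to pin that behaviour down (the node names are invented):

    #[cfg(test)]
    mod tests {
        use super::FileNode;

        #[test]
        fn directories_sort_before_files() {
            let mut nodes = vec![
                FileNode { name: "b.md".into(), is_dir: false, children: vec![] },
                FileNode { name: "a.md".into(), is_dir: false, children: vec![] },
                FileNode { name: "z-dir".into(), is_dir: true, children: vec![] },
            ];
            nodes.sort();
            let names: Vec<&str> = nodes.iter().map(|n| n.name.as_str()).collect();
            // The directory comes first even though its name sorts last.
            assert_eq!(names, ["z-dir", "a.md", "b.md"]);
        }
    }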
src/utils/routes/mod.rs (new file, 4 lines)
@@ -0,0 +1,4 @@
pub mod blog;
pub mod contact;
pub mod contrib;
pub mod cours;
@@ -16,7 +16,7 @@
 </aside>
 <main>
 {{^content}}
-<p>Fichier introuvable</p>
+<p>Fichier introuvable ou invalide.</p>
 {{/content}} {{#content}}
 <article>{{&content}}</article>
 </main>