Compare commits
2 commits: 98fd99f702 … b46b20e693
24 changed files with 643 additions and 650 deletions

Cargo.lock (generated)
@@ -557,9 +557,9 @@ dependencies = [
 
 [[package]]
 name = "cached"
-version = "0.54.0"
+version = "0.53.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9718806c4a2fe9e8a56fd736f97b340dd10ed1be8ed733ed50449f351dc33cae"
+checksum = "b4d73155ae6b28cf5de4cfc29aeb02b8a1c6dab883cb015d15cd514e42766846"
 dependencies = [
  "ahash 0.8.11",
  "async-trait",
@@ -12,7 +12,7 @@ license = "AGPL-3.0-or-later"
 [dependencies]
 actix-web = { version = "4.9", default-features = false, features = ["macros", "compress-brotli"] }
 actix-files = "0.6"
-cached = { version = "0.54", features = ["async", "ahash"] }
+cached = { version = "0.53", features = ["async", "ahash"] }
 ramhorns = "1.0"
 toml = "0.8"
 serde = { version = "1.0", features = ["derive"] }

@@ -35,7 +35,3 @@ cyborgtime = "2.1.1"
-
-[lints.clippy]
-pedantic = "warn"
-
 [profile.release]
 strip = "symbols"
 lto = "thin"
@@ -13,7 +13,7 @@ use crate::routes::{
 mod config;
 mod template;
 
-mod utils;
+mod misc;
 mod routes;
 
 #[actix_web::main]
@@ -2,7 +2,7 @@ use chrono::{Datelike, NaiveDate};
 use ramhorns::Content;
 use serde::{Deserialize, Deserializer};
 
-#[derive(Content, Clone, Default, Debug)]
+#[derive(Content, Default, Debug)]
 pub struct Date {
     pub day: u32,
     pub month: u32,
@@ -1,7 +1,7 @@
 use reqwest::{header::ACCEPT, Error};
 use serde::Deserialize;
 
-use crate::utils::misc::get_reqwest_client;
+use crate::misc::utils::get_reqwest_client;
 
 #[derive(Debug, Deserialize)]
 struct GithubResponse {
@@ -1,8 +1,10 @@
-use crate::utils::date::Date;
+use crate::misc::date::Date;
 use base64::engine::general_purpose;
 use base64::Engine;
-use comrak::nodes::{AstNode, NodeValue};
-use comrak::{format_html, parse_document, Arena, ComrakOptions, ListStyleType, Options};
+use comrak::nodes::{AstNode, NodeCode, NodeMath, NodeValue};
+use comrak::{
+    format_html, parse_document, Anchorizer, Arena, ComrakOptions, ListStyleType, Options,
+};
 use lol_html::html_content::ContentType;
 use lol_html::{element, rewrite_str, HtmlRewriter, RewriteStrSettings, Settings};
 use ramhorns::Content;
@@ -12,9 +14,8 @@ use std::fs;
 use std::path::Path;
 
 /// Metadata for blog posts
-#[derive(Content, Clone, Debug, Default, Deserialize)]
+#[derive(Content, Debug, Default, Deserialize)]
 pub struct FileMetadataBlog {
-    pub hardbreaks: Option<bool>,
     pub title: Option<String>,
     pub date: Option<Date>,
     pub description: Option<String>,
@@ -87,7 +88,6 @@ pub enum TypeFileMetadata {
 /// Usually all fields are None except one
 #[derive(Content, Debug, Default, Deserialize)]
 pub struct FileMetadata {
-    pub hardbreaks: bool,
     pub blog: Option<FileMetadataBlog>,
     pub contact: Option<FileMetadataContact>,
     pub index: Option<FileMetadataIndex>,
@@ -119,6 +119,7 @@ impl Metadata {
 pub struct File {
     pub metadata: Metadata,
     pub content: String,
+    pub toc_data: String,
 }
 
 /// Options used for parser and compiler MD --> HTML
@@ -153,7 +154,7 @@ pub fn get_options<'a>() -> ComrakOptions<'a> {
     // options.render.broken_link_callback = ...;
 
     // Renderer
-    options.render.hardbreaks = false;
+    options.render.hardbreaks = true;
     options.render.github_pre_lang = false;
     options.render.full_info_string = true;
     options.render.width = 0; // 0 mean disabled?
@@ -294,12 +295,14 @@ pub fn read_md(
     // Find metadata
     let metadata = get_metadata(root, metadata_type);
 
-    // Update comrak render properties
-    opt.render.hardbreaks = metadata.hardbreaks;
-
     let mermaid_name = "mermaid";
     hljs_replace(root, mermaid_name);
 
+    if let TypeFileMetadata::Blog = metadata_type {
+        // Change by metadata could be good for compatibility
+        opt.render.hardbreaks = true;
+    }
+
     // Convert to HTML
     let mut html = vec![];
     format_html(root, &opt, &mut html).unwrap();
@@ -312,6 +315,8 @@ pub fn read_md(
     html_content = custom_img_size(&html_content);
     (html_content, mail_obfsucated) = mail_obfuscation(&html_content);
 
+    let toc = toc_to_html(&generate_toc(root));
+
     let mut final_metadata = Metadata {
         info: metadata,
         mermaid: check_mermaid(root, mermaid_name),
@@ -324,6 +329,7 @@ pub fn read_md(
     File {
         metadata: final_metadata,
         content: html_content,
+        toc_data: toc,
     }
 }
 
@@ -335,35 +341,27 @@ fn deserialize_metadata<T: Default + serde::de::DeserializeOwned>(text: &str) ->
 /// Fetch metadata from AST
 pub fn get_metadata<'a>(root: &'a AstNode<'a>, mtype: &TypeFileMetadata) -> FileMetadata {
     root.children()
-        .map(|node| {
-            let generic = FileMetadata {
-                hardbreaks: true,
-                ..FileMetadata::default()
-            };
-
-            match &node.data.borrow().value {
+        .find_map(|node| match &node.data.borrow().value {
             // Extract metadata from frontmatter
-            NodeValue::FrontMatter(text) => match mtype {
-                TypeFileMetadata::Blog => {
-                    let metadata: FileMetadataBlog = deserialize_metadata(text);
-                    FileMetadata {
-                        blog: Some(metadata.clone()),
-                        hardbreaks: metadata.hardbreaks.unwrap_or_default(),
-                        ..FileMetadata::default()
-                    }
-                }
+            NodeValue::FrontMatter(text) => Some(match mtype {
+                TypeFileMetadata::Blog => FileMetadata {
+                    blog: Some(deserialize_metadata(text)),
+                    ..FileMetadata::default()
+                },
                 TypeFileMetadata::Contact => {
                     let mut metadata: FileMetadataContact = deserialize_metadata(text);
 
                     // Trim descriptions
                     if let Some(desc) = &mut metadata.description {
                         desc.clone_from(&desc.trim().into());
                     }
 
                     FileMetadata {
                         contact: Some(metadata),
                         ..FileMetadata::default()
                     }
                 }
-                TypeFileMetadata::Generic => generic,
+                TypeFileMetadata::Generic => FileMetadata::default(),
                 TypeFileMetadata::Index => FileMetadata {
                     index: Some(deserialize_metadata(text)),
                     ..FileMetadata::default()
@@ -372,11 +370,9 @@ pub fn get_metadata<'a>(root: &'a AstNode<'a>, mtype: &TypeFileMetadata) -> File
                     portfolio: Some(deserialize_metadata(text)),
                     ..FileMetadata::default()
                 },
-            },
-            _ => generic,
-            }
-        })
-        .next()
+            }),
+            _ => None,
+        })
         .map_or_else(
             || match mtype {
                 TypeFileMetadata::Blog => FileMetadata {
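
The refactor above replaces `.map(...).next()` with `.find_map(...)`: the closure now returns an `Option`, so non-frontmatter nodes no longer need a `generic` fallback value and the scan stops at the first frontmatter hit. A minimal standalone sketch of the idiom (std only, not code from this repository):

fn first_even_square(xs: &[i32]) -> Option<i32> {
    // `.find_map(f)` is `.filter_map(f).next()`: it yields the first `Some`.
    xs.iter()
        .find_map(|&x| if x % 2 == 0 { Some(x * x) } else { None })
}

fn main() {
    assert_eq!(first_even_square(&[1, 3, 4, 5]), Some(16));
    assert_eq!(first_even_square(&[1, 3, 5]), None);
}
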
@@ -518,3 +514,87 @@ fn mail_obfuscation(html: &str) -> (String, bool) {
         (new_html, modified)
     }
 }
+
+#[derive(Debug)]
+struct TOCEntry {
+    id: String,
+    title: String,
+    depth: u8,
+}
+
+fn generate_toc<'a>(root: &'a AstNode<'a>) -> Vec<TOCEntry> {
+    /// See <https://github.com/kivikakk/comrak/blob/b67d406d3b101b93539c37a1ca75bff81ff8c149/src/html.rs#L446>
+    fn collect_text<'a>(node: &'a AstNode<'a>, output: &mut String) {
+        match node.data.borrow().value {
+            NodeValue::Text(ref literal)
+            | NodeValue::Code(NodeCode { ref literal, .. })
+            | NodeValue::Math(NodeMath { ref literal, .. }) => {
+                *output = literal.to_string();
+            }
+            _ => {
+                for n in node.children() {
+                    if !output.is_empty() {
+                        break;
+                    }
+
+                    collect_text(n, output);
+                }
+            }
+        }
+    }
+
+    let mut toc = vec![];
+
+    let mut anchorizer = Anchorizer::new();
+
+    // Collect headings first to avoid mutable borrow conflicts
+    let headings: Vec<_> = root
+        .children()
+        .filter_map(|node| {
+            if let NodeValue::Heading(ref nch) = &node.data.borrow().value {
+                Some((*nch, node))
+            } else {
+                None
+            }
+        })
+        .collect();
+
+    // Now process each heading
+    for (nch, node) in headings {
+        let mut title = String::with_capacity(20);
+        collect_text(node, &mut title);
+
+        toc.push(TOCEntry {
+            id: anchorizer.anchorize(title.clone()),
+            title,
+            depth: nch.level,
+        });
+    }
+
+    toc
+}
+
+fn toc_to_html(toc: &[TOCEntry]) -> String {
+    if toc.is_empty() {
+        return String::new();
+    }
+
+    let mut html = Vec::with_capacity(20 + 20 * toc.len());
+
+    html.extend_from_slice(b"<ul>");
+
+    for entry in toc {
+        // TODO: Use depth
+        html.extend_from_slice(
+            format!(
+                "<li><a href=\"{}\">{} (dbg/depth/{})</a></li>",
+                entry.id, entry.title, entry.depth
+            )
+            .as_bytes(),
+        );
+    }
+
+    html.extend_from_slice(b"</ul>");
+
+    String::from_utf8(html).unwrap()
+}
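
The new `generate_toc` leans on comrak's `Anchorizer`, which produces GitHub-style slugs and suffixes duplicates, so each `TOCEntry.id` matches the anchor comrak emits for the heading itself. A standalone sketch of that slugging (assumes the `comrak` crate as pinned by this repo; not code from this diff):

use comrak::Anchorizer;

fn main() {
    let mut anchorizer = Anchorizer::new();
    // Lowercased, punctuation dropped, spaces hyphenated...
    assert_eq!(anchorizer.anchorize("Hello, World".to_string()), "hello-world");
    // ...and a repeated heading gets a numeric suffix so every id stays unique.
    assert_eq!(anchorizer.anchorize("Hello, World".to_string()), "hello-world-1");
}
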
@@ -1,5 +1,4 @@
 pub mod date;
 pub mod github;
 pub mod markdown;
-pub mod misc;
-pub mod routes;
+pub mod utils;
@@ -81,5 +81,6 @@ fn read_pdf(data: Vec<u8>) -> File {
                 style="width: 100%; height: 79vh";
                 >"#
         ),
+        toc_data: String::new(),
     }
 }
@@ -1,4 +1,4 @@
-use crate::{config::Config, utils::misc::get_url, template::InfosPage};
+use crate::{config::Config, misc::utils::get_url, template::InfosPage};
 use actix_web::{get, http::header::ContentType, routes, web, HttpResponse, Responder};
 use cached::proc_macro::once;
 use ramhorns::Content;
@@ -1,17 +1,33 @@
+use std::{
+    collections::hash_map::DefaultHasher,
+    hash::{Hash, Hasher},
+};
+
+use ::rss::{
+    extension::atom::{AtomExtension, Link},
+    Category, Channel, Guid, Image, Item,
+};
 use actix_web::{get, http::header::ContentType, routes, web, HttpResponse, Responder};
 use cached::proc_macro::once;
+use chrono::{DateTime, Datelike, Local, NaiveDateTime, Utc};
+use chrono_tz::Europe;
+use comrak::{parse_document, Arena};
 use ramhorns::Content;
 
 use crate::{
     config::Config,
-    template::{InfosPage, NavBar},
-    utils::{
-        markdown::{File, TypeFileMetadata},
-        misc::{make_kw, read_file, Html},
-        routes::blog::{build_rss, get_post, get_posts, Post, BLOG_DIR, MIME_TYPE_RSS, POST_DIR},
+    misc::{
+        date::Date,
+        markdown::{get_metadata, get_options, File, FileMetadataBlog, TypeFileMetadata},
+        utils::{get_url, make_kw, read_file, Html},
     },
+    template::{InfosPage, NavBar},
 };
 
+const MIME_TYPE_RSS: &str = "application/rss+xml";
+const BLOG_DIR: &str = "blog";
+const POST_DIR: &str = "posts";
+
 #[get("/blog")]
 pub async fn index(config: web::Data<Config>) -> impl Responder {
     Html(build_index(config.get_ref().to_owned()))
@@ -60,11 +76,116 @@ fn build_index(config: Config) -> String {
     )
 }
 
+#[derive(Content, Debug)]
+struct Post {
+    title: String,
+    date: Date,
+    url: String,
+    desc: Option<String>,
+    content: Option<String>,
+    tags: Vec<String>,
+}
+
+impl Post {
+    // Fetch the file content
+    fn fetch_content(&mut self, data_dir: &str) {
+        let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
+        let ext = ".md";
+
+        if let Some(file) = read_file(
+            &format!("{blog_dir}/{}{ext}", self.url),
+            &TypeFileMetadata::Blog,
+        ) {
+            self.content = Some(file.content);
+        }
+    }
+}
+
+impl Hash for Post {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        if let Some(content) = &self.content {
+            content.hash(state);
+        }
+    }
+}
+
+fn get_posts(location: &str) -> Vec<Post> {
+    let entries = std::fs::read_dir(location).map_or_else(
+        |_| vec![],
+        |res| {
+            res.flatten()
+                .filter(|f| f.path().extension().map_or(false, |ext| ext == "md"))
+                .collect::<Vec<std::fs::DirEntry>>()
+        },
+    );
+
+    entries
+        .iter()
+        .filter_map(|f| {
+            let fname = f.file_name();
+            let filename = fname.to_string_lossy();
+            let file_without_ext = filename.split_at(filename.len() - 3).0;
+
+            let file_metadata = std::fs::read_to_string(format!("{location}/{filename}"))
+                .map_or_else(
+                    |_| FileMetadataBlog {
+                        title: Some(file_without_ext.into()),
+                        ..FileMetadataBlog::default()
+                    },
+                    |text| {
+                        let arena = Arena::new();
+
+                        let options = get_options();
+                        let root = parse_document(&arena, &text, &options);
+                        let mut metadata =
+                            get_metadata(root, &TypeFileMetadata::Blog).blog.unwrap();
+
+                        // Always have a title
+                        metadata.title = metadata
+                            .title
+                            .map_or_else(|| Some(file_without_ext.into()), Some);
+
+                        metadata
+                    },
+                );
+
+            if file_metadata.publish == Some(true) {
+                Some(Post {
+                    url: file_without_ext.into(),
+                    title: file_metadata.title.unwrap(),
+                    date: file_metadata.date.unwrap_or({
+                        let m = f.metadata().unwrap();
+                        let date = std::convert::Into::<DateTime<Utc>>::into(
+                            m.modified().unwrap_or_else(|_| m.created().unwrap()),
+                        )
+                        .date_naive();
+
+                        Date {
+                            day: date.day(),
+                            month: date.month(),
+                            year: date.year(),
+                        }
+                    }),
+                    desc: file_metadata.description,
+                    content: None,
+                    tags: file_metadata
+                        .tags
+                        .unwrap_or_default()
+                        .iter()
+                        .map(|t| t.name.clone())
+                        .collect(),
+                })
+            } else {
+                None
+            }
+        })
+        .collect::<Vec<Post>>()
+}
+
 #[derive(Content, Debug)]
 struct BlogPostTemplate {
     navbar: NavBar,
     post: Option<File>,
-    toc: String,
 }
 
 #[get("/blog/p/{id}")]
@@ -77,7 +198,7 @@ pub async fn page(path: web::Path<(String,)>, config: web::Data<Config>) -> impl
 
 fn build_post(file: &str, config: Config) -> String {
     let mut post = None;
-    let (infos, toc) = get_post(
+    let infos = get_post(
         &mut post,
         file,
         &config.fc.name.unwrap_or_default(),
@@ -92,12 +213,51 @@ fn build_post(file: &str, config: Config) -> String {
                 ..NavBar::default()
             },
             post,
-            toc,
         },
         infos,
     )
 }
 
+fn get_post(post: &mut Option<File>, filename: &str, name: &str, data_dir: &str) -> InfosPage {
+    let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
+    let ext = ".md";
+
+    *post = read_file(
+        &format!("{blog_dir}/{filename}{ext}"),
+        &TypeFileMetadata::Blog,
+    );
+
+    let default = (filename, &format!("Blog d'{name}"), Vec::new());
+    let (title, desc, tags) = match post {
+        Some(data) => (
+            match &data.metadata.info.blog.as_ref().unwrap().title {
+                Some(text) => text,
+                None => default.0,
+            },
+            match &data.metadata.info.blog.as_ref().unwrap().description {
+                Some(desc) => desc,
+                None => default.1,
+            },
+            match &data.metadata.info.blog.as_ref().unwrap().tags {
+                Some(tags) => tags.clone(),
+                None => default.2,
+            },
+        ),
+        None => default,
+    };
+
+    InfosPage {
+        title: Some(format!("Post: {title}")),
+        desc: Some(desc.clone()),
+        kw: Some(make_kw(
+            &["blog", "blogging", "write", "writing"]
+                .into_iter()
+                .chain(tags.iter().map(|t| t.name.as_str()))
+                .collect::<Vec<_>>(),
+        )),
+    }
+}
+
 #[routes]
 #[get("/blog/blog.rss")]
 #[get("/blog/rss")]
@@ -106,3 +266,110 @@ pub async fn rss(config: web::Data<Config>) -> impl Responder {
         .content_type(ContentType(MIME_TYPE_RSS.parse().unwrap()))
         .body(build_rss(config.get_ref().to_owned()))
 }
+
+#[once(time = 10800)] // 3h
+fn build_rss(config: Config) -> String {
+    let mut posts = get_posts(&format!(
+        "{}/{}/{}",
+        config.locations.data_dir, BLOG_DIR, POST_DIR
+    ));
+
+    // Sort from newest to oldest
+    posts.sort_by_cached_key(|p| (p.date.year, p.date.month, p.date.day));
+    posts.reverse();
+
+    // Only the 20 newest
+    let max = 20;
+    if posts.len() > max {
+        posts.drain(max..);
+    }
+
+    let link_to_site = get_url(config.fc.clone());
+    let author = if let (Some(mail), Some(name)) = (config.fc.mail, config.fc.fullname.clone()) {
+        Some(format!("{mail} ({name})"))
+    } else {
+        None
+    };
+    let title = format!("Blog d'{}", config.fc.name.unwrap_or_default());
+    let lang = "fr";
+    let channel = Channel {
+        title: title.clone(),
+        link: link_to_site.clone(),
+        description: "Un fil qui parle d'informatique notamment".into(),
+        language: Some(lang.into()),
+        managing_editor: author.clone(),
+        webmaster: author,
+        pub_date: Some(Local::now().to_rfc2822()),
+        categories: ["blog", "blogging", "write", "writing"]
+            .iter()
+            .map(|&c| Category {
+                name: c.into(),
+                ..Category::default()
+            })
+            .collect(),
+        generator: Some("ewp with rss crate".into()),
+        docs: Some("https://www.rssboard.org/rss-specification".into()),
+        image: Some(Image {
+            url: format!("{link_to_site}/icons/favicon-32x32.png"),
+            title: title.clone(),
+            link: link_to_site.clone(),
+            ..Image::default()
+        }),
+        items: posts
+            .iter_mut()
+            .map(|p| {
+                // Get post data
+                p.fetch_content(&config.locations.data_dir);
+
+                // Build item
+                Item {
+                    title: Some(p.title.clone()),
+                    link: Some(format!("{}/blog/p/{}", link_to_site, p.url)),
+                    description: p.content.clone(),
+                    categories: p
+                        .tags
+                        .iter()
+                        .map(|c| Category {
+                            name: c.to_owned(),
+                            ..Category::default()
+                        })
+                        .collect(),
+                    guid: Some(Guid {
+                        value: format!("urn:hash:{}", {
+                            let mut hasher = DefaultHasher::new();
+                            p.hash(&mut hasher);
+                            hasher.finish()
+                        }),
+                        permalink: false,
+                    }),
+                    pub_date: Some(
+                        NaiveDateTime::parse_from_str(
+                            &format!("{}-{}-{} 13:12:00", p.date.day, p.date.month, p.date.year),
+                            "%d-%m-%Y %H:%M:%S",
+                        )
+                        .unwrap()
+                        .and_local_timezone(Europe::Paris)
+                        .unwrap()
+                        .to_rfc2822(),
+                    ),
+                    ..Item::default()
+                }
+            })
+            .collect(),
+        atom_ext: Some(AtomExtension {
+            links: vec![Link {
+                href: format!("{link_to_site}/blog/rss"),
+                rel: "self".into(),
+                hreflang: Some(lang.into()),
+                mime_type: Some(MIME_TYPE_RSS.into()),
+                title: Some(title),
+                length: None,
+            }],
+        }),
+        ..Channel::default()
+    };
+
+    std::str::from_utf8(&channel.write_to(Vec::new()).unwrap())
+        .unwrap()
+        .into()
+}
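
Worth noting about the `guid` above: `Post`'s manual `Hash` impl only hashes the fetched content, so an item's GUID changes exactly when the post body changes. A std-only sketch of the same `urn:hash:` scheme (illustrative, not code from this diff; `DefaultHasher::new()` is deterministic only within a given std version):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn content_guid(content: &str) -> String {
    // Mirror build_rss: hash the content, wrap the digest in a URN.
    let mut hasher = DefaultHasher::new();
    content.hash(&mut hasher);
    format!("urn:hash:{}", hasher.finish())
}

fn main() {
    assert_eq!(content_guid("hello"), content_guid("hello")); // same body, same GUID
    assert_ne!(content_guid("hello"), content_guid("hello, edited")); // new GUID on edit
}
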
@@ -2,15 +2,15 @@ use actix_web::{get, routes, web, HttpRequest, Responder};
 use cached::proc_macro::once;
 use glob::glob;
 use ramhorns::Content;
+use std::fs::read_to_string;
 
 use crate::{
     config::Config,
-    template::{InfosPage, NavBar},
-    utils::{
+    misc::{
         markdown::{File, TypeFileMetadata},
-        misc::{make_kw, read_file, Html},
-        routes::contact::{find_links, remove_paragraphs},
+        utils::{make_kw, read_file, Html},
     },
+    template::{InfosPage, NavBar},
 };
 
 const CONTACT_DIR: &str = "contacts";
@@ -32,6 +32,47 @@ async fn page(config: web::Data<Config>) -> impl Responder {
     Html(build_page(config.get_ref().to_owned()))
 }
 
+/// Contact node
+#[derive(Clone, Debug)]
+struct ContactLink {
+    service: String,
+    scope: Option<String>,
+    link: String,
+}
+
+#[once(time = 60)]
+fn find_links(directory: String) -> Vec<ContactLink> {
+    // TOML filename
+    let toml_file = "links.toml";
+
+    // Read the TOML file and parse it
+    let toml_str = read_to_string(format!("{directory}/{toml_file}")).unwrap_or_default();
+
+    let mut redirections = vec![];
+    match toml::de::from_str::<toml::Value>(&toml_str) {
+        Ok(data) => {
+            if let Some(section) = data.as_table() {
+                section.iter().for_each(|(key, value)| {
+                    // Scopes are delimited with `/`
+                    let (service, scope) = match key.split_once('/') {
+                        Some((service, scope)) => (service.to_owned(), Some(scope.to_owned())),
+                        None => (key.to_owned(), None),
+                    };
+
+                    redirections.push(ContactLink {
+                        service,
+                        scope,
+                        link: value.as_str().unwrap().to_owned(),
+                    });
+                });
+            }
+        }
+        Err(_) => return vec![],
+    }
+
+    redirections
+}
+
 #[routes]
 #[get("/{service}")]
 #[get("/{service}/{scope}")]
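
The inlined `find_links` reads a flat TOML table whose keys optionally carry a scope after a `/`. A sketch of that parsing (the `links.toml` shape is inferred from the code above, and the sample services are made up):

fn main() {
    let toml_str = r#"
github = "https://github.com/someone"
"gitlab/work" = "https://gitlab.com/someone-work"
"#;

    let data: toml::Value = toml::de::from_str(toml_str).unwrap();
    for (key, value) in data.as_table().unwrap() {
        // Same split as find_links: "service/scope" or bare "service".
        let (service, scope) = match key.split_once('/') {
            Some((service, scope)) => (service, Some(scope)),
            None => (key.as_str(), None),
        };
        println!("{service} {scope:?} -> {}", value.as_str().unwrap());
    }
}
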
@@ -51,7 +92,7 @@ async fn service_redirection(config: web::Data<Config>, req: HttpRequest) -> imp
             _ => false,
         })
         // Returns the link
-        .map(|data| data.url.clone())
+        .map(|data| data.link.clone())
         .collect::<Vec<String>>();
 
     // This shouldn't be more than one link here
@@ -78,6 +119,11 @@ struct NetworksTemplate {
     others: Vec<File>,
 }
 
+fn remove_paragraphs(list: &mut [File]) {
+    list.iter_mut()
+        .for_each(|file| file.content = file.content.replace("<p>", "").replace("</p>", ""));
+}
+
 #[once(time = 60)]
 fn build_page(config: Config) -> String {
     let contacts_dir = format!("{}/{}", config.locations.data_dir, CONTACT_DIR);
@@ -1,10 +1,12 @@
 use std::collections::HashMap;
 
 use crate::{
     config::Config,
-    template::{InfosPage, NavBar},
-    utils::{
-        misc::{make_kw, Html},
-        routes::contrib::{fetch, Project},
+    misc::{
+        github::{fetch_pr, ProjectState},
+        utils::{make_kw, Html},
     },
+    template::{InfosPage, NavBar},
 };
 use actix_web::{get, web, Responder};
 use cached::proc_macro::once;
@@ -24,6 +26,24 @@ struct PortfolioTemplate {
     closed: Option<Vec<Project>>,
 }
 
+#[derive(Clone, Content, Debug)]
+struct Project {
+    name: String,
+    url: String,
+    pulls_merged: Vec<Pull>,
+    pulls_open: Vec<Pull>,
+    pulls_closed: Vec<Pull>,
+}
+
+#[derive(Clone, Content, Debug)]
+struct Pull {
+    url: String,
+    id: u32,
+    name_repo: String,
+    title: String,
+    state: u8,
+}
+
 #[once(time = 600)] // 10min
 async fn build_page(config: Config) -> String {
     let navbar = NavBar {
@@ -32,8 +52,66 @@ async fn build_page(config: Config) -> String {
     };
 
     // Fetch latest data from github
-    let data = match fetch().await {
-        Ok(data) => PortfolioTemplate {
+    let data = match fetch_pr().await {
+        Ok(projects) => {
+            let mut data: Vec<Project> = Vec::new();
+
+            // Grouping PRs by projects
+            let mut map: HashMap<&str, Vec<Pull>> = HashMap::new();
+            for p in &projects {
+                let project = Pull {
+                    url: p.contrib_url.clone(),
+                    id: p.id,
+                    name_repo: p.name.clone(),
+                    title: p.title.clone(),
+                    state: p.status as u8,
+                };
+                let project_name = p.name.as_str();
+                if map.contains_key(project_name) {
+                    map.entry(project_name).and_modify(|v| v.push(project));
+                } else {
+                    data.push(Project {
+                        name: project_name.into(),
+                        url: p.url.clone(),
+                        pulls_merged: Vec::new(),
+                        pulls_closed: Vec::new(),
+                        pulls_open: Vec::new(),
+                    });
+                    map.insert(project_name, vec![project]);
+                }
+            }
+
+            // Distributes each PR in the right vector
+            for d in &mut data {
+                map.get(d.name.as_str()).unwrap().iter().for_each(|p| {
+                    let state = p.state.try_into().unwrap();
+                    match state {
+                        ProjectState::Closed => d.pulls_closed.push(p.to_owned()),
+                        ProjectState::Merged => d.pulls_merged.push(p.to_owned()),
+                        ProjectState::Open => d.pulls_open.push(p.to_owned()),
+                    }
+                });
+                let mut name: Vec<char> = d.name.replace('-', " ").chars().collect();
+                name[0] = name[0].to_uppercase().next().unwrap();
+                d.name = name.into_iter().collect();
+            }
+
+            // Ascending order by pulls IDs
+            for d in &mut data {
+                d.pulls_closed.reverse();
+                d.pulls_merged.reverse();
+                d.pulls_open.reverse();
+            }
+
+            // Ascending order by number of pulls
+            data.sort_by(|a, b| {
+                b.pulls_merged
+                    .len()
+                    .partial_cmp(&a.pulls_merged.len())
+                    .unwrap()
+            });
+
+            PortfolioTemplate {
                 navbar,
                 error: false,
                 projects: Some(
@@ -54,7 +132,8 @@ async fn build_page(config: Config) -> String {
                         .cloned()
                         .collect(),
                 ),
-        },
+            }
+        }
         Err(e) => {
             eprintln!("{e}");
@@ -1,17 +1,18 @@
 use std::path::Path;
 
 use actix_web::{get, web, Responder};
 use cached::proc_macro::cached;
 use ramhorns::Content;
 use regex::Regex;
-use serde::Deserialize;
+use serde::{Deserialize, Serialize};
 
 use crate::{
     config::Config,
-    template::{InfosPage, NavBar},
-    utils::{
+    misc::{
         markdown::{File, TypeFileMetadata},
-        misc::{make_kw, read_file, Html},
-        routes::cours::get_filetree,
+        utils::{make_kw, read_file, Html},
     },
+    template::{InfosPage, NavBar},
 };
 
 #[derive(Debug, Deserialize)]
@@ -31,6 +32,13 @@ struct CoursTemplate {
     content: Option<File>,
 }
 
+#[derive(Clone, Debug, Serialize)]
+struct FileNode {
+    name: String,
+    is_dir: bool,
+    children: Vec<FileNode>,
+}
+
 #[cached]
 fn compile_patterns(exclusion_list: Vec<String>) -> Vec<Regex> {
     exclusion_list
@@ -39,25 +47,64 @@ fn compile_patterns(exclusion_list: Vec<String>) -> Vec<Regex> {
         .collect()
 }
 
+fn get_filetree(dir_path: &str, exclusion_patterns: &Vec<Regex>) -> FileNode {
+    let children = std::fs::read_dir(dir_path)
+        .unwrap()
+        .filter_map(Result::ok)
+        .filter_map(|entry| {
+            let entry_path = entry.path();
+            let entry_name = entry_path.file_name()?.to_string_lossy().to_string();
+
+            // Exclude element with the exclusion_list
+            if exclusion_patterns.iter().any(|re| re.is_match(&entry_name)) {
+                return None;
+            }
+
+            if entry_path.is_file() {
+                Some(FileNode {
+                    name: entry_name,
+                    is_dir: false,
+                    children: vec![],
+                })
+            } else {
+                // Exclude empty directories
+                let children_of_children =
+                    get_filetree(entry_path.to_str().unwrap(), exclusion_patterns);
+                if children_of_children.is_dir && children_of_children.children.is_empty() {
+                    None
+                } else {
+                    Some(children_of_children)
+                }
+            }
+        })
+        .collect();
+
+    FileNode {
+        name: Path::new(dir_path)
+            .file_name()
+            .unwrap()
+            .to_string_lossy()
+            .to_string(),
+        is_dir: true,
+        children,
+    }
+}
+
 /// Get a page content
 fn get_content(
     cours_dir: &str,
     path: &web::Query<PathRequest>,
     exclusion_list: &[String],
-    exclusion_patterns: &[Regex],
 ) -> Option<File> {
     let filename = path.q.as_ref().map_or("index.md", |q| q);
 
     // Exclusion checks
+    // We should support regex?
     if exclusion_list
         .iter()
         .any(|excluded_term| filename.contains(excluded_term.as_str()))
     {
         return None;
     }
-    if exclusion_patterns.iter().any(|re| re.is_match(filename)) {
-        return None;
-    }
 
     read_file(
         &format!("{cours_dir}/{filename}"),
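
The `FileNode` tree built here is later handed to the template as JSON via `serde_json::to_string` (see the `filetree:` field in the next hunk), so every node flattens to `name`/`is_dir`/`children`. A sketch of the resulting JSON for a tiny tree (struct copied from the diff; the sample file name is made up):

use serde::Serialize;

#[derive(Serialize)]
struct FileNode {
    name: String,
    is_dir: bool,
    children: Vec<FileNode>,
}

fn main() {
    let tree = FileNode {
        name: "cours".into(),
        is_dir: true,
        children: vec![FileNode {
            name: "index.md".into(),
            is_dir: false,
            children: vec![],
        }],
    };
    // {"name":"cours","is_dir":true,"children":[{"name":"index.md","is_dir":false,"children":[]}]}
    println!("{}", serde_json::to_string(&tree).unwrap());
}
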
@@ -67,23 +114,9 @@ fn get_content(
 
 fn build_page(info: &web::Query<PathRequest>, config: Config) -> String {
     let cours_dir = "data/cours";
 
-    let (ep, el): (_, Vec<String>) = config
-        .fc
-        .exclude_courses
-        .unwrap()
-        .into_iter()
-        .partition(|item| item.starts_with('/'));
-
-    let exclusion_list = {
-        let mut base = vec!["../".to_owned()];
-        base.extend(el);
-        base
-    };
-    let exclusion_patterns: Vec<Regex> =
-        compile_patterns(ep.iter().map(|r| r[1..r.len() - 1].to_owned()).collect());
-
-    let filetree = get_filetree(cours_dir, &exclusion_list, &exclusion_patterns);
+    let exclusion_list = config.fc.exclude_courses.unwrap();
+    let exclusion_patterns = compile_patterns(exclusion_list.clone());
+    let filetree = get_filetree(cours_dir, &exclusion_patterns);
 
     config.tmpl.render(
         "cours.html",
@@ -93,7 +126,7 @@ fn build_page(info: &web::Query<PathRequest>, config: Config) -> String {
             ..NavBar::default()
         },
         filetree: serde_json::to_string(&filetree).unwrap(),
-        content: get_content(cours_dir, info, &exclusion_list, &exclusion_patterns),
+        content: get_content(cours_dir, info, &exclusion_list),
     },
     InfosPage {
         title: Some("Cours".into()),
@@ -4,11 +4,11 @@ use ramhorns::Content;
 
 use crate::{
     config::Config,
-    template::{InfosPage, NavBar},
-    utils::{
+    misc::{
         markdown::{File, TypeFileMetadata},
-        misc::{make_kw, read_file, Html},
+        utils::{make_kw, read_file, Html},
     },
+    template::{InfosPage, NavBar},
 };
 
 #[get("/")]
@@ -4,7 +4,7 @@ use ramhorns::Content;
 
 use crate::{
     config::Config,
-    utils::misc::{get_url, Html},
+    misc::utils::{get_url, Html},
     template::{InfosPage, NavBar},
 };
 
@@ -5,9 +5,9 @@ use ramhorns::Content;
 
 use crate::{
     config::Config,
-    utils::{
+    misc::{
         markdown::{File, TypeFileMetadata},
-        misc::{make_kw, read_file, Html},
+        utils::{make_kw, read_file, Html},
     },
     template::{InfosPage, NavBar},
 };
@@ -3,7 +3,7 @@ use cached::proc_macro::once;
 
 use crate::{
     config::Config,
-    utils::misc::{make_kw, Html},
+    misc::utils::{make_kw, Html},
     template::InfosPage,
 };
 
@@ -1,299 +0,0 @@
-use std::{
-    collections::hash_map::DefaultHasher,
-    hash::{Hash, Hasher},
-};
-
-use ::rss::{
-    extension::atom::{AtomExtension, Link},
-    Category, Channel, Guid, Image, Item,
-};
-use cached::proc_macro::once;
-use chrono::{DateTime, Datelike, Local, NaiveDateTime, Utc};
-use chrono_tz::Europe;
-use comrak::{parse_document, Arena};
-use ramhorns::Content;
-
-use crate::{
-    config::Config,
-    template::InfosPage,
-    utils::{
-        date::Date,
-        markdown::{get_metadata, get_options, File, FileMetadataBlog, TypeFileMetadata},
-        misc::{get_url, make_kw, read_file},
-    },
-};
-
-pub const MIME_TYPE_RSS: &str = "application/rss+xml";
-pub const BLOG_DIR: &str = "blog";
-pub const POST_DIR: &str = "posts";
-
-#[derive(Content, Debug)]
-pub struct Post {
-    title: String,
-    pub date: Date,
-    pub url: String,
-    desc: Option<String>,
-    content: Option<String>,
-    tags: Vec<String>,
-}
-
-impl Post {
-    // Fetch the file content
-    fn fetch_content(&mut self, data_dir: &str) {
-        let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
-        let ext = ".md";
-
-        if let Some(file) = read_file(
-            &format!("{blog_dir}/{}{ext}", self.url),
-            &TypeFileMetadata::Blog,
-        ) {
-            self.content = Some(file.content);
-        }
-    }
-}
-
-impl Hash for Post {
-    fn hash<H: Hasher>(&self, state: &mut H) {
-        if let Some(content) = &self.content {
-            content.hash(state);
-        }
-    }
-}
-
-pub fn get_posts(location: &str) -> Vec<Post> {
-    let entries = std::fs::read_dir(location).map_or_else(
-        |_| vec![],
-        |res| {
-            res.flatten()
-                .filter(|f| f.path().extension().map_or(false, |ext| ext == "md"))
-                .collect::<Vec<std::fs::DirEntry>>()
-        },
-    );
-
-    entries
-        .iter()
-        .filter_map(|f| {
-            let fname = f.file_name();
-            let filename = fname.to_string_lossy();
-            let file_without_ext = filename.split_at(filename.len() - 3).0;
-
-            let file_metadata = std::fs::read_to_string(format!("{location}/{filename}"))
-                .map_or_else(
-                    |_| FileMetadataBlog {
-                        title: Some(file_without_ext.into()),
-                        ..FileMetadataBlog::default()
-                    },
-                    |text| {
-                        let arena = Arena::new();
-
-                        let options = get_options();
-                        let root = parse_document(&arena, &text, &options);
-                        let mut metadata =
-                            get_metadata(root, &TypeFileMetadata::Blog).blog.unwrap();
-
-                        // Always have a title
-                        metadata.title = metadata
-                            .title
-                            .map_or_else(|| Some(file_without_ext.into()), Some);
-
-                        metadata
-                    },
-                );
-
-            if file_metadata.publish == Some(true) {
-                Some(Post {
-                    url: file_without_ext.into(),
-                    title: file_metadata.title.unwrap(),
-                    date: file_metadata.date.unwrap_or({
-                        let m = f.metadata().unwrap();
-                        let date = std::convert::Into::<DateTime<Utc>>::into(
-                            m.modified().unwrap_or_else(|_| m.created().unwrap()),
-                        )
-                        .date_naive();
-
-                        Date {
-                            day: date.day(),
-                            month: date.month(),
-                            year: date.year(),
-                        }
-                    }),
-                    desc: file_metadata.description,
-                    content: None,
-                    tags: file_metadata
-                        .tags
-                        .unwrap_or_default()
-                        .iter()
-                        .map(|t| t.name.clone())
-                        .collect(),
-                })
-            } else {
-                None
-            }
-        })
-        .collect::<Vec<Post>>()
-}
-
-pub fn get_post(
-    post: &mut Option<File>,
-    filename: &str,
-    name: &str,
-    data_dir: &str,
-) -> (InfosPage, String) {
-    let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
-    let ext = ".md";
-
-    *post = read_file(
-        &format!("{blog_dir}/{filename}{ext}"),
-        &TypeFileMetadata::Blog,
-    );
-
-    let default = (
-        filename,
-        &format!("Blog d'{name}"),
-        Vec::new(),
-        String::new(),
-    );
-    let (title, desc, tags, toc) = match post {
-        Some(data) => (
-            match &data.metadata.info.blog.as_ref().unwrap().title {
-                Some(text) => text,
-                None => default.0,
-            },
-            match &data.metadata.info.blog.as_ref().unwrap().description {
-                Some(desc) => desc,
-                None => default.1,
-            },
-            match &data.metadata.info.blog.as_ref().unwrap().tags {
-                Some(tags) => tags.clone(),
-                None => default.2,
-            },
-            match &data.metadata.info.blog.as_ref().unwrap().toc {
-                // TODO: Generate TOC
-                Some(true) => String::new(),
-                _ => default.3,
-            },
-        ),
-        None => default,
-    };
-
-    (
-        InfosPage {
-            title: Some(format!("Post: {title}")),
-            desc: Some(desc.clone()),
-            kw: Some(make_kw(
-                &["blog", "blogging", "write", "writing"]
-                    .into_iter()
-                    .chain(tags.iter().map(|t| t.name.as_str()))
-                    .collect::<Vec<_>>(),
-            )),
-        },
-        toc,
-    )
-}
-
-#[once(time = 10800)] // 3h
-pub fn build_rss(config: Config) -> String {
-    let mut posts = get_posts(&format!(
-        "{}/{}/{}",
-        config.locations.data_dir, BLOG_DIR, POST_DIR
-    ));
-
-    // Sort from newest to oldest
-    posts.sort_by_cached_key(|p| (p.date.year, p.date.month, p.date.day));
-    posts.reverse();
-
-    // Only the 20 newest
-    let max = 20;
-    if posts.len() > max {
-        posts.drain(max..);
-    }
-
-    let link_to_site = get_url(config.fc.clone());
-    let author = if let (Some(mail), Some(name)) = (config.fc.mail, config.fc.fullname.clone()) {
-        Some(format!("{mail} ({name})"))
-    } else {
-        None
-    };
-    let title = format!("Blog d'{}", config.fc.name.unwrap_or_default());
-    let lang = "fr";
-    let channel = Channel {
-        title: title.clone(),
-        link: link_to_site.clone(),
-        description: "Un fil qui parle d'informatique notamment".into(),
-        language: Some(lang.into()),
-        managing_editor: author.clone(),
-        webmaster: author,
-        pub_date: Some(Local::now().to_rfc2822()),
-        categories: ["blog", "blogging", "write", "writing"]
-            .iter()
-            .map(|&c| Category {
-                name: c.into(),
-                ..Category::default()
-            })
-            .collect(),
-        generator: Some("ewp with rss crate".into()),
-        docs: Some("https://www.rssboard.org/rss-specification".into()),
-        image: Some(Image {
-            url: format!("{link_to_site}/icons/favicon-32x32.png"),
-            title: title.clone(),
-            link: link_to_site.clone(),
-            ..Image::default()
-        }),
-        items: posts
-            .iter_mut()
-            .map(|p| {
-                // Get post data
-                p.fetch_content(&config.locations.data_dir);
-
-                // Build item
-                Item {
-                    title: Some(p.title.clone()),
-                    link: Some(format!("{}/blog/p/{}", link_to_site, p.url)),
-                    description: p.content.clone(),
-                    categories: p
-                        .tags
-                        .iter()
-                        .map(|c| Category {
-                            name: c.to_owned(),
-                            ..Category::default()
-                        })
-                        .collect(),
-                    guid: Some(Guid {
-                        value: format!("urn:hash:{}", {
-                            let mut hasher = DefaultHasher::new();
-                            p.hash(&mut hasher);
-                            hasher.finish()
-                        }),
-                        permalink: false,
-                    }),
-                    pub_date: Some(
-                        NaiveDateTime::parse_from_str(
-                            &format!("{}-{}-{} 13:12:00", p.date.day, p.date.month, p.date.year),
-                            "%d-%m-%Y %H:%M:%S",
-                        )
-                        .unwrap()
-                        .and_local_timezone(Europe::Paris)
-                        .unwrap()
-                        .to_rfc2822(),
-                    ),
-                    ..Item::default()
-                }
-            })
-            .collect(),
-        atom_ext: Some(AtomExtension {
-            links: vec![Link {
-                href: format!("{link_to_site}/blog/rss"),
-                rel: "self".into(),
-                hreflang: Some(lang.into()),
-                mime_type: Some(MIME_TYPE_RSS.into()),
-                title: Some(title),
-                length: None,
-            }],
-        }),
-        ..Channel::default()
-    };
-
-    std::str::from_utf8(&channel.write_to(Vec::new()).unwrap())
-        .unwrap()
-        .into()
-}
@@ -1,50 +0,0 @@
-use cached::proc_macro::once;
-use std::fs::read_to_string;
-
-use crate::utils::markdown::File;
-
-/// Contact node
-#[derive(Clone, Debug)]
-pub struct Link {
-    pub service: String,
-    pub scope: Option<String>,
-    pub url: String,
-}
-
-#[once(time = 60)]
-pub fn find_links(directory: String) -> Vec<Link> {
-    // TOML filename
-    let toml_file = "links.toml";
-
-    // Read the TOML file and parse it
-    let toml_str = read_to_string(format!("{directory}/{toml_file}")).unwrap_or_default();
-
-    let mut redirections = vec![];
-    match toml::de::from_str::<toml::Value>(&toml_str) {
-        Ok(data) => {
-            if let Some(section) = data.as_table() {
-                section.iter().for_each(|(key, value)| {
-                    // Scopes are delimited with `/`
-                    let (service, scope) = match key.split_once('/') {
-                        Some((service, scope)) => (service.to_owned(), Some(scope.to_owned())),
-                        None => (key.to_owned(), None),
-                    };
-
-                    redirections.push(Link {
-                        service,
-                        scope,
-                        url: value.as_str().unwrap().to_owned(),
-                    });
-                });
-            }
-        }
-        Err(_) => return vec![],
-    }
-
-    redirections
-}
-
-pub fn remove_paragraphs(list: &mut [File]) {
-    list.iter_mut()
-        .for_each(|file| file.content = file.content.replace("<p>", "").replace("</p>", ""));
-}
@@ -1,90 +0,0 @@
-use std::collections::HashMap;
-
-use ramhorns::Content;
-use reqwest::Error;
-
-use crate::utils::github::{fetch_pr, ProjectState};
-
-#[derive(Clone, Content, Debug)]
-pub struct Project {
-    name: String,
-    url: String,
-    pub pulls_merged: Vec<Pull>,
-    pub pulls_open: Vec<Pull>,
-    pub pulls_closed: Vec<Pull>,
-}
-
-#[derive(Clone, Content, Debug)]
-pub struct Pull {
-    url: String,
-    id: u32,
-    name_repo: String,
-    title: String,
-    state: u8,
-}
-
-pub async fn fetch() -> Result<Vec<Project>, Error> {
-    match fetch_pr().await {
-        Ok(projects) => {
-            let mut data: Vec<Project> = Vec::new();
-
-            // Grouping PRs by projects
-            let mut map: HashMap<&str, Vec<Pull>> = HashMap::new();
-            for p in &projects {
-                let project = Pull {
-                    url: p.contrib_url.clone(),
-                    id: p.id,
-                    name_repo: p.name.clone(),
-                    title: p.title.clone(),
-                    state: p.status as u8,
-                };
-                let project_name = p.name.as_str();
-                if map.contains_key(project_name) {
-                    map.entry(project_name).and_modify(|v| v.push(project));
-                } else {
-                    data.push(Project {
-                        name: project_name.into(),
-                        url: p.url.clone(),
-                        pulls_merged: Vec::new(),
-                        pulls_closed: Vec::new(),
-                        pulls_open: Vec::new(),
-                    });
-                    map.insert(project_name, vec![project]);
-                }
-            }
-
-            // Distributes each PR in the right vector
-            for d in &mut data {
-                map.get(d.name.as_str()).unwrap().iter().for_each(|p| {
-                    let state = p.state.try_into().unwrap();
-                    match state {
-                        ProjectState::Closed => d.pulls_closed.push(p.to_owned()),
-                        ProjectState::Merged => d.pulls_merged.push(p.to_owned()),
-                        ProjectState::Open => d.pulls_open.push(p.to_owned()),
-                    }
-                });
-                let mut name: Vec<char> = d.name.replace('-', " ").chars().collect();
-                name[0] = name[0].to_uppercase().next().unwrap();
-                d.name = name.into_iter().collect();
-            }
-
-            // Ascending order by pulls IDs
-            for d in &mut data {
-                d.pulls_closed.reverse();
-                d.pulls_merged.reverse();
-                d.pulls_open.reverse();
-            }
-
-            // Ascending order by number of pulls
-            data.sort_by(|a, b| {
-                b.pulls_merged
-                    .len()
-                    .partial_cmp(&a.pulls_merged.len())
-                    .unwrap()
-            });
-
-            Ok(data)
-        }
-        Err(e) => Err(e),
-    }
-}
@@ -1,67 +0,0 @@
-use std::path::Path;
-
-use regex::Regex;
-use serde::Serialize;
-
-#[derive(Clone, Debug, Serialize)]
-pub struct FileNode {
-    name: String,
-    is_dir: bool,
-    children: Vec<FileNode>,
-}
-
-pub fn get_filetree(
-    dir_path: &str,
-    exclusion_list: &[String],
-    exclusion_patterns: &[Regex],
-) -> FileNode {
-    let children = std::fs::read_dir(dir_path)
-        .unwrap()
-        .filter_map(Result::ok)
-        .filter_map(|entry| {
-            let entry_path = entry.path();
-            let entry_name = entry_path.file_name()?.to_string_lossy().to_string();
-
-            // Exclusion checks
-            if exclusion_list
-                .iter()
-                .any(|excluded_term| entry_name.contains(excluded_term.as_str()))
-            {
-                return None;
-            }
-            if exclusion_patterns.iter().any(|re| re.is_match(&entry_name)) {
-                return None;
-            }
-
-            if entry_path.is_file() {
-                Some(FileNode {
-                    name: entry_name,
-                    is_dir: false,
-                    children: vec![],
-                })
-            } else {
-                // Exclude empty directories
-                let children_of_children = get_filetree(
-                    entry_path.to_str().unwrap(),
-                    exclusion_list,
-                    exclusion_patterns,
-                );
-                if children_of_children.is_dir && children_of_children.children.is_empty() {
-                    None
-                } else {
-                    Some(children_of_children)
-                }
-            }
-        })
-        .collect();
-
-    FileNode {
-        name: Path::new(dir_path)
-            .file_name()
-            .unwrap()
-            .to_string_lossy()
-            .to_string(),
-        is_dir: true,
-        children,
-    }
-}
@@ -1,4 +0,0 @@
-pub mod blog;
-pub mod contact;
-pub mod contrib;
-pub mod cours;
@@ -28,7 +28,9 @@
     <main>
       {{^post}}
       <p>This post doesn't exist... sorry</p>
-      {{/post}} {{#post}} {{&toc}}
+      {{/post}} {{#post}} {{#metadata}} {{#info}} {{#blog}} {{#toc}}
+      <aside>{{&toc_data}}</aside>
+      {{/toc}} {{/blog}} {{/info}} {{/metadata}}
       <article>{{&content}}</article>
       {{/post}}
     </main>
@@ -16,7 +16,7 @@
     </aside>
     <main>
       {{^content}}
-      <p>Fichier introuvable ou invalide.</p>
+      <p>Fichier introuvable</p>
       {{/content}} {{#content}}
       <article>{{&content}}</article>
     </main>