Compare commits


2 commits
main...toc

SHA1        Message                                     Date
98fd99f702  Respect toc attribute in metadata           2024-11-06 18:38:56 +01:00
b46b20e693  wip: quick and dumb implementation of toc   2024-11-06 18:38:56 +01:00
24 changed files with 684 additions and 747 deletions

Cargo.lock (generated)

@@ -557,9 +557,9 @@ dependencies = [
 [[package]]
 name = "cached"
-version = "0.54.0"
+version = "0.53.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9718806c4a2fe9e8a56fd736f97b340dd10ed1be8ed733ed50449f351dc33cae"
+checksum = "b4d73155ae6b28cf5de4cfc29aeb02b8a1c6dab883cb015d15cd514e42766846"
 dependencies = [
  "ahash 0.8.11",
  "async-trait",


@@ -12,7 +12,7 @@ license = "AGPL-3.0-or-later"
 [dependencies]
 actix-web = { version = "4.9", default-features = false, features = ["macros", "compress-brotli"] }
 actix-files = "0.6"
-cached = { version = "0.54", features = ["async", "ahash"] }
+cached = { version = "0.53", features = ["async", "ahash"] }
 ramhorns = "1.0"
 toml = "0.8"
 serde = { version = "1.0", features = ["derive"] }
@@ -35,7 +35,3 @@ cyborgtime = "2.1.1"
 [lints.clippy]
 pedantic = "warn"
-
-[profile.release]
-strip = "symbols"
-lto = "thin"


@@ -13,7 +13,7 @@ use crate::routes::{
 mod config;
 mod template;
-mod utils;
+mod misc;
 mod routes;

 #[actix_web::main]


@@ -2,7 +2,7 @@ use chrono::{Datelike, NaiveDate};
 use ramhorns::Content;
 use serde::{Deserialize, Deserializer};

-#[derive(Content, Clone, Default, Debug)]
+#[derive(Content, Default, Debug)]
 pub struct Date {
     pub day: u32,
     pub month: u32,


@@ -1,7 +1,7 @@
 use reqwest::{header::ACCEPT, Error};
 use serde::Deserialize;

-use crate::utils::misc::get_reqwest_client;
+use crate::misc::utils::get_reqwest_client;

 #[derive(Debug, Deserialize)]
 struct GithubResponse {


@@ -1,8 +1,10 @@
-use crate::utils::date::Date;
+use crate::misc::date::Date;
 use base64::engine::general_purpose;
 use base64::Engine;
-use comrak::nodes::{AstNode, NodeValue};
-use comrak::{format_html, parse_document, Arena, ComrakOptions, ListStyleType, Options};
+use comrak::nodes::{AstNode, NodeCode, NodeMath, NodeValue};
+use comrak::{
+    format_html, parse_document, Anchorizer, Arena, ComrakOptions, ListStyleType, Options,
+};
 use lol_html::html_content::ContentType;
 use lol_html::{element, rewrite_str, HtmlRewriter, RewriteStrSettings, Settings};
 use ramhorns::Content;
@@ -12,9 +14,8 @@ use std::fs;
 use std::path::Path;

 /// Metadata for blog posts
-#[derive(Content, Clone, Debug, Default, Deserialize)]
+#[derive(Content, Debug, Default, Deserialize)]
 pub struct FileMetadataBlog {
-    pub hardbreaks: Option<bool>,
     pub title: Option<String>,
     pub date: Option<Date>,
     pub description: Option<String>,
@@ -45,7 +46,7 @@ impl<'a> Deserialize<'a> for Tag {
 }

 /// Metadata for contact entry
-#[derive(Content, Debug, Default, Deserialize, Clone)]
+#[derive(Content, Debug, Default, Deserialize)]
 pub struct FileMetadataContact {
     pub title: String,
     pub custom: Option<bool>,
@@ -56,7 +57,7 @@ pub struct FileMetadataContact {
 }

 /// Metadata for index page
-#[derive(Content, Debug, Default, Deserialize, Clone)]
+#[derive(Content, Debug, Default, Deserialize)]
 pub struct FileMetadataIndex {
     pub name: Option<String>,
     pub pronouns: Option<String>,
@@ -66,7 +67,7 @@ pub struct FileMetadataIndex {
 }

 /// Metadata for portfolio cards
-#[derive(Content, Debug, Default, Deserialize, Clone)]
+#[derive(Content, Debug, Default, Deserialize)]
 pub struct FileMetadataPortfolio {
     pub title: Option<String>,
     pub link: Option<String>,
@@ -75,7 +76,6 @@ pub struct FileMetadataPortfolio {
 }

 /// List of available metadata types
-#[derive(Hash, PartialEq, Eq, Clone, Copy)]
 pub enum TypeFileMetadata {
     Blog,
     Contact,
@@ -86,9 +86,8 @@
 /// Structure who holds all the metadata the file have
 /// Usually all fields are None except one
-#[derive(Content, Debug, Default, Deserialize, Clone)]
+#[derive(Content, Debug, Default, Deserialize)]
 pub struct FileMetadata {
-    pub hardbreaks: bool,
     pub blog: Option<FileMetadataBlog>,
     pub contact: Option<FileMetadataContact>,
     pub index: Option<FileMetadataIndex>,
@@ -97,7 +96,7 @@ pub struct FileMetadata {
 #[allow(clippy::struct_excessive_bools)]
 /// Global metadata
-#[derive(Content, Debug, Clone)]
+#[derive(Content, Debug)]
 pub struct Metadata {
     pub info: FileMetadata,
     pub math: bool,
@@ -116,10 +115,11 @@ impl Metadata {
 }

 /// File description
-#[derive(Content, Debug, Clone)]
+#[derive(Content, Debug)]
 pub struct File {
     pub metadata: Metadata,
     pub content: String,
+    pub toc_data: String,
 }

 /// Options used for parser and compiler MD --> HTML
@@ -154,7 +154,7 @@ pub fn get_options<'a>() -> ComrakOptions<'a> {
     // options.render.broken_link_callback = ...;

     // Renderer
-    options.render.hardbreaks = false;
+    options.render.hardbreaks = true;
     options.render.github_pre_lang = false;
     options.render.full_info_string = true;
     options.render.width = 0; // 0 mean disabled?
@@ -248,8 +248,12 @@ fn fix_images_and_integration(path: &str, html: &str) -> (String, Metadata) {
             if mime == "text/markdown" {
                 let mut options = get_options();
                 options.extension.footnotes = false;
-                let data =
-                    read_md(&img_path, &file, TypeFileMetadata::Generic, Some(options));
+                let data = read_md(
+                    &img_path,
+                    &file,
+                    &TypeFileMetadata::Generic,
+                    Some(options),
+                );
                 el.replace(&data.content, ContentType::Html);

                 // Store the metadata for later merging
@@ -280,7 +284,7 @@ fn fix_images_and_integration(path: &str, html: &str) -> (String, Metadata) {
 pub fn read_md(
     path: &str,
     raw_text: &str,
-    metadata_type: TypeFileMetadata,
+    metadata_type: &TypeFileMetadata,
     options: Option<Options>,
 ) -> File {
     let arena = Arena::new();
@@ -291,12 +295,14 @@ pub fn read_md(
     // Find metadata
     let metadata = get_metadata(root, metadata_type);

-    // Update comrak render properties
-    opt.render.hardbreaks = metadata.hardbreaks;
-
     let mermaid_name = "mermaid";
     hljs_replace(root, mermaid_name);

+    if let TypeFileMetadata::Blog = metadata_type {
+        // Change by metadata could be good for compatibility
+        opt.render.hardbreaks = true;
+    }
+
     // Convert to HTML
     let mut html = vec![];
     format_html(root, &opt, &mut html).unwrap();
@@ -309,6 +315,8 @@ pub fn read_md(
     html_content = custom_img_size(&html_content);
     (html_content, mail_obfsucated) = mail_obfuscation(&html_content);

+    let toc = toc_to_html(&generate_toc(root));
+
     let mut final_metadata = Metadata {
         info: metadata,
         mermaid: check_mermaid(root, mermaid_name),
@@ -321,6 +329,7 @@ pub fn read_md(
     File {
         metadata: final_metadata,
         content: html_content,
+        toc_data: toc,
     }
 }
@@ -330,37 +339,29 @@ fn deserialize_metadata<T: Default + serde::de::DeserializeOwned>(text: &str) -> T {
 }

 /// Fetch metadata from AST
-pub fn get_metadata<'a>(root: &'a AstNode<'a>, mtype: TypeFileMetadata) -> FileMetadata {
+pub fn get_metadata<'a>(root: &'a AstNode<'a>, mtype: &TypeFileMetadata) -> FileMetadata {
     root.children()
-        .map(|node| {
-            let generic = FileMetadata {
-                hardbreaks: true,
-                ..FileMetadata::default()
-            };
-
-            match &node.data.borrow().value {
-                // Extract metadata from frontmatter
-                NodeValue::FrontMatter(text) => match mtype {
-                    TypeFileMetadata::Blog => {
-                        let metadata: FileMetadataBlog = deserialize_metadata(text);
-                        FileMetadata {
-                            blog: Some(metadata.clone()),
-                            hardbreaks: metadata.hardbreaks.unwrap_or_default(),
-                            ..FileMetadata::default()
-                        }
-                    }
+        .find_map(|node| match &node.data.borrow().value {
+            // Extract metadata from frontmatter
+            NodeValue::FrontMatter(text) => Some(match mtype {
+                TypeFileMetadata::Blog => FileMetadata {
+                    blog: Some(deserialize_metadata(text)),
+                    ..FileMetadata::default()
+                },
                 TypeFileMetadata::Contact => {
                     let mut metadata: FileMetadataContact = deserialize_metadata(text);

                     // Trim descriptions
                     if let Some(desc) = &mut metadata.description {
                         desc.clone_from(&desc.trim().into());
                     }

                     FileMetadata {
                         contact: Some(metadata),
                         ..FileMetadata::default()
                     }
                 }
-                TypeFileMetadata::Generic => generic,
+                TypeFileMetadata::Generic => FileMetadata::default(),
                 TypeFileMetadata::Index => FileMetadata {
                     index: Some(deserialize_metadata(text)),
                     ..FileMetadata::default()
@@ -369,11 +370,9 @@ pub fn get_metadata<'a>(root: &'a AstNode<'a>, mtype: &TypeFileMetadata) -> FileMetadata {
                 portfolio: Some(deserialize_metadata(text)),
                 ..FileMetadata::default()
             },
-            },
-            _ => generic,
-        }
+            }),
+            _ => None,
         })
-        .next()
         .map_or_else(
             || match mtype {
                 TypeFileMetadata::Blog => FileMetadata {
@@ -515,3 +514,87 @@ fn mail_obfuscation(html: &str) -> (String, bool) {
         (new_html, modified)
     }
 }
+
+#[derive(Debug)]
+struct TOCEntry {
+    id: String,
+    title: String,
+    depth: u8,
+}
+
+fn generate_toc<'a>(root: &'a AstNode<'a>) -> Vec<TOCEntry> {
+    /// See <https://github.com/kivikakk/comrak/blob/b67d406d3b101b93539c37a1ca75bff81ff8c149/src/html.rs#L446>
+    fn collect_text<'a>(node: &'a AstNode<'a>, output: &mut String) {
+        match node.data.borrow().value {
+            NodeValue::Text(ref literal)
+            | NodeValue::Code(NodeCode { ref literal, .. })
+            | NodeValue::Math(NodeMath { ref literal, .. }) => {
+                *output = literal.to_string();
+            }
+            _ => {
+                for n in node.children() {
+                    if !output.is_empty() {
+                        break;
+                    }
+                    collect_text(n, output);
+                }
+            }
+        }
+    }
+
+    let mut toc = vec![];
+
+    let mut anchorizer = Anchorizer::new();
+
+    // Collect headings first to avoid mutable borrow conflicts
+    let headings: Vec<_> = root
+        .children()
+        .filter_map(|node| {
+            if let NodeValue::Heading(ref nch) = &node.data.borrow().value {
+                Some((*nch, node))
+            } else {
+                None
+            }
+        })
+        .collect();
+
+    // Now process each heading
+    for (nch, node) in headings {
+        let mut title = String::with_capacity(20);
+        collect_text(node, &mut title);
+
+        toc.push(TOCEntry {
+            id: anchorizer.anchorize(title.clone()),
+            title,
+            depth: nch.level,
+        });
+    }
+
+    toc
+}
+
+fn toc_to_html(toc: &[TOCEntry]) -> String {
+    if toc.is_empty() {
+        return String::new();
+    }
+
+    let mut html = Vec::with_capacity(20 + 20 * toc.len());
+
+    html.extend_from_slice(b"<ul>");
+
+    for entry in toc {
+        // TODO: Use depth
+        html.extend_from_slice(
+            format!(
+                "<li><a href=\"{}\">{} (dbg/depth/{})</a></li>",
+                entry.id, entry.title, entry.depth
+            )
+            .as_bytes(),
+        );
+    }
+
+    html.extend_from_slice(b"</ul>");
+
+    String::from_utf8(html).unwrap()
+}


@@ -1,5 +1,4 @@
 pub mod date;
 pub mod github;
 pub mod markdown;
-pub mod misc;
-pub mod routes;
+pub mod utils;


@@ -53,14 +53,13 @@ impl Responder for Html {
 }

 /// Read a file
-#[cached]
-pub fn read_file(filename: String, expected_file: TypeFileMetadata) -> Option<File> {
-    Path::new(&filename.clone())
+pub fn read_file(filename: &str, expected_file: &TypeFileMetadata) -> Option<File> {
+    Path::new(filename)
         .extension()
         .and_then(|ext| match ext.to_str().unwrap() {
             "pdf" => fs::read(filename).map_or(None, |bytes| Some(read_pdf(bytes))),
-            _ => fs::read_to_string(&filename).map_or(None, |text| {
-                Some(read_md(&filename, &text, expected_file, None))
+            _ => fs::read_to_string(filename).map_or(None, |text| {
+                Some(read_md(filename, &text, expected_file, None))
             }),
         })
 }
@@ -82,5 +81,6 @@ fn read_pdf(data: Vec<u8>) -> File {
             style="width: 100%; height: 79vh";
             >"#
         ),
+        toc_data: String::new(),
     }
 }


@@ -1,4 +1,4 @@
-use crate::{config::Config, utils::misc::get_url, template::InfosPage};
+use crate::{config::Config, misc::utils::get_url, template::InfosPage};
 use actix_web::{get, http::header::ContentType, routes, web, HttpResponse, Responder};
 use cached::proc_macro::once;
 use ramhorns::Content;


@@ -1,17 +1,33 @@
+use std::{
+    collections::hash_map::DefaultHasher,
+    hash::{Hash, Hasher},
+};
+
+use ::rss::{
+    extension::atom::{AtomExtension, Link},
+    Category, Channel, Guid, Image, Item,
+};
 use actix_web::{get, http::header::ContentType, routes, web, HttpResponse, Responder};
 use cached::proc_macro::once;
+use chrono::{DateTime, Datelike, Local, NaiveDateTime, Utc};
+use chrono_tz::Europe;
+use comrak::{parse_document, Arena};
 use ramhorns::Content;

 use crate::{
     config::Config,
-    template::{InfosPage, NavBar},
-    utils::{
-        markdown::{File, TypeFileMetadata},
-        misc::{make_kw, read_file, Html},
-        routes::blog::{build_rss, get_post, get_posts, Post, BLOG_DIR, MIME_TYPE_RSS, POST_DIR},
+    misc::{
+        date::Date,
+        markdown::{get_metadata, get_options, File, FileMetadataBlog, TypeFileMetadata},
+        utils::{get_url, make_kw, read_file, Html},
     },
+    template::{InfosPage, NavBar},
 };

+const MIME_TYPE_RSS: &str = "application/rss+xml";
+const BLOG_DIR: &str = "blog";
+const POST_DIR: &str = "posts";
+
 #[get("/blog")]
 pub async fn index(config: web::Data<Config>) -> impl Responder {
     Html(build_index(config.get_ref().to_owned()))
@@ -31,7 +47,8 @@ fn build_index(config: Config) -> String {
     let mut posts = get_posts(&format!("{blog_dir}/{POST_DIR}"));

     // Get about
-    let about: Option<File> = read_file(format!("{blog_dir}/about.md"), TypeFileMetadata::Generic);
+    let about: Option<File> =
+        read_file(&format!("{blog_dir}/about.md"), &TypeFileMetadata::Generic);

     // Sort from newest to oldest
     posts.sort_by_cached_key(|p| (p.date.year, p.date.month, p.date.day));
@@ -59,11 +76,116 @@ fn build_index(config: Config) -> String {
     )
 }

+#[derive(Content, Debug)]
+struct Post {
+    title: String,
+    date: Date,
+    url: String,
+    desc: Option<String>,
+    content: Option<String>,
+    tags: Vec<String>,
+}
+
+impl Post {
+    // Fetch the file content
+    fn fetch_content(&mut self, data_dir: &str) {
+        let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
+        let ext = ".md";
+
+        if let Some(file) = read_file(
+            &format!("{blog_dir}/{}{ext}", self.url),
+            &TypeFileMetadata::Blog,
+        ) {
+            self.content = Some(file.content);
+        }
+    }
+}
+
+impl Hash for Post {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        if let Some(content) = &self.content {
+            content.hash(state);
+        }
+    }
+}
+
+fn get_posts(location: &str) -> Vec<Post> {
+    let entries = std::fs::read_dir(location).map_or_else(
+        |_| vec![],
+        |res| {
+            res.flatten()
+                .filter(|f| f.path().extension().map_or(false, |ext| ext == "md"))
+                .collect::<Vec<std::fs::DirEntry>>()
+        },
+    );
+
+    entries
+        .iter()
+        .filter_map(|f| {
+            let fname = f.file_name();
+            let filename = fname.to_string_lossy();
+            let file_without_ext = filename.split_at(filename.len() - 3).0;
+
+            let file_metadata = std::fs::read_to_string(format!("{location}/{filename}"))
+                .map_or_else(
+                    |_| FileMetadataBlog {
+                        title: Some(file_without_ext.into()),
+                        ..FileMetadataBlog::default()
+                    },
+                    |text| {
+                        let arena = Arena::new();
+                        let options = get_options();
+                        let root = parse_document(&arena, &text, &options);
+                        let mut metadata =
+                            get_metadata(root, &TypeFileMetadata::Blog).blog.unwrap();
+
+                        // Always have a title
+                        metadata.title = metadata
+                            .title
+                            .map_or_else(|| Some(file_without_ext.into()), Some);
+
+                        metadata
+                    },
+                );
+
+            if file_metadata.publish == Some(true) {
+                Some(Post {
+                    url: file_without_ext.into(),
+                    title: file_metadata.title.unwrap(),
+                    date: file_metadata.date.unwrap_or({
+                        let m = f.metadata().unwrap();
+                        let date = std::convert::Into::<DateTime<Utc>>::into(
+                            m.modified().unwrap_or_else(|_| m.created().unwrap()),
+                        )
+                        .date_naive();
+
+                        Date {
+                            day: date.day(),
+                            month: date.month(),
+                            year: date.year(),
+                        }
+                    }),
+                    desc: file_metadata.description,
+                    content: None,
+                    tags: file_metadata
+                        .tags
+                        .unwrap_or_default()
+                        .iter()
+                        .map(|t| t.name.clone())
+                        .collect(),
+                })
+            } else {
+                None
+            }
+        })
+        .collect::<Vec<Post>>()
+}
+
 #[derive(Content, Debug)]
 struct BlogPostTemplate {
     navbar: NavBar,
     post: Option<File>,
-    toc: String,
 }

 #[get("/blog/p/{id}")]
@@ -76,7 +198,7 @@ pub async fn page(path: web::Path<(String,)>, config: web::Data<Config>) -> impl Responder {
 fn build_post(file: &str, config: Config) -> String {
     let mut post = None;
-    let (infos, toc) = get_post(
+    let infos = get_post(
         &mut post,
         file,
         &config.fc.name.unwrap_or_default(),
@@ -91,12 +213,51 @@ fn build_post(file: &str, config: Config) -> String {
                 ..NavBar::default()
             },
             post,
-            toc,
         },
         infos,
     )
 }

+fn get_post(post: &mut Option<File>, filename: &str, name: &str, data_dir: &str) -> InfosPage {
+    let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
+    let ext = ".md";
+
+    *post = read_file(
+        &format!("{blog_dir}/{filename}{ext}"),
+        &TypeFileMetadata::Blog,
+    );
+
+    let default = (filename, &format!("Blog d'{name}"), Vec::new());
+    let (title, desc, tags) = match post {
+        Some(data) => (
+            match &data.metadata.info.blog.as_ref().unwrap().title {
+                Some(text) => text,
+                None => default.0,
+            },
+            match &data.metadata.info.blog.as_ref().unwrap().description {
+                Some(desc) => desc,
+                None => default.1,
+            },
+            match &data.metadata.info.blog.as_ref().unwrap().tags {
+                Some(tags) => tags.clone(),
+                None => default.2,
+            },
+        ),
+        None => default,
+    };
+
+    InfosPage {
+        title: Some(format!("Post: {title}")),
+        desc: Some(desc.clone()),
+        kw: Some(make_kw(
+            &["blog", "blogging", "write", "writing"]
+                .into_iter()
+                .chain(tags.iter().map(|t| t.name.as_str()))
+                .collect::<Vec<_>>(),
+        )),
+    }
+}
+
 #[routes]
 #[get("/blog/blog.rss")]
 #[get("/blog/rss")]
@@ -105,3 +266,110 @@ pub async fn rss(config: web::Data<Config>) -> impl Responder {
         .content_type(ContentType(MIME_TYPE_RSS.parse().unwrap()))
         .body(build_rss(config.get_ref().to_owned()))
 }
+
+#[once(time = 10800)] // 3h
+fn build_rss(config: Config) -> String {
+    let mut posts = get_posts(&format!(
+        "{}/{}/{}",
+        config.locations.data_dir, BLOG_DIR, POST_DIR
+    ));
+
+    // Sort from newest to oldest
+    posts.sort_by_cached_key(|p| (p.date.year, p.date.month, p.date.day));
+    posts.reverse();
+
+    // Only the 20 newest
+    let max = 20;
+    if posts.len() > max {
+        posts.drain(max..);
+    }
+
+    let link_to_site = get_url(config.fc.clone());
+    let author = if let (Some(mail), Some(name)) = (config.fc.mail, config.fc.fullname.clone()) {
+        Some(format!("{mail} ({name})"))
+    } else {
+        None
+    };
+    let title = format!("Blog d'{}", config.fc.name.unwrap_or_default());
+    let lang = "fr";
+    let channel = Channel {
+        title: title.clone(),
+        link: link_to_site.clone(),
+        description: "Un fil qui parle d'informatique notamment".into(),
+        language: Some(lang.into()),
+        managing_editor: author.clone(),
+        webmaster: author,
+        pub_date: Some(Local::now().to_rfc2822()),
+        categories: ["blog", "blogging", "write", "writing"]
+            .iter()
+            .map(|&c| Category {
+                name: c.into(),
+                ..Category::default()
+            })
+            .collect(),
+        generator: Some("ewp with rss crate".into()),
+        docs: Some("https://www.rssboard.org/rss-specification".into()),
+        image: Some(Image {
+            url: format!("{link_to_site}/icons/favicon-32x32.png"),
+            title: title.clone(),
+            link: link_to_site.clone(),
+            ..Image::default()
+        }),
+        items: posts
+            .iter_mut()
+            .map(|p| {
+                // Get post data
+                p.fetch_content(&config.locations.data_dir);
+
+                // Build item
+                Item {
+                    title: Some(p.title.clone()),
+                    link: Some(format!("{}/blog/p/{}", link_to_site, p.url)),
+                    description: p.content.clone(),
+                    categories: p
+                        .tags
+                        .iter()
+                        .map(|c| Category {
+                            name: c.to_owned(),
+                            ..Category::default()
+                        })
+                        .collect(),
+                    guid: Some(Guid {
+                        value: format!("urn:hash:{}", {
+                            let mut hasher = DefaultHasher::new();
+                            p.hash(&mut hasher);
+                            hasher.finish()
+                        }),
+                        permalink: false,
+                    }),
+                    pub_date: Some(
+                        NaiveDateTime::parse_from_str(
+                            &format!("{}-{}-{} 13:12:00", p.date.day, p.date.month, p.date.year),
+                            "%d-%m-%Y %H:%M:%S",
+                        )
+                        .unwrap()
+                        .and_local_timezone(Europe::Paris)
+                        .unwrap()
+                        .to_rfc2822(),
+                    ),
+                    ..Item::default()
+                }
+            })
+            .collect(),
+        atom_ext: Some(AtomExtension {
+            links: vec![Link {
+                href: format!("{link_to_site}/blog/rss"),
+                rel: "self".into(),
+                hreflang: Some(lang.into()),
+                mime_type: Some(MIME_TYPE_RSS.into()),
+                title: Some(title),
+                length: None,
+            }],
+        }),
+        ..Channel::default()
+    };

+    std::str::from_utf8(&channel.write_to(Vec::new()).unwrap())
+        .unwrap()
+        .into()
+}


@@ -2,15 +2,15 @@ use actix_web::{get, routes, web, HttpRequest, Responder};
 use cached::proc_macro::once;
 use glob::glob;
 use ramhorns::Content;
+use std::fs::read_to_string;

 use crate::{
     config::Config,
-    template::{InfosPage, NavBar},
-    utils::{
-        markdown::{File, TypeFileMetadata},
-        misc::{make_kw, read_file, Html},
-        routes::contact::{find_links, remove_paragraphs},
+    misc::{
+        markdown::{File, TypeFileMetadata},
+        utils::{make_kw, read_file, Html},
     },
+    template::{InfosPage, NavBar},
 };

 const CONTACT_DIR: &str = "contacts";
@@ -32,6 +32,47 @@ async fn page(config: web::Data<Config>) -> impl Responder {
     Html(build_page(config.get_ref().to_owned()))
 }

+/// Contact node
+#[derive(Clone, Debug)]
+struct ContactLink {
+    service: String,
+    scope: Option<String>,
+    link: String,
+}
+
+#[once(time = 60)]
+fn find_links(directory: String) -> Vec<ContactLink> {
+    // TOML filename
+    let toml_file = "links.toml";
+
+    // Read the TOML file and parse it
+    let toml_str = read_to_string(format!("{directory}/{toml_file}")).unwrap_or_default();
+
+    let mut redirections = vec![];
+    match toml::de::from_str::<toml::Value>(&toml_str) {
+        Ok(data) => {
+            if let Some(section) = data.as_table() {
+                section.iter().for_each(|(key, value)| {
+                    // Scopes are delimited with `/`
+                    let (service, scope) = match key.split_once('/') {
+                        Some((service, scope)) => (service.to_owned(), Some(scope.to_owned())),
+                        None => (key.to_owned(), None),
+                    };
+
+                    redirections.push(ContactLink {
+                        service,
+                        scope,
+                        link: value.as_str().unwrap().to_owned(),
+                    });
+                });
+            }
+        }
+        Err(_) => return vec![],
+    }
+
+    redirections
+}
+
 #[routes]
 #[get("/{service}")]
 #[get("/{service}/{scope}")]
@@ -51,7 +92,7 @@ async fn service_redirection(config: web::Data<Config>, req: HttpRequest) -> impl Responder {
             _ => false,
         })
         // Returns the link
-        .map(|data| data.url.clone())
+        .map(|data| data.link.clone())
         .collect::<Vec<String>>();

     // This shouldn't be more than one link here
@@ -78,6 +119,11 @@ struct NetworksTemplate {
     others: Vec<File>,
 }

+fn remove_paragraphs(list: &mut [File]) {
+    list.iter_mut()
+        .for_each(|file| file.content = file.content.replace("<p>", "").replace("</p>", ""));
+}
+
 #[once(time = 60)]
 fn build_page(config: Config) -> String {
     let contacts_dir = format!("{}/{}", config.locations.data_dir, CONTACT_DIR);
@@ -85,44 +131,26 @@ fn build_page(config: Config) -> String {
     // Get about
     let about = read_file(
-        format!("{contacts_dir}/about.md"),
-        TypeFileMetadata::Generic,
+        &format!("{contacts_dir}/about.md"),
+        &TypeFileMetadata::Generic,
     );

     let socials_dir = "socials";
     let mut socials = glob(&format!("{contacts_dir}/{socials_dir}/*{ext}"))
         .unwrap()
-        .map(|e| {
-            read_file(
-                e.unwrap().to_string_lossy().to_string(),
-                TypeFileMetadata::Contact,
-            )
-            .unwrap()
-        })
+        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
         .collect::<Vec<File>>();

     let forges_dir = "forges";
     let mut forges = glob(&format!("{contacts_dir}/{forges_dir}/*{ext}"))
         .unwrap()
-        .map(|e| {
-            read_file(
-                e.unwrap().to_string_lossy().to_string(),
-                TypeFileMetadata::Contact,
-            )
-            .unwrap()
-        })
+        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
         .collect::<Vec<File>>();

     let others_dir = "others";
     let mut others = glob(&format!("{contacts_dir}/{others_dir}/*{ext}"))
         .unwrap()
-        .map(|e| {
-            read_file(
-                e.unwrap().to_string_lossy().to_string(),
-                TypeFileMetadata::Contact,
-            )
-            .unwrap()
-        })
+        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
         .collect::<Vec<File>>();

     // Remove paragraphs in custom statements


@@ -1,10 +1,12 @@
+use std::collections::HashMap;
+
 use crate::{
     config::Config,
-    template::{InfosPage, NavBar},
-    utils::{
-        misc::{make_kw, Html},
-        routes::contrib::{fetch, Project},
+    misc::{
+        github::{fetch_pr, ProjectState},
+        utils::{make_kw, Html},
     },
+    template::{InfosPage, NavBar},
 };
 use actix_web::{get, web, Responder};
 use cached::proc_macro::once;
@@ -24,6 +26,24 @@ struct PortfolioTemplate {
     closed: Option<Vec<Project>>,
 }

+#[derive(Clone, Content, Debug)]
+struct Project {
+    name: String,
+    url: String,
+    pulls_merged: Vec<Pull>,
+    pulls_open: Vec<Pull>,
+    pulls_closed: Vec<Pull>,
+}
+
+#[derive(Clone, Content, Debug)]
+struct Pull {
+    url: String,
+    id: u32,
+    name_repo: String,
+    title: String,
+    state: u8,
+}
+
 #[once(time = 600)] // 10min
 async fn build_page(config: Config) -> String {
     let navbar = NavBar {
@@ -32,8 +52,66 @@ async fn build_page(config: Config) -> String {
     };

     // Fetch latest data from github
-    let data = match fetch().await {
-        Ok(data) => PortfolioTemplate {
+    let data = match fetch_pr().await {
+        Ok(projects) => {
+            let mut data: Vec<Project> = Vec::new();
+
+            // Grouping PRs by projects
+            let mut map: HashMap<&str, Vec<Pull>> = HashMap::new();
+            for p in &projects {
+                let project = Pull {
+                    url: p.contrib_url.clone(),
+                    id: p.id,
+                    name_repo: p.name.clone(),
+                    title: p.title.clone(),
+                    state: p.status as u8,
+                };
+                let project_name = p.name.as_str();
+                if map.contains_key(project_name) {
+                    map.entry(project_name).and_modify(|v| v.push(project));
+                } else {
+                    data.push(Project {
+                        name: project_name.into(),
+                        url: p.url.clone(),
+                        pulls_merged: Vec::new(),
+                        pulls_closed: Vec::new(),
+                        pulls_open: Vec::new(),
+                    });
+                    map.insert(project_name, vec![project]);
+                }
+            }
+
+            // Distributes each PR in the right vector
+            for d in &mut data {
+                map.get(d.name.as_str()).unwrap().iter().for_each(|p| {
+                    let state = p.state.try_into().unwrap();
+                    match state {
+                        ProjectState::Closed => d.pulls_closed.push(p.to_owned()),
+                        ProjectState::Merged => d.pulls_merged.push(p.to_owned()),
+                        ProjectState::Open => d.pulls_open.push(p.to_owned()),
+                    }
+                });
+
+                let mut name: Vec<char> = d.name.replace('-', " ").chars().collect();
+                name[0] = name[0].to_uppercase().next().unwrap();
+                d.name = name.into_iter().collect();
+            }
+
+            // Ascending order by pulls IDs
+            for d in &mut data {
+                d.pulls_closed.reverse();
+                d.pulls_merged.reverse();
+                d.pulls_open.reverse();
+            }
+
+            // Ascending order by number of pulls
+            data.sort_by(|a, b| {
+                b.pulls_merged
+                    .len()
+                    .partial_cmp(&a.pulls_merged.len())
+                    .unwrap()
+            });
+
+            PortfolioTemplate {
                 navbar,
                 error: false,
                 projects: Some(
@@ -54,7 +132,8 @@ async fn build_page(config: Config) -> String {
                     .cloned()
                     .collect(),
             ),
-        },
+            }
+        }
         Err(e) => {
             eprintln!("{e}");


@@ -1,17 +1,18 @@
+use std::path::Path;
+
 use actix_web::{get, web, Responder};
 use cached::proc_macro::cached;
 use ramhorns::Content;
 use regex::Regex;
-use serde::Deserialize;
+use serde::{Deserialize, Serialize};

 use crate::{
     config::Config,
-    template::{InfosPage, NavBar},
-    utils::{
-        markdown::{File, TypeFileMetadata},
-        misc::{make_kw, read_file, Html},
-        routes::cours::{excluded, get_filetree},
+    misc::{
+        markdown::{File, TypeFileMetadata},
+        utils::{make_kw, read_file, Html},
     },
+    template::{InfosPage, NavBar},
 };

 #[derive(Debug, Deserialize)]
@@ -31,6 +32,13 @@ struct CoursTemplate {
     content: Option<File>,
 }

+#[derive(Clone, Debug, Serialize)]
+struct FileNode {
+    name: String,
+    is_dir: bool,
+    children: Vec<FileNode>,
+}
+
 #[cached]
 fn compile_patterns(exclusion_list: Vec<String>) -> Vec<Regex> {
     exclusion_list
@@ -39,42 +47,76 @@ fn compile_patterns(exclusion_list: Vec<String>) -> Vec<Regex> {
         .collect()
 }

+fn get_filetree(dir_path: &str, exclusion_patterns: &Vec<Regex>) -> FileNode {
+    let children = std::fs::read_dir(dir_path)
+        .unwrap()
+        .filter_map(Result::ok)
+        .filter_map(|entry| {
+            let entry_path = entry.path();
+            let entry_name = entry_path.file_name()?.to_string_lossy().to_string();
+
+            // Exclude element with the exclusion_list
+            if exclusion_patterns.iter().any(|re| re.is_match(&entry_name)) {
+                return None;
+            }
+
+            if entry_path.is_file() {
+                Some(FileNode {
+                    name: entry_name,
+                    is_dir: false,
+                    children: vec![],
+                })
+            } else {
+                // Exclude empty directories
+                let children_of_children =
+                    get_filetree(entry_path.to_str().unwrap(), exclusion_patterns);
+                if children_of_children.is_dir && children_of_children.children.is_empty() {
+                    None
+                } else {
+                    Some(children_of_children)
+                }
+            }
+        })
+        .collect();
+
+    FileNode {
+        name: Path::new(dir_path)
+            .file_name()
+            .unwrap()
+            .to_string_lossy()
+            .to_string(),
+        is_dir: true,
+        children,
+    }
+}
+
 /// Get a page content
 fn get_content(
     cours_dir: &str,
     path: &web::Query<PathRequest>,
     exclusion_list: &[String],
-    exclusion_patterns: &[Regex],
 ) -> Option<File> {
     let filename = path.q.as_ref().map_or("index.md", |q| q);

-    // Exclusion checks
-    if excluded(filename, exclusion_list, exclusion_patterns) {
+    // We should support regex?
+    if exclusion_list
+        .iter()
+        .any(|excluded_term| filename.contains(excluded_term.as_str()))
+    {
         return None;
     }

-    read_file(format!("{cours_dir}/{filename}"), TypeFileMetadata::Generic)
+    read_file(
+        &format!("{cours_dir}/{filename}"),
+        &TypeFileMetadata::Generic,
+    )
 }

 fn build_page(info: &web::Query<PathRequest>, config: Config) -> String {
     let cours_dir = "data/cours";

-    let (ep, el): (_, Vec<String>) = config
-        .fc
-        .exclude_courses
-        .unwrap()
-        .into_iter()
-        .partition(|item| item.starts_with('/'));
-
-    let exclusion_list = {
-        let mut base = vec!["../".to_owned()];
-        base.extend(el);
-        base
-    };
-    let exclusion_patterns: Vec<Regex> =
-        compile_patterns(ep.iter().map(|r| r[1..r.len() - 1].to_owned()).collect());
-    let filetree = get_filetree(cours_dir, &exclusion_list, &exclusion_patterns);
+    let exclusion_list = config.fc.exclude_courses.unwrap();
+    let exclusion_patterns = compile_patterns(exclusion_list.clone());
+    let filetree = get_filetree(cours_dir, &exclusion_patterns);

     config.tmpl.render(
         "cours.html",
@@ -84,7 +126,7 @@ fn build_page(info: &web::Query<PathRequest>, config: Config) -> String {
                 ..NavBar::default()
             },
             filetree: serde_json::to_string(&filetree).unwrap(),
-            content: get_content(cours_dir, info, &exclusion_list, &exclusion_patterns),
+            content: get_content(cours_dir, info, &exclusion_list),
         },
         InfosPage {
             title: Some("Cours".into()),


@@ -4,11 +4,11 @@ use ramhorns::Content;
 use crate::{
     config::Config,
-    template::{InfosPage, NavBar},
-    utils::{
-        markdown::{File, TypeFileMetadata},
-        misc::{make_kw, read_file, Html},
+    misc::{
+        markdown::{File, TypeFileMetadata},
+        utils::{make_kw, read_file, Html},
     },
+    template::{InfosPage, NavBar},
 };

 #[get("/")]
@@ -36,8 +36,8 @@ struct StyleAvatar {
 #[once(time = 60)]
 fn build_page(config: Config) -> String {
     let mut file = read_file(
-        format!("{}/index.md", config.locations.data_dir),
-        TypeFileMetadata::Index,
+        &format!("{}/index.md", config.locations.data_dir),
+        &TypeFileMetadata::Index,
     );

     // Default values
@@ -67,7 +67,7 @@ fn build_page(config: Config) -> String {
             }
         }
     } else {
-        file = read_file("README.md".to_string(), TypeFileMetadata::Generic);
+        file = read_file("README.md", &TypeFileMetadata::Generic);
     }

     config.tmpl.render(


@@ -4,7 +4,7 @@ use ramhorns::Content;
 use crate::{
     config::Config,
-    utils::misc::{get_url, Html},
+    misc::utils::{get_url, Html},
     template::{InfosPage, NavBar},
 };


@@ -5,11 +5,11 @@ use ramhorns::Content;
 use crate::{
     config::Config,
-    template::{InfosPage, NavBar},
-    utils::{
-        markdown::{File, TypeFileMetadata},
-        misc::{make_kw, read_file, Html},
+    misc::{
+        markdown::{File, TypeFileMetadata},
+        utils::{make_kw, read_file, Html},
     },
+    template::{InfosPage, NavBar},
 };

 #[get("/portfolio")]
@@ -36,20 +36,14 @@ fn build_page(config: Config) -> String {
     // Get about
     let about = read_file(
-        format!("{projects_dir}/about.md"),
-        TypeFileMetadata::Generic,
+        &format!("{projects_dir}/about.md"),
+        &TypeFileMetadata::Generic,
     );

     // Get apps
     let apps = glob(&format!("{apps_dir}/*{ext}"))
         .unwrap()
-        .map(|e| {
-            read_file(
-                e.unwrap().to_string_lossy().to_string(),
-                TypeFileMetadata::Portfolio,
-            )
-            .unwrap()
-        })
+        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Portfolio).unwrap())
         .collect::<Vec<File>>();

     let appdata = if apps.is_empty() {
@@ -61,13 +55,7 @@ fn build_page(config: Config) -> String {
     // Get archived apps
     let archived_apps = glob(&format!("{apps_dir}/archive/*{ext}"))
         .unwrap()
-        .map(|e| {
-            read_file(
-                e.unwrap().to_string_lossy().to_string(),
-                TypeFileMetadata::Portfolio,
-            )
-            .unwrap()
-        })
+        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Portfolio).unwrap())
         .collect::<Vec<File>>();

     let archived_appdata = if archived_apps.is_empty() {


@@ -3,7 +3,7 @@ use cached::proc_macro::once;
 use crate::{
     config::Config,
-    utils::misc::{make_kw, Html},
+    misc::utils::{make_kw, Html},
     template::InfosPage,
 };


@@ -1,298 +0,0 @@
-use std::{
-    collections::hash_map::DefaultHasher,
-    hash::{Hash, Hasher},
-};
-
-use ::rss::{
-    extension::atom::{AtomExtension, Link},
-    Category, Channel, Guid, Image, Item,
-};
-use cached::proc_macro::once;
-use chrono::{DateTime, Datelike, Local, NaiveDateTime, Utc};
-use chrono_tz::Europe;
-use comrak::{parse_document, Arena};
-use ramhorns::Content;
-
-use crate::{
-    config::Config,
-    template::InfosPage,
-    utils::{
-        date::Date,
-        markdown::{get_metadata, get_options, File, FileMetadataBlog, TypeFileMetadata},
-        misc::{get_url, make_kw, read_file},
-    },
-};
-
-pub const MIME_TYPE_RSS: &str = "application/rss+xml";
-pub const BLOG_DIR: &str = "blog";
-pub const POST_DIR: &str = "posts";
-
-#[derive(Content, Debug)]
-pub struct Post {
-    title: String,
-    pub date: Date,
-    pub url: String,
-    desc: Option<String>,
-    content: Option<String>,
-    tags: Vec<String>,
-}
-
-impl Post {
-    // Fetch the file content
-    fn fetch_content(&mut self, data_dir: &str) {
-        let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
-        let ext = ".md";
-
-        if let Some(file) = read_file(
-            format!("{blog_dir}/{}{ext}", self.url),
-            TypeFileMetadata::Blog,
-        ) {
-            self.content = Some(file.content);
-        }
-    }
-}
-
-impl Hash for Post {
-    fn hash<H: Hasher>(&self, state: &mut H) {
-        if let Some(content) = &self.content {
-            content.hash(state);
-        }
-    }
-}
-
-pub fn get_posts(location: &str) -> Vec<Post> {
-    let entries = std::fs::read_dir(location).map_or_else(
-        |_| vec![],
-        |res| {
-            res.flatten()
-                .filter(|f| f.path().extension().map_or(false, |ext| ext == "md"))
-                .collect::<Vec<std::fs::DirEntry>>()
-        },
-    );
-
-    entries
-        .iter()
-        .filter_map(|f| {
-            let fname = f.file_name();
-            let filename = fname.to_string_lossy();
-            let file_without_ext = filename.split_at(filename.len() - 3).0;
-
-            let file_metadata = std::fs::read_to_string(format!("{location}/{filename}"))
-                .map_or_else(
-                    |_| FileMetadataBlog {
-                        title: Some(file_without_ext.into()),
-                        ..FileMetadataBlog::default()
-                    },
-                    |text| {
-                        let arena = Arena::new();
-                        let options = get_options();
-                        let root = parse_document(&arena, &text, &options);
-                        let mut metadata = get_metadata(root, TypeFileMetadata::Blog).blog.unwrap();
-
-                        // Always have a title
-                        metadata.title = metadata
-                            .title
-                            .map_or_else(|| Some(file_without_ext.into()), Some);
-
-                        metadata
-                    },
-                );
-
-            if file_metadata.publish == Some(true) {
-                Some(Post {
-                    url: file_without_ext.into(),
-                    title: file_metadata.title.unwrap(),
-                    date: file_metadata.date.unwrap_or({
-                        let m = f.metadata().unwrap();
-                        let date = std::convert::Into::<DateTime<Utc>>::into(
-                            m.modified().unwrap_or_else(|_| m.created().unwrap()),
-                        )
-                        .date_naive();
-
-                        Date {
-                            day: date.day(),
-                            month: date.month(),
-                            year: date.year(),
-                        }
-                    }),
-                    desc: file_metadata.description,
-                    content: None,
-                    tags: file_metadata
-                        .tags
-                        .unwrap_or_default()
-                        .iter()
-                        .map(|t| t.name.clone())
-                        .collect(),
-                })
-            } else {
-                None
-            }
-        })
-        .collect::<Vec<Post>>()
-}
-
-pub fn get_post(
-    post: &mut Option<File>,
-    filename: &str,
-    name: &str,
-    data_dir: &str,
-) -> (InfosPage, String) {
-    let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
-    let ext = ".md";
-
-    *post = read_file(
-        format!("{blog_dir}/{filename}{ext}"),
-        TypeFileMetadata::Blog,
-    );
-
-    let default = (
-        filename,
-        &format!("Blog d'{name}"),
-        Vec::new(),
-        String::new(),
-    );
-    let (title, desc, tags, toc) = match post {
-        Some(data) => (
-            match &data.metadata.info.blog.as_ref().unwrap().title {
-                Some(text) => text,
-                None => default.0,
-            },
-            match &data.metadata.info.blog.as_ref().unwrap().description {
-                Some(desc) => desc,
-                None => default.1,
-            },
-            match &data.metadata.info.blog.as_ref().unwrap().tags {
-                Some(tags) => tags.clone(),
-                None => default.2,
-            },
-            match &data.metadata.info.blog.as_ref().unwrap().toc {
-                // TODO: Generate TOC
-                Some(true) => String::new(),
-                _ => default.3,
-            },
-        ),
-        None => default,
-    };
-
-    (
-        InfosPage {
-            title: Some(format!("Post: {title}")),
-            desc: Some(desc.clone()),
-            kw: Some(make_kw(
-                &["blog", "blogging", "write", "writing"]
-                    .into_iter()
-                    .chain(tags.iter().map(|t| t.name.as_str()))
-                    .collect::<Vec<_>>(),
-            )),
-        },
-        toc,
-    )
-}
-
-#[once(time = 10800)] // 3h
-pub fn build_rss(config: Config) -> String {
-    let mut posts = get_posts(&format!(
-        "{}/{}/{}",
-        config.locations.data_dir, BLOG_DIR, POST_DIR
-    ));
-
-    // Sort from newest to oldest
-    posts.sort_by_cached_key(|p| (p.date.year, p.date.month, p.date.day));
-    posts.reverse();
-
-    // Only the 20 newest
-    let max = 20;
-    if posts.len() > max {
-        posts.drain(max..);
-    }
-
-    let link_to_site = get_url(config.fc.clone());
-    let author = if let (Some(mail), Some(name)) = (config.fc.mail, config.fc.fullname.clone()) {
-        Some(format!("{mail} ({name})"))
-    } else {
-        None
-    };
-    let title = format!("Blog d'{}", config.fc.name.unwrap_or_default());
-    let lang = "fr";
-    let channel = Channel {
-        title: title.clone(),
-        link: link_to_site.clone(),
-        description: "Un fil qui parle d'informatique notamment".into(),
-        language: Some(lang.into()),
-        managing_editor: author.clone(),
-        webmaster: author,
-        pub_date: Some(Local::now().to_rfc2822()),
-        categories: ["blog", "blogging", "write", "writing"]
-            .iter()
-            .map(|&c| Category {
-                name: c.into(),
-                ..Category::default()
-            })
-            .collect(),
-        generator: Some("ewp with rss crate".into()),
-        docs: Some("https://www.rssboard.org/rss-specification".into()),
-        image: Some(Image {
-            url: format!("{link_to_site}/icons/favicon-32x32.png"),
-            title: title.clone(),
-            link: link_to_site.clone(),
-            ..Image::default()
-        }),
-        items: posts
-            .iter_mut()
-            .map(|p| {
-                // Get post data
-                p.fetch_content(&config.locations.data_dir);
-
-                // Build item
-                Item {
-                    title: Some(p.title.clone()),
-                    link: Some(format!("{}/blog/p/{}", link_to_site, p.url)),
-                    description: p.content.clone(),
-                    categories: p
-                        .tags
-                        .iter()
-                        .map(|c| Category {
-                            name: c.to_owned(),
-                            ..Category::default()
-                        })
-                        .collect(),
-                    guid: Some(Guid {
-                        value: format!("urn:hash:{}", {
-                            let mut hasher = DefaultHasher::new();
-                            p.hash(&mut hasher);
-                            hasher.finish()
-                        }),
-                        permalink: false,
-                    }),
-                    pub_date: Some(
-                        NaiveDateTime::parse_from_str(
-                            &format!("{}-{}-{} 13:12:00", p.date.day, p.date.month, p.date.year),
-                            "%d-%m-%Y %H:%M:%S",
-                        )
-                        .unwrap()
-                        .and_local_timezone(Europe::Paris)
-                        .unwrap()
-                        .to_rfc2822(),
-                    ),
-                    ..Item::default()
-                }
-            })
-            .collect(),
-        atom_ext: Some(AtomExtension {
-            links: vec![Link {
-                href: format!("{link_to_site}/blog/rss"),
-                rel: "self".into(),
-                hreflang: Some(lang.into()),
-                mime_type: Some(MIME_TYPE_RSS.into()),
-                title: Some(title),
-                length: None,
-            }],
-        }),
-        ..Channel::default()
-    };
-
-    std::str::from_utf8(&channel.write_to(Vec::new()).unwrap())
-        .unwrap()
-        .into()
-}


@@ -1,50 +0,0 @@
-use cached::proc_macro::once;
-use std::fs::read_to_string;
-
-use crate::utils::markdown::File;
-
-/// Contact node
-#[derive(Clone, Debug)]
-pub struct Link {
-    pub service: String,
-    pub scope: Option<String>,
-    pub url: String,
-}
-
-#[once(time = 60)]
-pub fn find_links(directory: String) -> Vec<Link> {
-    // TOML filename
-    let toml_file = "links.toml";
-
-    // Read the TOML file and parse it
-    let toml_str = read_to_string(format!("{directory}/{toml_file}")).unwrap_or_default();
-
-    let mut redirections = vec![];
-    match toml::de::from_str::<toml::Value>(&toml_str) {
-        Ok(data) => {
-            if let Some(section) = data.as_table() {
-                section.iter().for_each(|(key, value)| {
-                    // Scopes are delimited with `/`
-                    let (service, scope) = match key.split_once('/') {
-                        Some((service, scope)) => (service.to_owned(), Some(scope.to_owned())),
-                        None => (key.to_owned(), None),
-                    };
-
-                    redirections.push(Link {
-                        service,
-                        scope,
-                        url: value.as_str().unwrap().to_owned(),
-                    });
-                });
-            }
-        }
-        Err(_) => return vec![],
-    }
-
-    redirections
-}
-
-pub fn remove_paragraphs(list: &mut [File]) {
-    list.iter_mut()
-        .for_each(|file| file.content = file.content.replace("<p>", "").replace("</p>", ""));
-}


@@ -1,90 +0,0 @@
-use std::collections::HashMap;
-
-use ramhorns::Content;
-use reqwest::Error;
-
-use crate::utils::github::{fetch_pr, ProjectState};
-
-#[derive(Clone, Content, Debug)]
-pub struct Project {
-    name: String,
-    url: String,
-    pub pulls_merged: Vec<Pull>,
-    pub pulls_open: Vec<Pull>,
-    pub pulls_closed: Vec<Pull>,
-}
-
-#[derive(Clone, Content, Debug)]
-pub struct Pull {
-    url: String,
-    id: u32,
-    name_repo: String,
-    title: String,
-    state: u8,
-}
-
-pub async fn fetch() -> Result<Vec<Project>, Error> {
-    match fetch_pr().await {
-        Ok(projects) => {
-            let mut data: Vec<Project> = Vec::new();
-
-            // Grouping PRs by projects
-            let mut map: HashMap<&str, Vec<Pull>> = HashMap::new();
-            for p in &projects {
-                let project = Pull {
-                    url: p.contrib_url.clone(),
-                    id: p.id,
-                    name_repo: p.name.clone(),
-                    title: p.title.clone(),
-                    state: p.status as u8,
-                };
-                let project_name = p.name.as_str();
-                if map.contains_key(project_name) {
-                    map.entry(project_name).and_modify(|v| v.push(project));
-                } else {
-                    data.push(Project {
-                        name: project_name.into(),
-                        url: p.url.clone(),
-                        pulls_merged: Vec::new(),
-                        pulls_closed: Vec::new(),
-                        pulls_open: Vec::new(),
-                    });
-                    map.insert(project_name, vec![project]);
-                }
-            }
-
-            // Distributes each PR in the right vector
-            for d in &mut data {
-                map.get(d.name.as_str()).unwrap().iter().for_each(|p| {
-                    let state = p.state.try_into().unwrap();
-                    match state {
-                        ProjectState::Closed => d.pulls_closed.push(p.to_owned()),
-                        ProjectState::Merged => d.pulls_merged.push(p.to_owned()),
-                        ProjectState::Open => d.pulls_open.push(p.to_owned()),
-                    }
-                });
-
-                let mut name: Vec<char> = d.name.replace('-', " ").chars().collect();
-                name[0] = name[0].to_uppercase().next().unwrap();
-                d.name = name.into_iter().collect();
-            }
-
-            // Ascending order by pulls IDs
-            for d in &mut data {
-                d.pulls_closed.reverse();
-                d.pulls_merged.reverse();
-                d.pulls_open.reverse();
-            }
-
-            // Ascending order by number of pulls
-            data.sort_by(|a, b| {
-                b.pulls_merged
-                    .len()
-                    .partial_cmp(&a.pulls_merged.len())
-                    .unwrap()
-            });
-
-            Ok(data)
-        }
-        Err(e) => Err(e),
-    }
-}


@@ -1,106 +0,0 @@
-use std::{cmp::Ordering, path::Path};
-
-use cached::proc_macro::once;
-use regex::Regex;
-use serde::Serialize;
-
-#[derive(Clone, Debug, Serialize, PartialEq, Eq)]
-pub struct FileNode {
-    name: String,
-    is_dir: bool,
-    children: Vec<FileNode>,
-}
-
-impl Ord for FileNode {
-    fn cmp(&self, other: &Self) -> Ordering {
-        match (self.is_dir, other.is_dir) {
-            // If both are directories or both are files, compare names
-            (true, true) | (false, false) => self.name.cmp(&other.name),
-            // If self is directory and other is file, self comes first
-            (true, false) => Ordering::Less,
-            // If self is file and other is directory, other comes first
-            (false, true) => Ordering::Greater,
-        }
-    }
-}
-
-impl PartialOrd for FileNode {
-    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
-        Some(self.cmp(other))
-    }
-}
-
-#[once(time = 120)]
-pub fn get_filetree(
-    initial_dir: &str,
-    exclusion_list: &[String],
-    exclusion_patterns: &[Regex],
-) -> FileNode {
-    gen_filetree(initial_dir, exclusion_list, exclusion_patterns)
-}
-
-fn gen_filetree(
-    dir_path: &str,
-    exclusion_list: &[String],
-    exclusion_patterns: &[Regex],
-) -> FileNode {
-    let mut children: Vec<FileNode> = std::fs::read_dir(dir_path)
-        .unwrap()
-        .filter_map(Result::ok)
-        .filter_map(|entry| {
-            let entry_path = entry.path();
-            let entry_name = entry_path.file_name()?.to_string_lossy().to_string();
-
-            // Exclusion checks
-            if excluded(&entry_name, exclusion_list, exclusion_patterns) {
-                return None;
-            }
-
-            if entry_path.is_file() {
-                Some(FileNode {
-                    name: entry_name,
-                    is_dir: false,
-                    children: vec![],
-                })
-            } else {
-                // Exclude empty directories
-                let children_of_children = gen_filetree(
-                    entry_path.to_str().unwrap(),
-                    exclusion_list,
-                    exclusion_patterns,
-                );
-                if children_of_children.is_dir && children_of_children.children.is_empty() {
-                    None
-                } else {
-                    Some(children_of_children)
-                }
-            }
-        })
-        .collect();
-
-    children.sort();
-
-    FileNode {
-        name: Path::new(dir_path)
-            .file_name()
-            .unwrap()
-            .to_string_lossy()
-            .to_string(),
-        is_dir: true,
-        children,
-    }
-}
-
-pub fn excluded(element: &str, exclusion_list: &[String], exclusion_patterns: &[Regex]) -> bool {
-    if exclusion_list
-        .iter()
-        .any(|excluded_term| element.contains(excluded_term))
-    {
-        return true;
-    }
-
-    if exclusion_patterns.iter().any(|re| re.is_match(element)) {
-        return true;
-    }
-
-    false
-}


@@ -1,4 +0,0 @@
-pub mod blog;
-pub mod contact;
-pub mod contrib;
-pub mod cours;


@@ -28,7 +28,9 @@
     <main>
       {{^post}}
       <p>This post doesn't exist... sorry</p>
-      {{/post}} {{#post}} {{&toc}}
+      {{/post}} {{#post}} {{#metadata}} {{#info}} {{#blog}} {{#toc}}
+      <aside>{{&toc_data}}</aside>
+      {{/toc}} {{/blog}} {{/info}} {{/metadata}}
       <article>{{&content}}</article>
       {{/post}}
     </main>


@@ -16,7 +16,7 @@
     </aside>
     <main>
      {{^content}}
-      <p>Fichier introuvable ou invalide.</p>
+      <p>Fichier introuvable</p>
      {{/content}} {{#content}}
      <article>{{&content}}</article>
    </main>