Compare commits


51 commits

Author SHA1 Message Date
4ae660983f
Merge pull request 'feat: allow subdirs in posts directory' (#86) from posts-subdir into main
Reviewed-on: #86
2024-11-14 17:48:07 +01:00
6c16315a07
cargo fmt
2024-11-14 17:34:15 +01:00
a3ad87dc05
accept all paths?
2024-11-14 03:39:46 +01:00
e8f69a34b1
correct url 2024-11-14 03:28:50 +01:00
adfb769df6
use css property block instead of visibility 2024-11-13 16:56:24 +01:00
411ea64fcb
walkdir 2024-11-12 01:14:48 +01:00
aaf228c32d
add draft in metadata 2024-11-12 00:48:24 +01:00
d9f3d64c55
add makefile 2024-11-12 00:29:59 +01:00
e1448bd773
add hide field in contact cards 2024-11-10 11:14:27 +01:00
fa4d0ba7e8
rename structs 2024-11-10 10:58:08 +01:00
b1c4bbdb27
split metadata of files and markdown reader 2024-11-10 10:55:18 +01:00
c7f1f912f0
arg name 2024-11-09 18:26:19 +01:00
eb55d13c01
cache files 2024-11-09 18:25:13 +01:00
182b17c47f
cache filetree 2024-11-09 18:07:35 +01:00
a5240fea57
alphabetical order in toc 2024-11-09 17:46:53 +01:00
754e717a58
exclusion function 2024-11-09 17:29:59 +01:00
77970da8b3
split logic of contrib 2024-11-09 17:13:30 +01:00
b9bc57c1e0
split logic of contact 2024-11-09 16:58:52 +01:00
744857d685
split blog logic 2024-11-09 16:54:38 +01:00
1209b0eb36
change err msg 2024-11-09 16:44:55 +01:00
40cd5bdca5
rename utils rs file to misc + split cours logic into a dedicated utils directory for route-specific helpers 2024-11-09 16:41:56 +01:00
9dde91f8ed
rename misc dir to utils dir 2024-11-09 16:36:04 +01:00
47570bf9e3
exclusion? 2024-11-09 16:34:51 +01:00
8e1b036386
fix generic 2024-11-09 15:43:29 +01:00
8b5c128bfd
update dependencies 2024-11-09 15:22:23 +01:00
7b1fb7dae3
extra option for release target 2024-11-08 20:26:54 +01:00
58c1b8a21a
hardbreak option for blogs, true for generic, false in other cases 2024-11-06 19:15:38 +01:00
e54cd44714
fix code blocks 2024-11-06 18:24:49 +01:00
a3161d822d
svg theme changer 2024-11-06 18:17:29 +01:00
0f6f2f1fc4
increase cours page width 2024-11-06 17:54:02 +01:00
2a4ae9f273
centered 2024-11-06 17:41:27 +01:00
aed4fa2bff
svg update on media change 2024-11-06 16:42:45 +01:00
856770c2ae
fix table 2024-11-06 16:32:19 +01:00
29cf3e3e00
fix markdown on posts 2024-11-06 16:31:13 +01:00
b02f715c5a
default to hardbreak minus blog 2024-11-06 16:11:42 +01:00
3e5ac643a7
table support 2024-11-06 15:46:27 +01:00
1097ee5194
split markdown sheet from post 2024-11-06 15:33:09 +01:00
61170953fe
ordering 2024-11-06 15:02:07 +01:00
15f8397c6c
add \B, fix lb and rb 2024-11-06 14:57:25 +01:00
87d0fa3c11
'a instead of 'de 2024-10-24 20:19:26 +02:00
7f3434b7c1
no sync needed 2024-10-24 20:17:31 +02:00
13d7b54c27
build at first 2024-10-22 12:07:05 +02:00
984ecb6b69
bump dependencies (#79)
lol_html and comrak

Reviewed-on: #79
2024-10-22 12:05:38 +02:00
5a15945439
fix img 2024-10-22 12:00:13 +02:00
e9441dba46
PR Check 2024-10-22 11:49:57 +02:00
3f3efe4afa
add lb and rb 2024-10-08 13:21:30 +02:00
37b51bcbee
Add Tau 2024-10-03 13:06:56 +02:00
3cc69f3d4f
no longer an intern 2024-09-22 17:02:03 +02:00
7432ffd5f9
clickable badge 2024-09-19 15:53:28 +02:00
764a632ae6
/api/v1/websites 2024-09-19 15:50:37 +02:00
396bff909e
update dependencies 2024-09-14 21:01:30 +02:00
36 changed files with 1710 additions and 1519 deletions

@@ -0,0 +1,23 @@
name: PR Check
on:
pull_request:
types: [opened, synchronize, reopened]
jobs:
lint-and-format:
container:
image: cimg/rust:1.81-node
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Build
run: cargo build
- name: Run format check
run: cargo fmt --check
- name: Run Clippy
run: cargo clippy

.gitignore vendored

@@ -5,4 +5,15 @@
docker-compose.yml
/.vscode
/data
# Data
data/index.md
data/contacts/*
data/cours/*
data/projects/*
# Blog
data/blog/*.md
data/blog/posts/*
!data/blog/posts/Makefile

Cargo.lock generated

File diff suppressed because it is too large.

@@ -10,9 +10,9 @@ publish = false
license = "AGPL-3.0-or-later"
[dependencies]
actix-web = { version = "4.6", default-features = false, features = ["macros", "compress-brotli"] }
actix-web = { version = "4.9", default-features = false, features = ["macros", "compress-brotli"] }
actix-files = "0.6"
cached = { version = "0.53", features = ["async", "ahash"] }
cached = { version = "0.54", features = ["async", "ahash"] }
ramhorns = "1.0"
toml = "0.8"
serde = { version = "1.0", features = ["derive"] }
@@ -21,17 +21,22 @@ serde_json = "1.0"
minify-html = "0.15"
minify-js = "0.6"
glob = "0.3"
comrak = "0.26"
comrak = "0.29"
reqwest = { version = "0.12", features = ["json"] }
chrono = { version = "0.4.38", default-features = false, features = ["clock"]}
chrono-tz = "0.9"
chrono-tz = "0.10"
rss = { version = "2.0", features = ["atom"] }
lol_html = "1.2"
lol_html = "2.0"
base64 = "0.22"
mime_guess = "2.0"
urlencoding = "2.1"
regex = "1.10"
cyborgtime = "2.1.1"
walkdir = "2.5"
[lints.clippy]
pedantic = "warn"
[profile.release]
strip = "symbols"
lto = "thin"

@@ -171,6 +171,7 @@ title: Option<String>
date: Option<Date>
description: Option<String>
publish: Option<bool>
draft: Option<bool>
tags: Option<Vec<Tag>>
---
@@ -180,7 +181,8 @@ Post content
- If no `title`, the filename will be used
- `date` format is `day-month-year`
- `publish` defaults to false. When false, posts are hidden from the index
but accessible, see [#30](https://git.mylloon.fr/Anri/mylloon.fr/issues/30)
but accessible.
- `draft` defaults to false. When true, posts are hidden and inaccessible.
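For reference, the two flags combine into one listing rule; a minimal sketch (illustrative only, the helper name is ours, mirroring the filter used by `get_posts` later in this diff):

```rust
/// Sketch of the rule above: a post is listed only when `publish` is
/// explicitly true and `draft` is not true.
fn is_listed(publish: Option<bool>, draft: Option<bool>) -> bool {
    publish == Some(true) && draft != Some(true)
}
```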
### About <!-- omit in toc -->
@@ -223,6 +225,7 @@ custom: Option<bool>
user: "Option<String>"
link: Option<String>
newtab: Option<bool>
hide: Option<bool>
description: >
Option<String>
---

@@ -3,7 +3,7 @@
Easy WebPage generator
[![dependency status](https://deps.rs/repo/gitea/git.mylloon.fr/Anri/mylloon.fr/status.svg)](https://deps.rs/repo/gitea/git.mylloon.fr/Anri/mylloon.fr)
![status-badge](https://git.mylloon.fr/Anri/mylloon.fr/badges/workflows/publish.yml/badge.svg)
[![status-badge](https://git.mylloon.fr/Anri/mylloon.fr/badges/workflows/publish.yml/badge.svg)](https://git.mylloon.fr/Anri/mylloon.fr/actions?workflow=publish.yml)
- See [issues](https://git.mylloon.fr/Anri/mylloon.fr/issues)
- See [documentation](https://git.mylloon.fr/Anri/mylloon.fr/src/branch/main/Documentation.md)

data/blog/posts/Makefile Normal file

@@ -0,0 +1,20 @@
MKDIR = mkdir -p
TOUCH = touch
PRINT = echo
DATE = $(shell date '+%d-%m-%Y')
DIR = $(shell date '+%Y/%m')
FI := new
new:
$(MKDIR) $(DIR) 2> /dev/null
$(TOUCH) $(DIR)/$(FI).md
$(PRINT) "---" > $(DIR)/$(FI).md
$(PRINT) "publish: false" >> $(DIR)/$(FI).md
$(PRINT) "date: $(DATE)" >> $(DIR)/$(FI).md
$(PRINT) "draft: true" >> $(DIR)/$(FI).md
$(PRINT) "---" >> $(DIR)/$(FI).md
help:
$(PRINT) "make FI=new"

@@ -13,8 +13,8 @@ use crate::routes::{
mod config;
mod template;
mod misc;
mod routes;
mod utils;
#[actix_web::main]
async fn main() -> Result<()> {
@@ -43,8 +43,12 @@ async fn main() -> Result<()> {
.add(("Permissions-Policy", "interest-cohort=()")),
)
.service(
web::scope("/api")
.service(web::scope("v1").service(api_v1::love).service(api_v1::btf)),
web::scope("/api").service(
web::scope("v1")
.service(api_v1::love)
.service(api_v1::btf)
.service(api_v1::websites),
),
)
.service(index::page)
.service(agreements::security)

@@ -1,4 +1,4 @@
use crate::{config::Config, misc::utils::get_url, template::InfosPage};
use crate::{config::Config, template::InfosPage, utils::misc::get_url};
use actix_web::{get, http::header::ContentType, routes, web, HttpResponse, Responder};
use cached::proc_macro::once;
use ramhorns::Content;

@@ -43,3 +43,11 @@ pub async fn btf() -> impl Responder {
HttpResponse::Ok().json(info)
}
#[get("/websites")]
pub async fn websites() -> impl Responder {
HttpResponse::Ok().json((
"http://www.bocal.cs.univ-paris8.fr/~akennel/",
"https://anri.up8.site/",
))
}
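A minimal consumer sketch for this endpoint (illustrative; the host and port are assumptions, and the 2-tuple serializes as a JSON array of two URL strings):

```rust
// Illustrative sketch, not part of the diff: fetch /api/v1/websites and
// decode the JSON array of two URLs into a tuple. Host/port are assumed.
async fn fetch_websites() -> Result<(String, String), reqwest::Error> {
    reqwest::get("http://localhost:8080/api/v1/websites")
        .await?
        .json::<(String, String)>()
        .await
}
```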

@@ -1,33 +1,18 @@
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use ::rss::{
extension::atom::{AtomExtension, Link},
Category, Channel, Guid, Image, Item,
};
use actix_web::{get, http::header::ContentType, routes, web, HttpResponse, Responder};
use cached::proc_macro::once;
use chrono::{DateTime, Datelike, Local, NaiveDateTime, Utc};
use chrono_tz::Europe;
use comrak::{parse_document, Arena};
use ramhorns::Content;
use crate::{
config::Config,
misc::{
date::Date,
markdown::{get_metadata, get_options, File, FileMetadataBlog, TypeFileMetadata},
utils::{get_url, make_kw, read_file, Html},
},
template::{InfosPage, NavBar},
utils::{
markdown::File,
metadata::MType,
misc::{make_kw, read_file, Html},
routes::blog::{build_rss, get_post, get_posts, Post, BLOG_DIR, MIME_TYPE_RSS, POST_DIR},
},
};
const MIME_TYPE_RSS: &str = "application/rss+xml";
const BLOG_DIR: &str = "blog";
const POST_DIR: &str = "posts";
#[get("/blog")]
pub async fn index(config: web::Data<Config>) -> impl Responder {
Html(build_index(config.get_ref().to_owned()))
@@ -47,8 +32,7 @@ fn build_index(config: Config) -> String {
let mut posts = get_posts(&format!("{blog_dir}/{POST_DIR}"));
// Get about
let about: Option<File> =
read_file(&format!("{blog_dir}/about.md"), &TypeFileMetadata::Generic);
let about: Option<File> = read_file(format!("{blog_dir}/about.md"), MType::Generic);
// Sort from newest to oldest
posts.sort_by_cached_key(|p| (p.date.year, p.date.month, p.date.day));
@@ -76,112 +60,6 @@
)
}
#[derive(Content, Debug)]
struct Post {
title: String,
date: Date,
url: String,
desc: Option<String>,
content: Option<String>,
tags: Vec<String>,
}
impl Post {
// Fetch the file content
fn fetch_content(&mut self, data_dir: &str) {
let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
let ext = ".md";
if let Some(file) = read_file(
&format!("{blog_dir}/{}{ext}", self.url),
&TypeFileMetadata::Blog,
) {
self.content = Some(file.content);
}
}
}
impl Hash for Post {
fn hash<H: Hasher>(&self, state: &mut H) {
if let Some(content) = &self.content {
content.hash(state);
}
}
}
fn get_posts(location: &str) -> Vec<Post> {
let entries = std::fs::read_dir(location).map_or_else(
|_| vec![],
|res| {
res.flatten()
.filter(|f| f.path().extension().map_or(false, |ext| ext == "md"))
.collect::<Vec<std::fs::DirEntry>>()
},
);
entries
.iter()
.filter_map(|f| {
let fname = f.file_name();
let filename = fname.to_string_lossy();
let file_without_ext = filename.split_at(filename.len() - 3).0;
let file_metadata = std::fs::read_to_string(format!("{location}/{filename}"))
.map_or_else(
|_| FileMetadataBlog {
title: Some(file_without_ext.into()),
..FileMetadataBlog::default()
},
|text| {
let arena = Arena::new();
let options = get_options();
let root = parse_document(&arena, &text, &options);
let mut metadata =
get_metadata(root, &TypeFileMetadata::Blog).blog.unwrap();
// Always have a title
metadata.title = metadata
.title
.map_or_else(|| Some(file_without_ext.into()), Some);
metadata
},
);
if file_metadata.publish == Some(true) {
Some(Post {
url: file_without_ext.into(),
title: file_metadata.title.unwrap(),
date: file_metadata.date.unwrap_or({
let m = f.metadata().unwrap();
let date = std::convert::Into::<DateTime<Utc>>::into(
m.modified().unwrap_or_else(|_| m.created().unwrap()),
)
.date_naive();
Date {
day: date.day(),
month: date.month(),
year: date.year(),
}
}),
desc: file_metadata.description,
content: None,
tags: file_metadata
.tags
.unwrap_or_default()
.iter()
.map(|t| t.name.clone())
.collect(),
})
} else {
None
}
})
.collect::<Vec<Post>>()
}
#[derive(Content, Debug)]
struct BlogPostTemplate {
navbar: NavBar,
@@ -189,7 +67,7 @@ struct BlogPostTemplate {
toc: String,
}
#[get("/blog/p/{id}")]
#[get("/blog/p/{id:.*}")]
pub async fn page(path: web::Path<(String,)>, config: web::Data<Config>) -> impl Responder {
Html(build_post(
&path.into_inner().0,
@@ -220,64 +98,6 @@ fn build_post(file: &str, config: Config) -> String {
)
}
fn get_post(
post: &mut Option<File>,
filename: &str,
name: &str,
data_dir: &str,
) -> (InfosPage, String) {
let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
let ext = ".md";
*post = read_file(
&format!("{blog_dir}/{filename}{ext}"),
&TypeFileMetadata::Blog,
);
let default = (
filename,
&format!("Blog d'{name}"),
Vec::new(),
String::new(),
);
let (title, desc, tags, toc) = match post {
Some(data) => (
match &data.metadata.info.blog.as_ref().unwrap().title {
Some(text) => text,
None => default.0,
},
match &data.metadata.info.blog.as_ref().unwrap().description {
Some(desc) => desc,
None => default.1,
},
match &data.metadata.info.blog.as_ref().unwrap().tags {
Some(tags) => tags.clone(),
None => default.2,
},
match &data.metadata.info.blog.as_ref().unwrap().toc {
// TODO: Generate TOC
Some(true) => String::new(),
_ => default.3,
},
),
None => default,
};
(
InfosPage {
title: Some(format!("Post: {title}")),
desc: Some(desc.clone()),
kw: Some(make_kw(
&["blog", "blogging", "write", "writing"]
.into_iter()
.chain(tags.iter().map(|t| t.name.as_str()))
.collect::<Vec<_>>(),
)),
},
toc,
)
}
#[routes]
#[get("/blog/blog.rss")]
#[get("/blog/rss")]
@@ -286,110 +106,3 @@ pub async fn rss(config: web::Data<Config>) -> impl Responder {
.content_type(ContentType(MIME_TYPE_RSS.parse().unwrap()))
.body(build_rss(config.get_ref().to_owned()))
}
#[once(time = 10800)] // 3h
fn build_rss(config: Config) -> String {
let mut posts = get_posts(&format!(
"{}/{}/{}",
config.locations.data_dir, BLOG_DIR, POST_DIR
));
// Sort from newest to oldest
posts.sort_by_cached_key(|p| (p.date.year, p.date.month, p.date.day));
posts.reverse();
// Only the 20 newest
let max = 20;
if posts.len() > max {
posts.drain(max..);
}
let link_to_site = get_url(config.fc.clone());
let author = if let (Some(mail), Some(name)) = (config.fc.mail, config.fc.fullname.clone()) {
Some(format!("{mail} ({name})"))
} else {
None
};
let title = format!("Blog d'{}", config.fc.name.unwrap_or_default());
let lang = "fr";
let channel = Channel {
title: title.clone(),
link: link_to_site.clone(),
description: "Un fil qui parle d'informatique notamment".into(),
language: Some(lang.into()),
managing_editor: author.clone(),
webmaster: author,
pub_date: Some(Local::now().to_rfc2822()),
categories: ["blog", "blogging", "write", "writing"]
.iter()
.map(|&c| Category {
name: c.into(),
..Category::default()
})
.collect(),
generator: Some("ewp with rss crate".into()),
docs: Some("https://www.rssboard.org/rss-specification".into()),
image: Some(Image {
url: format!("{link_to_site}/icons/favicon-32x32.png"),
title: title.clone(),
link: link_to_site.clone(),
..Image::default()
}),
items: posts
.iter_mut()
.map(|p| {
// Get post data
p.fetch_content(&config.locations.data_dir);
// Build item
Item {
title: Some(p.title.clone()),
link: Some(format!("{}/blog/p/{}", link_to_site, p.url)),
description: p.content.clone(),
categories: p
.tags
.iter()
.map(|c| Category {
name: c.to_owned(),
..Category::default()
})
.collect(),
guid: Some(Guid {
value: format!("urn:hash:{}", {
let mut hasher = DefaultHasher::new();
p.hash(&mut hasher);
hasher.finish()
}),
permalink: false,
}),
pub_date: Some(
NaiveDateTime::parse_from_str(
&format!("{}-{}-{} 13:12:00", p.date.day, p.date.month, p.date.year),
"%d-%m-%Y %H:%M:%S",
)
.unwrap()
.and_local_timezone(Europe::Paris)
.unwrap()
.to_rfc2822(),
),
..Item::default()
}
})
.collect(),
atom_ext: Some(AtomExtension {
links: vec![Link {
href: format!("{link_to_site}/blog/rss"),
rel: "self".into(),
hreflang: Some(lang.into()),
mime_type: Some(MIME_TYPE_RSS.into()),
title: Some(title),
length: None,
}],
}),
..Channel::default()
};
std::str::from_utf8(&channel.write_to(Vec::new()).unwrap())
.unwrap()
.into()
}

@@ -1,16 +1,16 @@
use actix_web::{get, routes, web, HttpRequest, Responder};
use cached::proc_macro::once;
use glob::glob;
use ramhorns::Content;
use std::fs::read_to_string;
use crate::{
config::Config,
misc::{
markdown::{File, TypeFileMetadata},
utils::{make_kw, read_file, Html},
},
template::{InfosPage, NavBar},
utils::{
markdown::File,
metadata::MType,
misc::{make_kw, read_file, Html},
routes::contact::{find_links, read, remove_paragraphs},
},
};
const CONTACT_DIR: &str = "contacts";
@@ -32,47 +32,6 @@ async fn page(config: web::Data<Config>) -> impl Responder {
Html(build_page(config.get_ref().to_owned()))
}
/// Contact node
#[derive(Clone, Debug)]
struct ContactLink {
service: String,
scope: Option<String>,
link: String,
}
#[once(time = 60)]
fn find_links(directory: String) -> Vec<ContactLink> {
// TOML filename
let toml_file = "links.toml";
// Read the TOML file and parse it
let toml_str = read_to_string(format!("{directory}/{toml_file}")).unwrap_or_default();
let mut redirections = vec![];
match toml::de::from_str::<toml::Value>(&toml_str) {
Ok(data) => {
if let Some(section) = data.as_table() {
section.iter().for_each(|(key, value)| {
// Scopes are delimited with `/`
let (service, scope) = match key.split_once('/') {
Some((service, scope)) => (service.to_owned(), Some(scope.to_owned())),
None => (key.to_owned(), None),
};
redirections.push(ContactLink {
service,
scope,
link: value.as_str().unwrap().to_owned(),
});
});
}
}
Err(_) => return vec![],
}
redirections
}
#[routes]
#[get("/{service}")]
#[get("/{service}/{scope}")]
@@ -92,7 +51,7 @@ async fn service_redirection(config: web::Data<Config>, req: HttpRequest) -> imp
_ => false,
})
// Returns the link
.map(|data| data.link.clone())
.map(|data| data.url.clone())
.collect::<Vec<String>>();
// This shouldn't be more than one link here
@@ -119,39 +78,17 @@ struct NetworksTemplate {
others: Vec<File>,
}
fn remove_paragraphs(list: &mut [File]) {
list.iter_mut()
.for_each(|file| file.content = file.content.replace("<p>", "").replace("</p>", ""));
}
#[once(time = 60)]
fn build_page(config: Config) -> String {
let contacts_dir = format!("{}/{}", config.locations.data_dir, CONTACT_DIR);
let ext = ".md";
// Get about
let about = read_file(
&format!("{contacts_dir}/about.md"),
&TypeFileMetadata::Generic,
);
let about = read_file(format!("{contacts_dir}/about.md"), MType::Generic);
let socials_dir = "socials";
let mut socials = glob(&format!("{contacts_dir}/{socials_dir}/*{ext}"))
.unwrap()
.map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
.collect::<Vec<File>>();
let forges_dir = "forges";
let mut forges = glob(&format!("{contacts_dir}/{forges_dir}/*{ext}"))
.unwrap()
.map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
.collect::<Vec<File>>();
let others_dir = "others";
let mut others = glob(&format!("{contacts_dir}/{others_dir}/*{ext}"))
.unwrap()
.map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
.collect::<Vec<File>>();
let mut socials = read(&format!("{contacts_dir}/socials/*{ext}"));
let mut forges = read(&format!("{contacts_dir}/forges/*{ext}"));
let mut others = read(&format!("{contacts_dir}/others/*{ext}"));
// Remove paragraphs in custom statements
[&mut socials, &mut forges, &mut others]

@@ -1,12 +1,10 @@
use std::collections::HashMap;
use crate::{
config::Config,
misc::{
github::{fetch_pr, ProjectState},
utils::{make_kw, Html},
},
template::{InfosPage, NavBar},
utils::{
misc::{make_kw, Html},
routes::contrib::{fetch, Project},
},
};
use actix_web::{get, web, Responder};
use cached::proc_macro::once;
@@ -26,24 +24,6 @@ struct PortfolioTemplate {
closed: Option<Vec<Project>>,
}
#[derive(Clone, Content, Debug)]
struct Project {
name: String,
url: String,
pulls_merged: Vec<Pull>,
pulls_open: Vec<Pull>,
pulls_closed: Vec<Pull>,
}
#[derive(Clone, Content, Debug)]
struct Pull {
url: String,
id: u32,
name_repo: String,
title: String,
state: u8,
}
#[once(time = 600)] // 10min
async fn build_page(config: Config) -> String {
let navbar = NavBar {
@@ -52,88 +32,29 @@ };
};
// Fetch latest data from github
let data = match fetch_pr().await {
Ok(projects) => {
let mut data: Vec<Project> = Vec::new();
// Grouping PRs by projects
let mut map: HashMap<&str, Vec<Pull>> = HashMap::new();
for p in &projects {
let project = Pull {
url: p.contrib_url.clone(),
id: p.id,
name_repo: p.name.clone(),
title: p.title.clone(),
state: p.status as u8,
};
let project_name = p.name.as_str();
if map.contains_key(project_name) {
map.entry(project_name).and_modify(|v| v.push(project));
} else {
data.push(Project {
name: project_name.into(),
url: p.url.clone(),
pulls_merged: Vec::new(),
pulls_closed: Vec::new(),
pulls_open: Vec::new(),
});
map.insert(project_name, vec![project]);
}
}
// Distributes each PR in the right vector
for d in &mut data {
map.get(d.name.as_str()).unwrap().iter().for_each(|p| {
let state = p.state.try_into().unwrap();
match state {
ProjectState::Closed => d.pulls_closed.push(p.to_owned()),
ProjectState::Merged => d.pulls_merged.push(p.to_owned()),
ProjectState::Open => d.pulls_open.push(p.to_owned()),
}
});
let mut name: Vec<char> = d.name.replace('-', " ").chars().collect();
name[0] = name[0].to_uppercase().next().unwrap();
d.name = name.into_iter().collect();
}
// Ascending order by pulls IDs
for d in &mut data {
d.pulls_closed.reverse();
d.pulls_merged.reverse();
d.pulls_open.reverse();
}
// Ascending order by number of pulls
data.sort_by(|a, b| {
b.pulls_merged
.len()
.partial_cmp(&a.pulls_merged.len())
.unwrap()
});
PortfolioTemplate {
navbar,
error: false,
projects: Some(
data.iter()
.filter(|&p| !p.pulls_merged.is_empty())
.cloned()
.collect(),
),
waiting: Some(
data.iter()
.filter(|&p| !p.pulls_open.is_empty())
.cloned()
.collect(),
),
closed: Some(
data.iter()
.filter(|&p| !p.pulls_closed.is_empty())
.cloned()
.collect(),
),
}
}
let data = match fetch().await {
Ok(data) => PortfolioTemplate {
navbar,
error: false,
projects: Some(
data.iter()
.filter(|&p| !p.pulls_merged.is_empty())
.cloned()
.collect(),
),
waiting: Some(
data.iter()
.filter(|&p| !p.pulls_open.is_empty())
.cloned()
.collect(),
),
closed: Some(
data.iter()
.filter(|&p| !p.pulls_closed.is_empty())
.cloned()
.collect(),
),
},
Err(e) => {
eprintln!("{e}");

@@ -1,18 +1,18 @@
use std::path::Path;
use actix_web::{get, web, Responder};
use cached::proc_macro::cached;
use ramhorns::Content;
use regex::Regex;
use serde::{Deserialize, Serialize};
use serde::Deserialize;
use crate::{
config::Config,
misc::{
markdown::{File, TypeFileMetadata},
utils::{make_kw, read_file, Html},
},
template::{InfosPage, NavBar},
utils::{
markdown::File,
metadata::MType,
misc::{make_kw, read_file, Html},
routes::cours::{excluded, get_filetree},
},
};
#[derive(Debug, Deserialize)]
@@ -32,13 +32,6 @@ struct CoursTemplate {
content: Option<File>,
}
#[derive(Clone, Debug, Serialize)]
struct FileNode {
name: String,
is_dir: bool,
children: Vec<FileNode>,
}
#[cached]
fn compile_patterns(exclusion_list: Vec<String>) -> Vec<Regex> {
exclusion_list
@@ -47,76 +40,42 @@
.collect()
}
fn get_filetree(dir_path: &str, exclusion_patterns: &Vec<Regex>) -> FileNode {
let children = std::fs::read_dir(dir_path)
.unwrap()
.filter_map(Result::ok)
.filter_map(|entry| {
let entry_path = entry.path();
let entry_name = entry_path.file_name()?.to_string_lossy().to_string();
// Exclude element with the exclusion_list
if exclusion_patterns.iter().any(|re| re.is_match(&entry_name)) {
return None;
}
if entry_path.is_file() {
Some(FileNode {
name: entry_name,
is_dir: false,
children: vec![],
})
} else {
// Exclude empty directories
let children_of_children =
get_filetree(entry_path.to_str().unwrap(), exclusion_patterns);
if children_of_children.is_dir && children_of_children.children.is_empty() {
None
} else {
Some(children_of_children)
}
}
})
.collect();
FileNode {
name: Path::new(dir_path)
.file_name()
.unwrap()
.to_string_lossy()
.to_string(),
is_dir: true,
children,
}
}
/// Get a page content
fn get_content(
cours_dir: &str,
path: &web::Query<PathRequest>,
exclusion_list: &[String],
exclusion_patterns: &[Regex],
) -> Option<File> {
let filename = path.q.as_ref().map_or("index.md", |q| q);
// We should support regex?
if exclusion_list
.iter()
.any(|excluded_term| filename.contains(excluded_term.as_str()))
{
// Exclusion checks
if excluded(filename, exclusion_list, exclusion_patterns) {
return None;
}
read_file(
&format!("{cours_dir}/{filename}"),
&TypeFileMetadata::Generic,
)
read_file(format!("{cours_dir}/{filename}"), MType::Generic)
}
fn build_page(info: &web::Query<PathRequest>, config: Config) -> String {
let cours_dir = "data/cours";
let exclusion_list = config.fc.exclude_courses.unwrap();
let exclusion_patterns = compile_patterns(exclusion_list.clone());
let filetree = get_filetree(cours_dir, &exclusion_patterns);
let (ep, el): (_, Vec<String>) = config
.fc
.exclude_courses
.unwrap()
.into_iter()
.partition(|item| item.starts_with('/'));
let exclusion_list = {
let mut base = vec!["../".to_owned()];
base.extend(el);
base
};
let exclusion_patterns: Vec<Regex> =
compile_patterns(ep.iter().map(|r| r[1..r.len() - 1].to_owned()).collect());
let filetree = get_filetree(cours_dir, &exclusion_list, &exclusion_patterns);
config.tmpl.render(
"cours.html",
@@ -126,7 +85,7 @@ fn build_page(info: &web::Query<PathRequest>, config: Config) -> String {
..NavBar::default()
},
filetree: serde_json::to_string(&filetree).unwrap(),
content: get_content(cours_dir, info, &exclusion_list),
content: get_content(cours_dir, info, &exclusion_list, &exclusion_patterns),
},
InfosPage {
title: Some("Cours".into()),

@@ -4,11 +4,12 @@ use ramhorns::Content;
use crate::{
config::Config,
misc::{
markdown::{File, TypeFileMetadata},
utils::{make_kw, read_file, Html},
},
template::{InfosPage, NavBar},
utils::{
markdown::File,
metadata::MType,
misc::{make_kw, read_file, Html},
},
};
#[get("/")]
@@ -36,8 +37,8 @@ struct StyleAvatar {
#[once(time = 60)]
fn build_page(config: Config) -> String {
let mut file = read_file(
&format!("{}/index.md", config.locations.data_dir),
&TypeFileMetadata::Index,
format!("{}/index.md", config.locations.data_dir),
MType::Index,
);
// Default values
@@ -67,7 +68,7 @@ fn build_page(config: Config) -> String {
}
}
} else {
file = read_file("README.md", &TypeFileMetadata::Generic);
file = read_file("README.md".to_string(), MType::Generic);
}
config.tmpl.render(

@@ -4,8 +4,8 @@ use ramhorns::Content;
use crate::{
config::Config,
misc::utils::{get_url, Html},
template::{InfosPage, NavBar},
utils::misc::{get_url, Html},
};
pub async fn page(config: web::Data<Config>) -> impl Responder {

@@ -5,11 +5,12 @@ use ramhorns::Content;
use crate::{
config::Config,
misc::{
markdown::{File, TypeFileMetadata},
utils::{make_kw, read_file, Html},
},
template::{InfosPage, NavBar},
utils::{
markdown::File,
metadata::MType,
misc::{make_kw, read_file, Html},
},
};
#[get("/portfolio")]
@@ -35,15 +36,12 @@ fn build_page(config: Config) -> String {
let ext = ".md";
// Get about
let about = read_file(
&format!("{projects_dir}/about.md"),
&TypeFileMetadata::Generic,
);
let about = read_file(format!("{projects_dir}/about.md"), MType::Generic);
// Get apps
let apps = glob(&format!("{apps_dir}/*{ext}"))
.unwrap()
.map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Portfolio).unwrap())
.map(|e| read_file(e.unwrap().to_string_lossy().to_string(), MType::Portfolio).unwrap())
.collect::<Vec<File>>();
let appdata = if apps.is_empty() {
@@ -55,7 +53,7 @@ fn build_page(config: Config) -> String {
// Get archived apps
let archived_apps = glob(&format!("{apps_dir}/archive/*{ext}"))
.unwrap()
.map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Portfolio).unwrap())
.map(|e| read_file(e.unwrap().to_string_lossy().to_string(), MType::Portfolio).unwrap())
.collect::<Vec<File>>();
let archived_appdata = if archived_apps.is_empty() {

@@ -3,8 +3,8 @@ use cached::proc_macro::once;
use crate::{
config::Config,
misc::utils::{make_kw, Html},
template::InfosPage,
utils::misc::{make_kw, Html},
};
#[get("/web3")]

@@ -2,7 +2,7 @@ use chrono::{Datelike, NaiveDate};
use ramhorns::Content;
use serde::{Deserialize, Deserializer};
#[derive(Content, Default, Debug)]
#[derive(Content, Clone, Default, Debug)]
pub struct Date {
pub day: u32,
pub month: u32,

@@ -1,7 +1,7 @@
use reqwest::{header::ACCEPT, Error};
use serde::Deserialize;
use crate::misc::utils::get_reqwest_client;
use crate::utils::misc::get_reqwest_client;
#[derive(Debug, Deserialize)]
struct GithubResponse {

@@ -1,4 +1,3 @@
use crate::misc::date::Date;
use base64::engine::general_purpose;
use base64::Engine;
use comrak::nodes::{AstNode, NodeValue};
@@ -6,116 +5,16 @@ use comrak::{format_html, parse_document, Arena, ComrakOptions, ListStyleType, O
use lol_html::html_content::ContentType;
use lol_html::{element, rewrite_str, HtmlRewriter, RewriteStrSettings, Settings};
use ramhorns::Content;
use serde::{Deserialize, Deserializer};
use std::fmt::Debug;
use std::fs;
use std::path::Path;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
/// Metadata for blog posts
#[derive(Content, Debug, Default, Deserialize)]
pub struct FileMetadataBlog {
pub title: Option<String>,
pub date: Option<Date>,
pub description: Option<String>,
pub publish: Option<bool>,
pub tags: Option<Vec<Tag>>,
pub toc: Option<bool>,
}
use crate::utils::metadata::MType;
/// A tag, related to post blog
#[derive(Content, Debug, Clone)]
pub struct Tag {
pub name: String,
}
impl<'de> Deserialize<'de> for Tag {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
match <&str>::deserialize(deserializer) {
Ok(s) => match serde_yml::from_str(s) {
Ok(tag) => Ok(Self { name: tag }),
Err(e) => Err(serde::de::Error::custom(e)),
},
Err(e) => Err(e),
}
}
}
/// Metadata for contact entry
#[derive(Content, Debug, Default, Deserialize)]
pub struct FileMetadataContact {
pub title: String,
pub custom: Option<bool>,
pub user: Option<String>,
pub link: Option<String>,
pub newtab: Option<bool>,
pub description: Option<String>,
}
/// Metadata for index page
#[derive(Content, Debug, Default, Deserialize)]
pub struct FileMetadataIndex {
pub name: Option<String>,
pub pronouns: Option<String>,
pub avatar: Option<String>,
pub avatar_caption: Option<String>,
pub avatar_style: Option<String>,
}
/// Metadata for portfolio cards
#[derive(Content, Debug, Default, Deserialize)]
pub struct FileMetadataPortfolio {
pub title: Option<String>,
pub link: Option<String>,
pub description: Option<String>,
pub language: Option<String>,
}
/// List of available metadata types
pub enum TypeFileMetadata {
Blog,
Contact,
Generic,
Index,
Portfolio,
}
/// Structure who holds all the metadata the file have
/// Usually all fields are None except one
#[derive(Content, Debug, Default, Deserialize)]
pub struct FileMetadata {
pub blog: Option<FileMetadataBlog>,
pub contact: Option<FileMetadataContact>,
pub index: Option<FileMetadataIndex>,
pub portfolio: Option<FileMetadataPortfolio>,
}
#[allow(clippy::struct_excessive_bools)]
/// Global metadata
#[derive(Content, Debug)]
pub struct Metadata {
pub info: FileMetadata,
pub math: bool,
pub mermaid: bool,
pub syntax_highlight: bool,
pub mail_obfsucated: bool,
}
impl Metadata {
/// Update current metadata boolean fields, keeping true ones
fn merge(&mut self, other: &Self) {
self.math = self.math || other.math;
self.mermaid = self.mermaid || other.mermaid;
self.syntax_highlight = self.syntax_highlight || other.syntax_highlight;
}
}
use super::metadata::{get, MFile, Metadata};
/// File description
#[derive(Content, Debug)]
#[derive(Content, Debug, Clone)]
pub struct File {
pub metadata: Metadata,
pub content: String,
@@ -146,14 +45,14 @@ pub fn get_options<'a>() -> ComrakOptions<'a> {
options.extension.greentext = false;
// Parser
options.parse.smart = false; // could be boring
options.parse.smart = true; // could be boring
options.parse.default_info_string = Some("plaintext".into());
options.parse.relaxed_tasklist_matching = true;
options.parse.relaxed_autolinks = true;
// options.render.broken_link_callback = ...;
// Renderer
options.render.hardbreaks = false; // could be true? change by metadata could be good for compatibility
options.render.hardbreaks = false;
options.render.github_pre_lang = false;
options.render.full_info_string = true;
options.render.width = 0; // 0 mean disabled?
@@ -167,6 +66,7 @@
options.render.ignore_empty_links = true;
options.render.gfm_quirks = true;
options.render.prefer_fenced = false;
options.render.figure_with_caption = false;
options
}
@@ -222,75 +122,73 @@ fn custom_img_size(html: &str) -> String {
/// Fix local images to base64 and integration of markdown files
fn fix_images_and_integration(path: &str, html: &str) -> (String, Metadata) {
let mut metadata = Metadata {
info: FileMetadata::default(),
info: MFile::default(),
math: false,
mermaid: false,
syntax_highlight: false,
mail_obfsucated: false,
};
(
rewrite_str(
html,
RewriteStrSettings {
element_content_handlers: vec![element!("img", |el| {
if let Some(src) = el.get_attribute("src") {
let img_src = Path::new(path).parent().unwrap();
let img_path = urlencoding::decode(img_src.join(src).to_str().unwrap())
.unwrap()
.to_string();
if let Ok(file) = fs::read_to_string(&img_path) {
let mime = mime_guess::from_path(&img_path).first_or_octet_stream();
if mime == "text/markdown" {
let mut options = get_options();
options.extension.footnotes = false;
let data = read_md(
&img_path,
&file,
&TypeFileMetadata::Generic,
Some(options),
);
el.replace(&data.content, ContentType::Html);
metadata.merge(&data.metadata);
} else {
let image = general_purpose::STANDARD.encode(file);
// Collection of any additional metadata
let mut additional_metadata = Vec::new();
el.set_attribute("src", &format!("data:{mime};base64,{image}"))
.unwrap();
}
let result = rewrite_str(
html,
RewriteStrSettings {
element_content_handlers: vec![element!("img", |el| {
if let Some(src) = el.get_attribute("src") {
let img_src = Path::new(path).parent().unwrap();
let img_path = urlencoding::decode(img_src.join(src).to_str().unwrap())
.unwrap()
.to_string();
if let Ok(file) = fs::read_to_string(&img_path) {
let mime = mime_guess::from_path(&img_path).first_or_octet_stream();
if mime == "text/markdown" {
let mut options = get_options();
options.extension.footnotes = false;
let data = read_md(&img_path, &file, MType::Generic, Some(options));
el.replace(&data.content, ContentType::Html);
// Store the metadata for later merging
additional_metadata.push(data.metadata);
} else {
let image = general_purpose::STANDARD.encode(file);
el.set_attribute("src", &format!("data:{mime};base64,{image}"))
.unwrap();
}
}
Ok(())
})],
..RewriteStrSettings::default()
},
)
.unwrap(),
metadata,
}
Ok(())
})],
..RewriteStrSettings::default()
},
)
.unwrap();
// Merge all collected metadata
for additional in additional_metadata {
metadata.merge(&additional);
}
(result, metadata)
}
/// Transform markdown string to File structure
pub fn read_md(
path: &str,
raw_text: &str,
metadata_type: &TypeFileMetadata,
options: Option<Options>,
) -> File {
pub fn read_md(path: &str, raw_text: &str, metadata_type: MType, options: Option<Options>) -> File {
let arena = Arena::new();
let opt = options.map_or_else(get_options, |specific_opt| specific_opt);
let mut opt = options.map_or_else(get_options, |specific_opt| specific_opt);
let root = parse_document(&arena, raw_text, &opt);
// Find metadata
let metadata = get_metadata(root, metadata_type);
let metadata = get(root, metadata_type);
// Update comrak render properties
opt.render.hardbreaks = metadata.hardbreaks;
let mermaid_name = "mermaid";
hljs_replace(root, mermaid_name);
replace_quotes(root);
// Convert to HTML
let mut html = vec![];
format_html(root, &opt, &mut html).unwrap();
@@ -318,70 +216,6 @@ pub fn read_md(
}
}
/// Deserialize metadata based on a type
fn deserialize_metadata<T: Default + serde::de::DeserializeOwned>(text: &str) -> T {
serde_yml::from_str(text.trim().trim_matches(&['-'] as &[_])).unwrap_or_default()
}
/// Fetch metadata from AST
pub fn get_metadata<'a>(root: &'a AstNode<'a>, mtype: &TypeFileMetadata) -> FileMetadata {
root.children()
.find_map(|node| match &node.data.borrow().value {
// Extract metadata from frontmatter
NodeValue::FrontMatter(text) => Some(match mtype {
TypeFileMetadata::Blog => FileMetadata {
blog: Some(deserialize_metadata(text)),
..FileMetadata::default()
},
TypeFileMetadata::Contact => {
let mut metadata: FileMetadataContact = deserialize_metadata(text);
// Trim descriptions
if let Some(desc) = &mut metadata.description {
desc.clone_from(&desc.trim().into());
}
FileMetadata {
contact: Some(metadata),
..FileMetadata::default()
}
}
TypeFileMetadata::Generic => FileMetadata::default(),
TypeFileMetadata::Index => FileMetadata {
index: Some(deserialize_metadata(text)),
..FileMetadata::default()
},
TypeFileMetadata::Portfolio => FileMetadata {
portfolio: Some(deserialize_metadata(text)),
..FileMetadata::default()
},
}),
_ => None,
})
.map_or_else(
|| match mtype {
TypeFileMetadata::Blog => FileMetadata {
blog: Some(FileMetadataBlog::default()),
..FileMetadata::default()
},
TypeFileMetadata::Contact => FileMetadata {
contact: Some(FileMetadataContact::default()),
..FileMetadata::default()
},
TypeFileMetadata::Generic => FileMetadata::default(),
TypeFileMetadata::Index => FileMetadata {
index: Some(FileMetadataIndex::default()),
..FileMetadata::default()
},
TypeFileMetadata::Portfolio => FileMetadata {
portfolio: Some(FileMetadataPortfolio::default()),
..FileMetadata::default()
},
},
|data| data,
)
}
/// Check whether mermaid diagrams are in the AST
fn check_mermaid<'a>(root: &'a AstNode<'a>, mermaid_str: &str) -> bool {
root.children().any(|node| match &node.data.borrow().value {
@@ -407,12 +241,12 @@ fn check_code<'a>(root: &'a AstNode<'a>, blacklist: &[String]) -> bool {
/// Check if html contains maths
fn check_math(html: &str) -> bool {
let math_detected = Arc::new(AtomicBool::new(false));
let mut math_detected = false;
let _ = HtmlRewriter::new(
Settings {
element_content_handlers: vec![element!("span[data-math-style]", |_| {
math_detected.store(true, Ordering::SeqCst);
math_detected = true;
Ok(())
})],
@@ -422,7 +256,7 @@ )
)
.write(html.as_bytes());
math_detected.load(Ordering::SeqCst)
math_detected
}
/// Change class of languages for hljs detection
@@ -436,23 +270,9 @@ fn hljs_replace<'a>(root: &'a AstNode<'a>, mermaid_str: &str) {
});
}
/// TODO
fn replace_quotes<'a>(node: &'a AstNode<'a>) {
match &mut node.data.borrow_mut().value {
NodeValue::Text(text) => {
*text = text.replacen('"', "«", 1).replacen('"', "»", 1);
}
_ => {
for c in node.children() {
replace_quotes(c);
}
}
}
}
/// Obfuscate email if email found
fn mail_obfuscation(html: &str) -> (String, bool) {
let modified = Arc::new(AtomicBool::new(false));
let mut modified = false;
let data_attr = "title";
@@ -461,7 +281,7 @@ fn mail_obfuscation(html: &str) -> (String, bool) {
html,
RewriteStrSettings {
element_content_handlers: vec![element!("a[href^='mailto:']", |el| {
modified.store(true, Ordering::SeqCst);
modified = true;
// Get mail address
let link = el.get_attribute("href").unwrap();
@@ -481,9 +301,7 @@ )
)
.unwrap();
let is_modified = modified.load(Ordering::SeqCst);
if is_modified {
if modified {
// Remove old data email if exists
(
rewrite_str(
@@ -509,9 +327,9 @@ },
},
)
.unwrap(),
is_modified,
modified,
)
} else {
(new_html, is_modified)
(new_html, modified)
}
}

src/utils/metadata.rs Normal file

@@ -0,0 +1,185 @@
use crate::utils::date::Date;
use comrak::nodes::{AstNode, NodeValue};
use ramhorns::Content;
use serde::{Deserialize, Deserializer};
use std::fmt::Debug;
/// Metadata for blog posts
#[derive(Content, Clone, Debug, Default, Deserialize)]
pub struct FileMetadataBlog {
pub hardbreaks: Option<bool>,
pub title: Option<String>,
pub date: Option<Date>,
pub description: Option<String>,
pub publish: Option<bool>,
pub draft: Option<bool>,
pub tags: Option<Vec<Tag>>,
pub toc: Option<bool>,
}
/// A tag, related to post blog
#[derive(Content, Debug, Clone)]
pub struct Tag {
pub name: String,
}
impl<'a> Deserialize<'a> for Tag {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'a>,
{
match <&str>::deserialize(deserializer) {
Ok(s) => match serde_yml::from_str(s) {
Ok(tag) => Ok(Self { name: tag }),
Err(e) => Err(serde::de::Error::custom(e)),
},
Err(e) => Err(e),
}
}
}
/// Metadata for contact entry
#[derive(Content, Debug, Default, Deserialize, Clone)]
pub struct FileMetadataContact {
pub title: String,
pub custom: Option<bool>,
pub user: Option<String>,
pub link: Option<String>,
pub newtab: Option<bool>,
pub description: Option<String>,
pub hide: Option<bool>,
}
/// Metadata for index page
#[derive(Content, Debug, Default, Deserialize, Clone)]
pub struct FileMetadataIndex {
pub name: Option<String>,
pub pronouns: Option<String>,
pub avatar: Option<String>,
pub avatar_caption: Option<String>,
pub avatar_style: Option<String>,
}
/// Metadata for portfolio cards
#[derive(Content, Debug, Default, Deserialize, Clone)]
pub struct FileMetadataPortfolio {
pub title: Option<String>,
pub link: Option<String>,
pub description: Option<String>,
pub language: Option<String>,
}
/// List of available metadata types
#[derive(Hash, PartialEq, Eq, Clone, Copy)]
pub enum MType {
Blog,
Contact,
Generic,
Index,
Portfolio,
}
/// Structure who holds all the metadata the file have
/// Usually all fields are None except one
#[derive(Content, Debug, Default, Deserialize, Clone)]
pub struct MFile {
pub hardbreaks: bool,
pub blog: Option<FileMetadataBlog>,
pub contact: Option<FileMetadataContact>,
pub index: Option<FileMetadataIndex>,
pub portfolio: Option<FileMetadataPortfolio>,
}
#[allow(clippy::struct_excessive_bools)]
/// Global metadata
#[derive(Content, Debug, Clone)]
pub struct Metadata {
pub info: MFile,
pub math: bool,
pub mermaid: bool,
pub syntax_highlight: bool,
pub mail_obfsucated: bool,
}
impl Metadata {
/// Update current metadata boolean fields, keeping true ones
pub fn merge(&mut self, other: &Self) {
self.math = self.math || other.math;
self.mermaid = self.mermaid || other.mermaid;
self.syntax_highlight = self.syntax_highlight || other.syntax_highlight;
}
}
/// Deserialize metadata based on a type
fn deserialize_metadata<T: Default + serde::de::DeserializeOwned>(text: &str) -> T {
serde_yml::from_str(text.trim().trim_matches(&['-'] as &[_])).unwrap_or_default()
}
/// Fetch metadata from AST
pub fn get<'a>(root: &'a AstNode<'a>, mtype: MType) -> MFile {
root.children()
.map(|node| {
let generic = MFile {
hardbreaks: true,
..MFile::default()
};
match &node.data.borrow().value {
// Extract metadata from frontmatter
NodeValue::FrontMatter(text) => match mtype {
MType::Blog => {
let metadata: FileMetadataBlog = deserialize_metadata(text);
MFile {
blog: Some(metadata.clone()),
hardbreaks: metadata.hardbreaks.unwrap_or_default(),
..MFile::default()
}
}
MType::Contact => {
let mut metadata: FileMetadataContact = deserialize_metadata(text);
// Trim descriptions
if let Some(desc) = &mut metadata.description {
desc.clone_from(&desc.trim().into());
}
MFile {
contact: Some(metadata),
..MFile::default()
}
}
MType::Generic => generic,
MType::Index => MFile {
index: Some(deserialize_metadata(text)),
..MFile::default()
},
MType::Portfolio => MFile {
portfolio: Some(deserialize_metadata(text)),
..MFile::default()
},
},
_ => generic,
}
})
.next()
.map_or_else(
|| match mtype {
MType::Blog => MFile {
blog: Some(FileMetadataBlog::default()),
..MFile::default()
},
MType::Contact => MFile {
contact: Some(FileMetadataContact::default()),
..MFile::default()
},
MType::Generic => MFile::default(),
MType::Index => MFile {
index: Some(FileMetadataIndex::default()),
..MFile::default()
},
MType::Portfolio => MFile {
portfolio: Some(FileMetadataPortfolio::default()),
..MFile::default()
},
},
|data| data,
)
}
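A quick usage sketch of the new `get` entry point (illustrative; assumes `get_options` from `utils/markdown.rs` enables comrak's frontmatter extension, as the parser relies on `NodeValue::FrontMatter` nodes): blog posts take `hardbreaks` from their frontmatter, while generic files default it to true.

```rust
// Illustrative: how `get` resolves the hardbreaks flag per metadata type.
let arena = comrak::Arena::new();
let raw = "---\nhardbreaks: false\n---\n\nText";
let root = comrak::parse_document(&arena, raw, &get_options());

assert!(!get(root, MType::Blog).hardbreaks); // read from frontmatter
assert!(get(root, MType::Generic).hardbreaks); // generic default is true
```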

@@ -11,7 +11,10 @@ use reqwest::Client;
use crate::config::FileConfiguration;
use super::markdown::{read_md, File, FileMetadata, Metadata, TypeFileMetadata};
use super::{
markdown::{read_md, File},
metadata::{MFile, MType, Metadata},
};
#[cached]
pub fn get_reqwest_client() -> Client {
@@ -53,13 +56,14 @@ }
}
/// Read a file
pub fn read_file(filename: &str, expected_file: &TypeFileMetadata) -> Option<File> {
Path::new(filename)
#[cached]
pub fn read_file(filename: String, expected_file: MType) -> Option<File> {
Path::new(&filename.clone())
.extension()
.and_then(|ext| match ext.to_str().unwrap() {
"pdf" => fs::read(filename).map_or(None, |bytes| Some(read_pdf(bytes))),
_ => fs::read_to_string(filename).map_or(None, |text| {
Some(read_md(filename, &text, expected_file, None))
_ => fs::read_to_string(&filename).map_or(None, |text| {
Some(read_md(&filename, &text, expected_file, None))
}),
})
}
@@ -69,7 +73,7 @@ fn read_pdf(data: Vec<u8>) -> File {
File {
metadata: Metadata {
info: FileMetadata::default(),
info: MFile::default(),
mermaid: false,
syntax_highlight: false,
math: false,

@@ -1,4 +1,6 @@
pub mod date;
pub mod github;
pub mod markdown;
pub mod utils;
pub mod metadata;
pub mod misc;
pub mod routes;

src/utils/routes/blog.rs Normal file

@@ -0,0 +1,291 @@
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use ::rss::{
extension::atom::{AtomExtension, Link},
Category, Channel, Guid, Image, Item,
};
use cached::proc_macro::once;
use chrono::{DateTime, Datelike, Local, NaiveDateTime, Utc};
use chrono_tz::Europe;
use comrak::{parse_document, Arena};
use ramhorns::Content;
use walkdir::WalkDir;
use crate::{
config::Config,
template::InfosPage,
utils::{
date::Date,
markdown::{get_options, File},
metadata::{get, FileMetadataBlog, MType},
misc::{get_url, make_kw, read_file},
},
};
pub const MIME_TYPE_RSS: &str = "application/rss+xml";
pub const BLOG_DIR: &str = "blog";
pub const POST_DIR: &str = "posts";
#[derive(Content, Debug)]
pub struct Post {
title: String,
pub date: Date,
pub url: String,
desc: Option<String>,
content: Option<String>,
tags: Vec<String>,
}
impl Post {
// Fetch the file content
fn fetch_content(&mut self, data_dir: &str) {
let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
let ext = ".md";
if let Some(file) = read_file(format!("{blog_dir}/{}{ext}", self.url), MType::Blog) {
self.content = Some(file.content);
}
}
}
impl Hash for Post {
fn hash<H: Hasher>(&self, state: &mut H) {
if let Some(content) = &self.content {
content.hash(state);
}
}
}
pub fn get_posts(location: &str) -> Vec<Post> {
WalkDir::new(location)
.into_iter()
.filter_map(Result::ok)
.filter(|entry| {
entry.file_type().is_file() && entry.path().extension().is_some_and(|s| s == "md")
})
.filter_map(|f| {
let fname = f.file_name();
let filename = fname.to_string_lossy();
let file_without_ext = filename.split_at(filename.len() - 3).0;
let file_metadata = std::fs::read_to_string(f.path()).map_or_else(
|_| FileMetadataBlog {
title: Some(file_without_ext.into()),
..FileMetadataBlog::default()
},
|text| {
let arena = Arena::new();
let options = get_options();
let root = parse_document(&arena, &text, &options);
let mut metadata = get(root, MType::Blog).blog.unwrap();
// Always have a title
metadata.title = metadata
.title
.map_or_else(|| Some(file_without_ext.into()), Some);
metadata
},
);
if file_metadata.publish == Some(true) && file_metadata.draft != Some(true) {
let url =
f.path().to_string_lossy().strip_prefix(location).unwrap()[1..].to_owned();
Some(Post {
url: url[..url.len() - 3].to_owned(),
title: file_metadata.title.unwrap(),
date: file_metadata.date.unwrap_or({
let m = f.metadata().unwrap();
let date = std::convert::Into::<DateTime<Utc>>::into(
m.modified().unwrap_or_else(|_| m.created().unwrap()),
)
.date_naive();
Date {
day: date.day(),
month: date.month(),
year: date.year(),
}
}),
desc: file_metadata.description,
content: None,
tags: file_metadata
.tags
.unwrap_or_default()
.iter()
.map(|t| t.name.clone())
.collect(),
})
} else {
None
}
})
.collect::<Vec<Post>>()
}
pub fn get_post(
post: &mut Option<File>,
filename: &str,
name: &str,
data_dir: &str,
) -> (InfosPage, String) {
let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
let ext = ".md";
*post = read_file(format!("{blog_dir}/{filename}{ext}"), MType::Blog);
let default = (
filename,
&format!("Blog d'{name}"),
Vec::new(),
String::new(),
);
let (title, desc, tags, toc) = match post {
Some(data) => (
match &data.metadata.info.blog.as_ref().unwrap().title {
Some(text) => text,
None => default.0,
},
match &data.metadata.info.blog.as_ref().unwrap().description {
Some(desc) => desc,
None => default.1,
},
match &data.metadata.info.blog.as_ref().unwrap().tags {
Some(tags) => tags.clone(),
None => default.2,
},
match &data.metadata.info.blog.as_ref().unwrap().toc {
// TODO: Generate TOC
Some(true) => String::new(),
_ => default.3,
},
),
None => default,
};
(
InfosPage {
title: Some(format!("Post: {title}")),
desc: Some(desc.clone()),
kw: Some(make_kw(
&["blog", "blogging", "write", "writing"]
.into_iter()
.chain(tags.iter().map(|t| t.name.as_str()))
.collect::<Vec<_>>(),
)),
},
toc,
)
}
#[once(time = 10800)] // 3h
pub fn build_rss(config: Config) -> String {
let mut posts = get_posts(&format!(
"{}/{}/{}",
config.locations.data_dir, BLOG_DIR, POST_DIR
));
// Sort from newest to oldest
posts.sort_by_cached_key(|p| (p.date.year, p.date.month, p.date.day));
posts.reverse();
// Only the 20 newest
let max = 20;
if posts.len() > max {
posts.drain(max..);
}
let link_to_site = get_url(config.fc.clone());
let author = if let (Some(mail), Some(name)) = (config.fc.mail, config.fc.fullname.clone()) {
Some(format!("{mail} ({name})"))
} else {
None
};
let title = format!("Blog d'{}", config.fc.name.unwrap_or_default());
let lang = "fr";
let channel = Channel {
title: title.clone(),
link: link_to_site.clone(),
description: "Un fil qui parle d'informatique notamment".into(),
language: Some(lang.into()),
managing_editor: author.clone(),
webmaster: author,
pub_date: Some(Local::now().to_rfc2822()),
categories: ["blog", "blogging", "write", "writing"]
.iter()
.map(|&c| Category {
name: c.into(),
..Category::default()
})
.collect(),
generator: Some("ewp with rss crate".into()),
docs: Some("https://www.rssboard.org/rss-specification".into()),
image: Some(Image {
url: format!("{link_to_site}/icons/favicon-32x32.png"),
title: title.clone(),
link: link_to_site.clone(),
..Image::default()
}),
items: posts
.iter_mut()
.map(|p| {
// Get post data
p.fetch_content(&config.locations.data_dir);
// Build item
Item {
title: Some(p.title.clone()),
link: Some(format!("{}/blog/p/{}", link_to_site, p.url)),
description: p.content.clone(),
categories: p
.tags
.iter()
.map(|c| Category {
name: c.to_owned(),
..Category::default()
})
.collect(),
guid: Some(Guid {
value: format!("urn:hash:{}", {
let mut hasher = DefaultHasher::new();
p.hash(&mut hasher);
hasher.finish()
}),
permalink: false,
}),
pub_date: Some(
NaiveDateTime::parse_from_str(
&format!("{}-{}-{} 13:12:00", p.date.day, p.date.month, p.date.year),
"%d-%m-%Y %H:%M:%S",
)
.unwrap()
.and_local_timezone(Europe::Paris)
.unwrap()
.to_rfc2822(),
),
..Item::default()
}
})
.collect(),
atom_ext: Some(AtomExtension {
links: vec![Link {
href: format!("{link_to_site}/blog/rss"),
rel: "self".into(),
hreflang: Some(lang.into()),
mime_type: Some(MIME_TYPE_RSS.into()),
title: Some(title),
length: None,
}],
}),
..Channel::default()
};
std::str::from_utf8(&channel.write_to(Vec::new()).unwrap())
.unwrap()
.into()
}

@@ -0,0 +1,67 @@
use cached::proc_macro::once;
use glob::glob;
use std::fs::read_to_string;
use crate::utils::{markdown::File, metadata::MType, misc::read_file};
/// Contact node
#[derive(Clone, Debug)]
pub struct Link {
pub service: String,
pub scope: Option<String>,
pub url: String,
}
#[once(time = 60)]
pub fn find_links(directory: String) -> Vec<Link> {
// TOML filename
let toml_file = "links.toml";
// Read the TOML file and parse it
let toml_str = read_to_string(format!("{directory}/{toml_file}")).unwrap_or_default();
let mut redirections = vec![];
match toml::de::from_str::<toml::Value>(&toml_str) {
Ok(data) => {
if let Some(section) = data.as_table() {
section.iter().for_each(|(key, value)| {
// Scopes are delimited with `/`
let (service, scope) = match key.split_once('/') {
Some((service, scope)) => (service.to_owned(), Some(scope.to_owned())),
None => (key.to_owned(), None),
};
redirections.push(Link {
service,
scope,
url: value.as_str().unwrap().to_owned(),
});
});
}
}
Err(_) => return vec![],
}
redirections
}
pub fn remove_paragraphs(list: &mut [File]) {
list.iter_mut()
.for_each(|file| file.content = file.content.replace("<p>", "").replace("</p>", ""));
}
pub fn read(path: &str) -> Vec<File> {
glob(path)
.unwrap()
.map(|e| read_file(e.unwrap().to_string_lossy().to_string(), MType::Contact).unwrap())
.filter(|f| {
!f.metadata
.info
.contact
.clone()
.unwrap()
.hide
.unwrap_or_default()
})
.collect::<Vec<File>>()
}
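For illustration, the scope parsing inside `find_links` splits each TOML key on the first `/`; the key below is hypothetical:

```rust
// Hypothetical key in the "service/scope" form used by links.toml.
let key = "forgejo/anri";
let (service, scope) = match key.split_once('/') {
    Some((service, scope)) => (service.to_owned(), Some(scope.to_owned())),
    None => (key.to_owned(), None),
};
assert_eq!(service, "forgejo");
assert_eq!(scope.as_deref(), Some("anri"));
```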

@@ -0,0 +1,90 @@
use std::collections::HashMap;
use ramhorns::Content;
use reqwest::Error;
use crate::utils::github::{fetch_pr, ProjectState};
#[derive(Clone, Content, Debug)]
pub struct Project {
name: String,
url: String,
pub pulls_merged: Vec<Pull>,
pub pulls_open: Vec<Pull>,
pub pulls_closed: Vec<Pull>,
}
#[derive(Clone, Content, Debug)]
pub struct Pull {
url: String,
id: u32,
name_repo: String,
title: String,
state: u8,
}
pub async fn fetch() -> Result<Vec<Project>, Error> {
match fetch_pr().await {
Ok(projects) => {
let mut data: Vec<Project> = Vec::new();
// Grouping PRs by projects
let mut map: HashMap<&str, Vec<Pull>> = HashMap::new();
for p in &projects {
let project = Pull {
url: p.contrib_url.clone(),
id: p.id,
name_repo: p.name.clone(),
title: p.title.clone(),
state: p.status as u8,
};
let project_name = p.name.as_str();
if map.contains_key(project_name) {
map.entry(project_name).and_modify(|v| v.push(project));
} else {
data.push(Project {
name: project_name.into(),
url: p.url.clone(),
pulls_merged: Vec::new(),
pulls_closed: Vec::new(),
pulls_open: Vec::new(),
});
map.insert(project_name, vec![project]);
}
}
// Distributes each PR in the right vector
for d in &mut data {
map.get(d.name.as_str()).unwrap().iter().for_each(|p| {
let state = p.state.try_into().unwrap();
match state {
ProjectState::Closed => d.pulls_closed.push(p.to_owned()),
ProjectState::Merged => d.pulls_merged.push(p.to_owned()),
ProjectState::Open => d.pulls_open.push(p.to_owned()),
}
});
let mut name: Vec<char> = d.name.replace('-', " ").chars().collect();
name[0] = name[0].to_uppercase().next().unwrap();
d.name = name.into_iter().collect();
}
// Ascending order by pulls IDs
for d in &mut data {
d.pulls_closed.reverse();
d.pulls_merged.reverse();
d.pulls_open.reverse();
}
// Ascending order by number of pulls
data.sort_by(|a, b| {
b.pulls_merged
.len()
.partial_cmp(&a.pulls_merged.len())
.unwrap()
});
Ok(data)
}
Err(e) => Err(e),
}
}

src/utils/routes/cours.rs Normal file

@@ -0,0 +1,106 @@
use std::{cmp::Ordering, path::Path};
use cached::proc_macro::once;
use regex::Regex;
use serde::Serialize;
#[derive(Clone, Debug, Serialize, PartialEq, Eq)]
pub struct FileNode {
name: String,
is_dir: bool,
children: Vec<FileNode>,
}
impl Ord for FileNode {
fn cmp(&self, other: &Self) -> Ordering {
match (self.is_dir, other.is_dir) {
// If both are directories or both are files, compare names
(true, true) | (false, false) => self.name.cmp(&other.name),
// If self is directory and other is file, self comes first
(true, false) => Ordering::Less,
// If self is file and other is directory, other comes first
(false, true) => Ordering::Greater,
}
}
}
impl PartialOrd for FileNode {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
#[once(time = 120)]
pub fn get_filetree(
initial_dir: &str,
exclusion_list: &[String],
exclusion_patterns: &[Regex],
) -> FileNode {
gen_filetree(initial_dir, exclusion_list, exclusion_patterns)
}
fn gen_filetree(
dir_path: &str,
exclusion_list: &[String],
exclusion_patterns: &[Regex],
) -> FileNode {
let mut children: Vec<FileNode> = std::fs::read_dir(dir_path)
.unwrap()
.filter_map(Result::ok)
.filter_map(|entry| {
let entry_path = entry.path();
let entry_name = entry_path.file_name()?.to_string_lossy().to_string();
// Exclusion checks
if excluded(&entry_name, exclusion_list, exclusion_patterns) {
return None;
}
if entry_path.is_file() {
Some(FileNode {
name: entry_name,
is_dir: false,
children: vec![],
})
} else {
// Exclude empty directories
let children_of_children = gen_filetree(
entry_path.to_str().unwrap(),
exclusion_list,
exclusion_patterns,
);
if children_of_children.is_dir && children_of_children.children.is_empty() {
None
} else {
Some(children_of_children)
}
}
})
.collect();
children.sort();
FileNode {
name: Path::new(dir_path)
.file_name()
.unwrap()
.to_string_lossy()
.to_string(),
is_dir: true,
children,
}
}
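As an illustration (hypothetical directory, not from the repo): for a cours/ folder holding algebra.md and an empty tmp/ subfolder, gen_filetree would return, with no exclusions configured:

FileNode {
    name: "cours".to_string(),
    is_dir: true,
    children: vec![FileNode {
        name: "algebra.md".to_string(),
        is_dir: false,
        children: vec![],
    }],
}
// tmp/ does not appear: empty directories are filtered out above.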
pub fn excluded(element: &str, exclusion_list: &[String], exclusion_patterns: &[Regex]) -> bool {
if exclusion_list
.iter()
.any(|excluded_term| element.contains(excluded_term))
{
return true;
}
if exclusion_patterns.iter().any(|re| re.is_match(element)) {
return true;
}
false
}
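A minimal usage sketch of these helpers, with hypothetical exclusion rules and assuming a cours/ directory exists (the real callers supply their own list and patterns):

use regex::Regex;

fn main() {
    let list = vec!["private".to_string()];
    let patterns = vec![Regex::new(r"^\.").unwrap()]; // dotfiles

    assert!(excluded(".git", &list, &patterns));
    assert!(excluded("private-notes.md", &list, &patterns));
    assert!(!excluded("analyse.md", &list, &patterns));

    // First call builds the tree; later calls within 120 s reuse it.
    let tree = get_filetree("cours", &list, &patterns);
    println!("{tree:?}");
}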

src/utils/routes/mod.rs Normal file (4 lines added)
@ -0,0 +1,4 @@
pub mod blog;
pub mod contact;
pub mod contrib;
pub mod cours;

@ -1,21 +1,13 @@
@import "../markdown.css";
@media (prefers-color-scheme: light) {
:root {
--code-font-color: #333333;
--code-bg-color: #eeeeee;
--quote-border-color: #9852fa;
--quote-bg-color: #d8d6d6;
--separator-color: #cccccc;
--tag-bg-color: #d2e0f0;
}
}
@media (prefers-color-scheme: dark) {
:root {
--code-font-color: #eeeeee;
--code-bg-color: #333333;
--quote-border-color: #bd93f9;
--quote-bg-color: #273341;
--separator-color: #414558;
--tag-bg-color: #242e38;
}
}
@ -24,11 +16,6 @@
--max-width: 750px;
}
/* Page */
html {
scroll-behavior: smooth;
}
body {
max-width: var(--max-width);
margin: auto;
@ -70,49 +57,8 @@ main {
max-width: 100%;
}
/* Anchors */
:is(h1, h2, h3, h4, h5, h6):hover a.anchor::before {
visibility: visible;
}
a.anchor::before {
content: "#";
visibility: hidden;
padding-right: 0.1em;
}
a.anchor {
text-decoration: none;
vertical-align: baseline;
}
/* Links in headers */
:is(h1, h2, h3, h4, h5, h6) a {
font-size: inherit;
}
/* Separators */
hr {
border: 0;
height: 1px;
background: var(--separator-color);
}
/* Quotes */
blockquote {
margin: 1em 0;
padding: 0.1em 10px;
border-left: 6px solid;
border-color: var(--quote-border-color);
background: var(--quote-bg-color);
border-top-right-radius: 5px;
border-bottom-right-radius: 5px;
}
/* Images */
img {
display: block;
margin: auto;
max-width: var(--max-width);
}
@ -122,115 +68,6 @@ code {
font-family: monospace;
}
/* Little snippet of code (not blocks) */
kbd,
code:not(.hljs):not(:has(svg)) {
background: var(--code-bg-color);
border-radius: 3px;
color: var(--code-font-color);
box-shadow: 0 1px 1px black;
font-size: calc(var(--font-size) * 0.8);
padding: 2px 4px;
vertical-align: 1.5px;
}
/* Code blocks */
.hljs {
border-radius: 5px;
}
.hljs::-webkit-scrollbar {
width: 7px;
height: 9px;
background: var(--background);
}
.hljs::-webkit-scrollbar-thumb {
background-color: var(--font-color);
border-radius: 10px;
}
/* Margin for line numbers */
.hljs-ln-n {
margin-right: 0.4em;
}
/* Numbers in codeblocks */
.hljs-ln-numbers {
text-align: right;
color: var(--font-color);
}
/* Fix scroll in codeblocks with line numbering */
table.hljs-ln {
overflow: hidden;
}
/* Background for copy code button */
.hljs-copy-button {
background-color: var(--background) !important;
}
/* Light theme for the copy code button */
@media (prefers-color-scheme: light) {
.hljs-copy-button {
background-color: var(--font-color) !important;
filter: invert(100%);
}
}
/* Hide last line in codeblocks if empty */
.hljs-ln
> tbody
> tr:last-child:has(td:last-child > span:-moz-only-whitespace) {
visibility: collapse;
}
/* Temporary fix: Firefox only supports :has() behind the
 * layout.css.has-selector.enabled flag */
.hljs-ln > tbody > tr:last-child {
visibility: collapse;
}
/* Reference to footnotes */
.footnote-ref a {
text-decoration: underline dotted;
font-size: calc(var(--font-size) * 0.8);
}
/* Footnote */
section.footnotes * {
font-size: calc(var(--font-size) * 0.8);
}
/* When a footnote is referenced multiple times */
a.footnote-backref sup {
font-size: calc(var(--font-size) * 0.6);
}
a.footnote-backref sup::before {
content: "(";
}
a.footnote-backref sup::after {
content: ")";
}
/* Footnotes links */
a.footnote-backref {
font-family: "Segoe UI", "Segoe UI Symbol", system-ui;
text-decoration: underline dotted;
}
/* Footnotes block separation from article */
section.footnotes {
margin: 3px;
border-top: 2px dotted var(--separator-color);
}
/* Mermaid diagrams */
pre:has(code.language-mermaid) {
text-align: center;
}
/* Table of content */
nav#toc {
position: fixed;
@ -246,36 +83,3 @@ nav#toc {
visibility: hidden;
}
}
@media print {
/* Better colors for paper */
blockquote {
border-color: black;
background: var(--background);
}
.hljs {
background: var(--background);
}
/* Force line numbering to be on top */
td.hljs-ln-line {
vertical-align: top;
}
/* Break code */
code.hljs {
white-space: break-spaces;
hyphens: none;
}
/* Hide arrows of backref */
a.footnote-backref {
visibility: hidden;
}
/* No underline for footnotes */
.footnote-ref > a {
text-decoration: none;
}
}

@ -1,3 +1,13 @@
@import "markdown.css";
:root {
--max-width: 900px;
}
main {
max-width: var(--max-width);
}
/* Filetree */
aside {
float: left;
@ -42,12 +52,10 @@ aside li.directory {
@media print {
aside {
visibility: hidden;
display: none;
}
}
main img {
max-width: 100%;
display: block;
margin: auto;
}

static/css/markdown.css Normal file (248 lines added)
@ -0,0 +1,248 @@
@media (prefers-color-scheme: light) {
:root {
--code-font-color: #333333;
--code-bg-color: #eeeeee;
--quote-border-color: #9852fa;
--quote-bg-color: #d8d6d6;
--separator-color: #cccccc;
--tag-bg-color: #d2e0f0;
}
}
@media (prefers-color-scheme: dark) {
:root {
--code-font-color: #eeeeee;
--code-bg-color: #333333;
--quote-border-color: #bd93f9;
--quote-bg-color: #273341;
--separator-color: #414558;
--tag-bg-color: #242e38;
}
}
/* Page */
html {
scroll-behavior: smooth;
}
/* Anchors */
main :is(h1, h2, h3, h4, h5, h6):hover a.anchor::before {
visibility: visible;
}
main a.anchor::before {
content: "#";
visibility: hidden;
padding-right: 0.1em;
}
main a.anchor {
text-decoration: none;
vertical-align: baseline;
}
/* Links in headers */
:is(h1, h2, h3, h4, h5, h6) a {
font-size: inherit;
}
/* Images */
main img {
display: block;
margin: auto;
}
/* Separators */
hr {
border: 0;
height: 1px;
background: var(--separator-color);
}
/* Quotes */
blockquote {
margin: 1em 0;
padding: 0.1em 10px;
border-left: 6px solid;
border-color: var(--quote-border-color);
background: var(--quote-bg-color);
border-top-right-radius: 5px;
border-bottom-right-radius: 5px;
}
/* Little snippet of code (not blocks) */
kbd,
code:not(.hljs):not(:has(svg)) {
background: var(--code-bg-color);
border-radius: 3px;
color: var(--code-font-color);
box-shadow: 0 1px 1px black;
font-size: calc(var(--font-size) * 0.8);
padding: 2px 4px;
vertical-align: 1.5px;
}
/* Code blocks */
.hljs {
border-radius: 5px;
}
.hljs::-webkit-scrollbar {
width: 7px;
height: 9px;
background: var(--background);
}
.hljs::-webkit-scrollbar-thumb {
background-color: var(--font-color);
border-radius: 10px;
}
/* Margin for line numbers */
.hljs-ln-n {
margin-right: 0.4em;
}
/* Numbers in codeblocks */
.hljs-ln-numbers {
text-align: right;
color: var(--font-color);
}
/* Fix scroll in codeblocks with line numbering */
table.hljs-ln {
overflow: hidden;
}
/* Background for copy code button */
.hljs-copy-button {
background-color: var(--background) !important;
}
/* Light theme for the copy code button */
@media (prefers-color-scheme: light) {
.hljs-copy-button {
background-color: var(--font-color) !important;
filter: invert(100%);
}
}
/* Hide last line in codeblocks if empty */
.hljs-ln
> tbody
> tr:last-child:has(td:last-child > span:-moz-only-whitespace) {
visibility: collapse;
}
/* Temporary fix: Firefox only supports :has() behind the
 * layout.css.has-selector.enabled flag */
.hljs-ln > tbody > tr:last-child {
visibility: collapse;
}
/* Reference to footnotes */
.footnote-ref a {
text-decoration: underline dotted;
font-size: calc(var(--font-size) * 0.8);
}
/* Footnote */
section.footnotes * {
font-size: calc(var(--font-size) * 0.8);
}
/* When a footnote is referenced multiple times */
a.footnote-backref sup {
font-size: calc(var(--font-size) * 0.6);
}
a.footnote-backref sup::before {
content: "(";
}
a.footnote-backref sup::after {
content: ")";
}
/* Footnotes links */
a.footnote-backref {
font-family: "Segoe UI", "Segoe UI Symbol", system-ui;
text-decoration: underline dotted;
}
/* Footnotes block separation from content */
section.footnotes {
margin: 3px;
border-top: 2px dotted var(--separator-color);
}
/* Mermaid diagrams */
pre:has(code.language-mermaid) {
text-align: center;
}
/* Tables */
table:not(.hljs-ln) {
border-collapse: collapse;
margin-inline: auto;
}
table:not(.hljs-ln) th,
table:not(.hljs-ln) td {
padding: 5px;
border: 1px solid var(--separator-color);
}
table:not(.hljs-ln) th {
border-bottom: 2px solid var(--separator-color);
}
/* No borders on the outer edges of the table */
table:not(.hljs-ln) tr:last-child td {
border-bottom: 0;
}
table:not(.hljs-ln) tr:first-child th {
border-top: 0;
}
table:not(.hljs-ln) tr td:first-child,
table:not(.hljs-ln) tr th:first-child {
border-left: 0;
}
table:not(.hljs-ln) tr td:last-child,
table:not(.hljs-ln) tr th:last-child {
border-right: 0;
}
@media print {
/* Better colors for paper */
blockquote {
border-color: black;
background: var(--background);
}
.hljs {
background: var(--background);
}
/* Force line numbering to be on top */
td.hljs-ln-line {
vertical-align: top;
}
/* Break code */
code.hljs {
white-space: break-spaces;
hyphens: none;
}
/* Hide arrows of backref */
a.footnote-backref {
visibility: hidden;
}
/* No underline for footnotes */
.footnote-ref > a {
text-decoration: none;
}
}

@ -80,11 +80,16 @@ const deepestNodeOpened = (path, options) => {
}
};
const svgDarkTheme = () => {
const Mode = {
Light: 1,
Dark: 2,
};
const svgChangeTheme = (mode) => {
for (const item of document.getElementsByTagName("img")) {
if (!item.src.startsWith("data:image/svg+xml;base64,")) {
// Skip images that aren't base64-encoded SVGs
break;
continue;
}
/** Convert to grayscale */
@ -129,9 +134,19 @@ const svgDarkTheme = () => {
const totalGrayscale = grayscaleValues.reduce((acc, val) => acc + val, 0);
const averageGrayscale = totalGrayscale / grayscaleValues.length;
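// The average grayscale tells whether the SVG is mostly dark (authored for
// a light background) or mostly light (authored for a dark background).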
if (averageGrayscale < 128) {
const threshold = 128;
if (averageGrayscale < threshold && mode === Mode.Dark) {
item.style = "filter: invert(1);";
continue;
}
if (averageGrayscale > threshold && mode === Mode.Light) {
item.style = "filter: invert(1);";
continue;
}
item.style = "";
}
};
@ -160,8 +175,16 @@ window.addEventListener("load", () => {
uncollapse(last_openeded);
}
// Fix SVG images in dark mode
if (window.matchMedia("(prefers-color-scheme: dark)").matches) {
svgDarkTheme();
}
// Fix SVG images
svgChangeTheme(
window.matchMedia("(prefers-color-scheme: dark)").matches
? Mode.Dark
: Mode.Light
);
});
window
.matchMedia("(prefers-color-scheme: dark)")
.addEventListener("change", (event) =>
svgChangeTheme(event.matches ? Mode.Dark : Mode.Light)
);

@ -18,6 +18,7 @@ window.addEventListener("load", () => {
new Tag("Comment fonctionne un PC 😵‍💫"),
new Tag("undefined", mono),
new Tag("/api/v1/love", mono),
new Tag("/api/v1/websites", mono),
new Tag("Peak D2 sur Valo 🤡"),
new Tag(
"0x520",
@ -47,7 +48,7 @@ window.addEventListener("load", () => {
`
),
new Tag("School hater"),
new Tag("Stagiaire"),
new Tag("Étudiant"),
new Tag("Rempli de malice"),
new Tag(
"#NouveauFrontPopulaire ✊",

@ -2,15 +2,19 @@ window.addEventListener("load", () => {
const macros = {};
for (const item of new Map(
Object.entries({
B: "mathbb{B}",
N: "mathbb{N}",
R: "mathbb{R}",
Z: "mathbb{Z}",
O: "Theta",
Tau: "mathrm{T}",
u: "mu",
ra: "rightarrow",
la: "leftarrow",
RA: "Rightarrow",
LA: "Leftarrow",
u: "mu",
lb: "llbracket",
rb: "rrbracket",
})
)[Symbol.iterator]()) {
const bs = "\\";

@ -16,7 +16,7 @@
</aside>
<main>
{{^content}}
<p>Fichier introuvable</p>
<p>Fichier introuvable ou invalide.</p>
{{/content}} {{#content}}
<article>{{&content}}</article>
</main>