Compare commits


1 commit

Author SHA1 Message Date
91dc938fb1 french quotes 2024-09-14 20:32:26 +02:00
52 changed files with 2160 additions and 3239 deletions


@@ -1,23 +0,0 @@
name: PR Check

on:
  pull_request:
    types: [opened, synchronize, reopened]

jobs:
  lint-and-format:
    container:
      image: cimg/rust:1.81-node
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Build
        run: cargo build
      - name: Run format check
        run: cargo fmt --check
      - name: Run Clippy
        run: cargo clippy

.gitignore vendored (13 changes)

@@ -5,15 +5,4 @@
docker-compose.yml
/.vscode
# Data
data/index*.md
data/contacts/*
data/cours/*
data/projects/*
# Blog
data/blog/about*.md
data/blog/posts/*
!data/blog/posts/Makefile
/data

Cargo.lock generated (1407 changes)

File diff suppressed because it is too large


@@ -10,9 +10,9 @@ publish = false
license = "AGPL-3.0-or-later"
[dependencies]
actix-web = { version = "4.9", default-features = false, features = ["macros", "compress-brotli"] }
actix-web = { version = "4.6", default-features = false, features = ["macros", "compress-brotli"] }
actix-files = "0.6"
cached = { version = "0.54", features = ["async", "ahash"] }
cached = { version = "0.53", features = ["async", "ahash"] }
ramhorns = "1.0"
toml = "0.8"
serde = { version = "1.0", features = ["derive"] }
@@ -21,23 +21,17 @@ serde_json = "1.0"
minify-html = "0.15"
minify-js = "0.6"
glob = "0.3"
comrak = "0.32"
comrak = "0.26"
reqwest = { version = "0.12", features = ["json"] }
chrono = { version = "0.4.39", default-features = false, features = ["clock"]}
chrono-tz = "0.10"
chrono = { version = "0.4.38", default-features = false, features = ["clock"]}
chrono-tz = "0.9"
rss = { version = "2.0", features = ["atom"] }
lol_html = "2.1"
lol_html = "1.2"
base64 = "0.22"
mime_guess = "2.0"
urlencoding = "2.1"
regex = "1.11"
regex = "1.10"
cyborgtime = "2.1.1"
walkdir = "2.5"
env_logger = "0.11"
[lints.clippy]
pedantic = "warn"
[profile.release]
strip = "symbols"
lto = "thin"


@@ -145,8 +145,6 @@ Markdown file
Markdown file is stored in `/app/data/index.md`
> For french clients, `/app/data/index-fr.md` will be read instead.
```
---
name: Option<String>
@@ -173,7 +171,6 @@ title: Option<String>
date: Option<Date>
description: Option<String>
publish: Option<bool>
draft: Option<bool>
tags: Option<Vec<Tag>>
---
@@ -183,15 +180,12 @@ Post content
- If no `title`, the filename will be used
- `date` format is `day-month-year`
- `publish` defaults to false. When false, posts are hidden from index
but accessible.
- `draft` defaults to false. When true, posts are hidden and inaccessible.
but accessible, see [#30](https://git.mylloon.fr/Anri/mylloon.fr/issues/30)
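For illustration (hypothetical values, not part of the diff), a post using these fields could start like:

```
---
title: Example post
date: 14-09-2024
description: A short example post
publish: true
draft: false
tags:
    - example
---

Post content
```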
### About <!-- omit in toc -->
The file is stored at `/app/data/blog/about.md`.
> For french clients, `/app/data/blog/about-fr.md` will be read instead.
## Projects
Markdown files are stored in `/app/data/projects/apps/`
@@ -218,8 +212,6 @@ files in `archive` subdirectory of `apps`.
The file is stored at `/app/data/projects/about.md`.
> For french clients, `/app/data/projects/about-fr.md` will be read instead.
## Contacts
Markdown files are stored in `/app/data/contacts/`
@@ -231,7 +223,6 @@ custom: Option<bool>
user: "Option<String>"
link: Option<String>
newtab: Option<bool>
hide: Option<bool>
description: >
Option<String>
---
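For illustration, a hypothetical contact file using this schema (the `title` field comes from the same metadata struct):

```
---
title: Example
custom: false
user: "@jane"
link: https://example.com/@jane
newtab: true
description: >
    An example account
---
```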
@@ -260,8 +251,6 @@ For example, `socials` contact files are stored in `/app/data/contacts/socials/`
The file is stored at `/app/data/contacts/about.md`.
> For french clients, `/app/data/contacts/about-fr.md` will be read instead.
## Courses
Markdown files are stored in `/app/data/cours/`


@@ -3,7 +3,7 @@
Easy WebPage generator
[![dependency status](https://deps.rs/repo/gitea/git.mylloon.fr/Anri/mylloon.fr/status.svg)](https://deps.rs/repo/gitea/git.mylloon.fr/Anri/mylloon.fr)
[![status-badge](https://git.mylloon.fr/Anri/mylloon.fr/badges/workflows/publish.yml/badge.svg)](https://git.mylloon.fr/Anri/mylloon.fr/actions?workflow=publish.yml)
![status-badge](https://git.mylloon.fr/Anri/mylloon.fr/badges/workflows/publish.yml/badge.svg)
- See [issues](https://git.mylloon.fr/Anri/mylloon.fr/issues)
- See [documentation](https://git.mylloon.fr/Anri/mylloon.fr/src/branch/main/Documentation.md)


@@ -1,20 +0,0 @@
MKDIR = mkdir -p
TOUCH = touch
PRINT = echo

DATE = $(shell date '+%d-%m-%Y')
DIR = $(shell date '+%Y/%m')

PH := new

new:
	$(MKDIR) $(DIR) 2> /dev/null
	$(TOUCH) $(DIR)/$(PH).md
	$(PRINT) "---" > $(DIR)/$(PH).md
	$(PRINT) "publish: false" >> $(DIR)/$(PH).md
	$(PRINT) "date: $(DATE)" >> $(DIR)/$(PH).md
	$(PRINT) "draft: true" >> $(DIR)/$(PH).md
	$(PRINT) "---" >> $(DIR)/$(PH).md

help:
	$(PRINT) "make PH=new"


@@ -7,7 +7,7 @@ use std::{fs::File, io::Write, path::Path};
use crate::template::Template;
/// Store the configuration of config/config.toml
#[derive(Clone, Debug, Default, Deserialize, Hash, PartialEq, Eq)]
#[derive(Clone, Debug, Default, Deserialize)]
pub struct FileConfiguration {
/// http/https
pub scheme: Option<String>,
@@ -75,14 +75,14 @@
}
// Paths where files are stored
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
#[derive(Clone, Debug)]
pub struct Locations {
pub static_dir: String,
pub data_dir: String,
}
/// Configuration used internally in the app
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
#[derive(Clone, Debug)]
pub struct Config {
/// Information given in the config file
pub fc: FileConfiguration,


@@ -1,6 +1,6 @@
use actix_files::Files;
use actix_web::{
middleware::{Compress, DefaultHeaders, Logger},
middleware::{Compress, DefaultHeaders},
web, App, HttpServer,
};
use std::io::Result;
@@ -13,28 +13,25 @@ use crate::routes::{
mod config;
mod template;
mod misc;
mod routes;
mod utils;
#[actix_web::main]
async fn main() -> Result<()> {
let config = config::get_configuration("./config/config.toml");
let addr = (
if cfg!(debug_assertions) {
"127.0.0.1"
} else {
"0.0.0.0"
},
config.fc.port.unwrap(),
);
let addr = ("0.0.0.0", config.fc.port.unwrap());
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
println!(
"Listening to {}://{}:{}",
config.clone().fc.scheme.unwrap(),
addr.0,
addr.1
);
HttpServer::new(move || {
App::new()
.app_data(web::Data::new(config.clone()))
.wrap(Logger::default().log_target(config.fc.app_name.clone().unwrap_or_default()))
.wrap(Compress::default())
.wrap(
DefaultHeaders::new()
@@ -46,12 +43,8 @@ async fn main() -> Result<()> {
.add(("Permissions-Policy", "interest-cohort=()")),
)
.service(
web::scope("/api").service(
web::scope("v1")
.service(api_v1::love)
.service(api_v1::btf)
.service(api_v1::websites),
),
web::scope("/api")
.service(web::scope("v1").service(api_v1::love).service(api_v1::btf)),
)
.service(index::page)
.service(agreements::security)


@@ -2,7 +2,7 @@ use chrono::{Datelike, NaiveDate};
use ramhorns::Content;
use serde::{Deserialize, Deserializer};
#[derive(Content, Clone, Default, Debug)]
#[derive(Content, Default, Debug)]
pub struct Date {
pub day: u32,
pub month: u32,


@@ -1,7 +1,7 @@
use reqwest::{header::ACCEPT, Error};
use serde::Deserialize;
use crate::utils::misc::get_reqwest_client;
use crate::misc::utils::get_reqwest_client;
#[derive(Debug, Deserialize)]
struct GithubResponse {

src/misc/markdown.rs (new file, 517 lines)

@@ -0,0 +1,517 @@
use crate::misc::date::Date;
use base64::engine::general_purpose;
use base64::Engine;
use comrak::nodes::{AstNode, NodeValue};
use comrak::{format_html, parse_document, Arena, ComrakOptions, ListStyleType, Options};
use lol_html::html_content::ContentType;
use lol_html::{element, rewrite_str, HtmlRewriter, RewriteStrSettings, Settings};
use ramhorns::Content;
use serde::{Deserialize, Deserializer};
use std::fmt::Debug;
use std::fs;
use std::path::Path;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
/// Metadata for blog posts
#[derive(Content, Debug, Default, Deserialize)]
pub struct FileMetadataBlog {
pub title: Option<String>,
pub date: Option<Date>,
pub description: Option<String>,
pub publish: Option<bool>,
pub tags: Option<Vec<Tag>>,
pub toc: Option<bool>,
}
/// A tag, related to post blog
#[derive(Content, Debug, Clone)]
pub struct Tag {
pub name: String,
}
impl<'de> Deserialize<'de> for Tag {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
match <&str>::deserialize(deserializer) {
Ok(s) => match serde_yml::from_str(s) {
Ok(tag) => Ok(Self { name: tag }),
Err(e) => Err(serde::de::Error::custom(e)),
},
Err(e) => Err(e),
}
}
}
/// Metadata for contact entry
#[derive(Content, Debug, Default, Deserialize)]
pub struct FileMetadataContact {
pub title: String,
pub custom: Option<bool>,
pub user: Option<String>,
pub link: Option<String>,
pub newtab: Option<bool>,
pub description: Option<String>,
}
/// Metadata for index page
#[derive(Content, Debug, Default, Deserialize)]
pub struct FileMetadataIndex {
pub name: Option<String>,
pub pronouns: Option<String>,
pub avatar: Option<String>,
pub avatar_caption: Option<String>,
pub avatar_style: Option<String>,
}
/// Metadata for portfolio cards
#[derive(Content, Debug, Default, Deserialize)]
pub struct FileMetadataPortfolio {
pub title: Option<String>,
pub link: Option<String>,
pub description: Option<String>,
pub language: Option<String>,
}
/// List of available metadata types
pub enum TypeFileMetadata {
Blog,
Contact,
Generic,
Index,
Portfolio,
}
/// Structure that holds all the metadata of the file
/// Usually all fields are None except one
#[derive(Content, Debug, Default, Deserialize)]
pub struct FileMetadata {
pub blog: Option<FileMetadataBlog>,
pub contact: Option<FileMetadataContact>,
pub index: Option<FileMetadataIndex>,
pub portfolio: Option<FileMetadataPortfolio>,
}
#[allow(clippy::struct_excessive_bools)]
/// Global metadata
#[derive(Content, Debug)]
pub struct Metadata {
pub info: FileMetadata,
pub math: bool,
pub mermaid: bool,
pub syntax_highlight: bool,
pub mail_obfsucated: bool,
}
impl Metadata {
/// Update current metadata boolean fields, keeping true ones
fn merge(&mut self, other: &Self) {
self.math = self.math || other.math;
self.mermaid = self.mermaid || other.mermaid;
self.syntax_highlight = self.syntax_highlight || other.syntax_highlight;
}
}
/// File description
#[derive(Content, Debug)]
pub struct File {
pub metadata: Metadata,
pub content: String,
}
/// Options used for parser and compiler MD --> HTML
pub fn get_options<'a>() -> ComrakOptions<'a> {
let mut options = comrak::Options::default();
// Extension
options.extension.strikethrough = true;
options.extension.tagfilter = true;
options.extension.table = true;
options.extension.autolink = true;
options.extension.tasklist = true;
options.extension.superscript = true;
options.extension.header_ids = Some(String::new());
options.extension.footnotes = true;
options.extension.description_lists = true;
options.extension.front_matter_delimiter = Some("---".into());
options.extension.multiline_block_quotes = true;
options.extension.math_dollars = true;
options.extension.math_code = false;
options.extension.wikilinks_title_after_pipe = false;
options.extension.wikilinks_title_before_pipe = false;
options.extension.underline = true;
options.extension.spoiler = false;
options.extension.greentext = false;
// Parser
options.parse.smart = false; // could be boring
options.parse.default_info_string = Some("plaintext".into());
options.parse.relaxed_tasklist_matching = true;
options.parse.relaxed_autolinks = true;
// options.render.broken_link_callback = ...;
// Renderer
options.render.hardbreaks = false; // could be true? changing it via metadata could be good for compatibility
options.render.github_pre_lang = false;
options.render.full_info_string = true;
options.render.width = 0; // 0 means disabled?
options.render.unsafe_ = true;
options.render.escape = false;
options.render.list_style = ListStyleType::Dash;
options.render.sourcepos = false;
options.render.experimental_inline_sourcepos = false;
options.render.escaped_char_spans = false;
options.render.ignore_setext = true;
options.render.ignore_empty_links = true;
options.render.gfm_quirks = true;
options.render.prefer_fenced = false;
options
}
/// Resize images if needed
fn custom_img_size(html: &str) -> String {
rewrite_str(
html,
RewriteStrSettings {
element_content_handlers: vec![element!("img[alt]", |el| {
let alt = el.get_attribute("alt").unwrap();
let possible_piece = alt.split('|').collect::<Vec<&str>>();
if possible_piece.len() > 1 {
let data = possible_piece.last().unwrap().trim();
// Remove the dimension data from the alt
let new_alt = possible_piece.first().unwrap().trim_end();
if let Some(dimension) = data.split_once('x') {
// Width and height specified
if dimension.0.parse::<i32>().is_ok() && dimension.1.parse::<i32>().is_ok()
{
el.set_attribute("width", dimension.0).unwrap();
el.set_attribute("height", dimension.1).unwrap();
if new_alt.is_empty() {
el.remove_attribute("alt");
} else {
el.set_attribute("alt", new_alt).unwrap();
}
}
} else {
// Only width specified
if data.parse::<i32>().is_ok() {
el.set_attribute("width", data).unwrap();
if new_alt.is_empty() {
el.remove_attribute("alt");
} else {
el.set_attribute("alt", new_alt).unwrap();
}
}
}
}
Ok(())
})],
..RewriteStrSettings::default()
},
)
.unwrap()
}
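As a worked illustration of the alt-text convention parsed here (input is hypothetical): `![diagram|200x100](a.png)` renders to `<img src="a.png" alt="diagram|200x100">`, which this rewriter turns into `<img src="a.png" alt="diagram" width="200" height="100">`. A single number after the pipe (`|200`) sets only `width`, and when nothing precedes the pipe the `alt` attribute is removed entirely.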
/// Fix local images to base64 and integration of markdown files
fn fix_images_and_integration(path: &str, html: &str) -> (String, Metadata) {
let mut metadata = Metadata {
info: FileMetadata::default(),
math: false,
mermaid: false,
syntax_highlight: false,
mail_obfsucated: false,
};
(
rewrite_str(
html,
RewriteStrSettings {
element_content_handlers: vec![element!("img", |el| {
if let Some(src) = el.get_attribute("src") {
let img_src = Path::new(path).parent().unwrap();
let img_path = urlencoding::decode(img_src.join(src).to_str().unwrap())
.unwrap()
.to_string();
if let Ok(file) = fs::read_to_string(&img_path) {
let mime = mime_guess::from_path(&img_path).first_or_octet_stream();
if mime == "text/markdown" {
let mut options = get_options();
options.extension.footnotes = false;
let data = read_md(
&img_path,
&file,
&TypeFileMetadata::Generic,
Some(options),
);
el.replace(&data.content, ContentType::Html);
metadata.merge(&data.metadata);
} else {
let image = general_purpose::STANDARD.encode(file);
el.set_attribute("src", &format!("data:{mime};base64,{image}"))
.unwrap();
}
}
}
Ok(())
})],
..RewriteStrSettings::default()
},
)
.unwrap(),
metadata,
)
}
/// Transform markdown string to File structure
pub fn read_md(
path: &str,
raw_text: &str,
metadata_type: &TypeFileMetadata,
options: Option<Options>,
) -> File {
let arena = Arena::new();
let opt = options.map_or_else(get_options, |specific_opt| specific_opt);
let root = parse_document(&arena, raw_text, &opt);
// Find metadata
let metadata = get_metadata(root, metadata_type);
let mermaid_name = "mermaid";
hljs_replace(root, mermaid_name);
replace_quotes(root);
// Convert to HTML
let mut html = vec![];
format_html(root, &opt, &mut html).unwrap();
let mut html_content = String::from_utf8(html).unwrap();
let children_metadata;
let mail_obfsucated;
(html_content, children_metadata) = fix_images_and_integration(path, &html_content);
html_content = custom_img_size(&html_content);
(html_content, mail_obfsucated) = mail_obfuscation(&html_content);
let mut final_metadata = Metadata {
info: metadata,
mermaid: check_mermaid(root, mermaid_name),
syntax_highlight: check_code(root, &[mermaid_name.into()]),
math: check_math(&html_content),
mail_obfsucated,
};
final_metadata.merge(&children_metadata);
File {
metadata: final_metadata,
content: html_content,
}
}
/// Deserialize metadata based on a type
fn deserialize_metadata<T: Default + serde::de::DeserializeOwned>(text: &str) -> T {
serde_yml::from_str(text.trim().trim_matches(&['-'] as &[_])).unwrap_or_default()
}
/// Fetch metadata from AST
pub fn get_metadata<'a>(root: &'a AstNode<'a>, mtype: &TypeFileMetadata) -> FileMetadata {
root.children()
.find_map(|node| match &node.data.borrow().value {
// Extract metadata from frontmatter
NodeValue::FrontMatter(text) => Some(match mtype {
TypeFileMetadata::Blog => FileMetadata {
blog: Some(deserialize_metadata(text)),
..FileMetadata::default()
},
TypeFileMetadata::Contact => {
let mut metadata: FileMetadataContact = deserialize_metadata(text);
// Trim descriptions
if let Some(desc) = &mut metadata.description {
desc.clone_from(&desc.trim().into());
}
FileMetadata {
contact: Some(metadata),
..FileMetadata::default()
}
}
TypeFileMetadata::Generic => FileMetadata::default(),
TypeFileMetadata::Index => FileMetadata {
index: Some(deserialize_metadata(text)),
..FileMetadata::default()
},
TypeFileMetadata::Portfolio => FileMetadata {
portfolio: Some(deserialize_metadata(text)),
..FileMetadata::default()
},
}),
_ => None,
})
.map_or_else(
|| match mtype {
TypeFileMetadata::Blog => FileMetadata {
blog: Some(FileMetadataBlog::default()),
..FileMetadata::default()
},
TypeFileMetadata::Contact => FileMetadata {
contact: Some(FileMetadataContact::default()),
..FileMetadata::default()
},
TypeFileMetadata::Generic => FileMetadata::default(),
TypeFileMetadata::Index => FileMetadata {
index: Some(FileMetadataIndex::default()),
..FileMetadata::default()
},
TypeFileMetadata::Portfolio => FileMetadata {
portfolio: Some(FileMetadataPortfolio::default()),
..FileMetadata::default()
},
},
|data| data,
)
}
/// Check whether mermaid diagrams are in the AST
fn check_mermaid<'a>(root: &'a AstNode<'a>, mermaid_str: &str) -> bool {
root.children().any(|node| match &node.data.borrow().value {
// Check if a code block defines a mermaid diagram
NodeValue::CodeBlock(code_block) => code_block.info == mermaid_str,
_ => false,
})
}
/// Check if code is in the AST
fn check_code<'a>(root: &'a AstNode<'a>, blacklist: &[String]) -> bool {
root.children().any(|node| match &node.data.borrow().value {
// Detect code in paragraph
/* NodeValue::Paragraph => match &node.children().next() {
Some(child) => matches!(child.data.borrow().value, NodeValue::Code(_)),
None => false,
}, */
// Detect blocks of code where the lang isn't in the blacklist
NodeValue::CodeBlock(code_block) => !blacklist.contains(&code_block.info),
_ => false,
})
}
/// Check if html contains maths
fn check_math(html: &str) -> bool {
let math_detected = Arc::new(AtomicBool::new(false));
let _ = HtmlRewriter::new(
Settings {
element_content_handlers: vec![element!("span[data-math-style]", |_| {
math_detected.store(true, Ordering::SeqCst);
Ok(())
})],
..Settings::default()
},
|_: &[u8]| {},
)
.write(html.as_bytes());
math_detected.load(Ordering::SeqCst)
}
/// Change class of languages for hljs detection
fn hljs_replace<'a>(root: &'a AstNode<'a>, mermaid_str: &str) {
root.children().for_each(|node| {
if let NodeValue::CodeBlock(ref mut block) = &mut node.data.borrow_mut().value {
if block.info != mermaid_str {
block.info = format!("hljs-{}", block.info);
}
}
});
}
/// Replace the first pair of straight quotes in each text node with French quotes («»)
fn replace_quotes<'a>(node: &'a AstNode<'a>) {
match &mut node.data.borrow_mut().value {
NodeValue::Text(text) => {
*text = text.replacen('"', "«", 1).replacen('"', "»", 1);
}
_ => {
for c in node.children() {
replace_quotes(c);
}
}
}
}
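Concretely (hypothetical input): a text node `"Bonjour", dit-elle` becomes `«Bonjour», dit-elle`; only the first two straight quotes of each text node are replaced, any later quotes in the same node are left untouched.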
/// Obfuscate email if email found
fn mail_obfuscation(html: &str) -> (String, bool) {
let modified = Arc::new(AtomicBool::new(false));
let data_attr = "title";
// Modify HTML for mails
let new_html = rewrite_str(
html,
RewriteStrSettings {
element_content_handlers: vec![element!("a[href^='mailto:']", |el| {
modified.store(true, Ordering::SeqCst);
// Get mail address
let link = el.get_attribute("href").unwrap();
let (uri, mail) = &link.split_at(7);
let (before, after) = mail.split_once('@').unwrap();
// Preserve old data and add obfuscated mail address
el.prepend(&format!("<span {data_attr}='"), ContentType::Html);
let modified_mail = format!("'></span>{before}<span class='at'>(at)</span>{after}");
el.append(&modified_mail, ContentType::Html);
// Change href
Ok(el.set_attribute("href", &format!("{uri}{before} at {after}"))?)
})],
..RewriteStrSettings::default()
},
)
.unwrap();
let is_modified = modified.load(Ordering::SeqCst);
if is_modified {
// Remove old data email if exists
(
rewrite_str(
&new_html,
RewriteStrSettings {
element_content_handlers: vec![element!(
&format!("a[href^='mailto:'] > span[{data_attr}]"),
|el| {
Ok(el.set_attribute(
data_attr,
// Remove mails
el.get_attribute(data_attr)
.unwrap()
.split_whitespace()
.filter(|word| !word.contains('@'))
.collect::<Vec<&str>>()
.join(" ")
.trim(),
)?)
}
)],
..RewriteStrSettings::default()
},
)
.unwrap(),
is_modified,
)
} else {
(new_html, is_modified)
}
}
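As an illustration (hypothetical address), `<a href="mailto:jane@example.com">contact</a>` comes out roughly as `<a href="mailto:jane at example.com"><span title='contact'></span>jane<span class='at'>(at)</span>example.com</a>`: the original link text moves into the `title` attribute (the second pass then strips any word containing `@` from it), the href no longer carries a raw address, and the visible text shows `(at)` instead of `@`.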


@@ -1,6 +1,4 @@
pub mod date;
pub mod github;
pub mod markdown;
pub mod metadata;
pub mod misc;
pub mod routes;
pub mod utils;

src/misc/utils.rs (new file, 85 lines)

@@ -0,0 +1,85 @@
use std::{fs, path::Path};
use actix_web::{
http::header::{self, ContentType, TryIntoHeaderValue},
http::StatusCode,
HttpRequest, HttpResponse, Responder,
};
use base64::{engine::general_purpose, Engine};
use cached::proc_macro::cached;
use reqwest::Client;
use crate::config::FileConfiguration;
use super::markdown::{read_md, File, FileMetadata, Metadata, TypeFileMetadata};
#[cached]
pub fn get_reqwest_client() -> Client {
Client::builder()
.user_agent(format!("EWP/{}", env!("CARGO_PKG_VERSION")))
.build()
.unwrap()
}
/// Get URL of the app
pub fn get_url(fc: FileConfiguration) -> String {
/* let port = match fc.scheme.as_deref() {
Some("https") if fc.port == Some(443) => String::new(),
Some("http") if fc.port == Some(80) => String::new(),
_ => format!(":{}", fc.port.unwrap()),
}; */
format!("{}://{}", fc.scheme.unwrap(), fc.domain.unwrap())
}
/// Make a list of keywords
pub fn make_kw(list: &[&str]) -> String {
list.join(", ")
}
/// Send HTML file
pub struct Html(pub String);
impl Responder for Html {
type Body = String;
fn respond_to(self, _req: &HttpRequest) -> HttpResponse<Self::Body> {
let mut res = HttpResponse::with_body(StatusCode::OK, self.0);
res.headers_mut().insert(
header::CONTENT_TYPE,
ContentType::html().try_into_value().unwrap(),
);
res
}
}
/// Read a file
pub fn read_file(filename: &str, expected_file: &TypeFileMetadata) -> Option<File> {
Path::new(filename)
.extension()
.and_then(|ext| match ext.to_str().unwrap() {
"pdf" => fs::read(filename).map_or(None, |bytes| Some(read_pdf(bytes))),
_ => fs::read_to_string(filename).map_or(None, |text| {
Some(read_md(filename, &text, expected_file, None))
}),
})
}
fn read_pdf(data: Vec<u8>) -> File {
let pdf = general_purpose::STANDARD.encode(data);
File {
metadata: Metadata {
info: FileMetadata::default(),
mermaid: false,
syntax_highlight: false,
math: false,
mail_obfsucated: false,
},
content: format!(
r#"<embed
src="data:application/pdf;base64,{pdf}"
style="width: 100%; height: 79vh";
>"#
),
}
}


@@ -1,4 +1,4 @@
use crate::{config::Config, template::InfosPage, utils::misc::get_url};
use crate::{config::Config, misc::utils::get_url, template::InfosPage};
use actix_web::{get, http::header::ContentType, routes, web, HttpResponse, Responder};
use cached::proc_macro::once;
use ramhorns::Content;
@@ -29,7 +29,6 @@ fn build_securitytxt(config: Config) -> String {
pref_lang: config.fc.lang.unwrap_or_default(),
},
InfosPage::default(),
None,
)
}
@@ -57,7 +56,6 @@ fn build_humanstxt(config: Config) -> String {
name: config.fc.fullname.unwrap_or_default(),
},
InfosPage::default(),
None,
)
}
@@ -97,6 +95,5 @@ fn build_webmanifest(config: Config) -> String {
url: get_url(config.fc),
},
InfosPage::default(),
None,
)
}


@@ -8,22 +8,13 @@ use serde::Serialize;
/// Response for /love
#[derive(Serialize)]
struct InfoLove {
unix_epoch: u64,
since: String,
unix_epoch: u32,
}
#[get("/love")]
pub async fn love() -> impl Responder {
let target = 1_605_576_600;
let current_time: u64 = Utc::now().timestamp().try_into().unwrap();
HttpResponse::Ok().json(InfoLove {
unix_epoch: target,
since: {
let duration_epoch = current_time - target;
let duration = Duration::from_secs(duration_epoch);
format_duration(duration).to_string()
},
unix_epoch: 1_605_576_600,
})
}
@@ -32,42 +23,23 @@ pub async fn love() -> impl Responder {
struct InfoBTF {
unix_epoch: u64,
countdown: String,
since: String,
}
#[get("/backtofrance")]
pub async fn btf() -> impl Responder {
let target = 1_736_618_100;
let start = 1_724_832_000;
let current_time: u64 = Utc::now().timestamp().try_into().unwrap();
if current_time > target {
HttpResponse::Ok().json(InfoBTF {
unix_epoch: target,
countdown: "Already happened".to_owned(),
since: "Not relevant anymore".to_owned(),
})
} else {
HttpResponse::Ok().json(InfoBTF {
unix_epoch: target,
countdown: {
let duration_epoch = target - current_time;
let duration = Duration::from_secs(duration_epoch);
format_duration(duration).to_string()
},
since: {
let duration_epoch = current_time - start;
let duration = Duration::from_secs(duration_epoch);
format_duration(duration).to_string()
},
})
}
}
let info = InfoBTF {
unix_epoch: target,
countdown: if current_time > target {
"Already happened".to_owned()
} else {
let duration_epoch = target - current_time;
let duration = Duration::from_secs(duration_epoch);
format_duration(duration).to_string()
},
};
#[get("/websites")]
pub async fn websites() -> impl Responder {
HttpResponse::Ok().json((
"http://www.bocal.cs.univ-paris8.fr/~akennel/",
"https://anri.up8.site/",
))
HttpResponse::Ok().json(info)
}
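For illustration, the richer `/api/v1/backtofrance` response (the variant carrying `since`) is shaped like this; the timestamp is the real `target`, but the duration strings below are invented, their exact format being whatever `format_duration` (presumably from cyborgtime) prints:

```
{
  "unix_epoch": 1736618100,
  "countdown": "118days 10h 30m 5s",
  "since": "139days 2h 15m 40s"
}
```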


@@ -1,26 +1,36 @@
use actix_web::{
get, http::header::ContentType, routes, web, HttpRequest, HttpResponse, Responder,
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use cached::proc_macro::cached;
use ::rss::{
extension::atom::{AtomExtension, Link},
Category, Channel, Guid, Image, Item,
};
use actix_web::{get, http::header::ContentType, routes, web, HttpResponse, Responder};
use cached::proc_macro::once;
use chrono::{DateTime, Datelike, Local, NaiveDateTime, Utc};
use chrono_tz::Europe;
use comrak::{parse_document, Arena};
use ramhorns::Content;
use crate::{
config::Config,
template::{InfosPage, NavBar},
utils::{
markdown::{File, FilePath},
metadata::MType,
misc::{lang, make_kw, read_file_fallback, Html, Lang},
routes::blog::{build_rss, get_post, get_posts, Post, BLOG_DIR, MIME_TYPE_RSS, POST_DIR},
misc::{
date::Date,
markdown::{get_metadata, get_options, File, FileMetadataBlog, TypeFileMetadata},
utils::{get_url, make_kw, read_file, Html},
},
template::{InfosPage, NavBar},
};
const MIME_TYPE_RSS: &str = "application/rss+xml";
const BLOG_DIR: &str = "blog";
const POST_DIR: &str = "posts";
#[get("/blog")]
pub async fn index(req: HttpRequest, config: web::Data<Config>) -> impl Responder {
Html(build_index(
config.get_ref().to_owned(),
lang(req.headers()),
))
pub async fn index(config: web::Data<Config>) -> impl Responder {
Html(build_index(config.get_ref().to_owned()))
}
#[derive(Content, Debug)]
@@ -31,20 +41,14 @@ struct BlogIndexTemplate {
no_posts: bool,
}
#[cached(time = 60)]
fn build_index(config: Config, lang: Lang) -> String {
#[once(time = 60)]
fn build_index(config: Config) -> String {
let blog_dir = format!("{}/{}", config.locations.data_dir, BLOG_DIR);
let mut posts = get_posts(&format!("{blog_dir}/{POST_DIR}"));
// Get about
let (about, html_lang) = read_file_fallback(
FilePath {
base: blog_dir,
path: "about.md".to_owned(),
},
MType::Generic,
&lang,
);
let about: Option<File> =
read_file(&format!("{blog_dir}/about.md"), &TypeFileMetadata::Generic);
// Sort from newest to oldest
posts.sort_by_cached_key(|p| (p.date.year, p.date.month, p.date.day));
@@ -69,10 +73,115 @@
)),
kw: Some(make_kw(&["blog", "blogging"])),
},
Some(html_lang),
)
}
#[derive(Content, Debug)]
struct Post {
title: String,
date: Date,
url: String,
desc: Option<String>,
content: Option<String>,
tags: Vec<String>,
}
impl Post {
// Fetch the file content
fn fetch_content(&mut self, data_dir: &str) {
let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
let ext = ".md";
if let Some(file) = read_file(
&format!("{blog_dir}/{}{ext}", self.url),
&TypeFileMetadata::Blog,
) {
self.content = Some(file.content);
}
}
}
impl Hash for Post {
fn hash<H: Hasher>(&self, state: &mut H) {
if let Some(content) = &self.content {
content.hash(state);
}
}
}
fn get_posts(location: &str) -> Vec<Post> {
let entries = std::fs::read_dir(location).map_or_else(
|_| vec![],
|res| {
res.flatten()
.filter(|f| f.path().extension().map_or(false, |ext| ext == "md"))
.collect::<Vec<std::fs::DirEntry>>()
},
);
entries
.iter()
.filter_map(|f| {
let fname = f.file_name();
let filename = fname.to_string_lossy();
let file_without_ext = filename.split_at(filename.len() - 3).0;
let file_metadata = std::fs::read_to_string(format!("{location}/{filename}"))
.map_or_else(
|_| FileMetadataBlog {
title: Some(file_without_ext.into()),
..FileMetadataBlog::default()
},
|text| {
let arena = Arena::new();
let options = get_options();
let root = parse_document(&arena, &text, &options);
let mut metadata =
get_metadata(root, &TypeFileMetadata::Blog).blog.unwrap();
// Always have a title
metadata.title = metadata
.title
.map_or_else(|| Some(file_without_ext.into()), Some);
metadata
},
);
if file_metadata.publish == Some(true) {
Some(Post {
url: file_without_ext.into(),
title: file_metadata.title.unwrap(),
date: file_metadata.date.unwrap_or({
let m = f.metadata().unwrap();
let date = std::convert::Into::<DateTime<Utc>>::into(
m.modified().unwrap_or_else(|_| m.created().unwrap()),
)
.date_naive();
Date {
day: date.day(),
month: date.month(),
year: date.year(),
}
}),
desc: file_metadata.description,
content: None,
tags: file_metadata
.tags
.unwrap_or_default()
.iter()
.map(|t| t.name.clone())
.collect(),
})
} else {
None
}
})
.collect::<Vec<Post>>()
}
#[derive(Content, Debug)]
struct BlogPostTemplate {
navbar: NavBar,
@@ -80,7 +189,7 @@ struct BlogPostTemplate {
toc: String,
}
#[get("/blog/p/{id:.*}")]
#[get("/blog/p/{id}")]
pub async fn page(path: web::Path<(String,)>, config: web::Data<Config>) -> impl Responder {
Html(build_post(
&path.into_inner().0,
@@ -108,7 +217,64 @@ fn build_post(file: &str, config: Config) -> String {
toc,
},
infos,
None,
)
}
fn get_post(
post: &mut Option<File>,
filename: &str,
name: &str,
data_dir: &str,
) -> (InfosPage, String) {
let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
let ext = ".md";
*post = read_file(
&format!("{blog_dir}/{filename}{ext}"),
&TypeFileMetadata::Blog,
);
let default = (
filename,
&format!("Blog d'{name}"),
Vec::new(),
String::new(),
);
let (title, desc, tags, toc) = match post {
Some(data) => (
match &data.metadata.info.blog.as_ref().unwrap().title {
Some(text) => text,
None => default.0,
},
match &data.metadata.info.blog.as_ref().unwrap().description {
Some(desc) => desc,
None => default.1,
},
match &data.metadata.info.blog.as_ref().unwrap().tags {
Some(tags) => tags.clone(),
None => default.2,
},
match &data.metadata.info.blog.as_ref().unwrap().toc {
// TODO: Generate TOC
Some(true) => String::new(),
_ => default.3,
},
),
None => default,
};
(
InfosPage {
title: Some(format!("Post: {title}")),
desc: Some(desc.clone()),
kw: Some(make_kw(
&["blog", "blogging", "write", "writing"]
.into_iter()
.chain(tags.iter().map(|t| t.name.as_str()))
.collect::<Vec<_>>(),
)),
},
toc,
)
}
@@ -120,3 +286,110 @@ pub async fn rss(config: web::Data<Config>) -> impl Responder {
.content_type(ContentType(MIME_TYPE_RSS.parse().unwrap()))
.body(build_rss(config.get_ref().to_owned()))
}
#[once(time = 10800)] // 3h
fn build_rss(config: Config) -> String {
let mut posts = get_posts(&format!(
"{}/{}/{}",
config.locations.data_dir, BLOG_DIR, POST_DIR
));
// Sort from newest to oldest
posts.sort_by_cached_key(|p| (p.date.year, p.date.month, p.date.day));
posts.reverse();
// Only the 20 newest
let max = 20;
if posts.len() > max {
posts.drain(max..);
}
let link_to_site = get_url(config.fc.clone());
let author = if let (Some(mail), Some(name)) = (config.fc.mail, config.fc.fullname.clone()) {
Some(format!("{mail} ({name})"))
} else {
None
};
let title = format!("Blog d'{}", config.fc.name.unwrap_or_default());
let lang = "fr";
let channel = Channel {
title: title.clone(),
link: link_to_site.clone(),
description: "Un fil qui parle d'informatique notamment".into(),
language: Some(lang.into()),
managing_editor: author.clone(),
webmaster: author,
pub_date: Some(Local::now().to_rfc2822()),
categories: ["blog", "blogging", "write", "writing"]
.iter()
.map(|&c| Category {
name: c.into(),
..Category::default()
})
.collect(),
generator: Some("ewp with rss crate".into()),
docs: Some("https://www.rssboard.org/rss-specification".into()),
image: Some(Image {
url: format!("{link_to_site}/icons/favicon-32x32.png"),
title: title.clone(),
link: link_to_site.clone(),
..Image::default()
}),
items: posts
.iter_mut()
.map(|p| {
// Get post data
p.fetch_content(&config.locations.data_dir);
// Build item
Item {
title: Some(p.title.clone()),
link: Some(format!("{}/blog/p/{}", link_to_site, p.url)),
description: p.content.clone(),
categories: p
.tags
.iter()
.map(|c| Category {
name: c.to_owned(),
..Category::default()
})
.collect(),
guid: Some(Guid {
value: format!("urn:hash:{}", {
let mut hasher = DefaultHasher::new();
p.hash(&mut hasher);
hasher.finish()
}),
permalink: false,
}),
pub_date: Some(
NaiveDateTime::parse_from_str(
&format!("{}-{}-{} 13:12:00", p.date.day, p.date.month, p.date.year),
"%d-%m-%Y %H:%M:%S",
)
.unwrap()
.and_local_timezone(Europe::Paris)
.unwrap()
.to_rfc2822(),
),
..Item::default()
}
})
.collect(),
atom_ext: Some(AtomExtension {
links: vec![Link {
href: format!("{link_to_site}/blog/rss"),
rel: "self".into(),
hreflang: Some(lang.into()),
mime_type: Some(MIME_TYPE_RSS.into()),
title: Some(title),
length: None,
}],
}),
..Channel::default()
};
std::str::from_utf8(&channel.write_to(Vec::new()).unwrap())
.unwrap()
.into()
}


@@ -1,16 +1,16 @@
use actix_web::{get, routes, web, HttpRequest, Responder};
use cached::proc_macro::cached;
use cached::proc_macro::once;
use glob::glob;
use ramhorns::Content;
use std::fs::read_to_string;
use crate::{
config::Config,
template::{InfosPage, NavBar},
utils::{
markdown::{File, FilePath},
metadata::MType,
misc::{lang, make_kw, read_file_fallback, Html, Lang},
routes::contact::{find_links, read, remove_paragraphs},
misc::{
markdown::{File, TypeFileMetadata},
utils::{make_kw, read_file, Html},
},
template::{InfosPage, NavBar},
};
const CONTACT_DIR: &str = "contacts";
@@ -28,8 +28,49 @@ pub fn pages(cfg: &mut web::ServiceConfig) {
}
#[get("")]
async fn page(req: HttpRequest, config: web::Data<Config>) -> impl Responder {
Html(build_page(config.get_ref().to_owned(), lang(req.headers())))
async fn page(config: web::Data<Config>) -> impl Responder {
Html(build_page(config.get_ref().to_owned()))
}
/// Contact node
#[derive(Clone, Debug)]
struct ContactLink {
service: String,
scope: Option<String>,
link: String,
}
#[once(time = 60)]
fn find_links(directory: String) -> Vec<ContactLink> {
// TOML filename
let toml_file = "links.toml";
// Read the TOML file and parse it
let toml_str = read_to_string(format!("{directory}/{toml_file}")).unwrap_or_default();
let mut redirections = vec![];
match toml::de::from_str::<toml::Value>(&toml_str) {
Ok(data) => {
if let Some(section) = data.as_table() {
section.iter().for_each(|(key, value)| {
// Scopes are delimited with `/`
let (service, scope) = match key.split_once('/') {
Some((service, scope)) => (service.to_owned(), Some(scope.to_owned())),
None => (key.to_owned(), None),
};
redirections.push(ContactLink {
service,
scope,
link: value.as_str().unwrap().to_owned(),
});
});
}
}
Err(_) => return vec![],
}
redirections
}
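For illustration, a hypothetical `links.toml` this parser accepts (TOML requires quoting keys that contain `/`, which is the scope delimiter here):

```
mastodon = "https://example.social/@jane"
"git/github" = "https://github.com/jane"
"git/forgejo" = "https://git.example.org/jane"
```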
#[routes]
@@ -51,7 +92,7 @@ async fn service_redirection(config: web::Data<Config>, req: HttpRequest) -> imp
_ => false,
})
// Returns the link
.map(|data| data.url.clone())
.map(|data| data.link.clone())
.collect::<Vec<String>>();
// This shouldn't be more than one link here
@@ -78,33 +119,39 @@ struct NetworksTemplate {
others: Vec<File>,
}
#[cached(time = 60)]
fn build_page(config: Config, lang: Lang) -> String {
fn remove_paragraphs(list: &mut [File]) {
list.iter_mut()
.for_each(|file| file.content = file.content.replace("<p>", "").replace("</p>", ""));
}
#[once(time = 60)]
fn build_page(config: Config) -> String {
let contacts_dir = format!("{}/{}", config.locations.data_dir, CONTACT_DIR);
let ext = ".md";
// Get about
let (about, html_lang) = read_file_fallback(
FilePath {
base: contacts_dir.clone(),
path: "about.md".to_owned(),
},
MType::Generic,
&lang,
let about = read_file(
&format!("{contacts_dir}/about.md"),
&TypeFileMetadata::Generic,
);
let mut socials = read(&FilePath {
base: contacts_dir.clone(),
path: format!("socials/*{ext}"),
});
let mut forges = read(&FilePath {
base: contacts_dir.clone(),
path: format!("forges/*{ext}"),
});
let mut others = read(&FilePath {
base: contacts_dir,
path: format!("others/*{ext}"),
});
let socials_dir = "socials";
let mut socials = glob(&format!("{contacts_dir}/{socials_dir}/*{ext}"))
.unwrap()
.map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
.collect::<Vec<File>>();
let forges_dir = "forges";
let mut forges = glob(&format!("{contacts_dir}/{forges_dir}/*{ext}"))
.unwrap()
.map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
.collect::<Vec<File>>();
let others_dir = "others";
let mut others = glob(&format!("{contacts_dir}/{others_dir}/*{ext}"))
.unwrap()
.map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
.collect::<Vec<File>>();
// Remove paragraphs in custom statements
[&mut socials, &mut forges, &mut others]
@@ -139,6 +186,5 @@
"linktree",
])),
},
Some(html_lang),
)
}


@@ -1,10 +1,12 @@
use std::collections::HashMap;
use crate::{
config::Config,
template::{InfosPage, NavBar},
utils::{
misc::{make_kw, Html},
routes::contrib::{fetch, Project},
misc::{
github::{fetch_pr, ProjectState},
utils::{make_kw, Html},
},
template::{InfosPage, NavBar},
};
use actix_web::{get, web, Responder};
use cached::proc_macro::once;
@@ -24,6 +26,24 @@ struct PortfolioTemplate {
closed: Option<Vec<Project>>,
}
#[derive(Clone, Content, Debug)]
struct Project {
name: String,
url: String,
pulls_merged: Vec<Pull>,
pulls_open: Vec<Pull>,
pulls_closed: Vec<Pull>,
}
#[derive(Clone, Content, Debug)]
struct Pull {
url: String,
id: u32,
name_repo: String,
title: String,
state: u8,
}
#[once(time = 600)] // 10min
async fn build_page(config: Config) -> String {
let navbar = NavBar {
@@ -32,29 +52,88 @@
};
// Fetch latest data from github
let data = match fetch().await {
Ok(data) => PortfolioTemplate {
navbar,
error: false,
projects: Some(
data.iter()
.filter(|&p| !p.pulls_merged.is_empty())
.cloned()
.collect(),
),
waiting: Some(
data.iter()
.filter(|&p| !p.pulls_open.is_empty())
.cloned()
.collect(),
),
closed: Some(
data.iter()
.filter(|&p| !p.pulls_closed.is_empty())
.cloned()
.collect(),
),
},
let data = match fetch_pr().await {
Ok(projects) => {
let mut data: Vec<Project> = Vec::new();
// Grouping PRs by projects
let mut map: HashMap<&str, Vec<Pull>> = HashMap::new();
for p in &projects {
let project = Pull {
url: p.contrib_url.clone(),
id: p.id,
name_repo: p.name.clone(),
title: p.title.clone(),
state: p.status as u8,
};
let project_name = p.name.as_str();
if map.contains_key(project_name) {
map.entry(project_name).and_modify(|v| v.push(project));
} else {
data.push(Project {
name: project_name.into(),
url: p.url.clone(),
pulls_merged: Vec::new(),
pulls_closed: Vec::new(),
pulls_open: Vec::new(),
});
map.insert(project_name, vec![project]);
}
}
// Distributes each PR in the right vector
for d in &mut data {
map.get(d.name.as_str()).unwrap().iter().for_each(|p| {
let state = p.state.try_into().unwrap();
match state {
ProjectState::Closed => d.pulls_closed.push(p.to_owned()),
ProjectState::Merged => d.pulls_merged.push(p.to_owned()),
ProjectState::Open => d.pulls_open.push(p.to_owned()),
}
});
let mut name: Vec<char> = d.name.replace('-', " ").chars().collect();
name[0] = name[0].to_uppercase().next().unwrap();
d.name = name.into_iter().collect();
}
// Ascending order by pulls IDs
for d in &mut data {
d.pulls_closed.reverse();
d.pulls_merged.reverse();
d.pulls_open.reverse();
}
// Descending order by number of merged pulls
data.sort_by(|a, b| {
b.pulls_merged
.len()
.partial_cmp(&a.pulls_merged.len())
.unwrap()
});
PortfolioTemplate {
navbar,
error: false,
projects: Some(
data.iter()
.filter(|&p| !p.pulls_merged.is_empty())
.cloned()
.collect(),
),
waiting: Some(
data.iter()
.filter(|&p| !p.pulls_open.is_empty())
.cloned()
.collect(),
),
closed: Some(
data.iter()
.filter(|&p| !p.pulls_closed.is_empty())
.cloned()
.collect(),
),
}
}
Err(e) => {
eprintln!("{e}");
@@ -74,7 +153,7 @@ async fn build_page(config: Config) -> String {
InfosPage {
title: Some("Mes contributions".into()),
desc: Some(format!(
"Contributions d'{} sur GitHub",
"Contributions d'{} à GitHub",
config.fc.name.unwrap_or_default()
)),
kw: Some(make_kw(&[
@@ -87,6 +166,5 @@
"code",
])),
},
None,
)
}


@@ -1,18 +1,18 @@
use std::path::Path;
use actix_web::{get, web, Responder};
use cached::proc_macro::cached;
use ramhorns::Content;
use regex::Regex;
use serde::Deserialize;
use serde::{Deserialize, Serialize};
use crate::{
config::Config,
template::{InfosPage, NavBar},
utils::{
markdown::{File, FilePath},
metadata::MType,
misc::{make_kw, read_file, Html},
routes::cours::{excluded, get_filetree},
misc::{
markdown::{File, TypeFileMetadata},
utils::{make_kw, read_file, Html},
},
template::{InfosPage, NavBar},
};
#[derive(Debug, Deserialize)]
@@ -32,6 +32,13 @@ struct CoursTemplate {
content: Option<File>,
}
#[derive(Clone, Debug, Serialize)]
struct FileNode {
name: String,
is_dir: bool,
children: Vec<FileNode>,
}
#[cached]
fn compile_patterns(exclusion_list: Vec<String>) -> Vec<Regex> {
exclusion_list
@@ -40,49 +47,76 @@
.collect()
}
fn get_filetree(dir_path: &str, exclusion_patterns: &Vec<Regex>) -> FileNode {
let children = std::fs::read_dir(dir_path)
.unwrap()
.filter_map(Result::ok)
.filter_map(|entry| {
let entry_path = entry.path();
let entry_name = entry_path.file_name()?.to_string_lossy().to_string();
// Exclude element with the exclusion_list
if exclusion_patterns.iter().any(|re| re.is_match(&entry_name)) {
return None;
}
if entry_path.is_file() {
Some(FileNode {
name: entry_name,
is_dir: false,
children: vec![],
})
} else {
// Exclude empty directories
let children_of_children =
get_filetree(entry_path.to_str().unwrap(), exclusion_patterns);
if children_of_children.is_dir && children_of_children.children.is_empty() {
None
} else {
Some(children_of_children)
}
}
})
.collect();
FileNode {
name: Path::new(dir_path)
.file_name()
.unwrap()
.to_string_lossy()
.to_string(),
is_dir: true,
children,
}
}
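Serialized with `serde_json` for the template, a hypothetical `data/cours` tree containing only `algo/tri.md` yields:

```
{
  "name": "cours",
  "is_dir": true,
  "children": [
    {
      "name": "algo",
      "is_dir": true,
      "children": [{ "name": "tri.md", "is_dir": false, "children": [] }]
    }
  ]
}
```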
/// Get a page content
fn get_content(
cours_dir: &str,
path: &web::Query<PathRequest>,
exclusion_list: &[String],
exclusion_patterns: &[Regex],
) -> Option<File> {
let filename = path.q.as_ref().map_or("index.md", |q| q);
// Exclusion checks
if excluded(filename, exclusion_list, exclusion_patterns) {
// We should support regex?
if exclusion_list
.iter()
.any(|excluded_term| filename.contains(excluded_term.as_str()))
{
return None;
}
read_file(
FilePath {
base: cours_dir.to_owned(),
path: filename.to_owned(),
},
MType::Cours,
None,
&format!("{cours_dir}/{filename}"),
&TypeFileMetadata::Generic,
)
}
fn build_page(info: &web::Query<PathRequest>, config: Config) -> String {
let cours_dir = "data/cours";
let (ep, el): (_, Vec<_>) = config
.fc
.exclude_courses
.unwrap()
.into_iter()
.partition(|item| item.starts_with('/'));
let exclusion_list = {
let mut base = vec!["../".to_owned()];
base.extend(el);
base
};
let exclusion_patterns: Vec<Regex> =
compile_patterns(ep.iter().map(|r| r[1..r.len() - 1].to_owned()).collect());
let filetree = get_filetree(cours_dir, &exclusion_list, &exclusion_patterns);
let exclusion_list = config.fc.exclude_courses.unwrap();
let exclusion_patterns = compile_patterns(exclusion_list.clone());
let filetree = get_filetree(cours_dir, &exclusion_patterns);
config.tmpl.render(
"cours.html",
@@ -92,7 +126,7 @@
..NavBar::default()
},
filetree: serde_json::to_string(&filetree).unwrap(),
content: get_content(cours_dir, info, &exclusion_list, &exclusion_patterns),
content: get_content(cours_dir, info, &exclusion_list),
},
InfosPage {
title: Some("Cours".into()),
@@ -107,6 +141,5 @@
"digital garden",
])),
},
None,
)
}


@@ -1,20 +1,19 @@
use actix_web::{get, web, HttpRequest, Responder};
use cached::proc_macro::cached;
use actix_web::{get, web, Responder};
use cached::proc_macro::once;
use ramhorns::Content;
use crate::{
config::Config,
template::{InfosPage, NavBar},
utils::{
markdown::{File, FilePath},
metadata::MType,
misc::{lang, make_kw, read_file, read_file_fallback, Html, Lang},
misc::{
markdown::{File, TypeFileMetadata},
utils::{make_kw, read_file, Html},
},
template::{InfosPage, NavBar},
};
#[get("/")]
pub async fn page(req: HttpRequest, config: web::Data<Config>) -> impl Responder {
Html(build_page(config.get_ref().to_owned(), lang(req.headers())))
pub async fn page(config: web::Data<Config>) -> impl Responder {
Html(build_page(config.get_ref().to_owned()))
}
#[derive(Content, Debug)]
@@ -34,15 +33,11 @@ struct StyleAvatar {
square: bool,
}
#[cached(time = 60)]
fn build_page(config: Config, lang: Lang) -> String {
let (mut file, html_lang) = read_file_fallback(
FilePath {
base: config.locations.data_dir.clone(),
path: "index.md".to_owned(),
},
MType::Index,
&lang,
#[once(time = 60)]
fn build_page(config: Config) -> String {
let mut file = read_file(
&format!("{}/index.md", config.locations.data_dir),
&TypeFileMetadata::Index,
);
// Default values
@@ -72,14 +67,7 @@ fn build_page(config: Config, lang: Lang) -> String {
}
}
} else {
file = read_file(
FilePath {
base: String::new(),
path: "README.md".to_owned(),
},
MType::Generic,
None,
);
file = read_file("README.md", &TypeFileMetadata::Generic);
}
config.tmpl.render(
@@ -101,6 +89,5 @@
desc: Some("Page principale".into()),
kw: Some(make_kw(&["index", "étudiant", "accueil"])),
},
Some(html_lang),
)
}


@@ -4,8 +4,8 @@ use ramhorns::Content;
use crate::{
config::Config,
misc::utils::{get_url, Html},
template::{InfosPage, NavBar},
utils::misc::{get_url, Html},
};
pub async fn page(config: web::Data<Config>) -> impl Responder {
@@ -32,6 +32,5 @@ fn build_page(config: Config) -> String {
desc: Some("Une page perdu du web".into()),
..InfosPage::default()
},
None,
)
}


@@ -1,21 +1,20 @@
use actix_web::{get, web, HttpRequest, Responder};
use cached::proc_macro::cached;
use actix_web::{get, web, Responder};
use cached::proc_macro::once;
use glob::glob;
use ramhorns::Content;
use crate::{
config::Config,
template::{InfosPage, NavBar},
utils::{
markdown::{File, FilePath},
metadata::MType,
misc::{lang, make_kw, read_file, read_file_fallback, Html, Lang},
misc::{
markdown::{File, TypeFileMetadata},
utils::{make_kw, read_file, Html},
},
template::{InfosPage, NavBar},
};
#[get("/portfolio")]
pub async fn page(req: HttpRequest, config: web::Data<Config>) -> impl Responder {
Html(build_page(config.get_ref().to_owned(), lang(req.headers())))
pub async fn page(config: web::Data<Config>) -> impl Responder {
Html(build_page(config.get_ref().to_owned()))
}
#[derive(Content, Debug)]
@@ -29,40 +28,26 @@ struct PortfolioTemplate<'a> {
err_msg: &'a str,
}
#[cached(time = 60)]
fn build_page(config: Config, lang: Lang) -> String {
#[once(time = 60)]
fn build_page(config: Config) -> String {
let projects_dir = format!("{}/projects", config.locations.data_dir);
let apps_dir = FilePath {
base: format!("{projects_dir}/apps"),
path: String::new(),
};
let apps_dir = format!("{projects_dir}/apps");
let ext = ".md";
// Get about
let (about, html_lang) = read_file_fallback(
FilePath {
base: projects_dir,
path: "about.md".to_owned(),
},
MType::Generic,
&lang,
let about = read_file(
&format!("{projects_dir}/about.md"),
&TypeFileMetadata::Generic,
);
// Get apps
let apps = glob(&format!("{apps_dir}/*{ext}"))
.unwrap()
.map(|e| {
read_file(
apps_dir.from(&e.unwrap().to_string_lossy()),
MType::Portfolio,
None,
)
.unwrap()
})
.map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Portfolio).unwrap())
.collect::<Vec<File>>();
let appdata = if apps.is_empty() {
(None, Some(apps_dir.to_string()))
(None, Some(apps_dir.as_str()))
} else {
(Some(apps), None)
};
@@ -70,14 +55,7 @@ fn build_page(config: Config, lang: Lang) -> String {
// Get archived apps
let archived_apps = glob(&format!("{apps_dir}/archive/*{ext}"))
.unwrap()
.map(|e| {
read_file(
apps_dir.from(&e.unwrap().to_string_lossy()),
MType::Portfolio,
None,
)
.unwrap()
})
.map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Portfolio).unwrap())
.collect::<Vec<File>>();
let archived_appdata = if archived_apps.is_empty() {
@@ -95,7 +73,7 @@
},
about,
apps: appdata.0,
location_apps: appdata.1.as_deref(),
location_apps: appdata.1,
archived_apps: archived_appdata.0,
archived_apps_exists: archived_appdata.1,
err_msg: "is empty",
@@ -115,6 +93,5 @@
"code",
])),
},
Some(html_lang),
)
}


@@ -3,8 +3,8 @@ use cached::proc_macro::once;
use crate::{
config::Config,
misc::utils::{make_kw, Html},
template::InfosPage,
utils::misc::{make_kw, Html},
};
#[get("/web3")]
@@ -22,6 +22,5 @@ fn build_page(config: Config) -> String {
desc: Some("Coin reculé de l'internet".into()),
kw: Some(make_kw(&["web3", "blockchain", "nft", "ai"])),
},
None,
)
}


@@ -1,9 +1,7 @@
use ramhorns::{Content, Ramhorns};
use crate::utils::misc::Lang;
/// Structure used in the config variable of the app
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
#[derive(Clone, Debug)]
pub struct Template {
/// Root directory where templates are stored
pub directory: String,
@@ -53,20 +51,12 @@ struct DataPage<T> {
page_kw: Option<String>,
/// Author's name
page_author: Option<String>,
/// Language used
lang: String,
/// Data needed to render the page
data: T,
}
impl Template {
pub fn render<C: Content>(
&self,
template: &str,
data: C,
info: InfosPage,
lang: Option<String>,
) -> String {
pub fn render<C: Content>(&self, template: &str, data: C, info: InfosPage) -> String {
let mut templates: Ramhorns = Ramhorns::lazy(&self.directory).unwrap();
let tplt = templates.from_file(template).unwrap();
@@ -77,7 +67,6 @@ impl Template {
page_desc: info.desc,
page_kw: info.kw,
page_author: self.name.clone(),
lang: lang.unwrap_or(Lang::default()),
data,
})
}


@@ -1,478 +0,0 @@
use base64::engine::general_purpose;
use base64::Engine;
use comrak::nodes::{AstNode, NodeValue};
use comrak::{format_html, parse_document, Arena, ComrakOptions, ListStyleType, Options};
use lol_html::html_content::ContentType;
use lol_html::{element, rewrite_str, HtmlRewriter, RewriteStrSettings, Settings};
use ramhorns::Content;
use std::fmt::Debug;
use std::fs;
use std::path::Path;
use std::sync::Arc;
use crate::utils::metadata::MType;
use crate::utils::misc::remove_first_letter;
use super::metadata::{get, MFile, Metadata};
/// File description
#[derive(Content, Debug, Clone)]
pub struct File {
pub metadata: Metadata,
pub content: String,
}
#[derive(Hash, PartialEq, Eq, Clone)]
pub struct FilePath {
pub base: String,
pub path: String,
}
impl std::fmt::Display for FilePath {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
if self.path.is_empty() {
return write!(f, "{}", self.base);
}
if self.base.is_empty() {
return write!(f, "{}", self.path);
}
write!(f, "{}/{}", self.base, self.path)
}
}
impl FilePath {
pub fn from(&self, fullpath: &str) -> Self {
Self {
base: self.base.clone(),
path: fullpath.to_owned().split_off(self.base.len()),
}
}
}
/// Options used for parser and compiler MD --> HTML
pub fn get_options(path: Option<FilePath>, metadata_type: MType) -> ComrakOptions<'static> {
comrak::Options {
extension: comrak::ExtensionOptions::builder()
.strikethrough(true)
.tagfilter(true)
.table(true)
.autolink(true)
.tasklist(true)
.superscript(true)
.header_ids(path_to_hid(path.as_ref()))
.footnotes(true)
.description_lists(true)
.front_matter_delimiter("---".into())
.multiline_block_quotes(true)
.math_dollars(true)
.underline(true)
.maybe_link_url_rewriter(match metadata_type {
MType::Cours => Some(Arc::new(move |url: &str| {
// Exclude external links
if [
"://",
"mailto:",
"magnet:",
"ftp:",
"tel:",
"irc:",
"geo:",
"ftps:",
"im:",
"ircs:",
"bitcoin:",
"matrix:",
"mms:",
"news:",
"nntp:",
"openpgp4fpr:",
"sftp:",
"sip:",
"sms:",
"smsto:",
"ssh:",
"urn:",
"webcal:",
"wtai:",
"xmpp:",
]
.iter()
.any(|&item| url.contains(item))
{
return String::from(url);
}
let file = if url.starts_with("./") {
url.to_owned().split_off(2)
} else {
url.to_owned()
};
match &path {
Some(p) => {
let mut parts = p.path.split('/').collect::<Vec<_>>();
parts.pop();
parts.push(&file);
format!("/cours?q={}", parts.join("/"))
}
None => format!("/cours?q={file}"),
}
})),
_ => None,
})
.build(),
parse: comrak::ParseOptions::builder()
.smart(true)
.default_info_string("plaintext".into())
.relaxed_tasklist_matching(true)
.relaxed_autolinks(true)
.build(),
render: comrak::RenderOptions::builder()
.full_info_string(true)
.unsafe_(true)
.list_style(ListStyleType::Dash)
.ignore_setext(true)
.ignore_empty_links(true)
.gfm_quirks(true)
.build(),
}
}
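Concretely (hypothetical course file): with this rewriter, a relative link `./tri.md` inside `algo/intro.md` becomes `/cours?q=algo/tri.md`, while links containing `://` or one of the listed schemes (`mailto:`, `tel:`, ...) pass through unchanged.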
/// Transform the path to something usable for header IDs
fn path_to_hid(path: Option<&FilePath>) -> String {
match path {
Some(fp) => {
format!(
"{}-",
fp.path
.get(..fp.path.len() - 3)
.unwrap_or_default()
.replace([' ', '/'], "-")
.to_lowercase()
)
}
None => String::new(),
}
}
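For instance (hypothetical path), `algo/Tri rapide.md` yields the prefix `algo-tri-rapide-`, so a `# Intro` heading in that file gets the ID `algo-tri-rapide-intro`, keeping anchors unique when several files are embedded into one page.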
/// Resize images if needed
fn custom_img_size(html: &str) -> String {
rewrite_str(
html,
RewriteStrSettings {
element_content_handlers: vec![element!("img[alt]", |el| {
let alt = el.get_attribute("alt").unwrap();
let possible_piece = alt.split('|').collect::<Vec<&str>>();
if possible_piece.len() > 1 {
let data = possible_piece.last().unwrap().trim();
// Remove the dimension data from the alt
let new_alt = possible_piece.first().unwrap().trim_end();
if let Some(dimension) = data.split_once('x') {
// Width and height specified
if dimension.0.parse::<i32>().is_ok() && dimension.1.parse::<i32>().is_ok()
{
el.set_attribute("width", dimension.0).unwrap();
el.set_attribute("height", dimension.1).unwrap();
if new_alt.is_empty() {
el.remove_attribute("alt");
} else {
el.set_attribute("alt", new_alt).unwrap();
}
}
} else {
// Only width specified
if data.parse::<i32>().is_ok() {
el.set_attribute("width", data).unwrap();
if new_alt.is_empty() {
el.remove_attribute("alt");
} else {
el.set_attribute("alt", new_alt).unwrap();
}
}
}
}
Ok(())
})],
..RewriteStrSettings::default()
},
)
.unwrap()
}
fn fix_headers_ids(html: &str, path: Option<&FilePath>) -> String {
rewrite_str(
html,
RewriteStrSettings {
element_content_handlers: vec![element!(
"a:not([data-footnote-ref]):not(.footnote-backref)[href^='#']",
|el| {
let id = el.get_attribute("id").unwrap_or(
path_to_hid(path) + remove_first_letter(&el.get_attribute("href").unwrap()),
);
el.set_attribute("href", &format!("#{id}")).unwrap();
Ok(())
}
)],
..RewriteStrSettings::default()
},
)
.unwrap()
}
/// Fix local images to base64 and integration of markdown files
fn fix_images_and_integration(
path: &FilePath,
html: &str,
metadata_type: MType,
recursive: bool,
) -> (String, Metadata) {
let mut metadata = Metadata {
info: MFile::default(),
math: false,
mermaid: false,
syntax_highlight: false,
mail_obfsucated: false,
};
// Collection of any additional metadata
let mut additional_metadata = Vec::new();
let result = rewrite_str(
html,
RewriteStrSettings {
element_content_handlers: vec![element!("img", |el| {
if let Some(src) = el.get_attribute("src") {
let path_str = path.to_string();
let img_src = Path::new(&path_str).parent().unwrap();
let img_path = urlencoding::decode(img_src.join(src).to_str().unwrap())
.unwrap()
.to_string();
if let Ok(file_contents) = fs::read(&img_path) {
let mime = mime_guess::from_path(&img_path).first_or_octet_stream();
if recursive && mime == "text/markdown" {
let file_str = String::from_utf8_lossy(&file_contents).into_owned();
// Find an ok-ish path
let new_path = img_path.split('/');
let mut options = get_options(
Some(FilePath {
base: new_path
.clone()
.take(new_path.clone().count() - 1)
.collect::<Vec<_>>()
.join("/"),
path: new_path.last().unwrap_or("").to_string(),
}),
metadata_type,
);
options.extension.footnotes = false;
let data = read_md(
&path.from(&img_path),
&file_str,
metadata_type,
Some(options),
false,
);
el.replace(&data.content, ContentType::Html);
// Store the metadata for later merging
additional_metadata.push(data.metadata);
} else {
let image = general_purpose::STANDARD.encode(&file_contents);
el.set_attribute("src", &format!("data:{mime};base64,{image}"))
.unwrap();
}
}
}
Ok(())
})],
..RewriteStrSettings::default()
},
)
.unwrap();
// Merge all collected metadata
for additional in additional_metadata {
metadata.merge(&additional);
}
(result, metadata)
}
/// Transform a markdown string into a File structure
pub fn read_md(
path: &FilePath,
raw_text: &str,
metadata_type: MType,
options: Option<Options>,
recursive: bool,
) -> File {
let arena = Arena::new();
let mut opt = options.unwrap_or_else(|| get_options(Some(path.clone()), metadata_type));
let root = parse_document(&arena, raw_text, &opt);
// Find metadata
let metadata = get(root, metadata_type);
// Update comrak render properties
opt.render.hardbreaks = metadata.hardbreaks;
let mermaid_name = "mermaid";
hljs_replace(root, mermaid_name);
// Convert to HTML
let mut html = vec![];
format_html(root, &opt, &mut html).unwrap();
let mut html_content = String::from_utf8(html).unwrap();
let children_metadata;
let mail_obfsucated;
(html_content, children_metadata) =
fix_images_and_integration(path, &html_content, metadata_type, recursive);
html_content = custom_img_size(&html_content);
html_content = fix_headers_ids(&html_content, Some(path));
(html_content, mail_obfsucated) = mail_obfuscation(&html_content);
let mut final_metadata = Metadata {
info: metadata,
mermaid: check_mermaid(root, mermaid_name),
syntax_highlight: check_code(root, &[mermaid_name.into()]),
math: check_math(&html_content),
mail_obfsucated,
};
final_metadata.merge(&children_metadata);
File {
metadata: final_metadata,
content: html_content,
}
}
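A minimal usage sketch of the whole pipeline (path and content hypothetical):

```rust
let file = read_md(
    &FilePath {
        base: "data/blog/posts".into(),
        path: "hello.md".into(),
    },
    "---\ntitle: Hello\n---\n# Hello\n",
    MType::Blog,
    None, // derive default comrak options from the path
    true, // allow integration of referenced markdown files
);
assert!(file.content.contains("<h1"));
```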
/// Check whether mermaid diagrams are in the AST
fn check_mermaid<'a>(root: &'a AstNode<'a>, mermaid_str: &str) -> bool {
root.descendants()
.any(|node| match &node.data.borrow().value {
// Check whether the code block defines a mermaid diagram
NodeValue::CodeBlock(code_block) => code_block.info == mermaid_str,
_ => false,
})
}
/// Check if code is in the AST
fn check_code<'a>(root: &'a AstNode<'a>, blacklist: &[String]) -> bool {
root.descendants()
.any(|node| match &node.data.borrow().value {
// Detect blocks of code where the lang isn't in the blacklist
NodeValue::CodeBlock(code_block) => !blacklist.contains(&code_block.info),
_ => false,
})
}
/// Check if the HTML contains math
fn check_math(html: &str) -> bool {
let mut math_detected = false;
let _ = HtmlRewriter::new(
Settings {
element_content_handlers: vec![element!("span[data-math-style]", |_| {
math_detected = true;
Ok(())
})],
..Settings::default()
},
|_: &[u8]| {},
)
.write(html.as_bytes());
math_detected
}
/// Change class of languages for hljs detection
fn hljs_replace<'a>(root: &'a AstNode<'a>, mermaid_str: &str) {
root.descendants().for_each(|node| {
if let NodeValue::CodeBlock(ref mut block) = &mut node.data.borrow_mut().value {
if block.info != mermaid_str {
block.info = format!("hljs-{}", block.info);
}
}
});
}
/// Obfuscate emails if any are found
fn mail_obfuscation(html: &str) -> (String, bool) {
let mut modified = false;
let data_attr = "title";
// Modify HTML for mails
let new_html = rewrite_str(
html,
RewriteStrSettings {
element_content_handlers: vec![element!("a[href^='mailto:']", |el| {
modified = true;
// Get mail address
let link = el.get_attribute("href").unwrap();
let (uri, mail) = &link.split_at(7);
let (before, after) = mail.split_once('@').unwrap();
// Preserve old data and add obfuscated mail address
el.prepend(&format!("<span {data_attr}='"), ContentType::Html);
let modified_mail = format!("'></span>{before}<span class='at'>(at)</span>{after}");
el.append(&modified_mail, ContentType::Html);
// Change href
Ok(el.set_attribute("href", &format!("{uri}{before} at {after}"))?)
})],
..RewriteStrSettings::default()
},
)
.unwrap();
if modified {
// Remove the old email data if it exists
(
rewrite_str(
&new_html,
RewriteStrSettings {
element_content_handlers: vec![element!(
&format!("a[href^='mailto:'] > span[{data_attr}]"),
|el| {
Ok(el.set_attribute(
data_attr,
// Remove mails
el.get_attribute(data_attr)
.unwrap()
.split_whitespace()
.filter(|word| !word.contains('@'))
.collect::<Vec<&str>>()
.join(" ")
.trim(),
)?)
}
)],
..RewriteStrSettings::default()
},
)
.unwrap(),
modified,
)
} else {
(new_html, modified)
}
}
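Concretely, the address is split around the `@` so it never appears verbatim in the generated HTML, and the `href` degrades to a human-readable form. A sketch:

```rust
let (out, modified) =
    mail_obfuscation(r#"<a href="mailto:jean@example.com">Mail</a>"#);
assert!(modified);
// The visible text becomes "jean(at)example.com" via the injected spans,
// and the href is rewritten to a non-harvestable form:
assert!(out.contains("mailto:jean at example.com"));
```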

View file

@ -1,164 +0,0 @@
use crate::utils::date::Date;
use comrak::nodes::{AstNode, NodeValue};
use ramhorns::Content;
use serde::{Deserialize, Deserializer};
use std::fmt::Debug;
/// Metadata for blog posts
#[derive(Content, Clone, Debug, Default, Deserialize)]
pub struct FileMetadataBlog {
pub hardbreaks: Option<bool>,
pub title: Option<String>,
pub date: Option<Date>,
pub description: Option<String>,
pub publish: Option<bool>,
pub draft: Option<bool>,
pub tags: Option<Vec<Tag>>,
pub toc: Option<bool>,
}
/// A tag, related to a blog post
#[derive(Content, Debug, Clone)]
pub struct Tag {
pub name: String,
}
impl<'a> Deserialize<'a> for Tag {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'a>,
{
match <&str>::deserialize(deserializer) {
Ok(s) => match serde_yml::from_str(s) {
Ok(tag) => Ok(Self { name: tag }),
Err(e) => Err(serde::de::Error::custom(e)),
},
Err(e) => Err(e),
}
}
}
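This lets tags be written as plain YAML scalars in the frontmatter. A sketch:

```rust
// `tags: [rust, web]` in a post's frontmatter deserializes into a
// Vec<Tag> with names "rust" and "web".
let tags: Vec<Tag> = serde_yml::from_str("[rust, web]").unwrap();
assert_eq!(tags[0].name, "rust");
```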
/// Metadata for contact entry
#[derive(Content, Debug, Default, Deserialize, Clone)]
pub struct FileMetadataContact {
pub title: String,
pub custom: Option<bool>,
pub user: Option<String>,
pub link: Option<String>,
pub newtab: Option<bool>,
pub description: Option<String>,
pub hide: Option<bool>,
}
/// Metadata for index page
#[derive(Content, Debug, Default, Deserialize, Clone)]
pub struct FileMetadataIndex {
pub name: Option<String>,
pub pronouns: Option<String>,
pub avatar: Option<String>,
pub avatar_caption: Option<String>,
pub avatar_style: Option<String>,
}
/// Metadata for portfolio cards
#[derive(Content, Debug, Default, Deserialize, Clone)]
pub struct FileMetadataPortfolio {
pub title: Option<String>,
pub link: Option<String>,
pub description: Option<String>,
pub language: Option<String>,
}
/// List of available metadata types
#[derive(Hash, PartialEq, Eq, Clone, Copy)]
pub enum MType {
Blog,
Contact,
Cours,
Generic,
Index,
Portfolio,
}
/// Structure holding all the metadata a file can have
/// Usually all fields are None except one
#[derive(Content, Debug, Default, Deserialize, Clone)]
pub struct MFile {
pub hardbreaks: bool,
pub blog: Option<FileMetadataBlog>,
pub contact: Option<FileMetadataContact>,
pub index: Option<FileMetadataIndex>,
pub portfolio: Option<FileMetadataPortfolio>,
}
#[allow(clippy::struct_excessive_bools)]
/// Global metadata
#[derive(Content, Debug, Clone, Default)]
pub struct Metadata {
pub info: MFile,
pub math: bool,
pub mermaid: bool,
pub syntax_highlight: bool,
pub mail_obfsucated: bool,
}
impl Metadata {
/// Update current metadata boolean fields, keeping true ones
pub fn merge(&mut self, other: &Self) {
self.math = self.math || other.math;
self.mermaid = self.mermaid || other.mermaid;
self.syntax_highlight = self.syntax_highlight || other.syntax_highlight;
}
}
/// Deserialize metadata based on a type
fn deserialize_metadata<T: Default + serde::de::DeserializeOwned>(text: &str) -> T {
serde_yml::from_str(text.trim().trim_matches(&['-'] as &[_])).unwrap_or_default()
}
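The frontmatter arrives with its `---` fences still attached, hence the trimming before the YAML parse. A sketch:

```rust
let meta: FileMetadataBlog =
    deserialize_metadata("---\ntitle: Demo\npublish: true\n---");
assert_eq!(meta.title.as_deref(), Some("Demo"));
assert_eq!(meta.publish, Some(true));
```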
/// Fetch metadata from AST
pub fn get<'a>(root: &'a AstNode<'a>, mtype: MType) -> MFile {
root.children()
.map(|node| match &node.data.borrow().value {
// Extract metadata from frontmatter
NodeValue::FrontMatter(text) => match mtype {
MType::Blog => {
let metadata = deserialize_metadata::<FileMetadataBlog>(text);
MFile {
blog: Some(metadata.clone()),
hardbreaks: metadata.hardbreaks.unwrap_or_default(),
..MFile::default()
}
}
MType::Contact => {
let mut metadata = deserialize_metadata::<FileMetadataContact>(text);
// Trim descriptions
if let Some(desc) = &mut metadata.description {
desc.clone_from(&desc.trim().into());
}
MFile {
contact: Some(metadata),
..MFile::default()
}
}
MType::Generic | MType::Cours => MFile {
hardbreaks: deserialize_metadata(text),
..MFile::default()
},
MType::Index => MFile {
index: Some(deserialize_metadata(text)),
..MFile::default()
},
MType::Portfolio => MFile {
portfolio: Some(deserialize_metadata(text)),
..MFile::default()
},
},
_ => MFile {
hardbreaks: true,
..MFile::default()
},
})
.next()
.unwrap_or_default()
}

View file

@ -1,179 +0,0 @@
use std::{fs, os::unix::fs::MetadataExt, path::Path};
use actix_web::{
http::{
header::{self, ContentType, HeaderMap, TryIntoHeaderValue},
StatusCode,
},
HttpRequest, HttpResponse, Responder,
};
use base64::{engine::general_purpose, Engine};
use cached::proc_macro::cached;
use mime_guess::mime;
use reqwest::Client;
use crate::config::FileConfiguration;
use super::{
markdown::{read_md, File, FilePath},
metadata::{MType, Metadata},
};
#[cached]
pub fn get_reqwest_client() -> Client {
Client::builder()
.user_agent(format!("EWP/{}", env!("CARGO_PKG_VERSION")))
.build()
.unwrap()
}
/// Get URL of the app
pub fn get_url(fc: FileConfiguration) -> String {
/* let port = match fc.scheme.as_deref() {
Some("https") if fc.port == Some(443) => String::new(),
Some("http") if fc.port == Some(80) => String::new(),
_ => format!(":{}", fc.port.unwrap()),
}; */
format!("{}://{}", fc.scheme.unwrap(), fc.domain.unwrap())
}
/// Make a list of keywords
pub fn make_kw(list: &[&str]) -> String {
list.join(", ")
}
/// Send HTML file
pub struct Html(pub String);
impl Responder for Html {
type Body = String;
fn respond_to(self, _req: &HttpRequest) -> HttpResponse<Self::Body> {
let mut res = HttpResponse::with_body(StatusCode::OK, self.0);
res.headers_mut().insert(
header::CONTENT_TYPE,
ContentType::html().try_into_value().unwrap(),
);
res
}
}
/// Read a localized file, falling back to the default file if the localized version isn't found
pub fn read_file_fallback(
filename: FilePath,
expected_file: MType,
lang: &Lang,
) -> (Option<File>, String) {
match read_file(filename.clone(), expected_file, Some(lang.clone())) {
None => (
read_file(filename, expected_file, None),
Lang::English.to_string(),
),
data => (data, lang.to_string()),
}
}
/// Read a file
pub fn read_file(filename: FilePath, expected_file: MType, lang: Option<Lang>) -> Option<File> {
reader(filename, expected_file, lang.unwrap_or(Lang::English))
}
#[cached(time = 600)]
fn reader(filename: FilePath, expected_file: MType, lang: Lang) -> Option<File> {
let as_str = match lang {
Lang::French => {
let str = filename.to_string();
let mut parts = str.split('.').collect::<Vec<_>>();
let extension = parts.pop().unwrap_or("");
let filename = parts.join(".");
&format!("{filename}-fr.{extension}")
}
Lang::English => &filename.to_string(),
};
let path = Path::new(as_str);
if let Ok(metadata) = path.metadata() {
// Maximum size: 30 MB
if metadata.size() > 30 * 1000 * 1000 {
return None;
}
}
path.extension().and_then(|ext| {
match mime_guess::from_ext(ext.to_str().unwrap_or_default()).first_or_text_plain() {
mime if mime == mime::APPLICATION_PDF => {
fs::read(as_str).map_or(None, |bytes| Some(read_pdf(bytes)))
}
mime if mime.type_() == mime::IMAGE => {
fs::read(as_str).map_or(None, |bytes| Some(read_img(bytes, &mime)))
}
_ => fs::read_to_string(as_str).map_or(None, |text| {
Some(read_md(&filename, &text, expected_file, None, true))
}),
}
})
}
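The localized lookup splices the language code in before the extension; if the localized file is missing, `read_file_fallback` retries in English. A sketch (path hypothetical):

```rust
// "index.md" requested in French is first looked up as "index-fr.md".
let (file, served_lang) = read_file_fallback(
    FilePath {
        base: "data".into(),
        path: "index.md".into(),
    },
    MType::Index,
    &Lang::French,
);
println!("lang: {served_lang}, found: {}", file.is_some());
```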
fn read_pdf(data: Vec<u8>) -> File {
let pdf = general_purpose::STANDARD.encode(data);
File {
metadata: Metadata::default(),
content: format!(
r#"<embed
src="data:{};base64,{pdf}"
style="width: 100%; height: 79vh";
>"#,
mime::APPLICATION_PDF
),
}
}
fn read_img(data: Vec<u8>, mime: &mime::Mime) -> File {
let image = general_purpose::STANDARD.encode(data);
File {
metadata: Metadata::default(),
content: format!("<img src='data:{mime};base64,{image}'>"),
}
}
/// Remove the first character of a string (an empty string stays empty)
pub fn remove_first_letter(s: &str) -> &str {
s.chars().next().map_or("", |c| &s[c.len_utf8()..])
}
#[derive(Hash, PartialEq, Eq, Clone)]
pub enum Lang {
French,
English,
}
impl Lang {
pub fn default() -> String {
Lang::French.to_string()
}
}
impl std::fmt::Display for Lang {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
Lang::French => write!(f, "fr"),
Lang::English => write!(f, "en"),
}
}
}
/// Get the browser language
pub fn lang(headers: &HeaderMap) -> Lang {
headers
.get("Accept-Language")
.and_then(|lang| lang.to_str().ok())
.and_then(|lang| {
["fr", "fr-FR"]
.into_iter()
.any(|i| lang.contains(i))
.then_some(Lang::French)
})
.unwrap_or(Lang::English)
}
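Only the `Accept-Language` header drives the choice; anything that doesn't mention French falls back to English. A sketch:

```rust
use actix_web::http::header::{HeaderMap, HeaderName, HeaderValue};

let mut headers = HeaderMap::new();
headers.insert(
    HeaderName::from_static("accept-language"),
    HeaderValue::from_static("fr-FR,fr;q=0.9,en;q=0.8"),
);
assert!(lang(&headers) == Lang::French);
```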

View file

@ -1,305 +0,0 @@
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use ::rss::{
extension::atom::{AtomExtension, Link},
Category, Channel, Guid, Image, Item,
};
use cached::proc_macro::once;
use chrono::{DateTime, Datelike, Local, NaiveDateTime, Utc};
use chrono_tz::Europe;
use comrak::{parse_document, Arena};
use ramhorns::Content;
use walkdir::WalkDir;
use crate::{
config::Config,
template::InfosPage,
utils::{
date::Date,
markdown::{get_options, File, FilePath},
metadata::{get, FileMetadataBlog, MType},
misc::{get_url, make_kw, read_file},
},
};
pub const MIME_TYPE_RSS: &str = "application/rss+xml";
pub const BLOG_DIR: &str = "blog";
pub const POST_DIR: &str = "posts";
#[derive(Content, Debug)]
pub struct Post {
title: String,
pub date: Date,
pub url: String,
desc: Option<String>,
content: Option<String>,
tags: Vec<String>,
}
impl Post {
// Fetch the file content
fn fetch_content(&mut self, data_dir: &str) {
let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
let ext = ".md";
if let Some(file) = read_file(
FilePath {
base: blog_dir,
path: format!("{}{ext}", self.url),
},
MType::Blog,
None,
) {
self.content = Some(file.content);
}
}
}
impl Hash for Post {
fn hash<H: Hasher>(&self, state: &mut H) {
if let Some(content) = &self.content {
content.hash(state);
}
}
}
pub fn get_posts(location: &str) -> Vec<Post> {
WalkDir::new(location)
.into_iter()
.filter_map(Result::ok)
.filter(|entry| {
entry.file_type().is_file() && entry.path().extension().is_some_and(|s| s == "md")
})
.filter_map(|f| {
let fname = f.file_name();
let filename = fname.to_string_lossy();
let file_without_ext = filename.split_at(filename.len() - 3).0;
let file_metadata = std::fs::read_to_string(f.path()).map_or_else(
|_| FileMetadataBlog {
title: Some(file_without_ext.into()),
..FileMetadataBlog::default()
},
|text| {
let arena = Arena::new();
let options = get_options(None, MType::Generic);
let root = parse_document(&arena, &text, &options);
let mut metadata = get(root, MType::Blog).blog.unwrap();
// Always have a title
metadata.title = metadata
.title
.map_or_else(|| Some(file_without_ext.into()), Some);
metadata
},
);
if file_metadata.publish == Some(true) && file_metadata.draft != Some(true) {
let url =
f.path().to_string_lossy().strip_prefix(location).unwrap()[1..].to_owned();
Some(Post {
url: url[..url.len() - 3].to_owned(),
title: file_metadata.title.unwrap(),
date: file_metadata.date.unwrap_or({
let m = f.metadata().unwrap();
let date = std::convert::Into::<DateTime<Utc>>::into(
m.modified().unwrap_or_else(|_| m.created().unwrap()),
)
.date_naive();
Date {
day: date.day(),
month: date.month(),
year: date.year(),
}
}),
desc: file_metadata.description,
content: None,
tags: file_metadata
.tags
.unwrap_or_default()
.iter()
.map(|t| t.name.clone())
.collect(),
})
} else {
None
}
})
.collect::<Vec<Post>>()
}
pub fn get_post(
post: &mut Option<File>,
filename: &str,
name: &str,
data_dir: &str,
) -> (InfosPage, String) {
let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
let ext = ".md";
*post = read_file(
FilePath {
base: blog_dir,
path: format!("{filename}{ext}"),
},
MType::Blog,
None,
);
let default = (
filename,
&format!("Blog d'{name}"),
Vec::new(),
String::new(),
);
let (title, desc, tags, toc) = match post {
Some(data) => (
match &data.metadata.info.blog.as_ref().unwrap().title {
Some(text) => text,
None => default.0,
},
match &data.metadata.info.blog.as_ref().unwrap().description {
Some(desc) => desc,
None => default.1,
},
match &data.metadata.info.blog.as_ref().unwrap().tags {
Some(tags) => tags.clone(),
None => default.2,
},
match &data.metadata.info.blog.as_ref().unwrap().toc {
// TODO: Generate TOC
Some(true) => String::new(),
_ => default.3,
},
),
None => default,
};
(
InfosPage {
title: Some(format!("Post: {title}")),
desc: Some(desc.clone()),
kw: Some(make_kw(
&["blog", "blogging", "write", "writing"]
.into_iter()
.chain(tags.iter().map(|t| t.name.as_str()))
.collect::<Vec<_>>(),
)),
},
toc,
)
}
#[once(time = 10800)] // 3h
pub fn build_rss(config: Config) -> String {
let mut posts = get_posts(&format!(
"{}/{}/{}",
config.locations.data_dir, BLOG_DIR, POST_DIR
));
// Sort from newest to oldest
posts.sort_by_cached_key(|p| (p.date.year, p.date.month, p.date.day));
posts.reverse();
// Only the 20 newest
let max = 20;
if posts.len() > max {
posts.drain(max..);
}
let link_to_site = get_url(config.fc.clone());
let author = if let (Some(mail), Some(name)) = (config.fc.mail, config.fc.fullname.clone()) {
Some(format!("{mail} ({name})"))
} else {
None
};
let title = format!("Blog d'{}", config.fc.name.unwrap_or_default());
let lang = "fr";
let channel = Channel {
title: title.clone(),
link: link_to_site.clone(),
description: "Un fil qui parle d'informatique notamment".into(),
language: Some(lang.into()),
managing_editor: author.clone(),
webmaster: author,
pub_date: Some(Local::now().to_rfc2822()),
categories: ["blog", "blogging", "write", "writing"]
.iter()
.map(|&c| Category {
name: c.into(),
..Category::default()
})
.collect(),
generator: Some("ewp with rss crate".into()),
docs: Some("https://www.rssboard.org/rss-specification".into()),
image: Some(Image {
url: format!("{link_to_site}/icons/favicon-32x32.png"),
title: title.clone(),
link: link_to_site.clone(),
..Image::default()
}),
items: posts
.iter_mut()
.map(|p| {
// Get post data
p.fetch_content(&config.locations.data_dir);
// Build item
Item {
title: Some(p.title.clone()),
link: Some(format!("{}/blog/p/{}", link_to_site, p.url)),
description: p.content.clone(),
categories: p
.tags
.iter()
.map(|c| Category {
name: c.to_owned(),
..Category::default()
})
.collect(),
guid: Some(Guid {
value: format!("urn:hash:{}", {
let mut hasher = DefaultHasher::new();
p.hash(&mut hasher);
hasher.finish()
}),
permalink: false,
}),
pub_date: Some(
NaiveDateTime::parse_from_str(
&format!("{}-{}-{} 13:12:00", p.date.day, p.date.month, p.date.year),
"%d-%m-%Y %H:%M:%S",
)
.unwrap()
.and_local_timezone(Europe::Paris)
.unwrap()
.to_rfc2822(),
),
..Item::default()
}
})
.collect(),
atom_ext: Some(AtomExtension {
links: vec![Link {
href: format!("{link_to_site}/blog/rss"),
rel: "self".into(),
hreflang: Some(lang.into()),
mime_type: Some(MIME_TYPE_RSS.into()),
title: Some(title),
length: None,
}],
}),
..Channel::default()
};
std::str::from_utf8(&channel.write_to(Vec::new()).unwrap())
.unwrap()
.into()
}

View file

@ -1,78 +0,0 @@
use cached::proc_macro::once;
use glob::glob;
use std::fs::read_to_string;
use crate::utils::{
markdown::{File, FilePath},
metadata::MType,
misc::read_file,
};
/// Contact node
#[derive(Clone, Debug)]
pub struct Link {
pub service: String,
pub scope: Option<String>,
pub url: String,
}
#[once(time = 60)]
pub fn find_links(directory: String) -> Vec<Link> {
// TOML filename
let toml_file = "links.toml";
// Read the TOML file and parse it
let toml_str = read_to_string(format!("{directory}/{toml_file}")).unwrap_or_default();
let mut redirections = vec![];
match toml::de::from_str::<toml::Value>(&toml_str) {
Ok(data) => {
if let Some(section) = data.as_table() {
section.iter().for_each(|(key, value)| {
// Scopes are delimited with `/`
let (service, scope) = match key.split_once('/') {
Some((service, scope)) => (service.to_owned(), Some(scope.to_owned())),
None => (key.to_owned(), None),
};
redirections.push(Link {
service,
scope,
url: value.as_str().unwrap().to_owned(),
});
});
}
}
Err(_) => return vec![],
}
redirections
}
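A hypothetical `links.toml`, showing a bare service and a `/`-scoped one:

```rust
// Each key maps to a URL; a `/` in the key splits service and scope.
let toml_str = r#"
git = "https://git.mylloon.fr"
"git/ewp" = "https://git.mylloon.fr/Anri/mylloon.fr"
"#;
let data: toml::Value = toml::de::from_str(toml_str).unwrap();
// find_links would yield:
//   Link { service: "git", scope: None,        url: "https://git.mylloon.fr" }
//   Link { service: "git", scope: Some("ewp"), url: "https://git.mylloon.fr/Anri/mylloon.fr" }
assert!(data.as_table().is_some());
```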
pub fn remove_paragraphs(list: &mut [File]) {
list.iter_mut()
.for_each(|file| file.content = file.content.replace("<p>", "").replace("</p>", ""));
}
pub fn read(path: &FilePath) -> Vec<File> {
glob(&path.to_string())
.unwrap()
.map(|e| {
read_file(
path.from(&e.unwrap().to_string_lossy()),
MType::Contact,
None,
)
.unwrap()
})
.filter(|f| {
!f.metadata
.info
.contact
.clone()
.unwrap()
.hide
.unwrap_or_default()
})
.collect::<Vec<File>>()
}

View file

@ -1,90 +0,0 @@
use std::collections::HashMap;
use ramhorns::Content;
use reqwest::Error;
use crate::utils::github::{fetch_pr, ProjectState};
#[derive(Clone, Content, Debug)]
pub struct Project {
name: String,
url: String,
pub pulls_merged: Vec<Pull>,
pub pulls_open: Vec<Pull>,
pub pulls_closed: Vec<Pull>,
}
#[derive(Clone, Content, Debug)]
pub struct Pull {
url: String,
id: u32,
name_repo: String,
title: String,
state: u8,
}
pub async fn fetch() -> Result<Vec<Project>, Error> {
match fetch_pr().await {
Ok(projects) => {
let mut data: Vec<Project> = Vec::new();
// Group PRs by project
let mut map: HashMap<&str, Vec<Pull>> = HashMap::new();
for p in &projects {
let project = Pull {
url: p.contrib_url.clone(),
id: p.id,
name_repo: p.name.clone(),
title: p.title.clone(),
state: p.status as u8,
};
let project_name = p.name.as_str();
if map.contains_key(project_name) {
map.entry(project_name).and_modify(|v| v.push(project));
} else {
data.push(Project {
name: project_name.into(),
url: p.url.clone(),
pulls_merged: Vec::new(),
pulls_closed: Vec::new(),
pulls_open: Vec::new(),
});
map.insert(project_name, vec![project]);
}
}
// Distribute each PR into the right vector
for d in &mut data {
map.get(d.name.as_str()).unwrap().iter().for_each(|p| {
let state = p.state.try_into().unwrap();
match state {
ProjectState::Closed => d.pulls_closed.push(p.to_owned()),
ProjectState::Merged => d.pulls_merged.push(p.to_owned()),
ProjectState::Open => d.pulls_open.push(p.to_owned()),
}
});
let mut name: Vec<char> = d.name.replace('-', " ").chars().collect();
name[0] = name[0].to_uppercase().next().unwrap();
d.name = name.into_iter().collect();
}
// Ascending order by pull IDs
for d in &mut data {
d.pulls_closed.reverse();
d.pulls_merged.reverse();
d.pulls_open.reverse();
}
// Descending order by number of merged pulls
data.sort_by(|a, b| {
b.pulls_merged
.len()
.partial_cmp(&a.pulls_merged.len())
.unwrap()
});
Ok(data)
}
Err(e) => Err(e),
}
}

View file

@ -1,113 +0,0 @@
use std::{cmp::Ordering, path::Path};
use cached::proc_macro::once;
use regex::Regex;
use serde::Serialize;
#[derive(Clone, Debug, Serialize, PartialEq, Eq)]
pub struct FileNode {
name: String,
is_dir: bool,
children: Vec<FileNode>,
}
impl Ord for FileNode {
fn cmp(&self, other: &Self) -> Ordering {
match (self.is_dir, other.is_dir) {
// If both are directories or both are files, compare names
(true, true) | (false, false) => self.name.cmp(&other.name),
// If self is directory and other is file, self comes first
(true, false) => Ordering::Less,
// If self is file and other is directory, other comes first
(false, true) => Ordering::Greater,
}
}
}
impl PartialOrd for FileNode {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
#[once(time = 600)]
pub fn get_filetree(
initial_dir: &str,
exclusion_list: &[String],
exclusion_patterns: &[Regex],
) -> FileNode {
gen_filetree(initial_dir, exclusion_list, exclusion_patterns).unwrap_or(FileNode {
name: String::new(),
is_dir: false,
children: vec![],
})
}
fn gen_filetree(
dir_path: &str,
exclusion_list: &[String],
exclusion_patterns: &[Regex],
) -> Result<FileNode, std::io::Error> {
let mut children: Vec<FileNode> = std::fs::read_dir(dir_path)?
.filter_map(Result::ok)
.filter_map(|entry| {
let entry_path = entry.path();
let entry_name = entry_path.file_name()?.to_string_lossy().to_string();
// Exclusion checks
if excluded(&entry_name, exclusion_list, exclusion_patterns) {
return None;
}
if entry_path.is_file() {
Some(FileNode {
name: entry_name,
is_dir: false,
children: vec![],
})
} else {
// Exclude empty directories
let children_of_children = gen_filetree(
entry_path.to_str().unwrap(),
exclusion_list,
exclusion_patterns,
);
if let Ok(coc) = children_of_children {
if coc.is_dir && coc.children.is_empty() {
None
} else {
Some(coc)
}
} else {
None
}
}
})
.collect();
children.sort();
Ok(FileNode {
name: Path::new(dir_path)
.file_name()
.unwrap()
.to_string_lossy()
.to_string(),
is_dir: true,
children,
})
}
pub fn excluded(element: &str, exclusion_list: &[String], exclusion_patterns: &[Regex]) -> bool {
if exclusion_list
.iter()
.any(|excluded_term| element.contains(excluded_term))
{
return true;
}
if exclusion_patterns.iter().any(|re| re.is_match(element)) {
return true;
}
false
}
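Exclusions combine plain substrings and compiled regexes. A sketch:

```rust
use regex::Regex;

let list = vec!["private".to_string()];
let patterns = vec![Regex::new(r"^\.").unwrap()]; // dotfiles
assert!(excluded("private-notes", &list, &patterns));
assert!(excluded(".git", &list, &patterns));
assert!(!excluded("cours", &list, &patterns));
```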

View file

@ -1,4 +0,0 @@
pub mod blog;
pub mod contact;
pub mod contrib;
pub mod cours;

View file

@ -1,13 +1,21 @@
@import "../markdown.css";
@media (prefers-color-scheme: light) {
:root {
--code-font-color: #333333;
--code-bg-color: #eeeeee;
--quote-border-color: #9852fa;
--quote-bg-color: #d8d6d6;
--separator-color: #cccccc;
--tag-bg-color: #d2e0f0;
}
}
@media (prefers-color-scheme: dark) {
:root {
--code-font-color: #eeeeee;
--code-bg-color: #333333;
--quote-border-color: #bd93f9;
--quote-bg-color: #273341;
--separator-color: #414558;
--tag-bg-color: #242e38;
}
}
@ -16,6 +24,11 @@
--max-width: 750px;
}
/* Page */
html {
scroll-behavior: smooth;
}
body {
max-width: var(--max-width);
margin: auto;
@ -57,8 +70,49 @@ main {
max-width: 100%;
}
/* Anchors */
:is(h1, h2, h3, h4, h5, h6):hover a.anchor::before {
visibility: visible;
}
a.anchor::before {
content: "#";
visibility: hidden;
padding-right: 0.1em;
}
a.anchor {
text-decoration: none;
vertical-align: baseline;
}
/* Links in headers */
:is(h1, h2, h3, h4, h5, h6) a {
font-size: inherit;
}
/* Separators */
hr {
border: 0;
height: 1px;
background: var(--separator-color);
}
/* Quotes */
blockquote {
margin: 1em 0;
padding: 0.1em 10px;
border-left: 6px solid;
border-color: var(--quote-border-color);
background: var(--quote-bg-color);
border-top-right-radius: 5px;
border-bottom-right-radius: 5px;
}
/* Images */
img {
display: block;
margin: auto;
max-width: var(--max-width);
}
@ -68,6 +122,115 @@ code {
font-family: monospace;
}
/* Little snippet of code (not blocks) */
kbd,
code:not(.hljs):not(:has(svg)) {
background: var(--code-bg-color);
border-radius: 3px;
color: var(--code-font-color);
box-shadow: 0 1px 1px black;
font-size: calc(var(--font-size) * 0.8);
padding: 2px 4px;
vertical-align: 1.5px;
}
/* Code blocks */
.hljs {
border-radius: 5px;
}
.hljs::-webkit-scrollbar {
width: 7px;
height: 9px;
background: var(--background);
}
.hljs::-webkit-scrollbar-thumb {
background-color: var(--font-color);
border-radius: 10px;
}
/* Margin for line numbers */
.hljs-ln-n {
margin-right: 0.4em;
}
/* Numbers in codeblocks */
.hljs-ln-numbers {
text-align: right;
color: var(--font-color);
}
/* Fix scroll in codeblocks with line numbering */
table.hljs-ln {
overflow: hidden;
}
/* Background for copy code button */
.hljs-copy-button {
background-color: var(--background) !important;
}
/* Light theme for the copy code button */
@media (prefers-color-scheme: light) {
.hljs-copy-button {
background-color: var(--font-color) !important;
filter: invert(100%);
}
}
/* Hide last line in codeblocks if empty */
.hljs-ln
> tbody
> tr:last-child:has(td:last-child > span::-moz-only-whitespace) {
visibility: collapse;
}
/* Temporary fix, as layout.css.has-selector.enabled is only available
* in Firefox under certain circumstances */
.hljs-ln > tbody > tr:last-child {
visibility: collapse;
}
/* Reference to footnotes */
.footnote-ref a {
text-decoration: underline dotted;
font-size: calc(var(--font-size) * 0.8);
}
/* Footnote */
section.footnotes * {
font-size: calc(var(--font-size) * 0.8);
}
/* When multiple ref */
a.footnote-backref sup {
font-size: calc(var(--font-size) * 0.6);
}
a.footnote-backref sup::before {
content: "(";
}
a.footnote-backref sup::after {
content: ")";
}
/* Footnotes links */
a.footnote-backref {
font-family: "Segoe UI", "Segoe UI Symbol", system-ui;
text-decoration: underline dotted;
}
/* Footnotes block separation from article */
section.footnotes {
margin: 3px;
border-top: 2px dotted var(--separator-color);
}
/* Mermaid diagrams */
pre:has(code.language-mermaid) {
text-align: center;
}
/* Table of content */
nav#toc {
position: fixed;
@ -83,3 +246,36 @@ nav#toc {
visibility: hidden;
}
}
@media print {
/* Better colors for paper */
blockquote {
border-color: black;
background: var(--background);
}
.hljs {
background: var(--background);
}
/* Force line numbering to be on top */
td.hljs-ln-line {
vertical-align: top;
}
/* Break code */
code.hljs {
white-space: break-spaces;
hyphens: none;
}
/* Hide arrows of backref */
a.footnote-backref {
visibility: hidden;
}
/* No underline for footnotes */
.footnote-ref > a {
text-decoration: none;
}
}

View file

@ -1,28 +1,9 @@
@import "markdown.css";
:root {
--max-width: 900px;
--sidebar-width: 20%;
--sidebar-padding: 10px;
--shift-icon-filetree: -10px;
/* Size of icon */
--shift-icon-filetree-size: 80%;
--shift-icon-filetree-height: 2;
}
main {
max-width: var(--max-width);
padding-inline: calc(var(--sidebar-width) + var(--sidebar-padding));
}
/* Filetree */
aside {
float: left;
z-index: 1;
position: absolute;
width: var(--sidebar-width);
padding-inline: var(--sidebar-padding);
margin-left: 20px;
position: sticky;
top: 0;
}
aside ul {
@ -48,73 +29,25 @@ aside li.collapsed > ul {
}
aside li.directory::before {
content: "📁";
margin-left: var(--shift-icon-filetree);
font-size: var(--shift-icon-filetree-size);
line-height: var(--shift-icon-filetree-height);
content: "+";
}
aside li:not(.collapsed).directory::before {
content: "📂";
margin-left: var(--shift-icon-filetree);
font-size: var(--shift-icon-filetree-size);
line-height: var(--shift-icon-filetree-height);
content: "-";
}
aside li.directory {
cursor: pointer;
}
aside a {
text-decoration: none;
}
/* Image */
main img {
max-width: 100%;
}
/* Collapse filetree */
button#menu {
display: none;
}
/* Search field */
input[type="text"] {
outline: none;
}
/* breakpoint */
@media only screen and (max-width: 740px) {
aside {
display: none;
width: unset;
}
main {
padding-inline: unset;
}
button#menu {
width: 100%;
display: block;
position: sticky;
top: 10px;
z-index: 1;
}
input {
width: 85vw;
}
}
@media print {
aside {
display: none;
}
main {
padding-inline: unset;
visibility: hidden;
}
}
main img {
max-width: 100%;
display: block;
margin: auto;
}

View file

@ -1,262 +0,0 @@
@media (prefers-color-scheme: light) {
:root {
--separator-color: #cccccc;
}
}
@media (prefers-color-scheme: dark) {
:root {
--separator-color: #414558;
}
}
/* Page */
html {
scroll-behavior: smooth;
}
/* Anchors */
main :is(h1, h2, h3, h4, h5, h6):hover a.anchor::before {
visibility: visible;
}
main a.anchor::before {
content: "#";
visibility: hidden;
padding-right: 0.1em;
}
main a.anchor {
text-decoration: none;
vertical-align: baseline;
}
/* Links in headers */
:is(h1, h2, h3, h4, h5, h6) a {
font-size: inherit;
}
/* Images */
main img {
display: block;
margin: auto;
}
/* Separators */
hr {
border: 0;
height: 1px;
background: var(--separator-color);
}
/* Quotes */
blockquote {
margin: 1em 0;
padding: 0.1em 10px;
border-left: 6px solid;
border-color: light-dark(#9852fa, #bd93f9);
background: light-dark(#d8d6d6, #273341);
border-top-right-radius: 5px;
border-bottom-right-radius: 5px;
}
/* Little snippet of code (not blocks) */
kbd,
code:not(.hljs):not(:has(svg)) {
background: light-dark(#eeeeee, #333333);
border-radius: 3px;
color: light-dark(#333333, #eeeeee);
box-shadow: 0 1px 1px black;
font-size: calc(var(--font-size) * 0.8);
padding: 2px 4px;
vertical-align: 1.5px;
}
/* Code blocks */
.hljs {
border-radius: 5px;
}
.hljs::-webkit-scrollbar {
width: 7px;
height: 9px;
background: var(--background);
}
.hljs::-webkit-scrollbar-thumb {
background-color: var(--font-color);
border-radius: 10px;
}
/* Margin for line numbers */
.hljs-ln-n {
margin-right: 0.4em;
}
/* Numbers in codeblocks */
.hljs-ln-numbers {
text-align: right;
color: var(--font-color);
}
/* Fix scroll in codeblocks with line numbering */
table.hljs-ln {
overflow: hidden;
}
/* Background for copy code button */
.hljs-copy-button {
background-color: var(--background) !important;
}
/* Light theme for the copy code button */
@media (prefers-color-scheme: light) {
.hljs-copy-button {
background-color: var(--font-color) !important;
filter: invert(100%);
}
}
/* Hide last line in codeblocks if empty */
.hljs-ln
> tbody
> tr:last-child:has(td:last-child > span::-moz-only-whitespace) {
visibility: collapse;
}
/* Temporary fix, as layout.css.has-selector.enabled is only available
* in Firefox under certain circumstances */
.hljs-ln > tbody > tr:last-child {
visibility: collapse;
}
/* Reference to footnotes */
.footnote-ref a {
text-decoration: underline dotted;
font-size: calc(var(--font-size) * 0.8);
}
/* Footnote */
section.footnotes * {
font-size: calc(var(--font-size) * 0.8);
}
/* Highlight footnote on click */
section.footnotes :target {
background-color: light-dark(#fafa35b9, #adad213c);
}
/* When multiple ref */
a.footnote-backref sup {
font-size: calc(var(--font-size) * 0.6);
}
a.footnote-backref sup::before {
content: "(";
}
a.footnote-backref sup::after {
content: ")";
}
/* Footnotes links */
a.footnote-backref {
font-family: "Segoe UI", "Segoe UI Symbol", system-ui;
text-decoration: underline dotted;
}
/* Footnotes block separation from content */
section.footnotes {
margin: 3px;
border-top: 2px dotted var(--separator-color);
}
/* Mermaid diagrams */
pre:has(code.language-mermaid) {
text-align: center;
}
@media (prefers-color-scheme: dark) {
.language-mermaid {
filter: invert(0.9);
}
}
/* Tables */
table:not(.hljs-ln) {
border-collapse: collapse;
margin-inline: auto;
}
table:not(.hljs-ln) th,
table:not(.hljs-ln) td {
padding: 5px;
border: 1px solid var(--separator-color);
}
table:not(.hljs-ln) th {
border-bottom: 2px solid var(--separator-color);
}
/* No borders on the outer edges of the table */
table:not(.hljs-ln) tr:last-child td {
border-bottom: 0;
}
table:not(.hljs-ln) tr:first-child th {
border-top: 0;
}
table:not(.hljs-ln) tr td:first-child,
table:not(.hljs-ln) tr th:first-child {
border-left: 0;
}
table:not(.hljs-ln) tr td:last-child,
table:not(.hljs-ln) tr th:last-child {
border-right: 0;
}
@media print {
/* Better colors for paper */
blockquote {
border-color: black;
background: var(--background);
}
.hljs {
background: var(--background);
}
/* Force line numbering to be on top */
td.hljs-ln-line {
vertical-align: top;
}
/* Break code */
code.hljs {
white-space: break-spaces;
hyphens: none;
}
/* Hide arrows of backref */
a.footnote-backref {
visibility: hidden;
}
/* No underline for footnotes */
.footnote-ref > a {
text-decoration: none;
}
/* Prevent figures from splitting across pages */
article *:has(img),
table:not(.hljs-ln),
table:not(.hljs-ln) > * {
page-break-inside: avoid;
}
@page {
@bottom-right {
content: counter(page) "/" counter(pages);
}
}
}

View file

@ -94,9 +94,3 @@ header nav a:hover {
text-underline-position: under;
}
}
@media (prefers-color-scheme: dark) {
img {
filter: brightness(0.8) contrast(1.2);
}
}

View file

@ -1,71 +1,47 @@
/**
* Clean URLs from anchors
* @param {string} url Link URL
* @returns The cleaned link URL
*/
const cleanURL = (url) => url.split("#")[0];
/**
* Capitalize a text
* @param {string} text Input text
* @returns The capitalized text
*/
const capitalize = (text) =>
text.length === 0 ? text : text[0].toUpperCase() + text.substring(1);
/**
* Build the filetree
* @param {HTMLElement} parent Root element of the filetree
* @param {{name: string, is_dir: boolean, children: any[]}[]} data FileNode
* @param {{name: string, is_dir: boolean, children: any[]}} data FileNode
* @param {string} location Current location, used for links creation
*/
const buildFileTree = (parent, data, location) => {
const fragment = document.createDocumentFragment();
const ul = document.createElement("ul");
fragment.appendChild(ul);
data.forEach((item) => {
const li = document.createElement("li");
li.classList.add(item.is_dir ? "directory" : "file");
if (item.is_dir) {
// Directory
li.textContent = capitalize(item.name);
li.textContent = item.name;
li.classList.add("collapsed");
// Toggle collapsing on click
li.addEventListener(
"click",
(e) => {
if (e.target === li) {
li.classList.toggle("collapsed");
}
},
{ passive: true }
);
li.addEventListener("click", function (e) {
if (e.target === li) {
li.classList.toggle("collapsed");
}
});
} else {
// File
const url = cleanURL(window.location.href).split("?")[0];
const url = window.location.href.split("?")[0];
const a = document.createElement("a");
a.text = capitalize(
item.name.endsWith(".md") ? item.name.slice(0, -3) : item.name
);
a.text = item.name;
a.href = `${url}?q=${location}${item.name}`;
li.appendChild(a);
}
ul.appendChild(li);
if (item.children?.length) {
if (item.children && item.children.length > 0) {
buildFileTree(
li,
item.children,
item.is_dir ? `${location}${item.name}/` : location
item.is_dir ? location + `${item.name}/` : location
);
}
});
parent.appendChild(fragment);
parent.appendChild(ul);
};
/**
@ -83,20 +59,13 @@ const uncollapse = (element) => {
* Find the deepest opened directory
* @param {string[]} path Current path we are looking at, init with fullpath
* @param {NodeListOf<ChildNode>} options Options we have, init with list root
* @returns The deepest node
* @returns
*/
const deepestNodeOpened = (path, options) => {
if (path[0] === "") {
return options[0].parentNode.parentNode;
}
// Iterate over possible options
for (let i = 0; i < options.length; ++i) {
// If the directory and the current path match
if (
decodeURI(path[0]).toLowerCase() ===
options[i].firstChild.nodeValue.toLowerCase()
) {
if (decodeURI(path[0]) === options[i].firstChild.nodeValue) {
if (path.length === 1) {
// We found it
return options[i];
@ -111,42 +80,59 @@ const deepestNodeOpened = (path, options) => {
}
};
/**
* Search the filetree; when the query is empty, return to the initial state
* @param {string} query Query
* @param {HTMLElement} parent Filetree
* @param {HTMLLIElement} currentFile Current file opened
*/
const searchFiles = (query, parent, currentFile) => {
// Prevent blocking the main thread
requestAnimationFrame(() => {
const children = parent.querySelectorAll("li");
const svgDarkTheme = () => {
for (const item of document.getElementsByTagName("img")) {
if (!item.src.startsWith("data:image/svg+xml;base64,")) {
// Exclude images that aren't SVG and base64 encoded
break;
}
const normalizedQuery = query.toLowerCase().trim();
if (normalizedQuery === "") {
children.forEach((item) => {
item.style.display = "";
if (
item.classList.contains("directory") &&
!item.classList.contains("collapsed")
) {
item.classList.add("collapsed");
/** Convert to grayscale */
const colorToGrayscale = (color) => {
return 0.3 * color.r + 0.59 * color.g + 0.11 * color.b;
};
/** Extract color using canvas2d */
const extractColors = (image) => {
const canvas = document.createElement("canvas");
canvas.width = image.width;
canvas.height = image.height;
const ctx = canvas.getContext("2d");
ctx.drawImage(image, 0, 0);
const imageData = ctx.getImageData(
0,
0,
Math.max(1, canvas.width),
Math.max(1, canvas.height)
);
const pixelData = imageData.data;
const colors = [];
for (let i = 0; i < pixelData.length; i += 4) {
if (pixelData[i + 3] > 0) {
colors.push({
r: pixelData[i],
g: pixelData[i + 1],
b: pixelData[i + 2],
});
}
});
uncollapse(currentFile);
return;
}
for (const item of children) {
const matches = item.innerText.toLowerCase().includes(normalizedQuery);
if (matches) {
item.style.display = "";
uncollapse(item);
continue;
}
item.style.display = "none";
return colors;
};
// Extract colors
const colors = extractColors(item);
// Calculate the average grayscale value
const grayscaleValues = colors.map(colorToGrayscale);
const totalGrayscale = grayscaleValues.reduce((acc, val) => acc + val, 0);
const averageGrayscale = totalGrayscale / grayscaleValues.length;
if (averageGrayscale < 128) {
item.style = "filter: invert(1);";
}
});
}
};
window.addEventListener("load", () => {
@ -162,58 +148,20 @@ window.addEventListener("load", () => {
dataElement.remove();
// Open nested opened directories
const infoURL = cleanURL(window.location.href).split("?");
const fullpath = infoURL.length > 1 ? infoURL[1].substring(2) : "index.md";
const path = fullpath.substring(0, fullpath.lastIndexOf("/"));
const currentlyOpen = deepestNodeOpened(
path.split("/"),
fileTreeElement.querySelector("ul").childNodes
);
uncollapse(currentlyOpen);
// Bold opened file
const openedFile = decodeURI(fullpath.split("/").at(-1));
currentlyOpen.querySelector("ul").childNodes.forEach((el) => {
const elementToCompare = decodeURI(
el.firstChild.search
? el.firstChild.search.substring(3).split("/").at(-1)
: el.firstChild.data
const infoURL = window.location.href.split("?");
if (infoURL.length > 1) {
const fullpath = infoURL[1].substring(2);
const path = fullpath.substring(0, fullpath.lastIndexOf("/"));
const last_opened = deepestNodeOpened(
path.split("/"),
fileTreeElement.querySelector("ul").childNodes
);
if (elementToCompare === openedFile) {
el.style.fontWeight = "bold";
}
});
uncollapse(last_opened);
}
// Search bar hook
document.getElementById("searchBar").addEventListener("input", (e) => {
searchFiles(e.target.value, fileTreeElement, currentlyOpen);
});
// Responsive menu
let menuOpen = false;
const button = document.getElementById("menu");
const content = document.getElementsByTagName("main")[0];
const initialButtonTextContent = button.textContent;
const resetPage = () => {
menuOpen = !menuOpen;
if (menuOpen) {
fileTreeElement.style.display = "block";
content.style.display = "none";
button.textContent = "Fermer le menu";
return;
}
fileTreeElement.style.display = "";
content.style.display = "";
button.textContent = initialButtonTextContent;
};
button.addEventListener("click", resetPage);
window.addEventListener("resize", () => {
if (menuOpen && window.innerWidth > 640) {
resetPage();
}
});
// Fix SVG images in dark mode
if (window.matchMedia("(prefers-color-scheme: dark)").matches) {
svgDarkTheme();
}
});

View file

@ -14,13 +14,19 @@ window.addEventListener("load", () => {
`;
const mono = "font-family: monospace";
let tags = [
const tags = [
new Tag("Comment fonctionne un PC 😵‍💫"),
new Tag("undefined", mono),
new Tag("[object Object]", mono),
new Tag("/api/v1/love", mono),
new Tag("/api/v1/websites", mono),
new Tag("Peak D2 sur Valo 🤡"),
new Tag(
"0x520",
`
background: linear-gradient(to bottom right, red 0%, red 50%, black 50%);
${clipping_text}
text-shadow: 0px 0px 20px light-dark(transparent, var(--font-color));
`
),
new Tag("Nul en CSS", "font-family: 'Comic Sans MS', TSCu_Comic, cursive"),
new Tag("anri k... caterpillar 🐛☝️"),
new Tag(
@ -31,48 +37,33 @@ window.addEventListener("load", () => {
text-shadow: 0px 0px 20px light-dark(var(--font-color), transparent);
`
),
new Tag("Étudiant"),
new Tag(
"Free Palestine",
`
background: conic-gradient(at 30% 60%, transparent 230deg, red 0, red 310deg, transparent 0),
linear-gradient(to bottom, black 45%, white 45%, white 67%, DarkGreen 67%);
${clipping_text}
text-shadow: 0px 0px 20px var(--font-color);
`
),
new Tag("School hater"),
new Tag("Stagiaire"),
new Tag("Rempli de malice"),
new Tag("anri.exe", mono),
new Tag(
"#NouveauFrontPopulaire ✊",
`
background: linear-gradient(to bottom, #4fb26b 0%, #4fb26b 36%, \
#e62e35 36%, #e62e35 50%, \
#feeb25 50%, #feeb25 62%, \
#724e99 62%, #724e99 77%, \
#e73160 77%);
${clipping_text}
text-shadow: 0px 0px 20px light-dark(var(--font-color), transparent);
`
),
new Tag("s/centre/droite/g", mono),
];
const hour = new Date().getHours();
if (hour <= 8 || hour >= 18) {
tags = tags.concat([
new Tag(
"0x520",
`
background: linear-gradient(to bottom right, red 0%, red 50%, black 50%);
${clipping_text}
text-shadow: 0px 0px 20px light-dark(transparent, var(--font-color));
`
),
new Tag("School hater"),
new Tag(
"Free Palestine",
`
background: conic-gradient(at 30% 60%, transparent 230deg, red 0, red 310deg, transparent 0),
linear-gradient(to bottom, black 45%, white 45%, white 67%, DarkGreen 67%);
${clipping_text}
text-shadow: 0px 0px 20px var(--font-color);
`
),
new Tag(
"#NouveauFrontPopulaire ✊",
`
background: linear-gradient(to bottom, #4fb26b 0%, #4fb26b 36%, \
#e62e35 36%, #e62e35 50%, \
#feeb25 50%, #feeb25 62%, \
#724e99 62%, #724e99 77%, \
#e73160 77%);
${clipping_text}
text-shadow: 0px 0px 20px light-dark(var(--font-color), transparent);
`
),
new Tag("s/centre/droite/g", mono),
]);
}
const random = Math.round(Math.random() * (tags.length - 1));
const element = document.getElementById("subname");
element.textContent = tags[random].variant;

View file

@ -2,19 +2,15 @@ window.addEventListener("load", () => {
const macros = {};
for (const item of new Map(
Object.entries({
B: "mathbb{B}",
N: "mathbb{N}",
R: "mathbb{R}",
Z: "mathbb{Z}",
O: "Theta",
Tau: "mathrm{T}",
u: "mu",
ra: "rightarrow",
la: "leftarrow",
RA: "Rightarrow",
LA: "Leftarrow",
lb: "llbracket",
rb: "rrbracket",
u: "mu",
})
)[Symbol.iterator]()) {
const bs = "\\";

View file

@ -1,10 +1,11 @@
import mermaid from "https://cdn.jsdelivr.net/npm/mermaid@10/dist/mermaid.esm.min.mjs";
window.addEventListener("load", async () => {
mermaid.initialize({
startOnLoad: false,
theme: "default",
});
const theme = window.matchMedia("(prefers-color-scheme: dark)").matches
? "dark" // dark
: "default"; // light
mermaid.initialize({ startOnLoad: false, theme: theme });
await mermaid.run({
querySelector: ".language-mermaid",

View file

@ -1,105 +0,0 @@
const Mode = {
Light: 1,
Dark: 2,
};
/**
* Change the svg color theme based on the mode
* @param {Mode} mode
*/
const svgChangeTheme = (mode) => {
for (const item of document.getElementsByTagName("img")) {
if (!item.src.startsWith("data:image/svg+xml;base64,")) {
// Exclude images that aren't SVG and base64 encoded
continue;
}
/**
* Convert to grayscale
* @param {{r: number, g: number, b: number}} color
* @returns Number between 0 and 255
*/
const colorToGrayscale = (color) => {
return 0.3 * color.r + 0.59 * color.g + 0.11 * color.b;
};
/**
* Extract color using canvas2d
* @param {HTMLImageElement} image Image source
* @returns Colors representing the image
*/
const extractColors = (image) => {
const canvas = document.createElement("canvas");
canvas.width = image.naturalWidth;
canvas.height = image.naturalHeight;
const ctx = canvas.getContext("2d");
ctx.drawImage(image, 0, 0);
const imageData = ctx.getImageData(
0,
0,
Math.max(1, canvas.width),
Math.max(1, canvas.height)
);
const pixelData = imageData.data;
const colors = [];
for (let i = 0; i < pixelData.length; i += 4) {
if (pixelData[i + 3] > 0) {
colors.push({
r: pixelData[i],
g: pixelData[i + 1],
b: pixelData[i + 2],
});
}
}
return colors;
};
// Extract colors
const colors = extractColors(item);
// Calculate the average grayscale value
const grayscaleValues = colors.map(colorToGrayscale);
const totalGrayscale = grayscaleValues.reduce((acc, val) => acc + val, 0);
const averageGrayscale = totalGrayscale / grayscaleValues.length;
const treshold = 128;
const style = "filter: ";
const dim = "brightness(0.8) contrast(1.2)";
if (averageGrayscale < treshold && mode === Mode.Dark) {
item.style = style + dim + " invert(1);";
continue;
}
if (averageGrayscale > treshold && mode === Mode.Light) {
item.style = style + "invert(1);";
continue;
}
if (mode === Mode.Dark) {
item.style = style + `${dim};`;
continue;
}
item.style = "";
}
};
window.addEventListener("load", () => {
// Fix SVG images
svgChangeTheme(
window.matchMedia("(prefers-color-scheme: dark)").matches
? Mode.Dark
: Mode.Light
);
});
window
.matchMedia("(prefers-color-scheme: dark)")
.addEventListener("change", (event) =>
svgChangeTheme(event.matches ? Mode.Dark : Mode.Light)
);
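The `0.3/0.59/0.11` weights used by `colorToGrayscale` are the classic Rec. 601 luma approximation; the same decision rule, sketched in Rust for reference:

```rust
/// Rec. 601 luma approximation of an RGB pixel.
fn luma(r: f64, g: f64, b: f64) -> f64 {
    0.3 * r + 0.59 * g + 0.11 * b
}

fn main() {
    // A mostly-dark SVG (average luma below the 128 threshold)
    // gets inverted when the dark theme is active.
    assert!(luma(30.0, 30.0, 30.0) < 128.0);
}
```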

View file

@ -1,5 +1,5 @@
<!DOCTYPE html>
<html lang="{{lang}}">
<html lang="fr">
<head dir="ltr">
{{>head.html}}
</head>

View file

@ -1,5 +1,5 @@
<!DOCTYPE html>
<html lang="{{lang}}">
<html lang="fr">
<head dir="ltr">
{{>head.html}}
<link

View file

@ -1,5 +1,5 @@
<!DOCTYPE html>
<html lang="{{lang}}">
<html lang="fr">
<head dir="ltr">
{{>head.html}}
<link
@ -38,6 +38,5 @@
{{#syntax_highlight}}{{>libs/hljs_footer.html}}{{/syntax_highlight}}
{{#mail_obfsucated}}{{>libs/mail_obfuscater.html}}{{/mail_obfsucated}}
{{/metadata}} {{/post}} {{/data}}
<script src="/js/markdown.js"></script>
</body>
</html>

View file

@ -1,5 +1,5 @@
<!DOCTYPE html>
<html lang="{{lang}}">
<html lang="fr">
<head dir="ltr">
{{>head.html}}
<link rel="stylesheet" href="/css/contact.css" />

View file

@ -1,5 +1,5 @@
<!DOCTYPE html>
<html lang="{{lang}}">
<html lang="fr">
<head dir="ltr">
{{>head.html}}
<link rel="stylesheet" href="/css/contrib.css" />

View file

@ -1,5 +1,5 @@
<!DOCTYPE html>
<html lang="{{lang}}">
<html lang="fr">
<head dir="ltr">
{{>head.html}}
<link rel="stylesheet" href="/css/cours.css" />
@ -11,14 +11,12 @@
<body>
<header>{{>navbar.html}}</header>
<button type="button" id="menu">Ouvrir le menu</button>
<aside>
<input type="text" id="searchBar" placeholder="Recherche..." autofocus />
<span data-json="{{filetree}} "></span>
</aside>
<main>
{{^content}}
<p>Fichier introuvable ou invalide.</p>
<p>Fichier introuvable</p>
{{/content}} {{#content}}
<article>{{&content}}</article>
</main>
@ -29,6 +27,5 @@
{{#mail_obfsucated}}{{>libs/mail_obfuscater.html}}{{/mail_obfsucated}}
{{/metadata}} {{/content}} {{/data}}
<script src="/js/cours.js"></script>
<script src="/js/markdown.js"></script>
</body>
</html>

View file

@ -1,5 +1,5 @@
<!DOCTYPE html>
<html lang="{{lang}}">
<html lang="fr">
<head dir="ltr">
{{>head.html}}
<link rel="stylesheet" href="/css/index.css" />

View file

@ -32,7 +32,7 @@
>Contact</a
></p>
</li> --><!-- <li>
</li> --><li>
<p><a
class="_ {{#contrib}}bold{{/contrib}}"
@ -41,12 +41,13 @@
>Contributions</a
></p>
</li> --><li>
</li><li>
<p><a
class="_ {{#cours}}bold{{/cours}}"
href="/cours"
href="https://univ.mylloon.fr"
title="Page des notes de cours"
target="_blank"
>Cours</a
></p>

View file

@ -1,5 +1,5 @@
<!DOCTYPE html>
<html lang="{{lang}}">
<html lang="fr">
<head dir="ltr">
{{>head.html}}
<link rel="stylesheet" href="/css/portfolio.css" />

View file

@ -1,5 +1,5 @@
<!DOCTYPE html>
<html class="index" lang="{{lang}}">
<html class="index" lang="fr">
<head dir="ltr">
<title>{{page_title}}{{#page_title}} - {{/page_title}}{{app_name}}</title>
<meta charset="UTF-8" />