Compare commits

...

29 commits

SHA1        Date                        Message
8f5895e9ba  2024-11-06 14:03:24 +01:00  Respect toc attribute in metadata
            All checks were successful: PR Check / lint-and-format (pull_request) successful in 2m57s
dde2ca0172  2024-11-06 14:03:24 +01:00  wip: quick and dumb implementation of toc
87d0fa3c11  2024-10-24 20:19:26 +02:00  'a instead of 'de
7f3434b7c1  2024-10-24 20:17:31 +02:00  no sync needed
13d7b54c27  2024-10-22 12:07:05 +02:00  build at first
984ecb6b69  2024-10-22 12:05:38 +02:00  bump dependencies (#79): lol_html and comrak (Reviewed-on: #79)
5a15945439  2024-10-22 12:00:13 +02:00  fix img
e9441dba46  2024-10-22 11:49:57 +02:00  PR Check
3f3efe4afa  2024-10-08 13:21:30 +02:00  add lb and rb
37b51bcbee  2024-10-03 13:06:56 +02:00  Add Tau
3cc69f3d4f  2024-09-22 17:02:03 +02:00  no longer stagiaire
7432ffd5f9  2024-09-19 15:53:28 +02:00  clickable badge
764a632ae6  2024-09-19 15:50:37 +02:00  /api/v1/websites
396bff909e  2024-09-14 21:01:30 +02:00  update dependencies
fb44c25e47  2024-09-14 20:17:11 +02:00  clippy
a7aec1b94e  2024-08-29 23:31:35 +02:00  add countdown
8cedeb531d  2024-08-28 16:43:30 +02:00  cleanup using https://github.com/actions/checkout/issues/1830#issuecomment-2314758792 answer
912f16e0c3  2024-08-13 13:21:25 +02:00  remove jas
999d68ab60  2024-08-13 13:01:20 +02:00  includes images in releases
2d9fc0d559  2024-08-13 11:55:12 +02:00  only run workflow manually
8c386d5ac6  2024-08-11 16:49:06 +02:00  use forgejo actions (#72)
            Some checks failed: Publish latest version / build (push) has been cancelled
95b92699ed  2024-07-23 10:26:19 +02:00  update dependencies
            All checks were successful: ci/woodpecker/push/publish pipeline was successful
2dc54a6f76  2024-07-15 10:24:51 +02:00  turn out to be weird with blockquote
            All checks were successful: ci/woodpecker/push/publish pipeline was successful
deb54372a2  2024-07-13 00:29:07 +02:00  update comrak
            All checks were successful: ci/woodpecker/push/publish pipeline was successful
485797c64f  2024-07-12 17:52:38 +02:00  update comrak
            Some checks are pending: ci/woodpecker/push/publish pipeline is pending approval
5b43730150  2024-07-10 23:02:49 +02:00  update dependencie "cached"
            All checks were successful: ci/woodpecker/push/publish pipeline was successful
b145510d83  2024-07-10 22:58:35 +02:00  custom selection color
            All checks were successful: ci/woodpecker/push/publish pipeline was successful
847ec0d3c3  2024-07-10 19:56:36 +02:00  sursis
            All checks were successful: ci/woodpecker/push/publish pipeline was successful
e0b59130ee  2024-06-29 13:06:59 +02:00  macron
            All checks were successful: ci/woodpecker/push/publish pipeline was successful
17 changed files with 939 additions and 704 deletions


@@ -0,0 +1,23 @@
+name: PR Check
+
+on:
+  pull_request:
+    types: [opened, synchronize, reopened]
+
+jobs:
+  lint-and-format:
+    container:
+      image: cimg/rust:1.81-node
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Build
+        run: cargo build
+      - name: Run format check
+        run: cargo fmt --check
+      - name: Run Clippy
+        run: cargo clippy


@@ -0,0 +1,47 @@
+name: Publish latest version
+
+on:
+  workflow_dispatch:
+
+jobs:
+  build:
+    container:
+      image: ghcr.io/catthehacker/ubuntu:act-latest
+    steps:
+      - name: Checkout Code
+        uses: actions/checkout@v4
+      - name: Checkout LFS
+        run: |
+          # Replace double auth header, see https://github.com/actions/checkout/issues/1830
+          AUTH=$(git config --local http.${{ github.server_url }}/.extraheader)
+          git config --local --unset http.${{ github.server_url }}/.extraheader
+          git config --local http.${{ github.server_url }}/${{ github.repository }}.git/info/lfs/objects/batch.extraheader "$AUTH"
+
+          # Get files
+          git lfs fetch
+          git lfs checkout
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+      - name: Sanitize metadata
+        id: meta
+        uses: docker/metadata-action@v5
+        with:
+          tags: latest
+          images: git.mylloon.fr/${{ github.repository }}
+      - name: Login to Registry
+        uses: docker/login-action@v3
+        with:
+          registry: ${{ github.server_url }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.TOKEN }}
+      - name: Build and push
+        uses: docker/build-push-action@v6
+        with:
+          context: .
+          push: ${{ github.event_name != 'pull_request' }}
+          tags: ${{ steps.meta.outputs.tags }}


@@ -1,15 +0,0 @@
-steps:
-  publish:
-    image: woodpeckerci/plugin-docker-buildx:2
-    settings:
-      labels:
-        platform: linux/amd64
-      repo: git.mylloon.fr/${CI_REPO,,}
-      auto_tag: true
-      registry: git.mylloon.fr
-      username: ${CI_REPO_OWNER}
-      password:
-        from_secret: cb_token
-    when:
-      event: push
-      branch: main

Cargo.lock (generated, 1204 changed lines): file diff suppressed because it is too large.


@@ -10,9 +10,9 @@ publish = false
 license = "AGPL-3.0-or-later"
 
 [dependencies]
-actix-web = { version = "4.6", default-features = false, features = ["macros", "compress-brotli"] }
+actix-web = { version = "4.9", default-features = false, features = ["macros", "compress-brotli"] }
 actix-files = "0.6"
-cached = { version = "0.51", features = ["async", "ahash"] }
+cached = { version = "0.53", features = ["async", "ahash"] }
 ramhorns = "1.0"
 toml = "0.8"
 serde = { version = "1.0", features = ["derive"] }
@@ -21,16 +21,17 @@ serde_json = "1.0"
 minify-html = "0.15"
 minify-js = "0.6"
 glob = "0.3"
-comrak = "0.24"
+comrak = "0.29"
 reqwest = { version = "0.12", features = ["json"] }
 chrono = { version = "0.4.38", default-features = false, features = ["clock"]}
-chrono-tz = "0.9"
+chrono-tz = "0.10"
 rss = { version = "2.0", features = ["atom"] }
-lol_html = "1.2"
+lol_html = "2.0"
 base64 = "0.22"
 mime_guess = "2.0"
 urlencoding = "2.1"
 regex = "1.10"
+cyborgtime = "2.1.1"
 
 [lints.clippy]
 pedantic = "warn"


@@ -3,7 +3,7 @@
 Easy WebPage generator
 
 [![dependency status](https://deps.rs/repo/gitea/git.mylloon.fr/Anri/mylloon.fr/status.svg)](https://deps.rs/repo/gitea/git.mylloon.fr/Anri/mylloon.fr)
-[![status-badge](https://ci.mylloon.fr/api/badges/Anri/mylloon.fr/status.svg)](https://ci.mylloon.fr/Anri/mylloon.fr)
+[![status-badge](https://git.mylloon.fr/Anri/mylloon.fr/badges/workflows/publish.yml/badge.svg)](https://git.mylloon.fr/Anri/mylloon.fr/actions?workflow=publish.yml)
 
 - See [issues](https://git.mylloon.fr/Anri/mylloon.fr/issues)
 - See [documentation](https://git.mylloon.fr/Anri/mylloon.fr/src/branch/main/Documentation.md)


@@ -42,7 +42,14 @@ async fn main() -> Result<()> {
                     .add(("Server", format!("ewp/{}", env!("CARGO_PKG_VERSION"))))
                     .add(("Permissions-Policy", "interest-cohort=()")),
             )
-            .service(web::scope("/api").service(web::scope("v1").service(api_v1::love)))
+            .service(
+                web::scope("/api").service(
+                    web::scope("v1")
+                        .service(api_v1::love)
+                        .service(api_v1::btf)
+                        .service(api_v1::websites),
+                ),
+            )
             .service(index::page)
             .service(agreements::security)
             .service(agreements::humans)


@@ -1,8 +1,10 @@
 use crate::misc::date::Date;
 use base64::engine::general_purpose;
 use base64::Engine;
-use comrak::nodes::{AstNode, NodeValue};
-use comrak::{format_html, parse_document, Arena, ComrakOptions, ListStyleType, Options};
+use comrak::nodes::{AstNode, NodeCode, NodeMath, NodeValue};
+use comrak::{
+    format_html, parse_document, Anchorizer, Arena, ComrakOptions, ListStyleType, Options,
+};
 use lol_html::html_content::ContentType;
 use lol_html::{element, rewrite_str, HtmlRewriter, RewriteStrSettings, Settings};
 use ramhorns::Content;
@@ -10,8 +12,6 @@ use serde::{Deserialize, Deserializer};
 use std::fmt::Debug;
 use std::fs;
 use std::path::Path;
-use std::sync::atomic::{AtomicBool, Ordering};
-use std::sync::Arc;
 
 /// Metadata for blog posts
 #[derive(Content, Debug, Default, Deserialize)]
@@ -30,10 +30,10 @@ pub struct Tag {
     pub name: String,
 }
 
-impl<'de> Deserialize<'de> for Tag {
+impl<'a> Deserialize<'a> for Tag {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
     where
-        D: Deserializer<'de>,
+        D: Deserializer<'a>,
     {
         match <&str>::deserialize(deserializer) {
            Ok(s) => match serde_yml::from_str(s) {
@@ -119,10 +119,11 @@ impl Metadata {
 pub struct File {
     pub metadata: Metadata,
     pub content: String,
+    pub toc_data: String,
 }
 
 /// Options used for parser and compiler MD --> HTML
-pub fn get_options() -> ComrakOptions {
+pub fn get_options<'a>() -> ComrakOptions<'a> {
     let mut options = comrak::Options::default();
 
     // Extension
@@ -139,12 +140,18 @@ pub fn get_options() -> ComrakOptions {
     options.extension.multiline_block_quotes = true;
     options.extension.math_dollars = true;
     options.extension.math_code = false;
+    options.extension.wikilinks_title_after_pipe = false;
+    options.extension.wikilinks_title_before_pipe = false;
+    options.extension.underline = true;
+    options.extension.spoiler = false;
+    options.extension.greentext = false;
 
     // Parser
     options.parse.smart = true; // could be boring
     options.parse.default_info_string = Some("plaintext".into());
     options.parse.relaxed_tasklist_matching = true;
     options.parse.relaxed_autolinks = true;
+    // options.render.broken_link_callback = ...;
 
     // Renderer
     options.render.hardbreaks = false; // could be true? change by metadata could be good for compatibility
@@ -155,7 +162,13 @@ pub fn get_options() -> ComrakOptions {
     options.render.escape = false;
     options.render.list_style = ListStyleType::Dash;
     options.render.sourcepos = false;
+    options.render.experimental_inline_sourcepos = false;
     options.render.escaped_char_spans = false;
+    options.render.ignore_setext = true;
+    options.render.ignore_empty_links = true;
+    options.render.gfm_quirks = true;
+    options.render.prefer_fenced = false;
+    options.render.figure_with_caption = false;
 
     options
 }
@@ -167,7 +180,7 @@ fn custom_img_size(html: &str) -> String {
         RewriteStrSettings {
             element_content_handlers: vec![element!("img[alt]", |el| {
                 let alt = el.get_attribute("alt").unwrap();
-                let possible_piece = alt.split(|c| c == '|').collect::<Vec<&str>>();
+                let possible_piece = alt.split('|').collect::<Vec<&str>>();
 
                 if possible_piece.len() > 1 {
                     let data = possible_piece.last().unwrap().trim();
@@ -218,46 +231,53 @@ fn fix_images_and_integration(path: &str, html: &str) -> (String, Metadata) {
         mail_obfsucated: false,
     };
 
-    (
-        rewrite_str(
-            html,
-            RewriteStrSettings {
-                element_content_handlers: vec![element!("img", |el| {
-                    if let Some(src) = el.get_attribute("src") {
-                        let img_src = Path::new(path).parent().unwrap();
-                        let img_path = urlencoding::decode(img_src.join(src).to_str().unwrap())
-                            .unwrap()
-                            .to_string();
-                        if let Ok(file) = fs::read_to_string(&img_path) {
-                            let mime = mime_guess::from_path(&img_path).first_or_octet_stream();
-
-                            if mime == "text/markdown" {
-                                let mut options = get_options();
-                                options.extension.footnotes = false;
-                                let data = read_md(
-                                    &img_path,
-                                    &file,
-                                    &TypeFileMetadata::Generic,
-                                    Some(options),
-                                );
-                                el.replace(&data.content, ContentType::Html);
-                                metadata.merge(&data.metadata);
-                            } else {
-                                let image = general_purpose::STANDARD.encode(file);
-                                el.set_attribute("src", &format!("data:{mime};base64,{image}"))
-                                    .unwrap();
-                            }
-                        }
-                    }
-
-                    Ok(())
-                })],
-                ..RewriteStrSettings::default()
-            },
-        )
-        .unwrap(),
-        metadata,
-    )
+    // Collection of any additional metadata
+    let mut additional_metadata = Vec::new();
+
+    let result = rewrite_str(
+        html,
+        RewriteStrSettings {
+            element_content_handlers: vec![element!("img", |el| {
+                if let Some(src) = el.get_attribute("src") {
+                    let img_src = Path::new(path).parent().unwrap();
+                    let img_path = urlencoding::decode(img_src.join(src).to_str().unwrap())
+                        .unwrap()
+                        .to_string();
+                    if let Ok(file) = fs::read_to_string(&img_path) {
+                        let mime = mime_guess::from_path(&img_path).first_or_octet_stream();
+
+                        if mime == "text/markdown" {
+                            let mut options = get_options();
+                            options.extension.footnotes = false;
+                            let data = read_md(
+                                &img_path,
+                                &file,
+                                &TypeFileMetadata::Generic,
+                                Some(options),
+                            );
+                            el.replace(&data.content, ContentType::Html);
+
+                            // Store the metadata for later merging
+                            additional_metadata.push(data.metadata);
+                        } else {
+                            let image = general_purpose::STANDARD.encode(file);
+                            el.set_attribute("src", &format!("data:{mime};base64,{image}"))
+                                .unwrap();
+                        }
+                    }
+                }
+
+                Ok(())
+            })],
+            ..RewriteStrSettings::default()
+        },
+    )
+    .unwrap();
+
+    // Merge all collected metadata
+    for additional in additional_metadata {
+        metadata.merge(&additional);
+    }
+
+    (result, metadata)
 }
/// Transform markdown string to File structure /// Transform markdown string to File structure
@ -290,6 +310,8 @@ pub fn read_md(
html_content = custom_img_size(&html_content); html_content = custom_img_size(&html_content);
(html_content, mail_obfsucated) = mail_obfuscation(&html_content); (html_content, mail_obfsucated) = mail_obfuscation(&html_content);
let toc = toc_to_html(&generate_toc(root));
let mut final_metadata = Metadata { let mut final_metadata = Metadata {
info: metadata, info: metadata,
mermaid: check_mermaid(root, mermaid_name), mermaid: check_mermaid(root, mermaid_name),
@@ -302,6 +324,7 @@
     File {
         metadata: final_metadata,
         content: html_content,
+        toc_data: toc,
     }
 }
@@ -394,12 +417,12 @@ fn check_code<'a>(root: &'a AstNode<'a>, blacklist: &[String]) -> bool {
 /// Check if html contains maths
 fn check_math(html: &str) -> bool {
-    let math_detected = Arc::new(AtomicBool::new(false));
+    let mut math_detected = false;
 
     let _ = HtmlRewriter::new(
         Settings {
             element_content_handlers: vec![element!("span[data-math-style]", |_| {
-                math_detected.store(true, Ordering::SeqCst);
+                math_detected = true;
                 Ok(())
             })],
@@ -409,7 +432,7 @@
     )
     .write(html.as_bytes());
 
-    math_detected.load(Ordering::SeqCst)
+    math_detected
 }
/// Change class of languages for hljs detection /// Change class of languages for hljs detection
@ -425,7 +448,7 @@ fn hljs_replace<'a>(root: &'a AstNode<'a>, mermaid_str: &str) {
/// Obfuscate email if email found /// Obfuscate email if email found
fn mail_obfuscation(html: &str) -> (String, bool) { fn mail_obfuscation(html: &str) -> (String, bool) {
let modified = Arc::new(AtomicBool::new(false)); let mut modified = false;
let data_attr = "title"; let data_attr = "title";
@@ -434,7 +457,7 @@
         html,
         RewriteStrSettings {
             element_content_handlers: vec![element!("a[href^='mailto:']", |el| {
-                modified.store(true, Ordering::SeqCst);
+                modified = true;
 
                 // Get mail address
                 let link = el.get_attribute("href").unwrap();
@@ -454,9 +477,7 @@
     )
     .unwrap();
 
-    let is_modified = modified.load(Ordering::SeqCst);
-
-    if is_modified {
+    if modified {
         // Remove old data email if exists
         (
             rewrite_str(
@@ -482,9 +503,93 @@
                 },
             )
             .unwrap(),
-            is_modified,
+            modified,
         )
     } else {
-        (new_html, is_modified)
+        (new_html, modified)
     }
 }
+
+#[derive(Debug)]
+struct TOCEntry {
+    id: String,
+    title: String,
+    depth: u8,
+}
+
+fn generate_toc<'a>(root: &'a AstNode<'a>) -> Vec<TOCEntry> {
+    /// See <https://github.com/kivikakk/comrak/blob/b67d406d3b101b93539c37a1ca75bff81ff8c149/src/html.rs#L446>
+    fn collect_text<'a>(node: &'a AstNode<'a>, output: &mut String) {
+        match node.data.borrow().value {
+            NodeValue::Text(ref literal)
+            | NodeValue::Code(NodeCode { ref literal, .. })
+            | NodeValue::Math(NodeMath { ref literal, .. }) => {
+                *output = literal.to_string();
+            }
+            _ => {
+                for n in node.children() {
+                    if !output.is_empty() {
+                        break;
+                    }
+                    collect_text(n, output);
+                }
+            }
+        }
+    }
+
+    let mut toc = vec![];
+    let mut anchorizer = Anchorizer::new();
+
+    // Collect headings first to avoid mutable borrow conflicts
+    let headings: Vec<_> = root
+        .children()
+        .filter_map(|node| {
+            if let NodeValue::Heading(ref nch) = &node.data.borrow().value {
+                Some((*nch, node))
+            } else {
+                None
+            }
+        })
+        .collect();
+
+    // Now process each heading
+    for (nch, node) in headings {
+        let mut title = String::with_capacity(20);
+        collect_text(node, &mut title);
+
+        toc.push(TOCEntry {
+            id: anchorizer.anchorize(title.clone()),
+            title,
+            depth: nch.level,
+        });
+    }
+
+    toc
+}
+
+fn toc_to_html(toc: &[TOCEntry]) -> String {
+    if toc.is_empty() {
+        return String::new();
+    }
+
+    let mut html = Vec::with_capacity(20 + 20 * toc.len());
+
+    html.extend_from_slice(b"<ul>");
+
+    for entry in toc {
+        // TODO: Use depth
+        html.extend_from_slice(
+            format!(
+                "<li><a href=\"{}\">{} (dbg/depth/{})</a></li>",
+                entry.id, entry.title, entry.depth
+            )
+            .as_bytes(),
+        );
+    }
+
+    html.extend_from_slice(b"</ul>");
+
+    String::from_utf8(html).unwrap()
+}
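
The toc_to_html added above emits a flat list and leaves the depth field unused (see the TODO). For reference, a minimal sketch of a depth-aware variant, assuming the same TOCEntry type and that entries should link to "#id" fragments; this is hypothetical, not part of the change:

// Hypothetical follow-up to the TODO above, not part of this diff.
// Opens/closes one <ul> per heading level instead of emitting a flat list.
// Note: for brevity the nested <ul> is emitted as a direct child of the outer
// list; wrapping it inside the previous <li> would be stricter HTML.
fn toc_to_html_nested(toc: &[TOCEntry]) -> String {
    if toc.is_empty() {
        return String::new();
    }

    let base = toc[0].depth;
    let mut current = base;
    let mut html = String::from("<ul>");

    for entry in toc {
        // Clamp so a shallower heading can never close more lists than were opened
        let depth = entry.depth.max(base);
        while current < depth {
            html.push_str("<ul>");
            current += 1;
        }
        while current > depth {
            html.push_str("</ul>");
            current -= 1;
        }
        html.push_str(&format!(
            "<li><a href=\"#{}\">{}</a></li>",
            entry.id, entry.title
        ));
    }

    // Close the lists that are still open, including the outermost one
    while current > base {
        html.push_str("</ul>");
        current -= 1;
    }
    html.push_str("</ul>");

    html
}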


@@ -81,5 +81,6 @@ fn read_pdf(data: Vec<u8>) -> File {
             style="width: 100%; height: 79vh";
             >"#
         ),
+        toc_data: String::new(),
     }
 }


@@ -1,15 +1,53 @@
+use std::time::Duration;
+
 use actix_web::{get, HttpResponse, Responder};
+use chrono::Utc;
+use cyborgtime::format_duration;
 use serde::Serialize;
 
-/// Response
+/// Response for /love
 #[derive(Serialize)]
-struct Info {
+struct InfoLove {
     unix_epoch: u32,
 }
 
 #[get("/love")]
 pub async fn love() -> impl Responder {
-    HttpResponse::Ok().json(Info {
+    HttpResponse::Ok().json(InfoLove {
         unix_epoch: 1_605_576_600,
     })
 }
+
+/// Response for /backtofrance
+#[derive(Serialize)]
+struct InfoBTF {
+    unix_epoch: u64,
+    countdown: String,
+}
+
+#[get("/backtofrance")]
+pub async fn btf() -> impl Responder {
+    let target = 1_736_618_100;
+    let current_time: u64 = Utc::now().timestamp().try_into().unwrap();
+
+    let info = InfoBTF {
+        unix_epoch: target,
+        countdown: if current_time > target {
+            "Already happened".to_owned()
+        } else {
+            let duration_epoch = target - current_time;
+            let duration = Duration::from_secs(duration_epoch);
+            format_duration(duration).to_string()
+        },
+    };
+
+    HttpResponse::Ok().json(info)
+}
+
+#[get("/websites")]
+pub async fn websites() -> impl Responder {
+    HttpResponse::Ok().json((
+        "http://www.bocal.cs.univ-paris8.fr/~akennel/",
+        "https://anri.up8.site/",
+    ))
+}
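
For context, roughly how the two new endpoints could be consumed once mounted under /api/v1 (see the main.rs change above). The base URL, the tokio runtime, the struct name and the use of reqwest are assumptions made for this sketch only:

// Illustrative client sketch, not part of this diff.
// Assumes the site is reachable at https://mylloon.fr and that tokio and
// reqwest (with its "json" feature) are available.
#[derive(serde::Deserialize, Debug)]
struct BackToFrance {
    unix_epoch: u64,
    countdown: String,
}

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    // GET /api/v1/backtofrance -> {"unix_epoch": 1736618100, "countdown": "..."}
    let btf: BackToFrance = reqwest::get("https://mylloon.fr/api/v1/backtofrance")
        .await?
        .json()
        .await?;
    println!("{} ({})", btf.countdown, btf.unix_epoch);

    // GET /api/v1/websites -> a JSON array of two URLs (the serialized Rust tuple)
    let sites: (String, String) = reqwest::get("https://mylloon.fr/api/v1/websites")
        .await?
        .json()
        .await?;
    println!("{} and {}", sites.0, sites.1);

    Ok(())
}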


@@ -186,7 +186,6 @@ fn get_posts(location: &str) -> Vec<Post> {
 struct BlogPostTemplate {
     navbar: NavBar,
     post: Option<File>,
-    toc: String,
 }
 
 #[get("/blog/p/{id}")]
@@ -199,7 +198,7 @@ pub async fn page(path: web::Path<(String,)>, config: web::Data<Config>) -> impl
 fn build_post(file: &str, config: Config) -> String {
     let mut post = None;
-    let (infos, toc) = get_post(
+    let infos = get_post(
         &mut post,
         file,
         &config.fc.name.unwrap_or_default(),
@@ -214,18 +213,12 @@
                 ..NavBar::default()
             },
             post,
-            toc,
         },
         infos,
     )
 }
 
-fn get_post(
-    post: &mut Option<File>,
-    filename: &str,
-    name: &str,
-    data_dir: &str,
-) -> (InfosPage, String) {
+fn get_post(post: &mut Option<File>, filename: &str, name: &str, data_dir: &str) -> InfosPage {
     let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
     let ext = ".md";
@@ -234,13 +227,8 @@
         &TypeFileMetadata::Blog,
     );
 
-    let default = (
-        filename,
-        &format!("Blog d'{name}"),
-        Vec::new(),
-        String::new(),
-    );
-    let (title, desc, tags, toc) = match post {
+    let default = (filename, &format!("Blog d'{name}"), Vec::new());
+    let (title, desc, tags) = match post {
         Some(data) => (
             match &data.metadata.info.blog.as_ref().unwrap().title {
                 Some(text) => text,
@@ -254,28 +242,20 @@
                 Some(tags) => tags.clone(),
                 None => default.2,
             },
-            match &data.metadata.info.blog.as_ref().unwrap().toc {
-                // TODO: Generate TOC
-                Some(true) => String::new(),
-                _ => default.3,
-            },
         ),
         None => default,
     };
 
-    (
-        InfosPage {
-            title: Some(format!("Post: {title}")),
-            desc: Some(desc.clone()),
-            kw: Some(make_kw(
-                &["blog", "blogging", "write", "writing"]
-                    .into_iter()
-                    .chain(tags.iter().map(|t| t.name.as_str()))
-                    .collect::<Vec<_>>(),
-            )),
-        },
-        toc,
-    )
+    InfosPage {
+        title: Some(format!("Post: {title}")),
+        desc: Some(desc.clone()),
+        kw: Some(make_kw(
+            &["blog", "blogging", "write", "writing"]
+                .into_iter()
+                .chain(tags.iter().map(|t| t.name.as_str()))
+                .collect::<Vec<_>>(),
+        )),
+    }
 }
 
 #[routes]

static/badges/friends/jas.webp (stored with Git LFS): binary file changed, content not shown.


@@ -12,6 +12,7 @@
     --background: #f1f1f1;
     --font-color: #18181b;
     --link-color: #df5a9c;
+    --selection-color: #c5c5c560;
   }
 }
 
@@ -21,5 +22,6 @@
     --background: #171e26;
     --font-color: #bcbcc5;
     --link-color: #ff80bf;
+    --selection-color: #c5c5c530;
   }
 }


@@ -4,6 +4,10 @@ html {
   font-family: var(--font-family);
 }
 
+::selection {
+  background-color: var(--selection-color);
+}
+
 body,
 a {
   color: var(--font-color);


@@ -12,12 +12,13 @@ window.addEventListener("load", () => {
     -webkit-background-clip: text; /* Chromium fix */
     color: transparent;
   `;
+  const mono = "font-family: monospace";
 
   const tags = [
     new Tag("Comment fonctionne un PC 😵‍💫"),
-    new Tag("undefined", "font-family: monospace"),
-    new Tag("/api/v1/love", "font-family: monospace"),
-    new Tag("A rater son master 🎊"),
+    new Tag("undefined", mono),
+    new Tag("/api/v1/love", mono),
+    new Tag("/api/v1/websites", mono),
     new Tag("Peak D2 sur Valo 🤡"),
     new Tag(
       "0x520",
@@ -47,7 +48,7 @@ window.addEventListener("load", () => {
       `
     ),
     new Tag("School hater"),
-    new Tag("Stagiaire"),
+    new Tag("Étudiant"),
     new Tag("Rempli de malice"),
     new Tag(
       "#NouveauFrontPopulaire ✊",
@@ -61,6 +62,7 @@ window.addEventListener("load", () => {
       text-shadow: 0px 0px 20px light-dark(var(--font-color), transparent);
       `
     ),
+    new Tag("s/centre/droite/g", mono),
   ];
 
   const random = Math.round(Math.random() * (tags.length - 1));


@@ -11,6 +11,9 @@ window.addEventListener("load", () => {
       RA: "Rightarrow",
       LA: "Leftarrow",
       u: "mu",
+      Tau: "mathrm{T}",
+      lb: "textlbrackdbl",
+      rb: "textrbrackdbl",
     })
   )[Symbol.iterator]()) {
     const bs = "\\";


@@ -28,7 +28,9 @@
     <main>
       {{^post}}
       <p>This post doesn't exist... sorry</p>
-      {{/post}} {{#post}} {{&toc}}
+      {{/post}} {{#post}} {{#metadata}} {{#info}} {{#blog}} {{#toc}}
+      <aside>{{&toc_data}}</aside>
+      {{/toc}} {{/blog}} {{/info}} {{/metadata}}
       <article>{{&content}}</article>
       {{/post}}
     </main>