Compare commits
29 commits: ba4b5b6b54...8f5895e9ba

Commits:

- 8f5895e9ba
- dde2ca0172
- 87d0fa3c11
- 7f3434b7c1
- 13d7b54c27
- 984ecb6b69
- 5a15945439
- e9441dba46
- 3f3efe4afa
- 37b51bcbee
- 3cc69f3d4f
- 7432ffd5f9
- 764a632ae6
- 396bff909e
- fb44c25e47
- a7aec1b94e
- 8cedeb531d
- 912f16e0c3
- 999d68ab60
- 2d9fc0d559
- 8c386d5ac6
- 95b92699ed
- 2dc54a6f76
- deb54372a2
- 485797c64f
- 5b43730150
- b145510d83
- 847ec0d3c3
- e0b59130ee

17 changed files with 939 additions and 704 deletions

.forgejo/workflows/pr-check.yml (new file, 23 lines)

@@ -0,0 +1,23 @@
name: PR Check

on:
  pull_request:
    types: [opened, synchronize, reopened]

jobs:
  lint-and-format:
    container:
      image: cimg/rust:1.81-node

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Build
        run: cargo build

      - name: Run format check
        run: cargo fmt --check

      - name: Run Clippy
        run: cargo clippy

.forgejo/workflows/publish.yml (new file, 47 lines)

@@ -0,0 +1,47 @@
name: Publish latest version

on:
  workflow_dispatch:

jobs:
  build:
    container:
      image: ghcr.io/catthehacker/ubuntu:act-latest
    steps:
      - name: Checkout Code
        uses: actions/checkout@v4

      - name: Checkout LFS
        run: |
          # Replace double auth header, see https://github.com/actions/checkout/issues/1830
          AUTH=$(git config --local http.${{ github.server_url }}/.extraheader)
          git config --local --unset http.${{ github.server_url }}/.extraheader
          git config --local http.${{ github.server_url }}/${{ github.repository }}.git/info/lfs/objects/batch.extraheader "$AUTH"

          # Get files
          git lfs fetch
          git lfs checkout

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Sanitize metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          tags: latest
          images: git.mylloon.fr/${{ github.repository }}

      - name: Login to Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ github.server_url }}
          username: ${{ github.actor }}
          password: ${{ secrets.TOKEN }}

      - name: Build and push
        uses: docker/build-push-action@v6
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}

(deleted file, name not captured)

@@ -1,15 +0,0 @@
steps:
  publish:
    image: woodpeckerci/plugin-docker-buildx:2
    settings:
      labels:
        platform: linux/amd64
      repo: git.mylloon.fr/${CI_REPO,,}
      auto_tag: true
      registry: git.mylloon.fr
      username: ${CI_REPO_OWNER}
      password:
        from_secret: cb_token
    when:
      event: push
      branch: main

Cargo.lock (generated, 1204 changed lines)

File diff suppressed because it is too large

Cargo.toml (11 changed lines)

@@ -10,9 +10,9 @@ publish = false
license = "AGPL-3.0-or-later"

[dependencies]
actix-web = { version = "4.6", default-features = false, features = ["macros", "compress-brotli"] }
actix-web = { version = "4.9", default-features = false, features = ["macros", "compress-brotli"] }
actix-files = "0.6"
cached = { version = "0.51", features = ["async", "ahash"] }
cached = { version = "0.53", features = ["async", "ahash"] }
ramhorns = "1.0"
toml = "0.8"
serde = { version = "1.0", features = ["derive"] }

@@ -21,16 +21,17 @@ serde_json = "1.0"
minify-html = "0.15"
minify-js = "0.6"
glob = "0.3"
comrak = "0.24"
comrak = "0.29"
reqwest = { version = "0.12", features = ["json"] }
chrono = { version = "0.4.38", default-features = false, features = ["clock"]}
chrono-tz = "0.9"
chrono-tz = "0.10"
rss = { version = "2.0", features = ["atom"] }
lol_html = "1.2"
lol_html = "2.0"
base64 = "0.22"
mime_guess = "2.0"
urlencoding = "2.1"
regex = "1.10"
cyborgtime = "2.1.1"

[lints.clippy]
pedantic = "warn"

(file name not captured)

@@ -3,7 +3,7 @@
Easy WebPage generator

[![dependency status](https://deps.rs/repo/gitea/git.mylloon.fr/Anri/mylloon.fr/status.svg)](https://deps.rs/repo/gitea/git.mylloon.fr/Anri/mylloon.fr)
[![status-badge](https://ci.mylloon.fr/api/badges/Anri/mylloon.fr/status.svg)](https://ci.mylloon.fr/Anri/mylloon.fr)
[![status-badge](https://git.mylloon.fr/Anri/mylloon.fr/badges/workflows/publish.yml/badge.svg)](https://git.mylloon.fr/Anri/mylloon.fr/actions?workflow=publish.yml)

- See [issues](https://git.mylloon.fr/Anri/mylloon.fr/issues)
- See [documentation](https://git.mylloon.fr/Anri/mylloon.fr/src/branch/main/Documentation.md)

(file name not captured)

@@ -42,7 +42,14 @@ async fn main() -> Result<()> {
                .add(("Server", format!("ewp/{}", env!("CARGO_PKG_VERSION"))))
                .add(("Permissions-Policy", "interest-cohort=()")),
            )
            .service(web::scope("/api").service(web::scope("v1").service(api_v1::love)))
            .service(
                web::scope("/api").service(
                    web::scope("v1")
                        .service(api_v1::love)
                        .service(api_v1::btf)
                        .service(api_v1::websites),
                ),
            )
            .service(index::page)
            .service(agreements::security)
            .service(agreements::humans)
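
A note on the routing change above: nested scopes concatenate their prefixes, so the three handlers end up served under /api/v1/. The sketch below is illustrative only and is not code from this repository; the handler body, port, and payload are hypothetical, and it assumes actix-web 4 with the macros feature plus serde_json.

```rust
use actix_web::{get, web, App, HttpResponse, HttpServer, Responder};

// Hypothetical handler standing in for api_v1::love.
#[get("/love")]
async fn love() -> impl Responder {
    HttpResponse::Ok().json(serde_json::json!({ "unix_epoch": 1_605_576_600 }))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        // "/api" + "v1" + "/love" compose into GET /api/v1/love.
        App::new().service(web::scope("/api").service(web::scope("v1").service(love)))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```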

(file name not captured)

@@ -1,8 +1,10 @@
use crate::misc::date::Date;
use base64::engine::general_purpose;
use base64::Engine;
use comrak::nodes::{AstNode, NodeValue};
use comrak::{format_html, parse_document, Arena, ComrakOptions, ListStyleType, Options};
use comrak::nodes::{AstNode, NodeCode, NodeMath, NodeValue};
use comrak::{
    format_html, parse_document, Anchorizer, Arena, ComrakOptions, ListStyleType, Options,
};
use lol_html::html_content::ContentType;
use lol_html::{element, rewrite_str, HtmlRewriter, RewriteStrSettings, Settings};
use ramhorns::Content;

@@ -10,8 +12,6 @@ use serde::{Deserialize, Deserializer};
use std::fmt::Debug;
use std::fs;
use std::path::Path;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

/// Metadata for blog posts
#[derive(Content, Debug, Default, Deserialize)]

@@ -30,10 +30,10 @@ pub struct Tag {
    pub name: String,
}

impl<'de> Deserialize<'de> for Tag {
impl<'a> Deserialize<'a> for Tag {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
        D: Deserializer<'a>,
    {
        match <&str>::deserialize(deserializer) {
            Ok(s) => match serde_yml::from_str(s) {

@@ -119,10 +119,11 @@ impl Metadata {
pub struct File {
    pub metadata: Metadata,
    pub content: String,
    pub toc_data: String,
}

/// Options used for parser and compiler MD --> HTML
pub fn get_options() -> ComrakOptions {
pub fn get_options<'a>() -> ComrakOptions<'a> {
    let mut options = comrak::Options::default();

    // Extension

@@ -139,12 +140,18 @@ pub fn get_options() -> ComrakOptions {
    options.extension.multiline_block_quotes = true;
    options.extension.math_dollars = true;
    options.extension.math_code = false;
    options.extension.wikilinks_title_after_pipe = false;
    options.extension.wikilinks_title_before_pipe = false;
    options.extension.underline = true;
    options.extension.spoiler = false;
    options.extension.greentext = false;

    // Parser
    options.parse.smart = true; // could be boring
    options.parse.default_info_string = Some("plaintext".into());
    options.parse.relaxed_tasklist_matching = true;
    options.parse.relaxed_autolinks = true;
    // options.render.broken_link_callback = ...;

    // Renderer
    options.render.hardbreaks = false; // could be true? change by metadata could be good for compatibility

@@ -155,7 +162,13 @@
    options.render.escape = false;
    options.render.list_style = ListStyleType::Dash;
    options.render.sourcepos = false;
    options.render.experimental_inline_sourcepos = false;
    options.render.escaped_char_spans = false;
    options.render.ignore_setext = true;
    options.render.ignore_empty_links = true;
    options.render.gfm_quirks = true;
    options.render.prefer_fenced = false;
    options.render.figure_with_caption = false;

    options
}

@@ -167,7 +180,7 @@ fn custom_img_size(html: &str) -> String {
        RewriteStrSettings {
            element_content_handlers: vec![element!("img[alt]", |el| {
                let alt = el.get_attribute("alt").unwrap();
                let possible_piece = alt.split(|c| c == '|').collect::<Vec<&str>>();
                let possible_piece = alt.split('|').collect::<Vec<&str>>();

                if possible_piece.len() > 1 {
                    let data = possible_piece.last().unwrap().trim();

@@ -218,46 +231,53 @@ fn fix_images_and_integration(path: &str, html: &str) -> (String, Metadata) {
        mail_obfsucated: false,
    };

    (
        rewrite_str(
            html,
            RewriteStrSettings {
                element_content_handlers: vec![element!("img", |el| {
                    if let Some(src) = el.get_attribute("src") {
                        let img_src = Path::new(path).parent().unwrap();
                        let img_path = urlencoding::decode(img_src.join(src).to_str().unwrap())
                            .unwrap()
                            .to_string();
                        if let Ok(file) = fs::read_to_string(&img_path) {
                            let mime = mime_guess::from_path(&img_path).first_or_octet_stream();
                            if mime == "text/markdown" {
                                let mut options = get_options();
                                options.extension.footnotes = false;
                                let data = read_md(
                                    &img_path,
                                    &file,
                                    &TypeFileMetadata::Generic,
                                    Some(options),
                                );
                                el.replace(&data.content, ContentType::Html);
                                metadata.merge(&data.metadata);
                            } else {
                                let image = general_purpose::STANDARD.encode(file);
    // Collection of any additional metadata
    let mut additional_metadata = Vec::new();

                                el.set_attribute("src", &format!("data:{mime};base64,{image}"))
                                    .unwrap();
                            }
    let result = rewrite_str(
        html,
        RewriteStrSettings {
            element_content_handlers: vec![element!("img", |el| {
                if let Some(src) = el.get_attribute("src") {
                    let img_src = Path::new(path).parent().unwrap();
                    let img_path = urlencoding::decode(img_src.join(src).to_str().unwrap())
                        .unwrap()
                        .to_string();
                    if let Ok(file) = fs::read_to_string(&img_path) {
                        let mime = mime_guess::from_path(&img_path).first_or_octet_stream();
                        if mime == "text/markdown" {
                            let mut options = get_options();
                            options.extension.footnotes = false;
                            let data = read_md(
                                &img_path,
                                &file,
                                &TypeFileMetadata::Generic,
                                Some(options),
                            );
                            el.replace(&data.content, ContentType::Html);

                            // Store the metadata for later merging
                            additional_metadata.push(data.metadata);
                        } else {
                            let image = general_purpose::STANDARD.encode(file);
                            el.set_attribute("src", &format!("data:{mime};base64,{image}"))
                                .unwrap();
                        }
                    }

                        Ok(())
                    })],
                    ..RewriteStrSettings::default()
                },
            )
            .unwrap(),
            metadata,
    }
                Ok(())
            })],
            ..RewriteStrSettings::default()
        },
    )
    .unwrap();

    // Merge all collected metadata
    for additional in additional_metadata {
        metadata.merge(&additional);
    }

    (result, metadata)
}

/// Transform markdown string to File structure

@@ -290,6 +310,8 @@ pub fn read_md(
    html_content = custom_img_size(&html_content);
    (html_content, mail_obfsucated) = mail_obfuscation(&html_content);

    let toc = toc_to_html(&generate_toc(root));

    let mut final_metadata = Metadata {
        info: metadata,
        mermaid: check_mermaid(root, mermaid_name),

@@ -302,6 +324,7 @@
    File {
        metadata: final_metadata,
        content: html_content,
        toc_data: toc,
    }
}

@@ -394,12 +417,12 @@ fn check_code<'a>(root: &'a AstNode<'a>, blacklist: &[String]) -> bool {

/// Check if html contains maths
fn check_math(html: &str) -> bool {
    let math_detected = Arc::new(AtomicBool::new(false));
    let mut math_detected = false;

    let _ = HtmlRewriter::new(
        Settings {
            element_content_handlers: vec![element!("span[data-math-style]", |_| {
                math_detected.store(true, Ordering::SeqCst);
                math_detected = true;

                Ok(())
            })],

@@ -409,7 +432,7 @@ fn check_math(html: &str) -> bool {
    )
    .write(html.as_bytes());

    math_detected.load(Ordering::SeqCst)
    math_detected
}

/// Change class of languages for hljs detection

@@ -425,7 +448,7 @@ fn hljs_replace<'a>(root: &'a AstNode<'a>, mermaid_str: &str) {

/// Obfuscate email if email found
fn mail_obfuscation(html: &str) -> (String, bool) {
    let modified = Arc::new(AtomicBool::new(false));
    let mut modified = false;

    let data_attr = "title";

@@ -434,7 +457,7 @@ fn mail_obfuscation(html: &str) -> (String, bool) {
        html,
        RewriteStrSettings {
            element_content_handlers: vec![element!("a[href^='mailto:']", |el| {
                modified.store(true, Ordering::SeqCst);
                modified = true;

                // Get mail address
                let link = el.get_attribute("href").unwrap();

@@ -454,9 +477,7 @@ fn mail_obfuscation(html: &str) -> (String, bool) {
    )
    .unwrap();

    let is_modified = modified.load(Ordering::SeqCst);

    if is_modified {
    if modified {
        // Remove old data email if exists
        (
            rewrite_str(

@@ -482,9 +503,93 @@ fn mail_obfuscation(html: &str) -> (String, bool) {
                },
            )
            .unwrap(),
            is_modified,
            modified,
        )
    } else {
        (new_html, is_modified)
        (new_html, modified)
    }
}

#[derive(Debug)]
struct TOCEntry {
    id: String,
    title: String,
    depth: u8,
}

fn generate_toc<'a>(root: &'a AstNode<'a>) -> Vec<TOCEntry> {
    /// See <https://github.com/kivikakk/comrak/blob/b67d406d3b101b93539c37a1ca75bff81ff8c149/src/html.rs#L446>
    fn collect_text<'a>(node: &'a AstNode<'a>, output: &mut String) {
        match node.data.borrow().value {
            NodeValue::Text(ref literal)
            | NodeValue::Code(NodeCode { ref literal, .. })
            | NodeValue::Math(NodeMath { ref literal, .. }) => {
                *output = literal.to_string();
            }
            _ => {
                for n in node.children() {
                    if !output.is_empty() {
                        break;
                    }

                    collect_text(n, output);
                }
            }
        }
    }

    let mut toc = vec![];

    let mut anchorizer = Anchorizer::new();

    // Collect headings first to avoid mutable borrow conflicts
    let headings: Vec<_> = root
        .children()
        .filter_map(|node| {
            if let NodeValue::Heading(ref nch) = &node.data.borrow().value {
                Some((*nch, node))
            } else {
                None
            }
        })
        .collect();

    // Now process each heading
    for (nch, node) in headings {
        let mut title = String::with_capacity(20);
        collect_text(node, &mut title);

        toc.push(TOCEntry {
            id: anchorizer.anchorize(title.clone()),
            title,
            depth: nch.level,
        });
    }

    toc
}

fn toc_to_html(toc: &[TOCEntry]) -> String {
    if toc.is_empty() {
        return String::new();
    }

    let mut html = Vec::with_capacity(20 + 20 * toc.len());

    html.extend_from_slice(b"<ul>");

    for entry in toc {
        // TODO: Use depth
        html.extend_from_slice(
            format!(
                "<li><a href=\"{}\">{} (dbg/depth/{})</a></li>",
                entry.id, entry.title, entry.depth
            )
            .as_bytes(),
        );
    }

    html.extend_from_slice(b"</ul>");

    String::from_utf8(html).unwrap()
}
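
A side note on the TOC rework above: the heading ids come from comrak's Anchorizer, the same helper the new generate_toc uses. The snippet below is an illustrative standalone sketch, not code from this repository; it only assumes the comrak crate is available and mirrors the anchorize(String) call seen in the diff.

```rust
use comrak::Anchorizer;

fn main() {
    let mut anchorizer = Anchorizer::new();

    // Slugifies a heading title into an id usable as an anchor, e.g. "my-heading".
    let first = anchorizer.anchorize("My Heading".to_string());

    // A repeated heading gets a de-duplicating suffix so ids stay unique.
    let again = anchorizer.anchorize("My Heading".to_string());

    println!("{first} / {again}");
}
```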

(file name not captured)

@@ -81,5 +81,6 @@ fn read_pdf(data: Vec<u8>) -> File {
                style="width: 100%; height: 79vh";
                >"#
        ),
        toc_data: String::new(),
    }
}

(file name not captured)

@@ -1,15 +1,53 @@
use std::time::Duration;

use actix_web::{get, HttpResponse, Responder};
use chrono::Utc;
use cyborgtime::format_duration;
use serde::Serialize;

/// Response
/// Response for /love
#[derive(Serialize)]
struct Info {
struct InfoLove {
    unix_epoch: u32,
}

#[get("/love")]
pub async fn love() -> impl Responder {
    HttpResponse::Ok().json(Info {
    HttpResponse::Ok().json(InfoLove {
        unix_epoch: 1_605_576_600,
    })
}

/// Response for /backtofrance
#[derive(Serialize)]
struct InfoBTF {
    unix_epoch: u64,
    countdown: String,
}

#[get("/backtofrance")]
pub async fn btf() -> impl Responder {
    let target = 1_736_618_100;
    let current_time: u64 = Utc::now().timestamp().try_into().unwrap();

    let info = InfoBTF {
        unix_epoch: target,
        countdown: if current_time > target {
            "Already happened".to_owned()
        } else {
            let duration_epoch = target - current_time;
            let duration = Duration::from_secs(duration_epoch);
            format_duration(duration).to_string()
        },
    };

    HttpResponse::Ok().json(info)
}

#[get("/websites")]
pub async fn websites() -> impl Responder {
    HttpResponse::Ok().json((
        "http://www.bocal.cs.univ-paris8.fr/~akennel/",
        "https://anri.up8.site/",
    ))
}
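
A side note on the new /backtofrance endpoint: the countdown string comes from cyborgtime::format_duration, a humantime-style formatter. The snippet below is an illustrative sketch, not code from this repository, and the exact output format shown in the comment is an assumption.

```rust
use cyborgtime::format_duration;
use std::time::Duration;

fn main() {
    // 3 days, 4 hours and 5 minutes until a target timestamp.
    let remaining = Duration::from_secs(3 * 86_400 + 4 * 3_600 + 5 * 60);

    // Prints a compact human-readable countdown, e.g. "3days 4h 5m".
    println!("{}", format_duration(remaining));
}
```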

(file name not captured)

@@ -186,7 +186,6 @@ fn get_posts(location: &str) -> Vec<Post> {
struct BlogPostTemplate {
    navbar: NavBar,
    post: Option<File>,
    toc: String,
}

#[get("/blog/p/{id}")]

@@ -199,7 +198,7 @@ pub async fn page(path: web::Path<(String,)>, config: web::Data<Config>) -> impl

fn build_post(file: &str, config: Config) -> String {
    let mut post = None;
    let (infos, toc) = get_post(
    let infos = get_post(
        &mut post,
        file,
        &config.fc.name.unwrap_or_default(),

@@ -214,18 +213,12 @@ fn build_post(file: &str, config: Config) -> String {
                ..NavBar::default()
            },
            post,
            toc,
        },
        infos,
    )
}

fn get_post(
    post: &mut Option<File>,
    filename: &str,
    name: &str,
    data_dir: &str,
) -> (InfosPage, String) {
fn get_post(post: &mut Option<File>, filename: &str, name: &str, data_dir: &str) -> InfosPage {
    let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
    let ext = ".md";

@@ -234,13 +227,8 @@ fn get_post(
        &TypeFileMetadata::Blog,
    );

    let default = (
        filename,
        &format!("Blog d'{name}"),
        Vec::new(),
        String::new(),
    );
    let (title, desc, tags, toc) = match post {
    let default = (filename, &format!("Blog d'{name}"), Vec::new());
    let (title, desc, tags) = match post {
        Some(data) => (
            match &data.metadata.info.blog.as_ref().unwrap().title {
                Some(text) => text,

@@ -254,28 +242,20 @@ fn get_post(
                Some(tags) => tags.clone(),
                None => default.2,
            },
            match &data.metadata.info.blog.as_ref().unwrap().toc {
                // TODO: Generate TOC
                Some(true) => String::new(),
                _ => default.3,
            },
        ),
        None => default,
    };

    (
        InfosPage {
            title: Some(format!("Post: {title}")),
            desc: Some(desc.clone()),
            kw: Some(make_kw(
                &["blog", "blogging", "write", "writing"]
                    .into_iter()
                    .chain(tags.iter().map(|t| t.name.as_str()))
                    .collect::<Vec<_>>(),
            )),
        },
        toc,
    )
    InfosPage {
        title: Some(format!("Post: {title}")),
        desc: Some(desc.clone()),
        kw: Some(make_kw(
            &["blog", "blogging", "write", "writing"]
                .into_iter()
                .chain(tags.iter().map(|t| t.name.as_str()))
                .collect::<Vec<_>>(),
        )),
    }
}

#[routes]

BIN static/badges/friends/jas.webp (stored with Git LFS; binary file not shown)

(file name not captured)

@@ -12,6 +12,7 @@
    --background: #f1f1f1;
    --font-color: #18181b;
    --link-color: #df5a9c;
    --selection-color: #c5c5c560;
  }
}

@@ -21,5 +22,6 @@
    --background: #171e26;
    --font-color: #bcbcc5;
    --link-color: #ff80bf;
    --selection-color: #c5c5c530;
  }
}

(file name not captured)

@@ -4,6 +4,10 @@ html {
  font-family: var(--font-family);
}

::selection {
  background-color: var(--selection-color);
}

body,
a {
  color: var(--font-color);

(file name not captured)

@@ -12,12 +12,13 @@ window.addEventListener("load", () => {
    -webkit-background-clip: text; /* Chromium fix */
    color: transparent;
  `;
  const mono = "font-family: monospace";

  const tags = [
    new Tag("Comment fonctionne un PC 😵💫"),
    new Tag("undefined", "font-family: monospace"),
    new Tag("/api/v1/love", "font-family: monospace"),
    new Tag("A rater son master 🎊"),
    new Tag("undefined", mono),
    new Tag("/api/v1/love", mono),
    new Tag("/api/v1/websites", mono),
    new Tag("Peak D2 sur Valo 🤡"),
    new Tag(
      "0x520",

@@ -47,7 +48,7 @@
      `
    ),
    new Tag("School hater"),
    new Tag("Stagiaire"),
    new Tag("Étudiant"),
    new Tag("Rempli de malice"),
    new Tag(
      "#NouveauFrontPopulaire ✊",

@@ -61,6 +62,7 @@ window.addEventListener("load", () => {
      text-shadow: 0px 0px 20px light-dark(var(--font-color), transparent);
      `
    ),
    new Tag("s/centre/droite/g", mono),
  ];

  const random = Math.round(Math.random() * (tags.length - 1));

(file name not captured)

@@ -11,6 +11,9 @@ window.addEventListener("load", () => {
      RA: "Rightarrow",
      LA: "Leftarrow",
      u: "mu",
      Tau: "mathrm{T}",
      lb: "textlbrackdbl",
      rb: "textrbrackdbl",
    })
  )[Symbol.iterator]()) {
    const bs = "\\";

(file name not captured)

@@ -28,7 +28,9 @@
    <main>
      {{^post}}
      <p>This post doesn't exist... sorry</p>
      {{/post}} {{#post}} {{&toc}}
      {{/post}} {{#post}} {{#metadata}} {{#info}} {{#blog}} {{#toc}}
      <aside>{{&toc_data}}</aside>
      {{/toc}} {{/blog}} {{/info}} {{/metadata}}
      <article>{{&content}}</article>
      {{/post}}
    </main>