Compare commits

..

20 commits

SHA1  Message  Date
71e9776aa2  add raw post endpoint  2024-11-06 14:04:22 +01:00
            (All checks were successful: PR Check / lint-and-format (pull_request) successful in 2m43s)
87d0fa3c11  'a instead of 'de  2024-10-24 20:19:26 +02:00
7f3434b7c1  no sync needed  2024-10-24 20:17:31 +02:00
13d7b54c27  build at first  2024-10-22 12:07:05 +02:00
984ecb6b69  bump dependencies (#79): lol_html and comrak (Reviewed-on: #79)  2024-10-22 12:05:38 +02:00
5a15945439  fix img  2024-10-22 12:00:13 +02:00
e9441dba46  PR Check  2024-10-22 11:49:57 +02:00
3f3efe4afa  add lb and rb  2024-10-08 13:21:30 +02:00
37b51bcbee  Add Tau  2024-10-03 13:06:56 +02:00
3cc69f3d4f  no longer stagiaire  2024-09-22 17:02:03 +02:00
7432ffd5f9  clickable badge  2024-09-19 15:53:28 +02:00
764a632ae6  /api/v1/websites  2024-09-19 15:50:37 +02:00
396bff909e  update dependencies  2024-09-14 21:01:30 +02:00
fb44c25e47  clippy  2024-09-14 20:17:11 +02:00
a7aec1b94e  add countdown  2024-08-29 23:31:35 +02:00
8cedeb531d  cleanup using https://github.com/actions/checkout/issues/1830#issuecomment-2314758792 answer  2024-08-28 16:43:30 +02:00
912f16e0c3  remove jas  2024-08-13 13:21:25 +02:00
999d68ab60  includes images in releases  2024-08-13 13:01:20 +02:00
2d9fc0d559  only run workflow manually  2024-08-13 11:55:12 +02:00
8c386d5ac6  use forgejo actions (#72)  2024-08-11 16:49:06 +02:00
            (Some checks failed: Publish latest version / build (push) has been cancelled)
12 changed files with 604 additions and 559 deletions

View file

@@ -0,0 +1,23 @@
+name: PR Check
+on:
+  pull_request:
+    types: [opened, synchronize, reopened]
+
+jobs:
+  lint-and-format:
+    container:
+      image: cimg/rust:1.81-node
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Build
+        run: cargo build
+      - name: Run format check
+        run: cargo fmt --check
+      - name: Run Clippy
+        run: cargo clippy

View file

@@ -0,0 +1,47 @@
+name: Publish latest version
+on:
+  workflow_dispatch:
+
+jobs:
+  build:
+    container:
+      image: ghcr.io/catthehacker/ubuntu:act-latest
+    steps:
+      - name: Checkout Code
+        uses: actions/checkout@v4
+      - name: Checkout LFS
+        run: |
+          # Replace double auth header, see https://github.com/actions/checkout/issues/1830
+          AUTH=$(git config --local http.${{ github.server_url }}/.extraheader)
+          git config --local --unset http.${{ github.server_url }}/.extraheader
+          git config --local http.${{ github.server_url }}/${{ github.repository }}.git/info/lfs/objects/batch.extraheader "$AUTH"
+          # Get files
+          git lfs fetch
+          git lfs checkout
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+      - name: Sanitize metadata
+        id: meta
+        uses: docker/metadata-action@v5
+        with:
+          tags: latest
+          images: git.mylloon.fr/${{ github.repository }}
+      - name: Login to Registry
+        uses: docker/login-action@v3
+        with:
+          registry: ${{ github.server_url }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.TOKEN }}
+      - name: Build and push
+        uses: docker/build-push-action@v6
+        with:
+          context: .
+          push: ${{ github.event_name != 'pull_request' }}
+          tags: ${{ steps.meta.outputs.tags }}

View file

@@ -1,15 +0,0 @@
-steps:
-  publish:
-    image: woodpeckerci/plugin-docker-buildx:2
-    settings:
-      labels:
-        platform: linux/amd64
-      repo: git.mylloon.fr/${CI_REPO,,}
-      auto_tag: true
-      registry: git.mylloon.fr
-      username: ${CI_REPO_OWNER}
-      password:
-        from_secret: cb_token
-    when:
-      event: push
-      branch: main

Cargo.lock (generated, 899 lines changed)

File diff suppressed because it is too large.

View file

@@ -10,7 +10,7 @@ publish = false
 license = "AGPL-3.0-or-later"
 
 [dependencies]
-actix-web = { version = "4.6", default-features = false, features = ["macros", "compress-brotli"] }
+actix-web = { version = "4.9", default-features = false, features = ["macros", "compress-brotli"] }
 actix-files = "0.6"
 cached = { version = "0.53", features = ["async", "ahash"] }
 ramhorns = "1.0"
@@ -21,16 +21,17 @@ serde_json = "1.0"
 minify-html = "0.15"
 minify-js = "0.6"
 glob = "0.3"
-comrak = "0.26"
+comrak = "0.29"
 reqwest = { version = "0.12", features = ["json"] }
 chrono = { version = "0.4.38", default-features = false, features = ["clock"]}
-chrono-tz = "0.9"
+chrono-tz = "0.10"
 rss = { version = "2.0", features = ["atom"] }
-lol_html = "1.2"
+lol_html = "2.0"
 base64 = "0.22"
 mime_guess = "2.0"
 urlencoding = "2.1"
 regex = "1.10"
+cyborgtime = "2.1.1"
 
 [lints.clippy]
 pedantic = "warn"
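The only new dependency is cyborgtime, pulled in for the countdown endpoint added later in this diff. Below is a minimal standalone sketch of how it is used there, not repository code; only the cyborgtime::format_duration call is taken from the diff, and the exact wording of the output is whatever format_duration produces.

```rust
// Standalone sketch: format a remaining duration the way the /backtofrance
// handler does. The duration value here is arbitrary.
use std::time::Duration;

use cyborgtime::format_duration;

fn main() {
    let remaining = Duration::from_secs(3 * 86_400 + 4 * 3_600 + 5 * 60);
    // Prints a human-readable breakdown along the lines of "3days 4h 5m".
    println!("{}", format_duration(remaining));
}
```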

View file

@@ -3,7 +3,7 @@
 Easy WebPage generator
 
 [![dependency status](https://deps.rs/repo/gitea/git.mylloon.fr/Anri/mylloon.fr/status.svg)](https://deps.rs/repo/gitea/git.mylloon.fr/Anri/mylloon.fr)
-[![status-badge](https://ci.mylloon.fr/api/badges/Anri/mylloon.fr/status.svg)](https://ci.mylloon.fr/Anri/mylloon.fr)
+[![status-badge](https://git.mylloon.fr/Anri/mylloon.fr/badges/workflows/publish.yml/badge.svg)](https://git.mylloon.fr/Anri/mylloon.fr/actions?workflow=publish.yml)
 
 - See [issues](https://git.mylloon.fr/Anri/mylloon.fr/issues)
 - See [documentation](https://git.mylloon.fr/Anri/mylloon.fr/src/branch/main/Documentation.md)

View file

@@ -42,7 +42,14 @@ async fn main() -> Result<()> {
                     .add(("Server", format!("ewp/{}", env!("CARGO_PKG_VERSION"))))
                     .add(("Permissions-Policy", "interest-cohort=()")),
             )
-            .service(web::scope("/api").service(web::scope("v1").service(api_v1::love)))
+            .service(
+                web::scope("/api").service(
+                    web::scope("v1")
+                        .service(api_v1::love)
+                        .service(api_v1::btf)
+                        .service(api_v1::websites),
+                ),
+            )
             .service(index::page)
             .service(agreements::security)
             .service(agreements::humans)
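For context, the nested scopes mount the api_v1 handlers under a common prefix, which is how the /api/v1/love and /api/v1/websites routes referenced in the front-end tags further down come about. A self-contained sketch of the same layout follows, with a stand-in handler rather than the repository's.

```rust
// Minimal actix-web app mirroring the scope layout above. The handler body is
// a stand-in, not the real api_v1::love.
use actix_web::{get, web, App, HttpResponse, HttpServer, Responder};

#[get("/love")]
async fn love() -> impl Responder {
    HttpResponse::Ok().body("ok")
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        // Scope "/api" wrapping scope "v1" exposes the handler at GET /api/v1/love.
        App::new().service(web::scope("/api").service(web::scope("v1").service(love)))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```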

View file

@@ -10,8 +10,6 @@ use serde::{Deserialize, Deserializer};
 use std::fmt::Debug;
 use std::fs;
 use std::path::Path;
-use std::sync::atomic::{AtomicBool, Ordering};
-use std::sync::Arc;
 
 /// Metadata for blog posts
 #[derive(Content, Debug, Default, Deserialize)]
@@ -30,10 +28,10 @@ pub struct Tag {
     pub name: String,
 }
 
-impl<'de> Deserialize<'de> for Tag {
+impl<'a> Deserialize<'a> for Tag {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
     where
-        D: Deserializer<'de>,
+        D: Deserializer<'a>,
     {
         match <&str>::deserialize(deserializer) {
             Ok(s) => match serde_yml::from_str(s) {
@@ -167,6 +165,7 @@ pub fn get_options<'a>() -> ComrakOptions<'a> {
     options.render.ignore_empty_links = true;
     options.render.gfm_quirks = true;
     options.render.prefer_fenced = false;
+    options.render.figure_with_caption = false;
 
     options
 }
@@ -178,7 +177,7 @@ fn custom_img_size(html: &str) -> String {
         RewriteStrSettings {
             element_content_handlers: vec![element!("img[alt]", |el| {
                 let alt = el.get_attribute("alt").unwrap();
-                let possible_piece = alt.split(|c| c == '|').collect::<Vec<&str>>();
+                let possible_piece = alt.split('|').collect::<Vec<&str>>();
 
                 if possible_piece.len() > 1 {
                     let data = possible_piece.last().unwrap().trim();
@@ -229,46 +228,53 @@ fn fix_images_and_integration(path: &str, html: &str) -> (String, Metadata) {
         mail_obfsucated: false,
     };
 
-    (
-        rewrite_str(
-            html,
-            RewriteStrSettings {
-                element_content_handlers: vec![element!("img", |el| {
-                    if let Some(src) = el.get_attribute("src") {
-                        let img_src = Path::new(path).parent().unwrap();
-                        let img_path = urlencoding::decode(img_src.join(src).to_str().unwrap())
-                            .unwrap()
-                            .to_string();
-
-                        if let Ok(file) = fs::read_to_string(&img_path) {
-                            let mime = mime_guess::from_path(&img_path).first_or_octet_stream();
-
-                            if mime == "text/markdown" {
-                                let mut options = get_options();
-                                options.extension.footnotes = false;
-                                let data = read_md(
-                                    &img_path,
-                                    &file,
-                                    &TypeFileMetadata::Generic,
-                                    Some(options),
-                                );
-                                el.replace(&data.content, ContentType::Html);
-                                metadata.merge(&data.metadata);
-                            } else {
-                                let image = general_purpose::STANDARD.encode(file);
-                                el.set_attribute("src", &format!("data:{mime};base64,{image}"))
-                                    .unwrap();
-                            }
-                        }
-                    }
-
-                    Ok(())
-                })],
-                ..RewriteStrSettings::default()
-            },
-        )
-        .unwrap(),
-        metadata,
-    )
+    // Collection of any additional metadata
+    let mut additional_metadata = Vec::new();
+
+    let result = rewrite_str(
+        html,
+        RewriteStrSettings {
+            element_content_handlers: vec![element!("img", |el| {
+                if let Some(src) = el.get_attribute("src") {
+                    let img_src = Path::new(path).parent().unwrap();
+                    let img_path = urlencoding::decode(img_src.join(src).to_str().unwrap())
+                        .unwrap()
+                        .to_string();
+
+                    if let Ok(file) = fs::read_to_string(&img_path) {
+                        let mime = mime_guess::from_path(&img_path).first_or_octet_stream();
+
+                        if mime == "text/markdown" {
+                            let mut options = get_options();
+                            options.extension.footnotes = false;
+                            let data = read_md(
+                                &img_path,
+                                &file,
+                                &TypeFileMetadata::Generic,
+                                Some(options),
+                            );
+                            el.replace(&data.content, ContentType::Html);
+                            // Store the metadata for later merging
+                            additional_metadata.push(data.metadata);
+                        } else {
+                            let image = general_purpose::STANDARD.encode(file);
+                            el.set_attribute("src", &format!("data:{mime};base64,{image}"))
+                                .unwrap();
+                        }
+                    }
+                }
+
+                Ok(())
+            })],
+            ..RewriteStrSettings::default()
+        },
+    )
+    .unwrap();
+
+    // Merge all collected metadata
+    for additional in additional_metadata {
+        metadata.merge(&additional);
+    }
+
+    (result, metadata)
 }
 
 /// Transform markdown string to File structure
@@ -405,12 +411,12 @@ fn check_code<'a>(root: &'a AstNode<'a>, blacklist: &[String]) -> bool {
 
 /// Check if html contains maths
 fn check_math(html: &str) -> bool {
-    let math_detected = Arc::new(AtomicBool::new(false));
+    let mut math_detected = false;
 
     let _ = HtmlRewriter::new(
         Settings {
             element_content_handlers: vec![element!("span[data-math-style]", |_| {
-                math_detected.store(true, Ordering::SeqCst);
+                math_detected = true;
                 Ok(())
             })],
@@ -420,7 +426,7 @@
     )
     .write(html.as_bytes());
 
-    math_detected.load(Ordering::SeqCst)
+    math_detected
 }
 
 /// Change class of languages for hljs detection
@@ -436,7 +442,7 @@ fn hljs_replace<'a>(root: &'a AstNode<'a>, mermaid_str: &str) {
 
 /// Obfuscate email if email found
 fn mail_obfuscation(html: &str) -> (String, bool) {
-    let modified = Arc::new(AtomicBool::new(false));
+    let mut modified = false;
 
     let data_attr = "title";
@@ -445,7 +451,7 @@ fn mail_obfuscation(html: &str) -> (String, bool) {
         html,
         RewriteStrSettings {
             element_content_handlers: vec![element!("a[href^='mailto:']", |el| {
-                modified.store(true, Ordering::SeqCst);
+                modified = true;
 
                 // Get mail address
                 let link = el.get_attribute("href").unwrap();
@@ -465,9 +471,7 @@ fn mail_obfuscation(html: &str) -> (String, bool) {
     )
     .unwrap();
 
-    let is_modified = modified.load(Ordering::SeqCst);
-
-    if is_modified {
+    if modified {
         // Remove old data email if exists
         (
             rewrite_str(
@@ -493,9 +497,9 @@ fn mail_obfuscation(html: &str) -> (String, bool) {
                 },
             )
             .unwrap(),
-            is_modified,
+            modified,
         )
     } else {
-        (new_html, is_modified)
+        (new_html, modified)
     }
 }
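The recurring change in this file is replacing Arc<AtomicBool> with a plain mutable flag: the lol_html handlers run synchronously inside the rewrite call, so a local bool captured by the closure is enough. Below is a small self-contained sketch of that pattern, using the same span[data-math-style] selector as check_math above; the surrounding program and input HTML are illustrative, not repository code.

```rust
// Detect whether a document contains a given element by letting the handler
// closure flip a locally borrowed boolean, mirroring the check_math() and
// mail_obfuscation() simplification above.
use lol_html::{element, rewrite_str, RewriteStrSettings};

fn contains_math(html: &str) -> bool {
    let mut found = false;

    let _ = rewrite_str(
        html,
        RewriteStrSettings {
            element_content_handlers: vec![element!("span[data-math-style]", |_el| {
                found = true;
                Ok(())
            })],
            ..RewriteStrSettings::default()
        },
    );

    found
}

fn main() {
    assert!(contains_math(
        r#"<p><span data-math-style="display">x^2</span></p>"#
    ));
    assert!(!contains_math("<p>no math here</p>"));
}
```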

View file

@@ -1,15 +1,53 @@
+use std::time::Duration;
+
 use actix_web::{get, HttpResponse, Responder};
+use chrono::Utc;
+use cyborgtime::format_duration;
 use serde::Serialize;
 
-/// Response
+/// Response for /love
 #[derive(Serialize)]
-struct Info {
+struct InfoLove {
     unix_epoch: u32,
 }
 
 #[get("/love")]
 pub async fn love() -> impl Responder {
-    HttpResponse::Ok().json(Info {
+    HttpResponse::Ok().json(InfoLove {
         unix_epoch: 1_605_576_600,
     })
 }
+
+/// Response for /backtofrance
+#[derive(Serialize)]
+struct InfoBTF {
+    unix_epoch: u64,
+    countdown: String,
+}
+
+#[get("/backtofrance")]
+pub async fn btf() -> impl Responder {
+    let target = 1_736_618_100;
+    let current_time: u64 = Utc::now().timestamp().try_into().unwrap();
+
+    let info = InfoBTF {
+        unix_epoch: target,
+        countdown: if current_time > target {
+            "Already happened".to_owned()
+        } else {
+            let duration_epoch = target - current_time;
+            let duration = Duration::from_secs(duration_epoch);
+            format_duration(duration).to_string()
+        },
+    };
+
+    HttpResponse::Ok().json(info)
+}
+
+#[get("/websites")]
+pub async fn websites() -> impl Responder {
+    HttpResponse::Ok().json((
+        "http://www.bocal.cs.univ-paris8.fr/~akennel/",
+        "https://anri.up8.site/",
+    ))
+}
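Taken together, v1 of the API now exposes three routes. The following is a purely illustrative sketch of the JSON shapes they serialize to, re-declaring the structs from this diff; the countdown string is made up.

```rust
// Illustrative only: prints the JSON each endpoint would return.
use serde::Serialize;

#[derive(Serialize)]
struct InfoLove {
    unix_epoch: u32,
}

#[derive(Serialize)]
struct InfoBTF {
    unix_epoch: u64,
    countdown: String,
}

fn main() {
    // GET /api/v1/love
    let love = InfoLove {
        unix_epoch: 1_605_576_600,
    };
    println!("{}", serde_json::to_string(&love).unwrap());
    // {"unix_epoch":1605576600}

    // GET /api/v1/backtofrance (countdown value here is invented)
    let btf = InfoBTF {
        unix_epoch: 1_736_618_100,
        countdown: "2months 5days".to_owned(),
    };
    println!("{}", serde_json::to_string(&btf).unwrap());
    // {"unix_epoch":1736618100,"countdown":"2months 5days"}

    // GET /api/v1/websites serializes a two-element tuple, i.e. a JSON array.
    let sites = (
        "http://www.bocal.cs.univ-paris8.fr/~akennel/",
        "https://anri.up8.site/",
    );
    println!("{}", serde_json::to_string(&sites).unwrap());
}
```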

BIN
static/badges/friends/jas.webp (Stored with Git LFS)

Binary file not shown.

View file

@ -18,6 +18,7 @@ window.addEventListener("load", () => {
new Tag("Comment fonctionne un PC 😵‍💫"), new Tag("Comment fonctionne un PC 😵‍💫"),
new Tag("undefined", mono), new Tag("undefined", mono),
new Tag("/api/v1/love", mono), new Tag("/api/v1/love", mono),
new Tag("/api/v1/websites", mono),
new Tag("Peak D2 sur Valo 🤡"), new Tag("Peak D2 sur Valo 🤡"),
new Tag( new Tag(
"0x520", "0x520",
@ -47,7 +48,7 @@ window.addEventListener("load", () => {
` `
), ),
new Tag("School hater"), new Tag("School hater"),
new Tag("Stagiaire"), new Tag("Étudiant"),
new Tag("Rempli de malice"), new Tag("Rempli de malice"),
new Tag( new Tag(
"#NouveauFrontPopulaire ✊", "#NouveauFrontPopulaire ✊",

View file

@ -11,6 +11,9 @@ window.addEventListener("load", () => {
RA: "Rightarrow", RA: "Rightarrow",
LA: "Leftarrow", LA: "Leftarrow",
u: "mu", u: "mu",
Tau: "mathrm{T}",
lb: "textlbrackdbl",
rb: "textrbrackdbl",
}) })
)[Symbol.iterator]()) { )[Symbol.iterator]()) {
const bs = "\\"; const bs = "\\";