Basic /cours support (#44)
All checks were successful: ci/woodpecker/push/publish pipeline passed.

feat: Basic support for the new `/cours` endpoint (not ready for release yet); see the commit description for details

- Basic /cours support
- Fix LaTeX support (see #47 / cours+blog)
  - Better detection of LaTeX in documents
  - Markdown and LaTeX processing are no longer mixed up (thanks to comrak)
  - Macros on release
- Local image support (cours+blog)
- PDF support
- Support for embedding Markdown files inside other Markdown files
- Very basic exclusion support in the ToC (needs a lot of improvement; see the sketch after this list)
- Update multiple dependencies (actix-web, ramhorns, comrak, reqwest, hljs)
- Reformat some code
- ToC support in /cours (very basic: the tree is built in Rust and rendered in JS)
- Remove very old assets (Luciole font + jsPDF)
- Hide navbar when printing the website
- New tag on the index page
- Fix OCaml support in HLJS + add a "pseudocode" alias derived from Julia
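
The ToC exclusion works by compiling each `exclude_courses` entry from `config.toml` into a regex and dropping any filetree entry whose name matches (see `compile_patterns` and `get_filetree` in the new /cours route below). A minimal sketch of that filtering, assuming the `regex` crate; the patterns and file names are hypothetical:

```rust
use regex::Regex;

fn main() {
    // Hypothetical patterns, as they could appear in `exclude_courses`
    let exclusion_list = vec![r"^private".to_string(), r"\.draft\.md$".to_string()];

    // Same idea as compile_patterns(): each entry becomes a Regex
    let patterns: Vec<Regex> = exclusion_list
        .iter()
        .map(|p| Regex::new(p).expect("invalid exclusion pattern"))
        .collect();

    // get_filetree() drops any entry whose name matches one of the patterns
    let entries = ["intro.md", "private-notes.md", "analyse.draft.md", "algo.md"];
    let kept: Vec<&str> = entries
        .iter()
        .copied()
        .filter(|name| !patterns.iter().any(|re| re.is_match(name)))
        .collect();

    println!("{kept:?}"); // ["intro.md", "algo.md"]
}
```

Note that when serving a single file, `get_content` reuses the same list as plain substrings rather than regexes (hence its `// We should support regex?` comment), which is likely part of the "needs a lot of improvement" caveat above.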

Reviewed-on: #44
Co-authored-by: Mylloon <kennel.anri@tutanota.com>
Co-committed-by: Mylloon <kennel.anri@tutanota.com>
Mylloon 2024-04-01 18:11:46 +02:00 committed by Anri Kennel
parent 51ed97273c
commit 9dfcc1101d
Signed by: Forgejo
GPG key ID: E72245C752A07631
32 changed files with 1767 additions and 258 deletions

Cargo.lock (generated, 748 changed lines)

File diff suppressed because it is too large.

View file

@ -10,19 +10,24 @@ publish = false
license = "AGPL-3.0-or-later"
[dependencies]
actix-web = { version = "4.4", default-features = false, features = ["macros", "compress-brotli"] }
actix-web = { version = "4.5", default-features = false, features = ["macros", "compress-brotli"] }
actix-files = "0.6"
cached = { version = "0.49", features = ["async"] }
ramhorns = "0.14"
cached = { version = "0.49", features = ["async", "ahash"] }
ramhorns = "1.0"
toml = "0.8"
serde = { version = "1.0", features = ["derive"] }
serde_yaml = "0.9"
serde_yml = "0.0.2"
serde_json = "1.0"
minify-html = "0.15"
minify-js = "0.6"
glob = "0.3"
comrak = "0.21"
reqwest = { version = "0.11", features = ["json"] }
chrono = { version = "0.4.30", default-features = false, features = ["clock"]}
comrak = "0.22"
reqwest = { version = "0.12", features = ["json"] }
chrono = { version = "0.4", default-features = false, features = ["clock"]}
chrono-tz = "0.8"
rss = { version = "2.0", features = ["atom"] }
lol_html = "1.2"
base64 = "0.22"
mime_guess = "2.0"
urlencoding = "2.1"
regex = "1.10"

View file

@ -14,6 +14,7 @@
- [Blog](#blog)
- [Projects](#projects)
- [Contacts](#contacts)
- [Courses](#courses)
# Installation
@ -105,6 +106,7 @@ onion = "http://youraddress.onion/"
app_name = "Nickname" # fallback to 'EWP' if none
name = "Firstname"
fullname = "Fullname"
exclude_courses = []
```
## Link shortener for contacts
@ -248,3 +250,7 @@ For example, `socials` contact files are stored in `/app/data/contacts/socials/`
### About <!-- omit in toc -->
The file is stored at `/app/data/contacts/about.md`.
## Courses
Markdown files are stored in `/app/data/cours/`
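
Concretely, a course file saved under `/app/data/cours/` is served by the new `/cours` route through its `q` query parameter. A minimal sketch of the path resolution, mirroring `get_content()` from the route (the course filename is hypothetical):

```rust
fn main() {
    // `q` comes from the query string: /cours?q=maths/chapitre1.md
    // (hypothetical file); without it, the route falls back to index.md.
    let cours_dir = "data/cours"; // relative path used by the route code
    let q: Option<String> = Some("maths/chapitre1.md".into());
    let filename = q.as_deref().unwrap_or("index.md");

    println!("{cours_dir}/{filename}"); // data/cours/maths/chapitre1.md
}
```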

View file

@ -7,7 +7,7 @@ use std::{fs::File, io::Write, path::Path};
use crate::template::Template;
/// Store the configuration of config/config.toml
#[derive(Deserialize, Clone, Default, Debug)]
#[derive(Clone, Debug, Default, Deserialize)]
pub struct FileConfig {
/// http/https
pub scheme: Option<String>,
@ -27,6 +27,8 @@ pub struct FileConfig {
pub name: Option<String>,
/// Fullname of website owner
pub fullname: Option<String>,
/// List exclusion for courses
pub exclude_courses: Option<Vec<String>>,
}
impl FileConfig {
@ -37,6 +39,7 @@ impl FileConfig {
domain: Some("localhost".into()),
port: Some(8080),
app_name: Some("EWP".into()),
exclude_courses: Some([].into()),
..FileConfig::default()
}
}
@ -65,6 +68,7 @@ impl FileConfig {
app_name: test(a.app_name, d.app_name),
name: test(a.name, d.name),
fullname: test(a.fullname, d.fullname),
exclude_courses: test(a.exclude_courses, d.exclude_courses),
}
}
}

View file

@ -5,12 +5,12 @@ use serde::Deserialize;
use crate::misc::utils::get_reqwest_client;
#[derive(Deserialize, Debug)]
#[derive(Debug, Deserialize)]
struct GithubResponse {
items: Vec<GithubProject>,
}
#[derive(Deserialize, Debug)]
#[derive(Debug, Deserialize)]
struct GithubProject {
repository_url: String,
number: u32,
@ -19,7 +19,7 @@ struct GithubProject {
pull_request: GithubPullRequest,
}
#[derive(Deserialize, Debug)]
#[derive(Debug, Deserialize)]
struct GithubPullRequest {
html_url: String,
merged_at: Option<String>,

View file

@ -1,13 +1,19 @@
use crate::misc::date::Date;
use base64::engine::general_purpose;
use base64::Engine;
use comrak::nodes::{AstNode, NodeValue};
use comrak::{format_html, parse_document, Arena, ComrakOptions, ListStyleType};
use lol_html::{element, rewrite_str, RewriteStrSettings};
use comrak::{format_html, parse_document, Arena, ComrakOptions, ListStyleType, Options};
use lol_html::html_content::ContentType;
use lol_html::{element, rewrite_str, HtmlRewriter, RewriteStrSettings, Settings};
use ramhorns::Content;
use serde::{Deserialize, Deserializer};
use std::fs;
use std::path::Path;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
/// Metadata for blog posts
#[derive(Default, Deserialize, Content, Debug)]
#[derive(Content, Debug, Default, Deserialize)]
pub struct FileMetadataBlog {
pub title: Option<String>,
pub date: Option<Date>,
@ -29,7 +35,7 @@ impl<'de> Deserialize<'de> for Tag {
D: Deserializer<'de>,
{
match <&str>::deserialize(deserializer) {
Ok(s) => match serde_yaml::from_str(s) {
Ok(s) => match serde_yml::from_str(s) {
Ok(tag) => Ok(Self { name: tag }),
Err(e) => Err(serde::de::Error::custom(e)),
},
@ -39,7 +45,7 @@ impl<'de> Deserialize<'de> for Tag {
}
/// Metadata for contact entry
#[derive(Default, Deserialize, Content, Debug)]
#[derive(Content, Debug, Default, Deserialize)]
pub struct FileMetadataContact {
pub title: String,
pub custom: Option<bool>,
@ -50,7 +56,7 @@ pub struct FileMetadataContact {
}
/// Metadata for index page
#[derive(Default, Deserialize, Content, Debug)]
#[derive(Content, Debug, Default, Deserialize)]
pub struct FileMetadataIndex {
pub name: Option<String>,
pub pronouns: Option<String>,
@ -60,7 +66,7 @@ pub struct FileMetadataIndex {
}
/// Metadata for portfolio cards
#[derive(Default, Deserialize, Content, Debug)]
#[derive(Content, Debug, Default, Deserialize)]
pub struct FileMetadataPortfolio {
pub title: Option<String>,
pub link: Option<String>,
@ -79,7 +85,7 @@ pub enum TypeFileMetadata {
/// Structure that holds all the metadata a file can have
/// Usually all fields are None except one
#[derive(Default, Deserialize, Content, Debug)]
#[derive(Content, Debug, Default, Deserialize)]
pub struct FileMetadata {
pub blog: Option<FileMetadataBlog>,
pub contact: Option<FileMetadataContact>,
@ -96,6 +102,15 @@ pub struct Metadata {
pub syntax_highlight: bool,
}
impl Metadata {
/// Update current metadata boolean fields, keeping true ones
fn merge(&mut self, other: Metadata) {
self.math = self.math || other.math;
self.mermaid = self.mermaid || other.mermaid;
self.syntax_highlight = self.syntax_highlight || other.syntax_highlight;
}
}
/// File description
#[derive(Content, Debug)]
pub struct File {
@ -118,6 +133,9 @@ pub fn get_options() -> ComrakOptions {
options.extension.footnotes = true;
options.extension.description_lists = true;
options.extension.front_matter_delimiter = Some("---".into());
options.extension.multiline_block_quotes = true;
options.extension.math_dollars = true;
options.extension.math_code = false;
// Parser
options.parse.smart = true; // could be boring
@ -134,6 +152,7 @@ pub fn get_options() -> ComrakOptions {
options.render.escape = false;
options.render.list_style = ListStyleType::Dash;
options.render.sourcepos = false;
options.render.escaped_char_spans = false;
options
}
@ -186,12 +205,71 @@ fn custom_img_size(html: String) -> String {
.unwrap()
}
/// Convert local images to base64 and inline referenced markdown files
fn fix_images_and_integration(path: &str, html: String) -> (String, Metadata) {
let mut metadata = Metadata {
info: FileMetadata::default(),
math: false,
mermaid: false,
syntax_highlight: false,
};
(
rewrite_str(
&html,
RewriteStrSettings {
element_content_handlers: vec![element!("img", |el| {
if let Some(src) = el.get_attribute("src") {
let img_src = Path::new(path).parent().unwrap();
let img_path = urlencoding::decode(img_src.join(src).to_str().unwrap())
.unwrap()
.to_string();
if let Ok(file) = fs::read_to_string(&img_path) {
let mime = mime_guess::from_path(&img_path).first_or_octet_stream();
if mime == "text/markdown" {
let mut options = get_options();
options.extension.footnotes = false;
let data = read_md(
&img_path,
&file,
TypeFileMetadata::Generic,
Some(options),
);
el.replace(&data.content, ContentType::Html);
metadata.merge(data.metadata);
} else {
let image = general_purpose::STANDARD.encode(file);
el.set_attribute("src", &format!("data:{};base64,{}", mime, image))
.unwrap();
}
}
}
Ok(())
})],
..RewriteStrSettings::default()
},
)
.unwrap(),
metadata,
)
}
/// Transform markdown string to File structure
fn read(raw_text: &str, metadata_type: TypeFileMetadata) -> File {
pub fn read_md(
path: &str,
raw_text: &str,
metadata_type: TypeFileMetadata,
options: Option<Options>,
) -> File {
let arena = Arena::new();
let options = get_options();
let root = parse_document(&arena, raw_text, &options);
let opt = match options {
Some(specific_opt) => specific_opt,
None => get_options(),
};
let root = parse_document(&arena, raw_text, &opt);
// Find metadata
let metadata = get_metadata(root, metadata_type);
@ -201,34 +279,31 @@ fn read(raw_text: &str, metadata_type: TypeFileMetadata) -> File {
// Convert to HTML
let mut html = vec![];
format_html(root, &options, &mut html).unwrap();
format_html(root, &opt, &mut html).unwrap();
let mut html_content = String::from_utf8(html).unwrap();
let children_metadata;
(html_content, children_metadata) = fix_images_and_integration(path, html_content);
html_content = custom_img_size(html_content);
File {
metadata: Metadata {
info: metadata,
mermaid: check_mermaid(root, mermaid_name),
syntax_highlight: check_code(root, &[mermaid_name.into()]),
math: check_math(&html_content),
},
content: html_content,
}
}
let mut final_metadata = Metadata {
info: metadata,
mermaid: check_mermaid(root, mermaid_name),
syntax_highlight: check_code(root, &[mermaid_name.into()]),
math: check_math(&html_content),
};
final_metadata.merge(children_metadata);
/// Read markdown file
pub fn read_file(filename: &str, expected_file: TypeFileMetadata) -> Option<File> {
match fs::read_to_string(filename) {
Ok(text) => Some(read(&text, expected_file)),
_ => None,
File {
metadata: final_metadata,
content: html_content,
}
}
/// Deserialize metadata based on a type
fn deserialize_metadata<T: Default + serde::de::DeserializeOwned>(text: &str) -> T {
serde_yaml::from_str(text.trim().trim_matches(&['-'] as &[_])).unwrap_or_default()
serde_yml::from_str(text.trim().trim_matches(&['-'] as &[_])).unwrap_or_default()
}
/// Fetch metadata from AST
@ -318,9 +393,25 @@ fn check_code<'a>(root: &'a AstNode<'a>, blacklist: &[String]) -> bool {
})
}
/// Check if html can contains maths
/// Check if html contains maths
fn check_math(html: &str) -> bool {
html.contains('$')
let math_detected = Arc::new(AtomicBool::new(false));
let mut output = vec![];
let _ = HtmlRewriter::new(
Settings {
element_content_handlers: vec![element!("span[data-math-style]", |_| {
math_detected.store(true, Ordering::SeqCst);
Ok(())
})],
..Settings::default()
},
|c: &[u8]| output.extend_from_slice(c),
)
.write(html.as_bytes());
math_detected.load(Ordering::SeqCst)
}
/// Change class of languages for hljs detection

View file

@ -1,12 +1,18 @@
use std::{fs, path::Path};
use actix_web::{
http::header::{self, ContentType, TryIntoHeaderValue},
http::StatusCode,
HttpRequest, HttpResponse, Responder,
};
use base64::{engine::general_purpose, Engine};
use cached::proc_macro::cached;
use reqwest::{Client, StatusCode};
use reqwest::Client;
use crate::config::FileConfig;
use super::markdown::{read_md, File, FileMetadata, Metadata, TypeFileMetadata};
#[cached]
pub fn get_reqwest_client() -> Client {
Client::builder()
@ -45,3 +51,40 @@ impl Responder for Html {
res
}
}
/// Read a file
pub fn read_file(filename: &str, expected_file: TypeFileMetadata) -> Option<File> {
match Path::new(filename).extension() {
Some(ext) => match ext.to_str().unwrap() {
"pdf" => match fs::read(filename) {
Ok(bytes) => Some(read_pdf(bytes)),
Err(_) => None,
},
_ => match fs::read_to_string(filename) {
Ok(text) => Some(read_md(filename, &text, expected_file, None)),
Err(_) => None,
},
},
None => None,
}
}
fn read_pdf(data: Vec<u8>) -> File {
let pdf = general_purpose::STANDARD.encode(data);
File {
metadata: Metadata {
info: FileMetadata::default(),
mermaid: false,
syntax_highlight: false,
math: false,
},
content: format!(
r#"<embed
src="data:application/pdf;base64,{}"
style="width: 100%; height: 79vh";
>"#,
pdf
),
}
}

View file

@ -18,10 +18,8 @@ use crate::{
config::Config,
misc::{
date::Date,
markdown::{
get_metadata, get_options, read_file, File, FileMetadataBlog, TypeFileMetadata,
},
utils::{get_url, make_kw, Html},
markdown::{get_metadata, get_options, File, FileMetadataBlog, TypeFileMetadata},
utils::{get_url, make_kw, read_file, Html},
},
template::{Infos, NavBar},
};

View file

@ -7,8 +7,8 @@ use std::fs::read_to_string;
use crate::{
config::Config,
misc::{
markdown::{read_file, File, TypeFileMetadata},
utils::{make_kw, Html},
markdown::{File, TypeFileMetadata},
utils::{make_kw, read_file, Html},
},
template::{Infos, NavBar},
};

View file

@ -26,7 +26,7 @@ struct PortfolioTemplate {
closed: Option<Vec<Project>>,
}
#[derive(Content, Clone, Debug)]
#[derive(Clone, Content, Debug)]
struct Project {
name: String,
url: String,
@ -35,7 +35,7 @@ struct Project {
pulls_closed: Vec<Pull>,
}
#[derive(Content, Clone, Debug)]
#[derive(Clone, Content, Debug)]
struct Pull {
url: String,
id: u32,

View file

@ -1,9 +1,148 @@
use actix_web::{get, Responder};
use std::path::Path;
use actix_web::{get, web, Responder};
use cached::proc_macro::cached;
use ramhorns::Content;
use regex::Regex;
use serde::{Deserialize, Serialize};
use crate::{
config::Config,
misc::{
markdown::{File, TypeFileMetadata},
utils::{make_kw, read_file, Html},
},
template::{Infos, NavBar},
};
#[derive(Debug, Deserialize)]
pub struct PathRequest {
q: Option<String>,
}
#[get("/cours")]
async fn page() -> impl Responder {
// Page de notes de cours
// Cf. https://univ.mylloon.fr/
// Cf. https://github.com/xy2z/PineDocs
actix_web::web::Redirect::to("/")
async fn page(info: web::Query<PathRequest>, config: web::Data<Config>) -> impl Responder {
Html(build_page(info, config.get_ref().to_owned()))
}
#[derive(Content, Debug)]
struct CoursTemplate {
navbar: NavBar,
filetree: String,
content: Option<File>,
}
#[derive(Clone, Debug, Serialize)]
struct FileNode {
name: String,
is_dir: bool,
children: Vec<FileNode>,
}
#[cached]
fn compile_patterns(exclusion_list: Vec<String>) -> Vec<Regex> {
exclusion_list
.iter()
.map(|pattern| Regex::new(pattern).unwrap())
.collect()
}
fn get_filetree(dir_path: &str, exclusion_patterns: &Vec<Regex>) -> FileNode {
let children = std::fs::read_dir(dir_path)
.unwrap()
.filter_map(Result::ok)
.filter_map(|entry| {
let entry_path = entry.path();
let entry_name = entry_path.file_name()?.to_string_lossy().to_string();
// Exclude entries matching the exclusion list
if exclusion_patterns.iter().any(|re| re.is_match(&entry_name)) {
return None;
}
if entry_path.is_file() {
Some(FileNode {
name: entry_name,
is_dir: false,
children: vec![],
})
} else {
// Exclude empty directories
let children_of_children =
get_filetree(entry_path.to_str().unwrap(), exclusion_patterns);
if children_of_children.is_dir && children_of_children.children.is_empty() {
None
} else {
Some(children_of_children)
}
}
})
.collect();
FileNode {
name: Path::new(dir_path)
.file_name()
.unwrap()
.to_string_lossy()
.to_string(),
is_dir: true,
children,
}
}
/// Get a page content
fn get_content(
cours_dir: &str,
path: &web::Query<PathRequest>,
exclusion_list: Vec<String>,
) -> Option<File> {
let filename = match &path.q {
Some(q) => q,
None => "index.md",
};
// We should support regex?
if exclusion_list
.iter()
.any(|excluded_term| filename.contains(excluded_term.as_str()))
{
return None;
}
read_file(
&format!("{cours_dir}/{filename}"),
TypeFileMetadata::Generic,
)
}
fn build_page(info: web::Query<PathRequest>, config: Config) -> String {
let cours_dir = "data/cours";
let exclusion_list = config.fc.exclude_courses.unwrap();
let exclusion_patterns = compile_patterns(exclusion_list.to_owned());
let filetree = get_filetree(cours_dir, &exclusion_patterns);
config.tmpl.render(
"cours.html",
CoursTemplate {
navbar: NavBar {
cours: true,
..NavBar::default()
},
filetree: serde_json::to_string(&filetree).unwrap(),
content: get_content(cours_dir, &info, exclusion_list),
},
Infos {
page_title: Some("Cours".into()),
page_desc: Some("Cours à l'univ".into()),
page_kw: make_kw(&[
"cours",
"études",
"université",
"licence",
"master",
"notes",
"digital garden",
]),
},
)
}

View file

@ -5,8 +5,8 @@ use ramhorns::Content;
use crate::{
config::Config,
misc::{
markdown::{read_file, File, TypeFileMetadata},
utils::{make_kw, Html},
markdown::{File, TypeFileMetadata},
utils::{make_kw, read_file, Html},
},
template::{Infos, NavBar},
};

View file

@ -6,8 +6,8 @@ use ramhorns::Content;
use crate::{
config::Config,
misc::{
markdown::{read_file, File, TypeFileMetadata},
utils::{make_kw, Html},
markdown::{File, TypeFileMetadata},
utils::{make_kw, read_file, Html},
},
template::{Infos, NavBar},
};

View file

@ -14,7 +14,7 @@ pub struct Template {
}
/// Structure used by /routes/*.rs
#[derive(Default, Debug)]
#[derive(Debug, Default)]
pub struct Infos {
/// Title
pub page_title: Option<String>,

static/css/cours.css (new file, 53 lines)
View file

@ -0,0 +1,53 @@
/* Filetree */
aside {
float: left;
margin-left: 20px;
position: sticky;
top: 0;
}
aside ul {
list-style: none;
padding-left: 0.6em;
}
aside li {
position: relative;
}
/* Element */
aside li:before {
content: "";
position: absolute;
top: -0.2em;
left: -1em;
height: 1em;
}
aside li.collapsed > ul {
display: none;
}
aside li.directory::before {
content: "+";
}
aside li:not(.collapsed).directory::before {
content: "-";
}
aside li.directory {
cursor: pointer;
}
@media print {
aside {
visibility: hidden;
}
}
main img {
max-width: 100%;
display: block;
margin: auto;
}

Binary file not shown.

Binary file not shown.

View file

@ -1,14 +0,0 @@
Ces fontes sont distribuées gratuitement sous Licence publique Creative Commons Attribution 4.0 International :
https://creativecommons.org/licenses/by/4.0/legalcode.fr
These fonts are freely available under Creative Commons Attribution 4.0 International Public License:
https://creativecommons.org/licenses/by/4.0/legalcode
Luciole © Laurent Bourcellier & Jonathan Perez

View file

@ -65,3 +65,9 @@ header nav a:hover {
.bold {
font-weight: bold;
}
@media print {
header nav {
visibility: hidden;
}
}

static/js/cours.js (new file, 167 lines)
View file

@ -0,0 +1,167 @@
/**
* Build the filetree
* @param {HTMLElement} parent Root element of the filetree
* @param {{name: string, is_dir: boolean, children: any[]}} data FileNode
* @param {string} location Current location, used for links creation
*/
const buildFileTree = (parent, data, location) => {
const ul = document.createElement("ul");
data.forEach((item) => {
const li = document.createElement("li");
li.classList.add(item.is_dir ? "directory" : "file");
if (item.is_dir) {
// Directory
li.textContent = item.name;
li.classList.add("collapsed");
// Toggle collapsing on click
li.addEventListener("click", function (e) {
if (e.target === li) {
li.classList.toggle("collapsed");
}
});
} else {
// File
const url = window.location.href.split("?")[0];
const a = document.createElement("a");
a.text = item.name;
a.href = `${url}?q=${location}${item.name}`;
li.appendChild(a);
}
ul.appendChild(li);
if (item.children && item.children.length > 0) {
buildFileTree(
li,
item.children,
item.is_dir ? location + `${item.name}/` : location
);
}
});
parent.appendChild(ul);
};
/**
* Uncollapse elements from the deepest element
* @param {HTMLLIElement} element Element to uncollapse
*/
const uncollapse = (element) => {
if (element) {
element.classList.remove("collapsed");
uncollapse(element.parentElement.closest("li"));
}
};
/**
* Find the deepest opened directory
* @param {string[]} path Current path we are looking at, init with fullpath
* @param {NodeListOf<ChildNode>} options Options we have, init with list root
* @returns
*/
const deepestNodeOpened = (path, options) => {
// Iterate over possible options
for (let i = 0; i < options.length; ++i) {
// If the directory and the current path match
if (decodeURI(path[0]) === options[i].firstChild.nodeValue) {
if (path.length === 1) {
// We found it
return options[i];
}
// Continue the search
return deepestNodeOpened(
path.slice(1),
options[i].querySelector("ul").childNodes
);
}
}
};
const svgDarkTheme = () => {
for (const item of document.getElementsByTagName("img")) {
if (!item.src.startsWith("data:image/svg+xml;base64,")) {
// Skip images that aren't base64-encoded SVGs
break;
}
/** Convert to grayscale */
const colorToGrayscale = (color) => {
return 0.3 * color.r + 0.59 * color.g + 0.11 * color.b;
};
/** Extract color using canvas2d */
const extractColors = (image) => {
const canvas = document.createElement("canvas");
canvas.width = image.width;
canvas.height = image.height;
const ctx = canvas.getContext("2d");
ctx.drawImage(image, 0, 0);
const imageData = ctx.getImageData(
0,
0,
Math.max(1, canvas.width),
Math.max(1, canvas.height)
);
const pixelData = imageData.data;
const colors = [];
for (let i = 0; i < pixelData.length; i += 4) {
if (pixelData[i + 3] > 0) {
colors.push({
r: pixelData[i],
g: pixelData[i + 1],
b: pixelData[i + 2],
});
}
}
return colors;
};
// Extract colors
const colors = extractColors(item);
// Calculate the average grayscale value
const grayscaleValues = colors.map(colorToGrayscale);
const totalGrayscale = grayscaleValues.reduce((acc, val) => acc + val, 0);
const averageGrayscale = totalGrayscale / grayscaleValues.length;
if (averageGrayscale < 128) {
item.style = "filter: invert(1);";
}
}
};
window.addEventListener("load", () => {
// Build the filetree
const fileTreeElement = document.getElementsByTagName("aside")[0];
const dataElement = fileTreeElement.getElementsByTagName("span")[0];
buildFileTree(
fileTreeElement,
JSON.parse(dataElement.getAttribute("data-json")).children,
""
);
dataElement.remove();
// Open nested opened directories
const infoURL = window.location.href.split("?");
if (infoURL.length > 1) {
const fullpath = infoURL[1].substring(2);
const path = fullpath.substring(0, fullpath.lastIndexOf("/"));
const last_openeded = deepestNodeOpened(
path.split("/"),
fileTreeElement.querySelector("ul").childNodes
);
uncollapse(last_openeded);
}
// Fix SVG images in dark mode
if (window.matchMedia("(prefers-color-scheme: dark)").matches) {
svgDarkTheme();
}
});

View file

@ -24,6 +24,7 @@ window.addEventListener("load", () => {
`
),
new Tag("Nul en CSS", "font-family: 'Comic Sans MS', cursive"),
new Tag("Mention poufiasse"),
new Tag("anri k... caterpillar 🐛☝️"),
];

View file

@ -0,0 +1,452 @@
/*! `julia` grammar compiled for Highlight.js 11.9.0 */
(function(){
var hljsGrammar = (function () {
'use strict';
/*
Language: Julia
Description: Julia is a high-level, high-performance, dynamic programming language.
Author: Kenta Sato <bicycle1885@gmail.com>
Contributors: Alex Arslan <ararslan@comcast.net>, Fredrik Ekre <ekrefredrik@gmail.com>
Website: https://julialang.org
Category: scientific
*/
function julia(hljs) {
// Since there are numerous special names in Julia, it is too much trouble
// to maintain them by hand. Hence these names (i.e. keywords, literals and
// built-ins) are automatically generated from Julia 1.5.2 itself through
// the following scripts for each.
// ref: https://docs.julialang.org/en/v1/manual/variables/#Allowed-Variable-Names
const VARIABLE_NAME_RE = '[A-Za-z_\\u00A1-\\uFFFF][A-Za-z_0-9\\u00A1-\\uFFFF]*';
// # keyword generator, multi-word keywords handled manually below (Julia 1.5.2)
// import REPL.REPLCompletions
// res = String["in", "isa", "where"]
// for kw in collect(x.keyword for x in REPLCompletions.complete_keyword(""))
// if !(contains(kw, " ") || kw == "struct")
// push!(res, kw)
// end
// end
// sort!(unique!(res))
// foreach(x -> println("\'", x, "\',"), res)
const KEYWORD_LIST = [
'baremodule',
'begin',
'break',
'catch',
'ccall',
'const',
'continue',
'do',
'else',
'elseif',
'end',
'export',
'false',
'finally',
'for',
'function',
'global',
'if',
'import',
'in',
'isa',
'let',
'local',
'macro',
'module',
'quote',
'return',
'true',
'try',
'using',
'where',
'while',
];
// # literal generator (Julia 1.5.2)
// import REPL.REPLCompletions
// res = String["true", "false"]
// for compl in filter!(x -> isa(x, REPLCompletions.ModuleCompletion) && (x.parent === Base || x.parent === Core),
// REPLCompletions.completions("", 0)[1])
// try
// v = eval(Symbol(compl.mod))
// if !(v isa Function || v isa Type || v isa TypeVar || v isa Module || v isa Colon)
// push!(res, compl.mod)
// end
// catch e
// end
// end
// sort!(unique!(res))
// foreach(x -> println("\'", x, "\',"), res)
const LITERAL_LIST = [
'ARGS',
'C_NULL',
'DEPOT_PATH',
'ENDIAN_BOM',
'ENV',
'Inf',
'Inf16',
'Inf32',
'Inf64',
'InsertionSort',
'LOAD_PATH',
'MergeSort',
'NaN',
'NaN16',
'NaN32',
'NaN64',
'PROGRAM_FILE',
'QuickSort',
'RoundDown',
'RoundFromZero',
'RoundNearest',
'RoundNearestTiesAway',
'RoundNearestTiesUp',
'RoundToZero',
'RoundUp',
'VERSION|0',
'devnull',
'false',
'im',
'missing',
'nothing',
'pi',
'stderr',
'stdin',
'stdout',
'true',
'undef',
'π',
'ℯ',
];
// # built_in generator (Julia 1.5.2)
// import REPL.REPLCompletions
// res = String[]
// for compl in filter!(x -> isa(x, REPLCompletions.ModuleCompletion) && (x.parent === Base || x.parent === Core),
// REPLCompletions.completions("", 0)[1])
// try
// v = eval(Symbol(compl.mod))
// if (v isa Type || v isa TypeVar) && (compl.mod != "=>")
// push!(res, compl.mod)
// end
// catch e
// end
// end
// sort!(unique!(res))
// foreach(x -> println("\'", x, "\',"), res)
const BUILT_IN_LIST = [
'AbstractArray',
'AbstractChannel',
'AbstractChar',
'AbstractDict',
'AbstractDisplay',
'AbstractFloat',
'AbstractIrrational',
'AbstractMatrix',
'AbstractRange',
'AbstractSet',
'AbstractString',
'AbstractUnitRange',
'AbstractVecOrMat',
'AbstractVector',
'Any',
'ArgumentError',
'Array',
'AssertionError',
'BigFloat',
'BigInt',
'BitArray',
'BitMatrix',
'BitSet',
'BitVector',
'Bool',
'BoundsError',
'CapturedException',
'CartesianIndex',
'CartesianIndices',
'Cchar',
'Cdouble',
'Cfloat',
'Channel',
'Char',
'Cint',
'Cintmax_t',
'Clong',
'Clonglong',
'Cmd',
'Colon',
'Complex',
'ComplexF16',
'ComplexF32',
'ComplexF64',
'CompositeException',
'Condition',
'Cptrdiff_t',
'Cshort',
'Csize_t',
'Cssize_t',
'Cstring',
'Cuchar',
'Cuint',
'Cuintmax_t',
'Culong',
'Culonglong',
'Cushort',
'Cvoid',
'Cwchar_t',
'Cwstring',
'DataType',
'DenseArray',
'DenseMatrix',
'DenseVecOrMat',
'DenseVector',
'Dict',
'DimensionMismatch',
'Dims',
'DivideError',
'DomainError',
'EOFError',
'Enum',
'ErrorException',
'Exception',
'ExponentialBackOff',
'Expr',
'Float16',
'Float32',
'Float64',
'Function',
'GlobalRef',
'HTML',
'IO',
'IOBuffer',
'IOContext',
'IOStream',
'IdDict',
'IndexCartesian',
'IndexLinear',
'IndexStyle',
'InexactError',
'InitError',
'Int',
'Int128',
'Int16',
'Int32',
'Int64',
'Int8',
'Integer',
'InterruptException',
'InvalidStateException',
'Irrational',
'KeyError',
'LinRange',
'LineNumberNode',
'LinearIndices',
'LoadError',
'MIME',
'Matrix',
'Method',
'MethodError',
'Missing',
'MissingException',
'Module',
'NTuple',
'NamedTuple',
'Nothing',
'Number',
'OrdinalRange',
'OutOfMemoryError',
'OverflowError',
'Pair',
'PartialQuickSort',
'PermutedDimsArray',
'Pipe',
'ProcessFailedException',
'Ptr',
'QuoteNode',
'Rational',
'RawFD',
'ReadOnlyMemoryError',
'Real',
'ReentrantLock',
'Ref',
'Regex',
'RegexMatch',
'RoundingMode',
'SegmentationFault',
'Set',
'Signed',
'Some',
'StackOverflowError',
'StepRange',
'StepRangeLen',
'StridedArray',
'StridedMatrix',
'StridedVecOrMat',
'StridedVector',
'String',
'StringIndexError',
'SubArray',
'SubString',
'SubstitutionString',
'Symbol',
'SystemError',
'Task',
'TaskFailedException',
'Text',
'TextDisplay',
'Timer',
'Tuple',
'Type',
'TypeError',
'TypeVar',
'UInt',
'UInt128',
'UInt16',
'UInt32',
'UInt64',
'UInt8',
'UndefInitializer',
'UndefKeywordError',
'UndefRefError',
'UndefVarError',
'Union',
'UnionAll',
'UnitRange',
'Unsigned',
'Val',
'Vararg',
'VecElement',
'VecOrMat',
'Vector',
'VersionNumber',
'WeakKeyDict',
'WeakRef',
];
const KEYWORDS = {
$pattern: VARIABLE_NAME_RE,
keyword: KEYWORD_LIST,
literal: LITERAL_LIST,
built_in: BUILT_IN_LIST,
};
// placeholder for recursive self-reference
const DEFAULT = {
keywords: KEYWORDS,
illegal: /<\//
};
// ref: https://docs.julialang.org/en/v1/manual/integers-and-floating-point-numbers/
const NUMBER = {
className: 'number',
// supported numeric literals:
// * binary literal (e.g. 0x10)
// * octal literal (e.g. 0o76543210)
// * hexadecimal literal (e.g. 0xfedcba876543210)
// * hexadecimal floating point literal (e.g. 0x1p0, 0x1.2p2)
// * decimal literal (e.g. 9876543210, 100_000_000)
// * floating pointe literal (e.g. 1.2, 1.2f, .2, 1., 1.2e10, 1.2e-10)
begin: /(\b0x[\d_]*(\.[\d_]*)?|0x\.\d[\d_]*)p[-+]?\d+|\b0[box][a-fA-F0-9][a-fA-F0-9_]*|(\b\d[\d_]*(\.[\d_]*)?|\.\d[\d_]*)([eEfF][-+]?\d+)?/,
relevance: 0
};
const CHAR = {
className: 'string',
begin: /'(.|\\[xXuU][a-zA-Z0-9]+)'/
};
const INTERPOLATION = {
className: 'subst',
begin: /\$\(/,
end: /\)/,
keywords: KEYWORDS
};
const INTERPOLATED_VARIABLE = {
className: 'variable',
begin: '\\$' + VARIABLE_NAME_RE
};
// TODO: neatly escape normal code in string literal
const STRING = {
className: 'string',
contains: [
hljs.BACKSLASH_ESCAPE,
INTERPOLATION,
INTERPOLATED_VARIABLE
],
variants: [
{
begin: /\w*"""/,
end: /"""\w*/,
relevance: 10
},
{
begin: /\w*"/,
end: /"\w*/
}
]
};
const COMMAND = {
className: 'string',
contains: [
hljs.BACKSLASH_ESCAPE,
INTERPOLATION,
INTERPOLATED_VARIABLE
],
begin: '`',
end: '`'
};
const MACROCALL = {
className: 'meta',
begin: '@' + VARIABLE_NAME_RE
};
const COMMENT = {
className: 'comment',
variants: [
{
begin: '#=',
end: '=#',
relevance: 10
},
{
begin: '#',
end: '$'
}
]
};
DEFAULT.name = 'Julia';
DEFAULT.contains = [
NUMBER,
CHAR,
STRING,
COMMAND,
MACROCALL,
COMMENT,
hljs.HASH_COMMENT_MODE,
{
className: 'keyword',
begin:
'\\b(((abstract|primitive)\\s+)type|(mutable\\s+)?struct)\\b'
},
{ begin: /<:/ } // relevance booster
];
INTERPOLATION.contains = DEFAULT.contains;
return DEFAULT;
}
return julia;
})();
hljs.registerLanguage('julia', hljsGrammar);
})();

View file

@ -0,0 +1,93 @@
/*! `ocaml` grammar compiled for Highlight.js 11.9.0 */
(function(){
var hljsGrammar = (function () {
'use strict';
/*
Language: OCaml
Author: Mehdi Dogguy <mehdi@dogguy.org>
Contributors: Nicolas Braud-Santoni <nicolas.braud-santoni@ens-cachan.fr>, Mickael Delahaye <mickael.delahaye@gmail.com>
Description: OCaml language definition.
Website: https://ocaml.org
Category: functional
*/
function ocaml(hljs) {
/* missing support for heredoc-like string (OCaml 4.0.2+) */
return {
name: 'OCaml',
aliases: [ 'ml' ],
keywords: {
$pattern: '[a-z_]\\w*!?',
keyword:
'and as assert asr begin class constraint do done downto else end '
+ 'exception external for fun function functor if in include '
+ 'inherit! inherit initializer land lazy let lor lsl lsr lxor match method!|10 method '
+ 'mod module mutable new object of open! open or private rec sig struct '
+ 'then to try type val! val virtual when while with '
/* camlp4 */
+ 'parser value',
built_in:
/* built-in types */
'array bool bytes char exn|5 float int int32 int64 list lazy_t|5 nativeint|5 string unit '
/* (some) types in Pervasives */
+ 'in_channel out_channel ref',
literal:
'true false'
},
illegal: /\/\/|>>/,
contains: [
{
className: 'literal',
begin: '\\[(\\|\\|)?\\]|\\(\\)',
relevance: 0
},
hljs.COMMENT(
'\\(\\*',
'\\*\\)',
{ contains: [ 'self' ] }
),
{ /* type variable */
className: 'symbol',
begin: '\'[A-Za-z_](?!\')[\\w\']*'
/* the grammar is ambiguous on how 'a'b should be interpreted but not the compiler */
},
{ /* polymorphic variant */
className: 'type',
begin: '`[A-Z][\\w\']*'
},
{ /* module or constructor */
className: 'type',
begin: '\\b[A-Z][\\w\']*',
relevance: 0
},
{ /* don't color identifiers, but safely catch all identifiers with ' */
begin: '[a-z_]\\w*\'[\\w\']*',
relevance: 0
},
hljs.inherit(hljs.APOS_STRING_MODE, {
className: 'string',
relevance: 0
}),
hljs.inherit(hljs.QUOTE_STRING_MODE, { illegal: null }),
{
className: 'number',
begin:
'\\b(0[xX][a-fA-F0-9_]+[Lln]?|'
+ '0[oO][0-7_]+[Lln]?|'
+ '0[bB][01_]+[Lln]?|'
+ '[0-9][0-9_]*([Lln]|(\\.[0-9_]*)?([eE][-+]?[0-9_]+)?)?)',
relevance: 0
},
{ begin: /->/ // relevance booster
}
]
};
}
return ocaml;
})();
hljs.registerLanguage('ocaml', hljsGrammar);
})();

View file

@ -9,7 +9,7 @@ window.addEventListener("load", () => {
/* Aliases of langs */
const aliases = {
bash: ["fish"],
pascal: ["pseudocode"],
julia: ["pseudocode"],
};
for (const lang in aliases) {
hljs.registerAliases(aliases[lang], { languageName: lang });

View file

@ -1,14 +0,0 @@
window.addEventListener("load", () => {
const { jsPDF } = window.jspdf;
const doc = new jsPDF();
doc.html(document.body, {
width: doc.internal.pageSize.getWidth() - 20,
windowWidth: 800,
margin: [15, 10, 10, 10],
callback: function (doc) {
doc.save(`${document.title}.pdf`);
},
});
});

View file

@ -10,17 +10,20 @@ window.addEventListener("load", () => {
la: "leftarrow",
RA: "Rightarrow",
LA: "Leftarrow",
u: "mu",
})
)[Symbol.iterator]()) {
macros[`\\${item[0]}`] = `\\${item[1]}`;
const bs = "\\";
macros[`${bs}${item[0]}`] = `${bs}${item[1]}`;
}
renderMathInElement(document.body, {
delimiters: [
{ left: "$$", right: "$$", display: true },
{ left: "$", right: "$", display: false },
],
throwOnError: false,
macros,
});
const attribute = "data-math-style";
for (const element of document.querySelectorAll(`span[${attribute}]`)) {
katex.render(element.textContent, element, {
throwOnError: false,
displayMode: element.getAttribute(attribute) === "display",
macros: macros,
output: "mathml",
});
}
});

templates/cours.html (new file, 30 lines)
View file

@ -0,0 +1,30 @@
<!DOCTYPE html>
<html lang="fr">
<head dir="ltr">
{{>head.html}}
<link rel="stylesheet" href="/css/cours.css" />
{{#data}} {{#content}} {{#metadata}}
{{#math}}{{>libs/katex_head.html}}{{/math}}
{{#syntax_highlight}}{{>libs/hljs_head.html}}{{/syntax_highlight}}
{{/metadata}} {{/content}}
</head>
<body>
<header>{{>navbar.html}}</header>
<aside>
<span data-json="{{filetree}} "></span>
</aside>
<main>
{{^content}}
<p>Fichier introuvable</p>
{{/content}} {{#content}}
<article>{{&content}}</article>
</main>
{{#metadata}} {{#mermaid}}{{>libs/mermaid_footer.html}}{{/mermaid}}
{{#math}}{{>libs/katex_footer.html}}{{/math}}
{{#syntax_highlight}}{{>libs/hljs_footer.html}}{{/syntax_highlight}}
{{/metadata}} {{/content}} {{/data}}
<script src="/js/cours.js"></script>
</body>
</html>

View file

@ -1,4 +1,6 @@
<script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/11.7.0/highlight.min.js"></script>
<script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js"></script>
<script src="//unpkg.com/highlightjs-copy/dist/highlightjs-copy.min.js"></script>
<script src="//cdnjs.cloudflare.com/ajax/libs/highlightjs-line-numbers.js/2.8.0/highlightjs-line-numbers.min.js"></script>
<script src="/js/libs/hljs-languages/julia.js"></script>
<script src="/js/libs/hljs-languages/ocaml.js"></script>
<script src="/js/libs/hljs.js"></script>

View file

@ -1,12 +1,12 @@
<link
id="hljs-light-theme"
rel="stylesheet"
href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/11.7.0/styles/base16/solarized-light.min.css"
href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/base16/solarized-light.min.css"
/>
<link
id="hljs-dark-theme"
rel="stylesheet"
href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/11.7.0/styles/base16/dracula.min.css"
href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/base16/dracula.min.css"
/>
<link
rel="stylesheet"

View file

@ -1,8 +0,0 @@
<script src="//html2canvas.hertzen.com/dist/html2canvas.min.js"></script>
<script
src="//cdnjs.cloudflare.com/ajax/libs/jspdf/2.5.1/jspdf.umd.min.js"
integrity="sha512-qZvrmS2ekKPF2mSznTQsxqPgnpkI4DNTlrdUmTzrDgektczlKNRRhy5X5AAOnx5S09ydFYWWNSfcEqDTTHgtNA=="
crossorigin="anonymous"
referrerpolicy="no-referrer"
></script>
<script src="/js/libs/jspdf.js"></script>