This commit is contained in:
parent 2bc0f14475
commit a293ef4332

2 changed files with 46 additions and 20 deletions
@@ -5,7 +5,7 @@ use ramhorns::Content;
 use crate::{
     config::Config,
-    template::{read_md_file, File, Infos},
+    template::{get_md_asm, get_md_metadata, read_md_file, File, Infos},
 };
 
 #[get("/blog")]
@@ -26,7 +26,8 @@ struct Post {
 #[once(time = 60)]
 pub fn get_index(config: Config) -> String {
-    let paths = glob("data/blog/*.md")
+    let location = "data/blog";
+    let paths = glob(&format!("{location}/*.md"))
         .unwrap()
         .map(|f| {
             let filename = f
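This hunk derives the glob pattern from a single location root instead of a hard-coded path. A minimal standalone sketch of that listing step, assuming the glob crate that the existing glob() call appears to use:

use glob::glob;
use std::path::PathBuf;

// List every markdown post under the shared location root, as get_index does above.
fn list_posts(location: &str) -> Vec<PathBuf> {
    glob(&format!("{location}/*.md"))
        .expect("valid glob pattern")
        // Each entry is a Result<PathBuf, GlobError>; unreadable entries are skipped here.
        .filter_map(Result::ok)
        .collect()
}

Skipping unreadable entries is a choice made for this sketch; get_index itself keeps the .unwrap() shown in the diff.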
@@ -36,9 +37,19 @@ pub fn get_index(config: Config) -> String {
                 .to_string_lossy()
                 .to_string();
             let file_without_ext = filename.split_at(filename.len() - 3).0;
+
+            let file_metadata = match std::fs::read_to_string(format!("{location}/{filename}")) {
+                Ok(text) => {
+                    let md_tree = get_md_asm(&text);
+                    let md_nodes = md_tree.children().unwrap();
+                    get_md_metadata(md_nodes).title
+                }
+                _ => None,
+            };
+
             Post {
-                title: file_without_ext.to_string(),
                 url: file_without_ext.to_string(),
+                title: file_metadata.unwrap_or(file_without_ext.to_string()),
             }
         })
         .collect::<Vec<Post>>();
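Taken together, the added block reads each post file, extracts its front matter with the template helpers introduced in this commit, and falls back to the filename stem when no title is found. A sketch of that resolution as one hypothetical helper, assuming (as the unwrap_or fallback suggests) that FileMetadata::title is an Option<String>:

// Hypothetical helper mirroring the inline match above; get_md_asm and
// get_md_metadata are the template-module functions added in this commit.
fn resolve_title(location: &str, filename: &str, file_without_ext: &str) -> String {
    std::fs::read_to_string(format!("{location}/{filename}"))
        .ok()
        .and_then(|text| {
            let md_tree = get_md_asm(&text);
            // children() is None only for leaf nodes; a parsed root always has a child list.
            let md_nodes = md_tree.children()?;
            get_md_metadata(md_nodes).title
        })
        // Unreadable file or no front-matter title: fall back to the filename stem.
        .unwrap_or_else(|| file_without_ext.to_string())
}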
@@ -87,8 +87,21 @@ pub fn read_md_file(filename: &str) -> Option<File> {
     }
 }
 
-pub fn read_md(raw_text: &str) -> File {
-    let parse_option = markdown::ParseOptions {
+pub fn get_md_metadata(vec: &[Node]) -> FileMetadata {
+    if vec.is_empty() {
+        FileMetadata::default()
+    } else {
+        match &vec[0] {
+            Node::Yaml(v) => FrontMatter::Yaml(&v.value).parse(),
+            Node::Toml(v) => FrontMatter::Toml(&v.value).parse(),
+            Node::MdxjsEsm(v) => FrontMatter::Json(&v.value).parse(),
+            _ => FileMetadata::default(),
+        }
+    }
+}
+
+fn get_parse_option() -> markdown::ParseOptions {
+    markdown::ParseOptions {
         constructs: markdown::Constructs {
             frontmatter: true,
             math_text: true,
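get_md_metadata only inspects the first node because a leading front-matter block, when the frontmatter construct is enabled, is parsed into the first child of the tree. FrontMatter and FileMetadata are defined elsewhere in the module; the sketch below reconstructs the shapes this function relies on purely from how they are used in this diff, so everything beyond the title field is a guess:

// Assumed shapes, reconstructed from usage in this diff; not the real definitions.
#[derive(Default)]
pub struct FileMetadata {
    pub title: Option<String>,
    // ...other front-matter fields not visible here
}

pub enum FrontMatter<'a> {
    Yaml(&'a str),
    Toml(&'a str),
    Json(&'a str),
}

impl FrontMatter<'_> {
    // Deserializes the raw front-matter text into FileMetadata; the body is not part of this diff.
    pub fn parse(&self) -> FileMetadata {
        unimplemented!("see template.rs for the real implementation")
    }
}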
@@ -96,32 +109,34 @@ pub fn read_md(raw_text: &str) -> File {
             ..markdown::Constructs::gfm()
         },
         ..markdown::ParseOptions::gfm()
-    };
+    }
+}
 
-    let compile_option = markdown::CompileOptions {
+fn get_compile_option() -> markdown::CompileOptions {
+    markdown::CompileOptions {
         allow_dangerous_html: true,
         ..markdown::CompileOptions::gfm()
-    };
+    }
+}
 
-    let md_tree = markdown::to_mdast(raw_text, &parse_option).unwrap();
+pub fn get_md_asm(raw_text: &str) -> Node {
+    let parse_option = get_parse_option();
+
+    markdown::to_mdast(raw_text, &parse_option).unwrap()
+}
+
+pub fn read_md(raw_text: &str) -> File {
+    let md_tree = get_md_asm(raw_text);
     let md_nodes = md_tree.children().unwrap();
-    let metadata;
+    let metadata = get_md_metadata(md_nodes);
     let presence_mermaid;
     let presence_math;
     let presence_code;
     if md_nodes.is_empty() {
-        metadata = FileMetadata::default();
         presence_mermaid = false;
         presence_math = false;
         presence_code = false;
     } else {
-        metadata = match &md_nodes[0] {
-            Node::Yaml(v) => FrontMatter::Yaml(&v.value).parse(),
-            Node::Toml(v) => FrontMatter::Toml(&v.value).parse(),
-            Node::MdxjsEsm(v) => FrontMatter::Json(&v.value).parse(),
-            _ => FileMetadata::default(),
-        };
-
         // Find if document contains maths
         presence_math = check_math(md_nodes);
 
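read_md now delegates parsing to get_md_asm and metadata extraction to get_md_metadata; the remaining is_empty() branch only keeps the presence_* flags false for empty documents. A test-style sketch of that edge case, assuming (as markdown-rs does) that an empty string parses to a childless root:

#[test]
fn empty_input_yields_no_nodes() {
    // An empty document parses to a Root node whose child list is empty,
    // which is exactly the case read_md's is_empty() branch guards.
    let tree = get_md_asm("");
    assert!(tree.children().is_some_and(|nodes| nodes.is_empty()));
}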
@@ -137,8 +152,8 @@ pub fn read_md(raw_text: &str) -> File {
     let html = markdown::to_html_with_options(
         raw_text,
         &markdown::Options {
-            parse: parse_option,
-            compile: compile_option,
+            parse: get_parse_option(),
+            compile: get_compile_option(),
         },
     )
     .unwrap();