parent d400ef3c5b
commit 0d924de79b
16 changed files with 199 additions and 193 deletions
@@ -8,7 +8,7 @@ use crate::template::Template;

 /// Store the configuration of config/config.toml
 #[derive(Clone, Debug, Default, Deserialize)]
-pub struct FileConfig {
+pub struct FileConfiguration {
     /// http/https
     pub scheme: Option<String>,
     /// Domain name "sub.domain.tld"
@@ -31,7 +31,7 @@ pub struct FileConfig {
     pub exclude_courses: Option<Vec<String>>,
 }

-impl FileConfig {
+impl FileConfiguration {
     /// Initialize with default values
     fn new() -> Self {
         Self {
@@ -40,15 +40,16 @@ impl FileConfig {
             port: Some(8080),
             app_name: Some("EWP".into()),
             exclude_courses: Some([].into()),
-            ..FileConfig::default()
+            ..FileConfiguration::default()
         }
     }

     /// Complete default structure with an existing one
     fn complete(a: Self) -> Self {
         // Default config
-        let d = FileConfig::new();
+        let d = FileConfiguration::new();

+        #[allow(clippy::items_after_statements)]
         /// Return the default value if nothing is value is none
         fn test<T>(val: Option<T>, default: Option<T>) -> Option<T> {
             if val.is_some() {
@@ -84,7 +85,7 @@ pub struct Locations {
 #[derive(Clone, Debug)]
 pub struct Config {
     /// Information given in the config file
-    pub fc: FileConfig,
+    pub fc: FileConfiguration,
     /// Location where the static files are stored
     pub locations: Locations,
     /// Informations about templates
@@ -92,41 +93,37 @@ pub struct Config {
 }

 /// Load the config file
-fn get_file_config(file_path: &str) -> FileConfig {
+fn get_file_config(file_path: &str) -> FileConfiguration {
     match fs::read_to_string(file_path) {
         Ok(file) => match toml::from_str(&file) {
-            Ok(stored_config) => FileConfig::complete(stored_config),
+            Ok(stored_config) => FileConfiguration::complete(stored_config),
             Err(file_error) => {
                 panic!("Error in config file: {file_error}");
             }
         },
         Err(_) => {
             // No config file
-            FileConfig::new()
+            FileConfiguration::new()
         }
     }
 }

 /// Build the configuration
-pub fn get_config(file_path: &str) -> Config {
+pub fn get_configuration(file_path: &str) -> Config {
     let internal_config = get_file_config(file_path);

-    let static_dir = "static".to_owned();
-    let templates_dir = "templates".to_owned();
-    let files_root = init(
-        "dist".into(),
-        static_dir.to_owned(),
-        templates_dir.to_owned(),
-    );
+    let static_dir = "static";
+    let templates_dir = "templates";
+    let files_root = init("dist".into(), static_dir, templates_dir);

     Config {
-        fc: internal_config.to_owned(),
+        fc: internal_config.clone(),
         locations: Locations {
-            static_dir: format!("{}/{}", files_root, static_dir),
+            static_dir: format!("{files_root}/{static_dir}"),
             data_dir: String::from("data"),
         },
         tmpl: Template {
-            directory: format!("{}/{}", files_root, templates_dir),
+            directory: format!("{files_root}/{templates_dir}"),
             app_name: internal_config.app_name.unwrap(),
             url: internal_config.domain.unwrap(),
             name: internal_config.name,
@@ -135,7 +132,7 @@ pub fn get_config(file_path: &str) -> Config {
 }

 /// Preparation before running the http server
-fn init(dist_dir: String, static_dir: String, templates_dir: String) -> String {
+fn init(dist_dir: String, static_dir: &str, templates_dir: &str) -> String {
     // The static folder is minimized only in release mode
     if cfg!(debug_assertions) {
         ".".into()
@@ -154,7 +151,7 @@ fn init(dist_dir: String, static_dir: String, templates_dir: String) -> String {
         let path = entry.unwrap();
         let path_with_dist = path
             .to_string_lossy()
-            .replace(&static_dir, &format!("{dist_dir}/{static_dir}"));
+            .replace(static_dir, &format!("{dist_dir}/{static_dir}"));

         minify_and_copy(&cfg, path, path_with_dist);
     }
@@ -164,7 +161,7 @@ fn init(dist_dir: String, static_dir: String, templates_dir: String) -> String {
         let path = entry.unwrap();
         let path_with_dist = path
             .to_string_lossy()
-            .replace(&templates_dir, &format!("{dist_dir}/{templates_dir}"));
+            .replace(templates_dir, &format!("{dist_dir}/{templates_dir}"));

         minify_and_copy(&cfg, path, path_with_dist);
     }
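Aside: the renamed FileConfiguration keeps the same completion pattern as before — every field missing from config/config.toml falls back to the value built in FileConfiguration::new(). A minimal standalone sketch of that Option-merging idea (the helper and values here are illustrative, not the crate's exact code):

// Keep the parsed value when present, otherwise fall back to the default.
fn or_default<T>(val: Option<T>, default: Option<T>) -> Option<T> {
    if val.is_some() {
        val
    } else {
        default
    }
}

fn main() {
    // e.g. a config file that sets nothing still ends up with an app name
    let from_file: Option<String> = None;
    let built_in = Some("EWP".to_string());
    assert_eq!(or_default(from_file, built_in), Some("EWP".to_string()));
}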
@@ -18,20 +18,20 @@ mod routes;

 #[actix_web::main]
 async fn main() -> Result<()> {
-    let config = config::get_config("./config/config.toml");
+    let config = config::get_configuration("./config/config.toml");

     let addr = ("0.0.0.0", config.fc.port.unwrap());

     println!(
         "Listening to {}://{}:{}",
-        config.to_owned().fc.scheme.unwrap(),
+        config.clone().fc.scheme.unwrap(),
         addr.0,
         addr.1
     );

     HttpServer::new(move || {
         App::new()
-            .app_data(web::Data::new(config.to_owned()))
+            .app_data(web::Data::new(config.clone()))
             .wrap(Compress::default())
             .wrap(
                 DefaultHeaders::new()
@@ -60,7 +60,7 @@ async fn main() -> Result<()> {
             .service(portfolio::page)
             .service(setup::page)
             .service(web3::page)
-            .service(Files::new("/", config.locations.static_dir.to_owned()))
+            .service(Files::new("/", config.locations.static_dir.clone()))
             .default_service(web::to(not_found::page))
     })
     .bind(addr)?
@@ -45,8 +45,8 @@ impl From<u8> for ProjectState {

 #[derive(Debug)]
 pub struct Project {
-    pub project: String,
-    pub project_url: String,
+    pub name: String,
+    pub url: String,
     pub status: ProjectState,
     pub title: String,
     pub id: u32,
@@ -68,8 +68,8 @@ pub async fn fetch_pr() -> Result<Vec<Project>, Error> {
     let mut list = vec![];
     resp.items.iter().for_each(|p| {
         list.push(Project {
-            project: p.repository_url.split('/').last().unwrap().into(),
-            project_url: p.repository_url.to_owned(),
+            name: p.repository_url.split('/').last().unwrap().into(),
+            url: p.repository_url.clone(),
             status: if p.pull_request.merged_at.is_none() {
                 if p.state == "closed" {
                     ProjectState::Closed
@@ -79,9 +79,9 @@ pub async fn fetch_pr() -> Result<Vec<Project>, Error> {
             } else {
                 ProjectState::Merged
             },
-            title: p.title.to_owned(),
+            title: p.title.clone(),
             id: p.number,
-            contrib_url: p.pull_request.html_url.to_owned(),
+            contrib_url: p.pull_request.html_url.clone(),
         });
     });

@@ -93,6 +93,7 @@ pub struct FileMetadata {
     pub portfolio: Option<FileMetadataPortfolio>,
 }

+#[allow(clippy::struct_excessive_bools)]
 /// Global metadata
 #[derive(Content, Debug)]
 pub struct Metadata {
@@ -105,7 +106,7 @@ pub struct Metadata {

 impl Metadata {
     /// Update current metadata boolean fields, keeping true ones
-    fn merge(&mut self, other: Metadata) {
+    fn merge(&mut self, other: &Metadata) {
         self.math = self.math || other.math;
         self.mermaid = self.mermaid || other.mermaid;
         self.syntax_highlight = self.syntax_highlight || other.syntax_highlight;
@@ -159,9 +160,9 @@ pub fn get_options() -> ComrakOptions {
 }

 /// Resize images if needed
-fn custom_img_size(html: String) -> String {
+fn custom_img_size(html: &str) -> String {
     rewrite_str(
-        &html,
+        html,
         RewriteStrSettings {
             element_content_handlers: vec![element!("img[alt]", |el| {
                 let alt = el.get_attribute("alt").unwrap();
@@ -180,7 +181,7 @@ fn custom_img_size(html: String) -> String {
                     el.set_attribute("width", dimension.0).unwrap();
                     el.set_attribute("height", dimension.1).unwrap();
                     if new_alt.is_empty() {
-                        el.remove_attribute("alt")
+                        el.remove_attribute("alt");
                     } else {
                         el.set_attribute("alt", new_alt).unwrap();
                     }
@@ -190,7 +191,7 @@ fn custom_img_size(html: String) -> String {
                     if data.parse::<i32>().is_ok() {
                         el.set_attribute("width", data).unwrap();
                         if new_alt.is_empty() {
-                            el.remove_attribute("alt")
+                            el.remove_attribute("alt");
                         } else {
                             el.set_attribute("alt", new_alt).unwrap();
                         }
@@ -207,7 +208,7 @@ fn custom_img_size(html: String) -> String {
 }

 /// Fix local images to base64 and integration of markdown files
-fn fix_images_and_integration(path: &str, html: String) -> (String, Metadata) {
+fn fix_images_and_integration(path: &str, html: &str) -> (String, Metadata) {
     let mut metadata = Metadata {
         info: FileMetadata::default(),
         math: false,
@@ -218,7 +219,7 @@ fn fix_images_and_integration(path: &str, html: String) -> (String, Metadata) {

     (
         rewrite_str(
-            &html,
+            html,
             RewriteStrSettings {
                 element_content_handlers: vec![element!("img", |el| {
                     if let Some(src) = el.get_attribute("src") {
@@ -234,15 +235,15 @@ fn fix_images_and_integration(path: &str, html: String) -> (String, Metadata) {
                             let data = read_md(
                                 &img_path,
                                 &file,
-                                TypeFileMetadata::Generic,
+                                &TypeFileMetadata::Generic,
                                 Some(options),
                             );
                             el.replace(&data.content, ContentType::Html);
-                            metadata.merge(data.metadata);
+                            metadata.merge(&data.metadata);
                         } else {
                             let image = general_purpose::STANDARD.encode(file);

-                            el.set_attribute("src", &format!("data:{};base64,{}", mime, image))
+                            el.set_attribute("src", &format!("data:{mime};base64,{image}"))
                                 .unwrap();
                         }
                     }
@@ -262,7 +263,7 @@ fn fix_images_and_integration(path: &str, html: String) -> (String, Metadata) {
 pub fn read_md(
     path: &str,
     raw_text: &str,
-    metadata_type: TypeFileMetadata,
+    metadata_type: &TypeFileMetadata,
     options: Option<Options>,
 ) -> File {
     let arena = Arena::new();
@@ -287,9 +288,9 @@ pub fn read_md(

     let children_metadata;
     let mail_obfsucated;
-    (html_content, children_metadata) = fix_images_and_integration(path, html_content);
-    html_content = custom_img_size(html_content);
-    (html_content, mail_obfsucated) = mail_obfuscation(html_content);
+    (html_content, children_metadata) = fix_images_and_integration(path, &html_content);
+    html_content = custom_img_size(&html_content);
+    (html_content, mail_obfsucated) = mail_obfuscation(&html_content);

     let mut final_metadata = Metadata {
         info: metadata,
@@ -298,7 +299,7 @@ pub fn read_md(
         math: check_math(&html_content),
         mail_obfsucated,
     };
-    final_metadata.merge(children_metadata);
+    final_metadata.merge(&children_metadata);

     File {
         metadata: final_metadata,
@@ -312,7 +313,7 @@ fn deserialize_metadata<T: Default + serde::de::DeserializeOwned>(text: &str) ->
 }

 /// Fetch metadata from AST
-pub fn get_metadata<'a>(root: &'a AstNode<'a>, mtype: TypeFileMetadata) -> FileMetadata {
+pub fn get_metadata<'a>(root: &'a AstNode<'a>, mtype: &TypeFileMetadata) -> FileMetadata {
     match root
         .children()
         .find_map(|node| match &node.data.borrow().value {
@@ -327,7 +328,7 @@ pub fn get_metadata<'a>(root: &'a AstNode<'a>, mtype: TypeFileMetadata) -> FileM

             // Trim descriptions
             if let Some(desc) = &mut metadata.description {
-                desc.clone_from(&desc.trim().into())
+                desc.clone_from(&desc.trim().into());
             }

             FileMetadata {
@@ -431,11 +432,11 @@ fn hljs_replace<'a>(root: &'a AstNode<'a>, mermaid_str: &str) {
 }

 /// Obfuscate email if email found
-fn mail_obfuscation(html: String) -> (String, bool) {
+fn mail_obfuscation(html: &str) -> (String, bool) {
     let modified = Arc::new(AtomicBool::new(false));
     (
         rewrite_str(
-            &html,
+            html,
             RewriteStrSettings {
                 element_content_handlers: vec![element!("a[href^='mailto:']", |el| {
                     modified.store(true, Ordering::SeqCst);
@@ -444,7 +445,7 @@ fn mail_obfuscation(html: String) -> (String, bool) {
                     let (_uri, mail) = &link.split_at(7);
                     let (before, after) = mail.split_once('@').unwrap();

-                    let modified_mail = format!("{}<span class='at'>(at)</span>{}", before, after);
+                    let modified_mail = format!("{before}<span class='at'>(at)</span>{after}");

                     el.set_inner_content(&modified_mail, ContentType::Html);

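The markdown helpers above now borrow their inputs (&str, &TypeFileMetadata) instead of taking ownership, so callers keep their strings after the call. A standalone sketch of the same signature change, using a toy function rather than the crate's own:

// Old style: takes ownership, the caller loses its String.
fn shout_owned(text: String) -> String {
    text.to_uppercase()
}

// New style: borrows, the caller can keep using its String.
fn shout_borrowed(text: &str) -> String {
    text.to_uppercase()
}

fn main() {
    let html = String::from("hello");
    let _ = shout_borrowed(&html); // `html` is still usable here
    println!("{html}");
    let _ = shout_owned(html); // `html` is moved and gone after this call
}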
@@ -9,7 +9,7 @@ use base64::{engine::general_purpose, Engine};
 use cached::proc_macro::cached;
 use reqwest::Client;

-use crate::config::FileConfig;
+use crate::config::FileConfiguration;

 use super::markdown::{read_md, File, FileMetadata, Metadata, TypeFileMetadata};

@@ -22,7 +22,7 @@ pub fn get_reqwest_client() -> Client {
 }

 /// Get URL of the app
-pub fn get_url(fc: FileConfig) -> String {
+pub fn get_url(fc: FileConfiguration) -> String {
     /* let port = match fc.scheme.as_deref() {
         Some("https") if fc.port == Some(443) => String::new(),
         Some("http") if fc.port == Some(80) => String::new(),
@@ -33,8 +33,8 @@ pub fn get_url(fc: FileConfig) -> String {
 }

 /// Make a list of keywords
-pub fn make_kw(list: &[&str]) -> Option<String> {
-    Some(list.join(", "))
+pub fn make_kw(list: &[&str]) -> String {
+    list.join(", ")
 }

 /// Send HTML file
@@ -53,7 +53,7 @@ impl Responder for Html {
 }

 /// Read a file
-pub fn read_file(filename: &str, expected_file: TypeFileMetadata) -> Option<File> {
+pub fn read_file(filename: &str, expected_file: &TypeFileMetadata) -> Option<File> {
     match Path::new(filename).extension() {
         Some(ext) => match ext.to_str().unwrap() {
             "pdf" => match fs::read(filename) {
@@ -82,10 +82,9 @@ fn read_pdf(data: Vec<u8>) -> File {
         },
         content: format!(
             r#"<embed
-                src="data:application/pdf;base64,{}"
+                src="data:application/pdf;base64,{pdf}"
                 style="width: 100%; height: 79vh";
-            >"#,
-            pdf
+            >"#
         ),
     }
 }
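make_kw now returns a plain String, so the Option wrapping moves to the call sites — which is why the route diffs below all switch to kw: Some(make_kw(...)). A standalone sketch of the new shape:

/// Make a list of keywords (post-commit signature)
fn make_kw(list: &[&str]) -> String {
    list.join(", ")
}

fn main() {
    // Call sites wrap the result themselves now, e.g. InfosPage { kw: Some(...), .. }
    let kw: Option<String> = Some(make_kw(&["blog", "blogging"]));
    assert_eq!(kw.as_deref(), Some("blog, blogging"));
}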
@@ -1,4 +1,4 @@
-use crate::{config::Config, misc::utils::get_url, template::Infos};
+use crate::{config::Config, misc::utils::get_url, template::InfosPage};
 use actix_web::{get, http::header::ContentType, routes, web, HttpResponse, Responder};
 use cached::proc_macro::once;
 use ramhorns::Content;
@@ -28,7 +28,7 @@ fn build_securitytxt(config: Config) -> String {
             contact: config.fc.mail.unwrap_or_default(),
             pref_lang: config.fc.lang.unwrap_or_default(),
         },
-        Infos::default(),
+        InfosPage::default(),
     )
 }

@@ -55,7 +55,7 @@ fn build_humanstxt(config: Config) -> String {
             lang: config.fc.lang.unwrap_or_default(),
             name: config.fc.fullname.unwrap_or_default(),
         },
-        Infos::default(),
+        InfosPage::default(),
     )
 }

@@ -94,6 +94,6 @@ fn build_webmanifest(config: Config) -> String {
             description: "Easy WebPage generator".to_owned(),
             url: get_url(config.fc),
         },
-        Infos::default(),
+        InfosPage::default(),
     )
 }
@@ -10,6 +10,6 @@ struct Info {
 #[get("/love")]
 async fn love() -> impl Responder {
     HttpResponse::Ok().json(Info {
-        unix_epoch: 1605576600,
+        unix_epoch: 1_605_576_600,
     })
 }
@@ -21,7 +21,7 @@ use crate::{
         markdown::{get_metadata, get_options, File, FileMetadataBlog, TypeFileMetadata},
         utils::{get_url, make_kw, read_file, Html},
     },
-    template::{Infos, NavBar},
+    template::{InfosPage, NavBar},
 };

 const MIME_TYPE_RSS: &str = "application/rss+xml";
@@ -44,11 +44,11 @@ struct BlogIndexTemplate {
 #[once(time = 60)]
 fn build_index(config: Config) -> String {
     let blog_dir = format!("{}/{}", config.locations.data_dir, BLOG_DIR);
-    let mut posts = get_posts(format!("{}/{}", blog_dir, POST_DIR));
+    let mut posts = get_posts(&format!("{blog_dir}/{POST_DIR}"));

     // Get about
     let about: Option<File> =
-        read_file(&format!("{}/about.md", blog_dir), TypeFileMetadata::Generic);
+        read_file(&format!("{blog_dir}/about.md"), &TypeFileMetadata::Generic);

     // Sort from newest to oldest
     posts.sort_by_cached_key(|p| (p.date.year, p.date.month, p.date.day));
@@ -65,13 +65,13 @@ fn build_index(config: Config) -> String {
             no_posts: posts.is_empty(),
             posts,
         },
-        Infos {
-            page_title: Some("Blog".into()),
-            page_desc: Some(format!(
+        InfosPage {
+            title: Some("Blog".into()),
+            desc: Some(format!(
                 "Liste des posts d'{}",
                 config.fc.name.unwrap_or_default()
             )),
-            page_kw: make_kw(&["blog", "blogging"]),
+            kw: Some(make_kw(&["blog", "blogging"])),
         },
     )
 }
@@ -89,12 +89,12 @@ struct Post {
 impl Post {
     // Fetch the file content
     fn fetch_content(&mut self, data_dir: &str) {
-        let blog_dir = format!("{}/{}/{}", data_dir, BLOG_DIR, POST_DIR);
+        let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
         let ext = ".md";

         if let Some(file) = read_file(
             &format!("{blog_dir}/{}{ext}", self.url),
-            TypeFileMetadata::Blog,
+            &TypeFileMetadata::Blog,
         ) {
             self.content = Some(file.content);
         }
@@ -104,13 +104,13 @@ impl Post {
 impl Hash for Post {
     fn hash<H: Hasher>(&self, state: &mut H) {
         if let Some(content) = &self.content {
-            content.hash(state)
+            content.hash(state);
         }
     }
 }

-fn get_posts(location: String) -> Vec<Post> {
-    let entries = match std::fs::read_dir(&location) {
+fn get_posts(location: &str) -> Vec<Post> {
+    let entries = match std::fs::read_dir(location) {
         Ok(res) => res
             .flatten()
             .filter(|f| match f.path().extension() {
@@ -124,8 +124,8 @@ fn get_posts(location: String) -> Vec<Post> {
     entries
         .iter()
         .filter_map(|f| {
-            let _filename = f.file_name();
-            let filename = _filename.to_string_lossy();
+            let fname = f.file_name();
+            let filename = fname.to_string_lossy();
             let file_without_ext = filename.split_at(filename.len() - 3).0;

             let file_metadata = match std::fs::read_to_string(format!("{location}/{filename}")) {
@@ -134,7 +134,7 @@ fn get_posts(location: String) -> Vec<Post> {

             let options = get_options();
             let root = parse_document(&arena, &text, &options);
-            let mut metadata = get_metadata(root, TypeFileMetadata::Blog).blog.unwrap();
+            let mut metadata = get_metadata(root, &TypeFileMetadata::Blog).blog.unwrap();

             // Always have a title
             metadata.title = match metadata.title {
@@ -173,7 +173,7 @@ fn get_posts(location: String) -> Vec<Post> {
                     .tags
                     .unwrap_or_default()
                     .iter()
-                    .map(|t| t.name.to_owned())
+                    .map(|t| t.name.clone())
                     .collect(),
                 })
             } else {
@@ -192,16 +192,19 @@ struct BlogPostTemplate {

 #[get("/blog/p/{id}")]
 async fn page(path: web::Path<(String,)>, config: web::Data<Config>) -> impl Responder {
-    Html(build_post(path.into_inner().0, config.get_ref().to_owned()))
+    Html(build_post(
+        &path.into_inner().0,
+        config.get_ref().to_owned(),
+    ))
 }

-fn build_post(file: String, config: Config) -> String {
+fn build_post(file: &str, config: Config) -> String {
     let mut post = None;
     let (infos, toc) = get_post(
         &mut post,
         file,
-        config.fc.name.unwrap_or_default(),
-        config.locations.data_dir,
+        &config.fc.name.unwrap_or_default(),
+        &config.locations.data_dir,
     );

     config.tmpl.render(
@@ -220,20 +223,20 @@ fn build_post(file: String, config: Config) -> String {

 fn get_post(
     post: &mut Option<File>,
-    filename: String,
-    name: String,
-    data_dir: String,
-) -> (Infos, String) {
-    let blog_dir = format!("{}/{}/{}", data_dir, BLOG_DIR, POST_DIR);
+    filename: &str,
+    name: &str,
+    data_dir: &str,
+) -> (InfosPage, String) {
+    let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
     let ext = ".md";

     *post = read_file(
         &format!("{blog_dir}/{filename}{ext}"),
-        TypeFileMetadata::Blog,
+        &TypeFileMetadata::Blog,
     );

     let default = (
-        &filename,
+        filename,
         &format!("Blog d'{name}"),
         Vec::new(),
         String::new(),
@@ -262,15 +265,15 @@ fn get_post(
     };

     (
-        Infos {
-            page_title: Some(format!("Post: {}", title)),
-            page_desc: Some(desc.clone()),
-            page_kw: make_kw(
+        InfosPage {
+            title: Some(format!("Post: {title}")),
+            desc: Some(desc.clone()),
+            kw: Some(make_kw(
                 &["blog", "blogging", "write", "writing"]
                     .into_iter()
                     .chain(tags.iter().map(|t| t.name.as_str()))
                     .collect::<Vec<_>>(),
-            ),
+            )),
         },
         toc,
     )
@@ -285,7 +288,7 @@ async fn rss(config: web::Data<Config>) -> impl Responder {

 #[once(time = 10800)] // 3h
 fn build_rss(config: Config) -> String {
-    let mut posts = get_posts(format!(
+    let mut posts = get_posts(&format!(
         "{}/{}/{}",
         config.locations.data_dir, BLOG_DIR, POST_DIR
     ));
@@ -301,7 +304,7 @@ fn build_rss(config: Config) -> String {
     }

     let link_to_site = get_url(config.fc.clone());
-    let author = if let (Some(mail), Some(name)) = (config.fc.mail, config.fc.fullname.to_owned()) {
+    let author = if let (Some(mail), Some(name)) = (config.fc.mail, config.fc.fullname.clone()) {
         Some(format!("{mail} ({name})"))
     } else {
         None
@@ -309,11 +312,11 @@ fn build_rss(config: Config) -> String {
     let title = format!("Blog d'{}", config.fc.name.unwrap_or_default());
     let lang = "fr";
     let channel = Channel {
-        title: title.to_owned(),
-        link: link_to_site.to_owned(),
+        title: title.clone(),
+        link: link_to_site.clone(),
         description: "Un fil qui parle d'informatique notamment".into(),
         language: Some(lang.into()),
-        managing_editor: author.to_owned(),
+        managing_editor: author.clone(),
         webmaster: author,
         pub_date: Some(Local::now().to_rfc2822()),
         categories: ["blog", "blogging", "write", "writing"]
@@ -326,9 +329,9 @@ fn build_rss(config: Config) -> String {
         generator: Some("ewp with rss crate".into()),
         docs: Some("https://www.rssboard.org/rss-specification".into()),
         image: Some(Image {
-            url: format!("{}/icons/favicon-32x32.png", link_to_site),
-            title: title.to_owned(),
-            link: link_to_site.to_owned(),
+            url: format!("{link_to_site}/icons/favicon-32x32.png"),
+            title: title.clone(),
+            link: link_to_site.clone(),
             ..Image::default()
         }),
         items: posts
@@ -339,9 +342,9 @@ fn build_rss(config: Config) -> String {

                 // Build item
                 Item {
-                    title: Some(p.title.to_owned()),
+                    title: Some(p.title.clone()),
                     link: Some(format!("{}/blog/p/{}", link_to_site, p.url)),
-                    description: p.content.to_owned(),
+                    description: p.content.clone(),
                     categories: p
                         .tags
                         .iter()
@@ -374,7 +377,7 @@ fn build_rss(config: Config) -> String {
                         .collect(),
                     atom_ext: Some(AtomExtension {
                         links: vec![Link {
-                            href: format!("{}/blog/rss", link_to_site),
+                            href: format!("{link_to_site}/blog/rss"),
                             rel: "self".into(),
                             hreflang: Some(lang.into()),
                             mime_type: Some(MIME_TYPE_RSS.into()),
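Most of the format! changes in this file follow Clippy's uninlined_format_args lint: positional {} placeholders with trailing arguments become identifiers captured directly in the braces. A standalone sketch:

fn main() {
    let blog_dir = "data/blog";
    let post_dir = "posts";

    // Before: positional arguments
    let old = format!("{}/{}", blog_dir, post_dir);
    // After: captured identifiers (Rust 2021, 1.58+)
    let new = format!("{blog_dir}/{post_dir}");

    assert_eq!(old, new);
}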
@@ -10,7 +10,7 @@ use crate::{
         markdown::{File, TypeFileMetadata},
         utils::{make_kw, read_file, Html},
     },
-    template::{Infos, NavBar},
+    template::{InfosPage, NavBar},
 };

 const CONTACT_DIR: &str = "contacts";
@@ -83,7 +83,7 @@ async fn service_redirection(config: web::Data<Config>, req: HttpRequest) -> imp
         // Find requested service
         .filter(|&x| x.service == *info.query("service"))
         // Search for a potential scope
-        .filter(|&x| match (info.get("scope"), x.scope.to_owned()) {
+        .filter(|&x| match (info.get("scope"), x.scope.clone()) {
             // The right scope is accepted
             (Some(str_value), Some(string_value)) if str_value == string_value.as_str() => true,
             // No scope provided is accepted
@@ -131,26 +131,26 @@ fn build_page(config: Config) -> String {

     // Get about
     let about = read_file(
-        &format!("{}/about.md", contacts_dir),
-        TypeFileMetadata::Generic,
+        &format!("{contacts_dir}/about.md"),
+        &TypeFileMetadata::Generic,
     );

     let socials_dir = "socials";
     let mut socials = glob(&format!("{contacts_dir}/{socials_dir}/*{ext}"))
         .unwrap()
-        .map(|e| read_file(&e.unwrap().to_string_lossy(), TypeFileMetadata::Contact).unwrap())
+        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
         .collect::<Vec<File>>();

     let forges_dir = "forges";
     let mut forges = glob(&format!("{contacts_dir}/{forges_dir}/*{ext}"))
         .unwrap()
-        .map(|e| read_file(&e.unwrap().to_string_lossy(), TypeFileMetadata::Contact).unwrap())
+        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
         .collect::<Vec<File>>();

     let others_dir = "others";
     let mut others = glob(&format!("{contacts_dir}/{others_dir}/*{ext}"))
         .unwrap()
-        .map(|e| read_file(&e.unwrap().to_string_lossy(), TypeFileMetadata::Contact).unwrap())
+        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
         .collect::<Vec<File>>();

     // Remove paragraphs in custom statements
@@ -176,10 +176,15 @@ fn build_page(config: Config) -> String {
             others_exists: !others.is_empty(),
             others,
         },
-        Infos {
-            page_title: Some("Contacts".into()),
-            page_desc: Some(format!("Réseaux d'{}", config.fc.name.unwrap_or_default())),
-            page_kw: make_kw(&["réseaux sociaux", "email", "contact", "linktree"]),
+        InfosPage {
+            title: Some("Contacts".into()),
+            desc: Some(format!("Réseaux d'{}", config.fc.name.unwrap_or_default())),
+            kw: Some(make_kw(&[
+                "réseaux sociaux",
+                "email",
+                "contact",
+                "linktree",
+            ])),
         },
     )
 }
@@ -6,7 +6,7 @@ use crate::{
         github::{fetch_pr, ProjectState},
         utils::{make_kw, Html},
     },
-    template::{Infos, NavBar},
+    template::{InfosPage, NavBar},
 };
 use actix_web::{get, web, Responder};
 use cached::proc_macro::once;
@@ -58,31 +58,31 @@ async fn build_page(config: Config) -> String {

             // Grouping PRs by projects
             let mut map: HashMap<&str, Vec<Pull>> = HashMap::new();
-            projects.iter().for_each(|p| {
+            for p in &projects {
                 let project = Pull {
-                    url: p.contrib_url.to_owned(),
+                    url: p.contrib_url.clone(),
                     id: p.id,
-                    name_repo: p.project.to_owned(),
-                    title: p.title.to_owned(),
+                    name_repo: p.name.clone(),
+                    title: p.title.clone(),
                     state: p.status as u8,
                 };
-                let project_name = p.project.as_str();
+                let project_name = p.name.as_str();
                 if map.contains_key(project_name) {
                     map.entry(project_name).and_modify(|v| v.push(project));
                 } else {
                     data.push(Project {
                         name: project_name.into(),
-                        url: p.project_url.to_owned(),
+                        url: p.url.clone(),
                         pulls_merged: Vec::new(),
                         pulls_closed: Vec::new(),
                         pulls_open: Vec::new(),
                     });
                     map.insert(project_name, vec![project]);
                 }
-            });
+            }

             // Distributes each PR in the right vector
-            data.iter_mut().for_each(|d| {
+            for d in &mut data {
                 map.get(d.name.as_str()).unwrap().iter().for_each(|p| {
                     let state = p.state.into();
                     match state {
@@ -94,14 +94,14 @@ async fn build_page(config: Config) -> String {
                 let mut name: Vec<char> = d.name.replace('-', " ").chars().collect();
                 name[0] = name[0].to_uppercase().next().unwrap();
                 d.name = name.into_iter().collect();
-            });
+            }

             // Ascending order by pulls IDs
-            data.iter_mut().for_each(|d| {
+            for d in &mut data {
                 d.pulls_closed.reverse();
                 d.pulls_merged.reverse();
                 d.pulls_open.reverse();
-            });
+            }

             // Ascending order by number of pulls
             data.sort_by(|a, b| {
@@ -135,7 +135,7 @@ async fn build_page(config: Config) -> String {
             }
         }
         Err(e) => {
-            eprintln!("{}", e);
+            eprintln!("{e}");

             PortfolioTemplate {
                 navbar,
@@ -150,13 +150,13 @@ async fn build_page(config: Config) -> String {
     config.tmpl.render(
         "contrib.html",
         data,
-        Infos {
-            page_title: Some("Mes contributions".into()),
-            page_desc: Some(format!(
+        InfosPage {
+            title: Some("Mes contributions".into()),
+            desc: Some(format!(
                 "Contributions d'{} à GitHub",
                 config.fc.name.unwrap_or_default()
             )),
-            page_kw: make_kw(&[
+            kw: Some(make_kw(&[
                 "github",
                 "contributions",
                 "open source",
@@ -164,7 +164,7 @@ async fn build_page(config: Config) -> String {
                 "portfolio",
                 "projets",
                 "code",
-            ]),
+            ])),
         },
     )
 }
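The contribution page replaces iter().for_each(|x| { ... }) closures with plain for loops (Clippy's needless_for_each), which reads better and makes mutable iteration straightforward. A standalone sketch of the same rewrite:

fn main() {
    let mut data = vec![vec![3, 2, 1], vec![6, 5, 4]];

    // Before: data.iter_mut().for_each(|d| d.reverse());
    // After: a plain loop over mutable references
    for d in &mut data {
        d.reverse();
    }

    assert_eq!(data, vec![vec![1, 2, 3], vec![4, 5, 6]]);
}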
@@ -12,7 +12,7 @@ use crate::{
         markdown::{File, TypeFileMetadata},
         utils::{make_kw, read_file, Html},
     },
-    template::{Infos, NavBar},
+    template::{InfosPage, NavBar},
 };

 #[derive(Debug, Deserialize)]
@@ -22,7 +22,7 @@ pub struct PathRequest {

 #[get("/cours")]
 async fn page(info: web::Query<PathRequest>, config: web::Data<Config>) -> impl Responder {
-    Html(build_page(info, config.get_ref().to_owned()))
+    Html(build_page(&info, config.get_ref().to_owned()))
 }

 #[derive(Content, Debug)]
@@ -94,7 +94,7 @@ fn get_filetree(dir_path: &str, exclusion_patterns: &Vec<Regex>) -> FileNode {
 fn get_content(
     cours_dir: &str,
     path: &web::Query<PathRequest>,
-    exclusion_list: Vec<String>,
+    exclusion_list: &[String],
 ) -> Option<File> {
     let filename = match &path.q {
         Some(q) => q,
@@ -111,14 +111,14 @@ fn get_content(

     read_file(
         &format!("{cours_dir}/{filename}"),
-        TypeFileMetadata::Generic,
+        &TypeFileMetadata::Generic,
     )
 }

-fn build_page(info: web::Query<PathRequest>, config: Config) -> String {
+fn build_page(info: &web::Query<PathRequest>, config: Config) -> String {
     let cours_dir = "data/cours";
     let exclusion_list = config.fc.exclude_courses.unwrap();
-    let exclusion_patterns = compile_patterns(exclusion_list.to_owned());
+    let exclusion_patterns = compile_patterns(exclusion_list.clone());
     let filetree = get_filetree(cours_dir, &exclusion_patterns);

     config.tmpl.render(
@@ -129,12 +129,12 @@ fn build_page(info: web::Query<PathRequest>, config: Config) -> String {
                 ..NavBar::default()
             },
             filetree: serde_json::to_string(&filetree).unwrap(),
-            content: get_content(cours_dir, &info, exclusion_list),
+            content: get_content(cours_dir, info, &exclusion_list),
         },
-        Infos {
-            page_title: Some("Cours".into()),
-            page_desc: Some("Cours à l'univ".into()),
-            page_kw: make_kw(&[
+        InfosPage {
+            title: Some("Cours".into()),
+            desc: Some("Cours à l'univ".into()),
+            kw: Some(make_kw(&[
                 "cours",
                 "études",
                 "université",
@@ -142,7 +142,7 @@ fn build_page(info: web::Query<PathRequest>, config: Config) -> String {
                 "master",
                 "notes",
                 "digital garden",
-            ]),
+            ])),
         },
     )
 }
@@ -8,7 +8,7 @@ use crate::{
         markdown::{File, TypeFileMetadata},
         utils::{make_kw, read_file, Html},
     },
-    template::{Infos, NavBar},
+    template::{InfosPage, NavBar},
 };

 #[get("/")]
@@ -37,11 +37,11 @@ struct StyleAvatar {
 fn build_page(config: Config) -> String {
     let mut file = read_file(
         &format!("{}/index.md", config.locations.data_dir),
-        TypeFileMetadata::Index,
+        &TypeFileMetadata::Index,
     );

     // Default values
-    let mut name = config.fc.fullname.to_owned().unwrap_or_default();
+    let mut name = config.fc.fullname.clone().unwrap_or_default();
     let mut pronouns = None;
     let mut avatar = "/icons/apple-touch-icon.png".to_owned();
     let mut avatar_caption = "EWP avatar".to_owned();
@@ -52,12 +52,12 @@ fn build_page(config: Config) -> String {

     if let Some(f) = &file {
         if let Some(m) = &f.metadata.info.index {
-            name = m.name.to_owned().unwrap_or(name);
-            avatar = m.avatar.to_owned().unwrap_or(avatar);
+            name = m.name.clone().unwrap_or(name);
+            avatar = m.avatar.clone().unwrap_or(avatar);
             m.pronouns.clone_into(&mut pronouns);
-            avatar_caption = m.avatar_caption.to_owned().unwrap_or(avatar_caption);
+            avatar_caption = m.avatar_caption.clone().unwrap_or(avatar_caption);

-            if let Some(style) = m.avatar_style.to_owned() {
+            if let Some(style) = m.avatar_style.clone() {
                 if style.trim() == "square" {
                     avatar_style = StyleAvatar {
                         square: true,
@@ -67,7 +67,7 @@ fn build_page(config: Config) -> String {
             }
         }
     } else {
-        file = read_file("README.md", TypeFileMetadata::Generic);
+        file = read_file("README.md", &TypeFileMetadata::Generic);
     }

     config.tmpl.render(
@@ -84,10 +84,10 @@ fn build_page(config: Config) -> String {
             avatar_caption,
             avatar_style,
         },
-        Infos {
-            page_title: config.fc.fullname,
-            page_desc: Some("Page principale".into()),
-            page_kw: make_kw(&["index", "étudiant", "accueil"]),
+        InfosPage {
+            title: config.fc.fullname,
+            desc: Some("Page principale".into()),
+            kw: Some(make_kw(&["index", "étudiant", "accueil"])),
         },
     )
 }
@@ -5,7 +5,7 @@ use ramhorns::Content;
 use crate::{
     config::Config,
     misc::utils::{get_url, Html},
-    template::{Infos, NavBar},
+    template::{InfosPage, NavBar},
 };

 pub async fn page(config: web::Data<Config>) -> impl Responder {
@@ -28,9 +28,9 @@ fn build_page(config: Config) -> String {
             www: get_url(config.fc.clone()),
             onion: config.fc.onion,
         },
-        Infos {
-            page_desc: Some("Une page perdu du web".into()),
-            ..Infos::default()
+        InfosPage {
+            desc: Some("Une page perdu du web".into()),
+            ..InfosPage::default()
         },
     )
 }
@@ -9,7 +9,7 @@ use crate::{
         markdown::{File, TypeFileMetadata},
         utils::{make_kw, read_file, Html},
     },
-    template::{Infos, NavBar},
+    template::{InfosPage, NavBar},
 };

 #[get("/portfolio")]
@@ -31,19 +31,19 @@ struct PortfolioTemplate<'a> {
 #[once(time = 60)]
 fn build_page(config: Config) -> String {
     let projects_dir = format!("{}/projects", config.locations.data_dir);
-    let apps_dir = format!("{}/apps", projects_dir);
+    let apps_dir = format!("{projects_dir}/apps");
     let ext = ".md";

     // Get about
     let about = read_file(
-        &format!("{}/about.md", projects_dir),
-        TypeFileMetadata::Generic,
+        &format!("{projects_dir}/about.md"),
+        &TypeFileMetadata::Generic,
     );

     // Get apps
     let apps = glob(&format!("{apps_dir}/*{ext}"))
         .unwrap()
-        .map(|e| read_file(&e.unwrap().to_string_lossy(), TypeFileMetadata::Portfolio).unwrap())
+        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Portfolio).unwrap())
         .collect::<Vec<File>>();

     let appdata = if apps.is_empty() {
@@ -55,7 +55,7 @@ fn build_page(config: Config) -> String {
     // Get archived apps
     let archived_apps = glob(&format!("{apps_dir}/archive/*{ext}"))
         .unwrap()
-        .map(|e| read_file(&e.unwrap().to_string_lossy(), TypeFileMetadata::Portfolio).unwrap())
+        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Portfolio).unwrap())
         .collect::<Vec<File>>();

     let archived_appdata = if archived_apps.is_empty() {
@@ -78,20 +78,20 @@ fn build_page(config: Config) -> String {
             archived_apps_exists: archived_appdata.1,
             err_msg: "is empty",
         },
-        Infos {
-            page_title: Some("Portfolio".into()),
-            page_desc: Some(format!(
+        InfosPage {
+            title: Some("Portfolio".into()),
+            desc: Some(format!(
                 "Portfolio d'{}",
                 config.fc.name.unwrap_or_default()
             )),
-            page_kw: make_kw(&[
+            kw: Some(make_kw(&[
                 "développeur",
                 "portfolio",
                 "projets",
                 "programmation",
                 "applications",
                 "code",
-            ]),
+            ])),
         },
     )
 }
@@ -4,7 +4,7 @@ use cached::proc_macro::once;
 use crate::{
     config::Config,
     misc::utils::{make_kw, Html},
-    template::Infos,
+    template::InfosPage,
 };

 #[get("/web3")]
@@ -17,10 +17,10 @@ fn build_page(config: Config) -> String {
     config.tmpl.render(
         "web3.html",
         (),
-        Infos {
-            page_title: Some("Mylloon".into()),
-            page_desc: Some("Coin reculé de l'internet".into()),
-            page_kw: make_kw(&["web3", "blockchain", "nft", "ai"]),
+        InfosPage {
+            title: Some("Mylloon".into()),
+            desc: Some("Coin reculé de l'internet".into()),
+            kw: Some(make_kw(&["web3", "blockchain", "nft", "ai"])),
         },
     )
 }
@@ -15,15 +15,16 @@ pub struct Template {

 /// Structure used by /routes/*.rs
 #[derive(Debug, Default)]
-pub struct Infos {
+pub struct InfosPage {
     /// Title
-    pub page_title: Option<String>,
+    pub title: Option<String>,
     /// Description
-    pub page_desc: Option<String>,
+    pub desc: Option<String>,
     /// Keywords
-    pub page_kw: Option<String>,
+    pub kw: Option<String>,
 }

+#[allow(clippy::struct_excessive_bools)]
 /// Information on what page the user is currently
 #[derive(Content, Debug, Default)]
 pub struct NavBar {
@@ -37,7 +38,7 @@ pub struct NavBar {

 /// Final structure given to template
 #[derive(Content, Debug)]
-struct Data<T> {
+struct DataPage<T> {
     /// App name
     app_name: String,
     /// App URL
@@ -55,16 +56,16 @@ struct Data<T> {
 }

 impl Template {
-    pub fn render<C: Content>(&self, template: &str, data: C, info: Infos) -> String {
+    pub fn render<C: Content>(&self, template: &str, data: C, info: InfosPage) -> String {
         let mut templates: Ramhorns = Ramhorns::lazy(&self.directory).unwrap();
         let tplt = templates.from_file(template).unwrap();

-        tplt.render(&Data {
-            app_name: self.app_name.to_owned(),
-            url: self.url.to_owned(),
-            page_title: info.page_title,
-            page_desc: info.page_desc,
-            page_kw: info.page_kw,
+        tplt.render(&DataPage {
+            app_name: self.app_name.clone(),
+            url: self.url.clone(),
+            page_title: info.title,
+            page_desc: info.desc,
+            page_kw: info.kw,
             page_author: self.name.clone(),
             data,
         })
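Putting the rename together: routes now fill an InfosPage with the shorter field names (title, desc, kw), and Template::render maps them back onto the page_* fields of DataPage. A minimal standalone sketch of a post-commit call site (the struct below is a stripped-down stand-in, not the full definition):

#[derive(Debug, Default)]
struct InfosPage {
    title: Option<String>,
    desc: Option<String>,
    kw: Option<String>,
}

fn make_kw(list: &[&str]) -> String {
    list.join(", ")
}

fn main() {
    // How a route handler builds it after this commit
    let infos = InfosPage {
        title: Some("Blog".into()),
        desc: Some("Liste des posts".into()),
        kw: Some(make_kw(&["blog", "blogging"])),
    };
    println!("{infos:?}");
}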