commit 0d924de79b (parent d400ef3c5b)
16 changed files with 199 additions and 193 deletions
@@ -8,7 +8,7 @@ use crate::template::Template;
 /// Store the configuration of config/config.toml
 #[derive(Clone, Debug, Default, Deserialize)]
-pub struct FileConfig {
+pub struct FileConfiguration {
     /// http/https
     pub scheme: Option<String>,
     /// Domain name "sub.domain.tld"
@@ -31,7 +31,7 @@ pub struct FileConfig {
     pub exclude_courses: Option<Vec<String>>,
 }
 
-impl FileConfig {
+impl FileConfiguration {
     /// Initialize with default values
     fn new() -> Self {
         Self {
@@ -40,15 +40,16 @@ impl FileConfig {
             port: Some(8080),
             app_name: Some("EWP".into()),
             exclude_courses: Some([].into()),
-            ..FileConfig::default()
+            ..FileConfiguration::default()
         }
     }
 
     /// Complete default structure with an existing one
     fn complete(a: Self) -> Self {
         // Default config
-        let d = FileConfig::new();
+        let d = FileConfiguration::new();
 
+        #[allow(clippy::items_after_statements)]
         /// Return the default value if nothing is value is none
         fn test<T>(val: Option<T>, default: Option<T>) -> Option<T> {
             if val.is_some() {
@@ -84,7 +85,7 @@ pub struct Locations {
 #[derive(Clone, Debug)]
 pub struct Config {
     /// Information given in the config file
-    pub fc: FileConfig,
+    pub fc: FileConfiguration,
     /// Location where the static files are stored
     pub locations: Locations,
     /// Informations about templates
@@ -92,41 +93,37 @@ pub struct Config {
 }
 
 /// Load the config file
-fn get_file_config(file_path: &str) -> FileConfig {
+fn get_file_config(file_path: &str) -> FileConfiguration {
     match fs::read_to_string(file_path) {
         Ok(file) => match toml::from_str(&file) {
-            Ok(stored_config) => FileConfig::complete(stored_config),
+            Ok(stored_config) => FileConfiguration::complete(stored_config),
             Err(file_error) => {
                 panic!("Error in config file: {file_error}");
             }
         },
         Err(_) => {
             // No config file
-            FileConfig::new()
+            FileConfiguration::new()
         }
     }
 }
 
 /// Build the configuration
-pub fn get_config(file_path: &str) -> Config {
+pub fn get_configuration(file_path: &str) -> Config {
     let internal_config = get_file_config(file_path);
 
-    let static_dir = "static".to_owned();
-    let templates_dir = "templates".to_owned();
-    let files_root = init(
-        "dist".into(),
-        static_dir.to_owned(),
-        templates_dir.to_owned(),
-    );
+    let static_dir = "static";
+    let templates_dir = "templates";
+    let files_root = init("dist".into(), static_dir, templates_dir);
 
     Config {
-        fc: internal_config.to_owned(),
+        fc: internal_config.clone(),
         locations: Locations {
-            static_dir: format!("{}/{}", files_root, static_dir),
+            static_dir: format!("{files_root}/{static_dir}"),
             data_dir: String::from("data"),
         },
         tmpl: Template {
-            directory: format!("{}/{}", files_root, templates_dir),
+            directory: format!("{files_root}/{templates_dir}"),
             app_name: internal_config.app_name.unwrap(),
             url: internal_config.domain.unwrap(),
             name: internal_config.name,
@@ -135,7 +132,7 @@ pub fn get_config(file_path: &str) -> Config {
 }
 
 /// Preparation before running the http server
-fn init(dist_dir: String, static_dir: String, templates_dir: String) -> String {
+fn init(dist_dir: String, static_dir: &str, templates_dir: &str) -> String {
     // The static folder is minimized only in release mode
     if cfg!(debug_assertions) {
         ".".into()
@@ -154,7 +151,7 @@ fn init(dist_dir: String, static_dir: String, templates_dir: String) -> String {
         let path = entry.unwrap();
         let path_with_dist = path
             .to_string_lossy()
-            .replace(&static_dir, &format!("{dist_dir}/{static_dir}"));
+            .replace(static_dir, &format!("{dist_dir}/{static_dir}"));
 
         minify_and_copy(&cfg, path, path_with_dist);
     }
@@ -164,7 +161,7 @@ fn init(dist_dir: String, static_dir: String, templates_dir: String) -> String {
         let path = entry.unwrap();
         let path_with_dist = path
             .to_string_lossy()
-            .replace(&templates_dir, &format!("{dist_dir}/{templates_dir}"));
+            .replace(templates_dir, &format!("{dist_dir}/{templates_dir}"));
 
         minify_and_copy(&cfg, path, path_with_dist);
     }
@@ -18,20 +18,20 @@ mod routes;
 
 #[actix_web::main]
 async fn main() -> Result<()> {
-    let config = config::get_config("./config/config.toml");
+    let config = config::get_configuration("./config/config.toml");
 
     let addr = ("0.0.0.0", config.fc.port.unwrap());
 
     println!(
         "Listening to {}://{}:{}",
-        config.to_owned().fc.scheme.unwrap(),
+        config.clone().fc.scheme.unwrap(),
         addr.0,
         addr.1
     );
 
     HttpServer::new(move || {
         App::new()
-            .app_data(web::Data::new(config.to_owned()))
+            .app_data(web::Data::new(config.clone()))
             .wrap(Compress::default())
             .wrap(
                 DefaultHeaders::new()
@@ -60,7 +60,7 @@ async fn main() -> Result<()> {
             .service(portfolio::page)
             .service(setup::page)
             .service(web3::page)
-            .service(Files::new("/", config.locations.static_dir.to_owned()))
+            .service(Files::new("/", config.locations.static_dir.clone()))
             .default_service(web::to(not_found::page))
     })
     .bind(addr)?
@@ -45,8 +45,8 @@ impl From<u8> for ProjectState {
 
 #[derive(Debug)]
 pub struct Project {
-    pub project: String,
-    pub project_url: String,
+    pub name: String,
+    pub url: String,
     pub status: ProjectState,
     pub title: String,
     pub id: u32,
@@ -68,8 +68,8 @@ pub async fn fetch_pr() -> Result<Vec<Project>, Error> {
     let mut list = vec![];
     resp.items.iter().for_each(|p| {
         list.push(Project {
-            project: p.repository_url.split('/').last().unwrap().into(),
-            project_url: p.repository_url.to_owned(),
+            name: p.repository_url.split('/').last().unwrap().into(),
+            url: p.repository_url.clone(),
             status: if p.pull_request.merged_at.is_none() {
                 if p.state == "closed" {
                     ProjectState::Closed
@@ -79,9 +79,9 @@ pub async fn fetch_pr() -> Result<Vec<Project>, Error> {
             } else {
                 ProjectState::Merged
             },
-            title: p.title.to_owned(),
+            title: p.title.clone(),
             id: p.number,
-            contrib_url: p.pull_request.html_url.to_owned(),
+            contrib_url: p.pull_request.html_url.clone(),
         });
     });
 
@@ -93,6 +93,7 @@ pub struct FileMetadata {
     pub portfolio: Option<FileMetadataPortfolio>,
 }
 
+#[allow(clippy::struct_excessive_bools)]
 /// Global metadata
 #[derive(Content, Debug)]
 pub struct Metadata {
@@ -105,7 +106,7 @@ pub struct Metadata {
 
 impl Metadata {
     /// Update current metadata boolean fields, keeping true ones
-    fn merge(&mut self, other: Metadata) {
+    fn merge(&mut self, other: &Metadata) {
         self.math = self.math || other.math;
         self.mermaid = self.mermaid || other.mermaid;
         self.syntax_highlight = self.syntax_highlight || other.syntax_highlight;
@@ -159,9 +160,9 @@ pub fn get_options() -> ComrakOptions {
 }
 
 /// Resize images if needed
-fn custom_img_size(html: String) -> String {
+fn custom_img_size(html: &str) -> String {
     rewrite_str(
-        &html,
+        html,
         RewriteStrSettings {
             element_content_handlers: vec![element!("img[alt]", |el| {
                 let alt = el.get_attribute("alt").unwrap();
@@ -180,7 +181,7 @@ fn custom_img_size(html: String) -> String {
                     el.set_attribute("width", dimension.0).unwrap();
                     el.set_attribute("height", dimension.1).unwrap();
                     if new_alt.is_empty() {
-                        el.remove_attribute("alt")
+                        el.remove_attribute("alt");
                     } else {
                         el.set_attribute("alt", new_alt).unwrap();
                     }
@@ -190,7 +191,7 @@ fn custom_img_size(html: String) -> String {
                 if data.parse::<i32>().is_ok() {
                     el.set_attribute("width", data).unwrap();
                     if new_alt.is_empty() {
-                        el.remove_attribute("alt")
+                        el.remove_attribute("alt");
                     } else {
                         el.set_attribute("alt", new_alt).unwrap();
                     }
@@ -207,7 +208,7 @@ fn custom_img_size(html: String) -> String {
 }
 
 /// Fix local images to base64 and integration of markdown files
-fn fix_images_and_integration(path: &str, html: String) -> (String, Metadata) {
+fn fix_images_and_integration(path: &str, html: &str) -> (String, Metadata) {
     let mut metadata = Metadata {
         info: FileMetadata::default(),
         math: false,
@@ -218,7 +219,7 @@ fn fix_images_and_integration(path: &str, html: String) -> (String, Metadata) {
 
     (
         rewrite_str(
-            &html,
+            html,
             RewriteStrSettings {
                 element_content_handlers: vec![element!("img", |el| {
                     if let Some(src) = el.get_attribute("src") {
@@ -234,15 +235,15 @@ fn fix_images_and_integration(path: &str, html: String) -> (String, Metadata) {
                         let data = read_md(
                             &img_path,
                             &file,
-                            TypeFileMetadata::Generic,
+                            &TypeFileMetadata::Generic,
                             Some(options),
                         );
                         el.replace(&data.content, ContentType::Html);
-                        metadata.merge(data.metadata);
+                        metadata.merge(&data.metadata);
                     } else {
                         let image = general_purpose::STANDARD.encode(file);
 
-                        el.set_attribute("src", &format!("data:{};base64,{}", mime, image))
+                        el.set_attribute("src", &format!("data:{mime};base64,{image}"))
                             .unwrap();
                     }
                 }
@@ -262,7 +263,7 @@ fn fix_images_and_integration(path: &str, html: String) -> (String, Metadata) {
 pub fn read_md(
     path: &str,
     raw_text: &str,
-    metadata_type: TypeFileMetadata,
+    metadata_type: &TypeFileMetadata,
     options: Option<Options>,
 ) -> File {
     let arena = Arena::new();
@@ -287,9 +288,9 @@ pub fn read_md(
 
     let children_metadata;
     let mail_obfsucated;
-    (html_content, children_metadata) = fix_images_and_integration(path, html_content);
-    html_content = custom_img_size(html_content);
-    (html_content, mail_obfsucated) = mail_obfuscation(html_content);
+    (html_content, children_metadata) = fix_images_and_integration(path, &html_content);
+    html_content = custom_img_size(&html_content);
+    (html_content, mail_obfsucated) = mail_obfuscation(&html_content);
 
     let mut final_metadata = Metadata {
         info: metadata,
@@ -298,7 +299,7 @@ pub fn read_md(
         math: check_math(&html_content),
         mail_obfsucated,
     };
-    final_metadata.merge(children_metadata);
+    final_metadata.merge(&children_metadata);
 
     File {
         metadata: final_metadata,
@@ -312,7 +313,7 @@ fn deserialize_metadata<T: Default + serde::de::DeserializeOwned>(text: &str) ->
 }
 
 /// Fetch metadata from AST
-pub fn get_metadata<'a>(root: &'a AstNode<'a>, mtype: TypeFileMetadata) -> FileMetadata {
+pub fn get_metadata<'a>(root: &'a AstNode<'a>, mtype: &TypeFileMetadata) -> FileMetadata {
     match root
         .children()
         .find_map(|node| match &node.data.borrow().value {
@@ -327,7 +328,7 @@ pub fn get_metadata<'a>(root: &'a AstNode<'a>, mtype: TypeFileMetadata) -> FileM
 
             // Trim descriptions
             if let Some(desc) = &mut metadata.description {
-                desc.clone_from(&desc.trim().into())
+                desc.clone_from(&desc.trim().into());
             }
 
             FileMetadata {
@@ -431,11 +432,11 @@ fn hljs_replace<'a>(root: &'a AstNode<'a>, mermaid_str: &str) {
 }
 
 /// Obfuscate email if email found
-fn mail_obfuscation(html: String) -> (String, bool) {
+fn mail_obfuscation(html: &str) -> (String, bool) {
     let modified = Arc::new(AtomicBool::new(false));
     (
         rewrite_str(
-            &html,
+            html,
             RewriteStrSettings {
                 element_content_handlers: vec![element!("a[href^='mailto:']", |el| {
                     modified.store(true, Ordering::SeqCst);
@@ -444,7 +445,7 @@ fn mail_obfuscation(html: String) -> (String, bool) {
                     let (_uri, mail) = &link.split_at(7);
                     let (before, after) = mail.split_once('@').unwrap();
 
-                    let modified_mail = format!("{}<span class='at'>(at)</span>{}", before, after);
+                    let modified_mail = format!("{before}<span class='at'>(at)</span>{after}");
 
                     el.set_inner_content(&modified_mail, ContentType::Html);
 
@@ -9,7 +9,7 @@ use base64::{engine::general_purpose, Engine};
 use cached::proc_macro::cached;
 use reqwest::Client;
 
-use crate::config::FileConfig;
+use crate::config::FileConfiguration;
 
 use super::markdown::{read_md, File, FileMetadata, Metadata, TypeFileMetadata};
 
@@ -22,7 +22,7 @@ pub fn get_reqwest_client() -> Client {
 }
 
 /// Get URL of the app
-pub fn get_url(fc: FileConfig) -> String {
+pub fn get_url(fc: FileConfiguration) -> String {
     /* let port = match fc.scheme.as_deref() {
         Some("https") if fc.port == Some(443) => String::new(),
         Some("http") if fc.port == Some(80) => String::new(),
@@ -33,8 +33,8 @@ pub fn get_url(fc: FileConfig) -> String {
 }
 
 /// Make a list of keywords
-pub fn make_kw(list: &[&str]) -> Option<String> {
-    Some(list.join(", "))
+pub fn make_kw(list: &[&str]) -> String {
+    list.join(", ")
 }
 
 /// Send HTML file
@@ -53,7 +53,7 @@ impl Responder for Html {
 }
 
 /// Read a file
-pub fn read_file(filename: &str, expected_file: TypeFileMetadata) -> Option<File> {
+pub fn read_file(filename: &str, expected_file: &TypeFileMetadata) -> Option<File> {
     match Path::new(filename).extension() {
         Some(ext) => match ext.to_str().unwrap() {
             "pdf" => match fs::read(filename) {
@@ -82,10 +82,9 @@ fn read_pdf(data: Vec<u8>) -> File {
         },
         content: format!(
             r#"<embed
-                src="data:application/pdf;base64,{}"
+                src="data:application/pdf;base64,{pdf}"
                 style="width: 100%; height: 79vh";
-            >"#,
-            pdf
+            >"#
         ),
     }
 }
@@ -1,4 +1,4 @@
-use crate::{config::Config, misc::utils::get_url, template::Infos};
+use crate::{config::Config, misc::utils::get_url, template::InfosPage};
 use actix_web::{get, http::header::ContentType, routes, web, HttpResponse, Responder};
 use cached::proc_macro::once;
 use ramhorns::Content;
@@ -28,7 +28,7 @@ fn build_securitytxt(config: Config) -> String {
             contact: config.fc.mail.unwrap_or_default(),
             pref_lang: config.fc.lang.unwrap_or_default(),
         },
-        Infos::default(),
+        InfosPage::default(),
     )
 }
 
@@ -55,7 +55,7 @@ fn build_humanstxt(config: Config) -> String {
             lang: config.fc.lang.unwrap_or_default(),
             name: config.fc.fullname.unwrap_or_default(),
         },
-        Infos::default(),
+        InfosPage::default(),
     )
 }
 
@@ -94,6 +94,6 @@ fn build_webmanifest(config: Config) -> String {
             description: "Easy WebPage generator".to_owned(),
             url: get_url(config.fc),
         },
-        Infos::default(),
+        InfosPage::default(),
     )
 }
@@ -10,6 +10,6 @@ struct Info {
 #[get("/love")]
 async fn love() -> impl Responder {
     HttpResponse::Ok().json(Info {
-        unix_epoch: 1605576600,
+        unix_epoch: 1_605_576_600,
     })
 }
@@ -21,7 +21,7 @@ use crate::{
         markdown::{get_metadata, get_options, File, FileMetadataBlog, TypeFileMetadata},
         utils::{get_url, make_kw, read_file, Html},
     },
-    template::{Infos, NavBar},
+    template::{InfosPage, NavBar},
 };
 
 const MIME_TYPE_RSS: &str = "application/rss+xml";
@@ -44,11 +44,11 @@ struct BlogIndexTemplate {
 #[once(time = 60)]
 fn build_index(config: Config) -> String {
     let blog_dir = format!("{}/{}", config.locations.data_dir, BLOG_DIR);
-    let mut posts = get_posts(format!("{}/{}", blog_dir, POST_DIR));
+    let mut posts = get_posts(&format!("{blog_dir}/{POST_DIR}"));
 
     // Get about
     let about: Option<File> =
-        read_file(&format!("{}/about.md", blog_dir), TypeFileMetadata::Generic);
+        read_file(&format!("{blog_dir}/about.md"), &TypeFileMetadata::Generic);
 
     // Sort from newest to oldest
     posts.sort_by_cached_key(|p| (p.date.year, p.date.month, p.date.day));
@@ -65,13 +65,13 @@ fn build_index(config: Config) -> String {
             no_posts: posts.is_empty(),
             posts,
         },
-        Infos {
-            page_title: Some("Blog".into()),
-            page_desc: Some(format!(
+        InfosPage {
+            title: Some("Blog".into()),
+            desc: Some(format!(
                 "Liste des posts d'{}",
                 config.fc.name.unwrap_or_default()
             )),
-            page_kw: make_kw(&["blog", "blogging"]),
+            kw: Some(make_kw(&["blog", "blogging"])),
         },
     )
 }
@@ -89,12 +89,12 @@ struct Post {
 impl Post {
     // Fetch the file content
     fn fetch_content(&mut self, data_dir: &str) {
-        let blog_dir = format!("{}/{}/{}", data_dir, BLOG_DIR, POST_DIR);
+        let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
         let ext = ".md";
 
         if let Some(file) = read_file(
             &format!("{blog_dir}/{}{ext}", self.url),
-            TypeFileMetadata::Blog,
+            &TypeFileMetadata::Blog,
         ) {
             self.content = Some(file.content);
         }
@@ -104,13 +104,13 @@ impl Post {
 impl Hash for Post {
     fn hash<H: Hasher>(&self, state: &mut H) {
         if let Some(content) = &self.content {
-            content.hash(state)
+            content.hash(state);
         }
     }
 }
 
-fn get_posts(location: String) -> Vec<Post> {
-    let entries = match std::fs::read_dir(&location) {
+fn get_posts(location: &str) -> Vec<Post> {
+    let entries = match std::fs::read_dir(location) {
         Ok(res) => res
             .flatten()
             .filter(|f| match f.path().extension() {
@@ -124,8 +124,8 @@ fn get_posts(location: String) -> Vec<Post> {
     entries
         .iter()
         .filter_map(|f| {
-            let _filename = f.file_name();
-            let filename = _filename.to_string_lossy();
+            let fname = f.file_name();
+            let filename = fname.to_string_lossy();
             let file_without_ext = filename.split_at(filename.len() - 3).0;
 
             let file_metadata = match std::fs::read_to_string(format!("{location}/{filename}")) {
@@ -134,7 +134,7 @@ fn get_posts(location: String) -> Vec<Post> {
 
             let options = get_options();
             let root = parse_document(&arena, &text, &options);
-            let mut metadata = get_metadata(root, TypeFileMetadata::Blog).blog.unwrap();
+            let mut metadata = get_metadata(root, &TypeFileMetadata::Blog).blog.unwrap();
 
             // Always have a title
             metadata.title = match metadata.title {
@@ -173,7 +173,7 @@ fn get_posts(location: String) -> Vec<Post> {
                     .tags
                     .unwrap_or_default()
                     .iter()
-                    .map(|t| t.name.to_owned())
+                    .map(|t| t.name.clone())
                     .collect(),
                 })
             } else {
@@ -192,16 +192,19 @@ struct BlogPostTemplate {
 
 #[get("/blog/p/{id}")]
 async fn page(path: web::Path<(String,)>, config: web::Data<Config>) -> impl Responder {
-    Html(build_post(path.into_inner().0, config.get_ref().to_owned()))
+    Html(build_post(
+        &path.into_inner().0,
+        config.get_ref().to_owned(),
+    ))
 }
 
-fn build_post(file: String, config: Config) -> String {
+fn build_post(file: &str, config: Config) -> String {
     let mut post = None;
     let (infos, toc) = get_post(
         &mut post,
         file,
-        config.fc.name.unwrap_or_default(),
-        config.locations.data_dir,
+        &config.fc.name.unwrap_or_default(),
+        &config.locations.data_dir,
     );
 
     config.tmpl.render(
@@ -220,20 +223,20 @@ fn build_post(file: String, config: Config) -> String {
 
 fn get_post(
     post: &mut Option<File>,
-    filename: String,
-    name: String,
-    data_dir: String,
-) -> (Infos, String) {
-    let blog_dir = format!("{}/{}/{}", data_dir, BLOG_DIR, POST_DIR);
+    filename: &str,
+    name: &str,
+    data_dir: &str,
+) -> (InfosPage, String) {
+    let blog_dir = format!("{data_dir}/{BLOG_DIR}/{POST_DIR}");
     let ext = ".md";
 
     *post = read_file(
         &format!("{blog_dir}/{filename}{ext}"),
-        TypeFileMetadata::Blog,
+        &TypeFileMetadata::Blog,
     );
 
     let default = (
-        &filename,
+        filename,
         &format!("Blog d'{name}"),
         Vec::new(),
         String::new(),
@@ -262,15 +265,15 @@ fn get_post(
     };
 
     (
-        Infos {
-            page_title: Some(format!("Post: {}", title)),
-            page_desc: Some(desc.clone()),
-            page_kw: make_kw(
+        InfosPage {
+            title: Some(format!("Post: {title}")),
+            desc: Some(desc.clone()),
+            kw: Some(make_kw(
                 &["blog", "blogging", "write", "writing"]
                     .into_iter()
                     .chain(tags.iter().map(|t| t.name.as_str()))
                     .collect::<Vec<_>>(),
-            ),
+            )),
         },
         toc,
     )
@@ -285,7 +288,7 @@ async fn rss(config: web::Data<Config>) -> impl Responder {
 
 #[once(time = 10800)] // 3h
 fn build_rss(config: Config) -> String {
-    let mut posts = get_posts(format!(
+    let mut posts = get_posts(&format!(
         "{}/{}/{}",
         config.locations.data_dir, BLOG_DIR, POST_DIR
     ));
@@ -301,7 +304,7 @@ fn build_rss(config: Config) -> String {
     }
 
     let link_to_site = get_url(config.fc.clone());
-    let author = if let (Some(mail), Some(name)) = (config.fc.mail, config.fc.fullname.to_owned()) {
+    let author = if let (Some(mail), Some(name)) = (config.fc.mail, config.fc.fullname.clone()) {
         Some(format!("{mail} ({name})"))
     } else {
         None
@@ -309,11 +312,11 @@ fn build_rss(config: Config) -> String {
     let title = format!("Blog d'{}", config.fc.name.unwrap_or_default());
     let lang = "fr";
     let channel = Channel {
-        title: title.to_owned(),
-        link: link_to_site.to_owned(),
+        title: title.clone(),
+        link: link_to_site.clone(),
         description: "Un fil qui parle d'informatique notamment".into(),
         language: Some(lang.into()),
-        managing_editor: author.to_owned(),
+        managing_editor: author.clone(),
         webmaster: author,
         pub_date: Some(Local::now().to_rfc2822()),
         categories: ["blog", "blogging", "write", "writing"]
@@ -326,9 +329,9 @@ fn build_rss(config: Config) -> String {
         generator: Some("ewp with rss crate".into()),
         docs: Some("https://www.rssboard.org/rss-specification".into()),
         image: Some(Image {
-            url: format!("{}/icons/favicon-32x32.png", link_to_site),
-            title: title.to_owned(),
-            link: link_to_site.to_owned(),
+            url: format!("{link_to_site}/icons/favicon-32x32.png"),
+            title: title.clone(),
+            link: link_to_site.clone(),
             ..Image::default()
         }),
         items: posts
@@ -339,9 +342,9 @@ fn build_rss(config: Config) -> String {
 
                 // Build item
                 Item {
-                    title: Some(p.title.to_owned()),
+                    title: Some(p.title.clone()),
                     link: Some(format!("{}/blog/p/{}", link_to_site, p.url)),
-                    description: p.content.to_owned(),
+                    description: p.content.clone(),
                     categories: p
                         .tags
                         .iter()
@@ -374,7 +377,7 @@ fn build_rss(config: Config) -> String {
             .collect(),
         atom_ext: Some(AtomExtension {
             links: vec![Link {
-                href: format!("{}/blog/rss", link_to_site),
+                href: format!("{link_to_site}/blog/rss"),
                 rel: "self".into(),
                 hreflang: Some(lang.into()),
                 mime_type: Some(MIME_TYPE_RSS.into()),
@@ -10,7 +10,7 @@ use crate::{
         markdown::{File, TypeFileMetadata},
         utils::{make_kw, read_file, Html},
     },
-    template::{Infos, NavBar},
+    template::{InfosPage, NavBar},
 };
 
 const CONTACT_DIR: &str = "contacts";
@@ -83,7 +83,7 @@ async fn service_redirection(config: web::Data<Config>, req: HttpRequest) -> imp
         // Find requested service
         .filter(|&x| x.service == *info.query("service"))
         // Search for a potential scope
-        .filter(|&x| match (info.get("scope"), x.scope.to_owned()) {
+        .filter(|&x| match (info.get("scope"), x.scope.clone()) {
             // The right scope is accepted
             (Some(str_value), Some(string_value)) if str_value == string_value.as_str() => true,
             // No scope provided is accepted
@@ -131,26 +131,26 @@ fn build_page(config: Config) -> String {
 
     // Get about
     let about = read_file(
-        &format!("{}/about.md", contacts_dir),
-        TypeFileMetadata::Generic,
+        &format!("{contacts_dir}/about.md"),
+        &TypeFileMetadata::Generic,
     );
 
     let socials_dir = "socials";
     let mut socials = glob(&format!("{contacts_dir}/{socials_dir}/*{ext}"))
         .unwrap()
-        .map(|e| read_file(&e.unwrap().to_string_lossy(), TypeFileMetadata::Contact).unwrap())
+        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
         .collect::<Vec<File>>();
 
     let forges_dir = "forges";
     let mut forges = glob(&format!("{contacts_dir}/{forges_dir}/*{ext}"))
         .unwrap()
-        .map(|e| read_file(&e.unwrap().to_string_lossy(), TypeFileMetadata::Contact).unwrap())
+        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
         .collect::<Vec<File>>();
 
     let others_dir = "others";
     let mut others = glob(&format!("{contacts_dir}/{others_dir}/*{ext}"))
         .unwrap()
-        .map(|e| read_file(&e.unwrap().to_string_lossy(), TypeFileMetadata::Contact).unwrap())
+        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Contact).unwrap())
         .collect::<Vec<File>>();
 
     // Remove paragraphs in custom statements
@@ -176,10 +176,15 @@ fn build_page(config: Config) -> String {
             others_exists: !others.is_empty(),
             others,
         },
-        Infos {
-            page_title: Some("Contacts".into()),
-            page_desc: Some(format!("Réseaux d'{}", config.fc.name.unwrap_or_default())),
-            page_kw: make_kw(&["réseaux sociaux", "email", "contact", "linktree"]),
+        InfosPage {
+            title: Some("Contacts".into()),
+            desc: Some(format!("Réseaux d'{}", config.fc.name.unwrap_or_default())),
+            kw: Some(make_kw(&[
+                "réseaux sociaux",
+                "email",
+                "contact",
+                "linktree",
+            ])),
         },
     )
 }
@@ -6,7 +6,7 @@ use crate::{
         github::{fetch_pr, ProjectState},
         utils::{make_kw, Html},
     },
-    template::{Infos, NavBar},
+    template::{InfosPage, NavBar},
 };
 use actix_web::{get, web, Responder};
 use cached::proc_macro::once;
@@ -58,31 +58,31 @@ async fn build_page(config: Config) -> String {
 
             // Grouping PRs by projects
             let mut map: HashMap<&str, Vec<Pull>> = HashMap::new();
-            projects.iter().for_each(|p| {
+            for p in &projects {
                 let project = Pull {
-                    url: p.contrib_url.to_owned(),
+                    url: p.contrib_url.clone(),
                     id: p.id,
-                    name_repo: p.project.to_owned(),
-                    title: p.title.to_owned(),
+                    name_repo: p.name.clone(),
+                    title: p.title.clone(),
                     state: p.status as u8,
                 };
-                let project_name = p.project.as_str();
+                let project_name = p.name.as_str();
                 if map.contains_key(project_name) {
                     map.entry(project_name).and_modify(|v| v.push(project));
                 } else {
                     data.push(Project {
                         name: project_name.into(),
-                        url: p.project_url.to_owned(),
+                        url: p.url.clone(),
                         pulls_merged: Vec::new(),
                         pulls_closed: Vec::new(),
                         pulls_open: Vec::new(),
                     });
                     map.insert(project_name, vec![project]);
                 }
-            });
+            }
 
             // Distributes each PR in the right vector
-            data.iter_mut().for_each(|d| {
+            for d in &mut data {
                 map.get(d.name.as_str()).unwrap().iter().for_each(|p| {
                     let state = p.state.into();
                     match state {
@@ -94,14 +94,14 @@ async fn build_page(config: Config) -> String {
                 let mut name: Vec<char> = d.name.replace('-', " ").chars().collect();
                 name[0] = name[0].to_uppercase().next().unwrap();
                 d.name = name.into_iter().collect();
-            });
+            }
 
             // Ascending order by pulls IDs
-            data.iter_mut().for_each(|d| {
+            for d in &mut data {
                 d.pulls_closed.reverse();
                 d.pulls_merged.reverse();
                 d.pulls_open.reverse();
-            });
+            }
 
             // Ascending order by number of pulls
             data.sort_by(|a, b| {
@@ -135,7 +135,7 @@ async fn build_page(config: Config) -> String {
             }
         }
         Err(e) => {
-            eprintln!("{}", e);
+            eprintln!("{e}");
 
             PortfolioTemplate {
                 navbar,
@@ -150,13 +150,13 @@ async fn build_page(config: Config) -> String {
     config.tmpl.render(
         "contrib.html",
         data,
-        Infos {
-            page_title: Some("Mes contributions".into()),
-            page_desc: Some(format!(
+        InfosPage {
+            title: Some("Mes contributions".into()),
+            desc: Some(format!(
                 "Contributions d'{} à GitHub",
                 config.fc.name.unwrap_or_default()
            )),
-            page_kw: make_kw(&[
+            kw: Some(make_kw(&[
                 "github",
                 "contributions",
                 "open source",
@@ -164,7 +164,7 @@ async fn build_page(config: Config) -> String {
                 "portfolio",
                 "projets",
                 "code",
-            ]),
+            ])),
         },
     )
 }
@@ -12,7 +12,7 @@ use crate::{
         markdown::{File, TypeFileMetadata},
         utils::{make_kw, read_file, Html},
     },
-    template::{Infos, NavBar},
+    template::{InfosPage, NavBar},
 };
 
 #[derive(Debug, Deserialize)]
@@ -22,7 +22,7 @@ pub struct PathRequest {
 
 #[get("/cours")]
 async fn page(info: web::Query<PathRequest>, config: web::Data<Config>) -> impl Responder {
-    Html(build_page(info, config.get_ref().to_owned()))
+    Html(build_page(&info, config.get_ref().to_owned()))
 }
 
 #[derive(Content, Debug)]
@@ -94,7 +94,7 @@ fn get_filetree(dir_path: &str, exclusion_patterns: &Vec<Regex>) -> FileNode {
 fn get_content(
     cours_dir: &str,
     path: &web::Query<PathRequest>,
-    exclusion_list: Vec<String>,
+    exclusion_list: &[String],
 ) -> Option<File> {
     let filename = match &path.q {
         Some(q) => q,
@@ -111,14 +111,14 @@ fn get_content(
 
     read_file(
         &format!("{cours_dir}/{filename}"),
-        TypeFileMetadata::Generic,
+        &TypeFileMetadata::Generic,
     )
 }
 
-fn build_page(info: web::Query<PathRequest>, config: Config) -> String {
+fn build_page(info: &web::Query<PathRequest>, config: Config) -> String {
     let cours_dir = "data/cours";
     let exclusion_list = config.fc.exclude_courses.unwrap();
-    let exclusion_patterns = compile_patterns(exclusion_list.to_owned());
+    let exclusion_patterns = compile_patterns(exclusion_list.clone());
     let filetree = get_filetree(cours_dir, &exclusion_patterns);
 
     config.tmpl.render(
@@ -129,12 +129,12 @@ fn build_page(info: web::Query<PathRequest>, config: Config) -> String {
                 ..NavBar::default()
             },
             filetree: serde_json::to_string(&filetree).unwrap(),
-            content: get_content(cours_dir, &info, exclusion_list),
+            content: get_content(cours_dir, info, &exclusion_list),
         },
-        Infos {
-            page_title: Some("Cours".into()),
-            page_desc: Some("Cours à l'univ".into()),
-            page_kw: make_kw(&[
+        InfosPage {
+            title: Some("Cours".into()),
+            desc: Some("Cours à l'univ".into()),
+            kw: Some(make_kw(&[
                 "cours",
                 "études",
                 "université",
@@ -142,7 +142,7 @@ fn build_page(info: web::Query<PathRequest>, config: Config) -> String {
                 "master",
                 "notes",
                 "digital garden",
-            ]),
+            ])),
         },
     )
 }
@@ -8,7 +8,7 @@ use crate::{
         markdown::{File, TypeFileMetadata},
         utils::{make_kw, read_file, Html},
     },
-    template::{Infos, NavBar},
+    template::{InfosPage, NavBar},
 };
 
 #[get("/")]
@@ -37,11 +37,11 @@ struct StyleAvatar {
 fn build_page(config: Config) -> String {
     let mut file = read_file(
         &format!("{}/index.md", config.locations.data_dir),
-        TypeFileMetadata::Index,
+        &TypeFileMetadata::Index,
     );
 
     // Default values
-    let mut name = config.fc.fullname.to_owned().unwrap_or_default();
+    let mut name = config.fc.fullname.clone().unwrap_or_default();
     let mut pronouns = None;
     let mut avatar = "/icons/apple-touch-icon.png".to_owned();
     let mut avatar_caption = "EWP avatar".to_owned();
@@ -52,12 +52,12 @@ fn build_page(config: Config) -> String {
 
     if let Some(f) = &file {
         if let Some(m) = &f.metadata.info.index {
-            name = m.name.to_owned().unwrap_or(name);
-            avatar = m.avatar.to_owned().unwrap_or(avatar);
+            name = m.name.clone().unwrap_or(name);
+            avatar = m.avatar.clone().unwrap_or(avatar);
             m.pronouns.clone_into(&mut pronouns);
-            avatar_caption = m.avatar_caption.to_owned().unwrap_or(avatar_caption);
+            avatar_caption = m.avatar_caption.clone().unwrap_or(avatar_caption);
 
-            if let Some(style) = m.avatar_style.to_owned() {
+            if let Some(style) = m.avatar_style.clone() {
                 if style.trim() == "square" {
                     avatar_style = StyleAvatar {
                         square: true,
@@ -67,7 +67,7 @@ fn build_page(config: Config) -> String {
             }
         }
     } else {
-        file = read_file("README.md", TypeFileMetadata::Generic);
+        file = read_file("README.md", &TypeFileMetadata::Generic);
     }
 
     config.tmpl.render(
@@ -84,10 +84,10 @@ fn build_page(config: Config) -> String {
             avatar_caption,
             avatar_style,
         },
-        Infos {
-            page_title: config.fc.fullname,
-            page_desc: Some("Page principale".into()),
-            page_kw: make_kw(&["index", "étudiant", "accueil"]),
+        InfosPage {
+            title: config.fc.fullname,
+            desc: Some("Page principale".into()),
+            kw: Some(make_kw(&["index", "étudiant", "accueil"])),
         },
     )
 }
@@ -5,7 +5,7 @@ use ramhorns::Content;
 use crate::{
     config::Config,
     misc::utils::{get_url, Html},
-    template::{Infos, NavBar},
+    template::{InfosPage, NavBar},
 };
 
 pub async fn page(config: web::Data<Config>) -> impl Responder {
@@ -28,9 +28,9 @@ fn build_page(config: Config) -> String {
             www: get_url(config.fc.clone()),
             onion: config.fc.onion,
         },
-        Infos {
-            page_desc: Some("Une page perdu du web".into()),
-            ..Infos::default()
+        InfosPage {
+            desc: Some("Une page perdu du web".into()),
+            ..InfosPage::default()
         },
     )
 }
@@ -9,7 +9,7 @@ use crate::{
         markdown::{File, TypeFileMetadata},
         utils::{make_kw, read_file, Html},
     },
-    template::{Infos, NavBar},
+    template::{InfosPage, NavBar},
 };
 
 #[get("/portfolio")]
@@ -31,19 +31,19 @@ struct PortfolioTemplate<'a> {
 #[once(time = 60)]
 fn build_page(config: Config) -> String {
     let projects_dir = format!("{}/projects", config.locations.data_dir);
-    let apps_dir = format!("{}/apps", projects_dir);
+    let apps_dir = format!("{projects_dir}/apps");
     let ext = ".md";
 
     // Get about
     let about = read_file(
-        &format!("{}/about.md", projects_dir),
-        TypeFileMetadata::Generic,
+        &format!("{projects_dir}/about.md"),
+        &TypeFileMetadata::Generic,
     );
 
     // Get apps
     let apps = glob(&format!("{apps_dir}/*{ext}"))
         .unwrap()
-        .map(|e| read_file(&e.unwrap().to_string_lossy(), TypeFileMetadata::Portfolio).unwrap())
+        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Portfolio).unwrap())
         .collect::<Vec<File>>();
 
     let appdata = if apps.is_empty() {
@@ -55,7 +55,7 @@ fn build_page(config: Config) -> String {
     // Get archived apps
     let archived_apps = glob(&format!("{apps_dir}/archive/*{ext}"))
         .unwrap()
-        .map(|e| read_file(&e.unwrap().to_string_lossy(), TypeFileMetadata::Portfolio).unwrap())
+        .map(|e| read_file(&e.unwrap().to_string_lossy(), &TypeFileMetadata::Portfolio).unwrap())
         .collect::<Vec<File>>();
 
     let archived_appdata = if archived_apps.is_empty() {
@@ -78,20 +78,20 @@ fn build_page(config: Config) -> String {
             archived_apps_exists: archived_appdata.1,
             err_msg: "is empty",
         },
-        Infos {
-            page_title: Some("Portfolio".into()),
-            page_desc: Some(format!(
+        InfosPage {
+            title: Some("Portfolio".into()),
+            desc: Some(format!(
                 "Portfolio d'{}",
                 config.fc.name.unwrap_or_default()
            )),
-            page_kw: make_kw(&[
+            kw: Some(make_kw(&[
                 "développeur",
                 "portfolio",
                 "projets",
                 "programmation",
                 "applications",
                 "code",
-            ]),
+            ])),
         },
     )
 }
@@ -4,7 +4,7 @@ use cached::proc_macro::once;
 use crate::{
     config::Config,
     misc::utils::{make_kw, Html},
-    template::Infos,
+    template::InfosPage,
 };
 
 #[get("/web3")]
@@ -17,10 +17,10 @@ fn build_page(config: Config) -> String {
     config.tmpl.render(
         "web3.html",
         (),
-        Infos {
-            page_title: Some("Mylloon".into()),
-            page_desc: Some("Coin reculé de l'internet".into()),
-            page_kw: make_kw(&["web3", "blockchain", "nft", "ai"]),
+        InfosPage {
+            title: Some("Mylloon".into()),
+            desc: Some("Coin reculé de l'internet".into()),
+            kw: Some(make_kw(&["web3", "blockchain", "nft", "ai"])),
         },
     )
 }
@@ -15,15 +15,16 @@ pub struct Template {
 
 /// Structure used by /routes/*.rs
 #[derive(Debug, Default)]
-pub struct Infos {
+pub struct InfosPage {
     /// Title
-    pub page_title: Option<String>,
+    pub title: Option<String>,
     /// Description
-    pub page_desc: Option<String>,
+    pub desc: Option<String>,
     /// Keywords
-    pub page_kw: Option<String>,
+    pub kw: Option<String>,
 }
 
+#[allow(clippy::struct_excessive_bools)]
 /// Information on what page the user is currently
 #[derive(Content, Debug, Default)]
 pub struct NavBar {
@@ -37,7 +38,7 @@ pub struct NavBar {
 
 /// Final structure given to template
 #[derive(Content, Debug)]
-struct Data<T> {
+struct DataPage<T> {
     /// App name
     app_name: String,
     /// App URL
@@ -55,16 +56,16 @@ struct Data<T> {
 }
 
 impl Template {
-    pub fn render<C: Content>(&self, template: &str, data: C, info: Infos) -> String {
+    pub fn render<C: Content>(&self, template: &str, data: C, info: InfosPage) -> String {
         let mut templates: Ramhorns = Ramhorns::lazy(&self.directory).unwrap();
         let tplt = templates.from_file(template).unwrap();
 
-        tplt.render(&Data {
-            app_name: self.app_name.to_owned(),
-            url: self.url.to_owned(),
-            page_title: info.page_title,
-            page_desc: info.page_desc,
-            page_kw: info.page_kw,
+        tplt.render(&DataPage {
+            app_name: self.app_name.clone(),
+            url: self.url.clone(),
+            page_title: info.title,
+            page_desc: info.desc,
+            page_kw: info.kw,
             page_author: self.name.clone(),
             data,
         })