fetch content for item description
All checks were successful
ci/woodpecker/push/publish Pipeline was successful
All checks were successful
ci/woodpecker/push/publish Pipeline was successful
This commit is contained in:
parent
56c87aa3c0
commit
61b94eb43e
1 changed file with 43 additions and 26 deletions
|
@@ -64,15 +64,25 @@ struct Post {
|
||||||
date: Date,
|
date: Date,
|
||||||
url: String,
|
url: String,
|
||||||
desc: Option<String>,
|
desc: Option<String>,
|
||||||
|
content: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Hash for Post {
|
impl Post {
|
||||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
// Fetch the file content
|
||||||
|
fn fetch_content(&mut self) {
|
||||||
let blog_dir = "data/blog";
|
let blog_dir = "data/blog";
|
||||||
let ext = ".md";
|
let ext = ".md";
|
||||||
|
|
||||||
if let Some(file) = read_file(&format!("{blog_dir}/{}{ext}", self.url)) {
|
if let Some(file) = read_file(&format!("{blog_dir}/{}{ext}", self.url)) {
|
||||||
file.content.hash(state)
|
self.content = Some(file.content);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Hash for Post {
|
||||||
|
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||||
|
if let Some(content) = &self.content {
|
||||||
|
content.hash(state)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -131,6 +141,7 @@ fn get_posts(location: &str) -> Vec<Post> {
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
desc: file_metadata.description,
|
desc: file_metadata.description,
|
||||||
|
content: None,
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.collect::<Vec<Post>>()
|
.collect::<Vec<Post>>()
|
||||||
|
@@ -232,30 +243,36 @@ fn build_rss(config: Config, info: ConnectionInfo) -> String {
|
||||||
..Image::default()
|
..Image::default()
|
||||||
}),
|
}),
|
||||||
items: posts
|
items: posts
|
||||||
.iter()
|
.iter_mut()
|
||||||
.map(|p| Item {
|
.map(|p| {
|
||||||
title: Some(p.title.to_owned()),
|
// Get post data
|
||||||
link: Some(format!("{}/blog/p/{}", link_to_site, p.url)),
|
p.fetch_content();
|
||||||
description: p.desc.to_owned(),
|
|
||||||
guid: Some(Guid {
|
// Build item
|
||||||
value: format!("urn:hash:{}", {
|
Item {
|
||||||
let mut hasher = DefaultHasher::new();
|
title: Some(p.title.to_owned()),
|
||||||
p.hash(&mut hasher);
|
link: Some(format!("{}/blog/p/{}", link_to_site, p.url)),
|
||||||
hasher.finish()
|
description: p.content.to_owned(),
|
||||||
|
guid: Some(Guid {
|
||||||
|
value: format!("urn:hash:{}", {
|
||||||
|
let mut hasher = DefaultHasher::new();
|
||||||
|
p.hash(&mut hasher);
|
||||||
|
hasher.finish()
|
||||||
|
}),
|
||||||
|
permalink: false,
|
||||||
}),
|
}),
|
||||||
permalink: false,
|
pub_date: Some(
|
||||||
}),
|
NaiveDateTime::parse_from_str(
|
||||||
pub_date: Some(
|
&format!("{}-{}-{} 13:12:00", p.date.day, p.date.month, p.date.year),
|
||||||
NaiveDateTime::parse_from_str(
|
"%d-%m-%Y %H:%M:%S",
|
||||||
&format!("{}-{}-{} 13:12:00", p.date.day, p.date.month, p.date.year),
|
)
|
||||||
"%d-%m-%Y %H:%M:%S",
|
.unwrap()
|
||||||
)
|
.and_local_timezone(Europe::Paris)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.and_local_timezone(Europe::Paris)
|
.to_rfc2822(),
|
||||||
.unwrap()
|
),
|
||||||
.to_rfc2822(),
|
..Item::default()
|
||||||
),
|
}
|
||||||
..Item::default()
|
|
||||||
})
|
})
|
||||||
.collect(),
|
.collect(),
|
||||||
atom_ext: Some(AtomExtension {
|
atom_ext: Some(AtomExtension {
|
||||||
|
|
Loading…
Reference in a new issue