split logic of contrib

Mylloon 2024-11-09 17:13:30 +01:00
parent b9bc57c1e0
commit 77970da8b3
Signed by: Anri
GPG key ID: A82D63DFF8D1317F
3 changed files with 118 additions and 106 deletions
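The change pulls the GitHub fetching and PR-grouping code out of the cached build_page route handler into a shared fetch() helper under utils::routes::contrib, so the route only turns the fetched projects into template data. Below is a minimal, synchronous sketch of that shape; the types and signatures are stand-ins that only borrow their names from the diff.

// Illustrative sketch only (stand-in types, synchronous for brevity):
// the data layer owns fetching/grouping, the route layer only maps the
// Result onto page data, which is the shape this commit moves toward.
#[derive(Clone, Debug)]
struct Project {
    name: String,
    pulls_merged: Vec<u32>,
}

fn fetch() -> Result<Vec<Project>, String> {
    // The real helper talks to the GitHub API and groups PRs per project.
    Ok(vec![Project {
        name: "demo".into(),
        pulls_merged: vec![1, 2],
    }])
}

fn build_page() -> String {
    match fetch() {
        Ok(projects) => format!("{} project(s) with merged pull requests", projects.len()),
        Err(e) => {
            eprintln!("{e}");
            "error page".into()
        }
    }
}

fn main() {
    println!("{}", build_page());
}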


@@ -1,12 +1,10 @@
use std::collections::HashMap;
use crate::{
config::Config,
utils::{
github::{fetch_pr, ProjectState},
misc::{make_kw, Html},
},
template::{InfosPage, NavBar},
utils::{
misc::{make_kw, Html},
routes::contrib::{fetch, Project},
},
};
use actix_web::{get, web, Responder};
use cached::proc_macro::once;
@@ -26,24 +24,6 @@ struct PortfolioTemplate {
closed: Option<Vec<Project>>,
}
#[derive(Clone, Content, Debug)]
struct Project {
name: String,
url: String,
pulls_merged: Vec<Pull>,
pulls_open: Vec<Pull>,
pulls_closed: Vec<Pull>,
}
#[derive(Clone, Content, Debug)]
struct Pull {
url: String,
id: u32,
name_repo: String,
title: String,
state: u8,
}
#[once(time = 600)] // 10min
async fn build_page(config: Config) -> String {
let navbar = NavBar {
@@ -52,88 +32,29 @@ async fn build_page(config: Config) -> String {
};
// Fetch latest data from github
let data = match fetch_pr().await {
Ok(projects) => {
let mut data: Vec<Project> = Vec::new();
// Grouping PRs by projects
let mut map: HashMap<&str, Vec<Pull>> = HashMap::new();
for p in &projects {
let project = Pull {
url: p.contrib_url.clone(),
id: p.id,
name_repo: p.name.clone(),
title: p.title.clone(),
state: p.status as u8,
};
let project_name = p.name.as_str();
if map.contains_key(project_name) {
map.entry(project_name).and_modify(|v| v.push(project));
} else {
data.push(Project {
name: project_name.into(),
url: p.url.clone(),
pulls_merged: Vec::new(),
pulls_closed: Vec::new(),
pulls_open: Vec::new(),
});
map.insert(project_name, vec![project]);
}
}
// Distributes each PR in the right vector
for d in &mut data {
map.get(d.name.as_str()).unwrap().iter().for_each(|p| {
let state = p.state.try_into().unwrap();
match state {
ProjectState::Closed => d.pulls_closed.push(p.to_owned()),
ProjectState::Merged => d.pulls_merged.push(p.to_owned()),
ProjectState::Open => d.pulls_open.push(p.to_owned()),
}
});
let mut name: Vec<char> = d.name.replace('-', " ").chars().collect();
name[0] = name[0].to_uppercase().next().unwrap();
d.name = name.into_iter().collect();
}
// Ascending order by pulls IDs
for d in &mut data {
d.pulls_closed.reverse();
d.pulls_merged.reverse();
d.pulls_open.reverse();
}
// Ascending order by number of pulls
data.sort_by(|a, b| {
b.pulls_merged
.len()
.partial_cmp(&a.pulls_merged.len())
.unwrap()
});
PortfolioTemplate {
navbar,
error: false,
projects: Some(
data.iter()
.filter(|&p| !p.pulls_merged.is_empty())
.cloned()
.collect(),
),
waiting: Some(
data.iter()
.filter(|&p| !p.pulls_open.is_empty())
.cloned()
.collect(),
),
closed: Some(
data.iter()
.filter(|&p| !p.pulls_closed.is_empty())
.cloned()
.collect(),
),
}
}
let data = match fetch().await {
Ok(data) => PortfolioTemplate {
navbar,
error: false,
projects: Some(
data.iter()
.filter(|&p| !p.pulls_merged.is_empty())
.cloned()
.collect(),
),
waiting: Some(
data.iter()
.filter(|&p| !p.pulls_open.is_empty())
.cloned()
.collect(),
),
closed: Some(
data.iter()
.filter(|&p| !p.pulls_closed.is_empty())
.cloned()
.collect(),
),
},
Err(e) => {
eprintln!("{e}");
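In the new handler body, the projects, waiting and closed fields are each built by filtering the same Vec<Project> on a different pull list. A hypothetical helper could factor out that repetition; the sketch below is not part of the commit and uses stand-in types.

// Hypothetical refactor sketch, not in the commit: one generic filter
// behind the projects / waiting / closed template fields.
#[derive(Clone, Debug)]
struct Project {
    pulls_merged: Vec<u32>,
    pulls_open: Vec<u32>,
    pulls_closed: Vec<u32>,
}

fn keep(data: &[Project], is_kept: impl Fn(&Project) -> bool) -> Option<Vec<Project>> {
    Some(data.iter().filter(|&p| is_kept(p)).cloned().collect())
}

fn main() {
    let data = vec![Project {
        pulls_merged: vec![1],
        pulls_open: vec![],
        pulls_closed: vec![2, 3],
    }];
    let projects = keep(&data, |p| !p.pulls_merged.is_empty());
    let waiting = keep(&data, |p| !p.pulls_open.is_empty());
    let closed = keep(&data, |p| !p.pulls_closed.is_empty());
    println!("{projects:?}\n{waiting:?}\n{closed:?}");
}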


@@ -0,0 +1,90 @@
use std::collections::HashMap;
use ramhorns::Content;
use reqwest::Error;
use crate::utils::github::{fetch_pr, ProjectState};
#[derive(Clone, Content, Debug)]
pub struct Project {
name: String,
url: String,
pub pulls_merged: Vec<Pull>,
pub pulls_open: Vec<Pull>,
pub pulls_closed: Vec<Pull>,
}
#[derive(Clone, Content, Debug)]
pub struct Pull {
url: String,
id: u32,
name_repo: String,
title: String,
state: u8,
}
pub async fn fetch() -> Result<Vec<Project>, Error> {
match fetch_pr().await {
Ok(projects) => {
let mut data: Vec<Project> = Vec::new();
// Grouping PRs by projects
let mut map: HashMap<&str, Vec<Pull>> = HashMap::new();
for p in &projects {
let project = Pull {
url: p.contrib_url.clone(),
id: p.id,
name_repo: p.name.clone(),
title: p.title.clone(),
state: p.status as u8,
};
let project_name = p.name.as_str();
if map.contains_key(project_name) {
map.entry(project_name).and_modify(|v| v.push(project));
} else {
data.push(Project {
name: project_name.into(),
url: p.url.clone(),
pulls_merged: Vec::new(),
pulls_closed: Vec::new(),
pulls_open: Vec::new(),
});
map.insert(project_name, vec![project]);
}
}
// Distributes each PR in the right vector
for d in &mut data {
map.get(d.name.as_str()).unwrap().iter().for_each(|p| {
let state = p.state.try_into().unwrap();
match state {
ProjectState::Closed => d.pulls_closed.push(p.to_owned()),
ProjectState::Merged => d.pulls_merged.push(p.to_owned()),
ProjectState::Open => d.pulls_open.push(p.to_owned()),
}
});
let mut name: Vec<char> = d.name.replace('-', " ").chars().collect();
name[0] = name[0].to_uppercase().next().unwrap();
d.name = name.into_iter().collect();
}
// Ascending order by pulls IDs
for d in &mut data {
d.pulls_closed.reverse();
d.pulls_merged.reverse();
d.pulls_open.reverse();
}
// Ascending order by number of pulls
data.sort_by(|a, b| {
b.pulls_merged
.len()
.partial_cmp(&a.pulls_merged.len())
.unwrap()
});
Ok(data)
}
Err(e) => Err(e),
}
}
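The grouping loop above first tests contains_key and then either pushes via and_modify or inserts, because a brand-new key also has to create the matching Project record. For the grouping step alone, the entry API does the insert-or-update in a single lookup; the snippet below is a self-contained sketch of that idiom with made-up data, not a drop-in replacement for the committed loop.

use std::collections::HashMap;

// Self-contained sketch with stand-in data: grouping pulls by repository
// name via the entry API, one lookup per item instead of contains_key
// followed by and_modify / insert.
fn main() {
    let pulls = [("site", 1_u32), ("docs", 2), ("site", 3)];

    let mut by_repo: HashMap<&str, Vec<u32>> = HashMap::new();
    for (repo, id) in pulls {
        by_repo.entry(repo).or_default().push(id);
    }

    // Map iteration order is unspecified; e.g. {"site": [1, 3], "docs": [2]}
    println!("{by_repo:?}");
}

Likewise, since the final match simply forwards the error with Err(e) => Err(e), the body could be written with ? on fetch_pr().await and end in Ok(data); the explicit match kept in the commit is equivalent, just more verbose.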


@@ -1,3 +1,4 @@
pub mod blog;
pub mod contact;
pub mod contrib;
pub mod cours;
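The added pub mod contrib; line registers the new module, presumably under utils::routes given the crate::utils::routes::contrib::{fetch, Project} import in the first file; the actual file names are not shown in this diff. The sketch below is only an assumed, self-contained picture of that wiring, with inline modules standing in for the separate files and a simplified, synchronous fetch.

// Assumed module wiring (file names are not visible in the diff above);
// inline modules stand in for the separate source files.
mod utils {
    pub mod routes {
        pub mod contrib {
            #[derive(Clone, Debug)]
            pub struct Project {
                pub name: String,
            }

            pub fn fetch() -> Result<Vec<Project>, String> {
                Ok(vec![Project { name: "demo".into() }])
            }
        }
    }
}

fn main() {
    let projects = utils::routes::contrib::fetch().unwrap();
    println!("{projects:?}");
}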