use scraper::{Html, Selector};
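
/// Fetch the info page and print the raw HTML of its second and third `<ul>` elements.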
pub async fn info() {
    let document = get_webpage().await.expect("Can't reach info website.");

    // Selector matching every <ul> element of the page
    let sel_ul = Selector::parse("ul").unwrap();

    // Find the raw info in the HTML page: only the second and third <ul> are relevant
    for (i, data) in document.select(&sel_ul).enumerate() {
        if [1, 2].contains(&i) {
            println!("\n{} - {:#?}", data.value().name(), data.inner_html());
        }
    }
}
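
/// Load the info page and parse it into a scraper `Html` document.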
async fn get_webpage() -> Result<Html, Box<dyn std::error::Error>> {
    /* let html = reqwest::get("https://informatique.up8.edu/licence-iv/edt").await?.text().await?;

    Ok(Html::parse_document(&html)) */
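
    // Parse a local HTML file instead of fetching the page over HTTP (see the commented-out reqwest call above).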
    let html = include_str!("../target/debug2.html");

    Ok(Html::parse_document(html))
}