From 81a50a237d5f3baa989d2a9f4eedce6323914e13 Mon Sep 17 00:00:00 2001
From: Mylloon
Date: Thu, 9 Feb 2023 12:01:35 +0100
Subject: [PATCH] Add robots.txt #9

---
 src/routes/agreements.rs | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/src/routes/agreements.rs b/src/routes/agreements.rs
index cca92df..30452b7 100644
--- a/src/routes/agreements.rs
+++ b/src/routes/agreements.rs
@@ -19,7 +19,7 @@ struct SecurityTemplate {
     url: String,
 }
 
-fn get_security(config: Config, addr: Option) -> std::string::String {
+fn get_security(config: Config, addr: Option) -> String {
     let data = SecurityTemplate {
         contact: config.mail.unwrap_or_default(),
         pref_lang: config.lang.unwrap_or_default(),
@@ -31,14 +31,18 @@ fn get_security(config: Config, addr: Option) -> std::string::String
 
 #[get("/humans.txt")]
 pub async fn humans() -> impl Responder {
-    // TODO
+    // TODO, see https://humanstxt.org/humans.txt
     actix_web::web::Redirect::to("/")
 }
 
 #[get("/robots.txt")]
 pub async fn robots() -> impl Responder {
-    // TODO
-    actix_web::web::Redirect::to("/")
+    HttpResponse::Ok().body(get_robots())
+}
+
+fn get_robots() -> String {
+    // TODO, see https://www.robotstxt.org/orig.html
+    "User-agent: * Allow: /".to_string()
 }
 
 #[get("/sitemap.xml")]