added robots.txt to web server

pull/19/head
Niko PLP 2 years ago
parent 04df0b5488
commit d3a5963eca
1 changed file with 10 additions and 0 deletions:
    p2p-broker/src/server_ws.rs

@@ -197,6 +197,8 @@ fn prepare_urls_from_private_addrs(addrs: &Vec<BindAddress>, port: u16) -> Vec<S
#[include = "*.gzip"]
struct App;
static ROBOTS: &str = "User-agent: *\r\nDisallow: /";
fn upgrade_ws_or_serve_app(
    connection: Option<&HeaderValue>,
    remote: IP,
@@ -253,6 +255,14 @@ fn upgrade_ws_or_serve_app(
            .body(Some(BOOTSTRAP_STRING.get().unwrap().as_bytes().to_vec()))
            .unwrap();
        return Err(res);
    } else if uri == "/robots.txt" {
        let res = Response::builder()
            .status(StatusCode::OK)
            .header("Content-Type", "text/plain")
            .header("Cache-Control", "max-age=3600, must-revalidate")
            .body(Some(ROBOTS.as_bytes().to_vec()))
            .unwrap();
        return Err(res);
    }
}
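
For reference, a minimal standalone sketch of the same robots.txt response construction. It assumes the plain `http` crate; in the broker the builder comes through the WebSocket handshake types, where the body is an `Option<Vec<u8>>`, so here a plain `Vec<u8>` body is used instead. The names `robots_response` and the `main` harness are illustrative only, not part of the commit:

use http::{Response, StatusCode};

// Disallow all crawlers from every path on the broker.
static ROBOTS: &str = "User-agent: *\r\nDisallow: /";

// Build the response returned for GET /robots.txt.
fn robots_response() -> Response<Vec<u8>> {
    Response::builder()
        .status(StatusCode::OK)
        .header("Content-Type", "text/plain")
        // Crawlers may cache the answer for an hour, then must revalidate.
        .header("Cache-Control", "max-age=3600, must-revalidate")
        .body(ROBOTS.as_bytes().to_vec())
        .unwrap()
}

fn main() {
    let res = robots_response();
    assert_eq!(res.status(), StatusCode::OK);
    println!("{}", String::from_utf8_lossy(res.body()));
}

Returning the response as an Err, as the diff does, is how upgrade_ws_or_serve_app short-circuits the WebSocket upgrade path: the caller sends the Err payload back over HTTP instead of completing the handshake.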
