write website

nora 2023-08-31 20:46:44 +02:00
parent 24abf6d98c
commit 7ff9a67148
15 changed files with 1580 additions and 38 deletions

src/client.rs

@@ -1,16 +1,16 @@
use chrono::Utc;
use eyre::{bail, Context, Result};
-use std::collections::HashMap;
+use std::collections::BTreeMap;
use tracing::info;
use crate::config::WebsiteConfig;
pub struct Client {
-websites: Vec<WebsiteConfig>,
-req: reqwest::Client,
+pub websites: Vec<WebsiteConfig>,
+pub req: reqwest::Client,
}
pub struct Results {
-pub states: HashMap<String, CheckResult>,
+pub states: BTreeMap<String, CheckResult>,
}
pub struct CheckResult {
@@ -18,13 +18,15 @@ pub struct CheckResult {
pub state: CheckState,
}
#[derive(sqlx::Type)]
#[sqlx(rename_all = "snake_case")]
pub enum CheckState {
Ok,
NotOk,
}
pub async fn do_checks(client: &Client) -> Results {
-let mut states = HashMap::new();
+let mut states = BTreeMap::new();
for website in &client.websites {
let check_result = make_request(&client.req, website).await;
states.insert(website.name.clone(), check_result);
@@ -33,10 +35,13 @@ pub async fn do_checks(client: &Client) -> Results {
Results { states }
}
#[tracing::instrument(skip(client))]
async fn make_request(client: &reqwest::Client, website: &WebsiteConfig) -> CheckResult {
let time = Utc::now();
let result = client.get(website.url.clone()).send().await;
info!(?result, ?website.url, "Made health request");
match result {
Ok(res) => CheckResult {
time,
@@ -46,7 +51,7 @@ async fn make_request(client: &reqwest::Client, website: &WebsiteConfig) -> CheckResult {
CheckState::NotOk
},
},
-Err(err) => CheckResult {
+Err(_) => CheckResult {
time,
state: CheckState::NotOk,
},

src/config.rs

@@ -1,6 +1,27 @@
use eyre::{Context, Result};
use url::Url;
#[derive(Debug, serde::Deserialize)]
pub struct Config {
pub interval_seconds: u64,
pub websites: Vec<WebsiteConfig>,
#[serde(default = "default_db_url")]
pub db_url: String,
}
fn default_db_url() -> String {
"uptime.db".into()
}
#[derive(Debug, serde::Deserialize)]
pub struct WebsiteConfig {
pub name: String,
pub url: Url,
}
pub fn read_config(config_path: &str) -> Result<Config> {
let config_str = std::fs::read_to_string(config_path)
.wrap_err_with(|| format!("opening config at '{config_path}'"))?;
serde_json::from_str(&config_str).wrap_err("reading config file")
}
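
For reference, the smallest config this would accept is a JSON object with interval_seconds and a list of websites; db_url can be omitted thanks to the serde default. An illustrative uptime.json (the name and URL below are made up, not taken from the commit):

{
    "interval_seconds": 60,
    "db_url": "uptime.db",
    "websites": [
        { "name": "example", "url": "https://example.com/" }
    ]
}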

src/db.rs (new file)

@@ -0,0 +1,60 @@
use std::str::FromStr;
use chrono::Utc;
use eyre::{Context, Result};
use sqlx::{migrate::Migrator, sqlite::SqliteConnectOptions, Pool, Sqlite};
pub static MIGRATOR: Migrator = sqlx::migrate!();
use crate::client::{CheckState, Results};
#[derive(sqlx::FromRow)]
pub struct Check {
pub id: i32,
pub request_time: chrono::DateTime<Utc>,
pub website: String,
pub result: CheckState,
}
pub async fn open_db(db_url: &str) -> Result<Pool<Sqlite>> {
let db_opts = SqliteConnectOptions::from_str(db_url)
.wrap_err("parsing database URL")?
.create_if_missing(true);
Pool::connect_with(db_opts)
.await
.wrap_err_with(|| format!("opening db from `{}`", db_url))
}
pub async fn insert_results(db: &Pool<Sqlite>, results: Results) -> Result<()> {
let mut errors = Vec::new();
for (website, check) in results.states.iter() {
let result =
sqlx::query("INSERT INTO checks (request_time, website, result) VALUES (?, ?, ?);")
.bind(check.time)
.bind(website)
.bind(&check.state)
.execute(db)
.await
.wrap_err(format!("inserting result for {website}"));
if let Err(err) = result {
errors.push(err);
}
}
if !errors.is_empty() {
for err in errors {
error!(?err);
}
Err(eyre::eyre!("error inserting results"))
} else {
Ok(())
}
}
pub async fn get_checks(db: &Pool<Sqlite>) -> Result<Vec<Check>> {
sqlx::query_as::<_, Check>("SELECT id, request_time, website, result FROM checks")
.fetch_all(db)
.await
.wrap_err("getting all checks")
}
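
sqlx::migrate!() picks up SQL files from a migrations/ directory that is presumably among the changed files but is not shown in this view. Judging by the INSERT and SELECT above and the types of Check, the migration would have to create a checks table along these lines (a sketch, not the actual migration file; sqlx's SQLite driver stores chrono timestamps and string-typed enums like CheckState as TEXT):

-- migrations/..._checks.sql (hypothetical file name)
CREATE TABLE checks (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    request_time TEXT NOT NULL,
    website TEXT NOT NULL,
    result TEXT NOT NULL
);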

src/lib.rs

@@ -1,2 +1,52 @@
#![allow(uncommon_codepoints)] // lmao
#[macro_use]
extern crate tracing;
mod client;
mod config;
pub mod db;
mod web;
use eyre::Context;
use eyre::Result;
use sqlx::{Pool, Sqlite};
use std::{sync::Arc, time::Duration};
use client::Client;
pub use config::{read_config, Config, WebsiteConfig};
pub use web::axum_server;
const USER_AGENT: &str = concat!("uptime/", env!("GIT_COMMIT"));
pub async fn check_timer(config: Config, db: Arc<Pool<Sqlite>>) -> Result<Never> {
let req_client = reqwest::Client::builder()
.use_rustls_tls()
.user_agent(USER_AGENT)
.build()
.wrap_err("building client")?;
let mut interval = tokio::time::interval(Duration::from_secs(config.interval_seconds));
let client = Client {
websites: config.websites,
req: req_client,
};
loop {
interval.tick().await;
info!("Running tick.");
let results = client::do_checks(&client).await;
if let Err(err) = db::insert_results(&db, results).await {
error!(?err);
}
info!("Finished tick.");
}
}
// look away
pub enum Never {}
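
USER_AGENT above bakes in env!("GIT_COMMIT"), which cargo does not set on its own, so the commit presumably also carries a build script that is not shown in this view. A minimal sketch of such a build.rs, assuming git is available at build time:

// build.rs (sketch): expose the current commit hash to env!("GIT_COMMIT")
use std::process::Command;

fn main() {
    let out = Command::new("git")
        .args(["rev-parse", "--short", "HEAD"])
        .output()
        .expect("running git rev-parse");
    let commit = String::from_utf8(out.stdout).expect("commit hash is not utf-8");
    println!("cargo:rustc-env=GIT_COMMIT={}", commit.trim());
    // re-run when HEAD moves so the embedded hash stays current
    println!("cargo:rerun-if-changed=.git/HEAD");
}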

src/main.rs

@@ -1,4 +1,48 @@
-fn main() {
use eyre::WrapErr;
use std::sync::Arc;
#[macro_use]
extern crate tracing;
#[tokio::main]
async fn main() -> eyre::Result<()> {
tracing_subscriber::fmt().init();
println!("Hello, world!");
let version = env!("GIT_COMMIT");
info!("Starting up uptime {version}");
let config_path = std::env::var("UPTIME_CONFIG_PATH").unwrap_or_else(|_| "uptime.json".into());
info!("Loading reading config");
let mut config = uptime::read_config(&config_path)?;
let db_url = std::env::var("UPTIME_DB_URL");
if let Ok(db_url) = db_url {
config.db_url = db_url;
}
info!("Opening db");
let db = uptime::db::open_db(&config.db_url).await?;
let db = Arc::new(db);
info!("Running migrations");
uptime::db::MIGRATOR
.run(&*db)
.await
.wrap_err("running migrations")?;
info!("Started up.");
let checker = uptime::check_timer(config, db.clone());
let server = uptime::axum_server(db);
tokio::select! {
result = checker => {
result.map(|ok| match ok {})
}
result = server => {
result
}
}
}
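
Both environment variables are optional: without UPTIME_CONFIG_PATH the binary looks for uptime.json in the working directory, and without UPTIME_DB_URL it uses the db_url from the config (default uptime.db). An illustrative invocation, with a made-up path:

UPTIME_CONFIG_PATH=/etc/uptime/uptime.json ./uptime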

src/web.rs (new file)

@@ -0,0 +1,101 @@
use std::{collections::BTreeMap, sync::Arc};
use askama::Template;
use axum::{
extract::State,
response::{Html, IntoResponse, Response},
routing::get,
Router,
};
use eyre::{Context, Result};
use http::StatusCode;
use sqlx::{Pool, Sqlite};
use crate::{client::CheckState, db::Check};
pub async fn axum_server(db: Arc<Pool<Sqlite>>) -> Result<()> {
let app = Router::new().route("/", get(root)).with_state(db);
info!("Serving website on port 3000");
axum::Server::bind(&"0.0.0.0:3000".parse().unwrap())
.serve(app.into_make_service())
.await
.wrap_err("running axum server")
}
async fn root(State(db): State<Arc<Pool<Sqlite>>>) -> Response {
render_root(db)
.await
.map(Html)
.map(IntoResponse::into_response)
.unwrap_or_else(|err| {
error!(?err);
(StatusCode::INTERNAL_SERVER_ERROR).into_response()
})
}
async fn render_root(db: Arc<Pool<Sqlite>>) -> Result<String> {
let checks = crate::db::get_checks(&db).await?;
let status = compute_status(checks);
let html = RootTemplate { status }
.render()
.wrap_err("error rendering template")?;
Ok(html)
}
fn compute_status(checks: Vec<Check>) -> Vec<WebsiteStatus> {
let mut websites = BTreeMap::new();
checks.into_iter().for_each(|check| {
websites
.entry(check.website)
.or_insert(Vec::new())
.push((check.request_time, check.result));
});
websites
.into_iter()
.map(|(website, checks)| {
let mut last_ok = None;
let mut count_ok = 0;
let len = checks.len();
checks.into_iter().for_each(|(time, result)| {
if let CheckState::Ok = result {
// only checks that actually succeeded count towards last_ok
last_ok = std::cmp::max(last_ok, Some(time));
count_ok += 1;
}
});
let ok_ratio = (count_ok as f32) / (len as f32);
let ok_ratio = format!("{:.2}%", ok_ratio * 100.0);
let last_ok = last_ok
.map(|utc| utc.to_rfc3339_opts(chrono::SecondsFormat::Millis, /*use_z*/ true));
WebsiteStatus {
website,
last_ok,
ok_ratio,
count_ok,
total_requests: len,
}
})
.collect()
}
struct WebsiteStatus {
website: String,
last_ok: Option<String>,
ok_ratio: String,
total_requests: usize,
count_ok: usize,
}
#[derive(Template)]
#[template(path = "index.html")]
struct RootTemplate {
status: Vec<WebsiteStatus>,
}
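
RootTemplate points askama at templates/index.html, which is presumably among the changed files but is not shown in this view. Given the fields WebsiteStatus exposes, a minimal template could look roughly like this (an illustrative sketch only, not the actual file):

<!DOCTYPE html>
<html>
<body>
<h1>uptime</h1>
<table>
<tr><th>website</th><th>last ok</th><th>ok ratio</th><th>ok / total</th></tr>
{% for s in status %}
<tr>
<td>{{ s.website }}</td>
<td>{% match s.last_ok %}{% when Some with (last) %}{{ last }}{% when None %}never{% endmatch %}</td>
<td>{{ s.ok_ratio }}</td>
<td>{{ s.count_ok }} / {{ s.total_requests }}</td>
</tr>
{% endfor %}
</table>
</body>
</html>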