write website

This commit is contained in:
nora 2023-08-31 20:46:44 +02:00
parent 24abf6d98c
commit 7ff9a67148
15 changed files with 1580 additions and 38 deletions

1
.gitignore vendored
View file

@ -1 +1,2 @@
/target
/uptime.db

1193
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -6,12 +6,19 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
askama = "0.12.0"
axum = "0.6.20"
chrono = "0.4.27"
eyre = "0.6.8"
http = "0.2.9"
reqwest = { version = "0.11.20", default-features = false, features = ["rustls-tls"] }
rusqlite = { version = "0.29.0", features = ["bundled"] }
reqwest = { version = "0.11.20", default-features = false, features = ["rustls-tls", "trust-dns"] }
serde = { version = "1.0.188", features = ["derive"] }
serde_json = "1.0.105"
sqlx = { version = "0.7.1", default-features = false, features = ["migrate", "sqlite", "macros", "chrono", "runtime-tokio"] }
tokio = { version = "1.32.0", features = ["full"] }
tracing = "0.1.37"
tracing-subscriber = "0.3.17"
url = "2.4.1"
url = { version = "2.4.1", features = ["serde"] }
[build-dependencies]
eyre = "0.6.8"

View file

@ -1,3 +1,26 @@
# uptime
custom uptime monitoring tool.
## config
JSON file located at `$UPTIME_CONFIG_PATH`, defaults to `./uptime.json`.
```json
{
"interval_seconds": 30,
"websites": [
{
"name": "nilstrieb.dev",
"url": "https://nilstrieb.dev"
},
{
"name": "google.com",
"url": "https://google.com"
}
],
"db_url": "sqlite::memory:"
}
```
`db_url` can be overridden with `$UPTIME_DB_URL` and defaults to `./uptime.db` if not present.

35
build.rs Normal file
View file

@ -0,0 +1,35 @@
fn main() {
    // Embed the current git commit into the binary via the GIT_COMMIT env
    // var, read at compile time with env!() elsewhere in the crate.
    // Suffixes: "(*)" = dirty working tree, "(?)" = dirtiness unknown.
    let version = match try_get_commit() {
        Ok(commit) => match has_no_changes() {
            Ok(true) => commit,
            Ok(false) => format!("{commit} (*)"),
            Err(_) => format!("{commit} (?)"),
        },
        // No git available / not a repository: still build, with a placeholder.
        Err(_) => String::from("unknown"),
    };
    println!("cargo:rustc-env=GIT_COMMIT={version}");
}
fn try_get_commit() -> eyre::Result<String> {
let stdout = std::process::Command::new("git")
.arg("rev-parse")
.arg("HEAD")
.output()?
.stdout;
let stdout = String::from_utf8(stdout)?;
Ok(stdout.trim().to_owned())
}
/// Reports whether the git working tree is clean.
///
/// `git diff --quiet --exit-code` exits 0 iff there are no unstaged changes,
/// so the process exit status maps directly onto the boolean result.
fn has_no_changes() -> eyre::Result<bool> {
    let status = std::process::Command::new("git")
        .args(["diff", "--no-ext-diff", "--quiet", "--exit-code"])
        .output()?
        .status;
    Ok(status.success())
}

View file

@ -37,6 +37,7 @@
''-I${pkgs.glib.out}/lib/glib-2.0/include/''
];
packages = (with pkgs; [
sqlx-cli
]);
};
});

View file

@ -0,0 +1,6 @@
-- One row per health-check request made against a configured website.
CREATE TABLE checks (
    -- `INTEGER PRIMARY KEY` (exactly this spelling) makes the column an
    -- alias for SQLite's rowid, which auto-assigns ascending ids.
    -- The original `INTEGER AUTO INCREMENT` is not valid autoincrement
    -- syntax in SQLite (it parses as a multi-word type name), so the column
    -- was NOT the rowid alias and inserts that omit `id` stored NULL.
    id INTEGER PRIMARY KEY,
    -- UTC time the HTTP request was started.
    request_time TIMESTAMP NOT NULL,
    -- Website name from the config (not the URL).
    website VARCHAR NOT NULL,
    -- Serialized CheckState ("ok" / "not_ok").
    result VARCHAR NOT NULL
);

View file

@ -1,16 +1,16 @@
use chrono::Utc;
use eyre::{bail, Context, Result};
use std::collections::HashMap;
use std::collections::BTreeMap;
use tracing::info;
use crate::config::WebsiteConfig;
pub struct Client {
websites: Vec<WebsiteConfig>,
req: reqwest::Client,
pub websites: Vec<WebsiteConfig>,
pub req: reqwest::Client,
}
pub struct Results {
pub states: HashMap<String, CheckResult>,
pub states: BTreeMap<String, CheckResult>,
}
pub struct CheckResult {
@ -18,13 +18,15 @@ pub struct CheckResult {
pub state: CheckState,
}
// Outcome of a single health check. Stored in the database as text; the
// snake_case rename maps Ok -> "ok" and NotOk -> "not_ok".
#[derive(sqlx::Type)]
#[sqlx(rename_all = "snake_case")]
pub enum CheckState {
    Ok,
    NotOk,
}
pub async fn do_checks(client: &Client) -> Results {
let mut states = HashMap::new();
let mut states = BTreeMap::new();
for website in &client.websites {
let check_result = make_request(&client.req, website).await;
states.insert(website.name.clone(), check_result);
@ -33,10 +35,13 @@ pub async fn do_checks(client: &Client) -> Results {
Results { states }
}
#[tracing::instrument(skip(client))]
async fn make_request(client: &reqwest::Client, website: &WebsiteConfig) -> CheckResult {
let time = Utc::now();
let result = client.get(website.url.clone()).send().await;
info!(?result, ?website.url, "Made health request");
match result {
Ok(res) => CheckResult {
time,
@ -46,7 +51,7 @@ async fn make_request(client: &reqwest::Client, website: &WebsiteConfig) -> Chec
CheckState::NotOk
},
},
Err(err) => CheckResult {
Err(_) => CheckResult {
time,
state: CheckState::NotOk,
},

View file

@ -1,6 +1,27 @@
use eyre::{Context, Result};
use url::Url;
/// Top-level application configuration, deserialized from the JSON file at
/// `$UPTIME_CONFIG_PATH` (see the README for an example).
#[derive(Debug, serde::Deserialize)]
pub struct Config {
    // Seconds between health-check rounds.
    pub interval_seconds: u64,
    // Websites to probe each round.
    pub websites: Vec<WebsiteConfig>,
    // Database connection string; falls back to `default_db_url()` when the
    // key is absent from the JSON (callers may still override it later,
    // e.g. from an environment variable).
    #[serde(default = "default_db_url")]
    pub db_url: String,
}
/// Fallback database URL used when the config file omits `db_url`.
fn default_db_url() -> String {
    String::from("uptime.db")
}
/// A single website to monitor.
#[derive(Debug, serde::Deserialize)]
pub struct WebsiteConfig {
    // Display name; also used as the `website` key in the checks table.
    pub name: String,
    // Full URL to request; parsed/validated by `url::Url` at deserialize time.
    pub url: Url,
}
/// Loads and parses the JSON configuration file at `config_path`.
///
/// # Errors
/// Returns an error (with path context) when the file cannot be read or when
/// its contents are not valid `Config` JSON.
pub fn read_config(config_path: &str) -> Result<Config> {
    let raw = std::fs::read_to_string(config_path)
        .wrap_err_with(|| format!("opening config at '{config_path}'"))?;
    let config = serde_json::from_str(&raw).wrap_err("reading config file")?;
    Ok(config)
}

60
src/db.rs Normal file
View file

@ -0,0 +1,60 @@
use std::str::FromStr;
use chrono::Utc;
use eyre::{Context, Result};
use sqlx::{migrate::Migrator, sqlite::SqliteConnectOptions, Pool, Sqlite};
pub static MIGRATOR: Migrator = sqlx::migrate!();
use crate::client::{CheckState, Results};
/// One persisted health-check row, mapped from the `checks` table.
#[derive(sqlx::FromRow)]
pub struct Check {
    // Row id.
    pub id: i32,
    // UTC time the check's HTTP request was started.
    pub request_time: chrono::DateTime<Utc>,
    // Website name (the config's `name`, not the URL).
    pub website: String,
    // Ok / NotOk outcome.
    pub result: CheckState,
}
/// Opens a SQLite connection pool at `db_url`, creating the database file
/// if it does not yet exist.
///
/// # Errors
/// Fails when the URL cannot be parsed or the pool cannot connect.
pub async fn open_db(db_url: &str) -> Result<Pool<Sqlite>> {
    let options = SqliteConnectOptions::from_str(db_url)
        .wrap_err("parsing database URL")?
        .create_if_missing(true);
    let pool = Pool::connect_with(options)
        .await
        .wrap_err_with(|| format!("opening db from `{db_url}`"))?;
    Ok(pool)
}
/// Inserts one `checks` row per website result.
///
/// Failures are collected per row rather than aborting on the first error,
/// so one bad insert does not drop the other results. All collected errors
/// are logged, then a single summary error is returned.
pub async fn insert_results(db: &Pool<Sqlite>, results: Results) -> Result<()> {
    let mut errors = Vec::new();
    for (website, check) in results.states.iter() {
        let result =
            sqlx::query("INSERT INTO checks (request_time, website, result) VALUES (?, ?, ?);")
                .bind(check.time)
                .bind(website)
                .bind(&check.state)
                .execute(db)
                .await
                // wrap_err_with defers the format! allocation to the error path.
                .wrap_err_with(|| format!("inserting result for {website}"));
        if let Err(err) = result {
            errors.push(err);
        }
    }
    if errors.is_empty() {
        Ok(())
    } else {
        for err in errors {
            error!(?err);
        }
        Err(eyre::eyre!("error inserting results"))
    }
}
/// Fetches every stored check row (used to render the status page).
///
/// # Errors
/// Fails when the query cannot be executed or rows cannot be decoded.
pub async fn get_checks(db: &Pool<Sqlite>) -> Result<Vec<Check>> {
    sqlx::query_as::<_, Check>("SELECT id, request_time, website, result FROM checks")
        .fetch_all(db)
        .await
        .wrap_err("getting all checks")
}

View file

@ -1,2 +1,52 @@
#![allow(uncommon_codepoints)] // lmao
#[macro_use]
extern crate tracing;
mod client;
mod config;
pub mod db;
mod web;
use eyre::Context;
use eyre::Result;
use sqlx::{Pool, Sqlite};
use std::{sync::Arc, time::Duration};
use client::Client;
pub use config::{read_config, Config, WebsiteConfig};
pub use web::axum_server;
const USER_AGENT: &str = concat!("uptime/", env!("GIT_COMMIT"));
/// Runs the periodic health-check loop forever; returns only if client
/// construction fails.
// NOTE(review): the return type renders here as `Result<>`. Given
// `#![allow(uncommon_codepoints)]` at the crate root, the empty
// `pub enum {}` below, and the caller's `result.map(|ok| match ok {})`,
// the type parameter is presumably an empty (never-like) enum whose unusual
// unicode name was lost in extraction — confirm against the real source.
pub async fn check_timer(config: Config, db: Arc<Pool<Sqlite>>) -> Result<> {
    let req_client = reqwest::Client::builder()
        .use_rustls_tls()
        .user_agent(USER_AGENT)
        .build()
        .wrap_err("building client")?;
    // tokio's interval fires immediately on the first tick, then every
    // `interval_seconds` thereafter.
    let mut interval = tokio::time::interval(Duration::from_secs(config.interval_seconds));
    let client = Client {
        websites: config.websites,
        req: req_client,
    };
    loop {
        interval.tick().await;
        info!("Running tick.");
        let results = client::do_checks(&client).await;
        // A failed insert is logged but does not stop the loop.
        if let Err(err) = db::insert_results(&db, results).await {
            error!(?err);
        }
        info!("Finished tick.");
    }
}
// look away
pub enum {}

View file

@ -1,4 +1,48 @@
fn main() {
use eyre::WrapErr;
use std::sync::Arc;
#[macro_use]
extern crate tracing;
/// Entry point: loads config, opens and migrates the database, then races
/// the periodic checker against the HTTP server; whichever finishes first
/// (normally only on error) decides the process exit.
#[tokio::main]
async fn main() -> eyre::Result<()> {
    tracing_subscriber::fmt().init();
    // NOTE(review): this println! looks like a leftover line from the
    // pre-rewrite `fn main` interleaved by the diff rendering — confirm it
    // is absent from the actual file.
    println!("Hello, world!");
    // GIT_COMMIT is injected by build.rs.
    let version = env!("GIT_COMMIT");
    info!("Starting up uptime {version}");
    let config_path = std::env::var("UPTIME_CONFIG_PATH").unwrap_or_else(|_| "uptime.json".into());
    info!("Loading reading config");
    let mut config = uptime::read_config(&config_path)?;
    // $UPTIME_DB_URL takes precedence over the config file's db_url.
    let db_url = std::env::var("UPTIME_DB_URL");
    if let Ok(db_url) = db_url {
        config.db_url = db_url;
    }
    info!("Opening db");
    let db = uptime::db::open_db(&config.db_url).await?;
    let db = Arc::new(db);
    info!("Running migrations");
    uptime::db::MIGRATOR
        .run(&*db)
        .await
        .wrap_err("running migrations")?;
    info!("Started up.");
    let checker = uptime::check_timer(config, db.clone());
    let server = uptime::axum_server(db);
    tokio::select! {
        result = checker => {
            // checker's Ok type is uninhabited, so `match ok {}` converts it.
            result.map(|ok| match ok {})
        }
        result = server => {
            result
        }
    }
}

101
src/web.rs Normal file
View file

@ -0,0 +1,101 @@
use std::{collections::BTreeMap, sync::Arc};
use askama::Template;
use axum::{
extract::State,
response::{Html, IntoResponse, Response},
routing::get,
Router,
};
use eyre::{Context, Result};
use http::StatusCode;
use sqlx::{Pool, Sqlite};
use crate::{client::CheckState, db::Check};
/// Serves the status page on 0.0.0.0:3000; resolves only if the server errors.
pub async fn axum_server(db: Arc<Pool<Sqlite>>) -> Result<()> {
    // Single route: GET / renders the status overview; the pool is shared
    // with handlers via axum state.
    let app = Router::new().route("/", get(root)).with_state(db);
    info!("Serving website on port 3000");
    // unwrap is safe: the socket address is a constant, known-valid literal.
    axum::Server::bind(&"0.0.0.0:3000".parse().unwrap())
        .serve(app.into_make_service())
        .await
        .wrap_err("running axum server")
}
async fn root(State(db): State<Arc<Pool<Sqlite>>>) -> Response {
render_root(db)
.await
.map(Html)
.map(IntoResponse::into_response)
.unwrap_or_else(|err| {
error!(?err);
(StatusCode::INTERNAL_SERVER_ERROR).into_response()
})
}
/// Loads all checks from the database, aggregates them per website, and
/// renders the index template to an HTML string.
async fn render_root(db: Arc<Pool<Sqlite>>) -> Result<String> {
    let checks = crate::db::get_checks(&db).await?;
    let template = RootTemplate {
        status: compute_status(checks),
    };
    template.render().wrap_err("error rendering template")
}
fn compute_status(checks: Vec<Check>) -> Vec<WebsiteStatus> {
let mut websites = BTreeMap::new();
checks.into_iter().for_each(|check| {
websites
.entry(check.website)
.or_insert(Vec::new())
.push((check.request_time, check.result));
});
websites
.into_iter()
.map(|(website, checks)| {
let mut last_ok = None;
let mut count_ok = 0;
let len = checks.len();
checks.into_iter().for_each(|(time, result)| {
last_ok = std::cmp::max(last_ok, Some(time));
if let CheckState::Ok = result {
count_ok += 1;
}
});
let ok_ratio = (count_ok as f32) / (len as f32);
let ok_ratio = format!("{:.2}%", ok_ratio * 100.0);
let last_ok = last_ok
.map(|utc| utc.to_rfc3339_opts(chrono::SecondsFormat::Millis, /*use_z*/ true));
WebsiteStatus {
website,
last_ok,
ok_ratio,
count_ok,
total_requests: len,
}
})
.collect()
}
/// Per-website aggregate shown on the status page.
struct WebsiteStatus {
    // Website name from the config.
    website: String,
    // RFC 3339 timestamp rendered for display; None when unavailable.
    last_ok: Option<String>,
    // Pre-formatted percentage string, e.g. "99.50%".
    ok_ratio: String,
    // Total number of recorded checks for this website.
    total_requests: usize,
    // Number of those checks that succeeded.
    count_ok: usize,
}
/// Askama template model for the index page (templates/index.html).
#[derive(Template)]
#[template(path = "index.html")]
struct RootTemplate {
    // One entry per monitored website, already sorted by name.
    status: Vec<WebsiteStatus>,
}

34
templates/index.html Normal file
View file

@ -0,0 +1,34 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <!-- NOTE(review): X-UA-Compatible is obsolete for modern browsers; kept as-is. -->
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Uptime</title>
</head>
<body>
<main>
    <!-- One section per monitored website, supplied by RootTemplate.status -->
    {% for check in status %}
    <h2>{{ check.website }}</h2>
    <p>Uptime: {{ check.ok_ratio }} ({{ check.count_ok }}/{{ check.total_requests }})</p>
    {% if check.last_ok.is_some() %}
    <!-- Server emits RFC 3339 UTC; the script below rewrites it to local time -->
    <p>Last OK: <span class="utc-timestamp">{{ check.last_ok.as_deref().unwrap() }}</span></p>
    {% endif %}
    {% endfor %}
</main>
<script>
    // Progressive enhancement: convert every server-rendered UTC timestamp
    // into the viewer's locale/timezone. Without JS the raw RFC 3339 shows.
    const timestamps = document.querySelectorAll(".utc-timestamp");
    timestamps.forEach((timestamp) => {
        const date = new Date(timestamp.innerText);
        const formatted = new Intl.DateTimeFormat([], {
            dateStyle: "short",
            timeStyle: "short"
        }).format(date);
        timestamp.innerText = formatted;
    })
</script>
</body>
</html>

13
uptime.json Normal file
View file

@ -0,0 +1,13 @@
{
"interval_seconds": 30,
"websites": [
{
"name": "nilstrieb.dev",
"url": "https://nilstrieb.dev"
},
{
"name": "localhost 1234",
"url": "http://localhost:1234"
}
]
}