store series db

thanks 522 for the idea :3
nora 2024-02-03 20:04:27 +01:00
parent 593f8ec279
commit 5a521d00d0
5 changed files with 104 additions and 6 deletions

View file

@@ -0,0 +1,6 @@
CREATE TABLE checks_series (
    request_time_range_start TIMESTAMP NOT NULL,
    request_time_range_end TIMESTAMP NOT NULL,
    website VARCHAR NOT NULL,
    result VARCHAR NOT NULL
)
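Each row in this table describes one contiguous run of identical check results for a website, bounded by the two timestamps. No explicit primary key is declared, so the Rust code further down addresses rows through SQLite's implicit rowid, selected as id. As a rough sketch of how the table can be inspected, something like the following would count the stored runs per website and result (the helper, its name, and the pool parameter are illustrative only, not part of the commit):

use sqlx::{Pool, Sqlite};

// Sketch only: count stored runs per website/result, assuming an
// already-opened pool such as the one `open_db` returns.
async fn print_run_counts(db: &Pool<Sqlite>) -> eyre::Result<()> {
    let runs = sqlx::query_as::<_, (String, String, i64)>(
        "SELECT website, result, COUNT(*) FROM checks_series GROUP BY website, result",
    )
    .fetch_all(db)
    .await?;
    for (website, result, count) in runs {
        println!("{website}: {count} run(s) with result '{result}'");
    }
    Ok(())
}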

View file

@@ -18,7 +18,7 @@ pub struct CheckResult {
    pub state: CheckState,
}
#[derive(Debug, PartialEq, sqlx::Type)]
#[derive(Debug, PartialEq, Clone, sqlx::Type)]
#[sqlx(rename_all = "snake_case")]
pub enum CheckState {
    Ok,
@@ -40,7 +40,7 @@ async fn make_request(client: &reqwest::Client, website: &WebsiteConfig) -> CheckResult {
    let time = Utc::now();
    let result = client.get(website.url.clone()).send().await;
    info!(?result, ?website.url, "Made health request");
    info!(?result, %website.url, "Made health request");
    match result {
        Ok(res) => CheckResult {
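The one-character change above swaps the tracing field sigil on the URL: a leading ? records a field using its Debug representation, while % records it using Display, so the log line now shows the URL as plain text instead of whatever the Url type's Debug implementation prints. A minimal sketch of the two capture modes (the function and messages are made up for illustration):

use tracing::info;
use url::Url;

// Illustrative only: ? captures the field via Debug, % via Display.
fn log_url_both_ways(url: &Url) {
    info!(?url, "debug capture");   // url recorded with Url's Debug output
    info!(%url, "display capture"); // url recorded as plain text, e.g. https://example.com/
}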

View file

@@ -13,12 +13,21 @@ fn default_db_url() -> String {
"uptime.db".into()
}
#[derive(Debug, serde::Deserialize)]
#[derive(serde::Deserialize)]
pub struct WebsiteConfig {
    pub name: String,
    pub url: Url,
}
impl std::fmt::Debug for WebsiteConfig {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("WebsiteConfig")
            .field("name", &self.name)
            .field("url", &format_args!("{}", self.url))
            .finish()
    }
}
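With Debug no longer derived (see the derive change above), this manual implementation keeps the name field readable and routes the URL through its Display implementation; format_args! does that without allocating an intermediate String. A rough sketch of the resulting output, using a made-up config entry and assuming WebsiteConfig is in scope:

use url::Url;

// Illustrative only: what {:?} prints with the manual Debug impl.
fn debug_output_demo() {
    let site = WebsiteConfig {
        name: "example".into(),
        url: Url::parse("https://example.com/").unwrap(),
    };
    println!("{site:?}");
    // prints: WebsiteConfig { name: "example", url: https://example.com/ }
}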
pub fn read_config(config_path: &str) -> Result<Config> {
    let config_str = std::fs::read_to_string(config_path)
        .wrap_err_with(|| format!("opening config at '{config_path}'"))?;

View file

@@ -1,4 +1,4 @@
use std::str::FromStr;
use std::{str::FromStr, time::Duration};
use chrono::Utc;
use eyre::{Context, Result};
@@ -16,6 +16,15 @@ pub struct Check {
    pub result: CheckState,
}
#[derive(sqlx::FromRow, Clone)]
pub struct CheckSeries {
    pub id: i32,
    pub request_time_range_start: chrono::DateTime<Utc>,
    pub request_time_range_end: chrono::DateTime<Utc>,
    pub website: String,
    pub result: CheckState,
}
pub async fn open_db(db_url: &str) -> Result<Pool<Sqlite>> {
    let db_opts = SqliteConnectOptions::from_str(db_url)
        .wrap_err("parsing database URL")?
@@ -26,7 +35,7 @@ pub async fn open_db(db_url: &str) -> Result<Pool<Sqlite>> {
.wrap_err_with(|| format!("opening db from `{}`", db_url))
}
pub async fn insert_results(db: &Pool<Sqlite>, results: Results) -> Result<()> {
pub async fn insert_results(db: &Pool<Sqlite>, results: &Results) -> Result<()> {
    let mut errors = Vec::new();
    for (website, check) in results.states.iter() {
        let result =
@@ -52,9 +61,80 @@ pub async fn insert_results(db: &Pool<Sqlite>, results: Results) -> Result<()> {
    }
}
pub async fn insert_results_series(db: &Pool<Sqlite>, interval_seconds: u64, results: &Results) -> Result<()> {
    let mut errors = Vec::new();
    for (website, check) in results.states.iter() {
        let latest = get_latest_series_for_website(db, website)
            .await
            .wrap_err("getting the latest series record")?;
        let threshold = chrono::Duration::from_std(Duration::from_secs(interval_seconds * 5))
            .wrap_err("cannot create threshold, interval_seconds too high or low")?;
        let result = match latest {
            Some(latest)
                if latest.result == check.state
                    && latest.request_time_range_end
                        < check.time.checked_add_signed(threshold).unwrap() =>
            {
                sqlx::query("UPDATE checks_series SET request_time_range_end = ? WHERE rowid = ?")
                    .bind(check.time)
                    .bind(latest.id)
                    .execute(db)
                    .await
                    .wrap_err_with(|| format!("updating series record for {website}"))
            }
            _ => {
                sqlx::query("INSERT INTO checks_series (request_time_range_start, request_time_range_end, website, result) VALUES (?, ?, ?, ?);")
                    .bind(check.time)
                    .bind(check.time)
                    .bind(website)
                    .bind(&check.state)
                    .execute(db)
                    .await
                    .wrap_err_with(|| format!("inserting new series record for {website}"))
            }
        };
        if let Err(err) = result {
            errors.push(err);
        }
    }
    if errors.len() > 0 {
        for err in errors {
            error!(?err);
        }
        Err(eyre::eyre!("error inserting results"))
    } else {
        Ok(())
    }
}
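The match guard above carries the core of the series logic: the newest series row for a website is extended, by moving request_time_range_end forward to the current check time, when that row recorded the same result and its end timestamp still lies before the current check time plus a threshold of five polling intervals; every other case, including the very first check for a website, inserts a fresh row. Written out as a standalone predicate purely for readability (a hypothetical helper, not part of the commit, assuming the CheckResult type from the client code is in scope):

// Hypothetical helper mirroring the match guard above; threshold is the
// five-interval window computed from interval_seconds. Adding the Duration
// directly is equivalent to the checked_add_signed(..).unwrap() used above.
fn should_extend_series(
    latest: &CheckSeries,
    check: &CheckResult,
    threshold: chrono::Duration,
) -> bool {
    latest.result == check.state
        && latest.request_time_range_end < check.time + threshold
}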
pub async fn get_checks(db: &Pool<Sqlite>) -> Result<Vec<Check>> {
    sqlx::query_as::<_, Check>("SELECT id, request_time, website, result FROM checks")
        .fetch_all(db)
        .await
        .wrap_err("getting all checks")
}
pub async fn get_checks_series(db: &Pool<Sqlite>) -> Result<Vec<CheckSeries>> {
sqlx::query_as::<_, CheckSeries>("SELECT rowid as id, request_time_range_start, request_time_range_end, website, result FROM checks_series")
.fetch_all(db)
.await
.wrap_err("getting all checks")
}
pub async fn get_latest_series_for_website(
    db: &Pool<Sqlite>,
    website: &str,
) -> Result<Option<CheckSeries>> {
    sqlx::query_as::<_, CheckSeries>(
        "SELECT rowid as id, request_time_range_start, request_time_range_end, website, result
         FROM checks_series
         WHERE website = ?
         ORDER BY request_time_range_end DESC
         LIMIT 1
        ",
    )
    .bind(website)
    .fetch_all(db)
    .await
.wrap_err("getting all checks")
.map(|elems| -> Option<CheckSeries> { elems.get(0).cloned() })
}
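For a sense of how the series rows might be consumed, here is a hedged usage sketch (not part of the commit) that reads everything back with get_checks_series and computes each run's length in Rust by subtracting the range bounds, assuming it sits next to the functions above so the module's imports apply:

// Sketch only: summarise the stored series rows.
async fn print_series_summary(db: &Pool<Sqlite>) -> Result<()> {
    for series in get_checks_series(db).await? {
        let run_length = series.request_time_range_end - series.request_time_range_start;
        println!(
            "{} was {:?} for about {} minute(s) ({} .. {})",
            series.website,
            series.result,
            run_length.num_minutes(),
            series.request_time_range_start,
            series.request_time_range_end,
        );
    }
    Ok(())
}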

View file

@@ -71,10 +71,13 @@ pub async fn check_timer(config: Config, db: Arc<Pool<Sqlite>>) -> Result<()> {
        let results = client::do_checks(&client).await;
        if let Err(err) = db::insert_results(&db, results).await {
        if let Err(err) = db::insert_results(&db, &results).await {
            error!(?err);
        }
        if let Err(err) = db::insert_results_series(&db, config.interval_seconds, &results).await {
            error!(?err);
        }
        info!("Finished tick.");
    }
}