make it a webapp

This commit is contained in:
nora 2024-09-07 22:43:04 +02:00
parent 11db12e074
commit b2ab2ff73e
12 changed files with 3082 additions and 155 deletions

1
.gitignore vendored
View file

@ -1,3 +1,4 @@
/target /target
/targets /targets
/results /results
/db.sqlite

2264
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -4,4 +4,20 @@ version = "0.1.0"
edition = "2021" edition = "2021"
[dependencies] [dependencies]
axum = { version = "0.7.5", features = ["macros"] }
color-eyre = "0.6.3" color-eyre = "0.6.3"
futures = "0.3.30"
reqwest = { version = "0.12.7", features = [
"rustls-tls",
], default-features = false }
serde = { version = "1.0.210", features = ["derive"] }
sqlx = { version = "0.8.2", features = [
"macros",
"migrate",
"runtime-tokio",
"sqlite",
] }
tempfile = "3.12.0"
tokio = { version = "1.40.0", features = ["full"] }
tracing = "0.1.40"
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }

View file

@ -0,0 +1,14 @@
-- Add migration script here

-- One row per (nightly, target) build attempt; the pair is unique.
CREATE TABLE build_info (
    "nightly" VARCHAR NOT NULL, -- nightly date, e.g. "2024-08-22"
    "target" VARCHAR NOT NULL,  -- rustc target triple
    "status" VARCHAR NOT NULL,  -- snake_case Status variant: "error" or "pass"
    "stderr" VARCHAR NOT NULL,  -- full cargo/rustc stderr of the build
    PRIMARY KEY ("nightly", "target")
);

-- Nightlies whose full target sweep completed; they are never rebuilt.
CREATE TABLE finished_nightly (
    "nightly" VARCHAR NOT NULL PRIMARY KEY
);

252
src/build.rs Normal file
View file

@ -0,0 +1,252 @@
use std::{
fmt::{Debug, Display},
num::NonZero,
path::Path,
time::Duration,
};
use color_eyre::{
eyre::{bail, Context},
Result,
};
use futures::StreamExt;
use tokio::process::Command;
use tracing::{debug, info};
use crate::{
db::{Db, FullBuildInfo, Status},
nightlies::Nightlies,
};
/// A rustup toolchain name wrapping a nightly date, e.g. `nightly-2024-08-22`.
pub struct Toolchain(String);

impl Toolchain {
    /// Build the toolchain name for a nightly date like `2024-08-22`.
    pub fn from_nightly(nightly: &str) -> Self {
        Toolchain(format!("nightly-{nightly}"))
    }
}

impl Display for Toolchain {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl Debug for Toolchain {
    // Debug deliberately prints the bare toolchain name, identical to Display,
    // so tracing output stays readable.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        Display::fmt(self, f)
    }
}
/// Endless background task: repeatedly picks the newest unfinished nightly
/// and builds every target for it, sleeping an hour between polls when
/// nothing new is available. Only returns on error.
pub async fn background_builder(db: Db) -> Result<()> {
    loop {
        let nightlies = Nightlies::fetch().await.wrap_err("fetching nightlies")?;
        let already_finished = db
            .finished_nightlies()
            .await
            .wrap_err("fetching finished nightlies")?;

        let next = nightlies.select_latest_to_build(&already_finished);

        match next {
            Some(nightly) => {
                info!(%nightly, "Building next nightly");
                build_every_target_for_toolchain(&db, &nightly)
                    .await
                    .wrap_err_with(|| format!("building targets for toolchain {nightly}"))?;
            }
            None => {
                info!("No new nightly, waiting for an hour to try again");
                tokio::time::sleep(Duration::from_secs(60 * 60)).await;
            }
        }
    }
}
/// Ask `rustc +<toolchain> --print target-list` for every target triple the
/// toolchain knows about.
async fn targets_for_toolchain(toolchain: &Toolchain) -> Result<Vec<String>> {
    let output = Command::new("rustc")
        .arg(format!("+{toolchain}"))
        .arg("--print")
        .arg("target-list")
        .output()
        .await
        .wrap_err("failed to spawn rustc")?;
    if !output.status.success() {
        // Lossy conversion: show the actual stderr text instead of a
        // Debug-printed Result wrapper, and never fail on bad UTF-8 here.
        bail!(
            "failed to get target-list from rustc: {}",
            String::from_utf8_lossy(&output.stderr)
        );
    }
    Ok(String::from_utf8(output.stdout)
        .wrap_err("rustc target-list is invalid UTF-8")?
        .split_whitespace()
        .map(ToOwned::to_owned)
        .collect())
}
#[tracing::instrument]
/// Install the toolchain (minimal profile) plus the `rust-src` component,
/// which `-Zbuild-std=core` needs.
async fn install_toolchain(toolchain: &Toolchain) -> Result<()> {
    info!(%toolchain, "Installing toolchain");
    run_rustup(&[
        "toolchain",
        "install",
        &toolchain.0,
        "--profile",
        "minimal",
    ])
    .await?;
    run_rustup(&["component", "add", "rust-src", "--toolchain", &toolchain.0]).await?;
    Ok(())
}

/// Run a rustup subcommand, failing with its captured stderr on non-zero exit.
async fn run_rustup(args: &[&str]) -> Result<()> {
    let result = Command::new("rustup")
        .args(args)
        .output()
        .await
        .wrap_err("failed to spawn rustup")?;
    if !result.status.success() {
        // Lossy: report stderr text directly rather than a Debug-printed Result.
        bail!("rustup failed: {}", String::from_utf8_lossy(&result.stderr));
    }
    Ok(())
}
#[tracing::instrument]
/// Remove the toolchain again to reclaim disk space once its sweep is done.
async fn uninstall_toolchain(toolchain: &Toolchain) -> Result<()> {
    info!(%toolchain, "Uninstalling toolchain");
    let result = Command::new("rustup")
        .arg("toolchain")
        .arg("remove")
        .arg(&toolchain.0)
        .output()
        .await
        .wrap_err("failed to spawn rustup")?;
    if !result.status.success() {
        // Lossy: report stderr text directly rather than a Debug-printed Result.
        bail!(
            "rustup toolchain remove failed: {}",
            String::from_utf8_lossy(&result.stderr)
        );
    }
    Ok(())
}
/// Build every target of a nightly (installing and later uninstalling the
/// toolchain), then mark the nightly as finished so it is never retried.
///
/// Already-recorded (nightly, target) pairs are skipped by
/// `build_single_target`, so a crashed run resumes where it left off.
pub async fn build_every_target_for_toolchain(db: &Db, nightly: &str) -> Result<()> {
    if db.is_nightly_finished(nightly).await? {
        debug!("Nightly is already finished, not trying again");
        return Ok(());
    }

    let toolchain = Toolchain::from_nightly(nightly);
    install_toolchain(&toolchain).await?;

    let targets = targets_for_toolchain(&toolchain)
        .await
        .wrap_err("failed to get targets")?;

    // Use half the cores so the machine stays responsive; each build spawns
    // its own cargo/rustc processes. Clamp to at least 1: with a single core
    // the division yields 0, and `buffer_unordered(0)` would never progress.
    let concurrent = std::thread::available_parallelism()
        .unwrap_or(NonZero::new(2).unwrap())
        .get()
        / 2;

    let results = futures::stream::iter(
        targets
            .iter()
            .map(|target| build_single_target(db, nightly, target)),
    )
    .buffer_unordered(concurrent.max(1))
    .collect::<Vec<Result<()>>>()
    .await;

    for result in results {
        result?;
    }

    // Mark it as finished, so we never have to build it again.
    db.finish_nightly(nightly).await?;
    uninstall_toolchain(&toolchain).await?;
    Ok(())
}
#[tracing::instrument(skip(db))]
/// Build one (nightly, target) pair in a fresh temp dir and record the result,
/// unless the database already has an entry for it.
async fn build_single_target(db: &Db, nightly: &str, target: &str) -> Result<()> {
    // Guard: anything already recorded is never rebuilt.
    if db
        .build_status_full(nightly, target)
        .await
        .wrap_err("getting existing build")?
        .is_some()
    {
        debug!("Build already exists");
        return Ok(());
    }

    info!("Building target");
    let tmpdir = tempfile::tempdir().wrap_err("creating temporary directory")?;
    let toolchain = Toolchain::from_nightly(nightly);
    let outcome = build_target(tmpdir.path(), &toolchain, target)
        .await
        .wrap_err("running build")?;

    let record = FullBuildInfo {
        nightly: nightly.into(),
        target: target.into(),
        status: outcome.status,
        stderr: outcome.stderr,
    };
    db.insert(record).await?;
    Ok(())
}
struct BuildResult {
status: Status,
stderr: String,
}
/// Build a target core in a temporary directory and see whether it passes or not.
async fn build_target(tmpdir: &Path, toolchain: &Toolchain, target: &str) -> Result<BuildResult> {
std::fs::create_dir_all(&tmpdir).wrap_err("creating target src dir")?;
let init = Command::new("cargo")
.args(["init", "--lib", "--name", "target-test"])
.current_dir(&tmpdir)
.output()
.await
.wrap_err("spawning cargo init")?;
if !init.status.success() {
bail!("init failed: {}", String::from_utf8(init.stderr)?);
}
let librs = tmpdir.join("src").join("lib.rs");
std::fs::write(&librs, "#![no_std]\n")
.wrap_err_with(|| format!("writing to {}", librs.display()))?;
let output = Command::new("cargo")
.arg(format!("+{toolchain}"))
.args(["build", "-Zbuild-std=core", "--release"])
.args(["--target", target])
.current_dir(&tmpdir)
.output()
.await
.wrap_err("spawning cargo build")?;
let stderr = String::from_utf8(output.stderr).wrap_err("cargo stderr utf8")?;
let status = if output.status.success() {
Status::Pass
} else {
Status::Error
};
info!("Finished build");
Ok(BuildResult { status, stderr })
}

130
src/db.rs Normal file
View file

@ -0,0 +1,130 @@
use std::{fmt::Display, str::FromStr};
use color_eyre::{eyre::Context, Result};
use serde::{Deserialize, Serialize};
use sqlx::{migrate::Migrator, sqlite::SqliteConnectOptions, Pool, Sqlite};
/// Handle to the SQLite database; cheap to clone (wraps a connection pool).
#[derive(Clone)]
pub struct Db {
    pub conn: Pool<Sqlite>,
}

// Embedded migrations from the `migrations/` directory, run at startup.
pub static MIGRATOR: Migrator = sqlx::migrate!();

/// Build result without the (potentially large) stderr, for the overview JSON.
#[derive(sqlx::FromRow, Serialize, Deserialize)]
pub struct BuildInfo {
    pub nightly: String,
    pub target: String,
    pub status: Status,
}

/// Full build result including stderr, for the per-build detail page.
#[derive(Clone, sqlx::FromRow, Serialize, Deserialize)]
pub struct FullBuildInfo {
    pub nightly: String,
    pub target: String,
    pub status: Status,
    pub stderr: String,
}

/// Whether a build passed or errored. Stored in SQLite and serialized to JSON
/// as lowercase strings ("error" / "pass"), matching the Display impl below.
#[derive(Debug, PartialEq, Clone, Copy, sqlx::Type, Serialize, Deserialize)]
#[sqlx(rename_all = "snake_case")]
#[serde(rename_all = "lowercase")]
pub enum Status {
    Error,
    Pass,
}

impl Display for Status {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Must stay in sync with the sqlx/serde renames above: these strings
        // are also used as CSS class names in the web UI.
        match self {
            Self::Error => f.write_str("error"),
            Self::Pass => f.write_str("pass"),
        }
    }
}

// Row type for the finished_nightly table.
#[derive(sqlx::FromRow)]
struct FinishedNightly {
    nightly: String,
}
impl Db {
    /// Open the SQLite database at `path`, creating the file if missing.
    pub async fn open(path: &str) -> Result<Self> {
        let db_opts = SqliteConnectOptions::from_str(path)
            .wrap_err("parsing database URL")?
            .create_if_missing(true);
        let conn = Pool::connect_with(db_opts)
            .await
            .wrap_err_with(|| format!("opening db from `{}`", path))?;
        Ok(Self { conn })
    }

    /// Insert one build result. Fails if the (nightly, target) pair exists,
    /// since that pair is the table's primary key.
    pub async fn insert(&self, info: FullBuildInfo) -> Result<()> {
        sqlx::query(
            "INSERT INTO build_info (nightly, target, status, stderr) VALUES (?, ?, ?, ?);",
        )
        .bind(info.nightly)
        .bind(info.target)
        .bind(info.status)
        .bind(info.stderr)
        .execute(&self.conn)
        .await
        .wrap_err("inserting build info into database")?;
        Ok(())
    }

    /// All build results, without stderr (kept small for the overview JSON).
    pub async fn build_status(&self) -> Result<Vec<BuildInfo>> {
        sqlx::query_as::<_, BuildInfo>("SELECT nightly, target, status FROM build_info")
            .fetch_all(&self.conn)
            .await
            .wrap_err("getting build status from DB")
    }

    /// The full build result for one (nightly, target) pair, if recorded.
    pub async fn build_status_full(
        &self,
        nightly: &str,
        target: &str,
    ) -> Result<Option<FullBuildInfo>> {
        // (nightly, target) is the primary key, so at most one row matches;
        // fetch_optional avoids materializing a Vec and cloning the first row.
        sqlx::query_as::<_, FullBuildInfo>(
            "SELECT nightly, target, status, stderr FROM build_info
            WHERE nightly = ? AND target = ?",
        )
        .bind(nightly)
        .bind(target)
        .fetch_optional(&self.conn)
        .await
        .wrap_err("getting build status from DB")
    }

    /// All nightlies that have been fully swept already.
    pub async fn finished_nightlies(&self) -> Result<Vec<String>> {
        let result = sqlx::query_as::<_, FinishedNightly>("SELECT nightly from finished_nightly")
            .fetch_all(&self.conn)
            .await
            .wrap_err("fetching finished nightlies")?;
        Ok(result.into_iter().map(|nightly| nightly.nightly).collect())
    }

    /// Whether a single nightly is already marked finished.
    pub async fn is_nightly_finished(&self, nightly: &str) -> Result<bool> {
        // Primary-key lookup: fetch_optional instead of fetch_all + len check.
        Ok(sqlx::query_as::<_, FinishedNightly>(
            "SELECT nightly from finished_nightly WHERE nightly = ?",
        )
        .bind(nightly)
        .fetch_optional(&self.conn)
        .await
        .wrap_err("fetching finished nightlies")?
        .is_some())
    }

    /// Mark a nightly as completely built; it will never be retried.
    pub async fn finish_nightly(&self, nightly: &str) -> Result<()> {
        sqlx::query("INSERT INTO finished_nightly (nightly) VALUES (?)")
            .bind(nightly)
            .execute(&self.conn)
            .await
            .wrap_err("inserting finished nightly")?;
        Ok(())
    }
}

View file

@ -1,151 +1,30 @@
use std::{collections::BTreeMap, num::NonZeroUsize, path::Path, process::Command, sync::Mutex}; mod build;
mod db;
mod nightlies;
mod web;
use color_eyre::{ use color_eyre::{eyre::WrapErr, Result};
eyre::{bail, WrapErr}, use db::Db;
Result,
};
fn main() -> Result<()> { #[tokio::main]
let targets = String::from_utf8( async fn main() -> Result<()> {
Command::new("rustc") tracing_subscriber::fmt().init();
.arg("--print")
.arg("target-list")
.output()?
.stdout,
)?;
let year = 2024; let db = Db::open(&std::env::var("DB_PATH").unwrap_or("db.sqlite".into())).await?;
let current_month = 6_u32; db::MIGRATOR
let current_day = 12_u32; .run(&db.conn)
.await
.wrap_err("running migrations")?;
let dates = (current_month.saturating_sub(5)..=current_month).flat_map(|month| { let builder = build::background_builder(db.clone());
if month == current_month && current_day <= 16 { let server = web::webserver(db);
vec![format!("{year}-{month:0>2}-01")]
} else { tokio::select! {
vec![ result = builder => {
format!("{year}-{month:0>2}-01"), result
format!("{year}-{month:0>2}-15"),
]
} }
}); result = server => {
result
for date in dates {
println!("Doing date {date}");
let toolchain = format!("nightly-{date}");
let result = Command::new("rustup")
.arg("toolchain")
.arg("install")
.arg(&toolchain)
.arg("--profile")
.arg("minimal")
.spawn()?
.wait()?;
if !result.success() {
bail!("rustup failed");
}
let result = Command::new("rustup")
.arg("component")
.arg("add")
.arg("rust-src")
.arg("--toolchain")
.arg(&toolchain)
.spawn()?
.wait()?;
if !result.success() {
bail!("rustup failed");
}
let queue = targets.lines().collect::<Vec<&str>>();
let queue = &Mutex::new(queue);
std::fs::create_dir_all("targets")?;
let failures = Mutex::new(BTreeMap::new());
let targets = Path::new("targets").join(&toolchain);
std::thread::scope(|s| -> Result<()> {
let mut handles = vec![];
for _ in 0..std::thread::available_parallelism()
.unwrap_or(NonZeroUsize::new(1).unwrap())
.get()
{
let handle = s.spawn(|| -> Result<()> {
loop {
let target = {
let mut queue = queue.lock().unwrap();
let Some(next) = queue.pop() else {
return Ok(());
};
println!("remaining: {:>3 } - {next}", queue.len());
next
};
(|| -> Result<()> {
let target_dir = targets.join(target);
std::fs::create_dir_all(&target_dir)
.wrap_err("creating target src dir")?;
if !target_dir.join("Cargo.toml").exists() {
let init = Command::new("cargo")
.args(["init", "--lib", "--name", "target-test"])
.current_dir(&target_dir)
.output()
.wrap_err("spawning cargo init")?;
if !init.status.success() {
bail!("init failed: {}", String::from_utf8(init.stderr)?);
}
}
let librs = target_dir.join("src").join("lib.rs");
std::fs::write(&librs, "#![no_std]\n")
.wrap_err_with(|| format!("writing to {}", librs.display()))?;
let output = Command::new("cargo")
.arg(format!("+{toolchain}"))
.args(["build", "-Zbuild-std=core", "--release"])
.args(["--target", target])
.current_dir(&target_dir)
.output()
.wrap_err("spawning cargo build")?;
if !output.status.success() {
println!("failure: {target}");
let stderr = String::from_utf8(output.stderr)
.wrap_err("cargo stderr utf8")?;
failures.lock().unwrap().insert(target.to_owned(), stderr);
}
Ok(())
})()
.wrap_err_with(|| format!("while checking {target}"))?;
}
});
handles.push(handle);
}
for handle in handles {
handle.join().unwrap()?;
}
Ok(())
})?;
std::fs::create_dir_all("results").wrap_err("creating results directory")?;
std::fs::write(
Path::new("results").join(date),
failures
.lock()
.unwrap()
.keys()
.cloned()
.collect::<Vec<_>>()
.join(","),
)
.wrap_err("writing results file")?;
for (target, stderr) in failures.into_inner().unwrap() {
println!("-----------------\nBROKEN TARGET: {target}\n{stderr}\n\n");
} }
} }
Ok(())
} }

66
src/nightlies.rs Normal file
View file

@ -0,0 +1,66 @@
use std::collections::HashSet;
use std::hash::RandomState;
use color_eyre::eyre::Context;
use color_eyre::Result;
use tracing::debug;
const EARLIEST_CUTOFF_DATE: &str = "2023-01-01";
/// All nightlies that exist, newest first.
pub struct Nightlies {
    all: Vec<String>,
}

impl Nightlies {
    /// Download the full manifest list from static.rust-lang.org and keep the
    /// nightly dates after `EARLIEST_CUTOFF_DATE`, sorted newest first.
    pub async fn fetch() -> Result<Nightlies> {
        let manifests = reqwest::get("https://static.rust-lang.org/manifests.txt")
            .await
            .wrap_err("fetching https://static.rust-lang.org/manifests.txt")?
            .text()
            .await
            .wrap_err("fetching body of https://static.rust-lang.org/manifests.txt")?;
        let mut all = nightlies_from_manifest(&manifests)
            .into_iter()
            // ISO dates compare correctly as strings.
            .filter(|date| date.as_str() > EARLIEST_CUTOFF_DATE)
            .collect::<Vec<_>>();
        // Descending. Dates are unique, so the faster, non-allocating unstable
        // sort is behaviorally identical to a stable one here.
        all.sort_unstable_by(|a, b| b.cmp(a));
        debug!("Loaded {} nightlies from the manifest", all.len());
        Ok(Self { all })
    }

    /// The newest nightly not in `already_finished`, or None when caught up.
    pub fn select_latest_to_build(&self, already_finished: &[String]) -> Option<String> {
        let already_finished = HashSet::<_, RandomState>::from_iter(already_finished.iter());
        self.all
            .iter()
            .find(|nightly| !already_finished.contains(nightly))
            .cloned()
    }
}
/// Extract nightly dates from manifests.txt: keep only lines of the shape
/// `static.rust-lang.org/dist/<date>/channel-rust-nightly.toml`.
fn nightlies_from_manifest(manifest: &str) -> Vec<String> {
    let mut nightlies = Vec::new();
    for line in manifest.lines() {
        if let Some(rest) = line.strip_prefix("static.rust-lang.org/dist/") {
            if let Some(date) = rest.strip_suffix("/channel-rust-nightly.toml") {
                nightlies.push(date.to_owned());
            }
        }
    }
    nightlies
}
#[cfg(test)]
mod tests {
    #[test]
    fn manifest_parse() {
        // Beta channel lines must be ignored; only the two nightly lines survive.
        let test_manifest = concat!(
            "static.rust-lang.org/dist/2024-08-22/channel-rust-nightly.toml\n",
            "static.rust-lang.org/dist/2024-08-22/channel-rust-1.81.0-beta.toml\n",
            "static.rust-lang.org/dist/2024-08-22/channel-rust-1.81.0-beta.6.toml\n",
            "static.rust-lang.org/dist/2024-08-23/channel-rust-nightly.toml"
        );
        let nightlies = super::nightlies_from_manifest(test_manifest);
        assert_eq!(nightlies, vec!["2024-08-22", "2024-08-23"]);
    }
}

103
src/web.rs Normal file
View file

@ -0,0 +1,103 @@
use axum::{
extract::{Query, State},
http::StatusCode,
response::{Html, IntoResponse, Response},
routing::{get, post},
Json, Router,
};
use color_eyre::{eyre::Context, Result};
use serde::{Deserialize, Serialize};
use tracing::{error, info};
use crate::db::Db;
/// Shared state handed to every axum handler.
#[derive(Clone)]
pub struct AppState {
    // Database handle; `Db` wraps a pool and is cheap to clone per request.
    pub db: Db,
}
/// Serve the web UI on 0.0.0.0:3000 until the server stops with an error.
pub async fn webserver(db: Db) -> Result<()> {
    let app = Router::new()
        .route("/", get(root))
        .route("/build", get(build))
        .route("/index.css", get(index_css))
        .route("/target-state", get(target_state))
        .route("/trigger-build", post(trigger_build))
        .with_state(AppState { db });

    info!("Serving website on port 3000");
    // Propagate bind failures (e.g. port already in use) instead of panicking.
    let listener = tokio::net::TcpListener::bind("0.0.0.0:3000")
        .await
        .wrap_err("binding to 0.0.0.0:3000")?;
    axum::serve(listener, app).await.wrap_err("failed to serve")
}
/// GET / — serve the static index page, embedded at compile time.
async fn root() -> impl IntoResponse {
    Html(include_str!("../static/index.html"))
}
/// Query parameters for the /build detail page.
#[derive(Deserialize)]
struct BuildQuery {
    nightly: String,
    target: String,
}

/// Minimal HTML escaping for values interpolated into the build page template.
fn escape_html(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for c in s.chars() {
        match c {
            '&' => out.push_str("&amp;"),
            '<' => out.push_str("&lt;"),
            '>' => out.push_str("&gt;"),
            '"' => out.push_str("&quot;"),
            '\'' => out.push_str("&#39;"),
            _ => out.push(c),
        }
    }
    out
}

/// GET /build?nightly=..&target=.. — detail page for a single build result.
async fn build(State(state): State<AppState>, Query(query): Query<BuildQuery>) -> Response {
    match state
        .db
        .build_status_full(&query.nightly, &query.target)
        .await
    {
        Ok(Some(build)) => {
            // Escape everything substituted into the template: nightly/target
            // come straight from the query string and stderr is arbitrary
            // compiler output — unescaped substitution would be an XSS hole.
            // `status` is one of our own Display strings ("error"/"pass") and
            // doubles as a CSS class, so it is left as-is.
            let page = include_str!("../static/build.html")
                .replace("{{nightly}}", &escape_html(&query.nightly))
                .replace("{{target}}", &escape_html(&query.target))
                .replace("{{stderr}}", &escape_html(&build.stderr))
                .replace("{{status}}", &build.status.to_string());
            Html(page).into_response()
        }
        Ok(None) => StatusCode::NOT_FOUND.into_response(),
        Err(err) => {
            error!(?err, "Error loading target state");
            StatusCode::INTERNAL_SERVER_ERROR.into_response()
        }
    }
}
/// GET /index.css — serve the embedded stylesheet with an explicit
/// content type (axum would otherwise default to text/plain for a &str body).
async fn index_css() -> impl IntoResponse {
    (
        [(
            axum::http::header::CONTENT_TYPE,
            axum::http::HeaderValue::from_static("text/css; charset=utf-8"),
        )],
        include_str!("../static/index.css"),
    )
}
/// GET /target-state — all build results (without stderr) as JSON,
/// consumed by the table-rendering script on the index page.
async fn target_state(State(state): State<AppState>) -> impl IntoResponse {
    state.db.build_status().await.map(Json).map_err(|err| {
        error!(?err, "Error loading target state");
        StatusCode::INTERNAL_SERVER_ERROR
    })
}
/// Request body for POST /trigger-build.
#[derive(Serialize, Deserialize)]
struct TriggerBuildBody {
    nightly: String,
}

/// POST /trigger-build — manual build triggering is currently disabled and
/// always answers 400. The commented-out code below is the intended
/// implementation, kept for when the feature is re-enabled.
#[axum::debug_handler]
async fn trigger_build(
    State(_state): State<AppState>,
    _body: Json<TriggerBuildBody>,
) -> StatusCode {
    StatusCode::BAD_REQUEST
    // tokio::spawn(async move {
    //     let result = build::build_every_target_for_toolchain(&state.db, &body.nightly).await;
    //     if let Err(err) = result {
    //         error!(?err, "Error while building");
    //     }
    // });
    //
    // StatusCode::ACCEPTED
}

21
static/build.html Normal file
View file

@ -0,0 +1,21 @@
<!DOCTYPE html>
<!-- Detail page for one build. The {{nightly}}, {{target}}, {{status}} and
     {{stderr}} placeholders are substituted server-side by the /build
     handler in src/web.rs before the page is sent. -->
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Build</title>
    <link rel="stylesheet" href="/index.css" />
    <style>
      .build-indicator-big {
        padding: 10px;
      }
    </style>
  </head>
  <body>
    <h1>Build results nightly-{{nightly}} target-{{target}}</h1>
    <!-- {{status}} is "error" or "pass" and doubles as the CSS class. -->
    <div class="{{status}} build-indicator-big">{{status}}</div>
    <pre>
{{stderr}}
    </pre>
  </body>
</html>

26
static/index.css Normal file
View file

@ -0,0 +1,26 @@
html {
  font-family: sans-serif;
}

th,
td {
  border: 1px solid;
  margin: 0;
}

/* Build status colors — class names match the lowercase Status values
   ("error" / "pass") emitted by the server; "missing" is set client-side
   for cells with no recorded build. */
.error {
  background-color: lightcoral;
}

.pass {
  background-color: greenyellow;
}

.missing {
  background-color: lightgray;
}

/* Links inside status cells: inherit the cell look instead of link styling. */
.build-info-a {
  color: black;
  text-decoration: none;
}

179
static/index.html Normal file
View file

@ -0,0 +1,179 @@
<!DOCTYPE html>
<!-- Index page: renders the nightly x target status matrix from the
     /target-state JSON endpoint. Filters are persisted in localStorage. -->
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Target state</title>
    <link rel="stylesheet" href="/index.css" />
  </head>
  <body>
    <h1>Target state</h1>
    <!--<form id="nightly-form">
      <h2>Force manual build</h2>
      <label for="nightly-date-field">Nightly date</label>
      <input id="nightly-date-field" placeholder="2024-08-05" type="text" />
      <input type="submit" />
    </form>-->
    <h2>Historical target state</h2>
    <label for="target-filter">Target Filter</label>
    <input id="target-filter" />
    <label for="target-filter-failed">Filter failed</label>
    <input type="checkbox" id="target-filter-failed" />
    <table id="target-state" class="target-state-table">
      <tr>
        <td>loading...</td>
      </tr>
    </table>
    <script>
      let data = [];
      let filter = localStorage.getItem("filter") ?? "";
      document.getElementById("target-filter").value = filter;
      // Checkboxes are restored via .checked — assigning .value does not
      // change their state. localStorage only stores strings, so compare.
      document.getElementById("target-filter-failed").checked =
        localStorage.getItem("filterFailed") === "true";

      const table = document.getElementById("target-state");

      // Load all build results and re-render the matrix.
      function fetchTargets() {
        fetch("/target-state")
          .then((body) => body.json())
          .then((body) => {
            data = body;
            renderTable();
          });
      }

      function renderTable() {
        const allTargets = new Set();
        const allNightlies = new Set();
        const nightlyInfos = new Map();
        // Targets that have, at some point, errored
        const targetsWithErrors = new Set();

        for (const info of data) {
          // Every nightly keeps its row even when the filter hides all of
          // its targets.
          allNightlies.add(info.nightly);
          if (!info.target.includes(filter)) {
            continue;
          }
          if (info.status === "error") {
            targetsWithErrors.add(info.target);
          }
          allTargets.add(info.target);
          if (!nightlyInfos.has(info.nightly)) {
            nightlyInfos.set(info.nightly, new Map());
          }
          nightlyInfos.get(info.nightly).set(info.target, info);
        }

        const nightlies = Array.from(allNightlies);
        nightlies.sort();
        nightlies.reverse();
        const targets = Array.from(allTargets);
        targets.sort();

        const filterFailed = document.getElementById(
          "target-filter-failed"
        ).checked;

        const header = document.createElement("tr");
        const headerNightly = document.createElement("th");
        headerNightly.innerText = "nightly";
        header.appendChild(headerNightly);
        targets.forEach((target) => {
          if (filterFailed && !targetsWithErrors.has(target)) {
            return;
          }
          const elem = document.createElement("th");
          elem.innerText = target;
          header.appendChild(elem);
        });

        const rows = nightlies.map((nightly) => {
          const tr = document.createElement("tr");
          const nightlyCol = document.createElement("td");
          nightlyCol.innerText = nightly;
          tr.appendChild(nightlyCol);

          const info = nightlyInfos.get(nightly) ?? new Map();
          for (const target of targets) {
            if (filterFailed && !targetsWithErrors.has(target)) {
              continue;
            }
            const td = document.createElement("td");
            const targetInfo = info.get(target);
            if (targetInfo) {
              const a = document.createElement("a");
              a.classList.add("build-info-a");
              a.href = `/build?nightly=${encodeURIComponent(
                nightly
              )}&target=${encodeURIComponent(target)}`;
              a.innerText = targetInfo.status;
              td.appendChild(a);
              td.classList.add(targetInfo.status);
            } else {
              td.innerText = "";
              td.classList.add("missing");
            }
            tr.appendChild(td);
          }
          return tr;
        });

        table.replaceChildren(header, ...rows);
      }

      // Kept for when manual build triggering is re-enabled server-side.
      //function onTriggerBuild(e) {
      //  e.preventDefault();
      //
      //  const date = document.getElementById("nightly-date-field").value;
      //  if (!date) {
      //    return;
      //  }
      //
      //  fetch("/trigger-build", {
      //    method: "POST",
      //    body: JSON.stringify({
      //      nightly: date,
      //    }),
      //    headers: {
      //      "Content-Type": "application/json",
      //    },
      //  }).then(() =>
      //    alert(`triggered build for ${date}, this may take a few minutes`)
      //  );
      //}

      function onFilterChange(e) {
        filter = e.target.value;
        localStorage.setItem("filter", filter);
        renderTable();
      }

      function onFilterFailedChange(e) {
        // Persist the checkbox so it survives reloads, like the text filter.
        localStorage.setItem("filterFailed", e.target.checked);
        renderTable();
      }

      //document
      //  .getElementById("nightly-form")
      //  .addEventListener("submit", onTriggerBuild);
      document
        .getElementById("target-filter")
        .addEventListener("input", onFilterChange);
      document
        .getElementById("target-filter-failed")
        .addEventListener("input", onFilterFailedChange);

      // Initial fetch
      fetchTargets();
    </script>
  </body>
</html>