diff --git a/migrations/20240908082826_add_miri_std.sql b/migrations/20240908082826_add_miri_std.sql new file mode 100644 index 0000000..46076e9 --- /dev/null +++ b/migrations/20240908082826_add_miri_std.sql @@ -0,0 +1,43 @@ +PRAGMA foreign_keys=OFF; + +-- Migrate build_info + +CREATE TABLE new_build_info ( + "nightly" VARCHAR NOT NULL, + "target" VARCHAR NOT NULL, + "status" VARCHAR NOT NULL, + "stderr" VARCHAR NOT NULL, + "mode" VARCHAR NOT NULL, + + PRIMARY KEY ("nightly", "target", "mode") +); + +INSERT INTO new_build_info (nightly, target, status, stderr, mode) +SELECT nightly, target, status, stderr, 'core' FROM build_info; + +DROP TABLE build_info; + +ALTER TABLE new_build_info RENAME TO build_info; + + +-- Migrate finished_nightly + +CREATE TABLE new_finished_nightly ( + "nightly" VARCHAR NOT NULL, + "mode" VARCHAR NOT NULL, + + PRIMARY KEY ("nightly", "mode") +); + +INSERT INTO new_finished_nightly (nightly, mode) +SELECT nightly, 'core' FROM finished_nightly; + +DROP TABLE finished_nightly; + +ALTER TABLE new_finished_nightly RENAME TO finished_nightly; + +-- Finish + +PRAGMA foreign_keys=ON; + + diff --git a/src/build.rs b/src/build.rs index 8806e63..159bd7f 100644 --- a/src/build.rs +++ b/src/build.rs @@ -14,7 +14,7 @@ use tokio::process::Command; use tracing::{debug, info}; use crate::{ - db::{Db, FullBuildInfo, Status}, + db::{BuildMode, Db, FullBuildInfo, Status}, nightlies::Nightlies, }; @@ -45,9 +45,9 @@ pub async fn background_builder(db: Db) -> Result<()> { let next = nightlies.select_latest_to_build(&already_finished); match next { - Some(nightly) => { - info!(%nightly, "Building next nightly"); - build_every_target_for_toolchain(&db, &nightly) + Some((nightly, mode)) => { + info!(%nightly, %mode, "Building next nightly"); + build_every_target_for_toolchain(&db, &nightly, mode) .await .wrap_err_with(|| format!("building targets for toolchain {nightly}"))?; } @@ -82,7 +82,7 @@ async fn targets_for_toolchain(toolchain: &Toolchain) -> Result> { } 
#[tracing::instrument] -async fn install_toolchain(toolchain: &Toolchain) -> Result<()> { +async fn install_toolchain(toolchain: &Toolchain, mode: BuildMode) -> Result<()> { info!(%toolchain, "Installing toolchain"); let result = Command::new("rustup") @@ -109,6 +109,20 @@ async fn install_toolchain(toolchain: &Toolchain) -> Result<()> { if !result.status.success() { bail!("rustup failed: {:?}", String::from_utf8(result.stderr)); } + if mode == BuildMode::MiriStd { + let result = Command::new("rustup") + .arg("component") + .arg("add") + .arg("miri") + .arg("--toolchain") + .arg(&toolchain.0) + .output() + .await + .wrap_err("failed to spawn rustup")?; + if !result.status.success() { + bail!("rustup failed: {:?}", String::from_utf8(result.stderr)); + } + } Ok(()) } @@ -132,14 +146,18 @@ async fn uninstall_toolchain(toolchain: &Toolchain) -> Result<()> { Ok(()) } -pub async fn build_every_target_for_toolchain(db: &Db, nightly: &str) -> Result<()> { - if db.is_nightly_finished(nightly).await? { +pub async fn build_every_target_for_toolchain( + db: &Db, + nightly: &str, + mode: BuildMode, +) -> Result<()> { + if db.is_nightly_finished(nightly, mode).await? 
{ debug!("Nightly is already finished, not trying again"); return Ok(()); } let toolchain = Toolchain::from_nightly(nightly); - install_toolchain(&toolchain).await?; + install_toolchain(&toolchain, mode).await?; let targets = targets_for_toolchain(&toolchain) .await @@ -153,7 +171,7 @@ pub async fn build_every_target_for_toolchain(db: &Db, nightly: &str) -> Result< let results = futures::stream::iter( targets .iter() - .map(|target| build_single_target(&db, nightly, target)), + .map(|target| build_single_target(&db, nightly, target, mode)), ) .buffer_unordered(concurrent) .collect::>>() @@ -163,13 +181,13 @@ pub async fn build_every_target_for_toolchain(db: &Db, nightly: &str) -> Result< } for target in targets { - build_single_target(db, nightly, &target) + build_single_target(db, nightly, &target, mode) .await .wrap_err_with(|| format!("building target {target} for toolchain {toolchain}"))?; } // Mark it as finished, so we never have to build it again. - db.finish_nightly(nightly).await?; + db.finish_nightly(nightly, mode).await?; uninstall_toolchain(&toolchain).await?; @@ -177,9 +195,9 @@ pub async fn build_every_target_for_toolchain(db: &Db, nightly: &str) -> Result< } #[tracing::instrument(skip(db))] -async fn build_single_target(db: &Db, nightly: &str, target: &str) -> Result<()> { +async fn build_single_target(db: &Db, nightly: &str, target: &str, mode: BuildMode) -> Result<()> { let existing = db - .build_status_full(nightly, target) + .build_status_full(nightly, target, mode) .await .wrap_err("getting existing build")?; if existing.is_some() { @@ -191,15 +209,21 @@ async fn build_single_target(db: &Db, nightly: &str, target: &str) -> Result<()> let tmpdir = tempfile::tempdir().wrap_err("creating temporary directory")?; - let result = build_target(tmpdir.path(), &Toolchain::from_nightly(nightly), target) - .await - .wrap_err("running build")?; + let result = build_target( + tmpdir.path(), + &Toolchain::from_nightly(nightly), + target, + mode, + ) + .await + 
.wrap_err("running build")?; db.insert(FullBuildInfo { nightly: nightly.into(), target: target.into(), status: result.status, stderr: result.stderr, + mode, }) .await?; @@ -212,31 +236,47 @@ struct BuildResult { } /// Build a target core in a temporary directory and see whether it passes or not. -async fn build_target(tmpdir: &Path, toolchain: &Toolchain, target: &str) -> Result { - std::fs::create_dir_all(&tmpdir).wrap_err("creating target src dir")?; +async fn build_target( + tmpdir: &Path, + toolchain: &Toolchain, + target: &str, + mode: BuildMode, +) -> Result { + let output = match mode { + BuildMode::Core => { + let init = Command::new("cargo") + .args(["init", "--lib", "--name", "target-test"]) + .current_dir(&tmpdir) + .output() + .await + .wrap_err("spawning cargo init")?; + if !init.status.success() { + bail!("init failed: {}", String::from_utf8(init.stderr)?); + } - let init = Command::new("cargo") - .args(["init", "--lib", "--name", "target-test"]) - .current_dir(&tmpdir) - .output() - .await - .wrap_err("spawning cargo init")?; - if !init.status.success() { - bail!("init failed: {}", String::from_utf8(init.stderr)?); - } + let librs = tmpdir.join("src").join("lib.rs"); + std::fs::write(&librs, "#![no_std]\n") + .wrap_err_with(|| format!("writing to {}", librs.display()))?; - let librs = tmpdir.join("src").join("lib.rs"); - std::fs::write(&librs, "#![no_std]\n") - .wrap_err_with(|| format!("writing to {}", librs.display()))?; - - let output = Command::new("cargo") - .arg(format!("+{toolchain}")) - .args(["build", "-Zbuild-std=core", "--release"]) - .args(["--target", target]) - .current_dir(&tmpdir) - .output() - .await - .wrap_err("spawning cargo build")?; + Command::new("cargo") + .arg(format!("+{toolchain}")) + .args(["build", "-Zbuild-std=core", "--release"]) + .args(["--target", target]) + .current_dir(&tmpdir) + .output() + .await + .wrap_err("spawning cargo build")? 
+ } + BuildMode::MiriStd => Command::new("cargo") + .arg(format!("+{toolchain}")) + .args(["miri", "setup"]) + .args(["--target", target]) + .current_dir(&tmpdir) + .env("MIRI_SYSROOT", tmpdir) + .output() + .await + .wrap_err("spawning cargo miri setup")?, + }; let stderr = String::from_utf8(output.stderr).wrap_err("cargo stderr utf8")?; diff --git a/src/db.rs b/src/db.rs index 8ed5319..f88244a 100644 --- a/src/db.rs +++ b/src/db.rs @@ -1,6 +1,9 @@ use std::{fmt::Display, str::FromStr}; -use color_eyre::{eyre::Context, Result}; +use color_eyre::{ + eyre::{bail, Context}, + Result, +}; use serde::{Deserialize, Serialize}; use sqlx::{migrate::Migrator, sqlite::SqliteConnectOptions, Pool, Sqlite}; @@ -11,11 +14,31 @@ pub struct Db { pub static MIGRATOR: Migrator = sqlx::migrate!(); +#[derive(Debug, Clone, Copy, sqlx::Type, Serialize, Deserialize, PartialEq, Eq, Hash)] +#[sqlx(rename_all = "kebab-case")] +#[serde(rename_all = "kebab-case")] +pub enum BuildMode { + /// `-Zbuild-std=core` + Core, + /// `cargo miri setup` + MiriStd, +} + +impl Display for BuildMode { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Core => f.write_str("core"), + Self::MiriStd => f.write_str("miri-std"), + } + } +} + #[derive(sqlx::FromRow, Serialize, Deserialize)] pub struct BuildInfo { pub nightly: String, pub target: String, pub status: Status, + pub mode: BuildMode, } #[derive(Clone, sqlx::FromRow, Serialize, Deserialize)] @@ -24,6 +47,7 @@ pub struct FullBuildInfo { pub target: String, pub status: Status, pub stderr: String, + pub mode: BuildMode, } #[derive(Debug, PartialEq, Clone, Copy, sqlx::Type, Serialize, Deserialize)] @@ -43,9 +67,10 @@ impl Display for Status { } } -#[derive(sqlx::FromRow)] -struct FinishedNightly { - nightly: String, +#[derive(sqlx::FromRow, Debug, PartialEq, Eq, Hash)] +pub struct FinishedNightly { + pub nightly: String, + pub mode: BuildMode, } impl Db { @@ -62,12 +87,13 @@ impl Db { pub async fn insert(&self, info: 
FullBuildInfo) -> Result<()> { sqlx::query( - "INSERT INTO build_info (nightly, target, status, stderr) VALUES (?, ?, ?, ?);", + "INSERT INTO build_info (nightly, target, status, stderr, mode) VALUES (?, ?, ?, ?, ?);", ) .bind(info.nightly) .bind(info.target) .bind(info.status) .bind(info.stderr) + .bind(info.mode) .execute(&self.conn) .await .wrap_err("inserting build info into database")?; @@ -75,7 +101,7 @@ impl Db { } pub async fn build_status(&self) -> Result> { - sqlx::query_as::<_, BuildInfo>("SELECT nightly, target, status FROM build_info") + sqlx::query_as::<_, BuildInfo>("SELECT nightly, target, status, mode FROM build_info") .fetch_all(&self.conn) .await .wrap_err("getting build status from DB") @@ -85,43 +111,52 @@ impl Db { &self, nightly: &str, target: &str, + mode: BuildMode, ) -> Result> { let result = sqlx::query_as::<_, FullBuildInfo>( - "SELECT nightly, target, status, stderr FROM build_info - WHERE nightly = ? AND target = ?", + "SELECT nightly, target, status, stderr, mode FROM build_info + WHERE nightly = ? AND target = ? 
AND mode = ?", ) .bind(nightly) .bind(target) + .bind(mode) .fetch_all(&self.conn) .await .wrap_err("getting build status from DB")?; Ok(result.first().cloned()) } - pub async fn finished_nightlies(&self) -> Result> { - let result = sqlx::query_as::<_, FinishedNightly>("SELECT nightly from finished_nightly") - .fetch_all(&self.conn) - .await - .wrap_err("fetching fnished nightlies")?; + pub async fn finished_nightlies(&self) -> Result> { + let result = + sqlx::query_as::<_, FinishedNightly>("SELECT nightly, mode from finished_nightly") + .fetch_all(&self.conn) + .await + .wrap_err("fetching finished nightlies")?; - Ok(result.into_iter().map(|nightly| nightly.nightly).collect()) + Ok(result) } - pub async fn is_nightly_finished(&self, nightly: &str) -> Result { + pub async fn is_nightly_finished(&self, nightly: &str, mode: BuildMode) -> Result { let result = sqlx::query_as::<_, FinishedNightly>( - "SELECT nightly from finished_nightly WHERE nightly = ?", + "SELECT nightly, mode from finished_nightly WHERE nightly = ? 
AND mode = ?", ) .bind(nightly) + .bind(mode) .fetch_all(&self.conn) .await - .wrap_err("fetching fnished nightlies")?; + .wrap_err("checking whether a nightly is finished")?; + + if result.len() > 1 { + bail!("found more than one result for {nightly} {mode}"); + } Ok(result.len() == 1) } - pub async fn finish_nightly(&self, nightly: &str) -> Result<()> { - sqlx::query("INSERT INTO finished_nightly (nightly) VALUES (?)") + pub async fn finish_nightly(&self, nightly: &str, mode: BuildMode) -> Result<()> { + sqlx::query("INSERT INTO finished_nightly (nightly, mode) VALUES (?, ?)") .bind(nightly) + .bind(mode) .execute(&self.conn) .await .wrap_err("inserting finished nightly")?; diff --git a/src/main.rs b/src/main.rs index 853a8f6..5a46bc3 100644 --- a/src/main.rs +++ b/src/main.rs @@ -5,12 +5,15 @@ mod web; use color_eyre::{eyre::WrapErr, Result}; use db::Db; +use tracing_subscriber::EnvFilter; const VERSION: &str = env!("GIT_COMMIT"); #[tokio::main] async fn main() -> Result<()> { - tracing_subscriber::fmt().init(); + tracing_subscriber::fmt() + .with_env_filter(EnvFilter::try_from_default_env().unwrap_or(EnvFilter::new("info"))) + .init(); let db = Db::open(&std::env::var("DB_PATH").unwrap_or("db.sqlite".into())).await?; db::MIGRATOR diff --git a/src/nightlies.rs b/src/nightlies.rs index bc93bdb..ee6c7eb 100644 --- a/src/nightlies.rs +++ b/src/nightlies.rs @@ -5,6 +5,8 @@ use color_eyre::eyre::Context; use color_eyre::Result; use tracing::debug; +use crate::db::{BuildMode, FinishedNightly}; + const EARLIEST_CUTOFF_DATE: &str = "2023-01-01"; /// All nightlies that exist. 
@@ -29,13 +31,22 @@ impl Nightlies { Ok(Self { all }) } - pub fn select_latest_to_build(&self, already_finished: &[String]) -> Option { + pub fn select_latest_to_build( + &self, + already_finished: &[FinishedNightly], + ) -> Option<(String, BuildMode)> { let already_finished = HashSet::<_, RandomState>::from_iter(already_finished.iter()); self.all .iter() - .find(|nightly| !already_finished.contains(nightly)) - .cloned() + .flat_map(|nightly| [(nightly, BuildMode::Core), (nightly, BuildMode::MiriStd)]) + .find(|(nightly, mode)| { + !already_finished.contains(&FinishedNightly { + nightly: (*nightly).to_owned(), + mode: *mode, + }) + }) + .map(|(nightly, mode)| (nightly.clone(), mode)) } } diff --git a/src/web.rs b/src/web.rs index ed32751..2fad208 100644 --- a/src/web.rs +++ b/src/web.rs @@ -9,7 +9,7 @@ use color_eyre::{eyre::Context, Result}; use serde::{Deserialize, Serialize}; use tracing::{error, info}; -use crate::db::Db; +use crate::db::{BuildMode, Db}; #[derive(Clone)] pub struct AppState { @@ -21,6 +21,7 @@ pub async fn webserver(db: Db) -> Result<()> { .route("/", get(root)) .route("/build", get(build)) .route("/index.css", get(index_css)) + .route("/index.js", get(index_js)) .route("/target-state", get(target_state)) .route("/trigger-build", post(trigger_build)) .with_state(AppState { db }); @@ -31,20 +32,21 @@ pub async fn webserver(db: Db) -> Result<()> { axum::serve(listener, app).await.wrap_err("failed to serve") } -async fn root() -> impl IntoResponse { - Html(include_str!("../static/index.html").replace("{{version}}", crate::VERSION)) -} - #[derive(Deserialize)] struct BuildQuery { nightly: String, target: String, + mode: Option, } async fn build(State(state): State, Query(query): Query) -> Response { match state .db - .build_status_full(&query.nightly, &query.target) + .build_status_full( + &query.nightly, + &query.target, + query.mode.unwrap_or(BuildMode::Core), + ) .await { Ok(Some(build)) => { @@ -65,6 +67,9 @@ async fn build(State(state): State, 
Query(query): Query) - } } +async fn root() -> impl IntoResponse { + Html(include_str!("../static/index.html").replace("{{version}}", crate::VERSION)) +} async fn index_css() -> impl IntoResponse { ( [( @@ -74,6 +79,15 @@ async fn index_css() -> impl IntoResponse { include_str!("../static/index.css"), ) } +async fn index_js() -> impl IntoResponse { + ( + [( + axum::http::header::CONTENT_TYPE, + axum::http::HeaderValue::from_static("text/javascript"), + )], + include_str!("../static/index.js"), + ) +} async fn target_state(State(state): State) -> impl IntoResponse { state.db.build_status().await.map(Json).map_err(|err| { diff --git a/static/index.html b/static/index.html index acf2ca6..244dc5b 100644 --- a/static/index.html +++ b/static/index.html @@ -4,19 +4,15 @@ Does it build? - +

Does it build?

+ +

Core Build

Builds every target with:

cargo build --release -Zbuild-std=core

-

Does therefore currently not check for the std build status.

- +

This checks that codegen/linking of core works, but does not check whether std builds.

@@ -27,6 +23,23 @@ loading... + + +

Miri Std Build

+

Builds every target with: +

cargo miri setup

+

This checks that std builds (on targets that have it) but does not check whether codegen/linking works.

+ + + + + + + + + +
loading...
+ - diff --git a/static/index.js b/static/index.js new file mode 100644 index 0000000..e26b452 --- /dev/null +++ b/static/index.js @@ -0,0 +1,158 @@ +class Table { + constructor(data, tableElemId, filterElemId, filterFailedElemId) { + this.data = data; + this.elem = document.getElementById(tableElemId); + + document.getElementById(filterElemId).addEventListener("input", (e) => { + this.filter.search = e.target.value; + this.render(); + }); + document + .getElementById(filterFailedElemId) + .addEventListener("input", (e) => { + this.filter.filterFailed = e.target.checked; + this.render(); + }); + + this.filter = { + search: "", + filterFailed: false, + }; + } + + update(data) { + this.data = data; + } + + render() { + const allTargets = new Set(); + const allNightlies = new Set(); + + const nightlyInfos = new Map(); + + // Targets that have, at some point, errored + const targetsWithErrors = new Set(); + + // Whether a nightly is completely broken. + // These are still filtered out when filter failed is selected. + const isNightlyBroken = new Map(); + + // The first pass over the data, to find nightlies that are broken. + for (const info of this.data) { + if (!isNightlyBroken.has(info.nightly)) { + // Assume that a nightly is broken until proven otherwise. + isNightlyBroken.set(info.nightly, true); + } + if (info.status == "pass") { + // This nightly has built something, so it's clearly not broken :). + isNightlyBroken.set(info.nightly, false); + } + } + + // Second pass over the data, group by nightly and prepare data for filter. 
+ for (const info of this.data) { + allNightlies.add(info.nightly); + + if (!info.target.includes(this.filter.search)) { + continue; + } + + if (info.status === "error" && !isNightlyBroken.get(info.nightly)) { + targetsWithErrors.add(info.target); + } + + allTargets.add(info.target); + if (!nightlyInfos.has(info.nightly)) { + nightlyInfos.set(info.nightly, new Map()); + } + nightlyInfos.get(info.nightly).set(info.target, info); + } + + const nightlies = Array.from(allNightlies); + nightlies.sort(); + nightlies.reverse(); + const targets = Array.from(allTargets); + targets.sort(); + + const header = document.createElement("tr"); + const headerNightly = document.createElement("th"); + headerNightly.innerText = "nightly"; + header.appendChild(headerNightly); + targets.forEach((target) => { + if (this.filter.filterFailed && !targetsWithErrors.has(target)) { + return; + } + const elem = document.createElement("th"); + elem.innerText = target; + header.appendChild(elem); + }); + + const rows = nightlies.map((nightly) => { + const tr = document.createElement("tr"); + + const nightlyCol = document.createElement("td"); + nightlyCol.innerText = nightly; + tr.appendChild(nightlyCol); + + const info = nightlyInfos.get(nightly) ?? 
new Map(); + + for (const target of targets) { + if (this.filter.filterFailed && !targetsWithErrors.has(target)) { + continue; + } + + const td = document.createElement("td"); + const targetInfo = info.get(target); + + if (targetInfo) { + const a = document.createElement("a"); + a.classList.add("build-info-a"); + a.href = `build?nightly=${encodeURIComponent( + nightly + )}&target=${encodeURIComponent(target)}&mode=${encodeURIComponent( + targetInfo.mode + )}`; + a.innerText = targetInfo.status; + td.appendChild(a); + td.classList.add(targetInfo.status); + } else { + td.innerText = ""; + td.classList.add("missing"); + } + tr.appendChild(td); + } + + return tr; + }); + this.elem.replaceChildren(header, ...rows); + } +} + +const coreTable = new Table( + [], + "target-state", + "target-filter", + "target-filter-failed" +); +const miriTable = new Table( + [], + "target-state-miri", + "target-filter-miri", + "target-filter-failed-miri" +); + +function fetchTargets() { + fetch("target-state") + .then((body) => body.json()) + .then((body) => { + const core = body.filter((info) => info.mode === "core"); + const miri = body.filter((info) => info.mode === "miri-std"); + coreTable.update(core); + miriTable.update(miri); + coreTable.render(); + miriTable.render(); + }); +} + +// Initial fetch +fetchTargets();