make it a webapp

nora 2024-09-07 22:43:04 +02:00
parent 11db12e074
commit b2ab2ff73e
12 changed files with 3082 additions and 155 deletions

src/build.rs (new file, 252 lines)

@@ -0,0 +1,252 @@
use std::{
fmt::{Debug, Display},
num::NonZero,
path::Path,
time::Duration,
};
use color_eyre::{
eyre::{bail, Context},
Result,
};
use futures::StreamExt;
use tokio::process::Command;
use tracing::{debug, info};
use crate::{
db::{Db, FullBuildInfo, Status},
nightlies::Nightlies,
};
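/// A rustup toolchain name of the form `nightly-YYYY-MM-DD`, constructed from a nightly date.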
pub struct Toolchain(String);
impl Toolchain {
pub fn from_nightly(nightly: &str) -> Self {
Self(format!("nightly-{nightly}"))
}
}
impl Debug for Toolchain {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(&self.0)
}
}
impl Display for Toolchain {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(&self.0)
}
}
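/// Long-running task: repeatedly picks the newest nightly that has not been built yet,
/// builds every target for it, and sleeps for an hour when there is nothing new to do.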
pub async fn background_builder(db: Db) -> Result<()> {
loop {
let nightlies = Nightlies::fetch().await.wrap_err("fetching nightlies")?;
let already_finished = db
.finished_nightlies()
.await
.wrap_err("fetching finished nightlies")?;
let next = nightlies.select_latest_to_build(&already_finished);
match next {
Some(nightly) => {
info!(%nightly, "Building next nightly");
build_every_target_for_toolchain(&db, &nightly)
.await
.wrap_err_with(|| format!("building targets for toolchain {nightly}"))?;
}
None => {
info!("No new nightly, waiting for an hour to try again");
                tokio::time::sleep(Duration::from_secs(60 * 60)).await;
}
}
}
}
async fn targets_for_toolchain(toolchain: &Toolchain) -> Result<Vec<String>> {
let output = Command::new("rustc")
.arg(format!("+{toolchain}"))
.arg("--print")
.arg("target-list")
.output()
.await
.wrap_err("failed to spawn rustc")?;
if !output.status.success() {
bail!(
"failed to get target-list from rustc: {:?}",
String::from_utf8(output.stderr)
);
}
Ok(String::from_utf8(output.stdout)
.wrap_err("rustc target-list is invalid UTF-8")?
.split_whitespace()
.map(ToOwned::to_owned)
.collect())
}
#[tracing::instrument]
async fn install_toolchain(toolchain: &Toolchain) -> Result<()> {
info!(%toolchain, "Installing toolchain");
let result = Command::new("rustup")
.arg("toolchain")
.arg("install")
.arg(&toolchain.0)
.arg("--profile")
.arg("minimal")
.output()
.await
.wrap_err("failed to spawn rustup")?;
if !result.status.success() {
bail!("rustup failed: {:?}", String::from_utf8(result.stderr));
}
let result = Command::new("rustup")
.arg("component")
.arg("add")
.arg("rust-src")
.arg("--toolchain")
.arg(&toolchain.0)
.output()
.await
.wrap_err("failed to spawn rustup")?;
if !result.status.success() {
bail!("rustup failed: {:?}", String::from_utf8(result.stderr));
}
Ok(())
}
#[tracing::instrument]
async fn uninstall_toolchain(toolchain: &Toolchain) -> Result<()> {
info!(%toolchain, "Uninstalling toolchain");
let result = Command::new("rustup")
.arg("toolchain")
.arg("remove")
.arg(&toolchain.0)
.output()
.await
.wrap_err("failed to spawn rustup")?;
if !result.status.success() {
bail!(
"rustup toolchain remove failed: {:?}",
String::from_utf8(result.stderr)
);
}
Ok(())
}
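/// Installs the given nightly toolchain, builds `core` for every target concurrently,
/// records the results, marks the nightly as finished, and uninstalls the toolchain again.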
pub async fn build_every_target_for_toolchain(db: &Db, nightly: &str) -> Result<()> {
if db.is_nightly_finished(nightly).await? {
debug!("Nightly is already finished, not trying again");
return Ok(());
}
let toolchain = Toolchain::from_nightly(nightly);
install_toolchain(&toolchain).await?;
let targets = targets_for_toolchain(&toolchain)
.await
.wrap_err("failed to get targets")?;
    // Use at most half of the available cores, but always at least one so that
    // `buffer_unordered` below can make progress even on a single-core machine.
    let concurrent = (std::thread::available_parallelism()
        .unwrap_or(NonZero::new(2).unwrap())
        .get()
        / 2)
        .max(1);
    // Build all targets concurrently; the separate sequential pass that used to follow
    // here was redundant, since every result is already recorded by this pass.
    let results = futures::stream::iter(
        targets
            .iter()
            .map(|target| async move { (target, build_single_target(db, nightly, target).await) }),
    )
    .buffer_unordered(concurrent)
    .collect::<Vec<_>>()
    .await;
    for (target, result) in results {
        result.wrap_err_with(|| format!("building target {target} for toolchain {toolchain}"))?;
    }
// Mark it as finished, so we never have to build it again.
db.finish_nightly(nightly).await?;
uninstall_toolchain(&toolchain).await?;
Ok(())
}
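/// Builds `core` for one target on the given nightly and records the outcome,
/// skipping targets that already have a result in the database.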
#[tracing::instrument(skip(db))]
async fn build_single_target(db: &Db, nightly: &str, target: &str) -> Result<()> {
let existing = db
.build_status_full(nightly, target)
.await
.wrap_err("getting existing build")?;
if existing.is_some() {
debug!("Build already exists");
return Ok(());
}
info!("Building target");
let tmpdir = tempfile::tempdir().wrap_err("creating temporary directory")?;
let result = build_target(tmpdir.path(), &Toolchain::from_nightly(nightly), target)
.await
.wrap_err("running build")?;
db.insert(FullBuildInfo {
nightly: nightly.into(),
target: target.into(),
status: result.status,
stderr: result.stderr,
})
.await?;
Ok(())
}
struct BuildResult {
status: Status,
stderr: String,
}
/// Build a target core in a temporary directory and see whether it passes or not.
async fn build_target(tmpdir: &Path, toolchain: &Toolchain, target: &str) -> Result<BuildResult> {
std::fs::create_dir_all(&tmpdir).wrap_err("creating target src dir")?;
let init = Command::new("cargo")
.args(["init", "--lib", "--name", "target-test"])
.current_dir(&tmpdir)
.output()
.await
.wrap_err("spawning cargo init")?;
if !init.status.success() {
bail!("init failed: {}", String::from_utf8(init.stderr)?);
}
let librs = tmpdir.join("src").join("lib.rs");
std::fs::write(&librs, "#![no_std]\n")
.wrap_err_with(|| format!("writing to {}", librs.display()))?;
let output = Command::new("cargo")
.arg(format!("+{toolchain}"))
.args(["build", "-Zbuild-std=core", "--release"])
.args(["--target", target])
.current_dir(&tmpdir)
.output()
.await
.wrap_err("spawning cargo build")?;
let stderr = String::from_utf8(output.stderr).wrap_err("cargo stderr utf8")?;
let status = if output.status.success() {
Status::Pass
} else {
Status::Error
};
info!("Finished build");
Ok(BuildResult { status, stderr })
}

src/db.rs (new file, 130 lines)

@@ -0,0 +1,130 @@
use std::{fmt::Display, str::FromStr};
use color_eyre::{eyre::Context, Result};
use serde::{Deserialize, Serialize};
use sqlx::{migrate::Migrator, sqlite::SqliteConnectOptions, Pool, Sqlite};
#[derive(Clone)]
pub struct Db {
pub conn: Pool<Sqlite>,
}
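// The migration files under `migrations/` are not shown in this commit view; a schema
// consistent with the queries below would look roughly like this (an assumption, not the
// actual migration):
//
//   CREATE TABLE build_info (
//       nightly TEXT NOT NULL,
//       target  TEXT NOT NULL,
//       status  TEXT NOT NULL,
//       stderr  TEXT NOT NULL
//   );
//   CREATE TABLE finished_nightly (nightly TEXT NOT NULL);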
pub static MIGRATOR: Migrator = sqlx::migrate!();
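/// A build result without the captured stderr; this is the shape served by the
/// `/target-state` JSON endpoint.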
#[derive(sqlx::FromRow, Serialize, Deserialize)]
pub struct BuildInfo {
pub nightly: String,
pub target: String,
pub status: Status,
}
#[derive(Clone, sqlx::FromRow, Serialize, Deserialize)]
pub struct FullBuildInfo {
pub nightly: String,
pub target: String,
pub status: Status,
pub stderr: String,
}
#[derive(Debug, PartialEq, Clone, Copy, sqlx::Type, Serialize, Deserialize)]
#[sqlx(rename_all = "snake_case")]
#[serde(rename_all = "lowercase")]
pub enum Status {
Error,
Pass,
}
impl Display for Status {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Error => f.write_str("error"),
Self::Pass => f.write_str("pass"),
}
}
}
#[derive(sqlx::FromRow)]
struct FinishedNightly {
nightly: String,
}
impl Db {
pub async fn open(path: &str) -> Result<Self> {
let db_opts = SqliteConnectOptions::from_str(path)
.wrap_err("parsing database URL")?
.create_if_missing(true);
let conn = Pool::connect_with(db_opts)
.await
.wrap_err_with(|| format!("opening db from `{}`", path))?;
Ok(Self { conn })
}
pub async fn insert(&self, info: FullBuildInfo) -> Result<()> {
sqlx::query(
"INSERT INTO build_info (nightly, target, status, stderr) VALUES (?, ?, ?, ?);",
)
.bind(info.nightly)
.bind(info.target)
.bind(info.status)
.bind(info.stderr)
.execute(&self.conn)
.await
.wrap_err("inserting build info into database")?;
Ok(())
}
pub async fn build_status(&self) -> Result<Vec<BuildInfo>> {
sqlx::query_as::<_, BuildInfo>("SELECT nightly, target, status FROM build_info")
.fetch_all(&self.conn)
.await
.wrap_err("getting build status from DB")
}
pub async fn build_status_full(
&self,
nightly: &str,
target: &str,
) -> Result<Option<FullBuildInfo>> {
let result = sqlx::query_as::<_, FullBuildInfo>(
"SELECT nightly, target, status, stderr FROM build_info
WHERE nightly = ? AND target = ?",
)
.bind(nightly)
.bind(target)
.fetch_all(&self.conn)
.await
.wrap_err("getting build status from DB")?;
Ok(result.first().cloned())
}
pub async fn finished_nightlies(&self) -> Result<Vec<String>> {
        let result = sqlx::query_as::<_, FinishedNightly>("SELECT nightly FROM finished_nightly")
            .fetch_all(&self.conn)
            .await
            .wrap_err("fetching finished nightlies")?;
Ok(result.into_iter().map(|nightly| nightly.nightly).collect())
}
pub async fn is_nightly_finished(&self, nightly: &str) -> Result<bool> {
        let result = sqlx::query_as::<_, FinishedNightly>(
            "SELECT nightly FROM finished_nightly WHERE nightly = ?",
        )
        .bind(nightly)
        .fetch_all(&self.conn)
        .await
        .wrap_err("fetching finished nightlies")?;
        Ok(!result.is_empty())
}
pub async fn finish_nightly(&self, nightly: &str) -> Result<()> {
sqlx::query("INSERT INTO finished_nightly (nightly) VALUES (?)")
.bind(nightly)
.execute(&self.conn)
.await
.wrap_err("inserting finished nightly")?;
Ok(())
}
}

src/main.rs (changed)

@@ -1,151 +1,30 @@
-use std::{collections::BTreeMap, num::NonZeroUsize, path::Path, process::Command, sync::Mutex};
-
-use color_eyre::{
-    eyre::{bail, WrapErr},
-    Result,
-};
-
-fn main() -> Result<()> {
-    let targets = String::from_utf8(
-        Command::new("rustc")
-            .arg("--print")
-            .arg("target-list")
-            .output()?
-            .stdout,
-    )?;
-
-    let year = 2024;
-    let current_month = 6_u32;
-    let current_day = 12_u32;
-
-    let dates = (current_month.saturating_sub(5)..=current_month).flat_map(|month| {
-        if month == current_month && current_day <= 16 {
-            vec![format!("{year}-{month:0>2}-01")]
-        } else {
-            vec![
-                format!("{year}-{month:0>2}-01"),
-                format!("{year}-{month:0>2}-15"),
-            ]
-        }
-    });
-
-    for date in dates {
-        println!("Doing date {date}");
-        let toolchain = format!("nightly-{date}");
-
-        let result = Command::new("rustup")
-            .arg("toolchain")
-            .arg("install")
-            .arg(&toolchain)
-            .arg("--profile")
-            .arg("minimal")
-            .spawn()?
-            .wait()?;
-        if !result.success() {
-            bail!("rustup failed");
-        }
-
-        let result = Command::new("rustup")
-            .arg("component")
-            .arg("add")
-            .arg("rust-src")
-            .arg("--toolchain")
-            .arg(&toolchain)
-            .spawn()?
-            .wait()?;
-        if !result.success() {
-            bail!("rustup failed");
-        }
-
-        let queue = targets.lines().collect::<Vec<&str>>();
-        let queue = &Mutex::new(queue);
-
-        std::fs::create_dir_all("targets")?;
-
-        let failures = Mutex::new(BTreeMap::new());
-
-        let targets = Path::new("targets").join(&toolchain);
-
-        std::thread::scope(|s| -> Result<()> {
-            let mut handles = vec![];
-            for _ in 0..std::thread::available_parallelism()
-                .unwrap_or(NonZeroUsize::new(1).unwrap())
-                .get()
-            {
-                let handle = s.spawn(|| -> Result<()> {
-                    loop {
-                        let target = {
-                            let mut queue = queue.lock().unwrap();
-                            let Some(next) = queue.pop() else {
-                                return Ok(());
-                            };
-                            println!("remaining: {:>3} - {next}", queue.len());
-                            next
-                        };
-                        (|| -> Result<()> {
-                            let target_dir = targets.join(target);
-                            std::fs::create_dir_all(&target_dir)
-                                .wrap_err("creating target src dir")?;
-                            if !target_dir.join("Cargo.toml").exists() {
-                                let init = Command::new("cargo")
-                                    .args(["init", "--lib", "--name", "target-test"])
-                                    .current_dir(&target_dir)
-                                    .output()
-                                    .wrap_err("spawning cargo init")?;
-                                if !init.status.success() {
-                                    bail!("init failed: {}", String::from_utf8(init.stderr)?);
-                                }
-                            }
-                            let librs = target_dir.join("src").join("lib.rs");
-                            std::fs::write(&librs, "#![no_std]\n")
-                                .wrap_err_with(|| format!("writing to {}", librs.display()))?;
-                            let output = Command::new("cargo")
-                                .arg(format!("+{toolchain}"))
-                                .args(["build", "-Zbuild-std=core", "--release"])
-                                .args(["--target", target])
-                                .current_dir(&target_dir)
-                                .output()
-                                .wrap_err("spawning cargo build")?;
-                            if !output.status.success() {
-                                println!("failure: {target}");
-                                let stderr = String::from_utf8(output.stderr)
-                                    .wrap_err("cargo stderr utf8")?;
-                                failures.lock().unwrap().insert(target.to_owned(), stderr);
-                            }
-                            Ok(())
-                        })()
-                        .wrap_err_with(|| format!("while checking {target}"))?;
-                    }
-                });
-                handles.push(handle);
-            }
-            for handle in handles {
-                handle.join().unwrap()?;
-            }
-            Ok(())
-        })?;
-
-        std::fs::create_dir_all("results").wrap_err("creating results directory")?;
-        std::fs::write(
-            Path::new("results").join(date),
-            failures
-                .lock()
-                .unwrap()
-                .keys()
-                .cloned()
-                .collect::<Vec<_>>()
-                .join(","),
-        )
-        .wrap_err("writing results file")?;
-
-        for (target, stderr) in failures.into_inner().unwrap() {
-            println!("-----------------\nBROKEN TARGET: {target}\n{stderr}\n\n");
-        }
-    }
-    Ok(())
-}
+mod build;
+mod db;
+mod nightlies;
+mod web;
+
+use color_eyre::{eyre::WrapErr, Result};
+use db::Db;
+
+#[tokio::main]
+async fn main() -> Result<()> {
+    tracing_subscriber::fmt().init();
+
+    let db = Db::open(&std::env::var("DB_PATH").unwrap_or("db.sqlite".into())).await?;
+    db::MIGRATOR
+        .run(&db.conn)
+        .await
+        .wrap_err("running migrations")?;
+
+    let builder = build::background_builder(db.clone());
+    let server = web::webserver(db);
+
+    tokio::select! {
+        result = builder => {
+            result
+        }
+        result = server => {
+            result
+        }
+    }
+}

src/nightlies.rs (new file, 66 lines)

@@ -0,0 +1,66 @@
use std::collections::HashSet;
use std::hash::RandomState;
use color_eyre::eyre::Context;
use color_eyre::Result;
use tracing::debug;
const EARLIEST_CUTOFF_DATE: &str = "2023-01-01";
/// All nightlies that exist.
pub struct Nightlies {
all: Vec<String>,
}
impl Nightlies {
pub async fn fetch() -> Result<Nightlies> {
let manifests = reqwest::get("https://static.rust-lang.org/manifests.txt")
.await
.wrap_err("fetching https://static.rust-lang.org/manifests.txt")?
.text()
.await
.wrap_err("fetching body of https://static.rust-lang.org/manifests.txt")?;
let mut all = nightlies_from_manifest(&manifests)
.into_iter()
.filter(|date| date.as_str() > EARLIEST_CUTOFF_DATE)
.collect::<Vec<_>>();
all.sort_by(|a, b| b.cmp(a));
debug!("Loaded {} nightlies from the manifest", all.len());
Ok(Self { all })
}
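    /// Returns the newest nightly that is not yet in `already_finished`; `all` is sorted newest first.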
pub fn select_latest_to_build(&self, already_finished: &[String]) -> Option<String> {
let already_finished = HashSet::<_, RandomState>::from_iter(already_finished.iter());
self.all
.iter()
.find(|nightly| !already_finished.contains(nightly))
.cloned()
}
}
fn nightlies_from_manifest(manifest: &str) -> Vec<String> {
manifest
.lines()
.filter_map(|line| {
let rest = line.strip_prefix("static.rust-lang.org/dist/")?;
let date = rest.strip_suffix("/channel-rust-nightly.toml")?;
Some(date.to_owned())
})
.collect()
}
#[cfg(test)]
mod tests {
#[test]
fn manifest_parse() {
let test_manifest = "static.rust-lang.org/dist/2024-08-22/channel-rust-nightly.toml
static.rust-lang.org/dist/2024-08-22/channel-rust-1.81.0-beta.toml
static.rust-lang.org/dist/2024-08-22/channel-rust-1.81.0-beta.6.toml
static.rust-lang.org/dist/2024-08-23/channel-rust-nightly.toml";
let nightlies = super::nightlies_from_manifest(&test_manifest);
assert_eq!(nightlies, vec!["2024-08-22", "2024-08-23"]);
}
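    // Illustrative test added for clarity: with the newest nightly already finished,
    // the next-newest one is selected (the dates here are arbitrary examples).
    #[test]
    fn select_latest_skips_finished() {
        let nightlies = super::Nightlies {
            all: vec!["2024-08-23".to_string(), "2024-08-22".to_string()],
        };
        let finished = vec!["2024-08-23".to_string()];
        assert_eq!(
            nightlies.select_latest_to_build(&finished),
            Some("2024-08-22".to_string())
        );
    }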
}

src/web.rs (new file, 103 lines)

@@ -0,0 +1,103 @@
use axum::{
extract::{Query, State},
http::StatusCode,
response::{Html, IntoResponse, Response},
routing::{get, post},
Json, Router,
};
use color_eyre::{eyre::Context, Result};
use serde::{Deserialize, Serialize};
use tracing::{error, info};
use crate::db::Db;
#[derive(Clone)]
pub struct AppState {
pub db: Db,
}
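/// Serves the web frontend on port 3000: the HTML overview, a per-build detail page,
/// the stylesheet, a JSON status endpoint, and a (currently disabled) build trigger.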
pub async fn webserver(db: Db) -> Result<()> {
let app = Router::new()
.route("/", get(root))
.route("/build", get(build))
.route("/index.css", get(index_css))
.route("/target-state", get(target_state))
.route("/trigger-build", post(trigger_build))
.with_state(AppState { db });
info!("Serving website on port 3000");
    let listener = tokio::net::TcpListener::bind("0.0.0.0:3000")
        .await
        .wrap_err("binding to 0.0.0.0:3000")?;
    axum::serve(listener, app).await.wrap_err("failed to serve")
}
async fn root() -> impl IntoResponse {
Html(include_str!("../static/index.html"))
}
#[derive(Deserialize)]
struct BuildQuery {
nightly: String,
target: String,
}
async fn build(State(state): State<AppState>, Query(query): Query<BuildQuery>) -> Response {
match state
.db
.build_status_full(&query.nightly, &query.target)
.await
{
        Ok(Some(build)) => {
            // Minimally HTML-escape interpolated values so compiler output containing
            // `<`, `>`, or `&` (e.g. generic types in error messages) cannot break the markup.
            let escape =
                |s: &str| s.replace('&', "&amp;").replace('<', "&lt;").replace('>', "&gt;");
            let page = include_str!("../static/build.html")
                .replace("{{nightly}}", &escape(&query.nightly))
                .replace("{{target}}", &escape(&query.target))
                .replace("{{stderr}}", &escape(&build.stderr))
                .replace("{{status}}", &build.status.to_string());
            Html(page).into_response()
}
Ok(None) => StatusCode::NOT_FOUND.into_response(),
Err(err) => {
error!(?err, "Error loading target state");
StatusCode::INTERNAL_SERVER_ERROR.into_response()
}
}
}
async fn index_css() -> impl IntoResponse {
(
[(
axum::http::header::CONTENT_TYPE,
axum::http::HeaderValue::from_static("text/css; charset=utf-8"),
)],
include_str!("../static/index.css"),
)
}
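// Returns every recorded build as JSON; illustrative shape (not real data):
//   [{"nightly":"2024-09-06","target":"x86_64-unknown-linux-gnu","status":"pass"}, ...]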
async fn target_state(State(state): State<AppState>) -> impl IntoResponse {
state.db.build_status().await.map(Json).map_err(|err| {
error!(?err, "Error loading target state");
StatusCode::INTERNAL_SERVER_ERROR
})
}
#[derive(Serialize, Deserialize)]
struct TriggerBuildBody {
nightly: String,
}
#[axum::debug_handler]
async fn trigger_build(
State(_state): State<AppState>,
_body: Json<TriggerBuildBody>,
) -> StatusCode {
return StatusCode::BAD_REQUEST;
// tokio::spawn(async move {
// let result = build::build_every_target_for_toolchain(&state.db, &body.nightly).await;
// if let Err(err) = result {
// error!(?err, "Error while building");
// }
// });
//
// StatusCode::ACCEPTED
}
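// The JSON endpoint can be consumed with any HTTP client. Hypothetical client-side sketch,
// not part of this commit (assumes `reqwest` with its `json` feature; `BuildInfo` already
// derives `Deserialize`):
//
//   let builds: Vec<crate::db::BuildInfo> =
//       reqwest::get("http://localhost:3000/target-state").await?.json().await?;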