Skip to content

Commit

Permalink
wip
Browse files Browse the repository at this point in the history
  • Loading branch information
syphar committed Dec 6, 2024
1 parent a3d07d3 commit 210e150
Showing 1 changed file with 126 additions and 31 deletions.
157 changes: 126 additions & 31 deletions src/web/build_details.rs
Original file line number Diff line number Diff line change
Expand Up @@ -93,8 +93,13 @@ pub(crate) async fn build_details_handler(
.ok_or(AxumNope::BuildNotFound)?;

let (output, all_log_filenames, current_filename) = if let Some(output) = row.output {
// legacy case, for old builds the build log was stored in the database.
(output, Vec::new(), None)
} else {
// for newer builds we have the build logs stored in S3.
// For a long time we stored the log only for one target; later we started
// storing the logs for the other targets too. In any case, all the logfiles
// are put into a folder we can just query.
let prefix = format!("build-logs/{}/", id);
let all_log_filenames: Vec<_> = storage
.list_prefix(&prefix) // the result from S3 is ordered by key
Expand All @@ -107,42 +112,34 @@ pub(crate) async fn build_details_handler(
.try_collect()
.await?;

let fetch_file = |filename: String| async {
let file = File::from_path(&storage, &format!("{prefix}{filename}"), &config).await?;
Ok::<_, anyhow::Error>(String::from_utf8(file.0.content).context("non utf8")?)
let current_filename = if let Some(filename) = params.filename {
// if we have a given filename in the URL, we use that one.
Some(filename)
} else if let Some(default_target) = row.default_target {
// without a filename in the URL, we try to show the build log
// for the default target, if we have one.
let wanted_filename = format!("{default_target}.txt");
if all_log_filenames.contains(&wanted_filename) {
Some(wanted_filename)
} else {
None
}
} else {
// this can only happen when `releases.default_target` is NULL,
// which is the case for in-progress builds or builds which errored
// before we could determine the target.
// For the "error" case we show `row.errors`, which should contain what we need to see.
None
};

let (filename, file_content) = if let Some(filename) = params.filename {
(Some(filename.clone()), fetch_file(filename.clone()).await?)
} else if let Some(default_target) = row.default_target {
let filename = format!("{default_target}.txt");
(
Some(filename),
match fetch_file(filename.clone()).await {
Ok(content) => content,
Err(_err) => "".to_string(),
},
)
let file_content = if let Some(ref filename) = current_filename {
let file = File::from_path(&storage, &format!("{prefix}{filename}"), &config).await?;
String::from_utf8(file.0.content).context("non utf8")?
} else {
(None, "".into())
"".to_string()
};

// .or(row.default_target.map(|target| format!("{}.txt", target)))
// {
// let path = format!("{prefix}{current_filename}");
// let file = File::from_path(&storage, &path, &config).await?;
// (
// String::from_utf8(file.0.content).context("non utf8")?,
// Some(current_filename),
// )
// } else {
// // this can only happen when `releases.default_target` is NULL,
// // which is the case for in-progress builds or builds which errored
// // before we could determine the target.
// // For the "error" case we show `row.errors`, which should contain what we need to see.
// ("".into(), Vec::new(), None)
// }
(file_content, all_log_filenames, filename)
(file_content, all_log_filenames, current_filename)
};

Ok(BuildDetailsPage {
Expand Down Expand Up @@ -216,6 +213,44 @@ mod tests {
});
}

#[test]
fn test_partial_build_result_plus_default_target_from_previous_build() {
async_wrapper(|env| async move {
let mut conn = env.async_db().await.async_conn().await;
let (release_id, build_id) = fake_release_that_failed_before_build(
&mut conn,
"foo",
"0.1.0",
"some random error",
)
.await?;

sqlx::query!(
"UPDATE releases SET default_target = 'x86_64-unknown-linux-gnu' WHERE id = $1",
release_id.0
)
.execute(&mut *conn)
.await?;

let page = kuchikiki::parse_html().one(
env.web_app()
.await
.get(&format!("/crate/foo/0.1.0/builds/{build_id}"))
.await?
.error_for_status()?
.text()
.await?,
);

let info_text = page.select("pre").unwrap().next().unwrap().text_contents();

assert!(info_text.contains("# pre-build errors"), "{}", info_text);
assert!(info_text.contains("some random error"), "{}", info_text);

Ok(())
});
}

#[test]
fn db_build_logs() {
async_wrapper(|env| async move {
Expand Down Expand Up @@ -308,6 +343,66 @@ mod tests {
});
}

// #[test]
// fn build_details_for_build_with_pre_build_errors() {
// async_wrapper(|env| async move {
// let mut conn = env.async_db().await.async_conn().await;
// let (release_id, build_id) =
// fake_release_that_failed_before_build(&mut conn, "foo", "0.1.0", "some error")
// .await?;

// let web = env.web_app().await;

// let page = kuchikiki::parse_html().one(
// web.get("/crate/foo/0.1.0/builds")
// .await?
// .error_for_status()?
// .text()
// .await?,
// );

// let node = page.select("ul > li a.release").unwrap().next().unwrap();
// let build_url = {
// let attrs = node.attributes.borrow();
// attrs.get("href").unwrap().to_owned()
// };

// let page = kuchikiki::parse_html().one(
// web.get(&build_url)
// .await?
// .error_for_status()?
// .text()
// .await?,
// );

// let log = page.select("pre").unwrap().next().unwrap().text_contents();

// assert!(log.contains("some error"));

// let all_log_links = get_all_log_links(&page);
// assert_eq!(
// all_log_links,
// vec![(
// "x86_64-unknown-linux-gnu.txt".into(),
// format!("{build_url}/x86_64-unknown-linux-gnu.txt")
// )]
// );

// // now get the log with the specific filename in the URL
// let log = kuchikiki::parse_html()
// .one(web.get(&all_log_links[0].1).await?.text().await?)
// .select("pre")
// .unwrap()
// .next()
// .unwrap()
// .text_contents();

// assert!(log.contains("some error"));

// Ok(())
// });
// }

#[test]
fn s3_build_logs_multiple_targets() {
async_wrapper(|env| async move {
Expand Down

0 comments on commit 210e150

Please sign in to comment.