codegen: Clean up error handling and move some checks to the appropriate place

Unless error handling is done with particular care, in private tools this
approach tends to make debugging easier when a failure occurs.
This commit is contained in:
Taiki Endo
2026-03-21 17:22:01 +09:00
parent 7a6eff0bac
commit 525387f706
3 changed files with 95 additions and 80 deletions

View File

@@ -61,6 +61,18 @@ impl BaseManifest {
if self.platform.is_empty() { if self.platform.is_empty() {
panic!("At least one platform must be specified"); panic!("At least one platform must be specified");
} }
if let Some(website) = &self.website {
if website.is_empty() || *website == self.repository {
panic!(
"Please do not put the repository in website, or set website to an empty value"
);
}
}
if let Some(license_markdown) = &self.license_markdown {
if license_markdown.is_empty() {
panic!("license_markdown can not be an empty value");
}
}
} }
} }

View File

@@ -22,7 +22,7 @@ use install_action_internal_codegen::{
}; };
use spdx::expression::{ExprNode, ExpressionReq, Operator}; use spdx::expression::{ExprNode, ExpressionReq, Operator};
fn main() -> Result<()> { fn main() {
let args: Vec<_> = env::args().skip(1).collect(); let args: Vec<_> = env::args().skip(1).collect();
if args.is_empty() || args.iter().any(|arg| arg.starts_with('-')) { if args.is_empty() || args.iter().any(|arg| arg.starts_with('-')) {
println!( println!(
@@ -38,23 +38,26 @@ fn main() -> Result<()> {
let workspace_root = workspace_root(); let workspace_root = workspace_root();
let manifest_path = &workspace_root.join("manifests").join(format!("{package}.json")); let manifest_path = &workspace_root.join("manifests").join(format!("{package}.json"));
let download_cache_dir = &workspace_root.join("tools/codegen/tmp/cache").join(package); let download_cache_dir = &workspace_root.join("tools/codegen/tmp/cache").join(package);
fs::create_dir_all(manifest_path.parent().unwrap())?; fs::create_dir_all(manifest_path.parent().unwrap()).unwrap();
fs::create_dir_all(download_cache_dir)?; fs::create_dir_all(download_cache_dir).unwrap();
eprintln!("download cache: {}", download_cache_dir.display()); eprintln!("download cache: {}", download_cache_dir.display());
let mut base_info: BaseManifest = serde_json::from_slice(&fs::read( let mut base_info: BaseManifest = serde_json::from_slice(
workspace_root.join("tools/codegen/base").join(format!("{package}.json")), &fs::read(workspace_root.join("tools/codegen/base").join(format!("{package}.json")))
)?)?; .unwrap(),
)
.unwrap();
base_info.validate(); base_info.validate();
let repo = base_info let repo = base_info
.repository .repository
.strip_prefix("https://github.com/") .strip_prefix("https://github.com/")
.context("repository must start with https://github.com/")?; .context("repository must start with https://github.com/")
.unwrap();
eprintln!("downloading metadata from {GITHUB_API_START}repos/{repo}"); eprintln!("downloading metadata from {GITHUB_API_START}repos/{repo}");
let repo_info: github::RepoMetadata = let repo_info: github::RepoMetadata =
download(&format!("{GITHUB_API_START}repos/{repo}"))?.into_json()?; download(&format!("{GITHUB_API_START}repos/{repo}")).unwrap().into_json().unwrap();
eprintln!("downloading releases from {GITHUB_API_START}repos/{repo}/releases"); eprintln!("downloading releases from {GITHUB_API_START}repos/{repo}/releases");
let mut releases: github::Releases = vec![]; let mut releases: github::Releases = vec![];
@@ -64,8 +67,10 @@ fn main() -> Result<()> {
let per_page = 100; let per_page = 100;
let mut r: github::Releases = download(&format!( let mut r: github::Releases = download(&format!(
"{GITHUB_API_START}repos/{repo}/releases?per_page={per_page}&page={page}" "{GITHUB_API_START}repos/{repo}/releases?per_page={per_page}&page={page}"
))? ))
.into_json()?; .unwrap()
.into_json()
.unwrap();
// If version_req is latest, it is usually sufficient to look at the latest 100 releases. // If version_req is latest, it is usually sufficient to look at the latest 100 releases.
if r.len() < per_page || version_req.is_some_and(|req| req == "latest") { if r.len() < per_page || version_req.is_some_and(|req| req == "latest") {
releases.append(&mut r); releases.append(&mut r);
@@ -103,15 +108,20 @@ fn main() -> Result<()> {
.rust_crate .rust_crate
.as_ref() .as_ref()
.map(|s| replace_vars(s, package, None, None, base_info.rust_crate.as_deref())) .map(|s| replace_vars(s, package, None, None, base_info.rust_crate.as_deref()))
.transpose()?; .transpose()
.unwrap();
if let Some(crate_name) = &base_info.rust_crate { if let Some(crate_name) = &base_info.rust_crate {
eprintln!("downloading crate info from https://crates.io/api/v1/crates/{crate_name}"); eprintln!("downloading crate info from https://crates.io/api/v1/crates/{crate_name}");
let info = download(&format!("https://crates.io/api/v1/crates/{crate_name}"))? let info = download(&format!("https://crates.io/api/v1/crates/{crate_name}"))
.into_json::<crates_io::Crate>()?; .unwrap()
.into_json::<crates_io::Crate>()
.unwrap();
let latest_version = &info.versions[0].num; let latest_version = &info.versions[0].num;
crates_io_version_detail = Some( crates_io_version_detail = Some(
download(&format!("https://crates.io/api/v1/crates/{crate_name}/{latest_version}"))? download(&format!("https://crates.io/api/v1/crates/{crate_name}/{latest_version}"))
.into_json::<crates_io::VersionMetadata>()? .unwrap()
.into_json::<crates_io::VersionMetadata>()
.unwrap()
.version, .version,
); );
@@ -119,7 +129,7 @@ fn main() -> Result<()> {
if !crate_repository.to_lowercase().starts_with(&base_info.repository.to_lowercase()) { if !crate_repository.to_lowercase().starts_with(&base_info.repository.to_lowercase()) {
panic!("repository {crate_repository} from crates.io differs from base manifest"); panic!("repository {crate_repository} from crates.io differs from base manifest");
} }
} else if crate_name != "zola" { } else {
panic!("crate metadata does not include a repository"); panic!("crate metadata does not include a repository");
} }
@@ -139,7 +149,7 @@ fn main() -> Result<()> {
if manifest_path.is_file() { if manifest_path.is_file() {
println!("loading pre-existing manifest {}", manifest_path.display()); println!("loading pre-existing manifest {}", manifest_path.display());
match serde_json::from_slice(&fs::read(manifest_path)?) { match serde_json::from_slice(&fs::read(manifest_path).unwrap()) {
Ok(m) => { Ok(m) => {
manifests = m; manifests = m;
for (k, manifest) in &mut manifests.map { for (k, manifest) in &mut manifests.map {
@@ -165,18 +175,8 @@ fn main() -> Result<()> {
} }
} }
// Check website
if let Some(website) = base_info.website {
if website.is_empty() || website == base_info.repository {
panic!("Please do not put the repository in website, or set website to an empty value");
}
}
// Populate license_markdown from the base manifest if present. // Populate license_markdown from the base manifest if present.
if let Some(license_markdown) = base_info.license_markdown { if let Some(license_markdown) = base_info.license_markdown {
if license_markdown.is_empty() {
panic!("license_markdown can not be an empty value");
}
manifests.license_markdown = license_markdown; manifests.license_markdown = license_markdown;
} }
@@ -184,7 +184,7 @@ fn main() -> Result<()> {
if !manifests.license_markdown.is_empty() { if !manifests.license_markdown.is_empty() {
let urls = get_license_markdown_urls(&manifests.license_markdown); let urls = get_license_markdown_urls(&manifests.license_markdown);
if urls.is_empty() { if urls.is_empty() {
bail!("Could not find URLs in license_markdown: {}.", manifests.license_markdown); panic!("Could not find URLs in license_markdown: {}.", manifests.license_markdown);
} }
for url in urls { for url in urls {
if let Err(err) = github_head(&url) { if let Err(err) = github_head(&url) {
@@ -207,7 +207,7 @@ fn main() -> Result<()> {
spdx_id spdx_id
} }
_ => { _ => {
bail!( panic!(
"No license SPDX found in crates.io or GitHub metadata.\n\ "No license SPDX found in crates.io or GitHub metadata.\n\
Please set license_markdown in the base manifest" Please set license_markdown in the base manifest"
); );
@@ -218,7 +218,7 @@ fn main() -> Result<()> {
{ {
manifests.license_markdown = license_markdown; manifests.license_markdown = license_markdown;
} else { } else {
bail!( panic!(
"Unable to verify license file(s) in the repo for license {license}.\n\ "Unable to verify license file(s) in the repo for license {license}.\n\
Please set license_markdown in the base manifest" Please set license_markdown in the base manifest"
); );
@@ -227,13 +227,13 @@ fn main() -> Result<()> {
let version_req: semver::VersionReq = match version_req { let version_req: semver::VersionReq = match version_req {
_ if latest_only => { _ if latest_only => {
let req = format!("={}", releases.first_key_value().unwrap().0.0).parse()?; let req = format!("={}", releases.first_key_value().unwrap().0.0).parse().unwrap();
eprintln!("update manifest for versions '{req}'"); eprintln!("update manifest for versions '{req}'");
req req
} }
None => match base_info.version_range { None => match base_info.version_range {
Some(version_range) => version_range.parse()?, Some(version_range) => version_range.parse().unwrap(),
None => ">= 0.0.1".parse()?, // HACK: ignore pre-releases None => ">= 0.0.1".parse().unwrap(), // HACK: ignore pre-releases
}, },
Some(version_req) => { Some(version_req) => {
for version in manifests.map.keys() { for version in manifests.map.keys() {
@@ -249,12 +249,12 @@ fn main() -> Result<()> {
let req = if version_req == "latest" { let req = if version_req == "latest" {
// TODO: this should check all missing versions // TODO: this should check all missing versions
if manifests.map.is_empty() { if manifests.map.is_empty() {
format!("={}", releases.first_key_value().unwrap().0.0).parse()? format!("={}", releases.first_key_value().unwrap().0.0).parse().unwrap()
} else { } else {
format!(">={}", semver_versions.last().unwrap()).parse()? format!(">={}", semver_versions.last().unwrap()).parse().unwrap()
} }
} else { } else {
version_req.parse()? version_req.parse().unwrap()
}; };
eprintln!("update manifest for versions '{req}'"); eprintln!("update manifest for versions '{req}'");
req req
@@ -285,8 +285,8 @@ fn main() -> Result<()> {
let signing_version_req: Option<semver::VersionReq> = match &base_info.signing { let signing_version_req: Option<semver::VersionReq> = match &base_info.signing {
Some(signing) => { Some(signing) => {
match &signing.version_range { match &signing.version_range {
Some(version_range) => Some(version_range.parse()?), Some(version_range) => Some(version_range.parse().unwrap()),
None => Some(">= 0.0.1".parse()?), // HACK: ignore pre-releases None => Some(">= 0.0.1".parse().unwrap()), // HACK: ignore pre-releases
} }
} }
None => { None => {
@@ -311,7 +311,8 @@ fn main() -> Result<()> {
.asset_name .asset_name
.as_ref() .as_ref()
.or(base_info.asset_name.as_ref()) .or(base_info.asset_name.as_ref())
.with_context(|| format!("asset_name is needed for {package} on {platform:?}"))? .with_context(|| format!("asset_name is needed for {package} on {platform:?}"))
.unwrap()
.as_slice() .as_slice()
.iter() .iter()
.map(|asset_name| { .map(|asset_name| {
@@ -323,7 +324,8 @@ fn main() -> Result<()> {
base_info.rust_crate.as_deref(), base_info.rust_crate.as_deref(),
) )
}) })
.collect::<Result<Vec<_>>>()?; .collect::<Result<Vec<_>>>()
.unwrap();
let (url, digest, asset_name) = match asset_names.iter().find_map(|asset_name| { let (url, digest, asset_name) = match asset_names.iter().find_map(|asset_name| {
release release
.assets .assets
@@ -345,7 +347,7 @@ fn main() -> Result<()> {
"{version}-{platform:?}-{}", "{version}-{platform:?}-{}",
Path::new(&url).file_name().unwrap().to_str().unwrap() Path::new(&url).file_name().unwrap().to_str().unwrap()
)); ));
let response = download(&url)?; let response = download(&url).unwrap();
let etag = let etag =
response.header("etag").expect("binary should have an etag").replace('\"', ""); response.header("etag").expect("binary should have an etag").replace('\"', "");
@@ -365,11 +367,11 @@ fn main() -> Result<()> {
if download_cache.is_file() { if download_cache.is_file() {
eprintln!("already downloaded"); eprintln!("already downloaded");
fs::File::open(download_cache)?.read_to_end(&mut buf)?; // Not buffered because it is read at once. fs::File::open(download_cache).unwrap().read_to_end(&mut buf).unwrap(); // Not buffered because it is read at once.
} else { } else {
response.into_reader().read_to_end(&mut buf)?; response.into_reader().read_to_end(&mut buf).unwrap();
eprintln!("download complete"); eprintln!("download complete");
fs::write(download_cache, &buf)?; fs::write(download_cache, &buf).unwrap();
} }
eprintln!("getting sha256 hash for {url}"); eprintln!("getting sha256 hash for {url}");
@@ -377,7 +379,7 @@ fn main() -> Result<()> {
let hash = format!("{hash:?}").strip_prefix("SHA256:").unwrap().to_owned(); let hash = format!("{hash:?}").strip_prefix("SHA256:").unwrap().to_owned();
if let Some(digest) = digest { if let Some(digest) = digest {
if hash != digest.strip_prefix("sha256:").unwrap() { if hash != digest.strip_prefix("sha256:").unwrap() {
bail!( panic!(
"digest mismatch between GitHub release page and actually downloaded file" "digest mismatch between GitHub release page and actually downloaded file"
); );
} }
@@ -401,9 +403,15 @@ fn main() -> Result<()> {
signer_workflow, signer_workflow,
&download_cache &download_cache
) )
.run()?; .run()
.unwrap();
} }
SigningKind::MinisignBinstall => { SigningKind::MinisignBinstall => {
let Some(crates_io_info) = &crates_io_info else {
panic!(
"signing kind minisign-binstall is supported only for rust crate"
);
};
let url = url.clone() + ".sig"; let url = url.clone() + ".sig";
let sig_download_cache = &download_cache.with_extension(format!( let sig_download_cache = &download_cache.with_extension(format!(
"{}.sig", "{}.sig",
@@ -412,19 +420,14 @@ fn main() -> Result<()> {
eprint!("downloading {url} for signature validation ... "); eprint!("downloading {url} for signature validation ... ");
let sig = if sig_download_cache.is_file() { let sig = if sig_download_cache.is_file() {
eprintln!("already downloaded"); eprintln!("already downloaded");
minisign_verify::Signature::from_file(sig_download_cache)? minisign_verify::Signature::from_file(sig_download_cache).unwrap()
} else { } else {
let buf = download(&url)?.into_string()?; let buf = download(&url).unwrap().into_string().unwrap();
eprintln!("download complete"); eprintln!("download complete");
fs::write(sig_download_cache, &buf)?; fs::write(sig_download_cache, &buf).unwrap();
minisign_verify::Signature::decode(&buf)? minisign_verify::Signature::decode(&buf).unwrap()
}; };
let Some(crates_io_info) = &crates_io_info else {
bail!(
"signing kind minisign-binstall is supported only for rust crate"
);
};
let v = crates_io_info let v = crates_io_info
.versions .versions
.iter() .iter()
@@ -437,18 +440,18 @@ fn main() -> Result<()> {
if crate_download_cache.is_file() { if crate_download_cache.is_file() {
eprintln!("already downloaded"); eprintln!("already downloaded");
} else { } else {
download(&url)?.into_reader().read_to_end(&mut buf2)?; download(&url).unwrap().into_reader().read_to_end(&mut buf2).unwrap();
let hash = ring::digest::digest(&ring::digest::SHA256, &buf2); let hash = ring::digest::digest(&ring::digest::SHA256, &buf2);
if format!("{hash:?}").strip_prefix("SHA256:").unwrap() != v.checksum { if format!("{hash:?}").strip_prefix("SHA256:").unwrap() != v.checksum {
bail!("checksum mismatch for {url}"); panic!("checksum mismatch for {url}");
} }
let decoder = flate2::read::GzDecoder::new(&*buf2); let decoder = flate2::read::GzDecoder::new(&*buf2);
let mut archive = tar::Archive::new(decoder); let mut archive = tar::Archive::new(decoder);
for entry in archive.entries()? { for entry in archive.entries().unwrap() {
let mut entry = entry?; let mut entry = entry.unwrap();
let path = entry.path()?; let path = entry.path().unwrap();
if path.file_name() == Some(OsStr::new("Cargo.toml")) { if path.file_name() == Some(OsStr::new("Cargo.toml")) {
entry.unpack(crate_download_cache)?; entry.unpack(crate_download_cache).unwrap();
break; break;
} }
} }
@@ -457,8 +460,9 @@ fn main() -> Result<()> {
} }
if pubkey.is_none() { if pubkey.is_none() {
let cargo_manifest = toml::de::from_str::<cargo_manifest::Manifest>( let cargo_manifest = toml::de::from_str::<cargo_manifest::Manifest>(
&fs::read_to_string(crate_download_cache)?, &fs::read_to_string(crate_download_cache).unwrap(),
)?; )
.unwrap();
eprintln!( eprintln!(
"algorithm: {}", "algorithm: {}",
cargo_manifest.package.metadata.binstall.signing.algorithm cargo_manifest.package.metadata.binstall.signing.algorithm
@@ -471,14 +475,17 @@ fn main() -> Result<()> {
cargo_manifest.package.metadata.binstall.signing.algorithm, cargo_manifest.package.metadata.binstall.signing.algorithm,
"minisign" "minisign"
); );
pubkey = Some(minisign_verify::PublicKey::from_base64( pubkey = Some(
&cargo_manifest.package.metadata.binstall.signing.pubkey, minisign_verify::PublicKey::from_base64(
)?); &cargo_manifest.package.metadata.binstall.signing.pubkey,
)
.unwrap(),
);
} }
let pubkey = pubkey.as_ref().unwrap(); let pubkey = pubkey.as_ref().unwrap();
eprint!("verifying signature for {bin_url} ... "); eprint!("verifying signature for {bin_url} ... ");
let allow_legacy = false; let allow_legacy = false;
pubkey.verify(&buf, &sig, allow_legacy)?; pubkey.verify(&buf, &sig, allow_legacy).unwrap();
eprintln!("done"); eprintln!("done");
} }
} }
@@ -563,7 +570,7 @@ fn main() -> Result<()> {
// update an existing manifests.json to avoid discarding work done in the event of a fetch error. // update an existing manifests.json to avoid discarding work done in the event of a fetch error.
if existing_manifest.is_some() && !version_req_given { if existing_manifest.is_some() && !version_req_given {
write_manifests(manifest_path, &manifests.clone())?; write_manifests(manifest_path, &manifests.clone()).unwrap();
eprintln!("wrote {} with incomplete data", manifest_path.display()); eprintln!("wrote {} with incomplete data", manifest_path.display());
} }
} }
@@ -636,7 +643,7 @@ fn main() -> Result<()> {
.values() .values()
.any(|m| matches!(m, ManifestRef::Real(m) if m.download_info.contains_key(&p))) .any(|m| matches!(m, ManifestRef::Real(m) if m.download_info.contains_key(&p)))
{ {
bail!( panic!(
"platform list in base manifest for {package} contains {p:?}, \ "platform list in base manifest for {package} contains {p:?}, \
but result manifest doesn't contain it; \ but result manifest doesn't contain it; \
consider removing {p:?} from platform list in base manifest" consider removing {p:?} from platform list in base manifest"
@@ -680,7 +687,7 @@ fn main() -> Result<()> {
// until 2027-08, people aren't paying much attention to it at this time. // until 2027-08, people aren't paying much attention to it at this time.
continue; continue;
} }
bail!( panic!(
"platform list in base manifest for {package} contains {p:?}, \ "platform list in base manifest for {package} contains {p:?}, \
but latest release ({latest_version}) doesn't contain it; \ but latest release ({latest_version}) doesn't contain it; \
consider marking {latest_version} as broken by adding 'broken' field to base manifest" consider marking {latest_version} as broken by adding 'broken' field to base manifest"
@@ -720,10 +727,8 @@ fn main() -> Result<()> {
manifests.rust_crate = base_info.rust_crate; manifests.rust_crate = base_info.rust_crate;
write_manifests(manifest_path, &manifests)?; write_manifests(manifest_path, &manifests).unwrap();
eprintln!("wrote {}", manifest_path.display()); eprintln!("wrote {}", manifest_path.display());
Ok(())
} }
fn write_manifests(manifest_path: &Path, manifests: &Manifests) -> Result<()> { fn write_manifests(manifest_path: &Path, manifests: &Manifests) -> Result<()> {

View File

@@ -6,7 +6,6 @@ use std::{
path::PathBuf, path::PathBuf,
}; };
use anyhow::Result;
use fs_err as fs; use fs_err as fs;
use install_action_internal_codegen::{BaseManifest, Manifests, workspace_root}; use install_action_internal_codegen::{BaseManifest, Manifests, workspace_root};
@@ -32,7 +31,7 @@ const FOOTER: &str = "
[cargo-binstall]: https://github.com/cargo-bins/cargo-binstall [cargo-binstall]: https://github.com/cargo-bins/cargo-binstall
"; ";
fn main() -> Result<()> { fn main() {
let args: Vec<_> = env::args().skip(1).collect(); let args: Vec<_> = env::args().skip(1).collect();
if !args.is_empty() || args.iter().any(|arg| arg.starts_with('-')) { if !args.is_empty() || args.iter().any(|arg| arg.starts_with('-')) {
println!( println!(
@@ -72,9 +71,10 @@ fn main() -> Result<()> {
name.set_extension(""); name.set_extension("");
let name = name.to_string_lossy().to_string(); let name = name.to_string_lossy().to_string();
let base_info: BaseManifest = let base_info: BaseManifest =
serde_json::from_slice(&fs::read(base_info_dir.join(file_name.clone()))?)?; serde_json::from_slice(&fs::read(base_info_dir.join(file_name.clone())).unwrap())
.unwrap();
let manifests: Manifests = let manifests: Manifests =
serde_json::from_slice(&fs::read(manifest_dir.join(file_name))?)?; serde_json::from_slice(&fs::read(manifest_dir.join(file_name)).unwrap()).unwrap();
let website = match base_info.website { let website = match base_info.website {
Some(website) => website, Some(website) => website,
@@ -135,9 +135,7 @@ fn main() -> Result<()> {
} }
file.write_all(FOOTER.as_bytes()).expect("Unable to write footer"); file.write_all(FOOTER.as_bytes()).expect("Unable to write footer");
file.flush()?; file.flush().unwrap();
Ok(())
} }
#[derive(Debug)] #[derive(Debug)]