diff --git a/package/src/bin/omicron-package.rs b/package/src/bin/omicron-package.rs
index 6ea10ae27b..2cb0512169 100644
--- a/package/src/bin/omicron-package.rs
+++ b/package/src/bin/omicron-package.rs
@@ -11,25 +11,22 @@ use futures::stream::{self, StreamExt, TryStreamExt};
 use illumos_utils::{zfs, zone};
 use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
 use omicron_package::cargo_plan::build_cargo_plan;
-use omicron_package::target::KnownTarget;
+use omicron_package::config::{Config, ConfigArgs};
+use omicron_package::target::{target_command_help, KnownTarget};
 use omicron_package::{parse, BuildCommand, DeployCommand, TargetCommand};
-use omicron_zone_package::config::{Config as PackageConfig, PackageMap};
+use omicron_zone_package::config::Config as PackageConfig;
 use omicron_zone_package::package::{Package, PackageOutput, PackageSource};
 use omicron_zone_package::progress::Progress;
 use omicron_zone_package::target::Target;
 use rayon::prelude::*;
 use ring::digest::{Context as DigestContext, Digest, SHA256};
 use sled_hardware::cleanup::cleanup_networking_resources;
-use slog::debug;
 use slog::o;
 use slog::Drain;
 use slog::Logger;
 use slog::{info, warn};
-use std::collections::BTreeMap;
 use std::env;
 use std::fs::create_dir_all;
-use std::io::Write;
-use std::str::FromStr;
 use std::sync::{Arc, OnceLock};
 use std::time::Duration;
 use tokio::io::{AsyncReadExt, AsyncWriteExt, BufReader};
@@ -44,11 +41,6 @@ enum SubCommand {
     Deploy(DeployCommand),
 }
 
-fn parse_duration_ms(arg: &str) -> Result<std::time::Duration> {
-    let ms = arg.parse()?;
-    Ok(std::time::Duration::from_millis(ms))
-}
-
 #[derive(Debug, Parser)]
 #[clap(name = "packaging tool")]
 struct Args {
@@ -64,43 +56,12 @@ struct Args {
     )]
     manifest: Utf8PathBuf,
 
-    #[clap(
-        short,
-        long,
-        help = "The name of the build target to use for this command",
-        default_value_t = ACTIVE.to_string(),
-    )]
-    target: String,
-
     /// The output directory, where artifacts should be built and staged
     #[clap(long = "artifacts", default_value = "out/")]
-    artifact_dir: Utf8PathBuf,
+    pub artifact_dir: Utf8PathBuf,
 
-    #[clap(
-        short,
-        long,
-        help = "Skip confirmation prompt for destructive operations",
-        action,
-        default_value_t = false
-    )]
-    force: bool,
-
-    #[clap(
-        long,
-        help = "Number of retries to use when re-attempting failed package downloads",
-        action,
-        default_value_t = 10
-    )]
-    retry_count: usize,
-
-    #[clap(
-        long,
-        help = "Duration, in ms, to wait before re-attempting failed package downloads",
-        action,
-        value_parser = parse_duration_ms,
-        default_value = "1000",
-    )]
-    retry_duration: std::time::Duration,
+    #[clap(flatten)]
+    config_args: ConfigArgs,
 
     #[clap(subcommand)]
     subcommand: SubCommand,
@@ -158,7 +119,7 @@ async fn do_for_all_rust_packages(
     let cargo_plan =
         build_cargo_plan(&metadata, config.packages_to_build(), &features)?;
 
-    cargo_plan.run(command, &config.log).await
+    cargo_plan.run(command, config.log()).await
 }
 
 async fn do_check(config: &Config) -> Result<()> {
@@ -172,7 +133,7 @@ async fn do_build(config: &Config) -> Result<()> {
 async fn do_dot(config: &Config) -> Result<()> {
     println!(
         "{}",
-        omicron_package::dot::do_dot(&config.target, &config.package_config)?
+        omicron_package::dot::do_dot(config.target(), config.package_config())?
     );
     Ok(())
 }
@@ -194,12 +155,9 @@ async fn do_list_outputs(
     Ok(())
 }
 
-// The name reserved for the currently-in-use build target.
-const ACTIVE: &str = "active";
-
 async fn do_target(
     artifact_dir: &Utf8Path,
-    name: &str,
+    name: Option<&str>,
     subcommand: &TargetCommand,
 ) -> Result<()> {
     let target_dir = artifact_dir.join("target");
@@ -222,7 +180,7 @@
                 clickhouse_topology.clone(),
             )?;
 
-            let path = get_single_target(&target_dir, name).await?;
+            let (name, path) = get_required_named_target(&target_dir, name)?;
             tokio::fs::write(&path, Target::from(target).to_string())
                 .await
                 .with_context(|| {
@@ -234,7 +192,8 @@
             println!("Created new build target '{name}' and set it as active");
         }
         TargetCommand::List => {
-            let active = tokio::fs::read_link(target_dir.join(ACTIVE)).await?;
+            let active =
+                tokio::fs::read_link(target_dir.join(Config::ACTIVE)).await?;
             let active = Utf8PathBuf::try_from(active)?;
             for entry in walkdir::WalkDir::new(&target_dir)
                 .max_depth(1)
@@ -256,31 +215,46 @@
             }
         }
         TargetCommand::Set => {
-            let _ = get_single_target(&target_dir, name).await?;
+            let (name, _) = get_required_named_target(&target_dir, name)?;
             replace_active_link(&name, &target_dir).await?;
             println!("Set build target '{name}' as active");
         }
         TargetCommand::Delete => {
-            let path = get_single_target(&target_dir, name).await?;
-            tokio::fs::remove_file(&path).await?;
+            let (name, path) = get_required_named_target(&target_dir, name)?;
+            tokio::fs::remove_file(&path).await.with_context(|| {
+                format!("failed to remove target file {}", path)
+            })?;
             println!("Removed build target '{name}'");
         }
     };
     Ok(())
 }
 
-async fn get_single_target(
+/// Get the path to a named target as required by the `target` subcommand.
+///
+/// This function bans `active` as a target name, as it is reserved for the
+/// active target.
+fn get_required_named_target(
     target_dir: impl AsRef<Utf8Path>,
-    name: &str,
-) -> Result<Utf8PathBuf> {
-    if name == ACTIVE {
-        bail!(
-            "The name '{name}' is reserved, please try another (e.g. 'default')\n\
-            Usage: '{} -t target ...'",
-            env::current_exe().unwrap().display(),
-        );
+    name: Option<&str>,
+) -> Result<(&str, Utf8PathBuf)> {
+    match name {
+        Some(name) if name == Config::ACTIVE => {
+            bail!(
+                "the name '{name}' is reserved, please try another (e.g. 'default')\n\
+                Usage: {} ...",
+                target_command_help(""),
+            );
+        }
+        Some(name) => Ok((name, target_dir.as_ref().join(name))),
+        None => {
+            bail!(
+                "a target name is required for this operation (e.g. 'default')\n\
                Usage: {} ...",
+                target_command_help(""),
+            );
+        }
     }
-    Ok(target_dir.as_ref().join(name))
 }
 
 async fn replace_active_link(
@@ -290,7 +264,7 @@
     let src = src.as_ref();
     let target_dir = target_dir.as_ref();
 
-    let dst = target_dir.join(ACTIVE);
+    let dst = target_dir.join(Config::ACTIVE);
     if !target_dir.join(src).exists() {
         bail!("Target file {} does not exist", src);
     }
@@ -399,7 +373,7 @@ async fn ensure_package(
     output_directory: &Utf8Path,
     disable_cache: bool,
 ) -> Result<()> {
-    let target = &config.target;
+    let target = config.target();
     let progress = ui.add_package(package_name.to_string());
     match &package.source {
         PackageSource::Prebuilt { repo, commit, sha256 } => {
@@ -416,7 +390,7 @@
            };
 
            if should_download {
-                let mut attempts_left = config.retry_count + 1;
+                let mut attempts_left = config.retry_count() + 1;
                loop {
                    match download_prebuilt(
                        &progress,
@@ -436,7 +410,7 @@
                            if attempts_left == 0 {
                                return Err(err);
                            }
-                            tokio::time::sleep(config.retry_duration).await;
+                            tokio::time::sleep(config.retry_duration()).await;
                            progress.reset();
                        }
                    }
@@ -487,7 +461,7 @@
     create_dir_all(&output_directory)
         .map_err(|err| anyhow!("Cannot create output directory: {}", err))?;
 
-    let ui = ProgressUI::new(&config.log);
+    let ui = ProgressUI::new(config.log());
 
     do_build(&config).await?;
 
@@ -528,8 +502,8 @@
 ) -> Result<()> {
     // Find the package which should be stamped
     let (_name, package) = config
-        .package_config
-        .packages_to_deploy(&config.target)
+        .package_config()
+        .packages_to_deploy(config.target())
         .0
         .into_iter()
         .find(|(name, _pkg)| name.as_str() == package_name)
@@ -552,7 +526,8 @@
     })?;
 
     // Copy all packages to the install location in parallel.
-    let packages = config.package_config.packages_to_deploy(&config.target).0;
+    let packages =
+        config.package_config().packages_to_deploy(&config.target()).0;
 
     packages.par_iter().try_for_each(
         |(package_name, package)| -> Result<()> {
@@ -561,7 +536,7 @@
 
            let dst = package.get_output_path(&package.service_name, install_dir);
            info!(
-                &config.log,
+                config.log(),
                "Installing service";
                "src" => %src,
                "dst" => %dst,
@@ -593,7 +568,7 @@
        let tar_path = install_dir.join(format!("{}.tar", service_name));
        let service_path = install_dir.join(service_name);
        info!(
-            &config.log,
+            config.log(),
            "Unpacking service tarball";
            "tar_path" => %tar_path,
            "service_path" => %service_path,
@@ -613,14 +588,14 @@ fn do_activate(config: &Config, install_dir: &Utf8Path) -> Result<()> {
     // Install the bootstrap service, which itself extracts and
     // installs other services.
     if let Some(package) =
-        config.package_config.packages.get("omicron-sled-agent")
+        config.package_config().packages.get("omicron-sled-agent")
     {
         let manifest_path = install_dir
             .join(&package.service_name)
             .join("pkg")
             .join("manifest.xml");
         info!(
-            config.log,
+            config.log(),
            "Installing bootstrap service from {}",
            manifest_path
        );
@@ -654,7 +629,7 @@ async fn uninstall_all_omicron_zones() -> Result<()> {
 fn uninstall_all_omicron_datasets(config: &Config) -> Result<()> {
     let datasets = match zfs::get_all_omicron_datasets_for_delete() {
         Err(e) => {
-            warn!(config.log, "Failed to get omicron datasets: {}", e);
+            warn!(config.log(), "Failed to get omicron datasets: {}", e);
             return Err(e);
         }
         Ok(datasets) => datasets,
@@ -669,7 +644,7 @@
         datasets
     ))?;
     for dataset in &datasets {
-        info!(config.log, "Deleting dataset: {dataset}");
+        info!(config.log(), "Deleting dataset: {dataset}");
         zfs::Zfs::destroy_dataset(dataset)?;
     }
 
@@ -679,8 +654,8 @@
 // Attempts to both disable and delete all requested packages.
 fn uninstall_all_packages(config: &Config) {
     for (_, package) in config
-        .package_config
-        .packages_to_deploy(&config.target)
+        .package_config()
+        .packages_to_deploy(config.target())
         .0
         .into_iter()
         .filter(|(_, package)| matches!(package.output, PackageOutput::Tarball))
@@ -744,18 +719,18 @@ fn remove_all_except<P: AsRef<Utf8Path>>(
 }
 
 async fn do_deactivate(config: &Config) -> Result<()> {
-    info!(&config.log, "Removing all Omicron zones");
+    info!(config.log(), "Removing all Omicron zones");
     uninstall_all_omicron_zones().await?;
-    info!(config.log, "Uninstalling all packages");
+    info!(config.log(), "Uninstalling all packages");
     uninstall_all_packages(config);
-    info!(config.log, "Removing networking resources");
-    cleanup_networking_resources(&config.log).await?;
+    info!(config.log(), "Removing networking resources");
+    cleanup_networking_resources(config.log()).await?;
     Ok(())
 }
 
 async fn do_uninstall(config: &Config) -> Result<()> {
     do_deactivate(config).await?;
-    info!(config.log, "Removing datasets");
+    info!(config.log(), "Removing datasets");
     uninstall_all_omicron_datasets(config)?;
     Ok(())
 }
@@ -766,7 +741,7 @@ async fn do_clean(
     install_dir: &Utf8Path,
 ) -> Result<()> {
     do_uninstall(&config).await?;
-    info!(config.log, "Removing artifacts from {}", artifact_dir);
+    info!(config.log(), "Removing artifacts from {}", artifact_dir);
     const ARTIFACTS_TO_KEEP: &[&str] = &[
         "clickhouse",
         "cockroachdb",
@@ -775,10 +750,10 @@
         "downloads",
         "softnpu",
     ];
-    remove_all_except(artifact_dir, ARTIFACTS_TO_KEEP, &config.log)?;
-    info!(config.log, "Removing installed objects in: {}", install_dir);
+    remove_all_except(artifact_dir, ARTIFACTS_TO_KEEP, config.log())?;
+    info!(config.log(), "Removing installed objects in: {}", install_dir);
     const INSTALLED_OBJECTS_TO_KEEP: &[&str] = &["opte"];
-    remove_all_except(install_dir, INSTALLED_OBJECTS_TO_KEEP, &config.log)?;
+    remove_all_except(install_dir, INSTALLED_OBJECTS_TO_KEEP, config.log())?;
 
     Ok(())
 }
@@ -889,115 +864,6 @@ impl Progress for PackageProgress {
     }
 }
 
-struct Config {
-    log: Logger,
-    // Description of all possible packages.
-    package_config: PackageConfig,
-    // Description of the target we're trying to operate on.
-    target: Target,
-    // The list of packages the user wants us to build (all, if empty)
-    only: Vec<String>,
-    // True if we should skip confirmations for destructive operations.
-    force: bool,
-    // Number of times to retry failed downloads.
-    retry_count: usize,
-    // Duration to wait before retrying failed downloads.
-    retry_duration: std::time::Duration,
-}
-
-impl Config {
-    /// Prompts the user for input before proceeding with an operation.
-    fn confirm(&self, prompt: &str) -> Result<()> {
-        if self.force {
-            return Ok(());
-        }
-
-        print!("{prompt}\n[yY to confirm] >> ");
-        let _ = std::io::stdout().flush();
-
-        let mut input = String::new();
-        std::io::stdin().read_line(&mut input)?;
-        match input.as_str().trim() {
-            "y" | "Y" => Ok(()),
-            _ => bail!("Aborting"),
-        }
-    }
-
-    /// Returns target packages to be assembled on the builder machine, limited
-    /// to those specified in `only` (if set).
-    fn packages_to_build(&self) -> PackageMap<'_> {
-        let packages = self.package_config.packages_to_build(&self.target);
-        if self.only.is_empty() {
-            return packages;
-        }
-
-        let mut filtered_packages = PackageMap(BTreeMap::new());
-        let mut to_walk = PackageMap(BTreeMap::new());
-        // add the requested packages to `to_walk`
-        for package_name in &self.only {
-            to_walk.0.insert(
-                package_name,
-                packages.0.get(package_name).unwrap_or_else(|| {
-                    panic!(
-                        "Explicitly-requested package '{}' does not exist",
-                        package_name
-                    )
-                }),
-            );
-        }
-        // dependencies are listed by output name, so create a lookup table to
-        // get a package by its output name.
-        let lookup_by_output = packages
-            .0
-            .iter()
-            .map(|(name, package)| {
-                (package.get_output_file(name), (*name, *package))
-            })
-            .collect::<BTreeMap<_, _>>();
-        // packages yet to be walked are added to `to_walk`. pop each entry and
-        // add its dependencies to `to_walk`, then add the package we finished
-        // walking to `filtered_packages`.
-        while let Some((package_name, package)) = to_walk.0.pop_first() {
-            if let PackageSource::Composite { packages } = &package.source {
-                for output in packages {
-                    // find the package by output name
-                    let (dep_name, dep_package) =
-                        lookup_by_output.get(output).unwrap_or_else(|| {
-                            panic!(
-                                "Could not find a package which creates '{}'",
-                                output
-                            )
-                        });
-                    if dep_name.as_str() == package_name {
-                        panic!("'{}' depends on itself", package_name);
-                    }
-                    // if we've seen this package already, it will be in
-                    // `filtered_packages`. otherwise, add it to `to_walk`.
-                    if !filtered_packages.0.contains_key(dep_name) {
-                        to_walk.0.insert(dep_name, dep_package);
-                    }
-                }
-            }
-            // we're done looking at this package's deps
-            filtered_packages.0.insert(package_name, package);
-        }
-        filtered_packages
-    }
-
-    /// Return a list of all possible Cargo features that could be requested for
-    /// the packages being built.
-    ///
-    /// Out of these, the features that actually get requested are determined by
-    /// which features are available for the list of packages being built.
-    fn cargo_features(&self) -> Vec<String> {
-        self.target
-            .0
-            .iter()
-            .map(|(name, value)| format!("{name}-{value}"))
-            .collect::<Vec<_>>()
-    }
-}
-
 #[tokio::main]
 async fn main() -> Result<()> {
     let args = Args::try_parse()?;
@@ -1014,43 +880,13 @@
     let drain = slog_async::Async::new(drain).build().fuse();
     let log = Logger::root(drain, o!());
 
-    let target_help_str = || -> String {
-        format!(
-            "Try calling: '{} -t default target create' to create a new build target",
-            env::current_exe().unwrap().display()
-        )
-    };
-
     let get_config = || -> Result<Config> {
-        let target_path = args.artifact_dir.join("target").join(&args.target);
-        let raw_target =
-            std::fs::read_to_string(&target_path).inspect_err(|_| {
-                eprintln!(
-                    "Failed to read build target: {}\n{}",
-                    target_path,
-                    target_help_str()
-                );
-            })?;
-        let target: Target = KnownTarget::from_str(&raw_target)
-            .inspect_err(|_| {
-                eprintln!(
-                    "Failed to parse {} as target\n{}",
-                    target_path,
-                    target_help_str()
-                );
-            })?
-            .into();
-        debug!(log, "target[{}]: {:?}", args.target, target);
-
-        Ok(Config {
-            log: log.clone(),
+        Config::get_config(
+            &log,
             package_config,
-            target,
-            only: Vec::new(),
-            force: args.force,
-            retry_count: args.retry_count,
-            retry_duration: args.retry_duration,
-        })
+            &args.config_args,
+            &args.artifact_dir,
+        )
     };
 
     // Use a CWD that is the root of the Omicron repository.
@@ -1064,7 +900,12 @@
 
     match args.subcommand {
         SubCommand::Build(BuildCommand::Target { subcommand }) => {
-            do_target(&args.artifact_dir, &args.target, &subcommand).await?;
+            do_target(
+                &args.artifact_dir,
+                args.config_args.target.as_deref(),
+                &subcommand,
+            )
+            .await?;
         }
         SubCommand::Build(BuildCommand::Dot) => {
             do_dot(&get_config()?).await?;
@@ -1075,7 +916,7 @@
         }
         SubCommand::Build(BuildCommand::Package { disable_cache, only }) => {
             let mut config = get_config()?;
-            config.only = only;
+            config.set_only(only);
             do_package(&config, &args.artifact_dir, disable_cache).await?;
         }
         SubCommand::Build(BuildCommand::Stamp { package_name, version }) => {
diff --git a/package/src/config.rs b/package/src/config.rs
new file mode 100644
index 0000000000..800af7a0de
--- /dev/null
+++ b/package/src/config.rs
@@ -0,0 +1,243 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at https://mozilla.org/MPL/2.0/.
+
+use anyhow::{bail, Result};
+use camino::Utf8Path;
+use clap::Args;
+use omicron_zone_package::{
+    config::{Config as PackageConfig, PackageMap},
+    package::PackageSource,
+    target::Target,
+};
+use slog::{debug, Logger};
+use std::{collections::BTreeMap, io::Write, str::FromStr, time::Duration};
+
+use crate::target::{target_command_help, KnownTarget};
+
+#[derive(Debug, Args)]
+pub struct ConfigArgs {
+    /// The name of the build target to use for this command
+    #[clap(short, long)]
+    pub target: Option<String>,
+
+    /// Skip confirmation prompt for destructive operations
+    #[clap(short, long, action, default_value_t = false)]
+    pub force: bool,
+
+    /// Number of retries to use when re-attempting failed package downloads
+    #[clap(long, action, default_value_t = 10)]
+    pub retry_count: usize,
+
+    /// Duration, in ms, to wait before re-attempting failed package downloads
+    #[clap(
+        long,
+        action,
+        value_parser = parse_duration_ms,
+        default_value = "1000",
+    )]
+    pub retry_duration: Duration,
+}
+
+fn parse_duration_ms(arg: &str) -> Result<Duration> {
+    let ms = arg.parse()?;
+    Ok(Duration::from_millis(ms))
+}
+
+#[derive(Debug)]
+pub struct Config {
+    log: Logger,
+    // Description of all possible packages.
+    package_config: PackageConfig,
+    // Description of the target we're trying to operate on.
+    target: Target,
+    // The list of packages the user wants us to build (all, if empty)
+    only: Vec<String>,
+    // True if we should skip confirmations for destructive operations.
+    force: bool,
+    // Number of times to retry failed downloads.
+    retry_count: usize,
+    // Duration to wait before retrying failed downloads.
+    retry_duration: Duration,
+}
+
+impl Config {
+    /// The name reserved for the currently-in-use build target.
+    pub const ACTIVE: &str = "active";
+
+    /// Builds a new configuration.
+    pub fn get_config(
+        log: &Logger,
+        package_config: PackageConfig,
+        args: &ConfigArgs,
+        artifact_dir: &Utf8Path,
+    ) -> Result<Config> {
+        // Within this path, the target is expected to be set.
+        let target = args.target.as_deref().unwrap_or(Self::ACTIVE);
+
+        let target_help_str = || -> String {
+            format!(
+                "Try calling: '{} target create' to create a new build target",
+                target_command_help("default"),
+            )
+        };
+
+        let target_path = artifact_dir.join("target").join(target);
+        let raw_target =
+            std::fs::read_to_string(&target_path).inspect_err(|_| {
+                eprintln!(
+                    "Failed to read build target: {}\n{}",
+                    target_path,
+                    target_help_str()
+                );
+            })?;
+        let target: Target = KnownTarget::from_str(&raw_target)
+            .inspect_err(|_| {
+                eprintln!(
+                    "Failed to parse {} as target\n{}",
+                    target_path,
+                    target_help_str()
+                );
+            })?
+            .into();
+        debug!(log, "target[{}]: {:?}", target, target);
+
+        Ok(Config {
+            log: log.clone(),
+            package_config,
+            target,
+            only: Vec::new(),
+            force: args.force,
+            retry_count: args.retry_count,
+            retry_duration: args.retry_duration,
+        })
+    }
+
+    /// Sets the `only` field.
+    #[inline]
+    pub fn set_only(&mut self, only: Vec<String>) -> &mut Self {
+        self.only = only;
+        self
+    }
+
+    /// Returns the logger.
+    #[inline]
+    pub fn log(&self) -> &Logger {
+        &self.log
+    }
+
+    /// Returns the target currently being operated on.
+    #[inline]
+    pub fn target(&self) -> &Target {
+        &self.target
+    }
+
+    /// Returns the underlying package configuration.
+    #[inline]
+    pub fn package_config(&self) -> &PackageConfig {
+        &self.package_config
+    }
+
+    /// Returns the retry count.
+    #[inline]
+    pub fn retry_count(&self) -> usize {
+        self.retry_count
+    }
+
+    /// Returns the retry duration.
+    #[inline]
+    pub fn retry_duration(&self) -> Duration {
+        self.retry_duration
+    }
+
+    /// Prompts the user for input before proceeding with an operation.
+    pub fn confirm(&self, prompt: &str) -> Result<()> {
+        if self.force {
+            return Ok(());
+        }
+
+        print!("{prompt}\n[yY to confirm] >> ");
+        let _ = std::io::stdout().flush();
+
+        let mut input = String::new();
+        std::io::stdin().read_line(&mut input)?;
+        match input.as_str().trim() {
+            "y" | "Y" => Ok(()),
+            _ => bail!("Aborting"),
+        }
+    }
+
+    /// Returns target packages to be assembled on the builder machine, limited
+    /// to those specified in `only` (if set).
+    pub fn packages_to_build(&self) -> PackageMap<'_> {
+        let packages = self.package_config.packages_to_build(&self.target);
+        if self.only.is_empty() {
+            return packages;
+        }
+
+        let mut filtered_packages = PackageMap(BTreeMap::new());
+        let mut to_walk = PackageMap(BTreeMap::new());
+        // add the requested packages to `to_walk`
+        for package_name in &self.only {
+            to_walk.0.insert(
+                package_name,
+                packages.0.get(package_name).unwrap_or_else(|| {
+                    panic!(
+                        "Explicitly-requested package '{}' does not exist",
+                        package_name
+                    )
+                }),
+            );
+        }
+        // dependencies are listed by output name, so create a lookup table to
+        // get a package by its output name.
+        let lookup_by_output = packages
+            .0
+            .iter()
+            .map(|(name, package)| {
+                (package.get_output_file(name), (*name, *package))
+            })
+            .collect::<BTreeMap<_, _>>();
+        // packages yet to be walked are added to `to_walk`. pop each entry and
+        // add its dependencies to `to_walk`, then add the package we finished
+        // walking to `filtered_packages`.
+        while let Some((package_name, package)) = to_walk.0.pop_first() {
+            if let PackageSource::Composite { packages } = &package.source {
+                for output in packages {
+                    // find the package by output name
+                    let (dep_name, dep_package) =
+                        lookup_by_output.get(output).unwrap_or_else(|| {
+                            panic!(
+                                "Could not find a package which creates '{}'",
+                                output
+                            )
+                        });
+                    if dep_name.as_str() == package_name {
+                        panic!("'{}' depends on itself", package_name);
+                    }
+                    // if we've seen this package already, it will be in
+                    // `filtered_packages`. otherwise, add it to `to_walk`.
+                    if !filtered_packages.0.contains_key(dep_name) {
+                        to_walk.0.insert(dep_name, dep_package);
+                    }
+                }
+            }
+            // we're done looking at this package's deps
+            filtered_packages.0.insert(package_name, package);
+        }
+        filtered_packages
+    }
+
+    /// Return a list of all possible Cargo features that could be requested for
+    /// the packages being built.
+    ///
+    /// Out of these, the features that actually get requested are determined by
+    /// which features are available for the list of packages being built.
+    pub fn cargo_features(&self) -> Vec<String> {
+        self.target
+            .0
+            .iter()
+            .map(|(name, value)| format!("{name}-{value}"))
+            .collect::<Vec<_>>()
+    }
+}
diff --git a/package/src/lib.rs b/package/src/lib.rs
index 9d58f476b2..8ef9a4c951 100644
--- a/package/src/lib.rs
+++ b/package/src/lib.rs
@@ -6,6 +6,7 @@ use serde::de::DeserializeOwned;
 use thiserror::Error;
 
 pub mod cargo_plan;
+pub mod config;
 pub mod dot;
 pub mod target;
 
diff --git a/package/src/target.rs b/package/src/target.rs
index 6a6cbd32d8..d56f7e87c5 100644
--- a/package/src/target.rs
+++ b/package/src/target.rs
@@ -200,3 +200,11 @@ impl std::str::FromStr for KnownTarget {
         )
     }
 }
+
+/// Generate a command to build a target, for use in usage strings.
+pub fn target_command_help(target_name: &str) -> String {
+    format!(
+        "{} -t {target_name} target",
+        std::env::current_exe().unwrap().display(),
+    )
+}
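
Usage sketch (not part of the diff above): a minimal example of how the flattened ConfigArgs behaves from a caller's point of view, assuming the crate paths introduced in this change. The DemoArgs struct and the simulated argument list below are illustrative stand-ins, not code from the repository.

use std::time::Duration;

use camino::Utf8PathBuf;
use clap::Parser;
use omicron_package::config::ConfigArgs;

#[derive(Debug, Parser)]
struct DemoArgs {
    /// Stand-in for the binary's `--artifacts` flag.
    #[clap(long = "artifacts", default_value = "out/")]
    artifact_dir: Utf8PathBuf,

    /// The shared flags (-t/--target, -f/--force, retry knobs) now arrive
    /// through a single flattened struct instead of separate fields on Args.
    #[clap(flatten)]
    config_args: ConfigArgs,
}

fn main() {
    // Simulate a command line; in the real binary this comes from the shell.
    let args = DemoArgs::parse_from([
        "demo",
        "--artifacts",
        "out/",
        "-t",
        "dev",
        "--retry-duration",
        "250",
    ]);

    // Defaults and parsing match the ConfigArgs definition in the diff:
    // retry_count defaults to 10, retry_duration is given in milliseconds.
    assert_eq!(args.config_args.target.as_deref(), Some("dev"));
    assert_eq!(args.config_args.retry_count, 10);
    assert_eq!(args.config_args.retry_duration, Duration::from_millis(250));
    assert!(!args.config_args.force);

    println!("parsed: {:?}", args.config_args);
}

Flattening keeps these flags identical across subcommands, while Config::get_config owns the fallback from an unspecified -t to Config::ACTIVE when it resolves the target file under the artifact directory.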