Mirror of https://github.com/esp-rs/esp-hal.git, synced 2025-09-27 04:10:28 +00:00
xtask: housekeeping, adding logs, adding missing docs (#4106)

* xtask housekeeping, adding logs, adding missing docs
* Use anyhow::Context
parent 646495eb16
commit 139ce0e0c7
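The recurring pattern throughout this diff is replacing bare `?` propagation with `anyhow::Context`, so that failures report what was being attempted and on which path. A minimal sketch of the pattern, assuming the same `anyhow` and `toml_edit` dependencies the xtask crate already uses (the `read_manifest` helper itself is illustrative, not part of the diff):

use std::path::Path;

use anyhow::{Context, Result};

/// Illustrative helper: read and parse a Cargo.toml, attaching context to both failure points.
fn read_manifest(dir: &Path) -> Result<toml_edit::DocumentMut> {
    let path = dir.join("Cargo.toml");
    // `with_context` is lazy: the message is only formatted if the call fails.
    let text = std::fs::read_to_string(&path)
        .with_context(|| format!("Failed to read {}", path.display()))?;
    text.parse::<toml_edit::DocumentMut>()
        .with_context(|| format!("Failed to parse {}", path.display()))
}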
@@ -13,12 +13,14 @@ use toml_edit::{DocumentMut, Formatted, Item, Value};

use crate::{Package, windows_safe_path};

/// Actions that can be performed with Cargo.
#[derive(Clone, Debug, PartialEq)]
pub enum CargoAction {
Build(Option<PathBuf>),
Run,
}

/// Information about a built artifact.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Artifact {
pub executable: PathBuf,
@@ -26,7 +28,9 @@ pub struct Artifact {

/// Execute cargo with the given arguments and from the specified directory.
pub fn run(args: &[String], cwd: &Path) -> Result<()> {
run_with_env::<[(&str, &str); 0], _, _>(args, cwd, [], false)?;
run_with_env::<[(&str, &str); 0], _, _>(args, cwd, [], false).with_context(|| {
format!("Failed to execute cargo with given arguments {args:?} in cwd {cwd:?}",)
})?;
Ok(())
}

@@ -76,7 +80,10 @@ where
command.env_remove("CARGO");
}

let output = command.stdin(Stdio::inherit()).output()?;
let output = command
.stdin(Stdio::inherit())
.output()
.with_context(|| format!("Couldn't get output for command {command:?}"))?;

// Make sure that we return an appropriate exit code here, as Github Actions
// requires this in order to function correctly:
@@ -107,6 +114,7 @@ fn get_cargo() -> String {
cargo
}

/// A builder for constructing cargo command line arguments.
#[derive(Clone, Debug, Default)]
pub struct CargoArgsBuilder {
toolchain: Option<String>,
@@ -117,6 +125,7 @@ pub struct CargoArgsBuilder {
}

impl CargoArgsBuilder {
/// Set the Rust toolchain to use.
#[must_use]
pub fn toolchain<S>(mut self, toolchain: S) -> Self
where
@@ -126,6 +135,7 @@ impl CargoArgsBuilder {
self
}

/// Set the cargo subcommand to use.
#[must_use]
pub fn subcommand<S>(mut self, subcommand: S) -> Self
where
@@ -135,6 +145,7 @@ impl CargoArgsBuilder {
self
}

/// Set the compilation target to use.
#[must_use]
pub fn target<S>(mut self, target: S) -> Self
where
@@ -144,12 +155,14 @@ impl CargoArgsBuilder {
self
}

/// Set the cargo features to use.
#[must_use]
pub fn features(mut self, features: &[String]) -> Self {
self.features = features.to_vec();
self
}

/// Add a single argument to the cargo command line.
#[must_use]
pub fn arg<S>(mut self, arg: S) -> Self
where
@@ -159,6 +172,7 @@ impl CargoArgsBuilder {
self
}

/// Add multiple arguments to the cargo command line.
#[must_use]
pub fn args<S>(mut self, args: &[S]) -> Self
where
@@ -170,6 +184,7 @@ impl CargoArgsBuilder {
self
}

/// Add a single argument to the cargo command line.
pub fn add_arg<S>(&mut self, arg: S) -> &mut Self
where
S: Into<String>,
@@ -178,6 +193,7 @@ impl CargoArgsBuilder {
self
}

/// Build the final list of cargo command line arguments.
#[must_use]
pub fn build(&self) -> Vec<String> {
let mut args = vec![];
@@ -200,13 +216,18 @@ impl CargoArgsBuilder {
args.push(arg.clone());
}

log::debug!("Built cargo args: {:?}", args);
args
}
}

/// A representation of a Cargo.toml file for a specific package.
pub struct CargoToml<'a> {
/// The workspace path where the Cargo.toml is located.
pub workspace: &'a Path,
/// The package this Cargo.toml belongs to.
pub package: Package,
/// The parsed Cargo.toml manifest.
pub manifest: toml_edit::DocumentMut,
}

@@ -214,6 +235,7 @@ const DEPENDENCY_KINDS: [&'static str; 3] =
["dependencies", "dev-dependencies", "build-dependencies"];

impl<'a> CargoToml<'a> {
/// Load and parse the Cargo.toml for the specified package in the given workspace.
pub fn new(workspace: &'a Path, package: Package) -> Result<Self> {
let package_path = workspace.join(package.to_string());
let manifest_path = package_path.join("Cargo.toml");
@@ -230,15 +252,19 @@ impl<'a> CargoToml<'a> {
Self::from_str(workspace, package, &manifest)
}

/// Create a `CargoToml` instance from a manifest string.
pub fn from_str(workspace: &'a Path, package: Package, manifest: &str) -> Result<Self> {
// Parse the manifest string into a mutable TOML document.
Ok(Self {
workspace,
package,
manifest: manifest.parse::<DocumentMut>()?,
manifest: manifest
.parse::<DocumentMut>()
.with_context(|| format!("Manifest {manifest} parsing failed!"))?,
})
}

/// Check if the package is published to crates.io.
pub fn is_published(&self) -> bool {
// Check if the package is published by looking for the `publish` key
// in the manifest.
@@ -253,14 +279,17 @@ impl<'a> CargoToml<'a> {
publish.as_bool().unwrap_or(true)
}

/// Get the absolute path to the package directory.
pub fn package_path(&self) -> PathBuf {
self.workspace.join(self.package.to_string())
}

/// Get the absolute path to the Cargo.toml file of the package.
pub fn manifest_path(&self) -> PathBuf {
self.package_path().join("Cargo.toml")
}

/// Get the current version of the package.
pub fn version(&self) -> &str {
self.manifest["package"]["version"]
.as_str()
@@ -269,10 +298,12 @@ impl<'a> CargoToml<'a> {
.trim_matches('"')
}

/// Get the current version of the package as a `semver::Version`.
pub fn package_version(&self) -> semver::Version {
semver::Version::parse(self.version()).expect("Failed to parse version")
}

/// Set the version of the package to the specified version.
pub fn set_version(&mut self, version: &semver::Version) {
log::info!(
"Bumping version for package: {} ({} -> {version})",
@@ -282,6 +313,7 @@ impl<'a> CargoToml<'a> {
self.manifest["package"]["version"] = toml_edit::value(version.to_string());
}

/// Save the modified Cargo.toml back to disk.
pub fn save(&self) -> Result<()> {
let manifest_path = self.manifest_path();
std::fs::write(&manifest_path, self.manifest.to_string())
@@ -332,6 +364,7 @@ impl<'a> CargoToml<'a> {
);
}

/// Returns the package this Cargo.toml belongs to.
pub fn package(&self) -> Package {
self.package
}

@@ -10,6 +10,7 @@ pub(crate) struct Changelog {
}

impl Changelog {
/// Parse the changelog, normalizing it in the process.
pub fn parse(changelog: &str) -> Result<Self> {
let mut lines = changelog.lines().peekable();

@@ -44,6 +45,7 @@ impl Changelog {
Ok(this)
}

/// Finalize the changelog for a new release.
pub fn finalize(
&mut self,
package: Package,

@@ -15,6 +15,7 @@ use crate::{
// ----------------------------------------------------------------------------
// Subcommands

/// Build subcommands and their arguments.
#[derive(Debug, Subcommand)]
pub enum Build {
/// Build documentation for the specified chip.
@@ -33,6 +34,7 @@ pub enum Build {
// ----------------------------------------------------------------------------
// Subcommand Arguments

/// Arguments for building documentation.
#[derive(Debug, Default, Args)]
pub struct BuildDocumentationArgs {
/// Package(s) to document.
@@ -49,6 +51,7 @@ pub struct BuildDocumentationArgs {
pub serve: bool,
}

/// Arguments for building a package.
#[derive(Debug, Args)]
pub struct BuildPackageArgs {
/// Package to build.
@@ -71,7 +74,13 @@ pub struct BuildPackageArgs {
// ----------------------------------------------------------------------------
// Subcommand Actions

/// Build documentation for the specified packages and chips.
pub fn build_documentation(workspace: &Path, mut args: BuildDocumentationArgs) -> Result<()> {
log::debug!(
"Building documentation for packages {:?} on chips {:?}",
args.packages,
args.chips
);
crate::documentation::build_documentation(
workspace,
&mut args.packages,
@@ -112,6 +121,7 @@ pub fn build_documentation(workspace: &Path, mut args: BuildDocumentationArgs) -
Ok(())
}

/// Build the documentation index for all packages.
#[cfg(feature = "deploy-docs")]
pub fn build_documentation_index(workspace: &Path) -> Result<()> {
let mut packages = Package::iter().collect::<Vec<_>>();
@@ -120,6 +130,7 @@ pub fn build_documentation_index(workspace: &Path) -> Result<()> {
Ok(())
}

/// Build all examples for the specified package and chip.
pub fn build_examples(
args: ExamplesArgs,
examples: Vec<Metadata>,
@@ -148,7 +159,14 @@ pub fn build_examples(
})
}

/// Build the specified package with the given options.
pub fn build_package(workspace: &Path, args: BuildPackageArgs) -> Result<()> {
log::debug!(
"Building package '{}' with target '{:?}' and features {:?}",
args.package,
args.target,
args.features
);
// Absolute path of the package's root:
let package_path = crate::windows_safe_path(&workspace.join(args.package.to_string()));

@@ -4,6 +4,8 @@ use anyhow::{Context, Result, bail};

use crate::{Package, changelog::Changelog};

/// Check the changelogs for the specified packages. If `normalize` is true, rewrite
/// the changelogs in a normalized format.
pub fn check_changelog(workspace: &Path, packages: &[Package], normalize: bool) -> Result<()> {
let mut failed = false;
for package in packages {
@@ -38,7 +40,12 @@ fn check_changelog_for_package(workspace: &Path, package: Package, normalize: bo
.with_context(|| format!("Could not parse {}", changelog_path.display()))?;

if normalize {
std::fs::write(&changelog_path, changelog.to_string())?;
std::fs::write(&changelog_path, changelog.to_string()).with_context(|| {
format!(
"Failed to write changelog into {}",
changelog_path.display()
)
})?;
}

Ok(())

@@ -1,6 +1,6 @@
use std::path::Path;

use anyhow::{Result, bail};
use anyhow::{Context, Result, bail};
use clap::Args;
use esp_metadata::Chip;
use inquire::Select;
@@ -16,6 +16,7 @@ mod run;
// ----------------------------------------------------------------------------
// Subcommand Arguments

/// Arguments common to commands which act on examples.
#[derive(Debug, Args)]
pub struct ExamplesArgs {
/// Example to act on ("all" will execute every example).
@@ -39,6 +40,7 @@ pub struct ExamplesArgs {
pub timings: bool,
}

/// Arguments common to commands which act on doctests.
#[derive(Debug, Args)]
pub struct DocTestArgs {
/// Package where we wish to run doc tests.
@@ -49,6 +51,7 @@ pub struct DocTestArgs {
pub chip: Chip,
}

/// Arguments common to commands which act on tests.
#[derive(Debug, Args)]
pub struct TestsArgs {
/// Chip to target.
@@ -76,7 +79,13 @@ pub struct TestsArgs {
// ----------------------------------------------------------------------------
// Subcommand Actions

/// Execute the given action on the specified examples.
pub fn examples(workspace: &Path, mut args: ExamplesArgs, action: CargoAction) -> Result<()> {
log::debug!(
"Running examples for '{}' on '{:?}'",
args.package,
args.chip
);
if args.chip.is_none() {
let chip_variants = Chip::iter().collect::<Vec<_>>();

@@ -88,7 +97,12 @@ pub fn examples(workspace: &Path, mut args: ExamplesArgs, action: CargoAction) -
let chip = args.chip.unwrap();

// Ensure that the package/chip combination provided are valid:
args.package.validate_package_chip(&chip)?;
args.package.validate_package_chip(&chip).with_context(|| {
format!(
"The package '{0}' does not support the chip '{chip:?}'",
args.package
)
})?;

// If the 'esp-hal' package is specified, what we *really* want is the
// 'examples' package instead:
@@ -110,7 +124,12 @@ pub fn examples(workspace: &Path, mut args: ExamplesArgs, action: CargoAction) -
// metadata comments in the source files. As such, it needs to load its metadata differently
// than other packages.
let examples = if args.package == Package::Examples {
crate::firmware::load_cargo_toml(&package_path)?
crate::firmware::load_cargo_toml(&package_path).with_context(|| {
format!(
"Failed to load specified examples from {}",
package_path.display()
)
})?
} else {
let example_path = match args.package {
Package::QaTest => package_path.join("src").join("bin"),
@@ -184,6 +203,7 @@ pub fn examples(workspace: &Path, mut args: ExamplesArgs, action: CargoAction) -
}
}

/// Execute the given action on the specified doctests.
pub fn tests(workspace: &Path, args: TestsArgs, action: CargoAction) -> Result<()> {
let (test_arg, filter) = if let Some(test_arg) = args.test.as_deref() {
match test_arg.split_once("::") {

@@ -33,6 +33,7 @@ pub const PLACEHOLDER: &str = "{{currentVersion}}";
// ----------------------------------------------------------------------------
// Subcommands

/// Release subcommands and their arguments.
#[derive(Debug, Subcommand)]
pub enum Release {
/// Create a release plan. This is the first step in the release process.
@@ -62,14 +63,12 @@ pub enum Release {
/// Bump the version of the specified package(s).
///
/// This command will, for each specified package:
/// - Verify that the crate can be released (e.g. it doesn't refer to git
/// dependencies)
/// - Verify that the crate can be released (e.g. it doesn't refer to git dependencies)
/// - Update the version in `Cargo.toml` files
/// - Update the version in dependencies' `Cargo.toml` files
/// - Check if the changelog can be finalized
/// - Update the version in the changelog
/// - Replaces `{{currentVersion}}` markers in source files and the
/// migration guide.
/// - Replaces `{{currentVersion}}` markers in source files and the migration guide.
BumpVersion(BumpVersionArgs),
/// Attempt to publish the specified package.
Publish(PublishArgs),

@@ -8,6 +8,7 @@ use toml_edit::value;

use crate::{Package, cargo::CargoToml};

/// Arguments for bumping the MSRV.
#[derive(Debug, Args)]
pub struct BumpMsrvArgs {
/// The MSRV to be used
@@ -36,7 +37,9 @@ pub struct BumpMsrvArgs {
/// If it detects a package which other packages in the repo depend on it will
/// also apply the changes there. (Can be disabled)
pub fn bump_msrv(workspace: &Path, args: BumpMsrvArgs) -> Result<()> {
let new_msrv = semver::Version::parse(&args.msrv)?;
log::debug!("Bumping MSRV...");
let new_msrv = semver::Version::parse(&args.msrv)
.with_context(|| format!("MSRV parsing with arguments {args:?} failed!"))?;
if !new_msrv.pre.is_empty() || !new_msrv.build.is_empty() {
bail!("Invalid MSRV: {}", args.msrv);
}
@@ -138,7 +141,13 @@ fn add_dependent_crates(

// iterate over ALL known crates
for package in Package::iter() {
let mut cargo_toml = CargoToml::new(workspace, package.clone())?;
let mut cargo_toml =
CargoToml::new(workspace, package.clone()).with_context(|| {
format!(
"Creating Cargo.toml in workspace {} for {package} failed!",
workspace.display()
)
})?;

// iterate the dependencies in the repo
for dep in cargo_toml.repo_dependencies() {

@@ -13,6 +13,7 @@ use toml_edit::{Item, TableLike, Value};

use crate::{Package, Version, cargo::CargoToml, changelog::Changelog, commands::PLACEHOLDER};

/// How to bump the version of a package.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
pub enum VersionBump {
PreRelease(String),
@@ -21,6 +22,7 @@ pub enum VersionBump {
Major,
}

/// Arguments for bumping the version of packages.
#[derive(Debug, Args)]
pub struct BumpVersionArgs {
/// How much to bump the version by.
@@ -41,6 +43,7 @@ pub struct BumpVersionArgs {
packages: Vec<Package>,
}

/// Bump the version of the specified packages by the specified amount.
pub fn bump_version(workspace: &Path, args: BumpVersionArgs) -> Result<()> {
// Bump the version by the specified amount for each given package:
for package in args.packages {
@@ -60,6 +63,7 @@ pub fn bump_version(workspace: &Path, args: BumpVersionArgs) -> Result<()> {
Ok(())
}

/// Update the specified package by bumping its version, updating its changelog,
pub fn update_package(
package: &mut CargoToml<'_>,
version: &VersionBump,
@@ -215,6 +219,7 @@ fn bump_crate_version(
Ok(version)
}

/// Perform the actual version bump logic.
pub fn do_version_bump(version: &semver::Version, amount: &VersionBump) -> Result<semver::Version> {
fn bump_version_number(version: &mut semver::Version, amount: &VersionBump) {
log::info!("Bumping version number: {version} by {amount:?}");

@@ -11,6 +11,7 @@ use crate::{
git::{current_branch, ensure_workspace_clean, get_remote_name_for},
};

/// Arguments for executing the release plan.
#[derive(Debug, Args)]
pub struct ApplyPlanArgs {
/// Actually make git changes. Without this flag, the command will only
@@ -23,8 +24,10 @@ pub struct ApplyPlanArgs {
manual_pull_request: bool,
}

/// Execute the release plan by making code changes, committing them to a new
pub fn execute_plan(workspace: &Path, args: ApplyPlanArgs) -> Result<()> {
ensure_workspace_clean(workspace)?;
ensure_workspace_clean(workspace)
.with_context(|| format!("Workspace {workspace:?} is not clean!"))?;

let plan_path = workspace.join("release_plan.jsonc");
let plan_path = crate::windows_safe_path(&plan_path);
@@ -41,7 +44,12 @@ pub fn execute_plan(workspace: &Path, args: ApplyPlanArgs) -> Result<()> {

// Make code changes
for step in plan.packages.iter_mut() {
let mut package = CargoToml::new(workspace, step.package)?;
let mut package = CargoToml::new(workspace, step.package).with_context(|| {
format!(
"Couldn't create Cargo.toml in workspace {workspace:?} for {:?}",
step.package
)
})?;

if package.package_version() != step.current_version {
if package.package_version() == step.new_version {

@@ -14,6 +14,7 @@ use crate::{
git::current_branch,
};

/// Arguments for generating a release plan.
#[derive(Debug, Args)]
pub struct PlanArgs {
/// Allow making a release from the current (non-main) branch. The
@@ -26,6 +27,7 @@ pub struct PlanArgs {
packages: Vec<Package>,
}

/// A package in the release plan.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct PackagePlan {
pub package: Package,
@@ -44,11 +46,14 @@ pub struct PackagePlan {
/// order in which the packages are released.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct Plan {
/// The branch the release is made from.
pub base: String,
/// The packages to be released, in order.
pub packages: Vec<PackagePlan>,
}

impl Plan {
/// Load a release plan from a file.
pub fn from_path(plan_path: &Path) -> Result<Self> {
let plan_source = std::fs::read_to_string(&plan_path)
.with_context(|| format!("Failed to read release plan from {}. Run `cargo xrelease plan` to generate a release plan.", plan_path.display()))?;
@@ -67,6 +72,7 @@ impl Plan {
}
}

/// Generate a release plan for the specified packages.
pub fn plan(workspace: &Path, args: PlanArgs) -> Result<()> {
let current_branch = ensure_main_branch(args.allow_non_main)?;

@@ -222,9 +228,13 @@ pub fn plan(workspace: &Path, args: PlanArgs) -> Result<()> {
"#,
);

let mut plan_file = std::fs::File::create(&plan_path)?;
plan_file.write_all(plan_header.as_bytes())?;
serde_json::to_writer_pretty(&mut plan_file, &plan)?;
let mut plan_file = std::fs::File::create(&plan_path)
.with_context(|| format!("Failed to create {plan_path:?}"))?;
plan_file
.write_all(plan_header.as_bytes())
.with_context(|| format!("Failed to write to {plan_file:?}"))?;
serde_json::to_writer_pretty(&mut plan_file, &plan)
.with_context(|| format!("Failed to serialize {plan:?} as pretty-printed JSON"))?;
log::debug!("Release plan written to {}", plan_path.display());

println!("Release plan written to {}.", plan_path.display());
@@ -237,6 +247,7 @@ pub fn plan(workspace: &Path, args: PlanArgs) -> Result<()> {
Ok(())
}

/// Ensure we are on the main branch, or allow non-main if specified.
pub fn ensure_main_branch(allow_non_main: bool) -> Result<String> {
let current_branch = current_branch()?;

@@ -339,7 +350,8 @@ mod tests {
use super::*;

#[test]
// Dependencies in this test do not always reflect real dependencies, it is only a test of correct function operation.
// Dependencies in this test do not always reflect real dependencies, it is only a test of
// correct function operation.
fn test_topological_sort() {
let mut dep_graph = HashMap::new();
dep_graph.insert(Package::EspHal, vec![Package::EspAlloc]);

@@ -1,12 +1,12 @@
use std::fs;

use super::PLACEHOLDER;
use super::Plan;
use super::execute_plan::make_git_changes;
use crate::commands::comparison_url;
use anyhow::{Context, Result};
use semver::Version;

use super::{PLACEHOLDER, Plan, execute_plan::make_git_changes};
use crate::commands::comparison_url;

/// Perform post-release tasks such as creating migration guides for packages that have them.
pub fn post_release(workspace: &std::path::Path) -> Result<()> {
// Read the release plan
let plan_path = workspace.join("release_plan.jsonc");
@@ -28,8 +28,16 @@ pub fn post_release(workspace: &std::path::Path) -> Result<()> {
let cargo_toml_path = package_path.join("Cargo.toml");

// Read and parse Cargo.toml
let cargo_toml_content = fs::read_to_string(&cargo_toml_path)?;
let cargo_toml = cargo_toml_content.parse::<toml_edit::DocumentMut>()?;
let cargo_toml_content = fs::read_to_string(&cargo_toml_path)
.with_context(|| format!("Failed to read from {:?}", cargo_toml_path))?;
let cargo_toml = cargo_toml_content
.parse::<toml_edit::DocumentMut>()
.with_context(|| {
format!(
"Failed to parse Cargo.toml at {}",
cargo_toml_path.display()
)
})?;

// Extract version from Cargo.toml
let version_str = cargo_toml["package"]["version"].as_str().ok_or_else(|| {
@@ -40,7 +48,8 @@ pub fn post_release(workspace: &std::path::Path) -> Result<()> {
})?;

// Parse version using semver and zero out patch version
let mut version = Version::parse(version_str)?;
let mut version = Version::parse(version_str)
.with_context(|| format!("Failed to parse version {version_str:?}"))?;
version.patch = 0;

// Generate migration guide filename
@@ -51,7 +60,8 @@ pub fn post_release(workspace: &std::path::Path) -> Result<()> {
if !migration_file_path.exists() {
// Create the title content
let title = format!("# Migration Guide from {} to {}\n", version, PLACEHOLDER);
fs::write(&migration_file_path, title)?;
fs::write(&migration_file_path, title)
.with_context(|| format!("Failed to write to {migration_file_path:?}"))?;
log::info!("Created migration guide: {}", migration_file_path.display());
} else {
log::info!(

@@ -1,10 +1,11 @@
use std::path::Path;

use anyhow::{Result, ensure};
use anyhow::{Context, Result, ensure};
use clap::Args;

use crate::{Package, cargo::CargoArgsBuilder, windows_safe_path};

/// Arguments for publishing a package to crates.io.
#[derive(Debug, Args)]
pub struct PublishArgs {
/// Package to publish (performs a dry-run by default).
@@ -16,6 +17,7 @@ pub struct PublishArgs {
no_dry_run: bool,
}

/// Publish a package to crates.io.
pub fn publish(workspace: &Path, args: PublishArgs) -> Result<()> {
let package_name = args.package.to_string();
let package_path = windows_safe_path(&workspace.join(&package_name));
@@ -44,7 +46,8 @@ pub fn publish(workspace: &Path, args: PublishArgs) -> Result<()> {
log::debug!("{args:#?}");

// Execute `cargo publish` command from the package root:
crate::cargo::run(&args, &package_path)?;
crate::cargo::run(&args, &package_path)
.with_context(|| format!("Failed to run `cargo publish` with {args:?} args"))?;

Ok(())
}

@@ -9,6 +9,7 @@ use crate::{
git::{current_branch, ensure_workspace_clean, get_remote_name_for},
};

/// Arguments for publishing the packages in the release plan to crates.io.
#[derive(Debug, Args)]
pub struct PublishPlanArgs {
/// Do not pass the `--dry-run` argument, actually try to publish.
@@ -16,7 +17,9 @@ pub struct PublishPlanArgs {
no_dry_run: bool,
}

/// Publish the packages in the release plan to crates.io.
pub fn publish_plan(workspace: &Path, args: PublishPlanArgs) -> Result<()> {
log::debug!("Publishing packages in the release plan...");
ensure_workspace_clean(workspace)?;

let plan_path = workspace.join("release_plan.jsonc");
@@ -47,6 +50,11 @@ pub fn publish_plan(workspace: &Path, args: PublishPlanArgs) -> Result<()> {
// Check that all packages are updated and ready to go. This is meant to prevent
// publishing unupdated packages.
for (step, toml) in plan.packages.iter().zip(tomls.iter()) {
log::debug!(
"Checking that package {} is updated to {}...",
step.package,
step.new_version
);
if toml.package_version() != step.new_version {
if toml.package_version() == step.current_version {
bail!(
@@ -63,6 +71,7 @@ pub fn publish_plan(workspace: &Path, args: PublishPlanArgs) -> Result<()> {

// Actually publish the packages.
for (step, toml) in plan.packages.iter().zip(tomls.iter()) {
log::debug!("Actually publishing package {}...", step.package);
let mut publish_args =
if step.package.has_chip_features() || step.package.has_inline_assembly(workspace) {
vec!["--no-verify"]
@@ -82,7 +91,8 @@ pub fn publish_plan(workspace: &Path, args: PublishPlanArgs) -> Result<()> {
log::debug!("{args:#?}");

// Execute `cargo publish` command from the package root:
crate::cargo::run(&args, &toml.package_path())?;
crate::cargo::run(&args, &toml.package_path())
.with_context(|| format!("Failed to run `cargo publish` with {args:?} args"))?;
}

// Tag the releases
@@ -91,6 +101,8 @@ pub fn publish_plan(workspace: &Path, args: PublishPlanArgs) -> Result<()> {
let tag_name = toml.package.tag(&toml.package_version());
let tag_message = format!("{} {}", step.package, toml.version());

log::debug!("Tagging package {} with tag {}...", step.package, tag_name);

if args.no_dry_run {
let output = Command::new("git")
.arg("tag")

@@ -6,14 +6,17 @@ use strum::IntoEnumIterator;

use crate::Package;

/// Commands for performing semver checks on the public API of packages.
#[derive(Debug, Subcommand)]
pub enum SemverCheckCmd {
GenerateBaseline,
Check,
}

/// Arguments for performing semver checks on the public API of packages.
#[derive(Debug, Args)]
pub struct SemverCheckArgs {
/// The semver check command to run.
#[command(subcommand)]
pub command: SemverCheckCmd,

@@ -26,6 +29,7 @@ pub struct SemverCheckArgs {
pub chips: Vec<Chip>,
}

/// Perform semver checks on the public API of packages.
pub fn semver_checks(workspace: &Path, args: SemverCheckArgs) -> anyhow::Result<()> {
#[cfg(not(feature = "semver-checks"))]
{
@@ -48,6 +52,7 @@ pub fn semver_checks(workspace: &Path, args: SemverCheckArgs) -> anyhow::Result<
}
}

/// Module containing functions for performing semver checks on the public API of packages.
#[cfg(feature = "semver-checks")]
pub mod checker {
use std::{
@@ -64,6 +69,7 @@ pub mod checker {
semver_check::{build_doc_json, minimum_update},
};

/// Generate the API baselines for the specified packages and chips.
pub fn generate_baseline(
workspace: &Path,
packages: Vec<Package>,
@@ -105,6 +111,7 @@ pub mod checker {
Ok(())
}

/// Determine the minimum required version bump for the specified package and chips.
pub fn min_package_update(
workspace: &Path,
package: Package,
@@ -140,6 +147,7 @@ pub mod checker {
Ok(highest_result)
}

/// Check for breaking changes in the specified packages and chips.
pub fn check_for_breaking_changes(
workspace: &Path,
packages: Vec<Package>,

@@ -6,6 +6,7 @@ use strum::IntoEnumIterator;

use crate::{Package, package_version};

/// Tag the releases for the specified packages.
#[derive(Debug, Args)]
pub struct TagReleasesArgs {
/// Package(s) to tag.
@@ -17,6 +18,7 @@ pub struct TagReleasesArgs {
no_dry_run: bool,
}

/// Tag the releases for the specified packages.
pub fn tag_releases(workspace: &Path, mut args: TagReleasesArgs) -> Result<()> {
args.packages.sort();

@@ -32,6 +32,7 @@ pub enum Run {
// ----------------------------------------------------------------------------
// Subcommand Arguments

/// Arguments for running ELFs.
#[derive(Debug, Args)]
pub struct RunElfsArgs {
/// Which chip to run the tests for.
@@ -44,7 +45,13 @@ pub struct RunElfsArgs {
// ----------------------------------------------------------------------------
// Subcommand Actions

/// Run doc tests for the specified package and chip.
pub fn run_doc_tests(workspace: &Path, args: DocTestArgs) -> Result<()> {
log::debug!(
"Running doc tests for '{}' on '{}'",
args.package,
args.chip
);
let chip = args.chip;

let package_name = args.package.to_string();
@@ -86,9 +93,12 @@ pub fn run_doc_tests(workspace: &Path, args: DocTestArgs) -> Result<()> {
Ok(())
}

/// Run all ELFs in the specified folder using `probe-rs`.
pub fn run_elfs(args: RunElfsArgs) -> Result<()> {
let mut failed: Vec<String> = Vec::new();
for elf in fs::read_dir(&args.path)? {
for elf in fs::read_dir(&args.path)
.with_context(|| format!("Failed to read {}", args.path.display()))?
{
let entry = elf?;

let elf_path = entry.path();
@@ -125,6 +135,7 @@ pub fn run_elfs(args: RunElfsArgs) -> Result<()> {
Ok(())
}

/// Run the specified examples for the given chip.
pub fn run_examples(
args: ExamplesArgs,
examples: Vec<Metadata>,

@@ -22,12 +22,14 @@ struct Manifest {
versions: HashSet<semver::Version>,
}

/// Build the documentation for the specified packages and chips.
pub fn build_documentation(
workspace: &Path,
packages: &mut [Package],
chips: &mut [Chip],
base_url: Option<String>,
) -> Result<()> {
log::info!("Building documentation for packages: {packages:?} on chips: {chips:?}");
let output_path = workspace.join("docs");

fs::create_dir_all(&output_path)
@@ -77,8 +79,10 @@ pub fn build_documentation(
// Write out the package manifest JSON file:
fs::write(
output_path.join(package.to_string()).join("manifest.json"),
serde_json::to_string(&manifest)?,
)?;
serde_json::to_string(&manifest)
.with_context(|| format!("Failed to parse {manifest:?}"))?,
)
.with_context(|| format!("Failed to write out {}", output_path.display()))?;

// Patch the generated documentation to include a select box for the version:
#[cfg(feature = "deploy-docs")]
@@ -152,18 +156,21 @@ fn build_documentation_for_package(
output_path.parent().unwrap().join("latest")
};
log::info!("Creating latest version redirect at {:?}", latest_path);
create_dir_all(latest_path.clone())?;
std::fs::File::create(latest_path.clone().join("index.html"))?.write_all(
format!(
"<meta http-equiv=\"refresh\" content=\"0; url=../{}/\" />",
if package.chip_features_matter() {
version.to_string()
} else {
format!("{}/{}", version, package.to_string().replace('-', "_"))
}
create_dir_all(latest_path.clone())
.with_context(|| format!("Failed to create dir in {}", latest_path.display()))?;
std::fs::File::create(latest_path.clone().join("index.html"))?
.write_all(
format!(
"<meta http-equiv=\"refresh\" content=\"0; url=../{}/\" />",
if package.chip_features_matter() {
version.to_string()
} else {
format!("{}/{}", version, package.to_string().replace('-', "_"))
}
)
.as_bytes(),
)
.as_bytes(),
)?;
.with_context(|| format!("Failed to create or write to {}", latest_path.display()))?;

Ok(())
}
@@ -185,6 +192,8 @@ fn cargo_doc(workspace: &Path, package: Package, chip: Option<Chip>) -> Result<P
"nightly"
};

log::debug!("Using toolchain '{toolchain}'");

// Determine the appropriate build target for the given package and chip,
// if we're able to:
let target = if let Some(ref chip) = chip {
@@ -263,7 +272,9 @@ fn patch_documentation_index_for_package(
let mut index_paths = Vec::new();

if package.chip_features_matter() {
for chip_path in fs::read_dir(version_path)? {
for chip_path in fs::read_dir(&version_path)
.with_context(|| format!("Failed to read {}", version_path.display()))?
{
let chip_path = chip_path?.path();
if chip_path.is_dir() {
let path = chip_path.join(&package_name).join("index.html");
@@ -276,7 +287,8 @@ fn patch_documentation_index_for_package(
}

for (version, index_path) in index_paths {
let html = fs::read_to_string(&index_path)?;
let html = fs::read_to_string(&index_path)
.with_context(|| format!("Failed to read {}", index_path.display()))?;
let document = kuchikiki::parse_html().one(html);

let elem = document
@@ -294,7 +306,8 @@ fn patch_documentation_index_for_package(
let node = elem.as_node();
node.append(kuchikiki::parse_html().one(html));

fs::write(&index_path, document.to_string())?;
fs::write(&index_path, document.to_string())
.with_context(|| format!("Failed to write to {}", index_path.display()))?;
}

Ok(())
@@ -303,6 +316,7 @@ fn patch_documentation_index_for_package(
// ----------------------------------------------------------------------------
// Build Documentation Index

/// Build the documentation index for all packages.
pub fn build_documentation_index(workspace: &Path, packages: &mut [Package]) -> Result<()> {
let docs_path = workspace.join("docs");
let resources_path = workspace.join("resources");
@@ -310,6 +324,7 @@ pub fn build_documentation_index(workspace: &Path, packages: &mut [Package]) ->
packages.sort();

for package in packages {
log::debug!("Building documentation index for package '{package}'");
// Not all packages have documentation built:
if !package.is_published(workspace) {
continue;
@@ -336,7 +351,9 @@ pub fn build_documentation_index(workspace: &Path, packages: &mut [Package]) ->
);
continue;
}
for version_path in fs::read_dir(package_docs_path)? {
for version_path in fs::read_dir(&package_docs_path)
.with_context(|| format!("Failed to read {}", &package_docs_path.display()))?
{
let version_path = version_path?.path();
if version_path.is_file() {
log::debug!(
@@ -350,7 +367,9 @@ pub fn build_documentation_index(workspace: &Path, packages: &mut [Package]) ->
continue;
}

for path in fs::read_dir(&version_path)? {
for path in fs::read_dir(&version_path)
.with_context(|| format!("Failed to read {}", version_path.display()))?
{
let path = path?.path();
if path.is_dir() {
device_doc_paths.push(path);
@@ -433,6 +452,8 @@ fn generate_documentation_meta_for_package(
});
}

log::debug!("Generated metadata for package '{package}': {metadata:#?}");

Ok(metadata)
}

@@ -461,6 +482,8 @@ fn generate_documentation_meta_for_index(workspace: &Path) -> Result<Vec<Value>>
});
}

log::debug!("Generated metadata for documentation index: {metadata:#?}");

Ok(metadata)
}

@@ -93,6 +93,7 @@ impl Metadata {
self.description.clone()
}

/// Check if the example matches the given filter.
pub fn matches(&self, filter: Option<&str>) -> bool {
let Some(filter) = filter else {
return false;
@@ -101,11 +102,13 @@ impl Metadata {
filter == self.binary_name() || filter == self.output_file_name()
}

/// Check if the example matches the given name (case insensitive).
pub fn matches_name(&self, name: &str) -> bool {
name.to_lowercase() == self.binary_name() || name.to_lowercase() == self.output_file_name()
}
}

/// A single configuration of an example, as parsed from metadata lines.
#[derive(Debug, Default, Clone)]
pub struct Configuration {
chips: Vec<Chip>,
@@ -184,7 +187,8 @@ fn parse_meta_line(line: &str) -> anyhow::Result<MetaLine> {
pub fn load(path: &Path) -> Result<Vec<Metadata>> {
let mut examples = Vec::new();

for entry in fs::read_dir(path)? {
for entry in fs::read_dir(path).context("Failed to read {path}")? {
log::debug!("Loading example from path: {}", path.display());
let path = windows_safe_path(&entry?.path());
let text = fs::read_to_string(&path)
.with_context(|| format!("Could not read {}", path.display()))?;
@@ -330,6 +334,7 @@ struct CargoToml {
features: HashMap<String, Vec<String>>,
}

/// Load all examples by finding all packages in the given path, and parsing their metadata.
pub fn load_cargo_toml(examples_path: &Path) -> Result<Vec<Metadata>> {
let mut examples = Vec::new();

@@ -337,6 +342,7 @@ pub fn load_cargo_toml(examples_path: &Path) -> Result<Vec<Metadata>> {
packages.sort();

for package_path in packages {
log::debug!("Loading package from path: {}", package_path.display());
let cargo_toml_path = package_path.join("Cargo.toml");
let main_rs_path = package_path.join("src").join("main.rs");

@@ -384,5 +390,7 @@ fn parse_description(text: &str) -> Option<String> {
description = Some(descr);
}

log::debug!("Parsed description: {:?}", description);

description
}

@@ -2,6 +2,7 @@ use std::process::Command;

use anyhow::{Context, Result};

/// Get the current git branch name.
pub fn current_branch() -> Result<String> {
let status = Command::new("git")
.arg("rev-parse")
@@ -14,6 +15,7 @@ pub fn current_branch() -> Result<String> {
}

#[cfg(feature = "release")]
/// Ensure that the git workspace is clean (no uncommitted changes).
pub fn ensure_workspace_clean(workspace: &std::path::Path) -> Result<()> {
std::env::set_current_dir(workspace)
.with_context(|| format!("Failed to change directory to {}", workspace.display()))?;
@@ -33,6 +35,7 @@ pub fn ensure_workspace_clean(workspace: &std::path::Path) -> Result<()> {
}

#[cfg(feature = "release")]
/// Get the remote name for the given repository URL.
pub fn get_remote_name_for(repo: &str) -> Result<String> {
let remotes = Command::new("git")
.arg("remote")

@@ -3,7 +3,7 @@ use std::{
path::{Path, PathBuf},
};

use anyhow::{Result, anyhow};
use anyhow::{Context, Result, anyhow};
use cargo::CargoAction;
use esp_metadata::{Chip, Config, TokenStream};
use serde::{Deserialize, Serialize};
@@ -135,6 +135,7 @@ impl Package {
false
}

/// Does the package have any host tests?
pub fn has_host_tests(&self, workspace: &Path) -> bool {
let package_path = workspace.join(self.to_string()).join("src");

@@ -287,6 +288,7 @@ impl Package {
_ => {}
}

log::debug!("Features for package '{}': {:?}", self, features);
features
}

@@ -333,6 +335,7 @@ impl Package {
_ => {}
}

log::debug!("Lint feature cases for package '{}': {:?}", self, cases);
cases
}

@@ -369,6 +372,11 @@ impl Package {

/// Creates a tag string for this [`Package`] combined with a semantic version.
pub fn tag(&self, version: &semver::Version) -> String {
log::debug!(
"Creating tag for package '{}' with version '{}'",
self,
version
);
format!("{self}-v{version}")
}

@@ -523,12 +531,17 @@ pub fn execute_app(
// ----------------------------------------------------------------------------
// Helper Functions

// Copy an entire directory recursively.
/// Copy an entire directory recursively.
// https://stackoverflow.com/a/65192210
pub fn copy_dir_all(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result<()> {
fs::create_dir_all(&dst)?;
log::debug!(
"Copying directory '{}' to '{}'",
src.as_ref().display(),
dst.as_ref().display()
);
fs::create_dir_all(&dst).with_context(|| "Failed to create a {dst}")?;

for entry in fs::read_dir(src)? {
for entry in fs::read_dir(src).with_context(|| "Failed to read {src}")? {
let entry = entry?;
let ty = entry.file_type()?;

@@ -546,7 +559,7 @@ pub fn copy_dir_all(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result<()>
/// workspace.
pub fn package_paths(workspace: &Path) -> Result<Vec<PathBuf>> {
let mut paths = Vec::new();
for entry in fs::read_dir(workspace)? {
for entry in fs::read_dir(workspace).context("Failed to read {workspace}")? {
let entry = entry?;
if entry.file_type()?.is_dir() && entry.path().join("Cargo.toml").exists() {
paths.push(entry.path());
@@ -555,6 +568,12 @@ pub fn package_paths(workspace: &Path) -> Result<Vec<PathBuf>> {

paths.sort();

log::debug!(
"Found {} packages in workspace '{}':",
paths.len(),
workspace.display()
);

Ok(paths)
}

@@ -586,6 +605,7 @@ pub fn format_package(workspace: &Path, package: Package, check: bool) -> Result
Ok(())
}

/// Run the host tests for the specified package.
pub fn run_host_tests(workspace: &Path, package: Package) -> Result<()> {
log::info!("Running host tests for package: {}", package);
let package_path = workspace.join(package.as_ref());
@@ -698,11 +718,14 @@ fn format_package_path(workspace: &Path, package_path: &Path, check: bool) -> Re
));
cargo_args.extend(source_files);

log::debug!("{cargo_args:#?}");

cargo::run(&cargo_args, &package_path)
}

/// Update the metadata and chip support table in the esp-hal README.
pub fn update_metadata(workspace: &Path, check: bool) -> Result<()> {
log::info!("Updating esp-metadata and chip support table...");
update_chip_support_table(workspace)?;
generate_metadata(workspace, save)?;

@@ -711,7 +734,8 @@ pub fn update_metadata(workspace: &Path, check: bool) -> Result<()> {
if check {
let res = std::process::Command::new("git")
.args(["diff", "HEAD", "esp-metadata-generated"])
.output()?;
.output()
.context("Failed to run `git diff HEAD esp-metadata-generated`")?;
if !res.stdout.is_empty() {
return Err(anyhow::Error::msg(
"detected `esp-metadata-generated` changes. Run `cargo xtask update-metadata`, and commit the changes.",
@@ -763,8 +787,10 @@ fn save(out_path: &Path, tokens: TokenStream) -> Result<()> {
}

fn update_chip_support_table(workspace: &Path) -> Result<()> {
log::debug!("Updating chip support table in README.md...");
let mut output = String::new();
let readme = std::fs::read_to_string(workspace.join("esp-hal").join("README.md"))?;
let readme = std::fs::read_to_string(workspace.join("esp-hal").join("README.md"))
.context("Failed to read {workspace}")?;

let mut in_support_table = false;
let mut generate_support_table = true;
@@ -799,7 +825,10 @@ fn update_chip_support_table(workspace: &Path) -> Result<()> {
pub fn find_packages(path: &Path) -> Result<Vec<PathBuf>> {
let mut packages = Vec::new();

for result in fs::read_dir(path)? {
for result in
fs::read_dir(path).with_context(|| format!("Failed to read {}", path.display()))?
{
log::debug!("Inspecting path: {}", path.display());
let entry = result?;
if entry.path().is_file() {
continue;
@@ -813,5 +842,11 @@ pub fn find_packages(path: &Path) -> Result<Vec<PathBuf>> {
}
}

log::debug!(
"Found {} packages in path '{}':",
packages.len(),
path.display()
);

Ok(packages)
}

@@ -4,7 +4,7 @@ use std::{
time::Instant,
};

use anyhow::{Result, bail};
use anyhow::{Context, Result, bail};
use clap::{Args, Parser};
use esp_metadata::{Chip, Config};
use strum::IntoEnumIterator;
@@ -131,7 +131,8 @@ fn main() -> Result<()> {
builder.target(env_logger::Target::Stdout);
builder.init();

let workspace = std::env::current_dir()?;
let workspace =
std::env::current_dir().with_context(|| format!("Failed to get the current dir!"))?;
let target_path = Path::new("target");

match Cli::parse() {
@@ -197,6 +198,7 @@ fn fmt_packages(workspace: &Path, args: FmtPackagesArgs) -> Result<()> {
packages.sort();

for package in packages {
log::info!("Formatting package: {}", package);
xtask::format_package(workspace, package, args.check)?;
}

@@ -213,13 +215,19 @@ fn clean(workspace: &Path, args: CleanArgs) -> Result<()> {

let cargo_args = CargoArgsBuilder::default().subcommand("clean").build();

xtask::cargo::run(&cargo_args, &path)?;
xtask::cargo::run(&cargo_args, &path).with_context(|| {
format!(
"Failed to run `cargo run` with {cargo_args:?} in {}",
path.display()
)
})?;
}

Ok(())
}

fn lint_packages(workspace: &Path, args: LintPackagesArgs) -> Result<()> {
log::debug!("Linting packages: {:?}", args.packages);
let mut packages = args.packages;
packages.sort();

@@ -228,6 +236,7 @@ fn lint_packages(workspace: &Path, args: LintPackagesArgs) -> Result<()> {
// building, so we need to handle each individually (though there
// is *some* overlap)
for chip in &args.chips {
log::debug!(" for chip: {}", chip);
let device = Config::for_chip(chip);

if package.validate_package_chip(chip).is_err() {
@@ -317,7 +326,13 @@ fn lint_package(
&path,
[("CI", "1"), ("DEFMT_LOG", "trace")],
false,
)?;
)
.with_context(|| {
format!(
"Failed to run `cargo run` with {args:?} `CI, `1`, `DEFMT_LOG`, and `trace` envs in {}",
path.display()
)
})?;

Ok(())
}
@@ -370,6 +385,7 @@ impl Runner {
}

fn run_ci_checks(workspace: &Path, args: CiArgs) -> Result<()> {
log::info!("Running CI checks for chip: {}", args.chip);
println!("::add-matcher::.github/rust-matchers.json");

let mut runner = Runner::new();
@@ -440,21 +456,35 @@ fn run_ci_checks(workspace: &Path, args: CiArgs) -> Result<()> {
"./esp-lp-hal/target/{}/release/examples",
args.chip.target()
));
from_dir.read_dir()?.for_each(|entry| {
let entry = entry.unwrap();
let path = entry.path();
let to = to_dir.join(entry.file_name());
fs::copy(path, to).expect("Failed to copy file");
});
from_dir
.read_dir()
.with_context(|| format!("Failed to read from {}", from_dir.display()))?
.for_each(|entry| {
let entry = entry.unwrap();
let path = entry.path();
let to = to_dir.join(entry.file_name());
fs::copy(&path, &to).expect(
format!("Failed to copy {} to {}", path.display(), to.display())
.as_str(),
);
});
Ok(())
});

// remove the (now) obsolete duplicates
log::debug!("Removing obsolete LP-HAL example duplicates");
std::fs::remove_dir_all(PathBuf::from(format!(
"./esp-lp-hal/target/{}/release/examples/{}",
args.chip.target(),
args.chip
)))?;
)))
.with_context(|| {
format!(
"Failed to remove duplicates in ./esp-lp-hal/target/{}/release/examples/{}",
args.chip.target(),
args.chip
)
})?;

result
});
@@ -489,8 +519,10 @@ fn run_ci_checks(workspace: &Path, args: CiArgs) -> Result<()> {
runner.run("Build examples", || {
// The `ota_example` expects a file named `examples/target/ota_image` - it
// doesn't care about the contents however
std::fs::create_dir_all("./examples/target")?;
std::fs::write("./examples/target/ota_image", "DUMMY")?;
std::fs::create_dir_all("./examples/target")
.with_context(|| format!("Failed to create `./examples/target`"))?;
std::fs::write("./examples/target/ota_image", "DUMMY")
.with_context(|| format!("Failed to create a dummy file required by ota example!"))?;

examples(
workspace,
@@ -529,6 +561,7 @@ fn host_tests(workspace: &Path, args: HostTestsArgs) -> Result<()> {
packages.sort();

for package in packages {
log::debug!("Running host-tests for package: {}", package);
if package.has_host_tests(workspace) {
xtask::run_host_tests(workspace, package)?;
}

@@ -4,6 +4,7 @@ use std::{
path::{Path, PathBuf},
};

use anyhow::{Context, Error};
use cargo_semver_checks::{Check, GlobalConfig, ReleaseType, Rustdoc};
use esp_metadata::Chip;

@@ -15,7 +16,7 @@ pub fn minimum_update(
workspace: &Path,
package: Package,
chip: Chip,
) -> Result<ReleaseType, anyhow::Error> {
) -> Result<ReleaseType, Error> {
log::info!("Package = {}, Chip = {}", package, chip);

let package_name = package.to_string();
@@ -30,7 +31,8 @@ pub fn minimum_update(

let baseline_path_gz =
PathBuf::from(&package_path).join(format!("api-baseline/{}.json.gz", file_name));
let baseline_path = temp_file::TempFile::new()?;
let baseline_path =
temp_file::TempFile::new().with_context(|| format!("Failed to create a TempFile!"))?;
let buffer = Vec::new();
let mut decoder = flate2::write::GzDecoder::new(buffer);
decoder.write_all(&(fs::read(&baseline_path_gz)?))?;
@@ -61,7 +63,7 @@ pub(crate) fn build_doc_json(
package: Package,
chip: &Chip,
package_path: &PathBuf,
) -> Result<PathBuf, anyhow::Error> {
) -> Result<PathBuf, Error> {
let target_dir = std::env::var("CARGO_TARGET_DIR");

let target_path = if let Ok(target) = target_dir {
@@ -108,6 +110,7 @@ pub(crate) fn build_doc_json(
.arg("--config=host.rustflags=[\"--cfg=instability_disable_unstable_docs\"]");
let cargo_args = cargo_builder.build();
log::debug!("{cargo_args:#?}");
crate::cargo::run_with_env(&cargo_args, package_path, envs, false)?;
crate::cargo::run_with_env(&cargo_args, package_path, envs, false)
.with_context(|| format!("Failed to run `cargo rustdoc` with {cargo_args:?}",))?;
Ok(current_path)
}