Random cleanups in non-checked packages (#2034)

* Deduplicate feature check macros

* Re-enable rust-analyzer for most of the workspace

* Cargo fix

* Turn off defmt

* Only build xtask

* Clippy pls

* Fix CI

* Fix paths

* Always create doc directory first

* Revert r-a

* Update esp-hal-procmacros/src/lp_core.rs

Co-authored-by: Dominic Fischer <14130965+Dominaezzz@users.noreply.github.com>

---------

Co-authored-by: Dominic Fischer <14130965+Dominaezzz@users.noreply.github.com>
This commit is contained in:
Dániel Buga 2024-08-30 16:45:54 +02:00 committed by GitHub
parent fce510f50a
commit 8e6411bd31
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
11 changed files with 148 additions and 112 deletions

View File

@ -60,7 +60,7 @@ jobs:
run: cargo install cross
- name: Build xtasks
run: cross build --release --target ${{ matrix.host.rust-target }}
run: cross build --release --target ${{ matrix.host.rust-target }} -p xtask
- name: Upload artifact
uses: actions/upload-artifact@v4

View File

@ -5,6 +5,7 @@
use std::{io::Write as _, process};
use proc_macro::TokenStream;
use quote::ToTokens;
use syn::{parse_macro_input, punctuated::Punctuated, LitStr, Token};
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
@ -58,22 +59,10 @@ pub fn assert_unique_features(input: TokenStream) -> TokenStream {
.into_iter()
.collect::<Vec<_>>();
let pairs = unique_pairs(&features);
let unique_cfgs = pairs
.iter()
.map(|(a, b)| quote::quote! { all(feature = #a, feature = #b) });
let message = format!(
r#"
ERROR: expected exactly zero or one enabled feature from feature group:
{:?}
"#,
features.iter().map(|lit| lit.value()).collect::<Vec<_>>(),
);
let unique = impl_unique_features(&features, "exactly zero or one");
quote::quote! {
#[cfg(any(#(#unique_cfgs),*))]
::esp_build::error! { #message }
#unique
}
.into()
}
@ -91,17 +80,10 @@ pub fn assert_used_features(input: TokenStream) -> TokenStream {
.into_iter()
.collect::<Vec<_>>();
let message = format!(
r#"
ERROR: expected at least one enabled feature from feature group:
{:?}
"#,
features.iter().map(|lit| lit.value()).collect::<Vec<_>>()
);
let used = impl_used_features(&features, "at least one");
quote::quote! {
#[cfg(not(any(#(feature = #features),*)))]
::esp_build::error! { #message }
#used
}
.into()
}
@ -118,22 +100,12 @@ pub fn assert_unique_used_features(input: TokenStream) -> TokenStream {
.into_iter()
.collect::<Vec<_>>();
let pairs = unique_pairs(&features);
let unique_cfgs = pairs
.iter()
.map(|(a, b)| quote::quote! { all(feature = #a, feature = #b) });
let message = format!(
r#"
ERROR: expected exactly one enabled feature from feature group:
{:?}
"#,
features.iter().map(|lit| lit.value()).collect::<Vec<_>>()
);
let unique = impl_unique_features(&features, "exactly one");
let used = impl_used_features(&features, "exactly one");
quote::quote! {
#[cfg(any(any(#(#unique_cfgs),*), not(any(#(feature = #features),*))))]
::esp_build::error! { #message }
#unique
#used
}
.into()
}
@ -141,6 +113,41 @@ ERROR: expected exactly one enabled feature from feature group:
// ----------------------------------------------------------------------------
// Helper Functions
/// Builds the compile-time "uniqueness" check shared by the
/// `assert_unique_features` and `assert_unique_used_features` macros:
/// the returned tokens emit `::esp_build::error!` whenever two or more
/// of the given `features` are enabled simultaneously.
///
/// `expectation` is interpolated into the error message (e.g.
/// "exactly zero or one", "exactly one") to describe what the caller
/// expects.
fn impl_unique_features(features: &[LitStr], expectation: &str) -> impl ToTokens {
// `unique_pairs` is defined elsewhere in this file — presumably it yields
// every unordered pair of distinct features (TODO confirm); enabling both
// members of any pair violates uniqueness.
let pairs = unique_pairs(features);
// One `all(feature = "a", feature = "b")` predicate per conflicting pair.
let unique_cfgs = pairs
.iter()
.map(|(a, b)| quote::quote! { all(feature = #a, feature = #b) });
// NOTE: this is a raw string literal — its exact whitespace is part of the
// user-facing error text, so do not reformat it.
let message = format!(
r#"
ERROR: expected {expectation} enabled feature from feature group:
{:?}
"#,
features.iter().map(|lit| lit.value()).collect::<Vec<_>>(),
);
// The generated `#[cfg(any(...))]` is true iff at least one conflicting
// pair is fully enabled, in which case the error fires at compile time.
quote::quote! {
#[cfg(any(#(#unique_cfgs),*))]
::esp_build::error! { #message }
}
}
/// Builds the compile-time "at least one" check shared by the
/// `assert_used_features` and `assert_unique_used_features` macros:
/// the returned tokens emit `::esp_build::error!` when *none* of the
/// given `features` is enabled.
///
/// `expectation` is interpolated into the error message (e.g.
/// "at least one", "exactly one") to describe what the caller expects.
fn impl_used_features(features: &[LitStr], expectation: &str) -> impl ToTokens {
// NOTE: raw string literal — its exact whitespace is part of the
// user-facing error text, so do not reformat it.
let message = format!(
r#"
ERROR: expected {expectation} enabled feature from feature group:
{:?}
"#,
features.iter().map(|lit| lit.value()).collect::<Vec<_>>()
);
// `cfg(not(any(...)))` holds only when every feature in the group is
// disabled; the error then fires at compile time.
quote::quote! {
#[cfg(not(any(#(feature = #features),*)))]
::esp_build::error! { #message }
}
}
// Adapted from:
// https://github.com/dtolnay/build-alert/blob/49d060e/src/lib.rs#L54-L93
fn do_alert(color: Color, input: TokenStream) -> TokenStream {

View File

@ -46,13 +46,13 @@ pub(crate) mod main {
if !f.sig.generics.params.is_empty() {
ctxt.error_spanned_by(&f.sig, "main function must not be generic");
}
if !f.sig.generics.where_clause.is_none() {
if f.sig.generics.where_clause.is_some() {
ctxt.error_spanned_by(&f.sig, "main function must not have `where` clauses");
}
if !f.sig.abi.is_none() {
if f.sig.abi.is_some() {
ctxt.error_spanned_by(&f.sig, "main function must not have an ABI qualifier");
}
if !f.sig.variadic.is_none() {
if f.sig.variadic.is_some() {
ctxt.error_spanned_by(&f.sig, "main function must not be variadic");
}
match &f.sig.output {

View File

@ -30,21 +30,18 @@ impl Parse for MakeGpioEnumDispatchMacro {
let mut elements = vec![];
let mut stream = input.parse::<Group>()?.stream().into_iter();
let stream = input.parse::<Group>()?.stream().into_iter();
let mut element_name = String::new();
loop {
match stream.next() {
Some(v) => match v {
TokenTree::Ident(ident) => {
element_name = ident.to_string();
}
TokenTree::Literal(lit) => {
let index = lit.to_string().parse().unwrap();
elements.push((element_name.clone(), index));
}
_ => (),
},
None => break,
for v in stream {
match v {
TokenTree::Ident(ident) => {
element_name = ident.to_string();
}
TokenTree::Literal(lit) => {
let index = lit.to_string().parse().unwrap();
elements.push((element_name.clone(), index));
}
_ => (),
}
}

View File

@ -24,7 +24,7 @@ pub(crate) fn check_attr_whitelist(
'o: for attr in attrs {
for val in whitelist {
if eq(&attr, &val) {
if eq(attr, val) {
continue 'o;
}
}
@ -35,7 +35,7 @@ pub(crate) fn check_attr_whitelist(
}
};
return Err(Error::new(attr.span(), &err_str).to_compile_error().into());
return Err(Error::new(attr.span(), err_str).to_compile_error().into());
}
Ok(())

View File

@ -210,7 +210,7 @@ pub fn ram(args: TokenStream, input: TokenStream) -> TokenStream {
let hal = proc_macro2::Ident::new(
if let Ok(FoundCrate::Name(ref name)) = crate_name("esp-hal") {
&name
name
} else {
"crate"
},
@ -278,7 +278,7 @@ pub fn handler(args: TokenStream, input: TokenStream) -> TokenStream {
let root = Ident::new(
if let Ok(FoundCrate::Name(ref name)) = crate_name("esp-hal") {
&name
name
} else {
"crate"
},

View File

@ -23,19 +23,19 @@ pub fn entry(args: TokenStream, input: TokenStream) -> TokenStream {
let mut res = String::from("__ULP_MAGIC_");
for &a in args {
let t = &a.ty;
let quoted = to_string(&t);
let quoted = to_string(t);
res.push_str(&quoted);
res.push_str("$");
res.push('$');
}
res
}
pub(crate) fn get_simplename(t: &Type) -> String {
String::from(match t {
Type::Path(p) => String::from(&p.path.segments.last().unwrap().ident.to_string()),
match t {
Type::Path(p) => p.path.segments.last().unwrap().ident.to_string(),
_ => String::new(),
})
}
}
pub(crate) fn extract_pin(ty: &Type) -> u8 {
@ -49,7 +49,7 @@ pub fn entry(args: TokenStream, input: TokenStream) -> TokenStream {
res = extract_pin(t);
}
GenericArgument::Const(c) => {
res = (&quote! { #c }.to_string()).parse().unwrap();
res = quote! { #c }.to_string().parse().unwrap();
}
_ => (),
}
@ -68,11 +68,11 @@ pub fn entry(args: TokenStream, input: TokenStream) -> TokenStream {
res.push_str(&segment.ident.to_string());
if let PathArguments::AngleBracketed(g) = &segment.arguments {
res.push_str("<");
res.push('<');
let mut pushed = false;
for arg in &g.args {
if pushed {
res.push_str(",");
res.push(',');
}
match arg {
@ -87,7 +87,7 @@ pub fn entry(args: TokenStream, input: TokenStream) -> TokenStream {
_ => (),
}
}
res.push_str(">");
res.push('>');
}
}
@ -219,7 +219,7 @@ pub fn load_lp_code(input: TokenStream) -> TokenStream {
};
let hal_crate = if let Ok(FoundCrate::Name(ref name)) = hal_crate {
let ident = Ident::new(&name, Span::call_site().into());
let ident = Ident::new(name, Span::call_site().into());
quote!( #ident )
} else {
quote!(crate)
@ -261,12 +261,14 @@ pub fn load_lp_code(input: TokenStream) -> TokenStream {
let mut sections: Vec<Section> = sections
.into_iter()
.filter(|section| match section.kind() {
SectionKind::Text
| SectionKind::ReadOnlyData
| SectionKind::Data
| SectionKind::UninitializedData => true,
_ => false,
.filter(|section| {
matches!(
section.kind(),
SectionKind::Text
| SectionKind::ReadOnlyData
| SectionKind::Data
| SectionKind::UninitializedData
)
})
.collect();
sections.sort_by(|a, b| a.address().partial_cmp(&b.address()).unwrap());
@ -280,9 +282,8 @@ pub fn load_lp_code(input: TokenStream) -> TokenStream {
for section in sections {
if section.address() > last_address {
for _ in 0..(section.address() - last_address) {
binary.push(0);
}
let fill = section.address() - last_address;
binary.extend(std::iter::repeat(0).take(fill as usize));
}
binary.extend_from_slice(section.data().unwrap());
@ -293,21 +294,20 @@ pub fn load_lp_code(input: TokenStream) -> TokenStream {
.symbols()
.find(|s| s.name().unwrap().starts_with("__ULP_MAGIC_"));
if let None = magic_symbol {
let magic_symbol = if let Some(magic_symbol) = magic_symbol {
magic_symbol.name().unwrap()
} else {
return Error::new(
Span::call_site().into(),
"Given file doesn't seem to be an LP/ULP core application.",
)
.to_compile_error()
.into();
}
let magic_symbol = magic_symbol.unwrap().name().unwrap();
};
let magic_symbol = magic_symbol.trim_start_matches("__ULP_MAGIC_");
let args: Vec<proc_macro2::TokenStream> = magic_symbol
.split("$")
.into_iter()
.map(|t| {
let t = if t.contains("OutputOpenDrain") {
t.replace("OutputOpenDrain", "LowPowerOutputOpenDrain")

View File

@ -7,10 +7,6 @@ description = "HAL for low-power RISC-V coprocessors found in ESP32 devices"
repository = "https://github.com/esp-rs/esp-hal"
license = "MIT OR Apache-2.0"
[lib]
bench = false
test = false
keywords = [
"embedded",
"embedded-hal",
@ -24,6 +20,10 @@ categories = [
"no-std",
]
[lib]
bench = false
test = false
[dependencies]
cfg-if = "1.0.0"
document-features = "0.2.10"

View File

@ -1,7 +1,9 @@
use std::io::{Read, Write};
use std::net::{TcpListener, TcpStream};
use std::thread::spawn;
use std::time::Duration;
use std::{
io::{Read, Write},
net::{TcpListener, TcpStream},
thread::spawn,
time::Duration,
};
use log::info;
@ -22,8 +24,12 @@ fn tx_listen() {
}
fn tx_conn(mut socket: TcpStream) {
socket.set_read_timeout(Some(Duration::from_secs(30))).unwrap();
socket.set_write_timeout(Some(Duration::from_secs(30))).unwrap();
socket
.set_read_timeout(Some(Duration::from_secs(30)))
.unwrap();
socket
.set_write_timeout(Some(Duration::from_secs(30)))
.unwrap();
let buf = [0; 1024];
loop {
@ -44,8 +50,12 @@ fn rx_listen() {
}
fn rx_conn(mut socket: TcpStream) {
socket.set_read_timeout(Some(Duration::from_secs(30))).unwrap();
socket.set_write_timeout(Some(Duration::from_secs(30))).unwrap();
socket
.set_read_timeout(Some(Duration::from_secs(30)))
.unwrap();
socket
.set_write_timeout(Some(Duration::from_secs(30)))
.unwrap();
let mut buf = [0; 1024];
loop {
@ -66,8 +76,12 @@ fn rxtx_listen() {
}
fn rxtx_conn(mut socket: TcpStream) {
socket.set_read_timeout(Some(Duration::from_secs(30))).unwrap();
socket.set_write_timeout(Some(Duration::from_secs(30))).unwrap();
socket
.set_read_timeout(Some(Duration::from_secs(30)))
.unwrap();
socket
.set_write_timeout(Some(Duration::from_secs(30)))
.unwrap();
let mut buf = [0; 1024];
loop {
@ -84,4 +98,4 @@ fn rxtx_conn(mut socket: TcpStream) {
}
}
}
}
}

View File

@ -114,7 +114,7 @@ pub fn build_documentation(
package: Package,
chip: Chip,
target: &str,
) -> Result<()> {
) -> Result<PathBuf> {
let package_name = package.to_string();
let package_path = windows_safe_path(&workspace.join(&package_name));
@ -142,7 +142,15 @@ pub fn build_documentation(
// Execute `cargo doc` from the package root:
cargo::run(&args, &package_path)?;
Ok(())
let docs_path = windows_safe_path(
&workspace
.join(package.to_string())
.join("target")
.join(target)
.join("doc"),
);
Ok(docs_path)
}
/// Load all examples at the given path, and parse their metadata.

View File

@ -332,6 +332,9 @@ fn build_documentation(workspace: &Path, args: BuildDocumentationArgs) -> Result
let output_path = workspace.join("docs");
let resources = workspace.join("resources");
fs::create_dir_all(&output_path)
.with_context(|| format!("Failed to create {}", output_path.display()))?;
let mut packages = HashMap::new();
for package in args.packages {
packages.insert(
@ -341,10 +344,12 @@ fn build_documentation(workspace: &Path, args: BuildDocumentationArgs) -> Result
}
// Copy any additional assets to the documentation's output path:
fs::copy(resources.join("esp-rs.svg"), output_path.join("esp-rs.svg"))?;
fs::copy(resources.join("esp-rs.svg"), output_path.join("esp-rs.svg"))
.context("Failed to copy esp-rs.svg")?;
// Render the index and write it out to the documentation's output path:
let source = fs::read_to_string(resources.join("index.html.jinja"))?;
let source = fs::read_to_string(resources.join("index.html.jinja"))
.context("Failed to read index.html.jinja")?;
let mut env = minijinja::Environment::new();
env.add_template("index", &source)?;
@ -352,7 +357,7 @@ fn build_documentation(workspace: &Path, args: BuildDocumentationArgs) -> Result
let tmpl = env.get_template("index")?;
let html = tmpl.render(minijinja::context! { packages => packages })?;
fs::write(output_path.join("index.html"), html)?;
fs::write(output_path.join("index.html"), html).context("Failed to write index.html")?;
Ok(())
}
@ -377,14 +382,12 @@ fn build_documentation_for_package(
// Build the documentation for the specified package, targeting the
// specified chip:
xtask::build_documentation(workspace, package, *chip, target)?;
let docs_path = xtask::build_documentation(workspace, package, *chip, target)?;
let docs_path = xtask::windows_safe_path(
&workspace
.join(package.to_string())
.join("target")
.join(target)
.join("doc"),
ensure!(
docs_path.exists(),
"Documentation not found at {}",
docs_path.display()
);
let output_path = output_path
@ -394,8 +397,15 @@ fn build_documentation_for_package(
let output_path = xtask::windows_safe_path(&output_path);
// Create the output directory, and copy the built documentation into it:
fs::create_dir_all(&output_path)?;
copy_dir_all(&docs_path, &output_path)?;
fs::create_dir_all(&output_path)
.with_context(|| format!("Failed to create {}", output_path.display()))?;
copy_dir_all(&docs_path, &output_path).with_context(|| {
format!(
"Failed to copy {} to {}",
docs_path.display(),
output_path.display()
)
})?;
// Build the context object required for rendering this particular build's
// information on the documentation index: