Mirror of https://github.com/esp-rs/esp-hal.git, synced 2025-09-27 12:20:56 +00:00
Random cleanups in non-checked packages (#2034)

* Deduplicate feature check macros
* Re-enable rust-analyzer for most of the workspace
* Cargo fix
* Turn off defmt
* Only build xtask
* Clippy pls
* Fix CI
* Fix paths
* Always create doc directory first
* Revert r-a
* Update esp-hal-procmacros/src/lp_core.rs

Co-authored-by: Dominic Fischer <14130965+Dominaezzz@users.noreply.github.com>
This commit is contained in:
parent
fce510f50a
commit
8e6411bd31
.github/workflows/hil.yml (vendored, 2 changes)
@@ -60,7 +60,7 @@ jobs:
         run: cargo install cross

       - name: Build xtasks
-        run: cross build --release --target ${{ matrix.host.rust-target }}
+        run: cross build --release --target ${{ matrix.host.rust-target }} -p xtask

       - name: Upload artifact
         uses: actions/upload-artifact@v4
@@ -5,6 +5,7 @@
 use std::{io::Write as _, process};

 use proc_macro::TokenStream;
+use quote::ToTokens;
 use syn::{parse_macro_input, punctuated::Punctuated, LitStr, Token};
 use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};

@@ -58,22 +59,10 @@ pub fn assert_unique_features(input: TokenStream) -> TokenStream {
         .into_iter()
         .collect::<Vec<_>>();

-    let pairs = unique_pairs(&features);
-    let unique_cfgs = pairs
-        .iter()
-        .map(|(a, b)| quote::quote! { all(feature = #a, feature = #b) });
-
-    let message = format!(
-        r#"
-ERROR: expected exactly zero or one enabled feature from feature group:
-{:?}
-"#,
-        features.iter().map(|lit| lit.value()).collect::<Vec<_>>(),
-    );
+    let unique = impl_unique_features(&features, "exactly zero or one");

     quote::quote! {
-        #[cfg(any(#(#unique_cfgs),*))]
-        ::esp_build::error! { #message }
+        #unique
     }
     .into()
 }
@@ -91,17 +80,10 @@ pub fn assert_used_features(input: TokenStream) -> TokenStream {
         .into_iter()
         .collect::<Vec<_>>();

-    let message = format!(
-        r#"
-ERROR: expected at least one enabled feature from feature group:
-{:?}
-"#,
-        features.iter().map(|lit| lit.value()).collect::<Vec<_>>()
-    );
+    let used = impl_used_features(&features, "at least one");

     quote::quote! {
-        #[cfg(not(any(#(feature = #features),*)))]
-        ::esp_build::error! { #message }
+        #used
     }
     .into()
 }
@@ -118,22 +100,12 @@ pub fn assert_unique_used_features(input: TokenStream) -> TokenStream {
         .into_iter()
         .collect::<Vec<_>>();

-    let pairs = unique_pairs(&features);
-    let unique_cfgs = pairs
-        .iter()
-        .map(|(a, b)| quote::quote! { all(feature = #a, feature = #b) });
-
-    let message = format!(
-        r#"
-ERROR: expected exactly one enabled feature from feature group:
-{:?}
-"#,
-        features.iter().map(|lit| lit.value()).collect::<Vec<_>>()
-    );
+    let unique = impl_unique_features(&features, "exactly one");
+    let used = impl_used_features(&features, "exactly one");

     quote::quote! {
-        #[cfg(any(any(#(#unique_cfgs),*), not(any(#(feature = #features),*))))]
-        ::esp_build::error! { #message }
+        #unique
+        #used
     }
     .into()
 }
@@ -141,6 +113,41 @@ ERROR: expected exactly one enabled feature from feature group:
 // ----------------------------------------------------------------------------
 // Helper Functions

+fn impl_unique_features(features: &[LitStr], expectation: &str) -> impl ToTokens {
+    let pairs = unique_pairs(features);
+    let unique_cfgs = pairs
+        .iter()
+        .map(|(a, b)| quote::quote! { all(feature = #a, feature = #b) });
+
+    let message = format!(
+        r#"
+ERROR: expected {expectation} enabled feature from feature group:
+{:?}
+"#,
+        features.iter().map(|lit| lit.value()).collect::<Vec<_>>(),
+    );
+
+    quote::quote! {
+        #[cfg(any(#(#unique_cfgs),*))]
+        ::esp_build::error! { #message }
+    }
+}
+
+fn impl_used_features(features: &[LitStr], expectation: &str) -> impl ToTokens {
+    let message = format!(
+        r#"
+ERROR: expected {expectation} enabled feature from feature group:
+{:?}
+"#,
+        features.iter().map(|lit| lit.value()).collect::<Vec<_>>()
+    );
+
+    quote::quote! {
+        #[cfg(not(any(#(feature = #features),*)))]
+        ::esp_build::error! { #message }
+    }
+}
+
 // Adapted from:
 // https://github.com/dtolnay/build-alert/blob/49d060e/src/lib.rs#L54-L93
 fn do_alert(color: Color, input: TokenStream) -> TokenStream {
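The two helpers above close over `unique_pairs`, which enumerates every unordered pair of features in the group; each pair becomes one `all(...)` clause in the generated `cfg`, so enabling any two features together trips the build error. A runnable sketch of that expansion (this `unique_pairs` is a hypothetical stand-in for the crate's own helper):

```rust
// Hypothetical stand-in for the crate's `unique_pairs` helper.
fn unique_pairs(features: &[&str]) -> Vec<(String, String)> {
    let mut pairs = Vec::new();
    for (i, a) in features.iter().enumerate() {
        for b in &features[i + 1..] {
            pairs.push((a.to_string(), b.to_string()));
        }
    }
    pairs
}

fn main() {
    // For three chip features this prints the three `all(...)` clauses that
    // `impl_unique_features` splices into `#[cfg(any(...))]`; if any pair is
    // enabled together, `::esp_build::error!` aborts the build.
    for (a, b) in unique_pairs(&["esp32", "esp32s2", "esp32s3"]) {
        println!("all(feature = \"{a}\", feature = \"{b}\")");
    }
}
```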
@@ -46,13 +46,13 @@ pub(crate) mod main {
         if !f.sig.generics.params.is_empty() {
             ctxt.error_spanned_by(&f.sig, "main function must not be generic");
         }
-        if !f.sig.generics.where_clause.is_none() {
+        if f.sig.generics.where_clause.is_some() {
             ctxt.error_spanned_by(&f.sig, "main function must not have `where` clauses");
         }
-        if !f.sig.abi.is_none() {
+        if f.sig.abi.is_some() {
             ctxt.error_spanned_by(&f.sig, "main function must not have an ABI qualifier");
         }
-        if !f.sig.variadic.is_none() {
+        if f.sig.variadic.is_some() {
             ctxt.error_spanned_by(&f.sig, "main function must not be variadic");
         }
         match &f.sig.output {
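These are the `!x.is_none()` to `x.is_some()` cleanups Clippy suggests; the conditions themselves are unchanged. A condensed, standalone sketch of the same signature validation (assuming syn 2.x with the `full` feature; `validate_main` is an illustrative name, not the macro's API):

```rust
use syn::ItemFn;

fn validate_main(f: &ItemFn) -> Result<(), &'static str> {
    if !f.sig.generics.params.is_empty() {
        return Err("main function must not be generic");
    }
    if f.sig.generics.where_clause.is_some() {
        return Err("main function must not have `where` clauses");
    }
    if f.sig.abi.is_some() {
        return Err("main function must not have an ABI qualifier");
    }
    if f.sig.variadic.is_some() {
        return Err("main function must not be variadic");
    }
    Ok(())
}

fn main() {
    let ok: ItemFn = syn::parse_quote! { fn main() {} };
    let bad: ItemFn = syn::parse_quote! { extern "C" fn main() {} };
    assert!(validate_main(&ok).is_ok());
    assert!(validate_main(&bad).is_err());
}
```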
@@ -30,21 +30,18 @@ impl Parse for MakeGpioEnumDispatchMacro {

         let mut elements = vec![];

-        let mut stream = input.parse::<Group>()?.stream().into_iter();
+        let stream = input.parse::<Group>()?.stream().into_iter();
         let mut element_name = String::new();
-        loop {
-            match stream.next() {
-                Some(v) => match v {
-                    TokenTree::Ident(ident) => {
-                        element_name = ident.to_string();
-                    }
-                    TokenTree::Literal(lit) => {
-                        let index = lit.to_string().parse().unwrap();
-                        elements.push((element_name.clone(), index));
-                    }
-                    _ => (),
-                },
-                None => break,
-            }
+        for v in stream {
+            match v {
+                TokenTree::Ident(ident) => {
+                    element_name = ident.to_string();
+                }
+                TokenTree::Literal(lit) => {
+                    let index = lit.to_string().parse().unwrap();
+                    elements.push((element_name.clone(), index));
+                }
+                _ => (),
+            }
         }

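The rewrite above is a mechanical `loop`/`match next()` to `for` conversion: a `for` loop desugars to the same `next()` calls with a `break` on `None`, which also lets `stream` drop its `mut` binding. A minimal illustration:

```rust
fn main() {
    // Before: manually pumping the iterator requires a mutable binding.
    let mut it = [1, 2, 3].into_iter();
    loop {
        match it.next() {
            Some(v) => println!("{v}"),
            None => break,
        }
    }

    // After: the `for` loop takes ownership and pumps the iterator itself.
    let it = [1, 2, 3].into_iter();
    for v in it {
        println!("{v}");
    }
}
```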
@@ -24,7 +24,7 @@ pub(crate) fn check_attr_whitelist(

     'o: for attr in attrs {
         for val in whitelist {
-            if eq(&attr, &val) {
+            if eq(attr, val) {
                 continue 'o;
             }
         }
@@ -35,7 +35,7 @@ pub(crate) fn check_attr_whitelist(
             }
         };

-        return Err(Error::new(attr.span(), &err_str).to_compile_error().into());
+        return Err(Error::new(attr.span(), err_str).to_compile_error().into());
     }

     Ok(())
@@ -210,7 +210,7 @@ pub fn ram(args: TokenStream, input: TokenStream) -> TokenStream {

     let hal = proc_macro2::Ident::new(
         if let Ok(FoundCrate::Name(ref name)) = crate_name("esp-hal") {
-            &name
+            name
         } else {
             "crate"
         },
@@ -278,7 +278,7 @@ pub fn handler(args: TokenStream, input: TokenStream) -> TokenStream {

     let root = Ident::new(
         if let Ok(FoundCrate::Name(ref name)) = crate_name("esp-hal") {
-            &name
+            name
         } else {
             "crate"
         },
@@ -23,19 +23,19 @@ pub fn entry(args: TokenStream, input: TokenStream) -> TokenStream {
     let mut res = String::from("__ULP_MAGIC_");
     for &a in args {
         let t = &a.ty;
-        let quoted = to_string(&t);
+        let quoted = to_string(t);
         res.push_str(&quoted);
-        res.push_str("$");
+        res.push('$');
     }

     res
 }

 pub(crate) fn get_simplename(t: &Type) -> String {
-    String::from(match t {
-        Type::Path(p) => String::from(&p.path.segments.last().unwrap().ident.to_string()),
+    match t {
+        Type::Path(p) => p.path.segments.last().unwrap().ident.to_string(),
         _ => String::new(),
-    })
+    }
 }

 pub(crate) fn extract_pin(ty: &Type) -> u8 {
@@ -49,7 +49,7 @@ pub fn entry(args: TokenStream, input: TokenStream) -> TokenStream {
                 res = extract_pin(t);
             }
             GenericArgument::Const(c) => {
-                res = (quote! { #c }.to_string()).parse().unwrap();
+                res = quote! { #c }.to_string().parse().unwrap();
             }
             _ => (),
         }
@@ -68,11 +68,11 @@ pub fn entry(args: TokenStream, input: TokenStream) -> TokenStream {
         res.push_str(&segment.ident.to_string());

         if let PathArguments::AngleBracketed(g) = &segment.arguments {
-            res.push_str("<");
+            res.push('<');
             let mut pushed = false;
             for arg in &g.args {
                 if pushed {
-                    res.push_str(",");
+                    res.push(',');
                 }

                 match arg {
@@ -87,7 +87,7 @@ pub fn entry(args: TokenStream, input: TokenStream) -> TokenStream {
                     _ => (),
                 }
             }
-            res.push_str(">");
+            res.push('>');
         }
     }

@@ -219,7 +219,7 @@ pub fn load_lp_code(input: TokenStream) -> TokenStream {
     };

     let hal_crate = if let Ok(FoundCrate::Name(ref name)) = hal_crate {
-        let ident = Ident::new(&name, Span::call_site().into());
+        let ident = Ident::new(name, Span::call_site().into());
         quote!( #ident )
     } else {
         quote!(crate)
@@ -261,12 +261,14 @@ pub fn load_lp_code(input: TokenStream) -> TokenStream {

     let mut sections: Vec<Section> = sections
         .into_iter()
-        .filter(|section| match section.kind() {
-            SectionKind::Text
-            | SectionKind::ReadOnlyData
-            | SectionKind::Data
-            | SectionKind::UninitializedData => true,
-            _ => false,
-        })
+        .filter(|section| {
+            matches!(
+                section.kind(),
+                SectionKind::Text
+                    | SectionKind::ReadOnlyData
+                    | SectionKind::Data
+                    | SectionKind::UninitializedData
+            )
+        })
         .collect();
     sections.sort_by(|a, b| a.address().partial_cmp(&b.address()).unwrap());
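This is Clippy's `match_like_matches_macro` cleanup: a `match` whose arms only produce `true` or `false` collapses into the `matches!` macro. A self-contained illustration with a stand-in `Kind` enum:

```rust
#[derive(Clone, Copy)]
enum Kind {
    Text,
    Data,
    Debug,
}

fn main() {
    let kind = Kind::Text;

    // Long form: a match that only maps patterns to bools.
    let loadable_match = match kind {
        Kind::Text | Kind::Data => true,
        _ => false,
    };

    // Equivalent short form using the `matches!` macro.
    let loadable_matches = matches!(kind, Kind::Text | Kind::Data);

    assert_eq!(loadable_match, loadable_matches);
}
```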
@@ -280,9 +282,8 @@ pub fn load_lp_code(input: TokenStream) -> TokenStream {

     for section in sections {
         if section.address() > last_address {
-            for _ in 0..(section.address() - last_address) {
-                binary.push(0);
-            }
+            let fill = section.address() - last_address;
+            binary.extend(std::iter::repeat(0).take(fill as usize));
         }

         binary.extend_from_slice(section.data().unwrap());
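Padding the gap between sections now happens with one `extend` over a zero iterator instead of a push-per-byte loop. A small sketch, including the `Vec::resize` alternative for when the target length (rather than the gap size) is at hand:

```rust
fn main() {
    let mut binary: Vec<u8> = vec![0xAA; 4];
    let fill = 3usize;

    // One extend instead of `fill` separate pushes.
    binary.extend(std::iter::repeat(0).take(fill));
    assert_eq!(binary.len(), 7);

    // Equivalent when the desired total length is known:
    let mut other: Vec<u8> = vec![0xAA; 4];
    other.resize(4 + fill, 0);
    assert_eq!(binary, other);
}
```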
@@ -293,21 +294,20 @@ pub fn load_lp_code(input: TokenStream) -> TokenStream {
         .symbols()
         .find(|s| s.name().unwrap().starts_with("__ULP_MAGIC_"));

-    if let None = magic_symbol {
+    let magic_symbol = if let Some(magic_symbol) = magic_symbol {
+        magic_symbol.name().unwrap()
+    } else {
         return Error::new(
             Span::call_site().into(),
             "Given file doesn't seem to be an LP/ULP core application.",
         )
         .to_compile_error()
         .into();
-    }
+    };

-    let magic_symbol = magic_symbol.unwrap().name().unwrap();
-
     let magic_symbol = magic_symbol.trim_start_matches("__ULP_MAGIC_");
     let args: Vec<proc_macro2::TokenStream> = magic_symbol
         .split("$")
-        .into_iter()
         .map(|t| {
             let t = if t.contains("OutputOpenDrain") {
                 t.replace("OutputOpenDrain", "LowPowerOutputOpenDrain")
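The `magic_symbol` rewrite above binds the symbol name in a single `if let ... else` expression instead of checking for `None` first and calling `unwrap()` later. A minimal standalone sketch of the same pattern (names hypothetical):

```rust
fn main() {
    let magic: Option<&str> = Some("__ULP_MAGIC_u32$");

    // One expression both validates and binds, so no later `unwrap()` is
    // needed and the error path stays next to the check.
    let name = if let Some(name) = magic {
        name
    } else {
        eprintln!("no magic symbol found");
        return;
    };

    println!("{}", name.trim_start_matches("__ULP_MAGIC_"));
}
```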
@@ -7,10 +7,6 @@ description = "HAL for low-power RISC-V coprocessors found in ESP32 devices"
 repository = "https://github.com/esp-rs/esp-hal"
 license = "MIT OR Apache-2.0"

-[lib]
-bench = false
-test = false
-
 keywords = [
     "embedded",
     "embedded-hal",
@@ -24,6 +20,10 @@ categories = [
     "no-std",
 ]

+[lib]
+bench = false
+test = false
+
 [dependencies]
 cfg-if = "1.0.0"
 document-features = "0.2.10"
@@ -1,7 +1,9 @@
-use std::io::{Read, Write};
-use std::net::{TcpListener, TcpStream};
-use std::thread::spawn;
-use std::time::Duration;
+use std::{
+    io::{Read, Write},
+    net::{TcpListener, TcpStream},
+    thread::spawn,
+    time::Duration,
+};

 use log::info;

@@ -22,8 +24,12 @@ fn tx_listen() {
 }

 fn tx_conn(mut socket: TcpStream) {
-    socket.set_read_timeout(Some(Duration::from_secs(30))).unwrap();
-    socket.set_write_timeout(Some(Duration::from_secs(30))).unwrap();
+    socket
+        .set_read_timeout(Some(Duration::from_secs(30)))
+        .unwrap();
+    socket
+        .set_write_timeout(Some(Duration::from_secs(30)))
+        .unwrap();

     let buf = [0; 1024];
     loop {
@@ -44,8 +50,12 @@ fn rx_listen() {
 }

 fn rx_conn(mut socket: TcpStream) {
-    socket.set_read_timeout(Some(Duration::from_secs(30))).unwrap();
-    socket.set_write_timeout(Some(Duration::from_secs(30))).unwrap();
+    socket
+        .set_read_timeout(Some(Duration::from_secs(30)))
+        .unwrap();
+    socket
+        .set_write_timeout(Some(Duration::from_secs(30)))
+        .unwrap();

     let mut buf = [0; 1024];
     loop {
@@ -66,8 +76,12 @@ fn rxtx_listen() {
 }

 fn rxtx_conn(mut socket: TcpStream) {
-    socket.set_read_timeout(Some(Duration::from_secs(30))).unwrap();
-    socket.set_write_timeout(Some(Duration::from_secs(30))).unwrap();
+    socket
+        .set_read_timeout(Some(Duration::from_secs(30)))
+        .unwrap();
+    socket
+        .set_write_timeout(Some(Duration::from_secs(30)))
+        .unwrap();

     let mut buf = [0; 1024];
     loop {
@@ -84,4 +98,4 @@ fn rxtx_conn(mut socket: TcpStream) {
             }
         }
     }
 }
@@ -114,7 +114,7 @@ pub fn build_documentation(
     package: Package,
     chip: Chip,
     target: &str,
-) -> Result<()> {
+) -> Result<PathBuf> {
     let package_name = package.to_string();
     let package_path = windows_safe_path(&workspace.join(&package_name));

@@ -142,7 +142,15 @@ pub fn build_documentation(
     // Execute `cargo doc` from the package root:
     cargo::run(&args, &package_path)?;

-    Ok(())
+    let docs_path = windows_safe_path(
+        &workspace
+            .join(package.to_string())
+            .join("target")
+            .join(target)
+            .join("doc"),
+    );
+
+    Ok(docs_path)
 }

 /// Load all examples at the given path, and parse their metadata.
@@ -332,6 +332,9 @@ fn build_documentation(workspace: &Path, args: BuildDocumentationArgs) -> Result
     let output_path = workspace.join("docs");
     let resources = workspace.join("resources");

+    fs::create_dir_all(&output_path)
+        .with_context(|| format!("Failed to create {}", output_path.display()))?;
+
     let mut packages = HashMap::new();
     for package in args.packages {
         packages.insert(
@@ -341,10 +344,12 @@ fn build_documentation(workspace: &Path, args: BuildDocumentationArgs) -> Result
     }

     // Copy any additional assets to the documentation's output path:
-    fs::copy(resources.join("esp-rs.svg"), output_path.join("esp-rs.svg"))?;
+    fs::copy(resources.join("esp-rs.svg"), output_path.join("esp-rs.svg"))
+        .context("Failed to copy esp-rs.svg")?;

     // Render the index and write it out to the documentaiton's output path:
-    let source = fs::read_to_string(resources.join("index.html.jinja"))?;
+    let source = fs::read_to_string(resources.join("index.html.jinja"))
+        .context("Failed to read index.html.jinja")?;

     let mut env = minijinja::Environment::new();
     env.add_template("index", &source)?;
@@ -352,7 +357,7 @@ fn build_documentation(workspace: &Path, args: BuildDocumentationArgs) -> Result
     let tmpl = env.get_template("index")?;
     let html = tmpl.render(minijinja::context! { packages => packages })?;

-    fs::write(output_path.join("index.html"), html)?;
+    fs::write(output_path.join("index.html"), html).context("Failed to write index.html")?;

     Ok(())
 }
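The xtask hunks above consistently attach context to fallible `fs` calls, so a failing CI run names the offending path instead of printing a bare `io::Error`. A minimal sketch of the pattern, assuming the `anyhow` crate (which the `context`/`with_context`/`ensure!` calls in these hunks suggest):

```rust
use anyhow::{Context, Result};
use std::fs;

fn read_template(path: &str) -> Result<String> {
    // `?` alone would discard the path; `with_context` adds it lazily,
    // only building the message when an error actually occurs.
    fs::read_to_string(path).with_context(|| format!("Failed to read {path}"))
}

fn main() {
    if let Err(err) = read_template("resources/index.html.jinja") {
        // `{:#}` prints the context chain: "Failed to read ...: <io error>".
        eprintln!("{err:#}");
    }
}
```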
@@ -377,14 +382,12 @@ fn build_documentation_for_package(

     // Build the documentation for the specified package, targeting the
     // specified chip:
-    xtask::build_documentation(workspace, package, *chip, target)?;
+    let docs_path = xtask::build_documentation(workspace, package, *chip, target)?;

-    let docs_path = xtask::windows_safe_path(
-        &workspace
-            .join(package.to_string())
-            .join("target")
-            .join(target)
-            .join("doc"),
+    ensure!(
+        docs_path.exists(),
+        "Documentation not found at {}",
+        docs_path.display()
     );

     let output_path = output_path
@@ -394,8 +397,15 @@ fn build_documentation_for_package(
     let output_path = xtask::windows_safe_path(&output_path);

     // Create the output directory, and copy the built documentation into it:
-    fs::create_dir_all(&output_path)?;
-    copy_dir_all(&docs_path, &output_path)?;
+    fs::create_dir_all(&output_path)
+        .with_context(|| format!("Failed to create {}", output_path.display()))?;
+    copy_dir_all(&docs_path, &output_path).with_context(|| {
+        format!(
+            "Failed to copy {} to {}",
+            docs_path.display(),
+            output_path.display()
+        )
+    })?;

     // Build the context object required for rendering this particular build's
     // information on the documentation index: