mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-01 11:31:15 +00:00)

internal: Better type proc macro dylib build data state

This commit is contained in:
  parent 971c393ab3
  commit 7950da3940
@@ -30,7 +30,8 @@ pub type ProcMacroPaths =
 pub enum ProcMacroLoadingError {
     Disabled,
     FailedToBuild,
-    MissingDylibPath,
+    ExpectedProcMacroArtifact,
+    MissingDylibPath(Box<[String]>),
     NotYetBuilt,
     NoProcMacros,
     ProcMacroSrvError(Box<str>),

@@ -39,8 +40,9 @@ impl ProcMacroLoadingError {
     pub fn is_hard_error(&self) -> bool {
         match self {
             ProcMacroLoadingError::Disabled | ProcMacroLoadingError::NotYetBuilt => false,
-            ProcMacroLoadingError::FailedToBuild
-            | ProcMacroLoadingError::MissingDylibPath
+            ProcMacroLoadingError::ExpectedProcMacroArtifact
+            | ProcMacroLoadingError::FailedToBuild
+            | ProcMacroLoadingError::MissingDylibPath(_)
             | ProcMacroLoadingError::NoProcMacros
             | ProcMacroLoadingError::ProcMacroSrvError(_) => true,
         }

@@ -51,10 +53,23 @@ impl Error for ProcMacroLoadingError {}
 impl fmt::Display for ProcMacroLoadingError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
+            ProcMacroLoadingError::ExpectedProcMacroArtifact => {
+                write!(f, "proc-macro crate did not build proc-macro artifact")
+            }
             ProcMacroLoadingError::Disabled => write!(f, "proc-macro expansion is disabled"),
             ProcMacroLoadingError::FailedToBuild => write!(f, "proc-macro failed to build"),
-            ProcMacroLoadingError::MissingDylibPath => {
-                write!(f, "proc-macro crate build data is missing a dylib path")
+            ProcMacroLoadingError::MissingDylibPath(candidates) if candidates.is_empty() => {
+                write!(
+                    f,
+                    "proc-macro crate built but the dylib path is missing, this indicates a problem with your build system."
+                )
+            }
+            ProcMacroLoadingError::MissingDylibPath(candidates) => {
+                write!(
+                    f,
+                    "proc-macro crate built but the dylib path is missing, this indicates a problem with your build system. Candidates not considered due to not having a dynamic library extension: {}",
+                    candidates.join(", ")
+                )
             }
             ProcMacroLoadingError::NotYetBuilt => write!(f, "proc-macro not yet built"),
             ProcMacroLoadingError::NoProcMacros => {

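The two MissingDylibPath arms above rely on arm order: the guarded `if candidates.is_empty()` arm has to come before the catch-all arm, otherwise an empty candidate list would still be appended to the message. A minimal self-contained sketch of that pattern (the function name and strings here are stand-ins, not the real Display impl):

fn describe(candidates: &[String]) -> String {
    match candidates {
        // Guarded arm first: an empty slice gets the short message.
        c if c.is_empty() => "dylib path is missing".to_owned(),
        // Catch-all arm second: list the rejected candidates.
        c => format!("dylib path is missing; rejected candidates: {}", c.join(", ")),
    }
}

fn main() {
    assert_eq!(describe(&[]), "dylib path is missing");
    assert!(describe(&["libfoo.rmeta".to_owned()]).contains("libfoo.rmeta"));
}
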
@@ -16,6 +16,7 @@ use la_arena::ArenaMap;
 use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
 use rustc_hash::{FxHashMap, FxHashSet};
 use serde::Deserialize as _;
+use stdx::{always, never};
 use toolchain::Tool;

 use crate::{

@@ -30,6 +31,15 @@ pub struct WorkspaceBuildScripts {
     error: Option<String>,
 }

+#[derive(Debug, Clone, Default, PartialEq, Eq)]
+pub enum ProcMacroDylibPath {
+    Path(AbsPathBuf),
+    DylibNotFound(Box<[Utf8PathBuf]>),
+    NotProcMacro,
+    #[default]
+    NotBuilt,
+}
+
 /// Output of the build script and proc-macro building step for a concrete package.
 #[derive(Debug, Clone, Default, PartialEq, Eq)]
 pub(crate) struct BuildScriptOutput {

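The `#[default]` attribute on `NotBuilt` is what lets `BuildScriptOutput` keep deriving `Default` once its field switches to this enum (next hunk). A self-contained sketch of that derive behavior, with `String` standing in for the real `AbsPathBuf`/`Utf8PathBuf` types:

#[allow(dead_code)]
#[derive(Debug, Clone, Default, PartialEq, Eq)]
enum ProcMacroDylibPath {
    Path(String),
    DylibNotFound(Box<[String]>),
    NotProcMacro,
    #[default]
    NotBuilt,
}

fn main() {
    // Derived Default picks the #[default] variant, so freshly created build
    // data starts out in the "not built yet" state.
    assert_eq!(ProcMacroDylibPath::default(), ProcMacroDylibPath::NotBuilt);
}
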
@@ -43,7 +53,7 @@ pub(crate) struct BuildScriptOutput {
     /// Directory where a build script might place its output.
     pub(crate) out_dir: Option<AbsPathBuf>,
     /// Path to the proc-macro library file if this package exposes proc-macros.
-    pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>,
+    pub(crate) proc_macro_dylib_path: ProcMacroDylibPath,
 }

 impl BuildScriptOutput {

@@ -51,7 +61,7 @@ impl BuildScriptOutput {
         self.cfgs.is_empty()
             && self.envs.is_empty()
             && self.out_dir.is_none()
-            && self.proc_macro_dylib_path.is_none()
+            && self.proc_macro_dylib_path == ProcMacroDylibPath::NotBuilt
     }
 }

@@ -126,6 +136,8 @@ impl WorkspaceBuildScripts {
             |package, cb| {
                 if let Some(&(package, workspace)) = by_id.get(package) {
                     cb(&workspaces[workspace][package].name, &mut res[workspace].outputs[package]);
+                } else {
+                    never!("Received compiler message for unknown package: {}", package);
                 }
             },
             progress,

@@ -140,12 +152,9 @@ impl WorkspaceBuildScripts {
         if tracing::enabled!(tracing::Level::INFO) {
             for (idx, workspace) in workspaces.iter().enumerate() {
                 for package in workspace.packages() {
-                    let package_build_data = &mut res[idx].outputs[package];
+                    let package_build_data: &mut BuildScriptOutput = &mut res[idx].outputs[package];
                     if !package_build_data.is_empty() {
-                        tracing::info!(
-                            "{}: {package_build_data:?}",
-                            workspace[package].manifest.parent(),
-                        );
+                        tracing::info!("{}: {package_build_data:?}", workspace[package].manifest,);
                     }
                 }
             }

@@ -198,10 +207,33 @@ impl WorkspaceBuildScripts {
                     let path = dir_entry.path();
                     let extension = path.extension()?;
                     if extension == std::env::consts::DLL_EXTENSION {
-                        let name = path.file_stem()?.to_str()?.split_once('-')?.0.to_owned();
-                        let path = AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).ok()?)
-                            .ok()?;
-                        return Some((name, path));
+                        let name = path
+                            .file_stem()?
+                            .to_str()?
+                            .split_once('-')?
+                            .0
+                            .trim_start_matches("lib")
+                            .to_owned();
+                        let path = match Utf8PathBuf::from_path_buf(path) {
+                            Ok(path) => path,
+                            Err(path) => {
+                                tracing::warn!(
+                                    "Proc-macro dylib path contains non-UTF8 characters: {:?}",
+                                    path.display()
+                                );
+                                return None;
+                            }
+                        };
+                        return match AbsPathBuf::try_from(path) {
+                            Ok(path) => Some((name, path)),
+                            Err(path) => {
+                                tracing::error!(
+                                    "proc-macro dylib path is not absolute: {:?}",
+                                    path
+                                );
+                                None
+                            }
+                        };
                     }
                 }
                 None

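Candidate dylib names are now normalized at collection time: the `-<metadata hash>` suffix is split off and the platform's `lib` prefix is stripped, which is why the lookup in the next hunk can compare against `package.name` directly instead of trimming at the comparison site. A self-contained sketch of just that string handling (the file stems are made-up examples):

/// Stand-in for the normalization above: given a dylib file stem such as
/// "libfoo_derive-0a1b2c3d", recover the crate name "foo_derive".
fn dylib_crate_name(file_stem: &str) -> Option<String> {
    Some(file_stem.split_once('-')?.0.trim_start_matches("lib").to_owned())
}

fn main() {
    // Unix-style artifact: "lib" prefix plus metadata hash suffix.
    assert_eq!(dylib_crate_name("libfoo_derive-0a1b2c3d").as_deref(), Some("foo_derive"));
    // Windows DLLs carry no "lib" prefix; trim_start_matches is then a no-op.
    assert_eq!(dylib_crate_name("foo_derive-0a1b2c3d").as_deref(), Some("foo_derive"));
    // No '-' means this is not a hashed build artifact, so it is skipped.
    assert_eq!(dylib_crate_name("foo_derive"), None);
}
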
@@ -209,17 +241,16 @@ impl WorkspaceBuildScripts {
             .collect();
         for p in rustc.packages() {
             let package = &rustc[p];
-            if package
-                .targets
-                .iter()
-                .any(|&it| matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true }))
-            {
-                if let Some((_, path)) = proc_macro_dylibs
-                    .iter()
-                    .find(|(name, _)| *name.trim_start_matches("lib") == package.name)
-                {
-                    bs.outputs[p].proc_macro_dylib_path = Some(path.clone());
+            bs.outputs[p].proc_macro_dylib_path =
+                if package.targets.iter().any(|&it| {
+                    matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true })
+                }) {
+                    match proc_macro_dylibs.iter().find(|(name, _)| *name == package.name) {
+                        Some((_, path)) => ProcMacroDylibPath::Path(path.clone()),
+                        _ => ProcMacroDylibPath::DylibNotFound(Box::default()),
                     }
+                } else {
+                    ProcMacroDylibPath::NotProcMacro
                 }
         }

@@ -227,10 +258,7 @@ impl WorkspaceBuildScripts {
             for package in rustc.packages() {
                 let package_build_data = &bs.outputs[package];
                 if !package_build_data.is_empty() {
-                    tracing::info!(
-                        "{}: {package_build_data:?}",
-                        rustc[package].manifest.parent(),
-                    );
+                    tracing::info!("{}: {package_build_data:?}", rustc[package].manifest,);
                 }
             }
         }

@@ -263,6 +291,12 @@ impl WorkspaceBuildScripts {
             |package, cb| {
                 if let Some(&package) = by_id.get(package) {
                     cb(&workspace[package].name, &mut outputs[package]);
+                } else {
+                    never!(
+                        "Received compiler message for unknown package: {}\n {}",
+                        package,
+                        by_id.keys().join(", ")
+                    );
                 }
             },
             progress,

@@ -272,10 +306,7 @@ impl WorkspaceBuildScripts {
             for package in workspace.packages() {
                 let package_build_data = &outputs[package];
                 if !package_build_data.is_empty() {
-                    tracing::info!(
-                        "{}: {package_build_data:?}",
-                        workspace[package].manifest.parent(),
-                    );
+                    tracing::info!("{}: {package_build_data:?}", workspace[package].manifest,);
                 }
             }
         }

@@ -348,15 +379,25 @@ impl WorkspaceBuildScripts {
                         progress(format!(
                             "building compile-time-deps: proc-macro {name} built"
                         ));
+                        always!(
+                            data.proc_macro_dylib_path == ProcMacroDylibPath::NotBuilt,
+                            "received multiple compiler artifacts for the same package: {message:?}"
+                        );
+                        if data.proc_macro_dylib_path == ProcMacroDylibPath::NotBuilt {
+                            data.proc_macro_dylib_path = ProcMacroDylibPath::NotProcMacro;
+                        }
                         if message.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro)
                         {
-                            // Skip rmeta file
-                            if let Some(filename) =
-                                message.filenames.iter().find(|file| is_dylib(file))
-                            {
+                            data.proc_macro_dylib_path =
+                                match message.filenames.iter().find(|file| is_dylib(file)) {
+                                    Some(filename) => {
                                         let filename = AbsPath::assert(filename);
-                                data.proc_macro_dylib_path = Some(filename.to_owned());
+                                        ProcMacroDylibPath::Path(filename.to_owned())
                                     }
+                                    None => ProcMacroDylibPath::DylibNotFound(
+                                        message.filenames.clone().into_boxed_slice(),
+                                    ),
+                                };
                         }
                     });
                 }

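The effect of the added lines is that every CompilerArtifact message now leaves an explicit state behind: a package still marked `NotBuilt` becomes `NotProcMacro`, and only a proc-macro target then upgrades that to `Path(..)` or, when no filename has a dynamic-library extension, `DylibNotFound(..)` carrying the rejected filenames (the `stdx::always!` additionally flags a second artifact arriving for the same package). A self-contained sketch of that update rule, with stand-in types rather than the real cargo_metadata ones:

#[derive(Debug, Default, PartialEq, Eq)]
enum DylibState {
    Path(String),
    DylibNotFound(Box<[String]>),
    NotProcMacro,
    #[default]
    NotBuilt,
}

/// Stand-in for the CompilerArtifact handling above: `dylib` is the first
/// artifact filename with a dynamic-library extension, if any.
fn on_compiler_artifact(state: &mut DylibState, is_proc_macro: bool, dylib: Option<String>, all_filenames: Vec<String>) {
    if *state == DylibState::NotBuilt {
        *state = DylibState::NotProcMacro;
    }
    if is_proc_macro {
        *state = match dylib {
            Some(path) => DylibState::Path(path),
            None => DylibState::DylibNotFound(all_filenames.into_boxed_slice()),
        };
    }
}

fn main() {
    let mut state = DylibState::default();
    // A non-proc-macro artifact only records that the package did build.
    on_compiler_artifact(&mut state, false, None, vec![]);
    assert_eq!(state, DylibState::NotProcMacro);
    // A proc-macro artifact without a dylib keeps the rejected filenames around.
    on_compiler_artifact(&mut state, true, None, vec!["foo.rmeta".to_owned()]);
    assert_eq!(state, DylibState::DylibNotFound(vec!["foo.rmeta".to_owned()].into_boxed_slice()));
}
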
@@ -245,7 +245,7 @@ pub enum TargetKind {
 }

 impl TargetKind {
-    fn new(kinds: &[cargo_metadata::TargetKind]) -> TargetKind {
+    pub fn new(kinds: &[cargo_metadata::TargetKind]) -> TargetKind {
         for kind in kinds {
             return match kind {
                 cargo_metadata::TargetKind::Bin => TargetKind::Bin,

@@ -604,14 +604,14 @@ impl FetchMetadata {
         // but nothing else
         let mut extra_args = config.extra_args.iter();
         while let Some(arg) = extra_args.next() {
-            if arg == "-Z" {
-                if let Some(arg) = extra_args.next() {
+            if arg == "-Z"
+                && let Some(arg) = extra_args.next()
+            {
                 needs_nightly = true;
                 other_options.push("-Z".to_owned());
                 other_options.push(arg.to_owned());
             }
         }
-        }

         let mut lockfile_path = None;
         if cargo_toml.is_rust_manifest() {

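This hunk, like several below (the ProjectManifest, Sysroot, and ProjectWorkspace hunks), collapses a nested `if` + `if let` into a single let-chain condition, which compiles on the 2024 edition where let chains are stable. A small self-contained before/after sketch of the same shape, using a made-up `-Z` flag scanner:

// Requires edition 2024 (stabilized let chains).
fn first_z_flag(args: &[&str]) -> Option<String> {
    let mut iter = args.iter();
    while let Some(arg) = iter.next() {
        // Before: `if *arg == "-Z" { if let Some(value) = iter.next() { ... } }`
        if *arg == "-Z"
            && let Some(value) = iter.next()
        {
            return Some((*value).to_owned());
        }
    }
    None
}

fn main() {
    assert_eq!(first_z_flag(&["-Z", "unstable-options"]).as_deref(), Some("unstable-options"));
    assert_eq!(first_z_flag(&["--quiet"]), None);
}
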
@@ -59,7 +59,7 @@ use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
 use rustc_hash::FxHashSet;

 pub use crate::{
-    build_dependencies::WorkspaceBuildScripts,
+    build_dependencies::{ProcMacroDylibPath, WorkspaceBuildScripts},
     cargo_workspace::{
         CargoConfig, CargoFeatures, CargoMetadataConfig, CargoWorkspace, Package, PackageData,
         PackageDependency, RustLibSource, Target, TargetData, TargetKind,

@@ -139,21 +139,22 @@ impl ProjectManifest {
     }

     fn find_in_parent_dirs(path: &AbsPath, target_file_name: &str) -> Option<ManifestPath> {
-        if path.file_name().unwrap_or_default() == target_file_name {
-            if let Ok(manifest) = ManifestPath::try_from(path.to_path_buf()) {
+        if path.file_name().unwrap_or_default() == target_file_name
+            && let Ok(manifest) = ManifestPath::try_from(path.to_path_buf())
+        {
             return Some(manifest);
         }
-        }

         let mut curr = Some(path);

         while let Some(path) = curr {
             let candidate = path.join(target_file_name);
-            if fs::metadata(&candidate).is_ok() {
-                if let Ok(manifest) = ManifestPath::try_from(candidate) {
+            if fs::metadata(&candidate).is_ok()
+                && let Ok(manifest) = ManifestPath::try_from(candidate)
+            {
                 return Some(manifest);
             }
-            }
             curr = path.parent();
         }

@@ -143,13 +143,12 @@ impl Sysroot {
             Some(root) => {
                 // special case rustc, we can look that up directly in the sysroot's bin folder
                 // as it should never invoke another cargo binary
-                if let Tool::Rustc = tool {
-                    if let Some(path) =
+                if let Tool::Rustc = tool
+                    && let Some(path) =
                         probe_for_binary(root.join("bin").join(Tool::Rustc.name()).into())
                 {
                     return toolchain::command(path, current_dir, envs);
                 }
-                }

                 let mut cmd = toolchain::command(tool.prefer_proxy(), current_dir, envs);
                 if !envs.contains_key("RUSTUP_TOOLCHAIN")

|
|||||||
|
|
||||||
pub fn set_workspace(&mut self, workspace: RustLibSrcWorkspace) {
|
pub fn set_workspace(&mut self, workspace: RustLibSrcWorkspace) {
|
||||||
self.workspace = workspace;
|
self.workspace = workspace;
|
||||||
if self.error.is_none() {
|
if self.error.is_none()
|
||||||
if let Some(src_root) = &self.rust_lib_src_root {
|
&& let Some(src_root) = &self.rust_lib_src_root
|
||||||
|
{
|
||||||
let has_core = match &self.workspace {
|
let has_core = match &self.workspace {
|
||||||
RustLibSrcWorkspace::Workspace(ws) => {
|
RustLibSrcWorkspace::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"),
|
||||||
ws.packages().any(|p| ws[p].name == "core")
|
|
||||||
}
|
|
||||||
RustLibSrcWorkspace::Json(project_json) => project_json
|
RustLibSrcWorkspace::Json(project_json) => project_json
|
||||||
.crates()
|
.crates()
|
||||||
.filter_map(|(_, krate)| krate.display_name.clone())
|
.filter_map(|(_, krate)| krate.display_name.clone())
|
||||||
@@ -310,10 +308,8 @@ impl Sysroot {
             } else {
                 ", try running `rustup component add rust-src` to possibly fix this"
             };
-            self.error = Some(format!(
-                "sysroot at `{src_root}` is missing a `core` library{var_note}",
-            ));
-            }
+            self.error =
+                Some(format!("sysroot at `{src_root}` is missing a `core` library{var_note}",));
         }
     }
 }

@@ -24,7 +24,7 @@ use crate::{
     CargoConfig, CargoWorkspace, CfgOverrides, InvocationStrategy, ManifestPath, Package,
     ProjectJson, ProjectManifest, RustSourceWorkspaceConfig, Sysroot, TargetData, TargetKind,
     WorkspaceBuildScripts,
-    build_dependencies::BuildScriptOutput,
+    build_dependencies::{BuildScriptOutput, ProcMacroDylibPath},
     cargo_config_file,
     cargo_workspace::{CargoMetadataConfig, DepKind, FetchMetadata, PackageData, RustLibSource},
     env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env},

@@ -424,13 +424,13 @@ impl ProjectWorkspace {
             sysroot.set_workspace(loaded_sysroot);
         }

-        if !cargo.requires_rustc_private() {
-            if let Err(e) = &mut rustc {
+        if !cargo.requires_rustc_private()
+            && let Err(e) = &mut rustc
+        {
             // We don't need the rustc sources here,
             // so just discard the error.
             _ = e.take();
         }
-        }

         Ok(ProjectWorkspace {
             kind: ProjectWorkspaceKind::Cargo {

@@ -1163,8 +1163,7 @@ fn project_json_to_crate_graph(
                     crate = display_name.as_ref().map(|name| name.canonical_name().as_str()),
                     "added root to crate graph"
                 );
-                if *is_proc_macro {
-                    if let Some(path) = proc_macro_dylib_path.clone() {
+                if *is_proc_macro && let Some(path) = proc_macro_dylib_path.clone() {
                     let node = Ok((
                         display_name
                             .as_ref()

|
|||||||
));
|
));
|
||||||
proc_macros.insert(crate_graph_crate_id, node);
|
proc_macros.insert(crate_graph_crate_id, node);
|
||||||
}
|
}
|
||||||
}
|
|
||||||
(idx, crate_graph_crate_id)
|
(idx, crate_graph_crate_id)
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
@@ -1319,15 +1317,13 @@ fn cargo_to_crate_graph(

             // Add dep edge of all targets to the package's lib target
             if let Some((to, name)) = lib_tgt.clone() {
-                if to != from && kind != TargetKind::BuildScript {
-                    // (build script can not depend on its library target)
-
-                    // For root projects with dashes in their name,
-                    // cargo metadata does not do any normalization,
-                    // so we do it ourselves currently
+                match to != from && kind != TargetKind::BuildScript {
+                    true => {
                         let name = CrateName::normalize_dashes(&name);
                         add_dep(crate_graph, from, name, to);
                     }
+                    false => (),
+                }
             }
         }
     }

@@ -1638,9 +1634,21 @@ fn add_target_crate_root(
     let proc_macro = match build_data {
         Some((BuildScriptOutput { proc_macro_dylib_path, .. }, has_errors)) => {
             match proc_macro_dylib_path {
-                Some(path) => Ok((cargo_name.to_owned(), path.clone())),
-                None if has_errors => Err(ProcMacroLoadingError::FailedToBuild),
-                None => Err(ProcMacroLoadingError::MissingDylibPath),
+                ProcMacroDylibPath::Path(path) => Ok((cargo_name.to_owned(), path.clone())),
+                ProcMacroDylibPath::NotBuilt => Err(ProcMacroLoadingError::NotYetBuilt),
+                ProcMacroDylibPath::NotProcMacro | ProcMacroDylibPath::DylibNotFound(_)
+                    if has_errors =>
+                {
+                    Err(ProcMacroLoadingError::FailedToBuild)
+                }
+                ProcMacroDylibPath::NotProcMacro => {
+                    Err(ProcMacroLoadingError::ExpectedProcMacroArtifact)
+                }
+                ProcMacroDylibPath::DylibNotFound(candidates) => {
+                    Err(ProcMacroLoadingError::MissingDylibPath(
+                        candidates.iter().map(ToString::to_string).collect(),
+                    ))
+                }
             }
         }
         None => Err(ProcMacroLoadingError::NotYetBuilt),

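Putting the pieces together, the new build-data state decides which loader error the crate graph reports. The following is a self-contained recap of the mapping in the hunk above, with stand-in types (an interpretation for illustration, not code from the repository):

#[derive(Debug, PartialEq)]
enum LoadError {
    ExpectedProcMacroArtifact,
    FailedToBuild,
    MissingDylibPath(Vec<String>),
    NotYetBuilt,
}

enum DylibState {
    Path(String),
    DylibNotFound(Vec<String>),
    NotProcMacro,
    NotBuilt,
}

/// Stand-in for the mapping in add_target_crate_root above.
fn to_result(state: DylibState, has_errors: bool) -> Result<String, LoadError> {
    match state {
        DylibState::Path(path) => Ok(path),
        DylibState::NotBuilt => Err(LoadError::NotYetBuilt),
        // Build errors take priority over the more specific diagnoses.
        DylibState::NotProcMacro | DylibState::DylibNotFound(_) if has_errors => {
            Err(LoadError::FailedToBuild)
        }
        DylibState::NotProcMacro => Err(LoadError::ExpectedProcMacroArtifact),
        DylibState::DylibNotFound(candidates) => Err(LoadError::MissingDylibPath(candidates)),
    }
}

fn main() {
    assert_eq!(to_result(DylibState::Path("p.so".into()), false), Ok("p.so".to_owned()));
    assert_eq!(to_result(DylibState::NotBuilt, false), Err(LoadError::NotYetBuilt));
    assert_eq!(to_result(DylibState::NotProcMacro, true), Err(LoadError::FailedToBuild));
    assert_eq!(
        to_result(DylibState::DylibNotFound(vec!["foo.d".into()]), false),
        Err(LoadError::MissingDylibPath(vec!["foo.d".into()]))
    );
}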