fix: Split toolchain and datalayout out of CrateData

Lukas Wirth 2024-02-16 14:48:25 +01:00
parent a01655552d
commit b1404d387a
22 changed files with 246 additions and 345 deletions

View File

@ -11,7 +11,6 @@ use std::{fmt, mem, ops, str::FromStr};
use cfg::CfgOptions;
use la_arena::{Arena, Idx, RawIdx};
use rustc_hash::{FxHashMap, FxHashSet};
use semver::Version;
use syntax::SmolStr;
use triomphe::Arc;
use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
@ -292,16 +291,11 @@ pub struct CrateData {
pub dependencies: Vec<Dependency>,
pub origin: CrateOrigin,
pub is_proc_macro: bool,
// FIXME: These things should not be per crate! These are more per workspace crate graph level
// things. This info does need to be somewhat present though as to prevent deduplication from
// happening across different workspaces with different layouts.
pub target_layout: TargetLayoutLoadResult,
pub toolchain: Option<Version>,
}
impl CrateData {
/// Check if [`other`] is almost equal to [`self`] ignoring `CrateOrigin` value.
pub fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bool) -> bool {
fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bool) -> bool {
// This method has some obscure bits. These are mostly there to be compliant with
// some patches. References to the patches are given.
if self.root_file_id != other.root_file_id {
@ -353,10 +347,6 @@ impl CrateData {
slf_deps.eq(other_deps)
}
pub fn channel(&self) -> Option<ReleaseChannel> {
self.toolchain.as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre))
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
@ -439,8 +429,6 @@ impl CrateGraph {
env: Env,
is_proc_macro: bool,
origin: CrateOrigin,
target_layout: Result<Arc<str>, Arc<str>>,
toolchain: Option<Version>,
) -> CrateId {
let data = CrateData {
root_file_id,
@ -452,9 +440,7 @@ impl CrateGraph {
env,
dependencies: Vec::new(),
origin,
target_layout,
is_proc_macro,
toolchain,
};
self.arena.alloc(data)
}
@ -524,6 +510,10 @@ impl CrateGraph {
self.arena.is_empty()
}
pub fn len(&self) -> usize {
self.arena.len()
}
pub fn iter(&self) -> impl Iterator<Item = CrateId> + '_ {
self.arena.iter().map(|(idx, _)| idx)
}
@ -624,13 +614,16 @@ impl CrateGraph {
///
/// This will deduplicate the crates of the graph where possible.
/// Note that for deduplication to fully work, `self`'s crate dependencies must be sorted by crate id.
/// If the crate dependencies were sorted, the resulting graph from this `extend` call will also have the crate dependencies sorted.
/// If the crate dependencies were sorted, the resulting graph from this `extend` call will also
/// have the crate dependencies sorted.
///
/// Returns a mapping from `other`'s crate ids to the new crate ids in `self`.
pub fn extend(
&mut self,
mut other: CrateGraph,
proc_macros: &mut ProcMacroPaths,
on_finished: impl FnOnce(&FxHashMap<CrateId, CrateId>),
) {
may_merge: impl Fn((CrateId, &CrateData), (CrateId, &CrateData)) -> bool,
) -> FxHashMap<CrateId, CrateId> {
let topo = other.crates_in_topological_order();
let mut id_map: FxHashMap<CrateId, CrateId> = FxHashMap::default();
for topo in topo {
@ -639,6 +632,10 @@ impl CrateGraph {
crate_data.dependencies.iter_mut().for_each(|dep| dep.crate_id = id_map[&dep.crate_id]);
crate_data.dependencies.sort_by_key(|dep| dep.crate_id);
let res = self.arena.iter().find_map(|(id, data)| {
if !may_merge((id, &data), (topo, &crate_data)) {
return None;
}
match (&data.origin, &crate_data.origin) {
(a, b) if a == b => {
if data.eq_ignoring_origin_and_deps(crate_data, false) {
@ -663,8 +660,7 @@ impl CrateGraph {
None
});
if let Some((res, should_update_lib_to_local)) = res {
id_map.insert(topo, res);
let new_id = if let Some((res, should_update_lib_to_local)) = res {
if should_update_lib_to_local {
assert!(self.arena[res].origin.is_lib());
assert!(crate_data.origin.is_local());
@ -673,16 +669,17 @@ impl CrateGraph {
// Move local's dev dependencies into the newly-local-formerly-lib crate.
self.arena[res].dependencies = crate_data.dependencies.clone();
}
res
} else {
let id = self.arena.alloc(crate_data.clone());
id_map.insert(topo, id);
}
self.arena.alloc(crate_data.clone())
};
id_map.insert(topo, new_id);
}
*proc_macros =
mem::take(proc_macros).into_iter().map(|(id, macros)| (id_map[&id], macros)).collect();
on_finished(&id_map);
id_map
}
fn find_path(
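`extend` now returns the old-to-new crate id mapping instead of invoking an `on_finished` callback, so callers remap per-crate side data through it afterwards. A minimal, std-only sketch of that remapping step, with `u32` standing in for `CrateId` and hypothetical dylib paths:

use std::collections::HashMap;

fn main() {
    // Mapping returned by `extend`: other's crate ids -> ids in the merged graph.
    let id_map: HashMap<u32, u32> = HashMap::from([(0, 5), (1, 3)]);
    // Per-crate side data keyed by the old ids (here: proc-macro dylib paths).
    let proc_macro_paths: HashMap<u32, &str> =
        HashMap::from([(0, "libfoo_macros.so"), (1, "libbar_macros.so")]);
    // Rekey through the mapping, exactly like the `proc_macros` remap inside `extend`.
    let remapped: HashMap<u32, &str> =
        proc_macro_paths.into_iter().map(|(id, path)| (id_map[&id], path)).collect();
    assert_eq!(remapped[&5], "libfoo_macros.so");
    println!("{remapped:?}");
}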
@ -889,8 +886,6 @@ mod tests {
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
let crate2 = graph.add_crate_root(
FileId::from_raw(2u32),
@ -902,8 +897,6 @@ mod tests {
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
let crate3 = graph.add_crate_root(
FileId::from_raw(3u32),
@ -915,8 +908,6 @@ mod tests {
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
assert!(graph
.add_dep(
@ -951,8 +942,6 @@ mod tests {
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
let crate2 = graph.add_crate_root(
FileId::from_raw(2u32),
@ -964,8 +953,6 @@ mod tests {
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
assert!(graph
.add_dep(
@ -994,8 +981,6 @@ mod tests {
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
let crate2 = graph.add_crate_root(
FileId::from_raw(2u32),
@ -1007,8 +992,6 @@ mod tests {
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
let crate3 = graph.add_crate_root(
FileId::from_raw(3u32),
@ -1020,8 +1003,6 @@ mod tests {
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
assert!(graph
.add_dep(
@ -1050,8 +1031,6 @@ mod tests {
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
let crate2 = graph.add_crate_root(
FileId::from_raw(2u32),
@ -1063,8 +1042,6 @@ mod tests {
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("".into()),
None,
);
assert!(graph
.add_dep(

View File

@ -62,6 +62,20 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
/// The crate graph.
#[salsa::input]
fn crate_graph(&self) -> Arc<CrateGraph>;
// FIXME: Consider removing this, making HirDatabase::target_data_layout an input query
#[salsa::input]
fn data_layout(&self, krate: CrateId) -> TargetLayoutLoadResult;
#[salsa::input]
fn toolchain(&self, krate: CrateId) -> Option<Version>;
#[salsa::transparent]
fn toolchain_channel(&self, krate: CrateId) -> Option<ReleaseChannel>;
}
fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseChannel> {
db.toolchain(krate).as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre))
}
fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
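The new `toolchain_channel` query derives the release channel from the toolchain version's pre-release component. A small runnable sketch using the `semver` crate; the `channel_from_pre` helper is illustrative, the real code goes through `ReleaseChannel::from_str`:

use semver::Version;

fn channel_from_pre(pre: &str) -> &'static str {
    // Stand-in for ReleaseChannel::from_str on the pre-release identifier.
    match pre {
        "nightly" => "nightly",
        "beta" => "beta",
        "" => "stable",
        _ => "unknown",
    }
}

fn main() {
    let toolchain = Version::parse("1.76.0-nightly").unwrap();
    // `pre` is the pre-release identifier, e.g. "nightly" for nightly toolchains.
    println!("channel = {}", channel_from_pre(toolchain.pre.as_str()));
}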

View File

@ -1,6 +1,10 @@
//! Defines a unit of change that can be applied to the database to get the next
//! state. Changes are transactional.
use base_db::{salsa::Durability, CrateGraph, FileChange, SourceDatabaseExt, SourceRoot};
use base_db::{
salsa::Durability, CrateGraph, CrateId, FileChange, SourceDatabaseExt, SourceRoot,
TargetLayoutLoadResult, Version,
};
use la_arena::RawIdx;
use span::FileId;
use triomphe::Arc;
@ -10,6 +14,8 @@ use crate::{db::ExpandDatabase, proc_macro::ProcMacros};
pub struct Change {
pub source_change: FileChange,
pub proc_macros: Option<ProcMacros>,
pub toolchains: Option<Vec<Option<Version>>>,
pub target_data_layouts: Option<Vec<TargetLayoutLoadResult>>,
}
impl Change {
@ -22,6 +28,24 @@ impl Change {
if let Some(proc_macros) = self.proc_macros {
db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH);
}
if let Some(target_data_layouts) = self.target_data_layouts {
for (id, val) in target_data_layouts.into_iter().enumerate() {
db.set_data_layout_with_durability(
CrateId::from_raw(RawIdx::from(id as u32)),
val,
Durability::HIGH,
);
}
}
if let Some(toolchains) = self.toolchains {
for (id, val) in toolchains.into_iter().enumerate() {
db.set_toolchain_with_durability(
CrateId::from_raw(RawIdx::from(id as u32)),
val,
Durability::HIGH,
);
}
}
}
pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<str>>) {
@ -36,6 +60,14 @@ impl Change {
self.proc_macros = Some(proc_macros);
}
pub fn set_toolchains(&mut self, toolchains: Vec<Option<Version>>) {
self.toolchains = Some(toolchains);
}
pub fn set_target_data_layouts(&mut self, target_data_layouts: Vec<TargetLayoutLoadResult>) {
self.target_data_layouts = Some(target_data_layouts);
}
pub fn set_roots(&mut self, roots: Vec<SourceRoot>) {
self.source_change.set_roots(roots)
}
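`Change` now carries toolchains and data layouts as vectors with one entry per crate, applied by raw index (`CrateId::from_raw(idx)`). A std-only illustration of that invariant, with `String` stand-ins for `Version` and `TargetLayoutLoadResult`:

fn main() {
    let crate_count = 3; // would be crate_graph.len() in the real code
    // A workspace-wide toolchain/layout is simply repeated once per crate.
    let toolchains: Vec<Option<String>> =
        std::iter::repeat(Some("1.76.0-nightly".to_owned())).take(crate_count).collect();
    let layouts: Vec<Result<String, String>> =
        std::iter::repeat(Err("layout not loaded".to_owned())).take(crate_count).collect();
    assert_eq!(toolchains.len(), crate_count);
    assert_eq!(layouts.len(), crate_count);
    // `apply` walks the vectors by index; index i corresponds to CrateId::from_raw(i).
    for (idx, toolchain) in toolchains.iter().enumerate() {
        println!("crate {idx}: toolchain = {toolchain:?}, layout = {:?}", layouts[idx]);
    }
}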

View File

@ -31,7 +31,7 @@ impl DeclarativeMacroExpander {
call_id: MacroCallId,
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(call_id);
let toolchain = &db.crate_graph()[loc.def.krate].toolchain;
let toolchain = db.toolchain(loc.def.krate);
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
&base_db::Version {
@ -67,7 +67,7 @@ impl DeclarativeMacroExpander {
krate: CrateId,
call_site: Span,
) -> ExpandResult<tt::Subtree> {
let toolchain = &db.crate_graph()[krate].toolchain;
let toolchain = db.toolchain(krate);
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
&base_db::Version {
@ -119,7 +119,7 @@ impl DeclarativeMacroExpander {
_ => None,
}
};
let toolchain = crate_data.toolchain.as_ref();
let toolchain = db.toolchain(def_crate);
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
&base_db::Version {
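The expander's `>=1.76` check is built on `semver::VersionReq`; because a bare requirement does not match pre-release versions, the check is done against a copy of the version with its `pre` component cleared. A runnable sketch of that behaviour:

use semver::{Version, VersionReq};

fn main() {
    let req = VersionReq::parse(">=1.76").unwrap();
    let toolchain = Version::parse("1.76.0-nightly").unwrap();
    // A pre-release version does not satisfy a bare requirement...
    assert!(!req.matches(&toolchain));
    // ...so the comparison uses the version with the pre-release stripped.
    let stripped = Version::new(toolchain.major, toolchain.minor, toolchain.patch);
    assert!(req.matches(&stripped));
    println!("new_meta_vars = {}", req.matches(&stripped));
}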

View File

@ -11,10 +11,8 @@ pub fn target_data_layout_query(
db: &dyn HirDatabase,
krate: CrateId,
) -> Result<Arc<TargetDataLayout>, Arc<str>> {
let crate_graph = db.crate_graph();
let res = crate_graph[krate].target_layout.as_deref();
match res {
Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) {
match db.data_layout(krate) {
Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(&it) {
Ok(it) => Ok(Arc::new(it)),
Err(e) => {
Err(match e {
@ -44,6 +42,6 @@ pub fn target_data_layout_query(
}.into())
}
},
Err(e) => Err(Arc::from(&**e)),
Err(e) => Err(e),
}
}
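With `data_layout` now an input query returning `Result<Arc<str>, Arc<str>>`, the error arm can be propagated directly instead of re-wrapped via `Err(Arc::from(&**e))`. A std-only sketch of that plumbing; `parse_layout` is a stand-in for `TargetDataLayout::parse_from_llvm_datalayout_string`:

use std::sync::Arc;

fn parse_layout(raw: Result<Arc<str>, Arc<str>>) -> Result<usize, Arc<str>> {
    match raw {
        // Stand-in "parse": count the dash-separated specs in the layout string.
        Ok(s) => Ok(s.split('-').count()),
        // The error is already an Arc<str>, so it is returned as-is.
        Err(e) => Err(e),
    }
}

fn main() {
    let loaded: Result<Arc<str>, Arc<str>> = Ok(Arc::from("e-m:e-i64:64-n32:64-S128"));
    let missing: Result<Arc<str>, Arc<str>> = Err(Arc::from("target_data_layout not loaded"));
    println!("{:?}", parse_layout(loaded));
    println!("{:?}", parse_layout(missing));
}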

View File

@ -27,7 +27,7 @@ fn current_machine_data_layout() -> String {
fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutError> {
let target_data_layout = current_machine_data_layout();
let ra_fixture = format!(
"{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\n{ra_fixture}",
"//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\n{ra_fixture}",
);
let (db, file_ids) = TestDB::with_many_files(&ra_fixture);
@ -76,7 +76,7 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutError> {
let target_data_layout = current_machine_data_layout();
let ra_fixture = format!(
"{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\nfn main(){{let goal = {{{ra_fixture}}};}}",
"//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\nfn main(){{let goal = {{{ra_fixture}}};}}",
);
let (db, file_id) = TestDB::with_single_file(&ra_fixture);
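Since `target_data_layout` is now a fixture-wide meta line rather than a per-crate key, the layout tests prepend it before the minicore and file sections. A std-only sketch of the new fixture shape (the layout string and body are just examples):

fn main() {
    let target_data_layout = "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128";
    let minicore = "//- minicore: sized\n";
    let ra_fixture = "struct Goal(u32);";
    // Global meta first, then minicore, then the per-file sections.
    let fixture = format!(
        "//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\n{ra_fixture}"
    );
    println!("{fixture}");
}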

View File

@ -717,7 +717,7 @@ impl<'a> CompletionContext<'a> {
let krate = scope.krate();
let module = scope.module();
let toolchain = db.crate_graph()[krate.into()].channel();
let toolchain = db.toolchain_channel(krate.into());
// `toolchain == None` means we're in some detached files. Since we have no information on
// the toolchain being used, let's just allow unstable items to be listed.
let is_nightly = matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None);

View File

@ -501,7 +501,7 @@ fn get_doc_base_urls(
let Some(krate) = def.krate(db) else { return Default::default() };
let Some(display_name) = krate.display_name(db) else { return Default::default() };
let crate_data = &db.crate_graph()[krate.into()];
let channel = crate_data.channel().unwrap_or(ReleaseChannel::Nightly).as_str();
let channel = db.toolchain_channel(krate.into()).unwrap_or(ReleaseChannel::Nightly).as_str();
let (web_base, local_base) = match &crate_data.origin {
// std and co no longer specify `html_root_url`, so we have to handwrite this ourselves.

View File

@ -253,11 +253,11 @@ impl Analysis {
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None },
Err("Analysis::from_single_file has no target layout".into()),
None,
);
change.change_file(file_id, Some(Arc::from(text)));
change.set_crate_graph(crate_graph);
change.set_target_data_layouts(vec![Err("fixture has no layout".into())]);
change.set_toolchains(vec![None]);
host.apply_change(change);
(host.analysis(), file_id)
}

View File

@ -39,8 +39,6 @@ pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) {
data.env.clone(),
data.is_proc_macro,
data.origin.clone(),
data.target_layout.clone(),
data.toolchain.clone(),
);
new_proc_macros.insert(new_id, proc_macros[&old_id].clone());
map.insert(old_id, new_id);

View File

@ -72,8 +72,6 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
dependencies,
origin,
is_proc_macro,
target_layout,
toolchain,
} = &crate_graph[crate_id];
format_to!(
buf,
@ -91,12 +89,6 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
format_to!(buf, " Env: {:?}\n", env);
format_to!(buf, " Origin: {:?}\n", origin);
format_to!(buf, " Is a proc macro crate: {}\n", is_proc_macro);
format_to!(buf, " Workspace Target Layout: {:?}\n", target_layout);
format_to!(
buf,
" Workspace Toolchain: {}\n",
toolchain.as_ref().map_or_else(|| "n/a".into(), |v| v.to_string())
);
let deps = dependencies
.iter()
.map(|dep| format!("{}={}", dep.name, dep.crate_id.into_raw()))

View File

@ -2,7 +2,7 @@
//! for incorporating changes.
// Note, don't remove any public api from this. This API is consumed by external tools
// to run rust-analyzer as a library.
use std::{collections::hash_map::Entry, mem, path::Path, sync};
use std::{collections::hash_map::Entry, iter, mem, path::Path, sync};
use crossbeam_channel::{unbounded, Receiver};
use hir_expand::proc_macro::{
@ -106,7 +106,7 @@ pub fn load_workspace(
.collect()
};
let project_folders = ProjectFolders::new(&[ws], &[]);
let project_folders = ProjectFolders::new(std::slice::from_ref(&ws), &[]);
loader.set_config(vfs::loader::Config {
load: project_folders.load,
watch: vec![],
@ -114,6 +114,7 @@ pub fn load_workspace(
});
let host = load_crate_graph(
&ws,
crate_graph,
proc_macros,
project_folders.source_root_config,
@ -301,6 +302,7 @@ pub fn load_proc_macro(
}
fn load_crate_graph(
ws: &ProjectWorkspace,
crate_graph: CrateGraph,
proc_macros: ProcMacros,
source_root_config: SourceRootConfig,
@ -339,8 +341,17 @@ fn load_crate_graph(
let source_roots = source_root_config.partition(vfs);
analysis_change.set_roots(source_roots);
let num_crates = crate_graph.len();
analysis_change.set_crate_graph(crate_graph);
analysis_change.set_proc_macros(proc_macros);
if let ProjectWorkspace::Cargo { toolchain, target_layout, .. }
| ProjectWorkspace::Json { toolchain, target_layout, .. } = ws
{
analysis_change.set_target_data_layouts(
iter::repeat(target_layout.clone()).take(num_crates).collect(),
);
analysis_change.set_toolchains(iter::repeat(toolchain.clone()).take(num_crates).collect());
}
host.apply_change(analysis_change);
host

View File

@ -9,6 +9,7 @@ use expect_test::{expect_file, ExpectFile};
use paths::{AbsPath, AbsPathBuf};
use rustc_hash::FxHashMap;
use serde::de::DeserializeOwned;
use triomphe::Arc;
use crate::{
CargoWorkspace, CfgOverrides, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot,
@ -76,7 +77,7 @@ fn load_rust_project(file: &str) -> (CrateGraph, ProcMacroPaths) {
sysroot,
rustc_cfg: Vec::new(),
toolchain: None,
target_layout: Err("test has no data layout".to_owned()),
target_layout: Err(Arc::from("test has no data layout")),
};
to_crate_graph(project_workspace)
}
@ -237,7 +238,7 @@ fn crate_graph_dedup_identical() {
let (d_crate_graph, mut d_proc_macros) = (crate_graph.clone(), proc_macros.clone());
crate_graph.extend(d_crate_graph.clone(), &mut d_proc_macros, |_| ());
crate_graph.extend(d_crate_graph.clone(), &mut d_proc_macros, |_, _| true);
assert!(crate_graph.iter().eq(d_crate_graph.iter()));
assert_eq!(proc_macros, d_proc_macros);
}
@ -253,7 +254,7 @@ fn crate_graph_dedup() {
load_cargo_with_fake_sysroot(path_map, "regex-metadata.json");
assert_eq!(regex_crate_graph.iter().count(), 60);
crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |_| ());
crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |_, _| true);
assert_eq!(crate_graph.iter().count(), 118);
}
@ -266,7 +267,7 @@ fn test_deduplicate_origin_dev() {
let (crate_graph_1, mut _proc_macros_2) =
load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_B.json");
crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_| ());
crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_, _| true);
let mut crates_named_p2 = vec![];
for id in crate_graph.iter() {
@ -292,7 +293,7 @@ fn test_deduplicate_origin_dev_rev() {
let (crate_graph_1, mut _proc_macros_2) =
load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_A.json");
crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_| ());
crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_, _| true);
let mut crates_named_p2 = vec![];
for id in crate_graph.iter() {

View File

@ -71,7 +71,7 @@ pub enum ProjectWorkspace {
rustc_cfg: Vec<CfgFlag>,
cfg_overrides: CfgOverrides,
toolchain: Option<Version>,
target_layout: Result<String, String>,
target_layout: TargetLayoutLoadResult,
cargo_config_extra_env: FxHashMap<String, String>,
},
/// Project workspace was manually specified using a `rust-project.json` file.
@ -82,7 +82,7 @@ pub enum ProjectWorkspace {
/// `rustc --print cfg`.
rustc_cfg: Vec<CfgFlag>,
toolchain: Option<Version>,
target_layout: Result<String, String>,
target_layout: TargetLayoutLoadResult,
},
// FIXME: The primary limitation of this approach is that the set of detached files needs to be fixed at the beginning.
// That's not the end user experience we should strive for.
@ -335,7 +335,9 @@ impl ProjectWorkspace {
rustc_cfg,
cfg_overrides,
toolchain,
target_layout: data_layout.map_err(|it| it.to_string()),
target_layout: data_layout
.map(Arc::from)
.map_err(|it| Arc::from(it.to_string())),
cargo_config_extra_env,
}
}
@ -393,7 +395,7 @@ impl ProjectWorkspace {
sysroot,
rustc_cfg,
toolchain,
target_layout: data_layout.map_err(|it| it.to_string()),
target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
}
}
@ -690,20 +692,19 @@ impl ProjectWorkspace {
let _p = tracing::span!(tracing::Level::INFO, "ProjectWorkspace::to_crate_graph").entered();
let (mut crate_graph, proc_macros) = match self {
ProjectWorkspace::Json { project, sysroot, rustc_cfg, toolchain, target_layout } => {
project_json_to_crate_graph(
rustc_cfg.clone(),
load,
project,
sysroot.as_ref().ok(),
extra_env,
match target_layout.as_ref() {
Ok(it) => Ok(Arc::from(it.as_str())),
Err(it) => Err(Arc::from(it.as_str())),
},
toolchain.clone(),
)
}
ProjectWorkspace::Json {
project,
sysroot,
rustc_cfg,
toolchain: _,
target_layout: _,
} => project_json_to_crate_graph(
rustc_cfg.clone(),
load,
project,
sysroot.as_ref().ok(),
extra_env,
),
ProjectWorkspace::Cargo {
cargo,
sysroot,
@ -711,8 +712,8 @@ impl ProjectWorkspace {
rustc_cfg,
cfg_overrides,
build_scripts,
toolchain,
target_layout,
toolchain: _,
target_layout: _,
cargo_config_extra_env: _,
} => cargo_to_crate_graph(
load,
@ -722,20 +723,9 @@ impl ProjectWorkspace {
rustc_cfg.clone(),
cfg_overrides,
build_scripts,
match target_layout.as_ref() {
Ok(it) => Ok(Arc::from(it.as_str())),
Err(it) => Err(Arc::from(it.as_str())),
},
toolchain.as_ref(),
),
ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => {
detached_files_to_crate_graph(
rustc_cfg.clone(),
load,
files,
sysroot.as_ref().ok(),
Err("detached file projects have no target layout set".into()),
)
detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot.as_ref().ok())
}
};
if crate_graph.patch_cfg_if() {
@ -818,21 +808,12 @@ fn project_json_to_crate_graph(
project: &ProjectJson,
sysroot: Option<&Sysroot>,
extra_env: &FxHashMap<String, String>,
target_layout: TargetLayoutLoadResult,
toolchain: Option<Version>,
) -> (CrateGraph, ProcMacroPaths) {
let mut res = (CrateGraph::default(), ProcMacroPaths::default());
let (crate_graph, proc_macros) = &mut res;
let sysroot_deps = sysroot.as_ref().map(|sysroot| {
sysroot_to_crate_graph(
crate_graph,
sysroot,
rustc_cfg.clone(),
target_layout.clone(),
load,
toolchain.as_ref(),
)
});
let sysroot_deps = sysroot
.as_ref()
.map(|sysroot| sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load));
let r_a_cfg_flag = CfgFlag::Atom("rust_analyzer".to_owned());
let mut cfg_cache: FxHashMap<&str, Vec<CfgFlag>> = FxHashMap::default();
@ -887,8 +868,6 @@ fn project_json_to_crate_graph(
} else {
CrateOrigin::Local { repo: None, name: None }
},
target_layout.clone(),
toolchain.clone(),
);
if *is_proc_macro {
if let Some(path) = proc_macro_dylib_path.clone() {
@ -931,22 +910,13 @@ fn cargo_to_crate_graph(
rustc_cfg: Vec<CfgFlag>,
override_cfg: &CfgOverrides,
build_scripts: &WorkspaceBuildScripts,
target_layout: TargetLayoutLoadResult,
toolchain: Option<&Version>,
) -> (CrateGraph, ProcMacroPaths) {
let _p = tracing::span!(tracing::Level::INFO, "cargo_to_crate_graph").entered();
let mut res = (CrateGraph::default(), ProcMacroPaths::default());
let crate_graph = &mut res.0;
let proc_macros = &mut res.1;
let (public_deps, libproc_macro) = match sysroot {
Some(sysroot) => sysroot_to_crate_graph(
crate_graph,
sysroot,
rustc_cfg.clone(),
target_layout.clone(),
load,
toolchain,
),
Some(sysroot) => sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load),
None => (SysrootPublicDeps::default(), None),
};
@ -1012,9 +982,7 @@ fn cargo_to_crate_graph(
file_id,
name,
kind,
target_layout.clone(),
false,
toolchain.cloned(),
);
if let TargetKind::Lib { .. } = kind {
lib_tgt = Some((crate_id, name.clone()));
@ -1106,8 +1074,6 @@ fn cargo_to_crate_graph(
} else {
rustc_build_scripts
},
target_layout,
toolchain,
);
}
}
@ -1119,19 +1085,11 @@ fn detached_files_to_crate_graph(
load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
detached_files: &[AbsPathBuf],
sysroot: Option<&Sysroot>,
target_layout: TargetLayoutLoadResult,
) -> (CrateGraph, ProcMacroPaths) {
let _p = tracing::span!(tracing::Level::INFO, "detached_files_to_crate_graph").entered();
let mut crate_graph = CrateGraph::default();
let (public_deps, _libproc_macro) = match sysroot {
Some(sysroot) => sysroot_to_crate_graph(
&mut crate_graph,
sysroot,
rustc_cfg.clone(),
target_layout.clone(),
load,
None,
),
Some(sysroot) => sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load),
None => (SysrootPublicDeps::default(), None),
};
@ -1163,8 +1121,6 @@ fn detached_files_to_crate_graph(
repo: None,
name: display_name.map(|n| n.canonical_name().to_owned()),
},
target_layout.clone(),
None,
);
public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate);
@ -1185,8 +1141,6 @@ fn handle_rustc_crates(
cfg_options: &CfgOptions,
override_cfg: &CfgOverrides,
build_scripts: &WorkspaceBuildScripts,
target_layout: TargetLayoutLoadResult,
toolchain: Option<&Version>,
) {
let mut rustc_pkg_crates = FxHashMap::default();
// The root package of the rustc-dev component is rustc_driver, so we match that
@ -1239,9 +1193,7 @@ fn handle_rustc_crates(
file_id,
&rustc_workspace[tgt].name,
kind,
target_layout.clone(),
true,
toolchain.cloned(),
);
pkg_to_lib_crate.insert(pkg, crate_id);
// Add dependencies on core / std / alloc for this crate
@ -1310,9 +1262,7 @@ fn add_target_crate_root(
file_id: FileId,
cargo_name: &str,
kind: TargetKind,
target_layout: TargetLayoutLoadResult,
rustc_crate: bool,
toolchain: Option<Version>,
) -> CrateId {
let edition = pkg.edition;
let potential_cfg_options = if pkg.features.is_empty() {
@ -1367,8 +1317,6 @@ fn add_target_crate_root(
} else {
CrateOrigin::Library { repo: pkg.repository.clone(), name: pkg.name.clone() }
},
target_layout,
toolchain,
);
if let TargetKind::Lib { is_proc_macro: true } = kind {
let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) {
@ -1408,9 +1356,7 @@ fn sysroot_to_crate_graph(
crate_graph: &mut CrateGraph,
sysroot: &Sysroot,
rustc_cfg: Vec<CfgFlag>,
target_layout: TargetLayoutLoadResult,
load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
toolchain: Option<&Version>,
) -> (SysrootPublicDeps, Option<CrateId>) {
let _p = tracing::span!(tracing::Level::INFO, "sysroot_to_crate_graph").entered();
match sysroot.mode() {
@ -1423,8 +1369,6 @@ fn sysroot_to_crate_graph(
rustc_cfg,
&CfgOverrides::default(),
&WorkspaceBuildScripts::default(),
target_layout,
toolchain,
);
let mut pub_deps = vec![];
@ -1467,17 +1411,16 @@ fn sysroot_to_crate_graph(
// Remove all crates except the ones we are interested in to keep the sysroot graph small.
let removed_mapping = cg.remove_crates_except(&marker_set);
let mapping = crate_graph.extend(cg, &mut pm, |_, _| true);
crate_graph.extend(cg, &mut pm, |mapping| {
// Map the id through the removal mapping first, then through the crate graph extension mapping.
pub_deps.iter_mut().for_each(|(_, cid, _)| {
*cid = mapping[&removed_mapping[cid.into_raw().into_u32() as usize].unwrap()]
});
if let Some(libproc_macro) = &mut libproc_macro {
*libproc_macro = mapping
[&removed_mapping[libproc_macro.into_raw().into_u32() as usize].unwrap()];
}
// Map the id through the removal mapping first, then through the crate graph extension mapping.
pub_deps.iter_mut().for_each(|(_, cid, _)| {
*cid = mapping[&removed_mapping[cid.into_raw().into_u32() as usize].unwrap()]
});
if let Some(libproc_macro) = &mut libproc_macro {
*libproc_macro = mapping
[&removed_mapping[libproc_macro.into_raw().into_u32() as usize].unwrap()];
}
(SysrootPublicDeps { deps: pub_deps }, libproc_macro)
}
@ -1501,8 +1444,6 @@ fn sysroot_to_crate_graph(
env,
false,
CrateOrigin::Lang(LangCrateOrigin::from(&*stitched[krate].name)),
target_layout.clone(),
toolchain.cloned(),
);
Some((krate, crate_id))
})

View File

@ -59,10 +59,6 @@
),
},
is_proc_macro: false,
target_layout: Err(
"target_data_layout not loaded",
),
toolchain: None,
},
1: CrateData {
root_file_id: FileId(
@ -132,10 +128,6 @@
),
},
is_proc_macro: false,
target_layout: Err(
"target_data_layout not loaded",
),
toolchain: None,
},
2: CrateData {
root_file_id: FileId(
@ -205,10 +197,6 @@
),
},
is_proc_macro: false,
target_layout: Err(
"target_data_layout not loaded",
),
toolchain: None,
},
3: CrateData {
root_file_id: FileId(
@ -278,10 +266,6 @@
),
},
is_proc_macro: false,
target_layout: Err(
"target_data_layout not loaded",
),
toolchain: None,
},
4: CrateData {
root_file_id: FileId(
@ -347,9 +331,5 @@
name: "libc",
},
is_proc_macro: false,
target_layout: Err(
"target_data_layout not loaded",
),
toolchain: None,
},
}

View File

@ -59,10 +59,6 @@
),
},
is_proc_macro: false,
target_layout: Err(
"target_data_layout not loaded",
),
toolchain: None,
},
1: CrateData {
root_file_id: FileId(
@ -132,10 +128,6 @@
),
},
is_proc_macro: false,
target_layout: Err(
"target_data_layout not loaded",
),
toolchain: None,
},
2: CrateData {
root_file_id: FileId(
@ -205,10 +197,6 @@
),
},
is_proc_macro: false,
target_layout: Err(
"target_data_layout not loaded",
),
toolchain: None,
},
3: CrateData {
root_file_id: FileId(
@ -278,10 +266,6 @@
),
},
is_proc_macro: false,
target_layout: Err(
"target_data_layout not loaded",
),
toolchain: None,
},
4: CrateData {
root_file_id: FileId(
@ -347,9 +331,5 @@
name: "libc",
},
is_proc_macro: false,
target_layout: Err(
"target_data_layout not loaded",
),
toolchain: None,
},
}

View File

@ -58,10 +58,6 @@
),
},
is_proc_macro: false,
target_layout: Err(
"target_data_layout not loaded",
),
toolchain: None,
},
1: CrateData {
root_file_id: FileId(
@ -130,10 +126,6 @@
),
},
is_proc_macro: false,
target_layout: Err(
"target_data_layout not loaded",
),
toolchain: None,
},
2: CrateData {
root_file_id: FileId(
@ -202,10 +194,6 @@
),
},
is_proc_macro: false,
target_layout: Err(
"target_data_layout not loaded",
),
toolchain: None,
},
3: CrateData {
root_file_id: FileId(
@ -274,10 +262,6 @@
),
},
is_proc_macro: false,
target_layout: Err(
"target_data_layout not loaded",
),
toolchain: None,
},
4: CrateData {
root_file_id: FileId(
@ -343,9 +327,5 @@
name: "libc",
},
is_proc_macro: false,
target_layout: Err(
"target_data_layout not loaded",
),
toolchain: None,
},
}

View File

@ -36,10 +36,6 @@
Alloc,
),
is_proc_macro: false,
target_layout: Err(
"test has no data layout",
),
toolchain: None,
},
1: CrateData {
root_file_id: FileId(
@ -69,10 +65,6 @@
Core,
),
is_proc_macro: false,
target_layout: Err(
"test has no data layout",
),
toolchain: None,
},
2: CrateData {
root_file_id: FileId(
@ -102,10 +94,6 @@
Other,
),
is_proc_macro: false,
target_layout: Err(
"test has no data layout",
),
toolchain: None,
},
3: CrateData {
root_file_id: FileId(
@ -135,10 +123,6 @@
Other,
),
is_proc_macro: false,
target_layout: Err(
"test has no data layout",
),
toolchain: None,
},
4: CrateData {
root_file_id: FileId(
@ -185,10 +169,6 @@
ProcMacro,
),
is_proc_macro: false,
target_layout: Err(
"test has no data layout",
),
toolchain: None,
},
5: CrateData {
root_file_id: FileId(
@ -218,10 +198,6 @@
Other,
),
is_proc_macro: false,
target_layout: Err(
"test has no data layout",
),
toolchain: None,
},
6: CrateData {
root_file_id: FileId(
@ -316,10 +292,6 @@
Std,
),
is_proc_macro: false,
target_layout: Err(
"test has no data layout",
),
toolchain: None,
},
7: CrateData {
root_file_id: FileId(
@ -349,10 +321,6 @@
Other,
),
is_proc_macro: false,
target_layout: Err(
"test has no data layout",
),
toolchain: None,
},
8: CrateData {
root_file_id: FileId(
@ -382,10 +350,6 @@
Test,
),
is_proc_macro: false,
target_layout: Err(
"test has no data layout",
),
toolchain: None,
},
9: CrateData {
root_file_id: FileId(
@ -415,10 +379,6 @@
Other,
),
is_proc_macro: false,
target_layout: Err(
"test has no data layout",
),
toolchain: None,
},
10: CrateData {
root_file_id: FileId(
@ -492,9 +452,5 @@
),
},
is_proc_macro: false,
target_layout: Err(
"test has no data layout",
),
toolchain: None,
},
}

View File

@ -524,8 +524,8 @@ impl GlobalState {
}
fn recreate_crate_graph(&mut self, cause: String) {
// Create crate graph from all the workspaces
let (crate_graph, proc_macro_paths, crate_graph_file_dependencies) = {
{
// Create crate graph from all the workspaces
let vfs = &mut self.vfs.write().0;
let loader = &mut self.loader;
// crate graph construction relies on these paths, record them so when one of them gets
@ -548,31 +548,71 @@ impl GlobalState {
};
let mut crate_graph = CrateGraph::default();
let mut proc_macros = Vec::default();
let mut proc_macro_paths = Vec::default();
let mut layouts = Vec::default();
let mut toolchains = Vec::default();
let e = Err(Arc::from("missing layout"));
for ws in &**self.workspaces {
let (other, mut crate_proc_macros) =
ws.to_crate_graph(&mut load, self.config.extra_env());
crate_graph.extend(other, &mut crate_proc_macros, |_| {});
proc_macros.push(crate_proc_macros);
let num_layouts = layouts.len();
let num_toolchains = toolchains.len();
let (toolchain, layout) = match ws {
ProjectWorkspace::Cargo { toolchain, target_layout, .. }
| ProjectWorkspace::Json { toolchain, target_layout, .. } => {
(toolchain.clone(), target_layout.clone())
}
ProjectWorkspace::DetachedFiles { .. } => {
(None, Err("detached files have no layout".into()))
}
};
let mapping = crate_graph.extend(
other,
&mut crate_proc_macros,
|(cg_id, _cg_data), (_o_id, _o_data)| {
// Crates can only be deduplicated if the layout and toolchain already recorded
// for the candidate crate match those of the workspace being merged in.
layouts[cg_id.into_raw().into_u32() as usize] == layout
&& toolchains[cg_id.into_raw().into_u32() as usize] == toolchain
},
);
// Populate the side tables for the newly merged crates
mapping.values().for_each(|val| {
let idx = val.into_raw().into_u32() as usize;
// we only need to consider crates that were not merged and remapped, as the
// ones that were remapped already have the correct layout and toolchain
if idx >= num_layouts {
if layouts.len() <= idx {
layouts.resize(idx + 1, e.clone());
}
layouts[idx] = layout.clone();
}
if idx >= num_toolchains {
if toolchains.len() <= idx {
toolchains.resize(idx + 1, None);
}
toolchains[idx] = toolchain.clone();
}
});
proc_macro_paths.push(crate_proc_macros);
}
(crate_graph, proc_macros, crate_graph_file_dependencies)
};
let mut change = Change::new();
if self.config.expand_proc_macros() {
change.set_proc_macros(
crate_graph
.iter()
.map(|id| (id, Err("Proc-macros have not been built yet".to_owned())))
.collect(),
);
self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths);
let mut change = Change::new();
if self.config.expand_proc_macros() {
change.set_proc_macros(
crate_graph
.iter()
.map(|id| (id, Err("Proc-macros have not been built yet".to_owned())))
.collect(),
);
self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths);
}
change.set_crate_graph(crate_graph);
self.analysis_host.apply_change(change);
self.crate_graph_file_dependencies = crate_graph_file_dependencies;
}
change.set_crate_graph(crate_graph);
self.analysis_host.apply_change(change);
self.crate_graph_file_dependencies = crate_graph_file_dependencies;
self.process_changes();
self.reload_flycheck();
}
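The side tables in `recreate_crate_graph` are grown on demand for newly allocated crate ids and left untouched for ids that were deduplicated into an earlier workspace's crates. A std-only sketch of that resize-and-skip bookkeeping, with `&'static str` standing in for the layout type and illustrative indices:

fn record_layout(layouts: &mut Vec<Option<&'static str>>, num_before: usize, idx: usize, layout: &'static str) {
    // Ids below `num_before` were merged into existing crates and already have
    // the correct layout recorded, so only new ids are written.
    if idx >= num_before {
        if layouts.len() <= idx {
            layouts.resize(idx + 1, None);
        }
        layouts[idx] = Some(layout);
    }
}

fn main() {
    // Two crates from the first workspace are already recorded.
    let mut layouts = vec![Some("layout-ws-a"), Some("layout-ws-a")];
    let num_before = layouts.len();
    // Crate 1 of the second workspace was deduplicated: nothing changes.
    record_layout(&mut layouts, num_before, 1, "layout-ws-b");
    // Crate 3 is new: the table grows and records the second workspace's layout.
    record_layout(&mut layouts, num_before, 3, "layout-ws-b");
    assert_eq!(layouts, [Some("layout-ws-a"), Some("layout-ws-a"), None, Some("layout-ws-b")]);
    println!("{layouts:?}");
}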

View File

@ -101,8 +101,13 @@ impl Project<'_> {
};
});
let FixtureWithProjectMeta { fixture, mini_core, proc_macro_names, toolchain } =
FixtureWithProjectMeta::parse(self.fixture);
let FixtureWithProjectMeta {
fixture,
mini_core,
proc_macro_names,
toolchain,
target_data_layout: _,
} = FixtureWithProjectMeta::parse(self.fixture);
assert!(proc_macro_names.is_empty());
assert!(mini_core.is_none());
assert!(toolchain.is_none());

View File

@ -1,5 +1,5 @@
//! A set of high-level utility fixture methods to use in tests.
use std::{mem, ops::Not, str::FromStr, sync};
use std::{iter, mem, ops::Not, str::FromStr, sync};
use base_db::{
CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind,
@ -118,8 +118,14 @@ impl ChangeFixture {
ra_fixture: &str,
mut proc_macro_defs: Vec<(String, ProcMacro)>,
) -> ChangeFixture {
let FixtureWithProjectMeta { fixture, mini_core, proc_macro_names, toolchain } =
FixtureWithProjectMeta::parse(ra_fixture);
let FixtureWithProjectMeta {
fixture,
mini_core,
proc_macro_names,
toolchain,
target_data_layout,
} = FixtureWithProjectMeta::parse(ra_fixture);
let target_data_layout = Ok(target_data_layout.into());
let toolchain = Some({
let channel = toolchain.as_deref().unwrap_or("stable");
Version::parse(&format!("1.76.0-{channel}")).unwrap()
@ -131,7 +137,6 @@ impl ChangeFixture {
let mut crates = FxHashMap::default();
let mut crate_deps = Vec::new();
let mut default_crate_root: Option<FileId> = None;
let mut default_target_data_layout: Option<String> = None;
let mut default_cfg = CfgOptions::default();
let mut default_env = Env::new_for_test_fixture();
@ -187,11 +192,6 @@ impl ChangeFixture {
meta.env,
false,
origin,
meta.target_data_layout
.as_deref()
.map(From::from)
.ok_or_else(|| "target_data_layout unset".into()),
toolchain.clone(),
);
let prev = crates.insert(crate_name.clone(), crate_id);
assert!(prev.is_none(), "multiple crates with same name: {}", crate_name);
@ -205,7 +205,6 @@ impl ChangeFixture {
default_crate_root = Some(file_id);
default_cfg.extend(meta.cfg.into_iter());
default_env.extend(meta.env.iter().map(|(x, y)| (x.to_owned(), y.to_owned())));
default_target_data_layout = meta.target_data_layout;
}
source_change.change_file(file_id, Some(text.into()));
@ -228,10 +227,6 @@ impl ChangeFixture {
default_env,
false,
CrateOrigin::Local { repo: None, name: None },
default_target_data_layout
.map(|it| it.into())
.ok_or_else(|| "target_data_layout unset".into()),
toolchain.clone(),
);
} else {
for (from, to, prelude) in crate_deps {
@ -250,10 +245,6 @@ impl ChangeFixture {
.unwrap();
}
}
let target_layout = crate_graph.iter().next().map_or_else(
|| Err("target_data_layout unset".into()),
|it| crate_graph[it].target_layout.clone(),
);
if let Some(mini_core) = mini_core {
let core_file = file_id;
@ -277,8 +268,6 @@ impl ChangeFixture {
Env::new_for_test_fixture(),
false,
CrateOrigin::Lang(LangCrateOrigin::Core),
target_layout.clone(),
toolchain.clone(),
);
for krate in all_crates {
@ -322,8 +311,6 @@ impl ChangeFixture {
Env::new_for_test_fixture(),
true,
CrateOrigin::Local { repo: None, name: None },
target_layout,
toolchain,
);
proc_macros.insert(proc_macros_crate, Ok(proc_macro));
@ -346,17 +333,20 @@ impl ChangeFixture {
SourceRootKind::Library => SourceRoot::new_library(mem::take(&mut file_set)),
};
roots.push(root);
source_change.set_roots(roots);
source_change.set_crate_graph(crate_graph);
ChangeFixture {
file_position,
files,
change: Change {
source_change,
proc_macros: proc_macros.is_empty().not().then_some(proc_macros),
},
}
let mut change = Change {
source_change,
proc_macros: proc_macros.is_empty().not().then_some(proc_macros),
toolchains: Some(iter::repeat(toolchain).take(crate_graph.len()).collect()),
target_data_layouts: Some(
iter::repeat(target_data_layout).take(crate_graph.len()).collect(),
),
};
change.source_change.set_roots(roots);
change.source_change.set_crate_graph(crate_graph);
ChangeFixture { file_position, files, change }
}
}
@ -475,7 +465,6 @@ struct FileMeta {
edition: Edition,
env: Env,
introduce_new_source_root: Option<SourceRootKind>,
target_data_layout: Option<String>,
}
impl FileMeta {
@ -507,7 +496,6 @@ impl FileMeta {
edition: f.edition.map_or(Edition::CURRENT, |v| Edition::from_str(&v).unwrap()),
env: f.env.into_iter().collect(),
introduce_new_source_root,
target_data_layout: f.target_data_layout,
}
}
}

View File

@ -126,11 +126,6 @@ pub struct Fixture {
///
/// Syntax: `library`
pub library: bool,
/// Specifies LLVM data layout to be used.
///
/// You probably don't want to manually specify this. See LLVM manual for the
/// syntax, if you must: https://llvm.org/docs/LangRef.html#data-layout
pub target_data_layout: Option<String>,
/// Actual file contents. All meta comments are stripped.
pub text: String,
}
@ -145,6 +140,11 @@ pub struct FixtureWithProjectMeta {
pub mini_core: Option<MiniCore>,
pub proc_macro_names: Vec<String>,
pub toolchain: Option<String>,
/// Specifies LLVM data layout to be used.
///
/// You probably don't want to manually specify this. See LLVM manual for the
/// syntax, if you must: https://llvm.org/docs/LangRef.html#data-layout
pub target_data_layout: String,
}
impl FixtureWithProjectMeta {
@ -172,6 +172,8 @@ impl FixtureWithProjectMeta {
let fixture = trim_indent(ra_fixture);
let mut fixture = fixture.as_str();
let mut toolchain = None;
let mut target_data_layout =
"e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_owned();
let mut mini_core = None;
let mut res: Vec<Fixture> = Vec::new();
let mut proc_macro_names = vec![];
@ -182,6 +184,12 @@ impl FixtureWithProjectMeta {
fixture = remain;
}
if let Some(meta) = fixture.strip_prefix("//- target_data_layout:") {
let (meta, remain) = meta.split_once('\n').unwrap();
target_data_layout = meta.trim().to_owned();
fixture = remain;
}
if let Some(meta) = fixture.strip_prefix("//- proc_macros:") {
let (meta, remain) = meta.split_once('\n').unwrap();
proc_macro_names = meta.split(',').map(|it| it.trim().to_owned()).collect();
@ -225,7 +233,7 @@ impl FixtureWithProjectMeta {
}
}
Self { fixture: res, mini_core, proc_macro_names, toolchain }
Self { fixture: res, mini_core, proc_macro_names, toolchain, target_data_layout }
}
//- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo
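A std-only sketch of how the new top-level `//- target_data_layout:` line is peeled off the fixture text, mirroring the `strip_prefix`/`split_once` handling above; the default string is the one hard-coded in `FixtureWithProjectMeta::parse`:

fn main() {
    let input = "//- target_data_layout: e-m:e-i64:64-n32:64-S128\n//- /main.rs crate:test\nfn f() {}\n";
    let mut target_data_layout =
        "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_owned();
    let mut fixture = input;
    if let Some(meta) = fixture.strip_prefix("//- target_data_layout:") {
        let (meta, remain) = meta.split_once('\n').unwrap();
        target_data_layout = meta.trim().to_owned();
        fixture = remain;
    }
    println!("layout = {target_data_layout}");
    println!("remaining fixture:\n{fixture}");
}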
@ -245,9 +253,6 @@ impl FixtureWithProjectMeta {
let mut env = FxHashMap::default();
let mut introduce_new_source_root = None;
let mut library = false;
let mut target_data_layout = Some(
"e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_owned(),
);
for component in components {
if component == "library" {
library = true;
@ -284,7 +289,6 @@ impl FixtureWithProjectMeta {
}
}
"new_source_root" => introduce_new_source_root = Some(value.to_owned()),
"target_data_layout" => target_data_layout = Some(value.to_owned()),
_ => panic!("bad component: {component:?}"),
}
}
@ -307,7 +311,6 @@ impl FixtureWithProjectMeta {
env,
introduce_new_source_root,
library,
target_data_layout,
}
}
}
@ -476,16 +479,21 @@ fn parse_fixture_checks_further_indented_metadata() {
#[test]
fn parse_fixture_gets_full_meta() {
let FixtureWithProjectMeta { fixture: parsed, mini_core, proc_macro_names, toolchain } =
FixtureWithProjectMeta::parse(
r#"
let FixtureWithProjectMeta {
fixture: parsed,
mini_core,
proc_macro_names,
toolchain,
target_data_layout: _,
} = FixtureWithProjectMeta::parse(
r#"
//- toolchain: nightly
//- proc_macros: identity
//- minicore: coerce_unsized
//- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b,atom env:OUTDIR=path/to,OTHER=foo
mod m;
"#,
);
);
assert_eq!(toolchain, Some("nightly".to_owned()));
assert_eq!(proc_macro_names, vec!["identity".to_owned()]);
assert_eq!(mini_core.unwrap().activated_flags, vec!["coerce_unsized".to_owned()]);