Auto merge of #122272 - lnicola:sync-from-ra, r=lnicola

Subtree update of `rust-analyzer`

r? ghost

Commit 9bd88ef612
@@ -3,6 +3,7 @@ xtask = "run --package xtask --bin xtask --"
tq = "test -- -q"
qt = "tq"
lint = "clippy --all-targets -- --cap-lints warn"
codegen = "run --package xtask --bin xtask -- codegen"

[target.x86_64-pc-windows-msvc]
linker = "rust-lld"
@@ -79,6 +79,9 @@ jobs:
        if: matrix.os == 'ubuntu-latest'
        run: sed -i '/\[profile.dev]/a opt-level=1' Cargo.toml

      - name: Codegen checks (rust-analyzer)
        run: cargo codegen --check

      - name: Compile (tests)
        run: cargo test --no-run --locked ${{ env.USE_SYSROOT_ABI }}
@@ -5,7 +5,6 @@ extend-exclude = [
    "crates/parser/test_data/lexer/err/",
    "crates/project-model/test_data/",
]
ignore-hidden = false

[default]
extend-ignore-re = [

File diff suppressed because it is too large.
@@ -84,11 +84,11 @@ tt = { path = "./crates/tt", version = "0.0.0" }
vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }

ra-ap-rustc_lexer = { version = "0.35.0", default-features = false }
ra-ap-rustc_parse_format = { version = "0.35.0", default-features = false }
ra-ap-rustc_index = { version = "0.35.0", default-features = false }
ra-ap-rustc_abi = { version = "0.35.0", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.37.0", default-features = false }
ra-ap-rustc_lexer = { version = "0.42.0", default-features = false }
ra-ap-rustc_parse_format = { version = "0.42.0", default-features = false }
ra-ap-rustc_index = { version = "0.42.0", default-features = false }
ra-ap-rustc_abi = { version = "0.42.0", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.42.0", default-features = false }

# local crates that aren't published to crates.io. These should not have versions.
sourcegen = { path = "./crates/sourcegen" }
@@ -108,6 +108,7 @@ cargo_metadata = "0.18.1"
command-group = "2.0.1"
crossbeam-channel = "0.5.8"
dissimilar = "1.0.7"
dot = "0.1.4"
either = "1.9.0"
expect-test = "1.4.0"
hashbrown = { version = "0.14", features = [
@@ -117,6 +118,16 @@ indexmap = "2.1.0"
itertools = "0.12.0"
libc = "0.2.150"
nohash-hasher = "0.2.0"
oorandom = "11.1.3"
object = { version = "0.33.0", default-features = false, features = [
    "std",
    "read_core",
    "elf",
    "macho",
    "pe",
] }
pulldown-cmark-to-cmark = "10.0.4"
pulldown-cmark = { version = "0.9.0", default-features = false }
rayon = "1.8.0"
rustc-hash = "1.1.0"
semver = "1.0.14"
@@ -137,6 +148,7 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features =
    "tracing-log",
] }
triomphe = { version = "0.1.10", default-features = false, features = ["std"] }
url = "2.3.1"
xshell = "0.2.5"

@@ -146,6 +158,7 @@ dashmap = { version = "=5.5.3", features = ["raw-api"] }
[workspace.lints.rust]
rust_2018_idioms = "warn"
unused_lifetimes = "warn"
unreachable_pub = "warn"
semicolon_in_expressions_from_macros = "warn"

[workspace.lints.clippy]
@@ -21,7 +21,6 @@ tracing.workspace = true

# local deps
cfg.workspace = true
profile.workspace = true
stdx.workspace = true
syntax.workspace = true
vfs.workspace = true
@@ -43,7 +43,7 @@ pub trait Upcast<T: ?Sized> {
}

pub const DEFAULT_PARSE_LRU_CAP: usize = 128;
pub const DEFAULT_BORROWCK_LRU_CAP: usize = 256;
pub const DEFAULT_BORROWCK_LRU_CAP: usize = 1024;

pub trait FileLoader {
    /// Text of the file.
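For context, these constants are the defaults used to cap how many parse and borrow-check query results are kept alive (the borrow-check cap is raised from 256 to 1024 in this commit). A minimal, self-contained sketch of how such caps might be applied; `AnalysisDatabase` and `set_lru_capacity` are illustrative stand-ins, not the real rust-analyzer API:

use std::collections::HashMap;

pub const DEFAULT_PARSE_LRU_CAP: usize = 128;
pub const DEFAULT_BORROWCK_LRU_CAP: usize = 1024;

/// Illustrative stand-in for a query database that tracks an LRU cap per query.
struct AnalysisDatabase {
    lru_caps: HashMap<&'static str, usize>,
}

impl AnalysisDatabase {
    fn set_lru_capacity(&mut self, query: &'static str, cap: usize) {
        self.lru_caps.insert(query, cap);
    }
}

fn main() {
    let mut db = AnalysisDatabase { lru_caps: HashMap::new() };
    db.set_lru_capacity("parse", DEFAULT_PARSE_LRU_CAP);
    db.set_lru_capacity("borrowck", DEFAULT_BORROWCK_LRU_CAP);
    assert_eq!(db.lru_caps["borrowck"], 1024);
}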
src/tools/rust-analyzer/crates/flycheck/src/command.rs (new file, 156 lines)
@@ -0,0 +1,156 @@
//! Utilities for running a cargo command like `cargo check` or `cargo test` in a separate thread and
//! parse its stdout/stderr.

use std::{
    ffi::OsString,
    fmt, io,
    path::PathBuf,
    process::{ChildStderr, ChildStdout, Command, Stdio},
};

use command_group::{CommandGroup, GroupChild};
use crossbeam_channel::{unbounded, Receiver, Sender};
use stdx::process::streaming_output;

/// Cargo output is structured as a one JSON per line. This trait abstracts parsing one line of
/// cargo output into a Rust data type.
pub(crate) trait ParseFromLine: Sized + Send + 'static {
    fn from_line(line: &str, error: &mut String) -> Option<Self>;
    fn from_eof() -> Option<Self>;
}

struct CargoActor<T> {
    sender: Sender<T>,
    stdout: ChildStdout,
    stderr: ChildStderr,
}

impl<T: ParseFromLine> CargoActor<T> {
    fn new(sender: Sender<T>, stdout: ChildStdout, stderr: ChildStderr) -> Self {
        CargoActor { sender, stdout, stderr }
    }

    fn run(self) -> io::Result<(bool, String)> {
        // We manually read a line at a time, instead of using serde's
        // stream deserializers, because the deserializer cannot recover
        // from an error, resulting in it getting stuck, because we try to
        // be resilient against failures.
        //
        // Because cargo only outputs one JSON object per line, we can
        // simply skip a line if it doesn't parse, which just ignores any
        // erroneous output.

        let mut stdout_errors = String::new();
        let mut stderr_errors = String::new();
        let mut read_at_least_one_stdout_message = false;
        let mut read_at_least_one_stderr_message = false;
        let process_line = |line: &str, error: &mut String| {
            // Try to deserialize a message from Cargo or Rustc.
            if let Some(t) = T::from_line(line, error) {
                self.sender.send(t).unwrap();
                true
            } else {
                false
            }
        };
        let output = streaming_output(
            self.stdout,
            self.stderr,
            &mut |line| {
                if process_line(line, &mut stdout_errors) {
                    read_at_least_one_stdout_message = true;
                }
            },
            &mut |line| {
                if process_line(line, &mut stderr_errors) {
                    read_at_least_one_stderr_message = true;
                }
            },
            &mut || {
                if let Some(t) = T::from_eof() {
                    self.sender.send(t).unwrap();
                }
            },
        );

        let read_at_least_one_message =
            read_at_least_one_stdout_message || read_at_least_one_stderr_message;
        let mut error = stdout_errors;
        error.push_str(&stderr_errors);
        match output {
            Ok(_) => Ok((read_at_least_one_message, error)),
            Err(e) => Err(io::Error::new(e.kind(), format!("{e:?}: {error}"))),
        }
    }
}

struct JodGroupChild(GroupChild);

impl Drop for JodGroupChild {
    fn drop(&mut self) {
        _ = self.0.kill();
        _ = self.0.wait();
    }
}

/// A handle to a cargo process used for fly-checking.
pub(crate) struct CommandHandle<T> {
    /// The handle to the actual cargo process. As we cannot cancel directly from with
    /// a read syscall dropping and therefore terminating the process is our best option.
    child: JodGroupChild,
    thread: stdx::thread::JoinHandle<io::Result<(bool, String)>>,
    pub(crate) receiver: Receiver<T>,
    program: OsString,
    arguments: Vec<OsString>,
    current_dir: Option<PathBuf>,
}

impl<T> fmt::Debug for CommandHandle<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("CommandHandle")
            .field("program", &self.program)
            .field("arguments", &self.arguments)
            .field("current_dir", &self.current_dir)
            .finish()
    }
}

impl<T: ParseFromLine> CommandHandle<T> {
    pub(crate) fn spawn(mut command: Command) -> std::io::Result<Self> {
        command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());
        let mut child = command.group_spawn().map(JodGroupChild)?;

        let program = command.get_program().into();
        let arguments = command.get_args().map(|arg| arg.into()).collect::<Vec<OsString>>();
        let current_dir = command.get_current_dir().map(|arg| arg.to_path_buf());

        let stdout = child.0.inner().stdout.take().unwrap();
        let stderr = child.0.inner().stderr.take().unwrap();

        let (sender, receiver) = unbounded();
        let actor = CargoActor::<T>::new(sender, stdout, stderr);
        let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
            .name("CommandHandle".to_owned())
            .spawn(move || actor.run())
            .expect("failed to spawn thread");
        Ok(CommandHandle { program, arguments, current_dir, child, thread, receiver })
    }

    pub(crate) fn cancel(mut self) {
        let _ = self.child.0.kill();
        let _ = self.child.0.wait();
    }

    pub(crate) fn join(mut self) -> io::Result<()> {
        let _ = self.child.0.kill();
        let exit_status = self.child.0.wait()?;
        let (read_at_least_one_message, error) = self.thread.join()?;
        if read_at_least_one_message || exit_status.success() {
            Ok(())
        } else {
            Err(io::Error::new(io::ErrorKind::Other, format!(
                "Cargo watcher failed, the command produced no valid metadata (exit code: {exit_status:?}):\n{error}"
            )))
        }
    }
}
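The new `ParseFromLine` trait is what lets `CommandHandle` be reused for both `cargo check` and `cargo test`: each consumer supplies its own line parser. Below is a minimal sketch of a custom implementation written against a copy of the trait, assuming plain-text (non-JSON) output; `KeyValueMessage` is a made-up example type, not part of this commit.

/// Copy of the trait introduced above, so the sketch is self-contained.
trait ParseFromLine: Sized + Send + 'static {
    fn from_line(line: &str, error: &mut String) -> Option<Self>;
    fn from_eof() -> Option<Self>;
}

/// Hypothetical message type: lines of the form `key=value`.
#[derive(Debug, PartialEq)]
enum KeyValueMessage {
    Pair { key: String, value: String },
    Eof,
}

impl ParseFromLine for KeyValueMessage {
    fn from_line(line: &str, error: &mut String) -> Option<Self> {
        match line.split_once('=') {
            Some((k, v)) => {
                Some(KeyValueMessage::Pair { key: k.to_owned(), value: v.to_owned() })
            }
            None => {
                // Mirror the commit's strategy: collect unparsable lines as error text.
                error.push_str(line);
                error.push('\n');
                None
            }
        }
    }

    fn from_eof() -> Option<Self> {
        Some(KeyValueMessage::Eof)
    }
}

fn main() {
    let mut errors = String::new();
    assert_eq!(
        KeyValueMessage::from_line("status=ok", &mut errors),
        Some(KeyValueMessage::Pair { key: "status".into(), value: "ok".into() })
    );
    assert_eq!(KeyValueMessage::from_line("garbage", &mut errors), None);
    assert_eq!(errors, "garbage\n");
}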
@ -2,22 +2,18 @@
|
||||
//! another compatible command (f.x. clippy) in a background thread and provide
|
||||
//! LSP diagnostics based on the output of the command.
|
||||
|
||||
// FIXME: This crate now handles running `cargo test` needed in the test explorer in
|
||||
// addition to `cargo check`. Either split it into 3 crates (one for test, one for check
|
||||
// and one common utilities) or change its name and docs to reflect the current state.
|
||||
|
||||
#![warn(rust_2018_idioms, unused_lifetimes)]
|
||||
|
||||
use std::{
|
||||
ffi::OsString,
|
||||
fmt, io,
|
||||
path::PathBuf,
|
||||
process::{ChildStderr, ChildStdout, Command, Stdio},
|
||||
time::Duration,
|
||||
};
|
||||
use std::{fmt, io, path::PathBuf, process::Command, time::Duration};
|
||||
|
||||
use command_group::{CommandGroup, GroupChild};
|
||||
use crossbeam_channel::{never, select, unbounded, Receiver, Sender};
|
||||
use paths::{AbsPath, AbsPathBuf};
|
||||
use rustc_hash::FxHashMap;
|
||||
use serde::Deserialize;
|
||||
use stdx::process::streaming_output;
|
||||
|
||||
pub use cargo_metadata::diagnostic::{
|
||||
Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan,
|
||||
@ -25,6 +21,12 @@ pub use cargo_metadata::diagnostic::{
|
||||
};
|
||||
use toolchain::Tool;
|
||||
|
||||
mod command;
|
||||
mod test_runner;
|
||||
|
||||
use command::{CommandHandle, ParseFromLine};
|
||||
pub use test_runner::{CargoTestHandle, CargoTestMessage, TestState};
|
||||
|
||||
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
|
||||
pub enum InvocationStrategy {
|
||||
Once,
|
||||
@ -181,12 +183,12 @@ struct FlycheckActor {
|
||||
/// doesn't provide a way to read sub-process output without blocking, so we
|
||||
/// have to wrap sub-processes output handling in a thread and pass messages
|
||||
/// back over a channel.
|
||||
command_handle: Option<CommandHandle>,
|
||||
command_handle: Option<CommandHandle<CargoCheckMessage>>,
|
||||
}
|
||||
|
||||
enum Event {
|
||||
RequestStateChange(StateChange),
|
||||
CheckEvent(Option<CargoMessage>),
|
||||
CheckEvent(Option<CargoCheckMessage>),
|
||||
}
|
||||
|
||||
const SAVED_FILE_PLACEHOLDER: &str = "$saved_file";
|
||||
@ -282,7 +284,7 @@ impl FlycheckActor {
|
||||
self.report_progress(Progress::DidFinish(res));
|
||||
}
|
||||
Event::CheckEvent(Some(message)) => match message {
|
||||
CargoMessage::CompilerArtifact(msg) => {
|
||||
CargoCheckMessage::CompilerArtifact(msg) => {
|
||||
tracing::trace!(
|
||||
flycheck_id = self.id,
|
||||
artifact = msg.target.name,
|
||||
@ -291,7 +293,7 @@ impl FlycheckActor {
|
||||
self.report_progress(Progress::DidCheckCrate(msg.target.name));
|
||||
}
|
||||
|
||||
CargoMessage::Diagnostic(msg) => {
|
||||
CargoCheckMessage::Diagnostic(msg) => {
|
||||
tracing::trace!(
|
||||
flycheck_id = self.id,
|
||||
message = msg.message,
|
||||
@ -448,161 +450,42 @@ impl FlycheckActor {
|
||||
}
|
||||
}
|
||||
|
||||
struct JodGroupChild(GroupChild);
|
||||
|
||||
impl Drop for JodGroupChild {
|
||||
fn drop(&mut self) {
|
||||
_ = self.0.kill();
|
||||
_ = self.0.wait();
|
||||
}
|
||||
}
|
||||
|
||||
/// A handle to a cargo process used for fly-checking.
|
||||
struct CommandHandle {
|
||||
/// The handle to the actual cargo process. As we cannot cancel directly from with
|
||||
/// a read syscall dropping and therefore terminating the process is our best option.
|
||||
child: JodGroupChild,
|
||||
thread: stdx::thread::JoinHandle<io::Result<(bool, String)>>,
|
||||
receiver: Receiver<CargoMessage>,
|
||||
program: OsString,
|
||||
arguments: Vec<OsString>,
|
||||
current_dir: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl fmt::Debug for CommandHandle {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("CommandHandle")
|
||||
.field("program", &self.program)
|
||||
.field("arguments", &self.arguments)
|
||||
.field("current_dir", &self.current_dir)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl CommandHandle {
|
||||
fn spawn(mut command: Command) -> std::io::Result<CommandHandle> {
|
||||
command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());
|
||||
let mut child = command.group_spawn().map(JodGroupChild)?;
|
||||
|
||||
let program = command.get_program().into();
|
||||
let arguments = command.get_args().map(|arg| arg.into()).collect::<Vec<OsString>>();
|
||||
let current_dir = command.get_current_dir().map(|arg| arg.to_path_buf());
|
||||
|
||||
let stdout = child.0.inner().stdout.take().unwrap();
|
||||
let stderr = child.0.inner().stderr.take().unwrap();
|
||||
|
||||
let (sender, receiver) = unbounded();
|
||||
let actor = CargoActor::new(sender, stdout, stderr);
|
||||
let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
|
||||
.name("CommandHandle".to_owned())
|
||||
.spawn(move || actor.run())
|
||||
.expect("failed to spawn thread");
|
||||
Ok(CommandHandle { program, arguments, current_dir, child, thread, receiver })
|
||||
}
|
||||
|
||||
fn cancel(mut self) {
|
||||
let _ = self.child.0.kill();
|
||||
let _ = self.child.0.wait();
|
||||
}
|
||||
|
||||
fn join(mut self) -> io::Result<()> {
|
||||
let _ = self.child.0.kill();
|
||||
let exit_status = self.child.0.wait()?;
|
||||
let (read_at_least_one_message, error) = self.thread.join()?;
|
||||
if read_at_least_one_message || exit_status.success() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(io::Error::new(io::ErrorKind::Other, format!(
|
||||
"Cargo watcher failed, the command produced no valid metadata (exit code: {exit_status:?}):\n{error}"
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct CargoActor {
|
||||
sender: Sender<CargoMessage>,
|
||||
stdout: ChildStdout,
|
||||
stderr: ChildStderr,
|
||||
}
|
||||
|
||||
impl CargoActor {
|
||||
fn new(sender: Sender<CargoMessage>, stdout: ChildStdout, stderr: ChildStderr) -> CargoActor {
|
||||
CargoActor { sender, stdout, stderr }
|
||||
}
|
||||
|
||||
fn run(self) -> io::Result<(bool, String)> {
|
||||
// We manually read a line at a time, instead of using serde's
|
||||
// stream deserializers, because the deserializer cannot recover
|
||||
// from an error, resulting in it getting stuck, because we try to
|
||||
// be resilient against failures.
|
||||
//
|
||||
// Because cargo only outputs one JSON object per line, we can
|
||||
// simply skip a line if it doesn't parse, which just ignores any
|
||||
// erroneous output.
|
||||
|
||||
let mut stdout_errors = String::new();
|
||||
let mut stderr_errors = String::new();
|
||||
let mut read_at_least_one_stdout_message = false;
|
||||
let mut read_at_least_one_stderr_message = false;
|
||||
let process_line = |line: &str, error: &mut String| {
|
||||
// Try to deserialize a message from Cargo or Rustc.
|
||||
let mut deserializer = serde_json::Deserializer::from_str(line);
|
||||
deserializer.disable_recursion_limit();
|
||||
if let Ok(message) = JsonMessage::deserialize(&mut deserializer) {
|
||||
match message {
|
||||
// Skip certain kinds of messages to only spend time on what's useful
|
||||
JsonMessage::Cargo(message) => match message {
|
||||
cargo_metadata::Message::CompilerArtifact(artifact) if !artifact.fresh => {
|
||||
self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
|
||||
}
|
||||
cargo_metadata::Message::CompilerMessage(msg) => {
|
||||
self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
|
||||
}
|
||||
_ => (),
|
||||
},
|
||||
JsonMessage::Rustc(message) => {
|
||||
self.sender.send(CargoMessage::Diagnostic(message)).unwrap();
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
error.push_str(line);
|
||||
error.push('\n');
|
||||
false
|
||||
};
|
||||
let output = streaming_output(
|
||||
self.stdout,
|
||||
self.stderr,
|
||||
&mut |line| {
|
||||
if process_line(line, &mut stdout_errors) {
|
||||
read_at_least_one_stdout_message = true;
|
||||
}
|
||||
},
|
||||
&mut |line| {
|
||||
if process_line(line, &mut stderr_errors) {
|
||||
read_at_least_one_stderr_message = true;
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
let read_at_least_one_message =
|
||||
read_at_least_one_stdout_message || read_at_least_one_stderr_message;
|
||||
let mut error = stdout_errors;
|
||||
error.push_str(&stderr_errors);
|
||||
match output {
|
||||
Ok(_) => Ok((read_at_least_one_message, error)),
|
||||
Err(e) => Err(io::Error::new(e.kind(), format!("{e:?}: {error}"))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
enum CargoMessage {
|
||||
enum CargoCheckMessage {
|
||||
CompilerArtifact(cargo_metadata::Artifact),
|
||||
Diagnostic(Diagnostic),
|
||||
}
|
||||
|
||||
impl ParseFromLine for CargoCheckMessage {
|
||||
fn from_line(line: &str, error: &mut String) -> Option<Self> {
|
||||
let mut deserializer = serde_json::Deserializer::from_str(line);
|
||||
deserializer.disable_recursion_limit();
|
||||
if let Ok(message) = JsonMessage::deserialize(&mut deserializer) {
|
||||
return match message {
|
||||
// Skip certain kinds of messages to only spend time on what's useful
|
||||
JsonMessage::Cargo(message) => match message {
|
||||
cargo_metadata::Message::CompilerArtifact(artifact) if !artifact.fresh => {
|
||||
Some(CargoCheckMessage::CompilerArtifact(artifact))
|
||||
}
|
||||
cargo_metadata::Message::CompilerMessage(msg) => {
|
||||
Some(CargoCheckMessage::Diagnostic(msg.message))
|
||||
}
|
||||
_ => None,
|
||||
},
|
||||
JsonMessage::Rustc(message) => Some(CargoCheckMessage::Diagnostic(message)),
|
||||
};
|
||||
}
|
||||
|
||||
error.push_str(line);
|
||||
error.push('\n');
|
||||
None
|
||||
}
|
||||
|
||||
fn from_eof() -> Option<Self> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum JsonMessage {
|
||||
|
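The `JsonMessage` enum cut off above is `#[serde(untagged)]` over cargo's and rustc's JSON shapes, so a single `deserialize` call can accept a line from either producer. A small self-contained sketch of that pattern with made-up `CargoLine`/`RustcLine` shapes (the real enum wraps `cargo_metadata::Message` and `Diagnostic`):

use serde::Deserialize;

/// Stand-ins for the two JSON producers; the field names are illustrative only.
#[derive(Debug, Deserialize)]
struct CargoLine {
    reason: String,
}

#[derive(Debug, Deserialize)]
struct RustcLine {
    message: String,
    level: String,
}

/// Untagged: serde tries each variant in order until one matches the JSON shape.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum JsonMessage {
    Cargo(CargoLine),
    Rustc(RustcLine),
}

fn main() {
    let cargo: JsonMessage =
        serde_json::from_str(r#"{"reason":"compiler-artifact"}"#).unwrap();
    let rustc: JsonMessage =
        serde_json::from_str(r#"{"message":"unused variable","level":"warning"}"#).unwrap();
    println!("{cargo:?}\n{rustc:?}");
}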
src/tools/rust-analyzer/crates/flycheck/src/test_runner.rs (new file, 76 lines)
@@ -0,0 +1,76 @@
//! This module provides the functionality needed to run `cargo test` in a background
//! thread and report the result of each test in a channel.

use std::process::Command;

use crossbeam_channel::Receiver;
use serde::Deserialize;
use toolchain::Tool;

use crate::command::{CommandHandle, ParseFromLine};

#[derive(Debug, Deserialize)]
#[serde(tag = "event", rename_all = "camelCase")]
pub enum TestState {
    Started,
    Ok,
    Ignored,
    Failed { stdout: String },
}

#[derive(Debug, Deserialize)]
#[serde(tag = "type", rename_all = "camelCase")]
pub enum CargoTestMessage {
    Test {
        name: String,
        #[serde(flatten)]
        state: TestState,
    },
    Suite,
    Finished,
}

impl ParseFromLine for CargoTestMessage {
    fn from_line(line: &str, error: &mut String) -> Option<Self> {
        let mut deserializer = serde_json::Deserializer::from_str(line);
        deserializer.disable_recursion_limit();
        if let Ok(message) = CargoTestMessage::deserialize(&mut deserializer) {
            return Some(message);
        }

        error.push_str(line);
        error.push('\n');
        None
    }

    fn from_eof() -> Option<Self> {
        Some(CargoTestMessage::Finished)
    }
}

#[derive(Debug)]
pub struct CargoTestHandle {
    handle: CommandHandle<CargoTestMessage>,
}

// Example of a cargo test command:
// cargo test -- module::func -Z unstable-options --format=json

impl CargoTestHandle {
    pub fn new(path: Option<&str>) -> std::io::Result<Self> {
        let mut cmd = Command::new(Tool::Cargo.path());
        cmd.env("RUSTC_BOOTSTRAP", "1");
        cmd.arg("test");
        cmd.arg("--");
        if let Some(path) = path {
            cmd.arg(path);
        }
        cmd.args(["-Z", "unstable-options"]);
        cmd.arg("--format=json");
        Ok(Self { handle: CommandHandle::spawn(cmd)? })
    }

    pub fn receiver(&self) -> &Receiver<CargoTestMessage> {
        &self.handle.receiver
    }
}
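The JSON this parser consumes is libtest's machine-readable output, which is still unstable (hence `RUSTC_BOOTSTRAP=1` and `-Z unstable-options --format=json` above). A quick self-contained check that the serde attributes from the new file accept a typical event line; the enum definitions are copied from the diff, while the sample line is only illustrative:

use serde::Deserialize;

#[derive(Debug, Deserialize, PartialEq)]
#[serde(tag = "event", rename_all = "camelCase")]
enum TestState {
    Started,
    Ok,
    Ignored,
    Failed { stdout: String },
}

#[derive(Debug, Deserialize, PartialEq)]
#[serde(tag = "type", rename_all = "camelCase")]
enum CargoTestMessage {
    Test {
        name: String,
        #[serde(flatten)]
        state: TestState,
    },
    Suite,
    Finished,
}

fn main() {
    // Illustrative libtest event line; real runs also emit suite start/finish events.
    let line = r#"{ "type": "test", "event": "started", "name": "tests::smoke" }"#;
    let msg: CargoTestMessage = serde_json::from_str(line).unwrap();
    assert_eq!(
        msg,
        CargoTestMessage::Test { name: "tests::smoke".to_owned(), state: TestState::Started }
    );
}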
@ -348,7 +348,7 @@ impl AttrsWithOwner {
|
||||
.raw_attrs(AttrOwner::ModItem(definition_tree_id.value.into()))
|
||||
.clone(),
|
||||
ModuleOrigin::BlockExpr { id, .. } => {
|
||||
let tree = db.block_item_tree_query(id);
|
||||
let tree = db.block_item_tree(id);
|
||||
tree.raw_attrs(AttrOwner::TopLevel).clone()
|
||||
}
|
||||
}
|
||||
|
@ -13,7 +13,6 @@ use cfg::{CfgExpr, CfgOptions};
|
||||
use either::Either;
|
||||
use hir_expand::{name::Name, HirFileId, InFile};
|
||||
use la_arena::{Arena, ArenaMap};
|
||||
use profile::Count;
|
||||
use rustc_hash::FxHashMap;
|
||||
use syntax::{ast, AstPtr, SyntaxNodePtr};
|
||||
use triomphe::Arc;
|
||||
@ -51,7 +50,6 @@ pub struct Body {
|
||||
pub body_expr: ExprId,
|
||||
/// Block expressions in this body that may contain inner items.
|
||||
block_scopes: Vec<BlockId>,
|
||||
_c: Count<Self>,
|
||||
}
|
||||
|
||||
pub type ExprPtr = AstPtr<ast::Expr>;
|
||||
@ -216,7 +214,6 @@ impl Body {
|
||||
|
||||
fn shrink_to_fit(&mut self) {
|
||||
let Self {
|
||||
_c: _,
|
||||
body_expr: _,
|
||||
block_scopes,
|
||||
exprs,
|
||||
@ -300,7 +297,6 @@ impl Default for Body {
|
||||
params: Default::default(),
|
||||
block_scopes: Default::default(),
|
||||
binding_owners: Default::default(),
|
||||
_c: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -10,7 +10,6 @@ use hir_expand::{
|
||||
ExpandError, InFile,
|
||||
};
|
||||
use intern::Interned;
|
||||
use profile::Count;
|
||||
use rustc_hash::FxHashMap;
|
||||
use smallvec::SmallVec;
|
||||
use span::AstIdMap;
|
||||
@ -76,7 +75,6 @@ pub(super) fn lower(
|
||||
params: Vec::new(),
|
||||
body_expr: dummy_expr_id(),
|
||||
block_scopes: Vec::new(),
|
||||
_c: Count::new(),
|
||||
},
|
||||
expander,
|
||||
current_try_block_label: None,
|
||||
@ -705,7 +703,8 @@ impl ExprCollector<'_> {
|
||||
let Some(try_from_output) = LangItem::TryTraitFromOutput.path(self.db, self.krate) else {
|
||||
return self.collect_block(e);
|
||||
};
|
||||
let label = self.alloc_label_desugared(Label { name: Name::generate_new_name() });
|
||||
let label = self
|
||||
.alloc_label_desugared(Label { name: Name::generate_new_name(self.body.labels.len()) });
|
||||
let old_label = self.current_try_block_label.replace(label);
|
||||
|
||||
let (btail, expr_id) = self.with_labeled_rib(label, |this| {
|
||||
@ -842,7 +841,7 @@ impl ExprCollector<'_> {
|
||||
this.collect_expr_opt(e.loop_body().map(|it| it.into()))
|
||||
}),
|
||||
};
|
||||
let iter_name = Name::generate_new_name();
|
||||
let iter_name = Name::generate_new_name(self.body.exprs.len());
|
||||
let iter_expr = self.alloc_expr(Expr::Path(Path::from(iter_name.clone())), syntax_ptr);
|
||||
let iter_expr_mut = self.alloc_expr(
|
||||
Expr::Ref { expr: iter_expr, rawness: Rawness::Ref, mutability: Mutability::Mut },
|
||||
@ -903,7 +902,7 @@ impl ExprCollector<'_> {
|
||||
Expr::Call { callee: try_branch, args: Box::new([operand]), is_assignee_expr: false },
|
||||
syntax_ptr,
|
||||
);
|
||||
let continue_name = Name::generate_new_name();
|
||||
let continue_name = Name::generate_new_name(self.body.bindings.len());
|
||||
let continue_binding =
|
||||
self.alloc_binding(continue_name.clone(), BindingAnnotation::Unannotated);
|
||||
let continue_bpat =
|
||||
@ -918,7 +917,7 @@ impl ExprCollector<'_> {
|
||||
guard: None,
|
||||
expr: self.alloc_expr(Expr::Path(Path::from(continue_name)), syntax_ptr),
|
||||
};
|
||||
let break_name = Name::generate_new_name();
|
||||
let break_name = Name::generate_new_name(self.body.bindings.len());
|
||||
let break_binding = self.alloc_binding(break_name.clone(), BindingAnnotation::Unannotated);
|
||||
let break_bpat = self.alloc_pat_desugared(Pat::Bind { id: break_binding, subpat: None });
|
||||
self.add_definition_to_binding(break_binding, break_bpat);
|
||||
@ -1415,16 +1414,10 @@ impl ExprCollector<'_> {
|
||||
ast::Pat::LiteralPat(it) => {
|
||||
Some(Box::new(LiteralOrConst::Literal(pat_literal_to_hir(it)?.0)))
|
||||
}
|
||||
ast::Pat::IdentPat(p) => {
|
||||
let name =
|
||||
p.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
|
||||
Some(Box::new(LiteralOrConst::Const(name.into())))
|
||||
pat @ (ast::Pat::IdentPat(_) | ast::Pat::PathPat(_)) => {
|
||||
let subpat = self.collect_pat(pat.clone(), binding_list);
|
||||
Some(Box::new(LiteralOrConst::Const(subpat)))
|
||||
}
|
||||
ast::Pat::PathPat(p) => p
|
||||
.path()
|
||||
.and_then(|path| self.expander.parse_path(self.db, path))
|
||||
.map(LiteralOrConst::Const)
|
||||
.map(Box::new),
|
||||
_ => None,
|
||||
})
|
||||
};
|
||||
|
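The `Name::generate_new_name()` call sites above now receive an index (the current number of exprs, bindings, or labels in the body), so desugaring produces distinct, deterministic synthetic names instead of relying on a global counter. A tiny sketch of the idea; `synthetic_name` and its formatting are illustrative, not the real `Name` implementation:

/// Illustrative only: derive a synthetic, collision-free name from an arena index.
fn synthetic_name(kind: &str, index: usize) -> String {
    // Using the current arena length as the index makes the name deterministic
    // for a given body, unlike a process-global counter.
    format!("<generated-{kind}-{index}>")
}

fn main() {
    let bindings_len = 2; // e.g. self.body.bindings.len() at the call site
    assert_eq!(synthetic_name("binding", bindings_len), "<generated-binding-2>");
}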
@ -635,7 +635,7 @@ impl Printer<'_> {
|
||||
fn print_literal_or_const(&mut self, literal_or_const: &LiteralOrConst) {
|
||||
match literal_or_const {
|
||||
LiteralOrConst::Literal(l) => self.print_literal(l),
|
||||
LiteralOrConst::Const(c) => self.print_path(c),
|
||||
LiteralOrConst::Const(c) => self.print_pat(*c),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -788,11 +788,12 @@ impl<'a> AssocItemCollector<'a> {
|
||||
};
|
||||
self.diagnostics.push(diag);
|
||||
}
|
||||
if let errors @ [_, ..] = parse.errors() {
|
||||
let errors = parse.errors();
|
||||
if !errors.is_empty() {
|
||||
self.diagnostics.push(DefDiagnostic::macro_expansion_parse_error(
|
||||
self.module_id.local_id,
|
||||
error_call_kind(),
|
||||
errors,
|
||||
errors.into_boxed_slice(),
|
||||
));
|
||||
}
|
||||
|
||||
|
@ -400,7 +400,7 @@ pub(crate) fn lower_struct(
|
||||
item_tree: &ItemTree,
|
||||
fields: &Fields,
|
||||
) -> StructKind {
|
||||
let ctx = LowerCtx::with_file_id(db, ast.file_id);
|
||||
let ctx = LowerCtx::new(db, ast.file_id);
|
||||
|
||||
match (&ast.value, fields) {
|
||||
(ast::StructKind::Tuple(fl), Fields::Tuple(fields)) => {
|
||||
@ -415,7 +415,9 @@ pub(crate) fn lower_struct(
|
||||
|| FieldData {
|
||||
name: Name::new_tuple_field(i),
|
||||
type_ref: Interned::new(TypeRef::from_ast_opt(&ctx, fd.ty())),
|
||||
visibility: RawVisibility::from_ast(db, ast.with_value(fd.visibility())),
|
||||
visibility: RawVisibility::from_ast(db, fd.visibility(), &mut |range| {
|
||||
ctx.span_map().span_for_range(range).ctx
|
||||
}),
|
||||
},
|
||||
);
|
||||
}
|
||||
@ -433,7 +435,9 @@ pub(crate) fn lower_struct(
|
||||
|| FieldData {
|
||||
name: fd.name().map(|n| n.as_name()).unwrap_or_else(Name::missing),
|
||||
type_ref: Interned::new(TypeRef::from_ast_opt(&ctx, fd.ty())),
|
||||
visibility: RawVisibility::from_ast(db, ast.with_value(fd.visibility())),
|
||||
visibility: RawVisibility::from_ast(db, fd.visibility(), &mut |range| {
|
||||
ctx.span_map().span_for_range(range).ctx
|
||||
}),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
@ -87,14 +87,10 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
|
||||
fn file_item_tree(&self, file_id: HirFileId) -> Arc<ItemTree>;
|
||||
|
||||
#[salsa::invoke(ItemTree::block_item_tree_query)]
|
||||
fn block_item_tree_query(&self, block_id: BlockId) -> Arc<ItemTree>;
|
||||
|
||||
#[salsa::invoke(crate_def_map_wait)]
|
||||
#[salsa::transparent]
|
||||
fn crate_def_map(&self, krate: CrateId) -> Arc<DefMap>;
|
||||
fn block_item_tree(&self, block_id: BlockId) -> Arc<ItemTree>;
|
||||
|
||||
#[salsa::invoke(DefMap::crate_def_map_query)]
|
||||
fn crate_def_map_query(&self, krate: CrateId) -> Arc<DefMap>;
|
||||
fn crate_def_map(&self, krate: CrateId) -> Arc<DefMap>;
|
||||
|
||||
/// Computes the block-level `DefMap`.
|
||||
#[salsa::invoke(DefMap::block_def_map_query)]
|
||||
@ -253,11 +249,6 @@ fn include_macro_invoc(db: &dyn DefDatabase, krate: CrateId) -> Vec<(MacroCallId
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn crate_def_map_wait(db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> {
|
||||
let _p = tracing::span!(tracing::Level::INFO, "crate_def_map:wait").entered();
|
||||
db.crate_def_map_query(krate)
|
||||
}
|
||||
|
||||
fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool {
|
||||
let file = db.crate_graph()[crate_id].root_file_id;
|
||||
let item_tree = db.file_item_tree(file.into());
|
||||
|
@ -1,5 +1,7 @@
|
||||
//! Macro expansion utilities.
|
||||
|
||||
use std::cell::OnceCell;
|
||||
|
||||
use base_db::CrateId;
|
||||
use cfg::CfgOptions;
|
||||
use drop_bomb::DropBomb;
|
||||
@ -18,7 +20,7 @@ use crate::{
|
||||
#[derive(Debug)]
|
||||
pub struct Expander {
|
||||
cfg_options: CfgOptions,
|
||||
span_map: SpanMap,
|
||||
span_map: OnceCell<SpanMap>,
|
||||
krate: CrateId,
|
||||
current_file_id: HirFileId,
|
||||
pub(crate) module: ModuleId,
|
||||
@ -42,7 +44,7 @@ impl Expander {
|
||||
recursion_depth: 0,
|
||||
recursion_limit,
|
||||
cfg_options: db.crate_graph()[module.krate].cfg_options.clone(),
|
||||
span_map: db.span_map(current_file_id),
|
||||
span_map: OnceCell::new(),
|
||||
krate: module.krate,
|
||||
}
|
||||
}
|
||||
@ -100,7 +102,7 @@ impl Expander {
|
||||
}
|
||||
|
||||
pub fn ctx<'a>(&self, db: &'a dyn DefDatabase) -> LowerCtx<'a> {
|
||||
LowerCtx::new(db, self.span_map.clone(), self.current_file_id)
|
||||
LowerCtx::with_span_map_cell(db, self.current_file_id, self.span_map.clone())
|
||||
}
|
||||
|
||||
pub(crate) fn in_file<T>(&self, value: T) -> InFile<T> {
|
||||
@ -108,7 +110,15 @@ impl Expander {
|
||||
}
|
||||
|
||||
pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
|
||||
Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, self.span_map.as_ref()))
|
||||
Attrs::filter(
|
||||
db,
|
||||
self.krate,
|
||||
RawAttrs::new(
|
||||
db.upcast(),
|
||||
owner,
|
||||
self.span_map.get_or_init(|| db.span_map(self.current_file_id)).as_ref(),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn cfg_options(&self) -> &CfgOptions {
|
||||
@ -120,7 +130,7 @@ impl Expander {
|
||||
}
|
||||
|
||||
pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option<Path> {
|
||||
let ctx = LowerCtx::new(db, self.span_map.clone(), self.current_file_id);
|
||||
let ctx = LowerCtx::with_span_map_cell(db, self.current_file_id, self.span_map.clone());
|
||||
Path::from_src(&ctx, path)
|
||||
}
|
||||
|
||||
@ -165,10 +175,11 @@ impl Expander {
|
||||
let parse = res.value.0.cast::<T>()?;
|
||||
|
||||
self.recursion_depth += 1;
|
||||
let old_span_map = std::mem::replace(
|
||||
&mut self.span_map,
|
||||
SpanMap::ExpansionSpanMap(res.value.1),
|
||||
);
|
||||
let old_span_map = OnceCell::new();
|
||||
if let Some(prev) = self.span_map.take() {
|
||||
_ = old_span_map.set(prev);
|
||||
};
|
||||
_ = self.span_map.set(SpanMap::ExpansionSpanMap(res.value.1));
|
||||
let old_file_id =
|
||||
std::mem::replace(&mut self.current_file_id, macro_file.into());
|
||||
let mark = Mark {
|
||||
@ -187,6 +198,6 @@ impl Expander {
|
||||
#[derive(Debug)]
|
||||
pub struct Mark {
|
||||
file_id: HirFileId,
|
||||
span_map: SpanMap,
|
||||
span_map: OnceCell<SpanMap>,
|
||||
bomb: DropBomb,
|
||||
}
|
||||
|
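The `Expander` (and, later in this commit, `LowerCtx`) now stores the span map in a `OnceCell` and computes it on first use instead of eagerly querying it in the constructor. A self-contained sketch of that pattern using the standard library; `expensive_span_map` is a stand-in for `db.span_map(file_id)`:

use std::cell::OnceCell;

struct SpanMap(Vec<u32>); // stand-in for the real span map

fn expensive_span_map() -> SpanMap {
    println!("computing span map"); // runs at most once
    SpanMap(vec![0, 10, 20])
}

struct Expander {
    span_map: OnceCell<SpanMap>,
}

impl Expander {
    fn new() -> Self {
        // Cheap: nothing is computed until the map is actually needed.
        Expander { span_map: OnceCell::new() }
    }

    fn span_map(&self) -> &SpanMap {
        self.span_map.get_or_init(expensive_span_map)
    }
}

fn main() {
    let expander = Expander::new();
    let a = expander.span_map().0.len();
    let b = expander.span_map().0.len(); // cached; no recomputation
    assert_eq!(a, b);
}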
@ -611,8 +611,10 @@ mod tests {
|
||||
let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};"));
|
||||
let ast_path =
|
||||
parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap();
|
||||
let mod_path =
|
||||
ModPath::from_src(&db, ast_path, db.span_map(pos.file_id.into()).as_ref()).unwrap();
|
||||
let mod_path = ModPath::from_src(&db, ast_path, &mut |range| {
|
||||
db.span_map(pos.file_id.into()).as_ref().span_for_range(range).ctx
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
let def_map = module.def_map(&db);
|
||||
let resolved = def_map
|
||||
|
@ -101,7 +101,7 @@ pub enum Literal {
|
||||
/// Used in range patterns.
|
||||
pub enum LiteralOrConst {
|
||||
Literal(Literal),
|
||||
Const(Path),
|
||||
Const(PatId),
|
||||
}
|
||||
|
||||
impl Literal {
|
||||
|
@ -251,7 +251,7 @@ impl TypeRef {
|
||||
TypeRef::DynTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
|
||||
}
|
||||
ast::Type::MacroType(mt) => match mt.macro_call() {
|
||||
Some(mc) => ctx.ast_id(&mc).map(TypeRef::Macro).unwrap_or(TypeRef::Error),
|
||||
Some(mc) => TypeRef::Macro(ctx.ast_id(&mc)),
|
||||
None => TypeRef::Error,
|
||||
},
|
||||
}
|
||||
@ -398,9 +398,8 @@ pub enum ConstRef {
|
||||
impl ConstRef {
|
||||
pub(crate) fn from_const_arg(lower_ctx: &LowerCtx<'_>, arg: Option<ast::ConstArg>) -> Self {
|
||||
if let Some(arg) = arg {
|
||||
let ast_id = lower_ctx.ast_id(&arg);
|
||||
if let Some(expr) = arg.expr() {
|
||||
return Self::from_expr(expr, ast_id);
|
||||
return Self::from_expr(expr, Some(lower_ctx.ast_id(&arg)));
|
||||
}
|
||||
}
|
||||
Self::Scalar(LiteralConstRef::Unknown)
|
||||
|
@ -29,9 +29,6 @@
|
||||
//!
|
||||
//! In general, any item in the `ItemTree` stores its `AstId`, which allows mapping it back to its
|
||||
//! surface syntax.
|
||||
//!
|
||||
//! Note that we cannot store [`span::Span`]s inside of this, as typing in an item invalidates its
|
||||
//! encompassing span!
|
||||
|
||||
mod lower;
|
||||
mod pretty;
|
||||
@ -50,7 +47,6 @@ use either::Either;
|
||||
use hir_expand::{attrs::RawAttrs, name::Name, ExpandTo, HirFileId, InFile};
|
||||
use intern::Interned;
|
||||
use la_arena::{Arena, Idx, IdxRange, RawIdx};
|
||||
use profile::Count;
|
||||
use rustc_hash::FxHashMap;
|
||||
use smallvec::SmallVec;
|
||||
use span::{AstIdNode, FileAstId, Span};
|
||||
@ -94,8 +90,6 @@ impl fmt::Debug for RawVisibilityId {
|
||||
/// The item tree of a source file.
|
||||
#[derive(Debug, Default, Eq, PartialEq)]
|
||||
pub struct ItemTree {
|
||||
_c: Count<Self>,
|
||||
|
||||
top_level: SmallVec<[ModItem; 1]>,
|
||||
attrs: FxHashMap<AttrOwner, RawAttrs>,
|
||||
|
||||
@ -263,14 +257,6 @@ impl ItemVisibilities {
|
||||
}
|
||||
}
|
||||
|
||||
static VIS_PUB: RawVisibility = RawVisibility::Public;
|
||||
static VIS_PRIV_IMPLICIT: RawVisibility =
|
||||
RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)), VisibilityExplicitness::Implicit);
|
||||
static VIS_PRIV_EXPLICIT: RawVisibility =
|
||||
RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)), VisibilityExplicitness::Explicit);
|
||||
static VIS_PUB_CRATE: RawVisibility =
|
||||
RawVisibility::Module(ModPath::from_kind(PathKind::Crate), VisibilityExplicitness::Explicit);
|
||||
|
||||
#[derive(Default, Debug, Eq, PartialEq)]
|
||||
struct ItemTreeData {
|
||||
uses: Arena<Use>,
|
||||
@ -403,7 +389,7 @@ impl TreeId {
|
||||
|
||||
pub(crate) fn item_tree(&self, db: &dyn DefDatabase) -> Arc<ItemTree> {
|
||||
match self.block {
|
||||
Some(block) => db.block_item_tree_query(block),
|
||||
Some(block) => db.block_item_tree(block),
|
||||
None => db.file_item_tree(self.file),
|
||||
}
|
||||
}
|
||||
@ -562,6 +548,20 @@ impl_index!(fields: Field, variants: Variant, params: Param);
|
||||
impl Index<RawVisibilityId> for ItemTree {
|
||||
type Output = RawVisibility;
|
||||
fn index(&self, index: RawVisibilityId) -> &Self::Output {
|
||||
static VIS_PUB: RawVisibility = RawVisibility::Public;
|
||||
static VIS_PRIV_IMPLICIT: RawVisibility = RawVisibility::Module(
|
||||
ModPath::from_kind(PathKind::Super(0)),
|
||||
VisibilityExplicitness::Implicit,
|
||||
);
|
||||
static VIS_PRIV_EXPLICIT: RawVisibility = RawVisibility::Module(
|
||||
ModPath::from_kind(PathKind::Super(0)),
|
||||
VisibilityExplicitness::Explicit,
|
||||
);
|
||||
static VIS_PUB_CRATE: RawVisibility = RawVisibility::Module(
|
||||
ModPath::from_kind(PathKind::Crate),
|
||||
VisibilityExplicitness::Explicit,
|
||||
);
|
||||
|
||||
match index {
|
||||
RawVisibilityId::PRIV_IMPLICIT => &VIS_PRIV_IMPLICIT,
|
||||
RawVisibilityId::PRIV_EXPLICIT => &VIS_PRIV_EXPLICIT,
|
||||
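Moving the `VIS_*` statics from module scope into the `Index` implementation keeps them next to their only user; Rust allows `static` items inside a function body, and references to them still have the `'static` lifetime the `index` signature needs. A self-contained sketch of the same shape, with simplified stand-in types:

use std::ops::Index;

#[derive(Debug, PartialEq)]
enum Visibility {
    Public,
    Private,
}

struct VisTable;

#[derive(Clone, Copy)]
struct VisId(u32);

impl Index<VisId> for VisTable {
    type Output = Visibility;

    fn index(&self, index: VisId) -> &Self::Output {
        // Function-local statics: only this `index` impl needs them.
        static VIS_PUB: Visibility = Visibility::Public;
        static VIS_PRIV: Visibility = Visibility::Private;

        match index.0 {
            0 => &VIS_PUB,
            _ => &VIS_PRIV,
        }
    }
}

fn main() {
    let table = VisTable;
    assert_eq!(table[VisId(0)], Visibility::Public);
}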
@ -821,11 +821,13 @@ impl Use {
|
||||
// Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
|
||||
let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
|
||||
let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
|
||||
let span_map = db.span_map(file_id);
|
||||
let (_, source_map) = lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree)
|
||||
.expect("failed to lower use tree");
|
||||
let (_, source_map) = lower::lower_use_tree(db, ast_use_tree, &mut |range| {
|
||||
db.span_map(file_id).span_for_range(range).ctx
|
||||
})
|
||||
.expect("failed to lower use tree");
|
||||
source_map[index].clone()
|
||||
}
|
||||
|
||||
/// Maps a `UseTree` contained in this import back to its AST node.
|
||||
pub fn use_tree_source_map(
|
||||
&self,
|
||||
@ -836,10 +838,11 @@ impl Use {
|
||||
// Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
|
||||
let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
|
||||
let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
|
||||
let span_map = db.span_map(file_id);
|
||||
lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree)
|
||||
.expect("failed to lower use tree")
|
||||
.1
|
||||
lower::lower_use_tree(db, ast_use_tree, &mut |range| {
|
||||
db.span_map(file_id).span_for_range(range).ctx
|
||||
})
|
||||
.expect("failed to lower use tree")
|
||||
.1
|
||||
}
|
||||
}
|
||||
|
||||
@ -871,25 +874,19 @@ impl UseTree {
|
||||
prefix: Option<ModPath>,
|
||||
path: &ModPath,
|
||||
) -> Option<(ModPath, ImportKind)> {
|
||||
match (prefix, &path.kind) {
|
||||
match (prefix, path.kind) {
|
||||
(None, _) => Some((path.clone(), ImportKind::Plain)),
|
||||
(Some(mut prefix), PathKind::Plain) => {
|
||||
for segment in path.segments() {
|
||||
prefix.push_segment(segment.clone());
|
||||
}
|
||||
prefix.extend(path.segments().iter().cloned());
|
||||
Some((prefix, ImportKind::Plain))
|
||||
}
|
||||
(Some(mut prefix), PathKind::Super(n))
|
||||
if *n > 0 && prefix.segments().is_empty() =>
|
||||
{
|
||||
(Some(mut prefix), PathKind::Super(n)) if n > 0 && prefix.segments().is_empty() => {
|
||||
// `super::super` + `super::rest`
|
||||
match &mut prefix.kind {
|
||||
PathKind::Super(m) => {
|
||||
cov_mark::hit!(concat_super_mod_paths);
|
||||
*m += *n;
|
||||
for segment in path.segments() {
|
||||
prefix.push_segment(segment.clone());
|
||||
}
|
||||
*m += n;
|
||||
prefix.extend(path.segments().iter().cloned());
|
||||
Some((prefix, ImportKind::Plain))
|
||||
}
|
||||
_ => None,
|
||||
@ -963,10 +960,10 @@ impl ModItem {
|
||||
| ModItem::Mod(_)
|
||||
| ModItem::MacroRules(_)
|
||||
| ModItem::Macro2(_) => None,
|
||||
ModItem::MacroCall(call) => Some(AssocItem::MacroCall(*call)),
|
||||
ModItem::Const(konst) => Some(AssocItem::Const(*konst)),
|
||||
ModItem::TypeAlias(alias) => Some(AssocItem::TypeAlias(*alias)),
|
||||
ModItem::Function(func) => Some(AssocItem::Function(*func)),
|
||||
&ModItem::MacroCall(call) => Some(AssocItem::MacroCall(call)),
|
||||
&ModItem::Const(konst) => Some(AssocItem::Const(konst)),
|
||||
&ModItem::TypeAlias(alias) => Some(AssocItem::TypeAlias(alias)),
|
||||
&ModItem::Function(func) => Some(AssocItem::Function(func)),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -4,7 +4,7 @@ use std::collections::hash_map::Entry;
|
||||
|
||||
use hir_expand::{mod_path::path, name, name::AsName, span_map::SpanMapRef, HirFileId};
|
||||
use la_arena::Arena;
|
||||
use span::AstIdMap;
|
||||
use span::{AstIdMap, SyntaxContextId};
|
||||
use syntax::{
|
||||
ast::{self, HasModuleItem, HasName, HasTypeBounds, IsString},
|
||||
AstNode,
|
||||
@ -45,7 +45,7 @@ impl<'a> Ctx<'a> {
|
||||
db,
|
||||
tree: ItemTree::default(),
|
||||
source_ast_id_map: db.ast_id_map(file),
|
||||
body_ctx: crate::lower::LowerCtx::with_file_id(db, file),
|
||||
body_ctx: crate::lower::LowerCtx::new(db, file),
|
||||
}
|
||||
}
|
||||
|
||||
@ -535,7 +535,9 @@ impl<'a> Ctx<'a> {
|
||||
fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Use>> {
|
||||
let visibility = self.lower_visibility(use_item);
|
||||
let ast_id = self.source_ast_id_map.ast_id(use_item);
|
||||
let (use_tree, _) = lower_use_tree(self.db, self.span_map(), use_item.use_tree()?)?;
|
||||
let (use_tree, _) = lower_use_tree(self.db, use_item.use_tree()?, &mut |range| {
|
||||
self.span_map().span_for_range(range).ctx
|
||||
})?;
|
||||
|
||||
let res = Use { visibility, ast_id, use_tree };
|
||||
Some(id(self.data().uses.alloc(res)))
|
||||
@ -558,7 +560,9 @@ impl<'a> Ctx<'a> {
|
||||
|
||||
fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<MacroCall>> {
|
||||
let span_map = self.span_map();
|
||||
let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, span_map)?);
|
||||
let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, &mut |range| {
|
||||
span_map.span_for_range(range).ctx
|
||||
})?);
|
||||
let ast_id = self.source_ast_id_map.ast_id(m);
|
||||
let expand_to = hir_expand::ExpandTo::from_call_site(m);
|
||||
let res = MacroCall {
|
||||
@ -672,8 +676,9 @@ impl<'a> Ctx<'a> {
|
||||
}
|
||||
|
||||
fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId {
|
||||
let vis =
|
||||
RawVisibility::from_opt_ast_with_span_map(self.db, item.visibility(), self.span_map());
|
||||
let vis = RawVisibility::from_ast(self.db, item.visibility(), &mut |range| {
|
||||
self.span_map().span_for_range(range).ctx
|
||||
});
|
||||
self.data().vis.alloc(vis)
|
||||
}
|
||||
|
||||
@ -745,12 +750,15 @@ fn lower_abi(abi: ast::Abi) -> Interned<str> {
|
||||
|
||||
struct UseTreeLowering<'a> {
|
||||
db: &'a dyn DefDatabase,
|
||||
span_map: SpanMapRef<'a>,
|
||||
mapping: Arena<ast::UseTree>,
|
||||
}
|
||||
|
||||
impl UseTreeLowering<'_> {
|
||||
fn lower_use_tree(&mut self, tree: ast::UseTree) -> Option<UseTree> {
|
||||
fn lower_use_tree(
|
||||
&mut self,
|
||||
tree: ast::UseTree,
|
||||
span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId,
|
||||
) -> Option<UseTree> {
|
||||
if let Some(use_tree_list) = tree.use_tree_list() {
|
||||
let prefix = match tree.path() {
|
||||
// E.g. use something::{{{inner}}};
|
||||
@ -758,15 +766,17 @@ impl UseTreeLowering<'_> {
|
||||
// E.g. `use something::{inner}` (prefix is `None`, path is `something`)
|
||||
// or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
|
||||
Some(path) => {
|
||||
match ModPath::from_src(self.db.upcast(), path, self.span_map) {
|
||||
match ModPath::from_src(self.db.upcast(), path, span_for_range) {
|
||||
Some(it) => Some(it),
|
||||
None => return None, // FIXME: report errors somewhere
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let list =
|
||||
use_tree_list.use_trees().filter_map(|tree| self.lower_use_tree(tree)).collect();
|
||||
let list = use_tree_list
|
||||
.use_trees()
|
||||
.filter_map(|tree| self.lower_use_tree(tree, span_for_range))
|
||||
.collect();
|
||||
|
||||
Some(
|
||||
self.use_tree(
|
||||
@ -777,7 +787,7 @@ impl UseTreeLowering<'_> {
|
||||
} else {
|
||||
let is_glob = tree.star_token().is_some();
|
||||
let path = match tree.path() {
|
||||
Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.span_map)?),
|
||||
Some(path) => Some(ModPath::from_src(self.db.upcast(), path, span_for_range)?),
|
||||
None => None,
|
||||
};
|
||||
let alias = tree.rename().map(|a| {
|
||||
@ -813,10 +823,10 @@ impl UseTreeLowering<'_> {
|
||||
|
||||
pub(crate) fn lower_use_tree(
|
||||
db: &dyn DefDatabase,
|
||||
span_map: SpanMapRef<'_>,
|
||||
tree: ast::UseTree,
|
||||
span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId,
|
||||
) -> Option<(UseTree, Arena<ast::UseTree>)> {
|
||||
let mut lowering = UseTreeLowering { db, span_map, mapping: Arena::new() };
|
||||
let tree = lowering.lower_use_tree(tree)?;
|
||||
let mut lowering = UseTreeLowering { db, mapping: Arena::new() };
|
||||
let tree = lowering.lower_use_tree(tree, span_for_range)?;
|
||||
Some((tree, lowering.mapping))
|
||||
}
|
||||
|
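Throughout this commit, functions that used to take a concrete `SpanMapRef` now take `&mut dyn FnMut(TextRange) -> SyntaxContextId`, so callers decide where the span data comes from and can compute it lazily. A minimal sketch of swapping a concrete map parameter for a callback, using simplified stand-in types rather than the real rust-analyzer ones:

// Stand-ins for `TextRange` and `SyntaxContextId`.
type TextRange = std::ops::Range<u32>;
type SyntaxContextId = u64;

/// Before (conceptually): `fn lower_path(path: &str, span_map: &SpanMap) -> ...`.
/// After: the function asks only for the one lookup it needs, on demand.
fn lower_path(
    path: &str,
    span_for_range: &mut dyn FnMut(TextRange) -> SyntaxContextId,
) -> (String, SyntaxContextId) {
    let ctx = span_for_range(0..path.len() as u32);
    (path.to_owned(), ctx)
}

fn main() {
    // The caller owns the (potentially expensive) lookup and can defer or cache it.
    let mut calls = 0;
    let lowered = lower_path("foo::bar", &mut |range| {
        calls += 1;
        u64::from(range.end) // fake context id
    });
    assert_eq!(lowered.1, 8);
    assert_eq!(calls, 1);
}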
@ -1341,8 +1341,11 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
|
||||
let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
|
||||
let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
|
||||
let span_map = db.span_map(self.file_id);
|
||||
let path =
|
||||
self.value.path().and_then(|path| path::ModPath::from_src(db, path, span_map.as_ref()));
|
||||
let path = self.value.path().and_then(|path| {
|
||||
path::ModPath::from_src(db, path, &mut |range| {
|
||||
span_map.as_ref().span_for_range(range).ctx
|
||||
})
|
||||
});
|
||||
|
||||
let Some(path) = path else {
|
||||
return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
|
||||
|
@ -13,39 +13,36 @@ use crate::{db::DefDatabase, path::Path};
|
||||
|
||||
pub struct LowerCtx<'a> {
|
||||
pub db: &'a dyn DefDatabase,
|
||||
span_map: SpanMap,
|
||||
// FIXME: This optimization is probably pointless, ast id map should pretty much always exist anyways.
|
||||
ast_id_map: Option<(HirFileId, OnceCell<Arc<AstIdMap>>)>,
|
||||
file_id: HirFileId,
|
||||
span_map: OnceCell<SpanMap>,
|
||||
ast_id_map: OnceCell<Arc<AstIdMap>>,
|
||||
}
|
||||
|
||||
impl<'a> LowerCtx<'a> {
|
||||
pub fn new(db: &'a dyn DefDatabase, span_map: SpanMap, file_id: HirFileId) -> Self {
|
||||
LowerCtx { db, span_map, ast_id_map: Some((file_id, OnceCell::new())) }
|
||||
pub fn new(db: &'a dyn DefDatabase, file_id: HirFileId) -> Self {
|
||||
LowerCtx { db, file_id, span_map: OnceCell::new(), ast_id_map: OnceCell::new() }
|
||||
}
|
||||
|
||||
pub fn with_file_id(db: &'a dyn DefDatabase, file_id: HirFileId) -> Self {
|
||||
LowerCtx {
|
||||
db,
|
||||
span_map: db.span_map(file_id),
|
||||
ast_id_map: Some((file_id, OnceCell::new())),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_span_map(db: &'a dyn DefDatabase, span_map: SpanMap) -> Self {
|
||||
LowerCtx { db, span_map, ast_id_map: None }
|
||||
pub fn with_span_map_cell(
|
||||
db: &'a dyn DefDatabase,
|
||||
file_id: HirFileId,
|
||||
span_map: OnceCell<SpanMap>,
|
||||
) -> Self {
|
||||
LowerCtx { db, file_id, span_map, ast_id_map: OnceCell::new() }
|
||||
}
|
||||
|
||||
pub(crate) fn span_map(&self) -> SpanMapRef<'_> {
|
||||
self.span_map.as_ref()
|
||||
self.span_map.get_or_init(|| self.db.span_map(self.file_id)).as_ref()
|
||||
}
|
||||
|
||||
pub(crate) fn lower_path(&self, ast: ast::Path) -> Option<Path> {
|
||||
Path::from_src(self, ast)
|
||||
}
|
||||
|
||||
pub(crate) fn ast_id<N: AstIdNode>(&self, item: &N) -> Option<AstId<N>> {
|
||||
let &(file_id, ref ast_id_map) = self.ast_id_map.as_ref()?;
|
||||
let ast_id_map = ast_id_map.get_or_init(|| self.db.ast_id_map(file_id));
|
||||
Some(InFile::new(file_id, ast_id_map.ast_id(item)))
|
||||
pub(crate) fn ast_id<N: AstIdNode>(&self, item: &N) -> AstId<N> {
|
||||
InFile::new(
|
||||
self.file_id,
|
||||
self.ast_id_map.get_or_init(|| self.db.ast_id_map(self.file_id)).ast_id(item),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -65,7 +65,6 @@ use hir_expand::{
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use la_arena::Arena;
|
||||
use profile::Count;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use span::FileAstId;
|
||||
use stdx::format_to;
|
||||
@ -95,7 +94,6 @@ use crate::{
|
||||
/// is computed by the `block_def_map` query.
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct DefMap {
|
||||
_c: Count<Self>,
|
||||
/// When this is a block def map, this will hold the block id of the block and module that
|
||||
/// contains this block.
|
||||
block: Option<BlockInfo>,
|
||||
@ -154,6 +152,23 @@ struct DefMapCrateData {
|
||||
}
|
||||
|
||||
impl DefMapCrateData {
|
||||
fn new(edition: Edition) -> Self {
|
||||
Self {
|
||||
extern_prelude: FxHashMap::default(),
|
||||
exported_derives: FxHashMap::default(),
|
||||
fn_proc_macro_mapping: FxHashMap::default(),
|
||||
proc_macro_loading_error: None,
|
||||
registered_attrs: Vec::new(),
|
||||
registered_tools: Vec::new(),
|
||||
unstable_features: FxHashSet::default(),
|
||||
rustc_coherence_is_core: false,
|
||||
no_core: false,
|
||||
no_std: false,
|
||||
edition,
|
||||
recursion_limit: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn shrink_to_fit(&mut self) {
|
||||
let Self {
|
||||
extern_prelude,
|
||||
@ -305,67 +320,67 @@ impl DefMap {
|
||||
/// The module id of a crate or block root.
|
||||
pub const ROOT: LocalModuleId = LocalModuleId::from_raw(la_arena::RawIdx::from_u32(0));
|
||||
|
||||
pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> {
|
||||
pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, crate_id: CrateId) -> Arc<DefMap> {
|
||||
let crate_graph = db.crate_graph();
|
||||
let krate_name = crate_graph[krate].display_name.as_deref().unwrap_or_default();
|
||||
let krate = &crate_graph[crate_id];
|
||||
let name = krate.display_name.as_deref().unwrap_or_default();
|
||||
let _p = tracing::span!(tracing::Level::INFO, "crate_def_map_query", ?name).entered();
|
||||
|
||||
let _p = tracing::span!(tracing::Level::INFO, "crate_def_map_query", ?krate_name).entered();
|
||||
|
||||
let crate_graph = db.crate_graph();
|
||||
|
||||
let edition = crate_graph[krate].edition;
|
||||
let origin = ModuleOrigin::CrateRoot { definition: crate_graph[krate].root_file_id };
|
||||
let def_map = DefMap::empty(krate, edition, ModuleData::new(origin, Visibility::Public));
|
||||
let def_map = collector::collect_defs(
|
||||
db,
|
||||
def_map,
|
||||
TreeId::new(crate_graph[krate].root_file_id.into(), None),
|
||||
let module_data = ModuleData::new(
|
||||
ModuleOrigin::CrateRoot { definition: krate.root_file_id },
|
||||
Visibility::Public,
|
||||
);
|
||||
|
||||
let def_map = DefMap::empty(
|
||||
crate_id,
|
||||
Arc::new(DefMapCrateData::new(krate.edition)),
|
||||
module_data,
|
||||
None,
|
||||
);
|
||||
let def_map =
|
||||
collector::collect_defs(db, def_map, TreeId::new(krate.root_file_id.into(), None));
|
||||
|
||||
Arc::new(def_map)
|
||||
}
|
||||
|
||||
pub(crate) fn block_def_map_query(db: &dyn DefDatabase, block_id: BlockId) -> Arc<DefMap> {
|
||||
let block: BlockLoc = block_id.lookup(db);
|
||||
let BlockLoc { ast_id, module } = block_id.lookup(db);
|
||||
|
||||
let parent_map = block.module.def_map(db);
|
||||
let krate = block.module.krate;
|
||||
let local_id = LocalModuleId::from_raw(la_arena::RawIdx::from(0));
|
||||
// NB: we use `None` as block here, which would be wrong for implicit
|
||||
// modules declared by blocks with items. At the moment, we don't use
|
||||
// this visibility for anything outside IDE, so that's probably OK.
|
||||
let visibility = Visibility::Module(
|
||||
ModuleId { krate, local_id, block: None },
|
||||
ModuleId { krate: module.krate, local_id: Self::ROOT, block: module.block },
|
||||
VisibilityExplicitness::Implicit,
|
||||
);
|
||||
let module_data = ModuleData::new(
|
||||
ModuleOrigin::BlockExpr { block: block.ast_id, id: block_id },
|
||||
visibility,
|
||||
let module_data =
|
||||
ModuleData::new(ModuleOrigin::BlockExpr { block: ast_id, id: block_id }, visibility);
|
||||
|
||||
let parent_map = module.def_map(db);
|
||||
let def_map = DefMap::empty(
|
||||
module.krate,
|
||||
parent_map.data.clone(),
|
||||
module_data,
|
||||
Some(BlockInfo {
|
||||
block: block_id,
|
||||
parent: BlockRelativeModuleId { block: module.block, local_id: module.local_id },
|
||||
}),
|
||||
);
|
||||
|
||||
let mut def_map = DefMap::empty(krate, parent_map.data.edition, module_data);
|
||||
def_map.data = parent_map.data.clone();
|
||||
def_map.block = Some(BlockInfo {
|
||||
block: block_id,
|
||||
parent: BlockRelativeModuleId {
|
||||
block: block.module.block,
|
||||
local_id: block.module.local_id,
|
||||
},
|
||||
});
|
||||
|
||||
let def_map =
|
||||
collector::collect_defs(db, def_map, TreeId::new(block.ast_id.file_id, Some(block_id)));
|
||||
collector::collect_defs(db, def_map, TreeId::new(ast_id.file_id, Some(block_id)));
|
||||
Arc::new(def_map)
|
||||
}
|
||||
|
||||
fn empty(krate: CrateId, edition: Edition, module_data: ModuleData) -> DefMap {
|
||||
fn empty(
|
||||
krate: CrateId,
|
||||
crate_data: Arc<DefMapCrateData>,
|
||||
module_data: ModuleData,
|
||||
block: Option<BlockInfo>,
|
||||
) -> DefMap {
|
||||
let mut modules: Arena<ModuleData> = Arena::default();
|
||||
let root = modules.alloc(module_data);
|
||||
assert_eq!(root, Self::ROOT);
|
||||
|
||||
DefMap {
|
||||
_c: Count::new(),
|
||||
block: None,
|
||||
block,
|
||||
modules,
|
||||
krate,
|
||||
prelude: None,
|
||||
@ -373,23 +388,36 @@ impl DefMap {
|
||||
derive_helpers_in_scope: FxHashMap::default(),
|
||||
diagnostics: Vec::new(),
|
||||
enum_definitions: FxHashMap::default(),
|
||||
data: Arc::new(DefMapCrateData {
|
||||
extern_prelude: FxHashMap::default(),
|
||||
exported_derives: FxHashMap::default(),
|
||||
fn_proc_macro_mapping: FxHashMap::default(),
|
||||
proc_macro_loading_error: None,
|
||||
registered_attrs: Vec::new(),
|
||||
registered_tools: Vec::new(),
|
||||
unstable_features: FxHashSet::default(),
|
||||
rustc_coherence_is_core: false,
|
||||
no_core: false,
|
||||
no_std: false,
|
||||
edition,
|
||||
recursion_limit: None,
|
||||
}),
|
||||
data: crate_data,
|
||||
}
|
||||
}
|
||||
fn shrink_to_fit(&mut self) {
|
||||
// Exhaustive match to require handling new fields.
|
||||
let Self {
|
||||
macro_use_prelude,
|
||||
diagnostics,
|
||||
modules,
|
||||
derive_helpers_in_scope,
|
||||
block: _,
|
||||
krate: _,
|
||||
prelude: _,
|
||||
data: _,
|
||||
enum_definitions,
|
||||
} = self;
|
||||
|
||||
macro_use_prelude.shrink_to_fit();
|
||||
diagnostics.shrink_to_fit();
|
||||
modules.shrink_to_fit();
|
||||
derive_helpers_in_scope.shrink_to_fit();
|
||||
enum_definitions.shrink_to_fit();
|
||||
for (_, module) in modules.iter_mut() {
|
||||
module.children.shrink_to_fit();
|
||||
module.scope.shrink_to_fit();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl DefMap {
pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator<Item = LocalModuleId> + '_ {
self.modules
.iter()
@ -440,6 +468,105 @@ impl DefMap {
self.krate
}

pub fn module_id(&self, local_id: LocalModuleId) -> ModuleId {
let block = self.block.map(|b| b.block);
ModuleId { krate: self.krate, local_id, block }
}

pub fn crate_root(&self) -> CrateRootModuleId {
CrateRootModuleId { krate: self.krate }
}

/// This is the same as [`Self::crate_root`] for crate def maps, but for block def maps, it
/// returns the root block module.
pub fn root_module_id(&self) -> ModuleId {
self.module_id(Self::ROOT)
}

/// If this `DefMap` is for a block expression, returns the module containing the block (which
/// might again be a block, or a module inside a block).
pub fn parent(&self) -> Option<ModuleId> {
let BlockRelativeModuleId { block, local_id } = self.block?.parent;
Some(ModuleId { krate: self.krate, block, local_id })
}

/// Returns the module containing `local_mod`, either the parent `mod`, or the module (or block) containing
/// the block, if `self` corresponds to a block expression.
pub fn containing_module(&self, local_mod: LocalModuleId) -> Option<ModuleId> {
match self[local_mod].parent {
Some(parent) => Some(self.module_id(parent)),
None => {
self.block.map(
|BlockInfo { parent: BlockRelativeModuleId { block, local_id }, .. }| {
ModuleId { krate: self.krate, block, local_id }
},
)
}
}
}

/// Get a reference to the def map's diagnostics.
pub fn diagnostics(&self) -> &[DefDiagnostic] {
self.diagnostics.as_slice()
}

pub fn recursion_limit(&self) -> u32 {
// 128 is the default in rustc
self.data.recursion_limit.unwrap_or(128)
}

// FIXME: this can use some more human-readable format (ideally, an IR
// even), as this should be a great debugging aid.
pub fn dump(&self, db: &dyn DefDatabase) -> String {
let mut buf = String::new();
let mut arc;
let mut current_map = self;
while let Some(block) = current_map.block {
go(&mut buf, db, current_map, "block scope", Self::ROOT);
buf.push('\n');
arc = block.parent.def_map(db, self.krate);
current_map = &arc;
}
go(&mut buf, db, current_map, "crate", Self::ROOT);
return buf;

fn go(
buf: &mut String,
db: &dyn DefDatabase,
map: &DefMap,
path: &str,
module: LocalModuleId,
) {
format_to!(buf, "{}\n", path);

map.modules[module].scope.dump(db.upcast(), buf);

for (name, child) in
map.modules[module].children.iter().sorted_by(|a, b| Ord::cmp(&a.0, &b.0))
{
let path = format!("{path}::{}", name.display(db.upcast()));
buf.push('\n');
go(buf, db, map, &path, *child);
}
}
}

pub fn dump_block_scopes(&self, db: &dyn DefDatabase) -> String {
let mut buf = String::new();
let mut arc;
let mut current_map = self;
while let Some(block) = current_map.block {
format_to!(buf, "{:?} in {:?}\n", block.block, block.parent);
arc = block.parent.def_map(db, self.krate);
current_map = &arc;
}

format_to!(buf, "crate scope\n");
buf
}
}

impl DefMap {
pub(crate) fn block_id(&self) -> Option<BlockId> {
self.block.map(|block| block.block)
}
@ -460,21 +587,6 @@ impl DefMap {
self.macro_use_prelude.iter().map(|(name, &def)| (name, def))
}

pub fn module_id(&self, local_id: LocalModuleId) -> ModuleId {
let block = self.block.map(|b| b.block);
ModuleId { krate: self.krate, local_id, block }
}

pub fn crate_root(&self) -> CrateRootModuleId {
CrateRootModuleId { krate: self.krate }
}

/// This is the same as [`Self::crate_root`] for crate def maps, but for block def maps, it
/// returns the root block module.
pub fn root_module_id(&self) -> ModuleId {
self.module_id(Self::ROOT)
}

pub(crate) fn resolve_path(
&self,
db: &dyn DefDatabase,
@ -536,114 +648,6 @@ impl DefMap {

None
}

/// If this `DefMap` is for a block expression, returns the module containing the block (which
/// might again be a block, or a module inside a block).
pub fn parent(&self) -> Option<ModuleId> {
let BlockRelativeModuleId { block, local_id } = self.block?.parent;
Some(ModuleId { krate: self.krate, block, local_id })
}

/// Returns the module containing `local_mod`, either the parent `mod`, or the module (or block) containing
/// the block, if `self` corresponds to a block expression.
pub fn containing_module(&self, local_mod: LocalModuleId) -> Option<ModuleId> {
match self[local_mod].parent {
Some(parent) => Some(self.module_id(parent)),
None => {
self.block.map(
|BlockInfo { parent: BlockRelativeModuleId { block, local_id }, .. }| {
ModuleId { krate: self.krate, block, local_id }
},
)
}
}
}

// FIXME: this can use some more human-readable format (ideally, an IR
// even), as this should be a great debugging aid.
pub fn dump(&self, db: &dyn DefDatabase) -> String {
let mut buf = String::new();
let mut arc;
let mut current_map = self;
while let Some(block) = current_map.block {
go(&mut buf, db, current_map, "block scope", Self::ROOT);
buf.push('\n');
arc = block.parent.def_map(db, self.krate);
current_map = &arc;
}
go(&mut buf, db, current_map, "crate", Self::ROOT);
return buf;

fn go(
buf: &mut String,
db: &dyn DefDatabase,
map: &DefMap,
path: &str,
module: LocalModuleId,
) {
format_to!(buf, "{}\n", path);

map.modules[module].scope.dump(db.upcast(), buf);

for (name, child) in
map.modules[module].children.iter().sorted_by(|a, b| Ord::cmp(&a.0, &b.0))
{
let path = format!("{path}::{}", name.display(db.upcast()));
buf.push('\n');
go(buf, db, map, &path, *child);
}
}
}

pub fn dump_block_scopes(&self, db: &dyn DefDatabase) -> String {
let mut buf = String::new();
let mut arc;
let mut current_map = self;
while let Some(block) = current_map.block {
format_to!(buf, "{:?} in {:?}\n", block.block, block.parent);
arc = block.parent.def_map(db, self.krate);
current_map = &arc;
}

format_to!(buf, "crate scope\n");
buf
}

fn shrink_to_fit(&mut self) {
// Exhaustive match to require handling new fields.
let Self {
_c: _,
macro_use_prelude,
diagnostics,
modules,
derive_helpers_in_scope,
block: _,
krate: _,
prelude: _,
data: _,
enum_definitions,
} = self;

macro_use_prelude.shrink_to_fit();
diagnostics.shrink_to_fit();
modules.shrink_to_fit();
derive_helpers_in_scope.shrink_to_fit();
enum_definitions.shrink_to_fit();
for (_, module) in modules.iter_mut() {
module.children.shrink_to_fit();
module.scope.shrink_to_fit();
}
}

/// Get a reference to the def map's diagnostics.
pub fn diagnostics(&self) -> &[DefDiagnostic] {
self.diagnostics.as_slice()
}

pub fn recursion_limit(&self) -> u32 {
// 128 is the default in rustc
self.data.recursion_limit.unwrap_or(128)
}
}

impl ModuleData {

@ -64,19 +64,18 @@ static FIXED_POINT_LIMIT: Limit = Limit::new(8192);
pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeId) -> DefMap {
let crate_graph = db.crate_graph();

let mut deps = FxHashMap::default();
// populate external prelude and dependency list
let krate = &crate_graph[def_map.krate];

// populate external prelude and dependency list
let mut deps =
FxHashMap::with_capacity_and_hasher(krate.dependencies.len(), Default::default());
for dep in &krate.dependencies {
tracing::debug!("crate dep {:?} -> {:?}", dep.name, dep.crate_id);

deps.insert(dep.as_name(), dep.clone());
}

let cfg_options = &krate.cfg_options;

let is_proc_macro = krate.is_proc_macro;
let proc_macros = if is_proc_macro {
let proc_macros = if krate.is_proc_macro {
match db.proc_macros().get(&def_map.krate) {
Some(Ok(proc_macros)) => {
Ok(proc_macros
@ -124,11 +123,11 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
indeterminate_imports: Vec::new(),
unresolved_macros: Vec::new(),
mod_dirs: FxHashMap::default(),
cfg_options,
cfg_options: &krate.cfg_options,
proc_macros,
from_glob_import: Default::default(),
skip_attrs: Default::default(),
is_proc_macro,
is_proc_macro: krate.is_proc_macro,
};
if tree_id.is_block() {
collector.seed_with_inner(tree_id);
@ -302,71 +301,50 @@ impl DefCollector<'_> {
return;
}
}
let attr_name = match attr.path.as_ident() {
Some(name) => name,
None => continue,
};
let Some(attr_name) = attr.path.as_ident() else { continue };

if *attr_name == hir_expand::name![recursion_limit] {
if let Some(limit) = attr.string_value() {
if let Ok(limit) = limit.parse() {
crate_data.recursion_limit = Some(limit);
match () {
() if *attr_name == hir_expand::name![recursion_limit] => {
if let Some(limit) = attr.string_value() {
if let Ok(limit) = limit.parse() {
crate_data.recursion_limit = Some(limit);
}
}
}
continue;
}

if *attr_name == hir_expand::name![crate_type] {
if let Some("proc-macro") = attr.string_value().map(SmolStr::as_str) {
self.is_proc_macro = true;
() if *attr_name == hir_expand::name![crate_type] => {
if let Some("proc-macro") = attr.string_value().map(SmolStr::as_str) {
self.is_proc_macro = true;
}
}
continue;
}

if *attr_name == hir_expand::name![no_core] {
crate_data.no_core = true;
continue;
}

if *attr_name == hir_expand::name![no_std] {
crate_data.no_std = true;
continue;
}

if attr_name.as_text().as_deref() == Some("rustc_coherence_is_core") {
crate_data.rustc_coherence_is_core = true;
continue;
}

if *attr_name == hir_expand::name![feature] {
let features = attr
.parse_path_comma_token_tree(self.db.upcast())
.into_iter()
.flatten()
.filter_map(|(feat, _)| match feat.segments() {
[name] => Some(name.to_smol_str()),
_ => None,
});
crate_data.unstable_features.extend(features);
}

let attr_is_register_like = *attr_name == hir_expand::name![register_attr]
|| *attr_name == hir_expand::name![register_tool];
if !attr_is_register_like {
continue;
}

let registered_name = match attr.single_ident_value() {
Some(ident) => ident.as_name(),
_ => continue,
};

if *attr_name == hir_expand::name![register_attr] {
crate_data.registered_attrs.push(registered_name.to_smol_str());
cov_mark::hit!(register_attr);
} else {
crate_data.registered_tools.push(registered_name.to_smol_str());
cov_mark::hit!(register_tool);
() if *attr_name == hir_expand::name![no_core] => crate_data.no_core = true,
() if *attr_name == hir_expand::name![no_std] => crate_data.no_std = true,
() if attr_name.as_text().as_deref() == Some("rustc_coherence_is_core") => {
crate_data.rustc_coherence_is_core = true;
}
() if *attr_name == hir_expand::name![feature] => {
let features = attr
.parse_path_comma_token_tree(self.db.upcast())
.into_iter()
.flatten()
.filter_map(|(feat, _)| match feat.segments() {
[name] => Some(name.to_smol_str()),
_ => None,
});
crate_data.unstable_features.extend(features);
}
() if *attr_name == hir_expand::name![register_attr] => {
if let Some(ident) = attr.single_ident_value() {
crate_data.registered_attrs.push(ident.text.clone());
cov_mark::hit!(register_attr);
}
}
() if *attr_name == hir_expand::name![register_tool] => {
if let Some(ident) = attr.single_ident_value() {
crate_data.registered_tools.push(ident.text.clone());
cov_mark::hit!(register_tool);
}
}
() => (),
}
}

@ -409,6 +387,7 @@ impl DefCollector<'_> {
// main name resolution fixed-point loop.
let mut i = 0;
'resolve_attr: loop {
let _p = tracing::span!(tracing::Level::INFO, "resolve_macros loop").entered();
'resolve_macros: loop {
self.db.unwind_if_cancelled();

@ -466,9 +445,8 @@ impl DefCollector<'_> {
// Additionally, while the proc macro entry points must be `pub`, they are not publicly
// exported in type/value namespace. This function reduces the visibility of all items
// in the crate root that aren't proc macros.
let root = DefMap::ROOT;
let module_id = self.def_map.module_id(root);
let root = &mut self.def_map.modules[root];
let module_id = self.def_map.module_id(DefMap::ROOT);
let root = &mut self.def_map.modules[DefMap::ROOT];
root.scope.censor_non_proc_macros(module_id);
}
}
@ -828,12 +806,10 @@ impl DefCollector<'_> {
return PartialResolvedImport::Unresolved;
}

if let Some(krate) = res.krate {
if krate != self.def_map.krate {
return PartialResolvedImport::Resolved(
def.filter_visibility(|v| matches!(v, Visibility::Public)),
);
}
if res.from_differing_crate {
return PartialResolvedImport::Resolved(
def.filter_visibility(|v| matches!(v, Visibility::Public)),
);
}

// Check whether all namespaces are resolved.
@ -1408,7 +1384,9 @@ impl DefCollector<'_> {
// First, fetch the raw expansion result for purposes of error reporting. This goes through
// `parse_macro_expansion_error` to avoid depending on the full expansion result (to improve
// incrementality).
let ExpandResult { value, err } = self.db.parse_macro_expansion_error(macro_call_id);
// FIXME: This kind of error fetching feels a bit odd?
let ExpandResult { value: errors, err } =
self.db.parse_macro_expansion_error(macro_call_id);
if let Some(err) = err {
let loc: MacroCallLoc = self.db.lookup_intern_macro_call(macro_call_id);
let diag = match err {
@ -1422,7 +1400,7 @@ impl DefCollector<'_> {

self.def_map.diagnostics.push(diag);
}
if let errors @ [_, ..] = &*value {
if !errors.is_empty() {
let loc: MacroCallLoc = self.db.lookup_intern_macro_call(macro_call_id);
let diag = DefDiagnostic::macro_expansion_parse_error(module_id, loc.kind, errors);
self.def_map.diagnostics.push(diag);
@ -1920,7 +1898,7 @@ impl ModCollector<'_, '_> {
}

fn collect_module(&mut self, module_id: FileItemTreeId<Mod>, attrs: &Attrs) {
let path_attr = attrs.by_key("path").string_value();
let path_attr = attrs.by_key("path").string_value().map(SmolStr::as_str);
let is_macro_use = attrs.by_key("macro_use").exists();
let module = &self.item_tree[module_id];
match &module.kind {
@ -1934,25 +1912,26 @@ impl ModCollector<'_, '_> {
module_id,
);

if let Some(mod_dir) = self.mod_dir.descend_into_definition(&module.name, path_attr)
{
ModCollector {
def_collector: &mut *self.def_collector,
macro_depth: self.macro_depth,
module_id,
tree_id: self.tree_id,
item_tree: self.item_tree,
mod_dir,
}
.collect_in_top_module(items);
if is_macro_use {
self.import_all_legacy_macros(module_id);
}
let Some(mod_dir) = self.mod_dir.descend_into_definition(&module.name, path_attr)
else {
return;
};
ModCollector {
def_collector: &mut *self.def_collector,
macro_depth: self.macro_depth,
module_id,
tree_id: self.tree_id,
item_tree: self.item_tree,
mod_dir,
}
.collect_in_top_module(items);
if is_macro_use {
self.import_all_legacy_macros(module_id);
}
}
// out of line module, resolve, parse and recurse
ModKind::Outline => {
let ast_id = AstId::new(self.tree_id.file_id(), module.ast_id);
let ast_id = AstId::new(self.file_id(), module.ast_id);
let db = self.def_collector.db;
match self.mod_dir.resolve_declaration(db, self.file_id(), &module.name, path_attr)
{
@ -2445,7 +2424,7 @@ mod tests {
use base_db::SourceDatabase;
use test_fixture::WithFixture;

use crate::test_db::TestDB;
use crate::{nameres::DefMapCrateData, test_db::TestDB};

use super::*;

@ -2476,8 +2455,12 @@ mod tests {

let edition = db.crate_graph()[krate].edition;
let module_origin = ModuleOrigin::CrateRoot { definition: file_id };
let def_map =
DefMap::empty(krate, edition, ModuleData::new(module_origin, Visibility::Public));
let def_map = DefMap::empty(
krate,
Arc::new(DefMapCrateData::new(edition)),
ModuleData::new(module_origin, Visibility::Public),
None,
);
do_collect_defs(&db, def_map)
}

@ -1,5 +1,7 @@
|
||||
//! Diagnostics emitted during DefMap construction.
|
||||
|
||||
use std::ops::Not;
|
||||
|
||||
use base_db::CrateId;
|
||||
use cfg::{CfgExpr, CfgOptions};
|
||||
use hir_expand::{attrs::AttrId, ErasedAstId, MacroCallKind};
|
||||
@ -16,27 +18,16 @@ use crate::{
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum DefDiagnosticKind {
|
||||
UnresolvedModule { ast: AstId<ast::Module>, candidates: Box<[String]> },
|
||||
|
||||
UnresolvedExternCrate { ast: AstId<ast::ExternCrate> },
|
||||
|
||||
UnresolvedImport { id: ItemTreeId<item_tree::Use>, index: Idx<ast::UseTree> },
|
||||
|
||||
UnconfiguredCode { ast: ErasedAstId, cfg: CfgExpr, opts: CfgOptions },
|
||||
|
||||
UnresolvedProcMacro { ast: MacroCallKind, krate: CrateId },
|
||||
|
||||
UnresolvedMacroCall { ast: MacroCallKind, path: ModPath },
|
||||
|
||||
MacroError { ast: MacroCallKind, message: String },
|
||||
|
||||
MacroExpansionParseError { ast: MacroCallKind, errors: Box<[SyntaxError]> },
|
||||
|
||||
UnimplementedBuiltinMacro { ast: AstId<ast::Macro> },
|
||||
|
||||
InvalidDeriveTarget { ast: AstId<ast::Item>, id: usize },
|
||||
|
||||
MalformedDerive { ast: AstId<ast::Adt>, id: usize },
|
||||
|
||||
MacroDefError { ast: AstId<ast::Macro>, message: String },
|
||||
}
|
||||
|
||||
@ -45,11 +36,12 @@ pub struct DefDiagnostics(Option<triomphe::Arc<Box<[DefDiagnostic]>>>);
|
||||
|
||||
impl DefDiagnostics {
|
||||
pub fn new(diagnostics: Vec<DefDiagnostic>) -> Self {
|
||||
Self(if diagnostics.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(triomphe::Arc::new(diagnostics.into_boxed_slice()))
|
||||
})
|
||||
Self(
|
||||
diagnostics
|
||||
.is_empty()
|
||||
.not()
|
||||
.then(|| triomphe::Arc::new(diagnostics.into_boxed_slice())),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> impl Iterator<Item = &DefDiagnostic> {
|
||||
@ -125,14 +117,11 @@ impl DefDiagnostic {
|
||||
pub(crate) fn macro_expansion_parse_error(
|
||||
container: LocalModuleId,
|
||||
ast: MacroCallKind,
|
||||
errors: &[SyntaxError],
|
||||
errors: Box<[SyntaxError]>,
|
||||
) -> Self {
|
||||
Self {
|
||||
in_module: container,
|
||||
kind: DefDiagnosticKind::MacroExpansionParseError {
|
||||
ast,
|
||||
errors: errors.to_vec().into_boxed_slice(),
|
||||
},
|
||||
kind: DefDiagnosticKind::MacroExpansionParseError { ast, errors },
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -3,7 +3,6 @@ use arrayvec::ArrayVec;
|
||||
use base_db::{AnchoredPath, FileId};
|
||||
use hir_expand::{name::Name, HirFileIdExt, MacroFileIdExt};
|
||||
use limit::Limit;
|
||||
use syntax::SmolStr;
|
||||
|
||||
use crate::{db::DefDatabase, HirFileId};
|
||||
|
||||
@ -29,9 +28,9 @@ impl ModDir {
|
||||
pub(super) fn descend_into_definition(
|
||||
&self,
|
||||
name: &Name,
|
||||
attr_path: Option<&SmolStr>,
|
||||
attr_path: Option<&str>,
|
||||
) -> Option<ModDir> {
|
||||
let path = match attr_path.map(SmolStr::as_str) {
|
||||
let path = match attr_path {
|
||||
None => {
|
||||
let mut path = self.dir_path.clone();
|
||||
path.push(&name.unescaped().to_smol_str());
|
||||
@ -63,10 +62,9 @@ impl ModDir {
|
||||
db: &dyn DefDatabase,
|
||||
file_id: HirFileId,
|
||||
name: &Name,
|
||||
attr_path: Option<&SmolStr>,
|
||||
attr_path: Option<&str>,
|
||||
) -> Result<(FileId, bool, ModDir), Box<[String]>> {
|
||||
let name = name.unescaped();
|
||||
let orig_file_id = file_id.original_file_respecting_includes(db.upcast());
|
||||
|
||||
let mut candidate_files = ArrayVec::<_, 2>::new();
|
||||
match attr_path {
|
||||
@ -91,17 +89,19 @@ impl ModDir {
|
||||
}
|
||||
};
|
||||
|
||||
let orig_file_id = file_id.original_file_respecting_includes(db.upcast());
|
||||
for candidate in candidate_files.iter() {
|
||||
let path = AnchoredPath { anchor: orig_file_id, path: candidate.as_str() };
|
||||
if let Some(file_id) = db.resolve_path(path) {
|
||||
let is_mod_rs = candidate.ends_with("/mod.rs");
|
||||
|
||||
let (dir_path, root_non_dir_owner) = if is_mod_rs || attr_path.is_some() {
|
||||
(DirPath::empty(), false)
|
||||
let root_dir_owner = is_mod_rs || attr_path.is_some();
|
||||
let dir_path = if root_dir_owner {
|
||||
DirPath::empty()
|
||||
} else {
|
||||
(DirPath::new(format!("{}/", name.display(db.upcast()))), true)
|
||||
DirPath::new(format!("{}/", name.display(db.upcast())))
|
||||
};
|
||||
if let Some(mod_dir) = self.child(dir_path, root_non_dir_owner) {
|
||||
if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) {
|
||||
return Ok((file_id, is_mod_rs, mod_dir));
|
||||
}
|
||||
}
|
||||
|
@ -22,7 +22,7 @@ use crate::{
|
||||
path::{ModPath, PathKind},
|
||||
per_ns::PerNs,
|
||||
visibility::{RawVisibility, Visibility},
|
||||
AdtId, CrateId, LocalModuleId, ModuleDefId,
|
||||
AdtId, LocalModuleId, ModuleDefId,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
@ -42,21 +42,21 @@ pub(super) struct ResolvePathResult {
|
||||
pub(super) resolved_def: PerNs,
|
||||
pub(super) segment_index: Option<usize>,
|
||||
pub(super) reached_fixedpoint: ReachedFixedPoint,
|
||||
pub(super) krate: Option<CrateId>,
|
||||
pub(super) from_differing_crate: bool,
|
||||
}
|
||||
|
||||
impl ResolvePathResult {
|
||||
fn empty(reached_fixedpoint: ReachedFixedPoint) -> ResolvePathResult {
|
||||
ResolvePathResult::with(PerNs::none(), reached_fixedpoint, None, None)
|
||||
ResolvePathResult::new(PerNs::none(), reached_fixedpoint, None, false)
|
||||
}
|
||||
|
||||
fn with(
|
||||
fn new(
|
||||
resolved_def: PerNs,
|
||||
reached_fixedpoint: ReachedFixedPoint,
|
||||
segment_index: Option<usize>,
|
||||
krate: Option<CrateId>,
|
||||
from_differing_crate: bool,
|
||||
) -> ResolvePathResult {
|
||||
ResolvePathResult { resolved_def, segment_index, reached_fixedpoint, krate }
|
||||
ResolvePathResult { resolved_def, segment_index, reached_fixedpoint, from_differing_crate }
|
||||
}
|
||||
}
|
||||
|
||||
@ -134,7 +134,19 @@ impl DefMap {
|
||||
// resolving them to. Pass `None` otherwise, e.g. when we're resolving import paths.
|
||||
expected_macro_subns: Option<MacroSubNs>,
|
||||
) -> ResolvePathResult {
|
||||
let mut result = ResolvePathResult::empty(ReachedFixedPoint::No);
|
||||
let mut result = self.resolve_path_fp_with_macro_single(
|
||||
db,
|
||||
mode,
|
||||
original_module,
|
||||
path,
|
||||
shadow,
|
||||
expected_macro_subns,
|
||||
);
|
||||
|
||||
if self.block.is_none() {
|
||||
// If we're in the root `DefMap`, we can resolve the path directly.
|
||||
return result;
|
||||
}
|
||||
|
||||
let mut arc;
|
||||
let mut current_map = self;
|
||||
@ -153,8 +165,7 @@ impl DefMap {
|
||||
if result.reached_fixedpoint == ReachedFixedPoint::No {
|
||||
result.reached_fixedpoint = new.reached_fixedpoint;
|
||||
}
|
||||
// FIXME: this doesn't seem right; what if the different namespace resolutions come from different crates?
|
||||
result.krate = result.krate.or(new.krate);
|
||||
result.from_differing_crate |= new.from_differing_crate;
|
||||
result.segment_index = match (result.segment_index, new.segment_index) {
|
||||
(Some(idx), None) => Some(idx),
|
||||
(Some(old), Some(new)) => Some(old.max(new)),
|
||||
@ -333,11 +344,11 @@ impl DefMap {
|
||||
// expectation is discarded.
|
||||
let (def, s) =
|
||||
defp_map.resolve_path(db, module.local_id, &path, shadow, None);
|
||||
return ResolvePathResult::with(
|
||||
return ResolvePathResult::new(
|
||||
def,
|
||||
ReachedFixedPoint::Yes,
|
||||
s.map(|s| s + i),
|
||||
Some(module.krate),
|
||||
true,
|
||||
);
|
||||
}
|
||||
|
||||
@ -385,11 +396,11 @@ impl DefMap {
|
||||
match res {
|
||||
Some(res) => res,
|
||||
None => {
|
||||
return ResolvePathResult::with(
|
||||
return ResolvePathResult::new(
|
||||
PerNs::types(e.into(), vis, imp),
|
||||
ReachedFixedPoint::Yes,
|
||||
Some(i),
|
||||
Some(self.krate),
|
||||
false,
|
||||
)
|
||||
}
|
||||
}
|
||||
@ -403,11 +414,11 @@ impl DefMap {
|
||||
curr,
|
||||
);
|
||||
|
||||
return ResolvePathResult::with(
|
||||
return ResolvePathResult::new(
|
||||
PerNs::types(s, vis, imp),
|
||||
ReachedFixedPoint::Yes,
|
||||
Some(i),
|
||||
Some(self.krate),
|
||||
false,
|
||||
);
|
||||
}
|
||||
};
|
||||
@ -416,7 +427,7 @@ impl DefMap {
|
||||
.filter_visibility(|vis| vis.is_visible_from_def_map(db, self, original_module));
|
||||
}
|
||||
|
||||
ResolvePathResult::with(curr_per_ns, ReachedFixedPoint::Yes, None, Some(self.krate))
|
||||
ResolvePathResult::new(curr_per_ns, ReachedFixedPoint::Yes, None, false)
|
||||
}
|
||||
|
||||
fn resolve_name_in_module(
|
||||
|
@ -2,8 +2,8 @@
|
||||
|
||||
use std::iter;
|
||||
|
||||
use hir_expand::{span_map::SpanMapRef, InFile};
|
||||
use la_arena::ArenaMap;
|
||||
use span::SyntaxContextId;
|
||||
use syntax::ast;
|
||||
use triomphe::Arc;
|
||||
|
||||
@ -34,36 +34,25 @@ impl RawVisibility {
|
||||
}
|
||||
|
||||
pub(crate) fn from_ast(
|
||||
db: &dyn DefDatabase,
|
||||
node: InFile<Option<ast::Visibility>>,
|
||||
) -> RawVisibility {
|
||||
let node = match node.transpose() {
|
||||
None => return RawVisibility::private(),
|
||||
Some(node) => node,
|
||||
};
|
||||
Self::from_ast_with_span_map(db, node.value, db.span_map(node.file_id).as_ref())
|
||||
}
|
||||
|
||||
pub(crate) fn from_opt_ast_with_span_map(
|
||||
db: &dyn DefDatabase,
|
||||
node: Option<ast::Visibility>,
|
||||
span_map: SpanMapRef<'_>,
|
||||
span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId,
|
||||
) -> RawVisibility {
|
||||
let node = match node {
|
||||
None => return RawVisibility::private(),
|
||||
Some(node) => node,
|
||||
};
|
||||
Self::from_ast_with_span_map(db, node, span_map)
|
||||
Self::from_ast_with_span_map(db, node, span_for_range)
|
||||
}
|
||||
|
||||
fn from_ast_with_span_map(
|
||||
db: &dyn DefDatabase,
|
||||
node: ast::Visibility,
|
||||
span_map: SpanMapRef<'_>,
|
||||
span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId,
|
||||
) -> RawVisibility {
|
||||
let path = match node.kind() {
|
||||
ast::VisibilityKind::In(path) => {
|
||||
let path = ModPath::from_src(db.upcast(), path, span_map);
|
||||
let path = ModPath::from_src(db.upcast(), path, span_for_range);
|
||||
match path {
|
||||
None => return RawVisibility::private(),
|
||||
Some(path) => path,
|
||||
|
@ -28,7 +28,6 @@ intern.workspace = true
|
||||
base-db.workspace = true
|
||||
cfg.workspace = true
|
||||
syntax.workspace = true
|
||||
profile.workspace = true
|
||||
tt.workspace = true
|
||||
mbe.workspace = true
|
||||
limit.workspace = true
|
||||
@ -38,4 +37,4 @@ span.workspace = true
|
||||
expect-test = "1.4.0"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
workspace = true
|
||||
|
@ -90,7 +90,7 @@ impl RawAttrs {
|
||||
}
|
||||
|
||||
/// Processes `cfg_attr`s, returning the resulting semantic `Attrs`.
|
||||
// FIXME: This should return a different type
|
||||
// FIXME: This should return a different type, signaling it was filtered?
|
||||
pub fn filter(self, db: &dyn ExpandDatabase, krate: CrateId) -> RawAttrs {
|
||||
let has_cfg_attrs = self
|
||||
.iter()
|
||||
@ -201,7 +201,9 @@ impl Attr {
|
||||
span_map: SpanMapRef<'_>,
|
||||
id: AttrId,
|
||||
) -> Option<Attr> {
|
||||
let path = Interned::new(ModPath::from_src(db, ast.path()?, span_map)?);
|
||||
let path = Interned::new(ModPath::from_src(db, ast.path()?, &mut |range| {
|
||||
span_map.span_for_range(range).ctx
|
||||
})?);
|
||||
let span = span_map.span_for_range(ast.syntax().text_range());
|
||||
let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
|
||||
let value = match lit.kind() {
|
||||
|
@ -4,23 +4,17 @@ use span::{MacroCallId, Span};
|
||||
use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallKind};
|
||||
|
||||
macro_rules! register_builtin {
|
||||
($expand_fn:ident: $(($name:ident, $variant:ident) => $expand:ident),* ) => {
|
||||
($(($name:ident, $variant:ident) => $expand:ident),* ) => {
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub enum BuiltinAttrExpander {
|
||||
$($variant),*
|
||||
}
|
||||
|
||||
impl BuiltinAttrExpander {
|
||||
pub fn $expand_fn(
|
||||
&self,
|
||||
db: &dyn ExpandDatabase,
|
||||
id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
let expander = match *self {
|
||||
pub fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::Subtree) -> ExpandResult<tt::Subtree> {
|
||||
match *self {
|
||||
$( BuiltinAttrExpander::$variant => $expand, )*
|
||||
};
|
||||
expander(db, id, tt)
|
||||
}
|
||||
}
|
||||
|
||||
fn find_by_name(name: &name::Name) -> Option<Self> {
|
||||
@ -35,6 +29,15 @@ macro_rules! register_builtin {
|
||||
}
|
||||
|
||||
impl BuiltinAttrExpander {
|
||||
pub fn expand(
|
||||
&self,
|
||||
db: &dyn ExpandDatabase,
|
||||
id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
self.expander()(db, id, tt)
|
||||
}
|
||||
|
||||
pub fn is_derive(self) -> bool {
|
||||
matches!(self, BuiltinAttrExpander::Derive | BuiltinAttrExpander::DeriveConst)
|
||||
}
|
||||
@ -46,7 +49,7 @@ impl BuiltinAttrExpander {
|
||||
}
|
||||
}
|
||||
|
||||
register_builtin! { expand:
|
||||
register_builtin! {
|
||||
(bench, Bench) => dummy_attr_expand,
|
||||
(cfg, Cfg) => dummy_attr_expand,
|
||||
(cfg_attr, CfgAttr) => dummy_attr_expand,
|
||||
|
@ -10,10 +10,12 @@ use crate::{
|
||||
hygiene::span_with_def_site_ctxt,
|
||||
name::{AsName, Name},
|
||||
quote::dollar_crate,
|
||||
span_map::SpanMapRef,
|
||||
span_map::ExpansionSpanMap,
|
||||
tt,
|
||||
};
|
||||
use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds};
|
||||
use syntax::ast::{
|
||||
self, AstNode, FieldList, HasAttrs, HasGenericParams, HasModuleItem, HasName, HasTypeBounds,
|
||||
};
|
||||
|
||||
use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult};
|
||||
|
||||
@ -25,20 +27,10 @@ macro_rules! register_builtin {
|
||||
}
|
||||
|
||||
impl BuiltinDeriveExpander {
|
||||
pub fn expand(
|
||||
&self,
|
||||
db: &dyn ExpandDatabase,
|
||||
id: MacroCallId,
|
||||
tt: &ast::Adt,
|
||||
token_map: SpanMapRef<'_>,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
let expander = match *self {
|
||||
pub fn expander(&self) -> fn(Span, &tt::Subtree) -> ExpandResult<tt::Subtree> {
|
||||
match *self {
|
||||
$( BuiltinDeriveExpander::$trait => $expand, )*
|
||||
};
|
||||
|
||||
let span = db.lookup_intern_macro_call(id).call_site;
|
||||
let span = span_with_def_site_ctxt(db, span, id);
|
||||
expander(span, tt, token_map)
|
||||
}
|
||||
}
|
||||
|
||||
fn find_by_name(name: &name::Name) -> Option<Self> {
|
||||
@ -52,6 +44,19 @@ macro_rules! register_builtin {
|
||||
};
|
||||
}
|
||||
|
||||
impl BuiltinDeriveExpander {
|
||||
pub fn expand(
|
||||
&self,
|
||||
db: &dyn ExpandDatabase,
|
||||
id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
let span = db.lookup_intern_macro_call(id).call_site;
|
||||
let span = span_with_def_site_ctxt(db, span, id);
|
||||
self.expander()(span, tt)
|
||||
}
|
||||
}
|
||||
|
||||
register_builtin! {
|
||||
Copy => copy_expand,
|
||||
Clone => clone_expand,
|
||||
@ -122,7 +127,7 @@ impl VariantShape {
|
||||
}
|
||||
}
|
||||
|
||||
fn from(tm: SpanMapRef<'_>, value: Option<FieldList>) -> Result<Self, ExpandError> {
|
||||
fn from(tm: &ExpansionSpanMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
|
||||
let r = match value {
|
||||
None => VariantShape::Unit,
|
||||
Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
|
||||
@ -198,11 +203,13 @@ struct BasicAdtInfo {
|
||||
associated_types: Vec<tt::Subtree>,
|
||||
}
|
||||
|
||||
fn parse_adt(
|
||||
tm: SpanMapRef<'_>,
|
||||
adt: &ast::Adt,
|
||||
call_site: Span,
|
||||
) -> Result<BasicAdtInfo, ExpandError> {
|
||||
fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandError> {
|
||||
let (parsed, tm) = &mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems);
|
||||
let macro_items = ast::MacroItems::cast(parsed.syntax_node())
|
||||
.ok_or_else(|| ExpandError::other("invalid item definition"))?;
|
||||
let item = macro_items.items().next().ok_or_else(|| ExpandError::other("no item found"))?;
|
||||
let adt = &ast::Adt::cast(item.syntax().clone())
|
||||
.ok_or_else(|| ExpandError::other("expected struct, enum or union"))?;
|
||||
let (name, generic_param_list, where_clause, shape) = match adt {
|
||||
ast::Adt::Struct(it) => (
|
||||
it.name(),
|
||||
@ -318,14 +325,14 @@ fn parse_adt(
|
||||
}
|
||||
|
||||
fn name_to_token(
|
||||
token_map: SpanMapRef<'_>,
|
||||
token_map: &ExpansionSpanMap,
|
||||
name: Option<ast::Name>,
|
||||
) -> Result<tt::Ident, ExpandError> {
|
||||
let name = name.ok_or_else(|| {
|
||||
debug!("parsed item has no name");
|
||||
ExpandError::other("missing name")
|
||||
})?;
|
||||
let span = token_map.span_for_range(name.syntax().text_range());
|
||||
let span = token_map.span_at(name.syntax().text_range().start());
|
||||
let name_token = tt::Ident { span, text: name.text().into() };
|
||||
Ok(name_token)
|
||||
}
|
||||
@ -362,14 +369,12 @@ fn name_to_token(
|
||||
/// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
|
||||
/// therefore does not get bound by the derived trait.
|
||||
fn expand_simple_derive(
|
||||
// FIXME: use
|
||||
invoc_span: Span,
|
||||
tt: &ast::Adt,
|
||||
tm: SpanMapRef<'_>,
|
||||
tt: &tt::Subtree,
|
||||
trait_path: tt::Subtree,
|
||||
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
let info = match parse_adt(tm, tt, invoc_span) {
|
||||
let info = match parse_adt(tt, invoc_span) {
|
||||
Ok(info) => info,
|
||||
Err(e) => {
|
||||
return ExpandResult::new(
|
||||
@ -412,14 +417,14 @@ fn expand_simple_derive(
|
||||
ExpandResult::ok(expanded)
|
||||
}
|
||||
|
||||
fn copy_expand(span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>) -> ExpandResult<tt::Subtree> {
|
||||
fn copy_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
|
||||
let krate = dollar_crate(span);
|
||||
expand_simple_derive(span, tt, tm, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
|
||||
expand_simple_derive(span, tt, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
|
||||
}
|
||||
|
||||
fn clone_expand(span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>) -> ExpandResult<tt::Subtree> {
|
||||
fn clone_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
|
||||
let krate = dollar_crate(span);
|
||||
expand_simple_derive(span, tt, tm, quote! {span => #krate::clone::Clone }, |adt| {
|
||||
expand_simple_derive(span, tt, quote! {span => #krate::clone::Clone }, |adt| {
|
||||
if matches!(adt.shape, AdtShape::Union) {
|
||||
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
|
||||
return quote! {span =>
|
||||
@ -468,9 +473,9 @@ fn and_and(span: Span) -> tt::Subtree {
|
||||
quote! {span => #and& }
|
||||
}
|
||||
|
||||
fn default_expand(span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>) -> ExpandResult<tt::Subtree> {
|
||||
fn default_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
|
||||
let krate = &dollar_crate(span);
|
||||
expand_simple_derive(span, tt, tm, quote! {span => #krate::default::Default }, |adt| {
|
||||
expand_simple_derive(span, tt, quote! {span => #krate::default::Default }, |adt| {
|
||||
let body = match &adt.shape {
|
||||
AdtShape::Struct(fields) => {
|
||||
let name = &adt.name;
|
||||
@ -507,9 +512,9 @@ fn default_expand(span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>) -> ExpandResult
|
||||
})
|
||||
}
|
||||
|
||||
fn debug_expand(span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>) -> ExpandResult<tt::Subtree> {
|
||||
fn debug_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
|
||||
let krate = &dollar_crate(span);
|
||||
expand_simple_derive(span, tt, tm, quote! {span => #krate::fmt::Debug }, |adt| {
|
||||
expand_simple_derive(span, tt, quote! {span => #krate::fmt::Debug }, |adt| {
|
||||
let for_variant = |name: String, v: &VariantShape| match v {
|
||||
VariantShape::Struct(fields) => {
|
||||
let for_fields = fields.iter().map(|it| {
|
||||
@ -579,9 +584,9 @@ fn debug_expand(span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>) -> ExpandResult<t
|
||||
})
|
||||
}
|
||||
|
||||
fn hash_expand(span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>) -> ExpandResult<tt::Subtree> {
|
||||
fn hash_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
|
||||
let krate = &dollar_crate(span);
|
||||
expand_simple_derive(span, tt, tm, quote! {span => #krate::hash::Hash }, |adt| {
|
||||
expand_simple_derive(span, tt, quote! {span => #krate::hash::Hash }, |adt| {
|
||||
if matches!(adt.shape, AdtShape::Union) {
|
||||
// FIXME: Return expand error here
|
||||
return quote! {span =>};
|
||||
@ -626,14 +631,14 @@ fn hash_expand(span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>) -> ExpandResult<tt
|
||||
})
|
||||
}
|
||||
|
||||
fn eq_expand(span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>) -> ExpandResult<tt::Subtree> {
|
||||
fn eq_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
|
||||
let krate = dollar_crate(span);
|
||||
expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
|
||||
expand_simple_derive(span, tt, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
|
||||
}
|
||||
|
||||
fn partial_eq_expand(span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>) -> ExpandResult<tt::Subtree> {
|
||||
fn partial_eq_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
|
||||
let krate = dollar_crate(span);
|
||||
expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialEq }, |adt| {
|
||||
expand_simple_derive(span, tt, quote! {span => #krate::cmp::PartialEq }, |adt| {
|
||||
if matches!(adt.shape, AdtShape::Union) {
|
||||
// FIXME: Return expand error here
|
||||
return quote! {span =>};
|
||||
@ -703,9 +708,9 @@ fn self_and_other_patterns(
|
||||
(self_patterns, other_patterns)
|
||||
}
|
||||
|
||||
fn ord_expand(span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>) -> ExpandResult<tt::Subtree> {
|
||||
fn ord_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
|
||||
let krate = &dollar_crate(span);
|
||||
expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Ord }, |adt| {
|
||||
expand_simple_derive(span, tt, quote! {span => #krate::cmp::Ord }, |adt| {
|
||||
fn compare(
|
||||
krate: &tt::Ident,
|
||||
left: tt::Subtree,
|
||||
@ -761,9 +766,9 @@ fn ord_expand(span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>) -> ExpandResult<tt:
|
||||
})
|
||||
}
|
||||
|
||||
fn partial_ord_expand(span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>) -> ExpandResult<tt::Subtree> {
|
||||
fn partial_ord_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
|
||||
let krate = &dollar_crate(span);
|
||||
expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialOrd }, |adt| {
|
||||
expand_simple_derive(span, tt, quote! {span => #krate::cmp::PartialOrd }, |adt| {
|
||||
fn compare(
|
||||
krate: &tt::Ident,
|
||||
left: tt::Subtree,
|
||||
|
@ -31,36 +31,18 @@ macro_rules! register_builtin {
|
||||
}
|
||||
|
||||
impl BuiltinFnLikeExpander {
|
||||
pub fn expand(
|
||||
&self,
|
||||
db: &dyn ExpandDatabase,
|
||||
id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
let expander = match *self {
|
||||
pub fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::Subtree, Span) -> ExpandResult<tt::Subtree> {
|
||||
match *self {
|
||||
$( BuiltinFnLikeExpander::$kind => $expand, )*
|
||||
};
|
||||
|
||||
let span = db.lookup_intern_macro_call(id).call_site;
|
||||
let span = span_with_def_site_ctxt(db, span, id);
|
||||
expander(db, id, tt, span)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl EagerExpander {
|
||||
pub fn expand(
|
||||
&self,
|
||||
db: &dyn ExpandDatabase,
|
||||
id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
let expander = match *self {
|
||||
pub fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::Subtree, Span) -> ExpandResult<tt::Subtree> {
|
||||
match *self {
|
||||
$( EagerExpander::$e_kind => $e_expand, )*
|
||||
};
|
||||
|
||||
let span = db.lookup_intern_macro_call(id).call_site;
|
||||
let span = span_with_def_site_ctxt(db, span, id);
|
||||
expander(db, id, tt, span)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -74,7 +56,31 @@ macro_rules! register_builtin {
|
||||
};
|
||||
}
|
||||
|
||||
impl BuiltinFnLikeExpander {
|
||||
pub fn expand(
|
||||
&self,
|
||||
db: &dyn ExpandDatabase,
|
||||
id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
let span = db.lookup_intern_macro_call(id).call_site;
|
||||
let span = span_with_def_site_ctxt(db, span, id);
|
||||
self.expander()(db, id, tt, span)
|
||||
}
|
||||
}
|
||||
|
||||
impl EagerExpander {
|
||||
pub fn expand(
|
||||
&self,
|
||||
db: &dyn ExpandDatabase,
|
||||
id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
let span = db.lookup_intern_macro_call(id).call_site;
|
||||
let span = span_with_def_site_ctxt(db, span, id);
|
||||
self.expander()(db, id, tt, span)
|
||||
}
|
||||
|
||||
pub fn is_include(&self) -> bool {
|
||||
matches!(self, EagerExpander::Include)
|
||||
}
|
||||
|
@ -11,14 +11,14 @@ use triomphe::Arc;
|
||||
use crate::{db::ExpandDatabase, proc_macro::ProcMacros};
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct Change {
|
||||
pub struct ChangeWithProcMacros {
|
||||
pub source_change: FileChange,
|
||||
pub proc_macros: Option<ProcMacros>,
|
||||
pub toolchains: Option<Vec<Option<Version>>>,
|
||||
pub target_data_layouts: Option<Vec<TargetLayoutLoadResult>>,
|
||||
}
|
||||
|
||||
impl Change {
|
||||
impl ChangeWithProcMacros {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
@ -215,11 +215,6 @@ pub fn expand_speculative(
|
||||
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
|
||||
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, loc.call_site)
|
||||
}
|
||||
MacroDefKind::BuiltInDerive(expander, ..) => {
|
||||
// this cast is a bit sus, can we avoid losing the typedness here?
|
||||
let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
|
||||
expander.expand(db, actual_macro_call, &adt, span_map)
|
||||
}
|
||||
MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand_unhygienic(
|
||||
db,
|
||||
tt,
|
||||
@ -227,6 +222,9 @@ pub fn expand_speculative(
|
||||
loc.call_site,
|
||||
),
|
||||
MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
|
||||
MacroDefKind::BuiltInDerive(it, ..) => {
|
||||
it.expand(db, actual_macro_call, &tt).map_err(Into::into)
|
||||
}
|
||||
MacroDefKind::BuiltInEager(it, _) => {
|
||||
it.expand(db, actual_macro_call, &tt).map_err(Into::into)
|
||||
}
|
||||
@ -303,7 +301,7 @@ fn parse_macro_expansion_error(
|
||||
macro_call_id: MacroCallId,
|
||||
) -> ExpandResult<Box<[SyntaxError]>> {
|
||||
db.parse_macro_expansion(MacroFileId { macro_call_id })
|
||||
.map(|it| it.0.errors().to_vec().into_boxed_slice())
|
||||
.map(|it| it.0.errors().into_boxed_slice())
|
||||
}
|
||||
|
||||
pub(crate) fn parse_with_map(
|
||||
@ -321,6 +319,7 @@ pub(crate) fn parse_with_map(
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: for derive attributes, this will return separate copies of the same structures!
|
||||
fn macro_arg(
|
||||
db: &dyn ExpandDatabase,
|
||||
id: MacroCallId,
|
||||
@ -445,7 +444,7 @@ fn macro_arg(
|
||||
|
||||
if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
|
||||
match parse.errors() {
|
||||
[] => ValueResult::ok((Arc::new(tt), undo_info)),
|
||||
errors if errors.is_empty() => ValueResult::ok((Arc::new(tt), undo_info)),
|
||||
errors => ValueResult::new(
|
||||
(Arc::new(tt), undo_info),
|
||||
// Box::<[_]>::from(res.errors()), not stable yet
|
||||
@ -526,16 +525,6 @@ fn macro_expand(
|
||||
|
||||
let ExpandResult { value: tt, mut err } = match loc.def.kind {
|
||||
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id).map(CowArc::Arc),
|
||||
MacroDefKind::BuiltInDerive(expander, ..) => {
|
||||
let (root, map) = parse_with_map(db, loc.kind.file_id());
|
||||
let root = root.syntax_node();
|
||||
let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() };
|
||||
let node = ast_id.to_ptr(db).to_node(&root);
|
||||
|
||||
// FIXME: Use censoring
|
||||
let _censor = censor_for_macro_input(&loc, node.syntax());
|
||||
expander.expand(db, macro_call_id, &node, map.as_ref())
|
||||
}
|
||||
_ => {
|
||||
let ValueResult { value: (macro_arg, undo_info), err } = db.macro_arg(macro_call_id);
|
||||
let format_parse_err = |err: Arc<Box<[SyntaxError]>>| {
|
||||
@ -569,6 +558,9 @@ fn macro_expand(
|
||||
err: err.map(format_parse_err),
|
||||
};
|
||||
}
|
||||
MacroDefKind::BuiltInDerive(it, _) => {
|
||||
it.expand(db, macro_call_id, arg).map_err(Into::into)
|
||||
}
|
||||
MacroDefKind::BuiltInEager(it, _) => {
|
||||
it.expand(db, macro_call_id, arg).map_err(Into::into)
|
||||
}
|
||||
|
@ -27,7 +27,6 @@ use crate::{
|
||||
ast::{self, AstNode},
|
||||
db::ExpandDatabase,
|
||||
mod_path::ModPath,
|
||||
span_map::SpanMapRef,
|
||||
EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, Intern,
|
||||
MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
|
||||
};
|
||||
@ -155,10 +154,9 @@ fn eager_macro_recur(
|
||||
}
|
||||
};
|
||||
|
||||
let def = match call
|
||||
.path()
|
||||
.and_then(|path| ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(span_map)))
|
||||
{
|
||||
let def = match call.path().and_then(|path| {
|
||||
ModPath::from_src(db, path, &mut |range| span_map.span_at(range.start()).ctx)
|
||||
}) {
|
||||
Some(path) => match macro_resolver(path.clone()) {
|
||||
Some(def) => def,
|
||||
None => {
|
||||
|
@ -252,7 +252,7 @@ impl InFile<&SyntaxNode> {
|
||||
map_node_range_up(db, &db.expansion_span_map(file_id), self.value.text_range())?;
|
||||
|
||||
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
|
||||
// keep pre-token map rewrite behaviour.
|
||||
// keep pre-token map rewrite behavior.
|
||||
if !ctx.is_root() {
|
||||
return None;
|
||||
}
|
||||
|
@ -9,7 +9,6 @@ use crate::{
|
||||
db::ExpandDatabase,
|
||||
hygiene::{marks_rev, SyntaxContextExt, Transparency},
|
||||
name::{known, AsName, Name},
|
||||
span_map::SpanMapRef,
|
||||
tt,
|
||||
};
|
||||
use base_db::CrateId;
|
||||
@ -49,9 +48,9 @@ impl ModPath {
|
||||
pub fn from_src(
|
||||
db: &dyn ExpandDatabase,
|
||||
path: ast::Path,
|
||||
span_map: SpanMapRef<'_>,
|
||||
span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId,
|
||||
) -> Option<ModPath> {
|
||||
convert_path(db, path, span_map)
|
||||
convert_path(db, path, span_for_range)
|
||||
}
|
||||
|
||||
pub fn from_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModPath> {
|
||||
@ -144,6 +143,12 @@ impl ModPath {
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<Name> for ModPath {
|
||||
fn extend<T: IntoIterator<Item = Name>>(&mut self, iter: T) {
|
||||
self.segments.extend(iter);
|
||||
}
|
||||
}
|
||||
|
||||
struct Display<'a> {
|
||||
db: &'a dyn ExpandDatabase,
|
||||
path: &'a ModPath,
|
||||
@ -215,7 +220,7 @@ fn display_fmt_path(
|
||||
fn convert_path(
|
||||
db: &dyn ExpandDatabase,
|
||||
path: ast::Path,
|
||||
span_map: SpanMapRef<'_>,
|
||||
span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId,
|
||||
) -> Option<ModPath> {
|
||||
let mut segments = path.segments();
|
||||
|
||||
@ -224,12 +229,9 @@ fn convert_path(
|
||||
ast::PathSegmentKind::Name(name_ref) => {
|
||||
if name_ref.text() == "$crate" {
|
||||
ModPath::from_kind(
|
||||
resolve_crate_root(
|
||||
db,
|
||||
span_map.span_for_range(name_ref.syntax().text_range()).ctx,
|
||||
)
|
||||
.map(PathKind::DollarCrate)
|
||||
.unwrap_or(PathKind::Crate),
|
||||
resolve_crate_root(db, span_for_range(name_ref.syntax().text_range()))
|
||||
.map(PathKind::DollarCrate)
|
||||
.unwrap_or(PathKind::Crate),
|
||||
)
|
||||
} else {
|
||||
let mut res = ModPath::from_kind(
|
||||
@ -283,7 +285,7 @@ fn convert_path(
|
||||
// We follow what it did anyway :)
|
||||
if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain {
|
||||
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
|
||||
let syn_ctx = span_map.span_for_range(segment.syntax().text_range()).ctx;
|
||||
let syn_ctx = span_for_range(segment.syntax().text_range());
|
||||
if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn {
|
||||
if db.lookup_intern_macro_call(macro_call_id).def.local_inner {
|
||||
mod_path.kind = match resolve_crate_root(db, syn_ctx) {
|
||||
|
@ -111,15 +111,11 @@ impl Name {
|
||||
self == &Name::missing()
|
||||
}
|
||||
|
||||
/// Generates a new name which is only equal to itself, by incrementing a counter. Due
|
||||
/// its implementation, it should not be used in things that salsa considers, like
|
||||
/// type names or field names, and it should be only used in names of local variables
|
||||
/// and labels and similar things.
|
||||
pub fn generate_new_name() -> Name {
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
static CNT: AtomicUsize = AtomicUsize::new(0);
|
||||
let c = CNT.fetch_add(1, Ordering::Relaxed);
|
||||
Name::new_text(format_smolstr!("<ra@gennew>{c}"))
|
||||
/// Generates a new name that attempts to be unique. Should only be used when body lowering and
|
||||
/// creating desugared locals and labels. The caller is responsible for picking an index
|
||||
/// that is stable across re-executions
|
||||
pub fn generate_new_name(idx: usize) -> Name {
|
||||
Name::new_text(format_smolstr!("<ra@gennew>{idx}"))
|
||||
}
|
||||
|
||||
/// Returns the tuple index this name represents if it is a tuple field.
|
||||
|
@ -31,11 +31,13 @@ impl mbe::SpanMapper<Span> for SpanMap {
|
||||
self.span_for_range(range)
|
||||
}
|
||||
}
|
||||
|
||||
impl mbe::SpanMapper<Span> for SpanMapRef<'_> {
|
||||
fn span_for(&self, range: TextRange) -> Span {
|
||||
self.span_for_range(range)
|
||||
}
|
||||
}
|
||||
|
||||
impl SpanMap {
|
||||
pub fn span_for_range(&self, range: TextRange) -> Span {
|
||||
match self {
|
||||
|
@ -45,7 +45,6 @@ intern.workspace = true
|
||||
hir-def.workspace = true
|
||||
hir-expand.workspace = true
|
||||
base-db.workspace = true
|
||||
profile.workspace = true
|
||||
syntax.workspace = true
|
||||
limit.workspace = true
|
||||
|
||||
|
@ -31,12 +31,8 @@ use hir_expand::name::Name;
|
||||
|
||||
#[salsa::query_group(HirDatabaseStorage)]
|
||||
pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
|
||||
#[salsa::invoke(infer_wait)]
|
||||
#[salsa::transparent]
|
||||
fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
|
||||
|
||||
#[salsa::invoke(crate::infer::infer_query)]
|
||||
fn infer_query(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
|
||||
fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
|
||||
|
||||
// region:mir
|
||||
|
||||
@ -258,17 +254,8 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
|
||||
env: Arc<TraitEnvironment>,
|
||||
) -> Ty;
|
||||
|
||||
#[salsa::invoke(trait_solve_wait)]
|
||||
#[salsa::transparent]
|
||||
fn trait_solve(
|
||||
&self,
|
||||
krate: CrateId,
|
||||
block: Option<BlockId>,
|
||||
goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
|
||||
) -> Option<crate::Solution>;
|
||||
|
||||
#[salsa::invoke(crate::traits::trait_solve_query)]
|
||||
fn trait_solve_query(
|
||||
fn trait_solve(
|
||||
&self,
|
||||
krate: CrateId,
|
||||
block: Option<BlockId>,
|
||||
@ -284,38 +271,6 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
|
||||
) -> chalk_ir::ProgramClauses<Interner>;
|
||||
}
|
||||
|
||||
fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
|
||||
let detail = match def {
|
||||
DefWithBodyId::FunctionId(it) => db.function_data(it).name.display(db.upcast()).to_string(),
|
||||
DefWithBodyId::StaticId(it) => {
|
||||
db.static_data(it).name.clone().display(db.upcast()).to_string()
|
||||
}
|
||||
DefWithBodyId::ConstId(it) => db
|
||||
.const_data(it)
|
||||
.name
|
||||
.clone()
|
||||
.unwrap_or_else(Name::missing)
|
||||
.display(db.upcast())
|
||||
.to_string(),
|
||||
DefWithBodyId::VariantId(it) => {
|
||||
db.enum_variant_data(it).name.display(db.upcast()).to_string()
|
||||
}
|
||||
DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"),
|
||||
};
|
||||
let _p = tracing::span!(tracing::Level::INFO, "infer:wait", ?detail).entered();
|
||||
db.infer_query(def)
|
||||
}
|
||||
|
||||
fn trait_solve_wait(
|
||||
db: &dyn HirDatabase,
|
||||
krate: CrateId,
|
||||
block: Option<BlockId>,
|
||||
goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
|
||||
) -> Option<crate::Solution> {
|
||||
let _p = tracing::span!(tracing::Level::INFO, "trait_solve::wait").entered();
|
||||
db.trait_solve_query(krate, block, goal)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hir_database_is_object_safe() {
|
||||
fn _assert_object_safe(_: &dyn HirDatabase) {}
|
||||
|
@ -60,12 +60,17 @@ pub enum BodyValidationDiagnostic {
}

impl BodyValidationDiagnostic {
pub fn collect(db: &dyn HirDatabase, owner: DefWithBodyId) -> Vec<BodyValidationDiagnostic> {
pub fn collect(
db: &dyn HirDatabase,
owner: DefWithBodyId,
validate_lints: bool,
) -> Vec<BodyValidationDiagnostic> {
let _p =
tracing::span!(tracing::Level::INFO, "BodyValidationDiagnostic::collect").entered();
let infer = db.infer(owner);
let body = db.body(owner);
let mut validator = ExprValidator { owner, body, infer, diagnostics: Vec::new() };
let mut validator =
ExprValidator { owner, body, infer, diagnostics: Vec::new(), validate_lints };
validator.validate_body(db);
validator.diagnostics
}
@ -76,6 +81,7 @@ struct ExprValidator {
body: Arc<Body>,
infer: Arc<InferenceResult>,
diagnostics: Vec<BodyValidationDiagnostic>,
validate_lints: bool,
}

impl ExprValidator {
@ -139,6 +145,9 @@ impl ExprValidator {
expr: &Expr,
filter_map_next_checker: &mut Option<FilterMapNextChecker>,
) {
if !self.validate_lints {
return;
}
// Check that the number of arguments matches the number of parameters.

if self.infer.expr_type_mismatches().next().is_some() {
@ -173,7 +182,7 @@ impl ExprValidator {
db: &dyn HirDatabase,
) {
let scrut_ty = &self.infer[scrutinee_expr];
if scrut_ty.is_unknown() {
if scrut_ty.contains_unknown() {
return;
}

@ -230,6 +239,7 @@ impl ExprValidator {
m_arms.as_slice(),
scrut_ty.clone(),
ValidityConstraint::ValidOnly,
None,
) {
Ok(report) => report,
Err(()) => return,
@ -257,6 +267,9 @@ impl ExprValidator {
};
let Some(initializer) = initializer else { continue };
let ty = &self.infer[initializer];
if ty.contains_unknown() {
continue;
}

let mut have_errors = false;
let deconstructed_pat = self.lower_pattern(&cx, pat, db, &mut have_errors);
@ -274,6 +287,7 @@ impl ExprValidator {
&[match_arm],
ty.clone(),
ValidityConstraint::ValidOnly,
None,
) {
Ok(v) => v,
Err(e) => {
@ -308,6 +322,9 @@ impl ExprValidator {
}

fn check_for_trailing_return(&mut self, body_expr: ExprId, body: &Body) {
if !self.validate_lints {
return;
}
match &body.exprs[body_expr] {
Expr::Block { statements, tail, .. } => {
let last_stmt = tail.or_else(|| match statements.last()? {
@ -340,6 +357,9 @@ impl ExprValidator {
}

fn check_for_unnecessary_else(&mut self, id: ExprId, expr: &Expr, db: &dyn HirDatabase) {
if !self.validate_lints {
return;
}
if let Expr::If { condition: _, then_branch, else_branch } = expr {
if else_branch.is_none() {
return;

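The new `validate_lints` flag threaded through `collect` and `ExprValidator` above lets callers skip the style-level checks (trailing `return`, unnecessary `else`) while keeping the correctness checks. A rough sketch of that gating pattern, with hypothetical stand-in types rather than the real rust-analyzer ones:

    // Hypothetical, simplified stand-ins for the real diagnostic types.
    enum Diagnostic {
        TypeError(String),
        StyleLint(String),
    }

    struct Validator {
        validate_lints: bool,
        diagnostics: Vec<Diagnostic>,
    }

    impl Validator {
        // Correctness checks always run.
        fn check_types(&mut self) {
            self.diagnostics.push(Diagnostic::TypeError("mismatched types".into()));
        }

        // Style checks bail out early when lints are disabled, mirroring the
        // `if !self.validate_lints { return; }` guards in the hunks above.
        fn check_trailing_return(&mut self) {
            if !self.validate_lints {
                return;
            }
            self.diagnostics.push(Diagnostic::StyleLint("remove trailing `return`".into()));
        }
    }

    fn main() {
        let mut v = Validator { validate_lints: false, diagnostics: Vec::new() };
        v.check_types();
        v.check_trailing_return();
        // Only the correctness diagnostic is reported with style lints off.
        assert_eq!(v.diagnostics.len(), 1);
    }
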
@ -8,7 +8,7 @@ use rustc_hash::FxHashMap;
|
||||
use rustc_pattern_analysis::{
|
||||
constructor::{Constructor, ConstructorSet, VariantVisibility},
|
||||
index::IdxContainer,
|
||||
Captures, TypeCx,
|
||||
Captures, PrivateUninhabitedField, TypeCx,
|
||||
};
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
use stdx::never;
|
||||
@ -88,39 +88,21 @@ impl<'p> MatchCheckCtx<'p> {
|
||||
}
|
||||
}
|
||||
|
||||
// In the cases of either a `#[non_exhaustive]` field list or a non-public field, we hide
|
||||
// uninhabited fields in order not to reveal the uninhabitedness of the whole variant.
|
||||
// This lists the fields we keep along with their types.
|
||||
fn list_variant_nonhidden_fields<'a>(
|
||||
// This lists the fields of a variant along with their types.
|
||||
fn list_variant_fields<'a>(
|
||||
&'a self,
|
||||
ty: &'a Ty,
|
||||
variant: VariantId,
|
||||
) -> impl Iterator<Item = (LocalFieldId, Ty)> + Captures<'a> + Captures<'p> {
|
||||
let cx = self;
|
||||
let (adt, substs) = ty.as_adt().unwrap();
|
||||
let (_, substs) = ty.as_adt().unwrap();
|
||||
|
||||
let adt_is_local = variant.module(cx.db.upcast()).krate() == cx.module.krate();
|
||||
let field_tys = self.db.field_types(variant);
|
||||
let fields_len = variant.variant_data(self.db.upcast()).fields().len() as u32;
|
||||
|
||||
// Whether we must not match the fields of this variant exhaustively.
|
||||
let is_non_exhaustive =
|
||||
cx.db.attrs(variant.into()).by_key("non_exhaustive").exists() && !adt_is_local;
|
||||
|
||||
let visibility = cx.db.field_visibilities(variant);
|
||||
let field_ty = cx.db.field_types(variant);
|
||||
let fields_len = variant.variant_data(cx.db.upcast()).fields().len() as u32;
|
||||
|
||||
(0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).filter_map(move |fid| {
|
||||
let ty = field_ty[fid].clone().substitute(Interner, substs);
|
||||
let ty = normalize(cx.db, cx.db.trait_environment_for_body(cx.body), ty);
|
||||
let is_visible = matches!(adt, hir_def::AdtId::EnumId(..))
|
||||
|| visibility[fid].is_visible_from(cx.db.upcast(), cx.module);
|
||||
let is_uninhabited = cx.is_uninhabited(&ty);
|
||||
|
||||
if is_uninhabited && (!is_visible || is_non_exhaustive) {
|
||||
None
|
||||
} else {
|
||||
Some((fid, ty))
|
||||
}
|
||||
(0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).map(move |fid| {
|
||||
let ty = field_tys[fid].clone().substitute(Interner, substs);
|
||||
let ty = normalize(self.db, self.db.trait_environment_for_body(self.body), ty);
|
||||
(fid, ty)
|
||||
})
|
||||
}
|
||||
|
||||
@ -199,23 +181,16 @@ impl<'p> MatchCheckCtx<'p> {
|
||||
}
|
||||
};
|
||||
let variant = Self::variant_id_for_adt(&ctor, adt.0).unwrap();
|
||||
let fields_len = variant.variant_data(self.db.upcast()).fields().len();
|
||||
// For each field in the variant, we store the relevant index into `self.fields` if any.
|
||||
let mut field_id_to_id: Vec<Option<usize>> = vec![None; fields_len];
|
||||
let tys = self
|
||||
.list_variant_nonhidden_fields(&pat.ty, variant)
|
||||
.enumerate()
|
||||
.map(|(i, (fid, ty))| {
|
||||
let field_idx: u32 = fid.into_raw().into();
|
||||
field_id_to_id[field_idx as usize] = Some(i);
|
||||
ty
|
||||
});
|
||||
let mut wilds: Vec<_> = tys.map(DeconstructedPat::wildcard).collect();
|
||||
// Fill a vec with wildcards, then place the fields we have at the right
|
||||
// index.
|
||||
let mut wilds: Vec<_> = self
|
||||
.list_variant_fields(&pat.ty, variant)
|
||||
.map(|(_, ty)| ty)
|
||||
.map(DeconstructedPat::wildcard)
|
||||
.collect();
|
||||
for pat in subpatterns {
|
||||
let field_idx: u32 = pat.field.into_raw().into();
|
||||
if let Some(i) = field_id_to_id[field_idx as usize] {
|
||||
wilds[i] = self.lower_pat(&pat.pattern);
|
||||
}
|
||||
let field_id: u32 = pat.field.into_raw().into();
|
||||
wilds[field_id as usize] = self.lower_pat(&pat.pattern);
|
||||
}
|
||||
fields = wilds;
|
||||
}
|
||||
@ -263,7 +238,7 @@ impl<'p> MatchCheckCtx<'p> {
|
||||
TyKind::Adt(adt, substs) => {
|
||||
let variant = Self::variant_id_for_adt(pat.ctor(), adt.0).unwrap();
|
||||
let subpatterns = self
|
||||
.list_variant_nonhidden_fields(pat.ty(), variant)
|
||||
.list_variant_fields(pat.ty(), variant)
|
||||
.zip(subpatterns)
|
||||
.map(|((field, _ty), pattern)| FieldPat { field, pattern })
|
||||
.collect();
|
||||
@ -286,7 +261,7 @@ impl<'p> MatchCheckCtx<'p> {
|
||||
Ref => PatKind::Deref { subpattern: subpatterns.next().unwrap() },
|
||||
Slice(_) => unimplemented!(),
|
||||
&Str(void) => match void {},
|
||||
Wildcard | NonExhaustive | Hidden => PatKind::Wild,
|
||||
Wildcard | NonExhaustive | Hidden | PrivateUninhabited => PatKind::Wild,
|
||||
Missing | F32Range(..) | F64Range(..) | Opaque(..) | Or => {
|
||||
never!("can't convert to pattern: {:?}", pat.ctor());
|
||||
PatKind::Wild
|
||||
@ -326,7 +301,7 @@ impl<'p> TypeCx for MatchCheckCtx<'p> {
|
||||
1
|
||||
} else {
|
||||
let variant = Self::variant_id_for_adt(ctor, adt).unwrap();
|
||||
self.list_variant_nonhidden_fields(ty, variant).count()
|
||||
variant.variant_data(self.db.upcast()).fields().len()
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
@ -337,7 +312,7 @@ impl<'p> TypeCx for MatchCheckCtx<'p> {
|
||||
Ref => 1,
|
||||
Slice(..) => unimplemented!(),
|
||||
Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) | Opaque(..)
|
||||
| NonExhaustive | Hidden | Missing | Wildcard => 0,
|
||||
| NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => 0,
|
||||
Or => {
|
||||
never!("The `Or` constructor doesn't have a fixed arity");
|
||||
0
|
||||
@ -349,13 +324,13 @@ impl<'p> TypeCx for MatchCheckCtx<'p> {
|
||||
&'a self,
|
||||
ctor: &'a rustc_pattern_analysis::constructor::Constructor<Self>,
|
||||
ty: &'a Self::Ty,
|
||||
) -> impl ExactSizeIterator<Item = Self::Ty> + Captures<'a> {
|
||||
let single = |ty| smallvec![ty];
|
||||
) -> impl ExactSizeIterator<Item = (Self::Ty, PrivateUninhabitedField)> + Captures<'a> {
|
||||
let single = |ty| smallvec![(ty, PrivateUninhabitedField(false))];
|
||||
let tys: SmallVec<[_; 2]> = match ctor {
|
||||
Struct | Variant(_) | UnionField => match ty.kind(Interner) {
|
||||
TyKind::Tuple(_, substs) => {
|
||||
let tys = substs.iter(Interner).map(|ty| ty.assert_ty_ref(Interner));
|
||||
tys.cloned().collect()
|
||||
tys.cloned().map(|ty| (ty, PrivateUninhabitedField(false))).collect()
|
||||
}
|
||||
TyKind::Ref(.., rty) => single(rty.clone()),
|
||||
&TyKind::Adt(AdtId(adt), ref substs) => {
|
||||
@ -366,7 +341,27 @@ impl<'p> TypeCx for MatchCheckCtx<'p> {
|
||||
single(subst_ty)
|
||||
} else {
|
||||
let variant = Self::variant_id_for_adt(ctor, adt).unwrap();
|
||||
self.list_variant_nonhidden_fields(ty, variant).map(|(_, ty)| ty).collect()
|
||||
let (adt, _) = ty.as_adt().unwrap();
|
||||
|
||||
let adt_is_local =
|
||||
variant.module(self.db.upcast()).krate() == self.module.krate();
|
||||
// Whether we must not match the fields of this variant exhaustively.
|
||||
let is_non_exhaustive =
|
||||
self.db.attrs(variant.into()).by_key("non_exhaustive").exists()
|
||||
&& !adt_is_local;
|
||||
let visibilities = self.db.field_visibilities(variant);
|
||||
|
||||
self.list_variant_fields(ty, variant)
|
||||
.map(move |(fid, ty)| {
|
||||
let is_visible = matches!(adt, hir_def::AdtId::EnumId(..))
|
||||
|| visibilities[fid]
|
||||
.is_visible_from(self.db.upcast(), self.module);
|
||||
let is_uninhabited = self.is_uninhabited(&ty);
|
||||
let private_uninhabited =
|
||||
is_uninhabited && (!is_visible || is_non_exhaustive);
|
||||
(ty, PrivateUninhabitedField(private_uninhabited))
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
ty_kind => {
|
||||
@ -383,7 +378,7 @@ impl<'p> TypeCx for MatchCheckCtx<'p> {
|
||||
},
|
||||
Slice(_) => unreachable!("Found a `Slice` constructor in match checking"),
|
||||
Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) | Opaque(..)
|
||||
| NonExhaustive | Hidden | Missing | Wildcard => smallvec![],
|
||||
| NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => smallvec![],
|
||||
Or => {
|
||||
never!("called `Fields::wildcards` on an `Or` ctor");
|
||||
smallvec![]
|
||||
@ -478,6 +473,11 @@ impl<'p> TypeCx for MatchCheckCtx<'p> {
|
||||
fn bug(&self, fmt: fmt::Arguments<'_>) {
|
||||
debug!("{}", fmt)
|
||||
}
|
||||
|
||||
fn complexity_exceeded(&self) -> Result<(), Self::Error> {
|
||||
// FIXME(Nadrieril): make use of the complexity counter.
|
||||
Err(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'p> fmt::Debug for MatchCheckCtx<'p> {
|
||||
|
@ -63,6 +63,7 @@ pub struct HirFormatter<'a> {
|
||||
buf: String,
|
||||
curr_size: usize,
|
||||
pub(crate) max_size: Option<usize>,
|
||||
pub entity_limit: Option<usize>,
|
||||
omit_verbose_types: bool,
|
||||
closure_style: ClosureStyle,
|
||||
display_target: DisplayTarget,
|
||||
@ -86,6 +87,7 @@ pub trait HirDisplay {
|
||||
&'a self,
|
||||
db: &'a dyn HirDatabase,
|
||||
max_size: Option<usize>,
|
||||
limited_size: Option<usize>,
|
||||
omit_verbose_types: bool,
|
||||
display_target: DisplayTarget,
|
||||
closure_style: ClosureStyle,
|
||||
@ -101,6 +103,7 @@ pub trait HirDisplay {
|
||||
db,
|
||||
t: self,
|
||||
max_size,
|
||||
limited_size,
|
||||
omit_verbose_types,
|
||||
display_target,
|
||||
closure_style,
|
||||
@ -117,6 +120,7 @@ pub trait HirDisplay {
|
||||
db,
|
||||
t: self,
|
||||
max_size: None,
|
||||
limited_size: None,
|
||||
omit_verbose_types: false,
|
||||
closure_style: ClosureStyle::ImplFn,
|
||||
display_target: DisplayTarget::Diagnostics,
|
||||
@ -137,6 +141,28 @@ pub trait HirDisplay {
|
||||
db,
|
||||
t: self,
|
||||
max_size,
|
||||
limited_size: None,
|
||||
omit_verbose_types: true,
|
||||
closure_style: ClosureStyle::ImplFn,
|
||||
display_target: DisplayTarget::Diagnostics,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a `Display`able type that is human-readable and tries to limit the number of items inside.
|
||||
/// Use this for showing definitions which may contain too many items, like `trait`, `struct`, `enum`
|
||||
fn display_limited<'a>(
|
||||
&'a self,
|
||||
db: &'a dyn HirDatabase,
|
||||
limited_size: Option<usize>,
|
||||
) -> HirDisplayWrapper<'a, Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
HirDisplayWrapper {
|
||||
db,
|
||||
t: self,
|
||||
max_size: None,
|
||||
limited_size,
|
||||
omit_verbose_types: true,
|
||||
closure_style: ClosureStyle::ImplFn,
|
||||
display_target: DisplayTarget::Diagnostics,
|
||||
@ -158,6 +184,7 @@ pub trait HirDisplay {
|
||||
buf: String::with_capacity(20),
|
||||
curr_size: 0,
|
||||
max_size: None,
|
||||
entity_limit: None,
|
||||
omit_verbose_types: false,
|
||||
closure_style: ClosureStyle::ImplFn,
|
||||
display_target: DisplayTarget::SourceCode { module_id, allow_opaque },
|
||||
@ -178,6 +205,7 @@ pub trait HirDisplay {
|
||||
db,
|
||||
t: self,
|
||||
max_size: None,
|
||||
limited_size: None,
|
||||
omit_verbose_types: false,
|
||||
closure_style: ClosureStyle::ImplFn,
|
||||
display_target: DisplayTarget::Test,
|
||||
@ -295,6 +323,7 @@ pub struct HirDisplayWrapper<'a, T> {
|
||||
db: &'a dyn HirDatabase,
|
||||
t: &'a T,
|
||||
max_size: Option<usize>,
|
||||
limited_size: Option<usize>,
|
||||
omit_verbose_types: bool,
|
||||
closure_style: ClosureStyle,
|
||||
display_target: DisplayTarget,
|
||||
@ -323,6 +352,7 @@ impl<T: HirDisplay> HirDisplayWrapper<'_, T> {
|
||||
buf: String::with_capacity(20),
|
||||
curr_size: 0,
|
||||
max_size: self.max_size,
|
||||
entity_limit: self.limited_size,
|
||||
omit_verbose_types: self.omit_verbose_types,
|
||||
display_target: self.display_target,
|
||||
closure_style: self.closure_style,
|
||||
@ -1751,10 +1781,7 @@ impl HirDisplay for TypeRef {
|
||||
f.write_joined(bounds, " + ")?;
|
||||
}
|
||||
TypeRef::Macro(macro_call) => {
|
||||
let ctx = hir_def::lower::LowerCtx::with_span_map(
|
||||
f.db.upcast(),
|
||||
f.db.span_map(macro_call.file_id),
|
||||
);
|
||||
let ctx = hir_def::lower::LowerCtx::new(f.db.upcast(), macro_call.file_id);
|
||||
let macro_call = macro_call.to_node(f.db.upcast());
|
||||
match macro_call.path() {
|
||||
Some(path) => match Path::from_src(&ctx, path) {
|
||||
|
@ -1,7 +1,6 @@
//! Compute the binary representation of a type

use std::borrow::Cow;
use std::fmt;
use std::{borrow::Cow, fmt};

use base_db::salsa::Cycle;
use chalk_ir::{AdtId, FloatTy, IntTy, TyKind, UintTy};

@ -995,12 +995,12 @@ impl<'a> TyLoweringContext<'a> {
|
||||
|
||||
pub(crate) fn lower_type_bound(
|
||||
&'a self,
|
||||
bound: &'a TypeBound,
|
||||
bound: &'a Interned<TypeBound>,
|
||||
self_ty: Ty,
|
||||
ignore_bindings: bool,
|
||||
) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
|
||||
let mut bindings = None;
|
||||
let trait_ref = match bound {
|
||||
let trait_ref = match bound.as_ref() {
|
||||
TypeBound::Path(path, TraitBoundModifier::None) => {
|
||||
bindings = self.lower_trait_ref_from_path(path, Some(self_ty));
|
||||
bindings
|
||||
@ -1055,10 +1055,10 @@ impl<'a> TyLoweringContext<'a> {
|
||||
|
||||
fn assoc_type_bindings_from_type_bound(
|
||||
&'a self,
|
||||
bound: &'a TypeBound,
|
||||
bound: &'a Interned<TypeBound>,
|
||||
trait_ref: TraitRef,
|
||||
) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
|
||||
let last_segment = match bound {
|
||||
let last_segment = match bound.as_ref() {
|
||||
TypeBound::Path(path, TraitBoundModifier::None) | TypeBound::ForLifetime(_, path) => {
|
||||
path.segments().last()
|
||||
}
|
||||
@ -1121,7 +1121,63 @@ impl<'a> TyLoweringContext<'a> {
|
||||
);
|
||||
}
|
||||
} else {
|
||||
let ty = self.lower_ty(type_ref);
|
||||
let ty = 'ty: {
|
||||
if matches!(
|
||||
self.impl_trait_mode,
|
||||
ImplTraitLoweringState::Param(_)
|
||||
| ImplTraitLoweringState::Variable(_)
|
||||
) {
|
||||
// Find the generic index for the target of our `bound`
|
||||
let target_param_idx = self
|
||||
.resolver
|
||||
.where_predicates_in_scope()
|
||||
.find_map(|p| match p {
|
||||
WherePredicate::TypeBound {
|
||||
target: WherePredicateTypeTarget::TypeOrConstParam(idx),
|
||||
bound: b,
|
||||
} if b == bound => Some(idx),
|
||||
_ => None,
|
||||
});
|
||||
if let Some(target_param_idx) = target_param_idx {
|
||||
let mut counter = 0;
|
||||
for (idx, data) in self.generics().params.type_or_consts.iter()
|
||||
{
|
||||
// Count the number of `impl Trait` things that appear before
|
||||
// the target of our `bound`.
|
||||
// Our counter within `impl_trait_mode` should be that number
|
||||
// to properly lower each types within `type_ref`
|
||||
if data.type_param().is_some_and(|p| {
|
||||
p.provenance == TypeParamProvenance::ArgumentImplTrait
|
||||
}) {
|
||||
counter += 1;
|
||||
}
|
||||
if idx == *target_param_idx {
|
||||
break;
|
||||
}
|
||||
}
|
||||
let mut ext = TyLoweringContext::new_maybe_unowned(
|
||||
self.db,
|
||||
self.resolver,
|
||||
self.owner,
|
||||
)
|
||||
.with_type_param_mode(self.type_param_mode);
|
||||
match &self.impl_trait_mode {
|
||||
ImplTraitLoweringState::Param(_) => {
|
||||
ext.impl_trait_mode =
|
||||
ImplTraitLoweringState::Param(Cell::new(counter));
|
||||
}
|
||||
ImplTraitLoweringState::Variable(_) => {
|
||||
ext.impl_trait_mode = ImplTraitLoweringState::Variable(
|
||||
Cell::new(counter),
|
||||
);
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
break 'ty ext.lower_ty(type_ref);
|
||||
}
|
||||
}
|
||||
self.lower_ty(type_ref)
|
||||
};
|
||||
let alias_eq =
|
||||
AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
|
||||
predicates.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
|
||||
@ -1403,8 +1459,14 @@ pub(crate) fn generic_predicates_for_param_query(
|
||||
assoc_name: Option<Name>,
|
||||
) -> Arc<[Binders<QuantifiedWhereClause>]> {
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let ctx = TyLoweringContext::new(db, &resolver, def.into())
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
let ctx = if let GenericDefId::FunctionId(_) = def {
|
||||
TyLoweringContext::new(db, &resolver, def.into())
|
||||
.with_impl_trait_mode(ImplTraitLoweringMode::Variable)
|
||||
.with_type_param_mode(ParamLoweringMode::Variable)
|
||||
} else {
|
||||
TyLoweringContext::new(db, &resolver, def.into())
|
||||
.with_type_param_mode(ParamLoweringMode::Variable)
|
||||
};
|
||||
let generics = generics(db.upcast(), def);
|
||||
|
||||
// we have to filter out all other predicates *first*, before attempting to lower them
|
||||
@ -1490,8 +1552,14 @@ pub(crate) fn trait_environment_query(
|
||||
def: GenericDefId,
|
||||
) -> Arc<TraitEnvironment> {
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let ctx = TyLoweringContext::new(db, &resolver, def.into())
|
||||
.with_type_param_mode(ParamLoweringMode::Placeholder);
|
||||
let ctx = if let GenericDefId::FunctionId(_) = def {
|
||||
TyLoweringContext::new(db, &resolver, def.into())
|
||||
.with_impl_trait_mode(ImplTraitLoweringMode::Param)
|
||||
.with_type_param_mode(ParamLoweringMode::Placeholder)
|
||||
} else {
|
||||
TyLoweringContext::new(db, &resolver, def.into())
|
||||
.with_type_param_mode(ParamLoweringMode::Placeholder)
|
||||
};
|
||||
let mut traits_in_scope = Vec::new();
|
||||
let mut clauses = Vec::new();
|
||||
for pred in resolver.where_predicates_in_scope() {
|
||||
@ -1549,8 +1617,14 @@ pub(crate) fn generic_predicates_query(
|
||||
def: GenericDefId,
|
||||
) -> Arc<[Binders<QuantifiedWhereClause>]> {
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let ctx = TyLoweringContext::new(db, &resolver, def.into())
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
let ctx = if let GenericDefId::FunctionId(_) = def {
|
||||
TyLoweringContext::new(db, &resolver, def.into())
|
||||
.with_impl_trait_mode(ImplTraitLoweringMode::Variable)
|
||||
.with_type_param_mode(ParamLoweringMode::Variable)
|
||||
} else {
|
||||
TyLoweringContext::new(db, &resolver, def.into())
|
||||
.with_type_param_mode(ParamLoweringMode::Variable)
|
||||
};
|
||||
let generics = generics(db.upcast(), def);
|
||||
|
||||
let mut predicates = resolver
|
||||
|
@ -1364,10 +1364,16 @@ impl<'ctx> MirLowerCtx<'ctx> {
match loc {
LiteralOrConst::Literal(l) => self.lower_literal_to_operand(ty, l),
LiteralOrConst::Const(c) => {
let unresolved_name = || MirLowerError::unresolved_path(self.db, c);
let c = match &self.body.pats[*c] {
Pat::Path(p) => p,
_ => not_supported!(
"only `char` and numeric types are allowed in range patterns"
),
};
let unresolved_name = || MirLowerError::unresolved_path(self.db, c.as_ref());
let resolver = self.owner.resolver(self.db.upcast());
let pr = resolver
.resolve_path_in_value_ns(self.db.upcast(), c)
.resolve_path_in_value_ns(self.db.upcast(), c.as_ref())
.ok_or_else(unresolved_name)?;
match pr {
ResolveValueResult::ValueNs(v, _) => {

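The hunk above makes MIR lowering look through the pattern behind `LiteralOrConst::Const` and accept only a plain path there, which is the shape produced by a range pattern whose endpoint is a named constant. For reference, this is the surface syntax involved (ordinary Rust, not rust-analyzer code):

    const LIMIT: i32 = 10;

    fn classify(x: i32) -> &'static str {
        match x {
            // `LIMIT` is a path to a constant; this is the case the lowering
            // above resolves through `resolve_path_in_value_ns`.
            0..=LIMIT => "small",
            _ => "large",
        }
    }

    fn main() {
        assert_eq!(classify(3), "small");
        assert_eq!(classify(30), "large");
    }
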
@ -1231,6 +1231,53 @@ fn test(x: impl Trait<u64>, y: &impl Trait<u64>) {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn argument_impl_trait_with_projection() {
|
||||
check_infer(
|
||||
r#"
|
||||
trait X {
|
||||
type Item;
|
||||
}
|
||||
|
||||
impl<T> X for [T; 2] {
|
||||
type Item = T;
|
||||
}
|
||||
|
||||
trait Y {}
|
||||
|
||||
impl<T> Y for T {}
|
||||
|
||||
enum R<T, U> {
|
||||
A(T),
|
||||
B(U),
|
||||
}
|
||||
|
||||
fn foo<T>(x: impl X<Item = R<impl Y, T>>) -> T { loop {} }
|
||||
|
||||
fn bar() {
|
||||
let a = foo([R::A(()), R::B(7)]);
|
||||
}
|
||||
"#,
|
||||
expect![[r#"
|
||||
153..154 'x': impl X<Item = R<impl Y + ?Sized, T>> + ?Sized
|
||||
190..201 '{ loop {} }': T
|
||||
192..199 'loop {}': !
|
||||
197..199 '{}': ()
|
||||
212..253 '{ ...)]); }': ()
|
||||
222..223 'a': i32
|
||||
226..229 'foo': fn foo<i32>([R<(), i32>; 2]) -> i32
|
||||
226..250 'foo([R...B(7)])': i32
|
||||
230..249 '[R::A(...:B(7)]': [R<(), i32>; 2]
|
||||
231..235 'R::A': extern "rust-call" A<(), i32>(()) -> R<(), i32>
|
||||
231..239 'R::A(())': R<(), i32>
|
||||
236..238 '()': ()
|
||||
241..245 'R::B': extern "rust-call" B<(), i32>(i32) -> R<(), i32>
|
||||
241..248 'R::B(7)': R<(), i32>
|
||||
246..247 '7': i32
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple_return_pos_impl_trait() {
|
||||
cov_mark::check!(lower_rpit);
|
||||
|
@ -27,7 +27,6 @@ cfg.workspace = true
|
||||
hir-def.workspace = true
|
||||
hir-expand.workspace = true
|
||||
hir-ty.workspace = true
|
||||
profile.workspace = true
|
||||
stdx.workspace = true
|
||||
syntax.workspace = true
|
||||
tt.workspace = true
|
||||
|
@ -4,20 +4,20 @@
|
||||
//!
|
||||
//! But we need this for at least LRU caching at the query level.
|
||||
pub use hir_def::db::{
|
||||
AttrsQuery, BlockDefMapQuery, BlockItemTreeQueryQuery, BodyQuery, BodyWithSourceMapQuery,
|
||||
ConstDataQuery, ConstVisibilityQuery, CrateDefMapQueryQuery, CrateLangItemsQuery,
|
||||
CrateSupportsNoStdQuery, DefDatabase, DefDatabaseStorage, EnumDataQuery,
|
||||
EnumVariantDataWithDiagnosticsQuery, ExprScopesQuery, ExternCrateDeclDataQuery,
|
||||
FieldVisibilitiesQuery, FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery,
|
||||
FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery, ImplDataWithDiagnosticsQuery,
|
||||
ImportMapQuery, InternAnonymousConstQuery, InternBlockQuery, InternConstQuery, InternDatabase,
|
||||
InternDatabaseStorage, InternEnumQuery, InternExternBlockQuery, InternExternCrateQuery,
|
||||
InternFunctionQuery, InternImplQuery, InternInTypeConstQuery, InternMacro2Query,
|
||||
InternMacroRulesQuery, InternProcMacroQuery, InternStaticQuery, InternStructQuery,
|
||||
InternTraitAliasQuery, InternTraitQuery, InternTypeAliasQuery, InternUnionQuery,
|
||||
InternUseQuery, LangItemQuery, Macro2DataQuery, MacroRulesDataQuery, ProcMacroDataQuery,
|
||||
StaticDataQuery, StructDataWithDiagnosticsQuery, TraitAliasDataQuery,
|
||||
TraitDataWithDiagnosticsQuery, TypeAliasDataQuery, UnionDataWithDiagnosticsQuery,
|
||||
AttrsQuery, BlockDefMapQuery, BodyQuery, BodyWithSourceMapQuery, ConstDataQuery,
|
||||
ConstVisibilityQuery, CrateLangItemsQuery, CrateSupportsNoStdQuery, DefDatabase,
|
||||
DefDatabaseStorage, EnumDataQuery, EnumVariantDataWithDiagnosticsQuery, ExprScopesQuery,
|
||||
ExternCrateDeclDataQuery, FieldVisibilitiesQuery, FieldsAttrsQuery, FieldsAttrsSourceMapQuery,
|
||||
FileItemTreeQuery, FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery,
|
||||
ImplDataWithDiagnosticsQuery, ImportMapQuery, InternAnonymousConstQuery, InternBlockQuery,
|
||||
InternConstQuery, InternDatabase, InternDatabaseStorage, InternEnumQuery,
|
||||
InternExternBlockQuery, InternExternCrateQuery, InternFunctionQuery, InternImplQuery,
|
||||
InternInTypeConstQuery, InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery,
|
||||
InternStaticQuery, InternStructQuery, InternTraitAliasQuery, InternTraitQuery,
|
||||
InternTypeAliasQuery, InternUnionQuery, InternUseQuery, LangItemQuery, Macro2DataQuery,
|
||||
MacroRulesDataQuery, ProcMacroDataQuery, StaticDataQuery, StructDataWithDiagnosticsQuery,
|
||||
TraitAliasDataQuery, TraitDataWithDiagnosticsQuery, TypeAliasDataQuery,
|
||||
UnionDataWithDiagnosticsQuery,
|
||||
};
|
||||
pub use hir_expand::db::{
|
||||
AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
|
||||
|
@ -17,10 +17,10 @@ use hir_ty::{
|
||||
};
|
||||
|
||||
use crate::{
|
||||
Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Enum, ExternCrateDecl, Field,
|
||||
Function, GenericParam, HasCrate, HasVisibility, LifetimeParam, Macro, Module, SelfParam,
|
||||
Static, Struct, Trait, TraitAlias, TupleField, TyBuilder, Type, TypeAlias, TypeOrConstParam,
|
||||
TypeParam, Union, Variant,
|
||||
Adt, AsAssocItem, AssocItem, AssocItemContainer, Const, ConstParam, Enum, ExternCrateDecl,
|
||||
Field, Function, GenericParam, HasCrate, HasVisibility, LifetimeParam, Macro, Module,
|
||||
SelfParam, Static, Struct, Trait, TraitAlias, TupleField, TyBuilder, Type, TypeAlias,
|
||||
TypeOrConstParam, TypeParam, Union, Variant,
|
||||
};
|
||||
|
||||
impl HirDisplay for Function {
|
||||
@ -595,6 +595,35 @@ impl HirDisplay for Trait {
|
||||
let def_id = GenericDefId::TraitId(self.id);
|
||||
write_generic_params(def_id, f)?;
|
||||
write_where_clause(def_id, f)?;
|
||||
|
||||
if let Some(limit) = f.entity_limit {
|
||||
let assoc_items = self.items(f.db);
|
||||
let count = assoc_items.len().min(limit);
|
||||
if count == 0 {
|
||||
if assoc_items.is_empty() {
|
||||
f.write_str(" {}")?;
|
||||
} else {
|
||||
f.write_str(" { /* … */ }")?;
|
||||
}
|
||||
} else {
|
||||
f.write_str(" {\n")?;
|
||||
for item in &assoc_items[..count] {
|
||||
f.write_str(" ")?;
|
||||
match item {
|
||||
AssocItem::Function(func) => func.hir_fmt(f),
|
||||
AssocItem::Const(cst) => cst.hir_fmt(f),
|
||||
AssocItem::TypeAlias(type_alias) => type_alias.hir_fmt(f),
|
||||
}?;
|
||||
f.write_str(";\n")?;
|
||||
}
|
||||
|
||||
if assoc_items.len() > count {
|
||||
f.write_str(" /* … */\n")?;
|
||||
}
|
||||
f.write_str("}")?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
@ -126,7 +126,7 @@ pub use {
|
||||
},
|
||||
hir_expand::{
|
||||
attrs::{Attr, AttrId},
|
||||
change::Change,
|
||||
change::ChangeWithProcMacros,
|
||||
hygiene::{marks_rev, SyntaxContextExt},
|
||||
name::{known, Name},
|
||||
proc_macro::ProcMacros,
|
||||
@ -365,7 +365,7 @@ impl ModuleDef {
|
||||
Some(name)
|
||||
}
|
||||
|
||||
pub fn diagnostics(self, db: &dyn HirDatabase) -> Vec<AnyDiagnostic> {
|
||||
pub fn diagnostics(self, db: &dyn HirDatabase, style_lints: bool) -> Vec<AnyDiagnostic> {
|
||||
let id = match self {
|
||||
ModuleDef::Adt(it) => match it {
|
||||
Adt::Struct(it) => it.id.into(),
|
||||
@ -387,7 +387,7 @@ impl ModuleDef {
|
||||
|
||||
match self.as_def_with_body() {
|
||||
Some(def) => {
|
||||
def.diagnostics(db, &mut acc);
|
||||
def.diagnostics(db, &mut acc, style_lints);
|
||||
}
|
||||
None => {
|
||||
for diag in hir_ty::diagnostics::incorrect_case(db, id) {
|
||||
@ -541,7 +541,12 @@ impl Module {
|
||||
}
|
||||
|
||||
/// Fills `acc` with the module's diagnostics.
|
||||
pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
|
||||
pub fn diagnostics(
|
||||
self,
|
||||
db: &dyn HirDatabase,
|
||||
acc: &mut Vec<AnyDiagnostic>,
|
||||
style_lints: bool,
|
||||
) {
|
||||
let name = self.name(db);
|
||||
let _p = tracing::span!(tracing::Level::INFO, "Module::diagnostics", ?name);
|
||||
let def_map = self.id.def_map(db.upcast());
|
||||
@ -558,9 +563,9 @@ impl Module {
|
||||
ModuleDef::Module(m) => {
|
||||
// Only add diagnostics from inline modules
|
||||
if def_map[m.id.local_id].origin.is_inline() {
|
||||
m.diagnostics(db, acc)
|
||||
m.diagnostics(db, acc, style_lints)
|
||||
}
|
||||
acc.extend(def.diagnostics(db))
|
||||
acc.extend(def.diagnostics(db, style_lints))
|
||||
}
|
||||
ModuleDef::Trait(t) => {
|
||||
for diag in db.trait_data_with_diagnostics(t.id).1.iter() {
|
||||
@ -568,10 +573,10 @@ impl Module {
|
||||
}
|
||||
|
||||
for item in t.items(db) {
|
||||
item.diagnostics(db, acc);
|
||||
item.diagnostics(db, acc, style_lints);
|
||||
}
|
||||
|
||||
acc.extend(def.diagnostics(db))
|
||||
acc.extend(def.diagnostics(db, style_lints))
|
||||
}
|
||||
ModuleDef::Adt(adt) => {
|
||||
match adt {
|
||||
@ -587,17 +592,17 @@ impl Module {
|
||||
}
|
||||
Adt::Enum(e) => {
|
||||
for v in e.variants(db) {
|
||||
acc.extend(ModuleDef::Variant(v).diagnostics(db));
|
||||
acc.extend(ModuleDef::Variant(v).diagnostics(db, style_lints));
|
||||
for diag in db.enum_variant_data_with_diagnostics(v.id).1.iter() {
|
||||
emit_def_diagnostic(db, acc, diag);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
acc.extend(def.diagnostics(db))
|
||||
acc.extend(def.diagnostics(db, style_lints))
|
||||
}
|
||||
ModuleDef::Macro(m) => emit_macro_def_diagnostics(db, acc, m),
|
||||
_ => acc.extend(def.diagnostics(db)),
|
||||
_ => acc.extend(def.diagnostics(db, style_lints)),
|
||||
}
|
||||
}
|
||||
self.legacy_macros(db).into_iter().for_each(|m| emit_macro_def_diagnostics(db, acc, m));
|
||||
@ -738,7 +743,7 @@ impl Module {
|
||||
}
|
||||
|
||||
for &item in &db.impl_data(impl_def.id).items {
|
||||
AssocItem::from(item).diagnostics(db, acc);
|
||||
AssocItem::from(item).diagnostics(db, acc, style_lints);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1616,14 +1621,19 @@ impl DefWithBody {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
|
||||
pub fn diagnostics(
|
||||
self,
|
||||
db: &dyn HirDatabase,
|
||||
acc: &mut Vec<AnyDiagnostic>,
|
||||
style_lints: bool,
|
||||
) {
|
||||
db.unwind_if_cancelled();
|
||||
let krate = self.module(db).id.krate();
|
||||
|
||||
let (body, source_map) = db.body_with_source_map(self.into());
|
||||
|
||||
for (_, def_map) in body.blocks(db.upcast()) {
|
||||
Module { id: def_map.module_id(DefMap::ROOT) }.diagnostics(db, acc);
|
||||
Module { id: def_map.module_id(DefMap::ROOT) }.diagnostics(db, acc, style_lints);
|
||||
}
|
||||
|
||||
for diag in source_map.diagnostics() {
|
||||
@ -1784,7 +1794,7 @@ impl DefWithBody {
|
||||
}
|
||||
}
|
||||
|
||||
for diagnostic in BodyValidationDiagnostic::collect(db, self.into()) {
|
||||
for diagnostic in BodyValidationDiagnostic::collect(db, self.into(), style_lints) {
|
||||
acc.extend(AnyDiagnostic::body_validation_diagnostic(db, diagnostic, &source_map));
|
||||
}
|
||||
|
||||
@ -2098,6 +2108,14 @@ pub struct Param {
|
||||
}
|
||||
|
||||
impl Param {
|
||||
pub fn parent_fn(&self) -> Function {
|
||||
self.func
|
||||
}
|
||||
|
||||
pub fn index(&self) -> usize {
|
||||
self.idx
|
||||
}
|
||||
|
||||
pub fn ty(&self) -> &Type {
|
||||
&self.ty
|
||||
}
|
||||
@ -2162,6 +2180,10 @@ impl SelfParam {
|
||||
.map(|value| InFile { file_id, value })
|
||||
}
|
||||
|
||||
pub fn parent_fn(&self) -> Function {
|
||||
Function::from(self.func)
|
||||
}
|
||||
|
||||
pub fn ty(&self, db: &dyn HirDatabase) -> Type {
|
||||
let substs = TyBuilder::placeholder_subst(db, self.func);
|
||||
let callable_sig =
|
||||
@ -2897,13 +2919,18 @@ impl AssocItem {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
|
||||
pub fn diagnostics(
|
||||
self,
|
||||
db: &dyn HirDatabase,
|
||||
acc: &mut Vec<AnyDiagnostic>,
|
||||
style_lints: bool,
|
||||
) {
|
||||
match self {
|
||||
AssocItem::Function(func) => {
|
||||
DefWithBody::from(func).diagnostics(db, acc);
|
||||
DefWithBody::from(func).diagnostics(db, acc, style_lints);
|
||||
}
|
||||
AssocItem::Const(const_) => {
|
||||
DefWithBody::from(const_).diagnostics(db, acc);
|
||||
DefWithBody::from(const_).diagnostics(db, acc, style_lints);
|
||||
}
|
||||
AssocItem::TypeAlias(type_alias) => {
|
||||
for diag in hir_ty::diagnostics::incorrect_case(db, type_alias.id.into()) {
|
||||
|
@ -38,10 +38,11 @@ use crate::{
|
||||
db::HirDatabase,
|
||||
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
|
||||
source_analyzer::{resolve_hir_path, SourceAnalyzer},
|
||||
Access, Adjust, Adjustment, AutoBorrow, BindingMode, BuiltinAttr, Callable, ConstParam, Crate,
|
||||
DeriveHelper, Field, Function, HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local,
|
||||
Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, Struct, ToolModule, Trait,
|
||||
TupleField, Type, TypeAlias, TypeParam, VariantDef,
|
||||
Access, Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const,
|
||||
ConstParam, Crate, DeriveHelper, Enum, Field, Function, HasSource, HirFileId, Impl, InFile,
|
||||
Label, LifetimeParam, Local, Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef,
|
||||
Static, Struct, ToolModule, Trait, TraitAlias, TupleField, Type, TypeAlias, TypeParam, Union,
|
||||
Variant, VariantDef,
|
||||
};
|
||||
|
||||
pub enum DescendPreference {
|
||||
@ -223,20 +224,68 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
|
||||
self.imp.resolve_variant(record_lit).map(VariantDef::from)
|
||||
}
|
||||
|
||||
pub fn to_module_def(&self, file: FileId) -> Option<Module> {
|
||||
self.imp.to_module_def(file).next()
|
||||
pub fn file_to_module_def(&self, file: FileId) -> Option<Module> {
|
||||
self.imp.file_to_module_defs(file).next()
|
||||
}
|
||||
|
||||
pub fn to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
|
||||
self.imp.to_module_def(file)
|
||||
pub fn file_to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
|
||||
self.imp.file_to_module_defs(file)
|
||||
}
|
||||
|
||||
pub fn to_adt_def(&self, a: &ast::Adt) -> Option<Adt> {
|
||||
self.imp.to_def(a).map(Adt::from)
|
||||
}
|
||||
|
||||
pub fn to_const_def(&self, c: &ast::Const) -> Option<Const> {
|
||||
self.imp.to_def(c).map(Const::from)
|
||||
}
|
||||
|
||||
pub fn to_enum_def(&self, e: &ast::Enum) -> Option<Enum> {
|
||||
self.imp.to_def(e).map(Enum::from)
|
||||
}
|
||||
|
||||
pub fn to_enum_variant_def(&self, v: &ast::Variant) -> Option<Variant> {
|
||||
self.imp.to_def(v).map(Variant::from)
|
||||
}
|
||||
|
||||
pub fn to_fn_def(&self, f: &ast::Fn) -> Option<Function> {
|
||||
self.imp.to_def(f).map(Function::from)
|
||||
}
|
||||
|
||||
pub fn to_impl_def(&self, i: &ast::Impl) -> Option<Impl> {
|
||||
self.imp.to_def(i).map(Impl::from)
|
||||
}
|
||||
|
||||
pub fn to_macro_def(&self, m: &ast::Macro) -> Option<Macro> {
|
||||
self.imp.to_def(m).map(Macro::from)
|
||||
}
|
||||
|
||||
pub fn to_module_def(&self, m: &ast::Module) -> Option<Module> {
|
||||
self.imp.to_def(m).map(Module::from)
|
||||
}
|
||||
|
||||
pub fn to_static_def(&self, s: &ast::Static) -> Option<Static> {
|
||||
self.imp.to_def(s).map(Static::from)
|
||||
}
|
||||
|
||||
pub fn to_struct_def(&self, s: &ast::Struct) -> Option<Struct> {
|
||||
self.imp.to_def(s).map(Struct::from)
|
||||
}
|
||||
|
||||
pub fn to_impl_def(&self, i: &ast::Impl) -> Option<Impl> {
|
||||
self.imp.to_def(i).map(Impl::from)
|
||||
pub fn to_trait_alias_def(&self, t: &ast::TraitAlias) -> Option<TraitAlias> {
|
||||
self.imp.to_def(t).map(TraitAlias::from)
|
||||
}
|
||||
|
||||
pub fn to_trait_def(&self, t: &ast::Trait) -> Option<Trait> {
|
||||
self.imp.to_def(t).map(Trait::from)
|
||||
}
|
||||
|
||||
pub fn to_type_alias_def(&self, t: &ast::TypeAlias) -> Option<TypeAlias> {
|
||||
self.imp.to_def(t).map(TypeAlias::from)
|
||||
}
|
||||
|
||||
pub fn to_union_def(&self, u: &ast::Union) -> Option<Union> {
|
||||
self.imp.to_def(u).map(Union::from)
|
||||
}
|
||||
}
|
||||
|
||||
@ -1024,7 +1073,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||
|
||||
pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
|
||||
let analyze = self.analyze(ty.syntax())?;
|
||||
let ctx = LowerCtx::with_file_id(self.db.upcast(), analyze.file_id);
|
||||
let ctx = LowerCtx::new(self.db.upcast(), analyze.file_id);
|
||||
let ty = hir_ty::TyLoweringContext::new_maybe_unowned(
|
||||
self.db,
|
||||
&analyze.resolver,
|
||||
@ -1036,8 +1085,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||
|
||||
pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
|
||||
let analyze = self.analyze(path.syntax())?;
|
||||
let span_map = self.db.span_map(analyze.file_id);
|
||||
let ctx = LowerCtx::with_span_map(self.db.upcast(), span_map);
|
||||
let ctx = LowerCtx::new(self.db.upcast(), analyze.file_id);
|
||||
let hir_path = Path::from_src(&ctx, path.clone())?;
|
||||
match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? {
|
||||
TypeNs::TraitId(id) => Some(Trait { id }),
|
||||
@ -1241,7 +1289,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||
T::to_def(self, src)
|
||||
}
|
||||
|
||||
fn to_module_def(&self, file: FileId) -> impl Iterator<Item = Module> {
|
||||
fn file_to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
|
||||
self.with_ctx(|ctx| ctx.file_to_def(file)).into_iter().map(Module::from)
|
||||
}
|
||||
|
||||
@ -1645,7 +1693,7 @@ impl SemanticsScope<'_> {
|
||||
/// Resolve a path as-if it was written at the given scope. This is
|
||||
/// necessary a heuristic, as it doesn't take hygiene into account.
|
||||
pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
|
||||
let ctx = LowerCtx::with_file_id(self.db.upcast(), self.file_id);
|
||||
let ctx = LowerCtx::new(self.db.upcast(), self.file_id);
|
||||
let path = Path::from_src(&ctx, path.clone())?;
|
||||
resolve_hir_path(self.db, &self.resolver, &path)
|
||||
}
|
||||
|
@ -118,7 +118,7 @@ pub(super) struct SourceToDefCtx<'a, 'b> {
|
||||
|
||||
impl SourceToDefCtx<'_, '_> {
|
||||
pub(super) fn file_to_def(&self, file: FileId) -> SmallVec<[ModuleId; 1]> {
|
||||
let _p = tracing::span!(tracing::Level::INFO, "SourceBinder::to_module_def");
|
||||
let _p = tracing::span!(tracing::Level::INFO, "SourceBinder::file_to_module_def");
|
||||
let mut mods = SmallVec::new();
|
||||
for &crate_id in self.db.relevant_crates(file).iter() {
|
||||
// FIXME: inner items
|
||||
|
@ -549,7 +549,7 @@ impl SourceAnalyzer {
|
||||
db: &dyn HirDatabase,
|
||||
macro_call: InFile<&ast::MacroCall>,
|
||||
) -> Option<Macro> {
|
||||
let ctx = LowerCtx::with_file_id(db.upcast(), macro_call.file_id);
|
||||
let ctx = LowerCtx::new(db.upcast(), macro_call.file_id);
|
||||
let path = macro_call.value.path().and_then(|ast| Path::from_src(&ctx, ast))?;
|
||||
self.resolver
|
||||
.resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Bang))
|
||||
@ -662,7 +662,7 @@ impl SourceAnalyzer {
|
||||
}
|
||||
|
||||
// This must be a normal source file rather than macro file.
|
||||
let ctx = LowerCtx::with_span_map(db.upcast(), db.span_map(self.file_id));
|
||||
let ctx = LowerCtx::new(db.upcast(), self.file_id);
|
||||
let hir_path = Path::from_src(&ctx, path.clone())?;
|
||||
|
||||
// Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
|
||||
|
@ -23,7 +23,6 @@ tracing.workspace = true
|
||||
stdx.workspace = true
|
||||
syntax.workspace = true
|
||||
text-edit.workspace = true
|
||||
profile.workspace = true
|
||||
ide-db.workspace = true
|
||||
hir.workspace = true
|
||||
|
||||
@ -33,10 +32,6 @@ expect-test = "1.4.0"
|
||||
# local deps
|
||||
test-utils.workspace = true
|
||||
test-fixture.workspace = true
|
||||
sourcegen.workspace = true
|
||||
|
||||
[features]
|
||||
in-rust-tree = []
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
@ -107,6 +107,10 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<Str
|
||||
let visible_fields =
|
||||
fields.into_iter().filter(|field| field.is_visible_from(ctx.db(), module)).collect_vec();
|
||||
|
||||
if visible_fields.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let has_private_members =
|
||||
(is_non_exhaustive && is_foreign_crate) || visible_fields.len() < n_fields;
|
||||
|
||||
@ -413,7 +417,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn unit_struct() {
|
||||
check_assist(
|
||||
check_assist_not_applicable(
|
||||
destructure_struct_binding,
|
||||
r#"
|
||||
struct Foo;
|
||||
@ -422,13 +426,6 @@ mod tests {
|
||||
let $0foo = Foo;
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct Foo;
|
||||
|
||||
fn main() {
|
||||
let Foo = Foo;
|
||||
}
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
@ -739,4 +736,18 @@ mod tests {
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn record_struct_no_public_members() {
|
||||
check_assist_not_applicable(
|
||||
destructure_struct_binding,
|
||||
r#"
|
||||
//- /lib.rs crate:dep
|
||||
pub struct Foo { bar: i32, baz: i32 };
|
||||
|
||||
//- /main.rs crate:main deps:dep
|
||||
fn main($0foo: dep::Foo) {}
|
||||
"#,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -274,4 +274,22 @@ fn main() {
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn escaped_literals() {
|
||||
check_assist(
|
||||
extract_expressions_from_format_string,
|
||||
r#"
|
||||
//- minicore: fmt
|
||||
fn main() {
|
||||
print!("\n$ {x + 1}$0");
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
fn main() {
|
||||
print!("\n$ {}"$0, x + 1);
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -16,7 +16,8 @@ use syntax::{
|
||||
ast::{
|
||||
self,
|
||||
edit::{self, AstNodeEdit},
|
||||
make, AssocItem, GenericArgList, GenericParamList, HasGenericParams, HasName,
|
||||
edit_in_place::AttrsOwnerEdit,
|
||||
make, AssocItem, GenericArgList, GenericParamList, HasAttrs, HasGenericParams, HasName,
|
||||
HasTypeBounds, HasVisibility as astHasVisibility, Path, WherePred,
|
||||
},
|
||||
ted::{self, Position},
|
||||
@ -116,7 +117,7 @@ impl Field {
|
||||
) -> Option<Field> {
|
||||
let db = ctx.sema.db;
|
||||
|
||||
let module = ctx.sema.to_module_def(ctx.file_id())?;
|
||||
let module = ctx.sema.file_to_module_def(ctx.file_id())?;
|
||||
|
||||
let (name, range, ty) = match f {
|
||||
Either::Left(f) => {
|
||||
@ -619,7 +620,8 @@ fn process_assoc_item(
|
||||
qual_path_ty: ast::Path,
|
||||
base_name: &str,
|
||||
) -> Option<ast::AssocItem> {
|
||||
match item {
|
||||
let attrs = item.attrs();
|
||||
let assoc = match item {
|
||||
AssocItem::Const(c) => const_assoc_item(c, qual_path_ty),
|
||||
AssocItem::Fn(f) => func_assoc_item(f, qual_path_ty, base_name),
|
||||
AssocItem::MacroCall(_) => {
|
||||
@ -628,7 +630,18 @@ fn process_assoc_item(
|
||||
None
|
||||
}
|
||||
AssocItem::TypeAlias(ta) => ty_assoc_item(ta, qual_path_ty),
|
||||
};
|
||||
if let Some(assoc) = &assoc {
|
||||
attrs.for_each(|attr| {
|
||||
assoc.add_attr(attr.clone());
|
||||
// fix indentations
|
||||
if let Some(tok) = attr.syntax().next_sibling_or_token() {
|
||||
let pos = Position::after(tok);
|
||||
ted::insert(pos, make::tokens::whitespace(" "));
|
||||
}
|
||||
})
|
||||
}
|
||||
assoc
|
||||
}
|
||||
|
||||
fn const_assoc_item(item: syntax::ast::Const, qual_path_ty: ast::Path) -> Option<AssocItem> {
|
||||
@ -1703,4 +1716,65 @@ impl some_module::SomeTrait for B {
|
||||
}"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fn_with_attrs() {
|
||||
check_assist(
|
||||
generate_delegate_trait,
|
||||
r#"
|
||||
struct A;
|
||||
|
||||
trait T {
|
||||
#[cfg(test)]
|
||||
fn f(&self, a: u32);
|
||||
#[cfg(not(test))]
|
||||
fn f(&self, a: bool);
|
||||
}
|
||||
|
||||
impl T for A {
|
||||
#[cfg(test)]
|
||||
fn f(&self, a: u32) {}
|
||||
#[cfg(not(test))]
|
||||
fn f(&self, a: bool) {}
|
||||
}
|
||||
|
||||
struct B {
|
||||
a$0: A,
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct A;
|
||||
|
||||
trait T {
|
||||
#[cfg(test)]
|
||||
fn f(&self, a: u32);
|
||||
#[cfg(not(test))]
|
||||
fn f(&self, a: bool);
|
||||
}
|
||||
|
||||
impl T for A {
|
||||
#[cfg(test)]
|
||||
fn f(&self, a: u32) {}
|
||||
#[cfg(not(test))]
|
||||
fn f(&self, a: bool) {}
|
||||
}
|
||||
|
||||
struct B {
|
||||
a: A,
|
||||
}
|
||||
|
||||
impl T for B {
|
||||
#[cfg(test)]
|
||||
fn f(&self, a: u32) {
|
||||
<A as T>::f(&self.a, a)
|
||||
}
|
||||
|
||||
#[cfg(not(test))]
|
||||
fn f(&self, a: bool) {
|
||||
<A as T>::f(&self.a, a)
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -418,24 +418,15 @@ fn inline(
|
||||
let expr: &ast::Expr = expr;
|
||||
|
||||
let mut insert_let_stmt = || {
|
||||
let param_ty = match param_ty {
|
||||
None => None,
|
||||
Some(param_ty) => {
|
||||
if sema.hir_file_for(param_ty.syntax()).is_macro() {
|
||||
if let Some(param_ty) =
|
||||
ast::Type::cast(insert_ws_into(param_ty.syntax().clone()))
|
||||
{
|
||||
Some(param_ty)
|
||||
} else {
|
||||
Some(param_ty.clone_for_update())
|
||||
}
|
||||
} else {
|
||||
Some(param_ty.clone_for_update())
|
||||
}
|
||||
let param_ty = param_ty.clone().map(|param_ty| {
|
||||
if sema.hir_file_for(param_ty.syntax()).is_macro() {
|
||||
ast::Type::cast(insert_ws_into(param_ty.syntax().clone())).unwrap_or(param_ty)
|
||||
} else {
|
||||
param_ty
|
||||
}
|
||||
};
|
||||
let ty: Option<syntax::ast::Type> =
|
||||
sema.type_of_expr(expr).filter(TypeInfo::has_adjustment).and(param_ty);
|
||||
});
|
||||
|
||||
let ty = sema.type_of_expr(expr).filter(TypeInfo::has_adjustment).and(param_ty);
|
||||
|
||||
let is_self = param
|
||||
.name(sema.db)
|
||||
@ -1359,8 +1350,8 @@ macro_rules! define_foo {
|
||||
define_foo!();
|
||||
fn bar() -> u32 {
|
||||
{
|
||||
let x = 0;
|
||||
x
|
||||
let x = 0;
|
||||
x
|
||||
}
|
||||
}
|
||||
"#,
|
||||
@ -1673,7 +1664,7 @@ fn main() {
|
||||
let a: A = A{};
|
||||
let b = {
|
||||
let a = a;
|
||||
a as A
|
||||
a as A
|
||||
};
|
||||
}
|
||||
"#,
|
||||
@ -1792,7 +1783,7 @@ fn _hash2(self_: &u64, state: &mut u64) {
|
||||
{
|
||||
let inner_self_: &u64 = &self_;
|
||||
let state: &mut u64 = state;
|
||||
_write_u64(state, *inner_self_)
|
||||
_write_u64(state, *inner_self_)
|
||||
};
|
||||
}
|
||||
"#,
|
||||
|
@ -288,11 +288,11 @@ macro_rules! foo {
|
||||
}
|
||||
fn main() {
|
||||
cfg_if!{
|
||||
if #[cfg(test)]{
|
||||
1;
|
||||
}else {
|
||||
1;
|
||||
}
|
||||
if #[cfg(test)]{
|
||||
1;
|
||||
}else {
|
||||
1;
|
||||
}
|
||||
};
|
||||
}
|
||||
"#,
|
||||
|
@ -25,7 +25,7 @@ use crate::{
|
||||
// ```
|
||||
pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
|
||||
let source_file = ctx.find_node_at_offset::<ast::SourceFile>()?;
|
||||
let module = ctx.sema.to_module_def(ctx.file_id())?;
|
||||
let module = ctx.sema.file_to_module_def(ctx.file_id())?;
|
||||
// Enable this assist if the user select all "meaningful" content in the source file
|
||||
let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed());
|
||||
let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range());
|
||||
|
@ -25,7 +25,7 @@ use crate::{
|
||||
// ```
|
||||
pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
|
||||
let source_file = ctx.find_node_at_offset::<ast::SourceFile>()?;
|
||||
let module = ctx.sema.to_module_def(ctx.file_id())?;
|
||||
let module = ctx.sema.file_to_module_def(ctx.file_id())?;
|
||||
// Enable this assist if the user select all "meaningful" content in the source file
|
||||
let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed());
|
||||
let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range());
|
||||
|
@ -1,6 +1,4 @@
mod generated;
#[cfg(not(feature = "in-rust-tree"))]
mod sourcegen;

use expect_test::expect;
use hir::Semantics;

@ -23,7 +23,6 @@ smallvec.workspace = true
|
||||
# local deps
|
||||
base-db.workspace = true
|
||||
ide-db.workspace = true
|
||||
profile.workspace = true
|
||||
stdx.workspace = true
|
||||
syntax.workspace = true
|
||||
text-edit.workspace = true
|
||||
|
@ -1,6 +1,7 @@
|
||||
//! Completes identifiers in format string literals.
|
||||
|
||||
use ide_db::syntax_helpers::format_string::is_format_string;
|
||||
use hir::{ModuleDef, ScopeDef};
|
||||
use ide_db::{syntax_helpers::format_string::is_format_string, SymbolKind};
|
||||
use itertools::Itertools;
|
||||
use syntax::{ast, AstToken, TextRange, TextSize};
|
||||
|
||||
@ -33,7 +34,23 @@ pub(crate) fn format_string(
|
||||
ctx.locals.iter().for_each(|(name, _)| {
|
||||
CompletionItem::new(CompletionItemKind::Binding, source_range, name.to_smol_str())
|
||||
.add_to(acc, ctx.db);
|
||||
})
|
||||
});
|
||||
ctx.scope.process_all_names(&mut |name, scope| {
|
||||
if let ScopeDef::ModuleDef(module_def) = scope {
|
||||
let symbol_kind = match module_def {
|
||||
ModuleDef::Const(..) => SymbolKind::Const,
|
||||
ModuleDef::Static(..) => SymbolKind::Static,
|
||||
_ => return,
|
||||
};
|
||||
|
||||
CompletionItem::new(
|
||||
CompletionItemKind::SymbolKind(symbol_kind),
|
||||
source_range,
|
||||
name.to_smol_str(),
|
||||
)
|
||||
.add_to(acc, ctx.db);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@ -110,6 +127,80 @@ fn main() {
|
||||
let foobar = 1;
|
||||
format_args!("{foobar");
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn completes_constants() {
|
||||
check_edit(
|
||||
"FOOBAR",
|
||||
r#"
|
||||
//- minicore: fmt
|
||||
fn main() {
|
||||
const FOOBAR: usize = 42;
|
||||
format_args!("{f$0");
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
fn main() {
|
||||
const FOOBAR: usize = 42;
|
||||
format_args!("{FOOBAR");
|
||||
}
|
||||
"#,
|
||||
);
|
||||
|
||||
check_edit(
|
||||
"FOOBAR",
|
||||
r#"
|
||||
//- minicore: fmt
|
||||
fn main() {
|
||||
const FOOBAR: usize = 42;
|
||||
format_args!("{$0");
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
fn main() {
|
||||
const FOOBAR: usize = 42;
|
||||
format_args!("{FOOBAR");
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn completes_static_constants() {
|
||||
check_edit(
|
||||
"FOOBAR",
|
||||
r#"
|
||||
//- minicore: fmt
|
||||
fn main() {
|
||||
static FOOBAR: usize = 42;
|
||||
format_args!("{f$0");
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
fn main() {
|
||||
static FOOBAR: usize = 42;
|
||||
format_args!("{FOOBAR");
|
||||
}
|
||||
"#,
|
||||
);
|
||||
|
||||
check_edit(
|
||||
"FOOBAR",
|
||||
r#"
|
||||
//- minicore: fmt
|
||||
fn main() {
|
||||
static FOOBAR: usize = 42;
|
||||
format_args!("{$0");
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
fn main() {
|
||||
static FOOBAR: usize = 42;
|
||||
format_args!("{FOOBAR");
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
@ -258,7 +258,7 @@ pub(crate) fn complete_postfix(
}

fn get_receiver_text(receiver: &ast::Expr, receiver_is_ambiguous_float_literal: bool) -> String {
let text = if receiver_is_ambiguous_float_literal {
let mut text = if receiver_is_ambiguous_float_literal {
let text = receiver.syntax().text();
let without_dot = ..text.len() - TextSize::of('.');
text.slice(without_dot).to_string()
@ -267,12 +267,18 @@ fn get_receiver_text(receiver: &ast::Expr, receiver_is_ambiguous_float_literal:
};

// The receiver texts should be interpreted as-is, as they are expected to be
// normal Rust expressions. We escape '\' and '$' so they don't get treated as
// snippet-specific constructs.
//
// Note that we don't need to escape the other characters that can be escaped,
// because they wouldn't be treated as snippet-specific constructs without '$'.
text.replace('\\', "\\\\").replace('$', "\\$")
// normal Rust expressions.
escape_snippet_bits(&mut text);
text
}

/// Escapes `\` and `$` so that they don't get interpreted as snippet-specific constructs.
///
/// Note that we don't need to escape the other characters that can be escaped,
/// because they wouldn't be treated as snippet-specific constructs without '$'.
fn escape_snippet_bits(text: &mut String) {
stdx::replace(text, '\\', "\\\\");
stdx::replace(text, '$', "\\$");
}

fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) {

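The `escape_snippet_bits` helper factored out above exists because postfix completions emit snippet text, where `$` introduces placeholders and `\` is the escape character, so both must be escaped before user-written expression text is spliced in. A standalone sketch of the same transformation using plain `String::replace` (the diff itself uses an in-place `stdx::replace` helper on `&mut String`):

    /// Escape `\` and `$` so the text survives being embedded in a snippet.
    fn escape_snippet_bits(text: &str) -> String {
        // Escape backslashes first so the backslashes added for `$` are not
        // themselves doubled afterwards.
        text.replace('\\', "\\\\").replace('$', "\\$")
    }

    fn main() {
        assert_eq!(escape_snippet_bits(r"price_in_$"), r"price_in_\$");
        assert_eq!(escape_snippet_bits(r"a\b"), r"a\\b");
    }
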
@ -17,13 +17,15 @@
|
||||
// image::https://user-images.githubusercontent.com/48062697/113020656-b560f500-917a-11eb-87de-02991f61beb8.gif[]
|
||||
|
||||
use ide_db::{
|
||||
syntax_helpers::format_string_exprs::{parse_format_exprs, with_placeholders},
|
||||
syntax_helpers::format_string_exprs::{parse_format_exprs, with_placeholders, Arg},
|
||||
SnippetCap,
|
||||
};
|
||||
use syntax::{ast, AstToken};
|
||||
|
||||
use crate::{
|
||||
completions::postfix::build_postfix_snippet_builder, context::CompletionContext, Completions,
|
||||
completions::postfix::{build_postfix_snippet_builder, escape_snippet_bits},
|
||||
context::CompletionContext,
|
||||
Completions,
|
||||
};
|
||||
|
||||
/// Mapping ("postfix completion item" => "macro to use")
|
||||
@ -51,7 +53,15 @@ pub(crate) fn add_format_like_completions(
|
||||
None => return,
|
||||
};
|
||||
|
||||
if let Ok((out, exprs)) = parse_format_exprs(receiver_text.text()) {
|
||||
if let Ok((mut out, mut exprs)) = parse_format_exprs(receiver_text.text()) {
|
||||
// Escape any snippet bits in the out text and any of the exprs.
|
||||
escape_snippet_bits(&mut out);
|
||||
for arg in &mut exprs {
|
||||
if let Arg::Ident(text) | Arg::Expr(text) = arg {
|
||||
escape_snippet_bits(text)
|
||||
}
|
||||
}
|
||||
|
||||
let exprs = with_placeholders(exprs);
|
||||
for (label, macro_name) in KINDS {
|
||||
let snippet = if exprs.is_empty() {
|
||||
|
@ -44,13 +44,10 @@ line-index.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
expect-test = "1.4.0"
|
||||
oorandom = "11.1.3"
|
||||
xshell.workspace = true
|
||||
|
||||
# local deps
|
||||
test-utils.workspace = true
|
||||
test-fixture.workspace = true
|
||||
sourcegen.workspace = true
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
@ -11,7 +11,7 @@ use profile::{memory_usage, Bytes};
|
||||
use rustc_hash::FxHashSet;
|
||||
use triomphe::Arc;
|
||||
|
||||
use crate::{symbol_index::SymbolsDatabase, Change, RootDatabase};
|
||||
use crate::{symbol_index::SymbolsDatabase, ChangeWithProcMacros, RootDatabase};
|
||||
|
||||
impl RootDatabase {
|
||||
pub fn request_cancellation(&mut self) {
|
||||
@ -20,7 +20,7 @@ impl RootDatabase {
|
||||
self.synthetic_write(Durability::LOW);
|
||||
}
|
||||
|
||||
pub fn apply_change(&mut self, change: Change) {
|
||||
pub fn apply_change(&mut self, change: ChangeWithProcMacros) {
|
||||
let _p = tracing::span!(tracing::Level::INFO, "RootDatabase::apply_change").entered();
|
||||
self.request_cancellation();
|
||||
tracing::trace!("apply_change {:?}", change);
|
||||
@ -91,7 +91,6 @@ impl RootDatabase {
|
||||
crate::symbol_index::LocalRootsQuery
|
||||
crate::symbol_index::LibraryRootsQuery
|
||||
// HirDatabase
|
||||
hir::db::InferQueryQuery
|
||||
hir::db::MirBodyQuery
|
||||
hir::db::BorrowckQuery
|
||||
hir::db::TyQuery
|
||||
@ -130,12 +129,10 @@ impl RootDatabase {
|
||||
hir::db::FnDefVarianceQuery
|
||||
hir::db::AdtVarianceQuery
|
||||
hir::db::AssociatedTyValueQuery
|
||||
hir::db::TraitSolveQueryQuery
|
||||
hir::db::ProgramClausesForChalkEnvQuery
|
||||
|
||||
// DefDatabase
|
||||
hir::db::FileItemTreeQuery
|
||||
hir::db::CrateDefMapQueryQuery
|
||||
hir::db::BlockDefMapQuery
|
||||
hir::db::StructDataWithDiagnosticsQuery
|
||||
hir::db::UnionDataWithDiagnosticsQuery
|
||||
@ -165,7 +162,6 @@ impl RootDatabase {
|
||||
hir::db::FunctionVisibilityQuery
|
||||
hir::db::ConstVisibilityQuery
|
||||
hir::db::CrateSupportsNoStdQuery
|
||||
hir::db::BlockItemTreeQueryQuery
|
||||
hir::db::ExternCrateDeclDataQuery
|
||||
hir::db::InternAnonymousConstQuery
|
||||
hir::db::InternExternCrateQuery
|
||||
|
@ -22,6 +22,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
|
||||
description: r##"detects certain glob imports that require reporting an ambiguity error"##,
|
||||
},
|
||||
Lint { label: "ambiguous_glob_reexports", description: r##"ambiguous glob re-exports"## },
|
||||
Lint {
|
||||
label: "ambiguous_wide_pointer_comparisons",
|
||||
description: r##"detects ambiguous wide pointer comparisons"##,
|
||||
},
|
||||
Lint { label: "anonymous_parameters", description: r##"detects anonymous parameters"## },
|
||||
Lint { label: "arithmetic_overflow", description: r##"arithmetic operation overflows"## },
|
||||
Lint {
|
||||
@ -66,10 +70,6 @@ pub const DEFAULT_LINTS: &[Lint] = &[
|
||||
label: "coherence_leak_check",
|
||||
description: r##"distinct impls distinguished only by the leak-check code"##,
|
||||
},
|
||||
Lint {
|
||||
label: "coinductive_overlap_in_coherence",
|
||||
description: r##"impls that are not considered to overlap may be considered to overlap in the future"##,
|
||||
},
|
||||
Lint {
|
||||
label: "conflicting_repr_hints",
|
||||
description: r##"conflicts between `#[repr(..)]` hints that were previously accepted and used in practice"##,
|
||||
@ -86,10 +86,6 @@ pub const DEFAULT_LINTS: &[Lint] = &[
|
||||
label: "const_item_mutation",
|
||||
description: r##"detects attempts to mutate a `const` item"##,
|
||||
},
|
||||
Lint {
|
||||
label: "const_patterns_without_partial_eq",
|
||||
description: r##"constant in pattern does not implement `PartialEq`"##,
|
||||
},
|
||||
Lint { label: "dead_code", description: r##"detect unused, unexported items"## },
|
||||
Lint { label: "deprecated", description: r##"detects use of deprecated items"## },
|
||||
Lint {
|
||||
@ -176,7 +172,7 @@ pub const DEFAULT_LINTS: &[Lint] = &[
|
||||
},
|
||||
Lint {
|
||||
label: "future_incompatible",
|
||||
description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, coinductive-overlap-in-coherence, conflicting-repr-hints, const-evaluatable-unchecked, const-patterns-without-partial-eq, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, illegal-floating-point-literal-pattern, implied-bounds-entailment, indirect-structural-match, invalid-doc-attributes, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, nontrivial-structural-match, order-dependent-trait-objects, patterns-in-fns-without-body, pointer-structural-match, proc-macro-back-compat, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, semicolon-in-expressions-from-macros, soft-unstable, suspicious-auto-trait-impls, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, where-clauses-object-safety"##,
|
||||
description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, conflicting-repr-hints, const-evaluatable-unchecked, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, indirect-structural-match, invalid-doc-attributes, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, order-dependent-trait-objects, patterns-in-fns-without-body, pointer-structural-match, proc-macro-back-compat, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, semicolon-in-expressions-from-macros, soft-unstable, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, where-clauses-object-safety, writes-through-immutable-pointer"##,
|
||||
},
|
||||
Lint {
|
||||
label: "fuzzy_provenance_casts",
|
||||
@ -190,14 +186,6 @@ pub const DEFAULT_LINTS: &[Lint] = &[
|
||||
label: "ill_formed_attribute_input",
|
||||
description: r##"ill-formed attribute inputs that were previously accepted and used in practice"##,
|
||||
},
|
||||
Lint {
|
||||
label: "illegal_floating_point_literal_pattern",
|
||||
description: r##"floating-point literals cannot be used in patterns"##,
|
||||
},
|
||||
Lint {
|
||||
label: "implied_bounds_entailment",
|
||||
description: r##"impl method assumes more implied bounds than its corresponding trait method"##,
|
||||
},
|
||||
Lint {
|
||||
label: "improper_ctypes",
|
||||
description: r##"proper use of libc types in foreign modules"##,
|
||||
@ -372,6 +360,7 @@ pub const DEFAULT_LINTS: &[Lint] = &[
|
||||
label: "non_fmt_panics",
|
||||
description: r##"detect single-argument panic!() invocations in which the argument is not a format string"##,
|
||||
},
|
||||
Lint { label: "non_local_definitions", description: r##"checks for non-local definitions"## },
|
||||
Lint {
|
||||
label: "non_shorthand_field_patterns",
|
||||
description: r##"using `Struct { x: x }` instead of `Struct { x }` in a pattern"##,
|
||||
@ -388,10 +377,6 @@ pub const DEFAULT_LINTS: &[Lint] = &[
|
||||
label: "nonstandard_style",
|
||||
description: r##"lint group for: non-camel-case-types, non-snake-case, non-upper-case-globals"##,
|
||||
},
|
||||
Lint {
|
||||
label: "nontrivial_structural_match",
|
||||
description: r##"constant used in pattern of non-structural-match type and the constant's initializer expression contains values of non-structural-match types"##,
|
||||
},
|
||||
Lint {
|
||||
label: "noop_method_call",
|
||||
description: r##"detects the use of well-known noop methods"##,
|
||||
@ -482,6 +467,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
|
||||
label: "rust_2021_prelude_collisions",
|
||||
description: r##"detects the usage of trait methods which are ambiguous with traits added to the prelude in future editions"##,
|
||||
},
|
||||
Lint {
|
||||
label: "rust_2024_compatibility",
|
||||
description: r##"lint group for: static-mut-refs, unsafe-op-in-unsafe-fn"##,
|
||||
},
|
||||
Lint {
|
||||
label: "semicolon_in_expressions_from_macros",
|
||||
description: r##"trailing semicolon in macro body used as expression"##,
|
||||
@ -502,6 +491,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
|
||||
label: "stable_features",
|
||||
description: r##"stable features found in `#[feature]` directive"##,
|
||||
},
|
||||
Lint {
|
||||
label: "static_mut_refs",
|
||||
description: r##"shared references or mutable references of mutable static is discouraged"##,
|
||||
},
|
||||
Lint {
|
||||
label: "suspicious_double_ref_op",
|
||||
description: r##"suspicious call of trait method on `&&T`"##,
|
||||
@ -575,6 +568,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
|
||||
description: r##"enabling track_caller on an async fn is a no-op unless the async_fn_track_caller feature is enabled"##,
|
||||
},
|
||||
Lint { label: "uninhabited_static", description: r##"uninhabited static"## },
|
||||
Lint {
|
||||
label: "unit_bindings",
|
||||
description: r##"binding is useless because it has the unit `()` type"##,
|
||||
},
|
||||
Lint {
|
||||
label: "unknown_crate_types",
|
||||
description: r##"unknown crate type found in `#[crate_type]` directive"##,
|
||||
@ -606,10 +603,7 @@ pub const DEFAULT_LINTS: &[Lint] = &[
|
||||
label: "unsafe_op_in_unsafe_fn",
|
||||
description: r##"unsafe operations in unsafe functions without an explicit unsafe block are deprecated"##,
|
||||
},
|
||||
Lint {
|
||||
label: "unstable_features",
|
||||
description: r##"enabling unstable features (deprecated. do not use)"##,
|
||||
},
|
||||
Lint { label: "unstable_features", description: r##"enabling unstable features"## },
|
||||
Lint {
|
||||
label: "unstable_name_collisions",
|
||||
description: r##"detects name collision with an existing but unstable method"##,
|
||||
@ -695,10 +689,6 @@ pub const DEFAULT_LINTS: &[Lint] = &[
|
||||
label: "unused_results",
|
||||
description: r##"unused result of an expression in a statement"##,
|
||||
},
|
||||
Lint {
|
||||
label: "unused_tuple_struct_fields",
|
||||
description: r##"detects tuple struct fields that are never read"##,
|
||||
},
|
||||
Lint { label: "unused_unsafe", description: r##"unnecessary use of an `unsafe` block"## },
|
||||
Lint {
|
||||
label: "unused_variables",
|
||||
@ -732,13 +722,17 @@ pub const DEFAULT_LINTS: &[Lint] = &[
|
||||
label: "while_true",
|
||||
description: r##"suggest using `loop { }` instead of `while true { }`"##,
|
||||
},
|
||||
Lint {
|
||||
label: "writes_through_immutable_pointer",
|
||||
description: r##"shared references are immutable, and pointers derived from them must not be written to"##,
|
||||
},
|
||||
];
|
||||
|
||||
pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
|
||||
LintGroup {
|
||||
lint: Lint {
|
||||
label: "future_incompatible",
|
||||
description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, coinductive-overlap-in-coherence, conflicting-repr-hints, const-evaluatable-unchecked, const-patterns-without-partial-eq, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, illegal-floating-point-literal-pattern, implied-bounds-entailment, indirect-structural-match, invalid-doc-attributes, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, nontrivial-structural-match, order-dependent-trait-objects, patterns-in-fns-without-body, pointer-structural-match, proc-macro-back-compat, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, semicolon-in-expressions-from-macros, soft-unstable, suspicious-auto-trait-impls, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, where-clauses-object-safety"##,
|
||||
description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, conflicting-repr-hints, const-evaluatable-unchecked, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, indirect-structural-match, invalid-doc-attributes, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, order-dependent-trait-objects, patterns-in-fns-without-body, pointer-structural-match, proc-macro-back-compat, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, semicolon-in-expressions-from-macros, soft-unstable, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, where-clauses-object-safety, writes-through-immutable-pointer"##,
|
||||
},
|
||||
children: &[
|
||||
"deref_into_dyn_supertrait",
|
||||
@ -747,16 +741,12 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
|
||||
"byte_slice_in_packed_struct_with_derive",
|
||||
"cenum_impl_drop_cast",
|
||||
"coherence_leak_check",
|
||||
"coinductive_overlap_in_coherence",
|
||||
"conflicting_repr_hints",
|
||||
"const_evaluatable_unchecked",
|
||||
"const_patterns_without_partial_eq",
|
||||
"deprecated_cfg_attr_crate_type_name",
|
||||
"elided_lifetimes_in_associated_constant",
|
||||
"forbidden_lint_groups",
|
||||
"ill_formed_attribute_input",
|
||||
"illegal_floating_point_literal_pattern",
|
||||
"implied_bounds_entailment",
|
||||
"indirect_structural_match",
|
||||
"invalid_doc_attributes",
|
||||
"invalid_type_param_default",
|
||||
@ -764,7 +754,6 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
|
||||
"legacy_derive_helpers",
|
||||
"macro_expanded_macro_exports_accessed_by_absolute_paths",
|
||||
"missing_fragment_specifier",
|
||||
"nontrivial_structural_match",
|
||||
"order_dependent_trait_objects",
|
||||
"patterns_in_fns_without_body",
|
||||
"pointer_structural_match",
|
||||
@ -779,6 +768,7 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
|
||||
"unstable_syntax_pre_expansion",
|
||||
"unsupported_calling_conventions",
|
||||
"where_clauses_object_safety",
|
||||
"writes_through_immutable_pointer",
|
||||
],
|
||||
},
|
||||
LintGroup {
|
||||
@ -836,6 +826,13 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
|
||||
"non_fmt_panics",
|
||||
],
|
||||
},
|
||||
LintGroup {
|
||||
lint: Lint {
|
||||
label: "rust_2024_compatibility",
|
||||
description: r##"lint group for: static-mut-refs, unsafe-op-in-unsafe-fn"##,
|
||||
},
|
||||
children: &["static_mut_refs", "unsafe_op_in_unsafe_fn"],
|
||||
},
|
||||
LintGroup {
|
||||
lint: Lint {
|
||||
label: "unused",
|
||||
@ -1730,9 +1727,17 @@ The tracking issue for this feature is: [#110011]
|
||||
label: "async_fn_traits",
|
||||
description: r##"# `async_fn_traits`
|
||||
|
||||
This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
|
||||
See Also: [`fn_traits`](../library-features/fn-traits.md)
|
||||
|
||||
------------------------
|
||||
----
|
||||
|
||||
The `async_fn_traits` feature allows for implementation of the [`AsyncFn*`] traits
|
||||
for creating custom closure-like types that return futures.
|
||||
|
||||
[`AsyncFn*`]: ../../std/ops/trait.AsyncFn.html
|
||||
|
||||
The main difference to the `Fn*` family of traits is that `AsyncFn` can return a future
|
||||
that borrows from itself (`FnOnce::Output` has no lifetime parameters, while `AsyncFn::CallFuture` does).
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
@ -2372,17 +2377,6 @@ The tracking issue for this feature is: [#89653]
|
||||
|
||||
[#89653]: https://github.com/rust-lang/rust/issues/89653
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "cfg_target_abi",
|
||||
description: r##"# `cfg_target_abi`
|
||||
|
||||
The tracking issue for this feature is: [#80970]
|
||||
|
||||
[#80970]: https://github.com/rust-lang/rust/issues/80970
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -3128,6 +3122,17 @@ The tracking issue for this feature is: [#90603]
|
||||
|
||||
This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "const_intrinsic_copy",
|
||||
description: r##"# `const_intrinsic_copy`
|
||||
|
||||
The tracking issue for this feature is: [#80697]
|
||||
|
||||
[#80697]: https://github.com/rust-lang/rust/issues/80697
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -3296,6 +3301,17 @@ The tracking issue for this feature is: [#110840]
|
||||
|
||||
[#110840]: https://github.com/rust-lang/rust/issues/110840
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "const_ops",
|
||||
description: r##"# `const_ops`
|
||||
|
||||
The tracking issue for this feature is: [#90080]
|
||||
|
||||
[#90080]: https://github.com/rust-lang/rust/issues/90080
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -3439,6 +3455,17 @@ The tracking issue for this feature is: [#80384]
|
||||
|
||||
[#80384]: https://github.com/rust-lang/rust/issues/80384
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "const_refs_to_static",
|
||||
description: r##"# `const_refs_to_static`
|
||||
|
||||
The tracking issue for this feature is: [#119618]
|
||||
|
||||
[#119618]: https://github.com/rust-lang/rust/issues/119618
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -4251,6 +4278,15 @@ The tracking issue for this feature is: [#27336]
|
||||
|
||||
[#27336]: https://github.com/rust-lang/rust/issues/27336
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "delayed_debug_assertions",
|
||||
description: r##"# `delayed_debug_assertions`
|
||||
|
||||
This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -4632,6 +4668,19 @@ The tracking issue for this feature is: [#57391]
|
||||
[#57391]: https://github.com/rust-lang/rust/issues/57391
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "duration_constructors",
|
||||
description: r##"# `duration_constructors`
|
||||
|
||||
The tracking issue for this feature is: [#120301]
|
||||
|
||||
[#120301]: https://github.com/rust-lang/rust/issues/120301
|
||||
|
||||
------------------------
|
||||
|
||||
Add the methods `from_mins`, `from_hours` and `from_days` to `Duration`.
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
@ -4642,6 +4691,17 @@ The tracking issue for this feature is: [#72440]
|
||||
|
||||
[#72440]: https://github.com/rust-lang/rust/issues/72440
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "duration_units",
|
||||
description: r##"# `duration_units`
|
||||
|
||||
The tracking issue for this feature is: [#120301]
|
||||
|
||||
[#120301]: https://github.com/rust-lang/rust/issues/120301
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -5654,13 +5714,62 @@ raw pointers in intra-doc links are unstable until it does.
|
||||
|
||||
The tracking issue for this feature is: None.
|
||||
|
||||
Intrinsics are never intended to be stable directly, but intrinsics are often
|
||||
Intrinsics are rarely intended to be stable directly, but are usually
|
||||
exported in some sort of stable manner. Prefer using the stable interfaces to
|
||||
the intrinsic directly when you can.
|
||||
|
||||
------------------------
|
||||
|
||||
|
||||
## Intrinsics with fallback logic
|
||||
|
||||
Many intrinsics can be written in pure rust, albeit inefficiently or without supporting
|
||||
some features that only exist on some backends. Backends can simply not implement those
|
||||
intrinsics without causing any code miscompilations or failures to compile.
|
||||
All intrinsic fallback bodies are automatically made cross-crate inlineable (like `#[inline]`)
|
||||
by the codegen backend, but not the MIR inliner.
|
||||
|
||||
```rust
|
||||
#![feature(rustc_attrs, effects)]
|
||||
#![allow(internal_features)]
|
||||
|
||||
#[rustc_intrinsic]
|
||||
const unsafe fn const_deallocate(_ptr: *mut u8, _size: usize, _align: usize) {}
|
||||
```
|
||||
|
||||
Since these are just regular functions, it is perfectly ok to create the intrinsic twice:
|
||||
|
||||
```rust
|
||||
#![feature(rustc_attrs, effects)]
|
||||
#![allow(internal_features)]
|
||||
|
||||
#[rustc_intrinsic]
|
||||
const unsafe fn const_deallocate(_ptr: *mut u8, _size: usize, _align: usize) {}
|
||||
|
||||
mod foo {
|
||||
#[rustc_intrinsic]
|
||||
const unsafe fn const_deallocate(_ptr: *mut u8, _size: usize, _align: usize) {
|
||||
panic!("noisy const dealloc")
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
The behaviour on backends that override the intrinsic is exactly the same. On other
|
||||
backends, the intrinsic behaviour depends on which implementation is called, just like
|
||||
with any regular function.
|
||||
|
||||
## Intrinsics lowered to MIR instructions
|
||||
|
||||
Various intrinsics have native MIR operations that they correspond to. Instead of requiring
|
||||
backends to implement both the intrinsic and the MIR operation, the `lower_intrinsics` pass
|
||||
will convert the calls to the MIR operation. Backends do not need to know about these intrinsics
|
||||
at all.
|
||||
|
||||
## Intrinsics without fallback logic
|
||||
|
||||
These must be implemented by all backends.
|
||||
|
||||
These are imported as if they were FFI functions, with the special
|
||||
`rust-intrinsic` ABI. For example, if one was in a freestanding
|
||||
context, but wished to be able to `transmute` between types, and
|
||||
@ -5679,7 +5788,8 @@ extern "rust-intrinsic" {
|
||||
}
|
||||
```
|
||||
|
||||
As with any other FFI functions, these are always `unsafe` to call.
|
||||
As with any other FFI functions, these are by default always `unsafe` to call.
|
||||
You can add `#[rustc_safe_intrinsic]` to the intrinsic to make it safe to call.
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
@ -5754,6 +5864,17 @@ The tracking issue for this feature is: [#101288]
|
||||
|
||||
[#101288]: https://github.com/rust-lang/rust/issues/101288
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "is_riscv_feature_detected",
|
||||
description: r##"# `is_riscv_feature_detected`
|
||||
|
||||
The tracking issue for this feature is: [#111192]
|
||||
|
||||
[#111192]: https://github.com/rust-lang/rust/issues/111192
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -5932,6 +6053,17 @@ The tracking issue for this feature is: [#87053]
|
||||
|
||||
[#87053]: https://github.com/rust-lang/rust/issues/87053
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "lahfsahf_target_feature",
|
||||
description: r##"# `lahfsahf_target_feature`
|
||||
|
||||
The tracking issue for this feature is: [#44839]
|
||||
|
||||
[#44839]: https://github.com/rust-lang/rust/issues/44839
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -6255,6 +6387,17 @@ The tracking issue for this feature is: [#82971]
|
||||
|
||||
[#82971]: https://github.com/rust-lang/rust/issues/82971
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "local_waker",
|
||||
description: r##"# `local_waker`
|
||||
|
||||
The tracking issue for this feature is: [#118959]
|
||||
|
||||
[#118959]: https://github.com/rust-lang/rust/issues/118959
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -6321,6 +6464,17 @@ The tracking issue for this feature is: [#82766]
|
||||
|
||||
[#82766]: https://github.com/rust-lang/rust/issues/82766
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "mapped_lock_guards",
|
||||
description: r##"# `mapped_lock_guards`
|
||||
|
||||
The tracking issue for this feature is: [#117108]
|
||||
|
||||
[#117108]: https://github.com/rust-lang/rust/issues/117108
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -6534,17 +6688,6 @@ The tracking issue for this feature is: [#83310]
|
||||
|
||||
[#83310]: https://github.com/rust-lang/rust/issues/83310
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "mutex_unlock",
|
||||
description: r##"# `mutex_unlock`
|
||||
|
||||
The tracking issue for this feature is: [#81872]
|
||||
|
||||
[#81872]: https://github.com/rust-lang/rust/issues/81872
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -6972,6 +7115,17 @@ The tracking issue for this feature is: [#70086]
|
||||
|
||||
[#70086]: https://github.com/rust-lang/rust/issues/70086
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "os_str_display",
|
||||
description: r##"# `os_str_display`
|
||||
|
||||
The tracking issue for this feature is: [#120048]
|
||||
|
||||
[#120048]: https://github.com/rust-lang/rust/issues/120048
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -7102,6 +7256,15 @@ The tracking issue for this feature is: [#27721]
|
||||
|
||||
[#27721]: https://github.com/rust-lang/rust/issues/27721
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "pattern_complexity",
|
||||
description: r##"# `pattern_complexity`
|
||||
|
||||
This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -7124,17 +7287,6 @@ The tracking issue for this feature is: [#86918]
|
||||
|
||||
[#86918]: https://github.com/rust-lang/rust/issues/86918
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "platform_intrinsics",
|
||||
description: r##"# `platform_intrinsics`
|
||||
|
||||
The tracking issue for this feature is: [#27731]
|
||||
|
||||
[#27731]: https://github.com/rust-lang/rust/issues/27731
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -7184,7 +7336,9 @@ The tracking issue for this feature is: [#44839]
|
||||
label: "prelude_2024",
|
||||
description: r##"# `prelude_2024`
|
||||
|
||||
This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
|
||||
The tracking issue for this feature is: [#121042]
|
||||
|
||||
[#121042]: https://github.com/rust-lang/rust/issues/121042
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
@ -7195,6 +7349,17 @@ This feature has no tracking issue, and is therefore likely internal to the comp
|
||||
|
||||
This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "prfchw_target_feature",
|
||||
description: r##"# `prfchw_target_feature`
|
||||
|
||||
The tracking issue for this feature is: [#44839]
|
||||
|
||||
[#44839]: https://github.com/rust-lang/rust/issues/44839
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -7507,6 +7672,17 @@ The tracking issue for this feature is: [#101196]
|
||||
|
||||
This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "reentrant_lock",
|
||||
description: r##"# `reentrant_lock`
|
||||
|
||||
The tracking issue for this feature is: [#121440]
|
||||
|
||||
[#121440]: https://github.com/rust-lang/rust/issues/121440
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -8177,6 +8353,39 @@ fn start(_argc: isize, _argv: *const *const u8) -> isize {
|
||||
|
||||
This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "stdarch_arm_feature_detection",
|
||||
description: r##"# `stdarch_arm_feature_detection`
|
||||
|
||||
The tracking issue for this feature is: [#111190]
|
||||
|
||||
[#111190]: https://github.com/rust-lang/rust/issues/111190
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "stdarch_mips_feature_detection",
|
||||
description: r##"# `stdarch_mips_feature_detection`
|
||||
|
||||
The tracking issue for this feature is: [#111188]
|
||||
|
||||
[#111188]: https://github.com/rust-lang/rust/issues/111188
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "stdarch_powerpc_feature_detection",
|
||||
description: r##"# `stdarch_powerpc_feature_detection`
|
||||
|
||||
The tracking issue for this feature is: [#111191]
|
||||
|
||||
[#111191]: https://github.com/rust-lang/rust/issues/111191
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -8188,17 +8397,6 @@ The tracking issue for this feature is: [#98288]
|
||||
|
||||
[#98288]: https://github.com/rust-lang/rust/issues/98288
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "stdsimd",
|
||||
description: r##"# `stdsimd`
|
||||
|
||||
The tracking issue for this feature is: [#48556]
|
||||
|
||||
[#48556]: https://github.com/rust-lang/rust/issues/48556
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -8459,6 +8657,17 @@ The tracking issue for this feature is: [#44839]
|
||||
|
||||
[#44839]: https://github.com/rust-lang/rust/issues/44839
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
Lint {
|
||||
label: "tcp_deferaccept",
|
||||
description: r##"# `tcp_deferaccept`
|
||||
|
||||
The tracking issue for this feature is: [#119639]
|
||||
|
||||
[#119639]: https://github.com/rust-lang/rust/issues/119639
|
||||
|
||||
------------------------
|
||||
"##,
|
||||
},
|
||||
@ -10151,7 +10360,7 @@ table:
|
||||
},
|
||||
Lint {
|
||||
label: "clippy::blocks_in_conditions",
|
||||
description: r##"Checks for `if` conditions that use blocks containing an
|
||||
description: r##"Checks for `if` and `match` conditions that use blocks containing an
|
||||
expression, statements or conditions that use closures with blocks."##,
|
||||
},
|
||||
Lint {
|
||||
@ -10453,6 +10662,12 @@ See [RFC0212](https://github.com/rust-lang/rfcs/blob/master/text/0212-restore-in
|
||||
label: "clippy::deprecated_cfg_attr",
|
||||
description: r##"Checks for `#[cfg_attr(rustfmt, rustfmt_skip)]` and suggests to replace it
|
||||
with `#[rustfmt::skip]`."##,
|
||||
},
|
||||
Lint {
|
||||
label: "clippy::deprecated_clippy_cfg_attr",
|
||||
description: r##"Checks for `#[cfg_attr(feature = cargo-clippy, ...)]` and for
|
||||
`#[cfg(feature = cargo-clippy)]` and suggests to replace it with
|
||||
`#[cfg_attr(clippy, ...)]` or `#[cfg(clippy)]`."##,
|
||||
},
|
||||
Lint {
|
||||
label: "clippy::deprecated_semver",
|
||||
@ -10596,6 +10811,7 @@ eagerly (e.g. using `bool::then_some`)."##,
|
||||
description: r##"Checks for usage of if expressions with an `else if` branch,
|
||||
but without a final `else` branch."##,
|
||||
},
|
||||
Lint { label: "clippy::empty_docs", description: r##"Detects documentation that is empty."## },
|
||||
Lint {
|
||||
label: "clippy::empty_drop",
|
||||
description: r##"Checks for empty `Drop` implementations."##,
|
||||
@ -11352,6 +11568,7 @@ cannot be represented as the underlying type without loss."##,
|
||||
description: r##"Checks for usage of `std::mem::size_of::<T>() * 8` when
|
||||
`T::BITS` is available."##,
|
||||
},
|
||||
Lint { label: "clippy::manual_c_str_literals", description: r##""## },
|
||||
Lint {
|
||||
label: "clippy::manual_clamp",
|
||||
description: r##"Identifies good opportunities for a clamp function from std or core, and suggests using it."##,
|
||||
@ -11726,6 +11943,10 @@ rather than globally."##,
|
||||
label: "clippy::mistyped_literal_suffixes",
|
||||
description: r##"Warns for mistyped suffix in literals"##,
|
||||
},
|
||||
Lint {
|
||||
label: "clippy::mixed_attributes_style",
|
||||
description: r##"Checks that an item has only one kind of attributes."##,
|
||||
},
|
||||
Lint {
|
||||
label: "clippy::mixed_case_hex_literals",
|
||||
description: r##"Warns on hexadecimal literals with mixed-case letter
|
||||
@ -11758,6 +11979,10 @@ containing module's name."##,
|
||||
one."##,
|
||||
},
|
||||
Lint { label: "clippy::multi_assignments", description: r##"Checks for nested assignments."## },
|
||||
Lint {
|
||||
label: "clippy::multiple_bound_locations",
|
||||
description: r##"Check if a generic is defined both in the bound predicate and in the `where` clause."##,
|
||||
},
|
||||
Lint {
|
||||
label: "clippy::multiple_crate_versions",
|
||||
description: r##"Checks to see if multiple versions of a crate are being
|
||||
@ -12331,8 +12556,8 @@ in `vec![elem; len]`"##,
|
||||
Lint {
|
||||
label: "clippy::read_line_without_trim",
|
||||
description: r##"Looks for calls to [`Stdin::read_line`] to read a line from the standard input
|
||||
into a string, then later attempting to parse this string into a type without first trimming it, which will
|
||||
always fail because the string has a trailing newline in it."##,
|
||||
into a string, then later attempting to use that string for an operation that will never
|
||||
work for strings with a trailing newline character in it (e.g. parsing into a `i32`)."##,
|
||||
},
|
||||
Lint {
|
||||
label: "clippy::read_zero_byte_vec",
|
||||
@ -12439,6 +12664,11 @@ do not change the type."##,
|
||||
label: "clippy::redundant_type_annotations",
|
||||
description: r##"Warns about needless / redundant type annotations."##,
|
||||
},
|
||||
Lint {
|
||||
label: "clippy::ref_as_ptr",
|
||||
description: r##"Checks for casts of references to pointer using `as`
|
||||
and suggests `std::ptr::from_ref` and `std::ptr::from_mut` instead."##,
|
||||
},
|
||||
Lint {
|
||||
label: "clippy::ref_binding_to_reference",
|
||||
description: r##"Checks for `ref` bindings which create a reference to a reference."##,
|
||||
@ -13090,6 +13320,11 @@ as returning a large `T` directly may be detrimental to performance."##,
|
||||
label: "clippy::unnecessary_cast",
|
||||
description: r##"Checks for casts to the same type, casts of int literals to integer types, casts of float
|
||||
literals to float types and casts between raw pointers without changing type or constness."##,
|
||||
},
|
||||
Lint {
|
||||
label: "clippy::unnecessary_clippy_cfg",
|
||||
description: r##"Checks for `#[cfg_attr(clippy, allow(clippy::lint))]`
|
||||
and suggests to replace it with `#[allow(clippy::lint)]`."##,
|
||||
},
|
||||
Lint {
|
||||
label: "clippy::unnecessary_fallible_conversions",
|
||||
@ -13114,6 +13349,10 @@ find or map operations and suggests the appropriate option."##,
|
||||
Specifically, this checks for `fold`s which could be replaced by `any`, `all`,
|
||||
`sum` or `product`."##,
|
||||
},
|
||||
Lint {
|
||||
label: "clippy::unnecessary_get_then_check",
|
||||
description: r##"Checks the usage of `.get().is_some()` or `.get().is_none()` on std map types."##,
|
||||
},
|
||||
Lint {
|
||||
label: "clippy::unnecessary_join",
|
||||
description: r##"Checks for usage of `.collect::<Vec<String>>().join()` on iterators."##,
|
||||
@ -13825,7 +14064,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
|
||||
LintGroup {
|
||||
lint: Lint {
|
||||
label: "clippy::pedantic",
|
||||
description: r##"lint group for: clippy::bool_to_int_with_if, clippy::borrow_as_ptr, clippy::case_sensitive_file_extension_comparisons, clippy::cast_lossless, clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::cast_precision_loss, clippy::cast_ptr_alignment, clippy::cast_sign_loss, clippy::checked_conversions, clippy::cloned_instead_of_copied, clippy::copy_iterator, clippy::default_trait_access, clippy::doc_link_with_quotes, clippy::doc_markdown, clippy::empty_enum, clippy::enum_glob_use, clippy::expl_impl_clone_on_copy, clippy::explicit_deref_methods, clippy::explicit_into_iter_loop, clippy::explicit_iter_loop, clippy::filter_map_next, clippy::flat_map_option, clippy::float_cmp, clippy::fn_params_excessive_bools, clippy::from_iter_instead_of_collect, clippy::if_not_else, clippy::ignored_unit_patterns, clippy::implicit_clone, clippy::implicit_hasher, clippy::inconsistent_struct_constructor, clippy::index_refutable_slice, clippy::inefficient_to_string, clippy::inline_always, clippy::into_iter_without_iter, clippy::invalid_upcast_comparisons, clippy::items_after_statements, clippy::iter_filter_is_ok, clippy::iter_filter_is_some, clippy::iter_not_returning_iterator, clippy::iter_without_into_iter, clippy::large_digit_groups, clippy::large_futures, clippy::large_stack_arrays, clippy::large_types_passed_by_value, clippy::linkedlist, clippy::macro_use_imports, clippy::manual_assert, clippy::manual_instant_elapsed, clippy::manual_is_variant_and, clippy::manual_let_else, clippy::manual_ok_or, clippy::manual_string_new, clippy::many_single_char_names, clippy::map_unwrap_or, clippy::match_bool, clippy::match_on_vec_items, clippy::match_same_arms, clippy::match_wild_err_arm, clippy::match_wildcard_for_single_variants, clippy::maybe_infinite_iter, clippy::mismatching_type_param_order, clippy::missing_errors_doc, clippy::missing_fields_in_debug, clippy::missing_panics_doc, clippy::module_name_repetitions, clippy::must_use_candidate, clippy::mut_mut, clippy::naive_bytecount, clippy::needless_bitwise_bool, clippy::needless_continue, clippy::needless_for_each, clippy::needless_pass_by_value, clippy::needless_raw_string_hashes, clippy::no_effect_underscore_binding, clippy::no_mangle_with_rust_abi, clippy::option_as_ref_cloned, clippy::option_option, clippy::ptr_as_ptr, clippy::ptr_cast_constness, clippy::pub_underscore_fields, clippy::range_minus_one, clippy::range_plus_one, clippy::redundant_closure_for_method_calls, clippy::redundant_else, clippy::ref_binding_to_reference, clippy::ref_option_ref, clippy::return_self_not_must_use, clippy::same_functions_in_if_condition, clippy::semicolon_if_nothing_returned, clippy::should_panic_without_expect, clippy::similar_names, clippy::single_match_else, clippy::stable_sort_primitive, clippy::str_split_at_newline, clippy::string_add_assign, clippy::struct_excessive_bools, clippy::struct_field_names, clippy::too_many_lines, clippy::transmute_ptr_to_ptr, clippy::trivially_copy_pass_by_ref, clippy::unchecked_duration_subtraction, clippy::unicode_not_nfc, clippy::uninlined_format_args, clippy::unnecessary_box_returns, clippy::unnecessary_join, clippy::unnecessary_wraps, clippy::unnested_or_patterns, clippy::unreadable_literal, clippy::unsafe_derive_deserialize, clippy::unused_async, clippy::unused_self, clippy::used_underscore_binding, clippy::verbose_bit_mask, clippy::wildcard_imports, clippy::zero_sized_map_values"##,
|
||||
description: r##"lint group for: clippy::bool_to_int_with_if, clippy::borrow_as_ptr, clippy::case_sensitive_file_extension_comparisons, clippy::cast_lossless, clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::cast_precision_loss, clippy::cast_ptr_alignment, clippy::cast_sign_loss, clippy::checked_conversions, clippy::cloned_instead_of_copied, clippy::copy_iterator, clippy::default_trait_access, clippy::doc_link_with_quotes, clippy::doc_markdown, clippy::empty_enum, clippy::enum_glob_use, clippy::expl_impl_clone_on_copy, clippy::explicit_deref_methods, clippy::explicit_into_iter_loop, clippy::explicit_iter_loop, clippy::filter_map_next, clippy::flat_map_option, clippy::float_cmp, clippy::fn_params_excessive_bools, clippy::from_iter_instead_of_collect, clippy::if_not_else, clippy::ignored_unit_patterns, clippy::implicit_clone, clippy::implicit_hasher, clippy::inconsistent_struct_constructor, clippy::index_refutable_slice, clippy::inefficient_to_string, clippy::inline_always, clippy::into_iter_without_iter, clippy::invalid_upcast_comparisons, clippy::items_after_statements, clippy::iter_filter_is_ok, clippy::iter_filter_is_some, clippy::iter_not_returning_iterator, clippy::iter_without_into_iter, clippy::large_digit_groups, clippy::large_futures, clippy::large_stack_arrays, clippy::large_types_passed_by_value, clippy::linkedlist, clippy::macro_use_imports, clippy::manual_assert, clippy::manual_c_str_literals, clippy::manual_instant_elapsed, clippy::manual_is_variant_and, clippy::manual_let_else, clippy::manual_ok_or, clippy::manual_string_new, clippy::many_single_char_names, clippy::map_unwrap_or, clippy::match_bool, clippy::match_on_vec_items, clippy::match_same_arms, clippy::match_wild_err_arm, clippy::match_wildcard_for_single_variants, clippy::maybe_infinite_iter, clippy::mismatching_type_param_order, clippy::missing_errors_doc, clippy::missing_fields_in_debug, clippy::missing_panics_doc, clippy::module_name_repetitions, clippy::must_use_candidate, clippy::mut_mut, clippy::naive_bytecount, clippy::needless_bitwise_bool, clippy::needless_continue, clippy::needless_for_each, clippy::needless_pass_by_value, clippy::needless_raw_string_hashes, clippy::no_effect_underscore_binding, clippy::no_mangle_with_rust_abi, clippy::option_as_ref_cloned, clippy::option_option, clippy::ptr_as_ptr, clippy::ptr_cast_constness, clippy::pub_underscore_fields, clippy::range_minus_one, clippy::range_plus_one, clippy::redundant_closure_for_method_calls, clippy::redundant_else, clippy::ref_as_ptr, clippy::ref_binding_to_reference, clippy::ref_option_ref, clippy::return_self_not_must_use, clippy::same_functions_in_if_condition, clippy::semicolon_if_nothing_returned, clippy::should_panic_without_expect, clippy::similar_names, clippy::single_match_else, clippy::stable_sort_primitive, clippy::str_split_at_newline, clippy::string_add_assign, clippy::struct_excessive_bools, clippy::struct_field_names, clippy::too_many_lines, clippy::transmute_ptr_to_ptr, clippy::trivially_copy_pass_by_ref, clippy::unchecked_duration_subtraction, clippy::unicode_not_nfc, clippy::uninlined_format_args, clippy::unnecessary_box_returns, clippy::unnecessary_join, clippy::unnecessary_wraps, clippy::unnested_or_patterns, clippy::unreadable_literal, clippy::unsafe_derive_deserialize, clippy::unused_async, clippy::unused_self, clippy::used_underscore_binding, clippy::verbose_bit_mask, clippy::wildcard_imports, clippy::zero_sized_map_values"##,
|
||||
},
|
||||
children: &[
|
||||
"clippy::bool_to_int_with_if",
|
||||
@ -13876,6 +14115,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
|
||||
"clippy::linkedlist",
|
||||
"clippy::macro_use_imports",
|
||||
"clippy::manual_assert",
|
||||
"clippy::manual_c_str_literals",
|
||||
"clippy::manual_instant_elapsed",
|
||||
"clippy::manual_is_variant_and",
|
||||
"clippy::manual_let_else",
|
||||
@ -13913,6 +14153,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
|
||||
"clippy::range_plus_one",
|
||||
"clippy::redundant_closure_for_method_calls",
|
||||
"clippy::redundant_else",
|
||||
"clippy::ref_as_ptr",
|
||||
"clippy::ref_binding_to_reference",
|
||||
"clippy::ref_option_ref",
|
||||
"clippy::return_self_not_must_use",
|
||||
@ -14257,7 +14498,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
|
||||
LintGroup {
|
||||
lint: Lint {
|
||||
label: "clippy::suspicious",
|
||||
description: r##"lint group for: clippy::almost_complete_range, clippy::arc_with_non_send_sync, clippy::await_holding_invalid_type, clippy::await_holding_lock, clippy::await_holding_refcell_ref, clippy::blanket_clippy_restriction_lints, clippy::cast_abs_to_unsigned, clippy::cast_enum_constructor, clippy::cast_enum_truncation, clippy::cast_nan_to_int, clippy::cast_slice_from_raw_parts, clippy::crate_in_macro_def, clippy::drop_non_drop, clippy::duplicate_mod, clippy::empty_loop, clippy::float_equality_without_abs, clippy::forget_non_drop, clippy::four_forward_slashes, clippy::from_raw_with_void_ptr, clippy::incompatible_msrv, clippy::ineffective_open_options, clippy::iter_out_of_bounds, clippy::join_absolute_paths, clippy::let_underscore_future, clippy::lines_filter_map_ok, clippy::maybe_misused_cfg, clippy::misnamed_getters, clippy::misrefactored_assign_op, clippy::multi_assignments, clippy::mut_range_bound, clippy::mutable_key_type, clippy::no_effect_replace, clippy::non_canonical_clone_impl, clippy::non_canonical_partial_ord_impl, clippy::octal_escapes, clippy::path_ends_with_ext, clippy::permissions_set_readonly_false, clippy::print_in_format_impl, clippy::rc_clone_in_vec_init, clippy::repeat_vec_with_capacity, clippy::single_range_in_vec_init, clippy::size_of_ref, clippy::suspicious_arithmetic_impl, clippy::suspicious_assignment_formatting, clippy::suspicious_command_arg_space, clippy::suspicious_doc_comments, clippy::suspicious_else_formatting, clippy::suspicious_map, clippy::suspicious_op_assign_impl, clippy::suspicious_open_options, clippy::suspicious_to_owned, clippy::suspicious_unary_op_formatting, clippy::swap_ptr_to_ref, clippy::test_attr_in_doctest, clippy::type_id_on_box, clippy::unconditional_recursion, clippy::unnecessary_result_map_or_else"##,
|
||||
description: r##"lint group for: clippy::almost_complete_range, clippy::arc_with_non_send_sync, clippy::await_holding_invalid_type, clippy::await_holding_lock, clippy::await_holding_refcell_ref, clippy::blanket_clippy_restriction_lints, clippy::cast_abs_to_unsigned, clippy::cast_enum_constructor, clippy::cast_enum_truncation, clippy::cast_nan_to_int, clippy::cast_slice_from_raw_parts, clippy::crate_in_macro_def, clippy::deprecated_clippy_cfg_attr, clippy::drop_non_drop, clippy::duplicate_mod, clippy::empty_docs, clippy::empty_loop, clippy::float_equality_without_abs, clippy::forget_non_drop, clippy::four_forward_slashes, clippy::from_raw_with_void_ptr, clippy::incompatible_msrv, clippy::ineffective_open_options, clippy::iter_out_of_bounds, clippy::join_absolute_paths, clippy::let_underscore_future, clippy::lines_filter_map_ok, clippy::maybe_misused_cfg, clippy::misnamed_getters, clippy::misrefactored_assign_op, clippy::mixed_attributes_style, clippy::multi_assignments, clippy::multiple_bound_locations, clippy::mut_range_bound, clippy::mutable_key_type, clippy::no_effect_replace, clippy::non_canonical_clone_impl, clippy::non_canonical_partial_ord_impl, clippy::octal_escapes, clippy::path_ends_with_ext, clippy::permissions_set_readonly_false, clippy::print_in_format_impl, clippy::rc_clone_in_vec_init, clippy::repeat_vec_with_capacity, clippy::single_range_in_vec_init, clippy::size_of_ref, clippy::suspicious_arithmetic_impl, clippy::suspicious_assignment_formatting, clippy::suspicious_command_arg_space, clippy::suspicious_doc_comments, clippy::suspicious_else_formatting, clippy::suspicious_map, clippy::suspicious_op_assign_impl, clippy::suspicious_open_options, clippy::suspicious_to_owned, clippy::suspicious_unary_op_formatting, clippy::swap_ptr_to_ref, clippy::test_attr_in_doctest, clippy::type_id_on_box, clippy::unconditional_recursion, clippy::unnecessary_clippy_cfg, clippy::unnecessary_get_then_check, clippy::unnecessary_result_map_or_else"##,
|
||||
},
|
||||
children: &[
|
||||
"clippy::almost_complete_range",
|
||||
@ -14272,8 +14513,10 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
|
||||
"clippy::cast_nan_to_int",
|
||||
"clippy::cast_slice_from_raw_parts",
|
||||
"clippy::crate_in_macro_def",
|
||||
"clippy::deprecated_clippy_cfg_attr",
|
||||
"clippy::drop_non_drop",
|
||||
"clippy::duplicate_mod",
|
||||
"clippy::empty_docs",
|
||||
"clippy::empty_loop",
|
||||
"clippy::float_equality_without_abs",
|
||||
"clippy::forget_non_drop",
|
||||
@ -14288,7 +14531,9 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
|
||||
"clippy::maybe_misused_cfg",
|
||||
"clippy::misnamed_getters",
|
||||
"clippy::misrefactored_assign_op",
|
||||
"clippy::mixed_attributes_style",
|
||||
"clippy::multi_assignments",
|
||||
"clippy::multiple_bound_locations",
|
||||
"clippy::mut_range_bound",
|
||||
"clippy::mutable_key_type",
|
||||
"clippy::no_effect_replace",
|
||||
@ -14316,6 +14561,8 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
|
||||
"clippy::test_attr_in_doctest",
|
||||
"clippy::type_id_on_box",
|
||||
"clippy::unconditional_recursion",
|
||||
"clippy::unnecessary_clippy_cfg",
|
||||
"clippy::unnecessary_get_then_check",
|
||||
"clippy::unnecessary_result_map_or_else",
|
||||
],
|
||||
},

@ -64,7 +64,7 @@ pub fn visit_file_defs(
cb: &mut dyn FnMut(Definition),
) {
let db = sema.db;
let module = match sema.to_module_def(file_id) {
let module = match sema.file_to_module_def(file_id) {
Some(it) => it,
None => return,
};

@ -44,7 +44,7 @@ pub mod syntax_helpers {
pub use parser::LexedStr;
}

pub use hir::Change;
pub use hir::ChangeWithProcMacros;

use std::{fmt, mem::ManuallyDrop};

@ -216,7 +216,6 @@ impl RootDatabase {

// DefDatabase
hir_db::FileItemTreeQuery
hir_db::CrateDefMapQueryQuery
hir_db::BlockDefMapQuery
hir_db::StructDataWithDiagnosticsQuery
hir_db::UnionDataWithDiagnosticsQuery
@ -248,7 +247,6 @@ impl RootDatabase {
hir_db::CrateSupportsNoStdQuery

// HirDatabase
hir_db::InferQueryQuery
hir_db::MirBodyQuery
hir_db::BorrowckQuery
hir_db::TyQuery
@ -287,7 +285,6 @@ impl RootDatabase {
hir_db::FnDefVarianceQuery
hir_db::AdtVarianceQuery
hir_db::AssociatedTyValueQuery
hir_db::TraitSolveQueryQuery
hir_db::ProgramClausesForChalkEnvQuery

// SymbolsDatabase
@ -412,9 +409,3 @@ impl SnippetCap {
}
}
}

#[cfg(test)]
mod tests {
mod line_index;
mod sourcegen_lints;
}

@ -129,7 +129,7 @@ pub fn parallel_prime_caches(
crates_currently_indexing.insert(crate_id, crate_name);
}
ParallelPrimeCacheWorkerProgress::EndCrate { crate_id } => {
crates_currently_indexing.remove(&crate_id);
crates_currently_indexing.swap_remove(&crate_id);
crates_to_prime.mark_done(crate_id);
crates_done += 1;
}

@ -11,15 +11,12 @@ pub enum Arg {
Expr(String),
}

/**
Add placeholders like `$1` and `$2` in place of [`Arg::Placeholder`],
and unwraps the [`Arg::Ident`] and [`Arg::Expr`] enums.
```rust
# use ide_db::syntax_helpers::format_string_exprs::*;
assert_eq!(with_placeholders(vec![Arg::Ident("ident".to_owned()), Arg::Placeholder, Arg::Expr("expr + 2".to_owned())]), vec!["ident".to_owned(), "$1".to_owned(), "expr + 2".to_owned()])
```
*/

/// Add placeholders like `$1` and `$2` in place of [`Arg::Placeholder`],
/// and unwraps the [`Arg::Ident`] and [`Arg::Expr`] enums.
/// ```rust
/// # use ide_db::syntax_helpers::format_string_exprs::*;
/// assert_eq!(with_placeholders(vec![Arg::Ident("ident".to_owned()), Arg::Placeholder, Arg::Expr("expr + 2".to_owned())]), vec!["ident".to_owned(), "$1".to_owned(), "expr + 2".to_owned()])
/// ```
pub fn with_placeholders(args: Vec<Arg>) -> Vec<String> {
let mut placeholder_id = 1;
args.into_iter()
@ -34,18 +31,15 @@ pub fn with_placeholders(args: Vec<Arg>) -> Vec<String> {
.collect()
}

/**
Parser for a format-like string. It is more allowing in terms of string contents,
as we expect variable placeholders to be filled with expressions.

Built for completions and assists, and escapes `\` and `$` in output.
(See the comments on `get_receiver_text()` for detail.)
Splits a format string that may contain expressions
like
```rust
assert_eq!(parse("{ident} {} {expr + 42} ").unwrap(), ("{} {} {}", vec![Arg::Ident("ident"), Arg::Placeholder, Arg::Expr("expr + 42")]));
```
*/
/// Parser for a format-like string. It is more allowing in terms of string contents,
/// as we expect variable placeholders to be filled with expressions.
///
/// Splits a format string that may contain expressions
/// like
/// ```rust
/// # use ide_db::syntax_helpers::format_string_exprs::*;
/// assert_eq!(parse_format_exprs("{ident} {} {expr + 42} ").unwrap(), ("{ident} {} {} ".to_owned(), vec![Arg::Placeholder, Arg::Expr("expr + 42".to_owned())]));
/// ```
pub fn parse_format_exprs(input: &str) -> Result<(String, Vec<Arg>), ()> {
#[derive(Debug, Clone, Copy, PartialEq)]
enum State {
@ -79,9 +73,6 @@ pub fn parse_format_exprs(input: &str) -> Result<(String, Vec<Arg>), ()> {
state = State::MaybeIncorrect;
}
(State::NotArg, _) => {
if matches!(chr, '\\' | '$') {
output.push('\\');
}
output.push(chr);
}
(State::MaybeIncorrect, '}') => {
@ -110,9 +101,6 @@ pub fn parse_format_exprs(input: &str) -> Result<(String, Vec<Arg>), ()> {
state = State::FormatOpts;
}
(State::MaybeArg, _) => {
if matches!(chr, '\\' | '$') {
current_expr.push('\\');
}
current_expr.push(chr);

// While Rust uses the unicode sets of XID_start and XID_continue for Identifiers
@ -172,9 +160,6 @@ pub fn parse_format_exprs(input: &str) -> Result<(String, Vec<Arg>), ()> {
state = State::Expr;
}

if matches!(chr, '\\' | '$') {
current_expr.push('\\');
}
current_expr.push(chr);
}
(State::FormatOpts, '}') => {
@ -182,9 +167,6 @@ pub fn parse_format_exprs(input: &str) -> Result<(String, Vec<Arg>), ()> {
state = State::NotArg;
}
(State::FormatOpts, _) => {
if matches!(chr, '\\' | '$') {
output.push('\\');
}
output.push(chr);
}
}
@ -217,15 +199,15 @@ mod tests {
fn format_str_parser() {
let test_vector = &[
("no expressions", expect![["no expressions"]]),
(r"no expressions with \$0$1", expect![r"no expressions with \\\$0\$1"]),
(r"no expressions with \$0$1", expect![r"no expressions with \$0$1"]),
("{expr} is {2 + 2}", expect![["{expr} is {}; 2 + 2"]]),
("{expr:?}", expect![["{expr:?}"]]),
("{expr:1$}", expect![[r"{expr:1\$}"]]),
("{:1$}", expect![[r"{:1\$}; $1"]]),
("{:>padding$}", expect![[r"{:>padding\$}; $1"]]),
("{expr:1$}", expect![[r"{expr:1$}"]]),
("{:1$}", expect![[r"{:1$}; $1"]]),
("{:>padding$}", expect![[r"{:>padding$}; $1"]]),
("{}, {}, {0}", expect![[r"{}, {}, {0}; $1, $2"]]),
("{}, {}, {0:b}", expect![[r"{}, {}, {0:b}; $1, $2"]]),
("{$0}", expect![[r"{}; \$0"]]),
("{$0}", expect![[r"{}; $0"]]),
("{malformed", expect![["-"]]),
("malformed}", expect![["-"]]),
("{{correct", expect![["{{correct"]]),

@ -20,7 +20,7 @@ pub fn insert_ws_into(syn: SyntaxNode) -> SyntaxNode {
let after = Position::after;

let do_indent = |pos: fn(_) -> Position, token: &SyntaxToken, indent| {
(pos(token.clone()), make::tokens::whitespace(&" ".repeat(2 * indent)))
(pos(token.clone()), make::tokens::whitespace(&" ".repeat(4 * indent)))
};
let do_ws = |pos: fn(_) -> Position, token: &SyntaxToken| {
(pos(token.clone()), make::tokens::single_space())
@ -41,7 +41,7 @@ pub fn insert_ws_into(syn: SyntaxNode) -> SyntaxNode {
if indent > 0 {
mods.push((
Position::after(node.clone()),
make::tokens::whitespace(&" ".repeat(2 * indent)),
make::tokens::whitespace(&" ".repeat(4 * indent)),
));
}
if node.parent().is_some() {
@ -91,10 +91,7 @@ pub fn insert_ws_into(syn: SyntaxNode) -> SyntaxNode {
LIFETIME_IDENT if is_next(is_text, true) => {
mods.push(do_ws(after, tok));
}
MUT_KW if is_next(|it| it == SELF_KW, false) => {
mods.push(do_ws(after, tok));
}
AS_KW | DYN_KW | IMPL_KW | CONST_KW => {
AS_KW | DYN_KW | IMPL_KW | CONST_KW | MUT_KW => {
mods.push(do_ws(after, tok));
}
T![;] if is_next(|it| it != R_CURLY, true) => {

@ -1,49 +0,0 @@
|
||||
use line_index::{LineCol, LineIndex, WideEncoding};
|
||||
use test_utils::skip_slow_tests;
|
||||
|
||||
#[test]
|
||||
fn test_every_chars() {
|
||||
if skip_slow_tests() {
|
||||
return;
|
||||
}
|
||||
|
||||
let text: String = {
|
||||
let mut chars: Vec<char> = ((0 as char)..char::MAX).collect(); // Neat!
|
||||
chars.extend("\n".repeat(chars.len() / 16).chars());
|
||||
let mut rng = oorandom::Rand32::new(stdx::rand::seed());
|
||||
stdx::rand::shuffle(&mut chars, |i| rng.rand_range(0..i as u32) as usize);
|
||||
chars.into_iter().collect()
|
||||
};
|
||||
assert!(text.contains('💩')); // Sanity check.
|
||||
|
||||
let line_index = LineIndex::new(&text);
|
||||
|
||||
let mut lin_col = LineCol { line: 0, col: 0 };
|
||||
let mut col_utf16 = 0;
|
||||
let mut col_utf32 = 0;
|
||||
for (offset, c) in text.char_indices() {
|
||||
let got_offset = line_index.offset(lin_col).unwrap();
|
||||
assert_eq!(usize::from(got_offset), offset);
|
||||
|
||||
let got_lin_col = line_index.line_col(got_offset);
|
||||
assert_eq!(got_lin_col, lin_col);
|
||||
|
||||
for (enc, col) in [(WideEncoding::Utf16, col_utf16), (WideEncoding::Utf32, col_utf32)] {
|
||||
let wide_lin_col = line_index.to_wide(enc, lin_col).unwrap();
|
||||
let got_lin_col = line_index.to_utf8(enc, wide_lin_col).unwrap();
|
||||
assert_eq!(got_lin_col, lin_col);
|
||||
assert_eq!(wide_lin_col.col, col)
|
||||
}
|
||||
|
||||
if c == '\n' {
|
||||
lin_col.line += 1;
|
||||
lin_col.col = 0;
|
||||
col_utf16 = 0;
|
||||
col_utf32 = 0;
|
||||
} else {
|
||||
lin_col.col += c.len_utf8() as u32;
|
||||
col_utf16 += c.len_utf16() as u32;
|
||||
col_utf32 += 1;
|
||||
}
|
||||
}
|
||||
}
|
@ -20,7 +20,6 @@ tracing.workspace = true
once_cell = "1.17.0"

# local deps
profile.workspace = true
stdx.workspace = true
syntax.workspace = true
text-edit.workspace = true
@ -34,10 +33,6 @@ expect-test = "1.4.0"
# local deps
test-utils.workspace = true
test-fixture.workspace = true
sourcegen.workspace = true

[features]
in-rust-tree = []

[lints]
workspace = true

@ -200,7 +200,7 @@ fn get_default_constructor(
}
}

let krate = ctx.sema.to_module_def(d.file.original_file(ctx.sema.db))?.krate();
let krate = ctx.sema.file_to_module_def(d.file.original_file(ctx.sema.db))?.krate();
let module = krate.root_module();

// Look for a ::new() associated function

@ -597,21 +597,19 @@ fn bang(never: !) {

#[test]
fn unknown_type() {
cov_mark::check_count!(validate_match_bailed_out, 1);

check_diagnostics(
check_diagnostics_no_bails(
r#"
enum Option<T> { Some(T), None }

#[allow(unused)]
fn main() {
// `Never` is deliberately not defined so that it's an uninferred type.
// We ignore these to avoid triggering bugs in the analysis.
match Option::<Never>::None {
None => (),
Some(never) => match never {},
}
match Option::<Never>::None {
//^^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `None` not covered
Option::Some(_never) => {},
}
}

@ -619,6 +617,18 @@ fn main() {
);
}

#[test]
fn arity_mismatch_issue_16746() {
check_diagnostics_with_disabled(
r#"
fn main() {
let (a, ) = (0, 0);
}
"#,
&["E0308"],
);
}

#[test]
fn tuple_of_bools_with_ellipsis_at_end_missing_arm() {
check_diagnostics_no_bails(

@ -30,6 +30,7 @@ pub(crate) fn remove_unnecessary_else(
"remove unnecessary else block",
display_range,
)
.experimental()
.with_fixes(fixes(ctx, d))
}

@ -227,6 +227,7 @@ pub struct DiagnosticsConfig {
pub disable_experimental: bool,
pub disabled: FxHashSet<String>,
pub expr_fill_default: ExprFillDefaultMode,
pub style_lints: bool,
// FIXME: We may want to include a whole `AssistConfig` here
pub insert_use: InsertUseConfig,
pub prefer_no_std: bool,

@ -245,6 +246,7 @@ impl DiagnosticsConfig {
disable_experimental: Default::default(),
disabled: Default::default(),
expr_fill_default: Default::default(),
style_lints: true,
insert_use: InsertUseConfig {
granularity: ImportGranularity::Preserve,
enforce_granularity: false,

@ -299,7 +301,7 @@ pub fn diagnostics(
let mut res = Vec::new();

// [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily.
res.extend(parse.errors().iter().take(128).map(|err| {
res.extend(parse.errors().into_iter().take(128).map(|err| {
Diagnostic::new(
DiagnosticCode::RustcHardError("syntax-error"),
format!("Syntax Error: {err}"),

@ -315,7 +317,7 @@ pub fn diagnostics(
handlers::json_is_not_rust::json_in_items(&sema, &mut res, file_id, &node, config);
}

let module = sema.to_module_def(file_id);
let module = sema.file_to_module_def(file_id);

let ctx = DiagnosticsContext { config, sema, resolve };
if module.is_none() {

@ -324,7 +326,7 @@ pub fn diagnostics(

let mut diags = Vec::new();
if let Some(m) = module {
m.diagnostics(db, &mut diags);
m.diagnostics(db, &mut diags, config.style_lints);
}

for diag in diags {

@ -1,6 +1,4 @@
#![allow(clippy::print_stderr)]
#[cfg(not(feature = "in-rust-tree"))]
mod sourcegen;

use ide_db::{
assists::AssistResolveStrategy, base_db::SourceDatabaseExt, LineIndexDatabase, RootDatabase,

@ -17,11 +17,11 @@ arrayvec.workspace = true
either.workspace = true
itertools.workspace = true
tracing.workspace = true
oorandom = "11.1.3"
pulldown-cmark-to-cmark = "10.0.4"
pulldown-cmark = { version = "0.9.1", default-features = false }
url = "2.3.1"
dot = "0.1.4"
oorandom.workspace = true
pulldown-cmark-to-cmark.workspace = true
pulldown-cmark.workspace = true
url.workspace = true
dot.workspace = true
smallvec.workspace = true
triomphe.workspace = true
nohash-hasher.workspace = true

@ -52,7 +52,7 @@ test-utils.workspace = true
test-fixture.workspace = true

[features]
in-rust-tree = ["ide-assists/in-rust-tree", "ide-diagnostics/in-rust-tree"]
in-rust-tree = []

[lints]
workspace = true

@ -189,7 +189,7 @@ fn _format(
let &crate_id = db.relevant_crates(file_id).iter().next()?;
let edition = db.crate_graph()[crate_id].edition;

let mut cmd = std::process::Command::new(toolchain::rustfmt());
let mut cmd = std::process::Command::new(toolchain::Tool::Rustfmt.path());
cmd.arg("--edition");
cmd.arg(edition.to_string());

@ -308,8 +308,8 @@ f$0oo!();
expect![[r#"
foo!
fn some_thing() -> u32 {
let a = 0;
a+10
let a = 0;
a+10
}"#]],
);
}

@ -342,13 +342,13 @@ fn main() {
expect![[r#"
match_ast!
{
if let Some(it) = ast::TraitDef::cast(container.clone()){}
else if let Some(it) = ast::ImplDef::cast(container.clone()){}
else {
{
continue
if let Some(it) = ast::TraitDef::cast(container.clone()){}
else if let Some(it) = ast::ImplDef::cast(container.clone()){}
else {
{
continue
}
}
}
}"#]],
);
}

@ -397,12 +397,12 @@ fn main() {
expect![[r#"
foo!
{
macro_rules! bar {
() => {
42
macro_rules! bar {
() => {
42
}
}
}
42
42
}"#]],
);
}

@ -482,16 +482,16 @@ struct Foo {}
expect![[r#"
Clone
impl < >$crate::clone::Clone for Foo< >where {
fn clone(&self) -> Self {
match self {
Foo{}
=> Foo{}
,
fn clone(&self) -> Self {
match self {
Foo{}
=> Foo{}
,

}
}
}
}

}"#]],
}"#]],
);
}

@ -534,16 +534,16 @@ struct Foo {}
expect![[r#"
Clone
impl < >$crate::clone::Clone for Foo< >where {
fn clone(&self) -> Self {
match self {
Foo{}
=> Foo{}
,
fn clone(&self) -> Self {
match self {
Foo{}
=> Foo{}
,

}
}
}
}

}"#]],
}"#]],
);
}
}

@ -536,6 +536,24 @@ fn bar() {
);
}

#[test]
fn goto_definition_works_for_consts_inside_range_pattern() {
check(
r#"
//- /lib.rs
const A: u32 = 0;
//^

fn bar(v: u32) {
match v {
0..=$0A => {}
_ => {}
}
}
"#,
);
}

#[test]
fn goto_def_for_use_alias() {
check(

@ -32,6 +32,7 @@ pub struct HoverConfig {
pub documentation: bool,
pub keywords: bool,
pub format: HoverDocFormat,
pub max_trait_assoc_items_count: Option<usize>,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]

@ -406,7 +406,12 @@ pub(super) fn definition(
config: &HoverConfig,
) -> Markup {
let mod_path = definition_mod_path(db, &def);
let label = def.label(db);
let label = match def {
Definition::Trait(trait_) => {
trait_.display_limited(db, config.max_trait_assoc_items_count).to_string()
}
_ => def.label(db),
};
let docs = def.docs(db, famous_defs);
let value = (|| match def {
Definition::Variant(it) => {

@ -17,6 +17,7 @@ const HOVER_BASE_CONFIG: HoverConfig = HoverConfig {
documentation: true,
format: HoverDocFormat::Markdown,
keywords: true,
max_trait_assoc_items_count: None,
};

fn check_hover_no_result(ra_fixture: &str) {

@ -48,6 +49,28 @@ fn check(ra_fixture: &str, expect: Expect) {
expect.assert_eq(&actual)
}

#[track_caller]
fn check_assoc_count(count: usize, ra_fixture: &str, expect: Expect) {
let (analysis, position) = fixture::position(ra_fixture);
let hover = analysis
.hover(
&HoverConfig {
links_in_hover: true,
max_trait_assoc_items_count: Some(count),
..HOVER_BASE_CONFIG
},
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
)
.unwrap()
.unwrap();

let content = analysis.db.file_text(position.file_id);
let hovered_element = &content[hover.range];

let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
expect.assert_eq(&actual)
}

fn check_hover_no_links(ra_fixture: &str, expect: Expect) {
let (analysis, position) = fixture::position(ra_fixture);
let hover = analysis

@ -2672,26 +2695,26 @@ fn foo() -> impl Foo {}
fn main() { let s$0t = foo(); }
"#,
expect![[r#"
[
GoToType(
[
HoverGotoTypeData {
mod_path: "test::Foo",
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 0..12,
focus_range: 6..9,
name: "Foo",
kind: Trait,
description: "trait Foo",
},
},
],
),
]
"#]],
);
}

@ -2706,39 +2729,39 @@ fn foo() -> impl Foo<S> {}
fn main() { let s$0t = foo(); }
"#,
expect![[r#"
[
GoToType(
[
HoverGotoTypeData {
mod_path: "test::Foo",
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 0..15,
focus_range: 6..9,
name: "Foo",
kind: Trait,
description: "trait Foo<T>",
},
},
HoverGotoTypeData {
mod_path: "test::S",
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 16..25,
focus_range: 23..24,
name: "S",
kind: Struct,
description: "struct S",
},
},
],
),
]
"#]],
);
}

@ -2873,26 +2896,26 @@ trait Foo {}
fn foo(ar$0g: &impl Foo) {}
"#,
expect![[r#"
[
GoToType(
[
HoverGotoTypeData {
mod_path: "test::Foo",
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 0..12,
focus_range: 6..9,
name: "Foo",
kind: Trait,
description: "trait Foo",
},
},
],
),
]
"#]],
);
}

@ -3020,39 +3043,39 @@ struct S {}
fn foo(ar$0g: &impl Foo<S>) {}
"#,
expect![[r#"
[
GoToType(
[
HoverGotoTypeData {
mod_path: "test::Foo",
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 0..15,
focus_range: 6..9,
name: "Foo",
kind: Trait,
description: "trait Foo<T>",
},
},
HoverGotoTypeData {
mod_path: "test::S",
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 16..27,
focus_range: 23..24,
name: "S",
kind: Struct,
description: "struct S {}",
},
},
],
),
]
"#]],
);
}

@ -3070,39 +3093,39 @@ fn foo() -> B<dyn Foo> {}
fn main() { let s$0t = foo(); }
"#,
expect![[r#"
[
GoToType(
[
HoverGotoTypeData {
mod_path: "test::B",
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 42..55,
focus_range: 49..50,
name: "B",
kind: Struct,
description: "struct B<T> {}",
},
},
HoverGotoTypeData {
mod_path: "test::Foo",
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 0..12,
focus_range: 6..9,
name: "Foo",
kind: Trait,
description: "trait Foo",
},
},
],
),
]
"#]],
);
}

@ -3114,26 +3137,26 @@ trait Foo {}
fn foo(ar$0g: &dyn Foo) {}
"#,
expect![[r#"
[
GoToType(
[
HoverGotoTypeData {
mod_path: "test::Foo",
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 0..12,
focus_range: 6..9,
name: "Foo",
kind: Trait,
description: "trait Foo",
},
},
],
),
]
"#]],
);
}

@ -3146,39 +3169,39 @@ struct S {}
fn foo(ar$0g: &dyn Foo<S>) {}
"#,
expect![[r#"
[
GoToType(
[
HoverGotoTypeData {
mod_path: "test::Foo",
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 0..15,
focus_range: 6..9,
name: "Foo",
kind: Trait,
description: "trait Foo<T>",
},
},
HoverGotoTypeData {
mod_path: "test::S",
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 16..27,
focus_range: 23..24,
name: "S",
kind: Struct,
description: "struct S {}",
},
},
],
),
]
"#]],
);
}

@ -3275,26 +3298,26 @@ fn test() -> impl Foo { S {} }
fn main() { let s$0t = test().get(); }
"#,
expect![[r#"
[
GoToType(
[
HoverGotoTypeData {
mod_path: "test::Foo",
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 0..62,
focus_range: 6..9,
name: "Foo",
kind: Trait,
description: "trait Foo",
},
},
],
),
]
"#]],
);
}

@ -3340,26 +3363,26 @@ trait Foo {}
fn foo<T: Foo>(t: T$0){}
"#,
expect![[r#"
[
GoToType(
[
HoverGotoTypeData {
mod_path: "test::Foo",
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 0..12,
focus_range: 6..9,
name: "Foo",
kind: Trait,
description: "trait Foo",
},
},
],
),
]
"#]],
);
}

@ -5434,13 +5457,62 @@ fn hover_feature() {

The tracking issue for this feature is: None.

Intrinsics are never intended to be stable directly, but intrinsics are often
Intrinsics are rarely intended to be stable directly, but are usually
exported in some sort of stable manner. Prefer using the stable interfaces to
the intrinsic directly when you can.

------------------------


## Intrinsics with fallback logic

Many intrinsics can be written in pure rust, albeit inefficiently or without supporting
some features that only exist on some backends. Backends can simply not implement those
intrinsics without causing any code miscompilations or failures to compile.
All intrinsic fallback bodies are automatically made cross-crate inlineable (like `#[inline]`)
by the codegen backend, but not the MIR inliner.

```rust
#![feature(rustc_attrs, effects)]
#![allow(internal_features)]

#[rustc_intrinsic]
const unsafe fn const_deallocate(_ptr: *mut u8, _size: usize, _align: usize) {}
```

Since these are just regular functions, it is perfectly ok to create the intrinsic twice:

```rust
#![feature(rustc_attrs, effects)]
#![allow(internal_features)]

#[rustc_intrinsic]
const unsafe fn const_deallocate(_ptr: *mut u8, _size: usize, _align: usize) {}

mod foo {
#[rustc_intrinsic]
const unsafe fn const_deallocate(_ptr: *mut u8, _size: usize, _align: usize) {
panic!("noisy const dealloc")
}
}

```

The behaviour on backends that override the intrinsic is exactly the same. On other
backends, the intrinsic behaviour depends on which implementation is called, just like
with any regular function.

## Intrinsics lowered to MIR instructions

Various intrinsics have native MIR operations that they correspond to. Instead of requiring
backends to implement both the intrinsic and the MIR operation, the `lower_intrinsics` pass
will convert the calls to the MIR operation. Backends do not need to know about these intrinsics
at all.

## Intrinsics without fallback logic

These must be implemented by all backends.

These are imported as if they were FFI functions, with the special
`rust-intrinsic` ABI. For example, if one was in a freestanding
context, but wished to be able to `transmute` between types, and

@ -5459,7 +5531,8 @@ fn hover_feature() {
}
```

As with any other FFI functions, these are always `unsafe` to call.
As with any other FFI functions, these are by default always `unsafe` to call.
You can add `#[rustc_safe_intrinsic]` to the intrinsic to make it safe to call.

"#]],
)

@ -6277,6 +6350,151 @@ impl T for () {
);
}

#[test]
fn hover_trait_show_assoc_items() {
check_assoc_count(
0,
r#"
trait T {}
impl T$0 for () {}
"#,
expect![[r#"
*T*

```rust
test
```

```rust
trait T {}
```
"#]],
);

check_assoc_count(
1,
r#"
trait T {}
impl T$0 for () {}
"#,
expect![[r#"
*T*

```rust
test
```

```rust
trait T {}
```
"#]],
);

check_assoc_count(
0,
r#"
trait T {
fn func() {}
const FLAG: i32 = 34;
type Bar;
}
impl T$0 for () {}
"#,
expect![[r#"
*T*

```rust
test
```

```rust
trait T { /* … */ }
```
"#]],
);

check_assoc_count(
2,
r#"
trait T {
fn func() {}
const FLAG: i32 = 34;
type Bar;
}
impl T$0 for () {}
"#,
expect![[r#"
*T*

```rust
test
```

```rust
trait T {
fn func();
const FLAG: i32;
/* … */
}
```
"#]],
);

check_assoc_count(
3,
r#"
trait T {
fn func() {}
const FLAG: i32 = 34;
type Bar;
}
impl T$0 for () {}
"#,
expect![[r#"
*T*

```rust
test
```

```rust
trait T {
fn func();
const FLAG: i32;
type Bar;
}
```
"#]],
);

check_assoc_count(
4,
r#"
trait T {
fn func() {}
const FLAG: i32 = 34;
type Bar;
}
impl T$0 for () {}
"#,
expect![[r#"
*T*

```rust
test
```

```rust
trait T {
fn func();
const FLAG: i32;
type Bar;
}
```
"#]],
);
}

#[test]
fn hover_ranged_macro_call() {
check_hover_range(

@ -6366,8 +6584,8 @@ fn main() { $0V; }

```rust
pub const V: i8 = {
let e = 123;
f(e)
let e = 123;
f(e)
}
```
"#]],

@ -6393,7 +6611,7 @@ fn main() { $0V; }

```rust
pub static V: i8 = {
let e = 123;
let e = 123;
}
```
"#]],

@ -50,6 +50,7 @@ mod static_index;
mod status;
mod syntax_highlighting;
mod syntax_tree;
mod test_explorer;
mod typing;
mod view_crate_graph;
mod view_hir;

@ -61,7 +62,7 @@ use std::ffi::OsStr;

use cfg::CfgOptions;
use fetch_crates::CrateInfo;
use hir::Change;
use hir::ChangeWithProcMacros;
use ide_db::{
base_db::{
salsa::{self, ParallelDatabase},

@ -108,6 +109,7 @@ pub use crate::{
tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
HighlightConfig, HlRange,
},
test_explorer::{TestItem, TestItemKind},
};
pub use hir::Semantics;
pub use ide_assists::{

@ -184,7 +186,7 @@ impl AnalysisHost {

/// Applies changes to the current state of the world. If there are
/// outstanding snapshots, they will be canceled.
pub fn apply_change(&mut self, change: Change) {
pub fn apply_change(&mut self, change: ChangeWithProcMacros) {
self.db.apply_change(change);
}

@ -239,7 +241,7 @@ impl Analysis {
file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_owned()));
let source_root = SourceRoot::new_local(file_set);

let mut change = Change::new();
let mut change = ChangeWithProcMacros::new();
change.set_roots(vec![source_root]);
let mut crate_graph = CrateGraph::default();
// FIXME: cfg options

@ -340,6 +342,18 @@ impl Analysis {
self.with_db(|db| view_item_tree::view_item_tree(db, file_id))
}

pub fn discover_test_roots(&self) -> Cancellable<Vec<TestItem>> {
self.with_db(test_explorer::discover_test_roots)
}

pub fn discover_tests_in_crate_by_test_id(&self, crate_id: &str) -> Cancellable<Vec<TestItem>> {
self.with_db(|db| test_explorer::discover_tests_in_crate_by_test_id(db, crate_id))
}

pub fn discover_tests_in_crate(&self, crate_id: CrateId) -> Cancellable<Vec<TestItem>> {
self.with_db(|db| test_explorer::discover_tests_in_crate(db, crate_id))
}

/// Renders the crate graph to GraphViz "dot" syntax.
pub fn view_crate_graph(&self, full: bool) -> Cancellable<Result<String, String>> {
self.with_db(|db| view_crate_graph::view_crate_graph(db, full))

@ -48,7 +48,7 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
.flat_map(|module| NavigationTarget::from_module_to_decl(db, module))
.collect(),
None => sema
.to_module_defs(position.file_id)
.file_to_module_defs(position.file_id)
.flat_map(|module| NavigationTarget::from_module_to_decl(db, module))
.collect(),
}

@ -156,7 +156,7 @@ pub(crate) fn will_rename_file(
new_name_stem: &str,
) -> Option<SourceChange> {
let sema = Semantics::new(db);
let module = sema.to_module_def(file_id)?;
let module = sema.file_to_module_def(file_id)?;
let def = Definition::Module(module);
let mut change = if is_raw_identifier(new_name_stem) {
def.rename(&sema, &SmolStr::from_iter(["r#", new_name_stem])).ok()?

@ -178,7 +178,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
}
});

sema.to_module_defs(file_id)
sema.file_to_module_defs(file_id)
.map(|it| runnable_mod_outline_definition(&sema, it))
.for_each(|it| add_opt(it, None));

@ -166,6 +166,7 @@ impl StaticIndex<'_> {
documentation: true,
keywords: true,
format: crate::HoverDocFormat::Markdown,
max_trait_assoc_items_count: None,
};
let tokens = tokens.filter(|token| {
matches!(

@ -223,7 +223,7 @@ fn traverse(
krate: hir::Crate,
range_to_highlight: TextRange,
) {
let is_unlinked = sema.to_module_def(file_id).is_none();
let is_unlinked = sema.file_to_module_def(file_id).is_none();
let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default();

enum AttrOrDerive {

135
src/tools/rust-analyzer/crates/ide/src/test_explorer.rs
Normal file
@ -0,0 +1,135 @@
//! Discovers tests

use hir::{Crate, Module, ModuleDef, Semantics};
use ide_db::{
base_db::{CrateGraph, CrateId, FileId, SourceDatabase},
RootDatabase,
};
use syntax::TextRange;

use crate::{navigation_target::ToNav, runnables::runnable_fn, Runnable, TryToNav};

#[derive(Debug)]
pub enum TestItemKind {
Crate,
Module,
Function,
}

#[derive(Debug)]
pub struct TestItem {
pub id: String,
pub kind: TestItemKind,
pub label: String,
pub parent: Option<String>,
pub file: Option<FileId>,
pub text_range: Option<TextRange>,
pub runnable: Option<Runnable>,
}

pub(crate) fn discover_test_roots(db: &RootDatabase) -> Vec<TestItem> {
let crate_graph = db.crate_graph();
crate_graph
.iter()
.filter(|&id| crate_graph[id].origin.is_local())
.filter_map(|id| Some(crate_graph[id].display_name.as_ref()?.to_string()))
.map(|id| TestItem {
kind: TestItemKind::Crate,
label: id.clone(),
id,
parent: None,
file: None,
text_range: None,
runnable: None,
})
.collect()
}

fn find_crate_by_id(crate_graph: &CrateGraph, crate_id: &str) -> Option<CrateId> {
// here, we use display_name as the crate id. This is not super ideal, but it works since we
// only show tests for the local crates.
crate_graph.iter().find(|&id| {
crate_graph[id].origin.is_local()
&& crate_graph[id].display_name.as_ref().is_some_and(|x| x.to_string() == crate_id)
})
}

fn discover_tests_in_module(db: &RootDatabase, module: Module, prefix_id: String) -> Vec<TestItem> {
let sema = Semantics::new(db);

let mut r = vec![];
for c in module.children(db) {
let module_name =
c.name(db).as_ref().and_then(|n| n.as_str()).unwrap_or("[mod without name]").to_owned();
let module_id = format!("{prefix_id}::{module_name}");
let module_children = discover_tests_in_module(db, c, module_id.clone());
if !module_children.is_empty() {
let nav = c.to_nav(db).call_site;
r.push(TestItem {
id: module_id,
kind: TestItemKind::Module,
label: module_name,
parent: Some(prefix_id.clone()),
file: Some(nav.file_id),
text_range: Some(nav.focus_or_full_range()),
runnable: None,
});
r.extend(module_children);
}
}
for def in module.declarations(db) {
let ModuleDef::Function(f) = def else {
continue;
};
if !f.is_test(db) {
continue;
}
let nav = f.try_to_nav(db).map(|r| r.call_site);
let fn_name = f.name(db).as_str().unwrap_or("[function without name]").to_owned();
r.push(TestItem {
id: format!("{prefix_id}::{fn_name}"),
kind: TestItemKind::Function,
label: fn_name,
parent: Some(prefix_id.clone()),
file: nav.as_ref().map(|n| n.file_id),
text_range: nav.as_ref().map(|n| n.focus_or_full_range()),
runnable: runnable_fn(&sema, f),
});
}
r
}

pub(crate) fn discover_tests_in_crate_by_test_id(
db: &RootDatabase,
crate_test_id: &str,
) -> Vec<TestItem> {
let crate_graph = db.crate_graph();
let Some(crate_id) = find_crate_by_id(&crate_graph, crate_test_id) else {
return vec![];
};
discover_tests_in_crate(db, crate_id)
}

pub(crate) fn discover_tests_in_crate(db: &RootDatabase, crate_id: CrateId) -> Vec<TestItem> {
let crate_graph = db.crate_graph();
if !crate_graph[crate_id].origin.is_local() {
return vec![];
}
let Some(crate_test_id) = &crate_graph[crate_id].display_name else {
return vec![];
};
let crate_test_id = crate_test_id.to_string();
let crate_id: Crate = crate_id.into();
let module = crate_id.root_module();
let mut r = vec![TestItem {
id: crate_test_id.clone(),
kind: TestItemKind::Crate,
label: crate_test_id.clone(),
parent: None,
file: None,
text_range: None,
runnable: None,
}];
r.extend(discover_tests_in_module(db, module, crate_test_id));
r
}