Auto merge of #100304 - matthiaskrgr:rollup-gs56vlw, r=matthiaskrgr

Rollup of 6 pull requests

Successful merges:

 - #100163 (Refactor: remove an unnecessary string search)
 - #100212 (Remove more Clean trait implementations)
 - #100238 (Further improve error message for E0081)
 - #100268 (Add regression test for #79148)
 - #100294 (Update Duration::as_secs doc to point to as_secs_f64/32 for including fractional part)
 - #100303 (⬆️ rust-analyzer)

Failed merges:

 - #100281 (Remove more Clean trait implementations)

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2022-08-09 08:03:08 +00:00
commit 5af97e8b0b
84 changed files with 1953 additions and 553 deletions

View File

@ -32,7 +32,6 @@ use rustc_trait_selection::traits::error_reporting::InferCtxtExt as _;
use rustc_trait_selection::traits::{self, ObligationCtxt};
use rustc_ty_utils::representability::{self, Representability};
use std::iter;
use std::ops::ControlFlow;
pub(super) fn check_abi(tcx: TyCtxt<'_>, hir_id: hir::HirId, span: Span, abi: Abi) {
@ -1494,76 +1493,109 @@ fn check_enum<'tcx>(tcx: TyCtxt<'tcx>, vs: &'tcx [hir::Variant<'tcx>], def_id: L
}
}
let mut disr_vals: Vec<Discr<'tcx>> = Vec::with_capacity(vs.len());
// This tracks the previous variant span (in the loop) in case we need it for diagnostics
let mut prev_variant_span: Span = DUMMY_SP;
for ((_, discr), v) in iter::zip(def.discriminants(tcx), vs) {
// Check for duplicate discriminant values
if let Some(i) = disr_vals.iter().position(|&x| x.val == discr.val) {
let variant_did = def.variant(VariantIdx::new(i)).def_id;
let variant_i_hir_id = tcx.hir().local_def_id_to_hir_id(variant_did.expect_local());
let variant_i = tcx.hir().expect_variant(variant_i_hir_id);
let i_span = match variant_i.disr_expr {
Some(ref expr) => tcx.hir().span(expr.hir_id),
None => tcx.def_span(variant_did),
};
let span = match v.disr_expr {
Some(ref expr) => tcx.hir().span(expr.hir_id),
None => v.span,
};
let display_discr = format_discriminant_overflow(tcx, v, discr);
let display_discr_i = format_discriminant_overflow(tcx, variant_i, disr_vals[i]);
let no_disr = v.disr_expr.is_none();
let mut err = struct_span_err!(
tcx.sess,
sp,
E0081,
"discriminant value `{}` assigned more than once",
discr,
);
err.span_label(i_span, format!("first assignment of {display_discr_i}"));
err.span_label(span, format!("second assignment of {display_discr}"));
if no_disr {
err.span_label(
prev_variant_span,
format!(
"assigned discriminant for `{}` was incremented from this discriminant",
v.ident
),
);
}
err.emit();
}
disr_vals.push(discr);
prev_variant_span = v.span;
}
detect_discriminant_duplicate(tcx, def.discriminants(tcx).collect(), vs, sp);
check_representable(tcx, sp, def_id);
check_transparent(tcx, sp, def);
}
/// In the case that a discriminant is both a duplicate and an overflowing literal,
/// we insert both the assigned discriminant and the literal it overflowed from into the formatted
/// output. Otherwise we format the discriminant normally.
fn format_discriminant_overflow<'tcx>(
/// Part of enum check. Given the discriminants of an enum, errors if two or more discriminants are equal
fn detect_discriminant_duplicate<'tcx>(
tcx: TyCtxt<'tcx>,
variant: &hir::Variant<'_>,
dis: Discr<'tcx>,
) -> String {
if let Some(expr) = &variant.disr_expr {
let body = &tcx.hir().body(expr.body).value;
if let hir::ExprKind::Lit(lit) = &body.kind
&& let rustc_ast::LitKind::Int(lit_value, _int_kind) = &lit.node
&& dis.val != *lit_value
{
return format!("`{dis}` (overflowed from `{lit_value}`)");
}
}
mut discrs: Vec<(VariantIdx, Discr<'tcx>)>,
vs: &'tcx [hir::Variant<'tcx>],
self_span: Span,
) {
// Helper closure to reduce duplicate code. This gets called every time we detect a duplicate.
// Here `idx` refers to the order in which the discriminant appears, i.e. its index in `vs`
let report = |dis: Discr<'tcx>,
idx: usize,
err: &mut DiagnosticBuilder<'_, ErrorGuaranteed>| {
let var = &vs[idx]; // HIR for the duplicate discriminant
let (span, display_discr) = match var.disr_expr {
Some(ref expr) => {
// If the discriminant is both a duplicate and overflowed, let the user know
if let hir::ExprKind::Lit(lit) = &tcx.hir().body(expr.body).value.kind
&& let rustc_ast::LitKind::Int(lit_value, _int_kind) = &lit.node
&& *lit_value != dis.val
{
(tcx.hir().span(expr.hir_id), format!("`{dis}` (overflowed from `{lit_value}`)"))
// Otherwise, format the value as-is
} else {
(tcx.hir().span(expr.hir_id), format!("`{dis}`"))
}
}
None => {
// At this point we know this discriminant is a duplicate, and was not explicitly
// assigned by the user. Here we iterate backwards to fetch the HIR for the last
// explicitly assigned discriminant, and let the user know that this was the
// increment start point and how many steps from there led to the duplicate
if let Some((n, hir::Variant { span, ident, .. })) =
vs[..idx].iter().rev().enumerate().find(|v| v.1.disr_expr.is_some())
{
let ve_ident = var.ident;
let n = n + 1;
let sp = if n > 1 { "variants" } else { "variant" };
err.span_label(
*span,
format!("discriminant for `{ve_ident}` incremented from this startpoint (`{ident}` + {n} {sp} later => `{ve_ident}` = {dis})"),
);
}
(vs[idx].span, format!("`{dis}`"))
}
};
err.span_label(span, format!("{display_discr} assigned here"));
};
// Here we loop through the discriminants, comparing each discriminant to another.
// When a duplicate is detected, we instantiate an error and point to both the
// initial and the duplicate value. The duplicate discriminant is then discarded by swapping
// it with the last element and decrementing the `vec.len` (which is why we have to evaluate
// `discrs.len()` anew every iteration, and why this could be tricky to do in a functional
// style as we are mutating `discrs` on the fly). A standalone sketch of this scan follows the function.
let mut i = 0;
while i < discrs.len() {
let hir_var_i_idx = discrs[i].0.index();
let mut error: Option<DiagnosticBuilder<'_, _>> = None;
let mut o = i + 1;
while o < discrs.len() {
let hir_var_o_idx = discrs[o].0.index();
if discrs[i].1.val == discrs[o].1.val {
let err = error.get_or_insert_with(|| {
let mut ret = struct_span_err!(
tcx.sess,
self_span,
E0081,
"discriminant value `{}` assigned more than once",
discrs[i].1,
);
report(discrs[i].1, hir_var_i_idx, &mut ret);
ret
});
report(discrs[o].1, hir_var_o_idx, err);
// Safe to unwrap here, as we wouldn't reach this point if `discrs` was empty
discrs[o] = *discrs.last().unwrap();
discrs.pop();
} else {
o += 1;
}
}
if let Some(mut e) = error {
e.emit();
}
i += 1;
}
}
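
A minimal standalone sketch of the same swap-remove scan, assuming plain integers in place of `Discr` values (illustration only, not rustc code):

fn dedup_by_swap_remove(mut vals: Vec<u32>) -> Vec<u32> {
    let mut i = 0;
    while i < vals.len() {
        let mut o = i + 1;
        while o < vals.len() {
            if vals[i] == vals[o] {
                // Discard the duplicate in O(1); don't advance `o`, since the
                // element swapped in from the end has not been examined yet.
                vals.swap_remove(o);
            } else {
                o += 1;
            }
        }
        i += 1;
    }
    vals
}

// e.g. dedup_by_swap_remove(vec![1, 2, 1, 3, 2]) keeps one occurrence of each value.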
pub(super) fn check_type_params_are_used<'tcx>(

View File

@ -50,7 +50,6 @@ use rustc_span::hygiene::DesugaringKind;
use rustc_span::lev_distance::find_best_match_for_name;
use rustc_span::source_map::{Span, Spanned};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{BytePos, Pos};
use rustc_target::spec::abi::Abi::RustIntrinsic;
use rustc_trait_selection::infer::InferCtxtExt;
use rustc_trait_selection::traits::{self, ObligationCauseCode};
@ -2398,37 +2397,29 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
expr,
Some(span),
);
} else if let ty::RawPtr(ty_and_mut) = expr_t.kind()
&& let ty::Adt(adt_def, _) = ty_and_mut.ty.kind()
&& let ExprKind::Field(base_expr, _) = expr.kind
&& adt_def.variants().len() == 1
&& adt_def
.variants()
.iter()
.next()
.unwrap()
.fields
.iter()
.any(|f| f.ident(self.tcx) == field)
{
err.multipart_suggestion(
"to access the field, dereference first",
vec![
(base_expr.span.shrink_to_lo(), "(*".to_string()),
(base_expr.span.shrink_to_hi(), ")".to_string()),
],
Applicability::MaybeIncorrect,
);
} else {
let mut found = false;
if let ty::RawPtr(ty_and_mut) = expr_t.kind()
&& let ty::Adt(adt_def, _) = ty_and_mut.ty.kind()
{
if adt_def.variants().len() == 1
&& adt_def
.variants()
.iter()
.next()
.unwrap()
.fields
.iter()
.any(|f| f.ident(self.tcx) == field)
{
if let Some(dot_loc) = expr_snippet.rfind('.') {
found = true;
err.span_suggestion(
expr.span.with_hi(expr.span.lo() + BytePos::from_usize(dot_loc)),
"to access the field, dereference first",
format!("(*{})", &expr_snippet[0..dot_loc]),
Applicability::MaybeIncorrect,
);
}
}
}
if !found {
err.help("methods are immutable and cannot be assigned to");
}
err.help("methods are immutable and cannot be assigned to");
}
err.emit();
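
For context, a minimal sketch of the kind of user code this path targets, mirroring the `issue-91210-ptr-method` test further below (the `Foo`/`read` names are taken from that test's output):

struct Foo { read: i32 }

fn f(x: *mut Foo) {
    // `x.read` resolves to the raw pointer's inherent `read` method, not the
    // field, so `x.read = 4;` is rejected with E0615. The suggestion now wraps
    // the base expression in a dereference instead of string-searching for `.`:
    unsafe { (*x).read = 4; }
}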

View File

@ -112,7 +112,6 @@ use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::intravisit::Visitor;
use rustc_hir::{HirIdMap, ImplicitSelfKind, Node};
use rustc_index::bit_set::BitSet;
use rustc_index::vec::Idx;
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::subst::{InternalSubsts, Subst, SubstsRef};

View File

@ -318,19 +318,11 @@ impl Duration {
/// assert_eq!(duration.as_secs(), 5);
/// ```
///
/// To determine the total number of seconds represented by the `Duration`,
/// use `as_secs` in combination with [`subsec_nanos`]:
///
/// ```
/// use std::time::Duration;
///
/// let duration = Duration::new(5, 730023852);
///
/// assert_eq!(5.730023852,
/// duration.as_secs() as f64
/// + duration.subsec_nanos() as f64 * 1e-9);
/// ```
/// To determine the total number of seconds represented by the `Duration`
/// including the fractional part, use [`as_secs_f64`] or [`as_secs_f32`].
///
/// [`as_secs_f32`]: Duration::as_secs_f32
/// [`as_secs_f64`]: Duration::as_secs_f64
/// [`subsec_nanos`]: Duration::subsec_nanos
#[stable(feature = "duration", since = "1.3.0")]
#[rustc_const_stable(feature = "duration_consts", since = "1.32.0")]
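
A sketch of the behavioral difference, reusing the values from the removed example (plain `std`, nothing unstable assumed):

use std::time::Duration;

fn main() {
    let duration = Duration::new(5, 730_023_852);
    // `as_secs` truncates to whole seconds.
    assert_eq!(duration.as_secs(), 5);
    // `as_secs_f64`/`as_secs_f32` include the fractional part.
    assert!((duration.as_secs_f64() - 5.730023852).abs() < 1e-9);
}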

View File

@ -476,7 +476,7 @@ where
let mut ty_to_fn: FxHashMap<Type, (PolyTrait, Option<Type>)> = Default::default();
for p in clean_where_predicates {
let (orig_p, p) = (p, p.clean(self.cx));
let (orig_p, p) = (p, clean_predicate(p, self.cx));
if p.is_none() {
continue;
}

View File

@ -259,66 +259,68 @@ pub(crate) fn clean_middle_region<'tcx>(region: ty::Region<'tcx>) -> Option<Life
}
}
impl<'tcx> Clean<'tcx, Option<WherePredicate>> for hir::WherePredicate<'tcx> {
fn clean(&self, cx: &mut DocContext<'tcx>) -> Option<WherePredicate> {
if !self.in_where_clause() {
return None;
}
Some(match *self {
hir::WherePredicate::BoundPredicate(ref wbp) => {
let bound_params = wbp
.bound_generic_params
.iter()
.map(|param| {
// Higher-ranked params must be lifetimes.
// Higher-ranked lifetimes can't have bounds.
assert_matches!(
param,
hir::GenericParam { kind: hir::GenericParamKind::Lifetime { .. }, .. }
);
Lifetime(param.name.ident().name)
})
.collect();
WherePredicate::BoundPredicate {
ty: clean_ty(wbp.bounded_ty, cx),
bounds: wbp.bounds.iter().filter_map(|x| clean_generic_bound(x, cx)).collect(),
bound_params,
}
}
hir::WherePredicate::RegionPredicate(ref wrp) => WherePredicate::RegionPredicate {
lifetime: clean_lifetime(wrp.lifetime, cx),
bounds: wrp.bounds.iter().filter_map(|x| clean_generic_bound(x, cx)).collect(),
},
hir::WherePredicate::EqPredicate(ref wrp) => WherePredicate::EqPredicate {
lhs: clean_ty(wrp.lhs_ty, cx),
rhs: clean_ty(wrp.rhs_ty, cx).into(),
},
})
fn clean_where_predicate<'tcx>(
predicate: &hir::WherePredicate<'tcx>,
cx: &mut DocContext<'tcx>,
) -> Option<WherePredicate> {
if !predicate.in_where_clause() {
return None;
}
Some(match *predicate {
hir::WherePredicate::BoundPredicate(ref wbp) => {
let bound_params = wbp
.bound_generic_params
.iter()
.map(|param| {
// Higher-ranked params must be lifetimes.
// Higher-ranked lifetimes can't have bounds.
assert_matches!(
param,
hir::GenericParam { kind: hir::GenericParamKind::Lifetime { .. }, .. }
);
Lifetime(param.name.ident().name)
})
.collect();
WherePredicate::BoundPredicate {
ty: clean_ty(wbp.bounded_ty, cx),
bounds: wbp.bounds.iter().filter_map(|x| clean_generic_bound(x, cx)).collect(),
bound_params,
}
}
hir::WherePredicate::RegionPredicate(ref wrp) => WherePredicate::RegionPredicate {
lifetime: clean_lifetime(wrp.lifetime, cx),
bounds: wrp.bounds.iter().filter_map(|x| clean_generic_bound(x, cx)).collect(),
},
hir::WherePredicate::EqPredicate(ref wrp) => WherePredicate::EqPredicate {
lhs: clean_ty(wrp.lhs_ty, cx),
rhs: clean_ty(wrp.rhs_ty, cx).into(),
},
})
}
impl<'tcx> Clean<'tcx, Option<WherePredicate>> for ty::Predicate<'tcx> {
fn clean(&self, cx: &mut DocContext<'tcx>) -> Option<WherePredicate> {
let bound_predicate = self.kind();
match bound_predicate.skip_binder() {
ty::PredicateKind::Trait(pred) => {
clean_poly_trait_predicate(bound_predicate.rebind(pred), cx)
}
ty::PredicateKind::RegionOutlives(pred) => clean_region_outlives_predicate(pred),
ty::PredicateKind::TypeOutlives(pred) => clean_type_outlives_predicate(pred, cx),
ty::PredicateKind::Projection(pred) => Some(clean_projection_predicate(pred, cx)),
ty::PredicateKind::ConstEvaluatable(..) => None,
ty::PredicateKind::WellFormed(..) => None,
ty::PredicateKind::Subtype(..)
| ty::PredicateKind::Coerce(..)
| ty::PredicateKind::ObjectSafe(..)
| ty::PredicateKind::ClosureKind(..)
| ty::PredicateKind::ConstEquate(..)
| ty::PredicateKind::TypeWellFormedFromEnv(..) => panic!("not user writable"),
pub(crate) fn clean_predicate<'tcx>(
predicate: ty::Predicate<'tcx>,
cx: &mut DocContext<'tcx>,
) -> Option<WherePredicate> {
let bound_predicate = predicate.kind();
match bound_predicate.skip_binder() {
ty::PredicateKind::Trait(pred) => {
clean_poly_trait_predicate(bound_predicate.rebind(pred), cx)
}
ty::PredicateKind::RegionOutlives(pred) => clean_region_outlives_predicate(pred),
ty::PredicateKind::TypeOutlives(pred) => clean_type_outlives_predicate(pred, cx),
ty::PredicateKind::Projection(pred) => Some(clean_projection_predicate(pred, cx)),
ty::PredicateKind::ConstEvaluatable(..) => None,
ty::PredicateKind::WellFormed(..) => None,
ty::PredicateKind::Subtype(..)
| ty::PredicateKind::Coerce(..)
| ty::PredicateKind::ObjectSafe(..)
| ty::PredicateKind::ClosureKind(..)
| ty::PredicateKind::ConstEquate(..)
| ty::PredicateKind::TypeWellFormedFromEnv(..) => panic!("not user writable"),
}
}
@ -594,7 +596,11 @@ impl<'tcx> Clean<'tcx, Generics> for hir::Generics<'tcx> {
let mut generics = Generics {
params,
where_predicates: self.predicates.iter().filter_map(|x| x.clean(cx)).collect(),
where_predicates: self
.predicates
.iter()
.filter_map(|x| clean_where_predicate(x, cx))
.collect(),
};
// Some duplicates are generated for ?Sized bounds between type params and where
@ -695,7 +701,7 @@ fn clean_ty_generics<'tcx>(
if let Some(param_idx) = param_idx {
if let Some(b) = impl_trait.get_mut(&param_idx.into()) {
let p: WherePredicate = p.clean(cx)?;
let p: WherePredicate = clean_predicate(*p, cx)?;
b.extend(
p.get_bounds()
@ -752,7 +758,7 @@ fn clean_ty_generics<'tcx>(
// Now that `cx.impl_trait_bounds` is populated, we can process
// remaining predicates which could contain `impl Trait`.
let mut where_predicates =
where_predicates.into_iter().flat_map(|p| p.clean(cx)).collect::<Vec<_>>();
where_predicates.into_iter().flat_map(|p| clean_predicate(*p, cx)).collect::<Vec<_>>();
// Type parameters have a Sized bound by default unless removed with
// ?Sized. Scan through the predicates and mark any type parameter with

View File

@ -6,9 +6,9 @@
enum Eu64 {
//~^ ERROR discriminant value `0` assigned more than once
Au64 = 0,
//~^NOTE first assignment of `0`
//~^NOTE `0` assigned here
Bu64 = 0x8000_0000_0000_0000
//~^NOTE second assignment of `0` (overflowed from `9223372036854775808`)
//~^NOTE `0` (overflowed from `9223372036854775808`) assigned here
}
fn main() {}

View File

@ -5,10 +5,10 @@ LL | enum Eu64 {
| ^^^^^^^^^
LL |
LL | Au64 = 0,
| - first assignment of `0`
| - `0` assigned here
LL |
LL | Bu64 = 0x8000_0000_0000_0000
| --------------------- second assignment of `0` (overflowed from `9223372036854775808`)
| --------------------- `0` (overflowed from `9223372036854775808`) assigned here
error: aborting due to previous error

View File

@ -1,9 +1,9 @@
enum Enum {
//~^ ERROR discriminant value `3` assigned more than once
P = 3,
//~^ NOTE first assignment of `3`
//~^ NOTE `3` assigned here
X = 3,
//~^ NOTE second assignment of `3`
//~^ NOTE `3` assigned here
Y = 5
}
@ -11,20 +11,43 @@ enum Enum {
enum EnumOverflowRepr {
//~^ ERROR discriminant value `1` assigned more than once
P = 257,
//~^ NOTE first assignment of `1` (overflowed from `257`)
//~^ NOTE `1` (overflowed from `257`) assigned here
X = 513,
//~^ NOTE second assignment of `1` (overflowed from `513`)
//~^ NOTE `1` (overflowed from `513`) assigned here
}
#[repr(i8)]
enum NegDisEnum {
//~^ ERROR discriminant value `-1` assigned more than once
First = -1,
//~^ NOTE first assignment of `-1`
//~^ NOTE `-1` assigned here
Second = -2,
//~^ NOTE assigned discriminant for `Last` was incremented from this discriminant
//~^ NOTE discriminant for `Last` incremented from this startpoint (`Second` + 1 variant later => `Last` = -1)
Last,
//~^ NOTE second assignment of `-1`
//~^ NOTE `-1` assigned here
}
enum MultipleDuplicates {
//~^ ERROR discriminant value `0` assigned more than once
//~^^ ERROR discriminant value `-2` assigned more than once
V0,
//~^ NOTE `0` assigned here
V1 = 0,
//~^ NOTE `0` assigned here
V2,
V3,
V4 = 0,
//~^ NOTE `0` assigned here
V5 = -2,
//~^ NOTE discriminant for `V7` incremented from this startpoint (`V5` + 2 variants later => `V7` = 0)
//~^^ NOTE `-2` assigned here
V6,
V7,
//~^ NOTE `0` assigned here
V8 = -3,
//~^ NOTE discriminant for `V9` incremented from this startpoint (`V8` + 1 variant later => `V9` = -2)
V9,
//~^ NOTE `-2` assigned here
}
fn main() {

View File

@ -5,10 +5,10 @@ LL | enum Enum {
| ^^^^^^^^^
LL |
LL | P = 3,
| - first assignment of `3`
| - `3` assigned here
LL |
LL | X = 3,
| - second assignment of `3`
| - `3` assigned here
error[E0081]: discriminant value `1` assigned more than once
--> $DIR/E0081.rs:11:1
@ -17,10 +17,10 @@ LL | enum EnumOverflowRepr {
| ^^^^^^^^^^^^^^^^^^^^^
LL |
LL | P = 257,
| --- first assignment of `1` (overflowed from `257`)
| --- `1` (overflowed from `257`) assigned here
LL |
LL | X = 513,
| --- second assignment of `1` (overflowed from `513`)
| --- `1` (overflowed from `513`) assigned here
error[E0081]: discriminant value `-1` assigned more than once
--> $DIR/E0081.rs:20:1
@ -29,14 +29,50 @@ LL | enum NegDisEnum {
| ^^^^^^^^^^^^^^^
LL |
LL | First = -1,
| -- first assignment of `-1`
| -- `-1` assigned here
LL |
LL | Second = -2,
| ----------- assigned discriminant for `Last` was incremented from this discriminant
| ----------- discriminant for `Last` incremented from this startpoint (`Second` + 1 variant later => `Last` = -1)
LL |
LL | Last,
| ---- second assignment of `-1`
| ---- `-1` assigned here
error: aborting due to 3 previous errors
error[E0081]: discriminant value `0` assigned more than once
--> $DIR/E0081.rs:30:1
|
LL | enum MultipleDuplicates {
| ^^^^^^^^^^^^^^^^^^^^^^^
...
LL | V0,
| -- `0` assigned here
LL |
LL | V1 = 0,
| - `0` assigned here
...
LL | V4 = 0,
| - `0` assigned here
LL |
LL | V5 = -2,
| ------- discriminant for `V7` incremented from this startpoint (`V5` + 2 variants later => `V7` = 0)
...
LL | V7,
| -- `0` assigned here
error[E0081]: discriminant value `-2` assigned more than once
--> $DIR/E0081.rs:30:1
|
LL | enum MultipleDuplicates {
| ^^^^^^^^^^^^^^^^^^^^^^^
...
LL | V5 = -2,
| -- `-2` assigned here
...
LL | V8 = -3,
| ------- discriminant for `V9` incremented from this startpoint (`V8` + 1 variant later => `V9` = -2)
LL |
LL | V9,
| -- `-2` assigned here
error: aborting due to 5 previous errors
For more information about this error, try `rustc --explain E0081`.

View File

@ -1,16 +0,0 @@
const N: isize = 1;
enum Foo {
//~^ ERROR discriminant value `1` assigned more than once
//~| ERROR discriminant value `1` assigned more than once
//~| ERROR discriminant value `1` assigned more than once
A = 1,
B = 1,
C = 0,
D,
E = N,
}
fn main() {}

View File

@ -1,40 +0,0 @@
error[E0081]: discriminant value `1` assigned more than once
--> $DIR/issue-15524.rs:3:1
|
LL | enum Foo {
| ^^^^^^^^
...
LL | A = 1,
| - first assignment of `1`
LL | B = 1,
| - second assignment of `1`
error[E0081]: discriminant value `1` assigned more than once
--> $DIR/issue-15524.rs:3:1
|
LL | enum Foo {
| ^^^^^^^^
...
LL | A = 1,
| - first assignment of `1`
LL | B = 1,
LL | C = 0,
| ----- assigned discriminant for `D` was incremented from this discriminant
LL | D,
| - second assignment of `1`
error[E0081]: discriminant value `1` assigned more than once
--> $DIR/issue-15524.rs:3:1
|
LL | enum Foo {
| ^^^^^^^^
...
LL | A = 1,
| - first assignment of `1`
...
LL | E = N,
| - second assignment of `1`
error: aborting due to 3 previous errors
For more information about this error, try `rustc --explain E0081`.

View File

@ -0,0 +1,19 @@
// force-host
// no-prefer-dynamic
#![crate_type = "proc-macro"]
extern crate proc_macro;
use proc_macro::TokenStream;
#[proc_macro]
pub fn cause_ice(_: TokenStream) -> TokenStream {
"
enum IceCause {
Variant,
}
pub use IceCause::Variant;
".parse().unwrap()
}

View File

@ -0,0 +1,10 @@
// aux-build:re-export.rs
// edition:2018
extern crate re_export;
use re_export::cause_ice;
cause_ice!(); //~ ERROR `Variant` is only public within the crate, and cannot be re-exported outside
fn main() {}

View File

@ -0,0 +1,16 @@
error[E0364]: `Variant` is only public within the crate, and cannot be re-exported outside
--> $DIR/issue-79148.rs:8:1
|
LL | cause_ice!();
| ^^^^^^^^^^^^
|
note: consider marking `Variant` as `pub` in the imported module
--> $DIR/issue-79148.rs:8:1
|
LL | cause_ice!();
| ^^^^^^^^^^^^
= note: this error originates in the macro `cause_ice` (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to previous error
For more information about this error, try `rustc --explain E0364`.

View File

@ -1,12 +0,0 @@
// Black and White have the same discriminator value ...
enum Color {
//~^ ERROR discriminant value `0` assigned more than once
Red = 0xff0000,
Green = 0x00ff00,
Blue = 0x0000ff,
Black = 0x000000,
White = 0x000000,
}
fn main() { }

View File

@ -1,14 +0,0 @@
error[E0081]: discriminant value `0` assigned more than once
--> $DIR/tag-variant-disr-dup.rs:3:1
|
LL | enum Color {
| ^^^^^^^^^^
...
LL | Black = 0x000000,
| -------- first assignment of `0`
LL | White = 0x000000,
| -------- second assignment of `0`
error: aborting due to previous error
For more information about this error, try `rustc --explain E0081`.

View File

@ -2,9 +2,12 @@ error[E0615]: attempted to take value of method `read` on type `*mut Foo`
--> $DIR/issue-91210-ptr-method.rs:10:7
|
LL | x.read = 4;
| - ^^^^ method, not a field
| |
| help: to access the field, dereference first: `(*x)`
| ^^^^ method, not a field
|
help: to access the field, dereference first
|
LL | (*x).read = 4;
| ++ +
error: aborting due to previous error

View File

@ -19,7 +19,7 @@ Before submitting, please make sure that you're not running into one of these kn
Otherwise please try to provide information which will help us to fix the issue faster. Minimal reproducible examples with few dependencies are especially lovely <3.
-->
**rust-analyzer version**: (eg. output of "Rust Analyzer: Show RA Version" command)
**rust-analyzer version**: (eg. output of "rust-analyzer: Show RA Version" command, accessible in VSCode via <kbd>Ctrl/⌘</kbd>+<kbd>Shift</kbd>+<kbd>P</kbd>)
**rustc version**: (eg. output of `rustc -V`)

View File

@ -18,6 +18,7 @@ env:
FETCH_DEPTH: 0 # pull in the tags for the version string
MACOSX_DEPLOYMENT_TARGET: 10.15
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc
CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc
jobs:
dist:
@ -36,6 +37,9 @@ jobs:
- os: ubuntu-18.04
target: aarch64-unknown-linux-gnu
code-target: linux-arm64
- os: ubuntu-18.04
target: arm-unknown-linux-gnueabihf
code-target: linux-armhf
- os: macos-11
target: x86_64-apple-darwin
code-target: darwin-x64
@ -67,13 +71,17 @@ jobs:
node-version: 14.x
- name: Update apt repositories
if: matrix.target == 'aarch64-unknown-linux-gnu'
if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf'
run: sudo apt-get update
- name: Install target toolchain
- name: Install AArch64 target toolchain
if: matrix.target == 'aarch64-unknown-linux-gnu'
run: sudo apt-get install gcc-aarch64-linux-gnu
- name: Install ARM target toolchain
if: matrix.target == 'arm-unknown-linux-gnueabihf'
run: sudo apt-get install gcc-arm-linux-gnueabihf
- name: Dist
run: cargo xtask dist --client-patch-version ${{ github.run_number }}
@ -204,6 +212,10 @@ jobs:
with:
name: dist-aarch64-unknown-linux-gnu
path: dist
- uses: actions/download-artifact@v1
with:
name: dist-arm-unknown-linux-gnueabihf
path: dist
- uses: actions/download-artifact@v1
with:
name: dist-x86_64-pc-windows-msvc

View File

@ -43,7 +43,7 @@ https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer
## License
Rust analyzer is primarily distributed under the terms of both the MIT
rust-analyzer is primarily distributed under the terms of both the MIT
license and the Apache License (Version 2.0).
See LICENSE-APACHE and LICENSE-MIT for details.

View File

@ -57,6 +57,7 @@ pub struct FlycheckHandle {
// XXX: drop order is significant
sender: Sender<Restart>,
_thread: jod_thread::JoinHandle,
id: usize,
}
impl FlycheckHandle {
@ -72,18 +73,22 @@ impl FlycheckHandle {
.name("Flycheck".to_owned())
.spawn(move || actor.run(receiver))
.expect("failed to spawn thread");
FlycheckHandle { sender, _thread: thread }
FlycheckHandle { id, sender, _thread: thread }
}
/// Schedule a re-start of the cargo check worker.
pub fn update(&self) {
self.sender.send(Restart).unwrap();
}
pub fn id(&self) -> usize {
self.id
}
}
pub enum Message {
/// Request adding a diagnostic with fixes included to a file
AddDiagnostic { workspace_root: AbsPathBuf, diagnostic: Diagnostic },
AddDiagnostic { id: usize, workspace_root: AbsPathBuf, diagnostic: Diagnostic },
/// Request check progress notification to client
Progress {
@ -96,8 +101,9 @@ pub enum Message {
impl fmt::Debug for Message {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Message::AddDiagnostic { workspace_root, diagnostic } => f
Message::AddDiagnostic { id, workspace_root, diagnostic } => f
.debug_struct("AddDiagnostic")
.field("id", id)
.field("workspace_root", workspace_root)
.field("diagnostic_code", &diagnostic.code.as_ref().map(|it| &it.code))
.finish(),
@ -183,7 +189,7 @@ impl FlycheckActor {
}
}
Event::CheckEvent(None) => {
tracing::debug!("flycheck finished");
tracing::debug!(flycheck_id = self.id, "flycheck finished");
// Watcher finished
let cargo_handle = self.cargo_handle.take().unwrap();
@ -203,6 +209,7 @@ impl FlycheckActor {
CargoMessage::Diagnostic(msg) => {
self.send(Message::AddDiagnostic {
id: self.id,
workspace_root: self.workspace_root.clone(),
diagnostic: msg,
});

View File

@ -451,7 +451,7 @@ impl HasChildSource<LocalTypeOrConstParamId> for GenericDefId {
if let GenericDefId::TraitId(id) = *self {
let trait_ref = id.lookup(db).source(db).value;
let idx = idx_iter.next().unwrap();
params.insert(idx, Either::Right(trait_ref))
params.insert(idx, Either::Right(trait_ref));
}
if let Some(generic_params_list) = generic_params_list {

View File

@ -14,7 +14,7 @@
//! unaffected, so we don't have to recompute name resolution results or item data (see `data.rs`).
//!
//! The `ItemTree` for the currently open file can be displayed by using the VS Code command
//! "Rust Analyzer: Debug ItemTree".
//! "rust-analyzer: Debug ItemTree".
//!
//! Compared to rustc's architecture, `ItemTree` has properties from both rustc's AST and HIR: many
//! syntax-level Rust features are already desugared to simpler forms in the `ItemTree`, but name

View File

@ -224,7 +224,7 @@ pub(crate) fn field_visibilities_query(
let resolver = variant_id.module(db).resolver(db);
let mut res = ArenaMap::default();
for (field_id, field_data) in var_data.fields().iter() {
res.insert(field_id, field_data.visibility.resolve(db, &resolver))
res.insert(field_id, field_data.visibility.resolve(db, &resolver));
}
Arc::new(res)
}

View File

@ -5,7 +5,7 @@ use std::mem;
use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
use rustc_hash::FxHashMap;
use syntax::{
ast::{self, AstNode},
ast::{self, AstNode, HasLoopBody},
match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
};
use tt::Subtree;
@ -142,8 +142,59 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
]);
}
},
ast::WhileExpr(it) => {
if it.condition().is_none() {
// insert placeholder token after the while token
let while_token = match it.while_token() {
Some(t) => t,
None => continue,
};
append.insert(while_token.into(), vec![
SyntheticToken {
kind: SyntaxKind::IDENT,
text: "__ra_fixup".into(),
range: end_range,
id: EMPTY_ID,
},
]);
}
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::L_CURLY,
text: "{".into(),
range: end_range,
id: EMPTY_ID,
},
SyntheticToken {
kind: SyntaxKind::R_CURLY,
text: "}".into(),
range: end_range,
id: EMPTY_ID,
},
]);
}
},
ast::LoopExpr(it) => {
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::L_CURLY,
text: "{".into(),
range: end_range,
id: EMPTY_ID,
},
SyntheticToken {
kind: SyntaxKind::R_CURLY,
text: "}".into(),
range: end_range,
id: EMPTY_ID,
},
]);
}
},
// FIXME: foo::
// FIXME: for, loop, match etc.
// FIXME: for, match etc.
_ => (),
}
}
@ -376,6 +427,61 @@ fn foo() {
// the {} gets parsed as the condition, I think?
expect![[r#"
fn foo () {if {} {}}
"#]],
)
}
#[test]
fn fixup_while_1() {
check(
r#"
fn foo() {
while
}
"#,
expect![[r#"
fn foo () {while __ra_fixup {}}
"#]],
)
}
#[test]
fn fixup_while_2() {
check(
r#"
fn foo() {
while foo
}
"#,
expect![[r#"
fn foo () {while foo {}}
"#]],
)
}
#[test]
fn fixup_while_3() {
check(
r#"
fn foo() {
while {}
}
"#,
expect![[r#"
fn foo () {while __ra_fixup {}}
"#]],
)
}
#[test]
fn fixup_loop() {
check(
r#"
fn foo() {
loop
}
"#,
expect![[r#"
fn foo () {loop {}}
"#]],
)
}

View File

@ -381,6 +381,7 @@ pub mod known {
bitor,
bitxor_assign,
bitxor,
branch,
deref_mut,
deref,
div_assign,
@ -396,6 +397,7 @@ pub mod known {
not,
owned_box,
partial_ord,
poll,
r#fn,
rem_assign,
rem,

View File

@ -10,13 +10,13 @@ use chalk_ir::{
cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyVariableKind,
};
use hir_def::{
expr::{ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, Literal, Ordering, Statement, UnaryOp},
expr::{ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, Literal, Statement, UnaryOp},
generics::TypeOrConstParamData,
path::{GenericArg, GenericArgs},
resolver::resolver_for_expr,
ConstParamId, FieldId, FunctionId, ItemContainerId, Lookup,
ConstParamId, FieldId, ItemContainerId, Lookup,
};
use hir_expand::name::{name, Name};
use hir_expand::name::Name;
use stdx::always;
use syntax::ast::RangeOp;
@ -28,7 +28,7 @@ use crate::{
const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode,
},
mapping::{from_chalk, ToChalk},
method_resolution::{self, VisibleFromModule},
method_resolution::{self, lang_names_for_bin_op, VisibleFromModule},
primitive::{self, UintTy},
static_lifetime, to_chalk_trait_id,
utils::{generics, Generics},
@ -947,7 +947,9 @@ impl<'a> InferenceContext<'a> {
let lhs_ty = self.infer_expr(lhs, &lhs_expectation);
let rhs_ty = self.table.new_type_var();
let func = self.resolve_binop_method(op);
let func = lang_names_for_bin_op(op).and_then(|(name, lang_item)| {
self.db.trait_data(self.resolve_lang_item(lang_item)?.as_trait()?).method_by_name(&name)
});
let func = match func {
Some(func) => func,
None => {
@ -1473,55 +1475,4 @@ impl<'a> InferenceContext<'a> {
},
})
}
fn resolve_binop_method(&self, op: BinaryOp) -> Option<FunctionId> {
let (name, lang_item) = match op {
BinaryOp::LogicOp(_) => return None,
BinaryOp::ArithOp(aop) => match aop {
ArithOp::Add => (name!(add), name!(add)),
ArithOp::Mul => (name!(mul), name!(mul)),
ArithOp::Sub => (name!(sub), name!(sub)),
ArithOp::Div => (name!(div), name!(div)),
ArithOp::Rem => (name!(rem), name!(rem)),
ArithOp::Shl => (name!(shl), name!(shl)),
ArithOp::Shr => (name!(shr), name!(shr)),
ArithOp::BitXor => (name!(bitxor), name!(bitxor)),
ArithOp::BitOr => (name!(bitor), name!(bitor)),
ArithOp::BitAnd => (name!(bitand), name!(bitand)),
},
BinaryOp::Assignment { op: Some(aop) } => match aop {
ArithOp::Add => (name!(add_assign), name!(add_assign)),
ArithOp::Mul => (name!(mul_assign), name!(mul_assign)),
ArithOp::Sub => (name!(sub_assign), name!(sub_assign)),
ArithOp::Div => (name!(div_assign), name!(div_assign)),
ArithOp::Rem => (name!(rem_assign), name!(rem_assign)),
ArithOp::Shl => (name!(shl_assign), name!(shl_assign)),
ArithOp::Shr => (name!(shr_assign), name!(shr_assign)),
ArithOp::BitXor => (name!(bitxor_assign), name!(bitxor_assign)),
ArithOp::BitOr => (name!(bitor_assign), name!(bitor_assign)),
ArithOp::BitAnd => (name!(bitand_assign), name!(bitand_assign)),
},
BinaryOp::CmpOp(cop) => match cop {
CmpOp::Eq { negated: false } => (name!(eq), name!(eq)),
CmpOp::Eq { negated: true } => (name!(ne), name!(eq)),
CmpOp::Ord { ordering: Ordering::Less, strict: false } => {
(name!(le), name!(partial_ord))
}
CmpOp::Ord { ordering: Ordering::Less, strict: true } => {
(name!(lt), name!(partial_ord))
}
CmpOp::Ord { ordering: Ordering::Greater, strict: false } => {
(name!(ge), name!(partial_ord))
}
CmpOp::Ord { ordering: Ordering::Greater, strict: true } => {
(name!(gt), name!(partial_ord))
}
},
BinaryOp::Assignment { op: None } => return None,
};
let trait_ = self.resolve_lang_item(lang_item)?.as_trait()?;
self.db.trait_data(trait_).method_by_name(&name)
}
}

View File

@ -1126,7 +1126,7 @@ pub(crate) fn field_types_query(
let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
for (field_id, field_data) in var_data.fields().iter() {
res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(&field_data.type_ref)))
res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(&field_data.type_ref)));
}
Arc::new(res)
}

View File

@ -336,7 +336,7 @@ impl InherentImpls {
}
}
pub fn inherent_impl_crates_query(
pub(crate) fn inherent_impl_crates_query(
db: &dyn HirDatabase,
krate: CrateId,
fp: TyFingerprint,
@ -419,6 +419,55 @@ pub fn def_crates(
}
}
pub fn lang_names_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, Name)> {
use hir_expand::name;
use syntax::ast::{ArithOp, BinaryOp, CmpOp, Ordering};
Some(match op {
BinaryOp::LogicOp(_) => return None,
BinaryOp::ArithOp(aop) => match aop {
ArithOp::Add => (name!(add), name!(add)),
ArithOp::Mul => (name!(mul), name!(mul)),
ArithOp::Sub => (name!(sub), name!(sub)),
ArithOp::Div => (name!(div), name!(div)),
ArithOp::Rem => (name!(rem), name!(rem)),
ArithOp::Shl => (name!(shl), name!(shl)),
ArithOp::Shr => (name!(shr), name!(shr)),
ArithOp::BitXor => (name!(bitxor), name!(bitxor)),
ArithOp::BitOr => (name!(bitor), name!(bitor)),
ArithOp::BitAnd => (name!(bitand), name!(bitand)),
},
BinaryOp::Assignment { op: Some(aop) } => match aop {
ArithOp::Add => (name!(add_assign), name!(add_assign)),
ArithOp::Mul => (name!(mul_assign), name!(mul_assign)),
ArithOp::Sub => (name!(sub_assign), name!(sub_assign)),
ArithOp::Div => (name!(div_assign), name!(div_assign)),
ArithOp::Rem => (name!(rem_assign), name!(rem_assign)),
ArithOp::Shl => (name!(shl_assign), name!(shl_assign)),
ArithOp::Shr => (name!(shr_assign), name!(shr_assign)),
ArithOp::BitXor => (name!(bitxor_assign), name!(bitxor_assign)),
ArithOp::BitOr => (name!(bitor_assign), name!(bitor_assign)),
ArithOp::BitAnd => (name!(bitand_assign), name!(bitand_assign)),
},
BinaryOp::CmpOp(cop) => match cop {
CmpOp::Eq { negated: false } => (name!(eq), name!(eq)),
CmpOp::Eq { negated: true } => (name!(ne), name!(eq)),
CmpOp::Ord { ordering: Ordering::Less, strict: false } => {
(name!(le), name!(partial_ord))
}
CmpOp::Ord { ordering: Ordering::Less, strict: true } => {
(name!(lt), name!(partial_ord))
}
CmpOp::Ord { ordering: Ordering::Greater, strict: false } => {
(name!(ge), name!(partial_ord))
}
CmpOp::Ord { ordering: Ordering::Greater, strict: true } => {
(name!(gt), name!(partial_ord))
}
},
BinaryOp::Assignment { op: None } => return None,
})
}
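
Each returned pair is (method name, lang-item trait name). As a plain-Rust reminder of the desugaring this table encodes (independent of rust-analyzer's internal API):

use std::ops::Add;

fn main() {
    // `a + b` desugars to `Add::add(a, b)`, i.e. the (`add`, `add`) entry.
    assert_eq!(1 + 2, Add::add(1, 2));
    // `a <= b` resolves through `PartialOrd::le`, i.e. the (`le`, `partial_ord`) entry.
    assert!(PartialOrd::le(&1, &2));
}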
/// Look up the method with the given name.
pub(crate) fn lookup_method(
ty: &Canonical<Ty>,

View File

@ -357,6 +357,26 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.resolve_method_call(call).map(Function::from)
}
pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
self.imp.resolve_await_to_poll(await_expr).map(Function::from)
}
pub fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<Function> {
self.imp.resolve_prefix_expr(prefix_expr).map(Function::from)
}
pub fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<Function> {
self.imp.resolve_index_expr(index_expr).map(Function::from)
}
pub fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<Function> {
self.imp.resolve_bin_expr(bin_expr).map(Function::from)
}
pub fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<Function> {
self.imp.resolve_try_expr(try_expr).map(Function::from)
}
pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
self.imp.resolve_method_call_as_callable(call)
}
@ -1066,6 +1086,26 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(call.syntax())?.resolve_method_call(self.db, call)
}
fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<FunctionId> {
self.analyze(await_expr.syntax())?.resolve_await_to_poll(self.db, await_expr)
}
fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<FunctionId> {
self.analyze(prefix_expr.syntax())?.resolve_prefix_expr(self.db, prefix_expr)
}
fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<FunctionId> {
self.analyze(index_expr.syntax())?.resolve_index_expr(self.db, index_expr)
}
fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<FunctionId> {
self.analyze(bin_expr.syntax())?.resolve_bin_expr(self.db, bin_expr)
}
fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<FunctionId> {
self.analyze(try_expr.syntax())?.resolve_try_expr(self.db, try_expr)
}
fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
}

View File

@ -25,15 +25,20 @@ use hir_def::{
Lookup, ModuleDefId, VariantId,
};
use hir_expand::{
builtin_fn_macro::BuiltinFnLikeExpander, hygiene::Hygiene, name::AsName, HirFileId, InFile,
builtin_fn_macro::BuiltinFnLikeExpander,
hygiene::Hygiene,
name,
name::{AsName, Name},
HirFileId, InFile,
};
use hir_ty::{
diagnostics::{
record_literal_missing_fields, record_pattern_missing_fields, unsafe_expressions,
UnsafeExpr,
},
method_resolution, Adjust, Adjustment, AutoBorrow, InferenceResult, Interner, Substitution,
TyExt, TyKind, TyLoweringContext,
method_resolution::{self, lang_names_for_bin_op},
Adjust, Adjustment, AutoBorrow, InferenceResult, Interner, Substitution, Ty, TyExt, TyKind,
TyLoweringContext,
};
use itertools::Itertools;
use smallvec::SmallVec;
@ -255,8 +260,90 @@ impl SourceAnalyzer {
) -> Option<FunctionId> {
let expr_id = self.expr_id(db, &call.clone().into())?;
let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
let f_in_impl = self.resolve_impl_method(db, f_in_trait, &substs);
f_in_impl.or(Some(f_in_trait))
Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, &substs))
}
pub(crate) fn resolve_await_to_poll(
&self,
db: &dyn HirDatabase,
await_expr: &ast::AwaitExpr,
) -> Option<FunctionId> {
let ty = self.ty_of_expr(db, &await_expr.expr()?.into())?;
let op_fn = db
.lang_item(self.resolver.krate(), hir_expand::name![poll].to_smol_str())?
.as_function()?;
let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn).push(ty.clone()).build();
Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
}
pub(crate) fn resolve_prefix_expr(
&self,
db: &dyn HirDatabase,
prefix_expr: &ast::PrefixExpr,
) -> Option<FunctionId> {
let lang_item_name = match prefix_expr.op_kind()? {
ast::UnaryOp::Deref => name![deref],
ast::UnaryOp::Not => name![not],
ast::UnaryOp::Neg => name![neg],
};
let ty = self.ty_of_expr(db, &prefix_expr.expr()?.into())?;
let op_fn = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?;
let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn).push(ty.clone()).build();
Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
}
pub(crate) fn resolve_index_expr(
&self,
db: &dyn HirDatabase,
index_expr: &ast::IndexExpr,
) -> Option<FunctionId> {
let base_ty = self.ty_of_expr(db, &index_expr.base()?.into())?;
let index_ty = self.ty_of_expr(db, &index_expr.index()?.into())?;
let lang_item_name = name![index];
let op_fn = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?;
let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn)
.push(base_ty.clone())
.push(index_ty.clone())
.build();
Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
}
pub(crate) fn resolve_bin_expr(
&self,
db: &dyn HirDatabase,
binop_expr: &ast::BinExpr,
) -> Option<FunctionId> {
let op = binop_expr.op_kind()?;
let lhs = self.ty_of_expr(db, &binop_expr.lhs()?.into())?;
let rhs = self.ty_of_expr(db, &binop_expr.rhs()?.into())?;
let op_fn = lang_names_for_bin_op(op)
.and_then(|(name, lang_item)| self.lang_trait_fn(db, &lang_item, &name))?;
let substs =
hir_ty::TyBuilder::subst_for_def(db, op_fn).push(lhs.clone()).push(rhs.clone()).build();
Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
}
pub(crate) fn resolve_try_expr(
&self,
db: &dyn HirDatabase,
try_expr: &ast::TryExpr,
) -> Option<FunctionId> {
let ty = self.ty_of_expr(db, &try_expr.expr()?.into())?;
let op_fn =
db.lang_item(self.resolver.krate(), name![branch].to_smol_str())?.as_function()?;
let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn).push(ty.clone()).build();
Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
}
pub(crate) fn resolve_field(
@ -666,6 +753,29 @@ impl SourceAnalyzer {
let fun_data = db.function_data(func);
method_resolution::lookup_impl_method(self_ty, db, trait_env, impled_trait, &fun_data.name)
}
fn resolve_impl_method_or_trait_def(
&self,
db: &dyn HirDatabase,
func: FunctionId,
substs: &Substitution,
) -> FunctionId {
self.resolve_impl_method(db, func, substs).unwrap_or(func)
}
fn lang_trait_fn(
&self,
db: &dyn HirDatabase,
lang_trait: &Name,
method_name: &Name,
) -> Option<FunctionId> {
db.trait_data(db.lang_item(self.resolver.krate(), lang_trait.to_smol_str())?.as_trait()?)
.method_by_name(method_name)
}
fn ty_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<&Ty> {
self.infer.as_ref()?.type_of_expr.get(self.expr_id(db, &expr)?)
}
}
fn scope_for(

View File

@ -1,8 +1,8 @@
use hir::{HasSource, InFile};
use hir::{HasSource, HirDisplay, InFile};
use ide_db::assists::{AssistId, AssistKind};
use syntax::{
ast::{self, edit::IndentLevel},
AstNode, TextSize,
ast::{self, make, HasArgList},
match_ast, AstNode, SyntaxNode,
};
use crate::assist_context::{AssistContext, Assists};
@ -32,8 +32,8 @@ use crate::assist_context::{AssistContext, Assists};
// }
// ```
pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let path_expr: ast::PathExpr = ctx.find_node_at_offset()?;
let path = path_expr.path()?;
let path: ast::Path = ctx.find_node_at_offset()?;
let parent = path_parent(&path)?;
if ctx.sema.resolve_path(&path).is_some() {
// No need to generate anything if the path resolves
@ -50,26 +50,71 @@ pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>)
ctx.sema.resolve_path(&path.qualifier()?)
{
let target = path.syntax().text_range();
return add_variant_to_accumulator(acc, ctx, target, e, &name_ref);
return add_variant_to_accumulator(acc, ctx, target, e, &name_ref, parent);
}
None
}
#[derive(Debug)]
enum PathParent {
PathExpr(ast::PathExpr),
RecordExpr(ast::RecordExpr),
PathPat(ast::PathPat),
UseTree(ast::UseTree),
}
impl PathParent {
fn syntax(&self) -> &SyntaxNode {
match self {
PathParent::PathExpr(it) => it.syntax(),
PathParent::RecordExpr(it) => it.syntax(),
PathParent::PathPat(it) => it.syntax(),
PathParent::UseTree(it) => it.syntax(),
}
}
fn make_field_list(&self, ctx: &AssistContext<'_>) -> Option<ast::FieldList> {
let scope = ctx.sema.scope(self.syntax())?;
match self {
PathParent::PathExpr(it) => {
if let Some(call_expr) = it.syntax().parent().and_then(ast::CallExpr::cast) {
make_tuple_field_list(call_expr, ctx, &scope)
} else {
None
}
}
PathParent::RecordExpr(it) => make_record_field_list(it, ctx, &scope),
PathParent::UseTree(_) | PathParent::PathPat(_) => None,
}
}
}
fn path_parent(path: &ast::Path) -> Option<PathParent> {
let parent = path.syntax().parent()?;
match_ast! {
match parent {
ast::PathExpr(it) => Some(PathParent::PathExpr(it)),
ast::RecordExpr(it) => Some(PathParent::RecordExpr(it)),
ast::PathPat(it) => Some(PathParent::PathPat(it)),
ast::UseTree(it) => Some(PathParent::UseTree(it)),
_ => None
}
}
}
fn add_variant_to_accumulator(
acc: &mut Assists,
ctx: &AssistContext<'_>,
target: syntax::TextRange,
adt: hir::Enum,
name_ref: &ast::NameRef,
parent: PathParent,
) -> Option<()> {
let db = ctx.db();
let InFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
let enum_indent = IndentLevel::from_node(&enum_node.syntax());
let variant_list = enum_node.variant_list()?;
let offset = variant_list.syntax().text_range().end() - TextSize::of('}');
let empty_enum = variant_list.variants().next().is_none();
acc.add(
AssistId("generate_enum_variant", AssistKind::Generate),
@ -77,18 +122,80 @@ fn add_variant_to_accumulator(
target,
|builder| {
builder.edit_file(file_id.original_file(db));
let text = format!(
"{maybe_newline}{indent_1}{name},\n{enum_indent}",
maybe_newline = if empty_enum { "\n" } else { "" },
indent_1 = IndentLevel(1),
name = name_ref,
enum_indent = enum_indent
);
builder.insert(offset, text)
let node = builder.make_mut(enum_node);
let variant = make_variant(ctx, name_ref, parent);
node.variant_list().map(|it| it.add_variant(variant.clone_for_update()));
},
)
}
fn make_variant(
ctx: &AssistContext<'_>,
name_ref: &ast::NameRef,
parent: PathParent,
) -> ast::Variant {
let field_list = parent.make_field_list(ctx);
make::variant(make::name(&name_ref.text()), field_list)
}
fn make_record_field_list(
record: &ast::RecordExpr,
ctx: &AssistContext<'_>,
scope: &hir::SemanticsScope<'_>,
) -> Option<ast::FieldList> {
let fields = record.record_expr_field_list()?.fields();
let record_fields = fields.map(|field| {
let name = name_from_field(&field);
let ty = field
.expr()
.and_then(|it| expr_ty(ctx, it, scope))
.unwrap_or_else(make::ty_placeholder);
make::record_field(None, name, ty)
});
Some(make::record_field_list(record_fields).into())
}
fn name_from_field(field: &ast::RecordExprField) -> ast::Name {
let text = match field.name_ref() {
Some(it) => it.to_string(),
None => name_from_field_shorthand(field).unwrap_or("unknown".to_string()),
};
make::name(&text)
}
fn name_from_field_shorthand(field: &ast::RecordExprField) -> Option<String> {
let path = match field.expr()? {
ast::Expr::PathExpr(path_expr) => path_expr.path(),
_ => None,
}?;
Some(path.as_single_name_ref()?.to_string())
}
fn make_tuple_field_list(
call_expr: ast::CallExpr,
ctx: &AssistContext<'_>,
scope: &hir::SemanticsScope<'_>,
) -> Option<ast::FieldList> {
let args = call_expr.arg_list()?.args();
let tuple_fields = args.map(|arg| {
let ty = expr_ty(ctx, arg, &scope).unwrap_or_else(make::ty_placeholder);
make::tuple_field(None, ty)
});
Some(make::tuple_field_list(tuple_fields).into())
}
fn expr_ty(
ctx: &AssistContext<'_>,
arg: ast::Expr,
scope: &hir::SemanticsScope<'_>,
) -> Option<ast::Type> {
let ty = ctx.sema.type_of_expr(&arg).map(|it| it.adjusted())?;
let text = ty.display_source_code(ctx.db(), scope.module().into()).ok()?;
Some(make::ty(&text))
}
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
@ -221,6 +328,236 @@ mod m {
fn main() {
m::Foo::Baz
}
",
)
}
#[test]
fn associated_single_element_tuple() {
check_assist(
generate_enum_variant,
r"
enum Foo {}
fn main() {
Foo::Bar$0(true)
}
",
r"
enum Foo {
Bar(bool),
}
fn main() {
Foo::Bar(true)
}
",
)
}
#[test]
fn associated_single_element_tuple_unknown_type() {
check_assist(
generate_enum_variant,
r"
enum Foo {}
fn main() {
Foo::Bar$0(x)
}
",
r"
enum Foo {
Bar(_),
}
fn main() {
Foo::Bar(x)
}
",
)
}
#[test]
fn associated_multi_element_tuple() {
check_assist(
generate_enum_variant,
r"
struct Struct {}
enum Foo {}
fn main() {
Foo::Bar$0(true, x, Struct {})
}
",
r"
struct Struct {}
enum Foo {
Bar(bool, _, Struct),
}
fn main() {
Foo::Bar(true, x, Struct {})
}
",
)
}
#[test]
fn associated_record() {
check_assist(
generate_enum_variant,
r"
enum Foo {}
fn main() {
Foo::$0Bar { x: true }
}
",
r"
enum Foo {
Bar { x: bool },
}
fn main() {
Foo::Bar { x: true }
}
",
)
}
#[test]
fn associated_record_unknown_type() {
check_assist(
generate_enum_variant,
r"
enum Foo {}
fn main() {
Foo::$0Bar { x: y }
}
",
r"
enum Foo {
Bar { x: _ },
}
fn main() {
Foo::Bar { x: y }
}
",
)
}
#[test]
fn associated_record_field_shorthand() {
check_assist(
generate_enum_variant,
r"
enum Foo {}
fn main() {
let x = true;
Foo::$0Bar { x }
}
",
r"
enum Foo {
Bar { x: bool },
}
fn main() {
let x = true;
Foo::Bar { x }
}
",
)
}
#[test]
fn associated_record_field_shorthand_unknown_type() {
check_assist(
generate_enum_variant,
r"
enum Foo {}
fn main() {
Foo::$0Bar { x }
}
",
r"
enum Foo {
Bar { x: _ },
}
fn main() {
Foo::Bar { x }
}
",
)
}
#[test]
fn associated_record_field_multiple_fields() {
check_assist(
generate_enum_variant,
r"
struct Struct {}
enum Foo {}
fn main() {
Foo::$0Bar { x, y: x, s: Struct {} }
}
",
r"
struct Struct {}
enum Foo {
Bar { x: _, y: _, s: Struct },
}
fn main() {
Foo::Bar { x, y: x, s: Struct {} }
}
",
)
}
#[test]
fn use_tree() {
check_assist(
generate_enum_variant,
r"
//- /main.rs
mod foo;
use foo::Foo::Bar$0;
//- /foo.rs
enum Foo {}
",
r"
enum Foo {
Bar,
}
",
)
}
#[test]
fn not_applicable_for_path_type() {
check_assist_not_applicable(
generate_enum_variant,
r"
enum Foo {}
impl Foo::Bar$0 {}
",
)
}
#[test]
fn path_pat() {
check_assist(
generate_enum_variant,
r"
enum Foo {}
fn foo(x: Foo) {
match x {
Foo::Bar$0 =>
}
}
",
r"
enum Foo {
Bar,
}
fn foo(x: Foo) {
match x {
Foo::Bar =>
}
}
",
)
}

View File

@ -45,7 +45,7 @@ impl RootDatabase {
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Memory Usage (Clears Database)**
// | VS Code | **rust-analyzer: Memory Usage (Clears Database)**
// |===
// image::https://user-images.githubusercontent.com/48062697/113065592-08559f00-91b1-11eb-8c96-64b88068ec02.gif[]
pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> {

View File

@ -127,10 +127,12 @@ impl Definition {
}
}
// FIXME: IdentClass as a name no longer fits
#[derive(Debug)]
pub enum IdentClass {
NameClass(NameClass),
NameRefClass(NameRefClass),
Operator(OperatorClass),
}
impl IdentClass {
@ -147,6 +149,11 @@ impl IdentClass {
.map(IdentClass::NameClass)
.or_else(|| NameRefClass::classify_lifetime(sema, &lifetime).map(IdentClass::NameRefClass))
},
ast::AwaitExpr(await_expr) => OperatorClass::classify_await(sema, &await_expr).map(IdentClass::Operator),
ast::BinExpr(bin_expr) => OperatorClass::classify_bin(sema, &bin_expr).map(IdentClass::Operator),
ast::IndexExpr(index_expr) => OperatorClass::classify_index(sema, &index_expr).map(IdentClass::Operator),
ast::PrefixExpr(prefix_expr) => OperatorClass::classify_prefix(sema,&prefix_expr).map(IdentClass::Operator),
ast::TryExpr(try_expr) => OperatorClass::classify_try(sema,&try_expr).map(IdentClass::Operator),
_ => None,
}
}
@ -184,6 +191,33 @@ impl IdentClass {
res.push(Definition::Local(local_ref));
res.push(Definition::Field(field_ref));
}
IdentClass::Operator(
OperatorClass::Await(func)
| OperatorClass::Prefix(func)
| OperatorClass::Bin(func)
| OperatorClass::Index(func)
| OperatorClass::Try(func),
) => res.push(Definition::Function(func)),
}
res
}
pub fn definitions_no_ops(self) -> ArrayVec<Definition, 2> {
let mut res = ArrayVec::new();
match self {
IdentClass::NameClass(NameClass::Definition(it) | NameClass::ConstReference(it)) => {
res.push(it)
}
IdentClass::NameClass(NameClass::PatFieldShorthand { local_def, field_ref }) => {
res.push(Definition::Local(local_def));
res.push(Definition::Field(field_ref));
}
IdentClass::NameRefClass(NameRefClass::Definition(it)) => res.push(it),
IdentClass::NameRefClass(NameRefClass::FieldShorthand { local_ref, field_ref }) => {
res.push(Definition::Local(local_ref));
res.push(Definition::Field(field_ref));
}
IdentClass::Operator(_) => (),
}
res
}
@ -332,6 +366,52 @@ impl NameClass {
}
}
#[derive(Debug)]
pub enum OperatorClass {
Await(Function),
Prefix(Function),
Index(Function),
Try(Function),
Bin(Function),
}
impl OperatorClass {
pub fn classify_await(
sema: &Semantics<'_, RootDatabase>,
await_expr: &ast::AwaitExpr,
) -> Option<OperatorClass> {
sema.resolve_await_to_poll(await_expr).map(OperatorClass::Await)
}
pub fn classify_prefix(
sema: &Semantics<'_, RootDatabase>,
prefix_expr: &ast::PrefixExpr,
) -> Option<OperatorClass> {
sema.resolve_prefix_expr(prefix_expr).map(OperatorClass::Prefix)
}
pub fn classify_try(
sema: &Semantics<'_, RootDatabase>,
try_expr: &ast::TryExpr,
) -> Option<OperatorClass> {
sema.resolve_try_expr(try_expr).map(OperatorClass::Try)
}
pub fn classify_index(
sema: &Semantics<'_, RootDatabase>,
index_expr: &ast::IndexExpr,
) -> Option<OperatorClass> {
sema.resolve_index_expr(index_expr).map(OperatorClass::Index)
}
pub fn classify_bin(
sema: &Semantics<'_, RootDatabase>,
bin_expr: &ast::BinExpr,
) -> Option<OperatorClass> {
sema.resolve_bin_expr(bin_expr).map(OperatorClass::Bin)
}
}
/// This is similar to [`NameClass`], but works for [`ast::NameRef`] rather than
/// for [`ast::Name`]. Similarly, what looks like a reference in syntax is a
/// reference most of the time, but there are a couple of annoying exceptions.

View File

@ -57,7 +57,7 @@
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Structural Search Replace**
// | VS Code | **rust-analyzer: Structural Search Replace**
// |===
//
// Also available as an assist, by writing a comment containing the structural

View File

@ -7,7 +7,7 @@ use ide_db::{
search::FileReference,
FxIndexMap, RootDatabase,
};
use syntax::{ast, AstNode, SyntaxKind::NAME, TextRange};
use syntax::{ast, AstNode, SyntaxKind::IDENT, TextRange};
use crate::{goto_definition, FilePosition, NavigationTarget, RangeInfo, TryToNav};
@ -79,7 +79,7 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio
let file = sema.parse(file_id);
let file = file.syntax();
let token = pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
NAME => 1,
IDENT => 1,
_ => 0,
})?;
let mut calls = CallLocations::default();

View File

@ -19,7 +19,7 @@ pub struct ExpandedMacro {
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Expand macro recursively**
// | VS Code | **rust-analyzer: Expand macro recursively**
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif[]
@ -32,7 +32,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
_ => 0,
})?;
// due to how Rust Analyzer works internally, we need to special case derive attributes,
// due to how rust-analyzer works internally, we need to special case derive attributes,
// otherwise they might not get found, e.g. here with the cursor at $0 `#[attr]` would expand:
// ```
// #[attr]

View File

@ -39,7 +39,11 @@ pub(crate) fn goto_definition(
| T![super]
| T![crate]
| T![Self]
| COMMENT => 2,
| COMMENT => 4,
// index and prefix ops
T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3,
kind if kind.is_keyword() => 2,
T!['('] | T![')'] => 2,
kind if kind.is_trivia() => 0,
_ => 1,
})?;
@ -1628,6 +1632,122 @@ macro_rules! foo {
}
foo!(bar$0);
"#,
);
}
#[test]
fn goto_await_poll() {
check(
r#"
//- minicore: future
struct MyFut;
impl core::future::Future for MyFut {
type Output = ();
fn poll(
//^^^^
self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>
) -> std::task::Poll<Self::Output>
{
()
}
}
fn f() {
MyFut.await$0;
}
"#,
);
}
#[test]
fn goto_try_op() {
check(
r#"
//- minicore: try
struct Struct;
impl core::ops::Try for Struct {
fn branch(
//^^^^^^
self
) {}
}
fn f() {
Struct?$0;
}
"#,
);
}
#[test]
fn goto_index_op() {
check(
r#"
//- minicore: index
struct Struct;
impl core::ops::Index<usize> for Struct {
fn index(
//^^^^^
self
) {}
}
fn f() {
Struct[0]$0;
}
"#,
);
}
#[test]
fn goto_prefix_op() {
check(
r#"
//- minicore: deref
struct Struct;
impl core::ops::Deref for Struct {
fn deref(
//^^^^^
self
) {}
}
fn f() {
$0*Struct;
}
"#,
);
}
#[test]
fn goto_bin_op() {
check(
r#"
//- minicore: add
struct Struct;
impl core::ops::Add for Struct {
fn add(
//^^^
self
) {}
}
fn f() {
Struct +$0 Struct;
}
"#,
);
}

View File

@ -30,7 +30,7 @@ pub(crate) fn goto_implementation(
let original_token =
pick_best_token(syntax.token_at_offset(position.offset), |kind| match kind {
IDENT | T![self] => 1,
IDENT | T![self] | INT_NUMBER => 1,
_ => 0,
})?;
let range = original_token.text_range();

View File

@ -333,7 +333,8 @@ fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange
fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSet<Definition> {
sema.descend_into_macros(token)
.into_iter()
.filter_map(|token| IdentClass::classify_token(sema, &token).map(IdentClass::definitions))
.filter_map(|token| IdentClass::classify_token(sema, &token))
.map(IdentClass::definitions_no_ops)
.flatten()
.collect()
}
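
`definitions_no_ops` is new in this change set; the hunks suggest it behaves like `definitions` but drops operator-derived definitions, so that e.g. highlight-related does not treat `+` as a reference to `Add::add`. A hypothetical sketch of that split:

```rust
#[derive(Debug)]
enum IdentClass {
    NameRef(Vec<&'static str>),
    Operator(Vec<&'static str>),
}

impl IdentClass {
    // Keeps everything, operators included.
    fn definitions(self) -> Vec<&'static str> {
        match self {
            IdentClass::NameRef(defs) | IdentClass::Operator(defs) => defs,
        }
    }

    // Same, but operator classifications yield nothing.
    fn definitions_no_ops(self) -> Vec<&'static str> {
        match self {
            IdentClass::NameRef(defs) => defs,
            IdentClass::Operator(_) => Vec::new(),
        }
    }
}

fn main() {
    let op = IdentClass::Operator(vec!["core::ops::Add::add"]);
    assert!(op.definitions_no_ops().is_empty());

    let op = IdentClass::Operator(vec!["core::ops::Add::add"]);
    assert_eq!(op.definitions(), vec!["core::ops::Add::add"]);
}
```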

View File

@ -9,7 +9,7 @@ use either::Either;
use hir::{HasSource, Semantics};
use ide_db::{
base_db::FileRange,
defs::{Definition, IdentClass},
defs::{Definition, IdentClass, OperatorClass},
famous_defs::FamousDefs,
helpers::pick_best_token,
FxIndexSet, RootDatabase,
@ -101,7 +101,10 @@ pub(crate) fn hover(
let offset = range.start();
let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self] => 3,
IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self] => 4,
// index and prefix ops
T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3,
kind if kind.is_keyword() => 2,
T!['('] | T![')'] => 2,
kind if kind.is_trivia() => 0,
_ => 1,
@ -136,6 +139,11 @@ pub(crate) fn hover(
.filter_map(|token| {
let node = token.parent()?;
let class = IdentClass::classify_token(sema, token)?;
if let IdentClass::Operator(OperatorClass::Await(_)) = class {
// It's better for us to fall back to the keyword hover here;
// rendering `poll` would be very confusing
return None;
}
Some(class.definitions().into_iter().zip(iter::once(node).cycle()))
})
.flatten()
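
A condensed sketch of the `.await` special case above: classifying the token as the await operator bails out of the `filter_map`, so hover falls back to the keyword docs instead of rendering `Future::poll`. Types are simplified stand-ins:

```rust
#[derive(Debug)]
enum OperatorClass {
    Await,
    Index,
}

#[derive(Debug)]
enum IdentClass {
    Ident(&'static str),
    Operator(OperatorClass),
}

fn hover_definitions(tokens: Vec<IdentClass>) -> Vec<&'static str> {
    tokens
        .into_iter()
        .filter_map(|class| {
            if let IdentClass::Operator(OperatorClass::Await) = class {
                // Bail out: let the keyword hover handle `.await`.
                return None;
            }
            match class {
                IdentClass::Ident(def) => Some(def),
                IdentClass::Operator(_) => Some("operator def"),
            }
        })
        .collect()
}

fn main() {
    let defs = hover_definitions(vec![
        IdentClass::Ident("Struct"),
        IdentClass::Operator(OperatorClass::Await),
    ]);
    assert_eq!(defs, vec!["Struct"]); // the await token contributed nothing
}
```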
@ -232,10 +240,12 @@ fn hover_type_fallback(
token: &SyntaxToken,
original_token: &SyntaxToken,
) -> Option<RangeInfo<HoverResult>> {
let node = token
.parent_ancestors()
.take_while(|it| !ast::Item::can_cast(it.kind()))
.find(|n| ast::Expr::can_cast(n.kind()) || ast::Pat::can_cast(n.kind()))?;
let node =
token.parent_ancestors().take_while(|it| !ast::Item::can_cast(it.kind())).find(|n| {
ast::Expr::can_cast(n.kind())
|| ast::Pat::can_cast(n.kind())
|| ast::Type::can_cast(n.kind())
})?;
let expr_or_pat = match_ast! {
match node {
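
This hunk widens the hover fallback to also stop at type nodes. A small sketch of the ancestor walk under a simplified node model (not rowan):

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum NodeKind {
    Token,
    Expr,
    Pat,
    Type,
    Item,
}

// Walk parents until an item boundary, stopping at the first
// expression, pattern, or (newly) type node.
fn hover_fallback_target(ancestors: &[NodeKind]) -> Option<NodeKind> {
    ancestors
        .iter()
        .copied()
        .take_while(|&kind| kind != NodeKind::Item)
        .find(|&kind| matches!(kind, NodeKind::Expr | NodeKind::Pat | NodeKind::Type))
}

fn main() {
    // Hovering inside `fn f(x: &u32)` now climbs to the type node before the item.
    let path = [NodeKind::Token, NodeKind::Type, NodeKind::Item];
    assert_eq!(hover_fallback_target(&path), Some(NodeKind::Type));
}
```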

View File

@ -5051,3 +5051,37 @@ fn f() {
```"#]],
);
}
#[test]
fn hover_deref() {
check(
r#"
//- minicore: deref
struct Struct(usize);
impl core::ops::Deref for Struct {
type Target = usize;
fn deref(&self) -> &Self::Target {
&self.0
}
}
fn f() {
$0*Struct(0);
}
"#,
expect![[r#"
***
```rust
test::Struct
```
```rust
fn deref(&self) -> &Self::Target
```
"#]],
);
}

View File

@ -100,7 +100,7 @@ pub enum InlayTooltip {
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Toggle inlay hints*
// | VS Code | **rust-analyzer: Toggle inlay hints*
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png[]

View File

@ -28,7 +28,7 @@ pub struct JoinLinesConfig {
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Join lines**
// | VS Code | **rust-analyzer: Join lines**
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113020661-b6922200-917a-11eb-87c4-b75acc028f11.gif[]

View File

@ -12,7 +12,7 @@ use syntax::{
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Find matching brace**
// | VS Code | **rust-analyzer: Find matching brace**
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113065573-04298180-91b1-11eb-8dec-d4e2a202f304.gif[]

View File

@ -90,7 +90,7 @@ pub(crate) fn moniker(
.descend_into_macros(original_token.clone())
.into_iter()
.filter_map(|token| {
IdentClass::classify_token(sema, &token).map(IdentClass::definitions).map(|it| {
IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| {
it.into_iter().flat_map(|def| def_to_moniker(sema.db, def, current_crate))
})
})

View File

@ -19,8 +19,8 @@ pub enum Direction {
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Move item up**
// | VS Code | **Rust Analyzer: Move item down**
// | VS Code | **rust-analyzer: Move item up**
// | VS Code | **rust-analyzer: Move item down**
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113065576-04298180-91b1-11eb-91ce-4505e99ed598.gif[]

View File

@ -18,7 +18,7 @@ use crate::NavigationTarget;
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Locate parent module**
// | VS Code | **rust-analyzer: Locate parent module**
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113065580-04c21800-91b1-11eb-9a32-00086161c0bd.gif[]

View File

@ -116,7 +116,7 @@ impl Runnable {
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Run**
// | VS Code | **rust-analyzer: Run**
// |===
// image::https://user-images.githubusercontent.com/48062697/113065583-055aae80-91b1-11eb-958f-d67efcaf6a2f.gif[]
pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
@ -202,7 +202,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Peek related tests**
// | VS Code | **rust-analyzer: Peek related tests**
// |===
pub(crate) fn related_tests(
db: &RootDatabase,
@ -373,11 +373,13 @@ pub(crate) fn runnable_impl(
let adt_name = ty.as_adt()?.name(sema.db);
let mut ty_args = ty.type_arguments().peekable();
let params = if ty_args.peek().is_some() {
format!("<{}>", ty_args.format_with(", ", |ty, cb| cb(&ty.display(sema.db))))
format!("<{}>", ty_args.format_with(",", |ty, cb| cb(&ty.display(sema.db))))
} else {
String::new()
};
let test_id = TestId::Path(format!("{}{}", adt_name, params));
let mut test_id = format!("{}{}", adt_name, params);
test_id.retain(|c| c != ' ');
let test_id = TestId::Path(test_id);
Some(Runnable { use_name_in_title: false, nav, kind: RunnableKind::DocTest { test_id }, cfg })
}
@ -441,10 +443,11 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option<Runnable> {
format_to!(
path,
"<{}>",
ty_args.format_with(", ", |ty, cb| cb(&ty.display(db)))
ty_args.format_with(",", |ty, cb| cb(&ty.display(db)))
);
}
format_to!(path, "::{}", def_name);
path.retain(|c| c != ' ');
return Some(path);
}
}
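
Both doctest-path hunks apply the same normalization: format type arguments without spaces and strip any that remain, presumably so the generated test id matches what cargo's doctest filter expects. A sketch of the combined effect:

```rust
// Build a doctest id like `Foo<Foo<(),()>,()>` with all whitespace removed.
fn doctest_id(adt_name: &str, ty_args: &[&str]) -> String {
    let params = if ty_args.is_empty() {
        String::new()
    } else {
        format!("<{}>", ty_args.join(","))
    };
    let mut test_id = format!("{}{}", adt_name, params);
    test_id.retain(|c| c != ' '); // nested args may still carry spaces
    test_id
}

fn main() {
    assert_eq!(doctest_id("Foo", &["Foo<(), ()>", "()"]), "Foo<Foo<(),()>,()>");
}
```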
@ -2067,13 +2070,23 @@ mod tests {
$0
struct Foo<T, U>;
/// ```
/// ```
impl<T, U> Foo<T, U> {
/// ```rust
/// ````
fn t() {}
}
/// ```
/// ```
impl Foo<Foo<(), ()>, ()> {
/// ```
/// ```
fn t() {}
}
"#,
&[DocTest],
&[DocTest, DocTest, DocTest, DocTest],
expect![[r#"
[
Runnable {
@ -2082,12 +2095,64 @@ impl<T, U> Foo<T, U> {
file_id: FileId(
0,
),
full_range: 47..85,
full_range: 20..103,
focus_range: 47..56,
name: "impl",
kind: Impl,
},
kind: DocTest {
test_id: Path(
"Foo<T,U>",
),
},
cfg: None,
},
Runnable {
use_name_in_title: false,
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 63..101,
name: "t",
},
kind: DocTest {
test_id: Path(
"Foo<T, U>::t",
"Foo<T,U>::t",
),
},
cfg: None,
},
Runnable {
use_name_in_title: false,
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 105..188,
focus_range: 126..146,
name: "impl",
kind: Impl,
},
kind: DocTest {
test_id: Path(
"Foo<Foo<(),()>,()>",
),
},
cfg: None,
},
Runnable {
use_name_in_title: false,
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 153..186,
name: "t",
},
kind: DocTest {
test_id: Path(
"Foo<Foo<(),()>,()>::t",
),
},
cfg: None,

View File

@ -12,7 +12,7 @@ use ide_db::{
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Shuffle Crate Graph**
// | VS Code | **rust-analyzer: Shuffle Crate Graph**
// |===
pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) {
let crate_graph = db.crate_graph();

View File

@ -204,7 +204,7 @@ impl StaticIndex<'_> {
fn get_definition(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Definition> {
for token in sema.descend_into_macros(token) {
let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions);
let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
if let Some(&[x]) = def.as_deref() {
return Some(x);
} else {

View File

@ -29,7 +29,7 @@ fn macro_syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats {
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Status**
// | VS Code | **rust-analyzer: Status**
// |===
// image::https://user-images.githubusercontent.com/48062697/113065584-05f34500-91b1-11eb-98cc-5c196f76be7f.gif[]
pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {

View File

@ -958,7 +958,7 @@ pub struct Struct;
#[test]
#[cfg_attr(
all(unix, not(target_pointer_width = "64")),
not(all(unix, target_pointer_width = "64")),
ignore = "depends on `DefaultHasher` outputs"
)]
fn test_rainbow_highlighting() {

View File

@ -12,7 +12,7 @@ use syntax::{
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Show Syntax Tree**
// | VS Code | **rust-analyzer: Show Syntax Tree**
// |===
// image::https://user-images.githubusercontent.com/48062697/113065586-068bdb80-91b1-11eb-9507-fee67f9f45a0.gif[]
pub(crate) fn syntax_tree(

View File

@ -16,7 +16,7 @@ use ide_db::{
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: View Crate Graph**
// | VS Code | **rust-analyzer: View Crate Graph**
// |===
pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String, String> {
let crate_graph = db.crate_graph();

View File

@ -8,7 +8,7 @@ use syntax::{algo::find_node_at_offset, ast, AstNode};
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: View Hir**
// | VS Code | **rust-analyzer: View Hir**
// |===
// image::https://user-images.githubusercontent.com/48062697/113065588-068bdb80-91b1-11eb-9a78-0b4ef1e972fb.gif[]
pub(crate) fn view_hir(db: &RootDatabase, position: FilePosition) -> String {

View File

@ -9,7 +9,7 @@ use ide_db::RootDatabase;
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Debug ItemTree**
// | VS Code | **rust-analyzer: Debug ItemTree**
// |===
pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String {
db.file_item_tree(file_id.into()).pretty_print()

View File

@ -106,6 +106,14 @@ impl AsRef<Path> for AbsPath {
}
}
impl ToOwned for AbsPath {
type Owned = AbsPathBuf;
fn to_owned(&self) -> Self::Owned {
AbsPathBuf(self.0.to_owned())
}
}
impl<'a> TryFrom<&'a Path> for &'a AbsPath {
type Error = &'a Path;
fn try_from(path: &'a Path) -> Result<&'a AbsPath, &'a Path> {
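
A sketch of the borrowed/owned pairing the new `ToOwned` impl completes: `AbsPathBuf` owns, `AbsPath` borrows, mirroring `PathBuf`/`Path`. The `Borrow` cast below assumes a `repr(transparent)` wrapper, as the real paths crate uses; names are simplified:

```rust
use std::borrow::Borrow;
use std::path::{Path, PathBuf};

#[repr(transparent)]
struct AbsPath(Path);

impl AbsPath {
    fn new(path: &Path) -> &AbsPath {
        // SAFETY: AbsPath is a transparent wrapper around Path.
        unsafe { &*(path as *const Path as *const AbsPath) }
    }
}

struct AbsPathBuf(PathBuf);

impl ToOwned for AbsPath {
    type Owned = AbsPathBuf;
    fn to_owned(&self) -> AbsPathBuf {
        AbsPathBuf(self.0.to_path_buf())
    }
}

// ToOwned requires the owned type to borrow back to the borrowed one.
impl Borrow<AbsPath> for AbsPathBuf {
    fn borrow(&self) -> &AbsPath {
        AbsPath::new(&self.0)
    }
}

fn main() {
    let owned: AbsPathBuf = AbsPath::new(Path::new("/tmp")).to_owned();
    assert_eq!(owned.0, PathBuf::from("/tmp"));
}
```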

View File

@ -60,7 +60,7 @@ impl MacroDylib {
let info = version::read_dylib_info(&path)?;
if info.version.0 < 1 || info.version.1 < 47 {
let msg = format!("proc-macro {} built by {:#?} is not supported by Rust Analyzer, please update your rust version.", path.display(), info);
let msg = format!("proc-macro {} built by {:#?} is not supported by rust-analyzer, please update your Rust version.", path.display(), info);
return Err(io::Error::new(io::ErrorKind::InvalidData, msg));
}

View File

@ -5,7 +5,7 @@
//! compiler into submodules of this module (e.g. proc_macro_srv::abis::abi_1_47).
//!
//! All of these ABIs are subsumed in the `Abi` enum, which exposes a simple
//! interface the rest of rust analyzer can use to talk to the macro
//! interface the rest of rust-analyzer can use to talk to the macro
//! provider.
//!
//! # Adding a new ABI

View File

@ -19,7 +19,7 @@ use crate::{utf8_stdout, ManifestPath};
/// [`CargoWorkspace`] represents the logical structure of, well, a Cargo
/// workspace. It pretty closely mirrors `cargo metadata` output.
///
/// Note that internally, rust analyzer uses a different structure:
/// Note that internally, rust-analyzer uses a different structure:
/// `CrateGraph`. `CrateGraph` is lower-level: it knows only about the crates,
/// while this knows about `Packages` & `Targets`: purely cargo-related
/// concepts.

View File

@ -8,7 +8,7 @@ use rustc_hash::{FxHashMap, FxHashSet};
use crate::lsp_ext;
pub(crate) type CheckFixes = Arc<FxHashMap<FileId, Vec<Fix>>>;
pub(crate) type CheckFixes = Arc<FxHashMap<usize, FxHashMap<FileId, Vec<Fix>>>>;
#[derive(Debug, Default, Clone)]
pub struct DiagnosticsMapConfig {
@ -22,7 +22,7 @@ pub(crate) struct DiagnosticCollection {
// FIXME: should be FxHashMap<FileId, Vec<ra_id::Diagnostic>>
pub(crate) native: FxHashMap<FileId, Vec<lsp_types::Diagnostic>>,
// FIXME: should be Vec<flycheck::Diagnostic>
pub(crate) check: FxHashMap<FileId, Vec<lsp_types::Diagnostic>>,
pub(crate) check: FxHashMap<usize, FxHashMap<FileId, Vec<lsp_types::Diagnostic>>>,
pub(crate) check_fixes: CheckFixes,
changes: FxHashSet<FileId>,
}
@ -35,9 +35,19 @@ pub(crate) struct Fix {
}
impl DiagnosticCollection {
pub(crate) fn clear_check(&mut self) {
pub(crate) fn clear_check(&mut self, flycheck_id: usize) {
if let Some(it) = Arc::make_mut(&mut self.check_fixes).get_mut(&flycheck_id) {
it.clear();
}
if let Some(it) = self.check.get_mut(&flycheck_id) {
self.changes.extend(it.drain().map(|(key, _value)| key));
}
}
pub(crate) fn clear_check_all(&mut self) {
Arc::make_mut(&mut self.check_fixes).clear();
self.changes.extend(self.check.drain().map(|(key, _value)| key))
self.changes
.extend(self.check.values_mut().flat_map(|it| it.drain().map(|(key, _value)| key)))
}
pub(crate) fn clear_native_for(&mut self, file_id: FileId) {
@ -47,11 +57,12 @@ impl DiagnosticCollection {
pub(crate) fn add_check_diagnostic(
&mut self,
flycheck_id: usize,
file_id: FileId,
diagnostic: lsp_types::Diagnostic,
fix: Option<Fix>,
) {
let diagnostics = self.check.entry(file_id).or_default();
let diagnostics = self.check.entry(flycheck_id).or_default().entry(file_id).or_default();
for existing_diagnostic in diagnostics.iter() {
if are_diagnostics_equal(existing_diagnostic, &diagnostic) {
return;
@ -59,7 +70,7 @@ impl DiagnosticCollection {
}
let check_fixes = Arc::make_mut(&mut self.check_fixes);
check_fixes.entry(file_id).or_default().extend(fix);
check_fixes.entry(flycheck_id).or_default().entry(file_id).or_default().extend(fix);
diagnostics.push(diagnostic);
self.changes.insert(file_id);
}
@ -89,7 +100,8 @@ impl DiagnosticCollection {
file_id: FileId,
) -> impl Iterator<Item = &lsp_types::Diagnostic> {
let native = self.native.get(&file_id).into_iter().flatten();
let check = self.check.get(&file_id).into_iter().flatten();
let check =
self.check.values().filter_map(move |it| it.get(&file_id)).into_iter().flatten();
native.chain(check)
}
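
The structural change in this file: diagnostics are now keyed by flycheck id first, so one workspace's `cargo check` run can be cleared without wiping the others. A self-contained model with `std::collections::HashMap` standing in for `FxHashMap`:

```rust
use std::collections::HashMap;

type FileId = u32;

#[derive(Default)]
struct DiagnosticCollection {
    // flycheck id -> file -> diagnostics
    check: HashMap<usize, HashMap<FileId, Vec<String>>>,
}

impl DiagnosticCollection {
    fn add_check_diagnostic(&mut self, flycheck_id: usize, file_id: FileId, diag: String) {
        self.check.entry(flycheck_id).or_default().entry(file_id).or_default().push(diag);
    }

    // Clears one flycheck's diagnostics, leaving the other workspaces' intact.
    fn clear_check(&mut self, flycheck_id: usize) {
        if let Some(per_file) = self.check.get_mut(&flycheck_id) {
            per_file.clear();
        }
    }

    // Reads now have to look through every flycheck's map.
    fn diagnostics_for(&self, file_id: FileId) -> impl Iterator<Item = &String> {
        self.check.values().filter_map(move |per_file| per_file.get(&file_id)).flatten()
    }
}

fn main() {
    let mut dc = DiagnosticCollection::default();
    dc.add_check_diagnostic(0, 1, "E0308".into());
    dc.add_check_diagnostic(1, 1, "E0499".into());
    dc.clear_check(0);
    assert_eq!(dc.diagnostics_for(1).count(), 1);
}
```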

View File

@ -8,7 +8,7 @@ use std::{sync::Arc, time::Instant};
use crossbeam_channel::{unbounded, Receiver, Sender};
use flycheck::FlycheckHandle;
use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId};
use ide_db::base_db::{CrateId, FileLoader, SourceDatabase};
use ide_db::base_db::{CrateId, FileLoader, SourceDatabase, SourceDatabaseExt};
use lsp_types::{SemanticTokens, Url};
use parking_lot::{Mutex, RwLock};
use proc_macro_api::ProcMacroServer;
@ -176,7 +176,7 @@ impl GlobalState {
pub(crate) fn process_changes(&mut self) -> bool {
let _p = profile::span("GlobalState::process_changes");
let mut fs_changes = Vec::new();
let mut fs_refresh_changes = Vec::new();
// A file was added or deleted
let mut has_structure_changes = false;
@ -192,15 +192,14 @@ impl GlobalState {
if let Some(path) = vfs.file_path(file.file_id).as_path() {
let path = path.to_path_buf();
if reload::should_refresh_for_change(&path, file.change_kind) {
self.fetch_workspaces_queue
.request_op(format!("vfs file change: {}", path.display()));
fs_refresh_changes.push((path, file.file_id));
}
fs_changes.push((path, file.change_kind));
if file.is_created_or_deleted() {
has_structure_changes = true;
}
}
// Clear native diagnostics when their file gets deleted
if !file.exists() {
self.diagnostics.clear_native_for(file.file_id);
}
@ -226,14 +225,25 @@ impl GlobalState {
self.analysis_host.apply_change(change);
let raw_database = &self.analysis_host.raw_database();
self.proc_macro_changed =
changed_files.iter().filter(|file| !file.is_created_or_deleted()).any(|file| {
let crates = raw_database.relevant_crates(file.file_id);
let crate_graph = raw_database.crate_graph();
{
let raw_database = self.analysis_host.raw_database();
let workspace_structure_change =
fs_refresh_changes.into_iter().find(|&(_, file_id)| {
!raw_database.source_root(raw_database.file_source_root(file_id)).is_library
});
if let Some((path, _)) = workspace_structure_change {
self.fetch_workspaces_queue
.request_op(format!("workspace vfs file change: {}", path.display()));
}
self.proc_macro_changed =
changed_files.iter().filter(|file| !file.is_created_or_deleted()).any(|file| {
let crates = raw_database.relevant_crates(file.file_id);
let crate_graph = raw_database.crate_graph();
crates.iter().any(|&krate| crate_graph[krate].is_proc_macro)
});
}
crates.iter().any(|&krate| crate_graph[krate].is_proc_macro)
});
true
}

View File

@ -1094,7 +1094,9 @@ pub(crate) fn handle_code_action(
}
// Fixes from `cargo check`.
for fix in snap.check_fixes.get(&frange.file_id).into_iter().flatten() {
for fix in
snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).into_iter().flatten()
{
// FIXME: this mapping is awkward and shouldn't exist. Refactor
// `snap.check_fixes` to not convert to LSP prematurely.
let intersect_fix_range = fix

View File

@ -6,8 +6,8 @@
//! code here exercise this specific completion, and thus have a fast
//! edit/compile/test cycle.
//!
//! Note that "Rust Analyzer: Run" action does not allow running a single test
//! in release mode in VS Code. There's however "Rust Analyzer: Copy Run Command Line"
//! Note that "rust-analyzer: Run" action does not allow running a single test
//! in release mode in VS Code. There's however "rust-analyzer: Copy Run Command Line"
//! which you can use to paste the command in terminal and add `--release` manually.
use std::sync::Arc;

View File

@ -2,13 +2,15 @@
//! requests/replies and notifications back to the client.
use std::{
fmt,
ops::Deref,
sync::Arc,
time::{Duration, Instant},
};
use always_assert::always;
use crossbeam_channel::{select, Receiver};
use ide_db::base_db::{SourceDatabaseExt, VfsPath};
use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath};
use itertools::Itertools;
use lsp_server::{Connection, Notification, Request};
use lsp_types::notification::Notification as _;
use vfs::{ChangeKind, FileId};
@ -371,7 +373,7 @@ impl GlobalState {
let _p = profile::span("GlobalState::handle_event/flycheck");
loop {
match task {
flycheck::Message::AddDiagnostic { workspace_root, diagnostic } => {
flycheck::Message::AddDiagnostic { id, workspace_root, diagnostic } => {
let snap = self.snapshot();
let diagnostics =
crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp(
@ -383,6 +385,7 @@ impl GlobalState {
for diag in diagnostics {
match url_to_file_id(&self.vfs.read().0, &diag.url) {
Ok(file_id) => self.diagnostics.add_check_diagnostic(
id,
file_id,
diag.diagnostic,
diag.fix,
@ -400,7 +403,7 @@ impl GlobalState {
flycheck::Message::Progress { id, progress } => {
let (state, message) = match progress {
flycheck::Progress::DidStart => {
self.diagnostics.clear_check();
self.diagnostics.clear_check(id);
(Progress::Begin, None)
}
flycheck::Progress::DidCheckCrate(target) => {
@ -444,7 +447,10 @@ impl GlobalState {
let memdocs_added_or_removed = self.mem_docs.take_changes();
if self.is_quiescent() {
if !was_quiescent {
if !was_quiescent
&& !self.fetch_workspaces_queue.op_requested()
&& !self.fetch_build_data_queue.op_requested()
{
for flycheck in &self.flycheck {
flycheck.update();
}
@ -734,13 +740,76 @@ impl GlobalState {
Ok(())
})?
.on::<lsp_types::notification::DidSaveTextDocument>(|this, params| {
for flycheck in &this.flycheck {
flycheck.update();
let mut updated = false;
if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
let (vfs, _) = &*this.vfs.read();
if let Some(file_id) = vfs.file_id(&vfs_path) {
let analysis = this.analysis_host.analysis();
// Crates containing or depending on the saved file
let crate_ids: Vec<_> = analysis
.crate_for(file_id)?
.into_iter()
.flat_map(|id| {
this.analysis_host
.raw_database()
.crate_graph()
.transitive_rev_deps(id)
})
.sorted()
.unique()
.collect();
let crate_root_paths: Vec<_> = crate_ids
.iter()
.filter_map(|&crate_id| {
analysis
.crate_root(crate_id)
.map(|file_id| {
vfs.file_path(file_id).as_path().map(ToOwned::to_owned)
})
.transpose()
})
.collect::<ide::Cancellable<_>>()?;
let crate_root_paths: Vec<_> =
crate_root_paths.iter().map(Deref::deref).collect();
// Find all workspaces that have at least one target containing the saved file
let workspace_ids =
this.workspaces.iter().enumerate().filter(|(_, ws)| match ws {
project_model::ProjectWorkspace::Cargo { cargo, .. } => {
cargo.packages().any(|pkg| {
cargo[pkg].targets.iter().any(|&it| {
crate_root_paths.contains(&cargo[it].root.as_path())
})
})
}
project_model::ProjectWorkspace::Json { project, .. } => project
.crates()
.any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)),
project_model::ProjectWorkspace::DetachedFiles { .. } => false,
});
// Find and trigger corresponding flychecks
for flycheck in &this.flycheck {
for (id, _) in workspace_ids.clone() {
if id == flycheck.id() {
updated = true;
flycheck.update();
continue;
}
}
}
}
if let Some(abs_path) = vfs_path.as_path() {
if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) {
this.fetch_workspaces_queue
.request_op(format!("DidSaveTextDocument {}", abs_path.display()));
}
}
}
if let Ok(abs_path) = from_proto::abs_path(&params.text_document.uri) {
if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) {
this.fetch_workspaces_queue
.request_op(format!("DidSaveTextDocument {}", abs_path.display()));
if !updated {
for flycheck in &this.flycheck {
flycheck.update();
}
}
Ok(())
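
A condensed model of the save-to-flycheck routing added above: compute the workspaces owning any crate affected by the saved file and only poke their flychecks, falling back to updating all of them when nothing matches. All types are stand-ins:

```rust
type CrateId = usize;

struct Flycheck {
    id: usize,
}

impl Flycheck {
    fn update(&self) {
        println!("flycheck {} restarted", self.id);
    }
}

// `workspaces[i]` = crate roots owned by workspace i (stand-in for the
// Cargo/Json workspace matching in the hunk).
fn on_did_save(saved_crates: &[CrateId], workspaces: &[Vec<CrateId>], flychecks: &[Flycheck]) {
    let workspace_ids: Vec<usize> = workspaces
        .iter()
        .enumerate()
        .filter(|(_, ws)| ws.iter().any(|c| saved_crates.contains(c)))
        .map(|(id, _)| id)
        .collect();

    let mut updated = false;
    for flycheck in flychecks {
        if workspace_ids.contains(&flycheck.id) {
            updated = true;
            flycheck.update();
        }
    }
    // No owning workspace found (e.g. a detached file): restart everything.
    if !updated {
        flychecks.iter().for_each(Flycheck::update);
    }
}

fn main() {
    let flychecks = [Flycheck { id: 0 }, Flycheck { id: 1 }];
    on_did_save(&[7], &[vec![1, 2], vec![7]], &flychecks); // only flycheck 1 runs
}
```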

View File

@ -196,10 +196,7 @@ impl GlobalState {
}
if let Err(error) = self.fetch_build_data_error() {
self.show_and_log_error(
"rust-analyzer failed to run build scripts".to_string(),
Some(error),
);
self.show_and_log_error("failed to run build scripts".to_string(), Some(error));
}
let workspaces = self
@ -308,6 +305,7 @@ impl GlobalState {
if self.proc_macro_clients.is_empty() {
if let Some((path, args)) = self.config.proc_macro_srv() {
tracing::info!("Spawning proc-macro servers");
self.proc_macro_clients = self
.workspaces
.iter()
@ -316,20 +314,20 @@ impl GlobalState {
let mut path = path.clone();
if let ProjectWorkspace::Cargo { sysroot, .. } = ws {
tracing::info!("Found a cargo workspace...");
tracing::debug!("Found a cargo workspace...");
if let Some(sysroot) = sysroot.as_ref() {
tracing::info!("Found a cargo workspace with a sysroot...");
tracing::debug!("Found a cargo workspace with a sysroot...");
let server_path =
sysroot.root().join("libexec").join(&standalone_server_name);
if std::fs::metadata(&server_path).is_ok() {
tracing::info!(
tracing::debug!(
"And the server exists at {}",
server_path.display()
);
path = server_path;
args = vec![];
} else {
tracing::info!(
tracing::debug!(
"And the server does not exist at {}",
server_path.display()
);
@ -337,14 +335,10 @@ impl GlobalState {
}
}
tracing::info!(
"Using proc-macro server at {} with args {:?}",
path.display(),
args
);
tracing::info!(?args, "Using proc-macro server at {}", path.display());
ProcMacroServer::spawn(path.clone(), args.clone()).map_err(|err| {
let error = format!(
"Failed to run proc_macro_srv from path {}, error: {:?}",
"Failed to run proc-macro server from path {}, error: {:?}",
path.display(),
err
);
@ -458,7 +452,7 @@ impl GlobalState {
Some(it) => it,
None => {
self.flycheck = Vec::new();
self.diagnostics.clear_check();
self.diagnostics.clear_check_all();
return;
}
};
@ -621,7 +615,10 @@ pub(crate) fn load_proc_macro(
};
let expander: Arc<dyn ProcMacroExpander> =
if dummy_replace.iter().any(|replace| &**replace == name) {
Arc::new(DummyExpander)
match kind {
ProcMacroKind::Attr => Arc::new(IdentityExpander),
_ => Arc::new(EmptyExpander),
}
} else {
Arc::new(Expander(expander))
};
@ -647,11 +644,11 @@ pub(crate) fn load_proc_macro(
}
}
/// Dummy identity expander, used for proc-macros that are deliberately ignored by the user.
/// Dummy identity expander, used for attribute proc-macros that are deliberately ignored by the user.
#[derive(Debug)]
struct DummyExpander;
struct IdentityExpander;
impl ProcMacroExpander for DummyExpander {
impl ProcMacroExpander for IdentityExpander {
fn expand(
&self,
subtree: &tt::Subtree,
@ -661,27 +658,46 @@ pub(crate) fn load_proc_macro(
Ok(subtree.clone())
}
}
/// Empty expander, used for proc-macros that are deliberately ignored by the user.
#[derive(Debug)]
struct EmptyExpander;
impl ProcMacroExpander for EmptyExpander {
fn expand(
&self,
_: &tt::Subtree,
_: Option<&tt::Subtree>,
_: &Env,
) -> Result<tt::Subtree, ProcMacroExpansionError> {
Ok(tt::Subtree::default())
}
}
}
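
The split above encodes why the two dummies differ: an attribute macro replaces the item it annotates, so echoing the input keeps the item compiling, while derives and function-like macros can safely expand to nothing. A self-contained sketch:

```rust
#[derive(Clone, Default)]
struct Subtree(String);

trait ProcMacroExpander {
    fn expand(&self, subtree: &Subtree) -> Subtree;
}

struct IdentityExpander;
impl ProcMacroExpander for IdentityExpander {
    fn expand(&self, subtree: &Subtree) -> Subtree {
        subtree.clone() // keep the annotated item intact
    }
}

struct EmptyExpander;
impl ProcMacroExpander for EmptyExpander {
    fn expand(&self, _: &Subtree) -> Subtree {
        Subtree::default() // derives/function-like macros may expand to nothing
    }
}

enum ProcMacroKind {
    Attr,
    Derive,
    FuncLike,
}

fn dummy_expander(kind: ProcMacroKind) -> Box<dyn ProcMacroExpander> {
    match kind {
        ProcMacroKind::Attr => Box::new(IdentityExpander),
        _ => Box::new(EmptyExpander),
    }
}

fn main() {
    let item = Subtree("struct S;".into());
    assert_eq!(dummy_expander(ProcMacroKind::Attr).expand(&item).0, "struct S;");
    assert!(dummy_expander(ProcMacroKind::Derive).expand(&item).0.is_empty());
}
```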
pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
let file_name = path.file_name().unwrap_or_default();
if file_name == "Cargo.toml" || file_name == "Cargo.lock" {
let file_name = match path.file_name().unwrap_or_default().to_str() {
Some(it) => it,
None => return false,
};
if let "Cargo.toml" | "Cargo.lock" = file_name {
return true;
}
if change_kind == ChangeKind::Modify {
return false;
}
// .cargo/config{.toml}
if path.extension().unwrap_or_default() != "rs" {
if (file_name == "config.toml" || file_name == "config")
&& path.parent().map(|parent| parent.as_ref().ends_with(".cargo")) == Some(true)
{
return true;
}
return false;
let is_cargo_config = matches!(file_name, "config.toml" | "config")
&& path.parent().map(|parent| parent.as_ref().ends_with(".cargo")).unwrap_or(false);
return is_cargo_config;
}
if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_ref().ends_with(it)) {
return true;
}
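
A small illustration of the rewritten cargo-config detection, assuming the same rules as the hunk: `.cargo/config` and `.cargo/config.toml` qualify even though they are not `.rs` files:

```rust
use std::path::Path;

fn is_cargo_config(path: &Path) -> bool {
    // Mirrors the hunk: bail on non-UTF-8 file names instead of comparing OsStr.
    let file_name = match path.file_name().and_then(|it| it.to_str()) {
        Some(it) => it,
        None => return false,
    };
    matches!(file_name, "config.toml" | "config")
        && path.parent().map(|parent| parent.ends_with(".cargo")).unwrap_or(false)
}

fn main() {
    assert!(is_cargo_config(Path::new("/proj/.cargo/config.toml")));
    assert!(!is_cargo_config(Path::new("/proj/src/config.toml")));
}
```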

View File

@ -13,9 +13,8 @@ use xshell::cmd;
fn check_code_formatting() {
let sh = &Shell::new().unwrap();
sh.change_dir(sourcegen::project_root());
sh.set_var("RUSTUP_TOOLCHAIN", "stable");
let out = cmd!(sh, "rustfmt --version").read().unwrap();
let out = cmd!(sh, "rustup run stable rustfmt --version").read().unwrap();
if !out.contains("stable") {
panic!(
"Failed to run rustfmt from toolchain 'stable'. \
@ -23,9 +22,9 @@ fn check_code_formatting() {
)
}
let res = cmd!(sh, "cargo fmt -- --check").run();
let res = cmd!(sh, "rustup run stable cargo fmt -- --check").run();
if res.is_err() {
let _ = cmd!(sh, "cargo fmt").run();
let _ = cmd!(sh, "rustup run stable cargo fmt").run();
}
res.unwrap()
}

View File

@ -136,7 +136,7 @@ impl fmt::Display for Location {
}
fn ensure_rustfmt(sh: &Shell) {
let version = cmd!(sh, "rustfmt --version").read().unwrap_or_default();
let version = cmd!(sh, "rustup run stable rustfmt --version").read().unwrap_or_default();
if !version.contains("stable") {
panic!(
"Failed to run rustfmt from toolchain 'stable'. \
@ -147,13 +147,15 @@ fn ensure_rustfmt(sh: &Shell) {
pub fn reformat(text: String) -> String {
let sh = Shell::new().unwrap();
sh.set_var("RUSTUP_TOOLCHAIN", "stable");
ensure_rustfmt(&sh);
let rustfmt_toml = project_root().join("rustfmt.toml");
let mut stdout = cmd!(sh, "rustfmt --config-path {rustfmt_toml} --config fn_single_line=true")
.stdin(text)
.read()
.unwrap();
let mut stdout = cmd!(
sh,
"rustup run stable rustfmt --config-path {rustfmt_toml} --config fn_single_line=true"
)
.stdin(text)
.read()
.unwrap();
if !stdout.ends_with('\n') {
stdout.push('\n');
}

View File

@ -11,7 +11,7 @@ use crate::{
ted::{self, Position},
AstNode, AstToken, Direction,
SyntaxKind::{ATTR, COMMENT, WHITESPACE},
SyntaxNode,
SyntaxNode, SyntaxToken,
};
use super::HasName;
@ -506,19 +506,7 @@ impl ast::RecordExprFieldList {
let position = match self.fields().last() {
Some(last_field) => {
let comma = match last_field
.syntax()
.siblings_with_tokens(Direction::Next)
.filter_map(|it| it.into_token())
.find(|it| it.kind() == T![,])
{
Some(it) => it,
None => {
let comma = ast::make::token(T![,]);
ted::insert(Position::after(last_field.syntax()), &comma);
comma
}
};
let comma = get_or_insert_comma_after(last_field.syntax());
Position::after(comma)
}
None => match self.l_curly_token() {
@ -579,19 +567,8 @@ impl ast::RecordPatFieldList {
let position = match self.fields().last() {
Some(last_field) => {
let comma = match last_field
.syntax()
.siblings_with_tokens(Direction::Next)
.filter_map(|it| it.into_token())
.find(|it| it.kind() == T![,])
{
Some(it) => it,
None => {
let comma = ast::make::token(T![,]);
ted::insert(Position::after(last_field.syntax()), &comma);
comma
}
};
let syntax = last_field.syntax();
let comma = get_or_insert_comma_after(syntax);
Position::after(comma)
}
None => match self.l_curly_token() {
@ -606,12 +583,53 @@ impl ast::RecordPatFieldList {
}
}
}
fn get_or_insert_comma_after(syntax: &SyntaxNode) -> SyntaxToken {
let comma = match syntax
.siblings_with_tokens(Direction::Next)
.filter_map(|it| it.into_token())
.find(|it| it.kind() == T![,])
{
Some(it) => it,
None => {
let comma = ast::make::token(T![,]);
ted::insert(Position::after(syntax), &comma);
comma
}
};
comma
}
impl ast::StmtList {
pub fn push_front(&self, statement: ast::Stmt) {
ted::insert(Position::after(self.l_curly_token().unwrap()), statement.syntax());
}
}
impl ast::VariantList {
pub fn add_variant(&self, variant: ast::Variant) {
let (indent, position) = match self.variants().last() {
Some(last_item) => (
IndentLevel::from_node(last_item.syntax()),
Position::after(get_or_insert_comma_after(last_item.syntax())),
),
None => match self.l_curly_token() {
Some(l_curly) => {
normalize_ws_between_braces(self.syntax());
(IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly))
}
None => (IndentLevel::single(), Position::last_child_of(self.syntax())),
},
};
let elements: Vec<SyntaxElement<_>> = vec![
make::tokens::whitespace(&format!("{}{}", "\n", indent)).into(),
variant.syntax().clone().into(),
ast::make::token(T![,]).into(),
];
ted::insert_all(position, elements);
}
}
fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> {
let l = node
.children_with_tokens()
@ -661,6 +679,9 @@ impl<N: AstNode + Clone> Indent for N {}
mod tests {
use std::fmt;
use stdx::trim_indent;
use test_utils::assert_eq_text;
use crate::SourceFile;
use super::*;
@ -714,4 +735,100 @@ mod tests {
}",
);
}
#[test]
fn add_variant_to_empty_enum() {
let variant = make::variant(make::name("Bar"), None).clone_for_update();
check_add_variant(
r#"
enum Foo {}
"#,
r#"
enum Foo {
Bar,
}
"#,
variant,
);
}
#[test]
fn add_variant_to_non_empty_enum() {
let variant = make::variant(make::name("Baz"), None).clone_for_update();
check_add_variant(
r#"
enum Foo {
Bar,
}
"#,
r#"
enum Foo {
Bar,
Baz,
}
"#,
variant,
);
}
#[test]
fn add_variant_with_tuple_field_list() {
let variant = make::variant(
make::name("Baz"),
Some(ast::FieldList::TupleFieldList(make::tuple_field_list(std::iter::once(
make::tuple_field(None, make::ty("bool")),
)))),
)
.clone_for_update();
check_add_variant(
r#"
enum Foo {
Bar,
}
"#,
r#"
enum Foo {
Bar,
Baz(bool),
}
"#,
variant,
);
}
#[test]
fn add_variant_with_record_field_list() {
let variant = make::variant(
make::name("Baz"),
Some(ast::FieldList::RecordFieldList(make::record_field_list(std::iter::once(
make::record_field(None, make::name("x"), make::ty("bool")),
)))),
)
.clone_for_update();
check_add_variant(
r#"
enum Foo {
Bar,
}
"#,
r#"
enum Foo {
Bar,
Baz { x: bool },
}
"#,
variant,
);
}
fn check_add_variant(before: &str, expected: &str, variant: ast::Variant) {
let enum_ = ast_mut_from_text::<ast::Enum>(before);
enum_.variant_list().map(|it| it.add_variant(variant));
let after = enum_.to_string();
assert_eq_text!(&trim_indent(expected.trim()), &trim_indent(&after.trim()));
}
}

View File

@ -745,7 +745,10 @@ pub fn tuple_field(visibility: Option<ast::Visibility>, ty: ast::Type) -> ast::T
pub fn variant(name: ast::Name, field_list: Option<ast::FieldList>) -> ast::Variant {
let field_list = match field_list {
None => String::new(),
Some(it) => format!("{}", it),
Some(it) => match it {
ast::FieldList::RecordFieldList(record) => format!(" {}", record),
ast::FieldList::TupleFieldList(tuple) => format!("{}", tuple),
},
};
ast_from_text(&format!("enum f {{ {}{} }}", name, field_list))
}

View File

@ -1,4 +1,4 @@
//! Syntax Tree library used throughout the rust analyzer.
//! Syntax Tree library used throughout the rust-analyzer.
//!
//! Properties:
//! - easy and fast incremental re-parsing

View File

@ -40,12 +40,15 @@ impl loader::Handle for NotifyHandle {
.expect("failed to spawn thread");
NotifyHandle { sender, _thread: thread }
}
fn set_config(&mut self, config: loader::Config) {
self.sender.send(Message::Config(config)).unwrap();
}
fn invalidate(&mut self, path: AbsPathBuf) {
self.sender.send(Message::Invalidate(path)).unwrap();
}
fn load_sync(&mut self, path: &AbsPath) -> Option<Vec<u8>> {
read(path)
}
@ -70,6 +73,7 @@ impl NotifyActor {
fn new(sender: loader::Sender) -> NotifyActor {
NotifyActor { sender, watched_entries: Vec::new(), watcher: None }
}
fn next_event(&self, receiver: &Receiver<Message>) -> Option<Event> {
let watcher_receiver = self.watcher.as_ref().map(|(_, receiver)| receiver);
select! {
@ -77,9 +81,10 @@ impl NotifyActor {
recv(watcher_receiver.unwrap_or(&never())) -> it => Some(Event::NotifyEvent(it.unwrap())),
}
}
fn run(mut self, inbox: Receiver<Message>) {
while let Some(event) = self.next_event(&inbox) {
tracing::debug!("vfs-notify event: {:?}", event);
tracing::debug!(?event, "vfs-notify event");
match event {
Event::Message(msg) => match msg {
Message::Config(config) => {

View File

@ -82,7 +82,7 @@ There's **"Run Extension (Debug Build)"** launch configuration for this in VS Co
In general, I use one of the following workflows for fixing bugs and implementing features:
If the problem concerns only internal parts of rust-analyzer (i.e. I don't need to touch the `rust-analyzer` crate or TypeScript code), there is a unit-test for it.
So, I use **Rust Analyzer: Run** action in VS Code to run this single test, and then just do printf-driven development/debugging.
So, I use **rust-analyzer: Run** action in VS Code to run this single test, and then just do printf-driven development/debugging.
As a sanity check after I'm done, I use `cargo xtask install --server` and **Reload Window** action in VS Code to verify that the thing works as I expect.
If the problem concerns only the VS Code extension, I use **Run Installed Extension** launch configuration from `launch.json`.
@ -152,11 +152,11 @@ To log all communication between the server and the client, there are two choice
There are also several VS Code commands which might be of interest:
* `Rust Analyzer: Status` shows some memory-usage statistics.
* `rust-analyzer: Status` shows some memory-usage statistics.
* `Rust Analyzer: Syntax Tree` shows syntax tree of the current file/selection.
* `rust-analyzer: Syntax Tree` shows syntax tree of the current file/selection.
* `Rust Analyzer: View Hir` shows the HIR expressions within the function containing the cursor.
* `rust-analyzer: View Hir` shows the HIR expressions within the function containing the cursor.
You can hover over syntax nodes in the opened text file to see the appropriate
rust code that it refers to and the rust editor will also highlight the proper

View File

@ -371,7 +371,7 @@ That is, rust-analyzer requires unwinding.
### Testing
Rust Analyzer has three interesting [system boundaries](https://www.tedinski.com/2018/04/10/making-tests-a-positive-influence-on-design.html) to concentrate tests on.
rust-analyzer has three interesting [system boundaries](https://www.tedinski.com/2018/04/10/making-tests-a-positive-influence-on-design.html) to concentrate tests on.
The outermost boundary is the `rust-analyzer` crate, which defines an LSP interface in terms of stdio.
We do integration testing of this component, by feeding it with a stream of LSP requests and checking responses.

View File

@ -63,7 +63,7 @@ Next, let's talk about what the inputs to the `Analysis` are, precisely.
## Inputs
Rust Analyzer never does any I/O itself, all inputs get passed explicitly via
rust-analyzer never does any I/O itself, all inputs get passed explicitly via
the `AnalysisHost::apply_change` method, which accepts a single argument, a
`Change`. [`Change`] is a builder for a single change
"transaction", so it suffices to study its methods to understand all of the

View File

@ -479,7 +479,7 @@ You can follow instructions for installing <<rust-analyzer-language-server-binar
== Troubleshooting
Start with looking at the rust-analyzer version.
Try **Rust Analyzer: Show RA Version** in VS Code (using **Command Palette** feature typically activated by Ctrl+Shift+P) or `rust-analyzer --version` in the command line.
Try **rust-analyzer: Show RA Version** in VS Code (using **Command Palette** feature typically activated by Ctrl+Shift+P) or `rust-analyzer --version` in the command line.
If the date is more than a week ago, it's better to update rust-analyzer version.
The next thing to check would be panic messages in rust-analyzer's log.
@ -492,7 +492,7 @@ To fully capture LSP messages between the editor and the server, set `"rust-anal
The root cause for many "`nothing works`" problems is that rust-analyzer fails to understand the project structure.
To debug that, first note the `rust-analyzer` section in the status bar.
If it has an error icon and red, that's the problem (hover will have somewhat helpful error message).
**Rust Analyzer: Status** prints dependency information for the current file.
**rust-analyzer: Status** prints dependency information for the current file.
Finally, `RA_LOG=project_model=debug` enables verbose logs during project loading.
If rust-analyzer outright crashes, try running `rust-analyzer analysis-stats /path/to/project/directory/` on the command line.

View File

@ -99,142 +99,142 @@
{
"command": "rust-analyzer.syntaxTree",
"title": "Show Syntax Tree",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.viewHir",
"title": "View Hir",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.viewFileText",
"title": "View File Text (as seen by the server)",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.viewItemTree",
"title": "Debug ItemTree",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.viewCrateGraph",
"title": "View Crate Graph",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.viewFullCrateGraph",
"title": "View Crate Graph (Full)",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.expandMacro",
"title": "Expand macro recursively",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.matchingBrace",
"title": "Find matching brace",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.parentModule",
"title": "Locate parent module",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.joinLines",
"title": "Join lines",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.run",
"title": "Run",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.copyRunCommandLine",
"title": "Copy Run Command Line",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.debug",
"title": "Debug",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.newDebugConfig",
"title": "Generate launch configuration",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.analyzerStatus",
"title": "Status",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.memoryUsage",
"title": "Memory Usage (Clears Database)",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.shuffleCrateGraph",
"title": "Shuffle Crate Graph",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.reloadWorkspace",
"title": "Reload workspace",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.reload",
"title": "Restart server",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.onEnter",
"title": "Enhanced enter key",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.ssr",
"title": "Structural Search Replace",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.serverVersion",
"title": "Show RA Version",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.toggleInlayHints",
"title": "Toggle inlay hints",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.openDocs",
"title": "Open docs under cursor",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.openCargoToml",
"title": "Open Cargo.toml",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.peekTests",
"title": "Peek related tests",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.moveItemUp",
"title": "Move item up",
"category": "Rust Analyzer"
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.moveItemDown",
"title": "Move item down",
"category": "Rust Analyzer"
"category": "rust-analyzer"
}
],
"keybindings": [
@ -256,7 +256,7 @@
],
"configuration": {
"type": "object",
"title": "Rust Analyzer",
"title": "rust-analyzer",
"properties": {
"rust-analyzer.cargoRunner": {
"type": [
@ -380,6 +380,11 @@
"default": false,
"type": "boolean"
},
"rust-analyzer.typing.continueCommentsOnNewline": {
"markdownDescription": "Whether to prefix newlines after comments with the corresponding comment prefix.",
"default": true,
"type": "boolean"
},
"$generated-start": {},
"rust-analyzer.assist.expressionFillDefault": {
"markdownDescription": "Placeholder expression to use for missing expressions in assists.",

View File

@ -16,9 +16,13 @@ export class Config {
readonly extensionId = "rust-lang.rust-analyzer";
readonly rootSection = "rust-analyzer";
private readonly requiresWorkspaceReloadOpts = ["serverPath", "server"].map(
(opt) => `${this.rootSection}.${opt}`
);
private readonly requiresWorkspaceReloadOpts = [
"serverPath",
"server",
// FIXME: This shouldn't be here, changing this setting should reload
// `continueCommentsOnNewline` behavior without restart
"typing",
].map((opt) => `${this.rootSection}.${opt}`);
private readonly requiresReloadOpts = [
"cargo",
"procMacro",
@ -140,6 +144,10 @@ export class Config {
return this.get<boolean>("restartServerOnConfigChange");
}
get typingContinueCommentsOnNewline() {
return this.get<boolean>("typing.continueCommentsOnNewline");
}
get debug() {
let sourceFileMap = this.get<Record<string, string> | "auto">("debug.sourceFileMap");
if (sourceFileMap !== "auto") {

View File

@ -84,7 +84,9 @@ async function tryActivate(context: vscode.ExtensionContext): Promise<RustAnalyz
warnAboutExtensionConflicts();
ctx.pushCleanup(configureLanguage());
if (config.typingContinueCommentsOnNewline) {
ctx.pushCleanup(configureLanguage());
}
vscode.workspace.onDidChangeConfiguration(
(_) =>

View File

@ -12,7 +12,7 @@ use std::{
};
mod map;
pub use map::ArenaMap;
pub use map::{ArenaMap, Entry, OccupiedEntry, VacantEntry};
/// The raw index of a value in an arena.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
@ -208,6 +208,16 @@ impl<T> Arena<T> {
Arena { data: Vec::new() }
}
/// Create a new empty arena with specific capacity.
///
/// ```
/// let arena: la_arena::Arena<i32> = la_arena::Arena::with_capacity(42);
/// assert!(arena.is_empty());
/// ```
pub fn with_capacity(capacity: usize) -> Arena<T> {
Arena { data: Vec::with_capacity(capacity) }
}
/// Empties the arena, removing all contained values.
///
/// ```

View File

@ -11,12 +11,52 @@ pub struct ArenaMap<IDX, V> {
}
impl<T, V> ArenaMap<Idx<T>, V> {
/// Creates a new empty map.
pub const fn new() -> Self {
Self { v: Vec::new(), _ty: PhantomData }
}
/// Create a new empty map with specific capacity.
pub fn with_capacity(capacity: usize) -> Self {
Self { v: Vec::with_capacity(capacity), _ty: PhantomData }
}
/// Reserves capacity for at least `additional` more elements to be inserted in the map.
pub fn reserve(&mut self, additional: usize) {
self.v.reserve(additional);
}
/// Clears the map, removing all elements.
pub fn clear(&mut self) {
self.v.clear();
}
/// Shrinks the capacity of the map as much as possible.
pub fn shrink_to_fit(&mut self) {
let min_len = self.v.iter().rposition(|slot| slot.is_some()).map_or(0, |i| i + 1);
self.v.truncate(min_len);
self.v.shrink_to_fit();
}
/// Returns whether the map contains a value for the specified index.
pub fn contains_idx(&self, idx: Idx<T>) -> bool {
matches!(self.v.get(Self::to_idx(idx)), Some(Some(_)))
}
/// Removes an index from the map, returning the value at the index if the index was previously in the map.
pub fn remove(&mut self, idx: Idx<T>) -> Option<V> {
self.v.get_mut(Self::to_idx(idx))?.take()
}
/// Inserts a value associated with a given arena index into the map.
pub fn insert(&mut self, idx: Idx<T>, t: V) {
///
/// If the map did not have this index present, None is returned.
/// Otherwise, the value is updated, and the old value is returned.
pub fn insert(&mut self, idx: Idx<T>, t: V) -> Option<V> {
let idx = Self::to_idx(idx);
self.v.resize_with((idx + 1).max(self.v.len()), || None);
self.v[idx] = Some(t);
self.v[idx].replace(t)
}
/// Returns a reference to the value associated with the provided index
@ -46,6 +86,16 @@ impl<T, V> ArenaMap<Idx<T>, V> {
self.v.iter().enumerate().filter_map(|(idx, o)| Some((Self::from_idx(idx), o.as_ref()?)))
}
/// Gets the given key's corresponding entry in the map for in-place manipulation.
pub fn entry(&mut self, idx: Idx<T>) -> Entry<'_, Idx<T>, V> {
let idx = Self::to_idx(idx);
self.v.resize_with((idx + 1).max(self.v.len()), || None);
match &mut self.v[idx] {
slot @ Some(_) => Entry::Occupied(OccupiedEntry { slot, _ty: PhantomData }),
slot @ None => Entry::Vacant(VacantEntry { slot, _ty: PhantomData }),
}
}
fn to_idx(idx: Idx<T>) -> usize {
u32::from(idx.into_raw()) as usize
}
@ -70,6 +120,119 @@ impl<T, V> std::ops::IndexMut<Idx<V>> for ArenaMap<Idx<V>, T> {
impl<T, V> Default for ArenaMap<Idx<V>, T> {
fn default() -> Self {
ArenaMap { v: Vec::new(), _ty: PhantomData }
Self::new()
}
}
impl<T, V> Extend<(Idx<V>, T)> for ArenaMap<Idx<V>, T> {
fn extend<I: IntoIterator<Item = (Idx<V>, T)>>(&mut self, iter: I) {
iter.into_iter().for_each(move |(k, v)| {
self.insert(k, v);
});
}
}
impl<T, V> FromIterator<(Idx<V>, T)> for ArenaMap<Idx<V>, T> {
fn from_iter<I: IntoIterator<Item = (Idx<V>, T)>>(iter: I) -> Self {
let mut this = Self::new();
this.extend(iter);
this
}
}
/// A view into a single entry in a map, which may either be vacant or occupied.
///
/// This `enum` is constructed from the [`entry`] method on [`ArenaMap`].
///
/// [`entry`]: ArenaMap::entry
pub enum Entry<'a, IDX, V> {
/// A vacant entry.
Vacant(VacantEntry<'a, IDX, V>),
/// An occupied entry.
Occupied(OccupiedEntry<'a, IDX, V>),
}
impl<'a, IDX, V> Entry<'a, IDX, V> {
/// Ensures a value is in the entry by inserting the default if empty, and returns a mutable reference to
/// the value in the entry.
pub fn or_insert(self, default: V) -> &'a mut V {
match self {
Self::Vacant(ent) => ent.insert(default),
Self::Occupied(ent) => ent.into_mut(),
}
}
/// Ensures a value is in the entry by inserting the result of the default function if empty, and returns
/// a mutable reference to the value in the entry.
pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
match self {
Self::Vacant(ent) => ent.insert(default()),
Self::Occupied(ent) => ent.into_mut(),
}
}
/// Provides in-place mutable access to an occupied entry before any potential inserts into the map.
pub fn and_modify<F: FnOnce(&mut V)>(mut self, f: F) -> Self {
if let Self::Occupied(ent) = &mut self {
f(ent.get_mut());
}
self
}
}
impl<'a, IDX, V> Entry<'a, IDX, V>
where
V: Default,
{
/// Ensures a value is in the entry by inserting the default value if empty, and returns a mutable reference
/// to the value in the entry.
pub fn or_default(self) -> &'a mut V {
self.or_insert_with(Default::default)
}
}
/// A view into a vacant entry in an [`ArenaMap`]. It is part of the [`Entry`] enum.
pub struct VacantEntry<'a, IDX, V> {
slot: &'a mut Option<V>,
_ty: PhantomData<IDX>,
}
impl<'a, IDX, V> VacantEntry<'a, IDX, V> {
/// Sets the value of the entry with the `VacantEntry`'s key, and returns a mutable reference to it.
pub fn insert(self, value: V) -> &'a mut V {
self.slot.insert(value)
}
}
/// A view into an occupied entry in an [`ArenaMap`]. It is part of the [`Entry`] enum.
pub struct OccupiedEntry<'a, IDX, V> {
slot: &'a mut Option<V>,
_ty: PhantomData<IDX>,
}
impl<'a, IDX, V> OccupiedEntry<'a, IDX, V> {
/// Gets a reference to the value in the entry.
pub fn get(&self) -> &V {
self.slot.as_ref().expect("Occupied")
}
/// Gets a mutable reference to the value in the entry.
pub fn get_mut(&mut self) -> &mut V {
self.slot.as_mut().expect("Occupied")
}
/// Converts the entry into a mutable reference to its value.
pub fn into_mut(self) -> &'a mut V {
self.slot.as_mut().expect("Occupied")
}
/// Sets the value of the entry with the `OccupiedEntry`'s key, and returns the entry's old value.
pub fn insert(&mut self, value: V) -> V {
self.slot.replace(value).expect("Occupied")
}
/// Takes the value of the entry out of the map, and returns it.
pub fn remove(self) -> V {
self.slot.take().expect("Occupied")
}
}
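
A hypothetical usage sketch of the new `Entry` API, assuming a `la_arena` version that includes the additions above; it avoids the usual contains-then-insert dance:

```rust
use la_arena::{Arena, ArenaMap};

fn main() {
    let mut arena = Arena::new();
    let idx = arena.alloc("item");

    let mut map: ArenaMap<_, Vec<u32>> = ArenaMap::new();
    // Insert-or-get in one step, then mutate in place.
    map.entry(idx).or_default().push(1);
    map.entry(idx).and_modify(|v| v.push(2)).or_insert_with(Vec::new);
    assert_eq!(map[idx], vec![1, 2]);
}
```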

View File

@ -81,7 +81,7 @@ impl flags::Promote {
let date = date_iso(sh)?;
let branch = format!("rust-analyzer-{date}");
cmd!(sh, "git switch -c {branch}").run()?;
cmd!(sh, "git subtree pull -P src/tools/rust-analyzer rust-analyzer master").run()?;
cmd!(sh, "git subtree pull -m ':arrow_up: rust-analyzer' -P src/tools/rust-analyzer rust-analyzer release").run()?;
if !self.dry_run {
cmd!(sh, "git push -u origin {branch}").run()?;