9242: Clippy r=matklad a=Maan2003

Best viewed commit-wise

Co-authored-by: Maan2003 <manmeetmann2003@gmail.com>
This commit is contained in:
bors[bot] 2021-06-13 07:18:49 +00:00 committed by GitHub
commit adbee621a7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
134 changed files with 372 additions and 402 deletions

View File

@ -190,7 +190,7 @@ impl From<Fixture> for FileMeta {
edition: f
.edition
.as_ref()
.map_or(Edition::Edition2018, |v| Edition::from_str(&v).unwrap()),
.map_or(Edition::Edition2018, |v| Edition::from_str(v).unwrap()),
env: f.env.into_iter().collect(),
introduce_new_source_root: f.introduce_new_source_root,
}

View File

@ -452,7 +452,7 @@ impl Module {
}
pub fn visibility_of(self, db: &dyn HirDatabase, def: &ModuleDef) -> Option<Visibility> {
self.id.def_map(db.upcast())[self.id.local_id].scope.visibility_of(def.clone().into())
self.id.def_map(db.upcast())[self.id.local_id].scope.visibility_of((*def).into())
}
pub fn diagnostics(
@ -1112,7 +1112,7 @@ impl Function {
.collect();
sink.push(MissingFields {
file: source_ptr.file_id,
field_list_parent: AstPtr::new(&record_expr),
field_list_parent: AstPtr::new(record_expr),
field_list_parent_path: record_expr
.path()
.map(|path| AstPtr::new(&path)),
@ -2531,13 +2531,13 @@ impl Type {
match ty.kind(&Interner) {
TyKind::Adt(_, substs) => {
cb(type_.derived(ty.clone()));
walk_substs(db, type_, &substs, cb);
walk_substs(db, type_, substs, cb);
}
TyKind::AssociatedType(_, substs) => {
if let Some(_) = ty.associated_type_parent_trait(db) {
cb(type_.derived(ty.clone()));
}
walk_substs(db, type_, &substs, cb);
walk_substs(db, type_, substs, cb);
}
TyKind::OpaqueType(_, subst) => {
if let Some(bounds) = ty.impl_trait_bounds(db) {
@ -2577,7 +2577,7 @@ impl Type {
TyKind::FnDef(_, substs)
| TyKind::Tuple(_, substs)
| TyKind::Closure(.., substs) => {
walk_substs(db, type_, &substs, cb);
walk_substs(db, type_, substs, cb);
}
TyKind::Function(hir_ty::FnPointer { substitution, .. }) => {
walk_substs(db, type_, &substitution.0, cb);

View File

@ -192,7 +192,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
node: &SyntaxNode,
offset: TextSize,
) -> Option<N> {
if let Some(it) = find_node_at_offset(&node, offset) {
if let Some(it) = find_node_at_offset(node, offset) {
return Some(it);
}
@ -744,7 +744,7 @@ impl<'db> SemanticsImpl<'db> {
return None;
}
let func = self.resolve_method_call(&method_call_expr).map(Function::from)?;
let func = self.resolve_method_call(method_call_expr).map(Function::from)?;
let res = match func.self_param(self.db)?.access(self.db) {
Access::Shared | Access::Exclusive => true,
Access::Owned => false,

View File

@ -222,7 +222,7 @@ impl SourceAnalyzer {
Pat::Path(path) => path,
_ => return None,
};
let res = resolve_hir_path(db, &self.resolver, &path)?;
let res = resolve_hir_path(db, &self.resolver, path)?;
match res {
PathResolution::Def(def) => Some(def),
_ => None,
@ -329,7 +329,7 @@ impl SourceAnalyzer {
let (variant, missing_fields, _exhaustive) =
record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?;
let res = self.missing_fields(db, krate, &substs, variant, missing_fields);
let res = self.missing_fields(db, krate, substs, variant, missing_fields);
Some(res)
}
@ -347,7 +347,7 @@ impl SourceAnalyzer {
let (variant, missing_fields, _exhaustive) =
record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
let res = self.missing_fields(db, krate, &substs, variant, missing_fields);
let res = self.missing_fields(db, krate, substs, variant, missing_fields);
Some(res)
}

View File

@ -583,13 +583,13 @@ impl AttrSourceMap {
.get(id.ast_index as usize)
.unwrap_or_else(|| panic!("cannot find doc comment at index {:?}", id))
.clone()
.map(|attr| Either::Right(attr))
.map(Either::Right)
} else {
self.attrs
.get(id.ast_index as usize)
.unwrap_or_else(|| panic!("cannot find `Attr` at index {:?}", id))
.clone()
.map(|attr| Either::Left(attr))
.map(Either::Left)
}
}
}
@ -606,7 +606,7 @@ pub struct DocsRangeMap {
impl DocsRangeMap {
pub fn map(&self, range: TextRange) -> Option<InFile<TextRange>> {
let found = self.mapping.binary_search_by(|(probe, ..)| probe.ordering(range)).ok()?;
let (line_docs_range, idx, original_line_src_range) = self.mapping[found].clone();
let (line_docs_range, idx, original_line_src_range) = self.mapping[found];
if !line_docs_range.contains_range(range) {
return None;
}

View File

@ -1000,18 +1000,18 @@ impl From<ast::LiteralKind> for Literal {
// FIXME: these should have actual values filled in, but unsure on perf impact
LiteralKind::IntNumber(lit) => {
if let builtin @ Some(_) = lit.suffix().and_then(BuiltinFloat::from_suffix) {
return Literal::Float(Default::default(), builtin);
Literal::Float(Default::default(), builtin)
} else if let builtin @ Some(_) =
lit.suffix().and_then(|it| BuiltinInt::from_suffix(&it))
lit.suffix().and_then(|it| BuiltinInt::from_suffix(it))
{
Literal::Int(lit.value().unwrap_or(0) as i128, builtin)
} else {
let builtin = lit.suffix().and_then(|it| BuiltinUint::from_suffix(&it));
let builtin = lit.suffix().and_then(|it| BuiltinUint::from_suffix(it));
Literal::Uint(lit.value().unwrap_or(0), builtin)
}
}
LiteralKind::FloatNumber(lit) => {
let ty = lit.suffix().and_then(|it| BuiltinFloat::from_suffix(&it));
let ty = lit.suffix().and_then(|it| BuiltinFloat::from_suffix(it));
Literal::Float(Default::default(), ty)
}
LiteralKind::ByteString(bs) => {

View File

@ -198,7 +198,7 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
}
Expr::Lambda { args, body: body_expr, .. } => {
let scope = scopes.new_scope(scope);
scopes.add_params_bindings(body, scope, &args);
scopes.add_params_bindings(body, scope, args);
compute_expr_scopes(*body_expr, body, scopes, scope);
}
Expr::Match { expr, arms } => {

View File

@ -280,7 +280,7 @@ impl GenericParams {
sm.type_params.insert(param_id, Either::Right(type_param.clone()));
let type_ref = TypeRef::Path(name.into());
self.fill_bounds(&lower_ctx, &type_param, Either::Left(type_ref));
self.fill_bounds(lower_ctx, &type_param, Either::Left(type_ref));
}
for lifetime_param in params.lifetime_params() {
let name =
@ -289,7 +289,7 @@ impl GenericParams {
let param_id = self.lifetimes.alloc(param);
sm.lifetime_params.insert(param_id, lifetime_param.clone());
let lifetime_ref = LifetimeRef::new_name(name);
self.fill_bounds(&lower_ctx, &lifetime_param, Either::Right(lifetime_ref));
self.fill_bounds(lower_ctx, &lifetime_param, Either::Right(lifetime_ref));
}
for const_param in params.const_params() {
let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());

View File

@ -59,7 +59,7 @@ pub struct ItemScope {
pub(crate) static BUILTIN_SCOPE: Lazy<FxHashMap<Name, PerNs>> = Lazy::new(|| {
BuiltinType::ALL
.iter()
.map(|(name, ty)| (name.clone(), PerNs::types(ty.clone().into(), Visibility::Public)))
.map(|(name, ty)| (name.clone(), PerNs::types((*ty).into(), Visibility::Public)))
.collect()
});

View File

@ -823,7 +823,7 @@ fn is_intrinsic_fn_unsafe(name: &Name) -> bool {
known::type_name,
known::variant_count,
]
.contains(&name)
.contains(name)
}
fn lower_abi(abi: ast::Abi) -> Interned<str> {
@ -855,7 +855,7 @@ impl UseTreeLowering<'_> {
// E.g. `use something::{inner}` (prefix is `None`, path is `something`)
// or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
Some(path) => {
match ModPath::from_src(self.db, path, &self.hygiene) {
match ModPath::from_src(self.db, path, self.hygiene) {
Some(it) => Some(it),
None => return None, // FIXME: report errors somewhere
}
@ -874,7 +874,7 @@ impl UseTreeLowering<'_> {
} else {
let is_glob = tree.star_token().is_some();
let path = match tree.path() {
Some(path) => Some(ModPath::from_src(self.db, path, &self.hygiene)?),
Some(path) => Some(ModPath::from_src(self.db, path, self.hygiene)?),
None => None,
};
let alias = tree.rename().map(|a| {

View File

@ -426,7 +426,7 @@ impl<'a> Printer<'a> {
w!(self, " {{");
self.indented(|this| {
for item in &**items {
this.print_mod_item((*item).into());
this.print_mod_item(*item);
}
});
wln!(self, "}}");

View File

@ -731,13 +731,11 @@ fn macro_call_as_call_id(
)
.map(MacroCallId::from)
} else {
Ok(def
.as_lazy_macro(
db.upcast(),
krate,
MacroCallKind::FnLike { ast_id: call.ast_id, fragment },
)
.into())
Ok(def.as_lazy_macro(
db.upcast(),
krate,
MacroCallKind::FnLike { ast_id: call.ast_id, fragment },
))
};
Ok(res)
}
@ -756,17 +754,15 @@ fn derive_macro_as_call_id(
.segments()
.last()
.ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
let res = def
.as_lazy_macro(
db.upcast(),
krate,
MacroCallKind::Derive {
ast_id: item_attr.ast_id,
derive_name: last_segment.to_string(),
derive_attr_index: derive_attr.ast_index,
},
)
.into();
let res = def.as_lazy_macro(
db.upcast(),
krate,
MacroCallKind::Derive {
ast_id: item_attr.ast_id,
derive_name: last_segment.to_string(),
derive_attr_index: derive_attr.ast_index,
},
);
Ok(res)
}
@ -794,17 +790,15 @@ fn attr_macro_as_call_id(
// The parentheses are always disposed here.
arg.delimiter = None;
let res = def
.as_lazy_macro(
db.upcast(),
krate,
MacroCallKind::Attr {
ast_id: item_attr.ast_id,
attr_name: last_segment.to_string(),
attr_args: arg,
invoc_attr_index: macro_attr.id.ast_index,
},
)
.into();
let res = def.as_lazy_macro(
db.upcast(),
krate,
MacroCallKind::Attr {
ast_id: item_attr.ast_id,
attr_name: last_segment.to_string(),
attr_args: arg,
invoc_attr_index: macro_attr.id.ast_index,
},
);
Ok(res)
}

View File

@ -500,7 +500,7 @@ impl DefCollector<'_> {
let (per_ns, _) = self.def_map.resolve_path(
self.db,
self.def_map.root,
&path,
path,
BuiltinShadowMode::Other,
);
@ -722,7 +722,7 @@ impl DefCollector<'_> {
if import.is_extern_crate {
let res = self.def_map.resolve_name_in_extern_prelude(
self.db,
&import
import
.path
.as_ident()
.expect("extern crate should have been desugared to one-element path"),
@ -1351,7 +1351,7 @@ impl ModCollector<'_, '_> {
let imports = Import::from_use(
self.def_collector.db,
krate,
&self.item_tree,
self.item_tree,
ItemTreeId::new(self.file_id, import_id),
);
self.def_collector.unresolved_imports.extend(imports.into_iter().map(
@ -1368,7 +1368,7 @@ impl ModCollector<'_, '_> {
import: Import::from_extern_crate(
self.def_collector.db,
krate,
&self.item_tree,
self.item_tree,
ItemTreeId::new(self.file_id, import_id),
),
status: PartialResolvedImport::Unresolved,
@ -1889,7 +1889,7 @@ impl ModCollector<'_, '_> {
self.def_collector.def_map.with_ancestor_maps(
self.def_collector.db,
self.module_id,
&mut |map, module| map[module].scope.get_legacy_macro(&name),
&mut |map, module| map[module].scope.get_legacy_macro(name),
)
})
},
@ -1993,7 +1993,7 @@ mod tests {
}
fn do_resolve(code: &str) -> DefMap {
let (db, _file_id) = TestDB::with_single_file(&code);
let (db, _file_id) = TestDB::with_single_file(code);
let krate = db.test_crate();
let edition = db.crate_graph()[krate].edition;

View File

@ -93,7 +93,7 @@ impl DefMap {
let mut vis = match visibility {
RawVisibility::Module(path) => {
let (result, remaining) =
self.resolve_path(db, original_module, &path, BuiltinShadowMode::Module);
self.resolve_path(db, original_module, path, BuiltinShadowMode::Module);
if remaining.is_some() {
return None;
}
@ -205,7 +205,7 @@ impl DefMap {
None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
};
log::debug!("resolving {:?} in crate root (+ extern prelude)", segment);
self.resolve_name_in_crate_root_or_extern_prelude(db, &segment)
self.resolve_name_in_crate_root_or_extern_prelude(db, segment)
}
PathKind::Plain => {
let (_, segment) = match segments.next() {
@ -222,7 +222,7 @@ impl DefMap {
if path.segments().len() == 1 { shadow } else { BuiltinShadowMode::Module };
log::debug!("resolving {:?} in module", segment);
self.resolve_name_in_module(db, original_module, &segment, prefer_module)
self.resolve_name_in_module(db, original_module, segment, prefer_module)
}
PathKind::Super(lvl) => {
let mut module = original_module;
@ -269,7 +269,7 @@ impl DefMap {
Some((_, segment)) => segment,
None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
};
if let Some(def) = self.extern_prelude.get(&segment) {
if let Some(def) = self.extern_prelude.get(segment) {
log::debug!("absolute path {:?} resolved to crate {:?}", path, def);
PerNs::types(*def, Visibility::Public)
} else {
@ -319,13 +319,13 @@ impl DefMap {
};
// Since it is a qualified path here, it should not contains legacy macros
module_data.scope.get(&segment)
module_data.scope.get(segment)
}
ModuleDefId::AdtId(AdtId::EnumId(e)) => {
// enum variant
cov_mark::hit!(can_import_enum_variant);
let enum_data = db.enum_data(e);
match enum_data.variant(&segment) {
match enum_data.variant(segment) {
Some(local_id) => {
let variant = EnumVariantId { parent: e, local_id };
match &*enum_data.variants[local_id].variant_data {

View File

@ -208,13 +208,13 @@ fn lower_generic_args_from_fn_path(
let params = params?;
let mut param_types = Vec::new();
for param in params.params() {
let type_ref = TypeRef::from_ast_opt(&ctx, param.ty());
let type_ref = TypeRef::from_ast_opt(ctx, param.ty());
param_types.push(type_ref);
}
let arg = GenericArg::Type(TypeRef::Tuple(param_types));
args.push(arg);
if let Some(ret_type) = ret_type {
let type_ref = TypeRef::from_ast_opt(&ctx, ret_type.ty());
let type_ref = TypeRef::from_ast_opt(ctx, ret_type.ty());
bindings.push(AssociatedTypeBinding {
name: name![Output],
type_ref: Some(type_ref),

View File

@ -133,7 +133,7 @@ impl Resolver {
Some(it) => it,
None => return PerNs::none(),
};
let (module_res, segment_index) = item_map.resolve_path(db, module, &path, shadow);
let (module_res, segment_index) = item_map.resolve_path(db, module, path, shadow);
if segment_index.is_some() {
return PerNs::none();
}
@ -150,7 +150,7 @@ impl Resolver {
path: &ModPath,
) -> Option<TraitId> {
let (item_map, module) = self.module_scope()?;
let (module_res, ..) = item_map.resolve_path(db, module, &path, BuiltinShadowMode::Module);
let (module_res, ..) = item_map.resolve_path(db, module, path, BuiltinShadowMode::Module);
match module_res.take_types()? {
ModuleDefId::TraitId(it) => Some(it),
_ => None,
@ -325,7 +325,7 @@ impl Resolver {
path: &ModPath,
) -> Option<MacroDefId> {
let (item_map, module) = self.module_scope()?;
item_map.resolve_path(db, module, &path, BuiltinShadowMode::Other).0.take_macros()
item_map.resolve_path(db, module, path, BuiltinShadowMode::Other).0.take_macros()
}
pub fn process_all_names(&self, db: &dyn DefDatabase, f: &mut dyn FnMut(Name, ScopeDef)) {
@ -561,7 +561,7 @@ impl ModuleItemMap {
path: &ModPath,
) -> Option<ResolveValueResult> {
let (module_def, idx) =
self.def_map.resolve_path_locally(db, self.module_id, &path, BuiltinShadowMode::Other);
self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other);
match idx {
None => {
let value = to_value_ns(module_def)?;
@ -591,7 +591,7 @@ impl ModuleItemMap {
path: &ModPath,
) -> Option<(TypeNs, Option<usize>)> {
let (module_def, idx) =
self.def_map.resolve_path_locally(db, self.module_id, &path, BuiltinShadowMode::Other);
self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other);
let res = to_type_ns(module_def)?;
Some((res, idx))
}

View File

@ -325,7 +325,7 @@ impl TestDB {
for diag in source_map.diagnostics() {
let (ptr, message): (InFile<SyntaxNodePtr>, &str) = match diag {
BodyDiagnostic::InactiveCode { node, .. } => {
(node.clone().map(|it| it.into()), "InactiveCode")
(node.clone().map(|it| it), "InactiveCode")
}
BodyDiagnostic::MacroError { node, message } => {
(node.clone().map(|it| it.into()), message.as_str())

View File

@ -128,7 +128,7 @@ impl TypeRef {
/// Converts an `ast::TypeRef` to a `hir::TypeRef`.
pub fn from_ast(ctx: &LowerCtx, node: ast::Type) -> Self {
match node {
ast::Type::ParenType(inner) => TypeRef::from_ast_opt(&ctx, inner.ty()),
ast::Type::ParenType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()),
ast::Type::TupleType(inner) => {
TypeRef::Tuple(inner.fields().map(|it| TypeRef::from_ast(ctx, it)).collect())
}
@ -142,7 +142,7 @@ impl TypeRef {
.unwrap_or(TypeRef::Error)
}
ast::Type::PtrType(inner) => {
let inner_ty = TypeRef::from_ast_opt(&ctx, inner.ty());
let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty());
let mutability = Mutability::from_mutable(inner.mut_token().is_some());
TypeRef::RawPtr(Box::new(inner_ty), mutability)
}
@ -156,13 +156,13 @@ impl TypeRef {
.map(ConstScalar::usize_from_literal_expr)
.unwrap_or(ConstScalar::Unknown);
TypeRef::Array(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty())), len)
TypeRef::Array(Box::new(TypeRef::from_ast_opt(ctx, inner.ty())), len)
}
ast::Type::SliceType(inner) => {
TypeRef::Slice(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty())))
TypeRef::Slice(Box::new(TypeRef::from_ast_opt(ctx, inner.ty())))
}
ast::Type::RefType(inner) => {
let inner_ty = TypeRef::from_ast_opt(&ctx, inner.ty());
let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty());
let lifetime = inner.lifetime().map(|lt| LifetimeRef::new(&lt));
let mutability = Mutability::from_mutable(inner.mut_token().is_some());
TypeRef::Reference(Box::new(inner_ty), lifetime, mutability)
@ -180,7 +180,7 @@ impl TypeRef {
is_varargs = param.dotdotdot_token().is_some();
}
pl.params().map(|p| p.ty()).map(|it| TypeRef::from_ast_opt(&ctx, it)).collect()
pl.params().map(|p| p.ty()).map(|it| TypeRef::from_ast_opt(ctx, it)).collect()
} else {
Vec::new()
};
@ -188,7 +188,7 @@ impl TypeRef {
TypeRef::Fn(params, is_varargs)
}
// for types are close enough for our purposes to the inner type for now...
ast::Type::ForType(inner) => TypeRef::from_ast_opt(&ctx, inner.ty()),
ast::Type::ForType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()),
ast::Type::ImplTraitType(inner) => {
TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
}
@ -229,7 +229,7 @@ impl TypeRef {
TypeRef::RawPtr(type_ref, _)
| TypeRef::Reference(type_ref, ..)
| TypeRef::Array(type_ref, _)
| TypeRef::Slice(type_ref) => go(&type_ref, f),
| TypeRef::Slice(type_ref) => go(type_ref, f),
TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => {
for bound in bounds {
match bound.as_ref() {

View File

@ -325,7 +325,7 @@ $0
},
};
let id: MacroCallId = db.intern_macro(loc).into();
let id: MacroCallId = db.intern_macro(loc);
let parsed = db.parse_or_expand(id.as_file()).unwrap();
// FIXME text() for syntax nodes parsed from token tree looks weird

View File

@ -354,7 +354,7 @@ fn concat_expand(
// concat works with string and char literals, so remove any quotes.
// It also works with integer, float and boolean literals, so just use the rest
// as-is.
let component = unquote_str(&it).unwrap_or_else(|| it.text.to_string());
let component = unquote_str(it).unwrap_or_else(|| it.text.to_string());
text.push_str(&component);
}
// handle boolean literals
@ -417,7 +417,7 @@ fn parse_string(tt: &tt::Subtree) -> Result<String, mbe::ExpandError> {
tt.token_trees
.get(0)
.and_then(|tt| match tt {
tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(&it),
tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(it),
_ => None,
})
.ok_or_else(|| mbe::ExpandError::ConversionError)
@ -430,7 +430,7 @@ fn include_expand(
) -> ExpandResult<Option<ExpandedEager>> {
let res = (|| {
let path = parse_string(tt)?;
let file_id = relative_file(db, arg_id.into(), &path, false)?;
let file_id = relative_file(db, arg_id, &path, false)?;
let subtree = parse_to_token_tree(&db.file_text(file_id))
.ok_or_else(|| mbe::ExpandError::ConversionError)?
@ -480,7 +480,7 @@ fn include_str_expand(
// it's unusual to `include_str!` a Rust file), but we can return an empty string.
// Ideally, we'd be able to offer a precise expansion if the user asks for macro
// expansion.
let file_id = match relative_file(db, arg_id.into(), &path, true) {
let file_id = match relative_file(db, arg_id, &path, true) {
Ok(file_id) => file_id,
Err(_) => {
return ExpandResult::ok(Some(ExpandedEager::new(quote!(""))));
@ -561,7 +561,7 @@ mod tests {
use syntax::ast::NameOwner;
fn expand_builtin_macro(ra_fixture: &str) -> String {
let (db, file_id) = TestDB::with_single_file(&ra_fixture);
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let parsed = db.parse(file_id);
let mut macro_rules: Vec<_> =
parsed.syntax_node().descendants().filter_map(ast::MacroRules::cast).collect();
@ -598,7 +598,7 @@ mod tests {
},
};
let id: MacroCallId = db.intern_macro(loc).into();
let id: MacroCallId = db.intern_macro(loc);
id.as_file()
}
Either::Right(expander) => {
@ -635,7 +635,7 @@ mod tests {
kind: MacroCallKind::FnLike { ast_id: call_id, fragment },
};
let id: MacroCallId = db.intern_macro(loc).into();
let id: MacroCallId = db.intern_macro(loc);
id.as_file()
}
};

View File

@ -57,7 +57,7 @@ impl TokenExpander {
// We store the result in salsa db to prevent non-deterministic behavior in
// some proc-macro implementation
// See #4315 for details
db.expand_proc_macro(id.into()).into()
db.expand_proc_macro(id).into()
}
}
}
@ -241,7 +241,7 @@ fn parse_macro_expansion(
}
};
if is_self_replicating(&node, &call_node.value) {
return ExpandResult::only_err(err);
ExpandResult::only_err(err)
} else {
ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) }
}

View File

@ -128,7 +128,7 @@ pub fn expand_eager_macro(
}),
kind: MacroCallKind::FnLike { ast_id: call_id, fragment: FragmentKind::Expr },
});
let arg_file_id: MacroCallId = arg_id.into();
let arg_file_id: MacroCallId = arg_id;
let parsed_args =
diagnostic_sink.result(mbe::token_tree_to_syntax_node(&parsed_args, FragmentKind::Expr))?.0;
@ -177,13 +177,11 @@ fn lazy_expand(
let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
let fragment = crate::to_fragment_kind(&macro_call.value);
let id: MacroCallId = def
.as_lazy_macro(
db,
krate,
MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), fragment },
)
.into();
let id: MacroCallId = def.as_lazy_macro(
db,
krate,
MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), fragment },
);
let err = db.macro_expand_error(id);
let value = db.parse_or_expand(id.as_file()).map(|node| InFile::new(id.as_file(), node));
@ -216,8 +214,7 @@ fn eager_macro_recur(
def,
macro_resolver,
diagnostic_sink,
)?
.into();
)?;
db.parse_or_expand(id.as_file())
.expect("successful macro expansion should be parseable")
.clone_for_update()

View File

@ -78,7 +78,7 @@ mod tests {
use super::*;
fn test_remove_derives_up_to(attr: usize, ra_fixture: &str, expect: Expect) {
let (db, file_id) = TestDB::with_single_file(&ra_fixture);
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let parsed = db.parse(file_id);
let mut items: Vec<_> =

View File

@ -45,13 +45,12 @@ impl ProcMacroExpander {
let proc_macro = krate_graph[self.krate]
.proc_macro
.get(id.0 as usize)
.clone()
.ok_or_else(|| err!("No derive macro found."))?;
// Proc macros have access to the environment variables of the invoking crate.
let env = &krate_graph[calling_crate].env;
proc_macro.expander.expand(&tt, attr_arg, &env).map_err(mbe::ExpandError::from)
proc_macro.expander.expand(tt, attr_arg, env).map_err(mbe::ExpandError::from)
}
None => Err(mbe::ExpandError::UnresolvedProcMacro),
}

View File

@ -202,7 +202,7 @@ impl<T: HasInterner<Interner = Interner> + Fold<Interner>> TyBuilder<Binders<T>>
impl TyBuilder<Binders<Ty>> {
pub fn def_ty(db: &dyn HirDatabase, def: TyDefId) -> TyBuilder<Binders<Ty>> {
TyBuilder::subst_binders(db.ty(def.into()))
TyBuilder::subst_binders(db.ty(def))
}
pub fn impl_self_ty(db: &dyn HirDatabase, def: hir_def::ImplId) -> TyBuilder<Binders<Ty>> {

View File

@ -430,8 +430,7 @@ pub(crate) fn trait_datum_query(
fundamental: false,
};
let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
let associated_ty_ids =
trait_data.associated_types().map(|type_alias| to_assoc_type_id(type_alias)).collect();
let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect();
let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
let well_known =
lang_attr(db.upcast(), trait_).and_then(|name| well_known_trait_from_lang_attr(&name));

View File

@ -49,7 +49,7 @@ pub fn usize_const(value: Option<u64>) -> Const {
ConstData {
ty: TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize)).intern(&Interner),
value: ConstValue::Concrete(chalk_ir::ConcreteConst {
interned: value.map(|value| ConstScalar::Usize(value)).unwrap_or(ConstScalar::Unknown),
interned: value.map(ConstScalar::Usize).unwrap_or(ConstScalar::Unknown),
}),
}
.intern(&Interner)

View File

@ -528,7 +528,7 @@ impl SplitWildcard {
smallvec![NonExhaustive]
}
TyKind::Never => SmallVec::new(),
_ if cx.is_uninhabited(&pcx.ty) => SmallVec::new(),
_ if cx.is_uninhabited(pcx.ty) => SmallVec::new(),
TyKind::Adt(..) | TyKind::Tuple(..) | TyKind::Ref(..) => smallvec![Single],
// This type is one for which we cannot list constructors, like `str` or `f64`.
_ => smallvec![NonExhaustive],

View File

@ -645,7 +645,7 @@ impl SubPatSet {
(Seq { subpats: s_set }, Seq { subpats: mut o_set }) => {
s_set.retain(|i, s_sub_set| {
// Missing entries count as full.
let o_sub_set = o_set.remove(&i).unwrap_or(Full);
let o_sub_set = o_set.remove(i).unwrap_or(Full);
s_sub_set.union(o_sub_set);
// We drop full entries.
!s_sub_set.is_full()
@ -656,7 +656,7 @@ impl SubPatSet {
(Alt { subpats: s_set, .. }, Alt { subpats: mut o_set, .. }) => {
s_set.retain(|i, s_sub_set| {
// Missing entries count as empty.
let o_sub_set = o_set.remove(&i).unwrap_or(Empty);
let o_sub_set = o_set.remove(i).unwrap_or(Empty);
s_sub_set.union(o_sub_set);
// We drop empty entries.
!s_sub_set.is_empty()
@ -898,7 +898,7 @@ impl Usefulness {
} else {
witnesses
.into_iter()
.map(|witness| witness.apply_constructor(pcx, &ctor, ctor_wild_subpatterns))
.map(|witness| witness.apply_constructor(pcx, ctor, ctor_wild_subpatterns))
.collect()
};
WithWitnesses(new_witnesses)

View File

@ -782,7 +782,7 @@ impl Expectation {
fn adjust_for_branches(&self, table: &mut unify::InferenceTable) -> Expectation {
match self {
Expectation::HasType(ety) => {
let ety = table.resolve_ty_shallow(&ety);
let ety = table.resolve_ty_shallow(ety);
if !ety.is_ty_var() {
Expectation::HasType(ety)
} else {

View File

@ -109,7 +109,7 @@ impl<'a> InferenceContext<'a> {
}
// Consider coercing the subtype to a DST
if let Ok(ret) = self.try_coerce_unsized(&from_ty, &to_ty) {
if let Ok(ret) = self.try_coerce_unsized(&from_ty, to_ty) {
return Ok(ret);
}

View File

@ -54,7 +54,7 @@ impl<'a> InferenceContext<'a> {
/// Infer type of expression with possibly implicit coerce to the expected type.
/// Return the type after possible coercion.
pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
let ty = self.infer_expr_inner(expr, &expected);
let ty = self.infer_expr_inner(expr, expected);
let ty = if let Some(target) = expected.only_has_type(&mut self.table) {
if !self.coerce(&ty, &target) {
self.result
@ -135,11 +135,11 @@ impl<'a> InferenceContext<'a> {
let mut both_arms_diverge = Diverges::Always;
let mut result_ty = self.table.new_type_var();
let then_ty = self.infer_expr_inner(*then_branch, &expected);
let then_ty = self.infer_expr_inner(*then_branch, expected);
both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
result_ty = self.coerce_merge_branch(Some(*then_branch), &result_ty, &then_ty);
let else_ty = match else_branch {
Some(else_branch) => self.infer_expr_inner(*else_branch, &expected),
Some(else_branch) => self.infer_expr_inner(*else_branch, expected),
None => TyBuilder::unit(),
};
both_arms_diverge &= self.diverges;
@ -327,13 +327,7 @@ impl<'a> InferenceContext<'a> {
self.normalize_associated_types_in(ret_ty)
}
Expr::MethodCall { receiver, args, method_name, generic_args } => self
.infer_method_call(
tgt_expr,
*receiver,
&args,
&method_name,
generic_args.as_deref(),
),
.infer_method_call(tgt_expr, *receiver, args, method_name, generic_args.as_deref()),
Expr::Match { expr, arms } => {
let input_ty = self.infer_expr(*expr, &Expectation::none());
@ -993,7 +987,7 @@ impl<'a> InferenceContext<'a> {
}
fn register_obligations_for_call(&mut self, callable_ty: &Ty) {
let callable_ty = self.resolve_ty_shallow(&callable_ty);
let callable_ty = self.resolve_ty_shallow(callable_ty);
if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(&Interner) {
let def: CallableDefId = from_chalk(self.db, *fn_def);
let generic_predicates = self.db.generic_predicates(def.into());

View File

@ -192,7 +192,7 @@ impl<'a> InferenceContext<'a> {
Pat::Path(path) => {
// FIXME use correct resolver for the surrounding expression
let resolver = self.resolver.clone();
self.infer_path(&resolver, &path, pat.into()).unwrap_or(self.err_ty())
self.infer_path(&resolver, path, pat.into()).unwrap_or(self.err_ty())
}
Pat::Bind { mode, name: _, subpat } => {
let mode = if mode == &BindingAnnotation::Unannotated {
@ -275,7 +275,7 @@ impl<'a> InferenceContext<'a> {
if !self.unify(&ty, &expected) {
self.result
.type_mismatches
.insert(pat.into(), TypeMismatch { expected: expected, actual: ty.clone() });
.insert(pat.into(), TypeMismatch { expected, actual: ty.clone() });
}
self.write_pat_ty(pat, ty.clone());
ty

View File

@ -43,11 +43,11 @@ impl<'a> InferenceContext<'a> {
}
let ty = self.make_ty(type_ref);
let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
let ctx = crate::lower::TyLoweringContext::new(self.db, &resolver);
let ctx = crate::lower::TyLoweringContext::new(self.db, resolver);
let (ty, _) = ctx.lower_ty_relative_path(ty, None, remaining_segments_for_ty);
self.resolve_ty_assoc_item(
ty,
&path.segments().last().expect("path had at least one segment").name,
path.segments().last().expect("path had at least one segment").name,
id,
)?
} else {
@ -154,7 +154,7 @@ impl<'a> InferenceContext<'a> {
let segment =
remaining_segments.last().expect("there should be at least one segment here");
self.resolve_ty_assoc_item(ty, &segment.name, id)
self.resolve_ty_assoc_item(ty, segment.name, id)
}
}
}

View File

@ -331,7 +331,7 @@ impl chalk_ir::interner::Interner for Interner {
&self,
clauses: &'a Self::InternedProgramClauses,
) -> &'a [chalk_ir::ProgramClause<Self>] {
&clauses
clauses
}
fn intern_quantified_where_clauses<E>(
@ -373,7 +373,7 @@ impl chalk_ir::interner::Interner for Interner {
&self,
canonical_var_kinds: &'a Self::InternedCanonicalVarKinds,
) -> &'a [chalk_ir::CanonicalVarKind<Self>] {
&canonical_var_kinds
canonical_var_kinds
}
fn intern_constraints<E>(
@ -413,7 +413,7 @@ impl chalk_ir::interner::Interner for Interner {
&self,
variances: &'a Self::InternedVariances,
) -> &'a [chalk_ir::Variance] {
&variances
variances
}
}

View File

@ -238,7 +238,7 @@ impl<'a> TyLoweringContext<'a> {
// away instead of two.
let actual_opaque_type_data = self
.with_debruijn(DebruijnIndex::INNERMOST, |ctx| {
ctx.lower_impl_trait(&bounds)
ctx.lower_impl_trait(bounds)
});
self.opaque_type_data.borrow_mut()[idx as usize] = actual_opaque_type_data;
@ -421,7 +421,7 @@ impl<'a> TyLoweringContext<'a> {
let found = self
.db
.trait_data(trait_ref.hir_trait_id())
.associated_type_by_name(&segment.name);
.associated_type_by_name(segment.name);
match found {
Some(associated_ty) => {
// FIXME handle type parameters on the segment
@ -505,7 +505,7 @@ impl<'a> TyLoweringContext<'a> {
pub(crate) fn lower_path(&self, path: &Path) -> (Ty, Option<TypeNs>) {
// Resolve the path (in type namespace)
if let Some(type_ref) = path.type_anchor() {
let (ty, res) = self.lower_ty_ext(&type_ref);
let (ty, res) = self.lower_ty_ext(type_ref);
return self.lower_ty_relative_path(ty, res, path.segments());
}
let (resolution, remaining_index) =
@ -784,7 +784,7 @@ impl<'a> TyLoweringContext<'a> {
let trait_ref = match bound {
TypeBound::Path(path) => {
bindings = self.lower_trait_ref_from_path(path, Some(self_ty));
bindings.clone().map(WhereClause::Implemented).map(|b| crate::wrap_empty_binders(b))
bindings.clone().map(WhereClause::Implemented).map(crate::wrap_empty_binders)
}
TypeBound::Lifetime(_) => None,
TypeBound::Error => None,

View File

@ -60,7 +60,7 @@ impl TyFingerprint {
TyKind::Adt(AdtId(adt), _) => TyFingerprint::Adt(*adt),
TyKind::Raw(mutability, ..) => TyFingerprint::RawPtr(*mutability),
TyKind::Foreign(alias_id, ..) => TyFingerprint::ForeignType(*alias_id),
TyKind::Dyn(_) => ty.dyn_trait().map(|trait_| TyFingerprint::Dyn(trait_))?,
TyKind::Dyn(_) => ty.dyn_trait().map(TyFingerprint::Dyn)?,
_ => return None,
};
Some(fp)
@ -77,7 +77,7 @@ impl TyFingerprint {
TyKind::Adt(AdtId(adt), _) => TyFingerprint::Adt(*adt),
TyKind::Raw(mutability, ..) => TyFingerprint::RawPtr(*mutability),
TyKind::Foreign(alias_id, ..) => TyFingerprint::ForeignType(*alias_id),
TyKind::Dyn(_) => ty.dyn_trait().map(|trait_| TyFingerprint::Dyn(trait_))?,
TyKind::Dyn(_) => ty.dyn_trait().map(TyFingerprint::Dyn)?,
TyKind::Ref(_, _, ty) => return TyFingerprint::for_trait_impl(ty),
TyKind::Tuple(_, subst) => {
let first_ty = subst.interned().get(0).map(|arg| arg.assert_ty_ref(&Interner));
@ -372,7 +372,7 @@ pub(crate) fn lookup_method(
db,
env,
krate,
&traits_in_scope,
traits_in_scope,
visible_from_module,
Some(name),
LookupMode::MethodCall,
@ -484,7 +484,7 @@ fn iterate_method_candidates_impl(
LookupMode::Path => {
// No autoderef for path lookups
iterate_method_candidates_for_self_ty(
&ty,
ty,
db,
env,
krate,
@ -513,7 +513,7 @@ fn iterate_method_candidates_with_autoref(
db,
env.clone(),
krate,
&traits_in_scope,
traits_in_scope,
visible_from_module,
name,
&mut callback,
@ -531,7 +531,7 @@ fn iterate_method_candidates_with_autoref(
db,
env.clone(),
krate,
&traits_in_scope,
traits_in_scope,
visible_from_module,
name,
&mut callback,
@ -549,7 +549,7 @@ fn iterate_method_candidates_with_autoref(
db,
env,
krate,
&traits_in_scope,
traits_in_scope,
visible_from_module,
name,
&mut callback,
@ -593,7 +593,7 @@ fn iterate_method_candidates_by_receiver(
db,
env.clone(),
krate,
&traits_in_scope,
traits_in_scope,
name,
Some(receiver_ty),
&mut callback,
@ -870,7 +870,7 @@ fn transform_receiver_ty(
.fill_with_unknown()
.build(),
AssocContainerId::ImplId(impl_id) => {
let impl_substs = inherent_impl_substs(db, env, impl_id, &self_ty)?;
let impl_substs = inherent_impl_substs(db, env, impl_id, self_ty)?;
TyBuilder::subst_for_def(db, function_id)
.use_parent_substs(&impl_substs)
.fill_with_unknown()

View File

@ -208,7 +208,7 @@ pub(crate) fn diagnostics(
match sema.to_module_def(file_id) {
Some(m) => m.diagnostics(db, &mut sink, internal_diagnostics),
None => {
sink.push(UnlinkedFile { file_id, node: SyntaxNodePtr::new(&parse.tree().syntax()) });
sink.push(UnlinkedFile { file_id, node: SyntaxNodePtr::new(parse.tree().syntax()) });
}
}
@ -222,7 +222,7 @@ fn diagnostic_with_fix<D: DiagnosticWithFixes>(
resolve: &AssistResolveStrategy,
) -> Diagnostic {
Diagnostic::error(sema.diagnostics_display_range(d.display_source()).range, d.message())
.with_fixes(d.fixes(&sema, resolve))
.with_fixes(d.fixes(sema, resolve))
.with_code(Some(d.code()))
}
@ -232,7 +232,7 @@ fn warning_with_fix<D: DiagnosticWithFixes>(
resolve: &AssistResolveStrategy,
) -> Diagnostic {
Diagnostic::hint(sema.diagnostics_display_range(d.display_source()).range, d.message())
.with_fixes(d.fixes(&sema, resolve))
.with_fixes(d.fixes(sema, resolve))
.with_code(Some(d.code()))
}

View File

@ -18,7 +18,7 @@ impl DiagnosticWithFixes for NoSuchField {
) -> Option<Vec<Assist>> {
let root = sema.db.parse_or_expand(self.file)?;
missing_record_expr_field_fixes(
&sema,
sema,
self.file.original_file(sema.db),
&self.field.to_node(&root),
)

View File

@ -37,7 +37,7 @@ impl DiagnosticWithFixes for MissingFields {
let edit = {
let mut builder = TextEdit::builder();
algo::diff(&old_field_list.syntax(), &new_field_list.syntax())
algo::diff(old_field_list.syntax(), new_field_list.syntax())
.into_text_edit(&mut builder);
builder.finish()
};
@ -45,7 +45,7 @@ impl DiagnosticWithFixes for MissingFields {
"fill_missing_fields",
"Fill struct fields",
SourceChange::from_text_edit(self.file.original_file(sema.db), edit),
sema.original_range(&field_list_parent.syntax()).range,
sema.original_range(field_list_parent.syntax()).range,
)])
}
}

View File

@ -151,18 +151,18 @@ pub(crate) fn resolve_doc_path_for_def(
) -> Option<hir::ModuleDef> {
match def {
Definition::ModuleDef(def) => match def {
hir::ModuleDef::Module(it) => it.resolve_doc_path(db, &link, ns),
hir::ModuleDef::Function(it) => it.resolve_doc_path(db, &link, ns),
hir::ModuleDef::Adt(it) => it.resolve_doc_path(db, &link, ns),
hir::ModuleDef::Variant(it) => it.resolve_doc_path(db, &link, ns),
hir::ModuleDef::Const(it) => it.resolve_doc_path(db, &link, ns),
hir::ModuleDef::Static(it) => it.resolve_doc_path(db, &link, ns),
hir::ModuleDef::Trait(it) => it.resolve_doc_path(db, &link, ns),
hir::ModuleDef::TypeAlias(it) => it.resolve_doc_path(db, &link, ns),
hir::ModuleDef::Module(it) => it.resolve_doc_path(db, link, ns),
hir::ModuleDef::Function(it) => it.resolve_doc_path(db, link, ns),
hir::ModuleDef::Adt(it) => it.resolve_doc_path(db, link, ns),
hir::ModuleDef::Variant(it) => it.resolve_doc_path(db, link, ns),
hir::ModuleDef::Const(it) => it.resolve_doc_path(db, link, ns),
hir::ModuleDef::Static(it) => it.resolve_doc_path(db, link, ns),
hir::ModuleDef::Trait(it) => it.resolve_doc_path(db, link, ns),
hir::ModuleDef::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
hir::ModuleDef::BuiltinType(_) => None,
},
Definition::Macro(it) => it.resolve_doc_path(db, &link, ns),
Definition::Field(it) => it.resolve_doc_path(db, &link, ns),
Definition::Macro(it) => it.resolve_doc_path(db, link, ns),
Definition::Field(it) => it.resolve_doc_path(db, link, ns),
Definition::SelfType(_)
| Definition::Local(_)
| Definition::GenericParam(_)
@ -192,7 +192,7 @@ pub(crate) fn doc_attributes(
ast::TupleField(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Field(def))),
ast::Macro(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Macro(def))),
// ast::Use(it) => sema.to_def(&it).map(|def| (Box::new(it) as _, def.attrs(sema.db))),
_ => return None
_ => None
}
}
}

View File

@ -328,7 +328,7 @@ mod tests {
use super::*;
fn do_check(before: &str, afters: &[&str]) {
let (analysis, position) = fixture::position(&before);
let (analysis, position) = fixture::position(before);
let before = analysis.file_text(position.file_id).unwrap();
let range = TextRange::empty(position.offset);
let mut frange = FileRange { file_id: position.file_id, range };

View File

@ -43,7 +43,7 @@ pub(crate) fn goto_definition(
let (docs, doc_mapping) = attributes.docs_with_rangemap(db)?;
let (_, link, ns) =
extract_definitions_from_markdown(docs.as_str()).into_iter().find(|(range, ..)| {
doc_mapping.map(range.clone()).map_or(false, |InFile { file_id, value: range }| {
doc_mapping.map(*range).map_or(false, |InFile { file_id, value: range }| {
file_id == position.file_id.into() && range.contains(position.offset)
})
})?;
@ -57,7 +57,7 @@ pub(crate) fn goto_definition(
},
ast::Name(name) => {
let def = NameClass::classify(&sema, &name)?.referenced_or_defined(sema.db);
try_find_trait_item_definition(&sema.db, &def)
try_find_trait_item_definition(sema.db, &def)
.or_else(|| def.try_to_nav(sema.db))
},
ast::Lifetime(lt) => if let Some(name_class) = NameClass::classify_lifetime(&sema, &lt) {

View File

@ -87,7 +87,7 @@ fn impls_for_trait_item(
.filter_map(|imp| {
let item = imp.items(sema.db).iter().find_map(|itm| {
let itm_name = itm.name(sema.db)?;
(itm_name == fun_name).then(|| itm.clone())
(itm_name == fun_name).then(|| *itm)
})?;
item.try_to_nav(sema.db)
})

View File

@ -131,7 +131,7 @@ pub(crate) fn hover(
let (docs, doc_mapping) = attributes.docs_with_rangemap(db)?;
let (idl_range, link, ns) =
extract_definitions_from_markdown(docs.as_str()).into_iter().find_map(|(range, link, ns)| {
let InFile { file_id, value: range } = doc_mapping.map(range.clone())?;
let InFile { file_id, value: range } = doc_mapping.map(range)?;
if file_id == position.file_id.into() && range.contains(position.offset) {
Some((range, link, ns))
} else {
@ -288,7 +288,7 @@ fn runnable_action(
) -> Option<HoverAction> {
match def {
Definition::ModuleDef(it) => match it {
ModuleDef::Module(it) => runnable_mod(&sema, it).map(|it| HoverAction::Runnable(it)),
ModuleDef::Module(it) => runnable_mod(sema, it).map(HoverAction::Runnable),
ModuleDef::Function(func) => {
let src = func.source(sema.db)?;
if src.file_id != file_id.into() {
@ -297,7 +297,7 @@ fn runnable_action(
return None;
}
runnable_fn(&sema, func).map(HoverAction::Runnable)
runnable_fn(sema, func).map(HoverAction::Runnable)
}
_ => None,
},
@ -432,7 +432,7 @@ fn hover_for_definition(
return match def {
Definition::Macro(it) => match &it.source(db)?.value {
Either::Left(mac) => {
let label = macro_label(&mac);
let label = macro_label(mac);
from_def_source_labeled(db, it, Some(label), mod_path)
}
Either::Right(_) => {
@ -516,7 +516,7 @@ fn hover_for_keyword(
if !token.kind().is_keyword() {
return None;
}
let famous_defs = FamousDefs(&sema, sema.scope(&token.parent()?).krate());
let famous_defs = FamousDefs(sema, sema.scope(&token.parent()?).krate());
// std exposes {}_keyword modules with docstrings on the root to document keywords
let keyword_mod = format!("{}_keyword", token.text());
let doc_owner = find_std_module(&famous_defs, &keyword_mod)?;

View File

@ -96,7 +96,7 @@ fn get_chaining_hints(
}
let krate = sema.scope(expr.syntax()).module().map(|it| it.krate());
let famous_defs = FamousDefs(&sema, krate);
let famous_defs = FamousDefs(sema, krate);
let mut tokens = expr
.syntax()
@ -165,7 +165,7 @@ fn get_param_name_hints(
};
Some((param_name, arg))
})
.filter(|(param_name, arg)| !should_hide_param_name_hint(sema, &callable, param_name, &arg))
.filter(|(param_name, arg)| !should_hide_param_name_hint(sema, &callable, param_name, arg))
.map(|(param_name, arg)| InlayHint {
range: arg.syntax().text_range(),
kind: InlayKind::ParameterHint,
@ -187,7 +187,7 @@ fn get_bind_pat_hints(
}
let krate = sema.scope(pat.syntax()).module().map(|it| it.krate());
let famous_defs = FamousDefs(&sema, krate);
let famous_defs = FamousDefs(sema, krate);
let ty = sema.type_of_pat(&pat.clone().into())?;

View File

@ -60,7 +60,7 @@ fn remove_newlines(edit: &mut TextEditBuilder, token: &SyntaxToken, range: TextR
let pos: TextSize = (pos as u32).into();
let offset = token.text_range().start() + range.start() + pos;
if !edit.invalidates_offset(offset) {
remove_newline(edit, &token, offset);
remove_newline(edit, token, offset);
}
}
}

View File

@ -282,20 +282,20 @@ impl Analysis {
file_id: FileId,
text_range: Option<TextRange>,
) -> Cancellable<String> {
self.with_db(|db| syntax_tree::syntax_tree(&db, file_id, text_range))
self.with_db(|db| syntax_tree::syntax_tree(db, file_id, text_range))
}
pub fn view_hir(&self, position: FilePosition) -> Cancellable<String> {
self.with_db(|db| view_hir::view_hir(&db, position))
self.with_db(|db| view_hir::view_hir(db, position))
}
pub fn view_item_tree(&self, file_id: FileId) -> Cancellable<String> {
self.with_db(|db| view_item_tree::view_item_tree(&db, file_id))
self.with_db(|db| view_item_tree::view_item_tree(db, file_id))
}
/// Renders the crate graph to GraphViz "dot" syntax.
pub fn view_crate_graph(&self) -> Cancellable<Result<String, String>> {
self.with_db(|db| view_crate_graph::view_crate_graph(&db))
self.with_db(|db| view_crate_graph::view_crate_graph(db))
}
pub fn expand_macro(&self, position: FilePosition) -> Cancellable<Option<ExpandedMacro>> {
@ -315,7 +315,7 @@ impl Analysis {
/// up minor stuff like continuing the comment.
/// The edit will be a snippet (with `$0`).
pub fn on_enter(&self, position: FilePosition) -> Cancellable<Option<TextEdit>> {
self.with_db(|db| typing::on_enter(&db, position))
self.with_db(|db| typing::on_enter(db, position))
}
/// Returns an edit which should be applied after a character was typed.
@ -331,7 +331,7 @@ impl Analysis {
if !typing::TRIGGER_CHARS.contains(char_typed) {
return Ok(None);
}
self.with_db(|db| typing::on_char_typed(&db, position, char_typed))
self.with_db(|db| typing::on_char_typed(db, position, char_typed))
}
/// Returns a tree representation of symbols in the file. Useful to draw a

View File

@ -62,7 +62,7 @@ pub(crate) fn find_all_refs(
if let Some(name) = get_name_of_item_declaration(&syntax, position) {
(NameClass::classify(sema, &name)?.referenced_or_defined(sema.db), true)
} else {
(find_def(&sema, &syntax, position)?, false)
(find_def(sema, &syntax, position)?, false)
};
let mut usages = def.usages(sema).set_scope(search_scope).include_self_refs().all();

View File

@ -64,7 +64,7 @@ pub(crate) fn prepare_rename(
}
};
let name_like = sema
.find_node_at_offset_with_descend(&syntax, position.offset)
.find_node_at_offset_with_descend(syntax, position.offset)
.ok_or_else(|| format_err!("No references found at position"))?;
let node = match &name_like {
ast::NameLike::Name(it) => it.syntax(),
@ -104,7 +104,7 @@ pub(crate) fn rename_with_semantics(
let def = find_definition(sema, syntax, position)?;
match def {
Definition::ModuleDef(ModuleDef::Module(module)) => rename_mod(&sema, module, new_name),
Definition::ModuleDef(ModuleDef::Module(module)) => rename_mod(sema, module, new_name),
Definition::SelfType(_) => bail!("Cannot rename `Self`"),
Definition::ModuleDef(ModuleDef::BuiltinType(_)) => bail!("Cannot rename builtin type"),
def => rename_reference(sema, def, new_name),
@ -323,7 +323,7 @@ fn rename_reference(
}
let mut source_change = SourceChange::default();
source_change.extend(usages.iter().map(|(&file_id, references)| {
(file_id, source_edit_from_references(&references, def, new_name))
(file_id, source_edit_from_references(references, def, new_name))
}));
let (file_id, edit) = source_edit_from_def(sema, def, new_name)?;
@ -413,7 +413,7 @@ fn rename_self_to_param(
let mut source_change = SourceChange::default();
source_change.insert_source_edit(file_id.original_file(sema.db), edit);
source_change.extend(usages.iter().map(|(&file_id, references)| {
(file_id, source_edit_from_references(&references, def, new_name))
(file_id, source_edit_from_references(references, def, new_name))
}));
Ok(source_change)
}
@ -426,7 +426,7 @@ fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: &str) -> Opt
None
}
let impl_def = self_param.syntax().ancestors().find_map(|it| ast::Impl::cast(it))?;
let impl_def = self_param.syntax().ancestors().find_map(ast::Impl::cast)?;
let type_name = target_type_name(&impl_def)?;
let mut replacement_text = String::from(new_name);

View File

@ -158,7 +158,7 @@ fn find_related_tests(
search_scope: Option<SearchScope>,
tests: &mut FxHashSet<Runnable>,
) {
if let Some(refs) = references::find_all_refs(&sema, position, search_scope) {
if let Some(refs) = references::find_all_refs(sema, position, search_scope) {
for (file_id, refs) in refs.references {
let file = sema.parse(file_id);
let file = file.syntax();
@ -169,10 +169,10 @@ fn find_related_tests(
});
for fn_def in functions {
if let Some(runnable) = as_test_runnable(&sema, &fn_def) {
if let Some(runnable) = as_test_runnable(sema, &fn_def) {
// direct test
tests.insert(runnable);
} else if let Some(module) = parent_test_module(&sema, &fn_def) {
} else if let Some(module) = parent_test_module(sema, &fn_def) {
// indirect test
find_related_tests_in_module(sema, &fn_def, &module, tests);
}
@ -203,7 +203,7 @@ fn find_related_tests_in_module(
}
fn as_test_runnable(sema: &Semantics<RootDatabase>, fn_def: &ast::Fn) -> Option<Runnable> {
if test_related_attribute(&fn_def).is_some() {
if test_related_attribute(fn_def).is_some() {
let function = sema.to_def(fn_def)?;
runnable_fn(sema, function)
} else {

View File

@ -323,7 +323,7 @@ fn traverse(
if let Some(token) = element.as_token().cloned().and_then(ast::String::cast) {
if token.is_raw() {
let expanded = element_to_highlight.as_token().unwrap().clone();
if inject::ra_fixture(hl, &sema, token, expanded).is_some() {
if inject::ra_fixture(hl, sema, token, expanded).is_some() {
continue;
}
}
@ -334,7 +334,7 @@ fn traverse(
}
if let Some((mut highlight, binding_hash)) = highlight::element(
&sema,
sema,
krate,
&mut bindings_shadow_count,
syntactic_name_ref_highlighting,

View File

@ -449,12 +449,12 @@ fn highlight_method_call(
krate: Option<hir::Crate>,
method_call: &ast::MethodCallExpr,
) -> Option<Highlight> {
let func = sema.resolve_method_call(&method_call)?;
let func = sema.resolve_method_call(method_call)?;
let mut h = SymbolKind::Function.into();
h |= HlMod::Associated;
if func.is_unsafe(sema.db) || sema.is_unsafe_method_call(&method_call) {
if func.is_unsafe(sema.db) || sema.is_unsafe_method_call(method_call) {
h |= HlMod::Unsafe;
}
if func.is_async(sema.db) {
@ -526,11 +526,9 @@ fn highlight_name_ref_by_syntax(
};
match parent.kind() {
METHOD_CALL_EXPR => {
return ast::MethodCallExpr::cast(parent)
.and_then(|it| highlight_method_call(sema, krate, &it))
.unwrap_or_else(|| SymbolKind::Function.into());
}
METHOD_CALL_EXPR => ast::MethodCallExpr::cast(parent)
.and_then(|it| highlight_method_call(sema, krate, &it))
.unwrap_or_else(|| SymbolKind::Function.into()),
FIELD_EXPR => {
let h = HlTag::Symbol(SymbolKind::Field);
let is_union = ast::FieldExpr::cast(parent)

View File

@ -23,7 +23,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
let hl_ranges = highlight(db, file_id, None, false);
let text = parse.tree().syntax().to_string();
let mut buf = String::new();
buf.push_str(&STYLE);
buf.push_str(STYLE);
buf.push_str("<pre><code>");
for r in &hl_ranges {
let chunk = html_escape(&text[r.range]);

View File

@ -23,7 +23,7 @@ pub(super) fn ra_fixture(
literal: ast::String,
expanded: SyntaxToken,
) -> Option<()> {
let active_parameter = ActiveParameter::at_token(&sema, expanded)?;
let active_parameter = ActiveParameter::at_token(sema, expanded)?;
if !active_parameter.ident().map_or(false, |name| name.text().starts_with("ra_fixture")) {
return None;
}
@ -124,7 +124,7 @@ pub(super) fn doc_comment(
}
for attr in attributes.by_key("doc").attrs() {
let InFile { file_id, value: src } = attrs_source_map.source_of(&attr);
let InFile { file_id, value: src } = attrs_source_map.source_of(attr);
if file_id != node.file_id {
continue;
}
@ -232,7 +232,7 @@ fn find_doc_string_in_attr(attr: &hir::Attr, it: &ast::Attr) -> Option<ast::Stri
string.text().get(1..string.text().len() - 1).map_or(false, |it| it == text)
})
}
_ => return None,
_ => None,
}
}

View File

@ -88,12 +88,12 @@ fn on_enter_in_comment(
if comment.text().ends_with(' ') {
cov_mark::hit!(continues_end_of_line_comment_with_space);
remove_trailing_whitespace = true;
} else if !followed_by_comment(&comment) {
} else if !followed_by_comment(comment) {
return None;
}
}
let indent = node_indent(&file, comment.syntax())?;
let indent = node_indent(file, comment.syntax())?;
let inserted = format!("\n{}{} $0", indent, prefix);
let delete = if remove_trailing_whitespace {
let trimmed_len = comment.text().trim_end().len() as u32;
@ -188,7 +188,7 @@ mod tests {
use crate::fixture;
fn apply_on_enter(before: &str) -> Option<String> {
let (analysis, position) = fixture::position(&before);
let (analysis, position) = fixture::position(before);
let result = analysis.on_enter(position).unwrap()?;
let mut actual = analysis.file_text(position.file_id).unwrap().to_string();

View File

@ -291,8 +291,7 @@ impl AssistBuilder {
algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit)
}
pub(crate) fn create_file(&mut self, dst: AnchoredPathBuf, content: impl Into<String>) {
let file_system_edit =
FileSystemEdit::CreateFile { dst: dst, initial_contents: content.into() };
let file_system_edit = FileSystemEdit::CreateFile { dst, initial_contents: content.into() };
self.source_change.push_file_system_edit(file_system_edit);
}

View File

@ -78,12 +78,12 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext) -> Option<(
terms.sort_by_key(|t| t.syntax().text_range().start());
let mut terms = VecDeque::from(terms);
let paren_expr = expr.syntax().parent().and_then(|parent| ast::ParenExpr::cast(parent));
let paren_expr = expr.syntax().parent().and_then(ast::ParenExpr::cast);
let neg_expr = paren_expr
.clone()
.and_then(|paren_expr| paren_expr.syntax().parent())
.and_then(|parent| ast::PrefixExpr::cast(parent))
.and_then(ast::PrefixExpr::cast)
.and_then(|prefix_expr| {
if prefix_expr.op_kind().unwrap() == ast::PrefixOp::Not {
Some(prefix_expr)

View File

@ -88,7 +88,7 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
// We pick a single indentation level for the whole block comment based on the
// comment where the assist was invoked. This will be prepended to the
// contents of each line comment when they're put into the block comment.
let indentation = IndentLevel::from_token(&comment.syntax());
let indentation = IndentLevel::from_token(comment.syntax());
let block_comment_body =
comments.into_iter().map(|c| line_comment_text(indentation, c)).join("\n");
@ -167,7 +167,7 @@ fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String {
if contents.is_empty() {
contents.to_owned()
} else {
indentation.to_string() + &contents
indentation.to_string() + contents
}
}

View File

@ -108,7 +108,7 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext)
"Convert to guarded return",
target,
|edit| {
let if_indent_level = IndentLevel::from_node(&if_expr.syntax());
let if_indent_level = IndentLevel::from_node(if_expr.syntax());
let new_block = match if_let_pat {
None => {
// If.
@ -174,7 +174,7 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext)
.take_while(|i| *i != end_of_then),
);
replace_children(
&parent_block.syntax(),
parent_block.syntax(),
RangeInclusive::new(
if_expr.clone().syntax().clone().into(),
if_expr.syntax().clone().into(),

View File

@ -76,7 +76,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext) -> Option
let module = ctx.sema.scope(&insert_after).module()?;
let vars_defined_in_body_and_outlive =
vars_defined_in_body_and_outlive(ctx, &body, &node.parent().as_ref().unwrap_or(&node));
vars_defined_in_body_and_outlive(ctx, &body, node.parent().as_ref().unwrap_or(&node));
let ret_ty = body_return_ty(ctx, &body)?;
// FIXME: we compute variables that outlive here just to check `never!` condition
@ -808,7 +808,7 @@ trait HasTokenAtOffset {
impl HasTokenAtOffset for SyntaxNode {
fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken> {
SyntaxNode::token_at_offset(&self, offset)
SyntaxNode::token_at_offset(self, offset)
}
}
@ -854,7 +854,7 @@ fn vars_defined_in_body_and_outlive(
body: &FunctionBody,
parent: &SyntaxNode,
) -> Vec<OutlivedLocal> {
let vars_defined_in_body = vars_defined_in_body(&body, ctx);
let vars_defined_in_body = vars_defined_in_body(body, ctx);
vars_defined_in_body
.into_iter()
.filter_map(|var| var_outlives_body(ctx, body, var, parent))
@ -868,7 +868,7 @@ fn is_defined_before(
src: &hir::InFile<Either<ast::IdentPat, ast::SelfParam>>,
) -> bool {
src.file_id.original_file(ctx.db()) == ctx.frange.file_id
&& !body.contains_node(&either_syntax(&src.value))
&& !body.contains_node(either_syntax(&src.value))
}
fn either_syntax(value: &Either<ast::IdentPat, ast::SelfParam>) -> &SyntaxNode {

View File

@ -202,7 +202,7 @@ impl ExtendedEnum {
fn variants(self, db: &RootDatabase) -> Vec<ExtendedVariant> {
match self {
ExtendedEnum::Enum(e) => {
e.variants(db).into_iter().map(|x| ExtendedVariant::Variant(x)).collect::<Vec<_>>()
e.variants(db).into_iter().map(ExtendedVariant::Variant).collect::<Vec<_>>()
}
ExtendedEnum::Bool => {
Vec::<ExtendedVariant>::from([ExtendedVariant::True, ExtendedVariant::False])
@ -212,7 +212,7 @@ impl ExtendedEnum {
}
fn resolve_enum_def(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<ExtendedEnum> {
sema.type_of_expr(&expr)?.autoderef(sema.db).find_map(|ty| match ty.as_adt() {
sema.type_of_expr(expr)?.autoderef(sema.db).find_map(|ty| match ty.as_adt() {
Some(Adt::Enum(e)) => Some(ExtendedEnum::Enum(e)),
_ => {
if ty.is_bool() {
@ -228,7 +228,7 @@ fn resolve_tuple_of_enum_def(
sema: &Semantics<RootDatabase>,
expr: &ast::Expr,
) -> Option<Vec<ExtendedEnum>> {
sema.type_of_expr(&expr)?
sema.type_of_expr(expr)?
.tuple_fields(sema.db)
.iter()
.map(|ty| {

View File

@ -43,7 +43,7 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext) -> O
_ => return None,
};
let current_module = ctx.sema.scope(&path.syntax()).module()?;
let current_module = ctx.sema.scope(path.syntax()).module()?;
let target_module = def.module(ctx.db())?;
let vis = target_module.visibility_of(ctx.db(), &def)?;

View File

@ -47,7 +47,7 @@ pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext) ->
let fn_name = format!("is_{}", &to_lower_snake_case(&variant_name.text()));
// Return early if we've found an existing new fn
let impl_def = find_struct_impl(&ctx, &parent_enum, &fn_name)?;
let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?;
let target = variant.syntax().text_range();
acc.add(

View File

@ -136,7 +136,7 @@ fn generate_enum_projection_method(
format!("{}_{}", props.fn_name_prefix, &to_lower_snake_case(&variant_name.text()));
// Return early if we've found an existing new fn
let impl_def = find_struct_impl(&ctx, &parent_enum, &fn_name)?;
let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?;
let target = variant.syntax().text_range();
acc.add(AssistId(assist_id, AssistKind::Generate), assist_description, target, |builder| {

View File

@ -59,7 +59,7 @@ pub(crate) fn generate_function(acc: &mut Assists, ctx: &AssistContext) -> Optio
None => None,
};
let function_builder = FunctionBuilder::from_call(&ctx, &call, &path, target_module)?;
let function_builder = FunctionBuilder::from_call(ctx, &call, &path, target_module)?;
let target = call.syntax().text_range();
acc.add(
@ -128,12 +128,12 @@ impl FunctionBuilder {
file = in_file;
target
}
None => next_space_for_fn_after_call_site(&call)?,
None => next_space_for_fn_after_call_site(call)?,
};
let needs_pub = target_module.is_some();
let target_module = target_module.or_else(|| ctx.sema.scope(target.syntax()).module())?;
let fn_name = fn_name(&path)?;
let (type_params, params) = fn_args(ctx, target_module, &call)?;
let fn_name = fn_name(path)?;
let (type_params, params) = fn_args(ctx, target_module, call)?;
// should_render_snippet intends to express a rough level of confidence about
// the correctness of the return type.

View File

@ -75,7 +75,7 @@ pub(crate) fn generate_getter_impl(
if mutable {
format_to!(fn_name, "_mut");
}
let impl_def = find_struct_impl(&ctx, &ast::Adt::Struct(strukt.clone()), fn_name.as_str())?;
let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), fn_name.as_str())?;
let (id, label) = if mutable {
("generate_getter_mut", "Generate a mut getter method")

View File

@ -36,7 +36,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
};
// Return early if we've found an existing new fn
let impl_def = find_struct_impl(&ctx, &ast::Adt::Struct(strukt.clone()), "new")?;
let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), "new")?;
let target = strukt.syntax().text_range();
acc.add(AssistId("generate_new", AssistKind::Generate), "Generate `new`", target, |builder| {

View File

@ -39,7 +39,7 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext) -> Option<
// Return early if we've found an existing fn
let fn_name = to_lower_snake_case(&field_name.to_string());
let impl_def = find_struct_impl(
&ctx,
ctx,
&ast::Adt::Struct(strukt.clone()),
format!("set_{}", fn_name).as_str(),
)?;

View File

@ -85,7 +85,7 @@ fn whitespace_start(it: SyntaxElement) -> Option<TextSize> {
}
fn adjusted_macro_contents(macro_call: &ast::MacroCall) -> Option<String> {
let contents = get_valid_macrocall_contents(&macro_call, "dbg")?;
let contents = get_valid_macrocall_contents(macro_call, "dbg")?;
let macro_text_with_brackets = macro_call.token_tree()?.syntax().text();
let macro_text_in_brackets = macro_text_with_brackets.slice(TextRange::new(
TextSize::of('('),

View File

@ -28,7 +28,7 @@ pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext) -> Option<(
.or_else(|| ctx.find_node_at_offset::<ast::RecordPat>().map(Either::Right))?;
let path = record.as_ref().either(|it| it.path(), |it| it.path())?;
let ranks = compute_fields_ranks(&path, &ctx)?;
let ranks = compute_fields_ranks(&path, ctx)?;
let get_rank_of_field =
|of: Option<_>| *ranks.get(&of.unwrap_or_default()).unwrap_or(&usize::MAX);

View File

@ -112,7 +112,7 @@ fn add_assist(
let insert_pos = adt.syntax().text_range().end();
let impl_def_with_items =
impl_def_from_trait(&ctx.sema, &annotated_name, trait_, trait_path);
update_attribute(builder, &input, &trait_name, &attr);
update_attribute(builder, input, &trait_name, attr);
let trait_path = format!("{}", trait_path);
match (ctx.config.snippet_cap, impl_def_with_items) {
(None, _) => {

View File

@ -169,7 +169,7 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext)
}
fn is_pat_wildcard_or_sad(sema: &hir::Semantics<RootDatabase>, pat: &ast::Pat) -> bool {
sema.type_of_pat(&pat)
sema.type_of_pat(pat)
.and_then(|ty| TryEnum::from_ty(sema, &ty))
.map(|it| it.sad_pattern().syntax().text() == pat.syntax().text())
.unwrap_or_else(|| matches!(pat, ast::Pat::WildcardPat(_)))

View File

@ -123,7 +123,7 @@ impl TailReturnCollector {
fn handle_exprs(&mut self, expr: &Expr, collect_break: bool) {
match expr {
Expr::BlockExpr(block_expr) => {
self.collect_jump_exprs(&block_expr, collect_break);
self.collect_jump_exprs(block_expr, collect_break);
}
Expr::ReturnExpr(ret_expr) => {
if let Some(ret_expr_arg) = &ret_expr.expr() {

View File

@ -47,14 +47,14 @@ impl AssistKind {
}
match self {
AssistKind::None | AssistKind::Generate => return true,
AssistKind::None | AssistKind::Generate => true,
AssistKind::Refactor => match other {
AssistKind::RefactorExtract
| AssistKind::RefactorInline
| AssistKind::RefactorRewrite => return true,
_ => return false,
| AssistKind::RefactorRewrite => true,
_ => false,
},
_ => return false,
_ => false,
}
}

View File

@ -74,7 +74,7 @@ pub(crate) fn check_assist_unresolved(assist: Handler, ra_fixture: &str) {
#[track_caller]
fn check_doc_test(assist_id: &str, before: &str, after: &str) {
let after = trim_indent(after);
let (db, file_id, selection) = RootDatabase::with_range_or_offset(&before);
let (db, file_id, selection) = RootDatabase::with_range_or_offset(before);
let before = db.file_text(file_id).to_string();
let frange = FileRange { file_id, range: selection.into() };

View File

@ -492,7 +492,7 @@ pub(crate) fn add_method_to_adt(
let start_offset = impl_def
.and_then(|impl_def| find_impl_block_end(impl_def, &mut buf))
.unwrap_or_else(|| {
buf = generate_impl_text(&adt, &buf);
buf = generate_impl_text(adt, &buf);
adt.syntax().text_range().end()
});

View File

@ -187,7 +187,7 @@ fn from_method_call(expr: &ast::Expr) -> Option<String> {
}
}
normalize(&name)
normalize(name)
}
fn from_param(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<String> {

View File

@ -13,7 +13,7 @@ pub(crate) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
_ => return complete_undotted_self(acc, ctx),
};
let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) {
let receiver_ty = match ctx.sema.type_of_expr(dot_receiver) {
Some(ty) => ty,
_ => return,
};

View File

@ -34,7 +34,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
let receiver_text = get_receiver_text(dot_receiver, receiver_is_ambiguous_float_literal);
let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) {
let receiver_ty = match ctx.sema.type_of_expr(dot_receiver) {
Some(it) => it,
None => return,
};
@ -50,7 +50,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
postfix_snippet(
ctx,
cap,
&dot_receiver,
dot_receiver,
"ifl",
"if let Ok {}",
&format!("if let Ok($1) = {} {{\n $0\n}}", receiver_text),
@ -60,7 +60,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
postfix_snippet(
ctx,
cap,
&dot_receiver,
dot_receiver,
"while",
"while let Ok {}",
&format!("while let Ok($1) = {} {{\n $0\n}}", receiver_text),
@ -71,7 +71,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
postfix_snippet(
ctx,
cap,
&dot_receiver,
dot_receiver,
"ifl",
"if let Some {}",
&format!("if let Some($1) = {} {{\n $0\n}}", receiver_text),
@ -81,7 +81,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
postfix_snippet(
ctx,
cap,
&dot_receiver,
dot_receiver,
"while",
"while let Some {}",
&format!("while let Some($1) = {} {{\n $0\n}}", receiver_text),
@ -93,7 +93,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
postfix_snippet(
ctx,
cap,
&dot_receiver,
dot_receiver,
"if",
"if expr {}",
&format!("if {} {{\n $0\n}}", receiver_text),
@ -102,22 +102,22 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
postfix_snippet(
ctx,
cap,
&dot_receiver,
dot_receiver,
"while",
"while expr {}",
&format!("while {} {{\n $0\n}}", receiver_text),
)
.add_to(acc);
postfix_snippet(ctx, cap, &dot_receiver, "not", "!expr", &format!("!{}", receiver_text))
postfix_snippet(ctx, cap, dot_receiver, "not", "!expr", &format!("!{}", receiver_text))
.add_to(acc);
}
postfix_snippet(ctx, cap, &dot_receiver, "ref", "&expr", &format!("&{}", receiver_text))
postfix_snippet(ctx, cap, dot_receiver, "ref", "&expr", &format!("&{}", receiver_text))
.add_to(acc);
postfix_snippet(
ctx,
cap,
&dot_receiver,
dot_receiver,
"refm",
"&mut expr",
&format!("&mut {}", receiver_text),

View File

@ -53,7 +53,7 @@ pub(crate) fn add_format_like_completions(
for (label, macro_name) in KINDS {
let snippet = parser.into_suggestion(macro_name);
postfix_snippet(ctx, cap, &dot_receiver, label, macro_name, &snippet).add_to(acc);
postfix_snippet(ctx, cap, dot_receiver, label, macro_name, &snippet).add_to(acc);
}
}
}
@ -91,7 +91,7 @@ enum State {
impl FormatStrParser {
pub(crate) fn new(input: String) -> Self {
Self {
input: input,
input,
output: String::new(),
extracted_expressions: Vec::new(),
state: State::NotExpr,

View File

@ -15,7 +15,7 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
None => return,
};
let resolution = match ctx.sema.resolve_path(&path) {
let resolution = match ctx.sema.resolve_path(path) {
Some(res) => res,
None => return,
};

View File

@ -380,7 +380,7 @@ impl<'a> CompletionContext<'a> {
(|| {
let expr_field = self.token.prev_sibling_or_token()?
.into_node()
.and_then(|node| ast::RecordExprField::cast(node))?;
.and_then(ast::RecordExprField::cast)?;
let (_, _, ty) = self.sema.resolve_record_field(&expr_field)?;
Some((
Some(ty),
@ -467,7 +467,7 @@ impl<'a> CompletionContext<'a> {
self.expected_type = expected_type;
self.expected_name = expected_name;
let name_like = match find_node_at_offset(&&file_with_fake_ident, offset) {
let name_like = match find_node_at_offset(&file_with_fake_ident, offset) {
Some(it) => it,
None => return,
};

View File

@ -115,12 +115,12 @@ pub(crate) fn determine_location(
) -> Option<ImmediateLocation> {
let node = match name_like {
ast::NameLike::NameRef(name_ref) => {
if ast::RecordExprField::for_field_name(&name_ref).is_some() {
if ast::RecordExprField::for_field_name(name_ref).is_some() {
return sema
.find_node_at_offset_with_macros(original_file, offset)
.map(ImmediateLocation::RecordExpr);
}
if ast::RecordPatField::for_field_name_ref(&name_ref).is_some() {
if ast::RecordPatField::for_field_name_ref(name_ref).is_some() {
return sema
.find_node_at_offset_with_macros(original_file, offset)
.map(ImmediateLocation::RecordPat);
@ -128,7 +128,7 @@ pub(crate) fn determine_location(
maximize_name_ref(name_ref)
}
ast::NameLike::Name(name) => {
if ast::RecordPatField::for_field_name(&name).is_some() {
if ast::RecordPatField::for_field_name(name).is_some() {
return sema
.find_node_at_offset_with_macros(original_file, offset)
.map(ImmediateLocation::RecordPat);
@ -259,7 +259,7 @@ fn test_inside_impl_trait_block() {
}
pub(crate) fn previous_token(element: SyntaxElement) -> Option<SyntaxToken> {
element.into_token().and_then(|it| previous_non_trivia_token(it))
element.into_token().and_then(previous_non_trivia_token)
}
/// Check if the token previous to the previous one is `for`.
@ -267,8 +267,8 @@ pub(crate) fn previous_token(element: SyntaxElement) -> Option<SyntaxToken> {
pub(crate) fn for_is_prev2(element: SyntaxElement) -> bool {
element
.into_token()
.and_then(|it| previous_non_trivia_token(it))
.and_then(|it| previous_non_trivia_token(it))
.and_then(previous_non_trivia_token)
.and_then(previous_non_trivia_token)
.filter(|it| it.kind() == T![for])
.is_some()
}

View File

@ -86,7 +86,7 @@ impl<'a> RenderContext<'a> {
}
fn db(&self) -> &'a RootDatabase {
&self.completion.db
self.completion.db
}
fn source_range(&self) -> TextRange {

View File

@ -75,10 +75,10 @@ fn render_pat(
) -> Option<String> {
let mut pat = match kind {
StructKind::Tuple if ctx.snippet_cap().is_some() => {
render_tuple_as_pat(&fields, &name, fields_omitted)
render_tuple_as_pat(fields, name, fields_omitted)
}
StructKind::Record => {
render_record_as_pat(ctx.db(), ctx.snippet_cap(), &fields, &name, fields_omitted)
render_record_as_pat(ctx.db(), ctx.snippet_cap(), fields, name, fields_omitted)
}
_ => return None,
};
@ -86,7 +86,7 @@ fn render_pat(
if ctx.completion.is_param {
pat.push(':');
pat.push(' ');
pat.push_str(&name);
pat.push_str(name);
}
if ctx.snippet_cap().is_some() {
pat.push_str("$0");

View File

@ -162,7 +162,7 @@ impl ActiveParameter {
}
pub fn at_token(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Self> {
let (signature, active_parameter) = call_info_impl(&sema, token)?;
let (signature, active_parameter) = call_info_impl(sema, token)?;
let idx = active_parameter?;
let mut params = signature.params(sema.db);

View File

@ -323,7 +323,7 @@ fn import_for_item(
}
let segment_import =
find_import_for_segment(db, original_item_candidate, &unresolved_first_segment)?;
find_import_for_segment(db, original_item_candidate, unresolved_first_segment)?;
let trait_item_to_import = item_as_assoc(db, original_item)
.and_then(|assoc| assoc.containing_trait(db))
.map(|trait_| ItemInNs::from(ModuleDef::from(trait_)));
@ -383,7 +383,7 @@ fn find_import_for_segment(
original_item
} else {
let matching_module =
module_with_segment_name(db, &unresolved_first_segment, original_item)?;
module_with_segment_name(db, unresolved_first_segment, original_item)?;
ItemInNs::from(ModuleDef::from(matching_module))
})
}

View File

@ -124,7 +124,7 @@ fn recursive_merge(
.map(|tree_list| tree_list.use_trees().any(tree_is_self))
.unwrap_or(false)
};
match (tree_contains_self(&lhs_t), tree_contains_self(&rhs_t)) {
match (tree_contains_self(lhs_t), tree_contains_self(&rhs_t)) {
(true, false) => continue,
(false, true) => {
*lhs_t = rhs_t;

View File

@ -409,7 +409,7 @@ impl<'a> FindUsages<'a> {
if let Some(ast::NameLike::NameRef(name_ref)) =
sema.find_node_at_offset_with_descend(&tree, offset)
{
if self.found_self_ty_name_ref(&self_ty, &name_ref, sink) {
if self.found_self_ty_name_ref(self_ty, &name_ref, sink) {
return;
}
}
@ -424,7 +424,7 @@ impl<'a> FindUsages<'a> {
name_ref: &ast::NameRef,
sink: &mut dyn FnMut(FileId, FileReference) -> bool,
) -> bool {
match NameRefClass::classify(self.sema, &name_ref) {
match NameRefClass::classify(self.sema, name_ref) {
Some(NameRefClass::Definition(Definition::SelfType(impl_)))
if impl_.self_ty(self.sema.db) == *self_ty =>
{
@ -464,13 +464,13 @@ impl<'a> FindUsages<'a> {
name_ref: &ast::NameRef,
sink: &mut dyn FnMut(FileId, FileReference) -> bool,
) -> bool {
match NameRefClass::classify(self.sema, &name_ref) {
match NameRefClass::classify(self.sema, name_ref) {
Some(NameRefClass::Definition(def)) if def == self.def => {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference {
range,
name: ast::NameLike::NameRef(name_ref.clone()),
access: reference_access(&def, &name_ref),
access: reference_access(&def, name_ref),
};
sink(file_id, reference)
}
@ -480,7 +480,7 @@ impl<'a> FindUsages<'a> {
let reference = FileReference {
range,
name: ast::NameLike::NameRef(name_ref.clone()),
access: reference_access(&def, &name_ref),
access: reference_access(&def, name_ref),
};
sink(file_id, reference)
} else {
@ -490,11 +490,9 @@ impl<'a> FindUsages<'a> {
Some(NameRefClass::FieldShorthand { local_ref: local, field_ref: field }) => {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let access = match self.def {
Definition::Field(_) if field == self.def => {
reference_access(&field, &name_ref)
}
Definition::Field(_) if field == self.def => reference_access(&field, name_ref),
Definition::Local(l) if local == l => {
reference_access(&Definition::Local(local), &name_ref)
reference_access(&Definition::Local(local), name_ref)
}
_ => return false,
};

View File

@ -382,7 +382,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
code: Option<T>,
) -> Result<(), MatchFailed> {
match (pattern, code) {
(Some(p), Some(c)) => self.attempt_match_node(phase, &p.syntax(), &c.syntax()),
(Some(p), Some(c)) => self.attempt_match_node(phase, p.syntax(), c.syntax()),
(None, None) => Ok(()),
(Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()),
(None, Some(c)) => {
@ -478,7 +478,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
if Some(first_token.text()) == next_pattern_token.as_deref() {
if let Some(SyntaxElement::Node(p)) = pattern.next() {
// We have a subtree that starts with the next token in our pattern.
self.attempt_match_token_tree(phase, &p, &n)?;
self.attempt_match_token_tree(phase, &p, n)?;
break;
}
}
@ -609,7 +609,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
expr: &ast::Expr,
) -> Result<usize, MatchFailed> {
use hir::HirDisplay;
let code_type = self.sema.type_of_expr(&expr).ok_or_else(|| {
let code_type = self.sema.type_of_expr(expr).ok_or_else(|| {
match_error!("Failed to get receiver type for `{}`", expr.syntax().text())
})?;
// Temporary needed to make the borrow checker happy.

View File

@ -84,16 +84,16 @@ impl ReplacementRenderer<'_> {
fn render_node_or_token(&mut self, node_or_token: &SyntaxElement) {
match node_or_token {
SyntaxElement::Token(token) => {
self.render_token(&token);
self.render_token(token);
}
SyntaxElement::Node(child_node) => {
self.render_node(&child_node);
self.render_node(child_node);
}
}
}
fn render_node(&mut self, node: &SyntaxNode) {
if let Some(mod_path) = self.match_info.rendered_template_paths.get(&node) {
if let Some(mod_path) = self.match_info.rendered_template_paths.get(node) {
self.out.push_str(&mod_path.to_string());
// Emit everything except for the segment's name-ref, since we already effectively
// emitted that as part of `mod_path`.
@ -107,12 +107,12 @@ impl ReplacementRenderer<'_> {
}
}
} else {
self.render_node_children(&node);
self.render_node_children(node);
}
}
fn render_token(&mut self, token: &SyntaxToken) {
if let Some(placeholder) = self.rule.get_placeholder(&token) {
if let Some(placeholder) = self.rule.get_placeholder(token) {
if let Some(placeholder_value) =
self.match_info.placeholder_values.get(&placeholder.ident)
{

View File

@ -211,7 +211,7 @@ impl<'db> ResolutionScope<'db> {
// First try resolving the whole path. This will work for things like
// `std::collections::HashMap`, but will fail for things like
// `std::collections::HashMap::new`.
if let Some(resolution) = self.scope.speculative_resolve(&path) {
if let Some(resolution) = self.scope.speculative_resolve(path) {
return Some(resolution);
}
// Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap` and if

View File

@ -173,7 +173,7 @@ impl<'db> MatchFinder<'db> {
if !is_search_permitted(code) {
return;
}
self.try_add_match(rule, &code, restrict_range, matches_out);
self.try_add_match(rule, code, restrict_range, matches_out);
// If we've got a macro call, we already tried matching it pre-expansion, which is the only
// way to match the whole macro, now try expanding it and matching the expansion.
if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {

View File

@ -75,7 +75,7 @@ pub(crate) fn single_file(code: &str) -> (ide_db::RootDatabase, FilePosition, Ve
match range_or_offset {
RangeOrOffset::Range(range) => {
position = FilePosition { file_id, offset: range.start() };
selections = vec![FileRange { file_id, range: range }];
selections = vec![FileRange { file_id, range }];
}
RangeOrOffset::Offset(offset) => {
position = FilePosition { file_id, offset };
@ -129,7 +129,7 @@ fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
let matched_strings: Vec<String> =
match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect();
if matched_strings != expected && !expected.is_empty() {
print_match_debug_info(&match_finder, position.file_id, &expected[0]);
print_match_debug_info(&match_finder, position.file_id, expected[0]);
}
assert_eq!(matched_strings, expected);
}

View File

@ -121,7 +121,7 @@ impl Match {
/// Matching errors are added to the `Match`.
pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree) -> Match {
let mut res = match_loop(pattern, &input);
let mut res = match_loop(pattern, input);
res.bound_count = count(res.bindings.bindings());
return res;
@ -202,7 +202,7 @@ impl BindingsBuilder {
}
fn push_nested(&mut self, parent: &mut BindingsIdx, child: &BindingsIdx) {
let BindingsIdx(idx, nidx) = self.copy(&child);
let BindingsIdx(idx, nidx) = self.copy(child);
self.nodes[parent.0].push(LinkNode::Node(Rc::new(BindingKind::Nested(idx, nidx))));
}
@ -221,7 +221,7 @@ impl BindingsBuilder {
fn build_inner(&self, bindings: &mut Bindings, link_nodes: &[LinkNode<Rc<BindingKind>>]) {
let mut nodes = Vec::new();
self.collect_nodes(&link_nodes, &mut nodes);
self.collect_nodes(link_nodes, &mut nodes);
for cmd in nodes {
match &**cmd {
@ -282,7 +282,7 @@ impl BindingsBuilder {
nested_refs.into_iter().for_each(|iter| {
let mut child_bindings = Bindings::default();
self.build_inner(&mut child_bindings, &iter);
self.build_inner(&mut child_bindings, iter);
nested.push(child_bindings)
})
}
@ -417,7 +417,7 @@ fn match_loop_inner<'t>(
let sep_len = item.sep.as_ref().map_or(0, Separator::tt_count);
if item.sep.is_some() && sep_idx != sep_len {
let sep = item.sep.as_ref().unwrap();
if src.clone().expect_separator(&sep, sep_idx) {
if src.clone().expect_separator(sep, sep_idx) {
item.dot.next();
item.sep_parsed = Some(sep_idx + 1);
try_push!(next_items, item);
@ -487,7 +487,7 @@ fn match_loop_inner<'t>(
item.meta_result = Some((fork, match_res));
try_push!(bb_items, item);
} else {
bindings_builder.push_optional(&mut item.bindings, &name);
bindings_builder.push_optional(&mut item.bindings, name);
item.dot.next();
cur_items.push(item);
}
@ -495,7 +495,7 @@ fn match_loop_inner<'t>(
Some(err) => {
res.add_err(err);
if let Some(fragment) = match_res.value {
bindings_builder.push_fragment(&mut item.bindings, &name, fragment);
bindings_builder.push_fragment(&mut item.bindings, name, fragment);
}
item.is_error = true;
error_items.push(item);
@ -504,7 +504,7 @@ fn match_loop_inner<'t>(
}
}
OpDelimited::Op(Op::Leaf(leaf)) => {
if let Err(err) = match_leaf(&leaf, &mut src.clone()) {
if let Err(err) = match_leaf(leaf, &mut src.clone()) {
res.add_err(err);
item.is_error = true;
} else {
@ -640,10 +640,10 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
let (iter, match_res) = item.meta_result.take().unwrap();
match match_res.value {
Some(fragment) => {
bindings_builder.push_fragment(&mut item.bindings, &name, fragment);
bindings_builder.push_fragment(&mut item.bindings, name, fragment);
}
None if match_res.err.is_none() => {
bindings_builder.push_optional(&mut item.bindings, &name);
bindings_builder.push_optional(&mut item.bindings, name);
}
_ => {}
}

View File

@ -55,7 +55,7 @@ pub(super) fn transcribe(
template: &MetaTemplate,
bindings: &Bindings,
) -> ExpandResult<tt::Subtree> {
let mut ctx = ExpandCtx { bindings: &bindings, nesting: Vec::new() };
let mut ctx = ExpandCtx { bindings, nesting: Vec::new() };
let mut arena: Vec<tt::TokenTree> = Vec::new();
expand_subtree(&mut ctx, template, None, &mut arena)
}
@ -91,12 +91,12 @@ fn expand_subtree(
Op::Leaf(tt) => arena.push(tt.clone().into()),
Op::Subtree { tokens, delimiter } => {
let ExpandResult { value: tt, err: e } =
expand_subtree(ctx, &tokens, *delimiter, arena);
expand_subtree(ctx, tokens, *delimiter, arena);
err = err.or(e);
arena.push(tt.into());
}
Op::Var { name, id, .. } => {
let ExpandResult { value: fragment, err: e } = expand_var(ctx, &name, *id);
let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id);
err = err.or(e);
push_fragment(arena, fragment);
}
@ -141,7 +141,7 @@ fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr, id: tt::TokenId) -> ExpandResult
.into();
ExpandResult::ok(Fragment::Tokens(tt))
} else {
ctx.bindings.get(&v, &mut ctx.nesting).map_or_else(
ctx.bindings.get(v, &mut ctx.nesting).map_or_else(
|e| ExpandResult { value: Fragment::Tokens(tt::TokenTree::empty()), err: Some(e) },
|b| ExpandResult::ok(b.clone()),
)

View File

@ -280,8 +280,8 @@ impl Rule {
.expect_subtree()
.map_err(|()| ParseError::Expected("expected subtree".to_string()))?;
let lhs = MetaTemplate(parse_pattern(&lhs)?);
let rhs = MetaTemplate(parse_template(&rhs)?);
let lhs = MetaTemplate(parse_pattern(lhs)?);
let rhs = MetaTemplate(parse_template(rhs)?);
Ok(crate::Rule { lhs, rhs })
}
@ -290,7 +290,7 @@ impl Rule {
fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
for op in pattern.iter() {
match op {
Op::Subtree { tokens, .. } => validate(&tokens)?,
Op::Subtree { tokens, .. } => validate(tokens)?,
Op::Repeat { tokens: subtree, separator, .. } => {
// Checks that no repetition which could match an empty token
// https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558

View File

@ -42,7 +42,7 @@ impl<'a> OpDelimitedIter<'a> {
}
pub(crate) fn reset(&self) -> Self {
Self { inner: &self.inner, idx: 0, delimited: self.delimited }
Self { inner: self.inner, idx: 0, delimited: self.delimited }
}
}
@ -126,11 +126,11 @@ impl Separator {
}
pub(crate) fn parse_template(template: &tt::Subtree) -> Result<Vec<Op>, ParseError> {
parse_inner(&template, Mode::Template).into_iter().collect()
parse_inner(template, Mode::Template).into_iter().collect()
}
pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result<Vec<Op>, ParseError> {
parse_inner(&pattern, Mode::Pattern).into_iter().collect()
parse_inner(pattern, Mode::Pattern).into_iter().collect()
}
#[derive(Clone, Copy)]
@ -140,7 +140,7 @@ enum Mode {
}
fn parse_inner(tt: &tt::Subtree, mode: Mode) -> Vec<Result<Op, ParseError>> {
let mut src = TtIter::new(&tt);
let mut src = TtIter::new(tt);
std::iter::from_fn(move || {
let first = src.next()?;
Some(next_op(first, &mut src, mode))
@ -171,7 +171,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul
match second {
tt::TokenTree::Subtree(subtree) => {
let (separator, kind) = parse_repeat(src)?;
let tokens = parse_inner(&subtree, mode)
let tokens = parse_inner(subtree, mode)
.into_iter()
.collect::<Result<Vec<Op>, ParseError>>()?;
Op::Repeat { tokens: MetaTemplate(tokens), separator, kind }
@ -191,7 +191,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul
Op::Var { name, kind, id }
}
tt::Leaf::Literal(lit) => {
if is_boolean_literal(&lit) {
if is_boolean_literal(lit) {
let name = lit.text.clone();
let kind = eat_fragment_kind(src, mode)?;
let id = lit.id;
@ -206,7 +206,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul
tt::TokenTree::Leaf(tt) => Op::Leaf(tt.clone()),
tt::TokenTree::Subtree(subtree) => {
let tokens =
parse_inner(&subtree, mode).into_iter().collect::<Result<Vec<Op>, ParseError>>()?;
parse_inner(subtree, mode).into_iter().collect::<Result<Vec<Op>, ParseError>>()?;
Op::Subtree { tokens: MetaTemplate(tokens), delimiter: subtree.delimiter }
}
};

View File

@ -22,7 +22,7 @@ impl<'a> SubtreeTokenSource {
#[cfg(test)]
pub(crate) fn text(&self) -> SmolStr {
match self.cached.get(self.curr.1) {
Some(ref tt) => tt.text.clone(),
Some(tt) => tt.text.clone(),
_ => SmolStr::new(""),
}
}
@ -59,7 +59,7 @@ impl<'a> SubtreeTokenSource {
current = match tt {
Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
cached.push(convert_leaf(&leaf));
cached.push(convert_leaf(leaf));
cursor.bump()
}
Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
@ -114,7 +114,7 @@ impl<'a> TokenSource for SubtreeTokenSource {
/// Is the current token a specified keyword?
fn is_keyword(&self, kw: &str) -> bool {
match self.cached.get(self.curr.1) {
Some(ref t) => t.text == *kw,
Some(t) => t.text == *kw,
_ => false,
}
}

Some files were not shown because too many files have changed in this diff Show More