Merge #138
138: Fix some clippy lints r=matklad a=alanhdu
I went ahead and fixed all the clippy lints (there were a couple I thought would be better left unfixed, so I added `cfg` statements to allow them instead) and also re-enabled clippy and rustfmt in CI.
They were disabled with `no time to explain, disable clippy checks`, so hopefully this doesn't go against whatever the reason was at the time 😆.
One question about the CI though: right now, it's an allowed failure that runs against the latest nightly each time. Would it be better to pin it to a specific nightly (or use the `beta` versions) to lower the churn?
Co-authored-by: Alan Du <alanhdu@gmail.com>
Commit: 27694abd94
```diff
@@ -87,12 +87,18 @@ salsa::query_group! {
     }
 }
 
-#[derive(Default, Debug, PartialEq, Eq)]
+#[derive(Default, Debug, Eq)]
 pub(crate) struct FileSet {
     pub(crate) files: FxHashSet<FileId>,
     pub(crate) resolver: FileResolverImp,
 }
 
+impl PartialEq for FileSet {
+    fn eq(&self, other: &FileSet) -> bool {
+        self.files == other.files && self.resolver == other.resolver
+    }
+}
+
+impl Hash for FileSet {
+    fn hash<H: Hasher>(&self, hasher: &mut H) {
+        let mut files = self.files.iter().cloned().collect::<Vec<_>>();
```
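This hunk trades the derived `PartialEq` for manual `PartialEq` and `Hash` impls. The `hash` body is cut off in this view, but the visible line collects the set into a `Vec`, presumably so it can be sorted: a hash set has no stable iteration order, so hashing it directly would be nondeterministic. A self-contained sketch of that pattern, with std's `HashSet` standing in for `FxHashSet`:

```rust
use std::collections::hash_map::DefaultHasher;
use std::collections::HashSet;
use std::hash::{Hash, Hasher};

struct FileSet {
    files: HashSet<u32>,
}

impl Hash for FileSet {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        // HashSet iteration order is arbitrary; sort the elements so that
        // equal sets always produce equal hashes.
        let mut files = self.files.iter().cloned().collect::<Vec<_>>();
        files.sort();
        files.hash(hasher);
    }
}

fn main() {
    let fs = FileSet {
        files: [3, 1, 2].iter().cloned().collect(),
    };
    let mut hasher = DefaultHasher::new();
    fs.hash(&mut hasher);
    println!("{:x}", hasher.finish());
}
```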
```diff
@@ -22,7 +22,7 @@ impl ModuleDescriptor {
     }
 }
 
-fn modules<'a>(root: ast::Root<'a>) -> impl Iterator<Item = (SmolStr, ast::Module<'a>)> {
+fn modules(root: ast::Root<'_>) -> impl Iterator<Item = (SmolStr, ast::Module<'_>)> {
     root.modules().filter_map(|module| {
         let name = module.name()?.text();
         if !module.has_semi() {
```
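This signature change matches clippy's `needless_lifetimes`: the named lifetime ties a single input to the output, so it can be elided. The same rewrite on a toy function (names here are illustrative):

```rust
struct Root<'a> {
    text: &'a str,
}

// was: fn first_word<'a>(root: Root<'a>) -> Option<&'a str>
// With exactly one input lifetime, elision fills in the output lifetime.
fn first_word(root: Root<'_>) -> Option<&str> {
    root.text.split_whitespace().next()
}

fn main() {
    assert_eq!(first_word(Root { text: "hello world" }), Some("hello"));
}
```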
```diff
@@ -184,8 +184,7 @@ impl Link {
         root: ast::Root<'a>,
     ) -> ast::Module<'a> {
         modules(root)
-            .filter(|(name, _)| name == &tree.link(self).name)
-            .next()
+            .find(|(name, _)| name == &tree.link(self).name)
             .unwrap()
             .1
     }
```
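`.filter(..).next()` is clippy's `filter_next` lint; `.find(..)` short-circuits the same way with less machinery. In isolation:

```rust
fn main() {
    let xs = [1, 2, 3, 4];
    // was: xs.iter().filter(|&&x| x % 2 == 0).next()
    let first_even = xs.iter().find(|&&x| x % 2 == 0);
    assert_eq!(first_even, Some(&2));
}
```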
```diff
@@ -426,12 +426,12 @@ impl AnalysisImpl {
                 .text()
                 .slice(range_search)
                 .to_string()
-                .matches(",")
+                .matches(',')
                 .count();
 
             // If we have a method call eat the first param since it's just self.
             if has_self {
-                commas = commas + 1;
+                commas += 1;
             }
 
             current_parameter = Some(commas);
```
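Two lints in one hunk: `single_char_pattern` (a `char` pattern instead of a one-character `&str`) and `assign_op_pattern` (`x += 1` instead of `x = x + 1`). A runnable distillation with a made-up input string:

```rust
fn main() {
    let params = "self, a, b";
    // single_char_pattern: a char avoids constructing a &str matcher.
    let mut commas = params.matches(',').count();
    // assign_op_pattern: compound assignment instead of `x = x + 1`.
    if params.starts_with("self") {
        commas += 1;
    }
    assert_eq!(commas, 3);
}
```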
```diff
@@ -63,7 +63,7 @@ fn extend_ws(root: SyntaxNodeRef, ws: SyntaxNodeRef, offset: TextUnit) -> TextRange {
     let prefix = TextRange::from_to(ws.range().start(), offset) - ws.range().start();
     let ws_suffix = &ws_text.as_str()[suffix];
     let ws_prefix = &ws_text.as_str()[prefix];
-    if ws_text.contains("\n") && !ws_suffix.contains("\n") {
+    if ws_text.contains('\n') && !ws_suffix.contains('\n') {
        if let Some(node) = ws.next_sibling() {
            let start = match ws_prefix.rfind('\n') {
                Some(idx) => ws.range().start() + TextUnit::from((idx + 1) as u32),
```
```diff
@@ -38,12 +38,12 @@ pub fn folding_ranges(file: &File) -> Vec<Fold> {
                 continue;
             }
             if node.kind() == COMMENT {
-                contiguous_range_for_comment(node, &mut visited_comments).map(|range| {
+                if let Some(range) = contiguous_range_for_comment(node, &mut visited_comments) {
                     res.push(Fold {
                         range,
                         kind: FoldKind::Comment,
                     })
-                });
+                }
             }
         }
     }
```
```diff
@@ -29,10 +29,10 @@ impl LineIndex {
         let line = self.newlines.upper_bound(&offset) - 1;
         let line_start_offset = self.newlines[line];
         let col = offset - line_start_offset;
-        return LineCol {
+        LineCol {
             line: line as u32,
             col,
-        };
+        }
     }
 
     pub fn offset(&self, line_col: LineCol) -> TextUnit {
```
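This is clippy's `needless_return`: the struct literal is already the block's tail expression, so the `return` keyword and trailing semicolon are dropped. Reduced to a standalone function:

```rust
struct LineCol {
    line: u32,
    col: u32,
}

// The block's final expression is its value; `return ...;` was redundant.
fn line_col(line: u32, col: u32) -> LineCol {
    LineCol { line, col }
}

fn main() {
    let lc = line_col(3, 7);
    assert_eq!((lc.line, lc.col), (3, 7));
}
```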
```diff
@@ -22,14 +22,14 @@ impl ModuleScope {
         let mut entries = Vec::new();
         for item in items {
             let entry = match item {
-                ast::ModuleItem::StructDef(item) => Entry::new(item),
-                ast::ModuleItem::EnumDef(item) => Entry::new(item),
-                ast::ModuleItem::FnDef(item) => Entry::new(item),
-                ast::ModuleItem::ConstDef(item) => Entry::new(item),
-                ast::ModuleItem::StaticDef(item) => Entry::new(item),
-                ast::ModuleItem::TraitDef(item) => Entry::new(item),
-                ast::ModuleItem::TypeDef(item) => Entry::new(item),
-                ast::ModuleItem::Module(item) => Entry::new(item),
+                ast::ModuleItem::StructDef(item) => Entry::new_item(item),
+                ast::ModuleItem::EnumDef(item) => Entry::new_item(item),
+                ast::ModuleItem::FnDef(item) => Entry::new_item(item),
+                ast::ModuleItem::ConstDef(item) => Entry::new_item(item),
+                ast::ModuleItem::StaticDef(item) => Entry::new_item(item),
+                ast::ModuleItem::TraitDef(item) => Entry::new_item(item),
+                ast::ModuleItem::TypeDef(item) => Entry::new_item(item),
+                ast::ModuleItem::Module(item) => Entry::new_item(item),
                 ast::ModuleItem::UseItem(item) => {
                     if let Some(tree) = item.use_tree() {
                         collect_imports(tree, &mut entries);
```
```diff
@@ -50,7 +50,7 @@ impl ModuleScope {
 }
 
 impl Entry {
-    fn new<'a>(item: impl ast::NameOwner<'a>) -> Option<Entry> {
+    fn new_item<'a>(item: impl ast::NameOwner<'a>) -> Option<Entry> {
         let name = item.name()?;
         Some(Entry {
             node: name.syntax().owned(),
```
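The `new` → `new_item` rename in these two hunks sidesteps clippy's `new_ret_no_self`, which at the time flagged any `new` that didn't return `Self` (here it returns `Option<Entry>`). A sketch with a simplified `Entry`:

```rust
struct Entry {
    name: String,
}

impl Entry {
    // was `fn new`, which clippy flagged because it returns
    // Option<Entry> rather than Self.
    fn new_item(name: &str) -> Option<Entry> {
        if name.is_empty() {
            return None;
        }
        Some(Entry {
            name: name.to_string(),
        })
    }
}

fn main() {
    assert_eq!(Entry::new_item("foo").map(|e| e.name), Some("foo".to_string()));
    assert!(Entry::new_item("").is_none());
}
```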
```diff
@@ -54,15 +54,15 @@ pub fn file_structure(file: &File) -> Vec<StructureNode> {
     let mut res = Vec::new();
     let mut stack = Vec::new();
 
     for event in file.syntax().preorder() {
         match event {
-            WalkEvent::Enter(node) => match structure_node(node) {
-                Some(mut symbol) => {
+            WalkEvent::Enter(node) => {
+                if let Some(mut symbol) = structure_node(node) {
                     symbol.parent = stack.last().map(|&n| n);
                     stack.push(res.len());
                     res.push(symbol);
                 }
-                None => (),
-            },
+            }
             WalkEvent::Leave(node) => {
                 if structure_node(node).is_some() {
```
```diff
@@ -58,7 +58,7 @@ pub fn join_lines(file: &File, range: TextRange) -> LocalEdit {
 pub fn on_enter(file: &File, offset: TextUnit) -> Option<LocalEdit> {
     let comment = find_leaf_at_offset(file.syntax(), offset)
         .left_biased()
-        .and_then(|it| ast::Comment::cast(it))?;
+        .and_then(ast::Comment::cast)?;
 
     if let ast::CommentFlavor::Multiline = comment.flavor() {
         return None;
```
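`|it| ast::Comment::cast(it)` only forwards its argument, so clippy's `redundant_closure` suggests passing the function itself. The same shape with a local helper (`parse_num` is a stand-in for `ast::Comment::cast`):

```rust
fn parse_num(s: &str) -> Option<i32> {
    s.parse().ok()
}

fn main() {
    let input = Some("42");
    // was: input.and_then(|it| parse_num(it))
    let n = input.and_then(parse_num);
    assert_eq!(n, Some(42));
}
```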
```diff
@@ -65,7 +65,7 @@ impl ConvWith for TextUnit {
     fn conv_with(self, line_index: &LineIndex) -> Position {
         let line_col = line_index.line_col(self);
         // TODO: UTF-16
-        Position::new(line_col.line as u64, u32::from(line_col.col) as u64)
+        Position::new(u64::from(line_col.line), u64::from(u32::from(line_col.col)))
     }
 }
 
```
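This and the next hunk are clippy's `cast_lossless`: `u64::from` is preferred over `as u64` because the `From` conversion only compiles while the conversion stays widening, whereas `as` would silently truncate if the types ever changed. For example:

```rust
fn main() {
    let line: u32 = 10;
    let as_cast = line as u64;   // lossless today, but fragile under refactoring
    let from = u64::from(line);  // guaranteed lossless by the type system
    assert_eq!(as_cast, from);
}
```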
```diff
@@ -192,7 +192,7 @@ impl TryConvWith for SourceChange {
             .map(|it| it.edits.as_slice())
             .unwrap_or(&[]);
         let line_col = translate_offset_with_edit(&*line_index, pos.offset, edits);
-        let position = Position::new(line_col.line as u64, u32::from(line_col.col) as u64);
+        let position = Position::new(u64::from(line_col.line), u64::from(u32::from(line_col.col)));
         Some(TextDocumentPositionParams {
             text_document: TextDocumentIdentifier::new(pos.file_id.try_conv_with(world)?),
             position,
```
```diff
@@ -18,7 +18,7 @@ fn main() -> Result<()> {
         .directory("log")
         .start()?;
     info!("lifecycle: server started");
-    match ::std::panic::catch_unwind(|| main_inner()) {
+    match ::std::panic::catch_unwind(main_inner) {
         Ok(res) => {
             info!("lifecycle: terminating process with {:?}", res);
             res
```
```diff
@@ -154,8 +154,8 @@ pub fn handle_workspace_symbol(
     world: ServerWorld,
     params: req::WorkspaceSymbolParams,
 ) -> Result<Option<Vec<SymbolInformation>>> {
-    let all_symbols = params.query.contains("#");
-    let libs = params.query.contains("*");
+    let all_symbols = params.query.contains('#');
+    let libs = params.query.contains('*');
     let query = {
         let query: String = params
             .query
```
```diff
@@ -279,8 +279,8 @@ pub fn handle_runnables(
         .filter_map(|ws| {
             let tgt = ws.target_by_root(path)?;
             Some((
-                tgt.package(ws).name(ws).clone(),
-                tgt.name(ws).clone(),
+                tgt.package(ws).name(ws),
+                tgt.name(ws),
                 tgt.kind(ws),
             ))
         })
```
```diff
@@ -173,7 +173,6 @@ pub fn workspace_loader() -> (Worker<PathBuf, Result<CargoWorkspace>>, ThreadWatcher) {
         1,
         |input_receiver, output_sender| {
             input_receiver
-                .into_iter()
                 .map(|path| CargoWorkspace::from_cargo_metadata(path.as_path()))
                 .for_each(|it| output_sender.send(it))
         },
```
```diff
@@ -73,9 +73,7 @@ impl ServerWorldState {
         events
             .into_iter()
             .map(|event| {
-                let text = match event.kind {
-                    FileEventKind::Add(text) => text,
-                };
+                let FileEventKind::Add(text) = event.kind;
                 (event.path, text)
             })
             .map(|(path, text)| (pm.get_or_insert(path, Root::Lib), text))
```
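Since `FileEventKind` evidently has a single variant here, the `match` is infallible, and clippy's `infallible_destructuring_match` suggests a plain `let` pattern instead. Standalone:

```rust
enum FileEventKind {
    Add(String),
}

fn main() {
    let kind = FileEventKind::Add("main.rs".to_string());
    // was: let text = match kind { FileEventKind::Add(text) => text };
    // A single-variant enum makes the pattern irrefutable, so `let` works.
    let FileEventKind::Add(text) = kind;
    println!("{}", text);
}
```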
```diff
@@ -17,8 +17,7 @@ impl<I, O> Worker<I, O> {
         I: Send + 'static,
         O: Send + 'static,
     {
-        let ((inp, out), inp_r, out_s) = worker_chan(buf);
-        let worker = Worker { inp, out };
+        let (worker, inp_r, out_s) = worker_chan(buf);
         let watcher = ThreadWatcher::spawn(name, move || f(inp_r, out_s));
         (worker, watcher)
     }
```
```diff
@@ -67,11 +66,14 @@ impl ThreadWatcher {
 /// Sets up worker channels in a deadlock-avoiding way.
 /// If one sets both input and output buffers to a fixed size,
 /// a worker might get stuck.
-fn worker_chan<I, O>(buf: usize) -> ((Sender<I>, Receiver<O>), Receiver<I>, Sender<O>) {
+fn worker_chan<I, O>(buf: usize) -> (Worker<I, O>, Receiver<I>, Sender<O>) {
     let (input_sender, input_receiver) = bounded::<I>(buf);
     let (output_sender, output_receiver) = unbounded::<O>();
     (
-        (input_sender, output_receiver),
+        Worker {
+            inp: input_sender,
+            out: output_receiver,
+        },
         input_receiver,
         output_sender,
     )
```
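Returning the `Worker` struct instead of a tuple-of-tuples addresses clippy's `type_complexity` and drops the re-packing step at the call site (see the `Worker::spawn` hunk above). A self-contained sketch of the same shape, with std's `mpsc` standing in for the crossbeam `bounded`/`unbounded` channels used here:

```rust
use std::sync::mpsc::{channel, Receiver, Sender};

struct Worker<I, O> {
    inp: Sender<I>,
    out: Receiver<O>,
}

// was: fn worker_chan<I, O>() -> ((Sender<I>, Receiver<O>), Receiver<I>, Sender<O>)
fn worker_chan<I, O>() -> (Worker<I, O>, Receiver<I>, Sender<O>) {
    let (input_sender, input_receiver) = channel::<I>();
    let (output_sender, output_receiver) = channel::<O>();
    (
        // Naming the pair as a struct both shortens the signature and
        // saves callers from destructuring and re-wrapping it.
        Worker {
            inp: input_sender,
            out: output_receiver,
        },
        input_receiver,
        output_sender,
    )
}

fn main() {
    let (worker, inp_r, out_s) = worker_chan::<i32, i32>();
    worker.inp.send(1).unwrap();
    out_s.send(inp_r.recv().unwrap() + 1).unwrap();
    assert_eq!(worker.out.recv().unwrap(), 2);
}
```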
```diff
@@ -24,7 +24,6 @@ pub fn roots_loader() -> (Worker<PathBuf, (PathBuf, Vec<FileEvent>)>, ThreadWatcher) {
         128,
         |input_receiver, output_sender| {
             input_receiver
-                .into_iter()
                 .map(|path| {
                     debug!("loading {} ...", path.as_path().display());
                     let events = load_root(path.as_path());
```
```diff
@@ -30,7 +30,8 @@ pub fn find_leaf_at_offset(node: SyntaxNodeRef, offset: TextUnit) -> LeafAtOffset {
     let left = children.next().unwrap();
     let right = children.next();
     assert!(children.next().is_none());
-    return if let Some(right) = right {
+
+    if let Some(right) = right {
         match (
             find_leaf_at_offset(left, offset),
             find_leaf_at_offset(right, offset),
```
```diff
@@ -42,10 +43,10 @@ pub fn find_leaf_at_offset(node: SyntaxNodeRef, offset: TextUnit) -> LeafAtOffset {
         }
     } else {
         find_leaf_at_offset(left, offset)
-    };
+    }
 }
 
-#[derive(Clone, Copy, Debug)]
+#[derive(Clone, Debug)]
 pub enum LeafAtOffset<'a> {
     None,
     Single(SyntaxNodeRef<'a>),
```
```diff
@@ -259,9 +259,8 @@ impl<'a, N: AstNode<'a>> Iterator for AstChildren<'a, N> {
     type Item = N;
     fn next(&mut self) -> Option<N> {
         loop {
-            match N::cast(self.inner.next()?) {
-                Some(n) => return Some(n),
-                None => (),
+            if let Some(n) = N::cast(self.inner.next()?) {
+                return Some(n);
             }
         }
     }
```
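This `match` whose `None` arm does nothing (likely clippy's `single_match`, which recurs in several hunks below) reads more directly as `if let` inside the `loop`. A toy version of the same control flow, with a hypothetical `check` helper standing in for `N::cast`:

```rust
// Stand-in for N::cast / literal(p) in the hunks above and below.
fn check(x: i32) -> Option<i32> {
    if x > 0 {
        Some(x)
    } else {
        None
    }
}

fn first_positive(xs: &[i32]) -> Option<i32> {
    let mut iter = xs.iter();
    loop {
        // was: match check(*iter.next()?) { Some(n) => return Some(n), None => (), }
        if let Some(n) = check(*iter.next()?) {
            return Some(n);
        }
    }
}

fn main() {
    assert_eq!(first_positive(&[-3, 0, 5]), Some(5));
    assert_eq!(first_positive(&[-1, -2]), None);
}
```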
```diff
@@ -62,9 +62,8 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet = token_set_union![
 const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW];
 
 pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<CompletedMarker> {
-    match literal(p) {
-        Some(m) => return Some(m),
-        None => (),
+    if let Some(m) = literal(p) {
+        return Some(m);
     }
     if paths::is_path_start(p) || p.at(L_ANGLE) {
         return Some(path_expr(p, r));
```
```diff
@@ -352,7 +352,7 @@ fn macro_call(p: &mut Parser) -> BlockLike {
 pub(super) fn macro_call_after_excl(p: &mut Parser) -> BlockLike {
     p.expect(EXCL);
     p.eat(IDENT);
-    let flavor = match p.current() {
+    match p.current() {
         L_CURLY => {
             token_tree(p);
             BlockLike::Block
```
```diff
@@ -365,9 +365,7 @@ pub(super) fn macro_call_after_excl(p: &mut Parser) -> BlockLike {
             p.error("expected `{`, `[`, `(`");
             BlockLike::NotBlock
         }
-    };
-
-    flavor
+    }
 }
 
 pub(crate) fn token_tree(p: &mut Parser) {
```
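Binding the `match` result to `flavor` only to return it on the next line is clippy's `let_and_return`; the `match` becomes the function's tail expression, as the two hunks above show together. Reduced:

```rust
fn classify(c: char) -> &'static str {
    // was: let flavor = match c { ... }; flavor
    match c {
        '{' => "block",
        '(' | '[' => "not a block",
        _ => "unexpected",
    }
}

fn main() {
    assert_eq!(classify('{'), "block");
    assert_eq!(classify('x'), "unexpected");
}
```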
```diff
@@ -49,9 +49,8 @@ fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> {
     // "hello" => (),
     // }
     // }
-    match expressions::literal(p) {
-        Some(m) => return Some(m),
-        None => (),
+    if let Some(m) = expressions::literal(p) {
+        return Some(m);
     }
 
     let m = match la0 {
```
```diff
@@ -31,7 +31,7 @@ impl<'s> Ptr<'s> {
     /// For example, 0 will return the current token, 1 will return the next, etc.
     pub fn nth(&self, n: u32) -> Option<char> {
         let mut chars = self.chars().peekable();
-        chars.by_ref().skip(n as usize).next()
+        chars.by_ref().nth(n as usize)
     }
 
     /// Checks whether the current character is `c`.
```
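`.skip(n).next()` is clippy's `iter_skip_next`; `nth(n)` advances the iterator directly without building a `Skip` adapter. For example:

```rust
fn main() {
    let mut chars = "hello".chars();
    // was: chars.by_ref().skip(1).next()
    assert_eq!(chars.by_ref().nth(1), Some('e'));
    // nth consumed two items; the iterator resumes after them.
    assert_eq!(chars.next(), Some('l'));
}
```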
```diff
@@ -98,17 +98,18 @@ fn is_contextual_kw(text: &str) -> bool {
     }
 }
 
-fn find_reparsable_node<'node>(
-    node: SyntaxNodeRef<'node>,
+type ParseFn = fn(&mut Parser);
+fn find_reparsable_node(
+    node: SyntaxNodeRef<'_>,
     range: TextRange,
-) -> Option<(SyntaxNodeRef<'node>, fn(&mut Parser))> {
+) -> Option<(SyntaxNodeRef<'_>, ParseFn)> {
     let node = algo::find_covering_node(node, range);
     return node
         .ancestors()
         .filter_map(|node| reparser(node).map(|r| (node, r)))
         .next();
 
-    fn reparser(node: SyntaxNodeRef) -> Option<fn(&mut Parser)> {
+    fn reparser(node: SyntaxNodeRef) -> Option<ParseFn> {
         let res = match node.kind() {
             BLOCK => grammar::block,
             NAMED_FIELD_DEF_LIST => grammar::named_field_def_list,
```
```diff
@@ -134,7 +135,7 @@ fn find_reparsable_node<'node>(
 }
 
 fn is_balanced(tokens: &[Token]) -> bool {
-    if tokens.len() == 0
+    if tokens.is_empty()
         || tokens.first().unwrap().kind != L_CURLY
         || tokens.last().unwrap().kind != R_CURLY
     {
```
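`tokens.len() == 0` triggers clippy's `len_zero`; `is_empty()` states the intent and exists even on types whose `len()` might be expensive or absent. A runnable miniature of `is_balanced`, with `char` tokens standing in for the parser's `Token` type:

```rust
fn is_balanced(tokens: &[char]) -> bool {
    // was: tokens.len() == 0
    if tokens.is_empty() || *tokens.first().unwrap() != '{' || *tokens.last().unwrap() != '}' {
        return false;
    }
    true
}

fn main() {
    assert!(is_balanced(&['{', 'x', '}']));
    assert!(!is_balanced(&[]));
}
```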
```diff
@@ -5,7 +5,7 @@ use std::fmt::Write;
 
 /// Parse a file and create a string representation of the resulting parse tree.
 pub fn dump_tree(syntax: SyntaxNodeRef) -> String {
-    let mut errors: Vec<_> = syntax.root_data().iter().cloned().collect();
+    let mut errors: Vec<_> = syntax.root_data().to_vec();
     errors.sort_by_key(|e| e.offset);
     let mut err_pos = 0;
     let mut level = 0;
```
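`.iter().cloned().collect()` on a slice is exactly what `to_vec()` does (clippy's `iter_cloned_collect`):

```rust
fn main() {
    let errors: &[i32] = &[3, 1, 2];
    // was: errors.iter().cloned().collect::<Vec<_>>()
    let mut errors = errors.to_vec();
    errors.sort_by_key(|&e| e); // mirrors the sort_by_key in the hunk
    assert_eq!(errors, vec![1, 2, 3]);
}
```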
```diff
@@ -42,7 +42,7 @@ pub fn dump_tree(syntax: SyntaxNodeRef) -> String {
         writeln!(buf, "err: `{}`", err.msg).unwrap();
     }
 
-    return buf;
+    buf
 }
 
 pub fn check_fuzz_invariants(text: &str) {
```
```diff
@@ -50,7 +50,7 @@ pub fn collect_tests(s: &str) -> Vec<(usize, Test)> {
             block.map(|(_, line)| line).chain(::std::iter::once("")),
             "\n",
         );
-        assert!(!text.trim().is_empty() && text.ends_with("\n"));
+        assert!(!text.trim().is_empty() && text.ends_with('\n'));
         res.push((start_line, Test { name, text }))
     }
     res
```
```diff
@@ -112,9 +112,8 @@ fn existing_tests(dir: &Path) -> Result<HashMap<String, (PathBuf, Test)>> {
             name: name.clone(),
             text,
         };
-        match res.insert(name, (path, test)) {
-            Some(old) => println!("Duplicate test: {:?}", old),
-            None => (),
+        if let Some(old) = res.insert(name, (path, test)) {
+            println!("Duplicate test: {:?}", old);
         }
     }
     Ok(res)
```