Merge remote-tracking branch 'upstream/master' into 503-hover-doc-links

Zac Pullar-Strecker 2020-08-25 16:44:36 +12:00
commit b835f06cec
46 changed files with 970 additions and 846 deletions

.github/workflows/publish.yml (new file, 44 lines)

@ -0,0 +1,44 @@
name: publish
on:
workflow_dispatch: # We can add version input when 1.0 is released and scheduled releases are removed
schedule:
- cron: "0 0 * * *" # midnight UTC
push:
branches:
- release
jobs:
publish:
name: publish
runs-on: ubuntu-16.04
steps:
- name: Checkout repository
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Install Rust toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
override: true
- name: Install cargo-workspaces
uses: actions-rs/install@v0.1
with:
crate: cargo-workspaces
- name: Release
env:
CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
PATCH: ${{ github.run_number }}
shell: bash
run: |
git config --global user.email "runner@gha.local"
git config --global user.name "Github Action"
rm Cargo.lock
cargo workspaces rename ra_ap_%n
cargo workspaces publish --yes --force '*' --exact --no-git-commit --allow-dirty --skip-published custom 0.0.$PATCH
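Editorial note on the workflow above: `cargo workspaces rename ra_ap_%n` renames every workspace member to an `ra_ap_*` crate before publishing it to crates.io with a synthesized `0.0.$PATCH` version; this is also why the manifests later in this diff add explicit `version = "0.0.0"` fields to their path dependencies, since cargo requires a version for every dependency of a published crate. As a minimal sketch of what a downstream consumer of the published crates might look like (the dependency name follows the rename scheme; the API calls are assumptions, not taken from this diff):

```rust
// Hypothetical downstream crate depending on the published `ra_ap_syntax`
// (name per the `ra_ap_%n` rename scheme above, version `0.0.<run number>`).
// The API used here is an assumption about that crate, not part of this diff.
use ra_ap_syntax::{AstNode, SourceFile};

fn main() {
    let parse = SourceFile::parse("fn main() {}");
    let file = parse.tree();
    // Print the syntax tree of the parsed file.
    println!("{:#?}", file.syntax());
}
```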

Cargo.lock (generated)

@ -1718,9 +1718,9 @@ dependencies = [
[[package]]
name = "ungrammar"
version = "1.1.2"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bab6142ac77be714b1ea78faca6efaed5478c50724786b0fe80d8528d10692b3"
checksum = "ca4d39065b45f658d33013f7cc93ee050708cd543f6e07dd15b4293fcf217e12"
[[package]]
name = "unicase"


@ -1,6 +1,7 @@
[package]
name = "arena"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"


@ -1,6 +1,7 @@
[package]
name = "assists"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -13,11 +14,11 @@ rustc-hash = "1.1.0"
itertools = "0.9.0"
either = "1.5.3"
stdx = { path = "../stdx" }
syntax = { path = "../syntax" }
text_edit = { path = "../text_edit" }
profile = { path = "../profile" }
base_db = { path = "../base_db" }
ide_db = { path = "../ide_db" }
hir = { path = "../hir" }
test_utils = { path = "../test_utils" }
stdx = { path = "../stdx", version = "0.0.0" }
syntax = { path = "../syntax", version = "0.0.0" }
text_edit = { path = "../text_edit", version = "0.0.0" }
profile = { path = "../profile", version = "0.0.0" }
base_db = { path = "../base_db", version = "0.0.0" }
ide_db = { path = "../ide_db", version = "0.0.0" }
hir = { path = "../hir", version = "0.0.0" }
test_utils = { path = "../test_utils", version = "0.0.0" }


@ -73,10 +73,6 @@ impl<'a> AssistContext<'a> {
self.sema.db
}
pub(crate) fn source_file(&self) -> &SourceFile {
&self.source_file
}
// NB, this ignores active selection.
pub(crate) fn offset(&self) -> TextSize {
self.frange.range.start()


@ -1,10 +1,10 @@
use either::Either;
use hir::{AssocItem, MacroDef, ModuleDef, Name, PathResolution, ScopeDef, SemanticsScope};
use hir::{AssocItem, MacroDef, Module, ModuleDef, Name, PathResolution, ScopeDef};
use ide_db::{
defs::{classify_name_ref, Definition, NameRefClass},
RootDatabase,
search::SearchScope,
};
use syntax::{algo, ast, match_ast, AstNode, SyntaxNode, SyntaxToken, T};
use syntax::{algo, ast, AstNode, Direction, SyntaxNode, SyntaxToken, T};
use crate::{
assist_context::{AssistBuilder, AssistContext, Assists},
@ -38,140 +38,259 @@ use crate::{
// ```
pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let star = ctx.find_token_at_offset(T![*])?;
let mod_path = find_mod_path(&star)?;
let module = match ctx.sema.resolve_path(&mod_path)? {
let (parent, mod_path) = find_parent_and_path(&star)?;
let target_module = match ctx.sema.resolve_path(&mod_path)? {
PathResolution::Def(ModuleDef::Module(it)) => it,
_ => return None,
};
let source_file = ctx.source_file();
let scope = ctx.sema.scope_at_offset(source_file.syntax(), ctx.offset());
let current_scope = ctx.sema.scope(&star.parent());
let current_module = current_scope.module()?;
let defs_in_mod = find_defs_in_mod(ctx, scope, module)?;
let name_refs_in_source_file =
source_file.syntax().descendants().filter_map(ast::NameRef::cast).collect();
let used_names = find_used_names(ctx, defs_in_mod, name_refs_in_source_file);
let refs_in_target = find_refs_in_mod(ctx, target_module, Some(current_module))?;
let imported_defs = find_imported_defs(ctx, star)?;
let names_to_import = find_names_to_import(ctx, refs_in_target, imported_defs);
let parent = star.parent().parent()?;
let target = parent.clone().either(|n| n.syntax().clone(), |n| n.syntax().clone());
acc.add(
AssistId("expand_glob_import", AssistKind::RefactorRewrite),
"Expand glob import",
parent.text_range(),
target.text_range(),
|builder| {
replace_ast(builder, &parent, mod_path, used_names);
replace_ast(builder, parent, mod_path, names_to_import);
},
)
}
fn find_mod_path(star: &SyntaxToken) -> Option<ast::Path> {
star.ancestors().find_map(|n| ast::UseTree::cast(n).and_then(|u| u.path()))
fn find_parent_and_path(
star: &SyntaxToken,
) -> Option<(Either<ast::UseTree, ast::UseTreeList>, ast::Path)> {
return star.ancestors().find_map(|n| {
find_use_tree_list(n.clone())
.and_then(|(u, p)| Some((Either::Right(u), p)))
.or_else(|| find_use_tree(n).and_then(|(u, p)| Some((Either::Left(u), p))))
});
fn find_use_tree_list(n: SyntaxNode) -> Option<(ast::UseTreeList, ast::Path)> {
let use_tree_list = ast::UseTreeList::cast(n)?;
let path = use_tree_list.parent_use_tree().path()?;
Some((use_tree_list, path))
}
fn find_use_tree(n: SyntaxNode) -> Option<(ast::UseTree, ast::Path)> {
let use_tree = ast::UseTree::cast(n)?;
let path = use_tree.path()?;
Some((use_tree, path))
}
}
#[derive(PartialEq)]
#[derive(Debug, PartialEq, Clone)]
enum Def {
ModuleDef(ModuleDef),
MacroDef(MacroDef),
}
impl Def {
fn name(&self, db: &RootDatabase) -> Option<Name> {
match self {
Def::ModuleDef(def) => def.name(db),
Def::MacroDef(def) => def.name(db),
}
fn is_referenced_in(&self, ctx: &AssistContext) -> bool {
let def = match self {
Def::ModuleDef(def) => Definition::ModuleDef(*def),
Def::MacroDef(def) => Definition::Macro(*def),
};
let search_scope = SearchScope::single_file(ctx.frange.file_id);
def.usages(&ctx.sema).in_scope(search_scope).at_least_one()
}
}
fn find_defs_in_mod(
ctx: &AssistContext,
from: SemanticsScope<'_>,
module: hir::Module,
) -> Option<Vec<Def>> {
let module_scope = module.scope(ctx.db(), from.module());
let mut defs = vec![];
for (_, def) in module_scope {
match def {
ScopeDef::ModuleDef(def) => defs.push(Def::ModuleDef(def)),
ScopeDef::MacroDef(def) => defs.push(Def::MacroDef(def)),
_ => continue,
}
}
Some(defs)
#[derive(Debug, Clone)]
struct Ref {
// could be an alias
visible_name: Name,
def: Def,
}
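Aside on the `visible_name` field above: the name a glob importer sees can differ from the definition's own name when the target module re-exports it under an alias. A minimal self-contained illustration (not part of this diff; all names invented):

```rust
mod target {
    pub struct Original;
    // Re-exported under a different name: a glob importer sees `Alias`,
    // so the visible name is `Alias` while the definition is `Original`.
    pub use self::Original as Alias;
}

// Expanding this glob import should therefore emit the visible name `Alias`.
use target::*;

fn main() {
    let _value: Alias = Alias;
}
```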
fn find_used_names(
ctx: &AssistContext,
defs_in_mod: Vec<Def>,
name_refs_in_source_file: Vec<ast::NameRef>,
) -> Vec<Name> {
let defs_in_source_file = name_refs_in_source_file
.iter()
.filter_map(|r| classify_name_ref(&ctx.sema, r))
.filter_map(|rc| match rc {
NameRefClass::Definition(Definition::ModuleDef(def)) => Some(Def::ModuleDef(def)),
NameRefClass::Definition(Definition::Macro(def)) => Some(Def::MacroDef(def)),
impl Ref {
fn from_scope_def(name: Name, scope_def: ScopeDef) -> Option<Self> {
match scope_def {
ScopeDef::ModuleDef(def) => Some(Ref { visible_name: name, def: Def::ModuleDef(def) }),
ScopeDef::MacroDef(def) => Some(Ref { visible_name: name, def: Def::MacroDef(def) }),
_ => None,
})
.collect::<Vec<Def>>();
}
}
}
defs_in_mod
.iter()
.filter(|def| {
if let Def::ModuleDef(ModuleDef::Trait(tr)) = def {
for item in tr.items(ctx.db()) {
if let AssocItem::Function(f) = item {
if defs_in_source_file.contains(&Def::ModuleDef(ModuleDef::Function(f))) {
#[derive(Debug, Clone)]
struct Refs(Vec<Ref>);
impl Refs {
fn used_refs(&self, ctx: &AssistContext) -> Refs {
Refs(
self.0
.clone()
.into_iter()
.filter(|r| {
if let Def::ModuleDef(ModuleDef::Trait(tr)) = r.def {
if tr
.items(ctx.db())
.into_iter()
.find(|ai| {
if let AssocItem::Function(f) = *ai {
Def::ModuleDef(ModuleDef::Function(f)).is_referenced_in(ctx)
} else {
false
}
})
.is_some()
{
return true;
}
}
}
}
defs_in_source_file.contains(def)
})
.filter_map(|d| d.name(ctx.db()))
.collect()
r.def.is_referenced_in(ctx)
})
.collect(),
)
}
fn filter_out_by_defs(&self, defs: Vec<Def>) -> Refs {
Refs(self.0.clone().into_iter().filter(|r| !defs.contains(&r.def)).collect())
}
}
fn find_refs_in_mod(
ctx: &AssistContext,
module: Module,
visible_from: Option<Module>,
) -> Option<Refs> {
if let Some(from) = visible_from {
if !is_mod_visible_from(ctx, module, from) {
return None;
}
}
let module_scope = module.scope(ctx.db(), visible_from);
let refs = module_scope.into_iter().filter_map(|(n, d)| Ref::from_scope_def(n, d)).collect();
Some(Refs(refs))
}
fn is_mod_visible_from(ctx: &AssistContext, module: Module, from: Module) -> bool {
match module.parent(ctx.db()) {
Some(parent) => {
parent.visibility_of(ctx.db(), &ModuleDef::Module(module)).map_or(true, |vis| {
vis.is_visible_from(ctx.db(), from.into()) && is_mod_visible_from(ctx, parent, from)
})
}
None => true,
}
}
// looks for name refs in the parent use item's siblings
//
// mod bar {
// mod qux {
// struct Qux;
// }
//
// pub use qux::Qux;
// }
//
// ↓ ---------------
// use foo::*<|>;
// use baz::Baz;
// ↑ ---------------
fn find_imported_defs(ctx: &AssistContext, star: SyntaxToken) -> Option<Vec<Def>> {
let parent_use_item_syntax =
star.ancestors().find_map(|n| if ast::Use::can_cast(n.kind()) { Some(n) } else { None })?;
Some(
[Direction::Prev, Direction::Next]
.iter()
.map(|dir| {
parent_use_item_syntax
.siblings(dir.to_owned())
.filter(|n| ast::Use::can_cast(n.kind()))
})
.flatten()
.filter_map(|n| Some(n.descendants().filter_map(ast::NameRef::cast)))
.flatten()
.filter_map(|r| match classify_name_ref(&ctx.sema, &r)? {
NameRefClass::Definition(Definition::ModuleDef(def)) => Some(Def::ModuleDef(def)),
NameRefClass::Definition(Definition::Macro(def)) => Some(Def::MacroDef(def)),
_ => None,
})
.collect(),
)
}
fn find_names_to_import(
ctx: &AssistContext,
refs_in_target: Refs,
imported_defs: Vec<Def>,
) -> Vec<Name> {
let used_refs = refs_in_target.used_refs(ctx).filter_out_by_defs(imported_defs);
used_refs.0.iter().map(|r| r.visible_name.clone()).collect()
}
fn replace_ast(
builder: &mut AssistBuilder,
node: &SyntaxNode,
parent: Either<ast::UseTree, ast::UseTreeList>,
path: ast::Path,
used_names: Vec<Name>,
names_to_import: Vec<Name>,
) {
let replacement: Either<ast::UseTree, ast::UseTreeList> = match used_names.as_slice() {
[name] => Either::Left(ast::make::use_tree(
ast::make::path_from_text(&format!("{}::{}", path, name)),
None,
None,
false,
)),
names => Either::Right(ast::make::use_tree_list(names.iter().map(|n| {
ast::make::use_tree(ast::make::path_from_text(&n.to_string()), None, None, false)
}))),
let existing_use_trees = match parent.clone() {
Either::Left(_) => vec![],
Either::Right(u) => u
.use_trees()
.filter(|n|
// filter out star
n.star_token().is_none())
.collect(),
};
let mut replace_node = |replacement: Either<ast::UseTree, ast::UseTreeList>| {
algo::diff(node, &replacement.either(|u| u.syntax().clone(), |ut| ut.syntax().clone()))
.into_text_edit(builder.text_edit_builder());
};
let new_use_trees: Vec<ast::UseTree> = names_to_import
.iter()
.map(|n| ast::make::use_tree(ast::make::path_from_text(&n.to_string()), None, None, false))
.collect();
match_ast! {
match node {
ast::UseTree(use_tree) => {
replace_node(replacement);
},
ast::UseTreeList(use_tree_list) => {
replace_node(replacement);
},
ast::Use(use_item) => {
builder.replace_ast(use_item, ast::make::use_(replacement.left_or_else(|ut| ast::make::use_tree(path, Some(ut), None, false))));
},
_ => {},
let use_trees = [&existing_use_trees[..], &new_use_trees[..]].concat();
match use_trees.as_slice() {
[name] => {
if let Some(end_path) = name.path() {
let replacement = ast::make::use_tree(
ast::make::path_from_text(&format!("{}::{}", path, end_path)),
None,
None,
false,
);
algo::diff(
&parent.either(|n| n.syntax().clone(), |n| n.syntax().clone()),
replacement.syntax(),
)
.into_text_edit(builder.text_edit_builder());
}
}
}
names => {
let replacement = match parent {
Either::Left(_) => ast::make::use_tree(
path,
Some(ast::make::use_tree_list(names.to_owned())),
None,
false,
)
.syntax()
.clone(),
Either::Right(_) => ast::make::use_tree_list(names.to_owned()).syntax().clone(),
};
algo::diff(
&parent.either(|n| n.syntax().clone(), |n| n.syntax().clone()),
&replacement,
)
.into_text_edit(builder.text_edit_builder());
}
};
}
#[cfg(test)]
@ -245,7 +364,46 @@ mod foo {
pub fn f() {}
}
use foo::{Baz, Bar, f};
use foo::{f, Baz, Bar};
fn qux(bar: Bar, baz: Baz) {
f();
}
",
)
}
#[test]
fn expanding_glob_import_with_existing_uses_in_same_module() {
check_assist(
expand_glob_import,
r"
mod foo {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
use foo::Bar;
use foo::{*<|>, f};
fn qux(bar: Bar, baz: Baz) {
f();
}
",
r"
mod foo {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
use foo::Bar;
use foo::{f, Baz};
fn qux(bar: Bar, baz: Baz) {
f();
@ -260,7 +418,7 @@ fn qux(bar: Bar, baz: Baz) {
expand_glob_import,
r"
mod foo {
mod bar {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
@ -268,7 +426,7 @@ mod foo {
pub fn f() {}
}
mod baz {
pub mod baz {
pub fn g() {}
}
}
@ -282,7 +440,7 @@ fn qux(bar: Bar, baz: Baz) {
",
r"
mod foo {
mod bar {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
@ -290,51 +448,330 @@ mod foo {
pub fn f() {}
}
mod baz {
pub mod baz {
pub fn g() {}
}
}
use foo::{bar::{Baz, Bar, f}, baz::*};
use foo::{bar::{f, Baz, Bar}, baz::*};
fn qux(bar: Bar, baz: Baz) {
f();
g();
}
",
)
);
check_assist(
expand_glob_import,
r"
mod foo {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
pub mod baz {
pub fn g() {}
}
}
use foo::{bar::{Bar, Baz, f}, baz::*<|>};
fn qux(bar: Bar, baz: Baz) {
f();
g();
}
",
r"
mod foo {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
pub mod baz {
pub fn g() {}
}
}
use foo::{bar::{Bar, Baz, f}, baz::g};
fn qux(bar: Bar, baz: Baz) {
f();
g();
}
",
);
check_assist(
expand_glob_import,
r"
mod foo {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
pub mod baz {
pub fn g() {}
pub mod qux {
pub fn h() {}
pub fn m() {}
pub mod q {
pub fn j() {}
}
}
}
}
use foo::{
bar::{*, f},
baz::{g, qux::*<|>}
};
fn qux(bar: Bar, baz: Baz) {
f();
g();
h();
q::j();
}
",
r"
mod foo {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
pub mod baz {
pub fn g() {}
pub mod qux {
pub fn h() {}
pub fn m() {}
pub mod q {
pub fn j() {}
}
}
}
}
use foo::{
bar::{*, f},
baz::{g, qux::{q, h}}
};
fn qux(bar: Bar, baz: Baz) {
f();
g();
h();
q::j();
}
",
);
check_assist(
expand_glob_import,
r"
mod foo {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
pub mod baz {
pub fn g() {}
pub mod qux {
pub fn h() {}
pub fn m() {}
pub mod q {
pub fn j() {}
}
}
}
}
use foo::{
bar::{*, f},
baz::{g, qux::{h, q::*<|>}}
};
fn qux(bar: Bar, baz: Baz) {
f();
g();
h();
j();
}
",
r"
mod foo {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
pub mod baz {
pub fn g() {}
pub mod qux {
pub fn h() {}
pub fn m() {}
pub mod q {
pub fn j() {}
}
}
}
}
use foo::{
bar::{*, f},
baz::{g, qux::{h, q::j}}
};
fn qux(bar: Bar, baz: Baz) {
f();
g();
h();
j();
}
",
);
check_assist(
expand_glob_import,
r"
mod foo {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
pub mod baz {
pub fn g() {}
pub mod qux {
pub fn h() {}
pub fn m() {}
pub mod q {
pub fn j() {}
}
}
}
}
use foo::{
bar::{*, f},
baz::{g, qux::{q::j, *<|>}}
};
fn qux(bar: Bar, baz: Baz) {
f();
g();
h();
j();
}
",
r"
mod foo {
pub mod bar {
pub struct Bar;
pub struct Baz;
pub struct Qux;
pub fn f() {}
}
pub mod baz {
pub fn g() {}
pub mod qux {
pub fn h() {}
pub fn m() {}
pub mod q {
pub fn j() {}
}
}
}
}
use foo::{
bar::{*, f},
baz::{g, qux::{q::j, h}}
};
fn qux(bar: Bar, baz: Baz) {
f();
g();
h();
j();
}
",
);
}
#[test]
fn expanding_glob_import_with_macro_defs() {
check_assist(
expand_glob_import,
r"
//- /lib.rs crate:foo
#[macro_export]
macro_rules! bar {
() => ()
}
// FIXME: this currently fails because `Definition::find_usages` ignores macros
// https://github.com/rust-analyzer/rust-analyzer/issues/3484
//
// check_assist(
// expand_glob_import,
// r"
// //- /lib.rs crate:foo
// #[macro_export]
// macro_rules! bar {
// () => ()
// }
pub fn baz() {}
// pub fn baz() {}
//- /main.rs crate:main deps:foo
use foo::*<|>;
// //- /main.rs crate:main deps:foo
// use foo::*<|>;
fn main() {
bar!();
baz();
}
",
r"
use foo::{bar, baz};
// fn main() {
// bar!();
// baz();
// }
// ",
// r"
// use foo::{bar, baz};
fn main() {
bar!();
baz();
}
",
)
// fn main() {
// bar!();
// baz();
// }
// ",
// )
}
#[test]
@ -362,7 +799,72 @@ fn main() {
().method();
}
",
)
);
check_assist(
expand_glob_import,
r"
//- /lib.rs crate:foo
pub trait Tr {
fn method(&self) {}
}
impl Tr for () {}
pub trait Tr2 {
fn method2(&self) {}
}
impl Tr2 for () {}
//- /main.rs crate:main deps:foo
use foo::*<|>;
fn main() {
().method();
}
",
r"
use foo::Tr;
fn main() {
().method();
}
",
);
}
#[test]
fn expanding_is_not_applicable_if_target_module_is_not_accessible_from_current_scope() {
check_assist_not_applicable(
expand_glob_import,
r"
mod foo {
mod bar {
pub struct Bar;
}
}
use foo::bar::*<|>;
fn baz(bar: Bar) {}
",
);
check_assist_not_applicable(
expand_glob_import,
r"
mod foo {
mod bar {
pub mod baz {
pub struct Baz;
}
}
}
use foo::bar::baz::*<|>;
fn qux(baz: Baz) {}
",
);
}
#[test]


@ -106,4 +106,22 @@ mod tests {
"fn f() { i<|>f let Some(_) = Some(1) { 1 } else { 0 } }",
)
}
#[test]
fn invert_if_option_case() {
check_assist(
invert_if,
"fn f() { if<|> doc_style.is_some() { Class::DocComment } else { Class::Comment } }",
"fn f() { if doc_style.is_none() { Class::Comment } else { Class::DocComment } }",
)
}
#[test]
fn invert_if_result_case() {
check_assist(
invert_if,
"fn f() { i<|>f doc_style.is_err() { Class::Err } else { Class::Ok } }",
"fn f() { if doc_style.is_ok() { Class::Ok } else { Class::Err } }",
)
}
}


@ -11,7 +11,7 @@ use syntax::{
ast::{self, make, NameOwner},
AstNode, Direction,
SyntaxKind::*,
SyntaxNode, TextSize, T,
SyntaxNode, SyntaxText, TextSize, T,
};
use crate::assist_config::SnippetCap;
@ -179,6 +179,25 @@ fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> {
ast::BinOp::EqualityTest => bin.replace_op(T![!=]).map(|it| it.into()),
_ => None,
},
ast::Expr::MethodCallExpr(mce) => {
const IS_SOME_TEXT: &str = "is_some";
const IS_NONE_TEXT: &str = "is_none";
const IS_OK_TEXT: &str = "is_ok";
const IS_ERR_TEXT: &str = "is_err";
let name = mce.name_ref()?;
let name_text = name.text();
let caller = || -> Option<SyntaxText> { Some(mce.receiver()?.syntax().text()) };
match name_text {
x if x == IS_SOME_TEXT => make::expr_method_call(IS_NONE_TEXT, caller),
x if x == IS_NONE_TEXT => make::expr_method_call(IS_SOME_TEXT, caller),
x if x == IS_OK_TEXT => make::expr_method_call(IS_ERR_TEXT, caller),
x if x == IS_ERR_TEXT => make::expr_method_call(IS_OK_TEXT, caller),
_ => None,
}
}
ast::Expr::PrefixExpr(pe) if pe.op_kind()? == ast::PrefixOp::Not => pe.expr(),
// FIXME:
// ast::Expr::Literal(true | false )


@ -1,6 +1,7 @@
[package]
name = "base_db"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -12,10 +13,10 @@ doctest = false
salsa = "0.15.2"
rustc-hash = "1.1.0"
syntax = { path = "../syntax" }
cfg = { path = "../cfg" }
profile = { path = "../profile" }
tt = { path = "../tt" }
test_utils = { path = "../test_utils" }
vfs = { path = "../vfs" }
stdx = { path = "../stdx" }
syntax = { path = "../syntax", version = "0.0.0" }
cfg = { path = "../cfg", version = "0.0.0" }
profile = { path = "../profile", version = "0.0.0" }
tt = { path = "../tt", version = "0.0.0" }
test_utils = { path = "../test_utils", version = "0.0.0" }
vfs = { path = "../vfs", version = "0.0.0" }
stdx = { path = "../stdx", version = "0.0.0" }


@ -1,6 +1,7 @@
[package]
name = "cfg"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -11,7 +12,7 @@ doctest = false
[dependencies]
rustc-hash = "1.1.0"
tt = { path = "../tt" }
tt = { path = "../tt", version = "0.0.0" }
[dev-dependencies]
mbe = { path = "../mbe" }


@ -1,6 +1,7 @@
[package]
name = "flycheck"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -15,4 +16,4 @@ cargo_metadata = "0.11.1"
serde_json = "1.0.48"
jod-thread = "0.1.1"
toolchain = { path = "../toolchain" }
toolchain = { path = "../toolchain", version = "0.0.0" }


@ -1,6 +1,7 @@
[package]
name = "hir"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -16,11 +17,11 @@ arrayvec = "0.5.1"
itertools = "0.9.0"
url = "2.1.1"
stdx = { path = "../stdx" }
syntax = { path = "../syntax" }
base_db = { path = "../base_db" }
profile = { path = "../profile" }
hir_expand = { path = "../hir_expand" }
hir_def = { path = "../hir_def" }
hir_ty = { path = "../hir_ty" }
tt = { path = "../tt" }
stdx = { path = "../stdx", version = "0.0.0" }
syntax = { path = "../syntax", version = "0.0.0" }
base_db = { path = "../base_db", version = "0.0.0" }
profile = { path = "../profile", version = "0.0.0" }
hir_expand = { path = "../hir_expand", version = "0.0.0" }
hir_def = { path = "../hir_def", version = "0.0.0" }
hir_ty = { path = "../hir_ty", version = "0.0.0" }
tt = { path = "../tt", version = "0.0.0" }


@ -1,6 +1,7 @@
[package]
name = "hir_def"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -20,16 +21,16 @@ itertools = "0.9.0"
indexmap = "1.4.0"
smallvec = "1.4.0"
stdx = { path = "../stdx" }
arena = { path = "../arena" }
base_db = { path = "../base_db" }
syntax = { path = "../syntax" }
profile = { path = "../profile" }
hir_expand = { path = "../hir_expand" }
test_utils = { path = "../test_utils" }
mbe = { path = "../mbe" }
cfg = { path = "../cfg" }
tt = { path = "../tt" }
stdx = { path = "../stdx", version = "0.0.0" }
arena = { path = "../arena", version = "0.0.0" }
base_db = { path = "../base_db", version = "0.0.0" }
syntax = { path = "../syntax", version = "0.0.0" }
profile = { path = "../profile", version = "0.0.0" }
hir_expand = { path = "../hir_expand", version = "0.0.0" }
test_utils = { path = "../test_utils", version = "0.0.0" }
mbe = { path = "../mbe", version = "0.0.0" }
cfg = { path = "../cfg", version = "0.0.0" }
tt = { path = "../tt", version = "0.0.0" }
[dev-dependencies]
expect-test = "0.1"


@ -557,6 +557,10 @@ impl Ctx {
let statik = self.lower_static(&ast)?;
statik.into()
}
ast::ExternItem::TypeAlias(ty) => {
let id = self.lower_type_alias(&ty)?;
id.into()
}
ast::ExternItem::MacroCall(_) => return None,
};
self.add_attrs(id.into(), attrs);


@ -46,6 +46,7 @@ union U { to_be: bool, not_to_be: u8 }
enum E { V }
extern {
type Ext;
static EXT: u8;
fn ext();
}
@ -65,6 +66,7 @@ extern {
Baz: t v
E: t
EXT: v
Ext: t
U: t
ext: v
"#]],


@ -1,6 +1,7 @@
[package]
name = "hir_expand"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -13,11 +14,11 @@ log = "0.4.8"
either = "1.5.3"
rustc-hash = "1.0.0"
arena = { path = "../arena" }
base_db = { path = "../base_db" }
syntax = { path = "../syntax" }
parser = { path = "../parser" }
profile = { path = "../profile" }
tt = { path = "../tt" }
mbe = { path = "../mbe" }
test_utils = { path = "../test_utils"}
arena = { path = "../arena", version = "0.0.0" }
base_db = { path = "../base_db", version = "0.0.0" }
syntax = { path = "../syntax", version = "0.0.0" }
parser = { path = "../parser", version = "0.0.0" }
profile = { path = "../profile", version = "0.0.0" }
tt = { path = "../tt", version = "0.0.0" }
mbe = { path = "../mbe", version = "0.0.0" }
test_utils = { path = "../test_utils", version = "0.0.0" }


@ -1,6 +1,7 @@
[package]
name = "hir_ty"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -20,14 +21,14 @@ chalk-solve = { version = "0.23.0" }
chalk-ir = { version = "0.23.0" }
chalk-recursive = { version = "0.23.0" }
stdx = { path = "../stdx" }
hir_def = { path = "../hir_def" }
hir_expand = { path = "../hir_expand" }
arena = { path = "../arena" }
base_db = { path = "../base_db" }
profile = { path = "../profile" }
syntax = { path = "../syntax" }
test_utils = { path = "../test_utils" }
stdx = { path = "../stdx", version = "0.0.0" }
hir_def = { path = "../hir_def", version = "0.0.0" }
hir_expand = { path = "../hir_expand", version = "0.0.0" }
arena = { path = "../arena", version = "0.0.0" }
base_db = { path = "../base_db", version = "0.0.0" }
profile = { path = "../profile", version = "0.0.0" }
syntax = { path = "../syntax", version = "0.0.0" }
test_utils = { path = "../test_utils", version = "0.0.0" }
[dev-dependencies]
expect-test = "0.1"


@ -1,6 +1,7 @@
[package]
name = "ide"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -18,20 +19,20 @@ oorandom = "11.1.2"
pulldown-cmark-to-cmark = "5.0.0"
pulldown-cmark = {version = "0.7.2", default-features = false}
stdx = { path = "../stdx" }
syntax = { path = "../syntax" }
text_edit = { path = "../text_edit" }
base_db = { path = "../base_db" }
ide_db = { path = "../ide_db" }
cfg = { path = "../cfg" }
profile = { path = "../profile" }
test_utils = { path = "../test_utils" }
assists = { path = "../assists" }
ssr = { path = "../ssr" }
stdx = { path = "../stdx", version = "0.0.0" }
syntax = { path = "../syntax", version = "0.0.0" }
text_edit = { path = "../text_edit", version = "0.0.0" }
base_db = { path = "../base_db", version = "0.0.0" }
ide_db = { path = "../ide_db", version = "0.0.0" }
cfg = { path = "../cfg", version = "0.0.0" }
profile = { path = "../profile", version = "0.0.0" }
test_utils = { path = "../test_utils", version = "0.0.0" }
assists = { path = "../assists", version = "0.0.0" }
ssr = { path = "../ssr", version = "0.0.0" }
# ide should depend only on the top-level `hir` package. if you need
# something from some `hir_xxx` subpackage, reexport the API via `hir`.
hir = { path = "../hir" }
hir = { path = "../hir", version = "0.0.0" }
[dev-dependencies]
expect-test = "0.1"
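The comment in the manifest above states a layering rule: `ide` must reach `hir_def`/`hir_ty` functionality only through the `hir` facade, rather than growing direct manifest dependencies. A single-file mock of that rule, purely illustrative (all names are stand-ins, nothing here is rust-analyzer's actual API):

```rust
// Stand-in for a low-level crate such as `hir_def`.
mod hir_def {
    pub struct ModPath;
}

// The facade re-exports what the IDE layer needs...
mod hir {
    pub use crate::hir_def::ModPath;
}

// ...so the IDE layer imports from `hir` only, never from `hir_def` directly.
mod ide {
    use crate::hir::ModPath;

    pub fn takes_a_path(_path: &ModPath) {}
}

fn main() {
    ide::takes_a_path(&hir::ModPath);
}
```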


@ -92,7 +92,7 @@ pub use crate::completion::{
/// already present, it should give all possible variants for the identifier at
/// the caret. In other words, for
///
/// ```no-run
/// ```no_run
/// fn f() {
/// let foo = 92;
/// let _ = bar<|>


@ -160,7 +160,7 @@ fn runnable_fn(
RunnableKind::Test { test_id, attr }
} else if fn_def.has_atom_attr("bench") {
RunnableKind::Bench { test_id }
} else if has_doc_test(&fn_def) {
} else if has_runnable_doc_test(&fn_def) {
RunnableKind::DocTest { test_id }
} else {
return None;
@ -211,8 +211,13 @@ fn has_test_related_attribute(fn_def: &ast::Fn) -> bool {
.any(|attribute_text| attribute_text.contains("test"))
}
fn has_doc_test(fn_def: &ast::Fn) -> bool {
fn_def.doc_comment_text().map_or(false, |comment| comment.contains("```"))
fn has_runnable_doc_test(fn_def: &ast::Fn) -> bool {
fn_def.doc_comment_text().map_or(false, |comments_text| {
comments_text.contains("```")
&& !comments_text.contains("```ignore")
&& !comments_text.contains("```no_run")
&& !comments_text.contains("```compile_fail")
})
}
fn runnable_mod(
@ -417,6 +422,21 @@ fn main() {}
/// let x = 5;
/// ```
fn foo() {}
/// ```no_run
/// let z = 55;
/// ```
fn should_have_no_runnable() {}
/// ```ignore
/// let z = 55;
/// ```
fn should_have_no_runnable_2() {}
/// ```compile_fail
/// let z = 55;
/// ```
fn should_have_no_runnable_3() {}
"#,
&[&BIN, &DOCTEST],
expect![[r#"


@ -1,6 +1,7 @@
[package]
name = "ide_db"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -19,12 +20,12 @@ rustc-hash = "1.1.0"
once_cell = "1.3.1"
either = "1.5.3"
stdx = { path = "../stdx" }
syntax = { path = "../syntax" }
text_edit = { path = "../text_edit" }
base_db = { path = "../base_db" }
profile = { path = "../profile" }
test_utils = { path = "../test_utils" }
stdx = { path = "../stdx", version = "0.0.0" }
syntax = { path = "../syntax", version = "0.0.0" }
text_edit = { path = "../text_edit", version = "0.0.0" }
base_db = { path = "../base_db", version = "0.0.0" }
profile = { path = "../profile", version = "0.0.0" }
test_utils = { path = "../test_utils", version = "0.0.0" }
# ide should depend only on the top-level `hir` package. if you need
# something from some `hir_xxx` subpackage, reexport the API via `hir`.
hir = { path = "../hir" }
hir = { path = "../hir", version = "0.0.0" }


@ -1,6 +1,7 @@
[package]
name = "mbe"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -13,9 +14,9 @@ rustc-hash = "1.1.0"
smallvec = "1.2.0"
log = "0.4.8"
syntax = { path = "../syntax" }
parser = { path = "../parser" }
tt = { path = "../tt" }
syntax = { path = "../syntax", version = "0.0.0" }
parser = { path = "../parser", version = "0.0.0" }
tt = { path = "../tt", version = "0.0.0" }
[dev-dependencies]
test_utils = { path = "../test_utils" }


@ -1,6 +1,7 @@
[package]
name = "parser"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"


@ -1,6 +1,7 @@
[package]
name = "paths"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"


@ -1,6 +1,7 @@
[package]
name = "proc_macro_api"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -15,4 +16,4 @@ log = "0.4.8"
crossbeam-channel = "0.4.0"
jod-thread = "0.1.1"
tt = { path = "../tt" }
tt = { path = "../tt", version = "0.0.0" }


@ -1,6 +1,7 @@
[package]
name = "proc_macro_srv"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -13,10 +14,10 @@ goblin = "0.2.1"
libloading = "0.6.0"
memmap = "0.7"
tt = { path = "../tt" }
mbe = { path = "../mbe" }
proc_macro_api = { path = "../proc_macro_api" }
test_utils = { path = "../test_utils" }
tt = { path = "../tt", version = "0.0.0" }
mbe = { path = "../mbe", version = "0.0.0" }
proc_macro_api = { path = "../proc_macro_api", version = "0.0.0" }
test_utils = { path = "../test_utils", version = "0.0.0" }
[dev-dependencies]
cargo_metadata = "0.11.1"


@ -4,6 +4,7 @@ version = "0.0.0"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
publish = false
[lib]
doctest = false


@ -1,6 +1,7 @@
[package]
name = "profile"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -14,7 +15,7 @@ cfg-if = "0.1.10"
libc = "0.2.73"
backtrace = { version = "0.3.44", optional = true }
arena = { path = "../arena" }
arena = { path = "../arena", version = "0.0.0" }
[target.'cfg(target_os = "linux")'.dependencies]
perf-event = "0.4"


@ -1,6 +1,7 @@
[package]
name = "project_model"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -16,10 +17,10 @@ serde = { version = "1.0.106", features = ["derive"] }
serde_json = "1.0.48"
anyhow = "1.0.26"
arena = { path = "../arena" }
cfg = { path = "../cfg" }
base_db = { path = "../base_db" }
toolchain = { path = "../toolchain" }
proc_macro_api = { path = "../proc_macro_api" }
paths = { path = "../paths" }
stdx = { path = "../stdx" }
arena = { path = "../arena", version = "0.0.0" }
cfg = { path = "../cfg", version = "0.0.0" }
base_db = { path = "../base_db", version = "0.0.0" }
toolchain = { path = "../toolchain", version = "0.0.0" }
proc_macro_api = { path = "../proc_macro_api", version = "0.0.0" }
paths = { path = "../paths", version = "0.0.0" }
stdx = { path = "../stdx", version = "0.0.0" }


@ -1,6 +1,7 @@
[package]
name = "rust-analyzer"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
autobins = false
@ -32,26 +33,26 @@ rayon = "1.3.1"
mimalloc = { version = "0.1.19", default-features = false, optional = true }
lsp-server = "0.3.3"
stdx = { path = "../stdx" }
flycheck = { path = "../flycheck" }
ide = { path = "../ide" }
profile = { path = "../profile" }
project_model = { path = "../project_model" }
syntax = { path = "../syntax" }
text_edit = { path = "../text_edit" }
vfs = { path = "../vfs" }
vfs-notify = { path = "../vfs-notify" }
cfg = { path = "../cfg" }
toolchain = { path = "../toolchain" }
stdx = { path = "../stdx", version = "0.0.0" }
flycheck = { path = "../flycheck", version = "0.0.0" }
ide = { path = "../ide", version = "0.0.0" }
profile = { path = "../profile", version = "0.0.0" }
project_model = { path = "../project_model", version = "0.0.0" }
syntax = { path = "../syntax", version = "0.0.0" }
text_edit = { path = "../text_edit", version = "0.0.0" }
vfs = { path = "../vfs", version = "0.0.0" }
vfs-notify = { path = "../vfs-notify", version = "0.0.0" }
cfg = { path = "../cfg", version = "0.0.0" }
toolchain = { path = "../toolchain", version = "0.0.0" }
# This should only be used in CLI
base_db = { path = "../base_db" }
ide_db = { path = "../ide_db" }
ssr = { path = "../ssr" }
hir = { path = "../hir" }
hir_def = { path = "../hir_def" }
hir_ty = { path = "../hir_ty" }
proc_macro_srv = { path = "../proc_macro_srv" }
base_db = { path = "../base_db", version = "0.0.0" }
ide_db = { path = "../ide_db", version = "0.0.0" }
ssr = { path = "../ssr", version = "0.0.0" }
hir = { path = "../hir", version = "0.0.0" }
hir_def = { path = "../hir_def", version = "0.0.0" }
hir_ty = { path = "../hir_ty", version = "0.0.0" }
proc_macro_srv = { path = "../proc_macro_srv", version = "0.0.0" }
[target.'cfg(windows)'.dependencies]
winapi = "0.3.8"


@ -14,12 +14,12 @@ doctest = false
rustc-hash = "1.1.0"
itertools = "0.9.0"
text_edit = { path = "../text_edit" }
syntax = { path = "../syntax" }
base_db = { path = "../base_db" }
ide_db = { path = "../ide_db" }
hir = { path = "../hir" }
test_utils = { path = "../test_utils" }
text_edit = { path = "../text_edit", version = "0.0.0" }
syntax = { path = "../syntax", version = "0.0.0" }
base_db = { path = "../base_db", version = "0.0.0" }
ide_db = { path = "../ide_db", version = "0.0.0" }
hir = { path = "../hir", version = "0.0.0" }
test_utils = { path = "../test_utils", version = "0.0.0" }
[dev-dependencies]
expect-test = "0.1"


@ -1,6 +1,7 @@
[package]
name = "stdx"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"


@ -23,9 +23,9 @@ once_cell = "1.3.1"
smol_str = { version = "0.1.15", features = ["serde"] }
serde = { version = "1.0.106", features = ["derive"] }
stdx = { path = "../stdx" }
text_edit = { path = "../text_edit" }
parser = { path = "../parser" }
stdx = { path = "../stdx", version = "0.0.0" }
text_edit = { path = "../text_edit", version = "0.0.0" }
parser = { path = "../parser", version = "0.0.0" }
[dev-dependencies]
walkdir = "2.3.1"


@ -32,7 +32,7 @@ pub fn ancestors_at_offset(
/// imprecise: if the cursor is strictly between two nodes of the desired type,
/// as in
///
/// ```no-run
/// ```no_run
/// struct Foo {}|struct Bar;
/// ```
///


@ -1380,6 +1380,7 @@ pub enum ExternItem {
Fn(Fn),
MacroCall(MacroCall),
Static(Static),
TypeAlias(TypeAlias),
}
impl ast::AttrsOwner for ExternItem {}
impl ast::NameOwner for ExternItem {}
@ -3339,10 +3340,13 @@ impl From<MacroCall> for ExternItem {
impl From<Static> for ExternItem {
fn from(node: Static) -> ExternItem { ExternItem::Static(node) }
}
impl From<TypeAlias> for ExternItem {
fn from(node: TypeAlias) -> ExternItem { ExternItem::TypeAlias(node) }
}
impl AstNode for ExternItem {
fn can_cast(kind: SyntaxKind) -> bool {
match kind {
FN | MACRO_CALL | STATIC => true,
FN | MACRO_CALL | STATIC | TYPE_ALIAS => true,
_ => false,
}
}
@ -3351,6 +3355,7 @@ impl AstNode for ExternItem {
FN => ExternItem::Fn(Fn { syntax }),
MACRO_CALL => ExternItem::MacroCall(MacroCall { syntax }),
STATIC => ExternItem::Static(Static { syntax }),
TYPE_ALIAS => ExternItem::TypeAlias(TypeAlias { syntax }),
_ => return None,
};
Some(res)
@ -3360,6 +3365,7 @@ impl AstNode for ExternItem {
ExternItem::Fn(it) => &it.syntax,
ExternItem::MacroCall(it) => &it.syntax,
ExternItem::Static(it) => &it.syntax,
ExternItem::TypeAlias(it) => &it.syntax,
}
}
}


@ -7,7 +7,7 @@
use itertools::Itertools;
use stdx::format_to;
use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, SyntaxToken};
use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, SyntaxText, SyntaxToken};
pub fn name(text: &str) -> ast::Name {
ast_from_text(&format!("mod {};", text))
@ -137,6 +137,12 @@ pub fn expr_prefix(op: SyntaxKind, expr: ast::Expr) -> ast::Expr {
pub fn expr_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr {
expr_from_text(&format!("{}{}", f, arg_list))
}
pub fn expr_method_call<F>(text: &str, caller: F) -> Option<ast::Expr>
where
F: FnOnce() -> Option<SyntaxText>,
{
try_expr_from_text(&format!("{}.{}()", caller()?, text))
}
fn expr_from_text(text: &str) -> ast::Expr {
ast_from_text(&format!("const C: () = {};", text))
}


@ -1,6 +1,7 @@
[package]
name = "test_utils"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -15,4 +16,4 @@ text-size = "1.0.0"
serde_json = "1.0.48"
rustc-hash = "1.1.0"
stdx = { path = "../stdx" }
stdx = { path = "../stdx", version = "0.0.0" }


@ -1,6 +1,7 @@
[package]
name = "text_edit"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"


@ -1,6 +1,7 @@
[package]
name = "toolchain"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"


@ -1,6 +1,7 @@
[package]
name = "tt"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -13,4 +14,4 @@ doctest = false
# to reduce number of compilations
smol_str = { version = "0.1.15", features = ["serde"] }
stdx = { path = "../stdx" }
stdx = { path = "../stdx", version = "0.0.0" }


@ -1,6 +1,7 @@
[package]
name = "vfs-notify"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -16,5 +17,5 @@ walkdir = "2.3.1"
crossbeam-channel = "0.4.0"
notify = "5.0.0-pre.3"
vfs = { path = "../vfs" }
paths = { path = "../paths" }
vfs = { path = "../vfs", version = "0.0.0" }
paths = { path = "../paths", version = "0.0.0" }


@ -1,6 +1,7 @@
[package]
name = "vfs"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
@ -12,4 +13,4 @@ doctest = false
rustc-hash = "1.0"
fst = "0.4"
paths = { path = "../paths" }
paths = { path = "../paths", version = "0.0.0" }


@ -181,6 +181,30 @@ fn frobnicate(walrus: Option<Walrus>) {
}
```
# Early Returns
Do use early returns
```rust
// Good
fn foo() -> Option<Bar> {
if !condition() {
return None;
}
Some(...)
}
// Not as good
fn foo() -> Option<Bar> {
if condition() {
Some(...)
} else {
None
}
}
```
# Getters & Setters
If a field can have any value without breaking invariants, make the field public.
@ -189,7 +213,7 @@ Never provide setters.
Getters should return borrowed data:
```
```rust
struct Person {
// Invariant: never empty
first_name: String,
@ -231,6 +255,41 @@ if words.len() != 2 {
}
```
# Avoid Monomorphization
Rust uses monomorphization to compile generic code, meaning that for each instantiation of a generic function with concrete types, the function is compiled afresh, *per crate*.
This allows for exceptionally good performance, but leads to increased compile times.
Runtime performance obeys the 80%/20% rule -- only a small fraction of the code is hot.
Compile time **does not** obey this rule -- all code has to be compiled.
For this reason, avoid making a lot of code type parametric, *especially* on the boundaries between crates.
```rust
// Good
fn frobnicate(mut f: impl FnMut()) {
frobnicate_impl(&mut f)
}
fn frobnicate_impl(f: &mut dyn FnMut()) {
// lots of code
}
// Not as good
fn frobnicate(f: impl FnMut()) {
// lots of code
}
```
Avoid `AsRef` polymorphism; it pays off only for widely used libraries:
```rust
// Good
fn frobnicate(f: &Path) {
}
// Not as good
fn frobnicate(f: impl AsRef<Path>) {
}
```
# Documentation
For `.md` and `.adoc` files, prefer a sentence-per-line format; don't wrap lines.


@ -134,6 +134,14 @@ async function downloadFile(
await pipeline(srcStream, destFileStream);
// Don't apply the workaround in fixed versions of nodejs, since the process
// freezes on them: it waits for a `close` event that is no longer emitted.
// The fix was applied in commit 7eed9d6bcc in v13.11.0
// See the nodejs changelog:
// https://github.com/nodejs/node/blob/master/doc/changelogs/CHANGELOG_V13.md
const [, major, minor] = /v(\d+)\.(\d+)\.(\d+)/.exec(process.version)!;
if (+major > 13 || (+major === 13 && +minor >= 11)) return;
await new Promise<void>(resolve => {
destFileStream.on("close", resolve);
destFileStream.destroy();


@ -15,7 +15,7 @@ flate2 = "1.0"
pico-args = "0.3.1"
proc-macro2 = "1.0.8"
quote = "1.0.2"
ungrammar = "1.1.1"
ungrammar = "1.1.3"
walkdir = "2.3.1"
write-json = "0.1.0"
# Avoid adding more dependencies to this crate


@ -1,587 +0,0 @@
//*************************//
// Names, Paths and Macros //
//*************************//
Name =
'ident'
NameRef =
'ident' | 'int_number'
Path =
(qualifier:Path '::')? segment:PathSegment
PathSegment =
'crate' | 'self' | 'super'
| '::' NameRef
| NameRef GenericArgList?
| NameRef ParamList RetType?
| '<' PathType ('as' PathType)? '>'
GenericArgList =
'::'? '<' (GenericArg (',' GenericArg)* ','?)? '>'
GenericArg =
TypeArg
| AssocTypeArg
| LifetimeArg
| ConstArg
TypeArg =
Type
AssocTypeArg =
NameRef (':' TypeBoundList | '=' Type)
LifetimeArg =
'lifetime'
ConstArg =
Expr
MacroCall =
Attr* Path '!' Name? TokenTree ';'?
TokenTree =
'(' ')'
| '{' '}'
| '[' ']'
MacroItems =
Item*
MacroStmts =
statements:Stmt*
Expr?
//*************************//
// Items //
//*************************//
SourceFile =
'shebang'?
Attr*
Item*
Item =
Const
| Enum
| ExternBlock
| ExternCrate
| Fn
| Impl
| MacroCall
| Module
| Static
| Struct
| Trait
| TypeAlias
| Union
| Use
Module =
Attr* Visibility? 'mod' Name
(ItemList | ';')
ItemList =
'{' Attr* Item* '}'
ExternCrate =
Attr* Visibility? 'extern' 'crate' (NameRef | 'self') Rename? ';'
Rename =
'as' (Name | '_')
Use =
Attr* Visibility? 'use' UseTree ';'
UseTree =
(Path? '::')? ('*' | UseTreeList )
| Path Rename?
UseTreeList =
'{' (UseTree (',' UseTree)* ','?)? '}'
Fn =
Attr* Visibility?
'default'? ('async' | 'const')? 'unsafe'? Abi?
'fn' Name GenericParamList? ParamList RetType?
WhereClause?
(body:BlockExpr | ';')
Abi =
'extern' 'string'?
ParamList =
'('(
SelfParam
| (SelfParam ',')? (Param (',' Param)* ','?)?
)')'
SelfParam =
Attr* (
('&' 'lifetime'?)? 'mut'? 'self'
| 'mut'? 'self' ':' Type
)
Param =
Attr* (
Pat (':' Type)
| Type
| '...'
)
RetType =
'->' Type
TypeAlias =
Attr* Visibility? 'default'? 'type' Name GenericParamList? (':' TypeBoundList?)? WhereClause?
'=' Type ';'
Struct =
Attr* Visibility? 'struct' Name GenericParamList? (
WhereClause? (RecordFieldList | ';')
| TupleFieldList WhereClause? ';'
)
RecordFieldList =
'{' fields:(RecordField (',' RecordField)* ','?)? '}'
RecordField =
Attr* Visibility? Name ':' Type
TupleFieldList =
'(' fields:(TupleField (',' TupleField)* ','?)? ')'
TupleField =
Attr* Visibility? Type
FieldList =
RecordFieldList
| TupleFieldList
Enum =
Attr* Visibility? 'enum' Name GenericParamList? WhereClause?
VariantList
VariantList =
'{' (Variant (',' Variant)* ','?)? '}'
Variant =
Attr* Visibility? Name FieldList ('=' Expr)?
Union =
Attr* Visibility? 'union' Name GenericParamList? WhereClause?
RecordFieldList
AdtDef =
Enum
| Struct
| Union
Const =
Attr* Visibility? 'default'? 'const' (Name | '_') ':' Type
'=' body:Expr ';'
Static =
Attr* Visibility? 'static'? 'mut'? Name ':' Type
'=' body:Expr ';'
Trait =
Attr* Visibility? 'unsafe'? 'auto'? 'trait' Name GenericParamList
(':' TypeBoundList?)? WhereClause
AssocItemList
AssocItemList =
'{' Attr* AssocItem* '}'
AssocItem =
Const
| Fn
| MacroCall
| TypeAlias
Impl =
Attr* Visibility?
'default'? 'unsafe'? 'impl' 'const'? GenericParamList?
('!'? target_trait:Type 'for')? target_type:Type
WhereClause?
AssocItemList
ExternBlock =
Attr* Abi ExternItemList
ExternItemList =
'{' Attr* ExternItem* '}'
ExternItem =
Fn | Static | MacroCall
GenericParamList =
'<' (GenericParam (',' GenericParam)* ','?)? '>'
GenericParam =
ConstParam
| LifetimeParam
| TypeParam
TypeParam =
Attr* Name (':' TypeBoundList?)?
('=' default_type:Type)?
ConstParam =
Attr* 'const' Name ':' Type
('=' default_val:Expr)?
LifetimeParam =
Attr* 'lifetime' (':' TypeBoundList?)?
WhereClause =
'where' predicates:(WherePred (',' WherePred)* ','?)
WherePred =
('for' GenericParamList)? ('lifetime' | Type) ':' TypeBoundList
Visibility =
'pub' ('('
'super'
| 'self'
| 'crate'
| 'in' Path
')')?
Attr =
'#' '!'? '[' Path ('=' Literal | TokenTree)? ']'
//****************************//
// Statements and Expressions //
//****************************//
Stmt =
ExprStmt
| Item
| LetStmt
LetStmt =
Attr* 'let' Pat (':' Type)?
'=' initializer:Expr ';'
ExprStmt =
Attr* Expr ';'?
Expr =
ArrayExpr
| AwaitExpr
| BinExpr
| BlockExpr
| BoxExpr
| BreakExpr
| CallExpr
| CastExpr
| ClosureExpr
| ContinueExpr
| EffectExpr
| FieldExpr
| ForExpr
| IfExpr
| IndexExpr
| Literal
| LoopExpr
| MacroCall
| MatchExpr
| MethodCallExpr
| ParenExpr
| PathExpr
| PrefixExpr
| RangeExpr
| RecordExpr
| RefExpr
| ReturnExpr
| TryExpr
| TupleExpr
| WhileExpr
Literal =
Attr* value:(
'int_number' | 'float_number'
| 'string' | 'raw_string'
| 'byte_string' | 'raw_byte_string'
| 'true' | 'false'
| 'char' | 'byte'
)
PathExpr =
Attr* Path
BlockExpr =
'{'
Attr*
statements:Stmt*
Expr?
'}'
RefExpr =
Attr* '&' ('raw' |'mut' | 'const') Expr
TryExpr =
Attr* Expr '?'
EffectExpr =
Attr* Label? ('try' | 'unsafe' | 'async') BlockExpr
PrefixExpr =
Attr* op:('-' | '!' | '*') Expr
BinExpr =
Attr*
lhs:Expr
op:(
'||' | '&&'
| '==' | '!=' | '<=' | '>=' | '<' | '>'
| '+' | '*' | '-' | '/' | '%' | '<<' | '>>' | '^' | '|' | '&'
| '=' | '+=' | '/=' | '*=' | '%=' | '>>=' | '<<=' | '-=' | '|=' | '&=' | '^='
)
rhs:Expr
CastExpr =
Attr* Expr 'as' Type
ParenExpr =
Attr* '(' Attr* Expr ')'
ArrayExpr =
Attr* '[' Attr* (
(Expr (',' Expr)* ','?)?
| Expr ';' Expr
) ']'
IndexExpr =
Attr* base:Expr '[' index:Expr ']'
TupleExpr =
Attr* '(' Attr* fields:(Expr (',' Expr)* ','?)? ')'
RecordExpr =
Path RecordExprFieldList
RecordExprFieldList =
'{'
Attr*
fields:(RecordExprField (',' RecordExprField)* ','?)
('..' spread:Expr)?
'}'
RecordExprField =
Attr* NameRef (':' Expr)?
CallExpr =
Attr* Expr ArgList
ArgList =
'(' args:(Expr (',' Expr)* ','?)? ')'
MethodCallExpr =
Attr* Expr '.' NameRef GenericArgList? ArgList
FieldExpr =
Attr* Expr '.' NameRef
ClosureExpr =
Attr* 'static'? 'async'? 'move'? ParamList RetType?
body:Expr
IfExpr =
Attr* 'if' Condition then_branch:BlockExpr
('else' else_branch:(IfExpr | BlockExpr))?
Condition =
'let' Pat '=' Expr
| Expr
LoopExpr =
Attr* Label? 'loop'
loop_body:BlockExpr
ForExpr =
Attr* Label? 'for' Pat 'in' iterable:Expr
loop_body:BlockExpr
WhileExpr =
Attr* Label? 'while' Condition
loop_body:BlockExpr
Label =
'lifetime'
BreakExpr =
Attr* 'break' 'lifetime'? Expr?
ContinueExpr =
Attr* 'continue' 'lifetime'?
RangeExpr =
Attr* start:Expr? op:('..' | '..=') end:Expr?
MatchExpr =
Attr* 'match' Expr MatchArmList
MatchArmList =
'{'
Attr*
arms:MatchArm*
'}'
MatchArm =
Attr* Pat guard:MatchGuard? '=>' Expr ','?
MatchGuard =
'if' Expr
ReturnExpr =
Attr* 'return' Expr?
AwaitExpr =
Attr* Expr '.' 'await'
BoxExpr =
Attr* 'box' Expr
//*************************//
// Types //
//*************************//
Type =
ArrayType
| DynTraitType
| FnPointerType
| ForType
| ImplTraitType
| InferType
| NeverType
| ParenType
| PathType
| PointerType
| ReferenceType
| SliceType
| TupleType
ParenType =
'(' Type ')'
NeverType =
'!'
PathType =
Path
TupleType =
'(' fields:(Type (',' Type)* ','?)? ')'
PointerType =
'*' ('const' | 'mut') Type
ReferenceType =
'&' 'lifetime'? 'mut'? Type
ArrayType =
'[' Type ';' Expr ']'
SliceType =
'[' Type ']'
InferType =
'_'
FnPointerType =
'const'? 'async'? 'unsafe'? Abi? 'fn' ParamList RetType?
ForType =
'for' GenericParamList Type
ImplTraitType =
'impl' TypeBoundList
DynTraitType =
'dyn' TypeBoundList
TypeBoundList =
bounds:(TypeBound ('+' TypeBound)* '+'?)
TypeBound =
'lifetime'
| '?'? Type
//************************//
// Patterns //
//************************//
Pat =
IdentPat
| BoxPat
| RestPat
| LiteralPat
| MacroPat
| OrPat
| ParenPat
| PathPat
| WildcardPat
| RangePat
| RecordPat
| RefPat
| SlicePat
| TuplePat
| TupleStructPat
LiteralPat =
Literal
IdentPat =
Attr* 'ref'? 'mut'? Name ('@' Pat)?
WildcardPat =
'_'
RangePat =
start:Pat op:('..' | '..=') end:Pat
RefPat =
'&' 'mut'? Pat
RecordPat =
Path RecordPatFieldList
RecordPatFieldList =
'{'
fields:(RecordPatField (',' RecordPatField)* ','?)
'..'?
'}'
RecordPatField =
Attr* (NameRef ':')? Pat
TupleStructPat =
Path '(' fields:(Pat (',' Pat)* ','?)? ')'
TuplePat =
'(' fields:(Pat (',' Pat)* ','?)? ')'
ParenPat =
'(' Pat ')'
SlicePat =
'[' (Pat (',' Pat)* ','?)? ']'
PathPat =
Path
OrPat =
(Pat ('|' Pat)* '|'?)
BoxPat =
'box' Pat
RestPat =
'..'
MacroPat =
MacroCall