Rename ra_prof -> profile

Aleksey Kladov 2020-08-12 16:32:36 +02:00
parent 98baa9b569
commit 208b7bd7ba
61 changed files with 154 additions and 188 deletions
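In practice the rename is the same one-line change at every call site: the ra_prof crate becomes profile, the free function profile(..) becomes profile::span(..), and Profiler becomes ProfileSpan. A minimal sketch of the new call-site shape, assuming the renamed profile crate is declared as a dependency (the label comes from one of the hunks below):

fn process_changes_example() {
    // Old: use ra_prof::profile;  let _p = profile("GlobalState::process_changes");
    // New: no import is needed; the crate-qualified path is used instead.
    let _p = profile::span("GlobalState::process_changes")
        .detail(|| "optional extra context".to_string());
    // ... guarded work; the span is reported on stderr when _p is dropped.
}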

Cargo.lock (generated, 42 changed lines)

@ -894,6 +894,18 @@ dependencies = [
"unicode-xid",
]
[[package]]
name = "profile"
version = "0.0.0"
dependencies = [
"arena",
"backtrace",
"cfg-if",
"libc",
"once_cell",
"perf-event",
]
[[package]]
name = "quote"
version = "1.0.7"
@ -909,11 +921,11 @@ version = "0.1.0"
dependencies = [
"either",
"itertools",
"profile",
"ra_db",
"ra_fmt",
"ra_hir",
"ra_ide_db",
"ra_prof",
"ra_syntax",
"ra_text_edit",
"rustc-hash",
@ -935,8 +947,8 @@ dependencies = [
name = "ra_db"
version = "0.1.0"
dependencies = [
"profile",
"ra_cfg",
"ra_prof",
"ra_syntax",
"ra_tt",
"rustc-hash",
@ -962,11 +974,11 @@ dependencies = [
"either",
"itertools",
"log",
"profile",
"ra_db",
"ra_hir_def",
"ra_hir_expand",
"ra_hir_ty",
"ra_prof",
"ra_syntax",
"rustc-hash",
"stdx",
@ -986,11 +998,11 @@ dependencies = [
"itertools",
"log",
"once_cell",
"profile",
"ra_cfg",
"ra_db",
"ra_hir_expand",
"ra_mbe",
"ra_prof",
"ra_syntax",
"ra_tt",
"rustc-hash",
@ -1006,10 +1018,10 @@ dependencies = [
"arena",
"either",
"log",
"profile",
"ra_db",
"ra_mbe",
"ra_parser",
"ra_prof",
"ra_syntax",
"ra_tt",
"rustc-hash",
@ -1029,10 +1041,10 @@ dependencies = [
"expect",
"itertools",
"log",
"profile",
"ra_db",
"ra_hir_def",
"ra_hir_expand",
"ra_prof",
"ra_syntax",
"rustc-hash",
"scoped-tls",
@ -1054,13 +1066,13 @@ dependencies = [
"itertools",
"log",
"oorandom",
"profile",
"ra_assists",
"ra_cfg",
"ra_db",
"ra_fmt",
"ra_hir",
"ra_ide_db",
"ra_prof",
"ra_ssr",
"ra_syntax",
"ra_text_edit",
@ -1077,9 +1089,9 @@ dependencies = [
"fst",
"log",
"once_cell",
"profile",
"ra_db",
"ra_hir",
"ra_prof",
"ra_syntax",
"ra_text_edit",
"rayon",
@ -1137,18 +1149,6 @@ dependencies = [
"test_utils",
]
[[package]]
name = "ra_prof"
version = "0.1.0"
dependencies = [
"arena",
"backtrace",
"cfg-if",
"libc",
"once_cell",
"perf-event",
]
[[package]]
name = "ra_project_model"
version = "0.1.0"
@ -1314,6 +1314,7 @@ dependencies = [
"oorandom",
"parking_lot",
"pico-args",
"profile",
"ra_cfg",
"ra_db",
"ra_hir",
@ -1323,7 +1324,6 @@ dependencies = [
"ra_ide_db",
"ra_mbe",
"ra_proc_macro_srv",
"ra_prof",
"ra_project_model",
"ra_ssr",
"ra_syntax",


@ -1,20 +1,20 @@
[package]
edition = "2018"
name = "ra_prof"
version = "0.1.0"
authors = ["rust-analyzer developers"]
publish = false
name = "profile"
version = "0.0.0"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
[lib]
doctest = false
[dependencies]
arena = { path = "../arena" }
once_cell = "1.3.1"
backtrace = { version = "0.3.44", optional = true }
cfg-if = "0.1.10"
libc = "0.2.73"
backtrace = { version = "0.3.44", optional = true }
arena = { path = "../arena" }
[target.'cfg(target_os = "linux")'.dependencies]
perf-event = "0.4"


@ -37,19 +37,16 @@ pub type Label = &'static str;
///
/// # Example
/// ```
/// use ra_prof::{profile, set_filter, Filter};
///
/// let f = Filter::from_spec("profile1|profile2@2");
/// set_filter(f);
/// profile::init_from("profile1|profile2@2");
/// profiling_function1();
///
/// fn profiling_function1() {
/// let _p = profile("profile1");
/// let _p = profile::span("profile1");
/// profiling_function2();
/// }
///
/// fn profiling_function2() {
/// let _p = profile("profile2");
/// let _p = profile::span("profile2");
/// }
/// ```
/// This will print in the stderr the following:
@ -57,27 +54,27 @@ pub type Label = &'static str;
/// 0ms - profile
/// 0ms - profile2
/// ```
pub fn profile(label: Label) -> Profiler {
pub fn span(label: Label) -> ProfileSpan {
assert!(!label.is_empty());
if PROFILING_ENABLED.load(Ordering::Relaxed)
&& PROFILE_STACK.with(|stack| stack.borrow_mut().push(label))
{
Profiler(Some(ProfilerImpl { label, detail: None }))
ProfileSpan(Some(ProfilerImpl { label, detail: None }))
} else {
Profiler(None)
ProfileSpan(None)
}
}
pub struct Profiler(Option<ProfilerImpl>);
pub struct ProfileSpan(Option<ProfilerImpl>);
struct ProfilerImpl {
label: Label,
detail: Option<String>,
}
impl Profiler {
pub fn detail(mut self, detail: impl FnOnce() -> String) -> Profiler {
impl ProfileSpan {
pub fn detail(mut self, detail: impl FnOnce() -> String) -> ProfileSpan {
if let Some(profiler) = &mut self.0 {
profiler.detail = Some(detail())
}
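For reference, the renamed entry points keep the behaviour described in the doc comment above. A small sketch of a filtered, nested pair of spans, reusing the names from the doctest (assumes the profile crate as a dependency):

fn nested_spans_example() {
    // Initialise with the same filter spec as the doctest above.
    profile::init_from("profile1|profile2@2");

    let _outer = profile::span("profile1");
    {
        let _inner = profile::span("profile2").detail(|| "inner work".to_string());
        // Timings are printed to stderr when the spans are dropped.
    }
}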


@ -10,7 +10,7 @@ mod tree;
use std::cell::RefCell;
pub use crate::{
hprof::{init, init_from, profile},
hprof::{init, init_from, span},
memory_usage::{Bytes, MemoryUsage},
stop_watch::{StopWatch, StopWatchSpan},
};
@ -25,7 +25,7 @@ pub fn print_backtrace() {
pub fn print_backtrace() {
eprintln!(
r#"enable the backtrace feature:
ra_prof = {{ path = "../ra_prof", features = [ "backtrace"] }}
profile = {{ path = "../profile", features = [ "backtrace"] }}
"#
);
}
@ -76,12 +76,12 @@ impl Drop for Scope {
///
/// https://github.com/rust-analyzer/rust-analyzer/pull/5306
#[derive(Debug)]
pub struct CpuProfiler {
pub struct CpuSpan {
_private: (),
}
#[must_use]
pub fn cpu_profiler() -> CpuProfiler {
pub fn cpu_span() -> CpuSpan {
#[cfg(feature = "cpu_profiler")]
{
google_cpu_profiler::start("./out.profile".as_ref())
@ -92,10 +92,10 @@ pub fn cpu_profiler() -> CpuProfiler {
eprintln!("cpu_profiler feature is disabled")
}
CpuProfiler { _private: () }
CpuSpan { _private: () }
}
impl Drop for CpuProfiler {
impl Drop for CpuSpan {
fn drop(&mut self) {
#[cfg(feature = "cpu_profiler")]
{
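The CPU-profiler guard gets the same treatment: cpu_profiler() becomes cpu_span() and CpuProfiler becomes CpuSpan. A sketch of the intended use, assuming the optional cpu_profiler feature is enabled (with the feature off, the call only prints the warning shown above):

fn cpu_profile_example() {
    // Starts writing ./out.profile while the guard is alive (feature-gated).
    let _guard = profile::cpu_span();
    // ... workload to profile ...
    // The Drop impl above ends the profiling session when _guard goes out of scope.
}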


@ -18,7 +18,7 @@ stdx = { path = "../stdx" }
ra_syntax = { path = "../ra_syntax" }
ra_text_edit = { path = "../ra_text_edit" }
ra_fmt = { path = "../ra_fmt" }
ra_prof = { path = "../ra_prof" }
profile = { path = "../profile" }
ra_db = { path = "../ra_db" }
ra_ide_db = { path = "../ra_ide_db" }
hir = { path = "../ra_hir", package = "ra_hir" }


@ -110,7 +110,7 @@ fn add_missing_impl_members_inner(
assist_id: &'static str,
label: &'static str,
) -> Option<()> {
let _p = ra_prof::profile("add_missing_impl_members_inner");
let _p = profile::span("add_missing_impl_members_inner");
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
let impl_item_list = impl_def.assoc_item_list()?;


@ -6,7 +6,6 @@ use hir::{
Type,
};
use ra_ide_db::{imports_locator, RootDatabase};
use ra_prof::profile;
use ra_syntax::{
ast::{self, AstNode},
SyntaxNode,
@ -130,7 +129,7 @@ impl AutoImportAssets {
}
fn search_for_imports(&self, ctx: &AssistContext) -> BTreeSet<ModPath> {
let _p = profile("auto_import::search_for_imports");
let _p = profile::span("auto_import::search_for_imports");
let db = ctx.db();
let current_crate = self.module_with_name_to_import.krate();
imports_locator::find_imports(&ctx.sema, current_crate, &self.get_search_query())


@ -14,7 +14,7 @@ rustc-hash = "1.1.0"
ra_syntax = { path = "../ra_syntax" }
ra_cfg = { path = "../ra_cfg" }
ra_prof = { path = "../ra_prof" }
profile = { path = "../profile" }
ra_tt = { path = "../ra_tt" }
test_utils = { path = "../test_utils" }
vfs = { path = "../vfs" }


@ -5,7 +5,6 @@ pub mod fixture;
use std::{panic, sync::Arc};
use ra_prof::profile;
use ra_syntax::{ast, Parse, SourceFile, TextRange, TextSize};
use rustc_hash::FxHashSet;
@ -113,7 +112,7 @@ pub trait SourceDatabase: CheckCanceled + FileLoader + std::fmt::Debug {
}
fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
let _p = profile("parse_query").detail(|| format!("{:?}", file_id));
let _p = profile::span("parse_query").detail(|| format!("{:?}", file_id));
let text = db.file_text(file_id);
SourceFile::parse(&*text)
}


@ -19,7 +19,7 @@ itertools = "0.9.0"
stdx = { path = "../stdx" }
ra_syntax = { path = "../ra_syntax" }
ra_db = { path = "../ra_db" }
ra_prof = { path = "../ra_prof" }
profile = { path = "../profile" }
hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" }
hir_def = { path = "../ra_hir_def", package = "ra_hir_def" }
hir_ty = { path = "../ra_hir_ty", package = "ra_hir_ty" }


@ -31,7 +31,6 @@ use hir_ty::{
InEnvironment, Substs, TraitEnvironment, Ty, TyDefId, TypeCtor,
};
use ra_db::{CrateId, Edition, FileId};
use ra_prof::profile;
use ra_syntax::{
ast::{self, AttrsOwner, NameOwner},
AstNode,
@ -304,7 +303,7 @@ impl Module {
}
pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) {
let _p = profile("Module::diagnostics");
let _p = profile::span("Module::diagnostics");
let crate_def_map = db.crate_def_map(self.id.krate);
crate_def_map.add_diagnostics(db.upcast(), self.id.local_id, sink);
for decl in self.declarations(db) {


@ -12,7 +12,6 @@ use hir_expand::{hygiene::Hygiene, name::AsName, ExpansionInfo};
use hir_ty::associated_type_shorthand_candidates;
use itertools::Itertools;
use ra_db::{FileId, FileRange};
use ra_prof::profile;
use ra_syntax::{
algo::{find_node_at_offset, skip_trivia_token},
ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextSize,
@ -334,7 +333,7 @@ impl<'db> SemanticsImpl<'db> {
}
fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
let _p = profile("descend_into_macros");
let _p = profile::span("descend_into_macros");
let parent = token.parent();
let parent = self.find_file(parent);
let sa = self.analyze2(parent.as_ref(), None);
@ -523,7 +522,7 @@ impl<'db> SemanticsImpl<'db> {
}
fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextSize>) -> SourceAnalyzer {
let _p = profile("Semantics::analyze2");
let _p = profile::span("Semantics::analyze2");
let container = match self.with_ctx(|ctx| ctx.find_container(src)) {
Some(it) => it,


@ -10,7 +10,6 @@ use hir_def::{
};
use hir_expand::{name::AsName, AstId, MacroDefKind};
use ra_db::FileId;
use ra_prof::profile;
use ra_syntax::{
ast::{self, NameOwner},
match_ast, AstNode, SyntaxNode,
@ -29,7 +28,7 @@ pub(super) struct SourceToDefCtx<'a, 'b> {
impl SourceToDefCtx<'_, '_> {
pub(super) fn file_to_def(&mut self, file: FileId) -> Option<ModuleId> {
let _p = profile("SourceBinder::to_module_def");
let _p = profile::span("SourceBinder::to_module_def");
let (krate, local_id) = self.db.relevant_crates(file).iter().find_map(|&crate_id| {
let crate_def_map = self.db.crate_def_map(crate_id);
let local_id = crate_def_map.modules_for_file(file).next()?;
@ -39,7 +38,7 @@ impl SourceToDefCtx<'_, '_> {
}
pub(super) fn module_to_def(&mut self, src: InFile<ast::Module>) -> Option<ModuleId> {
let _p = profile("module_to_def");
let _p = profile::span("module_to_def");
let parent_declaration = src
.as_ref()
.map(|it| it.syntax())


@ -25,7 +25,7 @@ stdx = { path = "../stdx" }
arena = { path = "../arena" }
ra_db = { path = "../ra_db" }
ra_syntax = { path = "../ra_syntax" }
ra_prof = { path = "../ra_prof" }
profile = { path = "../profile" }
hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" }
test_utils = { path = "../test_utils" }
mbe = { path = "../ra_mbe", package = "ra_mbe" }


@ -11,7 +11,6 @@ use either::Either;
use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, AstId, HirFileId, InFile, MacroDefId};
use ra_cfg::CfgOptions;
use ra_db::CrateId;
use ra_prof::profile;
use ra_syntax::{ast, AstNode, AstPtr};
use rustc_hash::FxHashMap;
use test_utils::mark;
@ -228,7 +227,7 @@ impl Body {
db: &dyn DefDatabase,
def: DefWithBodyId,
) -> (Arc<Body>, Arc<BodySourceMap>) {
let _p = profile("body_with_source_map_query");
let _p = profile::span("body_with_source_map_query");
let mut params = None;
let (file_id, module, body) = match def {


@ -3,7 +3,6 @@
use std::sync::Arc;
use hir_expand::{name::Name, InFile};
use ra_prof::profile;
use ra_syntax::ast;
use crate::{
@ -133,7 +132,7 @@ pub struct ImplData {
impl ImplData {
pub(crate) fn impl_data_query(db: &dyn DefDatabase, id: ImplId) -> Arc<ImplData> {
let _p = profile("impl_data_query");
let _p = profile::span("impl_data_query");
let impl_loc = id.lookup(db);
let item_tree = db.item_tree(impl_loc.id.file_id);


@ -3,7 +3,6 @@ use std::sync::Arc;
use hir_expand::{db::AstDatabase, HirFileId};
use ra_db::{salsa, CrateId, SourceDatabase, Upcast};
use ra_prof::profile;
use ra_syntax::SmolStr;
use crate::{
@ -116,6 +115,6 @@ pub trait DefDatabase: InternDatabase + AstDatabase + Upcast<dyn AstDatabase> {
}
fn crate_def_map_wait(db: &impl DefDatabase, krate: CrateId) -> Arc<CrateDefMap> {
let _p = profile("crate_def_map:wait");
let _p = profile::span("crate_def_map:wait");
db.crate_def_map_query(krate)
}


@ -1,7 +1,6 @@
//! An algorithm to find a path to refer to a certain item.
use hir_expand::name::{known, AsName, Name};
use ra_prof::profile;
use rustc_hash::FxHashSet;
use test_utils::mark;
@ -18,7 +17,7 @@ use crate::{
/// Find a path that can be used to refer to a certain item. This can depend on
/// *from where* you're referring to the item, hence the `from` parameter.
pub fn find_path(db: &dyn DefDatabase, item: ItemInNs, from: ModuleId) -> Option<ModPath> {
let _p = profile("find_path");
let _p = profile::span("find_path");
find_path_inner(db, item, from, MAX_PATH_LEN)
}
@ -215,7 +214,7 @@ fn find_local_import_locations(
item: ItemInNs,
from: ModuleId,
) -> Vec<(ModuleId, Name)> {
let _p = profile("find_local_import_locations");
let _p = profile::span("find_local_import_locations");
// `from` can import anything below `from` with visibility of at least `from`, and anything
// above `from` with any visibility. That means we do not need to descend into private siblings


@ -11,7 +11,6 @@ use hir_expand::{
InFile,
};
use ra_db::FileId;
use ra_prof::profile;
use ra_syntax::ast::{self, GenericParamsOwner, NameOwner, TypeBoundsOwner};
use crate::{
@ -73,7 +72,7 @@ impl GenericParams {
db: &dyn DefDatabase,
def: GenericDefId,
) -> Arc<GenericParams> {
let _p = profile("generic_params_query");
let _p = profile::span("generic_params_query");
let generics = match def {
GenericDefId::FunctionId(id) => {


@ -56,7 +56,7 @@ pub struct ImportMap {
impl ImportMap {
pub fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
let _p = ra_prof::profile("import_map_query");
let _p = profile::span("import_map_query");
let def_map = db.crate_def_map(krate);
let mut import_map = Self::default();
@ -254,7 +254,7 @@ pub fn search_dependencies<'a>(
krate: CrateId,
query: Query,
) -> Vec<ItemInNs> {
let _p = ra_prof::profile("search_dependencies").detail(|| format!("{:?}", query));
let _p = profile::span("search_dependencies").detail(|| format!("{:?}", query));
let graph = db.crate_graph();
let import_maps: Vec<_> =


@ -77,7 +77,7 @@ pub struct ItemTree {
impl ItemTree {
pub fn item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
let _p = ra_prof::profile("item_tree_query").detail(|| format!("{:?}", file_id));
let _p = profile::span("item_tree_query").detail(|| format!("{:?}", file_id));
let syntax = if let Some(node) = db.parse_or_expand(file_id) {
node
} else {


@ -4,7 +4,6 @@
//! features, such as Fn family of traits.
use std::sync::Arc;
use ra_prof::profile;
use ra_syntax::SmolStr;
use rustc_hash::FxHashMap;
@ -79,7 +78,7 @@ impl LangItems {
/// Salsa query. This will look for lang items in a specific crate.
pub(crate) fn crate_lang_items_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<LangItems> {
let _p = profile("crate_lang_items_query");
let _p = profile::span("crate_lang_items_query");
let mut lang_items = LangItems::default();
@ -98,7 +97,7 @@ impl LangItems {
db: &dyn DefDatabase,
module: ModuleId,
) -> Option<Arc<LangItems>> {
let _p = profile("module_lang_items_query");
let _p = profile::span("module_lang_items_query");
let mut lang_items = LangItems::default();
lang_items.collect_lang_items(db, module);
if lang_items.items.is_empty() {
@ -115,7 +114,7 @@ impl LangItems {
start_crate: CrateId,
item: SmolStr,
) -> Option<LangItemTarget> {
let _p = profile("lang_item_query");
let _p = profile::span("lang_item_query");
let lang_items = db.crate_lang_items(start_crate);
let start_crate_target = lang_items.items.get(&item);
if let Some(target) = start_crate_target {


@ -59,7 +59,6 @@ use std::sync::Arc;
use arena::Arena;
use hir_expand::{diagnostics::DiagnosticSink, name::Name, InFile};
use ra_db::{CrateId, Edition, FileId};
use ra_prof::profile;
use ra_syntax::ast;
use rustc_hash::FxHashMap;
use stdx::format_to;
@ -172,7 +171,7 @@ pub struct ModuleData {
impl CrateDefMap {
pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<CrateDefMap> {
let _p = profile("crate_def_map_query").detail(|| {
let _p = profile::span("crate_def_map_query").detail(|| {
db.crate_graph()[krate]
.display_name
.as_ref()


@ -17,7 +17,7 @@ arena = { path = "../arena" }
ra_db = { path = "../ra_db" }
ra_syntax = { path = "../ra_syntax" }
ra_parser = { path = "../ra_parser" }
ra_prof = { path = "../ra_prof" }
profile = { path = "../profile" }
tt = { path = "../ra_tt", package = "ra_tt" }
mbe = { path = "../ra_mbe", package = "ra_mbe" }
test_utils = { path = "../test_utils"}


@ -5,7 +5,6 @@ use std::sync::Arc;
use mbe::{ExpandResult, MacroRules};
use ra_db::{salsa, SourceDatabase};
use ra_parser::FragmentKind;
use ra_prof::profile;
use ra_syntax::{algo::diff, AstNode, GreenNode, Parse, SyntaxKind::*, SyntaxNode};
use crate::{
@ -278,7 +277,7 @@ pub fn parse_macro_with_arg(
macro_file: MacroFile,
arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>,
) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
let _p = profile("parse_macro_query");
let _p = profile::span("parse_macro_query");
let macro_call_id = macro_file.macro_call_id;
let (tt, err) = if let Some(arg) = arg {


@ -22,7 +22,7 @@ hir_def = { path = "../ra_hir_def", package = "ra_hir_def" }
hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" }
arena = { path = "../arena" }
ra_db = { path = "../ra_db" }
ra_prof = { path = "../ra_prof" }
profile = { path = "../profile" }
ra_syntax = { path = "../ra_syntax" }
test_utils = { path = "../test_utils" }


@ -8,7 +8,6 @@ use hir_def::{
TypeParamId, VariantId,
};
use ra_db::{impl_intern_key, salsa, CrateId, Upcast};
use ra_prof::profile;
use crate::{
method_resolution::{InherentImpls, TraitImpls},
@ -123,7 +122,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
}
fn infer_wait(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
let _p = profile("infer:wait").detail(|| match def {
let _p = profile::span("infer:wait").detail(|| match def {
DefWithBodyId::FunctionId(it) => db.function_data(it).name.to_string(),
DefWithBodyId::StaticId(it) => {
db.static_data(it).name.clone().unwrap_or_else(Name::missing).to_string()


@ -8,7 +8,6 @@ use std::any::Any;
use hir_def::DefWithBodyId;
use hir_expand::diagnostics::{Diagnostic, DiagnosticSink};
use hir_expand::{name::Name, HirFileId, InFile};
use ra_prof::profile;
use ra_syntax::{ast, AstPtr, SyntaxNodePtr};
use stdx::format_to;
@ -17,7 +16,7 @@ use crate::db::HirDatabase;
pub use crate::diagnostics::expr::{record_literal_missing_fields, record_pattern_missing_fields};
pub fn validate_body(db: &dyn HirDatabase, owner: DefWithBodyId, sink: &mut DiagnosticSink<'_>) {
let _p = profile("validate_body");
let _p = profile::span("validate_body");
let infer = db.infer(owner);
infer.add_diagnostics(db, owner, sink);
let mut validator = expr::ExprValidator::new(owner, infer.clone(), sink);


@ -31,7 +31,6 @@ use hir_def::{
TypeAliasId, VariantId,
};
use hir_expand::{diagnostics::DiagnosticSink, name::name};
use ra_prof::profile;
use ra_syntax::SmolStr;
use rustc_hash::FxHashMap;
use stdx::impl_from;
@ -64,7 +63,7 @@ mod coerce;
/// The entry point of type inference.
pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
let _p = profile("infer_query");
let _p = profile::span("infer_query");
let resolver = def.resolver(db.upcast());
let mut ctx = InferenceContext::new(db, def, resolver);


@ -13,7 +13,6 @@ use hir_def::{
};
use hir_expand::name::Name;
use ra_db::CrateId;
use ra_prof::profile;
use rustc_hash::{FxHashMap, FxHashSet};
use super::Substs;
@ -109,7 +108,7 @@ pub struct TraitImpls {
impl TraitImpls {
pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
let _p = profile("trait_impls_in_crate_query");
let _p = profile::span("trait_impls_in_crate_query");
let mut impls = Self { map: FxHashMap::default() };
let crate_def_map = db.crate_def_map(krate);
@ -135,7 +134,7 @@ impl TraitImpls {
}
pub(crate) fn trait_impls_in_deps_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
let _p = profile("trait_impls_in_deps_query");
let _p = profile::span("trait_impls_in_deps_query");
let crate_graph = db.crate_graph();
let mut res = Self { map: FxHashMap::default() };


@ -5,7 +5,6 @@ use chalk_ir::cast::Cast;
use chalk_solve::Solver;
use hir_def::{lang_item::LangItemTarget, TraitId};
use ra_db::CrateId;
use ra_prof::profile;
use crate::{db::HirDatabase, DebruijnIndex, Substs};
@ -125,7 +124,7 @@ pub(crate) fn trait_solve_query(
krate: CrateId,
goal: Canonical<InEnvironment<Obligation>>,
) -> Option<Solution> {
let _p = profile("trait_solve_query").detail(|| match &goal.value.value {
let _p = profile::span("trait_solve_query").detail(|| match &goal.value.value {
Obligation::Trait(it) => db.trait_data(it.trait_).name.to_string(),
Obligation::Projection(_) => "projection".to_string(),
});


@ -410,7 +410,7 @@ pub(crate) fn impl_datum_query(
krate: CrateId,
impl_id: ImplId,
) -> Arc<ImplDatum> {
let _p = ra_prof::profile("impl_datum");
let _p = profile::span("impl_datum");
debug!("impl_datum {:?}", impl_id);
let impl_: hir_def::ImplId = from_chalk(db, impl_id);
impl_def_datum(db, krate, impl_id, impl_)


@ -27,7 +27,7 @@ ra_db = { path = "../ra_db" }
ra_ide_db = { path = "../ra_ide_db" }
ra_cfg = { path = "../ra_cfg" }
ra_fmt = { path = "../ra_fmt" }
ra_prof = { path = "../ra_prof" }
profile = { path = "../profile" }
test_utils = { path = "../test_utils" }
ra_assists = { path = "../ra_assists" }
ra_ssr = { path = "../ra_ssr" }


@ -10,7 +10,6 @@ use hir::{diagnostics::DiagnosticSinkBuilder, Semantics};
use itertools::Itertools;
use ra_db::SourceDatabase;
use ra_ide_db::RootDatabase;
use ra_prof::profile;
use ra_syntax::{
ast::{self, AstNode},
SyntaxNode, TextRange, T,
@ -33,7 +32,7 @@ pub(crate) fn diagnostics(
file_id: FileId,
enable_experimental: bool,
) -> Vec<Diagnostic> {
let _p = profile("diagnostics");
let _p = profile::span("diagnostics");
let sema = Semantics::new(db);
let parse = db.parse(file_id);
let mut res = Vec::new();


@ -1,6 +1,5 @@
use hir::{Adt, Callable, HirDisplay, Semantics, Type};
use ra_ide_db::RootDatabase;
use ra_prof::profile;
use ra_syntax::{
ast::{self, ArgListOwner, AstNode},
match_ast, Direction, NodeOrToken, SmolStr, SyntaxKind, TextRange, T,
@ -64,7 +63,7 @@ pub(crate) fn inlay_hints(
file_id: FileId,
config: &InlayHintsConfig,
) -> Vec<InlayHint> {
let _p = profile("inlay_hints");
let _p = profile::span("inlay_hints");
let sema = Semantics::new(db);
let file = sema.parse(file_id);


@ -176,7 +176,7 @@ impl AnalysisHost {
self.db.collect_garbage();
}
/// NB: this clears the database
pub fn per_query_memory_usage(&mut self) -> Vec<(String, ra_prof::Bytes)> {
pub fn per_query_memory_usage(&mut self) -> Vec<(String, profile::Bytes)> {
self.db.per_query_memory_usage()
}
pub fn request_cancellation(&mut self) {


@ -17,7 +17,6 @@ use ra_ide_db::{
search::SearchScope,
RootDatabase,
};
use ra_prof::profile;
use ra_syntax::{
algo::find_node_at_offset,
ast::{self, NameOwner},
@ -90,7 +89,7 @@ pub(crate) fn find_all_refs(
position: FilePosition,
search_scope: Option<SearchScope>,
) -> Option<RangeInfo<ReferenceSearchResult>> {
let _p = profile("find_all_refs");
let _p = profile::span("find_all_refs");
let syntax = sema.parse(position.file_id).syntax().clone();
let (opt_name, search_kind) = if let Some(name) =


@ -1,6 +1,7 @@
use std::{fmt, iter::FromIterator, sync::Arc};
use hir::MacroFile;
use profile::{memory_usage, Bytes};
use ra_db::{
salsa::debug::{DebugQueryTable, TableEntry},
FileTextQuery, SourceRootId,
@ -9,7 +10,6 @@ use ra_ide_db::{
symbol_index::{LibrarySymbolsQuery, SymbolIndex},
RootDatabase,
};
use ra_prof::{memory_usage, Bytes};
use ra_syntax::{ast, Parse, SyntaxNode};
use rustc_hash::FxHashMap;


@ -9,7 +9,6 @@ use ra_ide_db::{
defs::{classify_name, classify_name_ref, Definition, NameClass, NameRefClass},
RootDatabase,
};
use ra_prof::profile;
use ra_syntax::{
ast::{self, HasFormatSpecifier},
AstNode, AstToken, Direction, NodeOrToken, SyntaxElement,
@ -46,7 +45,7 @@ pub(crate) fn highlight(
range_to_highlight: Option<TextRange>,
syntactic_name_ref_highlighting: bool,
) -> Vec<HighlightedRange> {
let _p = profile("highlight");
let _p = profile::span("highlight");
let sema = Semantics::new(db);
// Determine the root based on the given range.


@ -24,7 +24,7 @@ stdx = { path = "../stdx" }
ra_syntax = { path = "../ra_syntax" }
ra_text_edit = { path = "../ra_text_edit" }
ra_db = { path = "../ra_db" }
ra_prof = { path = "../ra_prof" }
profile = { path = "../profile" }
test_utils = { path = "../test_utils" }
# ra_ide should depend only on the top-level `hir` package. if you need


@ -3,11 +3,11 @@
use std::{fmt, sync::Arc, time};
use profile::{memory_usage, Bytes};
use ra_db::{
salsa::{Database, Durability, SweepStrategy},
CrateGraph, FileId, SourceDatabase, SourceDatabaseExt, SourceRoot, SourceRootId,
};
use ra_prof::{memory_usage, profile, Bytes};
use rustc_hash::FxHashSet;
use crate::{symbol_index::SymbolsDatabase, RootDatabase};
@ -85,12 +85,12 @@ const GC_COOLDOWN: time::Duration = time::Duration::from_millis(100);
impl RootDatabase {
pub fn request_cancellation(&mut self) {
let _p = profile("RootDatabase::request_cancellation");
let _p = profile::span("RootDatabase::request_cancellation");
self.salsa_runtime_mut().synthetic_write(Durability::LOW);
}
pub fn apply_change(&mut self, change: AnalysisChange) {
let _p = profile("RootDatabase::apply_change");
let _p = profile::span("RootDatabase::apply_change");
self.request_cancellation();
log::info!("apply_change {:?}", change);
if let Some(roots) = change.roots {
@ -141,7 +141,7 @@ impl RootDatabase {
return;
}
let _p = profile("RootDatabase::collect_garbage");
let _p = profile::span("RootDatabase::collect_garbage");
self.last_gc = crate::wasm_shims::Instant::now();
let sweep = SweepStrategy::default().discard_values().sweep_all_revisions();


@ -9,7 +9,6 @@ use hir::{
db::HirDatabase, Crate, Field, HasVisibility, ImplDef, Local, MacroDef, Module, ModuleDef,
Name, PathResolution, Semantics, TypeParam, Visibility,
};
use ra_prof::profile;
use ra_syntax::{
ast::{self, AstNode},
match_ast, SyntaxNode,
@ -110,7 +109,7 @@ impl NameClass {
}
pub fn classify_name(sema: &Semantics<RootDatabase>, name: &ast::Name) -> Option<NameClass> {
let _p = profile("classify_name");
let _p = profile::span("classify_name");
let parent = name.syntax().parent()?;
@ -249,7 +248,7 @@ pub fn classify_name_ref(
sema: &Semantics<RootDatabase>,
name_ref: &ast::NameRef,
) -> Option<NameRefClass> {
let _p = profile("classify_name_ref");
let _p = profile::span("classify_name_ref");
let parent = name_ref.syntax().parent()?;


@ -2,7 +2,6 @@
//! Later, this should be moved away to a separate crate that is accessible from the ra_assists module.
use hir::{Crate, MacroDef, ModuleDef, Semantics};
use ra_prof::profile;
use ra_syntax::{ast, AstNode, SyntaxKind::NAME};
use crate::{
@ -18,7 +17,7 @@ pub fn find_imports<'a>(
krate: Crate,
name_to_import: &str,
) -> Vec<Either<ModuleDef, MacroDef>> {
let _p = profile("search_for_imports");
let _p = profile::span("search_for_imports");
let db = sema.db;
// Query dependencies first.
@ -51,7 +50,7 @@ fn get_name_definition<'a>(
sema: &Semantics<'a, RootDatabase>,
import_candidate: &FileSymbol,
) -> Option<Definition> {
let _p = profile("get_name_definition");
let _p = profile::span("get_name_definition");
let file_id = import_candidate.file_id;
let candidate_node = import_candidate.ptr.to_node(sema.parse(file_id).syntax());


@ -9,7 +9,6 @@ use std::{convert::TryInto, mem};
use hir::{DefWithBody, HasSource, Module, ModuleSource, Semantics, Visibility};
use once_cell::unsync::Lazy;
use ra_db::{FileId, FileRange, SourceDatabaseExt};
use ra_prof::profile;
use ra_syntax::{ast, match_ast, AstNode, TextRange, TextSize};
use rustc_hash::FxHashMap;
@ -107,7 +106,7 @@ impl IntoIterator for SearchScope {
impl Definition {
fn search_scope(&self, db: &RootDatabase) -> SearchScope {
let _p = profile("search_scope");
let _p = profile::span("search_scope");
let module = match self.module(db) {
Some(it) => it,
None => return SearchScope::empty(),
@ -187,7 +186,7 @@ impl Definition {
sema: &Semantics<RootDatabase>,
search_scope: Option<SearchScope>,
) -> Vec<Reference> {
let _p = profile("Definition::find_usages");
let _p = profile::span("Definition::find_usages");
let search_scope = {
let base = self.search_scope(sema.db);


@ -34,7 +34,6 @@ use ra_db::{
salsa::{self, ParallelDatabase},
CrateId, FileId, SourceDatabaseExt, SourceRootId,
};
use ra_prof::profile;
use ra_syntax::{
ast::{self, NameOwner},
match_ast, AstNode, Parse, SmolStr, SourceFile,
@ -101,7 +100,7 @@ pub trait SymbolsDatabase: hir::db::HirDatabase + SourceDatabaseExt {
}
fn library_symbols(db: &dyn SymbolsDatabase) -> Arc<FxHashMap<SourceRootId, SymbolIndex>> {
let _p = profile("library_symbols");
let _p = profile::span("library_symbols");
let roots = db.library_roots();
let res = roots
@ -162,7 +161,7 @@ impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
// | VS Code | kbd:[Ctrl+T]
// |===
pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> {
let _p = ra_prof::profile("world_symbols").detail(|| query.query.clone());
let _p = profile::span("world_symbols").detail(|| query.query.clone());
let tmp1;
let tmp2;


@ -36,7 +36,7 @@ stdx = { path = "../stdx" }
lsp-server = "0.3.3"
flycheck = { path = "../flycheck" }
ra_ide = { path = "../ra_ide" }
ra_prof = { path = "../ra_prof" }
profile = { path = "../profile" }
ra_project_model = { path = "../ra_project_model" }
ra_syntax = { path = "../ra_syntax" }
ra_text_edit = { path = "../ra_text_edit" }


@ -55,7 +55,7 @@ fn try_main() -> Result<()> {
fn setup_logging() -> Result<()> {
std::env::set_var("RUST_BACKTRACE", "short");
env_logger::try_init_from_env("RA_LOG")?;
ra_prof::init();
profile::init();
Ok(())
}


@ -11,7 +11,6 @@ use std::io::Read;
use anyhow::Result;
use ra_ide::Analysis;
use ra_prof::profile;
use ra_syntax::{AstNode, SourceFile};
pub use analysis_bench::{BenchCmd, BenchWhat, Position};
@ -38,7 +37,7 @@ impl Verbosity {
}
pub fn parse(no_dump: bool) -> Result<()> {
let _p = profile("parsing");
let _p = profile::span("parsing");
let file = file()?;
if !no_dump {
println!("{:#?}", file.syntax());


@ -52,7 +52,7 @@ impl FromStr for Position {
impl BenchCmd {
pub fn run(self, verbosity: Verbosity) -> Result<()> {
ra_prof::init();
profile::init();
let start = Instant::now();
eprint!("loading: ");


@ -29,7 +29,7 @@ use crate::{
},
print_memory_usage,
};
use ra_prof::StopWatch;
use profile::StopWatch;
/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
struct Snap<DB>(DB);


@ -27,7 +27,6 @@ use crate::{
to_proto::url_from_abs_path,
Result,
};
use ra_prof::profile;
#[derive(Eq, PartialEq, Copy, Clone)]
pub(crate) enum Status {
@ -135,7 +134,7 @@ impl GlobalState {
}
pub(crate) fn process_changes(&mut self) -> bool {
let _p = profile("GlobalState::process_changes");
let _p = profile::span("GlobalState::process_changes");
let mut fs_changes = Vec::new();
let mut has_fs_changes = false;


@ -22,7 +22,6 @@ use ra_ide::{
FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, NavigationTarget, Query,
RangeInfo, Runnable, RunnableKind, SearchScope, TextEdit,
};
use ra_prof::profile;
use ra_project_model::TargetKind;
use ra_syntax::{algo, ast, AstNode, SyntaxKind, TextRange, TextSize};
use serde::{Deserialize, Serialize};
@ -39,7 +38,7 @@ use crate::{
};
pub(crate) fn handle_analyzer_status(snap: GlobalStateSnapshot, _: ()) -> Result<String> {
let _p = profile("handle_analyzer_status");
let _p = profile::span("handle_analyzer_status");
let mut buf = String::new();
if snap.workspaces.is_empty() {
@ -64,7 +63,7 @@ pub(crate) fn handle_analyzer_status(snap: GlobalStateSnapshot, _: ()) -> Result
}
pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> Result<String> {
let _p = profile("handle_memory_usage");
let _p = profile::span("handle_memory_usage");
let mem = state.analysis_host.per_query_memory_usage();
let mut out = String::new();
@ -78,7 +77,7 @@ pub(crate) fn handle_syntax_tree(
snap: GlobalStateSnapshot,
params: lsp_ext::SyntaxTreeParams,
) -> Result<String> {
let _p = profile("handle_syntax_tree");
let _p = profile::span("handle_syntax_tree");
let id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(id)?;
let text_range = params.range.map(|r| from_proto::text_range(&line_index, r));
@ -90,7 +89,7 @@ pub(crate) fn handle_expand_macro(
snap: GlobalStateSnapshot,
params: lsp_ext::ExpandMacroParams,
) -> Result<Option<lsp_ext::ExpandedMacro>> {
let _p = profile("handle_expand_macro");
let _p = profile::span("handle_expand_macro");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let offset = from_proto::offset(&line_index, params.position);
@ -103,7 +102,7 @@ pub(crate) fn handle_selection_range(
snap: GlobalStateSnapshot,
params: lsp_types::SelectionRangeParams,
) -> Result<Option<Vec<lsp_types::SelectionRange>>> {
let _p = profile("handle_selection_range");
let _p = profile::span("handle_selection_range");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let res: Result<Vec<lsp_types::SelectionRange>> = params
@ -146,7 +145,7 @@ pub(crate) fn handle_matching_brace(
snap: GlobalStateSnapshot,
params: lsp_ext::MatchingBraceParams,
) -> Result<Vec<Position>> {
let _p = profile("handle_matching_brace");
let _p = profile::span("handle_matching_brace");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let res = params
@ -168,7 +167,7 @@ pub(crate) fn handle_join_lines(
snap: GlobalStateSnapshot,
params: lsp_ext::JoinLinesParams,
) -> Result<Vec<lsp_types::TextEdit>> {
let _p = profile("handle_join_lines");
let _p = profile::span("handle_join_lines");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let line_endings = snap.file_line_endings(file_id);
@ -191,7 +190,7 @@ pub(crate) fn handle_on_enter(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
let _p = profile("handle_on_enter");
let _p = profile::span("handle_on_enter");
let position = from_proto::file_position(&snap, params)?;
let edit = match snap.analysis.on_enter(position)? {
None => return Ok(None),
@ -208,7 +207,7 @@ pub(crate) fn handle_on_type_formatting(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentOnTypeFormattingParams,
) -> Result<Option<Vec<lsp_types::TextEdit>>> {
let _p = profile("handle_on_type_formatting");
let _p = profile::span("handle_on_type_formatting");
let mut position = from_proto::file_position(&snap, params.text_document_position)?;
let line_index = snap.analysis.file_line_index(position.file_id)?;
let line_endings = snap.file_line_endings(position.file_id);
@ -247,7 +246,7 @@ pub(crate) fn handle_document_symbol(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentSymbolParams,
) -> Result<Option<lsp_types::DocumentSymbolResponse>> {
let _p = profile("handle_document_symbol");
let _p = profile::span("handle_document_symbol");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
@ -332,7 +331,7 @@ pub(crate) fn handle_workspace_symbol(
snap: GlobalStateSnapshot,
params: lsp_types::WorkspaceSymbolParams,
) -> Result<Option<Vec<SymbolInformation>>> {
let _p = profile("handle_workspace_symbol");
let _p = profile::span("handle_workspace_symbol");
let all_symbols = params.query.contains('#');
let libs = params.query.contains('*');
let query = {
@ -380,7 +379,7 @@ pub(crate) fn handle_goto_definition(
snap: GlobalStateSnapshot,
params: lsp_types::GotoDefinitionParams,
) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
let _p = profile("handle_goto_definition");
let _p = profile::span("handle_goto_definition");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.goto_definition(position)? {
None => return Ok(None),
@ -395,7 +394,7 @@ pub(crate) fn handle_goto_implementation(
snap: GlobalStateSnapshot,
params: lsp_types::request::GotoImplementationParams,
) -> Result<Option<lsp_types::request::GotoImplementationResponse>> {
let _p = profile("handle_goto_implementation");
let _p = profile::span("handle_goto_implementation");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.goto_implementation(position)? {
None => return Ok(None),
@ -410,7 +409,7 @@ pub(crate) fn handle_goto_type_definition(
snap: GlobalStateSnapshot,
params: lsp_types::request::GotoTypeDefinitionParams,
) -> Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> {
let _p = profile("handle_goto_type_definition");
let _p = profile::span("handle_goto_type_definition");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.goto_type_definition(position)? {
None => return Ok(None),
@ -425,7 +424,7 @@ pub(crate) fn handle_parent_module(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
let _p = profile("handle_parent_module");
let _p = profile::span("handle_parent_module");
let position = from_proto::file_position(&snap, params)?;
let navs = snap.analysis.parent_module(position)?;
let res = to_proto::goto_definition_response(&snap, None, navs)?;
@ -436,7 +435,7 @@ pub(crate) fn handle_runnables(
snap: GlobalStateSnapshot,
params: lsp_ext::RunnablesParams,
) -> Result<Vec<lsp_ext::Runnable>> {
let _p = profile("handle_runnables");
let _p = profile::span("handle_runnables");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let offset = params.position.map(|it| from_proto::offset(&line_index, it));
@ -513,7 +512,7 @@ pub(crate) fn handle_completion(
snap: GlobalStateSnapshot,
params: lsp_types::CompletionParams,
) -> Result<Option<lsp_types::CompletionResponse>> {
let _p = profile("handle_completion");
let _p = profile::span("handle_completion");
let position = from_proto::file_position(&snap, params.text_document_position)?;
let completion_triggered_after_single_colon = {
let mut res = false;
@ -555,7 +554,7 @@ pub(crate) fn handle_folding_range(
snap: GlobalStateSnapshot,
params: FoldingRangeParams,
) -> Result<Option<Vec<FoldingRange>>> {
let _p = profile("handle_folding_range");
let _p = profile::span("handle_folding_range");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let folds = snap.analysis.folding_ranges(file_id)?;
let text = snap.analysis.file_text(file_id)?;
@ -572,7 +571,7 @@ pub(crate) fn handle_signature_help(
snap: GlobalStateSnapshot,
params: lsp_types::SignatureHelpParams,
) -> Result<Option<lsp_types::SignatureHelp>> {
let _p = profile("handle_signature_help");
let _p = profile::span("handle_signature_help");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let call_info = match snap.analysis.call_info(position)? {
Some(it) => it,
@ -591,7 +590,7 @@ pub(crate) fn handle_hover(
snap: GlobalStateSnapshot,
params: lsp_types::HoverParams,
) -> Result<Option<lsp_ext::Hover>> {
let _p = profile("handle_hover");
let _p = profile::span("handle_hover");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let info = match snap.analysis.hover(position)? {
None => return Ok(None),
@ -614,7 +613,7 @@ pub(crate) fn handle_prepare_rename(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> Result<Option<PrepareRenameResponse>> {
let _p = profile("handle_prepare_rename");
let _p = profile::span("handle_prepare_rename");
let position = from_proto::file_position(&snap, params)?;
let optional_change = snap.analysis.rename(position, "dummy")?;
@ -632,7 +631,7 @@ pub(crate) fn handle_rename(
snap: GlobalStateSnapshot,
params: RenameParams,
) -> Result<Option<WorkspaceEdit>> {
let _p = profile("handle_rename");
let _p = profile::span("handle_rename");
let position = from_proto::file_position(&snap, params.text_document_position)?;
if params.new_name.is_empty() {
@ -656,7 +655,7 @@ pub(crate) fn handle_references(
snap: GlobalStateSnapshot,
params: lsp_types::ReferenceParams,
) -> Result<Option<Vec<Location>>> {
let _p = profile("handle_references");
let _p = profile::span("handle_references");
let position = from_proto::file_position(&snap, params.text_document_position)?;
let refs = match snap.analysis.find_all_refs(position, None)? {
@ -683,7 +682,7 @@ pub(crate) fn handle_formatting(
snap: GlobalStateSnapshot,
params: DocumentFormattingParams,
) -> Result<Option<Vec<lsp_types::TextEdit>>> {
let _p = profile("handle_formatting");
let _p = profile::span("handle_formatting");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let file = snap.analysis.file_text(file_id)?;
let crate_ids = snap.analysis.crate_for(file_id)?;
@ -805,7 +804,7 @@ pub(crate) fn handle_code_action(
mut snap: GlobalStateSnapshot,
params: lsp_types::CodeActionParams,
) -> Result<Option<Vec<lsp_ext::CodeAction>>> {
let _p = profile("handle_code_action");
let _p = profile::span("handle_code_action");
// We intentionally don't support command-based actions, as those either
// requires custom client-code anyway, or requires server-initiated edits.
// Server initiated edits break causality, so we avoid those as well.
@ -847,7 +846,7 @@ pub(crate) fn handle_resolve_code_action(
mut snap: GlobalStateSnapshot,
params: lsp_ext::ResolveCodeActionParams,
) -> Result<Option<lsp_ext::SnippetWorkspaceEdit>> {
let _p = profile("handle_resolve_code_action");
let _p = profile::span("handle_resolve_code_action");
let file_id = from_proto::file_id(&snap, &params.code_action_params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let range = from_proto::text_range(&line_index, params.code_action_params.range);
@ -871,7 +870,7 @@ pub(crate) fn handle_code_lens(
snap: GlobalStateSnapshot,
params: lsp_types::CodeLensParams,
) -> Result<Option<Vec<CodeLens>>> {
let _p = profile("handle_code_lens");
let _p = profile::span("handle_code_lens");
let mut lenses: Vec<CodeLens> = Default::default();
if snap.config.lens.none() {
@ -957,7 +956,7 @@ pub(crate) fn handle_code_lens_resolve(
snap: GlobalStateSnapshot,
code_lens: CodeLens,
) -> Result<CodeLens> {
let _p = profile("handle_code_lens_resolve");
let _p = profile::span("handle_code_lens_resolve");
let data = code_lens.data.unwrap();
let resolve = from_json::<Option<CodeLensResolveData>>("CodeLensResolveData", data)?;
match resolve {
@ -994,7 +993,7 @@ pub(crate) fn handle_document_highlight(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentHighlightParams,
) -> Result<Option<Vec<DocumentHighlight>>> {
let _p = profile("handle_document_highlight");
let _p = profile::span("handle_document_highlight");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let line_index = snap.analysis.file_line_index(position.file_id)?;
@ -1021,7 +1020,7 @@ pub(crate) fn handle_ssr(
snap: GlobalStateSnapshot,
params: lsp_ext::SsrParams,
) -> Result<lsp_types::WorkspaceEdit> {
let _p = profile("handle_ssr");
let _p = profile::span("handle_ssr");
let selections = params
.selections
.iter()
@ -1041,7 +1040,7 @@ pub(crate) fn publish_diagnostics(
snap: &GlobalStateSnapshot,
file_id: FileId,
) -> Result<Vec<Diagnostic>> {
let _p = profile("publish_diagnostics");
let _p = profile::span("publish_diagnostics");
let line_index = snap.analysis.file_line_index(file_id)?;
let diagnostics: Vec<Diagnostic> = snap
.analysis
@ -1064,7 +1063,7 @@ pub(crate) fn handle_inlay_hints(
snap: GlobalStateSnapshot,
params: InlayHintsParams,
) -> Result<Vec<InlayHint>> {
let _p = profile("handle_inlay_hints");
let _p = profile::span("handle_inlay_hints");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis.file_line_index(file_id)?;
Ok(snap
@ -1079,7 +1078,7 @@ pub(crate) fn handle_call_hierarchy_prepare(
snap: GlobalStateSnapshot,
params: CallHierarchyPrepareParams,
) -> Result<Option<Vec<CallHierarchyItem>>> {
let _p = profile("handle_call_hierarchy_prepare");
let _p = profile::span("handle_call_hierarchy_prepare");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.call_hierarchy(position)? {
@ -1101,7 +1100,7 @@ pub(crate) fn handle_call_hierarchy_incoming(
snap: GlobalStateSnapshot,
params: CallHierarchyIncomingCallsParams,
) -> Result<Option<Vec<CallHierarchyIncomingCall>>> {
let _p = profile("handle_call_hierarchy_incoming");
let _p = profile::span("handle_call_hierarchy_incoming");
let item = params.item;
let doc = TextDocumentIdentifier::new(item.uri);
@ -1136,7 +1135,7 @@ pub(crate) fn handle_call_hierarchy_outgoing(
snap: GlobalStateSnapshot,
params: CallHierarchyOutgoingCallsParams,
) -> Result<Option<Vec<CallHierarchyOutgoingCall>>> {
let _p = profile("handle_call_hierarchy_outgoing");
let _p = profile::span("handle_call_hierarchy_outgoing");
let item = params.item;
let doc = TextDocumentIdentifier::new(item.uri);
@ -1171,7 +1170,7 @@ pub(crate) fn handle_semantic_tokens(
snap: GlobalStateSnapshot,
params: SemanticTokensParams,
) -> Result<Option<SemanticTokensResult>> {
let _p = profile("handle_semantic_tokens");
let _p = profile::span("handle_semantic_tokens");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let text = snap.analysis.file_text(file_id)?;
@ -1190,7 +1189,7 @@ pub(crate) fn handle_semantic_tokens_edits(
snap: GlobalStateSnapshot,
params: SemanticTokensEditsParams,
) -> Result<Option<SemanticTokensEditResult>> {
let _p = profile("handle_semantic_tokens_edits");
let _p = profile::span("handle_semantic_tokens_edits");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let text = snap.analysis.file_text(file_id)?;
@ -1220,7 +1219,7 @@ pub(crate) fn handle_semantic_tokens_range(
snap: GlobalStateSnapshot,
params: SemanticTokensRangeParams,
) -> Result<Option<SemanticTokensRangeResult>> {
let _p = profile("handle_semantic_tokens_range");
let _p = profile::span("handle_semantic_tokens_range");
let frange = from_proto::file_range(&snap, params.text_document, params.range)?;
let text = snap.analysis.file_text(frange.file_id)?;


@ -74,16 +74,16 @@ impl std::error::Error for LspError {}
fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) {
let mut mem = host.per_query_memory_usage();
let before = ra_prof::memory_usage();
let before = profile::memory_usage();
drop(vfs);
let vfs = before.allocated - ra_prof::memory_usage().allocated;
let vfs = before.allocated - profile::memory_usage().allocated;
mem.push(("VFS".into(), vfs));
let before = ra_prof::memory_usage();
let before = profile::memory_usage();
drop(host);
mem.push(("Unaccounted".into(), before.allocated - ra_prof::memory_usage().allocated));
mem.push(("Unaccounted".into(), before.allocated - profile::memory_usage().allocated));
mem.push(("Remaining".into(), ra_prof::memory_usage().allocated));
mem.push(("Remaining".into(), profile::memory_usage().allocated));
for (name, bytes) in mem {
eprintln!("{:>8} {}", bytes, name);
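The memory-usage helpers move over unchanged apart from the path. A compact sketch of the before/after delta pattern used in print_memory_usage above (memory_usage and Bytes are re-exported by the renamed profile crate):

fn measure_freed<T>(label: &str, value: T) {
    let before = profile::memory_usage();
    drop(value);
    let freed = before.allocated - profile::memory_usage().allocated;
    eprintln!("{:>8} {}", freed, label);
}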


@ -10,7 +10,6 @@ use lsp_server::{Connection, Notification, Request, Response};
use lsp_types::notification::Notification as _;
use ra_db::VfsPath;
use ra_ide::{Canceled, FileId};
use ra_prof::profile;
use crate::{
config::Config,
@ -173,7 +172,7 @@ impl GlobalState {
fn handle_event(&mut self, event: Event) -> Result<()> {
let loop_start = Instant::now();
// NOTE: don't count blocking select! call as a loop-turn time
let _p = profile("GlobalState::handle_event");
let _p = profile::span("GlobalState::handle_event");
log::info!("handle_event({:?})", event);
let queue_count = self.task_pool.handle.len();
@ -204,7 +203,7 @@ impl GlobalState {
self.analysis_host.maybe_collect_garbage();
}
Event::Vfs(mut task) => {
let _p = profile("GlobalState::handle_event/vfs");
let _p = profile::span("GlobalState::handle_event/vfs");
loop {
match task {
vfs::loader::Message::Loaded { files } => {


@ -4,7 +4,6 @@ use std::{mem, sync::Arc};
use flycheck::FlycheckHandle;
use ra_db::{CrateGraph, SourceRoot, VfsPath};
use ra_ide::AnalysisChange;
use ra_prof::profile;
use ra_project_model::{ProcMacroClient, ProjectWorkspace};
use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};
@ -17,7 +16,7 @@ use crate::{
impl GlobalState {
pub(crate) fn update_configuration(&mut self, config: Config) {
let _p = profile("GlobalState::update_configuration");
let _p = profile::span("GlobalState::update_configuration");
let old_config = mem::replace(&mut self.config, config);
if self.config.lru_capacity != old_config.lru_capacity {
self.analysis_host.update_lru_capacity(old_config.lru_capacity);
@ -115,7 +114,7 @@ impl GlobalState {
});
}
pub(crate) fn switch_workspaces(&mut self, workspaces: Vec<anyhow::Result<ProjectWorkspace>>) {
let _p = profile("GlobalState::switch_workspaces");
let _p = profile::span("GlobalState::switch_workspaces");
log::info!("reloading projects: {:?}", self.config.linked_projects);
let mut has_errors = false;
@ -300,7 +299,7 @@ pub(crate) struct SourceRootConfig {
impl SourceRootConfig {
pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
let _p = profile("SourceRootConfig::partition");
let _p = profile::span("SourceRootConfig::partition");
self.fsc
.partition(vfs)
.into_iter()


@ -62,7 +62,7 @@ impl<'a> Project<'a> {
static INIT: Once = Once::new();
INIT.call_once(|| {
env_logger::builder().is_test(true).try_init().unwrap();
ra_prof::init_from(crate::PROFILE);
profile::init_from(crate::PROFILE);
});
for entry in Fixture::parse(self.fixture) {


@ -197,7 +197,7 @@ impl TidyDocs {
"ra_ide",
"ra_mbe",
"ra_parser",
"ra_prof",
"profile",
"ra_project_model",
"ra_syntax",
"ra_tt",