129: 2018 r=matklad a=matklad



Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
bors[bot] 2018-10-15 17:55:03 +00:00
commit e929355eca
66 changed files with 154 additions and 98 deletions
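This merge moves the whole workspace to the Rust 2018 edition: every crate's Cargo.toml gains `edition = "2018"`, CI switches from the stable toolchain to beta (the edition needs Rust >= 1.30.0, which was still in beta at the time), and crate-local imports throughout the tree are rewritten with an explicit `crate::` prefix. A minimal sketch of the recurring import pattern follows; the module layout is invented for illustration, and only `FileResolverImp` is a name taken from the diff.

```
// Rust 2015 resolved a bare `use` path from the crate root:
//     use imp::FileResolverImp;
// Rust 2018 spells the crate-local origin out explicitly:
use crate::imp::FileResolverImp;

mod imp {
    // Hypothetical stand-in for the real ra_analysis type.
    pub struct FileResolverImp;
}

fn main() {
    let _resolver = FileResolverImp;
}
```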

View File

@ -1,7 +1,7 @@
matrix:
include:
- language: rust
rust: stable
rust: beta
script:
- cargo gen-kinds --verify
- cargo gen-tests --verify

View File

@ -16,7 +16,8 @@ functionality is provided via a language server.
## Quick Start
Rust analyzer builds on stable Rust >= 1.29.0.
Rust analyzer builds on Rust >= 1.30.0 (currently in beta) and uses
the 2018 edition.
```
# run tests

View File

@ -2,7 +2,7 @@ os: Visual Studio 2015
install:
- curl https://win.rustup.rs/ --output rustup-init.exe
- rustup-init -yv --default-toolchain stable --default-host x86_64-pc-windows-msvc
- rustup-init -yv --default-toolchain beta --default-host x86_64-pc-windows-msvc
- set PATH=%PATH%;%USERPROFILE%\.cargo\bin
- rustc -vV
- cargo -vV

View File

@ -1,4 +1,5 @@
[package]
edition = "2018"
name = "ra_analysis"
version = "0.1.0"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]

View File

@ -7,7 +7,7 @@ use std::{
};
use rustc_hash::FxHashMap;
use salsa;
use {FileId, imp::FileResolverImp};
use crate::{FileId, imp::FileResolverImp};
use super::{State, Query, QueryCtx};
pub(super) type Data = Arc<Any + Send + Sync + 'static>;
@ -51,7 +51,7 @@ pub(crate) trait EvalQuery {
type Output;
fn query_type(&self) -> salsa::QueryTypeId;
fn f(&self) -> salsa::QueryFn<State, Data>;
fn get(&self, &QueryCtx, Self::Params) -> Arc<Self::Output>;
fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output>;
}
impl<T, R> EvalQuery for Query<T, R>
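Besides the path change, this hunk names a previously anonymous trait-method parameter: `fn get(&self, &QueryCtx, Self::Params)` becomes `fn get(&self, ctx: &QueryCtx, params: Self::Params)`. Anonymous parameters in trait methods are rejected in the 2018 edition, so every argument needs a name (or `_`). A simplified sketch with placeholder types and a plain return type, not the real `Arc<Self::Output>` signature:

```
struct QueryCtx;

trait EvalQuery {
    type Params;
    type Output;

    // Rust 2015 tolerated `fn get(&self, &QueryCtx, Self::Params)`;
    // in the 2018 edition each parameter must be named (or `_`).
    fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Self::Output;
}

struct Doubler;

impl EvalQuery for Doubler {
    type Params = u32;
    type Output = u32;

    fn get(&self, _ctx: &QueryCtx, params: u32) -> u32 {
        params * 2
    }
}

fn main() {
    assert_eq!(Doubler.get(&QueryCtx, 21), 42);
}
```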

View File

@ -5,7 +5,7 @@ use std::{
};
use im;
use salsa;
use {FileId, imp::FileResolverImp};
use crate::{FileId, imp::FileResolverImp};
#[derive(Debug, Default, Clone)]
pub(crate) struct State {
@ -75,8 +75,8 @@ pub(crate) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
impl QueryRegistry {
fn new() -> QueryRegistry {
let mut reg = QueryRegistry { imp: imp::QueryRegistry::new() };
::queries::register_queries(&mut reg);
::module_map::register_queries(&mut reg);
crate::queries::register_queries(&mut reg);
crate::module_map::register_queries(&mut reg);
reg
}
pub(crate) fn add<Q: imp::EvalQuery>(&mut self, q: Q, name: &'static str) {

View File

@ -7,7 +7,7 @@ use ra_syntax::{
ast::{self, NameOwner, AstNode},
text_utils::is_subrange
};
use {
use crate::{
FileId,
imp::FileResolverImp,
};
@ -271,4 +271,4 @@ impl FnDescriptor {
}
res
}
}
}

View File

@ -17,7 +17,7 @@ use ra_syntax::{
ast::{self, NameOwner, ArgListOwner, Expr},
};
use {
use crate::{
FileId, FileResolver, Query, Diagnostic, SourceChange, SourceFileEdit, Position, FileSystemEdit,
JobToken, CrateGraph, CrateId,
roots::{SourceRoot, ReadonlySourceRoot, WritableSourceRoot},
@ -458,4 +458,4 @@ impl<'a> FnCallNode<'a> {
FnCallNode::MethodCallExpr(expr) => expr.arg_list()
}
}
}
}

View File

@ -29,16 +29,18 @@ use std::{
use relative_path::{RelativePath, RelativePathBuf};
use ra_syntax::{File, TextRange, TextUnit, AtomEdit};
use imp::{AnalysisImpl, AnalysisHostImpl, FileResolverImp};
use rustc_hash::FxHashMap;
use crate::imp::{AnalysisImpl, AnalysisHostImpl, FileResolverImp};
pub use ra_editor::{
StructureNode, LineIndex, FileSymbol,
Runnable, RunnableKind, HighlightedRange, CompletionItem,
Fold, FoldKind
};
pub use job::{JobToken, JobHandle};
pub use descriptors::FnDescriptor;
pub use crate::{
job::{JobToken, JobHandle},
descriptors::FnDescriptor,
};
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct FileId(pub u32);
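The re-exports here also collapse into a single nested `pub use crate::{ ... }` group: under the 2018 path rules the `crate::` prefix is required for re-exporting local modules, and a nested group lets it be written once. A small self-contained sketch, with the module contents invented:

```
// Hypothetical modules standing in for `job` and `descriptors`.
mod job {
    pub struct JobHandle;
}
mod descriptors {
    pub struct FnDescriptor;
}

// One nested group re-exports items from several crate-local modules
// behind a single `crate::` prefix.
pub use crate::{
    job::JobHandle,
    descriptors::FnDescriptor,
};

fn main() {
    let (_h, _d) = (JobHandle, FnDescriptor);
}
```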

View File

@ -1,5 +1,5 @@
use std::sync::Arc;
use {
use crate::{
FileId,
db::{
Query, QueryRegistry, QueryCtx,
@ -38,7 +38,7 @@ mod tests {
use std::collections::HashMap;
use im;
use relative_path::{RelativePath, RelativePathBuf};
use {
use crate::{
db::{Db},
imp::FileResolverImp,
FileId, FileResolver,

View File

@ -1,13 +1,13 @@
use std::sync::Arc;
use ra_syntax::File;
use ra_editor::LineIndex;
use {
use crate::{
FileId,
db::{Query, QueryCtx, QueryRegistry},
symbol_index::SymbolIndex,
};
pub(crate) use db::{file_text, file_set};
pub(crate) use crate::db::{file_text, file_set};
pub(crate) fn file_syntax(ctx: QueryCtx, file_id: FileId) -> File {
(&*ctx.get(FILE_SYNTAX, file_id)).clone()

View File

@ -9,7 +9,7 @@ use rustc_hash::FxHashMap;
use ra_editor::LineIndex;
use ra_syntax::File;
use {
use crate::{
FileId,
imp::FileResolverImp,
symbol_index::SymbolIndex,
@ -62,23 +62,23 @@ impl WritableSourceRoot {
impl SourceRoot for WritableSourceRoot {
fn module_tree(&self) -> Arc<ModuleTreeDescriptor> {
self.db.make_query(::module_map::module_tree)
self.db.make_query(crate::module_map::module_tree)
}
fn contains(&self, file_id: FileId) -> bool {
self.db.state().file_map.contains_key(&file_id)
}
fn lines(&self, file_id: FileId) -> Arc<LineIndex> {
self.db.make_query(|ctx| ::queries::file_lines(ctx, file_id))
self.db.make_query(|ctx| crate::queries::file_lines(ctx, file_id))
}
fn syntax(&self, file_id: FileId) -> File {
self.db.make_query(|ctx| ::queries::file_syntax(ctx, file_id))
self.db.make_query(|ctx| crate::queries::file_syntax(ctx, file_id))
}
fn symbols<'a>(&'a self, acc: &mut Vec<Arc<SymbolIndex>>) {
self.db.make_query(|ctx| {
let file_set = ::queries::file_set(ctx);
let file_set = crate::queries::file_set(ctx);
let syms = file_set.0.iter()
.map(|file_id| ::queries::file_symbols(ctx, *file_id));
.map(|file_id| crate::queries::file_symbols(ctx, *file_id));
acc.extend(syms);
});
}

View File

@ -9,7 +9,7 @@ use ra_syntax::{
};
use fst::{self, Streamer};
use rayon::prelude::*;
use {Query, FileId, JobToken};
use crate::{Query, FileId, JobToken};
#[derive(Debug)]
pub(crate) struct SymbolIndex {

View File

@ -1,4 +1,5 @@
[package]
edition = "2018"
name = "ra_cli"
version = "0.1.0"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]

View File

@ -1,4 +1,5 @@
[package]
edition = "2018"
name = "ra_editor"
version = "0.1.0"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]

View File

@ -11,7 +11,7 @@ use ra_syntax::{
},
};
use {EditBuilder, Edit, find_node_at_offset};
use crate::{EditBuilder, Edit, find_node_at_offset};
#[derive(Debug)]
pub struct LocalEdit {
@ -136,7 +136,7 @@ fn non_trivia_sibling(node: SyntaxNodeRef, direction: Direction) -> Option<Synta
#[cfg(test)]
mod tests {
use super::*;
use test_utils::{check_action, check_action_range};
use crate::test_utils::{check_action, check_action_range};
#[test]
fn test_swap_comma() {

View File

@ -9,7 +9,7 @@ use ra_syntax::{
text_utils::is_subrange,
};
use {
use crate::{
AtomEdit, find_node_at_offset,
scope::{FnScopes, ModuleScope},
};

View File

@ -1,4 +1,4 @@
use {TextRange, TextUnit};
use crate::{TextRange, TextUnit};
use ra_syntax::{
AtomEdit,
text_utils::contains_offset_nonstrict,

View File

@ -164,7 +164,7 @@ pub fn resolve_local_name(file: &File, offset: TextUnit, name_ref: ast::NameRef)
#[cfg(test)]
mod tests {
use super::*;
use test_utils::{assert_eq_dbg, extract_offset, add_cursor};
use crate::test_utils::{assert_eq_dbg, extract_offset, add_cursor};
#[test]
fn test_highlighting() {

View File

@ -1,5 +1,5 @@
use superslice::Ext;
use ::TextUnit;
use crate::TextUnit;
#[derive(Clone, Debug, Hash)]
pub struct LineIndex {

View File

@ -174,7 +174,7 @@ fn compute_expr_scopes(expr: ast::Expr, scopes: &mut FnScopes, scope: ScopeId) {
}
}
ast::Expr::LambdaExpr(e) => {
let mut scope = scopes.new_scope(scope);
let scope = scopes.new_scope(scope);
scopes.add_params_bindings(scope, e.param_list());
if let Some(body) = e.body() {
scopes.set_scope(body.syntax(), scope);
@ -256,7 +256,7 @@ pub fn resolve_local_name<'a>(name_ref: ast::NameRef, scopes: &'a FnScopes) -> O
mod tests {
use super::*;
use ra_syntax::File;
use {find_node_at_offset, test_utils::extract_offset};
use crate::{find_node_at_offset, test_utils::extract_offset};
fn do_check(code: &str, expected: &[&str]) {
let (off, code) = extract_offset(code);

View File

@ -6,7 +6,7 @@ use ra_syntax::{
walk::{walk, WalkEvent},
},
};
use TextRange;
use crate::TextRange;
#[derive(Debug, Clone)]
pub struct StructureNode {

View File

@ -1,6 +1,6 @@
use ra_syntax::{File, TextUnit, TextRange};
pub use _test_utils::*;
use LocalEdit;
pub use crate::_test_utils::*;
use crate::LocalEdit;
pub fn check_action<F: Fn(&File, TextUnit) -> Option<LocalEdit>> (
before: &str,

View File

@ -10,7 +10,7 @@ use ra_syntax::{
SyntaxKind::*,
};
use {LocalEdit, EditBuilder, find_node_at_offset};
use crate::{LocalEdit, EditBuilder, find_node_at_offset};
pub fn join_lines(file: &File, range: TextRange) -> LocalEdit {
let range = if range.is_empty() {
@ -244,7 +244,7 @@ fn compute_ws(left: SyntaxNodeRef, right: SyntaxNodeRef) -> &'static str {
#[cfg(test)]
mod tests {
use super::*;
use test_utils::{check_action, extract_range, extract_offset, add_cursor};
use crate::test_utils::{check_action, extract_range, extract_offset, add_cursor};
fn check_join_lines(before: &str, after: &str) {
check_action(before, after, |file, offset| {

View File

@ -1,4 +1,5 @@
[package]
edition = "2018"
name = "ra_lsp_server"
version = "0.1.0"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]

View File

@ -7,7 +7,7 @@ use ra_editor::{LineIndex, LineCol, Edit, AtomEdit};
use ra_syntax::{SyntaxKind, TextUnit, TextRange};
use ra_analysis::{FileId, SourceChange, SourceFileEdit, FileSystemEdit};
use {
use crate::{
Result,
server_world::ServerWorld,
req,
@ -299,7 +299,7 @@ pub fn to_location(
Ok(loc)
}
pub trait MapConvWith<'a>: Sized {
pub trait MapConvWith<'a>: Sized + 'a {
type Ctx;
type Output;
@ -309,7 +309,7 @@ pub trait MapConvWith<'a>: Sized {
}
impl<'a, I> MapConvWith<'a> for I
where I: Iterator,
where I: Iterator + 'a,
I::Item: ConvWith
{
type Ctx = <I::Item as ConvWith>::Ctx;
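This hunk also tightens `MapConvWith` with an explicit `'a` bound on both the trait (`Sized + 'a`) and the blanket iterator impl (`Iterator + 'a`). The diff does not say why the bound became necessary, so the sketch below only illustrates the shape of the pattern with invented types; it is not the real conversion API.

```
// Invented types standing in for the conversion context and items.
struct LineIndex;

trait ConvWith {
    type Ctx;
    type Output;
    fn conv_with(self, ctx: &Self::Ctx) -> Self::Output;
}

impl ConvWith for u32 {
    type Ctx = LineIndex;
    type Output = String;
    fn conv_with(self, _ctx: &LineIndex) -> String {
        self.to_string()
    }
}

// The trait and the blanket impl both carry an explicit `'a` bound,
// mirroring the `Sized + 'a` / `Iterator + 'a` change in the hunk.
trait MapConvWith<'a>: Sized + 'a {
    type Ctx;
    type Output;
}

impl<'a, I> MapConvWith<'a> for I
where
    I: Iterator + 'a,
    I::Item: ConvWith,
{
    type Ctx = <I::Item as ConvWith>::Ctx;
    type Output = <I::Item as ConvWith>::Output;
}

fn main() {
    // Any iterator that lives for `'a` satisfies the bound.
    fn assert_impl<'a, T: MapConvWith<'a>>(_: &T) {}
    let iter = vec![1u32, 2, 3].into_iter();
    assert_impl(&iter);

    let _s = 5u32.conv_with(&LineIndex);
}
```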

View File

@ -34,5 +34,7 @@ mod project_model;
pub mod thread_watcher;
pub type Result<T> = ::std::result::Result<T, ::failure::Error>;
pub use caps::server_capabilities;
pub use main_loop::main_loop;
pub use crate::{
main_loop::main_loop,
caps::server_capabilities,
};

View File

@ -13,7 +13,7 @@ use ra_syntax::{
text_utils::contains_offset_nonstrict
};
use ::{
use crate::{
req::{self, Decoration}, Result,
conv::{Conv, ConvWith, TryConvWith, MapConvWith, to_location},
server_world::ServerWorld,

View File

@ -16,7 +16,7 @@ use gen_lsp_server::{
};
use rustc_hash::FxHashMap;
use {
use crate::{
req,
Result,
vfs::{self, FileEvent},

View File

@ -5,7 +5,7 @@ use rustc_hash::{FxHashMap, FxHashSet};
use cargo_metadata::{metadata_run, CargoOpt};
use ra_syntax::SmolStr;
use {
use crate::{
Result,
thread_watcher::{Worker, ThreadWatcher},
};

View File

@ -8,7 +8,7 @@ use rustc_hash::FxHashMap;
use languageserver_types::Url;
use ra_analysis::{FileId, AnalysisHost, Analysis, CrateGraph, CrateId, LibraryData, FileResolver};
use {
use crate::{
Result,
path_map::{PathMap, Root},
vfs::{FileEvent, FileEventKind},

View File

@ -1,7 +1,7 @@
use std::thread;
use crossbeam_channel::{bounded, unbounded, Sender, Receiver};
use drop_bomb::DropBomb;
use Result;
use crate::Result;
pub struct Worker<I, O> {
pub inp: Sender<I>,

View File

@ -5,7 +5,7 @@ use std::{
use walkdir::WalkDir;
use {
use crate::{
thread_watcher::{Worker, ThreadWatcher},
};

View File

@ -12,7 +12,7 @@ mod support;
use ra_lsp_server::req::{Runnables, RunnablesParams};
use support::project;
use crate::support::project;
const LOG: &'static str = "";

View File

@ -25,7 +25,7 @@ use ra_lsp_server::{main_loop, req, thread_watcher::{ThreadWatcher, Worker}};
pub fn project(fixture: &str) -> Server {
static INIT: Once = Once::new();
INIT.call_once(|| Logger::with_env_or_str(::LOG).start().unwrap());
INIT.call_once(|| Logger::with_env_or_str(crate::LOG).start().unwrap());
let tmp_dir = TempDir::new("test-project")
.unwrap();

View File

@ -1,4 +1,5 @@
[package]
edition = "2018"
name = "ra_syntax"
version = "0.1.0"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]

View File

@ -1,7 +1,7 @@
pub mod walk;
pub mod visit;
use {
use crate::{
SyntaxNodeRef, TextUnit, TextRange,
text_utils::{contains_offset_nonstrict, is_subrange},
};

View File

@ -1,5 +1,5 @@
use std::marker::PhantomData;
use {SyntaxNodeRef, AstNode};
use crate::{SyntaxNodeRef, AstNode};
pub fn visitor<'a, T>() -> impl Visitor<'a, Output=T> {

View File

@ -1,4 +1,4 @@
use {
use crate::{
SyntaxNodeRef,
algo::generate,
};

View File

@ -1,7 +1,7 @@
// This file is automatically generated based on the file `./generated.rs.tera` when `cargo gen-kinds` is run
// Do not edit manually
use {
use crate::{
ast,
SyntaxNodeRef, AstNode,
SyntaxKind::*,

View File

@ -3,7 +3,7 @@ the below applies to the result of this template
#}// This file is automatically generated based on the file `./generated.rs.tera` when `cargo gen-kinds` is run
// Do not edit manually
use {
use crate::{
ast,
SyntaxNodeRef, AstNode,
SyntaxKind::*,

View File

@ -4,7 +4,7 @@ use std::marker::PhantomData;
use itertools::Itertools;
use {
use crate::{
SmolStr, SyntaxNodeRef, SyntaxKind::*,
yellow::{RefRoot, SyntaxNodeChildren},
};

View File

@ -30,7 +30,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
token_set_union![
LITERAL_FIRST,
token_set![L_CURLY, L_PAREN, L_BRACK, PIPE, MOVE_KW, IF_KW, WHILE_KW, MATCH_KW, UNSAFE_KW,
RETURN_KW, IDENT, SELF_KW, SUPER_KW, COLONCOLON, BREAK_KW, CONTINUE_KW, LIFETIME ],
RETURN_KW, IDENT, SELF_KW, SUPER_KW, CRATE_KW, COLONCOLON, BREAK_KW, CONTINUE_KW, LIFETIME ],
];
const EXPR_RECOVERY_SET: TokenSet =
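Adding `CRATE_KW` to `ATOM_EXPR_FIRST` lets the parser accept `crate` at the start of a path in expression position, which is exactly the form the `crate::` rewrites above produce everywhere. The new parser test fixture further down exercises a call of this shape; the version below adds invented scaffolding (`make_query`, `module_map::module_tree`) so the snippet compiles on its own:

```
fn make_query(f: fn()) {
    f();
}

mod module_map {
    pub fn module_tree() {}
}

fn main() {
    // `crate::...` in expression position now parses,
    // matching the new test fixture added in this change.
    make_query(crate::module_map::module_tree);
}
```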

View File

@ -31,7 +31,7 @@ mod type_args;
mod type_params;
mod types;
use {
use crate::{
token_set::TokenSet,
parser_api::{Marker, CompletedMarker, Parser},
SyntaxKind::{self, *},

View File

@ -1,6 +1,6 @@
use lexer::ptr::Ptr;
use crate::lexer::ptr::Ptr;
use SyntaxKind::{self, *};
use crate::SyntaxKind::{self, *};
pub(crate) fn scan_shebang(ptr: &mut Ptr) -> bool {
if ptr.at_str("!/") {

View File

@ -4,7 +4,7 @@ mod numbers;
mod ptr;
mod strings;
use {
use crate::{
SyntaxKind::{self, *},
TextUnit,
};

View File

@ -1,7 +1,7 @@
use lexer::classes::*;
use lexer::ptr::Ptr;
use crate::lexer::classes::*;
use crate::lexer::ptr::Ptr;
use SyntaxKind::{self, *};
use crate::SyntaxKind::{self, *};
pub(crate) fn scan_number(c: char, ptr: &mut Ptr) -> SyntaxKind {
if c == '0' {

View File

@ -1,4 +1,4 @@
use TextUnit;
use crate::TextUnit;
use std::str::Chars;

View File

@ -1,6 +1,6 @@
use SyntaxKind::{self, *};
use crate::SyntaxKind::{self, *};
use lexer::ptr::Ptr;
use crate::lexer::ptr::Ptr;
pub(crate) fn is_string_literal_start(c: char, c1: Option<char>, c2: Option<char>) -> bool {
match (c, c1, c2) {

View File

@ -46,7 +46,7 @@ mod yellow;
pub mod utils;
pub mod text_utils;
pub use {
pub use crate::{
rowan::{SmolStr, TextRange, TextUnit},
ast::AstNode,
lexer::{tokenize, Token},
@ -55,7 +55,7 @@ pub use {
reparsing::AtomEdit,
};
use {
use crate::{
yellow::{GreenNode},
};

View File

@ -1,4 +1,4 @@
use {
use crate::{
token_set::TokenSet,
parser_impl::ParserImpl,
SyntaxKind::{self, ERROR},

View File

@ -8,7 +8,7 @@
//! `start node`, `finish node`, and `FileBuilder` converts
//! this stream to a real tree.
use std::mem;
use {
use crate::{
TextUnit, TextRange, SmolStr,
lexer::Token,
parser_impl::Sink,

View File

@ -1,4 +1,4 @@
use {lexer::Token, SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit};
use crate::{lexer::Token, SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit};
use std::ops::{Add, AddAssign};

View File

@ -3,7 +3,7 @@ mod input;
use std::cell::Cell;
use {
use crate::{
TextUnit, SmolStr,
lexer::Token,
parser_api::Parser,
@ -13,7 +13,7 @@ use {
},
};
use SyntaxKind::{self, EOF, TOMBSTONE};
use crate::SyntaxKind::{self, EOF, TOMBSTONE};
pub(crate) trait Sink {
type Tree;

View File

@ -1,14 +1,14 @@
use algo;
use grammar;
use lexer::{tokenize, Token};
use yellow::{self, GreenNode, SyntaxNodeRef, SyntaxError};
use parser_impl;
use parser_api::Parser;
use {
use crate::algo;
use crate::grammar;
use crate::lexer::{tokenize, Token};
use crate::yellow::{self, GreenNode, SyntaxNodeRef, SyntaxError};
use crate::parser_impl;
use crate::parser_api::Parser;
use crate::{
TextUnit, TextRange,
SyntaxKind::*,
};
use text_utils::replace_range;
use crate::text_utils::replace_range;
#[derive(Debug, Clone)]
pub struct AtomEdit {

View File

@ -1,7 +1,7 @@
mod generated;
use std::fmt;
use SyntaxKind::*;
use crate::SyntaxKind::*;
pub use self::generated::SyntaxKind;

View File

@ -1,4 +1,4 @@
use {TextRange, TextUnit};
use crate::{TextRange, TextUnit};
pub fn contains_offset_nonstrict(range: TextRange, offset: TextUnit) -> bool {
range.start() <= offset && offset <= range.end()

View File

@ -1,4 +1,4 @@
use SyntaxKind;
use crate::SyntaxKind;
#[derive(Clone, Copy)]
pub(crate) struct TokenSet(pub(crate) u128);
@ -29,7 +29,7 @@ macro_rules! token_set_union {
#[test]
fn token_set_works_for_tokens() {
use SyntaxKind::*;
use crate::SyntaxKind::*;
let ts = token_set! { EOF, SHEBANG };
assert!(ts.contains(EOF));
assert!(ts.contains(SHEBANG));

View File

@ -1,5 +1,5 @@
use std::fmt::Write;
use {
use crate::{
algo::walk::{walk, WalkEvent},
SyntaxKind, File, SyntaxNodeRef
};

View File

@ -1,5 +1,5 @@
use rowan::GreenNodeBuilder;
use {
use crate::{
TextUnit, SmolStr,
parser_impl::Sink,
yellow::{GreenNode, SyntaxError, RaTypes},

View File

@ -6,7 +6,7 @@ use std::{
hash::{Hash, Hasher},
};
use rowan::Types;
use {SyntaxKind, TextUnit, TextRange, SmolStr};
use crate::{SyntaxKind, TextUnit, TextRange, SmolStr};
use self::syntax_text::SyntaxText;
pub use rowan::{TreeRoot};
@ -70,16 +70,16 @@ impl<'a> SyntaxNodeRef<'a> {
self.0.leaf_text()
}
pub fn ancestors(self) -> impl Iterator<Item=SyntaxNodeRef<'a>> {
::algo::generate(Some(self), |&node| node.parent())
crate::algo::generate(Some(self), |&node| node.parent())
}
pub fn descendants(self) -> impl Iterator<Item=SyntaxNodeRef<'a>> {
::algo::walk::walk(self).filter_map(|event| match event {
::algo::walk::WalkEvent::Enter(node) => Some(node),
::algo::walk::WalkEvent::Exit(_) => None,
crate::algo::walk::walk(self).filter_map(|event| match event {
crate::algo::walk::WalkEvent::Enter(node) => Some(node),
crate::algo::walk::WalkEvent::Exit(_) => None,
})
}
pub fn siblings(self, direction: Direction) -> impl Iterator<Item=SyntaxNodeRef<'a>> {
::algo::generate(Some(self), move |&node| match direction {
crate::algo::generate(Some(self), move |&node| match direction {
Direction::Next => node.next_sibling(),
Direction::Prev => node.prev_sibling(),
})
@ -156,7 +156,7 @@ impl<R: TreeRoot<RaTypes>> Iterator for SyntaxNodeChildren<R> {
fn has_short_text(kind: SyntaxKind) -> bool {
use SyntaxKind::*;
use crate::SyntaxKind::*;
match kind {
IDENT | LIFETIME | INT_NUMBER | FLOAT_NUMBER => true,
_ => false,
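The same edition rule applies inside function bodies: a leading `::` now names an external crate only, so crate-internal calls such as `::algo::generate(...)` are rewritten as `crate::algo::generate(...)`. A tiny sketch with a stand-in module:

```
mod algo {
    // Stand-in for the real ra_syntax::algo::generate.
    pub fn generate(seed: u32) -> u32 {
        seed + 1
    }
}

fn next(seed: u32) -> u32 {
    // Rust 2015: `::algo::generate(seed)` reached the crate root;
    // Rust 2018: a leading `::` only names external crates,
    // so the path must start with `crate::`.
    crate::algo::generate(seed)
}

fn main() {
    assert_eq!(next(41), 42);
}
```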

View File

@ -2,7 +2,7 @@ use std::{
fmt, ops,
};
use {
use crate::{
SyntaxNodeRef, TextRange, TextUnit,
text_utils::{intersect, contains_offset_nonstrict},
};

View File

@ -0,0 +1,3 @@
fn main() {
make_query(crate::module_map::module_tree);
}

View File

@ -0,0 +1,41 @@
ROOT@[0; 62)
FN_DEF@[0; 61)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
NAME@[3; 7)
IDENT@[3; 7) "main"
PARAM_LIST@[7; 9)
L_PAREN@[7; 8)
R_PAREN@[8; 9)
WHITESPACE@[9; 10)
BLOCK@[10; 61)
L_CURLY@[10; 11)
WHITESPACE@[11; 16)
EXPR_STMT@[16; 59)
CALL_EXPR@[16; 58)
PATH_EXPR@[16; 26)
PATH@[16; 26)
PATH_SEGMENT@[16; 26)
NAME_REF@[16; 26)
IDENT@[16; 26) "make_query"
ARG_LIST@[26; 58)
L_PAREN@[26; 27)
PATH_EXPR@[27; 57)
PATH@[27; 57)
PATH@[27; 44)
PATH@[27; 32)
PATH_SEGMENT@[27; 32)
CRATE_KW@[27; 32)
COLONCOLON@[32; 34)
PATH_SEGMENT@[34; 44)
NAME_REF@[34; 44)
IDENT@[34; 44) "module_map"
COLONCOLON@[44; 46)
PATH_SEGMENT@[46; 57)
NAME_REF@[46; 57)
IDENT@[46; 57) "module_tree"
R_PAREN@[57; 58)
SEMI@[58; 59)
WHITESPACE@[59; 60)
R_CURLY@[60; 61)
WHITESPACE@[61; 62)

View File

@ -1,4 +1,5 @@
[package]
edition = "2018"
name = "test_utils"
version = "0.1.0"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]

View File

@ -1,4 +1,5 @@
[package]
edition = "2018"
name = "tools"
version = "0.1.0"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]