Fix clippy::identity_conversion
parent ed3d93b875
commit 40424d4222
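For context: clippy's `identity_conversion` lint (later renamed `useless_conversion`) fires when a value is converted into the type it already has, so the `.into()` call is a no-op. Every hunk below drops such a redundant conversion, occasionally reflowing the surrounding expression onto one line. A minimal sketch of the pattern, assuming a `FileId` newtype over `u32` in the spirit of the wrappers touched by this commit (illustrative only, not code from the repository):

    // Newtype wrapper, as used for file and source-root ids in this diff.
    struct FileId(u32);

    fn to_file_id(raw: u32) -> FileId {
        // Before: FileId(raw.into()) -- `raw` is already a u32, so the
        // u32 -> u32 conversion is exactly what identity_conversion flags.
        FileId(raw) // After: the redundant `.into()` is simply dropped.
    }

    fn main() {
        let id = to_file_id(92);
        assert_eq!(id.0, 92);
    }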
@@ -34,10 +34,10 @@ impl salsa::Database for BatchDatabase {
 }
 
 fn vfs_file_to_id(f: ra_vfs::VfsFile) -> FileId {
-    FileId(f.0.into())
+    FileId(f.0)
 }
 fn vfs_root_to_id(r: ra_vfs::VfsRoot) -> SourceRootId {
-    SourceRootId(r.0.into())
+    SourceRootId(r.0)
 }
 
 impl BatchDatabase {
@@ -36,7 +36,7 @@ impl AdtDef {
 
 impl Struct {
     pub(crate) fn variant_data(&self, db: &impl DefDatabase) -> Arc<VariantData> {
-        db.struct_data((*self).into()).variant_data.clone()
+        db.struct_data(*self).variant_data.clone()
     }
 }
 
@@ -202,7 +202,6 @@ impl ModuleImplBlocks {
         };
 
         let (file_id, module_source) = m.module.definition_source(db);
-        let file_id: HirFileId = file_id.into();
         let node = match &module_source {
             ModuleSource::SourceFile(node) => node.syntax(),
             ModuleSource::Module(node) => {
@@ -468,7 +468,7 @@ impl CrateDefMap {
                 );
 
                 return ResolvePathResult::with(
-                    Either::Left(PerNs::types((*s).into())),
+                    Either::Left(PerNs::types(*s)),
                     ReachedFixedPoint::Yes,
                     Some(i),
                 );
@@ -556,7 +556,7 @@ where
 
     fn define_def(&mut self, def: &raw::DefData) {
         let module = Module { krate: self.def_collector.def_map.krate, module_id: self.module_id };
-        let ctx = LocationCtx::new(self.def_collector.db, module, self.file_id.into());
+        let ctx = LocationCtx::new(self.def_collector.db, module, self.file_id);
 
         macro_rules! def {
             ($kind:ident, $ast_id:ident) => {
@@ -69,7 +69,7 @@ impl RawItems {
     ) -> (Arc<RawItems>, Arc<ImportSourceMap>) {
         let mut collector = RawItemsCollector {
             raw_items: RawItems::default(),
-            source_ast_id_map: db.ast_id_map(file_id.into()),
+            source_ast_id_map: db.ast_id_map(file_id),
             source_map: ImportSourceMap::default(),
         };
         if let Some(node) = db.parse_or_expand(file_id) {
@@ -48,8 +48,8 @@ pub fn module_from_declaration(
 pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Module> {
     let file = db.parse(position.file_id).tree;
     match find_node_at_offset::<ast::Module>(file.syntax(), position.offset) {
-        Some(m) if !m.has_semi() => module_from_inline(db, position.file_id.into(), m),
-        _ => module_from_file_id(db, position.file_id.into()),
+        Some(m) if !m.has_semi() => module_from_inline(db, position.file_id, m),
+        _ => module_from_file_id(db, position.file_id),
     }
 }
 
@@ -72,9 +72,9 @@ pub fn module_from_child_node(
     child: &SyntaxNode,
 ) -> Option<Module> {
     if let Some(m) = child.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi()) {
-        module_from_inline(db, file_id.into(), m)
+        module_from_inline(db, file_id, m)
     } else {
-        module_from_file_id(db, file_id.into())
+        module_from_file_id(db, file_id)
    }
 }
 
@@ -99,14 +99,12 @@ pub fn struct_from_module(
     struct_def: &ast::StructDef,
 ) -> Struct {
     let (file_id, _) = module.definition_source(db);
-    let file_id = file_id.into();
     let ctx = LocationCtx::new(db, module, file_id);
     Struct { id: ctx.to_def(struct_def) }
 }
 
 pub fn enum_from_module(db: &impl HirDatabase, module: Module, enum_def: &ast::EnumDef) -> Enum {
     let (file_id, _) = module.definition_source(db);
-    let file_id = file_id.into();
     let ctx = LocationCtx::new(db, module, file_id);
     Enum { id: ctx.to_def(enum_def) }
 }
@@ -117,7 +115,6 @@ pub fn trait_from_module(
     trait_def: &ast::TraitDef,
 ) -> Trait {
     let (file_id, _) = module.definition_source(db);
-    let file_id = file_id.into();
     let ctx = LocationCtx::new(db, module, file_id);
     Trait { id: ctx.to_def(trait_def) }
 }
@@ -539,7 +539,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                }
            })?;
 
-            resolved = Resolution::Def(item.into());
+            resolved = Resolution::Def(item);
        }
 
        match resolved {
@@ -762,7 +762,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                     _ => &Ty::Unknown,
                 };
                 let subty = self.infer_pat(*pat, expectation, default_bm);
-                Ty::apply_one(TypeCtor::Ref(*mutability), subty.into())
+                Ty::apply_one(TypeCtor::Ref(*mutability), subty)
             }
             Pat::TupleStruct { path: ref p, args: ref subpats } => {
                 self.infer_tuple_struct_pat(p.as_ref(), subpats, expected, default_bm)
@@ -790,7 +790,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
 
         let bound_ty = match mode {
             BindingMode::Ref(mutability) => {
-                Ty::apply_one(TypeCtor::Ref(mutability), inner_ty.clone().into())
+                Ty::apply_one(TypeCtor::Ref(mutability), inner_ty.clone())
             }
             BindingMode::Move => inner_ty.clone(),
         };
@@ -21,8 +21,7 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
     let function = match calling_node {
         FnCallNode::CallExpr(expr) => {
             //FIXME: apply subst
-            let (callable_def, _subst) =
-                analyzer.type_of(db, expr.expr()?.into())?.as_callable()?;
+            let (callable_def, _subst) = analyzer.type_of(db, expr.expr()?)?.as_callable()?;
             match callable_def {
                 hir::CallableDef::Function(it) => it,
                 //FIXME: handle other callables
@@ -110,7 +110,7 @@ pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<
     let mut edit = TextEditBuilder::default();
     edit.replace(
         TextRange::from_to(position.offset - current_indent_len, position.offset),
-        target_indent.into(),
+        target_indent,
     );
 
     let res = SourceChange::source_file_edit_from("reindent dot", position.file_id, edit.finish())
@@ -64,7 +64,7 @@ impl CargoTargetSpec {
             None => return Ok(None),
         };
         let file_id = world.analysis().crate_root(crate_id)?;
-        let path = world.vfs.read().file2path(ra_vfs::VfsFile(file_id.0.into()));
+        let path = world.vfs.read().file2path(ra_vfs::VfsFile(file_id.0));
         let res = world.workspaces.iter().find_map(|ws| match ws {
             project_model::ProjectWorkspace::Cargo { cargo, .. } => {
                 let tgt = cargo.target_by_root(&path)?;
@@ -384,7 +384,7 @@ fn on_notification(
             if let Some(file_id) =
                 state.vfs.write().add_file_overlay(&path, params.text_document.text)
             {
-                subs.add_sub(FileId(file_id.0.into()));
+                subs.add_sub(FileId(file_id.0));
             }
             return Ok(());
         }
@@ -406,7 +406,7 @@ fn on_notification(
             let uri = params.text_document.uri;
             let path = uri.to_file_path().map_err(|()| format_err!("invalid uri: {}", uri))?;
             if let Some(file_id) = state.vfs.write().remove_file_overlay(path.as_path()) {
-                subs.remove_sub(FileId(file_id.0.into()));
+                subs.remove_sub(FileId(file_id.0));
             }
             let params = req::PublishDiagnosticsParams { uri, diagnostics: Vec::new() };
             let not = RawNotification::new::<req::PublishDiagnostics>(&params);
@@ -60,14 +60,14 @@ impl WorldState {
         for r in vfs_roots {
             let vfs_root_path = vfs.root2path(r);
             let is_local = folder_roots.iter().any(|it| vfs_root_path.starts_with(it));
-            change.add_root(SourceRootId(r.0.into()), is_local);
+            change.add_root(SourceRootId(r.0), is_local);
         }
 
         // Create crate graph from all the workspaces
         let mut crate_graph = CrateGraph::default();
         let mut load = |path: &std::path::Path| {
             let vfs_file = vfs.load(path);
-            vfs_file.map(|f| FileId(f.0.into()))
+            vfs_file.map(|f| FileId(f.0))
         };
         for ws in workspaces.iter() {
             crate_graph.extend(ws.to_crate_graph(&mut load));
@@ -105,29 +105,24 @@ impl WorldState {
                 if is_local {
                     self.roots_to_scan -= 1;
                     for (file, path, text) in files {
-                        change.add_file(
-                            SourceRootId(root.0.into()),
-                            FileId(file.0.into()),
-                            path,
-                            text,
-                        );
+                        change.add_file(SourceRootId(root.0), FileId(file.0), path, text);
                     }
                 } else {
                     let files = files
                         .into_iter()
-                        .map(|(vfsfile, path, text)| (FileId(vfsfile.0.into()), path, text))
+                        .map(|(vfsfile, path, text)| (FileId(vfsfile.0), path, text))
                         .collect();
-                    libs.push((SourceRootId(root.0.into()), files));
+                    libs.push((SourceRootId(root.0), files));
                 }
             }
             VfsChange::AddFile { root, file, path, text } => {
-                change.add_file(SourceRootId(root.0.into()), FileId(file.0.into()), path, text);
+                change.add_file(SourceRootId(root.0), FileId(file.0), path, text);
             }
             VfsChange::RemoveFile { root, file, path } => {
-                change.remove_file(SourceRootId(root.0.into()), FileId(file.0.into()), path)
+                change.remove_file(SourceRootId(root.0), FileId(file.0), path)
             }
             VfsChange::ChangeFile { file, text } => {
-                change.change_file(FileId(file.0.into()), text);
+                change.change_file(FileId(file.0), text);
             }
         }
     }
@@ -178,18 +173,18 @@ impl WorldSnapshot {
                 message: "Rust file outside current workspace is not supported yet.".to_string(),
             })
         })?;
-        Ok(FileId(file.0.into()))
+        Ok(FileId(file.0))
     }
 
     pub fn file_id_to_uri(&self, id: FileId) -> Result<Url> {
-        let path = self.vfs.read().file2path(VfsFile(id.0.into()));
+        let path = self.vfs.read().file2path(VfsFile(id.0));
         let url = Url::from_file_path(&path)
             .map_err(|_| format_err!("can't convert path to url: {}", path.display()))?;
         Ok(url)
     }
 
     pub fn path_to_uri(&self, root: SourceRootId, path: &RelativePathBuf) -> Result<Url> {
-        let base = self.vfs.read().root2path(VfsRoot(root.0.into()));
+        let base = self.vfs.read().root2path(VfsRoot(root.0));
         let path = path.to_path(base);
         let url = Url::from_file_path(&path)
             .map_err(|_| format_err!("can't convert path to url: {}", path.display()))?;
@@ -212,7 +207,7 @@ impl WorldSnapshot {
     }
 
     pub fn workspace_root_for(&self, file_id: FileId) -> Option<&Path> {
-        let path = self.vfs.read().file2path(VfsFile(file_id.0.into()));
+        let path = self.vfs.read().file2path(VfsFile(file_id.0));
         self.workspaces.iter().find_map(|ws| ws.workspace_root_for(&path))
     }
 }
@@ -206,48 +206,48 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
                 "path" => {
                     let path =
                         input.eat_path().ok_or(ExpandError::UnexpectedToken)?.clone();
-                    res.inner.insert(text.clone(), Binding::Simple(path.into()));
+                    res.inner.insert(text.clone(), Binding::Simple(path));
                 }
                 "expr" => {
                     let expr =
                         input.eat_expr().ok_or(ExpandError::UnexpectedToken)?.clone();
-                    res.inner.insert(text.clone(), Binding::Simple(expr.into()));
+                    res.inner.insert(text.clone(), Binding::Simple(expr));
                 }
                 "ty" => {
                     let ty = input.eat_ty().ok_or(ExpandError::UnexpectedToken)?.clone();
-                    res.inner.insert(text.clone(), Binding::Simple(ty.into()));
+                    res.inner.insert(text.clone(), Binding::Simple(ty));
                 }
                 "pat" => {
                     let pat = input.eat_pat().ok_or(ExpandError::UnexpectedToken)?.clone();
-                    res.inner.insert(text.clone(), Binding::Simple(pat.into()));
+                    res.inner.insert(text.clone(), Binding::Simple(pat));
                 }
                 "stmt" => {
                     let pat = input.eat_stmt().ok_or(ExpandError::UnexpectedToken)?.clone();
-                    res.inner.insert(text.clone(), Binding::Simple(pat.into()));
+                    res.inner.insert(text.clone(), Binding::Simple(pat));
                 }
                 "block" => {
                     let block =
                         input.eat_block().ok_or(ExpandError::UnexpectedToken)?.clone();
-                    res.inner.insert(text.clone(), Binding::Simple(block.into()));
+                    res.inner.insert(text.clone(), Binding::Simple(block));
                 }
                 "meta" => {
                     let meta =
                         input.eat_meta().ok_or(ExpandError::UnexpectedToken)?.clone();
-                    res.inner.insert(text.clone(), Binding::Simple(meta.into()));
+                    res.inner.insert(text.clone(), Binding::Simple(meta));
                 }
                 "tt" => {
                     let token = input.eat().ok_or(ExpandError::UnexpectedToken)?.clone();
-                    res.inner.insert(text.clone(), Binding::Simple(token.into()));
+                    res.inner.insert(text.clone(), Binding::Simple(token));
                 }
                 "item" => {
                     let item =
                         input.eat_item().ok_or(ExpandError::UnexpectedToken)?.clone();
-                    res.inner.insert(text.clone(), Binding::Simple(item.into()));
+                    res.inner.insert(text.clone(), Binding::Simple(item));
                 }
                 "lifetime" => {
                     let lifetime =
                         input.eat_lifetime().ok_or(ExpandError::UnexpectedToken)?.clone();
-                    res.inner.insert(text.clone(), Binding::Simple(lifetime.into()));
+                    res.inner.insert(text.clone(), Binding::Simple(lifetime));
                 }
                 "literal" => {
                     let literal =
@@ -262,7 +262,7 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
                 // `vis` is optional
                 if let Some(vis) = input.try_eat_vis() {
                     let vis = vis.clone();
-                    res.inner.insert(text.clone(), Binding::Simple(vis.into()));
+                    res.inner.insert(text.clone(), Binding::Simple(vis));
                 } else {
                     res.push_optional(&text);
                 }
@@ -452,7 +452,7 @@ fn expand_tt(
 
                 let idx = ctx.nesting.pop().unwrap();
                 ctx.nesting.push(idx + 1);
-                token_trees.push(reduce_single_token(t).into());
+                token_trees.push(reduce_single_token(t));
 
                 if let Some(ref sep) = repeat.separator {
                     match sep {
@@ -155,9 +155,10 @@ fn convert_doc_comment<'a>(token: &ra_syntax::SyntaxToken<'a>) -> Option<Vec<tt:
    if let ast::CommentPlacement::Inner = doc {
        token_trees.push(mk_punct('!'));
    }
-    token_trees.push(tt::TokenTree::from(tt::Subtree::from(
-        tt::Subtree { delimiter: tt::Delimiter::Bracket, token_trees: meta_tkns }.into(),
-    )));
+    token_trees.push(tt::TokenTree::from(tt::Subtree {
+        delimiter: tt::Delimiter::Bracket,
+        token_trees: meta_tkns,
+    }));
 
    return Some(token_trees);
 
@@ -137,7 +137,7 @@ impl CargoWorkspace {
         for meta_pkg in meta.packages {
             let is_member = ws_members.contains(&meta_pkg.id);
             let pkg = packages.alloc(PackageData {
-                name: meta_pkg.name.into(),
+                name: meta_pkg.name,
                 manifest: meta_pkg.manifest_path.clone(),
                 targets: Vec::new(),
                 is_member,
@@ -149,7 +149,7 @@ impl CargoWorkspace {
             for meta_tgt in meta_pkg.targets {
                 let tgt = targets.alloc(TargetData {
                     pkg,
-                    name: meta_tgt.name.into(),
+                    name: meta_tgt.name,
                     root: meta_tgt.src_path.clone(),
                     kind: TargetKind::new(meta_tgt.kind.as_slice()),
                 });
@@ -160,8 +160,7 @@ impl CargoWorkspace {
         for node in resolve.nodes {
             let source = pkg_by_id[&node.id];
             for dep_node in node.deps {
-                let dep =
-                    PackageDependency { name: dep_node.name.into(), pkg: pkg_by_id[&dep_node.pkg] };
+                let dep = PackageDependency { name: dep_node.name, pkg: pkg_by_id[&dep_node.pkg] };
                 packages[source].dependencies.push(dep);
             }
         }