From 24e98121d81b75bafcd9c6005548776c00de8401 Mon Sep 17 00:00:00 2001 From: Florian Diebold Date: Sat, 7 Mar 2020 15:27:03 +0100 Subject: Try to complete within macros --- Cargo.lock | 1 + crates/ra_hir/Cargo.toml | 2 + crates/ra_hir/src/semantics.rs | 55 +++++++++++- crates/ra_hir_expand/src/db.rs | 53 ++++++++++-- crates/ra_hir_expand/src/lib.rs | 9 +- crates/ra_ide/src/completion/complete_dot.rs | 98 ++++++++++++++++++++++ crates/ra_ide/src/completion/complete_path.rs | 2 +- crates/ra_ide/src/completion/complete_scope.rs | 70 +++++++++++++++- crates/ra_ide/src/completion/completion_context.rs | 87 +++++++++++++------ 9 files changed, 339 insertions(+), 38 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 80e778bcf..2e052d267 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -960,6 +960,7 @@ name = "ra_hir" version = "0.1.0" dependencies = [ "either", + "itertools", "log", "ra_db", "ra_hir_def", diff --git a/crates/ra_hir/Cargo.toml b/crates/ra_hir/Cargo.toml index 0555a0de7..266c4cff3 100644 --- a/crates/ra_hir/Cargo.toml +++ b/crates/ra_hir/Cargo.toml @@ -12,6 +12,8 @@ log = "0.4.8" rustc-hash = "1.1.0" either = "1.5.3" +itertools = "0.8.2" + ra_syntax = { path = "../ra_syntax" } ra_db = { path = "../ra_db" } ra_prof = { path = "../ra_prof" } diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs index 965d185a4..56bd763c7 100644 --- a/crates/ra_hir/src/semantics.rs +++ b/crates/ra_hir/src/semantics.rs @@ -6,13 +6,14 @@ use std::{cell::RefCell, fmt, iter::successors}; use hir_def::{ resolver::{self, HasResolver, Resolver}, - TraitId, + AsMacroCall, TraitId, }; use hir_expand::ExpansionInfo; use ra_db::{FileId, FileRange}; use ra_prof::profile; use ra_syntax::{ - algo::skip_trivia_token, ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextUnit, + algo::{self, skip_trivia_token}, + ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextUnit, }; use rustc_hash::{FxHashMap, FxHashSet}; @@ -70,6 +71,37 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { Some(node) } + pub fn expand_hypothetical( + &self, + actual_macro_call: &ast::MacroCall, + hypothetical_call: &ast::MacroCall, + token_to_map: SyntaxToken, + ) -> Option<(SyntaxNode, SyntaxToken)> { + let macro_call = + self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call); + let sa = self.analyze2(macro_call.map(|it| it.syntax()), None); + let macro_call_id = macro_call + .as_call_id(self.db, |path| sa.resolver.resolve_path_as_macro(self.db, &path))?; + let macro_file = macro_call_id.as_file().macro_file().unwrap(); + let (tt, tmap_1) = + hir_expand::syntax_node_to_token_tree(hypothetical_call.token_tree().unwrap().syntax()) + .unwrap(); + let range = token_to_map + .text_range() + .checked_sub(hypothetical_call.token_tree().unwrap().syntax().text_range().start())?; + let token_id = tmap_1.token_by_range(range)?; + let macro_def = hir_expand::db::expander(self.db, macro_call_id)?; + let (node, tmap_2) = hir_expand::db::parse_macro_with_arg( + self.db, + macro_file, + Some(std::sync::Arc::new((tt, tmap_1))), + )?; + let token_id = macro_def.0.map_id_down(token_id); + let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?; + let token = algo::find_covering_element(&node.syntax_node(), range).into_token()?; + Some((node.syntax_node(), token)) + } + pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { let parent = token.parent(); let parent = self.find_file(parent); @@ -104,6 +136,25 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { 
node.ancestors_with_macros(self.db).map(|it| it.value) } + pub fn ancestors_at_offset_with_macros( + &self, + node: &SyntaxNode, + offset: TextUnit, + ) -> impl Iterator + '_ { + use itertools::Itertools; + node.token_at_offset(offset) + .map(|token| self.ancestors_with_macros(token.parent())) + .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) + } + + pub fn find_node_at_offset_with_macros( + &self, + node: &SyntaxNode, + offset: TextUnit, + ) -> Option { + self.ancestors_at_offset_with_macros(node, offset).find_map(N::cast) + } + pub fn type_of_expr(&self, expr: &ast::Expr) -> Option { self.analyze(expr.syntax()).type_of(self.db, &expr) } diff --git a/crates/ra_hir_expand/src/db.rs b/crates/ra_hir_expand/src/db.rs index f3a84cacc..a7aa60fc9 100644 --- a/crates/ra_hir_expand/src/db.rs +++ b/crates/ra_hir_expand/src/db.rs @@ -129,16 +129,43 @@ pub(crate) fn macro_arg( pub(crate) fn macro_expand( db: &dyn AstDatabase, id: MacroCallId, +) -> Result, String> { + macro_expand_with_arg(db, id, None) +} + +// TODO hack +pub fn expander( + db: &dyn AstDatabase, + id: MacroCallId, +) -> Option> { + let lazy_id = match id { + MacroCallId::LazyMacro(id) => id, + MacroCallId::EagerMacro(_id) => { + // TODO + unimplemented!() + } + }; + + let loc = db.lookup_intern_macro(lazy_id); + let macro_rules = db.macro_def(loc.def)?; + Some(macro_rules) +} + +pub(crate) fn macro_expand_with_arg( + db: &dyn AstDatabase, + id: MacroCallId, + arg: Option>, ) -> Result, String> { let lazy_id = match id { MacroCallId::LazyMacro(id) => id, MacroCallId::EagerMacro(id) => { + // TODO return Ok(db.lookup_intern_eager_expansion(id).subtree); } }; let loc = db.lookup_intern_macro(lazy_id); - let macro_arg = db.macro_arg(id).ok_or("Fail to args in to tt::TokenTree")?; + let macro_arg = arg.or_else(|| db.macro_arg(id)).ok_or("Fail to args in to tt::TokenTree")?; let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?; let tt = macro_rules.0.expand(db, lazy_id, ¯o_arg.0).map_err(|err| format!("{:?}", err))?; @@ -162,12 +189,24 @@ pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Optio pub(crate) fn parse_macro( db: &dyn AstDatabase, macro_file: MacroFile, +) -> Option<(Parse, Arc)> { + parse_macro_with_arg(db, macro_file, None) +} + +pub fn parse_macro_with_arg( + db: &dyn AstDatabase, + macro_file: MacroFile, + arg: Option>, ) -> Option<(Parse, Arc)> { let _p = profile("parse_macro_query"); let macro_call_id = macro_file.macro_call_id; - let tt = db - .macro_expand(macro_call_id) + let expansion = if let Some(arg) = arg { + macro_expand_with_arg(db, macro_call_id, Some(arg)) + } else { + db.macro_expand(macro_call_id) + }; + let tt = expansion .map_err(|err| { // Note: // The final goal we would like to make all parse_macro success, @@ -185,15 +224,13 @@ pub(crate) fn parse_macro( .collect::>() .join("\n"); - log::warn!( + eprintln!( "fail on macro_parse: (reason: {} macro_call: {:#}) parents: {}", - err, - node.value, - parents + err, node.value, parents ); } _ => { - log::warn!("fail on macro_parse: (reason: {})", err); + eprintln!("fail on macro_parse: (reason: {})", err); } } }) diff --git a/crates/ra_hir_expand/src/lib.rs b/crates/ra_hir_expand/src/lib.rs index 3fce73e8a..92f3902dd 100644 --- a/crates/ra_hir_expand/src/lib.rs +++ b/crates/ra_hir_expand/src/lib.rs @@ -157,6 +157,13 @@ impl HirFileId { } } } + + pub fn macro_file(self) -> Option { + match self.0 { + HirFileIdRepr::FileId(_) => None, + HirFileIdRepr::MacroFile(m) => Some(m), + } 
+ } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -296,7 +303,7 @@ pub struct ExpansionInfo { exp_map: Arc, } -pub use mbe::Origin; +pub use mbe::{syntax_node_to_token_tree, Origin}; use ra_parser::FragmentKind; impl ExpansionInfo { diff --git a/crates/ra_ide/src/completion/complete_dot.rs b/crates/ra_ide/src/completion/complete_dot.rs index 9145aa183..da2c4c1ab 100644 --- a/crates/ra_ide/src/completion/complete_dot.rs +++ b/crates/ra_ide/src/completion/complete_dot.rs @@ -584,4 +584,102 @@ mod tests { "### ); } + + #[test] + fn works_in_simple_macro_1() { + assert_debug_snapshot!( + do_ref_completion( + r" + macro_rules! m { ($e:expr) => { $e } } + struct A { the_field: u32 } + fn foo(a: A) { + m!(a.x<|>) + } + ", + ), + @r###" + [ + CompletionItem { + label: "the_field", + source_range: [156; 157), + delete: [156; 157), + insert: "the_field", + kind: Field, + detail: "u32", + }, + ] + "### + ); + } + + #[test] + fn works_in_simple_macro_recursive() { + assert_debug_snapshot!( + do_ref_completion( + r" + macro_rules! m { ($e:expr) => { $e } } + struct A { the_field: u32 } + fn foo(a: A) { + m!(a.x<|>) + } + ", + ), + @r###" + [ + CompletionItem { + label: "the_field", + source_range: [156; 157), + delete: [156; 157), + insert: "the_field", + kind: Field, + detail: "u32", + }, + ] + "### + ); + } + + #[test] + fn works_in_simple_macro_2() { + // this doesn't work yet because the macro doesn't expand without the token -- maybe it can be fixed with better recovery + assert_debug_snapshot!( + do_ref_completion( + r" + macro_rules! m { ($e:expr) => { $e } } + struct A { the_field: u32 } + fn foo(a: A) { + m!(a.<|>) + } + ", + ), + @r###"[]"### + ); + } + + #[test] + fn works_in_simple_macro_recursive_1() { + assert_debug_snapshot!( + do_ref_completion( + r" + macro_rules! m { ($e:expr) => { $e } } + struct A { the_field: u32 } + fn foo(a: A) { + m!(m!(m!(a.x<|>))) + } + ", + ), + @r###" + [ + CompletionItem { + label: "the_field", + source_range: [162; 163), + delete: [162; 163), + insert: "the_field", + kind: Field, + detail: "u32", + }, + ] + "### + ); + } } diff --git a/crates/ra_ide/src/completion/complete_path.rs b/crates/ra_ide/src/completion/complete_path.rs index 1a9699466..4fa47951a 100644 --- a/crates/ra_ide/src/completion/complete_path.rs +++ b/crates/ra_ide/src/completion/complete_path.rs @@ -1,4 +1,4 @@ -//! Completion of paths, including when writing a single name. +//! Completion of paths, i.e. `some::prefix::<|>`. use hir::{Adt, PathResolution, ScopeDef}; use ra_syntax::AstNode; diff --git a/crates/ra_ide/src/completion/complete_scope.rs b/crates/ra_ide/src/completion/complete_scope.rs index 2b9a0e556..eb3c8cf1b 100644 --- a/crates/ra_ide/src/completion/complete_scope.rs +++ b/crates/ra_ide/src/completion/complete_scope.rs @@ -1,4 +1,4 @@ -//! FIXME: write short doc here +//! Completion of names from the current scope, e.g. locals and imported items. use crate::completion::{CompletionContext, Completions}; @@ -797,4 +797,72 @@ mod tests { "### ) } + + #[test] + fn completes_in_simple_macro_1() { + assert_debug_snapshot!( + do_reference_completion( + r" + macro_rules! m { ($e:expr) => { $e } } + fn quux(x: i32) { + let y = 92; + m!(<|>); + } + " + ), + @"[]" + ); + } + + #[test] + fn completes_in_simple_macro_2() { + assert_debug_snapshot!( + do_reference_completion( + r" + macro_rules! 
m { ($e:expr) => { $e } } + fn quux(x: i32) { + let y = 92; + m!(x<|>); + } + " + ), + @r###" + [ + CompletionItem { + label: "m!", + source_range: [145; 146), + delete: [145; 146), + insert: "m!($0)", + kind: Macro, + detail: "macro_rules! m", + }, + CompletionItem { + label: "quux(…)", + source_range: [145; 146), + delete: [145; 146), + insert: "quux(${1:x})$0", + kind: Function, + lookup: "quux", + detail: "fn quux(x: i32)", + }, + CompletionItem { + label: "x", + source_range: [145; 146), + delete: [145; 146), + insert: "x", + kind: Binding, + detail: "i32", + }, + CompletionItem { + label: "y", + source_range: [145; 146), + delete: [145; 146), + insert: "y", + kind: Binding, + detail: "i32", + }, + ] + "### + ); + } } diff --git a/crates/ra_ide/src/completion/completion_context.rs b/crates/ra_ide/src/completion/completion_context.rs index 9aa5a705d..81a033fcb 100644 --- a/crates/ra_ide/src/completion/completion_context.rs +++ b/crates/ra_ide/src/completion/completion_context.rs @@ -5,7 +5,7 @@ use ra_db::SourceDatabase; use ra_ide_db::RootDatabase; use ra_syntax::{ algo::{find_covering_element, find_node_at_offset}, - ast, AstNode, SourceFile, + ast, AstNode, SyntaxKind::*, SyntaxNode, SyntaxToken, TextRange, TextUnit, }; @@ -20,6 +20,9 @@ pub(crate) struct CompletionContext<'a> { pub(super) sema: Semantics<'a, RootDatabase>, pub(super) db: &'a RootDatabase, pub(super) offset: TextUnit, + /// The token before the cursor, in the original file. + pub(super) original_token: SyntaxToken, + /// The token before the cursor, in the macro-expanded file. pub(super) token: SyntaxToken, pub(super) module: Option, pub(super) name_ref_syntax: Option, @@ -67,12 +70,18 @@ impl<'a> CompletionContext<'a> { let edit = AtomTextEdit::insert(position.offset, "intellijRulezz".to_string()); parse.reparse(&edit).tree() }; + let fake_ident_token = + file_with_fake_ident.syntax().token_at_offset(position.offset).right_biased().unwrap(); + // TODO: shouldn't this take the position into account? (in case we're inside a mod {}) let module = sema.to_module_def(position.file_id); - let token = original_file.syntax().token_at_offset(position.offset).left_biased()?; + let original_token = + original_file.syntax().token_at_offset(position.offset).left_biased()?; + let token = sema.descend_into_macros(original_token.clone()); let mut ctx = CompletionContext { sema, db, + original_token, token, offset: position.offset, module, @@ -95,15 +104,45 @@ impl<'a> CompletionContext<'a> { has_type_args: false, dot_receiver_is_ambiguous_float_literal: false, }; - ctx.fill(&original_file, file_with_fake_ident, position.offset); + + let mut original_file = original_file.syntax().clone(); + let mut hypothetical_file = file_with_fake_ident.syntax().clone(); + let mut offset = position.offset; + let mut fake_ident_token = fake_ident_token; + + // Are we inside a macro call? + while let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = ( + find_node_at_offset::(&original_file, offset), + find_node_at_offset::(&hypothetical_file, offset), + ) { + if let (Some(actual_expansion), Some(hypothetical_expansion)) = ( + ctx.sema.expand(&actual_macro_call), + ctx.sema.expand_hypothetical( + &actual_macro_call, + ¯o_call_with_fake_ident, + fake_ident_token, + ), + ) { + // TODO check that the expansions 'look the same' up to the inserted token? 
+ original_file = actual_expansion; + hypothetical_file = hypothetical_expansion.0; + fake_ident_token = hypothetical_expansion.1; + offset = fake_ident_token.text_range().start(); + } else { + break; + } + } + + ctx.fill(&original_file, hypothetical_file, offset); Some(ctx) } // The range of the identifier that is being completed. pub(crate) fn source_range(&self) -> TextRange { + // check kind of macro-expanded token, but use range of original token match self.token.kind() { // workaroud when completion is triggered by trigger characters. - IDENT => self.token.text_range(), + IDENT => self.original_token.text_range(), _ => TextRange::offset_len(self.offset, 0.into()), } } @@ -114,14 +153,12 @@ impl<'a> CompletionContext<'a> { fn fill( &mut self, - original_file: &ast::SourceFile, - file_with_fake_ident: ast::SourceFile, + original_file: &SyntaxNode, + file_with_fake_ident: SyntaxNode, offset: TextUnit, ) { // First, let's try to complete a reference to some declaration. - if let Some(name_ref) = - find_node_at_offset::(file_with_fake_ident.syntax(), offset) - { + if let Some(name_ref) = find_node_at_offset::(&file_with_fake_ident, offset) { // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`. // See RFC#1685. if is_node::(name_ref.syntax()) { @@ -133,8 +170,7 @@ impl<'a> CompletionContext<'a> { // Otherwise, see if this is a declaration. We can use heuristics to // suggest declaration names, see `CompletionKind::Magic`. - if let Some(name) = find_node_at_offset::(file_with_fake_ident.syntax(), offset) - { + if let Some(name) = find_node_at_offset::(&file_with_fake_ident, offset) { if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) { let parent = bind_pat.syntax().parent(); if parent.clone().and_then(ast::MatchArm::cast).is_some() @@ -148,23 +184,24 @@ impl<'a> CompletionContext<'a> { return; } if name.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() { - self.record_lit_pat = find_node_at_offset(original_file.syntax(), self.offset); + self.record_lit_pat = + self.sema.find_node_at_offset_with_macros(&original_file, self.offset); } } } - fn classify_name_ref(&mut self, original_file: &SourceFile, name_ref: ast::NameRef) { + fn classify_name_ref(&mut self, original_file: &SyntaxNode, name_ref: ast::NameRef) { self.name_ref_syntax = - find_node_at_offset(original_file.syntax(), name_ref.syntax().text_range().start()); + find_node_at_offset(&original_file, name_ref.syntax().text_range().start()); let name_range = name_ref.syntax().text_range(); if name_ref.syntax().parent().and_then(ast::RecordField::cast).is_some() { - self.record_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset); + self.record_lit_syntax = + self.sema.find_node_at_offset_with_macros(&original_file, self.offset); } self.impl_def = self - .token - .parent() - .ancestors() + .sema + .ancestors_with_macros(self.token.parent()) .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) .find_map(ast::ImplDef::cast); @@ -183,12 +220,12 @@ impl<'a> CompletionContext<'a> { _ => (), } - self.use_item_syntax = self.token.parent().ancestors().find_map(ast::UseItem::cast); + self.use_item_syntax = + self.sema.ancestors_with_macros(self.token.parent()).find_map(ast::UseItem::cast); self.function_syntax = self - .token - .parent() - .ancestors() + .sema + .ancestors_with_macros(self.token.parent()) .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) .find_map(ast::FnDef::cast); @@ -242,7 +279,7 @@ impl<'a> CompletionContext<'a> { if let 
Some(off) = name_ref.syntax().text_range().start().checked_sub(2.into()) { if let Some(if_expr) = - find_node_at_offset::(original_file.syntax(), off) + self.sema.find_node_at_offset_with_macros::(original_file, off) { if if_expr.syntax().text_range().end() < name_ref.syntax().text_range().start() @@ -259,7 +296,7 @@ impl<'a> CompletionContext<'a> { self.dot_receiver = field_expr .expr() .map(|e| e.syntax().text_range()) - .and_then(|r| find_node_with_range(original_file.syntax(), r)); + .and_then(|r| find_node_with_range(original_file, r)); self.dot_receiver_is_ambiguous_float_literal = if let Some(ast::Expr::Literal(l)) = &self.dot_receiver { match l.kind() { @@ -275,7 +312,7 @@ impl<'a> CompletionContext<'a> { self.dot_receiver = method_call_expr .expr() .map(|e| e.syntax().text_range()) - .and_then(|r| find_node_with_range(original_file.syntax(), r)); + .and_then(|r| find_node_with_range(original_file, r)); self.is_call = true; } } -- cgit v1.2.3
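For orientation, the sketch below condenses the flow this patch wires into `CompletionContext::new`: the completion position is re-parsed with a fake identifier inserted, and as long as both the real file and the fake-ident file contain a macro call covering the cursor, both are expanded — `Semantics::expand` for the real call, and the new `Semantics::expand_hypothetical` for the fake-ident call, which also maps the fake token into the expansion — then the loop repeats on the expanded trees so that nested calls like `m!(m!(m!(a.x<|>)))` are handled. This is an illustration of the control flow at this revision, not part of the patch: the helper name `descend_into_macro_calls` is mine, and `sema`, the two parses, and `offset` are assumed to come from the surrounding completion machinery.

```rust
use hir::Semantics;
use ra_ide_db::RootDatabase;
use ra_syntax::{algo::find_node_at_offset, ast, SyntaxNode, SyntaxToken, TextUnit};

/// Walk the cursor down through (possibly nested) macro calls, returning the
/// innermost expanded trees and the offset of the fake ident inside them.
/// Mirrors the loop this patch adds to `CompletionContext::new`.
fn descend_into_macro_calls(
    sema: &Semantics<'_, RootDatabase>,
    mut original_file: SyntaxNode,     // real file, or the previous expansion of it
    mut hypothetical_file: SyntaxNode, // same tree, but with the fake ident inserted
    mut fake_ident_token: SyntaxToken, // the inserted fake ident ("intellijRulezz")
    mut offset: TextUnit,              // completion offset within the current trees
) -> (SyntaxNode, SyntaxNode, TextUnit) {
    // Keep descending while the cursor is still inside a macro call in both trees.
    while let (Some(actual_macro_call), Some(call_with_fake_ident)) = (
        find_node_at_offset::<ast::MacroCall>(&original_file, offset),
        find_node_at_offset::<ast::MacroCall>(&hypothetical_file, offset),
    ) {
        let expansions = (
            // Expand the real call so name resolution and type inference see real code…
            sema.expand(&actual_macro_call),
            // …and expand the call containing the fake ident, mapping the fake token
            // through the token maps of the expansion.
            sema.expand_hypothetical(&actual_macro_call, &call_with_fake_ident, fake_ident_token.clone()),
        );
        match expansions {
            (Some(actual_expansion), Some((hypothetical_expansion, mapped_token))) => {
                original_file = actual_expansion;
                hypothetical_file = hypothetical_expansion;
                fake_ident_token = mapped_token;
                // Continue the search at the fake ident's position inside the expansion.
                offset = fake_ident_token.text_range().start();
            }
            // Expansion failed: stop and complete against whatever we have so far.
            _ => break,
        }
    }
    (original_file, hypothetical_file, offset)
}
```

The `works_in_simple_macro_2` test above documents the limitation this loop currently runs into: in `m!(a.<|>)` the `$e:expr` fragment does not parse without the fake token, so the hypothetical expansion fails, the loop stops early, and no dot completions are offered until macro expansion gains better error recovery.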