-rw-r--r--  .github/workflows/ci.yaml | 19
-rw-r--r--  Cargo.lock | 53
-rw-r--r--  crates/ra_assists/Cargo.toml | 1
-rw-r--r--  crates/ra_assists/src/assist_ctx.rs | 40
-rw-r--r--  crates/ra_assists/src/ast_transform.rs | 67
-rw-r--r--  crates/ra_assists/src/handlers/add_explicit_type.rs | 5
-rw-r--r--  crates/ra_assists/src/handlers/add_missing_impl_members.rs | 29
-rw-r--r--  crates/ra_assists/src/handlers/add_new.rs | 11
-rw-r--r--  crates/ra_assists/src/handlers/auto_import.rs | 46
-rw-r--r--  crates/ra_assists/src/handlers/change_visibility.rs | 13
-rw-r--r--  crates/ra_assists/src/handlers/early_return.rs | 23
-rw-r--r--  crates/ra_assists/src/handlers/fill_match_arms.rs | 23
-rw-r--r--  crates/ra_assists/src/handlers/inline_local_variable.rs | 3
-rw-r--r--  crates/ra_assists/src/handlers/introduce_variable.rs | 60
-rw-r--r--  crates/ra_assists/src/handlers/move_bounds.rs | 6
-rw-r--r--  crates/ra_assists/src/handlers/raw_string.rs | 3
-rw-r--r--  crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs | 555
-rw-r--r--  crates/ra_assists/src/lib.rs | 124
-rw-r--r--  crates/ra_assists/src/utils.rs | 27
-rw-r--r--  crates/ra_assists/src/utils/insert_use.rs | 510
-rw-r--r--  crates/ra_cargo_watch/Cargo.toml | 5
-rw-r--r--  crates/ra_hir/src/from_id.rs | 1
-rw-r--r--  crates/ra_hir/src/lib.rs | 9
-rw-r--r--  crates/ra_hir/src/semantics.rs | 405
-rw-r--r--  crates/ra_hir/src/source_analyzer.rs | 247
-rw-r--r--  crates/ra_hir/src/source_binder.rs | 217
-rw-r--r--  crates/ra_hir_def/Cargo.toml | 1
-rw-r--r--  crates/ra_hir_ty/src/infer.rs | 41
-rw-r--r--  crates/ra_hir_ty/src/infer/expr.rs | 8
-rw-r--r--  crates/ra_hir_ty/src/tests.rs | 4
-rw-r--r--  crates/ra_hir_ty/src/tests/coercion.rs | 31
-rw-r--r--  crates/ra_ide/Cargo.toml | 4
-rw-r--r--  crates/ra_ide/src/call_hierarchy.rs | 42
-rw-r--r--  crates/ra_ide/src/call_info.rs | 61
-rw-r--r--  crates/ra_ide/src/completion.rs | 4
-rw-r--r--  crates/ra_ide/src/completion/complete_dot.rs | 4
-rw-r--r--  crates/ra_ide/src/completion/complete_macro_in_item_position.rs | 2
-rw-r--r--  crates/ra_ide/src/completion/complete_path.rs | 4
-rw-r--r--  crates/ra_ide/src/completion/complete_pattern.rs | 2
-rw-r--r--  crates/ra_ide/src/completion/complete_postfix.rs | 2
-rw-r--r--  crates/ra_ide/src/completion/complete_record_literal.rs | 5
-rw-r--r--  crates/ra_ide/src/completion/complete_record_pattern.rs | 5
-rw-r--r--  crates/ra_ide/src/completion/complete_scope.rs | 4
-rw-r--r--  crates/ra_ide/src/completion/complete_trait_impl.rs | 33
-rw-r--r--  crates/ra_ide/src/completion/completion_context.rs | 65
-rw-r--r--  crates/ra_ide/src/diagnostics.rs | 9
-rw-r--r--  crates/ra_ide/src/display/navigation_target.rs | 8
-rw-r--r--  crates/ra_ide/src/expand.rs | 102
-rw-r--r--  crates/ra_ide/src/expand_macro.rs | 31
-rw-r--r--  crates/ra_ide/src/extend_selection.rs | 81
-rw-r--r--  crates/ra_ide/src/goto_definition.rs | 42
-rw-r--r--  crates/ra_ide/src/goto_type_definition.rs | 36
-rw-r--r--  crates/ra_ide/src/hover.rs | 198
-rw-r--r--  crates/ra_ide/src/impls.rs | 49
-rw-r--r--  crates/ra_ide/src/inlay_hints.rs | 48
-rw-r--r--  crates/ra_ide/src/lib.rs | 23
-rw-r--r--  crates/ra_ide/src/marks.rs | 1
-rw-r--r--  crates/ra_ide/src/mock_analysis.rs | 16
-rw-r--r--  crates/ra_ide/src/parent_module.rs | 17
-rw-r--r--  crates/ra_ide/src/references.rs | 130
-rw-r--r--  crates/ra_ide/src/references/classify.rs | 30
-rw-r--r--  crates/ra_ide/src/references/rename.rs | 28
-rw-r--r--  crates/ra_ide/src/runnables.rs | 50
-rw-r--r--  crates/ra_ide/src/snapshots/highlighting.html | 74
-rw-r--r--  crates/ra_ide/src/snapshots/rainbow_highlighting.html | 36
-rw-r--r--  crates/ra_ide/src/ssr.rs | 141
-rw-r--r--  crates/ra_ide/src/syntax_highlighting.rs | 632
-rw-r--r--  crates/ra_ide/src/syntax_highlighting/html.rs | 106
-rw-r--r--  crates/ra_ide/src/syntax_highlighting/tags.rs | 175
-rw-r--r--  crates/ra_ide/src/syntax_highlighting/tests.rs | 133
-rw-r--r--  crates/ra_ide_db/Cargo.toml | 13
-rw-r--r--  crates/ra_ide_db/src/defs.rs | 85
-rw-r--r--  crates/ra_ide_db/src/imports_locator.rs | 27
-rw-r--r--  crates/ra_ide_db/src/line_index.rs | 44
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs | 2
-rw-r--r--  crates/ra_parser/src/grammar/expressions.rs | 3
-rw-r--r--  crates/ra_prof/Cargo.toml | 1
-rw-r--r--  crates/ra_syntax/src/algo.rs | 52
-rw-r--r--  crates/ra_syntax/src/ast/edit.rs | 11
-rw-r--r--  crates/ra_syntax/src/ast/make.rs | 11
-rw-r--r--  crates/ra_syntax/src/ast/tokens.rs | 110
-rw-r--r--  crates/ra_syntax/test_data/parser/inline/ok/0130_let_stmt.rs | 1
-rw-r--r--  crates/ra_syntax/test_data/parser/inline/ok/0130_let_stmt.txt | 41
-rw-r--r--  crates/ra_text_edit/Cargo.toml | 4
-rw-r--r--  crates/rust-analyzer/Cargo.toml | 3
-rw-r--r--  crates/rust-analyzer/src/caps.rs | 23
-rw-r--r--  crates/rust-analyzer/src/cli/analysis_stats.rs | 24
-rw-r--r--  crates/rust-analyzer/src/conv.rs | 116
-rw-r--r--  crates/rust-analyzer/src/main_loop.rs | 3
-rw-r--r--  crates/rust-analyzer/src/main_loop/handlers.rs | 41
-rw-r--r--  crates/rust-analyzer/src/req.rs | 9
-rw-r--r--  crates/rust-analyzer/src/semantic_tokens.rs | 46
-rw-r--r--  crates/test_utils/Cargo.toml | 2
-rw-r--r--  crates/test_utils/src/lib.rs | 1
-rw-r--r--  editors/code/package-lock.json | 11
-rw-r--r--  editors/code/package.json | 144
-rw-r--r--  editors/code/src/client.ts | 16
-rw-r--r--  editors/code/src/config.ts | 2
-rw-r--r--  editors/code/src/inlay_hints.ts | 2
-rw-r--r--  editors/code/src/installation/download_artifact.ts | 30
-rw-r--r--  editors/code/src/installation/download_file.ts | 3
-rw-r--r--  editors/code/src/installation/server.ts | 3
-rw-r--r--  editors/code/src/main.ts | 4
-rw-r--r--  editors/code/src/util.ts | 17
-rw-r--r--  xtask/src/ast_src.rs | 2
-rw-r--r--  xtask/src/codegen/gen_assists_docs.rs | 2
-rw-r--r--  xtask/src/install.rs | 6
-rw-r--r--  xtask/src/lib.rs | 4
-rw-r--r--  xtask/src/not_bash.rs | 2
109 files changed, 3408 insertions, 2533 deletions
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 8ab47106d..3f41d32f7 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -8,6 +8,16 @@ on:
8 - trying 8 - trying
9 9
10jobs: 10jobs:
11 rust-audit:
12 name: Audit Rust vulnerabilities
13 runs-on: ubuntu-latest
14 steps:
15 - name: Checkout repository
16 uses: actions/checkout@v1
17
18 - run: cargo install cargo-audit
19 - run: cargo audit
20
11 rust: 21 rust:
12 name: Rust 22 name: Rust
13 runs-on: ${{ matrix.os }} 23 runs-on: ${{ matrix.os }}
@@ -79,7 +89,7 @@ jobs:
79 if: matrix.os == 'windows-latest' 89 if: matrix.os == 'windows-latest'
80 run: Remove-Item ./target/debug/xtask.exe 90 run: Remove-Item ./target/debug/xtask.exe
81 91
82 type-script: 92 typescript:
83 name: TypeScript 93 name: TypeScript
84 runs-on: ubuntu-latest 94 runs-on: ubuntu-latest
85 env: 95 env:
@@ -96,7 +106,12 @@ jobs:
96 106
97 - run: npm ci 107 - run: npm ci
98 working-directory: ./editors/code 108 working-directory: ./editors/code
99 - run: npm run fmt 109
110 - run: npm audit
111 working-directory: ./editors/code
112
113 - run: npm run lint
100 working-directory: ./editors/code 114 working-directory: ./editors/code
115
101 - run: npm run package --scripts-prepend-node-path 116 - run: npm run package --scripts-prepend-node-path
102 working-directory: ./editors/code 117 working-directory: ./editors/code
diff --git a/Cargo.lock b/Cargo.lock
index e5400f5eb..49fddef4b 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -662,9 +662,9 @@ dependencies = [
662 662
663[[package]] 663[[package]]
664name = "lsp-types" 664name = "lsp-types"
665version = "0.70.2" 665version = "0.71.0"
666source = "registry+https://github.com/rust-lang/crates.io-index" 666source = "registry+https://github.com/rust-lang/crates.io-index"
667checksum = "6743fb3902ab3dfa6ce030daeac6ff492e20bb0fee840739d16f6bfb0efaf91c" 667checksum = "efa6b75633b0c3412ee36fc416e6d9c1e4ff576b536217f4ac3f34ac83d9e564"
668dependencies = [ 668dependencies = [
669 "base64", 669 "base64",
670 "bitflags", 670 "bitflags",
@@ -882,9 +882,9 @@ dependencies = [
882 882
883[[package]] 883[[package]]
884name = "proc-macro2" 884name = "proc-macro2"
885version = "1.0.8" 885version = "1.0.9"
886source = "registry+https://github.com/rust-lang/crates.io-index" 886source = "registry+https://github.com/rust-lang/crates.io-index"
887checksum = "3acb317c6ff86a4e579dfa00fc5e6cca91ecbb4e7eb2df0468805b674eb88548" 887checksum = "6c09721c6781493a2a492a96b5a5bf19b65917fe6728884e7c44dd0c60ca3435"
888dependencies = [ 888dependencies = [
889 "unicode-xid", 889 "unicode-xid",
890] 890]
@@ -906,7 +906,6 @@ version = "0.1.0"
906name = "ra_assists" 906name = "ra_assists"
907version = "0.1.0" 907version = "0.1.0"
908dependencies = [ 908dependencies = [
909 "either",
910 "format-buf", 909 "format-buf",
911 "join_to_string", 910 "join_to_string",
912 "ra_db", 911 "ra_db",
@@ -927,10 +926,8 @@ dependencies = [
927 "cargo_metadata", 926 "cargo_metadata",
928 "crossbeam-channel", 927 "crossbeam-channel",
929 "insta", 928 "insta",
930 "jod-thread",
931 "log", 929 "log",
932 "lsp-types", 930 "lsp-types",
933 "parking_lot",
934 "serde_json", 931 "serde_json",
935] 932]
936 933
@@ -988,7 +985,6 @@ dependencies = [
988 "drop_bomb", 985 "drop_bomb",
989 "either", 986 "either",
990 "insta", 987 "insta",
991 "itertools",
992 "log", 988 "log",
993 "once_cell", 989 "once_cell",
994 "ra_arena", 990 "ra_arena",
@@ -1046,7 +1042,6 @@ version = "0.1.0"
1046dependencies = [ 1042dependencies = [
1047 "either", 1043 "either",
1048 "format-buf", 1044 "format-buf",
1049 "fst",
1050 "indexmap", 1045 "indexmap",
1051 "insta", 1046 "insta",
1052 "itertools", 1047 "itertools",
@@ -1063,29 +1058,17 @@ dependencies = [
1063 "ra_syntax", 1058 "ra_syntax",
1064 "ra_text_edit", 1059 "ra_text_edit",
1065 "rand", 1060 "rand",
1066 "rayon",
1067 "rustc-hash", 1061 "rustc-hash",
1068 "superslice",
1069 "test_utils", 1062 "test_utils",
1070 "unicase",
1071] 1063]
1072 1064
1073[[package]] 1065[[package]]
1074name = "ra_ide_db" 1066name = "ra_ide_db"
1075version = "0.1.0" 1067version = "0.1.0"
1076dependencies = [ 1068dependencies = [
1077 "either",
1078 "format-buf",
1079 "fst", 1069 "fst",
1080 "indexmap",
1081 "insta",
1082 "itertools",
1083 "join_to_string",
1084 "log", 1070 "log",
1085 "once_cell",
1086 "ra_cfg",
1087 "ra_db", 1071 "ra_db",
1088 "ra_fmt",
1089 "ra_hir", 1072 "ra_hir",
1090 "ra_prof", 1073 "ra_prof",
1091 "ra_syntax", 1074 "ra_syntax",
@@ -1093,8 +1076,6 @@ dependencies = [
1093 "rayon", 1076 "rayon",
1094 "rustc-hash", 1077 "rustc-hash",
1095 "superslice", 1078 "superslice",
1096 "test_utils",
1097 "unicase",
1098] 1079]
1099 1080
1100[[package]] 1081[[package]]
@@ -1122,7 +1103,6 @@ name = "ra_prof"
1122version = "0.1.0" 1103version = "0.1.0"
1123dependencies = [ 1104dependencies = [
1124 "backtrace", 1105 "backtrace",
1125 "itertools",
1126 "jemalloc-ctl", 1106 "jemalloc-ctl",
1127 "jemallocator", 1107 "jemallocator",
1128 "once_cell", 1108 "once_cell",
@@ -1165,7 +1145,6 @@ dependencies = [
1165name = "ra_text_edit" 1145name = "ra_text_edit"
1166version = "0.1.0" 1146version = "0.1.0"
1167dependencies = [ 1147dependencies = [
1168 "test_utils",
1169 "text_unit", 1148 "text_unit",
1170] 1149]
1171 1150
@@ -1324,7 +1303,6 @@ version = "0.1.0"
1324dependencies = [ 1303dependencies = [
1325 "anyhow", 1304 "anyhow",
1326 "crossbeam-channel", 1305 "crossbeam-channel",
1327 "either",
1328 "env_logger", 1306 "env_logger",
1329 "globset", 1307 "globset",
1330 "itertools", 1308 "itertools",
@@ -1534,9 +1512,9 @@ checksum = "ab16ced94dbd8a46c82fd81e3ed9a8727dac2977ea869d217bcc4ea1f122e81f"
1534 1512
1535[[package]] 1513[[package]]
1536name = "syn" 1514name = "syn"
1537version = "1.0.15" 1515version = "1.0.16"
1538source = "registry+https://github.com/rust-lang/crates.io-index" 1516source = "registry+https://github.com/rust-lang/crates.io-index"
1539checksum = "7a0294dc449adc58bb6592fff1a23d3e5e6e235afc6a0ffca2657d19e7bbffe5" 1517checksum = "123bd9499cfb380418d509322d7a6d52e5315f064fe4b3ad18a53d6b92c07859"
1540dependencies = [ 1518dependencies = [
1541 "proc-macro2", 1519 "proc-macro2",
1542 "quote", 1520 "quote",
@@ -1577,9 +1555,9 @@ dependencies = [
1577 1555
1578[[package]] 1556[[package]]
1579name = "text_unit" 1557name = "text_unit"
1580version = "0.1.9" 1558version = "0.1.10"
1581source = "registry+https://github.com/rust-lang/crates.io-index" 1559source = "registry+https://github.com/rust-lang/crates.io-index"
1582checksum = "e08bbcb7a3adbda0eb23431206b653bdad3d8dea311e72d36bf2215e27a42579" 1560checksum = "20431e104bfecc1a40872578dbc390e10290a0e9c35fffe3ce6f73c15a9dbfc2"
1583 1561
1584[[package]] 1562[[package]]
1585name = "thin-dst" 1563name = "thin-dst"
@@ -1606,15 +1584,6 @@ dependencies = [
1606] 1584]
1607 1585
1608[[package]] 1586[[package]]
1609name = "unicase"
1610version = "2.6.0"
1611source = "registry+https://github.com/rust-lang/crates.io-index"
1612checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
1613dependencies = [
1614 "version_check",
1615]
1616
1617[[package]]
1618name = "unicode-bidi" 1587name = "unicode-bidi"
1619version = "0.3.4" 1588version = "0.3.4"
1620source = "registry+https://github.com/rust-lang/crates.io-index" 1589source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1657,12 +1626,6 @@ dependencies = [
1657] 1626]
1658 1627
1659[[package]] 1628[[package]]
1660name = "version_check"
1661version = "0.9.1"
1662source = "registry+https://github.com/rust-lang/crates.io-index"
1663checksum = "078775d0255232fb988e6fccf26ddc9d1ac274299aaedcedce21c6f72cc533ce"
1664
1665[[package]]
1666name = "walkdir" 1629name = "walkdir"
1667version = "2.3.1" 1630version = "2.3.1"
1668source = "registry+https://github.com/rust-lang/crates.io-index" 1631source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/crates/ra_assists/Cargo.toml b/crates/ra_assists/Cargo.toml
index 12a933645..d314dc8e6 100644
--- a/crates/ra_assists/Cargo.toml
+++ b/crates/ra_assists/Cargo.toml
@@ -11,7 +11,6 @@ doctest = false
11format-buf = "1.0.0" 11format-buf = "1.0.0"
12join_to_string = "0.1.3" 12join_to_string = "0.1.3"
13rustc-hash = "1.1.0" 13rustc-hash = "1.1.0"
14either = "1.5.3"
15 14
16ra_syntax = { path = "../ra_syntax" } 15ra_syntax = { path = "../ra_syntax" }
17ra_text_edit = { path = "../ra_text_edit" } 16ra_text_edit = { path = "../ra_text_edit" }
diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs
index 5aab5fb8b..c25d2e323 100644
--- a/crates/ra_assists/src/assist_ctx.rs
+++ b/crates/ra_assists/src/assist_ctx.rs
@@ -1,6 +1,6 @@
1//! This module defines `AssistCtx` -- the API surface that is exposed to assists. 1//! This module defines `AssistCtx` -- the API surface that is exposed to assists.
2use hir::{InFile, SourceAnalyzer, SourceBinder}; 2use hir::Semantics;
3use ra_db::{FileRange, SourceDatabase}; 3use ra_db::FileRange;
4use ra_fmt::{leading_indent, reindent}; 4use ra_fmt::{leading_indent, reindent};
5use ra_ide_db::RootDatabase; 5use ra_ide_db::RootDatabase;
6use ra_syntax::{ 6use ra_syntax::{
@@ -74,29 +74,23 @@ pub(crate) type AssistHandler = fn(AssistCtx) -> Option<Assist>;
74/// Note, however, that we don't actually use such two-phase logic at the 74/// Note, however, that we don't actually use such two-phase logic at the
75/// moment, because the LSP API is pretty awkward in this place, and it's much 75/// moment, because the LSP API is pretty awkward in this place, and it's much
76/// easier to just compute the edit eagerly :-) 76/// easier to just compute the edit eagerly :-)
77#[derive(Debug)] 77#[derive(Clone)]
78pub(crate) struct AssistCtx<'a> { 78pub(crate) struct AssistCtx<'a> {
79 pub(crate) sema: &'a Semantics<'a, RootDatabase>,
79 pub(crate) db: &'a RootDatabase, 80 pub(crate) db: &'a RootDatabase,
80 pub(crate) frange: FileRange, 81 pub(crate) frange: FileRange,
81 source_file: SourceFile, 82 source_file: SourceFile,
82 should_compute_edit: bool, 83 should_compute_edit: bool,
83} 84}
84 85
85impl Clone for AssistCtx<'_> {
86 fn clone(&self) -> Self {
87 AssistCtx {
88 db: self.db,
89 frange: self.frange,
90 source_file: self.source_file.clone(),
91 should_compute_edit: self.should_compute_edit,
92 }
93 }
94}
95
96impl<'a> AssistCtx<'a> { 86impl<'a> AssistCtx<'a> {
97 pub fn new(db: &RootDatabase, frange: FileRange, should_compute_edit: bool) -> AssistCtx { 87 pub fn new(
98 let parse = db.parse(frange.file_id); 88 sema: &'a Semantics<'a, RootDatabase>,
99 AssistCtx { db, frange, source_file: parse.tree(), should_compute_edit } 89 frange: FileRange,
90 should_compute_edit: bool,
91 ) -> AssistCtx<'a> {
92 let source_file = sema.parse(frange.file_id);
93 AssistCtx { sema, db: sema.db, frange, source_file, should_compute_edit }
100 } 94 }
101 95
102 pub(crate) fn add_assist( 96 pub(crate) fn add_assist(
@@ -138,18 +132,6 @@ impl<'a> AssistCtx<'a> {
138 pub(crate) fn covering_element(&self) -> SyntaxElement { 132 pub(crate) fn covering_element(&self) -> SyntaxElement {
139 find_covering_element(self.source_file.syntax(), self.frange.range) 133 find_covering_element(self.source_file.syntax(), self.frange.range)
140 } 134 }
141 pub(crate) fn source_binder(&self) -> SourceBinder<'a, RootDatabase> {
142 SourceBinder::new(self.db)
143 }
144 pub(crate) fn source_analyzer(
145 &self,
146 node: &SyntaxNode,
147 offset: Option<TextUnit>,
148 ) -> SourceAnalyzer {
149 let src = InFile::new(self.frange.file_id.into(), node);
150 self.source_binder().analyze(src, offset)
151 }
152
153 pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement { 135 pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement {
154 find_covering_element(self.source_file.syntax(), range) 136 find_covering_element(self.source_file.syntax(), range)
155 } 137 }
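
For orientation, a rough sketch (not part of the patch) of what a handler now looks like against the reworked AssistCtx: type information comes from the shared `ctx.sema` handle instead of a per-node SourceAnalyzer. Only Semantics/AssistCtx calls that appear in this diff are used; the handler itself, its id and its label are invented for illustration.

    use ra_syntax::{ast, AstNode};
    use crate::{Assist, AssistCtx, AssistId};

    // Illustrative handler only; not part of this change.
    fn show_expr_type(ctx: AssistCtx) -> Option<Assist> {
        let expr = ctx.find_node_at_offset::<ast::Expr>()?;
        // The shared Semantics handle replaces ad-hoc SourceAnalyzer construction.
        let ty = ctx.sema.type_of_expr(&expr)?;
        if ty.contains_unknown() {
            return None;
        }
        ctx.add_assist(
            AssistId("show_expr_type"),
            format!("Expression has type '{}'", ty.display(ctx.db)),
            |edit| {
                edit.target(expr.syntax().text_range());
            },
        )
    }
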
diff --git a/crates/ra_assists/src/ast_transform.rs b/crates/ra_assists/src/ast_transform.rs
index c6d15af5f..a74ac42d5 100644
--- a/crates/ra_assists/src/ast_transform.rs
+++ b/crates/ra_assists/src/ast_transform.rs
@@ -1,15 +1,12 @@
1//! `AstTransformer`s are functions that replace nodes in an AST and can be easily combined. 1//! `AstTransformer`s are functions that replace nodes in an AST and can be easily combined.
2use rustc_hash::FxHashMap; 2use rustc_hash::FxHashMap;
3 3
4use hir::{InFile, PathResolution}; 4use hir::{PathResolution, SemanticsScope};
5use ra_ide_db::RootDatabase; 5use ra_ide_db::RootDatabase;
6use ra_syntax::ast::{self, AstNode}; 6use ra_syntax::ast::{self, AstNode};
7 7
8pub trait AstTransform<'a> { 8pub trait AstTransform<'a> {
9 fn get_substitution( 9 fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode>;
10 &self,
11 node: InFile<&ra_syntax::SyntaxNode>,
12 ) -> Option<ra_syntax::SyntaxNode>;
13 10
14 fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a>; 11 fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a>;
15 fn or<T: AstTransform<'a> + 'a>(self, other: T) -> Box<dyn AstTransform<'a> + 'a> 12 fn or<T: AstTransform<'a> + 'a>(self, other: T) -> Box<dyn AstTransform<'a> + 'a>
@@ -23,10 +20,7 @@ pub trait AstTransform<'a> {
23struct NullTransformer; 20struct NullTransformer;
24 21
25impl<'a> AstTransform<'a> for NullTransformer { 22impl<'a> AstTransform<'a> for NullTransformer {
26 fn get_substitution( 23 fn get_substitution(&self, _node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode> {
27 &self,
28 _node: InFile<&ra_syntax::SyntaxNode>,
29 ) -> Option<ra_syntax::SyntaxNode> {
30 None 24 None
31 } 25 }
32 fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> { 26 fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> {
@@ -35,14 +29,16 @@ impl<'a> AstTransform<'a> for NullTransformer {
35} 29}
36 30
37pub struct SubstituteTypeParams<'a> { 31pub struct SubstituteTypeParams<'a> {
38 db: &'a RootDatabase, 32 source_scope: &'a SemanticsScope<'a, RootDatabase>,
39 substs: FxHashMap<hir::TypeParam, ast::TypeRef>, 33 substs: FxHashMap<hir::TypeParam, ast::TypeRef>,
40 previous: Box<dyn AstTransform<'a> + 'a>, 34 previous: Box<dyn AstTransform<'a> + 'a>,
41} 35}
42 36
43impl<'a> SubstituteTypeParams<'a> { 37impl<'a> SubstituteTypeParams<'a> {
44 pub fn for_trait_impl( 38 pub fn for_trait_impl(
39 source_scope: &'a SemanticsScope<'a, RootDatabase>,
45 db: &'a RootDatabase, 40 db: &'a RootDatabase,
41 // FIXME: there's implicit invariant that `trait_` and `source_scope` match...
46 trait_: hir::Trait, 42 trait_: hir::Trait,
47 impl_block: ast::ImplBlock, 43 impl_block: ast::ImplBlock,
48 ) -> SubstituteTypeParams<'a> { 44 ) -> SubstituteTypeParams<'a> {
@@ -56,7 +52,7 @@ impl<'a> SubstituteTypeParams<'a> {
56 .zip(substs.into_iter()) 52 .zip(substs.into_iter())
57 .collect(); 53 .collect();
58 return SubstituteTypeParams { 54 return SubstituteTypeParams {
59 db, 55 source_scope,
60 substs: substs_by_param, 56 substs: substs_by_param,
61 previous: Box::new(NullTransformer), 57 previous: Box::new(NullTransformer),
62 }; 58 };
@@ -80,15 +76,15 @@ impl<'a> SubstituteTypeParams<'a> {
80 } 76 }
81 fn get_substitution_inner( 77 fn get_substitution_inner(
82 &self, 78 &self,
83 node: InFile<&ra_syntax::SyntaxNode>, 79 node: &ra_syntax::SyntaxNode,
84 ) -> Option<ra_syntax::SyntaxNode> { 80 ) -> Option<ra_syntax::SyntaxNode> {
85 let type_ref = ast::TypeRef::cast(node.value.clone())?; 81 let type_ref = ast::TypeRef::cast(node.clone())?;
86 let path = match &type_ref { 82 let path = match &type_ref {
87 ast::TypeRef::PathType(path_type) => path_type.path()?, 83 ast::TypeRef::PathType(path_type) => path_type.path()?,
88 _ => return None, 84 _ => return None,
89 }; 85 };
90 let analyzer = hir::SourceAnalyzer::new(self.db, node, None); 86 let path = hir::Path::from_ast(path)?;
91 let resolution = analyzer.resolve_path(self.db, &path)?; 87 let resolution = self.source_scope.resolve_hir_path(&path)?;
92 match resolution { 88 match resolution {
93 hir::PathResolution::TypeParam(tp) => Some(self.substs.get(&tp)?.syntax().clone()), 89 hir::PathResolution::TypeParam(tp) => Some(self.substs.get(&tp)?.syntax().clone()),
94 _ => None, 90 _ => None,
@@ -97,10 +93,7 @@ impl<'a> SubstituteTypeParams<'a> {
97} 93}
98 94
99impl<'a> AstTransform<'a> for SubstituteTypeParams<'a> { 95impl<'a> AstTransform<'a> for SubstituteTypeParams<'a> {
100 fn get_substitution( 96 fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode> {
101 &self,
102 node: InFile<&ra_syntax::SyntaxNode>,
103 ) -> Option<ra_syntax::SyntaxNode> {
104 self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node)) 97 self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node))
105 } 98 }
106 fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> { 99 fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> {
@@ -109,29 +102,34 @@ impl<'a> AstTransform<'a> for SubstituteTypeParams<'a> {
109} 102}
110 103
111pub struct QualifyPaths<'a> { 104pub struct QualifyPaths<'a> {
105 target_scope: &'a SemanticsScope<'a, RootDatabase>,
106 source_scope: &'a SemanticsScope<'a, RootDatabase>,
112 db: &'a RootDatabase, 107 db: &'a RootDatabase,
113 from: Option<hir::Module>,
114 previous: Box<dyn AstTransform<'a> + 'a>, 108 previous: Box<dyn AstTransform<'a> + 'a>,
115} 109}
116 110
117impl<'a> QualifyPaths<'a> { 111impl<'a> QualifyPaths<'a> {
118 pub fn new(db: &'a RootDatabase, from: Option<hir::Module>) -> Self { 112 pub fn new(
119 Self { db, from, previous: Box::new(NullTransformer) } 113 target_scope: &'a SemanticsScope<'a, RootDatabase>,
114 source_scope: &'a SemanticsScope<'a, RootDatabase>,
115 db: &'a RootDatabase,
116 ) -> Self {
117 Self { target_scope, source_scope, db, previous: Box::new(NullTransformer) }
120 } 118 }
121 119
122 fn get_substitution_inner( 120 fn get_substitution_inner(
123 &self, 121 &self,
124 node: InFile<&ra_syntax::SyntaxNode>, 122 node: &ra_syntax::SyntaxNode,
125 ) -> Option<ra_syntax::SyntaxNode> { 123 ) -> Option<ra_syntax::SyntaxNode> {
126 // FIXME handle value ns? 124 // FIXME handle value ns?
127 let from = self.from?; 125 let from = self.target_scope.module()?;
128 let p = ast::Path::cast(node.value.clone())?; 126 let p = ast::Path::cast(node.clone())?;
129 if p.segment().and_then(|s| s.param_list()).is_some() { 127 if p.segment().and_then(|s| s.param_list()).is_some() {
130 // don't try to qualify `Fn(Foo) -> Bar` paths, they are in prelude anyway 128 // don't try to qualify `Fn(Foo) -> Bar` paths, they are in prelude anyway
131 return None; 129 return None;
132 } 130 }
133 let analyzer = hir::SourceAnalyzer::new(self.db, node, None); 131 let hir_path = hir::Path::from_ast(p.clone());
134 let resolution = analyzer.resolve_path(self.db, &p)?; 132 let resolution = self.source_scope.resolve_hir_path(&hir_path?)?;
135 match resolution { 133 match resolution {
136 PathResolution::Def(def) => { 134 PathResolution::Def(def) => {
137 let found_path = from.find_use_path(self.db, def)?; 135 let found_path = from.find_use_path(self.db, def)?;
@@ -140,7 +138,7 @@ impl<'a> QualifyPaths<'a> {
140 let type_args = p 138 let type_args = p
141 .segment() 139 .segment()
142 .and_then(|s| s.type_arg_list()) 140 .and_then(|s| s.type_arg_list())
143 .map(|arg_list| apply(self, node.with_value(arg_list))); 141 .map(|arg_list| apply(self, arg_list));
144 if let Some(type_args) = type_args { 142 if let Some(type_args) = type_args {
145 let last_segment = path.segment().unwrap(); 143 let last_segment = path.segment().unwrap();
146 path = path.with_segment(last_segment.with_type_args(type_args)) 144 path = path.with_segment(last_segment.with_type_args(type_args))
@@ -157,11 +155,11 @@ impl<'a> QualifyPaths<'a> {
157 } 155 }
158} 156}
159 157
160pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: InFile<N>) -> N { 158pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N {
161 let syntax = node.value.syntax(); 159 let syntax = node.syntax();
162 let result = ra_syntax::algo::replace_descendants(syntax, &|element| match element { 160 let result = ra_syntax::algo::replace_descendants(syntax, |element| match element {
163 ra_syntax::SyntaxElement::Node(n) => { 161 ra_syntax::SyntaxElement::Node(n) => {
164 let replacement = transformer.get_substitution(node.with_value(&n))?; 162 let replacement = transformer.get_substitution(&n)?;
165 Some(replacement.into()) 163 Some(replacement.into())
166 } 164 }
167 _ => None, 165 _ => None,
@@ -170,10 +168,7 @@ pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: InFile<N>
170} 168}
171 169
172impl<'a> AstTransform<'a> for QualifyPaths<'a> { 170impl<'a> AstTransform<'a> for QualifyPaths<'a> {
173 fn get_substitution( 171 fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode> {
174 &self,
175 node: InFile<&ra_syntax::SyntaxNode>,
176 ) -> Option<ra_syntax::SyntaxNode> {
177 self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node)) 172 self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node))
178 } 173 }
179 fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> { 174 fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> {
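
As a rough sketch of how the slimmed-down transform API is driven (this mirrors the add_missing_impl_members change below): both scopes now come from Semantics, the two transformers are chained with `or`, and `apply` takes plain syntax nodes rather than InFile-wrapped ones. The bindings `sema`, `trait_`, `impl_node`, `impl_item_list` and `missing_items` stand in for values an assist already has in hand.

    let source_scope = sema.scope_for_def(trait_);
    let target_scope = sema.scope(impl_item_list.syntax());
    let transform = QualifyPaths::new(&target_scope, &source_scope, sema.db)
        .or(SubstituteTypeParams::for_trait_impl(&source_scope, sema.db, trait_, impl_node));
    // Nodes go in and come out directly; no file id bookkeeping is needed.
    let items: Vec<ast::ImplItem> =
        missing_items.into_iter().map(|it| ast_transform::apply(&*transform, it)).collect();
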
diff --git a/crates/ra_assists/src/handlers/add_explicit_type.rs b/crates/ra_assists/src/handlers/add_explicit_type.rs
index 2cb9d2f48..a63ef48b1 100644
--- a/crates/ra_assists/src/handlers/add_explicit_type.rs
+++ b/crates/ra_assists/src/handlers/add_explicit_type.rs
@@ -51,14 +51,13 @@ pub(crate) fn add_explicit_type(ctx: AssistCtx) -> Option<Assist> {
51 } 51 }
52 } 52 }
53 // Infer type 53 // Infer type
54 let db = ctx.db; 54 let ty = ctx.sema.type_of_expr(&expr)?;
55 let analyzer = ctx.source_analyzer(stmt.syntax(), None);
56 let ty = analyzer.type_of(db, &expr)?;
57 // Assist not applicable if the type is unknown 55 // Assist not applicable if the type is unknown
58 if ty.contains_unknown() { 56 if ty.contains_unknown() {
59 return None; 57 return None;
60 } 58 }
61 59
60 let db = ctx.db;
62 ctx.add_assist( 61 ctx.add_assist(
63 AssistId("add_explicit_type"), 62 AssistId("add_explicit_type"),
64 format!("Insert explicit type '{}'", ty.display(db)), 63 format!("Insert explicit type '{}'", ty.display(db)),
diff --git a/crates/ra_assists/src/handlers/add_missing_impl_members.rs b/crates/ra_assists/src/handlers/add_missing_impl_members.rs
index ab21388c8..4005014bd 100644
--- a/crates/ra_assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/ra_assists/src/handlers/add_missing_impl_members.rs
@@ -1,4 +1,4 @@
1use hir::{HasSource, InFile}; 1use hir::HasSource;
2use ra_syntax::{ 2use ra_syntax::{
3 ast::{self, edit, make, AstNode, NameOwner}, 3 ast::{self, edit, make, AstNode, NameOwner},
4 SmolStr, 4 SmolStr,
@@ -104,9 +104,7 @@ fn add_missing_impl_members_inner(
104 let impl_node = ctx.find_node_at_offset::<ast::ImplBlock>()?; 104 let impl_node = ctx.find_node_at_offset::<ast::ImplBlock>()?;
105 let impl_item_list = impl_node.item_list()?; 105 let impl_item_list = impl_node.item_list()?;
106 106
107 let analyzer = ctx.source_analyzer(impl_node.syntax(), None); 107 let trait_ = resolve_target_trait(&ctx.sema, &impl_node)?;
108
109 let trait_ = resolve_target_trait(ctx.db, &analyzer, &impl_node)?;
110 108
111 let def_name = |item: &ast::ImplItem| -> Option<SmolStr> { 109 let def_name = |item: &ast::ImplItem| -> Option<SmolStr> {
112 match item { 110 match item {
@@ -117,7 +115,7 @@ fn add_missing_impl_members_inner(
117 .map(|it| it.text().clone()) 115 .map(|it| it.text().clone())
118 }; 116 };
119 117
120 let missing_items = get_missing_impl_items(ctx.db, &analyzer, &impl_node) 118 let missing_items = get_missing_impl_items(&ctx.sema, &impl_node)
121 .iter() 119 .iter()
122 .map(|i| match i { 120 .map(|i| match i {
123 hir::AssocItem::Function(i) => ast::ImplItem::FnDef(i.source(ctx.db).value), 121 hir::AssocItem::Function(i) => ast::ImplItem::FnDef(i.source(ctx.db).value),
@@ -138,23 +136,17 @@ fn add_missing_impl_members_inner(
138 return None; 136 return None;
139 } 137 }
140 138
141 let db = ctx.db; 139 let sema = ctx.sema;
142 let file_id = ctx.frange.file_id;
143 let trait_file_id = trait_.source(db).file_id;
144 140
145 ctx.add_assist(AssistId(assist_id), label, |edit| { 141 ctx.add_assist(AssistId(assist_id), label, |edit| {
146 let n_existing_items = impl_item_list.impl_items().count(); 142 let n_existing_items = impl_item_list.impl_items().count();
147 let module = hir::SourceAnalyzer::new( 143 let source_scope = sema.scope_for_def(trait_);
148 db, 144 let target_scope = sema.scope(impl_item_list.syntax());
149 hir::InFile::new(file_id.into(), impl_node.syntax()), 145 let ast_transform = QualifyPaths::new(&target_scope, &source_scope, sema.db)
150 None, 146 .or(SubstituteTypeParams::for_trait_impl(&source_scope, sema.db, trait_, impl_node));
151 )
152 .module();
153 let ast_transform = QualifyPaths::new(db, module)
154 .or(SubstituteTypeParams::for_trait_impl(db, trait_, impl_node));
155 let items = missing_items 147 let items = missing_items
156 .into_iter() 148 .into_iter()
157 .map(|it| ast_transform::apply(&*ast_transform, InFile::new(trait_file_id, it))) 149 .map(|it| ast_transform::apply(&*ast_transform, it))
158 .map(|it| match it { 150 .map(|it| match it {
159 ast::ImplItem::FnDef(def) => ast::ImplItem::FnDef(add_body(def)), 151 ast::ImplItem::FnDef(def) => ast::ImplItem::FnDef(add_body(def)),
160 _ => it, 152 _ => it,
@@ -181,9 +173,10 @@ fn add_body(fn_def: ast::FnDef) -> ast::FnDef {
181 173
182#[cfg(test)] 174#[cfg(test)]
183mod tests { 175mod tests {
184 use super::*;
185 use crate::helpers::{check_assist, check_assist_not_applicable}; 176 use crate::helpers::{check_assist, check_assist_not_applicable};
186 177
178 use super::*;
179
187 #[test] 180 #[test]
188 fn test_add_missing_impl_members() { 181 fn test_add_missing_impl_members() {
189 check_assist( 182 check_assist(
diff --git a/crates/ra_assists/src/handlers/add_new.rs b/crates/ra_assists/src/handlers/add_new.rs
index dd070e8ec..166e907fb 100644
--- a/crates/ra_assists/src/handlers/add_new.rs
+++ b/crates/ra_assists/src/handlers/add_new.rs
@@ -1,5 +1,5 @@
1use format_buf::format; 1use format_buf::format;
2use hir::{Adt, InFile}; 2use hir::Adt;
3use join_to_string::join; 3use join_to_string::join;
4use ra_syntax::{ 4use ra_syntax::{
5 ast::{ 5 ast::{
@@ -133,16 +133,11 @@ fn find_struct_impl(ctx: &AssistCtx, strukt: &ast::StructDef) -> Option<Option<a
133 let module = strukt.syntax().ancestors().find(|node| { 133 let module = strukt.syntax().ancestors().find(|node| {
134 ast::Module::can_cast(node.kind()) || ast::SourceFile::can_cast(node.kind()) 134 ast::Module::can_cast(node.kind()) || ast::SourceFile::can_cast(node.kind())
135 })?; 135 })?;
136 let mut sb = ctx.source_binder();
137 136
138 let struct_def = { 137 let struct_def = ctx.sema.to_def(strukt)?;
139 let src = InFile { file_id: ctx.frange.file_id.into(), value: strukt.clone() };
140 sb.to_def(src)?
141 };
142 138
143 let block = module.descendants().filter_map(ast::ImplBlock::cast).find_map(|impl_blk| { 139 let block = module.descendants().filter_map(ast::ImplBlock::cast).find_map(|impl_blk| {
144 let src = InFile { file_id: ctx.frange.file_id.into(), value: impl_blk.clone() }; 140 let blk = ctx.sema.to_def(&impl_blk)?;
145 let blk = sb.to_def(src)?;
146 141
147 // FIXME: handle e.g. `struct S<T>; impl<U> S<U> {}` 142 // FIXME: handle e.g. `struct S<T>; impl<U> S<U> {}`
148 // (we currently use the wrong type parameter) 143 // (we currently use the wrong type parameter)
diff --git a/crates/ra_assists/src/handlers/auto_import.rs b/crates/ra_assists/src/handlers/auto_import.rs
index c4aea2a06..c8bf181f9 100644
--- a/crates/ra_assists/src/handlers/auto_import.rs
+++ b/crates/ra_assists/src/handlers/auto_import.rs
@@ -1,10 +1,11 @@
1use crate::{ 1use crate::{
2 assist_ctx::{Assist, AssistCtx}, 2 assist_ctx::{Assist, AssistCtx},
3 insert_use_statement, AssistId, 3 utils::insert_use_statement,
4 AssistId,
4}; 5};
5use hir::{ 6use hir::{
6 db::HirDatabase, AsAssocItem, AssocItemContainer, ModPath, Module, ModuleDef, PathResolution, 7 AsAssocItem, AssocItemContainer, ModPath, Module, ModuleDef, PathResolution, Semantics, Trait,
7 SourceAnalyzer, Trait, Type, 8 Type,
8}; 9};
9use ra_ide_db::{imports_locator::ImportsLocator, RootDatabase}; 10use ra_ide_db::{imports_locator::ImportsLocator, RootDatabase};
10use ra_prof::profile; 11use ra_prof::profile;
@@ -52,7 +53,6 @@ pub(crate) fn auto_import(ctx: AssistCtx) -> Option<Assist> {
52 edit.target(auto_import_assets.syntax_under_caret.text_range()); 53 edit.target(auto_import_assets.syntax_under_caret.text_range());
53 insert_use_statement( 54 insert_use_statement(
54 &auto_import_assets.syntax_under_caret, 55 &auto_import_assets.syntax_under_caret,
55 &auto_import_assets.syntax_under_caret,
56 &import, 56 &import,
57 edit.text_edit_builder(), 57 edit.text_edit_builder(),
58 ); 58 );
@@ -78,14 +78,9 @@ impl AutoImportAssets {
78 78
79 fn for_method_call(method_call: ast::MethodCallExpr, ctx: &AssistCtx) -> Option<Self> { 79 fn for_method_call(method_call: ast::MethodCallExpr, ctx: &AssistCtx) -> Option<Self> {
80 let syntax_under_caret = method_call.syntax().to_owned(); 80 let syntax_under_caret = method_call.syntax().to_owned();
81 let source_analyzer = ctx.source_analyzer(&syntax_under_caret, None); 81 let module_with_name_to_import = ctx.sema.scope(&syntax_under_caret).module()?;
82 let module_with_name_to_import = source_analyzer.module()?;
83 Some(Self { 82 Some(Self {
84 import_candidate: ImportCandidate::for_method_call( 83 import_candidate: ImportCandidate::for_method_call(&ctx.sema, &method_call)?,
85 &method_call,
86 &source_analyzer,
87 ctx.db,
88 )?,
89 module_with_name_to_import, 84 module_with_name_to_import,
90 syntax_under_caret, 85 syntax_under_caret,
91 }) 86 })
@@ -97,14 +92,9 @@ impl AutoImportAssets {
97 return None; 92 return None;
98 } 93 }
99 94
100 let source_analyzer = ctx.source_analyzer(&syntax_under_caret, None); 95 let module_with_name_to_import = ctx.sema.scope(&syntax_under_caret).module()?;
101 let module_with_name_to_import = source_analyzer.module()?;
102 Some(Self { 96 Some(Self {
103 import_candidate: ImportCandidate::for_regular_path( 97 import_candidate: ImportCandidate::for_regular_path(&ctx.sema, &path_under_caret)?,
104 &path_under_caret,
105 &source_analyzer,
106 ctx.db,
107 )?,
108 module_with_name_to_import, 98 module_with_name_to_import,
109 syntax_under_caret, 99 syntax_under_caret,
110 }) 100 })
@@ -229,25 +219,23 @@ enum ImportCandidate {
229 219
230impl ImportCandidate { 220impl ImportCandidate {
231 fn for_method_call( 221 fn for_method_call(
222 sema: &Semantics<RootDatabase>,
232 method_call: &ast::MethodCallExpr, 223 method_call: &ast::MethodCallExpr,
233 source_analyzer: &SourceAnalyzer,
234 db: &impl HirDatabase,
235 ) -> Option<Self> { 224 ) -> Option<Self> {
236 if source_analyzer.resolve_method_call(method_call).is_some() { 225 if sema.resolve_method_call(method_call).is_some() {
237 return None; 226 return None;
238 } 227 }
239 Some(Self::TraitMethod( 228 Some(Self::TraitMethod(
240 source_analyzer.type_of(db, &method_call.expr()?)?, 229 sema.type_of_expr(&method_call.expr()?)?,
241 method_call.name_ref()?.syntax().to_string(), 230 method_call.name_ref()?.syntax().to_string(),
242 )) 231 ))
243 } 232 }
244 233
245 fn for_regular_path( 234 fn for_regular_path(
235 sema: &Semantics<RootDatabase>,
246 path_under_caret: &ast::Path, 236 path_under_caret: &ast::Path,
247 source_analyzer: &SourceAnalyzer,
248 db: &impl HirDatabase,
249 ) -> Option<Self> { 237 ) -> Option<Self> {
250 if source_analyzer.resolve_path(db, path_under_caret).is_some() { 238 if sema.resolve_path(path_under_caret).is_some() {
251 return None; 239 return None;
252 } 240 }
253 241
@@ -256,17 +244,15 @@ impl ImportCandidate {
256 let qualifier_start = qualifier.syntax().descendants().find_map(ast::NameRef::cast)?; 244 let qualifier_start = qualifier.syntax().descendants().find_map(ast::NameRef::cast)?;
257 let qualifier_start_path = 245 let qualifier_start_path =
258 qualifier_start.syntax().ancestors().find_map(ast::Path::cast)?; 246 qualifier_start.syntax().ancestors().find_map(ast::Path::cast)?;
259 if let Some(qualifier_start_resolution) = 247 if let Some(qualifier_start_resolution) = sema.resolve_path(&qualifier_start_path) {
260 source_analyzer.resolve_path(db, &qualifier_start_path)
261 {
262 let qualifier_resolution = if qualifier_start_path == qualifier { 248 let qualifier_resolution = if qualifier_start_path == qualifier {
263 qualifier_start_resolution 249 qualifier_start_resolution
264 } else { 250 } else {
265 source_analyzer.resolve_path(db, &qualifier)? 251 sema.resolve_path(&qualifier)?
266 }; 252 };
267 if let PathResolution::Def(ModuleDef::Adt(assoc_item_path)) = qualifier_resolution { 253 if let PathResolution::Def(ModuleDef::Adt(assoc_item_path)) = qualifier_resolution {
268 Some(ImportCandidate::TraitAssocItem( 254 Some(ImportCandidate::TraitAssocItem(
269 assoc_item_path.ty(db), 255 assoc_item_path.ty(sema.db),
270 segment.syntax().to_string(), 256 segment.syntax().to_string(),
271 )) 257 ))
272 } else { 258 } else {
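
Condensed, the method-call case now reads as below: both resolution and type lookup go through Semantics, so no SourceAnalyzer or explicit `db` parameter is threaded through. A minimal sketch using only calls from this hunk; the helper name is made up.

    // Hypothetical helper mirroring ImportCandidate::for_method_call above.
    fn trait_method_to_import(
        sema: &Semantics<RootDatabase>,
        call: &ast::MethodCallExpr,
    ) -> Option<(Type, String)> {
        if sema.resolve_method_call(call).is_some() {
            return None; // the call already resolves; nothing to import
        }
        let receiver_ty = sema.type_of_expr(&call.expr()?)?;
        Some((receiver_ty, call.name_ref()?.syntax().to_string()))
    }
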
diff --git a/crates/ra_assists/src/handlers/change_visibility.rs b/crates/ra_assists/src/handlers/change_visibility.rs
index f325b6f92..54e0a6c84 100644
--- a/crates/ra_assists/src/handlers/change_visibility.rs
+++ b/crates/ra_assists/src/handlers/change_visibility.rs
@@ -2,8 +2,8 @@ use ra_syntax::{
2 ast::{self, NameOwner, VisibilityOwner}, 2 ast::{self, NameOwner, VisibilityOwner},
3 AstNode, 3 AstNode,
4 SyntaxKind::{ 4 SyntaxKind::{
5 ATTR, COMMENT, ENUM_DEF, FN_DEF, IDENT, MODULE, STRUCT_DEF, TRAIT_DEF, VISIBILITY, 5 ATTR, COMMENT, CONST_DEF, ENUM_DEF, FN_DEF, IDENT, MODULE, STRUCT_DEF, TRAIT_DEF,
6 WHITESPACE, 6 VISIBILITY, WHITESPACE,
7 }, 7 },
8 SyntaxNode, TextUnit, T, 8 SyntaxNode, TextUnit, T,
9}; 9};
@@ -30,13 +30,13 @@ pub(crate) fn change_visibility(ctx: AssistCtx) -> Option<Assist> {
30 30
31fn add_vis(ctx: AssistCtx) -> Option<Assist> { 31fn add_vis(ctx: AssistCtx) -> Option<Assist> {
32 let item_keyword = ctx.token_at_offset().find(|leaf| match leaf.kind() { 32 let item_keyword = ctx.token_at_offset().find(|leaf| match leaf.kind() {
33 T![fn] | T![mod] | T![struct] | T![enum] | T![trait] => true, 33 T![const] | T![fn] | T![mod] | T![struct] | T![enum] | T![trait] => true,
34 _ => false, 34 _ => false,
35 }); 35 });
36 36
37 let (offset, target) = if let Some(keyword) = item_keyword { 37 let (offset, target) = if let Some(keyword) = item_keyword {
38 let parent = keyword.parent(); 38 let parent = keyword.parent();
39 let def_kws = vec![FN_DEF, MODULE, STRUCT_DEF, ENUM_DEF, TRAIT_DEF]; 39 let def_kws = vec![CONST_DEF, FN_DEF, MODULE, STRUCT_DEF, ENUM_DEF, TRAIT_DEF];
40 // Parent is not a definition, can't add visibility 40 // Parent is not a definition, can't add visibility
41 if !def_kws.iter().any(|&def_kw| def_kw == parent.kind()) { 41 if !def_kws.iter().any(|&def_kw| def_kw == parent.kind()) {
42 return None; 42 return None;
@@ -136,6 +136,11 @@ mod tests {
136 } 136 }
137 137
138 #[test] 138 #[test]
139 fn change_visibility_const() {
140 check_assist(change_visibility, "<|>const FOO = 3u8;", "<|>pub(crate) const FOO = 3u8;");
141 }
142
143 #[test]
139 fn change_visibility_handles_comment_attrs() { 144 fn change_visibility_handles_comment_attrs() {
140 check_assist( 145 check_assist(
141 change_visibility, 146 change_visibility,
diff --git a/crates/ra_assists/src/handlers/early_return.rs b/crates/ra_assists/src/handlers/early_return.rs
index 22f88884f..f3167b4e5 100644
--- a/crates/ra_assists/src/handlers/early_return.rs
+++ b/crates/ra_assists/src/handlers/early_return.rs
@@ -112,16 +112,19 @@ pub(crate) fn convert_to_guarded_return(ctx: AssistCtx) -> Option<Assist> {
112 Some((path, bound_ident)) => { 112 Some((path, bound_ident)) => {
113 // If-let. 113 // If-let.
114 let match_expr = { 114 let match_expr = {
115 let happy_arm = make::match_arm( 115 let happy_arm = {
116 once( 116 let pat = make::tuple_struct_pat(
117 make::tuple_struct_pat( 117 path,
118 path, 118 once(make::bind_pat(make::name("it")).into()),
119 once(make::bind_pat(make::name("it")).into()), 119 );
120 ) 120 let expr = {
121 .into(), 121 let name_ref = make::name_ref("it");
122 ), 122 let segment = make::path_segment(name_ref);
123 make::expr_path(make::path_from_name_ref(make::name_ref("it"))), 123 let path = make::path_unqualified(segment);
124 ); 124 make::expr_path(path)
125 };
126 make::match_arm(once(pat.into()), expr)
127 };
125 128
126 let sad_arm = make::match_arm( 129 let sad_arm = make::match_arm(
127 // FIXME: would be cool to use `None` or `Err(_)` if appropriate 130 // FIXME: would be cool to use `None` or `Err(_)` if appropriate
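
The `make` helpers used here build paths in explicit steps instead of going through `path_from_name_ref`. A minimal sketch of the new chain, using only constructors that appear in this patch:

    // Build the expression `it` as a path expression, step by step.
    let name_ref = make::name_ref("it");
    let segment = make::path_segment(name_ref);
    let path = make::path_unqualified(segment);
    let expr: ast::Expr = make::expr_path(path);
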
diff --git a/crates/ra_assists/src/handlers/fill_match_arms.rs b/crates/ra_assists/src/handlers/fill_match_arms.rs
index ae2437ed3..e5d8c639d 100644
--- a/crates/ra_assists/src/handlers/fill_match_arms.rs
+++ b/crates/ra_assists/src/handlers/fill_match_arms.rs
@@ -2,10 +2,11 @@
2 2
3use std::iter; 3use std::iter;
4 4
5use hir::{db::HirDatabase, Adt, HasSource}; 5use hir::{db::HirDatabase, Adt, HasSource, Semantics};
6use ra_syntax::ast::{self, edit::IndentLevel, make, AstNode, NameOwner}; 6use ra_syntax::ast::{self, edit::IndentLevel, make, AstNode, NameOwner};
7 7
8use crate::{Assist, AssistCtx, AssistId}; 8use crate::{Assist, AssistCtx, AssistId};
9use ra_ide_db::RootDatabase;
9 10
10// Assist: fill_match_arms 11// Assist: fill_match_arms
11// 12//
@@ -46,10 +47,9 @@ pub(crate) fn fill_match_arms(ctx: AssistCtx) -> Option<Assist> {
46 }; 47 };
47 48
48 let expr = match_expr.expr()?; 49 let expr = match_expr.expr()?;
49 let (enum_def, module) = { 50 let enum_def = resolve_enum_def(&ctx.sema, &expr)?;
50 let analyzer = ctx.source_analyzer(expr.syntax(), None); 51 let module = ctx.sema.scope(expr.syntax()).module()?;
51 (resolve_enum_def(ctx.db, &analyzer, &expr)?, analyzer.module()?) 52
52 };
53 let variants = enum_def.variants(ctx.db); 53 let variants = enum_def.variants(ctx.db);
54 if variants.is_empty() { 54 if variants.is_empty() {
55 return None; 55 return None;
@@ -81,18 +81,11 @@ fn is_trivial(arm: &ast::MatchArm) -> bool {
81 } 81 }
82} 82}
83 83
84fn resolve_enum_def( 84fn resolve_enum_def(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<hir::Enum> {
85 db: &impl HirDatabase, 85 sema.type_of_expr(&expr)?.autoderef(sema.db).find_map(|ty| match ty.as_adt() {
86 analyzer: &hir::SourceAnalyzer,
87 expr: &ast::Expr,
88) -> Option<hir::Enum> {
89 let expr_ty = analyzer.type_of(db, &expr)?;
90
91 let result = expr_ty.autoderef(db).find_map(|ty| match ty.as_adt() {
92 Some(Adt::Enum(e)) => Some(e), 86 Some(Adt::Enum(e)) => Some(e),
93 _ => None, 87 _ => None,
94 }); 88 })
95 result
96} 89}
97 90
98fn build_pat( 91fn build_pat(
diff --git a/crates/ra_assists/src/handlers/inline_local_variable.rs b/crates/ra_assists/src/handlers/inline_local_variable.rs
index 91b588243..53a72309b 100644
--- a/crates/ra_assists/src/handlers/inline_local_variable.rs
+++ b/crates/ra_assists/src/handlers/inline_local_variable.rs
@@ -44,8 +44,7 @@ pub(crate) fn inline_local_variable(ctx: AssistCtx) -> Option<Assist> {
44 } else { 44 } else {
45 let_stmt.syntax().text_range() 45 let_stmt.syntax().text_range()
46 }; 46 };
47 let analyzer = ctx.source_analyzer(bind_pat.syntax(), None); 47 let refs = ctx.sema.find_all_refs(&bind_pat);
48 let refs = analyzer.find_all_refs(&bind_pat);
49 if refs.is_empty() { 48 if refs.is_empty() {
50 return None; 49 return None;
51 }; 50 };
diff --git a/crates/ra_assists/src/handlers/introduce_variable.rs b/crates/ra_assists/src/handlers/introduce_variable.rs
index 7312ce687..b453c51fb 100644
--- a/crates/ra_assists/src/handlers/introduce_variable.rs
+++ b/crates/ra_assists/src/handlers/introduce_variable.rs
@@ -136,15 +136,13 @@ fn anchor_stmt(expr: ast::Expr) -> Option<(SyntaxNode, bool)> {
136mod tests { 136mod tests {
137 use test_utils::covers; 137 use test_utils::covers;
138 138
139 use crate::helpers::{ 139 use crate::helpers::{check_assist, check_assist_not_applicable, check_assist_target};
140 check_assist_range, check_assist_range_not_applicable, check_assist_range_target,
141 };
142 140
143 use super::*; 141 use super::*;
144 142
145 #[test] 143 #[test]
146 fn test_introduce_var_simple() { 144 fn test_introduce_var_simple() {
147 check_assist_range( 145 check_assist(
148 introduce_variable, 146 introduce_variable,
149 " 147 "
150fn foo() { 148fn foo() {
@@ -161,16 +159,13 @@ fn foo() {
161 #[test] 159 #[test]
162 fn introduce_var_in_comment_is_not_applicable() { 160 fn introduce_var_in_comment_is_not_applicable() {
163 covers!(introduce_var_in_comment_is_not_applicable); 161 covers!(introduce_var_in_comment_is_not_applicable);
164 check_assist_range_not_applicable( 162 check_assist_not_applicable(introduce_variable, "fn main() { 1 + /* <|>comment<|> */ 1; }");
165 introduce_variable,
166 "fn main() { 1 + /* <|>comment<|> */ 1; }",
167 );
168 } 163 }
169 164
170 #[test] 165 #[test]
171 fn test_introduce_var_expr_stmt() { 166 fn test_introduce_var_expr_stmt() {
172 covers!(test_introduce_var_expr_stmt); 167 covers!(test_introduce_var_expr_stmt);
173 check_assist_range( 168 check_assist(
174 introduce_variable, 169 introduce_variable,
175 " 170 "
176fn foo() { 171fn foo() {
@@ -181,7 +176,7 @@ fn foo() {
181 let <|>var_name = 1 + 1; 176 let <|>var_name = 1 + 1;
182}", 177}",
183 ); 178 );
184 check_assist_range( 179 check_assist(
185 introduce_variable, 180 introduce_variable,
186 " 181 "
187fn foo() { 182fn foo() {
@@ -198,7 +193,7 @@ fn foo() {
198 193
199 #[test] 194 #[test]
200 fn test_introduce_var_part_of_expr_stmt() { 195 fn test_introduce_var_part_of_expr_stmt() {
201 check_assist_range( 196 check_assist(
202 introduce_variable, 197 introduce_variable,
203 " 198 "
204fn foo() { 199fn foo() {
@@ -215,7 +210,7 @@ fn foo() {
215 #[test] 210 #[test]
216 fn test_introduce_var_last_expr() { 211 fn test_introduce_var_last_expr() {
217 covers!(test_introduce_var_last_expr); 212 covers!(test_introduce_var_last_expr);
218 check_assist_range( 213 check_assist(
219 introduce_variable, 214 introduce_variable,
220 " 215 "
221fn foo() { 216fn foo() {
@@ -227,7 +222,7 @@ fn foo() {
227 bar(var_name) 222 bar(var_name)
228}", 223}",
229 ); 224 );
230 check_assist_range( 225 check_assist(
231 introduce_variable, 226 introduce_variable,
232 " 227 "
233fn foo() { 228fn foo() {
@@ -243,7 +238,7 @@ fn foo() {
243 238
244 #[test] 239 #[test]
245 fn test_introduce_var_in_match_arm_no_block() { 240 fn test_introduce_var_in_match_arm_no_block() {
246 check_assist_range( 241 check_assist(
247 introduce_variable, 242 introduce_variable,
248 " 243 "
249fn main() { 244fn main() {
@@ -268,7 +263,7 @@ fn main() {
268 263
269 #[test] 264 #[test]
270 fn test_introduce_var_in_match_arm_with_block() { 265 fn test_introduce_var_in_match_arm_with_block() {
271 check_assist_range( 266 check_assist(
272 introduce_variable, 267 introduce_variable,
273 " 268 "
274fn main() { 269fn main() {
@@ -300,7 +295,7 @@ fn main() {
300 295
301 #[test] 296 #[test]
302 fn test_introduce_var_in_closure_no_block() { 297 fn test_introduce_var_in_closure_no_block() {
303 check_assist_range( 298 check_assist(
304 introduce_variable, 299 introduce_variable,
305 " 300 "
306fn main() { 301fn main() {
@@ -317,7 +312,7 @@ fn main() {
317 312
318 #[test] 313 #[test]
319 fn test_introduce_var_in_closure_with_block() { 314 fn test_introduce_var_in_closure_with_block() {
320 check_assist_range( 315 check_assist(
321 introduce_variable, 316 introduce_variable,
322 " 317 "
323fn main() { 318fn main() {
@@ -334,7 +329,7 @@ fn main() {
334 329
335 #[test] 330 #[test]
336 fn test_introduce_var_path_simple() { 331 fn test_introduce_var_path_simple() {
337 check_assist_range( 332 check_assist(
338 introduce_variable, 333 introduce_variable,
339 " 334 "
340fn main() { 335fn main() {
@@ -352,7 +347,7 @@ fn main() {
352 347
353 #[test] 348 #[test]
354 fn test_introduce_var_path_method() { 349 fn test_introduce_var_path_method() {
355 check_assist_range( 350 check_assist(
356 introduce_variable, 351 introduce_variable,
357 " 352 "
358fn main() { 353fn main() {
@@ -370,7 +365,7 @@ fn main() {
370 365
371 #[test] 366 #[test]
372 fn test_introduce_var_return() { 367 fn test_introduce_var_return() {
373 check_assist_range( 368 check_assist(
374 introduce_variable, 369 introduce_variable,
375 " 370 "
376fn foo() -> u32 { 371fn foo() -> u32 {
@@ -388,7 +383,7 @@ fn foo() -> u32 {
388 383
389 #[test] 384 #[test]
390 fn test_introduce_var_does_not_add_extra_whitespace() { 385 fn test_introduce_var_does_not_add_extra_whitespace() {
391 check_assist_range( 386 check_assist(
392 introduce_variable, 387 introduce_variable,
393 " 388 "
394fn foo() -> u32 { 389fn foo() -> u32 {
@@ -407,7 +402,7 @@ fn foo() -> u32 {
407", 402",
408 ); 403 );
409 404
410 check_assist_range( 405 check_assist(
411 introduce_variable, 406 introduce_variable,
412 " 407 "
413fn foo() -> u32 { 408fn foo() -> u32 {
@@ -424,7 +419,7 @@ fn foo() -> u32 {
424", 419",
425 ); 420 );
426 421
427 check_assist_range( 422 check_assist(
428 introduce_variable, 423 introduce_variable,
429 " 424 "
430fn foo() -> u32 { 425fn foo() -> u32 {
@@ -452,7 +447,7 @@ fn foo() -> u32 {
452 447
453 #[test] 448 #[test]
454 fn test_introduce_var_break() { 449 fn test_introduce_var_break() {
455 check_assist_range( 450 check_assist(
456 introduce_variable, 451 introduce_variable,
457 " 452 "
458fn main() { 453fn main() {
@@ -474,7 +469,7 @@ fn main() {
474 469
475 #[test] 470 #[test]
476 fn test_introduce_var_for_cast() { 471 fn test_introduce_var_for_cast() {
477 check_assist_range( 472 check_assist(
478 introduce_variable, 473 introduce_variable,
479 " 474 "
480fn main() { 475fn main() {
@@ -492,27 +487,20 @@ fn main() {
492 487
493 #[test] 488 #[test]
494 fn test_introduce_var_for_return_not_applicable() { 489 fn test_introduce_var_for_return_not_applicable() {
495 check_assist_range_not_applicable(introduce_variable, "fn foo() { <|>return<|>; } "); 490 check_assist_not_applicable(introduce_variable, "fn foo() { <|>return<|>; } ");
496 } 491 }
497 492
498 #[test] 493 #[test]
499 fn test_introduce_var_for_break_not_applicable() { 494 fn test_introduce_var_for_break_not_applicable() {
500 check_assist_range_not_applicable( 495 check_assist_not_applicable(introduce_variable, "fn main() { loop { <|>break<|>; }; }");
501 introduce_variable,
502 "fn main() { loop { <|>break<|>; }; }",
503 );
504 } 496 }
505 497
506 // FIXME: This is not quite correct, but good enough(tm) for the sorting heuristic 498 // FIXME: This is not quite correct, but good enough(tm) for the sorting heuristic
507 #[test] 499 #[test]
508 fn introduce_var_target() { 500 fn introduce_var_target() {
509 check_assist_range_target( 501 check_assist_target(introduce_variable, "fn foo() -> u32 { <|>return 2 + 2<|>; }", "2 + 2");
510 introduce_variable,
511 "fn foo() -> u32 { <|>return 2 + 2<|>; }",
512 "2 + 2",
513 );
514 502
515 check_assist_range_target( 503 check_assist_target(
516 introduce_variable, 504 introduce_variable,
517 " 505 "
518fn main() { 506fn main() {
diff --git a/crates/ra_assists/src/handlers/move_bounds.rs b/crates/ra_assists/src/handlers/move_bounds.rs
index 90793b5fc..86b235366 100644
--- a/crates/ra_assists/src/handlers/move_bounds.rs
+++ b/crates/ra_assists/src/handlers/move_bounds.rs
@@ -72,7 +72,11 @@ pub(crate) fn move_bounds_to_where_clause(ctx: AssistCtx) -> Option<Assist> {
72} 72}
73 73
74fn build_predicate(param: ast::TypeParam) -> Option<ast::WherePred> { 74fn build_predicate(param: ast::TypeParam) -> Option<ast::WherePred> {
75 let path = make::path_from_name_ref(make::name_ref(&param.name()?.syntax().to_string())); 75 let path = {
76 let name_ref = make::name_ref(&param.name()?.syntax().to_string());
77 let segment = make::path_segment(name_ref);
78 make::path_unqualified(segment)
79 };
76 let predicate = make::where_pred(path, param.type_bound_list()?.bounds()); 80 let predicate = make::where_pred(path, param.type_bound_list()?.bounds());
77 Some(predicate) 81 Some(predicate)
78} 82}
diff --git a/crates/ra_assists/src/handlers/raw_string.rs b/crates/ra_assists/src/handlers/raw_string.rs
index 2c0a1e126..7e4b83f13 100644
--- a/crates/ra_assists/src/handlers/raw_string.rs
+++ b/crates/ra_assists/src/handlers/raw_string.rs
@@ -1,5 +1,6 @@
1use ra_syntax::{ 1use ra_syntax::{
2 ast, AstToken, 2 ast::{self, HasStringValue},
3 AstToken,
3 SyntaxKind::{RAW_STRING, STRING}, 4 SyntaxKind::{RAW_STRING, STRING},
4 TextUnit, 5 TextUnit,
5}; 6};
diff --git a/crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs b/crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs
index eac452413..94f5d6c50 100644
--- a/crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs
+++ b/crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs
@@ -1,42 +1,12 @@
1use hir::{self, ModPath}; 1use hir;
2use ra_syntax::{ 2use ra_syntax::{ast, AstNode, SmolStr, TextRange};
3 ast::{self, NameOwner},
4 AstNode, Direction, SmolStr,
5 SyntaxKind::{PATH, PATH_SEGMENT},
6 SyntaxNode, TextRange, T,
7};
8use ra_text_edit::TextEditBuilder;
9 3
10use crate::{ 4use crate::{
11 assist_ctx::{Assist, AssistCtx}, 5 assist_ctx::{Assist, AssistCtx},
6 utils::insert_use_statement,
12 AssistId, 7 AssistId,
13}; 8};
14 9
15/// Creates and inserts a use statement for the given path to import.
16/// The use statement is inserted in the scope most appropriate to the
17/// the cursor position given, additionally merged with the existing use imports.
18pub fn insert_use_statement(
19 // Ideally the position of the cursor, used to
20 position: &SyntaxNode,
21 // The statement to use as anchor (last resort)
22 anchor: &SyntaxNode,
23 path_to_import: &ModPath,
24 edit: &mut TextEditBuilder,
25) {
26 let target = path_to_import.to_string().split("::").map(SmolStr::new).collect::<Vec<_>>();
27 let container = position.ancestors().find_map(|n| {
28 if let Some(module) = ast::Module::cast(n.clone()) {
29 return module.item_list().map(|it| it.syntax().clone());
30 }
31 ast::SourceFile::cast(n).map(|it| it.syntax().clone())
32 });
33
34 if let Some(container) = container {
35 let action = best_action_for_target(container, anchor.clone(), &target);
36 make_assist(&action, &target, edit);
37 }
38}
39
40// Assist: replace_qualified_name_with_use 10// Assist: replace_qualified_name_with_use
41// 11//
42// Adds a use statement for a given fully-qualified name. 12// Adds a use statement for a given fully-qualified name.
@@ -63,522 +33,25 @@ pub(crate) fn replace_qualified_name_with_use(ctx: AssistCtx) -> Option<Assist>
63 return None; 33 return None;
64 } 34 }
65 35
66 let module = path.syntax().ancestors().find_map(ast::Module::cast);
67 let position = match module.and_then(|it| it.item_list()) {
68 Some(item_list) => item_list.syntax().clone(),
69 None => {
70 let current_file = path.syntax().ancestors().find_map(ast::SourceFile::cast)?;
71 current_file.syntax().clone()
72 }
73 };
74
75 ctx.add_assist( 36 ctx.add_assist(
76 AssistId("replace_qualified_name_with_use"), 37 AssistId("replace_qualified_name_with_use"),
77 "Replace qualified path with use", 38 "Replace qualified path with use",
78 |edit| { 39 |edit| {
79 replace_with_use(&position, &path, &segments, edit.text_edit_builder()); 40 let path_to_import = hir_path.mod_path().clone();
41 insert_use_statement(path.syntax(), &path_to_import, edit.text_edit_builder());
42
43 if let Some(last) = path.segment() {
44 // Here we are assuming the assist will provide a correct use statement
45 // so we can delete the path qualifier
46 edit.delete(TextRange::from_to(
47 path.syntax().text_range().start(),
48 last.syntax().text_range().start(),
49 ));
50 }
80 }, 51 },
81 ) 52 )
82} 53}
83 54
84fn collect_path_segments_raw(
85 segments: &mut Vec<ast::PathSegment>,
86 mut path: ast::Path,
87) -> Option<usize> {
88 let oldlen = segments.len();
89 loop {
90 let mut children = path.syntax().children_with_tokens();
91 let (first, second, third) = (
92 children.next().map(|n| (n.clone(), n.kind())),
93 children.next().map(|n| (n.clone(), n.kind())),
94 children.next().map(|n| (n.clone(), n.kind())),
95 );
96 match (first, second, third) {
97 (Some((subpath, PATH)), Some((_, T![::])), Some((segment, PATH_SEGMENT))) => {
98 path = ast::Path::cast(subpath.as_node()?.clone())?;
99 segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?);
100 }
101 (Some((segment, PATH_SEGMENT)), _, _) => {
102 segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?);
103 break;
104 }
105 (_, _, _) => return None,
106 }
107 }
108 // We need to reverse only the newly added segments
109 let only_new_segments = segments.split_at_mut(oldlen).1;
110 only_new_segments.reverse();
111 Some(segments.len() - oldlen)
112}
113
114fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) {
115 let mut iter = segments.iter();
116 if let Some(s) = iter.next() {
117 buf.push_str(s);
118 }
119 for s in iter {
120 buf.push_str("::");
121 buf.push_str(s);
122 }
123}
124
125/// Returns the number of common segments.
126fn compare_path_segments(left: &[SmolStr], right: &[ast::PathSegment]) -> usize {
127 left.iter().zip(right).take_while(|(l, r)| compare_path_segment(l, r)).count()
128}
129
130fn compare_path_segment(a: &SmolStr, b: &ast::PathSegment) -> bool {
131 if let Some(kb) = b.kind() {
132 match kb {
133 ast::PathSegmentKind::Name(nameref_b) => a == nameref_b.text(),
134 ast::PathSegmentKind::SelfKw => a == "self",
135 ast::PathSegmentKind::SuperKw => a == "super",
136 ast::PathSegmentKind::CrateKw => a == "crate",
137 ast::PathSegmentKind::Type { .. } => false, // not allowed in imports
138 }
139 } else {
140 false
141 }
142}
143
144fn compare_path_segment_with_name(a: &SmolStr, b: &ast::Name) -> bool {
145 a == b.text()
146}
147
148#[derive(Clone, Debug)]
149enum ImportAction {
150 Nothing,
151 // Add a brand new use statement.
152 AddNewUse {
153 anchor: Option<SyntaxNode>, // anchor node
154 add_after_anchor: bool,
155 },
156
157 // To split an existing use statement creating a nested import.
158 AddNestedImport {
159 // how many segments matched the target path
160 common_segments: usize,
161 path_to_split: ast::Path,
162 // the first segment of path_to_split we want to add into the new nested list
163 first_segment_to_split: Option<ast::PathSegment>,
164 // Whether to add 'self' in addition to the target path
165 add_self: bool,
166 },
167 // To add the target path to an existing nested import tree list.
168 AddInTreeList {
169 common_segments: usize,
170 // The UseTreeList in which to add the target path
171 tree_list: ast::UseTreeList,
172 add_self: bool,
173 },
174}
175
176impl ImportAction {
177 fn add_new_use(anchor: Option<SyntaxNode>, add_after_anchor: bool) -> Self {
178 ImportAction::AddNewUse { anchor, add_after_anchor }
179 }
180
181 fn add_nested_import(
182 common_segments: usize,
183 path_to_split: ast::Path,
184 first_segment_to_split: Option<ast::PathSegment>,
185 add_self: bool,
186 ) -> Self {
187 ImportAction::AddNestedImport {
188 common_segments,
189 path_to_split,
190 first_segment_to_split,
191 add_self,
192 }
193 }
194
195 fn add_in_tree_list(
196 common_segments: usize,
197 tree_list: ast::UseTreeList,
198 add_self: bool,
199 ) -> Self {
200 ImportAction::AddInTreeList { common_segments, tree_list, add_self }
201 }
202
203 fn better(left: ImportAction, right: ImportAction) -> ImportAction {
204 if left.is_better(&right) {
205 left
206 } else {
207 right
208 }
209 }
210
211 fn is_better(&self, other: &ImportAction) -> bool {
212 match (self, other) {
213 (ImportAction::Nothing, _) => true,
214 (ImportAction::AddInTreeList { .. }, ImportAction::Nothing) => false,
215 (
216 ImportAction::AddNestedImport { common_segments: n, .. },
217 ImportAction::AddInTreeList { common_segments: m, .. },
218 )
219 | (
220 ImportAction::AddInTreeList { common_segments: n, .. },
221 ImportAction::AddNestedImport { common_segments: m, .. },
222 )
223 | (
224 ImportAction::AddInTreeList { common_segments: n, .. },
225 ImportAction::AddInTreeList { common_segments: m, .. },
226 )
227 | (
228 ImportAction::AddNestedImport { common_segments: n, .. },
229 ImportAction::AddNestedImport { common_segments: m, .. },
230 ) => n > m,
231 (ImportAction::AddInTreeList { .. }, _) => true,
232 (ImportAction::AddNestedImport { .. }, ImportAction::Nothing) => false,
233 (ImportAction::AddNestedImport { .. }, _) => true,
234 (ImportAction::AddNewUse { .. }, _) => false,
235 }
236 }
237}
238
239// Find out the best ImportAction to import target path against current_use_tree.
240// If current_use_tree has a nested import the function gets called recursively on every UseTree inside a UseTreeList.
241fn walk_use_tree_for_best_action(
242 current_path_segments: &mut Vec<ast::PathSegment>, // buffer containing path segments
243 current_parent_use_tree_list: Option<ast::UseTreeList>, // will be Some value if we are in a nested import
244 current_use_tree: ast::UseTree, // the use tree we are currently examining
245 target: &[SmolStr], // the path we want to import
246) -> ImportAction {
247 // We save the number of segments in the buffer so we can restore the correct segments
248 // before returning. Recursive calls will add segments, so we need to delete them.
249 let prev_len = current_path_segments.len();
250
251 let tree_list = current_use_tree.use_tree_list();
252 let alias = current_use_tree.alias();
253
254 let path = match current_use_tree.path() {
255 Some(path) => path,
256 None => {
257 // If the use item doesn't have a path, it's broken (syntax error)
258 return ImportAction::add_new_use(
259 current_use_tree
260 .syntax()
261 .ancestors()
262 .find_map(ast::UseItem::cast)
263 .map(|it| it.syntax().clone()),
264 true,
265 );
266 }
267 };
268
269 // This can happen only if current_use_tree is a direct child of a UseItem
270 if let Some(name) = alias.and_then(|it| it.name()) {
271 if compare_path_segment_with_name(&target[0], &name) {
272 return ImportAction::Nothing;
273 }
274 }
275
276 collect_path_segments_raw(current_path_segments, path.clone());
277
278 // We compare only the new segments added in the line just above.
279 // The first prev_len segments were already compared in 'parent' recursive calls.
280 let left = target.split_at(prev_len).1;
281 let right = current_path_segments.split_at(prev_len).1;
282 let common = compare_path_segments(left, &right);
283 let mut action = match common {
284 0 => ImportAction::add_new_use(
285 // e.g: target is std::fmt and we can have
286 // use foo::bar
287 // We add a brand new use statement
288 current_use_tree
289 .syntax()
290 .ancestors()
291 .find_map(ast::UseItem::cast)
292 .map(|it| it.syntax().clone()),
293 true,
294 ),
295 common if common == left.len() && left.len() == right.len() => {
296 // e.g: target is std::fmt and we can have
297 // 1- use std::fmt;
298 // 2- use std::fmt::{ ... }
299 if let Some(list) = tree_list {
300 // In case 2 we need to add self to the nested list
301 // unless it's already there
302 let has_self = list.use_trees().map(|it| it.path()).any(|p| {
303 p.and_then(|it| it.segment())
304 .and_then(|it| it.kind())
305 .filter(|k| *k == ast::PathSegmentKind::SelfKw)
306 .is_some()
307 });
308
309 if has_self {
310 ImportAction::Nothing
311 } else {
312 ImportAction::add_in_tree_list(current_path_segments.len(), list, true)
313 }
314 } else {
315 // Case 1
316 ImportAction::Nothing
317 }
318 }
319 common if common != left.len() && left.len() == right.len() => {
320 // e.g: target is std::fmt and we have
321 // use std::io;
322 // We need to split.
323 let segments_to_split = current_path_segments.split_at(prev_len + common).1;
324 ImportAction::add_nested_import(
325 prev_len + common,
326 path,
327 Some(segments_to_split[0].clone()),
328 false,
329 )
330 }
331 common if common == right.len() && left.len() > right.len() => {
332 // e.g: target is std::fmt and we can have
333 // 1- use std;
334 // 2- use std::{ ... };
335
336 // fallback action
337 let mut better_action = ImportAction::add_new_use(
338 current_use_tree
339 .syntax()
340 .ancestors()
341 .find_map(ast::UseItem::cast)
342 .map(|it| it.syntax().clone()),
343 true,
344 );
345 if let Some(list) = tree_list {
346 // Case 2, check recursively if the path is already imported in the nested list
347 for u in list.use_trees() {
348 let child_action = walk_use_tree_for_best_action(
349 current_path_segments,
350 Some(list.clone()),
351 u,
352 target,
353 );
354 if child_action.is_better(&better_action) {
355 better_action = child_action;
356 if let ImportAction::Nothing = better_action {
357 return better_action;
358 }
359 }
360 }
361 } else {
362 // Case 1, split adding self
363 better_action = ImportAction::add_nested_import(prev_len + common, path, None, true)
364 }
365 better_action
366 }
367 common if common == left.len() && left.len() < right.len() => {
368 // e.g: target is std::fmt and we can have
369 // use std::fmt::Debug;
370 let segments_to_split = current_path_segments.split_at(prev_len + common).1;
371 ImportAction::add_nested_import(
372 prev_len + common,
373 path,
374 Some(segments_to_split[0].clone()),
375 true,
376 )
377 }
378 common if common < left.len() && common < right.len() => {
379 // e.g: target is std::fmt::nested::Debug
380 // use std::fmt::Display
381 let segments_to_split = current_path_segments.split_at(prev_len + common).1;
382 ImportAction::add_nested_import(
383 prev_len + common,
384 path,
385 Some(segments_to_split[0].clone()),
386 false,
387 )
388 }
389 _ => unreachable!(),
390 };
391
392 // If we are inside a UseTreeList, adding a use statement becomes adding to the existing
393 // tree list.
394 action = match (current_parent_use_tree_list, action.clone()) {
395 (Some(use_tree_list), ImportAction::AddNewUse { .. }) => {
396 ImportAction::add_in_tree_list(prev_len, use_tree_list, false)
397 }
398 (_, _) => action,
399 };
400
401 // We remove the segments added
402 current_path_segments.truncate(prev_len);
403 action
404}
405
406fn best_action_for_target(
407 container: SyntaxNode,
408 anchor: SyntaxNode,
409 target: &[SmolStr],
410) -> ImportAction {
411 let mut storage = Vec::with_capacity(16); // this should be the only allocation
412 let best_action = container
413 .children()
414 .filter_map(ast::UseItem::cast)
415 .filter_map(|it| it.use_tree())
416 .map(|u| walk_use_tree_for_best_action(&mut storage, None, u, target))
417 .fold(None, |best, a| match best {
418 Some(best) => Some(ImportAction::better(best, a)),
419 None => Some(a),
420 });
421
422 match best_action {
423 Some(action) => action,
424 None => {
425 // We have no action and no UseItem was found in the container, so we find
426 // another item and use it as the anchor.
427 // If there are no items above, we choose the target path itself as anchor.
428 // todo: we should include even whitespace blocks as anchor candidates
429 let anchor = container
430 .children()
431 .find(|n| n.text_range().start() < anchor.text_range().start())
432 .or_else(|| Some(anchor));
433
434 let add_after_anchor = anchor
435 .clone()
436 .and_then(ast::Attr::cast)
437 .map(|attr| attr.kind() == ast::AttrKind::Inner)
438 .unwrap_or(false);
439 ImportAction::add_new_use(anchor, add_after_anchor)
440 }
441 }
442}
443
444fn make_assist(action: &ImportAction, target: &[SmolStr], edit: &mut TextEditBuilder) {
445 match action {
446 ImportAction::AddNewUse { anchor, add_after_anchor } => {
447 make_assist_add_new_use(anchor, *add_after_anchor, target, edit)
448 }
449 ImportAction::AddInTreeList { common_segments, tree_list, add_self } => {
450 // We know that the first n segments already exist in the use statement we want
451 // to modify, so we want to add only the last target.len() - n segments.
452 let segments_to_add = target.split_at(*common_segments).1;
453 make_assist_add_in_tree_list(tree_list, segments_to_add, *add_self, edit)
454 }
455 ImportAction::AddNestedImport {
456 common_segments,
457 path_to_split,
458 first_segment_to_split,
459 add_self,
460 } => {
461 let segments_to_add = target.split_at(*common_segments).1;
462 make_assist_add_nested_import(
463 path_to_split,
464 first_segment_to_split,
465 segments_to_add,
466 *add_self,
467 edit,
468 )
469 }
470 _ => {}
471 }
472}
473
474fn make_assist_add_new_use(
475 anchor: &Option<SyntaxNode>,
476 after: bool,
477 target: &[SmolStr],
478 edit: &mut TextEditBuilder,
479) {
480 if let Some(anchor) = anchor {
481 let indent = ra_fmt::leading_indent(anchor);
482 let mut buf = String::new();
483 if after {
484 buf.push_str("\n");
485 if let Some(spaces) = &indent {
486 buf.push_str(spaces);
487 }
488 }
489 buf.push_str("use ");
490 fmt_segments_raw(target, &mut buf);
491 buf.push_str(";");
492 if !after {
493 buf.push_str("\n\n");
494 if let Some(spaces) = &indent {
495 buf.push_str(&spaces);
496 }
497 }
498 let position = if after { anchor.text_range().end() } else { anchor.text_range().start() };
499 edit.insert(position, buf);
500 }
501}
502
503fn make_assist_add_in_tree_list(
504 tree_list: &ast::UseTreeList,
505 target: &[SmolStr],
506 add_self: bool,
507 edit: &mut TextEditBuilder,
508) {
509 let last = tree_list.use_trees().last();
510 if let Some(last) = last {
511 let mut buf = String::new();
512 let comma = last.syntax().siblings(Direction::Next).find(|n| n.kind() == T![,]);
513 let offset = if let Some(comma) = comma {
514 comma.text_range().end()
515 } else {
516 buf.push_str(",");
517 last.syntax().text_range().end()
518 };
519 if add_self {
520 buf.push_str(" self")
521 } else {
522 buf.push_str(" ");
523 }
524 fmt_segments_raw(target, &mut buf);
525 edit.insert(offset, buf);
526 } else {
527 }
528}
529
530fn make_assist_add_nested_import(
531 path: &ast::Path,
532 first_segment_to_split: &Option<ast::PathSegment>,
533 target: &[SmolStr],
534 add_self: bool,
535 edit: &mut TextEditBuilder,
536) {
537 let use_tree = path.syntax().ancestors().find_map(ast::UseTree::cast);
538 if let Some(use_tree) = use_tree {
539 let (start, add_colon_colon) = if let Some(first_segment_to_split) = first_segment_to_split
540 {
541 (first_segment_to_split.syntax().text_range().start(), false)
542 } else {
543 (use_tree.syntax().text_range().end(), true)
544 };
545 let end = use_tree.syntax().text_range().end();
546
547 let mut buf = String::new();
548 if add_colon_colon {
549 buf.push_str("::");
550 }
551 buf.push_str("{");
552 if add_self {
553 buf.push_str("self, ");
554 }
555 fmt_segments_raw(target, &mut buf);
556 if !target.is_empty() {
557 buf.push_str(", ");
558 }
559 edit.insert(start, buf);
560 edit.insert(end, "}".to_string());
561 }
562}
563
564fn replace_with_use(
565 container: &SyntaxNode,
566 path: &ast::Path,
567 target: &[SmolStr],
568 edit: &mut TextEditBuilder,
569) {
570 let action = best_action_for_target(container.clone(), path.syntax().clone(), target);
571 make_assist(&action, target, edit);
572 if let Some(last) = path.segment() {
573 // Here we are assuming the assist will provide a correct use statement
574 // so we can delete the path qualifier
575 edit.delete(TextRange::from_to(
576 path.syntax().text_range().start(),
577 last.syntax().text_range().start(),
578 ));
579 }
580}
581
582fn collect_hir_path_segments(path: &hir::Path) -> Option<Vec<SmolStr>> { 55fn collect_hir_path_segments(path: &hir::Path) -> Option<Vec<SmolStr>> {
583 let mut ps = Vec::<SmolStr>::with_capacity(10); 56 let mut ps = Vec::<SmolStr>::with_capacity(10);
584 match path.kind() { 57 match path.kind() {
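For orientation, a sketch of the end-to-end edit this assist now produces (the shared helper inserts the `use`, the assist strips the qualifier); the example input is illustrative:

    // Before (cursor on the qualified path):
    //
    //     fn process(map: std::collections::HashMap<String, String>) {}
    //
    // After applying `replace_qualified_name_with_use`: a `use` is inserted by
    // `insert_use_statement` and everything up to the last path segment is
    // deleted, leaving only `HashMap` at the use site.
    use std::collections::HashMap;

    fn process(map: HashMap<String, String>) {}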
diff --git a/crates/ra_assists/src/lib.rs b/crates/ra_assists/src/lib.rs
index d7998b0d1..deeada2de 100644
--- a/crates/ra_assists/src/lib.rs
+++ b/crates/ra_assists/src/lib.rs
@@ -18,7 +18,7 @@ use ra_syntax::{TextRange, TextUnit};
18use ra_text_edit::TextEdit; 18use ra_text_edit::TextEdit;
19 19
20pub(crate) use crate::assist_ctx::{Assist, AssistCtx, AssistHandler}; 20pub(crate) use crate::assist_ctx::{Assist, AssistCtx, AssistHandler};
21pub use crate::handlers::replace_qualified_name_with_use::insert_use_statement; 21use hir::Semantics;
22 22
23/// Unique identifier of the assist, should not be shown to the user 23/// Unique identifier of the assist, should not be shown to the user
24/// directly. 24/// directly.
@@ -63,7 +63,8 @@ pub struct ResolvedAssist {
63/// Assists are returned in the "unresolved" state, that is only labels are 63/// Assists are returned in the "unresolved" state, that is only labels are
64/// returned, without actual edits. 64/// returned, without actual edits.
65pub fn unresolved_assists(db: &RootDatabase, range: FileRange) -> Vec<AssistLabel> { 65pub fn unresolved_assists(db: &RootDatabase, range: FileRange) -> Vec<AssistLabel> {
66 let ctx = AssistCtx::new(db, range, false); 66 let sema = Semantics::new(db);
67 let ctx = AssistCtx::new(&sema, range, false);
67 handlers::all() 68 handlers::all()
68 .iter() 69 .iter()
69 .filter_map(|f| f(ctx.clone())) 70 .filter_map(|f| f(ctx.clone()))
@@ -77,7 +78,8 @@ pub fn unresolved_assists(db: &RootDatabase, range: FileRange) -> Vec<AssistLabe
77/// Assists are returned in the "resolved" state, that is with edit fully 78/// Assists are returned in the "resolved" state, that is with edit fully
78/// computed. 79/// computed.
79pub fn resolved_assists(db: &RootDatabase, range: FileRange) -> Vec<ResolvedAssist> { 80pub fn resolved_assists(db: &RootDatabase, range: FileRange) -> Vec<ResolvedAssist> {
80 let ctx = AssistCtx::new(db, range, true); 81 let sema = Semantics::new(db);
82 let ctx = AssistCtx::new(&sema, range, true);
81 let mut a = handlers::all() 83 let mut a = handlers::all()
82 .iter() 84 .iter()
83 .filter_map(|f| f(ctx.clone())) 85 .filter_map(|f| f(ctx.clone()))
@@ -162,9 +164,10 @@ mod helpers {
162 use ra_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt}; 164 use ra_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt};
163 use ra_ide_db::{symbol_index::SymbolsDatabase, RootDatabase}; 165 use ra_ide_db::{symbol_index::SymbolsDatabase, RootDatabase};
164 use ra_syntax::TextRange; 166 use ra_syntax::TextRange;
165 use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range}; 167 use test_utils::{add_cursor, assert_eq_text, extract_range_or_offset, RangeOrOffset};
166 168
167 use crate::{AssistCtx, AssistHandler}; 169 use crate::{AssistCtx, AssistHandler};
170 use hir::Semantics;
168 171
169 pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) { 172 pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {
170 let (mut db, file_id) = RootDatabase::with_single_file(text); 173 let (mut db, file_id) = RootDatabase::with_single_file(text);
@@ -176,81 +179,66 @@ mod helpers {
176 } 179 }
177 180
178 pub(crate) fn check_assist(assist: AssistHandler, before: &str, after: &str) { 181 pub(crate) fn check_assist(assist: AssistHandler, before: &str, after: &str) {
179 let (before_cursor_pos, before) = extract_offset(before); 182 check(assist, before, ExpectedResult::After(after));
180 let (db, file_id) = with_single_file(&before);
181 let frange =
182 FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
183 let assist =
184 assist(AssistCtx::new(&db, frange, true)).expect("code action is not applicable");
185 let action = assist.0[0].action.clone().unwrap();
186
187 let actual = action.edit.apply(&before);
188 let actual_cursor_pos = match action.cursor_position {
189 None => action
190 .edit
191 .apply_to_offset(before_cursor_pos)
192 .expect("cursor position is affected by the edit"),
193 Some(off) => off,
194 };
195 let actual = add_cursor(&actual, actual_cursor_pos);
196 assert_eq_text!(after, &actual);
197 }
198
199 pub(crate) fn check_assist_range(assist: AssistHandler, before: &str, after: &str) {
200 let (range, before) = extract_range(before);
201 let (db, file_id) = with_single_file(&before);
202 let frange = FileRange { file_id, range };
203 let assist =
204 assist(AssistCtx::new(&db, frange, true)).expect("code action is not applicable");
205 let action = assist.0[0].action.clone().unwrap();
206
207 let mut actual = action.edit.apply(&before);
208 if let Some(pos) = action.cursor_position {
209 actual = add_cursor(&actual, pos);
210 }
211 assert_eq_text!(after, &actual);
212 } 183 }
213 184
185 // FIXME: instead of having a separate function here, maybe use
186 // `extract_ranges` and mark the target as `<target> </target>` in the
187 // fixture?
214 pub(crate) fn check_assist_target(assist: AssistHandler, before: &str, target: &str) { 188 pub(crate) fn check_assist_target(assist: AssistHandler, before: &str, target: &str) {
215 let (before_cursor_pos, before) = extract_offset(before); 189 check(assist, before, ExpectedResult::Target(target));
216 let (db, file_id) = with_single_file(&before);
217 let frange =
218 FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
219 let assist =
220 assist(AssistCtx::new(&db, frange, true)).expect("code action is not applicable");
221 let action = assist.0[0].action.clone().unwrap();
222
223 let range = action.target.expect("expected target on action");
224 assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target);
225 } 190 }
226 191
227 pub(crate) fn check_assist_range_target(assist: AssistHandler, before: &str, target: &str) { 192 pub(crate) fn check_assist_not_applicable(assist: AssistHandler, before: &str) {
228 let (range, before) = extract_range(before); 193 check(assist, before, ExpectedResult::NotApplicable);
229 let (db, file_id) = with_single_file(&before);
230 let frange = FileRange { file_id, range };
231 let assist =
232 assist(AssistCtx::new(&db, frange, true)).expect("code action is not applicable");
233 let action = assist.0[0].action.clone().unwrap();
234
235 let range = action.target.expect("expected target on action");
236 assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target);
237 } 194 }
238 195
239 pub(crate) fn check_assist_not_applicable(assist: AssistHandler, before: &str) { 196 enum ExpectedResult<'a> {
240 let (before_cursor_pos, before) = extract_offset(before); 197 NotApplicable,
241 let (db, file_id) = with_single_file(&before); 198 After(&'a str),
242 let frange = 199 Target(&'a str),
243 FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
244 let assist = assist(AssistCtx::new(&db, frange, true));
245 assert!(assist.is_none());
246 } 200 }
247 201
248 pub(crate) fn check_assist_range_not_applicable(assist: AssistHandler, before: &str) { 202 fn check(assist: AssistHandler, before: &str, expected: ExpectedResult) {
249 let (range, before) = extract_range(before); 203 let (range_or_offset, before) = extract_range_or_offset(before);
204 let range: TextRange = range_or_offset.into();
205
250 let (db, file_id) = with_single_file(&before); 206 let (db, file_id) = with_single_file(&before);
251 let frange = FileRange { file_id, range }; 207 let frange = FileRange { file_id, range };
252 let assist = assist(AssistCtx::new(&db, frange, true)); 208 let sema = Semantics::new(&db);
253 assert!(assist.is_none()); 209 let assist_ctx = AssistCtx::new(&sema, frange, true);
210
211 match (assist(assist_ctx), expected) {
212 (Some(assist), ExpectedResult::After(after)) => {
213 let action = assist.0[0].action.clone().unwrap();
214
215 let mut actual = action.edit.apply(&before);
216 match action.cursor_position {
217 None => {
218 if let RangeOrOffset::Offset(before_cursor_pos) = range_or_offset {
219 let off = action
220 .edit
221 .apply_to_offset(before_cursor_pos)
222 .expect("cursor position is affected by the edit");
223 actual = add_cursor(&actual, off)
224 }
225 }
226 Some(off) => actual = add_cursor(&actual, off),
227 };
228
229 assert_eq_text!(after, &actual);
230 }
231 (Some(assist), ExpectedResult::Target(target)) => {
232 let action = assist.0[0].action.clone().unwrap();
233 let range = action.target.expect("expected target on action");
234 assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target);
235 }
236 (Some(_), ExpectedResult::NotApplicable) => panic!("assist should not be applicable!"),
237 (None, ExpectedResult::After(_)) | (None, ExpectedResult::Target(_)) => {
238 panic!("code action is not applicable")
239 }
240 (None, ExpectedResult::NotApplicable) => (),
241 };
254 } 242 }
255} 243}
256 244
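A short sketch of the fixture handling that the unified `check` helper above relies on: a fixture can mark either a single cursor or a selection, and both collapse into the `TextRange` handed to `AssistCtx`. The wrapper function below is hypothetical; `<|>` is the cursor marker `test_utils` uses at this point:

    use ra_syntax::TextRange;
    use test_utils::extract_range_or_offset;

    // Returns the (possibly zero-length) range marked in the fixture, plus the
    // fixture text with the marker stripped.
    fn fixture_range(fixture: &str) -> (TextRange, String) {
        let (range_or_offset, text) = extract_range_or_offset(fixture);
        (range_or_offset.into(), text)
    }

    // e.g. fixture_range("fn f() { let x = 1 + <|>1; }") yields a zero-length
    // range at the cursor position.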
diff --git a/crates/ra_assists/src/utils.rs b/crates/ra_assists/src/utils.rs
index 6ff44c95c..d544caee7 100644
--- a/crates/ra_assists/src/utils.rs
+++ b/crates/ra_assists/src/utils.rs
@@ -1,16 +1,18 @@
1//! Assorted functions shared by several assists. 1//! Assorted functions shared by several assists.
2pub(crate) mod insert_use;
2 3
4use hir::Semantics;
5use ra_ide_db::RootDatabase;
3use ra_syntax::{ 6use ra_syntax::{
4 ast::{self, make, NameOwner}, 7 ast::{self, make, NameOwner},
5 AstNode, T, 8 AstNode, T,
6}; 9};
7
8use hir::db::HirDatabase;
9use rustc_hash::FxHashSet; 10use rustc_hash::FxHashSet;
10 11
12pub use insert_use::insert_use_statement;
13
11pub fn get_missing_impl_items( 14pub fn get_missing_impl_items(
12 db: &impl HirDatabase, 15 sema: &Semantics<RootDatabase>,
13 analyzer: &hir::SourceAnalyzer,
14 impl_block: &ast::ImplBlock, 16 impl_block: &ast::ImplBlock,
15) -> Vec<hir::AssocItem> { 17) -> Vec<hir::AssocItem> {
16 // Names must be unique between constants and functions. However, type aliases 18 // Names must be unique between constants and functions. However, type aliases
@@ -42,15 +44,17 @@ pub fn get_missing_impl_items(
42 } 44 }
43 } 45 }
44 46
45 resolve_target_trait(db, analyzer, impl_block).map_or(vec![], |target_trait| { 47 resolve_target_trait(sema, impl_block).map_or(vec![], |target_trait| {
46 target_trait 48 target_trait
47 .items(db) 49 .items(sema.db)
48 .iter() 50 .iter()
49 .filter(|i| match i { 51 .filter(|i| match i {
50 hir::AssocItem::Function(f) => !impl_fns_consts.contains(&f.name(db).to_string()), 52 hir::AssocItem::Function(f) => {
51 hir::AssocItem::TypeAlias(t) => !impl_type.contains(&t.name(db).to_string()), 53 !impl_fns_consts.contains(&f.name(sema.db).to_string())
54 }
55 hir::AssocItem::TypeAlias(t) => !impl_type.contains(&t.name(sema.db).to_string()),
52 hir::AssocItem::Const(c) => c 56 hir::AssocItem::Const(c) => c
53 .name(db) 57 .name(sema.db)
54 .map(|n| !impl_fns_consts.contains(&n.to_string())) 58 .map(|n| !impl_fns_consts.contains(&n.to_string()))
55 .unwrap_or_default(), 59 .unwrap_or_default(),
56 }) 60 })
@@ -60,8 +64,7 @@ pub fn get_missing_impl_items(
60} 64}
61 65
62pub(crate) fn resolve_target_trait( 66pub(crate) fn resolve_target_trait(
63 db: &impl HirDatabase, 67 sema: &Semantics<RootDatabase>,
64 analyzer: &hir::SourceAnalyzer,
65 impl_block: &ast::ImplBlock, 68 impl_block: &ast::ImplBlock,
66) -> Option<hir::Trait> { 69) -> Option<hir::Trait> {
67 let ast_path = impl_block 70 let ast_path = impl_block
@@ -70,7 +73,7 @@ pub(crate) fn resolve_target_trait(
70 .and_then(ast::PathType::cast)? 73 .and_then(ast::PathType::cast)?
71 .path()?; 74 .path()?;
72 75
73 match analyzer.resolve_path(db, &ast_path) { 76 match sema.resolve_path(&ast_path) {
74 Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def), 77 Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def),
75 _ => None, 78 _ => None,
76 } 79 }
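A minimal sketch of the new calling convention for the two helpers above, written as a hypothetical function next to them: no `db` plus `SourceAnalyzer` pair any more, just the single `Semantics` handle.

    use hir::Semantics;
    use ra_ide_db::RootDatabase;
    use ra_syntax::ast;

    // Both helpers reach the database through `sema.db` internally.
    fn trait_and_missing_items(
        sema: &Semantics<RootDatabase>,
        impl_block: &ast::ImplBlock,
    ) -> (Option<hir::Trait>, Vec<hir::AssocItem>) {
        (resolve_target_trait(sema, impl_block), get_missing_impl_items(sema, impl_block))
    }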
diff --git a/crates/ra_assists/src/utils/insert_use.rs b/crates/ra_assists/src/utils/insert_use.rs
new file mode 100644
index 000000000..36fd2fc0b
--- /dev/null
+++ b/crates/ra_assists/src/utils/insert_use.rs
@@ -0,0 +1,510 @@
1//! Handle syntactic aspects of inserting a new `use`.
2
3use hir::{self, ModPath};
4use ra_syntax::{
5 ast::{self, NameOwner},
6 AstNode, Direction, SmolStr,
7 SyntaxKind::{PATH, PATH_SEGMENT},
8 SyntaxNode, T,
9};
10use ra_text_edit::TextEditBuilder;
11
12/// Creates and inserts a use statement for the given path to import.
13/// The use statement is inserted in the scope most appropriate to
14/// the cursor position given, additionally merged with the existing use imports.
15pub fn insert_use_statement(
16 // Ideally the position of the cursor, used to pick the insertion scope
17 position: &SyntaxNode,
18 path_to_import: &ModPath,
19 edit: &mut TextEditBuilder,
20) {
21 let target = path_to_import.to_string().split("::").map(SmolStr::new).collect::<Vec<_>>();
22 let container = position.ancestors().find_map(|n| {
23 if let Some(module) = ast::Module::cast(n.clone()) {
24 return module.item_list().map(|it| it.syntax().clone());
25 }
26 ast::SourceFile::cast(n).map(|it| it.syntax().clone())
27 });
28
29 if let Some(container) = container {
30 let action = best_action_for_target(container, position.clone(), &target);
31 make_assist(&action, &target, edit);
32 }
33}
34
35fn collect_path_segments_raw(
36 segments: &mut Vec<ast::PathSegment>,
37 mut path: ast::Path,
38) -> Option<usize> {
39 let oldlen = segments.len();
40 loop {
41 let mut children = path.syntax().children_with_tokens();
42 let (first, second, third) = (
43 children.next().map(|n| (n.clone(), n.kind())),
44 children.next().map(|n| (n.clone(), n.kind())),
45 children.next().map(|n| (n.clone(), n.kind())),
46 );
47 match (first, second, third) {
48 (Some((subpath, PATH)), Some((_, T![::])), Some((segment, PATH_SEGMENT))) => {
49 path = ast::Path::cast(subpath.as_node()?.clone())?;
50 segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?);
51 }
52 (Some((segment, PATH_SEGMENT)), _, _) => {
53 segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?);
54 break;
55 }
56 (_, _, _) => return None,
57 }
58 }
59 // We need to reverse only the newly added segments
60 let only_new_segments = segments.split_at_mut(oldlen).1;
61 only_new_segments.reverse();
62 Some(segments.len() - oldlen)
63}
64
65fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) {
66 let mut iter = segments.iter();
67 if let Some(s) = iter.next() {
68 buf.push_str(s);
69 }
70 for s in iter {
71 buf.push_str("::");
72 buf.push_str(s);
73 }
74}
75
76/// Returns the number of common segments.
77fn compare_path_segments(left: &[SmolStr], right: &[ast::PathSegment]) -> usize {
78 left.iter().zip(right).take_while(|(l, r)| compare_path_segment(l, r)).count()
79}
80
81fn compare_path_segment(a: &SmolStr, b: &ast::PathSegment) -> bool {
82 if let Some(kb) = b.kind() {
83 match kb {
84 ast::PathSegmentKind::Name(nameref_b) => a == nameref_b.text(),
85 ast::PathSegmentKind::SelfKw => a == "self",
86 ast::PathSegmentKind::SuperKw => a == "super",
87 ast::PathSegmentKind::CrateKw => a == "crate",
88 ast::PathSegmentKind::Type { .. } => false, // not allowed in imports
89 }
90 } else {
91 false
92 }
93}
94
95fn compare_path_segment_with_name(a: &SmolStr, b: &ast::Name) -> bool {
96 a == b.text()
97}
98
99#[derive(Clone, Debug)]
100enum ImportAction {
101 Nothing,
102 // Add a brand new use statement.
103 AddNewUse {
104 anchor: Option<SyntaxNode>, // anchor node
105 add_after_anchor: bool,
106 },
107
108 // To split an existing use statement creating a nested import.
109 AddNestedImport {
110 // how many segments matched the target path
111 common_segments: usize,
112 path_to_split: ast::Path,
113 // the first segment of path_to_split we want to add into the new nested list
114 first_segment_to_split: Option<ast::PathSegment>,
115 // Whether to add 'self' in addition to the target path
116 add_self: bool,
117 },
118 // To add the target path to an existing nested import tree list.
119 AddInTreeList {
120 common_segments: usize,
121 // The UseTreeList in which to add the target path
122 tree_list: ast::UseTreeList,
123 add_self: bool,
124 },
125}
126
127impl ImportAction {
128 fn add_new_use(anchor: Option<SyntaxNode>, add_after_anchor: bool) -> Self {
129 ImportAction::AddNewUse { anchor, add_after_anchor }
130 }
131
132 fn add_nested_import(
133 common_segments: usize,
134 path_to_split: ast::Path,
135 first_segment_to_split: Option<ast::PathSegment>,
136 add_self: bool,
137 ) -> Self {
138 ImportAction::AddNestedImport {
139 common_segments,
140 path_to_split,
141 first_segment_to_split,
142 add_self,
143 }
144 }
145
146 fn add_in_tree_list(
147 common_segments: usize,
148 tree_list: ast::UseTreeList,
149 add_self: bool,
150 ) -> Self {
151 ImportAction::AddInTreeList { common_segments, tree_list, add_self }
152 }
153
154 fn better(left: ImportAction, right: ImportAction) -> ImportAction {
155 if left.is_better(&right) {
156 left
157 } else {
158 right
159 }
160 }
161
162 fn is_better(&self, other: &ImportAction) -> bool {
163 match (self, other) {
164 (ImportAction::Nothing, _) => true,
165 (ImportAction::AddInTreeList { .. }, ImportAction::Nothing) => false,
166 (
167 ImportAction::AddNestedImport { common_segments: n, .. },
168 ImportAction::AddInTreeList { common_segments: m, .. },
169 )
170 | (
171 ImportAction::AddInTreeList { common_segments: n, .. },
172 ImportAction::AddNestedImport { common_segments: m, .. },
173 )
174 | (
175 ImportAction::AddInTreeList { common_segments: n, .. },
176 ImportAction::AddInTreeList { common_segments: m, .. },
177 )
178 | (
179 ImportAction::AddNestedImport { common_segments: n, .. },
180 ImportAction::AddNestedImport { common_segments: m, .. },
181 ) => n > m,
182 (ImportAction::AddInTreeList { .. }, _) => true,
183 (ImportAction::AddNestedImport { .. }, ImportAction::Nothing) => false,
184 (ImportAction::AddNestedImport { .. }, _) => true,
185 (ImportAction::AddNewUse { .. }, _) => false,
186 }
187 }
188}
189
190// Find out the best ImportAction to import target path against current_use_tree.
191// If current_use_tree has a nested import the function gets called recursively on every UseTree inside a UseTreeList.
192fn walk_use_tree_for_best_action(
193 current_path_segments: &mut Vec<ast::PathSegment>, // buffer containing path segments
194 current_parent_use_tree_list: Option<ast::UseTreeList>, // will be Some value if we are in a nested import
195 current_use_tree: ast::UseTree, // the use tree we are currently examining
196 target: &[SmolStr], // the path we want to import
197) -> ImportAction {
198 // We save the number of segments in the buffer so we can restore the correct segments
199 // before returning. Recursive calls will add segments, so we need to delete them.
200 let prev_len = current_path_segments.len();
201
202 let tree_list = current_use_tree.use_tree_list();
203 let alias = current_use_tree.alias();
204
205 let path = match current_use_tree.path() {
206 Some(path) => path,
207 None => {
208 // If the use item doesn't have a path, it's broken (syntax error)
209 return ImportAction::add_new_use(
210 current_use_tree
211 .syntax()
212 .ancestors()
213 .find_map(ast::UseItem::cast)
214 .map(|it| it.syntax().clone()),
215 true,
216 );
217 }
218 };
219
220 // This can happen only if current_use_tree is a direct child of a UseItem
221 if let Some(name) = alias.and_then(|it| it.name()) {
222 if compare_path_segment_with_name(&target[0], &name) {
223 return ImportAction::Nothing;
224 }
225 }
226
227 collect_path_segments_raw(current_path_segments, path.clone());
228
229 // We compare only the new segments added in the line just above.
230 // The first prev_len segments were already compared in 'parent' recursive calls.
231 let left = target.split_at(prev_len).1;
232 let right = current_path_segments.split_at(prev_len).1;
233 let common = compare_path_segments(left, &right);
234 let mut action = match common {
235 0 => ImportAction::add_new_use(
236 // e.g: target is std::fmt and we can have
237 // use foo::bar
238 // We add a brand new use statement
239 current_use_tree
240 .syntax()
241 .ancestors()
242 .find_map(ast::UseItem::cast)
243 .map(|it| it.syntax().clone()),
244 true,
245 ),
246 common if common == left.len() && left.len() == right.len() => {
247 // e.g: target is std::fmt and we can have
248 // 1- use std::fmt;
249 // 2- use std::fmt::{ ... }
250 if let Some(list) = tree_list {
251 // In case 2 we need to add self to the nested list
252 // unless it's already there
253 let has_self = list.use_trees().map(|it| it.path()).any(|p| {
254 p.and_then(|it| it.segment())
255 .and_then(|it| it.kind())
256 .filter(|k| *k == ast::PathSegmentKind::SelfKw)
257 .is_some()
258 });
259
260 if has_self {
261 ImportAction::Nothing
262 } else {
263 ImportAction::add_in_tree_list(current_path_segments.len(), list, true)
264 }
265 } else {
266 // Case 1
267 ImportAction::Nothing
268 }
269 }
270 common if common != left.len() && left.len() == right.len() => {
271 // e.g: target is std::fmt and we have
272 // use std::io;
273 // We need to split.
274 let segments_to_split = current_path_segments.split_at(prev_len + common).1;
275 ImportAction::add_nested_import(
276 prev_len + common,
277 path,
278 Some(segments_to_split[0].clone()),
279 false,
280 )
281 }
282 common if common == right.len() && left.len() > right.len() => {
283 // e.g: target is std::fmt and we can have
284 // 1- use std;
285 // 2- use std::{ ... };
286
287 // fallback action
288 let mut better_action = ImportAction::add_new_use(
289 current_use_tree
290 .syntax()
291 .ancestors()
292 .find_map(ast::UseItem::cast)
293 .map(|it| it.syntax().clone()),
294 true,
295 );
296 if let Some(list) = tree_list {
297 // Case 2, check recursively if the path is already imported in the nested list
298 for u in list.use_trees() {
299 let child_action = walk_use_tree_for_best_action(
300 current_path_segments,
301 Some(list.clone()),
302 u,
303 target,
304 );
305 if child_action.is_better(&better_action) {
306 better_action = child_action;
307 if let ImportAction::Nothing = better_action {
308 return better_action;
309 }
310 }
311 }
312 } else {
313 // Case 1, split adding self
314 better_action = ImportAction::add_nested_import(prev_len + common, path, None, true)
315 }
316 better_action
317 }
318 common if common == left.len() && left.len() < right.len() => {
319 // e.g: target is std::fmt and we can have
320 // use std::fmt::Debug;
321 let segments_to_split = current_path_segments.split_at(prev_len + common).1;
322 ImportAction::add_nested_import(
323 prev_len + common,
324 path,
325 Some(segments_to_split[0].clone()),
326 true,
327 )
328 }
329 common if common < left.len() && common < right.len() => {
330 // e.g: target is std::fmt::nested::Debug
331 // use std::fmt::Display
332 let segments_to_split = current_path_segments.split_at(prev_len + common).1;
333 ImportAction::add_nested_import(
334 prev_len + common,
335 path,
336 Some(segments_to_split[0].clone()),
337 false,
338 )
339 }
340 _ => unreachable!(),
341 };
342
343 // If we are inside a UseTreeList, adding a use statement becomes adding to the existing
344 // tree list.
345 action = match (current_parent_use_tree_list, action.clone()) {
346 (Some(use_tree_list), ImportAction::AddNewUse { .. }) => {
347 ImportAction::add_in_tree_list(prev_len, use_tree_list, false)
348 }
349 (_, _) => action,
350 };
351
352 // We remove the segments added
353 current_path_segments.truncate(prev_len);
354 action
355}
356
357fn best_action_for_target(
358 container: SyntaxNode,
359 anchor: SyntaxNode,
360 target: &[SmolStr],
361) -> ImportAction {
362 let mut storage = Vec::with_capacity(16); // this should be the only allocation
363 let best_action = container
364 .children()
365 .filter_map(ast::UseItem::cast)
366 .filter_map(|it| it.use_tree())
367 .map(|u| walk_use_tree_for_best_action(&mut storage, None, u, target))
368 .fold(None, |best, a| match best {
369 Some(best) => Some(ImportAction::better(best, a)),
370 None => Some(a),
371 });
372
373 match best_action {
374 Some(action) => action,
375 None => {
376 // We have no action and no UseItem was found in the container, so we find
377 // another item and use it as the anchor.
378 // If there are no items above, we choose the target path itself as anchor.
379 // todo: we should include even whitespace blocks as anchor candidates
380 let anchor = container.children().next().or_else(|| Some(anchor));
381
382 let add_after_anchor = anchor
383 .clone()
384 .and_then(ast::Attr::cast)
385 .map(|attr| attr.kind() == ast::AttrKind::Inner)
386 .unwrap_or(false);
387 ImportAction::add_new_use(anchor, add_after_anchor)
388 }
389 }
390}
391
392fn make_assist(action: &ImportAction, target: &[SmolStr], edit: &mut TextEditBuilder) {
393 match action {
394 ImportAction::AddNewUse { anchor, add_after_anchor } => {
395 make_assist_add_new_use(anchor, *add_after_anchor, target, edit)
396 }
397 ImportAction::AddInTreeList { common_segments, tree_list, add_self } => {
398 // We know that the first n segments already exist in the use statement we want
399 // to modify, so we want to add only the last target.len() - n segments.
400 let segments_to_add = target.split_at(*common_segments).1;
401 make_assist_add_in_tree_list(tree_list, segments_to_add, *add_self, edit)
402 }
403 ImportAction::AddNestedImport {
404 common_segments,
405 path_to_split,
406 first_segment_to_split,
407 add_self,
408 } => {
409 let segments_to_add = target.split_at(*common_segments).1;
410 make_assist_add_nested_import(
411 path_to_split,
412 first_segment_to_split,
413 segments_to_add,
414 *add_self,
415 edit,
416 )
417 }
418 _ => {}
419 }
420}
421
422fn make_assist_add_new_use(
423 anchor: &Option<SyntaxNode>,
424 after: bool,
425 target: &[SmolStr],
426 edit: &mut TextEditBuilder,
427) {
428 if let Some(anchor) = anchor {
429 let indent = ra_fmt::leading_indent(anchor);
430 let mut buf = String::new();
431 if after {
432 buf.push_str("\n");
433 if let Some(spaces) = &indent {
434 buf.push_str(spaces);
435 }
436 }
437 buf.push_str("use ");
438 fmt_segments_raw(target, &mut buf);
439 buf.push_str(";");
440 if !after {
441 buf.push_str("\n\n");
442 if let Some(spaces) = &indent {
443 buf.push_str(&spaces);
444 }
445 }
446 let position = if after { anchor.text_range().end() } else { anchor.text_range().start() };
447 edit.insert(position, buf);
448 }
449}
450
451fn make_assist_add_in_tree_list(
452 tree_list: &ast::UseTreeList,
453 target: &[SmolStr],
454 add_self: bool,
455 edit: &mut TextEditBuilder,
456) {
457 let last = tree_list.use_trees().last();
458 if let Some(last) = last {
459 let mut buf = String::new();
460 let comma = last.syntax().siblings(Direction::Next).find(|n| n.kind() == T![,]);
461 let offset = if let Some(comma) = comma {
462 comma.text_range().end()
463 } else {
464 buf.push_str(",");
465 last.syntax().text_range().end()
466 };
467 if add_self {
468 buf.push_str(" self")
469 } else {
470 buf.push_str(" ");
471 }
472 fmt_segments_raw(target, &mut buf);
473 edit.insert(offset, buf);
474 } else {
475 }
476}
477
478fn make_assist_add_nested_import(
479 path: &ast::Path,
480 first_segment_to_split: &Option<ast::PathSegment>,
481 target: &[SmolStr],
482 add_self: bool,
483 edit: &mut TextEditBuilder,
484) {
485 let use_tree = path.syntax().ancestors().find_map(ast::UseTree::cast);
486 if let Some(use_tree) = use_tree {
487 let (start, add_colon_colon) = if let Some(first_segment_to_split) = first_segment_to_split
488 {
489 (first_segment_to_split.syntax().text_range().start(), false)
490 } else {
491 (use_tree.syntax().text_range().end(), true)
492 };
493 let end = use_tree.syntax().text_range().end();
494
495 let mut buf = String::new();
496 if add_colon_colon {
497 buf.push_str("::");
498 }
499 buf.push_str("{");
500 if add_self {
501 buf.push_str("self, ");
502 }
503 fmt_segments_raw(target, &mut buf);
504 if !target.is_empty() {
505 buf.push_str(", ");
506 }
507 edit.insert(start, buf);
508 edit.insert(end, "}".to_string());
509 }
510}
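A caller-side sketch of the helper defined in this new file, assuming the usual `TextEditBuilder::default()`/`finish()` pair from `ra_text_edit`; how the `ModPath` is obtained (for example from a resolved `hir::Path`, as in the assist earlier in this diff) is up to the caller:

    use hir::ModPath;
    use ra_syntax::{ast, AstNode};
    use ra_text_edit::{TextEdit, TextEditBuilder};

    // Let the helper pick the best insertion/merge strategy for `path_to_import`
    // relative to the node under the cursor, then collect the resulting edit.
    fn add_import(position: &ast::Path, path_to_import: &ModPath) -> TextEdit {
        let mut builder = TextEditBuilder::default();
        insert_use_statement(position.syntax(), path_to_import, &mut builder);
        builder.finish()
    }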
diff --git a/crates/ra_cargo_watch/Cargo.toml b/crates/ra_cargo_watch/Cargo.toml
index b09650d98..1fdbffea1 100644
--- a/crates/ra_cargo_watch/Cargo.toml
+++ b/crates/ra_cargo_watch/Cargo.toml
@@ -6,13 +6,10 @@ authors = ["rust-analyzer developers"]
6 6
7[dependencies] 7[dependencies]
8crossbeam-channel = "0.4.0" 8crossbeam-channel = "0.4.0"
9lsp-types = { version = "0.70.1", features = ["proposed"] } 9lsp-types = { version = "0.71.0", features = ["proposed"] }
10log = "0.4.8" 10log = "0.4.8"
11cargo_metadata = "0.9.1" 11cargo_metadata = "0.9.1"
12jod-thread = "0.1.0"
13parking_lot = "0.10.0"
14serde_json = "1.0.48" 12serde_json = "1.0.48"
15 13
16[dev-dependencies] 14[dev-dependencies]
17insta = "0.13.1" 15insta = "0.13.1"
18serde_json = "1.0.48"
diff --git a/crates/ra_hir/src/from_id.rs b/crates/ra_hir/src/from_id.rs
index c16c17072..3aa7c4870 100644
--- a/crates/ra_hir/src/from_id.rs
+++ b/crates/ra_hir/src/from_id.rs
@@ -40,6 +40,7 @@ from_id![
40 (hir_def::ConstId, crate::Const), 40 (hir_def::ConstId, crate::Const),
41 (hir_def::FunctionId, crate::Function), 41 (hir_def::FunctionId, crate::Function),
42 (hir_def::ImplId, crate::ImplBlock), 42 (hir_def::ImplId, crate::ImplBlock),
43 (hir_def::TypeParamId, crate::TypeParam),
43 (hir_expand::MacroDefId, crate::MacroDef) 44 (hir_expand::MacroDefId, crate::MacroDef)
44]; 45];
45 46
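The new `from_id!` entry only adds the id-to-API-type conversion for type parameters; a one-line sketch of what it enables inside `ra_hir` (the wrapper function is hypothetical):

    // The macro expands to a `From` impl, so a raw lowered id can be lifted into
    // the public wrapper type directly.
    fn lift(id: hir_def::TypeParamId) -> crate::TypeParam {
        id.into()
    }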
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
index 7a9745ebe..3aa964fb6 100644
--- a/crates/ra_hir/src/lib.rs
+++ b/crates/ra_hir/src/lib.rs
@@ -26,9 +26,10 @@ macro_rules! impl_froms {
26 } 26 }
27} 27}
28 28
29mod semantics;
29pub mod db; 30pub mod db;
30pub mod source_analyzer; 31mod source_analyzer;
31pub mod source_binder; 32mod source_binder;
32 33
33pub mod diagnostics; 34pub mod diagnostics;
34 35
@@ -45,8 +46,8 @@ pub use crate::{
45 StructField, Trait, Type, TypeAlias, TypeParam, Union, VariantDef, 46 StructField, Trait, Type, TypeAlias, TypeParam, Union, VariantDef,
46 }, 47 },
47 has_source::HasSource, 48 has_source::HasSource,
48 source_analyzer::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer}, 49 semantics::{original_range, Semantics, SemanticsScope},
49 source_binder::SourceBinder, 50 source_analyzer::PathResolution,
50}; 51};
51 52
52pub use hir_def::{ 53pub use hir_def::{
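A small sketch of the surface this re-export change gives downstream crates: one `Semantics` per analysis session, used for both parsing and name resolution, instead of reaching for `SourceAnalyzer`/`SourceBinder` directly (the database and file id here are illustrative):

    use hir::Semantics;
    use ra_db::FileId;
    use ra_ide_db::RootDatabase;
    use ra_syntax::ast;

    // `RootDatabase` implements `HirDatabase`, so it satisfies the `DB` parameter.
    fn parse_with_semantics(db: &RootDatabase, file_id: FileId) -> ast::SourceFile {
        let sema = Semantics::new(db);
        sema.parse(file_id)
    }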
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
new file mode 100644
index 000000000..4a9cb7b3e
--- /dev/null
+++ b/crates/ra_hir/src/semantics.rs
@@ -0,0 +1,405 @@
1//! See `Semantics`.
2
3use std::{cell::RefCell, fmt, iter::successors};
4
5use hir_def::{
6 resolver::{self, HasResolver, Resolver},
7 DefWithBodyId, TraitId,
8};
9use ra_db::{FileId, FileRange};
10use ra_syntax::{
11 algo::skip_trivia_token, ast, match_ast, AstNode, Direction, SyntaxNode, SyntaxToken,
12 TextRange, TextUnit,
13};
14use rustc_hash::{FxHashMap, FxHashSet};
15
16use crate::{
17 db::HirDatabase,
18 source_analyzer::{resolve_hir_path, ReferenceDescriptor, SourceAnalyzer},
19 source_binder::{ChildContainer, SourceBinder},
20 Function, HirFileId, InFile, Local, MacroDef, Module, ModuleDef, Name, Origin, Path,
21 PathResolution, ScopeDef, StructField, Trait, Type, TypeParam, VariantDef,
22};
23use hir_expand::ExpansionInfo;
24use ra_prof::profile;
25
26/// Primary API to get semantic information, like types, from syntax trees.
27pub struct Semantics<'db, DB> {
28 pub db: &'db DB,
29 sb: RefCell<SourceBinder>,
30 cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
31}
32
33impl<DB> fmt::Debug for Semantics<'_, DB> {
34 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
35 write!(f, "Semantics {{ ... }}")
36 }
37}
38
39impl<'db, DB: HirDatabase> Semantics<'db, DB> {
40 pub fn new(db: &DB) -> Semantics<DB> {
41 let sb = RefCell::new(SourceBinder::new());
42 Semantics { db, sb, cache: RefCell::default() }
43 }
44
45 pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
46 let tree = self.db.parse(file_id).tree();
47 self.cache(tree.syntax().clone(), file_id.into());
48 tree
49 }
50
51 pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
52 let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
53 let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
54 let file_id = sa.expand(self.db, macro_call)?;
55 let node = self.db.parse_or_expand(file_id)?;
56 self.cache(node.clone(), file_id);
57 Some(node)
58 }
59
60 pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
61 let parent = token.parent();
62 let parent = self.find_file(parent);
63 let sa = self.analyze2(parent.as_ref(), None);
64
65 let token = successors(Some(parent.with_value(token)), |token| {
66 let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
67 let tt = macro_call.token_tree()?;
68 if !token.value.text_range().is_subrange(&tt.syntax().text_range()) {
69 return None;
70 }
71 let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
72 let token = file_id.expansion_info(self.db)?.map_token_down(token.as_ref())?;
73
74 self.cache(find_root(&token.value.parent()), token.file_id);
75
76 Some(token)
77 })
78 .last()
79 .unwrap();
80
81 token.value
82 }
83
84 pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
85 let node = self.find_file(node.clone());
86 original_range(self.db, node.as_ref())
87 }
88
89 pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
90 let node = self.find_file(node);
91 node.ancestors_with_macros(self.db).map(|it| it.value)
92 }
93
94 pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
95 self.analyze(expr.syntax()).type_of(self.db, &expr)
96 }
97
98 pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> {
99 self.analyze(pat.syntax()).type_of_pat(self.db, &pat)
100 }
101
102 pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
103 self.analyze(call.syntax()).resolve_method_call(call)
104 }
105
106 pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<StructField> {
107 self.analyze(field.syntax()).resolve_field(field)
108 }
109
110 pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<StructField> {
111 self.analyze(field.syntax()).resolve_record_field(field)
112 }
113
114 pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<VariantDef> {
115 self.analyze(record_lit.syntax()).resolve_record_literal(record_lit)
116 }
117
118 pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<VariantDef> {
119 self.analyze(record_pat.syntax()).resolve_record_pattern(record_pat)
120 }
121
122 pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
123 let sa = self.analyze(macro_call.syntax());
124 let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
125 sa.resolve_macro_call(self.db, macro_call)
126 }
127
128 pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
129 self.analyze(path.syntax()).resolve_path(self.db, path)
130 }
131
132 pub fn resolve_bind_pat_to_const(&self, pat: &ast::BindPat) -> Option<ModuleDef> {
133 self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat)
134 }
135
136 // FIXME: use this instead?
137 // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
138
139 pub fn to_def<T: ToDef + Clone>(&self, src: &T) -> Option<T::Def> {
140 T::to_def(self, src)
141 }
142
143 pub fn to_module_def(&self, file: FileId) -> Option<Module> {
144 let mut sb = self.sb.borrow_mut();
145 sb.to_module_def(self.db, file)
146 }
147
148 pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db, DB> {
149 let node = self.find_file(node.clone());
150 let resolver = self.analyze2(node.as_ref(), None).resolver;
151 SemanticsScope { db: self.db, resolver }
152 }
153
154 pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextUnit) -> SemanticsScope<'db, DB> {
155 let node = self.find_file(node.clone());
156 let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
157 SemanticsScope { db: self.db, resolver }
158 }
159
160 pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db, DB> {
161 let resolver = def.id.resolver(self.db);
162 SemanticsScope { db: self.db, resolver }
163 }
164
165 // FIXME: we only use this in the `inline_local_variable` assist; ideally, we
166 // should switch to general reference search infra there.
167 pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
168 self.analyze(pat.syntax()).find_all_refs(pat)
169 }
170
171 fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
172 let src = self.find_file(node.clone());
173 self.analyze2(src.as_ref(), None)
174 }
175
176 fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextUnit>) -> SourceAnalyzer {
177 let _p = profile("Semantics::analyze2");
178
179 let container = match self.sb.borrow_mut().find_container(self.db, src) {
180 Some(it) => it,
181 None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
182 };
183
184 let resolver = match container {
185 ChildContainer::DefWithBodyId(def) => {
186 return SourceAnalyzer::new_for_body(self.db, def, src, offset)
187 }
188 ChildContainer::TraitId(it) => it.resolver(self.db),
189 ChildContainer::ImplId(it) => it.resolver(self.db),
190 ChildContainer::ModuleId(it) => it.resolver(self.db),
191 ChildContainer::EnumId(it) => it.resolver(self.db),
192 ChildContainer::VariantId(it) => it.resolver(self.db),
193 ChildContainer::GenericDefId(it) => it.resolver(self.db),
194 };
195 SourceAnalyzer::new_for_resolver(resolver, src)
196 }
197
198 fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
199 assert!(root_node.parent().is_none());
200 let mut cache = self.cache.borrow_mut();
201 let prev = cache.insert(root_node, file_id);
202 assert!(prev == None || prev == Some(file_id))
203 }
204
205 pub fn assert_contains_node(&self, node: &SyntaxNode) {
206 self.find_file(node.clone());
207 }
208
209 fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
210 let cache = self.cache.borrow();
211 cache.get(root_node).copied()
212 }
213
214 fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> {
215 let root_node = find_root(&node);
216 let file_id = self.lookup(&root_node).unwrap_or_else(|| {
217 panic!(
218 "\n\nFailed to lookup {:?} in this Semantics.\n\
219 Make sure to use only query nodes, derived from this instance of Semantics.\n\
220 root node: {:?}\n\
221 known nodes: {}\n\n",
222 node,
223 root_node,
224 self.cache
225 .borrow()
226 .keys()
227 .map(|it| format!("{:?}", it))
228 .collect::<Vec<_>>()
229 .join(", ")
230 )
231 });
232 InFile::new(file_id, node)
233 }
234}
235
236pub trait ToDef: Sized + AstNode + 'static {
237 type Def;
238 fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: &Self) -> Option<Self::Def>;
239}
240
241macro_rules! to_def_impls {
242 ($(($def:path, $ast:path)),* ,) => {$(
243 impl ToDef for $ast {
244 type Def = $def;
245 fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: &Self)
246 -> Option<Self::Def>
247 {
248 let src = sema.find_file(src.syntax().clone()).with_value(src);
249 sema.sb.borrow_mut().to_id(sema.db, src.cloned()).map(Into::into)
250 }
251 }
252 )*}
253}
254
255to_def_impls![
256 (crate::Module, ast::Module),
257 (crate::Struct, ast::StructDef),
258 (crate::Enum, ast::EnumDef),
259 (crate::Union, ast::UnionDef),
260 (crate::Trait, ast::TraitDef),
261 (crate::ImplBlock, ast::ImplBlock),
262 (crate::TypeAlias, ast::TypeAliasDef),
263 (crate::Const, ast::ConstDef),
264 (crate::Static, ast::StaticDef),
265 (crate::Function, ast::FnDef),
266 (crate::StructField, ast::RecordFieldDef),
267 (crate::EnumVariant, ast::EnumVariant),
268 (crate::TypeParam, ast::TypeParam),
269 (crate::MacroDef, ast::MacroCall), // this one is dubious, not all calls are macros
270];
271
272impl ToDef for ast::BindPat {
273 type Def = Local;
274
275 fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: &Self) -> Option<Local> {
276 let src = sema.find_file(src.syntax().clone()).with_value(src);
277 let file_id = src.file_id;
278 let mut sb = sema.sb.borrow_mut();
279 let db = sema.db;
280 let parent: DefWithBodyId = src.value.syntax().ancestors().find_map(|it| {
281 let res = match_ast! {
282 match it {
283 ast::ConstDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
284 ast::StaticDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
285 ast::FnDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
286 _ => return None,
287 }
288 };
289 Some(res)
290 })?;
291 let (_body, source_map) = db.body_with_source_map(parent);
292 let src = src.cloned().map(ast::Pat::from);
293 let pat_id = source_map.node_pat(src.as_ref())?;
294 Some(Local { parent: parent.into(), pat_id })
295 }
296}
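
`ast::BindPat` cannot go through the macro above: a binding has no def id of its own, so the impl walks the ancestors until it reaches a body owner (`FnDef`, `ConstDef`, or `StaticDef`), lowers that body, and looks the pattern up in the body's source map. Here is a standalone sketch of just the ancestor-walk step, using a flat toy tree and a plain enum in place of `match_ast!`; every name in it is invented for illustration.

// Hypothetical flattened syntax tree: each node knows its kind and its parent.
#[derive(Clone, Copy)]
enum Kind { BindPat, LetStmt, Block, FnDef, ConstDef, StaticDef, Module }

struct Node { kind: Kind, parent: Option<usize> }

// Walk the ancestor chain (the node itself first, like `ancestors()`) and stop
// at the first body owner, mirroring the `find_map` over `match_ast!` above.
fn body_owner(nodes: &[Node], mut idx: usize) -> Option<usize> {
    loop {
        match nodes[idx].kind {
            Kind::FnDef | Kind::ConstDef | Kind::StaticDef => return Some(idx),
            _ => idx = nodes[idx].parent?,
        }
    }
}

fn main() {
    // module -> fn -> block -> let -> binding pattern
    let nodes = vec![
        Node { kind: Kind::Module, parent: None },
        Node { kind: Kind::FnDef, parent: Some(0) },
        Node { kind: Kind::Block, parent: Some(1) },
        Node { kind: Kind::LetStmt, parent: Some(2) },
        Node { kind: Kind::BindPat, parent: Some(3) },
    ];
    assert_eq!(body_owner(&nodes, 4), Some(1)); // the enclosing FnDef
}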
297
298fn find_root(node: &SyntaxNode) -> SyntaxNode {
299 node.ancestors().last().unwrap()
300}
301
302pub struct SemanticsScope<'a, DB> {
303 pub db: &'a DB,
304 resolver: Resolver,
305}
306
307impl<'a, DB: HirDatabase> SemanticsScope<'a, DB> {
308 pub fn module(&self) -> Option<Module> {
309 Some(Module { id: self.resolver.module()? })
310 }
311
312 /// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type
313 // FIXME: rename to visible_traits to not repeat scope?
314 pub fn traits_in_scope(&self) -> FxHashSet<TraitId> {
315 let resolver = &self.resolver;
316 resolver.traits_in_scope(self.db)
317 }
318
319 pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
320 let resolver = &self.resolver;
321
322 resolver.process_all_names(self.db, &mut |name, def| {
323 let def = match def {
324 resolver::ScopeDef::PerNs(it) => it.into(),
325 resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
326 resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
327 resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }),
328 resolver::ScopeDef::Local(pat_id) => {
329 let parent = resolver.body_owner().unwrap().into();
330 ScopeDef::Local(Local { parent, pat_id })
331 }
332 };
333 f(name, def)
334 })
335 }
336
337 pub fn resolve_hir_path(&self, path: &Path) -> Option<PathResolution> {
338 resolve_hir_path(self.db, &self.resolver, path)
339 }
340}
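
`process_all_names` is push-based: the resolver drives a `FnMut(Name, ScopeDef)` callback instead of returning an iterator, and the closure converts resolver-level defs into the public `ScopeDef`. A small sketch of that callback shape and of adapting it back into a collection follows; the `Name`/`ScopeDef` stand-ins are invented for the example and are not the real resolver API.

// Stand-ins for hir's Name and ScopeDef, invented for the example.
type Name = String;

#[derive(Debug)]
enum ScopeDef {
    Local(u32),
    Module(&'static str),
}

// A resolver-like producer that pushes every visible name into a callback,
// the same `&mut dyn FnMut(Name, ScopeDef)` shape used above.
fn process_all_names(f: &mut dyn FnMut(Name, ScopeDef)) {
    f("x".to_string(), ScopeDef::Local(0));
    f("std".to_string(), ScopeDef::Module("std"));
}

fn main() {
    // A caller (completion, for instance) can adapt the push API into a Vec.
    let mut names = Vec::new();
    process_all_names(&mut |name, def| names.push((name, format!("{:?}", def))));
    assert_eq!(names.len(), 2);
}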
341
342// FIXME: Change `HasSource` trait to work with `Semantics` and remove this?
343pub fn original_range(db: &impl HirDatabase, node: InFile<&SyntaxNode>) -> FileRange {
344 if let Some(range) = original_range_opt(db, node) {
345 let original_file = range.file_id.original_file(db);
346 if range.file_id == original_file.into() {
347 return FileRange { file_id: original_file, range: range.value };
348 }
349
350 log::error!("Fail to mapping up more for {:?}", range);
351 return FileRange { file_id: range.file_id.original_file(db), range: range.value };
352 }
353
354 // Fall back to whole macro call
355 if let Some(expansion) = node.file_id.expansion_info(db) {
356 if let Some(call_node) = expansion.call_node() {
357 return FileRange {
358 file_id: call_node.file_id.original_file(db),
359 range: call_node.value.text_range(),
360 };
361 }
362 }
363
364 FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() }
365}
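
`original_range` is a chain of progressively coarser answers: the precisely up-mapped range if `original_range_opt` can produce one, otherwise the whole macro call the node was expanded from, otherwise the node's own range in whatever file it lives in. A standalone sketch of that fallback chain over a hypothetical `FileRange` (field names invented for the example; the real type pairs a `FileId` with a `TextRange`):

// Hypothetical file/range pair standing in for the real FileRange.
#[derive(Debug, PartialEq)]
struct FileRange { file_id: u32, start: u32, end: u32 }

// Mirror of the control flow above: take the precise up-mapped range if the
// expansion info can produce one, else the whole macro call, else the node's
// own range in its (possibly macro) file.
fn original_range(
    mapped_up: Option<FileRange>,
    macro_call: Option<FileRange>,
    own: FileRange,
) -> FileRange {
    mapped_up.or(macro_call).unwrap_or(own)
}

fn main() {
    let own = FileRange { file_id: 9, start: 0, end: 4 };
    let call = Some(FileRange { file_id: 1, start: 10, end: 30 });
    // No precise mapping available, so the macro call site is reported.
    assert_eq!(
        original_range(None, call, own),
        FileRange { file_id: 1, start: 10, end: 30 }
    );
}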
366
367fn original_range_opt(
368 db: &impl HirDatabase,
369 node: InFile<&SyntaxNode>,
370) -> Option<InFile<TextRange>> {
371 let expansion = node.file_id.expansion_info(db)?;
372
373 // does the input node consist of a single (non-trivia) token?
374 let single = skip_trivia_token(node.value.first_token()?, Direction::Next)?
375 == skip_trivia_token(node.value.last_token()?, Direction::Prev)?;
376
377 Some(node.value.descendants().find_map(|it| {
378 let first = skip_trivia_token(it.first_token()?, Direction::Next)?;
379 let first = ascend_call_token(db, &expansion, node.with_value(first))?;
380
381 let last = skip_trivia_token(it.last_token()?, Direction::Prev)?;
382 let last = ascend_call_token(db, &expansion, node.with_value(last))?;
383
384 if (!single && first == last) || (first.file_id != last.file_id) {
385 return None;
386 }
387
388 Some(first.with_value(first.value.text_range().extend_to(&last.value.text_range())))
389 })?)
390}
391
392fn ascend_call_token(
393 db: &impl HirDatabase,
394 expansion: &ExpansionInfo,
395 token: InFile<SyntaxToken>,
396) -> Option<InFile<SyntaxToken>> {
397 let (mapped, origin) = expansion.map_token_up(token.as_ref())?;
398 if origin != Origin::Call {
399 return None;
400 }
401 if let Some(info) = mapped.file_id.expansion_info(db) {
402 return ascend_call_token(db, &info, mapped);
403 }
404 Some(mapped)
405}
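
`ascend_call_token` recurses: a token that came out of a nested expansion is mapped up one hop at a time until it reaches a file that is not itself an expansion, and the whole mapping is abandoned as soon as a hop does not originate from the call site. A toy sketch of that recursion over a hypothetical numeric expansion chain (the real code threads `ExpansionInfo` and its token map instead):

// Hypothetical expansion chain: file n (> 0) was produced by expanding a macro
// written in file n - 1; file 0 is real source on disk.
fn expansion_parent(file_id: u32) -> Option<u32> {
    if file_id > 0 { Some(file_id - 1) } else { None }
}

// Map a (file, offset) pair up through nested expansions, the way
// `ascend_call_token` keeps calling itself while `expansion_info` is Some.
fn ascend(file_id: u32, offset: u32) -> (u32, u32) {
    match expansion_parent(file_id) {
        // Each real hop can also fail (Origin != Call) and remaps the token
        // through the expansion's token map; the sketch keeps both trivial.
        Some(parent) => ascend(parent, offset),
        None => (file_id, offset),
    }
}

fn main() {
    // A token three expansions deep is attributed to the on-disk file 0.
    assert_eq!(ascend(3, 42), (0, 42));
}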
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs
index efa3f8a79..c650a9e08 100644
--- a/crates/ra_hir/src/source_analyzer.rs
+++ b/crates/ra_hir/src/source_analyzer.rs
@@ -11,32 +11,31 @@ use either::Either;
 use hir_def::{
     body::{
         scope::{ExprScopes, ScopeId},
-        BodySourceMap,
+        Body, BodySourceMap,
     },
-    expr::{ExprId, PatId},
-    resolver::{self, resolver_for_scope, Resolver, TypeNs, ValueNs},
-    AsMacroCall, DefWithBodyId, TraitId,
+    expr::{ExprId, Pat, PatId},
+    resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
+    AsMacroCall, DefWithBodyId,
 };
-use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile, MacroCallId};
+use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile};
 use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment};
 use ra_syntax::{
     ast::{self, AstNode},
-    AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextUnit,
+    AstPtr, SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
 };
-use rustc_hash::FxHashSet;
 
 use crate::{
-    db::HirDatabase, Adt, Const, DefWithBody, EnumVariant, Function, Local, MacroDef, Name, Path,
-    ScopeDef, Static, Struct, Trait, Type, TypeAlias, TypeParam,
+    db::HirDatabase, Adt, Const, EnumVariant, Function, Local, MacroDef, ModuleDef, Path, Static,
+    Struct, Trait, Type, TypeAlias, TypeParam,
 };
 
 /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
 /// original source files. It should not be used inside the HIR itself.
 #[derive(Debug)]
-pub struct SourceAnalyzer {
+pub(crate) struct SourceAnalyzer {
     file_id: HirFileId,
-    resolver: Resolver,
-    body_owner: Option<DefWithBody>,
+    pub(crate) resolver: Resolver,
+    body: Option<Arc<Body>>,
     body_source_map: Option<Arc<BodySourceMap>>,
     infer: Option<Arc<InferenceResult>>,
     scopes: Option<Arc<ExprScopes>>,
@@ -55,64 +54,20 @@ pub enum PathResolution {
     AssocItem(crate::AssocItem),
 }
 
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct ScopeEntryWithSyntax {
-    pub(crate) name: Name,