aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.github/workflows/ci.yaml35
-rw-r--r--Cargo.lock89
-rw-r--r--crates/hir/src/display.rs2
-rw-r--r--crates/hir/src/lib.rs8
-rw-r--r--crates/hir_def/src/attr.rs12
-rw-r--r--crates/hir_def/src/find_path.rs30
-rw-r--r--crates/hir_expand/src/db.rs397
-rw-r--r--crates/hir_expand/src/hygiene.rs16
-rw-r--r--crates/hir_expand/src/lib.rs21
-rw-r--r--crates/hir_ty/src/db.rs1
-rw-r--r--crates/hir_ty/src/lower.rs34
-rw-r--r--crates/hir_ty/src/tests/regression.rs38
-rw-r--r--crates/hir_ty/src/utils.rs52
-rw-r--r--crates/ide/src/diagnostics.rs43
-rw-r--r--crates/ide/src/diagnostics/fixes.rs51
-rw-r--r--crates/ide/src/diagnostics/unlinked_file.rs7
-rw-r--r--crates/ide/src/inlay_hints.rs4
-rw-r--r--crates/ide/src/lib.rs39
-rw-r--r--crates/ide/src/prime_caches.rs1
-rw-r--r--crates/ide/src/runnables.rs6
-rw-r--r--crates/ide/src/ssr.rs54
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html2
-rw-r--r--crates/ide/src/syntax_highlighting/tests.rs2
-rw-r--r--crates/ide_assists/src/assist_context.rs49
-rw-r--r--crates/ide_assists/src/handlers/extract_function.rs35
-rw-r--r--crates/ide_assists/src/handlers/generate_from_impl_for_enum.rs2
-rw-r--r--crates/ide_assists/src/handlers/reorder_impl.rs40
-rw-r--r--crates/ide_assists/src/lib.rs68
-rw-r--r--crates/ide_assists/src/tests.rs262
-rw-r--r--crates/ide_assists/src/utils/suggest_name.rs2
-rw-r--r--crates/ide_completion/src/completions.rs56
-rw-r--r--crates/ide_completion/src/completions/pattern.rs6
-rw-r--r--crates/ide_completion/src/completions/postfix.rs5
-rw-r--r--crates/ide_completion/src/completions/postfix/format_like.rs2
-rw-r--r--crates/ide_completion/src/completions/qualified_path.rs73
-rw-r--r--crates/ide_completion/src/completions/unqualified_path.rs6
-rw-r--r--crates/ide_completion/src/context.rs208
-rw-r--r--crates/ide_completion/src/render/enum_variant.rs2
-rw-r--r--crates/proc_macro_api/Cargo.toml2
-rw-r--r--crates/proc_macro_api/src/version.rs2
-rw-r--r--crates/rust-analyzer/src/benchmarks.rs74
-rw-r--r--crates/rust-analyzer/src/cli/diagnostics.rs7
-rw-r--r--crates/rust-analyzer/src/global_state.rs2
-rw-r--r--crates/rust-analyzer/src/handlers.rs72
-rw-r--r--crates/rust-analyzer/src/integrated_benchmarks.rs184
-rw-r--r--crates/rust-analyzer/src/lib.rs2
-rw-r--r--crates/rust-analyzer/src/main_loop.rs28
-rw-r--r--crates/rust-analyzer/src/to_proto.rs2
-rw-r--r--docs/dev/style.md33
-rw-r--r--docs/user/manual.adoc35
-rw-r--r--xtask/src/dist.rs1
-rw-r--r--xtask/src/flags.rs5
-rw-r--r--xtask/src/main.rs2
-rw-r--r--xtask/src/pre_cache.rs79
54 files changed, 1489 insertions, 801 deletions
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 0f68b234c..63518e67f 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -42,14 +42,6 @@ jobs:
42 if: matrix.os == 'windows-latest' 42 if: matrix.os == 'windows-latest'
43 run: Rename-Item C:\Users\runneradmin\.rustup\toolchains\stable-x86_64-pc-windows-msvc C:\Users\runneradmin\.rustup\toolchains\stable-x86_64-pc-windows-msvc.old 43 run: Rename-Item C:\Users\runneradmin\.rustup\toolchains\stable-x86_64-pc-windows-msvc C:\Users\runneradmin\.rustup\toolchains\stable-x86_64-pc-windows-msvc.old
44 44
45 # Work around https://github.com/actions/cache/issues/403 by using GNU tar
46 # instead of BSD tar.
47 - name: Install GNU tar
48 if: matrix.os == 'macos-latest'
49 run: |
50 brew install gnu-tar
51 echo PATH="/usr/local/opt/gnu-tar/libexec/gnubin:$PATH" >> $GITHUB_ENV
52
53 - name: Install Rust toolchain 45 - name: Install Rust toolchain
54 uses: actions-rs/toolchain@v1 46 uses: actions-rs/toolchain@v1
55 with: 47 with:
@@ -58,19 +50,8 @@ jobs:
58 override: true 50 override: true
59 components: rustfmt, rust-src 51 components: rustfmt, rust-src
60 52
61 - name: Cache cargo directories 53 - name: Cache Dependencies
62 uses: actions/cache@v2 54 uses: Swatinem/rust-cache@ce325b60658c1b38465c06cc965b79baf32c1e72
63 with:
64 path: |
65 ~/.cargo/registry
66 ~/.cargo/git
67 key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
68
69 - name: Cache cargo target dir
70 uses: actions/cache@v2
71 with:
72 path: target
73 key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
74 55
75 - name: Compile 56 - name: Compile
76 run: cargo test --no-run --locked 57 run: cargo test --no-run --locked
@@ -78,9 +59,6 @@ jobs:
78 - name: Test 59 - name: Test
79 run: cargo test -- --nocapture 60 run: cargo test -- --nocapture
80 61
81 - name: Prepare cache
82 run: cargo xtask pre-cache
83
84 # Weird targets to catch non-portable code 62 # Weird targets to catch non-portable code
85 rust-cross: 63 rust-cross:
86 name: Rust Cross 64 name: Rust Cross
@@ -103,13 +81,8 @@ jobs:
103 - name: Install Rust targets 81 - name: Install Rust targets
104 run: rustup target add ${{ env.targets }} 82 run: rustup target add ${{ env.targets }}
105 83
106 - name: Cache cargo directories 84 - name: Cache Dependencies
107 uses: actions/cache@v2 85 uses: Swatinem/rust-cache@ce325b60658c1b38465c06cc965b79baf32c1e72
108 with:
109 path: |
110 ~/.cargo/registry
111 ~/.cargo/git
112 key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
113 86
114 - name: Check 87 - name: Check
115 run: | 88 run: |
diff --git a/Cargo.lock b/Cargo.lock
index ba6862e98..c411ce8e6 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -72,11 +72,12 @@ checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
72 72
73[[package]] 73[[package]]
74name = "backtrace" 74name = "backtrace"
75version = "0.3.57" 75version = "0.3.58"
76source = "registry+https://github.com/rust-lang/crates.io-index" 76source = "registry+https://github.com/rust-lang/crates.io-index"
77checksum = "78ed203b9ba68b242c62b3fb7480f589dd49829be1edb3fe8fc8b4ffda2dcb8d" 77checksum = "88fb5a785d6b44fd9d6700935608639af1b8356de1e55d5f7c2740f4faa15d82"
78dependencies = [ 78dependencies = [
79 "addr2line", 79 "addr2line",
80 "cc",
80 "cfg-if", 81 "cfg-if",
81 "libc", 82 "libc",
82 "miniz_oxide", 83 "miniz_oxide",
@@ -280,9 +281,9 @@ dependencies = [
280 281
281[[package]] 282[[package]]
282name = "crossbeam-epoch" 283name = "crossbeam-epoch"
283version = "0.9.3" 284version = "0.9.4"
284source = "registry+https://github.com/rust-lang/crates.io-index" 285source = "registry+https://github.com/rust-lang/crates.io-index"
285checksum = "2584f639eb95fea8c798496315b297cf81b9b58b6d30ab066a75455333cf4b12" 286checksum = "52fb27eab85b17fbb9f6fd667089e07d6a2eb8743d02639ee7f6a7a7729c9c94"
286dependencies = [ 287dependencies = [
287 "cfg-if", 288 "cfg-if",
288 "crossbeam-utils", 289 "crossbeam-utils",
@@ -293,9 +294,9 @@ dependencies = [
293 294
294[[package]] 295[[package]]
295name = "crossbeam-utils" 296name = "crossbeam-utils"
296version = "0.8.3" 297version = "0.8.4"
297source = "registry+https://github.com/rust-lang/crates.io-index" 298source = "registry+https://github.com/rust-lang/crates.io-index"
298checksum = "e7e9d99fa91428effe99c5c6d4634cdeba32b8cf784fc428a2a687f61a952c49" 299checksum = "4feb231f0d4d6af81aed15928e58ecf5816aa62a2393e2c82f46973e92a9a278"
299dependencies = [ 300dependencies = [
300 "autocfg", 301 "autocfg",
301 "cfg-if", 302 "cfg-if",
@@ -798,9 +799,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
798 799
799[[package]] 800[[package]]
800name = "libc" 801name = "libc"
801version = "0.2.93" 802version = "0.2.94"
802source = "registry+https://github.com/rust-lang/crates.io-index" 803source = "registry+https://github.com/rust-lang/crates.io-index"
803checksum = "9385f66bf6105b241aa65a61cb923ef20efc665cb9f9bb50ac2f0c4b7f378d41" 804checksum = "18794a8ad5b29321f790b55d93dfba91e125cb1a9edbd4f8e3150acc771c1a5e"
804 805
805[[package]] 806[[package]]
806name = "libloading" 807name = "libloading"
@@ -823,9 +824,9 @@ dependencies = [
823 824
824[[package]] 825[[package]]
825name = "lock_api" 826name = "lock_api"
826version = "0.4.3" 827version = "0.4.4"
827source = "registry+https://github.com/rust-lang/crates.io-index" 828source = "registry+https://github.com/rust-lang/crates.io-index"
828checksum = "5a3c91c24eae6777794bb1997ad98bbb87daf92890acab859f7eaa4320333176" 829checksum = "0382880606dff6d15c9476c416d18690b72742aa7b605bb6dd6ec9030fbf07eb"
829dependencies = [ 830dependencies = [
830 "scopeguard", 831 "scopeguard",
831] 832]
@@ -897,19 +898,9 @@ dependencies = [
897 898
898[[package]] 899[[package]]
899name = "memchr" 900name = "memchr"
900version = "2.3.4" 901version = "2.4.0"
901source = "registry+https://github.com/rust-lang/crates.io-index" 902source = "registry+https://github.com/rust-lang/crates.io-index"
902checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525" 903checksum = "b16bd47d9e329435e309c58469fe0791c2d0d1ba96ec0954152a5ae2b04387dc"
903
904[[package]]
905name = "memmap"
906version = "0.7.0"
907source = "registry+https://github.com/rust-lang/crates.io-index"
908checksum = "6585fd95e7bb50d6cc31e20d4cf9afb4e2ba16c5846fc76793f11218da9c475b"
909dependencies = [
910 "libc",
911 "winapi",
912]
913 904
914[[package]] 905[[package]]
915name = "memmap2" 906name = "memmap2"
@@ -1108,9 +1099,9 @@ checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
1108 1099
1109[[package]] 1100[[package]]
1110name = "perf-event" 1101name = "perf-event"
1111version = "0.4.6" 1102version = "0.4.7"
1112source = "registry+https://github.com/rust-lang/crates.io-index" 1103source = "registry+https://github.com/rust-lang/crates.io-index"
1113checksum = "b7a1c2678a77d65edf773bd900f5b87f0944ac3421949842a2c6a4efe42d6c66" 1104checksum = "5396562cd2eaa828445d6d34258ae21ee1eb9d40fe626ca7f51c8dccb4af9d66"
1114dependencies = [ 1105dependencies = [
1115 "libc", 1106 "libc",
1116 "perf-event-open-sys", 1107 "perf-event-open-sys",
@@ -1173,7 +1164,7 @@ dependencies = [
1173 "crossbeam-channel", 1164 "crossbeam-channel",
1174 "jod-thread", 1165 "jod-thread",
1175 "log", 1166 "log",
1176 "memmap", 1167 "memmap2",
1177 "object", 1168 "object",
1178 "profile", 1169 "profile",
1179 "serde", 1170 "serde",
@@ -1294,18 +1285,18 @@ dependencies = [
1294 1285
1295[[package]] 1286[[package]]
1296name = "redox_syscall" 1287name = "redox_syscall"
1297version = "0.2.6" 1288version = "0.2.7"
1298source = "registry+https://github.com/rust-lang/crates.io-index" 1289source = "registry+https://github.com/rust-lang/crates.io-index"
1299checksum = "8270314b5ccceb518e7e578952f0b72b88222d02e8f77f5ecf7abbb673539041" 1290checksum = "85dd92e586f7355c633911e11f77f3d12f04b1b1bd76a198bd34ae3af8341ef2"
1300dependencies = [ 1291dependencies = [
1301 "bitflags", 1292 "bitflags",
1302] 1293]
1303 1294
1304[[package]] 1295[[package]]
1305name = "regex" 1296name = "regex"
1306version = "1.4.6" 1297version = "1.5.3"
1307source = "registry+https://github.com/rust-lang/crates.io-index" 1298source = "registry+https://github.com/rust-lang/crates.io-index"
1308checksum = "2a26af418b574bd56588335b3a3659a65725d4e636eb1016c2f9e3b38c7cc759" 1299checksum = "ce5f1ceb7f74abbce32601642fcf8e8508a8a8991e0621c7d750295b9095702b"
1309dependencies = [ 1300dependencies = [
1310 "regex-syntax", 1301 "regex-syntax",
1311] 1302]
@@ -1322,9 +1313,9 @@ dependencies = [
1322 1313
1323[[package]] 1314[[package]]
1324name = "regex-syntax" 1315name = "regex-syntax"
1325version = "0.6.23" 1316version = "0.6.25"
1326source = "registry+https://github.com/rust-lang/crates.io-index" 1317source = "registry+https://github.com/rust-lang/crates.io-index"
1327checksum = "24d5f089152e60f62d28b835fbff2cd2e8dc0baf1ac13343bef92ab7eed84548" 1318checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
1328 1319
1329[[package]] 1320[[package]]
1330name = "rowan" 1321name = "rowan"
@@ -1420,9 +1411,9 @@ checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"
1420 1411
1421[[package]] 1412[[package]]
1422name = "salsa" 1413name = "salsa"
1423version = "0.16.0" 1414version = "0.16.1"
1424source = "registry+https://github.com/rust-lang/crates.io-index" 1415source = "registry+https://github.com/rust-lang/crates.io-index"
1425checksum = "d8fadca2ab5de17acf66d744f4888049ca8f1bb9b8a1ab8afd9d032cc959c5dc" 1416checksum = "4b84d9f96071f3f3be0dc818eae3327625d8ebc95b58da37d6850724f31d3403"
1426dependencies = [ 1417dependencies = [
1427 "crossbeam-utils", 1418 "crossbeam-utils",
1428 "indexmap", 1419 "indexmap",
@@ -1565,9 +1556,9 @@ dependencies = [
1565 1556
1566[[package]] 1557[[package]]
1567name = "snap" 1558name = "snap"
1568version = "1.0.4" 1559version = "1.0.5"
1569source = "registry+https://github.com/rust-lang/crates.io-index" 1560source = "registry+https://github.com/rust-lang/crates.io-index"
1570checksum = "dc725476a1398f0480d56cd0ad381f6f32acf2642704456f8f59a35df464b59a" 1561checksum = "45456094d1983e2ee2a18fdfebce3189fa451699d0502cb8e3b49dba5ba41451"
1571 1562
1572[[package]] 1563[[package]]
1573name = "stdx" 1564name = "stdx"
@@ -1582,9 +1573,9 @@ dependencies = [
1582 1573
1583[[package]] 1574[[package]]
1584name = "syn" 1575name = "syn"
1585version = "1.0.70" 1576version = "1.0.71"
1586source = "registry+https://github.com/rust-lang/crates.io-index" 1577source = "registry+https://github.com/rust-lang/crates.io-index"
1587checksum = "b9505f307c872bab8eb46f77ae357c8eba1fdacead58ee5a850116b1d7f82883" 1578checksum = "ad184cc9470f9117b2ac6817bfe297307418819ba40552f9b3846f05c33d5373"
1588dependencies = [ 1579dependencies = [
1589 "proc-macro2", 1580 "proc-macro2",
1590 "quote", 1581 "quote",
@@ -1702,9 +1693,9 @@ dependencies = [
1702 1693
1703[[package]] 1694[[package]]
1704name = "tracing" 1695name = "tracing"
1705version = "0.1.25" 1696version = "0.1.26"
1706source = "registry+https://github.com/rust-lang/crates.io-index" 1697source = "registry+https://github.com/rust-lang/crates.io-index"
1707checksum = "01ebdc2bb4498ab1ab5f5b73c5803825e60199229ccba0698170e3be0e7f959f" 1698checksum = "09adeb8c97449311ccd28a427f96fb563e7fd31aabf994189879d9da2394b89d"
1708dependencies = [ 1699dependencies = [
1709 "cfg-if", 1700 "cfg-if",
1710 "pin-project-lite", 1701 "pin-project-lite",
@@ -1725,9 +1716,9 @@ dependencies = [
1725 1716
1726[[package]] 1717[[package]]
1727name = "tracing-core" 1718name = "tracing-core"
1728version = "0.1.17" 1719version = "0.1.18"
1729source = "registry+https://github.com/rust-lang/crates.io-index" 1720source = "registry+https://github.com/rust-lang/crates.io-index"
1730checksum = "f50de3927f93d202783f4513cda820ab47ef17f624b03c096e86ef00c67e6b5f" 1721checksum = "a9ff14f98b1a4b289c6248a023c1c2fa1491062964e9fed67ab29c4e4da4a052"
1731dependencies = [ 1722dependencies = [
1732 "lazy_static", 1723 "lazy_static",
1733] 1724]
@@ -1755,9 +1746,9 @@ dependencies = [
1755 1746
1756[[package]] 1747[[package]]
1757name = "tracing-subscriber" 1748name = "tracing-subscriber"
1758version = "0.2.17" 1749version = "0.2.18"
1759source = "registry+https://github.com/rust-lang/crates.io-index" 1750source = "registry+https://github.com/rust-lang/crates.io-index"
1760checksum = "705096c6f83bf68ea5d357a6aa01829ddbdac531b357b45abeca842938085baa" 1751checksum = "aa5553bf0883ba7c9cbe493b085c29926bd41b66afc31ff72cf17ff4fb60dcd5"
1761dependencies = [ 1752dependencies = [
1762 "ansi_term", 1753 "ansi_term",
1763 "chrono", 1754 "chrono",
@@ -1844,9 +1835,9 @@ checksum = "bb0d2e7be6ae3a5fa87eed5fb451aff96f2573d2694942e40543ae0bbe19c796"
1844 1835
1845[[package]] 1836[[package]]
1846name = "unicode-xid" 1837name = "unicode-xid"
1847version = "0.2.1" 1838version = "0.2.2"
1848source = "registry+https://github.com/rust-lang/crates.io-index" 1839source = "registry+https://github.com/rust-lang/crates.io-index"
1849checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" 1840checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
1850 1841
1851[[package]] 1842[[package]]
1852name = "url" 1843name = "url"
@@ -1959,18 +1950,18 @@ dependencies = [
1959 1950
1960[[package]] 1951[[package]]
1961name = "xshell" 1952name = "xshell"
1962version = "0.1.9" 1953version = "0.1.10"
1963source = "registry+https://github.com/rust-lang/crates.io-index" 1954source = "registry+https://github.com/rust-lang/crates.io-index"
1964checksum = "6f18102278453c8f70ea5c514ac78cb4c73a0ef72a8273d17094b52f9584c0c1" 1955checksum = "aa25217c682f9f991d7889238a99e65eb8431c266d36e0f4e850a73773415473"
1965dependencies = [ 1956dependencies = [
1966 "xshell-macros", 1957 "xshell-macros",
1967] 1958]
1968 1959
1969[[package]] 1960[[package]]
1970name = "xshell-macros" 1961name = "xshell-macros"
1971version = "0.1.9" 1962version = "0.1.10"
1972source = "registry+https://github.com/rust-lang/crates.io-index" 1963source = "registry+https://github.com/rust-lang/crates.io-index"
1973checksum = "6093c460064572007f885facc70bb0ca5e40a83ea7ff8b16c1abbee56fd2e767" 1964checksum = "4404d53d2113af4fa31c58326eb7b37d6d7bf11ba87520787cddeaff45385c72"
1974 1965
1975[[package]] 1966[[package]]
1976name = "xtask" 1967name = "xtask"
diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs
index 01a4d205f..508ac37c2 100644
--- a/crates/hir/src/display.rs
+++ b/crates/hir/src/display.rs
@@ -170,7 +170,7 @@ impl HirDisplay for Field {
170 fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { 170 fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
171 write_visibility(self.parent.module(f.db).id, self.visibility(f.db), f)?; 171 write_visibility(self.parent.module(f.db).id, self.visibility(f.db), f)?;
172 write!(f, "{}: ", self.name(f.db))?; 172 write!(f, "{}: ", self.name(f.db))?;
173 self.signature_ty(f.db).hir_fmt(f) 173 self.ty(f.db).hir_fmt(f)
174 } 174 }
175} 175}
176 176
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 0acfa582a..6fcc58f61 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -509,7 +509,7 @@ impl Field {
509 /// placeholder types for type parameters). This is good for showing 509 /// placeholder types for type parameters). This is good for showing
510 /// signature help, but not so good to actually get the type of the field 510 /// signature help, but not so good to actually get the type of the field
511 /// when you actually have a variable of the struct. 511 /// when you actually have a variable of the struct.
512 pub fn signature_ty(&self, db: &dyn HirDatabase) -> Type { 512 pub fn ty(&self, db: &dyn HirDatabase) -> Type {
513 let var_id = self.parent.into(); 513 let var_id = self.parent.into();
514 let generic_def_id: GenericDefId = match self.parent { 514 let generic_def_id: GenericDefId = match self.parent {
515 VariantDef::Struct(it) => it.id.into(), 515 VariantDef::Struct(it) => it.id.into(),
@@ -1744,6 +1744,10 @@ impl Type {
1744 } 1744 }
1745 } 1745 }
1746 1746
1747 pub fn strip_references(&self) -> Type {
1748 self.derived(self.ty.strip_references().clone())
1749 }
1750
1747 pub fn is_unknown(&self) -> bool { 1751 pub fn is_unknown(&self) -> bool {
1748 self.ty.is_unknown() 1752 self.ty.is_unknown()
1749 } 1753 }
@@ -1984,7 +1988,7 @@ impl Type {
1984 None 1988 None
1985 } 1989 }
1986 1990
1987 pub fn type_parameters(&self) -> impl Iterator<Item = Type> + '_ { 1991 pub fn type_arguments(&self) -> impl Iterator<Item = Type> + '_ {
1988 self.ty 1992 self.ty
1989 .strip_references() 1993 .strip_references()
1990 .as_adt() 1994 .as_adt()
diff --git a/crates/hir_def/src/attr.rs b/crates/hir_def/src/attr.rs
index d9294d93a..0171d8a92 100644
--- a/crates/hir_def/src/attr.rs
+++ b/crates/hir_def/src/attr.rs
@@ -484,10 +484,10 @@ impl AttrsWithOwner {
484 let mut buf = String::new(); 484 let mut buf = String::new();
485 let mut mapping = Vec::new(); 485 let mut mapping = Vec::new();
486 for (doc, idx) in docs { 486 for (doc, idx) in docs {
487 // str::lines doesn't yield anything for the empty string
488 if !doc.is_empty() { 487 if !doc.is_empty() {
489 for line in doc.split('\n') { 488 let mut base_offset = 0;
490 let line = line.trim_end(); 489 for raw_line in doc.split('\n') {
490 let line = raw_line.trim_end();
491 let line_len = line.len(); 491 let line_len = line.len();
492 let (offset, line) = match line.char_indices().nth(indent) { 492 let (offset, line) = match line.char_indices().nth(indent) {
493 Some((offset, _)) => (offset, &line[offset..]), 493 Some((offset, _)) => (offset, &line[offset..]),
@@ -498,9 +498,13 @@ impl AttrsWithOwner {
498 mapping.push(( 498 mapping.push((
499 TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?), 499 TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?),
500 idx, 500 idx,
501 TextRange::new(offset.try_into().ok()?, line_len.try_into().ok()?), 501 TextRange::at(
502 (base_offset + offset).try_into().ok()?,
503 line_len.try_into().ok()?,
504 ),
502 )); 505 ));
503 buf.push('\n'); 506 buf.push('\n');
507 base_offset += raw_line.len() + 1;
504 } 508 }
505 } else { 509 } else {
506 buf.push('\n'); 510 buf.push('\n');
diff --git a/crates/hir_def/src/find_path.rs b/crates/hir_def/src/find_path.rs
index dc3f2908f..c06a37294 100644
--- a/crates/hir_def/src/find_path.rs
+++ b/crates/hir_def/src/find_path.rs
@@ -130,7 +130,8 @@ fn find_path_inner(
130 } 130 }
131 131
132 // - if the item is the crate root of a dependency crate, return the name from the extern prelude 132 // - if the item is the crate root of a dependency crate, return the name from the extern prelude
133 for (name, def_id) in root.def_map(db).extern_prelude() { 133 let root_def_map = root.def_map(db);
134 for (name, def_id) in root_def_map.extern_prelude() {
134 if item == ItemInNs::Types(*def_id) { 135 if item == ItemInNs::Types(*def_id) {
135 let name = scope_name.unwrap_or_else(|| name.clone()); 136 let name = scope_name.unwrap_or_else(|| name.clone());
136 return Some(ModPath::from_segments(PathKind::Plain, vec![name])); 137 return Some(ModPath::from_segments(PathKind::Plain, vec![name]));
@@ -138,7 +139,8 @@ fn find_path_inner(
138 } 139 }
139 140
140 // - if the item is in the prelude, return the name from there 141 // - if the item is in the prelude, return the name from there
141 if let Some(prelude_module) = def_map.prelude() { 142 if let Some(prelude_module) = root_def_map.prelude() {
143 // Preludes in block DefMaps are ignored, only the crate DefMap is searched
142 let prelude_def_map = prelude_module.def_map(db); 144 let prelude_def_map = prelude_module.def_map(db);
143 let prelude_scope: &crate::item_scope::ItemScope = 145 let prelude_scope: &crate::item_scope::ItemScope =
144 &prelude_def_map[prelude_module.local_id].scope; 146 &prelude_def_map[prelude_module.local_id].scope;
@@ -1057,4 +1059,28 @@ fn f() {
1057 "dep", 1059 "dep",
1058 ); 1060 );
1059 } 1061 }
1062
1063 #[test]
1064 fn prelude_with_inner_items() {
1065 check_found_path(
1066 r#"
1067//- /main.rs crate:main deps:std
1068fn f() {
1069 fn inner() {}
1070 $0
1071}
1072//- /std.rs crate:std
1073pub mod prelude {
1074 pub enum Option { None }
1075 pub use Option::*;
1076}
1077#[prelude_import]
1078pub use prelude::*;
1079 "#,
1080 "None",
1081 "None",
1082 "None",
1083 "None",
1084 );
1085 }
1060} 1086}
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs
index 1e4b0cc19..3e9abd8a1 100644
--- a/crates/hir_expand/src/db.rs
+++ b/crates/hir_expand/src/db.rs
@@ -3,14 +3,14 @@
3use std::sync::Arc; 3use std::sync::Arc;
4 4
5use base_db::{salsa, SourceDatabase}; 5use base_db::{salsa, SourceDatabase};
6use mbe::{ExpandError, ExpandResult, MacroDef, MacroRules}; 6use mbe::{ExpandError, ExpandResult};
7use parser::FragmentKind; 7use parser::FragmentKind;
8use syntax::{ 8use syntax::{
9 algo::diff, 9 algo::diff,
10 ast::{MacroStmts, NameOwner}, 10 ast::{self, NameOwner},
11 AstNode, GreenNode, Parse, 11 AstNode, GreenNode, Parse,
12 SyntaxKind::*, 12 SyntaxKind::*,
13 SyntaxNode, 13 SyntaxNode, SyntaxToken,
14}; 14};
15 15
16use crate::{ 16use crate::{
@@ -27,23 +27,28 @@ const TOKEN_LIMIT: usize = 524288;
27 27
28#[derive(Debug, Clone, Eq, PartialEq)] 28#[derive(Debug, Clone, Eq, PartialEq)]
29pub enum TokenExpander { 29pub enum TokenExpander {
30 MacroRules(mbe::MacroRules), 30 /// Old-style `macro_rules`.
31 MacroDef(mbe::MacroDef), 31 MacroRules { mac: mbe::MacroRules, def_site_token_map: mbe::TokenMap },
32 /// AKA macros 2.0.
33 MacroDef { mac: mbe::MacroDef, def_site_token_map: mbe::TokenMap },
34 /// Stuff like `line!` and `file!`.
32 Builtin(BuiltinFnLikeExpander), 35 Builtin(BuiltinFnLikeExpander),
36 /// `derive(Copy)` and such.
33 BuiltinDerive(BuiltinDeriveExpander), 37 BuiltinDerive(BuiltinDeriveExpander),
38 /// The thing we love the most here in rust-analyzer -- procedural macros.
34 ProcMacro(ProcMacroExpander), 39 ProcMacro(ProcMacroExpander),
35} 40}
36 41
37impl TokenExpander { 42impl TokenExpander {
38 pub fn expand( 43 fn expand(
39 &self, 44 &self,
40 db: &dyn AstDatabase, 45 db: &dyn AstDatabase,
41 id: LazyMacroId, 46 id: LazyMacroId,
42 tt: &tt::Subtree, 47 tt: &tt::Subtree,
43 ) -> mbe::ExpandResult<tt::Subtree> { 48 ) -> mbe::ExpandResult<tt::Subtree> {
44 match self { 49 match self {
45 TokenExpander::MacroRules(it) => it.expand(tt), 50 TokenExpander::MacroRules { mac, .. } => mac.expand(tt),
46 TokenExpander::MacroDef(it) => it.expand(tt), 51 TokenExpander::MacroDef { mac, .. } => mac.expand(tt),
47 TokenExpander::Builtin(it) => it.expand(db, id, tt), 52 TokenExpander::Builtin(it) => it.expand(db, id, tt),
48 // FIXME switch these to ExpandResult as well 53 // FIXME switch these to ExpandResult as well
49 TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt).into(), 54 TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt).into(),
@@ -56,23 +61,23 @@ impl TokenExpander {
56 } 61 }
57 } 62 }
58 63
59 pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { 64 pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
60 match self { 65 match self {
61 TokenExpander::MacroRules(it) => it.map_id_down(id), 66 TokenExpander::MacroRules { mac, .. } => mac.map_id_down(id),
62 TokenExpander::MacroDef(it) => it.map_id_down(id), 67 TokenExpander::MacroDef { mac, .. } => mac.map_id_down(id),
63 TokenExpander::Builtin(..) => id, 68 TokenExpander::Builtin(..)
64 TokenExpander::BuiltinDerive(..) => id, 69 | TokenExpander::BuiltinDerive(..)
65 TokenExpander::ProcMacro(..) => id, 70 | TokenExpander::ProcMacro(..) => id,
66 } 71 }
67 } 72 }
68 73
69 pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) { 74 pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
70 match self { 75 match self {
71 TokenExpander::MacroRules(it) => it.map_id_up(id), 76 TokenExpander::MacroRules { mac, .. } => mac.map_id_up(id),
72 TokenExpander::MacroDef(it) => it.map_id_up(id), 77 TokenExpander::MacroDef { mac, .. } => mac.map_id_up(id),
73 TokenExpander::Builtin(..) => (id, mbe::Origin::Call), 78 TokenExpander::Builtin(..)
74 TokenExpander::BuiltinDerive(..) => (id, mbe::Origin::Call), 79 | TokenExpander::BuiltinDerive(..)
75 TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call), 80 | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
76 } 81 }
77 } 82 }
78} 83}
@@ -82,28 +87,48 @@ impl TokenExpander {
82pub trait AstDatabase: SourceDatabase { 87pub trait AstDatabase: SourceDatabase {
83 fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>; 88 fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
84 89
90 /// Main public API -- parsis a hir file, not caring whether it's a real
91 /// file or a macro expansion.
85 #[salsa::transparent] 92 #[salsa::transparent]
86 fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>; 93 fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>;
87 94 /// Implementation for the macro case.
88 #[salsa::interned]
89 fn intern_macro(&self, macro_call: MacroCallLoc) -> LazyMacroId;
90 fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
91 #[salsa::transparent]
92 fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>;
93 fn macro_def(&self, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>>;
94 fn parse_macro_expansion( 95 fn parse_macro_expansion(
95 &self, 96 &self,
96 macro_file: MacroFile, 97 macro_file: MacroFile,
97 ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>>; 98 ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>>;
98 fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>>;
99
100 /// Firewall query that returns the error from the `macro_expand` query.
101 fn macro_expand_error(&self, macro_call: MacroCallId) -> Option<ExpandError>;
102 99
100 /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
101 /// reason why we use salsa at all.
102 ///
103 /// We encode macro definitions into ids of macro calls, this what allows us
104 /// to be incremental.
105 #[salsa::interned]
106 fn intern_macro(&self, macro_call: MacroCallLoc) -> LazyMacroId;
107 /// Certain built-in macros are eager (`format!(concat!("file: ", file!(), "{}"")), 92`).
108 /// For them, we actually want to encode the whole token tree as an argument.
103 #[salsa::interned] 109 #[salsa::interned]
104 fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId; 110 fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId;
105 111
112 /// Lowers syntactic macro call to a token tree representation.
113 #[salsa::transparent]
114 fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>;
115 /// Extracts syntax node, corresponding to a macro call. That's a firewall
116 /// query, only typing in the macro call itself changes the returned
117 /// subtree.
118 fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
119 /// Gets the expander for this macro. This compiles declarative macros, and
120 /// just fetches procedural ones.
121 fn macro_def(&self, id: MacroDefId) -> Option<Arc<TokenExpander>>;
122
123 /// Expand macro call to a token tree. This query is LRUed (we keep 128 or so results in memory)
124 fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>>;
125 /// Special case of the previous query for procedural macros. We can't LRU
126 /// proc macros, since they are not deterministic in general, and
127 /// non-determinism breaks salsa in a very, very, very bad way. @edwin0cheng
128 /// heroically debugged this once!
106 fn expand_proc_macro(&self, call: MacroCallId) -> Result<tt::Subtree, mbe::ExpandError>; 129 fn expand_proc_macro(&self, call: MacroCallId) -> Result<tt::Subtree, mbe::ExpandError>;
130 /// Firewall query that returns the error from the `macro_expand` query.
131 fn macro_expand_error(&self, macro_call: MacroCallId) -> Option<ExpandError>;
107 132
108 fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>; 133 fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
109} 134}
@@ -115,36 +140,159 @@ pub trait AstDatabase: SourceDatabase {
115pub fn expand_hypothetical( 140pub fn expand_hypothetical(
116 db: &dyn AstDatabase, 141 db: &dyn AstDatabase,
117 actual_macro_call: MacroCallId, 142 actual_macro_call: MacroCallId,
118 hypothetical_args: &syntax::ast::TokenTree, 143 hypothetical_args: &ast::TokenTree,
119 token_to_map: syntax::SyntaxToken, 144 token_to_map: SyntaxToken,
120) -> Option<(SyntaxNode, syntax::SyntaxToken)> { 145) -> Option<(SyntaxNode, SyntaxToken)> {
121 let macro_file = MacroFile { macro_call_id: actual_macro_call };
122 let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax()); 146 let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax());
123 let range = 147 let range =
124 token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?; 148 token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?;
125 let token_id = tmap_1.token_by_range(range)?; 149 let token_id = tmap_1.token_by_range(range)?;
126 let macro_def = expander(db, actual_macro_call)?; 150
151 let lazy_id = match actual_macro_call {
152 MacroCallId::LazyMacro(id) => id,
153 MacroCallId::EagerMacro(_) => return None,
154 };
155
156 let macro_def = {
157 let loc = db.lookup_intern_macro(lazy_id);
158 db.macro_def(loc.def)?
159 };
160
161 let hypothetical_expansion = macro_def.expand(db, lazy_id, &tt);
162
163 let fragment_kind = to_fragment_kind(db, actual_macro_call);
164
127 let (node, tmap_2) = 165 let (node, tmap_2) =
128 parse_macro_with_arg(db, macro_file, Some(std::sync::Arc::new((tt, tmap_1)))).value?; 166 mbe::token_tree_to_syntax_node(&hypothetical_expansion.value, fragment_kind).ok()?;
129 let token_id = macro_def.0.map_id_down(token_id); 167
168 let token_id = macro_def.map_id_down(token_id);
130 let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?; 169 let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?;
131 let token = node.syntax_node().covering_element(range).into_token()?; 170 let token = node.syntax_node().covering_element(range).into_token()?;
132 Some((node.syntax_node(), token)) 171 Some((node.syntax_node(), token))
133} 172}
134 173
135fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { 174fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
136 let map = 175 let map = db.parse_or_expand(file_id).map(|it| AstIdMap::from_source(&it)).unwrap_or_default();
137 db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it));
138 Arc::new(map) 176 Arc::new(map)
139} 177}
140 178
141fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> { 179fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
180 match file_id.0 {
181 HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
182 HirFileIdRepr::MacroFile(macro_file) => {
183 db.parse_macro_expansion(macro_file).value.map(|(it, _)| it.syntax_node())
184 }
185 }
186}
187
188fn parse_macro_expansion(
189 db: &dyn AstDatabase,
190 macro_file: MacroFile,
191) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> {
192 let _p = profile::span("parse_macro_expansion");
193 let result = db.macro_expand(macro_file.macro_call_id);
194
195 if let Some(err) = &result.err {
196 // Note:
197             // Ultimately we would like every parse_macro call to succeed,
198             // so that the following warning is never logged.
199 match macro_file.macro_call_id {
200 MacroCallId::LazyMacro(id) => {
201 let loc: MacroCallLoc = db.lookup_intern_macro(id);
202 let node = loc.kind.node(db);
203
204 // collect parent information for warning log
205 let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| {
206 it.file_id.call_node(db)
207 })
208 .map(|n| format!("{:#}", n.value))
209 .collect::<Vec<_>>()
210 .join("\n");
211
212 log::warn!(
213 "fail on macro_parse: (reason: {:?} macro_call: {:#}) parents: {}",
214 err,
215 node.value,
216 parents
217 );
218 }
219 _ => {
220 log::warn!("fail on macro_parse: (reason: {:?})", err);
221 }
222 }
223 }
224 let tt = match result.value {
225 Some(tt) => tt,
226 None => return ExpandResult { value: None, err: result.err },
227 };
228
229 let fragment_kind = to_fragment_kind(db, macro_file.macro_call_id);
230
231 log::debug!("expanded = {}", tt.as_debug_string());
232 log::debug!("kind = {:?}", fragment_kind);
233
234 let (parse, rev_token_map) = match mbe::token_tree_to_syntax_node(&tt, fragment_kind) {
235 Ok(it) => it,
236 Err(err) => {
237 log::debug!(
238                 "failed to parse expansion to {:?} = {}",
239 fragment_kind,
240 tt.as_debug_string()
241 );
242 return ExpandResult::only_err(err);
243 }
244 };
245
246 match result.err {
247 Some(err) => {
248 // Safety check for recursive identity macro.
249 let node = parse.syntax_node();
250 let file: HirFileId = macro_file.into();
251 let call_node = match file.call_node(db) {
252 Some(it) => it,
253 None => {
254 return ExpandResult::only_err(err);
255 }
256 };
257 if is_self_replicating(&node, &call_node.value) {
258 return ExpandResult::only_err(err);
259 } else {
260 ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) }
261 }
262 }
263 None => {
264 log::debug!("parse = {:?}", parse.syntax_node().kind());
265 ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None }
266 }
267 }
268}
269
270fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
271 let arg = db.macro_arg_text(id)?;
272 let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg));
273 Some(Arc::new((tt, tmap)))
274}
275
276fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
277 let id = match id {
278 MacroCallId::LazyMacro(id) => id,
279 MacroCallId::EagerMacro(_id) => {
280 // FIXME: support macro_arg for eager macro
281 return None;
282 }
283 };
284 let loc = db.lookup_intern_macro(id);
285 let arg = loc.kind.arg(db)?;
286 Some(arg.green())
287}
288
289fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<TokenExpander>> {
142 match id.kind { 290 match id.kind {
143 MacroDefKind::Declarative(ast_id) => match ast_id.to_node(db) { 291 MacroDefKind::Declarative(ast_id) => match ast_id.to_node(db) {
144 syntax::ast::Macro::MacroRules(macro_rules) => { 292 ast::Macro::MacroRules(macro_rules) => {
145 let arg = macro_rules.token_tree()?; 293 let arg = macro_rules.token_tree()?;
146 let (tt, tmap) = mbe::ast_to_token_tree(&arg); 294 let (tt, def_site_token_map) = mbe::ast_to_token_tree(&arg);
147 let rules = match MacroRules::parse(&tt) { 295 let mac = match mbe::MacroRules::parse(&tt) {
148 Ok(it) => it, 296 Ok(it) => it,
149 Err(err) => { 297 Err(err) => {
150 let name = macro_rules.name().map(|n| n.to_string()).unwrap_or_default(); 298 let name = macro_rules.name().map(|n| n.to_string()).unwrap_or_default();
@@ -152,12 +300,12 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander,
152 return None; 300 return None;
153 } 301 }
154 }; 302 };
155 Some(Arc::new((TokenExpander::MacroRules(rules), tmap))) 303 Some(Arc::new(TokenExpander::MacroRules { mac, def_site_token_map }))
156 } 304 }
157 syntax::ast::Macro::MacroDef(macro_def) => { 305 ast::Macro::MacroDef(macro_def) => {
158 let arg = macro_def.body()?; 306 let arg = macro_def.body()?;
159 let (tt, tmap) = mbe::ast_to_token_tree(&arg); 307 let (tt, def_site_token_map) = mbe::ast_to_token_tree(&arg);
160 let rules = match MacroDef::parse(&tt) { 308 let mac = match mbe::MacroDef::parse(&tt) {
161 Ok(it) => it, 309 Ok(it) => it,
162 Err(err) => { 310 Err(err) => {
163 let name = macro_def.name().map(|n| n.to_string()).unwrap_or_default(); 311 let name = macro_def.name().map(|n| n.to_string()).unwrap_or_default();
@@ -165,41 +313,18 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander,
165 return None; 313 return None;
166 } 314 }
167 }; 315 };
168 Some(Arc::new((TokenExpander::MacroDef(rules), tmap))) 316 Some(Arc::new(TokenExpander::MacroDef { mac, def_site_token_map }))
169 } 317 }
170 }, 318 },
171 MacroDefKind::BuiltIn(expander, _) => { 319 MacroDefKind::BuiltIn(expander, _) => Some(Arc::new(TokenExpander::Builtin(expander))),
172 Some(Arc::new((TokenExpander::Builtin(expander), mbe::TokenMap::default())))
173 }
174 MacroDefKind::BuiltInDerive(expander, _) => { 320 MacroDefKind::BuiltInDerive(expander, _) => {
175 Some(Arc::new((TokenExpander::BuiltinDerive(expander), mbe::TokenMap::default()))) 321 Some(Arc::new(TokenExpander::BuiltinDerive(expander)))
176 } 322 }
177 MacroDefKind::BuiltInEager(..) => None, 323 MacroDefKind::BuiltInEager(..) => None,
178 MacroDefKind::ProcMacro(expander, ..) => { 324 MacroDefKind::ProcMacro(expander, ..) => Some(Arc::new(TokenExpander::ProcMacro(expander))),
179 Some(Arc::new((TokenExpander::ProcMacro(expander), mbe::TokenMap::default())))
180 }
181 } 325 }
182} 326}
183 327
184fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
185 let id = match id {
186 MacroCallId::LazyMacro(id) => id,
187 MacroCallId::EagerMacro(_id) => {
188 // FIXME: support macro_arg for eager macro
189 return None;
190 }
191 };
192 let loc = db.lookup_intern_macro(id);
193 let arg = loc.kind.arg(db)?;
194 Some(arg.green())
195}
196
197fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
198 let arg = db.macro_arg_text(id)?;
199 let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg));
200 Some(Arc::new((tt, tmap)))
201}
202
203fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>> { 328fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>> {
204 macro_expand_with_arg(db, id, None) 329 macro_expand_with_arg(db, id, None)
205} 330}
@@ -208,19 +333,6 @@ fn macro_expand_error(db: &dyn AstDatabase, macro_call: MacroCallId) -> Option<E
208 db.macro_expand(macro_call).err 333 db.macro_expand(macro_call).err
209} 334}
210 335
211fn expander(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> {
212 let lazy_id = match id {
213 MacroCallId::LazyMacro(id) => id,
214 MacroCallId::EagerMacro(_id) => {
215 return None;
216 }
217 };
218
219 let loc = db.lookup_intern_macro(lazy_id);
220 let macro_rules = db.macro_def(loc.def)?;
221 Some(macro_rules)
222}
223
224fn macro_expand_with_arg( 336fn macro_expand_with_arg(
225 db: &dyn AstDatabase, 337 db: &dyn AstDatabase,
226 id: MacroCallId, 338 id: MacroCallId,
@@ -254,7 +366,7 @@ fn macro_expand_with_arg(
254 Some(it) => it, 366 Some(it) => it,
255 None => return ExpandResult::str_err("Fail to find macro definition".into()), 367 None => return ExpandResult::str_err("Fail to find macro definition".into()),
256 }; 368 };
257 let ExpandResult { value: tt, err } = macro_rules.0.expand(db, lazy_id, &macro_arg.0); 369 let ExpandResult { value: tt, err } = macro_rules.expand(db, lazy_id, &macro_arg.0);
258 // Set a hard limit for the expanded tt 370 // Set a hard limit for the expanded tt
259 let count = tt.count(); 371 let count = tt.count();
260 if count > TOKEN_LIMIT { 372 if count > TOKEN_LIMIT {
@@ -294,116 +406,11 @@ fn expand_proc_macro(
294 expander.expand(db, loc.krate, &macro_arg.0) 406 expander.expand(db, loc.krate, &macro_arg.0)
295} 407}
296 408
297fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
298 match file_id.0 {
299 HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
300 HirFileIdRepr::MacroFile(macro_file) => {
301 db.parse_macro_expansion(macro_file).value.map(|(it, _)| it.syntax_node())
302 }
303 }
304}
305
306fn parse_macro_expansion(
307 db: &dyn AstDatabase,
308 macro_file: MacroFile,
309) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> {
310 parse_macro_with_arg(db, macro_file, None)
311}
312
313fn parse_macro_with_arg(
314 db: &dyn AstDatabase,
315 macro_file: MacroFile,
316 arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>,
317) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> {
318 let macro_call_id = macro_file.macro_call_id;
319 let result = if let Some(arg) = arg {
320 macro_expand_with_arg(db, macro_call_id, Some(arg))
321 } else {
322 db.macro_expand(macro_call_id)
323 };
324
325 let _p = profile::span("parse_macro_expansion");
326
327 if let Some(err) = &result.err {
328 // Note:
329         // Ultimately we would like every parse_macro call to succeed,
330         // so that the following warning is never logged.
331 match macro_call_id {
332 MacroCallId::LazyMacro(id) => {
333 let loc: MacroCallLoc = db.lookup_intern_macro(id);
334 let node = loc.kind.node(db);
335
336 // collect parent information for warning log
337 let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| {
338 it.file_id.call_node(db)
339 })
340 .map(|n| format!("{:#}", n.value))
341 .collect::<Vec<_>>()
342 .join("\n");
343
344 log::warn!(
345 "fail on macro_parse: (reason: {:?} macro_call: {:#}) parents: {}",
346 err,
347 node.value,
348 parents
349 );
350 }
351 _ => {
352 log::warn!("fail on macro_parse: (reason: {:?})", err);
353 }
354 }
355 }
356 let tt = match result.value {
357 Some(tt) => tt,
358 None => return ExpandResult { value: None, err: result.err },
359 };
360
361 let fragment_kind = to_fragment_kind(db, macro_call_id);
362
363 log::debug!("expanded = {}", tt.as_debug_string());
364 log::debug!("kind = {:?}", fragment_kind);
365
366 let (parse, rev_token_map) = match mbe::token_tree_to_syntax_node(&tt, fragment_kind) {
367 Ok(it) => it,
368 Err(err) => {
369 log::debug!(
370                 "failed to parse expansion to {:?} = {}",
371 fragment_kind,
372 tt.as_debug_string()
373 );
374 return ExpandResult::only_err(err);
375 }
376 };
377
378 match result.err {
379 Some(err) => {
380 // Safety check for recursive identity macro.
381 let node = parse.syntax_node();
382 let file: HirFileId = macro_file.into();
383 let call_node = match file.call_node(db) {
384 Some(it) => it,
385 None => {
386 return ExpandResult::only_err(err);
387 }
388 };
389 if is_self_replicating(&node, &call_node.value) {
390 return ExpandResult::only_err(err);
391 } else {
392 ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) }
393 }
394 }
395 None => {
396 log::debug!("parse = {:?}", parse.syntax_node().kind());
397 ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None }
398 }
399 }
400}
401
402fn is_self_replicating(from: &SyntaxNode, to: &SyntaxNode) -> bool { 409fn is_self_replicating(from: &SyntaxNode, to: &SyntaxNode) -> bool {
403 if diff(from, to).is_empty() { 410 if diff(from, to).is_empty() {
404 return true; 411 return true;
405 } 412 }
406 if let Some(stmts) = MacroStmts::cast(from.clone()) { 413 if let Some(stmts) = ast::MacroStmts::cast(from.clone()) {
407 if stmts.statements().any(|stmt| diff(stmt.syntax(), to).is_empty()) { 414 if stmts.statements().any(|stmt| diff(stmt.syntax(), to).is_empty()) {
408 return true; 415 return true;
409 } 416 }
diff --git a/crates/hir_expand/src/hygiene.rs b/crates/hir_expand/src/hygiene.rs
index 779725629..ed61ebca3 100644
--- a/crates/hir_expand/src/hygiene.rs
+++ b/crates/hir_expand/src/hygiene.rs
@@ -5,6 +5,7 @@
5use std::sync::Arc; 5use std::sync::Arc;
6 6
7use base_db::CrateId; 7use base_db::CrateId;
8use db::TokenExpander;
8use either::Either; 9use either::Either;
9use mbe::Origin; 10use mbe::Origin;
10use parser::SyntaxKind; 11use parser::SyntaxKind;
@@ -115,7 +116,7 @@ struct HygieneInfo {
115 /// The `macro_rules!` arguments. 116 /// The `macro_rules!` arguments.
116 def_start: Option<InFile<TextSize>>, 117 def_start: Option<InFile<TextSize>>,
117 118
118 macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>, 119 macro_def: Arc<TokenExpander>,
119 macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>, 120 macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
120 exp_map: Arc<mbe::TokenMap>, 121 exp_map: Arc<mbe::TokenMap>,
121} 122}
@@ -124,13 +125,16 @@ impl HygieneInfo {
124 fn map_ident_up(&self, token: TextRange) -> Option<(InFile<TextRange>, Origin)> { 125 fn map_ident_up(&self, token: TextRange) -> Option<(InFile<TextRange>, Origin)> {
125 let token_id = self.exp_map.token_by_range(token)?; 126 let token_id = self.exp_map.token_by_range(token)?;
126 127
127 let (token_id, origin) = self.macro_def.0.map_id_up(token_id); 128 let (token_id, origin) = self.macro_def.map_id_up(token_id);
128 let (token_map, tt) = match origin { 129 let (token_map, tt) = match origin {
129 mbe::Origin::Call => (&self.macro_arg.1, self.arg_start), 130 mbe::Origin::Call => (&self.macro_arg.1, self.arg_start),
130 mbe::Origin::Def => ( 131 mbe::Origin::Def => match (&*self.macro_def, self.def_start) {
131 &self.macro_def.1, 132 (TokenExpander::MacroDef { def_site_token_map, .. }, Some(tt))
132 *self.def_start.as_ref().expect("`Origin::Def` used with non-`macro_rules!` macro"), 133 | (TokenExpander::MacroRules { def_site_token_map, .. }, Some(tt)) => {
133 ), 134 (def_site_token_map, tt)
135 }
136 _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
137 },
134 }; 138 };
135 139
136 let range = token_map.range_by_token(token_id)?.by_kind(SyntaxKind::IDENT)?; 140 let range = token_map.range_by_token(token_id)?.by_kind(SyntaxKind::IDENT)?;
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs
index a0e6aec62..0402640de 100644
--- a/crates/hir_expand/src/lib.rs
+++ b/crates/hir_expand/src/lib.rs
@@ -351,7 +351,7 @@ pub struct ExpansionInfo {
351 /// The `macro_rules!` arguments. 351 /// The `macro_rules!` arguments.
352 def: Option<InFile<ast::TokenTree>>, 352 def: Option<InFile<ast::TokenTree>>,
353 353
354 macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>, 354 macro_def: Arc<db::TokenExpander>,
355 macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>, 355 macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
356 exp_map: Arc<mbe::TokenMap>, 356 exp_map: Arc<mbe::TokenMap>,
357} 357}
@@ -368,7 +368,7 @@ impl ExpansionInfo {
368 assert_eq!(token.file_id, self.arg.file_id); 368 assert_eq!(token.file_id, self.arg.file_id);
369 let range = token.value.text_range().checked_sub(self.arg.value.text_range().start())?; 369 let range = token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
370 let token_id = self.macro_arg.1.token_by_range(range)?; 370 let token_id = self.macro_arg.1.token_by_range(range)?;
371 let token_id = self.macro_def.0.map_id_down(token_id); 371 let token_id = self.macro_def.map_id_down(token_id);
372 372
373 let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?; 373 let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
374 374
@@ -383,17 +383,16 @@ impl ExpansionInfo {
383 ) -> Option<(InFile<SyntaxToken>, Origin)> { 383 ) -> Option<(InFile<SyntaxToken>, Origin)> {
384 let token_id = self.exp_map.token_by_range(token.value.text_range())?; 384 let token_id = self.exp_map.token_by_range(token.value.text_range())?;
385 385
386 let (token_id, origin) = self.macro_def.0.map_id_up(token_id); 386 let (token_id, origin) = self.macro_def.map_id_up(token_id);
387 let (token_map, tt) = match origin { 387 let (token_map, tt) = match origin {
388 mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()), 388 mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
389 mbe::Origin::Def => ( 389 mbe::Origin::Def => match (&*self.macro_def, self.def.as_ref()) {
390 &self.macro_def.1, 390 (db::TokenExpander::MacroRules { def_site_token_map, .. }, Some(tt))
391 self.def 391 | (db::TokenExpander::MacroDef { def_site_token_map, .. }, Some(tt)) => {
392 .as_ref() 392 (def_site_token_map, tt.as_ref().map(|tt| tt.syntax().clone()))
393 .expect("`Origin::Def` used with non-`macro_rules!` macro") 393 }
394 .as_ref() 394 _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
395 .map(|tt| tt.syntax().clone()), 395 },
396 ),
397 }; 396 };
398 397
399 let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?; 398 let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
diff --git a/crates/hir_ty/src/db.rs b/crates/hir_ty/src/db.rs
index cf67d4266..9da0a02e3 100644
--- a/crates/hir_ty/src/db.rs
+++ b/crates/hir_ty/src/db.rs
@@ -70,6 +70,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
70 fn trait_environment(&self, def: GenericDefId) -> Arc<crate::TraitEnvironment>; 70 fn trait_environment(&self, def: GenericDefId) -> Arc<crate::TraitEnvironment>;
71 71
72 #[salsa::invoke(crate::lower::generic_defaults_query)] 72 #[salsa::invoke(crate::lower::generic_defaults_query)]
73 #[salsa::cycle(crate::lower::generic_defaults_recover)]
73 fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<Ty>]>; 74 fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<Ty>]>;
74 75
75 #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)] 76 #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
diff --git a/crates/hir_ty/src/lower.rs b/crates/hir_ty/src/lower.rs
index 7fd46becd..c99dd8d0a 100644
--- a/crates/hir_ty/src/lower.rs
+++ b/crates/hir_ty/src/lower.rs
@@ -414,17 +414,16 @@ impl<'a> TyLoweringContext<'a> {
414 self.lower_trait_ref_from_resolved_path(trait_, resolved_segment, self_ty); 414 self.lower_trait_ref_from_resolved_path(trait_, resolved_segment, self_ty);
415 let ty = if remaining_segments.len() == 1 { 415 let ty = if remaining_segments.len() == 1 {
416 let segment = remaining_segments.first().unwrap(); 416 let segment = remaining_segments.first().unwrap();
417 let found = associated_type_by_name_including_super_traits( 417 let found = self
418 self.db, 418 .db
419 trait_ref, 419 .trait_data(trait_ref.hir_trait_id())
420 &segment.name, 420 .associated_type_by_name(&segment.name);
421 );
422 match found { 421 match found {
423 Some((super_trait_ref, associated_ty)) => { 422 Some(associated_ty) => {
424 // FIXME handle type parameters on the segment 423 // FIXME handle type parameters on the segment
425 TyKind::Alias(AliasTy::Projection(ProjectionTy { 424 TyKind::Alias(AliasTy::Projection(ProjectionTy {
426 associated_ty_id: to_assoc_type_id(associated_ty), 425 associated_ty_id: to_assoc_type_id(associated_ty),
427 substitution: super_trait_ref.substitution, 426 substitution: trait_ref.substitution,
428 })) 427 }))
429 .intern(&Interner) 428 .intern(&Interner)
430 } 429 }
@@ -1089,6 +1088,27 @@ pub(crate) fn generic_defaults_query(
1089 defaults 1088 defaults
1090} 1089}
1091 1090
1091pub(crate) fn generic_defaults_recover(
1092 db: &dyn HirDatabase,
1093 _cycle: &[String],
1094 def: &GenericDefId,
1095) -> Arc<[Binders<Ty>]> {
1096 let generic_params = generics(db.upcast(), *def);
1097
1098 // we still need one default per parameter
1099 let defaults = generic_params
1100 .iter()
1101 .enumerate()
1102 .map(|(idx, _)| {
1103 let ty = TyKind::Error.intern(&Interner);
1104
1105 crate::make_only_type_binders(idx, ty)
1106 })
1107 .collect();
1108
1109 defaults
1110}
1111
1092fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig { 1112fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
1093 let data = db.function_data(def); 1113 let data = db.function_data(def);
1094 let resolver = def.resolver(db.upcast()); 1114 let resolver = def.resolver(db.upcast());
diff --git a/crates/hir_ty/src/tests/regression.rs b/crates/hir_ty/src/tests/regression.rs
index 9cd9f473d..d14f5c9bb 100644
--- a/crates/hir_ty/src/tests/regression.rs
+++ b/crates/hir_ty/src/tests/regression.rs
@@ -1012,3 +1012,41 @@ fn lifetime_from_chalk_during_deref() {
1012 "#, 1012 "#,
1013 ) 1013 )
1014} 1014}
1015
1016#[test]
1017fn issue_8686() {
1018 check_infer(
1019 r#"
1020pub trait Try: FromResidual {
1021 type Output;
1022 type Residual;
1023}
1024pub trait FromResidual<R = <Self as Try>::Residual> {
1025 fn from_residual(residual: R) -> Self;
1026}
1027
1028struct ControlFlow<B, C>;
1029impl<B, C> Try for ControlFlow<B, C> {
1030 type Output = C;
1031 type Residual = ControlFlow<B, !>;
1032}
1033impl<B, C> FromResidual for ControlFlow<B, C> {
1034 fn from_residual(r: ControlFlow<B, !>) -> Self { ControlFlow }
1035}
1036
1037fn test() {
1038 ControlFlow::from_residual(ControlFlow::<u32, !>);
1039}
1040 "#,
1041 expect![[r#"
1042 144..152 'residual': R
1043 365..366 'r': ControlFlow<B, !>
1044 395..410 '{ ControlFlow }': ControlFlow<B, C>
1045 397..408 'ControlFlow': ControlFlow<B, C>
1046 424..482 '{ ...!>); }': ()
1047 430..456 'Contro...sidual': fn from_residual<ControlFlow<u32, {unknown}>, ControlFlow<u32, !>>(ControlFlow<u32, !>) -> ControlFlow<u32, {unknown}>
1048 430..479 'Contro...2, !>)': ControlFlow<u32, {unknown}>
1049 457..478 'Contro...32, !>': ControlFlow<u32, !>
1050 "#]],
1051 );
1052}
diff --git a/crates/hir_ty/src/utils.rs b/crates/hir_ty/src/utils.rs
index 2f04ee57a..2f490fb92 100644
--- a/crates/hir_ty/src/utils.rs
+++ b/crates/hir_ty/src/utils.rs
@@ -1,6 +1,8 @@
1//! Helper functions for working with def, which don't need to be a separate 1//! Helper functions for working with def, which don't need to be a separate
2//! query, but can't be computed directly from `*Data` (ie, which need a `db`). 2//! query, but can't be computed directly from `*Data` (ie, which need a `db`).
3 3
4use std::iter;
5
4use chalk_ir::{fold::Shift, BoundVar, DebruijnIndex}; 6use chalk_ir::{fold::Shift, BoundVar, DebruijnIndex};
5use hir_def::{ 7use hir_def::{
6 db::DefDatabase, 8 db::DefDatabase,
@@ -14,8 +16,12 @@ use hir_def::{
14 AssocContainerId, GenericDefId, Lookup, TraitId, TypeAliasId, TypeParamId, 16 AssocContainerId, GenericDefId, Lookup, TraitId, TypeAliasId, TypeParamId,
15}; 17};
16use hir_expand::name::{name, Name}; 18use hir_expand::name::{name, Name};
19use rustc_hash::FxHashSet;
17 20
18use crate::{db::HirDatabase, Interner, Substitution, TraitRef, TraitRefExt, TyKind, WhereClause}; 21use crate::{
22 db::HirDatabase, ChalkTraitId, Interner, Substitution, TraitRef, TraitRefExt, TyKind,
23 WhereClause,
24};
19 25
20fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> Vec<TraitId> { 26fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> Vec<TraitId> {
21 let resolver = trait_.resolver(db); 27 let resolver = trait_.resolver(db);
@@ -102,25 +108,35 @@ pub fn all_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> Vec<TraitId> {
102/// `all_super_traits` is that we keep track of type parameters; for example if 108/// `all_super_traits` is that we keep track of type parameters; for example if
103/// we have `Self: Trait<u32, i32>` and `Trait<T, U>: OtherTrait<U>` we'll get 109/// we have `Self: Trait<u32, i32>` and `Trait<T, U>: OtherTrait<U>` we'll get
104/// `Self: OtherTrait<i32>`. 110/// `Self: OtherTrait<i32>`.
105pub(super) fn all_super_trait_refs(db: &dyn HirDatabase, trait_ref: TraitRef) -> Vec<TraitRef> { 111pub(super) fn all_super_trait_refs(db: &dyn HirDatabase, trait_ref: TraitRef) -> SuperTraits {
106 // FIXME: replace by Chalk's `super_traits`, maybe make this a query 112 SuperTraits { db, seen: iter::once(trait_ref.trait_id).collect(), stack: vec![trait_ref] }
113}
107 114
108 // we need to take care a bit here to avoid infinite loops in case of cycles 115pub(super) struct SuperTraits<'a> {
109 // (i.e. if we have `trait A: B; trait B: A;`) 116 db: &'a dyn HirDatabase,
110 let mut result = vec![trait_ref]; 117 stack: Vec<TraitRef>,
111 let mut i = 0; 118 seen: FxHashSet<ChalkTraitId>,
112 while i < result.len() { 119}
113 let t = &result[i]; 120
114 // yeah this is quadratic, but trait hierarchies should be flat 121impl<'a> SuperTraits<'a> {
115 // enough that this doesn't matter 122 fn elaborate(&mut self, trait_ref: &TraitRef) {
116 for tt in direct_super_trait_refs(db, t) { 123 let mut trait_refs = direct_super_trait_refs(self.db, trait_ref);
117 if !result.iter().any(|tr| tr.trait_id == tt.trait_id) { 124 trait_refs.retain(|tr| !self.seen.contains(&tr.trait_id));
118 result.push(tt); 125 self.stack.extend(trait_refs);
119 } 126 }
127}
128
129impl<'a> Iterator for SuperTraits<'a> {
130 type Item = TraitRef;
131
132 fn next(&mut self) -> Option<Self::Item> {
133 if let Some(next) = self.stack.pop() {
134 self.elaborate(&next);
135 Some(next)
136 } else {
137 None
120 } 138 }
121 i += 1;
122 } 139 }
123 result
124} 140}
125 141
126pub(super) fn associated_type_by_name_including_super_traits( 142pub(super) fn associated_type_by_name_including_super_traits(
@@ -128,7 +144,7 @@ pub(super) fn associated_type_by_name_including_super_traits(
128 trait_ref: TraitRef, 144 trait_ref: TraitRef,
129 name: &Name, 145 name: &Name,
130) -> Option<(TraitRef, TypeAliasId)> { 146) -> Option<(TraitRef, TypeAliasId)> {
131 all_super_trait_refs(db, trait_ref).into_iter().find_map(|t| { 147 all_super_trait_refs(db, trait_ref).find_map(|t| {
132 let assoc_type = db.trait_data(t.hir_trait_id()).associated_type_by_name(name)?; 148 let assoc_type = db.trait_data(t.hir_trait_id()).associated_type_by_name(name)?;
133 Some((t, assoc_type)) 149 Some((t, assoc_type))
134 }) 150 })
diff --git a/crates/ide/src/diagnostics.rs b/crates/ide/src/diagnostics.rs
index 1c911a8b2..b14f908b7 100644
--- a/crates/ide/src/diagnostics.rs
+++ b/crates/ide/src/diagnostics.rs
@@ -15,6 +15,7 @@ use hir::{
15 diagnostics::{Diagnostic as _, DiagnosticCode, DiagnosticSinkBuilder}, 15 diagnostics::{Diagnostic as _, DiagnosticCode, DiagnosticSinkBuilder},
16 InFile, Semantics, 16 InFile, Semantics,
17}; 17};
18use ide_assists::AssistResolveStrategy;
18use ide_db::{base_db::SourceDatabase, RootDatabase}; 19use ide_db::{base_db::SourceDatabase, RootDatabase};
19use itertools::Itertools; 20use itertools::Itertools;
20use rustc_hash::FxHashSet; 21use rustc_hash::FxHashSet;
@@ -84,7 +85,7 @@ pub struct DiagnosticsConfig {
84pub(crate) fn diagnostics( 85pub(crate) fn diagnostics(
85 db: &RootDatabase, 86 db: &RootDatabase,
86 config: &DiagnosticsConfig, 87 config: &DiagnosticsConfig,
87 resolve: bool, 88 resolve: &AssistResolveStrategy,
88 file_id: FileId, 89 file_id: FileId,
89) -> Vec<Diagnostic> { 90) -> Vec<Diagnostic> {
90 let _p = profile::span("diagnostics"); 91 let _p = profile::span("diagnostics");
@@ -212,7 +213,7 @@ pub(crate) fn diagnostics(
212fn diagnostic_with_fix<D: DiagnosticWithFix>( 213fn diagnostic_with_fix<D: DiagnosticWithFix>(
213 d: &D, 214 d: &D,
214 sema: &Semantics<RootDatabase>, 215 sema: &Semantics<RootDatabase>,
215 resolve: bool, 216 resolve: &AssistResolveStrategy,
216) -> Diagnostic { 217) -> Diagnostic {
217 Diagnostic::error(sema.diagnostics_display_range(d.display_source()).range, d.message()) 218 Diagnostic::error(sema.diagnostics_display_range(d.display_source()).range, d.message())
218 .with_fix(d.fix(&sema, resolve)) 219 .with_fix(d.fix(&sema, resolve))
@@ -222,7 +223,7 @@ fn diagnostic_with_fix<D: DiagnosticWithFix>(
222fn warning_with_fix<D: DiagnosticWithFix>( 223fn warning_with_fix<D: DiagnosticWithFix>(
223 d: &D, 224 d: &D,
224 sema: &Semantics<RootDatabase>, 225 sema: &Semantics<RootDatabase>,
225 resolve: bool, 226 resolve: &AssistResolveStrategy,
226) -> Diagnostic { 227) -> Diagnostic {
227 Diagnostic::hint(sema.diagnostics_display_range(d.display_source()).range, d.message()) 228 Diagnostic::hint(sema.diagnostics_display_range(d.display_source()).range, d.message())
228 .with_fix(d.fix(&sema, resolve)) 229 .with_fix(d.fix(&sema, resolve))
@@ -299,6 +300,7 @@ fn unresolved_fix(id: &'static str, label: &str, target: TextRange) -> Assist {
299#[cfg(test)] 300#[cfg(test)]
300mod tests { 301mod tests {
301 use expect_test::{expect, Expect}; 302 use expect_test::{expect, Expect};
303 use ide_assists::AssistResolveStrategy;
302 use stdx::trim_indent; 304 use stdx::trim_indent;
303 use test_utils::assert_eq_text; 305 use test_utils::assert_eq_text;
304 306
@@ -314,7 +316,11 @@ mod tests {
314 316
315 let (analysis, file_position) = fixture::position(ra_fixture_before); 317 let (analysis, file_position) = fixture::position(ra_fixture_before);
316 let diagnostic = analysis 318 let diagnostic = analysis
317 .diagnostics(&DiagnosticsConfig::default(), true, file_position.file_id) 319 .diagnostics(
320 &DiagnosticsConfig::default(),
321 AssistResolveStrategy::All,
322 file_position.file_id,
323 )
318 .unwrap() 324 .unwrap()
319 .pop() 325 .pop()
320 .unwrap(); 326 .unwrap();
@@ -343,7 +349,11 @@ mod tests {
343 fn check_no_fix(ra_fixture: &str) { 349 fn check_no_fix(ra_fixture: &str) {
344 let (analysis, file_position) = fixture::position(ra_fixture); 350 let (analysis, file_position) = fixture::position(ra_fixture);
345 let diagnostic = analysis 351 let diagnostic = analysis
346 .diagnostics(&DiagnosticsConfig::default(), true, file_position.file_id) 352 .diagnostics(
353 &DiagnosticsConfig::default(),
354 AssistResolveStrategy::All,
355 file_position.file_id,
356 )
347 .unwrap() 357 .unwrap()
348 .pop() 358 .pop()
349 .unwrap(); 359 .unwrap();
@@ -357,7 +367,9 @@ mod tests {
357 let diagnostics = files 367 let diagnostics = files
358 .into_iter() 368 .into_iter()
359 .flat_map(|file_id| { 369 .flat_map(|file_id| {
360 analysis.diagnostics(&DiagnosticsConfig::default(), true, file_id).unwrap() 370 analysis
371 .diagnostics(&DiagnosticsConfig::default(), AssistResolveStrategy::All, file_id)
372 .unwrap()
361 }) 373 })
362 .collect::<Vec<_>>(); 374 .collect::<Vec<_>>();
363 assert_eq!(diagnostics.len(), 0, "unexpected diagnostics:\n{:#?}", diagnostics); 375 assert_eq!(diagnostics.len(), 0, "unexpected diagnostics:\n{:#?}", diagnostics);
@@ -365,8 +377,9 @@ mod tests {
365 377
366 fn check_expect(ra_fixture: &str, expect: Expect) { 378 fn check_expect(ra_fixture: &str, expect: Expect) {
367 let (analysis, file_id) = fixture::file(ra_fixture); 379 let (analysis, file_id) = fixture::file(ra_fixture);
368 let diagnostics = 380 let diagnostics = analysis
369 analysis.diagnostics(&DiagnosticsConfig::default(), true, file_id).unwrap(); 381 .diagnostics(&DiagnosticsConfig::default(), AssistResolveStrategy::All, file_id)
382 .unwrap();
370 expect.assert_debug_eq(&diagnostics) 383 expect.assert_debug_eq(&diagnostics)
371 } 384 }
372 385
@@ -911,11 +924,13 @@ struct Foo {
911 924
912 let (analysis, file_id) = fixture::file(r#"mod foo;"#); 925 let (analysis, file_id) = fixture::file(r#"mod foo;"#);
913 926
914 let diagnostics = analysis.diagnostics(&config, true, file_id).unwrap(); 927 let diagnostics =
928 analysis.diagnostics(&config, AssistResolveStrategy::All, file_id).unwrap();
915 assert!(diagnostics.is_empty()); 929 assert!(diagnostics.is_empty());
916 930
917 let diagnostics = 931 let diagnostics = analysis
918 analysis.diagnostics(&DiagnosticsConfig::default(), true, file_id).unwrap(); 932 .diagnostics(&DiagnosticsConfig::default(), AssistResolveStrategy::All, file_id)
933 .unwrap();
919 assert!(!diagnostics.is_empty()); 934 assert!(!diagnostics.is_empty());
920 } 935 }
921 936
@@ -1022,7 +1037,11 @@ impl TestStruct {
1022 1037
1023 let (analysis, file_position) = fixture::position(input); 1038 let (analysis, file_position) = fixture::position(input);
1024 let diagnostics = analysis 1039 let diagnostics = analysis
1025 .diagnostics(&DiagnosticsConfig::default(), true, file_position.file_id) 1040 .diagnostics(
1041 &DiagnosticsConfig::default(),
1042 AssistResolveStrategy::All,
1043 file_position.file_id,
1044 )
1026 .unwrap(); 1045 .unwrap();
1027 assert_eq!(diagnostics.len(), 1); 1046 assert_eq!(diagnostics.len(), 1);
1028 1047
diff --git a/crates/ide/src/diagnostics/fixes.rs b/crates/ide/src/diagnostics/fixes.rs
index 7be8b3459..15821500f 100644
--- a/crates/ide/src/diagnostics/fixes.rs
+++ b/crates/ide/src/diagnostics/fixes.rs
@@ -8,6 +8,7 @@ use hir::{
8 }, 8 },
9 HasSource, HirDisplay, InFile, Semantics, VariantDef, 9 HasSource, HirDisplay, InFile, Semantics, VariantDef,
10}; 10};
11use ide_assists::AssistResolveStrategy;
11use ide_db::{ 12use ide_db::{
12 base_db::{AnchoredPathBuf, FileId}, 13 base_db::{AnchoredPathBuf, FileId},
13 source_change::{FileSystemEdit, SourceChange}, 14 source_change::{FileSystemEdit, SourceChange},
@@ -35,11 +36,19 @@ pub(crate) trait DiagnosticWithFix: Diagnostic {
35 /// 36 ///
36 /// If `resolve` is false, the edit will be computed later, on demand, and 37 /// If `resolve` is false, the edit will be computed later, on demand, and
37 /// can be omitted. 38 /// can be omitted.
38 fn fix(&self, sema: &Semantics<RootDatabase>, _resolve: bool) -> Option<Assist>; 39 fn fix(
40 &self,
41 sema: &Semantics<RootDatabase>,
42 _resolve: &AssistResolveStrategy,
43 ) -> Option<Assist>;
39} 44}
40 45
41impl DiagnosticWithFix for UnresolvedModule { 46impl DiagnosticWithFix for UnresolvedModule {
42 fn fix(&self, sema: &Semantics<RootDatabase>, _resolve: bool) -> Option<Assist> { 47 fn fix(
48 &self,
49 sema: &Semantics<RootDatabase>,
50 _resolve: &AssistResolveStrategy,
51 ) -> Option<Assist> {
43 let root = sema.db.parse_or_expand(self.file)?; 52 let root = sema.db.parse_or_expand(self.file)?;
44 let unresolved_module = self.decl.to_node(&root); 53 let unresolved_module = self.decl.to_node(&root);
45 Some(fix( 54 Some(fix(
@@ -59,7 +68,11 @@ impl DiagnosticWithFix for UnresolvedModule {
59} 68}
60 69
61impl DiagnosticWithFix for NoSuchField { 70impl DiagnosticWithFix for NoSuchField {
62 fn fix(&self, sema: &Semantics<RootDatabase>, _resolve: bool) -> Option<Assist> { 71 fn fix(
72 &self,
73 sema: &Semantics<RootDatabase>,
74 _resolve: &AssistResolveStrategy,
75 ) -> Option<Assist> {
63 let root = sema.db.parse_or_expand(self.file)?; 76 let root = sema.db.parse_or_expand(self.file)?;
64 missing_record_expr_field_fix( 77 missing_record_expr_field_fix(
65 &sema, 78 &sema,
@@ -70,7 +83,11 @@ impl DiagnosticWithFix for NoSuchField {
70} 83}
71 84
72impl DiagnosticWithFix for MissingFields { 85impl DiagnosticWithFix for MissingFields {
73 fn fix(&self, sema: &Semantics<RootDatabase>, _resolve: bool) -> Option<Assist> { 86 fn fix(
87 &self,
88 sema: &Semantics<RootDatabase>,
89 _resolve: &AssistResolveStrategy,
90 ) -> Option<Assist> {
74 // Note that although we could add a diagnostics to 91 // Note that although we could add a diagnostics to
75 // fill the missing tuple field, e.g : 92 // fill the missing tuple field, e.g :
76 // `struct A(usize);` 93 // `struct A(usize);`
@@ -106,7 +123,11 @@ impl DiagnosticWithFix for MissingFields {
106} 123}
107 124
108impl DiagnosticWithFix for MissingOkOrSomeInTailExpr { 125impl DiagnosticWithFix for MissingOkOrSomeInTailExpr {
109 fn fix(&self, sema: &Semantics<RootDatabase>, _resolve: bool) -> Option<Assist> { 126 fn fix(
127 &self,
128 sema: &Semantics<RootDatabase>,
129 _resolve: &AssistResolveStrategy,
130 ) -> Option<Assist> {
110 let root = sema.db.parse_or_expand(self.file)?; 131 let root = sema.db.parse_or_expand(self.file)?;
111 let tail_expr = self.expr.to_node(&root); 132 let tail_expr = self.expr.to_node(&root);
112 let tail_expr_range = tail_expr.syntax().text_range(); 133 let tail_expr_range = tail_expr.syntax().text_range();
@@ -119,7 +140,11 @@ impl DiagnosticWithFix for MissingOkOrSomeInTailExpr {
119} 140}
120 141
121impl DiagnosticWithFix for RemoveThisSemicolon { 142impl DiagnosticWithFix for RemoveThisSemicolon {
122 fn fix(&self, sema: &Semantics<RootDatabase>, _resolve: bool) -> Option<Assist> { 143 fn fix(
144 &self,
145 sema: &Semantics<RootDatabase>,
146 _resolve: &AssistResolveStrategy,
147 ) -> Option<Assist> {
123 let root = sema.db.parse_or_expand(self.file)?; 148 let root = sema.db.parse_or_expand(self.file)?;
124 149
125 let semicolon = self 150 let semicolon = self
@@ -139,7 +164,11 @@ impl DiagnosticWithFix for RemoveThisSemicolon {
139} 164}
140 165
141impl DiagnosticWithFix for IncorrectCase { 166impl DiagnosticWithFix for IncorrectCase {
142 fn fix(&self, sema: &Semantics<RootDatabase>, resolve: bool) -> Option<Assist> { 167 fn fix(
168 &self,
169 sema: &Semantics<RootDatabase>,
170 resolve: &AssistResolveStrategy,
171 ) -> Option<Assist> {
143 let root = sema.db.parse_or_expand(self.file)?; 172 let root = sema.db.parse_or_expand(self.file)?;
144 let name_node = self.ident.to_node(&root); 173 let name_node = self.ident.to_node(&root);
145 174
@@ -149,7 +178,7 @@ impl DiagnosticWithFix for IncorrectCase {
149 178
150 let label = format!("Rename to {}", self.suggested_text); 179 let label = format!("Rename to {}", self.suggested_text);
151 let mut res = unresolved_fix("change_case", &label, frange.range); 180 let mut res = unresolved_fix("change_case", &label, frange.range);
152 if resolve { 181 if resolve.should_resolve(&res.id) {
153 let source_change = rename_with_semantics(sema, file_position, &self.suggested_text); 182 let source_change = rename_with_semantics(sema, file_position, &self.suggested_text);
154 res.source_change = Some(source_change.ok().unwrap_or_default()); 183 res.source_change = Some(source_change.ok().unwrap_or_default());
155 } 184 }
@@ -159,7 +188,11 @@ impl DiagnosticWithFix for IncorrectCase {
159} 188}
160 189
161impl DiagnosticWithFix for ReplaceFilterMapNextWithFindMap { 190impl DiagnosticWithFix for ReplaceFilterMapNextWithFindMap {
162 fn fix(&self, sema: &Semantics<RootDatabase>, _resolve: bool) -> Option<Assist> { 191 fn fix(
192 &self,
193 sema: &Semantics<RootDatabase>,
194 _resolve: &AssistResolveStrategy,
195 ) -> Option<Assist> {
163 let root = sema.db.parse_or_expand(self.file)?; 196 let root = sema.db.parse_or_expand(self.file)?;
164 let next_expr = self.next_expr.to_node(&root); 197 let next_expr = self.next_expr.to_node(&root);
165 let next_call = ast::MethodCallExpr::cast(next_expr.syntax().clone())?; 198 let next_call = ast::MethodCallExpr::cast(next_expr.syntax().clone())?;
diff --git a/crates/ide/src/diagnostics/unlinked_file.rs b/crates/ide/src/diagnostics/unlinked_file.rs
index 7d39f4fbe..93fd25dea 100644
--- a/crates/ide/src/diagnostics/unlinked_file.rs
+++ b/crates/ide/src/diagnostics/unlinked_file.rs
@@ -5,6 +5,7 @@ use hir::{
5 diagnostics::{Diagnostic, DiagnosticCode}, 5 diagnostics::{Diagnostic, DiagnosticCode},
6 InFile, 6 InFile,
7}; 7};
8use ide_assists::AssistResolveStrategy;
8use ide_db::{ 9use ide_db::{
9 base_db::{FileId, FileLoader, SourceDatabase, SourceDatabaseExt}, 10 base_db::{FileId, FileLoader, SourceDatabase, SourceDatabaseExt},
10 source_change::SourceChange, 11 source_change::SourceChange,
@@ -50,7 +51,11 @@ impl Diagnostic for UnlinkedFile {
50} 51}
51 52
52impl DiagnosticWithFix for UnlinkedFile { 53impl DiagnosticWithFix for UnlinkedFile {
53 fn fix(&self, sema: &hir::Semantics<RootDatabase>, _resolve: bool) -> Option<Assist> { 54 fn fix(
55 &self,
56 sema: &hir::Semantics<RootDatabase>,
57 _resolve: &AssistResolveStrategy,
58 ) -> Option<Assist> {
54 // If there's an existing module that could add a `mod` item to include the unlinked file, 59 // If there's an existing module that could add a `mod` item to include the unlinked file,
55 // suggest that as a fix. 60 // suggest that as a fix.
56 61
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index d5ef054d8..e0bf660c4 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -218,9 +218,7 @@ fn hint_iterator(
218 ty: &hir::Type, 218 ty: &hir::Type,
219) -> Option<SmolStr> { 219) -> Option<SmolStr> {
220 let db = sema.db; 220 let db = sema.db;
221 let strukt = std::iter::successors(Some(ty.clone()), |ty| ty.remove_ref()) 221 let strukt = ty.strip_references().as_adt()?;
222 .last()
223 .and_then(|strukt| strukt.as_adt())?;
224 let krate = strukt.krate(db); 222 let krate = strukt.krate(db);
225 if krate != famous_defs.core()? { 223 if krate != famous_defs.core()? {
226 return None; 224 return None;
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 99e45633e..8e5b72044 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -87,7 +87,9 @@ pub use crate::{
87 }, 87 },
88}; 88};
89pub use hir::{Documentation, Semantics}; 89pub use hir::{Documentation, Semantics};
90pub use ide_assists::{Assist, AssistConfig, AssistId, AssistKind}; 90pub use ide_assists::{
91 Assist, AssistConfig, AssistId, AssistKind, AssistResolveStrategy, SingleResolve,
92};
91pub use ide_completion::{ 93pub use ide_completion::{
92 CompletionConfig, CompletionItem, CompletionItemKind, CompletionRelevance, ImportEdit, 94 CompletionConfig, CompletionItem, CompletionItemKind, CompletionRelevance, ImportEdit,
93 InsertTextFormat, 95 InsertTextFormat,
@@ -518,12 +520,13 @@ impl Analysis {
518 pub fn assists( 520 pub fn assists(
519 &self, 521 &self,
520 config: &AssistConfig, 522 config: &AssistConfig,
521 resolve: bool, 523 resolve: AssistResolveStrategy,
522 frange: FileRange, 524 frange: FileRange,
523 ) -> Cancelable<Vec<Assist>> { 525 ) -> Cancelable<Vec<Assist>> {
524 self.with_db(|db| { 526 self.with_db(|db| {
527 let ssr_assists = ssr::ssr_assists(db, &resolve, frange);
525 let mut acc = Assist::get(db, config, resolve, frange); 528 let mut acc = Assist::get(db, config, resolve, frange);
526 ssr::add_ssr_assist(db, &mut acc, resolve, frange); 529 acc.extend(ssr_assists.into_iter());
527 acc 530 acc
528 }) 531 })
529 } 532 }
@@ -532,10 +535,10 @@ impl Analysis {
532 pub fn diagnostics( 535 pub fn diagnostics(
533 &self, 536 &self,
534 config: &DiagnosticsConfig, 537 config: &DiagnosticsConfig,
535 resolve: bool, 538 resolve: AssistResolveStrategy,
536 file_id: FileId, 539 file_id: FileId,
537 ) -> Cancelable<Vec<Diagnostic>> { 540 ) -> Cancelable<Vec<Diagnostic>> {
538 self.with_db(|db| diagnostics::diagnostics(db, config, resolve, file_id)) 541 self.with_db(|db| diagnostics::diagnostics(db, config, &resolve, file_id))
539 } 542 }
540 543
541 /// Convenience function to return assists + quick fixes for diagnostics 544 /// Convenience function to return assists + quick fixes for diagnostics
@@ -543,7 +546,7 @@ impl Analysis {
543 &self, 546 &self,
544 assist_config: &AssistConfig, 547 assist_config: &AssistConfig,
545 diagnostics_config: &DiagnosticsConfig, 548 diagnostics_config: &DiagnosticsConfig,
546 resolve: bool, 549 resolve: AssistResolveStrategy,
547 frange: FileRange, 550 frange: FileRange,
548 ) -> Cancelable<Vec<Assist>> { 551 ) -> Cancelable<Vec<Assist>> {
549 let include_fixes = match &assist_config.allowed { 552 let include_fixes = match &assist_config.allowed {
@@ -552,17 +555,21 @@ impl Analysis {
552 }; 555 };
553 556
554 self.with_db(|db| { 557 self.with_db(|db| {
558 let ssr_assists = ssr::ssr_assists(db, &resolve, frange);
559 let diagnostic_assists = if include_fixes {
560 diagnostics::diagnostics(db, diagnostics_config, &resolve, frange.file_id)
561 .into_iter()
562 .filter_map(|it| it.fix)
563 .filter(|it| it.target.intersect(frange.range).is_some())
564 .collect()
565 } else {
566 Vec::new()
567 };
568
555 let mut res = Assist::get(db, assist_config, resolve, frange); 569 let mut res = Assist::get(db, assist_config, resolve, frange);
556 ssr::add_ssr_assist(db, &mut res, resolve, frange); 570 res.extend(ssr_assists.into_iter());
557 571 res.extend(diagnostic_assists.into_iter());
558 if include_fixes { 572
559 res.extend(
560 diagnostics::diagnostics(db, diagnostics_config, resolve, frange.file_id)
561 .into_iter()
562 .filter_map(|it| it.fix)
563 .filter(|it| it.target.intersect(frange.range).is_some()),
564 );
565 }
566 res 573 res
567 }) 574 })
568 } 575 }
diff --git a/crates/ide/src/prime_caches.rs b/crates/ide/src/prime_caches.rs
index 03597f507..d912a01b8 100644
--- a/crates/ide/src/prime_caches.rs
+++ b/crates/ide/src/prime_caches.rs
@@ -27,6 +27,7 @@ pub(crate) fn prime_caches(db: &RootDatabase, cb: &(dyn Fn(PrimeCachesProgress)
27 let topo = &graph.crates_in_topological_order(); 27 let topo = &graph.crates_in_topological_order();
28 28
29 cb(PrimeCachesProgress::Started); 29 cb(PrimeCachesProgress::Started);
30 // Take care to emit the finish signal even when the computation is canceled.
30 let _d = stdx::defer(|| cb(PrimeCachesProgress::Finished)); 31 let _d = stdx::defer(|| cb(PrimeCachesProgress::Finished));
31 32
32 // FIXME: This would be easy to parallelize, since it's in the ideal ordering for that. 33 // FIXME: This would be easy to parallelize, since it's in the ideal ordering for that.
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index 3eb9e27ee..f76715d84 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -304,11 +304,11 @@ fn module_def_doctest(sema: &Semantics<RootDatabase>, def: hir::ModuleDef) -> Op
304 let name = adt.name(sema.db); 304 let name = adt.name(sema.db);
305 let idx = path.rfind(':').map_or(0, |idx| idx + 1); 305 let idx = path.rfind(':').map_or(0, |idx| idx + 1);
306 let (prefix, suffix) = path.split_at(idx); 306 let (prefix, suffix) = path.split_at(idx);
307 let mut ty_params = ty.type_parameters().peekable(); 307 let mut ty_args = ty.type_arguments().peekable();
308 let params = if ty_params.peek().is_some() { 308 let params = if ty_args.peek().is_some() {
309 format!( 309 format!(
310 "<{}>", 310 "<{}>",
311 ty_params.format_with(", ", |ty, cb| cb(&ty.display(sema.db))) 311 ty_args.format_with(", ", |ty, cb| cb(&ty.display(sema.db)))
312 ) 312 )
313 } else { 313 } else {
314 String::new() 314 String::new()
diff --git a/crates/ide/src/ssr.rs b/crates/ide/src/ssr.rs
index f3638d928..57ec80261 100644
--- a/crates/ide/src/ssr.rs
+++ b/crates/ide/src/ssr.rs
@@ -2,18 +2,23 @@
2//! assist in ide_assists because that would require the ide_assists crate 2//! assist in ide_assists because that would require the ide_assists crate
3//! depend on the ide_ssr crate. 3//! depend on the ide_ssr crate.
4 4
5use ide_assists::{Assist, AssistId, AssistKind, GroupLabel}; 5use ide_assists::{Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel};
6use ide_db::{base_db::FileRange, label::Label, source_change::SourceChange, RootDatabase}; 6use ide_db::{base_db::FileRange, label::Label, source_change::SourceChange, RootDatabase};
7 7
8pub(crate) fn add_ssr_assist( 8pub(crate) fn ssr_assists(
9 db: &RootDatabase, 9 db: &RootDatabase,
10 base: &mut Vec<Assist>, 10 resolve: &AssistResolveStrategy,
11 resolve: bool,
12 frange: FileRange, 11 frange: FileRange,
13) -> Option<()> { 12) -> Vec<Assist> {
14 let (match_finder, comment_range) = ide_ssr::ssr_from_comment(db, frange)?; 13 let mut ssr_assists = Vec::with_capacity(2);
15 14
16 let (source_change_for_file, source_change_for_workspace) = if resolve { 15 let (match_finder, comment_range) = match ide_ssr::ssr_from_comment(db, frange) {
16 Some(ssr_data) => ssr_data,
17 None => return ssr_assists,
18 };
19 let id = AssistId("ssr", AssistKind::RefactorRewrite);
20
21 let (source_change_for_file, source_change_for_workspace) = if resolve.should_resolve(&id) {
17 let edits = match_finder.edits(); 22 let edits = match_finder.edits();
18 23
19 let source_change_for_file = { 24 let source_change_for_file = {
@@ -35,16 +40,17 @@ pub(crate) fn add_ssr_assist(
35 40
36 for (label, source_change) in assists.into_iter() { 41 for (label, source_change) in assists.into_iter() {
37 let assist = Assist { 42 let assist = Assist {
38 id: AssistId("ssr", AssistKind::RefactorRewrite), 43 id,
39 label: Label::new(label), 44 label: Label::new(label),
40 group: Some(GroupLabel("Apply SSR".into())), 45 group: Some(GroupLabel("Apply SSR".into())),
41 target: comment_range, 46 target: comment_range,
42 source_change, 47 source_change,
43 }; 48 };
44 49
45 base.push(assist); 50 ssr_assists.push(assist);
46 } 51 }
47 Some(()) 52
53 ssr_assists
48} 54}
49 55
50#[cfg(test)] 56#[cfg(test)]
@@ -52,7 +58,7 @@ mod tests {
52 use std::sync::Arc; 58 use std::sync::Arc;
53 59
54 use expect_test::expect; 60 use expect_test::expect;
55 use ide_assists::Assist; 61 use ide_assists::{Assist, AssistResolveStrategy};
56 use ide_db::{ 62 use ide_db::{
57 base_db::{fixture::WithFixture, salsa::Durability, FileRange}, 63 base_db::{fixture::WithFixture, salsa::Durability, FileRange},
58 symbol_index::SymbolsDatabase, 64 symbol_index::SymbolsDatabase,
@@ -60,24 +66,14 @@ mod tests {
60 }; 66 };
61 use rustc_hash::FxHashSet; 67 use rustc_hash::FxHashSet;
62 68
63 use super::add_ssr_assist; 69 use super::ssr_assists;
64 70
65 fn get_assists(ra_fixture: &str, resolve: bool) -> Vec<Assist> { 71 fn get_assists(ra_fixture: &str, resolve: AssistResolveStrategy) -> Vec<Assist> {
66 let (mut db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture); 72 let (mut db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture);
67 let mut local_roots = FxHashSet::default(); 73 let mut local_roots = FxHashSet::default();
68 local_roots.insert(ide_db::base_db::fixture::WORKSPACE); 74 local_roots.insert(ide_db::base_db::fixture::WORKSPACE);
69 db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); 75 db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
70 76 ssr_assists(&db, &resolve, FileRange { file_id, range: range_or_offset.into() })
71 let mut assists = vec![];
72
73 add_ssr_assist(
74 &db,
75 &mut assists,
76 resolve,
77 FileRange { file_id, range: range_or_offset.into() },
78 );
79
80 assists
81 } 77 }
82 78
83 #[test] 79 #[test]
@@ -88,16 +84,13 @@ mod tests {
88 // This is foo $0 84 // This is foo $0
89 fn foo() {} 85 fn foo() {}
90 "#; 86 "#;
91 let resolve = true; 87 let assists = get_assists(ra_fixture, AssistResolveStrategy::All);
92
93 let assists = get_assists(ra_fixture, resolve);
94 88
95 assert_eq!(0, assists.len()); 89 assert_eq!(0, assists.len());
96 } 90 }
97 91
98 #[test] 92 #[test]
99 fn resolve_edits_true() { 93 fn resolve_edits_true() {
100 let resolve = true;
101 let assists = get_assists( 94 let assists = get_assists(
102 r#" 95 r#"
103 //- /lib.rs 96 //- /lib.rs
@@ -109,7 +102,7 @@ mod tests {
109 //- /bar.rs 102 //- /bar.rs
110 fn bar() { 2 } 103 fn bar() { 2 }
111 "#, 104 "#,
112 resolve, 105 AssistResolveStrategy::All,
113 ); 106 );
114 107
115 assert_eq!(2, assists.len()); 108 assert_eq!(2, assists.len());
@@ -200,7 +193,6 @@ mod tests {
200 193
201 #[test] 194 #[test]
202 fn resolve_edits_false() { 195 fn resolve_edits_false() {
203 let resolve = false;
204 let assists = get_assists( 196 let assists = get_assists(
205 r#" 197 r#"
206 //- /lib.rs 198 //- /lib.rs
@@ -212,7 +204,7 @@ mod tests {
212 //- /bar.rs 204 //- /bar.rs
213 fn bar() { 2 } 205 fn bar() { 2 }
214 "#, 206 "#,
215 resolve, 207 AssistResolveStrategy::None,
216 ); 208 );
217 209
218 assert_eq!(2, assists.len()); 210 assert_eq!(2, assists.len());
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index 638f42c2f..8d83ba206 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -142,6 +142,7 @@ It is beyond me why you'd use these when you got ///
142```rust 142```rust
143</span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation"> 143</span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation">
144``` 144```
145</span><span class="function documentation injected intra_doc_link">[`block_comments2`]</span><span class="comment documentation"> tests these with indentation
145 */</span> 146 */</span>
146<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration">block_comments</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span> 147<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration">block_comments</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
147 148
@@ -150,5 +151,6 @@ It is beyond me why you'd use these when you got ///
150 ```rust 151 ```rust
151</span><span class="comment documentation"> </span><span class="none injected"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation"> 152</span><span class="comment documentation"> </span><span class="none injected"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation">
152 ``` 153 ```
154 </span><span class="function documentation injected intra_doc_link">[`block_comments`]</span><span class="comment documentation"> tests these without indentation
153*/</span> 155*/</span>
154<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration">block_comments2</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span></code></pre> \ No newline at end of file 156<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration">block_comments2</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs
index 17cc6334b..b6e952b08 100644
--- a/crates/ide/src/syntax_highlighting/tests.rs
+++ b/crates/ide/src/syntax_highlighting/tests.rs
@@ -618,6 +618,7 @@ It is beyond me why you'd use these when you got ///
618```rust 618```rust
619let _ = example(&[1, 2, 3]); 619let _ = example(&[1, 2, 3]);
620``` 620```
621[`block_comments2`] tests these with indentation
621 */ 622 */
622pub fn block_comments() {} 623pub fn block_comments() {}
623 624
@@ -626,6 +627,7 @@ pub fn block_comments() {}
626 ```rust 627 ```rust
627 let _ = example(&[1, 2, 3]); 628 let _ = example(&[1, 2, 3]);
628 ``` 629 ```
630 [`block_comments`] tests these without indentation
629*/ 631*/
630pub fn block_comments2() {} 632pub fn block_comments2() {}
631"# 633"#
diff --git a/crates/ide_assists/src/assist_context.rs b/crates/ide_assists/src/assist_context.rs
index 8714e4978..112939948 100644
--- a/crates/ide_assists/src/assist_context.rs
+++ b/crates/ide_assists/src/assist_context.rs
@@ -19,7 +19,9 @@ use syntax::{
19}; 19};
20use text_edit::{TextEdit, TextEditBuilder}; 20use text_edit::{TextEdit, TextEditBuilder};
21 21
22use crate::{assist_config::AssistConfig, Assist, AssistId, AssistKind, GroupLabel}; 22use crate::{
23 assist_config::AssistConfig, Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel,
24};
23 25
24/// `AssistContext` allows to apply an assist or check if it could be applied. 26/// `AssistContext` allows to apply an assist or check if it could be applied.
25/// 27///
@@ -105,14 +107,14 @@ impl<'a> AssistContext<'a> {
105} 107}
106 108
107pub(crate) struct Assists { 109pub(crate) struct Assists {
108 resolve: bool,
109 file: FileId, 110 file: FileId,
111 resolve: AssistResolveStrategy,
110 buf: Vec<Assist>, 112 buf: Vec<Assist>,
111 allowed: Option<Vec<AssistKind>>, 113 allowed: Option<Vec<AssistKind>>,
112} 114}
113 115
114impl Assists { 116impl Assists {
115 pub(crate) fn new(ctx: &AssistContext, resolve: bool) -> Assists { 117 pub(crate) fn new(ctx: &AssistContext, resolve: AssistResolveStrategy) -> Assists {
116 Assists { 118 Assists {
117 resolve, 119 resolve,
118 file: ctx.frange.file_id, 120 file: ctx.frange.file_id,
@@ -158,7 +160,7 @@ impl Assists {
158 } 160 }
159 161
160 fn add_impl(&mut self, mut assist: Assist, f: impl FnOnce(&mut AssistBuilder)) -> Option<()> { 162 fn add_impl(&mut self, mut assist: Assist, f: impl FnOnce(&mut AssistBuilder)) -> Option<()> {
161 let source_change = if self.resolve { 163 let source_change = if self.resolve.should_resolve(&assist.id) {
162 let mut builder = AssistBuilder::new(self.file); 164 let mut builder = AssistBuilder::new(self.file);
163 f(&mut builder); 165 f(&mut builder);
164 Some(builder.finish()) 166 Some(builder.finish())
@@ -185,7 +187,29 @@ pub(crate) struct AssistBuilder {
185 source_change: SourceChange, 187 source_change: SourceChange,
186 188
187 /// Maps the original, immutable `SyntaxNode` to a `clone_for_update` twin. 189 /// Maps the original, immutable `SyntaxNode` to a `clone_for_update` twin.
188 mutated_tree: Option<(SyntaxNode, SyntaxNode)>, 190 mutated_tree: Option<TreeMutator>,
191}
192
193pub(crate) struct TreeMutator {
194 immutable: SyntaxNode,
195 mutable_clone: SyntaxNode,
196}
197
198impl TreeMutator {
199 pub(crate) fn new(immutable: &SyntaxNode) -> TreeMutator {
200 let immutable = immutable.ancestors().last().unwrap();
201 let mutable_clone = immutable.clone_for_update();
202 TreeMutator { immutable, mutable_clone }
203 }
204
205 pub(crate) fn make_mut<N: AstNode>(&self, node: &N) -> N {
206 N::cast(self.make_syntax_mut(node.syntax())).unwrap()
207 }
208
209 pub(crate) fn make_syntax_mut(&self, node: &SyntaxNode) -> SyntaxNode {
210 let ptr = SyntaxNodePtr::new(node);
211 ptr.to_node(&self.mutable_clone)
212 }
189} 213}
190 214
191impl AssistBuilder { 215impl AssistBuilder {
@@ -204,8 +228,8 @@ impl AssistBuilder {
204 } 228 }
205 229
206 fn commit(&mut self) { 230 fn commit(&mut self) {
207 if let Some((old, new)) = self.mutated_tree.take() { 231 if let Some(tm) = self.mutated_tree.take() {
208 algo::diff(&old, &new).into_text_edit(&mut self.edit) 232 algo::diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit)
209 } 233 }
210 234
211 let edit = mem::take(&mut self.edit).finish(); 235 let edit = mem::take(&mut self.edit).finish();
@@ -228,16 +252,7 @@ impl AssistBuilder {
228 /// phase, and then get their mutable couterparts using `make_mut` in the 252 /// phase, and then get their mutable couterparts using `make_mut` in the
229 /// mutable state. 253 /// mutable state.
230 pub(crate) fn make_mut(&mut self, node: SyntaxNode) -> SyntaxNode { 254 pub(crate) fn make_mut(&mut self, node: SyntaxNode) -> SyntaxNode {
231 let root = &self 255 self.mutated_tree.get_or_insert_with(|| TreeMutator::new(&node)).make_syntax_mut(&node)
232 .mutated_tree
233 .get_or_insert_with(|| {
234 let immutable = node.ancestors().last().unwrap();
235 let mutable = immutable.clone_for_update();
236 (immutable, mutable)
237 })
238 .1;
239 let ptr = SyntaxNodePtr::new(&&node);
240 ptr.to_node(root)
241 } 256 }
242 257
243 /// Remove specified `range` of text. 258 /// Remove specified `range` of text.
diff --git a/crates/ide_assists/src/handlers/extract_function.rs b/crates/ide_assists/src/handlers/extract_function.rs
index 5f80a40c8..93b28370c 100644
--- a/crates/ide_assists/src/handlers/extract_function.rs
+++ b/crates/ide_assists/src/handlers/extract_function.rs
@@ -16,12 +16,13 @@ use syntax::{
16 edit::{AstNodeEdit, IndentLevel}, 16 edit::{AstNodeEdit, IndentLevel},
17 AstNode, 17 AstNode,
18 }, 18 },
19 ted,
19 SyntaxKind::{self, BLOCK_EXPR, BREAK_EXPR, COMMENT, PATH_EXPR, RETURN_EXPR}, 20 SyntaxKind::{self, BLOCK_EXPR, BREAK_EXPR, COMMENT, PATH_EXPR, RETURN_EXPR},
20 SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, WalkEvent, T, 21 SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, WalkEvent, T,
21}; 22};
22 23
23use crate::{ 24use crate::{
24 assist_context::{AssistContext, Assists}, 25 assist_context::{AssistContext, Assists, TreeMutator},
25 AssistId, 26 AssistId,
26}; 27};
27 28
@@ -1183,7 +1184,7 @@ fn make_ret_ty(ctx: &AssistContext, module: hir::Module, fun: &Function) -> Opti
1183 } 1184 }
1184 FlowHandler::Try { kind: TryKind::Result { ty: parent_ret_ty } } => { 1185 FlowHandler::Try { kind: TryKind::Result { ty: parent_ret_ty } } => {
1185 let handler_ty = parent_ret_ty 1186 let handler_ty = parent_ret_ty
1186 .type_parameters() 1187 .type_arguments()
1187 .nth(1) 1188 .nth(1)
1188 .map(|ty| make_ty(&ty, ctx, module)) 1189 .map(|ty| make_ty(&ty, ctx, module))
1189 .unwrap_or_else(make::ty_unit); 1190 .unwrap_or_else(make::ty_unit);
@@ -1366,7 +1367,10 @@ fn rewrite_body_segment(
1366 1367
1367/// change all usages to account for added `&`/`&mut` for some params 1368/// change all usages to account for added `&`/`&mut` for some params
1368fn fix_param_usages(ctx: &AssistContext, params: &[Param], syntax: &SyntaxNode) -> SyntaxNode { 1369fn fix_param_usages(ctx: &AssistContext, params: &[Param], syntax: &SyntaxNode) -> SyntaxNode {
1369 let mut rewriter = SyntaxRewriter::default(); 1370 let mut usages_for_param: Vec<(&Param, Vec<ast::Expr>)> = Vec::new();
1371
1372 let tm = TreeMutator::new(syntax);
1373
1370 for param in params { 1374 for param in params {
1371 if !param.kind().is_ref() { 1375 if !param.kind().is_ref() {
1372 continue; 1376 continue;
@@ -1376,30 +1380,39 @@ fn fix_param_usages(ctx: &AssistContext, params: &[Param], syntax: &SyntaxNode)
1376 let usages = usages 1380 let usages = usages
1377 .iter() 1381 .iter()
1378 .filter(|reference| syntax.text_range().contains_range(reference.range)) 1382 .filter(|reference| syntax.text_range().contains_range(reference.range))
1379 .filter_map(|reference| path_element_of_reference(syntax, reference)); 1383 .filter_map(|reference| path_element_of_reference(syntax, reference))
1380 for path in usages { 1384 .map(|expr| tm.make_mut(&expr));
1381 match path.syntax().ancestors().skip(1).find_map(ast::Expr::cast) { 1385
1386 usages_for_param.push((param, usages.collect()));
1387 }
1388
1389 let res = tm.make_syntax_mut(syntax);
1390
1391 for (param, usages) in usages_for_param {
1392 for usage in usages {
1393 match usage.syntax().ancestors().skip(1).find_map(ast::Expr::cast) {
1382 Some(ast::Expr::MethodCallExpr(_)) | Some(ast::Expr::FieldExpr(_)) => { 1394 Some(ast::Expr::MethodCallExpr(_)) | Some(ast::Expr::FieldExpr(_)) => {
1383 // do nothing 1395 // do nothing
1384 } 1396 }
1385 Some(ast::Expr::RefExpr(node)) 1397 Some(ast::Expr::RefExpr(node))
1386 if param.kind() == ParamKind::MutRef && node.mut_token().is_some() => 1398 if param.kind() == ParamKind::MutRef && node.mut_token().is_some() =>
1387 { 1399 {
1388 rewriter.replace_ast(&node.clone().into(), &node.expr().unwrap()); 1400 ted::replace(node.syntax(), node.expr().unwrap().syntax());
1389 } 1401 }
1390 Some(ast::Expr::RefExpr(node)) 1402 Some(ast::Expr::RefExpr(node))
1391 if param.kind() == ParamKind::SharedRef && node.mut_token().is_none() => 1403 if param.kind() == ParamKind::SharedRef && node.mut_token().is_none() =>
1392 { 1404 {
1393 rewriter.replace_ast(&node.clone().into(), &node.expr().unwrap()); 1405 ted::replace(node.syntax(), node.expr().unwrap().syntax());
1394 } 1406 }
1395 Some(_) | None => { 1407 Some(_) | None => {
1396 rewriter.replace_ast(&path, &make::expr_prefix(T![*], path.clone())); 1408 let p = &make::expr_prefix(T![*], usage.clone()).clone_for_update();
1409 ted::replace(usage.syntax(), p.syntax())
1397 } 1410 }
1398 }; 1411 }
1399 } 1412 }
1400 } 1413 }
1401 1414
1402 rewriter.rewrite(syntax) 1415 res
1403} 1416}
1404 1417
1405fn update_external_control_flow(handler: &FlowHandler, syntax: &SyntaxNode) -> SyntaxNode { 1418fn update_external_control_flow(handler: &FlowHandler, syntax: &SyntaxNode) -> SyntaxNode {
diff --git a/crates/ide_assists/src/handlers/generate_from_impl_for_enum.rs b/crates/ide_assists/src/handlers/generate_from_impl_for_enum.rs
index c13c6eebe..ce6998d82 100644
--- a/crates/ide_assists/src/handlers/generate_from_impl_for_enum.rs
+++ b/crates/ide_assists/src/handlers/generate_from_impl_for_enum.rs
@@ -91,7 +91,7 @@ fn existing_from_impl(
91 91
92 let enum_type = enum_.ty(sema.db); 92 let enum_type = enum_.ty(sema.db);
93 93
94 let wrapped_type = variant.fields(sema.db).get(0)?.signature_ty(sema.db); 94 let wrapped_type = variant.fields(sema.db).get(0)?.ty(sema.db);
95 95
96 if enum_type.impls_trait(sema.db, from_trait, &[wrapped_type]) { 96 if enum_type.impls_trait(sema.db, from_trait, &[wrapped_type]) {
97 Some(()) 97 Some(())
diff --git a/crates/ide_assists/src/handlers/reorder_impl.rs b/crates/ide_assists/src/handlers/reorder_impl.rs
index 72d889248..54a9a468e 100644
--- a/crates/ide_assists/src/handlers/reorder_impl.rs
+++ b/crates/ide_assists/src/handlers/reorder_impl.rs
@@ -79,9 +79,12 @@ pub(crate) fn reorder_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
79 "Sort methods", 79 "Sort methods",
80 target, 80 target,
81 |builder| { 81 |builder| {
82 methods.into_iter().zip(sorted).for_each(|(old, new)| { 82 let methods =
83 ted::replace(builder.make_ast_mut(old).syntax(), new.clone_for_update().syntax()) 83 methods.into_iter().map(|fn_| builder.make_ast_mut(fn_)).collect::<Vec<_>>();
84 }); 84 methods
85 .into_iter()
86 .zip(sorted)
87 .for_each(|(old, new)| ted::replace(old.syntax(), new.clone_for_update().syntax()));
85 }, 88 },
86 ) 89 )
87} 90}
@@ -160,7 +163,7 @@ $0impl Bar for Foo {}
160 } 163 }
161 164
162 #[test] 165 #[test]
163 fn reorder_impl_trait_methods() { 166 fn reorder_impl_trait_functions() {
164 check_assist( 167 check_assist(
165 reorder_impl, 168 reorder_impl,
166 r#" 169 r#"
@@ -197,4 +200,33 @@ impl Bar for Foo {
197 "#, 200 "#,
198 ) 201 )
199 } 202 }
203
204 #[test]
205 fn reorder_impl_trait_methods_uneven_ident_lengths() {
206 check_assist(
207 reorder_impl,
208 r#"
209trait Bar {
210 fn foo(&mut self) {}
211 fn fooo(&mut self) {}
212}
213
214struct Foo;
215impl Bar for Foo {
216 fn fooo(&mut self) {}
217 fn foo(&mut self) {$0}
218}"#,
219 r#"
220trait Bar {
221 fn foo(&mut self) {}
222 fn fooo(&mut self) {}
223}
224
225struct Foo;
226impl Bar for Foo {
227 fn foo(&mut self) {}
228 fn fooo(&mut self) {}
229}"#,
230 )
231 }
200} 232}
diff --git a/crates/ide_assists/src/lib.rs b/crates/ide_assists/src/lib.rs
index 88ae5c9a9..2e0c58504 100644
--- a/crates/ide_assists/src/lib.rs
+++ b/crates/ide_assists/src/lib.rs
@@ -17,6 +17,8 @@ mod tests;
17pub mod utils; 17pub mod utils;
18pub mod ast_transform; 18pub mod ast_transform;
19 19
20use std::str::FromStr;
21
20use hir::Semantics; 22use hir::Semantics;
21use ide_db::base_db::FileRange; 23use ide_db::base_db::FileRange;
22use ide_db::{label::Label, source_change::SourceChange, RootDatabase}; 24use ide_db::{label::Label, source_change::SourceChange, RootDatabase};
@@ -56,6 +58,35 @@ impl AssistKind {
56 _ => return false, 58 _ => return false,
57 } 59 }
58 } 60 }
61
62 pub fn name(&self) -> &str {
63 match self {
64 AssistKind::None => "None",
65 AssistKind::QuickFix => "QuickFix",
66 AssistKind::Generate => "Generate",
67 AssistKind::Refactor => "Refactor",
68 AssistKind::RefactorExtract => "RefactorExtract",
69 AssistKind::RefactorInline => "RefactorInline",
70 AssistKind::RefactorRewrite => "RefactorRewrite",
71 }
72 }
73}
74
75impl FromStr for AssistKind {
76 type Err = String;
77
78 fn from_str(s: &str) -> Result<Self, Self::Err> {
79 match s {
80 "None" => Ok(AssistKind::None),
81 "QuickFix" => Ok(AssistKind::QuickFix),
82 "Generate" => Ok(AssistKind::Generate),
83 "Refactor" => Ok(AssistKind::Refactor),
84 "RefactorExtract" => Ok(AssistKind::RefactorExtract),
85 "RefactorInline" => Ok(AssistKind::RefactorInline),
86 "RefactorRewrite" => Ok(AssistKind::RefactorRewrite),
87 unknown => Err(format!("Unknown AssistKind: '{}'", unknown)),
88 }
89 }
59} 90}
60 91
61/// Unique identifier of the assist, should not be shown to the user 92/// Unique identifier of the assist, should not be shown to the user
@@ -63,6 +94,41 @@ impl AssistKind {
63#[derive(Debug, Clone, Copy, PartialEq, Eq)] 94#[derive(Debug, Clone, Copy, PartialEq, Eq)]
64pub struct AssistId(pub &'static str, pub AssistKind); 95pub struct AssistId(pub &'static str, pub AssistKind);
65 96
97/// A way to control how many asssist to resolve during the assist resolution.
98/// When an assist is resolved, its edits are calculated that might be costly to always do by default.
99#[derive(Debug)]
100pub enum AssistResolveStrategy {
101 /// No assists should be resolved.
102 None,
103 /// All assists should be resolved.
104 All,
105 /// Only a certain assist should be resolved.
106 Single(SingleResolve),
107}
108
109/// Hold the [`AssistId`] data of a certain assist to resolve.
110/// The original id object cannot be used due to a `'static` lifetime
111/// and the requirement to construct this struct dynamically during the resolve handling.
112#[derive(Debug)]
113pub struct SingleResolve {
114 /// The id of the assist.
115 pub assist_id: String,
116 // The kind of the assist.
117 pub assist_kind: AssistKind,
118}
119
120impl AssistResolveStrategy {
121 pub fn should_resolve(&self, id: &AssistId) -> bool {
122 match self {
123 AssistResolveStrategy::None => false,
124 AssistResolveStrategy::All => true,
125 AssistResolveStrategy::Single(single_resolve) => {
126 single_resolve.assist_id == id.0 && single_resolve.assist_kind == id.1
127 }
128 }
129 }
130}
131
66#[derive(Clone, Debug)] 132#[derive(Clone, Debug)]
67pub struct GroupLabel(pub String); 133pub struct GroupLabel(pub String);
68 134
@@ -91,7 +157,7 @@ impl Assist {
91 pub fn get( 157 pub fn get(
92 db: &RootDatabase, 158 db: &RootDatabase,
93 config: &AssistConfig, 159 config: &AssistConfig,
94 resolve: bool, 160 resolve: AssistResolveStrategy,
95 range: FileRange, 161 range: FileRange,
96 ) -> Vec<Assist> { 162 ) -> Vec<Assist> {
97 let sema = Semantics::new(db); 163 let sema = Semantics::new(db);
diff --git a/crates/ide_assists/src/tests.rs b/crates/ide_assists/src/tests.rs
index 6f4f97361..9c2847998 100644
--- a/crates/ide_assists/src/tests.rs
+++ b/crates/ide_assists/src/tests.rs
@@ -12,7 +12,10 @@ use stdx::{format_to, trim_indent};
12use syntax::TextRange; 12use syntax::TextRange;
13use test_utils::{assert_eq_text, extract_offset}; 13use test_utils::{assert_eq_text, extract_offset};
14 14
15use crate::{handlers::Handler, Assist, AssistConfig, AssistContext, AssistKind, Assists}; 15use crate::{
16 handlers::Handler, Assist, AssistConfig, AssistContext, AssistKind, AssistResolveStrategy,
17 Assists, SingleResolve,
18};
16 19
17pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig { 20pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
18 snippet_cap: SnippetCap::new(true), 21 snippet_cap: SnippetCap::new(true),
@@ -65,14 +68,14 @@ fn check_doc_test(assist_id: &str, before: &str, after: &str) {
65 let before = db.file_text(file_id).to_string(); 68 let before = db.file_text(file_id).to_string();
66 let frange = FileRange { file_id, range: selection.into() }; 69 let frange = FileRange { file_id, range: selection.into() };
67 70
68 let assist = Assist::get(&db, &TEST_CONFIG, true, frange) 71 let assist = Assist::get(&db, &TEST_CONFIG, AssistResolveStrategy::All, frange)
69 .into_iter() 72 .into_iter()
70 .find(|assist| assist.id.0 == assist_id) 73 .find(|assist| assist.id.0 == assist_id)
71 .unwrap_or_else(|| { 74 .unwrap_or_else(|| {
72 panic!( 75 panic!(
73 "\n\nAssist is not applicable: {}\nAvailable assists: {}", 76 "\n\nAssist is not applicable: {}\nAvailable assists: {}",
74 assist_id, 77 assist_id,
75 Assist::get(&db, &TEST_CONFIG, false, frange) 78 Assist::get(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange)
76 .into_iter() 79 .into_iter()
77 .map(|assist| assist.id.0) 80 .map(|assist| assist.id.0)
78 .collect::<Vec<_>>() 81 .collect::<Vec<_>>()
@@ -108,7 +111,7 @@ fn check(handler: Handler, before: &str, expected: ExpectedResult, assist_label:
108 let sema = Semantics::new(&db); 111 let sema = Semantics::new(&db);
109 let config = TEST_CONFIG; 112 let config = TEST_CONFIG;
110 let ctx = AssistContext::new(sema, &config, frange); 113 let ctx = AssistContext::new(sema, &config, frange);
111 let mut acc = Assists::new(&ctx, true); 114 let mut acc = Assists::new(&ctx, AssistResolveStrategy::All);
112 handler(&mut acc, &ctx); 115 handler(&mut acc, &ctx);
113 let mut res = acc.finish(); 116 let mut res = acc.finish();
114 117
@@ -186,7 +189,7 @@ fn assist_order_field_struct() {
186 let (before_cursor_pos, before) = extract_offset(before); 189 let (before_cursor_pos, before) = extract_offset(before);
187 let (db, file_id) = with_single_file(&before); 190 let (db, file_id) = with_single_file(&before);
188 let frange = FileRange { file_id, range: TextRange::empty(before_cursor_pos) }; 191 let frange = FileRange { file_id, range: TextRange::empty(before_cursor_pos) };
189 let assists = Assist::get(&db, &TEST_CONFIG, false, frange); 192 let assists = Assist::get(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange);
190 let mut assists = assists.iter(); 193 let mut assists = assists.iter();
191 194
192 assert_eq!(assists.next().expect("expected assist").label, "Change visibility to pub(crate)"); 195 assert_eq!(assists.next().expect("expected assist").label, "Change visibility to pub(crate)");
@@ -211,7 +214,7 @@ pub fn test_some_range(a: int) -> bool {
211"#, 214"#,
212 ); 215 );
213 216
214 let assists = Assist::get(&db, &TEST_CONFIG, false, frange); 217 let assists = Assist::get(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange);
215 let expected = labels(&assists); 218 let expected = labels(&assists);
216 219
217 expect![[r#" 220 expect![[r#"
@@ -240,7 +243,7 @@ pub fn test_some_range(a: int) -> bool {
240 let mut cfg = TEST_CONFIG; 243 let mut cfg = TEST_CONFIG;
241 cfg.allowed = Some(vec![AssistKind::Refactor]); 244 cfg.allowed = Some(vec![AssistKind::Refactor]);
242 245
243 let assists = Assist::get(&db, &cfg, false, frange); 246 let assists = Assist::get(&db, &cfg, AssistResolveStrategy::None, frange);
244 let expected = labels(&assists); 247 let expected = labels(&assists);
245 248
246 expect![[r#" 249 expect![[r#"
@@ -255,7 +258,7 @@ pub fn test_some_range(a: int) -> bool {
255 { 258 {
256 let mut cfg = TEST_CONFIG; 259 let mut cfg = TEST_CONFIG;
257 cfg.allowed = Some(vec![AssistKind::RefactorExtract]); 260 cfg.allowed = Some(vec![AssistKind::RefactorExtract]);
258 let assists = Assist::get(&db, &cfg, false, frange); 261 let assists = Assist::get(&db, &cfg, AssistResolveStrategy::None, frange);
259 let expected = labels(&assists); 262 let expected = labels(&assists);
260 263
261 expect![[r#" 264 expect![[r#"
@@ -268,9 +271,250 @@ pub fn test_some_range(a: int) -> bool {
268 { 271 {
269 let mut cfg = TEST_CONFIG; 272 let mut cfg = TEST_CONFIG;
270 cfg.allowed = Some(vec![AssistKind::QuickFix]); 273 cfg.allowed = Some(vec![AssistKind::QuickFix]);
271 let assists = Assist::get(&db, &cfg, false, frange); 274 let assists = Assist::get(&db, &cfg, AssistResolveStrategy::None, frange);
272 let expected = labels(&assists); 275 let expected = labels(&assists);
273 276
274 expect![[r#""#]].assert_eq(&expected); 277 expect![[r#""#]].assert_eq(&expected);
275 } 278 }
276} 279}
280
281#[test]
282fn various_resolve_strategies() {
283 let (db, frange) = RootDatabase::with_range(
284 r#"
285pub fn test_some_range(a: int) -> bool {
286 if let 2..6 = $05$0 {
287 true
288 } else {
289 false
290 }
291}
292"#,
293 );
294
295 let mut cfg = TEST_CONFIG;
296 cfg.allowed = Some(vec![AssistKind::RefactorExtract]);
297
298 {
299 let assists = Assist::get(&db, &cfg, AssistResolveStrategy::None, frange);
300 assert_eq!(2, assists.len());
301 let mut assists = assists.into_iter();
302
303 let extract_into_variable_assist = assists.next().unwrap();
304 expect![[r#"
305 Assist {
306 id: AssistId(
307 "extract_variable",
308 RefactorExtract,
309 ),
310 label: "Extract into variable",
311 group: None,
312 target: 59..60,
313 source_change: None,
314 }
315 "#]]
316 .assert_debug_eq(&extract_into_variable_assist);
317
318 let extract_into_function_assist = assists.next().unwrap();
319 expect![[r#"
320 Assist {
321 id: AssistId(
322 "extract_function",
323 RefactorExtract,
324 ),
325 label: "Extract into function",
326 group: None,
327 target: 59..60,
328 source_change: None,
329 }
330 "#]]
331 .assert_debug_eq(&extract_into_function_assist);
332 }
333
334 {
335 let assists = Assist::get(
336 &db,
337 &cfg,
338 AssistResolveStrategy::Single(SingleResolve {
339 assist_id: "SOMETHING_MISMATCHING".to_string(),
340 assist_kind: AssistKind::RefactorExtract,
341 }),
342 frange,
343 );
344 assert_eq!(2, assists.len());
345 let mut assists = assists.into_iter();
346
347 let extract_into_variable_assist = assists.next().unwrap();
348 expect![[r#"
349 Assist {
350 id: AssistId(
351 "extract_variable",
352 RefactorExtract,
353 ),
354 label: "Extract into variable",
355 group: None,
356 target: 59..60,
357 source_change: None,
358 }
359 "#]]
360 .assert_debug_eq(&extract_into_variable_assist);
361
362 let extract_into_function_assist = assists.next().unwrap();
363 expect![[r#"
364 Assist {
365 id: AssistId(
366 "extract_function",
367 RefactorExtract,
368 ),
369 label: "Extract into function",
370 group: None,
371 target: 59..60,
372 source_change: None,
373 }
374 "#]]
375 .assert_debug_eq(&extract_into_function_assist);
376 }
377
378 {
379 let assists = Assist::get(
380 &db,
381 &cfg,
382 AssistResolveStrategy::Single(SingleResolve {
383 assist_id: "extract_variable".to_string(),
384 assist_kind: AssistKind::RefactorExtract,
385 }),
386 frange,
387 );
388 assert_eq!(2, assists.len());
389 let mut assists = assists.into_iter();
390
391 let extract_into_variable_assist = assists.next().unwrap();
392 expect![[r#"
393 Assist {
394 id: AssistId(
395 "extract_variable",
396 RefactorExtract,
397 ),
398 label: "Extract into variable",
399 group: None,
400 target: 59..60,
401 source_change: Some(
402 SourceChange {
403 source_file_edits: {
404 FileId(
405 0,
406 ): TextEdit {
407 indels: [
408 Indel {
409 insert: "let $0var_name = 5;\n ",
410 delete: 45..45,
411 },
412 Indel {
413 insert: "var_name",
414 delete: 59..60,
415 },
416 ],
417 },
418 },
419 file_system_edits: [],
420 is_snippet: true,
421 },
422 ),
423 }
424 "#]]
425 .assert_debug_eq(&extract_into_variable_assist);
426
427 let extract_into_function_assist = assists.next().unwrap();
428 expect![[r#"
429 Assist {
430 id: AssistId(
431 "extract_function",
432 RefactorExtract,
433 ),
434 label: "Extract into function",
435 group: None,
436 target: 59..60,
437 source_change: None,
438 }
439 "#]]
440 .assert_debug_eq(&extract_into_function_assist);
441 }
442
443 {
444 let assists = Assist::get(&db, &cfg, AssistResolveStrategy::All, frange);
445 assert_eq!(2, assists.len());
446 let mut assists = assists.into_iter();
447
448 let extract_into_variable_assist = assists.next().unwrap();
449 expect![[r#"
450 Assist {
451 id: AssistId(
452 "extract_variable",
453 RefactorExtract,
454 ),
455 label: "Extract into variable",
456 group: None,
457 target: 59..60,
458 source_change: Some(
459 SourceChange {
460 source_file_edits: {
461 FileId(
462 0,
463 ): TextEdit {
464 indels: [
465 Indel {
466 insert: "let $0var_name = 5;\n ",
467 delete: 45..45,
468 },
469 Indel {
470 insert: "var_name",
471 delete: 59..60,
472 },
473 ],
474 },
475 },
476 file_system_edits: [],
477 is_snippet: true,
478 },
479 ),
480 }
481 "#]]
482 .assert_debug_eq(&extract_into_variable_assist);
483
484 let extract_into_function_assist = assists.next().unwrap();
485 expect![[r#"
486 Assist {
487 id: AssistId(
488 "extract_function",
489 RefactorExtract,
490 ),
491 label: "Extract into function",
492 group: None,
493 target: 59..60,
494 source_change: Some(
495 SourceChange {
496 source_file_edits: {
497 FileId(
498 0,
499 ): TextEdit {
500 indels: [
501 Indel {
502 insert: "fun_name()",
503 delete: 59..60,
504 },
505 Indel {
506 insert: "\n\nfn $0fun_name() -> i32 {\n 5\n}",
507 delete: 110..110,
508 },
509 ],
510 },
511 },
512 file_system_edits: [],
513 is_snippet: true,
514 },
515 ),
516 }
517 "#]]
518 .assert_debug_eq(&extract_into_function_assist);
519 }
520}
diff --git a/crates/ide_assists/src/utils/suggest_name.rs b/crates/ide_assists/src/utils/suggest_name.rs
index 533624c1f..deafcd630 100644
--- a/crates/ide_assists/src/utils/suggest_name.rs
+++ b/crates/ide_assists/src/utils/suggest_name.rs
@@ -227,7 +227,7 @@ fn name_of_type(ty: &hir::Type, db: &RootDatabase) -> Option<String> {
227 let name = adt.name(db).to_string(); 227 let name = adt.name(db).to_string();
228 228
229 if WRAPPER_TYPES.contains(&name.as_str()) { 229 if WRAPPER_TYPES.contains(&name.as_str()) {
230 let inner_ty = ty.type_parameters().next()?; 230 let inner_ty = ty.type_arguments().next()?;
231 return name_of_type(&inner_ty, db); 231 return name_of_type(&inner_ty, db);
232 } 232 }
233 233
diff --git a/crates/ide_completion/src/completions.rs b/crates/ide_completion/src/completions.rs
index e2994eed4..78154bf3e 100644
--- a/crates/ide_completion/src/completions.rs
+++ b/crates/ide_completion/src/completions.rs
@@ -203,41 +203,37 @@ impl Completions {
203fn complete_enum_variants( 203fn complete_enum_variants(
204 acc: &mut Completions, 204 acc: &mut Completions,
205 ctx: &CompletionContext, 205 ctx: &CompletionContext,
206 ty: &hir::Type, 206 enum_data: hir::Enum,
207 cb: impl Fn(&mut Completions, &CompletionContext, hir::Variant, hir::ModPath), 207 cb: impl Fn(&mut Completions, &CompletionContext, hir::Variant, hir::ModPath),
208) { 208) {
209 if let Some(hir::Adt::Enum(enum_data)) = 209 let variants = enum_data.variants(ctx.db);
210 iter::successors(Some(ty.clone()), |ty| ty.remove_ref()).last().and_then(|ty| ty.as_adt()) 210
211 { 211 let module = if let Some(module) = ctx.scope.module() {
212 let variants = enum_data.variants(ctx.db); 212 // Compute path from the completion site if available.
213 213 module
214 let module = if let Some(module) = ctx.scope.module() { 214 } else {
215 // Compute path from the completion site if available. 215 // Otherwise fall back to the enum's definition site.
216 module 216 enum_data.module(ctx.db)
217 } else { 217 };
218 // Otherwise fall back to the enum's definition site. 218
219 enum_data.module(ctx.db) 219 if let Some(impl_) = ctx.impl_def.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) {
220 }; 220 if impl_.self_ty(ctx.db).as_adt() == Some(hir::Adt::Enum(enum_data)) {
221 221 for &variant in &variants {
222 if let Some(impl_) = ctx.impl_def.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) { 222 let self_path = hir::ModPath::from_segments(
223 if impl_.self_ty(ctx.db) == *ty { 223 hir::PathKind::Plain,
224 for &variant in &variants { 224 iter::once(known::SELF_TYPE).chain(iter::once(variant.name(ctx.db))),
225 let self_path = hir::ModPath::from_segments( 225 );
226 hir::PathKind::Plain, 226 cb(acc, ctx, variant, self_path);
227 iter::once(known::SELF_TYPE).chain(iter::once(variant.name(ctx.db))),
228 );
229 cb(acc, ctx, variant, self_path);
230 }
231 } 227 }
232 } 228 }
229 }
233 230
234 for variant in variants { 231 for variant in variants {
235 if let Some(path) = module.find_use_path(ctx.db, hir::ModuleDef::from(variant)) { 232 if let Some(path) = module.find_use_path(ctx.db, hir::ModuleDef::from(variant)) {
236 // Variants with trivial paths are already added by the existing completion logic, 233 // Variants with trivial paths are already added by the existing completion logic,
237 // so we should avoid adding these twice 234 // so we should avoid adding these twice
238 if path.segments().len() > 1 { 235 if path.segments().len() > 1 {
239 cb(acc, ctx, variant, path); 236 cb(acc, ctx, variant, path);
240 }
241 } 237 }
242 } 238 }
243 } 239 }
diff --git a/crates/ide_completion/src/completions/pattern.rs b/crates/ide_completion/src/completions/pattern.rs
index 808d7ff7e..8dc9ab73c 100644
--- a/crates/ide_completion/src/completions/pattern.rs
+++ b/crates/ide_completion/src/completions/pattern.rs
@@ -12,8 +12,10 @@ pub(crate) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) {
12 } 12 }
13 13
14 if !ctx.is_irrefutable_pat_binding { 14 if !ctx.is_irrefutable_pat_binding {
15 if let Some(ty) = ctx.expected_type.as_ref() { 15 if let Some(hir::Adt::Enum(e)) =
16 super::complete_enum_variants(acc, ctx, ty, |acc, ctx, variant, path| { 16 ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt())
17 {
18 super::complete_enum_variants(acc, ctx, e, |acc, ctx, variant, path| {
17 acc.add_qualified_variant_pat(ctx, variant, path.clone()); 19 acc.add_qualified_variant_pat(ctx, variant, path.clone());
18 acc.add_qualified_enum_variant(ctx, variant, path); 20 acc.add_qualified_enum_variant(ctx, variant, path);
19 }); 21 });
diff --git a/crates/ide_completion/src/completions/postfix.rs b/crates/ide_completion/src/completions/postfix.rs
index ac69b720a..962aaf0df 100644
--- a/crates/ide_completion/src/completions/postfix.rs
+++ b/crates/ide_completion/src/completions/postfix.rs
@@ -35,14 +35,11 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
35 None => return, 35 None => return,
36 }; 36 };
37 37
38 let ref_removed_ty =
39 std::iter::successors(Some(receiver_ty.clone()), |ty| ty.remove_ref()).last().unwrap();
40
41 let cap = match ctx.config.snippet_cap { 38 let cap = match ctx.config.snippet_cap {
42 Some(it) => it, 39 Some(it) => it,
43 None => return, 40 None => return,
44 }; 41 };
45 let try_enum = TryEnum::from_ty(&ctx.sema, &ref_removed_ty); 42 let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty.strip_references());
46 if let Some(try_enum) = &try_enum { 43 if let Some(try_enum) = &try_enum {
47 match try_enum { 44 match try_enum {
48 TryEnum::Result => { 45 TryEnum::Result => {
diff --git a/crates/ide_completion/src/completions/postfix/format_like.rs b/crates/ide_completion/src/completions/postfix/format_like.rs
index e86ffa8f8..0dcb3e898 100644
--- a/crates/ide_completion/src/completions/postfix/format_like.rs
+++ b/crates/ide_completion/src/completions/postfix/format_like.rs
@@ -1,4 +1,4 @@
1// Feature: Format String Completion. 1// Feature: Format String Completion
2// 2//
3// `"Result {result} is {2 + 2}"` is expanded to the `"Result {} is {}", result, 2 + 2`. 3// `"Result {result} is {2 + 2}"` is expanded to the `"Result {} is {}", result, 2 + 2`.
4// 4//
diff --git a/crates/ide_completion/src/completions/qualified_path.rs b/crates/ide_completion/src/completions/qualified_path.rs
index 969249df6..eedb44873 100644
--- a/crates/ide_completion/src/completions/qualified_path.rs
+++ b/crates/ide_completion/src/completions/qualified_path.rs
@@ -52,18 +52,24 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
52 | PathResolution::Def(def @ hir::ModuleDef::TypeAlias(_)) 52 | PathResolution::Def(def @ hir::ModuleDef::TypeAlias(_))
53 | PathResolution::Def(def @ hir::ModuleDef::BuiltinType(_)) => { 53 | PathResolution::Def(def @ hir::ModuleDef::BuiltinType(_)) => {
54 if let hir::ModuleDef::Adt(Adt::Enum(e)) = def { 54 if let hir::ModuleDef::Adt(Adt::Enum(e)) = def {
55 for variant in e.variants(ctx.db) { 55 add_enum_variants(ctx, acc, e);
56 acc.add_enum_variant(ctx, variant, None);
57 }
58 } 56 }
59 let ty = match def { 57 let ty = match def {
60 hir::ModuleDef::Adt(adt) => adt.ty(ctx.db), 58 hir::ModuleDef::Adt(adt) => adt.ty(ctx.db),
61 hir::ModuleDef::TypeAlias(a) => a.ty(ctx.db), 59 hir::ModuleDef::TypeAlias(a) => {
60 let ty = a.ty(ctx.db);
61 if let Some(Adt::Enum(e)) = ty.as_adt() {
62 cov_mark::hit!(completes_variant_through_alias);
63 add_enum_variants(ctx, acc, e);
64 }
65 ty
66 }
62 hir::ModuleDef::BuiltinType(builtin) => { 67 hir::ModuleDef::BuiltinType(builtin) => {
63 let module = match ctx.scope.module() { 68 let module = match ctx.scope.module() {
64 Some(it) => it, 69 Some(it) => it,
65 None => return, 70 None => return,
66 }; 71 };
72 cov_mark::hit!(completes_primitive_assoc_const);
67 builtin.ty(ctx.db, module) 73 builtin.ty(ctx.db, module)
68 } 74 }
69 _ => unreachable!(), 75 _ => unreachable!(),
@@ -92,9 +98,8 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
92 if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { 98 if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) {
93 return None; 99 return None;
94 } 100 }
95 match item { 101 if let hir::AssocItem::TypeAlias(ty) = item {
96 hir::AssocItem::Function(_) | hir::AssocItem::Const(_) => {} 102 acc.add_type_alias(ctx, ty)
97 hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty),
98 } 103 }
99 None::<()> 104 None::<()>
100 }); 105 });
@@ -122,9 +127,7 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
122 }; 127 };
123 128
124 if let Some(Adt::Enum(e)) = ty.as_adt() { 129 if let Some(Adt::Enum(e)) = ty.as_adt() {
125 for variant in e.variants(ctx.db) { 130 add_enum_variants(ctx, acc, e);
126 acc.add_enum_variant(ctx, variant, None);
127 }
128 } 131 }
129 132
130 let traits_in_scope = ctx.scope.traits_in_scope(); 133 let traits_in_scope = ctx.scope.traits_in_scope();
@@ -151,6 +154,12 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
151 } 154 }
152} 155}
153 156
157fn add_enum_variants(ctx: &CompletionContext, acc: &mut Completions, e: hir::Enum) {
158 for variant in e.variants(ctx.db) {
159 acc.add_enum_variant(ctx, variant, None);
160 }
161}
162
154#[cfg(test)] 163#[cfg(test)]
155mod tests { 164mod tests {
156 use expect_test::{expect, Expect}; 165 use expect_test::{expect, Expect};
@@ -737,29 +746,7 @@ fn f() {}
737 } 746 }
738 747
739 #[test] 748 #[test]
740 fn completes_function() { 749 fn completes_variant_through_self() {
741 check(
742 r#"
743fn foo(
744 a: i32,
745 b: i32
746) {
747
748}
749
750fn main() {
751 fo$0
752}
753"#,
754 expect![[r#"
755 fn main() fn()
756 fn foo(…) fn(i32, i32)
757 "#]],
758 );
759 }
760
761 #[test]
762 fn completes_self_enum() {
763 check( 750 check(
764 r#" 751 r#"
765enum Foo { 752enum Foo {
@@ -783,6 +770,7 @@ impl Foo {
783 770
784 #[test] 771 #[test]
785 fn completes_primitive_assoc_const() { 772 fn completes_primitive_assoc_const() {
773 cov_mark::check!(completes_primitive_assoc_const);
786 check( 774 check(
787 r#" 775 r#"
788//- /lib.rs crate:lib deps:core 776//- /lib.rs crate:lib deps:core
@@ -804,4 +792,23 @@ impl u8 {
804 "#]], 792 "#]],
805 ); 793 );
806 } 794 }
795
796 #[test]
797 fn completes_variant_through_alias() {
798 cov_mark::check!(completes_variant_through_alias);
799 check(
800 r#"
801enum Foo {
802 Bar
803}
804type Foo2 = Foo;
805fn main() {
806 Foo2::$0
807}
808"#,
809 expect![[r#"
810 ev Bar ()
811 "#]],
812 );
813 }
807} 814}
diff --git a/crates/ide_completion/src/completions/unqualified_path.rs b/crates/ide_completion/src/completions/unqualified_path.rs
index 1b8b063e7..7875500c1 100644
--- a/crates/ide_completion/src/completions/unqualified_path.rs
+++ b/crates/ide_completion/src/completions/unqualified_path.rs
@@ -17,8 +17,10 @@ pub(crate) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionC
17 return; 17 return;
18 } 18 }
19 19
20 if let Some(ty) = &ctx.expected_type { 20 if let Some(hir::Adt::Enum(e)) =
21 super::complete_enum_variants(acc, ctx, ty, |acc, ctx, variant, path| { 21 ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt())
22 {
23 super::complete_enum_variants(acc, ctx, e, |acc, ctx, variant, path| {
22 acc.add_qualified_enum_variant(ctx, variant, path) 24 acc.add_qualified_enum_variant(ctx, variant, path)
23 }); 25 });
24 } 26 }
diff --git a/crates/ide_completion/src/context.rs b/crates/ide_completion/src/context.rs
index 32f81aec1..f3fcb712c 100644
--- a/crates/ide_completion/src/context.rs
+++ b/crates/ide_completion/src/context.rs
@@ -301,103 +301,108 @@ impl<'a> CompletionContext<'a> {
301 .find_map(ast::Impl::cast); 301 .find_map(ast::Impl::cast);
302 } 302 }
303 303
304 fn expected_type_and_name(&self) -> (Option<Type>, Option<NameOrNameRef>) {
305 let mut node = match self.token.parent() {
306 Some(it) => it,
307 None => return (None, None),
308 };
309 loop {
310 break match_ast! {
311 match node {
312 ast::LetStmt(it) => {
313 cov_mark::hit!(expected_type_let_with_leading_char);
314 cov_mark::hit!(expected_type_let_without_leading_char);
315 let ty = it.pat()
316 .and_then(|pat| self.sema.type_of_pat(&pat));
317 let name = if let Some(ast::Pat::IdentPat(ident)) = it.pat() {
318 ident.name().map(NameOrNameRef::Name)
319 } else {
320 None
321 };
322
323 (ty, name)
324 },
325 ast::ArgList(_it) => {
326 cov_mark::hit!(expected_type_fn_param_with_leading_char);
327 cov_mark::hit!(expected_type_fn_param_without_leading_char);
328 ActiveParameter::at_token(
329 &self.sema,
330 self.token.clone(),
331 ).map(|ap| {
332 let name = ap.ident().map(NameOrNameRef::Name);
333 (Some(ap.ty), name)
334 })
335 .unwrap_or((None, None))
336 },
337 ast::RecordExprFieldList(_it) => {
338 cov_mark::hit!(expected_type_struct_field_without_leading_char);
339 self.token.prev_sibling_or_token()
340 .and_then(|se| se.into_node())
341 .and_then(|node| ast::RecordExprField::cast(node))
342 .and_then(|rf| self.sema.resolve_record_field(&rf).zip(Some(rf)))
343 .map(|(f, rf)|(
344 Some(f.0.ty(self.db)),
345 rf.field_name().map(NameOrNameRef::NameRef),
346 ))
347 .unwrap_or((None, None))
348 },
349 ast::RecordExprField(it) => {
350 cov_mark::hit!(expected_type_struct_field_with_leading_char);
351 self.sema
352 .resolve_record_field(&it)
353 .map(|f|(
354 Some(f.0.ty(self.db)),
355 it.field_name().map(NameOrNameRef::NameRef),
356 ))
357 .unwrap_or((None, None))
358 },
359 ast::MatchExpr(it) => {
360 cov_mark::hit!(expected_type_match_arm_without_leading_char);
361 let ty = it.expr()
362 .and_then(|e| self.sema.type_of_expr(&e));
363 (ty, None)
364 },
365 ast::IfExpr(it) => {
366 cov_mark::hit!(expected_type_if_let_without_leading_char);
367 let ty = it.condition()
368 .and_then(|cond| cond.expr())
369 .and_then(|e| self.sema.type_of_expr(&e));
370 (ty, None)
371 },
372 ast::IdentPat(it) => {
373 cov_mark::hit!(expected_type_if_let_with_leading_char);
374 cov_mark::hit!(expected_type_match_arm_with_leading_char);
375 let ty = self.sema.type_of_pat(&ast::Pat::from(it));
376 (ty, None)
377 },
378 ast::Fn(it) => {
379 cov_mark::hit!(expected_type_fn_ret_with_leading_char);
380 cov_mark::hit!(expected_type_fn_ret_without_leading_char);
381 let def = self.sema.to_def(&it);
382 (def.map(|def| def.ret_type(self.db)), None)
383 },
384 ast::Stmt(it) => (None, None),
385 _ => {
386 match node.parent() {
387 Some(n) => {
388 node = n;
389 continue;
390 },
391 None => (None, None),
392 }
393 },
394 }
395 };
396 }
397 }
398
304 fn fill( 399 fn fill(
305 &mut self, 400 &mut self,
306 original_file: &SyntaxNode, 401 original_file: &SyntaxNode,
307 file_with_fake_ident: SyntaxNode, 402 file_with_fake_ident: SyntaxNode,
308 offset: TextSize, 403 offset: TextSize,
309 ) { 404 ) {
310 let (expected_type, expected_name) = { 405 let (expected_type, expected_name) = self.expected_type_and_name();
311 let mut node = match self.token.parent() {
312 Some(it) => it,
313 None => return,
314 };
315 loop {
316 break match_ast! {
317 match node {
318 ast::LetStmt(it) => {
319 cov_mark::hit!(expected_type_let_with_leading_char);
320 cov_mark::hit!(expected_type_let_without_leading_char);
321 let ty = it.pat()
322 .and_then(|pat| self.sema.type_of_pat(&pat));
323 let name = if let Some(ast::Pat::IdentPat(ident)) = it.pat() {
324 ident.name().map(NameOrNameRef::Name)
325 } else {
326 None
327 };
328
329 (ty, name)
330 },
331 ast::ArgList(_it) => {
332 cov_mark::hit!(expected_type_fn_param_with_leading_char);
333 cov_mark::hit!(expected_type_fn_param_without_leading_char);
334 ActiveParameter::at_token(
335 &self.sema,
336 self.token.clone(),
337 ).map(|ap| {
338 let name = ap.ident().map(NameOrNameRef::Name);
339 (Some(ap.ty), name)
340 })
341 .unwrap_or((None, None))
342 },
343 ast::RecordExprFieldList(_it) => {
344 cov_mark::hit!(expected_type_struct_field_without_leading_char);
345 self.token.prev_sibling_or_token()
346 .and_then(|se| se.into_node())
347 .and_then(|node| ast::RecordExprField::cast(node))
348 .and_then(|rf| self.sema.resolve_record_field(&rf).zip(Some(rf)))
349 .map(|(f, rf)|(
350 Some(f.0.signature_ty(self.db)),
351 rf.field_name().map(NameOrNameRef::NameRef),
352 ))
353 .unwrap_or((None, None))
354 },
355 ast::RecordExprField(it) => {
356 cov_mark::hit!(expected_type_struct_field_with_leading_char);
357 self.sema
358 .resolve_record_field(&it)
359 .map(|f|(
360 Some(f.0.signature_ty(self.db)),
361 it.field_name().map(NameOrNameRef::NameRef),
362 ))
363 .unwrap_or((None, None))
364 },
365 ast::MatchExpr(it) => {
366 cov_mark::hit!(expected_type_match_arm_without_leading_char);
367 let ty = it.expr()
368 .and_then(|e| self.sema.type_of_expr(&e));
369
370 (ty, None)
371 },
372 ast::IdentPat(it) => {
373 cov_mark::hit!(expected_type_if_let_with_leading_char);
374 cov_mark::hit!(expected_type_match_arm_with_leading_char);
375 let ty = self.sema.type_of_pat(&ast::Pat::from(it));
376
377 (ty, None)
378 },
379 ast::Fn(_it) => {
380 cov_mark::hit!(expected_type_fn_ret_with_leading_char);
381 cov_mark::hit!(expected_type_fn_ret_without_leading_char);
382 let ty = self.token.ancestors()
383 .find_map(|ancestor| ast::Expr::cast(ancestor))
384 .and_then(|expr| self.sema.type_of_expr(&expr));
385
386 (ty, None)
387 },
388 _ => {
389 match node.parent() {
390 Some(n) => {
391 node = n;
392 continue;
393 },
394 None => (None, None),
395 }
396 },
397 }
398 };
399 }
400 };
401 self.expected_type = expected_type; 406 self.expected_type = expected_type;
402 self.expected_name = expected_name; 407 self.expected_name = expected_name;
403 self.attribute_under_caret = find_node_at_offset(&file_with_fake_ident, offset); 408 self.attribute_under_caret = find_node_at_offset(&file_with_fake_ident, offset);
@@ -802,6 +807,7 @@ fn foo() {
802 807
803 #[test] 808 #[test]
804 fn expected_type_if_let_without_leading_char() { 809 fn expected_type_if_let_without_leading_char() {
810 cov_mark::check!(expected_type_if_let_without_leading_char);
805 check_expected_type_and_name( 811 check_expected_type_and_name(
806 r#" 812 r#"
807enum Foo { Bar, Baz, Quux } 813enum Foo { Bar, Baz, Quux }
@@ -811,8 +817,8 @@ fn foo() {
811 if let $0 = f { } 817 if let $0 = f { }
812} 818}
813"#, 819"#,
814 expect![[r#"ty: (), name: ?"#]], 820 expect![[r#"ty: Foo, name: ?"#]],
815 ) // FIXME should be `ty: u32, name: ?` 821 )
816 } 822 }
817 823
818 #[test] 824 #[test]
@@ -840,8 +846,8 @@ fn foo() -> u32 {
840 $0 846 $0
841} 847}
842"#, 848"#,
843 expect![[r#"ty: (), name: ?"#]], 849 expect![[r#"ty: u32, name: ?"#]],
844 ) // FIXME this should be `ty: u32, name: ?` 850 )
845 } 851 }
846 852
847 #[test] 853 #[test]
@@ -856,4 +862,16 @@ fn foo() -> u32 {
856 expect![[r#"ty: u32, name: ?"#]], 862 expect![[r#"ty: u32, name: ?"#]],
857 ) 863 )
858 } 864 }
865
866 #[test]
867 fn expected_type_fn_ret_fn_ref_fully_typed() {
868 check_expected_type_and_name(
869 r#"
870fn foo() -> u32 {
871 foo$0
872}
873"#,
874 expect![[r#"ty: u32, name: ?"#]],
875 )
876 }
859} 877}
diff --git a/crates/ide_completion/src/render/enum_variant.rs b/crates/ide_completion/src/render/enum_variant.rs
index 832f5ced1..0c0c71134 100644
--- a/crates/ide_completion/src/render/enum_variant.rs
+++ b/crates/ide_completion/src/render/enum_variant.rs
@@ -93,7 +93,7 @@ impl<'a> EnumRender<'a> {
93 .variant 93 .variant
94 .fields(self.ctx.db()) 94 .fields(self.ctx.db())
95 .into_iter() 95 .into_iter()
96 .map(|field| (field.name(self.ctx.db()), field.signature_ty(self.ctx.db()))); 96 .map(|field| (field.name(self.ctx.db()), field.ty(self.ctx.db())));
97 97
98 match self.variant_kind { 98 match self.variant_kind {
99 StructKind::Tuple | StructKind::Unit => format!( 99 StructKind::Tuple | StructKind::Unit => format!(
diff --git a/crates/proc_macro_api/Cargo.toml b/crates/proc_macro_api/Cargo.toml
index 1ba1e4abd..2ce5eeedd 100644
--- a/crates/proc_macro_api/Cargo.toml
+++ b/crates/proc_macro_api/Cargo.toml
@@ -15,7 +15,7 @@ serde_json = { version = "1.0", features = ["unbounded_depth"] }
15log = "0.4.8" 15log = "0.4.8"
16crossbeam-channel = "0.5.0" 16crossbeam-channel = "0.5.0"
17jod-thread = "0.1.1" 17jod-thread = "0.1.1"
18memmap = "0.7.0" 18memmap2 = "0.2.0"
19object = { version = "0.23.0", default-features = false, features = ["std", "read_core", "elf", "macho", "pe", "unaligned"] } 19object = { version = "0.23.0", default-features = false, features = ["std", "read_core", "elf", "macho", "pe", "unaligned"] }
20snap = "1.0" 20snap = "1.0"
21 21
diff --git a/crates/proc_macro_api/src/version.rs b/crates/proc_macro_api/src/version.rs
index dcf8fae8f..6dbac50b4 100644
--- a/crates/proc_macro_api/src/version.rs
+++ b/crates/proc_macro_api/src/version.rs
@@ -6,7 +6,7 @@ use std::{
6 path::Path, 6 path::Path,
7}; 7};
8 8
9use memmap::Mmap; 9use memmap2::Mmap;
10use object::read::{File as BinaryFile, Object, ObjectSection}; 10use object::read::{File as BinaryFile, Object, ObjectSection};
11use snap::read::FrameDecoder as SnapDecoder; 11use snap::read::FrameDecoder as SnapDecoder;
12 12
diff --git a/crates/rust-analyzer/src/benchmarks.rs b/crates/rust-analyzer/src/benchmarks.rs
deleted file mode 100644
index bdd94b1c4..000000000
--- a/crates/rust-analyzer/src/benchmarks.rs
+++ /dev/null
@@ -1,74 +0,0 @@
1//! Fully integrated benchmarks for rust-analyzer, which load real cargo
2//! projects.
3//!
4//! The benchmark here is used to debug specific performance regressions. If you
5//! notice that, eg, completion is slow in some specific case, you can modify
6//! code here exercise this specific completion, and thus have a fast
7//! edit/compile/test cycle.
8//!
9//! Note that "Rust Analyzer: Run" action does not allow running a single test
10//! in release mode in VS Code. There's however "Rust Analyzer: Copy Run Command Line"
11//! which you can use to paste the command in terminal and add `--release` manually.
12
13use std::sync::Arc;
14
15use ide::Change;
16use test_utils::project_root;
17use vfs::{AbsPathBuf, VfsPath};
18
19use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig};
20
21#[test]
22fn benchmark_integrated_highlighting() {
23 // Don't run slow benchmark by default
24 if true {
25 return;
26 }
27
28 // Load rust-analyzer itself.
29 let workspace_to_load = project_root();
30 let file = "./crates/ide_db/src/apply_change.rs";
31
32 let cargo_config = Default::default();
33 let load_cargo_config = LoadCargoConfig {
34 load_out_dirs_from_check: true,
35 wrap_rustc: false,
36 with_proc_macro: false,
37 };
38
39 let (mut host, vfs, _proc_macro) = {
40 let _it = stdx::timeit("workspace loading");
41 load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
42 };
43
44 let file_id = {
45 let file = workspace_to_load.join(file);
46 let path = VfsPath::from(AbsPathBuf::assert(file));
47 vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
48 };
49
50 {
51 let _it = stdx::timeit("initial");
52 let analysis = host.analysis();
53 analysis.highlight_as_html(file_id, false).unwrap();
54 }
55
56 profile::init_from("*>100");
57 // let _s = profile::heartbeat_span();
58
59 {
60 let _it = stdx::timeit("change");
61 let mut text = host.analysis().file_text(file_id).unwrap().to_string();
62 text.push_str("\npub fn _dummy() {}\n");
63 let mut change = Change::new();
64 change.change_file(file_id, Some(Arc::new(text)));
65 host.apply_change(change);
66 }
67
68 {
69 let _it = stdx::timeit("after change");
70 let _span = profile::cpu_span();
71 let analysis = host.analysis();
72 analysis.highlight_as_html(file_id, false).unwrap();
73 }
74}
diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs
index 74f784338..c33c8179c 100644
--- a/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -7,7 +7,7 @@ use anyhow::anyhow;
7use rustc_hash::FxHashSet; 7use rustc_hash::FxHashSet;
8 8
9use hir::{db::HirDatabase, Crate, Module}; 9use hir::{db::HirDatabase, Crate, Module};
10use ide::{DiagnosticsConfig, Severity}; 10use ide::{AssistResolveStrategy, DiagnosticsConfig, Severity};
11use ide_db::base_db::SourceDatabaseExt; 11use ide_db::base_db::SourceDatabaseExt;
12 12
13use crate::cli::{ 13use crate::cli::{
@@ -57,8 +57,9 @@ pub fn diagnostics(
57 let crate_name = 57 let crate_name =
58 module.krate().display_name(db).as_deref().unwrap_or("unknown").to_string(); 58 module.krate().display_name(db).as_deref().unwrap_or("unknown").to_string();
59 println!("processing crate: {}, module: {}", crate_name, _vfs.file_path(file_id)); 59 println!("processing crate: {}, module: {}", crate_name, _vfs.file_path(file_id));
60 for diagnostic in 60 for diagnostic in analysis
61 analysis.diagnostics(&DiagnosticsConfig::default(), false, file_id).unwrap() 61 .diagnostics(&DiagnosticsConfig::default(), AssistResolveStrategy::None, file_id)
62 .unwrap()
62 { 63 {
63 if matches!(diagnostic.severity, Severity::Error) { 64 if matches!(diagnostic.severity, Severity::Error) {
64 found_error = true; 65 found_error = true;
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index adeb7a97e..6f2f482c1 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -84,6 +84,7 @@ pub(crate) struct GlobalState {
84 pub(crate) workspace_build_data: Option<BuildDataResult>, 84 pub(crate) workspace_build_data: Option<BuildDataResult>,
85 pub(crate) fetch_build_data_queue: 85 pub(crate) fetch_build_data_queue:
86 OpQueue<BuildDataCollector, Option<anyhow::Result<BuildDataResult>>>, 86 OpQueue<BuildDataCollector, Option<anyhow::Result<BuildDataResult>>>,
87 pub(crate) prime_caches_queue: OpQueue<(), ()>,
87 88
88 latest_requests: Arc<RwLock<LatestRequests>>, 89 latest_requests: Arc<RwLock<LatestRequests>>,
89} 90}
@@ -146,6 +147,7 @@ impl GlobalState {
146 workspaces: Arc::new(Vec::new()), 147 workspaces: Arc::new(Vec::new()),
147 fetch_workspaces_queue: OpQueue::default(), 148 fetch_workspaces_queue: OpQueue::default(),
148 workspace_build_data: None, 149 workspace_build_data: None,
150 prime_caches_queue: OpQueue::default(),
149 151
150 fetch_build_data_queue: OpQueue::default(), 152 fetch_build_data_queue: OpQueue::default(),
151 latest_requests: Default::default(), 153 latest_requests: Default::default(),
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs
index 1f59402e5..f6e40f872 100644
--- a/crates/rust-analyzer/src/handlers.rs
+++ b/crates/rust-analyzer/src/handlers.rs
@@ -8,8 +8,9 @@ use std::{
8}; 8};
9 9
10use ide::{ 10use ide::{
11 AnnotationConfig, FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, Query, 11 AnnotationConfig, AssistKind, AssistResolveStrategy, FileId, FilePosition, FileRange,
12 RangeInfo, Runnable, RunnableKind, SearchScope, SourceChange, TextEdit, 12 HoverAction, HoverGotoTypeData, Query, RangeInfo, Runnable, RunnableKind, SearchScope,
13 SingleResolve, SourceChange, TextEdit,
13}; 14};
14use ide_db::SymbolKind; 15use ide_db::SymbolKind;
15use itertools::Itertools; 16use itertools::Itertools;
@@ -27,7 +28,7 @@ use lsp_types::{
27use project_model::TargetKind; 28use project_model::TargetKind;
28use serde::{Deserialize, Serialize}; 29use serde::{Deserialize, Serialize};
29use serde_json::to_value; 30use serde_json::to_value;
30use stdx::{format_to, split_once}; 31use stdx::format_to;
31use syntax::{algo, ast, AstNode, TextRange, TextSize}; 32use syntax::{algo, ast, AstNode, TextRange, TextSize};
32 33
33use crate::{ 34use crate::{
@@ -1004,10 +1005,15 @@ pub(crate) fn handle_code_action(
1004 let mut res: Vec<lsp_ext::CodeAction> = Vec::new(); 1005 let mut res: Vec<lsp_ext::CodeAction> = Vec::new();
1005 1006
1006 let code_action_resolve_cap = snap.config.code_action_resolve(); 1007 let code_action_resolve_cap = snap.config.code_action_resolve();
1008 let resolve = if code_action_resolve_cap {
1009 AssistResolveStrategy::None
1010 } else {
1011 AssistResolveStrategy::All
1012 };
1007 let assists = snap.analysis.assists_with_fixes( 1013 let assists = snap.analysis.assists_with_fixes(
1008 &assists_config, 1014 &assists_config,
1009 &snap.config.diagnostics(), 1015 &snap.config.diagnostics(),
1010 !code_action_resolve_cap, 1016 resolve,
1011 frange, 1017 frange,
1012 )?; 1018 )?;
1013 for (index, assist) in assists.into_iter().enumerate() { 1019 for (index, assist) in assists.into_iter().enumerate() {
@@ -1052,22 +1058,68 @@ pub(crate) fn handle_code_action_resolve(
1052 .only 1058 .only
1053 .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect()); 1059 .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect());
1054 1060
1061 let (assist_index, assist_resolve) = match parse_action_id(&params.id) {
1062 Ok(parsed_data) => parsed_data,
1063 Err(e) => {
1064 return Err(LspError::new(
1065 ErrorCode::InvalidParams as i32,
1066 format!("Failed to parse action id string '{}': {}", params.id, e),
1067 )
1068 .into())
1069 }
1070 };
1071
1072 let expected_assist_id = assist_resolve.assist_id.clone();
1073 let expected_kind = assist_resolve.assist_kind;
1074
1055 let assists = snap.analysis.assists_with_fixes( 1075 let assists = snap.analysis.assists_with_fixes(
1056 &assists_config, 1076 &assists_config,
1057 &snap.config.diagnostics(), 1077 &snap.config.diagnostics(),
1058 true, 1078 AssistResolveStrategy::Single(assist_resolve),
1059 frange, 1079 frange,
1060 )?; 1080 )?;
1061 1081
1062 let (id, index) = split_once(&params.id, ':').unwrap(); 1082 let assist = match assists.get(assist_index) {
1063 let index = index.parse::<usize>().unwrap(); 1083 Some(assist) => assist,
1064 let assist = &assists[index]; 1084 None => return Err(LspError::new(
1065 assert!(assist.id.0 == id); 1085 ErrorCode::InvalidParams as i32,
1086 format!(
1087 "Failed to find the assist for index {} provided by the resolve request. Resolve request assist id: {}",
1088 assist_index, params.id,
1089 ),
1090 )
1091 .into())
1092 };
1093 if assist.id.0 != expected_assist_id || assist.id.1 != expected_kind {
1094 return Err(LspError::new(
1095 ErrorCode::InvalidParams as i32,
1096 format!(
1097 "Mismatching assist at index {} for the resolve parameters given. Resolve request assist id: {}, actual id: {:?}.",
1098 assist_index, params.id, assist.id
1099 ),
1100 )
1101 .into());
1102 }
1066 let edit = to_proto::code_action(&snap, assist.clone(), None)?.edit; 1103 let edit = to_proto::code_action(&snap, assist.clone(), None)?.edit;
1067 code_action.edit = edit; 1104 code_action.edit = edit;
1068 Ok(code_action) 1105 Ok(code_action)
1069} 1106}
1070 1107
1108fn parse_action_id(action_id: &str) -> Result<(usize, SingleResolve), String> {
1109 let id_parts = action_id.split(':').collect_vec();
1110 match id_parts.as_slice() {
1111 &[assist_id_string, assist_kind_string, index_string] => {
1112 let assist_kind: AssistKind = assist_kind_string.parse()?;
1113 let index: usize = match index_string.parse() {
1114 Ok(index) => index,
1115 Err(e) => return Err(format!("Incorrect index string: {}", e)),
1116 };
1117 Ok((index, SingleResolve { assist_id: assist_id_string.to_string(), assist_kind }))
1118 }
1119 _ => Err("Action id contains incorrect number of segments".to_string()),
1120 }
1121}
1122
1071pub(crate) fn handle_code_lens( 1123pub(crate) fn handle_code_lens(
1072 snap: GlobalStateSnapshot, 1124 snap: GlobalStateSnapshot,
1073 params: lsp_types::CodeLensParams, 1125 params: lsp_types::CodeLensParams,
@@ -1182,7 +1234,7 @@ pub(crate) fn publish_diagnostics(
1182 1234
1183 let diagnostics: Vec<Diagnostic> = snap 1235 let diagnostics: Vec<Diagnostic> = snap
1184 .analysis 1236 .analysis
1185 .diagnostics(&snap.config.diagnostics(), false, file_id)? 1237 .diagnostics(&snap.config.diagnostics(), AssistResolveStrategy::None, file_id)?
1186 .into_iter() 1238 .into_iter()
1187 .map(|d| Diagnostic { 1239 .map(|d| Diagnostic {
1188 range: to_proto::range(&line_index, d.range), 1240 range: to_proto::range(&line_index, d.range),
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
new file mode 100644
index 000000000..3dcbe397a
--- /dev/null
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -0,0 +1,184 @@
1//! Fully integrated benchmarks for rust-analyzer, which load real cargo
2//! projects.
3//!
4//! The benchmark here is used to debug specific performance regressions. If you
5//! notice that, eg, completion is slow in some specific case, you can modify
6//! code here exercise this specific completion, and thus have a fast
7//! edit/compile/test cycle.
8//!
9//! Note that "Rust Analyzer: Run" action does not allow running a single test
10//! in release mode in VS Code. There's however "Rust Analyzer: Copy Run Command Line"
11//! which you can use to paste the command in terminal and add `--release` manually.
12
13use std::{convert::TryFrom, sync::Arc};
14
15use ide::{Change, CompletionConfig, FilePosition, TextSize};
16use ide_db::helpers::{insert_use::InsertUseConfig, merge_imports::MergeBehavior, SnippetCap};
17use test_utils::project_root;
18use vfs::{AbsPathBuf, VfsPath};
19
20use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig};
21
22#[test]
23fn integrated_highlighting_benchmark() {
24 if std::env::var("RUN_SLOW_BENCHES").is_err() {
25 return;
26 }
27
28 // Load rust-analyzer itself.
29 let workspace_to_load = project_root();
30 let file = "./crates/ide_db/src/apply_change.rs";
31
32 let cargo_config = Default::default();
33 let load_cargo_config = LoadCargoConfig {
34 load_out_dirs_from_check: true,
35 wrap_rustc: false,
36 with_proc_macro: false,
37 };
38
39 let (mut host, vfs, _proc_macro) = {
40 let _it = stdx::timeit("workspace loading");
41 load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
42 };
43
44 let file_id = {
45 let file = workspace_to_load.join(file);
46 let path = VfsPath::from(AbsPathBuf::assert(file));
47 vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
48 };
49
50 {
51 let _it = stdx::timeit("initial");
52 let analysis = host.analysis();
53 analysis.highlight_as_html(file_id, false).unwrap();
54 }
55
56 profile::init_from("*>100");
57 // let _s = profile::heartbeat_span();
58
59 {
60 let _it = stdx::timeit("change");
61 let mut text = host.analysis().file_text(file_id).unwrap().to_string();
62 text.push_str("\npub fn _dummy() {}\n");
63 let mut change = Change::new();
64 change.change_file(file_id, Some(Arc::new(text)));
65 host.apply_change(change);
66 }
67
68 {
69 let _it = stdx::timeit("after change");
70 let _span = profile::cpu_span();
71 let analysis = host.analysis();
72 analysis.highlight_as_html(file_id, false).unwrap();
73 }
74}
75
76#[test]
77fn integrated_completion_benchmark() {
78 if std::env::var("RUN_SLOW_BENCHES").is_err() {
79 return;
80 }
81
82 // Load rust-analyzer itself.
83 let workspace_to_load = project_root();
84 let file = "./crates/hir/src/lib.rs";
85
86 let cargo_config = Default::default();
87 let load_cargo_config = LoadCargoConfig {
88 load_out_dirs_from_check: true,
89 wrap_rustc: false,
90 with_proc_macro: false,
91 };
92
93 let (mut host, vfs, _proc_macro) = {
94 let _it = stdx::timeit("workspace loading");
95 load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
96 };
97
98 let file_id = {
99 let file = workspace_to_load.join(file);
100 let path = VfsPath::from(AbsPathBuf::assert(file));
101 vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
102 };
103
104 {
105 let _it = stdx::timeit("initial");
106 let analysis = host.analysis();
107 analysis.highlight_as_html(file_id, false).unwrap();
108 }
109
110 profile::init_from("*>5");
111 // let _s = profile::heartbeat_span();
112
113 let completion_offset = {
114 let _it = stdx::timeit("change");
115 let mut text = host.analysis().file_text(file_id).unwrap().to_string();
116 let completion_offset =
117 patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
118 + "sel".len();
119 let mut change = Change::new();
120 change.change_file(file_id, Some(Arc::new(text)));
121 host.apply_change(change);
122 completion_offset
123 };
124
125 {
126 let _it = stdx::timeit("unqualified path completion");
127 let _span = profile::cpu_span();
128 let analysis = host.analysis();
129 let config = CompletionConfig {
130 enable_postfix_completions: true,
131 enable_imports_on_the_fly: true,
132 add_call_parenthesis: true,
133 add_call_argument_snippets: true,
134 snippet_cap: SnippetCap::new(true),
135 insert_use: InsertUseConfig {
136 merge: Some(MergeBehavior::Full),
137 prefix_kind: hir::PrefixKind::ByCrate,
138 group: true,
139 },
140 };
141 let position =
142 FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
143 analysis.completions(&config, position).unwrap();
144 }
145
146 let completion_offset = {
147 let _it = stdx::timeit("change");
148 let mut text = host.analysis().file_text(file_id).unwrap().to_string();
149 let completion_offset =
150 patch(&mut text, "sel;\ndb.struct_data(self.id)", "self.;\ndb.struct_data(self.id)")
151 + "self.".len();
152 let mut change = Change::new();
153 change.change_file(file_id, Some(Arc::new(text)));
154 host.apply_change(change);
155 completion_offset
156 };
157
158 {
159 let _it = stdx::timeit("dot completion");
160 let _span = profile::cpu_span();
161 let analysis = host.analysis();
162 let config = CompletionConfig {
163 enable_postfix_completions: true,
164 enable_imports_on_the_fly: true,
165 add_call_parenthesis: true,
166 add_call_argument_snippets: true,
167 snippet_cap: SnippetCap::new(true),
168 insert_use: InsertUseConfig {
169 merge: Some(MergeBehavior::Full),
170 prefix_kind: hir::PrefixKind::ByCrate,
171 group: true,
172 },
173 };
174 let position =
175 FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
176 analysis.completions(&config, position).unwrap();
177 }
178}
179
180fn patch(what: &mut String, from: &str, to: &str) -> usize {
181 let idx = what.find(from).unwrap();
182 *what = what.replacen(from, to, 1);
183 idx
184}
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs
index d9a5030a0..da7e24bec 100644
--- a/crates/rust-analyzer/src/lib.rs
+++ b/crates/rust-analyzer/src/lib.rs
@@ -40,7 +40,7 @@ pub mod lsp_ext;
40pub mod config; 40pub mod config;
41 41
42#[cfg(test)] 42#[cfg(test)]
43mod benchmarks; 43mod integrated_benchmarks;
44 44
45use serde::de::DeserializeOwned; 45use serde::de::DeserializeOwned;
46use std::fmt; 46use std::fmt;
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index a766aacad..ce7ece559 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -8,8 +8,7 @@ use std::{
8 8
9use always_assert::always; 9use always_assert::always;
10use crossbeam_channel::{select, Receiver}; 10use crossbeam_channel::{select, Receiver};
11use ide::PrimeCachesProgress; 11use ide::{FileId, PrimeCachesProgress};
12use ide::{Canceled, FileId};
13use ide_db::base_db::VfsPath; 12use ide_db::base_db::VfsPath;
14use lsp_server::{Connection, Notification, Request, Response}; 13use lsp_server::{Connection, Notification, Request, Response};
15use lsp_types::notification::Notification as _; 14use lsp_types::notification::Notification as _;
@@ -295,6 +294,8 @@ impl GlobalState {
295 state = Progress::End; 294 state = Progress::End;
296 message = None; 295 message = None;
297 fraction = 1.0; 296 fraction = 1.0;
297
298 self.prime_caches_queue.op_completed(());
298 } 299 }
299 }; 300 };
300 301
@@ -711,18 +712,23 @@ impl GlobalState {
711 } 712 }
712 fn update_file_notifications_on_threadpool(&mut self) { 713 fn update_file_notifications_on_threadpool(&mut self) {
713 self.maybe_update_diagnostics(); 714 self.maybe_update_diagnostics();
715
716 // Ensure that only one cache priming task can run at a time
717 self.prime_caches_queue.request_op(());
718 if self.prime_caches_queue.should_start_op().is_none() {
719 return;
720 }
721
714 self.task_pool.handle.spawn_with_sender({ 722 self.task_pool.handle.spawn_with_sender({
715 let snap = self.snapshot(); 723 let snap = self.snapshot();
716 move |sender| { 724 move |sender| {
717 snap.analysis 725 let cb = |progress| {
718 .prime_caches(|progress| { 726 sender.send(Task::PrimeCaches(progress)).unwrap();
719 sender.send(Task::PrimeCaches(progress)).unwrap(); 727 };
720 }) 728 match snap.analysis.prime_caches(cb) {
721 .unwrap_or_else(|_: Canceled| { 729 Ok(()) => (),
722 // Pretend that we're done, so that the progress bar is removed. Otherwise 730 Err(_canceled) => (),
723 // the editor may complain about it already existing. 731 }
724 sender.send(Task::PrimeCaches(PrimeCachesProgress::Finished)).unwrap()
725 });
726 } 732 }
727 }); 733 });
728 } 734 }
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs
index 5eff036ec..ecf6fd12f 100644
--- a/crates/rust-analyzer/src/to_proto.rs
+++ b/crates/rust-analyzer/src/to_proto.rs
@@ -898,7 +898,7 @@ pub(crate) fn code_action(
898 (Some(it), _) => res.edit = Some(snippet_workspace_edit(snap, it)?), 898 (Some(it), _) => res.edit = Some(snippet_workspace_edit(snap, it)?),
899 (None, Some((index, code_action_params))) => { 899 (None, Some((index, code_action_params))) => {
900 res.data = Some(lsp_ext::CodeActionData { 900 res.data = Some(lsp_ext::CodeActionData {
901 id: format!("{}:{}", assist.id.0, index.to_string()), 901 id: format!("{}:{}:{}", assist.id.0, assist.id.1.name(), index),
902 code_action_params, 902 code_action_params,
903 }); 903 });
904 } 904 }
diff --git a/docs/dev/style.md b/docs/dev/style.md
index 6ab60b50e..00de7a711 100644
--- a/docs/dev/style.md
+++ b/docs/dev/style.md
@@ -449,6 +449,39 @@ fn query_all(name: String, case_sensitive: bool) -> Vec<Item> { ... }
449fn query_first(name: String, case_sensitive: bool) -> Option<Item> { ... } 449fn query_first(name: String, case_sensitive: bool) -> Option<Item> { ... }
450``` 450```
451 451
452## Prefer Separate Functions Over Parameters
453
454If a function has a `bool` or an `Option` parameter, and it is always called with `true`, `false`, `Some` and `None` literals, split the function in two.
455
456```rust
457// GOOD
458fn caller_a() {
459 foo()
460}
461
462fn caller_b() {
463 foo_with_bar(Bar::new())
464}
465
466fn foo() { ... }
467fn foo_with_bar(bar: Bar) { ... }
468
469// BAD
470fn caller_a() {
471 foo(None)
472}
473
474fn caller_b() {
475 foo(Some(Bar::new()))
476}
477
478fn foo(bar: Option<Bar>) { ... }
479```
480
481**Rationale:** more often than not, such functions display "`false sharing`" -- they have additional `if` branching inside for two different cases.
482Splitting the two different control flows into two functions simplifies each path, and removes cross-dependencies between the two paths.
483If there's common code between `foo` and `foo_with_bar`, extract *that* into a common helper.
484
452## Avoid Monomorphization 485## Avoid Monomorphization
453 486
454Avoid making a lot of code type parametric, *especially* on the boundaries between crates. 487Avoid making a lot of code type parametric, *especially* on the boundaries between crates.
diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc
index 54195adb7..58722aaa3 100644
--- a/docs/user/manual.adoc
+++ b/docs/user/manual.adoc
@@ -611,6 +611,41 @@ For example, mutable bindings are underlined by default and you can override thi
611} 611}
612---- 612----
613 613
614Most themes don't support styling unsafe operations differently yet. You can fix this by adding overrides for the rules `operator.unsafe`, `function.unsafe`, and `method.unsafe`:
615
616[source,jsonc]
617----
618{
619 "editor.semanticTokenColorCustomizations": {
620 "rules": {
621 "operator.unsafe": "#ff6600",
622 "function.unsafe": "#ff6600",
623 "method.unsafe": "#ff6600"
624 }
625 },
626}
627----
628
629In addition to the top-level rules you can specify overrides for specific themes. For example, if you wanted to use a darker text color on a specific light theme, you might write:
630
631[source,jsonc]
632----
633{
634 "editor.semanticTokenColorCustomizations": {
635 "rules": {
636 "operator.unsafe": "#ff6600"
637 },
638 "[Ayu Light]": {
639 "rules": {
640 "operator.unsafe": "#572300"
641 }
642 }
643 },
644}
645----
646
647Make sure you include the brackets around the theme name. For example, use `"[Ayu Light]"` to customize the theme Ayu Light.
648
614==== Special `when` clause context for keybindings. 649==== Special `when` clause context for keybindings.
615You may use `inRustProject` context to configure keybindings for rust projects only. 650You may use `inRustProject` context to configure keybindings for rust projects only.
616For example: 651For example:
diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs
index 12a7fea1e..b6aa2f52a 100644
--- a/xtask/src/dist.rs
+++ b/xtask/src/dist.rs
@@ -66,6 +66,7 @@ fn dist_client(version: &str, release_tag: &str) -> Result<()> {
66 66
67fn dist_server(release_channel: &str) -> Result<()> { 67fn dist_server(release_channel: &str) -> Result<()> {
68 let _e = pushenv("RUST_ANALYZER_CHANNEL", release_channel); 68 let _e = pushenv("RUST_ANALYZER_CHANNEL", release_channel);
69 let _e = pushenv("CARGO_PROFILE_RELEASE_LTO", "true");
69 let target = get_target(); 70 let target = get_target();
70 if target.contains("-linux-gnu") || target.contains("-linux-musl") { 71 if target.contains("-linux-gnu") || target.contains("-linux-musl") {
71 env::set_var("CC", "clang"); 72 env::set_var("CC", "clang");
diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs
index f80a5dd16..69b3cb9c1 100644
--- a/xtask/src/flags.rs
+++ b/xtask/src/flags.rs
@@ -28,7 +28,6 @@ xflags::xflags! {
28 } 28 }
29 29
30 cmd fuzz-tests {} 30 cmd fuzz-tests {}
31 cmd pre-cache {}
32 31
33 cmd release { 32 cmd release {
34 optional --dry-run 33 optional --dry-run
@@ -62,7 +61,6 @@ pub enum XtaskCmd {
62 Help(Help), 61 Help(Help),
63 Install(Install), 62 Install(Install),
64 FuzzTests(FuzzTests), 63 FuzzTests(FuzzTests),
65 PreCache(PreCache),
66 Release(Release), 64 Release(Release),
67 Promote(Promote), 65 Promote(Promote),
68 Dist(Dist), 66 Dist(Dist),
@@ -88,9 +86,6 @@ pub struct Install {
88pub struct FuzzTests; 86pub struct FuzzTests;
89 87
90#[derive(Debug)] 88#[derive(Debug)]
91pub struct PreCache;
92
93#[derive(Debug)]
94pub struct Release { 89pub struct Release {
95 pub dry_run: bool, 90 pub dry_run: bool,
96} 91}
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index ce3353410..d0bef7b7a 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -18,7 +18,6 @@ mod install;
18mod release; 18mod release;
19mod dist; 19mod dist;
20mod metrics; 20mod metrics;
21mod pre_cache;
22 21
23use anyhow::{bail, Result}; 22use anyhow::{bail, Result};
24use std::{ 23use std::{
@@ -39,7 +38,6 @@ fn main() -> Result<()> {
39 } 38 }
40 flags::XtaskCmd::Install(cmd) => cmd.run(), 39 flags::XtaskCmd::Install(cmd) => cmd.run(),
41 flags::XtaskCmd::FuzzTests(_) => run_fuzzer(), 40 flags::XtaskCmd::FuzzTests(_) => run_fuzzer(),
42 flags::XtaskCmd::PreCache(cmd) => cmd.run(),
43 flags::XtaskCmd::Release(cmd) => cmd.run(), 41 flags::XtaskCmd::Release(cmd) => cmd.run(),
44 flags::XtaskCmd::Promote(cmd) => cmd.run(), 42 flags::XtaskCmd::Promote(cmd) => cmd.run(),
45 flags::XtaskCmd::Dist(cmd) => cmd.run(), 43 flags::XtaskCmd::Dist(cmd) => cmd.run(),
diff --git a/xtask/src/pre_cache.rs b/xtask/src/pre_cache.rs
deleted file mode 100644
index b456224fd..000000000
--- a/xtask/src/pre_cache.rs
+++ /dev/null
@@ -1,79 +0,0 @@
1use std::{
2 fs::FileType,
3 path::{Path, PathBuf},
4};
5
6use anyhow::Result;
7use xshell::rm_rf;
8
9use crate::flags;
10
11impl flags::PreCache {
12 /// Cleans the `./target` dir after the build such that only
13 /// dependencies are cached on CI.
14 pub(crate) fn run(self) -> Result<()> {
15 let slow_tests_cookie = Path::new("./target/.slow_tests_cookie");
16 if !slow_tests_cookie.exists() {
17 panic!("slow tests were skipped on CI!")
18 }
19 rm_rf(slow_tests_cookie)?;
20
21 for path in read_dir("./target/debug", FileType::is_file)? {
22 // Can't delete yourself on windows :-(
23 if !path.ends_with("xtask.exe") {
24 rm_rf(&path)?
25 }
26 }
27
28 rm_rf("./target/.rustc_info.json")?;
29
30 let to_delete = read_dir("./crates", FileType::is_dir)?
31 .into_iter()
32 .map(|path| path.file_name().unwrap().to_string_lossy().replace('-', "_"))
33 .collect::<Vec<_>>();
34
35 for &dir in ["./target/debug/deps", "target/debug/.fingerprint"].iter() {
36 for path in read_dir(dir, |_file_type| true)? {
37 if path.ends_with("xtask.exe") {
38 continue;
39 }
40 let file_name = path.file_name().unwrap().to_string_lossy();
41 let (stem, _) = match rsplit_once(&file_name, '-') {
42 Some(it) => it,
43 None => {
44 rm_rf(path)?;
45 continue;
46 }
47 };
48 let stem = stem.replace('-', "_");
49 if to_delete.contains(&stem) {
50 rm_rf(path)?;
51 }
52 }
53 }
54
55 Ok(())
56 }
57}
58fn read_dir(path: impl AsRef<Path>, cond: impl Fn(&FileType) -> bool) -> Result<Vec<PathBuf>> {
59 read_dir_impl(path.as_ref(), &cond)
60}
61
62fn read_dir_impl(path: &Path, cond: &dyn Fn(&FileType) -> bool) -> Result<Vec<PathBuf>> {
63 let mut res = Vec::new();
64 for entry in path.read_dir()? {
65 let entry = entry?;
66 let file_type = entry.file_type()?;
67 if cond(&file_type) {
68 res.push(entry.path())
69 }
70 }
71 Ok(res)
72}
73
74fn rsplit_once(haystack: &str, delim: char) -> Option<(&str, &str)> {
75 let mut split = haystack.rsplitn(2, delim);
76 let suffix = split.next()?;
77 let prefix = split.next()?;
78 Some((prefix, suffix))
79}