diff options
363 files changed, 7355 insertions, 4552 deletions
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 395ce1ef4..77c92512a 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml | |||
@@ -1,4 +1,4 @@ | |||
1 | name: CI-Release | 1 | name: release |
2 | on: | 2 | on: |
3 | push: | 3 | push: |
4 | branches: | 4 | branches: |
@@ -132,7 +132,9 @@ jobs: | |||
132 | 132 | ||
133 | - name: Create Release | 133 | - name: Create Release |
134 | id: create_release | 134 | id: create_release |
135 | uses: actions/create-release@v1 | 135 | # uses: actions/create-release@v1 |
136 | # https://github.com/actions/create-release/pull/32 | ||
137 | uses: fleskesvor/create-release@1a72e235c178bf2ae6c51a8ae36febc24568c5fe | ||
136 | env: | 138 | env: |
137 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | 139 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} |
138 | with: | 140 | with: |
diff --git a/Cargo.lock b/Cargo.lock index db566fdb2..e29ff898d 100644 --- a/Cargo.lock +++ b/Cargo.lock | |||
@@ -2,863 +2,951 @@ | |||
2 | # It is not intended for manual editing. | 2 | # It is not intended for manual editing. |
3 | [[package]] | 3 | [[package]] |
4 | name = "aho-corasick" | 4 | name = "aho-corasick" |
5 | version = "0.7.6" | 5 | version = "0.7.8" |
6 | source = "registry+https://github.com/rust-lang/crates.io-index" | 6 | source = "registry+https://github.com/rust-lang/crates.io-index" |
7 | checksum = "743ad5a418686aad3b87fd14c43badd828cf26e214a00f92a384291cf22e1811" | ||
7 | dependencies = [ | 8 | dependencies = [ |
8 | "memchr 2.3.0 (registry+https://github.com/rust-lang/crates.io-index)", | 9 | "memchr", |
9 | ] | 10 | ] |
10 | 11 | ||
11 | [[package]] | 12 | [[package]] |
12 | name = "anyhow" | 13 | name = "anyhow" |
13 | version = "1.0.26" | 14 | version = "1.0.26" |
14 | source = "registry+https://github.com/rust-lang/crates.io-index" | 15 | source = "registry+https://github.com/rust-lang/crates.io-index" |
16 | checksum = "7825f6833612eb2414095684fcf6c635becf3ce97fe48cf6421321e93bfbd53c" | ||
15 | 17 | ||
16 | [[package]] | 18 | [[package]] |
17 | name = "anymap" | 19 | name = "anymap" |
18 | version = "0.12.1" | 20 | version = "0.12.1" |
19 | source = "registry+https://github.com/rust-lang/crates.io-index" | 21 | source = "registry+https://github.com/rust-lang/crates.io-index" |
22 | checksum = "33954243bd79057c2de7338850b85983a44588021f8a5fee574a8888c6de4344" | ||
20 | 23 | ||
21 | [[package]] | 24 | [[package]] |
22 | name = "arrayvec" | 25 | name = "arrayvec" |
23 | version = "0.5.1" | 26 | version = "0.5.1" |
24 | source = "registry+https://github.com/rust-lang/crates.io-index" | 27 | source = "registry+https://github.com/rust-lang/crates.io-index" |
28 | checksum = "cff77d8686867eceff3105329d4698d96c2391c176d5d03adc90c7389162b5b8" | ||
25 | 29 | ||
26 | [[package]] | 30 | [[package]] |
27 | name = "atty" | 31 | name = "atty" |
28 | version = "0.2.14" | 32 | version = "0.2.14" |
29 | source = "registry+https://github.com/rust-lang/crates.io-index" | 33 | source = "registry+https://github.com/rust-lang/crates.io-index" |
34 | checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" | ||
30 | dependencies = [ | 35 | dependencies = [ |
31 | "hermit-abi 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", | 36 | "hermit-abi", |
32 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 37 | "libc", |
33 | "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", | 38 | "winapi 0.3.8", |
34 | ] | 39 | ] |
35 | 40 | ||
36 | [[package]] | 41 | [[package]] |
37 | name = "autocfg" | 42 | name = "autocfg" |
38 | version = "0.1.7" | 43 | version = "0.1.7" |
39 | source = "registry+https://github.com/rust-lang/crates.io-index" | 44 | source = "registry+https://github.com/rust-lang/crates.io-index" |
45 | checksum = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2" | ||
40 | 46 | ||
41 | [[package]] | 47 | [[package]] |
42 | name = "autocfg" | 48 | name = "autocfg" |
43 | version = "1.0.0" | 49 | version = "1.0.0" |
44 | source = "registry+https://github.com/rust-lang/crates.io-index" | 50 | source = "registry+https://github.com/rust-lang/crates.io-index" |
51 | checksum = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d" | ||
45 | 52 | ||
46 | [[package]] | 53 | [[package]] |
47 | name = "backtrace" | 54 | name = "backtrace" |
48 | version = "0.3.42" | 55 | version = "0.3.43" |
49 | source = "registry+https://github.com/rust-lang/crates.io-index" | 56 | source = "registry+https://github.com/rust-lang/crates.io-index" |
57 | checksum = "7f80256bc78f67e7df7e36d77366f636ed976895d91fe2ab9efa3973e8fe8c4f" | ||
50 | dependencies = [ | 58 | dependencies = [ |
51 | "backtrace-sys 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)", | 59 | "backtrace-sys", |
52 | "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", | 60 | "cfg-if", |
53 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 61 | "libc", |
54 | "rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", | 62 | "rustc-demangle", |
55 | ] | 63 | ] |
56 | 64 | ||
57 | [[package]] | 65 | [[package]] |
58 | name = "backtrace-sys" | 66 | name = "backtrace-sys" |
59 | version = "0.1.32" | 67 | version = "0.1.32" |
60 | source = "registry+https://github.com/rust-lang/crates.io-index" | 68 | source = "registry+https://github.com/rust-lang/crates.io-index" |
69 | checksum = "5d6575f128516de27e3ce99689419835fce9643a9b215a14d2b5b685be018491" | ||
61 | dependencies = [ | 70 | dependencies = [ |
62 | "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)", | 71 | "cc", |
63 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 72 | "libc", |
64 | ] | 73 | ] |
65 | 74 | ||
66 | [[package]] | 75 | [[package]] |
67 | name = "base64" | 76 | name = "base64" |
68 | version = "0.11.0" | 77 | version = "0.11.0" |
69 | source = "registry+https://github.com/rust-lang/crates.io-index" | 78 | source = "registry+https://github.com/rust-lang/crates.io-index" |
79 | checksum = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7" | ||
70 | 80 | ||
71 | [[package]] | 81 | [[package]] |
72 | name = "bit-set" | 82 | name = "bit-set" |
73 | version = "0.5.1" | 83 | version = "0.5.1" |
74 | source = "registry+https://github.com/rust-lang/crates.io-index" | 84 | source = "registry+https://github.com/rust-lang/crates.io-index" |
85 | checksum = "e84c238982c4b1e1ee668d136c510c67a13465279c0cb367ea6baf6310620a80" | ||
75 | dependencies = [ | 86 | dependencies = [ |
76 | "bit-vec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", | 87 | "bit-vec", |
77 | ] | 88 | ] |
78 | 89 | ||
79 | [[package]] | 90 | [[package]] |
80 | name = "bit-vec" | 91 | name = "bit-vec" |
81 | version = "0.5.1" | 92 | version = "0.5.1" |
82 | source = "registry+https://github.com/rust-lang/crates.io-index" | 93 | source = "registry+https://github.com/rust-lang/crates.io-index" |
94 | checksum = "f59bbe95d4e52a6398ec21238d31577f2b28a9d86807f06ca59d191d8440d0bb" | ||
83 | 95 | ||
84 | [[package]] | 96 | [[package]] |
85 | name = "bitflags" | 97 | name = "bitflags" |
86 | version = "1.2.1" | 98 | version = "1.2.1" |
87 | source = "registry+https://github.com/rust-lang/crates.io-index" | 99 | source = "registry+https://github.com/rust-lang/crates.io-index" |
100 | checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" | ||
88 | 101 | ||
89 | [[package]] | 102 | [[package]] |
90 | name = "bstr" | 103 | name = "bstr" |
91 | version = "0.2.10" | 104 | version = "0.2.11" |
92 | source = "registry+https://github.com/rust-lang/crates.io-index" | 105 | source = "registry+https://github.com/rust-lang/crates.io-index" |
106 | checksum = "502ae1441a0a5adb8fbd38a5955a6416b9493e92b465de5e4a9bde6a539c2c48" | ||
93 | dependencies = [ | 107 | dependencies = [ |
94 | "memchr 2.3.0 (registry+https://github.com/rust-lang/crates.io-index)", | 108 | "memchr", |
95 | ] | 109 | ] |
96 | 110 | ||
97 | [[package]] | 111 | [[package]] |
98 | name = "byteorder" | 112 | name = "byteorder" |
99 | version = "1.3.2" | 113 | version = "1.3.2" |
100 | source = "registry+https://github.com/rust-lang/crates.io-index" | 114 | source = "registry+https://github.com/rust-lang/crates.io-index" |
115 | checksum = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5" | ||
101 | 116 | ||
102 | [[package]] | 117 | [[package]] |
103 | name = "c2-chacha" | 118 | name = "c2-chacha" |
104 | version = "0.2.3" | 119 | version = "0.2.3" |
105 | source = "registry+https://github.com/rust-lang/crates.io-index" | 120 | source = "registry+https://github.com/rust-lang/crates.io-index" |
121 | checksum = "214238caa1bf3a496ec3392968969cab8549f96ff30652c9e56885329315f6bb" | ||
106 | dependencies = [ | 122 | dependencies = [ |
107 | "ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", | 123 | "ppv-lite86", |
108 | ] | 124 | ] |
109 | 125 | ||
110 | [[package]] | 126 | [[package]] |
111 | name = "cargo_metadata" | 127 | name = "cargo_metadata" |
112 | version = "0.9.1" | 128 | version = "0.9.1" |
113 | source = "registry+https://github.com/rust-lang/crates.io-index" | 129 | source = "registry+https://github.com/rust-lang/crates.io-index" |
130 | checksum = "46e3374c604fb39d1a2f35ed5e4a4e30e60d01fab49446e08f1b3e9a90aef202" | ||
114 | dependencies = [ | 131 | dependencies = [ |
115 | "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", | 132 | "semver", |
116 | "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", | 133 | "serde", |
117 | "serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", | 134 | "serde_derive", |
118 | "serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)", | 135 | "serde_json", |
119 | ] | 136 | ] |
120 | 137 | ||
121 | [[package]] | 138 | [[package]] |
122 | name = "cc" | 139 | name = "cc" |
123 | version = "1.0.50" | 140 | version = "1.0.50" |
124 | source = "registry+https://github.com/rust-lang/crates.io-index" | 141 | source = "registry+https://github.com/rust-lang/crates.io-index" |
142 | checksum = "95e28fa049fda1c330bcf9d723be7663a899c4679724b34c81e9f5a326aab8cd" | ||
125 | 143 | ||
126 | [[package]] | 144 | [[package]] |
127 | name = "cfg-if" | 145 | name = "cfg-if" |
128 | version = "0.1.10" | 146 | version = "0.1.10" |
129 | source = "registry+https://github.com/rust-lang/crates.io-index" | 147 | source = "registry+https://github.com/rust-lang/crates.io-index" |
148 | checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" | ||
130 | 149 | ||
131 | [[package]] | 150 | [[package]] |
132 | name = "chalk-derive" | 151 | name = "chalk-derive" |
133 | version = "0.1.0" | 152 | version = "0.1.0" |
134 | source = "git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5#ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" | 153 | source = "git+https://github.com/rust-lang/chalk.git?rev=af48f302a1f571b3ca418f7c5aa639a144a34f75#af48f302a1f571b3ca418f7c5aa639a144a34f75" |
135 | dependencies = [ | 154 | dependencies = [ |
136 | "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", | 155 | "proc-macro2", |
137 | "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", | 156 | "quote", |
138 | "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", | 157 | "syn", |
139 | ] | 158 | ] |
140 | 159 | ||
141 | [[package]] | 160 | [[package]] |
142 | name = "chalk-engine" | 161 | name = "chalk-engine" |
143 | version = "0.9.0" | 162 | version = "0.9.0" |
144 | source = "git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5#ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" | 163 | source = "git+https://github.com/rust-lang/chalk.git?rev=af48f302a1f571b3ca418f7c5aa639a144a34f75#af48f302a1f571b3ca418f7c5aa639a144a34f75" |
145 | dependencies = [ | 164 | dependencies = [ |
146 | "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)", | 165 | "chalk-macros", |
147 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 166 | "rustc-hash", |
148 | ] | 167 | ] |
149 | 168 | ||
150 | [[package]] | 169 | [[package]] |
151 | name = "chalk-ir" | 170 | name = "chalk-ir" |
152 | version = "0.1.0" | 171 | version = "0.1.0" |
153 | source = "git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5#ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" | 172 | source = "git+https://github.com/rust-lang/chalk.git?rev=af48f302a1f571b3ca418f7c5aa639a144a34f75#af48f302a1f571b3ca418f7c5aa639a144a34f75" |
154 | dependencies = [ | 173 | dependencies = [ |
155 | "chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)", | 174 | "chalk-derive", |
156 | "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)", | 175 | "chalk-engine", |
157 | "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)", | 176 | "chalk-macros", |
158 | "lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)", | 177 | "lalrpop-intern", |
159 | ] | 178 | ] |
160 | 179 | ||
161 | [[package]] | 180 | [[package]] |
162 | name = "chalk-macros" | 181 | name = "chalk-macros" |
163 | version = "0.1.1" | 182 | version = "0.1.1" |
164 | source = "git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5#ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" | 183 | source = "git+https://github.com/rust-lang/chalk.git?rev=af48f302a1f571b3ca418f7c5aa639a144a34f75#af48f302a1f571b3ca418f7c5aa639a144a34f75" |
165 | dependencies = [ | 184 | dependencies = [ |
166 | "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 185 | "lazy_static", |
167 | ] | 186 | ] |
168 | 187 | ||
169 | [[package]] | 188 | [[package]] |
170 | name = "chalk-rust-ir" | 189 | name = "chalk-rust-ir" |
171 | version = "0.1.0" | 190 | version = "0.1.0" |
172 | source = "git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5#ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" | 191 | source = "git+https://github.com/rust-lang/chalk.git?rev=af48f302a1f571b3ca418f7c5aa639a144a34f75#af48f302a1f571b3ca418f7c5aa639a144a34f75" |
173 | dependencies = [ | 192 | dependencies = [ |
174 | "chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)", | 193 | "chalk-derive", |
175 | "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)", | 194 | "chalk-engine", |
176 | "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)", | 195 | "chalk-ir", |
177 | "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)", | 196 | "chalk-macros", |
178 | ] | 197 | ] |
179 | 198 | ||
180 | [[package]] | 199 | [[package]] |
181 | name = "chalk-solve" | 200 | name = "chalk-solve" |
182 | version = "0.1.0" | 201 | version = "0.1.0" |
183 | source = "git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5#ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" | 202 | source = "git+https://github.com/rust-lang/chalk.git?rev=af48f302a1f571b3ca418f7c5aa639a144a34f75#af48f302a1f571b3ca418f7c5aa639a144a34f75" |
184 | dependencies = [ | 203 | dependencies = [ |
185 | "chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)", | 204 | "chalk-derive", |
186 | "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)", | 205 | "chalk-engine", |
187 | "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)", | 206 | "chalk-ir", |
188 | "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)", | 207 | "chalk-macros", |
189 | "chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)", | 208 | "chalk-rust-ir", |
190 | "ena 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)", | 209 | "ena", |
191 | "itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", | 210 | "itertools", |
192 | "petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", | 211 | "petgraph", |
193 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 212 | "rustc-hash", |
194 | ] | 213 | ] |
195 | 214 | ||
196 | [[package]] | 215 | [[package]] |
197 | name = "clicolors-control" | 216 | name = "clicolors-control" |
198 | version = "1.0.1" | 217 | version = "1.0.1" |
199 | source = "registry+https://github.com/rust-lang/crates.io-index" | 218 | source = "registry+https://github.com/rust-lang/crates.io-index" |
219 | checksum = "90082ee5dcdd64dc4e9e0d37fbf3ee325419e39c0092191e0393df65518f741e" | ||
200 | dependencies = [ | 220 | dependencies = [ |
201 | "atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", | 221 | "atty", |
202 | "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 222 | "lazy_static", |
203 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 223 | "libc", |
204 | "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", | 224 | "winapi 0.3.8", |
205 | ] | 225 | ] |
206 | 226 | ||
207 | [[package]] | 227 | [[package]] |
208 | name = "cloudabi" | 228 | name = "cloudabi" |
209 | version = "0.0.3" | 229 | version = "0.0.3" |
210 | source = "registry+https://github.com/rust-lang/crates.io-index" | 230 | source = "registry+https://github.com/rust-lang/crates.io-index" |
231 | checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" | ||
211 | dependencies = [ | 232 | dependencies = [ |
212 | "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | 233 | "bitflags", |
213 | ] | 234 | ] |
214 | 235 | ||
215 | [[package]] | 236 | [[package]] |
216 | name = "console" | 237 | name = "console" |
217 | version = "0.9.1" | 238 | version = "0.9.2" |
218 | source = "registry+https://github.com/rust-lang/crates.io-index" | 239 | source = "registry+https://github.com/rust-lang/crates.io-index" |
240 | checksum = "45e0f3986890b3acbc782009e2629dfe2baa430ac091519ce3be26164a2ae6c0" | ||
219 | dependencies = [ | 241 | dependencies = [ |
220 | "clicolors-control 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 242 | "clicolors-control", |
221 | "encode_unicode 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", | 243 | "encode_unicode", |
222 | "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 244 | "lazy_static", |
223 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 245 | "libc", |
224 | "regex 1.3.3 (registry+https://github.com/rust-lang/crates.io-index)", | 246 | "regex", |
225 | "termios 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 247 | "termios", |
226 | "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", | 248 | "winapi 0.3.8", |
227 | ] | 249 | ] |
228 | 250 | ||
229 | [[package]] | 251 | [[package]] |
230 | name = "crossbeam" | 252 | name = "crossbeam" |
231 | version = "0.7.3" | 253 | version = "0.7.3" |
232 | source = "registry+https://github.com/rust-lang/crates.io-index" | 254 | source = "registry+https://github.com/rust-lang/crates.io-index" |
255 | checksum = "69323bff1fb41c635347b8ead484a5ca6c3f11914d784170b158d8449ab07f8e" | ||
233 | dependencies = [ | 256 | dependencies = [ |
234 | "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", | 257 | "cfg-if", |
235 | "crossbeam-channel 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 258 | "crossbeam-channel", |
236 | "crossbeam-deque 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", | 259 | "crossbeam-deque", |
237 | "crossbeam-epoch 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", | 260 | "crossbeam-epoch", |
238 | "crossbeam-queue 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | 261 | "crossbeam-queue", |
239 | "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", | 262 | "crossbeam-utils", |
240 | ] | 263 | ] |
241 | 264 | ||
242 | [[package]] | 265 | [[package]] |
243 | name = "crossbeam-channel" | 266 | name = "crossbeam-channel" |
244 | version = "0.4.0" | 267 | version = "0.4.0" |
245 | source = "registry+https://github.com/rust-lang/crates.io-index" | 268 | source = "registry+https://github.com/rust-lang/crates.io-index" |
269 | checksum = "acec9a3b0b3559f15aee4f90746c4e5e293b701c0f7d3925d24e01645267b68c" | ||
246 | dependencies = [ | 270 | dependencies = [ |
247 | "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", | 271 | "crossbeam-utils", |
248 | ] | 272 | ] |
249 | 273 | ||
250 | [[package]] | 274 | [[package]] |
251 | name = "crossbeam-deque" | 275 | name = "crossbeam-deque" |
252 | version = "0.7.2" | 276 | version = "0.7.2" |
253 | source = "registry+https://github.com/rust-lang/crates.io-index" | 277 | source = "registry+https://github.com/rust-lang/crates.io-index" |
278 | checksum = "c3aa945d63861bfe624b55d153a39684da1e8c0bc8fba932f7ee3a3c16cea3ca" | ||
254 | dependencies = [ | 279 | dependencies = [ |
255 | "crossbeam-epoch 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", | 280 | "crossbeam-epoch", |
256 | "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", | 281 | "crossbeam-utils", |
257 | ] | 282 | ] |
258 | 283 | ||
259 | [[package]] | 284 | [[package]] |
260 | name = "crossbeam-epoch" | 285 | name = "crossbeam-epoch" |
261 | version = "0.8.0" | 286 | version = "0.8.0" |
262 | source = "registry+https://github.com/rust-lang/crates.io-index" | 287 | source = "registry+https://github.com/rust-lang/crates.io-index" |
288 | checksum = "5064ebdbf05ce3cb95e45c8b086f72263f4166b29b97f6baff7ef7fe047b55ac" | ||
263 | dependencies = [ | 289 | dependencies = [ |
264 | "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", | 290 | "autocfg 0.1.7", |
265 | "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", | 291 | "cfg-if", |
266 | "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", | 292 | "crossbeam-utils", |
267 | "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 293 | "lazy_static", |
268 | "memoffset 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", | 294 | "memoffset", |
269 | "scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", | 295 | "scopeguard", |
270 | ] | 296 | ] |
271 | 297 | ||
272 | [[package]] | 298 | [[package]] |
273 | name = "crossbeam-queue" | 299 | name = "crossbeam-queue" |
274 | version = "0.2.1" | 300 | version = "0.2.1" |
275 | source = "registry+https://github.com/rust-lang/crates.io-index" | 301 | source = "registry+https://github.com/rust-lang/crates.io-index" |
302 | checksum = "c695eeca1e7173472a32221542ae469b3e9aac3a4fc81f7696bcad82029493db" | ||
276 | dependencies = [ | 303 | dependencies = [ |
277 | "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", | 304 | "cfg-if", |
278 | "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", | 305 | "crossbeam-utils", |
279 | ] | 306 | ] |
280 | 307 | ||
281 | [[package]] | 308 | [[package]] |
282 | name = "crossbeam-utils" | 309 | name = "crossbeam-utils" |
283 | version = "0.7.0" | 310 | version = "0.7.0" |
284 | source = "registry+https://github.com/rust-lang/crates.io-index" | 311 | source = "registry+https://github.com/rust-lang/crates.io-index" |
312 | checksum = "ce446db02cdc3165b94ae73111e570793400d0794e46125cc4056c81cbb039f4" | ||
285 | dependencies = [ | 313 | dependencies = [ |
286 | "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", | 314 | "autocfg 0.1.7", |
287 | "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", | 315 | "cfg-if", |
288 | "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 316 | "lazy_static", |
289 | ] | 317 | ] |
290 | 318 | ||
291 | [[package]] | 319 | [[package]] |
292 | name = "difference" | 320 | name = "difference" |
293 | version = "2.0.0" | 321 | version = "2.0.0" |
294 | source = "registry+https://github.com/rust-lang/crates.io-index" | 322 | source = "registry+https://github.com/rust-lang/crates.io-index" |
323 | checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" | ||
295 | 324 | ||
296 | [[package]] | 325 | [[package]] |
297 | name = "drop_bomb" | 326 | name = "drop_bomb" |
298 | version = "0.1.4" | 327 | version = "0.1.4" |
299 | source = "registry+https://github.com/rust-lang/crates.io-index" | 328 | source = "registry+https://github.com/rust-lang/crates.io-index" |
329 | checksum = "69b26e475fd29098530e709294e94e661974c851aed42512793f120fed4e199f" | ||
300 | 330 | ||
301 | [[package]] | 331 | [[package]] |
302 | name = "dtoa" | 332 | name = "dtoa" |
303 | version = "0.4.4" | 333 | version = "0.4.5" |
304 | source = "registry+https://github.com/rust-lang/crates.io-index" | 334 | source = "registry+https://github.com/rust-lang/crates.io-index" |
335 | checksum = "4358a9e11b9a09cf52383b451b49a169e8d797b68aa02301ff586d70d9661ea3" | ||
305 | 336 | ||
306 | [[package]] | 337 | [[package]] |
307 | name = "either" | 338 | name = "either" |
308 | version = "1.5.3" | 339 | version = "1.5.3" |
309 | source = "registry+https://github.com/rust-lang/crates.io-index" | 340 | source = "registry+https://github.com/rust-lang/crates.io-index" |
341 | checksum = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3" | ||
310 | 342 | ||
311 | [[package]] | 343 | [[package]] |
312 | name = "ena" | 344 | name = "ena" |
313 | version = "0.13.1" | 345 | version = "0.13.1" |
314 | source = "registry+https://github.com/rust-lang/crates.io-index" | 346 | source = "registry+https://github.com/rust-lang/crates.io-index" |
347 | checksum = "8944dc8fa28ce4a38f778bd46bf7d923fe73eed5a439398507246c8e017e6f36" | ||
315 | dependencies = [ | 348 | dependencies = [ |
316 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 349 | "log", |
317 | ] | 350 | ] |
318 | 351 | ||
319 | [[package]] | 352 | [[package]] |
320 | name = "encode_unicode" | 353 | name = "encode_unicode" |
321 | version = "0.3.6" | 354 | version = "0.3.6" |
322 | source = "registry+https://github.com/rust-lang/crates.io-index" | 355 | source = "registry+https://github.com/rust-lang/crates.io-index" |
356 | checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" | ||
323 | 357 | ||
324 | [[package]] | 358 | [[package]] |
325 | name = "env_logger" | 359 | name = "env_logger" |
326 | version = "0.7.1" | 360 | version = "0.7.1" |
327 | source = "registry+https://github.com/rust-lang/crates.io-index" | 361 | source = "registry+https://github.com/rust-lang/crates.io-index" |
362 | checksum = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36" | ||
328 | dependencies = [ | 363 | dependencies = [ |
329 | "humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", | 364 | "log", |
330 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | ||
331 | ] | 365 | ] |
332 | 366 | ||
333 | [[package]] | 367 | [[package]] |
334 | name = "filetime" | 368 | name = "filetime" |
335 | version = "0.2.8" | 369 | version = "0.2.8" |
336 | source = "registry+https://github.com/rust-lang/crates.io-index" | 370 | source = "registry+https://github.com/rust-lang/crates.io-index" |
371 | checksum = "1ff6d4dab0aa0c8e6346d46052e93b13a16cf847b54ed357087c35011048cc7d" | ||
337 | dependencies = [ | 372 | dependencies = [ |
338 | "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", | 373 | "cfg-if", |
339 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 374 | "libc", |
340 | "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", | 375 | "redox_syscall", |
341 | "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", | 376 | "winapi 0.3.8", |
342 | ] | 377 | ] |
343 | 378 | ||
344 | [[package]] | 379 | [[package]] |
345 | name = "fixedbitset" | 380 | name = "fixedbitset" |
346 | version = "0.1.9" | 381 | version = "0.1.9" |
347 | source = "registry+https://github.com/rust-lang/crates.io-index" | 382 | source = "registry+https://github.com/rust-lang/crates.io-index" |
383 | checksum = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33" | ||
348 | 384 | ||
349 | [[package]] | 385 | [[package]] |
350 | name = "fnv" | 386 | name = "fnv" |
351 | version = "1.0.6" | 387 | version = "1.0.6" |
352 | source = "registry+https://github.com/rust-lang/crates.io-index" | 388 | source = "registry+https://github.com/rust-lang/crates.io-index" |
389 | checksum = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3" | ||
353 | 390 | ||
354 | [[package]] | 391 | [[package]] |
355 | name = "format-buf" | 392 | name = "format-buf" |
356 | version = "1.0.0" | 393 | version = "1.0.0" |
357 | source = "registry+https://github.com/rust-lang/crates.io-index" | 394 | source = "registry+https://github.com/rust-lang/crates.io-index" |
395 | checksum = "f7aea5a5909a74969507051a3b17adc84737e31a5f910559892aedce026f4d53" | ||
358 | 396 | ||
359 | [[package]] | 397 | [[package]] |
360 | name = "fs_extra" | 398 | name = "fs_extra" |
361 | version = "1.1.0" | 399 | version = "1.1.0" |
362 | source = "registry+https://github.com/rust-lang/crates.io-index" | 400 | source = "registry+https://github.com/rust-lang/crates.io-index" |
401 | checksum = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674" | ||
363 | 402 | ||
364 | [[package]] | 403 | [[package]] |
365 | name = "fsevent" | 404 | name = "fsevent" |
366 | version = "0.4.0" | 405 | version = "0.4.0" |
367 | source = "registry+https://github.com/rust-lang/crates.io-index" | 406 | source = "registry+https://github.com/rust-lang/crates.io-index" |
407 | checksum = "5ab7d1bd1bd33cc98b0889831b72da23c0aa4df9cec7e0702f46ecea04b35db6" | ||
368 | dependencies = [ | 408 | dependencies = [ |
369 | "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | 409 | "bitflags", |
370 | "fsevent-sys 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 410 | "fsevent-sys", |
371 | ] | 411 | ] |
372 | 412 | ||
373 | [[package]] | 413 | [[package]] |
374 | name = "fsevent-sys" | 414 | name = "fsevent-sys" |
375 | version = "2.0.1" | 415 | version = "2.0.1" |
376 | source = "registry+https://github.com/rust-lang/crates.io-index" | 416 | source = "registry+https://github.com/rust-lang/crates.io-index" |
417 | checksum = "f41b048a94555da0f42f1d632e2e19510084fb8e303b0daa2816e733fb3644a0" | ||
377 | dependencies = [ | 418 | dependencies = [ |
378 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 419 | "libc", |
379 | ] | 420 | ] |
380 | 421 | ||
381 | [[package]] | 422 | [[package]] |
382 | name = "fst" | 423 | name = "fst" |
383 | version = "0.3.5" | 424 | version = "0.3.5" |
384 | source = "registry+https://github.com/rust-lang/crates.io-index" | 425 | source = "registry+https://github.com/rust-lang/crates.io-index" |
426 | checksum = "927fb434ff9f0115b215dc0efd2e4fbdd7448522a92a1aa37c77d6a2f8f1ebd6" | ||
385 | dependencies = [ | 427 | dependencies = [ |
386 | "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", | 428 | "byteorder", |
387 | ] | 429 | ] |
388 | 430 | ||
389 | [[package]] | 431 | [[package]] |
390 | name = "fuchsia-cprng" | 432 | name = "fuchsia-cprng" |
391 | version = "0.1.1" | 433 | version = "0.1.1" |
392 | source = "registry+https://github.com/rust-lang/crates.io-index" | 434 | source = "registry+https://github.com/rust-lang/crates.io-index" |
435 | checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" | ||
393 | 436 | ||
394 | [[package]] | 437 | [[package]] |
395 | name = "fuchsia-zircon" | 438 | name = "fuchsia-zircon" |
396 | version = "0.3.3" | 439 | version = "0.3.3" |
397 | source = "registry+https://github.com/rust-lang/crates.io-index" | 440 | source = "registry+https://github.com/rust-lang/crates.io-index" |
441 | checksum = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" | ||
398 | dependencies = [ | 442 | dependencies = [ |
399 | "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | 443 | "bitflags", |
400 | "fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", | 444 | "fuchsia-zircon-sys", |
401 | ] | 445 | ] |
402 | 446 | ||
403 | [[package]] | 447 | [[package]] |
404 | name = "fuchsia-zircon-sys" | 448 | name = "fuchsia-zircon-sys" |
405 | version = "0.3.3" | 449 | version = "0.3.3" |
406 | source = "registry+https://github.com/rust-lang/crates.io-index" | 450 | source = "registry+https://github.com/rust-lang/crates.io-index" |
451 | checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" | ||
407 | 452 | ||
408 | [[package]] | 453 | [[package]] |
409 | name = "getrandom" | 454 | name = "getrandom" |
410 | version = "0.1.14" | 455 | version = "0.1.14" |
411 | source = "registry+https://github.com/rust-lang/crates.io-index" | 456 | source = "registry+https://github.com/rust-lang/crates.io-index" |
457 | checksum = "7abc8dd8451921606d809ba32e95b6111925cd2906060d2dcc29c070220503eb" | ||
412 | dependencies = [ | 458 | dependencies = [ |
413 | "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", | 459 | "cfg-if", |
414 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 460 | "libc", |
415 | "wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)", | 461 | "wasi", |
416 | ] | 462 | ] |
417 | 463 | ||
418 | [[package]] | 464 | [[package]] |
419 | name = "globset" | 465 | name = "globset" |
420 | version = "0.4.4" | 466 | version = "0.4.4" |
421 | source = "registry+https://github.com/rust-lang/crates.io-index" | 467 | source = "registry+https://github.com/rust-lang/crates.io-index" |
468 | checksum = "925aa2cac82d8834e2b2a4415b6f6879757fb5c0928fc445ae76461a12eed8f2" | ||
422 | dependencies = [ | 469 | dependencies = [ |
423 | "aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", | 470 | "aho-corasick", |
424 | "bstr 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", | 471 | "bstr", |
425 | "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", | 472 | "fnv", |
426 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 473 | "log", |
427 | "regex 1.3.3 (registry+https://github.com/rust-lang/crates.io-index)", | 474 | "regex", |
428 | ] | 475 | ] |
429 | 476 | ||
430 | [[package]] | 477 | [[package]] |
431 | name = "heck" | 478 | name = "heck" |
432 | version = "0.3.1" | 479 | version = "0.3.1" |
433 | source = "registry+https://github.com/rust-lang/crates.io-index" | 480 | source = "registry+https://github.com/rust-lang/crates.io-index" |
481 | checksum = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205" | ||
434 | dependencies = [ | 482 | dependencies = [ |
435 | "unicode-segmentation 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)", | 483 | "unicode-segmentation", |
436 | ] | 484 | ] |
437 | 485 | ||
438 | [[package]] | 486 | [[package]] |
439 | name = "hermit-abi" | 487 | name = "hermit-abi" |
440 | version = "0.1.6" | 488 | version = "0.1.6" |
441 | source = "registry+https://github.com/rust-lang/crates.io-index" | 489 | source = "registry+https://github.com/rust-lang/crates.io-index" |
490 | checksum = "eff2656d88f158ce120947499e971d743c05dbcbed62e5bd2f38f1698bbc3772" | ||
442 | dependencies = [ | 491 | dependencies = [ |
443 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 492 | "libc", |
444 | ] | ||
445 | |||
446 | [[package]] | ||
447 | name = "humantime" | ||
448 | version = "1.3.0" | ||
449 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
450 | dependencies = [ | ||
451 | "quick-error 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
452 | ] | 493 | ] |
453 | 494 | ||
454 | [[package]] | 495 | [[package]] |
455 | name = "idna" | 496 | name = "idna" |
456 | version = "0.2.0" | 497 | version = "0.2.0" |
457 | source = "registry+https://github.com/rust-lang/crates.io-index" | 498 | source = "registry+https://github.com/rust-lang/crates.io-index" |
499 | checksum = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9" | ||
458 | dependencies = [ | 500 | dependencies = [ |
459 | "matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", | 501 | "matches", |
460 | "unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", | 502 | "unicode-bidi", |
461 | "unicode-normalization 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", | 503 | "unicode-normalization", |
462 | ] | 504 | ] |
463 | 505 | ||
464 | [[package]] | 506 | [[package]] |
465 | name = "indexmap" | 507 | name = "indexmap" |
466 | version = "1.3.1" | 508 | version = "1.3.2" |
467 | source = "registry+https://github.com/rust-lang/crates.io-index" | 509 | source = "registry+https://github.com/rust-lang/crates.io-index" |
510 | checksum = "076f042c5b7b98f31d205f1249267e12a6518c1481e9dae9764af19b707d2292" | ||
468 | dependencies = [ | 511 | dependencies = [ |
469 | "autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", | 512 | "autocfg 1.0.0", |
470 | ] | 513 | ] |
471 | 514 | ||
472 | [[package]] | 515 | [[package]] |
473 | name = "inotify" | 516 | name = "inotify" |
474 | version = "0.7.0" | 517 | version = "0.7.0" |
475 | source = "registry+https://github.com/rust-lang/crates.io-index" | 518 | source = "registry+https://github.com/rust-lang/crates.io-index" |
519 | checksum = "24e40d6fd5d64e2082e0c796495c8ef5ad667a96d03e5aaa0becfd9d47bcbfb8" | ||
476 | dependencies = [ | 520 | dependencies = [ |
477 | "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | 521 | "bitflags", |
478 | "inotify-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", | 522 | "inotify-sys", |
479 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 523 | "libc", |
480 | ] | 524 | ] |
481 | 525 | ||
482 | [[package]] | 526 | [[package]] |
483 | name = "inotify-sys" | 527 | name = "inotify-sys" |
484 | version = "0.1.3" | 528 | version = "0.1.3" |
485 | source = "registry+https://github.com/rust-lang/crates.io-index" | 529 | source = "registry+https://github.com/rust-lang/crates.io-index" |
530 | checksum = "e74a1aa87c59aeff6ef2cc2fa62d41bc43f54952f55652656b18a02fd5e356c0" | ||
486 | dependencies = [ | 531 | dependencies = [ |
487 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 532 | "libc", |
488 | ] | 533 | ] |
489 | 534 | ||
490 | [[package]] | 535 | [[package]] |
491 | name = "insta" | 536 | name = "insta" |
492 | version = "0.12.0" | 537 | version = "0.13.1" |
493 | source = "registry+https://github.com/rust-lang/crates.io-index" | 538 | source = "registry+https://github.com/rust-lang/crates.io-index" |
539 | checksum = "8df742abee84dbf27d20869c9adf77b0d8f7ea3eead13c2c9e3998d136a97058" | ||
494 | dependencies = [ | 540 | dependencies = [ |
495 | "console 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", | 541 | "console", |
496 | "difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", | 542 | "difference", |
497 | "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 543 | "lazy_static", |
498 | "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", | 544 | "serde", |
499 | "serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)", | 545 | "serde_json", |
500 | "serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)", | 546 | "serde_yaml", |
501 | "uuid 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", | ||
502 | ] | 547 | ] |
503 | 548 | ||
504 | [[package]] | 549 | [[package]] |
505 | name = "iovec" | 550 | name = "iovec" |
506 | version = "0.1.4" | 551 | version = "0.1.4" |
507 | source = "registry+https://github.com/rust-lang/crates.io-index" | 552 | source = "registry+https://github.com/rust-lang/crates.io-index" |
553 | checksum = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" | ||
508 | dependencies = [ | 554 | dependencies = [ |
509 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 555 | "libc", |
510 | ] | 556 | ] |
511 | 557 | ||
512 | [[package]] | 558 | [[package]] |
513 | name = "itertools" | 559 | name = "itertools" |
514 | version = "0.8.2" | 560 | version = "0.8.2" |
515 | source = "registry+https://github.com/rust-lang/crates.io-index" | 561 | source = "registry+https://github.com/rust-lang/crates.io-index" |
562 | checksum = "f56a2d0bc861f9165be4eb3442afd3c236d8a98afd426f65d92324ae1091a484" | ||
516 | dependencies = [ | 563 | dependencies = [ |
517 | "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", | 564 | "either", |
518 | ] | 565 | ] |
519 | 566 | ||
520 | [[package]] | 567 | [[package]] |
521 | name = "itoa" | 568 | name = "itoa" |
522 | version = "0.4.4" | 569 | version = "0.4.5" |
523 | source = "registry+https://github.com/rust-lang/crates.io-index" | 570 | source = "registry+https://github.com/rust-lang/crates.io-index" |
571 | checksum = "b8b7a7c0c47db5545ed3fef7468ee7bb5b74691498139e4b3f6a20685dc6dd8e" | ||
524 | 572 | ||
525 | [[package]] | 573 | [[package]] |
526 | name = "jemalloc-ctl" | 574 | name = "jemalloc-ctl" |
527 | version = "0.3.3" | 575 | version = "0.3.3" |
528 | source = "registry+https://github.com/rust-lang/crates.io-index" | 576 | source = "registry+https://github.com/rust-lang/crates.io-index" |
577 | checksum = "c502a5ff9dd2924f1ed32ba96e3b65735d837b4bfd978d3161b1702e66aca4b7" | ||
529 | dependencies = [ | 578 | dependencies = [ |
530 | "jemalloc-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", | 579 | "jemalloc-sys", |
531 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 580 | "libc", |
532 | "paste 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", | 581 | "paste", |
533 | ] | 582 | ] |
534 | 583 | ||
535 | [[package]] | 584 | [[package]] |
536 | name = "jemalloc-sys" | 585 | name = "jemalloc-sys" |
537 | version = "0.3.2" | 586 | version = "0.3.2" |
538 | source = "registry+https://github.com/rust-lang/crates.io-index" | 587 | source = "registry+https://github.com/rust-lang/crates.io-index" |
588 | checksum = "0d3b9f3f5c9b31aa0f5ed3260385ac205db665baa41d49bb8338008ae94ede45" | ||
539 | dependencies = [ | 589 | dependencies = [ |
540 | "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)", | 590 | "cc", |
541 | "fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | 591 | "fs_extra", |
542 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 592 | "libc", |
543 | ] | 593 | ] |
544 | 594 | ||
545 | [[package]] | 595 | [[package]] |
546 | name = "jemallocator" | 596 | name = "jemallocator" |
547 | version = "0.3.2" | 597 | version = "0.3.2" |
548 | source = "registry+https://github.com/rust-lang/crates.io-index" | 598 | source = "registry+https://github.com/rust-lang/crates.io-index" |
599 | checksum = "43ae63fcfc45e99ab3d1b29a46782ad679e98436c3169d15a167a1108a724b69" | ||
549 | dependencies = [ | 600 | dependencies = [ |
550 | "jemalloc-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", | 601 | "jemalloc-sys", |
551 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 602 | "libc", |
552 | ] | 603 | ] |
553 | 604 | ||
554 | [[package]] | 605 | [[package]] |
555 | name = "jod-thread" | 606 | name = "jod-thread" |
556 | version = "0.1.0" | 607 | version = "0.1.0" |
557 | source = "registry+https://github.com/rust-lang/crates.io-index" | 608 | source = "registry+https://github.com/rust-lang/crates.io-index" |
609 | checksum = "2f52a11f73b88fab829a0e4d9e13ea5982c7ac457c72eb3541d82a4afdfce4ff" | ||
558 | 610 | ||
559 | [[package]] | 611 | [[package]] |
560 | name = "join_to_string" | 612 | name = "join_to_string" |
561 | version = "0.1.3" | 613 | version = "0.1.3" |
562 | source = "registry+https://github.com/rust-lang/crates.io-index" | 614 | source = "registry+https://github.com/rust-lang/crates.io-index" |
615 | checksum = "4dc7a5290e8c2606ce2be49f456d50f69173cb96d1541e4f66e34ac8b331a98f" | ||
563 | 616 | ||
564 | [[package]] | 617 | [[package]] |
565 | name = "kernel32-sys" | 618 | name = "kernel32-sys" |
566 | version = "0.2.2" | 619 | version = "0.2.2" |
567 | source = "registry+https://github.com/rust-lang/crates.io-index" | 620 | source = "registry+https://github.com/rust-lang/crates.io-index" |
621 | checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" | ||
568 | dependencies = [ | 622 | dependencies = [ |
569 | "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", | 623 | "winapi 0.2.8", |
570 | "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", | 624 | "winapi-build", |
571 | ] | 625 | ] |
572 | 626 | ||
573 | [[package]] | 627 | [[package]] |
574 | name = "lalrpop-intern" | 628 | name = "lalrpop-intern" |
575 | version = "0.15.1" | 629 | version = "0.15.1" |
576 | source = "registry+https://github.com/rust-lang/crates.io-index" | 630 | source = "registry+https://github.com/rust-lang/crates.io-index" |
631 | checksum = "cc4fd87be4a815fd373e02773983940f0d75fb26fde8c098e9e45f7af03154c0" | ||
577 | 632 | ||
578 | [[package]] | 633 | [[package]] |
579 | name = "lazy_static" | 634 | name = "lazy_static" |
580 | version = "1.4.0" | 635 | version = "1.4.0" |
581 | source = "registry+https://github.com/rust-lang/crates.io-index" | 636 | source = "registry+https://github.com/rust-lang/crates.io-index" |
637 | checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" | ||
582 | 638 | ||
583 | [[package]] | 639 | [[package]] |
584 | name = "lazycell" | 640 | name = "lazycell" |
585 | version = "1.2.1" | 641 | version = "1.2.1" |
586 | source = "registry+https://github.com/rust-lang/crates.io-index" | 642 | source = "registry+https://github.com/rust-lang/crates.io-index" |
643 | checksum = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f" | ||
587 | 644 | ||
588 | [[package]] | 645 | [[package]] |
589 | name = "libc" | 646 | name = "libc" |
590 | version = "0.2.66" | 647 | version = "0.2.66" |
591 | source = "registry+https://github.com/rust-lang/crates.io-index" | 648 | source = "registry+https://github.com/rust-lang/crates.io-index" |
649 | checksum = "d515b1f41455adea1313a4a2ac8a8a477634fbae63cc6100e3aebb207ce61558" | ||
592 | 650 | ||
593 | [[package]] | 651 | [[package]] |
594 | name = "linked-hash-map" | 652 | name = "linked-hash-map" |
595 | version = "0.5.2" | 653 | version = "0.5.2" |
596 | source = "registry+https://github.com/rust-lang/crates.io-index" | 654 | source = "registry+https://github.com/rust-lang/crates.io-index" |
655 | checksum = "ae91b68aebc4ddb91978b11a1b02ddd8602a05ec19002801c5666000e05e0f83" | ||
597 | 656 | ||
598 | [[package]] | 657 | [[package]] |
599 | name = "lock_api" | 658 | name = "lock_api" |
600 | version = "0.3.3" | 659 | version = "0.3.3" |
601 | source = "registry+https://github.com/rust-lang/crates.io-index" | 660 | source = "registry+https://github.com/rust-lang/crates.io-index" |
661 | checksum = "79b2de95ecb4691949fea4716ca53cdbcfccb2c612e19644a8bad05edcf9f47b" | ||
602 | dependencies = [ | 662 | dependencies = [ |
603 | "scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", | 663 | "scopeguard", |
604 | ] | 664 | ] |
605 | 665 | ||
606 | [[package]] | 666 | [[package]] |
607 | name = "log" | 667 | name = "log" |
608 | version = "0.4.8" | 668 | version = "0.4.8" |
609 | source = "registry+https://github.com/rust-lang/crates.io-index" | 669 | source = "registry+https://github.com/rust-lang/crates.io-index" |
670 | checksum = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7" | ||
610 | dependencies = [ | 671 | dependencies = [ |
611 | "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", | 672 | "cfg-if", |
612 | ] | 673 | ] |
613 | 674 | ||
614 | [[package]] | 675 | [[package]] |
615 | name = "lsp-server" | 676 | name = "lsp-server" |
616 | version = "0.3.1" | 677 | version = "0.3.1" |
617 | source = "registry+https://github.com/rust-lang/crates.io-index" | 678 | source = "registry+https://github.com/rust-lang/crates.io-index" |
679 | checksum = "5383e043329615624bbf45e1ba27bd75c176762b2592855c659bc28ac580a06b" | ||
618 | dependencies = [ | 680 | dependencies = [ |
619 | "crossbeam-channel 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 681 | "crossbeam-channel", |
620 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 682 | "log", |
621 | "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", | 683 | "serde", |
622 | "serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)", | 684 | "serde_json", |
623 | ] | 685 | ] |
624 | 686 | ||
625 | [[package]] | 687 | [[package]] |
626 | name = "lsp-types" | 688 | name = "lsp-types" |
627 | version = "0.70.0" | 689 | version = "0.70.1" |
628 | source = "registry+https://github.com/rust-lang/crates.io-index" | 690 | source = "registry+https://github.com/rust-lang/crates.io-index" |
691 | checksum = "d267f222864db3db63cf7e18493a2a5c84edab1f4e3c7211c9390ce033365210" | ||
629 | dependencies = [ | 692 | dependencies = [ |
630 | "base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", | 693 | "base64", |
631 | "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | 694 | "bitflags", |
632 | "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", | 695 | "serde", |
633 | "serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)", | 696 | "serde_json", |
634 | "serde_repr 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", | 697 | "serde_repr", |
635 | "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", | 698 | "url", |
636 | ] | 699 | ] |
637 | 700 | ||
638 | [[package]] | 701 | [[package]] |
639 | name = "matches" | 702 | name = "matches" |
640 | version = "0.1.8" | 703 | version = "0.1.8" |
641 | source = "registry+https://github.com/rust-lang/crates.io-index" | 704 | source = "registry+https://github.com/rust-lang/crates.io-index" |
705 | checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" | ||
642 | 706 | ||
643 | [[package]] | 707 | [[package]] |
644 | name = "memchr" | 708 | name = "memchr" |
645 | version = "2.3.0" | 709 | version = "2.3.0" |
646 | source = "registry+https://github.com/rust-lang/crates.io-index" | 710 | source = "registry+https://github.com/rust-lang/crates.io-index" |
711 | checksum = "3197e20c7edb283f87c071ddfc7a2cca8f8e0b888c242959846a6fce03c72223" | ||
647 | 712 | ||
648 | [[package]] | 713 | [[package]] |
649 | name = "memoffset" | 714 | name = "memoffset" |
650 | version = "0.5.3" | 715 | version = "0.5.3" |
651 | source = "registry+https://github.com/rust-lang/crates.io-index" | 716 | source = "registry+https://github.com/rust-lang/crates.io-index" |
717 | checksum = "75189eb85871ea5c2e2c15abbdd541185f63b408415e5051f5cac122d8c774b9" | ||
652 | dependencies = [ | 718 | dependencies = [ |
653 | "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", | 719 | "rustc_version", |
654 | ] | 720 | ] |
655 | 721 | ||
656 | [[package]] | 722 | [[package]] |
657 | name = "mio" | 723 | name = "mio" |
658 | version = "0.6.21" | 724 | version = "0.6.21" |
659 | source = "registry+https://github.com/rust-lang/crates.io-index" | 725 | source = "registry+https://github.com/rust-lang/crates.io-index" |
726 | checksum = "302dec22bcf6bae6dfb69c647187f4b4d0fb6f535521f7bc022430ce8e12008f" | ||
660 | dependencies = [ | 727 | dependencies = [ |
661 | "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", | 728 | "cfg-if", |
662 | "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", | 729 | "fuchsia-zircon", |
663 | "fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", | 730 | "fuchsia-zircon-sys", |
664 | "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", | 731 | "iovec", |
665 | "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", | 732 | "kernel32-sys", |
666 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 733 | "libc", |
667 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 734 | "log", |
668 | "miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | 735 | "miow", |
669 | "net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)", | 736 | "net2", |
670 | "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", | 737 | "slab", |
671 | "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", | 738 | "winapi 0.2.8", |
672 | ] | 739 | ] |
673 | 740 | ||
674 | [[package]] | 741 | [[package]] |
675 | name = "mio-extras" | 742 | name = "mio-extras" |
676 | version = "2.0.6" | 743 | version = "2.0.6" |
677 | source = "registry+https://github.com/rust-lang/crates.io-index" | 744 | source = "registry+https://github.com/rust-lang/crates.io-index" |
745 | checksum = "52403fe290012ce777c4626790c8951324a2b9e3316b3143779c72b029742f19" | ||
678 | dependencies = [ | 746 | dependencies = [ |
679 | "lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | 747 | "lazycell", |
680 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 748 | "log", |
681 | "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)", | 749 | "mio", |
682 | "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", | 750 | "slab", |
683 | ] | 751 | ] |
684 | 752 | ||
685 | [[package]] | 753 | [[package]] |
686 | name = "miow" | 754 | name = "miow" |
687 | version = "0.2.1" | 755 | version = "0.2.1" |
688 | source = "registry+https://github.com/rust-lang/crates.io-index" | 756 | source = "registry+https://github.com/rust-lang/crates.io-index" |
757 | checksum = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919" | ||
689 | dependencies = [ | 758 | dependencies = [ |
690 | "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", | 759 | "kernel32-sys", |
691 | "net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)", | 760 | "net2", |
692 | "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", | 761 | "winapi 0.2.8", |
693 | "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | 762 | "ws2_32-sys", |
694 | ] | 763 | ] |
695 | 764 | ||
696 | [[package]] | 765 | [[package]] |
697 | name = "net2" | 766 | name = "net2" |
698 | version = "0.2.33" | 767 | version = "0.2.33" |
699 | source = "registry+https://github.com/rust-lang/crates.io-index" | 768 | source = "registry+https://github.com/rust-lang/crates.io-index" |
769 | checksum = "42550d9fb7b6684a6d404d9fa7250c2eb2646df731d1c06afc06dcee9e1bcf88" | ||
700 | dependencies = [ | 770 | dependencies = [ |
701 | "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", | 771 | "cfg-if", |
702 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 772 | "libc", |
703 | "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", | 773 | "winapi 0.3.8", |
704 | ] | 774 | ] |
705 | 775 | ||
706 | [[package]] | 776 | [[package]] |
707 | name = "notify" | 777 | name = "notify" |
708 | version = "4.0.15" | 778 | version = "4.0.15" |
709 | source = "registry+https://github.com/rust-lang/crates.io-index" | 779 | source = "registry+https://github.com/rust-lang/crates.io-index" |
780 | checksum = "80ae4a7688d1fab81c5bf19c64fc8db920be8d519ce6336ed4e7efe024724dbd" | ||
710 | dependencies = [ | 781 | dependencies = [ |
711 | "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | 782 | "bitflags", |
712 | "filetime 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", | 783 | "filetime", |
713 | "fsevent 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 784 | "fsevent", |
714 | "fsevent-sys 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 785 | "fsevent-sys", |
715 | "inotify 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", | 786 | "inotify", |
716 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 787 | "libc", |
717 | "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)", | 788 | "mio", |
718 | "mio-extras 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)", | 789 | "mio-extras", |
719 | "walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 790 | "walkdir", |
720 | "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", | 791 | "winapi 0.3.8", |
721 | ] | 792 | ] |
722 | 793 | ||
723 | [[package]] | 794 | [[package]] |
724 | name = "num-traits" | 795 | name = "num-traits" |
725 | version = "0.2.11" | 796 | version = "0.2.11" |
726 | source = "registry+https://github.com/rust-lang/crates.io-index" | 797 | source = "registry+https://github.com/rust-lang/crates.io-index" |
798 | checksum = "c62be47e61d1842b9170f0fdeec8eba98e60e90e5446449a0545e5152acd7096" | ||
727 | dependencies = [ | 799 | dependencies = [ |
728 | "autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", | 800 | "autocfg 1.0.0", |
729 | ] | 801 | ] |
730 | 802 | ||
731 | [[package]] | 803 | [[package]] |
732 | name = "num_cpus" | 804 | name = "num_cpus" |
733 | version = "1.12.0" | 805 | version = "1.12.0" |
734 | source = "registry+https://github.com/rust-lang/crates.io-index" | 806 | source = "registry+https://github.com/rust-lang/crates.io-index" |
807 | checksum = "46203554f085ff89c235cd12f7075f3233af9b11ed7c9e16dfe2560d03313ce6" | ||
735 | dependencies = [ | 808 | dependencies = [ |
736 | "hermit-abi 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", | 809 | "hermit-abi", |
737 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 810 | "libc", |
738 | ] | 811 | ] |
739 | 812 | ||
740 | [[package]] | 813 | [[package]] |
741 | name = "once_cell" | 814 | name = "once_cell" |
742 | version = "1.3.1" | 815 | version = "1.3.1" |
743 | source = "registry+https://github.com/rust-lang/crates.io-index" | 816 | source = "registry+https://github.com/rust-lang/crates.io-index" |
817 | checksum = "b1c601810575c99596d4afc46f78a678c80105117c379eb3650cf99b8a21ce5b" | ||
744 | 818 | ||
745 | [[package]] | 819 | [[package]] |
746 | name = "ordermap" | 820 | name = "ordermap" |
747 | version = "0.3.5" | 821 | version = "0.3.5" |
748 | source = "registry+https://github.com/rust-lang/crates.io-index" | 822 | source = "registry+https://github.com/rust-lang/crates.io-index" |
823 | checksum = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063" | ||
749 | 824 | ||
750 | [[package]] | 825 | [[package]] |
751 | name = "parking_lot" | 826 | name = "parking_lot" |
752 | version = "0.10.0" | 827 | version = "0.10.0" |
753 | source = "registry+https://github.com/rust-lang/crates.io-index" | 828 | source = "registry+https://github.com/rust-lang/crates.io-index" |
829 | checksum = "92e98c49ab0b7ce5b222f2cc9193fc4efe11c6d0bd4f648e374684a6857b1cfc" | ||
754 | dependencies = [ | 830 | dependencies = [ |
755 | "lock_api 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", | 831 | "lock_api", |
756 | "parking_lot_core 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", | 832 | "parking_lot_core", |
757 | ] | 833 | ] |
758 | 834 | ||
759 | [[package]] | 835 | [[package]] |
760 | name = "parking_lot_core" | 836 | name = "parking_lot_core" |
761 | version = "0.7.0" | 837 | version = "0.7.0" |
762 | source = "registry+https://github.com/rust-lang/crates.io-index" | 838 | source = "registry+https://github.com/rust-lang/crates.io-index" |
839 | checksum = "7582838484df45743c8434fbff785e8edf260c28748353d44bc0da32e0ceabf1" | ||
763 | dependencies = [ | 840 | dependencies = [ |
764 | "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", | 841 | "cfg-if", |
765 | "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", | 842 | "cloudabi", |
766 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 843 | "libc", |
767 | "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", | 844 | "redox_syscall", |
768 | "smallvec 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | 845 | "smallvec", |
769 | "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", | 846 | "winapi 0.3.8", |
770 | ] | 847 | ] |
771 | 848 | ||
772 | [[package]] | 849 | [[package]] |
773 | name = "paste" | 850 | name = "paste" |
774 | version = "0.1.6" | 851 | version = "0.1.6" |
775 | source = "registry+https://github.com/rust-lang/crates.io-index" | 852 | source = "registry+https://github.com/rust-lang/crates.io-index" |
853 | checksum = "423a519e1c6e828f1e73b720f9d9ed2fa643dce8a7737fb43235ce0b41eeaa49" | ||
776 | dependencies = [ | 854 | dependencies = [ |
777 | "paste-impl 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", | 855 | "paste-impl", |
778 | "proc-macro-hack 0.5.11 (registry+https://github.com/rust-lang/crates.io-index)", | 856 | "proc-macro-hack", |
779 | ] | 857 | ] |
780 | 858 | ||
781 | [[package]] | 859 | [[package]] |
782 | name = "paste-impl" | 860 | name = "paste-impl" |
783 | version = "0.1.6" | 861 | version = "0.1.6" |
784 | source = "registry+https://github.com/rust-lang/crates.io-index" | 862 | source = "registry+https://github.com/rust-lang/crates.io-index" |
863 | checksum = "4214c9e912ef61bf42b81ba9a47e8aad1b2ffaf739ab162bf96d1e011f54e6c5" | ||
785 | dependencies = [ | 864 | dependencies = [ |
786 | "proc-macro-hack 0.5.11 (registry+https://github.com/rust-lang/crates.io-index)", | 865 | "proc-macro-hack", |
787 | "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", | 866 | "proc-macro2", |
788 | "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", | 867 | "quote", |
789 | "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", | 868 | "syn", |
790 | ] | 869 | ] |
791 | 870 | ||
792 | [[package]] | 871 | [[package]] |
793 | name = "percent-encoding" | 872 | name = "percent-encoding" |
794 | version = "2.1.0" | 873 | version = "2.1.0" |
795 | source = "registry+https://github.com/rust-lang/crates.io-index" | 874 | source = "registry+https://github.com/rust-lang/crates.io-index" |
875 | checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" | ||
796 | 876 | ||
797 | [[package]] | 877 | [[package]] |
798 | name = "petgraph" | 878 | name = "petgraph" |
799 | version = "0.4.13" | 879 | version = "0.4.13" |
800 | source = "registry+https://github.com/rust-lang/crates.io-index" | 880 | source = "registry+https://github.com/rust-lang/crates.io-index" |
881 | checksum = "9c3659d1ee90221741f65dd128d9998311b0e40c5d3c23a62445938214abce4f" | ||
801 | dependencies = [ | 882 | dependencies = [ |
802 | "fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", | 883 | "fixedbitset", |
803 | "ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", | 884 | "ordermap", |
804 | ] | 885 | ] |
805 | 886 | ||
806 | [[package]] | 887 | [[package]] |
807 | name = "pico-args" | 888 | name = "pico-args" |
808 | version = "0.3.1" | 889 | version = "0.3.1" |
809 | source = "registry+https://github.com/rust-lang/crates.io-index" | 890 | source = "registry+https://github.com/rust-lang/crates.io-index" |
891 | checksum = "3ad1f1b834a05d42dae330066e9699a173b28185b3bdc3dbf14ca239585de8cc" | ||
810 | 892 | ||
811 | [[package]] | 893 | [[package]] |
812 | name = "ppv-lite86" | 894 | name = "ppv-lite86" |
813 | version = "0.2.6" | 895 | version = "0.2.6" |
814 | source = "registry+https://github.com/rust-lang/crates.io-index" | 896 | source = "registry+https://github.com/rust-lang/crates.io-index" |
897 | checksum = "74490b50b9fbe561ac330df47c08f3f33073d2d00c150f719147d7c54522fa1b" | ||
815 | 898 | ||
816 | [[package]] | 899 | [[package]] |
817 | name = "proc-macro-hack" | 900 | name = "proc-macro-hack" |
818 | version = "0.5.11" | 901 | version = "0.5.11" |
819 | source = "registry+https://github.com/rust-lang/crates.io-index" | 902 | source = "registry+https://github.com/rust-lang/crates.io-index" |
903 | checksum = "ecd45702f76d6d3c75a80564378ae228a85f0b59d2f3ed43c91b4a69eb2ebfc5" | ||
820 | dependencies = [ | 904 | dependencies = [ |
821 | "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", | 905 | "proc-macro2", |
822 | "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", | 906 | "quote", |
823 | "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", | 907 | "syn", |
824 | ] | 908 | ] |
825 | 909 | ||
826 | [[package]] | 910 | [[package]] |
827 | name = "proc-macro2" | 911 | name = "proc-macro2" |
828 | version = "1.0.8" | 912 | version = "1.0.8" |
829 | source = "registry+https://github.com/rust-lang/crates.io-index" | 913 | source = "registry+https://github.com/rust-lang/crates.io-index" |
914 | checksum = "3acb317c6ff86a4e579dfa00fc5e6cca91ecbb4e7eb2df0468805b674eb88548" | ||
830 | dependencies = [ | 915 | dependencies = [ |
831 | "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", | 916 | "unicode-xid", |
832 | ] | 917 | ] |
833 | 918 | ||
834 | [[package]] | 919 | [[package]] |
835 | name = "proptest" | 920 | name = "proptest" |
836 | version = "0.9.5" | 921 | version = "0.9.5" |
837 | source = "registry+https://github.com/rust-lang/crates.io-index" | 922 | source = "registry+https://github.com/rust-lang/crates.io-index" |
923 | checksum = "bf6147d103a7c9d7598f4105cf049b15c99e2ecd93179bf024f0fd349be5ada4" | ||
838 | dependencies = [ | 924 | dependencies = [ |
839 | "bit-set 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", | 925 | "bit-set", |
840 | "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | 926 | "bitflags", |
841 | "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", | 927 | "byteorder", |
842 | "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 928 | "lazy_static", |
843 | "num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", | 929 | "num-traits", |
844 | "quick-error 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)", | 930 | "quick-error", |
845 | "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", | 931 | "rand 0.6.5", |
846 | "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", | 932 | "rand_chacha 0.1.1", |
847 | "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", | 933 | "rand_xorshift", |
848 | "regex-syntax 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", | 934 | "regex-syntax", |
849 | ] | 935 | ] |
850 | 936 | ||
851 | [[package]] | 937 | [[package]] |
852 | name = "quick-error" | 938 | name = "quick-error" |
853 | version = "1.2.3" | 939 | version = "1.2.3" |
854 | source = "registry+https://github.com/rust-lang/crates.io-index" | 940 | source = "registry+https://github.com/rust-lang/crates.io-index" |
941 | checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" | ||
855 | 942 | ||
856 | [[package]] | 943 | [[package]] |
857 | name = "quote" | 944 | name = "quote" |
858 | version = "1.0.2" | 945 | version = "1.0.2" |
859 | source = "registry+https://github.com/rust-lang/crates.io-index" | 946 | source = "registry+https://github.com/rust-lang/crates.io-index" |
947 | checksum = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe" | ||
860 | dependencies = [ | 948 | dependencies = [ |
861 | "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", | 949 | "proc-macro2", |
862 | ] | 950 | ] |
863 | 951 | ||
864 | [[package]] | 952 | [[package]] |
@@ -869,1083 +957,999 @@ version = "0.1.0" | |||
869 | name = "ra_assists" | 957 | name = "ra_assists" |
870 | version = "0.1.0" | 958 | version = "0.1.0" |
871 | dependencies = [ | 959 | dependencies = [ |
872 | "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", | 960 | "either", |
873 | "format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", | 961 | "format-buf", |
874 | "join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", | 962 | "join_to_string", |
875 | "ra_db 0.1.0", | 963 | "ra_db", |
876 | "ra_fmt 0.1.0", | 964 | "ra_fmt", |
877 | "ra_hir 0.1.0", | 965 | "ra_hir", |
878 | "ra_syntax 0.1.0", | 966 | "ra_ide_db", |
879 | "ra_text_edit 0.1.0", | 967 | "ra_prof", |
880 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 968 | "ra_syntax", |
881 | "test_utils 0.1.0", | 969 | "ra_text_edit", |
970 | "rustc-hash", | ||
971 | "test_utils", | ||
882 | ] | 972 | ] |
883 | 973 | ||
884 | [[package]] | 974 | [[package]] |
885 | name = "ra_batch" | 975 | name = "ra_batch" |
886 | version = "0.1.0" | 976 | version = "0.1.0" |
887 | dependencies = [ | 977 | dependencies = [ |
888 | "crossbeam-channel 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 978 | "crossbeam-channel", |
889 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 979 | "log", |
890 | "ra_db 0.1.0", | 980 | "ra_db", |
891 | "ra_hir 0.1.0", | 981 | "ra_hir", |
892 | "ra_ide 0.1.0", | 982 | "ra_ide", |
893 | "ra_project_model 0.1.0", | 983 | "ra_project_model", |
894 | "ra_vfs 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", | 984 | "ra_vfs", |
895 | "ra_vfs_glob 0.1.0", | 985 | "ra_vfs_glob", |
896 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 986 | "rustc-hash", |
897 | ] | 987 | ] |
898 | 988 | ||
899 | [[package]] | 989 | [[package]] |
900 | name = "ra_cargo_watch" | 990 | name = "ra_cargo_watch" |
901 | version = "0.1.0" | 991 | version = "0.1.0" |
902 | dependencies = [ | 992 | dependencies = [ |
903 | "cargo_metadata 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", | 993 | "cargo_metadata", |
904 | "crossbeam-channel 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 994 | "crossbeam-channel", |
905 | "insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", | 995 | "insta", |
906 | "jod-thread 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | 996 | "jod-thread", |
907 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 997 | "log", |
908 | "lsp-types 0.70.0 (registry+https://github.com/rust-lang/crates.io-index)", | 998 | "lsp-types", |
909 | "parking_lot 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", | 999 | "parking_lot", |
910 | "serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)", | 1000 | "serde_json", |
911 | ] | 1001 | ] |
912 | 1002 | ||
913 | [[package]] | 1003 | [[package]] |
914 | name = "ra_cfg" | 1004 | name = "ra_cfg" |
915 | version = "0.1.0" | 1005 | version = "0.1.0" |
916 | dependencies = [ | 1006 | dependencies = [ |
917 | "ra_mbe 0.1.0", | 1007 | "ra_mbe", |
918 | "ra_syntax 0.1.0", | 1008 | "ra_syntax", |
919 | "ra_tt 0.1.0", | 1009 | "ra_tt", |
920 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1010 | "rustc-hash", |
921 | ] | 1011 | ] |
922 | 1012 | ||
923 | [[package]] | 1013 | [[package]] |
924 | name = "ra_cli" | 1014 | name = "ra_cli" |
925 | version = "0.1.0" | 1015 | version = "0.1.0" |
926 | dependencies = [ | 1016 | dependencies = [ |
927 | "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1017 | "env_logger", |
928 | "pico-args 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1018 | "pico-args", |
929 | "ra_batch 0.1.0", | 1019 | "ra_batch", |
930 | "ra_db 0.1.0", | 1020 | "ra_db", |
931 | "ra_hir 0.1.0", | 1021 | "ra_hir", |
932 | "ra_hir_def 0.1.0", | 1022 | "ra_hir_def", |
933 | "ra_hir_ty 0.1.0", | 1023 | "ra_hir_ty", |
934 | "ra_ide 0.1.0", | 1024 | "ra_ide", |
935 | "ra_prof 0.1.0", | 1025 | "ra_prof", |
936 | "ra_syntax 0.1.0", | 1026 | "ra_syntax", |
937 | ] | 1027 | ] |
938 | 1028 | ||
939 | [[package]] | 1029 | [[package]] |
940 | name = "ra_db" | 1030 | name = "ra_db" |
941 | version = "0.1.0" | 1031 | version = "0.1.0" |
942 | dependencies = [ | 1032 | dependencies = [ |
943 | "ra_cfg 0.1.0", | 1033 | "ra_cfg", |
944 | "ra_prof 0.1.0", | 1034 | "ra_prof", |
945 | "ra_syntax 0.1.0", | 1035 | "ra_syntax", |
946 | "relative-path 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1036 | "relative-path", |
947 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1037 | "rustc-hash", |
948 | "salsa 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1038 | "salsa", |
949 | "test_utils 0.1.0", | 1039 | "test_utils", |
950 | ] | 1040 | ] |
951 | 1041 | ||
952 | [[package]] | 1042 | [[package]] |
953 | name = "ra_fmt" | 1043 | name = "ra_fmt" |
954 | version = "0.1.0" | 1044 | version = "0.1.0" |
955 | dependencies = [ | 1045 | dependencies = [ |
956 | "itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1046 | "itertools", |
957 | "ra_syntax 0.1.0", | 1047 | "ra_syntax", |
958 | ] | 1048 | ] |
959 | 1049 | ||
960 | [[package]] | 1050 | [[package]] |
961 | name = "ra_hir" | 1051 | name = "ra_hir" |
962 | version = "0.1.0" | 1052 | version = "0.1.0" |
963 | dependencies = [ | 1053 | dependencies = [ |
964 | "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1054 | "either", |
965 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1055 | "log", |
966 | "ra_db 0.1.0", | 1056 | "ra_db", |
967 | "ra_hir_def 0.1.0", | 1057 | "ra_hir_def", |
968 | "ra_hir_expand 0.1.0", | 1058 | "ra_hir_expand", |
969 | "ra_hir_ty 0.1.0", | 1059 | "ra_hir_ty", |
970 | "ra_prof 0.1.0", | 1060 | "ra_prof", |
971 | "ra_syntax 0.1.0", | 1061 | "ra_syntax", |
972 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1062 | "rustc-hash", |
973 | ] | 1063 | ] |
974 | 1064 | ||
975 | [[package]] | 1065 | [[package]] |
976 | name = "ra_hir_def" | 1066 | name = "ra_hir_def" |
977 | version = "0.1.0" | 1067 | version = "0.1.0" |
978 | dependencies = [ | 1068 | dependencies = [ |
979 | "anymap 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1069 | "anymap", |
980 | "drop_bomb 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", | 1070 | "drop_bomb", |
981 | "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1071 | "either", |
982 | "insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1072 | "insta", |
983 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1073 | "log", |
984 | "once_cell 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1074 | "once_cell", |
985 | "ra_arena 0.1.0", | 1075 | "ra_arena", |
986 | "ra_cfg 0.1.0", | 1076 | "ra_cfg", |
987 | "ra_db 0.1.0", | 1077 | "ra_db", |
988 | "ra_hir_expand 0.1.0", | 1078 | "ra_hir_expand", |
989 | "ra_mbe 0.1.0", | 1079 | "ra_mbe", |
990 | "ra_prof 0.1.0", | 1080 | "ra_prof", |
991 | "ra_syntax 0.1.0", | 1081 | "ra_syntax", |
992 | "ra_tt 0.1.0", | 1082 | "ra_tt", |
993 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1083 | "rustc-hash", |
994 | "test_utils 0.1.0", | 1084 | "test_utils", |
995 | ] | 1085 | ] |
996 | 1086 | ||
997 | [[package]] | 1087 | [[package]] |
998 | name = "ra_hir_expand" | 1088 | name = "ra_hir_expand" |
999 | version = "0.1.0" | 1089 | version = "0.1.0" |
1000 | dependencies = [ | 1090 | dependencies = [ |
1001 | "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1091 | "either", |
1002 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1092 | "log", |
1003 | "ra_arena 0.1.0", | 1093 | "ra_arena", |
1004 | "ra_db 0.1.0", | 1094 | "ra_db", |
1005 | "ra_mbe 0.1.0", | 1095 | "ra_mbe", |
1006 | "ra_parser 0.1.0", | 1096 | "ra_parser", |
1007 | "ra_prof 0.1.0", | 1097 | "ra_prof", |
1008 | "ra_syntax 0.1.0", | 1098 | "ra_syntax", |
1009 | "ra_tt 0.1.0", | 1099 | "ra_tt", |
1010 | ] | 1100 | ] |
1011 | 1101 | ||
1012 | [[package]] | 1102 | [[package]] |
1013 | name = "ra_hir_ty" | 1103 | name = "ra_hir_ty" |
1014 | version = "0.1.0" | 1104 | version = "0.1.0" |
1015 | dependencies = [ | 1105 | dependencies = [ |
1016 | "arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1106 | "arrayvec", |
1017 | "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)", | 1107 | "chalk-ir", |
1018 | "chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)", | 1108 | "chalk-rust-ir", |
1019 | "chalk-solve 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)", | 1109 | "chalk-solve", |
1020 | "ena 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1110 | "ena", |
1021 | "insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1111 | "insta", |
1022 | "lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1112 | "lalrpop-intern", |
1023 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1113 | "log", |
1024 | "ra_arena 0.1.0", | 1114 | "ra_arena", |
1025 | "ra_db 0.1.0", | 1115 | "ra_db", |
1026 | "ra_hir_def 0.1.0", | 1116 | "ra_hir_def", |
1027 | "ra_hir_expand 0.1.0", | 1117 | "ra_hir_expand", |
1028 | "ra_prof 0.1.0", | 1118 | "ra_prof", |
1029 | "ra_syntax 0.1.0", | 1119 | "ra_syntax", |
1030 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1120 | "rustc-hash", |
1031 | "test_utils 0.1.0", | 1121 | "test_utils", |
1032 | ] | 1122 | ] |
1033 | 1123 | ||
1034 | [[package]] | 1124 | [[package]] |
1035 | name = "ra_ide" | 1125 | name = "ra_ide" |
1036 | version = "0.1.0" | 1126 | version = "0.1.0" |
1037 | dependencies = [ | 1127 | dependencies = [ |
1038 | "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1128 | "either", |
1039 | "format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1129 | "format-buf", |
1040 | "fst 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", | 1130 | "fst", |
1041 | "indexmap 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1131 | "indexmap", |
1042 | "insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1132 | "insta", |
1043 | "itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1133 | "itertools", |
1044 | "join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1134 | "join_to_string", |
1045 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1135 | "log", |
1046 | "once_cell 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1136 | "once_cell", |
1047 | "proptest 0.9.5 (registry+https://github.com/rust-lang/crates.io-index)", | 1137 | "proptest", |
1048 | "ra_assists 0.1.0", | 1138 | "ra_assists", |
1049 | "ra_cfg 0.1.0", | 1139 | "ra_cfg", |
1050 | "ra_db 0.1.0", | 1140 | "ra_db", |
1051 | "ra_fmt 0.1.0", | 1141 | "ra_fmt", |
1052 | "ra_hir 0.1.0", | 1142 | "ra_hir", |
1053 | "ra_prof 0.1.0", | 1143 | "ra_ide_db", |
1054 | "ra_syntax 0.1.0", | 1144 | "ra_prof", |
1055 | "ra_text_edit 0.1.0", | 1145 | "ra_syntax", |
1056 | "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1146 | "ra_text_edit", |
1057 | "rayon 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1147 | "rand 0.7.3", |
1058 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1148 | "rayon", |
1059 | "superslice 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1149 | "rustc-hash", |
1060 | "test_utils 0.1.0", | 1150 | "superslice", |
1061 | "unicase 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1151 | "test_utils", |
1152 | "unicase", | ||
1153 | ] | ||
1154 | |||
1155 | [[package]] | ||
1156 | name = "ra_ide_db" | ||
1157 | version = "0.1.0" | ||
1158 | dependencies = [ | ||
1159 | "either", | ||
1160 | "format-buf", | ||
1161 | "fst", | ||
1162 | "indexmap", | ||
1163 | "insta", | ||
1164 | "itertools", | ||
1165 | "join_to_string", | ||
1166 | "log", | ||
1167 | "once_cell", | ||
1168 | "proptest", | ||
1169 | "ra_cfg", | ||
1170 | "ra_db", | ||
1171 | "ra_fmt", | ||
1172 | "ra_hir", | ||
1173 | "ra_prof", | ||
1174 | "ra_syntax", | ||
1175 | "ra_text_edit", | ||
1176 | "rand 0.7.3", | ||
1177 | "rayon", | ||
1178 | "rustc-hash", | ||
1179 | "superslice", | ||
1180 | "test_utils", | ||
1181 | "unicase", | ||
1062 | ] | 1182 | ] |
1063 | 1183 | ||
1064 | [[package]] | 1184 | [[package]] |
1065 | name = "ra_lsp_server" | 1185 | name = "ra_lsp_server" |
1066 | version = "0.1.0" | 1186 | version = "0.1.0" |
1067 | dependencies = [ | 1187 | dependencies = [ |
1068 | "crossbeam-channel 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1188 | "crossbeam-channel", |
1069 | "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1189 | "either", |
1070 | "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1190 | "env_logger", |
1071 | "jod-thread 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1191 | "jod-thread", |
1072 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1192 | "log", |
1073 | "lsp-server 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1193 | "lsp-server", |
1074 | "lsp-types 0.70.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1194 | "lsp-types", |
1075 | "parking_lot 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1195 | "parking_lot", |
1076 | "ra_cargo_watch 0.1.0", | 1196 | "ra_cargo_watch", |
1077 | "ra_ide 0.1.0", | 1197 | "ra_ide", |
1078 | "ra_prof 0.1.0", | 1198 | "ra_prof", |
1079 | "ra_project_model 0.1.0", | 1199 | "ra_project_model", |
1080 | "ra_syntax 0.1.0", | 1200 | "ra_syntax", |
1081 | "ra_text_edit 0.1.0", | 1201 | "ra_text_edit", |
1082 | "ra_vfs 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1202 | "ra_vfs", |
1083 | "ra_vfs_glob 0.1.0", | 1203 | "ra_vfs_glob", |
1084 | "relative-path 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1204 | "relative-path", |
1085 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1205 | "rustc-hash", |
1086 | "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", | 1206 | "serde", |
1087 | "serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)", | 1207 | "serde_json", |
1088 | "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1208 | "tempfile", |
1089 | "test_utils 0.1.0", | 1209 | "test_utils", |
1090 | "threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1210 | "threadpool", |
1211 | "winapi 0.3.8", | ||
1091 | ] | 1212 | ] |
1092 | 1213 | ||
1093 | [[package]] | 1214 | [[package]] |
1094 | name = "ra_mbe" | 1215 | name = "ra_mbe" |
1095 | version = "0.1.0" | 1216 | version = "0.1.0" |
1096 | dependencies = [ | 1217 | dependencies = [ |
1097 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1218 | "log", |
1098 | "ra_parser 0.1.0", | 1219 | "ra_parser", |
1099 | "ra_syntax 0.1.0", | 1220 | "ra_syntax", |
1100 | "ra_tt 0.1.0", | 1221 | "ra_tt", |
1101 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1222 | "rustc-hash", |
1102 | "smallvec 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1223 | "smallvec", |
1103 | "test_utils 0.1.0", | 1224 | "test_utils", |
1104 | ] | 1225 | ] |
1105 | 1226 | ||
1106 | [[package]] | 1227 | [[package]] |
1107 | name = "ra_parser" | 1228 | name = "ra_parser" |
1108 | version = "0.1.0" | 1229 | version = "0.1.0" |
1109 | dependencies = [ | 1230 | dependencies = [ |
1110 | "drop_bomb 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", | 1231 | "drop_bomb", |
1111 | ] | 1232 | ] |
1112 | 1233 | ||
1113 | [[package]] | 1234 | [[package]] |
1114 | name = "ra_prof" | 1235 | name = "ra_prof" |
1115 | version = "0.1.0" | 1236 | version = "0.1.0" |
1116 | dependencies = [ | 1237 | dependencies = [ |
1117 | "backtrace 0.3.42 (registry+https://github.com/rust-lang/crates.io-index)", | 1238 | "backtrace", |
1118 | "itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1239 | "itertools", |
1119 | "jemalloc-ctl 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1240 | "jemalloc-ctl", |
1120 | "jemallocator 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1241 | "jemallocator", |
1121 | "once_cell 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1242 | "once_cell", |
1122 | ] | 1243 | ] |
1123 | 1244 | ||
1124 | [[package]] | 1245 | [[package]] |
1125 | name = "ra_project_model" | 1246 | name = "ra_project_model" |
1126 | version = "0.1.0" | 1247 | version = "0.1.0" |
1127 | dependencies = [ | 1248 | dependencies = [ |
1128 | "cargo_metadata 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1249 | "cargo_metadata", |
1129 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1250 | "log", |
1130 | "ra_arena 0.1.0", | 1251 | "ra_arena", |
1131 | "ra_cfg 0.1.0", | 1252 | "ra_cfg", |
1132 | "ra_db 0.1.0", | 1253 | "ra_db", |
1133 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1254 | "rustc-hash", |
1134 | "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", | 1255 | "serde", |
1135 | "serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)", | 1256 | "serde_json", |
1136 | ] | 1257 | ] |
1137 | 1258 | ||
1138 | [[package]] | 1259 | [[package]] |
1139 | name = "ra_syntax" | 1260 | name = "ra_syntax" |
1140 | version = "0.1.0" | 1261 | version = "0.1.0" |
1141 | dependencies = [ | 1262 | dependencies = [ |
1142 | "arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1263 | "arrayvec", |
1143 | "itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1264 | "itertools", |
1144 | "once_cell 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1265 | "once_cell", |
1145 | "ra_parser 0.1.0", | 1266 | "ra_parser", |
1146 | "ra_text_edit 0.1.0", | 1267 | "ra_text_edit", |
1147 | "rowan 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1268 | "rowan", |
1148 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1269 | "rustc-hash", |
1149 | "rustc_lexer 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1270 | "rustc_lexer", |
1150 | "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", | 1271 | "serde", |
1151 | "smol_str 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", | 1272 | "smol_str", |
1152 | "test_utils 0.1.0", | 1273 | "test_utils", |
1153 | "walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1274 | "walkdir", |
1154 | ] | 1275 | ] |
1155 | 1276 | ||
1156 | [[package]] | 1277 | [[package]] |
1157 | name = "ra_text_edit" | 1278 | name = "ra_text_edit" |
1158 | version = "0.1.0" | 1279 | version = "0.1.0" |
1159 | dependencies = [ | 1280 | dependencies = [ |
1160 | "proptest 0.9.5 (registry+https://github.com/rust-lang/crates.io-index)", | 1281 | "proptest", |
1161 | "test_utils 0.1.0", | 1282 | "test_utils", |
1162 | "text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", | 1283 | "text_unit", |
1163 | ] | 1284 | ] |
1164 | 1285 | ||
1165 | [[package]] | 1286 | [[package]] |
1166 | name = "ra_tt" | 1287 | name = "ra_tt" |
1167 | version = "0.1.0" | 1288 | version = "0.1.0" |
1168 | dependencies = [ | 1289 | dependencies = [ |
1169 | "smol_str 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", | 1290 | "smol_str", |
1170 | ] | 1291 | ] |
1171 | 1292 | ||
1172 | [[package]] | 1293 | [[package]] |
1173 | name = "ra_vfs" | 1294 | name = "ra_vfs" |
1174 | version = "0.5.2" | 1295 | version = "0.5.2" |
1175 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1296 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1297 | checksum = "bc898f237e4b4498959ae0100c688793a23e77624d44ef710ba70094217f98e0" | ||
1176 | dependencies = [ | 1298 | dependencies = [ |
1177 | "crossbeam-channel 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1299 | "crossbeam-channel", |
1178 | "jod-thread 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1300 | "jod-thread", |
1179 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1301 | "log", |
1180 | "notify 4.0.15 (registry+https://github.com/rust-lang/crates.io-index)", | 1302 | "notify", |
1181 | "parking_lot 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1303 | "parking_lot", |
1182 | "relative-path 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1304 | "relative-path", |
1183 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1305 | "rustc-hash", |
1184 | "walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1306 | "walkdir", |
1185 | ] | 1307 | ] |
1186 | 1308 | ||
1187 | [[package]] | 1309 | [[package]] |
1188 | name = "ra_vfs_glob" | 1310 | name = "ra_vfs_glob" |
1189 | version = "0.1.0" | 1311 | version = "0.1.0" |
1190 | dependencies = [ | 1312 | dependencies = [ |
1191 | "globset 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", | 1313 | "globset", |
1192 | "ra_vfs 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1314 | "ra_vfs", |
1193 | ] | 1315 | ] |
1194 | 1316 | ||
1195 | [[package]] | 1317 | [[package]] |
1196 | name = "rand" | 1318 | name = "rand" |
1197 | version = "0.6.5" | 1319 | version = "0.6.5" |
1198 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1320 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1321 | checksum = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" | ||
1199 | dependencies = [ | 1322 | dependencies = [ |
1200 | "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", | 1323 | "autocfg 0.1.7", |
1201 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 1324 | "libc", |
1202 | "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1325 | "rand_chacha 0.1.1", |
1203 | "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1326 | "rand_core 0.4.2", |
1204 | "rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1327 | "rand_hc 0.1.0", |
1205 | "rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1328 | "rand_isaac", |
1206 | "rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", | 1329 | "rand_jitter", |
1207 | "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1330 | "rand_os", |
1208 | "rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1331 | "rand_pcg 0.1.2", |
1209 | "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1332 | "rand_xorshift", |
1210 | "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1333 | "winapi 0.3.8", |
1211 | ] | 1334 | ] |
1212 | 1335 | ||
1213 | [[package]] | 1336 | [[package]] |
1214 | name = "rand" | 1337 | name = "rand" |
1215 | version = "0.7.3" | 1338 | version = "0.7.3" |
1216 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1339 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1340 | checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" | ||
1217 | dependencies = [ | 1341 | dependencies = [ |
1218 | "getrandom 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", | 1342 | "getrandom", |
1219 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 1343 | "libc", |
1220 | "rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1344 | "rand_chacha 0.2.1", |
1221 | "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1345 | "rand_core 0.5.1", |
1222 | "rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1346 | "rand_hc 0.2.0", |
1223 | "rand_pcg 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1347 | "rand_pcg 0.2.1", |
1224 | ] | 1348 | ] |
1225 | 1349 | ||
1226 | [[package]] | 1350 | [[package]] |
1227 | name = "rand_chacha" | 1351 | name = "rand_chacha" |
1228 | version = "0.1.1" | 1352 | version = "0.1.1" |
1229 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1353 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1354 | checksum = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" | ||
1230 | dependencies = [ | 1355 | dependencies = [ |
1231 | "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", | 1356 | "autocfg 0.1.7", |
1232 | "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1357 | "rand_core 0.3.1", |
1233 | ] | 1358 | ] |
1234 | 1359 | ||
1235 | [[package]] | 1360 | [[package]] |
1236 | name = "rand_chacha" | 1361 | name = "rand_chacha" |
1237 | version = "0.2.1" | 1362 | version = "0.2.1" |
1238 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1363 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1364 | checksum = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853" | ||
1239 | dependencies = [ | 1365 | dependencies = [ |
1240 | "c2-chacha 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1366 | "c2-chacha", |
1241 | "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1367 | "rand_core 0.5.1", |
1242 | ] | 1368 | ] |
1243 | 1369 | ||
1244 | [[package]] | 1370 | [[package]] |
1245 | name = "rand_core" | 1371 | name = "rand_core" |
1246 | version = "0.3.1" | 1372 | version = "0.3.1" |
1247 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1373 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1374 | checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" | ||
1248 | dependencies = [ | 1375 | dependencies = [ |
1249 | "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1376 | "rand_core 0.4.2", |
1250 | ] | 1377 | ] |
1251 | 1378 | ||
1252 | [[package]] | 1379 | [[package]] |
1253 | name = "rand_core" | 1380 | name = "rand_core" |
1254 | version = "0.4.2" | 1381 | version = "0.4.2" |
1255 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1382 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1383 | checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" | ||
1256 | 1384 | ||
1257 | [[package]] | 1385 | [[package]] |
1258 | name = "rand_core" | 1386 | name = "rand_core" |
1259 | version = "0.5.1" | 1387 | version = "0.5.1" |
1260 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1388 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1389 | checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" | ||
1261 | dependencies = [ | 1390 | dependencies = [ |
1262 | "getrandom 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", | 1391 | "getrandom", |
1263 | ] | 1392 | ] |
1264 | 1393 | ||
1265 | [[package]] | 1394 | [[package]] |
1266 | name = "rand_hc" | 1395 | name = "rand_hc" |
1267 | version = "0.1.0" | 1396 | version = "0.1.0" |
1268 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1397 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1398 | checksum = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" | ||
1269 | dependencies = [ | 1399 | dependencies = [ |
1270 | "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1400 | "rand_core 0.3.1", |
1271 | ] | 1401 | ] |
1272 | 1402 | ||
1273 | [[package]] | 1403 | [[package]] |
1274 | name = "rand_hc" | 1404 | name = "rand_hc" |
1275 | version = "0.2.0" | 1405 | version = "0.2.0" |
1276 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1406 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1407 | checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" | ||
1277 | dependencies = [ | 1408 | dependencies = [ |
1278 | "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1409 | "rand_core 0.5.1", |
1279 | ] | 1410 | ] |
1280 | 1411 | ||
1281 | [[package]] | 1412 | [[package]] |
1282 | name = "rand_isaac" | 1413 | name = "rand_isaac" |
1283 | version = "0.1.1" | 1414 | version = "0.1.1" |
1284 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1415 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1416 | checksum = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" | ||
1285 | dependencies = [ | 1417 | dependencies = [ |
1286 | "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1418 | "rand_core 0.3.1", |
1287 | ] | 1419 | ] |
1288 | 1420 | ||
1289 | [[package]] | 1421 | [[package]] |
1290 | name = "rand_jitter" | 1422 | name = "rand_jitter" |
1291 | version = "0.1.4" | 1423 | version = "0.1.4" |
1292 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1424 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1425 | checksum = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" | ||
1293 | dependencies = [ | 1426 | dependencies = [ |
1294 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 1427 | "libc", |
1295 | "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1428 | "rand_core 0.4.2", |
1296 | "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1429 | "winapi 0.3.8", |
1297 | ] | 1430 | ] |
1298 | 1431 | ||
1299 | [[package]] | 1432 | [[package]] |
1300 | name = "rand_os" | 1433 | name = "rand_os" |
1301 | version = "0.1.3" | 1434 | version = "0.1.3" |
1302 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1435 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1436 | checksum = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" | ||
1303 | dependencies = [ | 1437 | dependencies = [ |
1304 | "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1438 | "cloudabi", |
1305 | "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1439 | "fuchsia-cprng", |
1306 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 1440 | "libc", |
1307 | "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1441 | "rand_core 0.4.2", |
1308 | "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1442 | "rdrand", |
1309 | "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1443 | "winapi 0.3.8", |
1310 | ] | 1444 | ] |
1311 | 1445 | ||
1312 | [[package]] | 1446 | [[package]] |
1313 | name = "rand_pcg" | 1447 | name = "rand_pcg" |
1314 | version = "0.1.2" | 1448 | version = "0.1.2" |
1315 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1449 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1450 | checksum = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" | ||
1316 | dependencies = [ | 1451 | dependencies = [ |
1317 | "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", | 1452 | "autocfg 0.1.7", |
1318 | "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1453 | "rand_core 0.4.2", |
1319 | ] | 1454 | ] |
1320 | 1455 | ||
1321 | [[package]] | 1456 | [[package]] |
1322 | name = "rand_pcg" | 1457 | name = "rand_pcg" |
1323 | version = "0.2.1" | 1458 | version = "0.2.1" |
1324 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1459 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1460 | checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" | ||
1325 | dependencies = [ | 1461 | dependencies = [ |
1326 | "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1462 | "rand_core 0.5.1", |
1327 | ] | 1463 | ] |
1328 | 1464 | ||
1329 | [[package]] | 1465 | [[package]] |
1330 | name = "rand_xorshift" | 1466 | name = "rand_xorshift" |
1331 | version = "0.1.1" | 1467 | version = "0.1.1" |
1332 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1468 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1469 | checksum = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" | ||
1333 | dependencies = [ | 1470 | dependencies = [ |
1334 | "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1471 | "rand_core 0.3.1", |
1335 | ] | 1472 | ] |
1336 | 1473 | ||
1337 | [[package]] | 1474 | [[package]] |
1338 | name = "rayon" | 1475 | name = "rayon" |
1339 | version = "1.3.0" | 1476 | version = "1.3.0" |
1340 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1477 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1478 | checksum = "db6ce3297f9c85e16621bb8cca38a06779ffc31bb8184e1be4bed2be4678a098" | ||
1341 | dependencies = [ | 1479 | dependencies = [ |
1342 | "crossbeam-deque 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1480 | "crossbeam-deque", |
1343 | "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1481 | "either", |
1344 | "rayon-core 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1482 | "rayon-core", |
1345 | ] | 1483 | ] |
1346 | 1484 | ||
1347 | [[package]] | 1485 | [[package]] |
1348 | name = "rayon-core" | 1486 | name = "rayon-core" |
1349 | version = "1.7.0" | 1487 | version = "1.7.0" |
1350 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1488 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1489 | checksum = "08a89b46efaf957e52b18062fb2f4660f8b8a4dde1807ca002690868ef2c85a9" | ||
1351 | dependencies = [ | 1490 | dependencies = [ |
1352 | "crossbeam-deque 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1491 | "crossbeam-deque", |
1353 | "crossbeam-queue 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1492 | "crossbeam-queue", |
1354 | "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1493 | "crossbeam-utils", |
1355 | "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1494 | "lazy_static", |
1356 | "num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1495 | "num_cpus", |
1357 | ] | 1496 | ] |
1358 | 1497 | ||
1359 | [[package]] | 1498 | [[package]] |
1360 | name = "rdrand" | 1499 | name = "rdrand" |
1361 | version = "0.4.0" | 1500 | version = "0.4.0" |
1362 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1501 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1502 | checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" | ||
1363 | dependencies = [ | 1503 | dependencies = [ |
1364 | "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1504 | "rand_core 0.3.1", |
1365 | ] | 1505 | ] |
1366 | 1506 | ||
1367 | [[package]] | 1507 | [[package]] |
1368 | name = "redox_syscall" | 1508 | name = "redox_syscall" |
1369 | version = "0.1.56" | 1509 | version = "0.1.56" |
1370 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1510 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1511 | checksum = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84" | ||
1371 | 1512 | ||
1372 | [[package]] | 1513 | [[package]] |
1373 | name = "regex" | 1514 | name = "regex" |
1374 | version = "1.3.3" | 1515 | version = "1.3.4" |
1375 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1516 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1517 | checksum = "322cf97724bea3ee221b78fe25ac9c46114ebb51747ad5babd51a2fc6a8235a8" | ||
1376 | dependencies = [ | 1518 | dependencies = [ |
1377 | "aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", | 1519 | "aho-corasick", |
1378 | "memchr 2.3.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1520 | "memchr", |
1379 | "regex-syntax 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", | 1521 | "regex-syntax", |
1380 | "thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1522 | "thread_local", |
1381 | ] | 1523 | ] |
1382 | 1524 | ||
1383 | [[package]] | 1525 | [[package]] |
1384 | name = "regex-syntax" | 1526 | name = "regex-syntax" |
1385 | version = "0.6.13" | 1527 | version = "0.6.14" |
1386 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1528 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1529 | checksum = "b28dfe3fe9badec5dbf0a79a9cccad2cfc2ab5484bdb3e44cbd1ae8b3ba2be06" | ||
1387 | 1530 | ||
1388 | [[package]] | 1531 | [[package]] |
1389 | name = "relative-path" | 1532 | name = "relative-path" |
1390 | version = "1.0.0" | 1533 | version = "1.0.0" |
1391 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1534 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1535 | checksum = "bedde000f40f2921ce439ea165c9c53fd629bfa115140c72e22aceacb4a21954" | ||
1392 | 1536 | ||
1393 | [[package]] | 1537 | [[package]] |
1394 | name = "remove_dir_all" | 1538 | name = "remove_dir_all" |
1395 | version = "0.5.2" | 1539 | version = "0.5.2" |
1396 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1540 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1541 | checksum = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" | ||
1397 | dependencies = [ | 1542 | dependencies = [ |
1398 | "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1543 | "winapi 0.3.8", |
1399 | ] | 1544 | ] |
1400 | 1545 | ||
1401 | [[package]] | 1546 | [[package]] |
1402 | name = "rowan" | 1547 | name = "rowan" |
1403 | version = "0.9.0" | 1548 | version = "0.9.0" |
1404 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1549 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1550 | checksum = "6d03d4eff7a4e8dcc362e4c06bb2b1b33af4bcd64336c7f40a31a05850336b6c" | ||
1405 | dependencies = [ | 1551 | dependencies = [ |
1406 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1552 | "rustc-hash", |
1407 | "smol_str 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", | 1553 | "smol_str", |
1408 | "text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", | 1554 | "text_unit", |
1409 | "thin-dst 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1555 | "thin-dst", |
1410 | ] | 1556 | ] |
1411 | 1557 | ||
1412 | [[package]] | 1558 | [[package]] |
1413 | name = "rustc-demangle" | 1559 | name = "rustc-demangle" |
1414 | version = "0.1.16" | 1560 | version = "0.1.16" |
1415 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1561 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1562 | checksum = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783" | ||
1416 | 1563 | ||
1417 | [[package]] | 1564 | [[package]] |
1418 | name = "rustc-hash" | 1565 | name = "rustc-hash" |
1419 | version = "1.0.1" | 1566 | version = "1.0.1" |
1420 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1567 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1568 | checksum = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8" | ||
1421 | dependencies = [ | 1569 | dependencies = [ |
1422 | "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1570 | "byteorder", |
1423 | ] | 1571 | ] |
1424 | 1572 | ||
1425 | [[package]] | 1573 | [[package]] |
1426 | name = "rustc_lexer" | 1574 | name = "rustc_lexer" |
1427 | version = "0.1.0" | 1575 | version = "0.1.0" |
1428 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1576 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1577 | checksum = "c86aae0c77166108c01305ee1a36a1e77289d7dc6ca0a3cd91ff4992de2d16a5" | ||
1429 | dependencies = [ | 1578 | dependencies = [ |
1430 | "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1579 | "unicode-xid", |
1431 | ] | 1580 | ] |
1432 | 1581 | ||
1433 | [[package]] | 1582 | [[package]] |
1434 | name = "rustc_version" | 1583 | name = "rustc_version" |
1435 | version = "0.2.3" | 1584 | version = "0.2.3" |
1436 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1585 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1586 | checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" | ||
1437 | dependencies = [ | 1587 | dependencies = [ |
1438 | "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1588 | "semver", |
1439 | ] | 1589 | ] |
1440 | 1590 | ||
1441 | [[package]] | 1591 | [[package]] |
1442 | name = "ryu" | 1592 | name = "ryu" |
1443 | version = "1.0.2" | 1593 | version = "1.0.2" |
1444 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1594 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1595 | checksum = "bfa8506c1de11c9c4e4c38863ccbe02a305c8188e85a05a784c9e11e1c3910c8" | ||
1445 | 1596 | ||
1446 | [[package]] | 1597 | [[package]] |
1447 | name = "salsa" | 1598 | name = "salsa" |
1448 | version = "0.14.1" | 1599 | version = "0.14.1" |
1449 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1600 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1601 | checksum = "4a006c56096acaaa5e82e5974c28d05ff1e84aa70615f19c53fecf8a1afb2fd2" | ||
1450 | dependencies = [ | 1602 | dependencies = [ |
1451 | "crossbeam 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1603 | "crossbeam", |
1452 | "indexmap 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1604 | "indexmap", |
1453 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1605 | "log", |
1454 | "parking_lot 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1606 | "parking_lot", |
1455 | "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1607 | "rand 0.7.3", |
1456 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1608 | "rustc-hash", |
1457 | "salsa-macros 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1609 | "salsa-macros", |
1458 | "smallvec 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1610 | "smallvec", |
1459 | ] | 1611 | ] |
1460 | 1612 | ||
1461 | [[package]] | 1613 | [[package]] |
1462 | name = "salsa-macros" | 1614 | name = "salsa-macros" |
1463 | version = "0.14.1" | 1615 | version = "0.14.1" |
1464 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1616 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1617 | checksum = "038a09b6271446f1123f142fe7e5bef6d4687c4cf82e6986be574c2af3745530" | ||
1465 | dependencies = [ | 1618 | dependencies = [ |
1466 | "heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1619 | "heck", |
1467 | "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1620 | "proc-macro2", |
1468 | "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1621 | "quote", |
1469 | "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", | 1622 | "syn", |
1470 | ] | 1623 | ] |
1471 | 1624 | ||
1472 | [[package]] | 1625 | [[package]] |
1473 | name = "same-file" | 1626 | name = "same-file" |
1474 | version = "1.0.6" | 1627 | version = "1.0.6" |
1475 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1628 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1629 | checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" | ||
1476 | dependencies = [ | 1630 | dependencies = [ |
1477 | "winapi-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1631 | "winapi-util", |
1478 | ] | 1632 | ] |
1479 | 1633 | ||
1480 | [[package]] | 1634 | [[package]] |
1481 | name = "scopeguard" | 1635 | name = "scopeguard" |
1482 | version = "1.0.0" | 1636 | version = "1.0.0" |
1483 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1637 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1638 | checksum = "b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d" | ||
1484 | 1639 | ||
1485 | [[package]] | 1640 | [[package]] |
1486 | name = "semver" | 1641 | name = "semver" |
1487 | version = "0.9.0" | 1642 | version = "0.9.0" |
1488 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1643 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1644 | checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" | ||
1489 | dependencies = [ | 1645 | dependencies = [ |
1490 | "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1646 | "semver-parser", |
1491 | "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", | 1647 | "serde", |
1492 | ] | 1648 | ] |
1493 | 1649 | ||
1494 | [[package]] | 1650 | [[package]] |
1495 | name = "semver-parser" | 1651 | name = "semver-parser" |
1496 | version = "0.7.0" | 1652 | version = "0.7.0" |
1497 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1653 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1654 | checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" | ||
1498 | 1655 | ||
1499 | [[package]] | 1656 | [[package]] |
1500 | name = "serde" | 1657 | name = "serde" |
1501 | version = "1.0.104" | 1658 | version = "1.0.104" |
1502 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1659 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1660 | checksum = "414115f25f818d7dfccec8ee535d76949ae78584fc4f79a6f45a904bf8ab4449" | ||
1503 | dependencies = [ | 1661 | dependencies = [ |
1504 | "serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", | 1662 | "serde_derive", |
1505 | ] | 1663 | ] |
1506 | 1664 | ||
1507 | [[package]] | 1665 | [[package]] |
1508 | name = "serde_derive" | 1666 | name = "serde_derive" |
1509 | version = "1.0.104" | 1667 | version = "1.0.104" |
1510 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1668 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1669 | checksum = "128f9e303a5a29922045a830221b8f78ec74a5f544944f3d5984f8ec3895ef64" | ||
1511 | dependencies = [ | 1670 | dependencies = [ |
1512 | "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1671 | "proc-macro2", |
1513 | "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1672 | "quote", |
1514 | "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", | 1673 | "syn", |
1515 | ] | 1674 | ] |
1516 | 1675 | ||
1517 | [[package]] | 1676 | [[package]] |
1518 | name = "serde_json" | 1677 | name = "serde_json" |
1519 | version = "1.0.44" | 1678 | version = "1.0.46" |
1520 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1679 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1680 | checksum = "21b01d7f0288608a01dca632cf1df859df6fd6ffa885300fc275ce2ba6221953" | ||
1521 | dependencies = [ | 1681 | dependencies = [ |
1522 | "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", | 1682 | "itoa", |
1523 | "ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1683 | "ryu", |
1524 | "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", | 1684 | "serde", |
1525 | ] | 1685 | ] |
1526 | 1686 | ||
1527 | [[package]] | 1687 | [[package]] |
1528 | name = "serde_repr" | 1688 | name = "serde_repr" |
1529 | version = "0.1.5" | 1689 | version = "0.1.5" |
1530 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1690 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1691 | checksum = "cd02c7587ec314570041b2754829f84d873ced14a96d1fd1823531e11db40573" | ||
1531 | dependencies = [ | 1692 | dependencies = [ |
1532 | "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1693 | "proc-macro2", |
1533 | "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1694 | "quote", |
1534 | "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", | 1695 | "syn", |
1535 | ] | 1696 | ] |
1536 | 1697 | ||
1537 | [[package]] | 1698 | [[package]] |
1538 | name = "serde_yaml" | 1699 | name = "serde_yaml" |
1539 | version = "0.8.11" | 1700 | version = "0.8.11" |
1540 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1701 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1702 | checksum = "691b17f19fc1ec9d94ec0b5864859290dff279dbd7b03f017afda54eb36c3c35" | ||
1541 | dependencies = [ | 1703 | dependencies = [ |
1542 | "dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", | 1704 | "dtoa", |
1543 | "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1705 | "linked-hash-map", |
1544 | "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", | 1706 | "serde", |
1545 | "yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1707 | "yaml-rust", |
1546 | ] | 1708 | ] |
1547 | 1709 | ||
1548 | [[package]] | 1710 | [[package]] |
1549 | name = "slab" | 1711 | name = "slab" |
1550 | version = "0.4.2" | 1712 | version = "0.4.2" |
1551 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1713 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1714 | checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" | ||
1552 | 1715 | ||
1553 | [[package]] | 1716 | [[package]] |
1554 | name = "smallvec" | 1717 | name = "smallvec" |
1555 | version = "1.1.0" | 1718 | version = "1.2.0" |
1556 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1719 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1720 | checksum = "5c2fb2ec9bcd216a5b0d0ccf31ab17b5ed1d627960edff65bbe95d3ce221cefc" | ||
1557 | 1721 | ||
1558 | [[package]] | 1722 | [[package]] |
1559 | name = "smol_str" | 1723 | name = "smol_str" |
1560 | version = "0.1.15" | 1724 | version = "0.1.15" |
1561 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1725 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1726 | checksum = "34836c9a295c62c2ce3514471117c5cb269891e8421b2aafdd910050576c4d8b" | ||
1562 | dependencies = [ | 1727 | dependencies = [ |
1563 | "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", | 1728 | "serde", |
1564 | ] | 1729 | ] |
1565 | 1730 | ||
1566 | [[package]] | 1731 | [[package]] |
1567 | name = "superslice" | 1732 | name = "superslice" |
1568 | version = "1.0.0" | 1733 | version = "1.0.0" |
1569 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1734 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1735 | checksum = "ab16ced94dbd8a46c82fd81e3ed9a8727dac2977ea869d217bcc4ea1f122e81f" | ||
1570 | 1736 | ||
1571 | [[package]] | 1737 | [[package]] |
1572 | name = "syn" | 1738 | name = "syn" |
1573 | version = "1.0.14" | 1739 | version = "1.0.14" |
1574 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1740 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1741 | checksum = "af6f3550d8dff9ef7dc34d384ac6f107e5d31c8f57d9f28e0081503f547ac8f5" | ||
1575 | dependencies = [ | 1742 | dependencies = [ |
1576 | "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1743 | "proc-macro2", |
1577 | "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1744 | "quote", |
1578 | "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1745 | "unicode-xid", |
1579 | ] | 1746 | ] |
1580 | 1747 | ||
1581 | [[package]] | 1748 | [[package]] |
1582 | name = "tempfile" | 1749 | name = "tempfile" |
1583 | version = "3.1.0" | 1750 | version = "3.1.0" |
1584 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1751 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1752 | checksum = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" | ||
1585 | dependencies = [ | 1753 | dependencies = [ |
1586 | "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", | 1754 | "cfg-if", |
1587 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 1755 | "libc", |
1588 | "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1756 | "rand 0.7.3", |
1589 | "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", | 1757 | "redox_syscall", |
1590 | "remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1758 | "remove_dir_all", |
1591 | "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1759 | "winapi 0.3.8", |
1592 | ] | 1760 | ] |
1593 | 1761 | ||
1594 | [[package]] | 1762 | [[package]] |
1595 | name = "termios" | 1763 | name = "termios" |
1596 | version = "0.3.1" | 1764 | version = "0.3.1" |
1597 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1765 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1766 | checksum = "72b620c5ea021d75a735c943269bb07d30c9b77d6ac6b236bc8b5c496ef05625" | ||
1598 | dependencies = [ | 1767 | dependencies = [ |
1599 | "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", | 1768 | "libc", |
1600 | ] | 1769 | ] |
1601 | 1770 | ||
1602 | [[package]] | 1771 | [[package]] |
1603 | name = "test_utils" | 1772 | name = "test_utils" |
1604 | version = "0.1.0" | 1773 | version = "0.1.0" |
1605 | dependencies = [ | 1774 | dependencies = [ |
1606 | "difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1775 | "difference", |
1607 | "serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)", | 1776 | "serde_json", |
1608 | "text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", | 1777 | "text_unit", |
1609 | ] | 1778 | ] |
1610 | 1779 | ||
1611 | [[package]] | 1780 | [[package]] |
1612 | name = "text_unit" | 1781 | name = "text_unit" |
1613 | version = "0.1.9" | 1782 | version = "0.1.9" |
1614 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1783 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1784 | checksum = "e08bbcb7a3adbda0eb23431206b653bdad3d8dea311e72d36bf2215e27a42579" | ||
1615 | 1785 | ||
1616 | [[package]] | 1786 | [[package]] |
1617 | name = "thin-dst" | 1787 | name = "thin-dst" |
1618 | version = "1.0.0" | 1788 | version = "1.0.0" |
1619 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1789 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1790 | checksum = "c52fd98a9e4913c466d83381a59245691875d2f3e04611fca57f964bd8aa96e1" | ||
1620 | 1791 | ||
1621 | [[package]] | 1792 | [[package]] |
1622 | name = "thread_local" | 1793 | name = "thread_local" |
1623 | version = "1.0.1" | 1794 | version = "1.0.1" |
1624 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1795 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1796 | checksum = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14" | ||
1625 | dependencies = [ | 1797 | dependencies = [ |
1626 | "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1798 | "lazy_static", |
1627 | ] | 1799 | ] |
1628 | 1800 | ||
1629 | [[package]] | 1801 | [[package]] |
1630 | name = "threadpool" | 1802 | name = "threadpool" |
1631 | version = "1.7.1" | 1803 | version = "1.7.1" |
1632 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1804 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1805 | checksum = "e2f0c90a5f3459330ac8bc0d2f879c693bb7a2f59689c1083fc4ef83834da865" | ||
1633 | dependencies = [ | 1806 | dependencies = [ |
1634 | "num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1807 | "num_cpus", |
1635 | ] | 1808 | ] |
1636 | 1809 | ||
1637 | [[package]] | 1810 | [[package]] |
1638 | name = "unicase" | 1811 | name = "unicase" |
1639 | version = "2.6.0" | 1812 | version = "2.6.0" |
1640 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1813 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1814 | checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" | ||
1641 | dependencies = [ | 1815 | dependencies = [ |
1642 | "version_check 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1816 | "version_check", |
1643 | ] | 1817 | ] |
1644 | 1818 | ||
1645 | [[package]] | 1819 | [[package]] |
1646 | name = "unicode-bidi" | 1820 | name = "unicode-bidi" |
1647 | version = "0.3.4" | 1821 | version = "0.3.4" |
1648 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1822 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1823 | checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" | ||
1649 | dependencies = [ | 1824 | dependencies = [ |
1650 | "matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1825 | "matches", |
1651 | ] | 1826 | ] |
1652 | 1827 | ||
1653 | [[package]] | 1828 | [[package]] |
1654 | name = "unicode-normalization" | 1829 | name = "unicode-normalization" |
1655 | version = "0.1.12" | 1830 | version = "0.1.12" |
1656 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1831 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1832 | checksum = "5479532badd04e128284890390c1e876ef7a993d0570b3597ae43dfa1d59afa4" | ||
1657 | dependencies = [ | 1833 | dependencies = [ |
1658 | "smallvec 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1834 | "smallvec", |
1659 | ] | 1835 | ] |
1660 | 1836 | ||
1661 | [[package]] | 1837 | [[package]] |
1662 | name = "unicode-segmentation" | 1838 | name = "unicode-segmentation" |
1663 | version = "1.6.0" | 1839 | version = "1.6.0" |
1664 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1840 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1841 | checksum = "e83e153d1053cbb5a118eeff7fd5be06ed99153f00dbcd8ae310c5fb2b22edc0" | ||
1665 | 1842 | ||
1666 | [[package]] | 1843 | [[package]] |
1667 | name = "unicode-xid" | 1844 | name = "unicode-xid" |
1668 | version = "0.2.0" | 1845 | version = "0.2.0" |
1669 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1846 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1847 | checksum = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c" | ||
1670 | 1848 | ||
1671 | [[package]] | 1849 | [[package]] |
1672 | name = "url" | 1850 | name = "url" |
1673 | version = "2.1.1" | 1851 | version = "2.1.1" |
1674 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1852 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1853 | checksum = "829d4a8476c35c9bf0bbce5a3b23f4106f79728039b726d292bb93bc106787cb" | ||
1675 | dependencies = [ | 1854 | dependencies = [ |
1676 | "idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1855 | "idna", |
1677 | "matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1856 | "matches", |
1678 | "percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1857 | "percent-encoding", |
1679 | "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", | 1858 | "serde", |
1680 | ] | ||
1681 | |||
1682 | [[package]] | ||
1683 | name = "uuid" | ||
1684 | version = "0.8.1" | ||
1685 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
1686 | dependencies = [ | ||
1687 | "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
1688 | "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", | ||
1689 | ] | 1859 | ] |
1690 | 1860 | ||
1691 | [[package]] | 1861 | [[package]] |
1692 | name = "version_check" | 1862 | name = "version_check" |
1693 | version = "0.9.1" | 1863 | version = "0.9.1" |
1694 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1864 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1865 | checksum = "078775d0255232fb988e6fccf26ddc9d1ac274299aaedcedce21c6f72cc533ce" | ||
1695 | 1866 | ||
1696 | [[package]] | 1867 | [[package]] |
1697 | name = "walkdir" | 1868 | name = "walkdir" |
1698 | version = "2.3.1" | 1869 | version = "2.3.1" |
1699 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1870 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1871 | checksum = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d" | ||
1700 | dependencies = [ | 1872 | dependencies = [ |
1701 | "same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", | 1873 | "same-file", |
1702 | "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1874 | "winapi 0.3.8", |
1703 | "winapi-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1875 | "winapi-util", |
1704 | ] | 1876 | ] |
1705 | 1877 | ||
1706 | [[package]] | 1878 | [[package]] |
1707 | name = "wasi" | 1879 | name = "wasi" |
1708 | version = "0.9.0+wasi-snapshot-preview1" | 1880 | version = "0.9.0+wasi-snapshot-preview1" |
1709 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1881 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1882 | checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" | ||
1710 | 1883 | ||
1711 | [[package]] | 1884 | [[package]] |
1712 | name = "winapi" | 1885 | name = "winapi" |
1713 | version = "0.2.8" | 1886 | version = "0.2.8" |
1714 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1887 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1888 | checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" | ||
1715 | 1889 | ||
1716 | [[package]] | 1890 | [[package]] |
1717 | name = "winapi" | 1891 | name = "winapi" |
1718 | version = "0.3.8" | 1892 | version = "0.3.8" |
1719 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1893 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1894 | checksum = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6" | ||
1720 | dependencies = [ | 1895 | dependencies = [ |
1721 | "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1896 | "winapi-i686-pc-windows-gnu", |
1722 | "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1897 | "winapi-x86_64-pc-windows-gnu", |
1723 | ] | 1898 | ] |
1724 | 1899 | ||
1725 | [[package]] | 1900 | [[package]] |
1726 | name = "winapi-build" | 1901 | name = "winapi-build" |
1727 | version = "0.1.1" | 1902 | version = "0.1.1" |
1728 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1903 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1904 | checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" | ||
1729 | 1905 | ||
1730 | [[package]] | 1906 | [[package]] |
1731 | name = "winapi-i686-pc-windows-gnu" | 1907 | name = "winapi-i686-pc-windows-gnu" |
1732 | version = "0.4.0" | 1908 | version = "0.4.0" |
1733 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1909 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1910 | checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" | ||
1734 | 1911 | ||
1735 | [[package]] | 1912 | [[package]] |
1736 | name = "winapi-util" | 1913 | name = "winapi-util" |
1737 | version = "0.1.3" | 1914 | version = "0.1.3" |
1738 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1915 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1916 | checksum = "4ccfbf554c6ad11084fb7517daca16cfdcaccbdadba4fc336f032a8b12c2ad80" | ||
1739 | dependencies = [ | 1917 | dependencies = [ |
1740 | "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1918 | "winapi 0.3.8", |
1741 | ] | 1919 | ] |
1742 | 1920 | ||
1743 | [[package]] | 1921 | [[package]] |
1744 | name = "winapi-x86_64-pc-windows-gnu" | 1922 | name = "winapi-x86_64-pc-windows-gnu" |
1745 | version = "0.4.0" | 1923 | version = "0.4.0" |
1746 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1924 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1925 | checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" | ||
1747 | 1926 | ||
1748 | [[package]] | 1927 | [[package]] |
1749 | name = "ws2_32-sys" | 1928 | name = "ws2_32-sys" |
1750 | version = "0.2.1" | 1929 | version = "0.2.1" |
1751 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1930 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1931 | checksum = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e" | ||
1752 | dependencies = [ | 1932 | dependencies = [ |
1753 | "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1933 | "winapi 0.2.8", |
1754 | "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1934 | "winapi-build", |
1755 | ] | 1935 | ] |
1756 | 1936 | ||
1757 | [[package]] | 1937 | [[package]] |
1758 | name = "xtask" | 1938 | name = "xtask" |
1759 | version = "0.1.0" | 1939 | version = "0.1.0" |
1760 | dependencies = [ | 1940 | dependencies = [ |
1761 | "anyhow 1.0.26 (registry+https://github.com/rust-lang/crates.io-index)", | 1941 | "anyhow", |
1762 | "pico-args 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1942 | "pico-args", |
1763 | "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1943 | "proc-macro2", |
1764 | "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1944 | "quote", |
1765 | "walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1945 | "walkdir", |
1766 | ] | 1946 | ] |
1767 | 1947 | ||
1768 | [[package]] | 1948 | [[package]] |
1769 | name = "yaml-rust" | 1949 | name = "yaml-rust" |
1770 | version = "0.4.3" | 1950 | version = "0.4.3" |
1771 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1951 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1952 | checksum = "65923dd1784f44da1d2c3dbbc5e822045628c590ba72123e1c73d3c230c4434d" | ||
1772 | dependencies = [ | 1953 | dependencies = [ |
1773 | "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1954 | "linked-hash-map", |
1774 | ] | 1955 | ] |
1775 | |||
1776 | [metadata] | ||
1777 | "checksum aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d" | ||
1778 | "checksum anyhow 1.0.26 (registry+https://github.com/rust-lang/crates.io-index)" = "7825f6833612eb2414095684fcf6c635becf3ce97fe48cf6421321e93bfbd53c" | ||
1779 | "checksum anymap 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)" = "33954243bd79057c2de7338850b85983a44588021f8a5fee574a8888c6de4344" | ||
1780 | "checksum arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cff77d8686867eceff3105329d4698d96c2391c176d5d03adc90c7389162b5b8" | ||
1781 | "checksum atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" | ||
1782 | "checksum autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2" | ||
1783 | "checksum autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d" | ||
1784 | "checksum backtrace 0.3.42 (registry+https://github.com/rust-lang/crates.io-index)" = "b4b1549d804b6c73f4817df2ba073709e96e426f12987127c48e6745568c350b" | ||
1785 | "checksum backtrace-sys 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "5d6575f128516de27e3ce99689419835fce9643a9b215a14d2b5b685be018491" | ||
1786 | "checksum base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7" | ||
1787 | "checksum bit-set 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e84c238982c4b1e1ee668d136c510c67a13465279c0cb367ea6baf6310620a80" | ||
1788 | "checksum bit-vec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f59bbe95d4e52a6398ec21238d31577f2b28a9d86807f06ca59d191d8440d0bb" | ||
1789 | "checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" | ||
1790 | "checksum bstr 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "fe8a65814ca90dfc9705af76bb6ba3c6e2534489a72270e797e603783bb4990b" | ||
1791 | "checksum byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5" | ||
1792 | "checksum c2-chacha 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "214238caa1bf3a496ec3392968969cab8549f96ff30652c9e56885329315f6bb" | ||
1793 | "checksum cargo_metadata 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "46e3374c604fb39d1a2f35ed5e4a4e30e60d01fab49446e08f1b3e9a90aef202" | ||
1794 | "checksum cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)" = "95e28fa049fda1c330bcf9d723be7663a899c4679724b34c81e9f5a326aab8cd" | ||
1795 | "checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" | ||
1796 | "checksum chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)" = "<none>" | ||
1797 | "checksum chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)" = "<none>" | ||
1798 | "checksum chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)" = "<none>" | ||
1799 | "checksum chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)" = "<none>" | ||
1800 | "checksum chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)" = "<none>" | ||
1801 | "checksum chalk-solve 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5)" = "<none>" | ||
1802 | "checksum clicolors-control 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90082ee5dcdd64dc4e9e0d37fbf3ee325419e39c0092191e0393df65518f741e" | ||
1803 | "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" | ||
1804 | "checksum console 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f5d540c2d34ac9dd0deb5f3b5f54c36c79efa78f6b3ad19106a554d07a7b5d9f" | ||
1805 | "checksum crossbeam 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "69323bff1fb41c635347b8ead484a5ca6c3f11914d784170b158d8449ab07f8e" | ||
1806 | "checksum crossbeam-channel 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "acec9a3b0b3559f15aee4f90746c4e5e293b701c0f7d3925d24e01645267b68c" | ||
1807 | "checksum crossbeam-deque 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c3aa945d63861bfe624b55d153a39684da1e8c0bc8fba932f7ee3a3c16cea3ca" | ||
1808 | "checksum crossbeam-epoch 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5064ebdbf05ce3cb95e45c8b086f72263f4166b29b97f6baff7ef7fe047b55ac" | ||
1809 | "checksum crossbeam-queue 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c695eeca1e7173472a32221542ae469b3e9aac3a4fc81f7696bcad82029493db" | ||
1810 | "checksum crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ce446db02cdc3165b94ae73111e570793400d0794e46125cc4056c81cbb039f4" | ||
1811 | "checksum difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" | ||
1812 | "checksum drop_bomb 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "69b26e475fd29098530e709294e94e661974c851aed42512793f120fed4e199f" | ||
1813 | "checksum dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "ea57b42383d091c85abcc2706240b94ab2a8fa1fc81c10ff23c4de06e2a90b5e" | ||
1814 | "checksum either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3" | ||
1815 | "checksum ena 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8944dc8fa28ce4a38f778bd46bf7d923fe73eed5a439398507246c8e017e6f36" | ||
1816 | "checksum encode_unicode 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" | ||
1817 | "checksum env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36" | ||
1818 | "checksum filetime 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1ff6d4dab0aa0c8e6346d46052e93b13a16cf847b54ed357087c35011048cc7d" | ||
1819 | "checksum fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33" | ||
1820 | "checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3" | ||
1821 | "checksum format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f7aea5a5909a74969507051a3b17adc84737e31a5f910559892aedce026f4d53" | ||
1822 | "checksum fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674" | ||
1823 | "checksum fsevent 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5ab7d1bd1bd33cc98b0889831b72da23c0aa4df9cec7e0702f46ecea04b35db6" | ||
1824 | "checksum fsevent-sys 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f41b048a94555da0f42f1d632e2e19510084fb8e303b0daa2816e733fb3644a0" | ||
1825 | "checksum fst 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "927fb434ff9f0115b215dc0efd2e4fbdd7448522a92a1aa37c77d6a2f8f1ebd6" | ||
1826 | "checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" | ||
1827 | "checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" | ||
1828 | "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" | ||
1829 | "checksum getrandom 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "7abc8dd8451921606d809ba32e95b6111925cd2906060d2dcc29c070220503eb" | ||
1830 | "checksum globset 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "925aa2cac82d8834e2b2a4415b6f6879757fb5c0928fc445ae76461a12eed8f2" | ||
1831 | "checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205" | ||
1832 | "checksum hermit-abi 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "eff2656d88f158ce120947499e971d743c05dbcbed62e5bd2f38f1698bbc3772" | ||
1833 | "checksum humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f" | ||
1834 | "checksum idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9" | ||
1835 | "checksum indexmap 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0b54058f0a6ff80b6803da8faf8997cde53872b38f4023728f6830b06cd3c0dc" | ||
1836 | "checksum inotify 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "24e40d6fd5d64e2082e0c796495c8ef5ad667a96d03e5aaa0becfd9d47bcbfb8" | ||
1837 | "checksum inotify-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e74a1aa87c59aeff6ef2cc2fa62d41bc43f54952f55652656b18a02fd5e356c0" | ||
1838 | "checksum insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0d499dc062e841590a67230d853bce62d0abeb91304927871670b7c55c461349" | ||
1839 | "checksum iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" | ||
1840 | "checksum itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f56a2d0bc861f9165be4eb3442afd3c236d8a98afd426f65d92324ae1091a484" | ||
1841 | "checksum itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f" | ||
1842 | "checksum jemalloc-ctl 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c502a5ff9dd2924f1ed32ba96e3b65735d837b4bfd978d3161b1702e66aca4b7" | ||
1843 | "checksum jemalloc-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0d3b9f3f5c9b31aa0f5ed3260385ac205db665baa41d49bb8338008ae94ede45" | ||
1844 | "checksum jemallocator 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "43ae63fcfc45e99ab3d1b29a46782ad679e98436c3169d15a167a1108a724b69" | ||
1845 | "checksum jod-thread 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2f52a11f73b88fab829a0e4d9e13ea5982c7ac457c72eb3541d82a4afdfce4ff" | ||
1846 | "checksum join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4dc7a5290e8c2606ce2be49f456d50f69173cb96d1541e4f66e34ac8b331a98f" | ||
1847 | "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" | ||
1848 | "checksum lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cc4fd87be4a815fd373e02773983940f0d75fb26fde8c098e9e45f7af03154c0" | ||
1849 | "checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" | ||
1850 | "checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f" | ||
1851 | "checksum libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)" = "d515b1f41455adea1313a4a2ac8a8a477634fbae63cc6100e3aebb207ce61558" | ||
1852 | "checksum linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ae91b68aebc4ddb91978b11a1b02ddd8602a05ec19002801c5666000e05e0f83" | ||
1853 | "checksum lock_api 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "79b2de95ecb4691949fea4716ca53cdbcfccb2c612e19644a8bad05edcf9f47b" | ||
1854 | "checksum log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7" | ||
1855 | "checksum lsp-server 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5383e043329615624bbf45e1ba27bd75c176762b2592855c659bc28ac580a06b" | ||
1856 | "checksum lsp-types 0.70.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ef197b24cb3f12fc3984667a505691fec9d683204ddff56f12b2d1940e09a988" | ||
1857 | "checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" | ||
1858 | "checksum memchr 2.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3197e20c7edb283f87c071ddfc7a2cca8f8e0b888c242959846a6fce03c72223" | ||
1859 | "checksum memoffset 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "75189eb85871ea5c2e2c15abbdd541185f63b408415e5051f5cac122d8c774b9" | ||
1860 | "checksum mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)" = "302dec22bcf6bae6dfb69c647187f4b4d0fb6f535521f7bc022430ce8e12008f" | ||
1861 | "checksum mio-extras 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "52403fe290012ce777c4626790c8951324a2b9e3316b3143779c72b029742f19" | ||
1862 | "checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919" | ||
1863 | "checksum net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "42550d9fb7b6684a6d404d9fa7250c2eb2646df731d1c06afc06dcee9e1bcf88" | ||
1864 | "checksum notify 4.0.15 (registry+https://github.com/rust-lang/crates.io-index)" = "80ae4a7688d1fab81c5bf19c64fc8db920be8d519ce6336ed4e7efe024724dbd" | ||
1865 | "checksum num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "c62be47e61d1842b9170f0fdeec8eba98e60e90e5446449a0545e5152acd7096" | ||
1866 | "checksum num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "46203554f085ff89c235cd12f7075f3233af9b11ed7c9e16dfe2560d03313ce6" | ||
1867 | "checksum once_cell 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b1c601810575c99596d4afc46f78a678c80105117c379eb3650cf99b8a21ce5b" | ||
1868 | "checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063" | ||
1869 | "checksum parking_lot 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "92e98c49ab0b7ce5b222f2cc9193fc4efe11c6d0bd4f648e374684a6857b1cfc" | ||
1870 | "checksum parking_lot_core 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7582838484df45743c8434fbff785e8edf260c28748353d44bc0da32e0ceabf1" | ||
1871 | "checksum paste 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "423a519e1c6e828f1e73b720f9d9ed2fa643dce8a7737fb43235ce0b41eeaa49" | ||
1872 | "checksum paste-impl 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4214c9e912ef61bf42b81ba9a47e8aad1b2ffaf739ab162bf96d1e011f54e6c5" | ||
1873 | "checksum percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" | ||
1874 | "checksum petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9c3659d1ee90221741f65dd128d9998311b0e40c5d3c23a62445938214abce4f" | ||
1875 | "checksum pico-args 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3ad1f1b834a05d42dae330066e9699a173b28185b3bdc3dbf14ca239585de8cc" | ||
1876 | "checksum ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "74490b50b9fbe561ac330df47c08f3f33073d2d00c150f719147d7c54522fa1b" | ||
1877 | "checksum proc-macro-hack 0.5.11 (registry+https://github.com/rust-lang/crates.io-index)" = "ecd45702f76d6d3c75a80564378ae228a85f0b59d2f3ed43c91b4a69eb2ebfc5" | ||
1878 | "checksum proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "3acb317c6ff86a4e579dfa00fc5e6cca91ecbb4e7eb2df0468805b674eb88548" | ||
1879 | "checksum proptest 0.9.5 (registry+https://github.com/rust-lang/crates.io-index)" = "bf6147d103a7c9d7598f4105cf049b15c99e2ecd93179bf024f0fd349be5ada4" | ||
1880 | "checksum quick-error 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" | ||
1881 | "checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe" | ||
1882 | "checksum ra_vfs 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bc898f237e4b4498959ae0100c688793a23e77624d44ef710ba70094217f98e0" | ||
1883 | "checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" | ||
1884 | "checksum rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" | ||
1885 | "checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" | ||
1886 | "checksum rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853" | ||
1887 | "checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" | ||
1888 | "checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" | ||
1889 | "checksum rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" | ||
1890 | "checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" | ||
1891 | "checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" | ||
1892 | "checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" | ||
1893 | "checksum rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" | ||
1894 | "checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" | ||
1895 | "checksum rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" | ||
1896 | "checksum rand_pcg 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" | ||
1897 | "checksum rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" | ||
1898 | "checksum rayon 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "db6ce3297f9c85e16621bb8cca38a06779ffc31bb8184e1be4bed2be4678a098" | ||
1899 | "checksum rayon-core 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "08a89b46efaf957e52b18062fb2f4660f8b8a4dde1807ca002690868ef2c85a9" | ||
1900 | "checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" | ||
1901 | "checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84" | ||
1902 | "checksum regex 1.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b5508c1941e4e7cb19965abef075d35a9a8b5cdf0846f30b4050e9b55dc55e87" | ||
1903 | "checksum regex-syntax 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e734e891f5b408a29efbf8309e656876276f49ab6a6ac208600b4419bd893d90" | ||
1904 | "checksum relative-path 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bedde000f40f2921ce439ea165c9c53fd629bfa115140c72e22aceacb4a21954" | ||
1905 | "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" | ||
1906 | "checksum rowan 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6d03d4eff7a4e8dcc362e4c06bb2b1b33af4bcd64336c7f40a31a05850336b6c" | ||
1907 | "checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783" | ||
1908 | "checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8" | ||
1909 | "checksum rustc_lexer 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c86aae0c77166108c01305ee1a36a1e77289d7dc6ca0a3cd91ff4992de2d16a5" | ||
1910 | "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" | ||
1911 | "checksum ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bfa8506c1de11c9c4e4c38863ccbe02a305c8188e85a05a784c9e11e1c3910c8" | ||
1912 | "checksum salsa 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4a006c56096acaaa5e82e5974c28d05ff1e84aa70615f19c53fecf8a1afb2fd2" | ||
1913 | "checksum salsa-macros 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)" = "038a09b6271446f1123f142fe7e5bef6d4687c4cf82e6986be574c2af3745530" | ||
1914 | "checksum same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" | ||
1915 | "checksum scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d" | ||
1916 | "checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" | ||
1917 | "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" | ||
1918 | "checksum serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)" = "414115f25f818d7dfccec8ee535d76949ae78584fc4f79a6f45a904bf8ab4449" | ||
1919 | "checksum serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)" = "128f9e303a5a29922045a830221b8f78ec74a5f544944f3d5984f8ec3895ef64" | ||
1920 | "checksum serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)" = "48c575e0cc52bdd09b47f330f646cf59afc586e9c4e3ccd6fc1f625b8ea1dad7" | ||
1921 | "checksum serde_repr 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "cd02c7587ec314570041b2754829f84d873ced14a96d1fd1823531e11db40573" | ||
1922 | "checksum serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)" = "691b17f19fc1ec9d94ec0b5864859290dff279dbd7b03f017afda54eb36c3c35" | ||
1923 | "checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" | ||
1924 | "checksum smallvec 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "44e59e0c9fa00817912ae6e4e6e3c4fe04455e75699d06eedc7d85917ed8e8f4" | ||
1925 | "checksum smol_str 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "34836c9a295c62c2ce3514471117c5cb269891e8421b2aafdd910050576c4d8b" | ||
1926 | "checksum superslice 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ab16ced94dbd8a46c82fd81e3ed9a8727dac2977ea869d217bcc4ea1f122e81f" | ||
1927 | "checksum syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)" = "af6f3550d8dff9ef7dc34d384ac6f107e5d31c8f57d9f28e0081503f547ac8f5" | ||
1928 | "checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" | ||
1929 | "checksum termios 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "72b620c5ea021d75a735c943269bb07d30c9b77d6ac6b236bc8b5c496ef05625" | ||
1930 | "checksum text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e08bbcb7a3adbda0eb23431206b653bdad3d8dea311e72d36bf2215e27a42579" | ||
1931 | "checksum thin-dst 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c52fd98a9e4913c466d83381a59245691875d2f3e04611fca57f964bd8aa96e1" | ||
1932 | "checksum thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14" | ||
1933 | "checksum threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e2f0c90a5f3459330ac8bc0d2f879c693bb7a2f59689c1083fc4ef83834da865" | ||
1934 | "checksum unicase 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" | ||
1935 | "checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" | ||
1936 | "checksum unicode-normalization 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "5479532badd04e128284890390c1e876ef7a993d0570b3597ae43dfa1d59afa4" | ||
1937 | "checksum unicode-segmentation 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e83e153d1053cbb5a118eeff7fd5be06ed99153f00dbcd8ae310c5fb2b22edc0" | ||
1938 | "checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c" | ||
1939 | "checksum url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "829d4a8476c35c9bf0bbce5a3b23f4106f79728039b726d292bb93bc106787cb" | ||
1940 | "checksum uuid 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9fde2f6a4bea1d6e007c4ad38c6839fa71cbb63b6dbf5b595aa38dc9b1093c11" | ||
1941 | "checksum version_check 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "078775d0255232fb988e6fccf26ddc9d1ac274299aaedcedce21c6f72cc533ce" | ||
1942 | "checksum walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d" | ||
1943 | "checksum wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)" = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" | ||
1944 | "checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" | ||
1945 | "checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6" | ||
1946 | "checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" | ||
1947 | "checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" | ||
1948 | "checksum winapi-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4ccfbf554c6ad11084fb7517daca16cfdcaccbdadba4fc336f032a8b12c2ad80" | ||
1949 | "checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" | ||
1950 | "checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e" | ||
1951 | "checksum yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "65923dd1784f44da1d2c3dbbc5e822045628c590ba72123e1c73d3c230c4434d" | ||
diff --git a/Cargo.toml b/Cargo.toml index 31a0560d9..e5620b1b7 100644 --- a/Cargo.toml +++ b/Cargo.toml | |||
@@ -10,5 +10,24 @@ debug = 0 | |||
10 | incremental = true | 10 | incremental = true |
11 | debug = 0 # set this to 1 or 2 to get more useful backtraces in debugger | 11 | debug = 0 # set this to 1 or 2 to get more useful backtraces in debugger |
12 | 12 | ||
13 | # ideally, we would use `build-override` here, but some crates are also | ||
14 | # needed at run-time and we end up compiling them twice | ||
15 | [profile.release.package.proc-macro2] | ||
16 | opt-level = 0 | ||
17 | [profile.release.package.quote] | ||
18 | opt-level = 0 | ||
19 | [profile.release.package.syn] | ||
20 | opt-level = 0 | ||
21 | [profile.release.package.serde_derive] | ||
22 | opt-level = 0 | ||
23 | [profile.release.package.chalk-derive] | ||
24 | opt-level = 0 | ||
25 | [profile.release.package.chalk-macros] | ||
26 | opt-level = 0 | ||
27 | [profile.release.package.salsa-macros] | ||
28 | opt-level = 0 | ||
29 | [profile.release.package.xtask] | ||
30 | opt-level = 0 | ||
31 | |||
13 | [patch.'crates-io'] | 32 | [patch.'crates-io'] |
14 | # rowan = { path = "../rowan" } | 33 | # rowan = { path = "../rowan" } |
@@ -2,14 +2,13 @@ | |||
2 | <img src="https://user-images.githubusercontent.com/1711539/72443316-5a79f280-37ae-11ea-858f-035209ece2dd.png" alt="rust-analyzer logo"> | 2 | <img src="https://user-images.githubusercontent.com/1711539/72443316-5a79f280-37ae-11ea-858f-035209ece2dd.png" alt="rust-analyzer logo"> |
3 | </p> | 3 | </p> |
4 | 4 | ||
5 | Rust Analyzer is an **experimental** modular compiler frontend for the Rust | 5 | rust-analyzer is an **experimental** modular compiler frontend for the Rust |
6 | language. It is a part of a larger rls-2.0 effort to create excellent IDE | 6 | language. It is a part of a larger rls-2.0 effort to create excellent IDE |
7 | support for Rust. If you want to get involved, check the rls-2.0 working group | 7 | support for Rust. If you want to get involved, check the rls-2.0 working group: |
8 | in the compiler-team repository: | ||
9 | 8 | ||
10 | https://github.com/rust-lang/compiler-team/tree/master/content/working-groups/rls-2.0 | 9 | https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Fwg-rls-2.2E0 |
11 | 10 | ||
12 | Work on the Rust Analyzer is sponsored by | 11 | Work on rust-analyzer is sponsored by |
13 | 12 | ||
14 | [<img src="https://user-images.githubusercontent.com/1711539/58105231-cf306900-7bee-11e9-83d8-9f1102e59d29.png" alt="Ferrous Systems" width="300">](https://ferrous-systems.com/) | 13 | [<img src="https://user-images.githubusercontent.com/1711539/58105231-cf306900-7bee-11e9-83d8-9f1102e59d29.png" alt="Ferrous Systems" width="300">](https://ferrous-systems.com/) |
15 | - [Mozilla](https://www.mozilla.org/en-US/) | 14 | - [Mozilla](https://www.mozilla.org/en-US/) |
@@ -17,17 +16,17 @@ Work on the Rust Analyzer is sponsored by | |||
17 | 16 | ||
18 | ## Language Server Quick Start | 17 | ## Language Server Quick Start |
19 | 18 | ||
20 | Rust Analyzer is a work-in-progress, so you'll have to build it from source, and | 19 | rust-analyzer is a work-in-progress, so you might encounter critical bugs. That |
21 | you might encounter critical bugs. That said, it is complete enough to provide a | 20 | said, it is complete enough to provide a useful IDE experience and some people |
22 | useful IDE experience and some people use it as a daily driver. | 21 | use it as a daily driver. |
23 | 22 | ||
24 | To build rust-analyzer, you need: | 23 | To build rust-analyzer, you need: |
25 | 24 | ||
26 | * latest stable rust for language server itself | 25 | * latest stable Rust for the language server itself |
27 | * latest stable npm and VS Code for VS Code extension | 26 | * latest stable npm and VS Code for VS Code extension |
28 | 27 | ||
29 | To quickly install rust-analyzer with VS Code extension with standard setup | 28 | To quickly install the rust-analyzer language server and VS Code extension with |
30 | (`code` and `cargo` in `$PATH`, etc), use this: | 29 | standard setup (`code` and `cargo` in `$PATH`, etc), use this: |
31 | 30 | ||
32 | ``` | 31 | ``` |
33 | # clone the repo | 32 | # clone the repo |
@@ -45,8 +44,8 @@ cannot start, see [./docs/user](./docs/user). | |||
45 | 44 | ||
46 | ## Documentation | 45 | ## Documentation |
47 | 46 | ||
48 | If you want to **contribute** to rust-analyzer or just curious about how things work | 47 | If you want to **contribute** to rust-analyzer or are just curious about how |
49 | under the hood, check the [./docs/dev](./docs/dev) folder. | 48 | things work under the hood, check the [./docs/dev](./docs/dev) folder. |
50 | 49 | ||
51 | If you want to **use** rust-analyzer's language server with your editor of | 50 | If you want to **use** rust-analyzer's language server with your editor of |
52 | choice, check [./docs/user](./docs/user) folder. It also contains some tips & tricks to help | 51 | choice, check [./docs/user](./docs/user) folder. It also contains some tips & tricks to help |
@@ -1,6 +1,6 @@ | |||
1 | status = [ | 1 | status = [ |
2 | "Rust (ubuntu-latest)", | 2 | "Rust (ubuntu-latest)", |
3 | # "Rust (windows-latest)", | 3 | "Rust (windows-latest)", |
4 | "Rust (macos-latest)", | 4 | "Rust (macos-latest)", |
5 | "TypeScript" | 5 | "TypeScript" |
6 | ] | 6 | ] |
diff --git a/crates/ra_assists/Cargo.toml b/crates/ra_assists/Cargo.toml index 0d2109e4e..6973038d4 100644 --- a/crates/ra_assists/Cargo.toml +++ b/crates/ra_assists/Cargo.toml | |||
@@ -16,6 +16,8 @@ either = "1.5" | |||
16 | ra_syntax = { path = "../ra_syntax" } | 16 | ra_syntax = { path = "../ra_syntax" } |
17 | ra_text_edit = { path = "../ra_text_edit" } | 17 | ra_text_edit = { path = "../ra_text_edit" } |
18 | ra_fmt = { path = "../ra_fmt" } | 18 | ra_fmt = { path = "../ra_fmt" } |
19 | ra_prof = { path = "../ra_prof" } | ||
19 | ra_db = { path = "../ra_db" } | 20 | ra_db = { path = "../ra_db" } |
21 | ra_ide_db = { path = "../ra_ide_db" } | ||
20 | hir = { path = "../ra_hir", package = "ra_hir" } | 22 | hir = { path = "../ra_hir", package = "ra_hir" } |
21 | test_utils = { path = "../test_utils" } | 23 | test_utils = { path = "../test_utils" } |
diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs index 43f0d664b..5aab5fb8b 100644 --- a/crates/ra_assists/src/assist_ctx.rs +++ b/crates/ra_assists/src/assist_ctx.rs | |||
@@ -1,8 +1,8 @@ | |||
1 | //! This module defines `AssistCtx` -- the API surface that is exposed to assists. | 1 | //! This module defines `AssistCtx` -- the API surface that is exposed to assists. |
2 | use either::Either; | 2 | use hir::{InFile, SourceAnalyzer, SourceBinder}; |
3 | use hir::{db::HirDatabase, InFile, SourceAnalyzer, SourceBinder}; | 3 | use ra_db::{FileRange, SourceDatabase}; |
4 | use ra_db::FileRange; | ||
5 | use ra_fmt::{leading_indent, reindent}; | 4 | use ra_fmt::{leading_indent, reindent}; |
5 | use ra_ide_db::RootDatabase; | ||
6 | use ra_syntax::{ | 6 | use ra_syntax::{ |
7 | algo::{self, find_covering_element, find_node_at_offset}, | 7 | algo::{self, find_covering_element, find_node_at_offset}, |
8 | AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextUnit, | 8 | AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextUnit, |
@@ -10,14 +10,40 @@ use ra_syntax::{ | |||
10 | }; | 10 | }; |
11 | use ra_text_edit::TextEditBuilder; | 11 | use ra_text_edit::TextEditBuilder; |
12 | 12 | ||
13 | use crate::{AssistAction, AssistId, AssistLabel, ResolvedAssist}; | 13 | use crate::{AssistAction, AssistId, AssistLabel, GroupLabel, ResolvedAssist}; |
14 | 14 | ||
15 | #[derive(Clone, Debug)] | 15 | #[derive(Clone, Debug)] |
16 | pub(crate) enum Assist { | 16 | pub(crate) struct Assist(pub(crate) Vec<AssistInfo>); |
17 | Unresolved { label: AssistLabel }, | 17 | |
18 | Resolved { assist: ResolvedAssist }, | 18 | #[derive(Clone, Debug)] |
19 | pub(crate) struct AssistInfo { | ||
20 | pub(crate) label: AssistLabel, | ||
21 | pub(crate) group_label: Option<GroupLabel>, | ||
22 | pub(crate) action: Option<AssistAction>, | ||
23 | } | ||
24 | |||
25 | impl AssistInfo { | ||
26 | fn new(label: AssistLabel) -> AssistInfo { | ||
27 | AssistInfo { label, group_label: None, action: None } | ||
28 | } | ||
29 | |||
30 | fn resolved(self, action: AssistAction) -> AssistInfo { | ||
31 | AssistInfo { action: Some(action), ..self } | ||
32 | } | ||
33 | |||
34 | fn with_group(self, group_label: GroupLabel) -> AssistInfo { | ||
35 | AssistInfo { group_label: Some(group_label), ..self } | ||
36 | } | ||
37 | |||
38 | pub(crate) fn into_resolved(self) -> Option<ResolvedAssist> { | ||
39 | let label = self.label; | ||
40 | let group_label = self.group_label; | ||
41 | self.action.map(|action| ResolvedAssist { label, group_label, action }) | ||
42 | } | ||
19 | } | 43 | } |
20 | 44 | ||
45 | pub(crate) type AssistHandler = fn(AssistCtx) -> Option<Assist>; | ||
46 | |||
21 | /// `AssistCtx` allows to apply an assist or check if it could be applied. | 47 | /// `AssistCtx` allows to apply an assist or check if it could be applied. |
22 | /// | 48 | /// |
23 | /// Assists use a somewhat over-engineered approach, given the current needs. The | 49 | /// Assists use a somewhat over-engineered approach, given the current needs. The |
@@ -49,14 +75,14 @@ pub(crate) enum Assist { | |||
49 | /// moment, because the LSP API is pretty awkward in this place, and it's much | 75 | /// moment, because the LSP API is pretty awkward in this place, and it's much |
50 | /// easier to just compute the edit eagerly :-) | 76 | /// easier to just compute the edit eagerly :-) |
51 | #[derive(Debug)] | 77 | #[derive(Debug)] |
52 | pub(crate) struct AssistCtx<'a, DB> { | 78 | pub(crate) struct AssistCtx<'a> { |
53 | pub(crate) db: &'a DB, | 79 | pub(crate) db: &'a RootDatabase, |
54 | pub(crate) frange: FileRange, | 80 | pub(crate) frange: FileRange, |
55 | source_file: SourceFile, | 81 | source_file: SourceFile, |
56 | should_compute_edit: bool, | 82 | should_compute_edit: bool, |
57 | } | 83 | } |
58 | 84 | ||
59 | impl<'a, DB> Clone for AssistCtx<'a, DB> { | 85 | impl Clone for AssistCtx<'_> { |
60 | fn clone(&self) -> Self { | 86 | fn clone(&self) -> Self { |
61 | AssistCtx { | 87 | AssistCtx { |
62 | db: self.db, | 88 | db: self.db, |
@@ -67,15 +93,10 @@ impl<'a, DB> Clone for AssistCtx<'a, DB> { | |||
67 | } | 93 | } |
68 | } | 94 | } |
69 | 95 | ||
70 | impl<'a, DB: HirDatabase> AssistCtx<'a, DB> { | 96 | impl<'a> AssistCtx<'a> { |
71 | pub(crate) fn with_ctx<F, T>(db: &DB, frange: FileRange, should_compute_edit: bool, f: F) -> T | 97 | pub fn new(db: &RootDatabase, frange: FileRange, should_compute_edit: bool) -> AssistCtx { |
72 | where | ||
73 | F: FnOnce(AssistCtx<DB>) -> T, | ||
74 | { | ||
75 | let parse = db.parse(frange.file_id); | 98 | let parse = db.parse(frange.file_id); |
76 | 99 | AssistCtx { db, frange, source_file: parse.tree(), should_compute_edit } | |
77 | let ctx = AssistCtx { db, frange, source_file: parse.tree(), should_compute_edit }; | ||
78 | f(ctx) | ||
79 | } | 100 | } |
80 | 101 | ||
81 | pub(crate) fn add_assist( | 102 | pub(crate) fn add_assist( |
@@ -84,48 +105,23 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> { | |||
84 | label: impl Into<String>, | 105 | label: impl Into<String>, |
85 | f: impl FnOnce(&mut ActionBuilder), | 106 | f: impl FnOnce(&mut ActionBuilder), |
86 | ) -> Option<Assist> { | 107 | ) -> Option<Assist> { |
87 | let label = AssistLabel { label: label.into(), id }; | 108 | let label = AssistLabel::new(label.into(), id); |
88 | assert!(label.label.chars().nth(0).unwrap().is_uppercase()); | ||
89 | 109 | ||
90 | let assist = if self.should_compute_edit { | 110 | let mut info = AssistInfo::new(label); |
111 | if self.should_compute_edit { | ||
91 | let action = { | 112 | let action = { |
92 | let mut edit = ActionBuilder::default(); | 113 | let mut edit = ActionBuilder::default(); |
93 | f(&mut edit); | 114 | f(&mut edit); |
94 | edit.build() | 115 | edit.build() |
95 | }; | 116 | }; |
96 | Assist::Resolved { assist: ResolvedAssist { label, action_data: Either::Left(action) } } | 117 | info = info.resolved(action) |
97 | } else { | ||
98 | Assist::Unresolved { label } | ||
99 | }; | 118 | }; |
100 | 119 | ||
101 | Some(assist) | 120 | Some(Assist(vec![info])) |
102 | } | 121 | } |
103 | 122 | ||
104 | #[allow(dead_code)] // will be used for auto import assist with multiple actions | 123 | pub(crate) fn add_assist_group(self, group_name: impl Into<String>) -> AssistGroup<'a> { |
105 | pub(crate) fn add_assist_group( | 124 | AssistGroup { ctx: self, group_name: group_name.into(), assists: Vec::new() } |
106 | self, | ||
107 | id: AssistId, | ||
108 | label: impl Into<String>, | ||
109 | f: impl FnOnce() -> Vec<ActionBuilder>, | ||
110 | ) -> Option<Assist> { | ||
111 | let label = AssistLabel { label: label.into(), id }; | ||
112 | let assist = if self.should_compute_edit { | ||
113 | let actions = f(); | ||
114 | assert!(!actions.is_empty(), "Assist cannot have no"); | ||
115 | |||
116 | Assist::Resolved { | ||
117 | assist: ResolvedAssist { | ||
118 | label, | ||
119 | action_data: Either::Right( | ||
120 | actions.into_iter().map(ActionBuilder::build).collect(), | ||
121 | ), | ||
122 | }, | ||
123 | } | ||
124 | } else { | ||
125 | Assist::Unresolved { label } | ||
126 | }; | ||
127 | |||
128 | Some(assist) | ||
129 | } | 125 | } |
130 | 126 | ||
131 | pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> { | 127 | pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> { |
@@ -142,7 +138,7 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> { | |||
142 | pub(crate) fn covering_element(&self) -> SyntaxElement { | 138 | pub(crate) fn covering_element(&self) -> SyntaxElement { |
143 | find_covering_element(self.source_file.syntax(), self.frange.range) | 139 | find_covering_element(self.source_file.syntax(), self.frange.range) |
144 | } | 140 | } |
145 | pub(crate) fn source_binder(&self) -> SourceBinder<'a, DB> { | 141 | pub(crate) fn source_binder(&self) -> SourceBinder<'a, RootDatabase> { |
146 | SourceBinder::new(self.db) | 142 | SourceBinder::new(self.db) |
147 | } | 143 | } |
148 | pub(crate) fn source_analyzer( | 144 | pub(crate) fn source_analyzer( |
@@ -159,21 +155,48 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> { | |||
159 | } | 155 | } |
160 | } | 156 | } |
161 | 157 | ||
158 | pub(crate) struct AssistGroup<'a> { | ||
159 | ctx: AssistCtx<'a>, | ||
160 | group_name: String, | ||
161 | assists: Vec<AssistInfo>, | ||
162 | } | ||
163 | |||
164 | impl<'a> AssistGroup<'a> { | ||
165 | pub(crate) fn add_assist( | ||
166 | &mut self, | ||
167 | id: AssistId, | ||
168 | label: impl Into<String>, | ||
169 | f: impl FnOnce(&mut ActionBuilder), | ||
170 | ) { | ||
171 | let label = AssistLabel::new(label.into(), id); | ||
172 | |||
173 | let mut info = AssistInfo::new(label).with_group(GroupLabel(self.group_name.clone())); | ||
174 | if self.ctx.should_compute_edit { | ||
175 | let action = { | ||
176 | let mut edit = ActionBuilder::default(); | ||
177 | f(&mut edit); | ||
178 | edit.build() | ||
179 | }; | ||
180 | info = info.resolved(action) | ||
181 | }; | ||
182 | |||
183 | self.assists.push(info) | ||
184 | } | ||
185 | |||
186 | pub(crate) fn finish(self) -> Option<Assist> { | ||
187 | assert!(!self.assists.is_empty()); | ||
188 | Some(Assist(self.assists)) | ||
189 | } | ||
190 | } | ||
191 | |||
162 | #[derive(Default)] | 192 | #[derive(Default)] |
163 | pub(crate) struct ActionBuilder { | 193 | pub(crate) struct ActionBuilder { |
164 | edit: TextEditBuilder, | 194 | edit: TextEditBuilder, |
165 | cursor_position: Option<TextUnit>, | 195 | cursor_position: Option<TextUnit>, |
166 | target: Option<TextRange>, | 196 | target: Option<TextRange>, |
167 | label: Option<String>, | ||
168 | } | 197 | } |
169 | 198 | ||
170 | impl ActionBuilder { | 199 | impl ActionBuilder { |
171 | #[allow(dead_code)] | ||
172 | /// Adds a custom label to the action, if it needs to be different from the assist label | ||
173 | pub(crate) fn label(&mut self, label: impl Into<String>) { | ||
174 | self.label = Some(label.into()) | ||
175 | } | ||
176 | |||
177 | /// Replaces specified `range` of text with a given string. | 200 | /// Replaces specified `range` of text with a given string. |
178 | pub(crate) fn replace(&mut self, range: TextRange, replace_with: impl Into<String>) { | 201 | pub(crate) fn replace(&mut self, range: TextRange, replace_with: impl Into<String>) { |
179 | self.edit.replace(range, replace_with.into()) | 202 | self.edit.replace(range, replace_with.into()) |
@@ -232,7 +255,6 @@ impl ActionBuilder { | |||
232 | edit: self.edit.finish(), | 255 | edit: self.edit.finish(), |
233 | cursor_position: self.cursor_position, | 256 | cursor_position: self.cursor_position, |
234 | target: self.target, | 257 | target: self.target, |
235 | label: self.label, | ||
236 | } | 258 | } |
237 | } | 259 | } |
238 | } | 260 | } |
diff --git a/crates/ra_assists/src/doc_tests.rs b/crates/ra_assists/src/doc_tests.rs index 5dc1ee233..c0f9bc1fb 100644 --- a/crates/ra_assists/src/doc_tests.rs +++ b/crates/ra_assists/src/doc_tests.rs | |||
@@ -5,24 +5,24 @@ | |||
5 | 5 | ||
6 | mod generated; | 6 | mod generated; |
7 | 7 | ||
8 | use ra_db::{fixture::WithFixture, FileRange}; | 8 | use ra_db::FileRange; |
9 | use test_utils::{assert_eq_text, extract_range_or_offset}; | 9 | use test_utils::{assert_eq_text, extract_range_or_offset}; |
10 | 10 | ||
11 | use crate::test_db::TestDB; | 11 | use crate::resolved_assists; |
12 | 12 | ||
13 | fn check(assist_id: &str, before: &str, after: &str) { | 13 | fn check(assist_id: &str, before: &str, after: &str) { |
14 | let (selection, before) = extract_range_or_offset(before); | 14 | let (selection, before) = extract_range_or_offset(before); |
15 | let (db, file_id) = TestDB::with_single_file(&before); | 15 | let (db, file_id) = crate::helpers::with_single_file(&before); |
16 | let frange = FileRange { file_id, range: selection.into() }; | 16 | let frange = FileRange { file_id, range: selection.into() }; |
17 | 17 | ||
18 | let assist = crate::assists(&db, frange) | 18 | let assist = resolved_assists(&db, frange) |
19 | .into_iter() | 19 | .into_iter() |
20 | .find(|assist| assist.label.id.0 == assist_id) | 20 | .find(|assist| assist.label.id.0 == assist_id) |
21 | .unwrap_or_else(|| { | 21 | .unwrap_or_else(|| { |
22 | panic!( | 22 | panic!( |
23 | "\n\nAssist is not applicable: {}\nAvailable assists: {}", | 23 | "\n\nAssist is not applicable: {}\nAvailable assists: {}", |
24 | assist_id, | 24 | assist_id, |
25 | crate::assists(&db, frange) | 25 | resolved_assists(&db, frange) |
26 | .into_iter() | 26 | .into_iter() |
27 | .map(|assist| assist.label.id.0) | 27 | .map(|assist| assist.label.id.0) |
28 | .collect::<Vec<_>>() | 28 | .collect::<Vec<_>>() |
@@ -30,6 +30,6 @@ fn check(assist_id: &str, before: &str, after: &str) { | |||
30 | ) | 30 | ) |
31 | }); | 31 | }); |
32 | 32 | ||
33 | let actual = assist.get_first_action().edit.apply(&before); | 33 | let actual = assist.action.edit.apply(&before); |
34 | assert_eq_text!(after, &actual); | 34 | assert_eq_text!(after, &actual); |
35 | } | 35 | } |
diff --git a/crates/ra_assists/src/doc_tests/generated.rs b/crates/ra_assists/src/doc_tests/generated.rs index 7d84dc8fb..4ab09b167 100644 --- a/crates/ra_assists/src/doc_tests/generated.rs +++ b/crates/ra_assists/src/doc_tests/generated.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | //! Generated file, do not edit by hand, see `crate/ra_tools/src/codegen` | 1 | //! Generated file, do not edit by hand, see `xtask/src/codegen` |
2 | 2 | ||
3 | use super::check; | 3 | use super::check; |
4 | 4 | ||
@@ -161,21 +161,6 @@ impl Trait<u32> for () { | |||
161 | } | 161 | } |
162 | 162 | ||
163 | #[test] | 163 | #[test] |
164 | fn doctest_add_import() { | ||
165 | check( | ||
166 | "add_import", | ||
167 | r#####" | ||
168 | fn process(map: std::collections::<|>HashMap<String, String>) {} | ||
169 | "#####, | ||
170 | r#####" | ||
171 | use std::collections::HashMap; | ||
172 | |||
173 | fn process(map: HashMap<String, String>) {} | ||
174 | "#####, | ||
175 | ) | ||
176 | } | ||
177 | |||
178 | #[test] | ||
179 | fn doctest_add_new() { | 164 | fn doctest_add_new() { |
180 | check( | 165 | check( |
181 | "add_new", | 166 | "add_new", |
@@ -215,6 +200,27 @@ fn main() { | |||
215 | } | 200 | } |
216 | 201 | ||
217 | #[test] | 202 | #[test] |
203 | fn doctest_auto_import() { | ||
204 | check( | ||
205 | "auto_import", | ||
206 | r#####" | ||
207 | fn main() { | ||
208 | let map = HashMap<|>::new(); | ||
209 | } | ||
210 | pub mod std { pub mod collections { pub struct HashMap { } } } | ||
211 | "#####, | ||
212 | r#####" | ||
213 | use std::collections::HashMap; | ||
214 | |||
215 | fn main() { | ||
216 | let map = HashMap::new(); | ||
217 | } | ||
218 | pub mod std { pub mod collections { pub struct HashMap { } } } | ||
219 | "#####, | ||
220 | ) | ||
221 | } | ||
222 | |||
223 | #[test] | ||
218 | fn doctest_change_visibility() { | 224 | fn doctest_change_visibility() { |
219 | check( | 225 | check( |
220 | "change_visibility", | 226 | "change_visibility", |
@@ -571,6 +577,21 @@ fn handle(action: Action) { | |||
571 | } | 577 | } |
572 | 578 | ||
573 | #[test] | 579 | #[test] |
580 | fn doctest_replace_qualified_name_with_use() { | ||
581 | check( | ||
582 | "replace_qualified_name_with_use", | ||
583 | r#####" | ||
584 | fn process(map: std::collections::<|>HashMap<String, String>) {} | ||
585 | "#####, | ||
586 | r#####" | ||
587 | use std::collections::HashMap; | ||
588 | |||
589 | fn process(map: HashMap<String, String>) {} | ||
590 | "#####, | ||
591 | ) | ||
592 | } | ||
593 | |||
594 | #[test] | ||
574 | fn doctest_split_import() { | 595 | fn doctest_split_import() { |
575 | check( | 596 | check( |
576 | "split_import", | 597 | "split_import", |
diff --git a/crates/ra_assists/src/assists/add_custom_impl.rs b/crates/ra_assists/src/handlers/add_custom_impl.rs index f91034967..7fdd816bf 100644 --- a/crates/ra_assists/src/assists/add_custom_impl.rs +++ b/crates/ra_assists/src/handlers/add_custom_impl.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use crate::{Assist, AssistCtx, AssistId}; | 3 | use crate::{Assist, AssistCtx, AssistId}; |
4 | use hir::db::HirDatabase; | 4 | |
5 | use join_to_string::join; | 5 | use join_to_string::join; |
6 | use ra_syntax::{ | 6 | use ra_syntax::{ |
7 | ast::{self, AstNode}, | 7 | ast::{self, AstNode}, |
@@ -29,7 +29,7 @@ const DERIVE_TRAIT: &str = "derive"; | |||
29 | // | 29 | // |
30 | // } | 30 | // } |
31 | // ``` | 31 | // ``` |
32 | pub(crate) fn add_custom_impl(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 32 | pub(crate) fn add_custom_impl(ctx: AssistCtx) -> Option<Assist> { |
33 | let input = ctx.find_node_at_offset::<ast::AttrInput>()?; | 33 | let input = ctx.find_node_at_offset::<ast::AttrInput>()?; |
34 | let attr = input.syntax().parent().and_then(ast::Attr::cast)?; | 34 | let attr = input.syntax().parent().and_then(ast::Attr::cast)?; |
35 | 35 | ||
diff --git a/crates/ra_assists/src/assists/add_derive.rs b/crates/ra_assists/src/handlers/add_derive.rs index 6d9af3905..b0d1a0a80 100644 --- a/crates/ra_assists/src/assists/add_derive.rs +++ b/crates/ra_assists/src/handlers/add_derive.rs | |||
@@ -1,4 +1,3 @@ | |||
1 | use hir::db::HirDatabase; | ||
2 | use ra_syntax::{ | 1 | use ra_syntax::{ |
3 | ast::{self, AstNode, AttrsOwner}, | 2 | ast::{self, AstNode, AttrsOwner}, |
4 | SyntaxKind::{COMMENT, WHITESPACE}, | 3 | SyntaxKind::{COMMENT, WHITESPACE}, |
@@ -25,7 +24,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
25 | // y: u32, | 24 | // y: u32, |
26 | // } | 25 | // } |
27 | // ``` | 26 | // ``` |
28 | pub(crate) fn add_derive(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 27 | pub(crate) fn add_derive(ctx: AssistCtx) -> Option<Assist> { |
29 | let nominal = ctx.find_node_at_offset::<ast::NominalDef>()?; | 28 | let nominal = ctx.find_node_at_offset::<ast::NominalDef>()?; |
30 | let node_start = derive_insertion_offset(&nominal)?; | 29 | let node_start = derive_insertion_offset(&nominal)?; |
31 | ctx.add_assist(AssistId("add_derive"), "Add `#[derive]`", |edit| { | 30 | ctx.add_assist(AssistId("add_derive"), "Add `#[derive]`", |edit| { |
diff --git a/crates/ra_assists/src/assists/add_explicit_type.rs b/crates/ra_assists/src/handlers/add_explicit_type.rs index 38a351a54..2cb9d2f48 100644 --- a/crates/ra_assists/src/assists/add_explicit_type.rs +++ b/crates/ra_assists/src/handlers/add_explicit_type.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | use hir::{db::HirDatabase, HirDisplay}; | 1 | use hir::HirDisplay; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | ast::{self, AstNode, LetStmt, NameOwner}, | 3 | ast::{self, AstNode, LetStmt, NameOwner, TypeAscriptionOwner}, |
4 | TextRange, T, | 4 | TextRange, |
5 | }; | 5 | }; |
6 | 6 | ||
7 | use crate::{Assist, AssistCtx, AssistId}; | 7 | use crate::{Assist, AssistCtx, AssistId}; |
@@ -21,7 +21,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
21 | // let x: i32 = 92; | 21 | // let x: i32 = 92; |
22 | // } | 22 | // } |
23 | // ``` | 23 | // ``` |
24 | pub(crate) fn add_explicit_type(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 24 | pub(crate) fn add_explicit_type(ctx: AssistCtx) -> Option<Assist> { |
25 | let stmt = ctx.find_node_at_offset::<LetStmt>()?; | 25 | let stmt = ctx.find_node_at_offset::<LetStmt>()?; |
26 | let expr = stmt.initializer()?; | 26 | let expr = stmt.initializer()?; |
27 | let pat = stmt.pat()?; | 27 | let pat = stmt.pat()?; |
@@ -34,17 +34,21 @@ pub(crate) fn add_explicit_type(ctx: AssistCtx<impl HirDatabase>) -> Option<Assi | |||
34 | // The binding must have a name | 34 | // The binding must have a name |
35 | let name = pat.name()?; | 35 | let name = pat.name()?; |
36 | let name_range = name.syntax().text_range(); | 36 | let name_range = name.syntax().text_range(); |
37 | // Assist should only be applicable if cursor is between 'let' and '=' | ||
38 | let stmt_range = stmt.syntax().text_range(); | 37 | let stmt_range = stmt.syntax().text_range(); |
39 | let eq_range = stmt.eq_token()?.text_range(); | 38 | let eq_range = stmt.eq_token()?.text_range(); |
39 | // Assist should only be applicable if cursor is between 'let' and '=' | ||
40 | let let_range = TextRange::from_to(stmt_range.start(), eq_range.start()); | 40 | let let_range = TextRange::from_to(stmt_range.start(), eq_range.start()); |
41 | let cursor_in_range = ctx.frange.range.is_subrange(&let_range); | 41 | let cursor_in_range = ctx.frange.range.is_subrange(&let_range); |
42 | if !cursor_in_range { | 42 | if !cursor_in_range { |
43 | return None; | 43 | return None; |
44 | } | 44 | } |
45 | // Assist not applicable if the type has already been specified | 45 | // Assist not applicable if the type has already been specified |
46 | if stmt.syntax().children_with_tokens().any(|child| child.kind() == T![:]) { | 46 | // and it has no placeholders |
47 | return None; | 47 | let ascribed_ty = stmt.ascribed_type(); |
48 | if let Some(ref ty) = ascribed_ty { | ||
49 | if ty.syntax().descendants().find_map(ast::PlaceholderType::cast).is_none() { | ||
50 | return None; | ||
51 | } | ||
48 | } | 52 | } |
49 | // Infer type | 53 | // Infer type |
50 | let db = ctx.db; | 54 | let db = ctx.db; |
@@ -60,7 +64,11 @@ pub(crate) fn add_explicit_type(ctx: AssistCtx<impl HirDatabase>) -> Option<Assi | |||
60 | format!("Insert explicit type '{}'", ty.display(db)), | 64 | format!("Insert explicit type '{}'", ty.display(db)), |
61 | |edit| { | 65 | |edit| { |
62 | edit.target(pat_range); | 66 | edit.target(pat_range); |
63 | edit.insert(name_range.end(), format!(": {}", ty.display(db))); | 67 | if let Some(ascribed_ty) = ascribed_ty { |
68 | edit.replace(ascribed_ty.syntax().text_range(), format!("{}", ty.display(db))); | ||
69 | } else { | ||
70 | edit.insert(name_range.end(), format!(": {}", ty.display(db))); | ||
71 | } | ||
64 | }, | 72 | }, |
65 | ) | 73 | ) |
66 | } | 74 | } |
@@ -86,6 +94,40 @@ mod tests { | |||
86 | } | 94 | } |
87 | 95 | ||
88 | #[test] | 96 | #[test] |
97 | fn add_explicit_type_works_for_underscore() { | ||
98 | check_assist( | ||
99 | add_explicit_type, | ||
100 | "fn f() { let a<|>: _ = 1; }", | ||
101 | "fn f() { let a<|>: i32 = 1; }", | ||
102 | ); | ||
103 | } | ||
104 | |||
105 | #[test] | ||
106 | fn add_explicit_type_works_for_nested_underscore() { | ||
107 | check_assist( | ||
108 | add_explicit_type, | ||
109 | r#" | ||
110 | enum Option<T> { | ||
111 | Some(T), | ||
112 | None | ||
113 | } | ||
114 | |||
115 | fn f() { | ||
116 | let a<|>: Option<_> = Option::Some(1); | ||
117 | }"#, | ||
118 | r#" | ||
119 | enum Option<T> { | ||
120 | Some(T), | ||
121 | None | ||
122 | } | ||
123 | |||
124 | fn f() { | ||
125 | let a<|>: Option<i32> = Option::Some(1); | ||
126 | }"#, | ||
127 | ); | ||
128 | } | ||
129 | |||
130 | #[test] | ||
89 | fn add_explicit_type_works_for_macro_call() { | 131 | fn add_explicit_type_works_for_macro_call() { |
90 | check_assist( | 132 | check_assist( |
91 | add_explicit_type, | 133 | add_explicit_type, |
diff --git a/crates/ra_assists/src/assists/add_impl.rs b/crates/ra_assists/src/handlers/add_impl.rs index 4b326c837..241b085fd 100644 --- a/crates/ra_assists/src/assists/add_impl.rs +++ b/crates/ra_assists/src/handlers/add_impl.rs | |||
@@ -1,5 +1,5 @@ | |||
1 | use format_buf::format; | 1 | use format_buf::format; |
2 | use hir::db::HirDatabase; | 2 | |
3 | use join_to_string::join; | 3 | use join_to_string::join; |
4 | use ra_syntax::{ | 4 | use ra_syntax::{ |
5 | ast::{self, AstNode, NameOwner, TypeParamsOwner}, | 5 | ast::{self, AstNode, NameOwner, TypeParamsOwner}, |
@@ -27,7 +27,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
27 | // | 27 | // |
28 | // } | 28 | // } |
29 | // ``` | 29 | // ``` |
30 | pub(crate) fn add_impl(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 30 | pub(crate) fn add_impl(ctx: AssistCtx) -> Option<Assist> { |
31 | let nominal = ctx.find_node_at_offset::<ast::NominalDef>()?; | 31 | let nominal = ctx.find_node_at_offset::<ast::NominalDef>()?; |
32 | let name = nominal.name()?; | 32 | let name = nominal.name()?; |
33 | ctx.add_assist(AssistId("add_impl"), format!("Implement {}", name.text().as_str()), |edit| { | 33 | ctx.add_assist(AssistId("add_impl"), format!("Implement {}", name.text().as_str()), |edit| { |
diff --git a/crates/ra_assists/src/assists/add_missing_impl_members.rs b/crates/ra_assists/src/handlers/add_missing_impl_members.rs index 5bb937bde..448697d31 100644 --- a/crates/ra_assists/src/assists/add_missing_impl_members.rs +++ b/crates/ra_assists/src/handlers/add_missing_impl_members.rs | |||
@@ -43,7 +43,7 @@ enum AddMissingImplMembersMode { | |||
43 | // | 43 | // |
44 | // } | 44 | // } |
45 | // ``` | 45 | // ``` |
46 | pub(crate) fn add_missing_impl_members(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 46 | pub(crate) fn add_missing_impl_members(ctx: AssistCtx) -> Option<Assist> { |
47 | add_missing_impl_members_inner( | 47 | add_missing_impl_members_inner( |
48 | ctx, | 48 | ctx, |
49 | AddMissingImplMembersMode::NoDefaultMethods, | 49 | AddMissingImplMembersMode::NoDefaultMethods, |
@@ -84,7 +84,7 @@ pub(crate) fn add_missing_impl_members(ctx: AssistCtx<impl HirDatabase>) -> Opti | |||
84 | // | 84 | // |
85 | // } | 85 | // } |
86 | // ``` | 86 | // ``` |
87 | pub(crate) fn add_missing_default_members(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 87 | pub(crate) fn add_missing_default_members(ctx: AssistCtx) -> Option<Assist> { |
88 | add_missing_impl_members_inner( | 88 | add_missing_impl_members_inner( |
89 | ctx, | 89 | ctx, |
90 | AddMissingImplMembersMode::DefaultMethodsOnly, | 90 | AddMissingImplMembersMode::DefaultMethodsOnly, |
@@ -94,11 +94,12 @@ pub(crate) fn add_missing_default_members(ctx: AssistCtx<impl HirDatabase>) -> O | |||
94 | } | 94 | } |
95 | 95 | ||
96 | fn add_missing_impl_members_inner( | 96 | fn add_missing_impl_members_inner( |
97 | ctx: AssistCtx<impl HirDatabase>, | 97 | ctx: AssistCtx, |
98 | mode: AddMissingImplMembersMode, | 98 | mode: AddMissingImplMembersMode, |
99 | assist_id: &'static str, | 99 | assist_id: &'static str, |
100 | label: &'static str, | 100 | label: &'static str, |
101 | ) -> Option<Assist> { | 101 | ) -> Option<Assist> { |
102 | let _p = ra_prof::profile("add_missing_impl_members_inner"); | ||
102 | let impl_node = ctx.find_node_at_offset::<ast::ImplBlock>()?; | 103 | let impl_node = ctx.find_node_at_offset::<ast::ImplBlock>()?; |
103 | let impl_item_list = impl_node.item_list()?; | 104 | let impl_item_list = impl_node.item_list()?; |
104 | 105 | ||
diff --git a/crates/ra_assists/src/assists/add_new.rs b/crates/ra_assists/src/handlers/add_new.rs index 8db63f762..2701eddb8 100644 --- a/crates/ra_assists/src/assists/add_new.rs +++ b/crates/ra_assists/src/handlers/add_new.rs | |||
@@ -1,5 +1,5 @@ | |||
1 | use format_buf::format; | 1 | use format_buf::format; |
2 | use hir::{db::HirDatabase, InFile}; | 2 | use hir::{Adt, InFile}; |
3 | use join_to_string::join; | 3 | use join_to_string::join; |
4 | use ra_syntax::{ | 4 | use ra_syntax::{ |
5 | ast::{ | 5 | ast::{ |
@@ -31,7 +31,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
31 | // } | 31 | // } |
32 | // | 32 | // |
33 | // ``` | 33 | // ``` |
34 | pub(crate) fn add_new(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 34 | pub(crate) fn add_new(ctx: AssistCtx) -> Option<Assist> { |
35 | let strukt = ctx.find_node_at_offset::<ast::StructDef>()?; | 35 | let strukt = ctx.find_node_at_offset::<ast::StructDef>()?; |
36 | 36 | ||
37 | // We want to only apply this to non-union structs with named fields | 37 | // We want to only apply this to non-union structs with named fields |
@@ -128,26 +128,29 @@ fn generate_impl_text(strukt: &ast::StructDef, code: &str) -> String { | |||
128 | // | 128 | // |
129 | // FIXME: change the new fn checking to a more semantic approach when that's more | 129 | // FIXME: change the new fn checking to a more semantic approach when that's more |
130 | // viable (e.g. we process proc macros, etc) | 130 | // viable (e.g. we process proc macros, etc) |
131 | fn find_struct_impl( | 131 | fn find_struct_impl(ctx: &AssistCtx, strukt: &ast::StructDef) -> Option<Option<ast::ImplBlock>> { |
132 | ctx: &AssistCtx<impl HirDatabase>, | ||
133 | strukt: &ast::StructDef, | ||
134 | ) -> Option<Option<ast::ImplBlock>> { | ||
135 | let db = ctx.db; | 132 | let db = ctx.db; |
136 | let module = strukt.syntax().ancestors().find(|node| { | 133 | let module = strukt.syntax().ancestors().find(|node| { |
137 | ast::Module::can_cast(node.kind()) || ast::SourceFile::can_cast(node.kind()) | 134 | ast::Module::can_cast(node.kind()) || ast::SourceFile::can_cast(node.kind()) |
138 | })?; | 135 | })?; |
139 | let mut sb = ctx.source_binder(); | 136 | let mut sb = ctx.source_binder(); |
140 | 137 | ||
141 | let struct_ty = { | 138 | let struct_def = { |
142 | let src = InFile { file_id: ctx.frange.file_id.into(), value: strukt.clone() }; | 139 | let src = InFile { file_id: ctx.frange.file_id.into(), value: strukt.clone() }; |
143 | sb.to_def(src)?.ty(db) | 140 | sb.to_def(src)? |
144 | }; | 141 | }; |
145 | 142 | ||
146 | let block = module.descendants().filter_map(ast::ImplBlock::cast).find_map(|impl_blk| { | 143 | let block = module.descendants().filter_map(ast::ImplBlock::cast).find_map(|impl_blk| { |
147 | let src = InFile { file_id: ctx.frange.file_id.into(), value: impl_blk.clone() }; | 144 | let src = InFile { file_id: ctx.frange.file_id.into(), value: impl_blk.clone() }; |
148 | let blk = sb.to_def(src)?; | 145 | let blk = sb.to_def(src)?; |
149 | 146 | ||
150 | let same_ty = blk.target_ty(db) == struct_ty; | 147 | // FIXME: handle e.g. `struct S<T>; impl<U> S<U> {}` |
148 | // (we currently use the wrong type parameter) | ||
149 | // also we wouldn't want to use e.g. `impl S<u32>` | ||
150 | let same_ty = match blk.target_ty(db).as_adt() { | ||
151 | Some(def) => def == Adt::Struct(struct_def), | ||
152 | None => false, | ||
153 | }; | ||
151 | let not_trait_impl = blk.target_trait(db).is_none(); | 154 | let not_trait_impl = blk.target_trait(db).is_none(); |
152 | 155 | ||
153 | if !(same_ty && not_trait_impl) { | 156 | if !(same_ty && not_trait_impl) { |
diff --git a/crates/ra_assists/src/assists/apply_demorgan.rs b/crates/ra_assists/src/handlers/apply_demorgan.rs index 666dce4e6..239807e24 100644 --- a/crates/ra_assists/src/assists/apply_demorgan.rs +++ b/crates/ra_assists/src/handlers/apply_demorgan.rs | |||
@@ -1,8 +1,6 @@ | |||
1 | use super::invert_if::invert_boolean_expression; | ||
2 | use hir::db::HirDatabase; | ||
3 | use ra_syntax::ast::{self, AstNode}; | 1 | use ra_syntax::ast::{self, AstNode}; |
4 | 2 | ||
5 | use crate::{Assist, AssistCtx, AssistId}; | 3 | use crate::{utils::invert_boolean_expression, Assist, AssistCtx, AssistId}; |
6 | 4 | ||
7 | // Assist: apply_demorgan | 5 | // Assist: apply_demorgan |
8 | // | 6 | // |
@@ -23,7 +21,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
23 | // if !(x == 4 && y) {} | 21 | // if !(x == 4 && y) {} |
24 | // } | 22 | // } |
25 | // ``` | 23 | // ``` |
26 | pub(crate) fn apply_demorgan(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 24 | pub(crate) fn apply_demorgan(ctx: AssistCtx) -> Option<Assist> { |
27 | let expr = ctx.find_node_at_offset::<ast::BinExpr>()?; | 25 | let expr = ctx.find_node_at_offset::<ast::BinExpr>()?; |
28 | let op = expr.op_kind()?; | 26 | let op = expr.op_kind()?; |
29 | let op_range = expr.op_token()?.text_range(); | 27 | let op_range = expr.op_token()?.text_range(); |
@@ -32,12 +30,14 @@ pub(crate) fn apply_demorgan(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> | |||
32 | if !cursor_in_range { | 30 | if !cursor_in_range { |
33 | return None; | 31 | return None; |
34 | } | 32 | } |
33 | |||
35 | let lhs = expr.lhs()?; | 34 | let lhs = expr.lhs()?; |
36 | let lhs_range = lhs.syntax().text_range(); | 35 | let lhs_range = lhs.syntax().text_range(); |
36 | let not_lhs = invert_boolean_expression(lhs); | ||
37 | |||
37 | let rhs = expr.rhs()?; | 38 | let rhs = expr.rhs()?; |
38 | let rhs_range = rhs.syntax().text_range(); | 39 | let rhs_range = rhs.syntax().text_range(); |
39 | let not_lhs = invert_boolean_expression(&lhs)?; | 40 | let not_rhs = invert_boolean_expression(rhs); |
40 | let not_rhs = invert_boolean_expression(&rhs)?; | ||
41 | 41 | ||
42 | ctx.add_assist(AssistId("apply_demorgan"), "Apply De Morgan's law", |edit| { | 42 | ctx.add_assist(AssistId("apply_demorgan"), "Apply De Morgan's law", |edit| { |
43 | edit.target(op_range); | 43 | edit.target(op_range); |
@@ -78,12 +78,12 @@ mod tests { | |||
78 | } | 78 | } |
79 | 79 | ||
80 | #[test] | 80 | #[test] |
81 | fn demorgan_doesnt_apply_with_cursor_not_on_op() { | 81 | fn demorgan_general_case() { |
82 | check_assist_not_applicable(apply_demorgan, "fn f() { <|> !x || !x }") | 82 | check_assist(apply_demorgan, "fn f() { x ||<|> x }", "fn f() { !(!x &&<|> !x) }") |
83 | } | 83 | } |
84 | 84 | ||
85 | #[test] | 85 | #[test] |
86 | fn demorgan_doesnt_apply_when_operands_arent_negated_already() { | 86 | fn demorgan_doesnt_apply_with_cursor_not_on_op() { |
87 | check_assist_not_applicable(apply_demorgan, "fn f() { x ||<|> x }") | 87 | check_assist_not_applicable(apply_demorgan, "fn f() { <|> !x || !x }") |
88 | } | 88 | } |
89 | } | 89 | } |
diff --git a/crates/ra_assists/src/handlers/auto_import.rs b/crates/ra_assists/src/handlers/auto_import.rs new file mode 100644 index 000000000..1fb701da5 --- /dev/null +++ b/crates/ra_assists/src/handlers/auto_import.rs | |||
@@ -0,0 +1,293 @@ | |||
1 | use ra_ide_db::imports_locator::ImportsLocator; | ||
2 | use ra_syntax::ast::{self, AstNode}; | ||
3 | |||
4 | use crate::{ | ||
5 | assist_ctx::{Assist, AssistCtx}, | ||
6 | insert_use_statement, AssistId, | ||
7 | }; | ||
8 | use std::collections::BTreeSet; | ||
9 | |||
10 | // Assist: auto_import | ||
11 | // | ||
12 | // If the name is unresolved, provides all possible imports for it. | ||
13 | // | ||
14 | // ``` | ||
15 | // fn main() { | ||
16 | // let map = HashMap<|>::new(); | ||
17 | // } | ||
18 | // # pub mod std { pub mod collections { pub struct HashMap { } } } | ||
19 | // ``` | ||
20 | // -> | ||
21 | // ``` | ||
22 | // use std::collections::HashMap; | ||
23 | // | ||
24 | // fn main() { | ||
25 | // let map = HashMap::new(); | ||
26 | // } | ||
27 | // # pub mod std { pub mod collections { pub struct HashMap { } } } | ||
28 | // ``` | ||
29 | pub(crate) fn auto_import(ctx: AssistCtx) -> Option<Assist> { | ||
30 | let path_under_caret: ast::Path = ctx.find_node_at_offset()?; | ||
31 | if path_under_caret.syntax().ancestors().find_map(ast::UseItem::cast).is_some() { | ||
32 | return None; | ||
33 | } | ||
34 | |||
35 | let module = path_under_caret.syntax().ancestors().find_map(ast::Module::cast); | ||
36 | let position = match module.and_then(|it| it.item_list()) { | ||
37 | Some(item_list) => item_list.syntax().clone(), | ||
38 | None => { | ||
39 | let current_file = | ||
40 | path_under_caret.syntax().ancestors().find_map(ast::SourceFile::cast)?; | ||
41 | current_file.syntax().clone() | ||
42 | } | ||
43 | }; | ||
44 | let source_analyzer = ctx.source_analyzer(&position, None); | ||
45 | let module_with_name_to_import = source_analyzer.module()?; | ||
46 | |||
47 | let name_ref_to_import = | ||
48 | path_under_caret.syntax().descendants().find_map(ast::NameRef::cast)?; | ||
49 | if source_analyzer | ||
50 | .resolve_path(ctx.db, &name_ref_to_import.syntax().ancestors().find_map(ast::Path::cast)?) | ||
51 | .is_some() | ||
52 | { | ||
53 | return None; | ||
54 | } | ||
55 | |||
56 | let name_to_import = name_ref_to_import.syntax().to_string(); | ||
57 | let proposed_imports = ImportsLocator::new(ctx.db) | ||
58 | .find_imports(&name_to_import) | ||
59 | .into_iter() | ||
60 | .filter_map(|module_def| module_with_name_to_import.find_use_path(ctx.db, module_def)) | ||
61 | .filter(|use_path| !use_path.segments.is_empty()) | ||
62 | .take(20) | ||
63 | .collect::<BTreeSet<_>>(); | ||
64 | |||
65 | if proposed_imports.is_empty() { | ||
66 | return None; | ||
67 | } | ||
68 | |||
69 | let mut group = ctx.add_assist_group(format!("Import {}", name_to_import)); | ||
70 | for import in proposed_imports { | ||
71 | group.add_assist(AssistId("auto_import"), format!("Import `{}`", &import), |edit| { | ||
72 | edit.target(path_under_caret.syntax().text_range()); | ||
73 | insert_use_statement( | ||
74 | &position, | ||
75 | path_under_caret.syntax(), | ||
76 | &import, | ||
77 | edit.text_edit_builder(), | ||
78 | ); | ||
79 | }); | ||
80 | } | ||
81 | group.finish() | ||
82 | } | ||
83 | |||
84 | #[cfg(test)] | ||
85 | mod tests { | ||
86 | use crate::helpers::{check_assist, check_assist_not_applicable, check_assist_target}; | ||
87 | |||
88 | use super::*; | ||
89 | |||
90 | #[test] | ||
91 | fn applicable_when_found_an_import() { | ||
92 | check_assist( | ||
93 | auto_import, | ||
94 | r" | ||
95 | <|>PubStruct | ||
96 | |||
97 | pub mod PubMod { | ||
98 | pub struct PubStruct; | ||
99 | } | ||
100 | ", | ||
101 | r" | ||
102 | <|>use PubMod::PubStruct; | ||
103 | |||
104 | PubStruct | ||
105 | |||
106 | pub mod PubMod { | ||
107 | pub struct PubStruct; | ||
108 | } | ||
109 | ", | ||
110 | ); | ||
111 | } | ||
112 | |||
113 | #[test] | ||
114 | fn auto_imports_are_merged() { | ||
115 | check_assist( | ||
116 | auto_import, | ||
117 | r" | ||
118 | use PubMod::PubStruct1; | ||
119 | |||
120 | struct Test { | ||
121 | test: Pub<|>Struct2<u8>, | ||
122 | } | ||
123 | |||
124 | pub mod PubMod { | ||
125 | pub struct PubStruct1; | ||
126 | pub struct PubStruct2<T> { | ||
127 | _t: T, | ||
128 | } | ||
129 | } | ||
130 | ", | ||
131 | r" | ||
132 | use PubMod::{PubStruct2, PubStruct1}; | ||
133 | |||
134 | struct Test { | ||
135 | test: Pub<|>Struct2<u8>, | ||
136 | } | ||
137 | |||
138 | pub mod PubMod { | ||
139 | pub struct PubStruct1; | ||
140 | pub struct PubStruct2<T> { | ||
141 | _t: T, | ||
142 | } | ||
143 | } | ||
144 | ", | ||
145 | ); | ||
146 | } | ||
147 | |||
148 | #[test] | ||
149 | fn applicable_when_found_multiple_imports() { | ||
150 | check_assist( | ||
151 | auto_import, | ||
152 | r" | ||
153 | PubSt<|>ruct | ||
154 | |||
155 | pub mod PubMod1 { | ||
156 | pub struct PubStruct; | ||
157 | } | ||
158 | pub mod PubMod2 { | ||
159 | pub struct PubStruct; | ||
160 | } | ||
161 | pub mod PubMod3 { | ||
162 | pub struct PubStruct; | ||
163 | } | ||
164 | ", | ||
165 | r" | ||
166 | use PubMod1::PubStruct; | ||
167 | |||
168 | PubSt<|>ruct | ||
169 | |||
170 | pub mod PubMod1 { | ||
171 | pub struct PubStruct; | ||
172 | } | ||
173 | pub mod PubMod2 { | ||
174 | pub struct PubStruct; | ||
175 | } | ||
176 | pub mod PubMod3 { | ||
177 | pub struct PubStruct; | ||
178 | } | ||
179 | ", | ||
180 | ); | ||
181 | } | ||
182 | |||
183 | #[test] | ||
184 | fn not_applicable_for_already_imported_types() { | ||
185 | check_assist_not_applicable( | ||
186 | auto_import, | ||
187 | r" | ||
188 | use PubMod::PubStruct; | ||
189 | |||
190 | PubStruct<|> | ||
191 | |||
192 | pub mod PubMod { | ||
193 | pub struct PubStruct; | ||
194 | } | ||
195 | ", | ||
196 | ); | ||
197 | } | ||
198 | |||
199 | #[test] | ||
200 | fn not_applicable_for_types_with_private_paths() { | ||
201 | check_assist_not_applicable( | ||
202 | auto_import, | ||
203 | r" | ||
204 | PrivateStruct<|> | ||
205 | |||
206 | pub mod PubMod { | ||
207 | struct PrivateStruct; | ||
208 | } | ||
209 | ", | ||
210 | ); | ||
211 | } | ||
212 | |||
213 | #[test] | ||
214 | fn not_applicable_when_no_imports_found() { | ||
215 | check_assist_not_applicable( | ||
216 | auto_import, | ||
217 | " | ||
218 | PubStruct<|>", | ||
219 | ); | ||
220 | } | ||
221 | |||
222 | #[test] | ||
223 | fn not_applicable_in_import_statements() { | ||
224 | check_assist_not_applicable( | ||
225 | auto_import, | ||
226 | r" | ||
227 | use PubStruct<|>; | ||
228 | |||
229 | pub mod PubMod { | ||
230 | pub struct PubStruct; | ||
231 | }", | ||
232 | ); | ||
233 | } | ||
234 | |||
235 | #[test] | ||
236 | fn function_import() { | ||
237 | check_assist( | ||
238 | auto_import, | ||
239 | r" | ||
240 | test_function<|> | ||
241 | |||
242 | pub mod PubMod { | ||
243 | pub fn test_function() {}; | ||
244 | } | ||
245 | ", | ||
246 | r" | ||
247 | use PubMod::test_function; | ||
248 | |||
249 | test_function<|> | ||
250 | |||
251 | pub mod PubMod { | ||
252 | pub fn test_function() {}; | ||
253 | } | ||
254 | ", | ||
255 | ); | ||
256 | } | ||
257 | |||
258 | #[test] | ||
259 | fn auto_import_target() { | ||
260 | check_assist_target( | ||
261 | auto_import, | ||
262 | r" | ||
263 | struct AssistInfo { | ||
264 | group_label: Option<<|>GroupLabel>, | ||
265 | } | ||
266 | |||
267 | mod m { pub struct GroupLabel; } | ||
268 | ", | ||
269 | "GroupLabel", | ||
270 | ) | ||
271 | } | ||
272 | |||
273 | #[test] | ||
274 | fn not_applicable_when_path_start_is_imported() { | ||
275 | check_assist_not_applicable( | ||
276 | auto_import, | ||
277 | r" | ||
278 | pub mod mod1 { | ||
279 | pub mod mod2 { | ||
280 | pub mod mod3 { | ||
281 | pub struct TestStruct; | ||
282 | } | ||
283 | } | ||
284 | } | ||
285 | |||
286 | use mod1::mod2; | ||
287 | fn main() { | ||
288 | mod2::mod3::TestStruct<|> | ||
289 | } | ||
290 | ", | ||
291 | ); | ||
292 | } | ||
293 | } | ||
diff --git a/crates/ra_assists/src/assists/change_visibility.rs b/crates/ra_assists/src/handlers/change_visibility.rs index fd766bb46..f325b6f92 100644 --- a/crates/ra_assists/src/assists/change_visibility.rs +++ b/crates/ra_assists/src/handlers/change_visibility.rs | |||
@@ -1,4 +1,3 @@ | |||
1 | use hir::db::HirDatabase; | ||
2 | use ra_syntax::{ | 1 | use ra_syntax::{ |
3 | ast::{self, NameOwner, VisibilityOwner}, | 2 | ast::{self, NameOwner, VisibilityOwner}, |
4 | AstNode, | 3 | AstNode, |
@@ -22,14 +21,14 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
22 | // ``` | 21 | // ``` |
23 | // pub(crate) fn frobnicate() {} | 22 | // pub(crate) fn frobnicate() {} |
24 | // ``` | 23 | // ``` |
25 | pub(crate) fn change_visibility(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 24 | pub(crate) fn change_visibility(ctx: AssistCtx) -> Option<Assist> { |
26 | if let Some(vis) = ctx.find_node_at_offset::<ast::Visibility>() { | 25 | if let Some(vis) = ctx.find_node_at_offset::<ast::Visibility>() { |
27 | return change_vis(ctx, vis); | 26 | return change_vis(ctx, vis); |
28 | } | 27 | } |
29 | add_vis(ctx) | 28 | add_vis(ctx) |
30 | } | 29 | } |
31 | 30 | ||
32 | fn add_vis(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 31 | fn add_vis(ctx: AssistCtx) -> Option<Assist> { |
33 | let item_keyword = ctx.token_at_offset().find(|leaf| match leaf.kind() { | 32 | let item_keyword = ctx.token_at_offset().find(|leaf| match leaf.kind() { |
34 | T![fn] | T![mod] | T![struct] | T![enum] | T![trait] => true, | 33 | T![fn] | T![mod] | T![struct] | T![enum] | T![trait] => true, |
35 | _ => false, | 34 | _ => false, |
@@ -75,7 +74,7 @@ fn vis_offset(node: &SyntaxNode) -> TextUnit { | |||
75 | .unwrap_or_else(|| node.text_range().start()) | 74 | .unwrap_or_else(|| node.text_range().start()) |
76 | } | 75 | } |
77 | 76 | ||
78 | fn change_vis(ctx: AssistCtx<impl HirDatabase>, vis: ast::Visibility) -> Option<Assist> { | 77 | fn change_vis(ctx: AssistCtx, vis: ast::Visibility) -> Option<Assist> { |
79 | if vis.syntax().text() == "pub" { | 78 | if vis.syntax().text() == "pub" { |
80 | return ctx.add_assist( | 79 | return ctx.add_assist( |
81 | AssistId("change_visibility"), | 80 | AssistId("change_visibility"), |
diff --git a/crates/ra_assists/src/assists/early_return.rs b/crates/ra_assists/src/handlers/early_return.rs index 487ee9eef..22f88884f 100644 --- a/crates/ra_assists/src/assists/early_return.rs +++ b/crates/ra_assists/src/handlers/early_return.rs | |||
@@ -1,6 +1,5 @@ | |||
1 | use std::{iter::once, ops::RangeInclusive}; | 1 | use std::{iter::once, ops::RangeInclusive}; |
2 | 2 | ||
3 | use hir::db::HirDatabase; | ||
4 | use ra_syntax::{ | 3 | use ra_syntax::{ |
5 | algo::replace_children, | 4 | algo::replace_children, |
6 | ast::{self, edit::IndentLevel, make, Block, Pat::TupleStructPat}, | 5 | ast::{self, edit::IndentLevel, make, Block, Pat::TupleStructPat}, |
@@ -11,6 +10,7 @@ use ra_syntax::{ | |||
11 | 10 | ||
12 | use crate::{ | 11 | use crate::{ |
13 | assist_ctx::{Assist, AssistCtx}, | 12 | assist_ctx::{Assist, AssistCtx}, |
13 | utils::invert_boolean_expression, | ||
14 | AssistId, | 14 | AssistId, |
15 | }; | 15 | }; |
16 | 16 | ||
@@ -36,7 +36,7 @@ use crate::{ | |||
36 | // bar(); | 36 | // bar(); |
37 | // } | 37 | // } |
38 | // ``` | 38 | // ``` |
39 | pub(crate) fn convert_to_guarded_return(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 39 | pub(crate) fn convert_to_guarded_return(ctx: AssistCtx) -> Option<Assist> { |
40 | let if_expr: ast::IfExpr = ctx.find_node_at_offset()?; | 40 | let if_expr: ast::IfExpr = ctx.find_node_at_offset()?; |
41 | if if_expr.else_branch().is_some() { | 41 | if if_expr.else_branch().is_some() { |
42 | return None; | 42 | return None; |
@@ -100,9 +100,13 @@ pub(crate) fn convert_to_guarded_return(ctx: AssistCtx<impl HirDatabase>) -> Opt | |||
100 | let new_block = match if_let_pat { | 100 | let new_block = match if_let_pat { |
101 | None => { | 101 | None => { |
102 | // If. | 102 | // If. |
103 | let early_expression = &(early_expression.syntax().to_string() + ";"); | 103 | let new_expr = { |
104 | let new_expr = if_indent_level | 104 | let then_branch = |
105 | .increase_indent(make::if_expression(&cond_expr, early_expression)); | 105 | make::block_expr(once(make::expr_stmt(early_expression).into()), None); |
106 | let cond = invert_boolean_expression(cond_expr); | ||
107 | let e = make::expr_if(cond, then_branch); | ||
108 | if_indent_level.increase_indent(e) | ||
109 | }; | ||
106 | replace(new_expr.syntax(), &then_block, &parent_block, &if_expr) | 110 | replace(new_expr.syntax(), &then_block, &parent_block, &if_expr) |
107 | } | 111 | } |
108 | Some((path, bound_ident)) => { | 112 | Some((path, bound_ident)) => { |
diff --git a/crates/ra_assists/src/assists/fill_match_arms.rs b/crates/ra_assists/src/handlers/fill_match_arms.rs index 01758d23a..0908fc246 100644 --- a/crates/ra_assists/src/assists/fill_match_arms.rs +++ b/crates/ra_assists/src/handlers/fill_match_arms.rs | |||
@@ -31,7 +31,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
31 | // } | 31 | // } |
32 | // } | 32 | // } |
33 | // ``` | 33 | // ``` |
34 | pub(crate) fn fill_match_arms(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 34 | pub(crate) fn fill_match_arms(ctx: AssistCtx) -> Option<Assist> { |
35 | let match_expr = ctx.find_node_at_offset::<ast::MatchExpr>()?; | 35 | let match_expr = ctx.find_node_at_offset::<ast::MatchExpr>()?; |
36 | let match_arm_list = match_expr.match_arm_list()?; | 36 | let match_arm_list = match_expr.match_arm_list()?; |
37 | 37 | ||
diff --git a/crates/ra_assists/src/assists/flip_binexpr.rs b/crates/ra_assists/src/handlers/flip_binexpr.rs index 2074087cd..bfcc09e90 100644 --- a/crates/ra_assists/src/assists/flip_binexpr.rs +++ b/crates/ra_assists/src/handlers/flip_binexpr.rs | |||
@@ -1,4 +1,3 @@ | |||
1 | use hir::db::HirDatabase; | ||
2 | use ra_syntax::ast::{AstNode, BinExpr, BinOp}; | 1 | use ra_syntax::ast::{AstNode, BinExpr, BinOp}; |
3 | 2 | ||
4 | use crate::{Assist, AssistCtx, AssistId}; | 3 | use crate::{Assist, AssistCtx, AssistId}; |
@@ -18,7 +17,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
18 | // let _ = 2 + 90; | 17 | // let _ = 2 + 90; |
19 | // } | 18 | // } |
20 | // ``` | 19 | // ``` |
21 | pub(crate) fn flip_binexpr(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 20 | pub(crate) fn flip_binexpr(ctx: AssistCtx) -> Option<Assist> { |
22 | let expr = ctx.find_node_at_offset::<BinExpr>()?; | 21 | let expr = ctx.find_node_at_offset::<BinExpr>()?; |
23 | let lhs = expr.lhs()?.syntax().clone(); | 22 | let lhs = expr.lhs()?.syntax().clone(); |
24 | let rhs = expr.rhs()?.syntax().clone(); | 23 | let rhs = expr.rhs()?.syntax().clone(); |
diff --git a/crates/ra_assists/src/assists/flip_comma.rs b/crates/ra_assists/src/handlers/flip_comma.rs index dd0c405ed..1dacf29f8 100644 --- a/crates/ra_assists/src/assists/flip_comma.rs +++ b/crates/ra_assists/src/handlers/flip_comma.rs | |||
@@ -1,4 +1,3 @@ | |||
1 | use hir::db::HirDatabase; | ||
2 | use ra_syntax::{algo::non_trivia_sibling, Direction, T}; | 1 | use ra_syntax::{algo::non_trivia_sibling, Direction, T}; |
3 | 2 | ||
4 | use crate::{Assist, AssistCtx, AssistId}; | 3 | use crate::{Assist, AssistCtx, AssistId}; |
@@ -18,7 +17,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
18 | // ((3, 4), (1, 2)); | 17 | // ((3, 4), (1, 2)); |
19 | // } | 18 | // } |
20 | // ``` | 19 | // ``` |
21 | pub(crate) fn flip_comma(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 20 | pub(crate) fn flip_comma(ctx: AssistCtx) -> Option<Assist> { |
22 | let comma = ctx.find_token_at_offset(T![,])?; | 21 | let comma = ctx.find_token_at_offset(T![,])?; |
23 | let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?; | 22 | let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?; |
24 | let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?; | 23 | let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?; |
diff --git a/crates/ra_assists/src/assists/flip_trait_bound.rs b/crates/ra_assists/src/handlers/flip_trait_bound.rs index 50b3fa492..f56769624 100644 --- a/crates/ra_assists/src/assists/flip_trait_bound.rs +++ b/crates/ra_assists/src/handlers/flip_trait_bound.rs | |||
@@ -1,4 +1,3 @@ | |||
1 | use hir::db::HirDatabase; | ||
2 | use ra_syntax::{ | 1 | use ra_syntax::{ |
3 | algo::non_trivia_sibling, | 2 | algo::non_trivia_sibling, |
4 | ast::{self, AstNode}, | 3 | ast::{self, AstNode}, |
@@ -18,7 +17,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
18 | // ``` | 17 | // ``` |
19 | // fn foo<T: Copy + Clone>() { } | 18 | // fn foo<T: Copy + Clone>() { } |
20 | // ``` | 19 | // ``` |
21 | pub(crate) fn flip_trait_bound(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 20 | pub(crate) fn flip_trait_bound(ctx: AssistCtx) -> Option<Assist> { |
22 | // We want to replicate the behavior of `flip_binexpr` by only suggesting | 21 | // We want to replicate the behavior of `flip_binexpr` by only suggesting |
23 | // the assist when the cursor is on a `+` | 22 | // the assist when the cursor is on a `+` |
24 | let plus = ctx.find_token_at_offset(T![+])?; | 23 | let plus = ctx.find_token_at_offset(T![+])?; |
diff --git a/crates/ra_assists/src/assists/inline_local_variable.rs b/crates/ra_assists/src/handlers/inline_local_variable.rs index d0c5c3b8c..91b588243 100644 --- a/crates/ra_assists/src/assists/inline_local_variable.rs +++ b/crates/ra_assists/src/handlers/inline_local_variable.rs | |||
@@ -1,4 +1,3 @@ | |||
1 | use hir::db::HirDatabase; | ||
2 | use ra_syntax::{ | 1 | use ra_syntax::{ |
3 | ast::{self, AstNode, AstToken}, | 2 | ast::{self, AstNode, AstToken}, |
4 | TextRange, | 3 | TextRange, |
@@ -23,7 +22,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
23 | // (1 + 2) * 4; | 22 | // (1 + 2) * 4; |
24 | // } | 23 | // } |
25 | // ``` | 24 | // ``` |
26 | pub(crate) fn inline_local_variable(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 25 | pub(crate) fn inline_local_variable(ctx: AssistCtx) -> Option<Assist> { |
27 | let let_stmt = ctx.find_node_at_offset::<ast::LetStmt>()?; | 26 | let let_stmt = ctx.find_node_at_offset::<ast::LetStmt>()?; |
28 | let bind_pat = match let_stmt.pat()? { | 27 | let bind_pat = match let_stmt.pat()? { |
29 | ast::Pat::BindPat(pat) => pat, | 28 | ast::Pat::BindPat(pat) => pat, |
@@ -47,6 +46,9 @@ pub(crate) fn inline_local_variable(ctx: AssistCtx<impl HirDatabase>) -> Option< | |||
47 | }; | 46 | }; |
48 | let analyzer = ctx.source_analyzer(bind_pat.syntax(), None); | 47 | let analyzer = ctx.source_analyzer(bind_pat.syntax(), None); |
49 | let refs = analyzer.find_all_refs(&bind_pat); | 48 | let refs = analyzer.find_all_refs(&bind_pat); |
49 | if refs.is_empty() { | ||
50 | return None; | ||
51 | }; | ||
50 | 52 | ||
51 | let mut wrap_in_parens = vec![true; refs.len()]; | 53 | let mut wrap_in_parens = vec![true; refs.len()]; |
52 | 54 | ||
@@ -645,4 +647,16 @@ fn foo() { | |||
645 | }", | 647 | }", |
646 | ); | 648 | ); |
647 | } | 649 | } |
650 | |||
651 | #[test] | ||
652 | fn test_not_applicable_if_variable_unused() { | ||
653 | check_assist_not_applicable( | ||
654 | inline_local_variable, | ||
655 | " | ||
656 | fn foo() { | ||
657 | let <|>a = 0; | ||
658 | } | ||
659 | ", | ||
660 | ) | ||
661 | } | ||
648 | } | 662 | } |
diff --git a/crates/ra_assists/src/assists/introduce_variable.rs b/crates/ra_assists/src/handlers/introduce_variable.rs index 19e211e0f..7312ce687 100644 --- a/crates/ra_assists/src/assists/introduce_variable.rs +++ b/crates/ra_assists/src/handlers/introduce_variable.rs | |||
@@ -1,5 +1,4 @@ | |||
1 | use format_buf::format; | 1 | use format_buf::format; |
2 | use hir::db::HirDatabase; | ||
3 | use ra_syntax::{ | 2 | use ra_syntax::{ |
4 | ast::{self, AstNode}, | 3 | ast::{self, AstNode}, |
5 | SyntaxKind::{ | 4 | SyntaxKind::{ |
@@ -28,7 +27,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
28 | // var_name * 4; | 27 | // var_name * 4; |
29 | // } | 28 | // } |
30 | // ``` | 29 | // ``` |
31 | pub(crate) fn introduce_variable(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 30 | pub(crate) fn introduce_variable(ctx: AssistCtx) -> Option<Assist> { |
32 | if ctx.frange.range.is_empty() { | 31 | if ctx.frange.range.is_empty() { |
33 | return None; | 32 | return None; |
34 | } | 33 | } |
diff --git a/crates/ra_assists/src/assists/invert_if.rs b/crates/ra_assists/src/handlers/invert_if.rs index 16352c040..a594e7e0c 100644 --- a/crates/ra_assists/src/assists/invert_if.rs +++ b/crates/ra_assists/src/handlers/invert_if.rs | |||
@@ -1,8 +1,7 @@ | |||
1 | use hir::db::HirDatabase; | ||
2 | use ra_syntax::ast::{self, AstNode}; | 1 | use ra_syntax::ast::{self, AstNode}; |
3 | use ra_syntax::T; | 2 | use ra_syntax::T; |
4 | 3 | ||
5 | use crate::{Assist, AssistCtx, AssistId}; | 4 | use crate::{utils::invert_boolean_expression, Assist, AssistCtx, AssistId}; |
6 | 5 | ||
7 | // Assist: invert_if | 6 | // Assist: invert_if |
8 | // | 7 | // |
@@ -23,7 +22,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
23 | // } | 22 | // } |
24 | // ``` | 23 | // ``` |
25 | 24 | ||
26 | pub(crate) fn invert_if(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 25 | pub(crate) fn invert_if(ctx: AssistCtx) -> Option<Assist> { |
27 | let if_keyword = ctx.find_token_at_offset(T![if])?; | 26 | let if_keyword = ctx.find_token_at_offset(T![if])?; |
28 | let expr = ast::IfExpr::cast(if_keyword.parent())?; | 27 | let expr = ast::IfExpr::cast(if_keyword.parent())?; |
29 | let if_range = if_keyword.text_range(); | 28 | let if_range = if_keyword.text_range(); |
@@ -36,8 +35,8 @@ pub(crate) fn invert_if(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | |||
36 | let then_node = expr.then_branch()?.syntax().clone(); | 35 | let then_node = expr.then_branch()?.syntax().clone(); |
37 | 36 | ||
38 | if let ast::ElseBranch::Block(else_block) = expr.else_branch()? { | 37 | if let ast::ElseBranch::Block(else_block) = expr.else_branch()? { |
39 | let flip_cond = invert_boolean_expression(&cond)?; | ||
40 | let cond_range = cond.syntax().text_range(); | 38 | let cond_range = cond.syntax().text_range(); |
39 | let flip_cond = invert_boolean_expression(cond); | ||
41 | let else_node = else_block.syntax(); | 40 | let else_node = else_block.syntax(); |
42 | let else_range = else_node.text_range(); | 41 | let else_range = else_node.text_range(); |
43 | let then_range = then_node.text_range(); | 42 | let then_range = then_node.text_range(); |
@@ -52,20 +51,6 @@ pub(crate) fn invert_if(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | |||
52 | None | 51 | None |
53 | } | 52 | } |
54 | 53 | ||
55 | pub(crate) fn invert_boolean_expression(expr: &ast::Expr) -> Option<ast::Expr> { | ||
56 | match expr { | ||
57 | ast::Expr::BinExpr(bin) => match bin.op_kind()? { | ||
58 | ast::BinOp::NegatedEqualityTest => bin.replace_op(T![==]).map(|it| it.into()), | ||
59 | _ => None, | ||
60 | }, | ||
61 | ast::Expr::PrefixExpr(pe) => match pe.op_kind()? { | ||
62 | ast::PrefixOp::Not => pe.expr(), | ||
63 | _ => None, | ||
64 | }, | ||
65 | _ => None, | ||
66 | } | ||
67 | } | ||
68 | |||
69 | #[cfg(test)] | 54 | #[cfg(test)] |
70 | mod tests { | 55 | mod tests { |
71 | use super::*; | 56 | use super::*; |
@@ -91,12 +76,16 @@ mod tests { | |||
91 | } | 76 | } |
92 | 77 | ||
93 | #[test] | 78 | #[test] |
94 | fn invert_if_doesnt_apply_with_cursor_not_on_if() { | 79 | fn invert_if_general_case() { |
95 | check_assist_not_applicable(invert_if, "fn f() { if !<|>cond { 3 * 2 } else { 1 } }") | 80 | check_assist( |
81 | invert_if, | ||
82 | "fn f() { i<|>f cond { 3 * 2 } else { 1 } }", | ||
83 | "fn f() { i<|>f !cond { 1 } else { 3 * 2 } }", | ||
84 | ) | ||
96 | } | 85 | } |
97 | 86 | ||
98 | #[test] | 87 | #[test] |
99 | fn invert_if_doesnt_apply_without_negated() { | 88 | fn invert_if_doesnt_apply_with_cursor_not_on_if() { |
100 | check_assist_not_applicable(invert_if, "fn f() { i<|>f cond { 3 * 2 } else { 1 } }") | 89 | check_assist_not_applicable(invert_if, "fn f() { if !<|>cond { 3 * 2 } else { 1 } }") |
101 | } | 90 | } |
102 | } | 91 | } |
diff --git a/crates/ra_assists/src/assists/merge_match_arms.rs b/crates/ra_assists/src/handlers/merge_match_arms.rs index aca391155..670614dd8 100644 --- a/crates/ra_assists/src/assists/merge_match_arms.rs +++ b/crates/ra_assists/src/handlers/merge_match_arms.rs | |||
@@ -1,6 +1,11 @@ | |||
1 | use crate::{Assist, AssistCtx, AssistId, TextRange, TextUnit}; | 1 | use std::iter::successors; |
2 | use hir::db::HirDatabase; | 2 | |
3 | use ra_syntax::ast::{AstNode, MatchArm}; | 3 | use ra_syntax::{ |
4 | ast::{self, AstNode}, | ||
5 | Direction, TextUnit, | ||
6 | }; | ||
7 | |||
8 | use crate::{Assist, AssistCtx, AssistId, TextRange}; | ||
4 | 9 | ||
5 | // Assist: merge_match_arms | 10 | // Assist: merge_match_arms |
6 | // | 11 | // |
@@ -26,63 +31,81 @@ use ra_syntax::ast::{AstNode, MatchArm}; | |||
26 | // } | 31 | // } |
27 | // } | 32 | // } |
28 | // ``` | 33 | // ``` |
29 | pub(crate) fn merge_match_arms(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 34 | pub(crate) fn merge_match_arms(ctx: AssistCtx) -> Option<Assist> { |
30 | let current_arm = ctx.find_node_at_offset::<MatchArm>()?; | 35 | let current_arm = ctx.find_node_at_offset::<ast::MatchArm>()?; |
31 | |||
32 | // We check if the following match arm matches this one. We could, but don't, | ||
33 | // compare to the previous match arm as well. | ||
34 | let next = current_arm.syntax().next_sibling(); | ||
35 | let next_arm = MatchArm::cast(next?)?; | ||
36 | |||
37 | // Don't try to handle arms with guards for now - can add support for this later | 36 | // Don't try to handle arms with guards for now - can add support for this later |
38 | if current_arm.guard().is_some() || next_arm.guard().is_some() { | 37 | if current_arm.guard().is_some() { |
39 | return None; | 38 | return None; |
40 | } | 39 | } |
41 | |||
42 | let current_expr = current_arm.expr()?; | 40 | let current_expr = current_arm.expr()?; |
43 | let next_expr = next_arm.expr()?; | 41 | let current_text_range = current_arm.syntax().text_range(); |
44 | 42 | ||
45 | // Check for match arm equality by comparing lengths and then string contents | 43 | enum CursorPos { |
46 | if current_expr.syntax().text_range().len() != next_expr.syntax().text_range().len() { | 44 | InExpr(TextUnit), |
47 | return None; | 45 | InPat(TextUnit), |
48 | } | 46 | } |
49 | if current_expr.syntax().text() != next_expr.syntax().text() { | 47 | let cursor_pos = ctx.frange.range.start(); |
48 | let cursor_pos = if current_expr.syntax().text_range().contains(cursor_pos) { | ||
49 | CursorPos::InExpr(current_text_range.end() - cursor_pos) | ||
50 | } else { | ||
51 | CursorPos::InPat(cursor_pos) | ||
52 | }; | ||
53 | |||
54 | // We check if the following match arms match this one. We could, but don't, | ||
55 | // compare to the previous match arm as well. | ||
56 | let arms_to_merge = successors(Some(current_arm), next_arm) | ||
57 | .take_while(|arm| { | ||
58 | if arm.guard().is_some() { | ||
59 | return false; | ||
60 | } | ||
61 | match arm.expr() { | ||
62 | Some(expr) => expr.syntax().text() == current_expr.syntax().text(), | ||
63 | None => false, | ||
64 | } | ||
65 | }) | ||
66 | .collect::<Vec<_>>(); | ||
67 | |||
68 | if arms_to_merge.len() <= 1 { | ||
50 | return None; | 69 | return None; |
51 | } | 70 | } |
52 | 71 | ||
53 | let cursor_to_end = current_arm.syntax().text_range().end() - ctx.frange.range.start(); | ||
54 | |||
55 | ctx.add_assist(AssistId("merge_match_arms"), "Merge match arms", |edit| { | 72 | ctx.add_assist(AssistId("merge_match_arms"), "Merge match arms", |edit| { |
56 | fn contains_placeholder(a: &MatchArm) -> bool { | 73 | let pats = if arms_to_merge.iter().any(contains_placeholder) { |
57 | a.pats().any(|x| match x { | ||
58 | ra_syntax::ast::Pat::PlaceholderPat(..) => true, | ||
59 | _ => false, | ||
60 | }) | ||
61 | } | ||
62 | |||
63 | let pats = if contains_placeholder(¤t_arm) || contains_placeholder(&next_arm) { | ||
64 | "_".into() | 74 | "_".into() |
65 | } else { | 75 | } else { |
66 | let ps: Vec<String> = current_arm | 76 | arms_to_merge |
67 | .pats() | 77 | .iter() |
78 | .flat_map(ast::MatchArm::pats) | ||
68 | .map(|x| x.syntax().to_string()) | 79 | .map(|x| x.syntax().to_string()) |
69 | .chain(next_arm.pats().map(|x| x.syntax().to_string())) | 80 | .collect::<Vec<String>>() |
70 | .collect(); | 81 | .join(" | ") |
71 | ps.join(" | ") | ||
72 | }; | 82 | }; |
73 | 83 | ||
74 | let arm = format!("{} => {}", pats, current_expr.syntax().text()); | 84 | let arm = format!("{} => {}", pats, current_expr.syntax().text()); |
75 | let offset = TextUnit::from_usize(arm.len()) - cursor_to_end; | ||
76 | 85 | ||
77 | let start = current_arm.syntax().text_range().start(); | 86 | let start = arms_to_merge.first().unwrap().syntax().text_range().start(); |
78 | let end = next_arm.syntax().text_range().end(); | 87 | let end = arms_to_merge.last().unwrap().syntax().text_range().end(); |
79 | 88 | ||
80 | edit.target(current_arm.syntax().text_range()); | 89 | edit.target(current_text_range); |
90 | edit.set_cursor(match cursor_pos { | ||
91 | CursorPos::InExpr(back_offset) => start + TextUnit::from_usize(arm.len()) - back_offset, | ||
92 | CursorPos::InPat(offset) => offset, | ||
93 | }); | ||
81 | edit.replace(TextRange::from_to(start, end), arm); | 94 | edit.replace(TextRange::from_to(start, end), arm); |
82 | edit.set_cursor(start + offset); | ||
83 | }) | 95 | }) |
84 | } | 96 | } |
85 | 97 | ||
98 | fn contains_placeholder(a: &ast::MatchArm) -> bool { | ||
99 | a.pats().any(|x| match x { | ||
100 | ra_syntax::ast::Pat::PlaceholderPat(..) => true, | ||
101 | _ => false, | ||
102 | }) | ||
103 | } | ||
104 | |||
105 | fn next_arm(arm: &ast::MatchArm) -> Option<ast::MatchArm> { | ||
106 | arm.syntax().siblings(Direction::Next).skip(1).find_map(ast::MatchArm::cast) | ||
107 | } | ||
108 | |||
86 | #[cfg(test)] | 109 | #[cfg(test)] |
87 | mod tests { | 110 | mod tests { |
88 | use super::merge_match_arms; | 111 | use super::merge_match_arms; |
@@ -185,6 +208,37 @@ mod tests { | |||
185 | } | 208 | } |
186 | 209 | ||
187 | #[test] | 210 | #[test] |
211 | fn merges_all_subsequent_arms() { | ||
212 | check_assist( | ||
213 | merge_match_arms, | ||
214 | r#" | ||
215 | enum X { A, B, C, D, E } | ||
216 | |||
217 | fn main() { | ||
218 | match X::A { | ||
219 | X::A<|> => 92, | ||
220 | X::B => 92, | ||
221 | X::C => 92, | ||
222 | X::D => 62, | ||
223 | _ => panic!(), | ||
224 | } | ||
225 | } | ||
226 | "#, | ||
227 | r#" | ||
228 | enum X { A, B, C, D, E } | ||
229 | |||
230 | fn main() { | ||
231 | match X::A { | ||
232 | X::A<|> | X::B | X::C => 92, | ||
233 | X::D => 62, | ||
234 | _ => panic!(), | ||
235 | } | ||
236 | } | ||
237 | "#, | ||
238 | ) | ||
239 | } | ||
240 | |||
241 | #[test] | ||
188 | fn merge_match_arms_rejects_guards() { | 242 | fn merge_match_arms_rejects_guards() { |
189 | check_assist_not_applicable( | 243 | check_assist_not_applicable( |
190 | merge_match_arms, | 244 | merge_match_arms, |
diff --git a/crates/ra_assists/src/assists/move_bounds.rs b/crates/ra_assists/src/handlers/move_bounds.rs index 355adddc3..90793b5fc 100644 --- a/crates/ra_assists/src/assists/move_bounds.rs +++ b/crates/ra_assists/src/handlers/move_bounds.rs | |||
@@ -1,4 +1,3 @@ | |||
1 | use hir::db::HirDatabase; | ||
2 | use ra_syntax::{ | 1 | use ra_syntax::{ |
3 | ast::{self, edit, make, AstNode, NameOwner, TypeBoundsOwner}, | 2 | ast::{self, edit, make, AstNode, NameOwner, TypeBoundsOwner}, |
4 | SyntaxElement, | 3 | SyntaxElement, |
@@ -22,7 +21,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
22 | // f(x) | 21 | // f(x) |
23 | // } | 22 | // } |
24 | // ``` | 23 | // ``` |
25 | pub(crate) fn move_bounds_to_where_clause(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 24 | pub(crate) fn move_bounds_to_where_clause(ctx: AssistCtx) -> Option<Assist> { |
26 | let type_param_list = ctx.find_node_at_offset::<ast::TypeParamList>()?; | 25 | let type_param_list = ctx.find_node_at_offset::<ast::TypeParamList>()?; |
27 | 26 | ||
28 | let mut type_params = type_param_list.type_params(); | 27 | let mut type_params = type_param_list.type_params(); |
diff --git a/crates/ra_assists/src/assists/move_guard.rs b/crates/ra_assists/src/handlers/move_guard.rs index 41a31e677..2b91ce7c4 100644 --- a/crates/ra_assists/src/assists/move_guard.rs +++ b/crates/ra_assists/src/handlers/move_guard.rs | |||
@@ -1,4 +1,3 @@ | |||
1 | use hir::db::HirDatabase; | ||
2 | use ra_syntax::{ | 1 | use ra_syntax::{ |
3 | ast, | 2 | ast, |
4 | ast::{AstNode, AstToken, IfExpr, MatchArm}, | 3 | ast::{AstNode, AstToken, IfExpr, MatchArm}, |
@@ -32,7 +31,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
32 | // } | 31 | // } |
33 | // } | 32 | // } |
34 | // ``` | 33 | // ``` |
35 | pub(crate) fn move_guard_to_arm_body(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 34 | pub(crate) fn move_guard_to_arm_body(ctx: AssistCtx) -> Option<Assist> { |
36 | let match_arm = ctx.find_node_at_offset::<MatchArm>()?; | 35 | let match_arm = ctx.find_node_at_offset::<MatchArm>()?; |
37 | let guard = match_arm.guard()?; | 36 | let guard = match_arm.guard()?; |
38 | let space_before_guard = guard.syntax().prev_sibling_or_token(); | 37 | let space_before_guard = guard.syntax().prev_sibling_or_token(); |
@@ -89,7 +88,7 @@ pub(crate) fn move_guard_to_arm_body(ctx: AssistCtx<impl HirDatabase>) -> Option | |||
89 | // } | 88 | // } |
90 | // } | 89 | // } |
91 | // ``` | 90 | // ``` |
92 | pub(crate) fn move_arm_cond_to_match_guard(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 91 | pub(crate) fn move_arm_cond_to_match_guard(ctx: AssistCtx) -> Option<Assist> { |
93 | let match_arm: MatchArm = ctx.find_node_at_offset::<MatchArm>()?; | 92 | let match_arm: MatchArm = ctx.find_node_at_offset::<MatchArm>()?; |
94 | let last_match_pat = match_arm.pats().last()?; | 93 | let last_match_pat = match_arm.pats().last()?; |
95 | 94 | ||
diff --git a/crates/ra_assists/src/assists/raw_string.rs b/crates/ra_assists/src/handlers/raw_string.rs index e79c51673..2c0a1e126 100644 --- a/crates/ra_assists/src/assists/raw_string.rs +++ b/crates/ra_assists/src/handlers/raw_string.rs | |||
@@ -1,4 +1,3 @@ | |||
1 | use hir::db::HirDatabase; | ||
2 | use ra_syntax::{ | 1 | use ra_syntax::{ |
3 | ast, AstToken, | 2 | ast, AstToken, |
4 | SyntaxKind::{RAW_STRING, STRING}, | 3 | SyntaxKind::{RAW_STRING, STRING}, |
@@ -22,7 +21,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
22 | // r#"Hello, World!"#; | 21 | // r#"Hello, World!"#; |
23 | // } | 22 | // } |
24 | // ``` | 23 | // ``` |
25 | pub(crate) fn make_raw_string(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 24 | pub(crate) fn make_raw_string(ctx: AssistCtx) -> Option<Assist> { |
26 | let token = ctx.find_token_at_offset(STRING).and_then(ast::String::cast)?; | 25 | let token = ctx.find_token_at_offset(STRING).and_then(ast::String::cast)?; |
27 | let value = token.value()?; | 26 | let value = token.value()?; |
28 | ctx.add_assist(AssistId("make_raw_string"), "Rewrite as raw string", |edit| { | 27 | ctx.add_assist(AssistId("make_raw_string"), "Rewrite as raw string", |edit| { |
@@ -51,7 +50,7 @@ pub(crate) fn make_raw_string(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist | |||
51 | // "Hello, \"World!\""; | 50 | // "Hello, \"World!\""; |
52 | // } | 51 | // } |
53 | // ``` | 52 | // ``` |
54 | pub(crate) fn make_usual_string(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 53 | pub(crate) fn make_usual_string(ctx: AssistCtx) -> Option<Assist> { |
55 | let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?; | 54 | let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?; |
56 | let value = token.value()?; | 55 | let value = token.value()?; |
57 | ctx.add_assist(AssistId("make_usual_string"), "Rewrite as regular string", |edit| { | 56 | ctx.add_assist(AssistId("make_usual_string"), "Rewrite as regular string", |edit| { |
@@ -77,7 +76,7 @@ pub(crate) fn make_usual_string(ctx: AssistCtx<impl HirDatabase>) -> Option<Assi | |||
77 | // r##"Hello, World!"##; | 76 | // r##"Hello, World!"##; |
78 | // } | 77 | // } |
79 | // ``` | 78 | // ``` |
80 | pub(crate) fn add_hash(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 79 | pub(crate) fn add_hash(ctx: AssistCtx) -> Option<Assist> { |
81 | let token = ctx.find_token_at_offset(RAW_STRING)?; | 80 | let token = ctx.find_token_at_offset(RAW_STRING)?; |
82 | ctx.add_assist(AssistId("add_hash"), "Add # to raw string", |edit| { | 81 | ctx.add_assist(AssistId("add_hash"), "Add # to raw string", |edit| { |
83 | edit.target(token.text_range()); | 82 | edit.target(token.text_range()); |
@@ -101,7 +100,7 @@ pub(crate) fn add_hash(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | |||
101 | // r"Hello, World!"; | 100 | // r"Hello, World!"; |
102 | // } | 101 | // } |
103 | // ``` | 102 | // ``` |
104 | pub(crate) fn remove_hash(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 103 | pub(crate) fn remove_hash(ctx: AssistCtx) -> Option<Assist> { |
105 | let token = ctx.find_token_at_offset(RAW_STRING)?; | 104 | let token = ctx.find_token_at_offset(RAW_STRING)?; |
106 | let text = token.text().as_str(); | 105 | let text = token.text().as_str(); |
107 | if text.starts_with("r\"") { | 106 | if text.starts_with("r\"") { |
diff --git a/crates/ra_assists/src/assists/remove_dbg.rs b/crates/ra_assists/src/handlers/remove_dbg.rs index cf211ab84..5085649b4 100644 --- a/crates/ra_assists/src/assists/remove_dbg.rs +++ b/crates/ra_assists/src/handlers/remove_dbg.rs | |||
@@ -1,4 +1,3 @@ | |||
1 | use hir::db::HirDatabase; | ||
2 | use ra_syntax::{ | 1 | use ra_syntax::{ |
3 | ast::{self, AstNode}, | 2 | ast::{self, AstNode}, |
4 | TextUnit, T, | 3 | TextUnit, T, |
@@ -21,7 +20,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
21 | // 92; | 20 | // 92; |
22 | // } | 21 | // } |
23 | // ``` | 22 | // ``` |
24 | pub(crate) fn remove_dbg(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 23 | pub(crate) fn remove_dbg(ctx: AssistCtx) -> Option<Assist> { |
25 | let macro_call = ctx.find_node_at_offset::<ast::MacroCall>()?; | 24 | let macro_call = ctx.find_node_at_offset::<ast::MacroCall>()?; |
26 | 25 | ||
27 | if !is_valid_macrocall(¯o_call, "dbg")? { | 26 | if !is_valid_macrocall(¯o_call, "dbg")? { |
diff --git a/crates/ra_assists/src/assists/replace_if_let_with_match.rs b/crates/ra_assists/src/handlers/replace_if_let_with_match.rs index c9b62e5ff..e6cd50bc1 100644 --- a/crates/ra_assists/src/assists/replace_if_let_with_match.rs +++ b/crates/ra_assists/src/handlers/replace_if_let_with_match.rs | |||
@@ -1,9 +1,11 @@ | |||
1 | use format_buf::format; | 1 | use ra_fmt::unwrap_trivial_block; |
2 | use hir::db::HirDatabase; | 2 | use ra_syntax::{ |
3 | use ra_fmt::extract_trivial_expression; | 3 | ast::{self, make}, |
4 | use ra_syntax::{ast, AstNode}; | 4 | AstNode, |
5 | }; | ||
5 | 6 | ||
6 | use crate::{Assist, AssistCtx, AssistId}; | 7 | use crate::{Assist, AssistCtx, AssistId}; |
8 | use ast::edit::IndentLevel; | ||
7 | 9 | ||
8 | // Assist: replace_if_let_with_match | 10 | // Assist: replace_if_let_with_match |
9 | // | 11 | // |
@@ -31,7 +33,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
31 | // } | 33 | // } |
32 | // } | 34 | // } |
33 | // ``` | 35 | // ``` |
34 | pub(crate) fn replace_if_let_with_match(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 36 | pub(crate) fn replace_if_let_with_match(ctx: AssistCtx) -> Option<Assist> { |
35 | let if_expr: ast::IfExpr = ctx.find_node_at_offset()?; | 37 | let if_expr: ast::IfExpr = ctx.find_node_at_offset()?; |
36 | let cond = if_expr.condition()?; | 38 | let cond = if_expr.condition()?; |
37 | let pat = cond.pat()?; | 39 | let pat = cond.pat()?; |
@@ -43,32 +45,24 @@ pub(crate) fn replace_if_let_with_match(ctx: AssistCtx<impl HirDatabase>) -> Opt | |||
43 | }; | 45 | }; |
44 | 46 | ||
45 | ctx.add_assist(AssistId("replace_if_let_with_match"), "Replace with match", |edit| { | 47 | ctx.add_assist(AssistId("replace_if_let_with_match"), "Replace with match", |edit| { |
46 | let match_expr = build_match_expr(expr, pat, then_block, else_block); | 48 | let match_expr = { |
47 | edit.target(if_expr.syntax().text_range()); | 49 | let then_arm = { |
48 | edit.replace_node_and_indent(if_expr.syntax(), match_expr); | 50 | let then_expr = unwrap_trivial_block(then_block); |
49 | edit.set_cursor(if_expr.syntax().text_range().start()) | 51 | make::match_arm(vec![pat], then_expr) |
50 | }) | 52 | }; |
51 | } | 53 | let else_arm = { |
54 | let else_expr = unwrap_trivial_block(else_block); | ||
55 | make::match_arm(vec![make::placeholder_pat().into()], else_expr) | ||
56 | }; | ||
57 | make::expr_match(expr, make::match_arm_list(vec![then_arm, else_arm])) | ||
58 | }; | ||
52 | 59 | ||
53 | fn build_match_expr( | 60 | let match_expr = IndentLevel::from_node(if_expr.syntax()).increase_indent(match_expr); |
54 | expr: ast::Expr, | ||
55 | pat1: ast::Pat, | ||
56 | arm1: ast::BlockExpr, | ||
57 | arm2: ast::BlockExpr, | ||
58 | ) -> String { | ||
59 | let mut buf = String::new(); | ||
60 | format!(buf, "match {} {{\n", expr.syntax().text()); | ||
61 | format!(buf, " {} => {}\n", pat1.syntax().text(), format_arm(&arm1)); | ||
62 | format!(buf, " _ => {}\n", format_arm(&arm2)); | ||
63 | buf.push_str("}"); | ||
64 | buf | ||
65 | } | ||
66 | 61 | ||
67 | fn format_arm(block: &ast::BlockExpr) -> String { | 62 | edit.target(if_expr.syntax().text_range()); |
68 | match extract_trivial_expression(block) { | 63 | edit.set_cursor(if_expr.syntax().text_range().start()); |
69 | Some(e) if !e.syntax().text().contains_char('\n') => format!("{},", e.syntax().text()), | 64 | edit.replace_ast::<ast::Expr>(if_expr.into(), match_expr.into()); |
70 | _ => block.syntax().text().to_string(), | 65 | }) |
71 | } | ||
72 | } | 66 | } |
73 | 67 | ||
74 | #[cfg(test)] | 68 | #[cfg(test)] |
diff --git a/crates/ra_assists/src/assists/add_import.rs b/crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs index bf6cfe865..b70c88ec2 100644 --- a/crates/ra_assists/src/assists/add_import.rs +++ b/crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | use hir::{self, db::HirDatabase}; | 1 | use hir::{self, ModPath}; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | ast::{self, NameOwner}, | 3 | ast::{self, NameOwner}, |
4 | AstNode, Direction, SmolStr, | 4 | AstNode, Direction, SmolStr, |
@@ -12,18 +12,18 @@ use crate::{ | |||
12 | AssistId, | 12 | AssistId, |
13 | }; | 13 | }; |
14 | 14 | ||
15 | /// This function produces sequence of text edits into edit | 15 | /// Creates and inserts a use statement for the given path to import. |
16 | /// to import the target path in the most appropriate scope given | 16 | /// The use statement is inserted in the scope most appropriate to the |
17 | /// the cursor position | 17 | /// the cursor position given, additionally merged with the existing use imports. |
18 | pub fn auto_import_text_edit( | 18 | pub fn insert_use_statement( |
19 | // Ideally the position of the cursor, used to | 19 | // Ideally the position of the cursor, used to |
20 | position: &SyntaxNode, | 20 | position: &SyntaxNode, |
21 | // The statement to use as anchor (last resort) | 21 | // The statement to use as anchor (last resort) |
22 | anchor: &SyntaxNode, | 22 | anchor: &SyntaxNode, |
23 | // The path to import as a sequence of strings | 23 | path_to_import: &ModPath, |
24 | target: &[SmolStr], | ||
25 | edit: &mut TextEditBuilder, | 24 | edit: &mut TextEditBuilder, |
26 | ) { | 25 | ) { |
26 | let target = path_to_import.to_string().split("::").map(SmolStr::new).collect::<Vec<_>>(); | ||
27 | let container = position.ancestors().find_map(|n| { | 27 | let container = position.ancestors().find_map(|n| { |
28 | if let Some(module) = ast::Module::cast(n.clone()) { | 28 | if let Some(module) = ast::Module::cast(n.clone()) { |
29 | return module.item_list().map(|it| it.syntax().clone()); | 29 | return module.item_list().map(|it| it.syntax().clone()); |
@@ -32,14 +32,14 @@ pub fn auto_import_text_edit( | |||
32 | }); | 32 | }); |
33 | 33 | ||
34 | if let Some(container) = container { | 34 | if let Some(container) = container { |
35 | let action = best_action_for_target(container, anchor.clone(), target); | 35 | let action = best_action_for_target(container, anchor.clone(), &target); |
36 | make_assist(&action, target, edit); | 36 | make_assist(&action, &target, edit); |
37 | } | 37 | } |
38 | } | 38 | } |
39 | 39 | ||
40 | // Assist: add_import | 40 | // Assist: replace_qualified_name_with_use |
41 | // | 41 | // |
42 | // Adds a use statement for a given fully-qualified path. | 42 | // Adds a use statement for a given fully-qualified name. |
43 | // | 43 | // |
44 | // ``` | 44 | // ``` |
45 | // fn process(map: std::collections::<|>HashMap<String, String>) {} | 45 | // fn process(map: std::collections::<|>HashMap<String, String>) {} |
@@ -50,7 +50,7 @@ pub fn auto_import_text_edit( | |||
50 | // | 50 | // |
51 | // fn process(map: HashMap<String, String>) {} | 51 | // fn process(map: HashMap<String, String>) {} |
52 | // ``` | 52 | // ``` |
53 | pub(crate) fn add_import(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 53 | pub(crate) fn replace_qualified_name_with_use(ctx: AssistCtx) -> Option<Assist> { |
54 | let path: ast::Path = ctx.find_node_at_offset()?; | 54 | let path: ast::Path = ctx.find_node_at_offset()?; |
55 | // We don't want to mess with use statements | 55 | // We don't want to mess with use statements |
56 | if path.syntax().ancestors().find_map(ast::UseItem::cast).is_some() { | 56 | if path.syntax().ancestors().find_map(ast::UseItem::cast).is_some() { |
@@ -72,9 +72,13 @@ pub(crate) fn add_import(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | |||
72 | } | 72 | } |
73 | }; | 73 | }; |
74 | 74 | ||
75 | ctx.add_assist(AssistId("add_import"), format!("Import {}", fmt_segments(&segments)), |edit| { | 75 | ctx.add_assist( |
76 | apply_auto_import(&position, &path, &segments, edit.text_edit_builder()); | 76 | AssistId("replace_qualified_name_with_use"), |
77 | }) | 77 | "Replace qualified path with use", |
78 | |edit| { | ||
79 | replace_with_use(&position, &path, &segments, edit.text_edit_builder()); | ||
80 | }, | ||
81 | ) | ||
78 | } | 82 | } |
79 | 83 | ||
80 | fn collect_path_segments_raw( | 84 | fn collect_path_segments_raw( |
@@ -107,12 +111,6 @@ fn collect_path_segments_raw( | |||
107 | Some(segments.len() - oldlen) | 111 | Some(segments.len() - oldlen) |
108 | } | 112 | } |
109 | 113 | ||
110 | fn fmt_segments(segments: &[SmolStr]) -> String { | ||
111 | let mut buf = String::new(); | ||
112 | fmt_segments_raw(segments, &mut buf); | ||
113 | buf | ||
114 | } | ||
115 | |||
116 | fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) { | 114 | fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) { |
117 | let mut iter = segments.iter(); | 115 | let mut iter = segments.iter(); |
118 | if let Some(s) = iter.next() { | 116 | if let Some(s) = iter.next() { |
@@ -558,7 +556,7 @@ fn make_assist_add_nested_import( | |||
558 | } | 556 | } |
559 | } | 557 | } |
560 | 558 | ||
561 | fn apply_auto_import( | 559 | fn replace_with_use( |
562 | container: &SyntaxNode, | 560 | container: &SyntaxNode, |
563 | path: &ast::Path, | 561 | path: &ast::Path, |
564 | target: &[SmolStr], | 562 | target: &[SmolStr], |
@@ -567,7 +565,7 @@ fn apply_auto_import( | |||
567 | let action = best_action_for_target(container.clone(), path.syntax().clone(), target); | 565 | let action = best_action_for_target(container.clone(), path.syntax().clone(), target); |
568 | make_assist(&action, target, edit); | 566 | make_assist(&action, target, edit); |
569 | if let Some(last) = path.segment() { | 567 | if let Some(last) = path.segment() { |
570 | // Here we are assuming the assist will provide a correct use statement | 568 | // Here we are assuming the assist will provide a correct use statement |
571 | // so we can delete the path qualifier | 569 | // so we can delete the path qualifier |
572 | edit.delete(TextRange::from_to( | 570 | edit.delete(TextRange::from_to( |
573 | path.syntax().text_range().start(), | 571 | path.syntax().text_range().start(), |
@@ -603,9 +601,9 @@ mod tests { | |||
603 | use super::*; | 601 | use super::*; |
604 | 602 | ||
605 | #[test] | 603 | #[test] |
606 | fn test_auto_import_add_use_no_anchor() { | 604 | fn test_replace_add_use_no_anchor() { |
607 | check_assist( | 605 | check_assist( |
608 | add_import, | 606 | replace_qualified_name_with_use, |
609 | " | 607 | " |
610 | std::fmt::Debug<|> | 608 | std::fmt::Debug<|> |
611 | ", | 609 | ", |
@@ -617,9 +615,9 @@ Debug<|> | |||
617 | ); | 615 | ); |
618 | } | 616 | } |
619 | #[test] | 617 | #[test] |
620 | fn test_auto_import_add_use_no_anchor_with_item_below() { | 618 | fn test_replace_add_use_no_anchor_with_item_below() { |
621 | check_assist( | 619 | check_assist( |
622 | add_import, | 620 | replace_qualified_name_with_use, |
623 | " | 621 | " |
624 | std::fmt::Debug<|> | 622 | std::fmt::Debug<|> |
625 | 623 | ||
@@ -638,9 +636,9 @@ fn main() { | |||
638 | } | 636 | } |
639 | 637 | ||
640 | #[test] | 638 | #[test] |
641 | fn test_auto_import_add_use_no_anchor_with_item_above() { | 639 | fn test_replace_add_use_no_anchor_with_item_above() { |
642 | check_assist( | 640 | check_assist( |
643 | add_import, | 641 | replace_qualified_name_with_use, |
644 | " | 642 | " |
645 | fn main() { | 643 | fn main() { |
646 | } | 644 | } |
@@ -659,9 +657,9 @@ Debug<|> | |||
659 | } | 657 | } |
660 | 658 | ||
661 | #[test] | 659 | #[test] |
662 | fn test_auto_import_add_use_no_anchor_2seg() { | 660 | fn test_replace_add_use_no_anchor_2seg() { |
663 | check_assist( | 661 | check_assist( |
664 | add_import, | 662 | replace_qualified_name_with_use, |
665 | " | 663 | " |
666 | std::fmt<|>::Debug | 664 | std::fmt<|>::Debug |
667 | ", | 665 | ", |
@@ -674,9 +672,9 @@ fmt<|>::Debug | |||
674 | } | 672 | } |
675 | 673 | ||
676 | #[test] | 674 | #[test] |
677 | fn test_auto_import_add_use() { | 675 | fn test_replace_add_use() { |
678 | check_assist( | 676 | check_assist( |
679 | add_import, | 677 | replace_qualified_name_with_use, |
680 | " | 678 | " |
681 | use stdx; | 679 | use stdx; |
682 | 680 | ||
@@ -694,9 +692,9 @@ impl Debug<|> for Foo { | |||
694 | } | 692 | } |
695 | 693 | ||
696 | #[test] | 694 | #[test] |
697 | fn test_auto_import_file_use_other_anchor() { | 695 | fn test_replace_file_use_other_anchor() { |
698 | check_assist( | 696 | check_assist( |
699 | add_import, | 697 | replace_qualified_name_with_use, |
700 | " | 698 | " |
701 | impl std::fmt::Debug<|> for Foo { | 699 | impl std::fmt::Debug<|> for Foo { |
702 | } | 700 | } |
@@ -711,9 +709,9 @@ impl Debug<|> for Foo { | |||
711 | } | 709 | } |
712 | 710 | ||
713 | #[test] | 711 | #[test] |
714 | fn test_auto_import_add_use_other_anchor_indent() { | 712 | fn test_replace_add_use_other_anchor_indent() { |
715 | check_assist( | 713 | check_assist( |
716 | add_import, | 714 | replace_qualified_name_with_use, |
717 | " | 715 | " |
718 | impl std::fmt::Debug<|> for Foo { | 716 | impl std::fmt::Debug<|> for Foo { |
719 | } | 717 | } |
@@ -728,9 +726,9 @@ impl Debug<|> for Foo { | |||
728 | } | 726 | } |
729 | 727 | ||
730 | #[test] | 728 | #[test] |
731 | fn test_auto_import_split_different() { | 729 | fn test_replace_split_different() { |
732 | check_assist( | 730 | check_assist( |
733 | add_import, | 731 | replace_qualified_name_with_use, |
734 | " | 732 | " |
735 | use std::fmt; | 733 | use std::fmt; |
736 | 734 | ||
@@ -747,9 +745,9 @@ impl io<|> for Foo { | |||
747 | } | 745 | } |
748 | 746 | ||
749 | #[test] | 747 | #[test] |
750 | fn test_auto_import_split_self_for_use() { | 748 | fn test_replace_split_self_for_use() { |
751 | check_assist( | 749 | check_assist( |
752 | add_import, | 750 | replace_qualified_name_with_use, |
753 | " | 751 | " |
754 | use std::fmt; | 752 | use std::fmt; |
755 | 753 | ||
@@ -766,9 +764,9 @@ impl Debug<|> for Foo { | |||
766 | } | 764 | } |
767 | 765 | ||
768 | #[test] | 766 | #[test] |
769 | fn test_auto_import_split_self_for_target() { | 767 | fn test_replace_split_self_for_target() { |
770 | check_assist( | 768 | check_assist( |
771 | add_import, | 769 | replace_qualified_name_with_use, |
772 | " | 770 | " |
773 | use std::fmt::Debug; | 771 | use std::fmt::Debug; |
774 | 772 | ||
@@ -785,9 +783,9 @@ impl fmt<|> for Foo { | |||
785 | } | 783 | } |
786 | 784 | ||
787 | #[test] | 785 | #[test] |
788 | fn test_auto_import_add_to_nested_self_nested() { | 786 | fn test_replace_add_to_nested_self_nested() { |
789 | check_assist( | 787 | check_assist( |
790 | add_import, | 788 | replace_qualified_name_with_use, |
791 | " | 789 | " |
792 | use std::fmt::{Debug, nested::{Display}}; | 790 | use std::fmt::{Debug, nested::{Display}}; |
793 | 791 | ||
@@ -804,9 +802,9 @@ impl nested<|> for Foo { | |||
804 | } | 802 | } |
805 | 803 | ||
806 | #[test] | 804 | #[test] |
807 | fn test_auto_import_add_to_nested_self_already_included() { | 805 | fn test_replace_add_to_nested_self_already_included() { |
808 | check_assist( | 806 | check_assist( |
809 | add_import, | 807 | replace_qualified_name_with_use, |
810 | " | 808 | " |
811 | use std::fmt::{Debug, nested::{self, Display}}; | 809 | use std::fmt::{Debug, nested::{self, Display}}; |
812 | 810 | ||
@@ -823,9 +821,9 @@ impl nested<|> for Foo { | |||
823 | } | 821 | } |
824 | 822 | ||
825 | #[test] | 823 | #[test] |
826 | fn test_auto_import_add_to_nested_nested() { | 824 | fn test_replace_add_to_nested_nested() { |
827 | check_assist( | 825 | check_assist( |
828 | add_import, | 826 | replace_qualified_name_with_use, |
829 | " | 827 | " |
830 | use std::fmt::{Debug, nested::{Display}}; | 828 | use std::fmt::{Debug, nested::{Display}}; |
831 | 829 | ||
@@ -842,9 +840,9 @@ impl Debug<|> for Foo { | |||
842 | } | 840 | } |
843 | 841 | ||
844 | #[test] | 842 | #[test] |
845 | fn test_auto_import_split_common_target_longer() { | 843 | fn test_replace_split_common_target_longer() { |
846 | check_assist( | 844 | check_assist( |
847 | add_import, | 845 | replace_qualified_name_with_use, |
848 | " | 846 | " |
849 | use std::fmt::Debug; | 847 | use std::fmt::Debug; |
850 | 848 | ||
@@ -861,9 +859,9 @@ impl Display<|> for Foo { | |||
861 | } | 859 | } |
862 | 860 | ||
863 | #[test] | 861 | #[test] |
864 | fn test_auto_import_split_common_use_longer() { | 862 | fn test_replace_split_common_use_longer() { |
865 | check_assist( | 863 | check_assist( |
866 | add_import, | 864 | replace_qualified_name_with_use, |
867 | " | 865 | " |
868 | use std::fmt::nested::Debug; | 866 | use std::fmt::nested::Debug; |
869 | 867 | ||
@@ -880,9 +878,9 @@ impl Display<|> for Foo { | |||
880 | } | 878 | } |
881 | 879 | ||
882 | #[test] | 880 | #[test] |
883 | fn test_auto_import_use_nested_import() { | 881 | fn test_replace_use_nested_import() { |
884 | check_assist( | 882 | check_assist( |
885 | add_import, | 883 | replace_qualified_name_with_use, |
886 | " | 884 | " |
887 | use crate::{ | 885 | use crate::{ |
888 | ty::{Substs, Ty}, | 886 | ty::{Substs, Ty}, |
@@ -903,9 +901,9 @@ fn foo() { lower<|>::trait_env() } | |||
903 | } | 901 | } |
904 | 902 | ||
905 | #[test] | 903 | #[test] |
906 | fn test_auto_import_alias() { | 904 | fn test_replace_alias() { |
907 | check_assist( | 905 | check_assist( |
908 | add_import, | 906 | replace_qualified_name_with_use, |
909 | " | 907 | " |
910 | use std::fmt as foo; | 908 | use std::fmt as foo; |
911 | 909 | ||
@@ -922,9 +920,9 @@ impl Debug<|> for Foo { | |||
922 | } | 920 | } |
923 | 921 | ||
924 | #[test] | 922 | #[test] |
925 | fn test_auto_import_not_applicable_one_segment() { | 923 | fn test_replace_not_applicable_one_segment() { |
926 | check_assist_not_applicable( | 924 | check_assist_not_applicable( |
927 | add_import, | 925 | replace_qualified_name_with_use, |
928 | " | 926 | " |
929 | impl foo<|> for Foo { | 927 | impl foo<|> for Foo { |
930 | } | 928 | } |
@@ -933,9 +931,9 @@ impl foo<|> for Foo { | |||
933 | } | 931 | } |
934 | 932 | ||
935 | #[test] | 933 | #[test] |
936 | fn test_auto_import_not_applicable_in_use() { | 934 | fn test_replace_not_applicable_in_use() { |
937 | check_assist_not_applicable( | 935 | check_assist_not_applicable( |
938 | add_import, | 936 | replace_qualified_name_with_use, |
939 | " | 937 | " |
940 | use std::fmt<|>; | 938 | use std::fmt<|>; |
941 | ", | 939 | ", |
@@ -943,9 +941,9 @@ use std::fmt<|>; | |||
943 | } | 941 | } |
944 | 942 | ||
945 | #[test] | 943 | #[test] |
946 | fn test_auto_import_add_use_no_anchor_in_mod_mod() { | 944 | fn test_replace_add_use_no_anchor_in_mod_mod() { |
947 | check_assist( | 945 | check_assist( |
948 | add_import, | 946 | replace_qualified_name_with_use, |
949 | " | 947 | " |
950 | mod foo { | 948 | mod foo { |
951 | mod bar { | 949 | mod bar { |
diff --git a/crates/ra_assists/src/assists/split_import.rs b/crates/ra_assists/src/handlers/split_import.rs index 6038c4858..2c3f07a79 100644 --- a/crates/ra_assists/src/assists/split_import.rs +++ b/crates/ra_assists/src/handlers/split_import.rs | |||
@@ -1,6 +1,5 @@ | |||
1 | use std::iter::successors; | 1 | use std::iter::successors; |
2 | 2 | ||
3 | use hir::db::HirDatabase; | ||
4 | use ra_syntax::{ast, AstNode, TextUnit, T}; | 3 | use ra_syntax::{ast, AstNode, TextUnit, T}; |
5 | 4 | ||
6 | use crate::{Assist, AssistCtx, AssistId}; | 5 | use crate::{Assist, AssistCtx, AssistId}; |
@@ -16,7 +15,7 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
16 | // ``` | 15 | // ``` |
17 | // use std::{collections::HashMap}; | 16 | // use std::{collections::HashMap}; |
18 | // ``` | 17 | // ``` |
19 | pub(crate) fn split_import(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 18 | pub(crate) fn split_import(ctx: AssistCtx) -> Option<Assist> { |
20 | let colon_colon = ctx.find_token_at_offset(T![::])?; | 19 | let colon_colon = ctx.find_token_at_offset(T![::])?; |
21 | let path = ast::Path::cast(colon_colon.parent())?; | 20 | let path = ast::Path::cast(colon_colon.parent())?; |
22 | let top_path = successors(Some(path), |it| it.parent_path()).last()?; | 21 | let top_path = successors(Some(path), |it| it.parent_path()).last()?; |
diff --git a/crates/ra_assists/src/lib.rs b/crates/ra_assists/src/lib.rs index 3337805a5..828a8e9e8 100644 --- a/crates/ra_assists/src/lib.rs +++ b/crates/ra_assists/src/lib.rs | |||
@@ -9,18 +9,16 @@ mod assist_ctx; | |||
9 | mod marks; | 9 | mod marks; |
10 | #[cfg(test)] | 10 | #[cfg(test)] |
11 | mod doc_tests; | 11 | mod doc_tests; |
12 | #[cfg(test)] | 12 | mod utils; |
13 | mod test_db; | ||
14 | pub mod ast_transform; | 13 | pub mod ast_transform; |
15 | 14 | ||
16 | use either::Either; | ||
17 | use hir::db::HirDatabase; | ||
18 | use ra_db::FileRange; | 15 | use ra_db::FileRange; |
16 | use ra_ide_db::RootDatabase; | ||
19 | use ra_syntax::{TextRange, TextUnit}; | 17 | use ra_syntax::{TextRange, TextUnit}; |
20 | use ra_text_edit::TextEdit; | 18 | use ra_text_edit::TextEdit; |
21 | 19 | ||
22 | pub(crate) use crate::assist_ctx::{Assist, AssistCtx}; | 20 | pub(crate) use crate::assist_ctx::{Assist, AssistCtx, AssistHandler}; |
23 | pub use crate::assists::add_import::auto_import_text_edit; | 21 | pub use crate::handlers::replace_qualified_name_with_use::insert_use_statement; |
24 | 22 | ||
25 | /// Unique identifier of the assist, should not be shown to the user | 23 | /// Unique identifier of the assist, should not be shown to the user |
26 | /// directly. | 24 | /// directly. |
@@ -34,81 +32,64 @@ pub struct AssistLabel { | |||
34 | pub id: AssistId, | 32 | pub id: AssistId, |
35 | } | 33 | } |
36 | 34 | ||
35 | #[derive(Clone, Debug)] | ||
36 | pub struct GroupLabel(pub String); | ||
37 | |||
38 | impl AssistLabel { | ||
39 | pub(crate) fn new(label: String, id: AssistId) -> AssistLabel { | ||
40 | // FIXME: make fields private, so that this invariant can't be broken | ||
41 | assert!(label.chars().nth(0).unwrap().is_uppercase()); | ||
42 | AssistLabel { label: label.into(), id } | ||
43 | } | ||
44 | } | ||
45 | |||
37 | #[derive(Debug, Clone)] | 46 | #[derive(Debug, Clone)] |
38 | pub struct AssistAction { | 47 | pub struct AssistAction { |
39 | pub label: Option<String>, | ||
40 | pub edit: TextEdit, | 48 | pub edit: TextEdit, |
41 | pub cursor_position: Option<TextUnit>, | 49 | pub cursor_position: Option<TextUnit>, |
50 | // FIXME: This belongs to `AssistLabel` | ||
42 | pub target: Option<TextRange>, | 51 | pub target: Option<TextRange>, |
43 | } | 52 | } |
44 | 53 | ||
45 | #[derive(Debug, Clone)] | 54 | #[derive(Debug, Clone)] |
46 | pub struct ResolvedAssist { | 55 | pub struct ResolvedAssist { |
47 | pub label: AssistLabel, | 56 | pub label: AssistLabel, |
48 | pub action_data: Either<AssistAction, Vec<AssistAction>>, | 57 | pub group_label: Option<GroupLabel>, |
49 | } | 58 | pub action: AssistAction, |
50 | |||
51 | impl ResolvedAssist { | ||
52 | pub fn get_first_action(&self) -> AssistAction { | ||
53 | match &self.action_data { | ||
54 | Either::Left(action) => action.clone(), | ||
55 | Either::Right(actions) => actions[0].clone(), | ||
56 | } | ||
57 | } | ||
58 | } | 59 | } |
59 | 60 | ||
60 | /// Return all the assists applicable at the given position. | 61 | /// Return all the assists applicable at the given position. |
61 | /// | 62 | /// |
62 | /// Assists are returned in the "unresolved" state, that is only labels are | 63 | /// Assists are returned in the "unresolved" state, that is only labels are |
63 | /// returned, without actual edits. | 64 | /// returned, without actual edits. |
64 | pub fn applicable_assists<H>(db: &H, range: FileRange) -> Vec<AssistLabel> | 65 | pub fn unresolved_assists(db: &RootDatabase, range: FileRange) -> Vec<AssistLabel> { |
65 | where | 66 | let ctx = AssistCtx::new(db, range, false); |
66 | H: HirDatabase + 'static, | 67 | handlers::all() |
67 | { | 68 | .iter() |
68 | AssistCtx::with_ctx(db, range, false, |ctx| { | 69 | .filter_map(|f| f(ctx.clone())) |
69 | assists::all() | 70 | .flat_map(|it| it.0) |
70 | .iter() | 71 | .map(|a| a.label) |
71 | .filter_map(|f| f(ctx.clone())) | 72 | .collect() |
72 | .map(|a| match a { | ||
73 | Assist::Unresolved { label } => label, | ||
74 | Assist::Resolved { .. } => unreachable!(), | ||
75 | }) | ||
76 | .collect() | ||
77 | }) | ||
78 | } | 73 | } |
79 | 74 | ||
80 | /// Return all the assists applicable at the given position. | 75 | /// Return all the assists applicable at the given position. |
81 | /// | 76 | /// |
82 | /// Assists are returned in the "resolved" state, that is with edit fully | 77 | /// Assists are returned in the "resolved" state, that is with edit fully |
83 | /// computed. | 78 | /// computed. |
84 | pub fn assists<H>(db: &H, range: FileRange) -> Vec<ResolvedAssist> | 79 | pub fn resolved_assists(db: &RootDatabase, range: FileRange) -> Vec<ResolvedAssist> { |
85 | where | 80 | let ctx = AssistCtx::new(db, range, true); |
86 | H: HirDatabase + 'static, | 81 | let mut a = handlers::all() |
87 | { | 82 | .iter() |
88 | use std::cmp::Ordering; | 83 | .filter_map(|f| f(ctx.clone())) |
89 | 84 | .flat_map(|it| it.0) | |
90 | AssistCtx::with_ctx(db, range, true, |ctx| { | 85 | .map(|it| it.into_resolved().unwrap()) |
91 | let mut a = assists::all() | 86 | .collect::<Vec<_>>(); |
92 | .iter() | 87 | a.sort_by_key(|it| it.action.target.map_or(TextUnit::from(!0u32), |it| it.len())); |
93 | .filter_map(|f| f(ctx.clone())) | 88 | a |
94 | .map(|a| match a { | ||
95 | Assist::Resolved { assist } => assist, | ||
96 | Assist::Unresolved { .. } => unreachable!(), | ||
97 | }) | ||
98 | .collect::<Vec<_>>(); | ||
99 | a.sort_by(|a, b| match (a.get_first_action().target, b.get_first_action().target) { | ||
100 | (Some(a), Some(b)) => a.len().cmp(&b.len()), | ||
101 | (Some(_), None) => Ordering::Less, | ||
102 | (None, Some(_)) => Ordering::Greater, | ||
103 | (None, None) => Ordering::Equal, | ||
104 | }); | ||
105 | a | ||
106 | }) | ||
107 | } | 89 | } |
108 | 90 | ||
109 | mod assists { | 91 | mod handlers { |
110 | use crate::{Assist, AssistCtx}; | 92 | use crate::AssistHandler; |
111 | use hir::db::HirDatabase; | ||
112 | 93 | ||
113 | mod add_derive; | 94 | mod add_derive; |
114 | mod add_explicit_type; | 95 | mod add_explicit_type; |
@@ -116,6 +97,7 @@ mod assists { | |||
116 | mod add_custom_impl; | 97 | mod add_custom_impl; |
117 | mod add_new; | 98 | mod add_new; |
118 | mod apply_demorgan; | 99 | mod apply_demorgan; |
100 | mod auto_import; | ||
119 | mod invert_if; | 101 | mod invert_if; |
120 | mod flip_comma; | 102 | mod flip_comma; |
121 | mod flip_binexpr; | 103 | mod flip_binexpr; |
@@ -129,13 +111,13 @@ mod assists { | |||
129 | mod replace_if_let_with_match; | 111 | mod replace_if_let_with_match; |
130 | mod split_import; | 112 | mod split_import; |
131 | mod remove_dbg; | 113 | mod remove_dbg; |
132 | pub(crate) mod add_import; | 114 | pub(crate) mod replace_qualified_name_with_use; |
133 | mod add_missing_impl_members; | 115 | mod add_missing_impl_members; |
134 | mod move_guard; | 116 | mod move_guard; |
135 | mod move_bounds; | 117 | mod move_bounds; |
136 | mod early_return; | 118 | mod early_return; |
137 | 119 | ||
138 | pub(crate) fn all<DB: HirDatabase>() -> &'static [fn(AssistCtx<DB>) -> Option<Assist>] { | 120 | pub(crate) fn all() -> &'static [AssistHandler] { |
139 | &[ | 121 | &[ |
140 | add_derive::add_derive, | 122 | add_derive::add_derive, |
141 | add_explicit_type::add_explicit_type, | 123 | add_explicit_type::add_explicit_type, |
@@ -154,7 +136,7 @@ mod assists { | |||
154 | replace_if_let_with_match::replace_if_let_with_match, | 136 | replace_if_let_with_match::replace_if_let_with_match, |
155 | split_import::split_import, | 137 | split_import::split_import, |
156 | remove_dbg::remove_dbg, | 138 | remove_dbg::remove_dbg, |
157 | add_import::add_import, | 139 | replace_qualified_name_with_use::replace_qualified_name_with_use, |
158 | add_missing_impl_members::add_missing_impl_members, | 140 | add_missing_impl_members::add_missing_impl_members, |
159 | add_missing_impl_members::add_missing_default_members, | 141 | add_missing_impl_members::add_missing_default_members, |
160 | inline_local_variable::inline_local_variable, | 142 | inline_local_variable::inline_local_variable, |
@@ -166,33 +148,39 @@ mod assists { | |||
166 | raw_string::make_usual_string, | 148 | raw_string::make_usual_string, |
167 | raw_string::remove_hash, | 149 | raw_string::remove_hash, |
168 | early_return::convert_to_guarded_return, | 150 | early_return::convert_to_guarded_return, |
151 | auto_import::auto_import, | ||
169 | ] | 152 | ] |
170 | } | 153 | } |
171 | } | 154 | } |
172 | 155 | ||
173 | #[cfg(test)] | 156 | #[cfg(test)] |
174 | mod helpers { | 157 | mod helpers { |
175 | use ra_db::{fixture::WithFixture, FileRange}; | 158 | use std::sync::Arc; |
159 | |||
160 | use ra_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt}; | ||
161 | use ra_ide_db::{symbol_index::SymbolsDatabase, RootDatabase}; | ||
176 | use ra_syntax::TextRange; | 162 | use ra_syntax::TextRange; |
177 | use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range}; | 163 | use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range}; |
178 | 164 | ||
179 | use crate::{test_db::TestDB, Assist, AssistCtx}; | 165 | use crate::{AssistCtx, AssistHandler}; |
166 | |||
167 | pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) { | ||
168 | let (mut db, file_id) = RootDatabase::with_single_file(text); | ||
169 | // FIXME: ideally, this should be done by the above `RootDatabase::with_single_file`, | ||
170 | // but it looks like this might need specialization? :( | ||
171 | let local_roots = vec![db.file_source_root(file_id)]; | ||
172 | db.set_local_roots(Arc::new(local_roots)); | ||
173 | (db, file_id) | ||
174 | } | ||
180 | 175 | ||
181 | pub(crate) fn check_assist( | 176 | pub(crate) fn check_assist(assist: AssistHandler, before: &str, after: &str) { |
182 | assist: fn(AssistCtx<TestDB>) -> Option<Assist>, | ||
183 | before: &str, | ||
184 | after: &str, | ||
185 | ) { | ||
186 | let (before_cursor_pos, before) = extract_offset(before); | 177 | let (before_cursor_pos, before) = extract_offset(before); |
187 | let (db, file_id) = TestDB::with_single_file(&before); | 178 | let (db, file_id) = with_single_file(&before); |
188 | let frange = | 179 | let frange = |
189 | FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; | 180 | FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; |
190 | let assist = | 181 | let assist = |
191 | AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable"); | 182 | assist(AssistCtx::new(&db, frange, true)).expect("code action is not applicable"); |
192 | let action = match assist { | 183 | let action = assist.0[0].action.clone().unwrap(); |
193 | Assist::Unresolved { .. } => unreachable!(), | ||
194 | Assist::Resolved { assist } => assist.get_first_action(), | ||
195 | }; | ||
196 | 184 | ||
197 | let actual = action.edit.apply(&before); | 185 | let actual = action.edit.apply(&before); |
198 | let actual_cursor_pos = match action.cursor_position { | 186 | let actual_cursor_pos = match action.cursor_position { |
@@ -206,20 +194,13 @@ mod helpers { | |||
206 | assert_eq_text!(after, &actual); | 194 | assert_eq_text!(after, &actual); |
207 | } | 195 | } |
208 | 196 | ||
209 | pub(crate) fn check_assist_range( | 197 | pub(crate) fn check_assist_range(assist: AssistHandler, before: &str, after: &str) { |
210 | assist: fn(AssistCtx<TestDB>) -> Option<Assist>, | ||
211 | before: &str, | ||
212 | after: &str, | ||
213 | ) { | ||
214 | let (range, before) = extract_range(before); | 198 | let (range, before) = extract_range(before); |
215 | let (db, file_id) = TestDB::with_single_file(&before); | 199 | let (db, file_id) = with_single_file(&before); |
216 | let frange = FileRange { file_id, range }; | 200 | let frange = FileRange { file_id, range }; |
217 | let assist = | 201 | let assist = |
218 | AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable"); | 202 | assist(AssistCtx::new(&db, frange, true)).expect("code action is not applicable"); |
219 | let action = match assist { | 203 | let action = assist.0[0].action.clone().unwrap(); |
220 | Assist::Unresolved { .. } => unreachable!(), | ||
221 | Assist::Resolved { assist } => assist.get_first_action(), | ||
222 | }; | ||
223 | 204 | ||
224 | let mut actual = action.edit.apply(&before); | 205 | let mut actual = action.edit.apply(&before); |
225 | if let Some(pos) = action.cursor_position { | 206 | if let Some(pos) = action.cursor_position { |
@@ -228,85 +209,65 @@ mod helpers { | |||
228 | assert_eq_text!(after, &actual); | 209 | assert_eq_text!(after, &actual); |
229 | } | 210 | } |
230 | 211 | ||
231 | pub(crate) fn check_assist_target( | 212 | pub(crate) fn check_assist_target(assist: AssistHandler, before: &str, target: &str) { |
232 | assist: fn(AssistCtx<TestDB>) -> Option<Assist>, | ||
233 | before: &str, | ||
234 | target: &str, | ||
235 | ) { | ||
236 | let (before_cursor_pos, before) = extract_offset(before); | 213 | let (before_cursor_pos, before) = extract_offset(before); |
237 | let (db, file_id) = TestDB::with_single_file(&before); | 214 | let (db, file_id) = with_single_file(&before); |
238 | let frange = | 215 | let frange = |
239 | FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; | 216 | FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; |
240 | let assist = | 217 | let assist = |
241 | AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable"); | 218 | assist(AssistCtx::new(&db, frange, true)).expect("code action is not applicable"); |
242 | let action = match assist { | 219 | let action = assist.0[0].action.clone().unwrap(); |
243 | Assist::Unresolved { .. } => unreachable!(), | ||
244 | Assist::Resolved { assist } => assist.get_first_action(), | ||
245 | }; | ||
246 | 220 | ||
247 | let range = action.target.expect("expected target on action"); | 221 | let range = action.target.expect("expected target on action"); |
248 | assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target); | 222 | assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target); |
249 | } | 223 | } |
250 | 224 | ||
251 | pub(crate) fn check_assist_range_target( | 225 | pub(crate) fn check_assist_range_target(assist: AssistHandler, before: &str, target: &str) { |
252 | assist: fn(AssistCtx<TestDB>) -> Option<Assist>, | ||
253 | before: &str, | ||
254 | target: &str, | ||
255 | ) { | ||
256 | let (range, before) = extract_range(before); | 226 | let (range, before) = extract_range(before); |
257 | let (db, file_id) = TestDB::with_single_file(&before); | 227 | let (db, file_id) = with_single_file(&before); |
258 | let frange = FileRange { file_id, range }; | 228 | let frange = FileRange { file_id, range }; |
259 | let assist = | 229 | let assist = |
260 | AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable"); | 230 | assist(AssistCtx::new(&db, frange, true)).expect("code action is not applicable"); |
261 | let action = match assist { | 231 | let action = assist.0[0].action.clone().unwrap(); |
262 | Assist::Unresolved { .. } => unreachable!(), | ||
263 | Assist::Resolved { assist } => assist.get_first_action(), | ||
264 | }; | ||
265 | 232 | ||
266 | let range = action.target.expect("expected target on action"); | 233 | let range = action.target.expect("expected target on action"); |
267 | assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target); | 234 | assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target); |
268 | } | 235 | } |
269 | 236 | ||
270 | pub(crate) fn check_assist_not_applicable( | 237 | pub(crate) fn check_assist_not_applicable(assist: AssistHandler, before: &str) { |
271 | assist: fn(AssistCtx<TestDB>) -> Option<Assist>, | ||
272 | before: &str, | ||
273 | ) { | ||
274 | let (before_cursor_pos, before) = extract_offset(before); | 238 | let (before_cursor_pos, before) = extract_offset(before); |
275 | let (db, file_id) = TestDB::with_single_file(&before); | 239 | let (db, file_id) = with_single_file(&before); |
276 | let frange = | 240 | let frange = |
277 | FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; | 241 | FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; |
278 | let assist = AssistCtx::with_ctx(&db, frange, true, assist); | 242 | let assist = assist(AssistCtx::new(&db, frange, true)); |
279 | assert!(assist.is_none()); | 243 | assert!(assist.is_none()); |
280 | } | 244 | } |
281 | 245 | ||
282 | pub(crate) fn check_assist_range_not_applicable( | 246 | pub(crate) fn check_assist_range_not_applicable(assist: AssistHandler, before: &str) { |
283 | assist: fn(AssistCtx<TestDB>) -> Option<Assist>, | ||
284 | before: &str, | ||
285 | ) { | ||
286 | let (range, before) = extract_range(before); | 247 | let (range, before) = extract_range(before); |
287 | let (db, file_id) = TestDB::with_single_file(&before); | 248 | let (db, file_id) = with_single_file(&before); |
288 | let frange = FileRange { file_id, range }; | 249 | let frange = FileRange { file_id, range }; |
289 | let assist = AssistCtx::with_ctx(&db, frange, true, assist); | 250 | let assist = assist(AssistCtx::new(&db, frange, true)); |
290 | assert!(assist.is_none()); | 251 | assert!(assist.is_none()); |
291 | } | 252 | } |
292 | } | 253 | } |
293 | 254 | ||
294 | #[cfg(test)] | 255 | #[cfg(test)] |
295 | mod tests { | 256 | mod tests { |
296 | use ra_db::{fixture::WithFixture, FileRange}; | 257 | use ra_db::FileRange; |
297 | use ra_syntax::TextRange; | 258 | use ra_syntax::TextRange; |
298 | use test_utils::{extract_offset, extract_range}; | 259 | use test_utils::{extract_offset, extract_range}; |
299 | 260 | ||
300 | use crate::test_db::TestDB; | 261 | use crate::{helpers, resolved_assists}; |
301 | 262 | ||
302 | #[test] | 263 | #[test] |
303 | fn assist_order_field_struct() { | 264 | fn assist_order_field_struct() { |
304 | let before = "struct Foo { <|>bar: u32 }"; | 265 | let before = "struct Foo { <|>bar: u32 }"; |
305 | let (before_cursor_pos, before) = extract_offset(before); | 266 | let (before_cursor_pos, before) = extract_offset(before); |
306 | let (db, file_id) = TestDB::with_single_file(&before); | 267 | let (db, file_id) = helpers::with_single_file(&before); |
307 | let frange = | 268 | let frange = |
308 | FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; | 269 | FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; |
309 | let assists = super::assists(&db, frange); | 270 | let assists = resolved_assists(&db, frange); |
310 | let mut assists = assists.iter(); | 271 | let mut assists = assists.iter(); |
311 | 272 | ||
312 | assert_eq!( | 273 | assert_eq!( |
@@ -327,9 +288,9 @@ mod tests { | |||
327 | } | 288 | } |
328 | }"; | 289 | }"; |
329 | let (range, before) = extract_range(before); | 290 | let (range, before) = extract_range(before); |
330 | let (db, file_id) = TestDB::with_single_file(&before); | 291 | let (db, file_id) = helpers::with_single_file(&before); |
331 | let frange = FileRange { file_id, range }; | 292 | let frange = FileRange { file_id, range }; |
332 | let assists = super::assists(&db, frange); | 293 | let assists = resolved_assists(&db, frange); |
333 | let mut assists = assists.iter(); | 294 | let mut assists = assists.iter(); |
334 | 295 | ||
335 | assert_eq!(assists.next().expect("expected assist").label.label, "Extract into variable"); | 296 | assert_eq!(assists.next().expect("expected assist").label.label, "Extract into variable"); |
diff --git a/crates/ra_assists/src/test_db.rs b/crates/ra_assists/src/test_db.rs deleted file mode 100644 index d5249f308..000000000 --- a/crates/ra_assists/src/test_db.rs +++ /dev/null | |||
@@ -1,45 +0,0 @@ | |||
1 | //! Database used for testing `ra_assists`. | ||
2 | |||
3 | use std::sync::Arc; | ||
4 | |||
5 | use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath}; | ||
6 | |||
7 | #[salsa::database( | ||
8 | ra_db::SourceDatabaseExtStorage, | ||
9 | ra_db::SourceDatabaseStorage, | ||
10 | hir::db::InternDatabaseStorage, | ||
11 | hir::db::AstDatabaseStorage, | ||
12 | hir::db::DefDatabaseStorage, | ||
13 | hir::db::HirDatabaseStorage | ||
14 | )] | ||
15 | #[derive(Debug, Default)] | ||
16 | pub struct TestDB { | ||
17 | runtime: salsa::Runtime<TestDB>, | ||
18 | } | ||
19 | |||
20 | impl salsa::Database for TestDB { | ||
21 | fn salsa_runtime(&self) -> &salsa::Runtime<Self> { | ||
22 | &self.runtime | ||
23 | } | ||
24 | fn salsa_runtime_mut(&mut self) -> &mut salsa::Runtime<Self> { | ||
25 | &mut self.runtime | ||
26 | } | ||
27 | } | ||
28 | |||
29 | impl std::panic::RefUnwindSafe for TestDB {} | ||
30 | |||
31 | impl FileLoader for TestDB { | ||
32 | fn file_text(&self, file_id: FileId) -> Arc<String> { | ||
33 | FileLoaderDelegate(self).file_text(file_id) | ||
34 | } | ||
35 | fn resolve_relative_path( | ||
36 | &self, | ||
37 | anchor: FileId, | ||
38 | relative_path: &RelativePath, | ||
39 | ) -> Option<FileId> { | ||
40 | FileLoaderDelegate(self).resolve_relative_path(anchor, relative_path) | ||
41 | } | ||
42 | fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> { | ||
43 | FileLoaderDelegate(self).relevant_crates(file_id) | ||
44 | } | ||
45 | } | ||
diff --git a/crates/ra_assists/src/utils.rs b/crates/ra_assists/src/utils.rs new file mode 100644 index 000000000..0d5722295 --- /dev/null +++ b/crates/ra_assists/src/utils.rs | |||
@@ -0,0 +1,27 @@ | |||
1 | //! Assorted functions shared by several assists. | ||
2 | |||
3 | use ra_syntax::{ | ||
4 | ast::{self, make}, | ||
5 | T, | ||
6 | }; | ||
7 | |||
8 | pub(crate) fn invert_boolean_expression(expr: ast::Expr) -> ast::Expr { | ||
9 | if let Some(expr) = invert_special_case(&expr) { | ||
10 | return expr; | ||
11 | } | ||
12 | make::expr_prefix(T![!], expr) | ||
13 | } | ||
14 | |||
15 | fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> { | ||
16 | match expr { | ||
17 | ast::Expr::BinExpr(bin) => match bin.op_kind()? { | ||
18 | ast::BinOp::NegatedEqualityTest => bin.replace_op(T![==]).map(|it| it.into()), | ||
19 | ast::BinOp::EqualityTest => bin.replace_op(T![!=]).map(|it| it.into()), | ||
20 | _ => None, | ||
21 | }, | ||
22 | ast::Expr::PrefixExpr(pe) if pe.op_kind()? == ast::PrefixOp::Not => pe.expr(), | ||
23 | // FIXME: | ||
24 | // ast::Expr::Literal(true | false ) | ||
25 | _ => None, | ||
26 | } | ||
27 | } | ||
diff --git a/crates/ra_cargo_watch/Cargo.toml b/crates/ra_cargo_watch/Cargo.toml index e88295539..dd814fc9d 100644 --- a/crates/ra_cargo_watch/Cargo.toml +++ b/crates/ra_cargo_watch/Cargo.toml | |||
@@ -11,7 +11,8 @@ log = "0.4.3" | |||
11 | cargo_metadata = "0.9.1" | 11 | cargo_metadata = "0.9.1" |
12 | jod-thread = "0.1.0" | 12 | jod-thread = "0.1.0" |
13 | parking_lot = "0.10.0" | 13 | parking_lot = "0.10.0" |
14 | serde_json = "1.0.45" | ||
14 | 15 | ||
15 | [dev-dependencies] | 16 | [dev-dependencies] |
16 | insta = "0.12.0" | 17 | insta = "0.13.0" |
17 | serde_json = "1.0" \ No newline at end of file | 18 | serde_json = "1.0" \ No newline at end of file |
diff --git a/crates/ra_cargo_watch/src/conv.rs b/crates/ra_cargo_watch/src/conv.rs index ac0f1d28a..506370535 100644 --- a/crates/ra_cargo_watch/src/conv.rs +++ b/crates/ra_cargo_watch/src/conv.rs | |||
@@ -1,12 +1,11 @@ | |||
1 | //! This module provides the functionality needed to convert diagnostics from | 1 | //! This module provides the functionality needed to convert diagnostics from |
2 | //! `cargo check` json format to the LSP diagnostic format. | 2 | //! `cargo check` json format to the LSP diagnostic format. |
3 | use cargo_metadata::diagnostic::{ | 3 | use cargo_metadata::diagnostic::{ |
4 | Applicability, Diagnostic as RustDiagnostic, DiagnosticLevel, DiagnosticSpan, | 4 | Diagnostic as RustDiagnostic, DiagnosticLevel, DiagnosticSpan, DiagnosticSpanMacroExpansion, |
5 | DiagnosticSpanMacroExpansion, | ||
6 | }; | 5 | }; |
7 | use lsp_types::{ | 6 | use lsp_types::{ |
8 | Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, DiagnosticTag, Location, | 7 | CodeAction, Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, DiagnosticTag, |
9 | NumberOrString, Position, Range, Url, | 8 | Location, NumberOrString, Position, Range, TextEdit, Url, WorkspaceEdit, |
10 | }; | 9 | }; |
11 | use std::{ | 10 | use std::{ |
12 | fmt::Write, | 11 | fmt::Write, |
@@ -117,38 +116,9 @@ fn is_deprecated(rd: &RustDiagnostic) -> bool { | |||
117 | } | 116 | } |
118 | } | 117 | } |
119 | 118 | ||
120 | #[derive(Debug)] | ||
121 | pub struct SuggestedFix { | ||
122 | pub title: String, | ||
123 | pub location: Location, | ||
124 | pub replacement: String, | ||
125 | pub applicability: Applicability, | ||
126 | pub diagnostics: Vec<Diagnostic>, | ||
127 | } | ||
128 | |||
129 | impl std::cmp::PartialEq<SuggestedFix> for SuggestedFix { | ||
130 | fn eq(&self, other: &SuggestedFix) -> bool { | ||
131 | if self.title == other.title | ||
132 | && self.location == other.location | ||
133 | && self.replacement == other.replacement | ||
134 | { | ||
135 | // Applicability doesn't impl PartialEq... | ||
136 | match (&self.applicability, &other.applicability) { | ||
137 | (Applicability::MachineApplicable, Applicability::MachineApplicable) => true, | ||
138 | (Applicability::HasPlaceholders, Applicability::HasPlaceholders) => true, | ||
139 | (Applicability::MaybeIncorrect, Applicability::MaybeIncorrect) => true, | ||
140 | (Applicability::Unspecified, Applicability::Unspecified) => true, | ||
141 | _ => false, | ||
142 | } | ||
143 | } else { | ||
144 | false | ||
145 | } | ||
146 | } | ||
147 | } | ||
148 | |||
149 | enum MappedRustChildDiagnostic { | 119 | enum MappedRustChildDiagnostic { |
150 | Related(DiagnosticRelatedInformation), | 120 | Related(DiagnosticRelatedInformation), |
151 | SuggestedFix(SuggestedFix), | 121 | SuggestedFix(CodeAction), |
152 | MessageLine(String), | 122 | MessageLine(String), |
153 | } | 123 | } |
154 | 124 | ||
@@ -176,12 +146,20 @@ fn map_rust_child_diagnostic( | |||
176 | rd.message.clone() | 146 | rd.message.clone() |
177 | }; | 147 | }; |
178 | 148 | ||
179 | MappedRustChildDiagnostic::SuggestedFix(SuggestedFix { | 149 | let edit = { |
150 | let edits = vec![TextEdit::new(location.range, suggested_replacement.clone())]; | ||
151 | let mut edit_map = std::collections::HashMap::new(); | ||
152 | edit_map.insert(location.uri, edits); | ||
153 | WorkspaceEdit::new(edit_map) | ||
154 | }; | ||
155 | |||
156 | MappedRustChildDiagnostic::SuggestedFix(CodeAction { | ||
180 | title, | 157 | title, |
181 | location, | 158 | kind: Some("quickfix".to_string()), |
182 | replacement: suggested_replacement.clone(), | 159 | diagnostics: None, |
183 | applicability: span.suggestion_applicability.clone().unwrap_or(Applicability::Unknown), | 160 | edit: Some(edit), |
184 | diagnostics: vec![], | 161 | command: None, |
162 | is_preferred: None, | ||
185 | }) | 163 | }) |
186 | } else { | 164 | } else { |
187 | MappedRustChildDiagnostic::Related(DiagnosticRelatedInformation { | 165 | MappedRustChildDiagnostic::Related(DiagnosticRelatedInformation { |
@@ -195,7 +173,7 @@ fn map_rust_child_diagnostic( | |||
195 | pub(crate) struct MappedRustDiagnostic { | 173 | pub(crate) struct MappedRustDiagnostic { |
196 | pub location: Location, | 174 | pub location: Location, |
197 | pub diagnostic: Diagnostic, | 175 | pub diagnostic: Diagnostic, |
198 | pub suggested_fixes: Vec<SuggestedFix>, | 176 | pub fixes: Vec<CodeAction>, |
199 | } | 177 | } |
200 | 178 | ||
201 | /// Converts a Rust root diagnostic to LSP form | 179 | /// Converts a Rust root diagnostic to LSP form |
@@ -250,15 +228,13 @@ pub(crate) fn map_rust_diagnostic_to_lsp( | |||
250 | } | 228 | } |
251 | } | 229 | } |
252 | 230 | ||
253 | let mut suggested_fixes = vec![]; | 231 | let mut fixes = vec![]; |
254 | let mut message = rd.message.clone(); | 232 | let mut message = rd.message.clone(); |
255 | for child in &rd.children { | 233 | for child in &rd.children { |
256 | let child = map_rust_child_diagnostic(&child, workspace_root); | 234 | let child = map_rust_child_diagnostic(&child, workspace_root); |
257 | match child { | 235 | match child { |
258 | MappedRustChildDiagnostic::Related(related) => related_information.push(related), | 236 | MappedRustChildDiagnostic::Related(related) => related_information.push(related), |
259 | MappedRustChildDiagnostic::SuggestedFix(suggested_fix) => { | 237 | MappedRustChildDiagnostic::SuggestedFix(code_action) => fixes.push(code_action.into()), |
260 | suggested_fixes.push(suggested_fix) | ||
261 | } | ||
262 | MappedRustChildDiagnostic::MessageLine(message_line) => { | 238 | MappedRustChildDiagnostic::MessageLine(message_line) => { |
263 | write!(&mut message, "\n{}", message_line).unwrap(); | 239 | write!(&mut message, "\n{}", message_line).unwrap(); |
264 | 240 | ||
@@ -295,7 +271,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp( | |||
295 | tags: if !tags.is_empty() { Some(tags) } else { None }, | 271 | tags: if !tags.is_empty() { Some(tags) } else { None }, |
296 | }; | 272 | }; |
297 | 273 | ||
298 | Some(MappedRustDiagnostic { location, diagnostic, suggested_fixes }) | 274 | Some(MappedRustDiagnostic { location, diagnostic, fixes }) |
299 | } | 275 | } |
300 | 276 | ||
301 | /// Returns a `Url` object from a given path, will lowercase drive letters if present. | 277 | /// Returns a `Url` object from a given path, will lowercase drive letters if present. |
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_clippy_pass_by_ref.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_clippy_pass_by_ref.snap index cb0920914..95ca163dc 100644 --- a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_clippy_pass_by_ref.snap +++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_clippy_pass_by_ref.snap | |||
@@ -61,25 +61,39 @@ MappedRustDiagnostic { | |||
61 | ), | 61 | ), |
62 | tags: None, | 62 | tags: None, |
63 | }, | 63 | }, |
64 | suggested_fixes: [ | 64 | fixes: [ |
65 | SuggestedFix { | 65 | CodeAction { |
66 | title: "consider passing by value instead: \'self\'", | 66 | title: "consider passing by value instead: \'self\'", |
67 | location: Location { | 67 | kind: Some( |
68 | uri: "file:///test/compiler/mir/tagset.rs", | 68 | "quickfix", |
69 | range: Range { | 69 | ), |
70 | start: Position { | 70 | diagnostics: None, |
71 | line: 41, | 71 | edit: Some( |
72 | character: 23, | 72 | WorkspaceEdit { |
73 | }, | 73 | changes: Some( |
74 | end: Position { | 74 | { |
75 | line: 41, | 75 | "file:///test/compiler/mir/tagset.rs": [ |
76 | character: 28, | 76 | TextEdit { |
77 | }, | 77 | range: Range { |
78 | start: Position { | ||
79 | line: 41, | ||
80 | character: 23, | ||
81 | }, | ||
82 | end: Position { | ||
83 | line: 41, | ||
84 | character: 28, | ||
85 | }, | ||
86 | }, | ||
87 | new_text: "self", | ||
88 | }, | ||
89 | ], | ||
90 | }, | ||
91 | ), | ||
92 | document_changes: None, | ||
78 | }, | 93 | }, |
79 | }, | 94 | ), |
80 | replacement: "self", | 95 | command: None, |
81 | applicability: Unspecified, | 96 | is_preferred: None, |
82 | diagnostics: [], | ||
83 | }, | 97 | }, |
84 | ], | 98 | ], |
85 | } | 99 | } |
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_handles_macro_location.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_handles_macro_location.snap index 19510ecc1..12eb32df4 100644 --- a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_handles_macro_location.snap +++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_handles_macro_location.snap | |||
@@ -42,5 +42,5 @@ MappedRustDiagnostic { | |||
42 | related_information: None, | 42 | related_information: None, |
43 | tags: None, | 43 | tags: None, |
44 | }, | 44 | }, |
45 | suggested_fixes: [], | 45 | fixes: [], |
46 | } | 46 | } |
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_macro_compiler_error.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_macro_compiler_error.snap index 92f7eec05..7b83a7cd0 100644 --- a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_macro_compiler_error.snap +++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_macro_compiler_error.snap | |||
@@ -57,5 +57,5 @@ MappedRustDiagnostic { | |||
57 | ), | 57 | ), |
58 | tags: None, | 58 | tags: None, |
59 | }, | 59 | }, |
60 | suggested_fixes: [], | 60 | fixes: [], |
61 | } | 61 | } |
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_rustc_incompatible_type_for_trait.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_incompatible_type_for_trait.snap index cf683e4b6..54679c5db 100644 --- a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_rustc_incompatible_type_for_trait.snap +++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_incompatible_type_for_trait.snap | |||
@@ -42,5 +42,5 @@ MappedRustDiagnostic { | |||
42 | related_information: None, | 42 | related_information: None, |
43 | tags: None, | 43 | tags: None, |
44 | }, | 44 | }, |
45 | suggested_fixes: [], | 45 | fixes: [], |
46 | } | 46 | } |
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_rustc_mismatched_type.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_mismatched_type.snap index 8c1483c74..57df4ceaf 100644 --- a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_rustc_mismatched_type.snap +++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_mismatched_type.snap | |||
@@ -42,5 +42,5 @@ MappedRustDiagnostic { | |||
42 | related_information: None, | 42 | related_information: None, |
43 | tags: None, | 43 | tags: None, |
44 | }, | 44 | }, |
45 | suggested_fixes: [], | 45 | fixes: [], |
46 | } | 46 | } |
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_rustc_unused_variable.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_unused_variable.snap index eb5a2247b..3e1fe736c 100644 --- a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_rustc_unused_variable.snap +++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_unused_variable.snap | |||
@@ -46,25 +46,39 @@ MappedRustDiagnostic { | |||
46 | ], | 46 | ], |
47 | ), | 47 | ), |
48 | }, | 48 | }, |
49 | suggested_fixes: [ | 49 | fixes: [ |
50 | SuggestedFix { | 50 | CodeAction { |
51 | title: "consider prefixing with an underscore: \'_foo\'", | 51 | title: "consider prefixing with an underscore: \'_foo\'", |
52 | location: Location { | 52 | kind: Some( |
53 | uri: "file:///test/driver/subcommand/repl.rs", | 53 | "quickfix", |
54 | range: Range { | 54 | ), |
55 | start: Position { | 55 | diagnostics: None, |
56 | line: 290, | 56 | edit: Some( |
57 | character: 8, | 57 | WorkspaceEdit { |
58 | }, | 58 | changes: Some( |
59 | end: Position { | 59 | { |
60 | line: 290, | 60 | "file:///test/driver/subcommand/repl.rs": [ |
61 | character: 11, | 61 | TextEdit { |
62 | }, | 62 | range: Range { |
63 | start: Position { | ||
64 | line: 290, | ||
65 | character: 8, | ||
66 | }, | ||
67 | end: Position { | ||
68 | line: 290, | ||
69 | character: 11, | ||
70 | }, | ||
71 | }, | ||
72 | new_text: "_foo", | ||
73 | }, | ||
74 | ], | ||
75 | }, | ||
76 | ), | ||
77 | document_changes: None, | ||
63 | }, | 78 | }, |
64 | }, | 79 | ), |
65 | replacement: "_foo", | 80 | command: None, |
66 | applicability: MachineApplicable, | 81 | is_preferred: None, |
67 | diagnostics: [], | ||
68 | }, | 82 | }, |
69 | ], | 83 | ], |
70 | } | 84 | } |
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_rustc_wrong_number_of_parameters.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_wrong_number_of_parameters.snap index 2f4518931..69301078d 100644 --- a/crates/ra_cargo_watch/src/conv/snapshots/test__snap_rustc_wrong_number_of_parameters.snap +++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_wrong_number_of_parameters.snap | |||
@@ -61,5 +61,5 @@ MappedRustDiagnostic { | |||
61 | ), | 61 | ), |
62 | tags: None, | 62 | tags: None, |
63 | }, | 63 | }, |
64 | suggested_fixes: [], | 64 | fixes: [], |
65 | } | 65 | } |
diff --git a/crates/ra_cargo_watch/src/lib.rs b/crates/ra_cargo_watch/src/lib.rs index 7f4c9280c..f07c34549 100644 --- a/crates/ra_cargo_watch/src/lib.rs +++ b/crates/ra_cargo_watch/src/lib.rs | |||
@@ -4,22 +4,20 @@ | |||
4 | use cargo_metadata::Message; | 4 | use cargo_metadata::Message; |
5 | use crossbeam_channel::{never, select, unbounded, Receiver, RecvError, Sender}; | 5 | use crossbeam_channel::{never, select, unbounded, Receiver, RecvError, Sender}; |
6 | use lsp_types::{ | 6 | use lsp_types::{ |
7 | Diagnostic, Url, WorkDoneProgress, WorkDoneProgressBegin, WorkDoneProgressEnd, | 7 | CodeAction, CodeActionOrCommand, Diagnostic, Url, WorkDoneProgress, WorkDoneProgressBegin, |
8 | WorkDoneProgressReport, | 8 | WorkDoneProgressEnd, WorkDoneProgressReport, |
9 | }; | 9 | }; |
10 | use parking_lot::RwLock; | ||
11 | use std::{ | 10 | use std::{ |
12 | collections::HashMap, | 11 | io::{BufRead, BufReader}, |
13 | path::PathBuf, | 12 | path::PathBuf, |
14 | process::{Command, Stdio}, | 13 | process::{Command, Stdio}, |
15 | sync::Arc, | ||
16 | thread::JoinHandle, | 14 | thread::JoinHandle, |
17 | time::Instant, | 15 | time::Instant, |
18 | }; | 16 | }; |
19 | 17 | ||
20 | mod conv; | 18 | mod conv; |
21 | 19 | ||
22 | use crate::conv::{map_rust_diagnostic_to_lsp, MappedRustDiagnostic, SuggestedFix}; | 20 | use crate::conv::{map_rust_diagnostic_to_lsp, MappedRustDiagnostic}; |
23 | 21 | ||
24 | pub use crate::conv::url_from_path_with_drive_lowercasing; | 22 | pub use crate::conv::url_from_path_with_drive_lowercasing; |
25 | 23 | ||
@@ -38,7 +36,6 @@ pub struct CheckOptions { | |||
38 | #[derive(Debug)] | 36 | #[derive(Debug)] |
39 | pub struct CheckWatcher { | 37 | pub struct CheckWatcher { |
40 | pub task_recv: Receiver<CheckTask>, | 38 | pub task_recv: Receiver<CheckTask>, |
41 | pub state: Arc<RwLock<CheckState>>, | ||
42 | cmd_send: Option<Sender<CheckCommand>>, | 39 | cmd_send: Option<Sender<CheckCommand>>, |
43 | handle: Option<JoinHandle<()>>, | 40 | handle: Option<JoinHandle<()>>, |
44 | } | 41 | } |
@@ -46,7 +43,6 @@ pub struct CheckWatcher { | |||
46 | impl CheckWatcher { | 43 | impl CheckWatcher { |
47 | pub fn new(options: &CheckOptions, workspace_root: PathBuf) -> CheckWatcher { | 44 | pub fn new(options: &CheckOptions, workspace_root: PathBuf) -> CheckWatcher { |
48 | let options = options.clone(); | 45 | let options = options.clone(); |
49 | let state = Arc::new(RwLock::new(CheckState::new())); | ||
50 | 46 | ||
51 | let (task_send, task_recv) = unbounded::<CheckTask>(); | 47 | let (task_send, task_recv) = unbounded::<CheckTask>(); |
52 | let (cmd_send, cmd_recv) = unbounded::<CheckCommand>(); | 48 | let (cmd_send, cmd_recv) = unbounded::<CheckCommand>(); |
@@ -54,13 +50,12 @@ impl CheckWatcher { | |||
54 | let mut check = CheckWatcherThread::new(options, workspace_root); | 50 | let mut check = CheckWatcherThread::new(options, workspace_root); |
55 | check.run(&task_send, &cmd_recv); | 51 | check.run(&task_send, &cmd_recv); |
56 | }); | 52 | }); |
57 | CheckWatcher { task_recv, cmd_send: Some(cmd_send), handle: Some(handle), state } | 53 | CheckWatcher { task_recv, cmd_send: Some(cmd_send), handle: Some(handle) } |
58 | } | 54 | } |
59 | 55 | ||
60 | /// Returns a CheckWatcher that doesn't actually do anything | 56 | /// Returns a CheckWatcher that doesn't actually do anything |
61 | pub fn dummy() -> CheckWatcher { | 57 | pub fn dummy() -> CheckWatcher { |
62 | let state = Arc::new(RwLock::new(CheckState::new())); | 58 | CheckWatcher { task_recv: never(), cmd_send: None, handle: None } |
63 | CheckWatcher { task_recv: never(), cmd_send: None, handle: None, state } | ||
64 | } | 59 | } |
65 | 60 | ||
66 | /// Schedule a re-start of the cargo check worker. | 61 | /// Schedule a re-start of the cargo check worker. |
@@ -88,83 +83,12 @@ impl std::ops::Drop for CheckWatcher { | |||
88 | } | 83 | } |
89 | 84 | ||
90 | #[derive(Debug)] | 85 | #[derive(Debug)] |
91 | pub struct CheckState { | ||
92 | diagnostic_collection: HashMap<Url, Vec<Diagnostic>>, | ||
93 | suggested_fix_collection: HashMap<Url, Vec<SuggestedFix>>, | ||
94 | } | ||
95 | |||
96 | impl CheckState { | ||
97 | fn new() -> CheckState { | ||
98 | CheckState { | ||
99 | diagnostic_collection: HashMap::new(), | ||
100 | suggested_fix_collection: HashMap::new(), | ||
101 | } | ||
102 | } | ||
103 | |||
104 | /// Clear the cached diagnostics, and schedule updating diagnostics by the | ||
105 | /// server, to clear stale results. | ||
106 | pub fn clear(&mut self) -> Vec<Url> { | ||
107 | let cleared_files: Vec<Url> = self.diagnostic_collection.keys().cloned().collect(); | ||
108 | self.diagnostic_collection.clear(); | ||
109 | self.suggested_fix_collection.clear(); | ||
110 | cleared_files | ||
111 | } | ||
112 | |||
113 | pub fn diagnostics_for(&self, uri: &Url) -> Option<&[Diagnostic]> { | ||
114 | self.diagnostic_collection.get(uri).map(|d| d.as_slice()) | ||
115 | } | ||
116 | |||
117 | pub fn fixes_for(&self, uri: &Url) -> Option<&[SuggestedFix]> { | ||
118 | self.suggested_fix_collection.get(uri).map(|d| d.as_slice()) | ||
119 | } | ||
120 | |||
121 | pub fn add_diagnostic_with_fixes(&mut self, file_uri: Url, diagnostic: DiagnosticWithFixes) { | ||
122 | for fix in diagnostic.suggested_fixes { | ||
123 | self.add_suggested_fix_for_diagnostic(fix, &diagnostic.diagnostic); | ||
124 | } | ||
125 | self.add_diagnostic(file_uri, diagnostic.diagnostic); | ||
126 | } | ||
127 | |||
128 | fn add_diagnostic(&mut self, file_uri: Url, diagnostic: Diagnostic) { | ||
129 | let diagnostics = self.diagnostic_collection.entry(file_uri).or_default(); | ||
130 | |||
131 | // If we're building multiple targets it's possible we've already seen this diagnostic | ||
132 | let is_duplicate = diagnostics.iter().any(|d| are_diagnostics_equal(d, &diagnostic)); | ||
133 | if is_duplicate { | ||
134 | return; | ||
135 | } | ||
136 | |||
137 | diagnostics.push(diagnostic); | ||
138 | } | ||
139 | |||
140 | fn add_suggested_fix_for_diagnostic( | ||
141 | &mut self, | ||
142 | mut suggested_fix: SuggestedFix, | ||
143 | diagnostic: &Diagnostic, | ||
144 | ) { | ||
145 | let file_uri = suggested_fix.location.uri.clone(); | ||
146 | let file_suggestions = self.suggested_fix_collection.entry(file_uri).or_default(); | ||
147 | |||
148 | let existing_suggestion: Option<&mut SuggestedFix> = | ||
149 | file_suggestions.iter_mut().find(|s| s == &&suggested_fix); | ||
150 | if let Some(existing_suggestion) = existing_suggestion { | ||
151 | // The existing suggestion also applies to this new diagnostic | ||
152 | existing_suggestion.diagnostics.push(diagnostic.clone()); | ||
153 | } else { | ||
154 | // We haven't seen this suggestion before | ||
155 | suggested_fix.diagnostics.push(diagnostic.clone()); | ||
156 | file_suggestions.push(suggested_fix); | ||
157 | } | ||
158 | } | ||
159 | } | ||
160 | |||
161 | #[derive(Debug)] | ||
162 | pub enum CheckTask { | 86 | pub enum CheckTask { |
163 | /// Request a clearing of all cached diagnostics from the check watcher | 87 | /// Request a clearing of all cached diagnostics from the check watcher |
164 | ClearDiagnostics, | 88 | ClearDiagnostics, |
165 | 89 | ||
166 | /// Request adding a diagnostic with fixes included to a file | 90 | /// Request adding a diagnostic with fixes included to a file |
167 | AddDiagnostic(Url, DiagnosticWithFixes), | 91 | AddDiagnostic { url: Url, diagnostic: Diagnostic, fixes: Vec<CodeActionOrCommand> }, |
168 | 92 | ||
169 | /// Request check progress notification to client | 93 | /// Request check progress notification to client |
170 | Status(WorkDoneProgress), | 94 | Status(WorkDoneProgress), |
@@ -216,8 +140,10 @@ impl CheckWatcherThread { | |||
216 | self.last_update_req.take(); | 140 | self.last_update_req.take(); |
217 | task_send.send(CheckTask::ClearDiagnostics).unwrap(); | 141 | task_send.send(CheckTask::ClearDiagnostics).unwrap(); |
218 | 142 | ||
219 | // By replacing the watcher, we drop the previous one which | 143 | // Replace with a dummy watcher first so we drop the original and wait for completion |
220 | // causes it to shut down automatically. | 144 | std::mem::replace(&mut self.watcher, WatchThread::dummy()); |
145 | |||
146 | // Then create the actual new watcher | ||
221 | self.watcher = WatchThread::new(&self.options, &self.workspace_root); | 147 | self.watcher = WatchThread::new(&self.options, &self.workspace_root); |
222 | } | 148 | } |
223 | } | 149 | } |
@@ -277,10 +203,17 @@ impl CheckWatcherThread { | |||
277 | None => return, | 203 | None => return, |
278 | }; | 204 | }; |
279 | 205 | ||
280 | let MappedRustDiagnostic { location, diagnostic, suggested_fixes } = map_result; | 206 | let MappedRustDiagnostic { location, diagnostic, fixes } = map_result; |
207 | let fixes = fixes | ||
208 | .into_iter() | ||
209 | .map(|fix| { | ||
210 | CodeAction { diagnostics: Some(vec![diagnostic.clone()]), ..fix }.into() | ||
211 | }) | ||
212 | .collect(); | ||
281 | 213 | ||
282 | let diagnostic = DiagnosticWithFixes { diagnostic, suggested_fixes }; | 214 | task_send |
283 | task_send.send(CheckTask::AddDiagnostic(location.uri, diagnostic)).unwrap(); | 215 | .send(CheckTask::AddDiagnostic { url: location.uri, diagnostic, fixes }) |
216 | .unwrap(); | ||
284 | } | 217 | } |
285 | 218 | ||
286 | CheckEvent::Msg(Message::BuildScriptExecuted(_msg)) => {} | 219 | CheckEvent::Msg(Message::BuildScriptExecuted(_msg)) => {} |
@@ -292,7 +225,7 @@ impl CheckWatcherThread { | |||
292 | #[derive(Debug)] | 225 | #[derive(Debug)] |
293 | pub struct DiagnosticWithFixes { | 226 | pub struct DiagnosticWithFixes { |
294 | diagnostic: Diagnostic, | 227 | diagnostic: Diagnostic, |
295 | suggested_fixes: Vec<SuggestedFix>, | 228 | fixes: Vec<CodeAction>, |
296 | } | 229 | } |
297 | 230 | ||
298 | /// WatchThread exists to wrap around the communication needed to be able to | 231 | /// WatchThread exists to wrap around the communication needed to be able to |
@@ -341,6 +274,7 @@ impl WatchThread { | |||
341 | .args(&args) | 274 | .args(&args) |
342 | .stdout(Stdio::piped()) | 275 | .stdout(Stdio::piped()) |
343 | .stderr(Stdio::null()) | 276 | .stderr(Stdio::null()) |
277 | .stdin(Stdio::null()) | ||
344 | .spawn() | 278 | .spawn() |
345 | .expect("couldn't launch cargo"); | 279 | .expect("couldn't launch cargo"); |
346 | 280 | ||
@@ -348,15 +282,45 @@ impl WatchThread { | |||
348 | // which will break out of the loop, and continue the shutdown | 282 | // which will break out of the loop, and continue the shutdown |
349 | let _ = message_send.send(CheckEvent::Begin); | 283 | let _ = message_send.send(CheckEvent::Begin); |
350 | 284 | ||
351 | for message in cargo_metadata::parse_messages(command.stdout.take().unwrap()) { | 285 | // We manually read a line at a time, instead of using serde's |
286 | // stream deserializers, because the deserializer cannot recover | ||
287 | // from an error, resulting in it getting stuck, because we try to | ||
288 | // be resillient against failures. | ||
289 | // | ||
290 | // Because cargo only outputs one JSON object per line, we can | ||
291 | // simply skip a line if it doesn't parse, which just ignores any | ||
292 | // erroneus output. | ||
293 | let stdout = BufReader::new(command.stdout.take().unwrap()); | ||
294 | for line in stdout.lines() { | ||
295 | let line = match line { | ||
296 | Ok(line) => line, | ||
297 | Err(err) => { | ||
298 | log::error!("Couldn't read line from cargo: {}", err); | ||
299 | continue; | ||
300 | } | ||
301 | }; | ||
302 | |||
303 | let message = serde_json::from_str::<cargo_metadata::Message>(&line); | ||
352 | let message = match message { | 304 | let message = match message { |
353 | Ok(message) => message, | 305 | Ok(message) => message, |
354 | Err(err) => { | 306 | Err(err) => { |
355 | log::error!("Invalid json from cargo check, ignoring: {}", err); | 307 | log::error!( |
308 | "Invalid json from cargo check, ignoring ({}): {:?} ", | ||
309 | err, | ||
310 | line | ||
311 | ); | ||
356 | continue; | 312 | continue; |
357 | } | 313 | } |
358 | }; | 314 | }; |
359 | 315 | ||
316 | // Skip certain kinds of messages to only spend time on what's useful | ||
317 | match &message { | ||
318 | Message::CompilerArtifact(artifact) if artifact.fresh => continue, | ||
319 | Message::BuildScriptExecuted(_) => continue, | ||
320 | Message::Unknown => continue, | ||
321 | _ => {} | ||
322 | } | ||
323 | |||
360 | match message_send.send(CheckEvent::Msg(message)) { | 324 | match message_send.send(CheckEvent::Msg(message)) { |
361 | Ok(()) => {} | 325 | Ok(()) => {} |
362 | Err(_err) => { | 326 | Err(_err) => { |
@@ -396,10 +360,3 @@ impl std::ops::Drop for WatchThread { | |||
396 | } | 360 | } |
397 | } | 361 | } |
398 | } | 362 | } |
399 | |||
400 | fn are_diagnostics_equal(left: &Diagnostic, right: &Diagnostic) -> bool { | ||
401 | left.source == right.source | ||
402 | && left.severity == right.severity | ||
403 | && left.range == right.range | ||
404 | && left.message == right.message | ||
405 | } | ||
diff --git a/crates/ra_cli/Cargo.toml b/crates/ra_cli/Cargo.toml index 12af075f7..bcd408421 100644 --- a/crates/ra_cli/Cargo.toml +++ b/crates/ra_cli/Cargo.toml | |||
@@ -7,7 +7,7 @@ publish = false | |||
7 | 7 | ||
8 | [dependencies] | 8 | [dependencies] |
9 | pico-args = "0.3.0" | 9 | pico-args = "0.3.0" |
10 | env_logger = { version = "0.7.1", default-features = false, features = ["humantime"] } | 10 | env_logger = { version = "0.7.1", default-features = false } |
11 | 11 | ||
12 | ra_syntax = { path = "../ra_syntax" } | 12 | ra_syntax = { path = "../ra_syntax" } |
13 | ra_ide = { path = "../ra_ide" } | 13 | ra_ide = { path = "../ra_ide" } |
diff --git a/crates/ra_cli/src/analysis_stats.rs b/crates/ra_cli/src/analysis_stats.rs index fd0027691..833235bff 100644 --- a/crates/ra_cli/src/analysis_stats.rs +++ b/crates/ra_cli/src/analysis_stats.rs | |||
@@ -171,12 +171,12 @@ pub fn run( | |||
171 | println!( | 171 | println!( |
172 | "Expressions of unknown type: {} ({}%)", | 172 | "Expressions of unknown type: {} ({}%)", |
173 | num_exprs_unknown, | 173 | num_exprs_unknown, |
174 | if num_exprs > 0 { (num_exprs_unknown * 100 / num_exprs) } else { 100 } | 174 | if num_exprs > 0 { num_exprs_unknown * 100 / num_exprs } else { 100 } |
175 | ); | 175 | ); |
176 | println!( | 176 | println!( |
177 | "Expressions of partially unknown type: {} ({}%)", | 177 | "Expressions of partially unknown type: {} ({}%)", |
178 | num_exprs_partially_unknown, | 178 | num_exprs_partially_unknown, |
179 | if num_exprs > 0 { (num_exprs_partially_unknown * 100 / num_exprs) } else { 100 } | 179 | if num_exprs > 0 { num_exprs_partially_unknown * 100 / num_exprs } else { 100 } |
180 | ); | 180 | ); |
181 | println!("Type mismatches: {}", num_type_mismatches); | 181 | println!("Type mismatches: {}", num_type_mismatches); |
182 | println!("Inference: {:?}, {}", inference_time.elapsed(), ra_prof::memory_usage()); | 182 | println!("Inference: {:?}, {}", inference_time.elapsed(), ra_prof::memory_usage()); |
diff --git a/crates/ra_db/src/fixture.rs b/crates/ra_db/src/fixture.rs index 30b598e9a..17cd138c2 100644 --- a/crates/ra_db/src/fixture.rs +++ b/crates/ra_db/src/fixture.rs | |||
@@ -8,8 +8,8 @@ use rustc_hash::FxHashMap; | |||
8 | use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER}; | 8 | use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER}; |
9 | 9 | ||
10 | use crate::{ | 10 | use crate::{ |
11 | CrateGraph, CrateId, Edition, Env, FileId, FilePosition, RelativePathBuf, SourceDatabaseExt, | 11 | input::CrateName, CrateGraph, CrateId, Edition, Env, FileId, FilePosition, RelativePathBuf, |
12 | SourceRoot, SourceRootId, | 12 | SourceDatabaseExt, SourceRoot, SourceRootId, |
13 | }; | 13 | }; |
14 | 14 | ||
15 | pub const WORKSPACE: SourceRootId = SourceRootId(0); | 15 | pub const WORKSPACE: SourceRootId = SourceRootId(0); |
@@ -139,7 +139,7 @@ fn with_files(db: &mut dyn SourceDatabaseExt, fixture: &str) -> Option<FilePosit | |||
139 | for (from, to) in crate_deps { | 139 | for (from, to) in crate_deps { |
140 | let from_id = crates[&from]; | 140 | let from_id = crates[&from]; |
141 | let to_id = crates[&to]; | 141 | let to_id = crates[&to]; |
142 | crate_graph.add_dep(from_id, to.into(), to_id).unwrap(); | 142 | crate_graph.add_dep(from_id, CrateName::new(&to).unwrap(), to_id).unwrap(); |
143 | } | 143 | } |
144 | } | 144 | } |
145 | 145 | ||
diff --git a/crates/ra_db/src/input.rs b/crates/ra_db/src/input.rs index 07269237a..1f1dcea42 100644 --- a/crates/ra_db/src/input.rs +++ b/crates/ra_db/src/input.rs | |||
@@ -83,6 +83,26 @@ pub struct CrateGraph { | |||
83 | #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] | 83 | #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] |
84 | pub struct CrateId(pub u32); | 84 | pub struct CrateId(pub u32); |
85 | 85 | ||
86 | pub struct CrateName(SmolStr); | ||
87 | |||
88 | impl CrateName { | ||
89 | /// Crates a crate name, checking for dashes in the string provided. | ||
90 | /// Dashes are not allowed in the crate names, | ||
91 | /// hence the input string is returned as `Err` for those cases. | ||
92 | pub fn new(name: &str) -> Result<CrateName, &str> { | ||
93 | if name.contains('-') { | ||
94 | Err(name) | ||
95 | } else { | ||
96 | Ok(Self(SmolStr::new(name))) | ||
97 | } | ||
98 | } | ||
99 | |||
100 | /// Crates a crate name, unconditionally replacing the dashes with underscores. | ||
101 | pub fn normalize_dashes(name: &str) -> CrateName { | ||
102 | Self(SmolStr::new(name.replace('-', "_"))) | ||
103 | } | ||
104 | } | ||
105 | |||
86 | #[derive(Debug, Clone, PartialEq, Eq)] | 106 | #[derive(Debug, Clone, PartialEq, Eq)] |
87 | struct CrateData { | 107 | struct CrateData { |
88 | file_id: FileId, | 108 | file_id: FileId, |
@@ -131,13 +151,13 @@ impl CrateGraph { | |||
131 | pub fn add_dep( | 151 | pub fn add_dep( |
132 | &mut self, | 152 | &mut self, |
133 | from: CrateId, | 153 | from: CrateId, |
134 | name: SmolStr, | 154 | name: CrateName, |
135 | to: CrateId, | 155 | to: CrateId, |
136 | ) -> Result<(), CyclicDependenciesError> { | 156 | ) -> Result<(), CyclicDependenciesError> { |
137 | if self.dfs_find(from, to, &mut FxHashSet::default()) { | 157 | if self.dfs_find(from, to, &mut FxHashSet::default()) { |
138 | return Err(CyclicDependenciesError); | 158 | return Err(CyclicDependenciesError); |
139 | } | 159 | } |
140 | self.arena.get_mut(&from).unwrap().add_dep(name, to); | 160 | self.arena.get_mut(&from).unwrap().add_dep(name.0, to); |
141 | Ok(()) | 161 | Ok(()) |
142 | } | 162 | } |
143 | 163 | ||
@@ -268,7 +288,7 @@ pub struct CyclicDependenciesError; | |||
268 | 288 | ||
269 | #[cfg(test)] | 289 | #[cfg(test)] |
270 | mod tests { | 290 | mod tests { |
271 | use super::{CfgOptions, CrateGraph, Edition::Edition2018, Env, FileId, SmolStr}; | 291 | use super::{CfgOptions, CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId}; |
272 | 292 | ||
273 | #[test] | 293 | #[test] |
274 | fn it_should_panic_because_of_cycle_dependencies() { | 294 | fn it_should_panic_because_of_cycle_dependencies() { |
@@ -279,9 +299,9 @@ mod tests { | |||
279 | graph.add_crate_root(FileId(2u32), Edition2018, CfgOptions::default(), Env::default()); | 299 | graph.add_crate_root(FileId(2u32), Edition2018, CfgOptions::default(), Env::default()); |
280 | let crate3 = | 300 | let crate3 = |
281 | graph.add_crate_root(FileId(3u32), Edition2018, CfgOptions::default(), Env::default()); | 301 | graph.add_crate_root(FileId(3u32), Edition2018, CfgOptions::default(), Env::default()); |
282 | assert!(graph.add_dep(crate1, SmolStr::new("crate2"), crate2).is_ok()); | 302 | assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); |
283 | assert!(graph.add_dep(crate2, SmolStr::new("crate3"), crate3).is_ok()); | 303 | assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok()); |
284 | assert!(graph.add_dep(crate3, SmolStr::new("crate1"), crate1).is_err()); | 304 | assert!(graph.add_dep(crate3, CrateName::new("crate1").unwrap(), crate1).is_err()); |
285 | } | 305 | } |
286 | 306 | ||
287 | #[test] | 307 | #[test] |
@@ -293,7 +313,23 @@ mod tests { | |||
293 | graph.add_crate_root(FileId(2u32), Edition2018, CfgOptions::default(), Env::default()); | 313 | graph.add_crate_root(FileId(2u32), Edition2018, CfgOptions::default(), Env::default()); |
294 | let crate3 = | 314 | let crate3 = |
295 | graph.add_crate_root(FileId(3u32), Edition2018, CfgOptions::default(), Env::default()); | 315 | graph.add_crate_root(FileId(3u32), Edition2018, CfgOptions::default(), Env::default()); |
296 | assert!(graph.add_dep(crate1, SmolStr::new("crate2"), crate2).is_ok()); | 316 | assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); |
297 | assert!(graph.add_dep(crate2, SmolStr::new("crate3"), crate3).is_ok()); | 317 | assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok()); |
318 | } | ||
319 | |||
320 | #[test] | ||
321 | fn dashes_are_normalized() { | ||
322 | let mut graph = CrateGraph::default(); | ||
323 | let crate1 = | ||
324 | graph.add_crate_root(FileId(1u32), Edition2018, CfgOptions::default(), Env::default()); | ||
325 | let crate2 = | ||
326 | graph.add_crate_root(FileId(2u32), Edition2018, CfgOptions::default(), Env::default()); | ||
327 | assert!(graph | ||
328 | .add_dep(crate1, CrateName::normalize_dashes("crate-name-with-dashes"), crate2) | ||
329 | .is_ok()); | ||
330 | assert_eq!( | ||
331 | graph.dependencies(crate1).collect::<Vec<_>>(), | ||
332 | vec![&Dependency { crate_id: crate2, name: "crate_name_with_dashes".into() }] | ||
333 | ); | ||
298 | } | 334 | } |
299 | } | 335 | } |
diff --git a/crates/ra_db/src/lib.rs b/crates/ra_db/src/lib.rs index 21341b769..fb002d717 100644 --- a/crates/ra_db/src/lib.rs +++ b/crates/ra_db/src/lib.rs | |||
@@ -10,7 +10,9 @@ use ra_syntax::{ast, Parse, SourceFile, TextRange, TextUnit}; | |||
10 | 10 | ||
11 | pub use crate::{ | 11 | pub use crate::{ |
12 | cancellation::Canceled, | 12 | cancellation::Canceled, |
13 | input::{CrateGraph, CrateId, Dependency, Edition, Env, FileId, SourceRoot, SourceRootId}, | 13 | input::{ |
14 | CrateGraph, CrateId, CrateName, Dependency, Edition, Env, FileId, SourceRoot, SourceRootId, | ||
15 | }, | ||
14 | }; | 16 | }; |
15 | pub use relative_path::{RelativePath, RelativePathBuf}; | 17 | pub use relative_path::{RelativePath, RelativePathBuf}; |
16 | pub use salsa; | 18 | pub use salsa; |
diff --git a/crates/ra_fmt/src/lib.rs b/crates/ra_fmt/src/lib.rs index 10f592257..4bca27b5c 100644 --- a/crates/ra_fmt/src/lib.rs +++ b/crates/ra_fmt/src/lib.rs | |||
@@ -35,8 +35,14 @@ fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> { | |||
35 | successors(token.prev_token(), |token| token.prev_token()) | 35 | successors(token.prev_token(), |token| token.prev_token()) |
36 | } | 36 | } |
37 | 37 | ||
38 | pub fn extract_trivial_expression(expr: &ast::BlockExpr) -> Option<ast::Expr> { | 38 | pub fn unwrap_trivial_block(block: ast::BlockExpr) -> ast::Expr { |
39 | let block = expr.block()?; | 39 | extract_trivial_expression(&block) |
40 | .filter(|expr| !expr.syntax().text().contains_char('\n')) | ||
41 | .unwrap_or_else(|| block.into()) | ||
42 | } | ||
43 | |||
44 | pub fn extract_trivial_expression(block: &ast::BlockExpr) -> Option<ast::Expr> { | ||
45 | let block = block.block()?; | ||
40 | let expr = block.expr()?; | 46 | let expr = block.expr()?; |
41 | let non_trivial_children = block.syntax().children().filter(|it| match it.kind() { | 47 | let non_trivial_children = block.syntax().children().filter(|it| match it.kind() { |
42 | WHITESPACE | T!['{'] | T!['}'] => false, | 48 | WHITESPACE | T!['{'] | T!['}'] => false, |
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs index 500b34c17..4d9641728 100644 --- a/crates/ra_hir/src/code_model.rs +++ b/crates/ra_hir/src/code_model.rs | |||
@@ -10,9 +10,9 @@ use hir_def::{ | |||
10 | per_ns::PerNs, | 10 | per_ns::PerNs, |
11 | resolver::HasResolver, | 11 | resolver::HasResolver, |
12 | type_ref::{Mutability, TypeRef}, | 12 | type_ref::{Mutability, TypeRef}, |
13 | AdtId, ConstId, DefWithBodyId, EnumId, FunctionId, HasModule, ImplId, LocalEnumVariantId, | 13 | AdtId, ConstId, DefWithBodyId, EnumId, FunctionId, GenericDefId, HasModule, ImplId, |
14 | LocalModuleId, LocalStructFieldId, Lookup, ModuleId, StaticId, StructId, TraitId, TypeAliasId, | 14 | LocalEnumVariantId, LocalModuleId, LocalStructFieldId, Lookup, ModuleId, StaticId, StructId, |
15 | TypeParamId, UnionId, | 15 | TraitId, TypeAliasId, TypeParamId, UnionId, |
16 | }; | 16 | }; |
17 | use hir_expand::{ | 17 | use hir_expand::{ |
18 | diagnostics::DiagnosticSink, | 18 | diagnostics::DiagnosticSink, |
@@ -21,7 +21,7 @@ use hir_expand::{ | |||
21 | }; | 21 | }; |
22 | use hir_ty::{ | 22 | use hir_ty::{ |
23 | autoderef, display::HirFormatter, expr::ExprValidator, method_resolution, ApplicationTy, | 23 | autoderef, display::HirFormatter, expr::ExprValidator, method_resolution, ApplicationTy, |
24 | Canonical, InEnvironment, TraitEnvironment, Ty, TyDefId, TypeCtor, TypeWalk, | 24 | Canonical, InEnvironment, Substs, TraitEnvironment, Ty, TyDefId, TypeCtor, |
25 | }; | 25 | }; |
26 | use ra_db::{CrateId, Edition, FileId}; | 26 | use ra_db::{CrateId, Edition, FileId}; |
27 | use ra_prof::profile; | 27 | use ra_prof::profile; |
@@ -119,7 +119,7 @@ impl_froms!( | |||
119 | BuiltinType | 119 | BuiltinType |
120 | ); | 120 | ); |
121 | 121 | ||
122 | pub use hir_def::{attr::Attrs, visibility::Visibility, AssocItemId}; | 122 | pub use hir_def::{attr::Attrs, item_scope::ItemInNs, visibility::Visibility, AssocItemId}; |
123 | use rustc_hash::FxHashSet; | 123 | use rustc_hash::FxHashSet; |
124 | 124 | ||
125 | impl Module { | 125 | impl Module { |
@@ -238,11 +238,16 @@ impl Module { | |||
238 | item: ModuleDef, | 238 | item: ModuleDef, |
239 | ) -> Option<hir_def::path::ModPath> { | 239 | ) -> Option<hir_def::path::ModPath> { |
240 | // FIXME expose namespace choice | 240 | // FIXME expose namespace choice |
241 | hir_def::find_path::find_path( | 241 | hir_def::find_path::find_path(db, determine_item_namespace(item), self.into()) |
242 | db, | 242 | } |
243 | hir_def::item_scope::ItemInNs::Types(item.into()), | 243 | } |
244 | self.into(), | 244 | |
245 | ) | 245 | fn determine_item_namespace(module_def: ModuleDef) -> ItemInNs { |
246 | match module_def { | ||
247 | ModuleDef::Static(_) | ModuleDef::Const(_) | ModuleDef::Function(_) => { | ||
248 | ItemInNs::Values(module_def.into()) | ||
249 | } | ||
250 | _ => ItemInNs::Types(module_def.into()), | ||
246 | } | 251 | } |
247 | } | 252 | } |
248 | 253 | ||
@@ -265,7 +270,13 @@ impl StructField { | |||
265 | 270 | ||
266 | pub fn ty(&self, db: &impl HirDatabase) -> Type { | 271 | pub fn ty(&self, db: &impl HirDatabase) -> Type { |
267 | let var_id = self.parent.into(); | 272 | let var_id = self.parent.into(); |
268 | let ty = db.field_types(var_id)[self.id].clone(); | 273 | let generic_def_id: GenericDefId = match self.parent { |
274 | VariantDef::Struct(it) => it.id.into(), | ||
275 | VariantDef::Union(it) => it.id.into(), | ||
276 | VariantDef::EnumVariant(it) => it.parent.id.into(), | ||
277 | }; | ||
278 | let substs = Substs::type_params(db, generic_def_id); | ||
279 | let ty = db.field_types(var_id)[self.id].clone().subst(&substs); | ||
269 | Type::new(db, self.parent.module(db).id.krate.into(), var_id, ty) | 280 | Type::new(db, self.parent.module(db).id.krate.into(), var_id, ty) |
270 | } | 281 | } |
271 | 282 | ||
@@ -750,7 +761,7 @@ pub struct TypeParam { | |||
750 | impl TypeParam { | 761 | impl TypeParam { |
751 | pub fn name(self, db: &impl HirDatabase) -> Name { | 762 | pub fn name(self, db: &impl HirDatabase) -> Name { |
752 | let params = db.generic_params(self.id.parent); | 763 | let params = db.generic_params(self.id.parent); |
753 | params.types[self.id.local_id].name.clone() | 764 | params.types[self.id.local_id].name.clone().unwrap_or_else(Name::missing) |
754 | } | 765 | } |
755 | 766 | ||
756 | pub fn module(self, db: &impl HirDatabase) -> Module { | 767 | pub fn module(self, db: &impl HirDatabase) -> Module { |
@@ -784,8 +795,9 @@ impl ImplBlock { | |||
784 | pub fn target_ty(&self, db: &impl HirDatabase) -> Type { | 795 | pub fn target_ty(&self, db: &impl HirDatabase) -> Type { |
785 | let impl_data = db.impl_data(self.id); | 796 | let impl_data = db.impl_data(self.id); |
786 | let resolver = self.id.resolver(db); | 797 | let resolver = self.id.resolver(db); |
798 | let ctx = hir_ty::TyLoweringContext::new(db, &resolver); | ||
787 | let environment = TraitEnvironment::lower(db, &resolver); | 799 | let environment = TraitEnvironment::lower(db, &resolver); |
788 | let ty = Ty::from_hir(db, &resolver, &impl_data.target_type); | 800 | let ty = Ty::from_hir(&ctx, &impl_data.target_type); |
789 | Type { | 801 | Type { |
790 | krate: self.id.lookup(db).container.module(db).krate, | 802 | krate: self.id.lookup(db).container.module(db).krate, |
791 | ty: InEnvironment { value: ty, environment }, | 803 | ty: InEnvironment { value: ty, environment }, |
@@ -846,9 +858,10 @@ impl Type { | |||
846 | fn from_def( | 858 | fn from_def( |
847 | db: &impl HirDatabase, | 859 | db: &impl HirDatabase, |
848 | krate: CrateId, | 860 | krate: CrateId, |
849 | def: impl HasResolver + Into<TyDefId>, | 861 | def: impl HasResolver + Into<TyDefId> + Into<GenericDefId>, |
850 | ) -> Type { | 862 | ) -> Type { |
851 | let ty = db.ty(def.into()); | 863 | let substs = Substs::type_params(db, def); |
864 | let ty = db.ty(def.into()).subst(&substs); | ||
852 | Type::new(db, krate, def, ty) | 865 | Type::new(db, krate, def, ty) |
853 | } | 866 | } |
854 | 867 | ||
@@ -945,7 +958,7 @@ impl Type { | |||
945 | match a_ty.ctor { | 958 | match a_ty.ctor { |
946 | TypeCtor::Tuple { .. } => { | 959 | TypeCtor::Tuple { .. } => { |
947 | for ty in a_ty.parameters.iter() { | 960 | for ty in a_ty.parameters.iter() { |
948 | let ty = ty.clone().subst(&a_ty.parameters); | 961 | let ty = ty.clone(); |
949 | res.push(self.derived(ty)); | 962 | res.push(self.derived(ty)); |
950 | } | 963 | } |
951 | } | 964 | } |
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs index e6079b88d..a77bf6de6 100644 --- a/crates/ra_hir/src/db.rs +++ b/crates/ra_hir/src/db.rs | |||
@@ -1,20 +1,24 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | pub use hir_def::db::{ | 3 | pub use hir_def::db::{ |
4 | BodyQuery, BodyWithSourceMapQuery, ComputeCrateDefMapQuery, ConstDataQuery, | 4 | AttrsQuery, BodyQuery, BodyWithSourceMapQuery, ComputeCrateDefMapQuery, ConstDataQuery, |
5 | CrateLangItemsQuery, DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery, | 5 | CrateLangItemsQuery, DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery, |
6 | ExprScopesQuery, FunctionDataQuery, GenericParamsQuery, ImplDataQuery, InternDatabase, | 6 | ExprScopesQuery, FunctionDataQuery, GenericParamsQuery, ImplDataQuery, InternConstQuery, |
7 | InternDatabaseStorage, LangItemQuery, ModuleLangItemsQuery, RawItemsQuery, StaticDataQuery, | 7 | InternDatabase, InternDatabaseStorage, InternEnumQuery, InternFunctionQuery, InternImplQuery, |
8 | StructDataQuery, TraitDataQuery, TypeAliasDataQuery, | 8 | InternStaticQuery, InternStructQuery, InternTraitQuery, InternTypeAliasQuery, InternUnionQuery, |
9 | LangItemQuery, ModuleLangItemsQuery, RawItemsQuery, StaticDataQuery, StructDataQuery, | ||
10 | TraitDataQuery, TypeAliasDataQuery, UnionDataQuery, | ||
9 | }; | 11 | }; |
10 | pub use hir_expand::db::{ | 12 | pub use hir_expand::db::{ |
11 | AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery, | 13 | AstDatabase, AstDatabaseStorage, AstIdMapQuery, InternMacroQuery, MacroArgQuery, MacroDefQuery, |
12 | ParseMacroQuery, | 14 | MacroExpandQuery, ParseMacroQuery, |
13 | }; | 15 | }; |
14 | pub use hir_ty::db::{ | 16 | pub use hir_ty::db::{ |
15 | AssociatedTyDataQuery, CallableItemSignatureQuery, DoInferQuery, FieldTypesQuery, | 17 | AssociatedTyDataQuery, AssociatedTyValueQuery, CallableItemSignatureQuery, DoInferQuery, |
16 | GenericDefaultsQuery, GenericPredicatesQuery, HirDatabase, HirDatabaseStorage, ImplDatumQuery, | 18 | FieldTypesQuery, GenericDefaultsQuery, GenericPredicatesForParamQuery, GenericPredicatesQuery, |
17 | ImplsForTraitQuery, ImplsInCrateQuery, StructDatumQuery, TraitDatumQuery, TraitSolveQuery, | 19 | HirDatabase, HirDatabaseStorage, ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, |
20 | ImplsForTraitQuery, ImplsInCrateQuery, InternAssocTyValueQuery, InternChalkImplQuery, | ||
21 | InternTypeCtorQuery, StructDatumQuery, TraitDatumQuery, TraitSolveQuery, TraitSolverQuery, | ||
18 | TyQuery, ValueTyQuery, | 22 | TyQuery, ValueTyQuery, |
19 | }; | 23 | }; |
20 | 24 | ||
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs index 4f8fc9602..bb9a35c5d 100644 --- a/crates/ra_hir/src/source_analyzer.rs +++ b/crates/ra_hir/src/source_analyzer.rs | |||
@@ -178,6 +178,10 @@ impl SourceAnalyzer { | |||
178 | } | 178 | } |
179 | } | 179 | } |
180 | 180 | ||
181 | fn trait_env(&self, db: &impl HirDatabase) -> Arc<TraitEnvironment> { | ||
182 | TraitEnvironment::lower(db, &self.resolver) | ||
183 | } | ||
184 | |||
181 | pub fn type_of(&self, db: &impl HirDatabase, expr: &ast::Expr) -> Option<Type> { | 185 | pub fn type_of(&self, db: &impl HirDatabase, expr: &ast::Expr) -> Option<Type> { |
182 | let expr_id = if let Some(expr) = self.expand_expr(db, InFile::new(self.file_id, expr)) { | 186 | let expr_id = if let Some(expr) = self.expand_expr(db, InFile::new(self.file_id, expr)) { |
183 | self.body_source_map.as_ref()?.node_expr(expr.as_ref())? | 187 | self.body_source_map.as_ref()?.node_expr(expr.as_ref())? |
@@ -186,14 +190,14 @@ impl SourceAnalyzer { | |||
186 | }; | 190 | }; |
187 | 191 | ||
188 | let ty = self.infer.as_ref()?[expr_id].clone(); | 192 | let ty = self.infer.as_ref()?[expr_id].clone(); |
189 | let environment = TraitEnvironment::lower(db, &self.resolver); | 193 | let environment = self.trait_env(db); |
190 | Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } }) | 194 | Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } }) |
191 | } | 195 | } |
192 | 196 | ||
193 | pub fn type_of_pat(&self, db: &impl HirDatabase, pat: &ast::Pat) -> Option<Type> { | 197 | pub fn type_of_pat(&self, db: &impl HirDatabase, pat: &ast::Pat) -> Option<Type> { |
194 | let pat_id = self.pat_id(pat)?; | 198 | let pat_id = self.pat_id(pat)?; |
195 | let ty = self.infer.as_ref()?[pat_id].clone(); | 199 | let ty = self.infer.as_ref()?[pat_id].clone(); |
196 | let environment = TraitEnvironment::lower(db, &self.resolver); | 200 | let environment = self.trait_env(db); |
197 | Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } }) | 201 | Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } }) |
198 | } | 202 | } |
199 | 203 | ||
diff --git a/crates/ra_hir_def/Cargo.toml b/crates/ra_hir_def/Cargo.toml index 2c368f690..1efa00fe0 100644 --- a/crates/ra_hir_def/Cargo.toml +++ b/crates/ra_hir_def/Cargo.toml | |||
@@ -26,4 +26,4 @@ ra_cfg = { path = "../ra_cfg" } | |||
26 | tt = { path = "../ra_tt", package = "ra_tt" } | 26 | tt = { path = "../ra_tt", package = "ra_tt" } |
27 | 27 | ||
28 | [dev-dependencies] | 28 | [dev-dependencies] |
29 | insta = "0.12.0" | 29 | insta = "0.13.0" |
diff --git a/crates/ra_hir_def/src/find_path.rs b/crates/ra_hir_def/src/find_path.rs index 8cc2fb160..43b9b124a 100644 --- a/crates/ra_hir_def/src/find_path.rs +++ b/crates/ra_hir_def/src/find_path.rs | |||
@@ -7,10 +7,39 @@ use crate::{ | |||
7 | visibility::Visibility, | 7 | visibility::Visibility, |
8 | CrateId, ModuleDefId, ModuleId, | 8 | CrateId, ModuleDefId, ModuleId, |
9 | }; | 9 | }; |
10 | use hir_expand::name::Name; | 10 | use hir_expand::name::{known, Name}; |
11 | use test_utils::tested_by; | ||
11 | 12 | ||
12 | const MAX_PATH_LEN: usize = 15; | 13 | const MAX_PATH_LEN: usize = 15; |
13 | 14 | ||
15 | impl ModPath { | ||
16 | fn starts_with_std(&self) -> bool { | ||
17 | self.segments.first().filter(|&first_segment| first_segment == &known::std).is_some() | ||
18 | } | ||
19 | |||
20 | // When std library is present, paths starting with `std::` | ||
21 | // should be preferred over paths starting with `core::` and `alloc::` | ||
22 | fn should_start_with_std(&self) -> bool { | ||
23 | self.segments | ||
24 | .first() | ||
25 | .filter(|&first_segment| { | ||
26 | first_segment == &known::alloc || first_segment == &known::core | ||
27 | }) | ||
28 | .is_some() | ||
29 | } | ||
30 | |||
31 | fn len(&self) -> usize { | ||
32 | self.segments.len() | ||
33 | + match self.kind { | ||
34 | PathKind::Plain => 0, | ||
35 | PathKind::Super(i) => i as usize, | ||
36 | PathKind::Crate => 1, | ||
37 | PathKind::Abs => 0, | ||
38 | PathKind::DollarCrate(_) => 1, | ||
39 | } | ||
40 | } | ||
41 | } | ||
42 | |||
14 | // FIXME: handle local items | 43 | // FIXME: handle local items |
15 | 44 | ||
16 | /// Find a path that can be used to refer to a certain item. This can depend on | 45 | /// Find a path that can be used to refer to a certain item. This can depend on |
@@ -112,23 +141,27 @@ fn find_path_inner( | |||
112 | Some(path) => path, | 141 | Some(path) => path, |
113 | }; | 142 | }; |
114 | path.segments.push(name); | 143 | path.segments.push(name); |
115 | if path_len(&path) < best_path_len { | 144 | |
116 | best_path_len = path_len(&path); | 145 | let new_path = |
117 | best_path = Some(path); | 146 | if let Some(best_path) = best_path { select_best_path(best_path, path) } else { path }; |
118 | } | 147 | best_path_len = new_path.len(); |
148 | best_path = Some(new_path); | ||
119 | } | 149 | } |
120 | best_path | 150 | best_path |
121 | } | 151 | } |
122 | 152 | ||
123 | fn path_len(path: &ModPath) -> usize { | 153 | fn select_best_path(old_path: ModPath, new_path: ModPath) -> ModPath { |
124 | path.segments.len() | 154 | if old_path.starts_with_std() && new_path.should_start_with_std() { |
125 | + match path.kind { | 155 | tested_by!(prefer_std_paths); |
126 | PathKind::Plain => 0, | 156 | old_path |
127 | PathKind::Super(i) => i as usize, | 157 | } else if new_path.starts_with_std() && old_path.should_start_with_std() { |
128 | PathKind::Crate => 1, | 158 | tested_by!(prefer_std_paths); |
129 | PathKind::Abs => 0, | 159 | new_path |
130 | PathKind::DollarCrate(_) => 1, | 160 | } else if new_path.len() < old_path.len() { |
131 | } | 161 | new_path |
162 | } else { | ||
163 | old_path | ||
164 | } | ||
132 | } | 165 | } |
133 | 166 | ||
134 | fn find_importable_locations( | 167 | fn find_importable_locations( |
@@ -201,6 +234,7 @@ mod tests { | |||
201 | use hir_expand::hygiene::Hygiene; | 234 | use hir_expand::hygiene::Hygiene; |
202 | use ra_db::fixture::WithFixture; | 235 | use ra_db::fixture::WithFixture; |
203 | use ra_syntax::ast::AstNode; | 236 | use ra_syntax::ast::AstNode; |
237 | use test_utils::covers; | ||
204 | 238 | ||
205 | /// `code` needs to contain a cursor marker; checks that `find_path` for the | 239 | /// `code` needs to contain a cursor marker; checks that `find_path` for the |
206 | /// item the `path` refers to returns that same path when called from the | 240 | /// item the `path` refers to returns that same path when called from the |
@@ -452,4 +486,41 @@ mod tests { | |||
452 | "#; | 486 | "#; |
453 | check_found_path(code, "crate::foo::S"); | 487 | check_found_path(code, "crate::foo::S"); |
454 | } | 488 | } |
489 | |||
490 | #[test] | ||
491 | fn prefer_std_paths_over_alloc() { | ||
492 | covers!(prefer_std_paths); | ||
493 | let code = r#" | ||
494 | //- /main.rs crate:main deps:alloc,std | ||
495 | <|> | ||
496 | |||
497 | //- /std.rs crate:std deps:alloc | ||
498 | pub mod sync { | ||
499 | pub use alloc::sync::Arc; | ||
500 | } | ||
501 | |||
502 | //- /zzz.rs crate:alloc | ||
503 | pub mod sync { | ||
504 | pub struct Arc; | ||
505 | } | ||
506 | "#; | ||
507 | check_found_path(code, "std::sync::Arc"); | ||
508 | } | ||
509 | |||
510 | #[test] | ||
511 | fn prefer_shorter_paths_if_not_alloc() { | ||
512 | let code = r#" | ||
513 | //- /main.rs crate:main deps:megaalloc,std | ||
514 | <|> | ||
515 | |||
516 | //- /std.rs crate:std deps:megaalloc | ||
517 | pub mod sync { | ||
518 | pub use megaalloc::sync::Arc; | ||
519 | } | ||
520 | |||
521 | //- /zzz.rs crate:megaalloc | ||
522 | pub struct Arc; | ||
523 | "#; | ||
524 | check_found_path(code, "megaalloc::Arc"); | ||
525 | } | ||
455 | } | 526 | } |
diff --git a/crates/ra_hir_def/src/generics.rs b/crates/ra_hir_def/src/generics.rs index e9c28c730..f765e6edc 100644 --- a/crates/ra_hir_def/src/generics.rs +++ b/crates/ra_hir_def/src/generics.rs | |||
@@ -27,8 +27,16 @@ use crate::{ | |||
27 | /// Data about a generic parameter (to a function, struct, impl, ...). | 27 | /// Data about a generic parameter (to a function, struct, impl, ...). |
28 | #[derive(Clone, PartialEq, Eq, Debug)] | 28 | #[derive(Clone, PartialEq, Eq, Debug)] |
29 | pub struct TypeParamData { | 29 | pub struct TypeParamData { |
30 | pub name: Name, | 30 | pub name: Option<Name>, |
31 | pub default: Option<TypeRef>, | 31 | pub default: Option<TypeRef>, |
32 | pub provenance: TypeParamProvenance, | ||
33 | } | ||
34 | |||
35 | #[derive(Copy, Clone, PartialEq, Eq, Debug)] | ||
36 | pub enum TypeParamProvenance { | ||
37 | TypeParamList, | ||
38 | TraitSelf, | ||
39 | ArgumentImplTrait, | ||
32 | } | 40 | } |
33 | 41 | ||
34 | /// Data about the generic parameters of a function, struct, impl, etc. | 42 | /// Data about the generic parameters of a function, struct, impl, etc. |
@@ -45,10 +53,17 @@ pub struct GenericParams { | |||
45 | /// associated type bindings like `Iterator<Item = u32>`. | 53 | /// associated type bindings like `Iterator<Item = u32>`. |
46 | #[derive(Clone, PartialEq, Eq, Debug)] | 54 | #[derive(Clone, PartialEq, Eq, Debug)] |
47 | pub struct WherePredicate { | 55 | pub struct WherePredicate { |
48 | pub type_ref: TypeRef, | 56 | pub target: WherePredicateTarget, |
49 | pub bound: TypeBound, | 57 | pub bound: TypeBound, |
50 | } | 58 | } |
51 | 59 | ||
60 | #[derive(Clone, PartialEq, Eq, Debug)] | ||
61 | pub enum WherePredicateTarget { | ||
62 | TypeRef(TypeRef), | ||
63 | /// For desugared where predicates that can directly refer to a type param. | ||
64 | TypeParam(LocalTypeParamId), | ||
65 | } | ||
66 | |||
52 | type SourceMap = ArenaMap<LocalTypeParamId, Either<ast::TraitDef, ast::TypeParam>>; | 67 | type SourceMap = ArenaMap<LocalTypeParamId, Either<ast::TraitDef, ast::TypeParam>>; |
53 | 68 | ||
54 | impl GenericParams { | 69 | impl GenericParams { |
@@ -68,6 +83,11 @@ impl GenericParams { | |||
68 | GenericDefId::FunctionId(it) => { | 83 | GenericDefId::FunctionId(it) => { |
69 | let src = it.lookup(db).source(db); | 84 | let src = it.lookup(db).source(db); |
70 | generics.fill(&mut sm, &src.value); | 85 | generics.fill(&mut sm, &src.value); |
86 | // lower `impl Trait` in arguments | ||
87 | let data = db.function_data(it); | ||
88 | for param in &data.params { | ||
89 | generics.fill_implicit_impl_trait_args(param); | ||
90 | } | ||
71 | src.file_id | 91 | src.file_id |
72 | } | 92 | } |
73 | GenericDefId::AdtId(AdtId::StructId(it)) => { | 93 | GenericDefId::AdtId(AdtId::StructId(it)) => { |
@@ -89,8 +109,11 @@ impl GenericParams { | |||
89 | let src = it.lookup(db).source(db); | 109 | let src = it.lookup(db).source(db); |
90 | 110 | ||
91 | // traits get the Self type as an implicit first type parameter | 111 | // traits get the Self type as an implicit first type parameter |
92 | let self_param_id = | 112 | let self_param_id = generics.types.alloc(TypeParamData { |
93 | generics.types.alloc(TypeParamData { name: name![Self], default: None }); | 113 | name: Some(name![Self]), |
114 | default: None, | ||
115 | provenance: TypeParamProvenance::TraitSelf, | ||
116 | }); | ||
94 | sm.insert(self_param_id, Either::Left(src.value.clone())); | 117 | sm.insert(self_param_id, Either::Left(src.value.clone())); |
95 | // add super traits as bounds on Self | 118 | // add super traits as bounds on Self |
96 | // i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar | 119 | // i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar |
@@ -142,7 +165,11 @@ impl GenericParams { | |||
142 | let name = type_param.name().map_or_else(Name::missing, |it| it.as_name()); | 165 | let name = type_param.name().map_or_else(Name::missing, |it| it.as_name()); |
143 | // FIXME: Use `Path::from_src` | 166 | // FIXME: Use `Path::from_src` |
144 | let default = type_param.default_type().map(TypeRef::from_ast); | 167 | let default = type_param.default_type().map(TypeRef::from_ast); |
145 | let param = TypeParamData { name: name.clone(), default }; | 168 | let param = TypeParamData { |
169 | name: Some(name.clone()), | ||
170 | default, | ||
171 | provenance: TypeParamProvenance::TypeParamList, | ||
172 | }; | ||
146 | let param_id = self.types.alloc(param); | 173 | let param_id = self.types.alloc(param); |
147 | sm.insert(param_id, Either::Right(type_param.clone())); | 174 | sm.insert(param_id, Either::Right(type_param.clone())); |
148 | 175 | ||
@@ -170,11 +197,43 @@ impl GenericParams { | |||
170 | return; | 197 | return; |
171 | } | 198 | } |
172 | let bound = TypeBound::from_ast(bound); | 199 | let bound = TypeBound::from_ast(bound); |
173 | self.where_predicates.push(WherePredicate { type_ref, bound }); | 200 | self.where_predicates |
201 | .push(WherePredicate { target: WherePredicateTarget::TypeRef(type_ref), bound }); | ||
202 | } | ||
203 | |||
204 | fn fill_implicit_impl_trait_args(&mut self, type_ref: &TypeRef) { | ||
205 | type_ref.walk(&mut |type_ref| { | ||
206 | if let TypeRef::ImplTrait(bounds) = type_ref { | ||
207 | let param = TypeParamData { | ||
208 | name: None, | ||
209 | default: None, | ||
210 | provenance: TypeParamProvenance::ArgumentImplTrait, | ||
211 | }; | ||
212 | let param_id = self.types.alloc(param); | ||
213 | for bound in bounds { | ||
214 | self.where_predicates.push(WherePredicate { | ||
215 | target: WherePredicateTarget::TypeParam(param_id), | ||
216 | bound: bound.clone(), | ||
217 | }); | ||
218 | } | ||
219 | } | ||
220 | }); | ||
174 | } | 221 | } |
175 | 222 | ||
176 | pub fn find_by_name(&self, name: &Name) -> Option<LocalTypeParamId> { | 223 | pub fn find_by_name(&self, name: &Name) -> Option<LocalTypeParamId> { |
177 | self.types.iter().find_map(|(id, p)| if &p.name == name { Some(id) } else { None }) | 224 | self.types |
225 | .iter() | ||
226 | .find_map(|(id, p)| if p.name.as_ref() == Some(name) { Some(id) } else { None }) | ||
227 | } | ||
228 | |||
229 | pub fn find_trait_self_param(&self) -> Option<LocalTypeParamId> { | ||
230 | self.types.iter().find_map(|(id, p)| { | ||
231 | if p.provenance == TypeParamProvenance::TraitSelf { | ||
232 | Some(id) | ||
233 | } else { | ||
234 | None | ||
235 | } | ||
236 | }) | ||
178 | } | 237 | } |
179 | } | 238 | } |
180 | 239 | ||
diff --git a/crates/ra_hir_def/src/marks.rs b/crates/ra_hir_def/src/marks.rs index 457ba4abe..daa49d5f1 100644 --- a/crates/ra_hir_def/src/marks.rs +++ b/crates/ra_hir_def/src/marks.rs | |||
@@ -13,4 +13,5 @@ test_utils::marks!( | |||
13 | macro_dollar_crate_self | 13 | macro_dollar_crate_self |
14 | macro_dollar_crate_other | 14 | macro_dollar_crate_other |
15 | infer_resolve_while_let | 15 | infer_resolve_while_let |
16 | prefer_std_paths | ||
16 | ); | 17 | ); |
diff --git a/crates/ra_hir_def/src/nameres.rs b/crates/ra_hir_def/src/nameres.rs index 27c12e46c..852304dd0 100644 --- a/crates/ra_hir_def/src/nameres.rs +++ b/crates/ra_hir_def/src/nameres.rs | |||
@@ -229,6 +229,46 @@ impl CrateDefMap { | |||
229 | self.resolve_path_fp_with_macro(db, ResolveMode::Other, original_module, path, shadow); | 229 | self.resolve_path_fp_with_macro(db, ResolveMode::Other, original_module, path, shadow); |
230 | (res.resolved_def, res.segment_index) | 230 | (res.resolved_def, res.segment_index) |
231 | } | 231 | } |
232 | |||
233 | // FIXME: this can use some more human-readable format (ideally, an IR | ||
234 | // even), as this should be a great debugging aid. | ||
235 | pub fn dump(&self) -> String { | ||
236 | let mut buf = String::new(); | ||
237 | go(&mut buf, self, "\ncrate", self.root); | ||
238 | return buf.trim().to_string(); | ||
239 | |||
240 | fn go(buf: &mut String, map: &CrateDefMap, path: &str, module: LocalModuleId) { | ||
241 | *buf += path; | ||
242 | *buf += "\n"; | ||
243 | |||
244 | let mut entries: Vec<_> = map.modules[module].scope.resolutions().collect(); | ||
245 | entries.sort_by_key(|(name, _)| name.clone()); | ||
246 | |||
247 | for (name, def) in entries { | ||
248 | *buf += &format!("{}:", name); | ||
249 | |||
250 | if def.types.is_some() { | ||
251 | *buf += " t"; | ||
252 | } | ||
253 | if def.values.is_some() { | ||
254 | *buf += " v"; | ||
255 | } | ||
256 | if def.macros.is_some() { | ||
257 | *buf += " m"; | ||
258 | } | ||
259 | if def.is_none() { | ||
260 | *buf += " _"; | ||
261 | } | ||
262 | |||
263 | *buf += "\n"; | ||
264 | } | ||
265 | |||
266 | for (name, child) in map.modules[module].children.iter() { | ||
267 | let path = path.to_string() + &format!("::{}", name); | ||
268 | go(buf, map, &path, *child); | ||
269 | } | ||
270 | } | ||
271 | } | ||
232 | } | 272 | } |
233 | 273 | ||
234 | impl ModuleData { | 274 | impl ModuleData { |
diff --git a/crates/ra_hir_def/src/nameres/collector.rs b/crates/ra_hir_def/src/nameres/collector.rs index 7499dff31..6352c71ef 100644 --- a/crates/ra_hir_def/src/nameres/collector.rs +++ b/crates/ra_hir_def/src/nameres/collector.rs | |||
@@ -22,7 +22,7 @@ use crate::{ | |||
22 | diagnostics::DefDiagnostic, mod_resolution::ModDir, path_resolution::ReachedFixedPoint, | 22 | diagnostics::DefDiagnostic, mod_resolution::ModDir, path_resolution::ReachedFixedPoint, |
23 | raw, BuiltinShadowMode, CrateDefMap, ModuleData, ModuleOrigin, ResolveMode, | 23 | raw, BuiltinShadowMode, CrateDefMap, ModuleData, ModuleOrigin, ResolveMode, |
24 | }, | 24 | }, |
25 | path::{ModPath, PathKind}, | 25 | path::{ImportAlias, ModPath, PathKind}, |
26 | per_ns::PerNs, | 26 | per_ns::PerNs, |
27 | visibility::Visibility, | 27 | visibility::Visibility, |
28 | AdtId, AstId, ConstLoc, ContainerId, EnumLoc, EnumVariantId, FunctionLoc, ImplLoc, Intern, | 28 | AdtId, AstId, ConstLoc, ContainerId, EnumLoc, EnumVariantId, FunctionLoc, ImplLoc, Intern, |
@@ -438,7 +438,11 @@ where | |||
438 | } else { | 438 | } else { |
439 | match import.path.segments.last() { | 439 | match import.path.segments.last() { |
440 | Some(last_segment) => { | 440 | Some(last_segment) => { |
441 | let name = import.alias.clone().unwrap_or_else(|| last_segment.clone()); | 441 | let name = match &import.alias { |
442 | Some(ImportAlias::Alias(name)) => name.clone(), | ||
443 | Some(ImportAlias::Underscore) => last_segment.clone(), // FIXME rust-analyzer#2736 | ||
444 | None => last_segment.clone(), | ||
445 | }; | ||
442 | log::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def); | 446 | log::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def); |
443 | 447 | ||
444 | // extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658 | 448 | // extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658 |
diff --git a/crates/ra_hir_def/src/nameres/raw.rs b/crates/ra_hir_def/src/nameres/raw.rs index fac1169ef..650cf1f98 100644 --- a/crates/ra_hir_def/src/nameres/raw.rs +++ b/crates/ra_hir_def/src/nameres/raw.rs | |||
@@ -22,8 +22,11 @@ use ra_syntax::{ | |||
22 | use test_utils::tested_by; | 22 | use test_utils::tested_by; |
23 | 23 | ||
24 | use crate::{ | 24 | use crate::{ |
25 | attr::Attrs, db::DefDatabase, path::ModPath, visibility::RawVisibility, FileAstId, HirFileId, | 25 | attr::Attrs, |
26 | InFile, | 26 | db::DefDatabase, |
27 | path::{ImportAlias, ModPath}, | ||
28 | visibility::RawVisibility, | ||
29 | FileAstId, HirFileId, InFile, | ||
27 | }; | 30 | }; |
28 | 31 | ||
29 | /// `RawItems` is a set of top-level items in a file (except for impls). | 32 | /// `RawItems` is a set of top-level items in a file (except for impls). |
@@ -145,7 +148,7 @@ impl_arena_id!(Import); | |||
145 | #[derive(Debug, Clone, PartialEq, Eq)] | 148 | #[derive(Debug, Clone, PartialEq, Eq)] |
146 | pub struct ImportData { | 149 | pub struct ImportData { |
147 | pub(super) path: ModPath, | 150 | pub(super) path: ModPath, |
148 | pub(super) alias: Option<Name>, | 151 | pub(super) alias: Option<ImportAlias>, |
149 | pub(super) is_glob: bool, | 152 | pub(super) is_glob: bool, |
150 | pub(super) is_prelude: bool, | 153 | pub(super) is_prelude: bool, |
151 | pub(super) is_extern_crate: bool, | 154 | pub(super) is_extern_crate: bool, |
@@ -353,7 +356,11 @@ impl RawItemsCollector { | |||
353 | let path = ModPath::from_name_ref(&name_ref); | 356 | let path = ModPath::from_name_ref(&name_ref); |
354 | let visibility = | 357 | let visibility = |
355 | RawVisibility::from_ast_with_hygiene(extern_crate.visibility(), &self.hygiene); | 358 | RawVisibility::from_ast_with_hygiene(extern_crate.visibility(), &self.hygiene); |
356 | let alias = extern_crate.alias().and_then(|a| a.name()).map(|it| it.as_name()); | 359 | let alias = extern_crate.alias().map(|a| { |
360 | a.name() | ||
361 | .map(|it| it.as_name()) | ||
362 | .map_or(ImportAlias::Underscore, |a| ImportAlias::Alias(a)) | ||
363 | }); | ||
357 | let attrs = self.parse_attrs(&extern_crate); | 364 | let attrs = self.parse_attrs(&extern_crate); |
358 | // FIXME: cfg_attr | 365 | // FIXME: cfg_attr |
359 | let is_macro_use = extern_crate.has_atom_attr("macro_use"); | 366 | let is_macro_use = extern_crate.has_atom_attr("macro_use"); |
diff --git a/crates/ra_hir_def/src/nameres/tests.rs b/crates/ra_hir_def/src/nameres/tests.rs index 78bcdc850..82f0f835c 100644 --- a/crates/ra_hir_def/src/nameres/tests.rs +++ b/crates/ra_hir_def/src/nameres/tests.rs | |||
@@ -10,11 +10,10 @@ use insta::assert_snapshot; | |||
10 | use ra_db::{fixture::WithFixture, SourceDatabase}; | 10 | use ra_db::{fixture::WithFixture, SourceDatabase}; |
11 | use test_utils::covers; | 11 | use test_utils::covers; |
12 | 12 | ||
13 | use crate::{db::DefDatabase, nameres::*, test_db::TestDB, LocalModuleId}; | 13 | use crate::{db::DefDatabase, nameres::*, test_db::TestDB}; |
14 | 14 | ||
15 | fn def_map(fixture: &str) -> String { | 15 | fn def_map(fixture: &str) -> String { |
16 | let dm = compute_crate_def_map(fixture); | 16 | compute_crate_def_map(fixture).dump() |
17 | render_crate_def_map(&dm) | ||
18 | } | 17 | } |
19 | 18 | ||
20 | fn compute_crate_def_map(fixture: &str) -> Arc<CrateDefMap> { | 19 | fn compute_crate_def_map(fixture: &str) -> Arc<CrateDefMap> { |
@@ -23,44 +22,6 @@ fn compute_crate_def_map(fixture: &str) -> Arc<CrateDefMap> { | |||
23 | db.crate_def_map(krate) | 22 | db.crate_def_map(krate) |
24 | } | 23 | } |
25 | 24 | ||
26 | fn render_crate_def_map(map: &CrateDefMap) -> String { | ||
27 | let mut buf = String::new(); | ||
28 | go(&mut buf, map, "\ncrate", map.root); | ||
29 | return buf.trim().to_string(); | ||
30 | |||
31 | fn go(buf: &mut String, map: &CrateDefMap, path: &str, module: LocalModuleId) { | ||
32 | *buf += path; | ||
33 | *buf += "\n"; | ||
34 | |||
35 | let mut entries: Vec<_> = map.modules[module].scope.resolutions().collect(); | ||
36 | entries.sort_by_key(|(name, _)| name.clone()); | ||
37 | |||
38 | for (name, def) in entries { | ||
39 | *buf += &format!("{}:", name); | ||
40 | |||
41 | if def.types.is_some() { | ||
42 | *buf += " t"; | ||
43 | } | ||
44 | if def.values.is_some() { | ||
45 | *buf += " v"; | ||
46 | } | ||
47 | if def.macros.is_some() { | ||
48 | *buf += " m"; | ||
49 | } | ||
50 | if def.is_none() { | ||
51 | *buf += " _"; | ||
52 | } | ||
53 | |||
54 | *buf += "\n"; | ||
55 | } | ||
56 | |||
57 | for (name, child) in map.modules[module].children.iter() { | ||
58 | let path = path.to_string() + &format!("::{}", name); | ||
59 | go(buf, map, &path, *child); | ||
60 | } | ||
61 | } | ||
62 | } | ||
63 | |||
64 | #[test] | 25 | #[test] |
65 | fn crate_def_map_smoke_test() { | 26 | fn crate_def_map_smoke_test() { |
66 | let map = def_map( | 27 | let map = def_map( |
diff --git a/crates/ra_hir_def/src/path.rs b/crates/ra_hir_def/src/path.rs index ab290e2c9..246032c13 100644 --- a/crates/ra_hir_def/src/path.rs +++ b/crates/ra_hir_def/src/path.rs | |||
@@ -16,13 +16,13 @@ use ra_syntax::ast; | |||
16 | 16 | ||
17 | use crate::{type_ref::TypeRef, InFile}; | 17 | use crate::{type_ref::TypeRef, InFile}; |
18 | 18 | ||
19 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 19 | #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] |
20 | pub struct ModPath { | 20 | pub struct ModPath { |
21 | pub kind: PathKind, | 21 | pub kind: PathKind, |
22 | pub segments: Vec<Name>, | 22 | pub segments: Vec<Name>, |
23 | } | 23 | } |
24 | 24 | ||
25 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 25 | #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] |
26 | pub enum PathKind { | 26 | pub enum PathKind { |
27 | Plain, | 27 | Plain, |
28 | /// `self::` is `Super(0)` | 28 | /// `self::` is `Super(0)` |
@@ -34,6 +34,14 @@ pub enum PathKind { | |||
34 | DollarCrate(CrateId), | 34 | DollarCrate(CrateId), |
35 | } | 35 | } |
36 | 36 | ||
37 | #[derive(Debug, Clone, PartialEq, Eq)] | ||
38 | pub enum ImportAlias { | ||
39 | /// Unnamed alias, as in `use Foo as _;` | ||
40 | Underscore, | ||
41 | /// Named alias | ||
42 | Alias(Name), | ||
43 | } | ||
44 | |||
37 | impl ModPath { | 45 | impl ModPath { |
38 | pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option<ModPath> { | 46 | pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option<ModPath> { |
39 | lower::lower_path(path, hygiene).map(|it| it.mod_path) | 47 | lower::lower_path(path, hygiene).map(|it| it.mod_path) |
@@ -57,7 +65,7 @@ impl ModPath { | |||
57 | pub(crate) fn expand_use_item( | 65 | pub(crate) fn expand_use_item( |
58 | item_src: InFile<ast::UseItem>, | 66 | item_src: InFile<ast::UseItem>, |
59 | hygiene: &Hygiene, | 67 | hygiene: &Hygiene, |
60 | mut cb: impl FnMut(ModPath, &ast::UseTree, /* is_glob */ bool, Option<Name>), | 68 | mut cb: impl FnMut(ModPath, &ast::UseTree, /* is_glob */ bool, Option<ImportAlias>), |
61 | ) { | 69 | ) { |
62 | if let Some(tree) = item_src.value.use_tree() { | 70 | if let Some(tree) = item_src.value.use_tree() { |
63 | lower::lower_use_tree(None, tree, hygiene, &mut cb); | 71 | lower::lower_use_tree(None, tree, hygiene, &mut cb); |
diff --git a/crates/ra_hir_def/src/path/lower/lower_use.rs b/crates/ra_hir_def/src/path/lower/lower_use.rs index 531878174..d2bc9d193 100644 --- a/crates/ra_hir_def/src/path/lower/lower_use.rs +++ b/crates/ra_hir_def/src/path/lower/lower_use.rs | |||
@@ -4,20 +4,17 @@ | |||
4 | use std::iter; | 4 | use std::iter; |
5 | 5 | ||
6 | use either::Either; | 6 | use either::Either; |
7 | use hir_expand::{ | 7 | use hir_expand::{hygiene::Hygiene, name::AsName}; |
8 | hygiene::Hygiene, | ||
9 | name::{AsName, Name}, | ||
10 | }; | ||
11 | use ra_syntax::ast::{self, NameOwner}; | 8 | use ra_syntax::ast::{self, NameOwner}; |
12 | use test_utils::tested_by; | 9 | use test_utils::tested_by; |
13 | 10 | ||
14 | use crate::path::{ModPath, PathKind}; | 11 | use crate::path::{ImportAlias, ModPath, PathKind}; |
15 | 12 | ||
16 | pub(crate) fn lower_use_tree( | 13 | pub(crate) fn lower_use_tree( |
17 | prefix: Option<ModPath>, | 14 | prefix: Option<ModPath>, |
18 | tree: ast::UseTree, | 15 | tree: ast::UseTree, |
19 | hygiene: &Hygiene, | 16 | hygiene: &Hygiene, |
20 | cb: &mut dyn FnMut(ModPath, &ast::UseTree, bool, Option<Name>), | 17 | cb: &mut dyn FnMut(ModPath, &ast::UseTree, bool, Option<ImportAlias>), |
21 | ) { | 18 | ) { |
22 | if let Some(use_tree_list) = tree.use_tree_list() { | 19 | if let Some(use_tree_list) = tree.use_tree_list() { |
23 | let prefix = match tree.path() { | 20 | let prefix = match tree.path() { |
@@ -34,7 +31,11 @@ pub(crate) fn lower_use_tree( | |||
34 | lower_use_tree(prefix.clone(), child_tree, hygiene, cb); | 31 | lower_use_tree(prefix.clone(), child_tree, hygiene, cb); |
35 | } | 32 | } |
36 | } else { | 33 | } else { |
37 | let alias = tree.alias().and_then(|a| a.name()).map(|a| a.as_name()); | 34 | let alias = tree.alias().map(|a| { |
35 | a.name() | ||
36 | .map(|it| it.as_name()) | ||
37 | .map_or(ImportAlias::Underscore, |a| ImportAlias::Alias(a)) | ||
38 | }); | ||
38 | let is_glob = tree.has_star(); | 39 | let is_glob = tree.has_star(); |
39 | if let Some(ast_path) = tree.path() { | 40 | if let Some(ast_path) = tree.path() { |
40 | // Handle self in a path. | 41 | // Handle self in a path. |
diff --git a/crates/ra_hir_def/src/resolver.rs b/crates/ra_hir_def/src/resolver.rs index f7bac5801..05cf4646a 100644 --- a/crates/ra_hir_def/src/resolver.rs +++ b/crates/ra_hir_def/src/resolver.rs | |||
@@ -490,10 +490,12 @@ impl Scope { | |||
490 | } | 490 | } |
491 | Scope::GenericParams { params, def } => { | 491 | Scope::GenericParams { params, def } => { |
492 | for (local_id, param) in params.types.iter() { | 492 | for (local_id, param) in params.types.iter() { |
493 | f( | 493 | if let Some(name) = ¶m.name { |
494 | param.name.clone(), | 494 | f( |
495 | ScopeDef::GenericParam(TypeParamId { local_id, parent: *def }), | 495 | name.clone(), |
496 | ) | 496 | ScopeDef::GenericParam(TypeParamId { local_id, parent: *def }), |
497 | ) | ||
498 | } | ||
497 | } | 499 | } |
498 | } | 500 | } |
499 | Scope::ImplBlockScope(i) => { | 501 | Scope::ImplBlockScope(i) => { |
diff --git a/crates/ra_hir_def/src/type_ref.rs b/crates/ra_hir_def/src/type_ref.rs index 5f10e9a88..102fdb13d 100644 --- a/crates/ra_hir_def/src/type_ref.rs +++ b/crates/ra_hir_def/src/type_ref.rs | |||
@@ -124,6 +124,48 @@ impl TypeRef { | |||
124 | pub(crate) fn unit() -> TypeRef { | 124 | pub(crate) fn unit() -> TypeRef { |
125 | TypeRef::Tuple(Vec::new()) | 125 | TypeRef::Tuple(Vec::new()) |
126 | } | 126 | } |
127 | |||
128 | pub fn walk(&self, f: &mut impl FnMut(&TypeRef)) { | ||
129 | go(self, f); | ||
130 | |||
131 | fn go(type_ref: &TypeRef, f: &mut impl FnMut(&TypeRef)) { | ||
132 | f(type_ref); | ||
133 | match type_ref { | ||
134 | TypeRef::Fn(types) | TypeRef::Tuple(types) => types.iter().for_each(|t| go(t, f)), | ||
135 | TypeRef::RawPtr(type_ref, _) | ||
136 | | TypeRef::Reference(type_ref, _) | ||
137 | | TypeRef::Array(type_ref) | ||
138 | | TypeRef::Slice(type_ref) => go(&type_ref, f), | ||
139 | TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => { | ||
140 | for bound in bounds { | ||
141 | match bound { | ||
142 | TypeBound::Path(path) => go_path(path, f), | ||
143 | TypeBound::Error => (), | ||
144 | } | ||
145 | } | ||
146 | } | ||
147 | TypeRef::Path(path) => go_path(path, f), | ||
148 | TypeRef::Never | TypeRef::Placeholder | TypeRef::Error => {} | ||
149 | }; | ||
150 | } | ||
151 | |||
152 | fn go_path(path: &Path, f: &mut impl FnMut(&TypeRef)) { | ||
153 | if let Some(type_ref) = path.type_anchor() { | ||
154 | go(type_ref, f); | ||
155 | } | ||
156 | for segment in path.segments().iter() { | ||
157 | if let Some(args_and_bindings) = segment.args_and_bindings { | ||
158 | for arg in &args_and_bindings.args { | ||
159 | let crate::path::GenericArg::Type(type_ref) = arg; | ||
160 | go(type_ref, f); | ||
161 | } | ||
162 | for (_, type_ref) in &args_and_bindings.bindings { | ||
163 | go(type_ref, f); | ||
164 | } | ||
165 | } | ||
166 | } | ||
167 | } | ||
168 | } | ||
127 | } | 169 | } |
128 | 170 | ||
129 | pub(crate) fn type_bounds_from_ast(type_bounds_opt: Option<ast::TypeBoundList>) -> Vec<TypeBound> { | 171 | pub(crate) fn type_bounds_from_ast(type_bounds_opt: Option<ast::TypeBoundList>) -> Vec<TypeBound> { |
diff --git a/crates/ra_hir_expand/src/name.rs b/crates/ra_hir_expand/src/name.rs index b3fa1efba..b2e10f445 100644 --- a/crates/ra_hir_expand/src/name.rs +++ b/crates/ra_hir_expand/src/name.rs | |||
@@ -141,6 +141,8 @@ pub mod known { | |||
141 | macro_rules, | 141 | macro_rules, |
142 | // Components of known path (value or mod name) | 142 | // Components of known path (value or mod name) |
143 | std, | 143 | std, |
144 | core, | ||
145 | alloc, | ||
144 | iter, | 146 | iter, |
145 | ops, | 147 | ops, |
146 | future, | 148 | future, |
diff --git a/crates/ra_hir_ty/Cargo.toml b/crates/ra_hir_ty/Cargo.toml index 60793db44..f5484bf70 100644 --- a/crates/ra_hir_ty/Cargo.toml +++ b/crates/ra_hir_ty/Cargo.toml | |||
@@ -21,11 +21,11 @@ ra_prof = { path = "../ra_prof" } | |||
21 | ra_syntax = { path = "../ra_syntax" } | 21 | ra_syntax = { path = "../ra_syntax" } |
22 | test_utils = { path = "../test_utils" } | 22 | test_utils = { path = "../test_utils" } |
23 | 23 | ||
24 | chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" } | 24 | chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "af48f302a1f571b3ca418f7c5aa639a144a34f75" } |
25 | chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" } | 25 | chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "af48f302a1f571b3ca418f7c5aa639a144a34f75" } |
26 | chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" } | 26 | chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "af48f302a1f571b3ca418f7c5aa639a144a34f75" } |
27 | 27 | ||
28 | lalrpop-intern = "0.15.1" | 28 | lalrpop-intern = "0.15.1" |
29 | 29 | ||
30 | [dev-dependencies] | 30 | [dev-dependencies] |
31 | insta = "0.12.0" | 31 | insta = "0.13.0" |
diff --git a/crates/ra_hir_ty/src/db.rs b/crates/ra_hir_ty/src/db.rs index eb521c7a0..e9bfcfa17 100644 --- a/crates/ra_hir_ty/src/db.rs +++ b/crates/ra_hir_ty/src/db.rs | |||
@@ -3,17 +3,18 @@ | |||
3 | use std::sync::Arc; | 3 | use std::sync::Arc; |
4 | 4 | ||
5 | use hir_def::{ | 5 | use hir_def::{ |
6 | db::DefDatabase, DefWithBodyId, GenericDefId, ImplId, LocalStructFieldId, TraitId, VariantId, | 6 | db::DefDatabase, DefWithBodyId, GenericDefId, ImplId, LocalStructFieldId, TraitId, TypeParamId, |
7 | VariantId, | ||
7 | }; | 8 | }; |
8 | use ra_arena::map::ArenaMap; | 9 | use ra_arena::map::ArenaMap; |
9 | use ra_db::{salsa, CrateId}; | 10 | use ra_db::{impl_intern_key, salsa, CrateId}; |
10 | use ra_prof::profile; | 11 | use ra_prof::profile; |
11 | 12 | ||
12 | use crate::{ | 13 | use crate::{ |
13 | method_resolution::CrateImplBlocks, | 14 | method_resolution::CrateImplBlocks, |
14 | traits::{chalk, AssocTyValue, Impl}, | 15 | traits::{chalk, AssocTyValue, Impl}, |
15 | CallableDef, FnSig, GenericPredicate, InferenceResult, Substs, TraitRef, Ty, TyDefId, TypeCtor, | 16 | Binders, CallableDef, GenericPredicate, InferenceResult, PolyFnSig, Substs, TraitRef, Ty, |
16 | ValueTyDefId, | 17 | TyDefId, TypeCtor, ValueTyDefId, |
17 | }; | 18 | }; |
18 | 19 | ||
19 | #[salsa::query_group(HirDatabaseStorage)] | 20 | #[salsa::query_group(HirDatabaseStorage)] |
@@ -27,34 +28,33 @@ pub trait HirDatabase: DefDatabase { | |||
27 | 28 | ||
28 | #[salsa::invoke(crate::lower::ty_query)] | 29 | #[salsa::invoke(crate::lower::ty_query)] |
29 | #[salsa::cycle(crate::lower::ty_recover)] | 30 | #[salsa::cycle(crate::lower::ty_recover)] |
30 | fn ty(&self, def: TyDefId) -> Ty; | 31 | fn ty(&self, def: TyDefId) -> Binders<Ty>; |
31 | 32 | ||
32 | #[salsa::invoke(crate::lower::value_ty_query)] | 33 | #[salsa::invoke(crate::lower::value_ty_query)] |
33 | fn value_ty(&self, def: ValueTyDefId) -> Ty; | 34 | fn value_ty(&self, def: ValueTyDefId) -> Binders<Ty>; |
34 | 35 | ||
35 | #[salsa::invoke(crate::lower::impl_self_ty_query)] | 36 | #[salsa::invoke(crate::lower::impl_self_ty_query)] |
36 | #[salsa::cycle(crate::lower::impl_self_ty_recover)] | 37 | #[salsa::cycle(crate::lower::impl_self_ty_recover)] |
37 | fn impl_self_ty(&self, def: ImplId) -> Ty; | 38 | fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>; |
38 | 39 | ||
39 | #[salsa::invoke(crate::lower::impl_trait_query)] | 40 | #[salsa::invoke(crate::lower::impl_trait_query)] |
40 | fn impl_trait(&self, def: ImplId) -> Option<TraitRef>; | 41 | fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>; |
41 | 42 | ||
42 | #[salsa::invoke(crate::lower::field_types_query)] | 43 | #[salsa::invoke(crate::lower::field_types_query)] |
43 | fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalStructFieldId, Ty>>; | 44 | fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalStructFieldId, Binders<Ty>>>; |
44 | 45 | ||
45 | #[salsa::invoke(crate::callable_item_sig)] | 46 | #[salsa::invoke(crate::callable_item_sig)] |
46 | fn callable_item_signature(&self, def: CallableDef) -> FnSig; | 47 | fn callable_item_signature(&self, def: CallableDef) -> PolyFnSig; |
47 | 48 | ||
48 | #[salsa::invoke(crate::lower::generic_predicates_for_param_query)] | 49 | #[salsa::invoke(crate::lower::generic_predicates_for_param_query)] |
49 | #[salsa::cycle(crate::lower::generic_predicates_for_param_recover)] | 50 | #[salsa::cycle(crate::lower::generic_predicates_for_param_recover)] |
50 | fn generic_predicates_for_param( | 51 | fn generic_predicates_for_param( |
51 | &self, | 52 | &self, |
52 | def: GenericDefId, | 53 | param_id: TypeParamId, |
53 | param_idx: u32, | 54 | ) -> Arc<[Binders<GenericPredicate>]>; |
54 | ) -> Arc<[GenericPredicate]>; | ||
55 | 55 | ||
56 | #[salsa::invoke(crate::lower::generic_predicates_query)] | 56 | #[salsa::invoke(crate::lower::generic_predicates_query)] |
57 | fn generic_predicates(&self, def: GenericDefId) -> Arc<[GenericPredicate]>; | 57 | fn generic_predicates(&self, def: GenericDefId) -> Arc<[Binders<GenericPredicate>]>; |
58 | 58 | ||
59 | #[salsa::invoke(crate::lower::generic_defaults_query)] | 59 | #[salsa::invoke(crate::lower::generic_defaults_query)] |
60 | fn generic_defaults(&self, def: GenericDefId) -> Substs; | 60 | fn generic_defaults(&self, def: GenericDefId) -> Substs; |
@@ -77,6 +77,8 @@ pub trait HirDatabase: DefDatabase { | |||
77 | #[salsa::interned] | 77 | #[salsa::interned] |
78 | fn intern_type_ctor(&self, type_ctor: TypeCtor) -> crate::TypeCtorId; | 78 | fn intern_type_ctor(&self, type_ctor: TypeCtor) -> crate::TypeCtorId; |
79 | #[salsa::interned] | 79 | #[salsa::interned] |
80 | fn intern_type_param_id(&self, param_id: TypeParamId) -> GlobalTypeParamId; | ||
81 | #[salsa::interned] | ||
80 | fn intern_chalk_impl(&self, impl_: Impl) -> crate::traits::GlobalImplId; | 82 | fn intern_chalk_impl(&self, impl_: Impl) -> crate::traits::GlobalImplId; |
81 | #[salsa::interned] | 83 | #[salsa::interned] |
82 | fn intern_assoc_ty_value(&self, assoc_ty_value: AssocTyValue) -> crate::traits::AssocTyValueId; | 84 | fn intern_assoc_ty_value(&self, assoc_ty_value: AssocTyValue) -> crate::traits::AssocTyValueId; |
@@ -117,3 +119,7 @@ fn infer(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> { | |||
117 | fn hir_database_is_object_safe() { | 119 | fn hir_database_is_object_safe() { |
118 | fn _assert_object_safe(_: &dyn HirDatabase) {} | 120 | fn _assert_object_safe(_: &dyn HirDatabase) {} |
119 | } | 121 | } |
122 | |||
123 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | ||
124 | pub struct GlobalTypeParamId(salsa::InternId); | ||
125 | impl_intern_key!(GlobalTypeParamId); | ||
diff --git a/crates/ra_hir_ty/src/infer.rs b/crates/ra_hir_ty/src/infer.rs index e2eda3134..a9d958c8b 100644 --- a/crates/ra_hir_ty/src/infer.rs +++ b/crates/ra_hir_ty/src/infer.rs | |||
@@ -34,7 +34,6 @@ use hir_expand::{diagnostics::DiagnosticSink, name::name}; | |||
34 | use ra_arena::map::ArenaMap; | 34 | use ra_arena::map::ArenaMap; |
35 | use ra_prof::profile; | 35 | use ra_prof::profile; |
36 | use ra_syntax::SmolStr; | 36 | use ra_syntax::SmolStr; |
37 | use test_utils::tested_by; | ||
38 | 37 | ||
39 | use super::{ | 38 | use super::{ |
40 | primitive::{FloatTy, IntTy}, | 39 | primitive::{FloatTy, IntTy}, |
@@ -42,7 +41,9 @@ use super::{ | |||
42 | ApplicationTy, GenericPredicate, InEnvironment, ProjectionTy, Substs, TraitEnvironment, | 41 | ApplicationTy, GenericPredicate, InEnvironment, ProjectionTy, Substs, TraitEnvironment, |
43 | TraitRef, Ty, TypeCtor, TypeWalk, Uncertain, | 42 | TraitRef, Ty, TypeCtor, TypeWalk, Uncertain, |
44 | }; | 43 | }; |
45 | use crate::{db::HirDatabase, infer::diagnostics::InferenceDiagnostic}; | 44 | use crate::{ |
45 | db::HirDatabase, infer::diagnostics::InferenceDiagnostic, lower::ImplTraitLoweringMode, | ||
46 | }; | ||
46 | 47 | ||
47 | pub(crate) use unify::unify; | 48 | pub(crate) use unify::unify; |
48 | 49 | ||
@@ -271,38 +272,21 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
271 | self.result.diagnostics.push(diagnostic); | 272 | self.result.diagnostics.push(diagnostic); |
272 | } | 273 | } |
273 | 274 | ||
274 | fn make_ty(&mut self, type_ref: &TypeRef) -> Ty { | 275 | fn make_ty_with_mode( |
275 | let ty = Ty::from_hir( | 276 | &mut self, |
276 | self.db, | 277 | type_ref: &TypeRef, |
277 | // FIXME use right resolver for block | 278 | impl_trait_mode: ImplTraitLoweringMode, |
278 | &self.resolver, | 279 | ) -> Ty { |
279 | type_ref, | 280 | // FIXME use right resolver for block |
280 | ); | 281 | let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver) |
282 | .with_impl_trait_mode(impl_trait_mode); | ||
283 | let ty = Ty::from_hir(&ctx, type_ref); | ||
281 | let ty = self.insert_type_vars(ty); | 284 | let ty = self.insert_type_vars(ty); |
282 | self.normalize_associated_types_in(ty) | 285 | self.normalize_associated_types_in(ty) |
283 | } | 286 | } |
284 | 287 | ||
285 | /// Replaces `impl Trait` in `ty` by type variables and obligations for | 288 | fn make_ty(&mut self, type_ref: &TypeRef) -> Ty { |
286 | /// those variables. This is done for function arguments when calling a | 289 | self.make_ty_with_mode(type_ref, ImplTraitLoweringMode::Disallowed) |
287 | /// function, and for return types when inside the function body, i.e. in | ||
288 | /// the cases where the `impl Trait` is 'transparent'. In other cases, `impl | ||
289 | /// Trait` is represented by `Ty::Opaque`. | ||
290 | fn insert_vars_for_impl_trait(&mut self, ty: Ty) -> Ty { | ||
291 | ty.fold(&mut |ty| match ty { | ||
292 | Ty::Opaque(preds) => { | ||
293 | tested_by!(insert_vars_for_impl_trait); | ||
294 | let var = self.table.new_type_var(); | ||
295 | let var_subst = Substs::builder(1).push(var.clone()).build(); | ||
296 | self.obligations.extend( | ||
297 | preds | ||
298 | .iter() | ||
299 | .map(|pred| pred.clone().subst_bound_vars(&var_subst)) | ||
300 | .filter_map(Obligation::from_predicate), | ||
301 | ); | ||
302 | var | ||
303 | } | ||
304 | _ => ty, | ||
305 | }) | ||
306 | } | 290 | } |
307 | 291 | ||
308 | /// Replaces Ty::Unknown by a new type var, so we can maybe still infer it. | 292 | /// Replaces Ty::Unknown by a new type var, so we can maybe still infer it. |
@@ -446,19 +430,20 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
446 | None => return (Ty::Unknown, None), | 430 | None => return (Ty::Unknown, None), |
447 | }; | 431 | }; |
448 | let resolver = &self.resolver; | 432 | let resolver = &self.resolver; |
433 | let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver); | ||
449 | // FIXME: this should resolve assoc items as well, see this example: | 434 | // FIXME: this should resolve assoc items as well, see this example: |
450 | // https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521 | 435 | // https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521 |
451 | match resolver.resolve_path_in_type_ns_fully(self.db, path.mod_path()) { | 436 | match resolver.resolve_path_in_type_ns_fully(self.db, path.mod_path()) { |
452 | Some(TypeNs::AdtId(AdtId::StructId(strukt))) => { | 437 | Some(TypeNs::AdtId(AdtId::StructId(strukt))) => { |
453 | let substs = Ty::substs_from_path(self.db, resolver, path, strukt.into()); | 438 | let substs = Ty::substs_from_path(&ctx, path, strukt.into()); |
454 | let ty = self.db.ty(strukt.into()); | 439 | let ty = self.db.ty(strukt.into()); |
455 | let ty = self.insert_type_vars(ty.apply_substs(substs)); | 440 | let ty = self.insert_type_vars(ty.subst(&substs)); |
456 | (ty, Some(strukt.into())) | 441 | (ty, Some(strukt.into())) |
457 | } | 442 | } |
458 | Some(TypeNs::EnumVariantId(var)) => { | 443 | Some(TypeNs::EnumVariantId(var)) => { |
459 | let substs = Ty::substs_from_path(self.db, resolver, path, var.into()); | 444 | let substs = Ty::substs_from_path(&ctx, path, var.into()); |
460 | let ty = self.db.ty(var.parent.into()); | 445 | let ty = self.db.ty(var.parent.into()); |
461 | let ty = self.insert_type_vars(ty.apply_substs(substs)); | 446 | let ty = self.insert_type_vars(ty.subst(&substs)); |
462 | (ty, Some(var.into())) | 447 | (ty, Some(var.into())) |
463 | } | 448 | } |
464 | Some(_) | None => (Ty::Unknown, None), | 449 | Some(_) | None => (Ty::Unknown, None), |
@@ -471,13 +456,18 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
471 | 456 | ||
472 | fn collect_fn(&mut self, data: &FunctionData) { | 457 | fn collect_fn(&mut self, data: &FunctionData) { |
473 | let body = Arc::clone(&self.body); // avoid borrow checker problem | 458 | let body = Arc::clone(&self.body); // avoid borrow checker problem |
474 | for (type_ref, pat) in data.params.iter().zip(body.params.iter()) { | 459 | let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver) |
475 | let ty = self.make_ty(type_ref); | 460 | .with_impl_trait_mode(ImplTraitLoweringMode::Param); |
461 | let param_tys = | ||
462 | data.params.iter().map(|type_ref| Ty::from_hir(&ctx, type_ref)).collect::<Vec<_>>(); | ||
463 | for (ty, pat) in param_tys.into_iter().zip(body.params.iter()) { | ||
464 | let ty = self.insert_type_vars(ty); | ||
465 | let ty = self.normalize_associated_types_in(ty); | ||
476 | 466 | ||
477 | self.infer_pat(*pat, &ty, BindingMode::default()); | 467 | self.infer_pat(*pat, &ty, BindingMode::default()); |
478 | } | 468 | } |
479 | let return_ty = self.make_ty(&data.ret_type); | 469 | let return_ty = self.make_ty_with_mode(&data.ret_type, ImplTraitLoweringMode::Disallowed); // FIXME implement RPIT |
480 | self.return_ty = self.insert_vars_for_impl_trait(return_ty); | 470 | self.return_ty = return_ty; |
481 | } | 471 | } |
482 | 472 | ||
483 | fn infer_body(&mut self) { | 473 | fn infer_body(&mut self) { |
diff --git a/crates/ra_hir_ty/src/infer/coerce.rs b/crates/ra_hir_ty/src/infer/coerce.rs index 83c0c2c3f..f68a1439f 100644 --- a/crates/ra_hir_ty/src/infer/coerce.rs +++ b/crates/ra_hir_ty/src/infer/coerce.rs | |||
@@ -57,8 +57,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
57 | let trait_ref = db.impl_trait(impl_id)?; | 57 | let trait_ref = db.impl_trait(impl_id)?; |
58 | 58 | ||
59 | // `CoerseUnsized` has one generic parameter for the target type. | 59 | // `CoerseUnsized` has one generic parameter for the target type. |
60 | let cur_from_ty = trait_ref.substs.0.get(0)?; | 60 | let cur_from_ty = trait_ref.value.substs.0.get(0)?; |
61 | let cur_to_ty = trait_ref.substs.0.get(1)?; | 61 | let cur_to_ty = trait_ref.value.substs.0.get(1)?; |
62 | 62 | ||
63 | match (&cur_from_ty, cur_to_ty) { | 63 | match (&cur_from_ty, cur_to_ty) { |
64 | (ty_app!(ctor1, st1), ty_app!(ctor2, st2)) => { | 64 | (ty_app!(ctor1, st1), ty_app!(ctor2, st2)) => { |
@@ -66,9 +66,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
66 | // This works for smart-pointer-like coercion, which covers all impls from std. | 66 | // This works for smart-pointer-like coercion, which covers all impls from std. |
67 | st1.iter().zip(st2.iter()).enumerate().find_map(|(i, (ty1, ty2))| { | 67 | st1.iter().zip(st2.iter()).enumerate().find_map(|(i, (ty1, ty2))| { |
68 | match (ty1, ty2) { | 68 | match (ty1, ty2) { |
69 | (Ty::Param { idx: p1, .. }, Ty::Param { idx: p2, .. }) | 69 | (Ty::Bound(idx1), Ty::Bound(idx2)) if idx1 != idx2 => { |
70 | if p1 != p2 => | ||
71 | { | ||
72 | Some(((*ctor1, *ctor2), i)) | 70 | Some(((*ctor1, *ctor2), i)) |
73 | } | 71 | } |
74 | _ => None, | 72 | _ => None, |
@@ -256,8 +254,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
256 | let unsize_generic_index = { | 254 | let unsize_generic_index = { |
257 | let mut index = None; | 255 | let mut index = None; |
258 | let mut multiple_param = false; | 256 | let mut multiple_param = false; |
259 | field_tys[last_field_id].walk(&mut |ty| match ty { | 257 | field_tys[last_field_id].value.walk(&mut |ty| match ty { |
260 | &Ty::Param { idx, .. } => { | 258 | &Ty::Bound(idx) => { |
261 | if index.is_none() { | 259 | if index.is_none() { |
262 | index = Some(idx); | 260 | index = Some(idx); |
263 | } else if Some(idx) != index { | 261 | } else if Some(idx) != index { |
@@ -276,10 +274,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
276 | // Check other fields do not involve it. | 274 | // Check other fields do not involve it. |
277 | let mut multiple_used = false; | 275 | let mut multiple_used = false; |
278 | fields.for_each(|(field_id, _data)| { | 276 | fields.for_each(|(field_id, _data)| { |
279 | field_tys[field_id].walk(&mut |ty| match ty { | 277 | field_tys[field_id].value.walk(&mut |ty| match ty { |
280 | &Ty::Param { idx, .. } if idx == unsize_generic_index => { | 278 | &Ty::Bound(idx) if idx == unsize_generic_index => multiple_used = true, |
281 | multiple_used = true | ||
282 | } | ||
283 | _ => {} | 279 | _ => {} |
284 | }) | 280 | }) |
285 | }); | 281 | }); |
diff --git a/crates/ra_hir_ty/src/infer/expr.rs b/crates/ra_hir_ty/src/infer/expr.rs index 31259a01d..3c9c02d03 100644 --- a/crates/ra_hir_ty/src/infer/expr.rs +++ b/crates/ra_hir_ty/src/infer/expr.rs | |||
@@ -10,7 +10,7 @@ use hir_def::{ | |||
10 | resolver::resolver_for_expr, | 10 | resolver::resolver_for_expr, |
11 | AdtId, AssocContainerId, Lookup, StructFieldId, | 11 | AdtId, AssocContainerId, Lookup, StructFieldId, |
12 | }; | 12 | }; |
13 | use hir_expand::name::{name, Name}; | 13 | use hir_expand::name::Name; |
14 | use ra_syntax::ast::RangeOp; | 14 | use ra_syntax::ast::RangeOp; |
15 | 15 | ||
16 | use crate::{ | 16 | use crate::{ |
@@ -19,8 +19,8 @@ use crate::{ | |||
19 | method_resolution, op, | 19 | method_resolution, op, |
20 | traits::InEnvironment, | 20 | traits::InEnvironment, |
21 | utils::{generics, variant_data, Generics}, | 21 | utils::{generics, variant_data, Generics}, |
22 | ApplicationTy, CallableDef, InferTy, IntTy, Mutability, Obligation, Substs, TraitRef, Ty, | 22 | ApplicationTy, Binders, CallableDef, InferTy, IntTy, Mutability, Obligation, Substs, TraitRef, |
23 | TypeCtor, TypeWalk, Uncertain, | 23 | Ty, TypeCtor, Uncertain, |
24 | }; | 24 | }; |
25 | 25 | ||
26 | use super::{BindingMode, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch}; | 26 | use super::{BindingMode, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch}; |
@@ -236,8 +236,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
236 | self.result.record_field_resolutions.insert(field.expr, field_def); | 236 | self.result.record_field_resolutions.insert(field.expr, field_def); |
237 | } | 237 | } |
238 | let field_ty = field_def | 238 | let field_ty = field_def |
239 | .map_or(Ty::Unknown, |it| field_types[it.local_id].clone()) | 239 | .map_or(Ty::Unknown, |it| field_types[it.local_id].clone().subst(&substs)); |
240 | .subst(&substs); | ||
241 | self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty)); | 240 | self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty)); |
242 | } | 241 | } |
243 | if let Some(expr) = spread { | 242 | if let Some(expr) = spread { |
@@ -588,10 +587,10 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
588 | self.write_method_resolution(tgt_expr, func); | 587 | self.write_method_resolution(tgt_expr, func); |
589 | (ty, self.db.value_ty(func.into()), Some(generics(self.db, func.into()))) | 588 | (ty, self.db.value_ty(func.into()), Some(generics(self.db, func.into()))) |
590 | } | 589 | } |
591 | None => (receiver_ty, Ty::Unknown, None), | 590 | None => (receiver_ty, Binders::new(0, Ty::Unknown), None), |
592 | }; | 591 | }; |
593 | let substs = self.substs_for_method_call(def_generics, generic_args, &derefed_receiver_ty); | 592 | let substs = self.substs_for_method_call(def_generics, generic_args, &derefed_receiver_ty); |
594 | let method_ty = method_ty.apply_substs(substs); | 593 | let method_ty = method_ty.subst(&substs); |
595 | let method_ty = self.insert_type_vars(method_ty); | 594 | let method_ty = self.insert_type_vars(method_ty); |
596 | self.register_obligations_for_call(&method_ty); | 595 | self.register_obligations_for_call(&method_ty); |
597 | let (expected_receiver_ty, param_tys, ret_ty) = match method_ty.callable_sig(self.db) { | 596 | let (expected_receiver_ty, param_tys, ret_ty) = match method_ty.callable_sig(self.db) { |
@@ -635,7 +634,6 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
635 | continue; | 634 | continue; |
636 | } | 635 | } |
637 | 636 | ||
638 | let param_ty = self.insert_vars_for_impl_trait(param_ty); | ||
639 | let param_ty = self.normalize_associated_types_in(param_ty); | 637 | let param_ty = self.normalize_associated_types_in(param_ty); |
640 | self.infer_expr_coerce(arg, &Expectation::has_type(param_ty.clone())); | 638 | self.infer_expr_coerce(arg, &Expectation::has_type(param_ty.clone())); |
641 | } | 639 | } |
@@ -648,13 +646,15 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
648 | generic_args: Option<&GenericArgs>, | 646 | generic_args: Option<&GenericArgs>, |
649 | receiver_ty: &Ty, | 647 | receiver_ty: &Ty, |
650 | ) -> Substs { | 648 | ) -> Substs { |
651 | let (total_len, _parent_len, child_len) = | 649 | let (parent_params, self_params, type_params, impl_trait_params) = |
652 | def_generics.as_ref().map_or((0, 0, 0), |g| g.len_split()); | 650 | def_generics.as_ref().map_or((0, 0, 0, 0), |g| g.provenance_split()); |
651 | assert_eq!(self_params, 0); // method shouldn't have another Self param | ||
652 | let total_len = parent_params + type_params + impl_trait_params; | ||
653 | let mut substs = Vec::with_capacity(total_len); | 653 | let mut substs = Vec::with_capacity(total_len); |
654 | // Parent arguments are unknown, except for the receiver type | 654 | // Parent arguments are unknown, except for the receiver type |
655 | if let Some(parent_generics) = def_generics.as_ref().map(|p| p.iter_parent()) { | 655 | if let Some(parent_generics) = def_generics.as_ref().map(|p| p.iter_parent()) { |
656 | for (_id, param) in parent_generics { | 656 | for (_id, param) in parent_generics { |
657 | if param.name == name![Self] { | 657 | if param.provenance == hir_def::generics::TypeParamProvenance::TraitSelf { |
658 | substs.push(receiver_ty.clone()); | 658 | substs.push(receiver_ty.clone()); |
659 | } else { | 659 | } else { |
660 | substs.push(Ty::Unknown); | 660 | substs.push(Ty::Unknown); |
@@ -664,7 +664,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
664 | // handle provided type arguments | 664 | // handle provided type arguments |
665 | if let Some(generic_args) = generic_args { | 665 | if let Some(generic_args) = generic_args { |
666 | // if args are provided, it should be all of them, but we can't rely on that | 666 | // if args are provided, it should be all of them, but we can't rely on that |
667 | for arg in generic_args.args.iter().take(child_len) { | 667 | for arg in generic_args.args.iter().take(type_params) { |
668 | match arg { | 668 | match arg { |
669 | GenericArg::Type(type_ref) => { | 669 | GenericArg::Type(type_ref) => { |
670 | let ty = self.make_ty(type_ref); | 670 | let ty = self.make_ty(type_ref); |
diff --git a/crates/ra_hir_ty/src/infer/pat.rs b/crates/ra_hir_ty/src/infer/pat.rs index a14662884..e7283f24c 100644 --- a/crates/ra_hir_ty/src/infer/pat.rs +++ b/crates/ra_hir_ty/src/infer/pat.rs | |||
@@ -12,7 +12,7 @@ use hir_expand::name::Name; | |||
12 | use test_utils::tested_by; | 12 | use test_utils::tested_by; |
13 | 13 | ||
14 | use super::{BindingMode, InferenceContext}; | 14 | use super::{BindingMode, InferenceContext}; |
15 | use crate::{db::HirDatabase, utils::variant_data, Substs, Ty, TypeCtor, TypeWalk}; | 15 | use crate::{db::HirDatabase, utils::variant_data, Substs, Ty, TypeCtor}; |
16 | 16 | ||
17 | impl<'a, D: HirDatabase> InferenceContext<'a, D> { | 17 | impl<'a, D: HirDatabase> InferenceContext<'a, D> { |
18 | fn infer_tuple_struct_pat( | 18 | fn infer_tuple_struct_pat( |
@@ -34,8 +34,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
34 | let expected_ty = var_data | 34 | let expected_ty = var_data |
35 | .as_ref() | 35 | .as_ref() |
36 | .and_then(|d| d.field(&Name::new_tuple_field(i))) | 36 | .and_then(|d| d.field(&Name::new_tuple_field(i))) |
37 | .map_or(Ty::Unknown, |field| field_tys[field].clone()) | 37 | .map_or(Ty::Unknown, |field| field_tys[field].clone().subst(&substs)); |
38 | .subst(&substs); | ||
39 | let expected_ty = self.normalize_associated_types_in(expected_ty); | 38 | let expected_ty = self.normalize_associated_types_in(expected_ty); |
40 | self.infer_pat(subpat, &expected_ty, default_bm); | 39 | self.infer_pat(subpat, &expected_ty, default_bm); |
41 | } | 40 | } |
@@ -65,7 +64,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
65 | for subpat in subpats { | 64 | for subpat in subpats { |
66 | let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name)); | 65 | let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name)); |
67 | let expected_ty = | 66 | let expected_ty = |
68 | matching_field.map_or(Ty::Unknown, |field| field_tys[field].clone()).subst(&substs); | 67 | matching_field.map_or(Ty::Unknown, |field| field_tys[field].clone().subst(&substs)); |
69 | let expected_ty = self.normalize_associated_types_in(expected_ty); | 68 | let expected_ty = self.normalize_associated_types_in(expected_ty); |
70 | self.infer_pat(subpat.pat, &expected_ty, default_bm); | 69 | self.infer_pat(subpat.pat, &expected_ty, default_bm); |
71 | } | 70 | } |
diff --git a/crates/ra_hir_ty/src/infer/path.rs b/crates/ra_hir_ty/src/infer/path.rs index 2c1d4831d..686ce7a21 100644 --- a/crates/ra_hir_ty/src/infer/path.rs +++ b/crates/ra_hir_ty/src/infer/path.rs | |||
@@ -9,9 +9,9 @@ use hir_def::{ | |||
9 | }; | 9 | }; |
10 | use hir_expand::name::Name; | 10 | use hir_expand::name::Name; |
11 | 11 | ||
12 | use crate::{db::HirDatabase, method_resolution, Substs, Ty, TypeWalk, ValueTyDefId}; | 12 | use crate::{db::HirDatabase, method_resolution, Substs, Ty, ValueTyDefId}; |
13 | 13 | ||
14 | use super::{ExprOrPatId, InferenceContext, TraitEnvironment, TraitRef}; | 14 | use super::{ExprOrPatId, InferenceContext, TraitRef}; |
15 | 15 | ||
16 | impl<'a, D: HirDatabase> InferenceContext<'a, D> { | 16 | impl<'a, D: HirDatabase> InferenceContext<'a, D> { |
17 | pub(super) fn infer_path( | 17 | pub(super) fn infer_path( |
@@ -39,7 +39,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
39 | } | 39 | } |
40 | let ty = self.make_ty(type_ref); | 40 | let ty = self.make_ty(type_ref); |
41 | let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1); | 41 | let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1); |
42 | let ty = Ty::from_type_relative_path(self.db, resolver, ty, remaining_segments_for_ty); | 42 | let ctx = crate::lower::TyLoweringContext::new(self.db, &resolver); |
43 | let ty = Ty::from_type_relative_path(&ctx, ty, remaining_segments_for_ty); | ||
43 | self.resolve_ty_assoc_item( | 44 | self.resolve_ty_assoc_item( |
44 | ty, | 45 | ty, |
45 | &path.segments().last().expect("path had at least one segment").name, | 46 | &path.segments().last().expect("path had at least one segment").name, |
@@ -69,12 +70,16 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
69 | ValueNs::EnumVariantId(it) => it.into(), | 70 | ValueNs::EnumVariantId(it) => it.into(), |
70 | }; | 71 | }; |
71 | 72 | ||
72 | let mut ty = self.db.value_ty(typable); | 73 | let ty = self.db.value_ty(typable); |
73 | if let Some(self_subst) = self_subst { | 74 | // self_subst is just for the parent |
74 | ty = ty.subst(&self_subst); | 75 | let parent_substs = self_subst.unwrap_or_else(Substs::empty); |
75 | } | 76 | let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver); |
76 | let substs = Ty::substs_from_path(self.db, &self.resolver, path, typable); | 77 | let substs = Ty::substs_from_path(&ctx, path, typable); |
77 | let ty = ty.subst(&substs); | 78 | let full_substs = Substs::builder(substs.len()) |
79 | .use_parent_substs(&parent_substs) | ||
80 | .fill(substs.0[parent_substs.len()..].iter().cloned()) | ||
81 | .build(); | ||
82 | let ty = ty.subst(&full_substs); | ||
78 | Some(ty) | 83 | Some(ty) |
79 | } | 84 | } |
80 | 85 | ||
@@ -98,13 +103,9 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
98 | (TypeNs::TraitId(trait_), true) => { | 103 | (TypeNs::TraitId(trait_), true) => { |
99 | let segment = | 104 | let segment = |
100 | remaining_segments.last().expect("there should be at least one segment here"); | 105 | remaining_segments.last().expect("there should be at least one segment here"); |
101 | let trait_ref = TraitRef::from_resolved_path( | 106 | let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver); |
102 | self.db, | 107 | let trait_ref = |
103 | &self.resolver, | 108 | TraitRef::from_resolved_path(&ctx, trait_.into(), resolved_segment, None); |
104 | trait_.into(), | ||
105 | resolved_segment, | ||
106 | None, | ||
107 | ); | ||
108 | self.resolve_trait_assoc_item(trait_ref, segment, id) | 109 | self.resolve_trait_assoc_item(trait_ref, segment, id) |
109 | } | 110 | } |
110 | (def, _) => { | 111 | (def, _) => { |
@@ -114,9 +115,9 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
114 | // as Iterator>::Item::default`) | 115 | // as Iterator>::Item::default`) |
115 | let remaining_segments_for_ty = | 116 | let remaining_segments_for_ty = |
116 | remaining_segments.take(remaining_segments.len() - 1); | 117 | remaining_segments.take(remaining_segments.len() - 1); |
118 | let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver); | ||
117 | let ty = Ty::from_partly_resolved_hir_path( | 119 | let ty = Ty::from_partly_resolved_hir_path( |
118 | self.db, | 120 | &ctx, |
119 | &self.resolver, | ||
120 | def, | 121 | def, |
121 | resolved_segment, | 122 | resolved_segment, |
122 | remaining_segments_for_ty, | 123 | remaining_segments_for_ty, |
@@ -173,13 +174,9 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
173 | AssocItemId::ConstId(c) => ValueNs::ConstId(c), | 174 | AssocItemId::ConstId(c) => ValueNs::ConstId(c), |
174 | AssocItemId::TypeAliasId(_) => unreachable!(), | 175 | AssocItemId::TypeAliasId(_) => unreachable!(), |
175 | }; | 176 | }; |
176 | let substs = Substs::build_for_def(self.db, item) | ||
177 | .use_parent_substs(&trait_ref.substs) | ||
178 | .fill_with_params() | ||
179 | .build(); | ||
180 | 177 | ||
181 | self.write_assoc_resolution(id, item); | 178 | self.write_assoc_resolution(id, item); |
182 | Some((def, Some(substs))) | 179 | Some((def, Some(trait_ref.substs))) |
183 | } | 180 | } |
184 | 181 | ||
185 | fn resolve_ty_assoc_item( | 182 | fn resolve_ty_assoc_item( |
@@ -193,14 +190,13 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
193 | } | 190 | } |
194 | 191 | ||
195 | let canonical_ty = self.canonicalizer().canonicalize_ty(ty.clone()); | 192 | let canonical_ty = self.canonicalizer().canonicalize_ty(ty.clone()); |
196 | let env = TraitEnvironment::lower(self.db, &self.resolver); | ||
197 | let krate = self.resolver.krate()?; | 193 | let krate = self.resolver.krate()?; |
198 | let traits_in_scope = self.resolver.traits_in_scope(self.db); | 194 | let traits_in_scope = self.resolver.traits_in_scope(self.db); |
199 | 195 | ||
200 | method_resolution::iterate_method_candidates( | 196 | method_resolution::iterate_method_candidates( |
201 | &canonical_ty.value, | 197 | &canonical_ty.value, |
202 | self.db, | 198 | self.db, |
203 | env, | 199 | self.trait_env.clone(), |
204 | krate, | 200 | krate, |
205 | &traits_in_scope, | 201 | &traits_in_scope, |
206 | Some(name), | 202 | Some(name), |
@@ -219,12 +215,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
219 | .fill(iter::repeat_with(|| self.table.new_type_var())) | 215 | .fill(iter::repeat_with(|| self.table.new_type_var())) |
220 | .build(); | 216 | .build(); |
221 | let impl_self_ty = self.db.impl_self_ty(impl_id).subst(&impl_substs); | 217 | let impl_self_ty = self.db.impl_self_ty(impl_id).subst(&impl_substs); |
222 | let substs = Substs::build_for_def(self.db, item) | ||
223 | .use_parent_substs(&impl_substs) | ||
224 | .fill_with_params() | ||
225 | .build(); | ||
226 | self.unify(&impl_self_ty, &ty); | 218 | self.unify(&impl_self_ty, &ty); |
227 | Some(substs) | 219 | Some(impl_substs) |
228 | } | 220 | } |
229 | AssocContainerId::TraitId(trait_) => { | 221 | AssocContainerId::TraitId(trait_) => { |
230 | // we're picking this method | 222 | // we're picking this method |
@@ -232,15 +224,11 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
232 | .push(ty.clone()) | 224 | .push(ty.clone()) |
233 | .fill(std::iter::repeat_with(|| self.table.new_type_var())) | 225 | .fill(std::iter::repeat_with(|| self.table.new_type_var())) |
234 | .build(); | 226 | .build(); |
235 | let substs = Substs::build_for_def(self.db, item) | ||
236 | .use_parent_substs(&trait_substs) | ||
237 | .fill_with_params() | ||
238 | .build(); | ||
239 | self.obligations.push(super::Obligation::Trait(TraitRef { | 227 | self.obligations.push(super::Obligation::Trait(TraitRef { |
240 | trait_, | 228 | trait_, |
241 | substs: trait_substs, | 229 | substs: trait_substs.clone(), |
242 | })); | 230 | })); |
243 | Some(substs) | 231 | Some(trait_substs) |
244 | } | 232 | } |
245 | AssocContainerId::ContainerId(_) => None, | 233 | AssocContainerId::ContainerId(_) => None, |
246 | }; | 234 | }; |
diff --git a/crates/ra_hir_ty/src/lib.rs b/crates/ra_hir_ty/src/lib.rs index 908e4862d..c5fe18c85 100644 --- a/crates/ra_hir_ty/src/lib.rs +++ b/crates/ra_hir_ty/src/lib.rs | |||
@@ -44,8 +44,8 @@ use std::sync::Arc; | |||
44 | use std::{fmt, iter, mem}; | 44 | use std::{fmt, iter, mem}; |
45 | 45 | ||
46 | use hir_def::{ | 46 | use hir_def::{ |
47 | expr::ExprId, type_ref::Mutability, AdtId, AssocContainerId, DefWithBodyId, GenericDefId, | 47 | expr::ExprId, generics::TypeParamProvenance, type_ref::Mutability, AdtId, AssocContainerId, |
48 | HasModule, Lookup, TraitId, TypeAliasId, | 48 | DefWithBodyId, GenericDefId, HasModule, Lookup, TraitId, TypeAliasId, TypeParamId, |
49 | }; | 49 | }; |
50 | use hir_expand::name::Name; | 50 | use hir_expand::name::Name; |
51 | use ra_db::{impl_intern_key, salsa, CrateId}; | 51 | use ra_db::{impl_intern_key, salsa, CrateId}; |
@@ -60,7 +60,9 @@ use display::{HirDisplay, HirFormatter}; | |||
60 | pub use autoderef::autoderef; | 60 | pub use autoderef::autoderef; |
61 | pub use infer::{do_infer_query, InferTy, InferenceResult}; | 61 | pub use infer::{do_infer_query, InferTy, InferenceResult}; |
62 | pub use lower::CallableDef; | 62 | pub use lower::CallableDef; |
63 | pub use lower::{callable_item_sig, TyDefId, ValueTyDefId}; | 63 | pub use lower::{ |
64 | callable_item_sig, ImplTraitLoweringMode, TyDefId, TyLoweringContext, ValueTyDefId, | ||
65 | }; | ||
64 | pub use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment}; | 66 | pub use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment}; |
65 | 67 | ||
66 | /// A type constructor or type name: this might be something like the primitive | 68 | /// A type constructor or type name: this might be something like the primitive |
@@ -285,22 +287,20 @@ pub enum Ty { | |||
285 | /// trait and all its parameters are fully known. | 287 | /// trait and all its parameters are fully known. |
286 | Projection(ProjectionTy), | 288 | Projection(ProjectionTy), |
287 | 289 | ||
288 | /// A type parameter; for example, `T` in `fn f<T>(x: T) {} | 290 | /// A placeholder for a type parameter; for example, `T` in `fn f<T>(x: T) |
289 | Param { | 291 | /// {}` when we're type-checking the body of that function. In this |
290 | /// The index of the parameter (starting with parameters from the | 292 | /// situation, we know this stands for *some* type, but don't know the exact |
291 | /// surrounding impl, then the current function). | 293 | /// type. |
292 | idx: u32, | 294 | Param(TypeParamId), |
293 | /// The name of the parameter, for displaying. | 295 | |
294 | // FIXME get rid of this | 296 | /// A bound type variable. This is used in various places: when representing |
295 | name: Name, | 297 | /// some polymorphic type like the type of function `fn f<T>`, the type |
296 | }, | 298 | /// parameters get turned into variables; during trait resolution, inference |
297 | 299 | /// variables get turned into bound variables and back; and in `Dyn` the | |
298 | /// A bound type variable. Used during trait resolution to represent Chalk | 300 | /// `Self` type is represented with a bound variable as well. |
299 | /// variables, and in `Dyn` and `Opaque` bounds to represent the `Self` type. | ||
300 | Bound(u32), | 301 | Bound(u32), |
301 | 302 | ||
302 | /// A type variable used during type checking. Not to be confused with a | 303 | /// A type variable used during type checking. |
303 | /// type parameter. | ||
304 | Infer(InferTy), | 304 | Infer(InferTy), |
305 | 305 | ||
306 | /// A trait object (`dyn Trait` or bare `Trait` in pre-2018 Rust). | 306 | /// A trait object (`dyn Trait` or bare `Trait` in pre-2018 Rust). |
@@ -364,15 +364,19 @@ impl Substs { | |||
364 | } | 364 | } |
365 | 365 | ||
366 | /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`). | 366 | /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`). |
367 | pub(crate) fn identity(generic_params: &Generics) -> Substs { | 367 | pub(crate) fn type_params_for_generics(generic_params: &Generics) -> Substs { |
368 | Substs( | 368 | Substs(generic_params.iter().map(|(id, _)| Ty::Param(id)).collect()) |
369 | generic_params.iter().map(|(idx, p)| Ty::Param { idx, name: p.name.clone() }).collect(), | 369 | } |
370 | ) | 370 | |
371 | /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`). | ||
372 | pub fn type_params(db: &impl HirDatabase, def: impl Into<GenericDefId>) -> Substs { | ||
373 | let params = generics(db, def.into()); | ||
374 | Substs::type_params_for_generics(¶ms) | ||
371 | } | 375 | } |
372 | 376 | ||
373 | /// Return Substs that replace each parameter by a bound variable. | 377 | /// Return Substs that replace each parameter by a bound variable. |
374 | pub(crate) fn bound_vars(generic_params: &Generics) -> Substs { | 378 | pub(crate) fn bound_vars(generic_params: &Generics) -> Substs { |
375 | Substs(generic_params.iter().map(|(idx, _p)| Ty::Bound(idx)).collect()) | 379 | Substs(generic_params.iter().enumerate().map(|(idx, _)| Ty::Bound(idx as u32)).collect()) |
376 | } | 380 | } |
377 | 381 | ||
378 | pub fn build_for_def(db: &impl HirDatabase, def: impl Into<GenericDefId>) -> SubstsBuilder { | 382 | pub fn build_for_def(db: &impl HirDatabase, def: impl Into<GenericDefId>) -> SubstsBuilder { |
@@ -420,11 +424,6 @@ impl SubstsBuilder { | |||
420 | self.fill((starting_from..).map(Ty::Bound)) | 424 | self.fill((starting_from..).map(Ty::Bound)) |
421 | } | 425 | } |
422 | 426 | ||
423 | pub fn fill_with_params(self) -> Self { | ||
424 | let start = self.vec.len() as u32; | ||
425 | self.fill((start..).map(|idx| Ty::Param { idx, name: Name::missing() })) | ||
426 | } | ||
427 | |||
428 | pub fn fill_with_unknown(self) -> Self { | 427 | pub fn fill_with_unknown(self) -> Self { |
429 | self.fill(iter::repeat(Ty::Unknown)) | 428 | self.fill(iter::repeat(Ty::Unknown)) |
430 | } | 429 | } |
@@ -451,6 +450,32 @@ impl Deref for Substs { | |||
451 | } | 450 | } |
452 | } | 451 | } |
453 | 452 | ||
453 | #[derive(Copy, Clone, PartialEq, Eq, Debug)] | ||
454 | pub struct Binders<T> { | ||
455 | pub num_binders: usize, | ||
456 | pub value: T, | ||
457 | } | ||
458 | |||
459 | impl<T> Binders<T> { | ||
460 | pub fn new(num_binders: usize, value: T) -> Self { | ||
461 | Self { num_binders, value } | ||
462 | } | ||
463 | } | ||
464 | |||
465 | impl<T: TypeWalk> Binders<T> { | ||
466 | /// Substitutes all variables. | ||
467 | pub fn subst(self, subst: &Substs) -> T { | ||
468 | assert_eq!(subst.len(), self.num_binders); | ||
469 | self.value.subst_bound_vars(subst) | ||
470 | } | ||
471 | |||
472 | /// Substitutes just a prefix of the variables (shifting the rest). | ||
473 | pub fn subst_prefix(self, subst: &Substs) -> Binders<T> { | ||
474 | assert!(subst.len() < self.num_binders); | ||
475 | Binders::new(self.num_binders - subst.len(), self.value.subst_bound_vars(subst)) | ||
476 | } | ||
477 | } | ||
478 | |||
454 | /// A trait with type parameters. This includes the `Self`, so this represents a concrete type implementing the trait. | 479 | /// A trait with type parameters. This includes the `Self`, so this represents a concrete type implementing the trait. |
455 | /// Name to be bikeshedded: TraitBound? TraitImplements? | 480 | /// Name to be bikeshedded: TraitBound? TraitImplements? |
456 | #[derive(Clone, PartialEq, Eq, Debug, Hash)] | 481 | #[derive(Clone, PartialEq, Eq, Debug, Hash)] |
@@ -551,6 +576,9 @@ pub struct FnSig { | |||
551 | params_and_return: Arc<[Ty]>, | 576 | params_and_return: Arc<[Ty]>, |
552 | } | 577 | } |
553 | 578 | ||
579 | /// A polymorphic function signature. | ||
580 | pub type PolyFnSig = Binders<FnSig>; | ||
581 | |||
554 | impl FnSig { | 582 | impl FnSig { |
555 | pub fn from_params_and_return(mut params: Vec<Ty>, ret: Ty) -> FnSig { | 583 | pub fn from_params_and_return(mut params: Vec<Ty>, ret: Ty) -> FnSig { |
556 | params.push(ret); | 584 | params.push(ret); |
@@ -730,22 +758,7 @@ pub trait TypeWalk { | |||
730 | self | 758 | self |
731 | } | 759 | } |
732 | 760 | ||
733 | /// Replaces type parameters in this type using the given `Substs`. (So e.g. | 761 | /// Substitutes `Ty::Bound` vars with the given substitution. |
734 | /// if `self` is `&[T]`, where type parameter T has index 0, and the | ||
735 | /// `Substs` contain `u32` at index 0, we'll have `&[u32]` afterwards.) | ||
736 | fn subst(self, substs: &Substs) -> Self | ||
737 | where | ||
738 | Self: Sized, | ||
739 | { | ||
740 | self.fold(&mut |ty| match ty { | ||
741 | Ty::Param { idx, name } => { | ||
742 | substs.get(idx as usize).cloned().unwrap_or(Ty::Param { idx, name }) | ||
743 | } | ||
744 | ty => ty, | ||
745 | }) | ||
746 | } | ||
747 | |||
748 | /// Substitutes `Ty::Bound` vars (as opposed to type parameters). | ||
749 | fn subst_bound_vars(mut self, substs: &Substs) -> Self | 762 | fn subst_bound_vars(mut self, substs: &Substs) -> Self |
750 | where | 763 | where |
751 | Self: Sized, | 764 | Self: Sized, |
@@ -755,6 +768,9 @@ pub trait TypeWalk { | |||
755 | &mut Ty::Bound(idx) => { | 768 | &mut Ty::Bound(idx) => { |
756 | if idx as usize >= binders && (idx as usize - binders) < substs.len() { | 769 | if idx as usize >= binders && (idx as usize - binders) < substs.len() { |
757 | *ty = substs.0[idx as usize - binders].clone(); | 770 | *ty = substs.0[idx as usize - binders].clone(); |
771 | } else if idx as usize >= binders + substs.len() { | ||
772 | // shift free binders | ||
773 | *ty = Ty::Bound(idx - substs.len() as u32); | ||
758 | } | 774 | } |
759 | } | 775 | } |
760 | _ => {} | 776 | _ => {} |
@@ -847,7 +863,7 @@ impl HirDisplay for ApplicationTy { | |||
847 | } | 863 | } |
848 | TypeCtor::Array => { | 864 | TypeCtor::Array => { |
849 | let t = self.parameters.as_single(); | 865 | let t = self.parameters.as_single(); |
850 | write!(f, "[{};_]", t.display(f.db))?; | 866 | write!(f, "[{}; _]", t.display(f.db))?; |
851 | } | 867 | } |
852 | TypeCtor::RawPtr(m) => { | 868 | TypeCtor::RawPtr(m) => { |
853 | let t = self.parameters.as_single(); | 869 | let t = self.parameters.as_single(); |
@@ -880,7 +896,7 @@ impl HirDisplay for ApplicationTy { | |||
880 | write!(f, ") -> {}", sig.ret().display(f.db))?; | 896 | write!(f, ") -> {}", sig.ret().display(f.db))?; |
881 | } | 897 | } |
882 | TypeCtor::FnDef(def) => { | 898 | TypeCtor::FnDef(def) => { |
883 | let sig = f.db.callable_item_signature(def); | 899 | let sig = f.db.callable_item_signature(def).subst(&self.parameters); |
884 | let name = match def { | 900 | let name = match def { |
885 | CallableDef::FunctionId(ff) => f.db.function_data(ff).name.clone(), | 901 | CallableDef::FunctionId(ff) => f.db.function_data(ff).name.clone(), |
886 | CallableDef::StructId(s) => f.db.struct_data(s).name.clone(), | 902 | CallableDef::StructId(s) => f.db.struct_data(s).name.clone(), |
@@ -896,9 +912,16 @@ impl HirDisplay for ApplicationTy { | |||
896 | } | 912 | } |
897 | } | 913 | } |
898 | if self.parameters.len() > 0 { | 914 | if self.parameters.len() > 0 { |
899 | write!(f, "<")?; | 915 | let generics = generics(f.db, def.into()); |
900 | f.write_joined(&*self.parameters.0, ", ")?; | 916 | let (parent_params, self_param, type_params, _impl_trait_params) = |
901 | write!(f, ">")?; | 917 | generics.provenance_split(); |
918 | let total_len = parent_params + self_param + type_params; | ||
919 | // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self? | ||
920 | if total_len > 0 { | ||
921 | write!(f, "<")?; | ||
922 | f.write_joined(&self.parameters.0[..total_len], ", ")?; | ||
923 | write!(f, ">")?; | ||
924 | } | ||
902 | } | 925 | } |
903 | write!(f, "(")?; | 926 | write!(f, "(")?; |
904 | f.write_joined(sig.params(), ", ")?; | 927 | f.write_joined(sig.params(), ", ")?; |
@@ -1009,7 +1032,24 @@ impl HirDisplay for Ty { | |||
1009 | match self { | 1032 | match self { |
1010 | Ty::Apply(a_ty) => a_ty.hir_fmt(f)?, | 1033 | Ty::Apply(a_ty) => a_ty.hir_fmt(f)?, |
1011 | Ty::Projection(p_ty) => p_ty.hir_fmt(f)?, | 1034 | Ty::Projection(p_ty) => p_ty.hir_fmt(f)?, |
1012 | Ty::Param { name, .. } => write!(f, "{}", name)?, | 1035 | Ty::Param(id) => { |
1036 | let generics = generics(f.db, id.parent); | ||
1037 | let param_data = &generics.params.types[id.local_id]; | ||
1038 | match param_data.provenance { | ||
1039 | TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => { | ||
1040 | write!(f, "{}", param_data.name.clone().unwrap_or_else(Name::missing))? | ||
1041 | } | ||
1042 | TypeParamProvenance::ArgumentImplTrait => { | ||
1043 | write!(f, "impl ")?; | ||
1044 | let bounds = f.db.generic_predicates_for_param(*id); | ||
1045 | let substs = Substs::type_params_for_generics(&generics); | ||
1046 | write_bounds_like_dyn_trait( | ||
1047 | &bounds.iter().map(|b| b.clone().subst(&substs)).collect::<Vec<_>>(), | ||
1048 | f, | ||
1049 | )?; | ||
1050 | } | ||
1051 | } | ||
1052 | } | ||
1013 | Ty::Bound(idx) => write!(f, "?{}", idx)?, | 1053 | Ty::Bound(idx) => write!(f, "?{}", idx)?, |
1014 | Ty::Dyn(predicates) | Ty::Opaque(predicates) => { | 1054 | Ty::Dyn(predicates) | Ty::Opaque(predicates) => { |
1015 | match self { | 1055 | match self { |
@@ -1017,66 +1057,7 @@ impl HirDisplay for Ty { | |||
1017 | Ty::Opaque(_) => write!(f, "impl ")?, | 1057 | Ty::Opaque(_) => write!(f, "impl ")?, |
1018 | _ => unreachable!(), | 1058 | _ => unreachable!(), |
1019 | }; | 1059 | }; |
1020 | // Note: This code is written to produce nice results (i.e. | 1060 | write_bounds_like_dyn_trait(&predicates, f)?; |
1021 | // corresponding to surface Rust) for types that can occur in | ||
1022 | // actual Rust. It will have weird results if the predicates | ||
1023 | // aren't as expected (i.e. self types = $0, projection | ||
1024 | // predicates for a certain trait come after the Implemented | ||
1025 | // predicate for that trait). | ||
1026 | let mut first = true; | ||
1027 | let mut angle_open = false; | ||
1028 | for p in predicates.iter() { | ||
1029 | match p { | ||
1030 | GenericPredicate::Implemented(trait_ref) => { | ||
1031 | if angle_open { | ||
1032 | write!(f, ">")?; | ||
1033 | } | ||
1034 | if !first { | ||
1035 | write!(f, " + ")?; | ||
1036 | } | ||
1037 | // We assume that the self type is $0 (i.e. the | ||
1038 | // existential) here, which is the only thing that's | ||
1039 | // possible in actual Rust, and hence don't print it | ||
1040 | write!(f, "{}", f.db.trait_data(trait_ref.trait_).name.clone())?; | ||
1041 | if trait_ref.substs.len() > 1 { | ||
1042 | write!(f, "<")?; | ||
1043 | f.write_joined(&trait_ref.substs[1..], ", ")?; | ||
1044 | // there might be assoc type bindings, so we leave the angle brackets open | ||
1045 | angle_open = true; | ||
1046 | } | ||
1047 | } | ||
1048 | GenericPredicate::Projection(projection_pred) => { | ||
1049 | // in types in actual Rust, these will always come | ||
1050 | // after the corresponding Implemented predicate | ||
1051 | if angle_open { | ||
1052 | write!(f, ", ")?; | ||
1053 | } else { | ||
1054 | write!(f, "<")?; | ||
1055 | angle_open = true; | ||
1056 | } | ||
1057 | let name = | ||
1058 | f.db.type_alias_data(projection_pred.projection_ty.associated_ty) | ||
1059 | .name | ||
1060 | .clone(); | ||
1061 | write!(f, "{} = ", name)?; | ||
1062 | projection_pred.ty.hir_fmt(f)?; | ||
1063 | } | ||
1064 | GenericPredicate::Error => { | ||
1065 | if angle_open { | ||
1066 | // impl Trait<X, {error}> | ||
1067 | write!(f, ", ")?; | ||
1068 | } else if !first { | ||
1069 | // impl Trait + {error} | ||
1070 | write!(f, " + ")?; | ||
1071 | } | ||
1072 | p.hir_fmt(f)?; | ||
1073 | } | ||
1074 | } | ||
1075 | first = false; | ||
1076 | } | ||
1077 | if angle_open { | ||
1078 | write!(f, ">")?; | ||
1079 | } | ||
1080 | } | 1061 | } |
1081 | Ty::Unknown => write!(f, "{{unknown}}")?, | 1062 | Ty::Unknown => write!(f, "{{unknown}}")?, |
1082 | Ty::Infer(..) => write!(f, "_")?, | 1063 | Ty::Infer(..) => write!(f, "_")?, |
@@ -1085,6 +1066,71 @@ impl HirDisplay for Ty { | |||
1085 | } | 1066 | } |
1086 | } | 1067 | } |
1087 | 1068 | ||
1069 | fn write_bounds_like_dyn_trait( | ||
1070 | predicates: &[GenericPredicate], | ||
1071 | f: &mut HirFormatter<impl HirDatabase>, | ||
1072 | ) -> fmt::Result { | ||
1073 | // Note: This code is written to produce nice results (i.e. | ||
1074 | // corresponding to surface Rust) for types that can occur in | ||
1075 | // actual Rust. It will have weird results if the predicates | ||
1076 | // aren't as expected (i.e. self types = $0, projection | ||
1077 | // predicates for a certain trait come after the Implemented | ||
1078 | // predicate for that trait). | ||
1079 | let mut first = true; | ||
1080 | let mut angle_open = false; | ||
1081 | for p in predicates.iter() { | ||
1082 | match p { | ||
1083 | GenericPredicate::Implemented(trait_ref) => { | ||
1084 | if angle_open { | ||
1085 | write!(f, ">")?; | ||
1086 | } | ||
1087 | if !first { | ||
1088 | write!(f, " + ")?; | ||
1089 | } | ||
1090 | // We assume that the self type is $0 (i.e. the | ||
1091 | // existential) here, which is the only thing that's | ||
1092 | // possible in actual Rust, and hence don't print it | ||
1093 | write!(f, "{}", f.db.trait_data(trait_ref.trait_).name.clone())?; | ||
1094 | if trait_ref.substs.len() > 1 { | ||
1095 | write!(f, "<")?; | ||
1096 | f.write_joined(&trait_ref.substs[1..], ", ")?; | ||
1097 | // there might be assoc type bindings, so we leave the angle brackets open | ||
1098 | angle_open = true; | ||
1099 | } | ||
1100 | } | ||
1101 | GenericPredicate::Projection(projection_pred) => { | ||
1102 | // in types in actual Rust, these will always come | ||
1103 | // after the corresponding Implemented predicate | ||
1104 | if angle_open { | ||
1105 | write!(f, ", ")?; | ||
1106 | } else { | ||
1107 | write!(f, "<")?; | ||
1108 | angle_open = true; | ||
1109 | } | ||
1110 | let name = | ||
1111 | f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name.clone(); | ||
1112 | write!(f, "{} = ", name)?; | ||
1113 | projection_pred.ty.hir_fmt(f)?; | ||
1114 | } | ||
1115 | GenericPredicate::Error => { | ||
1116 | if angle_open { | ||
1117 | // impl Trait<X, {error}> | ||
1118 | write!(f, ", ")?; | ||
1119 | } else if !first { | ||
1120 | // impl Trait + {error} | ||
1121 | write!(f, " + ")?; | ||
1122 | } | ||
1123 | p.hir_fmt(f)?; | ||
1124 | } | ||
1125 | } | ||
1126 | first = false; | ||
1127 | } | ||
1128 | if angle_open { | ||
1129 | write!(f, ">")?; | ||
1130 | } | ||
1131 | Ok(()) | ||
1132 | } | ||
1133 | |||
1088 | impl TraitRef { | 1134 | impl TraitRef { |
1089 | fn hir_fmt_ext(&self, f: &mut HirFormatter<impl HirDatabase>, use_as: bool) -> fmt::Result { | 1135 | fn hir_fmt_ext(&self, f: &mut HirFormatter<impl HirDatabase>, use_as: bool) -> fmt::Result { |
1090 | if f.should_truncate() { | 1136 | if f.should_truncate() { |
diff --git a/crates/ra_hir_ty/src/lower.rs b/crates/ra_hir_ty/src/lower.rs index 2c2ecee9c..c68c5852b 100644 --- a/crates/ra_hir_ty/src/lower.rs +++ b/crates/ra_hir_ty/src/lower.rs | |||
@@ -10,12 +10,13 @@ use std::sync::Arc; | |||
10 | 10 | ||
11 | use hir_def::{ | 11 | use hir_def::{ |
12 | builtin_type::BuiltinType, | 12 | builtin_type::BuiltinType, |
13 | generics::WherePredicate, | 13 | generics::{TypeParamProvenance, WherePredicate, WherePredicateTarget}, |
14 | path::{GenericArg, Path, PathSegment, PathSegments}, | 14 | path::{GenericArg, Path, PathSegment, PathSegments}, |
15 | resolver::{HasResolver, Resolver, TypeNs}, | 15 | resolver::{HasResolver, Resolver, TypeNs}, |
16 | type_ref::{TypeBound, TypeRef}, | 16 | type_ref::{TypeBound, TypeRef}, |
17 | AdtId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, | 17 | AdtId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, |
18 | LocalStructFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, UnionId, VariantId, | 18 | LocalStructFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId, |
19 | VariantId, | ||
19 | }; | 20 | }; |
20 | use ra_arena::map::ArenaMap; | 21 | use ra_arena::map::ArenaMap; |
21 | use ra_db::CrateId; | 22 | use ra_db::CrateId; |
@@ -27,63 +28,158 @@ use crate::{ | |||
27 | all_super_traits, associated_type_by_name_including_super_traits, generics, make_mut_slice, | 28 | all_super_traits, associated_type_by_name_including_super_traits, generics, make_mut_slice, |
28 | variant_data, | 29 | variant_data, |
29 | }, | 30 | }, |
30 | FnSig, GenericPredicate, ProjectionPredicate, ProjectionTy, Substs, TraitEnvironment, TraitRef, | 31 | Binders, FnSig, GenericPredicate, PolyFnSig, ProjectionPredicate, ProjectionTy, Substs, |
31 | Ty, TypeCtor, TypeWalk, | 32 | TraitEnvironment, TraitRef, Ty, TypeCtor, |
32 | }; | 33 | }; |
33 | 34 | ||
35 | #[derive(Debug)] | ||
36 | pub struct TyLoweringContext<'a, DB: HirDatabase> { | ||
37 | pub db: &'a DB, | ||
38 | pub resolver: &'a Resolver, | ||
39 | /// Note: Conceptually, it's thinkable that we could be in a location where | ||
40 | /// some type params should be represented as placeholders, and others | ||
41 | /// should be converted to variables. I think in practice, this isn't | ||
42 | /// possible currently, so this should be fine for now. | ||
43 | pub type_param_mode: TypeParamLoweringMode, | ||
44 | pub impl_trait_mode: ImplTraitLoweringMode, | ||
45 | pub impl_trait_counter: std::cell::Cell<u16>, | ||
46 | } | ||
47 | |||
48 | impl<'a, DB: HirDatabase> TyLoweringContext<'a, DB> { | ||
49 | pub fn new(db: &'a DB, resolver: &'a Resolver) -> Self { | ||
50 | let impl_trait_counter = std::cell::Cell::new(0); | ||
51 | let impl_trait_mode = ImplTraitLoweringMode::Disallowed; | ||
52 | let type_param_mode = TypeParamLoweringMode::Placeholder; | ||
53 | Self { db, resolver, impl_trait_mode, impl_trait_counter, type_param_mode } | ||
54 | } | ||
55 | |||
56 | pub fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self { | ||
57 | Self { impl_trait_mode, ..self } | ||
58 | } | ||
59 | |||
60 | pub fn with_type_param_mode(self, type_param_mode: TypeParamLoweringMode) -> Self { | ||
61 | Self { type_param_mode, ..self } | ||
62 | } | ||
63 | } | ||
64 | |||
65 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] | ||
66 | pub enum ImplTraitLoweringMode { | ||
67 | /// `impl Trait` gets lowered into an opaque type that doesn't unify with | ||
68 | /// anything except itself. This is used in places where values flow 'out', | ||
69 | /// i.e. for arguments of the function we're currently checking, and return | ||
70 | /// types of functions we're calling. | ||
71 | Opaque, | ||
72 | /// `impl Trait` gets lowered into a type variable. Used for argument | ||
73 | /// position impl Trait when inside the respective function, since it allows | ||
74 | /// us to support that without Chalk. | ||
75 | Param, | ||
76 | /// `impl Trait` gets lowered into a variable that can unify with some | ||
77 | /// type. This is used in places where values flow 'in', i.e. for arguments | ||
78 | /// of functions we're calling, and the return type of the function we're | ||
79 | /// currently checking. | ||
80 | Variable, | ||
81 | /// `impl Trait` is disallowed and will be an error. | ||
82 | Disallowed, | ||
83 | } | ||
84 | |||
85 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] | ||
86 | pub enum TypeParamLoweringMode { | ||
87 | Placeholder, | ||
88 | Variable, | ||
89 | } | ||
90 | |||
34 | impl Ty { | 91 | impl Ty { |
35 | pub fn from_hir(db: &impl HirDatabase, resolver: &Resolver, type_ref: &TypeRef) -> Self { | 92 | pub fn from_hir(ctx: &TyLoweringContext<'_, impl HirDatabase>, type_ref: &TypeRef) -> Self { |
36 | match type_ref { | 93 | match type_ref { |
37 | TypeRef::Never => Ty::simple(TypeCtor::Never), | 94 | TypeRef::Never => Ty::simple(TypeCtor::Never), |
38 | TypeRef::Tuple(inner) => { | 95 | TypeRef::Tuple(inner) => { |
39 | let inner_tys: Arc<[Ty]> = | 96 | let inner_tys: Arc<[Ty]> = inner.iter().map(|tr| Ty::from_hir(ctx, tr)).collect(); |
40 | inner.iter().map(|tr| Ty::from_hir(db, resolver, tr)).collect(); | ||
41 | Ty::apply( | 97 | Ty::apply( |
42 | TypeCtor::Tuple { cardinality: inner_tys.len() as u16 }, | 98 | TypeCtor::Tuple { cardinality: inner_tys.len() as u16 }, |
43 | Substs(inner_tys), | 99 | Substs(inner_tys), |
44 | ) | 100 | ) |
45 | } | 101 | } |
46 | TypeRef::Path(path) => Ty::from_hir_path(db, resolver, path), | 102 | TypeRef::Path(path) => Ty::from_hir_path(ctx, path), |
47 | TypeRef::RawPtr(inner, mutability) => { | 103 | TypeRef::RawPtr(inner, mutability) => { |
48 | let inner_ty = Ty::from_hir(db, resolver, inner); | 104 | let inner_ty = Ty::from_hir(ctx, inner); |
49 | Ty::apply_one(TypeCtor::RawPtr(*mutability), inner_ty) | 105 | Ty::apply_one(TypeCtor::RawPtr(*mutability), inner_ty) |
50 | } | 106 | } |
51 | TypeRef::Array(inner) => { | 107 | TypeRef::Array(inner) => { |
52 | let inner_ty = Ty::from_hir(db, resolver, inner); | 108 | let inner_ty = Ty::from_hir(ctx, inner); |
53 | Ty::apply_one(TypeCtor::Array, inner_ty) | 109 | Ty::apply_one(TypeCtor::Array, inner_ty) |
54 | } | 110 | } |
55 | TypeRef::Slice(inner) => { | 111 | TypeRef::Slice(inner) => { |
56 | let inner_ty = Ty::from_hir(db, resolver, inner); | 112 | let inner_ty = Ty::from_hir(ctx, inner); |
57 | Ty::apply_one(TypeCtor::Slice, inner_ty) | 113 | Ty::apply_one(TypeCtor::Slice, inner_ty) |
58 | } | 114 | } |
59 | TypeRef::Reference(inner, mutability) => { | 115 | TypeRef::Reference(inner, mutability) => { |
60 | let inner_ty = Ty::from_hir(db, resolver, inner); | 116 | let inner_ty = Ty::from_hir(ctx, inner); |
61 | Ty::apply_one(TypeCtor::Ref(*mutability), inner_ty) | 117 | Ty::apply_one(TypeCtor::Ref(*mutability), inner_ty) |
62 | } | 118 | } |
63 | TypeRef::Placeholder => Ty::Unknown, | 119 | TypeRef::Placeholder => Ty::Unknown, |
64 | TypeRef::Fn(params) => { | 120 | TypeRef::Fn(params) => { |
65 | let sig = Substs(params.iter().map(|tr| Ty::from_hir(db, resolver, tr)).collect()); | 121 | let sig = Substs(params.iter().map(|tr| Ty::from_hir(ctx, tr)).collect()); |
66 | Ty::apply(TypeCtor::FnPtr { num_args: sig.len() as u16 - 1 }, sig) | 122 | Ty::apply(TypeCtor::FnPtr { num_args: sig.len() as u16 - 1 }, sig) |
67 | } | 123 | } |
68 | TypeRef::DynTrait(bounds) => { | 124 | TypeRef::DynTrait(bounds) => { |
69 | let self_ty = Ty::Bound(0); | 125 | let self_ty = Ty::Bound(0); |
70 | let predicates = bounds | 126 | let predicates = bounds |
71 | .iter() | 127 | .iter() |
72 | .flat_map(|b| { | 128 | .flat_map(|b| GenericPredicate::from_type_bound(ctx, b, self_ty.clone())) |
73 | GenericPredicate::from_type_bound(db, resolver, b, self_ty.clone()) | ||
74 | }) | ||
75 | .collect(); | 129 | .collect(); |
76 | Ty::Dyn(predicates) | 130 | Ty::Dyn(predicates) |
77 | } | 131 | } |
78 | TypeRef::ImplTrait(bounds) => { | 132 | TypeRef::ImplTrait(bounds) => { |
79 | let self_ty = Ty::Bound(0); | 133 | match ctx.impl_trait_mode { |
80 | let predicates = bounds | 134 | ImplTraitLoweringMode::Opaque => { |
81 | .iter() | 135 | let self_ty = Ty::Bound(0); |
82 | .flat_map(|b| { | 136 | let predicates = bounds |
83 | GenericPredicate::from_type_bound(db, resolver, b, self_ty.clone()) | 137 | .iter() |
84 | }) | 138 | .flat_map(|b| { |
85 | .collect(); | 139 | GenericPredicate::from_type_bound(ctx, b, self_ty.clone()) |
86 | Ty::Opaque(predicates) | 140 | }) |
141 | .collect(); | ||
142 | Ty::Opaque(predicates) | ||
143 | } | ||
144 | ImplTraitLoweringMode::Param => { | ||
145 | let idx = ctx.impl_trait_counter.get(); | ||
146 | ctx.impl_trait_counter.set(idx + 1); | ||
147 | if let Some(def) = ctx.resolver.generic_def() { | ||
148 | let generics = generics(ctx.db, def); | ||
149 | let param = generics | ||
150 | .iter() | ||
151 | .filter(|(_, data)| { | ||
152 | data.provenance == TypeParamProvenance::ArgumentImplTrait | ||
153 | }) | ||
154 | .nth(idx as usize) | ||
155 | .map_or(Ty::Unknown, |(id, _)| Ty::Param(id)); | ||
156 | param | ||
157 | } else { | ||
158 | Ty::Unknown | ||
159 | } | ||
160 | } | ||
161 | ImplTraitLoweringMode::Variable => { | ||
162 | let idx = ctx.impl_trait_counter.get(); | ||
163 | ctx.impl_trait_counter.set(idx + 1); | ||
164 | let (parent_params, self_params, list_params, _impl_trait_params) = | ||
165 | if let Some(def) = ctx.resolver.generic_def() { | ||
166 | let generics = generics(ctx.db, def); | ||
167 | generics.provenance_split() | ||
168 | } else { | ||
169 | (0, 0, 0, 0) | ||
170 | }; | ||
171 | Ty::Bound( | ||
172 | idx as u32 | ||
173 | + parent_params as u32 | ||
174 | + self_params as u32 | ||
175 | + list_params as u32, | ||
176 | ) | ||
177 | } | ||
178 | ImplTraitLoweringMode::Disallowed => { | ||
179 | // FIXME: report error | ||
180 | Ty::Unknown | ||
181 | } | ||
182 | } | ||
87 | } | 183 | } |
88 | TypeRef::Error => Ty::Unknown, | 184 | TypeRef::Error => Ty::Unknown, |
89 | } | 185 | } |
@@ -93,10 +189,9 @@ impl Ty { | |||
93 | /// lower the self types of the predicates since that could lead to cycles. | 189 | /// lower the self types of the predicates since that could lead to cycles. |
94 | /// So we just check here if the `type_ref` resolves to a generic param, and which. | 190 | /// So we just check here if the `type_ref` resolves to a generic param, and which. |
95 | fn from_hir_only_param( | 191 | fn from_hir_only_param( |
96 | db: &impl HirDatabase, | 192 | ctx: &TyLoweringContext<'_, impl HirDatabase>, |
97 | resolver: &Resolver, | ||
98 | type_ref: &TypeRef, | 193 | type_ref: &TypeRef, |
99 | ) -> Option<u32> { | 194 | ) -> Option<TypeParamId> { |
100 | let path = match type_ref { | 195 | let path = match type_ref { |
101 | TypeRef::Path(path) => path, | 196 | TypeRef::Path(path) => path, |
102 | _ => return None, | 197 | _ => return None, |
@@ -107,29 +202,26 @@ impl Ty { | |||
107 | if path.segments().len() > 1 { | 202 | if path.segments().len() > 1 { |
108 | return None; | 203 | return None; |
109 | } | 204 | } |
110 | let resolution = match resolver.resolve_path_in_type_ns(db, path.mod_path()) { | 205 | let resolution = match ctx.resolver.resolve_path_in_type_ns(ctx.db, path.mod_path()) { |
111 | Some((it, None)) => it, | 206 | Some((it, None)) => it, |
112 | _ => return None, | 207 | _ => return None, |
113 | }; | 208 | }; |
114 | if let TypeNs::GenericParam(param_id) = resolution { | 209 | if let TypeNs::GenericParam(param_id) = resolution { |
115 | let generics = generics(db, resolver.generic_def().expect("generics in scope")); | 210 | Some(param_id) |
116 | let idx = generics.param_idx(param_id); | ||
117 | Some(idx) | ||
118 | } else { | 211 | } else { |
119 | None | 212 | None |
120 | } | 213 | } |
121 | } | 214 | } |
122 | 215 | ||
123 | pub(crate) fn from_type_relative_path( | 216 | pub(crate) fn from_type_relative_path( |
124 | db: &impl HirDatabase, | 217 | ctx: &TyLoweringContext<'_, impl HirDatabase>, |
125 | resolver: &Resolver, | ||
126 | ty: Ty, | 218 | ty: Ty, |
127 | remaining_segments: PathSegments<'_>, | 219 | remaining_segments: PathSegments<'_>, |
128 | ) -> Ty { | 220 | ) -> Ty { |
129 | if remaining_segments.len() == 1 { | 221 | if remaining_segments.len() == 1 { |
130 | // resolve unselected assoc types | 222 | // resolve unselected assoc types |
131 | let segment = remaining_segments.first().unwrap(); | 223 | let segment = remaining_segments.first().unwrap(); |
132 | Ty::select_associated_type(db, resolver, ty, segment) | 224 | Ty::select_associated_type(ctx, ty, segment) |
133 | } else if remaining_segments.len() > 1 { | 225 | } else if remaining_segments.len() > 1 { |
134 | // FIXME report error (ambiguous associated type) | 226 | // FIXME report error (ambiguous associated type) |
135 | Ty::Unknown | 227 | Ty::Unknown |
@@ -139,20 +231,18 @@ impl Ty { | |||
139 | } | 231 | } |
140 | 232 | ||
141 | pub(crate) fn from_partly_resolved_hir_path( | 233 | pub(crate) fn from_partly_resolved_hir_path( |
142 | db: &impl HirDatabase, | 234 | ctx: &TyLoweringContext<'_, impl HirDatabase>, |
143 | resolver: &Resolver, | ||
144 | resolution: TypeNs, | 235 | resolution: TypeNs, |
145 | resolved_segment: PathSegment<'_>, | 236 | resolved_segment: PathSegment<'_>, |
146 | remaining_segments: PathSegments<'_>, | 237 | remaining_segments: PathSegments<'_>, |
147 | ) -> Ty { | 238 | ) -> Ty { |
148 | let ty = match resolution { | 239 | let ty = match resolution { |
149 | TypeNs::TraitId(trait_) => { | 240 | TypeNs::TraitId(trait_) => { |
150 | let trait_ref = | 241 | let trait_ref = TraitRef::from_resolved_path(ctx, trait_, resolved_segment, None); |
151 | TraitRef::from_resolved_path(db, resolver, trait_, resolved_segment, None); | ||
152 | return if remaining_segments.len() == 1 { | 242 | return if remaining_segments.len() == 1 { |
153 | let segment = remaining_segments.first().unwrap(); | 243 | let segment = remaining_segments.first().unwrap(); |
154 | let associated_ty = associated_type_by_name_including_super_traits( | 244 | let associated_ty = associated_type_by_name_including_super_traits( |
155 | db, | 245 | ctx.db, |
156 | trait_ref.trait_, | 246 | trait_ref.trait_, |
157 | &segment.name, | 247 | &segment.name, |
158 | ); | 248 | ); |
@@ -177,37 +267,55 @@ impl Ty { | |||
177 | }; | 267 | }; |
178 | } | 268 | } |
179 | TypeNs::GenericParam(param_id) => { | 269 | TypeNs::GenericParam(param_id) => { |
180 | let generics = generics(db, resolver.generic_def().expect("generics in scope")); | 270 | let generics = |
181 | let idx = generics.param_idx(param_id); | 271 | generics(ctx.db, ctx.resolver.generic_def().expect("generics in scope")); |
182 | // FIXME: maybe return name in resolution? | 272 | match ctx.type_param_mode { |
183 | let name = generics.param_name(param_id); | 273 | TypeParamLoweringMode::Placeholder => Ty::Param(param_id), |
184 | Ty::Param { idx, name } | 274 | TypeParamLoweringMode::Variable => { |
275 | let idx = generics.param_idx(param_id).expect("matching generics"); | ||
276 | Ty::Bound(idx) | ||
277 | } | ||
278 | } | ||
185 | } | 279 | } |
186 | TypeNs::SelfType(impl_id) => db.impl_self_ty(impl_id).clone(), | 280 | TypeNs::SelfType(impl_id) => { |
187 | TypeNs::AdtSelfType(adt) => db.ty(adt.into()), | 281 | let generics = generics(ctx.db, impl_id.into()); |
188 | 282 | let substs = match ctx.type_param_mode { | |
189 | TypeNs::AdtId(it) => Ty::from_hir_path_inner(db, resolver, resolved_segment, it.into()), | 283 | TypeParamLoweringMode::Placeholder => { |
190 | TypeNs::BuiltinType(it) => { | 284 | Substs::type_params_for_generics(&generics) |
191 | Ty::from_hir_path_inner(db, resolver, resolved_segment, it.into()) | 285 | } |
286 | TypeParamLoweringMode::Variable => Substs::bound_vars(&generics), | ||
287 | }; | ||
288 | ctx.db.impl_self_ty(impl_id).subst(&substs) | ||
192 | } | 289 | } |
193 | TypeNs::TypeAliasId(it) => { | 290 | TypeNs::AdtSelfType(adt) => { |
194 | Ty::from_hir_path_inner(db, resolver, resolved_segment, it.into()) | 291 | let generics = generics(ctx.db, adt.into()); |
292 | let substs = match ctx.type_param_mode { | ||
293 | TypeParamLoweringMode::Placeholder => { | ||
294 | Substs::type_params_for_generics(&generics) | ||
295 | } | ||
296 | TypeParamLoweringMode::Variable => Substs::bound_vars(&generics), | ||
297 | }; | ||
298 | ctx.db.ty(adt.into()).subst(&substs) | ||
195 | } | 299 | } |
300 | |||
301 | TypeNs::AdtId(it) => Ty::from_hir_path_inner(ctx, resolved_segment, it.into()), | ||
302 | TypeNs::BuiltinType(it) => Ty::from_hir_path_inner(ctx, resolved_segment, it.into()), | ||
303 | TypeNs::TypeAliasId(it) => Ty::from_hir_path_inner(ctx, resolved_segment, it.into()), | ||
196 | // FIXME: report error | 304 | // FIXME: report error |
197 | TypeNs::EnumVariantId(_) => return Ty::Unknown, | 305 | TypeNs::EnumVariantId(_) => return Ty::Unknown, |
198 | }; | 306 | }; |
199 | 307 | ||
200 | Ty::from_type_relative_path(db, resolver, ty, remaining_segments) | 308 | Ty::from_type_relative_path(ctx, ty, remaining_segments) |
201 | } | 309 | } |
202 | 310 | ||
203 | pub(crate) fn from_hir_path(db: &impl HirDatabase, resolver: &Resolver, path: &Path) -> Ty { | 311 | pub(crate) fn from_hir_path(ctx: &TyLoweringContext<'_, impl HirDatabase>, path: &Path) -> Ty { |
204 | // Resolve the path (in type namespace) | 312 | // Resolve the path (in type namespace) |
205 | if let Some(type_ref) = path.type_anchor() { | 313 | if let Some(type_ref) = path.type_anchor() { |
206 | let ty = Ty::from_hir(db, resolver, &type_ref); | 314 | let ty = Ty::from_hir(ctx, &type_ref); |
207 | return Ty::from_type_relative_path(db, resolver, ty, path.segments()); | 315 | return Ty::from_type_relative_path(ctx, ty, path.segments()); |
208 | } | 316 | } |
209 | let (resolution, remaining_index) = | 317 | let (resolution, remaining_index) = |
210 | match resolver.resolve_path_in_type_ns(db, path.mod_path()) { | 318 | match ctx.resolver.resolve_path_in_type_ns(ctx.db, path.mod_path()) { |
211 | Some(it) => it, | 319 | Some(it) => it, |
212 | None => return Ty::Unknown, | 320 | None => return Ty::Unknown, |
213 | }; | 321 | }; |
@@ -218,39 +326,44 @@ impl Ty { | |||
218 | ), | 326 | ), |
219 | Some(i) => (path.segments().get(i - 1).unwrap(), path.segments().skip(i)), | 327 | Some(i) => (path.segments().get(i - 1).unwrap(), path.segments().skip(i)), |
220 | }; | 328 | }; |
221 | Ty::from_partly_resolved_hir_path( | 329 | Ty::from_partly_resolved_hir_path(ctx, resolution, resolved_segment, remaining_segments) |
222 | db, | ||
223 | resolver, | ||
224 | resolution, | ||
225 | resolved_segment, | ||
226 | remaining_segments, | ||
227 | ) | ||
228 | } | 330 | } |
229 | 331 | ||
230 | fn select_associated_type( | 332 | fn select_associated_type( |
231 | db: &impl HirDatabase, | 333 | ctx: &TyLoweringContext<'_, impl HirDatabase>, |
232 | resolver: &Resolver, | ||
233 | self_ty: Ty, | 334 | self_ty: Ty, |
234 | segment: PathSegment<'_>, | 335 | segment: PathSegment<'_>, |
235 | ) -> Ty { | 336 | ) -> Ty { |
236 | let param_idx = match self_ty { | 337 | let def = match ctx.resolver.generic_def() { |
237 | Ty::Param { idx, .. } => idx, | ||
238 | _ => return Ty::Unknown, // Error: Ambiguous associated type | ||
239 | }; | ||
240 | let def = match resolver.generic_def() { | ||
241 | Some(def) => def, | 338 | Some(def) => def, |
242 | None => return Ty::Unknown, // this can't actually happen | 339 | None => return Ty::Unknown, // this can't actually happen |
243 | }; | 340 | }; |
244 | let predicates = db.generic_predicates_for_param(def.into(), param_idx); | 341 | let param_id = match self_ty { |
245 | let traits_from_env = predicates.iter().filter_map(|pred| match pred { | 342 | Ty::Param(id) if ctx.type_param_mode == TypeParamLoweringMode::Placeholder => id, |
246 | GenericPredicate::Implemented(tr) if tr.self_ty() == &self_ty => Some(tr.trait_), | 343 | Ty::Bound(idx) if ctx.type_param_mode == TypeParamLoweringMode::Variable => { |
344 | let generics = generics(ctx.db, def); | ||
345 | let param_id = if let Some((id, _)) = generics.iter().nth(idx as usize) { | ||
346 | id | ||
347 | } else { | ||
348 | return Ty::Unknown; | ||
349 | }; | ||
350 | param_id | ||
351 | } | ||
352 | _ => return Ty::Unknown, // Error: Ambiguous associated type | ||
353 | }; | ||
354 | let predicates = ctx.db.generic_predicates_for_param(param_id); | ||
355 | let traits_from_env = predicates.iter().filter_map(|pred| match &pred.value { | ||
356 | GenericPredicate::Implemented(tr) => Some(tr.trait_), | ||
247 | _ => None, | 357 | _ => None, |
248 | }); | 358 | }); |
249 | let traits = traits_from_env.flat_map(|t| all_super_traits(db, t)); | 359 | let traits = traits_from_env.flat_map(|t| all_super_traits(ctx.db, t)); |
250 | for t in traits { | 360 | for t in traits { |
251 | if let Some(associated_ty) = db.trait_data(t).associated_type_by_name(&segment.name) { | 361 | if let Some(associated_ty) = ctx.db.trait_data(t).associated_type_by_name(&segment.name) |
252 | let substs = | 362 | { |
253 | Substs::build_for_def(db, t).push(self_ty.clone()).fill_with_unknown().build(); | 363 | let substs = Substs::build_for_def(ctx.db, t) |
364 | .push(self_ty.clone()) | ||
365 | .fill_with_unknown() | ||
366 | .build(); | ||
254 | // FIXME handle type parameters on the segment | 367 | // FIXME handle type parameters on the segment |
255 | return Ty::Projection(ProjectionTy { associated_ty, parameters: substs }); | 368 | return Ty::Projection(ProjectionTy { associated_ty, parameters: substs }); |
256 | } | 369 | } |
@@ -259,8 +372,7 @@ impl Ty { | |||
259 | } | 372 | } |
260 | 373 | ||
261 | fn from_hir_path_inner( | 374 | fn from_hir_path_inner( |
262 | db: &impl HirDatabase, | 375 | ctx: &TyLoweringContext<'_, impl HirDatabase>, |
263 | resolver: &Resolver, | ||
264 | segment: PathSegment<'_>, | 376 | segment: PathSegment<'_>, |
265 | typable: TyDefId, | 377 | typable: TyDefId, |
266 | ) -> Ty { | 378 | ) -> Ty { |
@@ -269,15 +381,14 @@ impl Ty { | |||
269 | TyDefId::AdtId(it) => Some(it.into()), | 381 | TyDefId::AdtId(it) => Some(it.into()), |
270 | TyDefId::TypeAliasId(it) => Some(it.into()), | 382 | TyDefId::TypeAliasId(it) => Some(it.into()), |
271 | }; | 383 | }; |
272 | let substs = substs_from_path_segment(db, resolver, segment, generic_def, false); | 384 | let substs = substs_from_path_segment(ctx, segment, generic_def, false); |
273 | db.ty(typable).subst(&substs) | 385 | ctx.db.ty(typable).subst(&substs) |
274 | } | 386 | } |
275 | 387 | ||
276 | /// Collect generic arguments from a path into a `Substs`. See also | 388 | /// Collect generic arguments from a path into a `Substs`. See also |
277 | /// `create_substs_for_ast_path` and `def_to_ty` in rustc. | 389 | /// `create_substs_for_ast_path` and `def_to_ty` in rustc. |
278 | pub(super) fn substs_from_path( | 390 | pub(super) fn substs_from_path( |
279 | db: &impl HirDatabase, | 391 | ctx: &TyLoweringContext<'_, impl HirDatabase>, |
280 | resolver: &Resolver, | ||
281 | path: &Path, | 392 | path: &Path, |
282 | // Note that we don't call `db.value_type(resolved)` here, | 393 | // Note that we don't call `db.value_type(resolved)` here, |
283 | // `ValueTyDefId` is just a convenient way to pass generics and | 394 | // `ValueTyDefId` is just a convenient way to pass generics and |
@@ -305,52 +416,49 @@ impl Ty { | |||
305 | (segment, Some(var.parent.into())) | 416 | (segment, Some(var.parent.into())) |
306 | } | 417 | } |
307 | }; | 418 | }; |
308 | substs_from_path_segment(db, resolver, segment, generic_def, false) | 419 | substs_from_path_segment(ctx, segment, generic_def, false) |
309 | } | 420 | } |
310 | } | 421 | } |
311 | 422 | ||
312 | pub(super) fn substs_from_path_segment( | 423 | pub(super) fn substs_from_path_segment( |
313 | db: &impl HirDatabase, | 424 | ctx: &TyLoweringContext<'_, impl HirDatabase>, |
314 | resolver: &Resolver, | ||
315 | segment: PathSegment<'_>, | 425 | segment: PathSegment<'_>, |
316 | def_generic: Option<GenericDefId>, | 426 | def_generic: Option<GenericDefId>, |
317 | add_self_param: bool, | 427 | _add_self_param: bool, |
318 | ) -> Substs { | 428 | ) -> Substs { |
319 | let mut substs = Vec::new(); | 429 | let mut substs = Vec::new(); |
320 | let def_generics = def_generic.map(|def| generics(db, def.into())); | 430 | let def_generics = def_generic.map(|def| generics(ctx.db, def.into())); |
321 | 431 | ||
322 | let (total_len, parent_len, child_len) = def_generics.map_or((0, 0, 0), |g| g.len_split()); | 432 | let (parent_params, self_params, type_params, impl_trait_params) = |
323 | substs.extend(iter::repeat(Ty::Unknown).take(parent_len)); | 433 | def_generics.map_or((0, 0, 0, 0), |g| g.provenance_split()); |
324 | if add_self_param { | 434 | substs.extend(iter::repeat(Ty::Unknown).take(parent_params)); |
325 | // FIXME this add_self_param argument is kind of a hack: Traits have the | ||
326 | // Self type as an implicit first type parameter, but it can't be | ||
327 | // actually provided in the type arguments | ||
328 | // (well, actually sometimes it can, in the form of type-relative paths: `<Foo as Default>::default()`) | ||
329 | substs.push(Ty::Unknown); | ||
330 | } | ||
331 | if let Some(generic_args) = &segment.args_and_bindings { | 435 | if let Some(generic_args) = &segment.args_and_bindings { |
436 | if !generic_args.has_self_type { | ||
437 | substs.extend(iter::repeat(Ty::Unknown).take(self_params)); | ||
438 | } | ||
439 | let expected_num = | ||
440 | if generic_args.has_self_type { self_params + type_params } else { type_params }; | ||
441 | let skip = if generic_args.has_self_type && self_params == 0 { 1 } else { 0 }; | ||
332 | // if args are provided, it should be all of them, but we can't rely on that | 442 | // if args are provided, it should be all of them, but we can't rely on that |
333 | let self_param_correction = if add_self_param { 1 } else { 0 }; | 443 | for arg in generic_args.args.iter().skip(skip).take(expected_num) { |
334 | let child_len = child_len - self_param_correction; | ||
335 | for arg in generic_args.args.iter().take(child_len) { | ||
336 | match arg { | 444 | match arg { |
337 | GenericArg::Type(type_ref) => { | 445 | GenericArg::Type(type_ref) => { |
338 | let ty = Ty::from_hir(db, resolver, type_ref); | 446 | let ty = Ty::from_hir(ctx, type_ref); |
339 | substs.push(ty); | 447 | substs.push(ty); |
340 | } | 448 | } |
341 | } | 449 | } |
342 | } | 450 | } |
343 | } | 451 | } |
452 | let total_len = parent_params + self_params + type_params + impl_trait_params; | ||
344 | // add placeholders for args that were not provided | 453 | // add placeholders for args that were not provided |
345 | let supplied_params = substs.len(); | 454 | for _ in substs.len()..total_len { |
346 | for _ in supplied_params..total_len { | ||
347 | substs.push(Ty::Unknown); | 455 | substs.push(Ty::Unknown); |
348 | } | 456 | } |
349 | assert_eq!(substs.len(), total_len); | 457 | assert_eq!(substs.len(), total_len); |
350 | 458 | ||
351 | // handle defaults | 459 | // handle defaults |
352 | if let Some(def_generic) = def_generic { | 460 | if let Some(def_generic) = def_generic { |
353 | let default_substs = db.generic_defaults(def_generic.into()); | 461 | let default_substs = ctx.db.generic_defaults(def_generic.into()); |
354 | assert_eq!(substs.len(), default_substs.len()); | 462 | assert_eq!(substs.len(), default_substs.len()); |
355 | 463 | ||
356 | for (i, default_ty) in default_substs.iter().enumerate() { | 464 | for (i, default_ty) in default_substs.iter().enumerate() { |
@@ -365,27 +473,25 @@ pub(super) fn substs_from_path_segment( | |||
365 | 473 | ||
366 | impl TraitRef { | 474 | impl TraitRef { |
367 | fn from_path( | 475 | fn from_path( |
368 | db: &impl HirDatabase, | 476 | ctx: &TyLoweringContext<'_, impl HirDatabase>, |
369 | resolver: &Resolver, | ||
370 | path: &Path, | 477 | path: &Path, |
371 | explicit_self_ty: Option<Ty>, | 478 | explicit_self_ty: Option<Ty>, |
372 | ) -> Option<Self> { | 479 | ) -> Option<Self> { |
373 | let resolved = match resolver.resolve_path_in_type_ns_fully(db, path.mod_path())? { | 480 | let resolved = match ctx.resolver.resolve_path_in_type_ns_fully(ctx.db, path.mod_path())? { |
374 | TypeNs::TraitId(tr) => tr, | 481 | TypeNs::TraitId(tr) => tr, |
375 | _ => return None, | 482 | _ => return None, |
376 | }; | 483 | }; |
377 | let segment = path.segments().last().expect("path should have at least one segment"); | 484 | let segment = path.segments().last().expect("path should have at least one segment"); |
378 | Some(TraitRef::from_resolved_path(db, resolver, resolved.into(), segment, explicit_self_ty)) | 485 | Some(TraitRef::from_resolved_path(ctx, resolved.into(), segment, explicit_self_ty)) |
379 | } | 486 | } |
380 | 487 | ||
381 | pub(crate) fn from_resolved_path( | 488 | pub(crate) fn from_resolved_path( |
382 | db: &impl HirDatabase, | 489 | ctx: &TyLoweringContext<'_, impl HirDatabase>, |
383 | resolver: &Resolver, | ||
384 | resolved: TraitId, | 490 | resolved: TraitId, |
385 | segment: PathSegment<'_>, | 491 | segment: PathSegment<'_>, |
386 | explicit_self_ty: Option<Ty>, | 492 | explicit_self_ty: Option<Ty>, |
387 | ) -> Self { | 493 | ) -> Self { |
388 | let mut substs = TraitRef::substs_from_path(db, resolver, segment, resolved); | 494 | let mut substs = TraitRef::substs_from_path(ctx, segment, resolved); |
389 | if let Some(self_ty) = explicit_self_ty { | 495 | if let Some(self_ty) = explicit_self_ty { |
390 | make_mut_slice(&mut substs.0)[0] = self_ty; | 496 | make_mut_slice(&mut substs.0)[0] = self_ty; |
391 | } | 497 | } |
@@ -393,8 +499,7 @@ impl TraitRef { | |||
393 | } | 499 | } |
394 | 500 | ||
395 | fn from_hir( | 501 | fn from_hir( |
396 | db: &impl HirDatabase, | 502 | ctx: &TyLoweringContext<'_, impl HirDatabase>, |
397 | resolver: &Resolver, | ||
398 | type_ref: &TypeRef, | 503 | type_ref: &TypeRef, |
399 | explicit_self_ty: Option<Ty>, | 504 | explicit_self_ty: Option<Ty>, |
400 | ) -> Option<Self> { | 505 | ) -> Option<Self> { |
@@ -402,28 +507,26 @@ impl TraitRef { | |||
402 | TypeRef::Path(path) => path, | 507 | TypeRef::Path(path) => path, |
403 | _ => return None, | 508 | _ => return None, |
404 | }; | 509 | }; |
405 | TraitRef::from_path(db, resolver, path, explicit_self_ty) | 510 | TraitRef::from_path(ctx, path, explicit_self_ty) |
406 | } | 511 | } |
407 | 512 | ||
408 | fn substs_from_path( | 513 | fn substs_from_path( |
409 | db: &impl HirDatabase, | 514 | ctx: &TyLoweringContext<'_, impl HirDatabase>, |
410 | resolver: &Resolver, | ||
411 | segment: PathSegment<'_>, | 515 | segment: PathSegment<'_>, |
412 | resolved: TraitId, | 516 | resolved: TraitId, |
413 | ) -> Substs { | 517 | ) -> Substs { |
414 | let has_self_param = | 518 | let has_self_param = |
415 | segment.args_and_bindings.as_ref().map(|a| a.has_self_type).unwrap_or(false); | 519 | segment.args_and_bindings.as_ref().map(|a| a.has_self_type).unwrap_or(false); |
416 | substs_from_path_segment(db, resolver, segment, Some(resolved.into()), !has_self_param) | 520 | substs_from_path_segment(ctx, segment, Some(resolved.into()), !has_self_param) |
417 | } | 521 | } |
418 | 522 | ||
419 | pub(crate) fn from_type_bound( | 523 | pub(crate) fn from_type_bound( |
420 | db: &impl HirDatabase, | 524 | ctx: &TyLoweringContext<'_, impl HirDatabase>, |
421 | resolver: &Resolver, | ||
422 | bound: &TypeBound, | 525 | bound: &TypeBound, |
423 | self_ty: Ty, | 526 | self_ty: Ty, |
424 | ) -> Option<TraitRef> { | 527 | ) -> Option<TraitRef> { |
425 | match bound { | 528 | match bound { |
426 | TypeBound::Path(path) => TraitRef::from_path(db, resolver, path, Some(self_ty)), | 529 | TypeBound::Path(path) => TraitRef::from_path(ctx, path, Some(self_ty)), |
427 | TypeBound::Error => None, | 530 | TypeBound::Error => None, |
428 | } | 531 | } |
429 | } | 532 | } |
@@ -431,33 +534,44 @@ impl TraitRef { | |||
431 | 534 | ||
432 | impl GenericPredicate { | 535 | impl GenericPredicate { |
433 | pub(crate) fn from_where_predicate<'a>( | 536 | pub(crate) fn from_where_predicate<'a>( |
434 | db: &'a impl HirDatabase, | 537 | ctx: &'a TyLoweringContext<'a, impl HirDatabase>, |
435 | resolver: &'a Resolver, | ||
436 | where_predicate: &'a WherePredicate, | 538 | where_predicate: &'a WherePredicate, |
437 | ) -> impl Iterator<Item = GenericPredicate> + 'a { | 539 | ) -> impl Iterator<Item = GenericPredicate> + 'a { |
438 | let self_ty = Ty::from_hir(db, resolver, &where_predicate.type_ref); | 540 | let self_ty = match &where_predicate.target { |
439 | GenericPredicate::from_type_bound(db, resolver, &where_predicate.bound, self_ty) | 541 | WherePredicateTarget::TypeRef(type_ref) => Ty::from_hir(ctx, type_ref), |
542 | WherePredicateTarget::TypeParam(param_id) => { | ||
543 | let generic_def = ctx.resolver.generic_def().expect("generics in scope"); | ||
544 | let generics = generics(ctx.db, generic_def); | ||
545 | let param_id = hir_def::TypeParamId { parent: generic_def, local_id: *param_id }; | ||
546 | match ctx.type_param_mode { | ||
547 | TypeParamLoweringMode::Placeholder => Ty::Param(param_id), | ||
548 | TypeParamLoweringMode::Variable => { | ||
549 | let idx = generics.param_idx(param_id).expect("matching generics"); | ||
550 | Ty::Bound(idx) | ||
551 | } | ||
552 | } | ||
553 | } | ||
554 | }; | ||
555 | GenericPredicate::from_type_bound(ctx, &where_predicate.bound, self_ty) | ||
440 | } | 556 | } |
441 | 557 | ||
442 | pub(crate) fn from_type_bound<'a>( | 558 | pub(crate) fn from_type_bound<'a>( |
443 | db: &'a impl HirDatabase, | 559 | ctx: &'a TyLoweringContext<'a, impl HirDatabase>, |
444 | resolver: &'a Resolver, | ||
445 | bound: &'a TypeBound, | 560 | bound: &'a TypeBound, |
446 | self_ty: Ty, | 561 | self_ty: Ty, |
447 | ) -> impl Iterator<Item = GenericPredicate> + 'a { | 562 | ) -> impl Iterator<Item = GenericPredicate> + 'a { |
448 | let trait_ref = TraitRef::from_type_bound(db, &resolver, bound, self_ty); | 563 | let trait_ref = TraitRef::from_type_bound(ctx, bound, self_ty); |
449 | iter::once(trait_ref.clone().map_or(GenericPredicate::Error, GenericPredicate::Implemented)) | 564 | iter::once(trait_ref.clone().map_or(GenericPredicate::Error, GenericPredicate::Implemented)) |
450 | .chain( | 565 | .chain( |
451 | trait_ref.into_iter().flat_map(move |tr| { | 566 | trait_ref |
452 | assoc_type_bindings_from_type_bound(db, resolver, bound, tr) | 567 | .into_iter() |
453 | }), | 568 | .flat_map(move |tr| assoc_type_bindings_from_type_bound(ctx, bound, tr)), |
454 | ) | 569 | ) |
455 | } | 570 | } |
456 | } | 571 | } |
457 | 572 | ||
458 | fn assoc_type_bindings_from_type_bound<'a>( | 573 | fn assoc_type_bindings_from_type_bound<'a>( |
459 | db: &'a impl HirDatabase, | 574 | ctx: &'a TyLoweringContext<'a, impl HirDatabase>, |
460 | resolver: &'a Resolver, | ||
461 | bound: &'a TypeBound, | 575 | bound: &'a TypeBound, |
462 | trait_ref: TraitRef, | 576 | trait_ref: TraitRef, |
463 | ) -> impl Iterator<Item = GenericPredicate> + 'a { | 577 | ) -> impl Iterator<Item = GenericPredicate> + 'a { |
@@ -471,21 +585,21 @@ fn assoc_type_bindings_from_type_bound<'a>( | |||
471 | .flat_map(|args_and_bindings| args_and_bindings.bindings.iter()) | 585 | .flat_map(|args_and_bindings| args_and_bindings.bindings.iter()) |
472 | .map(move |(name, type_ref)| { | 586 | .map(move |(name, type_ref)| { |
473 | let associated_ty = | 587 | let associated_ty = |
474 | associated_type_by_name_including_super_traits(db, trait_ref.trait_, &name); | 588 | associated_type_by_name_including_super_traits(ctx.db, trait_ref.trait_, &name); |
475 | let associated_ty = match associated_ty { | 589 | let associated_ty = match associated_ty { |
476 | None => return GenericPredicate::Error, | 590 | None => return GenericPredicate::Error, |
477 | Some(t) => t, | 591 | Some(t) => t, |
478 | }; | 592 | }; |
479 | let projection_ty = | 593 | let projection_ty = |
480 | ProjectionTy { associated_ty, parameters: trait_ref.substs.clone() }; | 594 | ProjectionTy { associated_ty, parameters: trait_ref.substs.clone() }; |
481 | let ty = Ty::from_hir(db, resolver, type_ref); | 595 | let ty = Ty::from_hir(ctx, type_ref); |
482 | let projection_predicate = ProjectionPredicate { projection_ty, ty }; | 596 | let projection_predicate = ProjectionPredicate { projection_ty, ty }; |
483 | GenericPredicate::Projection(projection_predicate) | 597 | GenericPredicate::Projection(projection_predicate) |
484 | }) | 598 | }) |
485 | } | 599 | } |
486 | 600 | ||
487 | /// Build the signature of a callable item (function, struct or enum variant). | 601 | /// Build the signature of a callable item (function, struct or enum variant). |
488 | pub fn callable_item_sig(db: &impl HirDatabase, def: CallableDef) -> FnSig { | 602 | pub fn callable_item_sig(db: &impl HirDatabase, def: CallableDef) -> PolyFnSig { |
489 | match def { | 603 | match def { |
490 | CallableDef::FunctionId(f) => fn_sig_for_fn(db, f), | 604 | CallableDef::FunctionId(f) => fn_sig_for_fn(db, f), |
491 | CallableDef::StructId(s) => fn_sig_for_struct_constructor(db, s), | 605 | CallableDef::StructId(s) => fn_sig_for_struct_constructor(db, s), |
@@ -497,16 +611,19 @@ pub fn callable_item_sig(db: &impl HirDatabase, def: CallableDef) -> FnSig { | |||
497 | pub(crate) fn field_types_query( | 611 | pub(crate) fn field_types_query( |
498 | db: &impl HirDatabase, | 612 | db: &impl HirDatabase, |
499 | variant_id: VariantId, | 613 | variant_id: VariantId, |
500 | ) -> Arc<ArenaMap<LocalStructFieldId, Ty>> { | 614 | ) -> Arc<ArenaMap<LocalStructFieldId, Binders<Ty>>> { |
501 | let var_data = variant_data(db, variant_id); | 615 | let var_data = variant_data(db, variant_id); |
502 | let resolver = match variant_id { | 616 | let (resolver, def): (_, GenericDefId) = match variant_id { |
503 | VariantId::StructId(it) => it.resolver(db), | 617 | VariantId::StructId(it) => (it.resolver(db), it.into()), |
504 | VariantId::UnionId(it) => it.resolver(db), | 618 | VariantId::UnionId(it) => (it.resolver(db), it.into()), |
505 | VariantId::EnumVariantId(it) => it.parent.resolver(db), | 619 | VariantId::EnumVariantId(it) => (it.parent.resolver(db), it.parent.into()), |
506 | }; | 620 | }; |
621 | let generics = generics(db, def); | ||
507 | let mut res = ArenaMap::default(); | 622 | let mut res = ArenaMap::default(); |
623 | let ctx = | ||
624 | TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); | ||
508 | for (field_id, field_data) in var_data.fields().iter() { | 625 | for (field_id, field_data) in var_data.fields().iter() { |
509 | res.insert(field_id, Ty::from_hir(db, &resolver, &field_data.type_ref)) | 626 | res.insert(field_id, Binders::new(generics.len(), Ty::from_hir(&ctx, &field_data.type_ref))) |
510 | } | 627 | } |
511 | Arc::new(res) | 628 | Arc::new(res) |
512 | } | 629 | } |
@@ -521,32 +638,43 @@ pub(crate) fn field_types_query( | |||
521 | /// these are fine: `T: Foo<U::Item>, U: Foo<()>`. | 638 | /// these are fine: `T: Foo<U::Item>, U: Foo<()>`. |
522 | pub(crate) fn generic_predicates_for_param_query( | 639 | pub(crate) fn generic_predicates_for_param_query( |
523 | db: &impl HirDatabase, | 640 | db: &impl HirDatabase, |
524 | def: GenericDefId, | 641 | param_id: TypeParamId, |
525 | param_idx: u32, | 642 | ) -> Arc<[Binders<GenericPredicate>]> { |
526 | ) -> Arc<[GenericPredicate]> { | 643 | let resolver = param_id.parent.resolver(db); |
527 | let resolver = def.resolver(db); | 644 | let ctx = |
645 | TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); | ||
646 | let generics = generics(db, param_id.parent); | ||
528 | resolver | 647 | resolver |
529 | .where_predicates_in_scope() | 648 | .where_predicates_in_scope() |
530 | // we have to filter out all other predicates *first*, before attempting to lower them | 649 | // we have to filter out all other predicates *first*, before attempting to lower them |
531 | .filter(|pred| Ty::from_hir_only_param(db, &resolver, &pred.type_ref) == Some(param_idx)) | 650 | .filter(|pred| match &pred.target { |
532 | .flat_map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred)) | 651 | WherePredicateTarget::TypeRef(type_ref) => { |
652 | Ty::from_hir_only_param(&ctx, type_ref) == Some(param_id) | ||
653 | } | ||
654 | WherePredicateTarget::TypeParam(local_id) => *local_id == param_id.local_id, | ||
655 | }) | ||
656 | .flat_map(|pred| { | ||
657 | GenericPredicate::from_where_predicate(&ctx, pred) | ||
658 | .map(|p| Binders::new(generics.len(), p)) | ||
659 | }) | ||
533 | .collect() | 660 | .collect() |
534 | } | 661 | } |
535 | 662 | ||
536 | pub(crate) fn generic_predicates_for_param_recover( | 663 | pub(crate) fn generic_predicates_for_param_recover( |
537 | _db: &impl HirDatabase, | 664 | _db: &impl HirDatabase, |
538 | _cycle: &[String], | 665 | _cycle: &[String], |
539 | _def: &GenericDefId, | 666 | _param_id: &TypeParamId, |
540 | _param_idx: &u32, | 667 | ) -> Arc<[Binders<GenericPredicate>]> { |
541 | ) -> Arc<[GenericPredicate]> { | ||
542 | Arc::new([]) | 668 | Arc::new([]) |
543 | } | 669 | } |
544 | 670 | ||
545 | impl TraitEnvironment { | 671 | impl TraitEnvironment { |
546 | pub fn lower(db: &impl HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> { | 672 | pub fn lower(db: &impl HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> { |
673 | let ctx = TyLoweringContext::new(db, &resolver) | ||
674 | .with_type_param_mode(TypeParamLoweringMode::Placeholder); | ||
547 | let predicates = resolver | 675 | let predicates = resolver |
548 | .where_predicates_in_scope() | 676 | .where_predicates_in_scope() |
549 | .flat_map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred)) | 677 | .flat_map(|pred| GenericPredicate::from_where_predicate(&ctx, pred)) |
550 | .collect::<Vec<_>>(); | 678 | .collect::<Vec<_>>(); |
551 | 679 | ||
552 | Arc::new(TraitEnvironment { predicates }) | 680 | Arc::new(TraitEnvironment { predicates }) |
@@ -557,57 +685,74 @@ impl TraitEnvironment { | |||
557 | pub(crate) fn generic_predicates_query( | 685 | pub(crate) fn generic_predicates_query( |
558 | db: &impl HirDatabase, | 686 | db: &impl HirDatabase, |
559 | def: GenericDefId, | 687 | def: GenericDefId, |
560 | ) -> Arc<[GenericPredicate]> { | 688 | ) -> Arc<[Binders<GenericPredicate>]> { |
561 | let resolver = def.resolver(db); | 689 | let resolver = def.resolver(db); |
690 | let ctx = | ||
691 | TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); | ||
692 | let generics = generics(db, def); | ||
562 | resolver | 693 | resolver |
563 | .where_predicates_in_scope() | 694 | .where_predicates_in_scope() |
564 | .flat_map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred)) | 695 | .flat_map(|pred| { |
696 | GenericPredicate::from_where_predicate(&ctx, pred) | ||
697 | .map(|p| Binders::new(generics.len(), p)) | ||
698 | }) | ||
565 | .collect() | 699 | .collect() |
566 | } | 700 | } |
567 | 701 | ||
568 | /// Resolve the default type params from generics | 702 | /// Resolve the default type params from generics |
569 | pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDefId) -> Substs { | 703 | pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDefId) -> Substs { |
570 | let resolver = def.resolver(db); | 704 | let resolver = def.resolver(db); |
705 | let ctx = TyLoweringContext::new(db, &resolver); | ||
571 | let generic_params = generics(db, def.into()); | 706 | let generic_params = generics(db, def.into()); |
572 | 707 | ||
573 | let defaults = generic_params | 708 | let defaults = generic_params |
574 | .iter() | 709 | .iter() |
575 | .map(|(_idx, p)| p.default.as_ref().map_or(Ty::Unknown, |t| Ty::from_hir(db, &resolver, t))) | 710 | .map(|(_idx, p)| p.default.as_ref().map_or(Ty::Unknown, |t| Ty::from_hir(&ctx, t))) |
576 | .collect(); | 711 | .collect(); |
577 | 712 | ||
578 | Substs(defaults) | 713 | Substs(defaults) |
579 | } | 714 | } |
580 | 715 | ||
581 | fn fn_sig_for_fn(db: &impl HirDatabase, def: FunctionId) -> FnSig { | 716 | fn fn_sig_for_fn(db: &impl HirDatabase, def: FunctionId) -> PolyFnSig { |
582 | let data = db.function_data(def); | 717 | let data = db.function_data(def); |
583 | let resolver = def.resolver(db); | 718 | let resolver = def.resolver(db); |
584 | let params = data.params.iter().map(|tr| Ty::from_hir(db, &resolver, tr)).collect::<Vec<_>>(); | 719 | let ctx_params = TyLoweringContext::new(db, &resolver) |
585 | let ret = Ty::from_hir(db, &resolver, &data.ret_type); | 720 | .with_impl_trait_mode(ImplTraitLoweringMode::Variable) |
586 | FnSig::from_params_and_return(params, ret) | 721 | .with_type_param_mode(TypeParamLoweringMode::Variable); |
722 | let params = data.params.iter().map(|tr| Ty::from_hir(&ctx_params, tr)).collect::<Vec<_>>(); | ||
723 | let ctx_ret = ctx_params.with_impl_trait_mode(ImplTraitLoweringMode::Opaque); | ||
724 | let ret = Ty::from_hir(&ctx_ret, &data.ret_type); | ||
725 | let generics = generics(db, def.into()); | ||
726 | let num_binders = generics.len(); | ||
727 | Binders::new(num_binders, FnSig::from_params_and_return(params, ret)) | ||
587 | } | 728 | } |
588 | 729 | ||
589 | /// Build the declared type of a function. This should not need to look at the | 730 | /// Build the declared type of a function. This should not need to look at the |
590 | /// function body. | 731 | /// function body. |
591 | fn type_for_fn(db: &impl HirDatabase, def: FunctionId) -> Ty { | 732 | fn type_for_fn(db: &impl HirDatabase, def: FunctionId) -> Binders<Ty> { |
592 | let generics = generics(db, def.into()); | 733 | let generics = generics(db, def.into()); |
593 | let substs = Substs::identity(&generics); | 734 | let substs = Substs::bound_vars(&generics); |
594 | Ty::apply(TypeCtor::FnDef(def.into()), substs) | 735 | Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs)) |
595 | } | 736 | } |
596 | 737 | ||
597 | /// Build the declared type of a const. | 738 | /// Build the declared type of a const. |
598 | fn type_for_const(db: &impl HirDatabase, def: ConstId) -> Ty { | 739 | fn type_for_const(db: &impl HirDatabase, def: ConstId) -> Binders<Ty> { |
599 | let data = db.const_data(def); | 740 | let data = db.const_data(def); |
741 | let generics = generics(db, def.into()); | ||
600 | let resolver = def.resolver(db); | 742 | let resolver = def.resolver(db); |
743 | let ctx = | ||
744 | TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); | ||
601 | 745 | ||
602 | Ty::from_hir(db, &resolver, &data.type_ref) | 746 | Binders::new(generics.len(), Ty::from_hir(&ctx, &data.type_ref)) |
603 | } | 747 | } |
604 | 748 | ||
605 | /// Build the declared type of a static. | 749 | /// Build the declared type of a static. |
606 | fn type_for_static(db: &impl HirDatabase, def: StaticId) -> Ty { | 750 | fn type_for_static(db: &impl HirDatabase, def: StaticId) -> Binders<Ty> { |
607 | let data = db.static_data(def); | 751 | let data = db.static_data(def); |
608 | let resolver = def.resolver(db); | 752 | let resolver = def.resolver(db); |
753 | let ctx = TyLoweringContext::new(db, &resolver); | ||
609 | 754 | ||
610 | Ty::from_hir(db, &resolver, &data.type_ref) | 755 | Binders::new(0, Ty::from_hir(&ctx, &data.type_ref)) |
611 | } | 756 | } |
612 | 757 | ||
613 | /// Build the declared type of a static. | 758 | /// Build the declared type of a static. |
@@ -621,68 +766,69 @@ fn type_for_builtin(def: BuiltinType) -> Ty { | |||
621 | }) | 766 | }) |
622 | } | 767 | } |
623 | 768 | ||
624 | fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> FnSig { | 769 | fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> PolyFnSig { |
625 | let struct_data = db.struct_data(def.into()); | 770 | let struct_data = db.struct_data(def.into()); |
626 | let fields = struct_data.variant_data.fields(); | 771 | let fields = struct_data.variant_data.fields(); |
627 | let resolver = def.resolver(db); | 772 | let resolver = def.resolver(db); |
628 | let params = fields | 773 | let ctx = |
629 | .iter() | 774 | TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); |
630 | .map(|(_, field)| Ty::from_hir(db, &resolver, &field.type_ref)) | 775 | let params = |
631 | .collect::<Vec<_>>(); | 776 | fields.iter().map(|(_, field)| Ty::from_hir(&ctx, &field.type_ref)).collect::<Vec<_>>(); |
632 | let ret = type_for_adt(db, def.into()); | 777 | let ret = type_for_adt(db, def.into()); |
633 | FnSig::from_params_and_return(params, ret) | 778 | Binders::new(ret.num_binders, FnSig::from_params_and_return(params, ret.value)) |
634 | } | 779 | } |
635 | 780 | ||
636 | /// Build the type of a tuple struct constructor. | 781 | /// Build the type of a tuple struct constructor. |
637 | fn type_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> Ty { | 782 | fn type_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> Binders<Ty> { |
638 | let struct_data = db.struct_data(def.into()); | 783 | let struct_data = db.struct_data(def.into()); |
639 | if struct_data.variant_data.is_unit() { | 784 | if struct_data.variant_data.is_unit() { |
640 | return type_for_adt(db, def.into()); // Unit struct | 785 | return type_for_adt(db, def.into()); // Unit struct |
641 | } | 786 | } |
642 | let generics = generics(db, def.into()); | 787 | let generics = generics(db, def.into()); |
643 | let substs = Substs::identity(&generics); | 788 | let substs = Substs::bound_vars(&generics); |
644 | Ty::apply(TypeCtor::FnDef(def.into()), substs) | 789 | Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs)) |
645 | } | 790 | } |
646 | 791 | ||
647 | fn fn_sig_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariantId) -> FnSig { | 792 | fn fn_sig_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariantId) -> PolyFnSig { |
648 | let enum_data = db.enum_data(def.parent); | 793 | let enum_data = db.enum_data(def.parent); |
649 | let var_data = &enum_data.variants[def.local_id]; | 794 | let var_data = &enum_data.variants[def.local_id]; |
650 | let fields = var_data.variant_data.fields(); | 795 | let fields = var_data.variant_data.fields(); |
651 | let resolver = def.parent.resolver(db); | 796 | let resolver = def.parent.resolver(db); |
652 | let params = fields | 797 | let ctx = |
653 | .iter() | 798 | TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); |
654 | .map(|(_, field)| Ty::from_hir(db, &resolver, &field.type_ref)) | 799 | let params = |
655 | .collect::<Vec<_>>(); | 800 | fields.iter().map(|(_, field)| Ty::from_hir(&ctx, &field.type_ref)).collect::<Vec<_>>(); |
656 | let generics = generics(db, def.parent.into()); | 801 | let ret = type_for_adt(db, def.parent.into()); |
657 | let substs = Substs::identity(&generics); | 802 | Binders::new(ret.num_binders, FnSig::from_params_and_return(params, ret.value)) |
658 | let ret = type_for_adt(db, def.parent.into()).subst(&substs); | ||
659 | FnSig::from_params_and_return(params, ret) | ||
660 | } | 803 | } |
661 | 804 | ||
662 | /// Build the type of a tuple enum variant constructor. | 805 | /// Build the type of a tuple enum variant constructor. |
663 | fn type_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariantId) -> Ty { | 806 | fn type_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariantId) -> Binders<Ty> { |
664 | let enum_data = db.enum_data(def.parent); | 807 | let enum_data = db.enum_data(def.parent); |
665 | let var_data = &enum_data.variants[def.local_id].variant_data; | 808 | let var_data = &enum_data.variants[def.local_id].variant_data; |
666 | if var_data.is_unit() { | 809 | if var_data.is_unit() { |
667 | return type_for_adt(db, def.parent.into()); // Unit variant | 810 | return type_for_adt(db, def.parent.into()); // Unit variant |
668 | } | 811 | } |
669 | let generics = generics(db, def.parent.into()); | 812 | let generics = generics(db, def.parent.into()); |
670 | let substs = Substs::identity(&generics); | 813 | let substs = Substs::bound_vars(&generics); |
671 | Ty::apply(TypeCtor::FnDef(EnumVariantId::from(def).into()), substs) | 814 | Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(EnumVariantId::from(def).into()), substs)) |
672 | } | 815 | } |
673 | 816 | ||
674 | fn type_for_adt(db: &impl HirDatabase, adt: AdtId) -> Ty { | 817 | fn type_for_adt(db: &impl HirDatabase, adt: AdtId) -> Binders<Ty> { |
675 | let generics = generics(db, adt.into()); | 818 | let generics = generics(db, adt.into()); |
676 | Ty::apply(TypeCtor::Adt(adt), Substs::identity(&generics)) | 819 | let substs = Substs::bound_vars(&generics); |
820 | Binders::new(substs.len(), Ty::apply(TypeCtor::Adt(adt), substs)) | ||
677 | } | 821 | } |
678 | 822 | ||
679 | fn type_for_type_alias(db: &impl HirDatabase, t: TypeAliasId) -> Ty { | 823 | fn type_for_type_alias(db: &impl HirDatabase, t: TypeAliasId) -> Binders<Ty> { |
680 | let generics = generics(db, t.into()); | 824 | let generics = generics(db, t.into()); |
681 | let resolver = t.resolver(db); | 825 | let resolver = t.resolver(db); |
826 | let ctx = | ||
827 | TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); | ||
682 | let type_ref = &db.type_alias_data(t).type_ref; | 828 | let type_ref = &db.type_alias_data(t).type_ref; |
683 | let substs = Substs::identity(&generics); | 829 | let substs = Substs::bound_vars(&generics); |
684 | let inner = Ty::from_hir(db, &resolver, type_ref.as_ref().unwrap_or(&TypeRef::Error)); | 830 | let inner = Ty::from_hir(&ctx, type_ref.as_ref().unwrap_or(&TypeRef::Error)); |
685 | inner.subst(&substs) | 831 | Binders::new(substs.len(), inner) |
686 | } | 832 | } |
687 | 833 | ||
688 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] | 834 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] |
@@ -736,19 +882,24 @@ impl_froms!(ValueTyDefId: FunctionId, StructId, EnumVariantId, ConstId, StaticId | |||
736 | /// `struct Foo(usize)`, we have two types: The type of the struct itself, and | 882 | /// `struct Foo(usize)`, we have two types: The type of the struct itself, and |
737 | /// the constructor function `(usize) -> Foo` which lives in the values | 883 | /// the constructor function `(usize) -> Foo` which lives in the values |
738 | /// namespace. | 884 | /// namespace. |
739 | pub(crate) fn ty_query(db: &impl HirDatabase, def: TyDefId) -> Ty { | 885 | pub(crate) fn ty_query(db: &impl HirDatabase, def: TyDefId) -> Binders<Ty> { |
740 | match def { | 886 | match def { |
741 | TyDefId::BuiltinType(it) => type_for_builtin(it), | 887 | TyDefId::BuiltinType(it) => Binders::new(0, type_for_builtin(it)), |
742 | TyDefId::AdtId(it) => type_for_adt(db, it), | 888 | TyDefId::AdtId(it) => type_for_adt(db, it), |
743 | TyDefId::TypeAliasId(it) => type_for_type_alias(db, it), | 889 | TyDefId::TypeAliasId(it) => type_for_type_alias(db, it), |
744 | } | 890 | } |
745 | } | 891 | } |
746 | 892 | ||
747 | pub(crate) fn ty_recover(_db: &impl HirDatabase, _cycle: &[String], _def: &TyDefId) -> Ty { | 893 | pub(crate) fn ty_recover(db: &impl HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders<Ty> { |
748 | Ty::Unknown | 894 | let num_binders = match *def { |
895 | TyDefId::BuiltinType(_) => 0, | ||
896 | TyDefId::AdtId(it) => generics(db, it.into()).len(), | ||
897 | TyDefId::TypeAliasId(it) => generics(db, it.into()).len(), | ||
898 | }; | ||
899 | Binders::new(num_binders, Ty::Unknown) | ||
749 | } | 900 | } |
750 | 901 | ||
751 | pub(crate) fn value_ty_query(db: &impl HirDatabase, def: ValueTyDefId) -> Ty { | 902 | pub(crate) fn value_ty_query(db: &impl HirDatabase, def: ValueTyDefId) -> Binders<Ty> { |
752 | match def { | 903 | match def { |
753 | ValueTyDefId::FunctionId(it) => type_for_fn(db, it), | 904 | ValueTyDefId::FunctionId(it) => type_for_fn(db, it), |
754 | ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it), | 905 | ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it), |
@@ -758,24 +909,36 @@ pub(crate) fn value_ty_query(db: &impl HirDatabase, def: ValueTyDefId) -> Ty { | |||
758 | } | 909 | } |
759 | } | 910 | } |
760 | 911 | ||
761 | pub(crate) fn impl_self_ty_query(db: &impl HirDatabase, impl_id: ImplId) -> Ty { | 912 | pub(crate) fn impl_self_ty_query(db: &impl HirDatabase, impl_id: ImplId) -> Binders<Ty> { |
762 | let impl_data = db.impl_data(impl_id); | 913 | let impl_data = db.impl_data(impl_id); |
763 | let resolver = impl_id.resolver(db); | 914 | let resolver = impl_id.resolver(db); |
764 | Ty::from_hir(db, &resolver, &impl_data.target_type) | 915 | let generics = generics(db, impl_id.into()); |
916 | let ctx = | ||
917 | TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); | ||
918 | Binders::new(generics.len(), Ty::from_hir(&ctx, &impl_data.target_type)) | ||
765 | } | 919 | } |
766 | 920 | ||
767 | pub(crate) fn impl_self_ty_recover( | 921 | pub(crate) fn impl_self_ty_recover( |
768 | _db: &impl HirDatabase, | 922 | db: &impl HirDatabase, |
769 | _cycle: &[String], | 923 | _cycle: &[String], |
770 | _impl_id: &ImplId, | 924 | impl_id: &ImplId, |
771 | ) -> Ty { | 925 | ) -> Binders<Ty> { |
772 | Ty::Unknown | 926 | let generics = generics(db, (*impl_id).into()); |
927 | Binders::new(generics.len(), Ty::Unknown) | ||
773 | } | 928 | } |
774 | 929 | ||
775 | pub(crate) fn impl_trait_query(db: &impl HirDatabase, impl_id: ImplId) -> Option<TraitRef> { | 930 | pub(crate) fn impl_trait_query( |
931 | db: &impl HirDatabase, | ||
932 | impl_id: ImplId, | ||
933 | ) -> Option<Binders<TraitRef>> { | ||
776 | let impl_data = db.impl_data(impl_id); | 934 | let impl_data = db.impl_data(impl_id); |
777 | let resolver = impl_id.resolver(db); | 935 | let resolver = impl_id.resolver(db); |
936 | let ctx = | ||
937 | TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); | ||
778 | let self_ty = db.impl_self_ty(impl_id); | 938 | let self_ty = db.impl_self_ty(impl_id); |
779 | let target_trait = impl_data.target_trait.as_ref()?; | 939 | let target_trait = impl_data.target_trait.as_ref()?; |
780 | TraitRef::from_hir(db, &resolver, target_trait, Some(self_ty.clone())) | 940 | Some(Binders::new( |
941 | self_ty.num_binders, | ||
942 | TraitRef::from_hir(&ctx, target_trait, Some(self_ty.value.clone()))?, | ||
943 | )) | ||
781 | } | 944 | } |
diff --git a/crates/ra_hir_ty/src/marks.rs b/crates/ra_hir_ty/src/marks.rs index fe74acf11..0f754eb9c 100644 --- a/crates/ra_hir_ty/src/marks.rs +++ b/crates/ra_hir_ty/src/marks.rs | |||
@@ -6,5 +6,4 @@ test_utils::marks!( | |||
6 | type_var_resolves_to_int_var | 6 | type_var_resolves_to_int_var |
7 | match_ergonomics_ref | 7 | match_ergonomics_ref |
8 | coerce_merge_fail_fallback | 8 | coerce_merge_fail_fallback |
9 | insert_vars_for_impl_trait | ||
10 | ); | 9 | ); |
diff --git a/crates/ra_hir_ty/src/method_resolution.rs b/crates/ra_hir_ty/src/method_resolution.rs index 5bacbbd7c..5283bff28 100644 --- a/crates/ra_hir_ty/src/method_resolution.rs +++ b/crates/ra_hir_ty/src/method_resolution.rs | |||
@@ -61,11 +61,11 @@ impl CrateImplBlocks { | |||
61 | for impl_id in module_data.scope.impls() { | 61 | for impl_id in module_data.scope.impls() { |
62 | match db.impl_trait(impl_id) { | 62 | match db.impl_trait(impl_id) { |
63 | Some(tr) => { | 63 | Some(tr) => { |
64 | res.impls_by_trait.entry(tr.trait_).or_default().push(impl_id); | 64 | res.impls_by_trait.entry(tr.value.trait_).or_default().push(impl_id); |
65 | } | 65 | } |
66 | None => { | 66 | None => { |
67 | let self_ty = db.impl_self_ty(impl_id); | 67 | let self_ty = db.impl_self_ty(impl_id); |
68 | if let Some(self_ty_fp) = TyFingerprint::for_impl(&self_ty) { | 68 | if let Some(self_ty_fp) = TyFingerprint::for_impl(&self_ty.value) { |
69 | res.impls.entry(self_ty_fp).or_default().push(impl_id); | 69 | res.impls.entry(self_ty_fp).or_default().push(impl_id); |
70 | } | 70 | } |
71 | } | 71 | } |
@@ -496,7 +496,7 @@ fn transform_receiver_ty( | |||
496 | AssocContainerId::ContainerId(_) => unreachable!(), | 496 | AssocContainerId::ContainerId(_) => unreachable!(), |
497 | }; | 497 | }; |
498 | let sig = db.callable_item_signature(function_id.into()); | 498 | let sig = db.callable_item_signature(function_id.into()); |
499 | Some(sig.params()[0].clone().subst(&substs)) | 499 | Some(sig.value.params()[0].clone().subst_bound_vars(&substs)) |
500 | } | 500 | } |
501 | 501 | ||
502 | pub fn implements_trait( | 502 | pub fn implements_trait( |
diff --git a/crates/ra_hir_ty/src/tests/coercion.rs b/crates/ra_hir_ty/src/tests/coercion.rs index 7e99a42ed..fc5ef36a5 100644 --- a/crates/ra_hir_ty/src/tests/coercion.rs +++ b/crates/ra_hir_ty/src/tests/coercion.rs | |||
@@ -71,42 +71,42 @@ fn test2() { | |||
71 | [82; 93) '{ loop {} }': T | 71 | [82; 93) '{ loop {} }': T |
72 | [84; 91) 'loop {}': ! | 72 | [84; 91) 'loop {}': ! |
73 | [89; 91) '{}': () | 73 | [89; 91) '{}': () |
74 | [122; 133) '{ loop {} }': *mut [T;_] | 74 | [122; 133) '{ loop {} }': *mut [T; _] |
75 | [124; 131) 'loop {}': ! | 75 | [124; 131) 'loop {}': ! |
76 | [129; 131) '{}': () | 76 | [129; 131) '{}': () |
77 | [160; 173) '{ gen() }': *mut [U] | 77 | [160; 173) '{ gen() }': *mut [U] |
78 | [166; 169) 'gen': fn gen<U>() -> *mut [T;_] | 78 | [166; 169) 'gen': fn gen<U>() -> *mut [U; _] |
79 | [166; 171) 'gen()': *mut [U;_] | 79 | [166; 171) 'gen()': *mut [U; _] |
80 | [186; 420) '{ ...rr); }': () | 80 | [186; 420) '{ ...rr); }': () |
81 | [196; 199) 'arr': &[u8;_] | 81 | [196; 199) 'arr': &[u8; _] |
82 | [212; 216) '&[1]': &[u8;_] | 82 | [212; 216) '&[1]': &[u8; _] |
83 | [213; 216) '[1]': [u8;_] | 83 | [213; 216) '[1]': [u8; _] |
84 | [214; 215) '1': u8 | 84 | [214; 215) '1': u8 |
85 | [227; 228) 'a': &[u8] | 85 | [227; 228) 'a': &[u8] |
86 | [237; 240) 'arr': &[u8;_] | 86 | [237; 240) 'arr': &[u8; _] |
87 | [250; 251) 'b': u8 | 87 | [250; 251) 'b': u8 |
88 | [254; 255) 'f': fn f<u8>(&[T]) -> T | 88 | [254; 255) 'f': fn f<u8>(&[u8]) -> u8 |
89 | [254; 260) 'f(arr)': u8 | 89 | [254; 260) 'f(arr)': u8 |
90 | [256; 259) 'arr': &[u8;_] | 90 | [256; 259) 'arr': &[u8; _] |
91 | [270; 271) 'c': &[u8] | 91 | [270; 271) 'c': &[u8] |
92 | [280; 287) '{ arr }': &[u8] | 92 | [280; 287) '{ arr }': &[u8] |
93 | [282; 285) 'arr': &[u8;_] | 93 | [282; 285) 'arr': &[u8; _] |
94 | [297; 298) 'd': u8 | 94 | [297; 298) 'd': u8 |
95 | [301; 302) 'g': fn g<u8>(S<&[T]>) -> T | 95 | [301; 302) 'g': fn g<u8>(S<&[u8]>) -> u8 |
96 | [301; 316) 'g(S { a: arr })': u8 | 96 | [301; 316) 'g(S { a: arr })': u8 |
97 | [303; 315) 'S { a: arr }': S<&[u8]> | 97 | [303; 315) 'S { a: arr }': S<&[u8]> |
98 | [310; 313) 'arr': &[u8;_] | 98 | [310; 313) 'arr': &[u8; _] |
99 | [326; 327) 'e': [&[u8];_] | 99 | [326; 327) 'e': [&[u8]; _] |
100 | [341; 346) '[arr]': [&[u8];_] | 100 | [341; 346) '[arr]': [&[u8]; _] |
101 | [342; 345) 'arr': &[u8;_] | 101 | [342; 345) 'arr': &[u8; _] |
102 | [356; 357) 'f': [&[u8];_] | 102 | [356; 357) 'f': [&[u8]; _] |
103 | [371; 379) '[arr; 2]': [&[u8];_] | 103 | [371; 379) '[arr; 2]': [&[u8]; _] |
104 | [372; 375) 'arr': &[u8;_] | 104 | [372; 375) 'arr': &[u8; _] |
105 | [377; 378) '2': usize | 105 | [377; 378) '2': usize |
106 | [389; 390) 'g': (&[u8], &[u8]) | 106 | [389; 390) 'g': (&[u8], &[u8]) |
107 | [407; 417) '(arr, arr)': (&[u8], &[u8]) | 107 | [407; 417) '(arr, arr)': (&[u8], &[u8]) |
108 | [408; 411) 'arr': &[u8;_] | 108 | [408; 411) 'arr': &[u8; _] |
109 | [413; 416) 'arr': &[u8;_] | 109 | [413; 416) 'arr': &[u8; _] |
110 | "### | 110 | "### |
111 | ); | 111 | ); |
112 | } | 112 | } |
@@ -122,8 +122,8 @@ fn test() { | |||
122 | @r###" | 122 | @r###" |
123 | [11; 40) '{ ...[1]; }': () | 123 | [11; 40) '{ ...[1]; }': () |
124 | [21; 22) 'x': &[i32] | 124 | [21; 22) 'x': &[i32] |
125 | [33; 37) '&[1]': &[i32;_] | 125 | [33; 37) '&[1]': &[i32; _] |
126 | [34; 37) '[1]': [i32;_] | 126 | [34; 37) '[1]': [i32; _] |
127 | [35; 36) '1': i32 | 127 | [35; 36) '1': i32 |
128 | "###); | 128 | "###); |
129 | } | 129 | } |
@@ -159,22 +159,22 @@ fn test(a: A<[u8; 2]>, b: B<[u8; 2]>, c: C<[u8; 2]>) { | |||
159 | [334; 335) 'x': C<[T]> | 159 | [334; 335) 'x': C<[T]> |
160 | [355; 360) '{ x }': C<[T]> | 160 | [355; 360) '{ x }': C<[T]> |
161 | [357; 358) 'x': C<[T]> | 161 | [357; 358) 'x': C<[T]> |
162 | [370; 371) 'a': A<[u8;_]> | 162 | [370; 371) 'a': A<[u8; _]> |
163 | [385; 386) 'b': B<[u8;_]> | 163 | [385; 386) 'b': B<[u8; _]> |
164 | [400; 401) 'c': C<[u8;_]> | 164 | [400; 401) 'c': C<[u8; _]> |
165 | [415; 481) '{ ...(c); }': () | 165 | [415; 481) '{ ...(c); }': () |
166 | [425; 426) 'd': A<[{unknown}]> | 166 | [425; 426) 'd': A<[{unknown}]> |
167 | [429; 433) 'foo1': fn foo1<{unknown}>(A<[T]>) -> A<[T]> | 167 | [429; 433) 'foo1': fn foo1<{unknown}>(A<[{unknown}]>) -> A<[{unknown}]> |
168 | [429; 436) 'foo1(a)': A<[{unknown}]> | 168 | [429; 436) 'foo1(a)': A<[{unknown}]> |
169 | [434; 435) 'a': A<[u8;_]> | 169 | [434; 435) 'a': A<[u8; _]> |
170 | [446; 447) 'e': B<[u8]> | 170 | [446; 447) 'e': B<[u8]> |
171 | [450; 454) 'foo2': fn foo2<u8>(B<[T]>) -> B<[T]> | 171 | [450; 454) 'foo2': fn foo2<u8>(B<[u8]>) -> B<[u8]> |
172 | [450; 457) 'foo2(b)': B<[u8]> | 172 | [450; 457) 'foo2(b)': B<[u8]> |
173 | [455; 456) 'b': B<[u8;_]> | 173 | [455; 456) 'b': B<[u8; _]> |
174 | [467; 468) 'f': C<[u8]> | 174 | [467; 468) 'f': C<[u8]> |
175 | [471; 475) 'foo3': fn foo3<u8>(C<[T]>) -> C<[T]> | 175 | [471; 475) 'foo3': fn foo3<u8>(C<[u8]>) -> C<[u8]> |
176 | [471; 478) 'foo3(c)': C<[u8]> | 176 | [471; 478) 'foo3(c)': C<[u8]> |
177 | [476; 477) 'c': C<[u8;_]> | 177 | [476; 477) 'c': C<[u8; _]> |
178 | "### | 178 | "### |
179 | ); | 179 | ); |
180 | } | 180 | } |
@@ -202,14 +202,14 @@ fn test() { | |||
202 | [64; 123) 'if tru... }': &[i32] | 202 | [64; 123) 'if tru... }': &[i32] |
203 | [67; 71) 'true': bool | 203 | [67; 71) 'true': bool |
204 | [72; 97) '{ ... }': &[i32] | 204 | [72; 97) '{ ... }': &[i32] |
205 | [82; 85) 'foo': fn foo<i32>(&[T]) -> &[T] | 205 | [82; 85) 'foo': fn foo<i32>(&[i32]) -> &[i32] |
206 | [82; 91) 'foo(&[1])': &[i32] | 206 | [82; 91) 'foo(&[1])': &[i32] |
207 | [86; 90) '&[1]': &[i32;_] | 207 | [86; 90) '&[1]': &[i32; _] |
208 | [87; 90) '[1]': [i32;_] | 208 | [87; 90) '[1]': [i32; _] |
209 | [88; 89) '1': i32 | 209 | [88; 89) '1': i32 |
210 | [103; 123) '{ ... }': &[i32;_] | 210 | [103; 123) '{ ... }': &[i32; _] |
211 | [113; 117) '&[1]': &[i32;_] | 211 | [113; 117) '&[1]': &[i32; _] |
212 | [114; 117) '[1]': [i32;_] | 212 | [114; 117) '[1]': [i32; _] |
213 | [115; 116) '1': i32 | 213 | [115; 116) '1': i32 |
214 | "### | 214 | "### |
215 | ); | 215 | ); |
@@ -237,15 +237,15 @@ fn test() { | |||
237 | [60; 61) 'x': &[i32] | 237 | [60; 61) 'x': &[i32] |
238 | [64; 123) 'if tru... }': &[i32] | 238 | [64; 123) 'if tru... }': &[i32] |
239 | [67; 71) 'true': bool | 239 | [67; 71) 'true': bool |
240 | [72; 92) '{ ... }': &[i32;_] | 240 | [72; 92) '{ ... }': &[i32; _] |
241 | [82; 86) '&[1]': &[i32;_] | 241 | [82; 86) '&[1]': &[i32; _] |
242 | [83; 86) '[1]': [i32;_] | 242 | [83; 86) '[1]': [i32; _] |
243 | [84; 85) '1': i32 | 243 | [84; 85) '1': i32 |
244 | [98; 123) '{ ... }': &[i32] | 244 | [98; 123) '{ ... }': &[i32] |
245 | [108; 111) 'foo': fn foo<i32>(&[T]) -> &[T] | 245 | [108; 111) 'foo': fn foo<i32>(&[i32]) -> &[i32] |
246 | [108; 117) 'foo(&[1])': &[i32] | 246 | [108; 117) 'foo(&[1])': &[i32] |
247 | [112; 116) '&[1]': &[i32;_] | 247 | [112; 116) '&[1]': &[i32; _] |
248 | [113; 116) '[1]': [i32;_] | 248 | [113; 116) '[1]': [i32; _] |
249 | [114; 115) '1': i32 | 249 | [114; 115) '1': i32 |
250 | "### | 250 | "### |
251 | ); | 251 | ); |
@@ -275,18 +275,18 @@ fn test(i: i32) { | |||
275 | [70; 147) 'match ... }': &[i32] | 275 | [70; 147) 'match ... }': &[i32] |
276 | [76; 77) 'i': i32 | 276 | [76; 77) 'i': i32 |
277 | [88; 89) '2': i32 | 277 | [88; 89) '2': i32 |
278 | [93; 96) 'foo': fn foo<i32>(&[T]) -> &[T] | 278 | [93; 96) 'foo': fn foo<i32>(&[i32]) -> &[i32] |
279 | [93; 102) 'foo(&[2])': &[i32] | 279 | [93; 102) 'foo(&[2])': &[i32] |
280 | [97; 101) '&[2]': &[i32;_] | 280 | [97; 101) '&[2]': &[i32; _] |
281 | [98; 101) '[2]': [i32;_] | 281 | [98; 101) '[2]': [i32; _] |
282 | [99; 100) '2': i32 | 282 | [99; 100) '2': i32 |
283 | [112; 113) '1': i32 | 283 | [112; 113) '1': i32 |
284 | [117; 121) '&[1]': &[i32;_] | 284 | [117; 121) '&[1]': &[i32; _] |
285 | [118; 121) '[1]': [i32;_] | 285 | [118; 121) '[1]': [i32; _] |
286 | [119; 120) '1': i32 | 286 | [119; 120) '1': i32 |
287 | [131; 132) '_': i32 | 287 | [131; 132) '_': i32 |
288 | [136; 140) '&[3]': &[i32;_] | 288 | [136; 140) '&[3]': &[i32; _] |
289 | [137; 140) '[3]': [i32;_] | 289 | [137; 140) '[3]': [i32; _] |
290 | [138; 139) '3': i32 | 290 | [138; 139) '3': i32 |
291 | "### | 291 | "### |
292 | ); | 292 | ); |
@@ -316,18 +316,18 @@ fn test(i: i32) { | |||
316 | [70; 147) 'match ... }': &[i32] | 316 | [70; 147) 'match ... }': &[i32] |
317 | [76; 77) 'i': i32 | 317 | [76; 77) 'i': i32 |
318 | [88; 89) '1': i32 | 318 | [88; 89) '1': i32 |
319 | [93; 97) '&[1]': &[i32;_] | 319 | [93; 97) '&[1]': &[i32; _] |
320 | [94; 97) '[1]': [i32;_] | 320 | [94; 97) '[1]': [i32; _] |
321 | [95; 96) '1': i32 | 321 | [95; 96) '1': i32 |
322 | [107; 108) '2': i32 | 322 | [107; 108) '2': i32 |
323 | [112; 115) 'foo': fn foo<i32>(&[T]) -> &[T] | 323 | [112; 115) 'foo': fn foo<i32>(&[i32]) -> &[i32] |
324 | [112; 121) 'foo(&[2])': &[i32] | 324 | [112; 121) 'foo(&[2])': &[i32] |
325 | [116; 120) '&[2]': &[i32;_] | 325 | [116; 120) '&[2]': &[i32; _] |
326 | [117; 120) '[2]': [i32;_] | 326 | [117; 120) '[2]': [i32; _] |
327 | [118; 119) '2': i32 | 327 | [118; 119) '2': i32 |
328 | [131; 132) '_': i32 | 328 | [131; 132) '_': i32 |
329 | [136; 140) '&[3]': &[i32;_] | 329 | [136; 140) '&[3]': &[i32; _] |
330 | [137; 140) '[3]': [i32;_] | 330 | [137; 140) '[3]': [i32; _] |
331 | [138; 139) '3': i32 | 331 | [138; 139) '3': i32 |
332 | "### | 332 | "### |
333 | ); | 333 | ); |
@@ -438,16 +438,16 @@ fn test() { | |||
438 | [43; 45) '*x': T | 438 | [43; 45) '*x': T |
439 | [44; 45) 'x': &T | 439 | [44; 45) 'x': &T |
440 | [58; 127) '{ ...oo); }': () | 440 | [58; 127) '{ ...oo); }': () |
441 | [64; 73) 'takes_ref': fn takes_ref<Foo>(&T) -> T | 441 | [64; 73) 'takes_ref': fn takes_ref<Foo>(&Foo) -> Foo |
442 | [64; 79) 'takes_ref(&Foo)': Foo | 442 | [64; 79) 'takes_ref(&Foo)': Foo |
443 | [74; 78) '&Foo': &Foo | 443 | [74; 78) '&Foo': &Foo |
444 | [75; 78) 'Foo': Foo | 444 | [75; 78) 'Foo': Foo |
445 | [85; 94) 'takes_ref': fn takes_ref<&Foo>(&T) -> T | 445 | [85; 94) 'takes_ref': fn takes_ref<&Foo>(&&Foo) -> &Foo |
446 | [85; 101) 'takes_...&&Foo)': &Foo | 446 | [85; 101) 'takes_...&&Foo)': &Foo |
447 | [95; 100) '&&Foo': &&Foo | 447 | [95; 100) '&&Foo': &&Foo |
448 | [96; 100) '&Foo': &Foo | 448 | [96; 100) '&Foo': &Foo |
449 | [97; 100) 'Foo': Foo | 449 | [97; 100) 'Foo': Foo |
450 | [107; 116) 'takes_ref': fn takes_ref<&&Foo>(&T) -> T | 450 | [107; 116) 'takes_ref': fn takes_ref<&&Foo>(&&&Foo) -> &&Foo |
451 | [107; 124) 'takes_...&&Foo)': &&Foo | 451 | [107; 124) 'takes_...&&Foo)': &&Foo |
452 | [117; 123) '&&&Foo': &&&Foo | 452 | [117; 123) '&&&Foo': &&&Foo |
453 | [118; 123) '&&Foo': &&Foo | 453 | [118; 123) '&&Foo': &&Foo |
diff --git a/crates/ra_hir_ty/src/tests/method_resolution.rs b/crates/ra_hir_ty/src/tests/method_resolution.rs index ce9a06fde..1722563aa 100644 --- a/crates/ra_hir_ty/src/tests/method_resolution.rs +++ b/crates/ra_hir_ty/src/tests/method_resolution.rs | |||
@@ -27,7 +27,7 @@ fn test() { | |||
27 | [66; 73) 'loop {}': ! | 27 | [66; 73) 'loop {}': ! |
28 | [71; 73) '{}': () | 28 | [71; 73) '{}': () |
29 | [133; 160) '{ ...o"); }': () | 29 | [133; 160) '{ ...o"); }': () |
30 | [139; 149) '<[_]>::foo': fn foo<u8>(&[T]) -> T | 30 | [139; 149) '<[_]>::foo': fn foo<u8>(&[u8]) -> u8 |
31 | [139; 157) '<[_]>:..."foo")': u8 | 31 | [139; 157) '<[_]>:..."foo")': u8 |
32 | [150; 156) 'b"foo"': &[u8] | 32 | [150; 156) 'b"foo"': &[u8] |
33 | "### | 33 | "### |
@@ -175,7 +175,7 @@ fn test() { | |||
175 | [98; 101) 'val': T | 175 | [98; 101) 'val': T |
176 | [123; 155) '{ ...32); }': () | 176 | [123; 155) '{ ...32); }': () |
177 | [133; 134) 'a': Gen<u32> | 177 | [133; 134) 'a': Gen<u32> |
178 | [137; 146) 'Gen::make': fn make<u32>(T) -> Gen<T> | 178 | [137; 146) 'Gen::make': fn make<u32>(u32) -> Gen<u32> |
179 | [137; 152) 'Gen::make(0u32)': Gen<u32> | 179 | [137; 152) 'Gen::make(0u32)': Gen<u32> |
180 | [147; 151) '0u32': u32 | 180 | [147; 151) '0u32': u32 |
181 | "### | 181 | "### |
@@ -206,7 +206,7 @@ fn test() { | |||
206 | [95; 98) '{ }': () | 206 | [95; 98) '{ }': () |
207 | [118; 146) '{ ...e(); }': () | 207 | [118; 146) '{ ...e(); }': () |
208 | [128; 129) 'a': Gen<u32> | 208 | [128; 129) 'a': Gen<u32> |
209 | [132; 141) 'Gen::make': fn make<u32>() -> Gen<T> | 209 | [132; 141) 'Gen::make': fn make<u32>() -> Gen<u32> |
210 | [132; 143) 'Gen::make()': Gen<u32> | 210 | [132; 143) 'Gen::make()': Gen<u32> |
211 | "### | 211 | "### |
212 | ); | 212 | ); |
@@ -260,7 +260,7 @@ fn test() { | |||
260 | [91; 94) '{ }': () | 260 | [91; 94) '{ }': () |
261 | [114; 149) '{ ...e(); }': () | 261 | [114; 149) '{ ...e(); }': () |
262 | [124; 125) 'a': Gen<u32> | 262 | [124; 125) 'a': Gen<u32> |
263 | [128; 144) 'Gen::<...::make': fn make<u32>() -> Gen<T> | 263 | [128; 144) 'Gen::<...::make': fn make<u32>() -> Gen<u32> |
264 | [128; 146) 'Gen::<...make()': Gen<u32> | 264 | [128; 146) 'Gen::<...make()': Gen<u32> |
265 | "### | 265 | "### |
266 | ); | 266 | ); |
@@ -291,7 +291,7 @@ fn test() { | |||
291 | [117; 120) '{ }': () | 291 | [117; 120) '{ }': () |
292 | [140; 180) '{ ...e(); }': () | 292 | [140; 180) '{ ...e(); }': () |
293 | [150; 151) 'a': Gen<u32, u64> | 293 | [150; 151) 'a': Gen<u32, u64> |
294 | [154; 175) 'Gen::<...::make': fn make<u64>() -> Gen<u32, T> | 294 | [154; 175) 'Gen::<...::make': fn make<u64>() -> Gen<u32, u64> |
295 | [154; 177) 'Gen::<...make()': Gen<u32, u64> | 295 | [154; 177) 'Gen::<...make()': Gen<u32, u64> |
296 | "### | 296 | "### |
297 | ); | 297 | ); |
@@ -475,7 +475,7 @@ fn test() { | |||
475 | @r###" | 475 | @r###" |
476 | [33; 37) 'self': &Self | 476 | [33; 37) 'self': &Self |
477 | [102; 127) '{ ...d(); }': () | 477 | [102; 127) '{ ...d(); }': () |
478 | [108; 109) 'S': S<u32>(T) -> S<T> | 478 | [108; 109) 'S': S<u32>(u32) -> S<u32> |
479 | [108; 115) 'S(1u32)': S<u32> | 479 | [108; 115) 'S(1u32)': S<u32> |
480 | [108; 124) 'S(1u32...thod()': u32 | 480 | [108; 124) 'S(1u32...thod()': u32 |
481 | [110; 114) '1u32': u32 | 481 | [110; 114) '1u32': u32 |
@@ -501,13 +501,13 @@ fn test() { | |||
501 | @r###" | 501 | @r###" |
502 | [87; 193) '{ ...t(); }': () | 502 | [87; 193) '{ ...t(); }': () |
503 | [97; 99) 's1': S | 503 | [97; 99) 's1': S |
504 | [105; 121) 'Defaul...efault': fn default<S>() -> Self | 504 | [105; 121) 'Defaul...efault': fn default<S>() -> S |
505 | [105; 123) 'Defaul...ault()': S | 505 | [105; 123) 'Defaul...ault()': S |
506 | [133; 135) 's2': S | 506 | [133; 135) 's2': S |
507 | [138; 148) 'S::default': fn default<S>() -> Self | 507 | [138; 148) 'S::default': fn default<S>() -> S |
508 | [138; 150) 'S::default()': S | 508 | [138; 150) 'S::default()': S |
509 | [160; 162) 's3': S | 509 | [160; 162) 's3': S |
510 | [165; 188) '<S as ...efault': fn default<S>() -> Self | 510 | [165; 188) '<S as ...efault': fn default<S>() -> S |
511 | [165; 190) '<S as ...ault()': S | 511 | [165; 190) '<S as ...ault()': S |
512 | "### | 512 | "### |
513 | ); | 513 | ); |
@@ -533,13 +533,13 @@ fn test() { | |||
533 | @r###" | 533 | @r###" |
534 | [127; 211) '{ ...e(); }': () | 534 | [127; 211) '{ ...e(); }': () |
535 | [137; 138) 'a': u32 | 535 | [137; 138) 'a': u32 |
536 | [141; 148) 'S::make': fn make<S, u32>() -> T | 536 | [141; 148) 'S::make': fn make<S, u32>() -> u32 |
537 | [141; 150) 'S::make()': u32 | 537 | [141; 150) 'S::make()': u32 |
538 | [160; 161) 'b': u64 | 538 | [160; 161) 'b': u64 |
539 | [164; 178) 'G::<u64>::make': fn make<G<u64>, u64>() -> T | 539 | [164; 178) 'G::<u64>::make': fn make<G<u64>, u64>() -> u64 |
540 | [164; 180) 'G::<u6...make()': u64 | 540 | [164; 180) 'G::<u6...make()': u64 |
541 | [190; 191) 'c': f64 | 541 | [190; 191) 'c': f64 |
542 | [199; 206) 'G::make': fn make<G<f64>, f64>() -> T | 542 | [199; 206) 'G::make': fn make<G<f64>, f64>() -> f64 |
543 | [199; 208) 'G::make()': f64 | 543 | [199; 208) 'G::make()': f64 |
544 | "### | 544 | "### |
545 | ); | 545 | ); |
@@ -567,19 +567,19 @@ fn test() { | |||
567 | @r###" | 567 | @r###" |
568 | [135; 313) '{ ...e(); }': () | 568 | [135; 313) '{ ...e(); }': () |
569 | [145; 146) 'a': (u32, i64) | 569 | [145; 146) 'a': (u32, i64) |
570 | [149; 163) 'S::make::<i64>': fn make<S, u32, i64>() -> (T, U) | 570 | [149; 163) 'S::make::<i64>': fn make<S, u32, i64>() -> (u32, i64) |
571 | [149; 165) 'S::mak...i64>()': (u32, i64) | 571 | [149; 165) 'S::mak...i64>()': (u32, i64) |
572 | [175; 176) 'b': (u32, i64) | 572 | [175; 176) 'b': (u32, i64) |
573 | [189; 196) 'S::make': fn make<S, u32, i64>() -> (T, U) | 573 | [189; 196) 'S::make': fn make<S, u32, i64>() -> (u32, i64) |
574 | [189; 198) 'S::make()': (u32, i64) | 574 | [189; 198) 'S::make()': (u32, i64) |
575 | [208; 209) 'c': (u32, i64) | 575 | [208; 209) 'c': (u32, i64) |
576 | [212; 233) 'G::<u3...:<i64>': fn make<G<u32>, u32, i64>() -> (T, U) | 576 | [212; 233) 'G::<u3...:<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64) |
577 | [212; 235) 'G::<u3...i64>()': (u32, i64) | 577 | [212; 235) 'G::<u3...i64>()': (u32, i64) |
578 | [245; 246) 'd': (u32, i64) | 578 | [245; 246) 'd': (u32, i64) |
579 | [259; 273) 'G::make::<i64>': fn make<G<u32>, u32, i64>() -> (T, U) | 579 | [259; 273) 'G::make::<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64) |
580 | [259; 275) 'G::mak...i64>()': (u32, i64) | 580 | [259; 275) 'G::mak...i64>()': (u32, i64) |
581 | [285; 286) 'e': (u32, i64) | 581 | [285; 286) 'e': (u32, i64) |
582 | [301; 308) 'G::make': fn make<G<u32>, u32, i64>() -> (T, U) | 582 | [301; 308) 'G::make': fn make<G<u32>, u32, i64>() -> (u32, i64) |
583 | [301; 310) 'G::make()': (u32, i64) | 583 | [301; 310) 'G::make()': (u32, i64) |
584 | "### | 584 | "### |
585 | ); | 585 | ); |
@@ -601,7 +601,7 @@ fn test() { | |||
601 | @r###" | 601 | @r###" |
602 | [101; 127) '{ ...e(); }': () | 602 | [101; 127) '{ ...e(); }': () |
603 | [111; 112) 'a': (S<i32>, i64) | 603 | [111; 112) 'a': (S<i32>, i64) |
604 | [115; 122) 'S::make': fn make<S<i32>, i64>() -> (Self, T) | 604 | [115; 122) 'S::make': fn make<S<i32>, i64>() -> (S<i32>, i64) |
605 | [115; 124) 'S::make()': (S<i32>, i64) | 605 | [115; 124) 'S::make()': (S<i32>, i64) |
606 | "### | 606 | "### |
607 | ); | 607 | ); |
@@ -625,10 +625,10 @@ fn test() { | |||
625 | @r###" | 625 | @r###" |
626 | [131; 203) '{ ...e(); }': () | 626 | [131; 203) '{ ...e(); }': () |
627 | [141; 142) 'a': (S<u64>, i64) | 627 | [141; 142) 'a': (S<u64>, i64) |
628 | [158; 165) 'S::make': fn make<S<u64>, i64>() -> (Self, T) | 628 | [158; 165) 'S::make': fn make<S<u64>, i64>() -> (S<u64>, i64) |
629 | [158; 167) 'S::make()': (S<u64>, i64) | 629 | [158; 167) 'S::make()': (S<u64>, i64) |
630 | [177; 178) 'b': (S<u32>, i32) | 630 | [177; 178) 'b': (S<u32>, i32) |
631 | [191; 198) 'S::make': fn make<S<u32>, i32>() -> (Self, T) | 631 | [191; 198) 'S::make': fn make<S<u32>, i32>() -> (S<u32>, i32) |
632 | [191; 200) 'S::make()': (S<u32>, i32) | 632 | [191; 200) 'S::make()': (S<u32>, i32) |
633 | "### | 633 | "### |
634 | ); | 634 | ); |
@@ -651,10 +651,10 @@ fn test() { | |||
651 | @r###" | 651 | @r###" |
652 | [107; 211) '{ ...>(); }': () | 652 | [107; 211) '{ ...>(); }': () |
653 | [117; 118) 'a': (S<u64>, i64, u8) | 653 | [117; 118) 'a': (S<u64>, i64, u8) |
654 | [121; 150) '<S as ...::<u8>': fn make<S<u64>, i64, u8>() -> (Self, T, U) | 654 | [121; 150) '<S as ...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8) |
655 | [121; 152) '<S as ...<u8>()': (S<u64>, i64, u8) | 655 | [121; 152) '<S as ...<u8>()': (S<u64>, i64, u8) |
656 | [162; 163) 'b': (S<u64>, i64, u8) | 656 | [162; 163) 'b': (S<u64>, i64, u8) |
657 | [182; 206) 'Trait:...::<u8>': fn make<S<u64>, i64, u8>() -> (Self, T, U) | 657 | [182; 206) 'Trait:...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8) |
658 | [182; 208) 'Trait:...<u8>()': (S<u64>, i64, u8) | 658 | [182; 208) 'Trait:...<u8>()': (S<u64>, i64, u8) |
659 | "### | 659 | "### |
660 | ); | 660 | ); |
@@ -697,7 +697,7 @@ fn test<U, T: Trait<U>>(t: T) { | |||
697 | [71; 72) 't': T | 697 | [71; 72) 't': T |
698 | [77; 96) '{ ...d(); }': () | 698 | [77; 96) '{ ...d(); }': () |
699 | [83; 84) 't': T | 699 | [83; 84) 't': T |
700 | [83; 93) 't.method()': [missing name] | 700 | [83; 93) 't.method()': U |
701 | "### | 701 | "### |
702 | ); | 702 | ); |
703 | } | 703 | } |
@@ -728,7 +728,7 @@ fn test() { | |||
728 | [157; 158) 'S': S | 728 | [157; 158) 'S': S |
729 | [157; 165) 'S.into()': u64 | 729 | [157; 165) 'S.into()': u64 |
730 | [175; 176) 'z': u64 | 730 | [175; 176) 'z': u64 |
731 | [179; 196) 'Into::...::into': fn into<S, u64>(Self) -> T | 731 | [179; 196) 'Into::...::into': fn into<S, u64>(S) -> u64 |
732 | [179; 199) 'Into::...nto(S)': u64 | 732 | [179; 199) 'Into::...nto(S)': u64 |
733 | [197; 198) 'S': S | 733 | [197; 198) 'S': S |
734 | "### | 734 | "### |
diff --git a/crates/ra_hir_ty/src/tests/patterns.rs b/crates/ra_hir_ty/src/tests/patterns.rs index cb3890b42..e25d6dbc4 100644 --- a/crates/ra_hir_ty/src/tests/patterns.rs +++ b/crates/ra_hir_ty/src/tests/patterns.rs | |||
@@ -96,13 +96,13 @@ fn test() { | |||
96 | [38; 42) 'A(n)': A<i32> | 96 | [38; 42) 'A(n)': A<i32> |
97 | [40; 41) 'n': &i32 | 97 | [40; 41) 'n': &i32 |
98 | [45; 50) '&A(1)': &A<i32> | 98 | [45; 50) '&A(1)': &A<i32> |
99 | [46; 47) 'A': A<i32>(T) -> A<T> | 99 | [46; 47) 'A': A<i32>(i32) -> A<i32> |
100 | [46; 50) 'A(1)': A<i32> | 100 | [46; 50) 'A(1)': A<i32> |
101 | [48; 49) '1': i32 | 101 | [48; 49) '1': i32 |
102 | [60; 64) 'A(n)': A<i32> | 102 | [60; 64) 'A(n)': A<i32> |
103 | [62; 63) 'n': &mut i32 | 103 | [62; 63) 'n': &mut i32 |
104 | [67; 76) '&mut A(1)': &mut A<i32> | 104 | [67; 76) '&mut A(1)': &mut A<i32> |
105 | [72; 73) 'A': A<i32>(T) -> A<T> | 105 | [72; 73) 'A': A<i32>(i32) -> A<i32> |
106 | [72; 76) 'A(1)': A<i32> | 106 | [72; 76) 'A(1)': A<i32> |
107 | [74; 75) '1': i32 | 107 | [74; 75) '1': i32 |
108 | "### | 108 | "### |
diff --git a/crates/ra_hir_ty/src/tests/regression.rs b/crates/ra_hir_ty/src/tests/regression.rs index aa948dcbf..14c8ed3a9 100644 --- a/crates/ra_hir_ty/src/tests/regression.rs +++ b/crates/ra_hir_ty/src/tests/regression.rs | |||
@@ -102,7 +102,7 @@ fn test() { | |||
102 | [11; 48) '{ ...&y]; }': () | 102 | [11; 48) '{ ...&y]; }': () |
103 | [21; 22) 'y': &{unknown} | 103 | [21; 22) 'y': &{unknown} |
104 | [25; 32) 'unknown': &{unknown} | 104 | [25; 32) 'unknown': &{unknown} |
105 | [38; 45) '[y, &y]': [&&{unknown};_] | 105 | [38; 45) '[y, &y]': [&&{unknown}; _] |
106 | [39; 40) 'y': &{unknown} | 106 | [39; 40) 'y': &{unknown} |
107 | [42; 44) '&y': &&{unknown} | 107 | [42; 44) '&y': &&{unknown} |
108 | [43; 44) 'y': &{unknown} | 108 | [43; 44) 'y': &{unknown} |
@@ -128,7 +128,7 @@ fn test() { | |||
128 | [25; 32) 'unknown': &&{unknown} | 128 | [25; 32) 'unknown': &&{unknown} |
129 | [42; 43) 'y': &&{unknown} | 129 | [42; 43) 'y': &&{unknown} |
130 | [46; 53) 'unknown': &&{unknown} | 130 | [46; 53) 'unknown': &&{unknown} |
131 | [59; 77) '[(x, y..., &x)]': [(&&&{unknown}, &&&{unknown});_] | 131 | [59; 77) '[(x, y..., &x)]': [(&&&{unknown}, &&&{unknown}); _] |
132 | [60; 66) '(x, y)': (&&&{unknown}, &&&{unknown}) | 132 | [60; 66) '(x, y)': (&&&{unknown}, &&&{unknown}) |
133 | [61; 62) 'x': &&{unknown} | 133 | [61; 62) 'x': &&{unknown} |
134 | [64; 65) 'y': &&{unknown} | 134 | [64; 65) 'y': &&{unknown} |
@@ -180,8 +180,8 @@ fn test_line_buffer() { | |||
180 | "#), | 180 | "#), |
181 | @r###" | 181 | @r###" |
182 | [23; 53) '{ ...n']; }': () | 182 | [23; 53) '{ ...n']; }': () |
183 | [29; 50) '&[0, b...b'\n']': &[u8;_] | 183 | [29; 50) '&[0, b...b'\n']': &[u8; _] |
184 | [30; 50) '[0, b'...b'\n']': [u8;_] | 184 | [30; 50) '[0, b'...b'\n']': [u8; _] |
185 | [31; 32) '0': u8 | 185 | [31; 32) '0': u8 |
186 | [34; 39) 'b'\n'': u8 | 186 | [34; 39) 'b'\n'': u8 |
187 | [41; 42) '1': u8 | 187 | [41; 42) '1': u8 |
@@ -346,7 +346,7 @@ pub fn main_loop() { | |||
346 | @r###" | 346 | @r###" |
347 | [144; 146) '{}': () | 347 | [144; 146) '{}': () |
348 | [169; 198) '{ ...t(); }': () | 348 | [169; 198) '{ ...t(); }': () |
349 | [175; 193) 'FxHash...efault': fn default<{unknown}, FxHasher>() -> HashSet<T, H> | 349 | [175; 193) 'FxHash...efault': fn default<{unknown}, FxHasher>() -> HashSet<{unknown}, FxHasher> |
350 | [175; 195) 'FxHash...ault()': HashSet<{unknown}, FxHasher> | 350 | [175; 195) 'FxHash...ault()': HashSet<{unknown}, FxHasher> |
351 | "### | 351 | "### |
352 | ); | 352 | ); |
diff --git a/crates/ra_hir_ty/src/tests/simple.rs b/crates/ra_hir_ty/src/tests/simple.rs index b7204ec00..3803f5938 100644 --- a/crates/ra_hir_ty/src/tests/simple.rs +++ b/crates/ra_hir_ty/src/tests/simple.rs | |||
@@ -28,7 +28,7 @@ mod boxed { | |||
28 | 28 | ||
29 | "#, | 29 | "#, |
30 | ); | 30 | ); |
31 | assert_eq!("(Box<i32>, Box<Box<i32>>, Box<&i32>, Box<[i32;_]>)", type_at_pos(&db, pos)); | 31 | assert_eq!("(Box<i32>, Box<Box<i32>>, Box<&i32>, Box<[i32; _]>)", type_at_pos(&db, pos)); |
32 | } | 32 | } |
33 | 33 | ||
34 | #[test] | 34 | #[test] |
@@ -754,15 +754,15 @@ fn test() { | |||
754 | [289; 295) 'self.0': T | 754 | [289; 295) 'self.0': T |
755 | [315; 353) '{ ...))); }': () | 755 | [315; 353) '{ ...))); }': () |
756 | [325; 326) 't': &i32 | 756 | [325; 326) 't': &i32 |
757 | [329; 335) 'A::foo': fn foo<i32>(&A<T>) -> &T | 757 | [329; 335) 'A::foo': fn foo<i32>(&A<i32>) -> &i32 |
758 | [329; 350) 'A::foo...42))))': &i32 | 758 | [329; 350) 'A::foo...42))))': &i32 |
759 | [336; 349) '&&B(B(A(42)))': &&B<B<A<i32>>> | 759 | [336; 349) '&&B(B(A(42)))': &&B<B<A<i32>>> |
760 | [337; 349) '&B(B(A(42)))': &B<B<A<i32>>> | 760 | [337; 349) '&B(B(A(42)))': &B<B<A<i32>>> |
761 | [338; 339) 'B': B<B<A<i32>>>(T) -> B<T> | 761 | [338; 339) 'B': B<B<A<i32>>>(B<A<i32>>) -> B<B<A<i32>>> |
762 | [338; 349) 'B(B(A(42)))': B<B<A<i32>>> | 762 | [338; 349) 'B(B(A(42)))': B<B<A<i32>>> |
763 | [340; 341) 'B': B<A<i32>>(T) -> B<T> | 763 | [340; 341) 'B': B<A<i32>>(A<i32>) -> B<A<i32>> |
764 | [340; 348) 'B(A(42))': B<A<i32>> | 764 | [340; 348) 'B(A(42))': B<A<i32>> |
765 | [342; 343) 'A': A<i32>(T) -> A<T> | 765 | [342; 343) 'A': A<i32>(i32) -> A<i32> |
766 | [342; 347) 'A(42)': A<i32> | 766 | [342; 347) 'A(42)': A<i32> |
767 | [344; 346) '42': i32 | 767 | [344; 346) '42': i32 |
768 | "### | 768 | "### |
@@ -817,16 +817,16 @@ fn test(a: A<i32>) { | |||
817 | [326; 327) 'a': A<i32> | 817 | [326; 327) 'a': A<i32> |
818 | [337; 383) '{ ...))); }': () | 818 | [337; 383) '{ ...))); }': () |
819 | [347; 348) 't': &i32 | 819 | [347; 348) 't': &i32 |
820 | [351; 352) 'A': A<i32>(*mut T) -> A<T> | 820 | [351; 352) 'A': A<i32>(*mut i32) -> A<i32> |
821 | [351; 365) 'A(0 as *mut _)': A<i32> | 821 | [351; 365) 'A(0 as *mut _)': A<i32> |
822 | [351; 380) 'A(0 as...B(a)))': &i32 | 822 | [351; 380) 'A(0 as...B(a)))': &i32 |
823 | [353; 354) '0': i32 | 823 | [353; 354) '0': i32 |
824 | [353; 364) '0 as *mut _': *mut i32 | 824 | [353; 364) '0 as *mut _': *mut i32 |
825 | [370; 379) '&&B(B(a))': &&B<B<A<i32>>> | 825 | [370; 379) '&&B(B(a))': &&B<B<A<i32>>> |
826 | [371; 379) '&B(B(a))': &B<B<A<i32>>> | 826 | [371; 379) '&B(B(a))': &B<B<A<i32>>> |
827 | [372; 373) 'B': B<B<A<i32>>>(T) -> B<T> | 827 | [372; 373) 'B': B<B<A<i32>>>(B<A<i32>>) -> B<B<A<i32>>> |
828 | [372; 379) 'B(B(a))': B<B<A<i32>>> | 828 | [372; 379) 'B(B(a))': B<B<A<i32>>> |
829 | [374; 375) 'B': B<A<i32>>(T) -> B<T> | 829 | [374; 375) 'B': B<A<i32>>(A<i32>) -> B<A<i32>> |
830 | [374; 378) 'B(a)': B<A<i32>> | 830 | [374; 378) 'B(a)': B<A<i32>> |
831 | [376; 377) 'a': A<i32> | 831 | [376; 377) 'a': A<i32> |
832 | "### | 832 | "### |
@@ -1061,55 +1061,55 @@ fn test(x: &str, y: isize) { | |||
1061 | [9; 10) 'x': &str | 1061 | [9; 10) 'x': &str |
1062 | [18; 19) 'y': isize | 1062 | [18; 19) 'y': isize |
1063 | [28; 293) '{ ... []; }': () | 1063 | [28; 293) '{ ... []; }': () |
1064 | [38; 39) 'a': [&str;_] | 1064 | [38; 39) 'a': [&str; _] |
1065 | [42; 45) '[x]': [&str;_] | 1065 | [42; 45) '[x]': [&str; _] |
1066 | [43; 44) 'x': &str | 1066 | [43; 44) 'x': &str |
1067 | [55; 56) 'b': [[&str;_];_] | 1067 | [55; 56) 'b': [[&str; _]; _] |
1068 | [59; 65) '[a, a]': [[&str;_];_] | 1068 | [59; 65) '[a, a]': [[&str; _]; _] |
1069 | [60; 61) 'a': [&str;_] | 1069 | [60; 61) 'a': [&str; _] |
1070 | [63; 64) 'a': [&str;_] | 1070 | [63; 64) 'a': [&str; _] |
1071 | [75; 76) 'c': [[[&str;_];_];_] | 1071 | [75; 76) 'c': [[[&str; _]; _]; _] |
1072 | [79; 85) '[b, b]': [[[&str;_];_];_] | 1072 | [79; 85) '[b, b]': [[[&str; _]; _]; _] |
1073 | [80; 81) 'b': [[&str;_];_] | 1073 | [80; 81) 'b': [[&str; _]; _] |
1074 | [83; 84) 'b': [[&str;_];_] | 1074 | [83; 84) 'b': [[&str; _]; _] |
1075 | [96; 97) 'd': [isize;_] | 1075 | [96; 97) 'd': [isize; _] |
1076 | [100; 112) '[y, 1, 2, 3]': [isize;_] | 1076 | [100; 112) '[y, 1, 2, 3]': [isize; _] |
1077 | [101; 102) 'y': isize | 1077 | [101; 102) 'y': isize |
1078 | [104; 105) '1': isize | 1078 | [104; 105) '1': isize |
1079 | [107; 108) '2': isize | 1079 | [107; 108) '2': isize |
1080 | [110; 111) '3': isize | 1080 | [110; 111) '3': isize |
1081 | [122; 123) 'd': [isize;_] | 1081 | [122; 123) 'd': [isize; _] |
1082 | [126; 138) '[1, y, 2, 3]': [isize;_] | 1082 | [126; 138) '[1, y, 2, 3]': [isize; _] |
1083 | [127; 128) '1': isize | 1083 | [127; 128) '1': isize |
1084 | [130; 131) 'y': isize | 1084 | [130; 131) 'y': isize |
1085 | [133; 134) '2': isize | 1085 | [133; 134) '2': isize |
1086 | [136; 137) '3': isize | 1086 | [136; 137) '3': isize |
1087 | [148; 149) 'e': [isize;_] | 1087 | [148; 149) 'e': [isize; _] |
1088 | [152; 155) '[y]': [isize;_] | 1088 | [152; 155) '[y]': [isize; _] |
1089 | [153; 154) 'y': isize | 1089 | [153; 154) 'y': isize |
1090 | [165; 166) 'f': [[isize;_];_] | 1090 | [165; 166) 'f': [[isize; _]; _] |
1091 | [169; 175) '[d, d]': [[isize;_];_] | 1091 | [169; 175) '[d, d]': [[isize; _]; _] |
1092 | [170; 171) 'd': [isize;_] | 1092 | [170; 171) 'd': [isize; _] |
1093 | [173; 174) 'd': [isize;_] | 1093 | [173; 174) 'd': [isize; _] |
1094 | [185; 186) 'g': [[isize;_];_] | 1094 | [185; 186) 'g': [[isize; _]; _] |
1095 | [189; 195) '[e, e]': [[isize;_];_] | 1095 | [189; 195) '[e, e]': [[isize; _]; _] |
1096 | [190; 191) 'e': [isize;_] | 1096 | [190; 191) 'e': [isize; _] |
1097 | [193; 194) 'e': [isize;_] | 1097 | [193; 194) 'e': [isize; _] |
1098 | [206; 207) 'h': [i32;_] | 1098 | [206; 207) 'h': [i32; _] |
1099 | [210; 216) '[1, 2]': [i32;_] | 1099 | [210; 216) '[1, 2]': [i32; _] |
1100 | [211; 212) '1': i32 | 1100 | [211; 212) '1': i32 |
1101 | [214; 215) '2': i32 | 1101 | [214; 215) '2': i32 |
1102 | [226; 227) 'i': [&str;_] | 1102 | [226; 227) 'i': [&str; _] |
1103 | [230; 240) '["a", "b"]': [&str;_] | 1103 | [230; 240) '["a", "b"]': [&str; _] |
1104 | [231; 234) '"a"': &str | 1104 | [231; 234) '"a"': &str |
1105 | [236; 239) '"b"': &str | 1105 | [236; 239) '"b"': &str |
1106 | [251; 252) 'b': [[&str;_];_] | 1106 | [251; 252) 'b': [[&str; _]; _] |
1107 | [255; 265) '[a, ["b"]]': [[&str;_];_] | 1107 | [255; 265) '[a, ["b"]]': [[&str; _]; _] |
1108 | [256; 257) 'a': [&str;_] | 1108 | [256; 257) 'a': [&str; _] |
1109 | [259; 264) '["b"]': [&str;_] | 1109 | [259; 264) '["b"]': [&str; _] |
1110 | [260; 263) '"b"': &str | 1110 | [260; 263) '"b"': &str |
1111 | [275; 276) 'x': [u8;_] | 1111 | [275; 276) 'x': [u8; _] |
1112 | [288; 290) '[]': [u8;_] | 1112 | [288; 290) '[]': [u8; _] |
1113 | "### | 1113 | "### |
1114 | ); | 1114 | ); |
1115 | } | 1115 | } |
@@ -1169,16 +1169,16 @@ fn test() { | |||
1169 | "#), | 1169 | "#), |
1170 | @r###" | 1170 | @r###" |
1171 | [76; 184) '{ ...one; }': () | 1171 | [76; 184) '{ ...one; }': () |
1172 | [82; 83) 'A': A<i32>(T) -> A<T> | 1172 | [82; 83) 'A': A<i32>(i32) -> A<i32> |
1173 | [82; 87) 'A(42)': A<i32> | 1173 | [82; 87) 'A(42)': A<i32> |
1174 | [84; 86) '42': i32 | 1174 | [84; 86) '42': i32 |
1175 | [93; 94) 'A': A<u128>(T) -> A<T> | 1175 | [93; 94) 'A': A<u128>(u128) -> A<u128> |
1176 | [93; 102) 'A(42u128)': A<u128> | 1176 | [93; 102) 'A(42u128)': A<u128> |
1177 | [95; 101) '42u128': u128 | 1177 | [95; 101) '42u128': u128 |
1178 | [108; 112) 'Some': Some<&str>(T) -> Option<T> | 1178 | [108; 112) 'Some': Some<&str>(&str) -> Option<&str> |
1179 | [108; 117) 'Some("x")': Option<&str> | 1179 | [108; 117) 'Some("x")': Option<&str> |
1180 | [113; 116) '"x"': &str | 1180 | [113; 116) '"x"': &str |
1181 | [123; 135) 'Option::Some': Some<&str>(T) -> Option<T> | 1181 | [123; 135) 'Option::Some': Some<&str>(&str) -> Option<&str> |
1182 | [123; 140) 'Option...e("x")': Option<&str> | 1182 | [123; 140) 'Option...e("x")': Option<&str> |
1183 | [136; 139) '"x"': &str | 1183 | [136; 139) '"x"': &str |
1184 | [146; 150) 'None': Option<{unknown}> | 1184 | [146; 150) 'None': Option<{unknown}> |
@@ -1205,14 +1205,14 @@ fn test() { | |||
1205 | [21; 26) '{ t }': T | 1205 | [21; 26) '{ t }': T |
1206 | [23; 24) 't': T | 1206 | [23; 24) 't': T |
1207 | [38; 98) '{ ...(1); }': () | 1207 | [38; 98) '{ ...(1); }': () |
1208 | [44; 46) 'id': fn id<u32>(T) -> T | 1208 | [44; 46) 'id': fn id<u32>(u32) -> u32 |
1209 | [44; 52) 'id(1u32)': u32 | 1209 | [44; 52) 'id(1u32)': u32 |
1210 | [47; 51) '1u32': u32 | 1210 | [47; 51) '1u32': u32 |
1211 | [58; 68) 'id::<i128>': fn id<i128>(T) -> T | 1211 | [58; 68) 'id::<i128>': fn id<i128>(i128) -> i128 |
1212 | [58; 71) 'id::<i128>(1)': i128 | 1212 | [58; 71) 'id::<i128>(1)': i128 |
1213 | [69; 70) '1': i128 | 1213 | [69; 70) '1': i128 |
1214 | [81; 82) 'x': u64 | 1214 | [81; 82) 'x': u64 |
1215 | [90; 92) 'id': fn id<u64>(T) -> T | 1215 | [90; 92) 'id': fn id<u64>(u64) -> u64 |
1216 | [90; 95) 'id(1)': u64 | 1216 | [90; 95) 'id(1)': u64 |
1217 | [93; 94) '1': u64 | 1217 | [93; 94) '1': u64 |
1218 | "### | 1218 | "### |
@@ -1220,7 +1220,7 @@ fn test() { | |||
1220 | } | 1220 | } |
1221 | 1221 | ||
1222 | #[test] | 1222 | #[test] |
1223 | fn infer_impl_generics() { | 1223 | fn infer_impl_generics_basic() { |
1224 | assert_snapshot!( | 1224 | assert_snapshot!( |
1225 | infer(r#" | 1225 | infer(r#" |
1226 | struct A<T1, T2> { | 1226 | struct A<T1, T2> { |
@@ -1349,16 +1349,16 @@ fn test() -> i128 { | |||
1349 | [146; 147) 'x': i128 | 1349 | [146; 147) 'x': i128 |
1350 | [150; 151) '1': i128 | 1350 | [150; 151) '1': i128 |
1351 | [162; 163) 'y': i128 | 1351 | [162; 163) 'y': i128 |
1352 | [166; 168) 'id': fn id<i128>(T) -> T | 1352 | [166; 168) 'id': fn id<i128>(i128) -> i128 |
1353 | [166; 171) 'id(x)': i128 | 1353 | [166; 171) 'id(x)': i128 |
1354 | [169; 170) 'x': i128 | 1354 | [169; 170) 'x': i128 |
1355 | [182; 183) 'a': A<i128> | 1355 | [182; 183) 'a': A<i128> |
1356 | [186; 200) 'A { x: id(y) }': A<i128> | 1356 | [186; 200) 'A { x: id(y) }': A<i128> |
1357 | [193; 195) 'id': fn id<i128>(T) -> T | 1357 | [193; 195) 'id': fn id<i128>(i128) -> i128 |
1358 | [193; 198) 'id(y)': i128 | 1358 | [193; 198) 'id(y)': i128 |
1359 | [196; 197) 'y': i128 | 1359 | [196; 197) 'y': i128 |
1360 | [211; 212) 'z': i128 | 1360 | [211; 212) 'z': i128 |
1361 | [215; 217) 'id': fn id<i128>(T) -> T | 1361 | [215; 217) 'id': fn id<i128>(i128) -> i128 |
1362 | [215; 222) 'id(a.x)': i128 | 1362 | [215; 222) 'id(a.x)': i128 |
1363 | [218; 219) 'a': A<i128> | 1363 | [218; 219) 'a': A<i128> |
1364 | [218; 221) 'a.x': i128 | 1364 | [218; 221) 'a.x': i128 |
@@ -1502,14 +1502,14 @@ fn test() { | |||
1502 | [78; 158) '{ ...(1); }': () | 1502 | [78; 158) '{ ...(1); }': () |
1503 | [88; 89) 'y': u32 | 1503 | [88; 89) 'y': u32 |
1504 | [92; 97) '10u32': u32 | 1504 | [92; 97) '10u32': u32 |
1505 | [103; 105) 'id': fn id<u32>(T) -> T | 1505 | [103; 105) 'id': fn id<u32>(u32) -> u32 |
1506 | [103; 108) 'id(y)': u32 | 1506 | [103; 108) 'id(y)': u32 |
1507 | [106; 107) 'y': u32 | 1507 | [106; 107) 'y': u32 |
1508 | [118; 119) 'x': bool | 1508 | [118; 119) 'x': bool |
1509 | [128; 133) 'clone': fn clone<bool>(&T) -> T | 1509 | [128; 133) 'clone': fn clone<bool>(&bool) -> bool |
1510 | [128; 136) 'clone(z)': bool | 1510 | [128; 136) 'clone(z)': bool |
1511 | [134; 135) 'z': &bool | 1511 | [134; 135) 'z': &bool |
1512 | [142; 152) 'id::<i128>': fn id<i128>(T) -> T | 1512 | [142; 152) 'id::<i128>': fn id<i128>(i128) -> i128 |
1513 | [142; 155) 'id::<i128>(1)': i128 | 1513 | [142; 155) 'id::<i128>(1)': i128 |
1514 | [153; 154) '1': i128 | 1514 | [153; 154) '1': i128 |
1515 | "### | 1515 | "### |
diff --git a/crates/ra_hir_ty/src/tests/traits.rs b/crates/ra_hir_ty/src/tests/traits.rs index 4b268510c..17611ddbf 100644 --- a/crates/ra_hir_ty/src/tests/traits.rs +++ b/crates/ra_hir_ty/src/tests/traits.rs | |||
@@ -1,7 +1,6 @@ | |||
1 | use insta::assert_snapshot; | 1 | use insta::assert_snapshot; |
2 | 2 | ||
3 | use ra_db::fixture::WithFixture; | 3 | use ra_db::fixture::WithFixture; |
4 | use test_utils::covers; | ||
5 | 4 | ||
6 | use super::{infer, infer_with_mismatches, type_at, type_at_pos}; | 5 | use super::{infer, infer_with_mismatches, type_at, type_at_pos}; |
7 | use crate::test_db::TestDB; | 6 | use crate::test_db::TestDB; |
@@ -261,10 +260,10 @@ fn test() { | |||
261 | [92; 94) '{}': () | 260 | [92; 94) '{}': () |
262 | [105; 144) '{ ...(s); }': () | 261 | [105; 144) '{ ...(s); }': () |
263 | [115; 116) 's': S<u32> | 262 | [115; 116) 's': S<u32> |
264 | [119; 120) 'S': S<u32>(T) -> S<T> | 263 | [119; 120) 'S': S<u32>(u32) -> S<u32> |
265 | [119; 129) 'S(unknown)': S<u32> | 264 | [119; 129) 'S(unknown)': S<u32> |
266 | [121; 128) 'unknown': u32 | 265 | [121; 128) 'unknown': u32 |
267 | [135; 138) 'foo': fn foo<S<u32>>(T) -> () | 266 | [135; 138) 'foo': fn foo<S<u32>>(S<u32>) -> () |
268 | [135; 141) 'foo(s)': () | 267 | [135; 141) 'foo(s)': () |
269 | [139; 140) 's': S<u32> | 268 | [139; 140) 's': S<u32> |
270 | "### | 269 | "### |
@@ -289,11 +288,11 @@ fn test() { | |||
289 | [98; 100) '{}': () | 288 | [98; 100) '{}': () |
290 | [111; 163) '{ ...(s); }': () | 289 | [111; 163) '{ ...(s); }': () |
291 | [121; 122) 's': S<u32> | 290 | [121; 122) 's': S<u32> |
292 | [125; 126) 'S': S<u32>(T) -> S<T> | 291 | [125; 126) 'S': S<u32>(u32) -> S<u32> |
293 | [125; 135) 'S(unknown)': S<u32> | 292 | [125; 135) 'S(unknown)': S<u32> |
294 | [127; 134) 'unknown': u32 | 293 | [127; 134) 'unknown': u32 |
295 | [145; 146) 'x': u32 | 294 | [145; 146) 'x': u32 |
296 | [154; 157) 'foo': fn foo<u32, S<u32>>(T) -> U | 295 | [154; 157) 'foo': fn foo<u32, S<u32>>(S<u32>) -> u32 |
297 | [154; 160) 'foo(s)': u32 | 296 | [154; 160) 'foo(s)': u32 |
298 | [158; 159) 's': S<u32> | 297 | [158; 159) 's': S<u32> |
299 | "### | 298 | "### |
@@ -358,15 +357,15 @@ fn test() { | |||
358 | [221; 223) '{}': () | 357 | [221; 223) '{}': () |
359 | [234; 300) '{ ...(S); }': () | 358 | [234; 300) '{ ...(S); }': () |
360 | [244; 245) 'x': u32 | 359 | [244; 245) 'x': u32 |
361 | [248; 252) 'foo1': fn foo1<S>(T) -> <T as Iterable>::Item | 360 | [248; 252) 'foo1': fn foo1<S>(S) -> <S as Iterable>::Item |
362 | [248; 255) 'foo1(S)': u32 | 361 | [248; 255) 'foo1(S)': u32 |
363 | [253; 254) 'S': S | 362 | [253; 254) 'S': S |
364 | [265; 266) 'y': u32 | 363 | [265; 266) 'y': u32 |
365 | [269; 273) 'foo2': fn foo2<S>(T) -> <T as Iterable>::Item | 364 | [269; 273) 'foo2': fn foo2<S>(S) -> <S as Iterable>::Item |
366 | [269; 276) 'foo2(S)': u32 | 365 | [269; 276) 'foo2(S)': u32 |
367 | [274; 275) 'S': S | 366 | [274; 275) 'S': S |
368 | [286; 287) 'z': u32 | 367 | [286; 287) 'z': u32 |
369 | [290; 294) 'foo3': fn foo3<S>(T) -> <T as Iterable>::Item | 368 | [290; 294) 'foo3': fn foo3<S>(S) -> <S as Iterable>::Item |
370 | [290; 297) 'foo3(S)': u32 | 369 | [290; 297) 'foo3(S)': u32 |
371 | [295; 296) 'S': S | 370 | [295; 296) 'S': S |
372 | "### | 371 | "### |
@@ -479,7 +478,7 @@ fn indexing_arrays() { | |||
479 | @r###" | 478 | @r###" |
480 | [10; 26) '{ &mut...[2]; }': () | 479 | [10; 26) '{ &mut...[2]; }': () |
481 | [12; 23) '&mut [9][2]': &mut {unknown} | 480 | [12; 23) '&mut [9][2]': &mut {unknown} |
482 | [17; 20) '[9]': [i32;_] | 481 | [17; 20) '[9]': [i32; _] |
483 | [17; 23) '[9][2]': {unknown} | 482 | [17; 23) '[9][2]': {unknown} |
484 | [18; 19) '9': i32 | 483 | [18; 19) '9': i32 |
485 | [21; 22) '2': i32 | 484 | [21; 22) '2': i32 |
@@ -822,8 +821,7 @@ fn test<T: ApplyL>() { | |||
822 | "#, | 821 | "#, |
823 | ); | 822 | ); |
824 | // inside the generic function, the associated type gets normalized to a placeholder `ApplL::Out<T>` [https://rust-lang.github.io/rustc-guide/traits/associated-types.html#placeholder-associated-types]. | 823 | // inside the generic function, the associated type gets normalized to a placeholder `ApplL::Out<T>` [https://rust-lang.github.io/rustc-guide/traits/associated-types.html#placeholder-associated-types]. |
825 | // FIXME: fix type parameter names going missing when going through Chalk | 824 | assert_eq!(t, "ApplyL::Out<T>"); |
826 | assert_eq!(t, "ApplyL::Out<[missing name]>"); | ||
827 | } | 825 | } |
828 | 826 | ||
829 | #[test] | 827 | #[test] |
@@ -850,6 +848,198 @@ fn test<T: ApplyL>(t: T) { | |||
850 | } | 848 | } |
851 | 849 | ||
852 | #[test] | 850 | #[test] |
851 | fn argument_impl_trait() { | ||
852 | assert_snapshot!( | ||
853 | infer_with_mismatches(r#" | ||
854 | trait Trait<T> { | ||
855 | fn foo(&self) -> T; | ||
856 | fn foo2(&self) -> i64; | ||
857 | } | ||
858 | fn bar(x: impl Trait<u16>) {} | ||
859 | struct S<T>(T); | ||
860 | impl<T> Trait<T> for S<T> {} | ||
861 | |||
862 | fn test(x: impl Trait<u64>, y: &impl Trait<u32>) { | ||
863 | x; | ||
864 | y; | ||
865 | let z = S(1); | ||
866 | bar(z); | ||
867 | x.foo(); | ||
868 | y.foo(); | ||
869 | z.foo(); | ||
870 | x.foo2(); | ||
871 | y.foo2(); | ||
872 | z.foo2(); | ||
873 | } | ||
874 | "#, true), | ||
875 | @r###" | ||
876 | [30; 34) 'self': &Self | ||
877 | [55; 59) 'self': &Self | ||
878 | [78; 79) 'x': impl Trait<u16> | ||
879 | [98; 100) '{}': () | ||
880 | [155; 156) 'x': impl Trait<u64> | ||
881 | [175; 176) 'y': &impl Trait<u32> | ||
882 | [196; 324) '{ ...2(); }': () | ||
883 | [202; 203) 'x': impl Trait<u64> | ||
884 | [209; 210) 'y': &impl Trait<u32> | ||
885 | [220; 221) 'z': S<u16> | ||
886 | [224; 225) 'S': S<u16>(u16) -> S<u16> | ||
887 | [224; 228) 'S(1)': S<u16> | ||
888 | [226; 227) '1': u16 | ||
889 | [234; 237) 'bar': fn bar(S<u16>) -> () | ||
890 | [234; 240) 'bar(z)': () | ||
891 | [238; 239) 'z': S<u16> | ||
892 | [246; 247) 'x': impl Trait<u64> | ||
893 | [246; 253) 'x.foo()': u64 | ||
894 | [259; 260) 'y': &impl Trait<u32> | ||
895 | [259; 266) 'y.foo()': u32 | ||
896 | [272; 273) 'z': S<u16> | ||
897 | [272; 279) 'z.foo()': u16 | ||
898 | [285; 286) 'x': impl Trait<u64> | ||
899 | [285; 293) 'x.foo2()': i64 | ||
900 | [299; 300) 'y': &impl Trait<u32> | ||
901 | [299; 307) 'y.foo2()': i64 | ||
902 | [313; 314) 'z': S<u16> | ||
903 | [313; 321) 'z.foo2()': i64 | ||
904 | "### | ||
905 | ); | ||
906 | } | ||
907 | |||
908 | #[test] | ||
909 | fn argument_impl_trait_type_args_1() { | ||
910 | assert_snapshot!( | ||
911 | infer_with_mismatches(r#" | ||
912 | trait Trait {} | ||
913 | trait Foo { | ||
914 | // this function has an implicit Self param, an explicit type param, | ||
915 | // and an implicit impl Trait param! | ||
916 | fn bar<T>(x: impl Trait) -> T { loop {} } | ||
917 | } | ||
918 | fn foo<T>(x: impl Trait) -> T { loop {} } | ||
919 | struct S; | ||
920 | impl Trait for S {} | ||
921 | struct F; | ||
922 | impl Foo for F {} | ||
923 | |||
924 | fn test() { | ||
925 | Foo::bar(S); | ||
926 | <F as Foo>::bar(S); | ||
927 | F::bar(S); | ||
928 | Foo::bar::<u32>(S); | ||
929 | <F as Foo>::bar::<u32>(S); | ||
930 | |||
931 | foo(S); | ||
932 | foo::<u32>(S); | ||
933 | foo::<u32, i32>(S); // we should ignore the extraneous i32 | ||
934 | } | ||
935 | "#, true), | ||
936 | @r###" | ||
937 | [156; 157) 'x': impl Trait | ||
938 | [176; 187) '{ loop {} }': T | ||
939 | [178; 185) 'loop {}': ! | ||
940 | [183; 185) '{}': () | ||
941 | [200; 201) 'x': impl Trait | ||
942 | [220; 231) '{ loop {} }': T | ||
943 | [222; 229) 'loop {}': ! | ||
944 | [227; 229) '{}': () | ||
945 | [301; 510) '{ ... i32 }': () | ||
946 | [307; 315) 'Foo::bar': fn bar<{unknown}, {unknown}>(S) -> {unknown} | ||
947 | [307; 318) 'Foo::bar(S)': {unknown} | ||
948 | [316; 317) 'S': S | ||
949 | [324; 339) '<F as Foo>::bar': fn bar<F, {unknown}>(S) -> {unknown} | ||
950 | [324; 342) '<F as ...bar(S)': {unknown} | ||
951 | [340; 341) 'S': S | ||
952 | [348; 354) 'F::bar': fn bar<F, {unknown}>(S) -> {unknown} | ||
953 | [348; 357) 'F::bar(S)': {unknown} | ||
954 | [355; 356) 'S': S | ||
955 | [363; 378) 'Foo::bar::<u32>': fn bar<{unknown}, u32>(S) -> u32 | ||
956 | [363; 381) 'Foo::b...32>(S)': u32 | ||
957 | [379; 380) 'S': S | ||
958 | [387; 409) '<F as ...:<u32>': fn bar<F, u32>(S) -> u32 | ||
959 | [387; 412) '<F as ...32>(S)': u32 | ||
960 | [410; 411) 'S': S | ||
961 | [419; 422) 'foo': fn foo<{unknown}>(S) -> {unknown} | ||
962 | [419; 425) 'foo(S)': {unknown} | ||
963 | [423; 424) 'S': S | ||
964 | [431; 441) 'foo::<u32>': fn foo<u32>(S) -> u32 | ||
965 | [431; 444) 'foo::<u32>(S)': u32 | ||
966 | [442; 443) 'S': S | ||
967 | [450; 465) 'foo::<u32, i32>': fn foo<u32>(S) -> u32 | ||
968 | [450; 468) 'foo::<...32>(S)': u32 | ||
969 | [466; 467) 'S': S | ||
970 | "### | ||
971 | ); | ||
972 | } | ||
973 | |||
974 | #[test] | ||
975 | fn argument_impl_trait_type_args_2() { | ||
976 | assert_snapshot!( | ||
977 | infer_with_mismatches(r#" | ||
978 | trait Trait {} | ||
979 | struct S; | ||
980 | impl Trait for S {} | ||
981 | struct F<T>; | ||
982 | impl<T> F<T> { | ||
983 | fn foo<U>(self, x: impl Trait) -> (T, U) { loop {} } | ||
984 | } | ||
985 | |||
986 | fn test() { | ||
987 | F.foo(S); | ||
988 | F::<u32>.foo(S); | ||
989 | F::<u32>.foo::<i32>(S); | ||
990 | F::<u32>.foo::<i32, u32>(S); // extraneous argument should be ignored | ||
991 | } | ||
992 | "#, true), | ||
993 | @r###" | ||
994 | [88; 92) 'self': F<T> | ||
995 | [94; 95) 'x': impl Trait | ||
996 | [119; 130) '{ loop {} }': (T, U) | ||
997 | [121; 128) 'loop {}': ! | ||
998 | [126; 128) '{}': () | ||
999 | [144; 284) '{ ...ored }': () | ||
1000 | [150; 151) 'F': F<{unknown}> | ||
1001 | [150; 158) 'F.foo(S)': ({unknown}, {unknown}) | ||
1002 | [156; 157) 'S': S | ||
1003 | [164; 172) 'F::<u32>': F<u32> | ||
1004 | [164; 179) 'F::<u32>.foo(S)': (u32, {unknown}) | ||
1005 | [177; 178) 'S': S | ||
1006 | [185; 193) 'F::<u32>': F<u32> | ||
1007 | [185; 207) 'F::<u3...32>(S)': (u32, i32) | ||
1008 | [205; 206) 'S': S | ||
1009 | [213; 221) 'F::<u32>': F<u32> | ||
1010 | [213; 240) 'F::<u3...32>(S)': (u32, i32) | ||
1011 | [238; 239) 'S': S | ||
1012 | "### | ||
1013 | ); | ||
1014 | } | ||
1015 | |||
1016 | #[test] | ||
1017 | fn argument_impl_trait_to_fn_pointer() { | ||
1018 | assert_snapshot!( | ||
1019 | infer_with_mismatches(r#" | ||
1020 | trait Trait {} | ||
1021 | fn foo(x: impl Trait) { loop {} } | ||
1022 | struct S; | ||
1023 | impl Trait for S {} | ||
1024 | |||
1025 | fn test() { | ||
1026 | let f: fn(S) -> () = foo; | ||
1027 | } | ||
1028 | "#, true), | ||
1029 | @r###" | ||
1030 | [23; 24) 'x': impl Trait | ||
1031 | [38; 49) '{ loop {} }': () | ||
1032 | [40; 47) 'loop {}': ! | ||
1033 | [45; 47) '{}': () | ||
1034 | [91; 124) '{ ...foo; }': () | ||
1035 | [101; 102) 'f': fn(S) -> () | ||
1036 | [118; 121) 'foo': fn foo(S) -> () | ||
1037 | "### | ||
1038 | ); | ||
1039 | } | ||
1040 | |||
1041 | #[test] | ||
1042 | #[ignore] | ||
853 | fn impl_trait() { | 1043 | fn impl_trait() { |
854 | assert_snapshot!( | 1044 | assert_snapshot!( |
855 | infer(r#" | 1045 | infer(r#" |
@@ -993,34 +1183,23 @@ fn weird_bounds() { | |||
993 | assert_snapshot!( | 1183 | assert_snapshot!( |
994 | infer(r#" | 1184 | infer(r#" |
995 | trait Trait {} | 1185 | trait Trait {} |
996 | fn test() { | 1186 | fn test(a: impl Trait + 'lifetime, b: impl 'lifetime, c: impl (Trait), d: impl ('lifetime), e: impl ?Sized, f: impl Trait + ?Sized) { |
997 | let a: impl Trait + 'lifetime = foo; | ||
998 | let b: impl 'lifetime = foo; | ||
999 | let b: impl (Trait) = foo; | ||
1000 | let b: impl ('lifetime) = foo; | ||
1001 | let d: impl ?Sized = foo; | ||
1002 | let e: impl Trait + ?Sized = foo; | ||
1003 | } | 1187 | } |
1004 | "#), | 1188 | "#), |
1005 | @r###" | 1189 | @r###" |
1006 | [26; 237) '{ ...foo; }': () | 1190 | [24; 25) 'a': impl Trait + {error} |
1007 | [36; 37) 'a': impl Trait + {error} | 1191 | [51; 52) 'b': impl {error} |
1008 | [64; 67) 'foo': impl Trait + {error} | 1192 | [70; 71) 'c': impl Trait |
1009 | [77; 78) 'b': impl {error} | 1193 | [87; 88) 'd': impl {error} |
1010 | [97; 100) 'foo': impl {error} | 1194 | [108; 109) 'e': impl {error} |
1011 | [110; 111) 'b': impl Trait | 1195 | [124; 125) 'f': impl Trait + {error} |
1012 | [128; 131) 'foo': impl Trait | 1196 | [148; 151) '{ }': () |
1013 | [141; 142) 'b': impl {error} | ||
1014 | [163; 166) 'foo': impl {error} | ||
1015 | [176; 177) 'd': impl {error} | ||
1016 | [193; 196) 'foo': impl {error} | ||
1017 | [206; 207) 'e': impl Trait + {error} | ||
1018 | [231; 234) 'foo': impl Trait + {error} | ||
1019 | "### | 1197 | "### |
1020 | ); | 1198 | ); |
1021 | } | 1199 | } |
1022 | 1200 | ||
1023 | #[test] | 1201 | #[test] |
1202 | #[ignore] | ||
1024 | fn error_bound_chalk() { | 1203 | fn error_bound_chalk() { |
1025 | let t = type_at( | 1204 | let t = type_at( |
1026 | r#" | 1205 | r#" |
@@ -1076,26 +1255,26 @@ fn test<T: Trait<Type = u32>>(x: T, y: impl Trait<Type = i64>) { | |||
1076 | [296; 299) 'get': fn get<T>(T) -> <T as Trait>::Type | 1255 | [296; 299) 'get': fn get<T>(T) -> <T as Trait>::Type |
1077 | [296; 302) 'get(x)': {unknown} | 1256 | [296; 302) 'get(x)': {unknown} |
1078 | [300; 301) 'x': T | 1257 | [300; 301) 'x': T |
1079 | [308; 312) 'get2': fn get2<{unknown}, T>(T) -> U | 1258 | [308; 312) 'get2': fn get2<{unknown}, T>(T) -> {unknown} |
1080 | [308; 315) 'get2(x)': {unknown} | 1259 | [308; 315) 'get2(x)': {unknown} |
1081 | [313; 314) 'x': T | 1260 | [313; 314) 'x': T |
1082 | [321; 324) 'get': fn get<impl Trait<Type = i64>>(T) -> <T as Trait>::Type | 1261 | [321; 324) 'get': fn get<impl Trait<Type = i64>>(impl Trait<Type = i64>) -> <impl Trait<Type = i64> as Trait>::Type |
1083 | [321; 327) 'get(y)': {unknown} | 1262 | [321; 327) 'get(y)': {unknown} |
1084 | [325; 326) 'y': impl Trait<Type = i64> | 1263 | [325; 326) 'y': impl Trait<Type = i64> |
1085 | [333; 337) 'get2': fn get2<{unknown}, impl Trait<Type = i64>>(T) -> U | 1264 | [333; 337) 'get2': fn get2<{unknown}, impl Trait<Type = i64>>(impl Trait<Type = i64>) -> {unknown} |
1086 | [333; 340) 'get2(y)': {unknown} | 1265 | [333; 340) 'get2(y)': {unknown} |
1087 | [338; 339) 'y': impl Trait<Type = i64> | 1266 | [338; 339) 'y': impl Trait<Type = i64> |
1088 | [346; 349) 'get': fn get<S<u64>>(T) -> <T as Trait>::Type | 1267 | [346; 349) 'get': fn get<S<u64>>(S<u64>) -> <S<u64> as Trait>::Type |
1089 | [346; 357) 'get(set(S))': u64 | 1268 | [346; 357) 'get(set(S))': u64 |
1090 | [350; 353) 'set': fn set<S<u64>>(T) -> T | 1269 | [350; 353) 'set': fn set<S<u64>>(S<u64>) -> S<u64> |
1091 | [350; 356) 'set(S)': S<u64> | 1270 | [350; 356) 'set(S)': S<u64> |
1092 | [354; 355) 'S': S<u64> | 1271 | [354; 355) 'S': S<u64> |
1093 | [363; 367) 'get2': fn get2<u64, S<u64>>(T) -> U | 1272 | [363; 367) 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64 |
1094 | [363; 375) 'get2(set(S))': u64 | 1273 | [363; 375) 'get2(set(S))': u64 |
1095 | [368; 371) 'set': fn set<S<u64>>(T) -> T | 1274 | [368; 371) 'set': fn set<S<u64>>(S<u64>) -> S<u64> |
1096 | [368; 374) 'set(S)': S<u64> | 1275 | [368; 374) 'set(S)': S<u64> |
1097 | [372; 373) 'S': S<u64> | 1276 | [372; 373) 'S': S<u64> |
1098 | [381; 385) 'get2': fn get2<str, S<str>>(T) -> U | 1277 | [381; 385) 'get2': fn get2<str, S<str>>(S<str>) -> str |
1099 | [381; 395) 'get2(S::<str>)': str | 1278 | [381; 395) 'get2(S::<str>)': str |
1100 | [386; 394) 'S::<str>': S<str> | 1279 | [386; 394) 'S::<str>': S<str> |
1101 | "### | 1280 | "### |
@@ -1223,6 +1402,32 @@ fn test<T: Trait1, U: Trait2>(x: T, y: U) { | |||
1223 | } | 1402 | } |
1224 | 1403 | ||
1225 | #[test] | 1404 | #[test] |
1405 | fn super_trait_impl_trait_method_resolution() { | ||
1406 | assert_snapshot!( | ||
1407 | infer(r#" | ||
1408 | mod foo { | ||
1409 | trait SuperTrait { | ||
1410 | fn foo(&self) -> u32 {} | ||
1411 | } | ||
1412 | } | ||
1413 | trait Trait1: foo::SuperTrait {} | ||
1414 | |||
1415 | fn test(x: &impl Trait1) { | ||
1416 | x.foo(); | ||
1417 | } | ||
1418 | "#), | ||
1419 | @r###" | ||
1420 | [50; 54) 'self': &Self | ||
1421 | [63; 65) '{}': () | ||
1422 | [116; 117) 'x': &impl Trait1 | ||
1423 | [133; 149) '{ ...o(); }': () | ||
1424 | [139; 140) 'x': &impl Trait1 | ||
1425 | [139; 146) 'x.foo()': u32 | ||
1426 | "### | ||
1427 | ); | ||
1428 | } | ||
1429 | |||
1430 | #[test] | ||
1226 | fn super_trait_cycle() { | 1431 | fn super_trait_cycle() { |
1227 | // This just needs to not crash | 1432 | // This just needs to not crash |
1228 | assert_snapshot!( | 1433 | assert_snapshot!( |
@@ -1268,9 +1473,9 @@ fn test() { | |||
1268 | [157; 160) '{t}': T | 1473 | [157; 160) '{t}': T |
1269 | [158; 159) 't': T | 1474 | [158; 159) 't': T |
1270 | [259; 280) '{ ...S)); }': () | 1475 | [259; 280) '{ ...S)); }': () |
1271 | [265; 269) 'get2': fn get2<u64, S<u64>>(T) -> U | 1476 | [265; 269) 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64 |
1272 | [265; 277) 'get2(set(S))': u64 | 1477 | [265; 277) 'get2(set(S))': u64 |
1273 | [270; 273) 'set': fn set<S<u64>>(T) -> T | 1478 | [270; 273) 'set': fn set<S<u64>>(S<u64>) -> S<u64> |
1274 | [270; 276) 'set(S)': S<u64> | 1479 | [270; 276) 'set(S)': S<u64> |
1275 | [274; 275) 'S': S<u64> | 1480 | [274; 275) 'S': S<u64> |
1276 | "### | 1481 | "### |
@@ -1332,7 +1537,7 @@ fn test() { | |||
1332 | [173; 175) '{}': () | 1537 | [173; 175) '{}': () |
1333 | [189; 308) '{ ... 1); }': () | 1538 | [189; 308) '{ ... 1); }': () |
1334 | [199; 200) 'x': Option<u32> | 1539 | [199; 200) 'x': Option<u32> |
1335 | [203; 215) 'Option::Some': Some<u32>(T) -> Option<T> | 1540 | [203; 215) 'Option::Some': Some<u32>(u32) -> Option<u32> |
1336 | [203; 221) 'Option...(1u32)': Option<u32> | 1541 | [203; 221) 'Option...(1u32)': Option<u32> |
1337 | [216; 220) '1u32': u32 | 1542 | [216; 220) '1u32': u32 |
1338 | [227; 228) 'x': Option<u32> | 1543 | [227; 228) 'x': Option<u32> |
@@ -1442,7 +1647,7 @@ fn test() { | |||
1442 | [340; 342) '{}': () | 1647 | [340; 342) '{}': () |
1443 | [356; 515) '{ ... S); }': () | 1648 | [356; 515) '{ ... S); }': () |
1444 | [366; 368) 'x1': u64 | 1649 | [366; 368) 'x1': u64 |
1445 | [371; 375) 'foo1': fn foo1<S, u64, |S| -> u64>(T, F) -> U | 1650 | [371; 375) 'foo1': fn foo1<S, u64, |S| -> u64>(S, |S| -> u64) -> u64 |
1446 | [371; 394) 'foo1(S...hod())': u64 | 1651 | [371; 394) 'foo1(S...hod())': u64 |
1447 | [376; 377) 'S': S | 1652 | [376; 377) 'S': S |
1448 | [379; 393) '|s| s.method()': |S| -> u64 | 1653 | [379; 393) '|s| s.method()': |S| -> u64 |
@@ -1450,7 +1655,7 @@ fn test() { | |||
1450 | [383; 384) 's': S | 1655 | [383; 384) 's': S |
1451 | [383; 393) 's.method()': u64 | 1656 | [383; 393) 's.method()': u64 |
1452 | [404; 406) 'x2': u64 | 1657 | [404; 406) 'x2': u64 |
1453 | [409; 413) 'foo2': fn foo2<S, u64, |S| -> u64>(F, T) -> U | 1658 | [409; 413) 'foo2': fn foo2<S, u64, |S| -> u64>(|S| -> u64, S) -> u64 |
1454 | [409; 432) 'foo2(|...(), S)': u64 | 1659 | [409; 432) 'foo2(|...(), S)': u64 |
1455 | [414; 428) '|s| s.method()': |S| -> u64 | 1660 | [414; 428) '|s| s.method()': |S| -> u64 |
1456 | [415; 416) 's': S | 1661 | [415; 416) 's': S |
@@ -1603,7 +1808,6 @@ fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> { | |||
1603 | 1808 | ||
1604 | #[test] | 1809 | #[test] |
1605 | fn unify_impl_trait() { | 1810 | fn unify_impl_trait() { |
1606 | covers!(insert_vars_for_impl_trait); | ||
1607 | assert_snapshot!( | 1811 | assert_snapshot!( |
1608 | infer_with_mismatches(r#" | 1812 | infer_with_mismatches(r#" |
1609 | trait Trait<T> {} | 1813 | trait Trait<T> {} |
@@ -1635,26 +1839,26 @@ fn test() -> impl Trait<i32> { | |||
1635 | [172; 183) '{ loop {} }': T | 1839 | [172; 183) '{ loop {} }': T |
1636 | [174; 181) 'loop {}': ! | 1840 | [174; 181) 'loop {}': ! |
1637 | [179; 181) '{}': () | 1841 | [179; 181) '{}': () |
1638 | [214; 310) '{ ...t()) }': S<i32> | 1842 | [214; 310) '{ ...t()) }': S<{unknown}> |
1639 | [224; 226) 's1': S<u32> | 1843 | [224; 226) 's1': S<u32> |
1640 | [229; 230) 'S': S<u32>(T) -> S<T> | 1844 | [229; 230) 'S': S<u32>(u32) -> S<u32> |
1641 | [229; 241) 'S(default())': S<u32> | 1845 | [229; 241) 'S(default())': S<u32> |
1642 | [231; 238) 'default': fn default<u32>() -> T | 1846 | [231; 238) 'default': fn default<u32>() -> u32 |
1643 | [231; 240) 'default()': u32 | 1847 | [231; 240) 'default()': u32 |
1644 | [247; 250) 'foo': fn foo(impl Trait<u32>) -> () | 1848 | [247; 250) 'foo': fn foo(S<u32>) -> () |
1645 | [247; 254) 'foo(s1)': () | 1849 | [247; 254) 'foo(s1)': () |
1646 | [251; 253) 's1': S<u32> | 1850 | [251; 253) 's1': S<u32> |
1647 | [264; 265) 'x': i32 | 1851 | [264; 265) 'x': i32 |
1648 | [273; 276) 'bar': fn bar<i32>(impl Trait<T>) -> T | 1852 | [273; 276) 'bar': fn bar<i32>(S<i32>) -> i32 |
1649 | [273; 290) 'bar(S(...lt()))': i32 | 1853 | [273; 290) 'bar(S(...lt()))': i32 |
1650 | [277; 278) 'S': S<i32>(T) -> S<T> | 1854 | [277; 278) 'S': S<i32>(i32) -> S<i32> |
1651 | [277; 289) 'S(default())': S<i32> | 1855 | [277; 289) 'S(default())': S<i32> |
1652 | [279; 286) 'default': fn default<i32>() -> T | 1856 | [279; 286) 'default': fn default<i32>() -> i32 |
1653 | [279; 288) 'default()': i32 | 1857 | [279; 288) 'default()': i32 |
1654 | [296; 297) 'S': S<i32>(T) -> S<T> | 1858 | [296; 297) 'S': S<{unknown}>({unknown}) -> S<{unknown}> |
1655 | [296; 308) 'S(default())': S<i32> | 1859 | [296; 308) 'S(default())': S<{unknown}> |
1656 | [298; 305) 'default': fn default<i32>() -> T | 1860 | [298; 305) 'default': fn default<{unknown}>() -> {unknown} |
1657 | [298; 307) 'default()': i32 | 1861 | [298; 307) 'default()': {unknown} |
1658 | "### | 1862 | "### |
1659 | ); | 1863 | ); |
1660 | } | 1864 | } |
diff --git a/crates/ra_hir_ty/src/traits.rs b/crates/ra_hir_ty/src/traits.rs index 4aabd66dc..88af61e87 100644 --- a/crates/ra_hir_ty/src/traits.rs +++ b/crates/ra_hir_ty/src/traits.rs | |||
@@ -50,10 +50,19 @@ impl TraitSolver { | |||
50 | Err(_) => ra_db::Canceled::throw(), | 50 | Err(_) => ra_db::Canceled::throw(), |
51 | }; | 51 | }; |
52 | 52 | ||
53 | let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL); | ||
54 | |||
53 | let solution = panic::catch_unwind({ | 55 | let solution = panic::catch_unwind({ |
54 | let solver = panic::AssertUnwindSafe(&mut solver); | 56 | let solver = panic::AssertUnwindSafe(&mut solver); |
55 | let context = panic::AssertUnwindSafe(&context); | 57 | let context = panic::AssertUnwindSafe(&context); |
56 | move || solver.0.solve(context.0, goal) | 58 | move || { |
59 | solver.0.solve_limited(context.0, goal, || { | ||
60 | context.0.db.check_canceled(); | ||
61 | let remaining = fuel.get(); | ||
62 | fuel.set(remaining - 1); | ||
63 | remaining > 0 | ||
64 | }) | ||
65 | } | ||
57 | }); | 66 | }); |
58 | 67 | ||
59 | let solution = match solution { | 68 | let solution = match solution { |
@@ -78,7 +87,9 @@ impl TraitSolver { | |||
78 | /// This controls the maximum size of types Chalk considers. If we set this too | 87 | /// This controls the maximum size of types Chalk considers. If we set this too |
79 | /// high, we can run into slow edge cases; if we set it too low, Chalk won't | 88 | /// high, we can run into slow edge cases; if we set it too low, Chalk won't |
80 | /// find some solutions. | 89 | /// find some solutions. |
81 | const CHALK_SOLVER_MAX_SIZE: usize = 4; | 90 | const CHALK_SOLVER_MAX_SIZE: usize = 10; |
91 | /// This controls how much 'time' we give the Chalk solver before giving up. | ||
92 | const CHALK_SOLVER_FUEL: i32 = 100; | ||
82 | 93 | ||
83 | #[derive(Debug, Copy, Clone)] | 94 | #[derive(Debug, Copy, Clone)] |
84 | struct ChalkContext<'a, DB> { | 95 | struct ChalkContext<'a, DB> { |
@@ -97,7 +108,8 @@ pub(crate) fn trait_solver_query( | |||
97 | } | 108 | } |
98 | 109 | ||
99 | fn create_chalk_solver() -> chalk_solve::Solver<TypeFamily> { | 110 | fn create_chalk_solver() -> chalk_solve::Solver<TypeFamily> { |
100 | let solver_choice = chalk_solve::SolverChoice::SLG { max_size: CHALK_SOLVER_MAX_SIZE }; | 111 | let solver_choice = |
112 | chalk_solve::SolverChoice::SLG { max_size: CHALK_SOLVER_MAX_SIZE, expected_answers: None }; | ||
101 | solver_choice.into_solver() | 113 | solver_choice.into_solver() |
102 | } | 114 | } |
103 | 115 | ||
@@ -232,7 +244,6 @@ fn solution_from_chalk( | |||
232 | let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<TypeFamily>>| { | 244 | let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<TypeFamily>>| { |
233 | let value = subst | 245 | let value = subst |
234 | .value | 246 | .value |
235 | .parameters | ||
236 | .into_iter() | 247 | .into_iter() |
237 | .map(|p| { | 248 | .map(|p| { |
238 | let ty = match p.ty() { | 249 | let ty = match p.ty() { |
diff --git a/crates/ra_hir_ty/src/traits/chalk.rs b/crates/ra_hir_ty/src/traits/chalk.rs index 555930c9b..4974c565b 100644 --- a/crates/ra_hir_ty/src/traits/chalk.rs +++ b/crates/ra_hir_ty/src/traits/chalk.rs | |||
@@ -3,7 +3,7 @@ use std::{fmt, sync::Arc}; | |||
3 | 3 | ||
4 | use log::debug; | 4 | use log::debug; |
5 | 5 | ||
6 | use chalk_ir::{cast::Cast, Parameter, PlaceholderIndex, TypeName, UniverseIndex}; | 6 | use chalk_ir::{cast::Cast, GoalData, Parameter, PlaceholderIndex, TypeName, UniverseIndex}; |
7 | 7 | ||
8 | use hir_def::{AssocContainerId, AssocItemId, GenericDefId, HasModule, Lookup, TypeAliasId}; | 8 | use hir_def::{AssocContainerId, AssocItemId, GenericDefId, HasModule, Lookup, TypeAliasId}; |
9 | use ra_db::{ | 9 | use ra_db::{ |
@@ -14,7 +14,7 @@ use ra_db::{ | |||
14 | use super::{builtin, AssocTyValue, Canonical, ChalkContext, Impl, Obligation}; | 14 | use super::{builtin, AssocTyValue, Canonical, ChalkContext, Impl, Obligation}; |
15 | use crate::{ | 15 | use crate::{ |
16 | db::HirDatabase, display::HirDisplay, utils::generics, ApplicationTy, GenericPredicate, | 16 | db::HirDatabase, display::HirDisplay, utils::generics, ApplicationTy, GenericPredicate, |
17 | ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk, | 17 | ProjectionTy, Substs, TraitRef, Ty, TypeCtor, |
18 | }; | 18 | }; |
19 | 19 | ||
20 | #[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)] | 20 | #[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)] |
@@ -24,6 +24,8 @@ impl chalk_ir::family::TypeFamily for TypeFamily { | |||
24 | type InternedType = Box<chalk_ir::TyData<Self>>; | 24 | type InternedType = Box<chalk_ir::TyData<Self>>; |
25 | type InternedLifetime = chalk_ir::LifetimeData<Self>; | 25 | type InternedLifetime = chalk_ir::LifetimeData<Self>; |
26 | type InternedParameter = chalk_ir::ParameterData<Self>; | 26 | type InternedParameter = chalk_ir::ParameterData<Self>; |
27 | type InternedGoal = Arc<GoalData<Self>>; | ||
28 | type InternedSubstitution = Vec<Parameter<Self>>; | ||
27 | type DefId = InternId; | 29 | type DefId = InternId; |
28 | 30 | ||
29 | // FIXME: implement these | 31 | // FIXME: implement these |
@@ -48,8 +50,8 @@ impl chalk_ir::family::TypeFamily for TypeFamily { | |||
48 | None | 50 | None |
49 | } | 51 | } |
50 | 52 | ||
51 | fn debug_projection( | 53 | fn debug_alias( |
52 | _projection: &chalk_ir::ProjectionTy<Self>, | 54 | _projection: &chalk_ir::AliasTy<Self>, |
53 | _fmt: &mut fmt::Formatter<'_>, | 55 | _fmt: &mut fmt::Formatter<'_>, |
54 | ) -> Option<fmt::Result> { | 56 | ) -> Option<fmt::Result> { |
55 | None | 57 | None |
@@ -78,6 +80,24 @@ impl chalk_ir::family::TypeFamily for TypeFamily { | |||
78 | fn parameter_data(parameter: &chalk_ir::ParameterData<Self>) -> &chalk_ir::ParameterData<Self> { | 80 | fn parameter_data(parameter: &chalk_ir::ParameterData<Self>) -> &chalk_ir::ParameterData<Self> { |
79 | parameter | 81 | parameter |
80 | } | 82 | } |
83 | |||
84 | fn intern_goal(goal: GoalData<Self>) -> Arc<GoalData<Self>> { | ||
85 | Arc::new(goal) | ||
86 | } | ||
87 | |||
88 | fn goal_data(goal: &Arc<GoalData<Self>>) -> &GoalData<Self> { | ||
89 | goal | ||
90 | } | ||
91 | |||
92 | fn intern_substitution<E>( | ||
93 | data: impl IntoIterator<Item = Result<Parameter<Self>, E>>, | ||
94 | ) -> Result<Vec<Parameter<Self>>, E> { | ||
95 | data.into_iter().collect() | ||
96 | } | ||
97 | |||
98 | fn substitution_data(substitution: &Vec<Parameter<Self>>) -> &[Parameter<Self>] { | ||
99 | substitution | ||
100 | } | ||
81 | } | 101 | } |
82 | 102 | ||
83 | impl chalk_ir::family::HasTypeFamily for TypeFamily { | 103 | impl chalk_ir::family::HasTypeFamily for TypeFamily { |
@@ -114,17 +134,21 @@ impl ToChalk for Ty { | |||
114 | match self { | 134 | match self { |
115 | Ty::Apply(apply_ty) => { | 135 | Ty::Apply(apply_ty) => { |
116 | let name = apply_ty.ctor.to_chalk(db); | 136 | let name = apply_ty.ctor.to_chalk(db); |
117 | let parameters = apply_ty.parameters.to_chalk(db); | 137 | let substitution = apply_ty.parameters.to_chalk(db); |
118 | chalk_ir::ApplicationTy { name, parameters }.cast().intern() | 138 | chalk_ir::ApplicationTy { name, substitution }.cast().intern() |
119 | } | 139 | } |
120 | Ty::Projection(proj_ty) => { | 140 | Ty::Projection(proj_ty) => { |
121 | let associated_ty_id = proj_ty.associated_ty.to_chalk(db); | 141 | let associated_ty_id = proj_ty.associated_ty.to_chalk(db); |
122 | let parameters = proj_ty.parameters.to_chalk(db); | 142 | let substitution = proj_ty.parameters.to_chalk(db); |
123 | chalk_ir::ProjectionTy { associated_ty_id, parameters }.cast().intern() | 143 | chalk_ir::AliasTy { associated_ty_id, substitution }.cast().intern() |
124 | } | 144 | } |
125 | Ty::Param { idx, .. } => { | 145 | Ty::Param(id) => { |
126 | PlaceholderIndex { ui: UniverseIndex::ROOT, idx: idx as usize } | 146 | let interned_id = db.intern_type_param_id(id); |
127 | .to_ty::<TypeFamily>() | 147 | PlaceholderIndex { |
148 | ui: UniverseIndex::ROOT, | ||
149 | idx: interned_id.as_intern_id().as_usize(), | ||
150 | } | ||
151 | .to_ty::<TypeFamily>() | ||
128 | } | 152 | } |
129 | Ty::Bound(idx) => chalk_ir::TyData::BoundVar(idx as usize).intern(), | 153 | Ty::Bound(idx) => chalk_ir::TyData::BoundVar(idx as usize).intern(), |
130 | Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"), | 154 | Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"), |
@@ -135,23 +159,13 @@ impl ToChalk for Ty { | |||
135 | .cloned() | 159 | .cloned() |
136 | .map(|p| p.to_chalk(db)) | 160 | .map(|p| p.to_chalk(db)) |
137 | .collect(); | 161 | .collect(); |
138 | let bounded_ty = chalk_ir::BoundedTy { bounds: make_binders(where_clauses, 1) }; | 162 | let bounded_ty = chalk_ir::DynTy { bounds: make_binders(where_clauses, 1) }; |
139 | chalk_ir::TyData::Dyn(bounded_ty).intern() | 163 | chalk_ir::TyData::Dyn(bounded_ty).intern() |
140 | } | 164 | } |
141 | Ty::Opaque(predicates) => { | 165 | Ty::Opaque(_) | Ty::Unknown => { |
142 | let where_clauses = predicates | 166 | let substitution = chalk_ir::Substitution::empty(); |
143 | .iter() | ||
144 | .filter(|p| !p.is_error()) | ||
145 | .cloned() | ||
146 | .map(|p| p.to_chalk(db)) | ||
147 | .collect(); | ||
148 | let bounded_ty = chalk_ir::BoundedTy { bounds: make_binders(where_clauses, 1) }; | ||
149 | chalk_ir::TyData::Opaque(bounded_ty).intern() | ||
150 | } | ||
151 | Ty::Unknown => { | ||
152 | let parameters = Vec::new(); | ||
153 | let name = TypeName::Error; | 167 | let name = TypeName::Error; |
154 | chalk_ir::ApplicationTy { name, parameters }.cast().intern() | 168 | chalk_ir::ApplicationTy { name, substitution }.cast().intern() |
155 | } | 169 | } |
156 | } | 170 | } |
157 | } | 171 | } |
@@ -161,20 +175,23 @@ impl ToChalk for Ty { | |||
161 | TypeName::Error => Ty::Unknown, | 175 | TypeName::Error => Ty::Unknown, |
162 | _ => { | 176 | _ => { |
163 | let ctor = from_chalk(db, apply_ty.name); | 177 | let ctor = from_chalk(db, apply_ty.name); |
164 | let parameters = from_chalk(db, apply_ty.parameters); | 178 | let parameters = from_chalk(db, apply_ty.substitution); |
165 | Ty::Apply(ApplicationTy { ctor, parameters }) | 179 | Ty::Apply(ApplicationTy { ctor, parameters }) |
166 | } | 180 | } |
167 | }, | 181 | }, |
168 | chalk_ir::TyData::Placeholder(idx) => { | 182 | chalk_ir::TyData::Placeholder(idx) => { |
169 | assert_eq!(idx.ui, UniverseIndex::ROOT); | 183 | assert_eq!(idx.ui, UniverseIndex::ROOT); |
170 | Ty::Param { idx: idx.idx as u32, name: crate::Name::missing() } | 184 | let interned_id = crate::db::GlobalTypeParamId::from_intern_id( |
185 | crate::salsa::InternId::from(idx.idx), | ||
186 | ); | ||
187 | Ty::Param(db.lookup_intern_type_param_id(interned_id)) | ||
171 | } | 188 | } |
172 | chalk_ir::TyData::Projection(proj) => { | 189 | chalk_ir::TyData::Alias(proj) => { |
173 | let associated_ty = from_chalk(db, proj.associated_ty_id); | 190 | let associated_ty = from_chalk(db, proj.associated_ty_id); |
174 | let parameters = from_chalk(db, proj.parameters); | 191 | let parameters = from_chalk(db, proj.substitution); |
175 | Ty::Projection(ProjectionTy { associated_ty, parameters }) | 192 | Ty::Projection(ProjectionTy { associated_ty, parameters }) |
176 | } | 193 | } |
177 | chalk_ir::TyData::ForAll(_) => unimplemented!(), | 194 | chalk_ir::TyData::Function(_) => unimplemented!(), |
178 | chalk_ir::TyData::BoundVar(idx) => Ty::Bound(idx as u32), | 195 | chalk_ir::TyData::BoundVar(idx) => Ty::Bound(idx as u32), |
179 | chalk_ir::TyData::InferenceVar(_iv) => Ty::Unknown, | 196 | chalk_ir::TyData::InferenceVar(_iv) => Ty::Unknown, |
180 | chalk_ir::TyData::Dyn(where_clauses) => { | 197 | chalk_ir::TyData::Dyn(where_clauses) => { |
@@ -183,27 +200,18 @@ impl ToChalk for Ty { | |||
183 | where_clauses.bounds.value.into_iter().map(|c| from_chalk(db, c)).collect(); | 200 | where_clauses.bounds.value.into_iter().map(|c| from_chalk(db, c)).collect(); |
184 | Ty::Dyn(predicates) | 201 | Ty::Dyn(predicates) |
185 | } | 202 | } |
186 | chalk_ir::TyData::Opaque(where_clauses) => { | ||
187 | assert_eq!(where_clauses.bounds.binders.len(), 1); | ||
188 | let predicates = | ||
189 | where_clauses.bounds.value.into_iter().map(|c| from_chalk(db, c)).collect(); | ||
190 | Ty::Opaque(predicates) | ||
191 | } | ||
192 | } | 203 | } |
193 | } | 204 | } |
194 | } | 205 | } |
195 | 206 | ||
196 | impl ToChalk for Substs { | 207 | impl ToChalk for Substs { |
197 | type Chalk = Vec<chalk_ir::Parameter<TypeFamily>>; | 208 | type Chalk = chalk_ir::Substitution<TypeFamily>; |
198 | 209 | ||
199 | fn to_chalk(self, db: &impl HirDatabase) -> Vec<Parameter<TypeFamily>> { | 210 | fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Substitution<TypeFamily> { |
200 | self.iter().map(|ty| ty.clone().to_chalk(db).cast()).collect() | 211 | chalk_ir::Substitution::from(self.iter().map(|ty| ty.clone().to_chalk(db))) |
201 | } | 212 | } |
202 | 213 | ||
203 | fn from_chalk( | 214 | fn from_chalk(db: &impl HirDatabase, parameters: chalk_ir::Substitution<TypeFamily>) -> Substs { |
204 | db: &impl HirDatabase, | ||
205 | parameters: Vec<chalk_ir::Parameter<TypeFamily>>, | ||
206 | ) -> Substs { | ||
207 | let tys = parameters | 215 | let tys = parameters |
208 | .into_iter() | 216 | .into_iter() |
209 | .map(|p| match p.ty() { | 217 | .map(|p| match p.ty() { |
@@ -220,13 +228,13 @@ impl ToChalk for TraitRef { | |||
220 | 228 | ||
221 | fn to_chalk(self: TraitRef, db: &impl HirDatabase) -> chalk_ir::TraitRef<TypeFamily> { | 229 | fn to_chalk(self: TraitRef, db: &impl HirDatabase) -> chalk_ir::TraitRef<TypeFamily> { |
222 | let trait_id = self.trait_.to_chalk(db); | 230 | let trait_id = self.trait_.to_chalk(db); |
223 | let parameters = self.substs.to_chalk(db); | 231 | let substitution = self.substs.to_chalk(db); |
224 | chalk_ir::TraitRef { trait_id, parameters } | 232 | chalk_ir::TraitRef { trait_id, substitution } |
225 | } | 233 | } |
226 | 234 | ||
227 | fn from_chalk(db: &impl HirDatabase, trait_ref: chalk_ir::TraitRef<TypeFamily>) -> Self { | 235 | fn from_chalk(db: &impl HirDatabase, trait_ref: chalk_ir::TraitRef<TypeFamily>) -> Self { |
228 | let trait_ = from_chalk(db, trait_ref.trait_id); | 236 | let trait_ = from_chalk(db, trait_ref.trait_id); |
229 | let substs = from_chalk(db, trait_ref.parameters); | 237 | let substs = from_chalk(db, trait_ref.substitution); |
230 | TraitRef { trait_, substs } | 238 | TraitRef { trait_, substs } |
231 | } | 239 | } |
232 | } | 240 | } |
@@ -317,8 +325,8 @@ impl ToChalk for GenericPredicate { | |||
317 | make_binders(chalk_ir::WhereClause::Implemented(trait_ref.to_chalk(db)), 0) | 325 | make_binders(chalk_ir::WhereClause::Implemented(trait_ref.to_chalk(db)), 0) |
318 | } | 326 | } |
319 | GenericPredicate::Projection(projection_pred) => make_binders( | 327 | GenericPredicate::Projection(projection_pred) => make_binders( |
320 | chalk_ir::WhereClause::ProjectionEq(chalk_ir::ProjectionEq { | 328 | chalk_ir::WhereClause::AliasEq(chalk_ir::AliasEq { |
321 | projection: projection_pred.projection_ty.to_chalk(db), | 329 | alias: projection_pred.projection_ty.to_chalk(db), |
322 | ty: projection_pred.ty.to_chalk(db), | 330 | ty: projection_pred.ty.to_chalk(db), |
323 | }), | 331 | }), |
324 | 0, | 332 | 0, |
@@ -335,8 +343,8 @@ impl ToChalk for GenericPredicate { | |||
335 | chalk_ir::WhereClause::Implemented(tr) => { | 343 | chalk_ir::WhereClause::Implemented(tr) => { |
336 | GenericPredicate::Implemented(from_chalk(db, tr)) | 344 | GenericPredicate::Implemented(from_chalk(db, tr)) |
337 | } | 345 | } |
338 | chalk_ir::WhereClause::ProjectionEq(projection_eq) => { | 346 | chalk_ir::WhereClause::AliasEq(projection_eq) => { |
339 | let projection_ty = from_chalk(db, projection_eq.projection); | 347 | let projection_ty = from_chalk(db, projection_eq.alias); |
340 | let ty = from_chalk(db, projection_eq.ty); | 348 | let ty = from_chalk(db, projection_eq.ty); |
341 | GenericPredicate::Projection(super::ProjectionPredicate { projection_ty, ty }) | 349 | GenericPredicate::Projection(super::ProjectionPredicate { projection_ty, ty }) |
342 | } | 350 | } |
@@ -345,22 +353,22 @@ impl ToChalk for GenericPredicate { | |||
345 | } | 353 | } |
346 | 354 | ||
347 | impl ToChalk for ProjectionTy { | 355 | impl ToChalk for ProjectionTy { |
348 | type Chalk = chalk_ir::ProjectionTy<TypeFamily>; | 356 | type Chalk = chalk_ir::AliasTy<TypeFamily>; |
349 | 357 | ||
350 | fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::ProjectionTy<TypeFamily> { | 358 | fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::AliasTy<TypeFamily> { |
351 | chalk_ir::ProjectionTy { | 359 | chalk_ir::AliasTy { |
352 | associated_ty_id: self.associated_ty.to_chalk(db), | 360 | associated_ty_id: self.associated_ty.to_chalk(db), |
353 | parameters: self.parameters.to_chalk(db), | 361 | substitution: self.parameters.to_chalk(db), |
354 | } | 362 | } |
355 | } | 363 | } |
356 | 364 | ||
357 | fn from_chalk( | 365 | fn from_chalk( |
358 | db: &impl HirDatabase, | 366 | db: &impl HirDatabase, |
359 | projection_ty: chalk_ir::ProjectionTy<TypeFamily>, | 367 | projection_ty: chalk_ir::AliasTy<TypeFamily>, |
360 | ) -> ProjectionTy { | 368 | ) -> ProjectionTy { |
361 | ProjectionTy { | 369 | ProjectionTy { |
362 | associated_ty: from_chalk(db, projection_ty.associated_ty_id), | 370 | associated_ty: from_chalk(db, projection_ty.associated_ty_id), |
363 | parameters: from_chalk(db, projection_ty.parameters), | 371 | parameters: from_chalk(db, projection_ty.substitution), |
364 | } | 372 | } |
365 | } | 373 | } |
366 | } | 374 | } |
@@ -369,10 +377,7 @@ impl ToChalk for super::ProjectionPredicate { | |||
369 | type Chalk = chalk_ir::Normalize<TypeFamily>; | 377 | type Chalk = chalk_ir::Normalize<TypeFamily>; |
370 | 378 | ||
371 | fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Normalize<TypeFamily> { | 379 | fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Normalize<TypeFamily> { |
372 | chalk_ir::Normalize { | 380 | chalk_ir::Normalize { alias: self.projection_ty.to_chalk(db), ty: self.ty.to_chalk(db) } |
373 | projection: self.projection_ty.to_chalk(db), | ||
374 | ty: self.ty.to_chalk(db), | ||
375 | } | ||
376 | } | 381 | } |
377 | 382 | ||
378 | fn from_chalk(_db: &impl HirDatabase, _normalize: chalk_ir::Normalize<TypeFamily>) -> Self { | 383 | fn from_chalk(_db: &impl HirDatabase, _normalize: chalk_ir::Normalize<TypeFamily>) -> Self { |
@@ -522,7 +527,7 @@ fn convert_where_clauses( | |||
522 | let generic_predicates = db.generic_predicates(def); | 527 | let generic_predicates = db.generic_predicates(def); |
523 | let mut result = Vec::with_capacity(generic_predicates.len()); | 528 | let mut result = Vec::with_capacity(generic_predicates.len()); |
524 | for pred in generic_predicates.iter() { | 529 | for pred in generic_predicates.iter() { |
525 | if pred.is_error() { | 530 | if pred.value.is_error() { |
526 | // skip errored predicates completely | 531 | // skip errored predicates completely |
527 | continue; | 532 | continue; |
528 | } | 533 | } |
@@ -711,12 +716,12 @@ fn impl_block_datum( | |||
711 | let trait_ref = db | 716 | let trait_ref = db |
712 | .impl_trait(impl_id) | 717 | .impl_trait(impl_id) |
713 | // ImplIds for impls where the trait ref can't be resolved should never reach Chalk | 718 | // ImplIds for impls where the trait ref can't be resolved should never reach Chalk |
714 | .expect("invalid impl passed to Chalk"); | 719 | .expect("invalid impl passed to Chalk") |
720 | .value; | ||
715 | let impl_data = db.impl_data(impl_id); | 721 | let impl_data = db.impl_data(impl_id); |
716 | 722 | ||
717 | let generic_params = generics(db, impl_id.into()); | 723 | let generic_params = generics(db, impl_id.into()); |
718 | let bound_vars = Substs::bound_vars(&generic_params); | 724 | let bound_vars = Substs::bound_vars(&generic_params); |
719 | let trait_ref = trait_ref.subst(&bound_vars); | ||
720 | let trait_ = trait_ref.trait_; | 725 | let trait_ = trait_ref.trait_; |
721 | let impl_type = if impl_id.lookup(db).container.module(db).krate == krate { | 726 | let impl_type = if impl_id.lookup(db).container.module(db).krate == krate { |
722 | chalk_rust_ir::ImplType::Local | 727 | chalk_rust_ir::ImplType::Local |
@@ -791,20 +796,18 @@ fn type_alias_associated_ty_value( | |||
791 | _ => panic!("assoc ty value should be in impl"), | 796 | _ => panic!("assoc ty value should be in impl"), |
792 | }; | 797 | }; |
793 | 798 | ||
794 | let trait_ref = db.impl_trait(impl_id).expect("assoc ty value should not exist"); // we don't return any assoc ty values if the impl'd trait can't be resolved | 799 | let trait_ref = db.impl_trait(impl_id).expect("assoc ty value should not exist").value; // we don't return any assoc ty values if the impl'd trait can't be resolved |
795 | 800 | ||
796 | let assoc_ty = db | 801 | let assoc_ty = db |
797 | .trait_data(trait_ref.trait_) | 802 | .trait_data(trait_ref.trait_) |
798 | .associated_type_by_name(&type_alias_data.name) | 803 | .associated_type_by_name(&type_alias_data.name) |
799 | .expect("assoc ty value should not exist"); // validated when building the impl data as well | 804 | .expect("assoc ty value should not exist"); // validated when building the impl data as well |
800 | let generic_params = generics(db, impl_id.into()); | 805 | let ty = db.ty(type_alias.into()); |
801 | let bound_vars = Substs::bound_vars(&generic_params); | 806 | let value_bound = chalk_rust_ir::AssociatedTyValueBound { ty: ty.value.to_chalk(db) }; |
802 | let ty = db.ty(type_alias.into()).subst(&bound_vars); | ||
803 | let value_bound = chalk_rust_ir::AssociatedTyValueBound { ty: ty.to_chalk(db) }; | ||
804 | let value = chalk_rust_ir::AssociatedTyValue { | 807 | let value = chalk_rust_ir::AssociatedTyValue { |
805 | impl_id: Impl::ImplBlock(impl_id.into()).to_chalk(db), | 808 | impl_id: Impl::ImplBlock(impl_id.into()).to_chalk(db), |
806 | associated_ty_id: assoc_ty.to_chalk(db), | 809 | associated_ty_id: assoc_ty.to_chalk(db), |
807 | value: make_binders(value_bound, bound_vars.len()), | 810 | value: make_binders(value_bound, ty.num_binders), |
808 | }; | 811 | }; |
809 | Arc::new(value) | 812 | Arc::new(value) |
810 | } | 813 | } |
diff --git a/crates/ra_hir_ty/src/utils.rs b/crates/ra_hir_ty/src/utils.rs index 0b1806a84..508ae9046 100644 --- a/crates/ra_hir_ty/src/utils.rs +++ b/crates/ra_hir_ty/src/utils.rs | |||
@@ -2,10 +2,11 @@ | |||
2 | //! query, but can't be computed directly from `*Data` (ie, which need a `db`). | 2 | //! query, but can't be computed directly from `*Data` (ie, which need a `db`). |
3 | use std::sync::Arc; | 3 | use std::sync::Arc; |
4 | 4 | ||
5 | use hir_def::generics::WherePredicateTarget; | ||
5 | use hir_def::{ | 6 | use hir_def::{ |
6 | adt::VariantData, | 7 | adt::VariantData, |
7 | db::DefDatabase, | 8 | db::DefDatabase, |
8 | generics::{GenericParams, TypeParamData}, | 9 | generics::{GenericParams, TypeParamData, TypeParamProvenance}, |
9 | path::Path, | 10 | path::Path, |
10 | resolver::{HasResolver, TypeNs}, | 11 | resolver::{HasResolver, TypeNs}, |
11 | type_ref::TypeRef, | 12 | type_ref::TypeRef, |
@@ -19,11 +20,18 @@ fn direct_super_traits(db: &impl DefDatabase, trait_: TraitId) -> Vec<TraitId> { | |||
19 | // lifetime problems, but since there usually shouldn't be more than a | 20 | // lifetime problems, but since there usually shouldn't be more than a |
20 | // few direct traits this should be fine (we could even use some kind of | 21 | // few direct traits this should be fine (we could even use some kind of |
21 | // SmallVec if performance is a concern) | 22 | // SmallVec if performance is a concern) |
22 | db.generic_params(trait_.into()) | 23 | let generic_params = db.generic_params(trait_.into()); |
24 | let trait_self = generic_params.find_trait_self_param(); | ||
25 | generic_params | ||
23 | .where_predicates | 26 | .where_predicates |
24 | .iter() | 27 | .iter() |
25 | .filter_map(|pred| match &pred.type_ref { | 28 | .filter_map(|pred| match &pred.target { |
26 | TypeRef::Path(p) if p == &Path::from(name![Self]) => pred.bound.as_path(), | 29 | WherePredicateTarget::TypeRef(TypeRef::Path(p)) if p == &Path::from(name![Self]) => { |
30 | pred.bound.as_path() | ||
31 | } | ||
32 | WherePredicateTarget::TypeParam(local_id) if Some(*local_id) == trait_self => { | ||
33 | pred.bound.as_path() | ||
34 | } | ||
27 | _ => None, | 35 | _ => None, |
28 | }) | 36 | }) |
29 | .filter_map(|path| match resolver.resolve_path_in_type_ns_fully(db, path.mod_path()) { | 37 | .filter_map(|path| match resolver.resolve_path_in_type_ns_fully(db, path.mod_path()) { |
@@ -95,41 +103,77 @@ pub(crate) struct Generics { | |||
95 | } | 103 | } |
96 | 104 | ||
97 | impl Generics { | 105 | impl Generics { |
98 | pub(crate) fn iter<'a>(&'a self) -> impl Iterator<Item = (u32, &'a TypeParamData)> + 'a { | 106 | pub(crate) fn iter<'a>( |
107 | &'a self, | ||
108 | ) -> impl Iterator<Item = (TypeParamId, &'a TypeParamData)> + 'a { | ||
99 | self.parent_generics | 109 | self.parent_generics |
100 | .as_ref() | 110 | .as_ref() |
101 | .into_iter() | 111 | .into_iter() |
102 | .flat_map(|it| it.params.types.iter()) | 112 | .flat_map(|it| { |
103 | .chain(self.params.types.iter()) | 113 | it.params |
104 | .enumerate() | 114 | .types |
105 | .map(|(i, (_local_id, p))| (i as u32, p)) | 115 | .iter() |
116 | .map(move |(local_id, p)| (TypeParamId { parent: it.def, local_id }, p)) | ||
117 | }) | ||
118 | .chain( | ||
119 | self.params | ||
120 | .types | ||
121 | .iter() | ||
122 | .map(move |(local_id, p)| (TypeParamId { parent: self.def, local_id }, p)), | ||
123 | ) | ||
106 | } | 124 | } |
107 | 125 | ||
108 | pub(crate) fn iter_parent<'a>(&'a self) -> impl Iterator<Item = (u32, &'a TypeParamData)> + 'a { | 126 | pub(crate) fn iter_parent<'a>( |
109 | self.parent_generics | 127 | &'a self, |
110 | .as_ref() | 128 | ) -> impl Iterator<Item = (TypeParamId, &'a TypeParamData)> + 'a { |
111 | .into_iter() | 129 | self.parent_generics.as_ref().into_iter().flat_map(|it| { |
112 | .flat_map(|it| it.params.types.iter()) | 130 | it.params |
113 | .enumerate() | 131 | .types |
114 | .map(|(i, (_local_id, p))| (i as u32, p)) | 132 | .iter() |
133 | .map(move |(local_id, p)| (TypeParamId { parent: it.def, local_id }, p)) | ||
134 | }) | ||
115 | } | 135 | } |
116 | 136 | ||
117 | pub(crate) fn len(&self) -> usize { | 137 | pub(crate) fn len(&self) -> usize { |
118 | self.len_split().0 | 138 | self.len_split().0 |
119 | } | 139 | } |
140 | |||
120 | /// (total, parents, child) | 141 | /// (total, parents, child) |
121 | pub(crate) fn len_split(&self) -> (usize, usize, usize) { | 142 | pub(crate) fn len_split(&self) -> (usize, usize, usize) { |
122 | let parent = self.parent_generics.as_ref().map_or(0, |p| p.len()); | 143 | let parent = self.parent_generics.as_ref().map_or(0, |p| p.len()); |
123 | let child = self.params.types.len(); | 144 | let child = self.params.types.len(); |
124 | (parent + child, parent, child) | 145 | (parent + child, parent, child) |
125 | } | 146 | } |
126 | pub(crate) fn param_idx(&self, param: TypeParamId) -> u32 { | 147 | |
127 | self.find_param(param).0 | 148 | /// (parent total, self param, type param list, impl trait) |
149 | pub(crate) fn provenance_split(&self) -> (usize, usize, usize, usize) { | ||
150 | let parent = self.parent_generics.as_ref().map_or(0, |p| p.len()); | ||
151 | let self_params = self | ||
152 | .params | ||
153 | .types | ||
154 | .iter() | ||
155 | .filter(|(_, p)| p.provenance == TypeParamProvenance::TraitSelf) | ||
156 | .count(); | ||
157 | let list_params = self | ||
158 | .params | ||
159 | .types | ||
160 | .iter() | ||
161 | .filter(|(_, p)| p.provenance == TypeParamProvenance::TypeParamList) | ||
162 | .count(); | ||
163 | let impl_trait_params = self | ||
164 | .params | ||
165 | .types | ||
166 | .iter() | ||
167 | .filter(|(_, p)| p.provenance == TypeParamProvenance::ArgumentImplTrait) | ||
168 | .count(); | ||
169 | (parent, self_params, list_params, impl_trait_params) | ||
128 | } | 170 | } |
129 | pub(crate) fn param_name(&self, param: TypeParamId) -> Name { | 171 | |
130 | self.find_param(param).1.name.clone() | 172 | pub(crate) fn param_idx(&self, param: TypeParamId) -> Option<u32> { |
173 | Some(self.find_param(param)?.0) | ||
131 | } | 174 | } |
132 | fn find_param(&self, param: TypeParamId) -> (u32, &TypeParamData) { | 175 | |
176 | fn find_param(&self, param: TypeParamId) -> Option<(u32, &TypeParamData)> { | ||
133 | if param.parent == self.def { | 177 | if param.parent == self.def { |
134 | let (idx, (_local_id, data)) = self | 178 | let (idx, (_local_id, data)) = self |
135 | .params | 179 | .params |
@@ -139,9 +183,10 @@ impl Generics { | |||
139 | .find(|(_, (idx, _))| *idx == param.local_id) | 183 | .find(|(_, (idx, _))| *idx == param.local_id) |
140 | .unwrap(); | 184 | .unwrap(); |
141 | let (_total, parent_len, _child) = self.len_split(); | 185 | let (_total, parent_len, _child) = self.len_split(); |
142 | return ((parent_len + idx) as u32, data); | 186 | Some(((parent_len + idx) as u32, data)) |
187 | } else { | ||
188 | self.parent_generics.as_ref().and_then(|g| g.find_param(param)) | ||
143 | } | 189 | } |
144 | self.parent_generics.as_ref().unwrap().find_param(param) | ||
145 | } | 190 | } |
146 | } | 191 | } |
147 | 192 | ||
diff --git a/crates/ra_ide/Cargo.toml b/crates/ra_ide/Cargo.toml index 2c9f9dce0..9ace35229 100644 --- a/crates/ra_ide/Cargo.toml +++ b/crates/ra_ide/Cargo.toml | |||
@@ -28,6 +28,7 @@ once_cell = "1.2.0" | |||
28 | ra_syntax = { path = "../ra_syntax" } | 28 | ra_syntax = { path = "../ra_syntax" } |
29 | ra_text_edit = { path = "../ra_text_edit" } | 29 | ra_text_edit = { path = "../ra_text_edit" } |
30 | ra_db = { path = "../ra_db" } | 30 | ra_db = { path = "../ra_db" } |
31 | ra_ide_db = { path = "../ra_ide_db" } | ||
31 | ra_cfg = { path = "../ra_cfg" } | 32 | ra_cfg = { path = "../ra_cfg" } |
32 | ra_fmt = { path = "../ra_fmt" } | 33 | ra_fmt = { path = "../ra_fmt" } |
33 | ra_prof = { path = "../ra_prof" } | 34 | ra_prof = { path = "../ra_prof" } |
@@ -39,7 +40,7 @@ ra_assists = { path = "../ra_assists" } | |||
39 | hir = { path = "../ra_hir", package = "ra_hir" } | 40 | hir = { path = "../ra_hir", package = "ra_hir" } |
40 | 41 | ||
41 | [dev-dependencies] | 42 | [dev-dependencies] |
42 | insta = "0.12.0" | 43 | insta = "0.13.0" |
43 | 44 | ||
44 | [dev-dependencies.proptest] | 45 | [dev-dependencies.proptest] |
45 | version = "0.9.0" | 46 | version = "0.9.0" |
diff --git a/crates/ra_ide/src/assists.rs b/crates/ra_ide/src/assists.rs index a936900da..40d56a4f7 100644 --- a/crates/ra_ide/src/assists.rs +++ b/crates/ra_ide/src/assists.rs | |||
@@ -1,22 +1,23 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use ra_assists::{resolved_assists, AssistAction, AssistLabel}; | ||
3 | use ra_db::{FilePosition, FileRange}; | 4 | use ra_db::{FilePosition, FileRange}; |
5 | use ra_ide_db::RootDatabase; | ||
4 | 6 | ||
5 | use crate::{db::RootDatabase, FileId, SourceChange, SourceFileEdit}; | 7 | use crate::{FileId, SourceChange, SourceFileEdit}; |
6 | 8 | ||
7 | use either::Either; | ||
8 | pub use ra_assists::AssistId; | 9 | pub use ra_assists::AssistId; |
9 | use ra_assists::{AssistAction, AssistLabel}; | ||
10 | 10 | ||
11 | #[derive(Debug)] | 11 | #[derive(Debug)] |
12 | pub struct Assist { | 12 | pub struct Assist { |
13 | pub id: AssistId, | 13 | pub id: AssistId, |
14 | pub label: String, | 14 | pub label: String, |
15 | pub change_data: Either<SourceChange, Vec<SourceChange>>, | 15 | pub group_label: Option<String>, |
16 | pub source_change: SourceChange, | ||
16 | } | 17 | } |
17 | 18 | ||
18 | pub(crate) fn assists(db: &RootDatabase, frange: FileRange) -> Vec<Assist> { | 19 | pub(crate) fn assists(db: &RootDatabase, frange: FileRange) -> Vec<Assist> { |
19 | ra_assists::assists(db, frange) | 20 | resolved_assists(db, frange) |
20 | .into_iter() | 21 | .into_iter() |
21 | .map(|assist| { | 22 | .map(|assist| { |
22 | let file_id = frange.file_id; | 23 | let file_id = frange.file_id; |
@@ -24,17 +25,8 @@ pub(crate) fn assists(db: &RootDatabase, frange: FileRange) -> Vec<Assist> { | |||
24 | Assist { | 25 | Assist { |
25 | id: assist_label.id, | 26 | id: assist_label.id, |
26 | label: assist_label.label.clone(), | 27 | label: assist_label.label.clone(), |
27 | change_data: match assist.action_data { | 28 | group_label: assist.group_label.map(|it| it.0), |
28 | Either::Left(action) => { | 29 | source_change: action_to_edit(assist.action, file_id, assist_label), |
29 | Either::Left(action_to_edit(action, file_id, assist_label)) | ||
30 | } | ||
31 | Either::Right(actions) => Either::Right( | ||
32 | actions | ||
33 | .into_iter() | ||
34 | .map(|action| action_to_edit(action, file_id, assist_label)) | ||
35 | .collect(), | ||
36 | ), | ||
37 | }, | ||
38 | } | 30 | } |
39 | }) | 31 | }) |
40 | .collect() | 32 | .collect() |
@@ -46,9 +38,6 @@ fn action_to_edit( | |||
46 | assist_label: &AssistLabel, | 38 | assist_label: &AssistLabel, |
47 | ) -> SourceChange { | 39 | ) -> SourceChange { |
48 | let file_edit = SourceFileEdit { file_id, edit: action.edit }; | 40 | let file_edit = SourceFileEdit { file_id, edit: action.edit }; |
49 | SourceChange::source_file_edit( | 41 | SourceChange::source_file_edit(assist_label.label.clone(), file_edit) |
50 | action.label.unwrap_or_else(|| assist_label.label.clone()), | 42 | .with_cursor_opt(action.cursor_position.map(|offset| FilePosition { offset, file_id })) |
51 | file_edit, | ||
52 | ) | ||
53 | .with_cursor_opt(action.cursor_position.map(|offset| FilePosition { offset, file_id })) | ||
54 | } | 43 | } |
diff --git a/crates/ra_ide/src/call_hierarchy.rs b/crates/ra_ide/src/call_hierarchy.rs index aa5d60c7b..f984f40ad 100644 --- a/crates/ra_ide/src/call_hierarchy.rs +++ b/crates/ra_ide/src/call_hierarchy.rs | |||
@@ -3,6 +3,7 @@ | |||
3 | use indexmap::IndexMap; | 3 | use indexmap::IndexMap; |
4 | 4 | ||
5 | use hir::db::AstDatabase; | 5 | use hir::db::AstDatabase; |
6 | use ra_ide_db::RootDatabase; | ||
6 | use ra_syntax::{ | 7 | use ra_syntax::{ |
7 | ast::{self, DocCommentsOwner}, | 8 | ast::{self, DocCommentsOwner}, |
8 | match_ast, AstNode, TextRange, | 9 | match_ast, AstNode, TextRange, |
@@ -10,7 +11,6 @@ use ra_syntax::{ | |||
10 | 11 | ||
11 | use crate::{ | 12 | use crate::{ |
12 | call_info::FnCallNode, | 13 | call_info::FnCallNode, |
13 | db::RootDatabase, | ||
14 | display::{ShortLabel, ToNav}, | 14 | display::{ShortLabel, ToNav}, |
15 | expand::descend_into_macros, | 15 | expand::descend_into_macros, |
16 | goto_definition, references, FilePosition, NavigationTarget, RangeInfo, | 16 | goto_definition, references, FilePosition, NavigationTarget, RangeInfo, |
diff --git a/crates/ra_ide/src/call_info.rs b/crates/ra_ide/src/call_info.rs index 72a68522e..f2b29306e 100644 --- a/crates/ra_ide/src/call_info.rs +++ b/crates/ra_ide/src/call_info.rs | |||
@@ -1,15 +1,13 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | use hir::db::AstDatabase; | 2 | use hir::db::AstDatabase; |
3 | use ra_ide_db::RootDatabase; | ||
3 | use ra_syntax::{ | 4 | use ra_syntax::{ |
4 | ast::{self, ArgListOwner}, | 5 | ast::{self, ArgListOwner}, |
5 | match_ast, AstNode, SyntaxNode, | 6 | match_ast, AstNode, SyntaxNode, |
6 | }; | 7 | }; |
7 | |||
8 | use test_utils::tested_by; | 8 | use test_utils::tested_by; |
9 | 9 | ||
10 | use crate::{ | 10 | use crate::{expand::descend_into_macros, CallInfo, FilePosition, FunctionSignature}; |
11 | db::RootDatabase, expand::descend_into_macros, CallInfo, FilePosition, FunctionSignature, | ||
12 | }; | ||
13 | 11 | ||
14 | /// Computes parameter information for the given call expression. | 12 | /// Computes parameter information for the given call expression. |
15 | pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> { | 13 | pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> { |
diff --git a/crates/ra_ide/src/completion.rs b/crates/ra_ide/src/completion.rs index ad7f6ef26..4f24cd1f9 100644 --- a/crates/ra_ide/src/completion.rs +++ b/crates/ra_ide/src/completion.rs | |||
@@ -18,6 +18,7 @@ mod complete_macro_in_item_position; | |||
18 | mod complete_trait_impl; | 18 | mod complete_trait_impl; |
19 | 19 | ||
20 | use ra_db::SourceDatabase; | 20 | use ra_db::SourceDatabase; |
21 | use ra_ide_db::RootDatabase; | ||
21 | 22 | ||
22 | #[cfg(test)] | 23 | #[cfg(test)] |
23 | use crate::completion::completion_item::do_completion; | 24 | use crate::completion::completion_item::do_completion; |
@@ -26,7 +27,7 @@ use crate::{ | |||
26 | completion_context::CompletionContext, | 27 | completion_context::CompletionContext, |
27 | completion_item::{CompletionKind, Completions}, | 28 | completion_item::{CompletionKind, Completions}, |
28 | }, | 29 | }, |
29 | db, FilePosition, | 30 | FilePosition, |
30 | }; | 31 | }; |
31 | 32 | ||
32 | pub use crate::completion::completion_item::{ | 33 | pub use crate::completion::completion_item::{ |
@@ -55,7 +56,7 @@ pub use crate::completion::completion_item::{ | |||
55 | /// `foo` *should* be present among the completion variants. Filtering by | 56 | /// `foo` *should* be present among the completion variants. Filtering by |
56 | /// identifier prefix/fuzzy match should be done higher in the stack, together | 57 | /// identifier prefix/fuzzy match should be done higher in the stack, together |
57 | /// with ordering of completions (currently this is done by the client). | 58 | /// with ordering of completions (currently this is done by the client). |
58 | pub(crate) fn completions(db: &db::RootDatabase, position: FilePosition) -> Option<Completions> { | 59 | pub(crate) fn completions(db: &RootDatabase, position: FilePosition) -> Option<Completions> { |
59 | let original_parse = db.parse(position.file_id); | 60 | let original_parse = db.parse(position.file_id); |
60 | let ctx = CompletionContext::new(db, &original_parse, position)?; | 61 | let ctx = CompletionContext::new(db, &original_parse, position)?; |
61 | 62 | ||
diff --git a/crates/ra_ide/src/completion/complete_scope.rs b/crates/ra_ide/src/completion/complete_scope.rs index 458d7525e..e2ee86dd1 100644 --- a/crates/ra_ide/src/completion/complete_scope.rs +++ b/crates/ra_ide/src/completion/complete_scope.rs | |||
@@ -1,11 +1,6 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use ra_assists::auto_import_text_edit; | 3 | use crate::completion::{CompletionContext, Completions}; |
4 | use ra_syntax::{ast, AstNode, SmolStr}; | ||
5 | use ra_text_edit::TextEditBuilder; | ||
6 | use rustc_hash::FxHashMap; | ||
7 | |||
8 | use crate::completion::{CompletionContext, CompletionItem, CompletionKind, Completions}; | ||
9 | 4 | ||
10 | pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) { | 5 | pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) { |
11 | if !ctx.is_trivial_path { | 6 | if !ctx.is_trivial_path { |
@@ -15,120 +10,14 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) { | |||
15 | ctx.analyzer.process_all_names(ctx.db, &mut |name, res| { | 10 | ctx.analyzer.process_all_names(ctx.db, &mut |name, res| { |
16 | acc.add_resolution(ctx, name.to_string(), &res) | 11 | acc.add_resolution(ctx, name.to_string(), &res) |
17 | }); | 12 | }); |
18 | |||
19 | // auto-import | ||
20 | // We fetch ident from the original file, because we need to pre-filter auto-imports | ||
21 | if ast::NameRef::cast(ctx.token.parent()).is_some() { | ||
22 | let import_resolver = ImportResolver::new(); | ||
23 | let import_names = import_resolver.all_names(ctx.token.text()); | ||
24 | import_names.into_iter().for_each(|(name, path)| { | ||
25 | let edit = { | ||
26 | let mut builder = TextEditBuilder::default(); | ||
27 | builder.replace(ctx.source_range(), name.to_string()); | ||
28 | auto_import_text_edit( | ||
29 | &ctx.token.parent(), | ||
30 | &ctx.token.parent(), | ||
31 | &path, | ||
32 | &mut builder, | ||
33 | ); | ||
34 | builder.finish() | ||
35 | }; | ||
36 | |||
37 | // Hack: copied this check form conv.rs beacause auto import can produce edits | ||
38 | // that invalidate assert in conv_with. | ||
39 | if edit | ||
40 | .as_atoms() | ||
41 | .iter() | ||
42 | .filter(|atom| !ctx.source_range().is_subrange(&atom.delete)) | ||
43 | .all(|atom| ctx.source_range().intersection(&atom.delete).is_none()) | ||
44 | { | ||
45 | CompletionItem::new( | ||
46 | CompletionKind::Reference, | ||
47 | ctx.source_range(), | ||
48 | build_import_label(&name, &path), | ||
49 | ) | ||
50 | .text_edit(edit) | ||
51 | .add_to(acc); | ||
52 | } | ||
53 | }); | ||
54 | } | ||
55 | } | ||
56 | |||
57 | fn build_import_label(name: &str, path: &[SmolStr]) -> String { | ||
58 | let mut buf = String::with_capacity(64); | ||
59 | buf.push_str(name); | ||
60 | buf.push_str(" ("); | ||
61 | fmt_import_path(path, &mut buf); | ||
62 | buf.push_str(")"); | ||
63 | buf | ||
64 | } | ||
65 | |||
66 | fn fmt_import_path(path: &[SmolStr], buf: &mut String) { | ||
67 | let mut segments = path.iter(); | ||
68 | if let Some(s) = segments.next() { | ||
69 | buf.push_str(&s); | ||
70 | } | ||
71 | for s in segments { | ||
72 | buf.push_str("::"); | ||
73 | buf.push_str(&s); | ||
74 | } | ||
75 | } | ||
76 | |||
77 | #[derive(Debug, Clone, Default)] | ||
78 | pub(crate) struct ImportResolver { | ||
79 | // todo: use fst crate or something like that | ||
80 | dummy_names: Vec<(SmolStr, Vec<SmolStr>)>, | ||
81 | } | ||
82 | |||
83 | impl ImportResolver { | ||
84 | pub(crate) fn new() -> Self { | ||
85 | let dummy_names = vec![ | ||
86 | (SmolStr::new("fmt"), vec![SmolStr::new("std"), SmolStr::new("fmt")]), | ||
87 | (SmolStr::new("io"), vec![SmolStr::new("std"), SmolStr::new("io")]), | ||
88 | (SmolStr::new("iter"), vec![SmolStr::new("std"), SmolStr::new("iter")]), | ||
89 | (SmolStr::new("hash"), vec![SmolStr::new("std"), SmolStr::new("hash")]), | ||
90 | ( | ||
91 | SmolStr::new("Debug"), | ||
92 | vec![SmolStr::new("std"), SmolStr::new("fmt"), SmolStr::new("Debug")], | ||
93 | ), | ||
94 | ( | ||
95 | SmolStr::new("Display"), | ||
96 | vec![SmolStr::new("std"), SmolStr::new("fmt"), SmolStr::new("Display")], | ||
97 | ), | ||
98 | ( | ||
99 | SmolStr::new("Hash"), | ||
100 | vec![SmolStr::new("std"), SmolStr::new("hash"), SmolStr::new("Hash")], | ||
101 | ), | ||
102 | ( | ||
103 | SmolStr::new("Hasher"), | ||
104 | vec![SmolStr::new("std"), SmolStr::new("hash"), SmolStr::new("Hasher")], | ||
105 | ), | ||
106 | ( | ||
107 | SmolStr::new("Iterator"), | ||
108 | vec![SmolStr::new("std"), SmolStr::new("iter"), SmolStr::new("Iterator")], | ||
109 | ), | ||
110 | ]; | ||
111 | |||
112 | ImportResolver { dummy_names } | ||
113 | } | ||
114 | |||
115 | // Returns a map of importable items filtered by name. | ||
116 | // The map associates item name with its full path. | ||
117 | // todo: should return Resolutions | ||
118 | pub(crate) fn all_names(&self, name: &str) -> FxHashMap<SmolStr, Vec<SmolStr>> { | ||
119 | if name.len() > 1 { | ||
120 | self.dummy_names.iter().filter(|(n, _)| n.contains(name)).cloned().collect() | ||
121 | } else { | ||
122 | FxHashMap::default() | ||
123 | } | ||
124 | } | ||
125 | } | 13 | } |
126 | 14 | ||
127 | #[cfg(test)] | 15 | #[cfg(test)] |
128 | mod tests { | 16 | mod tests { |
129 | use crate::completion::{do_completion, CompletionItem, CompletionKind}; | ||
130 | use insta::assert_debug_snapshot; | 17 | use insta::assert_debug_snapshot; |
131 | 18 | ||
19 | use crate::completion::{do_completion, CompletionItem, CompletionKind}; | ||
20 | |||
132 | fn do_reference_completion(code: &str) -> Vec<CompletionItem> { | 21 | fn do_reference_completion(code: &str) -> Vec<CompletionItem> { |
133 | do_completion(code, CompletionKind::Reference) | 22 | do_completion(code, CompletionKind::Reference) |
134 | } | 23 | } |
diff --git a/crates/ra_ide/src/completion/complete_trait_impl.rs b/crates/ra_ide/src/completion/complete_trait_impl.rs index 528655fbc..0175f5e55 100644 --- a/crates/ra_ide/src/completion/complete_trait_impl.rs +++ b/crates/ra_ide/src/completion/complete_trait_impl.rs | |||
@@ -2,7 +2,7 @@ use crate::completion::{CompletionContext, Completions, CompletionItem, Completi | |||
2 | 2 | ||
3 | use ra_syntax::ast::{self, NameOwner, AstNode}; | 3 | use ra_syntax::ast::{self, NameOwner, AstNode}; |
4 | 4 | ||
5 | use hir::{self, db::HirDatabase}; | 5 | use hir::{self, db::HirDatabase, Docs}; |
6 | 6 | ||
7 | 7 | ||
8 | pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext) { | 8 | pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext) { |
diff --git a/crates/ra_ide/src/completion/completion_context.rs b/crates/ra_ide/src/completion/completion_context.rs index 18c91a840..8678a3234 100644 --- a/crates/ra_ide/src/completion/completion_context.rs +++ b/crates/ra_ide/src/completion/completion_context.rs | |||
@@ -1,5 +1,6 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use ra_ide_db::RootDatabase; | ||
3 | use ra_syntax::{ | 4 | use ra_syntax::{ |
4 | algo::{find_covering_element, find_node_at_offset}, | 5 | algo::{find_covering_element, find_node_at_offset}, |
5 | ast, AstNode, Parse, SourceFile, | 6 | ast, AstNode, Parse, SourceFile, |
@@ -8,13 +9,13 @@ use ra_syntax::{ | |||
8 | }; | 9 | }; |
9 | use ra_text_edit::AtomTextEdit; | 10 | use ra_text_edit::AtomTextEdit; |
10 | 11 | ||
11 | use crate::{db, FilePosition}; | 12 | use crate::FilePosition; |
12 | 13 | ||
13 | /// `CompletionContext` is created early during completion to figure out, where | 14 | /// `CompletionContext` is created early during completion to figure out, where |
14 | /// exactly is the cursor, syntax-wise. | 15 | /// exactly is the cursor, syntax-wise. |
15 | #[derive(Debug)] | 16 | #[derive(Debug)] |
16 | pub(crate) struct CompletionContext<'a> { | 17 | pub(crate) struct CompletionContext<'a> { |
17 | pub(super) db: &'a db::RootDatabase, | 18 | pub(super) db: &'a RootDatabase, |
18 | pub(super) analyzer: hir::SourceAnalyzer, | 19 | pub(super) analyzer: hir::SourceAnalyzer, |
19 | pub(super) offset: TextUnit, | 20 | pub(super) offset: TextUnit, |
20 | pub(super) token: SyntaxToken, | 21 | pub(super) token: SyntaxToken, |
@@ -49,7 +50,7 @@ pub(crate) struct CompletionContext<'a> { | |||
49 | 50 | ||
50 | impl<'a> CompletionContext<'a> { | 51 | impl<'a> CompletionContext<'a> { |
51 | pub(super) fn new( | 52 | pub(super) fn new( |
52 | db: &'a db::RootDatabase, | 53 | db: &'a RootDatabase, |
53 | original_parse: &'a Parse<ast::SourceFile>, | 54 | original_parse: &'a Parse<ast::SourceFile>, |
54 | position: FilePosition, | 55 | position: FilePosition, |
55 | ) -> Option<CompletionContext<'a>> { | 56 | ) -> Option<CompletionContext<'a>> { |
diff --git a/crates/ra_ide/src/diagnostics.rs b/crates/ra_ide/src/diagnostics.rs index f403b3bcf..22bd49723 100644 --- a/crates/ra_ide/src/diagnostics.rs +++ b/crates/ra_ide/src/diagnostics.rs | |||
@@ -5,6 +5,7 @@ use std::cell::RefCell; | |||
5 | use hir::diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink}; | 5 | use hir::diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink}; |
6 | use itertools::Itertools; | 6 | use itertools::Itertools; |
7 | use ra_db::{RelativePath, SourceDatabase, SourceDatabaseExt}; | 7 | use ra_db::{RelativePath, SourceDatabase, SourceDatabaseExt}; |
8 | use ra_ide_db::RootDatabase; | ||
8 | use ra_prof::profile; | 9 | use ra_prof::profile; |
9 | use ra_syntax::{ | 10 | use ra_syntax::{ |
10 | algo, | 11 | algo, |
@@ -13,7 +14,7 @@ use ra_syntax::{ | |||
13 | }; | 14 | }; |
14 | use ra_text_edit::{TextEdit, TextEditBuilder}; | 15 | use ra_text_edit::{TextEdit, TextEditBuilder}; |
15 | 16 | ||
16 | use crate::{db::RootDatabase, Diagnostic, FileId, FileSystemEdit, SourceChange, SourceFileEdit}; | 17 | use crate::{Diagnostic, FileId, FileSystemEdit, SourceChange, SourceFileEdit}; |
17 | 18 | ||
18 | #[derive(Debug, Copy, Clone)] | 19 | #[derive(Debug, Copy, Clone)] |
19 | pub enum Severity { | 20 | pub enum Severity { |
diff --git a/crates/ra_ide/src/display/function_signature.rs b/crates/ra_ide/src/display/function_signature.rs index 1e4a472b4..c23e08e9a 100644 --- a/crates/ra_ide/src/display/function_signature.rs +++ b/crates/ra_ide/src/display/function_signature.rs | |||
@@ -4,13 +4,11 @@ use std::fmt::{self, Display}; | |||
4 | 4 | ||
5 | use hir::{Docs, Documentation, HasSource, HirDisplay}; | 5 | use hir::{Docs, Documentation, HasSource, HirDisplay}; |
6 | use join_to_string::join; | 6 | use join_to_string::join; |
7 | use ra_ide_db::RootDatabase; | ||
7 | use ra_syntax::ast::{self, AstNode, NameOwner, VisibilityOwner}; | 8 | use ra_syntax::ast::{self, AstNode, NameOwner, VisibilityOwner}; |
8 | use std::convert::From; | 9 | use std::convert::From; |
9 | 10 | ||
10 | use crate::{ | 11 | use crate::display::{generic_parameters, where_predicates}; |
11 | db, | ||
12 | display::{generic_parameters, where_predicates}, | ||
13 | }; | ||
14 | 12 | ||
15 | #[derive(Debug)] | 13 | #[derive(Debug)] |
16 | pub enum CallableKind { | 14 | pub enum CallableKind { |
@@ -48,13 +46,13 @@ impl FunctionSignature { | |||
48 | self | 46 | self |
49 | } | 47 | } |
50 | 48 | ||
51 | pub(crate) fn from_hir(db: &db::RootDatabase, function: hir::Function) -> Self { | 49 | pub(crate) fn from_hir(db: &RootDatabase, function: hir::Function) -> Self { |
52 | let doc = function.docs(db); | 50 | let doc = function.docs(db); |
53 | let ast_node = function.source(db).value; | 51 | let ast_node = function.source(db).value; |
54 | FunctionSignature::from(&ast_node).with_doc_opt(doc) | 52 | FunctionSignature::from(&ast_node).with_doc_opt(doc) |
55 | } | 53 | } |
56 | 54 | ||
57 | pub(crate) fn from_struct(db: &db::RootDatabase, st: hir::Struct) -> Option<Self> { | 55 | pub(crate) fn from_struct(db: &RootDatabase, st: hir::Struct) -> Option<Self> { |
58 | let node: ast::StructDef = st.source(db).value; | 56 | let node: ast::StructDef = st.source(db).value; |
59 | match node.kind() { | 57 | match node.kind() { |
60 | ast::StructKind::Record(_) => return None, | 58 | ast::StructKind::Record(_) => return None, |
@@ -86,10 +84,7 @@ impl FunctionSignature { | |||
86 | ) | 84 | ) |
87 | } | 85 | } |
88 | 86 | ||
89 | pub(crate) fn from_enum_variant( | 87 | pub(crate) fn from_enum_variant(db: &RootDatabase, variant: hir::EnumVariant) -> Option<Self> { |
90 | db: &db::RootDatabase, | ||
91 | variant: hir::EnumVariant, | ||
92 | ) -> Option<Self> { | ||
93 | let node: ast::EnumVariant = variant.source(db).value; | 88 | let node: ast::EnumVariant = variant.source(db).value; |
94 | match node.kind() { | 89 | match node.kind() { |
95 | ast::StructKind::Record(_) | ast::StructKind::Unit => return None, | 90 | ast::StructKind::Record(_) | ast::StructKind::Unit => return None, |
@@ -126,7 +121,7 @@ impl FunctionSignature { | |||
126 | ) | 121 | ) |
127 | } | 122 | } |
128 | 123 | ||
129 | pub(crate) fn from_macro(db: &db::RootDatabase, macro_def: hir::MacroDef) -> Option<Self> { | 124 | pub(crate) fn from_macro(db: &RootDatabase, macro_def: hir::MacroDef) -> Option<Self> { |
130 | let node: ast::MacroCall = macro_def.source(db).value; | 125 | let node: ast::MacroCall = macro_def.source(db).value; |
131 | 126 | ||
132 | let params = vec![]; | 127 | let params = vec![]; |
diff --git a/crates/ra_ide/src/display/navigation_target.rs b/crates/ra_ide/src/display/navigation_target.rs index b2af3479c..906aab1eb 100644 --- a/crates/ra_ide/src/display/navigation_target.rs +++ b/crates/ra_ide/src/display/navigation_target.rs | |||
@@ -3,6 +3,7 @@ | |||
3 | use either::Either; | 3 | use either::Either; |
4 | use hir::{AssocItem, FieldSource, HasSource, InFile, ModuleSource}; | 4 | use hir::{AssocItem, FieldSource, HasSource, InFile, ModuleSource}; |
5 | use ra_db::{FileId, SourceDatabase}; | 5 | use ra_db::{FileId, SourceDatabase}; |
6 | use ra_ide_db::RootDatabase; | ||
6 | use ra_syntax::{ | 7 | use ra_syntax::{ |
7 | ast::{self, DocCommentsOwner, NameOwner}, | 8 | ast::{self, DocCommentsOwner, NameOwner}, |
8 | match_ast, AstNode, SmolStr, | 9 | match_ast, AstNode, SmolStr, |
@@ -10,7 +11,7 @@ use ra_syntax::{ | |||
10 | TextRange, | 11 | TextRange, |
11 | }; | 12 | }; |
12 | 13 | ||
13 | use crate::{db::RootDatabase, expand::original_range, FileSymbol}; | 14 | use crate::{expand::original_range, FileSymbol}; |
14 | 15 | ||
15 | use super::short_label::ShortLabel; | 16 | use super::short_label::ShortLabel; |
16 | 17 | ||
diff --git a/crates/ra_ide/src/expand.rs b/crates/ra_ide/src/expand.rs index b82259a3d..9f3aaa3a3 100644 --- a/crates/ra_ide/src/expand.rs +++ b/crates/ra_ide/src/expand.rs | |||
@@ -3,9 +3,10 @@ use std::iter::successors; | |||
3 | 3 | ||
4 | use hir::{InFile, Origin}; | 4 | use hir::{InFile, Origin}; |
5 | use ra_db::FileId; | 5 | use ra_db::FileId; |
6 | use ra_ide_db::RootDatabase; | ||
6 | use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken, TextRange}; | 7 | use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken, TextRange}; |
7 | 8 | ||
8 | use crate::{db::RootDatabase, FileRange}; | 9 | use crate::FileRange; |
9 | 10 | ||
10 | pub(crate) fn original_range(db: &RootDatabase, node: InFile<&SyntaxNode>) -> FileRange { | 11 | pub(crate) fn original_range(db: &RootDatabase, node: InFile<&SyntaxNode>) -> FileRange { |
11 | if let Some((range, Origin::Call)) = original_range_and_origin(db, node) { | 12 | if let Some((range, Origin::Call)) = original_range_and_origin(db, node) { |
@@ -79,6 +80,14 @@ pub(crate) fn descend_into_macros( | |||
79 | let source_analyzer = | 80 | let source_analyzer = |
80 | hir::SourceAnalyzer::new(db, src.with_value(src.value.parent()).as_ref(), None); | 81 | hir::SourceAnalyzer::new(db, src.with_value(src.value.parent()).as_ref(), None); |
81 | 82 | ||
83 | descend_into_macros_with_analyzer(db, &source_analyzer, src) | ||
84 | } | ||
85 | |||
86 | pub(crate) fn descend_into_macros_with_analyzer( | ||
87 | db: &RootDatabase, | ||
88 | source_analyzer: &hir::SourceAnalyzer, | ||
89 | src: InFile<SyntaxToken>, | ||
90 | ) -> InFile<SyntaxToken> { | ||
82 | successors(Some(src), |token| { | 91 | successors(Some(src), |token| { |
83 | let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?; | 92 | let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?; |
84 | let tt = macro_call.token_tree()?; | 93 | let tt = macro_call.token_tree()?; |
diff --git a/crates/ra_ide/src/expand_macro.rs b/crates/ra_ide/src/expand_macro.rs index 0f7b6e875..af2783bef 100644 --- a/crates/ra_ide/src/expand_macro.rs +++ b/crates/ra_ide/src/expand_macro.rs | |||
@@ -1,14 +1,15 @@ | |||
1 | //! This modules implements "expand macro" functionality in the IDE | 1 | //! This modules implements "expand macro" functionality in the IDE |
2 | 2 | ||
3 | use crate::{db::RootDatabase, FilePosition}; | ||
4 | use hir::db::AstDatabase; | 3 | use hir::db::AstDatabase; |
5 | use ra_db::SourceDatabase; | 4 | use ra_db::SourceDatabase; |
6 | use rustc_hash::FxHashMap; | 5 | use ra_ide_db::RootDatabase; |
7 | |||
8 | use ra_syntax::{ | 6 | use ra_syntax::{ |
9 | algo::{find_node_at_offset, replace_descendants}, | 7 | algo::{find_node_at_offset, replace_descendants}, |
10 | ast, AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, WalkEvent, T, | 8 | ast, AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, WalkEvent, T, |
11 | }; | 9 | }; |
10 | use rustc_hash::FxHashMap; | ||
11 | |||
12 | use crate::FilePosition; | ||
12 | 13 | ||
13 | pub struct ExpandedMacro { | 14 | pub struct ExpandedMacro { |
14 | pub name: String, | 15 | pub name: String, |
@@ -185,7 +186,7 @@ fn some_thing() -> u32 { | |||
185 | //- /lib.rs | 186 | //- /lib.rs |
186 | macro_rules! match_ast { | 187 | macro_rules! match_ast { |
187 | (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; | 188 | (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; |
188 | 189 | ||
189 | (match ($node:expr) { | 190 | (match ($node:expr) { |
190 | $( ast::$ast:ident($it:ident) => $res:block, )* | 191 | $( ast::$ast:ident($it:ident) => $res:block, )* |
191 | _ => $catch_all:expr $(,)? | 192 | _ => $catch_all:expr $(,)? |
@@ -193,7 +194,7 @@ fn some_thing() -> u32 { | |||
193 | $( if let Some($it) = ast::$ast::cast($node.clone()) $res else )* | 194 | $( if let Some($it) = ast::$ast::cast($node.clone()) $res else )* |
194 | { $catch_all } | 195 | { $catch_all } |
195 | }}; | 196 | }}; |
196 | } | 197 | } |
197 | 198 | ||
198 | fn main() { | 199 | fn main() { |
199 | mat<|>ch_ast! { | 200 | mat<|>ch_ast! { |
@@ -227,11 +228,11 @@ fn some_thing() -> u32 { | |||
227 | r#" | 228 | r#" |
228 | //- /lib.rs | 229 | //- /lib.rs |
229 | macro_rules! match_ast { | 230 | macro_rules! match_ast { |
230 | (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; | 231 | (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; |
231 | (match ($node:expr) {}) => {{}}; | 232 | (match ($node:expr) {}) => {{}}; |
232 | } | 233 | } |
233 | 234 | ||
234 | fn main() { | 235 | fn main() { |
235 | let p = f(|it| { | 236 | let p = f(|it| { |
236 | let res = mat<|>ch_ast! { match c {}}; | 237 | let res = mat<|>ch_ast! { match c {}}; |
237 | Some(res) | 238 | Some(res) |
@@ -254,9 +255,9 @@ fn some_thing() -> u32 { | |||
254 | } | 255 | } |
255 | macro_rules! foo { | 256 | macro_rules! foo { |
256 | () => {bar!()}; | 257 | () => {bar!()}; |
257 | } | 258 | } |
258 | 259 | ||
259 | fn main() { | 260 | fn main() { |
260 | let res = fo<|>o!(); | 261 | let res = fo<|>o!(); |
261 | } | 262 | } |
262 | "#, | 263 | "#, |
@@ -277,9 +278,9 @@ fn some_thing() -> u32 { | |||
277 | } | 278 | } |
278 | macro_rules! foo { | 279 | macro_rules! foo { |
279 | () => {$crate::bar!()}; | 280 | () => {$crate::bar!()}; |
280 | } | 281 | } |
281 | 282 | ||
282 | fn main() { | 283 | fn main() { |
283 | let res = fo<|>o!(); | 284 | let res = fo<|>o!(); |
284 | } | 285 | } |
285 | "#, | 286 | "#, |
diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs index 930e0c4c2..726963a33 100644 --- a/crates/ra_ide/src/extend_selection.rs +++ b/crates/ra_ide/src/extend_selection.rs | |||
@@ -1,6 +1,10 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use std::iter::successors; | ||
4 | |||
5 | use hir::db::AstDatabase; | ||
3 | use ra_db::SourceDatabase; | 6 | use ra_db::SourceDatabase; |
7 | use ra_ide_db::RootDatabase; | ||
4 | use ra_syntax::{ | 8 | use ra_syntax::{ |
5 | algo::find_covering_element, | 9 | algo::find_covering_element, |
6 | ast::{self, AstNode, AstToken}, | 10 | ast::{self, AstNode, AstToken}, |
@@ -9,9 +13,7 @@ use ra_syntax::{ | |||
9 | SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T, | 13 | SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T, |
10 | }; | 14 | }; |
11 | 15 | ||
12 | use crate::{db::RootDatabase, expand::descend_into_macros, FileId, FileRange}; | 16 | use crate::{expand::descend_into_macros, FileId, FileRange}; |
13 | use hir::db::AstDatabase; | ||
14 | use std::iter::successors; | ||
15 | 17 | ||
16 | pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { | 18 | pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { |
17 | let src = db.parse(frange.file_id).tree(); | 19 | let src = db.parse(frange.file_id).tree(); |
@@ -512,8 +514,8 @@ fn bar(){} | |||
512 | fn test_extend_trait_bounds_list_in_where_clause() { | 514 | fn test_extend_trait_bounds_list_in_where_clause() { |
513 | do_check( | 515 | do_check( |
514 | r#" | 516 | r#" |
515 | fn foo<R>() | 517 | fn foo<R>() |
516 | where | 518 | where |
517 | R: req::Request + 'static, | 519 | R: req::Request + 'static, |
518 | R::Params: DeserializeOwned<|> + panic::UnwindSafe + 'static, | 520 | R::Params: DeserializeOwned<|> + panic::UnwindSafe + 'static, |
519 | R::Result: Serialize + 'static, | 521 | R::Result: Serialize + 'static, |
diff --git a/crates/ra_ide/src/goto_definition.rs b/crates/ra_ide/src/goto_definition.rs index 5a12a619c..de5551a4c 100644 --- a/crates/ra_ide/src/goto_definition.rs +++ b/crates/ra_ide/src/goto_definition.rs | |||
@@ -1,6 +1,7 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use hir::{db::AstDatabase, InFile, SourceBinder}; | 3 | use hir::{db::AstDatabase, InFile, SourceBinder}; |
4 | use ra_ide_db::{symbol_index, RootDatabase}; | ||
4 | use ra_syntax::{ | 5 | use ra_syntax::{ |
5 | ast::{self, DocCommentsOwner}, | 6 | ast::{self, DocCommentsOwner}, |
6 | match_ast, AstNode, | 7 | match_ast, AstNode, |
@@ -9,7 +10,6 @@ use ra_syntax::{ | |||
9 | }; | 10 | }; |
10 | 11 | ||
11 | use crate::{ | 12 | use crate::{ |
12 | db::RootDatabase, | ||
13 | display::{ShortLabel, ToNav}, | 13 | display::{ShortLabel, ToNav}, |
14 | expand::descend_into_macros, | 14 | expand::descend_into_macros, |
15 | references::{classify_name_ref, NameKind::*}, | 15 | references::{classify_name_ref, NameKind::*}, |
@@ -76,11 +76,10 @@ pub(crate) fn reference_definition( | |||
76 | let name_kind = classify_name_ref(sb, name_ref).map(|d| d.kind); | 76 | let name_kind = classify_name_ref(sb, name_ref).map(|d| d.kind); |
77 | match name_kind { | 77 | match name_kind { |
78 | Some(Macro(it)) => return Exact(it.to_nav(sb.db)), | 78 | Some(Macro(it)) => return Exact(it.to_nav(sb.db)), |
79 | Some(Field(it)) => return Exact(it.to_nav(sb.db)), | 79 | Some(StructField(it)) => return Exact(it.to_nav(sb.db)), |
80 | Some(TypeParam(it)) => return Exact(it.to_nav(sb.db)), | 80 | Some(TypeParam(it)) => return Exact(it.to_nav(sb.db)), |
81 | Some(AssocItem(it)) => return Exact(it.to_nav(sb.db)), | ||
82 | Some(Local(it)) => return Exact(it.to_nav(sb.db)), | 81 | Some(Local(it)) => return Exact(it.to_nav(sb.db)), |
83 | Some(Def(def)) => match NavigationTarget::from_def(sb.db, def) { | 82 | Some(ModuleDef(def)) => match NavigationTarget::from_def(sb.db, def) { |
84 | Some(nav) => return Exact(nav), | 83 | Some(nav) => return Exact(nav), |
85 | None => return Approximate(vec![]), | 84 | None => return Approximate(vec![]), |
86 | }, | 85 | }, |
@@ -94,7 +93,7 @@ pub(crate) fn reference_definition( | |||
94 | }; | 93 | }; |
95 | 94 | ||
96 | // Fallback index based approach: | 95 | // Fallback index based approach: |
97 | let navs = crate::symbol_index::index_resolve(sb.db, name_ref.value) | 96 | let navs = symbol_index::index_resolve(sb.db, name_ref.value) |
98 | .into_iter() | 97 | .into_iter() |
99 | .map(|s| s.to_nav(sb.db)) | 98 | .map(|s| s.to_nav(sb.db)) |
100 | .collect(); | 99 | .collect(); |
diff --git a/crates/ra_ide/src/goto_type_definition.rs b/crates/ra_ide/src/goto_type_definition.rs index ce8b6c72a..11ad6d137 100644 --- a/crates/ra_ide/src/goto_type_definition.rs +++ b/crates/ra_ide/src/goto_type_definition.rs | |||
@@ -1,11 +1,11 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use hir::db::AstDatabase; | 3 | use hir::db::AstDatabase; |
4 | use ra_ide_db::RootDatabase; | ||
4 | use ra_syntax::{ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset}; | 5 | use ra_syntax::{ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset}; |
5 | 6 | ||
6 | use crate::{ | 7 | use crate::{ |
7 | db::RootDatabase, display::ToNav, expand::descend_into_macros, FilePosition, NavigationTarget, | 8 | display::ToNav, expand::descend_into_macros, FilePosition, NavigationTarget, RangeInfo, |
8 | RangeInfo, | ||
9 | }; | 9 | }; |
10 | 10 | ||
11 | pub(crate) fn goto_type_definition( | 11 | pub(crate) fn goto_type_definition( |
diff --git a/crates/ra_ide/src/hover.rs b/crates/ra_ide/src/hover.rs index 6661e5cb2..3f88bb260 100644 --- a/crates/ra_ide/src/hover.rs +++ b/crates/ra_ide/src/hover.rs | |||
@@ -2,6 +2,7 @@ | |||
2 | 2 | ||
3 | use hir::{db::AstDatabase, Adt, HasSource, HirDisplay, SourceBinder}; | 3 | use hir::{db::AstDatabase, Adt, HasSource, HirDisplay, SourceBinder}; |
4 | use ra_db::SourceDatabase; | 4 | use ra_db::SourceDatabase; |
5 | use ra_ide_db::RootDatabase; | ||
5 | use ra_syntax::{ | 6 | use ra_syntax::{ |
6 | algo::find_covering_element, | 7 | algo::find_covering_element, |
7 | ast::{self, DocCommentsOwner}, | 8 | ast::{self, DocCommentsOwner}, |
@@ -11,7 +12,6 @@ use ra_syntax::{ | |||
11 | }; | 12 | }; |
12 | 13 | ||
13 | use crate::{ | 14 | use crate::{ |
14 | db::RootDatabase, | ||
15 | display::{macro_label, rust_code_markup, rust_code_markup_with_doc, ShortLabel}, | 15 | display::{macro_label, rust_code_markup, rust_code_markup_with_doc, ShortLabel}, |
16 | expand::descend_into_macros, | 16 | expand::descend_into_macros, |
17 | references::{classify_name, classify_name_ref, NameKind, NameKind::*}, | 17 | references::{classify_name, classify_name_ref, NameKind, NameKind::*}, |
@@ -98,19 +98,14 @@ fn hover_text_from_name_kind(db: &RootDatabase, name_kind: NameKind) -> Option<S | |||
98 | let src = it.source(db); | 98 | let src = it.source(db); |
99 | hover_text(src.value.doc_comment_text(), Some(macro_label(&src.value))) | 99 | hover_text(src.value.doc_comment_text(), Some(macro_label(&src.value))) |
100 | } | 100 | } |
101 | Field(it) => { | 101 | StructField(it) => { |
102 | let src = it.source(db); | 102 | let src = it.source(db); |
103 | match src.value { | 103 | match src.value { |
104 | hir::FieldSource::Named(it) => hover_text(it.doc_comment_text(), it.short_label()), | 104 | hir::FieldSource::Named(it) => hover_text(it.doc_comment_text(), it.short_label()), |
105 | _ => None, | 105 | _ => None, |
106 | } | 106 | } |
107 | } | 107 | } |
108 | AssocItem(it) => match it { | 108 | ModuleDef(it) => match it { |
109 | hir::AssocItem::Function(it) => from_def_source(db, it), | ||
110 | hir::AssocItem::Const(it) => from_def_source(db, it), | ||
111 | hir::AssocItem::TypeAlias(it) => from_def_source(db, it), | ||
112 | }, | ||
113 | Def(it) => match it { | ||
114 | hir::ModuleDef::Module(it) => match it.definition_source(db).value { | 109 | hir::ModuleDef::Module(it) => match it.definition_source(db).value { |
115 | hir::ModuleSource::Module(it) => { | 110 | hir::ModuleSource::Module(it) => { |
116 | hover_text(it.doc_comment_text(), it.short_label()) | 111 | hover_text(it.doc_comment_text(), it.short_label()) |
diff --git a/crates/ra_ide/src/impls.rs b/crates/ra_ide/src/impls.rs index 9834025d3..64a2dadc8 100644 --- a/crates/ra_ide/src/impls.rs +++ b/crates/ra_ide/src/impls.rs | |||
@@ -2,9 +2,10 @@ | |||
2 | 2 | ||
3 | use hir::{Crate, ImplBlock, SourceBinder}; | 3 | use hir::{Crate, ImplBlock, SourceBinder}; |
4 | use ra_db::SourceDatabase; | 4 | use ra_db::SourceDatabase; |
5 | use ra_ide_db::RootDatabase; | ||
5 | use ra_syntax::{algo::find_node_at_offset, ast, AstNode}; | 6 | use ra_syntax::{algo::find_node_at_offset, ast, AstNode}; |
6 | 7 | ||
7 | use crate::{db::RootDatabase, display::ToNav, FilePosition, NavigationTarget, RangeInfo}; | 8 | use crate::{display::ToNav, FilePosition, NavigationTarget, RangeInfo}; |
8 | 9 | ||
9 | pub(crate) fn goto_implementation( | 10 | pub(crate) fn goto_implementation( |
10 | db: &RootDatabase, | 11 | db: &RootDatabase, |
diff --git a/crates/ra_ide/src/inlay_hints.rs b/crates/ra_ide/src/inlay_hints.rs index 393ca9447..6b0d3d996 100644 --- a/crates/ra_ide/src/inlay_hints.rs +++ b/crates/ra_ide/src/inlay_hints.rs | |||
@@ -2,13 +2,14 @@ | |||
2 | 2 | ||
3 | use hir::{HirDisplay, SourceAnalyzer, SourceBinder}; | 3 | use hir::{HirDisplay, SourceAnalyzer, SourceBinder}; |
4 | use once_cell::unsync::Lazy; | 4 | use once_cell::unsync::Lazy; |
5 | use ra_ide_db::RootDatabase; | ||
5 | use ra_prof::profile; | 6 | use ra_prof::profile; |
6 | use ra_syntax::{ | 7 | use ra_syntax::{ |
7 | ast::{self, ArgListOwner, AstNode, TypeAscriptionOwner}, | 8 | ast::{self, ArgListOwner, AstNode, TypeAscriptionOwner}, |
8 | match_ast, SmolStr, SourceFile, SyntaxKind, SyntaxNode, TextRange, | 9 | match_ast, SmolStr, SourceFile, SyntaxKind, SyntaxNode, TextRange, |
9 | }; | 10 | }; |
10 | 11 | ||
11 | use crate::{db::RootDatabase, FileId, FunctionSignature}; | 12 | use crate::{FileId, FunctionSignature}; |
12 | 13 | ||
13 | #[derive(Debug, PartialEq, Eq)] | 14 | #[derive(Debug, PartialEq, Eq)] |
14 | pub enum InlayKind { | 15 | pub enum InlayKind { |
@@ -376,7 +377,7 @@ fn main() { | |||
376 | let mut start = 0; | 377 | let mut start = 0; |
377 | (0..2).for_each(|increment| { | 378 | (0..2).for_each(|increment| { |
378 | start += increment; | 379 | start += increment; |
379 | }) | 380 | }); |
380 | 381 | ||
381 | let multiply = |a, b, c, d| a * b * c * d; | 382 | let multiply = |a, b, c, d| a * b * c * d; |
382 | let _: i32 = multiply(1, 2, 3, 4); | 383 | let _: i32 = multiply(1, 2, 3, 4); |
@@ -399,37 +400,37 @@ fn main() { | |||
399 | label: "i32", | 400 | label: "i32", |
400 | }, | 401 | }, |
401 | InlayHint { | 402 | InlayHint { |
402 | range: [114; 122), | 403 | range: [115; 123), |
403 | kind: TypeHint, | 404 | kind: TypeHint, |
404 | label: "|…| -> i32", | 405 | label: "|…| -> i32", |
405 | }, | 406 | }, |
406 | InlayHint { | 407 | InlayHint { |
407 | range: [126; 127), | 408 | range: [127; 128), |
408 | kind: TypeHint, | 409 | kind: TypeHint, |
409 | label: "i32", | 410 | label: "i32", |
410 | }, | 411 | }, |
411 | InlayHint { | 412 | InlayHint { |
412 | range: [129; 130), | 413 | range: [130; 131), |
413 | kind: TypeHint, | 414 | kind: TypeHint, |
414 | label: "i32", | 415 | label: "i32", |
415 | }, | 416 | }, |
416 | InlayHint { | 417 | InlayHint { |
417 | range: [132; 133), | 418 | range: [133; 134), |
418 | kind: TypeHint, | 419 | kind: TypeHint, |
419 | label: "i32", | 420 | label: "i32", |
420 | }, | 421 | }, |
421 | InlayHint { | 422 | InlayHint { |
422 | range: [135; 136), | 423 | range: [136; 137), |
423 | kind: TypeHint, | 424 | kind: TypeHint, |
424 | label: "i32", | 425 | label: "i32", |
425 | }, | 426 | }, |
426 | InlayHint { | 427 | InlayHint { |
427 | range: [200; 212), | 428 | range: [201; 213), |
428 | kind: TypeHint, | 429 | kind: TypeHint, |
429 | label: "&|…| -> i32", | 430 | label: "&|…| -> i32", |
430 | }, | 431 | }, |
431 | InlayHint { | 432 | InlayHint { |
432 | range: [235; 244), | 433 | range: [236; 245), |
433 | kind: TypeHint, | 434 | kind: TypeHint, |
434 | label: "|| -> i32", | 435 | label: "|| -> i32", |
435 | }, | 436 | }, |
diff --git a/crates/ra_ide/src/lib.rs b/crates/ra_ide/src/lib.rs index 4d8deb21c..689921f3f 100644 --- a/crates/ra_ide/src/lib.rs +++ b/crates/ra_ide/src/lib.rs | |||
@@ -10,12 +10,8 @@ | |||
10 | // For proving that RootDatabase is RefUnwindSafe. | 10 | // For proving that RootDatabase is RefUnwindSafe. |
11 | #![recursion_limit = "128"] | 11 | #![recursion_limit = "128"] |
12 | 12 | ||
13 | mod db; | ||
14 | pub mod mock_analysis; | 13 | pub mod mock_analysis; |
15 | mod symbol_index; | ||
16 | mod change; | ||
17 | mod source_change; | 14 | mod source_change; |
18 | mod feature_flags; | ||
19 | 15 | ||
20 | mod status; | 16 | mod status; |
21 | mod completion; | 17 | mod completion; |
@@ -34,14 +30,11 @@ mod assists; | |||
34 | mod diagnostics; | 30 | mod diagnostics; |
35 | mod syntax_tree; | 31 | mod syntax_tree; |
36 | mod folding_ranges; | 32 | mod folding_ranges; |
37 | mod line_index; | ||
38 | mod line_index_utils; | ||
39 | mod join_lines; | 33 | mod join_lines; |
40 | mod typing; | 34 | mod typing; |
41 | mod matching_brace; | 35 | mod matching_brace; |
42 | mod display; | 36 | mod display; |
43 | mod inlay_hints; | 37 | mod inlay_hints; |
44 | mod wasm_shims; | ||
45 | mod expand; | 38 | mod expand; |
46 | mod expand_macro; | 39 | mod expand_macro; |
47 | 40 | ||
@@ -57,24 +50,24 @@ use ra_db::{ | |||
57 | salsa::{self, ParallelDatabase}, | 50 | salsa::{self, ParallelDatabase}, |
58 | CheckCanceled, Env, FileLoader, SourceDatabase, | 51 | CheckCanceled, Env, FileLoader, SourceDatabase, |
59 | }; | 52 | }; |
53 | use ra_ide_db::{ | ||
54 | symbol_index::{self, FileSymbol}, | ||
55 | LineIndexDatabase, | ||
56 | }; | ||
60 | use ra_syntax::{SourceFile, TextRange, TextUnit}; | 57 | use ra_syntax::{SourceFile, TextRange, TextUnit}; |
61 | 58 | ||
62 | use crate::{db::LineIndexDatabase, display::ToNav, symbol_index::FileSymbol}; | 59 | use crate::display::ToNav; |
63 | 60 | ||
64 | pub use crate::{ | 61 | pub use crate::{ |
65 | assists::{Assist, AssistId}, | 62 | assists::{Assist, AssistId}, |
66 | call_hierarchy::CallItem, | 63 | call_hierarchy::CallItem, |
67 | change::{AnalysisChange, LibraryData}, | ||
68 | completion::{CompletionItem, CompletionItemKind, InsertTextFormat}, | 64 | completion::{CompletionItem, CompletionItemKind, InsertTextFormat}, |
69 | diagnostics::Severity, | 65 | diagnostics::Severity, |
70 | display::{file_structure, FunctionSignature, NavigationTarget, StructureNode}, | 66 | display::{file_structure, FunctionSignature, NavigationTarget, StructureNode}, |
71 | expand_macro::ExpandedMacro, | 67 | expand_macro::ExpandedMacro, |
72 | feature_flags::FeatureFlags, | ||
73 | folding_ranges::{Fold, FoldKind}, | 68 | folding_ranges::{Fold, FoldKind}, |
74 | hover::HoverResult, | 69 | hover::HoverResult, |
75 | inlay_hints::{InlayHint, InlayKind}, | 70 | inlay_hints::{InlayHint, InlayKind}, |
76 | line_index::{LineCol, LineIndex}, | ||
77 | line_index_utils::translate_offset_with_edit, | ||
78 | references::{ | 71 | references::{ |
79 | Declaration, Reference, ReferenceAccess, ReferenceKind, ReferenceSearchResult, SearchScope, | 72 | Declaration, Reference, ReferenceAccess, ReferenceKind, ReferenceSearchResult, SearchScope, |
80 | }, | 73 | }, |
@@ -87,6 +80,14 @@ pub use hir::Documentation; | |||
87 | pub use ra_db::{ | 80 | pub use ra_db::{ |
88 | Canceled, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRootId, | 81 | Canceled, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRootId, |
89 | }; | 82 | }; |
83 | pub use ra_ide_db::{ | ||
84 | change::{AnalysisChange, LibraryData}, | ||
85 | feature_flags::FeatureFlags, | ||
86 | line_index::{LineCol, LineIndex}, | ||
87 | line_index_utils::translate_offset_with_edit, | ||
88 | symbol_index::Query, | ||
89 | RootDatabase, | ||
90 | }; | ||
90 | 91 | ||
91 | pub type Cancelable<T> = Result<T, Canceled>; | 92 | pub type Cancelable<T> = Result<T, Canceled>; |
92 | 93 | ||
@@ -98,46 +99,6 @@ pub struct Diagnostic { | |||
98 | pub severity: Severity, | 99 | pub severity: Severity, |
99 | } | 100 | } |
100 | 101 | ||
101 | #[derive(Debug)] | ||
102 | pub struct Query { | ||
103 | query: String, | ||
104 | lowercased: String, | ||
105 | only_types: bool, | ||
106 | libs: bool, | ||
107 | exact: bool, | ||
108 | limit: usize, | ||
109 | } | ||
110 | |||
111 | impl Query { | ||
112 | pub fn new(query: String) -> Query { | ||
113 | let lowercased = query.to_lowercase(); | ||
114 | Query { | ||
115 | query, | ||
116 | lowercased, | ||
117 | only_types: false, | ||
118 | libs: false, | ||
119 | exact: false, | ||
120 | limit: usize::max_value(), | ||
121 | } | ||
122 | } | ||
123 | |||
124 | pub fn only_types(&mut self) { | ||
125 | self.only_types = true; | ||
126 | } | ||
127 | |||
128 | pub fn libs(&mut self) { | ||
129 | self.libs = true; | ||
130 | } | ||
131 | |||
132 | pub fn exact(&mut self) { | ||
133 | self.exact = true; | ||
134 | } | ||
135 | |||
136 | pub fn limit(&mut self, limit: usize) { | ||
137 | self.limit = limit | ||
138 | } | ||
139 | } | ||
140 | |||
141 | /// Info associated with a text range. | 102 | /// Info associated with a text range. |
142 | #[derive(Debug)] | 103 | #[derive(Debug)] |
143 | pub struct RangeInfo<T> { | 104 | pub struct RangeInfo<T> { |
@@ -162,7 +123,7 @@ pub struct CallInfo { | |||
162 | /// `AnalysisHost` stores the current state of the world. | 123 | /// `AnalysisHost` stores the current state of the world. |
163 | #[derive(Debug)] | 124 | #[derive(Debug)] |
164 | pub struct AnalysisHost { | 125 | pub struct AnalysisHost { |
165 | db: db::RootDatabase, | 126 | db: RootDatabase, |
166 | } | 127 | } |
167 | 128 | ||
168 | impl Default for AnalysisHost { | 129 | impl Default for AnalysisHost { |
@@ -173,7 +134,7 @@ impl Default for AnalysisHost { | |||
173 | 134 | ||
174 | impl AnalysisHost { | 135 | impl AnalysisHost { |
175 | pub fn new(lru_capcity: Option<usize>, feature_flags: FeatureFlags) -> AnalysisHost { | 136 | pub fn new(lru_capcity: Option<usize>, feature_flags: FeatureFlags) -> AnalysisHost { |
176 | AnalysisHost { db: db::RootDatabase::new(lru_capcity, feature_flags) } | 137 | AnalysisHost { db: RootDatabase::new(lru_capcity, feature_flags) } |
177 | } | 138 | } |
178 | /// Returns a snapshot of the current state, which you can query for | 139 | /// Returns a snapshot of the current state, which you can query for |
179 | /// semantic information. | 140 | /// semantic information. |
@@ -202,6 +163,9 @@ impl AnalysisHost { | |||
202 | pub fn per_query_memory_usage(&mut self) -> Vec<(String, ra_prof::Bytes)> { | 163 | pub fn per_query_memory_usage(&mut self) -> Vec<(String, ra_prof::Bytes)> { |
203 | self.db.per_query_memory_usage() | 164 | self.db.per_query_memory_usage() |
204 | } | 165 | } |
166 | pub fn request_cancellation(&mut self) { | ||
167 | self.db.request_cancellation(); | ||
168 | } | ||
205 | pub fn raw_database( | 169 | pub fn raw_database( |
206 | &self, | 170 | &self, |
207 | ) -> &(impl hir::db::HirDatabase + salsa::Database + ra_db::SourceDatabaseExt) { | 171 | ) -> &(impl hir::db::HirDatabase + salsa::Database + ra_db::SourceDatabaseExt) { |
@@ -220,7 +184,7 @@ impl AnalysisHost { | |||
220 | /// `Analysis` are canceled (most method return `Err(Canceled)`). | 184 | /// `Analysis` are canceled (most method return `Err(Canceled)`). |
221 | #[derive(Debug)] | 185 | #[derive(Debug)] |
222 | pub struct Analysis { | 186 | pub struct Analysis { |
223 | db: salsa::Snapshot<db::RootDatabase>, | 187 | db: salsa::Snapshot<RootDatabase>, |
224 | } | 188 | } |
225 | 189 | ||
226 | // As a general design guideline, `Analysis` API are intended to be independent | 190 | // As a general design guideline, `Analysis` API are intended to be independent |
@@ -501,7 +465,7 @@ impl Analysis { | |||
501 | } | 465 | } |
502 | 466 | ||
503 | /// Performs an operation on that may be Canceled. | 467 | /// Performs an operation on that may be Canceled. |
504 | fn with_db<F: FnOnce(&db::RootDatabase) -> T + std::panic::UnwindSafe, T>( | 468 | fn with_db<F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe, T>( |
505 | &self, | 469 | &self, |
506 | f: F, | 470 | f: F, |
507 | ) -> Cancelable<T> { | 471 | ) -> Cancelable<T> { |
@@ -514,3 +478,77 @@ fn analysis_is_send() { | |||
514 | fn is_send<T: Send>() {} | 478 | fn is_send<T: Send>() {} |
515 | is_send::<Analysis>(); | 479 | is_send::<Analysis>(); |
516 | } | 480 | } |
481 | |||
482 | #[cfg(test)] | ||
483 | mod tests { | ||
484 | use crate::{display::NavigationTarget, mock_analysis::single_file, Query}; | ||
485 | use ra_syntax::{ | ||
486 | SmolStr, | ||
487 | SyntaxKind::{FN_DEF, STRUCT_DEF}, | ||
488 | }; | ||
489 | |||
490 | #[test] | ||
491 | fn test_world_symbols_with_no_container() { | ||
492 | let code = r#" | ||
493 | enum FooInner { } | ||
494 | "#; | ||
495 | |||
496 | let mut symbols = get_symbols_matching(code, "FooInner"); | ||
497 | |||
498 | let s = symbols.pop().unwrap(); | ||
499 | |||
500 | assert_eq!(s.name(), "FooInner"); | ||
501 | assert!(s.container_name().is_none()); | ||
502 | } | ||
503 | |||
504 | #[test] | ||
505 | fn test_world_symbols_include_container_name() { | ||
506 | let code = r#" | ||
507 | fn foo() { | ||
508 | enum FooInner { } | ||
509 | } | ||
510 | "#; | ||
511 | |||
512 | let mut symbols = get_symbols_matching(code, "FooInner"); | ||
513 | |||
514 | let s = symbols.pop().unwrap(); | ||
515 | |||
516 | assert_eq!(s.name(), "FooInner"); | ||
517 | assert_eq!(s.container_name(), Some(&SmolStr::new("foo"))); | ||
518 | |||
519 | let code = r#" | ||
520 | mod foo { | ||
521 | struct FooInner; | ||
522 | } | ||
523 | "#; | ||
524 | |||
525 | let mut symbols = get_symbols_matching(code, "FooInner"); | ||
526 | |||
527 | let s = symbols.pop().unwrap(); | ||
528 | |||
529 | assert_eq!(s.name(), "FooInner"); | ||
530 | assert_eq!(s.container_name(), Some(&SmolStr::new("foo"))); | ||
531 | } | ||
532 | |||
533 | #[test] | ||
534 | fn test_world_symbols_are_case_sensitive() { | ||
535 | let code = r#" | ||
536 | fn foo() {} | ||
537 | |||
538 | struct Foo; | ||
539 | "#; | ||
540 | |||
541 | let symbols = get_symbols_matching(code, "Foo"); | ||
542 | |||
543 | let fn_match = symbols.iter().find(|s| s.name() == "foo").map(|s| s.kind()); | ||
544 | let struct_match = symbols.iter().find(|s| s.name() == "Foo").map(|s| s.kind()); | ||
545 | |||
546 | assert_eq!(fn_match, Some(FN_DEF)); | ||
547 | assert_eq!(struct_match, Some(STRUCT_DEF)); | ||
548 | } | ||
549 | |||
550 | fn get_symbols_matching(text: &str, query: &str) -> Vec<NavigationTarget> { | ||
551 | let (analysis, _) = single_file(text); | ||
552 | analysis.symbol_search(Query::new(query.into())).unwrap() | ||
553 | } | ||
554 | } | ||
diff --git a/crates/ra_ide/src/marks.rs b/crates/ra_ide/src/marks.rs index 077a44473..5bf4d2062 100644 --- a/crates/ra_ide/src/marks.rs +++ b/crates/ra_ide/src/marks.rs | |||
@@ -11,4 +11,5 @@ test_utils::marks!( | |||
11 | call_info_bad_offset | 11 | call_info_bad_offset |
12 | dont_complete_current_use | 12 | dont_complete_current_use |
13 | dont_complete_primitive_in_use | 13 | dont_complete_primitive_in_use |
14 | test_resolve_parent_module_on_module_decl | ||
14 | ); | 15 | ); |
diff --git a/crates/ra_ide/src/mock_analysis.rs b/crates/ra_ide/src/mock_analysis.rs index bf8a54932..081aaee8c 100644 --- a/crates/ra_ide/src/mock_analysis.rs +++ b/crates/ra_ide/src/mock_analysis.rs | |||
@@ -3,7 +3,7 @@ | |||
3 | use std::sync::Arc; | 3 | use std::sync::Arc; |
4 | 4 | ||
5 | use ra_cfg::CfgOptions; | 5 | use ra_cfg::CfgOptions; |
6 | use ra_db::{Env, RelativePathBuf}; | 6 | use ra_db::{CrateName, Env, RelativePathBuf}; |
7 | use test_utils::{extract_offset, extract_range, parse_fixture, CURSOR_MARKER}; | 7 | use test_utils::{extract_offset, extract_range, parse_fixture, CURSOR_MARKER}; |
8 | 8 | ||
9 | use crate::{ | 9 | use crate::{ |
@@ -107,7 +107,9 @@ impl MockAnalysis { | |||
107 | crate_graph.add_crate_root(file_id, Edition2018, cfg_options, Env::default()); | 107 | crate_graph.add_crate_root(file_id, Edition2018, cfg_options, Env::default()); |
108 | let crate_name = path.parent().unwrap().file_name().unwrap(); | 108 | let crate_name = path.parent().unwrap().file_name().unwrap(); |
109 | if let Some(root_crate) = root_crate { | 109 | if let Some(root_crate) = root_crate { |
110 | crate_graph.add_dep(root_crate, crate_name.into(), other_crate).unwrap(); | 110 | crate_graph |
111 | .add_dep(root_crate, CrateName::new(crate_name).unwrap(), other_crate) | ||
112 | .unwrap(); | ||
111 | } | 113 | } |
112 | } | 114 | } |
113 | change.add_file(source_root, file_id, path, Arc::new(contents)); | 115 | change.add_file(source_root, file_id, path, Arc::new(contents)); |
diff --git a/crates/ra_ide/src/parent_module.rs b/crates/ra_ide/src/parent_module.rs index 2dbccfc3b..af14d6ab3 100644 --- a/crates/ra_ide/src/parent_module.rs +++ b/crates/ra_ide/src/parent_module.rs | |||
@@ -1,19 +1,35 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use ra_db::{CrateId, FileId, FilePosition, SourceDatabase}; | 3 | use ra_db::{CrateId, FileId, FilePosition, SourceDatabase}; |
4 | use ra_ide_db::RootDatabase; | ||
4 | use ra_syntax::{ | 5 | use ra_syntax::{ |
5 | algo::find_node_at_offset, | 6 | algo::find_node_at_offset, |
6 | ast::{self, AstNode}, | 7 | ast::{self, AstNode}, |
7 | }; | 8 | }; |
9 | use test_utils::tested_by; | ||
8 | 10 | ||
9 | use crate::{db::RootDatabase, NavigationTarget}; | 11 | use crate::NavigationTarget; |
10 | 12 | ||
11 | /// This returns `Vec` because a module may be included from several places. We | 13 | /// This returns `Vec` because a module may be included from several places. We |
12 | /// don't handle this case yet though, so the Vec has length at most one. | 14 | /// don't handle this case yet though, so the Vec has length at most one. |
13 | pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> { | 15 | pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> { |
14 | let mut sb = hir::SourceBinder::new(db); | 16 | let mut sb = hir::SourceBinder::new(db); |
15 | let parse = db.parse(position.file_id); | 17 | let parse = db.parse(position.file_id); |
16 | let module = match find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset) { | 18 | |
19 | let mut module = find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset); | ||
20 | |||
21 | // If cursor is literally on `mod foo`, go to the grandpa. | ||
22 | if let Some(m) = &module { | ||
23 | if !m | ||
24 | .item_list() | ||
25 | .map_or(false, |it| it.syntax().text_range().contains_inclusive(position.offset)) | ||
26 | { | ||
27 | tested_by!(test_resolve_parent_module_on_module_decl); | ||
28 | module = m.syntax().ancestors().skip(1).find_map(ast::Module::cast); | ||
29 | } | ||
30 | } | ||
31 | |||
32 | let module = match module { | ||
17 | Some(module) => sb.to_def(hir::InFile::new(position.file_id.into(), module)), | 33 | Some(module) => sb.to_def(hir::InFile::new(position.file_id.into(), module)), |
18 | None => sb.to_module_def(position.file_id), | 34 | None => sb.to_module_def(position.file_id), |
19 | }; | 35 | }; |
@@ -40,6 +56,7 @@ pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> { | |||
40 | mod tests { | 56 | mod tests { |
41 | use ra_cfg::CfgOptions; | 57 | use ra_cfg::CfgOptions; |
42 | use ra_db::Env; | 58 | use ra_db::Env; |
59 | use test_utils::covers; | ||
43 | 60 | ||
44 | use crate::{ | 61 | use crate::{ |
45 | mock_analysis::{analysis_and_position, MockAnalysis}, | 62 | mock_analysis::{analysis_and_position, MockAnalysis}, |
@@ -62,6 +79,25 @@ mod tests { | |||
62 | } | 79 | } |
63 | 80 | ||
64 | #[test] | 81 | #[test] |
82 | fn test_resolve_parent_module_on_module_decl() { | ||
83 | covers!(test_resolve_parent_module_on_module_decl); | ||
84 | let (analysis, pos) = analysis_and_position( | ||
85 | " | ||
86 | //- /lib.rs | ||
87 | mod foo; | ||
88 | |||
89 | //- /foo.rs | ||
90 | mod <|>bar; | ||
91 | |||
92 | //- /foo/bar.rs | ||
93 | // empty | ||
94 | ", | ||
95 | ); | ||
96 | let nav = analysis.parent_module(pos).unwrap().pop().unwrap(); | ||
97 | nav.assert_match("foo MODULE FileId(1) [0; 8)"); | ||
98 | } | ||
99 | |||
100 | #[test] | ||
65 | fn test_resolve_parent_module_for_inline() { | 101 | fn test_resolve_parent_module_for_inline() { |
66 | let (analysis, pos) = analysis_and_position( | 102 | let (analysis, pos) = analysis_and_position( |
67 | " | 103 | " |
diff --git a/crates/ra_ide/src/references.rs b/crates/ra_ide/src/references.rs index 5e2fe1905..a6320bd2f 100644 --- a/crates/ra_ide/src/references.rs +++ b/crates/ra_ide/src/references.rs | |||
@@ -10,13 +10,13 @@ | |||
10 | //! resolved to the search element definition, we get a reference. | 10 | //! resolved to the search element definition, we get a reference. |
11 | 11 | ||
12 | mod classify; | 12 | mod classify; |
13 | mod name_definition; | ||
14 | mod rename; | 13 | mod rename; |
15 | mod search_scope; | 14 | mod search_scope; |
16 | 15 | ||
17 | use hir::{InFile, SourceBinder}; | 16 | use hir::{InFile, SourceBinder}; |
18 | use once_cell::unsync::Lazy; | 17 | use once_cell::unsync::Lazy; |
19 | use ra_db::{SourceDatabase, SourceDatabaseExt}; | 18 | use ra_db::{SourceDatabase, SourceDatabaseExt}; |
19 | use ra_ide_db::RootDatabase; | ||
20 | use ra_prof::profile; | 20 | use ra_prof::profile; |
21 | use ra_syntax::{ | 21 | use ra_syntax::{ |
22 | algo::find_node_at_offset, | 22 | algo::find_node_at_offset, |
@@ -24,15 +24,13 @@ use ra_syntax::{ | |||
24 | match_ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, TextRange, TextUnit, TokenAtOffset, | 24 | match_ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, TextRange, TextUnit, TokenAtOffset, |
25 | }; | 25 | }; |
26 | 26 | ||
27 | use crate::{ | 27 | use crate::{display::ToNav, FilePosition, FileRange, NavigationTarget, RangeInfo}; |
28 | db::RootDatabase, display::ToNav, FilePosition, FileRange, NavigationTarget, RangeInfo, | ||
29 | }; | ||
30 | 28 | ||
31 | pub(crate) use self::{ | 29 | pub(crate) use self::{ |
32 | classify::{classify_name, classify_name_ref}, | 30 | classify::{classify_name, classify_name_ref}, |
33 | name_definition::{NameDefinition, NameKind}, | ||
34 | rename::rename, | 31 | rename::rename, |
35 | }; | 32 | }; |
33 | pub(crate) use ra_ide_db::defs::{NameDefinition, NameKind}; | ||
36 | 34 | ||
37 | pub use self::search_scope::SearchScope; | 35 | pub use self::search_scope::SearchScope; |
38 | 36 | ||
@@ -112,38 +110,32 @@ impl IntoIterator for ReferenceSearchResult { | |||
112 | 110 | ||
113 | pub(crate) fn find_all_refs( | 111 | pub(crate) fn find_all_refs( |
114 | db: &RootDatabase, | 112 | db: &RootDatabase, |
115 | mut position: FilePosition, | 113 | position: FilePosition, |
116 | search_scope: Option<SearchScope>, | 114 | search_scope: Option<SearchScope>, |
117 | ) -> Option<RangeInfo<ReferenceSearchResult>> { | 115 | ) -> Option<RangeInfo<ReferenceSearchResult>> { |
118 | let parse = db.parse(position.file_id); | 116 | let parse = db.parse(position.file_id); |
119 | let syntax = parse.tree().syntax().clone(); | 117 | let syntax = parse.tree().syntax().clone(); |
120 | 118 | ||
121 | let token = syntax.token_at_offset(position.offset); | 119 | let (opt_name, search_kind) = |
122 | let mut search_kind = ReferenceKind::Other; | 120 | if let Some(name) = get_struct_def_name_for_struc_litetal_search(&syntax, position) { |
121 | (Some(name), ReferenceKind::StructLiteral) | ||
122 | } else { | ||
123 | (find_node_at_offset::<ast::Name>(&syntax, position.offset), ReferenceKind::Other) | ||
124 | }; | ||
123 | 125 | ||
124 | if let TokenAtOffset::Between(ref left, ref right) = token { | 126 | let RangeInfo { range, info: (name, def) } = find_name(db, &syntax, position, opt_name)?; |
125 | if (right.kind() == SyntaxKind::L_CURLY || right.kind() == SyntaxKind::L_PAREN) | ||
126 | && left.kind() != SyntaxKind::IDENT | ||
127 | { | ||
128 | position = FilePosition { offset: left.text_range().start(), ..position }; | ||
129 | search_kind = ReferenceKind::StructLiteral; | ||
130 | } | ||
131 | } | ||
132 | |||
133 | let RangeInfo { range, info: (name, def) } = find_name(db, &syntax, position)?; | ||
134 | 127 | ||
135 | let declaration = match def.kind { | 128 | let declaration = match def.kind { |
136 | NameKind::Macro(mac) => mac.to_nav(db), | 129 | NameKind::Macro(mac) => mac.to_nav(db), |
137 | NameKind::Field(field) => field.to_nav(db), | 130 | NameKind::StructField(field) => field.to_nav(db), |
138 | NameKind::AssocItem(assoc) => assoc.to_nav(db), | 131 | NameKind::ModuleDef(def) => NavigationTarget::from_def(db, def)?, |
139 | NameKind::Def(def) => NavigationTarget::from_def(db, def)?, | ||
140 | NameKind::SelfType(imp) => imp.to_nav(db), | 132 | NameKind::SelfType(imp) => imp.to_nav(db), |
141 | NameKind::Local(local) => local.to_nav(db), | 133 | NameKind::Local(local) => local.to_nav(db), |
142 | NameKind::TypeParam(_) => return None, | 134 | NameKind::TypeParam(_) => return None, |
143 | }; | 135 | }; |
144 | 136 | ||
145 | let search_scope = { | 137 | let search_scope = { |
146 | let base = def.search_scope(db); | 138 | let base = SearchScope::for_def(&def, db); |
147 | match search_scope { | 139 | match search_scope { |
148 | None => base, | 140 | None => base, |
149 | Some(scope) => base.intersection(&scope), | 141 | Some(scope) => base.intersection(&scope), |
@@ -170,9 +162,10 @@ fn find_name( | |||
170 | db: &RootDatabase, | 162 | db: &RootDatabase, |
171 | syntax: &SyntaxNode, | 163 | syntax: &SyntaxNode, |
172 | position: FilePosition, | 164 | position: FilePosition, |
165 | opt_name: Option<ast::Name>, | ||
173 | ) -> Option<RangeInfo<(String, NameDefinition)>> { | 166 | ) -> Option<RangeInfo<(String, NameDefinition)>> { |
174 | let mut sb = SourceBinder::new(db); | 167 | let mut sb = SourceBinder::new(db); |
175 | if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, position.offset) { | 168 | if let Some(name) = opt_name { |
176 | let def = classify_name(&mut sb, InFile::new(position.file_id.into(), &name))?; | 169 | let def = classify_name(&mut sb, InFile::new(position.file_id.into(), &name))?; |
177 | let range = name.syntax().text_range(); | 170 | let range = name.syntax().text_range(); |
178 | return Some(RangeInfo::new(range, (name.text().to_string(), def))); | 171 | return Some(RangeInfo::new(range, (name.text().to_string(), def))); |
@@ -218,15 +211,8 @@ fn process_definition( | |||
218 | if let Some(d) = classify_name_ref(&mut sb, InFile::new(file_id.into(), &name_ref)) | 211 | if let Some(d) = classify_name_ref(&mut sb, InFile::new(file_id.into(), &name_ref)) |
219 | { | 212 | { |
220 | if d == def { | 213 | if d == def { |
221 | let kind = if name_ref | 214 | let kind = if is_record_lit_name_ref(&name_ref) |
222 | .syntax() | 215 | || is_call_expr_name_ref(&name_ref) |
223 | .ancestors() | ||
224 | .find_map(ast::RecordLit::cast) | ||
225 | .and_then(|l| l.path()) | ||
226 | .and_then(|p| p.segment()) | ||
227 | .and_then(|p| p.name_ref()) | ||
228 | .map(|n| n == name_ref) | ||
229 | .unwrap_or(false) | ||
230 | { | 216 | { |
231 | ReferenceKind::StructLiteral | 217 | ReferenceKind::StructLiteral |
232 | } else { | 218 | } else { |
@@ -253,7 +239,7 @@ fn decl_access( | |||
253 | range: TextRange, | 239 | range: TextRange, |
254 | ) -> Option<ReferenceAccess> { | 240 | ) -> Option<ReferenceAccess> { |
255 | match kind { | 241 | match kind { |
256 | NameKind::Local(_) | NameKind::Field(_) => {} | 242 | NameKind::Local(_) | NameKind::StructField(_) => {} |
257 | _ => return None, | 243 | _ => return None, |
258 | }; | 244 | }; |
259 | 245 | ||
@@ -273,7 +259,7 @@ fn decl_access( | |||
273 | fn reference_access(kind: &NameKind, name_ref: &ast::NameRef) -> Option<ReferenceAccess> { | 259 | fn reference_access(kind: &NameKind, name_ref: &ast::NameRef) -> Option<ReferenceAccess> { |
274 | // Only Locals and Fields have accesses for now. | 260 | // Only Locals and Fields have accesses for now. |
275 | match kind { | 261 | match kind { |
276 | NameKind::Local(_) | NameKind::Field(_) => {} | 262 | NameKind::Local(_) | NameKind::StructField(_) => {} |
277 | _ => return None, | 263 | _ => return None, |
278 | }; | 264 | }; |
279 | 265 | ||
@@ -301,6 +287,49 @@ fn reference_access(kind: &NameKind, name_ref: &ast::NameRef) -> Option<Referenc | |||
301 | mode.or(Some(ReferenceAccess::Read)) | 287 | mode.or(Some(ReferenceAccess::Read)) |
302 | } | 288 | } |
303 | 289 | ||
290 | fn is_record_lit_name_ref(name_ref: &ast::NameRef) -> bool { | ||
291 | name_ref | ||
292 | .syntax() | ||
293 | .ancestors() | ||
294 | .find_map(ast::RecordLit::cast) | ||
295 | .and_then(|l| l.path()) | ||
296 | .and_then(|p| p.segment()) | ||
297 | .map(|p| p.name_ref().as_ref() == Some(name_ref)) | ||
298 | .unwrap_or(false) | ||
299 | } | ||
300 | |||
301 | fn get_struct_def_name_for_struc_litetal_search( | ||
302 | syntax: &SyntaxNode, | ||
303 | position: FilePosition, | ||
304 | ) -> Option<ast::Name> { | ||
305 | if let TokenAtOffset::Between(ref left, ref right) = syntax.token_at_offset(position.offset) { | ||
306 | if right.kind() != SyntaxKind::L_CURLY && right.kind() != SyntaxKind::L_PAREN { | ||
307 | return None; | ||
308 | } | ||
309 | if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, left.text_range().start()) { | ||
310 | return name.syntax().ancestors().find_map(ast::StructDef::cast).and_then(|l| l.name()); | ||
311 | } | ||
312 | if find_node_at_offset::<ast::TypeParamList>(&syntax, left.text_range().start()).is_some() { | ||
313 | return left.ancestors().find_map(ast::StructDef::cast).and_then(|l| l.name()); | ||
314 | } | ||
315 | } | ||
316 | None | ||
317 | } | ||
318 | |||
319 | fn is_call_expr_name_ref(name_ref: &ast::NameRef) -> bool { | ||
320 | name_ref | ||
321 | .syntax() | ||
322 | .ancestors() | ||
323 | .find_map(ast::CallExpr::cast) | ||
324 | .and_then(|c| match c.expr()? { | ||
325 | ast::Expr::PathExpr(p) => { | ||
326 | Some(p.path()?.segment()?.name_ref().as_ref() == Some(name_ref)) | ||
327 | } | ||
328 | _ => None, | ||
329 | }) | ||
330 | .unwrap_or(false) | ||
331 | } | ||
332 | |||
304 | #[cfg(test)] | 333 | #[cfg(test)] |
305 | mod tests { | 334 | mod tests { |
306 | use crate::{ | 335 | use crate::{ |
@@ -309,7 +338,7 @@ mod tests { | |||
309 | }; | 338 | }; |
310 | 339 | ||
311 | #[test] | 340 | #[test] |
312 | fn test_struct_literal() { | 341 | fn test_struct_literal_after_space() { |
313 | let code = r#" | 342 | let code = r#" |
314 | struct Foo <|>{ | 343 | struct Foo <|>{ |
315 | a: i32, | 344 | a: i32, |
@@ -331,6 +360,58 @@ mod tests { | |||
331 | } | 360 | } |
332 | 361 | ||
333 | #[test] | 362 | #[test] |
363 | fn test_struct_literal_befor_space() { | ||
364 | let code = r#" | ||
365 | struct Foo<|> {} | ||
366 | fn main() { | ||
367 | let f: Foo; | ||
368 | f = Foo {}; | ||
369 | }"#; | ||
370 | |||
371 | let refs = get_all_refs(code); | ||
372 | check_result( | ||
373 | refs, | ||
374 | "Foo STRUCT_DEF FileId(1) [5; 18) [12; 15) Other", | ||
375 | &["FileId(1) [54; 57) Other", "FileId(1) [71; 74) StructLiteral"], | ||
376 | ); | ||
377 | } | ||
378 | |||
379 | #[test] | ||
380 | fn test_struct_literal_with_generic_type() { | ||
381 | let code = r#" | ||
382 | struct Foo<T> <|>{} | ||
383 | fn main() { | ||
384 | let f: Foo::<i32>; | ||
385 | f = Foo {}; | ||
386 | }"#; | ||
387 | |||
388 | let refs = get_all_refs(code); | ||
389 | check_result( | ||
390 | refs, | ||
391 | "Foo STRUCT_DEF FileId(1) [5; 21) [12; 15) Other", | ||
392 | &["FileId(1) [81; 84) StructLiteral"], | ||
393 | ); | ||
394 | } | ||
395 | |||
396 | #[test] | ||
397 | fn test_struct_literal_for_tuple() { | ||
398 | let code = r#" | ||
399 | struct Foo<|>(i32); | ||
400 | |||
401 | fn main() { | ||
402 | let f: Foo; | ||
403 | f = Foo(1); | ||
404 | }"#; | ||
405 | |||
406 | let refs = get_all_refs(code); | ||
407 | check_result( | ||
408 | refs, | ||
409 | "Foo STRUCT_DEF FileId(1) [5; 21) [12; 15) Other", | ||
410 | &["FileId(1) [71; 74) StructLiteral"], | ||
411 | ); | ||
412 | } | ||
413 | |||
414 | #[test] | ||
334 | fn test_find_all_refs_for_local() { | 415 | fn test_find_all_refs_for_local() { |
335 | let code = r#" | 416 | let code = r#" |
336 | fn main() { | 417 | fn main() { |
@@ -564,7 +645,7 @@ mod tests { | |||
564 | check_result( | 645 | check_result( |
565 | refs, | 646 | refs, |
566 | "quux FN_DEF FileId(1) [18; 34) [25; 29) Other", | 647 | "quux FN_DEF FileId(1) [18; 34) [25; 29) Other", |
567 | &["FileId(2) [16; 20) Other", "FileId(3) [16; 20) Other"], | 648 | &["FileId(2) [16; 20) StructLiteral", "FileId(3) [16; 20) StructLiteral"], |
568 | ); | 649 | ); |
569 | 650 | ||
570 | let refs = | 651 | let refs = |
@@ -572,7 +653,7 @@ mod tests { | |||
572 | check_result( | 653 | check_result( |
573 | refs, | 654 | refs, |
574 | "quux FN_DEF FileId(1) [18; 34) [25; 29) Other", | 655 | "quux FN_DEF FileId(1) [18; 34) [25; 29) Other", |
575 | &["FileId(3) [16; 20) Other"], | 656 | &["FileId(3) [16; 20) StructLiteral"], |
576 | ); | 657 | ); |
577 | } | 658 | } |
578 | 659 | ||
@@ -591,7 +672,7 @@ mod tests { | |||
591 | check_result( | 672 | check_result( |
592 | refs, | 673 | refs, |
593 | "m1 MACRO_CALL FileId(1) [9; 63) [46; 48) Other", | 674 | "m1 MACRO_CALL FileId(1) [9; 63) [46; 48) Other", |
594 | &["FileId(1) [96; 98) Other", "FileId(1) [114; 116) Other"], | 675 | &["FileId(1) [96; 98) StructLiteral", "FileId(1) [114; 116) StructLiteral"], |
595 | ); | 676 | ); |
596 | } | 677 | } |
597 | 678 | ||
diff --git a/crates/ra_ide/src/references/classify.rs b/crates/ra_ide/src/references/classify.rs index 46cba30a3..d0f03d8a8 100644 --- a/crates/ra_ide/src/references/classify.rs +++ b/crates/ra_ide/src/references/classify.rs | |||
@@ -2,119 +2,13 @@ | |||
2 | 2 | ||
3 | use hir::{InFile, PathResolution, SourceBinder}; | 3 | use hir::{InFile, PathResolution, SourceBinder}; |
4 | use ra_prof::profile; | 4 | use ra_prof::profile; |
5 | use ra_syntax::{ast, match_ast, AstNode}; | 5 | use ra_syntax::{ast, AstNode}; |
6 | use test_utils::tested_by; | 6 | use test_utils::tested_by; |
7 | 7 | ||
8 | use super::{ | 8 | use super::{NameDefinition, NameKind}; |
9 | name_definition::{from_assoc_item, from_module_def, from_struct_field}, | 9 | use ra_ide_db::RootDatabase; |
10 | NameDefinition, NameKind, | ||
11 | }; | ||
12 | use crate::db::RootDatabase; | ||
13 | 10 | ||
14 | pub(crate) fn classify_name( | 11 | pub use ra_ide_db::defs::{classify_name, from_module_def, from_struct_field}; |
15 | sb: &mut SourceBinder<RootDatabase>, | ||
16 | name: InFile<&ast::Name>, | ||
17 | ) -> Option<NameDefinition> { | ||
18 | let _p = profile("classify_name"); | ||
19 | let parent = name.value.syntax().parent()?; | ||
20 | |||
21 | match_ast! { | ||
22 | match parent { | ||
23 | ast::BindPat(it) => { | ||
24 | let src = name.with_value(it); | ||
25 | let local = sb.to_def(src)?; | ||
26 | Some(NameDefinition { | ||
27 | visibility: None, | ||
28 | container: local.module(sb.db), | ||
29 | kind: NameKind::Local(local), | ||
30 | }) | ||
31 | }, | ||
32 | ast::RecordFieldDef(it) => { | ||
33 | let src = name.with_value(it); | ||
34 | let field: hir::StructField = sb.to_def(src)?; | ||
35 | Some(from_struct_field(sb.db, field)) | ||
36 | }, | ||
37 | ast::Module(it) => { | ||
38 | let def = sb.to_def(name.with_value(it))?; | ||
39 | Some(from_module_def(sb.db, def.into(), None)) | ||
40 | }, | ||
41 | ast::StructDef(it) => { | ||
42 | let src = name.with_value(it); | ||
43 | let def: hir::Struct = sb.to_def(src)?; | ||
44 | Some(from_module_def(sb.db, def.into(), None)) | ||
45 | }, | ||
46 | ast::EnumDef(it) => { | ||
47 | let src = name.with_value(it); | ||
48 | let def: hir::Enum = sb.to_def(src)?; | ||
49 | Some(from_module_def(sb.db, def.into(), None)) | ||
50 | }, | ||
51 | ast::TraitDef(it) => { | ||
52 | let src = name.with_value(it); | ||
53 | let def: hir::Trait = sb.to_def(src)?; | ||
54 | Some(from_module_def(sb.db, def.into(), None)) | ||
55 | }, | ||
56 | ast::StaticDef(it) => { | ||
57 | let src = name.with_value(it); | ||
58 | let def: hir::Static = sb.to_def(src)?; | ||
59 | Some(from_module_def(sb.db, def.into(), None)) | ||
60 | }, | ||
61 | ast::EnumVariant(it) => { | ||
62 | let src = name.with_value(it); | ||
63 | let def: hir::EnumVariant = sb.to_def(src)?; | ||
64 | Some(from_module_def(sb.db, def.into(), None)) | ||
65 | }, | ||
66 | ast::FnDef(it) => { | ||
67 | let src = name.with_value(it); | ||
68 | let def: hir::Function = sb.to_def(src)?; | ||
69 | if parent.parent().and_then(ast::ItemList::cast).is_some() { | ||
70 | Some(from_assoc_item(sb.db, def.into())) | ||
71 | } else { | ||
72 | Some(from_module_def(sb.db, def.into(), None)) | ||
73 | } | ||
74 | }, | ||
75 | ast::ConstDef(it) => { | ||
76 | let src = name.with_value(it); | ||
77 | let def: hir::Const = sb.to_def(src)?; | ||
78 | if parent.parent().and_then(ast::ItemList::cast).is_some() { | ||
79 | Some(from_assoc_item(sb.db, def.into())) | ||
80 | } else { | ||
81 | Some(from_module_def(sb.db, def.into(), None)) | ||
82 | } | ||
83 | }, | ||
84 | ast::TypeAliasDef(it) => { | ||
85 | let src = name.with_value(it); | ||
86 | let def: hir::TypeAlias = sb.to_def(src)?; | ||
87 | if parent.parent().and_then(ast::ItemList::cast).is_some() { | ||
88 | Some(from_assoc_item(sb.db, def.into())) | ||
89 | } else { | ||
90 | Some(from_module_def(sb.db, def.into(), None)) | ||
91 | } | ||
92 | }, | ||
93 | ast::MacroCall(it) => { | ||
94 | let src = name.with_value(it); | ||
95 | let def = sb.to_def(src.clone())?; | ||
96 | |||
97 | let module = sb.to_module_def(src.file_id.original_file(sb.db))?; | ||
98 | |||
99 | Some(NameDefinition { | ||
100 | visibility: None, | ||
101 | container: module, | ||
102 | kind: NameKind::Macro(def), | ||
103 | }) | ||
104 | }, | ||
105 | ast::TypeParam(it) => { | ||
106 | let src = name.with_value(it); | ||
107 | let def = sb.to_def(src)?; | ||
108 | Some(NameDefinition { | ||
109 | visibility: None, | ||
110 | container: def.module(sb.db), | ||
111 | kind: NameKind::TypeParam(def), | ||
112 | }) | ||
113 | }, | ||
114 | _ => None, | ||
115 | } | ||
116 | } | ||
117 | } | ||
118 | 12 | ||
119 | pub(crate) fn classify_name_ref( | 13 | pub(crate) fn classify_name_ref( |
120 | sb: &mut SourceBinder<RootDatabase>, | 14 | sb: &mut SourceBinder<RootDatabase>, |
@@ -128,7 +22,7 @@ pub(crate) fn classify_name_ref( | |||
128 | if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) { | 22 | if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) { |
129 | tested_by!(goto_def_for_methods); | 23 | tested_by!(goto_def_for_methods); |
130 | if let Some(func) = analyzer.resolve_method_call(&method_call) { | 24 | if let Some(func) = analyzer.resolve_method_call(&method_call) { |
131 | return Some(from_assoc_item(sb.db, func.into())); | 25 | return Some(from_module_def(sb.db, func.into(), None)); |
132 | } | 26 | } |
133 | } | 27 | } |
134 | 28 | ||
@@ -163,27 +57,35 @@ pub(crate) fn classify_name_ref( | |||
163 | 57 | ||
164 | let path = name_ref.value.syntax().ancestors().find_map(ast::Path::cast)?; | 58 | let path = name_ref.value.syntax().ancestors().find_map(ast::Path::cast)?; |
165 | let resolved = analyzer.resolve_path(sb.db, &path)?; | 59 | let resolved = analyzer.resolve_path(sb.db, &path)?; |
166 | match resolved { | 60 | let res = match resolved { |
167 | PathResolution::Def(def) => Some(from_module_def(sb.db, def, Some(container))), | 61 | PathResolution::Def(def) => from_module_def(sb.db, def, Some(container)), |
168 | PathResolution::AssocItem(item) => Some(from_assoc_item(sb.db, item)), | 62 | PathResolution::AssocItem(item) => { |
63 | let def = match item { | ||
64 | hir::AssocItem::Function(it) => it.into(), | ||
65 | hir::AssocItem::Const(it) => it.into(), | ||
66 | hir::AssocItem::TypeAlias(it) => it.into(), | ||
67 | }; | ||
68 | from_module_def(sb.db, def, Some(container)) | ||
69 | } | ||
169 | PathResolution::Local(local) => { | 70 | PathResolution::Local(local) => { |
170 | let kind = NameKind::Local(local); | 71 | let kind = NameKind::Local(local); |
171 | let container = local.module(sb.db); | 72 | let container = local.module(sb.db); |
172 | Some(NameDefinition { kind, container, visibility: None }) | 73 | NameDefinition { kind, container, visibility: None } |
173 | } | 74 | } |
174 | PathResolution::TypeParam(par) => { | 75 | PathResolution::TypeParam(par) => { |
175 | let kind = NameKind::TypeParam(par); | 76 | let kind = NameKind::TypeParam(par); |
176 | let container = par.module(sb.db); | 77 | let container = par.module(sb.db); |
177 | Some(NameDefinition { kind, container, visibility }) | 78 | NameDefinition { kind, container, visibility } |
178 | } | 79 | } |
179 | PathResolution::Macro(def) => { | 80 | PathResolution::Macro(def) => { |
180 | let kind = NameKind::Macro(def); | 81 | let kind = NameKind::Macro(def); |
181 | Some(NameDefinition { kind, container, visibility }) | 82 | NameDefinition { kind, container, visibility } |
182 | } | 83 | } |
183 | PathResolution::SelfType(impl_block) => { | 84 | PathResolution::SelfType(impl_block) => { |
184 | let kind = NameKind::SelfType(impl_block); | 85 | let kind = NameKind::SelfType(impl_block); |
185 | let container = impl_block.module(sb.db); | 86 | let container = impl_block.module(sb.db); |
186 | Some(NameDefinition { kind, container, visibility }) | 87 | NameDefinition { kind, container, visibility } |
187 | } | 88 | } |
188 | } | 89 | }; |
90 | Some(res) | ||
189 | } | 91 | } |
diff --git a/crates/ra_ide/src/references/name_definition.rs b/crates/ra_ide/src/references/name_definition.rs deleted file mode 100644 index 1e4226ab9..000000000 --- a/crates/ra_ide/src/references/name_definition.rs +++ /dev/null | |||
@@ -1,85 +0,0 @@ | |||
1 | //! `NameDefinition` keeps information about the element we want to search references for. | ||
2 | //! The element is represented by `NameKind`. It's located inside some `container` and | ||
3 | //! has a `visibility`, which defines a search scope. | ||
4 | //! Note that the reference search is possible for not all of the classified items. | ||
5 | |||
6 | use hir::{ | ||
7 | Adt, AssocItem, HasSource, ImplBlock, Local, MacroDef, Module, ModuleDef, StructField, | ||
8 | TypeParam, VariantDef, | ||
9 | }; | ||
10 | use ra_syntax::{ast, ast::VisibilityOwner}; | ||
11 | |||
12 | use crate::db::RootDatabase; | ||
13 | |||
14 | #[derive(Debug, PartialEq, Eq)] | ||
15 | pub enum NameKind { | ||
16 | Macro(MacroDef), | ||
17 | Field(StructField), | ||
18 | AssocItem(AssocItem), | ||
19 | Def(ModuleDef), | ||
20 | SelfType(ImplBlock), | ||
21 | Local(Local), | ||
22 | TypeParam(TypeParam), | ||
23 | } | ||
24 | |||
25 | #[derive(PartialEq, Eq)] | ||
26 | pub(crate) struct NameDefinition { | ||
27 | pub visibility: Option<ast::Visibility>, | ||
28 | /// FIXME: this doesn't really make sense. For example, builtin types don't | ||
29 | /// really have a module. | ||
30 | pub container: Module, | ||
31 | pub kind: NameKind, | ||
32 | } | ||
33 | |||
34 | pub(super) fn from_assoc_item(db: &RootDatabase, item: AssocItem) -> NameDefinition { | ||
35 | let container = item.module(db); | ||
36 | let visibility = match item { | ||
37 | AssocItem::Function(f) => f.source(db).value.visibility(), | ||
38 | AssocItem::Const(c) => c.source(db).value.visibility(), | ||
39 | AssocItem::TypeAlias(a) => a.source(db).value.visibility(), | ||
40 | }; | ||
41 | let kind = NameKind::AssocItem(item); | ||
42 | NameDefinition { kind, container, visibility } | ||
43 | } | ||
44 | |||
45 | pub(super) fn from_struct_field(db: &RootDatabase, field: StructField) -> NameDefinition { | ||
46 | let kind = NameKind::Field(field); | ||
47 | let parent = field.parent_def(db); | ||
48 | let container = parent.module(db); | ||
49 | let visibility = match parent { | ||
50 | VariantDef::Struct(s) => s.source(db).value.visibility(), | ||
51 | VariantDef::Union(e) => e.source(db).value.visibility(), | ||
52 | VariantDef::EnumVariant(e) => e.source(db).value.parent_enum().visibility(), | ||
53 | }; | ||
54 | NameDefinition { kind, container, visibility } | ||
55 | } | ||
56 | |||
57 | pub(super) fn from_module_def( | ||
58 | db: &RootDatabase, | ||
59 | def: ModuleDef, | ||
60 | module: Option<Module>, | ||
61 | ) -> NameDefinition { | ||
62 | let kind = NameKind::Def(def); | ||
63 | let (container, visibility) = match def { | ||
64 | ModuleDef::Module(it) => { | ||
65 | let container = it.parent(db).or_else(|| Some(it)).unwrap(); | ||
66 | let visibility = it.declaration_source(db).and_then(|s| s.value.visibility()); | ||
67 | (container, visibility) | ||
68 | } | ||
69 | ModuleDef::EnumVariant(it) => { | ||
70 | let container = it.module(db); | ||
71 | let visibility = it.source(db).value.parent_enum().visibility(); | ||
72 | (container, visibility) | ||
73 | } | ||
74 | ModuleDef::Function(it) => (it.module(db), it.source(db).value.visibility()), | ||
75 | ModuleDef::Const(it) => (it.module(db), it.source(db).value.visibility()), | ||
76 | ModuleDef::Static(it) => (it.module(db), it.source(db).value.visibility()), | ||
77 | ModuleDef::Trait(it) => (it.module(db), it.source(db).value.visibility()), | ||
78 | ModuleDef::TypeAlias(it) => (it.module(db), it.source(db).value.visibility()), | ||
79 | ModuleDef::Adt(Adt::Struct(it)) => (it.module(db), it.source(db).value.visibility()), | ||
80 | ModuleDef::Adt(Adt::Union(it)) => (it.module(db), it.source(db).value.visibility()), | ||
81 | ModuleDef::Adt(Adt::Enum(it)) => (it.module(db), it.source(db).value.visibility()), | ||
82 | ModuleDef::BuiltinType(..) => (module.unwrap(), None), | ||
83 | }; | ||
84 | NameDefinition { kind, container, visibility } | ||
85 | } | ||
diff --git a/crates/ra_ide/src/references/rename.rs b/crates/ra_ide/src/references/rename.rs index 626efb603..08e77c01f 100644 --- a/crates/ra_ide/src/references/rename.rs +++ b/crates/ra_ide/src/references/rename.rs | |||
@@ -2,12 +2,14 @@ | |||
2 | 2 | ||
3 | use hir::ModuleSource; | 3 | use hir::ModuleSource; |
4 | use ra_db::{RelativePath, RelativePathBuf, SourceDatabase, SourceDatabaseExt}; | 4 | use ra_db::{RelativePath, RelativePathBuf, SourceDatabase, SourceDatabaseExt}; |
5 | use ra_syntax::{algo::find_node_at_offset, ast, tokenize, AstNode, SyntaxKind, SyntaxNode}; | 5 | use ra_ide_db::RootDatabase; |
6 | use ra_syntax::{ | ||
7 | algo::find_node_at_offset, ast, lex_single_valid_syntax_kind, AstNode, SyntaxKind, SyntaxNode, | ||
8 | }; | ||
6 | use ra_text_edit::TextEdit; | 9 | use ra_text_edit::TextEdit; |
7 | 10 | ||
8 | use crate::{ | 11 | use crate::{ |
9 | db::RootDatabase, FileId, FilePosition, FileSystemEdit, RangeInfo, SourceChange, | 12 | FileId, FilePosition, FileSystemEdit, RangeInfo, SourceChange, SourceFileEdit, TextRange, |
10 | SourceFileEdit, TextRange, | ||
11 | }; | 13 | }; |
12 | 14 | ||
13 | use super::find_all_refs; | 15 | use super::find_all_refs; |
@@ -17,11 +19,9 @@ pub(crate) fn rename( | |||
17 | position: FilePosition, | 19 | position: FilePosition, |
18 | new_name: &str, | 20 | new_name: &str, |
19 | ) -> Option<RangeInfo<SourceChange>> { | 21 | ) -> Option<RangeInfo<SourceChange>> { |
20 | let tokens = tokenize(new_name); | 22 | match lex_single_valid_syntax_kind(new_name)? { |
21 | if tokens.len() != 1 | 23 | SyntaxKind::IDENT | SyntaxKind::UNDERSCORE => (), |
22 | || (tokens[0].kind != SyntaxKind::IDENT && tokens[0].kind != SyntaxKind::UNDERSCORE) | 24 | _ => return None, |
23 | { | ||
24 | return None; | ||
25 | } | 25 | } |
26 | 26 | ||
27 | let parse = db.parse(position.file_id); | 27 | let parse = db.parse(position.file_id); |
diff --git a/crates/ra_ide/src/references/search_scope.rs b/crates/ra_ide/src/references/search_scope.rs index f8211a746..279f57be0 100644 --- a/crates/ra_ide/src/references/search_scope.rs +++ b/crates/ra_ide/src/references/search_scope.rs | |||
@@ -10,7 +10,7 @@ use ra_prof::profile; | |||
10 | use ra_syntax::{AstNode, TextRange}; | 10 | use ra_syntax::{AstNode, TextRange}; |
11 | use rustc_hash::FxHashMap; | 11 | use rustc_hash::FxHashMap; |
12 | 12 | ||
13 | use crate::db::RootDatabase; | 13 | use ra_ide_db::RootDatabase; |
14 | 14 | ||
15 | use super::{NameDefinition, NameKind}; | 15 | use super::{NameDefinition, NameKind}; |
16 | 16 | ||
@@ -19,59 +19,13 @@ pub struct SearchScope { | |||
19 | } | 19 | } |
20 | 20 | ||
21 | impl SearchScope { | 21 | impl SearchScope { |
22 | fn new(entries: FxHashMap<FileId, Option<TextRange>>) -> SearchScope { | 22 | pub(crate) fn for_def(def: &NameDefinition, db: &RootDatabase) -> SearchScope { |
23 | SearchScope { entries } | ||
24 | } | ||
25 | pub fn single_file(file: FileId) -> SearchScope { | ||
26 | SearchScope::new(std::iter::once((file, None)).collect()) | ||
27 | } | ||
28 | pub(crate) fn intersection(&self, other: &SearchScope) -> SearchScope { | ||
29 | let (mut small, mut large) = (&self.entries, &other.entries); | ||
30 | if small.len() > large.len() { | ||
31 | mem::swap(&mut small, &mut large) | ||
32 | } | ||
33 | |||
34 | let res = small | ||
35 | .iter() | ||
36 | .filter_map(|(file_id, r1)| { | ||
37 | let r2 = large.get(file_id)?; | ||
38 | let r = intersect_ranges(*r1, *r2)?; | ||
39 | Some((*file_id, r)) | ||
40 | }) | ||
41 | .collect(); | ||
42 | return SearchScope::new(res); | ||
43 | |||
44 | fn intersect_ranges( | ||
45 | r1: Option<TextRange>, | ||
46 | r2: Option<TextRange>, | ||
47 | ) -> Option<Option<TextRange>> { | ||
48 | match (r1, r2) { | ||
49 | (None, r) | (r, None) => Some(r), | ||
50 | (Some(r1), Some(r2)) => { | ||
51 | let r = r1.intersection(&r2)?; | ||
52 | Some(Some(r)) | ||
53 | } | ||
54 | } | ||
55 | } | ||
56 | } | ||
57 | } | ||
58 | |||
59 | impl IntoIterator for SearchScope { | ||
60 | type Item = (FileId, Option<TextRange>); | ||
61 | type IntoIter = std::collections::hash_map::IntoIter<FileId, Option<TextRange>>; | ||
62 | fn into_iter(self) -> Self::IntoIter { | ||
63 | self.entries.into_iter() | ||
64 | } | ||
65 | } | ||
66 | |||
67 | impl NameDefinition { | ||
68 | pub(crate) fn search_scope(&self, db: &RootDatabase) -> SearchScope { | ||
69 | let _p = profile("search_scope"); | 23 | let _p = profile("search_scope"); |
70 | 24 | ||
71 | let module_src = self.container.definition_source(db); | 25 | let module_src = def.container.definition_source(db); |
72 | let file_id = module_src.file_id.original_file(db); | 26 | let file_id = module_src.file_id.original_file(db); |
73 | 27 | ||
74 | if let NameKind::Local(var) = self.kind { | 28 | if let NameKind::Local(var) = def.kind { |
75 | let range = match var.parent(db) { | 29 | let range = match var.parent(db) { |
76 | DefWithBody::Function(f) => f.source(db).value.syntax().text_range(), | 30 | DefWithBody::Function(f) => f.source(db).value.syntax().text_range(), |
77 | DefWithBody::Const(c) => c.source(db).value.syntax().text_range(), | 31 | DefWithBody::Const(c) => c.source(db).value.syntax().text_range(), |
@@ -82,10 +36,10 @@ impl NameDefinition { | |||
82 | return SearchScope::new(res); | 36 | return SearchScope::new(res); |
83 | } | 37 | } |
84 | 38 | ||
85 | let vis = self.visibility.as_ref().map(|v| v.syntax().to_string()).unwrap_or_default(); | 39 | let vis = def.visibility.as_ref().map(|v| v.syntax().to_string()).unwrap_or_default(); |
86 | 40 | ||
87 | if vis.as_str() == "pub(super)" { | 41 | if vis.as_str() == "pub(super)" { |
88 | if let Some(parent_module) = self.container.parent(db) { | 42 | if let Some(parent_module) = def.container.parent(db) { |
89 | let mut res = FxHashMap::default(); | 43 | let mut res = FxHashMap::default(); |
90 | let parent_src = parent_module.definition_source(db); | 44 | let parent_src = parent_module.definition_source(db); |
91 | let file_id = parent_src.file_id.original_file(db); | 45 | let file_id = parent_src.file_id.original_file(db); |
@@ -118,7 +72,7 @@ impl NameDefinition { | |||
118 | return SearchScope::new(res); | 72 | return SearchScope::new(res); |
119 | } | 73 | } |
120 | if vis.as_str() == "pub" { | 74 | if vis.as_str() == "pub" { |
121 | let krate = self.container.krate(); | 75 | let krate = def.container.krate(); |
122 | for rev_dep in krate.reverse_dependencies(db) { | 76 | for rev_dep in krate.reverse_dependencies(db) { |
123 | let root_file = rev_dep.root_file(db); | 77 | let root_file = rev_dep.root_file(db); |
124 | let source_root_id = db.file_source_root(root_file); | 78 | let source_root_id = db.file_source_root(root_file); |
@@ -137,4 +91,48 @@ impl NameDefinition { | |||
137 | res.insert(file_id, range); | 91 | res.insert(file_id, range); |
138 | SearchScope::new(res) | 92 | SearchScope::new(res) |
139 | } | 93 | } |
94 | |||
95 | fn new(entries: FxHashMap<FileId, Option<TextRange>>) -> SearchScope { | ||
96 | SearchScope { entries } | ||
97 | } | ||
98 | pub fn single_file(file: FileId) -> SearchScope { | ||
99 | SearchScope::new(std::iter::once((file, None)).collect()) | ||
100 | } | ||
101 | pub(crate) fn intersection(&self, other: &SearchScope) -> SearchScope { | ||
102 | let (mut small, mut large) = (&self.entries, &other.entries); | ||
103 | if small.len() > large.len() { | ||
104 | mem::swap(&mut small, &mut large) | ||
105 | } | ||
106 | |||
107 | let res = small | ||
108 | .iter() | ||
109 | .filter_map(|(file_id, r1)| { | ||
110 | let r2 = large.get(file_id)?; | ||
111 | let r = intersect_ranges(*r1, *r2)?; | ||
112 | Some((*file_id, r)) | ||
113 | }) | ||
114 | .collect(); | ||
115 | return SearchScope::new(res); | ||
116 | |||
117 | fn intersect_ranges( | ||
118 | r1: Option<TextRange>, | ||
119 | r2: Option<TextRange>, | ||
120 | ) -> Option<Option<TextRange>> { | ||
121 | match (r1, r2) { | ||
122 | (None, r) | (r, None) => Some(r), | ||
123 | (Some(r1), Some(r2)) => { | ||
124 | let r = r1.intersection(&r2)?; | ||
125 | Some(Some(r)) | ||
126 | } | ||
127 | } | ||
128 | } | ||
129 | } | ||
130 | } | ||
131 | |||
132 | impl IntoIterator for SearchScope { | ||
133 | type Item = (FileId, Option<TextRange>); | ||
134 | type IntoIter = std::collections::hash_map::IntoIter<FileId, Option<TextRange>>; | ||
135 | fn into_iter(self) -> Self::IntoIter { | ||
136 | self.entries.into_iter() | ||
137 | } | ||
140 | } | 138 | } |
diff --git a/crates/ra_ide/src/runnables.rs b/crates/ra_ide/src/runnables.rs index 7533692f6..b6b0c70f9 100644 --- a/crates/ra_ide/src/runnables.rs +++ b/crates/ra_ide/src/runnables.rs | |||
@@ -3,12 +3,13 @@ | |||
3 | use hir::InFile; | 3 | use hir::InFile; |
4 | use itertools::Itertools; | 4 | use itertools::Itertools; |
5 | use ra_db::SourceDatabase; | 5 | use ra_db::SourceDatabase; |
6 | use ra_ide_db::RootDatabase; | ||
6 | use ra_syntax::{ | 7 | use ra_syntax::{ |
7 | ast::{self, AstNode, AttrsOwner, ModuleItemOwner, NameOwner}, | 8 | ast::{self, AstNode, AttrsOwner, ModuleItemOwner, NameOwner}, |
8 | match_ast, SyntaxNode, TextRange, | 9 | match_ast, SyntaxNode, TextRange, |
9 | }; | 10 | }; |
10 | 11 | ||
11 | use crate::{db::RootDatabase, FileId}; | 12 | use crate::FileId; |
12 | 13 | ||
13 | #[derive(Debug)] | 14 | #[derive(Debug)] |
14 | pub struct Runnable { | 15 | pub struct Runnable { |
@@ -43,7 +44,7 @@ fn runnable_fn(fn_def: ast::FnDef) -> Option<Runnable> { | |||
43 | let name = fn_def.name()?.text().clone(); | 44 | let name = fn_def.name()?.text().clone(); |
44 | let kind = if name == "main" { | 45 | let kind = if name == "main" { |
45 | RunnableKind::Bin | 46 | RunnableKind::Bin |
46 | } else if fn_def.has_atom_attr("test") { | 47 | } else if has_test_related_attribute(&fn_def) { |
47 | RunnableKind::Test { name: name.to_string() } | 48 | RunnableKind::Test { name: name.to_string() } |
48 | } else if fn_def.has_atom_attr("bench") { | 49 | } else if fn_def.has_atom_attr("bench") { |
49 | RunnableKind::Bench { name: name.to_string() } | 50 | RunnableKind::Bench { name: name.to_string() } |
@@ -53,6 +54,20 @@ fn runnable_fn(fn_def: ast::FnDef) -> Option<Runnable> { | |||
53 | Some(Runnable { range: fn_def.syntax().text_range(), kind }) | 54 | Some(Runnable { range: fn_def.syntax().text_range(), kind }) |
54 | } | 55 | } |
55 | 56 | ||
57 | /// This is a method with a heuristics to support test methods annotated with custom test annotations, such as | ||
58 | /// `#[test_case(...)]`, `#[tokio::test]` and similar. | ||
59 | /// Also a regular `#[test]` annotation is supported. | ||
60 | /// | ||
61 | /// It may produce false positives, for example, `#[wasm_bindgen_test]` requires a different command to run the test, | ||
62 | /// but it's better than not to have the runnables for the tests at all. | ||
63 | fn has_test_related_attribute(fn_def: &ast::FnDef) -> bool { | ||
64 | fn_def | ||
65 | .attrs() | ||
66 | .filter_map(|attr| attr.path()) | ||
67 | .map(|path| path.syntax().to_string().to_lowercase()) | ||
68 | .any(|attribute_text| attribute_text.contains("test")) | ||
69 | } | ||
70 | |||
56 | fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Option<Runnable> { | 71 | fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Option<Runnable> { |
57 | let has_test_function = module | 72 | let has_test_function = module |
58 | .item_list()? | 73 | .item_list()? |
diff --git a/crates/ra_ide/src/snapshots/highlighting.html b/crates/ra_ide/src/snapshots/highlighting.html index 1d130544f..1cc55e78b 100644 --- a/crates/ra_ide/src/snapshots/highlighting.html +++ b/crates/ra_ide/src/snapshots/highlighting.html | |||
@@ -34,6 +34,16 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
34 | <span class="function">foo</span>::<<span class="type.builtin">i32</span>>(); | 34 | <span class="function">foo</span>::<<span class="type.builtin">i32</span>>(); |
35 | } | 35 | } |
36 | 36 | ||
37 | <span class="macro">macro_rules</span><span class="macro">!</span> def_fn { | ||
38 | ($($tt:tt)*) => {$($tt)*} | ||
39 | } | ||
40 | |||
41 | <span class="macro">def_fn</span><span class="macro">!</span>{ | ||
42 | <span class="keyword">fn</span> <span class="function">bar</span>() -> <span class="type.builtin">u32</span> { | ||
43 | <span class="literal.numeric">100</span> | ||
44 | } | ||
45 | } | ||
46 | |||
37 | <span class="comment">// comment</span> | 47 | <span class="comment">// comment</span> |
38 | <span class="keyword">fn</span> <span class="function">main</span>() { | 48 | <span class="keyword">fn</span> <span class="function">main</span>() { |
39 | <span class="macro">println</span><span class="macro">!</span>(<span class="string">"Hello, {}!"</span>, <span class="literal.numeric">92</span>); | 49 | <span class="macro">println</span><span class="macro">!</span>(<span class="string">"Hello, {}!"</span>, <span class="literal.numeric">92</span>); |
diff --git a/crates/ra_ide/src/snapshots/rainbow_highlighting.html b/crates/ra_ide/src/snapshots/rainbow_highlighting.html index d90ee8540..918fd4b97 100644 --- a/crates/ra_ide/src/snapshots/rainbow_highlighting.html +++ b/crates/ra_ide/src/snapshots/rainbow_highlighting.html | |||
@@ -24,14 +24,14 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
24 | .keyword\.control { color: #F0DFAF; font-weight: bold; } | 24 | .keyword\.control { color: #F0DFAF; font-weight: bold; } |
25 | </style> | 25 | </style> |
26 | <pre><code><span class="keyword">fn</span> <span class="function">main</span>() { | 26 | <pre><code><span class="keyword">fn</span> <span class="function">main</span>() { |
27 | <span class="keyword">let</span> <span class="variable" data-binding-hash="8723171760279909834" style="color: hsl(307,91%,75%);">hello</span> = <span class="string">"hello"</span>; | 27 | <span class="keyword">let</span> <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span> = <span class="string">"hello"</span>; |
28 | <span class="keyword">let</span> <span class="variable" data-binding-hash="14702933417323009544" style="color: hsl(108,90%,49%);">x</span> = <span class="variable" data-binding-hash="8723171760279909834" style="color: hsl(307,91%,75%);">hello</span>.to_string(); | 28 | <span class="keyword">let</span> <span class="variable" data-binding-hash="4303609361109701698" style="color: hsl(242,75%,88%);">x</span> = <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span>.to_string(); |
29 | <span class="keyword">let</span> <span class="variable" data-binding-hash="5443150872754369068" style="color: hsl(215,43%,43%);">y</span> = <span class="variable" data-binding-hash="8723171760279909834" style="color: hsl(307,91%,75%);">hello</span>.to_string(); | 29 | <span class="keyword">let</span> <span class="variable" data-binding-hash="13865792086344377029" style="color: hsl(340,64%,86%);">y</span> = <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span>.to_string(); |
30 | 30 | ||
31 | <span class="keyword">let</span> <span class="variable" data-binding-hash="17358108296605513516" style="color: hsl(331,46%,60%);">x</span> = <span class="string">"other color please!"</span>; | 31 | <span class="keyword">let</span> <span class="variable" data-binding-hash="7011301204224269512" style="color: hsl(198,45%,40%);">x</span> = <span class="string">"other color please!"</span>; |
32 | <span class="keyword">let</span> <span class="variable" data-binding-hash="2073121142529774969" style="color: hsl(320,43%,74%);">y</span> = <span class="variable" data-binding-hash="17358108296605513516" style="color: hsl(331,46%,60%);">x</span>.to_string(); | 32 | <span class="keyword">let</span> <span class="variable" data-binding-hash="12461245066629867975" style="color: hsl(132,91%,68%);">y</span> = <span class="variable" data-binding-hash="7011301204224269512" style="color: hsl(198,45%,40%);">x</span>.to_string(); |
33 | } | 33 | } |
34 | 34 | ||
35 | <span class="keyword">fn</span> <span class="function">bar</span>() { | 35 | <span class="keyword">fn</span> <span class="function">bar</span>() { |
36 | <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable.mut" data-binding-hash="8723171760279909834" style="color: hsl(307,91%,75%);">hello</span> = <span class="string">"hello"</span>; | 36 | <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable.mut" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span> = <span class="string">"hello"</span>; |
37 | }</code></pre> \ No newline at end of file | 37 | }</code></pre> \ No newline at end of file |
diff --git a/crates/ra_ide/src/status.rs b/crates/ra_ide/src/status.rs index 1bb27eb85..30eb5c995 100644 --- a/crates/ra_ide/src/status.rs +++ b/crates/ra_ide/src/status.rs | |||
@@ -10,14 +10,14 @@ use ra_db::{ | |||
10 | }, | 10 | }, |
11 | FileTextQuery, SourceRootId, | 11 | FileTextQuery, SourceRootId, |
12 | }; | 12 | }; |
13 | use ra_ide_db::{ | ||
14 | symbol_index::{LibrarySymbolsQuery, SymbolIndex}, | ||
15 | RootDatabase, | ||
16 | }; | ||
13 | use ra_prof::{memory_usage, Bytes}; | 17 | use ra_prof::{memory_usage, Bytes}; |
14 | use ra_syntax::{ast, Parse, SyntaxNode}; | 18 | use ra_syntax::{ast, Parse, SyntaxNode}; |
15 | 19 | ||
16 | use crate::{ | 20 | use crate::FileId; |
17 | db::RootDatabase, | ||
18 | symbol_index::{LibrarySymbolsQuery, SymbolIndex}, | ||
19 | FileId, | ||
20 | }; | ||
21 | 21 | ||
22 | fn syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { | 22 | fn syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { |
23 | db.query(ra_db::ParseQuery).entries::<SyntaxTreeStats>() | 23 | db.query(ra_db::ParseQuery).entries::<SyntaxTreeStats>() |
diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs index 0411977b9..174e13595 100644 --- a/crates/ra_ide/src/syntax_highlighting.rs +++ b/crates/ra_ide/src/syntax_highlighting.rs | |||
@@ -1,14 +1,18 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use rustc_hash::{FxHashMap, FxHashSet}; | 3 | use rustc_hash::FxHashMap; |
4 | 4 | ||
5 | use hir::{InFile, Name, SourceBinder}; | 5 | use hir::{HirFileId, InFile, Name, SourceAnalyzer, SourceBinder}; |
6 | use ra_db::SourceDatabase; | 6 | use ra_db::SourceDatabase; |
7 | use ra_ide_db::RootDatabase; | ||
7 | use ra_prof::profile; | 8 | use ra_prof::profile; |
8 | use ra_syntax::{ast, AstNode, Direction, SyntaxElement, SyntaxKind, SyntaxKind::*, TextRange, T}; | 9 | use ra_syntax::{ |
10 | ast, AstNode, Direction, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken, TextRange, | ||
11 | WalkEvent, T, | ||
12 | }; | ||
9 | 13 | ||
10 | use crate::{ | 14 | use crate::{ |
11 | db::RootDatabase, | 15 | expand::descend_into_macros_with_analyzer, |
12 | references::{ | 16 | references::{ |
13 | classify_name, classify_name_ref, | 17 | classify_name, classify_name_ref, |
14 | NameKind::{self, *}, | 18 | NameKind::{self, *}, |
@@ -72,121 +76,186 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa | |||
72 | let parse = db.parse(file_id); | 76 | let parse = db.parse(file_id); |
73 | let root = parse.tree().syntax().clone(); | 77 | let root = parse.tree().syntax().clone(); |
74 | 78 | ||
75 | fn calc_binding_hash(file_id: FileId, name: &Name, shadow_count: u32) -> u64 { | 79 | let mut sb = SourceBinder::new(db); |
76 | fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 { | 80 | let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default(); |
77 | use std::{collections::hash_map::DefaultHasher, hash::Hasher}; | 81 | let mut res = Vec::new(); |
82 | let analyzer = sb.analyze(InFile::new(file_id.into(), &root), None); | ||
78 | 83 | ||
79 | let mut hasher = DefaultHasher::new(); | 84 | let mut in_macro_call = None; |
80 | x.hash(&mut hasher); | 85 | |
81 | hasher.finish() | 86 | for event in root.preorder_with_tokens() { |
87 | match event { | ||
88 | WalkEvent::Enter(node) => match node.kind() { | ||
89 | MACRO_CALL => { | ||
90 | in_macro_call = Some(node.clone()); | ||
91 | if let Some(range) = highlight_macro(InFile::new(file_id.into(), node)) { | ||
92 | res.push(HighlightedRange { range, tag: tags::MACRO, binding_hash: None }); | ||
93 | } | ||
94 | } | ||
95 | _ if in_macro_call.is_some() => { | ||
96 | if let Some(token) = node.as_token() { | ||
97 | if let Some((tag, binding_hash)) = highlight_token_tree( | ||
98 | db, | ||
99 | &mut sb, | ||
100 | &analyzer, | ||
101 | &mut bindings_shadow_count, | ||
102 | InFile::new(file_id.into(), token.clone()), | ||
103 | ) { | ||
104 | res.push(HighlightedRange { | ||
105 | range: node.text_range(), | ||
106 | tag, | ||
107 | binding_hash, | ||
108 | }); | ||
109 | } | ||
110 | } | ||
111 | } | ||
112 | _ => { | ||
113 | if let Some((tag, binding_hash)) = highlight_node( | ||
114 | db, | ||
115 | &mut sb, | ||
116 | &mut bindings_shadow_count, | ||
117 | InFile::new(file_id.into(), node.clone()), | ||
118 | ) { | ||
119 | res.push(HighlightedRange { range: node.text_range(), tag, binding_hash }); | ||
120 | } | ||
121 | } | ||
122 | }, | ||
123 | WalkEvent::Leave(node) => { | ||
124 | if let Some(m) = in_macro_call.as_ref() { | ||
125 | if *m == node { | ||
126 | in_macro_call = None; | ||
127 | } | ||
128 | } | ||
129 | } | ||
82 | } | 130 | } |
131 | } | ||
83 | 132 | ||
84 | hash((file_id, name, shadow_count)) | 133 | res |
134 | } | ||
135 | |||
136 | fn highlight_macro(node: InFile<SyntaxElement>) -> Option<TextRange> { | ||
137 | let macro_call = ast::MacroCall::cast(node.value.as_node()?.clone())?; | ||
138 | let path = macro_call.path()?; | ||
139 | let name_ref = path.segment()?.name_ref()?; | ||
140 | |||
141 | let range_start = name_ref.syntax().text_range().start(); | ||
142 | let mut range_end = name_ref.syntax().text_range().end(); | ||
143 | for sibling in path.syntax().siblings_with_tokens(Direction::Next) { | ||
144 | match sibling.kind() { | ||
145 | T![!] | IDENT => range_end = sibling.text_range().end(), | ||
146 | _ => (), | ||
147 | } | ||
85 | } | 148 | } |
86 | 149 | ||
87 | let mut sb = SourceBinder::new(db); | 150 | Some(TextRange::from_to(range_start, range_end)) |
151 | } | ||
88 | 152 | ||
89 | // Visited nodes to handle highlighting priorities | 153 | fn highlight_token_tree( |
90 | // FIXME: retain only ranges here | 154 | db: &RootDatabase, |
91 | let mut highlighted: FxHashSet<SyntaxElement> = FxHashSet::default(); | 155 | sb: &mut SourceBinder<RootDatabase>, |
92 | let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default(); | 156 | analyzer: &SourceAnalyzer, |
157 | bindings_shadow_count: &mut FxHashMap<Name, u32>, | ||
158 | token: InFile<SyntaxToken>, | ||
159 | ) -> Option<(&'static str, Option<u64>)> { | ||
160 | if token.value.parent().kind() != TOKEN_TREE { | ||
161 | return None; | ||
162 | } | ||
163 | let token = descend_into_macros_with_analyzer(db, analyzer, token); | ||
164 | let expanded = { | ||
165 | let parent = token.value.parent(); | ||
166 | // We only care Name and Name_ref | ||
167 | match (token.value.kind(), parent.kind()) { | ||
168 | (IDENT, NAME) | (IDENT, NAME_REF) => token.with_value(parent.into()), | ||
169 | _ => token.map(|it| it.into()), | ||
170 | } | ||
171 | }; | ||
93 | 172 | ||
94 | let mut res = Vec::new(); | 173 | highlight_node(db, sb, bindings_shadow_count, expanded) |
95 | for node in root.descendants_with_tokens() { | 174 | } |
96 | if highlighted.contains(&node) { | 175 | |
97 | continue; | 176 | fn highlight_node( |
177 | db: &RootDatabase, | ||
178 | sb: &mut SourceBinder<RootDatabase>, | ||
179 | bindings_shadow_count: &mut FxHashMap<Name, u32>, | ||
180 | node: InFile<SyntaxElement>, | ||
181 | ) -> Option<(&'static str, Option<u64>)> { | ||
182 | let mut binding_hash = None; | ||
183 | let tag = match node.value.kind() { | ||
184 | FN_DEF => { | ||
185 | bindings_shadow_count.clear(); | ||
186 | return None; | ||
98 | } | 187 | } |
99 | let mut binding_hash = None; | 188 | COMMENT => tags::LITERAL_COMMENT, |
100 | let tag = match node.kind() { | 189 | STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => tags::LITERAL_STRING, |
101 | FN_DEF => { | 190 | ATTR => tags::LITERAL_ATTRIBUTE, |
102 | bindings_shadow_count.clear(); | 191 | // Special-case field init shorthand |
103 | continue; | 192 | NAME_REF if node.value.parent().and_then(ast::RecordField::cast).is_some() => tags::FIELD, |
104 | } | 193 | NAME_REF if node.value.ancestors().any(|it| it.kind() == ATTR) => return None, |
105 | COMMENT => tags::LITERAL_COMMENT, | 194 | NAME_REF => { |
106 | STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => tags::LITERAL_STRING, | 195 | let name_ref = node.value.as_node().cloned().and_then(ast::NameRef::cast).unwrap(); |
107 | ATTR => tags::LITERAL_ATTRIBUTE, | 196 | let name_kind = classify_name_ref(sb, node.with_value(&name_ref)).map(|d| d.kind); |
108 | // Special-case field init shorthand | 197 | match name_kind { |
109 | NAME_REF if node.parent().and_then(ast::RecordField::cast).is_some() => tags::FIELD, | 198 | Some(name_kind) => { |
110 | NAME_REF if node.ancestors().any(|it| it.kind() == ATTR) => continue, | 199 | if let Local(local) = &name_kind { |
111 | NAME_REF => { | 200 | if let Some(name) = local.name(db) { |
112 | let name_ref = node.as_node().cloned().and_then(ast::NameRef::cast).unwrap(); | 201 | let shadow_count = |
113 | let name_kind = classify_name_ref(&mut sb, InFile::new(file_id.into(), &name_ref)) | 202 | bindings_shadow_count.entry(name.clone()).or_default(); |
114 | .map(|d| d.kind); | 203 | binding_hash = |
115 | match name_kind { | 204 | Some(calc_binding_hash(node.file_id, &name, *shadow_count)) |
116 | Some(name_kind) => { | 205 | } |
117 | if let Local(local) = &name_kind { | 206 | }; |
118 | if let Some(name) = local.name(db) { | 207 | |
119 | let shadow_count = | 208 | highlight_name(db, name_kind) |
120 | bindings_shadow_count.entry(name.clone()).or_default(); | ||
121 | binding_hash = | ||
122 | Some(calc_binding_hash(file_id, &name, *shadow_count)) | ||
123 | } | ||
124 | }; | ||
125 | |||
126 | highlight_name(db, name_kind) | ||
127 | } | ||
128 | _ => continue, | ||
129 | } | ||
130 | } | ||
131 | NAME => { | ||
132 | let name = node.as_node().cloned().and_then(ast::Name::cast).unwrap(); | ||
133 | let name_kind = | ||
134 | classify_name(&mut sb, InFile::new(file_id.into(), &name)).map(|d| d.kind); | ||
135 | |||
136 | if let Some(Local(local)) = &name_kind { | ||
137 | if let Some(name) = local.name(db) { | ||
138 | let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); | ||
139 | *shadow_count += 1; | ||
140 | binding_hash = Some(calc_binding_hash(file_id, &name, *shadow_count)) | ||
141 | } | ||
142 | }; | ||
143 | |||
144 | match name_kind { | ||
145 | Some(name_kind) => highlight_name(db, name_kind), | ||
146 | None => name.syntax().parent().map_or(tags::FUNCTION, |x| match x.kind() { | ||
147 | STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_ALIAS_DEF => tags::TYPE, | ||
148 | TYPE_PARAM => tags::TYPE_PARAM, | ||
149 | RECORD_FIELD_DEF => tags::FIELD, | ||
150 | _ => tags::FUNCTION, | ||
151 | }), | ||
152 | } | 209 | } |
210 | _ => return None, | ||
153 | } | 211 | } |
154 | INT_NUMBER | FLOAT_NUMBER => tags::LITERAL_NUMERIC, | 212 | } |
155 | BYTE => tags::LITERAL_BYTE, | 213 | NAME => { |
156 | CHAR => tags::LITERAL_CHAR, | 214 | let name = node.value.as_node().cloned().and_then(ast::Name::cast).unwrap(); |
157 | LIFETIME => tags::TYPE_LIFETIME, | 215 | let name_kind = classify_name(sb, node.with_value(&name)).map(|d| d.kind); |
158 | T![unsafe] => tags::KEYWORD_UNSAFE, | 216 | |
159 | k if is_control_keyword(k) => tags::KEYWORD_CONTROL, | 217 | if let Some(Local(local)) = &name_kind { |
160 | k if k.is_keyword() => tags::KEYWORD, | 218 | if let Some(name) = local.name(db) { |
161 | _ => { | 219 | let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); |
162 | if let Some(macro_call) = node.as_node().cloned().and_then(ast::MacroCall::cast) { | 220 | *shadow_count += 1; |
163 | if let Some(path) = macro_call.path() { | 221 | binding_hash = Some(calc_binding_hash(node.file_id, &name, *shadow_count)) |
164 | if let Some(segment) = path.segment() { | ||
165 | if let Some(name_ref) = segment.name_ref() { | ||
166 | highlighted.insert(name_ref.syntax().clone().into()); | ||
167 | let range_start = name_ref.syntax().text_range().start(); | ||
168 | let mut range_end = name_ref.syntax().text_range().end(); | ||
169 | for sibling in path.syntax().siblings_with_tokens(Direction::Next) { | ||
170 | match sibling.kind() { | ||
171 | T![!] | IDENT => range_end = sibling.text_range().end(), | ||
172 | _ => (), | ||
173 | } | ||
174 | } | ||
175 | res.push(HighlightedRange { | ||
176 | range: TextRange::from_to(range_start, range_end), | ||
177 | tag: tags::MACRO, | ||
178 | binding_hash: None, | ||
179 | }) | ||
180 | } | ||
181 | } | ||
182 | } | ||
183 | } | 222 | } |
184 | continue; | 223 | }; |
224 | |||
225 | match name_kind { | ||
226 | Some(name_kind) => highlight_name(db, name_kind), | ||
227 | None => name.syntax().parent().map_or(tags::FUNCTION, |x| match x.kind() { | ||
228 | STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_ALIAS_DEF => tags::TYPE, | ||
229 | TYPE_PARAM => tags::TYPE_PARAM, | ||
230 | RECORD_FIELD_DEF => tags::FIELD, | ||
231 | _ => tags::FUNCTION, | ||
232 | }), | ||
185 | } | 233 | } |
186 | }; | 234 | } |
187 | res.push(HighlightedRange { range: node.text_range(), tag, binding_hash }) | 235 | INT_NUMBER | FLOAT_NUMBER => tags::LITERAL_NUMERIC, |
236 | BYTE => tags::LITERAL_BYTE, | ||
237 | CHAR => tags::LITERAL_CHAR, | ||
238 | LIFETIME => tags::TYPE_LIFETIME, | ||
239 | T![unsafe] => tags::KEYWORD_UNSAFE, | ||
240 | k if is_control_keyword(k) => tags::KEYWORD_CONTROL, | ||
241 | k if k.is_keyword() => tags::KEYWORD, | ||
242 | |||
243 | _ => return None, | ||
244 | }; | ||
245 | |||
246 | return Some((tag, binding_hash)); | ||
247 | |||
248 | fn calc_binding_hash(file_id: HirFileId, name: &Name, shadow_count: u32) -> u64 { | ||
249 | fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 { | ||
250 | use std::{collections::hash_map::DefaultHasher, hash::Hasher}; | ||
251 | |||
252 | let mut hasher = DefaultHasher::new(); | ||
253 | x.hash(&mut hasher); | ||
254 | hasher.finish() | ||
255 | } | ||
256 | |||
257 | hash((file_id, name, shadow_count)) | ||
188 | } | 258 | } |
189 | res | ||
190 | } | 259 | } |
191 | 260 | ||
192 | pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { | 261 | pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { |
@@ -251,19 +320,16 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo | |||
251 | fn highlight_name(db: &RootDatabase, name_kind: NameKind) -> &'static str { | 320 | fn highlight_name(db: &RootDatabase, name_kind: NameKind) -> &'static str { |
252 | match name_kind { | 321 | match name_kind { |
253 | Macro(_) => tags::MACRO, | 322 | Macro(_) => tags::MACRO, |
254 | Field(_) => tags::FIELD, | 323 | StructField(_) => tags::FIELD, |
255 | AssocItem(hir::AssocItem::Function(_)) => tags::FUNCTION, | 324 | ModuleDef(hir::ModuleDef::Module(_)) => tags::MODULE, |
256 | AssocItem(hir::AssocItem::Const(_)) => tags::CONSTANT, | 325 | ModuleDef(hir::ModuleDef::Function(_)) => tags::FUNCTION, |
257 | AssocItem(hir::AssocItem::TypeAlias(_)) => tags::TYPE, | 326 | ModuleDef(hir::ModuleDef::Adt(_)) => tags::TYPE, |
258 | Def(hir::ModuleDef::Module(_)) => tags::MODULE, | 327 | ModuleDef(hir::ModuleDef::EnumVariant(_)) => tags::CONSTANT, |
259 | Def(hir::ModuleDef::Function(_)) => tags::FUNCTION, | 328 | ModuleDef(hir::ModuleDef::Const(_)) => tags::CONSTANT, |
260 | Def(hir::ModuleDef::Adt(_)) => tags::TYPE, | 329 | ModuleDef(hir::ModuleDef::Static(_)) => tags::CONSTANT, |
261 | Def(hir::ModuleDef::EnumVariant(_)) => tags::CONSTANT, | 330 | ModuleDef(hir::ModuleDef::Trait(_)) => tags::TYPE, |
262 | Def(hir::ModuleDef::Const(_)) => tags::CONSTANT, | 331 | ModuleDef(hir::ModuleDef::TypeAlias(_)) => tags::TYPE, |
263 | Def(hir::ModuleDef::Static(_)) => tags::CONSTANT, | 332 | ModuleDef(hir::ModuleDef::BuiltinType(_)) => tags::TYPE_BUILTIN, |
264 | Def(hir::ModuleDef::Trait(_)) => tags::TYPE, | ||
265 | Def(hir::ModuleDef::TypeAlias(_)) => tags::TYPE, | ||
266 | Def(hir::ModuleDef::BuiltinType(_)) => tags::TYPE_BUILTIN, | ||
267 | SelfType(_) => tags::TYPE_SELF, | 333 | SelfType(_) => tags::TYPE_SELF, |
268 | TypeParam(_) => tags::TYPE_PARAM, | 334 | TypeParam(_) => tags::TYPE_PARAM, |
269 | Local(local) => { | 335 | Local(local) => { |
@@ -331,6 +397,16 @@ fn foo<T>() -> T { | |||
331 | foo::<i32>(); | 397 | foo::<i32>(); |
332 | } | 398 | } |
333 | 399 | ||
400 | macro_rules! def_fn { | ||
401 | ($($tt:tt)*) => {$($tt)*} | ||
402 | } | ||
403 | |||
404 | def_fn!{ | ||
405 | fn bar() -> u32 { | ||
406 | 100 | ||
407 | } | ||
408 | } | ||
409 | |||
334 | // comment | 410 | // comment |
335 | fn main() { | 411 | fn main() { |
336 | println!("Hello, {}!", 92); | 412 | println!("Hello, {}!", 92); |
diff --git a/crates/ra_ide/src/syntax_tree.rs b/crates/ra_ide/src/syntax_tree.rs index 4d0f0fc47..55966daf3 100644 --- a/crates/ra_ide/src/syntax_tree.rs +++ b/crates/ra_ide/src/syntax_tree.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use crate::db::RootDatabase; | ||
4 | use ra_db::SourceDatabase; | 3 | use ra_db::SourceDatabase; |
4 | use ra_ide_db::RootDatabase; | ||
5 | use ra_syntax::{ | 5 | use ra_syntax::{ |
6 | algo, AstNode, NodeOrToken, SourceFile, | 6 | algo, AstNode, NodeOrToken, SourceFile, |
7 | SyntaxKind::{RAW_STRING, STRING}, | 7 | SyntaxKind::{RAW_STRING, STRING}, |
diff --git a/crates/ra_ide/src/typing.rs b/crates/ra_ide/src/typing.rs index 21e5be9b3..e5d1779fd 100644 --- a/crates/ra_ide/src/typing.rs +++ b/crates/ra_ide/src/typing.rs | |||
@@ -15,6 +15,7 @@ | |||
15 | 15 | ||
16 | use ra_db::{FilePosition, SourceDatabase}; | 16 | use ra_db::{FilePosition, SourceDatabase}; |
17 | use ra_fmt::leading_indent; | 17 | use ra_fmt::leading_indent; |
18 | use ra_ide_db::RootDatabase; | ||
18 | use ra_syntax::{ | 19 | use ra_syntax::{ |
19 | algo::find_node_at_offset, | 20 | algo::find_node_at_offset, |
20 | ast::{self, AstToken}, | 21 | ast::{self, AstToken}, |
@@ -24,7 +25,7 @@ use ra_syntax::{ | |||
24 | }; | 25 | }; |
25 | use ra_text_edit::TextEdit; | 26 | use ra_text_edit::TextEdit; |
26 | 27 | ||
27 | use crate::{db::RootDatabase, source_change::SingleFileChange, SourceChange, SourceFileEdit}; | 28 | use crate::{source_change::SingleFileChange, SourceChange, SourceFileEdit}; |
28 | 29 | ||
29 | pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> { | 30 | pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> { |
30 | let parse = db.parse(position.file_id); | 31 | let parse = db.parse(position.file_id); |
diff --git a/crates/ra_ide_db/Cargo.toml b/crates/ra_ide_db/Cargo.toml new file mode 100644 index 000000000..716e88bc1 --- /dev/null +++ b/crates/ra_ide_db/Cargo.toml | |||
@@ -0,0 +1,47 @@ | |||
1 | [package] | ||
2 | edition = "2018" | ||
3 | name = "ra_ide_db" | ||
4 | version = "0.1.0" | ||
5 | authors = ["rust-analyzer developers"] | ||
6 | |||
7 | [lib] | ||
8 | doctest = false | ||
9 | |||
10 | [features] | ||
11 | wasm = [] | ||
12 | |||
13 | [dependencies] | ||
14 | either = "1.5" | ||
15 | format-buf = "1.0.0" | ||
16 | indexmap = "1.3.0" | ||
17 | itertools = "0.8.0" | ||
18 | join_to_string = "0.1.3" | ||
19 | log = "0.4.5" | ||
20 | rayon = "1.0.2" | ||
21 | fst = { version = "0.3.1", default-features = false } | ||
22 | rustc-hash = "1.0" | ||
23 | unicase = "2.2.0" | ||
24 | superslice = "1.0.0" | ||
25 | rand = { version = "0.7.0", features = ["small_rng"] } | ||
26 | once_cell = "1.2.0" | ||
27 | |||
28 | ra_syntax = { path = "../ra_syntax" } | ||
29 | ra_text_edit = { path = "../ra_text_edit" } | ||
30 | ra_db = { path = "../ra_db" } | ||
31 | ra_cfg = { path = "../ra_cfg" } | ||
32 | ra_fmt = { path = "../ra_fmt" } | ||
33 | ra_prof = { path = "../ra_prof" } | ||
34 | test_utils = { path = "../test_utils" } | ||
35 | |||
36 | # ra_ide should depend only on the top-level `hir` package. if you need | ||
37 | # something from some `hir_xxx` subpackage, reexport the API via `hir`. | ||
38 | hir = { path = "../ra_hir", package = "ra_hir" } | ||
39 | |||
40 | [dev-dependencies] | ||
41 | insta = "0.13.0" | ||
42 | |||
43 | [dev-dependencies.proptest] | ||
44 | version = "0.9.0" | ||
45 | # Disable `fork` feature to allow compiling on webassembly | ||
46 | default-features = false | ||
47 | features = ["std", "bit-set", "break-dead-code"] | ||
diff --git a/crates/ra_ide/src/change.rs b/crates/ra_ide_db/src/change.rs index b0aa2c8e0..4668784d3 100644 --- a/crates/ra_ide/src/change.rs +++ b/crates/ra_ide_db/src/change.rs | |||
@@ -1,4 +1,5 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! Defines a unit of change that can applied to a state of IDE to get the next |
2 | //! state. Changes are transactional. | ||
2 | 3 | ||
3 | use std::{fmt, sync::Arc, time}; | 4 | use std::{fmt, sync::Arc, time}; |
4 | 5 | ||
@@ -14,8 +15,8 @@ use rayon::prelude::*; | |||
14 | use rustc_hash::FxHashMap; | 15 | use rustc_hash::FxHashMap; |
15 | 16 | ||
16 | use crate::{ | 17 | use crate::{ |
17 | db::{DebugData, RootDatabase}, | ||
18 | symbol_index::{SymbolIndex, SymbolsDatabase}, | 18 | symbol_index::{SymbolIndex, SymbolsDatabase}, |
19 | DebugData, RootDatabase, | ||
19 | }; | 20 | }; |
20 | 21 | ||
21 | #[derive(Default)] | 22 | #[derive(Default)] |
@@ -145,6 +146,8 @@ impl LibraryData { | |||
145 | root_id: SourceRootId, | 146 | root_id: SourceRootId, |
146 | files: Vec<(FileId, RelativePathBuf, Arc<String>)>, | 147 | files: Vec<(FileId, RelativePathBuf, Arc<String>)>, |
147 | ) -> LibraryData { | 148 | ) -> LibraryData { |
149 | let _p = profile("LibraryData::prepare"); | ||
150 | |||
148 | #[cfg(not(feature = "wasm"))] | 151 | #[cfg(not(feature = "wasm"))] |
149 | let iter = files.par_iter(); | 152 | let iter = files.par_iter(); |
150 | #[cfg(feature = "wasm")] | 153 | #[cfg(feature = "wasm")] |
@@ -166,13 +169,15 @@ impl LibraryData { | |||
166 | const GC_COOLDOWN: time::Duration = time::Duration::from_millis(100); | 169 | const GC_COOLDOWN: time::Duration = time::Duration::from_millis(100); |
167 | 170 | ||
168 | impl RootDatabase { | 171 | impl RootDatabase { |
169 | pub(crate) fn apply_change(&mut self, change: AnalysisChange) { | 172 | pub fn request_cancellation(&mut self) { |
173 | let _p = profile("RootDatabase::request_cancellation"); | ||
174 | self.salsa_runtime_mut().synthetic_write(Durability::LOW); | ||
175 | } | ||
176 | |||
177 | pub fn apply_change(&mut self, change: AnalysisChange) { | ||
170 | let _p = profile("RootDatabase::apply_change"); | 178 | let _p = profile("RootDatabase::apply_change"); |
179 | self.request_cancellation(); | ||
171 | log::info!("apply_change {:?}", change); | 180 | log::info!("apply_change {:?}", change); |
172 | { | ||
173 | let _p = profile("RootDatabase::apply_change/cancellation"); | ||
174 | self.salsa_runtime_mut().synthetic_write(Durability::LOW); | ||
175 | } | ||
176 | if !change.new_roots.is_empty() { | 181 | if !change.new_roots.is_empty() { |
177 | let mut local_roots = Vec::clone(&self.local_roots()); | 182 | let mut local_roots = Vec::clone(&self.local_roots()); |
178 | for (root_id, is_local) in change.new_roots { | 183 | for (root_id, is_local) in change.new_roots { |
@@ -241,7 +246,7 @@ impl RootDatabase { | |||
241 | self.set_source_root_with_durability(root_id, Arc::new(source_root), durability); | 246 | self.set_source_root_with_durability(root_id, Arc::new(source_root), durability); |
242 | } | 247 | } |
243 | 248 | ||
244 | pub(crate) fn maybe_collect_garbage(&mut self) { | 249 | pub fn maybe_collect_garbage(&mut self) { |
245 | if cfg!(feature = "wasm") { | 250 | if cfg!(feature = "wasm") { |
246 | return; | 251 | return; |
247 | } | 252 | } |
@@ -251,7 +256,7 @@ impl RootDatabase { | |||
251 | } | 256 | } |
252 | } | 257 | } |
253 | 258 | ||
254 | pub(crate) fn collect_garbage(&mut self) { | 259 | pub fn collect_garbage(&mut self) { |
255 | if cfg!(feature = "wasm") { | 260 | if cfg!(feature = "wasm") { |
256 | return; | 261 | return; |
257 | } | 262 | } |
@@ -278,7 +283,7 @@ impl RootDatabase { | |||
278 | self.query(hir::db::BodyQuery).sweep(sweep); | 283 | self.query(hir::db::BodyQuery).sweep(sweep); |
279 | } | 284 | } |
280 | 285 | ||
281 | pub(crate) fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> { | 286 | pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> { |
282 | let mut acc: Vec<(String, Bytes)> = vec![]; | 287 | let mut acc: Vec<(String, Bytes)> = vec![]; |
283 | let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); | 288 | let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); |
284 | macro_rules! sweep_each_query { | 289 | macro_rules! sweep_each_query { |
@@ -299,45 +304,74 @@ impl RootDatabase { | |||
299 | )*} | 304 | )*} |
300 | } | 305 | } |
301 | sweep_each_query![ | 306 | sweep_each_query![ |
307 | // SourceDatabase | ||
302 | ra_db::ParseQuery | 308 | ra_db::ParseQuery |
303 | ra_db::SourceRootCratesQuery | 309 | ra_db::SourceRootCratesQuery |
310 | |||
311 | // AstDatabase | ||
304 | hir::db::AstIdMapQuery | 312 | hir::db::AstIdMapQuery |
305 | hir::db::ParseMacroQuery | 313 | hir::db::InternMacroQuery |
306 | hir::db::MacroDefQuery | ||
307 | hir::db::MacroArgQuery | 314 | hir::db::MacroArgQuery |
315 | hir::db::MacroDefQuery | ||
316 | hir::db::ParseMacroQuery | ||
308 | hir::db::MacroExpandQuery | 317 | hir::db::MacroExpandQuery |
318 | |||
319 | // DefDatabase | ||
320 | hir::db::RawItemsQuery | ||
321 | hir::db::ComputeCrateDefMapQuery | ||
309 | hir::db::StructDataQuery | 322 | hir::db::StructDataQuery |
323 | hir::db::UnionDataQuery | ||
310 | hir::db::EnumDataQuery | 324 | hir::db::EnumDataQuery |
325 | hir::db::ImplDataQuery | ||
311 | hir::db::TraitDataQuery | 326 | hir::db::TraitDataQuery |
312 | hir::db::RawItemsQuery | ||
313 | hir::db::ComputeCrateDefMapQuery | ||
314 | hir::db::GenericParamsQuery | ||
315 | hir::db::FunctionDataQuery | ||
316 | hir::db::TypeAliasDataQuery | 327 | hir::db::TypeAliasDataQuery |
328 | hir::db::FunctionDataQuery | ||
317 | hir::db::ConstDataQuery | 329 | hir::db::ConstDataQuery |
318 | hir::db::StaticDataQuery | 330 | hir::db::StaticDataQuery |
331 | hir::db::BodyWithSourceMapQuery | ||
332 | hir::db::BodyQuery | ||
333 | hir::db::ExprScopesQuery | ||
334 | hir::db::GenericParamsQuery | ||
335 | hir::db::AttrsQuery | ||
319 | hir::db::ModuleLangItemsQuery | 336 | hir::db::ModuleLangItemsQuery |
320 | hir::db::CrateLangItemsQuery | 337 | hir::db::CrateLangItemsQuery |
321 | hir::db::LangItemQuery | 338 | hir::db::LangItemQuery |
322 | hir::db::DocumentationQuery | 339 | hir::db::DocumentationQuery |
323 | hir::db::ExprScopesQuery | 340 | |
341 | // InternDatabase | ||
342 | hir::db::InternFunctionQuery | ||
343 | hir::db::InternStructQuery | ||
344 | hir::db::InternUnionQuery | ||
345 | hir::db::InternEnumQuery | ||
346 | hir::db::InternConstQuery | ||
347 | hir::db::InternStaticQuery | ||
348 | hir::db::InternTraitQuery | ||
349 | hir::db::InternTypeAliasQuery | ||
350 | hir::db::InternImplQuery | ||
351 | |||
352 | // HirDatabase | ||
324 | hir::db::DoInferQuery | 353 | hir::db::DoInferQuery |
325 | hir::db::TyQuery | 354 | hir::db::TyQuery |
326 | hir::db::ValueTyQuery | 355 | hir::db::ValueTyQuery |
356 | hir::db::ImplSelfTyQuery | ||
357 | hir::db::ImplTraitQuery | ||
327 | hir::db::FieldTypesQuery | 358 | hir::db::FieldTypesQuery |
328 | hir::db::CallableItemSignatureQuery | 359 | hir::db::CallableItemSignatureQuery |
360 | hir::db::GenericPredicatesForParamQuery | ||
329 | hir::db::GenericPredicatesQuery | 361 | hir::db::GenericPredicatesQuery |
330 | hir::db::GenericDefaultsQuery | 362 | hir::db::GenericDefaultsQuery |
331 | hir::db::BodyWithSourceMapQuery | ||
332 | hir::db::BodyQuery | ||
333 | hir::db::ImplsInCrateQuery | 363 | hir::db::ImplsInCrateQuery |
334 | hir::db::ImplsForTraitQuery | 364 | hir::db::ImplsForTraitQuery |
365 | hir::db::TraitSolverQuery | ||
366 | hir::db::InternTypeCtorQuery | ||
367 | hir::db::InternChalkImplQuery | ||
368 | hir::db::InternAssocTyValueQuery | ||
335 | hir::db::AssociatedTyDataQuery | 369 | hir::db::AssociatedTyDataQuery |
370 | hir::db::AssociatedTyValueQuery | ||
371 | hir::db::TraitSolveQuery | ||
336 | hir::db::TraitDatumQuery | 372 | hir::db::TraitDatumQuery |
337 | hir::db::StructDatumQuery | 373 | hir::db::StructDatumQuery |
338 | hir::db::ImplDatumQuery | 374 | hir::db::ImplDatumQuery |
339 | hir::db::ImplDataQuery | ||
340 | hir::db::TraitSolveQuery | ||
341 | ]; | 375 | ]; |
342 | acc.sort_by_key(|it| std::cmp::Reverse(it.1)); | 376 | acc.sort_by_key(|it| std::cmp::Reverse(it.1)); |
343 | acc | 377 | acc |
diff --git a/crates/ra_ide_db/src/defs.rs b/crates/ra_ide_db/src/defs.rs new file mode 100644 index 000000000..030f44f86 --- /dev/null +++ b/crates/ra_ide_db/src/defs.rs | |||
@@ -0,0 +1,172 @@ | |||
1 | //! `NameDefinition` keeps information about the element we want to search references for. | ||
2 | //! The element is represented by `NameKind`. It's located inside some `container` and | ||
3 | //! has a `visibility`, which defines a search scope. | ||
4 | //! Note that the reference search is possible for not all of the classified items. | ||
5 | |||
6 | // FIXME: this badly needs rename/rewrite (matklad, 2020-02-06). | ||
7 | |||
8 | use hir::{ | ||
9 | Adt, HasSource, ImplBlock, InFile, Local, MacroDef, Module, ModuleDef, SourceBinder, | ||
10 | StructField, TypeParam, VariantDef, | ||
11 | }; | ||
12 | use ra_prof::profile; | ||
13 | use ra_syntax::{ | ||
14 | ast::{self, AstNode, VisibilityOwner}, | ||
15 | match_ast, | ||
16 | }; | ||
17 | |||
18 | use crate::RootDatabase; | ||
19 | |||
20 | #[derive(Debug, PartialEq, Eq)] | ||
21 | pub enum NameKind { | ||
22 | Macro(MacroDef), | ||
23 | StructField(StructField), | ||
24 | ModuleDef(ModuleDef), | ||
25 | SelfType(ImplBlock), | ||
26 | Local(Local), | ||
27 | TypeParam(TypeParam), | ||
28 | } | ||
29 | |||
30 | #[derive(PartialEq, Eq)] | ||
31 | pub struct NameDefinition { | ||
32 | pub visibility: Option<ast::Visibility>, | ||
33 | /// FIXME: this doesn't really make sense. For example, builtin types don't | ||
34 | /// really have a module. | ||
35 | pub container: Module, | ||
36 | pub kind: NameKind, | ||
37 | } | ||
38 | |||
39 | pub fn classify_name( | ||
40 | sb: &mut SourceBinder<RootDatabase>, | ||
41 | name: InFile<&ast::Name>, | ||
42 | ) -> Option<NameDefinition> { | ||
43 | let _p = profile("classify_name"); | ||
44 | let parent = name.value.syntax().parent()?; | ||
45 | |||
46 | match_ast! { | ||
47 | match parent { | ||
48 | ast::BindPat(it) => { | ||
49 | let src = name.with_value(it); | ||
50 | let local = sb.to_def(src)?; | ||
51 | Some(NameDefinition { | ||
52 | visibility: None, | ||
53 | container: local.module(sb.db), | ||
54 | kind: NameKind::Local(local), | ||
55 | }) | ||
56 | }, | ||
57 | ast::RecordFieldDef(it) => { | ||
58 | let src = name.with_value(it); | ||
59 | let field: hir::StructField = sb.to_def(src)?; | ||
60 | Some(from_struct_field(sb.db, field)) | ||
61 | }, | ||
62 | ast::Module(it) => { | ||
63 | let def = sb.to_def(name.with_value(it))?; | ||
64 | Some(from_module_def(sb.db, def.into(), None)) | ||
65 | }, | ||
66 | ast::StructDef(it) => { | ||
67 | let src = name.with_value(it); | ||
68 | let def: hir::Struct = sb.to_def(src)?; | ||
69 | Some(from_module_def(sb.db, def.into(), None)) | ||
70 | }, | ||
71 | ast::EnumDef(it) => { | ||
72 | let src = name.with_value(it); | ||
73 | let def: hir::Enum = sb.to_def(src)?; | ||
74 | Some(from_module_def(sb.db, def.into(), None)) | ||
75 | }, | ||
76 | ast::TraitDef(it) => { | ||
77 | let src = name.with_value(it); | ||
78 | let def: hir::Trait = sb.to_def(src)?; | ||
79 | Some(from_module_def(sb.db, def.into(), None)) | ||
80 | }, | ||
81 | ast::StaticDef(it) => { | ||
82 | let src = name.with_value(it); | ||
83 | let def: hir::Static = sb.to_def(src)?; | ||
84 | Some(from_module_def(sb.db, def.into(), None)) | ||
85 | }, | ||
86 | ast::EnumVariant(it) => { | ||
87 | let src = name.with_value(it); | ||
88 | let def: hir::EnumVariant = sb.to_def(src)?; | ||
89 | Some(from_module_def(sb.db, def.into(), None)) | ||
90 | }, | ||
91 | ast::FnDef(it) => { | ||
92 | let src = name.with_value(it); | ||
93 | let def: hir::Function = sb.to_def(src)?; | ||
94 | Some(from_module_def(sb.db, def.into(), None)) | ||
95 | }, | ||
96 | ast::ConstDef(it) => { | ||
97 | let src = name.with_value(it); | ||
98 | let def: hir::Const = sb.to_def(src)?; | ||
99 | Some(from_module_def(sb.db, def.into(), None)) | ||
100 | }, | ||
101 | ast::TypeAliasDef(it) => { | ||
102 | let src = name.with_value(it); | ||
103 | let def: hir::TypeAlias = sb.to_def(src)?; | ||
104 | Some(from_module_def(sb.db, def.into(), None)) | ||
105 | }, | ||
106 | ast::MacroCall(it) => { | ||
107 | let src = name.with_value(it); | ||
108 | let def = sb.to_def(src.clone())?; | ||
109 | |||
110 | let module = sb.to_module_def(src.file_id.original_file(sb.db))?; | ||
111 | |||
112 | Some(NameDefinition { | ||
113 | visibility: None, | ||
114 | container: module, | ||
115 | kind: NameKind::Macro(def), | ||
116 | }) | ||
117 | }, | ||
118 | ast::TypeParam(it) => { | ||
119 | let src = name.with_value(it); | ||
120 | let def = sb.to_def(src)?; | ||
121 | Some(NameDefinition { | ||
122 | visibility: None, | ||
123 | container: def.module(sb.db), | ||
124 | kind: NameKind::TypeParam(def), | ||
125 | }) | ||
126 | }, | ||
127 | _ => None, | ||
128 | } | ||
129 | } | ||
130 | } | ||
131 | |||
132 | pub fn from_struct_field(db: &RootDatabase, field: StructField) -> NameDefinition { | ||
133 | let kind = NameKind::StructField(field); | ||
134 | let parent = field.parent_def(db); | ||
135 | let container = parent.module(db); | ||
136 | let visibility = match parent { | ||
137 | VariantDef::Struct(s) => s.source(db).value.visibility(), | ||
138 | VariantDef::Union(e) => e.source(db).value.visibility(), | ||
139 | VariantDef::EnumVariant(e) => e.source(db).value.parent_enum().visibility(), | ||
140 | }; | ||
141 | NameDefinition { kind, container, visibility } | ||
142 | } | ||
143 | |||
144 | pub fn from_module_def( | ||
145 | db: &RootDatabase, | ||
146 | def: ModuleDef, | ||
147 | module: Option<Module>, | ||
148 | ) -> NameDefinition { | ||
149 | let kind = NameKind::ModuleDef(def); | ||
150 | let (container, visibility) = match def { | ||
151 | ModuleDef::Module(it) => { | ||
152 | let container = it.parent(db).or_else(|| Some(it)).unwrap(); | ||
153 | let visibility = it.declaration_source(db).and_then(|s| s.value.visibility()); | ||
154 | (container, visibility) | ||
155 | } | ||
156 | ModuleDef::EnumVariant(it) => { | ||
157 | let container = it.module(db); | ||
158 | let visibility = it.source(db).value.parent_enum().visibility(); | ||
159 | (container, visibility) | ||
160 | } | ||
161 | ModuleDef::Function(it) => (it.module(db), it.source(db).value.visibility()), | ||
162 | ModuleDef::Const(it) => (it.module(db), it.source(db).value.visibility()), | ||
163 | ModuleDef::Static(it) => (it.module(db), it.source(db).value.visibility()), | ||
164 | ModuleDef::Trait(it) => (it.module(db), it.source(db).value.visibility()), | ||
165 | ModuleDef::TypeAlias(it) => (it.module(db), it.source(db).value.visibility()), | ||
166 | ModuleDef::Adt(Adt::Struct(it)) => (it.module(db), it.source(db).value.visibility()), | ||
167 | ModuleDef::Adt(Adt::Union(it)) => (it.module(db), it.source(db).value.visibility()), | ||
168 | ModuleDef::Adt(Adt::Enum(it)) => (it.module(db), it.source(db).value.visibility()), | ||
169 | ModuleDef::BuiltinType(..) => (module.unwrap(), None), | ||
170 | }; | ||
171 | NameDefinition { kind, container, visibility } | ||
172 | } | ||
diff --git a/crates/ra_ide/src/feature_flags.rs b/crates/ra_ide_db/src/feature_flags.rs index 85617640d..1b3cabf4d 100644 --- a/crates/ra_ide/src/feature_flags.rs +++ b/crates/ra_ide_db/src/feature_flags.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! See docs for `FeatureFlags`. |
2 | 2 | ||
3 | use rustc_hash::FxHashMap; | 3 | use rustc_hash::FxHashMap; |
4 | 4 | ||
diff --git a/crates/ra_ide_db/src/imports_locator.rs b/crates/ra_ide_db/src/imports_locator.rs new file mode 100644 index 000000000..86383bcd0 --- /dev/null +++ b/crates/ra_ide_db/src/imports_locator.rs | |||
@@ -0,0 +1,72 @@ | |||
1 | //! This module contains an import search funcionality that is provided to the ra_assists module. | ||
2 | //! Later, this should be moved away to a separate crate that is accessible from the ra_assists module. | ||
3 | |||
4 | use hir::{db::HirDatabase, ModuleDef, SourceBinder}; | ||
5 | use ra_prof::profile; | ||
6 | use ra_syntax::{ast, AstNode, SyntaxKind::NAME}; | ||
7 | |||
8 | use crate::{ | ||
9 | defs::classify_name, | ||
10 | defs::NameKind, | ||
11 | symbol_index::{self, FileSymbol, Query}, | ||
12 | RootDatabase, | ||
13 | }; | ||
14 | |||
15 | pub struct ImportsLocator<'a> { | ||
16 | source_binder: SourceBinder<'a, RootDatabase>, | ||
17 | } | ||
18 | |||
19 | impl<'a> ImportsLocator<'a> { | ||
20 | pub fn new(db: &'a RootDatabase) -> Self { | ||
21 | Self { source_binder: SourceBinder::new(db) } | ||
22 | } | ||
23 | |||
24 | pub fn find_imports(&mut self, name_to_import: &str) -> Vec<ModuleDef> { | ||
25 | let _p = profile("search_for_imports"); | ||
26 | let db = self.source_binder.db; | ||
27 | |||
28 | let project_results = { | ||
29 | let mut query = Query::new(name_to_import.to_string()); | ||
30 | query.exact(); | ||
31 | query.limit(40); | ||
32 | symbol_index::world_symbols(db, query) | ||
33 | }; | ||
34 | let lib_results = { | ||
35 | let mut query = Query::new(name_to_import.to_string()); | ||
36 | query.libs(); | ||
37 | query.exact(); | ||
38 | query.limit(40); | ||
39 | symbol_index::world_symbols(db, query) | ||
40 | }; | ||
41 | |||
42 | project_results | ||
43 | .into_iter() | ||
44 | .chain(lib_results.into_iter()) | ||
45 | .filter_map(|import_candidate| self.get_name_definition(db, &import_candidate)) | ||
46 | .filter_map(|name_definition_to_import| match name_definition_to_import { | ||
47 | NameKind::ModuleDef(module_def) => Some(module_def), | ||
48 | _ => None, | ||
49 | }) | ||
50 | .collect() | ||
51 | } | ||
52 | |||
53 | fn get_name_definition( | ||
54 | &mut self, | ||
55 | db: &impl HirDatabase, | ||
56 | import_candidate: &FileSymbol, | ||
57 | ) -> Option<NameKind> { | ||
58 | let _p = profile("get_name_definition"); | ||
59 | let file_id = import_candidate.file_id.into(); | ||
60 | let candidate_node = import_candidate.ptr.to_node(&db.parse_or_expand(file_id)?); | ||
61 | let candidate_name_node = if candidate_node.kind() != NAME { | ||
62 | candidate_node.children().find(|it| it.kind() == NAME)? | ||
63 | } else { | ||
64 | candidate_node | ||
65 | }; | ||
66 | classify_name( | ||
67 | &mut self.source_binder, | ||
68 | hir::InFile { file_id, value: &ast::Name::cast(candidate_name_node)? }, | ||
69 | ) | ||
70 | .map(|it| it.kind) | ||
71 | } | ||
72 | } | ||
diff --git a/crates/ra_ide/src/db.rs b/crates/ra_ide_db/src/lib.rs index 47d0aed6f..877ac3c38 100644 --- a/crates/ra_ide/src/db.rs +++ b/crates/ra_ide_db/src/lib.rs | |||
@@ -1,4 +1,15 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! This crate defines the core datastructure representing IDE state -- `RootDatabase`. |
2 | //! | ||
3 | //! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search. | ||
4 | |||
5 | pub mod line_index; | ||
6 | pub mod line_index_utils; | ||
7 | pub mod feature_flags; | ||
8 | pub mod symbol_index; | ||
9 | pub mod change; | ||
10 | pub mod defs; | ||
11 | pub mod imports_locator; | ||
12 | mod wasm_shims; | ||
2 | 13 | ||
3 | use std::sync::Arc; | 14 | use std::sync::Arc; |
4 | 15 | ||
@@ -9,10 +20,7 @@ use ra_db::{ | |||
9 | }; | 20 | }; |
10 | use rustc_hash::FxHashMap; | 21 | use rustc_hash::FxHashMap; |
11 | 22 | ||
12 | use crate::{ | 23 | use crate::{feature_flags::FeatureFlags, line_index::LineIndex, symbol_index::SymbolsDatabase}; |
13 | symbol_index::{self, SymbolsDatabase}, | ||
14 | FeatureFlags, LineIndex, | ||
15 | }; | ||
16 | 24 | ||
17 | #[salsa::database( | 25 | #[salsa::database( |
18 | ra_db::SourceDatabaseStorage, | 26 | ra_db::SourceDatabaseStorage, |
@@ -25,12 +33,12 @@ use crate::{ | |||
25 | hir::db::HirDatabaseStorage | 33 | hir::db::HirDatabaseStorage |
26 | )] | 34 | )] |
27 | #[derive(Debug)] | 35 | #[derive(Debug)] |
28 | pub(crate) struct RootDatabase { | 36 | pub struct RootDatabase { |
29 | runtime: salsa::Runtime<RootDatabase>, | 37 | runtime: salsa::Runtime<RootDatabase>, |
30 | pub(crate) feature_flags: Arc<FeatureFlags>, | 38 | pub feature_flags: Arc<FeatureFlags>, |
31 | pub(crate) debug_data: Arc<DebugData>, | 39 | pub(crate) debug_data: Arc<DebugData>, |
32 | pub(crate) last_gc: crate::wasm_shims::Instant, | 40 | pub last_gc: crate::wasm_shims::Instant, |
33 | pub(crate) last_gc_check: crate::wasm_shims::Instant, | 41 | pub last_gc_check: crate::wasm_shims::Instant, |
34 | } | 42 | } |
35 | 43 | ||
36 | impl FileLoader for RootDatabase { | 44 | impl FileLoader for RootDatabase { |
@@ -109,7 +117,7 @@ impl salsa::ParallelDatabase for RootDatabase { | |||
109 | } | 117 | } |
110 | 118 | ||
111 | #[salsa::query_group(LineIndexDatabaseStorage)] | 119 | #[salsa::query_group(LineIndexDatabaseStorage)] |
112 | pub(crate) trait LineIndexDatabase: ra_db::SourceDatabase + CheckCanceled { | 120 | pub trait LineIndexDatabase: ra_db::SourceDatabase + CheckCanceled { |
113 | fn line_index(&self, file_id: FileId) -> Arc<LineIndex>; | 121 | fn line_index(&self, file_id: FileId) -> Arc<LineIndex>; |
114 | } | 122 | } |
115 | 123 | ||
diff --git a/crates/ra_ide/src/line_index.rs b/crates/ra_ide_db/src/line_index.rs index 710890d27..452c87ac5 100644 --- a/crates/ra_ide/src/line_index.rs +++ b/crates/ra_ide_db/src/line_index.rs | |||
@@ -1,6 +1,7 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! `LineIndex` maps flat `TextUnit` offsets into `(Line, Column)` |
2 | //! representation. | ||
2 | 3 | ||
3 | use crate::TextUnit; | 4 | use ra_syntax::TextUnit; |
4 | use rustc_hash::FxHashMap; | 5 | use rustc_hash::FxHashMap; |
5 | use superslice::Ext; | 6 | use superslice::Ext; |
6 | 7 | ||
diff --git a/crates/ra_ide/src/line_index_utils.rs b/crates/ra_ide_db/src/line_index_utils.rs index bd1e08feb..435b06511 100644 --- a/crates/ra_ide/src/line_index_utils.rs +++ b/crates/ra_ide_db/src/line_index_utils.rs | |||
@@ -1,9 +1,87 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! Code actions can specify desirable final position of the cursor. |
2 | //! | ||
3 | //! The position is specified as a `TextUnit` in the final file. We need to send | ||
4 | //! it in `(Line, Column)` coordinate though. However, we only have a LineIndex | ||
5 | //! for a file pre-edit! | ||
6 | //! | ||
7 | //! Code in this module applies this "to (Line, Column) after edit" | ||
8 | //! transformation. | ||
2 | 9 | ||
3 | use crate::{line_index::Utf16Char, LineCol, LineIndex}; | ||
4 | use ra_syntax::{TextRange, TextUnit}; | 10 | use ra_syntax::{TextRange, TextUnit}; |
5 | use ra_text_edit::{AtomTextEdit, TextEdit}; | 11 | use ra_text_edit::{AtomTextEdit, TextEdit}; |
6 | 12 | ||
13 | use crate::line_index::{LineCol, LineIndex, Utf16Char}; | ||
14 | |||
15 | pub fn translate_offset_with_edit( | ||
16 | line_index: &LineIndex, | ||
17 | offset: TextUnit, | ||
18 | text_edit: &TextEdit, | ||
19 | ) -> LineCol { | ||
20 | let mut state = Edits::from_text_edit(&text_edit); | ||
21 | |||
22 | let mut res = RunningLineCol::new(); | ||
23 | |||
24 | macro_rules! test_step { | ||
25 | ($x:ident) => { | ||
26 | match &$x { | ||
27 | Step::Newline(n) => { | ||
28 | if offset < *n { | ||
29 | return res.to_line_col(offset); | ||
30 | } else { | ||
31 | res.add_line(*n); | ||
32 | } | ||
33 | } | ||
34 | Step::Utf16Char(x) => { | ||
35 | if offset < x.end() { | ||
36 | // if the offset is inside a multibyte char it's invalid | ||
37 | // clamp it to the start of the char | ||
38 | let clamp = offset.min(x.start()); | ||
39 | return res.to_line_col(clamp); | ||
40 | } else { | ||
41 | res.adjust_col(*x); | ||
42 | } | ||
43 | } | ||
44 | } | ||
45 | }; | ||
46 | } | ||
47 | |||
48 | for orig_step in LineIndexStepIter::from(line_index) { | ||
49 | loop { | ||
50 | let translated_step = state.translate_step(&orig_step); | ||
51 | match state.next_steps(&translated_step) { | ||
52 | NextSteps::Use => { | ||
53 | test_step!(translated_step); | ||
54 | break; | ||
55 | } | ||
56 | NextSteps::ReplaceMany(ns) => { | ||
57 | for n in ns { | ||
58 | test_step!(n); | ||
59 | } | ||
60 | break; | ||
61 | } | ||
62 | NextSteps::AddMany(ns) => { | ||
63 | for n in ns { | ||
64 | test_step!(n); | ||
65 | } | ||
66 | } | ||
67 | } | ||
68 | } | ||
69 | } | ||
70 | |||
71 | loop { | ||
72 | match state.next_inserted_steps() { | ||
73 | None => break, | ||
74 | Some(ns) => { | ||
75 | for n in ns { | ||
76 | test_step!(n); | ||
77 | } | ||
78 | } | ||
79 | } | ||
80 | } | ||
81 | |||
82 | res.to_line_col(offset) | ||
83 | } | ||
84 | |||
7 | #[derive(Debug, Clone)] | 85 | #[derive(Debug, Clone)] |
8 | enum Step { | 86 | enum Step { |
9 | Newline(TextUnit), | 87 | Newline(TextUnit), |
@@ -17,7 +95,7 @@ struct LineIndexStepIter<'a> { | |||
17 | utf16_chars: Option<(TextUnit, std::slice::Iter<'a, Utf16Char>)>, | 95 | utf16_chars: Option<(TextUnit, std::slice::Iter<'a, Utf16Char>)>, |
18 | } | 96 | } |
19 | 97 | ||
20 | impl<'a> LineIndexStepIter<'a> { | 98 | impl LineIndexStepIter<'_> { |
21 | fn from(line_index: &LineIndex) -> LineIndexStepIter { | 99 | fn from(line_index: &LineIndex) -> LineIndexStepIter { |
22 | let mut x = LineIndexStepIter { line_index, next_newline_idx: 0, utf16_chars: None }; | 100 | let mut x = LineIndexStepIter { line_index, next_newline_idx: 0, utf16_chars: None }; |
23 | // skip first newline since it's not real | 101 | // skip first newline since it's not real |
@@ -26,7 +104,7 @@ impl<'a> LineIndexStepIter<'a> { | |||
26 | } | 104 | } |
27 | } | 105 | } |
28 | 106 | ||
29 | impl<'a> Iterator for LineIndexStepIter<'a> { | 107 | impl Iterator for LineIndexStepIter<'_> { |
30 | type Item = Step; | 108 | type Item = Step; |
31 | fn next(&mut self) -> Option<Step> { | 109 | fn next(&mut self) -> Option<Step> { |
32 | self.utf16_chars | 110 | self.utf16_chars |
@@ -54,7 +132,7 @@ struct OffsetStepIter<'a> { | |||
54 | offset: TextUnit, | 132 | offset: TextUnit, |
55 | } | 133 | } |
56 | 134 | ||
57 | impl<'a> Iterator for OffsetStepIter<'a> { | 135 | impl Iterator for OffsetStepIter<'_> { |
58 | type Item = Step; | 136 | type Item = Step; |
59 | fn next(&mut self) -> Option<Step> { | 137 | fn next(&mut self) -> Option<Step> { |
60 | let (next, next_offset) = self | 138 | let (next, next_offset) = self |
@@ -220,84 +298,16 @@ impl RunningLineCol { | |||
220 | } | 298 | } |
221 | } | 299 | } |
222 | 300 | ||
223 | pub fn translate_offset_with_edit( | ||
224 | line_index: &LineIndex, | ||
225 | offset: TextUnit, | ||
226 | text_edit: &TextEdit, | ||
227 | ) -> LineCol { | ||
228 | let mut state = Edits::from_text_edit(&text_edit); | ||
229 | |||
230 | let mut res = RunningLineCol::new(); | ||
231 | |||
232 | macro_rules! test_step { | ||
233 | ($x:ident) => { | ||
234 | match &$x { | ||
235 | Step::Newline(n) => { | ||
236 | if offset < *n { | ||
237 | return res.to_line_col(offset); | ||
238 | } else { | ||
239 | res.add_line(*n); | ||
240 | } | ||
241 | } | ||
242 | Step::Utf16Char(x) => { | ||
243 | if offset < x.end() { | ||
244 | // if the offset is inside a multibyte char it's invalid | ||
245 | // clamp it to the start of the char | ||
246 | let clamp = offset.min(x.start()); | ||
247 | return res.to_line_col(clamp); | ||
248 | } else { | ||
249 | res.adjust_col(*x); | ||
250 | } | ||
251 | } | ||
252 | } | ||
253 | }; | ||
254 | } | ||
255 | |||
256 | for orig_step in LineIndexStepIter::from(line_index) { | ||
257 | loop { | ||
258 | let translated_step = state.translate_step(&orig_step); | ||
259 | match state.next_steps(&translated_step) { | ||
260 | NextSteps::Use => { | ||
261 | test_step!(translated_step); | ||
262 | break; | ||
263 | } | ||
264 | NextSteps::ReplaceMany(ns) => { | ||
265 | for n in ns { | ||
266 | test_step!(n); | ||
267 | } | ||
268 | break; | ||
269 | } | ||
270 | NextSteps::AddMany(ns) => { | ||
271 | for n in ns { | ||
272 | test_step!(n); | ||
273 | } | ||
274 | } | ||
275 | } | ||
276 | } | ||
277 | } | ||
278 | |||
279 | loop { | ||
280 | match state.next_inserted_steps() { | ||
281 | None => break, | ||
282 | Some(ns) => { | ||
283 | for n in ns { | ||
284 | test_step!(n); | ||
285 | } | ||
286 | } | ||
287 | } | ||
288 | } | ||
289 | |||
290 | res.to_line_col(offset) | ||
291 | } | ||
292 | |||
293 | #[cfg(test)] | 301 | #[cfg(test)] |
294 | mod test { | 302 | mod test { |
295 | use super::*; | ||
296 | use crate::line_index; | ||
297 | use proptest::{prelude::*, proptest}; | 303 | use proptest::{prelude::*, proptest}; |
298 | use ra_text_edit::test_utils::{arb_offset, arb_text_with_edit}; | 304 | use ra_text_edit::test_utils::{arb_offset, arb_text_with_edit}; |
299 | use ra_text_edit::TextEdit; | 305 | use ra_text_edit::TextEdit; |
300 | 306 | ||
307 | use crate::line_index; | ||
308 | |||
309 | use super::*; | ||
310 | |||
301 | #[derive(Debug)] | 311 | #[derive(Debug)] |
302 | struct ArbTextWithEditAndOffset { | 312 | struct ArbTextWithEditAndOffset { |
303 | text: String, | 313 | text: String, |
diff --git a/crates/ra_ide/src/symbol_index.rs b/crates/ra_ide_db/src/symbol_index.rs index 5729eb5b3..64ddf2f95 100644 --- a/crates/ra_ide/src/symbol_index.rs +++ b/crates/ra_ide_db/src/symbol_index.rs | |||
@@ -19,6 +19,7 @@ | |||
19 | //! for each library (which is assumed to never change) and an FST for each Rust | 19 | //! for each library (which is assumed to never change) and an FST for each Rust |
20 | //! file in the current workspace, and run a query against the union of all | 20 | //! file in the current workspace, and run a query against the union of all |
21 | //! those FSTs. | 21 | //! those FSTs. |
22 | |||
22 | use std::{ | 23 | use std::{ |
23 | fmt, | 24 | fmt, |
24 | hash::{Hash, Hasher}, | 25 | hash::{Hash, Hasher}, |
@@ -29,7 +30,7 @@ use std::{ | |||
29 | use fst::{self, Streamer}; | 30 | use fst::{self, Streamer}; |
30 | use ra_db::{ | 31 | use ra_db::{ |
31 | salsa::{self, ParallelDatabase}, | 32 | salsa::{self, ParallelDatabase}, |
32 | SourceDatabaseExt, SourceRootId, | 33 | FileId, SourceDatabaseExt, SourceRootId, |
33 | }; | 34 | }; |
34 | use ra_syntax::{ | 35 | use ra_syntax::{ |
35 | ast::{self, NameOwner}, | 36 | ast::{self, NameOwner}, |
@@ -40,10 +41,50 @@ use ra_syntax::{ | |||
40 | #[cfg(not(feature = "wasm"))] | 41 | #[cfg(not(feature = "wasm"))] |
41 | use rayon::prelude::*; | 42 | use rayon::prelude::*; |
42 | 43 | ||
43 | use crate::{db::RootDatabase, FileId, Query}; | 44 | use crate::RootDatabase; |
45 | |||
46 | #[derive(Debug)] | ||
47 | pub struct Query { | ||
48 | query: String, | ||
49 | lowercased: String, | ||
50 | only_types: bool, | ||
51 | libs: bool, | ||
52 | exact: bool, | ||
53 | limit: usize, | ||
54 | } | ||
55 | |||
56 | impl Query { | ||
57 | pub fn new(query: String) -> Query { | ||
58 | let lowercased = query.to_lowercase(); | ||
59 | Query { | ||
60 | query, | ||
61 | lowercased, | ||
62 | only_types: false, | ||
63 | libs: false, | ||
64 | exact: false, | ||
65 | limit: usize::max_value(), | ||
66 | } | ||
67 | } | ||
68 | |||
69 | pub fn only_types(&mut self) { | ||
70 | self.only_types = true; | ||
71 | } | ||
72 | |||
73 | pub fn libs(&mut self) { | ||
74 | self.libs = true; | ||
75 | } | ||
76 | |||
77 | pub fn exact(&mut self) { | ||
78 | self.exact = true; | ||
79 | } | ||
80 | |||
81 | pub fn limit(&mut self, limit: usize) { | ||
82 | self.limit = limit | ||
83 | } | ||
84 | } | ||
44 | 85 | ||
45 | #[salsa::query_group(SymbolsDatabaseStorage)] | 86 | #[salsa::query_group(SymbolsDatabaseStorage)] |
46 | pub(crate) trait SymbolsDatabase: hir::db::HirDatabase { | 87 | pub trait SymbolsDatabase: hir::db::HirDatabase { |
47 | fn file_symbols(&self, file_id: FileId) -> Arc<SymbolIndex>; | 88 | fn file_symbols(&self, file_id: FileId) -> Arc<SymbolIndex>; |
48 | #[salsa::input] | 89 | #[salsa::input] |
49 | fn library_symbols(&self, id: SourceRootId) -> Arc<SymbolIndex>; | 90 | fn library_symbols(&self, id: SourceRootId) -> Arc<SymbolIndex>; |
@@ -68,7 +109,7 @@ fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> | |||
68 | Arc::new(SymbolIndex::new(symbols)) | 109 | Arc::new(SymbolIndex::new(symbols)) |
69 | } | 110 | } |
70 | 111 | ||
71 | pub(crate) fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> { | 112 | pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> { |
72 | /// Need to wrap Snapshot to provide `Clone` impl for `map_with` | 113 | /// Need to wrap Snapshot to provide `Clone` impl for `map_with` |
73 | struct Snap(salsa::Snapshot<RootDatabase>); | 114 | struct Snap(salsa::Snapshot<RootDatabase>); |
74 | impl Clone for Snap { | 115 | impl Clone for Snap { |
@@ -110,16 +151,16 @@ pub(crate) fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> | |||
110 | query.search(&buf) | 151 | query.search(&buf) |
111 | } | 152 | } |
112 | 153 | ||
113 | pub(crate) fn index_resolve(db: &RootDatabase, name_ref: &ast::NameRef) -> Vec<FileSymbol> { | 154 | pub fn index_resolve(db: &RootDatabase, name_ref: &ast::NameRef) -> Vec<FileSymbol> { |
114 | let name = name_ref.text(); | 155 | let name = name_ref.text(); |
115 | let mut query = Query::new(name.to_string()); | 156 | let mut query = Query::new(name.to_string()); |
116 | query.exact(); | 157 | query.exact(); |
117 | query.limit(4); | 158 | query.limit(4); |
118 | crate::symbol_index::world_symbols(db, query) | 159 | world_symbols(db, query) |
119 | } | 160 | } |
120 | 161 | ||
121 | #[derive(Default)] | 162 | #[derive(Default)] |
122 | pub(crate) struct SymbolIndex { | 163 | pub struct SymbolIndex { |
123 | symbols: Vec<FileSymbol>, | 164 | symbols: Vec<FileSymbol>, |
124 | map: fst::Map, | 165 | map: fst::Map, |
125 | } | 166 | } |
@@ -178,11 +219,11 @@ impl SymbolIndex { | |||
178 | SymbolIndex { symbols, map } | 219 | SymbolIndex { symbols, map } |
179 | } | 220 | } |
180 | 221 | ||
181 | pub(crate) fn len(&self) -> usize { | 222 | pub fn len(&self) -> usize { |
182 | self.symbols.len() | 223 | self.symbols.len() |
183 | } | 224 | } |
184 | 225 | ||
185 | pub(crate) fn memory_size(&self) -> usize { | 226 | pub fn memory_size(&self) -> usize { |
186 | self.map.as_fst().size() + self.symbols.len() * mem::size_of::<FileSymbol>() | 227 | self.map.as_fst().size() + self.symbols.len() * mem::size_of::<FileSymbol>() |
187 | } | 228 | } |
188 | 229 | ||
@@ -262,12 +303,12 @@ fn is_type(kind: SyntaxKind) -> bool { | |||
262 | /// The actual data that is stored in the index. It should be as compact as | 303 | /// The actual data that is stored in the index. It should be as compact as |
263 | /// possible. | 304 | /// possible. |
264 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 305 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
265 | pub(crate) struct FileSymbol { | 306 | pub struct FileSymbol { |
266 | pub(crate) file_id: FileId, | 307 | pub file_id: FileId, |
267 | pub(crate) name: SmolStr, | 308 | pub name: SmolStr, |
268 | pub(crate) ptr: SyntaxNodePtr, | 309 | pub ptr: SyntaxNodePtr, |
269 | pub(crate) name_range: Option<TextRange>, | 310 | pub name_range: Option<TextRange>, |
270 | pub(crate) container_name: Option<SmolStr>, | 311 | pub container_name: Option<SmolStr>, |
271 | } | 312 | } |
272 | 313 | ||
273 | fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec<FileSymbol> { | 314 | fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec<FileSymbol> { |
@@ -329,77 +370,3 @@ fn to_file_symbol(node: &SyntaxNode, file_id: FileId) -> Option<FileSymbol> { | |||
329 | container_name: None, | 370 | container_name: None, |
330 | }) | 371 | }) |
331 | } | 372 | } |
332 | |||
333 | #[cfg(test)] | ||
334 | mod tests { | ||
335 | use crate::{display::NavigationTarget, mock_analysis::single_file, Query}; | ||
336 | use ra_syntax::{ | ||
337 | SmolStr, | ||
338 | SyntaxKind::{FN_DEF, STRUCT_DEF}, | ||
339 | }; | ||
340 | |||
341 | #[test] | ||
342 | fn test_world_symbols_with_no_container() { | ||
343 | let code = r#" | ||
344 | enum FooInner { } | ||
345 | "#; | ||
346 | |||
347 | let mut symbols = get_symbols_matching(code, "FooInner"); | ||
348 | |||
349 | let s = symbols.pop().unwrap(); | ||
350 | |||
351 | assert_eq!(s.name(), "FooInner"); | ||
352 | assert!(s.container_name().is_none()); | ||
353 | } | ||
354 | |||
355 | #[test] | ||
356 | fn test_world_symbols_include_container_name() { | ||
357 | let code = r#" | ||
358 | fn foo() { | ||
359 | enum FooInner { } | ||
360 | } | ||
361 | "#; | ||
362 | |||
363 | let mut symbols = get_symbols_matching(code, "FooInner"); | ||
364 | |||
365 | let s = symbols.pop().unwrap(); | ||
366 | |||
367 | assert_eq!(s.name(), "FooInner"); | ||
368 | assert_eq!(s.container_name(), Some(&SmolStr::new("foo"))); | ||
369 | |||
370 | let code = r#" | ||
371 | mod foo { | ||
372 | struct FooInner; | ||
373 | } | ||
374 | "#; | ||
375 | |||
376 | let mut symbols = get_symbols_matching(code, "FooInner"); | ||
377 | |||
378 | let s = symbols.pop().unwrap(); | ||
379 | |||
380 | assert_eq!(s.name(), "FooInner"); | ||
381 | assert_eq!(s.container_name(), Some(&SmolStr::new("foo"))); | ||
382 | } | ||
383 | |||
384 | #[test] | ||
385 | fn test_world_symbols_are_case_sensitive() { | ||
386 | let code = r#" | ||
387 | fn foo() {} | ||
388 | |||
389 | struct Foo; | ||
390 | "#; | ||
391 | |||
392 | let symbols = get_symbols_matching(code, "Foo"); | ||
393 | |||
394 | let fn_match = symbols.iter().find(|s| s.name() == "foo").map(|s| s.kind()); | ||
395 | let struct_match = symbols.iter().find(|s| s.name() == "Foo").map(|s| s.kind()); | ||
396 | |||
397 | assert_eq!(fn_match, Some(FN_DEF)); | ||
398 | assert_eq!(struct_match, Some(STRUCT_DEF)); | ||
399 | } | ||
400 | |||
401 | fn get_symbols_matching(text: &str, query: &str) -> Vec<NavigationTarget> { | ||
402 | let (analysis, _) = single_file(text); | ||
403 | analysis.symbol_search(Query::new(query.into())).unwrap() | ||
404 | } | ||
405 | } | ||
diff --git a/crates/ra_ide/src/wasm_shims.rs b/crates/ra_ide_db/src/wasm_shims.rs index 088cc9be4..7af9f9d9b 100644 --- a/crates/ra_ide/src/wasm_shims.rs +++ b/crates/ra_ide_db/src/wasm_shims.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! A version of `std::time::Instant` that doesn't panic in WASM. |
2 | 2 | ||
3 | #[cfg(not(feature = "wasm"))] | 3 | #[cfg(not(feature = "wasm"))] |
4 | pub use std::time::Instant; | 4 | pub use std::time::Instant; |
diff --git a/crates/ra_lsp_server/Cargo.toml b/crates/ra_lsp_server/Cargo.toml index 4ee3fb49f..fdf81ed87 100644 --- a/crates/ra_lsp_server/Cargo.toml +++ b/crates/ra_lsp_server/Cargo.toml | |||
@@ -26,10 +26,13 @@ lsp-server = "0.3.0" | |||
26 | ra_project_model = { path = "../ra_project_model" } | 26 | ra_project_model = { path = "../ra_project_model" } |
27 | ra_prof = { path = "../ra_prof" } | 27 | ra_prof = { path = "../ra_prof" } |
28 | ra_vfs_glob = { path = "../ra_vfs_glob" } | 28 | ra_vfs_glob = { path = "../ra_vfs_glob" } |
29 | env_logger = { version = "0.7.1", default-features = false, features = ["humantime"] } | 29 | env_logger = { version = "0.7.1", default-features = false } |
30 | ra_cargo_watch = { path = "../ra_cargo_watch" } | 30 | ra_cargo_watch = { path = "../ra_cargo_watch" } |
31 | either = "1.5" | 31 | either = "1.5" |
32 | 32 | ||
33 | [target.'cfg(windows)'.dependencies] | ||
34 | winapi = "0.3" | ||
35 | |||
33 | [dev-dependencies] | 36 | [dev-dependencies] |
34 | tempfile = "3" | 37 | tempfile = "3" |
35 | test_utils = { path = "../test_utils" } | 38 | test_utils = { path = "../test_utils" } |
diff --git a/crates/ra_lsp_server/src/diagnostics.rs b/crates/ra_lsp_server/src/diagnostics.rs new file mode 100644 index 000000000..ea08bce24 --- /dev/null +++ b/crates/ra_lsp_server/src/diagnostics.rs | |||
@@ -0,0 +1,85 @@ | |||
1 | //! Book keeping for keeping diagnostics easily in sync with the client. | ||
2 | use lsp_types::{CodeActionOrCommand, Diagnostic, Range}; | ||
3 | use ra_ide::FileId; | ||
4 | use std::{collections::HashMap, sync::Arc}; | ||
5 | |||
6 | pub type CheckFixes = Arc<HashMap<FileId, Vec<Fix>>>; | ||
7 | |||
8 | #[derive(Debug, Default, Clone)] | ||
9 | pub struct DiagnosticCollection { | ||
10 | pub native: HashMap<FileId, Vec<Diagnostic>>, | ||
11 | pub check: HashMap<FileId, Vec<Diagnostic>>, | ||
12 | pub check_fixes: CheckFixes, | ||
13 | } | ||
14 | |||
15 | #[derive(Debug, Clone)] | ||
16 | pub struct Fix { | ||
17 | pub range: Range, | ||
18 | pub action: CodeActionOrCommand, | ||
19 | } | ||
20 | |||
21 | #[derive(Debug)] | ||
22 | pub enum DiagnosticTask { | ||
23 | ClearCheck, | ||
24 | AddCheck(FileId, Diagnostic, Vec<CodeActionOrCommand>), | ||
25 | SetNative(FileId, Vec<Diagnostic>), | ||
26 | } | ||
27 | |||
28 | impl DiagnosticCollection { | ||
29 | pub fn clear_check(&mut self) -> Vec<FileId> { | ||
30 | Arc::make_mut(&mut self.check_fixes).clear(); | ||
31 | self.check.drain().map(|(key, _value)| key).collect() | ||
32 | } | ||
33 | |||
34 | pub fn add_check_diagnostic( | ||
35 | &mut self, | ||
36 | file_id: FileId, | ||
37 | diagnostic: Diagnostic, | ||
38 | fixes: Vec<CodeActionOrCommand>, | ||
39 | ) { | ||
40 | let diagnostics = self.check.entry(file_id).or_default(); | ||
41 | for existing_diagnostic in diagnostics.iter() { | ||
42 | if are_diagnostics_equal(&existing_diagnostic, &diagnostic) { | ||
43 | return; | ||
44 | } | ||
45 | } | ||
46 | |||
47 | let check_fixes = Arc::make_mut(&mut self.check_fixes); | ||
48 | check_fixes | ||
49 | .entry(file_id) | ||
50 | .or_default() | ||
51 | .extend(fixes.into_iter().map(|action| Fix { range: diagnostic.range, action })); | ||
52 | diagnostics.push(diagnostic); | ||
53 | } | ||
54 | |||
55 | pub fn set_native_diagnostics(&mut self, file_id: FileId, diagnostics: Vec<Diagnostic>) { | ||
56 | self.native.insert(file_id, diagnostics); | ||
57 | } | ||
58 | |||
59 | pub fn diagnostics_for(&self, file_id: FileId) -> impl Iterator<Item = &Diagnostic> { | ||
60 | let native = self.native.get(&file_id).into_iter().flatten(); | ||
61 | let check = self.check.get(&file_id).into_iter().flatten(); | ||
62 | native.chain(check) | ||
63 | } | ||
64 | |||
65 | pub fn handle_task(&mut self, task: DiagnosticTask) -> Vec<FileId> { | ||
66 | match task { | ||
67 | DiagnosticTask::ClearCheck => self.clear_check(), | ||
68 | DiagnosticTask::AddCheck(file_id, diagnostic, fixes) => { | ||
69 | self.add_check_diagnostic(file_id, diagnostic, fixes); | ||
70 | vec![file_id] | ||
71 | } | ||
72 | DiagnosticTask::SetNative(file_id, diagnostics) => { | ||
73 | self.set_native_diagnostics(file_id, diagnostics); | ||
74 | vec![file_id] | ||
75 | } | ||
76 | } | ||
77 | } | ||
78 | } | ||
79 | |||
80 | fn are_diagnostics_equal(left: &Diagnostic, right: &Diagnostic) -> bool { | ||
81 | left.source == right.source | ||
82 | && left.severity == right.severity | ||
83 | && left.range == right.range | ||
84 | && left.message == right.message | ||
85 | } | ||
diff --git a/crates/ra_lsp_server/src/lib.rs b/crates/ra_lsp_server/src/lib.rs index 2ca149fd5..1208c1343 100644 --- a/crates/ra_lsp_server/src/lib.rs +++ b/crates/ra_lsp_server/src/lib.rs | |||
@@ -29,6 +29,7 @@ mod markdown; | |||
29 | pub mod req; | 29 | pub mod req; |
30 | mod config; | 30 | mod config; |
31 | mod world; | 31 | mod world; |
32 | mod diagnostics; | ||
32 | 33 | ||
33 | pub type Result<T> = std::result::Result<T, Box<dyn std::error::Error + Send + Sync>>; | 34 | pub type Result<T> = std::result::Result<T, Box<dyn std::error::Error + Send + Sync>>; |
34 | pub use crate::{ | 35 | pub use crate::{ |
diff --git a/crates/ra_lsp_server/src/main_loop.rs b/crates/ra_lsp_server/src/main_loop.rs index 7822be2e2..ceff82fda 100644 --- a/crates/ra_lsp_server/src/main_loop.rs +++ b/crates/ra_lsp_server/src/main_loop.rs | |||
@@ -5,21 +5,29 @@ mod handlers; | |||
5 | mod subscriptions; | 5 | mod subscriptions; |
6 | pub(crate) mod pending_requests; | 6 | pub(crate) mod pending_requests; |
7 | 7 | ||
8 | use std::{error::Error, fmt, panic, path::PathBuf, sync::Arc, time::Instant}; | 8 | use std::{ |
9 | env, | ||
10 | error::Error, | ||
11 | fmt, panic, | ||
12 | path::PathBuf, | ||
13 | sync::Arc, | ||
14 | time::{Duration, Instant}, | ||
15 | }; | ||
9 | 16 | ||
10 | use crossbeam_channel::{select, unbounded, RecvError, Sender}; | 17 | use crossbeam_channel::{select, unbounded, RecvError, Sender}; |
11 | use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response}; | 18 | use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response}; |
12 | use lsp_types::{ClientCapabilities, NumberOrString, Url}; | 19 | use lsp_types::{ClientCapabilities, NumberOrString}; |
13 | use ra_cargo_watch::{CheckOptions, CheckTask}; | 20 | use ra_cargo_watch::{url_from_path_with_drive_lowercasing, CheckOptions, CheckTask}; |
14 | use ra_ide::{Canceled, FeatureFlags, FileId, LibraryData, SourceRootId}; | 21 | use ra_ide::{Canceled, FeatureFlags, FileId, LibraryData, SourceRootId}; |
15 | use ra_prof::profile; | 22 | use ra_prof::profile; |
16 | use ra_vfs::{VfsTask, Watch}; | 23 | use ra_vfs::{VfsFile, VfsTask, Watch}; |
17 | use relative_path::RelativePathBuf; | 24 | use relative_path::RelativePathBuf; |
18 | use rustc_hash::FxHashSet; | 25 | use rustc_hash::FxHashSet; |
19 | use serde::{de::DeserializeOwned, Serialize}; | 26 | use serde::{de::DeserializeOwned, Serialize}; |
20 | use threadpool::ThreadPool; | 27 | use threadpool::ThreadPool; |
21 | 28 | ||
22 | use crate::{ | 29 | use crate::{ |
30 | diagnostics::DiagnosticTask, | ||
23 | main_loop::{ | 31 | main_loop::{ |
24 | pending_requests::{PendingRequest, PendingRequests}, | 32 | pending_requests::{PendingRequest, PendingRequests}, |
25 | subscriptions::Subscriptions, | 33 | subscriptions::Subscriptions, |
@@ -29,9 +37,6 @@ use crate::{ | |||
29 | Result, ServerConfig, | 37 | Result, ServerConfig, |
30 | }; | 38 | }; |
31 | 39 | ||
32 | const THREADPOOL_SIZE: usize = 8; | ||
33 | const MAX_IN_FLIGHT_LIBS: usize = THREADPOOL_SIZE - 3; | ||
34 | |||
35 | #[derive(Debug)] | 40 | #[derive(Debug)] |
36 | pub struct LspError { | 41 | pub struct LspError { |
37 | pub code: i32, | 42 | pub code: i32, |
@@ -60,6 +65,25 @@ pub fn main_loop( | |||
60 | ) -> Result<()> { | 65 | ) -> Result<()> { |
61 | log::info!("server_config: {:#?}", config); | 66 | log::info!("server_config: {:#?}", config); |
62 | 67 | ||
68 | // Windows scheduler implements priority boosts: if thread waits for an | ||
69 | // event (like a condvar), and event fires, priority of the thread is | ||
70 | // temporary bumped. This optimization backfires in our case: each time the | ||
71 | // `main_loop` schedules a task to run on a threadpool, the worker threads | ||
72 | // gets a higher priority, and (on a machine with fewer cores) displaces the | ||
73 | // main loop! We work-around this by marking the main loop as a | ||
74 | // higher-priority thread. | ||
75 | // | ||
76 | // https://docs.microsoft.com/en-us/windows/win32/procthread/scheduling-priorities | ||
77 | // https://docs.microsoft.com/en-us/windows/win32/procthread/priority-boosts | ||
78 | // https://github.com/rust-analyzer/rust-analyzer/issues/2835 | ||
79 | #[cfg(windows)] | ||
80 | unsafe { | ||
81 | use winapi::um::processthreadsapi::*; | ||
82 | let thread = GetCurrentThread(); | ||
83 | let thread_priority_above_normal = 1; | ||
84 | SetThreadPriority(thread, thread_priority_above_normal); | ||
85 | } | ||
86 | |||
63 | let mut loop_state = LoopState::default(); | 87 | let mut loop_state = LoopState::default(); |
64 | let mut world_state = { | 88 | let mut world_state = { |
65 | let feature_flags = { | 89 | let feature_flags = { |
@@ -168,7 +192,7 @@ pub fn main_loop( | |||
168 | ) | 192 | ) |
169 | }; | 193 | }; |
170 | 194 | ||
171 | let pool = ThreadPool::new(THREADPOOL_SIZE); | 195 | let pool = ThreadPool::default(); |
172 | let (task_sender, task_receiver) = unbounded::<Task>(); | 196 | let (task_sender, task_receiver) = unbounded::<Task>(); |
173 | let (libdata_sender, libdata_receiver) = unbounded::<LibraryData>(); | 197 | let (libdata_sender, libdata_receiver) = unbounded::<LibraryData>(); |
174 | 198 | ||
@@ -210,7 +234,7 @@ pub fn main_loop( | |||
210 | )?; | 234 | )?; |
211 | } | 235 | } |
212 | } | 236 | } |
213 | 237 | world_state.analysis_host.request_cancellation(); | |
214 | log::info!("waiting for tasks to finish..."); | 238 | log::info!("waiting for tasks to finish..."); |
215 | task_receiver.into_iter().for_each(|task| { | 239 | task_receiver.into_iter().for_each(|task| { |
216 | on_task(task, &connection.sender, &mut loop_state.pending_requests, &mut world_state) | 240 | on_task(task, &connection.sender, &mut loop_state.pending_requests, &mut world_state) |
@@ -231,6 +255,7 @@ pub fn main_loop( | |||
231 | enum Task { | 255 | enum Task { |
232 | Respond(Response), | 256 | Respond(Response), |
233 | Notify(Notification), | 257 | Notify(Notification), |
258 | Diagnostic(DiagnosticTask), | ||
234 | } | 259 | } |
235 | 260 | ||
236 | enum Event { | 261 | enum Event { |
@@ -371,7 +396,8 @@ fn loop_turn( | |||
371 | loop_state.pending_libraries.extend(changes); | 396 | loop_state.pending_libraries.extend(changes); |
372 | } | 397 | } |
373 | 398 | ||
374 | while loop_state.in_flight_libraries < MAX_IN_FLIGHT_LIBS | 399 | let max_in_flight_libs = pool.max_count().saturating_sub(2).max(1); |
400 | while loop_state.in_flight_libraries < max_in_flight_libs | ||
375 | && !loop_state.pending_libraries.is_empty() | 401 | && !loop_state.pending_libraries.is_empty() |
376 | { | 402 | { |
377 | let (root, files) = loop_state.pending_libraries.pop().unwrap(); | 403 | let (root, files) = loop_state.pending_libraries.pop().unwrap(); |
@@ -379,7 +405,6 @@ fn loop_turn( | |||
379 | let sender = libdata_sender.clone(); | 405 | let sender = libdata_sender.clone(); |
380 | pool.execute(move || { | 406 | pool.execute(move || { |
381 | log::info!("indexing {:?} ... ", root); | 407 | log::info!("indexing {:?} ... ", root); |
382 | let _p = profile(&format!("indexed {:?}", root)); | ||
383 | let data = LibraryData::prepare(root, files); | 408 | let data = LibraryData::prepare(root, files); |
384 | sender.send(data).unwrap(); | 409 | sender.send(data).unwrap(); |
385 | }); | 410 | }); |
@@ -408,6 +433,19 @@ fn loop_turn( | |||
408 | loop_state.subscriptions.subscriptions(), | 433 | loop_state.subscriptions.subscriptions(), |
409 | ) | 434 | ) |
410 | } | 435 | } |
436 | |||
437 | let loop_duration = loop_start.elapsed(); | ||
438 | if loop_duration > Duration::from_millis(100) { | ||
439 | log::error!("overly long loop turn: {:?}", loop_duration); | ||
440 | if env::var("RA_PROFILE").is_ok() { | ||
441 | show_message( | ||
442 | req::MessageType::Error, | ||
443 | format!("overly long loop turn: {:?}", loop_duration), | ||
444 | &connection.sender, | ||
445 | ); | ||
446 | } | ||
447 | } | ||
448 | |||
411 | Ok(()) | 449 | Ok(()) |
412 | } | 450 | } |
413 | 451 | ||
@@ -428,6 +466,7 @@ fn on_task( | |||
428 | Task::Notify(n) => { | 466 | Task::Notify(n) => { |
429 | msg_sender.send(n.into()).unwrap(); | 467 | msg_sender.send(n.into()).unwrap(); |
430 | } | 468 | } |
469 | Task::Diagnostic(task) => on_diagnostic_task(task, msg_sender, state), | ||
431 | } | 470 | } |
432 | } | 471 | } |
433 | 472 | ||
@@ -435,7 +474,7 @@ fn on_request( | |||
435 | world: &mut WorldState, | 474 | world: &mut WorldState, |
436 | pending_requests: &mut PendingRequests, | 475 | pending_requests: &mut PendingRequests, |
437 | pool: &ThreadPool, | 476 | pool: &ThreadPool, |
438 | sender: &Sender<Task>, | 477 | task_sender: &Sender<Task>, |
439 | msg_sender: &Sender<Message>, | 478 | msg_sender: &Sender<Message>, |
440 | request_received: Instant, | 479 | request_received: Instant, |
441 | req: Request, | 480 | req: Request, |
@@ -444,7 +483,7 @@ fn on_request( | |||
444 | req: Some(req), | 483 | req: Some(req), |
445 | pool, | 484 | pool, |
446 | world, | 485 | world, |
447 | sender, | 486 | task_sender, |
448 | msg_sender, | 487 | msg_sender, |
449 | pending_requests, | 488 | pending_requests, |
450 | request_received, | 489 | request_received, |
@@ -586,30 +625,26 @@ fn on_notification( | |||
586 | 625 | ||
587 | fn on_check_task( | 626 | fn on_check_task( |
588 | task: CheckTask, | 627 | task: CheckTask, |
589 | world_state: &WorldState, | 628 | world_state: &mut WorldState, |
590 | task_sender: &Sender<Task>, | 629 | task_sender: &Sender<Task>, |
591 | ) -> Result<()> { | 630 | ) -> Result<()> { |
592 | match task { | 631 | match task { |
593 | CheckTask::ClearDiagnostics => { | 632 | CheckTask::ClearDiagnostics => { |
594 | let cleared_files = world_state.check_watcher.state.write().clear(); | 633 | task_sender.send(Task::Diagnostic(DiagnosticTask::ClearCheck))?; |
595 | |||
596 | // Send updated diagnostics for each cleared file | ||
597 | for url in cleared_files { | ||
598 | publish_diagnostics_for_url(&url, world_state, task_sender)?; | ||
599 | } | ||
600 | } | 634 | } |
601 | 635 | ||
602 | CheckTask::AddDiagnostic(url, diagnostic) => { | 636 | CheckTask::AddDiagnostic { url, diagnostic, fixes } => { |
603 | world_state | 637 | let path = url.to_file_path().map_err(|()| format!("invalid uri: {}", url))?; |
604 | .check_watcher | 638 | let file_id = match world_state.vfs.read().path2file(&path) { |
605 | .state | 639 | Some(file) => FileId(file.0), |
606 | .write() | 640 | None => { |
607 | .add_diagnostic_with_fixes(url.clone(), diagnostic); | 641 | log::error!("File with cargo diagnostic not found in VFS: {}", path.display()); |
608 | 642 | return Ok(()); | |
609 | // We manually send a diagnostic update when the watcher asks | 643 | } |
610 | // us to, to avoid the issue of having to change the file to | 644 | }; |
611 | // receive updated diagnostics. | 645 | |
612 | publish_diagnostics_for_url(&url, world_state, task_sender)?; | 646 | task_sender |
647 | .send(Task::Diagnostic(DiagnosticTask::AddCheck(file_id, diagnostic, fixes)))?; | ||
613 | } | 648 | } |
614 | 649 | ||
615 | CheckTask::Status(progress) => { | 650 | CheckTask::Status(progress) => { |
@@ -620,22 +655,29 @@ fn on_check_task( | |||
620 | let not = notification_new::<req::Progress>(params); | 655 | let not = notification_new::<req::Progress>(params); |
621 | task_sender.send(Task::Notify(not)).unwrap(); | 656 | task_sender.send(Task::Notify(not)).unwrap(); |
622 | } | 657 | } |
623 | } | 658 | }; |
659 | |||
624 | Ok(()) | 660 | Ok(()) |
625 | } | 661 | } |
626 | 662 | ||
627 | fn publish_diagnostics_for_url( | 663 | fn on_diagnostic_task(task: DiagnosticTask, msg_sender: &Sender<Message>, state: &mut WorldState) { |
628 | url: &Url, | 664 | let subscriptions = state.diagnostics.handle_task(task); |
629 | world_state: &WorldState, | 665 | |
630 | task_sender: &Sender<Task>, | 666 | for file_id in subscriptions { |
631 | ) -> Result<()> { | 667 | let path = state.vfs.read().file2path(VfsFile(file_id.0)); |
632 | let path = url.to_file_path().map_err(|()| format!("invalid uri: {}", url))?; | 668 | let uri = match url_from_path_with_drive_lowercasing(&path) { |
633 | if let Some(file_id) = world_state.vfs.read().path2file(&path) { | 669 | Ok(uri) => uri, |
634 | let params = handlers::publish_diagnostics(&world_state.snapshot(), FileId(file_id.0))?; | 670 | Err(err) => { |
671 | log::error!("Couldn't convert path to url ({}): {:?}", err, path.to_string_lossy()); | ||
672 | continue; | ||
673 | } | ||
674 | }; | ||
675 | |||
676 | let diagnostics = state.diagnostics.diagnostics_for(file_id).cloned().collect(); | ||
677 | let params = req::PublishDiagnosticsParams { uri, diagnostics, version: None }; | ||
635 | let not = notification_new::<req::PublishDiagnostics>(params); | 678 | let not = notification_new::<req::PublishDiagnostics>(params); |
636 | task_sender.send(Task::Notify(not)).unwrap(); | 679 | msg_sender.send(not.into()).unwrap(); |
637 | } | 680 | } |
638 | Ok(()) | ||
639 | } | 681 | } |
640 | 682 | ||
641 | struct PoolDispatcher<'a> { | 683 | struct PoolDispatcher<'a> { |
@@ -644,7 +686,7 @@ struct PoolDispatcher<'a> { | |||
644 | world: &'a mut WorldState, | 686 | world: &'a mut WorldState, |
645 | pending_requests: &'a mut PendingRequests, | 687 | pending_requests: &'a mut PendingRequests, |
646 | msg_sender: &'a Sender<Message>, | 688 | msg_sender: &'a Sender<Message>, |
647 | sender: &'a Sender<Task>, | 689 | task_sender: &'a Sender<Task>, |
648 | request_received: Instant, | 690 | request_received: Instant, |
649 | } | 691 | } |
650 | 692 | ||
@@ -691,7 +733,7 @@ impl<'a> PoolDispatcher<'a> { | |||
691 | 733 | ||
692 | self.pool.execute({ | 734 | self.pool.execute({ |
693 | let world = self.world.snapshot(); | 735 | let world = self.world.snapshot(); |
694 | let sender = self.sender.clone(); | 736 | let sender = self.task_sender.clone(); |
695 | move || { | 737 | move || { |
696 | let result = f(world, params); | 738 | let result = f(world, params); |
697 | let task = result_to_task::<R>(id, result); | 739 | let task = result_to_task::<R>(id, result); |
@@ -769,7 +811,7 @@ fn update_file_notifications_on_threadpool( | |||
769 | pool: &ThreadPool, | 811 | pool: &ThreadPool, |
770 | world: WorldSnapshot, | 812 | world: WorldSnapshot, |
771 | publish_decorations: bool, | 813 | publish_decorations: bool, |
772 | sender: Sender<Task>, | 814 | task_sender: Sender<Task>, |
773 | subscriptions: Vec<FileId>, | 815 | subscriptions: Vec<FileId>, |
774 | ) { | 816 | ) { |
775 | log::trace!("updating notifications for {:?}", subscriptions); | 817 | log::trace!("updating notifications for {:?}", subscriptions); |
@@ -783,9 +825,8 @@ fn update_file_notifications_on_threadpool( | |||
783 | log::error!("failed to compute diagnostics: {:?}", e); | 825 | log::error!("failed to compute diagnostics: {:?}", e); |
784 | } | 826 | } |
785 | } | 827 | } |
786 | Ok(params) => { | 828 | Ok(task) => { |
787 | let not = notification_new::<req::PublishDiagnostics>(params); | 829 | task_sender.send(Task::Diagnostic(task)).unwrap(); |
788 | sender.send(Task::Notify(not)).unwrap(); | ||
789 | } | 830 | } |
790 | } | 831 | } |
791 | } | 832 | } |
@@ -798,7 +839,7 @@ fn update_file_notifications_on_threadpool( | |||
798 | } | 839 | } |
799 | Ok(params) => { | 840 | Ok(params) => { |
800 | let not = notification_new::<req::PublishDecorations>(params); | 841 | let not = notification_new::<req::PublishDecorations>(params); |
801 | sender.send(Task::Notify(not)).unwrap(); | 842 | task_sender.send(Task::Notify(not)).unwrap(); |
802 | } | 843 | } |
803 | } | 844 | } |
804 | } | 845 | } |
diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs index 8e43f0575..65e8bc856 100644 --- a/crates/ra_lsp_server/src/main_loop/handlers.rs +++ b/crates/ra_lsp_server/src/main_loop/handlers.rs | |||
@@ -1,17 +1,22 @@ | |||
1 | //! This module is responsible for implementing handlers for Lanuage Server Protocol. | 1 | //! This module is responsible for implementing handlers for Lanuage Server Protocol. |
2 | //! The majority of requests are fulfilled by calling into the `ra_ide` crate. | 2 | //! The majority of requests are fulfilled by calling into the `ra_ide` crate. |
3 | 3 | ||
4 | use std::{fmt::Write as _, io::Write as _}; | 4 | use std::{ |
5 | collections::hash_map::Entry, | ||
6 | fmt::Write as _, | ||
7 | io::Write as _, | ||
8 | process::{self, Stdio}, | ||
9 | }; | ||
5 | 10 | ||
6 | use either::Either; | ||
7 | use lsp_server::ErrorCode; | 11 | use lsp_server::ErrorCode; |
8 | use lsp_types::{ | 12 | use lsp_types::{ |
9 | CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem, | 13 | CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem, |
10 | CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams, | 14 | CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams, |
11 | CodeAction, CodeActionResponse, CodeLens, Command, CompletionItem, Diagnostic, | 15 | CodeAction, CodeActionOrCommand, CodeActionResponse, CodeLens, Command, CompletionItem, |
12 | DocumentFormattingParams, DocumentHighlight, DocumentSymbol, FoldingRange, FoldingRangeParams, | 16 | Diagnostic, DocumentFormattingParams, DocumentHighlight, DocumentSymbol, FoldingRange, |
13 | Hover, HoverContents, Location, MarkupContent, MarkupKind, Position, PrepareRenameResponse, | 17 | FoldingRangeParams, Hover, HoverContents, Location, MarkupContent, MarkupKind, Position, |
14 | Range, RenameParams, SymbolInformation, TextDocumentIdentifier, TextEdit, WorkspaceEdit, | 18 | PrepareRenameResponse, Range, RenameParams, SymbolInformation, TextDocumentIdentifier, |
19 | TextEdit, WorkspaceEdit, | ||
15 | }; | 20 | }; |
16 | use ra_ide::{ | 21 | use ra_ide::{ |
17 | AssistId, FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind, | 22 | AssistId, FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind, |
@@ -29,6 +34,7 @@ use crate::{ | |||
29 | to_call_hierarchy_item, to_location, Conv, ConvWith, FoldConvCtx, MapConvWith, TryConvWith, | 34 | to_call_hierarchy_item, to_location, Conv, ConvWith, FoldConvCtx, MapConvWith, TryConvWith, |
30 | TryConvWithToVec, | 35 | TryConvWithToVec, |
31 | }, | 36 | }, |
37 | diagnostics::DiagnosticTask, | ||
32 | req::{self, Decoration, InlayHint, InlayHintsParams, InlayKind}, | 38 | req::{self, Decoration, InlayHint, InlayHintsParams, InlayKind}, |
33 | world::WorldSnapshot, | 39 | world::WorldSnapshot, |
34 | LspError, Result, | 40 | LspError, Result, |
@@ -582,21 +588,19 @@ pub fn handle_formatting( | |||
582 | let file_line_index = world.analysis().file_line_index(file_id)?; | 588 | let file_line_index = world.analysis().file_line_index(file_id)?; |
583 | let end_position = TextUnit::of_str(&file).conv_with(&file_line_index); | 589 | let end_position = TextUnit::of_str(&file).conv_with(&file_line_index); |
584 | 590 | ||
585 | use std::process; | ||
586 | let mut rustfmt = process::Command::new("rustfmt"); | 591 | let mut rustfmt = process::Command::new("rustfmt"); |
587 | if let Some(&crate_id) = crate_ids.first() { | 592 | if let Some(&crate_id) = crate_ids.first() { |
588 | // Assume all crates are in the same edition | 593 | // Assume all crates are in the same edition |
589 | let edition = world.analysis().crate_edition(crate_id)?; | 594 | let edition = world.analysis().crate_edition(crate_id)?; |
590 | rustfmt.args(&["--edition", &edition.to_string()]); | 595 | rustfmt.args(&["--edition", &edition.to_string()]); |
591 | } | 596 | } |
592 | rustfmt.stdin(process::Stdio::piped()).stdout(process::Stdio::piped()); | ||
593 | 597 | ||
594 | if let Ok(path) = params.text_document.uri.to_file_path() { | 598 | if let Ok(path) = params.text_document.uri.to_file_path() { |
595 | if let Some(parent) = path.parent() { | 599 | if let Some(parent) = path.parent() { |
596 | rustfmt.current_dir(parent); | 600 | rustfmt.current_dir(parent); |
597 | } | 601 | } |
598 | } | 602 | } |
599 | let mut rustfmt = rustfmt.spawn()?; | 603 | let mut rustfmt = rustfmt.stdin(Stdio::piped()).stdout(Stdio::piped()).spawn()?; |
600 | 604 | ||
601 | rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?; | 605 | rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?; |
602 | 606 | ||
@@ -674,59 +678,61 @@ pub fn handle_code_action( | |||
674 | res.push(action.into()); | 678 | res.push(action.into()); |
675 | } | 679 | } |
676 | 680 | ||
677 | for fix in world.check_watcher.read().fixes_for(¶ms.text_document.uri).into_iter().flatten() | 681 | for fix in world.check_fixes.get(&file_id).into_iter().flatten() { |
678 | { | 682 | let fix_range = fix.range.conv_with(&line_index); |
679 | let fix_range = fix.location.range.conv_with(&line_index); | ||
680 | if fix_range.intersection(&range).is_none() { | 683 | if fix_range.intersection(&range).is_none() { |
681 | continue; | 684 | continue; |
682 | } | 685 | } |
686 | res.push(fix.action.clone()); | ||
687 | } | ||
683 | 688 | ||
684 | let edit = { | 689 | let mut groups = FxHashMap::default(); |
685 | let edits = vec![TextEdit::new(fix.location.range, fix.replacement.clone())]; | 690 | for assist in world.analysis().assists(FileRange { file_id, range })?.into_iter() { |
686 | let mut edit_map = std::collections::HashMap::new(); | 691 | let arg = to_value(assist.source_change.try_conv_with(&world)?)?; |
687 | edit_map.insert(fix.location.uri.clone(), edits); | 692 | |
688 | WorkspaceEdit::new(edit_map) | 693 | let (command, title, arg) = match assist.group_label { |
694 | None => ("rust-analyzer.applySourceChange", assist.label.clone(), arg), | ||
695 | |||
696 | // Group all assists with the same `group_label` into a single CodeAction. | ||
697 | Some(group_label) => { | ||
698 | match groups.entry(group_label.clone()) { | ||
699 | Entry::Occupied(entry) => { | ||
700 | let idx: usize = *entry.get(); | ||
701 | match &mut res[idx] { | ||
702 | CodeActionOrCommand::CodeAction(CodeAction { | ||
703 | command: Some(Command { arguments: Some(arguments), .. }), | ||
704 | .. | ||
705 | }) => match arguments.as_mut_slice() { | ||
706 | [serde_json::Value::Array(arguments)] => arguments.push(arg), | ||
707 | _ => panic!("invalid group"), | ||
708 | }, | ||
709 | _ => panic!("invalid group"), | ||
710 | } | ||
711 | continue; | ||
712 | } | ||
713 | Entry::Vacant(entry) => { | ||
714 | entry.insert(res.len()); | ||
715 | } | ||
716 | } | ||
717 | ("rust-analyzer.selectAndApplySourceChange", group_label, to_value(vec![arg])?) | ||
718 | } | ||
689 | }; | 719 | }; |
690 | 720 | ||
691 | let action = CodeAction { | 721 | let command = Command { |
692 | title: fix.title.clone(), | 722 | title: assist.label.clone(), |
693 | kind: Some("quickfix".to_string()), | 723 | command: command.to_string(), |
694 | diagnostics: Some(fix.diagnostics.clone()), | 724 | arguments: Some(vec![arg]), |
695 | edit: Some(edit), | ||
696 | command: None, | ||
697 | is_preferred: None, | ||
698 | }; | 725 | }; |
699 | res.push(action.into()); | ||
700 | } | ||
701 | 726 | ||
702 | for assist in world.analysis().assists(FileRange { file_id, range })?.into_iter() { | 727 | let kind = match assist.id { |
703 | let title = assist.label.clone(); | 728 | AssistId("introduce_variable") => Some("refactor.extract.variable".to_string()), |
704 | 729 | AssistId("add_custom_impl") => Some("refactor.rewrite.add_custom_impl".to_string()), | |
705 | let command = match assist.change_data { | 730 | _ => None, |
706 | Either::Left(change) => Command { | ||
707 | title, | ||
708 | command: "rust-analyzer.applySourceChange".to_string(), | ||
709 | arguments: Some(vec![to_value(change.try_conv_with(&world)?)?]), | ||
710 | }, | ||
711 | Either::Right(changes) => Command { | ||
712 | title, | ||
713 | command: "rust-analyzer.selectAndApplySourceChange".to_string(), | ||
714 | arguments: Some(vec![to_value( | ||
715 | changes | ||
716 | .into_iter() | ||
717 | .map(|change| change.try_conv_with(&world)) | ||
718 | .collect::<Result<Vec<_>>>()?, | ||
719 | )?]), | ||
720 | }, | ||
721 | }; | 731 | }; |
722 | 732 | ||
723 | let action = CodeAction { | 733 | let action = CodeAction { |
724 | title: command.title.clone(), | 734 | title, |
725 | kind: match assist.id { | 735 | kind, |
726 | AssistId("introduce_variable") => Some("refactor.extract.variable".to_string()), | ||
727 | AssistId("add_custom_impl") => Some("refactor.rewrite.add_custom_impl".to_string()), | ||
728 | _ => None, | ||
729 | }, | ||
730 | diagnostics: None, | 736 | diagnostics: None, |
731 | edit: None, | 737 | edit: None, |
732 | command: Some(command), | 738 | command: Some(command), |
@@ -874,14 +880,10 @@ pub fn handle_document_highlight( | |||
874 | )) | 880 | )) |
875 | } | 881 | } |
876 | 882 | ||
877 | pub fn publish_diagnostics( | 883 | pub fn publish_diagnostics(world: &WorldSnapshot, file_id: FileId) -> Result<DiagnosticTask> { |
878 | world: &WorldSnapshot, | ||
879 | file_id: FileId, | ||
880 | ) -> Result<req::PublishDiagnosticsParams> { | ||
881 | let _p = profile("publish_diagnostics"); | 884 | let _p = profile("publish_diagnostics"); |
882 | let uri = world.file_id_to_uri(file_id)?; | ||
883 | let line_index = world.analysis().file_line_index(file_id)?; | 885 | let line_index = world.analysis().file_line_index(file_id)?; |
884 | let mut diagnostics: Vec<Diagnostic> = world | 886 | let diagnostics: Vec<Diagnostic> = world |
885 | .analysis() | 887 | .analysis() |
886 | .diagnostics(file_id)? | 888 | .diagnostics(file_id)? |
887 | .into_iter() | 889 | .into_iter() |
@@ -895,10 +897,7 @@ pub fn publish_diagnostics( | |||
895 | tags: None, | 897 | tags: None, |
896 | }) | 898 | }) |
897 | .collect(); | 899 | .collect(); |
898 | if let Some(check_diags) = world.check_watcher.read().diagnostics_for(&uri) { | 900 | Ok(DiagnosticTask::SetNative(file_id, diagnostics)) |
899 | diagnostics.extend(check_diags.iter().cloned()); | ||
900 | } | ||
901 | Ok(req::PublishDiagnosticsParams { uri, diagnostics, version: None }) | ||
902 | } | 901 | } |
903 | 902 | ||
904 | pub fn publish_decorations( | 903 | pub fn publish_decorations( |
diff --git a/crates/ra_lsp_server/src/world.rs b/crates/ra_lsp_server/src/world.rs index e7a0acfc7..1ee02b47c 100644 --- a/crates/ra_lsp_server/src/world.rs +++ b/crates/ra_lsp_server/src/world.rs | |||
@@ -12,9 +12,7 @@ use crossbeam_channel::{unbounded, Receiver}; | |||
12 | use lsp_server::ErrorCode; | 12 | use lsp_server::ErrorCode; |
13 | use lsp_types::Url; | 13 | use lsp_types::Url; |
14 | use parking_lot::RwLock; | 14 | use parking_lot::RwLock; |
15 | use ra_cargo_watch::{ | 15 | use ra_cargo_watch::{url_from_path_with_drive_lowercasing, CheckOptions, CheckWatcher}; |
16 | url_from_path_with_drive_lowercasing, CheckOptions, CheckState, CheckWatcher, | ||
17 | }; | ||
18 | use ra_ide::{ | 16 | use ra_ide::{ |
19 | Analysis, AnalysisChange, AnalysisHost, CrateGraph, FeatureFlags, FileId, LibraryData, | 17 | Analysis, AnalysisChange, AnalysisHost, CrateGraph, FeatureFlags, FileId, LibraryData, |
20 | SourceRootId, | 18 | SourceRootId, |
@@ -25,6 +23,7 @@ use ra_vfs_glob::{Glob, RustPackageFilterBuilder}; | |||
25 | use relative_path::RelativePathBuf; | 23 | use relative_path::RelativePathBuf; |
26 | 24 | ||
27 | use crate::{ | 25 | use crate::{ |
26 | diagnostics::{CheckFixes, DiagnosticCollection}, | ||
28 | main_loop::pending_requests::{CompletedRequest, LatestRequests}, | 27 | main_loop::pending_requests::{CompletedRequest, LatestRequests}, |
29 | LspError, Result, | 28 | LspError, Result, |
30 | }; | 29 | }; |
@@ -55,6 +54,7 @@ pub struct WorldState { | |||
55 | pub task_receiver: Receiver<VfsTask>, | 54 | pub task_receiver: Receiver<VfsTask>, |
56 | pub latest_requests: Arc<RwLock<LatestRequests>>, | 55 | pub latest_requests: Arc<RwLock<LatestRequests>>, |
57 | pub check_watcher: CheckWatcher, | 56 | pub check_watcher: CheckWatcher, |
57 | pub diagnostics: DiagnosticCollection, | ||
58 | } | 58 | } |
59 | 59 | ||
60 | /// An immutable snapshot of the world's state at a point in time. | 60 | /// An immutable snapshot of the world's state at a point in time. |
@@ -63,7 +63,7 @@ pub struct WorldSnapshot { | |||
63 | pub workspaces: Arc<Vec<ProjectWorkspace>>, | 63 | pub workspaces: Arc<Vec<ProjectWorkspace>>, |
64 | pub analysis: Analysis, | 64 | pub analysis: Analysis, |
65 | pub latest_requests: Arc<RwLock<LatestRequests>>, | 65 | pub latest_requests: Arc<RwLock<LatestRequests>>, |
66 | pub check_watcher: Arc<RwLock<CheckState>>, | 66 | pub check_fixes: CheckFixes, |
67 | vfs: Arc<RwLock<Vfs>>, | 67 | vfs: Arc<RwLock<Vfs>>, |
68 | } | 68 | } |
69 | 69 | ||
@@ -159,6 +159,7 @@ impl WorldState { | |||
159 | task_receiver, | 159 | task_receiver, |
160 | latest_requests: Default::default(), | 160 | latest_requests: Default::default(), |
161 | check_watcher, | 161 | check_watcher, |
162 | diagnostics: Default::default(), | ||
162 | } | 163 | } |
163 | } | 164 | } |
164 | 165 | ||
@@ -220,7 +221,7 @@ impl WorldState { | |||
220 | analysis: self.analysis_host.analysis(), | 221 | analysis: self.analysis_host.analysis(), |
221 | vfs: Arc::clone(&self.vfs), | 222 | vfs: Arc::clone(&self.vfs), |
222 | latest_requests: Arc::clone(&self.latest_requests), | 223 | latest_requests: Arc::clone(&self.latest_requests), |
223 | check_watcher: self.check_watcher.state.clone(), | 224 | check_fixes: Arc::clone(&self.diagnostics.check_fixes), |
224 | } | 225 | } |
225 | } | 226 | } |
226 | 227 | ||
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs index b841c39d3..c9f42b3dd 100644 --- a/crates/ra_mbe/src/subtree_source.rs +++ b/crates/ra_mbe/src/subtree_source.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use ra_parser::{Token, TokenSource}; | 3 | use ra_parser::{Token, TokenSource}; |
4 | use ra_syntax::{classify_literal, SmolStr, SyntaxKind, SyntaxKind::*, T}; | 4 | use ra_syntax::{lex_single_valid_syntax_kind, SmolStr, SyntaxKind, SyntaxKind::*, T}; |
5 | use std::cell::{Cell, Ref, RefCell}; | 5 | use std::cell::{Cell, Ref, RefCell}; |
6 | use tt::buffer::{Cursor, TokenBuffer}; | 6 | use tt::buffer::{Cursor, TokenBuffer}; |
7 | 7 | ||
@@ -129,8 +129,9 @@ fn convert_delim(d: Option<tt::DelimiterKind>, closing: bool) -> TtToken { | |||
129 | } | 129 | } |
130 | 130 | ||
131 | fn convert_literal(l: &tt::Literal) -> TtToken { | 131 | fn convert_literal(l: &tt::Literal) -> TtToken { |
132 | let kind = | 132 | let kind = lex_single_valid_syntax_kind(&l.text) |
133 | classify_literal(&l.text).map(|tkn| tkn.kind).unwrap_or_else(|| match l.text.as_ref() { | 133 | .filter(|kind| kind.is_literal()) |
134 | .unwrap_or_else(|| match l.text.as_ref() { | ||
134 | "true" => T![true], | 135 | "true" => T![true], |
135 | "false" => T![false], | 136 | "false" => T![false], |
136 | _ => panic!("Fail to convert given literal {:#?}", &l), | 137 | _ => panic!("Fail to convert given literal {:#?}", &l), |
diff --git a/crates/ra_parser/src/grammar/expressions/atom.rs b/crates/ra_parser/src/grammar/expressions/atom.rs index 2cc321473..f154077a8 100644 --- a/crates/ra_parser/src/grammar/expressions/atom.rs +++ b/crates/ra_parser/src/grammar/expressions/atom.rs | |||
@@ -229,7 +229,7 @@ fn lambda_expr(p: &mut Parser) -> CompletedMarker { | |||
229 | let m = p.start(); | 229 | let m = p.start(); |
230 | p.eat(T![async]); | 230 | p.eat(T![async]); |
231 | p.eat(T![move]); | 231 | p.eat(T![move]); |
232 | params::param_list_opt_types(p); | 232 | params::param_list_closure(p); |
233 | if opt_fn_ret_type(p) { | 233 | if opt_fn_ret_type(p) { |
234 | if !p.at(T!['{']) { | 234 | if !p.at(T!['{']) { |
235 | p.error("expected `{`"); | 235 | p.error("expected `{`"); |
diff --git a/crates/ra_parser/src/grammar/items.rs b/crates/ra_parser/src/grammar/items.rs index 6e23d9b72..54284c933 100644 --- a/crates/ra_parser/src/grammar/items.rs +++ b/crates/ra_parser/src/grammar/items.rs | |||
@@ -91,13 +91,6 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker, flavor: ItemFlavor) -> Resul | |||
91 | // modifiers | 91 | // modifiers |
92 | has_mods |= p.eat(T![const]); | 92 | has_mods |= p.eat(T![const]); |
93 | 93 | ||
94 | // test_err unsafe_block_in_mod | ||
95 | // fn foo(){} unsafe { } fn bar(){} | ||
96 | if p.at(T![unsafe]) && p.nth(1) != T!['{'] { | ||
97 | p.eat(T![unsafe]); | ||
98 | has_mods = true; | ||
99 | } | ||
100 | |||
101 | // test_err async_without_semicolon | 94 | // test_err async_without_semicolon |
102 | // fn foo() { let _ = async {} } | 95 | // fn foo() { let _ = async {} } |
103 | if p.at(T![async]) && p.nth(1) != T!['{'] && p.nth(1) != T![move] && p.nth(1) != T![|] { | 96 | if p.at(T![async]) && p.nth(1) != T!['{'] && p.nth(1) != T![move] && p.nth(1) != T![|] { |
@@ -105,6 +98,13 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker, flavor: ItemFlavor) -> Resul | |||
105 | has_mods = true; | 98 | has_mods = true; |
106 | } | 99 | } |
107 | 100 | ||
101 | // test_err unsafe_block_in_mod | ||
102 | // fn foo(){} unsafe { } fn bar(){} | ||
103 | if p.at(T![unsafe]) && p.nth(1) != T!['{'] { | ||
104 | p.eat(T![unsafe]); | ||
105 | has_mods = true; | ||
106 | } | ||
107 | |||
108 | if p.at(T![extern]) { | 108 | if p.at(T![extern]) { |
109 | has_mods = true; | 109 | has_mods = true; |
110 | abi(p); | 110 | abi(p); |
@@ -157,14 +157,14 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker, flavor: ItemFlavor) -> Resul | |||
157 | // unsafe fn foo() {} | 157 | // unsafe fn foo() {} |
158 | 158 | ||
159 | // test combined_fns | 159 | // test combined_fns |
160 | // unsafe async fn foo() {} | 160 | // async unsafe fn foo() {} |
161 | // const unsafe fn bar() {} | 161 | // const unsafe fn bar() {} |
162 | 162 | ||
163 | // test_err wrong_order_fns | 163 | // test_err wrong_order_fns |
164 | // async unsafe fn foo() {} | 164 | // unsafe async fn foo() {} |
165 | // unsafe const fn bar() {} | 165 | // unsafe const fn bar() {} |
166 | T![fn] => { | 166 | T![fn] => { |
167 | fn_def(p, flavor); | 167 | fn_def(p); |
168 | m.complete(p, FN_DEF); | 168 | m.complete(p, FN_DEF); |
169 | } | 169 | } |
170 | 170 | ||
@@ -301,7 +301,7 @@ pub(crate) fn extern_item_list(p: &mut Parser) { | |||
301 | m.complete(p, EXTERN_ITEM_LIST); | 301 | m.complete(p, EXTERN_ITEM_LIST); |
302 | } | 302 | } |
303 | 303 | ||
304 | fn fn_def(p: &mut Parser, flavor: ItemFlavor) { | 304 | fn fn_def(p: &mut Parser) { |
305 | assert!(p.at(T![fn])); | 305 | assert!(p.at(T![fn])); |
306 | p.bump(T![fn]); | 306 | p.bump(T![fn]); |
307 | 307 | ||
@@ -311,10 +311,7 @@ fn fn_def(p: &mut Parser, flavor: ItemFlavor) { | |||
311 | type_params::opt_type_param_list(p); | 311 | type_params::opt_type_param_list(p); |
312 | 312 | ||
313 | if p.at(T!['(']) { | 313 | if p.at(T!['(']) { |
314 | match flavor { | 314 | params::param_list_fn_def(p); |
315 | ItemFlavor::Mod => params::param_list(p), | ||
316 | ItemFlavor::Trait => params::param_list_opt_patterns(p), | ||
317 | } | ||
318 | } else { | 315 | } else { |
319 | p.error("expected function arguments"); | 316 | p.error("expected function arguments"); |
320 | } | 317 | } |
diff --git a/crates/ra_parser/src/grammar/params.rs b/crates/ra_parser/src/grammar/params.rs index c10b53316..94edc7f35 100644 --- a/crates/ra_parser/src/grammar/params.rs +++ b/crates/ra_parser/src/grammar/params.rs | |||
@@ -7,54 +7,60 @@ use super::*; | |||
7 | // fn b(x: i32) {} | 7 | // fn b(x: i32) {} |
8 | // fn c(x: i32, ) {} | 8 | // fn c(x: i32, ) {} |
9 | // fn d(x: i32, y: ()) {} | 9 | // fn d(x: i32, y: ()) {} |
10 | pub(super) fn param_list(p: &mut Parser) { | 10 | pub(super) fn param_list_fn_def(p: &mut Parser) { |
11 | list_(p, Flavor::Normal) | 11 | list_(p, Flavor::FnDef) |
12 | } | 12 | } |
13 | 13 | ||
14 | // test param_list_opt_patterns | 14 | // test param_list_opt_patterns |
15 | // fn foo<F: FnMut(&mut Foo<'a>)>(){} | 15 | // fn foo<F: FnMut(&mut Foo<'a>)>(){} |
16 | pub(super) fn param_list_opt_patterns(p: &mut Parser) { | 16 | pub(super) fn param_list_fn_trait(p: &mut Parser) { |
17 | list_(p, Flavor::OptionalPattern) | 17 | list_(p, Flavor::FnTrait) |
18 | } | 18 | } |
19 | 19 | ||
20 | pub(super) fn param_list_opt_types(p: &mut Parser) { | 20 | pub(super) fn param_list_fn_ptr(p: &mut Parser) { |
21 | list_(p, Flavor::OptionalType) | 21 | list_(p, Flavor::FnPointer) |
22 | } | 22 | } |
23 | 23 | ||
24 | #[derive(Clone, Copy, Eq, PartialEq)] | 24 | pub(super) fn param_list_closure(p: &mut Parser) { |
25 | enum Flavor { | 25 | list_(p, Flavor::Closure) |
26 | OptionalType, | ||
27 | OptionalPattern, | ||
28 | Normal, | ||
29 | } | 26 | } |
30 | 27 | ||
31 | impl Flavor { | 28 | #[derive(Debug, Clone, Copy)] |
32 | fn type_required(self) -> bool { | 29 | enum Flavor { |
33 | match self { | 30 | FnDef, // Includes trait fn params; omitted param idents are not supported |
34 | Flavor::OptionalType => false, | 31 | FnTrait, // Params for `Fn(...)`/`FnMut(...)`/`FnOnce(...)` annotations |
35 | _ => true, | 32 | FnPointer, |
36 | } | 33 | Closure, |
37 | } | ||
38 | } | 34 | } |
39 | 35 | ||
40 | fn list_(p: &mut Parser, flavor: Flavor) { | 36 | fn list_(p: &mut Parser, flavor: Flavor) { |
41 | let (bra, ket) = if flavor.type_required() { (T!['('], T![')']) } else { (T![|], T![|]) }; | 37 | use Flavor::*; |
42 | assert!(p.at(bra)); | 38 | |
39 | let (bra, ket) = match flavor { | ||
40 | Closure => (T![|], T![|]), | ||
41 | FnDef | FnTrait | FnPointer => (T!['('], T![')']), | ||
42 | }; | ||
43 | |||
43 | let m = p.start(); | 44 | let m = p.start(); |
44 | p.bump(bra); | 45 | p.bump(bra); |
45 | if flavor.type_required() { | 46 | |
47 | if let FnDef = flavor { | ||
46 | // test self_param_outer_attr | 48 | // test self_param_outer_attr |
47 | // fn f(#[must_use] self) {} | 49 | // fn f(#[must_use] self) {} |
48 | attributes::outer_attributes(p); | 50 | attributes::outer_attributes(p); |
49 | opt_self_param(p); | 51 | opt_self_param(p); |
50 | } | 52 | } |
53 | |||
51 | while !p.at(EOF) && !p.at(ket) { | 54 | while !p.at(EOF) && !p.at(ket) { |
52 | // test param_outer_arg | 55 | // test param_outer_arg |
53 | // fn f(#[attr1] pat: Type) {} | 56 | // fn f(#[attr1] pat: Type) {} |
54 | attributes::outer_attributes(p); | 57 | attributes::outer_attributes(p); |
55 | 58 | ||
56 | if flavor.type_required() && p.at(T![...]) { | 59 | // test param_list_vararg |
57 | break; | 60 | // extern "C" { fn printf(format: *const i8, ...) -> i32; } |
61 | match flavor { | ||
62 | FnDef | FnPointer if p.eat(T![...]) => break, | ||
63 | _ => (), | ||
58 | } | 64 | } |
59 | 65 | ||
60 | if !p.at_ts(VALUE_PARAMETER_FIRST) { | 66 | if !p.at_ts(VALUE_PARAMETER_FIRST) { |
@@ -66,11 +72,7 @@ fn list_(p: &mut Parser, flavor: Flavor) { | |||
66 | p.expect(T![,]); | 72 | p.expect(T![,]); |
67 | } | 73 | } |
68 | } | 74 | } |
69 | // test param_list_vararg | 75 | |
70 | // extern "C" { fn printf(format: *const i8, ...) -> i32; } | ||
71 | if flavor.type_required() { | ||
72 | p.eat(T![...]); | ||
73 | } | ||
74 | p.expect(ket); | 76 | p.expect(ket); |
75 | m.complete(p, PARAM_LIST); | 77 | m.complete(p, PARAM_LIST); |
76 | } | 78 | } |
@@ -80,36 +82,56 @@ const VALUE_PARAMETER_FIRST: TokenSet = patterns::PATTERN_FIRST.union(types::TYP | |||
80 | fn value_parameter(p: &mut Parser, flavor: Flavor) { | 82 | fn value_parameter(p: &mut Parser, flavor: Flavor) { |
81 | let m = p.start(); | 83 | let m = p.start(); |
82 | match flavor { | 84 | match flavor { |
83 | Flavor::OptionalType | Flavor::Normal => { | 85 | // test trait_fn_placeholder_parameter |
86 | // trait Foo { | ||
87 | // fn bar(_: u64, mut x: i32); | ||
88 | // } | ||
89 | |||
90 | // test trait_fn_patterns | ||
91 | // trait T { | ||
92 | // fn f1((a, b): (usize, usize)) {} | ||
93 | // fn f2(S { a, b }: S) {} | ||
94 | // fn f3(NewType(a): NewType) {} | ||
95 | // fn f4(&&a: &&usize) {} | ||
96 | // } | ||
97 | |||
98 | // test fn_patterns | ||
99 | // impl U { | ||
100 | // fn f1((a, b): (usize, usize)) {} | ||
101 | // fn f2(S { a, b }: S) {} | ||
102 | // fn f3(NewType(a): NewType) {} | ||
103 | // fn f4(&&a: &&usize) {} | ||
104 | // } | ||
105 | Flavor::FnDef => { | ||
84 | patterns::pattern(p); | 106 | patterns::pattern(p); |
85 | if p.at(T![:]) && !p.at(T![::]) || flavor.type_required() { | 107 | types::ascription(p); |
86 | types::ascription(p) | ||
87 | } | ||
88 | } | 108 | } |
89 | // test value_parameters_no_patterns | 109 | // test value_parameters_no_patterns |
90 | // type F = Box<Fn(a: i32, &b: &i32, &mut c: &i32, ())>; | 110 | // type F = Box<Fn(i32, &i32, &i32, ())>; |
91 | Flavor::OptionalPattern => { | 111 | Flavor::FnTrait => { |
92 | let la0 = p.current(); | 112 | types::type_(p); |
93 | let la1 = p.nth(1); | 113 | } |
94 | let la2 = p.nth(2); | 114 | // test fn_pointer_param_ident_path |
95 | let la3 = p.nth(3); | 115 | // type Foo = fn(Bar::Baz); |
96 | 116 | // type Qux = fn(baz: Bar::Baz); | |
97 | // test trait_fn_placeholder_parameter | 117 | Flavor::FnPointer => { |
98 | // trait Foo { | 118 | if p.at(IDENT) && p.nth(1) == T![:] && !p.nth_at(1, T![::]) { |
99 | // fn bar(_: u64, mut x: i32); | ||
100 | // } | ||
101 | if (la0 == IDENT || la0 == T![_]) && la1 == T![:] && !p.nth_at(1, T![::]) | ||
102 | || la0 == T![mut] && la1 == IDENT && la2 == T![:] | ||
103 | || la0 == T![&] | ||
104 | && (la1 == IDENT && la2 == T![:] && !p.nth_at(2, T![::]) | ||
105 | || la1 == T![mut] && la2 == IDENT && la3 == T![:] && !p.nth_at(3, T![::])) | ||
106 | { | ||
107 | patterns::pattern(p); | 119 | patterns::pattern(p); |
108 | types::ascription(p); | 120 | types::ascription(p); |
109 | } else { | 121 | } else { |
110 | types::type_(p); | 122 | types::type_(p); |
111 | } | 123 | } |
112 | } | 124 | } |
125 | // test closure_params | ||
126 | // fn main() { | ||
127 | // let foo = |bar, baz: Baz, qux: Qux::Quux| (); | ||
128 | // } | ||
129 | Flavor::Closure => { | ||
130 | patterns::pattern(p); | ||
131 | if p.at(T![:]) && !p.at(T![::]) { | ||
132 | types::ascription(p); | ||
133 | } | ||
134 | } | ||
113 | } | 135 | } |
114 | m.complete(p, PARAM); | 136 | m.complete(p, PARAM); |
115 | } | 137 | } |
diff --git a/crates/ra_parser/src/grammar/paths.rs b/crates/ra_parser/src/grammar/paths.rs index ca8e075a1..f5bf3d7ce 100644 --- a/crates/ra_parser/src/grammar/paths.rs +++ b/crates/ra_parser/src/grammar/paths.rs | |||
@@ -97,9 +97,9 @@ fn opt_path_type_args(p: &mut Parser, mode: Mode) { | |||
97 | Mode::Use => return, | 97 | Mode::Use => return, |
98 | Mode::Type => { | 98 | Mode::Type => { |
99 | // test path_fn_trait_args | 99 | // test path_fn_trait_args |
100 | // type F = Box<Fn(x: i32) -> ()>; | 100 | // type F = Box<Fn(i32) -> ()>; |
101 | if p.at(T!['(']) { | 101 | if p.at(T!['(']) { |
102 | params::param_list_opt_patterns(p); | 102 | params::param_list_fn_trait(p); |
103 | opt_fn_ret_type(p); | 103 | opt_fn_ret_type(p); |
104 | } else { | 104 | } else { |
105 | type_args::opt_type_arg_list(p, false) | 105 | type_args::opt_type_arg_list(p, false) |
diff --git a/crates/ra_parser/src/grammar/types.rs b/crates/ra_parser/src/grammar/types.rs index 9b2e440fb..2c00bce80 100644 --- a/crates/ra_parser/src/grammar/types.rs +++ b/crates/ra_parser/src/grammar/types.rs | |||
@@ -183,7 +183,7 @@ fn fn_pointer_type(p: &mut Parser) { | |||
183 | return; | 183 | return; |
184 | } | 184 | } |
185 | if p.at(T!['(']) { | 185 | if p.at(T!['(']) { |
186 | params::param_list_opt_patterns(p); | 186 | params::param_list_fn_ptr(p); |
187 | } else { | 187 | } else { |
188 | p.error("expected parameters") | 188 | p.error("expected parameters") |
189 | } | 189 | } |
diff --git a/crates/ra_parser/src/syntax_kind/generated.rs b/crates/ra_parser/src/syntax_kind/generated.rs index 4b301d67a..e27b27ffa 100644 --- a/crates/ra_parser/src/syntax_kind/generated.rs +++ b/crates/ra_parser/src/syntax_kind/generated.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | //! Generated file, do not edit by hand, see `crate/ra_tools/src/codegen` | 1 | //! Generated file, do not edit by hand, see `xtask/src/codegen` |
2 | 2 | ||
3 | #![allow(bad_style, missing_docs, unreachable_pub)] | 3 | #![allow(bad_style, missing_docs, unreachable_pub)] |
4 | #[doc = r" The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT_DEF`."] | 4 | #[doc = r" The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT_DEF`."] |
diff --git a/crates/ra_prof/src/lib.rs b/crates/ra_prof/src/lib.rs index da541005a..d38ff397e 100644 --- a/crates/ra_prof/src/lib.rs +++ b/crates/ra_prof/src/lib.rs | |||
@@ -6,10 +6,9 @@ mod google_cpu_profiler; | |||
6 | 6 | ||
7 | use std::{ | 7 | use std::{ |
8 | cell::RefCell, | 8 | cell::RefCell, |
9 | collections::BTreeMap, | ||
9 | collections::HashSet, | 10 | collections::HashSet, |
10 | io::{stderr, Write}, | 11 | io::{stderr, Write}, |
11 | iter::repeat, | ||
12 | mem, | ||
13 | sync::{ | 12 | sync::{ |
14 | atomic::{AtomicBool, Ordering}, | 13 | atomic::{AtomicBool, Ordering}, |
15 | RwLock, | 14 | RwLock, |
@@ -17,7 +16,6 @@ use std::{ | |||
17 | time::{Duration, Instant}, | 16 | time::{Duration, Instant}, |
18 | }; | 17 | }; |
19 | 18 | ||
20 | use itertools::Itertools; | ||
21 | use once_cell::sync::Lazy; | 19 | use once_cell::sync::Lazy; |
22 | 20 | ||
23 | pub use crate::memory_usage::{Bytes, MemoryUsage}; | 21 | pub use crate::memory_usage::{Bytes, MemoryUsage}; |
@@ -51,6 +49,8 @@ pub fn set_filter(f: Filter) { | |||
51 | *old = filter_data; | 49 | *old = filter_data; |
52 | } | 50 | } |
53 | 51 | ||
52 | pub type Label = &'static str; | ||
53 | |||
54 | /// This function starts a profiling scope in the current execution stack with a given description. | 54 | /// This function starts a profiling scope in the current execution stack with a given description. |
55 | /// It returns a Profile structure and measure elapsed time between this method invocation and Profile structure drop. | 55 | /// It returns a Profile structure and measure elapsed time between this method invocation and Profile structure drop. |
56 | /// It supports nested profiling scopes in case when this function invoked multiple times at the execution stack. In this case the profiling information will be nested at the output. | 56 | /// It supports nested profiling scopes in case when this function invoked multiple times at the execution stack. In this case the profiling information will be nested at the output. |
@@ -78,10 +78,10 @@ pub fn set_filter(f: Filter) { | |||
78 | /// 0ms - profile | 78 | /// 0ms - profile |
79 | /// 0ms - profile2 | 79 | /// 0ms - profile2 |
80 | /// ``` | 80 | /// ``` |
81 | pub fn profile(desc: &str) -> Profiler { | 81 | pub fn profile(label: Label) -> Profiler { |
82 | assert!(!desc.is_empty()); | 82 | assert!(!label.is_empty()); |
83 | if !PROFILING_ENABLED.load(Ordering::Relaxed) { | 83 | if !PROFILING_ENABLED.load(Ordering::Relaxed) { |
84 | return Profiler { desc: None }; | 84 | return Profiler { label: None }; |
85 | } | 85 | } |
86 | 86 | ||
87 | PROFILE_STACK.with(|stack| { | 87 | PROFILE_STACK.with(|stack| { |
@@ -94,20 +94,35 @@ pub fn profile(desc: &str) -> Profiler { | |||
94 | }; | 94 | }; |
95 | } | 95 | } |
96 | if stack.starts.len() > stack.filter_data.depth { | 96 | if stack.starts.len() > stack.filter_data.depth { |
97 | return Profiler { desc: None }; | 97 | return Profiler { label: None }; |
98 | } | 98 | } |
99 | let allowed = &stack.filter_data.allowed; | 99 | let allowed = &stack.filter_data.allowed; |
100 | if stack.starts.is_empty() && !allowed.is_empty() && !allowed.contains(desc) { | 100 | if stack.starts.is_empty() && !allowed.is_empty() && !allowed.contains(label) { |
101 | return Profiler { desc: None }; | 101 | return Profiler { label: None }; |
102 | } | 102 | } |
103 | 103 | ||
104 | stack.starts.push(Instant::now()); | 104 | stack.starts.push(Instant::now()); |
105 | Profiler { desc: Some(desc.to_string()) } | 105 | Profiler { label: Some(label) } |
106 | }) | 106 | }) |
107 | } | 107 | } |
108 | 108 | ||
109 | pub fn print_time(label: Label) -> impl Drop { | ||
110 | struct Guard { | ||
111 | label: Label, | ||
112 | start: Instant, | ||
113 | } | ||
114 | |||
115 | impl Drop for Guard { | ||
116 | fn drop(&mut self) { | ||
117 | eprintln!("{}: {:?}", self.label, self.start.elapsed()) | ||
118 | } | ||
119 | } | ||
120 | |||
121 | Guard { label, start: Instant::now() } | ||
122 | } | ||
123 | |||
109 | pub struct Profiler { | 124 | pub struct Profiler { |
110 | desc: Option<String>, | 125 | label: Option<Label>, |
111 | } | 126 | } |
112 | 127 | ||
113 | pub struct Filter { | 128 | pub struct Filter { |
@@ -160,7 +175,7 @@ struct ProfileStack { | |||
160 | struct Message { | 175 | struct Message { |
161 | level: usize, | 176 | level: usize, |
162 | duration: Duration, | 177 | duration: Duration, |
163 | message: String, | 178 | label: Label, |
164 | } | 179 | } |
165 | 180 | ||
166 | impl ProfileStack { | 181 | impl ProfileStack { |
@@ -186,14 +201,13 @@ thread_local!(static PROFILE_STACK: RefCell<ProfileStack> = RefCell::new(Profile | |||
186 | impl Drop for Profiler { | 201 | impl Drop for Profiler { |
187 | fn drop(&mut self) { | 202 | fn drop(&mut self) { |
188 | match self { | 203 | match self { |
189 | Profiler { desc: Some(desc) } => { | 204 | Profiler { label: Some(label) } => { |
190 | PROFILE_STACK.with(|stack| { | 205 | PROFILE_STACK.with(|stack| { |
191 | let mut stack = stack.borrow_mut(); | 206 | let mut stack = stack.borrow_mut(); |
192 | let start = stack.starts.pop().unwrap(); | 207 | let start = stack.starts.pop().unwrap(); |
193 | let duration = start.elapsed(); | 208 | let duration = start.elapsed(); |
194 | let level = stack.starts.len(); | 209 | let level = stack.starts.len(); |
195 | let message = mem::replace(desc, String::new()); | 210 | stack.messages.push(Message { level, duration, label: label }); |
196 | stack.messages.push(Message { level, duration, message }); | ||
197 | if level == 0 { | 211 | if level == 0 { |
198 | let stdout = stderr(); | 212 | let stdout = stderr(); |
199 | let longer_than = stack.filter_data.longer_than; | 213 | let longer_than = stack.filter_data.longer_than; |
@@ -201,70 +215,96 @@ impl Drop for Profiler { | |||
201 | // (otherwise we could print `0ms` despite user's `>0` filter when | 215 | // (otherwise we could print `0ms` despite user's `>0` filter when |
202 | // `duration` is just a few nanos). | 216 | // `duration` is just a few nanos). |
203 | if duration.as_millis() > longer_than.as_millis() { | 217 | if duration.as_millis() > longer_than.as_millis() { |
204 | print(0, &stack.messages, &mut stdout.lock(), longer_than, None); | 218 | print(&stack.messages, longer_than, &mut stdout.lock()); |
205 | } | 219 | } |
206 | stack.messages.clear(); | 220 | stack.messages.clear(); |
207 | } | 221 | } |
208 | }); | 222 | }); |
209 | } | 223 | } |
210 | Profiler { desc: None } => (), | 224 | Profiler { label: None } => (), |
211 | } | 225 | } |
212 | } | 226 | } |
213 | } | 227 | } |
214 | 228 | ||
215 | fn print( | 229 | fn print(msgs: &[Message], longer_than: Duration, out: &mut impl Write) { |
216 | lvl: usize, | ||
217 | msgs: &[Message], | ||
218 | out: &mut impl Write, | ||
219 | longer_than: Duration, | ||
220 | total: Option<Duration>, | ||
221 | ) { | ||
222 | if msgs.is_empty() { | 230 | if msgs.is_empty() { |
223 | return; | 231 | return; |
224 | } | 232 | } |
225 | // The index of the first element that will be included in the slice when we recurse. | 233 | let children_map = idx_to_children(msgs); |
226 | let mut next_start = 0; | 234 | let root_idx = msgs.len() - 1; |
227 | let indent = repeat(" ").take(lvl).collect::<String>(); | 235 | print_for_idx(root_idx, &children_map, msgs, longer_than, out); |
228 | // We output hierarchy for long calls, but sum up all short calls | 236 | } |
229 | let mut short = Vec::new(); | 237 | |
238 | fn print_for_idx( | ||
239 | current_idx: usize, | ||
240 | children_map: &[Vec<usize>], | ||
241 | msgs: &[Message], | ||
242 | longer_than: Duration, | ||
243 | out: &mut impl Write, | ||
244 | ) { | ||
245 | let current = &msgs[current_idx]; | ||
246 | let current_indent = " ".repeat(current.level); | ||
247 | writeln!(out, "{}{:5}ms - {}", current_indent, current.duration.as_millis(), current.label) | ||
248 | .expect("printing profiling info"); | ||
249 | |||
250 | let longer_than_millis = longer_than.as_millis(); | ||
251 | let children_indices = &children_map[current_idx]; | ||
230 | let mut accounted_for = Duration::default(); | 252 | let mut accounted_for = Duration::default(); |
231 | for (i, &Message { level, duration, message: ref msg }) in msgs.iter().enumerate() { | 253 | let mut short_children = BTreeMap::new(); // Use `BTreeMap` to get deterministic output. |
232 | if level != lvl { | ||
233 | continue; | ||
234 | } | ||
235 | accounted_for += duration; | ||
236 | if duration.as_millis() > longer_than.as_millis() { | ||
237 | writeln!(out, "{}{:5}ms - {}", indent, duration.as_millis(), msg) | ||
238 | .expect("printing profiling info to stdout"); | ||
239 | 254 | ||
240 | print(lvl + 1, &msgs[next_start..i], out, longer_than, Some(duration)); | 255 | for child_idx in children_indices.iter() { |
256 | let child = &msgs[*child_idx]; | ||
257 | if child.duration.as_millis() > longer_than_millis { | ||
258 | print_for_idx(*child_idx, children_map, msgs, longer_than, out); | ||
241 | } else { | 259 | } else { |
242 | short.push((msg, duration)) | 260 | let pair = short_children.entry(child.label).or_insert((Duration::default(), 0)); |
261 | pair.0 += child.duration; | ||
262 | pair.1 += 1; | ||
243 | } | 263 | } |
264 | accounted_for += child.duration; | ||
265 | } | ||
244 | 266 | ||
245 | next_start = i + 1; | 267 | for (child_msg, (duration, count)) in short_children.iter() { |
268 | let millis = duration.as_millis(); | ||
269 | writeln!(out, " {}{:5}ms - {} ({} calls)", current_indent, millis, child_msg, count) | ||
270 | .expect("printing profiling info"); | ||
246 | } | 271 | } |
247 | short.sort_by_key(|(msg, _time)| *msg); | 272 | |
248 | for (msg, entires) in short.iter().group_by(|(msg, _time)| msg).into_iter() { | 273 | let unaccounted_millis = (current.duration - accounted_for).as_millis(); |
249 | let mut count = 0; | 274 | if !children_indices.is_empty() |
250 | let mut total_duration = Duration::default(); | 275 | && unaccounted_millis > 0 |
251 | entires.for_each(|(_msg, time)| { | 276 | && unaccounted_millis > longer_than_millis |
252 | count += 1; | 277 | { |
253 | total_duration += *time; | 278 | writeln!(out, " {}{:5}ms - ???", current_indent, unaccounted_millis) |
254 | }); | 279 | .expect("printing profiling info"); |
255 | writeln!(out, "{}{:5}ms - {} ({} calls)", indent, total_duration.as_millis(), msg, count) | ||
256 | .expect("printing profiling info to stdout"); | ||
257 | } | 280 | } |
281 | } | ||
258 | 282 | ||
259 | if let Some(total) = total { | 283 | /// Returns a mapping from an index in the `msgs` to the vector with the indices of its children. |
260 | if let Some(unaccounted) = total.checked_sub(accounted_for) { | 284 | /// |
261 | let unaccounted_millis = unaccounted.as_millis(); | 285 | /// This assumes that the entries in `msgs` are in the order of when the calls to `profile` finish. |
262 | if unaccounted_millis > longer_than.as_millis() && unaccounted_millis > 0 { | 286 | /// In other words, a postorder of the call graph. In particular, the root is the last element of |
263 | writeln!(out, "{}{:5}ms - ???", indent, unaccounted_millis) | 287 | /// `msgs`. |
264 | .expect("printing profiling info to stdout"); | 288 | fn idx_to_children(msgs: &[Message]) -> Vec<Vec<usize>> { |
265 | } | 289 | // Initialize with the index of the root; `msgs` and `ancestors` should be never empty. |
290 | assert!(!msgs.is_empty()); | ||
291 | let mut ancestors = vec![msgs.len() - 1]; | ||
292 | let mut result: Vec<Vec<usize>> = vec![vec![]; msgs.len()]; | ||
293 | for (idx, msg) in msgs[..msgs.len() - 1].iter().enumerate().rev() { | ||
294 | // We need to find the parent of the current message, i.e., the last ancestor that has a | ||
295 | // level lower than the current message. | ||
296 | while msgs[*ancestors.last().unwrap()].level >= msg.level { | ||
297 | ancestors.pop(); | ||
266 | } | 298 | } |
299 | result[*ancestors.last().unwrap()].push(idx); | ||
300 | ancestors.push(idx); | ||
301 | } | ||
302 | // Note that above we visited all children from the last to the first one. Let's reverse vectors | ||
303 | // to get the more natural order where the first element is the first child. | ||
304 | for vec in result.iter_mut() { | ||
305 | vec.reverse(); | ||
267 | } | 306 | } |
307 | result | ||
268 | } | 308 | } |
269 | 309 | ||
270 | /// Prints backtrace to stderr, useful for debugging. | 310 | /// Prints backtrace to stderr, useful for debugging. |
@@ -369,11 +409,11 @@ mod tests { | |||
369 | fn test_longer_than() { | 409 | fn test_longer_than() { |
370 | let mut result = vec![]; | 410 | let mut result = vec![]; |
371 | let msgs = vec![ | 411 | let msgs = vec![ |
372 | Message { level: 1, duration: Duration::from_nanos(3), message: "bar".to_owned() }, | 412 | Message { level: 1, duration: Duration::from_nanos(3), label: "bar" }, |
373 | Message { level: 1, duration: Duration::from_nanos(2), message: "bar".to_owned() }, | 413 | Message { level: 1, duration: Duration::from_nanos(2), label: "bar" }, |
374 | Message { level: 0, duration: Duration::from_millis(1), message: "foo".to_owned() }, | 414 | Message { level: 0, duration: Duration::from_millis(1), label: "foo" }, |
375 | ]; | 415 | ]; |
376 | print(0, &msgs, &mut result, Duration::from_millis(0), Some(Duration::from_millis(1))); | 416 | print(&msgs, Duration::from_millis(0), &mut result); |
377 | // The calls to `bar` are so short that they'll be rounded to 0ms and should get collapsed | 417 | // The calls to `bar` are so short that they'll be rounded to 0ms and should get collapsed |
378 | // when printing. | 418 | // when printing. |
379 | assert_eq!( | 419 | assert_eq!( |
@@ -386,10 +426,10 @@ mod tests { | |||
386 | fn test_unaccounted_for_topmost() { | 426 | fn test_unaccounted_for_topmost() { |
387 | let mut result = vec![]; | 427 | let mut result = vec![]; |
388 | let msgs = vec![ | 428 | let msgs = vec![ |
389 | Message { level: 1, duration: Duration::from_millis(2), message: "bar".to_owned() }, | 429 | Message { level: 1, duration: Duration::from_millis(2), label: "bar" }, |
390 | Message { level: 0, duration: Duration::from_millis(5), message: "foo".to_owned() }, | 430 | Message { level: 0, duration: Duration::from_millis(5), label: "foo" }, |
391 | ]; | 431 | ]; |
392 | print(0, &msgs, &mut result, Duration::from_millis(0), Some(Duration::from_millis(1))); | 432 | print(&msgs, Duration::from_millis(0), &mut result); |
393 | assert_eq!( | 433 | assert_eq!( |
394 | std::str::from_utf8(&result).unwrap().lines().collect::<Vec<_>>(), | 434 | std::str::from_utf8(&result).unwrap().lines().collect::<Vec<_>>(), |
395 | vec![ | 435 | vec![ |
@@ -405,13 +445,13 @@ mod tests { | |||
405 | fn test_unaccounted_for_multiple_levels() { | 445 | fn test_unaccounted_for_multiple_levels() { |
406 | let mut result = vec![]; | 446 | let mut result = vec![]; |
407 | let msgs = vec![ | 447 | let msgs = vec![ |
408 | Message { level: 2, duration: Duration::from_millis(3), message: "baz".to_owned() }, | 448 | Message { level: 2, duration: Duration::from_millis(3), label: "baz" }, |
409 | Message { level: 1, duration: Duration::from_millis(5), message: "bar".to_owned() }, | 449 | Message { level: 1, duration: Duration::from_millis(5), label: "bar" }, |
410 | Message { level: 2, duration: Duration::from_millis(2), message: "baz".to_owned() }, | 450 | Message { level: 2, duration: Duration::from_millis(2), label: "baz" }, |
411 | Message { level: 1, duration: Duration::from_millis(4), message: "bar".to_owned() }, | 451 | Message { level: 1, duration: Duration::from_millis(4), label: "bar" }, |
412 | Message { level: 0, duration: Duration::from_millis(9), message: "foo".to_owned() }, | 452 | Message { level: 0, duration: Duration::from_millis(9), label: "foo" }, |
413 | ]; | 453 | ]; |
414 | print(0, &msgs, &mut result, Duration::from_millis(0), None); | 454 | print(&msgs, Duration::from_millis(0), &mut result); |
415 | assert_eq!( | 455 | assert_eq!( |
416 | std::str::from_utf8(&result).unwrap().lines().collect::<Vec<_>>(), | 456 | std::str::from_utf8(&result).unwrap().lines().collect::<Vec<_>>(), |
417 | vec![ | 457 | vec![ |
diff --git a/crates/ra_project_model/src/cargo_workspace.rs b/crates/ra_project_model/src/cargo_workspace.rs index 1832c101f..60cb8c1eb 100644 --- a/crates/ra_project_model/src/cargo_workspace.rs +++ b/crates/ra_project_model/src/cargo_workspace.rs | |||
@@ -207,9 +207,28 @@ impl CargoWorkspace { | |||
207 | } | 207 | } |
208 | let resolve = meta.resolve.expect("metadata executed with deps"); | 208 | let resolve = meta.resolve.expect("metadata executed with deps"); |
209 | for node in resolve.nodes { | 209 | for node in resolve.nodes { |
210 | let source = pkg_by_id[&node.id]; | 210 | let source = match pkg_by_id.get(&node.id) { |
211 | Some(&src) => src, | ||
212 | // FIXME: replace this and a similar branch below with `.unwrap`, once | ||
213 | // https://github.com/rust-lang/cargo/issues/7841 | ||
214 | // is fixed and hits stable (around 1.43-is probably?). | ||
215 | None => { | ||
216 | log::error!("Node id do not match in cargo metadata, ignoring {}", node.id); | ||
217 | continue; | ||
218 | } | ||
219 | }; | ||
211 | for dep_node in node.deps { | 220 | for dep_node in node.deps { |
212 | let dep = PackageDependency { name: dep_node.name, pkg: pkg_by_id[&dep_node.pkg] }; | 221 | let pkg = match pkg_by_id.get(&dep_node.pkg) { |
222 | Some(&pkg) => pkg, | ||
223 | None => { | ||
224 | log::error!( | ||
225 | "Dep node id do not match in cargo metadata, ignoring {}", | ||
226 | dep_node.pkg | ||
227 | ); | ||
228 | continue; | ||
229 | } | ||
230 | }; | ||
231 | let dep = PackageDependency { name: dep_node.name, pkg }; | ||
213 | packages[source].dependencies.push(dep); | 232 | packages[source].dependencies.push(dep); |
214 | } | 233 | } |
215 | packages[source].features.extend(node.features); | 234 | packages[source].features.extend(node.features); |
diff --git a/crates/ra_project_model/src/lib.rs b/crates/ra_project_model/src/lib.rs index 6a104e6f2..bc1d15406 100644 --- a/crates/ra_project_model/src/lib.rs +++ b/crates/ra_project_model/src/lib.rs | |||
@@ -13,7 +13,7 @@ use std::{ | |||
13 | }; | 13 | }; |
14 | 14 | ||
15 | use ra_cfg::CfgOptions; | 15 | use ra_cfg::CfgOptions; |
16 | use ra_db::{CrateGraph, CrateId, Edition, Env, FileId}; | 16 | use ra_db::{CrateGraph, CrateId, CrateName, Edition, Env, FileId}; |
17 | use rustc_hash::FxHashMap; | 17 | use rustc_hash::FxHashMap; |
18 | use serde_json::from_reader; | 18 | use serde_json::from_reader; |
19 | 19 | ||
@@ -177,7 +177,9 @@ impl ProjectWorkspace { | |||
177 | if let (Some(&from), Some(&to)) = | 177 | if let (Some(&from), Some(&to)) = |
178 | (crates.get(&from_crate_id), crates.get(&to_crate_id)) | 178 | (crates.get(&from_crate_id), crates.get(&to_crate_id)) |
179 | { | 179 | { |
180 | if let Err(_) = crate_graph.add_dep(from, dep.name.clone().into(), to) { | 180 | if let Err(_) = |
181 | crate_graph.add_dep(from, CrateName::new(&dep.name).unwrap(), to) | ||
182 | { | ||
181 | log::error!( | 183 | log::error!( |
182 | "cyclic dependency {:?} -> {:?}", | 184 | "cyclic dependency {:?} -> {:?}", |
183 | from_crate_id, | 185 | from_crate_id, |
@@ -215,7 +217,9 @@ impl ProjectWorkspace { | |||
215 | if let (Some(&from), Some(&to)) = | 217 | if let (Some(&from), Some(&to)) = |
216 | (sysroot_crates.get(&from), sysroot_crates.get(&to)) | 218 | (sysroot_crates.get(&from), sysroot_crates.get(&to)) |
217 | { | 219 | { |
218 | if let Err(_) = crate_graph.add_dep(from, name.into(), to) { | 220 | if let Err(_) = |
221 | crate_graph.add_dep(from, CrateName::new(name).unwrap(), to) | ||
222 | { | ||
219 | log::error!("cyclic dependency between sysroot crates") | 223 | log::error!("cyclic dependency between sysroot crates") |
220 | } | 224 | } |
221 | } | 225 | } |
@@ -257,7 +261,7 @@ impl ProjectWorkspace { | |||
257 | if let Some(proc_macro) = libproc_macro { | 261 | if let Some(proc_macro) = libproc_macro { |
258 | if let Err(_) = crate_graph.add_dep( | 262 | if let Err(_) = crate_graph.add_dep( |
259 | crate_id, | 263 | crate_id, |
260 | "proc_macro".into(), | 264 | CrateName::new("proc_macro").unwrap(), |
261 | proc_macro, | 265 | proc_macro, |
262 | ) { | 266 | ) { |
263 | log::error!( | 267 | log::error!( |
@@ -276,9 +280,14 @@ impl ProjectWorkspace { | |||
276 | for &from in pkg_crates.get(&pkg).into_iter().flatten() { | 280 | for &from in pkg_crates.get(&pkg).into_iter().flatten() { |
277 | if let Some(to) = lib_tgt { | 281 | if let Some(to) = lib_tgt { |
278 | if to != from { | 282 | if to != from { |
279 | if let Err(_) = | 283 | if let Err(_) = crate_graph.add_dep( |
280 | crate_graph.add_dep(from, pkg.name(&cargo).into(), to) | 284 | from, |
281 | { | 285 | // For root projects with dashes in their name, |
286 | // cargo metadata does not do any normalization, | ||
287 | // so we do it ourselves currently | ||
288 | CrateName::normalize_dashes(pkg.name(&cargo)), | ||
289 | to, | ||
290 | ) { | ||
282 | log::error!( | 291 | log::error!( |
283 | "cyclic dependency between targets of {}", | 292 | "cyclic dependency between targets of {}", |
284 | pkg.name(&cargo) | 293 | pkg.name(&cargo) |
@@ -289,17 +298,23 @@ impl ProjectWorkspace { | |||
289 | // core is added as a dependency before std in order to | 298 | // core is added as a dependency before std in order to |
290 | // mimic rustcs dependency order | 299 | // mimic rustcs dependency order |
291 | if let Some(core) = libcore { | 300 | if let Some(core) = libcore { |
292 | if let Err(_) = crate_graph.add_dep(from, "core".into(), core) { | 301 | if let Err(_) = |
302 | crate_graph.add_dep(from, CrateName::new("core").unwrap(), core) | ||
303 | { | ||
293 | log::error!("cyclic dependency on core for {}", pkg.name(&cargo)) | 304 | log::error!("cyclic dependency on core for {}", pkg.name(&cargo)) |
294 | } | 305 | } |
295 | } | 306 | } |
296 | if let Some(alloc) = liballoc { | 307 | if let Some(alloc) = liballoc { |
297 | if let Err(_) = crate_graph.add_dep(from, "alloc".into(), alloc) { | 308 | if let Err(_) = |
309 | crate_graph.add_dep(from, CrateName::new("alloc").unwrap(), alloc) | ||
310 | { | ||
298 | log::error!("cyclic dependency on alloc for {}", pkg.name(&cargo)) | 311 | log::error!("cyclic dependency on alloc for {}", pkg.name(&cargo)) |
299 | } | 312 | } |
300 | } | 313 | } |
301 | if let Some(std) = libstd { | 314 | if let Some(std) = libstd { |
302 | if let Err(_) = crate_graph.add_dep(from, "std".into(), std) { | 315 | if let Err(_) = |
316 | crate_graph.add_dep(from, CrateName::new("std").unwrap(), std) | ||
317 | { | ||
303 | log::error!("cyclic dependency on std for {}", pkg.name(&cargo)) | 318 | log::error!("cyclic dependency on std for {}", pkg.name(&cargo)) |
304 | } | 319 | } |
305 | } | 320 | } |
@@ -312,9 +327,11 @@ impl ProjectWorkspace { | |||
312 | for dep in pkg.dependencies(&cargo) { | 327 | for dep in pkg.dependencies(&cargo) { |
313 | if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) { | 328 | if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) { |
314 | for &from in pkg_crates.get(&pkg).into_iter().flatten() { | 329 | for &from in pkg_crates.get(&pkg).into_iter().flatten() { |
315 | if let Err(_) = | 330 | if let Err(_) = crate_graph.add_dep( |
316 | crate_graph.add_dep(from, dep.name.clone().into(), to) | 331 | from, |
317 | { | 332 | CrateName::new(&dep.name).unwrap(), |
333 | to, | ||
334 | ) { | ||
318 | log::error!( | 335 | log::error!( |
319 | "cyclic dependency {} -> {}", | 336 | "cyclic dependency {} -> {}", |
320 | pkg.name(&cargo), | 337 | pkg.name(&cargo), |
diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml index c5a3d1999..83db943fe 100644 --- a/crates/ra_syntax/Cargo.toml +++ b/crates/ra_syntax/Cargo.toml | |||
@@ -4,7 +4,7 @@ name = "ra_syntax" | |||
4 | version = "0.1.0" | 4 | version = "0.1.0" |
5 | authors = ["rust-analyzer developers"] | 5 | authors = ["rust-analyzer developers"] |
6 | license = "MIT OR Apache-2.0" | 6 | license = "MIT OR Apache-2.0" |
7 | description = "Comment and whitespace preserving parser for the Rust langauge" | 7 | description = "Comment and whitespace preserving parser for the Rust language" |
8 | repository = "https://github.com/rust-analyzer/rust-analyzer" | 8 | repository = "https://github.com/rust-analyzer/rust-analyzer" |
9 | 9 | ||
10 | [lib] | 10 | [lib] |
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs index 30a479f01..acf677e7d 100644 --- a/crates/ra_syntax/src/algo.rs +++ b/crates/ra_syntax/src/algo.rs | |||
@@ -81,7 +81,7 @@ impl TreeDiff { | |||
81 | /// Specifically, returns a map whose keys are descendants of `from` and values | 81 | /// Specifically, returns a map whose keys are descendants of `from` and values |
82 | /// are descendants of `to`, such that `replace_descendants(from, map) == to`. | 82 | /// are descendants of `to`, such that `replace_descendants(from, map) == to`. |
83 | /// | 83 | /// |
84 | /// A trivial solution is a singletom map `{ from: to }`, but this function | 84 | /// A trivial solution is a singleton map `{ from: to }`, but this function |
85 | /// tries to find a more fine-grained diff. | 85 | /// tries to find a more fine-grained diff. |
86 | pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff { | 86 | pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff { |
87 | let mut buf = FxHashMap::default(); | 87 | let mut buf = FxHashMap::default(); |
diff --git a/crates/ra_syntax/src/ast/expr_extensions.rs b/crates/ra_syntax/src/ast/expr_extensions.rs index 539759450..2e50a095c 100644 --- a/crates/ra_syntax/src/ast/expr_extensions.rs +++ b/crates/ra_syntax/src/ast/expr_extensions.rs | |||
@@ -7,6 +7,21 @@ use crate::{ | |||
7 | SyntaxToken, T, | 7 | SyntaxToken, T, |
8 | }; | 8 | }; |
9 | 9 | ||
10 | impl ast::Expr { | ||
11 | pub fn is_block_like(&self) -> bool { | ||
12 | match self { | ||
13 | ast::Expr::IfExpr(_) | ||
14 | | ast::Expr::LoopExpr(_) | ||
15 | | ast::Expr::ForExpr(_) | ||
16 | | ast::Expr::WhileExpr(_) | ||
17 | | ast::Expr::BlockExpr(_) | ||
18 | | ast::Expr::MatchExpr(_) | ||
19 | | ast::Expr::TryBlockExpr(_) => true, | ||
20 | _ => false, | ||
21 | } | ||
22 | } | ||
23 | } | ||
24 | |||
10 | #[derive(Debug, Clone, PartialEq, Eq)] | 25 | #[derive(Debug, Clone, PartialEq, Eq)] |
11 | pub enum ElseBranch { | 26 | pub enum ElseBranch { |
12 | Block(ast::BlockExpr), | 27 | Block(ast::BlockExpr), |
diff --git a/crates/ra_syntax/src/ast/generated.rs b/crates/ra_syntax/src/ast/generated.rs index 33d5578e7..435135f92 100644 --- a/crates/ra_syntax/src/ast/generated.rs +++ b/crates/ra_syntax/src/ast/generated.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | //! Generated file, do not edit by hand, see `crate/ra_tools/src/codegen` | 1 | //! Generated file, do not edit by hand, see `xtask/src/codegen` |
2 | 2 | ||
3 | use crate::{ | 3 | use crate::{ |
4 | ast::{self, AstChildren, AstNode}, | 4 | ast::{self, AstChildren, AstNode}, |
diff --git a/crates/ra_syntax/src/ast/make.rs b/crates/ra_syntax/src/ast/make.rs index 36e648180..862eb1172 100644 --- a/crates/ra_syntax/src/ast/make.rs +++ b/crates/ra_syntax/src/ast/make.rs | |||
@@ -2,7 +2,7 @@ | |||
2 | //! of smaller pieces. | 2 | //! of smaller pieces. |
3 | use itertools::Itertools; | 3 | use itertools::Itertools; |
4 | 4 | ||
5 | use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxToken}; | 5 | use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, SyntaxToken}; |
6 | 6 | ||
7 | pub fn name(text: &str) -> ast::Name { | 7 | pub fn name(text: &str) -> ast::Name { |
8 | ast_from_text(&format!("mod {};", text)) | 8 | ast_from_text(&format!("mod {};", text)) |
@@ -33,6 +33,21 @@ pub fn record_field(name: ast::NameRef, expr: Option<ast::Expr>) -> ast::RecordF | |||
33 | } | 33 | } |
34 | } | 34 | } |
35 | 35 | ||
36 | pub fn block_expr( | ||
37 | stmts: impl IntoIterator<Item = ast::Stmt>, | ||
38 | tail_expr: Option<ast::Expr>, | ||
39 | ) -> ast::BlockExpr { | ||
40 | let mut text = "{\n".to_string(); | ||
41 | for stmt in stmts.into_iter() { | ||
42 | text += &format!(" {}\n", stmt.syntax()); | ||
43 | } | ||
44 | if let Some(tail_expr) = tail_expr { | ||
45 | text += &format!(" {}\n", tail_expr.syntax()) | ||
46 | } | ||
47 | text += "}"; | ||
48 | ast_from_text(&format!("fn f() {}", text)) | ||
49 | } | ||
50 | |||
36 | pub fn block_from_expr(e: ast::Expr) -> ast::Block { | 51 | pub fn block_from_expr(e: ast::Expr) -> ast::Block { |
37 | return from_text(&format!("{{ {} }}", e.syntax())); | 52 | return from_text(&format!("{{ {} }}", e.syntax())); |
38 | 53 | ||
@@ -62,6 +77,13 @@ pub fn expr_return() -> ast::Expr { | |||
62 | pub fn expr_match(expr: ast::Expr, match_arm_list: ast::MatchArmList) -> ast::Expr { | 77 | pub fn expr_match(expr: ast::Expr, match_arm_list: ast::MatchArmList) -> ast::Expr { |
63 | expr_from_text(&format!("match {} {}", expr.syntax(), match_arm_list.syntax())) | 78 | expr_from_text(&format!("match {} {}", expr.syntax(), match_arm_list.syntax())) |
64 | } | 79 | } |
80 | pub fn expr_if(condition: ast::Expr, then_branch: ast::BlockExpr) -> ast::Expr { | ||
81 | expr_from_text(&format!("if {} {}", condition.syntax(), then_branch.syntax())) | ||
82 | } | ||
83 | pub fn expr_prefix(op: SyntaxKind, expr: ast::Expr) -> ast::Expr { | ||
84 | let token = token(op); | ||
85 | expr_from_text(&format!("{}{}", token, expr.syntax())) | ||
86 | } | ||
65 | fn expr_from_text(text: &str) -> ast::Expr { | 87 | fn expr_from_text(text: &str) -> ast::Expr { |
66 | ast_from_text(&format!("const C: () = {};", text)) | 88 | ast_from_text(&format!("const C: () = {};", text)) |
67 | } | 89 | } |
@@ -122,11 +144,18 @@ pub fn match_arm(pats: impl IntoIterator<Item = ast::Pat>, expr: ast::Expr) -> a | |||
122 | } | 144 | } |
123 | 145 | ||
124 | pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList { | 146 | pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList { |
125 | let arms_str = arms.into_iter().map(|arm| format!("\n {}", arm.syntax())).join(","); | 147 | let arms_str = arms |
126 | return from_text(&format!("{},\n", arms_str)); | 148 | .into_iter() |
149 | .map(|arm| { | ||
150 | let needs_comma = arm.expr().map_or(true, |it| !it.is_block_like()); | ||
151 | let comma = if needs_comma { "," } else { "" }; | ||
152 | format!(" {}{}\n", arm.syntax(), comma) | ||
153 | }) | ||
154 | .collect::<String>(); | ||
155 | return from_text(&format!("{}", arms_str)); | ||
127 | 156 | ||
128 | fn from_text(text: &str) -> ast::MatchArmList { | 157 | fn from_text(text: &str) -> ast::MatchArmList { |
129 | ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text)) | 158 | ast_from_text(&format!("fn f() {{ match () {{\n{}}} }}", text)) |
130 | } | 159 | } |
131 | } | 160 | } |
132 | 161 | ||
@@ -151,14 +180,6 @@ pub fn where_clause(preds: impl IntoIterator<Item = ast::WherePred>) -> ast::Whe | |||
151 | } | 180 | } |
152 | } | 181 | } |
153 | 182 | ||
154 | pub fn if_expression(condition: &ast::Expr, statement: &str) -> ast::IfExpr { | ||
155 | ast_from_text(&format!( | ||
156 | "fn f() {{ if !{} {{\n {}\n}}\n}}", | ||
157 | condition.syntax().text(), | ||
158 | statement | ||
159 | )) | ||
160 | } | ||
161 | |||
162 | pub fn let_stmt(pattern: ast::Pat, initializer: Option<ast::Expr>) -> ast::LetStmt { | 183 | pub fn let_stmt(pattern: ast::Pat, initializer: Option<ast::Expr>) -> ast::LetStmt { |
163 | let text = match initializer { | 184 | let text = match initializer { |
164 | Some(it) => format!("let {} = {};", pattern.syntax(), it.syntax()), | 185 | Some(it) => format!("let {} = {};", pattern.syntax(), it.syntax()), |
@@ -166,6 +187,9 @@ pub fn let_stmt(pattern: ast::Pat, initializer: Option<ast::Expr>) -> ast::LetSt | |||
166 | }; | 187 | }; |
167 | ast_from_text(&format!("fn f() {{ {} }}", text)) | 188 | ast_from_text(&format!("fn f() {{ {} }}", text)) |
168 | } | 189 | } |
190 | pub fn expr_stmt(expr: ast::Expr) -> ast::ExprStmt { | ||
191 | ast_from_text(&format!("fn f() {{ {}; }}", expr.syntax())) | ||
192 | } | ||
169 | 193 | ||
170 | pub fn token(kind: SyntaxKind) -> SyntaxToken { | 194 | pub fn token(kind: SyntaxKind) -> SyntaxToken { |
171 | tokens::SOURCE_FILE | 195 | tokens::SOURCE_FILE |
@@ -179,7 +203,16 @@ pub fn token(kind: SyntaxKind) -> SyntaxToken { | |||
179 | 203 | ||
180 | fn ast_from_text<N: AstNode>(text: &str) -> N { | 204 | fn ast_from_text<N: AstNode>(text: &str) -> N { |
181 | let parse = SourceFile::parse(text); | 205 | let parse = SourceFile::parse(text); |
182 | parse.tree().syntax().descendants().find_map(N::cast).unwrap() | 206 | let node = parse.tree().syntax().descendants().find_map(N::cast).unwrap(); |
207 | let node = node.syntax().clone(); | ||
208 | let node = unroot(node); | ||
209 | let node = N::cast(node).unwrap(); | ||
210 | assert_eq!(node.syntax().text_range().start(), 0.into()); | ||
211 | node | ||
212 | } | ||
213 | |||
214 | fn unroot(n: SyntaxNode) -> SyntaxNode { | ||
215 | SyntaxNode::new_root(n.green().clone()) | ||
183 | } | 216 | } |
184 | 217 | ||
185 | pub mod tokens { | 218 | pub mod tokens { |
@@ -187,7 +220,7 @@ pub mod tokens { | |||
187 | use once_cell::sync::Lazy; | 220 | use once_cell::sync::Lazy; |
188 | 221 | ||
189 | pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = | 222 | pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = |
190 | Lazy::new(|| SourceFile::parse("const C: <()>::Item = (1 != 1, 2 == 2)\n;")); | 223 | Lazy::new(|| SourceFile::parse("const C: <()>::Item = (1 != 1, 2 == 2, !true)\n;")); |
191 | 224 | ||
192 | pub fn comma() -> SyntaxToken { | 225 | pub fn comma() -> SyntaxToken { |
193 | SOURCE_FILE | 226 | SOURCE_FILE |
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs index 9931fec84..f8f4b64c1 100644 --- a/crates/ra_syntax/src/lib.rs +++ b/crates/ra_syntax/src/lib.rs | |||
@@ -41,7 +41,9 @@ use crate::syntax_node::GreenNode; | |||
41 | pub use crate::{ | 41 | pub use crate::{ |
42 | algo::InsertPosition, | 42 | algo::InsertPosition, |
43 | ast::{AstNode, AstToken}, | 43 | ast::{AstNode, AstToken}, |
44 | parsing::{classify_literal, tokenize, Token}, | 44 | parsing::{ |
45 | lex_single_syntax_kind, lex_single_valid_syntax_kind, tokenize, Token, TokenizeError, | ||
46 | }, | ||
45 | ptr::{AstPtr, SyntaxNodePtr}, | 47 | ptr::{AstPtr, SyntaxNodePtr}, |
46 | syntax_error::{Location, SyntaxError, SyntaxErrorKind}, | 48 | syntax_error::{Location, SyntaxError, SyntaxErrorKind}, |
47 | syntax_node::{ | 49 | syntax_node::{ |
diff --git a/crates/ra_syntax/src/parsing.rs b/crates/ra_syntax/src/parsing.rs index 0387f0378..e5eb80850 100644 --- a/crates/ra_syntax/src/parsing.rs +++ b/crates/ra_syntax/src/parsing.rs | |||
@@ -7,15 +7,23 @@ mod text_tree_sink; | |||
7 | mod reparsing; | 7 | mod reparsing; |
8 | 8 | ||
9 | use crate::{syntax_node::GreenNode, SyntaxError}; | 9 | use crate::{syntax_node::GreenNode, SyntaxError}; |
10 | use text_token_source::TextTokenSource; | ||
11 | use text_tree_sink::TextTreeSink; | ||
10 | 12 | ||
11 | pub use self::lexer::{classify_literal, tokenize, Token}; | 13 | pub use lexer::*; |
12 | 14 | ||
13 | pub(crate) use self::reparsing::incremental_reparse; | 15 | pub(crate) use self::reparsing::incremental_reparse; |
14 | 16 | ||
15 | pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) { | 17 | pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) { |
16 | let tokens = tokenize(&text); | 18 | let (tokens, lexer_errors) = tokenize(&text); |
17 | let mut token_source = text_token_source::TextTokenSource::new(text, &tokens); | 19 | |
18 | let mut tree_sink = text_tree_sink::TextTreeSink::new(text, &tokens); | 20 | let mut token_source = TextTokenSource::new(text, &tokens); |
21 | let mut tree_sink = TextTreeSink::new(text, &tokens); | ||
22 | |||
19 | ra_parser::parse(&mut token_source, &mut tree_sink); | 23 | ra_parser::parse(&mut token_source, &mut tree_sink); |
20 | tree_sink.finish() | 24 | |
25 | let (tree, mut parser_errors) = tree_sink.finish(); | ||
26 | parser_errors.extend(lexer_errors); | ||
27 | |||
28 | (tree, parser_errors) | ||
21 | } | 29 | } |
diff --git a/crates/ra_syntax/src/parsing/lexer.rs b/crates/ra_syntax/src/parsing/lexer.rs index 6d839208d..f889e6a1d 100644 --- a/crates/ra_syntax/src/parsing/lexer.rs +++ b/crates/ra_syntax/src/parsing/lexer.rs | |||
@@ -1,8 +1,10 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! Lexer analyzes raw input string and produces lexemes (tokens). |
2 | //! It is just a bridge to `rustc_lexer`. | ||
2 | 3 | ||
3 | use crate::{ | 4 | use crate::{ |
5 | SyntaxError, SyntaxErrorKind, | ||
4 | SyntaxKind::{self, *}, | 6 | SyntaxKind::{self, *}, |
5 | TextUnit, | 7 | TextRange, TextUnit, |
6 | }; | 8 | }; |
7 | 9 | ||
8 | /// A token of Rust source. | 10 | /// A token of Rust source. |
@@ -14,91 +16,261 @@ pub struct Token { | |||
14 | pub len: TextUnit, | 16 | pub len: TextUnit, |
15 | } | 17 | } |
16 | 18 | ||
17 | fn match_literal_kind(kind: rustc_lexer::LiteralKind) -> SyntaxKind { | 19 | /// Break a string up into its component tokens. |
18 | match kind { | 20 | /// Beware that it checks for shebang first and its length contributes to resulting |
19 | rustc_lexer::LiteralKind::Int { .. } => INT_NUMBER, | 21 | /// token offsets. |
20 | rustc_lexer::LiteralKind::Float { .. } => FLOAT_NUMBER, | 22 | pub fn tokenize(text: &str) -> (Vec<Token>, Vec<SyntaxError>) { |
21 | rustc_lexer::LiteralKind::Char { .. } => CHAR, | 23 | // non-empty string is a precondition of `rustc_lexer::strip_shebang()`. |
22 | rustc_lexer::LiteralKind::Byte { .. } => BYTE, | 24 | if text.is_empty() { |
23 | rustc_lexer::LiteralKind::Str { .. } => STRING, | 25 | return Default::default(); |
24 | rustc_lexer::LiteralKind::ByteStr { .. } => BYTE_STRING, | 26 | } |
25 | rustc_lexer::LiteralKind::RawStr { .. } => RAW_STRING, | 27 | |
26 | rustc_lexer::LiteralKind::RawByteStr { .. } => RAW_BYTE_STRING, | 28 | let mut tokens = Vec::new(); |
29 | let mut errors = Vec::new(); | ||
30 | |||
31 | let mut offset: usize = rustc_lexer::strip_shebang(text) | ||
32 | .map(|shebang_len| { | ||
33 | tokens.push(Token { kind: SHEBANG, len: TextUnit::from_usize(shebang_len) }); | ||
34 | shebang_len | ||
35 | }) | ||
36 | .unwrap_or(0); | ||
37 | |||
38 | let text_without_shebang = &text[offset..]; | ||
39 | |||
40 | for rustc_token in rustc_lexer::tokenize(text_without_shebang) { | ||
41 | let token_len = TextUnit::from_usize(rustc_token.len); | ||
42 | let token_range = TextRange::offset_len(TextUnit::from_usize(offset), token_len); | ||
43 | |||
44 | let (syntax_kind, error) = | ||
45 | rustc_token_kind_to_syntax_kind(&rustc_token.kind, &text[token_range]); | ||
46 | |||
47 | tokens.push(Token { kind: syntax_kind, len: token_len }); | ||
48 | |||
49 | if let Some(error) = error { | ||
50 | errors.push(SyntaxError::new(SyntaxErrorKind::TokenizeError(error), token_range)); | ||
51 | } | ||
52 | |||
53 | offset += rustc_token.len; | ||
27 | } | 54 | } |
55 | |||
56 | (tokens, errors) | ||
57 | } | ||
58 | |||
59 | /// Returns `SyntaxKind` and `Option<SyntaxError>` of the first token | ||
60 | /// encountered at the beginning of the string. | ||
61 | /// | ||
62 | /// Returns `None` if the string contains zero *or two or more* tokens. | ||
63 | /// The token is malformed if the returned error is not `None`. | ||
64 | /// | ||
65 | /// Beware that unescape errors are not checked at tokenization time. | ||
66 | pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxError>)> { | ||
67 | lex_first_token(text) | ||
68 | .filter(|(token, _)| token.len.to_usize() == text.len()) | ||
69 | .map(|(token, error)| (token.kind, error)) | ||
70 | } | ||
71 | |||
72 | /// The same as `lex_single_syntax_kind()` but returns only `SyntaxKind` and | ||
73 | /// returns `None` if any tokenization error occurred. | ||
74 | /// | ||
75 | /// Beware that unescape errors are not checked at tokenization time. | ||
76 | pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> { | ||
77 | lex_first_token(text) | ||
78 | .filter(|(token, error)| !error.is_some() && token.len.to_usize() == text.len()) | ||
79 | .map(|(token, _error)| token.kind) | ||
28 | } | 80 | } |
29 | 81 | ||
30 | /// Break a string up into its component tokens | 82 | /// Returns `SyntaxKind` and `Option<SyntaxError>` of the first token |
31 | pub fn tokenize(text: &str) -> Vec<Token> { | 83 | /// encountered at the beginning of the string. |
84 | /// | ||
85 | /// Returns `None` if the string contains zero tokens or if the token was parsed | ||
86 | /// with an error. | ||
87 | /// The token is malformed if the returned error is not `None`. | ||
88 | /// | ||
89 | /// Beware that unescape errors are not checked at tokenization time. | ||
90 | fn lex_first_token(text: &str) -> Option<(Token, Option<SyntaxError>)> { | ||
91 | // non-empty string is a precondition of `rustc_lexer::first_token()`. | ||
32 | if text.is_empty() { | 92 | if text.is_empty() { |
33 | return vec![]; | 93 | return None; |
34 | } | ||
35 | let mut text = text; | ||
36 | let mut acc = Vec::new(); | ||
37 | if let Some(len) = rustc_lexer::strip_shebang(text) { | ||
38 | acc.push(Token { kind: SHEBANG, len: TextUnit::from_usize(len) }); | ||
39 | text = &text[len..]; | ||
40 | } | 94 | } |
41 | while !text.is_empty() { | 95 | |
42 | let rustc_token = rustc_lexer::first_token(text); | 96 | let rustc_token = rustc_lexer::first_token(text); |
43 | let kind = match rustc_token.kind { | 97 | let (syntax_kind, error) = rustc_token_kind_to_syntax_kind(&rustc_token.kind, text); |
44 | rustc_lexer::TokenKind::LineComment => COMMENT, | 98 | |
45 | rustc_lexer::TokenKind::BlockComment { .. } => COMMENT, | 99 | let token = Token { kind: syntax_kind, len: TextUnit::from_usize(rustc_token.len) }; |
46 | rustc_lexer::TokenKind::Whitespace => WHITESPACE, | 100 | let error = error.map(|error| { |
47 | rustc_lexer::TokenKind::Ident => { | 101 | SyntaxError::new( |
48 | let token_text = &text[..rustc_token.len]; | 102 | SyntaxErrorKind::TokenizeError(error), |
103 | TextRange::from_to(TextUnit::from(0), TextUnit::of_str(text)), | ||
104 | ) | ||
105 | }); | ||
106 | |||
107 | Some((token, error)) | ||
108 | } | ||
109 | |||
110 | // FIXME: simplify TokenizeError to `SyntaxError(String, TextRange)` as per @matklad advice: | ||
111 | // https://github.com/rust-analyzer/rust-analyzer/pull/2911/files#r371175067 | ||
112 | |||
113 | /// Describes the values of `SyntaxErrorKind::TokenizeError` enum variant. | ||
114 | /// It describes all the types of errors that may happen during the tokenization | ||
115 | /// of Rust source. | ||
116 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
117 | pub enum TokenizeError { | ||
118 | /// Base prefix was provided, but there were no digits | ||
119 | /// after it, e.g. `0x`, `0b`. | ||
120 | EmptyInt, | ||
121 | /// Float exponent lacks digits e.g. `12.34e+`, `12.3E+`, `12e-`, `1_E-`, | ||
122 | EmptyExponent, | ||
123 | |||
124 | /// Block comment lacks trailing delimiter `*/` | ||
125 | UnterminatedBlockComment, | ||
126 | /// Character literal lacks trailing delimiter `'` | ||
127 | UnterminatedChar, | ||
128 | /// Characterish byte literal lacks trailing delimiter `'` | ||
129 | UnterminatedByte, | ||
130 | /// String literal lacks trailing delimiter `"` | ||
131 | UnterminatedString, | ||
132 | /// Byte string literal lacks trailing delimiter `"` | ||
133 | UnterminatedByteString, | ||
134 | /// Raw literal lacks trailing delimiter e.g. `"##` | ||
135 | UnterminatedRawString, | ||
136 | /// Raw byte string literal lacks trailing delimiter e.g. `"##` | ||
137 | UnterminatedRawByteString, | ||
138 | |||
139 | /// Raw string lacks a quote after the pound characters e.g. `r###` | ||
140 | UnstartedRawString, | ||
141 | /// Raw byte string lacks a quote after the pound characters e.g. `br###` | ||
142 | UnstartedRawByteString, | ||
143 | |||
144 | /// Lifetime starts with a number e.g. `'4ever` | ||
145 | LifetimeStartsWithNumber, | ||
146 | } | ||
147 | |||
148 | fn rustc_token_kind_to_syntax_kind( | ||
149 | rustc_token_kind: &rustc_lexer::TokenKind, | ||
150 | token_text: &str, | ||
151 | ) -> (SyntaxKind, Option<TokenizeError>) { | ||
152 | // A note on an intended tradeoff: | ||
153 | // We drop some useful information here (see patterns with double dots `..`) | ||
154 | // Storing that info in `SyntaxKind` is not possible due to its layout requirements of | ||
155 | // being `u16` that come from `rowan::SyntaxKind`. | ||
156 | |||
157 | let syntax_kind = { | ||
158 | use rustc_lexer::TokenKind as TK; | ||
159 | use TokenizeError as TE; | ||
160 | |||
161 | match rustc_token_kind { | ||
162 | TK::LineComment => COMMENT, | ||
163 | |||
164 | TK::BlockComment { terminated: true } => COMMENT, | ||
165 | TK::BlockComment { terminated: false } => { | ||
166 | return (COMMENT, Some(TE::UnterminatedBlockComment)); | ||
167 | } | ||
168 | |||
169 | TK::Whitespace => WHITESPACE, | ||
170 | |||
171 | TK::Ident => { | ||
49 | if token_text == "_" { | 172 | if token_text == "_" { |
50 | UNDERSCORE | 173 | UNDERSCORE |
51 | } else { | 174 | } else { |
52 | SyntaxKind::from_keyword(&text[..rustc_token.len]).unwrap_or(IDENT) | 175 | SyntaxKind::from_keyword(token_text).unwrap_or(IDENT) |
53 | } | 176 | } |
54 | } | 177 | } |
55 | rustc_lexer::TokenKind::RawIdent => IDENT, | 178 | |
56 | rustc_lexer::TokenKind::Literal { kind, .. } => match_literal_kind(kind), | 179 | TK::RawIdent => IDENT, |
57 | rustc_lexer::TokenKind::Lifetime { .. } => LIFETIME, | 180 | TK::Literal { kind, .. } => return match_literal_kind(&kind), |
58 | rustc_lexer::TokenKind::Semi => SEMI, | 181 | |
59 | rustc_lexer::TokenKind::Comma => COMMA, | 182 | TK::Lifetime { starts_with_number: false } => LIFETIME, |
60 | rustc_lexer::TokenKind::Dot => DOT, | 183 | TK::Lifetime { starts_with_number: true } => { |
61 | rustc_lexer::TokenKind::OpenParen => L_PAREN, | 184 | return (LIFETIME, Some(TE::LifetimeStartsWithNumber)) |
62 | rustc_lexer::TokenKind::CloseParen => R_PAREN, | 185 | } |
63 | rustc_lexer::TokenKind::OpenBrace => L_CURLY, | 186 | |
64 | rustc_lexer::TokenKind::CloseBrace => R_CURLY, | 187 | TK::Semi => SEMI, |
65 | rustc_lexer::TokenKind::OpenBracket => L_BRACK, | 188 | TK::Comma => COMMA, |
66 | rustc_lexer::TokenKind::CloseBracket => R_BRACK, | 189 | TK::Dot => DOT, |
67 | rustc_lexer::TokenKind::At => AT, | 190 | TK::OpenParen => L_PAREN, |
68 | rustc_lexer::TokenKind::Pound => POUND, | 191 | TK::CloseParen => R_PAREN, |
69 | rustc_lexer::TokenKind::Tilde => TILDE, | 192 | TK::OpenBrace => L_CURLY, |
70 | rustc_lexer::TokenKind::Question => QUESTION, | 193 | TK::CloseBrace => R_CURLY, |
71 | rustc_lexer::TokenKind::Colon => COLON, | 194 | TK::OpenBracket => L_BRACK, |
72 | rustc_lexer::TokenKind::Dollar => DOLLAR, | 195 | TK::CloseBracket => R_BRACK, |
73 | rustc_lexer::TokenKind::Eq => EQ, | 196 | TK::At => AT, |
74 | rustc_lexer::TokenKind::Not => EXCL, | 197 | TK::Pound => POUND, |
75 | rustc_lexer::TokenKind::Lt => L_ANGLE, | 198 | TK::Tilde => TILDE, |
76 | rustc_lexer::TokenKind::Gt => R_ANGLE, | 199 | TK::Question => QUESTION, |
77 | rustc_lexer::TokenKind::Minus => MINUS, | 200 | TK::Colon => COLON, |
78 | rustc_lexer::TokenKind::And => AMP, | 201 | TK::Dollar => DOLLAR, |
79 | rustc_lexer::TokenKind::Or => PIPE, | 202 | TK::Eq => EQ, |
80 | rustc_lexer::TokenKind::Plus => PLUS, | 203 | TK::Not => EXCL, |
81 | rustc_lexer::TokenKind::Star => STAR, | 204 | TK::Lt => L_ANGLE, |
82 | rustc_lexer::TokenKind::Slash => SLASH, | 205 | TK::Gt => R_ANGLE, |
83 | rustc_lexer::TokenKind::Caret => CARET, | 206 | TK::Minus => MINUS, |
84 | rustc_lexer::TokenKind::Percent => PERCENT, | 207 | TK::And => AMP, |
85 | rustc_lexer::TokenKind::Unknown => ERROR, | 208 | TK::Or => PIPE, |
209 | TK::Plus => PLUS, | ||
210 | TK::Star => STAR, | ||
211 | TK::Slash => SLASH, | ||
212 | TK::Caret => CARET, | ||
213 | TK::Percent => PERCENT, | ||
214 | TK::Unknown => ERROR, | ||
215 | } | ||
216 | }; | ||
217 | |||
218 | return (syntax_kind, None); | ||
219 | |||
220 | fn match_literal_kind(kind: &rustc_lexer::LiteralKind) -> (SyntaxKind, Option<TokenizeError>) { | ||
221 | use rustc_lexer::LiteralKind as LK; | ||
222 | use TokenizeError as TE; | ||
223 | |||
224 | #[rustfmt::skip] | ||
225 | let syntax_kind = match *kind { | ||
226 | LK::Int { empty_int: false, .. } => INT_NUMBER, | ||
227 | LK::Int { empty_int: true, .. } => { | ||
228 | return (INT_NUMBER, Some(TE::EmptyInt)) | ||
229 | } | ||
230 | |||
231 | LK::Float { empty_exponent: false, .. } => FLOAT_NUMBER, | ||
232 | LK::Float { empty_exponent: true, .. } => { | ||
233 | return (FLOAT_NUMBER, Some(TE::EmptyExponent)) | ||
234 | } | ||
235 | |||
236 | LK::Char { terminated: true } => CHAR, | ||
237 | LK::Char { terminated: false } => { | ||
238 | return (CHAR, Some(TE::UnterminatedChar)) | ||
239 | } | ||
240 | |||
241 | LK::Byte { terminated: true } => BYTE, | ||
242 | LK::Byte { terminated: false } => { | ||
243 | return (BYTE, Some(TE::UnterminatedByte)) | ||
244 | } | ||
245 | |||
246 | LK::Str { terminated: true } => STRING, | ||
247 | LK::Str { terminated: false } => { | ||
248 | return (STRING, Some(TE::UnterminatedString)) | ||
249 | } | ||
250 | |||
251 | |||
252 | LK::ByteStr { terminated: true } => BYTE_STRING, | ||
253 | LK::ByteStr { terminated: false } => { | ||
254 | return (BYTE_STRING, Some(TE::UnterminatedByteString)) | ||
255 | } | ||
256 | |||
257 | LK::RawStr { started: true, terminated: true, .. } => RAW_STRING, | ||
258 | LK::RawStr { started: true, terminated: false, .. } => { | ||
259 | return (RAW_STRING, Some(TE::UnterminatedRawString)) | ||
260 | } | ||
261 | LK::RawStr { started: false, .. } => { | ||
262 | return (RAW_STRING, Some(TE::UnstartedRawString)) | ||
263 | } | ||
264 | |||
265 | LK::RawByteStr { started: true, terminated: true, .. } => RAW_BYTE_STRING, | ||
266 | LK::RawByteStr { started: true, terminated: false, .. } => { | ||
267 | return (RAW_BYTE_STRING, Some(TE::UnterminatedRawByteString)) | ||
268 | } | ||
269 | LK::RawByteStr { started: false, .. } => { | ||
270 | return (RAW_BYTE_STRING, Some(TE::UnstartedRawByteString)) | ||
271 | } | ||
86 | }; | 272 | }; |
87 | let token = Token { kind, len: TextUnit::from_usize(rustc_token.len) }; | ||
88 | acc.push(token); | ||
89 | text = &text[rustc_token.len..]; | ||
90 | } | ||
91 | acc | ||
92 | } | ||
93 | 273 | ||
94 | pub fn classify_literal(text: &str) -> Option<Token> { | 274 | (syntax_kind, None) |
95 | let t = rustc_lexer::first_token(text); | ||
96 | if t.len != text.len() { | ||
97 | return None; | ||
98 | } | 275 | } |
99 | let kind = match t.kind { | ||
100 | rustc_lexer::TokenKind::Literal { kind, .. } => match_literal_kind(kind), | ||
101 | _ => return None, | ||
102 | }; | ||
103 | Some(Token { kind, len: TextUnit::from_usize(t.len) }) | ||
104 | } | 276 | } |
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs index 06bdda11d..a86da0675 100644 --- a/crates/ra_syntax/src/parsing/reparsing.rs +++ b/crates/ra_syntax/src/parsing/reparsing.rs | |||
@@ -12,7 +12,7 @@ use ra_text_edit::AtomTextEdit; | |||
12 | use crate::{ | 12 | use crate::{ |
13 | algo, | 13 | algo, |
14 | parsing::{ | 14 | parsing::{ |
15 | lexer::{tokenize, Token}, | 15 | lexer::{lex_single_syntax_kind, tokenize, Token}, |
16 | text_token_source::TextTokenSource, | 16 | text_token_source::TextTokenSource, |
17 | text_tree_sink::TextTreeSink, | 17 | text_tree_sink::TextTreeSink, |
18 | }, | 18 | }, |
@@ -41,37 +41,42 @@ fn reparse_token<'node>( | |||
41 | root: &'node SyntaxNode, | 41 | root: &'node SyntaxNode, |
42 | edit: &AtomTextEdit, | 42 | edit: &AtomTextEdit, |
43 | ) -> Option<(GreenNode, TextRange)> { | 43 | ) -> Option<(GreenNode, TextRange)> { |
44 | let token = algo::find_covering_element(root, edit.delete).as_token()?.clone(); | 44 | let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone(); |
45 | match token.kind() { | 45 | let prev_token_kind = prev_token.kind(); |
46 | match prev_token_kind { | ||
46 | WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => { | 47 | WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => { |
47 | if token.kind() == WHITESPACE || token.kind() == COMMENT { | 48 | if prev_token_kind == WHITESPACE || prev_token_kind == COMMENT { |
48 | // removing a new line may extend previous token | 49 | // removing a new line may extend previous token |
49 | if token.text().to_string()[edit.delete - token.text_range().start()].contains('\n') | 50 | let deleted_range = edit.delete - prev_token.text_range().start(); |
50 | { | 51 | if prev_token.text()[deleted_range].contains('\n') { |
51 | return None; | 52 | return None; |
52 | } | 53 | } |
53 | } | 54 | } |
54 | 55 | ||
55 | let text = get_text_after_edit(token.clone().into(), &edit); | 56 | let mut new_text = get_text_after_edit(prev_token.clone().into(), &edit); |
56 | let lex_tokens = tokenize(&text); | 57 | let (new_token_kind, _error) = lex_single_syntax_kind(&new_text)?; |
57 | let lex_token = match lex_tokens[..] { | ||
58 | [lex_token] if lex_token.kind == token.kind() => lex_token, | ||
59 | _ => return None, | ||
60 | }; | ||
61 | 58 | ||
62 | if lex_token.kind == IDENT && is_contextual_kw(&text) { | 59 | if new_token_kind != prev_token_kind |
60 | || (new_token_kind == IDENT && is_contextual_kw(&new_text)) | ||
61 | { | ||
63 | return None; | 62 | return None; |
64 | } | 63 | } |
65 | 64 | ||
66 | if let Some(next_char) = root.text().char_at(token.text_range().end()) { | 65 | // Check that edited token is not a part of the bigger token. |
67 | let tokens_with_next_char = tokenize(&format!("{}{}", text, next_char)); | 66 | // E.g. if for source code `bruh"str"` the user removed `ruh`, then |
68 | if tokens_with_next_char.len() == 1 { | 67 | // `b` no longer remains an identifier, but becomes a part of byte string literal |
68 | if let Some(next_char) = root.text().char_at(prev_token.text_range().end()) { | ||
69 | new_text.push(next_char); | ||
70 | let token_with_next_char = lex_single_syntax_kind(&new_text); | ||
71 | if let Some((_kind, _error)) = token_with_next_char { | ||
69 | return None; | 72 | return None; |
70 | } | 73 | } |
74 | new_text.pop(); | ||
71 | } | 75 | } |
72 | 76 | ||
73 | let new_token = GreenToken::new(rowan::SyntaxKind(token.kind().into()), text.into()); | 77 | let new_token = |
74 | Some((token.replace_with(new_token), token.text_range())) | 78 | GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), new_text.into()); |
79 | Some((prev_token.replace_with(new_token), prev_token.text_range())) | ||
75 | } | 80 | } |
76 | _ => None, | 81 | _ => None, |
77 | } | 82 | } |
@@ -83,20 +88,26 @@ fn reparse_block<'node>( | |||
83 | ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { | 88 | ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { |
84 | let (node, reparser) = find_reparsable_node(root, edit.delete)?; | 89 | let (node, reparser) = find_reparsable_node(root, edit.delete)?; |
85 | let text = get_text_after_edit(node.clone().into(), &edit); | 90 | let text = get_text_after_edit(node.clone().into(), &edit); |
86 | let tokens = tokenize(&text); | 91 | |
92 | let (tokens, new_lexer_errors) = tokenize(&text); | ||
87 | if !is_balanced(&tokens) { | 93 | if !is_balanced(&tokens) { |
88 | return None; | 94 | return None; |
89 | } | 95 | } |
96 | |||
90 | let mut token_source = TextTokenSource::new(&text, &tokens); | 97 | let mut token_source = TextTokenSource::new(&text, &tokens); |
91 | let mut tree_sink = TextTreeSink::new(&text, &tokens); | 98 | let mut tree_sink = TextTreeSink::new(&text, &tokens); |
92 | reparser.parse(&mut token_source, &mut tree_sink); | 99 | reparser.parse(&mut token_source, &mut tree_sink); |
93 | let (green, new_errors) = tree_sink.finish(); | 100 | |
94 | Some((node.replace_with(green), new_errors, node.text_range())) | 101 | let (green, mut new_parser_errors) = tree_sink.finish(); |
102 | new_parser_errors.extend(new_lexer_errors); | ||
103 | |||
104 | Some((node.replace_with(green), new_parser_errors, node.text_range())) | ||
95 | } | 105 | } |
96 | 106 | ||
97 | fn get_text_after_edit(element: SyntaxElement, edit: &AtomTextEdit) -> String { | 107 | fn get_text_after_edit(element: SyntaxElement, edit: &AtomTextEdit) -> String { |
98 | let edit = | 108 | let edit = |
99 | AtomTextEdit::replace(edit.delete - element.text_range().start(), edit.insert.clone()); | 109 | AtomTextEdit::replace(edit.delete - element.text_range().start(), edit.insert.clone()); |
110 | |||
100 | let text = match element { | 111 | let text = match element { |
101 | NodeOrToken::Token(token) => token.text().to_string(), | 112 | NodeOrToken::Token(token) => token.text().to_string(), |
102 | NodeOrToken::Node(node) => node.text().to_string(), | 113 | NodeOrToken::Node(node) => node.text().to_string(), |
@@ -113,6 +124,7 @@ fn is_contextual_kw(text: &str) -> bool { | |||
113 | 124 | ||
114 | fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> { | 125 | fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> { |
115 | let node = algo::find_covering_element(node, range); | 126 | let node = algo::find_covering_element(node, range); |
127 | |||
116 | let mut ancestors = match node { | 128 | let mut ancestors = match node { |
117 | NodeOrToken::Token(it) => it.parent().ancestors(), | 129 | NodeOrToken::Token(it) => it.parent().ancestors(), |
118 | NodeOrToken::Node(it) => it.ancestors(), | 130 | NodeOrToken::Node(it) => it.ancestors(), |
@@ -182,7 +194,6 @@ mod tests { | |||
182 | let fully_reparsed = SourceFile::parse(&after); | 194 | let fully_reparsed = SourceFile::parse(&after); |
183 | let incrementally_reparsed: Parse<SourceFile> = { | 195 | let incrementally_reparsed: Parse<SourceFile> = { |
184 | let f = SourceFile::parse(&before); | 196 | let f = SourceFile::parse(&before); |
185 | let edit = AtomTextEdit { delete: range, insert: replace_with.to_string() }; | ||
186 | let (green, new_errors, range) = | 197 | let (green, new_errors, range) = |
187 | incremental_reparse(f.tree().syntax(), &edit, f.errors.to_vec()).unwrap(); | 198 | incremental_reparse(f.tree().syntax(), &edit, f.errors.to_vec()).unwrap(); |
188 | assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length"); | 199 | assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length"); |
diff --git a/crates/ra_syntax/src/parsing/text_tree_sink.rs b/crates/ra_syntax/src/parsing/text_tree_sink.rs index c36756d6c..dd202601d 100644 --- a/crates/ra_syntax/src/parsing/text_tree_sink.rs +++ b/crates/ra_syntax/src/parsing/text_tree_sink.rs | |||
@@ -92,8 +92,8 @@ impl<'a> TreeSink for TextTreeSink<'a> { | |||
92 | } | 92 | } |
93 | 93 | ||
94 | impl<'a> TextTreeSink<'a> { | 94 | impl<'a> TextTreeSink<'a> { |
95 | pub(super) fn new(text: &'a str, tokens: &'a [Token]) -> TextTreeSink<'a> { | 95 | pub(super) fn new(text: &'a str, tokens: &'a [Token]) -> Self { |
96 | TextTreeSink { | 96 | Self { |
97 | text, | 97 | text, |
98 | tokens, | 98 | tokens, |
99 | text_pos: 0.into(), | 99 | text_pos: 0.into(), |
diff --git a/crates/ra_syntax/src/syntax_error.rs b/crates/ra_syntax/src/syntax_error.rs index 6c171df8d..7f9d36618 100644 --- a/crates/ra_syntax/src/syntax_error.rs +++ b/crates/ra_syntax/src/syntax_error.rs | |||
@@ -4,7 +4,7 @@ use std::fmt; | |||
4 | 4 | ||
5 | use ra_parser::ParseError; | 5 | use ra_parser::ParseError; |
6 | 6 | ||
7 | use crate::{validation::EscapeError, TextRange, TextUnit}; | 7 | use crate::{validation::EscapeError, TextRange, TextUnit, TokenizeError}; |
8 | 8 | ||
9 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 9 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
10 | pub struct SyntaxError { | 10 | pub struct SyntaxError { |
@@ -12,6 +12,10 @@ pub struct SyntaxError { | |||
12 | location: Location, | 12 | location: Location, |
13 | } | 13 | } |
14 | 14 | ||
15 | // FIXME: Location should be just `Location(TextRange)` | ||
16 | // TextUnit enum member just unnecessarily complicates things, | ||
17 | // we shouldn't treat it specially, it is just a `TextRange { start: x, end: x + 1 }` | ||
18 | // see `location_to_range()` in ra_ide/src/diagnostics | ||
15 | #[derive(Clone, PartialEq, Eq, Hash)] | 19 | #[derive(Clone, PartialEq, Eq, Hash)] |
16 | pub enum Location { | 20 | pub enum Location { |
17 | Offset(TextUnit), | 21 | Offset(TextUnit), |
@@ -67,6 +71,10 @@ impl SyntaxError { | |||
67 | 71 | ||
68 | self | 72 | self |
69 | } | 73 | } |
74 | |||
75 | pub fn debug_dump(&self, acc: &mut impl fmt::Write) { | ||
76 | writeln!(acc, "error {:?}: {}", self.location(), self.kind()).unwrap(); | ||
77 | } | ||
70 | } | 78 | } |
71 | 79 | ||
72 | impl fmt::Display for SyntaxError { | 80 | impl fmt::Display for SyntaxError { |
@@ -79,6 +87,10 @@ impl fmt::Display for SyntaxError { | |||
79 | pub enum SyntaxErrorKind { | 87 | pub enum SyntaxErrorKind { |
80 | ParseError(ParseError), | 88 | ParseError(ParseError), |
81 | EscapeError(EscapeError), | 89 | EscapeError(EscapeError), |
90 | TokenizeError(TokenizeError), | ||
91 | // FIXME: the obvious pattern of this enum dictates that the following enum variants | ||
92 | // should be wrapped into something like `SemanticError(SemanticError)` | ||
93 | // or `ValidateError(ValidateError)` or `SemanticValidateError(...)` | ||
82 | InvalidBlockAttr, | 94 | InvalidBlockAttr, |
83 | InvalidMatchInnerAttr, | 95 | InvalidMatchInnerAttr, |
84 | InvalidTupleIndexFormat, | 96 | InvalidTupleIndexFormat, |
@@ -101,6 +113,7 @@ impl fmt::Display for SyntaxErrorKind { | |||
101 | } | 113 | } |
102 | ParseError(msg) => write!(f, "{}", msg.0), | 114 | ParseError(msg) => write!(f, "{}", msg.0), |
103 | EscapeError(err) => write!(f, "{}", err), | 115 | EscapeError(err) => write!(f, "{}", err), |
116 | TokenizeError(err) => write!(f, "{}", err), | ||
104 | VisibilityNotAllowed => { | 117 | VisibilityNotAllowed => { |
105 | write!(f, "unnecessary visibility qualifier") | 118 | write!(f, "unnecessary visibility qualifier") |
106 | } | 119 | } |
@@ -111,6 +124,51 @@ impl fmt::Display for SyntaxErrorKind { | |||
111 | } | 124 | } |
112 | } | 125 | } |
113 | 126 | ||
127 | impl fmt::Display for TokenizeError { | ||
128 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
129 | #[rustfmt::skip] | ||
130 | let msg = match self { | ||
131 | TokenizeError::EmptyInt => { | ||
132 | "Missing digits after the integer base prefix" | ||
133 | } | ||
134 | TokenizeError::EmptyExponent => { | ||
135 | "Missing digits after the exponent symbol" | ||
136 | } | ||
137 | TokenizeError::UnterminatedBlockComment => { | ||
138 | "Missing trailing `*/` symbols to terminate the block comment" | ||
139 | } | ||
140 | TokenizeError::UnterminatedChar => { | ||
141 | "Missing trailing `'` symbol to terminate the character literal" | ||
142 | } | ||
143 | TokenizeError::UnterminatedByte => { | ||
144 | "Missing trailing `'` symbol to terminate the byte literal" | ||
145 | } | ||
146 | TokenizeError::UnterminatedString => { | ||
147 | "Missing trailing `\"` symbol to terminate the string literal" | ||
148 | } | ||
149 | TokenizeError::UnterminatedByteString => { | ||
150 | "Missing trailing `\"` symbol to terminate the byte string literal" | ||
151 | } | ||
152 | TokenizeError::UnterminatedRawString => { | ||
153 | "Missing trailing `\"` with `#` symbols to terminate the raw string literal" | ||
154 | } | ||
155 | TokenizeError::UnterminatedRawByteString => { | ||
156 | "Missing trailing `\"` with `#` symbols to terminate the raw byte string literal" | ||
157 | } | ||
158 | TokenizeError::UnstartedRawString => { | ||
159 | "Missing `\"` symbol after `#` symbols to begin the raw string literal" | ||
160 | } | ||
161 | TokenizeError::UnstartedRawByteString => { | ||
162 | "Missing `\"` symbol after `#` symbols to begin the raw byte string literal" | ||
163 | } | ||
164 | TokenizeError::LifetimeStartsWithNumber => { | ||
165 | "Lifetime name cannot start with a number" | ||
166 | } | ||
167 | }; | ||
168 | write!(f, "{}", msg) | ||
169 | } | ||
170 | } | ||
171 | |||
114 | impl fmt::Display for EscapeError { | 172 | impl fmt::Display for EscapeError { |
115 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | 173 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
116 | let msg = match self { | 174 | let msg = match self { |
diff --git a/crates/ra_syntax/src/syntax_node.rs b/crates/ra_syntax/src/syntax_node.rs index b3eb5da63..7c2b18af3 100644 --- a/crates/ra_syntax/src/syntax_node.rs +++ b/crates/ra_syntax/src/syntax_node.rs | |||
@@ -4,7 +4,7 @@ | |||
4 | //! `SyntaxNode`, and a basic traversal API (parent, children, siblings). | 4 | //! `SyntaxNode`, and a basic traversal API (parent, children, siblings). |
5 | //! | 5 | //! |
6 | //! The *real* implementation is in the (language-agnostic) `rowan` crate, this | 6 | //! The *real* implementation is in the (language-agnostic) `rowan` crate, this |
7 | //! modules just wraps its API. | 7 | //! module just wraps its API. |
8 | 8 | ||
9 | use ra_parser::ParseError; | 9 | use ra_parser::ParseError; |
10 | use rowan::{GreenNodeBuilder, Language}; | 10 | use rowan::{GreenNodeBuilder, Language}; |
@@ -38,17 +38,12 @@ pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>; | |||
38 | 38 | ||
39 | pub use rowan::{Direction, NodeOrToken}; | 39 | pub use rowan::{Direction, NodeOrToken}; |
40 | 40 | ||
41 | #[derive(Default)] | ||
41 | pub struct SyntaxTreeBuilder { | 42 | pub struct SyntaxTreeBuilder { |
42 | errors: Vec<SyntaxError>, | 43 | errors: Vec<SyntaxError>, |
43 | inner: GreenNodeBuilder<'static>, | 44 | inner: GreenNodeBuilder<'static>, |
44 | } | 45 | } |
45 | 46 | ||
46 | impl Default for SyntaxTreeBuilder { | ||
47 | fn default() -> SyntaxTreeBuilder { | ||
48 | SyntaxTreeBuilder { errors: Vec::new(), inner: GreenNodeBuilder::new() } | ||
49 | } | ||
50 | } | ||
51 | |||
52 | impl SyntaxTreeBuilder { | 47 | impl SyntaxTreeBuilder { |
53 | pub(crate) fn finish_raw(self) -> (GreenNode, Vec<SyntaxError>) { | 48 | pub(crate) fn finish_raw(self) -> (GreenNode, Vec<SyntaxError>) { |
54 | let green = self.inner.finish(); | 49 | let green = self.inner.finish(); |
diff --git a/crates/ra_syntax/src/tests.rs b/crates/ra_syntax/src/tests.rs index 458920607..fb22b9e54 100644 --- a/crates/ra_syntax/src/tests.rs +++ b/crates/ra_syntax/src/tests.rs | |||
@@ -1,18 +1,28 @@ | |||
1 | use std::{ | 1 | use std::{ |
2 | fmt::Write, | 2 | fmt::Write, |
3 | path::{Component, PathBuf}, | 3 | path::{Component, Path, PathBuf}, |
4 | }; | 4 | }; |
5 | 5 | ||
6 | use test_utils::{collect_tests, dir_tests, project_dir, read_text}; | 6 | use test_utils::{collect_tests, dir_tests, project_dir, read_text}; |
7 | 7 | ||
8 | use crate::{fuzz, SourceFile}; | 8 | use crate::{fuzz, tokenize, Location, SourceFile, SyntaxError, TextRange, Token}; |
9 | 9 | ||
10 | #[test] | 10 | #[test] |
11 | fn lexer_tests() { | 11 | fn lexer_tests() { |
12 | dir_tests(&test_data_dir(), &["lexer"], |text, _| { | 12 | // FIXME: |
13 | let tokens = crate::tokenize(text); | 13 | // * Add tests for unicode escapes in byte-character and [raw]-byte-string literals |
14 | dump_tokens(&tokens, text) | 14 | // * Add tests for unescape errors |
15 | }) | 15 | |
16 | dir_tests(&test_data_dir(), &["lexer/ok"], |text, path| { | ||
17 | let (tokens, errors) = tokenize(text); | ||
18 | assert_errors_are_absent(&errors, path); | ||
19 | dump_tokens_and_errors(&tokens, &errors, text) | ||
20 | }); | ||
21 | dir_tests(&test_data_dir(), &["lexer/err"], |text, path| { | ||
22 | let (tokens, errors) = tokenize(text); | ||
23 | assert_errors_are_present(&errors, path); | ||
24 | dump_tokens_and_errors(&tokens, &errors, text) | ||
25 | }); | ||
16 | } | 26 | } |
17 | 27 | ||
18 | #[test] | 28 | #[test] |
@@ -32,18 +42,13 @@ fn parser_tests() { | |||
32 | dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| { | 42 | dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| { |
33 | let parse = SourceFile::parse(text); | 43 | let parse = SourceFile::parse(text); |
34 | let errors = parse.errors(); | 44 | let errors = parse.errors(); |
35 | assert_eq!( | 45 | assert_errors_are_absent(&errors, path); |
36 | errors, | ||
37 | &[] as &[crate::SyntaxError], | ||
38 | "There should be no errors in the file {:?}", | ||
39 | path.display(), | ||
40 | ); | ||
41 | parse.debug_dump() | 46 | parse.debug_dump() |
42 | }); | 47 | }); |
43 | dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| { | 48 | dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| { |
44 | let parse = SourceFile::parse(text); | 49 | let parse = SourceFile::parse(text); |
45 | let errors = parse.errors(); | 50 | let errors = parse.errors(); |
46 | assert!(!errors.is_empty(), "There should be errors in the file {:?}", path.display()); | 51 | assert_errors_are_present(&errors, path); |
47 | parse.debug_dump() | 52 | parse.debug_dump() |
48 | }); | 53 | }); |
49 | } | 54 | } |
@@ -75,7 +80,7 @@ fn self_hosting_parsing() { | |||
75 | .into_iter() | 80 | .into_iter() |
76 | .filter_entry(|entry| { | 81 | .filter_entry(|entry| { |
77 | !entry.path().components().any(|component| { | 82 | !entry.path().components().any(|component| { |
78 | // Get all files which are not in the crates/ra_syntax/tests/data folder | 83 | // Get all files which are not in the crates/ra_syntax/test_data folder |
79 | component == Component::Normal(OsStr::new("test_data")) | 84 | component == Component::Normal(OsStr::new("test_data")) |
80 | }) | 85 | }) |
81 | }) | 86 | }) |
@@ -101,15 +106,47 @@ fn test_data_dir() -> PathBuf { | |||
101 | project_dir().join("crates/ra_syntax/test_data") | 106 | project_dir().join("crates/ra_syntax/test_data") |
102 | } | 107 | } |
103 | 108 | ||
104 | fn dump_tokens(tokens: &[crate::Token], text: &str) -> String { | 109 | fn assert_errors_are_present(errors: &[SyntaxError], path: &Path) { |
110 | assert!(!errors.is_empty(), "There should be errors in the file {:?}", path.display()); | ||
111 | } | ||
112 | fn assert_errors_are_absent(errors: &[SyntaxError], path: &Path) { | ||
113 | assert_eq!( | ||
114 | errors, | ||
115 | &[] as &[SyntaxError], | ||
116 | "There should be no errors in the file {:?}", | ||
117 | path.display(), | ||
118 | ); | ||
119 | } | ||
120 | |||
121 | fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str) -> String { | ||
105 | let mut acc = String::new(); | 122 | let mut acc = String::new(); |
106 | let mut offset = 0; | 123 | let mut offset = 0; |
107 | for token in tokens { | 124 | for token in tokens { |
108 | let len: u32 = token.len.into(); | 125 | let token_len = token.len.to_usize(); |
109 | let len = len as usize; | 126 | let token_text = &text[offset..offset + token_len]; |
110 | let token_text = &text[offset..offset + len]; | 127 | offset += token_len; |
111 | offset += len; | 128 | writeln!(acc, "{:?} {} {:?}", token.kind, token_len, token_text).unwrap(); |
112 | write!(acc, "{:?} {} {:?}\n", token.kind, token.len, token_text).unwrap() | 129 | } |
130 | for err in errors { | ||
131 | let err_range = location_to_range(err.location()); | ||
132 | writeln!( | ||
133 | acc, | ||
134 | "> error{:?} token({:?}) msg({})", | ||
135 | err.location(), | ||
136 | &text[err_range], | ||
137 | err.kind() | ||
138 | ) | ||
139 | .unwrap(); | ||
140 | } | ||
141 | return acc; | ||
142 | |||
143 | // FIXME: copy-pasted this from `ra_ide/src/diagnostics.rs` | ||
144 | // `Location` will be refactored soon in new PR, see todos here: | ||
145 | // https://github.com/rust-analyzer/rust-analyzer/issues/223 | ||
146 | fn location_to_range(location: Location) -> TextRange { | ||
147 | match location { | ||
148 | Location::Offset(offset) => TextRange::offset_len(offset, 1.into()), | ||
149 | Location::Range(range) => range, | ||
150 | } | ||
113 | } | 151 | } |
114 | acc | ||
115 | } | 152 | } |
diff --git a/crates/ra_syntax/src/validation.rs b/crates/ra_syntax/src/validation.rs index 445e3b3e4..8a5f0e4b7 100644 --- a/crates/ra_syntax/src/validation.rs +++ b/crates/ra_syntax/src/validation.rs | |||
@@ -94,6 +94,12 @@ impl From<rustc_lexer::unescape::EscapeError> for SyntaxErrorKind { | |||
94 | } | 94 | } |
95 | 95 | ||
96 | pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> { | 96 | pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> { |
97 | // FIXME: | ||
98 | // * Add validation of character literal containing only a single char | ||
99 | // * Add validation of `crate` keyword not appearing in the middle of the symbol path | ||
100 | // * Add validation of doc comments are being attached to nodes | ||
101 | // * Remove validation of unterminated literals (it is already implemented in `tokenize()`) | ||
102 | |||
97 | let mut errors = Vec::new(); | 103 | let mut errors = Vec::new(); |
98 | for node in root.descendants() { | 104 | for node in root.descendants() { |
99 | match_ast! { | 105 | match_ast! { |
diff --git a/crates/ra_syntax/test_data/lexer/0010_comments.rs b/crates/ra_syntax/test_data/lexer/0010_comments.rs deleted file mode 100644 index 71bdd1f9c..000000000 --- a/crates/ra_syntax/test_data/lexer/0010_comments.rs +++ /dev/null | |||
@@ -1,3 +0,0 @@ | |||
1 | #!/usr/bin/env bash | ||
2 | // hello | ||
3 | //! World | ||
diff --git a/crates/ra_syntax/test_data/lexer/0010_comments.txt b/crates/ra_syntax/test_data/lexer/0010_comments.txt deleted file mode 100644 index 3c997de3f..000000000 --- a/crates/ra_syntax/test_data/lexer/0010_comments.txt +++ /dev/null | |||
@@ -1,6 +0,0 @@ | |||
1 | SHEBANG 19 "#!/usr/bin/env bash" | ||
2 | WHITESPACE 1 "\n" | ||
3 | COMMENT 8 "// hello" | ||
4 | WHITESPACE 1 "\n" | ||
5 | COMMENT 9 "//! World" | ||
6 | WHITESPACE 1 "\n" | ||
diff --git a/crates/ra_syntax/test_data/lexer/0014_unclosed_char.rs b/crates/ra_syntax/test_data/lexer/0014_unclosed_char.rs deleted file mode 100644 index 9c0007077..000000000 --- a/crates/ra_syntax/test_data/lexer/0014_unclosed_char.rs +++ /dev/null | |||
@@ -1 +0,0 @@ | |||
1 | '1 \ No newline at end of file | ||
diff --git a/crates/ra_syntax/test_data/lexer/0014_unclosed_char.txt b/crates/ra_syntax/test_data/lexer/0014_unclosed_char.txt deleted file mode 100644 index 737a300ee..000000000 --- a/crates/ra_syntax/test_data/lexer/0014_unclosed_char.txt +++ /dev/null | |||
@@ -1 +0,0 @@ | |||
1 | LIFETIME 2 "\'1" | ||
diff --git a/crates/ra_syntax/test_data/lexer/0015_unclosed_string.rs b/crates/ra_syntax/test_data/lexer/0015_unclosed_string.rs deleted file mode 100644 index d771a26d4..000000000 --- a/crates/ra_syntax/test_data/lexer/0015_unclosed_string.rs +++ /dev/null | |||
@@ -1 +0,0 @@ | |||
1 | "hello | ||
diff --git a/crates/ra_syntax/test_data/lexer/0015_unclosed_string.txt b/crates/ra_syntax/test_data/lexer/0015_unclosed_string.txt deleted file mode 100644 index 728c40b66..000000000 --- a/crates/ra_syntax/test_data/lexer/0015_unclosed_string.txt +++ /dev/null | |||
@@ -1 +0,0 @@ | |||
1 | STRING 7 "\"hello\n" | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0001_unclosed_char_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0001_unclosed_char_at_eof.rs new file mode 100644 index 000000000..ad2823b48 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0001_unclosed_char_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
' \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0001_unclosed_char_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0001_unclosed_char_at_eof.txt new file mode 100644 index 000000000..f24e1fd32 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0001_unclosed_char_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | CHAR 1 "\'" | ||
2 | > error[0; 1) token("\'") msg(Missing trailing `'` symbol to terminate the character literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.rs b/crates/ra_syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.rs new file mode 100644 index 000000000..e264a4152 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.rs | |||
@@ -0,0 +1 @@ | |||
'🦀 \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.txt b/crates/ra_syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.txt new file mode 100644 index 000000000..bd08cfc44 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | CHAR 5 "\'🦀" | ||
2 | > error[0; 5) token("\'🦀") msg(Missing trailing `'` symbol to terminate the character literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.rs b/crates/ra_syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.rs new file mode 100644 index 000000000..cf74b4dad --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.rs | |||
@@ -0,0 +1 @@ | |||
'\x7f \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.txt b/crates/ra_syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.txt new file mode 100644 index 000000000..0ee22912d --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | CHAR 5 "\'\\x7f" | ||
2 | > error[0; 5) token("\'\\x7f") msg(Missing trailing `'` symbol to terminate the character literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.rs b/crates/ra_syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.rs new file mode 100644 index 000000000..50be91f68 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.rs | |||
@@ -0,0 +1 @@ | |||
'\u{20AA} \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.txt b/crates/ra_syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.txt new file mode 100644 index 000000000..96fac42ce --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | CHAR 9 "\'\\u{20AA}" | ||
2 | > error[0; 9) token("\'\\u{20AA}") msg(Missing trailing `'` symbol to terminate the character literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0005_unclosed_char_with_space.rs b/crates/ra_syntax/test_data/lexer/err/0005_unclosed_char_with_space.rs new file mode 100644 index 000000000..309ecfe47 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0005_unclosed_char_with_space.rs | |||
@@ -0,0 +1 @@ | |||
' \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0005_unclosed_char_with_space.txt b/crates/ra_syntax/test_data/lexer/err/0005_unclosed_char_with_space.txt new file mode 100644 index 000000000..2059f3f81 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0005_unclosed_char_with_space.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | CHAR 2 "\' " | ||
2 | > error[0; 2) token("\' ") msg(Missing trailing `'` symbol to terminate the character literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0006_unclosed_char_with_slash.rs b/crates/ra_syntax/test_data/lexer/err/0006_unclosed_char_with_slash.rs new file mode 100644 index 000000000..6ba258b10 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0006_unclosed_char_with_slash.rs | |||
@@ -0,0 +1 @@ | |||
'\ \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0006_unclosed_char_with_slash.txt b/crates/ra_syntax/test_data/lexer/err/0006_unclosed_char_with_slash.txt new file mode 100644 index 000000000..7dd376e59 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0006_unclosed_char_with_slash.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | CHAR 2 "\'\\" | ||
2 | > error[0; 2) token("\'\\") msg(Missing trailing `'` symbol to terminate the character literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.rs b/crates/ra_syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.rs new file mode 100644 index 000000000..78bef7e3e --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.rs | |||
@@ -0,0 +1 @@ | |||
'\n \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.txt b/crates/ra_syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.txt new file mode 100644 index 000000000..ef7a0a147 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | CHAR 3 "\'\\n" | ||
2 | > error[0; 3) token("\'\\n") msg(Missing trailing `'` symbol to terminate the character literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.rs b/crates/ra_syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.rs new file mode 100644 index 000000000..a0e722065 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.rs | |||
@@ -0,0 +1 @@ | |||
'\' \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.txt b/crates/ra_syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.txt new file mode 100644 index 000000000..13fc5ea9a --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | CHAR 3 "\'\\\'" | ||
2 | > error[0; 3) token("\'\\\'") msg(Missing trailing `'` symbol to terminate the character literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.rs new file mode 100644 index 000000000..795dc7e25 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
b' \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.txt new file mode 100644 index 000000000..269d68c74 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE 2 "b\'" | ||
2 | > error[0; 2) token("b\'") msg(Missing trailing `'` symbol to terminate the byte literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.rs b/crates/ra_syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.rs new file mode 100644 index 000000000..c9230dc24 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.rs | |||
@@ -0,0 +1 @@ | |||
b'🦀 \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.txt b/crates/ra_syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.txt new file mode 100644 index 000000000..91a76e479 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE 6 "b\'🦀" | ||
2 | > error[0; 6) token("b\'🦀") msg(Missing trailing `'` symbol to terminate the byte literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.rs b/crates/ra_syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.rs new file mode 100644 index 000000000..d146a8090 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.rs | |||
@@ -0,0 +1 @@ | |||
b'\x7f \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.txt b/crates/ra_syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.txt new file mode 100644 index 000000000..b8c804a18 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE 6 "b\'\\x7f" | ||
2 | > error[0; 6) token("b\'\\x7f") msg(Missing trailing `'` symbol to terminate the byte literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.rs b/crates/ra_syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.rs new file mode 100644 index 000000000..a3dec7c25 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.rs | |||
@@ -0,0 +1 @@ | |||
b'\u{20AA} \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.txt b/crates/ra_syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.txt new file mode 100644 index 000000000..dfca22a59 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE 10 "b\'\\u{20AA}" | ||
2 | > error[0; 10) token("b\'\\u{20AA}") msg(Missing trailing `'` symbol to terminate the byte literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0013_unclosed_byte_with_space.rs b/crates/ra_syntax/test_data/lexer/err/0013_unclosed_byte_with_space.rs new file mode 100644 index 000000000..93b7f9c87 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0013_unclosed_byte_with_space.rs | |||
@@ -0,0 +1 @@ | |||
b' \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0013_unclosed_byte_with_space.txt b/crates/ra_syntax/test_data/lexer/err/0013_unclosed_byte_with_space.txt new file mode 100644 index 000000000..51a1cceab --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0013_unclosed_byte_with_space.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE 3 "b\' " | ||
2 | > error[0; 3) token("b\' ") msg(Missing trailing `'` symbol to terminate the byte literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.rs b/crates/ra_syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.rs new file mode 100644 index 000000000..abffa5037 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.rs | |||
@@ -0,0 +1 @@ | |||
b'\ \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.txt b/crates/ra_syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.txt new file mode 100644 index 000000000..24e835c27 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE 3 "b\'\\" | ||
2 | > error[0; 3) token("b\'\\") msg(Missing trailing `'` symbol to terminate the byte literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.rs b/crates/ra_syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.rs new file mode 100644 index 000000000..4f46836a9 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.rs | |||
@@ -0,0 +1 @@ | |||
b'\n \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.txt b/crates/ra_syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.txt new file mode 100644 index 000000000..f1e39a41b --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE 4 "b\'\\n" | ||
2 | > error[0; 4) token("b\'\\n") msg(Missing trailing `'` symbol to terminate the byte literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.rs b/crates/ra_syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.rs new file mode 100644 index 000000000..645b641ee --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.rs | |||
@@ -0,0 +1 @@ | |||
b'\' \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.txt b/crates/ra_syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.txt new file mode 100644 index 000000000..f8ffe815d --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE 4 "b\'\\\'" | ||
2 | > error[0; 4) token("b\'\\\'") msg(Missing trailing `'` symbol to terminate the byte literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0017_unclosed_string_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0017_unclosed_string_at_eof.rs new file mode 100644 index 000000000..9d68933c4 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0017_unclosed_string_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0017_unclosed_string_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0017_unclosed_string_at_eof.txt new file mode 100644 index 000000000..823daaf6f --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0017_unclosed_string_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | STRING 1 "\"" | ||
2 | > error[0; 1) token("\"") msg(Missing trailing `"` symbol to terminate the string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.rs b/crates/ra_syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.rs new file mode 100644 index 000000000..d439b8d2a --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.rs | |||
@@ -0,0 +1 @@ | |||
"🦀 \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.txt b/crates/ra_syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.txt new file mode 100644 index 000000000..164580eb3 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | STRING 5 "\"🦀" | ||
2 | > error[0; 5) token("\"🦀") msg(Missing trailing `"` symbol to terminate the string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.rs b/crates/ra_syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.rs new file mode 100644 index 000000000..56186a344 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.rs | |||
@@ -0,0 +1 @@ | |||
"\x7f \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.txt b/crates/ra_syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.txt new file mode 100644 index 000000000..4453827c3 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | STRING 5 "\"\\x7f" | ||
2 | > error[0; 5) token("\"\\x7f") msg(Missing trailing `"` symbol to terminate the string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.rs b/crates/ra_syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.rs new file mode 100644 index 000000000..ed24095c3 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.rs | |||
@@ -0,0 +1 @@ | |||
"\u{20AA} \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.txt b/crates/ra_syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.txt new file mode 100644 index 000000000..aa614f304 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | STRING 9 "\"\\u{20AA}" | ||
2 | > error[0; 9) token("\"\\u{20AA}") msg(Missing trailing `"` symbol to terminate the string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0021_unclosed_string_with_space.rs b/crates/ra_syntax/test_data/lexer/err/0021_unclosed_string_with_space.rs new file mode 100644 index 000000000..72cdc841f --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0021_unclosed_string_with_space.rs | |||
@@ -0,0 +1 @@ | |||
" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0021_unclosed_string_with_space.txt b/crates/ra_syntax/test_data/lexer/err/0021_unclosed_string_with_space.txt new file mode 100644 index 000000000..b7db1236f --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0021_unclosed_string_with_space.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | STRING 2 "\" " | ||
2 | > error[0; 2) token("\" ") msg(Missing trailing `"` symbol to terminate the string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0022_unclosed_string_with_slash.rs b/crates/ra_syntax/test_data/lexer/err/0022_unclosed_string_with_slash.rs new file mode 100644 index 000000000..00a258400 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0022_unclosed_string_with_slash.rs | |||
@@ -0,0 +1 @@ | |||
"\ \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0022_unclosed_string_with_slash.txt b/crates/ra_syntax/test_data/lexer/err/0022_unclosed_string_with_slash.txt new file mode 100644 index 000000000..9d3df3799 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0022_unclosed_string_with_slash.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | STRING 2 "\"\\" | ||
2 | > error[0; 2) token("\"\\") msg(Missing trailing `"` symbol to terminate the string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.rs b/crates/ra_syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.rs new file mode 100644 index 000000000..a0c29b8cf --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.rs | |||
@@ -0,0 +1 @@ | |||
"\n \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.txt b/crates/ra_syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.txt new file mode 100644 index 000000000..e3eb672b6 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | STRING 3 "\"\\n" | ||
2 | > error[0; 3) token("\"\\n") msg(Missing trailing `"` symbol to terminate the string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.rs b/crates/ra_syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.rs new file mode 100644 index 000000000..403c2d6dd --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.rs | |||
@@ -0,0 +1 @@ | |||
"\" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.txt b/crates/ra_syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.txt new file mode 100644 index 000000000..041d7fb6e --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | STRING 3 "\"\\\"" | ||
2 | > error[0; 3) token("\"\\\"") msg(Missing trailing `"` symbol to terminate the string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.rs new file mode 100644 index 000000000..36f4f4321 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
b" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.txt new file mode 100644 index 000000000..be7970a83 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE_STRING 2 "b\"" | ||
2 | > error[0; 2) token("b\"") msg(Missing trailing `"` symbol to terminate the byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.rs b/crates/ra_syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.rs new file mode 100644 index 000000000..3c23a0372 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.rs | |||
@@ -0,0 +1 @@ | |||
b"🦀 \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.txt b/crates/ra_syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.txt new file mode 100644 index 000000000..bf9aab132 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE_STRING 6 "b\"🦀" | ||
2 | > error[0; 6) token("b\"🦀") msg(Missing trailing `"` symbol to terminate the byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.rs b/crates/ra_syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.rs new file mode 100644 index 000000000..836c112c1 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.rs | |||
@@ -0,0 +1 @@ | |||
b"\x7f \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.txt b/crates/ra_syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.txt new file mode 100644 index 000000000..76e16d7d3 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE_STRING 6 "b\"\\x7f" | ||
2 | > error[0; 6) token("b\"\\x7f") msg(Missing trailing `"` symbol to terminate the byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.rs b/crates/ra_syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.rs new file mode 100644 index 000000000..1c6df1d00 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.rs | |||
@@ -0,0 +1 @@ | |||
b"\u{20AA} \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.txt b/crates/ra_syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.txt new file mode 100644 index 000000000..09adffa16 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE_STRING 10 "b\"\\u{20AA}" | ||
2 | > error[0; 10) token("b\"\\u{20AA}") msg(Missing trailing `"` symbol to terminate the byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.rs b/crates/ra_syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.rs new file mode 100644 index 000000000..d6898541e --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.rs | |||
@@ -0,0 +1 @@ | |||
b" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.txt b/crates/ra_syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.txt new file mode 100644 index 000000000..fcb7253c8 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE_STRING 3 "b\" " | ||
2 | > error[0; 3) token("b\" ") msg(Missing trailing `"` symbol to terminate the byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.rs b/crates/ra_syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.rs new file mode 100644 index 000000000..cce661538 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.rs | |||
@@ -0,0 +1 @@ | |||
b"\ \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.txt b/crates/ra_syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.txt new file mode 100644 index 000000000..0a1b3e269 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE_STRING 3 "b\"\\" | ||
2 | > error[0; 3) token("b\"\\") msg(Missing trailing `"` symbol to terminate the byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.rs b/crates/ra_syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.rs new file mode 100644 index 000000000..5e680aabb --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.rs | |||
@@ -0,0 +1 @@ | |||
b"\n \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.txt b/crates/ra_syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.txt new file mode 100644 index 000000000..1fb89d2b6 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE_STRING 4 "b\"\\n" | ||
2 | > error[0; 4) token("b\"\\n") msg(Missing trailing `"` symbol to terminate the byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.rs b/crates/ra_syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.rs new file mode 100644 index 000000000..f2ff58ba9 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.rs | |||
@@ -0,0 +1 @@ | |||
b"\" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.txt b/crates/ra_syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.txt new file mode 100644 index 000000000..718d36992 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE_STRING 4 "b\"\\\"" | ||
2 | > error[0; 4) token("b\"\\\"") msg(Missing trailing `"` symbol to terminate the byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.rs new file mode 100644 index 000000000..557c59b62 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
r##" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt new file mode 100644 index 000000000..93348f548 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_STRING 4 "r##\"" | ||
2 | > error[0; 4) token("r##\"") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.rs b/crates/ra_syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.rs new file mode 100644 index 000000000..bd046e4bb --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.rs | |||
@@ -0,0 +1 @@ | |||
r##"🦀 \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt b/crates/ra_syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt new file mode 100644 index 000000000..42c70dfe8 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_STRING 8 "r##\"🦀" | ||
2 | > error[0; 8) token("r##\"🦀") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.rs b/crates/ra_syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.rs new file mode 100644 index 000000000..5bec883dc --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.rs | |||
@@ -0,0 +1 @@ | |||
r##"\x7f \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt b/crates/ra_syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt new file mode 100644 index 000000000..2bdeea0ff --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_STRING 8 "r##\"\\x7f" | ||
2 | > error[0; 8) token("r##\"\\x7f") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.rs b/crates/ra_syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.rs new file mode 100644 index 000000000..bf05c3913 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.rs | |||
@@ -0,0 +1 @@ | |||
r##"\u{20AA} \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt b/crates/ra_syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt new file mode 100644 index 000000000..667d4d79f --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_STRING 12 "r##\"\\u{20AA}" | ||
2 | > error[0; 12) token("r##\"\\u{20AA}") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.rs b/crates/ra_syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.rs new file mode 100644 index 000000000..f104bae4f --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.rs | |||
@@ -0,0 +1 @@ | |||
r##" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt b/crates/ra_syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt new file mode 100644 index 000000000..dd9597a1a --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_STRING 5 "r##\" " | ||
2 | > error[0; 5) token("r##\" ") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.rs b/crates/ra_syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.rs new file mode 100644 index 000000000..9242077b8 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.rs | |||
@@ -0,0 +1 @@ | |||
r##"\ \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt b/crates/ra_syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt new file mode 100644 index 000000000..6ac6e3d62 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_STRING 5 "r##\"\\" | ||
2 | > error[0; 5) token("r##\"\\") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.rs b/crates/ra_syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.rs new file mode 100644 index 000000000..db1c16f2b --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.rs | |||
@@ -0,0 +1 @@ | |||
r##"\n \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt b/crates/ra_syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt new file mode 100644 index 000000000..9d35443f5 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_STRING 6 "r##\"\\n" | ||
2 | > error[0; 6) token("r##\"\\n") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.rs new file mode 100644 index 000000000..ae5bae622 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
br##" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt new file mode 100644 index 000000000..81fa39ea5 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_BYTE_STRING 5 "br##\"" | ||
2 | > error[0; 5) token("br##\"") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.rs b/crates/ra_syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.rs new file mode 100644 index 000000000..9ef01207a --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.rs | |||
@@ -0,0 +1 @@ | |||
br##"🦀 \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt b/crates/ra_syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt new file mode 100644 index 000000000..c2503a4d0 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_BYTE_STRING 9 "br##\"🦀" | ||
2 | > error[0; 9) token("br##\"🦀") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.rs b/crates/ra_syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.rs new file mode 100644 index 000000000..d50270afe --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.rs | |||
@@ -0,0 +1 @@ | |||
br##"\x7f \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt b/crates/ra_syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt new file mode 100644 index 000000000..3bd3d8152 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_BYTE_STRING 9 "br##\"\\x7f" | ||
2 | > error[0; 9) token("br##\"\\x7f") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.rs b/crates/ra_syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.rs new file mode 100644 index 000000000..90e299a1a --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.rs | |||
@@ -0,0 +1 @@ | |||
br##"\u{20AA} \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt b/crates/ra_syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt new file mode 100644 index 000000000..a512f0428 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_BYTE_STRING 13 "br##\"\\u{20AA}" | ||
2 | > error[0; 13) token("br##\"\\u{20AA}") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.rs b/crates/ra_syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.rs new file mode 100644 index 000000000..14c602fd2 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.rs | |||
@@ -0,0 +1 @@ | |||
br##" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt b/crates/ra_syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt new file mode 100644 index 000000000..dc616a623 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_BYTE_STRING 6 "br##\" " | ||
2 | > error[0; 6) token("br##\" ") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.rs b/crates/ra_syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.rs new file mode 100644 index 000000000..0b3c015d7 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.rs | |||
@@ -0,0 +1 @@ | |||
br##"\ \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt b/crates/ra_syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt new file mode 100644 index 000000000..debafe380 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_BYTE_STRING 6 "br##\"\\" | ||
2 | > error[0; 6) token("br##\"\\") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.rs b/crates/ra_syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.rs new file mode 100644 index 000000000..0d8b0e7ab --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.rs | |||
@@ -0,0 +1 @@ | |||
br##"\n \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt b/crates/ra_syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt new file mode 100644 index 000000000..524e617b7 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_BYTE_STRING 7 "br##\"\\n" | ||
2 | > error[0; 7) token("br##\"\\n") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.rs new file mode 100644 index 000000000..eddf8d080 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
r## \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt new file mode 100644 index 000000000..00b046840 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_STRING 3 "r##" | ||
2 | > error[0; 3) token("r##") msg(Missing `"` symbol after `#` symbols to begin the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.rs new file mode 100644 index 000000000..7e8cadf4f --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
br## \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt new file mode 100644 index 000000000..33b25e60f --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_BYTE_STRING 4 "br##" | ||
2 | > error[0; 4) token("br##") msg(Missing `"` symbol after `#` symbols to begin the raw byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.rs b/crates/ra_syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.rs new file mode 100644 index 000000000..534668a9b --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.rs | |||
@@ -0,0 +1 @@ | |||
r## I lack a quote! \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt b/crates/ra_syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt new file mode 100644 index 000000000..782dfd974 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt | |||
@@ -0,0 +1,10 @@ | |||
1 | RAW_STRING 4 "r## " | ||
2 | IDENT 1 "I" | ||
3 | WHITESPACE 1 " " | ||
4 | IDENT 4 "lack" | ||
5 | WHITESPACE 1 " " | ||
6 | IDENT 1 "a" | ||
7 | WHITESPACE 1 " " | ||
8 | IDENT 5 "quote" | ||
9 | EXCL 1 "!" | ||
10 | > error[0; 4) token("r## ") msg(Missing `"` symbol after `#` symbols to begin the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.rs b/crates/ra_syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.rs new file mode 100644 index 000000000..d9b55455a --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.rs | |||
@@ -0,0 +1 @@ | |||
br## I lack a quote! \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt b/crates/ra_syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt new file mode 100644 index 000000000..59c40cd65 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt | |||
@@ -0,0 +1,10 @@ | |||
1 | RAW_BYTE_STRING 5 "br## " | ||
2 | IDENT 1 "I" | ||
3 | WHITESPACE 1 " " | ||
4 | IDENT 4 "lack" | ||
5 | WHITESPACE 1 " " | ||
6 | IDENT 1 "a" | ||
7 | WHITESPACE 1 " " | ||
8 | IDENT 5 "quote" | ||
9 | EXCL 1 "!" | ||
10 | > error[0; 5) token("br## ") msg(Missing `"` symbol after `#` symbols to begin the raw byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.rs new file mode 100644 index 000000000..22e83649f --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
/* \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.txt new file mode 100644 index 000000000..5d04cdaa4 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | COMMENT 2 "/*" | ||
2 | > error[0; 2) token("/*") msg(Missing trailing `*/` symbols to terminate the block comment) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.rs b/crates/ra_syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.rs new file mode 100644 index 000000000..c45c2844d --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.rs | |||
@@ -0,0 +1 @@ | |||
/* comment | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.txt b/crates/ra_syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.txt new file mode 100644 index 000000000..8c6b678e3 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | COMMENT 11 "/* comment\n" | ||
2 | > error[0; 11) token("/* comment\n") msg(Missing trailing `*/` symbols to terminate the block comment) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.rs b/crates/ra_syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.rs new file mode 100644 index 000000000..3fcfc9660 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.rs | |||
@@ -0,0 +1 @@ | |||
/* /* /* | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.txt b/crates/ra_syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.txt new file mode 100644 index 000000000..250de34d9 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | COMMENT 9 "/* /* /*\n" | ||
2 | > error[0; 9) token("/* /* /*\n") msg(Missing trailing `*/` symbols to terminate the block comment) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.rs b/crates/ra_syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.rs new file mode 100644 index 000000000..26c898f01 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.rs | |||
@@ -0,0 +1 @@ | |||
/** /*! /* comment */ */ | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.txt b/crates/ra_syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.txt new file mode 100644 index 000000000..f97f2a8c7 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | COMMENT 25 "/** /*! /* comment */ */\n" | ||
2 | > error[0; 25) token("/** /*! /* comment */ */\n") msg(Missing trailing `*/` symbols to terminate the block comment) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0055_empty_int.rs b/crates/ra_syntax/test_data/lexer/err/0055_empty_int.rs new file mode 100644 index 000000000..aa2a9fdca --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0055_empty_int.rs | |||
@@ -0,0 +1,17 @@ | |||
1 | 0b | ||
2 | 0o | ||
3 | 0x | ||
4 | |||
5 | 0b_ | ||
6 | 0o_ | ||
7 | 0x_ | ||
8 | |||
9 | 0bnoDigit | ||
10 | 0onoDigit | ||
11 | 0xnoDigit | ||
12 | |||
13 | 0xG | ||
14 | 0xg | ||
15 | |||
16 | 0x_g | ||
17 | 0x_G | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0055_empty_int.txt b/crates/ra_syntax/test_data/lexer/err/0055_empty_int.txt new file mode 100644 index 000000000..2fe5bd950 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0055_empty_int.txt | |||
@@ -0,0 +1,39 @@ | |||
1 | INT_NUMBER 2 "0b" | ||
2 | WHITESPACE 1 "\n" | ||
3 | INT_NUMBER 2 "0o" | ||
4 | WHITESPACE 1 "\n" | ||
5 | INT_NUMBER 2 "0x" | ||
6 | WHITESPACE 2 "\n\n" | ||
7 | INT_NUMBER 3 "0b_" | ||
8 | WHITESPACE 1 "\n" | ||
9 | INT_NUMBER 3 "0o_" | ||
10 | WHITESPACE 1 "\n" | ||
11 | INT_NUMBER 3 "0x_" | ||
12 | WHITESPACE 2 "\n\n" | ||
13 | INT_NUMBER 9 "0bnoDigit" | ||
14 | WHITESPACE 1 "\n" | ||
15 | INT_NUMBER 9 "0onoDigit" | ||
16 | WHITESPACE 1 "\n" | ||
17 | INT_NUMBER 9 "0xnoDigit" | ||
18 | WHITESPACE 2 "\n\n" | ||
19 | INT_NUMBER 3 "0xG" | ||
20 | WHITESPACE 1 "\n" | ||
21 | INT_NUMBER 3 "0xg" | ||
22 | WHITESPACE 2 "\n\n" | ||
23 | INT_NUMBER 4 "0x_g" | ||
24 | WHITESPACE 1 "\n" | ||
25 | INT_NUMBER 4 "0x_G" | ||
26 | WHITESPACE 1 "\n" | ||
27 | > error[0; 2) token("0b") msg(Missing digits after the integer base prefix) | ||
28 | > error[3; 5) token("0o") msg(Missing digits after the integer base prefix) | ||
29 | > error[6; 8) token("0x") msg(Missing digits after the integer base prefix) | ||
30 | > error[10; 13) token("0b_") msg(Missing digits after the integer base prefix) | ||
31 | > error[14; 17) token("0o_") msg(Missing digits after the integer base prefix) | ||
32 | > error[18; 21) token("0x_") msg(Missing digits after the integer base prefix) | ||
33 | > error[23; 32) token("0bnoDigit") msg(Missing digits after the integer base prefix) | ||
34 | > error[33; 42) token("0onoDigit") msg(Missing digits after the integer base prefix) | ||
35 | > error[43; 52) token("0xnoDigit") msg(Missing digits after the integer base prefix) | ||
36 | > error[54; 57) token("0xG") msg(Missing digits after the integer base prefix) | ||
37 | > error[58; 61) token("0xg") msg(Missing digits after the integer base prefix) | ||
38 | > error[63; 67) token("0x_g") msg(Missing digits after the integer base prefix) | ||
39 | > error[68; 72) token("0x_G") msg(Missing digits after the integer base prefix) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0056_empty_exponent.rs b/crates/ra_syntax/test_data/lexer/err/0056_empty_exponent.rs new file mode 100644 index 000000000..286584c88 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0056_empty_exponent.rs | |||
@@ -0,0 +1,22 @@ | |||
1 | 0e | ||
2 | 0E | ||
3 | |||
4 | 42e+ | ||
5 | 42e- | ||
6 | 42E+ | ||
7 | 42E- | ||
8 | |||
9 | 42.e+ | ||
10 | 42.e- | ||
11 | 42.E+ | ||
12 | 42.E- | ||
13 | |||
14 | 42.2e+ | ||
15 | 42.2e- | ||
16 | 42.2E+ | ||
17 | 42.2E- | ||
18 | |||
19 | 42.2e+f32 | ||
20 | 42.2e-f32 | ||
21 | 42.2E+f32 | ||
22 | 42.2E-f32 | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0056_empty_exponent.txt b/crates/ra_syntax/test_data/lexer/err/0056_empty_exponent.txt new file mode 100644 index 000000000..ab35e20a5 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0056_empty_exponent.txt | |||
@@ -0,0 +1,62 @@ | |||
1 | FLOAT_NUMBER 2 "0e" | ||
2 | WHITESPACE 1 "\n" | ||
3 | FLOAT_NUMBER 2 "0E" | ||
4 | WHITESPACE 2 "\n\n" | ||
5 | FLOAT_NUMBER 4 "42e+" | ||
6 | WHITESPACE 1 "\n" | ||
7 | FLOAT_NUMBER 4 "42e-" | ||
8 | WHITESPACE 1 "\n" | ||
9 | FLOAT_NUMBER 4 "42E+" | ||
10 | WHITESPACE 1 "\n" | ||
11 | FLOAT_NUMBER 4 "42E-" | ||
12 | WHITESPACE 2 "\n\n" | ||
13 | INT_NUMBER 2 "42" | ||
14 | DOT 1 "." | ||
15 | IDENT 1 "e" | ||
16 | PLUS 1 "+" | ||
17 | WHITESPACE 1 "\n" | ||
18 | INT_NUMBER 2 "42" | ||
19 | DOT 1 "." | ||
20 | IDENT 1 "e" | ||
21 | MINUS 1 "-" | ||
22 | WHITESPACE 1 "\n" | ||
23 | INT_NUMBER 2 "42" | ||
24 | DOT 1 "." | ||
25 | IDENT 1 "E" | ||
26 | PLUS 1 "+" | ||
27 | WHITESPACE 1 "\n" | ||
28 | INT_NUMBER 2 "42" | ||
29 | DOT 1 "." | ||
30 | IDENT 1 "E" | ||
31 | MINUS 1 "-" | ||
32 | WHITESPACE 2 "\n\n" | ||
33 | FLOAT_NUMBER 6 "42.2e+" | ||
34 | WHITESPACE 1 "\n" | ||
35 | FLOAT_NUMBER 6 "42.2e-" | ||
36 | WHITESPACE 1 "\n" | ||
37 | FLOAT_NUMBER 6 "42.2E+" | ||
38 | WHITESPACE 1 "\n" | ||
39 | FLOAT_NUMBER 6 "42.2E-" | ||
40 | WHITESPACE 2 "\n\n" | ||
41 | FLOAT_NUMBER 9 "42.2e+f32" | ||
42 | WHITESPACE 1 "\n" | ||
43 | FLOAT_NUMBER 9 "42.2e-f32" | ||
44 | WHITESPACE 1 "\n" | ||
45 | FLOAT_NUMBER 9 "42.2E+f32" | ||
46 | WHITESPACE 1 "\n" | ||
47 | FLOAT_NUMBER 9 "42.2E-f32" | ||
48 | WHITESPACE 1 "\n" | ||
49 | > error[0; 2) token("0e") msg(Missing digits after the exponent symbol) | ||
50 | > error[3; 5) token("0E") msg(Missing digits after the exponent symbol) | ||
51 | > error[7; 11) token("42e+") msg(Missing digits after the exponent symbol) | ||
52 | > error[12; 16) token("42e-") msg(Missing digits after the exponent symbol) | ||
53 | > error[17; 21) token("42E+") msg(Missing digits after the exponent symbol) | ||
54 | > error[22; 26) token("42E-") msg(Missing digits after the exponent symbol) | ||
55 | > error[53; 59) token("42.2e+") msg(Missing digits after the exponent symbol) | ||
56 | > error[60; 66) token("42.2e-") msg(Missing digits after the exponent symbol) | ||
57 | > error[67; 73) token("42.2E+") msg(Missing digits after the exponent symbol) | ||
58 | > error[74; 80) token("42.2E-") msg(Missing digits after the exponent symbol) | ||
59 | > error[82; 91) token("42.2e+f32") msg(Missing digits after the exponent symbol) | ||
60 | > error[92; 101) token("42.2e-f32") msg(Missing digits after the exponent symbol) | ||
61 | > error[102; 111) token("42.2E+f32") msg(Missing digits after the exponent symbol) | ||
62 | > error[112; 121) token("42.2E-f32") msg(Missing digits after the exponent symbol) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.rs b/crates/ra_syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.rs new file mode 100644 index 000000000..a7698a404 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.rs | |||
@@ -0,0 +1,2 @@ | |||
1 | '1 | ||
2 | '1lifetime | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.txt b/crates/ra_syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.txt new file mode 100644 index 000000000..89b38bfac --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.txt | |||
@@ -0,0 +1,6 @@ | |||
1 | LIFETIME 2 "\'1" | ||
2 | WHITESPACE 1 "\n" | ||
3 | LIFETIME 10 "\'1lifetime" | ||
4 | WHITESPACE 1 "\n" | ||
5 | > error[0; 2) token("\'1") msg(Lifetime name cannot start with a number) | ||
6 | > error[3; 13) token("\'1lifetime") msg(Lifetime name cannot start with a number) | ||
diff --git a/crates/ra_syntax/test_data/lexer/0001_hello.rs b/crates/ra_syntax/test_data/lexer/ok/0001_hello.rs index 95d09f2b1..95d09f2b1 100644 --- a/crates/ra_syntax/test_data/lexer/0001_hello.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0001_hello.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0001_hello.txt b/crates/ra_syntax/test_data/lexer/ok/0001_hello.txt index 27a5940a9..27a5940a9 100644 --- a/crates/ra_syntax/test_data/lexer/0001_hello.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0001_hello.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/0002_whitespace.rs b/crates/ra_syntax/test_data/lexer/ok/0002_whitespace.rs index 08fce1418..08fce1418 100644 --- a/crates/ra_syntax/test_data/lexer/0002_whitespace.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0002_whitespace.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0002_whitespace.txt b/crates/ra_syntax/test_data/lexer/ok/0002_whitespace.txt index 01d260918..01d260918 100644 --- a/crates/ra_syntax/test_data/lexer/0002_whitespace.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0002_whitespace.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/0003_ident.rs b/crates/ra_syntax/test_data/lexer/ok/0003_ident.rs index c05c9c009..c05c9c009 100644 --- a/crates/ra_syntax/test_data/lexer/0003_ident.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0003_ident.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0003_ident.txt b/crates/ra_syntax/test_data/lexer/ok/0003_ident.txt index 4a0d5c053..4a0d5c053 100644 --- a/crates/ra_syntax/test_data/lexer/0003_ident.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0003_ident.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/0004_numbers.rs b/crates/ra_syntax/test_data/lexer/ok/0004_numbers.rs index dc974b553..bc761c235 100644 --- a/crates/ra_syntax/test_data/lexer/0004_numbers.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0004_numbers.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | 0 0b 0o 0x 00 0_ 0. 0e 0E 0z | 1 | 0 00 0_ 0. 0z |
2 | 01790 0b1790 0o1790 0x1790aAbBcCdDeEfF 001279 0_1279 0.1279 0e1279 0E1279 | 2 | 01790 0b1790 0o1790 0x1790aAbBcCdDeEfF 001279 0_1279 0.1279 0e1279 0E1279 |
3 | 0..2 | 3 | 0..2 |
4 | 0.foo() | 4 | 0.foo() |
@@ -6,4 +6,4 @@ | |||
6 | 0.e+1 | 6 | 0.e+1 |
7 | 0.0E-2 | 7 | 0.0E-2 |
8 | 0___0.10000____0000e+111__ | 8 | 0___0.10000____0000e+111__ |
9 | 1i64 92.0f32 11__s \ No newline at end of file | 9 | 1i64 92.0f32 11__s |
diff --git a/crates/ra_syntax/test_data/lexer/0004_numbers.txt b/crates/ra_syntax/test_data/lexer/ok/0004_numbers.txt index 7bb89b8ae..e19fc5789 100644 --- a/crates/ra_syntax/test_data/lexer/0004_numbers.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0004_numbers.txt | |||
@@ -1,21 +1,11 @@ | |||
1 | INT_NUMBER 1 "0" | 1 | INT_NUMBER 1 "0" |
2 | WHITESPACE 1 " " | 2 | WHITESPACE 1 " " |
3 | INT_NUMBER 2 "0b" | ||
4 | WHITESPACE 1 " " | ||
5 | INT_NUMBER 2 "0o" | ||
6 | WHITESPACE 1 " " | ||
7 | INT_NUMBER 2 "0x" | ||
8 | WHITESPACE 1 " " | ||
9 | INT_NUMBER 2 "00" | 3 | INT_NUMBER 2 "00" |
10 | WHITESPACE 1 " " | 4 | WHITESPACE 1 " " |
11 | INT_NUMBER 2 "0_" | 5 | INT_NUMBER 2 "0_" |
12 | WHITESPACE 1 " " | 6 | WHITESPACE 1 " " |
13 | FLOAT_NUMBER 2 "0." | 7 | FLOAT_NUMBER 2 "0." |
14 | WHITESPACE 1 " " | 8 | WHITESPACE 1 " " |
15 | FLOAT_NUMBER 2 "0e" | ||
16 | WHITESPACE 1 " " | ||
17 | FLOAT_NUMBER 2 "0E" | ||
18 | WHITESPACE 1 " " | ||
19 | INT_NUMBER 2 "0z" | 9 | INT_NUMBER 2 "0z" |
20 | WHITESPACE 1 "\n" | 10 | WHITESPACE 1 "\n" |
21 | INT_NUMBER 5 "01790" | 11 | INT_NUMBER 5 "01790" |
@@ -64,3 +54,4 @@ WHITESPACE 1 " " | |||
64 | FLOAT_NUMBER 7 "92.0f32" | 54 | FLOAT_NUMBER 7 "92.0f32" |
65 | WHITESPACE 1 " " | 55 | WHITESPACE 1 " " |
66 | INT_NUMBER 5 "11__s" | 56 | INT_NUMBER 5 "11__s" |
57 | WHITESPACE 1 "\n" | ||
diff --git a/crates/ra_syntax/test_data/lexer/0005_symbols.rs b/crates/ra_syntax/test_data/lexer/ok/0005_symbols.rs index 487569b5a..487569b5a 100644 --- a/crates/ra_syntax/test_data/lexer/0005_symbols.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0005_symbols.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0005_symbols.txt b/crates/ra_syntax/test_data/lexer/ok/0005_symbols.txt index 469a90e42..469a90e42 100644 --- a/crates/ra_syntax/test_data/lexer/0005_symbols.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0005_symbols.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/0006_chars.rs b/crates/ra_syntax/test_data/lexer/ok/0006_chars.rs index 454ee0a5f..454ee0a5f 100644 --- a/crates/ra_syntax/test_data/lexer/0006_chars.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0006_chars.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0006_chars.txt b/crates/ra_syntax/test_data/lexer/ok/0006_chars.txt index 950954fbc..950954fbc 100644 --- a/crates/ra_syntax/test_data/lexer/0006_chars.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0006_chars.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/0007_lifetimes.rs b/crates/ra_syntax/test_data/lexer/ok/0007_lifetimes.rs index b764f1dce..b764f1dce 100644 --- a/crates/ra_syntax/test_data/lexer/0007_lifetimes.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0007_lifetimes.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0007_lifetimes.txt b/crates/ra_syntax/test_data/lexer/ok/0007_lifetimes.txt index 005c29100..005c29100 100644 --- a/crates/ra_syntax/test_data/lexer/0007_lifetimes.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0007_lifetimes.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/0008_byte_strings.rs b/crates/ra_syntax/test_data/lexer/ok/0008_byte_strings.rs index b54930f5e..b54930f5e 100644 --- a/crates/ra_syntax/test_data/lexer/0008_byte_strings.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0008_byte_strings.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0008_byte_strings.txt b/crates/ra_syntax/test_data/lexer/ok/0008_byte_strings.txt index bc03b51a8..bc03b51a8 100644 --- a/crates/ra_syntax/test_data/lexer/0008_byte_strings.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0008_byte_strings.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/0009_strings.rs b/crates/ra_syntax/test_data/lexer/ok/0009_strings.rs index 4ddb5bffc..4ddb5bffc 100644 --- a/crates/ra_syntax/test_data/lexer/0009_strings.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0009_strings.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0009_strings.txt b/crates/ra_syntax/test_data/lexer/ok/0009_strings.txt index 4cb4d711d..4cb4d711d 100644 --- a/crates/ra_syntax/test_data/lexer/0009_strings.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0009_strings.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/ok/0010_single_line_comments.rs b/crates/ra_syntax/test_data/lexer/ok/0010_single_line_comments.rs new file mode 100644 index 000000000..4b6653f9c --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/ok/0010_single_line_comments.rs | |||
@@ -0,0 +1,12 @@ | |||
1 | #!/usr/bin/env bash | ||
2 | // hello | ||
3 | //! World | ||
4 | //!! Inner line doc | ||
5 | /// Outer line doc | ||
6 | //// Just a comment | ||
7 | |||
8 | // | ||
9 | //! | ||
10 | //!! | ||
11 | /// | ||
12 | //// | ||
diff --git a/crates/ra_syntax/test_data/lexer/ok/0010_single_line_comments.txt b/crates/ra_syntax/test_data/lexer/ok/0010_single_line_comments.txt new file mode 100644 index 000000000..98a3818c0 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/ok/0010_single_line_comments.txt | |||
@@ -0,0 +1,22 @@ | |||
1 | SHEBANG 19 "#!/usr/bin/env bash" | ||
2 | WHITESPACE 1 "\n" | ||
3 | COMMENT 8 "// hello" | ||
4 | WHITESPACE 1 "\n" | ||
5 | COMMENT 9 "//! World" | ||
6 | WHITESPACE 1 "\n" | ||
7 | COMMENT 19 "//!! Inner line doc" | ||
8 | WHITESPACE 1 "\n" | ||
9 | COMMENT 18 "/// Outer line doc" | ||
10 | WHITESPACE 1 "\n" | ||
11 | COMMENT 19 "//// Just a comment" | ||
12 | WHITESPACE 2 "\n\n" | ||
13 | COMMENT 2 "//" | ||
14 | WHITESPACE 1 "\n" | ||
15 | COMMENT 3 "//!" | ||
16 | WHITESPACE 1 "\n" | ||
17 | COMMENT 4 "//!!" | ||
18 | WHITESPACE 1 "\n" | ||
19 | COMMENT 3 "///" | ||
20 | WHITESPACE 1 "\n" | ||
21 | COMMENT 4 "////" | ||
22 | WHITESPACE 1 "\n" | ||
diff --git a/crates/ra_syntax/test_data/lexer/0011_keywords.rs b/crates/ra_syntax/test_data/lexer/ok/0011_keywords.rs index 1e91bff4e..1e91bff4e 100644 --- a/crates/ra_syntax/test_data/lexer/0011_keywords.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0011_keywords.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0011_keywords.txt b/crates/ra_syntax/test_data/lexer/ok/0011_keywords.txt index 22c00eefb..22c00eefb 100644 --- a/crates/ra_syntax/test_data/lexer/0011_keywords.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0011_keywords.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/00012_block_comment.rs b/crates/ra_syntax/test_data/lexer/ok/0012_block_comment.rs index 708aac197..b880a59d9 100644 --- a/crates/ra_syntax/test_data/lexer/00012_block_comment.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0012_block_comment.rs | |||
@@ -1,4 +1,3 @@ | |||
1 | /* */ | 1 | /* */ |
2 | /**/ | 2 | /**/ |
3 | /* /* */ */ | 3 | /* /* */ */ |
4 | /* | ||
diff --git a/crates/ra_syntax/test_data/lexer/00012_block_comment.txt b/crates/ra_syntax/test_data/lexer/ok/0012_block_comment.txt index 9958b2518..2618e287e 100644 --- a/crates/ra_syntax/test_data/lexer/00012_block_comment.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0012_block_comment.txt | |||
@@ -4,4 +4,3 @@ COMMENT 4 "/**/" | |||
4 | WHITESPACE 1 "\n" | 4 | WHITESPACE 1 "\n" |
5 | COMMENT 11 "/* /* */ */" | 5 | COMMENT 11 "/* /* */ */" |
6 | WHITESPACE 1 "\n" | 6 | WHITESPACE 1 "\n" |
7 | COMMENT 3 "/*\n" | ||
diff --git a/crates/ra_syntax/test_data/lexer/0013_raw_strings.rs b/crates/ra_syntax/test_data/lexer/ok/0013_raw_strings.rs index e5ed0b693..e5ed0b693 100644 --- a/crates/ra_syntax/test_data/lexer/0013_raw_strings.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0013_raw_strings.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0013_raw_strings.txt b/crates/ra_syntax/test_data/lexer/ok/0013_raw_strings.txt index 9cf0957d1..9cf0957d1 100644 --- a/crates/ra_syntax/test_data/lexer/0013_raw_strings.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0013_raw_strings.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/0016_raw_ident.rs b/crates/ra_syntax/test_data/lexer/ok/0014_raw_ident.rs index b40a1b6a2..b40a1b6a2 100644 --- a/crates/ra_syntax/test_data/lexer/0016_raw_ident.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0014_raw_ident.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0016_raw_ident.txt b/crates/ra_syntax/test_data/lexer/ok/0014_raw_ident.txt index 484689693..484689693 100644 --- a/crates/ra_syntax/test_data/lexer/0016_raw_ident.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0014_raw_ident.txt | |||
diff --git a/crates/ra_syntax/test_data/parser/inline/err/0010_wrong_order_fns.rs b/crates/ra_syntax/test_data/parser/inline/err/0010_wrong_order_fns.rs index 16edee95d..731e58013 100644 --- a/crates/ra_syntax/test_data/parser/inline/err/0010_wrong_order_fns.rs +++ b/crates/ra_syntax/test_data/parser/inline/err/0010_wrong_order_fns.rs | |||
@@ -1,2 +1,2 @@ | |||
1 | async unsafe fn foo() {} | 1 | unsafe async fn foo() {} |
2 | unsafe const fn bar() {} | 2 | unsafe const fn bar() {} |
diff --git a/crates/ra_syntax/test_data/parser/inline/err/0010_wrong_order_fns.txt b/crates/ra_syntax/test_data/parser/inline/err/0010_wrong_order_fns.txt index 2ea6a566d..289193b9e 100644 --- a/crates/ra_syntax/test_data/parser/inline/err/0010_wrong_order_fns.txt +++ b/crates/ra_syntax/test_data/parser/inline/err/0010_wrong_order_fns.txt | |||
@@ -1,9 +1,9 @@ | |||
1 | SOURCE_FILE@[0; 50) | 1 | SOURCE_FILE@[0; 50) |
2 | ERROR@[0; 5) | 2 | ERROR@[0; 6) |
3 | ASYNC_KW@[0; 5) "async" | 3 | UNSAFE_KW@[0; 6) "unsafe" |
4 | WHITESPACE@[5; 6) " " | 4 | WHITESPACE@[6; 7) " " |
5 | FN_DEF@[6; 24) | 5 | FN_DEF@[7; 24) |
6 | UNSAFE_KW@[6; 12) "unsafe" | 6 | ASYNC_KW@[7; 12) "async" |
7 | WHITESPACE@[12; 13) " " | 7 | WHITESPACE@[12; 13) " " |
8 | FN_KW@[13; 15) "fn" | 8 | FN_KW@[13; 15) "fn" |
9 | WHITESPACE@[15; 16) " " | 9 | WHITESPACE@[15; 16) " " |
@@ -37,5 +37,5 @@ SOURCE_FILE@[0; 50) | |||
37 | L_CURLY@[47; 48) "{" | 37 | L_CURLY@[47; 48) "{" |
38 | R_CURLY@[48; 49) "}" | 38 | R_CURLY@[48; 49) "}" |
39 | WHITESPACE@[49; 50) "\n" | 39 | WHITESPACE@[49; 50) "\n" |
40 | error 5: expected existential, fn, trait or impl | 40 | error 6: expected existential, fn, trait or impl |
41 | error 31: expected existential, fn, trait or impl | 41 | error 31: expected existential, fn, trait or impl |
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs b/crates/ra_syntax/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs index d8c23c76a..93636e926 100644 --- a/crates/ra_syntax/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs +++ b/crates/ra_syntax/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs | |||
@@ -1 +1 @@ | |||
type F = Box<Fn(a: i32, &b: &i32, &mut c: &i32, ())>; | type F = Box<Fn(i32, &i32, &i32, ())>; | ||
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0004_value_parameters_no_patterns.txt b/crates/ra_syntax/test_data/parser/inline/ok/0004_value_parameters_no_patterns.txt index 8cfba8420..9241f6fb2 100644 --- a/crates/ra_syntax/test_data/parser/inline/ok/0004_value_parameters_no_patterns.txt +++ b/crates/ra_syntax/test_data/parser/inline/ok/0004_value_parameters_no_patterns.txt | |||
@@ -1,5 +1,5 @@ | |||
1 | SOURCE_FILE@[0; 54) | 1 | SOURCE_FILE@[0; 39) |
2 | TYPE_ALIAS_DEF@[0; 53) | 2 | TYPE_ALIAS_DEF@[0; 38) |
3 | TYPE_KW@[0; 4) "type" | 3 | TYPE_KW@[0; 4) "type" |
4 | WHITESPACE@[4; 5) " " | 4 | WHITESPACE@[4; 5) " " |
5 | NAME@[5; 6) | 5 | NAME@[5; 6) |
@@ -7,75 +7,54 @@ SOURCE_FILE@[0; 54) | |||
7 | WHITESPACE@[6; 7) " " | 7 | WHITESPACE@[6; 7) " " |
8 | EQ@[7; 8) "=" | 8 | EQ@[7; 8) "=" |
9 | WHITESPACE@[8; 9) " " | 9 | WHITESPACE@[8; 9) " " |
10 | PATH_TYPE@[9; 52) | 10 | PATH_TYPE@[9; 37) |
11 | PATH@[9; 52) | 11 | PATH@[9; 37) |
12 | PATH_SEGMENT@[9; 52) | 12 | PATH_SEGMENT@[9; 37) |
13 | NAME_REF@[9; 12) | 13 | NAME_REF@[9; 12) |
14 | IDENT@[9; 12) "Box" | 14 | IDENT@[9; 12) "Box" |
15 | TYPE_ARG_LIST@[12; 52) | 15 | TYPE_ARG_LIST@[12; 37) |
16 | L_ANGLE@[12; 13) "<" | 16 | L_ANGLE@[12; 13) "<" |
17 | TYPE_ARG@[13; 51) | 17 | TYPE_ARG@[13; 36) |
18 | PATH_TYPE@[13; 51) | 18 | PATH_TYPE@[13; 36) |
19 | PATH@[13; 51) | 19 | PATH@[13; 36) |
20 | PATH_SEGMENT@[13; 51) | 20 | PATH_SEGMENT@[13; 36) |
21 | NAME_REF@[13; 15) | 21 | NAME_REF@[13; 15) |
22 | IDENT@[13; 15) "Fn" | 22 | IDENT@[13; 15) "Fn" |
23 | PARAM_LIST@[15; 51) | 23 | PARAM_LIST@[15; 36) |
24 | L_PAREN@[15; 16) "(" | 24 | L_PAREN@[15; 16) "(" |
25 | PARAM@[16; 22) | 25 | PARAM@[16; 19) |
26 | BIND_PAT@[16; 17) | 26 | PATH_TYPE@[16; 19) |
27 | NAME@[16; 17) | 27 | PATH@[16; 19) |
28 | IDENT@[16; 17) "a" | 28 | PATH_SEGMENT@[16; 19) |
29 | COLON@[17; 18) ":" | 29 | NAME_REF@[16; 19) |
30 | WHITESPACE@[18; 19) " " | 30 | IDENT@[16; 19) "i32" |
31 | PATH_TYPE@[19; 22) | 31 | COMMA@[19; 20) "," |
32 | PATH@[19; 22) | 32 | WHITESPACE@[20; 21) " " |
33 | PATH_SEGMENT@[19; 22) | 33 | PARAM@[21; 25) |
34 | NAME_REF@[19; 22) | 34 | REFERENCE_TYPE@[21; 25) |
35 | IDENT@[19; 22) "i32" | 35 | AMP@[21; 22) "&" |
36 | COMMA@[22; 23) "," | 36 | PATH_TYPE@[22; 25) |
37 | WHITESPACE@[23; 24) " " | 37 | PATH@[22; 25) |
38 | PARAM@[24; 32) | 38 | PATH_SEGMENT@[22; 25) |
39 | REF_PAT@[24; 26) | 39 | NAME_REF@[22; 25) |
40 | AMP@[24; 25) "&" | 40 | IDENT@[22; 25) "i32" |
41 | BIND_PAT@[25; 26) | 41 | COMMA@[25; 26) "," |
42 | NAME@[25; 26) | 42 | WHITESPACE@[26; 27) " " |
43 | IDENT@[25; 26) "b" | 43 | PARAM@[27; 31) |
44 | COLON@[26; 27) ":" | 44 | REFERENCE_TYPE@[27; 31) |
45 | WHITESPACE@[27; 28) " " | 45 | AMP@[27; 28) "&" |
46 | REFERENCE_TYPE@[28; 32) | 46 | PATH_TYPE@[28; 31) |
47 | AMP@[28; 29) "&" | 47 | PATH@[28; 31) |
48 | PATH_TYPE@[29; 32) | 48 | PATH_SEGMENT@[28; 31) |
49 | PATH@[29; 32) | 49 | NAME_REF@[28; 31) |
50 | PATH_SEGMENT@[29; 32) | 50 | IDENT@[28; 31) "i32" |
51 | NAME_REF@[29; 32) | 51 | COMMA@[31; 32) "," |
52 | IDENT@[29; 32) "i32" | 52 | WHITESPACE@[32; 33) " " |
53 | COMMA@[32; 33) "," | 53 | PARAM@[33; 35) |
54 | WHITESPACE@[33; 34) " " | 54 | TUPLE_TYPE@[33; 35) |
55 | PARAM@[34; 46) | 55 | L_PAREN@[33; 34) "(" |
56 | REF_PAT@[34; 40) | 56 | R_PAREN@[34; 35) ")" |
57 | AMP@[34; 35) "&" | 57 | R_PAREN@[35; 36) ")" |
58 | MUT_KW@[35; 38) "mut" | 58 | R_ANGLE@[36; 37) ">" |
59 | WHITESPACE@[38; 39) " " | 59 | SEMI@[37; 38) ";" |
60 | BIND_PAT@[39; 40) | 60 | WHITESPACE@[38; 39) "\n" |
61 | NAME@[39; 40) | ||
62 | IDENT@[39; 40) "c" | ||
63 | COLON@[40; 41) ":" | ||
64 | WHITESPACE@[41; 42) " " | ||
65 | REFERENCE_TYPE@[42; 46) | ||
66 | AMP@[42; 43) "&" | ||
67 | PATH_TYPE@[43; 46) | ||
68 | PATH@[43; 46) | ||
69 | PATH_SEGMENT@[43; 46) | ||
70 | NAME_REF@[43; 46) | ||
71 | IDENT@[43; 46) "i32" | ||
72 | COMMA@[46; 47) "," | ||
73 | WHITESPACE@[47; 48) " " | ||
74 | PARAM@[48; 50) | ||
75 | TUPLE_TYPE@[48; 50) | ||
76 | L_PAREN@[48; 49) "(" | ||
77 | R_PAREN@[49; 50) ")" | ||
78 | R_PAREN@[50; 51) ")" | ||
79 | R_ANGLE@[51; 52) ">" | ||
80 | SEMI@[52; 53) ";" | ||
81 | WHITESPACE@[53; 54) "\n" | ||
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0104_path_fn_trait_args.rs b/crates/ra_syntax/test_data/parser/inline/ok/0104_path_fn_trait_args.rs index aef45e561..17ed20e5b 100644 --- a/crates/ra_syntax/test_data/parser/inline/ok/0104_path_fn_trait_args.rs +++ b/crates/ra_syntax/test_data/parser/inline/ok/0104_path_fn_trait_args.rs | |||
@@ -1 +1 @@ | |||
type F = Box<Fn(x: i32) -> ()>; | type F = Box<Fn(i32) -> ()>; | ||
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0104_path_fn_trait_args.txt b/crates/ra_syntax/test_data/parser/inline/ok/0104_path_fn_trait_args.txt index d6f196811..a983d5954 100644 --- a/crates/ra_syntax/test_data/parser/inline/ok/0104_path_fn_trait_args.txt +++ b/crates/ra_syntax/test_data/parser/inline/ok/0104_path_fn_trait_args.txt | |||
@@ -1,5 +1,5 @@ | |||
1 | SOURCE_FILE@[0; 32) | 1 | SOURCE_FILE@[0; 29) |
2 | TYPE_ALIAS_DEF@[0; 31) | 2 | TYPE_ALIAS_DEF@[0; 28) |
3 | TYPE_KW@[0; 4) "type" | 3 | TYPE_KW@[0; 4) "type" |
4 | WHITESPACE@[4; 5) " " | 4 | WHITESPACE@[4; 5) " " |
5 | NAME@[5; 6) | 5 | NAME@[5; 6) |
@@ -7,40 +7,35 @@ SOURCE_FILE@[0; 32) | |||
7 | WHITESPACE@[6; 7) " " | 7 | WHITESPACE@[6; 7) " " |
8 | EQ@[7; 8) "=" | 8 | EQ@[7; 8) "=" |
9 | WHITESPACE@[8; 9) " " | 9 | WHITESPACE@[8; 9) " " |
10 | PATH_TYPE@[9; 30) | 10 | PATH_TYPE@[9; 27) |
11 | PATH@[9; 30) | 11 | PATH@[9; 27) |
12 | PATH_SEGMENT@[9; 30) | 12 | PATH_SEGMENT@[9; 27) |
13 | NAME_REF@[9; 12) | 13 | NAME_REF@[9; 12) |
14 | IDENT@[9; 12) "Box" | 14 | IDENT@[9; 12) "Box" |
15 | TYPE_ARG_LIST@[12; 30) | 15 | TYPE_ARG_LIST@[12; 27) |
16 | L_ANGLE@[12; 13) "<" | 16 | L_ANGLE@[12; 13) "<" |
17 | TYPE_ARG@[13; 29) | 17 | TYPE_ARG@[13; 26) |
18 | PATH_TYPE@[13; 29) | 18 | PATH_TYPE@[13; 26) |
19 | PATH@[13; 29) | 19 | PATH@[13; 26) |
20 | PATH_SEGMENT@[13; 29) | 20 | PATH_SEGMENT@[13; 26) |
21 | NAME_REF@[13; 15) | 21 | NAME_REF@[13; 15) |
22 | IDENT@[13; 15) "Fn" | 22 | IDENT@[13; 15) "Fn" |
23 | PARAM_LIST@[15; 23) | 23 | PARAM_LIST@[15; 20) |
24 | L_PAREN@[15; 16) "(" | 24 | L_PAREN@[15; 16) "(" |
25 | PARAM@[16; 22) | 25 | PARAM@[16; 19) |
26 | BIND_PAT@[16; 17) | 26 | PATH_TYPE@[16; 19) |
27 | NAME@[16; 17) | 27 | PATH@[16; 19) |
28 | IDENT@[16; 17) "x" | 28 | PATH_SEGMENT@[16; 19) |
29 | COLON@[17; 18) ":" | 29 | NAME_REF@[16; 19) |
30 | WHITESPACE@[18; 19) " " | 30 | IDENT@[16; 19) "i32" |
31 | PATH_TYPE@[19; 22) | 31 | R_PAREN@[19; 20) ")" |
32 | PATH@[19; 22) | 32 | WHITESPACE@[20; 21) " " |
33 | PATH_SEGMENT@[19; 22) | 33 | RET_TYPE@[21; 26) |
34 | NAME_REF@[19; 22) | 34 | THIN_ARROW@[21; 23) "->" |
35 | IDENT@[19; 22) "i32" | 35 | WHITESPACE@[23; 24) " " |
36 | R_PAREN@[22; 23) ")" | 36 | TUPLE_TYPE@[24; 26) |
37 | WHITESPACE@[23; 24) " " | 37 | L_PAREN@[24; 25) "(" |
38 | RET_TYPE@[24; 29) | 38 | R_PAREN@[25; 26) ")" |
39 | THIN_ARROW@[24; 26) "->" | 39 | R_ANGLE@[26; 27) ">" |
40 | WHITESPACE@[26; 27) " " | 40 | SEMI@[27; 28) ";" |
41 | TUPLE_TYPE@[27; 29) | 41 | WHITESPACE@[28; 29) "\n" |
42 | L_PAREN@[27; 28) "(" | ||
43 | R_PAREN@[28; 29) ")" | ||
44 | R_ANGLE@[29; 30) ">" | ||
45 | SEMI@[30; 31) ";" | ||
46 | WHITESPACE@[31; 32) "\n" | ||
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0128_combined_fns.rs b/crates/ra_syntax/test_data/parser/inline/ok/0128_combined_fns.rs index 46af91b82..126287145 100644 --- a/crates/ra_syntax/test_data/parser/inline/ok/0128_combined_fns.rs +++ b/crates/ra_syntax/test_data/parser/inline/ok/0128_combined_fns.rs | |||
@@ -1,2 +1,2 @@ | |||
1 | unsafe async fn foo() {} | 1 | async unsafe fn foo() {} |
2 | const unsafe fn bar() {} | 2 | const unsafe fn bar() {} |
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0128_combined_fns.txt b/crates/ra_syntax/test_data/parser/inline/ok/0128_combined_fns.txt index cae75c41d..8a972cdb2 100644 --- a/crates/ra_syntax/test_data/parser/inline/ok/0128_combined_fns.txt +++ b/crates/ra_syntax/test_data/parser/inline/ok/0128_combined_fns.txt | |||
@@ -1,8 +1,8 @@ | |||
1 | SOURCE_FILE@[0; 50) | 1 | SOURCE_FILE@[0; 50) |
2 | FN_DEF@[0; 24) | 2 | FN_DEF@[0; 24) |
3 | UNSAFE_KW@[0; 6) "unsafe" | 3 | ASYNC_KW@[0; 5) "async" |
4 | WHITESPACE@[6; 7) " " | 4 | WHITESPACE@[5; 6) " " |
5 | ASYNC_KW@[7; 12) "async" | 5 | UNSAFE_KW@[6; 12) "unsafe" |
6 | WHITESPACE@[12; 13) " " | 6 | WHITESPACE@[12; 13) " " |
7 | FN_KW@[13; 15) "fn" | 7 | FN_KW@[13; 15) "fn" |
8 | WHITESPACE@[15; 16) " " | 8 | WHITESPACE@[15; 16) " " |
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0152_fn_patterns.rs b/crates/ra_syntax/test_data/parser/inline/ok/0152_fn_patterns.rs new file mode 100644 index 000000000..b49e872d7 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/inline/ok/0152_fn_patterns.rs | |||
@@ -0,0 +1,6 @@ | |||
1 | impl U { | ||
2 | fn f1((a, b): (usize, usize)) {} | ||
3 | fn f2(S { a, b }: S) {} | ||
4 | fn f3(NewType(a): NewType) {} | ||
5 | fn f4(&&a: &&usize) {} | ||
6 | } | ||
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0152_fn_patterns.txt b/crates/ra_syntax/test_data/parser/inline/ok/0152_fn_patterns.txt new file mode 100644 index 000000000..933f5b7bd --- /dev/null +++ b/crates/ra_syntax/test_data/parser/inline/ok/0152_fn_patterns.txt | |||
@@ -0,0 +1,164 @@ | |||
1 | SOURCE_FILE@[0; 137) | ||
2 | IMPL_BLOCK@[0; 136) | ||
3 | IMPL_KW@[0; 4) "impl" | ||
4 | WHITESPACE@[4; 5) " " | ||
5 | PATH_TYPE@[5; 6) | ||
6 | PATH@[5; 6) | ||
7 | PATH_SEGMENT@[5; 6) | ||
8 | NAME_REF@[5; 6) | ||
9 | IDENT@[5; 6) "U" | ||
10 | WHITESPACE@[6; 7) " " | ||
11 | ITEM_LIST@[7; 136) | ||
12 | L_CURLY@[7; 8) "{" | ||
13 | WHITESPACE@[8; 13) "\n " | ||
14 | FN_DEF@[13; 45) | ||
15 | FN_KW@[13; 15) "fn" | ||
16 | WHITESPACE@[15; 16) " " | ||
17 | NAME@[16; 18) | ||
18 | IDENT@[16; 18) "f1" | ||
19 | PARAM_LIST@[18; 42) | ||
20 | L_PAREN@[18; 19) "(" | ||
21 | PARAM@[19; 41) | ||
22 | TUPLE_PAT@[19; 25) | ||
23 | L_PAREN@[19; 20) "(" | ||
24 | BIND_PAT@[20; 21) | ||
25 | NAME@[20; 21) | ||
26 | IDENT@[20; 21) "a" | ||
27 | COMMA@[21; 22) "," | ||
28 | WHITESPACE@[22; 23) " " | ||
29 | BIND_PAT@[23; 24) | ||
30 | NAME@[23; 24) | ||
31 | IDENT@[23; 24) "b" | ||
32 | R_PAREN@[24; 25) ")" | ||
33 | COLON@[25; 26) ":" | ||
34 | WHITESPACE@[26; 27) " " | ||
35 | TUPLE_TYPE@[27; 41) | ||
36 | L_PAREN@[27; 28) "(" | ||
37 | PATH_TYPE@[28; 33) | ||
38 | PATH@[28; 33) | ||
39 | PATH_SEGMENT@[28; 33) | ||
40 | NAME_REF@[28; 33) | ||
41 | IDENT@[28; 33) "usize" | ||
42 | COMMA@[33; 34) "," | ||
43 | WHITESPACE@[34; 35) " " | ||
44 | PATH_TYPE@[35; 40) | ||
45 | PATH@[35; 40) | ||
46 | PATH_SEGMENT@[35; 40) | ||
47 | NAME_REF@[35; 40) | ||
48 | IDENT@[35; 40) "usize" | ||
49 | R_PAREN@[40; 41) ")" | ||
50 | R_PAREN@[41; 42) ")" | ||
51 | WHITESPACE@[42; 43) " " | ||
52 | BLOCK_EXPR@[43; 45) | ||
53 | BLOCK@[43; 45) | ||
54 | L_CURLY@[43; 44) "{" | ||
55 | R_CURLY@[44; 45) "}" | ||
56 | WHITESPACE@[45; 50) "\n " | ||
57 | FN_DEF@[50; 73) | ||
58 | FN_KW@[50; 52) "fn" | ||
59 | WHITESPACE@[52; 53) " " | ||
60 | NAME@[53; 55) | ||
61 | IDENT@[53; 55) "f2" | ||
62 | PARAM_LIST@[55; 70) | ||
63 | L_PAREN@[55; 56) "(" | ||
64 | PARAM@[56; 69) | ||
65 | RECORD_PAT@[56; 66) | ||
66 | PATH@[56; 57) | ||
67 | PATH_SEGMENT@[56; 57) | ||
68 | NAME_REF@[56; 57) | ||
69 | IDENT@[56; 57) "S" | ||
70 | WHITESPACE@[57; 58) " " | ||
71 | RECORD_FIELD_PAT_LIST@[58; 66) | ||
72 | L_CURLY@[58; 59) "{" | ||
73 | WHITESPACE@[59; 60) " " | ||
74 | BIND_PAT@[60; 61) | ||
75 | NAME@[60; 61) | ||
76 | IDENT@[60; 61) "a" | ||
77 | COMMA@[61; 62) "," | ||
78 | WHITESPACE@[62; 63) " " | ||
79 | BIND_PAT@[63; 64) | ||
80 | NAME@[63; 64) | ||
81 | IDENT@[63; 64) "b" | ||
82 | WHITESPACE@[64; 65) " " | ||
83 | R_CURLY@[65; 66) "}" | ||
84 | COLON@[66; 67) ":" | ||
85 | WHITESPACE@[67; 68) " " | ||
86 | PATH_TYPE@[68; 69) | ||
87 | PATH@[68; 69) | ||
88 | PATH_SEGMENT@[68; 69) | ||
89 | NAME_REF@[68; 69) | ||
90 | IDENT@[68; 69) "S" | ||
91 | R_PAREN@[69; 70) ")" | ||
92 | WHITESPACE@[70; 71) " " | ||
93 | BLOCK_EXPR@[71; 73) | ||
94 | BLOCK@[71; 73) | ||
95 | L_CURLY@[71; 72) "{" | ||
96 | R_CURLY@[72; 73) "}" | ||
97 | WHITESPACE@[73; 78) "\n " | ||
98 | FN_DEF@[78; 107) | ||
99 | FN_KW@[78; 80) "fn" | ||
100 | WHITESPACE@[80; 81) " " | ||
101 | NAME@[81; 83) | ||
102 | IDENT@[81; 83) "f3" | ||
103 | PARAM_LIST@[83; 104) | ||
104 | L_PAREN@[83; 84) "(" | ||
105 | PARAM@[84; 103) | ||
106 | TUPLE_STRUCT_PAT@[84; 94) | ||
107 | PATH@[84; 91) | ||
108 | PATH_SEGMENT@[84; 91) | ||
109 | NAME_REF@[84; 91) | ||
110 | IDENT@[84; 91) "NewType" | ||
111 | L_PAREN@[91; 92) "(" | ||
112 | BIND_PAT@[92; 93) | ||
113 | NAME@[92; 93) | ||
114 | IDENT@[92; 93) "a" | ||
115 | R_PAREN@[93; 94) ")" | ||
116 | COLON@[94; 95) ":" | ||
117 | WHITESPACE@[95; 96) " " | ||
118 | PATH_TYPE@[96; 103) | ||
119 | PATH@[96; 103) | ||
120 | PATH_SEGMENT@[96; 103) | ||
121 | NAME_REF@[96; 103) | ||
122 | IDENT@[96; 103) "NewType" | ||
123 | R_PAREN@[103; 104) ")" | ||
124 | WHITESPACE@[104; 105) " " | ||
125 | BLOCK_EXPR@[105; 107) | ||
126 | BLOCK@[105; 107) | ||
127 | L_CURLY@[105; 106) "{" | ||
128 | R_CURLY@[106; 107) "}" | ||
129 | WHITESPACE@[107; 112) "\n " | ||
130 | FN_DEF@[112; 134) | ||
131 | FN_KW@[112; 114) "fn" | ||
132 | WHITESPACE@[114; 115) " " | ||
133 | NAME@[115; 117) | ||
134 | IDENT@[115; 117) "f4" | ||
135 | PARAM_LIST@[117; 131) | ||
136 | L_PAREN@[117; 118) "(" | ||
137 | PARAM@[118; 130) | ||
138 | REF_PAT@[118; 121) | ||
139 | AMP@[118; 119) "&" | ||
140 | REF_PAT@[119; 121) | ||
141 | AMP@[119; 120) "&" | ||
142 | BIND_PAT@[120; 121) | ||
143 | NAME@[120; 121) | ||
144 | IDENT@[120; 121) "a" | ||
145 | COLON@[121; 122) ":" | ||
146 | WHITESPACE@[122; 123) " " | ||
147 | REFERENCE_TYPE@[123; 130) | ||
148 | AMP@[123; 124) "&" | ||
149 | REFERENCE_TYPE@[124; 130) | ||
150 | AMP@[124; 125) "&" | ||
151 | PATH_TYPE@[125; 130) | ||
152 | PATH@[125; 130) | ||
153 | PATH_SEGMENT@[125; 130) | ||
154 | NAME_REF@[125; 130) | ||
155 | IDENT@[125; 130) "usize" | ||
156 | R_PAREN@[130; 131) ")" | ||
157 | WHITESPACE@[131; 132) " " | ||
158 | BLOCK_EXPR@[132; 134) | ||
159 | BLOCK@[132; 134) | ||
160 | L_CURLY@[132; 133) "{" | ||
161 | R_CURLY@[133; 134) "}" | ||
162 | WHITESPACE@[134; 135) "\n" | ||
163 | R_CURLY@[135; 136) "}" | ||
164 | WHITESPACE@[136; 137) "\n" | ||
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0153_trait_fn_patterns.rs b/crates/ra_syntax/test_data/parser/inline/ok/0153_trait_fn_patterns.rs new file mode 100644 index 000000000..a94bf378a --- /dev/null +++ b/crates/ra_syntax/test_data/parser/inline/ok/0153_trait_fn_patterns.rs | |||
@@ -0,0 +1,6 @@ | |||
1 | trait T { | ||
2 | fn f1((a, b): (usize, usize)) {} | ||
3 | fn f2(S { a, b }: S) {} | ||
4 | fn f3(NewType(a): NewType) {} | ||
5 | fn f4(&&a: &&usize) {} | ||
6 | } | ||
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0153_trait_fn_patterns.txt b/crates/ra_syntax/test_data/parser/inline/ok/0153_trait_fn_patterns.txt new file mode 100644 index 000000000..b22df8dbe --- /dev/null +++ b/crates/ra_syntax/test_data/parser/inline/ok/0153_trait_fn_patterns.txt | |||
@@ -0,0 +1,161 @@ | |||
1 | SOURCE_FILE@[0; 138) | ||
2 | TRAIT_DEF@[0; 137) | ||
3 | TRAIT_KW@[0; 5) "trait" | ||
4 | WHITESPACE@[5; 6) " " | ||
5 | NAME@[6; 7) | ||
6 | IDENT@[6; 7) "T" | ||
7 | WHITESPACE@[7; 8) " " | ||
8 | ITEM_LIST@[8; 137) | ||
9 | L_CURLY@[8; 9) "{" | ||
10 | WHITESPACE@[9; 14) "\n " | ||
11 | FN_DEF@[14; 46) | ||
12 | FN_KW@[14; 16) "fn" | ||
13 | WHITESPACE@[16; 17) " " | ||
14 | NAME@[17; 19) | ||
15 | IDENT@[17; 19) "f1" | ||
16 | PARAM_LIST@[19; 43) | ||
17 | L_PAREN@[19; 20) "(" | ||
18 | PARAM@[20; 42) | ||
19 | TUPLE_PAT@[20; 26) | ||
20 | L_PAREN@[20; 21) "(" | ||
21 | BIND_PAT@[21; 22) | ||
22 | NAME@[21; 22) | ||
23 | IDENT@[21; 22) "a" | ||
24 | COMMA@[22; 23) "," | ||
25 | WHITESPACE@[23; 24) " " | ||
26 | BIND_PAT@[24; 25) | ||
27 | NAME@[24; 25) | ||
28 | IDENT@[24; 25) "b" | ||
29 | R_PAREN@[25; 26) ")" | ||
30 | COLON@[26; 27) ":" | ||
31 | WHITESPACE@[27; 28) " " | ||
32 | TUPLE_TYPE@[28; 42) | ||
33 | L_PAREN@[28; 29) "(" | ||
34 | PATH_TYPE@[29; 34) | ||
35 | PATH@[29; 34) | ||
36 | PATH_SEGMENT@[29; 34) | ||
37 | NAME_REF@[29; 34) | ||
38 | IDENT@[29; 34) "usize" | ||
39 | COMMA@[34; 35) "," | ||
40 | WHITESPACE@[35; 36) " " | ||
41 | PATH_TYPE@[36; 41) | ||
42 | PATH@[36; 41) | ||
43 | PATH_SEGMENT@[36; 41) | ||
44 | NAME_REF@[36; 41) | ||
45 | IDENT@[36; 41) "usize" | ||
46 | R_PAREN@[41; 42) ")" | ||
47 | R_PAREN@[42; 43) ")" | ||
48 | WHITESPACE@[43; 44) " " | ||
49 | BLOCK_EXPR@[44; 46) | ||
50 | BLOCK@[44; 46) | ||
51 | L_CURLY@[44; 45) "{" | ||
52 | R_CURLY@[45; 46) "}" | ||
53 | WHITESPACE@[46; 51) "\n " | ||
54 | FN_DEF@[51; 74) | ||
55 | FN_KW@[51; 53) "fn" | ||
56 | WHITESPACE@[53; 54) " " | ||
57 | NAME@[54; 56) | ||
58 | IDENT@[54; 56) "f2" | ||
59 | PARAM_LIST@[56; 71) | ||
60 | L_PAREN@[56; 57) "(" | ||
61 | PARAM@[57; 70) | ||
62 | RECORD_PAT@[57; 67) | ||
63 | PATH@[57; 58) | ||
64 | PATH_SEGMENT@[57; 58) | ||
65 | NAME_REF@[57; 58) | ||
66 | IDENT@[57; 58) "S" | ||
67 | WHITESPACE@[58; 59) " " | ||
68 | RECORD_FIELD_PAT_LIST@[59; 67) | ||
69 | L_CURLY@[59; 60) "{" | ||
70 | WHITESPACE@[60; 61) " " | ||
71 | BIND_PAT@[61; 62) | ||
72 | NAME@[61; 62) | ||
73 | IDENT@[61; 62) "a" | ||
74 | COMMA@[62; 63) "," | ||
75 | WHITESPACE@[63; 64) " " | ||
76 | BIND_PAT@[64; 65) | ||
77 | NAME@[64; 65) | ||
78 | IDENT@[64; 65) "b" | ||
79 | WHITESPACE@[65; 66) " " | ||
80 | R_CURLY@[66; 67) "}" | ||
81 | COLON@[67; 68) ":" | ||
82 | WHITESPACE@[68; 69) " " | ||
83 | PATH_TYPE@[69; 70) | ||
84 | PATH@[69; 70) | ||
85 | PATH_SEGMENT@[69; 70) | ||
86 | NAME_REF@[69; 70) | ||
87 | IDENT@[69; 70) "S" | ||
88 | R_PAREN@[70; 71) ")" | ||
89 | WHITESPACE@[71; 72) " " | ||
90 | BLOCK_EXPR@[72; 74) | ||
91 | BLOCK@[72; 74) | ||
92 | L_CURLY@[72; 73) "{" | ||
93 | R_CURLY@[73; 74) "}" | ||
94 | WHITESPACE@[74; 79) "\n " | ||
95 | FN_DEF@[79; 108) | ||
96 | FN_KW@[79; 81) "fn" | ||
97 | WHITESPACE@[81; 82) " " | ||
98 | NAME@[82; 84) | ||
99 | IDENT@[82; 84) "f3" | ||
100 | PARAM_LIST@[84; 105) | ||
101 | L_PAREN@[84; 85) "(" | ||
102 | PARAM@[85; 104) | ||
103 | TUPLE_STRUCT_PAT@[85; 95) | ||
104 | PATH@[85; 92) | ||
105 | PATH_SEGMENT@[85; 92) | ||
106 | NAME_REF@[85; 92) | ||
107 | IDENT@[85; 92) "NewType" | ||
108 | L_PAREN@[92; 93) "(" | ||
109 | BIND_PAT@[93; 94) | ||
110 | NAME@[93; 94) | ||
111 | IDENT@[93; 94) "a" | ||
112 | R_PAREN@[94; 95) ")" | ||
113 | COLON@[95; 96) ":" | ||
114 | WHITESPACE@[96; 97) " " | ||
115 | PATH_TYPE@[97; 104) | ||
116 | PATH@[97; 104) | ||
117 | PATH_SEGMENT@[97; 104) | ||
118 | NAME_REF@[97; 104) | ||
119 | IDENT@[97; 104) "NewType" | ||
120 | R_PAREN@[104; 105) ")" | ||
121 | WHITESPACE@[105; 106) " " | ||
122 | BLOCK_EXPR@[106; 108) | ||
123 | BLOCK@[106; 108) | ||
124 | L_CURLY@[106; 107) "{" | ||
125 | R_CURLY@[107; 108) "}" | ||
126 | WHITESPACE@[108; 113) "\n " | ||
127 | FN_DEF@[113; 135) | ||
128 | FN_KW@[113; 115) "fn" | ||
129 | WHITESPACE@[115; 116) " " | ||
130 | NAME@[116; 118) | ||
131 | IDENT@[116; 118) "f4" | ||
132 | PARAM_LIST@[118; 132) | ||
133 | L_PAREN@[118; 119) "(" | ||
134 | PARAM@[119; 131) | ||
135 | REF_PAT@[119; 122) | ||
136 | AMP@[119; 120) "&" | ||
137 | REF_PAT@[120; 122) | ||
138 | AMP@[120; 121) "&" | ||
139 | BIND_PAT@[121; 122) | ||
140 | NAME@[121; 122) | ||
141 | IDENT@[121; 122) "a" | ||
142 | COLON@[122; 123) ":" | ||
143 | WHITESPACE@[123; 124) " " | ||
144 | REFERENCE_TYPE@[124; 131) | ||
145 | AMP@[124; 125) "&" | ||
146 | REFERENCE_TYPE@[125; 131) | ||
147 | AMP@[125; 126) "&" | ||
148 | PATH_TYPE@[126; 131) | ||
149 | PATH@[126; 131) | ||
150 | PATH_SEGMENT@[126; 131) | ||
151 | NAME_REF@[126; 131) | ||
152 | IDENT@[126; 131) "usize" | ||
153 | R_PAREN@[131; 132) ")" | ||
154 | WHITESPACE@[132; 133) " " | ||
155 | BLOCK_EXPR@[133; 135) | ||
156 | BLOCK@[133; 135) | ||
157 | L_CURLY@[133; 134) "{" | ||
158 | R_CURLY@[134; 135) "}" | ||
159 | WHITESPACE@[135; 136) "\n" | ||
160 | R_CURLY@[136; 137) "}" | ||
161 | WHITESPACE@[137; 138) "\n" | ||
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rs b/crates/ra_syntax/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rs new file mode 100644 index 000000000..80a1701fd --- /dev/null +++ b/crates/ra_syntax/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rs | |||
@@ -0,0 +1,2 @@ | |||
1 | type Foo = fn(Bar::Baz); | ||
2 | type Qux = fn(baz: Bar::Baz); | ||
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.txt b/crates/ra_syntax/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.txt new file mode 100644 index 000000000..cb686854a --- /dev/null +++ b/crates/ra_syntax/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.txt | |||
@@ -0,0 +1,58 @@ | |||
1 | SOURCE_FILE@[0; 55) | ||
2 | TYPE_ALIAS_DEF@[0; 24) | ||
3 | TYPE_KW@[0; 4) "type" | ||
4 | WHITESPACE@[4; 5) " " | ||
5 | NAME@[5; 8) | ||
6 | IDENT@[5; 8) "Foo" | ||
7 | WHITESPACE@[8; 9) " " | ||
8 | EQ@[9; 10) "=" | ||
9 | WHITESPACE@[10; 11) " " | ||
10 | FN_POINTER_TYPE@[11; 23) | ||
11 | FN_KW@[11; 13) "fn" | ||
12 | PARAM_LIST@[13; 23) | ||
13 | L_PAREN@[13; 14) "(" | ||
14 | PARAM@[14; 22) | ||
15 | PATH_TYPE@[14; 22) | ||
16 | PATH@[14; 22) | ||
17 | PATH@[14; 17) | ||
18 | PATH_SEGMENT@[14; 17) | ||
19 | NAME_REF@[14; 17) | ||
20 | IDENT@[14; 17) "Bar" | ||
21 | COLONCOLON@[17; 19) "::" | ||
22 | PATH_SEGMENT@[19; 22) | ||
23 | NAME_REF@[19; 22) | ||
24 | IDENT@[19; 22) "Baz" | ||
25 | R_PAREN@[22; 23) ")" | ||
26 | SEMI@[23; 24) ";" | ||
27 | WHITESPACE@[24; 25) "\n" | ||
28 | TYPE_ALIAS_DEF@[25; 54) | ||
29 | TYPE_KW@[25; 29) "type" | ||
30 | WHITESPACE@[29; 30) " " | ||
31 | NAME@[30; 33) | ||
32 | IDENT@[30; 33) "Qux" | ||
33 | WHITESPACE@[33; 34) " " | ||
34 | EQ@[34; 35) "=" | ||
35 | WHITESPACE@[35; 36) " " | ||
36 | FN_POINTER_TYPE@[36; 53) | ||
37 | FN_KW@[36; 38) "fn" | ||
38 | PARAM_LIST@[38; 53) | ||
39 | L_PAREN@[38; 39) "(" | ||
40 | PARAM@[39; 52) | ||
41 | BIND_PAT@[39; 42) | ||
42 | NAME@[39; 42) | ||
43 | IDENT@[39; 42) "baz" | ||
44 | COLON@[42; 43) ":" | ||
45 | WHITESPACE@[43; 44) " " | ||
46 | PATH_TYPE@[44; 52) | ||
47 | PATH@[44; 52) | ||
48 | PATH@[44; 47) | ||
49 | PATH_SEGMENT@[44; 47) | ||
50 | NAME_REF@[44; 47) | ||
51 | IDENT@[44; 47) "Bar" | ||
52 | COLONCOLON@[47; 49) "::" | ||
53 | PATH_SEGMENT@[49; 52) | ||
54 | NAME_REF@[49; 52) | ||
55 | IDENT@[49; 52) "Baz" | ||
56 | R_PAREN@[52; 53) ")" | ||
57 | SEMI@[53; 54) ";" | ||
58 | WHITESPACE@[54; 55) "\n" | ||
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0155_closure_params.rs b/crates/ra_syntax/test_data/parser/inline/ok/0155_closure_params.rs new file mode 100644 index 000000000..6ca8dd2d6 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/inline/ok/0155_closure_params.rs | |||
@@ -0,0 +1,3 @@ | |||
1 | fn main() { | ||
2 | let foo = |bar, baz: Baz, qux: Qux::Quux| (); | ||
3 | } | ||
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0155_closure_params.txt b/crates/ra_syntax/test_data/parser/inline/ok/0155_closure_params.txt new file mode 100644 index 000000000..98727ae98 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/inline/ok/0155_closure_params.txt | |||
@@ -0,0 +1,70 @@ | |||
1 | SOURCE_FILE@[0; 63) | ||
2 | FN_DEF@[0; 62) | ||
3 | FN_KW@[0; 2) "fn" | ||
4 | WHITESPACE@[2; 3) " " | ||
5 | NAME@[3; 7) | ||
6 | IDENT@[3; 7) "main" | ||
7 | PARAM_LIST@[7; 9) | ||
8 | L_PAREN@[7; 8) "(" | ||
9 | R_PAREN@[8; 9) ")" | ||
10 | WHITESPACE@[9; 10) " " | ||
11 | BLOCK_EXPR@[10; 62) | ||
12 | BLOCK@[10; 62) | ||
13 | L_CURLY@[10; 11) "{" | ||
14 | WHITESPACE@[11; 15) "\n " | ||
15 | LET_STMT@[15; 60) | ||
16 | LET_KW@[15; 18) "let" | ||
17 | WHITESPACE@[18; 19) " " | ||
18 | BIND_PAT@[19; 22) | ||
19 | NAME@[19; 22) | ||
20 | IDENT@[19; 22) "foo" | ||
21 | WHITESPACE@[22; 23) " " | ||
22 | EQ@[23; 24) "=" | ||
23 | WHITESPACE@[24; 25) " " | ||
24 | LAMBDA_EXPR@[25; 59) | ||
25 | PARAM_LIST@[25; 56) | ||
26 | PIPE@[25; 26) "|" | ||
27 | PARAM@[26; 29) | ||
28 | BIND_PAT@[26; 29) | ||
29 | NAME@[26; 29) | ||
30 | IDENT@[26; 29) "bar" | ||
31 | COMMA@[29; 30) "," | ||
32 | WHITESPACE@[30; 31) " " | ||
33 | PARAM@[31; 39) | ||
34 | BIND_PAT@[31; 34) | ||
35 | NAME@[31; 34) | ||
36 | IDENT@[31; 34) "baz" | ||
37 | COLON@[34; 35) ":" | ||
38 | WHITESPACE@[35; 36) " " | ||
39 | PATH_TYPE@[36; 39) | ||
40 | PATH@[36; 39) | ||
41 | PATH_SEGMENT@[36; 39) | ||
42 | NAME_REF@[36; 39) | ||
43 | IDENT@[36; 39) "Baz" | ||
44 | COMMA@[39; 40) "," | ||
45 | WHITESPACE@[40; 41) " " | ||
46 | PARAM@[41; 55) | ||
47 | BIND_PAT@[41; 44) | ||
48 | NAME@[41; 44) | ||
49 | IDENT@[41; 44) "qux" | ||
50 | COLON@[44; 45) ":" | ||
51 | WHITESPACE@[45; 46) " " | ||
52 | PATH_TYPE@[46; 55) | ||
53 | PATH@[46; 55) | ||
54 | PATH@[46; 49) | ||
55 | PATH_SEGMENT@[46; 49) | ||
56 | NAME_REF@[46; 49) | ||
57 | IDENT@[46; 49) "Qux" | ||
58 | COLONCOLON@[49; 51) "::" | ||
59 | PATH_SEGMENT@[51; 55) | ||
60 | NAME_REF@[51; 55) | ||
61 | IDENT@[51; 55) "Quux" | ||
62 | PIPE@[55; 56) "|" | ||
63 | WHITESPACE@[56; 57) " " | ||
64 | TUPLE_EXPR@[57; 59) | ||
65 | L_PAREN@[57; 58) "(" | ||
66 | R_PAREN@[58; 59) ")" | ||
67 | SEMI@[59; 60) ";" | ||
68 | WHITESPACE@[60; 61) "\n" | ||
69 | R_CURLY@[61; 62) "}" | ||
70 | WHITESPACE@[62; 63) "\n" | ||
diff --git a/crates/ra_syntax/test_data/parser/ok/0030_traits.rs b/crates/ra_syntax/test_data/parser/ok/0030_traits.rs index 23c4be0e1..ac30843ef 100644 --- a/crates/ra_syntax/test_data/parser/ok/0030_traits.rs +++ b/crates/ra_syntax/test_data/parser/ok/0030_traits.rs | |||
@@ -1,7 +1,3 @@ | |||
1 | pub trait WriteMessage { | ||
2 | fn write_message(&FrontendMessage); | ||
3 | } | ||
4 | |||
5 | trait Runnable { | 1 | trait Runnable { |
6 | fn handler(); | 2 | fn handler(); |
7 | } | 3 | } |
diff --git a/crates/ra_syntax/test_data/parser/ok/0030_traits.txt b/crates/ra_syntax/test_data/parser/ok/0030_traits.txt index b656c1a81..ac314ae50 100644 --- a/crates/ra_syntax/test_data/parser/ok/0030_traits.txt +++ b/crates/ra_syntax/test_data/parser/ok/0030_traits.txt | |||
@@ -1,93 +1,61 @@ | |||
1 | SOURCE_FILE@[0; 164) | 1 | SOURCE_FILE@[0; 96) |
2 | TRAIT_DEF@[0; 66) | 2 | TRAIT_DEF@[0; 36) |
3 | VISIBILITY@[0; 3) | 3 | TRAIT_KW@[0; 5) "trait" |
4 | PUB_KW@[0; 3) "pub" | 4 | WHITESPACE@[5; 6) " " |
5 | WHITESPACE@[3; 4) " " | 5 | NAME@[6; 14) |
6 | TRAIT_KW@[4; 9) "trait" | 6 | IDENT@[6; 14) "Runnable" |
7 | WHITESPACE@[9; 10) " " | 7 | WHITESPACE@[14; 15) " " |
8 | NAME@[10; 22) | 8 | ITEM_LIST@[15; 36) |
9 | IDENT@[10; 22) "WriteMessage" | 9 | L_CURLY@[15; 16) "{" |
10 | WHITESPACE@[22; 23) " " | 10 | WHITESPACE@[16; 21) "\n " |
11 | ITEM_LIST@[23; 66) | 11 | FN_DEF@[21; 34) |
12 | L_CURLY@[23; 24) "{" | 12 | FN_KW@[21; 23) "fn" |
13 | WHITESPACE@[24; 29) "\n " | 13 | WHITESPACE@[23; 24) " " |
14 | FN_DEF@[29; 64) | 14 | NAME@[24; 31) |
15 | FN_KW@[29; 31) "fn" | 15 | IDENT@[24; 31) "handler" |
16 | WHITESPACE@[31; 32) " " | 16 | PARAM_LIST@[31; 33) |
17 | NAME@[32; 45) | 17 | L_PAREN@[31; 32) "(" |
18 | IDENT@[32; 45) "write_message" | 18 | R_PAREN@[32; 33) ")" |
19 | PARAM_LIST@[45; 63) | 19 | SEMI@[33; 34) ";" |
20 | L_PAREN@[45; 46) "(" | 20 | WHITESPACE@[34; 35) "\n" |
21 | PARAM@[46; 62) | 21 | R_CURLY@[35; 36) "}" |
22 | REFERENCE_TYPE@[46; 62) | 22 | WHITESPACE@[36; 38) "\n\n" |
23 | AMP@[46; 47) "&" | 23 | TRAIT_DEF@[38; 95) |
24 | PATH_TYPE@[47; 62) | 24 | TRAIT_KW@[38; 43) "trait" |
25 | PATH@[47; 62) | 25 | WHITESPACE@[43; 44) " " |
26 | PATH_SEGMENT@[47; 62) | 26 | NAME@[44; 57) |
27 | NAME_REF@[47; 62) | 27 | IDENT@[44; 57) "TraitWithExpr" |
28 | IDENT@[47; 62) "FrontendMessage" | 28 | WHITESPACE@[57; 58) " " |
29 | R_PAREN@[62; 63) ")" | 29 | ITEM_LIST@[58; 95) |
30 | SEMI@[63; 64) ";" | 30 | L_CURLY@[58; 59) "{" |
31 | WHITESPACE@[64; 65) "\n" | 31 | WHITESPACE@[59; 64) "\n " |
32 | R_CURLY@[65; 66) "}" | 32 | FN_DEF@[64; 93) |
33 | WHITESPACE@[66; 68) "\n\n" | 33 | FN_KW@[64; 66) "fn" |
34 | TRAIT_DEF@[68; 104) | 34 | WHITESPACE@[66; 67) " " |
35 | TRAIT_KW@[68; 73) "trait" | 35 | NAME@[67; 79) |
36 | WHITESPACE@[73; 74) " " | 36 | IDENT@[67; 79) "fn_with_expr" |
37 | NAME@[74; 82) | 37 | PARAM_LIST@[79; 92) |
38 | IDENT@[74; 82) "Runnable" | 38 | L_PAREN@[79; 80) "(" |
39 | WHITESPACE@[82; 83) " " | 39 | PARAM@[80; 91) |
40 | ITEM_LIST@[83; 104) | 40 | BIND_PAT@[80; 81) |
41 | L_CURLY@[83; 84) "{" | 41 | NAME@[80; 81) |
42 | WHITESPACE@[84; 89) "\n " | 42 | IDENT@[80; 81) "x" |
43 | FN_DEF@[89; 102) | 43 | COLON@[81; 82) ":" |
44 | FN_KW@[89; 91) "fn" | 44 | WHITESPACE@[82; 83) " " |
45 | WHITESPACE@[91; 92) " " | 45 | ARRAY_TYPE@[83; 91) |
46 | NAME@[92; 99) | 46 | L_BRACK@[83; 84) "[" |
47 | IDENT@[92; 99) "handler" | 47 | PATH_TYPE@[84; 87) |
48 | PARAM_LIST@[99; 101) | 48 | PATH@[84; 87) |
49 | L_PAREN@[99; 100) "(" | 49 | PATH_SEGMENT@[84; 87) |
50 | R_PAREN@[100; 101) ")" | 50 | NAME_REF@[84; 87) |
51 | SEMI@[101; 102) ";" | 51 | IDENT@[84; 87) "i32" |
52 | WHITESPACE@[102; 103) "\n" | 52 | SEMI@[87; 88) ";" |
53 | R_CURLY@[103; 104) "}" | 53 | WHITESPACE@[88; 89) " " |
54 | WHITESPACE@[104; 106) "\n\n" | 54 | LITERAL@[89; 90) |
55 | TRAIT_DEF@[106; 163) | 55 | INT_NUMBER@[89; 90) "1" |
56 | TRAIT_KW@[106; 111) "trait" | 56 | R_BRACK@[90; 91) "]" |
57 | WHITESPACE@[111; 112) " " | 57 | R_PAREN@[91; 92) ")" |
58 | NAME@[112; 125) | 58 | SEMI@[92; 93) ";" |
59 | IDENT@[112; 125) "TraitWithExpr" | 59 | WHITESPACE@[93; 94) "\n" |
60 | WHITESPACE@[125; 126) " " | 60 | R_CURLY@[94; 95) "}" |
61 | ITEM_LIST@[126; 163) | 61 | WHITESPACE@[95; 96) "\n" |
62 | L_CURLY@[126; 127) "{" | ||
63 | WHITESPACE@[127; 132) "\n " | ||
64 | FN_DEF@[132; 161) | ||
65 | FN_KW@[132; 134) "fn" | ||
66 | WHITESPACE@[134; 135) " " | ||
67 | NAME@[135; 147) | ||
68 | IDENT@[135; 147) "fn_with_expr" | ||
69 | PARAM_LIST@[147; 160) | ||
70 | L_PAREN@[147; 148) "(" | ||
71 | PARAM@[148; 159) | ||
72 | BIND_PAT@[148; 149) | ||
73 | NAME@[148; 149) | ||
74 | IDENT@[148; 149) "x" | ||
75 | COLON@[149; 150) ":" | ||
76 | WHITESPACE@[150; 151) " " | ||
77 | ARRAY_TYPE@[151; 159) | ||
78 | L_BRACK@[151; 152) "[" | ||
79 | PATH_TYPE@[152; 155) | ||
80 | PATH@[152; 155) | ||
81 | PATH_SEGMENT@[152; 155) | ||
82 | NAME_REF@[152; 155) | ||
83 | IDENT@[152; 155) "i32" | ||
84 | SEMI@[155; 156) ";" | ||
85 | WHITESPACE@[156; 157) " " | ||
86 | LITERAL@[157; 158) | ||
87 | INT_NUMBER@[157; 158) "1" | ||
88 | R_BRACK@[158; 159) "]" | ||
89 | R_PAREN@[159; 160) ")" | ||
90 | SEMI@[160; 161) ";" | ||
91 | WHITESPACE@[161; 162) "\n" | ||
92 | R_CURLY@[162; 163) "}" | ||
93 | WHITESPACE@[163; 164) "\n" | ||
diff --git a/crates/test_utils/src/lib.rs b/crates/test_utils/src/lib.rs index 659f77b71..336c594a6 100644 --- a/crates/test_utils/src/lib.rs +++ b/crates/test_utils/src/lib.rs | |||
@@ -21,6 +21,12 @@ pub use difference::Changeset as __Changeset; | |||
21 | 21 | ||
22 | pub const CURSOR_MARKER: &str = "<|>"; | 22 | pub const CURSOR_MARKER: &str = "<|>"; |
23 | 23 | ||
24 | /// Asserts that two strings are equal, otherwise displays a rich diff between them. | ||
25 | /// | ||
26 | /// The diff shows changes from the "original" left string to the "actual" right string. | ||
27 | /// | ||
28 | /// All arguments starting from and including the 3rd one are passed to | ||
29 | /// `eprintln!()` macro in case of text inequality. | ||
24 | #[macro_export] | 30 | #[macro_export] |
25 | macro_rules! assert_eq_text { | 31 | macro_rules! assert_eq_text { |
26 | ($left:expr, $right:expr) => { | 32 | ($left:expr, $right:expr) => { |
@@ -42,6 +48,7 @@ macro_rules! assert_eq_text { | |||
42 | }}; | 48 | }}; |
43 | } | 49 | } |
44 | 50 | ||
51 | /// Infallible version of `try_extract_offset()`. | ||
45 | pub fn extract_offset(text: &str) -> (TextUnit, String) { | 52 | pub fn extract_offset(text: &str) -> (TextUnit, String) { |
46 | match try_extract_offset(text) { | 53 | match try_extract_offset(text) { |
47 | None => panic!("text should contain cursor marker"), | 54 | None => panic!("text should contain cursor marker"), |
@@ -49,6 +56,8 @@ pub fn extract_offset(text: &str) -> (TextUnit, String) { | |||
49 | } | 56 | } |
50 | } | 57 | } |
51 | 58 | ||
59 | /// Returns the offset of the first occurrence of `<|>` marker and the copy of `text` | ||
60 | /// without the marker. | ||
52 | fn try_extract_offset(text: &str) -> Option<(TextUnit, String)> { | 61 | fn try_extract_offset(text: &str) -> Option<(TextUnit, String)> { |
53 | let cursor_pos = text.find(CURSOR_MARKER)?; | 62 | let cursor_pos = text.find(CURSOR_MARKER)?; |
54 | let mut new_text = String::with_capacity(text.len() - CURSOR_MARKER.len()); | 63 | let mut new_text = String::with_capacity(text.len() - CURSOR_MARKER.len()); |
@@ -58,6 +67,7 @@ fn try_extract_offset(text: &str) -> Option<(TextUnit, String)> { | |||
58 | Some((cursor_pos, new_text)) | 67 | Some((cursor_pos, new_text)) |
59 | } | 68 | } |
60 | 69 | ||
70 | /// Infallible version of `try_extract_range()`. | ||
61 | pub fn extract_range(text: &str) -> (TextRange, String) { | 71 | pub fn extract_range(text: &str) -> (TextRange, String) { |
62 | match try_extract_range(text) { | 72 | match try_extract_range(text) { |
63 | None => panic!("text should contain cursor marker"), | 73 | None => panic!("text should contain cursor marker"), |
@@ -65,6 +75,8 @@ pub fn extract_range(text: &str) -> (TextRange, String) { | |||
65 | } | 75 | } |
66 | } | 76 | } |
67 | 77 | ||
78 | /// Returns `TextRange` between the first two markers `<|>...<|>` and the copy | ||
79 | /// of `text` without both of these markers. | ||
68 | fn try_extract_range(text: &str) -> Option<(TextRange, String)> { | 80 | fn try_extract_range(text: &str) -> Option<(TextRange, String)> { |
69 | let (start, text) = try_extract_offset(text)?; | 81 | let (start, text) = try_extract_offset(text)?; |
70 | let (end, text) = try_extract_offset(&text)?; | 82 | let (end, text) = try_extract_offset(&text)?; |
@@ -85,6 +97,11 @@ impl From<RangeOrOffset> for TextRange { | |||
85 | } | 97 | } |
86 | } | 98 | } |
87 | 99 | ||
100 | /// Extracts `TextRange` or `TextUnit` depending on the amount of `<|>` markers | ||
101 | /// found in `text`. | ||
102 | /// | ||
103 | /// # Panics | ||
104 | /// Panics if no `<|>` marker is present in the `text`. | ||
88 | pub fn extract_range_or_offset(text: &str) -> (RangeOrOffset, String) { | 105 | pub fn extract_range_or_offset(text: &str) -> (RangeOrOffset, String) { |
89 | if let Some((range, text)) = try_extract_range(text) { | 106 | if let Some((range, text)) = try_extract_range(text) { |
90 | return (RangeOrOffset::Range(range), text); | 107 | return (RangeOrOffset::Range(range), text); |
@@ -93,7 +110,7 @@ pub fn extract_range_or_offset(text: &str) -> (RangeOrOffset, String) { | |||
93 | (RangeOrOffset::Offset(offset), text) | 110 | (RangeOrOffset::Offset(offset), text) |
94 | } | 111 | } |
95 | 112 | ||
96 | /// Extracts ranges, marked with `<tag> </tag>` paris from the `text` | 113 | /// Extracts ranges, marked with `<tag> </tag>` pairs from the `text` |
97 | pub fn extract_ranges(mut text: &str, tag: &str) -> (Vec<TextRange>, String) { | 114 | pub fn extract_ranges(mut text: &str, tag: &str) -> (Vec<TextRange>, String) { |
98 | let open = format!("<{}>", tag); | 115 | let open = format!("<{}>", tag); |
99 | let close = format!("</{}>", tag); | 116 | let close = format!("</{}>", tag); |
@@ -127,9 +144,9 @@ pub fn extract_ranges(mut text: &str, tag: &str) -> (Vec<TextRange>, String) { | |||
127 | (ranges, res) | 144 | (ranges, res) |
128 | } | 145 | } |
129 | 146 | ||
147 | /// Inserts `<|>` marker into the `text` at `offset`. | ||
130 | pub fn add_cursor(text: &str, offset: TextUnit) -> String { | 148 | pub fn add_cursor(text: &str, offset: TextUnit) -> String { |
131 | let offset: u32 = offset.into(); | 149 | let offset: usize = offset.to_usize(); |
132 | let offset: usize = offset as usize; | ||
133 | let mut res = String::new(); | 150 | let mut res = String::new(); |
134 | res.push_str(&text[..offset]); | 151 | res.push_str(&text[..offset]); |
135 | res.push_str("<|>"); | 152 | res.push_str("<|>"); |
@@ -152,19 +169,6 @@ pub struct FixtureEntry { | |||
152 | /// // - other meta | 169 | /// // - other meta |
153 | /// ``` | 170 | /// ``` |
154 | pub fn parse_fixture(fixture: &str) -> Vec<FixtureEntry> { | 171 | pub fn parse_fixture(fixture: &str) -> Vec<FixtureEntry> { |
155 | let mut res = Vec::new(); | ||
156 | let mut buf = String::new(); | ||
157 | let mut meta: Option<&str> = None; | ||
158 | |||
159 | macro_rules! flush { | ||
160 | () => { | ||
161 | if let Some(meta) = meta { | ||
162 | res.push(FixtureEntry { meta: meta.to_string(), text: buf.clone() }); | ||
163 | buf.clear(); | ||
164 | } | ||
165 | }; | ||
166 | }; | ||
167 | |||
168 | let margin = fixture | 172 | let margin = fixture |
169 | .lines() | 173 | .lines() |
170 | .filter(|it| it.trim_start().starts_with("//-")) | 174 | .filter(|it| it.trim_start().starts_with("//-")) |
@@ -172,7 +176,7 @@ pub fn parse_fixture(fixture: &str) -> Vec<FixtureEntry> { | |||
172 | .next() | 176 | .next() |
173 | .expect("empty fixture"); | 177 | .expect("empty fixture"); |
174 | 178 | ||
175 | let lines = fixture | 179 | let mut lines = fixture |
176 | .split('\n') // don't use `.lines` to not drop `\r\n` | 180 | .split('\n') // don't use `.lines` to not drop `\r\n` |
177 | .filter_map(|line| { | 181 | .filter_map(|line| { |
178 | if line.len() >= margin { | 182 | if line.len() >= margin { |
@@ -184,17 +188,16 @@ pub fn parse_fixture(fixture: &str) -> Vec<FixtureEntry> { | |||
184 | } | 188 | } |
185 | }); | 189 | }); |
186 | 190 | ||
187 | for line in lines { | 191 | let mut res: Vec<FixtureEntry> = Vec::new(); |
192 | for line in lines.by_ref() { | ||
188 | if line.starts_with("//-") { | 193 | if line.starts_with("//-") { |
189 | flush!(); | 194 | let meta = line["//-".len()..].trim().to_string(); |
190 | buf.clear(); | 195 | res.push(FixtureEntry { meta, text: String::new() }) |
191 | meta = Some(line["//-".len()..].trim()); | 196 | } else if let Some(entry) = res.last_mut() { |
192 | continue; | 197 | entry.text.push_str(line); |
198 | entry.text.push('\n'); | ||
193 | } | 199 | } |
194 | buf.push_str(line); | ||
195 | buf.push('\n'); | ||
196 | } | 200 | } |
197 | flush!(); | ||
198 | res | 201 | res |
199 | } | 202 | } |
200 | 203 | ||
@@ -236,11 +239,10 @@ fn lines_match_works() { | |||
236 | assert!(!lines_match("b", "cb")); | 239 | assert!(!lines_match("b", "cb")); |
237 | } | 240 | } |
238 | 241 | ||
239 | // Compares JSON object for approximate equality. | 242 | /// Compares JSON object for approximate equality. |
240 | // You can use `[..]` wildcard in strings (useful for OS dependent things such | 243 | /// You can use `[..]` wildcard in strings (useful for OS dependent things such |
241 | // as paths). You can use a `"{...}"` string literal as a wildcard for | 244 | /// as paths). You can use a `"{...}"` string literal as a wildcard for |
242 | // arbitrary nested JSON (useful for parts of object emitted by other programs | 245 | /// arbitrary nested JSON. Arrays are sorted before comparison. |
243 | // (e.g. rustc) rather than Cargo itself). Arrays are sorted before comparison. | ||
244 | pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a Value, &'a Value)> { | 246 | pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a Value, &'a Value)> { |
245 | use serde_json::Value::*; | 247 | use serde_json::Value::*; |
246 | match (expected, actual) { | 248 | match (expected, actual) { |
@@ -286,6 +288,14 @@ pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a | |||
286 | } | 288 | } |
287 | } | 289 | } |
288 | 290 | ||
291 | /// Calls callback `f` with input code and file paths of all `.rs` files from `test_data_dir` | ||
292 | /// subdirectories defined by `paths`. | ||
293 | /// | ||
294 | /// If the content of the matching `.txt` file differs from the output of `f()` | ||
295 | /// the test will fail. | ||
296 | /// | ||
297 | /// If there is no matching `.txt` file it will be created and filled with the | ||
298 | /// output of `f()`, but the test will fail. | ||
289 | pub fn dir_tests<F>(test_data_dir: &Path, paths: &[&str], f: F) | 299 | pub fn dir_tests<F>(test_data_dir: &Path, paths: &[&str], f: F) |
290 | where | 300 | where |
291 | F: Fn(&str, &Path) -> String, | 301 | F: Fn(&str, &Path) -> String, |
@@ -307,6 +317,7 @@ where | |||
307 | } | 317 | } |
308 | } | 318 | } |
309 | 319 | ||
320 | /// Collects all `.rs` files from `test_data_dir` subdirectories defined by `paths`. | ||
310 | pub fn collect_tests(test_data_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> { | 321 | pub fn collect_tests(test_data_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> { |
311 | paths | 322 | paths |
312 | .iter() | 323 | .iter() |
@@ -321,6 +332,7 @@ pub fn collect_tests(test_data_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, Stri | |||
321 | .collect() | 332 | .collect() |
322 | } | 333 | } |
323 | 334 | ||
335 | /// Collects paths to all `.rs` files from `dir` in a sorted `Vec<PathBuf>`. | ||
324 | fn test_from_dir(dir: &Path) -> Vec<PathBuf> { | 336 | fn test_from_dir(dir: &Path) -> Vec<PathBuf> { |
325 | let mut acc = Vec::new(); | 337 | let mut acc = Vec::new(); |
326 | for file in fs::read_dir(&dir).unwrap() { | 338 | for file in fs::read_dir(&dir).unwrap() { |
@@ -334,6 +346,7 @@ fn test_from_dir(dir: &Path) -> Vec<PathBuf> { | |||
334 | acc | 346 | acc |
335 | } | 347 | } |
336 | 348 | ||
349 | /// Returns the path to the root directory of `rust-analyzer` project. | ||
337 | pub fn project_dir() -> PathBuf { | 350 | pub fn project_dir() -> PathBuf { |
338 | let dir = env!("CARGO_MANIFEST_DIR"); | 351 | let dir = env!("CARGO_MANIFEST_DIR"); |
339 | PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned() | 352 | PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned() |
@@ -356,6 +369,9 @@ pub fn read_text(path: &Path) -> String { | |||
356 | .replace("\r\n", "\n") | 369 | .replace("\r\n", "\n") |
357 | } | 370 | } |
358 | 371 | ||
372 | /// Returns `true` if slow tests should be skipped; otherwise returns `false` and | ||
373 | /// also creates a file at `./target/.slow_tests_cookie` which serves as a flag | ||
374 | /// that slow tests did run. | ||
359 | pub fn skip_slow_tests() -> bool { | 375 | pub fn skip_slow_tests() -> bool { |
360 | let should_skip = std::env::var("CI").is_err() && std::env::var("RUN_SLOW_TESTS").is_err(); | 376 | let should_skip = std::env::var("CI").is_err() && std::env::var("RUN_SLOW_TESTS").is_err(); |
361 | if should_skip { | 377 | if should_skip { |
@@ -367,8 +383,9 @@ pub fn skip_slow_tests() -> bool { | |||
367 | should_skip | 383 | should_skip |
368 | } | 384 | } |
369 | 385 | ||
370 | const REWRITE: bool = false; | 386 | /// Asserts that `expected` and `actual` strings are equal. If they differ only |
371 | 387 | /// in trailing or leading whitespace the test won't fail and | |
388 | /// the contents of `actual` will be written to the file located at `path`. | ||
372 | fn assert_equal_text(expected: &str, actual: &str, path: &Path) { | 389 | fn assert_equal_text(expected: &str, actual: &str, path: &Path) { |
373 | if expected == actual { | 390 | if expected == actual { |
374 | return; | 391 | return; |
@@ -381,6 +398,7 @@ fn assert_equal_text(expected: &str, actual: &str, path: &Path) { | |||
381 | fs::write(path, actual).unwrap(); | 398 | fs::write(path, actual).unwrap(); |
382 | return; | 399 | return; |
383 | } | 400 | } |
401 | const REWRITE: bool = false; | ||
384 | if REWRITE { | 402 | if REWRITE { |
385 | println!("rewriting {}", pretty_path.display()); | 403 | println!("rewriting {}", pretty_path.display()); |
386 | fs::write(path, actual).unwrap(); | 404 | fs::write(path, actual).unwrap(); |
diff --git a/crates/test_utils/src/marks.rs b/crates/test_utils/src/marks.rs index fe1813947..f8fabfaff 100644 --- a/crates/test_utils/src/marks.rs +++ b/crates/test_utils/src/marks.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | //! This module implements manually tracked test coverage, which useful for | 1 | //! This module implements manually tracked test coverage, which is useful for |
2 | //! quickly finding a test responsible for testing a particular bit of code. | 2 | //! quickly finding a test responsible for testing a particular bit of code. |
3 | //! | 3 | //! |
4 | //! See <https://matklad.github.io/2018/06/18/a-trick-for-test-maintenance.html> | 4 | //! See <https://matklad.github.io/2018/06/18/a-trick-for-test-maintenance.html> |
diff --git a/docs/dev/README.md b/docs/dev/README.md index 2f6215d6b..732e4bdd3 100644 --- a/docs/dev/README.md +++ b/docs/dev/README.md | |||
@@ -26,15 +26,6 @@ Discussion happens in this Zulip stream: | |||
26 | 26 | ||
27 | https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Fwg-rls-2.2E0 | 27 | https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Fwg-rls-2.2E0 |
28 | 28 | ||
29 | # Work List | ||
30 | |||
31 | We have this "work list" paper document: | ||
32 | |||
33 | https://paper.dropbox.com/doc/RLS-2.0-work-list--AZ3BgHKKCtqszbsi3gi6sjchAQ-42vbnxzuKq2lKwW0mkn8Y | ||
34 | |||
35 | It shows what everyone is working on right now. If you want to (this is not | ||
36 | mandatory), add yourself to the list! | ||
37 | |||
38 | # Issue Labels | 29 | # Issue Labels |
39 | 30 | ||
40 | * [good-first-issue](https://github.com/rust-analyzer/rust-analyzer/labels/good%20first%20issue) | 31 | * [good-first-issue](https://github.com/rust-analyzer/rust-analyzer/labels/good%20first%20issue) |
@@ -50,10 +41,12 @@ mandatory), add yourself to the list! | |||
50 | 41 | ||
51 | # CI | 42 | # CI |
52 | 43 | ||
53 | We use Travis for CI. Most of the things, including formatting, are checked by | 44 | We use GitHub Actions for CI. Most of the things, including formatting, are checked by |
54 | `cargo test` so, if `cargo test` passes locally, that's a good sign that CI will | 45 | `cargo test` so, if `cargo test` passes locally, that's a good sign that CI will |
55 | be green as well. We use bors-ng to enforce the [not rocket | 46 | be green as well. The only exception is that some long-running tests are skipped locally by default. |
56 | science](https://graydon2.dreamwidth.org/1597.html) rule. | 47 | Use `env RUN_SLOW_TESTS=1 cargo test` to run the full suite. |
48 | |||
49 | We use bors-ng to enforce the [not rocket science](https://graydon2.dreamwidth.org/1597.html) rule. | ||
57 | 50 | ||
58 | You can run `cargo xtask install-pre-commit-hook` to install git-hook to run rustfmt on commit. | 51 | You can run `cargo xtask install-pre-commit-hook` to install git-hook to run rustfmt on commit. |
59 | 52 | ||
@@ -61,9 +54,9 @@ You can run `cargo xtask install-pre-commit-hook` to install git-hook to run rus | |||
61 | 54 | ||
62 | All Rust code lives in the `crates` top-level directory, and is organized as a | 55 | All Rust code lives in the `crates` top-level directory, and is organized as a |
63 | single Cargo workspace. The `editors` top-level directory contains code for | 56 | single Cargo workspace. The `editors` top-level directory contains code for |
64 | integrating with editors. Currently, it contains plugins for VS Code (in | 57 | integrating with editors. Currently, it contains the plugin for VS Code (in |
65 | typescript) and Emacs (in elisp). The `docs` top-level directory contains both | 58 | typescript). The `docs` top-level directory contains both developer and user |
66 | developer and user documentation. | 59 | documentation. |
67 | 60 | ||
68 | We have some automation infra in Rust in the `xtask` package. It contains | 61 | We have some automation infra in Rust in the `xtask` package. It contains |
69 | stuff like formatting checking, code generation and powers `cargo xtask install`. | 62 | stuff like formatting checking, code generation and powers `cargo xtask install`. |
@@ -81,42 +74,41 @@ relevant test and execute it (VS Code includes an action for running a single | |||
81 | test). | 74 | test). |
82 | 75 | ||
83 | However, launching a VS Code instance with locally build language server is | 76 | However, launching a VS Code instance with locally build language server is |
84 | possible. There's even a VS Code task for this, so just <kbd>F5</kbd> should | 77 | possible. There's "Run Extension (Dev Server)" launch configuration for this. |
85 | work (thanks, [@andrew-w-ross](https://github.com/andrew-w-ross)!). | 78 | |
86 | 79 | In general, I use one of the following workflows for fixing bugs and | |
87 | I often just install development version with `cargo xtask install --server --jemalloc` and | 80 | implementing features. |
88 | restart the host VS Code. | 81 | |
89 | 82 | If the problem concerns only internal parts of rust-analyzer (ie, I don't need | |
90 | See [./debugging.md](./debugging.md) for how to attach to rust-analyzer with | 83 | to touch `ra_lsp_server` crate or typescript code), there is a unit-test for it. |
91 | debugger, and don't forget that rust-analyzer has useful `pd` snippet and `dbg` | 84 | So, I use **Rust Analyzer: Run** action in VS Code to run this single test, and |
92 | postfix completion for printf debugging :-) | 85 | then just do printf-driven development/debugging. As a sanity check after I'm |
93 | 86 | done, I use `cargo xtask install --server` and **Reload Window** action in VS | |
94 | # Working With VS Code Extension | 87 | Code to sanity check that the thing works as I expect. |
95 | 88 | ||
96 | To work on the VS Code extension, launch code inside `editors/code` and use `F5` | 89 | If the problem concerns only the VS Code extension, I use **Run Extension** |
97 | to launch/debug. To automatically apply formatter and linter suggestions, use | 90 | launch configuration from `launch.json`. Notably, this uses the usual |
98 | `npm run fix`. | 91 | `ra_lsp_server` binary from `PATH`. After I am done with the fix, I use `cargo |
99 | 92 | xtask install --client-code` to try the new extension for real. | |
100 | Tests are located inside `src/test` and are named `*.test.ts`. They use the | 93 | |
101 | [Mocha](https://mochajs.org) test framework and the builtin Node | 94 | If I need to fix something in the `ra_lsp_server` crate, I feel sad because it's |
102 | [assert](https://nodejs.org/api/assert.html) module. Unlike normal Node tests | 95 | on the boundary between the two processes, and working there is slow. I usually |
103 | they must be hosted inside a VS Code instance. This can be done in one of two | 96 | just `cargo xtask install --server` and poke changes from my live environment. |
104 | ways: | 97 | Note that this uses `--release`, which is usually faster overall, because |
105 | 98 | loading stdlib into debug version of rust-analyzer takes a lot of time. To speed | |
106 | 1. When `F5` debugging in VS Code select the `Extension Tests` configuration | 99 | things up, sometimes I open a temporary hello-world project which has |
107 | from the drop-down at the top of the Debug View. This will launch a temporary | 100 | `"rust-analyzer.withSysroot": false` in `.code/settings.json`. This flag causes |
108 | instance of VS Code. The test results will appear in the "Debug Console" tab | 101 | rust-analyzer to skip loading the sysroot, which greatly reduces the amount of |
109 | of the primary VS Code instance. | 102 | things rust-analyzer needs to do, and makes printf's more useful. Note that you |
110 | 103 | should only use `eprint!` family of macros for debugging: stdout is used for LSP | |
111 | 2. Run `npm test` from the command line. Although this is initiated from the | 104 | communication, and `print!` would break it. |
112 | command line it is not headless; it will also launch a temporary instance of | 105 | |
113 | VS Code. | 106 | If I need to fix something simultaneously in the server and in the client, I |
114 | 107 | feel even more sad. I don't have a specific workflow for this case. | |
115 | Due to the requirements of running the tests inside VS Code they are **not run | 108 | |
116 | on CI**. When making changes to the extension please ensure the tests are not | 109 | Additionally, I use `cargo run --release -p ra_cli -- analysis-stats |
117 | broken locally before opening a Pull Request. | 110 | path/to/some/rust/crate` to run a batch analysis. This is primarily useful for |
118 | 111 | performance optimizations, or for bug minimization. | |
119 | To install **only** the VS Code extension, use `cargo xtask install --client-code`. | ||
120 | 112 | ||
121 | # Logging | 113 | # Logging |
122 | 114 | ||
diff --git a/docs/dev/architecture.md b/docs/dev/architecture.md index 629645757..9675ed0b6 100644 --- a/docs/dev/architecture.md +++ b/docs/dev/architecture.md | |||
@@ -12,6 +12,9 @@ analyzer: | |||
12 | 12 | ||
13 | https://www.youtube.com/playlist?list=PL85XCvVPmGQho7MZkdW-wtPtuJcFpzycE | 13 | https://www.youtube.com/playlist?list=PL85XCvVPmGQho7MZkdW-wtPtuJcFpzycE |
14 | 14 | ||
15 | Note that the guide and videos are pretty dated, this document should be | ||
16 | generally fresher. | ||
17 | |||
15 | ## The Big Picture | 18 | ## The Big Picture |
16 | 19 | ||
17 | ![](https://user-images.githubusercontent.com/1711539/50114578-e8a34280-0255-11e9-902c-7cfc70747966.png) | 20 | ![](https://user-images.githubusercontent.com/1711539/50114578-e8a34280-0255-11e9-902c-7cfc70747966.png) |
@@ -20,13 +23,12 @@ On the highest level, rust-analyzer is a thing which accepts input source code | |||
20 | from the client and produces a structured semantic model of the code. | 23 | from the client and produces a structured semantic model of the code. |
21 | 24 | ||
22 | More specifically, input data consists of a set of test files (`(PathBuf, | 25 | More specifically, input data consists of a set of test files (`(PathBuf, |
23 | String)` pairs) and information about project structure, captured in the so called | 26 | String)` pairs) and information about project structure, captured in the so |
24 | `CrateGraph`. The crate graph specifies which files are crate roots, which cfg | 27 | called `CrateGraph`. The crate graph specifies which files are crate roots, |
25 | flags are specified for each crate (TODO: actually implement this) and what | 28 | which cfg flags are specified for each crate and what dependencies exist between |
26 | dependencies exist between the crates. The analyzer keeps all this input data in | 29 | the crates. The analyzer keeps all this input data in memory and never does any |
27 | memory and never does any IO. Because the input data is source code, which | 30 | IO. Because the input data are source code, which typically measures in tens of |
28 | typically measures in tens of megabytes at most, keeping all input data in | 31 | megabytes at most, keeping everything in memory is OK. |
29 | memory is OK. | ||
30 | 32 | ||
31 | A "structured semantic model" is basically an object-oriented representation of | 33 | A "structured semantic model" is basically an object-oriented representation of |
32 | modules, functions and types which appear in the source code. This representation | 34 | modules, functions and types which appear in the source code. This representation |
@@ -43,37 +45,39 @@ can be quickly updated for small modifications. | |||
43 | ## Code generation | 45 | ## Code generation |
44 | 46 | ||
45 | Some of the components of this repository are generated through automatic | 47 | Some of the components of this repository are generated through automatic |
46 | processes. These are outlined below: | 48 | processes. `cargo xtask codegen` runs all generation tasks. Generated code is |
49 | committed to the git repository. | ||
50 | |||
51 | In particular, `cargo xtask codegen` generates: | ||
52 | |||
53 | 1. [`syntax_kind/generated`](https://github.com/rust-analyzer/rust-analyzer/blob/a0be39296d2925972cacd9fbf8b5fb258fad6947/crates/ra_parser/src/syntax_kind/generated.rs) | ||
54 | -- the set of terminals and non-terminals of rust grammar. | ||
47 | 55 | ||
48 | - `cargo xtask codegen`: The kinds of tokens that are reused in several places, so a generator | 56 | 2. [`ast/generated`](https://github.com/rust-analyzer/rust-analyzer/blob/a0be39296d2925972cacd9fbf8b5fb258fad6947/crates/ra_syntax/src/ast/generated.rs) |
49 | is used. We use `quote!` macro to generate the files listed below, based on | 57 | -- AST data structure. |
50 | the grammar described in [grammar.ron]: | ||
51 | - [ast/generated.rs][ast generated] | ||
52 | - [syntax_kind/generated.rs][syntax_kind generated] | ||
53 | 58 | ||
54 | [grammar.ron]: ../../crates/ra_syntax/src/grammar.ron | 59 | 3. [`doc_tests/generated`](https://github.com/rust-analyzer/rust-analyzer/blob/a0be39296d2925972cacd9fbf8b5fb258fad6947/crates/ra_assists/src/doc_tests/generated.rs),
55 | [ast generated]: ../../crates/ra_syntax/src/ast/generated.rs | 60 | [`test_data/parser/inline`](https://github.com/rust-analyzer/rust-analyzer/tree/a0be39296d2925972cacd9fbf8b5fb258fad6947/crates/ra_syntax/test_data/parser/inline) |
56 | [syntax_kind generated]: ../../crates/ra_parser/src/syntax_kind/generated.rs | 61 | -- tests for assists and the parser. |
62 | |||
63 | The source for 1 and 2 is in [`ast_src.rs`](https://github.com/rust-analyzer/rust-analyzer/blob/a0be39296d2925972cacd9fbf8b5fb258fad6947/xtask/src/ast_src.rs). | ||
57 | 64 | ||
58 | ## Code Walk-Through | 65 | ## Code Walk-Through |
59 | 66 | ||
60 | ### `crates/ra_syntax`, `crates/ra_parser` | 67 | ### `crates/ra_syntax`, `crates/ra_parser` |
61 | 68 | ||
62 | Rust syntax tree structure and parser. See | 69 | Rust syntax tree structure and parser. See |
63 | [RFC](https://github.com/rust-lang/rfcs/pull/2256) for some design notes. | 70 | [RFC](https://github.com/rust-lang/rfcs/pull/2256) and [./syntax.md](./syntax.md) for some design notes. |
64 | 71 | ||
65 | - [rowan](https://github.com/rust-analyzer/rowan) library is used for constructing syntax trees. | 72 | - [rowan](https://github.com/rust-analyzer/rowan) library is used for constructing syntax trees. |
66 | - `grammar` module is the actual parser. It is a hand-written recursive descent parser, which | 73 | - `grammar` module is the actual parser. It is a hand-written recursive descent parser, which |
67 | produces a sequence of events like "start node X", "finish node Y". It works similarly to [kotlin's parser](https://github.com/JetBrains/kotlin/blob/4d951de616b20feca92f3e9cc9679b2de9e65195/compiler/frontend/src/org/jetbrains/kotlin/parsing/KotlinParsing.java), | 74 | produces a sequence of events like "start node X", "finish node Y". It works similarly to [kotlin's parser](https://github.com/JetBrains/kotlin/blob/4d951de616b20feca92f3e9cc9679b2de9e65195/compiler/frontend/src/org/jetbrains/kotlin/parsing/KotlinParsing.java), |
68 | which is a good source of inspiration for dealing with syntax errors and incomplete input. Original [libsyntax parser](https://github.com/rust-lang/rust/blob/6b99adeb11313197f409b4f7c4083c2ceca8a4fe/src/libsyntax/parse/parser.rs) | 75 | which is a good source of inspiration for dealing with syntax errors and incomplete input. Original [libsyntax parser](https://github.com/rust-lang/rust/blob/6b99adeb11313197f409b4f7c4083c2ceca8a4fe/src/libsyntax/parse/parser.rs) |
69 | is what we use for the definition of the Rust language. | 76 | is what we use for the definition of the Rust language. |
70 | - `parser_api/parser_impl` bridges the tree-agnostic parser from `grammar` with `rowan` trees. | 77 | - `TreeSink` and `TokenSource` traits bridge the tree-agnostic parser from `grammar` with `rowan` trees. |
71 | This is the thing that turns a flat list of events into a tree (see `EventProcessor`) | ||
72 | - `ast` provides a type safe API on top of the raw `rowan` tree. | 78 | - `ast` provides a type safe API on top of the raw `rowan` tree. |
73 | - `grammar.ron` RON description of the grammar, which is used to | 79 | - `ast_src` description of the grammar, which is used to generate `syntax_kinds` |
74 | generate `syntax_kinds` and `ast` modules, using `cargo xtask codegen` command. | 80 | and `ast` modules, using `cargo xtask codegen` command. |
75 | - `algo`: generic tree algorithms, including `walk` for O(1) stack | ||
76 | space tree traversal (this is cool). | ||
77 | 81 | ||
78 | Tests for ra_syntax are mostly data-driven: `test_data/parser` contains subdirectories with a bunch of `.rs` | 82 | Tests for ra_syntax are mostly data-driven: `test_data/parser` contains subdirectories with a bunch of `.rs` |
79 | (test vectors) and `.txt` files with corresponding syntax trees. During testing, we check | 83 | (test vectors) and `.txt` files with corresponding syntax trees. During testing, we check |
@@ -81,6 +85,10 @@ Tests for ra_syntax are mostly data-driven: `test_data/parser` contains subdirec | |||
81 | tests). Additionally, running `cargo xtask codegen` will walk the grammar module and collect | 85 | tests). Additionally, running `cargo xtask codegen` will walk the grammar module and collect |
82 | all `// test test_name` comments into files inside `test_data/parser/inline` directory. | 86 | all `// test test_name` comments into files inside `test_data/parser/inline` directory. |
83 | 87 | ||
88 | Note | ||
89 | [`api_walkthrough`](https://github.com/rust-analyzer/rust-analyzer/blob/2fb6af89eb794f775de60b82afe56b6f986c2a40/crates/ra_syntax/src/lib.rs#L190-L348) | ||
90 | in particular: it shows off various methods of working with syntax tree. | ||
91 | |||
84 | See [#93](https://github.com/rust-analyzer/rust-analyzer/pull/93) for an example PR which | 92 | See [#93](https://github.com/rust-analyzer/rust-analyzer/pull/93) for an example PR which |
85 | fixes a bug in the grammar. | 93 | fixes a bug in the grammar. |
86 | 94 | ||
@@ -94,18 +102,22 @@ defines most of the "input" queries: facts supplied by the client of the | |||
94 | analyzer. Reading the docs of the `ra_db::input` module should be useful: | 102 | analyzer. Reading the docs of the `ra_db::input` module should be useful: |
95 | everything else is strictly derived from those inputs. | 103 | everything else is strictly derived from those inputs. |
96 | 104 | ||
97 | ### `crates/ra_hir` | 105 | ### `crates/ra_hir*` crates |
98 | 106 | ||
99 | HIR provides high-level "object oriented" access to Rust code. | 107 | HIR provides high-level "object oriented" access to Rust code. |
100 | 108 | ||
101 | The principal difference between HIR and syntax trees is that HIR is bound to a | 109 | The principal difference between HIR and syntax trees is that HIR is bound to a |
102 | particular crate instance. That is, it has cfg flags and features applied (in | 110 | particular crate instance. That is, it has cfg flags and features applied. So, |
103 | theory, in practice this is to be implemented). So, the relation between | 111 | the relation between syntax and HIR is many-to-one. The `source_binder` module |
104 | syntax and HIR is many-to-one. The `source_binder` module is responsible for | 112 | is responsible for guessing a HIR for a particular source position. |
105 | guessing a HIR for a particular source position. | ||
106 | 113 | ||
107 | Underneath, HIR works on top of salsa, using a `HirDatabase` trait. | 114 | Underneath, HIR works on top of salsa, using a `HirDatabase` trait. |
108 | 115 | ||
116 | `ra_hir_xxx` crates have a strong ECS flavor, in that they work with raw ids and | ||
117 | directly query the database. | ||
118 | |||
119 | The top-level `ra_hir` façade crate wraps ids into a more OO-flavored API. | ||
120 | |||
109 | ### `crates/ra_ide` | 121 | ### `crates/ra_ide` |
110 | 122 | ||
111 | A stateful library for analyzing many Rust files as they change. `AnalysisHost` | 123 | A stateful library for analyzing many Rust files as they change. `AnalysisHost` |
@@ -135,18 +147,9 @@ different from data on disk. This is more or less the single really | |||
135 | platform-dependent component, so it lives in a separate repository and has an | 147 | platform-dependent component, so it lives in a separate repository and has an |
136 | extensive cross-platform CI testing. | 148 | extensive cross-platform CI testing. |
137 | 149 | ||
138 | ### `crates/gen_lsp_server` | ||
139 | |||
140 | A language server scaffold, exposing a synchronous crossbeam-channel based API. | ||
141 | This crate handles protocol handshaking and parsing messages, while you | ||
142 | control the message dispatch loop yourself. | ||
143 | |||
144 | Run with `RUST_LOG=sync_lsp_server=debug` to see all the messages. | ||
145 | |||
146 | ### `crates/ra_cli` | 150 | ### `crates/ra_cli` |
147 | 151 | ||
148 | A CLI interface to rust-analyzer. | 152 | A CLI interface to rust-analyzer, mainly for testing. |
149 | |||
150 | 153 | ||
151 | ## Testing Infrastructure | 154 | ## Testing Infrastructure |
152 | 155 | ||
diff --git a/docs/dev/debugging.md b/docs/dev/debugging.md index f868e6998..1ccf4dca2 100644 --- a/docs/dev/debugging.md +++ b/docs/dev/debugging.md | |||
@@ -1,5 +1,7 @@ | |||
1 | # Debugging vs Code plugin and the Language Server | 1 | # Debugging vs Code plugin and the Language Server |
2 | 2 | ||
3 | **NOTE:** the information here is mostly obsolete | ||
4 | |||
3 | Install [LLDB](https://lldb.llvm.org/) and the [LLDB Extension](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb). | 5 | Install [LLDB](https://lldb.llvm.org/) and the [LLDB Extension](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb). |
4 | 6 | ||
5 | Check out rust-analyzer and open it in vscode. | 7 | Check out rust-analyzer and open it in vscode.
diff --git a/docs/user/README.md b/docs/user/README.md index 9d3258c06..3da30a193 100644 --- a/docs/user/README.md +++ b/docs/user/README.md | |||
@@ -5,8 +5,7 @@ install lsp server, clone the repository and then run `cargo xtask install | |||
5 | ./crates/ra_lsp_server`). This will produce a binary named `ra_lsp_server` which | 5 | ./crates/ra_lsp_server`). This will produce a binary named `ra_lsp_server` which |
6 | you should be able to use with any LSP-compatible editor. We use custom | 6 | you should be able to use with any LSP-compatible editor. We use custom
7 | extensions to LSP, so special client-side support is required to take full | 7 | extensions to LSP, so special client-side support is required to take full |
8 | advantage of rust-analyzer. This repository contains support code for VS Code | 8 | advantage of rust-analyzer. This repository contains support code for VS Code. |
9 | and Emacs. | ||
10 | 9 | ||
11 | ``` | 10 | ``` |
12 | $ git clone [email protected]:rust-analyzer/rust-analyzer && cd rust-analyzer | 11 | $ git clone [email protected]:rust-analyzer/rust-analyzer && cd rust-analyzer |
@@ -32,7 +31,38 @@ a minimum version of 10 installed. Please refer to | |||
32 | You will also need the most recent version of VS Code: we don't try to | 31 | You will also need the most recent version of VS Code: we don't try to |
33 | maintain compatibility with older versions yet. | 32 | maintain compatibility with older versions yet. |
34 | 33 | ||
35 | The experimental VS Code plugin can then be built and installed by executing the | 34 | ### Installation from prebuilt binaries |
35 | |||
36 | We ship prebuilt binaries for Linux, Mac and Windows via | ||
37 | [GitHub releases](https://github.com/rust-analyzer/rust-analyzer/releases). | ||
38 | In order to use them you need to install the client VSCode extension. | ||
39 | |||
40 | Publishing to VSCode marketplace is currently WIP. Thus, you need to clone the repository and install **only** the client extension via | ||
41 | ``` | ||
42 | $ git clone https://github.com/rust-analyzer/rust-analyzer.git --depth 1 | ||
43 | $ cd rust-analyzer | ||
44 | $ cargo xtask install --client-code | ||
45 | ``` | ||
46 | Then open VSCode (or reload the window if it was already running), open some Rust project and you should | ||
47 | see an info message pop-up. | ||
48 | |||
49 | |||
50 | <img height="140px" src="https://user-images.githubusercontent.com/36276403/74103174-a40df100-4b52-11ea-81f4-372c70797924.png" alt="Download now message"/> | ||
51 | |||
52 | |||
53 | Click `Download now`, wait until the progress is 100% and you are ready to go. | ||
54 | |||
55 | For updates you need to remove installed binary | ||
56 | ``` | ||
57 | rm -rf ${HOME}/.config/Code/User/globalStorage/matklad.rust-analyzer | ||
58 | ``` | ||
59 | |||
60 | `"Download latest language server"` command for VSCode and automatic updates detection is currently WIP. | ||
61 | |||
62 | |||
63 | ### Installation from sources | ||
64 | |||
65 | The experimental VS Code plugin can be built and installed by executing the | ||
36 | following commands: | 66 | following commands: |
37 | 67 | ||
38 | ``` | 68 | ``` |
@@ -47,6 +77,7 @@ doesn't, report bugs! | |||
47 | **Note** [#1831](https://github.com/rust-analyzer/rust-analyzer/issues/1831): If you are using the popular | 77 | **Note** [#1831](https://github.com/rust-analyzer/rust-analyzer/issues/1831): If you are using the popular |
48 | [Vim emulation plugin](https://github.com/VSCodeVim/Vim), you will likely | 78 | [Vim emulation plugin](https://github.com/VSCodeVim/Vim), you will likely |
49 | need to turn off the `rust-analyzer.enableEnhancedTyping` setting. | 79 | need to turn off the `rust-analyzer.enableEnhancedTyping` setting. |
80 | (// TODO: This configuration is no longer available, enhanced typing should be disabled via removing Enter key binding, [see this issue](https://github.com/rust-analyzer/rust-analyzer/issues/3051)) | ||
50 | 81 | ||
51 | If you have an unusual setup (for example, `code` is not in the `PATH`), you | 82 | If you have an unusual setup (for example, `code` is not in the `PATH`), you |
52 | should adapt these manual installation instructions: | 83 | should adapt these manual installation instructions: |
@@ -57,7 +88,7 @@ $ cd rust-analyzer | |||
57 | $ cargo install --path ./crates/ra_lsp_server/ --force --locked | 88 | $ cargo install --path ./crates/ra_lsp_server/ --force --locked |
58 | $ cd ./editors/code | 89 | $ cd ./editors/code |
59 | $ npm install | 90 | $ npm install |
60 | $ ./node_modules/vsce/out/vsce package | 91 | $ npm run package |
61 | $ code --install-extension ./rust-analyzer-0.1.0.vsix | 92 | $ code --install-extension ./rust-analyzer-0.1.0.vsix |
62 | ``` | 93 | ``` |
63 | 94 | ||
@@ -94,7 +125,7 @@ host. | |||
94 | * `rust-analyzer.highlightingOn`: enables experimental syntax highlighting. | 125 | * `rust-analyzer.highlightingOn`: enables experimental syntax highlighting. |
95 | Colors can be configured via `editor.tokenColorCustomizations`. | 126 | Colors can be configured via `editor.tokenColorCustomizations`. |
96 | As an example, [Pale Fire](https://github.com/matklad/pale-fire/) color scheme tweaks rust colors. | 127 | As an example, [Pale Fire](https://github.com/matklad/pale-fire/) color scheme tweaks rust colors. |
97 | * `rust-analyzer.enableEnhancedTyping`: by default, rust-analyzer intercepts. | 128 | * `rust-analyzer.enableEnhancedTyping`: by default, rust-analyzer intercepts the |
98 | `Enter` key to make it easier to continue comments. Note that it may conflict with VIM emulation plugin. | 129 | `Enter` key to make it easier to continue comments. Note that it may conflict with VIM emulation plugin. |
99 | * `rust-analyzer.raLspServerPath`: path to `ra_lsp_server` executable | 130 | * `rust-analyzer.raLspServerPath`: path to `ra_lsp_server` executable |
100 | * `rust-analyzer.enableCargoWatchOnStartup`: prompt to install & enable `cargo | 131 | * `rust-analyzer.enableCargoWatchOnStartup`: prompt to install & enable `cargo |
@@ -130,17 +161,12 @@ host. | |||
130 | 161 | ||
131 | ## Emacs | 162 | ## Emacs |
132 | 163 | ||
133 | Prerequisites: | 164 | * install recent version of `emacs-lsp` package by following the instructions [here][emacs-lsp] |
134 | 165 | * set `lsp-rust-server` to `'rust-analyzer` | |
135 | `emacs-lsp`, `dash` and `ht` packages. | 166 | * run `lsp` in a Rust buffer |
136 | 167 | * (Optionally) bind commands like `lsp-rust-analyzer-join-lines`, `lsp-extend-selection` and `lsp-rust-analyzer-expand-macro` to keys | |
137 | Installation: | ||
138 | 168 | ||
139 | * add | 169 | [emacs-lsp]: https://github.com/emacs-lsp/lsp-mode |
140 | [rust-analyzer.el](../../editors/emacs/rust-analyzer.el) | ||
141 | to load path and require it in `init.el` | ||
142 | * run `lsp` in a rust buffer | ||
143 | * (Optionally) bind commands like `rust-analyzer-join-lines`, `rust-analyzer-extend-selection` and `rust-analyzer-expand-macro` to keys, and enable `rust-analyzer-inlay-hints-mode` to get inline type hints | ||
144 | 170 | ||
145 | 171 | ||
146 | ## Vim and NeoVim (coc-rust-analyzer) | 172 | ## Vim and NeoVim (coc-rust-analyzer) |
@@ -173,8 +199,7 @@ let g:LanguageClient_serverCommands = { | |||
173 | 199 | ||
174 | NeoVim 0.5 (not yet released) has built in language server support. For a quick start configuration | 200 | NeoVim 0.5 (not yet released) has built in language server support. For a quick start configuration |
175 | of rust-analyzer, use [neovim/nvim-lsp](https://github.com/neovim/nvim-lsp#rust_analyzer). | 201 | of rust-analyzer, use [neovim/nvim-lsp](https://github.com/neovim/nvim-lsp#rust_analyzer). |
176 | Once `neovim/nvim-lsp` is installed, you can use `call nvim_lsp#setup("rust_analyzer", {})` | 202 | Once `neovim/nvim-lsp` is installed, use `lua require'nvim_lsp'.rust_analyzer.setup({})` in your `init.vim`. |
177 | or `lua require'nvim_lsp'.rust_analyzer.setup({})` to quickly get set up. | ||
178 | 203 | ||
179 | 204 | ||
180 | ## Sublime Text 3 | 205 | ## Sublime Text 3 |
diff --git a/docs/user/assists.md b/docs/user/assists.md index ecf206f71..f737a2fa4 100644 --- a/docs/user/assists.md +++ b/docs/user/assists.md | |||
@@ -154,20 +154,6 @@ impl Trait<u32> for () { | |||
154 | } | 154 | } |
155 | ``` | 155 | ``` |
156 | 156 | ||
157 | ## `add_import` | ||
158 | |||
159 | Adds a use statement for a given fully-qualified path. | ||
160 | |||
161 | ```rust | ||
162 | // BEFORE | ||
163 | fn process(map: std::collections::┃HashMap<String, String>) {} | ||
164 | |||
165 | // AFTER | ||
166 | use std::collections::HashMap; | ||
167 | |||
168 | fn process(map: HashMap<String, String>) {} | ||
169 | ``` | ||
170 | |||
171 | ## `add_new` | 157 | ## `add_new` |
172 | 158 | ||
173 | Adds a new inherent impl for a type. | 159 | Adds a new inherent impl for a type. |
@@ -209,6 +195,24 @@ fn main() { | |||
209 | } | 195 | } |
210 | ``` | 196 | ``` |
211 | 197 | ||
198 | ## `auto_import` | ||
199 | |||
200 | If the name is unresolved, provides all possible imports for it. | ||
201 | |||
202 | ```rust | ||
203 | // BEFORE | ||
204 | fn main() { | ||
205 | let map = HashMap┃::new(); | ||
206 | } | ||
207 | |||
208 | // AFTER | ||
209 | use std::collections::HashMap; | ||
210 | |||
211 | fn main() { | ||
212 | let map = HashMap::new(); | ||
213 | } | ||
214 | ``` | ||
215 | |||
212 | ## `change_visibility` | 216 | ## `change_visibility` |
213 | 217 | ||
214 | Adds or changes existing visibility specifier. | 218 | Adds or changes existing visibility specifier. |
@@ -550,6 +554,20 @@ fn handle(action: Action) { | |||
550 | } | 554 | } |
551 | ``` | 555 | ``` |
552 | 556 | ||
557 | ## `replace_qualified_name_with_use` | ||
558 | |||
559 | Adds a use statement for a given fully-qualified name. | ||
560 | |||
561 | ```rust | ||
562 | // BEFORE | ||
563 | fn process(map: std::collections::┃HashMap<String, String>) {} | ||
564 | |||
565 | // AFTER | ||
566 | use std::collections::HashMap; | ||
567 | |||
568 | fn process(map: HashMap<String, String>) {} | ||
569 | ``` | ||
570 | |||
553 | ## `split_import` | 571 | ## `split_import` |
554 | 572 | ||
555 | Wraps the tail of import into braces. | 573 | Wraps the tail of import into braces. |
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json index b81cf3820..5c056463e 100644 --- a/editors/code/package-lock.json +++ b/editors/code/package-lock.json | |||
@@ -25,62 +25,72 @@ | |||
25 | } | 25 | } |
26 | }, | 26 | }, |
27 | "@rollup/plugin-commonjs": { | 27 | "@rollup/plugin-commonjs": { |
28 | "version": "11.0.0", | 28 | "version": "11.0.2", |
29 | "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-11.0.0.tgz", | 29 | "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-11.0.2.tgz", |
30 | "integrity": "sha512-jnm//T5ZWOZ6zmJ61fReSCBOif+Ax8dHVoVggA+d2NA7T4qCWgQ3KYr+zN2faGEYLpe1wa03IzvhR+sqVLxUWg==", | 30 | "integrity": "sha512-MPYGZr0qdbV5zZj8/2AuomVpnRVXRU5XKXb3HVniwRoRCreGlf5kOE081isNWeiLIi6IYkwTX9zE0/c7V8g81g==", |
31 | "dev": true, | 31 | "dev": true, |
32 | "requires": { | 32 | "requires": { |
33 | "@rollup/pluginutils": "^3.0.0", | 33 | "@rollup/pluginutils": "^3.0.0", |
34 | "estree-walker": "^0.6.1", | 34 | "estree-walker": "^1.0.1", |
35 | "is-reference": "^1.1.2", | 35 | "is-reference": "^1.1.2", |
36 | "magic-string": "^0.25.2", | 36 | "magic-string": "^0.25.2", |
37 | "resolve": "^1.11.0" | 37 | "resolve": "^1.11.0" |
38 | } | 38 | } |
39 | }, | 39 | }, |
40 | "@rollup/plugin-node-resolve": { | 40 | "@rollup/plugin-node-resolve": { |
41 | "version": "6.0.0", | 41 | "version": "7.1.1", |
42 | "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-6.0.0.tgz", | 42 | "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-7.1.1.tgz", |
43 | "integrity": "sha512-GqWz1CfXOsqpeVMcoM315+O7zMxpRsmhWyhJoxLFHVSp9S64/u02i7len/FnbTNbmgYs+sZyilasijH8UiuboQ==", | 43 | "integrity": "sha512-14ddhD7TnemeHE97a4rLOhobfYvUVcaYuqTnL8Ti7Jxi9V9Jr5LY7Gko4HZ5k4h4vqQM0gBQt6tsp9xXW94WPA==", |
44 | "dev": true, | 44 | "dev": true, |
45 | "requires": { | 45 | "requires": { |
46 | "@rollup/pluginutils": "^3.0.0", | 46 | "@rollup/pluginutils": "^3.0.6", |
47 | "@types/resolve": "0.0.8", | 47 | "@types/resolve": "0.0.8", |
48 | "builtin-modules": "^3.1.0", | 48 | "builtin-modules": "^3.1.0", |
49 | "is-module": "^1.0.0", | 49 | "is-module": "^1.0.0", |
50 | "resolve": "^1.11.1" | 50 | "resolve": "^1.14.2" |
51 | } | 51 | }, |
52 | }, | 52 | "dependencies": { |
53 | "@rollup/plugin-typescript": { | 53 | "resolve": { |
54 | "version": "2.0.1", | 54 | "version": "1.15.0", |
55 | "resolved": "https://registry.npmjs.org/@rollup/plugin-typescript/-/plugin-typescript-2.0.1.tgz", | 55 | "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.0.tgz", |
56 | "integrity": "sha512-UA/bN/DlHN19xdOllXmp7G7pM2ac9dQMg0q2T1rg4Bogzb7oHXj2WGafpiNpEm54PivcJdzGRJvRnI6zCISW3w==", | 56 | "integrity": "sha512-+hTmAldEGE80U2wJJDC1lebb5jWqvTYAfm3YZ1ckk1gBr0MnCqUKlwK1e+anaFljIl+F5tR5IoZcm4ZDA1zMQw==", |
57 | "dev": true, | 57 | "dev": true, |
58 | "requires": { | 58 | "requires": { |
59 | "@rollup/pluginutils": "^3.0.0", | 59 | "path-parse": "^1.0.6" |
60 | "resolve": "^1.12.2" | 60 | } |
61 | } | ||
61 | } | 62 | } |
62 | }, | 63 | }, |
63 | "@rollup/pluginutils": { | 64 | "@rollup/pluginutils": { |
64 | "version": "3.0.1", | 65 | "version": "3.0.8", |
65 | "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-3.0.1.tgz", | 66 | "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-3.0.8.tgz", |
66 | "integrity": "sha512-PmNurkecagFimv7ZdKCVOfQuqKDPkrcpLFxRBcQ00LYr4HAjJwhCFxBiY2Xoletll2htTIiXBg6g0Yg21h2M3w==", | 67 | "integrity": "sha512-rYGeAc4sxcZ+kPG/Tw4/fwJODC3IXHYDH4qusdN/b6aLw5LPUbzpecYbEJh4sVQGPFJxd2dBU4kc1H3oy9/bnw==", |
67 | "dev": true, | 68 | "dev": true, |
68 | "requires": { | 69 | "requires": { |
69 | "estree-walker": "^0.6.1" | 70 | "estree-walker": "^1.0.1" |
70 | } | 71 | } |
71 | }, | 72 | }, |
72 | "@types/estree": { | 73 | "@types/estree": { |
73 | "version": "0.0.41", | 74 | "version": "0.0.39", |
74 | "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.41.tgz", | 75 | "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.39.tgz", |
75 | "integrity": "sha512-rIAmXyJlqw4KEBO7+u9gxZZSQHaCNnIzYrnNmYVpgfJhxTqO0brCX0SYpqUTkVI5mwwUwzmtspLBGBKroMeynA==", | 76 | "integrity": "sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw==", |
76 | "dev": true | 77 | "dev": true |
77 | }, | 78 | }, |
78 | "@types/node": { | 79 | "@types/node": { |
79 | "version": "12.12.22", | 80 | "version": "12.12.25", |
80 | "resolved": "https://registry.npmjs.org/@types/node/-/node-12.12.22.tgz", | 81 | "resolved": "https://registry.npmjs.org/@types/node/-/node-12.12.25.tgz", |
81 | "integrity": "sha512-r5i93jqbPWGXYXxianGATOxTelkp6ih/U0WVnvaqAvTqM+0U6J3kw6Xk6uq/dWNRkEVw/0SLcO5ORXbVNz4FMQ==", | 82 | "integrity": "sha512-nf1LMGZvgFX186geVZR1xMZKKblJiRfiASTHw85zED2kI1yDKHDwTKMdkaCbTlXoRKlGKaDfYywt+V0As30q3w==", |
82 | "dev": true | 83 | "dev": true |
83 | }, | 84 | }, |
85 | "@types/node-fetch": { | ||
86 | "version": "2.5.4", | ||
87 | "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.5.4.tgz", | ||
88 | "integrity": "sha512-Oz6id++2qAOFuOlE1j0ouk1dzl3mmI1+qINPNBhi9nt/gVOz0G+13Ao6qjhdF0Ys+eOkhu6JnFmt38bR3H0POQ==", | ||
89 | "dev": true, | ||
90 | "requires": { | ||
91 | "@types/node": "*" | ||
92 | } | ||
93 | }, | ||
84 | "@types/resolve": { | 94 | "@types/resolve": { |
85 | "version": "0.0.8", | 95 | "version": "0.0.8", |
86 | "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-0.0.8.tgz", | 96 | "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-0.0.8.tgz", |
@@ -90,10 +100,10 @@ | |||
90 | "@types/node": "*" | 100 | "@types/node": "*" |
91 | } | 101 | } |
92 | }, | 102 | }, |
93 | "@types/seedrandom": { | 103 | "@types/throttle-debounce": { |
94 | "version": "2.4.28", | 104 | "version": "2.1.0", |
95 | "resolved": "https://registry.npmjs.org/@types/seedrandom/-/seedrandom-2.4.28.tgz", | 105 | "resolved": "https://registry.npmjs.org/@types/throttle-debounce/-/throttle-debounce-2.1.0.tgz", |
96 | "integrity": "sha512-SMA+fUwULwK7sd/ZJicUztiPs8F1yCPwF3O23Z9uQ32ME5Ha0NmDK9+QTsYE4O2tHXChzXomSWWeIhCnoN1LqA==", | 106 | "integrity": "sha512-5eQEtSCoESnh2FsiLTxE121IiE60hnMqcb435fShf4bpLRjEu1Eoekht23y6zXS9Ts3l+Szu3TARnTsA0GkOkQ==", |
97 | "dev": true | 107 | "dev": true |
98 | }, | 108 | }, |
99 | "@types/vscode": { | 109 | "@types/vscode": { |
@@ -340,9 +350,9 @@ | |||
340 | "dev": true | 350 | "dev": true |
341 | }, | 351 | }, |
342 | "estree-walker": { | 352 | "estree-walker": { |
343 | "version": "0.6.1", | 353 | "version": "1.0.1", |
344 | "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-0.6.1.tgz", | 354 | "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-1.0.1.tgz", |
345 | "integrity": "sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==", | 355 | "integrity": "sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg==", |
346 | "dev": true | 356 | "dev": true |
347 | }, | 357 | }, |
348 | "esutils": { | 358 | "esutils": { |
@@ -429,14 +439,6 @@ | |||
429 | "dev": true, | 439 | "dev": true, |
430 | "requires": { | 440 | "requires": { |
431 | "@types/estree": "0.0.39" | 441 | "@types/estree": "0.0.39" |
432 | }, | ||
433 | "dependencies": { | ||
434 | "@types/estree": { | ||
435 | "version": "0.0.39", | ||
436 | "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.39.tgz", | ||
437 | "integrity": "sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw==", | ||
438 | "dev": true | ||
439 | } | ||
440 | } | 442 | } |
441 | }, | 443 | }, |
442 | "js-tokens": { | 444 | "js-tokens": { |
@@ -486,9 +488,9 @@ | |||
486 | } | 488 | } |
487 | }, | 489 | }, |
488 | "magic-string": { | 490 | "magic-string": { |
489 | "version": "0.25.4", | 491 | "version": "0.25.6", |
490 | "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.4.tgz", | 492 | "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.6.tgz", |
491 | "integrity": "sha512-oycWO9nEVAP2RVPbIoDoA4Y7LFIJ3xRYov93gAyJhZkET1tNuB0u7uWkZS2LpBWTJUWnmau/To8ECWRC+jKNfw==", | 493 | "integrity": "sha512-3a5LOMSGoCTH5rbqobC2HuDNRtE2glHZ8J7pK+QZYppyWA36yuNpsX994rIY2nCuyP7CZYy7lQq/X2jygiZ89g==", |
492 | "dev": true, | 494 | "dev": true, |
493 | "requires": { | 495 | "requires": { |
494 | "sourcemap-codec": "^1.4.4" | 496 | "sourcemap-codec": "^1.4.4" |
@@ -549,6 +551,11 @@ | |||
549 | "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", | 551 | "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", |
550 | "dev": true | 552 | "dev": true |
551 | }, | 553 | }, |
554 | "node-fetch": { | ||
555 | "version": "2.6.0", | ||
556 | "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", | ||
557 | "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==" | ||
558 | }, | ||
552 | "nth-check": { | 559 | "nth-check": { |
553 | "version": "1.0.2", | 560 | "version": "1.0.2", |
554 | "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-1.0.2.tgz", | 561 | "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-1.0.2.tgz", |
@@ -675,9 +682,9 @@ | |||
675 | } | 682 | } |
676 | }, | 683 | }, |
677 | "rollup": { | 684 | "rollup": { |
678 | "version": "1.27.14", | 685 | "version": "1.31.0", |
679 | "resolved": "https://registry.npmjs.org/rollup/-/rollup-1.27.14.tgz", | 686 | "resolved": "https://registry.npmjs.org/rollup/-/rollup-1.31.0.tgz", |
680 | "integrity": "sha512-DuDjEyn8Y79ALYXMt+nH/EI58L5pEw5HU9K38xXdRnxQhvzUTI/nxAawhkAHUQeudANQ//8iyrhVRHJBuR6DSQ==", | 687 | "integrity": "sha512-9C6ovSyNeEwvuRuUUmsTpJcXac1AwSL1a3x+O5lpmQKZqi5mmrjauLeqIjvREC+yNRR8fPdzByojDng+af3nVw==", |
681 | "dev": true, | 688 | "dev": true, |
682 | "requires": { | 689 | "requires": { |
683 | "@types/estree": "*", | 690 | "@types/estree": "*", |
@@ -691,11 +698,6 @@ | |||
691 | "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==", | 698 | "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==", |
692 | "dev": true | 699 | "dev": true |
693 | }, | 700 | }, |
694 | "seedrandom": { | ||
695 | "version": "3.0.5", | ||
696 | "resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-3.0.5.tgz", | ||
697 | "integrity": "sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==" | ||
698 | }, | ||
699 | "semver": { | 701 | "semver": { |
700 | "version": "6.3.0", | 702 | "version": "6.3.0", |
701 | "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", | 703 | "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", |
@@ -708,9 +710,9 @@ | |||
708 | "dev": true | 710 | "dev": true |
709 | }, | 711 | }, |
710 | "sourcemap-codec": { | 712 | "sourcemap-codec": { |
711 | "version": "1.4.6", | 713 | "version": "1.4.8", |
712 | "resolved": "https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.6.tgz", | 714 | "resolved": "https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz", |
713 | "integrity": "sha512-1ZooVLYFxC448piVLBbtOxFcXwnymH9oUF8nRd3CuYDVvkRBxRl6pB4Mtas5a4drtL+E8LDgFkQNcgIw6tc8Hg==", | 715 | "integrity": "sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==", |
714 | "dev": true | 716 | "dev": true |
715 | }, | 717 | }, |
716 | "sprintf-js": { | 718 | "sprintf-js": { |
@@ -737,6 +739,11 @@ | |||
737 | "has-flag": "^3.0.0" | 739 | "has-flag": "^3.0.0" |
738 | } | 740 | } |
739 | }, | 741 | }, |
742 | "throttle-debounce": { | ||
743 | "version": "2.1.0", | ||
744 | "resolved": "https://registry.npmjs.org/throttle-debounce/-/throttle-debounce-2.1.0.tgz", | ||
745 | "integrity": "sha512-AOvyNahXQuU7NN+VVvOOX+uW6FPaWdAOdRP5HfwYxAfCzXTFKRMoIMk+n+po318+ktcChx+F1Dd91G3YHeMKyg==" | ||
746 | }, | ||
740 | "tmp": { | 747 | "tmp": { |
741 | "version": "0.0.29", | 748 | "version": "0.0.29", |
742 | "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.29.tgz", | 749 | "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.29.tgz", |
@@ -813,9 +820,9 @@ | |||
813 | } | 820 | } |
814 | }, | 821 | }, |
815 | "typescript": { | 822 | "typescript": { |
816 | "version": "3.7.4", | 823 | "version": "3.7.5", |
817 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.4.tgz", | 824 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.5.tgz", |
818 | "integrity": "sha512-A25xv5XCtarLwXpcDNZzCGvW2D1S3/bACratYBx2sax8PefsFhlYmkQicKHvpYflFS8if4zne5zT5kpJ7pzuvw==", | 825 | "integrity": "sha512-/P5lkRXkWHNAbcJIiHPfRoKqyd7bsyCma1hZNUGfn20qm64T6ZBlrzprymeu918H+mB/0rIg2gGK/BXkhhYgBw==", |
819 | "dev": true | 826 | "dev": true |
820 | }, | 827 | }, |
821 | "typescript-formatter": { | 828 | "typescript-formatter": { |
@@ -894,27 +901,27 @@ | |||
894 | "integrity": "sha512-JvONPptw3GAQGXlVV2utDcHx0BiY34FupW/kI6mZ5x06ER5DdPG/tXWMVHjTNULF5uKPOUUD0SaXg5QaubJL0A==" | 901 | "integrity": "sha512-JvONPptw3GAQGXlVV2utDcHx0BiY34FupW/kI6mZ5x06ER5DdPG/tXWMVHjTNULF5uKPOUUD0SaXg5QaubJL0A==" |
895 | }, | 902 | }, |
896 | "vscode-languageclient": { | 903 | "vscode-languageclient": { |
897 | "version": "6.0.1", | 904 | "version": "6.1.0", |
898 | "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-6.0.1.tgz", | 905 | "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-6.1.0.tgz", |
899 | "integrity": "sha512-7yZaSHichTJEyOJykI2RLQEECf9MqNLoklzC/1OVi/M8ioIsWQ1+lkN1nTsUhd6+F7p9ar9dNmPiEhL0i5uUBA==", | 906 | "integrity": "sha512-Tcp0VoOaa0YzxL4nEfK9tsmcy76Eo8jNLvFQZwh2c8oMm02luL8uGYPLQNAiZ3XGgegfcwiQFZMqbW7DNV0vxA==", |
900 | "requires": { | 907 | "requires": { |
901 | "semver": "^6.3.0", | 908 | "semver": "^6.3.0", |
902 | "vscode-languageserver-protocol": "^3.15.1" | 909 | "vscode-languageserver-protocol": "^3.15.2" |
903 | } | 910 | } |
904 | }, | 911 | }, |
905 | "vscode-languageserver-protocol": { | 912 | "vscode-languageserver-protocol": { |
906 | "version": "3.15.1", | 913 | "version": "3.15.2", |
907 | "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.15.1.tgz", | 914 | "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.15.2.tgz", |
908 | "integrity": "sha512-wJAo06VM9ZBnRqslplDjfz6Tdive0O7z44yNxBFA3x0/YZkXBIL6I+9rwQ/9Y//0X0eCh12FQrj+KmEXf2L5eA==", | 915 | "integrity": "sha512-GdL05JKOgZ76RDg3suiGCl9enESM7iQgGw4x93ibTh4sldvZmakHmTeZ4iUApPPGKf6O3OVBtrsksBXnHYaxNg==", |
909 | "requires": { | 916 | "requires": { |
910 | "vscode-jsonrpc": "^5.0.1", | 917 | "vscode-jsonrpc": "^5.0.1", |
911 | "vscode-languageserver-types": "3.15.0" | 918 | "vscode-languageserver-types": "3.15.1" |
912 | } | 919 | } |
913 | }, | 920 | }, |
914 | "vscode-languageserver-types": { | 921 | "vscode-languageserver-types": { |
915 | "version": "3.15.0", | 922 | "version": "3.15.1", |
916 | "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.15.0.tgz", | 923 | "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.15.1.tgz", |
917 | "integrity": "sha512-AXteNagMhBWnZ6gNN0UB4HTiD/7TajgfHl6jaM6O7qz3zDJw0H3Jf83w05phihnBRCML+K6Ockh8f8bL0OObPw==" | 924 | "integrity": "sha512-+a9MPUQrNGRrGU630OGbYVQ+11iOIovjCkqxajPa9w57Sd5ruK8WQNsslzpa0x/QJqC8kRc2DUxWjIFwoNm4ZQ==" |
918 | }, | 925 | }, |
919 | "wrappy": { | 926 | "wrappy": { |
920 | "version": "1.0.2", | 927 | "version": "1.0.2", |
diff --git a/editors/code/package.json b/editors/code/package.json index cd9c99b35..f687eb8d4 100644 --- a/editors/code/package.json +++ b/editors/code/package.json | |||
@@ -8,7 +8,8 @@ | |||
8 | "version": "0.1.0", | 8 | "version": "0.1.0", |
9 | "publisher": "matklad", | 9 | "publisher": "matklad", |
10 | "repository": { | 10 | "repository": { |
11 | "url": "https://github.com/matklad/rust-analyzer/" | 11 | "url": "https://github.com/rust-analyzer/rust-analyzer.git", |
12 | "type": "git" | ||
12 | }, | 13 | }, |
13 | "categories": [ | 14 | "categories": [ |
14 | "Other" | 15 | "Other" |
@@ -17,27 +18,28 @@ | |||
17 | "vscode": "^1.41.0" | 18 | "vscode": "^1.41.0" |
18 | }, | 19 | }, |
19 | "scripts": { | 20 | "scripts": { |
20 | "vscode:prepublish": "rollup -c", | 21 | "vscode:prepublish": "tsc && rollup -c", |
21 | "package": "vsce package", | 22 | "package": "vsce package", |
22 | "watch": "tsc -watch -p ./", | 23 | "watch": "tsc --watch", |
23 | "fmt": "tsfmt -r && tslint -c tslint.json 'src/**/*.ts' --fix" | 24 | "fmt": "tsfmt -r && tslint -p tsconfig.json -c tslint.json 'src/**/*.ts' --fix" |
24 | }, | 25 | }, |
25 | "dependencies": { | 26 | "dependencies": { |
26 | "jsonc-parser": "^2.1.0", | 27 | "jsonc-parser": "^2.1.0", |
27 | "seedrandom": "^3.0.5", | 28 | "node-fetch": "^2.6.0", |
28 | "vscode-languageclient": "^6.0.1" | 29 | "throttle-debounce": "^2.1.0", |
30 | "vscode-languageclient": "^6.1.0" | ||
29 | }, | 31 | }, |
30 | "devDependencies": { | 32 | "devDependencies": { |
31 | "@rollup/plugin-commonjs": "^11.0.0", | 33 | "@rollup/plugin-commonjs": "^11.0.2", |
32 | "@rollup/plugin-node-resolve": "^6.0.0", | 34 | "@rollup/plugin-node-resolve": "^7.1.1", |
33 | "@rollup/plugin-typescript": "^2.0.1", | 35 | "@types/node": "^12.12.25", |
34 | "@types/node": "^12.12.21", | 36 | "@types/node-fetch": "^2.5.4", |
35 | "@types/seedrandom": "^2.4.28", | 37 | "@types/throttle-debounce": "^2.1.0", |
36 | "@types/vscode": "^1.41.0", | 38 | "@types/vscode": "^1.41.0", |
37 | "rollup": "^1.27.14", | 39 | "rollup": "^1.31.0", |
38 | "tslib": "^1.10.0", | 40 | "tslib": "^1.10.0", |
39 | "tslint": "^5.20.1", | 41 | "tslint": "^5.20.1", |
40 | "typescript": "^3.7.3", | 42 | "typescript": "^3.7.5", |
41 | "typescript-formatter": "^7.2.2", | 43 | "typescript-formatter": "^7.2.2", |
42 | "vsce": "^1.71.0" | 44 | "vsce": "^1.71.0" |
43 | }, | 45 | }, |
@@ -116,6 +118,11 @@ | |||
116 | "command": "rust-analyzer.reload", | 118 | "command": "rust-analyzer.reload", |
117 | "title": "Restart server", | 119 | "title": "Restart server", |
118 | "category": "Rust Analyzer" | 120 | "category": "Rust Analyzer" |
121 | }, | ||
122 | { | ||
123 | "command": "rust-analyzer.onEnter", | ||
124 | "title": "Enhanced enter key", | ||
125 | "category": "Rust Analyzer" | ||
119 | } | 126 | } |
120 | ], | 127 | ], |
121 | "keybindings": [ | 128 | "keybindings": [ |
@@ -138,6 +145,11 @@ | |||
138 | "command": "rust-analyzer.run", | 145 | "command": "rust-analyzer.run", |
139 | "key": "ctrl+r", | 146 | "key": "ctrl+r", |
140 | "when": "editorTextFocus && editorLangId == rust" | 147 | "when": "editorTextFocus && editorLangId == rust" |
148 | }, | ||
149 | { | ||
150 | "command": "rust-analyzer.onEnter", | ||
151 | "key": "enter", | ||
152 | "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust" | ||
141 | } | 153 | } |
142 | ], | 154 | ], |
143 | "configuration": { | 155 | "configuration": { |
@@ -159,17 +171,13 @@ | |||
159 | "default": {}, | 171 | "default": {}, |
160 | "description": "Fine grained feature flags to disable annoying features" | 172 | "description": "Fine grained feature flags to disable annoying features" |
161 | }, | 173 | }, |
162 | "rust-analyzer.enableEnhancedTyping": { | ||
163 | "type": "boolean", | ||
164 | "default": true, | ||
165 | "description": "Enables enhanced typing. NOTE: If using a VIM extension, you should set this to false" | ||
166 | }, | ||
167 | "rust-analyzer.raLspServerPath": { | 174 | "rust-analyzer.raLspServerPath": { |
168 | "type": [ | 175 | "type": [ |
176 | "null", | ||
169 | "string" | 177 | "string" |
170 | ], | 178 | ], |
171 | "default": "ra_lsp_server", | 179 | "default": null, |
172 | "description": "Path to ra_lsp_server executable" | 180 | "description": "Path to ra_lsp_server executable (points to bundled binary by default)" |
173 | }, | 181 | }, |
174 | "rust-analyzer.excludeGlobs": { | 182 | "rust-analyzer.excludeGlobs": { |
175 | "type": "array", | 183 | "type": "array", |
diff --git a/editors/code/rollup.config.js b/editors/code/rollup.config.js index de6a3b2b7..f8d320f46 100644 --- a/editors/code/rollup.config.js +++ b/editors/code/rollup.config.js | |||
@@ -1,12 +1,10 @@ | |||
1 | import typescript from '@rollup/plugin-typescript'; | ||
2 | import resolve from '@rollup/plugin-node-resolve'; | 1 | import resolve from '@rollup/plugin-node-resolve'; |
3 | import commonjs from '@rollup/plugin-commonjs'; | 2 | import commonjs from '@rollup/plugin-commonjs'; |
4 | import nodeBuiltins from 'builtin-modules'; | 3 | import nodeBuiltins from 'builtin-modules'; |
5 | 4 | ||
6 | export default { | 5 | export default { |
7 | input: 'src/main.ts', | 6 | input: 'out/main.js', |
8 | plugins: [ | 7 | plugins: [ |
9 | typescript(), | ||
10 | resolve({ | 8 | resolve({ |
11 | preferBuiltins: true | 9 | preferBuiltins: true |
12 | }), | 10 | }), |
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts index 1ff64a930..2e3d4aba2 100644 --- a/editors/code/src/client.ts +++ b/editors/code/src/client.ts | |||
@@ -1,25 +1,21 @@ | |||
1 | import { homedir } from 'os'; | ||
2 | import * as lc from 'vscode-languageclient'; | 1 | import * as lc from 'vscode-languageclient'; |
3 | import { spawnSync } from 'child_process'; | ||
4 | 2 | ||
5 | import { window, workspace } from 'vscode'; | 3 | import { window, workspace } from 'vscode'; |
6 | import { Config } from './config'; | 4 | import { Config } from './config'; |
5 | import { ensureLanguageServerBinary } from './installation/language_server'; | ||
7 | 6 | ||
8 | export function createClient(config: Config): lc.LanguageClient { | 7 | export async function createClient(config: Config): Promise<null | lc.LanguageClient> { |
9 | // '.' Is the fallback if no folder is open | 8 | // '.' Is the fallback if no folder is open |
10 | // TODO?: Workspace folders support Uri's (eg: file://test.txt). It might be a good idea to test if the uri points to a file. | 9 | // TODO?: Workspace folders support Uri's (eg: file://test.txt). |
11 | let folder: string = '.'; | 10 | // It might be a good idea to test if the uri points to a file. |
12 | if (workspace.workspaceFolders !== undefined) { | 11 | const workspaceFolderPath = workspace.workspaceFolders?.[0]?.uri.fsPath ?? '.'; |
13 | folder = workspace.workspaceFolders[0].uri.fsPath.toString(); | 12 | |
14 | } | 13 | const raLspServerPath = await ensureLanguageServerBinary(config.langServerSource); |
14 | if (!raLspServerPath) return null; | ||
15 | 15 | ||
16 | const command = expandPathResolving(config.raLspServerPath); | ||
17 | if (spawnSync(command, ["--version"]).status !== 0) { | ||
18 | window.showErrorMessage(`Unable to execute '${command} --version'`); | ||
19 | } | ||
20 | const run: lc.Executable = { | 16 | const run: lc.Executable = { |
21 | command, | 17 | command: raLspServerPath, |
22 | options: { cwd: folder }, | 18 | options: { cwd: workspaceFolderPath }, |
23 | }; | 19 | }; |
24 | const serverOptions: lc.ServerOptions = { | 20 | const serverOptions: lc.ServerOptions = { |
25 | run, | 21 | run, |
@@ -37,8 +33,7 @@ export function createClient(config: Config): lc.LanguageClient { | |||
37 | cargoWatchEnable: config.cargoWatchOptions.enable, | 33 | cargoWatchEnable: config.cargoWatchOptions.enable, |
38 | cargoWatchArgs: config.cargoWatchOptions.arguments, | 34 | cargoWatchArgs: config.cargoWatchOptions.arguments, |
39 | cargoWatchCommand: config.cargoWatchOptions.command, | 35 | cargoWatchCommand: config.cargoWatchOptions.command, |
40 | cargoWatchAllTargets: | 36 | cargoWatchAllTargets: config.cargoWatchOptions.allTargets, |
41 | config.cargoWatchOptions.allTargets, | ||
42 | excludeGlobs: config.excludeGlobs, | 37 | excludeGlobs: config.excludeGlobs, |
43 | useClientWatching: config.useClientWatching, | 38 | useClientWatching: config.useClientWatching, |
44 | featureFlags: config.featureFlags, | 39 | featureFlags: config.featureFlags, |
@@ -62,7 +57,7 @@ export function createClient(config: Config): lc.LanguageClient { | |||
62 | // This also requires considering our settings strategy, which is work which needs doing | 57 | // This also requires considering our settings strategy, which is work which needs doing |
63 | // @ts-ignore The tracer is private to vscode-languageclient, but we need access to it to not log publishDecorations requests | 58 | // @ts-ignore The tracer is private to vscode-languageclient, but we need access to it to not log publishDecorations requests |
64 | res._tracer = { | 59 | res._tracer = { |
65 | log: (messageOrDataObject: string | any, data?: string) => { | 60 | log: (messageOrDataObject: string | unknown, data?: string) => { |
66 | if (typeof messageOrDataObject === 'string') { | 61 | if (typeof messageOrDataObject === 'string') { |
67 | if ( | 62 | if ( |
68 | messageOrDataObject.includes( | 63 | messageOrDataObject.includes( |
@@ -86,9 +81,3 @@ export function createClient(config: Config): lc.LanguageClient { | |||
86 | res.registerProposedFeatures(); | 81 | res.registerProposedFeatures(); |
87 | return res; | 82 | return res; |
88 | } | 83 | } |
89 | function expandPathResolving(path: string) { | ||
90 | if (path.startsWith('~/')) { | ||
91 | return path.replace('~', homedir()); | ||
92 | } | ||
93 | return path; | ||
94 | } | ||
diff --git a/editors/code/src/color_theme.ts b/editors/code/src/color_theme.ts index cbad47f35..a6957a76e 100644 --- a/editors/code/src/color_theme.ts +++ b/editors/code/src/color_theme.ts | |||
@@ -28,9 +28,12 @@ export class ColorTheme { | |||
28 | static fromRules(rules: TextMateRule[]): ColorTheme { | 28 | static fromRules(rules: TextMateRule[]): ColorTheme { |
29 | const res = new ColorTheme(); | 29 | const res = new ColorTheme(); |
30 | for (const rule of rules) { | 30 | for (const rule of rules) { |
31 | const scopes = typeof rule.scope === 'string' | 31 | const scopes = typeof rule.scope === 'undefined' |
32 | ? [rule.scope] | 32 | ? [] |
33 | : rule.scope; | 33 | : typeof rule.scope === 'string' |
34 | ? [rule.scope] | ||
35 | : rule.scope; | ||
36 | |||
34 | for (const scope of scopes) { | 37 | for (const scope of scopes) { |
35 | res.rules.set(scope, rule.settings); | 38 | res.rules.set(scope, rule.settings); |
36 | } | 39 | } |
@@ -59,7 +62,7 @@ export class ColorTheme { | |||
59 | } | 62 | } |
60 | 63 | ||
61 | function loadThemeNamed(themeName: string): ColorTheme { | 64 | function loadThemeNamed(themeName: string): ColorTheme { |
62 | function isTheme(extension: vscode.Extension<any>): boolean { | 65 | function isTheme(extension: vscode.Extension<unknown>): boolean { |
63 | return ( | 66 | return ( |
64 | extension.extensionKind === vscode.ExtensionKind.UI && | 67 | extension.extensionKind === vscode.ExtensionKind.UI && |
65 | extension.packageJSON.contributes && | 68 | extension.packageJSON.contributes && |
@@ -67,13 +70,13 @@ function loadThemeNamed(themeName: string): ColorTheme { | |||
67 | ); | 70 | ); |
68 | } | 71 | } |
69 | 72 | ||
70 | let themePaths = vscode.extensions.all | 73 | const themePaths: string[] = vscode.extensions.all |
71 | .filter(isTheme) | 74 | .filter(isTheme) |
72 | .flatMap(ext => { | 75 | .flatMap( |
73 | return ext.packageJSON.contributes.themes | 76 | ext => ext.packageJSON.contributes.themes |
74 | .filter((it: any) => (it.id || it.label) === themeName) | 77 | .filter((it: any) => (it.id || it.label) === themeName) |
75 | .map((it: any) => path.join(ext.extensionPath, it.path)); | 78 | .map((it: any) => path.join(ext.extensionPath, it.path)) |
76 | }); | 79 | ); |
77 | 80 | ||
78 | const res = new ColorTheme(); | 81 | const res = new ColorTheme(); |
79 | for (const themePath of themePaths) { | 82 | for (const themePath of themePaths) { |
@@ -94,13 +97,12 @@ function loadThemeFile(themePath: string): ColorTheme { | |||
94 | return new ColorTheme(); | 97 | return new ColorTheme(); |
95 | } | 98 | } |
96 | const obj = jsonc.parse(text); | 99 | const obj = jsonc.parse(text); |
97 | const tokenColors = obj?.tokenColors ?? []; | 100 | const tokenColors: TextMateRule[] = obj?.tokenColors ?? []; |
98 | const res = ColorTheme.fromRules(tokenColors); | 101 | const res = ColorTheme.fromRules(tokenColors); |
99 | 102 | ||
100 | for (const include in obj?.include ?? []) { | 103 | for (const include of obj?.include ?? []) { |
101 | const includePath = path.join(path.dirname(themePath), include); | 104 | const includePath = path.join(path.dirname(themePath), include); |
102 | const tmp = loadThemeFile(includePath); | 105 | res.mergeFrom(loadThemeFile(includePath)); |
103 | res.mergeFrom(tmp); | ||
104 | } | 106 | } |
105 | 107 | ||
106 | return res; | 108 | return res; |
diff --git a/editors/code/src/commands/index.ts b/editors/code/src/commands/index.ts index dc075aa82..aee969432 100644 --- a/editors/code/src/commands/index.ts +++ b/editors/code/src/commands/index.ts | |||
@@ -4,24 +4,24 @@ import * as lc from 'vscode-languageclient'; | |||
4 | import { Ctx, Cmd } from '../ctx'; | 4 | import { Ctx, Cmd } from '../ctx'; |
5 | import * as sourceChange from '../source_change'; | 5 | import * as sourceChange from '../source_change'; |
6 | 6 | ||
7 | import { analyzerStatus } from './analyzer_status'; | 7 | export * from './analyzer_status'; |
8 | import { matchingBrace } from './matching_brace'; | 8 | export * from './matching_brace'; |
9 | import { joinLines } from './join_lines'; | 9 | export * from './join_lines'; |
10 | import { onEnter } from './on_enter'; | 10 | export * from './on_enter'; |
11 | import { parentModule } from './parent_module'; | 11 | export * from './parent_module'; |
12 | import { syntaxTree } from './syntax_tree'; | 12 | export * from './syntax_tree'; |
13 | import { expandMacro } from './expand_macro'; | 13 | export * from './expand_macro'; |
14 | import { run, runSingle } from './runnables'; | 14 | export * from './runnables'; |
15 | 15 | ||
16 | function collectGarbage(ctx: Ctx): Cmd { | 16 | export function collectGarbage(ctx: Ctx): Cmd { |
17 | return async () => { | 17 | return async () => { |
18 | ctx.client?.sendRequest<null>('rust-analyzer/collectGarbage', null); | 18 | ctx.client?.sendRequest<null>('rust-analyzer/collectGarbage', null); |
19 | }; | 19 | }; |
20 | } | 20 | } |
21 | 21 | ||
22 | function showReferences(ctx: Ctx): Cmd { | 22 | export function showReferences(ctx: Ctx): Cmd { |
23 | return (uri: string, position: lc.Position, locations: lc.Location[]) => { | 23 | return (uri: string, position: lc.Position, locations: lc.Location[]) => { |
24 | let client = ctx.client; | 24 | const client = ctx.client; |
25 | if (client) { | 25 | if (client) { |
26 | vscode.commands.executeCommand( | 26 | vscode.commands.executeCommand( |
27 | 'editor.action.showReferences', | 27 | 'editor.action.showReferences', |
@@ -33,13 +33,13 @@ function showReferences(ctx: Ctx): Cmd { | |||
33 | }; | 33 | }; |
34 | } | 34 | } |
35 | 35 | ||
36 | function applySourceChange(ctx: Ctx): Cmd { | 36 | export function applySourceChange(ctx: Ctx): Cmd { |
37 | return async (change: sourceChange.SourceChange) => { | 37 | return async (change: sourceChange.SourceChange) => { |
38 | sourceChange.applySourceChange(ctx, change); | 38 | await sourceChange.applySourceChange(ctx, change); |
39 | }; | 39 | }; |
40 | } | 40 | } |
41 | 41 | ||
42 | function selectAndApplySourceChange(ctx: Ctx): Cmd { | 42 | export function selectAndApplySourceChange(ctx: Ctx): Cmd { |
43 | return async (changes: sourceChange.SourceChange[]) => { | 43 | return async (changes: sourceChange.SourceChange[]) => { |
44 | if (changes.length === 1) { | 44 | if (changes.length === 1) { |
45 | await sourceChange.applySourceChange(ctx, changes[0]); | 45 | await sourceChange.applySourceChange(ctx, changes[0]); |
@@ -51,26 +51,9 @@ function selectAndApplySourceChange(ctx: Ctx): Cmd { | |||
51 | }; | 51 | }; |
52 | } | 52 | } |
53 | 53 | ||
54 | function reload(ctx: Ctx): Cmd { | 54 | export function reload(ctx: Ctx): Cmd { |
55 | return async () => { | 55 | return async () => { |
56 | vscode.window.showInformationMessage('Reloading rust-analyzer...'); | 56 | vscode.window.showInformationMessage('Reloading rust-analyzer...'); |
57 | await ctx.restartServer(); | 57 | await ctx.restartServer(); |
58 | }; | 58 | }; |
59 | } | 59 | } |
60 | |||
61 | export { | ||
62 | analyzerStatus, | ||
63 | expandMacro, | ||
64 | joinLines, | ||
65 | matchingBrace, | ||
66 | parentModule, | ||
67 | syntaxTree, | ||
68 | onEnter, | ||
69 | collectGarbage, | ||
70 | run, | ||
71 | runSingle, | ||
72 | showReferences, | ||
73 | applySourceChange, | ||
74 | selectAndApplySourceChange, | ||
75 | reload | ||
76 | }; | ||
diff --git a/editors/code/src/commands/on_enter.ts b/editors/code/src/commands/on_enter.ts index 6f61883cd..25eaebcbe 100644 --- a/editors/code/src/commands/on_enter.ts +++ b/editors/code/src/commands/on_enter.ts | |||
@@ -1,28 +1,35 @@ | |||
1 | import * as vscode from 'vscode'; | ||
1 | import * as lc from 'vscode-languageclient'; | 2 | import * as lc from 'vscode-languageclient'; |
2 | 3 | ||
3 | import { applySourceChange, SourceChange } from '../source_change'; | 4 | import { applySourceChange, SourceChange } from '../source_change'; |
4 | import { Cmd, Ctx } from '../ctx'; | 5 | import { Cmd, Ctx } from '../ctx'; |
5 | 6 | ||
6 | export function onEnter(ctx: Ctx): Cmd { | 7 | async function handleKeypress(ctx: Ctx) { |
7 | return async (event: { text: string }) => { | 8 | const editor = ctx.activeRustEditor; |
8 | const editor = ctx.activeRustEditor; | 9 | const client = ctx.client; |
9 | const client = ctx.client; | 10 | |
10 | if (!editor || event.text !== '\n') return false; | 11 | if (!editor || !client) return false; |
11 | if (!client) return false; | 12 | |
13 | const request: lc.TextDocumentPositionParams = { | ||
14 | textDocument: { uri: editor.document.uri.toString() }, | ||
15 | position: client.code2ProtocolConverter.asPosition( | ||
16 | editor.selection.active, | ||
17 | ), | ||
18 | }; | ||
19 | const change = await client.sendRequest<undefined | SourceChange>( | ||
20 | 'rust-analyzer/onEnter', | ||
21 | request, | ||
22 | ); | ||
23 | if (!change) return false; | ||
12 | 24 | ||
13 | const request: lc.TextDocumentPositionParams = { | 25 | await applySourceChange(ctx, change); |
14 | textDocument: { uri: editor.document.uri.toString() }, | 26 | return true; |
15 | position: client.code2ProtocolConverter.asPosition( | 27 | } |
16 | editor.selection.active, | 28 | |
17 | ), | 29 | export function onEnter(ctx: Ctx): Cmd { |
18 | }; | 30 | return async () => { |
19 | const change = await client.sendRequest<undefined | SourceChange>( | 31 | if (await handleKeypress(ctx)) return; |
20 | 'rust-analyzer/onEnter', | ||
21 | request, | ||
22 | ); | ||
23 | if (!change) return false; | ||
24 | 32 | ||
25 | await applySourceChange(ctx, change); | 33 | await vscode.commands.executeCommand('default:type', { text: '\n' }); |
26 | return true; | ||
27 | }; | 34 | }; |
28 | } | 35 | } |
diff --git a/editors/code/src/commands/syntax_tree.ts b/editors/code/src/commands/syntax_tree.ts index 02ea9f166..7dde66ad1 100644 --- a/editors/code/src/commands/syntax_tree.ts +++ b/editors/code/src/commands/syntax_tree.ts | |||
@@ -22,6 +22,7 @@ export function syntaxTree(ctx: Ctx): Cmd { | |||
22 | if (doc.languageId !== 'rust') return; | 22 | if (doc.languageId !== 'rust') return; |
23 | afterLs(() => tdcp.eventEmitter.fire(tdcp.uri)); | 23 | afterLs(() => tdcp.eventEmitter.fire(tdcp.uri)); |
24 | }, | 24 | }, |
25 | null, | ||
25 | ctx.subscriptions, | 26 | ctx.subscriptions, |
26 | ); | 27 | ); |
27 | 28 | ||
@@ -30,6 +31,7 @@ export function syntaxTree(ctx: Ctx): Cmd { | |||
30 | if (!editor || editor.document.languageId !== 'rust') return; | 31 | if (!editor || editor.document.languageId !== 'rust') return; |
31 | tdcp.eventEmitter.fire(tdcp.uri); | 32 | tdcp.eventEmitter.fire(tdcp.uri); |
32 | }, | 33 | }, |
34 | null, | ||
33 | ctx.subscriptions, | 35 | ctx.subscriptions, |
34 | ); | 36 | ); |
35 | 37 | ||
@@ -55,7 +57,7 @@ export function syntaxTree(ctx: Ctx): Cmd { | |||
55 | 57 | ||
56 | // We need to order this after LS updates, but there's no API for that. | 58 | // We need to order this after LS updates, but there's no API for that. |
57 | // Hence, good old setTimeout. | 59 | // Hence, good old setTimeout. |
58 | function afterLs(f: () => any) { | 60 | function afterLs(f: () => void) { |
59 | setTimeout(f, 10); | 61 | setTimeout(f, 10); |
60 | } | 62 | } |
61 | 63 | ||
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index fc21c8813..d5f3da2ed 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts | |||
@@ -1,4 +1,6 @@ | |||
1 | import * as os from "os"; | ||
1 | import * as vscode from 'vscode'; | 2 | import * as vscode from 'vscode'; |
3 | import { BinarySource } from "./installation/interfaces"; | ||
2 | 4 | ||
3 | const RA_LSP_DEBUG = process.env.__RA_LSP_SERVER_DEBUG; | 5 | const RA_LSP_DEBUG = process.env.__RA_LSP_SERVER_DEBUG; |
4 | 6 | ||
@@ -16,16 +18,17 @@ export interface CargoFeatures { | |||
16 | } | 18 | } |
17 | 19 | ||
18 | export class Config { | 20 | export class Config { |
21 | langServerSource!: null | BinarySource; | ||
22 | |||
19 | highlightingOn = true; | 23 | highlightingOn = true; |
20 | rainbowHighlightingOn = false; | 24 | rainbowHighlightingOn = false; |
21 | enableEnhancedTyping = true; | 25 | enableEnhancedTyping = true; |
22 | raLspServerPath = RA_LSP_DEBUG || 'ra_lsp_server'; | ||
23 | lruCapacity: null | number = null; | 26 | lruCapacity: null | number = null; |
24 | displayInlayHints = true; | 27 | displayInlayHints = true; |
25 | maxInlayHintLength: null | number = null; | 28 | maxInlayHintLength: null | number = null; |
26 | excludeGlobs = []; | 29 | excludeGlobs: string[] = []; |
27 | useClientWatching = true; | 30 | useClientWatching = true; |
28 | featureFlags = {}; | 31 | featureFlags: Record<string, boolean> = {}; |
29 | // for internal use | 32 | // for internal use |
30 | withSysroot: null | boolean = null; | 33 | withSysroot: null | boolean = null; |
31 | cargoWatchOptions: CargoWatchOptions = { | 34 | cargoWatchOptions: CargoWatchOptions = { |
@@ -45,11 +48,72 @@ export class Config { | |||
45 | private prevCargoWatchOptions: null | CargoWatchOptions = null; | 48 | private prevCargoWatchOptions: null | CargoWatchOptions = null; |
46 | 49 | ||
47 | constructor(ctx: vscode.ExtensionContext) { | 50 | constructor(ctx: vscode.ExtensionContext) { |
48 | vscode.workspace.onDidChangeConfiguration(_ => this.refresh(), ctx.subscriptions); | 51 | vscode.workspace.onDidChangeConfiguration(_ => this.refresh(ctx), null, ctx.subscriptions); |
49 | this.refresh(); | 52 | this.refresh(ctx); |
53 | } | ||
54 | |||
55 | private static expandPathResolving(path: string) { | ||
56 | if (path.startsWith('~/')) { | ||
57 | return path.replace('~', os.homedir()); | ||
58 | } | ||
59 | return path; | ||
60 | } | ||
61 | |||
62 | /** | ||
63 | * Name of the binary artifact for `ra_lsp_server` that is published for | ||
64 | * `platform` on GitHub releases. (It is also stored under the same name when | ||
65 | * downloaded by the extension). | ||
66 | */ | ||
67 | private static prebuiltLangServerFileName(platform: NodeJS.Platform): null | string { | ||
68 | switch (platform) { | ||
69 | case "linux": return "ra_lsp_server-linux"; | ||
70 | case "darwin": return "ra_lsp_server-mac"; | ||
71 | case "win32": return "ra_lsp_server-windows.exe"; | ||
72 | |||
73 | // Users on these platforms yet need to manually build from sources | ||
74 | case "aix": | ||
75 | case "android": | ||
76 | case "freebsd": | ||
77 | case "openbsd": | ||
78 | case "sunos": | ||
79 | case "cygwin": | ||
80 | case "netbsd": return null; | ||
81 | // The list of platforms is exhaustive (see `NodeJS.Platform` type definition) | ||
82 | } | ||
83 | } | ||
84 | |||
85 | private static langServerBinarySource( | ||
86 | ctx: vscode.ExtensionContext, | ||
87 | config: vscode.WorkspaceConfiguration | ||
88 | ): null | BinarySource { | ||
89 | const langServerPath = RA_LSP_DEBUG ?? config.get<null | string>("raLspServerPath"); | ||
90 | |||
91 | if (langServerPath) { | ||
92 | return { | ||
93 | type: BinarySource.Type.ExplicitPath, | ||
94 | path: Config.expandPathResolving(langServerPath) | ||
95 | }; | ||
96 | } | ||
97 | |||
98 | const prebuiltBinaryName = Config.prebuiltLangServerFileName(process.platform); | ||
99 | |||
100 | if (!prebuiltBinaryName) return null; | ||
101 | |||
102 | return { | ||
103 | type: BinarySource.Type.GithubRelease, | ||
104 | dir: ctx.globalStoragePath, | ||
105 | file: prebuiltBinaryName, | ||
106 | repo: { | ||
107 | name: "rust-analyzer", | ||
108 | owner: "rust-analyzer", | ||
109 | } | ||
110 | }; | ||
50 | } | 111 | } |
51 | 112 | ||
52 | private refresh() { | 113 | |
114 | // FIXME: revisit the logic for `if (.has(...)) config.get(...)` set default | ||
115 | // values only in one place (i.e. remove default values from non-readonly members declarations) | ||
116 | private refresh(ctx: vscode.ExtensionContext) { | ||
53 | const config = vscode.workspace.getConfiguration('rust-analyzer'); | 117 | const config = vscode.workspace.getConfiguration('rust-analyzer'); |
54 | 118 | ||
55 | let requireReloadMessage = null; | 119 | let requireReloadMessage = null; |
@@ -82,10 +146,7 @@ export class Config { | |||
82 | this.prevEnhancedTyping = this.enableEnhancedTyping; | 146 | this.prevEnhancedTyping = this.enableEnhancedTyping; |
83 | } | 147 | } |
84 | 148 | ||
85 | if (config.has('raLspServerPath')) { | 149 | this.langServerSource = Config.langServerBinarySource(ctx, config); |
86 | this.raLspServerPath = | ||
87 | RA_LSP_DEBUG || (config.get('raLspServerPath') as string); | ||
88 | } | ||
89 | 150 | ||
90 | if (config.has('cargo-watch.enable')) { | 151 | if (config.has('cargo-watch.enable')) { |
91 | this.cargoWatchOptions.enable = config.get<boolean>( | 152 | this.cargoWatchOptions.enable = config.get<boolean>( |
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts index a2a4e42a9..70042a479 100644 --- a/editors/code/src/ctx.ts +++ b/editors/code/src/ctx.ts | |||
@@ -1,5 +1,6 @@ | |||
1 | import * as vscode from 'vscode'; | 1 | import * as vscode from 'vscode'; |
2 | import * as lc from 'vscode-languageclient'; | 2 | import * as lc from 'vscode-languageclient'; |
3 | |||
3 | import { Config } from './config'; | 4 | import { Config } from './config'; |
4 | import { createClient } from './client'; | 5 | import { createClient } from './client'; |
5 | 6 | ||
@@ -10,6 +11,9 @@ export class Ctx { | |||
10 | // deal with it. | 11 | // deal with it. |
11 | // | 12 | // |
12 | // Ideally, this should be replaced with async getter though. | 13 | // Ideally, this should be replaced with async getter though. |
14 | // FIXME: this actually needs synchronization of some kind (check how | ||
15 | // vscode deals with `deactivate()` call when extension has some work scheduled | ||
16 | // on the event loop to get a better picture of what we can do here) | ||
13 | client: lc.LanguageClient | null = null; | 17 | client: lc.LanguageClient | null = null; |
14 | private extCtx: vscode.ExtensionContext; | 18 | private extCtx: vscode.ExtensionContext; |
15 | private onDidRestartHooks: Array<(client: lc.LanguageClient) => void> = []; | 19 | private onDidRestartHooks: Array<(client: lc.LanguageClient) => void> = []; |
@@ -20,12 +24,19 @@ export class Ctx { | |||
20 | } | 24 | } |
21 | 25 | ||
22 | async restartServer() { | 26 | async restartServer() { |
23 | let old = this.client; | 27 | const old = this.client; |
24 | if (old) { | 28 | if (old) { |
25 | await old.stop(); | 29 | await old.stop(); |
26 | } | 30 | } |
27 | this.client = null; | 31 | this.client = null; |
28 | const client = createClient(this.config); | 32 | const client = await createClient(this.config); |
33 | if (!client) { | ||
34 | throw new Error( | ||
35 | "Rust Analyzer Language Server is not available. " + | ||
36 | "Please, ensure its [proper installation](https://github.com/rust-analyzer/rust-analyzer/tree/master/docs/user#vs-code)." | ||
37 | ); | ||
38 | } | ||
39 | |||
29 | this.pushCleanup(client.start()); | 40 | this.pushCleanup(client.start()); |
30 | await client.onReady(); | 41 | await client.onReady(); |
31 | 42 | ||
@@ -49,33 +60,11 @@ export class Ctx { | |||
49 | this.pushCleanup(d); | 60 | this.pushCleanup(d); |
50 | } | 61 | } |
51 | 62 | ||
52 | overrideCommand(name: string, factory: (ctx: Ctx) => Cmd) { | 63 | get subscriptions(): Disposable[] { |
53 | const defaultCmd = `default:${name}`; | ||
54 | const override = factory(this); | ||
55 | const original = (...args: any[]) => | ||
56 | vscode.commands.executeCommand(defaultCmd, ...args); | ||
57 | try { | ||
58 | const d = vscode.commands.registerCommand( | ||
59 | name, | ||
60 | async (...args: any[]) => { | ||
61 | if (!(await override(...args))) { | ||
62 | return await original(...args); | ||
63 | } | ||
64 | }, | ||
65 | ); | ||
66 | this.pushCleanup(d); | ||
67 | } catch (_) { | ||
68 | vscode.window.showWarningMessage( | ||
69 | 'Enhanced typing feature is disabled because of incompatibility with VIM extension, consider turning off rust-analyzer.enableEnhancedTyping: https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/user/README.md#settings', | ||
70 | ); | ||
71 | } | ||
72 | } | ||
73 | |||
74 | get subscriptions(): { dispose(): any }[] { | ||
75 | return this.extCtx.subscriptions; | 64 | return this.extCtx.subscriptions; |
76 | } | 65 | } |
77 | 66 | ||
78 | pushCleanup(d: { dispose(): any }) { | 67 | pushCleanup(d: Disposable) { |
79 | this.extCtx.subscriptions.push(d); | 68 | this.extCtx.subscriptions.push(d); |
80 | } | 69 | } |
81 | 70 | ||
@@ -84,12 +73,15 @@ export class Ctx { | |||
84 | } | 73 | } |
85 | } | 74 | } |
86 | 75 | ||
87 | export type Cmd = (...args: any[]) => any; | 76 | export interface Disposable { |
77 | dispose(): void; | ||
78 | } | ||
79 | export type Cmd = (...args: any[]) => unknown; | ||
88 | 80 | ||
89 | export async function sendRequestWithRetry<R>( | 81 | export async function sendRequestWithRetry<R>( |
90 | client: lc.LanguageClient, | 82 | client: lc.LanguageClient, |
91 | method: string, | 83 | method: string, |
92 | param: any, | 84 | param: unknown, |
93 | token?: vscode.CancellationToken, | 85 | token?: vscode.CancellationToken, |
94 | ): Promise<R> { | 86 | ): Promise<R> { |
95 | for (const delay of [2, 4, 6, 8, 10, null]) { | 87 | for (const delay of [2, 4, 6, 8, 10, null]) { |
diff --git a/editors/code/src/highlighting.ts b/editors/code/src/highlighting.ts index 014e96f75..4fbbe3ddc 100644 --- a/editors/code/src/highlighting.ts +++ b/editors/code/src/highlighting.ts | |||
@@ -1,7 +1,5 @@ | |||
1 | import * as vscode from 'vscode'; | 1 | import * as vscode from 'vscode'; |
2 | import * as lc from 'vscode-languageclient'; | 2 | import * as lc from 'vscode-languageclient'; |
3 | import * as seedrandom_ from 'seedrandom'; | ||
4 | const seedrandom = seedrandom_; // https://github.com/jvandemo/generator-angular2-library/issues/221#issuecomment-355945207 | ||
5 | 3 | ||
6 | import { ColorTheme, TextMateRuleSettings } from './color_theme'; | 4 | import { ColorTheme, TextMateRuleSettings } from './color_theme'; |
7 | 5 | ||
@@ -34,6 +32,7 @@ export function activateHighlighting(ctx: Ctx) { | |||
34 | 32 | ||
35 | vscode.workspace.onDidChangeConfiguration( | 33 | vscode.workspace.onDidChangeConfiguration( |
36 | _ => highlighter.removeHighlights(), | 34 | _ => highlighter.removeHighlights(), |
35 | null, | ||
37 | ctx.subscriptions, | 36 | ctx.subscriptions, |
38 | ); | 37 | ); |
39 | 38 | ||
@@ -41,7 +40,7 @@ export function activateHighlighting(ctx: Ctx) { | |||
41 | async (editor: vscode.TextEditor | undefined) => { | 40 | async (editor: vscode.TextEditor | undefined) => { |
42 | if (!editor || editor.document.languageId !== 'rust') return; | 41 | if (!editor || editor.document.languageId !== 'rust') return; |
43 | if (!ctx.config.highlightingOn) return; | 42 | if (!ctx.config.highlightingOn) return; |
44 | let client = ctx.client; | 43 | const client = ctx.client; |
45 | if (!client) return; | 44 | if (!client) return; |
46 | 45 | ||
47 | const params: lc.TextDocumentIdentifier = { | 46 | const params: lc.TextDocumentIdentifier = { |
@@ -54,6 +53,7 @@ export function activateHighlighting(ctx: Ctx) { | |||
54 | ); | 53 | ); |
55 | highlighter.setHighlights(editor, decorations); | 54 | highlighter.setHighlights(editor, decorations); |
56 | }, | 55 | }, |
56 | null, | ||
57 | ctx.subscriptions, | 57 | ctx.subscriptions, |
58 | ); | 58 | ); |
59 | } | 59 | } |
@@ -71,9 +71,9 @@ interface Decoration { | |||
71 | 71 | ||
72 | // Based on this HSL-based color generator: https://gist.github.com/bendc/76c48ce53299e6078a76 | 72 | // Based on this HSL-based color generator: https://gist.github.com/bendc/76c48ce53299e6078a76 |
73 | function fancify(seed: string, shade: 'light' | 'dark') { | 73 | function fancify(seed: string, shade: 'light' | 'dark') { |
74 | const random = seedrandom(seed); | 74 | const random = randomU32Numbers(hashString(seed)); |
75 | const randomInt = (min: number, max: number) => { | 75 | const randomInt = (min: number, max: number) => { |
76 | return Math.floor(random() * (max - min + 1)) + min; | 76 | return Math.abs(random()) % (max - min + 1) + min; |
77 | }; | 77 | }; |
78 | 78 | ||
79 | const h = randomInt(0, 360); | 79 | const h = randomInt(0, 360); |
@@ -107,7 +107,7 @@ class Highlighter { | |||
107 | } | 107 | } |
108 | 108 | ||
109 | public setHighlights(editor: vscode.TextEditor, highlights: Decoration[]) { | 109 | public setHighlights(editor: vscode.TextEditor, highlights: Decoration[]) { |
110 | let client = this.ctx.client; | 110 | const client = this.ctx.client; |
111 | if (!client) return; | 111 | if (!client) return; |
112 | // Initialize decorations if necessary | 112 | // Initialize decorations if necessary |
113 | // | 113 | // |
@@ -176,7 +176,7 @@ function initDecorations(): Map<string, vscode.TextEditorDecorationType> { | |||
176 | const res = new Map(); | 176 | const res = new Map(); |
177 | TAG_TO_SCOPES.forEach((scopes, tag) => { | 177 | TAG_TO_SCOPES.forEach((scopes, tag) => { |
178 | if (!scopes) throw `unmapped tag: ${tag}`; | 178 | if (!scopes) throw `unmapped tag: ${tag}`; |
179 | let rule = theme.lookup(scopes); | 179 | const rule = theme.lookup(scopes); |
180 | const decor = createDecorationFromTextmate(rule); | 180 | const decor = createDecorationFromTextmate(rule); |
181 | res.set(tag, decor); | 181 | res.set(tag, decor); |
182 | }); | 182 | }); |
@@ -247,3 +247,23 @@ const TAG_TO_SCOPES = new Map<string, string[]>([ | |||
247 | ["keyword.unsafe", ["keyword.other.unsafe"]], | 247 | ["keyword.unsafe", ["keyword.other.unsafe"]], |
248 | ["keyword.control", ["keyword.control"]], | 248 | ["keyword.control", ["keyword.control"]], |
249 | ]); | 249 | ]); |
250 | |||
251 | function randomU32Numbers(seed: number) { | ||
252 | let random = seed | 0; | ||
253 | return () => { | ||
254 | random ^= random << 13; | ||
255 | random ^= random >> 17; | ||
256 | random ^= random << 5; | ||
257 | random |= 0; | ||
258 | return random; | ||
259 | }; | ||
260 | } | ||
261 | |||
262 | function hashString(str: string): number { | ||
263 | let res = 0; | ||
264 | for (let i = 0; i < str.length; ++i) { | ||
265 | const c = str.codePointAt(i)!; | ||
266 | res = (res * 31 + c) & ~0; | ||
267 | } | ||
268 | return res; | ||
269 | } | ||
diff --git a/editors/code/src/inlay_hints.ts b/editors/code/src/inlay_hints.ts index 6357e44f1..1c019a51b 100644 --- a/editors/code/src/inlay_hints.ts +++ b/editors/code/src/inlay_hints.ts | |||
@@ -5,19 +5,27 @@ import { Ctx, sendRequestWithRetry } from './ctx'; | |||
5 | 5 | ||
6 | export function activateInlayHints(ctx: Ctx) { | 6 | export function activateInlayHints(ctx: Ctx) { |
7 | const hintsUpdater = new HintsUpdater(ctx); | 7 | const hintsUpdater = new HintsUpdater(ctx); |
8 | vscode.window.onDidChangeVisibleTextEditors(async _ => { | 8 | vscode.window.onDidChangeVisibleTextEditors( |
9 | await hintsUpdater.refresh(); | 9 | async _ => hintsUpdater.refresh(), |
10 | }, ctx.subscriptions); | 10 | null, |
11 | 11 | ctx.subscriptions | |
12 | vscode.workspace.onDidChangeTextDocument(async e => { | 12 | ); |
13 | if (e.contentChanges.length === 0) return; | 13 | |
14 | if (e.document.languageId !== 'rust') return; | 14 | vscode.workspace.onDidChangeTextDocument( |
15 | await hintsUpdater.refresh(); | 15 | async event => { |
16 | }, ctx.subscriptions); | 16 | if (event.contentChanges.length !== 0) return; |
17 | 17 | if (event.document.languageId !== 'rust') return; | |
18 | vscode.workspace.onDidChangeConfiguration(_ => { | 18 | await hintsUpdater.refresh(); |
19 | hintsUpdater.setEnabled(ctx.config.displayInlayHints); | 19 | }, |
20 | }, ctx.subscriptions); | 20 | null, |
21 | ctx.subscriptions | ||
22 | ); | ||
23 | |||
24 | vscode.workspace.onDidChangeConfiguration( | ||
25 | async _ => hintsUpdater.setEnabled(ctx.config.displayInlayHints), | ||
26 | null, | ||
27 | ctx.subscriptions | ||
28 | ); | ||
21 | 29 | ||
22 | ctx.onDidRestart(_ => hintsUpdater.setEnabled(ctx.config.displayInlayHints)); | 30 | ctx.onDidRestart(_ => hintsUpdater.setEnabled(ctx.config.displayInlayHints)); |
23 | } | 31 | } |
@@ -127,13 +135,13 @@ class HintsUpdater { | |||
127 | } | 135 | } |
128 | 136 | ||
129 | private async queryHints(documentUri: string): Promise<InlayHint[] | null> { | 137 | private async queryHints(documentUri: string): Promise<InlayHint[] | null> { |
130 | let client = this.ctx.client; | 138 | const client = this.ctx.client; |
131 | if (!client) return null; | 139 | if (!client) return null; |
132 | const request: InlayHintsParams = { | 140 | const request: InlayHintsParams = { |
133 | textDocument: { uri: documentUri }, | 141 | textDocument: { uri: documentUri }, |
134 | }; | 142 | }; |
135 | let tokenSource = new vscode.CancellationTokenSource(); | 143 | const tokenSource = new vscode.CancellationTokenSource(); |
136 | let prev = this.pending.get(documentUri); | 144 | const prev = this.pending.get(documentUri); |
137 | if (prev) prev.cancel(); | 145 | if (prev) prev.cancel(); |
138 | this.pending.set(documentUri, tokenSource); | 146 | this.pending.set(documentUri, tokenSource); |
139 | try { | 147 | try { |
diff --git a/editors/code/src/installation/download_file.ts b/editors/code/src/installation/download_file.ts new file mode 100644 index 000000000..b51602ef9 --- /dev/null +++ b/editors/code/src/installation/download_file.ts | |||
@@ -0,0 +1,34 @@ | |||
1 | import fetch from "node-fetch"; | ||
2 | import * as fs from "fs"; | ||
3 | import { strict as assert } from "assert"; | ||
4 | |||
5 | /** | ||
6 | * Downloads file from `url` and stores it at `destFilePath`. | ||
7 | * `onProgress` callback is called on receiving each chunk of bytes | ||
8 | * to track the progress of downloading, it gets the already read and total | ||
9 | * amount of bytes to read as its parameters. | ||
10 | */ | ||
11 | export async function downloadFile( | ||
12 | url: string, | ||
13 | destFilePath: fs.PathLike, | ||
14 | onProgress: (readBytes: number, totalBytes: number) => void | ||
15 | ): Promise<void> { | ||
16 | const response = await fetch(url); | ||
17 | |||
18 | const totalBytes = Number(response.headers.get('content-length')); | ||
19 | assert(!Number.isNaN(totalBytes), "Sanity check of content-length protocol"); | ||
20 | |||
21 | let readBytes = 0; | ||
22 | |||
23 | console.log("Downloading file of", totalBytes, "bytes size from", url, "to", destFilePath); | ||
24 | |||
25 | return new Promise<void>((resolve, reject) => response.body | ||
26 | .on("data", (chunk: Buffer) => { | ||
27 | readBytes += chunk.length; | ||
28 | onProgress(readBytes, totalBytes); | ||
29 | }) | ||
30 | .on("end", resolve) | ||
31 | .on("error", reject) | ||
32 | .pipe(fs.createWriteStream(destFilePath)) | ||
33 | ); | ||
34 | } | ||
diff --git a/editors/code/src/installation/fetch_latest_artifact_metadata.ts b/editors/code/src/installation/fetch_latest_artifact_metadata.ts new file mode 100644 index 000000000..7e3700603 --- /dev/null +++ b/editors/code/src/installation/fetch_latest_artifact_metadata.ts | |||
@@ -0,0 +1,46 @@ | |||
1 | import fetch from "node-fetch"; | ||
2 | import { GithubRepo, ArtifactMetadata } from "./interfaces"; | ||
3 | |||
4 | const GITHUB_API_ENDPOINT_URL = "https://api.github.com"; | ||
5 | |||
6 | /** | ||
7 | * Fetches the latest release from GitHub `repo` and returns metadata about | ||
8 | * `artifactFileName` shipped with this release or `null` if no such artifact was published. | ||
9 | */ | ||
10 | export async function fetchLatestArtifactMetadata( | ||
11 | repo: GithubRepo, artifactFileName: string | ||
12 | ): Promise<null | ArtifactMetadata> { | ||
13 | |||
14 | const repoOwner = encodeURIComponent(repo.owner); | ||
15 | const repoName = encodeURIComponent(repo.name); | ||
16 | |||
17 | const apiEndpointPath = `/repos/${repoOwner}/${repoName}/releases/latest`; | ||
18 | const requestUrl = GITHUB_API_ENDPOINT_URL + apiEndpointPath; | ||
19 | |||
20 | // We skip runtime type checks for simplicity (here we cast from `any` to `GithubRelease`) | ||
21 | |||
22 | console.log("Issuing request for released artifacts metadata to", requestUrl); | ||
23 | |||
24 | const response: GithubRelease = await fetch(requestUrl, { | ||
25 | headers: { Accept: "application/vnd.github.v3+json" } | ||
26 | }) | ||
27 | .then(res => res.json()); | ||
28 | |||
29 | const artifact = response.assets.find(artifact => artifact.name === artifactFileName); | ||
30 | |||
31 | if (!artifact) return null; | ||
32 | |||
33 | return { | ||
34 | releaseName: response.name, | ||
35 | downloadUrl: artifact.browser_download_url | ||
36 | }; | ||
37 | |||
38 | // We omit declaration of tremendous amount of fields that we are not using here | ||
39 | interface GithubRelease { | ||
40 | name: string; | ||
41 | assets: Array<{ | ||
42 | name: string; | ||
43 | browser_download_url: string; | ||
44 | }>; | ||
45 | } | ||
46 | } | ||
diff --git a/editors/code/src/installation/interfaces.ts b/editors/code/src/installation/interfaces.ts new file mode 100644 index 000000000..8039d0b90 --- /dev/null +++ b/editors/code/src/installation/interfaces.ts | |||
@@ -0,0 +1,55 @@ | |||
1 | export interface GithubRepo { | ||
2 | name: string; | ||
3 | owner: string; | ||
4 | } | ||
5 | |||
6 | /** | ||
7 | * Metadata about particular artifact retrieved from GitHub releases. | ||
8 | */ | ||
9 | export interface ArtifactMetadata { | ||
10 | releaseName: string; | ||
11 | downloadUrl: string; | ||
12 | } | ||
13 | |||
14 | /** | ||
15 | * Represents the source of a binary artifact which is either specified by the user | ||
16 | * explicitly, or bundled by this extension from GitHub releases. | ||
17 | */ | ||
18 | export type BinarySource = BinarySource.ExplicitPath | BinarySource.GithubRelease; | ||
19 | |||
20 | export namespace BinarySource { | ||
21 | /** | ||
22 | * Type tag for `BinarySource` discriminated union. | ||
23 | */ | ||
24 | export const enum Type { ExplicitPath, GithubRelease } | ||
25 | |||
26 | export interface ExplicitPath { | ||
27 | type: Type.ExplicitPath; | ||
28 | |||
29 | /** | ||
30 | * Filesystem path to the binary specified by the user explicitly. | ||
31 | */ | ||
32 | path: string; | ||
33 | } | ||
34 | |||
35 | export interface GithubRelease { | ||
36 | type: Type.GithubRelease; | ||
37 | |||
38 | /** | ||
39 | * Repository where the binary is stored. | ||
40 | */ | ||
41 | repo: GithubRepo; | ||
42 | |||
43 | /** | ||
44 | * Directory on the filesystem where the bundled binary is stored. | ||
45 | */ | ||
46 | dir: string; | ||
47 | |||
48 | /** | ||
49 | * Name of the binary file. It is stored under the same name on GitHub releases | ||
50 | * and in local `.dir`. | ||
51 | */ | ||
52 | file: string; | ||
53 | } | ||
54 | |||
55 | } | ||
diff --git a/editors/code/src/installation/language_server.ts b/editors/code/src/installation/language_server.ts new file mode 100644 index 000000000..1ce67b8b2 --- /dev/null +++ b/editors/code/src/installation/language_server.ts | |||
@@ -0,0 +1,141 @@ | |||
1 | import * as vscode from "vscode"; | ||
2 | import * as path from "path"; | ||
3 | import { strict as assert } from "assert"; | ||
4 | import { promises as fs } from "fs"; | ||
5 | import { promises as dns } from "dns"; | ||
6 | import { spawnSync } from "child_process"; | ||
7 | import { throttle } from "throttle-debounce"; | ||
8 | |||
9 | import { BinarySource } from "./interfaces"; | ||
10 | import { fetchLatestArtifactMetadata } from "./fetch_latest_artifact_metadata"; | ||
11 | import { downloadFile } from "./download_file"; | ||
12 | |||
13 | export async function downloadLatestLanguageServer( | ||
14 | {file: artifactFileName, dir: installationDir, repo}: BinarySource.GithubRelease | ||
15 | ) { | ||
16 | const { releaseName, downloadUrl } = (await fetchLatestArtifactMetadata( | ||
17 | repo, artifactFileName | ||
18 | ))!; | ||
19 | |||
20 | await fs.mkdir(installationDir).catch(err => assert.strictEqual( | ||
21 | err?.code, | ||
22 | "EEXIST", | ||
23 | `Couldn't create directory "${installationDir}" to download `+ | ||
24 | `language server binary: ${err.message}` | ||
25 | )); | ||
26 | |||
27 | const installationPath = path.join(installationDir, artifactFileName); | ||
28 | |||
29 | console.time("Downloading ra_lsp_server"); | ||
30 | await vscode.window.withProgress( | ||
31 | { | ||
32 | location: vscode.ProgressLocation.Notification, | ||
33 | cancellable: false, // FIXME: add support for canceling download? | ||
34 | title: `Downloading language server (${releaseName})` | ||
35 | }, | ||
36 | async (progress, _cancellationToken) => { | ||
37 | let lastPrecentage = 0; | ||
38 | await downloadFile(downloadUrl, installationPath, throttle( | ||
39 | 200, | ||
40 | /* noTrailing: */ true, | ||
41 | (readBytes, totalBytes) => { | ||
42 | const newPercentage = (readBytes / totalBytes) * 100; | ||
43 | progress.report({ | ||
44 | message: newPercentage.toFixed(0) + "%", | ||
45 | increment: newPercentage - lastPrecentage | ||
46 | }); | ||
47 | |||
48 | lastPrecentage = newPercentage; | ||
49 | }) | ||
50 | ); | ||
51 | } | ||
52 | ); | ||
53 | console.timeEnd("Downloading ra_lsp_server"); | ||
54 | |||
55 | await fs.chmod(installationPath, 0o755); // Set (rwx, r_x, r_x) permissions | ||
56 | } | ||
57 | export async function ensureLanguageServerBinary( | ||
58 | langServerSource: null | BinarySource | ||
59 | ): Promise<null | string> { | ||
60 | |||
61 | if (!langServerSource) { | ||
62 | vscode.window.showErrorMessage( | ||
63 | "Unfortunately we don't ship binaries for your platform yet. " + | ||
64 | "You need to manually clone rust-analyzer repository and " + | ||
65 | "run `cargo xtask install --server` to build the language server from sources. " + | ||
66 | "If you feel that your platform should be supported, please create an issue " + | ||
67 | "about that [here](https://github.com/rust-analyzer/rust-analyzer/issues) and we " + | ||
68 | "will consider it." | ||
69 | ); | ||
70 | return null; | ||
71 | } | ||
72 | |||
73 | switch (langServerSource.type) { | ||
74 | case BinarySource.Type.ExplicitPath: { | ||
75 | if (isBinaryAvailable(langServerSource.path)) { | ||
76 | return langServerSource.path; | ||
77 | } | ||
78 | |||
79 | vscode.window.showErrorMessage( | ||
80 | `Unable to run ${langServerSource.path} binary. ` + | ||
81 | `To use the pre-built language server, set "rust-analyzer.raLspServerPath" ` + | ||
82 | "value to `null` or remove it from the settings to use it by default." | ||
83 | ); | ||
84 | return null; | ||
85 | } | ||
86 | case BinarySource.Type.GithubRelease: { | ||
87 | const prebuiltBinaryPath = path.join(langServerSource.dir, langServerSource.file); | ||
88 | |||
89 | if (isBinaryAvailable(prebuiltBinaryPath)) { | ||
90 | return prebuiltBinaryPath; | ||
91 | } | ||
92 | |||
93 | const userResponse = await vscode.window.showInformationMessage( | ||
94 | "Language server binary for rust-analyzer was not found. " + | ||
95 | "Do you want to download it now?", | ||
96 | "Download now", "Cancel" | ||
97 | ); | ||
98 | if (userResponse !== "Download now") return null; | ||
99 | |||
100 | try { | ||
101 | await downloadLatestLanguageServer(langServerSource); | ||
102 | } catch (err) { | ||
103 | await vscode.window.showErrorMessage( | ||
104 | `Failed to download language server from ${langServerSource.repo.name} ` + | ||
105 | `GitHub repository: ${err.message}` | ||
106 | ); | ||
107 | |||
108 | await dns.resolve('www.google.com').catch(err => { | ||
109 | console.error("DNS resolution failed, there might be an issue with Internet availability"); | ||
110 | console.error(err); | ||
111 | }); | ||
112 | |||
113 | return null; | ||
114 | } | ||
115 | |||
116 | if (!isBinaryAvailable(prebuiltBinaryPath)) assert(false, | ||
117 | `Downloaded language server binary is not functional.` + | ||
118 | `Downloaded from: ${JSON.stringify(langServerSource)}` | ||
119 | ); | ||
120 | |||
121 | |||
122 | vscode.window.showInformationMessage( | ||
123 | "Rust analyzer language server was successfully installed 🦀" | ||
124 | ); | ||
125 | |||
126 | return prebuiltBinaryPath; | ||
127 | } | ||
128 | } | ||
129 | |||
130 | function isBinaryAvailable(binaryPath: string) { | ||
131 | const res = spawnSync(binaryPath, ["--version"]); | ||
132 | |||
133 | // ACHTUNG! `res` type declaration is inherently wrong, see | ||
134 | // https://github.com/DefinitelyTyped/DefinitelyTyped/issues/42221 | ||
135 | |||
136 | console.log("Checked binary availablity via --version", res); | ||
137 | console.log(binaryPath, "--version output:", res.output?.map(String)); | ||
138 | |||
139 | return res.status === 0; | ||
140 | } | ||
141 | } | ||
diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts index 0494ccf63..5efce41f4 100644 --- a/editors/code/src/main.ts +++ b/editors/code/src/main.ts | |||
@@ -6,12 +6,12 @@ import { activateStatusDisplay } from './status_display'; | |||
6 | import { Ctx } from './ctx'; | 6 | import { Ctx } from './ctx'; |
7 | import { activateHighlighting } from './highlighting'; | 7 | import { activateHighlighting } from './highlighting'; |
8 | 8 | ||
9 | let ctx!: Ctx; | 9 | let ctx: Ctx | undefined; |
10 | 10 | ||
11 | export async function activate(context: vscode.ExtensionContext) { | 11 | export async function activate(context: vscode.ExtensionContext) { |
12 | ctx = new Ctx(context); | 12 | ctx = new Ctx(context); |
13 | 13 | ||
14 | // Commands which invokes manually via command pallet, shortcut, etc. | 14 | // Commands which invokes manually via command palette, shortcut, etc. |
15 | ctx.registerCommand('analyzerStatus', commands.analyzerStatus); | 15 | ctx.registerCommand('analyzerStatus', commands.analyzerStatus); |
16 | ctx.registerCommand('collectGarbage', commands.collectGarbage); | 16 | ctx.registerCommand('collectGarbage', commands.collectGarbage); |
17 | ctx.registerCommand('matchingBrace', commands.matchingBrace); | 17 | ctx.registerCommand('matchingBrace', commands.matchingBrace); |
@@ -21,6 +21,7 @@ export async function activate(context: vscode.ExtensionContext) { | |||
21 | ctx.registerCommand('expandMacro', commands.expandMacro); | 21 | ctx.registerCommand('expandMacro', commands.expandMacro); |
22 | ctx.registerCommand('run', commands.run); | 22 | ctx.registerCommand('run', commands.run); |
23 | ctx.registerCommand('reload', commands.reload); | 23 | ctx.registerCommand('reload', commands.reload); |
24 | ctx.registerCommand('onEnter', commands.onEnter); | ||
24 | 25 | ||
25 | // Internal commands which are invoked by the server. | 26 | // Internal commands which are invoked by the server. |
26 | ctx.registerCommand('runSingle', commands.runSingle); | 27 | ctx.registerCommand('runSingle', commands.runSingle); |
@@ -28,9 +29,6 @@ export async function activate(context: vscode.ExtensionContext) { | |||
28 | ctx.registerCommand('applySourceChange', commands.applySourceChange); | 29 | ctx.registerCommand('applySourceChange', commands.applySourceChange); |
29 | ctx.registerCommand('selectAndApplySourceChange', commands.selectAndApplySourceChange); | 30 | ctx.registerCommand('selectAndApplySourceChange', commands.selectAndApplySourceChange); |
30 | 31 | ||
31 | if (ctx.config.enableEnhancedTyping) { | ||
32 | ctx.overrideCommand('type', commands.onEnter); | ||
33 | } | ||
34 | activateStatusDisplay(ctx); | 32 | activateStatusDisplay(ctx); |
35 | 33 | ||
36 | activateHighlighting(ctx); | 34 | activateHighlighting(ctx); |
diff --git a/editors/code/src/status_display.ts b/editors/code/src/status_display.ts index c75fddf9d..51dbf388b 100644 --- a/editors/code/src/status_display.ts +++ b/editors/code/src/status_display.ts | |||
@@ -1,6 +1,6 @@ | |||
1 | import * as vscode from 'vscode'; | 1 | import * as vscode from 'vscode'; |
2 | 2 | ||
3 | import { WorkDoneProgress, WorkDoneProgressBegin, WorkDoneProgressReport, WorkDoneProgressEnd } from 'vscode-languageclient'; | 3 | import { WorkDoneProgress, WorkDoneProgressBegin, WorkDoneProgressReport, WorkDoneProgressEnd, Disposable } from 'vscode-languageclient'; |
4 | 4 | ||
5 | import { Ctx } from './ctx'; | 5 | import { Ctx } from './ctx'; |
6 | 6 | ||
@@ -9,15 +9,17 @@ const spinnerFrames = ['â ‹', 'â ™', 'â ¹', 'â ¸', 'â ¼', 'â ´', 'â ¦', 'â §', ' | |||
9 | export function activateStatusDisplay(ctx: Ctx) { | 9 | export function activateStatusDisplay(ctx: Ctx) { |
10 | const statusDisplay = new StatusDisplay(ctx.config.cargoWatchOptions.command); | 10 | const statusDisplay = new StatusDisplay(ctx.config.cargoWatchOptions.command); |
11 | ctx.pushCleanup(statusDisplay); | 11 | ctx.pushCleanup(statusDisplay); |
12 | ctx.onDidRestart(client => { | 12 | ctx.onDidRestart(client => ctx.pushCleanup(client.onProgress( |
13 | client.onProgress(WorkDoneProgress.type, 'rustAnalyzer/cargoWatcher', params => statusDisplay.handleProgressNotification(params)); | 13 | WorkDoneProgress.type, |
14 | }); | 14 | 'rustAnalyzer/cargoWatcher', |
15 | params => statusDisplay.handleProgressNotification(params) | ||
16 | ))); | ||
15 | } | 17 | } |
16 | 18 | ||
17 | class StatusDisplay implements vscode.Disposable { | 19 | class StatusDisplay implements Disposable { |
18 | packageName?: string; | 20 | packageName?: string; |
19 | 21 | ||
20 | private i = 0; | 22 | private i: number = 0; |
21 | private statusBarItem: vscode.StatusBarItem; | 23 | private statusBarItem: vscode.StatusBarItem; |
22 | private command: string; | 24 | private command: string; |
23 | private timer?: NodeJS.Timeout; | 25 | private timer?: NodeJS.Timeout; |
@@ -37,11 +39,8 @@ class StatusDisplay implements vscode.Disposable { | |||
37 | this.timer = | 39 | this.timer = |
38 | this.timer || | 40 | this.timer || |
39 | setInterval(() => { | 41 | setInterval(() => { |
40 | if (this.packageName) { | 42 | this.tick(); |
41 | this.statusBarItem!.text = `${this.frame()} cargo ${this.command} [${this.packageName}]`; | 43 | this.refreshLabel(); |
42 | } else { | ||
43 | this.statusBarItem!.text = `${this.frame()} cargo ${this.command}`; | ||
44 | } | ||
45 | }, 300); | 44 | }, 300); |
46 | 45 | ||
47 | this.statusBarItem.show(); | 46 | this.statusBarItem.show(); |
@@ -65,6 +64,14 @@ class StatusDisplay implements vscode.Disposable { | |||
65 | this.statusBarItem.dispose(); | 64 | this.statusBarItem.dispose(); |
66 | } | 65 | } |
67 | 66 | ||
67 | refreshLabel() { | ||
68 | if (this.packageName) { | ||
69 | this.statusBarItem!.text = `${spinnerFrames[this.i]} cargo ${this.command} [${this.packageName}]`; | ||
70 | } else { | ||
71 | this.statusBarItem!.text = `${spinnerFrames[this.i]} cargo ${this.command}`; | ||
72 | } | ||
73 | } | ||
74 | |||
68 | handleProgressNotification(params: WorkDoneProgressBegin | WorkDoneProgressReport | WorkDoneProgressEnd) { | 75 | handleProgressNotification(params: WorkDoneProgressBegin | WorkDoneProgressReport | WorkDoneProgressEnd) { |
69 | switch (params.kind) { | 76 | switch (params.kind) { |
70 | case 'begin': | 77 | case 'begin': |
@@ -74,6 +81,7 @@ class StatusDisplay implements vscode.Disposable { | |||
74 | case 'report': | 81 | case 'report': |
75 | if (params.message) { | 82 | if (params.message) { |
76 | this.packageName = params.message; | 83 | this.packageName = params.message; |
84 | this.refreshLabel(); | ||
77 | } | 85 | } |
78 | break; | 86 | break; |
79 | 87 | ||
@@ -83,7 +91,7 @@ class StatusDisplay implements vscode.Disposable { | |||
83 | } | 91 | } |
84 | } | 92 | } |
85 | 93 | ||
86 | private frame() { | 94 | private tick() { |
87 | return spinnerFrames[(this.i = ++this.i % spinnerFrames.length)]; | 95 | this.i = (this.i + 1) % spinnerFrames.length; |
88 | } | 96 | } |
89 | } | 97 | } |
diff --git a/editors/code/tsconfig.json b/editors/code/tsconfig.json index e60eb8e5e..0c7702974 100644 --- a/editors/code/tsconfig.json +++ b/editors/code/tsconfig.json | |||
@@ -6,6 +6,8 @@ | |||
6 | "lib": [ | 6 | "lib": [ |
7 | "es2019" | 7 | "es2019" |
8 | ], | 8 | ], |
9 | "esModuleInterop": true, | ||
10 | "allowSyntheticDefaultImports": true, | ||
9 | "sourceMap": true, | 11 | "sourceMap": true, |
10 | "rootDir": "src", | 12 | "rootDir": "src", |
11 | "strict": true, | 13 | "strict": true, |
diff --git a/editors/code/tslint.json b/editors/code/tslint.json index 318e02b4b..333e2a321 100644 --- a/editors/code/tslint.json +++ b/editors/code/tslint.json | |||
@@ -3,6 +3,8 @@ | |||
3 | "semicolon": [ | 3 | "semicolon": [ |
4 | true, | 4 | true, |
5 | "always" | 5 | "always" |
6 | ] | 6 | ], |
7 | "prefer-const": true, | ||
8 | "no-floating-promises": true | ||
7 | } | 9 | } |
8 | } | 10 | } |
diff --git a/editors/emacs/rust-analyzer.el b/editors/emacs/rust-analyzer.el deleted file mode 100644 index 06db4f15f..000000000 --- a/editors/emacs/rust-analyzer.el +++ /dev/null | |||
@@ -1,286 +0,0 @@ | |||
1 | ;;; rust-analyzer.el --- Rust analyzer emacs bindings for emacs-lsp -*- lexical-binding: t; -*- | ||
2 | ;;; Code: | ||
3 | |||
4 | (require 'lsp) | ||
5 | (require 'dash) | ||
6 | (require 'ht) | ||
7 | |||
8 | ;; This currently | ||
9 | ;; - sets up rust-analyzer with emacs-lsp, giving | ||
10 | ;; - code actions | ||
11 | ;; - completion (use company-lsp for proper snippet support) | ||
12 | ;; - imenu support | ||
13 | ;; - on-type formatting | ||
14 | ;; - 'hover' type information & documentation (with lsp-ui) | ||
15 | ;; - implements source changes (for code actions etc.), except for file system changes | ||
16 | ;; - implements joinLines (you need to bind rust-analyzer-join-lines to a key) | ||
17 | ;; - implements selectionRanges (either bind lsp-extend-selection to a key, or use expand-region) | ||
18 | ;; - provides rust-analyzer-inlay-hints-mode for inline type hints | ||
19 | ;; - provides rust-analyzer-expand-macro to expand macros | ||
20 | |||
21 | ;; What's missing: | ||
22 | ;; - file system changes in apply-source-change | ||
23 | ;; - semantic highlighting | ||
24 | ;; - onEnter, parentModule, findMatchingBrace | ||
25 | ;; - runnables | ||
26 | ;; - the debugging commands (syntaxTree and analyzerStatus) | ||
27 | ;; - more | ||
28 | |||
29 | ;; Also, there's a problem with company-lsp's caching being too eager, sometimes | ||
30 | ;; resulting in outdated completions. | ||
31 | |||
32 | (defcustom rust-analyzer-command '("ra_lsp_server") | ||
33 | "" | ||
34 | :type '(repeat (string))) | ||
35 | |||
36 | (defconst rust-analyzer--notification-handlers | ||
37 | '(("rust-analyzer/publishDecorations" . (lambda (_w _p))))) | ||
38 | |||
39 | (defconst rust-analyzer--action-handlers | ||
40 | '(("rust-analyzer.applySourceChange" . | ||
41 | (lambda (p) (rust-analyzer--apply-source-change-command p))))) | ||
42 | |||
43 | (defun rust-analyzer--uri-filename (text-document) | ||
44 | (lsp--uri-to-path (gethash "uri" text-document))) | ||
45 | |||
46 | (defun rust-analyzer--goto-lsp-loc (loc) | ||
47 | (-let (((&hash "line" "character") loc)) | ||
48 | (goto-line (1+ line)) | ||
49 | (move-to-column character))) | ||
50 | |||
51 | (defun rust-analyzer--apply-text-document-edit (edit) | ||
52 | "Like lsp--apply-text-document-edit, but it allows nil version." | ||
53 | (let* ((ident (gethash "textDocument" edit)) | ||
54 | (filename (rust-analyzer--uri-filename ident)) | ||
55 | (version (gethash "version" ident))) | ||
56 | (with-current-buffer (find-file-noselect filename) | ||
57 | (when (or (not version) (= version (lsp--cur-file-version))) | ||
58 | (lsp--apply-text-edits (gethash "edits" edit)))))) | ||
59 | |||
60 | (defun rust-analyzer--apply-source-change (data) | ||
61 | ;; TODO fileSystemEdits | ||
62 | (seq-doseq (it (-> data (ht-get "workspaceEdit") (ht-get "documentChanges"))) | ||
63 | (rust-analyzer--apply-text-document-edit it)) | ||
64 | (-when-let (cursor-position (ht-get data "cursorPosition")) | ||
65 | (let ((filename (rust-analyzer--uri-filename (ht-get cursor-position "textDocument"))) | ||
66 | (position (ht-get cursor-position "position"))) | ||
67 | (find-file filename) | ||
68 | (rust-analyzer--goto-lsp-loc position)))) | ||
69 | |||
70 | (defun rust-analyzer--apply-source-change-command (p) | ||
71 | (let ((data (-> p (ht-get "arguments") (lsp-seq-first)))) | ||
72 | (rust-analyzer--apply-source-change data))) | ||
73 | |||
74 | (lsp-register-client | ||
75 | (make-lsp-client | ||
76 | :new-connection (lsp-stdio-connection (lambda () rust-analyzer-command)) | ||
77 | :notification-handlers (ht<-alist rust-analyzer--notification-handlers) | ||
78 | :action-handlers (ht<-alist rust-analyzer--action-handlers) | ||
79 | :major-modes '(rust-mode) | ||
80 | :ignore-messages nil | ||
81 | :server-id 'rust-analyzer)) | ||
82 | |||
83 | (defun rust-analyzer--initialized? () | ||
84 | (when-let ((workspace (lsp-find-workspace 'rust-analyzer (buffer-file-name)))) | ||
85 | (eq 'initialized (lsp--workspace-status workspace)))) | ||
86 | |||
87 | (with-eval-after-load 'company-lsp | ||
88 | ;; company-lsp provides a snippet handler for rust by default that adds () after function calls, which RA does better | ||
89 | (setq company-lsp--snippet-functions (cl-delete "rust" company-lsp--snippet-functions :key #'car :test #'equal))) | ||
90 | |||
91 | ;; join lines | ||
92 | |||
93 | (defun rust-analyzer--join-lines-params () | ||
94 | "Join lines params." | ||
95 | (list :textDocument (lsp--text-document-identifier) | ||
96 | :range (if (use-region-p) | ||
97 | (lsp--region-to-range (region-beginning) (region-end)) | ||
98 | (lsp--region-to-range (point) (point))))) | ||
99 | |||
100 | (defun rust-analyzer-join-lines () | ||
101 | (interactive) | ||
102 | (-> | ||
103 | (lsp-send-request (lsp-make-request "rust-analyzer/joinLines" | ||
104 | (rust-analyzer--join-lines-params))) | ||
105 | (rust-analyzer--apply-source-change))) | ||
106 | |||
107 | ;; selection ranges | ||
108 | |||
109 | (defun rust-analyzer--add-er-expansion () | ||
110 | (make-variable-buffer-local 'er/try-expand-list) | ||
111 | (setq er/try-expand-list (append | ||
112 | er/try-expand-list | ||
113 | '(lsp-extend-selection)))) | ||
114 | |||
115 | (with-eval-after-load 'expand-region | ||
116 | ;; add the expansion for all existing rust-mode buffers. If expand-region is | ||
117 | ;; loaded lazily, it might be loaded when the first rust buffer is opened, and | ||
118 | ;; then it's too late for the hook for that buffer | ||
119 | (dolist (buf (buffer-list)) | ||
120 | (with-current-buffer buf | ||
121 | (when (eq 'rust-mode major-mode) | ||
122 | (rust-analyzer--add-er-expansion)))) | ||
123 | (add-hook 'rust-mode-hook 'rust-analyzer--add-er-expansion)) | ||
124 | |||
125 | ;; runnables | ||
126 | (defvar rust-analyzer--last-runnable nil) | ||
127 | |||
128 | (defun rust-analyzer--runnables-params () | ||
129 | (list :textDocument (lsp--text-document-identifier) | ||
130 | :position (lsp--cur-position))) | ||
131 | |||
132 | (defun rust-analyzer--runnables () | ||
133 | (lsp-send-request (lsp-make-request "rust-analyzer/runnables" | ||
134 | (rust-analyzer--runnables-params)))) | ||
135 | |||
136 | (defun rust-analyzer--select-runnable () | ||
137 | (lsp--completing-read | ||
138 | "Select runnable:" | ||
139 | (if rust-analyzer--last-runnable | ||
140 | (cons rust-analyzer--last-runnable (rust-analyzer--runnables)) | ||
141 | (rust-analyzer--runnables)) | ||
142 | (-lambda ((&hash "label")) label))) | ||
143 | |||
144 | (defun rust-analyzer-run (runnable) | ||
145 | (interactive (list (rust-analyzer--select-runnable))) | ||
146 | (-let (((&hash "env" "bin" "args" "label") runnable)) | ||
147 | (compilation-start | ||
148 | (string-join (append (list bin) args '()) " ") | ||
149 | ;; cargo-process-mode is nice, but try to work without it... | ||
150 | (if (functionp 'cargo-process-mode) 'cargo-process-mode nil) | ||
151 | (lambda (_) (concat "*" label "*"))) | ||
152 | (setq rust-analyzer--last-runnable runnable))) | ||
153 | |||
154 | (defun rust-analyzer-rerun (&optional runnable) | ||
155 | (interactive (list (or rust-analyzer--last-runnable | ||
156 | (rust-analyzer--select-runnable)))) | ||
157 | (rust-analyzer-run (or runnable rust-analyzer--last-runnable))) | ||
158 | |||
159 | ;; analyzer status buffer | ||
160 | (define-derived-mode rust-analyzer-status-mode special-mode "Rust-Analyzer-Status" | ||
161 | "Mode for the rust-analyzer status buffer.") | ||
162 | |||
163 | (defvar-local rust-analyzer--status-buffer-workspace nil) | ||
164 | |||
165 | (defun rust-analyzer-status () | ||
166 | "Displays status information for rust-analyzer." | ||
167 | (interactive) | ||
168 | (let* ((workspace (lsp-find-workspace 'rust-analyzer (buffer-file-name))) | ||
169 | (buf (get-buffer-create (concat "*rust-analyzer status " (with-lsp-workspace workspace (lsp-workspace-root)) "*")))) | ||
170 | (with-current-buffer buf | ||
171 | (rust-analyzer-status-mode) | ||
172 | (setq rust-analyzer--status-buffer-workspace workspace) | ||
173 | (rust-analyzer-status-buffer-refresh)) | ||
174 | (pop-to-buffer buf))) | ||
175 | |||
176 | (defun rust-analyzer-status-buffer-refresh () | ||
177 | (interactive) | ||
178 | (when rust-analyzer--status-buffer-workspace | ||
179 | (let ((inhibit-read-only t)) | ||
180 | (erase-buffer) | ||
181 | (insert (with-lsp-workspace rust-analyzer--status-buffer-workspace | ||
182 | (lsp-send-request (lsp-make-request | ||
183 | "rust-analyzer/analyzerStatus"))))))) | ||
184 | |||
185 | |||
186 | (defun rust-analyzer--syntax-tree-params () | ||
187 | "Syntax tree params." | ||
188 | (list :textDocument (lsp--text-document-identifier) | ||
189 | :range (if (use-region-p) | ||
190 | (lsp--region-to-range (region-beginning) (region-end)) | ||
191 | (lsp--region-to-range (point-min) (point-max))))) | ||
192 | |||
193 | (defun rust-analyzer-syntax-tree () | ||
194 | "Displays syntax tree for current buffer." | ||
195 | (interactive) | ||
196 | (when (eq 'rust-mode major-mode) | ||
197 | (let* ((workspace (lsp-find-workspace 'rust-analyzer (buffer-file-name))) | ||
198 | (buf (get-buffer-create (concat "*rust-analyzer syntax tree " (with-lsp-workspace workspace (lsp-workspace-root)) "*")))) | ||
199 | (when workspace | ||
200 | (let ((parse-result (with-lsp-workspace workspace | ||
201 | (lsp-send-request (lsp-make-request | ||
202 | "rust-analyzer/syntaxTree" | ||
203 | (rust-analyzer--syntax-tree-params)))))) | ||
204 | (with-current-buffer buf | ||
205 | (let ((inhibit-read-only t)) | ||
206 | (erase-buffer) | ||
207 | (insert parse-result))) | ||
208 | (pop-to-buffer buf)))))) | ||
209 | |||
210 | ;; inlay hints | ||
211 | (defun rust-analyzer--update-inlay-hints (buffer) | ||
212 | (if (and (rust-analyzer--initialized?) (eq buffer (current-buffer))) | ||
213 | (lsp-request-async | ||
214 | "rust-analyzer/inlayHints" | ||
215 | (list :textDocument (lsp--text-document-identifier)) | ||
216 | (lambda (res) | ||
217 | (remove-overlays (point-min) (point-max) 'rust-analyzer--inlay-hint t) | ||
218 | (dolist (hint res) | ||
219 | (-let* (((&hash "range" "label" "kind") hint) | ||
220 | ((beg . end) (lsp--range-to-region range)) | ||
221 | (overlay (make-overlay beg end))) | ||
222 | (overlay-put overlay 'rust-analyzer--inlay-hint t) | ||
223 | (overlay-put overlay 'evaporate t) | ||
224 | (cond | ||
225 | ((string= kind "TypeHint") | ||
226 | (overlay-put overlay 'after-string (propertize (concat ": " label) | ||
227 | 'font-lock-face 'font-lock-comment-face))) | ||
228 | ((string= kind "ParameterHint") | ||
229 | (overlay-put overlay 'before-string (propertize (concat label ": ") | ||
230 | 'font-lock-face 'font-lock-comment-face))) | ||
231 | ) | ||
232 | ))) | ||
233 | :mode 'tick)) | ||
234 | nil) | ||
235 | |||
236 | (defvar-local rust-analyzer--inlay-hints-timer nil) | ||
237 | |||
238 | (defun rust-analyzer--inlay-hints-change-handler (&rest rest) | ||
239 | (when rust-analyzer--inlay-hints-timer | ||
240 | (cancel-timer rust-analyzer--inlay-hints-timer)) | ||
241 | (setq rust-analyzer--inlay-hints-timer | ||
242 | (run-with-idle-timer 0.1 nil #'rust-analyzer--update-inlay-hints (current-buffer)))) | ||
243 | |||
244 | (define-minor-mode rust-analyzer-inlay-hints-mode | ||
245 | "Mode for showing inlay hints." | ||
246 | nil nil nil | ||
247 | (cond | ||
248 | (rust-analyzer-inlay-hints-mode | ||
249 | (rust-analyzer--update-inlay-hints (current-buffer)) | ||
250 | (add-hook 'lsp-after-initialize-hook #'rust-analyzer--inlay-hints-change-handler nil t) | ||
251 | (add-hook 'after-change-functions #'rust-analyzer--inlay-hints-change-handler nil t)) | ||
252 | (t | ||
253 | (remove-overlays (point-min) (point-max) 'rust-analyzer--inlay-hint t) | ||
254 | (remove-hook 'lsp-after-initialize-hook #'rust-analyzer--inlay-hints-change-handler t) | ||
255 | (remove-hook 'after-change-functions #'rust-analyzer--inlay-hints-change-handler t)))) | ||
256 | |||
257 | |||
258 | |||
259 | ;; expand macros | ||
260 | (defun rust-analyzer-expand-macro () | ||
261 | "Expands the macro call at point recursively." | ||
262 | (interactive) | ||
263 | (when (eq 'rust-mode major-mode) | ||
264 | (let* ((workspace (lsp-find-workspace 'rust-analyzer (buffer-file-name))) | ||
265 | (params (list :textDocument (lsp--text-document-identifier) | ||
266 | :position (lsp--cur-position)))) | ||
267 | (when workspace | ||
268 | (let* ((response (with-lsp-workspace workspace | ||
269 | (lsp-send-request (lsp-make-request | ||
270 | "rust-analyzer/expandMacro" | ||
271 | params)))) | ||
272 | (result (when response (ht-get response "expansion")))) | ||
273 | (if result | ||
274 | (let ((buf (get-buffer-create (concat "*rust-analyzer macro expansion " (with-lsp-workspace workspace (lsp-workspace-root)) "*")))) | ||
275 | (with-current-buffer buf | ||
276 | (let ((inhibit-read-only t)) | ||
277 | (erase-buffer) | ||
278 | (insert result) | ||
279 | (setq buffer-read-only t) | ||
280 | (special-mode))) | ||
281 | (pop-to-buffer buf)) | ||
282 | (message "No macro found at point, or it could not be expanded"))))))) | ||
283 | |||
284 | |||
285 | (provide 'rust-analyzer) | ||
286 | ;;; rust-analyzer.el ends here | ||
diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs index efa638e06..a53d57335 100644 --- a/xtask/src/codegen.rs +++ b/xtask/src/codegen.rs | |||
@@ -25,7 +25,7 @@ const ERR_INLINE_TESTS_DIR: &str = "crates/ra_syntax/test_data/parser/inline/err | |||
25 | pub const SYNTAX_KINDS: &str = "crates/ra_parser/src/syntax_kind/generated.rs"; | 25 | pub const SYNTAX_KINDS: &str = "crates/ra_parser/src/syntax_kind/generated.rs"; |
26 | pub const AST: &str = "crates/ra_syntax/src/ast/generated.rs"; | 26 | pub const AST: &str = "crates/ra_syntax/src/ast/generated.rs"; |
27 | 27 | ||
28 | const ASSISTS_DIR: &str = "crates/ra_assists/src/assists"; | 28 | const ASSISTS_DIR: &str = "crates/ra_assists/src/handlers"; |
29 | const ASSISTS_TESTS: &str = "crates/ra_assists/src/doc_tests/generated.rs"; | 29 | const ASSISTS_TESTS: &str = "crates/ra_assists/src/doc_tests/generated.rs"; |
30 | const ASSISTS_DOCS: &str = "docs/user/assists.md"; | 30 | const ASSISTS_DOCS: &str = "docs/user/assists.md"; |
31 | 31 | ||
diff --git a/xtask/src/codegen/gen_assists_docs.rs b/xtask/src/codegen/gen_assists_docs.rs index 69f9b4872..697e830df 100644 --- a/xtask/src/codegen/gen_assists_docs.rs +++ b/xtask/src/codegen/gen_assists_docs.rs | |||
@@ -20,6 +20,28 @@ struct Assist { | |||
20 | after: String, | 20 | after: String, |
21 | } | 21 | } |
22 | 22 | ||
23 | fn hide_hash_comments(text: &str) -> String { | ||
24 | text.split('\n') // want final newline | ||
25 | .filter(|&it| !(it.starts_with("# ") || it == "#")) | ||
26 | .map(|it| format!("{}\n", it)) | ||
27 | .collect() | ||
28 | } | ||
29 | |||
30 | fn reveal_hash_comments(text: &str) -> String { | ||
31 | text.split('\n') // want final newline | ||
32 | .map(|it| { | ||
33 | if it.starts_with("# ") { | ||
34 | &it[2..] | ||
35 | } else if it == "#" { | ||
36 | "" | ||
37 | } else { | ||
38 | it | ||
39 | } | ||
40 | }) | ||
41 | .map(|it| format!("{}\n", it)) | ||
42 | .collect() | ||
43 | } | ||
44 | |||
23 | fn collect_assists() -> Result<Vec<Assist>> { | 45 | fn collect_assists() -> Result<Vec<Assist>> { |
24 | let mut res = Vec::new(); | 46 | let mut res = Vec::new(); |
25 | for entry in fs::read_dir(project_root().join(codegen::ASSISTS_DIR))? { | 47 | for entry in fs::read_dir(project_root().join(codegen::ASSISTS_DIR))? { |
@@ -91,13 +113,14 @@ fn doctest_{}() {{ | |||
91 | check( | 113 | check( |
92 | "{}", | 114 | "{}", |
93 | r#####" | 115 | r#####" |
94 | {} | 116 | {}"#####, r#####" |
95 | "#####, r#####" | 117 | {}"#####) |
96 | {} | ||
97 | "#####) | ||
98 | }} | 118 | }} |
99 | "######, | 119 | "######, |
100 | assist.id, assist.id, assist.before, assist.after | 120 | assist.id, |
121 | assist.id, | ||
122 | reveal_hash_comments(&assist.before), | ||
123 | reveal_hash_comments(&assist.after) | ||
101 | ); | 124 | ); |
102 | 125 | ||
103 | buf.push_str(&test) | 126 | buf.push_str(&test) |
@@ -123,12 +146,13 @@ fn generate_docs(assists: &[Assist], mode: Mode) -> Result<()> { | |||
123 | ```rust | 146 | ```rust |
124 | // BEFORE | 147 | // BEFORE |
125 | {} | 148 | {} |
126 | |||
127 | // AFTER | 149 | // AFTER |
128 | {} | 150 | {}``` |
129 | ``` | ||
130 | ", | 151 | ", |
131 | assist.id, assist.doc, before, after | 152 | assist.id, |
153 | assist.doc, | ||
154 | hide_hash_comments(&before), | ||
155 | hide_hash_comments(&after) | ||
132 | ); | 156 | ); |
133 | buf.push_str(&docs); | 157 | buf.push_str(&docs); |
134 | } | 158 | } |
diff --git a/xtask/src/install.rs b/xtask/src/install.rs index bffd91af1..8c65b51e3 100644 --- a/xtask/src/install.rs +++ b/xtask/src/install.rs | |||
@@ -7,7 +7,7 @@ use anyhow::{Context, Result}; | |||
7 | use crate::cmd::{run, run_with_output, Cmd}; | 7 | use crate::cmd::{run, run_with_output, Cmd}; |
8 | 8 | ||
9 | // Latest stable, feel free to send a PR if this lags behind. | 9 | // Latest stable, feel free to send a PR if this lags behind. |
10 | const REQUIRED_RUST_VERSION: u32 = 40; | 10 | const REQUIRED_RUST_VERSION: u32 = 41; |
11 | 11 | ||
12 | pub struct InstallCmd { | 12 | pub struct InstallCmd { |
13 | pub client: Option<ClientOpt>, | 13 | pub client: Option<ClientOpt>, |
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs index 9b0afe8e0..8fdf43e4a 100644 --- a/xtask/src/lib.rs +++ b/xtask/src/lib.rs | |||
@@ -53,8 +53,7 @@ fn reformat(text: impl std::fmt::Display) -> Result<String> { | |||
53 | write!(rustfmt.stdin.take().unwrap(), "{}", text)?; | 53 | write!(rustfmt.stdin.take().unwrap(), "{}", text)?; |
54 | let output = rustfmt.wait_with_output()?; | 54 | let output = rustfmt.wait_with_output()?; |
55 | let stdout = String::from_utf8(output.stdout)?; | 55 | let stdout = String::from_utf8(output.stdout)?; |
56 | // TODO: update the preable: replace ra_tools with the relevant path | 56 | let preamble = "Generated file, do not edit by hand, see `xtask/src/codegen`"; |
57 | let preamble = "Generated file, do not edit by hand, see `crate/ra_tools/src/codegen`"; | ||
58 | Ok(format!("//! {}\n\n{}", preamble, stdout)) | 57 | Ok(format!("//! {}\n\n{}", preamble, stdout)) |
59 | } | 58 | } |
60 | 59 | ||
diff --git a/xtask/tests/tidy-tests/docs.rs b/xtask/tests/tidy-tests/docs.rs index 8a005d6c4..6a69e7d6a 100644 --- a/xtask/tests/tidy-tests/docs.rs +++ b/xtask/tests/tidy-tests/docs.rs | |||
@@ -6,7 +6,7 @@ use xtask::project_root; | |||
6 | fn is_exclude_dir(p: &Path) -> bool { | 6 | fn is_exclude_dir(p: &Path) -> bool { |
7 | // Test hopefully don't really need comments, and for assists we already | 7 | // Test hopefully don't really need comments, and for assists we already |
8 | // have special comments which are source of doc tests and user docs. | 8 | // have special comments which are source of doc tests and user docs. |
9 | let exclude_dirs = ["tests", "test_data", "assists"]; | 9 | let exclude_dirs = ["tests", "test_data", "handlers"]; |
10 | let mut cur_path = p; | 10 | let mut cur_path = p; |
11 | while let Some(path) = cur_path.parent() { | 11 | while let Some(path) = cur_path.parent() { |
12 | if exclude_dirs.iter().any(|dir| path.ends_with(dir)) { | 12 | if exclude_dirs.iter().any(|dir| path.ends_with(dir)) { |