Diffstat (limited to 'crates/ra_syntax')
37 files changed, 573 insertions, 5159 deletions
diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml
index 7ce26b7c4..7e70dad3f 100644
--- a/crates/ra_syntax/Cargo.toml
+++ b/crates/ra_syntax/Cargo.toml
@@ -21,6 +21,7 @@ text_unit = { version = "0.1.6", features = ["serde"] }
 smol_str = { version = "0.1.9", features = ["serde"] }
 
 ra_text_edit = { path = "../ra_text_edit" }
+ra_parser = { path = "../ra_parser" }
 
 [dev-dependencies]
 test_utils = { path = "../test_utils" }
diff --git a/crates/ra_syntax/fuzz/.gitignore b/crates/ra_syntax/fuzz/.gitignore
index 572e03bdf..f734abd49 100644
--- a/crates/ra_syntax/fuzz/.gitignore
+++ b/crates/ra_syntax/fuzz/.gitignore
@@ -1,4 +1,4 @@
-
+Cargo.lock
 target
 corpus
 artifacts
diff --git a/crates/ra_syntax/fuzz/Cargo.lock b/crates/ra_syntax/fuzz/Cargo.lock
deleted file mode 100644
index 4be6be44f..000000000
--- a/crates/ra_syntax/fuzz/Cargo.lock
+++ /dev/null
@@ -1,520 +0,0 @@
1 | [[package]] | ||
2 | name = "arbitrary" | ||
3 | version = "0.1.1" | ||
4 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
5 | |||
6 | [[package]] | ||
7 | name = "arrayvec" | ||
8 | version = "0.4.10" | ||
9 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
10 | dependencies = [ | ||
11 | "nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", | ||
12 | ] | ||
13 | |||
14 | [[package]] | ||
15 | name = "bit-set" | ||
16 | version = "0.5.0" | ||
17 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
18 | dependencies = [ | ||
19 | "bit-vec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
20 | ] | ||
21 | |||
22 | [[package]] | ||
23 | name = "bit-vec" | ||
24 | version = "0.5.0" | ||
25 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
26 | |||
27 | [[package]] | ||
28 | name = "bitflags" | ||
29 | version = "1.0.4" | ||
30 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
31 | |||
32 | [[package]] | ||
33 | name = "byteorder" | ||
34 | version = "1.2.7" | ||
35 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
36 | |||
37 | [[package]] | ||
38 | name = "cc" | ||
39 | version = "1.0.28" | ||
40 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
41 | |||
42 | [[package]] | ||
43 | name = "cfg-if" | ||
44 | version = "0.1.6" | ||
45 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
46 | |||
47 | [[package]] | ||
48 | name = "cloudabi" | ||
49 | version = "0.0.3" | ||
50 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
51 | dependencies = [ | ||
52 | "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", | ||
53 | ] | ||
54 | |||
55 | [[package]] | ||
56 | name = "drop_bomb" | ||
57 | version = "0.1.4" | ||
58 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
59 | |||
60 | [[package]] | ||
61 | name = "either" | ||
62 | version = "1.5.0" | ||
63 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
64 | |||
65 | [[package]] | ||
66 | name = "fnv" | ||
67 | version = "1.0.6" | ||
68 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
69 | |||
70 | [[package]] | ||
71 | name = "fuchsia-zircon" | ||
72 | version = "0.3.3" | ||
73 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
74 | dependencies = [ | ||
75 | "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", | ||
76 | "fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
77 | ] | ||
78 | |||
79 | [[package]] | ||
80 | name = "fuchsia-zircon-sys" | ||
81 | version = "0.3.3" | ||
82 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
83 | |||
84 | [[package]] | ||
85 | name = "itertools" | ||
86 | version = "0.8.0" | ||
87 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
88 | dependencies = [ | ||
89 | "either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
90 | ] | ||
91 | |||
92 | [[package]] | ||
93 | name = "lazy_static" | ||
94 | version = "1.2.0" | ||
95 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
96 | |||
97 | [[package]] | ||
98 | name = "libc" | ||
99 | version = "0.2.45" | ||
100 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
101 | |||
102 | [[package]] | ||
103 | name = "libfuzzer-sys" | ||
104 | version = "0.1.0" | ||
105 | source = "git+https://github.com/rust-fuzz/libfuzzer-sys.git#4a413199b5cb1bbed6a1d157b2342b925f8464ac" | ||
106 | dependencies = [ | ||
107 | "arbitrary 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", | ||
108 | "cc 1.0.28 (registry+https://github.com/rust-lang/crates.io-index)", | ||
109 | ] | ||
110 | |||
111 | [[package]] | ||
112 | name = "lock_api" | ||
113 | version = "0.1.5" | ||
114 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
115 | dependencies = [ | ||
116 | "owning_ref 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
117 | "scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
118 | ] | ||
119 | |||
120 | [[package]] | ||
121 | name = "nodrop" | ||
122 | version = "0.1.13" | ||
123 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
124 | |||
125 | [[package]] | ||
126 | name = "num-traits" | ||
127 | version = "0.2.6" | ||
128 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
129 | |||
130 | [[package]] | ||
131 | name = "owning_ref" | ||
132 | version = "0.4.0" | ||
133 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
134 | dependencies = [ | ||
135 | "stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", | ||
136 | ] | ||
137 | |||
138 | [[package]] | ||
139 | name = "parking_lot" | ||
140 | version = "0.7.0" | ||
141 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
142 | dependencies = [ | ||
143 | "lock_api 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", | ||
144 | "parking_lot_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
145 | ] | ||
146 | |||
147 | [[package]] | ||
148 | name = "parking_lot_core" | ||
149 | version = "0.4.0" | ||
150 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
151 | dependencies = [ | ||
152 | "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)", | ||
153 | "rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", | ||
154 | "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
155 | "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)", | ||
156 | "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", | ||
157 | ] | ||
158 | |||
159 | [[package]] | ||
160 | name = "proptest" | ||
161 | version = "0.8.7" | ||
162 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
163 | dependencies = [ | ||
164 | "bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
165 | "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", | ||
166 | "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)", | ||
167 | "lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
168 | "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", | ||
169 | "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", | ||
170 | "rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", | ||
171 | "regex-syntax 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", | ||
172 | "rusty-fork 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | ||
173 | "tempfile 3.0.5 (registry+https://github.com/rust-lang/crates.io-index)", | ||
174 | ] | ||
175 | |||
176 | [[package]] | ||
177 | name = "quick-error" | ||
178 | version = "1.2.2" | ||
179 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
180 | |||
181 | [[package]] | ||
182 | name = "ra_syntax" | ||
183 | version = "0.1.0" | ||
184 | dependencies = [ | ||
185 | "arrayvec 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", | ||
186 | "drop_bomb 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", | ||
187 | "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
188 | "parking_lot 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
189 | "ra_text_edit 0.1.0", | ||
190 | "rowan 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
191 | "smol_str 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", | ||
192 | "text_unit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", | ||
193 | "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
194 | ] | ||
195 | |||
196 | [[package]] | ||
197 | name = "ra_syntax-fuzz" | ||
198 | version = "0.0.1" | ||
199 | dependencies = [ | ||
200 | "libfuzzer-sys 0.1.0 (git+https://github.com/rust-fuzz/libfuzzer-sys.git)", | ||
201 | "ra_syntax 0.1.0", | ||
202 | ] | ||
203 | |||
204 | [[package]] | ||
205 | name = "ra_text_edit" | ||
206 | version = "0.1.0" | ||
207 | dependencies = [ | ||
208 | "proptest 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)", | ||
209 | "text_unit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", | ||
210 | ] | ||
211 | |||
212 | [[package]] | ||
213 | name = "rand" | ||
214 | version = "0.5.5" | ||
215 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
216 | dependencies = [ | ||
217 | "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
218 | "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
219 | "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)", | ||
220 | "rand_core 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", | ||
221 | "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", | ||
222 | ] | ||
223 | |||
224 | [[package]] | ||
225 | name = "rand" | ||
226 | version = "0.6.1" | ||
227 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
228 | dependencies = [ | ||
229 | "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
230 | "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
231 | "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)", | ||
232 | "rand_chacha 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
233 | "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
234 | "rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
235 | "rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", | ||
236 | "rand_pcg 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", | ||
237 | "rand_xorshift 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
238 | "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
239 | "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", | ||
240 | ] | ||
241 | |||
242 | [[package]] | ||
243 | name = "rand_chacha" | ||
244 | version = "0.1.0" | ||
245 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
246 | dependencies = [ | ||
247 | "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
248 | "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
249 | ] | ||
250 | |||
251 | [[package]] | ||
252 | name = "rand_core" | ||
253 | version = "0.2.2" | ||
254 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
255 | dependencies = [ | ||
256 | "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
257 | ] | ||
258 | |||
259 | [[package]] | ||
260 | name = "rand_core" | ||
261 | version = "0.3.0" | ||
262 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
263 | |||
264 | [[package]] | ||
265 | name = "rand_hc" | ||
266 | version = "0.1.0" | ||
267 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
268 | dependencies = [ | ||
269 | "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
270 | ] | ||
271 | |||
272 | [[package]] | ||
273 | name = "rand_isaac" | ||
274 | version = "0.1.1" | ||
275 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
276 | dependencies = [ | ||
277 | "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
278 | ] | ||
279 | |||
280 | [[package]] | ||
281 | name = "rand_pcg" | ||
282 | version = "0.1.1" | ||
283 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
284 | dependencies = [ | ||
285 | "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
286 | "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
287 | ] | ||
288 | |||
289 | [[package]] | ||
290 | name = "rand_xorshift" | ||
291 | version = "0.1.0" | ||
292 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
293 | dependencies = [ | ||
294 | "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
295 | ] | ||
296 | |||
297 | [[package]] | ||
298 | name = "redox_syscall" | ||
299 | version = "0.1.50" | ||
300 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
301 | |||
302 | [[package]] | ||
303 | name = "regex-syntax" | ||
304 | version = "0.6.4" | ||
305 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
306 | dependencies = [ | ||
307 | "ucd-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
308 | ] | ||
309 | |||
310 | [[package]] | ||
311 | name = "remove_dir_all" | ||
312 | version = "0.5.1" | ||
313 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
314 | dependencies = [ | ||
315 | "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", | ||
316 | ] | ||
317 | |||
318 | [[package]] | ||
319 | name = "rowan" | ||
320 | version = "0.2.0" | ||
321 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
322 | dependencies = [ | ||
323 | "parking_lot 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
324 | "smol_str 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", | ||
325 | "text_unit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", | ||
326 | ] | ||
327 | |||
328 | [[package]] | ||
329 | name = "rustc_version" | ||
330 | version = "0.2.3" | ||
331 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
332 | dependencies = [ | ||
333 | "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
334 | ] | ||
335 | |||
336 | [[package]] | ||
337 | name = "rusty-fork" | ||
338 | version = "0.2.1" | ||
339 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
340 | dependencies = [ | ||
341 | "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", | ||
342 | "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", | ||
343 | "tempfile 3.0.5 (registry+https://github.com/rust-lang/crates.io-index)", | ||
344 | "wait-timeout 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", | ||
345 | ] | ||
346 | |||
347 | [[package]] | ||
348 | name = "scopeguard" | ||
349 | version = "0.3.3" | ||
350 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
351 | |||
352 | [[package]] | ||
353 | name = "semver" | ||
354 | version = "0.9.0" | ||
355 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
356 | dependencies = [ | ||
357 | "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
358 | ] | ||
359 | |||
360 | [[package]] | ||
361 | name = "semver-parser" | ||
362 | version = "0.7.0" | ||
363 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
364 | |||
365 | [[package]] | ||
366 | name = "serde" | ||
367 | version = "1.0.84" | ||
368 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
369 | |||
370 | [[package]] | ||
371 | name = "smallvec" | ||
372 | version = "0.6.7" | ||
373 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
374 | dependencies = [ | ||
375 | "unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
376 | ] | ||
377 | |||
378 | [[package]] | ||
379 | name = "smol_str" | ||
380 | version = "0.1.9" | ||
381 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
382 | dependencies = [ | ||
383 | "serde 1.0.84 (registry+https://github.com/rust-lang/crates.io-index)", | ||
384 | ] | ||
385 | |||
386 | [[package]] | ||
387 | name = "stable_deref_trait" | ||
388 | version = "1.1.1" | ||
389 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
390 | |||
391 | [[package]] | ||
392 | name = "tempfile" | ||
393 | version = "3.0.5" | ||
394 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
395 | dependencies = [ | ||
396 | "cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", | ||
397 | "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)", | ||
398 | "rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", | ||
399 | "redox_syscall 0.1.50 (registry+https://github.com/rust-lang/crates.io-index)", | ||
400 | "remove_dir_all 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", | ||
401 | "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", | ||
402 | ] | ||
403 | |||
404 | [[package]] | ||
405 | name = "text_unit" | ||
406 | version = "0.1.6" | ||
407 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
408 | dependencies = [ | ||
409 | "serde 1.0.84 (registry+https://github.com/rust-lang/crates.io-index)", | ||
410 | ] | ||
411 | |||
412 | [[package]] | ||
413 | name = "ucd-util" | ||
414 | version = "0.1.3" | ||
415 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
416 | |||
417 | [[package]] | ||
418 | name = "unicode-xid" | ||
419 | version = "0.1.0" | ||
420 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
421 | |||
422 | [[package]] | ||
423 | name = "unreachable" | ||
424 | version = "1.0.0" | ||
425 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
426 | dependencies = [ | ||
427 | "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", | ||
428 | ] | ||
429 | |||
430 | [[package]] | ||
431 | name = "void" | ||
432 | version = "1.0.2" | ||
433 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
434 | |||
435 | [[package]] | ||
436 | name = "wait-timeout" | ||
437 | version = "0.1.5" | ||
438 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
439 | dependencies = [ | ||
440 | "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)", | ||
441 | ] | ||
442 | |||
443 | [[package]] | ||
444 | name = "winapi" | ||
445 | version = "0.3.6" | ||
446 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
447 | dependencies = [ | ||
448 | "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
449 | "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
450 | ] | ||
451 | |||
452 | [[package]] | ||
453 | name = "winapi-i686-pc-windows-gnu" | ||
454 | version = "0.4.0" | ||
455 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
456 | |||
457 | [[package]] | ||
458 | name = "winapi-x86_64-pc-windows-gnu" | ||
459 | version = "0.4.0" | ||
460 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
461 | |||
462 | [metadata] | ||
463 | "checksum arbitrary 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6c7d1523aa3a127adf8b27af2404c03c12825b4c4d0698f01648d63fa9df62ee" | ||
464 | "checksum arrayvec 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "92c7fb76bc8826a8b33b4ee5bb07a247a81e76764ab4d55e8f73e3a4d8808c71" | ||
465 | "checksum bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6f1efcc46c18245a69c38fcc5cc650f16d3a59d034f3106e9ed63748f695730a" | ||
466 | "checksum bit-vec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4440d5cb623bb7390ae27fec0bb6c61111969860f8e3ae198bfa0663645e67cf" | ||
467 | "checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12" | ||
468 | "checksum byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "94f88df23a25417badc922ab0f5716cc1330e87f71ddd9203b3a3ccd9cedf75d" | ||
469 | "checksum cc 1.0.28 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4a8b715cb4597106ea87c7c84b2f1d452c7492033765df7f32651e66fcf749" | ||
470 | "checksum cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "082bb9b28e00d3c9d39cc03e64ce4cea0f1bb9b3fde493f0cbc008472d22bdf4" | ||
471 | "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" | ||
472 | "checksum drop_bomb 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "69b26e475fd29098530e709294e94e661974c851aed42512793f120fed4e199f" | ||
473 | "checksum either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0" | ||
474 | "checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3" | ||
475 | "checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" | ||
476 | "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" | ||
477 | "checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358" | ||
478 | "checksum lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a374c89b9db55895453a74c1e38861d9deec0b01b405a82516e9d5de4820dea1" | ||
479 | "checksum libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)" = "2d2857ec59fadc0773853c664d2d18e7198e83883e7060b63c924cb077bd5c74" | ||
480 | "checksum libfuzzer-sys 0.1.0 (git+https://github.com/rust-fuzz/libfuzzer-sys.git)" = "<none>" | ||
481 | "checksum lock_api 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "62ebf1391f6acad60e5c8b43706dde4582df75c06698ab44511d15016bc2442c" | ||
482 | "checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945" | ||
483 | "checksum num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0b3a5d7cc97d6d30d8b9bc8fa19bf45349ffe46241e8816f50f62f6d6aaabee1" | ||
484 | "checksum owning_ref 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "49a4b8ea2179e6a2e27411d3bca09ca6dd630821cf6894c6c7c8467a8ee7ef13" | ||
485 | "checksum parking_lot 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9723236a9525c757d9725b993511e3fc941e33f27751942232f0058298297edf" | ||
486 | "checksum parking_lot_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "94c8c7923936b28d546dfd14d4472eaf34c99b14e1c973a32b3e6d4eb04298c9" | ||
487 | "checksum proptest 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)" = "926d0604475349f463fe44130aae73f2294b5309ab2ca0310b998bd334ef191f" | ||
488 | "checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0" | ||
489 | "checksum rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e464cd887e869cddcae8792a4ee31d23c7edd516700695608f5b98c67ee0131c" | ||
490 | "checksum rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ae9d223d52ae411a33cf7e54ec6034ec165df296ccd23533d671a28252b6f66a" | ||
491 | "checksum rand_chacha 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "771b009e3a508cb67e8823dda454aaa5368c7bc1c16829fb77d3e980440dd34a" | ||
492 | "checksum rand_core 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1961a422c4d189dfb50ffa9320bf1f2a9bd54ecb92792fb9477f99a1045f3372" | ||
493 | "checksum rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0905b6b7079ec73b314d4c748701f6931eb79fd97c668caa3f1899b22b32c6db" | ||
494 | "checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" | ||
495 | "checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" | ||
496 | "checksum rand_pcg 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "086bd09a33c7044e56bb44d5bdde5a60e7f119a9e95b0775f545de759a32fe05" | ||
497 | "checksum rand_xorshift 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "effa3fcaa47e18db002bdde6060944b6d2f9cfd8db471c30e873448ad9187be3" | ||
498 | "checksum redox_syscall 0.1.50 (registry+https://github.com/rust-lang/crates.io-index)" = "52ee9a534dc1301776eff45b4fa92d2c39b1d8c3d3357e6eb593e0d795506fc2" | ||
499 | "checksum regex-syntax 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "4e47a2ed29da7a9e1960e1639e7a982e6edc6d49be308a3b02daf511504a16d1" | ||
500 | "checksum remove_dir_all 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3488ba1b9a2084d38645c4c08276a1752dcbf2c7130d74f1569681ad5d2799c5" | ||
501 | "checksum rowan 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ae9ae7dba5e703f423ceb8646d636c73e6d858a2f8c834808b4565e42ccda9e2" | ||
502 | "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" | ||
503 | "checksum rusty-fork 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9591f190d2852720b679c21f66ad929f9f1d7bb09d1193c26167586029d8489c" | ||
504 | "checksum scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "94258f53601af11e6a49f722422f6e3425c52b06245a5cf9bc09908b174f5e27" | ||
505 | "checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" | ||
506 | "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" | ||
507 | "checksum serde 1.0.84 (registry+https://github.com/rust-lang/crates.io-index)" = "0e732ed5a5592c17d961555e3b552985baf98d50ce418b7b655f31f6ba7eb1b7" | ||
508 | "checksum smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)" = "b73ea3738b47563803ef814925e69be00799a8c07420be8b996f8e98fb2336db" | ||
509 | "checksum smol_str 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "9af1035bc5d742ab6b7ab16713e41cc2ffe78cb474f6f43cd696b2d16052007e" | ||
510 | "checksum stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dba1a27d3efae4351c8051072d619e3ade2820635c3958d826bfea39d59b54c8" | ||
511 | "checksum tempfile 3.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "7e91405c14320e5c79b3d148e1c86f40749a36e490642202a31689cb1a3452b2" | ||
512 | "checksum text_unit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "158bb1c22b638b1da3c95a8ad9f061ea40d4d39fd0301be3a520f92efeeb189e" | ||
513 | "checksum ucd-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "535c204ee4d8434478593480b8f86ab45ec9aae0e83c568ca81abf0fd0e88f86" | ||
514 | "checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" | ||
515 | "checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" | ||
516 | "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" | ||
517 | "checksum wait-timeout 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "b9f3bf741a801531993db6478b95682117471f76916f5e690dd8d45395b09349" | ||
518 | "checksum winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "92c1eb33641e276cfa214a0522acad57be5c56b10cb348b3c5117db75f3ac4b0" | ||
519 | "checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" | ||
520 | "checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" | ||
diff --git a/crates/ra_syntax/fuzz/fuzz_targets/parser.rs b/crates/ra_syntax/fuzz/fuzz_targets/parser.rs
index 396c0ecaf..4667d5579 100644
--- a/crates/ra_syntax/fuzz/fuzz_targets/parser.rs
+++ b/crates/ra_syntax/fuzz/fuzz_targets/parser.rs
@@ -4,6 +4,6 @@ extern crate ra_syntax;
 
 fuzz_target!(|data: &[u8]| {
     if let Ok(text) = std::str::from_utf8(data) {
-        ra_syntax::utils::check_fuzz_invariants(text)
+        ra_syntax::check_fuzz_invariants(text)
     }
 });
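
The fuzz target now calls the crate-level `ra_syntax::check_fuzz_invariants` entry point (added to `lib.rs` below) instead of the removed `utils` module. A minimal sketch of driving the same invariant check from an ordinary unit test rather than the fuzzer; the sample inputs are illustrative:

    // The entry point parses the text, validates block structure and forces
    // error collection; it must not panic, no matter how broken the input is.
    #[test]
    fn check_fuzz_invariants_smoke() {
        for &text in ["fn foo() {", "struct S { x: }", ""].iter() {
            ra_syntax::check_fuzz_invariants(text);
        }
    }
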
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs
index 99b0983b0..e2b4f0388 100644
--- a/crates/ra_syntax/src/algo.rs
+++ b/crates/ra_syntax/src/algo.rs
@@ -2,7 +2,7 @@ pub mod visit;
 
 use rowan::TransparentNewType;
 
-use crate::{SyntaxNode, TextRange, TextUnit, AstNode};
+use crate::{SyntaxNode, TextRange, TextUnit, AstNode, Direction};
 
 pub use rowan::LeafAtOffset;
 
@@ -29,10 +29,16 @@ pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) ->
     find_leaf_at_offset(syntax, offset).find_map(|leaf| leaf.ancestors().find_map(N::cast))
 }
 
+/// Finds the first sibling in the given direction which is not `trivia`
+pub fn non_trivia_sibling(node: &SyntaxNode, direction: Direction) -> Option<&SyntaxNode> {
+    node.siblings(direction).skip(1).find(|node| !node.kind().is_trivia())
+}
+
 pub fn find_covering_node(root: &SyntaxNode, range: TextRange) -> &SyntaxNode {
     SyntaxNode::from_repr(root.0.covering_node(range))
 }
 
+// Replace with `std::iter::successors` in `1.34.0`
 pub fn generate<T>(seed: Option<T>, step: impl Fn(&T) -> Option<T>) -> impl Iterator<Item = T> {
     ::itertools::unfold(seed, move |slot| {
         slot.take().map(|curr| {
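
The new `non_trivia_sibling` helper skips whitespace and comment tokens while walking siblings. A small usage sketch, assuming a `SyntaxNode` borrowed from a parsed tree (the wrapper function is hypothetical, not part of this patch):

    use ra_syntax::{algo::non_trivia_sibling, Direction, SyntaxNode};

    // For the `,` node inside `foo(a, /* note */ b)`, this returns the node for
    // the `b` argument: the whitespace and the comment in between are trivia
    // and get skipped, unlike with plain `SyntaxNode::next_sibling`.
    fn next_non_trivia(comma: &SyntaxNode) -> Option<&SyntaxNode> {
        non_trivia_sibling(comma, Direction::Next)
    }
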
diff --git a/crates/ra_syntax/src/ast.rs b/crates/ra_syntax/src/ast.rs
index 62641c9fe..20e0a6856 100644
--- a/crates/ra_syntax/src/ast.rs
+++ b/crates/ra_syntax/src/ast.rs
@@ -1,3 +1,4 @@
+//! Abstract Syntax Tree, layered on top of untyped `SyntaxNode`s
 mod generated;
 
 use std::marker::PhantomData;
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs
index b12282b39..e7d402446 100644
--- a/crates/ra_syntax/src/lib.rs
+++ b/crates/ra_syntax/src/lib.rs
@@ -1,22 +1,24 @@
-//! An experimental implementation of [Rust RFC#2256 libsyntax2.0][rfc#2256].
+//! Syntax Tree library used throughout the rust analyzer.
 //!
-//! The intent is to be an IDE-ready parser, i.e. one that offers
+//! Properties:
+//!   - easy and fast incremental re-parsing
+//!   - graceful handling of errors
+//!   - full-fidelity representation (*any* text can be precisely represented as
+//!     a syntax tree)
 //!
-//! - easy and fast incremental re-parsing,
-//! - graceful handling of errors, and
-//! - maintains all information in the source file.
+//! For more information, see the [RFC]. Current implementation is inspired by
+//! the [Swift] one.
 //!
-//! For more information, see [the RFC][rfc#2265], or [the working draft][RFC.md].
+//! The most interesting modules here are `syntax_node` (which defines concrete
+//! syntax tree) and `ast` (which defines abstract syntax tree on top of the
+//! CST). The actual parser lives in a separate `ra_parser` crate, though the
+//! lexer lives in this crate.
 //!
-//! [rfc#2256]: <https://github.com/rust-lang/rfcs/pull/2256>
-//! [RFC.md]: <https://github.com/matklad/libsyntax2/blob/master/docs/RFC.md>
-
-#![forbid(missing_debug_implementations, unconditional_recursion, future_incompatible)]
-#![deny(bad_style, missing_docs)]
-#![allow(missing_docs)]
-//#![warn(unreachable_pub)] // rust-lang/rust#47816
+//! See `api_walkthrough` test in this file for a quick API tour!
+//!
+//! [RFC]: <https://github.com/rust-lang/rfcs/pull/2256>
+//! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md>
 
-mod syntax_kinds;
 mod syntax_node;
 mod syntax_text;
 mod syntax_error;
@@ -27,13 +29,11 @@ mod ptr;
 
 pub mod algo;
 pub mod ast;
-/// Utilities for simple uses of the parser.
-pub mod utils;
 
 pub use rowan::{SmolStr, TextRange, TextUnit};
+pub use ra_parser::SyntaxKind;
 pub use crate::{
     ast::AstNode,
-    syntax_kinds::SyntaxKind,
     syntax_error::{SyntaxError, SyntaxErrorKind, Location},
     syntax_text::SyntaxText,
     syntax_node::{Direction, SyntaxNode, WalkEvent, TreeArc},
@@ -51,7 +51,7 @@ impl SourceFile {
     fn new(green: GreenNode, errors: Vec<SyntaxError>) -> TreeArc<SourceFile> {
         let root = SyntaxNode::new(green, errors);
         if cfg!(debug_assertions) {
-            utils::validate_block_structure(&root);
+            validation::validate_block_structure(&root);
         }
         assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
         TreeArc::cast(root)
@@ -82,3 +82,181 @@ impl SourceFile {
82 | errors | 82 | errors |
83 | } | 83 | } |
84 | } | 84 | } |
85 | |||
86 | pub fn check_fuzz_invariants(text: &str) { | ||
87 | let file = SourceFile::parse(text); | ||
88 | let root = file.syntax(); | ||
89 | validation::validate_block_structure(root); | ||
90 | let _ = file.errors(); | ||
91 | } | ||
92 | |||
93 | /// This test does not assert anything and instead just shows off the crate's | ||
94 | /// API. | ||
95 | #[test] | ||
96 | fn api_walkthrough() { | ||
97 | use ast::{ModuleItemOwner, NameOwner}; | ||
98 | |||
99 | let source_code = " | ||
100 | fn foo() { | ||
101 | 1 + 1 | ||
102 | } | ||
103 | "; | ||
104 | // `SourceFile` is the main entry point. | ||
105 | // | ||
106 | // Note how `parse` does not return a `Result`: even completely invalid | ||
107 | // source code might be parsed. | ||
108 | let file = SourceFile::parse(source_code); | ||
109 | |||
110 | // Due to the way ownership is set up, owned syntax Nodes always live behind | ||
111 | // a `TreeArc` smart pointer. `TreeArc` is roughly an `std::sync::Arc` which | ||
112 | // points to the whole file instead of an individual node. | ||
113 | let file: TreeArc<SourceFile> = file; | ||
114 | |||
115 | // `SourceFile` is the root of the syntax tree. We can iterate file's items: | ||
116 | let mut func = None; | ||
117 | for item in file.items() { | ||
118 | match item.kind() { | ||
119 | ast::ModuleItemKind::FnDef(f) => func = Some(f), | ||
120 | _ => unreachable!(), | ||
121 | } | ||
122 | } | ||
123 | // The returned items are always references. | ||
124 | let func: &ast::FnDef = func.unwrap(); | ||
125 | |||
126 | // All nodes implement `ToOwned` trait, with `Owned = TreeArc<Self>`. | ||
127 | // `to_owned` is a cheap operation: atomic increment. | ||
128 | let _owned_func: TreeArc<ast::FnDef> = func.to_owned(); | ||
129 | |||
130 | // Each AST node has a bunch of getters for children. All getters return | ||
131 | // `Option`s though, to account for incomplete code. Some getters are common | ||
132 | // for several kinds of node. In this case, a trait like `ast::NameOwner` | ||
133 | // usually exists. By convention, all ast types should be used with `ast::` | ||
134 | // qualifier. | ||
135 | let name: Option<&ast::Name> = func.name(); | ||
136 | let name = name.unwrap(); | ||
137 | assert_eq!(name.text(), "foo"); | ||
138 | |||
139 | // Let's get the `1 + 1` expression! | ||
140 | let block: &ast::Block = func.body().unwrap(); | ||
141 | let expr: &ast::Expr = block.expr().unwrap(); | ||
142 | |||
143 | // "Enum"-like nodes are represented using the "kind" pattern. It allows us | ||
144 | // to match exhaustively against all flavors of nodes, while maintaining | ||
145 | // internal representation flexibility. The drawback is that one can't write | ||
146 | // nested matches as one pattern. | ||
147 | let bin_expr: &ast::BinExpr = match expr.kind() { | ||
148 | ast::ExprKind::BinExpr(e) => e, | ||
149 | _ => unreachable!(), | ||
150 | }; | ||
151 | |||
152 | // Besides the "typed" AST API, there's an untyped CST one as well. | ||
153 | // To switch from AST to CST, call `.syntax()` method: | ||
154 | let expr_syntax: &SyntaxNode = expr.syntax(); | ||
155 | |||
156 | // Note how `expr` and `bin_expr` are in fact the same node underneath: | ||
157 | assert!(std::ptr::eq(expr_syntax, bin_expr.syntax())); | ||
158 | |||
159 | // To go from CST to AST, `AstNode::cast` function is used: | ||
160 | let expr = match ast::Expr::cast(expr_syntax) { | ||
161 | Some(e) => e, | ||
162 | None => unreachable!(), | ||
163 | }; | ||
164 | |||
165 | // Note how expr is also a reference! | ||
166 | let expr: &ast::Expr = expr; | ||
167 | |||
168 | // This is possible because the underlying representation is the same: | ||
169 | assert_eq!( | ||
170 | expr as *const ast::Expr as *const u8, | ||
171 | expr_syntax as *const SyntaxNode as *const u8 | ||
172 | ); | ||
173 | |||
174 | // The two properties each syntax node has is a `SyntaxKind`: | ||
175 | assert_eq!(expr_syntax.kind(), SyntaxKind::BIN_EXPR); | ||
176 | |||
177 | // And text range: | ||
178 | assert_eq!(expr_syntax.range(), TextRange::from_to(32.into(), 37.into())); | ||
179 | |||
180 | // You can get node's text as a `SyntaxText` object, which will traverse the | ||
181 | // tree collecting token's text: | ||
182 | let text: SyntaxText<'_> = expr_syntax.text(); | ||
183 | assert_eq!(text.to_string(), "1 + 1"); | ||
184 | |||
185 | // There's a bunch of traversal methods on `SyntaxNode`: | ||
186 | assert_eq!(expr_syntax.parent(), Some(block.syntax())); | ||
187 | assert_eq!(block.syntax().first_child().map(|it| it.kind()), Some(SyntaxKind::L_CURLY)); | ||
188 | assert_eq!(expr_syntax.next_sibling().map(|it| it.kind()), Some(SyntaxKind::WHITESPACE)); | ||
189 | |||
190 | // As well as some iterator helpers: | ||
191 | let f = expr_syntax.ancestors().find_map(ast::FnDef::cast); | ||
192 | assert_eq!(f, Some(&*func)); | ||
193 | assert!(expr_syntax.siblings(Direction::Next).any(|it| it.kind() == SyntaxKind::R_CURLY)); | ||
194 | assert_eq!( | ||
195 | expr_syntax.descendants().count(), | ||
196 | 8, // 5 tokens `1`, ` `, `+`, ` `, `1` | ||
197 | // 2 child literal expressions: `1`, `1` | ||
198 | // 1 the node itself: `1 + 1` | ||
199 | ); | ||
200 | |||
201 | // There's also a `preorder` method with a more fine-grained iteration control: | ||
202 | let mut buf = String::new(); | ||
203 | let mut indent = 0; | ||
204 | for event in expr_syntax.preorder() { | ||
205 | match event { | ||
206 | WalkEvent::Enter(node) => { | ||
207 | buf += &format!( | ||
208 | "{:indent$}{:?} {:?}\n", | ||
209 | " ", | ||
210 | node.text(), | ||
211 | node.kind(), | ||
212 | indent = indent | ||
213 | ); | ||
214 | indent += 2; | ||
215 | } | ||
216 | WalkEvent::Leave(_) => indent -= 2, | ||
217 | } | ||
218 | } | ||
219 | assert_eq!(indent, 0); | ||
220 | assert_eq!( | ||
221 | buf.trim(), | ||
222 | r#" | ||
223 | "1 + 1" BIN_EXPR | ||
224 | "1" LITERAL | ||
225 | "1" INT_NUMBER | ||
226 | " " WHITESPACE | ||
227 | "+" PLUS | ||
228 | " " WHITESPACE | ||
229 | "1" LITERAL | ||
230 | "1" INT_NUMBER | ||
231 | "# | ||
232 | .trim() | ||
233 | ); | ||
234 | |||
235 | // To recursively process the tree, there are three approaches: | ||
236 | // 1. explicitly call getter methods on AST nodes. | ||
237 | // 2. use descendants and `AstNode::cast`. | ||
238 | // 3. use descendants and the visitor. | ||
239 | // | ||
240 | // Here's how the first one looks like: | ||
241 | let exprs_cast: Vec<String> = file | ||
242 | .syntax() | ||
243 | .descendants() | ||
244 | .filter_map(ast::Expr::cast) | ||
245 | .map(|expr| expr.syntax().text().to_string()) | ||
246 | .collect(); | ||
247 | |||
248 | // An alternative is to use a visitor. The visitor does not do traversal | ||
249 | // automatically (so it's more akin to a generic lambda) and is constructed | ||
250 | // from closures. This seems more flexible than a single generated visitor | ||
251 | // trait. | ||
252 | use algo::visit::{visitor, Visitor}; | ||
253 | let mut exprs_visit = Vec::new(); | ||
254 | for node in file.syntax().descendants() { | ||
255 | if let Some(result) = | ||
256 | visitor().visit::<ast::Expr, _>(|expr| expr.syntax().text().to_string()).accept(node) | ||
257 | { | ||
258 | exprs_visit.push(result); | ||
259 | } | ||
260 | } | ||
261 | assert_eq!(exprs_cast, exprs_visit); | ||
262 | } | ||
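
As the walkthrough stresses, `SourceFile::parse` never fails: any input yields a full-fidelity tree, and problems are reported separately via `SourceFile::errors`. A short sketch of that error path, assuming `SyntaxError` implements `Debug` (which the `{:?}` formatting relies on):

    use ra_syntax::{AstNode, SourceFile};

    fn show_errors(text: &str) {
        let file = SourceFile::parse(text);
        // A tree is produced even for invalid input...
        println!("root kind: {:?}", file.syntax().kind());
        // ...and the problems come out of band, each with a location.
        for err in file.errors() {
            println!("syntax error: {:?}", err);
        }
    }
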
diff --git a/crates/ra_syntax/src/parsing.rs b/crates/ra_syntax/src/parsing.rs
index 138d1394a..cf573801c 100644
--- a/crates/ra_syntax/src/parsing.rs
+++ b/crates/ra_syntax/src/parsing.rs
@@ -1,78 +1,28 @@
-#[macro_use]
-mod token_set;
-mod builder;
+//! Lexing, bridging to ra_parser (which does the actual parsing) and
+//! incremental reparsing.
+
 mod lexer;
-mod event;
 mod input;
-mod parser;
-mod grammar;
+mod builder;
 mod reparsing;
 
 use crate::{
-    SyntaxKind, SmolStr, SyntaxError,
+    SyntaxError,
+    syntax_node::GreenNode,
     parsing::{
-        builder::GreenBuilder,
+        builder::TreeBuilder,
         input::ParserInput,
-        event::EventProcessor,
-        parser::Parser,
     },
-    syntax_node::GreenNode,
 };
 
 pub use self::lexer::{tokenize, Token};
 
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct ParseError(pub String);
-
 pub(crate) use self::reparsing::incremental_reparse;
 
 pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
     let tokens = tokenize(&text);
-    parse_with(GreenBuilder::default(), text, &tokens, grammar::root)
-}
-
-fn parse_with<S: TreeSink>(
-    tree_sink: S,
-    text: &str,
-    tokens: &[Token],
-    f: fn(&mut Parser),
-) -> S::Tree {
-    let mut events = {
-        let input = ParserInput::new(text, &tokens);
-        let mut p = Parser::new(&input);
-        f(&mut p);
-        p.finish()
-    };
-    EventProcessor::new(tree_sink, text, tokens, &mut events).process().finish()
-}
-
-/// `TreeSink` abstracts details of a particular syntax tree implementation.
-trait TreeSink {
-    type Tree;
-
-    /// Adds new leaf to the current branch.
-    fn leaf(&mut self, kind: SyntaxKind, text: SmolStr);
-
-    /// Start new branch and make it current.
-    fn start_branch(&mut self, kind: SyntaxKind);
-
-    /// Finish current branch and restore previous
-    /// branch as current.
-    fn finish_branch(&mut self);
-
-    fn error(&mut self, error: ParseError);
-
-    /// Complete tree building. Make sure that
-    /// `start_branch` and `finish_branch` calls
-    /// are paired!
-    fn finish(self) -> Self::Tree;
-}
-
-/// `TokenSource` abstracts the source of the tokens parser operates one.
-///
-/// Hopefully this will allow us to treat text and token trees in the same way!
-trait TokenSource {
-    fn token_kind(&self, pos: usize) -> SyntaxKind;
-    fn is_token_joint_to_next(&self, pos: usize) -> bool;
-    fn is_keyword(&self, pos: usize, kw: &str) -> bool;
+    let token_source = ParserInput::new(text, &tokens);
+    let mut tree_sink = TreeBuilder::new(text, &tokens);
+    ra_parser::parse(&token_source, &mut tree_sink);
+    tree_sink.finish()
 }
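
With this change the parser proper lives in `ra_parser` and talks to this crate only through the `TokenSource`/`TreeSink` traits: `ParserInput` feeds tokens in, `TreeBuilder` receives the tree events. As a sketch of how small that surface is, here is a do-nothing sink that merely counts what the parser emits; it assumes `ra_parser::TreeSink` has exactly the four methods that `TreeBuilder` implements in `builder.rs` below:

    use ra_parser::{ParseError, SyntaxKind, TreeSink};

    #[derive(Default)]
    struct CountingSink {
        leaves: usize,
        branches: usize,
        errors: usize,
    }

    impl TreeSink for CountingSink {
        // Called once per leaf; `n_tokens > 1` means several lexer tokens are
        // glued into a single leaf (e.g. `>>`).
        fn leaf(&mut self, _kind: SyntaxKind, _n_tokens: u8) {
            self.leaves += 1;
        }
        fn start_branch(&mut self, _kind: SyntaxKind) {
            self.branches += 1;
        }
        fn finish_branch(&mut self) {}
        fn error(&mut self, _error: ParseError) {
            self.errors += 1;
        }
    }

Inside this crate such a sink would be driven exactly like `TreeBuilder` in `parse_text` above.
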
diff --git a/crates/ra_syntax/src/parsing/builder.rs b/crates/ra_syntax/src/parsing/builder.rs
index ee0e2cce7..cfe3139b8 100644
--- a/crates/ra_syntax/src/parsing/builder.rs
+++ b/crates/ra_syntax/src/parsing/builder.rs
@@ -1,49 +1,170 @@
1 | use std::mem; | ||
2 | |||
3 | use ra_parser::{TreeSink, ParseError}; | ||
4 | use rowan::GreenNodeBuilder; | ||
5 | |||
1 | use crate::{ | 6 | use crate::{ |
2 | SmolStr, SyntaxKind, SyntaxError, SyntaxErrorKind, TextUnit, | 7 | SmolStr, SyntaxError, SyntaxErrorKind, TextUnit, TextRange, |
3 | parsing::{TreeSink, ParseError}, | 8 | SyntaxKind::{self, *}, |
9 | parsing::Token, | ||
4 | syntax_node::{GreenNode, RaTypes}, | 10 | syntax_node::{GreenNode, RaTypes}, |
5 | }; | 11 | }; |
6 | 12 | ||
7 | use rowan::GreenNodeBuilder; | 13 | /// Bridges the parser with our specific syntax tree representation. |
8 | 14 | /// | |
9 | pub(crate) struct GreenBuilder { | 15 | /// `TreeBuilder` also handles attachment of trivia (whitespace) to nodes. |
16 | pub(crate) struct TreeBuilder<'a> { | ||
17 | text: &'a str, | ||
18 | tokens: &'a [Token], | ||
10 | text_pos: TextUnit, | 19 | text_pos: TextUnit, |
20 | token_pos: usize, | ||
21 | state: State, | ||
11 | errors: Vec<SyntaxError>, | 22 | errors: Vec<SyntaxError>, |
12 | inner: GreenNodeBuilder<RaTypes>, | 23 | inner: GreenNodeBuilder<RaTypes>, |
13 | } | 24 | } |
14 | 25 | ||
15 | impl Default for GreenBuilder { | 26 | enum State { |
16 | fn default() -> GreenBuilder { | 27 | PendingStart, |
17 | GreenBuilder { | 28 | Normal, |
18 | text_pos: TextUnit::default(), | 29 | PendingFinish, |
19 | errors: Vec::new(), | ||
20 | inner: GreenNodeBuilder::new(), | ||
21 | } | ||
22 | } | ||
23 | } | 30 | } |
24 | 31 | ||
25 | impl TreeSink for GreenBuilder { | 32 | impl<'a> TreeSink for TreeBuilder<'a> { |
26 | type Tree = (GreenNode, Vec<SyntaxError>); | 33 | fn leaf(&mut self, kind: SyntaxKind, n_tokens: u8) { |
27 | 34 | match mem::replace(&mut self.state, State::Normal) { | |
28 | fn leaf(&mut self, kind: SyntaxKind, text: SmolStr) { | 35 | State::PendingStart => unreachable!(), |
29 | self.text_pos += TextUnit::of_str(text.as_str()); | 36 | State::PendingFinish => self.inner.finish_internal(), |
30 | self.inner.leaf(kind, text); | 37 | State::Normal => (), |
38 | } | ||
39 | self.eat_trivias(); | ||
40 | let n_tokens = n_tokens as usize; | ||
41 | let len = self.tokens[self.token_pos..self.token_pos + n_tokens] | ||
42 | .iter() | ||
43 | .map(|it| it.len) | ||
44 | .sum::<TextUnit>(); | ||
45 | self.do_leaf(kind, len, n_tokens); | ||
31 | } | 46 | } |
32 | 47 | ||
33 | fn start_branch(&mut self, kind: SyntaxKind) { | 48 | fn start_branch(&mut self, kind: SyntaxKind) { |
34 | self.inner.start_internal(kind) | 49 | match mem::replace(&mut self.state, State::Normal) { |
50 | State::PendingStart => { | ||
51 | self.inner.start_internal(kind); | ||
52 | // No need to attach trivias to previous node: there is no | ||
53 | // previous node. | ||
54 | return; | ||
55 | } | ||
56 | State::PendingFinish => self.inner.finish_internal(), | ||
57 | State::Normal => (), | ||
58 | } | ||
59 | |||
60 | let n_trivias = | ||
61 | self.tokens[self.token_pos..].iter().take_while(|it| it.kind.is_trivia()).count(); | ||
62 | let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias]; | ||
63 | let mut trivia_end = | ||
64 | self.text_pos + leading_trivias.iter().map(|it| it.len).sum::<TextUnit>(); | ||
65 | |||
66 | let n_attached_trivias = { | ||
67 | let leading_trivias = leading_trivias.iter().rev().map(|it| { | ||
68 | let next_end = trivia_end - it.len; | ||
69 | let range = TextRange::from_to(next_end, trivia_end); | ||
70 | trivia_end = next_end; | ||
71 | (it.kind, &self.text[range]) | ||
72 | }); | ||
73 | n_attached_trivias(kind, leading_trivias) | ||
74 | }; | ||
75 | self.eat_n_trivias(n_trivias - n_attached_trivias); | ||
76 | self.inner.start_internal(kind); | ||
77 | self.eat_n_trivias(n_attached_trivias); | ||
35 | } | 78 | } |
36 | 79 | ||
37 | fn finish_branch(&mut self) { | 80 | fn finish_branch(&mut self) { |
38 | self.inner.finish_internal(); | 81 | match mem::replace(&mut self.state, State::PendingFinish) { |
82 | State::PendingStart => unreachable!(), | ||
83 | State::PendingFinish => self.inner.finish_internal(), | ||
84 | State::Normal => (), | ||
85 | } | ||
39 | } | 86 | } |
40 | 87 | ||
41 | fn error(&mut self, error: ParseError) { | 88 | fn error(&mut self, error: ParseError) { |
42 | let error = SyntaxError::new(SyntaxErrorKind::ParseError(error), self.text_pos); | 89 | let error = SyntaxError::new(SyntaxErrorKind::ParseError(error), self.text_pos); |
43 | self.errors.push(error) | 90 | self.errors.push(error) |
44 | } | 91 | } |
92 | } | ||
93 | |||
94 | impl<'a> TreeBuilder<'a> { | ||
95 | pub(super) fn new(text: &'a str, tokens: &'a [Token]) -> TreeBuilder<'a> { | ||
96 | TreeBuilder { | ||
97 | text, | ||
98 | tokens, | ||
99 | text_pos: 0.into(), | ||
100 | token_pos: 0, | ||
101 | state: State::PendingStart, | ||
102 | errors: Vec::new(), | ||
103 | inner: GreenNodeBuilder::new(), | ||
104 | } | ||
105 | } | ||
106 | |||
107 | pub(super) fn finish(mut self) -> (GreenNode, Vec<SyntaxError>) { | ||
108 | match mem::replace(&mut self.state, State::Normal) { | ||
109 | State::PendingFinish => { | ||
110 | self.eat_trivias(); | ||
111 | self.inner.finish_internal() | ||
112 | } | ||
113 | State::PendingStart | State::Normal => unreachable!(), | ||
114 | } | ||
45 | 115 | ||
46 | fn finish(self) -> (GreenNode, Vec<SyntaxError>) { | ||
47 | (self.inner.finish(), self.errors) | 116 | (self.inner.finish(), self.errors) |
48 | } | 117 | } |
118 | |||
119 | fn eat_trivias(&mut self) { | ||
120 | while let Some(&token) = self.tokens.get(self.token_pos) { | ||
121 | if !token.kind.is_trivia() { | ||
122 | break; | ||
123 | } | ||
124 | self.do_leaf(token.kind, token.len, 1); | ||
125 | } | ||
126 | } | ||
127 | |||
128 | fn eat_n_trivias(&mut self, n: usize) { | ||
129 | for _ in 0..n { | ||
130 | let token = self.tokens[self.token_pos]; | ||
131 | assert!(token.kind.is_trivia()); | ||
132 | self.do_leaf(token.kind, token.len, 1); | ||
133 | } | ||
134 | } | ||
135 | |||
136 | fn do_leaf(&mut self, kind: SyntaxKind, len: TextUnit, n_tokens: usize) { | ||
137 | let range = TextRange::offset_len(self.text_pos, len); | ||
138 | let text: SmolStr = self.text[range].into(); | ||
139 | self.text_pos += len; | ||
140 | self.token_pos += n_tokens; | ||
141 | self.inner.leaf(kind, text); | ||
142 | } | ||
143 | } | ||
144 | |||
145 | fn n_attached_trivias<'a>( | ||
146 | kind: SyntaxKind, | ||
147 | trivias: impl Iterator<Item = (SyntaxKind, &'a str)>, | ||
148 | ) -> usize { | ||
149 | match kind { | ||
150 | CONST_DEF | TYPE_DEF | STRUCT_DEF | ENUM_DEF | ENUM_VARIANT | FN_DEF | TRAIT_DEF | ||
151 | | MODULE | NAMED_FIELD_DEF => { | ||
152 | let mut res = 0; | ||
153 | for (i, (kind, text)) in trivias.enumerate() { | ||
154 | match kind { | ||
155 | WHITESPACE => { | ||
156 | if text.contains("\n\n") { | ||
157 | break; | ||
158 | } | ||
159 | } | ||
160 | COMMENT => { | ||
161 | res = i + 1; | ||
162 | } | ||
163 | _ => (), | ||
164 | } | ||
165 | } | ||
166 | res | ||
167 | } | ||
168 | _ => 0, | ||
169 | } | ||
49 | } | 170 | } |
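
The trivia-attachment logic above decides how many leading whitespace/comment tokens become children of a started node: for definition-like kinds, comments are pulled in until a whitespace token containing a blank line ("\n\n") is hit. An illustrative input, with the expected behaviour spelled out in comments:

    // Attached: only single newlines separate this comment from `foo`, so
    // `n_attached_trivias` counts it and it ends up inside foo's FN_DEF node.
    fn foo() {}

    // Detached: the blank line after this comment is a WHITESPACE token
    // containing "\n\n", so attachment stops and the comment stays a sibling
    // of bar's FN_DEF instead of becoming its child.

    fn bar() {}
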
diff --git a/crates/ra_syntax/src/parsing/event.rs b/crates/ra_syntax/src/parsing/event.rs
deleted file mode 100644
index f6f020eab..000000000
--- a/crates/ra_syntax/src/parsing/event.rs
+++ /dev/null
@@ -1,247 +0,0 @@
1 | //! This module provides a way to construct a `File`. | ||
2 | //! It is intended to be completely decoupled from the | ||
3 | //! parser, so as to allow to evolve the tree representation | ||
4 | //! and the parser algorithm independently. | ||
5 | //! | ||
6 | //! The `TreeSink` trait is the bridge between the parser and the | ||
7 | //! tree builder: the parser produces a stream of events like | ||
8 | //! `start node`, `finish node`, and `FileBuilder` converts | ||
9 | //! this stream to a real tree. | ||
10 | use std::mem; | ||
11 | |||
12 | use crate::{ | ||
13 | SmolStr, | ||
14 | SyntaxKind::{self, *}, | ||
15 | TextRange, TextUnit, | ||
16 | parsing::{ | ||
17 | ParseError, TreeSink, | ||
18 | lexer::Token, | ||
19 | }, | ||
20 | }; | ||
21 | |||
22 | /// `Parser` produces a flat list of `Event`s. | ||
23 | /// They are converted to a tree-structure in | ||
24 | /// a separate pass, via `TreeBuilder`. | ||
25 | #[derive(Debug)] | ||
26 | pub(crate) enum Event { | ||
27 | /// This event signifies the start of the node. | ||
28 | /// It should be either abandoned (in which case the | ||
29 | /// `kind` is `TOMBSTONE`, and the event is ignored), | ||
30 | /// or completed via a `Finish` event. | ||
31 | /// | ||
32 | /// All tokens between a `Start` and a `Finish` would | ||
33 | /// become the children of the respective node. | ||
34 | /// | ||
35 | /// For left-recursive syntactic constructs, the parser produces | ||
36 | /// a child node before it sees a parent. `forward_parent` | ||
37 | /// saves the position of current event's parent. | ||
38 | /// | ||
39 | /// Consider this path | ||
40 | /// | ||
41 | /// foo::bar | ||
42 | /// | ||
43 | /// The events for it would look like this: | ||
44 | /// | ||
45 | /// | ||
46 | /// START(PATH) IDENT('foo') FINISH START(PATH) COLONCOLON IDENT('bar') FINISH | ||
47 | /// | /\ | ||
48 | /// | | | ||
49 | /// +------forward-parent------+ | ||
50 | /// | ||
51 | /// And the tree would look like this | ||
52 | /// | ||
53 | /// +--PATH---------+ | ||
54 | /// | | | | ||
55 | /// | | | | ||
56 | /// | '::' 'bar' | ||
57 | /// | | ||
58 | /// PATH | ||
59 | /// | | ||
60 | /// 'foo' | ||
61 | /// | ||
62 | /// See also `CompletedMarker::precede`. | ||
63 | Start { | ||
64 | kind: SyntaxKind, | ||
65 | forward_parent: Option<u32>, | ||
66 | }, | ||
67 | |||
68 | /// Complete the previous `Start` event | ||
69 | Finish, | ||
70 | |||
71 | /// Produce a single leaf-element. | ||
72 | /// `n_raw_tokens` is used to glue complex contextual tokens. | ||
73 | /// For example, the lexer tokenizes `>>` as `>`, `>`, and | ||
74 | /// `n_raw_tokens = 2` is used to produce a single `>>`. | ||
75 | Token { | ||
76 | kind: SyntaxKind, | ||
77 | n_raw_tokens: u8, | ||
78 | }, | ||
79 | |||
80 | Error { | ||
81 | msg: ParseError, | ||
82 | }, | ||
83 | } | ||
84 | |||
85 | impl Event { | ||
86 | pub(crate) fn tombstone() -> Self { | ||
87 | Event::Start { kind: TOMBSTONE, forward_parent: None } | ||
88 | } | ||
89 | } | ||
90 | |||
91 | pub(super) struct EventProcessor<'a, S: TreeSink> { | ||
92 | sink: S, | ||
93 | text_pos: TextUnit, | ||
94 | text: &'a str, | ||
95 | token_pos: usize, | ||
96 | tokens: &'a [Token], | ||
97 | events: &'a mut [Event], | ||
98 | } | ||
99 | |||
100 | impl<'a, S: TreeSink> EventProcessor<'a, S> { | ||
101 | pub(super) fn new( | ||
102 | sink: S, | ||
103 | text: &'a str, | ||
104 | tokens: &'a [Token], | ||
105 | events: &'a mut [Event], | ||
106 | ) -> EventProcessor<'a, S> { | ||
107 | EventProcessor { sink, text_pos: 0.into(), text, token_pos: 0, tokens, events } | ||
108 | } | ||
109 | |||
110 | /// Generate the syntax tree by replaying the recorded events into the sink. | ||
111 | pub(crate) fn process(mut self) -> S { | ||
112 | let mut forward_parents = Vec::new(); | ||
113 | |||
114 | for i in 0..self.events.len() { | ||
115 | match mem::replace(&mut self.events[i], Event::tombstone()) { | ||
116 | Event::Start { kind: TOMBSTONE, .. } => (), | ||
117 | |||
118 | Event::Start { kind, forward_parent } => { | ||
119 | // For events [A, B, C], B is A's forward_parent and C is B's forward_parent. | ||
120 | // The events are emitted child-first (`A -> B -> C`), but the nodes | ||
121 | // must be started parent-first: `C`, then `B`, then `A`. | ||
122 | |||
123 | // append `A` into parents. | ||
124 | forward_parents.push(kind); | ||
125 | let mut idx = i; | ||
126 | let mut fp = forward_parent; | ||
127 | while let Some(fwd) = fp { | ||
128 | idx += fwd as usize; | ||
129 | // append `A`'s forward_parent `B` | ||
130 | fp = match mem::replace(&mut self.events[idx], Event::tombstone()) { | ||
131 | Event::Start { kind, forward_parent } => { | ||
132 | forward_parents.push(kind); | ||
133 | forward_parent | ||
134 | } | ||
135 | _ => unreachable!(), | ||
136 | }; | ||
137 | // append `B`'s forward_parent `C` in the next stage. | ||
138 | } | ||
139 | |||
140 | for kind in forward_parents.drain(..).rev() { | ||
141 | self.start(kind); | ||
142 | } | ||
143 | } | ||
144 | Event::Finish => { | ||
145 | let is_last = i == self.events.len() - 1; | ||
146 | self.finish(is_last); | ||
147 | } | ||
148 | Event::Token { kind, n_raw_tokens } => { | ||
149 | self.eat_trivias(); | ||
150 | let n_raw_tokens = n_raw_tokens as usize; | ||
151 | let len = self.tokens[self.token_pos..self.token_pos + n_raw_tokens] | ||
152 | .iter() | ||
153 | .map(|it| it.len) | ||
154 | .sum::<TextUnit>(); | ||
155 | self.leaf(kind, len, n_raw_tokens); | ||
156 | } | ||
157 | Event::Error { msg } => self.sink.error(msg), | ||
158 | } | ||
159 | } | ||
160 | self.sink | ||
161 | } | ||
162 | |||
163 | /// Start a new branch, attaching the right amount of leading comments/whitespace to it. | ||
164 | fn start(&mut self, kind: SyntaxKind) { | ||
165 | if kind == SOURCE_FILE { | ||
166 | self.sink.start_branch(kind); | ||
167 | return; | ||
168 | } | ||
169 | let n_trivias = | ||
170 | self.tokens[self.token_pos..].iter().take_while(|it| it.kind.is_trivia()).count(); | ||
171 | let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias]; | ||
172 | let mut trivia_end = | ||
173 | self.text_pos + leading_trivias.iter().map(|it| it.len).sum::<TextUnit>(); | ||
174 | |||
175 | let n_attached_trivias = { | ||
176 | let leading_trivias = leading_trivias.iter().rev().map(|it| { | ||
177 | let next_end = trivia_end - it.len; | ||
178 | let range = TextRange::from_to(next_end, trivia_end); | ||
179 | trivia_end = next_end; | ||
180 | (it.kind, &self.text[range]) | ||
181 | }); | ||
182 | n_attached_trivias(kind, leading_trivias) | ||
183 | }; | ||
184 | self.eat_n_trivias(n_trivias - n_attached_trivias); | ||
185 | self.sink.start_branch(kind); | ||
186 | self.eat_n_trivias(n_attached_trivias); | ||
187 | } | ||
188 | |||
189 | fn finish(&mut self, is_last: bool) { | ||
190 | if is_last { | ||
191 | self.eat_trivias() | ||
192 | } | ||
193 | self.sink.finish_branch(); | ||
194 | } | ||
195 | |||
196 | fn eat_trivias(&mut self) { | ||
197 | while let Some(&token) = self.tokens.get(self.token_pos) { | ||
198 | if !token.kind.is_trivia() { | ||
199 | break; | ||
200 | } | ||
201 | self.leaf(token.kind, token.len, 1); | ||
202 | } | ||
203 | } | ||
204 | |||
205 | fn eat_n_trivias(&mut self, n: usize) { | ||
206 | for _ in 0..n { | ||
207 | let token = self.tokens[self.token_pos]; | ||
208 | assert!(token.kind.is_trivia()); | ||
209 | self.leaf(token.kind, token.len, 1); | ||
210 | } | ||
211 | } | ||
212 | |||
213 | fn leaf(&mut self, kind: SyntaxKind, len: TextUnit, n_tokens: usize) { | ||
214 | let range = TextRange::offset_len(self.text_pos, len); | ||
215 | let text: SmolStr = self.text[range].into(); | ||
216 | self.text_pos += len; | ||
217 | self.token_pos += n_tokens; | ||
218 | self.sink.leaf(kind, text); | ||
219 | } | ||
220 | } | ||
221 | |||
222 | fn n_attached_trivias<'a>( | ||
223 | kind: SyntaxKind, | ||
224 | trivias: impl Iterator<Item = (SyntaxKind, &'a str)>, | ||
225 | ) -> usize { | ||
226 | match kind { | ||
227 | CONST_DEF | TYPE_DEF | STRUCT_DEF | ENUM_DEF | ENUM_VARIANT | FN_DEF | TRAIT_DEF | ||
228 | | MODULE | NAMED_FIELD_DEF => { | ||
229 | let mut res = 0; | ||
230 | for (i, (kind, text)) in trivias.enumerate() { | ||
231 | match kind { | ||
232 | WHITESPACE => { | ||
233 | if text.contains("\n\n") { | ||
234 | break; | ||
235 | } | ||
236 | } | ||
237 | COMMENT => { | ||
238 | res = i + 1; | ||
239 | } | ||
240 | _ => (), | ||
241 | } | ||
242 | } | ||
243 | res | ||
244 | } | ||
245 | _ => 0, | ||
246 | } | ||
247 | } | ||
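The `forward_parent` mechanism documented at the top of the deleted file is the subtle part: `Start` events are emitted child-first, but branches have to be opened parent-first, which is what the chain-walking loop in `process` does. A standalone sketch of that step (simplified `Event` and names of my own; not the deleted code):

    enum Event {
        Start { kind: &'static str, forward_parent: Option<u32> },
        Tombstone,
    }

    // Follow the forward_parent chain starting at event `i` and return the
    // node kinds in the order the branches must be started (outermost first).
    fn start_order(events: &mut [Event], i: usize) -> Vec<&'static str> {
        let mut kinds = Vec::new();
        let mut idx = i;
        loop {
            match std::mem::replace(&mut events[idx], Event::Tombstone) {
                Event::Start { kind, forward_parent } => {
                    kinds.push(kind);
                    match forward_parent {
                        Some(fwd) => idx += fwd as usize, // hop to the parent's event
                        None => break,
                    }
                }
                Event::Tombstone => break, // already consumed on an earlier pass
            }
        }
        kinds.reverse(); // discovered child-first, must be started parent-first
        kinds
    }

    fn main() {
        // `foo::bar`: the inner PATH is emitted first and points forward to the
        // outer PATH (the offset is simplified to 1 here).
        let mut events = vec![
            Event::Start { kind: "PATH(inner)", forward_parent: Some(1) },
            Event::Start { kind: "PATH(outer)", forward_parent: None },
        ];
        assert_eq!(start_order(&mut events, 0), vec!["PATH(outer)", "PATH(inner)"]);
    }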
diff --git a/crates/ra_syntax/src/parsing/grammar.rs b/crates/ra_syntax/src/parsing/grammar.rs deleted file mode 100644 index 7ca9c223c..000000000 --- a/crates/ra_syntax/src/parsing/grammar.rs +++ /dev/null | |||
@@ -1,204 +0,0 @@ | |||
1 | //! This is the actual "grammar" of the Rust language. | ||
2 | //! | ||
3 | //! Each function in this module and its children corresponds | ||
4 | //! to a production of the format grammar. Submodules roughly | ||
5 | //! correspond to different *areas* of the grammar. By convention, | ||
6 | //! each submodule starts with `use super::*` import and exports | ||
7 | //! "public" productions via `pub(super)`. | ||
8 | //! | ||
9 | //! See docs for `Parser` to learn about API, available to the grammar, | ||
10 | //! and see docs for `Event` to learn how this actually manages to | ||
11 | //! produce parse trees. | ||
12 | //! | ||
13 | //! Code in this module also contains inline tests, which start with | ||
14 | //! `// test name-of-the-test` comment and look like this: | ||
15 | //! | ||
16 | //! ``` | ||
17 | //! // test function_with_zero_parameters | ||
18 | //! // fn foo() {} | ||
19 | //! ``` | ||
20 | //! | ||
21 | //! After adding a new inline-test, run `cargo collect-tests` to extract | ||
22 | //! it as a standalone text-fixture into `tests/data/parser/inline`, and | ||
23 | //! run `cargo test` once to create the "gold" value. | ||
24 | //! | ||
25 | //! Coding convention: rules like `where_clause` always produce either a | ||
26 | //! node or an error, rules like `opt_where_clause` may produce nothing. | ||
27 | //! Non-opt rules typically start with `assert!(p.at(FIRST_TOKEN))`, the | ||
28 | //! caller is responsible for branching on the first token. | ||
29 | mod attributes; | ||
30 | mod expressions; | ||
31 | mod items; | ||
32 | mod params; | ||
33 | mod paths; | ||
34 | mod patterns; | ||
35 | mod type_args; | ||
36 | mod type_params; | ||
37 | mod types; | ||
38 | |||
39 | use crate::{ | ||
40 | SyntaxNode, | ||
41 | SyntaxKind::{self, *}, | ||
42 | parsing::{ | ||
43 | token_set::TokenSet, | ||
44 | parser::{CompletedMarker, Marker, Parser} | ||
45 | }, | ||
46 | }; | ||
47 | |||
48 | pub(super) fn root(p: &mut Parser) { | ||
49 | let m = p.start(); | ||
50 | p.eat(SHEBANG); | ||
51 | items::mod_contents(p, false); | ||
52 | m.complete(p, SOURCE_FILE); | ||
53 | } | ||
54 | |||
55 | pub(super) fn reparser(node: &SyntaxNode) -> Option<fn(&mut Parser)> { | ||
56 | let res = match node.kind() { | ||
57 | BLOCK => expressions::block, | ||
58 | NAMED_FIELD_DEF_LIST => items::named_field_def_list, | ||
59 | NAMED_FIELD_LIST => items::named_field_list, | ||
60 | ENUM_VARIANT_LIST => items::enum_variant_list, | ||
61 | MATCH_ARM_LIST => items::match_arm_list, | ||
62 | USE_TREE_LIST => items::use_tree_list, | ||
63 | EXTERN_ITEM_LIST => items::extern_item_list, | ||
64 | TOKEN_TREE if node.first_child().unwrap().kind() == L_CURLY => items::token_tree, | ||
65 | ITEM_LIST => { | ||
66 | let parent = node.parent().unwrap(); | ||
67 | match parent.kind() { | ||
68 | IMPL_BLOCK => items::impl_item_list, | ||
69 | TRAIT_DEF => items::trait_item_list, | ||
70 | MODULE => items::mod_item_list, | ||
71 | _ => return None, | ||
72 | } | ||
73 | } | ||
74 | _ => return None, | ||
75 | }; | ||
76 | Some(res) | ||
77 | } | ||
78 | |||
79 | #[derive(Clone, Copy, PartialEq, Eq)] | ||
80 | enum BlockLike { | ||
81 | Block, | ||
82 | NotBlock, | ||
83 | } | ||
84 | |||
85 | impl BlockLike { | ||
86 | fn is_block(self) -> bool { | ||
87 | self == BlockLike::Block | ||
88 | } | ||
89 | } | ||
90 | |||
91 | fn opt_visibility(p: &mut Parser) { | ||
92 | match p.current() { | ||
93 | PUB_KW => { | ||
94 | let m = p.start(); | ||
95 | p.bump(); | ||
96 | if p.at(L_PAREN) { | ||
97 | match p.nth(1) { | ||
98 | // test crate_visibility | ||
99 | // pub(crate) struct S; | ||
100 | // pub(self) struct S; | ||
101 | // pub(self) struct S; | ||
102 | // pub(self) struct S; | ||
103 | CRATE_KW | SELF_KW | SUPER_KW => { | ||
104 | p.bump(); | ||
105 | p.bump(); | ||
106 | p.expect(R_PAREN); | ||
107 | } | ||
108 | IN_KW => { | ||
109 | p.bump(); | ||
110 | p.bump(); | ||
111 | paths::use_path(p); | ||
112 | p.expect(R_PAREN); | ||
113 | } | ||
114 | _ => (), | ||
115 | } | ||
116 | } | ||
117 | m.complete(p, VISIBILITY); | ||
118 | } | ||
119 | // test crate_keyword_vis | ||
120 | // crate fn main() { } | ||
121 | CRATE_KW => { | ||
122 | let m = p.start(); | ||
123 | p.bump(); | ||
124 | m.complete(p, VISIBILITY); | ||
125 | } | ||
126 | _ => (), | ||
127 | } | ||
128 | } | ||
129 | |||
130 | fn opt_alias(p: &mut Parser) { | ||
131 | if p.at(AS_KW) { | ||
132 | let m = p.start(); | ||
133 | p.bump(); | ||
134 | name(p); | ||
135 | m.complete(p, ALIAS); | ||
136 | } | ||
137 | } | ||
138 | |||
139 | fn abi(p: &mut Parser) { | ||
140 | assert!(p.at(EXTERN_KW)); | ||
141 | let abi = p.start(); | ||
142 | p.bump(); | ||
143 | match p.current() { | ||
144 | STRING | RAW_STRING => p.bump(), | ||
145 | _ => (), | ||
146 | } | ||
147 | abi.complete(p, ABI); | ||
148 | } | ||
149 | |||
150 | fn opt_fn_ret_type(p: &mut Parser) -> bool { | ||
151 | if p.at(THIN_ARROW) { | ||
152 | let m = p.start(); | ||
153 | p.bump(); | ||
154 | types::type_(p); | ||
155 | m.complete(p, RET_TYPE); | ||
156 | true | ||
157 | } else { | ||
158 | false | ||
159 | } | ||
160 | } | ||
161 | |||
162 | fn name_r(p: &mut Parser, recovery: TokenSet) { | ||
163 | if p.at(IDENT) { | ||
164 | let m = p.start(); | ||
165 | p.bump(); | ||
166 | m.complete(p, NAME); | ||
167 | } else { | ||
168 | p.err_recover("expected a name", recovery); | ||
169 | } | ||
170 | } | ||
171 | |||
172 | fn name(p: &mut Parser) { | ||
173 | name_r(p, TokenSet::empty()) | ||
174 | } | ||
175 | |||
176 | fn name_ref(p: &mut Parser) { | ||
177 | if p.at(IDENT) { | ||
178 | let m = p.start(); | ||
179 | p.bump(); | ||
180 | m.complete(p, NAME_REF); | ||
181 | } else { | ||
182 | p.err_and_bump("expected identifier"); | ||
183 | } | ||
184 | } | ||
185 | |||
186 | fn error_block(p: &mut Parser, message: &str) { | ||
187 | go(p, Some(message)); | ||
188 | fn go(p: &mut Parser, message: Option<&str>) { | ||
189 | assert!(p.at(L_CURLY)); | ||
190 | let m = p.start(); | ||
191 | if let Some(message) = message { | ||
192 | p.error(message); | ||
193 | } | ||
194 | p.bump(); | ||
195 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
196 | match p.current() { | ||
197 | L_CURLY => go(p, None), | ||
198 | _ => p.bump(), | ||
199 | } | ||
200 | } | ||
201 | p.eat(R_CURLY); | ||
202 | m.complete(p, ERROR); | ||
203 | } | ||
204 | } | ||
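The `BlockLike` enum defined above is how expression rules tell statement rules whether the expression they just parsed was block-like; `expressions::block` then accepts a missing `;` only after block-like expressions. A minimal illustration of the surface rule being modeled (plain Rust source, not parser code):

    fn demo() {
        if true {} // block-like expression statement: the parser only eats an optional `;`
        1 + 1;     // not block-like: the parser expects a `;`
    }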
diff --git a/crates/ra_syntax/src/parsing/grammar/attributes.rs b/crates/ra_syntax/src/parsing/grammar/attributes.rs deleted file mode 100644 index cd30e8a45..000000000 --- a/crates/ra_syntax/src/parsing/grammar/attributes.rs +++ /dev/null | |||
@@ -1,31 +0,0 @@ | |||
1 | use super::*; | ||
2 | |||
3 | pub(super) fn inner_attributes(p: &mut Parser) { | ||
4 | while p.current() == POUND && p.nth(1) == EXCL { | ||
5 | attribute(p, true) | ||
6 | } | ||
7 | } | ||
8 | |||
9 | pub(super) fn outer_attributes(p: &mut Parser) { | ||
10 | while p.at(POUND) { | ||
11 | attribute(p, false) | ||
12 | } | ||
13 | } | ||
14 | |||
15 | fn attribute(p: &mut Parser, inner: bool) { | ||
16 | let attr = p.start(); | ||
17 | assert!(p.at(POUND)); | ||
18 | p.bump(); | ||
19 | |||
20 | if inner { | ||
21 | assert!(p.at(EXCL)); | ||
22 | p.bump(); | ||
23 | } | ||
24 | |||
25 | if p.at(L_BRACK) { | ||
26 | items::token_tree(p); | ||
27 | } else { | ||
28 | p.error("expected `[`"); | ||
29 | } | ||
30 | attr.complete(p, ATTR); | ||
31 | } | ||
diff --git a/crates/ra_syntax/src/parsing/grammar/expressions.rs b/crates/ra_syntax/src/parsing/grammar/expressions.rs deleted file mode 100644 index d5a4f4d7b..000000000 --- a/crates/ra_syntax/src/parsing/grammar/expressions.rs +++ /dev/null | |||
@@ -1,473 +0,0 @@ | |||
1 | mod atom; | ||
2 | |||
3 | pub(crate) use self::atom::match_arm_list; | ||
4 | pub(super) use self::atom::{literal, LITERAL_FIRST}; | ||
5 | use super::*; | ||
6 | |||
7 | const EXPR_FIRST: TokenSet = LHS_FIRST; | ||
8 | |||
9 | pub(super) fn expr(p: &mut Parser) -> BlockLike { | ||
10 | let r = Restrictions { forbid_structs: false, prefer_stmt: false }; | ||
11 | expr_bp(p, r, 1) | ||
12 | } | ||
13 | |||
14 | pub(super) fn expr_stmt(p: &mut Parser) -> BlockLike { | ||
15 | let r = Restrictions { forbid_structs: false, prefer_stmt: true }; | ||
16 | expr_bp(p, r, 1) | ||
17 | } | ||
18 | |||
19 | fn expr_no_struct(p: &mut Parser) { | ||
20 | let r = Restrictions { forbid_structs: true, prefer_stmt: false }; | ||
21 | expr_bp(p, r, 1); | ||
22 | } | ||
23 | |||
24 | // test block | ||
25 | // fn a() {} | ||
26 | // fn b() { let _ = 1; } | ||
27 | // fn c() { 1; 2; } | ||
28 | // fn d() { 1; 2 } | ||
29 | pub(crate) fn block(p: &mut Parser) { | ||
30 | if !p.at(L_CURLY) { | ||
31 | p.error("expected a block"); | ||
32 | return; | ||
33 | } | ||
34 | let m = p.start(); | ||
35 | p.bump(); | ||
36 | // This is checked by a validator | ||
37 | attributes::inner_attributes(p); | ||
38 | |||
39 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
40 | match p.current() { | ||
41 | // test nocontentexpr | ||
42 | // fn foo(){ | ||
43 | // ;;;some_expr();;;;{;;;};;;;Ok(()) | ||
44 | // } | ||
45 | SEMI => p.bump(), | ||
46 | _ => { | ||
47 | // test block_items | ||
48 | // fn a() { fn b() {} } | ||
49 | let m = p.start(); | ||
50 | let has_attrs = p.at(POUND); | ||
51 | attributes::outer_attributes(p); | ||
52 | if p.at(LET_KW) { | ||
53 | let_stmt(p, m); | ||
54 | } else { | ||
55 | match items::maybe_item(p, items::ItemFlavor::Mod) { | ||
56 | items::MaybeItem::Item(kind) => { | ||
57 | m.complete(p, kind); | ||
58 | } | ||
59 | items::MaybeItem::Modifiers => { | ||
60 | m.abandon(p); | ||
61 | p.error("expected an item"); | ||
62 | } | ||
63 | // test pub_expr | ||
64 | // fn foo() { pub 92; } //FIXME | ||
65 | items::MaybeItem::None => { | ||
66 | if has_attrs { | ||
67 | m.abandon(p); | ||
68 | p.error( | ||
69 | "expected a let statement or an item after attributes in block", | ||
70 | ); | ||
71 | } else { | ||
72 | let is_blocklike = expressions::expr_stmt(p) == BlockLike::Block; | ||
73 | if p.at(R_CURLY) { | ||
74 | m.abandon(p); | ||
75 | } else { | ||
76 | // test no_semi_after_block | ||
77 | // fn foo() { | ||
78 | // if true {} | ||
79 | // loop {} | ||
80 | // match () {} | ||
81 | // while true {} | ||
82 | // for _ in () {} | ||
83 | // {} | ||
84 | // {} | ||
85 | // macro_rules! test { | ||
86 | // () => {} | ||
87 | // } | ||
88 | // test!{} | ||
89 | // } | ||
90 | if is_blocklike { | ||
91 | p.eat(SEMI); | ||
92 | } else { | ||
93 | p.expect(SEMI); | ||
94 | } | ||
95 | m.complete(p, EXPR_STMT); | ||
96 | } | ||
97 | } | ||
98 | } | ||
99 | } | ||
100 | } | ||
101 | } | ||
102 | } | ||
103 | } | ||
104 | p.expect(R_CURLY); | ||
105 | m.complete(p, BLOCK); | ||
106 | |||
107 | // test let_stmt | ||
108 | // fn foo() { | ||
109 | // let a; | ||
110 | // let b: i32; | ||
111 | // let c = 92; | ||
112 | // let d: i32 = 92; | ||
113 | // } | ||
114 | fn let_stmt(p: &mut Parser, m: Marker) { | ||
115 | assert!(p.at(LET_KW)); | ||
116 | p.bump(); | ||
117 | patterns::pattern(p); | ||
118 | if p.at(COLON) { | ||
119 | types::ascription(p); | ||
120 | } | ||
121 | if p.eat(EQ) { | ||
122 | expressions::expr(p); | ||
123 | } | ||
124 | p.expect(SEMI); | ||
125 | m.complete(p, LET_STMT); | ||
126 | } | ||
127 | } | ||
128 | |||
129 | #[derive(Clone, Copy)] | ||
130 | struct Restrictions { | ||
131 | forbid_structs: bool, | ||
132 | prefer_stmt: bool, | ||
133 | } | ||
134 | |||
135 | enum Op { | ||
136 | Simple, | ||
137 | Composite(SyntaxKind, u8), | ||
138 | } | ||
139 | |||
140 | fn current_op(p: &Parser) -> (u8, Op) { | ||
141 | if let Some(t) = p.current3() { | ||
142 | match t { | ||
143 | (L_ANGLE, L_ANGLE, EQ) => return (1, Op::Composite(SHLEQ, 3)), | ||
144 | (R_ANGLE, R_ANGLE, EQ) => return (1, Op::Composite(SHREQ, 3)), | ||
145 | _ => (), | ||
146 | } | ||
147 | } | ||
148 | |||
149 | if let Some(t) = p.current2() { | ||
150 | match t { | ||
151 | (PLUS, EQ) => return (1, Op::Composite(PLUSEQ, 2)), | ||
152 | (MINUS, EQ) => return (1, Op::Composite(MINUSEQ, 2)), | ||
153 | (STAR, EQ) => return (1, Op::Composite(STAREQ, 2)), | ||
154 | (SLASH, EQ) => return (1, Op::Composite(SLASHEQ, 2)), | ||
155 | (PIPE, EQ) => return (1, Op::Composite(PIPEEQ, 2)), | ||
156 | (AMP, EQ) => return (1, Op::Composite(AMPEQ, 2)), | ||
157 | (CARET, EQ) => return (1, Op::Composite(CARETEQ, 2)), | ||
158 | (PIPE, PIPE) => return (3, Op::Composite(PIPEPIPE, 2)), | ||
159 | (AMP, AMP) => return (4, Op::Composite(AMPAMP, 2)), | ||
160 | (L_ANGLE, EQ) => return (5, Op::Composite(LTEQ, 2)), | ||
161 | (R_ANGLE, EQ) => return (5, Op::Composite(GTEQ, 2)), | ||
162 | (L_ANGLE, L_ANGLE) => return (9, Op::Composite(SHL, 2)), | ||
163 | (R_ANGLE, R_ANGLE) => return (9, Op::Composite(SHR, 2)), | ||
164 | _ => (), | ||
165 | } | ||
166 | } | ||
167 | |||
168 | let bp = match p.current() { | ||
169 | EQ => 1, | ||
170 | DOTDOT | DOTDOTEQ => 2, | ||
171 | EQEQ | NEQ | L_ANGLE | R_ANGLE => 5, | ||
172 | PIPE => 6, | ||
173 | CARET => 7, | ||
174 | AMP => 8, | ||
175 | MINUS | PLUS => 10, | ||
176 | STAR | SLASH | PERCENT => 11, | ||
177 | _ => 0, | ||
178 | }; | ||
179 | (bp, Op::Simple) | ||
180 | } | ||
181 | |||
182 | // Parses expression with binding power of at least bp. | ||
183 | fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> BlockLike { | ||
184 | let mut lhs = match lhs(p, r) { | ||
185 | Some((lhs, blocklike)) => { | ||
186 | // test stmt_bin_expr_ambiguity | ||
187 | // fn foo() { | ||
188 | // let _ = {1} & 2; | ||
189 | // {1} &2; | ||
190 | // } | ||
191 | if r.prefer_stmt && blocklike.is_block() { | ||
192 | return BlockLike::Block; | ||
193 | } | ||
194 | lhs | ||
195 | } | ||
196 | None => return BlockLike::NotBlock, | ||
197 | }; | ||
198 | |||
199 | loop { | ||
200 | let is_range = p.current() == DOTDOT || p.current() == DOTDOTEQ; | ||
201 | let (op_bp, op) = current_op(p); | ||
202 | if op_bp < bp { | ||
203 | break; | ||
204 | } | ||
205 | let m = lhs.precede(p); | ||
206 | match op { | ||
207 | Op::Simple => p.bump(), | ||
208 | Op::Composite(kind, n) => { | ||
209 | p.bump_compound(kind, n); | ||
210 | } | ||
211 | } | ||
212 | expr_bp(p, r, op_bp + 1); | ||
213 | lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR }); | ||
214 | } | ||
215 | BlockLike::NotBlock | ||
216 | } | ||
217 | |||
218 | const LHS_FIRST: TokenSet = | ||
219 | atom::ATOM_EXPR_FIRST.union(token_set![AMP, STAR, EXCL, DOTDOT, DOTDOTEQ, MINUS]); | ||
220 | |||
221 | fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> { | ||
222 | let m; | ||
223 | let kind = match p.current() { | ||
224 | // test ref_expr | ||
225 | // fn foo() { | ||
226 | // let _ = &1; | ||
227 | // let _ = &mut &f(); | ||
228 | // } | ||
229 | AMP => { | ||
230 | m = p.start(); | ||
231 | p.bump(); | ||
232 | p.eat(MUT_KW); | ||
233 | REF_EXPR | ||
234 | } | ||
235 | // test unary_expr | ||
236 | // fn foo() { | ||
237 | // **&1; | ||
238 | // !!true; | ||
239 | // --1; | ||
240 | // } | ||
241 | STAR | EXCL | MINUS => { | ||
242 | m = p.start(); | ||
243 | p.bump(); | ||
244 | PREFIX_EXPR | ||
245 | } | ||
246 | // test full_range_expr | ||
247 | // fn foo() { xs[..]; } | ||
248 | DOTDOT | DOTDOTEQ => { | ||
249 | m = p.start(); | ||
250 | p.bump(); | ||
251 | if p.at_ts(EXPR_FIRST) { | ||
252 | expr_bp(p, r, 2); | ||
253 | } | ||
254 | return Some((m.complete(p, RANGE_EXPR), BlockLike::NotBlock)); | ||
255 | } | ||
256 | _ => { | ||
257 | let (lhs, blocklike) = atom::atom_expr(p, r)?; | ||
258 | return Some(( | ||
259 | postfix_expr(p, lhs, !(r.prefer_stmt && blocklike.is_block())), | ||
260 | blocklike, | ||
261 | )); | ||
262 | } | ||
263 | }; | ||
264 | expr_bp(p, r, 255); | ||
265 | Some((m.complete(p, kind), BlockLike::NotBlock)) | ||
266 | } | ||
267 | |||
268 | fn postfix_expr( | ||
269 | p: &mut Parser, | ||
270 | mut lhs: CompletedMarker, | ||
271 | // Calls are disallowed if the lhs is block-like and we prefer statements, | ||
272 | // because the call cannot be disambiguated from a tuple. | ||
273 | // E.g. `while true {break}();` is parsed as `while true {break}; ();` | ||
274 | mut allow_calls: bool, | ||
275 | ) -> CompletedMarker { | ||
276 | loop { | ||
277 | lhs = match p.current() { | ||
278 | // test stmt_postfix_expr_ambiguity | ||
279 | // fn foo() { | ||
280 | // match () { | ||
281 | // _ => {} | ||
282 | // () => {} | ||
283 | // [] => {} | ||
284 | // } | ||
285 | // } | ||
286 | L_PAREN if allow_calls => call_expr(p, lhs), | ||
287 | L_BRACK if allow_calls => index_expr(p, lhs), | ||
288 | DOT if p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON) => { | ||
289 | method_call_expr(p, lhs) | ||
290 | } | ||
291 | DOT => field_expr(p, lhs), | ||
292 | // test postfix_range | ||
293 | // fn foo() { let x = 1..; } | ||
294 | DOTDOT | DOTDOTEQ if !EXPR_FIRST.contains(p.nth(1)) => { | ||
295 | let m = lhs.precede(p); | ||
296 | p.bump(); | ||
297 | m.complete(p, RANGE_EXPR) | ||
298 | } | ||
299 | QUESTION => try_expr(p, lhs), | ||
300 | AS_KW => cast_expr(p, lhs), | ||
301 | _ => break, | ||
302 | }; | ||
303 | allow_calls = true | ||
304 | } | ||
305 | lhs | ||
306 | } | ||
307 | |||
308 | // test call_expr | ||
309 | // fn foo() { | ||
310 | // let _ = f(); | ||
311 | // let _ = f()(1)(1, 2,); | ||
312 | // let _ = f(<Foo>::func()); | ||
313 | // f(<Foo as Trait>::func()); | ||
314 | // } | ||
315 | fn call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | ||
316 | assert!(p.at(L_PAREN)); | ||
317 | let m = lhs.precede(p); | ||
318 | arg_list(p); | ||
319 | m.complete(p, CALL_EXPR) | ||
320 | } | ||
321 | |||
322 | // test index_expr | ||
323 | // fn foo() { | ||
324 | // x[1][2]; | ||
325 | // } | ||
326 | fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | ||
327 | assert!(p.at(L_BRACK)); | ||
328 | let m = lhs.precede(p); | ||
329 | p.bump(); | ||
330 | expr(p); | ||
331 | p.expect(R_BRACK); | ||
332 | m.complete(p, INDEX_EXPR) | ||
333 | } | ||
334 | |||
335 | // test method_call_expr | ||
336 | // fn foo() { | ||
337 | // x.foo(); | ||
338 | // y.bar::<T>(1, 2,); | ||
339 | // } | ||
340 | fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | ||
341 | assert!(p.at(DOT) && p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON)); | ||
342 | let m = lhs.precede(p); | ||
343 | p.bump(); | ||
344 | name_ref(p); | ||
345 | type_args::opt_type_arg_list(p, true); | ||
346 | if p.at(L_PAREN) { | ||
347 | arg_list(p); | ||
348 | } | ||
349 | m.complete(p, METHOD_CALL_EXPR) | ||
350 | } | ||
351 | |||
352 | // test field_expr | ||
353 | // fn foo() { | ||
354 | // x.foo; | ||
355 | // x.0.bar; | ||
356 | // } | ||
357 | fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | ||
358 | assert!(p.at(DOT)); | ||
359 | let m = lhs.precede(p); | ||
360 | p.bump(); | ||
361 | if p.at(IDENT) { | ||
362 | name_ref(p) | ||
363 | } else if p.at(INT_NUMBER) { | ||
364 | p.bump() | ||
365 | } else { | ||
366 | p.error("expected field name or number") | ||
367 | } | ||
368 | m.complete(p, FIELD_EXPR) | ||
369 | } | ||
370 | |||
371 | // test try_expr | ||
372 | // fn foo() { | ||
373 | // x?; | ||
374 | // } | ||
375 | fn try_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | ||
376 | assert!(p.at(QUESTION)); | ||
377 | let m = lhs.precede(p); | ||
378 | p.bump(); | ||
379 | m.complete(p, TRY_EXPR) | ||
380 | } | ||
381 | |||
382 | // test cast_expr | ||
383 | // fn foo() { | ||
384 | // 82 as i32; | ||
385 | // 81 as i8 + 1; | ||
386 | // 79 as i16 - 1; | ||
387 | // } | ||
388 | fn cast_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | ||
389 | assert!(p.at(AS_KW)); | ||
390 | let m = lhs.precede(p); | ||
391 | p.bump(); | ||
392 | // Use type_no_bounds(), because cast expressions are not | ||
393 | // allowed to have bounds. | ||
394 | types::type_no_bounds(p); | ||
395 | m.complete(p, CAST_EXPR) | ||
396 | } | ||
397 | |||
398 | fn arg_list(p: &mut Parser) { | ||
399 | assert!(p.at(L_PAREN)); | ||
400 | let m = p.start(); | ||
401 | p.bump(); | ||
402 | while !p.at(R_PAREN) && !p.at(EOF) { | ||
403 | if !p.at_ts(EXPR_FIRST) { | ||
404 | p.error("expected expression"); | ||
405 | break; | ||
406 | } | ||
407 | expr(p); | ||
408 | if !p.at(R_PAREN) && !p.expect(COMMA) { | ||
409 | break; | ||
410 | } | ||
411 | } | ||
412 | p.eat(R_PAREN); | ||
413 | m.complete(p, ARG_LIST); | ||
414 | } | ||
415 | |||
416 | // test path_expr | ||
417 | // fn foo() { | ||
418 | // let _ = a; | ||
419 | // let _ = a::b; | ||
420 | // let _ = ::a::<b>; | ||
421 | // let _ = format!(); | ||
422 | // } | ||
423 | fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) { | ||
424 | assert!(paths::is_path_start(p) || p.at(L_ANGLE)); | ||
425 | let m = p.start(); | ||
426 | paths::expr_path(p); | ||
427 | match p.current() { | ||
428 | L_CURLY if !r.forbid_structs => { | ||
429 | named_field_list(p); | ||
430 | (m.complete(p, STRUCT_LIT), BlockLike::NotBlock) | ||
431 | } | ||
432 | EXCL => { | ||
433 | let block_like = items::macro_call_after_excl(p); | ||
434 | return (m.complete(p, MACRO_CALL), block_like); | ||
435 | } | ||
436 | _ => (m.complete(p, PATH_EXPR), BlockLike::NotBlock), | ||
437 | } | ||
438 | } | ||
439 | |||
440 | // test struct_lit | ||
441 | // fn foo() { | ||
442 | // S {}; | ||
443 | // S { x, y: 32, }; | ||
444 | // S { x, y: 32, ..Default::default() }; | ||
445 | // } | ||
446 | pub(crate) fn named_field_list(p: &mut Parser) { | ||
447 | assert!(p.at(L_CURLY)); | ||
448 | let m = p.start(); | ||
449 | p.bump(); | ||
450 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
451 | match p.current() { | ||
452 | IDENT => { | ||
453 | let m = p.start(); | ||
454 | name_ref(p); | ||
455 | if p.eat(COLON) { | ||
456 | expr(p); | ||
457 | } | ||
458 | m.complete(p, NAMED_FIELD); | ||
459 | } | ||
460 | DOTDOT => { | ||
461 | p.bump(); | ||
462 | expr(p); | ||
463 | } | ||
464 | L_CURLY => error_block(p, "expected a field"), | ||
465 | _ => p.err_and_bump("expected identifier"), | ||
466 | } | ||
467 | if !p.at(R_CURLY) { | ||
468 | p.expect(COMMA); | ||
469 | } | ||
470 | } | ||
471 | p.expect(R_CURLY); | ||
472 | m.complete(p, NAMED_FIELD_LIST); | ||
473 | } | ||
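`current_op` and `expr_bp` above form a binding-power (Pratt-style) loop: the caller passes the minimum binding power it is willing to accept, and recursing with `op_bp + 1` makes operators of equal precedence group to the left. A standalone sketch of the same shape, stripped down to integer arithmetic (names and toy grammar of my own; not the deleted code):

    fn binding_power(op: char) -> u8 {
        match op {
            '+' | '-' => 10,
            '*' | '/' => 11,
            _ => 0, // not an operator: weaker than any caller, so the loop stops
        }
    }

    fn expr_bp(tokens: &mut std::iter::Peekable<std::str::Chars<'_>>, min_bp: u8) -> i64 {
        // lhs is a single digit in this toy grammar
        let mut lhs = tokens.next().unwrap().to_digit(10).unwrap() as i64;
        loop {
            let op = match tokens.peek() {
                Some(&c) if binding_power(c) >= min_bp => c,
                _ => break,
            };
            tokens.next();
            // `+ 1` mirrors `expr_bp(p, r, op_bp + 1)`: equal precedence binds to the left
            let rhs = expr_bp(tokens, binding_power(op) + 1);
            lhs = match op {
                '+' => lhs + rhs,
                '-' => lhs - rhs,
                '*' => lhs * rhs,
                '/' => lhs / rhs,
                _ => unreachable!(),
            };
        }
        lhs
    }

    fn main() {
        assert_eq!(expr_bp(&mut "1+2*3".chars().peekable(), 1), 7); // `*` binds tighter
        assert_eq!(expr_bp(&mut "8-3-2".chars().peekable(), 1), 3); // left-associative
    }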
diff --git a/crates/ra_syntax/src/parsing/grammar/expressions/atom.rs b/crates/ra_syntax/src/parsing/grammar/expressions/atom.rs deleted file mode 100644 index e74305b6a..000000000 --- a/crates/ra_syntax/src/parsing/grammar/expressions/atom.rs +++ /dev/null | |||
@@ -1,475 +0,0 @@ | |||
1 | use super::*; | ||
2 | |||
3 | // test expr_literals | ||
4 | // fn foo() { | ||
5 | // let _ = true; | ||
6 | // let _ = false; | ||
7 | // let _ = 1; | ||
8 | // let _ = 2.0; | ||
9 | // let _ = b'a'; | ||
10 | // let _ = 'b'; | ||
11 | // let _ = "c"; | ||
12 | // let _ = r"d"; | ||
13 | // let _ = b"e"; | ||
14 | // let _ = br"f"; | ||
15 | // } | ||
16 | pub(crate) const LITERAL_FIRST: TokenSet = token_set![ | ||
17 | TRUE_KW, | ||
18 | FALSE_KW, | ||
19 | INT_NUMBER, | ||
20 | FLOAT_NUMBER, | ||
21 | BYTE, | ||
22 | CHAR, | ||
23 | STRING, | ||
24 | RAW_STRING, | ||
25 | BYTE_STRING, | ||
26 | RAW_BYTE_STRING | ||
27 | ]; | ||
28 | |||
29 | pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> { | ||
30 | if !p.at_ts(LITERAL_FIRST) { | ||
31 | return None; | ||
32 | } | ||
33 | let m = p.start(); | ||
34 | p.bump(); | ||
35 | Some(m.complete(p, LITERAL)) | ||
36 | } | ||
37 | |||
38 | // E.g. after the break in `if break {}`, this should not match | ||
39 | pub(super) const ATOM_EXPR_FIRST: TokenSet = | ||
40 | LITERAL_FIRST.union(paths::PATH_FIRST).union(token_set![ | ||
41 | L_PAREN, | ||
42 | L_CURLY, | ||
43 | L_BRACK, | ||
44 | PIPE, | ||
45 | MOVE_KW, | ||
46 | IF_KW, | ||
47 | WHILE_KW, | ||
48 | MATCH_KW, | ||
49 | UNSAFE_KW, | ||
50 | RETURN_KW, | ||
51 | BREAK_KW, | ||
52 | CONTINUE_KW, | ||
53 | LIFETIME, | ||
54 | ]); | ||
55 | |||
56 | const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW]; | ||
57 | |||
58 | pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> { | ||
59 | if let Some(m) = literal(p) { | ||
60 | return Some((m, BlockLike::NotBlock)); | ||
61 | } | ||
62 | if paths::is_path_start(p) || p.at(L_ANGLE) { | ||
63 | return Some(path_expr(p, r)); | ||
64 | } | ||
65 | let la = p.nth(1); | ||
66 | let done = match p.current() { | ||
67 | L_PAREN => tuple_expr(p), | ||
68 | L_BRACK => array_expr(p), | ||
69 | PIPE => lambda_expr(p), | ||
70 | MOVE_KW if la == PIPE => lambda_expr(p), | ||
71 | IF_KW => if_expr(p), | ||
72 | |||
73 | LOOP_KW => loop_expr(p, None), | ||
74 | FOR_KW => for_expr(p, None), | ||
75 | WHILE_KW => while_expr(p, None), | ||
76 | LIFETIME if la == COLON => { | ||
77 | let m = p.start(); | ||
78 | label(p); | ||
79 | match p.current() { | ||
80 | LOOP_KW => loop_expr(p, Some(m)), | ||
81 | FOR_KW => for_expr(p, Some(m)), | ||
82 | WHILE_KW => while_expr(p, Some(m)), | ||
83 | L_CURLY => block_expr(p, Some(m)), | ||
84 | _ => { | ||
85 | // test_err misplaced_label_err | ||
86 | // fn main() { | ||
87 | // 'loop: impl | ||
88 | // } | ||
89 | p.error("expected a loop"); | ||
90 | m.complete(p, ERROR); | ||
91 | return None; | ||
92 | } | ||
93 | } | ||
94 | } | ||
95 | |||
96 | MATCH_KW => match_expr(p), | ||
97 | UNSAFE_KW if la == L_CURLY => { | ||
98 | let m = p.start(); | ||
99 | p.bump(); | ||
100 | block_expr(p, Some(m)) | ||
101 | } | ||
102 | L_CURLY => block_expr(p, None), | ||
103 | RETURN_KW => return_expr(p), | ||
104 | CONTINUE_KW => continue_expr(p), | ||
105 | BREAK_KW => break_expr(p, r), | ||
106 | _ => { | ||
107 | p.err_recover("expected expression", EXPR_RECOVERY_SET); | ||
108 | return None; | ||
109 | } | ||
110 | }; | ||
111 | let blocklike = match done.kind() { | ||
112 | IF_EXPR | WHILE_EXPR | FOR_EXPR | LOOP_EXPR | MATCH_EXPR | BLOCK_EXPR => BlockLike::Block, | ||
113 | _ => BlockLike::NotBlock, | ||
114 | }; | ||
115 | Some((done, blocklike)) | ||
116 | } | ||
117 | |||
118 | // test tuple_expr | ||
119 | // fn foo() { | ||
120 | // (); | ||
121 | // (1); | ||
122 | // (1,); | ||
123 | // } | ||
124 | fn tuple_expr(p: &mut Parser) -> CompletedMarker { | ||
125 | assert!(p.at(L_PAREN)); | ||
126 | let m = p.start(); | ||
127 | p.expect(L_PAREN); | ||
128 | |||
129 | let mut saw_comma = false; | ||
130 | let mut saw_expr = false; | ||
131 | while !p.at(EOF) && !p.at(R_PAREN) { | ||
132 | saw_expr = true; | ||
133 | if !p.at_ts(EXPR_FIRST) { | ||
134 | p.error("expected expression"); | ||
135 | break; | ||
136 | } | ||
137 | expr(p); | ||
138 | if !p.at(R_PAREN) { | ||
139 | saw_comma = true; | ||
140 | p.expect(COMMA); | ||
141 | } | ||
142 | } | ||
143 | p.expect(R_PAREN); | ||
144 | m.complete(p, if saw_expr && !saw_comma { PAREN_EXPR } else { TUPLE_EXPR }) | ||
145 | } | ||
146 | |||
147 | // test array_expr | ||
148 | // fn foo() { | ||
149 | // []; | ||
150 | // [1]; | ||
151 | // [1, 2,]; | ||
152 | // [1; 2]; | ||
153 | // } | ||
154 | fn array_expr(p: &mut Parser) -> CompletedMarker { | ||
155 | assert!(p.at(L_BRACK)); | ||
156 | let m = p.start(); | ||
157 | p.bump(); | ||
158 | if p.eat(R_BRACK) { | ||
159 | return m.complete(p, ARRAY_EXPR); | ||
160 | } | ||
161 | expr(p); | ||
162 | if p.eat(SEMI) { | ||
163 | expr(p); | ||
164 | p.expect(R_BRACK); | ||
165 | return m.complete(p, ARRAY_EXPR); | ||
166 | } | ||
167 | while !p.at(EOF) && !p.at(R_BRACK) { | ||
168 | p.expect(COMMA); | ||
169 | if p.at(R_BRACK) { | ||
170 | break; | ||
171 | } | ||
172 | if !p.at_ts(EXPR_FIRST) { | ||
173 | p.error("expected expression"); | ||
174 | break; | ||
175 | } | ||
176 | expr(p); | ||
177 | } | ||
178 | p.expect(R_BRACK); | ||
179 | m.complete(p, ARRAY_EXPR) | ||
180 | } | ||
181 | |||
182 | // test lambda_expr | ||
183 | // fn foo() { | ||
184 | // || (); | ||
185 | // || -> i32 { 92 }; | ||
186 | // |x| x; | ||
187 | // move |x: i32,| x; | ||
188 | // } | ||
189 | fn lambda_expr(p: &mut Parser) -> CompletedMarker { | ||
190 | assert!(p.at(PIPE) || (p.at(MOVE_KW) && p.nth(1) == PIPE)); | ||
191 | let m = p.start(); | ||
192 | p.eat(MOVE_KW); | ||
193 | params::param_list_opt_types(p); | ||
194 | if opt_fn_ret_type(p) { | ||
195 | if !p.at(L_CURLY) { | ||
196 | p.error("expected `{`"); | ||
197 | } | ||
198 | } | ||
199 | expr(p); | ||
200 | m.complete(p, LAMBDA_EXPR) | ||
201 | } | ||
202 | |||
203 | // test if_expr | ||
204 | // fn foo() { | ||
205 | // if true {}; | ||
206 | // if true {} else {}; | ||
207 | // if true {} else if false {} else {}; | ||
208 | // if S {}; | ||
209 | // } | ||
210 | fn if_expr(p: &mut Parser) -> CompletedMarker { | ||
211 | assert!(p.at(IF_KW)); | ||
212 | let m = p.start(); | ||
213 | p.bump(); | ||
214 | cond(p); | ||
215 | block(p); | ||
216 | if p.at(ELSE_KW) { | ||
217 | p.bump(); | ||
218 | if p.at(IF_KW) { | ||
219 | if_expr(p); | ||
220 | } else { | ||
221 | block(p); | ||
222 | } | ||
223 | } | ||
224 | m.complete(p, IF_EXPR) | ||
225 | } | ||
226 | |||
227 | // test label | ||
228 | // fn foo() { | ||
229 | // 'a: loop {} | ||
230 | // 'b: while true {} | ||
231 | // 'c: for x in () {} | ||
232 | // } | ||
233 | fn label(p: &mut Parser) { | ||
234 | assert!(p.at(LIFETIME) && p.nth(1) == COLON); | ||
235 | let m = p.start(); | ||
236 | p.bump(); | ||
237 | p.bump(); | ||
238 | m.complete(p, LABEL); | ||
239 | } | ||
240 | |||
241 | // test loop_expr | ||
242 | // fn foo() { | ||
243 | // loop {}; | ||
244 | // } | ||
245 | fn loop_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | ||
246 | assert!(p.at(LOOP_KW)); | ||
247 | let m = m.unwrap_or_else(|| p.start()); | ||
248 | p.bump(); | ||
249 | block(p); | ||
250 | m.complete(p, LOOP_EXPR) | ||
251 | } | ||
252 | |||
253 | // test while_expr | ||
254 | // fn foo() { | ||
255 | // while true {}; | ||
256 | // while let Some(x) = it.next() {}; | ||
257 | // } | ||
258 | fn while_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | ||
259 | assert!(p.at(WHILE_KW)); | ||
260 | let m = m.unwrap_or_else(|| p.start()); | ||
261 | p.bump(); | ||
262 | cond(p); | ||
263 | block(p); | ||
264 | m.complete(p, WHILE_EXPR) | ||
265 | } | ||
266 | |||
267 | // test for_expr | ||
268 | // fn foo() { | ||
269 | // for x in [] {}; | ||
270 | // } | ||
271 | fn for_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | ||
272 | assert!(p.at(FOR_KW)); | ||
273 | let m = m.unwrap_or_else(|| p.start()); | ||
274 | p.bump(); | ||
275 | patterns::pattern(p); | ||
276 | p.expect(IN_KW); | ||
277 | expr_no_struct(p); | ||
278 | block(p); | ||
279 | m.complete(p, FOR_EXPR) | ||
280 | } | ||
281 | |||
282 | // test cond | ||
283 | // fn foo() { if let Some(_) = None {} } | ||
284 | fn cond(p: &mut Parser) { | ||
285 | let m = p.start(); | ||
286 | if p.eat(LET_KW) { | ||
287 | patterns::pattern(p); | ||
288 | p.expect(EQ); | ||
289 | } | ||
290 | expr_no_struct(p); | ||
291 | m.complete(p, CONDITION); | ||
292 | } | ||
293 | |||
294 | // test match_expr | ||
295 | // fn foo() { | ||
296 | // match () { }; | ||
297 | // match S {}; | ||
298 | // } | ||
299 | fn match_expr(p: &mut Parser) -> CompletedMarker { | ||
300 | assert!(p.at(MATCH_KW)); | ||
301 | let m = p.start(); | ||
302 | p.bump(); | ||
303 | expr_no_struct(p); | ||
304 | if p.at(L_CURLY) { | ||
305 | match_arm_list(p); | ||
306 | } else { | ||
307 | p.error("expected `{`") | ||
308 | } | ||
309 | m.complete(p, MATCH_EXPR) | ||
310 | } | ||
311 | |||
312 | pub(crate) fn match_arm_list(p: &mut Parser) { | ||
313 | assert!(p.at(L_CURLY)); | ||
314 | let m = p.start(); | ||
315 | p.eat(L_CURLY); | ||
316 | |||
317 | // test match_arms_inner_attribute | ||
318 | // fn foo() { | ||
319 | // match () { | ||
320 | // #![doc("Inner attribute")] | ||
321 | // #![doc("Can be")] | ||
322 | // #![doc("Stacked")] | ||
323 | // _ => (), | ||
324 | // } | ||
325 | // } | ||
326 | attributes::inner_attributes(p); | ||
327 | |||
328 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
329 | if p.at(L_CURLY) { | ||
330 | error_block(p, "expected match arm"); | ||
331 | continue; | ||
332 | } | ||
333 | |||
334 | // test match_arms_outer_attributes | ||
335 | // fn foo() { | ||
336 | // match () { | ||
337 | // #[cfg(feature = "some")] | ||
338 | // _ => (), | ||
339 | // #[cfg(feature = "other")] | ||
340 | // _ => (), | ||
341 | // #[cfg(feature = "many")] | ||
342 | // #[cfg(feature = "attributes")] | ||
343 | // #[cfg(feature = "before")] | ||
344 | // _ => (), | ||
345 | // } | ||
346 | // } | ||
347 | attributes::outer_attributes(p); | ||
348 | |||
349 | // test match_arms_commas | ||
350 | // fn foo() { | ||
351 | // match () { | ||
352 | // _ => (), | ||
353 | // _ => {} | ||
354 | // _ => () | ||
355 | // } | ||
356 | // } | ||
357 | if match_arm(p).is_block() { | ||
358 | p.eat(COMMA); | ||
359 | } else if !p.at(R_CURLY) { | ||
360 | p.expect(COMMA); | ||
361 | } | ||
362 | } | ||
363 | p.expect(R_CURLY); | ||
364 | m.complete(p, MATCH_ARM_LIST); | ||
365 | } | ||
366 | |||
367 | // test match_arm | ||
368 | // fn foo() { | ||
369 | // match () { | ||
370 | // _ => (), | ||
371 | // _ if Test > Test{field: 0} => (), | ||
372 | // X | Y if Z => (), | ||
373 | // | X | Y if Z => (), | ||
374 | // | X => (), | ||
375 | // }; | ||
376 | // } | ||
377 | fn match_arm(p: &mut Parser) -> BlockLike { | ||
378 | let m = p.start(); | ||
379 | p.eat(PIPE); | ||
380 | patterns::pattern_r(p, TokenSet::empty()); | ||
381 | while p.eat(PIPE) { | ||
382 | patterns::pattern(p); | ||
383 | } | ||
384 | if p.at(IF_KW) { | ||
385 | match_guard(p); | ||
386 | } | ||
387 | p.expect(FAT_ARROW); | ||
388 | let ret = expr_stmt(p); | ||
389 | m.complete(p, MATCH_ARM); | ||
390 | ret | ||
391 | } | ||
392 | |||
393 | // test match_guard | ||
394 | // fn foo() { | ||
395 | // match () { | ||
396 | // _ if foo => (), | ||
397 | // } | ||
398 | // } | ||
399 | fn match_guard(p: &mut Parser) -> CompletedMarker { | ||
400 | assert!(p.at(IF_KW)); | ||
401 | let m = p.start(); | ||
402 | p.bump(); | ||
403 | expr(p); | ||
404 | m.complete(p, MATCH_GUARD) | ||
405 | } | ||
406 | |||
407 | // test block_expr | ||
408 | // fn foo() { | ||
409 | // {}; | ||
410 | // unsafe {}; | ||
411 | // 'label: {}; | ||
412 | // } | ||
413 | fn block_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | ||
414 | assert!(p.at(L_CURLY)); | ||
415 | let m = m.unwrap_or_else(|| p.start()); | ||
416 | block(p); | ||
417 | m.complete(p, BLOCK_EXPR) | ||
418 | } | ||
419 | |||
420 | // test return_expr | ||
421 | // fn foo() { | ||
422 | // return; | ||
423 | // return 92; | ||
424 | // } | ||
425 | fn return_expr(p: &mut Parser) -> CompletedMarker { | ||
426 | assert!(p.at(RETURN_KW)); | ||
427 | let m = p.start(); | ||
428 | p.bump(); | ||
429 | if p.at_ts(EXPR_FIRST) { | ||
430 | expr(p); | ||
431 | } | ||
432 | m.complete(p, RETURN_EXPR) | ||
433 | } | ||
434 | |||
435 | // test continue_expr | ||
436 | // fn foo() { | ||
437 | // loop { | ||
438 | // continue; | ||
439 | // continue 'l; | ||
440 | // } | ||
441 | // } | ||
442 | fn continue_expr(p: &mut Parser) -> CompletedMarker { | ||
443 | assert!(p.at(CONTINUE_KW)); | ||
444 | let m = p.start(); | ||
445 | p.bump(); | ||
446 | p.eat(LIFETIME); | ||
447 | m.complete(p, CONTINUE_EXPR) | ||
448 | } | ||
449 | |||
450 | // test break_expr | ||
451 | // fn foo() { | ||
452 | // loop { | ||
453 | // break; | ||
454 | // break 'l; | ||
455 | // break 92; | ||
456 | // break 'l 92; | ||
457 | // } | ||
458 | // } | ||
459 | fn break_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker { | ||
460 | assert!(p.at(BREAK_KW)); | ||
461 | let m = p.start(); | ||
462 | p.bump(); | ||
463 | p.eat(LIFETIME); | ||
464 | // test break_ambiguity | ||
465 | // fn foo(){ | ||
466 | // if break {} | ||
467 | // while break {} | ||
468 | // for i in break {} | ||
469 | // match break {} | ||
470 | // } | ||
471 | if p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(L_CURLY)) { | ||
472 | expr(p); | ||
473 | } | ||
474 | m.complete(p, BREAK_EXPR) | ||
475 | } | ||
diff --git a/crates/ra_syntax/src/parsing/grammar/items.rs b/crates/ra_syntax/src/parsing/grammar/items.rs deleted file mode 100644 index 4b962c1f3..000000000 --- a/crates/ra_syntax/src/parsing/grammar/items.rs +++ /dev/null | |||
@@ -1,392 +0,0 @@ | |||
1 | mod consts; | ||
2 | mod nominal; | ||
3 | mod traits; | ||
4 | mod use_item; | ||
5 | |||
6 | pub(crate) use self::{ | ||
7 | expressions::{match_arm_list, named_field_list}, | ||
8 | nominal::{enum_variant_list, named_field_def_list}, | ||
9 | traits::{impl_item_list, trait_item_list}, | ||
10 | use_item::use_tree_list, | ||
11 | }; | ||
12 | use super::*; | ||
13 | |||
14 | // test mod_contents | ||
15 | // fn foo() {} | ||
16 | // macro_rules! foo {} | ||
17 | // foo::bar!(); | ||
18 | // super::baz! {} | ||
19 | // struct S; | ||
20 | pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) { | ||
21 | attributes::inner_attributes(p); | ||
22 | while !p.at(EOF) && !(stop_on_r_curly && p.at(R_CURLY)) { | ||
23 | item_or_macro(p, stop_on_r_curly, ItemFlavor::Mod) | ||
24 | } | ||
25 | } | ||
26 | |||
27 | pub(super) enum ItemFlavor { | ||
28 | Mod, | ||
29 | Trait, | ||
30 | } | ||
31 | |||
32 | pub(super) const ITEM_RECOVERY_SET: TokenSet = token_set![ | ||
33 | FN_KW, STRUCT_KW, ENUM_KW, IMPL_KW, TRAIT_KW, CONST_KW, STATIC_KW, LET_KW, MOD_KW, PUB_KW, | ||
34 | CRATE_KW | ||
35 | ]; | ||
36 | |||
37 | pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool, flavor: ItemFlavor) { | ||
38 | let m = p.start(); | ||
39 | attributes::outer_attributes(p); | ||
40 | match maybe_item(p, flavor) { | ||
41 | MaybeItem::Item(kind) => { | ||
42 | m.complete(p, kind); | ||
43 | } | ||
44 | MaybeItem::None => { | ||
45 | if paths::is_path_start(p) { | ||
46 | match macro_call(p) { | ||
47 | BlockLike::Block => (), | ||
48 | BlockLike::NotBlock => { | ||
49 | p.expect(SEMI); | ||
50 | } | ||
51 | } | ||
52 | m.complete(p, MACRO_CALL); | ||
53 | } else { | ||
54 | m.abandon(p); | ||
55 | if p.at(L_CURLY) { | ||
56 | error_block(p, "expected an item"); | ||
57 | } else if p.at(R_CURLY) && !stop_on_r_curly { | ||
58 | let e = p.start(); | ||
59 | p.error("unmatched `}`"); | ||
60 | p.bump(); | ||
61 | e.complete(p, ERROR); | ||
62 | } else if !p.at(EOF) && !p.at(R_CURLY) { | ||
63 | p.err_and_bump("expected an item"); | ||
64 | } else { | ||
65 | p.error("expected an item"); | ||
66 | } | ||
67 | } | ||
68 | } | ||
69 | MaybeItem::Modifiers => { | ||
70 | p.error("expected fn, trait or impl"); | ||
71 | m.complete(p, ERROR); | ||
72 | } | ||
73 | } | ||
74 | } | ||
75 | |||
76 | pub(super) enum MaybeItem { | ||
77 | None, | ||
78 | Item(SyntaxKind), | ||
79 | Modifiers, | ||
80 | } | ||
81 | |||
82 | pub(super) fn maybe_item(p: &mut Parser, flavor: ItemFlavor) -> MaybeItem { | ||
83 | opt_visibility(p); | ||
84 | if let Some(kind) = items_without_modifiers(p) { | ||
85 | return MaybeItem::Item(kind); | ||
86 | } | ||
87 | |||
88 | let mut has_mods = false; | ||
89 | // modifiers | ||
90 | has_mods |= p.eat(CONST_KW); | ||
91 | |||
92 | // test_err unsafe_block_in_mod | ||
93 | // fn foo(){} unsafe { } fn bar(){} | ||
94 | if p.at(UNSAFE_KW) && p.nth(1) != L_CURLY { | ||
95 | p.eat(UNSAFE_KW); | ||
96 | has_mods = true; | ||
97 | } | ||
98 | if p.at(EXTERN_KW) { | ||
99 | has_mods = true; | ||
100 | abi(p); | ||
101 | } | ||
102 | if p.at(IDENT) && p.at_contextual_kw("auto") && p.nth(1) == TRAIT_KW { | ||
103 | p.bump_remap(AUTO_KW); | ||
104 | has_mods = true; | ||
105 | } | ||
106 | if p.at(IDENT) && p.at_contextual_kw("default") && p.nth(1) == IMPL_KW { | ||
107 | p.bump_remap(DEFAULT_KW); | ||
108 | has_mods = true; | ||
109 | } | ||
110 | |||
111 | // items | ||
112 | let kind = match p.current() { | ||
113 | // test extern_fn | ||
114 | // extern fn foo() {} | ||
115 | |||
116 | // test const_fn | ||
117 | // const fn foo() {} | ||
118 | |||
119 | // test const_unsafe_fn | ||
120 | // const unsafe fn foo() {} | ||
121 | |||
122 | // test unsafe_extern_fn | ||
123 | // unsafe extern "C" fn foo() {} | ||
124 | |||
125 | // test unsafe_fn | ||
126 | // unsafe fn foo() {} | ||
127 | FN_KW => { | ||
128 | fn_def(p, flavor); | ||
129 | FN_DEF | ||
130 | } | ||
131 | |||
132 | // test unsafe_trait | ||
133 | // unsafe trait T {} | ||
134 | |||
135 | // test auto_trait | ||
136 | // auto trait T {} | ||
137 | |||
138 | // test unsafe_auto_trait | ||
139 | // unsafe auto trait T {} | ||
140 | TRAIT_KW => { | ||
141 | traits::trait_def(p); | ||
142 | TRAIT_DEF | ||
143 | } | ||
144 | |||
145 | // test unsafe_impl | ||
146 | // unsafe impl Foo {} | ||
147 | |||
148 | // test default_impl | ||
149 | // default impl Foo {} | ||
150 | |||
151 | // test unsafe_default_impl | ||
152 | // unsafe default impl Foo {} | ||
153 | IMPL_KW => { | ||
154 | traits::impl_block(p); | ||
155 | IMPL_BLOCK | ||
156 | } | ||
157 | _ => { | ||
158 | return if has_mods { MaybeItem::Modifiers } else { MaybeItem::None }; | ||
159 | } | ||
160 | }; | ||
161 | |||
162 | MaybeItem::Item(kind) | ||
163 | } | ||
164 | |||
165 | fn items_without_modifiers(p: &mut Parser) -> Option<SyntaxKind> { | ||
166 | let la = p.nth(1); | ||
167 | let kind = match p.current() { | ||
168 | // test extern_crate | ||
169 | // extern crate foo; | ||
170 | EXTERN_KW if la == CRATE_KW => { | ||
171 | extern_crate_item(p); | ||
172 | EXTERN_CRATE_ITEM | ||
173 | } | ||
174 | TYPE_KW => { | ||
175 | type_def(p); | ||
176 | TYPE_DEF | ||
177 | } | ||
178 | MOD_KW => { | ||
179 | mod_item(p); | ||
180 | MODULE | ||
181 | } | ||
182 | STRUCT_KW => { | ||
183 | // test struct_items | ||
184 | // struct Foo; | ||
185 | // struct Foo {} | ||
186 | // struct Foo(); | ||
187 | // struct Foo(String, usize); | ||
188 | // struct Foo { | ||
189 | // a: i32, | ||
190 | // b: f32, | ||
191 | // } | ||
192 | nominal::struct_def(p, STRUCT_KW); | ||
193 | if p.at(SEMI) { | ||
194 | p.err_and_bump( | ||
195 | "expected item, found `;`\n\ | ||
196 | consider removing this semicolon", | ||
197 | ); | ||
198 | } | ||
199 | STRUCT_DEF | ||
200 | } | ||
201 | IDENT if p.at_contextual_kw("union") && p.nth(1) == IDENT => { | ||
202 | // test union_items | ||
203 | // union Foo {} | ||
204 | // union Foo { | ||
205 | // a: i32, | ||
206 | // b: f32, | ||
207 | // } | ||
208 | nominal::struct_def(p, UNION_KW); | ||
209 | STRUCT_DEF | ||
210 | } | ||
211 | ENUM_KW => { | ||
212 | nominal::enum_def(p); | ||
213 | ENUM_DEF | ||
214 | } | ||
215 | USE_KW => { | ||
216 | use_item::use_item(p); | ||
217 | USE_ITEM | ||
218 | } | ||
219 | CONST_KW if (la == IDENT || la == MUT_KW) => { | ||
220 | consts::const_def(p); | ||
221 | CONST_DEF | ||
222 | } | ||
223 | STATIC_KW => { | ||
224 | consts::static_def(p); | ||
225 | STATIC_DEF | ||
226 | } | ||
227 | // test extern_block | ||
228 | // extern {} | ||
229 | EXTERN_KW | ||
230 | if la == L_CURLY || ((la == STRING || la == RAW_STRING) && p.nth(2) == L_CURLY) => | ||
231 | { | ||
232 | abi(p); | ||
233 | extern_item_list(p); | ||
234 | EXTERN_BLOCK | ||
235 | } | ||
236 | _ => return None, | ||
237 | }; | ||
238 | Some(kind) | ||
239 | } | ||
240 | |||
241 | fn extern_crate_item(p: &mut Parser) { | ||
242 | assert!(p.at(EXTERN_KW)); | ||
243 | p.bump(); | ||
244 | assert!(p.at(CRATE_KW)); | ||
245 | p.bump(); | ||
246 | name_ref(p); | ||
247 | opt_alias(p); | ||
248 | p.expect(SEMI); | ||
249 | } | ||
250 | |||
251 | pub(crate) fn extern_item_list(p: &mut Parser) { | ||
252 | assert!(p.at(L_CURLY)); | ||
253 | let m = p.start(); | ||
254 | p.bump(); | ||
255 | mod_contents(p, true); | ||
256 | p.expect(R_CURLY); | ||
257 | m.complete(p, EXTERN_ITEM_LIST); | ||
258 | } | ||
259 | |||
260 | fn fn_def(p: &mut Parser, flavor: ItemFlavor) { | ||
261 | assert!(p.at(FN_KW)); | ||
262 | p.bump(); | ||
263 | |||
264 | name_r(p, ITEM_RECOVERY_SET); | ||
265 | // test function_type_params | ||
266 | // fn foo<T: Clone + Copy>(){} | ||
267 | type_params::opt_type_param_list(p); | ||
268 | |||
269 | if p.at(L_PAREN) { | ||
270 | match flavor { | ||
271 | ItemFlavor::Mod => params::param_list(p), | ||
272 | ItemFlavor::Trait => params::param_list_opt_patterns(p), | ||
273 | } | ||
274 | } else { | ||
275 | p.error("expected function arguments"); | ||
276 | } | ||
277 | // test function_ret_type | ||
278 | // fn foo() {} | ||
279 | // fn bar() -> () {} | ||
280 | opt_fn_ret_type(p); | ||
281 | |||
282 | // test function_where_clause | ||
283 | // fn foo<T>() where T: Copy {} | ||
284 | type_params::opt_where_clause(p); | ||
285 | |||
286 | // test fn_decl | ||
287 | // trait T { fn foo(); } | ||
288 | if p.at(SEMI) { | ||
289 | p.bump(); | ||
290 | } else { | ||
291 | expressions::block(p) | ||
292 | } | ||
293 | } | ||
294 | |||
295 | // test type_item | ||
296 | // type Foo = Bar; | ||
297 | fn type_def(p: &mut Parser) { | ||
298 | assert!(p.at(TYPE_KW)); | ||
299 | p.bump(); | ||
300 | |||
301 | name(p); | ||
302 | |||
303 | // test type_item_type_params | ||
304 | // type Result<T> = (); | ||
305 | type_params::opt_type_param_list(p); | ||
306 | |||
307 | if p.at(COLON) { | ||
308 | type_params::bounds(p); | ||
309 | } | ||
310 | |||
311 | // test type_item_where_clause | ||
312 | // type Foo where Foo: Copy = (); | ||
313 | type_params::opt_where_clause(p); | ||
314 | |||
315 | if p.eat(EQ) { | ||
316 | types::type_(p); | ||
317 | } | ||
318 | p.expect(SEMI); | ||
319 | } | ||
320 | |||
321 | pub(crate) fn mod_item(p: &mut Parser) { | ||
322 | assert!(p.at(MOD_KW)); | ||
323 | p.bump(); | ||
324 | |||
325 | name(p); | ||
326 | if p.at(L_CURLY) { | ||
327 | mod_item_list(p); | ||
328 | } else if !p.eat(SEMI) { | ||
329 | p.error("expected `;` or `{`"); | ||
330 | } | ||
331 | } | ||
332 | |||
333 | pub(crate) fn mod_item_list(p: &mut Parser) { | ||
334 | assert!(p.at(L_CURLY)); | ||
335 | let m = p.start(); | ||
336 | p.bump(); | ||
337 | mod_contents(p, true); | ||
338 | p.expect(R_CURLY); | ||
339 | m.complete(p, ITEM_LIST); | ||
340 | } | ||
341 | |||
342 | fn macro_call(p: &mut Parser) -> BlockLike { | ||
343 | assert!(paths::is_path_start(p)); | ||
344 | paths::use_path(p); | ||
345 | macro_call_after_excl(p) | ||
346 | } | ||
347 | |||
348 | pub(super) fn macro_call_after_excl(p: &mut Parser) -> BlockLike { | ||
349 | p.expect(EXCL); | ||
350 | if p.at(IDENT) { | ||
351 | name(p); | ||
352 | } | ||
353 | match p.current() { | ||
354 | L_CURLY => { | ||
355 | token_tree(p); | ||
356 | BlockLike::Block | ||
357 | } | ||
358 | L_PAREN | L_BRACK => { | ||
359 | token_tree(p); | ||
360 | BlockLike::NotBlock | ||
361 | } | ||
362 | _ => { | ||
363 | p.error("expected `{`, `[`, `(`"); | ||
364 | BlockLike::NotBlock | ||
365 | } | ||
366 | } | ||
367 | } | ||
368 | |||
369 | pub(crate) fn token_tree(p: &mut Parser) { | ||
370 | let closing_paren_kind = match p.current() { | ||
371 | L_CURLY => R_CURLY, | ||
372 | L_PAREN => R_PAREN, | ||
373 | L_BRACK => R_BRACK, | ||
374 | _ => unreachable!(), | ||
375 | }; | ||
376 | let m = p.start(); | ||
377 | p.bump(); | ||
378 | while !p.at(EOF) && !p.at(closing_paren_kind) { | ||
379 | match p.current() { | ||
380 | L_CURLY | L_PAREN | L_BRACK => token_tree(p), | ||
381 | R_CURLY => { | ||
382 | p.error("unmatched `}`"); | ||
383 | m.complete(p, TOKEN_TREE); | ||
384 | return; | ||
385 | } | ||
386 | R_PAREN | R_BRACK => p.err_and_bump("unmatched brace"), | ||
387 | _ => p.bump(), | ||
388 | } | ||
389 | } | ||
390 | p.expect(closing_paren_kind); | ||
391 | m.complete(p, TOKEN_TREE); | ||
392 | } | ||
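`token_tree` above scans a balanced delimiter tree: it picks the matching closer for the current opener, recurses on nested openers, and reports stray closers as errors. A standalone sketch of the balanced-scanning core (character-based, names of my own, error recovery omitted; not the deleted code):

    // Given that `tokens[pos]` is an opening delimiter, return the index just
    // past its matching closer (or past the end if the input is unbalanced).
    fn skip_token_tree(tokens: &[char], mut pos: usize) -> usize {
        let closer = match tokens[pos] {
            '{' => '}',
            '(' => ')',
            '[' => ']',
            _ => unreachable!("caller must be at an opening delimiter"),
        };
        pos += 1;
        while pos < tokens.len() && tokens[pos] != closer {
            match tokens[pos] {
                '{' | '(' | '[' => pos = skip_token_tree(tokens, pos), // nested tree
                _ => pos += 1,
            }
        }
        pos + 1 // step past the closer
    }

    fn main() {
        let src: Vec<char> = "{ a ( b [ c ] ) d } e".chars().collect();
        assert_eq!(skip_token_tree(&src, 0), 19); // index just past the outer `}`
    }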
diff --git a/crates/ra_syntax/src/parsing/grammar/items/consts.rs b/crates/ra_syntax/src/parsing/grammar/items/consts.rs deleted file mode 100644 index 5a5852f83..000000000 --- a/crates/ra_syntax/src/parsing/grammar/items/consts.rs +++ /dev/null | |||
@@ -1,21 +0,0 @@ | |||
1 | use super::*; | ||
2 | |||
3 | pub(super) fn static_def(p: &mut Parser) { | ||
4 | const_or_static(p, STATIC_KW) | ||
5 | } | ||
6 | |||
7 | pub(super) fn const_def(p: &mut Parser) { | ||
8 | const_or_static(p, CONST_KW) | ||
9 | } | ||
10 | |||
11 | fn const_or_static(p: &mut Parser, kw: SyntaxKind) { | ||
12 | assert!(p.at(kw)); | ||
13 | p.bump(); | ||
14 | p.eat(MUT_KW); // TODO: validator to forbid const mut | ||
15 | name(p); | ||
16 | types::ascription(p); | ||
17 | if p.eat(EQ) { | ||
18 | expressions::expr(p); | ||
19 | } | ||
20 | p.expect(SEMI); | ||
21 | } | ||
diff --git a/crates/ra_syntax/src/parsing/grammar/items/nominal.rs b/crates/ra_syntax/src/parsing/grammar/items/nominal.rs deleted file mode 100644 index ff9b38f9c..000000000 --- a/crates/ra_syntax/src/parsing/grammar/items/nominal.rs +++ /dev/null | |||
@@ -1,168 +0,0 @@ | |||
1 | use super::*; | ||
2 | |||
3 | pub(super) fn struct_def(p: &mut Parser, kind: SyntaxKind) { | ||
4 | assert!(p.at(STRUCT_KW) || p.at_contextual_kw("union")); | ||
5 | p.bump_remap(kind); | ||
6 | |||
7 | name_r(p, ITEM_RECOVERY_SET); | ||
8 | type_params::opt_type_param_list(p); | ||
9 | match p.current() { | ||
10 | WHERE_KW => { | ||
11 | type_params::opt_where_clause(p); | ||
12 | match p.current() { | ||
13 | SEMI => { | ||
14 | p.bump(); | ||
15 | return; | ||
16 | } | ||
17 | L_CURLY => named_field_def_list(p), | ||
18 | _ => { | ||
19 | //TODO: special case `(` error message | ||
20 | p.error("expected `;` or `{`"); | ||
21 | return; | ||
22 | } | ||
23 | } | ||
24 | } | ||
25 | SEMI if kind == STRUCT_KW => { | ||
26 | p.bump(); | ||
27 | return; | ||
28 | } | ||
29 | L_CURLY => named_field_def_list(p), | ||
30 | L_PAREN if kind == STRUCT_KW => { | ||
31 | pos_field_def_list(p); | ||
32 | // test tuple_struct_where | ||
33 | // struct Test<T>(T) where T: Clone; | ||
34 | // struct Test<T>(T); | ||
35 | type_params::opt_where_clause(p); | ||
36 | p.expect(SEMI); | ||
37 | } | ||
38 | _ if kind == STRUCT_KW => { | ||
39 | p.error("expected `;`, `{`, or `(`"); | ||
40 | return; | ||
41 | } | ||
42 | _ => { | ||
43 | p.error("expected `{`"); | ||
44 | return; | ||
45 | } | ||
46 | } | ||
47 | } | ||
48 | |||
49 | pub(super) fn enum_def(p: &mut Parser) { | ||
50 | assert!(p.at(ENUM_KW)); | ||
51 | p.bump(); | ||
52 | name_r(p, ITEM_RECOVERY_SET); | ||
53 | type_params::opt_type_param_list(p); | ||
54 | type_params::opt_where_clause(p); | ||
55 | if p.at(L_CURLY) { | ||
56 | enum_variant_list(p); | ||
57 | } else { | ||
58 | p.error("expected `{`") | ||
59 | } | ||
60 | } | ||
61 | |||
62 | pub(crate) fn enum_variant_list(p: &mut Parser) { | ||
63 | assert!(p.at(L_CURLY)); | ||
64 | let m = p.start(); | ||
65 | p.bump(); | ||
66 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
67 | if p.at(L_CURLY) { | ||
68 | error_block(p, "expected enum variant"); | ||
69 | continue; | ||
70 | } | ||
71 | let var = p.start(); | ||
72 | attributes::outer_attributes(p); | ||
73 | if p.at(IDENT) { | ||
74 | name(p); | ||
75 | match p.current() { | ||
76 | L_CURLY => named_field_def_list(p), | ||
77 | L_PAREN => pos_field_def_list(p), | ||
78 | EQ => { | ||
79 | p.bump(); | ||
80 | expressions::expr(p); | ||
81 | } | ||
82 | _ => (), | ||
83 | } | ||
84 | var.complete(p, ENUM_VARIANT); | ||
85 | } else { | ||
86 | var.abandon(p); | ||
87 | p.err_and_bump("expected enum variant"); | ||
88 | } | ||
89 | if !p.at(R_CURLY) { | ||
90 | p.expect(COMMA); | ||
91 | } | ||
92 | } | ||
93 | p.expect(R_CURLY); | ||
94 | m.complete(p, ENUM_VARIANT_LIST); | ||
95 | } | ||
96 | |||
97 | pub(crate) fn named_field_def_list(p: &mut Parser) { | ||
98 | assert!(p.at(L_CURLY)); | ||
99 | let m = p.start(); | ||
100 | p.bump(); | ||
101 | while !p.at(R_CURLY) && !p.at(EOF) { | ||
102 | if p.at(L_CURLY) { | ||
103 | error_block(p, "expected field"); | ||
104 | continue; | ||
105 | } | ||
106 | named_field_def(p); | ||
107 | if !p.at(R_CURLY) { | ||
108 | p.expect(COMMA); | ||
109 | } | ||
110 | } | ||
111 | p.expect(R_CURLY); | ||
112 | m.complete(p, NAMED_FIELD_DEF_LIST); | ||
113 | |||
114 | fn named_field_def(p: &mut Parser) { | ||
115 | let m = p.start(); | ||
116 | // test field_attrs | ||
117 | // struct S { | ||
118 | // #[serde(with = "url_serde")] | ||
119 | // pub uri: Uri, | ||
120 | // } | ||
121 | attributes::outer_attributes(p); | ||
122 | opt_visibility(p); | ||
123 | if p.at(IDENT) { | ||
124 | name(p); | ||
125 | p.expect(COLON); | ||
126 | types::type_(p); | ||
127 | m.complete(p, NAMED_FIELD_DEF); | ||
128 | } else { | ||
129 | m.abandon(p); | ||
130 | p.err_and_bump("expected field declaration"); | ||
131 | } | ||
132 | } | ||
133 | } | ||
134 | |||
135 | fn pos_field_def_list(p: &mut Parser) { | ||
136 | assert!(p.at(L_PAREN)); | ||
137 | let m = p.start(); | ||
138 | if !p.expect(L_PAREN) { | ||
139 | return; | ||
140 | } | ||
141 | while !p.at(R_PAREN) && !p.at(EOF) { | ||
142 | let m = p.start(); | ||
143 | // test pos_field_attrs | ||
144 | // struct S ( | ||
145 | // #[serde(with = "url_serde")] | ||
146 | // pub Uri, | ||
147 | // ); | ||
148 | // | ||
149 | // enum S { | ||
150 | // Uri(#[serde(with = "url_serde")] Uri), | ||
151 | // } | ||
152 | attributes::outer_attributes(p); | ||
153 | opt_visibility(p); | ||
154 | if !p.at_ts(types::TYPE_FIRST) { | ||
155 | p.error("expected a type"); | ||
156 | m.complete(p, ERROR); | ||
157 | break; | ||
158 | } | ||
159 | types::type_(p); | ||
160 | m.complete(p, POS_FIELD_DEF); | ||
161 | |||
162 | if !p.at(R_PAREN) { | ||
163 | p.expect(COMMA); | ||
164 | } | ||
165 | } | ||
166 | p.expect(R_PAREN); | ||
167 | m.complete(p, POS_FIELD_DEF_LIST); | ||
168 | } | ||
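Both `enum_variant_list` and `named_field_def_list` above share one loop shape: stop at `}` or end of input, treat a stray `{` as an error block, parse one element, then require a `,` unless `}` follows. A self-contained sketch of that loop over string tokens; the token strings and error messages are stand-ins, and unlike the real `error_block` the skip here does not handle nested braces:

```rust
// Toy "braced, comma-separated list with recovery" loop.
fn parse_braced_list<'a>(toks: &[&'a str]) -> (Vec<&'a str>, Vec<String>) {
    assert_eq!(toks[0], "{");
    let mut items = Vec::new();
    let mut errors = Vec::new();
    let mut pos = 1;
    while pos < toks.len() && toks[pos] != "}" {
        if toks[pos] == "{" {
            // error_block: a stray `{ ... }` cannot be an element; report it
            // and skip past it.
            errors.push("expected field".to_string());
            while pos < toks.len() && toks[pos] != "}" {
                pos += 1;
            }
            if pos < toks.len() {
                pos += 1; // consume the stray block's `}`
            }
            continue;
        }
        items.push(toks[pos]); // one element: a field or variant name here
        pos += 1;
        // require a `,` between elements, but allow omitting it before `}`
        if pos < toks.len() && toks[pos] != "}" {
            if toks[pos] == "," {
                pos += 1;
            } else {
                errors.push("expected COMMA".to_string());
            }
        }
    }
    if pos == toks.len() {
        errors.push("expected R_CURLY".to_string());
    }
    (items, errors)
}

fn main() {
    let (items, errors) = parse_braced_list(&["{", "a", ",", "b", ",", "}"]);
    assert_eq!(items, vec!["a", "b"]);
    assert!(errors.is_empty());

    let (items, errors) = parse_braced_list(&["{", "a", "b", "}"]);
    assert_eq!(items, vec!["a", "b"]);
    assert_eq!(errors, vec!["expected COMMA".to_string()]);
}
```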
diff --git a/crates/ra_syntax/src/parsing/grammar/items/traits.rs b/crates/ra_syntax/src/parsing/grammar/items/traits.rs deleted file mode 100644 index d5a8ccd98..000000000 --- a/crates/ra_syntax/src/parsing/grammar/items/traits.rs +++ /dev/null | |||
@@ -1,137 +0,0 @@ | |||
1 | use super::*; | ||
2 | |||
3 | // test trait_item | ||
4 | // trait T<U>: Hash + Clone where U: Copy {} | ||
5 | pub(super) fn trait_def(p: &mut Parser) { | ||
6 | assert!(p.at(TRAIT_KW)); | ||
7 | p.bump(); | ||
8 | name_r(p, ITEM_RECOVERY_SET); | ||
9 | type_params::opt_type_param_list(p); | ||
10 | if p.at(COLON) { | ||
11 | type_params::bounds(p); | ||
12 | } | ||
13 | type_params::opt_where_clause(p); | ||
14 | if p.at(L_CURLY) { | ||
15 | trait_item_list(p); | ||
16 | } else { | ||
17 | p.error("expected `{`"); | ||
18 | } | ||
19 | } | ||
20 | |||
21 | // test trait_item_list | ||
22 | // impl F { | ||
23 | // type A: Clone; | ||
24 | // const B: i32; | ||
25 | // fn foo() {} | ||
26 | // fn bar(&self); | ||
27 | // } | ||
28 | pub(crate) fn trait_item_list(p: &mut Parser) { | ||
29 | assert!(p.at(L_CURLY)); | ||
30 | let m = p.start(); | ||
31 | p.bump(); | ||
32 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
33 | if p.at(L_CURLY) { | ||
34 | error_block(p, "expected an item"); | ||
35 | continue; | ||
36 | } | ||
37 | item_or_macro(p, true, ItemFlavor::Trait); | ||
38 | } | ||
39 | p.expect(R_CURLY); | ||
40 | m.complete(p, ITEM_LIST); | ||
41 | } | ||
42 | |||
43 | // test impl_block | ||
44 | // impl Foo {} | ||
45 | pub(super) fn impl_block(p: &mut Parser) { | ||
46 | assert!(p.at(IMPL_KW)); | ||
47 | p.bump(); | ||
48 | if choose_type_params_over_qpath(p) { | ||
49 | type_params::opt_type_param_list(p); | ||
50 | } | ||
51 | |||
52 | // TODO: never type | ||
53 | // impl ! {} | ||
54 | |||
55 | // test impl_block_neg | ||
56 | // impl !Send for X {} | ||
57 | p.eat(EXCL); | ||
58 | impl_type(p); | ||
59 | if p.eat(FOR_KW) { | ||
60 | impl_type(p); | ||
61 | } | ||
62 | type_params::opt_where_clause(p); | ||
63 | if p.at(L_CURLY) { | ||
64 | impl_item_list(p); | ||
65 | } else { | ||
66 | p.error("expected `{`"); | ||
67 | } | ||
68 | } | ||
69 | |||
70 | // test impl_item_list | ||
71 | // impl F { | ||
72 | // type A = i32; | ||
73 | // const B: i32 = 92; | ||
74 | // fn foo() {} | ||
75 | // fn bar(&self) {} | ||
76 | // } | ||
77 | pub(crate) fn impl_item_list(p: &mut Parser) { | ||
78 | assert!(p.at(L_CURLY)); | ||
79 | let m = p.start(); | ||
80 | p.bump(); | ||
81 | // test impl_inner_attributes | ||
82 | // enum F{} | ||
83 | // impl F { | ||
84 | // //! This is a doc comment | ||
85 | // #![doc("This is also a doc comment")] | ||
86 | // } | ||
87 | attributes::inner_attributes(p); | ||
88 | |||
89 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
90 | if p.at(L_CURLY) { | ||
91 | error_block(p, "expected an item"); | ||
92 | continue; | ||
93 | } | ||
94 | item_or_macro(p, true, ItemFlavor::Mod); | ||
95 | } | ||
96 | p.expect(R_CURLY); | ||
97 | m.complete(p, ITEM_LIST); | ||
98 | } | ||
99 | |||
100 | fn choose_type_params_over_qpath(p: &Parser) -> bool { | ||
101 | // There's an ambiguity between generic parameters and qualified paths in impls. | ||
102 | // If we see `<` it may start both, so we have to inspect some following tokens. | ||
103 | // The following combinations can only start generics, | ||
104 | // but not qualified paths (with one exception): | ||
105 | // `<` `>` - empty generic parameters | ||
106 | // `<` `#` - generic parameters with attributes | ||
107 | // `<` (LIFETIME|IDENT) `>` - single generic parameter | ||
108 | // `<` (LIFETIME|IDENT) `,` - first generic parameter in a list | ||
109 | // `<` (LIFETIME|IDENT) `:` - generic parameter with bounds | ||
110 | // `<` (LIFETIME|IDENT) `=` - generic parameter with a default | ||
111 | // The only truly ambiguous case is | ||
112 | // `<` IDENT `>` `::` IDENT ... | ||
113 | // we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`) | ||
114 | // because this is what is almost always expected in practice, qualified paths in impls | ||
115 | // (`impl <Type>::AssocTy { ... }`) aren't even allowed by the type checker at the moment. | ||
116 | if !p.at(L_ANGLE) { | ||
117 | return false; | ||
118 | } | ||
119 | if p.nth(1) == POUND || p.nth(1) == R_ANGLE { | ||
120 | return true; | ||
121 | } | ||
122 | (p.nth(1) == LIFETIME || p.nth(1) == IDENT) | ||
123 | && (p.nth(2) == R_ANGLE || p.nth(2) == COMMA || p.nth(2) == COLON || p.nth(2) == EQ) | ||
124 | } | ||
125 | |||
126 | // test_err impl_type | ||
127 | // impl Type {} | ||
128 | // impl Trait1 for T {} | ||
129 | // impl impl NotType {} | ||
130 | // impl Trait2 for impl NotType {} | ||
131 | pub(crate) fn impl_type(p: &mut Parser) { | ||
132 | if p.at(IMPL_KW) { | ||
133 | p.error("expected trait or type"); | ||
134 | return; | ||
135 | } | ||
136 | types::type_(p); | ||
137 | } | ||
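`choose_type_params_over_qpath` above encodes a small lookahead table. A standalone restatement over a toy token enum (the enum and its names are invented for the sketch; only the decision table mirrors the grammar code):

```rust
// Decide whether `<` after `impl` starts generic parameters or a qualified path.
#[derive(Clone, Copy, PartialEq)]
enum Tok { LAngle, RAngle, Pound, Lifetime, Ident, Comma, Colon, Eq, Other }

fn starts_generic_params(toks: &[Tok]) -> bool {
    use Tok::*;
    let nth = |i: usize| toks.get(i).copied().unwrap_or(Other);
    if nth(0) != LAngle {
        return false; // no `<` at all: certainly not generic parameters
    }
    // `<>` and `<#...` can only be generic parameter lists.
    if nth(1) == Pound || nth(1) == RAngle {
        return true;
    }
    // `<` (LIFETIME|IDENT) followed by `>`, `,`, `:` or `=` is a parameter;
    // anything else (e.g. `<Type as Trait>::...`) is treated as a path.
    (nth(1) == Lifetime || nth(1) == Ident)
        && matches!(nth(2), RAngle | Comma | Colon | Eq)
}

fn main() {
    use Tok::*;
    assert!(starts_generic_params(&[LAngle, RAngle]));         // impl<> ...
    assert!(starts_generic_params(&[LAngle, Ident, Comma]));   // impl<T, U> ...
    assert!(starts_generic_params(&[LAngle, Ident, Colon]));   // impl<T: Bound> ...
    assert!(starts_generic_params(&[LAngle, Ident, Eq]));      // impl<T = Dflt> ...
    assert!(!starts_generic_params(&[LAngle, Ident, Other]));  // impl <Type>::AssocTy ...
}
```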
diff --git a/crates/ra_syntax/src/parsing/grammar/items/use_item.rs b/crates/ra_syntax/src/parsing/grammar/items/use_item.rs deleted file mode 100644 index 5111d37eb..000000000 --- a/crates/ra_syntax/src/parsing/grammar/items/use_item.rs +++ /dev/null | |||
@@ -1,121 +0,0 @@ | |||
1 | use super::*; | ||
2 | |||
3 | pub(super) fn use_item(p: &mut Parser) { | ||
4 | assert!(p.at(USE_KW)); | ||
5 | p.bump(); | ||
6 | use_tree(p); | ||
7 | p.expect(SEMI); | ||
8 | } | ||
9 | |||
10 | /// Parse a use 'tree', such as `some::path` in `use some::path;` | ||
11 | /// Note that this is called both by `use_item` and `use_tree_list`, | ||
12 | /// so it handles both `some::path::{inner::path}` and `inner::path` in | ||
13 | /// `use some::path::{inner::path};` | ||
14 | fn use_tree(p: &mut Parser) { | ||
15 | let la = p.nth(1); | ||
16 | let m = p.start(); | ||
17 | match (p.current(), la) { | ||
18 | // Finish the use_tree for cases of e.g. | ||
19 | // `use some::path::{self, *};` or `use *;` | ||
20 | // This does not handle cases such as `use some::path::*` | ||
21 | // N.B. in Rust 2015 `use *;` imports all from crate root | ||
22 | // however in Rust 2018 `use *;` errors: ('cannot glob-import all possible crates') | ||
23 | // TODO: Add this error (if not out of scope) | ||
24 | |||
25 | // test use_star | ||
26 | // use *; | ||
27 | // use ::*; | ||
28 | // use some::path::{*}; | ||
29 | // use some::path::{::*}; | ||
30 | (STAR, _) => p.bump(), | ||
31 | (COLONCOLON, STAR) => { | ||
32 | // Parse `use ::*;`, which imports all from the crate root in Rust 2015 | ||
33 | // This is invalid inside a use_tree_list, (e.g. `use some::path::{::*}`) | ||
34 | // but still parses and errors later: ('crate root in paths can only be used in start position') | ||
35 | // TODO: Add this error (if not out of scope) | ||
36 | // In Rust 2018, it is always invalid (see above) | ||
37 | p.bump(); | ||
38 | p.bump(); | ||
39 | } | ||
40 | // Open a use tree list | ||
41 | // Handles cases such as `use {some::path};` or `{inner::path}` in | ||
42 | // `use some::path::{{inner::path}, other::path}` | ||
43 | |||
44 | // test use_tree_list | ||
45 | // use {crate::path::from::root, or::path::from::crate_name}; // Rust 2018 (with a crate named `or`) | ||
46 | // use {path::from::root}; // Rust 2015 | ||
47 | // use ::{some::arbitrary::path}; // Rust 2015 | ||
48 | // use ::{{{crate::export}}}; // Nonsensical but perfectly legal nesting | ||
49 | (L_CURLY, _) | (COLONCOLON, L_CURLY) => { | ||
50 | if p.at(COLONCOLON) { | ||
51 | p.bump(); | ||
52 | } | ||
53 | use_tree_list(p); | ||
54 | } | ||
55 | // Parse a 'standard' path. | ||
56 | // Also handles aliases (e.g. `use something as something_else`) | ||
57 | |||
58 | // test use_path | ||
59 | // use ::crate_name; // Rust 2018 - All flavours | ||
60 | // use crate_name; // Rust 2018 - Anchored paths | ||
61 | // use item_in_scope_or_crate_name; // Rust 2018 - Uniform Paths | ||
62 | // | ||
63 | // use self::module::Item; | ||
64 | // use crate::Item; | ||
65 | // use self::some::Struct; | ||
66 | // use crate_name::some_item; | ||
67 | _ if paths::is_path_start(p) => { | ||
68 | paths::use_path(p); | ||
69 | match p.current() { | ||
70 | AS_KW => { | ||
71 | // test use_alias | ||
72 | // use some::path as some_name; | ||
73 | // use some::{ | ||
74 | // other::path as some_other_name, | ||
75 | // different::path as different_name, | ||
76 | // yet::another::path, | ||
77 | // running::out::of::synonyms::for_::different::* | ||
78 | // }; | ||
79 | opt_alias(p); | ||
80 | } | ||
81 | COLONCOLON => { | ||
82 | p.bump(); | ||
83 | match p.current() { | ||
84 | STAR => { | ||
85 | p.bump(); | ||
86 | } | ||
87 | // test use_tree_list_after_path | ||
88 | // use crate::{Item}; | ||
89 | // use self::{Item}; | ||
90 | L_CURLY => use_tree_list(p), | ||
91 | _ => { | ||
92 | // is this unreachable? | ||
93 | p.error("expected `{` or `*`"); | ||
94 | } | ||
95 | } | ||
96 | } | ||
97 | _ => (), | ||
98 | } | ||
99 | } | ||
100 | _ => { | ||
101 | m.abandon(p); | ||
102 | p.err_and_bump("expected one of `*`, `::`, `{`, `self`, `super` or an identifier"); | ||
103 | return; | ||
104 | } | ||
105 | } | ||
106 | m.complete(p, USE_TREE); | ||
107 | } | ||
108 | |||
109 | pub(crate) fn use_tree_list(p: &mut Parser) { | ||
110 | assert!(p.at(L_CURLY)); | ||
111 | let m = p.start(); | ||
112 | p.bump(); | ||
113 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
114 | use_tree(p); | ||
115 | if !p.at(R_CURLY) { | ||
116 | p.expect(COMMA); | ||
117 | } | ||
118 | } | ||
119 | p.expect(R_CURLY); | ||
120 | m.complete(p, USE_TREE_LIST); | ||
121 | } | ||
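The `use_tree`/`use_tree_list` pair above is mutually recursive: a tree is either a glob, a plain path, `path::*`, or `path::{ nested trees }`. A self-contained sketch of that recursion over a pre-split token list, counting leaf imports; it uses a simplified grammar with no aliases, attributes, or error recovery, and none of the crate's token types:

```rust
// tree = '*' | path ( '::' ( '*' | list ) )?
// list = '{' tree ( ',' tree )* '}'
// Returns (number of leaf imports, position after the tree).
fn use_tree(toks: &[&str], mut pos: usize) -> (usize, usize) {
    if toks[pos] == "*" {
        return (1, pos + 1); // a bare glob is a single leaf
    }
    pos += 1; // first path segment
    // consume further `::segment` pairs of the same plain path
    while pos + 1 < toks.len()
        && toks[pos] == "::"
        && toks[pos + 1] != "{"
        && toks[pos + 1] != "*"
    {
        pos += 2;
    }
    // `path::*` ends the tree; `path::{...}` recurses into a tree list
    if pos + 1 < toks.len() && toks[pos] == "::" {
        if toks[pos + 1] == "*" {
            return (1, pos + 2);
        }
        if toks[pos + 1] == "{" {
            return use_tree_list(toks, pos + 1);
        }
    }
    (1, pos) // a plain `path` leaf
}

fn use_tree_list(toks: &[&str], mut pos: usize) -> (usize, usize) {
    debug_assert_eq!(toks[pos], "{");
    pos += 1;
    let mut leaves = 0;
    while toks[pos] != "}" {
        let (n, next) = use_tree(toks, pos);
        leaves += n;
        pos = next;
        if toks[pos] == "," {
            pos += 1;
        }
    }
    (leaves, pos + 1) // past the closing `}`
}

fn main() {
    // use a::{b, c::*, d::{e}};
    let toks = ["a", "::", "{", "b", ",", "c", "::", "*", ",", "d", "::", "{", "e", "}", "}"];
    let (leaves, consumed) = use_tree(&toks, 0);
    assert_eq!((leaves, consumed), (3, toks.len()));
}
```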
diff --git a/crates/ra_syntax/src/parsing/grammar/params.rs b/crates/ra_syntax/src/parsing/grammar/params.rs deleted file mode 100644 index 185386569..000000000 --- a/crates/ra_syntax/src/parsing/grammar/params.rs +++ /dev/null | |||
@@ -1,139 +0,0 @@ | |||
1 | use super::*; | ||
2 | |||
3 | // test param_list | ||
4 | // fn a() {} | ||
5 | // fn b(x: i32) {} | ||
6 | // fn c(x: i32, ) {} | ||
7 | // fn d(x: i32, y: ()) {} | ||
8 | pub(super) fn param_list(p: &mut Parser) { | ||
9 | list_(p, Flavor::Normal) | ||
10 | } | ||
11 | |||
12 | // test param_list_opt_patterns | ||
13 | // fn foo<F: FnMut(&mut Foo<'a>)>(){} | ||
14 | pub(super) fn param_list_opt_patterns(p: &mut Parser) { | ||
15 | list_(p, Flavor::OptionalPattern) | ||
16 | } | ||
17 | |||
18 | pub(super) fn param_list_opt_types(p: &mut Parser) { | ||
19 | list_(p, Flavor::OptionalType) | ||
20 | } | ||
21 | |||
22 | #[derive(Clone, Copy, Eq, PartialEq)] | ||
23 | enum Flavor { | ||
24 | OptionalType, | ||
25 | OptionalPattern, | ||
26 | Normal, | ||
27 | } | ||
28 | |||
29 | impl Flavor { | ||
30 | fn type_required(self) -> bool { | ||
31 | match self { | ||
32 | Flavor::OptionalType => false, | ||
33 | _ => true, | ||
34 | } | ||
35 | } | ||
36 | } | ||
37 | |||
38 | fn list_(p: &mut Parser, flavor: Flavor) { | ||
39 | let (bra, ket) = if flavor.type_required() { (L_PAREN, R_PAREN) } else { (PIPE, PIPE) }; | ||
40 | assert!(p.at(bra)); | ||
41 | let m = p.start(); | ||
42 | p.bump(); | ||
43 | if flavor.type_required() { | ||
44 | opt_self_param(p); | ||
45 | } | ||
46 | while !p.at(EOF) && !p.at(ket) { | ||
47 | if !p.at_ts(VALUE_PARAMETER_FIRST) { | ||
48 | p.error("expected value parameter"); | ||
49 | break; | ||
50 | } | ||
51 | value_parameter(p, flavor); | ||
52 | if !p.at(ket) { | ||
53 | p.expect(COMMA); | ||
54 | } | ||
55 | } | ||
56 | p.expect(ket); | ||
57 | m.complete(p, PARAM_LIST); | ||
58 | } | ||
59 | |||
60 | const VALUE_PARAMETER_FIRST: TokenSet = patterns::PATTERN_FIRST.union(types::TYPE_FIRST); | ||
61 | |||
62 | fn value_parameter(p: &mut Parser, flavor: Flavor) { | ||
63 | let m = p.start(); | ||
64 | match flavor { | ||
65 | Flavor::OptionalType | Flavor::Normal => { | ||
66 | patterns::pattern(p); | ||
67 | if p.at(COLON) || flavor.type_required() { | ||
68 | types::ascription(p) | ||
69 | } | ||
70 | } | ||
71 | // test value_parameters_no_patterns | ||
72 | // type F = Box<Fn(a: i32, &b: &i32, &mut c: &i32, ())>; | ||
73 | Flavor::OptionalPattern => { | ||
74 | let la0 = p.current(); | ||
75 | let la1 = p.nth(1); | ||
76 | let la2 = p.nth(2); | ||
77 | let la3 = p.nth(3); | ||
78 | |||
79 | // test trait_fn_placeholder_parameter | ||
80 | // trait Foo { | ||
81 | // fn bar(_: u64); | ||
82 | // } | ||
83 | if (la0 == IDENT || la0 == UNDERSCORE) && la1 == COLON | ||
84 | || la0 == AMP && la1 == IDENT && la2 == COLON | ||
85 | || la0 == AMP && la1 == MUT_KW && la2 == IDENT && la3 == COLON | ||
86 | { | ||
87 | patterns::pattern(p); | ||
88 | types::ascription(p); | ||
89 | } else { | ||
90 | types::type_(p); | ||
91 | } | ||
92 | } | ||
93 | } | ||
94 | m.complete(p, PARAM); | ||
95 | } | ||
96 | |||
97 | // test self_param | ||
98 | // impl S { | ||
99 | // fn a(self) {} | ||
100 | // fn b(&self,) {} | ||
101 | // fn c(&'a self,) {} | ||
102 | // fn d(&'a mut self, x: i32) {} | ||
103 | // fn e(mut self) {} | ||
104 | // } | ||
105 | fn opt_self_param(p: &mut Parser) { | ||
106 | let m; | ||
107 | if p.at(SELF_KW) || p.at(MUT_KW) && p.nth(1) == SELF_KW { | ||
108 | m = p.start(); | ||
109 | p.eat(MUT_KW); | ||
110 | p.eat(SELF_KW); | ||
111 | // test arb_self_types | ||
112 | // impl S { | ||
113 | // fn a(self: &Self) {} | ||
114 | // fn b(mut self: Box<Self>) {} | ||
115 | // } | ||
116 | if p.at(COLON) { | ||
117 | types::ascription(p); | ||
118 | } | ||
119 | } else { | ||
120 | let la1 = p.nth(1); | ||
121 | let la2 = p.nth(2); | ||
122 | let la3 = p.nth(3); | ||
123 | let n_toks = match (p.current(), la1, la2, la3) { | ||
124 | (AMP, SELF_KW, _, _) => 2, | ||
125 | (AMP, MUT_KW, SELF_KW, _) => 3, | ||
126 | (AMP, LIFETIME, SELF_KW, _) => 3, | ||
127 | (AMP, LIFETIME, MUT_KW, SELF_KW) => 4, | ||
128 | _ => return, | ||
129 | }; | ||
130 | m = p.start(); | ||
131 | for _ in 0..n_toks { | ||
132 | p.bump(); | ||
133 | } | ||
134 | } | ||
135 | m.complete(p, SELF_PARAM); | ||
136 | if !p.at(R_PAREN) { | ||
137 | p.expect(COMMA); | ||
138 | } | ||
139 | } | ||
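The `opt_self_param` lookahead above recognises the four borrowed-`self` shapes by matching up to four tokens. A standalone restatement over a toy token enum (names invented for the sketch):

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum Tok { Amp, Mut, Lifetime, SelfKw, Other }

/// Returns how many tokens a borrowed `self` parameter occupies, if the
/// lookahead window starts with one (`&self`, `&mut self`, `&'a self`,
/// `&'a mut self`), mirroring the `n_toks` match in `opt_self_param`.
fn borrowed_self_len(toks: &[Tok]) -> Option<usize> {
    use Tok::*;
    let nth = |i: usize| toks.get(i).copied().unwrap_or(Other);
    match (nth(0), nth(1), nth(2), nth(3)) {
        (Amp, SelfKw, _, _) => Some(2),
        (Amp, Mut, SelfKw, _) => Some(3),
        (Amp, Lifetime, SelfKw, _) => Some(3),
        (Amp, Lifetime, Mut, SelfKw) => Some(4),
        _ => None,
    }
}

fn main() {
    use Tok::*;
    assert_eq!(borrowed_self_len(&[Amp, Lifetime, Mut, SelfKw]), Some(4)); // &'a mut self
    assert_eq!(borrowed_self_len(&[Amp, SelfKw, Other, Other]), Some(2));  // &self
    assert_eq!(borrowed_self_len(&[Amp, Other]), None);                    // just a `&T` type
}
```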
diff --git a/crates/ra_syntax/src/parsing/grammar/paths.rs b/crates/ra_syntax/src/parsing/grammar/paths.rs deleted file mode 100644 index 33a11886c..000000000 --- a/crates/ra_syntax/src/parsing/grammar/paths.rs +++ /dev/null | |||
@@ -1,103 +0,0 @@ | |||
1 | use super::*; | ||
2 | |||
3 | pub(super) const PATH_FIRST: TokenSet = | ||
4 | token_set![IDENT, SELF_KW, SUPER_KW, CRATE_KW, COLONCOLON, L_ANGLE]; | ||
5 | |||
6 | pub(super) fn is_path_start(p: &Parser) -> bool { | ||
7 | match p.current() { | ||
8 | IDENT | SELF_KW | SUPER_KW | CRATE_KW | COLONCOLON => true, | ||
9 | _ => false, | ||
10 | } | ||
11 | } | ||
12 | |||
13 | pub(super) fn use_path(p: &mut Parser) { | ||
14 | path(p, Mode::Use) | ||
15 | } | ||
16 | |||
17 | pub(super) fn type_path(p: &mut Parser) { | ||
18 | path(p, Mode::Type) | ||
19 | } | ||
20 | |||
21 | pub(super) fn expr_path(p: &mut Parser) { | ||
22 | path(p, Mode::Expr) | ||
23 | } | ||
24 | |||
25 | #[derive(Clone, Copy, Eq, PartialEq)] | ||
26 | enum Mode { | ||
27 | Use, | ||
28 | Type, | ||
29 | Expr, | ||
30 | } | ||
31 | |||
32 | fn path(p: &mut Parser, mode: Mode) { | ||
33 | let path = p.start(); | ||
34 | path_segment(p, mode, true); | ||
35 | let mut qual = path.complete(p, PATH); | ||
36 | loop { | ||
37 | let use_tree = match p.nth(1) { | ||
38 | STAR | L_CURLY => true, | ||
39 | _ => false, | ||
40 | }; | ||
41 | if p.at(COLONCOLON) && !use_tree { | ||
42 | let path = qual.precede(p); | ||
43 | p.bump(); | ||
44 | path_segment(p, mode, false); | ||
45 | let path = path.complete(p, PATH); | ||
46 | qual = path; | ||
47 | } else { | ||
48 | break; | ||
49 | } | ||
50 | } | ||
51 | } | ||
52 | |||
53 | fn path_segment(p: &mut Parser, mode: Mode, first: bool) { | ||
54 | let m = p.start(); | ||
55 | // test qual_paths | ||
56 | // type X = <A as B>::Output; | ||
57 | // fn foo() { <usize as Default>::default(); } | ||
58 | if first && p.eat(L_ANGLE) { | ||
59 | types::type_(p); | ||
60 | if p.eat(AS_KW) { | ||
61 | if is_path_start(p) { | ||
62 | types::path_type(p); | ||
63 | } else { | ||
64 | p.error("expected a trait"); | ||
65 | } | ||
66 | } | ||
67 | p.expect(R_ANGLE); | ||
68 | } else { | ||
69 | if first { | ||
70 | p.eat(COLONCOLON); | ||
71 | } | ||
72 | match p.current() { | ||
73 | IDENT => { | ||
74 | name_ref(p); | ||
75 | opt_path_type_args(p, mode); | ||
76 | } | ||
77 | // test crate_path | ||
78 | // use crate::foo; | ||
79 | SELF_KW | SUPER_KW | CRATE_KW => p.bump(), | ||
80 | _ => { | ||
81 | p.err_recover("expected identifier", items::ITEM_RECOVERY_SET); | ||
82 | } | ||
83 | }; | ||
84 | } | ||
85 | m.complete(p, PATH_SEGMENT); | ||
86 | } | ||
87 | |||
88 | fn opt_path_type_args(p: &mut Parser, mode: Mode) { | ||
89 | match mode { | ||
90 | Mode::Use => return, | ||
91 | Mode::Type => { | ||
92 | // test path_fn_trait_args | ||
93 | // type F = Box<Fn(x: i32) -> ()>; | ||
94 | if p.at(L_PAREN) { | ||
95 | params::param_list_opt_patterns(p); | ||
96 | opt_fn_ret_type(p); | ||
97 | } else { | ||
98 | type_args::opt_type_arg_list(p, false) | ||
99 | } | ||
100 | } | ||
101 | Mode::Expr => type_args::opt_type_arg_list(p, true), | ||
102 | } | ||
103 | } | ||
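The `path` loop above builds a left-nested tree: each further `::segment` wraps the path parsed so far via `precede`, so `a::b::c` ends up as `PATH(PATH(PATH(a), b), c)`. A small data-level sketch of that shape; the `Path` type here is invented for illustration and is not the crate's syntax tree:

```rust
#[derive(Debug, PartialEq)]
enum Path {
    Segment(String),
    Qualified(Box<Path>, String),
}

fn parse_path(segments: &[&str]) -> Path {
    let mut iter = segments.iter();
    // The first segment forms the innermost PATH node...
    let mut path = Path::Segment(iter.next().expect("at least one segment").to_string());
    // ...and every further segment "precedes" it with a new outer node.
    for seg in iter {
        path = Path::Qualified(Box::new(path), seg.to_string());
    }
    path
}

fn main() {
    let p = parse_path(&["a", "b", "c"]);
    // PATH(PATH(PATH(a), b), c)
    assert_eq!(
        p,
        Path::Qualified(
            Box::new(Path::Qualified(Box::new(Path::Segment("a".into())), "b".into())),
            "c".into()
        )
    );
}
```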
diff --git a/crates/ra_syntax/src/parsing/grammar/patterns.rs b/crates/ra_syntax/src/parsing/grammar/patterns.rs deleted file mode 100644 index 9d7da639d..000000000 --- a/crates/ra_syntax/src/parsing/grammar/patterns.rs +++ /dev/null | |||
@@ -1,248 +0,0 @@ | |||
1 | use super::*; | ||
2 | |||
3 | pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST | ||
4 | .union(paths::PATH_FIRST) | ||
5 | .union(token_set![REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE]); | ||
6 | |||
7 | pub(super) fn pattern(p: &mut Parser) { | ||
8 | pattern_r(p, PAT_RECOVERY_SET) | ||
9 | } | ||
10 | |||
11 | pub(super) fn pattern_r(p: &mut Parser, recovery_set: TokenSet) { | ||
12 | if let Some(lhs) = atom_pat(p, recovery_set) { | ||
13 | // test range_pat | ||
14 | // fn main() { | ||
15 | // match 92 { | ||
16 | // 0 ... 100 => (), | ||
17 | // 101 ..= 200 => (), | ||
18 | // 200 .. 301=> (), | ||
19 | // } | ||
20 | // } | ||
21 | if p.at(DOTDOTDOT) || p.at(DOTDOTEQ) || p.at(DOTDOT) { | ||
22 | let m = lhs.precede(p); | ||
23 | p.bump(); | ||
24 | atom_pat(p, recovery_set); | ||
25 | m.complete(p, RANGE_PAT); | ||
26 | } | ||
27 | } | ||
28 | } | ||
29 | |||
30 | const PAT_RECOVERY_SET: TokenSet = | ||
31 | token_set![LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA]; | ||
32 | |||
33 | fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> { | ||
34 | let la0 = p.nth(0); | ||
35 | let la1 = p.nth(1); | ||
36 | if la0 == REF_KW | ||
37 | || la0 == MUT_KW | ||
38 | || (la0 == IDENT && !(la1 == COLONCOLON || la1 == L_PAREN || la1 == L_CURLY)) | ||
39 | { | ||
40 | return Some(bind_pat(p, true)); | ||
41 | } | ||
42 | if paths::is_path_start(p) { | ||
43 | return Some(path_pat(p)); | ||
44 | } | ||
45 | |||
46 | if is_literal_pat_start(p) { | ||
47 | return Some(literal_pat(p)); | ||
48 | } | ||
49 | |||
50 | let m = match la0 { | ||
51 | UNDERSCORE => placeholder_pat(p), | ||
52 | AMP => ref_pat(p), | ||
53 | L_PAREN => tuple_pat(p), | ||
54 | L_BRACK => slice_pat(p), | ||
55 | _ => { | ||
56 | p.err_recover("expected pattern", recovery_set); | ||
57 | return None; | ||
58 | } | ||
59 | }; | ||
60 | Some(m) | ||
61 | } | ||
62 | |||
63 | fn is_literal_pat_start(p: &mut Parser) -> bool { | ||
64 | p.at(MINUS) && (p.nth(1) == INT_NUMBER || p.nth(1) == FLOAT_NUMBER) | ||
65 | || p.at_ts(expressions::LITERAL_FIRST) | ||
66 | } | ||
67 | |||
68 | // test literal_pattern | ||
69 | // fn main() { | ||
70 | // match () { | ||
71 | // -1 => (), | ||
72 | // 92 => (), | ||
73 | // 'c' => (), | ||
74 | // "hello" => (), | ||
75 | // } | ||
76 | // } | ||
77 | fn literal_pat(p: &mut Parser) -> CompletedMarker { | ||
78 | assert!(is_literal_pat_start(p)); | ||
79 | let m = p.start(); | ||
80 | if p.at(MINUS) { | ||
81 | p.bump(); | ||
82 | } | ||
83 | expressions::literal(p); | ||
84 | m.complete(p, LITERAL_PAT) | ||
85 | } | ||
86 | |||
87 | // test path_part | ||
88 | // fn foo() { | ||
89 | // let foo::Bar = (); | ||
90 | // let ::Bar = (); | ||
91 | // let Bar { .. } = (); | ||
92 | // let Bar(..) = (); | ||
93 | // } | ||
94 | fn path_pat(p: &mut Parser) -> CompletedMarker { | ||
95 | assert!(paths::is_path_start(p)); | ||
96 | let m = p.start(); | ||
97 | paths::expr_path(p); | ||
98 | let kind = match p.current() { | ||
99 | L_PAREN => { | ||
100 | tuple_pat_fields(p); | ||
101 | TUPLE_STRUCT_PAT | ||
102 | } | ||
103 | L_CURLY => { | ||
104 | field_pat_list(p); | ||
105 | STRUCT_PAT | ||
106 | } | ||
107 | _ => PATH_PAT, | ||
108 | }; | ||
109 | m.complete(p, kind) | ||
110 | } | ||
111 | |||
112 | // test tuple_pat_fields | ||
113 | // fn foo() { | ||
114 | // let S() = (); | ||
115 | // let S(_) = (); | ||
116 | // let S(_,) = (); | ||
117 | // let S(_, .. , x) = (); | ||
118 | // } | ||
119 | fn tuple_pat_fields(p: &mut Parser) { | ||
120 | assert!(p.at(L_PAREN)); | ||
121 | p.bump(); | ||
122 | pat_list(p, R_PAREN); | ||
123 | p.expect(R_PAREN); | ||
124 | } | ||
125 | |||
126 | // test field_pat_list | ||
127 | // fn foo() { | ||
128 | // let S {} = (); | ||
129 | // let S { f, ref mut g } = (); | ||
130 | // let S { h: _, ..} = (); | ||
131 | // let S { h: _, } = (); | ||
132 | // } | ||
133 | fn field_pat_list(p: &mut Parser) { | ||
134 | assert!(p.at(L_CURLY)); | ||
135 | let m = p.start(); | ||
136 | p.bump(); | ||
137 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
138 | match p.current() { | ||
139 | DOTDOT => p.bump(), | ||
140 | IDENT if p.nth(1) == COLON => field_pat(p), | ||
141 | L_CURLY => error_block(p, "expected ident"), | ||
142 | _ => { | ||
143 | bind_pat(p, false); | ||
144 | } | ||
145 | } | ||
146 | if !p.at(R_CURLY) { | ||
147 | p.expect(COMMA); | ||
148 | } | ||
149 | } | ||
150 | p.expect(R_CURLY); | ||
151 | m.complete(p, FIELD_PAT_LIST); | ||
152 | } | ||
153 | |||
154 | fn field_pat(p: &mut Parser) { | ||
155 | assert!(p.at(IDENT)); | ||
156 | assert!(p.nth(1) == COLON); | ||
157 | |||
158 | let m = p.start(); | ||
159 | name(p); | ||
160 | p.bump(); | ||
161 | pattern(p); | ||
162 | m.complete(p, FIELD_PAT); | ||
163 | } | ||
164 | |||
165 | // test placeholder_pat | ||
166 | // fn main() { let _ = (); } | ||
167 | fn placeholder_pat(p: &mut Parser) -> CompletedMarker { | ||
168 | assert!(p.at(UNDERSCORE)); | ||
169 | let m = p.start(); | ||
170 | p.bump(); | ||
171 | m.complete(p, PLACEHOLDER_PAT) | ||
172 | } | ||
173 | |||
174 | // test ref_pat | ||
175 | // fn main() { | ||
176 | // let &a = (); | ||
177 | // let &mut b = (); | ||
178 | // } | ||
179 | fn ref_pat(p: &mut Parser) -> CompletedMarker { | ||
180 | assert!(p.at(AMP)); | ||
181 | let m = p.start(); | ||
182 | p.bump(); | ||
183 | p.eat(MUT_KW); | ||
184 | pattern(p); | ||
185 | m.complete(p, REF_PAT) | ||
186 | } | ||
187 | |||
188 | // test tuple_pat | ||
189 | // fn main() { | ||
190 | // let (a, b, ..) = (); | ||
191 | // } | ||
192 | fn tuple_pat(p: &mut Parser) -> CompletedMarker { | ||
193 | assert!(p.at(L_PAREN)); | ||
194 | let m = p.start(); | ||
195 | tuple_pat_fields(p); | ||
196 | m.complete(p, TUPLE_PAT) | ||
197 | } | ||
198 | |||
199 | // test slice_pat | ||
200 | // fn main() { | ||
201 | // let [a, b, ..] = []; | ||
202 | // } | ||
203 | fn slice_pat(p: &mut Parser) -> CompletedMarker { | ||
204 | assert!(p.at(L_BRACK)); | ||
205 | let m = p.start(); | ||
206 | p.bump(); | ||
207 | pat_list(p, R_BRACK); | ||
208 | p.expect(R_BRACK); | ||
209 | m.complete(p, SLICE_PAT) | ||
210 | } | ||
211 | |||
212 | fn pat_list(p: &mut Parser, ket: SyntaxKind) { | ||
213 | while !p.at(EOF) && !p.at(ket) { | ||
214 | match p.current() { | ||
215 | DOTDOT => p.bump(), | ||
216 | _ => { | ||
217 | if !p.at_ts(PATTERN_FIRST) { | ||
218 | p.error("expected a pattern"); | ||
219 | break; | ||
220 | } | ||
221 | pattern(p) | ||
222 | } | ||
223 | } | ||
224 | if !p.at(ket) { | ||
225 | p.expect(COMMA); | ||
226 | } | ||
227 | } | ||
228 | } | ||
229 | |||
230 | // test bind_pat | ||
231 | // fn main() { | ||
232 | // let a = (); | ||
233 | // let mut b = (); | ||
234 | // let ref c = (); | ||
235 | // let ref mut d = (); | ||
236 | // let e @ _ = (); | ||
237 | // let ref mut f @ g @ _ = (); | ||
238 | // } | ||
239 | fn bind_pat(p: &mut Parser, with_at: bool) -> CompletedMarker { | ||
240 | let m = p.start(); | ||
241 | p.eat(REF_KW); | ||
242 | p.eat(MUT_KW); | ||
243 | name(p); | ||
244 | if with_at && p.eat(AT) { | ||
245 | pattern(p); | ||
246 | } | ||
247 | m.complete(p, BIND_PAT) | ||
248 | } | ||
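The `atom_pat` dispatch above decides between a binding and a path-based pattern with one token of lookahead: a lone identifier is a binding unless it is followed by `::`, `(` or `{`. A standalone restatement over toy token and result enums (both invented for the sketch):

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum Tok { RefKw, MutKw, Ident, ColonColon, LParen, LCurly, Other }

#[derive(Debug, PartialEq)]
enum PatKind { Bind, PathLike, Other }

fn classify(la0: Tok, la1: Tok) -> PatKind {
    use Tok::*;
    if la0 == RefKw
        || la0 == MutKw
        || (la0 == Ident && !matches!(la1, ColonColon | LParen | LCurly))
    {
        PatKind::Bind
    } else if la0 == Ident {
        // `Foo::Bar`, `Foo(..)`, `Foo { .. }` all start a path-based pattern
        PatKind::PathLike
    } else {
        PatKind::Other // `_`, `&pat`, tuples, slices, literals, ...
    }
}

fn main() {
    use Tok::*;
    assert_eq!(classify(Ident, Other), PatKind::Bind);         // `let x = ...`
    assert_eq!(classify(MutKw, Ident), PatKind::Bind);         // `let mut x = ...`
    assert_eq!(classify(RefKw, Ident), PatKind::Bind);         // `let ref x = ...`
    assert_eq!(classify(Ident, LCurly), PatKind::PathLike);    // `let S { .. } = ...`
    assert_eq!(classify(Ident, ColonColon), PatKind::PathLike);// `let m::S = ...`
    assert_eq!(classify(Ident, LParen), PatKind::PathLike);    // `let S(..) = ...`
    assert_eq!(classify(Other, Other), PatKind::Other);        // `_`, `&pat`, ...
}
```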
diff --git a/crates/ra_syntax/src/parsing/grammar/type_args.rs b/crates/ra_syntax/src/parsing/grammar/type_args.rs deleted file mode 100644 index f889419c5..000000000 --- a/crates/ra_syntax/src/parsing/grammar/type_args.rs +++ /dev/null | |||
@@ -1,48 +0,0 @@ | |||
1 | use super::*; | ||
2 | |||
3 | pub(super) fn opt_type_arg_list(p: &mut Parser, colon_colon_required: bool) { | ||
4 | let m; | ||
5 | match (colon_colon_required, p.nth(0), p.nth(1)) { | ||
6 | (_, COLONCOLON, L_ANGLE) => { | ||
7 | m = p.start(); | ||
8 | p.bump(); | ||
9 | p.bump(); | ||
10 | } | ||
11 | (false, L_ANGLE, _) => { | ||
12 | m = p.start(); | ||
13 | p.bump(); | ||
14 | } | ||
15 | _ => return, | ||
16 | }; | ||
17 | |||
18 | while !p.at(EOF) && !p.at(R_ANGLE) { | ||
19 | type_arg(p); | ||
20 | if !p.at(R_ANGLE) && !p.expect(COMMA) { | ||
21 | break; | ||
22 | } | ||
23 | } | ||
24 | p.expect(R_ANGLE); | ||
25 | m.complete(p, TYPE_ARG_LIST); | ||
26 | } | ||
27 | |||
28 | // test type_arg | ||
29 | // type A = B<'static, i32, Item=u64>; | ||
30 | fn type_arg(p: &mut Parser) { | ||
31 | let m = p.start(); | ||
32 | match p.current() { | ||
33 | LIFETIME => { | ||
34 | p.bump(); | ||
35 | m.complete(p, LIFETIME_ARG); | ||
36 | } | ||
37 | IDENT if p.nth(1) == EQ => { | ||
38 | name_ref(p); | ||
39 | p.bump(); | ||
40 | types::type_(p); | ||
41 | m.complete(p, ASSOC_TYPE_ARG); | ||
42 | } | ||
43 | _ => { | ||
44 | types::type_(p); | ||
45 | m.complete(p, TYPE_ARG); | ||
46 | } | ||
47 | } | ||
48 | } | ||
diff --git a/crates/ra_syntax/src/parsing/grammar/type_params.rs b/crates/ra_syntax/src/parsing/grammar/type_params.rs deleted file mode 100644 index 40f998682..000000000 --- a/crates/ra_syntax/src/parsing/grammar/type_params.rs +++ /dev/null | |||
@@ -1,175 +0,0 @@ | |||
1 | use super::*; | ||
2 | |||
3 | pub(super) fn opt_type_param_list(p: &mut Parser) { | ||
4 | if !p.at(L_ANGLE) { | ||
5 | return; | ||
6 | } | ||
7 | type_param_list(p); | ||
8 | } | ||
9 | |||
10 | fn type_param_list(p: &mut Parser) { | ||
11 | assert!(p.at(L_ANGLE)); | ||
12 | let m = p.start(); | ||
13 | p.bump(); | ||
14 | |||
15 | while !p.at(EOF) && !p.at(R_ANGLE) { | ||
16 | let m = p.start(); | ||
17 | |||
18 | // test generic_lifetime_type_attribute | ||
19 | // fn foo<#[derive(Lifetime)] 'a, #[derive(Type)] T>(_: &'a T) { | ||
20 | // } | ||
21 | attributes::outer_attributes(p); | ||
22 | |||
23 | match p.current() { | ||
24 | LIFETIME => lifetime_param(p, m), | ||
25 | IDENT => type_param(p, m), | ||
26 | _ => { | ||
27 | m.abandon(p); | ||
28 | p.err_and_bump("expected type parameter") | ||
29 | } | ||
30 | } | ||
31 | if !p.at(R_ANGLE) && !p.expect(COMMA) { | ||
32 | break; | ||
33 | } | ||
34 | } | ||
35 | p.expect(R_ANGLE); | ||
36 | m.complete(p, TYPE_PARAM_LIST); | ||
37 | } | ||
38 | |||
39 | fn lifetime_param(p: &mut Parser, m: Marker) { | ||
40 | assert!(p.at(LIFETIME)); | ||
41 | p.bump(); | ||
42 | if p.at(COLON) { | ||
43 | lifetime_bounds(p); | ||
44 | } | ||
45 | m.complete(p, LIFETIME_PARAM); | ||
46 | } | ||
47 | |||
48 | fn type_param(p: &mut Parser, m: Marker) { | ||
49 | assert!(p.at(IDENT)); | ||
50 | name(p); | ||
51 | if p.at(COLON) { | ||
52 | bounds(p); | ||
53 | } | ||
54 | // test type_param_default | ||
55 | // struct S<T = i32>; | ||
56 | if p.at(EQ) { | ||
57 | p.bump(); | ||
58 | types::type_(p) | ||
59 | } | ||
60 | m.complete(p, TYPE_PARAM); | ||
61 | } | ||
62 | |||
63 | // test type_param_bounds | ||
64 | // struct S<T: 'a + ?Sized + (Copy)>; | ||
65 | pub(super) fn bounds(p: &mut Parser) { | ||
66 | assert!(p.at(COLON)); | ||
67 | p.bump(); | ||
68 | bounds_without_colon(p); | ||
69 | } | ||
70 | |||
71 | fn lifetime_bounds(p: &mut Parser) { | ||
72 | assert!(p.at(COLON)); | ||
73 | p.bump(); | ||
74 | while p.at(LIFETIME) { | ||
75 | p.bump(); | ||
76 | if !p.eat(PLUS) { | ||
77 | break; | ||
78 | } | ||
79 | } | ||
80 | } | ||
81 | |||
82 | pub(super) fn bounds_without_colon(p: &mut Parser) { | ||
83 | loop { | ||
84 | let has_paren = p.eat(L_PAREN); | ||
85 | p.eat(QUESTION); | ||
86 | match p.current() { | ||
87 | LIFETIME => p.bump(), | ||
88 | FOR_KW => types::for_type(p), | ||
89 | _ if paths::is_path_start(p) => types::path_type(p), | ||
90 | _ => break, | ||
91 | } | ||
92 | if has_paren { | ||
93 | p.expect(R_PAREN); | ||
94 | } | ||
95 | if !p.eat(PLUS) { | ||
96 | break; | ||
97 | } | ||
98 | } | ||
99 | } | ||
100 | |||
101 | // test where_clause | ||
102 | // fn foo() | ||
103 | // where | ||
104 | // 'a: 'b + 'c, | ||
105 | // T: Clone + Copy + 'static, | ||
106 | // Iterator::Item: 'a, | ||
107 | // <T as Iterator>::Item: 'a | ||
108 | // {} | ||
109 | pub(super) fn opt_where_clause(p: &mut Parser) { | ||
110 | if !p.at(WHERE_KW) { | ||
111 | return; | ||
112 | } | ||
113 | let m = p.start(); | ||
114 | p.bump(); | ||
115 | |||
116 | while is_where_predicate(p) { | ||
117 | where_predicate(p); | ||
118 | |||
119 | let comma = p.eat(COMMA); | ||
120 | |||
121 | if is_where_clause_end(p) { | ||
122 | break; | ||
123 | } | ||
124 | |||
125 | if !comma { | ||
126 | p.error("expected comma"); | ||
127 | } | ||
128 | } | ||
129 | |||
130 | m.complete(p, WHERE_CLAUSE); | ||
131 | } | ||
132 | |||
133 | fn is_where_predicate(p: &mut Parser) -> bool { | ||
134 | match p.current() { | ||
135 | LIFETIME => true, | ||
136 | IMPL_KW => false, | ||
137 | token => types::TYPE_FIRST.contains(token), | ||
138 | } | ||
139 | } | ||
140 | |||
141 | fn is_where_clause_end(p: &mut Parser) -> bool { | ||
142 | p.current() == L_CURLY || p.current() == SEMI || p.current() == EQ | ||
143 | } | ||
144 | |||
145 | fn where_predicate(p: &mut Parser) { | ||
146 | let m = p.start(); | ||
147 | match p.current() { | ||
148 | LIFETIME => { | ||
149 | p.bump(); | ||
150 | if p.at(COLON) { | ||
151 | lifetime_bounds(p); | ||
152 | } else { | ||
153 | p.error("expected colon"); | ||
154 | } | ||
155 | } | ||
156 | IMPL_KW => { | ||
157 | p.error("expected lifetime or type"); | ||
158 | } | ||
159 | _ => { | ||
160 | // test where_pred_for | ||
161 | // fn test<F>() | ||
162 | // where | ||
163 | // for<'a> F: Fn(&'a str) | ||
164 | // { } | ||
165 | types::type_(p); | ||
166 | |||
167 | if p.at(COLON) { | ||
168 | bounds(p); | ||
169 | } else { | ||
170 | p.error("expected colon"); | ||
171 | } | ||
172 | } | ||
173 | } | ||
174 | m.complete(p, WHERE_PRED); | ||
175 | } | ||
diff --git a/crates/ra_syntax/src/parsing/grammar/types.rs b/crates/ra_syntax/src/parsing/grammar/types.rs deleted file mode 100644 index adc189a29..000000000 --- a/crates/ra_syntax/src/parsing/grammar/types.rs +++ /dev/null | |||
@@ -1,278 +0,0 @@ | |||
1 | use super::*; | ||
2 | |||
3 | pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![ | ||
4 | L_PAREN, EXCL, STAR, L_BRACK, AMP, UNDERSCORE, FN_KW, UNSAFE_KW, EXTERN_KW, FOR_KW, IMPL_KW, | ||
5 | DYN_KW, L_ANGLE, | ||
6 | ]); | ||
7 | |||
8 | const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA]; | ||
9 | |||
10 | pub(super) fn type_(p: &mut Parser) { | ||
11 | type_with_bounds_cond(p, true); | ||
12 | } | ||
13 | |||
14 | pub(super) fn type_no_bounds(p: &mut Parser) { | ||
15 | type_with_bounds_cond(p, false); | ||
16 | } | ||
17 | |||
18 | fn type_with_bounds_cond(p: &mut Parser, allow_bounds: bool) { | ||
19 | match p.current() { | ||
20 | L_PAREN => paren_or_tuple_type(p), | ||
21 | EXCL => never_type(p), | ||
22 | STAR => pointer_type(p), | ||
23 | L_BRACK => array_or_slice_type(p), | ||
24 | AMP => reference_type(p), | ||
25 | UNDERSCORE => placeholder_type(p), | ||
26 | FN_KW | UNSAFE_KW | EXTERN_KW => fn_pointer_type(p), | ||
27 | FOR_KW => for_type(p), | ||
28 | IMPL_KW => impl_trait_type(p), | ||
29 | DYN_KW => dyn_trait_type(p), | ||
30 | // Some path types are not allowed to have bounds (no plus) | ||
31 | L_ANGLE => path_type_(p, allow_bounds), | ||
32 | _ if paths::is_path_start(p) => path_or_macro_type_(p, allow_bounds), | ||
33 | _ => { | ||
34 | p.err_recover("expected type", TYPE_RECOVERY_SET); | ||
35 | } | ||
36 | } | ||
37 | } | ||
38 | |||
39 | pub(super) fn ascription(p: &mut Parser) { | ||
40 | p.expect(COLON); | ||
41 | type_(p) | ||
42 | } | ||
43 | |||
44 | fn paren_or_tuple_type(p: &mut Parser) { | ||
45 | assert!(p.at(L_PAREN)); | ||
46 | let m = p.start(); | ||
47 | p.bump(); | ||
48 | let mut n_types: u32 = 0; | ||
49 | let mut trailing_comma: bool = false; | ||
50 | while !p.at(EOF) && !p.at(R_PAREN) { | ||
51 | n_types += 1; | ||
52 | type_(p); | ||
53 | if p.eat(COMMA) { | ||
54 | trailing_comma = true; | ||
55 | } else { | ||
56 | trailing_comma = false; | ||
57 | break; | ||
58 | } | ||
59 | } | ||
60 | p.expect(R_PAREN); | ||
61 | |||
62 | let kind = if n_types == 1 && !trailing_comma { | ||
63 | // test paren_type | ||
64 | // type T = (i32); | ||
65 | PAREN_TYPE | ||
66 | } else { | ||
67 | // test unit_type | ||
68 | // type T = (); | ||
69 | |||
70 | // test singleton_tuple_type | ||
71 | // type T = (i32,); | ||
72 | TUPLE_TYPE | ||
73 | }; | ||
74 | m.complete(p, kind); | ||
75 | } | ||
76 | |||
77 | // test never_type | ||
78 | // type Never = !; | ||
79 | fn never_type(p: &mut Parser) { | ||
80 | assert!(p.at(EXCL)); | ||
81 | let m = p.start(); | ||
82 | p.bump(); | ||
83 | m.complete(p, NEVER_TYPE); | ||
84 | } | ||
85 | |||
86 | fn pointer_type(p: &mut Parser) { | ||
87 | assert!(p.at(STAR)); | ||
88 | let m = p.start(); | ||
89 | p.bump(); | ||
90 | |||
91 | match p.current() { | ||
92 | // test pointer_type_mut | ||
93 | // type M = *mut (); | ||
94 | // type C = *const (); | ||
95 | MUT_KW | CONST_KW => p.bump(), | ||
96 | _ => { | ||
97 | // test_err pointer_type_no_mutability | ||
98 | // type T = *(); | ||
99 | p.error( | ||
100 | "expected mut or const in raw pointer type \ | ||
101 | (use `*mut T` or `*const T` as appropriate)", | ||
102 | ); | ||
103 | } | ||
104 | }; | ||
105 | |||
106 | type_no_bounds(p); | ||
107 | m.complete(p, POINTER_TYPE); | ||
108 | } | ||
109 | |||
110 | fn array_or_slice_type(p: &mut Parser) { | ||
111 | assert!(p.at(L_BRACK)); | ||
112 | let m = p.start(); | ||
113 | p.bump(); | ||
114 | |||
115 | type_(p); | ||
116 | let kind = match p.current() { | ||
117 | // test slice_type | ||
118 | // type T = [()]; | ||
119 | R_BRACK => { | ||
120 | p.bump(); | ||
121 | SLICE_TYPE | ||
122 | } | ||
123 | |||
124 | // test array_type | ||
125 | // type T = [(); 92]; | ||
126 | SEMI => { | ||
127 | p.bump(); | ||
128 | expressions::expr(p); | ||
129 | p.expect(R_BRACK); | ||
130 | ARRAY_TYPE | ||
131 | } | ||
132 | // test_err array_type_missing_semi | ||
133 | // type T = [() 92]; | ||
134 | _ => { | ||
135 | p.error("expected `;` or `]`"); | ||
136 | SLICE_TYPE | ||
137 | } | ||
138 | }; | ||
139 | m.complete(p, kind); | ||
140 | } | ||
141 | |||
142 | // test reference_type | ||
143 | // type A = &(); | ||
144 | // type B = &'static (); | ||
145 | // type C = &mut (); | ||
146 | fn reference_type(p: &mut Parser) { | ||
147 | assert!(p.at(AMP)); | ||
148 | let m = p.start(); | ||
149 | p.bump(); | ||
150 | p.eat(LIFETIME); | ||
151 | p.eat(MUT_KW); | ||
152 | type_no_bounds(p); | ||
153 | m.complete(p, REFERENCE_TYPE); | ||
154 | } | ||
155 | |||
156 | // test placeholder_type | ||
157 | // type Placeholder = _; | ||
158 | fn placeholder_type(p: &mut Parser) { | ||
159 | assert!(p.at(UNDERSCORE)); | ||
160 | let m = p.start(); | ||
161 | p.bump(); | ||
162 | m.complete(p, PLACEHOLDER_TYPE); | ||
163 | } | ||
164 | |||
165 | // test fn_pointer_type | ||
166 | // type A = fn(); | ||
167 | // type B = unsafe fn(); | ||
168 | // type C = unsafe extern "C" fn(); | ||
169 | fn fn_pointer_type(p: &mut Parser) { | ||
170 | let m = p.start(); | ||
171 | p.eat(UNSAFE_KW); | ||
172 | if p.at(EXTERN_KW) { | ||
173 | abi(p); | ||
174 | } | ||
175 | // test_err fn_pointer_type_missing_fn | ||
176 | // type F = unsafe (); | ||
177 | if !p.eat(FN_KW) { | ||
178 | m.abandon(p); | ||
179 | p.error("expected `fn`"); | ||
180 | return; | ||
181 | } | ||
182 | if p.at(L_PAREN) { | ||
183 | params::param_list_opt_patterns(p); | ||
184 | } else { | ||
185 | p.error("expected parameters") | ||
186 | } | ||
187 | // test fn_pointer_type_with_ret | ||
188 | // type F = fn() -> (); | ||
189 | opt_fn_ret_type(p); | ||
190 | m.complete(p, FN_POINTER_TYPE); | ||
191 | } | ||
192 | |||
193 | pub(super) fn for_binder(p: &mut Parser) { | ||
194 | assert!(p.at(FOR_KW)); | ||
195 | p.bump(); | ||
196 | if p.at(L_ANGLE) { | ||
197 | type_params::opt_type_param_list(p); | ||
198 | } else { | ||
199 | p.error("expected `<`"); | ||
200 | } | ||
201 | } | ||
202 | |||
203 | // test for_type | ||
204 | // type A = for<'a> fn() -> (); | ||
205 | pub(super) fn for_type(p: &mut Parser) { | ||
206 | assert!(p.at(FOR_KW)); | ||
207 | let m = p.start(); | ||
208 | for_binder(p); | ||
209 | match p.current() { | ||
210 | FN_KW | UNSAFE_KW | EXTERN_KW => fn_pointer_type(p), | ||
211 | _ if paths::is_path_start(p) => path_type_(p, false), | ||
212 | _ => p.error("expected a path"), | ||
213 | } | ||
214 | m.complete(p, FOR_TYPE); | ||
215 | } | ||
216 | |||
217 | // test impl_trait_type | ||
218 | // type A = impl Iterator<Item=Foo<'a>> + 'a; | ||
219 | fn impl_trait_type(p: &mut Parser) { | ||
220 | assert!(p.at(IMPL_KW)); | ||
221 | let m = p.start(); | ||
222 | p.bump(); | ||
223 | type_params::bounds_without_colon(p); | ||
224 | m.complete(p, IMPL_TRAIT_TYPE); | ||
225 | } | ||
226 | |||
227 | // test dyn_trait_type | ||
228 | // type A = dyn Iterator<Item=Foo<'a>> + 'a; | ||
229 | fn dyn_trait_type(p: &mut Parser) { | ||
230 | assert!(p.at(DYN_KW)); | ||
231 | let m = p.start(); | ||
232 | p.bump(); | ||
233 | type_params::bounds_without_colon(p); | ||
234 | m.complete(p, DYN_TRAIT_TYPE); | ||
235 | } | ||
236 | |||
237 | // test path_type | ||
238 | // type A = Foo; | ||
239 | // type B = ::Foo; | ||
240 | // type C = self::Foo; | ||
241 | // type D = super::Foo; | ||
242 | pub(super) fn path_type(p: &mut Parser) { | ||
243 | path_type_(p, true) | ||
244 | } | ||
245 | |||
246 | // test macro_call_type | ||
247 | // type A = foo!(); | ||
248 | // type B = crate::foo!(); | ||
249 | fn path_or_macro_type_(p: &mut Parser, allow_bounds: bool) { | ||
250 | assert!(paths::is_path_start(p) || p.at(L_ANGLE)); | ||
251 | let m = p.start(); | ||
252 | paths::type_path(p); | ||
253 | |||
254 | let kind = if p.at(EXCL) { | ||
255 | items::macro_call_after_excl(p); | ||
256 | MACRO_CALL | ||
257 | } else { | ||
258 | PATH_TYPE | ||
259 | }; | ||
260 | |||
261 | if allow_bounds && p.eat(PLUS) { | ||
262 | type_params::bounds_without_colon(p); | ||
263 | } | ||
264 | |||
265 | m.complete(p, kind); | ||
266 | } | ||
267 | |||
268 | pub(super) fn path_type_(p: &mut Parser, allow_bounds: bool) { | ||
269 | assert!(paths::is_path_start(p) || p.at(L_ANGLE)); | ||
270 | let m = p.start(); | ||
271 | paths::type_path(p); | ||
272 | // test path_type_with_bounds | ||
273 | // fn foo() -> Box<T + 'f> {} | ||
274 | if allow_bounds && p.eat(PLUS) { | ||
275 | type_params::bounds_without_colon(p); | ||
276 | } | ||
277 | m.complete(p, PATH_TYPE); | ||
278 | } | ||
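`paren_or_tuple_type` above picks the node kind purely from how many types it saw and whether the last one carried a trailing comma: `(T)` is a parenthesised type, while `()`, `(T,)` and `(T, U)` are tuples. A data-level restatement of just that decision (the enum below is a stand-in for the real `SyntaxKind`s):

```rust
#[derive(Debug, PartialEq)]
enum Kind { ParenType, TupleType }

fn paren_or_tuple(n_types: u32, trailing_comma: bool) -> Kind {
    if n_types == 1 && !trailing_comma {
        Kind::ParenType // e.g. `(i32)`
    } else {
        Kind::TupleType // e.g. `()`, `(i32,)`, `(i32, u8)`
    }
}

fn main() {
    assert_eq!(paren_or_tuple(1, false), Kind::ParenType); // (i32)
    assert_eq!(paren_or_tuple(0, false), Kind::TupleType); // ()
    assert_eq!(paren_or_tuple(1, true), Kind::TupleType);  // (i32,)
    assert_eq!(paren_or_tuple(2, false), Kind::TupleType); // (i32, u8)
}
```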
diff --git a/crates/ra_syntax/src/parsing/input.rs b/crates/ra_syntax/src/parsing/input.rs index 96c03bb11..31c6a3b9b 100644 --- a/crates/ra_syntax/src/parsing/input.rs +++ b/crates/ra_syntax/src/parsing/input.rs | |||
@@ -1,11 +1,30 @@ | |||
1 | use ra_parser::TokenSource; | ||
2 | |||
1 | use crate::{ | 3 | use crate::{ |
2 | SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit, | 4 | SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit, |
3 | parsing::{ | 5 | parsing::lexer::Token, |
4 | TokenSource, | ||
5 | lexer::Token, | ||
6 | }, | ||
7 | }; | 6 | }; |
8 | 7 | ||
8 | pub(crate) struct ParserInput<'t> { | ||
9 | text: &'t str, | ||
10 | /// start position of each token (except whitespace and comments) | ||
11 | /// ```non-rust | ||
12 | /// struct Foo; | ||
13 | /// ^------^--- | ||
14 | /// | | ^- | ||
15 | /// 0 7 10 | ||
16 | /// ``` | ||
17 | /// (token, start_offset): `[(struct, 0), (Foo, 7), (;, 10)]` | ||
18 | start_offsets: Vec<TextUnit>, | ||
19 | /// non-whitespace/comment tokens | ||
20 | /// ```non-rust | ||
21 | /// struct Foo {} | ||
22 | /// ^^^^^^ ^^^ ^^ | ||
23 | /// ``` | ||
24 | /// tokens: `[struct, Foo, {, }]` | ||
25 | tokens: Vec<Token>, | ||
26 | } | ||
27 | |||
9 | impl<'t> TokenSource for ParserInput<'t> { | 28 | impl<'t> TokenSource for ParserInput<'t> { |
10 | fn token_kind(&self, pos: usize) -> SyntaxKind { | 29 | fn token_kind(&self, pos: usize) -> SyntaxKind { |
11 | if !(pos < self.tokens.len()) { | 30 | if !(pos < self.tokens.len()) { |
@@ -29,26 +48,6 @@ impl<'t> TokenSource for ParserInput<'t> { | |||
29 | } | 48 | } |
30 | } | 49 | } |
31 | 50 | ||
32 | pub(crate) struct ParserInput<'t> { | ||
33 | text: &'t str, | ||
34 | /// start position of each token (except whitespace and comments) | ||
35 | /// ```non-rust | ||
36 | /// struct Foo; | ||
37 | /// ^------^--- | ||
38 | /// | | ^- | ||
39 | /// 0 7 10 | ||
40 | /// ``` | ||
41 | /// (token, start_offset): `[(struct, 0), (Foo, 7), (;, 10)]` | ||
42 | start_offsets: Vec<TextUnit>, | ||
43 | /// non-whitespace/comment tokens | ||
44 | /// ```non-rust | ||
45 | /// struct Foo {} | ||
46 | /// ^^^^^^ ^^^ ^^ | ||
47 | /// ``` | ||
48 | /// tokens: `[struct, Foo, {, }]` | ||
49 | tokens: Vec<Token>, | ||
50 | } | ||
51 | |||
52 | impl<'t> ParserInput<'t> { | 51 | impl<'t> ParserInput<'t> { |
53 | /// Generate input from tokens (excluding comments and whitespace). | 52 | /// Generate input from tokens (excluding comments and whitespace). |
54 | pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> ParserInput<'t> { | 53 | pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> ParserInput<'t> { |
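A sketch of how a `ParserInput`-style structure can be assembled from raw lexer output: trivia still advances the text offset but is not handed to the parser, and a parallel `start_offsets` vector records where each surviving token begins. The `Kind`/`RawToken` types below are simplified stand-ins, not the crate's `Token`:

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum Kind { Ident, Semi, Whitespace, Comment }

#[derive(Clone, Copy)]
struct RawToken { kind: Kind, len: usize }

fn build_input(raw: &[RawToken]) -> (Vec<Kind>, Vec<usize>) {
    let mut kinds = Vec::new();
    let mut start_offsets = Vec::new();
    let mut offset = 0;
    for tok in raw {
        // Whitespace and comments contribute to offsets but are not fed to
        // the parser, matching the "non-whitespace/comment tokens" field above.
        if tok.kind != Kind::Whitespace && tok.kind != Kind::Comment {
            kinds.push(tok.kind);
            start_offsets.push(offset);
        }
        offset += tok.len;
    }
    (kinds, start_offsets)
}

fn main() {
    // "struct Foo;" lexed as keyword(6) whitespace(1) ident(3) semi(1)
    let raw = [
        RawToken { kind: Kind::Ident, len: 6 },
        RawToken { kind: Kind::Whitespace, len: 1 },
        RawToken { kind: Kind::Ident, len: 3 },
        RawToken { kind: Kind::Semi, len: 1 },
    ];
    let (kinds, offsets) = build_input(&raw);
    assert_eq!(kinds, vec![Kind::Ident, Kind::Ident, Kind::Semi]);
    assert_eq!(offsets, vec![0, 7, 10]); // matches `[(struct, 0), (Foo, 7), (;, 10)]` above
}
```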
diff --git a/crates/ra_syntax/src/parsing/parser.rs b/crates/ra_syntax/src/parsing/parser.rs deleted file mode 100644 index 923b0f2b2..000000000 --- a/crates/ra_syntax/src/parsing/parser.rs +++ /dev/null | |||
@@ -1,270 +0,0 @@ | |||
1 | use std::cell::Cell; | ||
2 | |||
3 | use drop_bomb::DropBomb; | ||
4 | |||
5 | use crate::{ | ||
6 | SyntaxKind::{self, ERROR, EOF, TOMBSTONE}, | ||
7 | parsing::{ | ||
8 | TokenSource, ParseError, | ||
9 | token_set::TokenSet, | ||
10 | event::Event, | ||
11 | }, | ||
12 | }; | ||
13 | |||
14 | /// The `Parser` struct provides the low-level API for | ||
15 | /// navigating through the stream of tokens and | ||
16 | /// constructing the parse tree. The actual parsing | ||
17 | /// happens in the `grammar` module. | ||
18 | /// | ||
19 | /// However, the result of this `Parser` is not a real | ||
20 | /// tree, but rather a flat stream of events of the form | ||
21 | /// "start expression, consume number literal, | ||
22 | /// finish expression". See `Event` docs for more. | ||
23 | pub(crate) struct Parser<'t> { | ||
24 | token_source: &'t dyn TokenSource, | ||
25 | token_pos: usize, | ||
26 | events: Vec<Event>, | ||
27 | steps: Cell<u32>, | ||
28 | } | ||
29 | |||
30 | impl<'t> Parser<'t> { | ||
31 | pub(super) fn new(token_source: &'t dyn TokenSource) -> Parser<'t> { | ||
32 | Parser { token_source, token_pos: 0, events: Vec::new(), steps: Cell::new(0) } | ||
33 | } | ||
34 | |||
35 | pub(crate) fn finish(self) -> Vec<Event> { | ||
36 | self.events | ||
37 | } | ||
38 | |||
39 | /// Returns the kind of the current token. | ||
40 | /// If parser has already reached the end of input, | ||
41 | /// the special `EOF` kind is returned. | ||
42 | pub(crate) fn current(&self) -> SyntaxKind { | ||
43 | self.nth(0) | ||
44 | } | ||
45 | |||
46 | /// Returns the kinds of the current two tokens, if they are not separated | ||
47 | /// by trivia. | ||
48 | /// | ||
49 | /// Useful for parsing things like `>>`. | ||
50 | pub(crate) fn current2(&self) -> Option<(SyntaxKind, SyntaxKind)> { | ||
51 | let c1 = self.token_source.token_kind(self.token_pos); | ||
52 | let c2 = self.token_source.token_kind(self.token_pos + 1); | ||
53 | if self.token_source.is_token_joint_to_next(self.token_pos) { | ||
54 | Some((c1, c2)) | ||
55 | } else { | ||
56 | None | ||
57 | } | ||
58 | } | ||
59 | |||
60 | /// Returns the kinds of the current three tokens, if they are not separated | ||
61 | /// by trivia. | ||
62 | /// | ||
63 | /// Useful for parsing things like `=>>`. | ||
64 | pub(crate) fn current3(&self) -> Option<(SyntaxKind, SyntaxKind, SyntaxKind)> { | ||
65 | let c1 = self.token_source.token_kind(self.token_pos); | ||
66 | let c2 = self.token_source.token_kind(self.token_pos + 1); | ||
67 | let c3 = self.token_source.token_kind(self.token_pos + 2); | ||
68 | if self.token_source.is_token_joint_to_next(self.token_pos) | ||
69 | && self.token_source.is_token_joint_to_next(self.token_pos + 1) | ||
70 | { | ||
71 | Some((c1, c2, c3)) | ||
72 | } else { | ||
73 | None | ||
74 | } | ||
75 | } | ||
76 | |||
77 | /// Lookahead operation: returns the kind of the next nth | ||
78 | /// token. | ||
79 | pub(crate) fn nth(&self, n: usize) -> SyntaxKind { | ||
80 | let steps = self.steps.get(); | ||
81 | assert!(steps <= 10_000_000, "the parser seems stuck"); | ||
82 | self.steps.set(steps + 1); | ||
83 | self.token_source.token_kind(self.token_pos + n) | ||
84 | } | ||
85 | |||
86 | /// Checks if the current token is `kind`. | ||
87 | pub(crate) fn at(&self, kind: SyntaxKind) -> bool { | ||
88 | self.current() == kind | ||
89 | } | ||
90 | |||
91 | /// Checks if the current token is in `kinds`. | ||
92 | pub(crate) fn at_ts(&self, kinds: TokenSet) -> bool { | ||
93 | kinds.contains(self.current()) | ||
94 | } | ||
95 | |||
96 | /// Checks if the current token is contextual keyword with text `t`. | ||
97 | pub(crate) fn at_contextual_kw(&self, kw: &str) -> bool { | ||
98 | self.token_source.is_keyword(self.token_pos, kw) | ||
99 | } | ||
100 | |||
101 | /// Starts a new node in the syntax tree. All nodes and tokens | ||
102 | /// consumed between the `start` and the corresponding `Marker::complete` | ||
103 | /// belong to the same node. | ||
104 | pub(crate) fn start(&mut self) -> Marker { | ||
105 | let pos = self.events.len() as u32; | ||
106 | self.push_event(Event::tombstone()); | ||
107 | Marker::new(pos) | ||
108 | } | ||
109 | |||
110 | /// Advances the parser by one token unconditionally. | ||
111 | pub(crate) fn bump(&mut self) { | ||
112 | let kind = self.nth(0); | ||
113 | if kind == EOF { | ||
114 | return; | ||
115 | } | ||
116 | self.do_bump(kind, 1); | ||
117 | } | ||
118 | |||
119 | /// Advances the parser by one token, remapping its kind. | ||
120 | /// This is useful to create contextual keywords from | ||
121 | /// identifiers. For example, the lexer creates an `union` | ||
122 | /// *identifier* token, but the parser remaps it to the | ||
123 | /// `union` keyword, and keyword is what ends up in the | ||
124 | /// final tree. | ||
125 | pub(crate) fn bump_remap(&mut self, kind: SyntaxKind) { | ||
126 | if self.nth(0) == EOF { | ||
127 | // TODO: panic!? | ||
128 | return; | ||
129 | } | ||
130 | self.do_bump(kind, 1); | ||
131 | } | ||
132 | |||
133 | /// Advances the parser by `n` tokens, merging them into a single token | ||
134 | /// of the given kind. This is useful to create compound tokens from parts. | ||
135 | /// For example, an `<<` token is two consecutive remapped `<` tokens. | ||
136 | pub(crate) fn bump_compound(&mut self, kind: SyntaxKind, n: u8) { | ||
137 | self.do_bump(kind, n); | ||
138 | } | ||
139 | |||
140 | /// Emit error with the `message` | ||
141 | /// TODO: this should be much more fancy and support | ||
142 | /// structured errors with spans and notes, like rustc | ||
143 | /// does. | ||
144 | pub(crate) fn error<T: Into<String>>(&mut self, message: T) { | ||
145 | let msg = ParseError(message.into()); | ||
146 | self.push_event(Event::Error { msg }) | ||
147 | } | ||
148 | |||
149 | /// Consume the next token if `kind` matches. | ||
150 | pub(crate) fn eat(&mut self, kind: SyntaxKind) -> bool { | ||
151 | if !self.at(kind) { | ||
152 | return false; | ||
153 | } | ||
154 | self.bump(); | ||
155 | true | ||
156 | } | ||
157 | |||
158 | /// Consume the next token if it is `kind` or emit an error | ||
159 | /// otherwise. | ||
160 | pub(crate) fn expect(&mut self, kind: SyntaxKind) -> bool { | ||
161 | if self.eat(kind) { | ||
162 | return true; | ||
163 | } | ||
164 | self.error(format!("expected {:?}", kind)); | ||
165 | false | ||
166 | } | ||
167 | |||
168 | /// Create an error node and consume the next token. | ||
169 | pub(crate) fn err_and_bump(&mut self, message: &str) { | ||
170 | self.err_recover(message, TokenSet::empty()); | ||
171 | } | ||
172 | |||
173 | /// Create an error node and consume the next token. | ||
174 | pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) { | ||
175 | if self.at(SyntaxKind::L_CURLY) || self.at(SyntaxKind::R_CURLY) || self.at_ts(recovery) { | ||
176 | self.error(message); | ||
177 | } else { | ||
178 | let m = self.start(); | ||
179 | self.error(message); | ||
180 | self.bump(); | ||
181 | m.complete(self, ERROR); | ||
182 | }; | ||
183 | } | ||
184 | |||
185 | fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) { | ||
186 | self.token_pos += usize::from(n_raw_tokens); | ||
187 | self.push_event(Event::Token { kind, n_raw_tokens }); | ||
188 | } | ||
189 | |||
190 | fn push_event(&mut self, event: Event) { | ||
191 | self.events.push(event) | ||
192 | } | ||
193 | } | ||
194 | |||
195 | /// See `Parser::start`. | ||
196 | pub(crate) struct Marker { | ||
197 | pos: u32, | ||
198 | bomb: DropBomb, | ||
199 | } | ||
200 | |||
201 | impl Marker { | ||
202 | fn new(pos: u32) -> Marker { | ||
203 | Marker { pos, bomb: DropBomb::new("Marker must be either completed or abandoned") } | ||
204 | } | ||
205 | |||
206 | /// Finishes the syntax tree node and assigns `kind` to it, | ||
207 | /// and creates a `CompletedMarker` for possible future | ||
208 | /// operations like `.precede()` to deal with forward_parent. | ||
209 | pub(crate) fn complete(mut self, p: &mut Parser, kind: SyntaxKind) -> CompletedMarker { | ||
210 | self.bomb.defuse(); | ||
211 | let idx = self.pos as usize; | ||
212 | match p.events[idx] { | ||
213 | Event::Start { kind: ref mut slot, .. } => { | ||
214 | *slot = kind; | ||
215 | } | ||
216 | _ => unreachable!(), | ||
217 | } | ||
218 | p.push_event(Event::Finish); | ||
219 | CompletedMarker::new(self.pos, kind) | ||
220 | } | ||
221 | |||
222 | /// Abandons the syntax tree node. All its children | ||
223 | /// are attached to its parent instead. | ||
224 | pub(crate) fn abandon(mut self, p: &mut Parser) { | ||
225 | self.bomb.defuse(); | ||
226 | let idx = self.pos as usize; | ||
227 | if idx == p.events.len() - 1 { | ||
228 | match p.events.pop() { | ||
229 | Some(Event::Start { kind: TOMBSTONE, forward_parent: None }) => (), | ||
230 | _ => unreachable!(), | ||
231 | } | ||
232 | } | ||
233 | } | ||
234 | } | ||
235 | |||
236 | pub(crate) struct CompletedMarker(u32, SyntaxKind); | ||
237 | |||
238 | impl CompletedMarker { | ||
239 | fn new(pos: u32, kind: SyntaxKind) -> Self { | ||
240 | CompletedMarker(pos, kind) | ||
241 | } | ||
242 | |||
243 | /// This method allows creating a new node which starts | ||
244 | /// *before* the current one. That is, the parser could start | ||
245 | /// node `A`, then complete it, and then after parsing the | ||
246 | /// whole `A`, decide that it should have started some node | ||
247 | /// `B` before starting `A`. `precede` allows doing exactly | ||
248 | /// that. See also docs about `forward_parent` in `Event::Start`. | ||
249 | /// | ||
250 | /// Given the completed events `[START, FINISH]` and the corresponding | ||
251 | /// `CompletedMarker(pos: 0, _)`, `precede` appends a new `START` event, | ||
252 | /// giving `[START, FINISH, NEWSTART]`, and then marks `NEWSTART` as | ||
253 | /// `START`'s parent by saving `START`'s relative distance to `NEWSTART` | ||
254 | /// in forward_parent (2 in this case). | ||
255 | pub(crate) fn precede(self, p: &mut Parser) -> Marker { | ||
256 | let new_pos = p.start(); | ||
257 | let idx = self.0 as usize; | ||
258 | match p.events[idx] { | ||
259 | Event::Start { ref mut forward_parent, .. } => { | ||
260 | *forward_parent = Some(new_pos.pos - self.0); | ||
261 | } | ||
262 | _ => unreachable!(), | ||
263 | } | ||
264 | new_pos | ||
265 | } | ||
266 | |||
267 | pub(crate) fn kind(&self) -> SyntaxKind { | ||
268 | self.1 | ||
269 | } | ||
270 | } | ||
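A compact model of the event machinery above: `start` pushes a tombstone `Start`, `complete` fills in its kind and pushes `Finish`, and `precede` starts a new node and records the distance to it in the completed node's `forward_parent`. Kinds are plain strings here, and the scenario (a literal later re-parented under a `BIN_EXPR` for `1 + 2`) only illustrates the mechanism, not the crate's actual grammar:

```rust
#[derive(Debug)]
enum Event {
    Start { kind: &'static str, forward_parent: Option<u32> },
    Token(&'static str),
    Finish,
}

fn main() {
    let mut events = Vec::new();

    // Parse the literal `1` first: start a node, consume a token, complete it.
    let literal_pos = events.len() as u32;
    events.push(Event::Start { kind: "TOMBSTONE", forward_parent: None });
    events.push(Event::Token("1"));
    if let Event::Start { kind, .. } = &mut events[literal_pos as usize] {
        *kind = "LITERAL"; // Marker::complete fills in the kind...
    }
    events.push(Event::Finish); // ...and pushes Finish.

    // Only now do we learn the literal is the lhs of `1 + 2`, so a BIN_EXPR
    // node must start *before* it: this is CompletedMarker::precede.
    let bin_pos = events.len() as u32;
    events.push(Event::Start { kind: "TOMBSTONE", forward_parent: None });
    if let Event::Start { forward_parent, .. } = &mut events[literal_pos as usize] {
        // The literal's Start now points forward at the node that should
        // actually be started first when the tree is built.
        *forward_parent = Some(bin_pos - literal_pos);
    }
    events.push(Event::Token("+"));
    events.push(Event::Token("2"));
    if let Event::Start { kind, .. } = &mut events[bin_pos as usize] {
        *kind = "BIN_EXPR";
    }
    events.push(Event::Finish);

    // Start { kind: "LITERAL", forward_parent: Some(3) }, Token("1"), Finish,
    // Start { kind: "BIN_EXPR", .. }, Token("+"), Token("2"), Finish
    for e in &events {
        println!("{:?}", e);
    }
}
```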
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs index f2d218ab9..19d8adcfb 100644 --- a/crates/ra_syntax/src/parsing/reparsing.rs +++ b/crates/ra_syntax/src/parsing/reparsing.rs | |||
@@ -1,18 +1,25 @@ | |||
1 | //! Implementation of incremental re-parsing. | ||
2 | //! | ||
3 | //! We use two simple strategies for this: | ||
4 | //! - if the edit modifies only a single token (like changing an identifier's | ||
5 | //! letter), we replace only this token. | ||
6 | //! - otherwise, we search for the nearest `{}` block which contains the edit | ||
7 | //! and try to parse only this block. | ||
8 | |||
9 | use ra_text_edit::AtomTextEdit; | ||
10 | use ra_parser::Reparser; | ||
11 | |||
1 | use crate::{ | 12 | use crate::{ |
2 | SyntaxKind::*, TextRange, TextUnit, | 13 | SyntaxKind::*, TextRange, TextUnit, SyntaxError, |
3 | algo, | 14 | algo, |
4 | syntax_node::{GreenNode, SyntaxNode}, | 15 | syntax_node::{GreenNode, SyntaxNode}, |
5 | syntax_error::SyntaxError, | ||
6 | parsing::{ | 16 | parsing::{ |
7 | grammar, parse_with, | 17 | input::ParserInput, |
8 | builder::GreenBuilder, | 18 | builder::TreeBuilder, |
9 | parser::Parser, | ||
10 | lexer::{tokenize, Token}, | 19 | lexer::{tokenize, Token}, |
11 | } | 20 | } |
12 | }; | 21 | }; |
13 | 22 | ||
14 | use ra_text_edit::AtomTextEdit; | ||
15 | |||
16 | pub(crate) fn incremental_reparse( | 23 | pub(crate) fn incremental_reparse( |
17 | node: &SyntaxNode, | 24 | node: &SyntaxNode, |
18 | edit: &AtomTextEdit, | 25 | edit: &AtomTextEdit, |
@@ -61,7 +68,10 @@ fn reparse_block<'node>( | |||
61 | if !is_balanced(&tokens) { | 68 | if !is_balanced(&tokens) { |
62 | return None; | 69 | return None; |
63 | } | 70 | } |
64 | let (green, new_errors) = parse_with(GreenBuilder::default(), &text, &tokens, reparser); | 71 | let token_source = ParserInput::new(&text, &tokens); |
72 | let mut tree_sink = TreeBuilder::new(&text, &tokens); | ||
73 | reparser.parse(&token_source, &mut tree_sink); | ||
74 | let (green, new_errors) = tree_sink.finish(); | ||
65 | Some((node, green, new_errors)) | 75 | Some((node, green, new_errors)) |
66 | } | 76 | } |
67 | 77 | ||
@@ -77,12 +87,13 @@ fn is_contextual_kw(text: &str) -> bool { | |||
77 | } | 87 | } |
78 | } | 88 | } |
79 | 89 | ||
80 | fn find_reparsable_node( | 90 | fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(&SyntaxNode, Reparser)> { |
81 | node: &SyntaxNode, | ||
82 | range: TextRange, | ||
83 | ) -> Option<(&SyntaxNode, fn(&mut Parser))> { | ||
84 | let node = algo::find_covering_node(node, range); | 91 | let node = algo::find_covering_node(node, range); |
85 | node.ancestors().find_map(|node| grammar::reparser(node).map(|r| (node, r))) | 92 | node.ancestors().find_map(|node| { |
93 | let first_child = node.first_child().map(|it| it.kind()); | ||
94 | let parent = node.parent().map(|it| it.kind()); | ||
95 | Reparser::for_node(node.kind(), first_child, parent).map(|r| (node, r)) | ||
96 | }) | ||
86 | } | 97 | } |
87 | 98 | ||
88 | fn is_balanced(tokens: &[Token]) -> bool { | 99 | fn is_balanced(tokens: &[Token]) -> bool { |
@@ -132,7 +143,7 @@ fn merge_errors( | |||
132 | mod tests { | 143 | mod tests { |
133 | use test_utils::{extract_range, assert_eq_text}; | 144 | use test_utils::{extract_range, assert_eq_text}; |
134 | 145 | ||
135 | use crate::{SourceFile, AstNode, utils::dump_tree}; | 146 | use crate::{SourceFile, AstNode}; |
136 | use super::*; | 147 | use super::*; |
137 | 148 | ||
138 | fn do_check<F>(before: &str, replace_with: &str, reparser: F) | 149 | fn do_check<F>(before: &str, replace_with: &str, reparser: F) |
@@ -158,8 +169,8 @@ mod tests { | |||
158 | }; | 169 | }; |
159 | 170 | ||
160 | assert_eq_text!( | 171 | assert_eq_text!( |
161 | &dump_tree(fully_reparsed.syntax()), | 172 | &fully_reparsed.syntax().debug_dump(), |
162 | &dump_tree(incrementally_reparsed.syntax()), | 173 | &incrementally_reparsed.syntax().debug_dump(), |
163 | ) | 174 | ) |
164 | } | 175 | } |
165 | 176 | ||
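
The `is_balanced` guard referenced in the hunk above is what stops a block reparse when the freshly lexed tokens' curly braces do not pair up. A rough standalone sketch of that idea, using a hypothetical token-kind enum rather than the crate's lexer `Token`:

// Hypothetical token kinds; the real check runs over lexer `Token`s.
enum Kind { LCurly, RCurly, Other }

fn is_balanced(kinds: &[Kind]) -> bool {
    let mut depth = 0i32;
    for k in kinds {
        match k {
            Kind::LCurly => depth += 1,
            Kind::RCurly => {
                depth -= 1;
                if depth < 0 {
                    return false; // a closing brace with no opener
                }
            }
            Kind::Other => {}
        }
    }
    depth == 0 // every opener was closed
}

fn main() {
    assert!(is_balanced(&[Kind::LCurly, Kind::Other, Kind::RCurly]));
    assert!(!is_balanced(&[Kind::RCurly, Kind::LCurly]));
}
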
diff --git a/crates/ra_syntax/src/parsing/token_set.rs b/crates/ra_syntax/src/parsing/token_set.rs deleted file mode 100644 index 5719fe5a2..000000000 --- a/crates/ra_syntax/src/parsing/token_set.rs +++ /dev/null | |||
@@ -1,41 +0,0 @@ | |||
1 | use crate::SyntaxKind; | ||
2 | |||
3 | #[derive(Clone, Copy)] | ||
4 | pub(crate) struct TokenSet(u128); | ||
5 | |||
6 | impl TokenSet { | ||
7 | pub(crate) const fn empty() -> TokenSet { | ||
8 | TokenSet(0) | ||
9 | } | ||
10 | |||
11 | pub(crate) const fn singleton(kind: SyntaxKind) -> TokenSet { | ||
12 | TokenSet(mask(kind)) | ||
13 | } | ||
14 | |||
15 | pub(crate) const fn union(self, other: TokenSet) -> TokenSet { | ||
16 | TokenSet(self.0 | other.0) | ||
17 | } | ||
18 | |||
19 | pub(crate) fn contains(&self, kind: SyntaxKind) -> bool { | ||
20 | self.0 & mask(kind) != 0 | ||
21 | } | ||
22 | } | ||
23 | |||
24 | const fn mask(kind: SyntaxKind) -> u128 { | ||
25 | 1u128 << (kind as usize) | ||
26 | } | ||
27 | |||
28 | #[macro_export] | ||
29 | macro_rules! token_set { | ||
30 | ($($t:ident),*) => { TokenSet::empty()$(.union(TokenSet::singleton($t)))* }; | ||
31 | ($($t:ident),* ,) => { token_set!($($t),*) }; | ||
32 | } | ||
33 | |||
34 | #[test] | ||
35 | fn token_set_works_for_tokens() { | ||
36 | use crate::SyntaxKind::*; | ||
37 | let ts = token_set! { EOF, SHEBANG }; | ||
38 | assert!(ts.contains(EOF)); | ||
39 | assert!(ts.contains(SHEBANG)); | ||
40 | assert!(!ts.contains(PLUS)); | ||
41 | } | ||
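
One constraint worth noting on the removed `TokenSet` (which presumably moves to `ra_parser` along with the rest of the parser): backing the set with a single `u128` caps it at 128 distinct `SyntaxKind` discriminants. The bit trick in isolation, with a plain integer standing in for the enum:

// Bare-bones version of the mask/contains trick; `kind` is a hypothetical
// discriminant here, not the real SyntaxKind enum.
const fn mask(kind: u8) -> u128 {
    1u128 << (kind as u32)
}

fn main() {
    let eof = 1u8;
    let shebang = 120u8;
    let plus = 19u8;
    let set = mask(eof) | mask(shebang);
    assert!(set & mask(eof) != 0);
    assert!(set & mask(shebang) != 0);
    assert!(set & mask(plus) == 0);
    println!("set uses {} of 128 available bits", set.count_ones());
}
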
diff --git a/crates/ra_syntax/src/syntax_error.rs b/crates/ra_syntax/src/syntax_error.rs index 1a00fcc27..bdd431742 100644 --- a/crates/ra_syntax/src/syntax_error.rs +++ b/crates/ra_syntax/src/syntax_error.rs | |||
@@ -1,6 +1,8 @@ | |||
1 | use std::fmt; | 1 | use std::fmt; |
2 | 2 | ||
3 | use crate::{TextRange, TextUnit, parsing::ParseError}; | 3 | use ra_parser::ParseError; |
4 | |||
5 | use crate::{TextRange, TextUnit}; | ||
4 | 6 | ||
5 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 7 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
6 | pub struct SyntaxError { | 8 | pub struct SyntaxError { |
diff --git a/crates/ra_syntax/src/syntax_kinds.rs b/crates/ra_syntax/src/syntax_kinds.rs deleted file mode 100644 index c1118c5ab..000000000 --- a/crates/ra_syntax/src/syntax_kinds.rs +++ /dev/null | |||
@@ -1,27 +0,0 @@ | |||
1 | mod generated; | ||
2 | |||
3 | use std::fmt; | ||
4 | |||
5 | use crate::SyntaxKind::*; | ||
6 | |||
7 | pub use self::generated::SyntaxKind; | ||
8 | |||
9 | impl fmt::Debug for SyntaxKind { | ||
10 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
11 | let name = self.info().name; | ||
12 | f.write_str(name) | ||
13 | } | ||
14 | } | ||
15 | |||
16 | pub(crate) struct SyntaxInfo { | ||
17 | pub name: &'static str, | ||
18 | } | ||
19 | |||
20 | impl SyntaxKind { | ||
21 | pub fn is_trivia(self) -> bool { | ||
22 | match self { | ||
23 | WHITESPACE | COMMENT => true, | ||
24 | _ => false, | ||
25 | } | ||
26 | } | ||
27 | } | ||
diff --git a/crates/ra_syntax/src/syntax_kinds/generated.rs b/crates/ra_syntax/src/syntax_kinds/generated.rs deleted file mode 100644 index 266b95bbb..000000000 --- a/crates/ra_syntax/src/syntax_kinds/generated.rs +++ /dev/null | |||
@@ -1,642 +0,0 @@ | |||
1 | // This file is automatically generated based on the file `./generated.rs.tera` when `cargo gen-syntax` is run | ||
2 | // Do not edit manually | ||
3 | |||
4 | #![allow(bad_style, missing_docs, unreachable_pub)] | ||
5 | #![cfg_attr(rustfmt, rustfmt_skip)] | ||
6 | use super::SyntaxInfo; | ||
7 | |||
8 | /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT_DEF`. | ||
9 | #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] | ||
10 | pub enum SyntaxKind { | ||
11 | // Technical SyntaxKinds: they appear temporally during parsing, | ||
12 | // but never end up in the final tree | ||
13 | #[doc(hidden)] | ||
14 | TOMBSTONE, | ||
15 | #[doc(hidden)] | ||
16 | EOF, | ||
17 | SEMI, | ||
18 | COMMA, | ||
19 | L_PAREN, | ||
20 | R_PAREN, | ||
21 | L_CURLY, | ||
22 | R_CURLY, | ||
23 | L_BRACK, | ||
24 | R_BRACK, | ||
25 | L_ANGLE, | ||
26 | R_ANGLE, | ||
27 | AT, | ||
28 | POUND, | ||
29 | TILDE, | ||
30 | QUESTION, | ||
31 | DOLLAR, | ||
32 | AMP, | ||
33 | PIPE, | ||
34 | PLUS, | ||
35 | STAR, | ||
36 | SLASH, | ||
37 | CARET, | ||
38 | PERCENT, | ||
39 | UNDERSCORE, | ||
40 | DOT, | ||
41 | DOTDOT, | ||
42 | DOTDOTDOT, | ||
43 | DOTDOTEQ, | ||
44 | COLON, | ||
45 | COLONCOLON, | ||
46 | EQ, | ||
47 | EQEQ, | ||
48 | FAT_ARROW, | ||
49 | EXCL, | ||
50 | NEQ, | ||
51 | MINUS, | ||
52 | THIN_ARROW, | ||
53 | LTEQ, | ||
54 | GTEQ, | ||
55 | PLUSEQ, | ||
56 | MINUSEQ, | ||
57 | PIPEEQ, | ||
58 | AMPEQ, | ||
59 | CARETEQ, | ||
60 | SLASHEQ, | ||
61 | STAREQ, | ||
62 | PERCENTEQ, | ||
63 | AMPAMP, | ||
64 | PIPEPIPE, | ||
65 | SHL, | ||
66 | SHR, | ||
67 | SHLEQ, | ||
68 | SHREQ, | ||
69 | USE_KW, | ||
70 | FN_KW, | ||
71 | STRUCT_KW, | ||
72 | ENUM_KW, | ||
73 | TRAIT_KW, | ||
74 | IMPL_KW, | ||
75 | DYN_KW, | ||
76 | TRUE_KW, | ||
77 | FALSE_KW, | ||
78 | AS_KW, | ||
79 | EXTERN_KW, | ||
80 | CRATE_KW, | ||
81 | MOD_KW, | ||
82 | PUB_KW, | ||
83 | SELF_KW, | ||
84 | SUPER_KW, | ||
85 | IN_KW, | ||
86 | WHERE_KW, | ||
87 | FOR_KW, | ||
88 | LOOP_KW, | ||
89 | WHILE_KW, | ||
90 | CONTINUE_KW, | ||
91 | BREAK_KW, | ||
92 | IF_KW, | ||
93 | ELSE_KW, | ||
94 | MATCH_KW, | ||
95 | CONST_KW, | ||
96 | STATIC_KW, | ||
97 | MUT_KW, | ||
98 | UNSAFE_KW, | ||
99 | TYPE_KW, | ||
100 | REF_KW, | ||
101 | LET_KW, | ||
102 | MOVE_KW, | ||
103 | RETURN_KW, | ||
104 | AUTO_KW, | ||
105 | DEFAULT_KW, | ||
106 | UNION_KW, | ||
107 | INT_NUMBER, | ||
108 | FLOAT_NUMBER, | ||
109 | CHAR, | ||
110 | BYTE, | ||
111 | STRING, | ||
112 | RAW_STRING, | ||
113 | BYTE_STRING, | ||
114 | RAW_BYTE_STRING, | ||
115 | ERROR, | ||
116 | IDENT, | ||
117 | WHITESPACE, | ||
118 | LIFETIME, | ||
119 | COMMENT, | ||
120 | SHEBANG, | ||
121 | SOURCE_FILE, | ||
122 | STRUCT_DEF, | ||
123 | ENUM_DEF, | ||
124 | FN_DEF, | ||
125 | RET_TYPE, | ||
126 | EXTERN_CRATE_ITEM, | ||
127 | MODULE, | ||
128 | USE_ITEM, | ||
129 | STATIC_DEF, | ||
130 | CONST_DEF, | ||
131 | TRAIT_DEF, | ||
132 | IMPL_BLOCK, | ||
133 | TYPE_DEF, | ||
134 | MACRO_CALL, | ||
135 | TOKEN_TREE, | ||
136 | PAREN_TYPE, | ||
137 | TUPLE_TYPE, | ||
138 | NEVER_TYPE, | ||
139 | PATH_TYPE, | ||
140 | POINTER_TYPE, | ||
141 | ARRAY_TYPE, | ||
142 | SLICE_TYPE, | ||
143 | REFERENCE_TYPE, | ||
144 | PLACEHOLDER_TYPE, | ||
145 | FN_POINTER_TYPE, | ||
146 | FOR_TYPE, | ||
147 | IMPL_TRAIT_TYPE, | ||
148 | DYN_TRAIT_TYPE, | ||
149 | REF_PAT, | ||
150 | BIND_PAT, | ||
151 | PLACEHOLDER_PAT, | ||
152 | PATH_PAT, | ||
153 | STRUCT_PAT, | ||
154 | FIELD_PAT_LIST, | ||
155 | FIELD_PAT, | ||
156 | TUPLE_STRUCT_PAT, | ||
157 | TUPLE_PAT, | ||
158 | SLICE_PAT, | ||
159 | RANGE_PAT, | ||
160 | LITERAL_PAT, | ||
161 | TUPLE_EXPR, | ||
162 | ARRAY_EXPR, | ||
163 | PAREN_EXPR, | ||
164 | PATH_EXPR, | ||
165 | LAMBDA_EXPR, | ||
166 | IF_EXPR, | ||
167 | WHILE_EXPR, | ||
168 | CONDITION, | ||
169 | LOOP_EXPR, | ||
170 | FOR_EXPR, | ||
171 | CONTINUE_EXPR, | ||
172 | BREAK_EXPR, | ||
173 | LABEL, | ||
174 | BLOCK_EXPR, | ||
175 | RETURN_EXPR, | ||
176 | MATCH_EXPR, | ||
177 | MATCH_ARM_LIST, | ||
178 | MATCH_ARM, | ||
179 | MATCH_GUARD, | ||
180 | STRUCT_LIT, | ||
181 | NAMED_FIELD_LIST, | ||
182 | NAMED_FIELD, | ||
183 | CALL_EXPR, | ||
184 | INDEX_EXPR, | ||
185 | METHOD_CALL_EXPR, | ||
186 | FIELD_EXPR, | ||
187 | TRY_EXPR, | ||
188 | CAST_EXPR, | ||
189 | REF_EXPR, | ||
190 | PREFIX_EXPR, | ||
191 | RANGE_EXPR, | ||
192 | BIN_EXPR, | ||
193 | BLOCK, | ||
194 | EXTERN_BLOCK, | ||
195 | EXTERN_ITEM_LIST, | ||
196 | ENUM_VARIANT, | ||
197 | NAMED_FIELD_DEF_LIST, | ||
198 | NAMED_FIELD_DEF, | ||
199 | POS_FIELD_DEF_LIST, | ||
200 | POS_FIELD_DEF, | ||
201 | ENUM_VARIANT_LIST, | ||
202 | ITEM_LIST, | ||
203 | ATTR, | ||
204 | META_ITEM, | ||
205 | USE_TREE, | ||
206 | USE_TREE_LIST, | ||
207 | PATH, | ||
208 | PATH_SEGMENT, | ||
209 | LITERAL, | ||
210 | ALIAS, | ||
211 | VISIBILITY, | ||
212 | WHERE_CLAUSE, | ||
213 | WHERE_PRED, | ||
214 | ABI, | ||
215 | NAME, | ||
216 | NAME_REF, | ||
217 | LET_STMT, | ||
218 | EXPR_STMT, | ||
219 | TYPE_PARAM_LIST, | ||
220 | LIFETIME_PARAM, | ||
221 | TYPE_PARAM, | ||
222 | TYPE_ARG_LIST, | ||
223 | LIFETIME_ARG, | ||
224 | TYPE_ARG, | ||
225 | ASSOC_TYPE_ARG, | ||
226 | PARAM_LIST, | ||
227 | PARAM, | ||
228 | SELF_PARAM, | ||
229 | ARG_LIST, | ||
230 | } | ||
231 | use self::SyntaxKind::*; | ||
232 | |||
233 | impl SyntaxKind { | ||
234 | pub fn is_keyword(self) -> bool { | ||
235 | match self { | ||
236 | | USE_KW | ||
237 | | FN_KW | ||
238 | | STRUCT_KW | ||
239 | | ENUM_KW | ||
240 | | TRAIT_KW | ||
241 | | IMPL_KW | ||
242 | | DYN_KW | ||
243 | | TRUE_KW | ||
244 | | FALSE_KW | ||
245 | | AS_KW | ||
246 | | EXTERN_KW | ||
247 | | CRATE_KW | ||
248 | | MOD_KW | ||
249 | | PUB_KW | ||
250 | | SELF_KW | ||
251 | | SUPER_KW | ||
252 | | IN_KW | ||
253 | | WHERE_KW | ||
254 | | FOR_KW | ||
255 | | LOOP_KW | ||
256 | | WHILE_KW | ||
257 | | CONTINUE_KW | ||
258 | | BREAK_KW | ||
259 | | IF_KW | ||
260 | | ELSE_KW | ||
261 | | MATCH_KW | ||
262 | | CONST_KW | ||
263 | | STATIC_KW | ||
264 | | MUT_KW | ||
265 | | UNSAFE_KW | ||
266 | | TYPE_KW | ||
267 | | REF_KW | ||
268 | | LET_KW | ||
269 | | MOVE_KW | ||
270 | | RETURN_KW | ||
271 | | AUTO_KW | ||
272 | | DEFAULT_KW | ||
273 | | UNION_KW | ||
274 | => true, | ||
275 | _ => false | ||
276 | } | ||
277 | } | ||
278 | |||
279 | pub fn is_punct(self) -> bool { | ||
280 | match self { | ||
281 | | SEMI | ||
282 | | COMMA | ||
283 | | L_PAREN | ||
284 | | R_PAREN | ||
285 | | L_CURLY | ||
286 | | R_CURLY | ||
287 | | L_BRACK | ||
288 | | R_BRACK | ||
289 | | L_ANGLE | ||
290 | | R_ANGLE | ||
291 | | AT | ||
292 | | POUND | ||
293 | | TILDE | ||
294 | | QUESTION | ||
295 | | DOLLAR | ||
296 | | AMP | ||
297 | | PIPE | ||
298 | | PLUS | ||
299 | | STAR | ||
300 | | SLASH | ||
301 | | CARET | ||
302 | | PERCENT | ||
303 | | UNDERSCORE | ||
304 | | DOT | ||
305 | | DOTDOT | ||
306 | | DOTDOTDOT | ||
307 | | DOTDOTEQ | ||
308 | | COLON | ||
309 | | COLONCOLON | ||
310 | | EQ | ||
311 | | EQEQ | ||
312 | | FAT_ARROW | ||
313 | | EXCL | ||
314 | | NEQ | ||
315 | | MINUS | ||
316 | | THIN_ARROW | ||
317 | | LTEQ | ||
318 | | GTEQ | ||
319 | | PLUSEQ | ||
320 | | MINUSEQ | ||
321 | | PIPEEQ | ||
322 | | AMPEQ | ||
323 | | CARETEQ | ||
324 | | SLASHEQ | ||
325 | | STAREQ | ||
326 | | PERCENTEQ | ||
327 | | AMPAMP | ||
328 | | PIPEPIPE | ||
329 | | SHL | ||
330 | | SHR | ||
331 | | SHLEQ | ||
332 | | SHREQ | ||
333 | => true, | ||
334 | _ => false | ||
335 | } | ||
336 | } | ||
337 | pub fn is_literal(self) -> bool { | ||
338 | match self { | ||
339 | | INT_NUMBER | ||
340 | | FLOAT_NUMBER | ||
341 | | CHAR | ||
342 | | BYTE | ||
343 | | STRING | ||
344 | | RAW_STRING | ||
345 | | BYTE_STRING | ||
346 | | RAW_BYTE_STRING | ||
347 | => true, | ||
348 | _ => false | ||
349 | } | ||
350 | } | ||
351 | |||
352 | pub(crate) fn info(self) -> &'static SyntaxInfo { | ||
353 | match self { | ||
354 | SEMI => &SyntaxInfo { name: "SEMI" }, | ||
355 | COMMA => &SyntaxInfo { name: "COMMA" }, | ||
356 | L_PAREN => &SyntaxInfo { name: "L_PAREN" }, | ||
357 | R_PAREN => &SyntaxInfo { name: "R_PAREN" }, | ||
358 | L_CURLY => &SyntaxInfo { name: "L_CURLY" }, | ||
359 | R_CURLY => &SyntaxInfo { name: "R_CURLY" }, | ||
360 | L_BRACK => &SyntaxInfo { name: "L_BRACK" }, | ||
361 | R_BRACK => &SyntaxInfo { name: "R_BRACK" }, | ||
362 | L_ANGLE => &SyntaxInfo { name: "L_ANGLE" }, | ||
363 | R_ANGLE => &SyntaxInfo { name: "R_ANGLE" }, | ||
364 | AT => &SyntaxInfo { name: "AT" }, | ||
365 | POUND => &SyntaxInfo { name: "POUND" }, | ||
366 | TILDE => &SyntaxInfo { name: "TILDE" }, | ||
367 | QUESTION => &SyntaxInfo { name: "QUESTION" }, | ||
368 | DOLLAR => &SyntaxInfo { name: "DOLLAR" }, | ||
369 | AMP => &SyntaxInfo { name: "AMP" }, | ||
370 | PIPE => &SyntaxInfo { name: "PIPE" }, | ||
371 | PLUS => &SyntaxInfo { name: "PLUS" }, | ||
372 | STAR => &SyntaxInfo { name: "STAR" }, | ||
373 | SLASH => &SyntaxInfo { name: "SLASH" }, | ||
374 | CARET => &SyntaxInfo { name: "CARET" }, | ||
375 | PERCENT => &SyntaxInfo { name: "PERCENT" }, | ||
376 | UNDERSCORE => &SyntaxInfo { name: "UNDERSCORE" }, | ||
377 | DOT => &SyntaxInfo { name: "DOT" }, | ||
378 | DOTDOT => &SyntaxInfo { name: "DOTDOT" }, | ||
379 | DOTDOTDOT => &SyntaxInfo { name: "DOTDOTDOT" }, | ||
380 | DOTDOTEQ => &SyntaxInfo { name: "DOTDOTEQ" }, | ||
381 | COLON => &SyntaxInfo { name: "COLON" }, | ||
382 | COLONCOLON => &SyntaxInfo { name: "COLONCOLON" }, | ||
383 | EQ => &SyntaxInfo { name: "EQ" }, | ||
384 | EQEQ => &SyntaxInfo { name: "EQEQ" }, | ||
385 | FAT_ARROW => &SyntaxInfo { name: "FAT_ARROW" }, | ||
386 | EXCL => &SyntaxInfo { name: "EXCL" }, | ||
387 | NEQ => &SyntaxInfo { name: "NEQ" }, | ||
388 | MINUS => &SyntaxInfo { name: "MINUS" }, | ||
389 | THIN_ARROW => &SyntaxInfo { name: "THIN_ARROW" }, | ||
390 | LTEQ => &SyntaxInfo { name: "LTEQ" }, | ||
391 | GTEQ => &SyntaxInfo { name: "GTEQ" }, | ||
392 | PLUSEQ => &SyntaxInfo { name: "PLUSEQ" }, | ||
393 | MINUSEQ => &SyntaxInfo { name: "MINUSEQ" }, | ||
394 | PIPEEQ => &SyntaxInfo { name: "PIPEEQ" }, | ||
395 | AMPEQ => &SyntaxInfo { name: "AMPEQ" }, | ||
396 | CARETEQ => &SyntaxInfo { name: "CARETEQ" }, | ||
397 | SLASHEQ => &SyntaxInfo { name: "SLASHEQ" }, | ||
398 | STAREQ => &SyntaxInfo { name: "STAREQ" }, | ||
399 | PERCENTEQ => &SyntaxInfo { name: "PERCENTEQ" }, | ||
400 | AMPAMP => &SyntaxInfo { name: "AMPAMP" }, | ||
401 | PIPEPIPE => &SyntaxInfo { name: "PIPEPIPE" }, | ||
402 | SHL => &SyntaxInfo { name: "SHL" }, | ||
403 | SHR => &SyntaxInfo { name: "SHR" }, | ||
404 | SHLEQ => &SyntaxInfo { name: "SHLEQ" }, | ||
405 | SHREQ => &SyntaxInfo { name: "SHREQ" }, | ||
406 | USE_KW => &SyntaxInfo { name: "USE_KW" }, | ||
407 | FN_KW => &SyntaxInfo { name: "FN_KW" }, | ||
408 | STRUCT_KW => &SyntaxInfo { name: "STRUCT_KW" }, | ||
409 | ENUM_KW => &SyntaxInfo { name: "ENUM_KW" }, | ||
410 | TRAIT_KW => &SyntaxInfo { name: "TRAIT_KW" }, | ||
411 | IMPL_KW => &SyntaxInfo { name: "IMPL_KW" }, | ||
412 | DYN_KW => &SyntaxInfo { name: "DYN_KW" }, | ||
413 | TRUE_KW => &SyntaxInfo { name: "TRUE_KW" }, | ||
414 | FALSE_KW => &SyntaxInfo { name: "FALSE_KW" }, | ||
415 | AS_KW => &SyntaxInfo { name: "AS_KW" }, | ||
416 | EXTERN_KW => &SyntaxInfo { name: "EXTERN_KW" }, | ||
417 | CRATE_KW => &SyntaxInfo { name: "CRATE_KW" }, | ||
418 | MOD_KW => &SyntaxInfo { name: "MOD_KW" }, | ||
419 | PUB_KW => &SyntaxInfo { name: "PUB_KW" }, | ||
420 | SELF_KW => &SyntaxInfo { name: "SELF_KW" }, | ||
421 | SUPER_KW => &SyntaxInfo { name: "SUPER_KW" }, | ||
422 | IN_KW => &SyntaxInfo { name: "IN_KW" }, | ||
423 | WHERE_KW => &SyntaxInfo { name: "WHERE_KW" }, | ||
424 | FOR_KW => &SyntaxInfo { name: "FOR_KW" }, | ||
425 | LOOP_KW => &SyntaxInfo { name: "LOOP_KW" }, | ||
426 | WHILE_KW => &SyntaxInfo { name: "WHILE_KW" }, | ||
427 | CONTINUE_KW => &SyntaxInfo { name: "CONTINUE_KW" }, | ||
428 | BREAK_KW => &SyntaxInfo { name: "BREAK_KW" }, | ||
429 | IF_KW => &SyntaxInfo { name: "IF_KW" }, | ||
430 | ELSE_KW => &SyntaxInfo { name: "ELSE_KW" }, | ||
431 | MATCH_KW => &SyntaxInfo { name: "MATCH_KW" }, | ||
432 | CONST_KW => &SyntaxInfo { name: "CONST_KW" }, | ||
433 | STATIC_KW => &SyntaxInfo { name: "STATIC_KW" }, | ||
434 | MUT_KW => &SyntaxInfo { name: "MUT_KW" }, | ||
435 | UNSAFE_KW => &SyntaxInfo { name: "UNSAFE_KW" }, | ||
436 | TYPE_KW => &SyntaxInfo { name: "TYPE_KW" }, | ||
437 | REF_KW => &SyntaxInfo { name: "REF_KW" }, | ||
438 | LET_KW => &SyntaxInfo { name: "LET_KW" }, | ||
439 | MOVE_KW => &SyntaxInfo { name: "MOVE_KW" }, | ||
440 | RETURN_KW => &SyntaxInfo { name: "RETURN_KW" }, | ||
441 | AUTO_KW => &SyntaxInfo { name: "AUTO_KW" }, | ||
442 | DEFAULT_KW => &SyntaxInfo { name: "DEFAULT_KW" }, | ||
443 | UNION_KW => &SyntaxInfo { name: "UNION_KW" }, | ||
444 | INT_NUMBER => &SyntaxInfo { name: "INT_NUMBER" }, | ||
445 | FLOAT_NUMBER => &SyntaxInfo { name: "FLOAT_NUMBER" }, | ||
446 | CHAR => &SyntaxInfo { name: "CHAR" }, | ||
447 | BYTE => &SyntaxInfo { name: "BYTE" }, | ||
448 | STRING => &SyntaxInfo { name: "STRING" }, | ||
449 | RAW_STRING => &SyntaxInfo { name: "RAW_STRING" }, | ||
450 | BYTE_STRING => &SyntaxInfo { name: "BYTE_STRING" }, | ||
451 | RAW_BYTE_STRING => &SyntaxInfo { name: "RAW_BYTE_STRING" }, | ||
452 | ERROR => &SyntaxInfo { name: "ERROR" }, | ||
453 | IDENT => &SyntaxInfo { name: "IDENT" }, | ||
454 | WHITESPACE => &SyntaxInfo { name: "WHITESPACE" }, | ||
455 | LIFETIME => &SyntaxInfo { name: "LIFETIME" }, | ||
456 | COMMENT => &SyntaxInfo { name: "COMMENT" }, | ||
457 | SHEBANG => &SyntaxInfo { name: "SHEBANG" }, | ||
458 | SOURCE_FILE => &SyntaxInfo { name: "SOURCE_FILE" }, | ||
459 | STRUCT_DEF => &SyntaxInfo { name: "STRUCT_DEF" }, | ||
460 | ENUM_DEF => &SyntaxInfo { name: "ENUM_DEF" }, | ||
461 | FN_DEF => &SyntaxInfo { name: "FN_DEF" }, | ||
462 | RET_TYPE => &SyntaxInfo { name: "RET_TYPE" }, | ||
463 | EXTERN_CRATE_ITEM => &SyntaxInfo { name: "EXTERN_CRATE_ITEM" }, | ||
464 | MODULE => &SyntaxInfo { name: "MODULE" }, | ||
465 | USE_ITEM => &SyntaxInfo { name: "USE_ITEM" }, | ||
466 | STATIC_DEF => &SyntaxInfo { name: "STATIC_DEF" }, | ||
467 | CONST_DEF => &SyntaxInfo { name: "CONST_DEF" }, | ||
468 | TRAIT_DEF => &SyntaxInfo { name: "TRAIT_DEF" }, | ||
469 | IMPL_BLOCK => &SyntaxInfo { name: "IMPL_BLOCK" }, | ||
470 | TYPE_DEF => &SyntaxInfo { name: "TYPE_DEF" }, | ||
471 | MACRO_CALL => &SyntaxInfo { name: "MACRO_CALL" }, | ||
472 | TOKEN_TREE => &SyntaxInfo { name: "TOKEN_TREE" }, | ||
473 | PAREN_TYPE => &SyntaxInfo { name: "PAREN_TYPE" }, | ||
474 | TUPLE_TYPE => &SyntaxInfo { name: "TUPLE_TYPE" }, | ||
475 | NEVER_TYPE => &SyntaxInfo { name: "NEVER_TYPE" }, | ||
476 | PATH_TYPE => &SyntaxInfo { name: "PATH_TYPE" }, | ||
477 | POINTER_TYPE => &SyntaxInfo { name: "POINTER_TYPE" }, | ||
478 | ARRAY_TYPE => &SyntaxInfo { name: "ARRAY_TYPE" }, | ||
479 | SLICE_TYPE => &SyntaxInfo { name: "SLICE_TYPE" }, | ||
480 | REFERENCE_TYPE => &SyntaxInfo { name: "REFERENCE_TYPE" }, | ||
481 | PLACEHOLDER_TYPE => &SyntaxInfo { name: "PLACEHOLDER_TYPE" }, | ||
482 | FN_POINTER_TYPE => &SyntaxInfo { name: "FN_POINTER_TYPE" }, | ||
483 | FOR_TYPE => &SyntaxInfo { name: "FOR_TYPE" }, | ||
484 | IMPL_TRAIT_TYPE => &SyntaxInfo { name: "IMPL_TRAIT_TYPE" }, | ||
485 | DYN_TRAIT_TYPE => &SyntaxInfo { name: "DYN_TRAIT_TYPE" }, | ||
486 | REF_PAT => &SyntaxInfo { name: "REF_PAT" }, | ||
487 | BIND_PAT => &SyntaxInfo { name: "BIND_PAT" }, | ||
488 | PLACEHOLDER_PAT => &SyntaxInfo { name: "PLACEHOLDER_PAT" }, | ||
489 | PATH_PAT => &SyntaxInfo { name: "PATH_PAT" }, | ||
490 | STRUCT_PAT => &SyntaxInfo { name: "STRUCT_PAT" }, | ||
491 | FIELD_PAT_LIST => &SyntaxInfo { name: "FIELD_PAT_LIST" }, | ||
492 | FIELD_PAT => &SyntaxInfo { name: "FIELD_PAT" }, | ||
493 | TUPLE_STRUCT_PAT => &SyntaxInfo { name: "TUPLE_STRUCT_PAT" }, | ||
494 | TUPLE_PAT => &SyntaxInfo { name: "TUPLE_PAT" }, | ||
495 | SLICE_PAT => &SyntaxInfo { name: "SLICE_PAT" }, | ||
496 | RANGE_PAT => &SyntaxInfo { name: "RANGE_PAT" }, | ||
497 | LITERAL_PAT => &SyntaxInfo { name: "LITERAL_PAT" }, | ||
498 | TUPLE_EXPR => &SyntaxInfo { name: "TUPLE_EXPR" }, | ||
499 | ARRAY_EXPR => &SyntaxInfo { name: "ARRAY_EXPR" }, | ||
500 | PAREN_EXPR => &SyntaxInfo { name: "PAREN_EXPR" }, | ||
501 | PATH_EXPR => &SyntaxInfo { name: "PATH_EXPR" }, | ||
502 | LAMBDA_EXPR => &SyntaxInfo { name: "LAMBDA_EXPR" }, | ||
503 | IF_EXPR => &SyntaxInfo { name: "IF_EXPR" }, | ||
504 | WHILE_EXPR => &SyntaxInfo { name: "WHILE_EXPR" }, | ||
505 | CONDITION => &SyntaxInfo { name: "CONDITION" }, | ||
506 | LOOP_EXPR => &SyntaxInfo { name: "LOOP_EXPR" }, | ||
507 | FOR_EXPR => &SyntaxInfo { name: "FOR_EXPR" }, | ||
508 | CONTINUE_EXPR => &SyntaxInfo { name: "CONTINUE_EXPR" }, | ||
509 | BREAK_EXPR => &SyntaxInfo { name: "BREAK_EXPR" }, | ||
510 | LABEL => &SyntaxInfo { name: "LABEL" }, | ||
511 | BLOCK_EXPR => &SyntaxInfo { name: "BLOCK_EXPR" }, | ||
512 | RETURN_EXPR => &SyntaxInfo { name: "RETURN_EXPR" }, | ||
513 | MATCH_EXPR => &SyntaxInfo { name: "MATCH_EXPR" }, | ||
514 | MATCH_ARM_LIST => &SyntaxInfo { name: "MATCH_ARM_LIST" }, | ||
515 | MATCH_ARM => &SyntaxInfo { name: "MATCH_ARM" }, | ||
516 | MATCH_GUARD => &SyntaxInfo { name: "MATCH_GUARD" }, | ||
517 | STRUCT_LIT => &SyntaxInfo { name: "STRUCT_LIT" }, | ||
518 | NAMED_FIELD_LIST => &SyntaxInfo { name: "NAMED_FIELD_LIST" }, | ||
519 | NAMED_FIELD => &SyntaxInfo { name: "NAMED_FIELD" }, | ||
520 | CALL_EXPR => &SyntaxInfo { name: "CALL_EXPR" }, | ||
521 | INDEX_EXPR => &SyntaxInfo { name: "INDEX_EXPR" }, | ||
522 | METHOD_CALL_EXPR => &SyntaxInfo { name: "METHOD_CALL_EXPR" }, | ||
523 | FIELD_EXPR => &SyntaxInfo { name: "FIELD_EXPR" }, | ||
524 | TRY_EXPR => &SyntaxInfo { name: "TRY_EXPR" }, | ||
525 | CAST_EXPR => &SyntaxInfo { name: "CAST_EXPR" }, | ||
526 | REF_EXPR => &SyntaxInfo { name: "REF_EXPR" }, | ||
527 | PREFIX_EXPR => &SyntaxInfo { name: "PREFIX_EXPR" }, | ||
528 | RANGE_EXPR => &SyntaxInfo { name: "RANGE_EXPR" }, | ||
529 | BIN_EXPR => &SyntaxInfo { name: "BIN_EXPR" }, | ||
530 | BLOCK => &SyntaxInfo { name: "BLOCK" }, | ||
531 | EXTERN_BLOCK => &SyntaxInfo { name: "EXTERN_BLOCK" }, | ||
532 | EXTERN_ITEM_LIST => &SyntaxInfo { name: "EXTERN_ITEM_LIST" }, | ||
533 | ENUM_VARIANT => &SyntaxInfo { name: "ENUM_VARIANT" }, | ||
534 | NAMED_FIELD_DEF_LIST => &SyntaxInfo { name: "NAMED_FIELD_DEF_LIST" }, | ||
535 | NAMED_FIELD_DEF => &SyntaxInfo { name: "NAMED_FIELD_DEF" }, | ||
536 | POS_FIELD_DEF_LIST => &SyntaxInfo { name: "POS_FIELD_DEF_LIST" }, | ||
537 | POS_FIELD_DEF => &SyntaxInfo { name: "POS_FIELD_DEF" }, | ||
538 | ENUM_VARIANT_LIST => &SyntaxInfo { name: "ENUM_VARIANT_LIST" }, | ||
539 | ITEM_LIST => &SyntaxInfo { name: "ITEM_LIST" }, | ||
540 | ATTR => &SyntaxInfo { name: "ATTR" }, | ||
541 | META_ITEM => &SyntaxInfo { name: "META_ITEM" }, | ||
542 | USE_TREE => &SyntaxInfo { name: "USE_TREE" }, | ||
543 | USE_TREE_LIST => &SyntaxInfo { name: "USE_TREE_LIST" }, | ||
544 | PATH => &SyntaxInfo { name: "PATH" }, | ||
545 | PATH_SEGMENT => &SyntaxInfo { name: "PATH_SEGMENT" }, | ||
546 | LITERAL => &SyntaxInfo { name: "LITERAL" }, | ||
547 | ALIAS => &SyntaxInfo { name: "ALIAS" }, | ||
548 | VISIBILITY => &SyntaxInfo { name: "VISIBILITY" }, | ||
549 | WHERE_CLAUSE => &SyntaxInfo { name: "WHERE_CLAUSE" }, | ||
550 | WHERE_PRED => &SyntaxInfo { name: "WHERE_PRED" }, | ||
551 | ABI => &SyntaxInfo { name: "ABI" }, | ||
552 | NAME => &SyntaxInfo { name: "NAME" }, | ||
553 | NAME_REF => &SyntaxInfo { name: "NAME_REF" }, | ||
554 | LET_STMT => &SyntaxInfo { name: "LET_STMT" }, | ||
555 | EXPR_STMT => &SyntaxInfo { name: "EXPR_STMT" }, | ||
556 | TYPE_PARAM_LIST => &SyntaxInfo { name: "TYPE_PARAM_LIST" }, | ||
557 | LIFETIME_PARAM => &SyntaxInfo { name: "LIFETIME_PARAM" }, | ||
558 | TYPE_PARAM => &SyntaxInfo { name: "TYPE_PARAM" }, | ||
559 | TYPE_ARG_LIST => &SyntaxInfo { name: "TYPE_ARG_LIST" }, | ||
560 | LIFETIME_ARG => &SyntaxInfo { name: "LIFETIME_ARG" }, | ||
561 | TYPE_ARG => &SyntaxInfo { name: "TYPE_ARG" }, | ||
562 | ASSOC_TYPE_ARG => &SyntaxInfo { name: "ASSOC_TYPE_ARG" }, | ||
563 | PARAM_LIST => &SyntaxInfo { name: "PARAM_LIST" }, | ||
564 | PARAM => &SyntaxInfo { name: "PARAM" }, | ||
565 | SELF_PARAM => &SyntaxInfo { name: "SELF_PARAM" }, | ||
566 | ARG_LIST => &SyntaxInfo { name: "ARG_LIST" }, | ||
567 | TOMBSTONE => &SyntaxInfo { name: "TOMBSTONE" }, | ||
568 | EOF => &SyntaxInfo { name: "EOF" }, | ||
569 | } | ||
570 | } | ||
571 | pub(crate) fn from_keyword(ident: &str) -> Option<SyntaxKind> { | ||
572 | let kw = match ident { | ||
573 | "use" => USE_KW, | ||
574 | "fn" => FN_KW, | ||
575 | "struct" => STRUCT_KW, | ||
576 | "enum" => ENUM_KW, | ||
577 | "trait" => TRAIT_KW, | ||
578 | "impl" => IMPL_KW, | ||
579 | "dyn" => DYN_KW, | ||
580 | "true" => TRUE_KW, | ||
581 | "false" => FALSE_KW, | ||
582 | "as" => AS_KW, | ||
583 | "extern" => EXTERN_KW, | ||
584 | "crate" => CRATE_KW, | ||
585 | "mod" => MOD_KW, | ||
586 | "pub" => PUB_KW, | ||
587 | "self" => SELF_KW, | ||
588 | "super" => SUPER_KW, | ||
589 | "in" => IN_KW, | ||
590 | "where" => WHERE_KW, | ||
591 | "for" => FOR_KW, | ||
592 | "loop" => LOOP_KW, | ||
593 | "while" => WHILE_KW, | ||
594 | "continue" => CONTINUE_KW, | ||
595 | "break" => BREAK_KW, | ||
596 | "if" => IF_KW, | ||
597 | "else" => ELSE_KW, | ||
598 | "match" => MATCH_KW, | ||
599 | "const" => CONST_KW, | ||
600 | "static" => STATIC_KW, | ||
601 | "mut" => MUT_KW, | ||
602 | "unsafe" => UNSAFE_KW, | ||
603 | "type" => TYPE_KW, | ||
604 | "ref" => REF_KW, | ||
605 | "let" => LET_KW, | ||
606 | "move" => MOVE_KW, | ||
607 | "return" => RETURN_KW, | ||
608 | _ => return None, | ||
609 | }; | ||
610 | Some(kw) | ||
611 | } | ||
612 | |||
613 | pub(crate) fn from_char(c: char) -> Option<SyntaxKind> { | ||
614 | let tok = match c { | ||
615 | ';' => SEMI, | ||
616 | ',' => COMMA, | ||
617 | '(' => L_PAREN, | ||
618 | ')' => R_PAREN, | ||
619 | '{' => L_CURLY, | ||
620 | '}' => R_CURLY, | ||
621 | '[' => L_BRACK, | ||
622 | ']' => R_BRACK, | ||
623 | '<' => L_ANGLE, | ||
624 | '>' => R_ANGLE, | ||
625 | '@' => AT, | ||
626 | '#' => POUND, | ||
627 | '~' => TILDE, | ||
628 | '?' => QUESTION, | ||
629 | '$' => DOLLAR, | ||
630 | '&' => AMP, | ||
631 | '|' => PIPE, | ||
632 | '+' => PLUS, | ||
633 | '*' => STAR, | ||
634 | '/' => SLASH, | ||
635 | '^' => CARET, | ||
636 | '%' => PERCENT, | ||
637 | '_' => UNDERSCORE, | ||
638 | _ => return None, | ||
639 | }; | ||
640 | Some(tok) | ||
641 | } | ||
642 | } | ||
diff --git a/crates/ra_syntax/src/syntax_kinds/generated.rs.tera b/crates/ra_syntax/src/syntax_kinds/generated.rs.tera deleted file mode 100644 index 837437136..000000000 --- a/crates/ra_syntax/src/syntax_kinds/generated.rs.tera +++ /dev/null | |||
@@ -1,96 +0,0 @@ | |||
1 | {# THIS File is not automatically generated: | ||
2 | the below applies to the result of this template | ||
3 | #}// This file is automatically generated based on the file `./generated.rs.tera` when `cargo gen-syntax` is run | ||
4 | // Do not edit manually | ||
5 | |||
6 | #![allow(bad_style, missing_docs, unreachable_pub)] | ||
7 | #![cfg_attr(rustfmt, rustfmt_skip)] | ||
8 | use super::SyntaxInfo; | ||
9 | |||
10 | /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT_DEF`. | ||
11 | #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] | ||
12 | pub enum SyntaxKind { | ||
13 | // Technical SyntaxKinds: they appear temporally during parsing, | ||
14 | // but never end up in the final tree | ||
15 | #[doc(hidden)] | ||
16 | TOMBSTONE, | ||
17 | #[doc(hidden)] | ||
18 | EOF, | ||
19 | |||
20 | {%- for t in concat(a=single_byte_tokens, b=multi_byte_tokens) %} | ||
21 | {{t.1}}, | ||
22 | {%- endfor -%} | ||
23 | {% for kw in concat(a=keywords, b=contextual_keywords) %} | ||
24 | {{kw | upper}}_KW, | ||
25 | {%- endfor -%} | ||
26 | {% for t in concat(a=literals, b=tokens, c=nodes) %} | ||
27 | {{t}}, | ||
28 | {%- endfor %} | ||
29 | } | ||
30 | use self::SyntaxKind::*; | ||
31 | |||
32 | impl SyntaxKind { | ||
33 | pub fn is_keyword(self) -> bool { | ||
34 | match self { | ||
35 | {%- for kw in concat(a=keywords, b=contextual_keywords) %} | ||
36 | | {{kw | upper}}_KW | ||
37 | {%- endfor %} | ||
38 | => true, | ||
39 | _ => false | ||
40 | } | ||
41 | } | ||
42 | |||
43 | pub fn is_punct(self) -> bool { | ||
44 | match self { | ||
45 | {%- for t in concat(a=single_byte_tokens, b=multi_byte_tokens) %} | ||
46 | | {{t.1}} | ||
47 | {%- endfor %} | ||
48 | => true, | ||
49 | _ => false | ||
50 | } | ||
51 | } | ||
52 | pub fn is_literal(self) -> bool { | ||
53 | match self { | ||
54 | {%- for t in literals %} | ||
55 | | {{t}} | ||
56 | {%- endfor %} | ||
57 | => true, | ||
58 | _ => false | ||
59 | } | ||
60 | } | ||
61 | |||
62 | pub(crate) fn info(self) -> &'static SyntaxInfo { | ||
63 | match self { | ||
64 | {%- for t in concat(a=single_byte_tokens, b=multi_byte_tokens) %} | ||
65 | {{t.1}} => &SyntaxInfo { name: "{{t.1}}" }, | ||
66 | {%- endfor -%} | ||
67 | {% for kw in concat(a=keywords, b=contextual_keywords) %} | ||
68 | {{kw | upper}}_KW => &SyntaxInfo { name: "{{kw | upper}}_KW" }, | ||
69 | {%- endfor -%} | ||
70 | {% for t in concat(a=literals, b=tokens, c=nodes) %} | ||
71 | {{t}} => &SyntaxInfo { name: "{{t}}" }, | ||
72 | {%- endfor %} | ||
73 | TOMBSTONE => &SyntaxInfo { name: "TOMBSTONE" }, | ||
74 | EOF => &SyntaxInfo { name: "EOF" }, | ||
75 | } | ||
76 | } | ||
77 | pub(crate) fn from_keyword(ident: &str) -> Option<SyntaxKind> { | ||
78 | let kw = match ident { | ||
79 | {%- for kw in keywords %} | ||
80 | "{{kw}}" => {{kw | upper}}_KW, | ||
81 | {%- endfor %} | ||
82 | _ => return None, | ||
83 | }; | ||
84 | Some(kw) | ||
85 | } | ||
86 | |||
87 | pub(crate) fn from_char(c: char) -> Option<SyntaxKind> { | ||
88 | let tok = match c { | ||
89 | {%- for t in single_byte_tokens %} | ||
90 | '{{t.0}}' => {{t.1}}, | ||
91 | {%- endfor %} | ||
92 | _ => return None, | ||
93 | }; | ||
94 | Some(tok) | ||
95 | } | ||
96 | } | ||
diff --git a/crates/ra_syntax/src/syntax_node.rs b/crates/ra_syntax/src/syntax_node.rs index aa627398d..4d54ae614 100644 --- a/crates/ra_syntax/src/syntax_node.rs +++ b/crates/ra_syntax/src/syntax_node.rs | |||
@@ -1,9 +1,20 @@ | |||
1 | use std::{fmt, borrow::Borrow}; | 1 | //! This module defines the Concrete Syntax Tree (CST) used by rust-analyzer. |
2 | //! | ||
3 | //! The CST includes comments and whitespace, provides a single node type, | ||
4 | //! `SyntaxNode`, and a basic traversal API (parent, children, siblings). | ||
5 | //! | ||
6 | //! The *real* implementation is in the (language-agnostic) `rowan` crate; this | ||
7 | //! module just wraps its API. | ||
8 | |||
9 | use std::{ | ||
10 | fmt::{self, Write}, | ||
11 | borrow::Borrow, | ||
12 | }; | ||
2 | 13 | ||
3 | use rowan::{Types, TransparentNewType}; | 14 | use rowan::{Types, TransparentNewType}; |
4 | 15 | ||
5 | use crate::{ | 16 | use crate::{ |
6 | SmolStr, SyntaxKind, TextRange, SyntaxText, | 17 | SmolStr, SyntaxKind, TextRange, SyntaxText, SourceFile, AstNode, |
7 | syntax_error::SyntaxError, | 18 | syntax_error::SyntaxError, |
8 | }; | 19 | }; |
9 | 20 | ||
@@ -16,14 +27,17 @@ impl Types for RaTypes { | |||
16 | type RootData = Vec<SyntaxError>; | 27 | type RootData = Vec<SyntaxError>; |
17 | } | 28 | } |
18 | 29 | ||
19 | pub type GreenNode = rowan::GreenNode<RaTypes>; | 30 | pub(crate) type GreenNode = rowan::GreenNode<RaTypes>; |
31 | |||
32 | /// Marker trait for CST and AST nodes | ||
33 | pub trait SyntaxNodeWrapper: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>> {} | ||
34 | impl<T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>> SyntaxNodeWrapper for T {} | ||
20 | 35 | ||
36 | /// An owning smart pointer for a CST or AST node. | ||
21 | #[derive(PartialEq, Eq, Hash)] | 37 | #[derive(PartialEq, Eq, Hash)] |
22 | pub struct TreeArc<T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>>( | 38 | pub struct TreeArc<T: SyntaxNodeWrapper>(pub(crate) rowan::TreeArc<RaTypes, T>); |
23 | pub(crate) rowan::TreeArc<RaTypes, T>, | ||
24 | ); | ||
25 | 39 | ||
26 | impl<T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>> Borrow<T> for TreeArc<T> { | 40 | impl<T: SyntaxNodeWrapper> Borrow<T> for TreeArc<T> { |
27 | fn borrow(&self) -> &T { | 41 | fn borrow(&self) -> &T { |
28 | &*self | 42 | &*self |
29 | } | 43 | } |
@@ -31,11 +45,11 @@ impl<T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>> Borrow<T> for Tre | |||
31 | 45 | ||
32 | impl<T> TreeArc<T> | 46 | impl<T> TreeArc<T> |
33 | where | 47 | where |
34 | T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>, | 48 | T: SyntaxNodeWrapper, |
35 | { | 49 | { |
36 | pub(crate) fn cast<U>(this: TreeArc<T>) -> TreeArc<U> | 50 | pub(crate) fn cast<U>(this: TreeArc<T>) -> TreeArc<U> |
37 | where | 51 | where |
38 | U: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>, | 52 | U: SyntaxNodeWrapper, |
39 | { | 53 | { |
40 | TreeArc(rowan::TreeArc::cast(this.0)) | 54 | TreeArc(rowan::TreeArc::cast(this.0)) |
41 | } | 55 | } |
@@ -43,7 +57,7 @@ where | |||
43 | 57 | ||
44 | impl<T> std::ops::Deref for TreeArc<T> | 58 | impl<T> std::ops::Deref for TreeArc<T> |
45 | where | 59 | where |
46 | T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>, | 60 | T: SyntaxNodeWrapper, |
47 | { | 61 | { |
48 | type Target = T; | 62 | type Target = T; |
49 | fn deref(&self) -> &T { | 63 | fn deref(&self) -> &T { |
@@ -53,7 +67,7 @@ where | |||
53 | 67 | ||
54 | impl<T> PartialEq<T> for TreeArc<T> | 68 | impl<T> PartialEq<T> for TreeArc<T> |
55 | where | 69 | where |
56 | T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>, | 70 | T: SyntaxNodeWrapper, |
57 | T: PartialEq<T>, | 71 | T: PartialEq<T>, |
58 | { | 72 | { |
59 | fn eq(&self, other: &T) -> bool { | 73 | fn eq(&self, other: &T) -> bool { |
@@ -64,7 +78,7 @@ where | |||
64 | 78 | ||
65 | impl<T> Clone for TreeArc<T> | 79 | impl<T> Clone for TreeArc<T> |
66 | where | 80 | where |
67 | T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>, | 81 | T: SyntaxNodeWrapper, |
68 | { | 82 | { |
69 | fn clone(&self) -> TreeArc<T> { | 83 | fn clone(&self) -> TreeArc<T> { |
70 | TreeArc(self.0.clone()) | 84 | TreeArc(self.0.clone()) |
@@ -73,7 +87,7 @@ where | |||
73 | 87 | ||
74 | impl<T> fmt::Debug for TreeArc<T> | 88 | impl<T> fmt::Debug for TreeArc<T> |
75 | where | 89 | where |
76 | T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>, | 90 | T: SyntaxNodeWrapper, |
77 | T: fmt::Debug, | 91 | T: fmt::Debug, |
78 | { | 92 | { |
79 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | 93 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { |
@@ -88,13 +102,24 @@ unsafe impl TransparentNewType for SyntaxNode { | |||
88 | type Repr = rowan::SyntaxNode<RaTypes>; | 102 | type Repr = rowan::SyntaxNode<RaTypes>; |
89 | } | 103 | } |
90 | 104 | ||
91 | impl SyntaxNode { | 105 | impl ToOwned for SyntaxNode { |
92 | pub(crate) fn new(green: GreenNode, errors: Vec<SyntaxError>) -> TreeArc<SyntaxNode> { | 106 | type Owned = TreeArc<SyntaxNode>; |
93 | let ptr = TreeArc(rowan::SyntaxNode::new(green, errors)); | 107 | fn to_owned(&self) -> TreeArc<SyntaxNode> { |
108 | let ptr = TreeArc(self.0.to_owned()); | ||
94 | TreeArc::cast(ptr) | 109 | TreeArc::cast(ptr) |
95 | } | 110 | } |
96 | } | 111 | } |
97 | 112 | ||
113 | impl fmt::Debug for SyntaxNode { | ||
114 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
115 | write!(fmt, "{:?}@{:?}", self.kind(), self.range())?; | ||
116 | if has_short_text(self.kind()) { | ||
117 | write!(fmt, " \"{}\"", self.text())?; | ||
118 | } | ||
119 | Ok(()) | ||
120 | } | ||
121 | } | ||
122 | |||
98 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] | 123 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] |
99 | pub enum Direction { | 124 | pub enum Direction { |
100 | Next, | 125 | Next, |
@@ -102,48 +127,10 @@ pub enum Direction { | |||
102 | } | 127 | } |
103 | 128 | ||
104 | impl SyntaxNode { | 129 | impl SyntaxNode { |
105 | pub fn leaf_text(&self) -> Option<&SmolStr> { | 130 | pub(crate) fn new(green: GreenNode, errors: Vec<SyntaxError>) -> TreeArc<SyntaxNode> { |
106 | self.0.leaf_text() | 131 | let ptr = TreeArc(rowan::SyntaxNode::new(green, errors)); |
107 | } | ||
108 | pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode> { | ||
109 | crate::algo::generate(Some(self), |&node| node.parent()) | ||
110 | } | ||
111 | pub fn descendants(&self) -> impl Iterator<Item = &SyntaxNode> { | ||
112 | self.preorder().filter_map(|event| match event { | ||
113 | WalkEvent::Enter(node) => Some(node), | ||
114 | WalkEvent::Leave(_) => None, | ||
115 | }) | ||
116 | } | ||
117 | pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = &SyntaxNode> { | ||
118 | crate::algo::generate(Some(self), move |&node| match direction { | ||
119 | Direction::Next => node.next_sibling(), | ||
120 | Direction::Prev => node.prev_sibling(), | ||
121 | }) | ||
122 | } | ||
123 | pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<&SyntaxNode>> { | ||
124 | self.0.preorder().map(|event| match event { | ||
125 | WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode::from_repr(n)), | ||
126 | WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxNode::from_repr(n)), | ||
127 | }) | ||
128 | } | ||
129 | } | ||
130 | |||
131 | impl ToOwned for SyntaxNode { | ||
132 | type Owned = TreeArc<SyntaxNode>; | ||
133 | fn to_owned(&self) -> TreeArc<SyntaxNode> { | ||
134 | let ptr = TreeArc(self.0.to_owned()); | ||
135 | TreeArc::cast(ptr) | 132 | TreeArc::cast(ptr) |
136 | } | 133 | } |
137 | } | ||
138 | |||
139 | impl SyntaxNode { | ||
140 | pub(crate) fn root_data(&self) -> &Vec<SyntaxError> { | ||
141 | self.0.root_data() | ||
142 | } | ||
143 | |||
144 | pub(crate) fn replace_with(&self, replacement: GreenNode) -> GreenNode { | ||
145 | self.0.replace_self(replacement) | ||
146 | } | ||
147 | 134 | ||
148 | pub fn kind(&self) -> SyntaxKind { | 135 | pub fn kind(&self) -> SyntaxKind { |
149 | self.0.kind() | 136 | self.0.kind() |
@@ -161,6 +148,10 @@ impl SyntaxNode { | |||
161 | self.0.is_leaf() | 148 | self.0.is_leaf() |
162 | } | 149 | } |
163 | 150 | ||
151 | pub fn leaf_text(&self) -> Option<&SmolStr> { | ||
152 | self.0.leaf_text() | ||
153 | } | ||
154 | |||
164 | pub fn parent(&self) -> Option<&SyntaxNode> { | 155 | pub fn parent(&self) -> Option<&SyntaxNode> { |
165 | self.0.parent().map(SyntaxNode::from_repr) | 156 | self.0.parent().map(SyntaxNode::from_repr) |
166 | } | 157 | } |
@@ -185,18 +176,85 @@ impl SyntaxNode { | |||
185 | SyntaxNodeChildren(self.0.children()) | 176 | SyntaxNodeChildren(self.0.children()) |
186 | } | 177 | } |
187 | 178 | ||
179 | pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode> { | ||
180 | crate::algo::generate(Some(self), |&node| node.parent()) | ||
181 | } | ||
182 | |||
183 | pub fn descendants(&self) -> impl Iterator<Item = &SyntaxNode> { | ||
184 | self.preorder().filter_map(|event| match event { | ||
185 | WalkEvent::Enter(node) => Some(node), | ||
186 | WalkEvent::Leave(_) => None, | ||
187 | }) | ||
188 | } | ||
189 | |||
190 | pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = &SyntaxNode> { | ||
191 | crate::algo::generate(Some(self), move |&node| match direction { | ||
192 | Direction::Next => node.next_sibling(), | ||
193 | Direction::Prev => node.prev_sibling(), | ||
194 | }) | ||
195 | } | ||
196 | |||
197 | pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<&SyntaxNode>> { | ||
198 | self.0.preorder().map(|event| match event { | ||
199 | WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode::from_repr(n)), | ||
200 | WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxNode::from_repr(n)), | ||
201 | }) | ||
202 | } | ||
203 | |||
188 | pub fn memory_size_of_subtree(&self) -> usize { | 204 | pub fn memory_size_of_subtree(&self) -> usize { |
189 | self.0.memory_size_of_subtree() | 205 | self.0.memory_size_of_subtree() |
190 | } | 206 | } |
191 | } | ||
192 | 207 | ||
193 | impl fmt::Debug for SyntaxNode { | 208 | pub fn debug_dump(&self) -> String { |
194 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | 209 | let mut errors: Vec<_> = match self.ancestors().find_map(SourceFile::cast) { |
195 | write!(fmt, "{:?}@{:?}", self.kind(), self.range())?; | 210 | Some(file) => file.errors(), |
196 | if has_short_text(self.kind()) { | 211 | None => self.root_data().to_vec(), |
197 | write!(fmt, " \"{}\"", self.text())?; | 212 | }; |
213 | errors.sort_by_key(|e| e.offset()); | ||
214 | let mut err_pos = 0; | ||
215 | let mut level = 0; | ||
216 | let mut buf = String::new(); | ||
217 | macro_rules! indent { | ||
218 | () => { | ||
219 | for _ in 0..level { | ||
220 | buf.push_str(" "); | ||
221 | } | ||
222 | }; | ||
198 | } | 223 | } |
199 | Ok(()) | 224 | |
225 | for event in self.preorder() { | ||
226 | match event { | ||
227 | WalkEvent::Enter(node) => { | ||
228 | indent!(); | ||
229 | writeln!(buf, "{:?}", node).unwrap(); | ||
230 | if node.first_child().is_none() { | ||
231 | let off = node.range().end(); | ||
232 | while err_pos < errors.len() && errors[err_pos].offset() <= off { | ||
233 | indent!(); | ||
234 | writeln!(buf, "err: `{}`", errors[err_pos]).unwrap(); | ||
235 | err_pos += 1; | ||
236 | } | ||
237 | } | ||
238 | level += 1; | ||
239 | } | ||
240 | WalkEvent::Leave(_) => level -= 1, | ||
241 | } | ||
242 | } | ||
243 | |||
244 | assert_eq!(level, 0); | ||
245 | for err in errors[err_pos..].iter() { | ||
246 | writeln!(buf, "err: `{}`", err).unwrap(); | ||
247 | } | ||
248 | |||
249 | buf | ||
250 | } | ||
251 | |||
252 | pub(crate) fn root_data(&self) -> &Vec<SyntaxError> { | ||
253 | self.0.root_data() | ||
254 | } | ||
255 | |||
256 | pub(crate) fn replace_with(&self, replacement: GreenNode) -> GreenNode { | ||
257 | self.0.replace_self(replacement) | ||
200 | } | 258 | } |
201 | } | 259 | } |
202 | 260 | ||
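
`debug_dump` takes over from the `utils::dump_tree` helper deleted below, and the updated tests further down call it the same way. A minimal usage sketch (the output line shown in the comment is illustrative, not byte-for-byte):

use ra_syntax::{AstNode, SourceFile};

fn main() {
    let file = SourceFile::parse("fn foo() {}");
    // Prints one line per node, e.g. `FN_DEF@[0; 11)`, with parse errors interleaved.
    println!("{}", file.syntax().debug_dump());
}
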
diff --git a/crates/ra_syntax/src/utils.rs b/crates/ra_syntax/src/utils.rs deleted file mode 100644 index 2e1b42da0..000000000 --- a/crates/ra_syntax/src/utils.rs +++ /dev/null | |||
@@ -1,83 +0,0 @@ | |||
1 | use std::{str, fmt::Write}; | ||
2 | |||
3 | use crate::{SourceFile, SyntaxKind, WalkEvent, AstNode, SyntaxNode}; | ||
4 | |||
5 | /// Parse a file and create a string representation of the resulting parse tree. | ||
6 | pub fn dump_tree(syntax: &SyntaxNode) -> String { | ||
7 | let mut errors: Vec<_> = match syntax.ancestors().find_map(SourceFile::cast) { | ||
8 | Some(file) => file.errors(), | ||
9 | None => syntax.root_data().to_vec(), | ||
10 | }; | ||
11 | errors.sort_by_key(|e| e.offset()); | ||
12 | let mut err_pos = 0; | ||
13 | let mut level = 0; | ||
14 | let mut buf = String::new(); | ||
15 | macro_rules! indent { | ||
16 | () => { | ||
17 | for _ in 0..level { | ||
18 | buf.push_str(" "); | ||
19 | } | ||
20 | }; | ||
21 | } | ||
22 | |||
23 | for event in syntax.preorder() { | ||
24 | match event { | ||
25 | WalkEvent::Enter(node) => { | ||
26 | indent!(); | ||
27 | writeln!(buf, "{:?}", node).unwrap(); | ||
28 | if node.first_child().is_none() { | ||
29 | let off = node.range().end(); | ||
30 | while err_pos < errors.len() && errors[err_pos].offset() <= off { | ||
31 | indent!(); | ||
32 | writeln!(buf, "err: `{}`", errors[err_pos]).unwrap(); | ||
33 | err_pos += 1; | ||
34 | } | ||
35 | } | ||
36 | level += 1; | ||
37 | } | ||
38 | WalkEvent::Leave(_) => level -= 1, | ||
39 | } | ||
40 | } | ||
41 | |||
42 | assert_eq!(level, 0); | ||
43 | for err in errors[err_pos..].iter() { | ||
44 | writeln!(buf, "err: `{}`", err).unwrap(); | ||
45 | } | ||
46 | |||
47 | buf | ||
48 | } | ||
49 | |||
50 | pub fn check_fuzz_invariants(text: &str) { | ||
51 | let file = SourceFile::parse(text); | ||
52 | let root = file.syntax(); | ||
53 | validate_block_structure(root); | ||
54 | let _ = file.errors(); | ||
55 | } | ||
56 | |||
57 | pub(crate) fn validate_block_structure(root: &SyntaxNode) { | ||
58 | let mut stack = Vec::new(); | ||
59 | for node in root.descendants() { | ||
60 | match node.kind() { | ||
61 | SyntaxKind::L_CURLY => stack.push(node), | ||
62 | SyntaxKind::R_CURLY => { | ||
63 | if let Some(pair) = stack.pop() { | ||
64 | assert_eq!( | ||
65 | node.parent(), | ||
66 | pair.parent(), | ||
67 | "\nunpaired curleys:\n{}\n{}\n", | ||
68 | root.text(), | ||
69 | dump_tree(root), | ||
70 | ); | ||
71 | assert!( | ||
72 | node.next_sibling().is_none() && pair.prev_sibling().is_none(), | ||
73 | "\nfloating curlys at {:?}\nfile:\n{}\nerror:\n{}\n", | ||
74 | node, | ||
75 | root.text(), | ||
76 | node.text(), | ||
77 | ); | ||
78 | } | ||
79 | } | ||
80 | _ => (), | ||
81 | } | ||
82 | } | ||
83 | } | ||
diff --git a/crates/ra_syntax/src/validation.rs b/crates/ra_syntax/src/validation.rs index 69958f0d7..69f344d65 100644 --- a/crates/ra_syntax/src/validation.rs +++ b/crates/ra_syntax/src/validation.rs | |||
@@ -5,7 +5,8 @@ mod string; | |||
5 | mod block; | 5 | mod block; |
6 | 6 | ||
7 | use crate::{ | 7 | use crate::{ |
8 | SourceFile, SyntaxError, AstNode, | 8 | SourceFile, SyntaxError, AstNode, SyntaxNode, |
9 | SyntaxKind::{L_CURLY, R_CURLY}, | ||
9 | ast, | 10 | ast, |
10 | algo::visit::{visitor_ctx, VisitorCtx}, | 11 | algo::visit::{visitor_ctx, VisitorCtx}, |
11 | }; | 12 | }; |
@@ -14,12 +15,40 @@ pub(crate) fn validate(file: &SourceFile) -> Vec<SyntaxError> { | |||
14 | let mut errors = Vec::new(); | 15 | let mut errors = Vec::new(); |
15 | for node in file.syntax().descendants() { | 16 | for node in file.syntax().descendants() { |
16 | let _ = visitor_ctx(&mut errors) | 17 | let _ = visitor_ctx(&mut errors) |
17 | .visit::<ast::Byte, _>(self::byte::validate_byte_node) | 18 | .visit::<ast::Byte, _>(byte::validate_byte_node) |
18 | .visit::<ast::ByteString, _>(self::byte_string::validate_byte_string_node) | 19 | .visit::<ast::ByteString, _>(byte_string::validate_byte_string_node) |
19 | .visit::<ast::Char, _>(self::char::validate_char_node) | 20 | .visit::<ast::Char, _>(char::validate_char_node) |
20 | .visit::<ast::String, _>(self::string::validate_string_node) | 21 | .visit::<ast::String, _>(string::validate_string_node) |
21 | .visit::<ast::Block, _>(self::block::validate_block_node) | 22 | .visit::<ast::Block, _>(block::validate_block_node) |
22 | .accept(node); | 23 | .accept(node); |
23 | } | 24 | } |
24 | errors | 25 | errors |
25 | } | 26 | } |
27 | |||
28 | pub(crate) fn validate_block_structure(root: &SyntaxNode) { | ||
29 | let mut stack = Vec::new(); | ||
30 | for node in root.descendants() { | ||
31 | match node.kind() { | ||
32 | L_CURLY => stack.push(node), | ||
33 | R_CURLY => { | ||
34 | if let Some(pair) = stack.pop() { | ||
35 | assert_eq!( | ||
36 | node.parent(), | ||
37 | pair.parent(), | ||
38 | "\nunpaired curleys:\n{}\n{}\n", | ||
39 | root.text(), | ||
40 | root.debug_dump(), | ||
41 | ); | ||
42 | assert!( | ||
43 | node.next_sibling().is_none() && pair.prev_sibling().is_none(), | ||
44 | "\nfloating curlys at {:?}\nfile:\n{}\nerror:\n{}\n", | ||
45 | node, | ||
46 | root.text(), | ||
47 | node.text(), | ||
48 | ); | ||
49 | } | ||
50 | } | ||
51 | _ => (), | ||
52 | } | ||
53 | } | ||
54 | } | ||
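
`validate_block_structure` (moved here from the deleted `utils.rs`) is the invariant the fuzz target relies on: every `R_CURLY` must share a parent with the `L_CURLY` it closes, with no stray siblings. Assuming `check_fuzz_invariants` keeps the `&str` signature it had in `utils.rs` and stays re-exported as the updated tests below suggest, exercising it is a one-liner:

use ra_syntax::check_fuzz_invariants;

fn main() {
    // Parses the (deliberately broken) input and panics if the resulting tree
    // violates the curly-brace pairing invariant checked above.
    check_fuzz_invariants("fn f() { let x = (1; } }");
}
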
diff --git a/crates/ra_syntax/tests/test.rs b/crates/ra_syntax/tests/test.rs index 168d0623d..458740c13 100644 --- a/crates/ra_syntax/tests/test.rs +++ b/crates/ra_syntax/tests/test.rs | |||
@@ -8,10 +8,7 @@ use std::{ | |||
8 | }; | 8 | }; |
9 | 9 | ||
10 | use test_utils::{project_dir, dir_tests, read_text, collect_tests}; | 10 | use test_utils::{project_dir, dir_tests, read_text, collect_tests}; |
11 | use ra_syntax::{ | 11 | use ra_syntax::{SourceFile, AstNode, check_fuzz_invariants}; |
12 | SourceFile, AstNode, | ||
13 | utils::{check_fuzz_invariants, dump_tree}, | ||
14 | }; | ||
15 | 12 | ||
16 | #[test] | 13 | #[test] |
17 | fn lexer_tests() { | 14 | fn lexer_tests() { |
@@ -32,7 +29,7 @@ fn parser_tests() { | |||
32 | "There should be no errors in the file {:?}", | 29 | "There should be no errors in the file {:?}", |
33 | path.display() | 30 | path.display() |
34 | ); | 31 | ); |
35 | dump_tree(file.syntax()) | 32 | file.syntax().debug_dump() |
36 | }); | 33 | }); |
37 | dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| { | 34 | dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| { |
38 | let file = SourceFile::parse(text); | 35 | let file = SourceFile::parse(text); |
@@ -43,7 +40,7 @@ fn parser_tests() { | |||
43 | "There should be errors in the file {:?}", | 40 | "There should be errors in the file {:?}", |
44 | path.display() | 41 | path.display() |
45 | ); | 42 | ); |
46 | dump_tree(file.syntax()) | 43 | file.syntax().debug_dump() |
47 | }); | 44 | }); |
48 | } | 45 | } |
49 | 46 | ||