70 files changed, 1291 insertions, 1045 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 7305d6dd5..5aa4ff5de 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -141,9 +141,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" | |||
141 | [[package]] | 141 | [[package]] |
142 | name = "chalk-engine" | 142 | name = "chalk-engine" |
143 | version = "0.9.0" | 143 | version = "0.9.0" |
144 | source = "git+https://github.com/rust-lang/chalk.git#b92c15327f1d336fec7573c7de323ab247cca386" | 144 | source = "git+https://github.com/flodiebold/chalk.git?branch=fuel#fba97af88ff2e0266db12490e2baf47d17e557e3" |
145 | dependencies = [ | 145 | dependencies = [ |
146 | "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git)", | 146 | "chalk-macros 0.1.1 (git+https://github.com/flodiebold/chalk.git?branch=fuel)", |
147 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | 147 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", |
148 | "stacker 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", | 148 | "stacker 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", |
149 | ] | 149 | ] |
@@ -151,17 +151,17 @@ dependencies = [ | |||
151 | [[package]] | 151 | [[package]] |
152 | name = "chalk-ir" | 152 | name = "chalk-ir" |
153 | version = "0.1.0" | 153 | version = "0.1.0" |
154 | source = "git+https://github.com/rust-lang/chalk.git#b92c15327f1d336fec7573c7de323ab247cca386" | 154 | source = "git+https://github.com/flodiebold/chalk.git?branch=fuel#fba97af88ff2e0266db12490e2baf47d17e557e3" |
155 | dependencies = [ | 155 | dependencies = [ |
156 | "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git)", | 156 | "chalk-engine 0.9.0 (git+https://github.com/flodiebold/chalk.git?branch=fuel)", |
157 | "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git)", | 157 | "chalk-macros 0.1.1 (git+https://github.com/flodiebold/chalk.git?branch=fuel)", |
158 | "lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)", | 158 | "lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)", |
159 | ] | 159 | ] |
160 | 160 | ||
161 | [[package]] | 161 | [[package]] |
162 | name = "chalk-macros" | 162 | name = "chalk-macros" |
163 | version = "0.1.1" | 163 | version = "0.1.1" |
164 | source = "git+https://github.com/rust-lang/chalk.git#b92c15327f1d336fec7573c7de323ab247cca386" | 164 | source = "git+https://github.com/flodiebold/chalk.git?branch=fuel#fba97af88ff2e0266db12490e2baf47d17e557e3" |
165 | dependencies = [ | 165 | dependencies = [ |
166 | "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", | 166 | "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", |
167 | ] | 167 | ] |
@@ -169,22 +169,22 @@ dependencies = [ | |||
169 | [[package]] | 169 | [[package]] |
170 | name = "chalk-rust-ir" | 170 | name = "chalk-rust-ir" |
171 | version = "0.1.0" | 171 | version = "0.1.0" |
172 | source = "git+https://github.com/rust-lang/chalk.git#b92c15327f1d336fec7573c7de323ab247cca386" | 172 | source = "git+https://github.com/flodiebold/chalk.git?branch=fuel#fba97af88ff2e0266db12490e2baf47d17e557e3" |
173 | dependencies = [ | 173 | dependencies = [ |
174 | "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git)", | 174 | "chalk-engine 0.9.0 (git+https://github.com/flodiebold/chalk.git?branch=fuel)", |
175 | "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git)", | 175 | "chalk-ir 0.1.0 (git+https://github.com/flodiebold/chalk.git?branch=fuel)", |
176 | "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git)", | 176 | "chalk-macros 0.1.1 (git+https://github.com/flodiebold/chalk.git?branch=fuel)", |
177 | ] | 177 | ] |
178 | 178 | ||
179 | [[package]] | 179 | [[package]] |
180 | name = "chalk-solve" | 180 | name = "chalk-solve" |
181 | version = "0.1.0" | 181 | version = "0.1.0" |
182 | source = "git+https://github.com/rust-lang/chalk.git#b92c15327f1d336fec7573c7de323ab247cca386" | 182 | source = "git+https://github.com/flodiebold/chalk.git?branch=fuel#fba97af88ff2e0266db12490e2baf47d17e557e3" |
183 | dependencies = [ | 183 | dependencies = [ |
184 | "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git)", | 184 | "chalk-engine 0.9.0 (git+https://github.com/flodiebold/chalk.git?branch=fuel)", |
185 | "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git)", | 185 | "chalk-ir 0.1.0 (git+https://github.com/flodiebold/chalk.git?branch=fuel)", |
186 | "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git)", | 186 | "chalk-macros 0.1.1 (git+https://github.com/flodiebold/chalk.git?branch=fuel)", |
187 | "chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git)", | 187 | "chalk-rust-ir 0.1.0 (git+https://github.com/flodiebold/chalk.git?branch=fuel)", |
188 | "derive-new 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)", | 188 | "derive-new 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)", |
189 | "ena 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", | 189 | "ena 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", |
190 | "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", | 190 | "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", |
@@ -257,7 +257,7 @@ dependencies = [ | |||
257 | "encode_unicode 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", | 257 | "encode_unicode 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", |
258 | "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", | 258 | "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", |
259 | "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", | 259 | "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", |
260 | "parking_lot 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", | 260 | "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", |
261 | "regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)", | 261 | "regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)", |
262 | "termios 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | 262 | "termios 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", |
263 | "unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", | 263 | "unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", |
@@ -487,7 +487,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" | |||
487 | 487 | ||
488 | [[package]] | 488 | [[package]] |
489 | name = "gen_lsp_server" | 489 | name = "gen_lsp_server" |
490 | version = "0.1.0" | 490 | version = "0.2.0" |
491 | dependencies = [ | 491 | dependencies = [ |
492 | "crossbeam-channel 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", | 492 | "crossbeam-channel 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", |
493 | "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", | 493 | "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", |
@@ -551,7 +551,7 @@ dependencies = [ | |||
551 | "console 0.7.5 (registry+https://github.com/rust-lang/crates.io-index)", | 551 | "console 0.7.5 (registry+https://github.com/rust-lang/crates.io-index)", |
552 | "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", | 552 | "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", |
553 | "number_prefix 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", | 553 | "number_prefix 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", |
554 | "parking_lot 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", | 554 | "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", |
555 | "regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)", | 555 | "regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)", |
556 | ] | 556 | ] |
557 | 557 | ||
@@ -575,7 +575,7 @@ dependencies = [ | |||
575 | 575 | ||
576 | [[package]] | 576 | [[package]] |
577 | name = "insta" | 577 | name = "insta" |
578 | version = "0.7.4" | 578 | version = "0.8.1" |
579 | source = "registry+https://github.com/rust-lang/crates.io-index" | 579 | source = "registry+https://github.com/rust-lang/crates.io-index" |
580 | dependencies = [ | 580 | dependencies = [ |
581 | "chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", | 581 | "chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", |
@@ -1109,12 +1109,12 @@ name = "ra_hir" | |||
1109 | version = "0.1.0" | 1109 | version = "0.1.0" |
1110 | dependencies = [ | 1110 | dependencies = [ |
1111 | "arrayvec 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", | 1111 | "arrayvec 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", |
1112 | "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git)", | 1112 | "chalk-ir 0.1.0 (git+https://github.com/flodiebold/chalk.git?branch=fuel)", |
1113 | "chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git)", | 1113 | "chalk-rust-ir 0.1.0 (git+https://github.com/flodiebold/chalk.git?branch=fuel)", |
1114 | "chalk-solve 0.1.0 (git+https://github.com/rust-lang/chalk.git)", | 1114 | "chalk-solve 0.1.0 (git+https://github.com/flodiebold/chalk.git?branch=fuel)", |
1115 | "ena 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1115 | "ena 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", |
1116 | "flexi_logger 0.11.4 (registry+https://github.com/rust-lang/crates.io-index)", | 1116 | "flexi_logger 0.11.4 (registry+https://github.com/rust-lang/crates.io-index)", |
1117 | "insta 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", | 1117 | "insta 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", |
1118 | "join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1118 | "join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", |
1119 | "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", | 1119 | "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", |
1120 | "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1120 | "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", |
@@ -1134,7 +1134,7 @@ name = "ra_ide_api" | |||
1134 | version = "0.1.0" | 1134 | version = "0.1.0" |
1135 | dependencies = [ | 1135 | dependencies = [ |
1136 | "fst 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1136 | "fst 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", |
1137 | "insta 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", | 1137 | "insta 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", |
1138 | "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1138 | "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", |
1139 | "jemalloc-ctl 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1139 | "jemalloc-ctl 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", |
1140 | "jemallocator 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", | 1140 | "jemallocator 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", |
@@ -1165,7 +1165,7 @@ dependencies = [ | |||
1165 | "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", | 1165 | "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", |
1166 | "failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", | 1166 | "failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", |
1167 | "flexi_logger 0.11.4 (registry+https://github.com/rust-lang/crates.io-index)", | 1167 | "flexi_logger 0.11.4 (registry+https://github.com/rust-lang/crates.io-index)", |
1168 | "gen_lsp_server 0.1.0", | 1168 | "gen_lsp_server 0.2.0", |
1169 | "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", | 1169 | "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", |
1170 | "lsp-types 0.57.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1170 | "lsp-types 0.57.1 (registry+https://github.com/rust-lang/crates.io-index)", |
1171 | "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1171 | "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", |
@@ -1211,6 +1211,7 @@ dependencies = [ | |||
1211 | name = "ra_prof" | 1211 | name = "ra_prof" |
1212 | version = "0.1.0" | 1212 | version = "0.1.0" |
1213 | dependencies = [ | 1213 | dependencies = [ |
1214 | "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
1214 | "once_cell 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1215 | "once_cell 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", |
1215 | ] | 1216 | ] |
1216 | 1217 | ||
@@ -1242,9 +1243,9 @@ dependencies = [ | |||
1242 | "ra_parser 0.1.0", | 1243 | "ra_parser 0.1.0", |
1243 | "ra_text_edit 0.1.0", | 1244 | "ra_text_edit 0.1.0", |
1244 | "rowan 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1245 | "rowan 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", |
1245 | "smol_str 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", | 1246 | "smol_str 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", |
1246 | "test_utils 0.1.0", | 1247 | "test_utils 0.1.0", |
1247 | "text_unit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", | 1248 | "text_unit 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", |
1248 | "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1249 | "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", |
1249 | "walkdir 2.2.7 (registry+https://github.com/rust-lang/crates.io-index)", | 1250 | "walkdir 2.2.7 (registry+https://github.com/rust-lang/crates.io-index)", |
1250 | ] | 1251 | ] |
@@ -1255,14 +1256,14 @@ version = "0.1.0" | |||
1255 | dependencies = [ | 1256 | dependencies = [ |
1256 | "proptest 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1257 | "proptest 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)", |
1257 | "test_utils 0.1.0", | 1258 | "test_utils 0.1.0", |
1258 | "text_unit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", | 1259 | "text_unit 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", |
1259 | ] | 1260 | ] |
1260 | 1261 | ||
1261 | [[package]] | 1262 | [[package]] |
1262 | name = "ra_tt" | 1263 | name = "ra_tt" |
1263 | version = "0.1.0" | 1264 | version = "0.1.0" |
1264 | dependencies = [ | 1265 | dependencies = [ |
1265 | "smol_str 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", | 1266 | "smol_str 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", |
1266 | ] | 1267 | ] |
1267 | 1268 | ||
1268 | [[package]] | 1269 | [[package]] |
@@ -1467,8 +1468,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" | |||
1467 | dependencies = [ | 1468 | dependencies = [ |
1468 | "colosseum 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", | 1469 | "colosseum 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", |
1469 | "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1470 | "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", |
1470 | "smol_str 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", | 1471 | "smol_str 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", |
1471 | "text_unit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", | 1472 | "text_unit 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", |
1472 | ] | 1473 | ] |
1473 | 1474 | ||
1474 | [[package]] | 1475 | [[package]] |
@@ -1625,7 +1626,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" | |||
1625 | 1626 | ||
1626 | [[package]] | 1627 | [[package]] |
1627 | name = "smol_str" | 1628 | name = "smol_str" |
1628 | version = "0.1.10" | 1629 | version = "0.1.11" |
1629 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1630 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1630 | dependencies = [ | 1631 | dependencies = [ |
1631 | "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", | 1632 | "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", |
@@ -1748,12 +1749,12 @@ dependencies = [ | |||
1748 | "difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1749 | "difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", |
1749 | "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1750 | "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", |
1750 | "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", | 1751 | "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", |
1751 | "text_unit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", | 1752 | "text_unit 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", |
1752 | ] | 1753 | ] |
1753 | 1754 | ||
1754 | [[package]] | 1755 | [[package]] |
1755 | name = "text_unit" | 1756 | name = "text_unit" |
1756 | version = "0.1.6" | 1757 | version = "0.1.8" |
1757 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1758 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1758 | dependencies = [ | 1759 | dependencies = [ |
1759 | "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", | 1760 | "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", |
@@ -2036,11 +2037,11 @@ dependencies = [ | |||
2036 | "checksum cargo_metadata 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "178d62b240c34223f265a4c1e275e37d62da163d421fc8d7f7e3ee340f803c57" | 2037 | "checksum cargo_metadata 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "178d62b240c34223f265a4c1e275e37d62da163d421fc8d7f7e3ee340f803c57" |
2037 | "checksum cc 1.0.36 (registry+https://github.com/rust-lang/crates.io-index)" = "a0c56216487bb80eec9c4516337b2588a4f2a2290d72a1416d930e4dcdb0c90d" | 2038 | "checksum cc 1.0.36 (registry+https://github.com/rust-lang/crates.io-index)" = "a0c56216487bb80eec9c4516337b2588a4f2a2290d72a1416d930e4dcdb0c90d" |
2038 | "checksum cfg-if 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "11d43355396e872eefb45ce6342e4374ed7bc2b3a502d1b28e36d6e23c05d1f4" | 2039 | "checksum cfg-if 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "11d43355396e872eefb45ce6342e4374ed7bc2b3a502d1b28e36d6e23c05d1f4" |
2039 | "checksum chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git)" = "<none>" | 2040 | "checksum chalk-engine 0.9.0 (git+https://github.com/flodiebold/chalk.git?branch=fuel)" = "<none>" |
2040 | "checksum chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git)" = "<none>" | 2041 | "checksum chalk-ir 0.1.0 (git+https://github.com/flodiebold/chalk.git?branch=fuel)" = "<none>" |
2041 | "checksum chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git)" = "<none>" | 2042 | "checksum chalk-macros 0.1.1 (git+https://github.com/flodiebold/chalk.git?branch=fuel)" = "<none>" |
2042 | "checksum chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git)" = "<none>" | 2043 | "checksum chalk-rust-ir 0.1.0 (git+https://github.com/flodiebold/chalk.git?branch=fuel)" = "<none>" |
2043 | "checksum chalk-solve 0.1.0 (git+https://github.com/rust-lang/chalk.git)" = "<none>" | 2044 | "checksum chalk-solve 0.1.0 (git+https://github.com/flodiebold/chalk.git?branch=fuel)" = "<none>" |
2044 | "checksum chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "45912881121cb26fad7c38c17ba7daa18764771836b34fab7d3fbd93ed633878" | 2045 | "checksum chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "45912881121cb26fad7c38c17ba7daa18764771836b34fab7d3fbd93ed633878" |
2045 | "checksum ci_info 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e5e881307a989a3a5e20d52a32cc05950e3c2178cccfcc9428271a6cde09f902" | 2046 | "checksum ci_info 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e5e881307a989a3a5e20d52a32cc05950e3c2178cccfcc9428271a6cde09f902" |
2046 | "checksum clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9" | 2047 | "checksum clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9" |
@@ -2087,7 +2088,7 @@ dependencies = [ | |||
2087 | "checksum indicatif 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2c60da1c9abea75996b70a931bba6c750730399005b61ccd853cee50ef3d0d0c" | 2088 | "checksum indicatif 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2c60da1c9abea75996b70a931bba6c750730399005b61ccd853cee50ef3d0d0c" |
2088 | "checksum inotify 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "40b54539f3910d6f84fbf9a643efd6e3aa6e4f001426c0329576128255994718" | 2089 | "checksum inotify 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "40b54539f3910d6f84fbf9a643efd6e3aa6e4f001426c0329576128255994718" |
2089 | "checksum inotify-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e74a1aa87c59aeff6ef2cc2fa62d41bc43f54952f55652656b18a02fd5e356c0" | 2090 | "checksum inotify-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e74a1aa87c59aeff6ef2cc2fa62d41bc43f54952f55652656b18a02fd5e356c0" |
2090 | "checksum insta 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "03e7d88a87d342ce8bd698516151be43e6eb2e84b683db528696cb4a382f734a" | 2091 | "checksum insta 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8bbbb69ec4557c37b2bf4d525d106d828e0c2fbd6c44bc98cd3798da13c73b9f" |
2091 | "checksum iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe6e417e7d0975db6512b90796e8ce223145ac4e33c377e4a42882a0e88bb08" | 2092 | "checksum iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe6e417e7d0975db6512b90796e8ce223145ac4e33c377e4a42882a0e88bb08" |
2092 | "checksum itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)" = "0d47946d458e94a1b7bcabbf6521ea7c037062c81f534615abcad76e84d4970d" | 2093 | "checksum itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)" = "0d47946d458e94a1b7bcabbf6521ea7c037062c81f534615abcad76e84d4970d" |
2093 | "checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358" | 2094 | "checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358" |
@@ -2181,7 +2182,7 @@ dependencies = [ | |||
2181 | "checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" | 2182 | "checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" |
2182 | "checksum slug 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b3bc762e6a4b6c6fcaade73e77f9ebc6991b676f88bb2358bddb56560f073373" | 2183 | "checksum slug 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b3bc762e6a4b6c6fcaade73e77f9ebc6991b676f88bb2358bddb56560f073373" |
2183 | "checksum smallvec 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c4488ae950c49d403731982257768f48fada354a5203fe81f9bb6f43ca9002be" | 2184 | "checksum smallvec 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c4488ae950c49d403731982257768f48fada354a5203fe81f9bb6f43ca9002be" |
2184 | "checksum smol_str 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "d077b3367211e9c6e2e012fb804c444e0d80ab5a51ae4137739b58e6446dcaef" | 2185 | "checksum smol_str 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "e6507d018aa8dfcaa08aaab587605591cd2109df66a921486a2220e2daf9fa29" |
2185 | "checksum stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dba1a27d3efae4351c8051072d619e3ade2820635c3958d826bfea39d59b54c8" | 2186 | "checksum stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dba1a27d3efae4351c8051072d619e3ade2820635c3958d826bfea39d59b54c8" |
2186 | "checksum stacker 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "fb79482f57cf598af52094ec4cc3b3c42499d3ce5bd426f2ac41515b7e57404b" | 2187 | "checksum stacker 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "fb79482f57cf598af52094ec4cc3b3c42499d3ce5bd426f2ac41515b7e57404b" |
2187 | "checksum strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" | 2188 | "checksum strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" |
@@ -2193,7 +2194,7 @@ dependencies = [ | |||
2193 | "checksum teraron 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0d89ad4617d1dec55331067fadaa041e813479e1779616f3d3ce9308bf46184e" | 2194 | "checksum teraron 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0d89ad4617d1dec55331067fadaa041e813479e1779616f3d3ce9308bf46184e" |
2194 | "checksum termion 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dde0593aeb8d47accea5392b39350015b5eccb12c0d98044d856983d89548dea" | 2195 | "checksum termion 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dde0593aeb8d47accea5392b39350015b5eccb12c0d98044d856983d89548dea" |
2195 | "checksum termios 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "72b620c5ea021d75a735c943269bb07d30c9b77d6ac6b236bc8b5c496ef05625" | 2196 | "checksum termios 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "72b620c5ea021d75a735c943269bb07d30c9b77d6ac6b236bc8b5c496ef05625" |
2196 | "checksum text_unit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "158bb1c22b638b1da3c95a8ad9f061ea40d4d39fd0301be3a520f92efeeb189e" | 2197 | "checksum text_unit 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "019e5bc4c698b63073968610964c8c0869b60455d8f2e303a0ee7ad2e4f6ade4" |
2197 | "checksum textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" | 2198 | "checksum textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" |
2198 | "checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b" | 2199 | "checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b" |
2199 | "checksum threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e2f0c90a5f3459330ac8bc0d2f879c693bb7a2f59689c1083fc4ef83834da865" | 2200 | "checksum threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e2f0c90a5f3459330ac8bc0d2f879c693bb7a2f59689c1083fc4ef83834da865" |
diff --git a/README.md b/README.md
@@ -11,7 +11,8 @@ https://github.com/rust-lang/compiler-team/tree/master/working-groups/rls-2.0
11 | 11 | ||
12 | Work on the Rust Analyzer is sponsored by | 12 | Work on the Rust Analyzer is sponsored by |
13 | 13 | ||
14 | [](https://ferrous-systems.com/) | 14 | [<img src="https://user-images.githubusercontent.com/1711539/58105231-cf306900-7bee-11e9-83d8-9f1102e59d29.png" alt="Ferrous Systems" width="300">](https://ferrous-systems.com/) |
15 | - [Mozilla](https://www.mozilla.org/en-US/) | ||
15 | 16 | ||
16 | ## Language Server Quick Start | 17 | ## Language Server Quick Start |
17 | 18 | ||
diff --git a/crates/gen_lsp_server/Cargo.toml b/crates/gen_lsp_server/Cargo.toml
index 34343e2f2..ba8bfdbd3 100644
--- a/crates/gen_lsp_server/Cargo.toml
+++ b/crates/gen_lsp_server/Cargo.toml
@@ -1,7 +1,7 @@ | |||
1 | [package] | 1 | [package] |
2 | edition = "2018" | 2 | edition = "2018" |
3 | name = "gen_lsp_server" | 3 | name = "gen_lsp_server" |
4 | version = "0.1.0" | 4 | version = "0.2.0" |
5 | authors = ["rust-analyzer developers"] | 5 | authors = ["rust-analyzer developers"] |
6 | repository = "https://github.com/rust-analyzer/rust-analyzer" | 6 | repository = "https://github.com/rust-analyzer/rust-analyzer" |
7 | license = "MIT OR Apache-2.0" | 7 | license = "MIT OR Apache-2.0" |
diff --git a/crates/ra_assists/src/add_explicit_type.rs b/crates/ra_assists/src/add_explicit_type.rs
index cb0ac9885..f3ed74b7f 100644
--- a/crates/ra_assists/src/add_explicit_type.rs
+++ b/crates/ra_assists/src/add_explicit_type.rs
@@ -3,7 +3,7 @@ use hir::{ | |||
3 | db::HirDatabase, | 3 | db::HirDatabase, |
4 | }; | 4 | }; |
5 | use ra_syntax::{ | 5 | use ra_syntax::{ |
6 | SyntaxKind, | 6 | T, |
7 | ast::{LetStmt, PatKind, NameOwner, AstNode} | 7 | ast::{LetStmt, PatKind, NameOwner, AstNode} |
8 | }; | 8 | }; |
9 | 9 | ||
@@ -24,7 +24,7 @@ pub(crate) fn add_explicit_type(mut ctx: AssistCtx<impl HirDatabase>) -> Option< | |||
24 | let name = pat.name()?; | 24 | let name = pat.name()?; |
25 | let name_range = name.syntax().range(); | 25 | let name_range = name.syntax().range(); |
26 | // Assist not applicable if the type has already been specified | 26 | // Assist not applicable if the type has already been specified |
27 | if stmt.syntax().children_with_tokens().any(|child| child.kind() == SyntaxKind::COLON) { | 27 | if stmt.syntax().children_with_tokens().any(|child| child.kind() == T![:]) { |
28 | return None; | 28 | return None; |
29 | } | 29 | } |
30 | // Infer type | 30 | // Infer type |
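A recurring change in this commit is replacing spelled-out SyntaxKind constants (COLON, COMMA, L_CURLY, ...) with ra_syntax's T![...] token macro. A minimal sketch of how the new style reads, assuming the ra_syntax crate from this repository as used in the hunk above:

    use ra_syntax::{ast::{AstNode, LetStmt}, T};

    // Mirrors the check above: the `let` already has an explicit type if any
    // of its child tokens is the `:` of a type ascription.
    fn has_type_ascription(stmt: &LetStmt) -> bool {
        stmt.syntax().children_with_tokens().any(|child| child.kind() == T![:])
    }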
diff --git a/crates/ra_assists/src/ast_editor.rs b/crates/ra_assists/src/ast_editor.rs
index aa7aeaabb..9afcac01a 100644
--- a/crates/ra_assists/src/ast_editor.rs
+++ b/crates/ra_assists/src/ast_editor.rs
@@ -2,7 +2,7 @@ use std::{iter, ops::RangeInclusive}; | |||
2 | 2 | ||
3 | use arrayvec::ArrayVec; | 3 | use arrayvec::ArrayVec; |
4 | use ra_text_edit::TextEditBuilder; | 4 | use ra_text_edit::TextEditBuilder; |
5 | use ra_syntax::{AstNode, TreeArc, ast, SyntaxKind::*, SyntaxElement, SourceFile, InsertPosition, Direction}; | 5 | use ra_syntax::{AstNode, TreeArc, ast, SyntaxKind::*, SyntaxElement, SourceFile, InsertPosition, Direction, T}; |
6 | use ra_fmt::leading_indent; | 6 | use ra_fmt::leading_indent; |
7 | use hir::Name; | 7 | use hir::Name; |
8 | 8 | ||
@@ -49,7 +49,7 @@ impl<N: AstNode> AstEditor<N> { | |||
49 | 49 | ||
50 | fn do_make_multiline(&mut self) { | 50 | fn do_make_multiline(&mut self) { |
51 | let l_curly = | 51 | let l_curly = |
52 | match self.ast().syntax().children_with_tokens().find(|it| it.kind() == L_CURLY) { | 52 | match self.ast().syntax().children_with_tokens().find(|it| it.kind() == T!['{']) { |
53 | Some(it) => it, | 53 | Some(it) => it, |
54 | None => return, | 54 | None => return, |
55 | }; | 55 | }; |
@@ -124,7 +124,7 @@ impl AstEditor<ast::NamedFieldList> { | |||
124 | if let Some(comma) = $anchor | 124 | if let Some(comma) = $anchor |
125 | .syntax() | 125 | .syntax() |
126 | .siblings_with_tokens(Direction::Next) | 126 | .siblings_with_tokens(Direction::Next) |
127 | .find(|it| it.kind() == COMMA) | 127 | .find(|it| it.kind() == T![,]) |
128 | { | 128 | { |
129 | InsertPosition::After(comma) | 129 | InsertPosition::After(comma) |
130 | } else { | 130 | } else { |
@@ -154,7 +154,7 @@ impl AstEditor<ast::NamedFieldList> { | |||
154 | } | 154 | } |
155 | 155 | ||
156 | fn l_curly(&self) -> Option<SyntaxElement> { | 156 | fn l_curly(&self) -> Option<SyntaxElement> { |
157 | self.ast().syntax().children_with_tokens().find(|it| it.kind() == L_CURLY) | 157 | self.ast().syntax().children_with_tokens().find(|it| it.kind() == T!['{']) |
158 | } | 158 | } |
159 | } | 159 | } |
160 | 160 | ||
@@ -188,7 +188,7 @@ impl AstEditor<ast::ItemList> { | |||
188 | } | 188 | } |
189 | 189 | ||
190 | fn l_curly(&self) -> Option<SyntaxElement> { | 190 | fn l_curly(&self) -> Option<SyntaxElement> { |
191 | self.ast().syntax().children_with_tokens().find(|it| it.kind() == L_CURLY) | 191 | self.ast().syntax().children_with_tokens().find(|it| it.kind() == T!['{']) |
192 | } | 192 | } |
193 | } | 193 | } |
194 | 194 | ||
@@ -290,7 +290,7 @@ fn ast_node_from_file_text<N: AstNode>(text: &str) -> TreeArc<N> { | |||
290 | 290 | ||
291 | mod tokens { | 291 | mod tokens { |
292 | use once_cell::sync::Lazy; | 292 | use once_cell::sync::Lazy; |
293 | use ra_syntax::{AstNode, SourceFile, TreeArc, SyntaxToken, SyntaxKind::*}; | 293 | use ra_syntax::{AstNode, SourceFile, TreeArc, SyntaxToken, SyntaxKind::*, T}; |
294 | 294 | ||
295 | static SOURCE_FILE: Lazy<TreeArc<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;")); | 295 | static SOURCE_FILE: Lazy<TreeArc<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;")); |
296 | 296 | ||
@@ -299,7 +299,7 @@ mod tokens { | |||
299 | .syntax() | 299 | .syntax() |
300 | .descendants_with_tokens() | 300 | .descendants_with_tokens() |
301 | .filter_map(|it| it.as_token()) | 301 | .filter_map(|it| it.as_token()) |
302 | .find(|it| it.kind() == COMMA) | 302 | .find(|it| it.kind() == T![,]) |
303 | .unwrap() | 303 | .unwrap() |
304 | } | 304 | } |
305 | 305 | ||
diff --git a/crates/ra_assists/src/auto_import.rs b/crates/ra_assists/src/auto_import.rs
index 7c856c19b..1566cf179 100644
--- a/crates/ra_assists/src/auto_import.rs
+++ b/crates/ra_assists/src/auto_import.rs
@@ -2,8 +2,9 @@ use ra_text_edit::TextEditBuilder; | |||
2 | use hir::{ self, db::HirDatabase}; | 2 | use hir::{ self, db::HirDatabase}; |
3 | 3 | ||
4 | use ra_syntax::{ | 4 | use ra_syntax::{ |
5 | T, | ||
5 | ast::{ self, NameOwner }, AstNode, SyntaxNode, Direction, TextRange, SmolStr, | 6 | ast::{ self, NameOwner }, AstNode, SyntaxNode, Direction, TextRange, SmolStr, |
6 | SyntaxKind::{ PATH, PATH_SEGMENT, COLONCOLON, COMMA } | 7 | SyntaxKind::{ PATH, PATH_SEGMENT } |
7 | }; | 8 | }; |
8 | use crate::{ | 9 | use crate::{ |
9 | AssistId, | 10 | AssistId, |
@@ -23,7 +24,7 @@ fn collect_path_segments_raw<'a>( | |||
23 | children.next().map(|n| (n, n.kind())), | 24 | children.next().map(|n| (n, n.kind())), |
24 | ); | 25 | ); |
25 | match (first, second, third) { | 26 | match (first, second, third) { |
26 | (Some((subpath, PATH)), Some((_, COLONCOLON)), Some((segment, PATH_SEGMENT))) => { | 27 | (Some((subpath, PATH)), Some((_, T![::])), Some((segment, PATH_SEGMENT))) => { |
27 | path = ast::Path::cast(subpath.as_node()?)?; | 28 | path = ast::Path::cast(subpath.as_node()?)?; |
28 | segments.push(ast::PathSegment::cast(segment.as_node()?)?); | 29 | segments.push(ast::PathSegment::cast(segment.as_node()?)?); |
29 | } | 30 | } |
@@ -421,7 +422,7 @@ fn make_assist_add_in_tree_list( | |||
421 | let last = tree_list.use_trees().last(); | 422 | let last = tree_list.use_trees().last(); |
422 | if let Some(last) = last { | 423 | if let Some(last) = last { |
423 | let mut buf = String::new(); | 424 | let mut buf = String::new(); |
424 | let comma = last.syntax().siblings(Direction::Next).find(|n| n.kind() == COMMA); | 425 | let comma = last.syntax().siblings(Direction::Next).find(|n| n.kind() == T![,]); |
425 | let offset = if let Some(comma) = comma { | 426 | let offset = if let Some(comma) = comma { |
426 | comma.range().end() | 427 | comma.range().end() |
427 | } else { | 428 | } else { |
diff --git a/crates/ra_assists/src/change_visibility.rs b/crates/ra_assists/src/change_visibility.rs
index c63470726..620f534b5 100644
--- a/crates/ra_assists/src/change_visibility.rs
+++ b/crates/ra_assists/src/change_visibility.rs
@@ -1,8 +1,9 @@ | |||
1 | use hir::db::HirDatabase; | 1 | use hir::db::HirDatabase; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | T, | ||
3 | AstNode, SyntaxNode, TextUnit, | 4 | AstNode, SyntaxNode, TextUnit, |
4 | ast::{self, VisibilityOwner, NameOwner}, | 5 | ast::{self, VisibilityOwner, NameOwner}, |
5 | SyntaxKind::{VISIBILITY, FN_KW, MOD_KW, STRUCT_KW, ENUM_KW, TRAIT_KW, FN_DEF, MODULE, STRUCT_DEF, ENUM_DEF, TRAIT_DEF, IDENT, WHITESPACE, COMMENT, ATTR}, | 6 | SyntaxKind::{VISIBILITY, FN_DEF, MODULE, STRUCT_DEF, ENUM_DEF, TRAIT_DEF, IDENT, WHITESPACE, COMMENT, ATTR}, |
6 | }; | 7 | }; |
7 | 8 | ||
8 | use crate::{AssistCtx, Assist, AssistId}; | 9 | use crate::{AssistCtx, Assist, AssistId}; |
@@ -16,7 +17,7 @@ pub(crate) fn change_visibility(ctx: AssistCtx<impl HirDatabase>) -> Option<Assi | |||
16 | 17 | ||
17 | fn add_vis(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 18 | fn add_vis(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { |
18 | let item_keyword = ctx.token_at_offset().find(|leaf| match leaf.kind() { | 19 | let item_keyword = ctx.token_at_offset().find(|leaf| match leaf.kind() { |
19 | FN_KW | MOD_KW | STRUCT_KW | ENUM_KW | TRAIT_KW => true, | 20 | T![fn] | T![mod] | T![struct] | T![enum] | T![trait] => true, |
20 | _ => false, | 21 | _ => false, |
21 | }); | 22 | }); |
22 | 23 | ||
diff --git a/crates/ra_assists/src/flip_comma.rs b/crates/ra_assists/src/flip_comma.rs
index a9b108111..7626ffad3 100644
--- a/crates/ra_assists/src/flip_comma.rs
+++ b/crates/ra_assists/src/flip_comma.rs
@@ -1,14 +1,14 @@ | |||
1 | use hir::db::HirDatabase; | 1 | use hir::db::HirDatabase; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | T, | ||
3 | Direction, | 4 | Direction, |
4 | SyntaxKind::COMMA, | ||
5 | algo::non_trivia_sibling, | 5 | algo::non_trivia_sibling, |
6 | }; | 6 | }; |
7 | 7 | ||
8 | use crate::{AssistCtx, Assist, AssistId}; | 8 | use crate::{AssistCtx, Assist, AssistId}; |
9 | 9 | ||
10 | pub(crate) fn flip_comma(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 10 | pub(crate) fn flip_comma(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { |
11 | let comma = ctx.token_at_offset().find(|leaf| leaf.kind() == COMMA)?; | 11 | let comma = ctx.token_at_offset().find(|leaf| leaf.kind() == T![,])?; |
12 | let prev = non_trivia_sibling(comma.into(), Direction::Prev)?; | 12 | let prev = non_trivia_sibling(comma.into(), Direction::Prev)?; |
13 | let next = non_trivia_sibling(comma.into(), Direction::Next)?; | 13 | let next = non_trivia_sibling(comma.into(), Direction::Next)?; |
14 | ctx.add_action(AssistId("flip_comma"), "flip comma", |edit| { | 14 | ctx.add_action(AssistId("flip_comma"), "flip comma", |edit| { |
diff --git a/crates/ra_assists/src/remove_dbg.rs b/crates/ra_assists/src/remove_dbg.rs
index ae9958f11..6e900f8ef 100644
--- a/crates/ra_assists/src/remove_dbg.rs
+++ b/crates/ra_assists/src/remove_dbg.rs
@@ -2,9 +2,7 @@ use hir::db::HirDatabase; | |||
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | ast::{self, AstNode}, | 3 | ast::{self, AstNode}, |
4 | TextUnit, | 4 | TextUnit, |
5 | SyntaxKind::{ | 5 | T |
6 | L_PAREN, R_PAREN, L_CURLY, R_CURLY, L_BRACK, R_BRACK, EXCL | ||
7 | }, | ||
8 | }; | 6 | }; |
9 | use crate::{AssistCtx, Assist, AssistId}; | 7 | use crate::{AssistCtx, Assist, AssistId}; |
10 | 8 | ||
@@ -64,7 +62,7 @@ fn is_valid_macrocall(macro_call: &ast::MacroCall, macro_name: &str) -> Option<b | |||
64 | // Make sure it is actually a dbg-macro call, dbg followed by ! | 62 | // Make sure it is actually a dbg-macro call, dbg followed by ! |
65 | let excl = path.syntax().next_sibling_or_token()?; | 63 | let excl = path.syntax().next_sibling_or_token()?; |
66 | 64 | ||
67 | if name_ref.text() != macro_name || excl.kind() != EXCL { | 65 | if name_ref.text() != macro_name || excl.kind() != T![!] { |
68 | return None; | 66 | return None; |
69 | } | 67 | } |
70 | 68 | ||
@@ -73,7 +71,7 @@ fn is_valid_macrocall(macro_call: &ast::MacroCall, macro_name: &str) -> Option<b | |||
73 | let last_child = node.last_child_or_token()?; | 71 | let last_child = node.last_child_or_token()?; |
74 | 72 | ||
75 | match (first_child.kind(), last_child.kind()) { | 73 | match (first_child.kind(), last_child.kind()) { |
76 | (L_PAREN, R_PAREN) | (L_BRACK, R_BRACK) | (L_CURLY, R_CURLY) => Some(true), | 74 | (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']) => Some(true), |
77 | _ => Some(false), | 75 | _ => Some(false), |
78 | } | 76 | } |
79 | } | 77 | } |
diff --git a/crates/ra_assists/src/split_import.rs b/crates/ra_assists/src/split_import.rs
index 57e0efaf2..881c5ecdc 100644
--- a/crates/ra_assists/src/split_import.rs
+++ b/crates/ra_assists/src/split_import.rs
@@ -2,14 +2,15 @@ use std::iter::successors; | |||
2 | 2 | ||
3 | use hir::db::HirDatabase; | 3 | use hir::db::HirDatabase; |
4 | use ra_syntax::{ | 4 | use ra_syntax::{ |
5 | TextUnit, AstNode, SyntaxKind::COLONCOLON, | 5 | T, |
6 | TextUnit, AstNode, | ||
6 | ast, | 7 | ast, |
7 | }; | 8 | }; |
8 | 9 | ||
9 | use crate::{AssistCtx, Assist, AssistId}; | 10 | use crate::{AssistCtx, Assist, AssistId}; |
10 | 11 | ||
11 | pub(crate) fn split_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 12 | pub(crate) fn split_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { |
12 | let colon_colon = ctx.token_at_offset().find(|leaf| leaf.kind() == COLONCOLON)?; | 13 | let colon_colon = ctx.token_at_offset().find(|leaf| leaf.kind() == T![::])?; |
13 | let path = ast::Path::cast(colon_colon.parent())?; | 14 | let path = ast::Path::cast(colon_colon.parent())?; |
14 | let top_path = successors(Some(path), |it| it.parent_path()).last()?; | 15 | let top_path = successors(Some(path), |it| it.parent_path()).last()?; |
15 | 16 | ||
diff --git a/crates/ra_cli/src/analysis_stats.rs b/crates/ra_cli/src/analysis_stats.rs
index c95d452b3..b481ace9e 100644
--- a/crates/ra_cli/src/analysis_stats.rs
+++ b/crates/ra_cli/src/analysis_stats.rs
@@ -51,7 +51,10 @@ pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> { | |||
51 | println!("Total modules found: {}", visited_modules.len()); | 51 | println!("Total modules found: {}", visited_modules.len()); |
52 | println!("Total declarations: {}", num_decls); | 52 | println!("Total declarations: {}", num_decls); |
53 | println!("Total functions: {}", funcs.len()); | 53 | println!("Total functions: {}", funcs.len()); |
54 | let bar = indicatif::ProgressBar::new(funcs.len() as u64); | 54 | let bar = indicatif::ProgressBar::with_draw_target( |
55 | funcs.len() as u64, | ||
56 | indicatif::ProgressDrawTarget::stderr_nohz(), | ||
57 | ); | ||
55 | bar.set_style( | 58 | bar.set_style( |
56 | indicatif::ProgressStyle::default_bar().template("{wide_bar} {pos}/{len}\n{msg}"), | 59 | indicatif::ProgressStyle::default_bar().template("{wide_bar} {pos}/{len}\n{msg}"), |
57 | ); | 60 | ); |
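For context, a usage sketch of the progress bar configured above (indicatif 0.11 as pinned in Cargo.lock; only the constructor call is taken from the hunk, the loop body, bar.inc, and bar.finish_and_clear are illustrative assumptions):

    let bar = indicatif::ProgressBar::with_draw_target(
        funcs.len() as u64,
        indicatif::ProgressDrawTarget::stderr_nohz(),
    );
    for f in &funcs {
        // ... analyze one function ...
        bar.inc(1);
    }
    bar.finish_and_clear();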
diff --git a/crates/ra_db/src/lib.rs b/crates/ra_db/src/lib.rs
index 1cd400752..bf567721a 100644
--- a/crates/ra_db/src/lib.rs
+++ b/crates/ra_db/src/lib.rs
@@ -15,7 +15,7 @@ pub use crate::{ | |||
15 | }, | 15 | }, |
16 | }; | 16 | }; |
17 | 17 | ||
18 | pub trait CheckCanceled: panic::RefUnwindSafe { | 18 | pub trait CheckCanceled { |
19 | /// Aborts current query if there are pending changes. | 19 | /// Aborts current query if there are pending changes. |
20 | /// | 20 | /// |
21 | /// rust-analyzer needs to be able to answer semantic questions about the | 21 | /// rust-analyzer needs to be able to answer semantic questions about the |
@@ -36,14 +36,15 @@ pub trait CheckCanceled: panic::RefUnwindSafe { | |||
36 | Self: Sized, | 36 | Self: Sized, |
37 | F: FnOnce(&Self) -> T + panic::UnwindSafe, | 37 | F: FnOnce(&Self) -> T + panic::UnwindSafe, |
38 | { | 38 | { |
39 | panic::catch_unwind(|| f(self)).map_err(|err| match err.downcast::<Canceled>() { | 39 | let this = panic::AssertUnwindSafe(self); |
40 | panic::catch_unwind(|| f(*this)).map_err(|err| match err.downcast::<Canceled>() { | ||
40 | Ok(canceled) => *canceled, | 41 | Ok(canceled) => *canceled, |
41 | Err(payload) => panic::resume_unwind(payload), | 42 | Err(payload) => panic::resume_unwind(payload), |
42 | }) | 43 | }) |
43 | } | 44 | } |
44 | } | 45 | } |
45 | 46 | ||
46 | impl<T: salsa::Database + panic::RefUnwindSafe> CheckCanceled for T { | 47 | impl<T: salsa::Database> CheckCanceled for T { |
47 | fn check_canceled(&self) { | 48 | fn check_canceled(&self) { |
48 | if self.salsa_runtime().is_current_revision_canceled() { | 49 | if self.salsa_runtime().is_current_revision_canceled() { |
49 | Canceled::throw() | 50 | Canceled::throw() |
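The ra_db change above drops the panic::RefUnwindSafe supertrait and instead asserts unwind safety at the single catch_unwind call site. A self-contained sketch of that pattern using only std; Db and Canceled here are stand-ins, not the crate's own types:

    use std::panic::{self, AssertUnwindSafe};

    #[derive(Debug)]
    struct Canceled;

    struct Db; // imagine interior mutability that keeps `&Db` from being RefUnwindSafe

    fn catch_canceled<T>(db: &Db, f: impl FnOnce(&Db) -> T + panic::UnwindSafe) -> Result<T, Canceled> {
        // Wrap the reference instead of demanding `RefUnwindSafe` of `Db` itself.
        let this = AssertUnwindSafe(db);
        panic::catch_unwind(|| f(*this)).map_err(|err| match err.downcast::<Canceled>() {
            Ok(canceled) => *canceled,
            Err(payload) => panic::resume_unwind(payload),
        })
    }

    fn main() {
        let db = Db;
        let res: Result<u32, Canceled> = catch_canceled(&db, |_db| panic::panic_any(Canceled));
        assert!(res.is_err()); // the Canceled panic comes back as Err, other panics keep unwinding
    }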
diff --git a/crates/ra_fmt/src/lib.rs b/crates/ra_fmt/src/lib.rs
index 603be1854..aac5a1d23 100644
--- a/crates/ra_fmt/src/lib.rs
+++ b/crates/ra_fmt/src/lib.rs
@@ -3,7 +3,7 @@ | |||
3 | use std::iter::successors; | 3 | use std::iter::successors; |
4 | use itertools::Itertools; | 4 | use itertools::Itertools; |
5 | use ra_syntax::{ | 5 | use ra_syntax::{ |
6 | SyntaxNode, SyntaxKind::*, SyntaxToken, SyntaxKind, | 6 | SyntaxNode, SyntaxKind::*, SyntaxToken, SyntaxKind, T, |
7 | ast::{self, AstNode, AstToken}, | 7 | ast::{self, AstNode, AstToken}, |
8 | }; | 8 | }; |
9 | 9 | ||
@@ -38,7 +38,7 @@ pub fn extract_trivial_expression(block: &ast::Block) -> Option<&ast::Expr> { | |||
38 | return None; | 38 | return None; |
39 | } | 39 | } |
40 | let non_trivial_children = block.syntax().children().filter(|it| match it.kind() { | 40 | let non_trivial_children = block.syntax().children().filter(|it| match it.kind() { |
41 | WHITESPACE | L_CURLY | R_CURLY => false, | 41 | WHITESPACE | T!['{'] | T!['}'] => false, |
42 | _ => it != &expr.syntax(), | 42 | _ => it != &expr.syntax(), |
43 | }); | 43 | }); |
44 | if non_trivial_children.count() > 0 { | 44 | if non_trivial_children.count() > 0 { |
@@ -49,8 +49,8 @@ pub fn extract_trivial_expression(block: &ast::Block) -> Option<&ast::Expr> { | |||
49 | 49 | ||
50 | pub fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str { | 50 | pub fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str { |
51 | match left { | 51 | match left { |
52 | L_PAREN | L_BRACK => return "", | 52 | T!['('] | T!['['] => return "", |
53 | L_CURLY => { | 53 | T!['{'] => { |
54 | if let USE_TREE = right { | 54 | if let USE_TREE = right { |
55 | return ""; | 55 | return ""; |
56 | } | 56 | } |
@@ -58,13 +58,13 @@ pub fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str { | |||
58 | _ => (), | 58 | _ => (), |
59 | } | 59 | } |
60 | match right { | 60 | match right { |
61 | R_PAREN | R_BRACK => return "", | 61 | T![')'] | T![']'] => return "", |
62 | R_CURLY => { | 62 | T!['}'] => { |
63 | if let USE_TREE = left { | 63 | if let USE_TREE = left { |
64 | return ""; | 64 | return ""; |
65 | } | 65 | } |
66 | } | 66 | } |
67 | DOT => return "", | 67 | T![.] => return "", |
68 | _ => (), | 68 | _ => (), |
69 | } | 69 | } |
70 | " " | 70 | " " |
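A small usage sketch for the compute_ws rules above, assuming ra_fmt and ra_syntax from this repository (compute_ws is the public helper shown in the hunk):

    use ra_fmt::compute_ws;
    use ra_syntax::{SyntaxKind::IDENT, T};

    #[test]
    fn no_space_around_brackets_or_dot() {
        assert_eq!(compute_ws(T!['('], IDENT), ""); // nothing after an opening bracket
        assert_eq!(compute_ws(IDENT, T![')']), ""); // nothing before a closing bracket
        assert_eq!(compute_ws(IDENT, T![.]), "");   // nothing before `.`
        assert_eq!(compute_ws(IDENT, IDENT), " ");  // default: a single space
    }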
diff --git a/crates/ra_hir/Cargo.toml b/crates/ra_hir/Cargo.toml
index 68ffcb2e9..294d047d8 100644
--- a/crates/ra_hir/Cargo.toml
+++ b/crates/ra_hir/Cargo.toml
@@ -21,10 +21,10 @@ tt = { path = "../ra_tt", package = "ra_tt" } | |||
21 | test_utils = { path = "../test_utils" } | 21 | test_utils = { path = "../test_utils" } |
22 | ra_prof = { path = "../ra_prof" } | 22 | ra_prof = { path = "../ra_prof" } |
23 | 23 | ||
24 | chalk-solve = { git = "https://github.com/rust-lang/chalk.git" } | 24 | chalk-solve = { git = "https://github.com/flodiebold/chalk.git", branch = "fuel" } |
25 | chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git" } | 25 | chalk-rust-ir = { git = "https://github.com/flodiebold/chalk.git", branch = "fuel" } |
26 | chalk-ir = { git = "https://github.com/rust-lang/chalk.git" } | 26 | chalk-ir = { git = "https://github.com/flodiebold/chalk.git", branch = "fuel" } |
27 | 27 | ||
28 | [dev-dependencies] | 28 | [dev-dependencies] |
29 | flexi_logger = "0.11.0" | 29 | flexi_logger = "0.11.0" |
30 | insta = "0.7.0" | 30 | insta = "0.8.1" |
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs
index 8e827d4f5..11cdf9c34 100644
--- a/crates/ra_hir/src/db.rs
+++ b/crates/ra_hir/src/db.rs
@@ -11,7 +11,7 @@ use crate::{ | |||
11 | DefWithBody, Trait, | 11 | DefWithBody, Trait, |
12 | ids, | 12 | ids, |
13 | nameres::{Namespace, ImportSourceMap, RawItems, CrateDefMap}, | 13 | nameres::{Namespace, ImportSourceMap, RawItems, CrateDefMap}, |
14 | ty::{InferenceResult, Ty, method_resolution::CrateImplBlocks, TypableDef, CallableDef, FnSig, TypeCtor, GenericPredicate}, | 14 | ty::{InferenceResult, Ty, method_resolution::CrateImplBlocks, TypableDef, CallableDef, FnSig, TypeCtor, GenericPredicate, Substs}, |
15 | adt::{StructData, EnumData}, | 15 | adt::{StructData, EnumData}, |
16 | impl_block::{ModuleImplBlocks, ImplSourceMap, ImplBlock}, | 16 | impl_block::{ModuleImplBlocks, ImplSourceMap, ImplBlock}, |
17 | generics::{GenericParams, GenericDef}, | 17 | generics::{GenericParams, GenericDef}, |
@@ -126,7 +126,7 @@ pub trait HirDatabase: DefDatabase { | |||
126 | #[salsa::invoke(ExprScopes::expr_scopes_query)] | 126 | #[salsa::invoke(ExprScopes::expr_scopes_query)] |
127 | fn expr_scopes(&self, def: DefWithBody) -> Arc<ExprScopes>; | 127 | fn expr_scopes(&self, def: DefWithBody) -> Arc<ExprScopes>; |
128 | 128 | ||
129 | #[salsa::invoke(crate::ty::infer)] | 129 | #[salsa::invoke(crate::ty::infer_query)] |
130 | fn infer(&self, def: DefWithBody) -> Arc<InferenceResult>; | 130 | fn infer(&self, def: DefWithBody) -> Arc<InferenceResult>; |
131 | 131 | ||
132 | #[salsa::invoke(crate::ty::type_for_def)] | 132 | #[salsa::invoke(crate::ty::type_for_def)] |
@@ -141,6 +141,9 @@ pub trait HirDatabase: DefDatabase { | |||
141 | #[salsa::invoke(crate::ty::generic_predicates)] | 141 | #[salsa::invoke(crate::ty::generic_predicates)] |
142 | fn generic_predicates(&self, def: GenericDef) -> Arc<[GenericPredicate]>; | 142 | fn generic_predicates(&self, def: GenericDef) -> Arc<[GenericPredicate]>; |
143 | 143 | ||
144 | #[salsa::invoke(crate::ty::generic_defaults)] | ||
145 | fn generic_defaults(&self, def: GenericDef) -> Substs; | ||
146 | |||
144 | #[salsa::invoke(crate::expr::body_with_source_map_query)] | 147 | #[salsa::invoke(crate::expr::body_with_source_map_query)] |
145 | fn body_with_source_map( | 148 | fn body_with_source_map( |
146 | &self, | 149 | &self, |
@@ -153,7 +156,7 @@ pub trait HirDatabase: DefDatabase { | |||
153 | #[salsa::invoke(crate::ty::method_resolution::CrateImplBlocks::impls_in_crate_query)] | 156 | #[salsa::invoke(crate::ty::method_resolution::CrateImplBlocks::impls_in_crate_query)] |
154 | fn impls_in_crate(&self, krate: Crate) -> Arc<CrateImplBlocks>; | 157 | fn impls_in_crate(&self, krate: Crate) -> Arc<CrateImplBlocks>; |
155 | 158 | ||
156 | #[salsa::invoke(crate::ty::traits::impls_for_trait)] | 159 | #[salsa::invoke(crate::ty::traits::impls_for_trait_query)] |
157 | fn impls_for_trait(&self, krate: Crate, trait_: Trait) -> Arc<[ImplBlock]>; | 160 | fn impls_for_trait(&self, krate: Crate, trait_: Trait) -> Arc<[ImplBlock]>; |
158 | 161 | ||
159 | /// This provides the Chalk trait solver instance. Because Chalk always | 162 | /// This provides the Chalk trait solver instance. Because Chalk always |
@@ -161,11 +164,11 @@ pub trait HirDatabase: DefDatabase { | |||
161 | /// because Chalk does its own internal caching, the solver is wrapped in a | 164 | /// because Chalk does its own internal caching, the solver is wrapped in a |
162 | /// Mutex and the query is marked volatile, to make sure the cached state is | 165 | /// Mutex and the query is marked volatile, to make sure the cached state is |
163 | /// thrown away when input facts change. | 166 | /// thrown away when input facts change. |
164 | #[salsa::invoke(crate::ty::traits::solver)] | 167 | #[salsa::invoke(crate::ty::traits::solver_query)] |
165 | #[salsa::volatile] | 168 | #[salsa::volatile] |
166 | fn solver(&self, krate: Crate) -> Arc<Mutex<crate::ty::traits::Solver>>; | 169 | fn solver(&self, krate: Crate) -> Arc<Mutex<crate::ty::traits::Solver>>; |
167 | 170 | ||
168 | #[salsa::invoke(crate::ty::traits::implements)] | 171 | #[salsa::invoke(crate::ty::traits::implements_query)] |
169 | fn implements( | 172 | fn implements( |
170 | &self, | 173 | &self, |
171 | krate: Crate, | 174 | krate: Crate, |
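The renames above (infer to infer_query, solver to solver_query, implements to implements_query, impls_for_trait to impls_for_trait_query) follow one convention: the salsa query keeps its short name on the database trait, while the free function that implements it carries a _query suffix. A sketch of the two halves; only the #[salsa::invoke] line appears in the hunk, the query-group attribute and the infer_query signature are the usual salsa shape and are assumptions here:

    #[salsa::query_group(HirDatabaseStorage)]
    pub trait HirDatabase: DefDatabase {
        // The query itself keeps the short name `infer`...
        #[salsa::invoke(crate::ty::infer_query)]
        fn infer(&self, def: DefWithBody) -> Arc<InferenceResult>;
    }

    // ...while the implementing function carries the `_query` suffix and takes
    // the database as its first argument, so `db.infer(def)` and the raw
    // function cannot be confused for each other.
    pub(crate) fn infer_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<InferenceResult> {
        unimplemented!()
    }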
diff --git a/crates/ra_hir/src/expr/validation.rs b/crates/ra_hir/src/expr/validation.rs
index aebed6788..3f758f283 100644
--- a/crates/ra_hir/src/expr/validation.rs
+++ b/crates/ra_hir/src/expr/validation.rs
@@ -5,13 +5,11 @@ use ra_syntax::ast::{AstNode, StructLit}; | |||
5 | 5 | ||
6 | use crate::{ | 6 | use crate::{ |
7 | expr::AstPtr, | 7 | expr::AstPtr, |
8 | HirDatabase, | 8 | HirDatabase, Function, Name, |
9 | Function, | ||
10 | Name, | ||
11 | diagnostics::{DiagnosticSink, MissingFields}, | 9 | diagnostics::{DiagnosticSink, MissingFields}, |
12 | adt::AdtDef, | 10 | adt::AdtDef, |
13 | Path, | 11 | Path, |
14 | ty::InferenceResult | 12 | ty::InferenceResult, |
15 | }; | 13 | }; |
16 | use super::{Expr, StructLitField, ExprId}; | 14 | use super::{Expr, StructLitField, ExprId}; |
17 | 15 | ||
@@ -50,43 +48,46 @@ impl<'a, 'b> ExprValidator<'a, 'b> { | |||
50 | spread: &Option<ExprId>, | 48 | spread: &Option<ExprId>, |
51 | db: &impl HirDatabase, | 49 | db: &impl HirDatabase, |
52 | ) { | 50 | ) { |
53 | if let Some(_) = spread { | 51 | if spread.is_some() { |
54 | return; | 52 | return; |
55 | } | 53 | } |
54 | |||
55 | let struct_def = match self.infer[id].as_adt() { | ||
56 | Some((AdtDef::Struct(s), _)) => s, | ||
57 | _ => return, | ||
58 | }; | ||
59 | |||
56 | let lit_fields: FxHashSet<_> = fields.into_iter().map(|f| &f.name).collect(); | 60 | let lit_fields: FxHashSet<_> = fields.into_iter().map(|f| &f.name).collect(); |
57 | let struct_ty = &self.infer[id]; | 61 | let missed_fields: Vec<Name> = struct_def |
58 | if let Some((AdtDef::Struct(s), _)) = struct_ty.as_adt() { | 62 | .fields(db) |
59 | let missed_fields: Vec<Name> = s | 63 | .iter() |
60 | .fields(db) | 64 | .filter_map(|f| { |
61 | .iter() | 65 | let name = f.name(db); |
62 | .filter_map(|f| { | 66 | if lit_fields.contains(&name) { |
63 | let name = f.name(db); | 67 | None |
64 | if lit_fields.contains(&name) { | 68 | } else { |
65 | None | 69 | Some(name) |
66 | } else { | 70 | } |
67 | Some(name) | 71 | }) |
68 | } | 72 | .collect(); |
69 | }) | 73 | if missed_fields.is_empty() { |
70 | .collect(); | 74 | return; |
71 | if missed_fields.is_empty() { | 75 | } |
72 | return; | 76 | let source_map = self.func.body_source_map(db); |
73 | } | 77 | let file_id = self.func.source(db).0; |
74 | let source_map = self.func.body_source_map(db); | 78 | let source_file = db.parse(file_id.original_file(db)); |
75 | let file_id = self.func.source(db).0; | 79 | if let Some(field_list_node) = source_map |
76 | let source_file = db.parse(file_id.original_file(db)); | 80 | .expr_syntax(id) |
77 | if let Some(field_list_node) = source_map | 81 | .map(|ptr| ptr.to_node(source_file.syntax())) |
78 | .expr_syntax(id) | 82 | .and_then(StructLit::cast) |
79 | .map(|ptr| ptr.to_node(source_file.syntax())) | 83 | .and_then(|lit| lit.named_field_list()) |
80 | .and_then(StructLit::cast) | 84 | { |
81 | .and_then(|lit| lit.named_field_list()) | 85 | let field_list_ptr = AstPtr::new(field_list_node); |
82 | { | 86 | self.sink.push(MissingFields { |
83 | let field_list_ptr = AstPtr::new(field_list_node); | 87 | file: file_id, |
84 | self.sink.push(MissingFields { | 88 | field_list: field_list_ptr, |
85 | file: file_id, | 89 | missed_fields, |
86 | field_list: field_list_ptr, | 90 | }) |
87 | missed_fields, | ||
88 | }) | ||
89 | } | ||
90 | } | 91 | } |
91 | } | 92 | } |
92 | } | 93 | } |
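The validation.rs rewrite above is purely structural: bind the AdtDef::Struct case early and return otherwise, so the field-diffing logic no longer sits nested inside an if let. The same shape in isolation, with stand-in types rather than the crate's own:

    fn first_even_times_ten(nums: &[i32]) -> Option<i32> {
        // Early return instead of nesting the rest of the body under `if let`.
        let n = match nums.iter().find(|&&n| n % 2 == 0) {
            Some(&n) => n,
            None => return None,
        };
        // ...the interesting work stays at a single indentation level...
        Some(n * 10)
    }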
diff --git a/crates/ra_hir/src/generics.rs b/crates/ra_hir/src/generics.rs
index c29b96f50..79a7fa23a 100644
--- a/crates/ra_hir/src/generics.rs
+++ b/crates/ra_hir/src/generics.rs
@@ -5,7 +5,7 @@ | |||
5 | 5 | ||
6 | use std::sync::Arc; | 6 | use std::sync::Arc; |
7 | 7 | ||
8 | use ra_syntax::ast::{self, NameOwner, TypeParamsOwner, TypeBoundsOwner}; | 8 | use ra_syntax::ast::{self, NameOwner, TypeParamsOwner, TypeBoundsOwner, DefaultTypeParamOwner}; |
9 | 9 | ||
10 | use crate::{ | 10 | use crate::{ |
11 | db::{ HirDatabase, DefDatabase}, | 11 | db::{ HirDatabase, DefDatabase}, |
@@ -18,6 +18,7 @@ pub struct GenericParam { | |||
18 | // FIXME: give generic params proper IDs | 18 | // FIXME: give generic params proper IDs |
19 | pub(crate) idx: u32, | 19 | pub(crate) idx: u32, |
20 | pub(crate) name: Name, | 20 | pub(crate) name: Name, |
21 | pub(crate) default: Option<Path>, | ||
21 | } | 22 | } |
22 | 23 | ||
23 | /// Data about the generic parameters of a function, struct, impl, etc. | 24 | /// Data about the generic parameters of a function, struct, impl, etc. |
@@ -68,7 +69,11 @@ impl GenericParams { | |||
68 | GenericDef::Enum(it) => generics.fill(&*it.source(db).1, start), | 69 | GenericDef::Enum(it) => generics.fill(&*it.source(db).1, start), |
69 | GenericDef::Trait(it) => { | 70 | GenericDef::Trait(it) => { |
70 | // traits get the Self type as an implicit first type parameter | 71 | // traits get the Self type as an implicit first type parameter |
71 | generics.params.push(GenericParam { idx: start, name: Name::self_type() }); | 72 | generics.params.push(GenericParam { |
73 | idx: start, | ||
74 | name: Name::self_type(), | ||
75 | default: None, | ||
76 | }); | ||
72 | generics.fill(&*it.source(db).1, start + 1); | 77 | generics.fill(&*it.source(db).1, start + 1); |
73 | } | 78 | } |
74 | GenericDef::TypeAlias(it) => generics.fill(&*it.source(db).1, start), | 79 | GenericDef::TypeAlias(it) => generics.fill(&*it.source(db).1, start), |
@@ -90,7 +95,9 @@ impl GenericParams { | |||
90 | fn fill_params(&mut self, params: &ast::TypeParamList, start: u32) { | 95 | fn fill_params(&mut self, params: &ast::TypeParamList, start: u32) { |
91 | for (idx, type_param) in params.type_params().enumerate() { | 96 | for (idx, type_param) in params.type_params().enumerate() { |
92 | let name = type_param.name().map(AsName::as_name).unwrap_or_else(Name::missing); | 97 | let name = type_param.name().map(AsName::as_name).unwrap_or_else(Name::missing); |
93 | let param = GenericParam { idx: idx as u32 + start, name: name.clone() }; | 98 | let default = type_param.default_type().and_then(|t| t.path()).and_then(Path::from_ast); |
99 | |||
100 | let param = GenericParam { idx: idx as u32 + start, name: name.clone(), default }; | ||
94 | self.params.push(param); | 101 | self.params.push(param); |
95 | 102 | ||
96 | let type_ref = TypeRef::Path(name.into()); | 103 | let type_ref = TypeRef::Path(name.into()); |
@@ -190,13 +197,13 @@ impl From<crate::adt::AdtDef> for GenericDef { | |||
190 | } | 197 | } |
191 | } | 198 | } |
192 | 199 | ||
193 | pub trait HasGenericParams { | 200 | pub trait HasGenericParams: Copy { |
194 | fn generic_params(self, db: &impl DefDatabase) -> Arc<GenericParams>; | 201 | fn generic_params(self, db: &impl DefDatabase) -> Arc<GenericParams>; |
195 | } | 202 | } |
196 | 203 | ||
197 | impl<T> HasGenericParams for T | 204 | impl<T> HasGenericParams for T |
198 | where | 205 | where |
199 | T: Into<GenericDef>, | 206 | T: Into<GenericDef> + Copy, |
200 | { | 207 | { |
201 | fn generic_params(self, db: &impl DefDatabase) -> Arc<GenericParams> { | 208 | fn generic_params(self, db: &impl DefDatabase) -> Arc<GenericParams> { |
202 | db.generic_params(self.into()) | 209 | db.generic_params(self.into()) |
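For context, the new `default: Option<Path>` field records the path written after `=` in a type-parameter list; parameters without a default get `None`. A small sketch of the surface syntax being captured (plain Rust, illustrative only):

    // `T = u32` is what this change stores as `Some(Path)` on the
    // corresponding GenericParam.
    struct Gen<T = u32> {
        val: T,
    }

    fn main() {
        // In type position a bare `Gen` means `Gen<u32>` thanks to the default.
        let g: Gen = Gen { val: 1 };
        let _ = g.val;
    }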
diff --git a/crates/ra_hir/src/mock.rs b/crates/ra_hir/src/mock.rs index fa5882dea..b84cb7503 100644 --- a/crates/ra_hir/src/mock.rs +++ b/crates/ra_hir/src/mock.rs | |||
@@ -236,7 +236,7 @@ impl MockDatabase { | |||
236 | } | 236 | } |
237 | 237 | ||
238 | #[derive(Default)] | 238 | #[derive(Default)] |
239 | pub struct CrateGraphFixture(pub FxHashMap<String, (String, Edition, Vec<String>)>); | 239 | pub struct CrateGraphFixture(pub Vec<(String, (String, Edition, Vec<String>))>); |
240 | 240 | ||
241 | #[macro_export] | 241 | #[macro_export] |
242 | macro_rules! crate_graph { | 242 | macro_rules! crate_graph { |
@@ -246,10 +246,10 @@ macro_rules! crate_graph { | |||
246 | #[allow(unused_mut, unused_assignments)] | 246 | #[allow(unused_mut, unused_assignments)] |
247 | let mut edition = ra_db::Edition::Edition2018; | 247 | let mut edition = ra_db::Edition::Edition2018; |
248 | $(edition = ra_db::Edition::from_string($edition);)? | 248 | $(edition = ra_db::Edition::from_string($edition);)? |
249 | res.0.insert( | 249 | res.0.push(( |
250 | $crate_name.to_string(), | 250 | $crate_name.to_string(), |
251 | ($crate_path.to_string(), edition, vec![$($dep.to_string()),*]) | 251 | ($crate_path.to_string(), edition, vec![$($dep.to_string()),*]) |
252 | ); | 252 | )); |
253 | )* | 253 | )* |
254 | res | 254 | res |
255 | }} | 255 | }} |
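Switching the fixture from `FxHashMap` to a `Vec` of pairs presumably keeps crates in declaration order, so the crate graph built from it (and any snapshot derived from it) is deterministic. A sketch of the difference, using std's `HashMap` for illustration:

    use std::collections::HashMap;

    fn main() {
        // Hash-map iteration order is unspecified and can differ per run...
        let mut by_name = HashMap::new();
        by_name.insert("main", "/main.rs");
        by_name.insert("std", "/lib.rs");

        // ...whereas a Vec of pairs replays entries exactly as the test
        // declared them, which is what the reworked `crate_graph!` relies on.
        let ordered = vec![("main", "/main.rs"), ("std", "/lib.rs")];
        for (name, root) in &ordered {
            println!("{} -> {}", name, root);
        }
        let _ = by_name.len();
    }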
diff --git a/crates/ra_hir/src/name.rs b/crates/ra_hir/src/name.rs index 9a999e66c..e3a82cf03 100644 --- a/crates/ra_hir/src/name.rs +++ b/crates/ra_hir/src/name.rs | |||
@@ -5,7 +5,7 @@ use ra_syntax::{ast, SmolStr}; | |||
5 | /// `Name` is a wrapper around string, which is used in hir for both references | 5 | /// `Name` is a wrapper around string, which is used in hir for both references |
6 | /// and declarations. In theory, names should also carry hygiene info, but we are | 6 | /// and declarations. In theory, names should also carry hygiene info, but we are |
7 | /// not there yet! | 7 | /// not there yet! |
8 | #[derive(Clone, PartialEq, Eq, Hash)] | 8 | #[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] |
9 | pub struct Name { | 9 | pub struct Name { |
10 | text: SmolStr, | 10 | text: SmolStr, |
11 | } | 11 | } |
diff --git a/crates/ra_hir/src/nameres/tests.rs b/crates/ra_hir/src/nameres/tests.rs index 572bd1bf7..14c8ee50b 100644 --- a/crates/ra_hir/src/nameres/tests.rs +++ b/crates/ra_hir/src/nameres/tests.rs | |||
@@ -8,7 +8,11 @@ use ra_db::SourceDatabase; | |||
8 | use test_utils::covers; | 8 | use test_utils::covers; |
9 | use insta::assert_snapshot_matches; | 9 | use insta::assert_snapshot_matches; |
10 | 10 | ||
11 | use crate::{Crate, mock::{MockDatabase, CrateGraphFixture}, nameres::Resolution}; | 11 | use crate::{ |
12 | Crate, | ||
13 | mock::{MockDatabase, CrateGraphFixture}, | ||
14 | nameres::Resolution, | ||
15 | }; | ||
12 | 16 | ||
13 | use super::*; | 17 | use super::*; |
14 | 18 | ||
@@ -25,12 +29,15 @@ fn compute_crate_def_map(fixture: &str, graph: Option<CrateGraphFixture>) -> Arc | |||
25 | fn render_crate_def_map(map: &CrateDefMap) -> String { | 29 | fn render_crate_def_map(map: &CrateDefMap) -> String { |
26 | let mut buf = String::new(); | 30 | let mut buf = String::new(); |
27 | go(&mut buf, map, "\ncrate", map.root); | 31 | go(&mut buf, map, "\ncrate", map.root); |
28 | return buf; | 32 | return buf.trim().to_string(); |
29 | 33 | ||
30 | fn go(buf: &mut String, map: &CrateDefMap, path: &str, module: CrateModuleId) { | 34 | fn go(buf: &mut String, map: &CrateDefMap, path: &str, module: CrateModuleId) { |
31 | *buf += path; | 35 | *buf += path; |
32 | *buf += "\n"; | 36 | *buf += "\n"; |
33 | for (name, res) in map.modules[module].scope.items.iter() { | 37 | |
38 | let mut entries = map.modules[module].scope.items.iter().collect::<Vec<_>>(); | ||
39 | entries.sort_by_key(|(name, _)| *name); | ||
40 | for (name, res) in entries { | ||
34 | *buf += &format!("{}: {}\n", name, dump_resolution(res)) | 41 | *buf += &format!("{}: {}\n", name, dump_resolution(res)) |
35 | } | 42 | } |
36 | for (name, child) in map.modules[module].children.iter() { | 43 | for (name, child) in map.modules[module].children.iter() { |
@@ -54,8 +61,8 @@ fn def_map(fixtute: &str) -> String { | |||
54 | render_crate_def_map(&dm) | 61 | render_crate_def_map(&dm) |
55 | } | 62 | } |
56 | 63 | ||
57 | fn def_map_with_crate_graph(fixtute: &str, graph: CrateGraphFixture) -> String { | 64 | fn def_map_with_crate_graph(fixture: &str, graph: CrateGraphFixture) -> String { |
58 | let dm = compute_crate_def_map(fixtute, Some(graph)); | 65 | let dm = compute_crate_def_map(fixture, Some(graph)); |
59 | render_crate_def_map(&dm) | 66 | render_crate_def_map(&dm) |
60 | } | 67 | } |
61 | 68 | ||
@@ -79,21 +86,20 @@ fn crate_def_map_smoke_test() { | |||
79 | ", | 86 | ", |
80 | ); | 87 | ); |
81 | assert_snapshot_matches!(map, @r###" | 88 | assert_snapshot_matches!(map, @r###" |
82 | crate | 89 | ⋮crate |
83 | V: t v | 90 | ⋮E: t |
84 | E: t | 91 | ⋮S: t v |
85 | foo: t | 92 | ⋮V: t v |
86 | S: t v | 93 | ⋮foo: t |
87 | 94 | ⋮ | |
88 | crate::foo | 95 | ⋮crate::foo |
89 | bar: t | 96 | ⋮bar: t |
90 | f: v | 97 | ⋮f: v |
91 | 98 | ⋮ | |
92 | crate::foo::bar | 99 | ⋮crate::foo::bar |
93 | Baz: t v | 100 | ⋮Baz: t v |
94 | E: t | 101 | ⋮E: t |
95 | "### | 102 | "###) |
96 | ) | ||
97 | } | 103 | } |
98 | 104 | ||
99 | #[test] | 105 | #[test] |
@@ -113,12 +119,12 @@ fn bogus_paths() { | |||
113 | ", | 119 | ", |
114 | ); | 120 | ); |
115 | assert_snapshot_matches!(map, @r###" | 121 | assert_snapshot_matches!(map, @r###" |
116 | crate | 122 | ⋮crate |
117 | foo: t | 123 | ⋮S: t v |
118 | S: t v | 124 | ⋮foo: t |
119 | 125 | ⋮ | |
120 | crate::foo | 126 | ⋮crate::foo |
121 | "### | 127 | "### |
122 | ) | 128 | ) |
123 | } | 129 | } |
124 | 130 | ||
@@ -137,13 +143,13 @@ fn use_as() { | |||
137 | ); | 143 | ); |
138 | assert_snapshot_matches!(map, | 144 | assert_snapshot_matches!(map, |
139 | @r###" | 145 | @r###" |
140 | crate | 146 | ⋮crate |
141 | Foo: t v | 147 | ⋮Foo: t v |
142 | foo: t | 148 | ⋮foo: t |
143 | 149 | ⋮ | |
144 | crate::foo | 150 | ⋮crate::foo |
145 | Baz: t v | 151 | ⋮Baz: t v |
146 | "### | 152 | "### |
147 | ); | 153 | ); |
148 | } | 154 | } |
149 | 155 | ||
@@ -164,21 +170,19 @@ fn use_trees() { | |||
164 | pub enum Quux {}; | 170 | pub enum Quux {}; |
165 | ", | 171 | ", |
166 | ); | 172 | ); |
167 | assert_snapshot_matches!(map, | 173 | assert_snapshot_matches!(map, @r###" |
168 | @r###" | 174 | ⋮crate |
169 | crate | 175 | ⋮Baz: t v |
170 | Quux: t | 176 | ⋮Quux: t |
171 | Baz: t v | 177 | ⋮foo: t |
172 | foo: t | 178 | ⋮ |
173 | 179 | ⋮crate::foo | |
174 | crate::foo | 180 | ⋮bar: t |
175 | bar: t | 181 | ⋮ |
176 | 182 | ⋮crate::foo::bar | |
177 | crate::foo::bar | 183 | ⋮Baz: t v |
178 | Quux: t | 184 | ⋮Quux: t |
179 | Baz: t v | 185 | "###); |
180 | "### | ||
181 | ); | ||
182 | } | 186 | } |
183 | 187 | ||
184 | #[test] | 188 | #[test] |
@@ -199,20 +203,18 @@ fn re_exports() { | |||
199 | pub struct Baz; | 203 | pub struct Baz; |
200 | ", | 204 | ", |
201 | ); | 205 | ); |
202 | assert_snapshot_matches!(map, | 206 | assert_snapshot_matches!(map, @r###" |
203 | @r###" | 207 | ⋮crate |
204 | crate | 208 | ⋮Baz: t v |
205 | Baz: t v | 209 | ⋮foo: t |
206 | foo: t | 210 | ⋮ |
207 | 211 | ⋮crate::foo | |
208 | crate::foo | 212 | ⋮Baz: t v |
209 | bar: t | 213 | ⋮bar: t |
210 | Baz: t v | 214 | ⋮ |
211 | 215 | ⋮crate::foo::bar | |
212 | crate::foo::bar | 216 | ⋮Baz: t v |
213 | Baz: t v | 217 | "###); |
214 | "### | ||
215 | ); | ||
216 | } | 218 | } |
217 | 219 | ||
218 | #[test] | 220 | #[test] |
@@ -237,10 +239,10 @@ fn std_prelude() { | |||
237 | }, | 239 | }, |
238 | ); | 240 | ); |
239 | assert_snapshot_matches!(map, @r###" | 241 | assert_snapshot_matches!(map, @r###" |
240 | crate | 242 | ⋮crate |
241 | Bar: t v | 243 | ⋮Bar: t v |
242 | Baz: t v | 244 | ⋮Baz: t v |
243 | "###); | 245 | "###); |
244 | } | 246 | } |
245 | 247 | ||
246 | #[test] | 248 | #[test] |
@@ -254,10 +256,10 @@ fn can_import_enum_variant() { | |||
254 | ", | 256 | ", |
255 | ); | 257 | ); |
256 | assert_snapshot_matches!(map, @r###" | 258 | assert_snapshot_matches!(map, @r###" |
257 | crate | 259 | ⋮crate |
258 | V: t v | 260 | ⋮E: t |
259 | E: t | 261 | ⋮V: t v |
260 | "### | 262 | "### |
261 | ); | 263 | ); |
262 | } | 264 | } |
263 | 265 | ||
@@ -285,20 +287,18 @@ fn edition_2015_imports() { | |||
285 | }, | 287 | }, |
286 | ); | 288 | ); |
287 | 289 | ||
288 | assert_snapshot_matches!(map, | 290 | assert_snapshot_matches!(map, @r###" |
289 | @r###" | 291 | ⋮crate |
290 | crate | 292 | ⋮bar: t |
291 | bar: t | 293 | ⋮foo: t |
292 | foo: t | 294 | ⋮ |
293 | 295 | ⋮crate::bar | |
294 | crate::bar | 296 | ⋮Bar: t v |
295 | Bar: t v | 297 | ⋮ |
296 | 298 | ⋮crate::foo | |
297 | crate::foo | 299 | ⋮Bar: t v |
298 | FromLib: t v | 300 | ⋮FromLib: t v |
299 | Bar: t v | 301 | "###); |
300 | "### | ||
301 | ); | ||
302 | } | 302 | } |
303 | 303 | ||
304 | #[test] | 304 | #[test] |
@@ -317,16 +317,14 @@ fn module_resolution_works_for_non_standard_filenames() { | |||
317 | }, | 317 | }, |
318 | ); | 318 | ); |
319 | 319 | ||
320 | assert_snapshot_matches!(map, | 320 | assert_snapshot_matches!(map, @r###" |
321 | @r###" | 321 | ⋮crate |
322 | crate | 322 | ⋮Bar: t v |
323 | Bar: t v | 323 | ⋮foo: t |
324 | foo: t | 324 | ⋮ |
325 | 325 | ⋮crate::foo | |
326 | crate::foo | 326 | ⋮Bar: t v |
327 | Bar: t v | 327 | "###); |
328 | "### | ||
329 | ); | ||
330 | } | 328 | } |
331 | 329 | ||
332 | #[test] | 330 | #[test] |
@@ -348,12 +346,10 @@ fn name_res_works_for_broken_modules() { | |||
348 | pub struct Baz; | 346 | pub struct Baz; |
349 | ", | 347 | ", |
350 | ); | 348 | ); |
351 | assert_snapshot_matches!(map, | 349 | assert_snapshot_matches!(map, @r###" |
352 | @r###" | 350 | ⋮crate |
353 | crate | 351 | ⋮Baz: _ |
354 | Baz: _ | 352 | "###); |
355 | "### | ||
356 | ); | ||
357 | } | 353 | } |
358 | 354 | ||
359 | #[test] | 355 | #[test] |
@@ -369,19 +365,17 @@ fn item_map_using_self() { | |||
369 | pub struct Baz; | 365 | pub struct Baz; |
370 | ", | 366 | ", |
371 | ); | 367 | ); |
372 | assert_snapshot_matches!(map, | 368 | assert_snapshot_matches!(map, @r###" |
373 | @r###" | 369 | ⋮crate |
374 | crate | 370 | ⋮Baz: t v |
375 | Baz: t v | 371 | ⋮foo: t |
376 | foo: t | 372 | ⋮ |
377 | 373 | ⋮crate::foo | |
378 | crate::foo | 374 | ⋮bar: t |
379 | bar: t | 375 | ⋮ |
380 | 376 | ⋮crate::foo::bar | |
381 | crate::foo::bar | 377 | ⋮Baz: t v |
382 | Baz: t v | 378 | "###); |
383 | "### | ||
384 | ); | ||
385 | } | 379 | } |
386 | 380 | ||
387 | #[test] | 381 | #[test] |
@@ -400,12 +394,10 @@ fn item_map_across_crates() { | |||
400 | }, | 394 | }, |
401 | ); | 395 | ); |
402 | 396 | ||
403 | assert_snapshot_matches!(map, | 397 | assert_snapshot_matches!(map, @r###" |
404 | @r###" | 398 | ⋮crate |
405 | crate | 399 | ⋮Baz: t v |
406 | Baz: t v | 400 | "###); |
407 | "### | ||
408 | ); | ||
409 | } | 401 | } |
410 | 402 | ||
411 | #[test] | 403 | #[test] |
@@ -430,12 +422,14 @@ fn extern_crate_rename() { | |||
430 | }, | 422 | }, |
431 | ); | 423 | ); |
432 | 424 | ||
433 | assert_snapshot_matches!(map, | 425 | assert_snapshot_matches!(map, @r###" |
434 | @r###" | 426 | ⋮crate |
435 | crate | 427 | ⋮alloc_crate: t |
436 | Arc: t v | 428 | ⋮sync: t |
437 | "### | 429 | ⋮ |
438 | ); | 430 | ⋮crate::sync |
431 | ⋮Arc: t v | ||
432 | "###); | ||
439 | } | 433 | } |
440 | 434 | ||
441 | #[test] | 435 | #[test] |
@@ -462,9 +456,13 @@ fn extern_crate_rename_2015_edition() { | |||
462 | 456 | ||
463 | assert_snapshot_matches!(map, | 457 | assert_snapshot_matches!(map, |
464 | @r###" | 458 | @r###" |
465 | crate | 459 | ⋮crate |
466 | Arc: t v | 460 | ⋮alloc_crate: t |
467 | "### | 461 | ⋮sync: t |
462 | ⋮ | ||
463 | ⋮crate::sync | ||
464 | ⋮Arc: t v | ||
465 | "### | ||
468 | ); | 466 | ); |
469 | } | 467 | } |
470 | 468 | ||
@@ -490,12 +488,10 @@ fn import_across_source_roots() { | |||
490 | }, | 488 | }, |
491 | ); | 489 | ); |
492 | 490 | ||
493 | assert_snapshot_matches!(map, | 491 | assert_snapshot_matches!(map, @r###" |
494 | @r###" | 492 | ⋮crate |
495 | crate | 493 | ⋮C: t v |
496 | C: t v | 494 | "###); |
497 | "### | ||
498 | ); | ||
499 | } | 495 | } |
500 | 496 | ||
501 | #[test] | 497 | #[test] |
@@ -519,12 +515,10 @@ fn reexport_across_crates() { | |||
519 | }, | 515 | }, |
520 | ); | 516 | ); |
521 | 517 | ||
522 | assert_snapshot_matches!(map, | 518 | assert_snapshot_matches!(map, @r###" |
523 | @r###" | 519 | ⋮crate |
524 | crate | 520 | ⋮Baz: t v |
525 | Baz: t v | 521 | "###); |
526 | "### | ||
527 | ); | ||
528 | } | 522 | } |
529 | 523 | ||
530 | #[test] | 524 | #[test] |
@@ -544,13 +538,11 @@ fn values_dont_shadow_extern_crates() { | |||
544 | }, | 538 | }, |
545 | ); | 539 | ); |
546 | 540 | ||
547 | assert_snapshot_matches!(map, | 541 | assert_snapshot_matches!(map, @r###" |
548 | @r###" | 542 | ⋮crate |
549 | crate | 543 | ⋮Bar: t v |
550 | Bar: t v | 544 | ⋮foo: v |
551 | foo: v | 545 | "###); |
552 | "### | ||
553 | ); | ||
554 | } | 546 | } |
555 | 547 | ||
556 | #[test] | 548 | #[test] |
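The snapshot churn in this file comes from two things: `render_crate_def_map` now sorts scope entries by name (made possible by the new `Ord` derive on `Name`), and the inline snapshots appear to use the `⋮`-prefixed format that comes with the insta 0.8 upgrade further down. A standalone sketch of the sorting idea (illustrative names only):

    use std::collections::HashMap;

    // Sort entries before rendering so the output no longer depends on
    // hash-map iteration order, mirroring the `sort_by_key` added above.
    fn render(scope: &HashMap<String, String>) -> String {
        let mut entries: Vec<_> = scope.iter().collect();
        entries.sort_by(|a, b| a.0.cmp(b.0));
        entries.iter().map(|(name, res)| format!("{}: {}\n", name, res)).collect()
    }

    fn main() {
        let mut scope = HashMap::new();
        scope.insert("V".to_string(), "t v".to_string());
        scope.insert("E".to_string(), "t".to_string());
        assert_eq!(render(&scope), "E: t\nV: t v\n");
    }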
diff --git a/crates/ra_hir/src/nameres/tests/globs.rs b/crates/ra_hir/src/nameres/tests/globs.rs index 6e50c7ff6..e1519ca6b 100644 --- a/crates/ra_hir/src/nameres/tests/globs.rs +++ b/crates/ra_hir/src/nameres/tests/globs.rs | |||
@@ -18,20 +18,20 @@ fn glob_1() { | |||
18 | ", | 18 | ", |
19 | ); | 19 | ); |
20 | assert_snapshot_matches!(map, @r###" | 20 | assert_snapshot_matches!(map, @r###" |
21 | crate | 21 | ⋮crate |
22 | bar: t | 22 | ⋮Baz: t v |
23 | Foo: t v | 23 | ⋮Foo: t v |
24 | Baz: t v | 24 | ⋮bar: t |
25 | foo: t | 25 | ⋮foo: t |
26 | 26 | ⋮ | |
27 | crate::foo | 27 | ⋮crate::foo |
28 | bar: t | 28 | ⋮Baz: t v |
29 | Foo: t v | 29 | ⋮Foo: t v |
30 | Baz: t v | 30 | ⋮bar: t |
31 | 31 | ⋮ | |
32 | crate::foo::bar | 32 | ⋮crate::foo::bar |
33 | Baz: t v | 33 | ⋮Baz: t v |
34 | "### | 34 | "### |
35 | ); | 35 | ); |
36 | } | 36 | } |
37 | 37 | ||
@@ -54,22 +54,22 @@ fn glob_2() { | |||
54 | ", | 54 | ", |
55 | ); | 55 | ); |
56 | assert_snapshot_matches!(map, @r###" | 56 | assert_snapshot_matches!(map, @r###" |
57 | crate | 57 | ⋮crate |
58 | bar: t | 58 | ⋮Baz: t v |
59 | Foo: t v | 59 | ⋮Foo: t v |
60 | Baz: t v | 60 | ⋮bar: t |
61 | foo: t | 61 | ⋮foo: t |
62 | 62 | ⋮ | |
63 | crate::foo | 63 | ⋮crate::foo |
64 | bar: t | 64 | ⋮Baz: t v |
65 | Foo: t v | 65 | ⋮Foo: t v |
66 | Baz: t v | 66 | ⋮bar: t |
67 | 67 | ⋮ | |
68 | crate::foo::bar | 68 | ⋮crate::foo::bar |
69 | bar: t | 69 | ⋮Baz: t v |
70 | Foo: t v | 70 | ⋮Foo: t v |
71 | Baz: t v | 71 | ⋮bar: t |
72 | "### | 72 | "### |
73 | ); | 73 | ); |
74 | } | 74 | } |
75 | 75 | ||
@@ -90,9 +90,9 @@ fn glob_across_crates() { | |||
90 | }, | 90 | }, |
91 | ); | 91 | ); |
92 | assert_snapshot_matches!(map, @r###" | 92 | assert_snapshot_matches!(map, @r###" |
93 | crate | 93 | ⋮crate |
94 | Baz: t v | 94 | ⋮Baz: t v |
95 | "### | 95 | "### |
96 | ); | 96 | ); |
97 | } | 97 | } |
98 | 98 | ||
@@ -109,10 +109,10 @@ fn glob_enum() { | |||
109 | ", | 109 | ", |
110 | ); | 110 | ); |
111 | assert_snapshot_matches!(map, @r###" | 111 | assert_snapshot_matches!(map, @r###" |
112 | crate | 112 | ⋮crate |
113 | Foo: t | 113 | ⋮Bar: t v |
114 | Bar: t v | 114 | ⋮Baz: t v |
115 | Baz: t v | 115 | ⋮Foo: t |
116 | "### | 116 | "### |
117 | ); | 117 | ); |
118 | } | 118 | } |
diff --git a/crates/ra_hir/src/nameres/tests/macros.rs b/crates/ra_hir/src/nameres/tests/macros.rs index 8781b026b..f7ca380ad 100644 --- a/crates/ra_hir/src/nameres/tests/macros.rs +++ b/crates/ra_hir/src/nameres/tests/macros.rs | |||
@@ -18,14 +18,14 @@ fn macro_rules_are_globally_visible() { | |||
18 | ", | 18 | ", |
19 | ); | 19 | ); |
20 | assert_snapshot_matches!(map, @r###" | 20 | assert_snapshot_matches!(map, @r###" |
21 | crate | 21 | ⋮crate |
22 | nested: t | 22 | ⋮Foo: t v |
23 | Foo: t v | 23 | ⋮nested: t |
24 | 24 | ⋮ | |
25 | crate::nested | 25 | ⋮crate::nested |
26 | Bar: t v | 26 | ⋮Bar: t v |
27 | Baz: t v | 27 | ⋮Baz: t v |
28 | "###); | 28 | "###); |
29 | } | 29 | } |
30 | 30 | ||
31 | #[test] | 31 | #[test] |
@@ -45,15 +45,15 @@ fn macro_rules_can_define_modules() { | |||
45 | ", | 45 | ", |
46 | ); | 46 | ); |
47 | assert_snapshot_matches!(map, @r###" | 47 | assert_snapshot_matches!(map, @r###" |
48 | crate | 48 | ⋮crate |
49 | n1: t | 49 | ⋮n1: t |
50 | 50 | ⋮ | |
51 | crate::n1 | 51 | ⋮crate::n1 |
52 | n2: t | 52 | ⋮n2: t |
53 | 53 | ⋮ | |
54 | crate::n1::n2 | 54 | ⋮crate::n1::n2 |
55 | X: t v | 55 | ⋮X: t v |
56 | "###); | 56 | "###); |
57 | } | 57 | } |
58 | 58 | ||
59 | #[test] | 59 | #[test] |
@@ -81,14 +81,14 @@ fn macro_rules_from_other_crates_are_visible() { | |||
81 | }, | 81 | }, |
82 | ); | 82 | ); |
83 | assert_snapshot_matches!(map, @r###" | 83 | assert_snapshot_matches!(map, @r###" |
84 | crate | 84 | ⋮crate |
85 | bar: t | 85 | ⋮Bar: t v |
86 | Foo: t v | 86 | ⋮Foo: t v |
87 | Bar: t v | 87 | ⋮bar: t |
88 | 88 | ⋮ | |
89 | crate::bar | 89 | ⋮crate::bar |
90 | bar: t | 90 | ⋮Bar: t v |
91 | Foo: t v | 91 | ⋮Foo: t v |
92 | Bar: t v | 92 | ⋮bar: t |
93 | "###); | 93 | "###); |
94 | } | 94 | } |
diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs index cfe07156b..3679a2242 100644 --- a/crates/ra_hir/src/ty.rs +++ b/crates/ra_hir/src/ty.rs | |||
@@ -19,8 +19,8 @@ use std::{fmt, mem}; | |||
19 | use crate::{Name, AdtDef, type_ref::Mutability, db::HirDatabase, Trait, GenericParams}; | 19 | use crate::{Name, AdtDef, type_ref::Mutability, db::HirDatabase, Trait, GenericParams}; |
20 | use display::{HirDisplay, HirFormatter}; | 20 | use display::{HirDisplay, HirFormatter}; |
21 | 21 | ||
22 | pub(crate) use lower::{TypableDef, type_for_def, type_for_field, callable_item_sig, generic_predicates}; | 22 | pub(crate) use lower::{TypableDef, type_for_def, type_for_field, callable_item_sig, generic_predicates, generic_defaults}; |
23 | pub(crate) use infer::{infer, InferenceResult, InferTy}; | 23 | pub(crate) use infer::{infer_query, InferenceResult, InferTy}; |
24 | pub use lower::CallableDef; | 24 | pub use lower::CallableDef; |
25 | 25 | ||
26 | /// A type constructor or type name: this might be something like the primitive | 26 | /// A type constructor or type name: this might be something like the primitive |
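The `infer` → `infer_query` rename here (and the matching `solver_query`, `impls_for_trait_query`, and `implements_query` renames below) follows the usual salsa pattern: the database trait keeps the short method name while the free function implementing the query takes a `_query` suffix. A hedged sketch of that wiring, assuming salsa's `#[salsa::query_group]` / `#[salsa::invoke]` attributes; the actual `HirDatabase` definition lives elsewhere in ra_hir and may differ in detail:

    // Illustrative only; not part of this diff.
    #[salsa::query_group(HirDatabaseStorage)]
    pub trait HirDatabase: DefDatabase {
        #[salsa::invoke(crate::ty::infer_query)]
        fn infer(&self, def: DefWithBody) -> Arc<InferenceResult>;
    }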
diff --git a/crates/ra_hir/src/ty/infer.rs b/crates/ra_hir/src/ty/infer.rs index 1e7d97f51..a48272981 100644 --- a/crates/ra_hir/src/ty/infer.rs +++ b/crates/ra_hir/src/ty/infer.rs | |||
@@ -23,6 +23,7 @@ use ena::unify::{InPlaceUnificationTable, UnifyKey, UnifyValue, NoError}; | |||
23 | use rustc_hash::FxHashMap; | 23 | use rustc_hash::FxHashMap; |
24 | 24 | ||
25 | use ra_arena::map::ArenaMap; | 25 | use ra_arena::map::ArenaMap; |
26 | use ra_prof::profile; | ||
26 | use test_utils::tested_by; | 27 | use test_utils::tested_by; |
27 | 28 | ||
28 | use crate::{ | 29 | use crate::{ |
@@ -51,7 +52,8 @@ use super::{ | |||
51 | mod unify; | 52 | mod unify; |
52 | 53 | ||
53 | /// The entry point of type inference. | 54 | /// The entry point of type inference. |
54 | pub fn infer(db: &impl HirDatabase, def: DefWithBody) -> Arc<InferenceResult> { | 55 | pub fn infer_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<InferenceResult> { |
56 | let _p = profile("infer_query"); | ||
55 | db.check_canceled(); | 57 | db.check_canceled(); |
56 | let body = def.body(db); | 58 | let body = def.body(db); |
57 | let resolver = def.resolver(db); | 59 | let resolver = def.resolver(db); |
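`ra_prof::profile` is used here as a scope guard: the value bound to `_p` measures the span from the call until it is dropped at the end of the function. A self-contained sketch of that RAII pattern (not ra_prof's actual implementation):

    use std::time::Instant;

    struct ProfileGuard {
        label: &'static str,
        start: Instant,
    }

    impl Drop for ProfileGuard {
        fn drop(&mut self) {
            // Reported when the guard goes out of scope.
            eprintln!("{} took {:?}", self.label, self.start.elapsed());
        }
    }

    fn profile(label: &'static str) -> ProfileGuard {
        ProfileGuard { label, start: Instant::now() }
    }

    fn main() {
        let _p = profile("infer_query"); // dropped (and reported) at scope end
        // ... work being measured ...
    }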
diff --git a/crates/ra_hir/src/ty/infer/unify.rs b/crates/ra_hir/src/ty/infer/unify.rs index 8ca7e957d..bc9719725 100644 --- a/crates/ra_hir/src/ty/infer/unify.rs +++ b/crates/ra_hir/src/ty/infer/unify.rs | |||
@@ -56,7 +56,12 @@ where | |||
56 | self.var_stack.pop(); | 56 | self.var_stack.pop(); |
57 | result | 57 | result |
58 | } else { | 58 | } else { |
59 | let free_var = InferTy::TypeVar(self.ctx.var_unification_table.find(inner)); | 59 | let root = self.ctx.var_unification_table.find(inner); |
60 | let free_var = match tv { | ||
61 | InferTy::TypeVar(_) => InferTy::TypeVar(root), | ||
62 | InferTy::IntVar(_) => InferTy::IntVar(root), | ||
63 | InferTy::FloatVar(_) => InferTy::FloatVar(root), | ||
64 | }; | ||
60 | let position = self.add(free_var); | 65 | let position = self.add(free_var); |
61 | Ty::Bound(position as u32) | 66 | Ty::Bound(position as u32) |
62 | } | 67 | } |
diff --git a/crates/ra_hir/src/ty/lower.rs b/crates/ra_hir/src/ty/lower.rs index 09d26ce5a..a1a2d0f6b 100644 --- a/crates/ra_hir/src/ty/lower.rs +++ b/crates/ra_hir/src/ty/lower.rs | |||
@@ -9,17 +9,18 @@ use std::sync::Arc; | |||
9 | use std::iter; | 9 | use std::iter; |
10 | 10 | ||
11 | use crate::{ | 11 | use crate::{ |
12 | Function, Struct, StructField, Enum, EnumVariant, Path, | 12 | Function, Struct, StructField, Enum, EnumVariant, Path, ModuleDef, TypeAlias, Const, Static, |
13 | ModuleDef, TypeAlias, | ||
14 | Const, Static, | ||
15 | HirDatabase, | 13 | HirDatabase, |
16 | type_ref::TypeRef, | 14 | type_ref::TypeRef, |
17 | name::KnownName, | 15 | name::KnownName, |
18 | nameres::Namespace, | 16 | nameres::Namespace, |
19 | resolve::{Resolver, Resolution}, | 17 | resolve::{Resolver, Resolution}, |
20 | path::{PathSegment, GenericArg}, | 18 | path::{PathSegment, GenericArg}, |
21 | generics::{GenericParams, HasGenericParams}, | 19 | generics::{HasGenericParams}, |
22 | adt::VariantDef, Trait, generics::{ WherePredicate, GenericDef} | 20 | adt::VariantDef, |
21 | Trait, | ||
22 | generics::{WherePredicate, GenericDef}, | ||
23 | ty::AdtDef, | ||
23 | }; | 24 | }; |
24 | use super::{Ty, primitive, FnSig, Substs, TypeCtor, TraitRef, GenericPredicate}; | 25 | use super::{Ty, primitive, FnSig, Substs, TypeCtor, TraitRef, GenericPredicate}; |
25 | 26 | ||
@@ -120,15 +121,15 @@ impl Ty { | |||
120 | segment: &PathSegment, | 121 | segment: &PathSegment, |
121 | resolved: TypableDef, | 122 | resolved: TypableDef, |
122 | ) -> Substs { | 123 | ) -> Substs { |
123 | let def_generics = match resolved { | 124 | let def_generic: Option<GenericDef> = match resolved { |
124 | TypableDef::Function(func) => func.generic_params(db), | 125 | TypableDef::Function(func) => Some(func.into()), |
125 | TypableDef::Struct(s) => s.generic_params(db), | 126 | TypableDef::Struct(s) => Some(s.into()), |
126 | TypableDef::Enum(e) => e.generic_params(db), | 127 | TypableDef::Enum(e) => Some(e.into()), |
127 | TypableDef::EnumVariant(var) => var.parent_enum(db).generic_params(db), | 128 | TypableDef::EnumVariant(var) => Some(var.parent_enum(db).into()), |
128 | TypableDef::TypeAlias(t) => t.generic_params(db), | 129 | TypableDef::TypeAlias(t) => Some(t.into()), |
129 | TypableDef::Const(_) | TypableDef::Static(_) => GenericParams::default().into(), | 130 | TypableDef::Const(_) | TypableDef::Static(_) => None, |
130 | }; | 131 | }; |
131 | substs_from_path_segment(db, resolver, segment, &def_generics, false) | 132 | substs_from_path_segment(db, resolver, segment, def_generic, false) |
132 | } | 133 | } |
133 | 134 | ||
134 | /// Collect generic arguments from a path into a `Substs`. See also | 135 | /// Collect generic arguments from a path into a `Substs`. See also |
@@ -172,10 +173,12 @@ pub(super) fn substs_from_path_segment( | |||
172 | db: &impl HirDatabase, | 173 | db: &impl HirDatabase, |
173 | resolver: &Resolver, | 174 | resolver: &Resolver, |
174 | segment: &PathSegment, | 175 | segment: &PathSegment, |
175 | def_generics: &GenericParams, | 176 | def_generic: Option<GenericDef>, |
176 | add_self_param: bool, | 177 | add_self_param: bool, |
177 | ) -> Substs { | 178 | ) -> Substs { |
178 | let mut substs = Vec::new(); | 179 | let mut substs = Vec::new(); |
180 | let def_generics = def_generic.map(|def| def.generic_params(db)).unwrap_or_default(); | ||
181 | |||
179 | let parent_param_count = def_generics.count_parent_params(); | 182 | let parent_param_count = def_generics.count_parent_params(); |
180 | substs.extend(iter::repeat(Ty::Unknown).take(parent_param_count)); | 183 | substs.extend(iter::repeat(Ty::Unknown).take(parent_param_count)); |
181 | if add_self_param { | 184 | if add_self_param { |
@@ -199,12 +202,24 @@ pub(super) fn substs_from_path_segment( | |||
199 | } | 202 | } |
200 | } | 203 | } |
201 | // add placeholders for args that were not provided | 204 | // add placeholders for args that were not provided |
202 | // FIXME: handle defaults | ||
203 | let supplied_params = substs.len(); | 205 | let supplied_params = substs.len(); |
204 | for _ in supplied_params..def_generics.count_params_including_parent() { | 206 | for _ in supplied_params..def_generics.count_params_including_parent() { |
205 | substs.push(Ty::Unknown); | 207 | substs.push(Ty::Unknown); |
206 | } | 208 | } |
207 | assert_eq!(substs.len(), def_generics.count_params_including_parent()); | 209 | assert_eq!(substs.len(), def_generics.count_params_including_parent()); |
210 | |||
211 | // handle defaults | ||
212 | if let Some(def_generic) = def_generic { | ||
213 | let default_substs = db.generic_defaults(def_generic); | ||
214 | assert_eq!(substs.len(), default_substs.len()); | ||
215 | |||
216 | for (i, default_ty) in default_substs.iter().enumerate() { | ||
217 | if substs[i] == Ty::Unknown { | ||
218 | substs[i] = default_ty.clone(); | ||
219 | } | ||
220 | } | ||
221 | } | ||
222 | |||
208 | Substs(substs.into()) | 223 | Substs(substs.into()) |
209 | } | 224 | } |
210 | 225 | ||
@@ -249,7 +264,7 @@ impl TraitRef { | |||
249 | resolved: Trait, | 264 | resolved: Trait, |
250 | ) -> Substs { | 265 | ) -> Substs { |
251 | let segment = path.segments.last().expect("path should have at least one segment"); | 266 | let segment = path.segments.last().expect("path should have at least one segment"); |
252 | substs_from_path_segment(db, resolver, segment, &resolved.generic_params(db), true) | 267 | substs_from_path_segment(db, resolver, segment, Some(resolved.into()), true) |
253 | } | 268 | } |
254 | 269 | ||
255 | pub(crate) fn for_trait(db: &impl HirDatabase, trait_: Trait) -> TraitRef { | 270 | pub(crate) fn for_trait(db: &impl HirDatabase, trait_: Trait) -> TraitRef { |
@@ -274,9 +289,9 @@ impl TraitRef { | |||
274 | pub(crate) fn type_for_def(db: &impl HirDatabase, def: TypableDef, ns: Namespace) -> Ty { | 289 | pub(crate) fn type_for_def(db: &impl HirDatabase, def: TypableDef, ns: Namespace) -> Ty { |
275 | match (def, ns) { | 290 | match (def, ns) { |
276 | (TypableDef::Function(f), Namespace::Values) => type_for_fn(db, f), | 291 | (TypableDef::Function(f), Namespace::Values) => type_for_fn(db, f), |
277 | (TypableDef::Struct(s), Namespace::Types) => type_for_struct(db, s), | 292 | (TypableDef::Struct(s), Namespace::Types) => type_for_adt(db, s), |
278 | (TypableDef::Struct(s), Namespace::Values) => type_for_struct_constructor(db, s), | 293 | (TypableDef::Struct(s), Namespace::Values) => type_for_struct_constructor(db, s), |
279 | (TypableDef::Enum(e), Namespace::Types) => type_for_enum(db, e), | 294 | (TypableDef::Enum(e), Namespace::Types) => type_for_adt(db, e), |
280 | (TypableDef::EnumVariant(v), Namespace::Values) => type_for_enum_variant_constructor(db, v), | 295 | (TypableDef::EnumVariant(v), Namespace::Values) => type_for_enum_variant_constructor(db, v), |
281 | (TypableDef::TypeAlias(t), Namespace::Types) => type_for_type_alias(db, t), | 296 | (TypableDef::TypeAlias(t), Namespace::Types) => type_for_type_alias(db, t), |
282 | (TypableDef::Const(c), Namespace::Values) => type_for_const(db, c), | 297 | (TypableDef::Const(c), Namespace::Values) => type_for_const(db, c), |
@@ -331,6 +346,22 @@ pub(crate) fn generic_predicates( | |||
331 | predicates.into() | 346 | predicates.into() |
332 | } | 347 | } |
333 | 348 | ||
349 | /// Resolve the default type params from generics | ||
350 | pub(crate) fn generic_defaults(db: &impl HirDatabase, def: GenericDef) -> Substs { | ||
351 | let resolver = def.resolver(db); | ||
352 | let generic_params = def.generic_params(db); | ||
353 | |||
354 | let defaults = generic_params | ||
355 | .params_including_parent() | ||
356 | .into_iter() | ||
357 | .map(|p| { | ||
358 | p.default.as_ref().map_or(Ty::Unknown, |path| Ty::from_hir_path(db, &resolver, path)) | ||
359 | }) | ||
360 | .collect::<Vec<_>>(); | ||
361 | |||
362 | Substs(defaults.into()) | ||
363 | } | ||
364 | |||
334 | fn fn_sig_for_fn(db: &impl HirDatabase, def: Function) -> FnSig { | 365 | fn fn_sig_for_fn(db: &impl HirDatabase, def: Function) -> FnSig { |
335 | let signature = def.signature(db); | 366 | let signature = def.signature(db); |
336 | let resolver = def.resolver(db); | 367 | let resolver = def.resolver(db); |
@@ -375,7 +406,7 @@ fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> FnSig { | |||
375 | .iter() | 406 | .iter() |
376 | .map(|(_, field)| Ty::from_hir(db, &resolver, &field.type_ref)) | 407 | .map(|(_, field)| Ty::from_hir(db, &resolver, &field.type_ref)) |
377 | .collect::<Vec<_>>(); | 408 | .collect::<Vec<_>>(); |
378 | let ret = type_for_struct(db, def); | 409 | let ret = type_for_adt(db, def); |
379 | FnSig::from_params_and_return(params, ret) | 410 | FnSig::from_params_and_return(params, ret) |
380 | } | 411 | } |
381 | 412 | ||
@@ -383,7 +414,7 @@ fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> FnSig { | |||
383 | fn type_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> Ty { | 414 | fn type_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> Ty { |
384 | let var_data = def.variant_data(db); | 415 | let var_data = def.variant_data(db); |
385 | if var_data.fields().is_none() { | 416 | if var_data.fields().is_none() { |
386 | return type_for_struct(db, def); // Unit struct | 417 | return type_for_adt(db, def); // Unit struct |
387 | } | 418 | } |
388 | let generics = def.generic_params(db); | 419 | let generics = def.generic_params(db); |
389 | let substs = Substs::identity(&generics); | 420 | let substs = Substs::identity(&generics); |
@@ -403,7 +434,7 @@ fn fn_sig_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariant) | |||
403 | .collect::<Vec<_>>(); | 434 | .collect::<Vec<_>>(); |
404 | let generics = def.parent_enum(db).generic_params(db); | 435 | let generics = def.parent_enum(db).generic_params(db); |
405 | let substs = Substs::identity(&generics); | 436 | let substs = Substs::identity(&generics); |
406 | let ret = type_for_enum(db, def.parent_enum(db)).subst(&substs); | 437 | let ret = type_for_adt(db, def.parent_enum(db)).subst(&substs); |
407 | FnSig::from_params_and_return(params, ret) | 438 | FnSig::from_params_and_return(params, ret) |
408 | } | 439 | } |
409 | 440 | ||
@@ -411,21 +442,16 @@ fn fn_sig_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariant) | |||
411 | fn type_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariant) -> Ty { | 442 | fn type_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariant) -> Ty { |
412 | let var_data = def.variant_data(db); | 443 | let var_data = def.variant_data(db); |
413 | if var_data.fields().is_none() { | 444 | if var_data.fields().is_none() { |
414 | return type_for_enum(db, def.parent_enum(db)); // Unit variant | 445 | return type_for_adt(db, def.parent_enum(db)); // Unit variant |
415 | } | 446 | } |
416 | let generics = def.parent_enum(db).generic_params(db); | 447 | let generics = def.parent_enum(db).generic_params(db); |
417 | let substs = Substs::identity(&generics); | 448 | let substs = Substs::identity(&generics); |
418 | Ty::apply(TypeCtor::FnDef(def.into()), substs) | 449 | Ty::apply(TypeCtor::FnDef(def.into()), substs) |
419 | } | 450 | } |
420 | 451 | ||
421 | fn type_for_struct(db: &impl HirDatabase, s: Struct) -> Ty { | 452 | fn type_for_adt(db: &impl HirDatabase, adt: impl Into<AdtDef> + HasGenericParams) -> Ty { |
422 | let generics = s.generic_params(db); | 453 | let generics = adt.generic_params(db); |
423 | Ty::apply(TypeCtor::Adt(s.into()), Substs::identity(&generics)) | 454 | Ty::apply(TypeCtor::Adt(adt.into()), Substs::identity(&generics)) |
424 | } | ||
425 | |||
426 | fn type_for_enum(db: &impl HirDatabase, s: Enum) -> Ty { | ||
427 | let generics = s.generic_params(db); | ||
428 | Ty::apply(TypeCtor::Adt(s.into()), Substs::identity(&generics)) | ||
429 | } | 455 | } |
430 | 456 | ||
431 | fn type_for_type_alias(db: &impl HirDatabase, t: TypeAlias) -> Ty { | 457 | fn type_for_type_alias(db: &impl HirDatabase, t: TypeAlias) -> Ty { |
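The core of the lowering change: `generic_defaults` produces one `Ty` per parameter (`Ty::Unknown` when no default was written), and `substs_from_path_segment` then backfills any argument that is still unknown after explicit args and placeholders. A standalone sketch of that backfill step with a toy `Ty` (illustrative types only):

    #[derive(Clone, PartialEq, Debug)]
    enum Ty {
        Unknown,
        Named(&'static str),
    }

    // Mirrors the loop added under "handle defaults": a slot left as
    // `Unknown` is replaced by the corresponding default, if any.
    fn apply_defaults(substs: &mut Vec<Ty>, defaults: &[Ty]) {
        assert_eq!(substs.len(), defaults.len());
        for (slot, default) in substs.iter_mut().zip(defaults) {
            if *slot == Ty::Unknown {
                *slot = default.clone();
            }
        }
    }

    fn main() {
        // e.g. `Gen<T = u32>` referenced as plain `Gen`:
        let mut substs = vec![Ty::Unknown];
        apply_defaults(&mut substs, &[Ty::Named("u32")]);
        assert_eq!(substs, vec![Ty::Named("u32")]);
    }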
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs index f8364203d..cd24faba5 100644 --- a/crates/ra_hir/src/ty/tests.rs +++ b/crates/ra_hir/src/ty/tests.rs | |||
@@ -1449,6 +1449,35 @@ fn test() { | |||
1449 | } | 1449 | } |
1450 | 1450 | ||
1451 | #[test] | 1451 | #[test] |
1452 | fn infer_associated_method_generics_with_default_param() { | ||
1453 | assert_snapshot_matches!( | ||
1454 | infer(r#" | ||
1455 | struct Gen<T=u32> { | ||
1456 | val: T | ||
1457 | } | ||
1458 | |||
1459 | impl<T> Gen<T> { | ||
1460 | pub fn make() -> Gen<T> { | ||
1461 | loop { } | ||
1462 | } | ||
1463 | } | ||
1464 | |||
1465 | fn test() { | ||
1466 | let a = Gen::make(); | ||
1467 | } | ||
1468 | "#), | ||
1469 | @r###" | ||
1470 | [80; 104) '{ ... }': ! | ||
1471 | [90; 98) 'loop { }': ! | ||
1472 | [95; 98) '{ }': () | ||
1473 | [118; 146) '{ ...e(); }': () | ||
1474 | [128; 129) 'a': Gen<u32> | ||
1475 | [132; 141) 'Gen::make': fn make<u32>() -> Gen<T> | ||
1476 | [132; 143) 'Gen::make()': Gen<u32>"### | ||
1477 | ); | ||
1478 | } | ||
1479 | |||
1480 | #[test] | ||
1452 | fn infer_associated_method_generics_without_args() { | 1481 | fn infer_associated_method_generics_without_args() { |
1453 | assert_snapshot_matches!( | 1482 | assert_snapshot_matches!( |
1454 | infer(r#" | 1483 | infer(r#" |
diff --git a/crates/ra_hir/src/ty/traits.rs b/crates/ra_hir/src/ty/traits.rs index 7de04c044..e6c78c0d4 100644 --- a/crates/ra_hir/src/ty/traits.rs +++ b/crates/ra_hir/src/ty/traits.rs | |||
@@ -4,6 +4,7 @@ use std::sync::{Arc, Mutex}; | |||
4 | use rustc_hash::FxHashSet; | 4 | use rustc_hash::FxHashSet; |
5 | use log::debug; | 5 | use log::debug; |
6 | use chalk_ir::cast::Cast; | 6 | use chalk_ir::cast::Cast; |
7 | use ra_prof::profile; | ||
7 | 8 | ||
8 | use crate::{Crate, Trait, db::HirDatabase, ImplBlock}; | 9 | use crate::{Crate, Trait, db::HirDatabase, ImplBlock}; |
9 | use super::{TraitRef, Ty, Canonical}; | 10 | use super::{TraitRef, Ty, Canonical}; |
@@ -25,7 +26,7 @@ struct ChalkContext<'a, DB> { | |||
25 | krate: Crate, | 26 | krate: Crate, |
26 | } | 27 | } |
27 | 28 | ||
28 | pub(crate) fn solver(_db: &impl HirDatabase, _krate: Crate) -> Arc<Mutex<Solver>> { | 29 | pub(crate) fn solver_query(_db: &impl HirDatabase, _krate: Crate) -> Arc<Mutex<Solver>> { |
29 | // krate parameter is just so we cache a unique solver per crate | 30 | // krate parameter is just so we cache a unique solver per crate |
30 | let solver_choice = chalk_solve::SolverChoice::SLG { max_size: CHALK_SOLVER_MAX_SIZE }; | 31 | let solver_choice = chalk_solve::SolverChoice::SLG { max_size: CHALK_SOLVER_MAX_SIZE }; |
31 | debug!("Creating new solver for crate {:?}", _krate); | 32 | debug!("Creating new solver for crate {:?}", _krate); |
@@ -33,7 +34,7 @@ pub(crate) fn solver(_db: &impl HirDatabase, _krate: Crate) -> Arc<Mutex<Solver> | |||
33 | } | 34 | } |
34 | 35 | ||
35 | /// Collects impls for the given trait in the whole dependency tree of `krate`. | 36 | /// Collects impls for the given trait in the whole dependency tree of `krate`. |
36 | pub(crate) fn impls_for_trait( | 37 | pub(crate) fn impls_for_trait_query( |
37 | db: &impl HirDatabase, | 38 | db: &impl HirDatabase, |
38 | krate: Crate, | 39 | krate: Crate, |
39 | trait_: Trait, | 40 | trait_: Trait, |
@@ -60,7 +61,7 @@ fn solve( | |||
60 | let context = ChalkContext { db, krate }; | 61 | let context = ChalkContext { db, krate }; |
61 | let solver = db.solver(krate); | 62 | let solver = db.solver(krate); |
62 | debug!("solve goal: {:?}", goal); | 63 | debug!("solve goal: {:?}", goal); |
63 | let solution = solver.lock().unwrap().solve(&context, goal); | 64 | let solution = solver.lock().unwrap().solve_with_fuel(&context, goal, Some(1000)); |
64 | debug!("solve({:?}) => {:?}", goal, solution); | 65 | debug!("solve({:?}) => {:?}", goal, solution); |
65 | solution | 66 | solution |
66 | } | 67 | } |
@@ -76,11 +77,12 @@ pub enum Obligation { | |||
76 | } | 77 | } |
77 | 78 | ||
78 | /// Check using Chalk whether trait is implemented for given parameters including `Self` type. | 79 | /// Check using Chalk whether trait is implemented for given parameters including `Self` type. |
79 | pub(crate) fn implements( | 80 | pub(crate) fn implements_query( |
80 | db: &impl HirDatabase, | 81 | db: &impl HirDatabase, |
81 | krate: Crate, | 82 | krate: Crate, |
82 | trait_ref: Canonical<TraitRef>, | 83 | trait_ref: Canonical<TraitRef>, |
83 | ) -> Option<Solution> { | 84 | ) -> Option<Solution> { |
85 | let _p = profile("implements_query"); | ||
84 | let goal: chalk_ir::Goal = trait_ref.value.to_chalk(db).cast(); | 86 | let goal: chalk_ir::Goal = trait_ref.value.to_chalk(db).cast(); |
85 | debug!("goal: {:?}", goal); | 87 | debug!("goal: {:?}", goal); |
86 | let env = chalk_ir::Environment::new(); | 88 | let env = chalk_ir::Environment::new(); |
diff --git a/crates/ra_ide_api/Cargo.toml b/crates/ra_ide_api/Cargo.toml index 333706c1a..d399d5e2e 100644 --- a/crates/ra_ide_api/Cargo.toml +++ b/crates/ra_ide_api/Cargo.toml | |||
@@ -29,7 +29,7 @@ test_utils = { path = "../test_utils" } | |||
29 | ra_assists = { path = "../ra_assists" } | 29 | ra_assists = { path = "../ra_assists" } |
30 | 30 | ||
31 | [dev-dependencies] | 31 | [dev-dependencies] |
32 | insta = "0.7.0" | 32 | insta = "0.8.1" |
33 | 33 | ||
34 | [dev-dependencies.proptest] | 34 | [dev-dependencies.proptest] |
35 | version = "0.9.0" | 35 | version = "0.9.0" |
diff --git a/crates/ra_ide_api/src/diagnostics.rs b/crates/ra_ide_api/src/diagnostics.rs index e23d178b0..9a0eb2c14 100644 --- a/crates/ra_ide_api/src/diagnostics.rs +++ b/crates/ra_ide_api/src/diagnostics.rs | |||
@@ -4,7 +4,7 @@ use itertools::Itertools; | |||
4 | use hir::{source_binder, diagnostics::{Diagnostic as _, DiagnosticSink}}; | 4 | use hir::{source_binder, diagnostics::{Diagnostic as _, DiagnosticSink}}; |
5 | use ra_db::SourceDatabase; | 5 | use ra_db::SourceDatabase; |
6 | use ra_syntax::{ | 6 | use ra_syntax::{ |
7 | Location, SourceFile, SyntaxKind, TextRange, SyntaxNode, | 7 | T, Location, SourceFile, TextRange, SyntaxNode, |
8 | ast::{self, AstNode, NamedFieldList, NamedField}, | 8 | ast::{self, AstNode, NamedFieldList, NamedField}, |
9 | }; | 9 | }; |
10 | use ra_assists::ast_editor::{AstEditor, AstBuilder}; | 10 | use ra_assists::ast_editor::{AstEditor, AstBuilder}; |
@@ -130,9 +130,7 @@ fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement( | |||
130 | single_use_tree: &ast::UseTree, | 130 | single_use_tree: &ast::UseTree, |
131 | ) -> Option<TextEdit> { | 131 | ) -> Option<TextEdit> { |
132 | let use_tree_list_node = single_use_tree.syntax().parent()?; | 132 | let use_tree_list_node = single_use_tree.syntax().parent()?; |
133 | if single_use_tree.path()?.segment()?.syntax().first_child_or_token()?.kind() | 133 | if single_use_tree.path()?.segment()?.syntax().first_child_or_token()?.kind() == T![self] { |
134 | == SyntaxKind::SELF_KW | ||
135 | { | ||
136 | let start = use_tree_list_node.prev_sibling_or_token()?.range().start(); | 134 | let start = use_tree_list_node.prev_sibling_or_token()?.range().start(); |
137 | let end = use_tree_list_node.range().end(); | 135 | let end = use_tree_list_node.range().end(); |
138 | let range = TextRange::from_to(start, end); | 136 | let range = TextRange::from_to(start, end); |
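This and the following ra_ide_api/ra_mbe hunks replace spelled-out `SyntaxKind` variants (`SELF_KW`, `COMMA`, `L_CURLY`, ...) with the `T![...]` shorthand exported by `ra_syntax`, so token checks read like the source text they match. A small sketch in the same style, assuming the same `ra_syntax` imports the diff uses:

    use ra_syntax::{SyntaxKind, T};

    // `T![')']` and friends expand to the corresponding SyntaxKind
    // variants, so the match arms mirror the punctuation being tested.
    fn is_closing_bracket(kind: SyntaxKind) -> bool {
        match kind {
            T![')'] | T![']'] | T!['}'] => true,
            _ => false,
        }
    }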
diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs index 163fa8c3c..4553faad0 100644 --- a/crates/ra_ide_api/src/extend_selection.rs +++ b/crates/ra_ide_api/src/extend_selection.rs | |||
@@ -157,7 +157,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> { | |||
157 | }) | 157 | }) |
158 | .next() | 158 | .next() |
159 | .and_then(|it| it.as_token()) | 159 | .and_then(|it| it.as_token()) |
160 | .filter(|node| node.kind() == COMMA) | 160 | .filter(|node| node.kind() == T![,]) |
161 | } | 161 | } |
162 | 162 | ||
163 | if let Some(comma_node) = nearby_comma(node, Direction::Prev) { | 163 | if let Some(comma_node) = nearby_comma(node, Direction::Prev) { |
diff --git a/crates/ra_ide_api/src/join_lines.rs b/crates/ra_ide_api/src/join_lines.rs index 598717311..4ca005466 100644 --- a/crates/ra_ide_api/src/join_lines.rs +++ b/crates/ra_ide_api/src/join_lines.rs | |||
@@ -1,7 +1,8 @@ | |||
1 | use itertools::Itertools; | 1 | use itertools::Itertools; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | T, | ||
3 | SourceFile, TextRange, TextUnit, SyntaxNode, SyntaxElement, SyntaxToken, | 4 | SourceFile, TextRange, TextUnit, SyntaxNode, SyntaxElement, SyntaxToken, |
4 | SyntaxKind::{self, WHITESPACE, COMMA, R_CURLY, R_PAREN, R_BRACK}, | 5 | SyntaxKind::{self, WHITESPACE}, |
5 | algo::{find_covering_element, non_trivia_sibling}, | 6 | algo::{find_covering_element, non_trivia_sibling}, |
6 | ast::{self, AstNode, AstToken}, | 7 | ast::{self, AstNode, AstToken}, |
7 | Direction, | 8 | Direction, |
@@ -89,7 +90,7 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn | |||
89 | if is_trailing_comma(prev.kind(), next.kind()) { | 90 | if is_trailing_comma(prev.kind(), next.kind()) { |
90 | // Removes: trailing comma, newline (incl. surrounding whitespace) | 91 | // Removes: trailing comma, newline (incl. surrounding whitespace) |
91 | edit.delete(TextRange::from_to(prev.range().start(), token.range().end())); | 92 | edit.delete(TextRange::from_to(prev.range().start(), token.range().end())); |
92 | } else if prev.kind() == COMMA && next.kind() == R_CURLY { | 93 | } else if prev.kind() == T![,] && next.kind() == T!['}'] { |
93 | // Removes: comma, newline (incl. surrounding whitespace) | 94 | // Removes: comma, newline (incl. surrounding whitespace) |
94 | let space = if let Some(left) = prev.prev_sibling_or_token() { | 95 | let space = if let Some(left) = prev.prev_sibling_or_token() { |
95 | compute_ws(left.kind(), next.kind()) | 96 | compute_ws(left.kind(), next.kind()) |
@@ -116,7 +117,7 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn | |||
116 | 117 | ||
117 | fn has_comma_after(node: &SyntaxNode) -> bool { | 118 | fn has_comma_after(node: &SyntaxNode) -> bool { |
118 | match non_trivia_sibling(node.into(), Direction::Next) { | 119 | match non_trivia_sibling(node.into(), Direction::Next) { |
119 | Some(n) => n.kind() == COMMA, | 120 | Some(n) => n.kind() == T![,], |
120 | _ => false, | 121 | _ => false, |
121 | } | 122 | } |
122 | } | 123 | } |
@@ -150,7 +151,7 @@ fn join_single_use_tree(edit: &mut TextEditBuilder, token: SyntaxToken) -> Optio | |||
150 | 151 | ||
151 | fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool { | 152 | fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool { |
152 | match (left, right) { | 153 | match (left, right) { |
153 | (COMMA, R_PAREN) | (COMMA, R_BRACK) => true, | 154 | (T![,], T![')']) | (T![,], T![']']) => true, |
154 | _ => false, | 155 | _ => false, |
155 | } | 156 | } |
156 | } | 157 | } |
diff --git a/crates/ra_ide_api/src/matching_brace.rs b/crates/ra_ide_api/src/matching_brace.rs index bebd16a69..eaa4b620c 100644 --- a/crates/ra_ide_api/src/matching_brace.rs +++ b/crates/ra_ide_api/src/matching_brace.rs | |||
@@ -1,13 +1,14 @@ | |||
1 | use ra_syntax::{ | 1 | use ra_syntax::{ |
2 | SourceFile, TextUnit, | 2 | SourceFile, TextUnit, |
3 | algo::find_token_at_offset, | 3 | algo::find_token_at_offset, |
4 | SyntaxKind::{self, *}, | 4 | SyntaxKind::{self}, |
5 | ast::AstNode, | 5 | ast::AstNode, |
6 | T | ||
6 | }; | 7 | }; |
7 | 8 | ||
8 | pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> { | 9 | pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> { |
9 | const BRACES: &[SyntaxKind] = | 10 | const BRACES: &[SyntaxKind] = |
10 | &[L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE]; | 11 | &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>]]; |
11 | let (brace_node, brace_idx) = find_token_at_offset(file.syntax(), offset) | 12 | let (brace_node, brace_idx) = find_token_at_offset(file.syntax(), offset) |
12 | .filter_map(|node| { | 13 | .filter_map(|node| { |
13 | let idx = BRACES.iter().position(|&brace| brace == node.kind())?; | 14 | let idx = BRACES.iter().position(|&brace| brace == node.kind())?; |
diff --git a/crates/ra_ide_api/src/syntax_highlighting.rs b/crates/ra_ide_api/src/syntax_highlighting.rs index d9a28d2b5..2158291dc 100644 --- a/crates/ra_ide_api/src/syntax_highlighting.rs +++ b/crates/ra_ide_api/src/syntax_highlighting.rs | |||
@@ -1,6 +1,6 @@ | |||
1 | use rustc_hash::FxHashSet; | 1 | use rustc_hash::FxHashSet; |
2 | 2 | ||
3 | use ra_syntax::{ast, AstNode, TextRange, Direction, SyntaxKind::*, SyntaxElement}; | 3 | use ra_syntax::{ast, AstNode, TextRange, Direction, SyntaxKind, SyntaxKind::*, SyntaxElement, T}; |
4 | use ra_db::SourceDatabase; | 4 | use ra_db::SourceDatabase; |
5 | 5 | ||
6 | use crate::{FileId, db::RootDatabase}; | 6 | use crate::{FileId, db::RootDatabase}; |
@@ -11,6 +11,21 @@ pub struct HighlightedRange { | |||
11 | pub tag: &'static str, | 11 | pub tag: &'static str, |
12 | } | 12 | } |
13 | 13 | ||
14 | fn is_control_keyword(kind: SyntaxKind) -> bool { | ||
15 | match kind { | ||
16 | T![for] | ||
17 | | T![loop] | ||
18 | | T![while] | ||
19 | | T![continue] | ||
20 | | T![break] | ||
21 | | T![if] | ||
22 | | T![else] | ||
23 | | T![match] | ||
24 | | T![return] => true, | ||
25 | _ => false, | ||
26 | } | ||
27 | } | ||
28 | |||
14 | pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> { | 29 | pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> { |
15 | let source_file = db.parse(file_id); | 30 | let source_file = db.parse(file_id); |
16 | 31 | ||
@@ -29,6 +44,8 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa | |||
29 | NAME => "function", | 44 | NAME => "function", |
30 | INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE => "literal", | 45 | INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE => "literal", |
31 | LIFETIME => "parameter", | 46 | LIFETIME => "parameter", |
47 | T![unsafe] => "keyword.unsafe", | ||
48 | k if is_control_keyword(k) => "keyword.control", | ||
32 | k if k.is_keyword() => "keyword", | 49 | k if k.is_keyword() => "keyword", |
33 | _ => { | 50 | _ => { |
34 | if let Some(macro_call) = node.as_node().and_then(ast::MacroCall::cast) { | 51 | if let Some(macro_call) = node.as_node().and_then(ast::MacroCall::cast) { |
@@ -40,7 +57,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa | |||
40 | let mut range_end = name_ref.syntax().range().end(); | 57 | let mut range_end = name_ref.syntax().range().end(); |
41 | for sibling in path.syntax().siblings_with_tokens(Direction::Next) { | 58 | for sibling in path.syntax().siblings_with_tokens(Direction::Next) { |
42 | match sibling.kind() { | 59 | match sibling.kind() { |
43 | EXCL | IDENT => range_end = sibling.range().end(), | 60 | T![!] | IDENT => range_end = sibling.range().end(), |
44 | _ => (), | 61 | _ => (), |
45 | } | 62 | } |
46 | } | 63 | } |
diff --git a/crates/ra_mbe/src/mbe_expander.rs b/crates/ra_mbe/src/mbe_expander.rs index 3a4dbb5f5..7fff8deff 100644 --- a/crates/ra_mbe/src/mbe_expander.rs +++ b/crates/ra_mbe/src/mbe_expander.rs | |||
@@ -281,7 +281,11 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings, | |||
281 | return Err(ExpandError::UnexpectedToken); | 281 | return Err(ExpandError::UnexpectedToken); |
282 | } | 282 | } |
283 | } | 283 | } |
284 | _ => return Err(ExpandError::UnexpectedToken), | 284 | crate::Leaf::Literal(literal) => { |
285 | if input.eat_literal().map(|i| &i.text) != Some(&literal.text) { | ||
286 | return Err(ExpandError::UnexpectedToken); | ||
287 | } | ||
288 | } | ||
285 | }, | 289 | }, |
286 | crate::TokenTree::Repeat(crate::Repeat { subtree, kind, separator }) => { | 290 | crate::TokenTree::Repeat(crate::Repeat { subtree, kind, separator }) => { |
287 | // Dirty hack to make macro-expansion terminate. | 291 | // Dirty hack to make macro-expansion terminate. |
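The new `Leaf::Literal` arm lets `match_lhs` compare literal tokens instead of failing with `UnexpectedToken`. In plain Rust, this is the kind of macro pattern it enables (self-contained example, unrelated to the ra_mbe test fixture below):

    // The left-hand side matches the literal token `42`; expansion only
    // succeeds when the invocation supplies exactly that literal.
    macro_rules! answer {
        (42) => {
            "the answer"
        };
    }

    fn main() {
        assert_eq!(answer!(42), "the answer");
    }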
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs index 3554dc110..c938acf64 100644 --- a/crates/ra_mbe/src/subtree_source.rs +++ b/crates/ra_mbe/src/subtree_source.rs | |||
@@ -1,5 +1,5 @@ | |||
1 | use ra_parser::{TokenSource}; | 1 | use ra_parser::{TokenSource}; |
2 | use ra_syntax::{classify_literal, SmolStr, SyntaxKind, SyntaxKind::*}; | 2 | use ra_syntax::{classify_literal, SmolStr, SyntaxKind, SyntaxKind::*, T}; |
3 | use std::cell::{RefCell}; | 3 | use std::cell::{RefCell}; |
4 | 4 | ||
5 | // A Sequece of Token, | 5 | // A Sequece of Token, |
@@ -284,9 +284,9 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> { | |||
284 | 284 | ||
285 | fn convert_delim(d: tt::Delimiter, closing: bool) -> TtToken { | 285 | fn convert_delim(d: tt::Delimiter, closing: bool) -> TtToken { |
286 | let (kinds, texts) = match d { | 286 | let (kinds, texts) = match d { |
287 | tt::Delimiter::Parenthesis => ([L_PAREN, R_PAREN], "()"), | 287 | tt::Delimiter::Parenthesis => ([T!['('], T![')']], "()"), |
288 | tt::Delimiter::Brace => ([L_CURLY, R_CURLY], "{}"), | 288 | tt::Delimiter::Brace => ([T!['{'], T!['}']], "{}"), |
289 | tt::Delimiter::Bracket => ([L_BRACK, R_BRACK], "[]"), | 289 | tt::Delimiter::Bracket => ([T!['['], T![']']], "[]"), |
290 | tt::Delimiter::None => ([L_DOLLAR, R_DOLLAR], ""), | 290 | tt::Delimiter::None => ([L_DOLLAR, R_DOLLAR], ""), |
291 | }; | 291 | }; |
292 | 292 | ||
@@ -299,8 +299,8 @@ fn convert_delim(d: tt::Delimiter, closing: bool) -> TtToken { | |||
299 | fn convert_literal(l: &tt::Literal) -> TtToken { | 299 | fn convert_literal(l: &tt::Literal) -> TtToken { |
300 | let kind = | 300 | let kind = |
301 | classify_literal(&l.text).map(|tkn| tkn.kind).unwrap_or_else(|| match l.text.as_ref() { | 301 | classify_literal(&l.text).map(|tkn| tkn.kind).unwrap_or_else(|| match l.text.as_ref() { |
302 | "true" => SyntaxKind::TRUE_KW, | 302 | "true" => T![true], |
303 | "false" => SyntaxKind::FALSE_KW, | 303 | "false" => T![false], |
304 | _ => panic!("Fail to convert given literal {:#?}", &l), | 304 | _ => panic!("Fail to convert given literal {:#?}", &l), |
305 | }); | 305 | }); |
306 | 306 | ||
@@ -320,11 +320,11 @@ fn convert_ident(ident: &tt::Ident) -> TtToken { | |||
320 | fn convert_punct(p: &tt::Punct) -> TtToken { | 320 | fn convert_punct(p: &tt::Punct) -> TtToken { |
321 | let kind = match p.char { | 321 | let kind = match p.char { |
322 | // lexer may produce compound tokens for these ones | 322 | // lexer may produce compound tokens for these ones |
323 | '.' => DOT, | 323 | '.' => T![.], |
324 | ':' => COLON, | 324 | ':' => T![:], |
325 | '=' => EQ, | 325 | '=' => T![=], |
326 | '!' => EXCL, | 326 | '!' => T![!], |
327 | '-' => MINUS, | 327 | '-' => T![-], |
328 | c => SyntaxKind::from_char(c).unwrap(), | 328 | c => SyntaxKind::from_char(c).unwrap(), |
329 | }; | 329 | }; |
330 | let text = { | 330 | let text = { |
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs index 5e6a6f2a1..d8e344557 100644 --- a/crates/ra_mbe/src/syntax_bridge.rs +++ b/crates/ra_mbe/src/syntax_bridge.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | use ra_parser::{TreeSink, ParseError}; | 1 | use ra_parser::{TreeSink, ParseError}; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement, | 3 | AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement, |
4 | ast, SyntaxKind::*, TextUnit | 4 | ast, SyntaxKind::*, TextUnit, T |
5 | }; | 5 | }; |
6 | 6 | ||
7 | use crate::subtree_source::{SubtreeTokenSource, Querier}; | 7 | use crate::subtree_source::{SubtreeTokenSource, Querier}; |
@@ -211,9 +211,9 @@ fn convert_tt( | |||
211 | let first_child = tt.first_child_or_token()?; | 211 | let first_child = tt.first_child_or_token()?; |
212 | let last_child = tt.last_child_or_token()?; | 212 | let last_child = tt.last_child_or_token()?; |
213 | let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) { | 213 | let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) { |
214 | (L_PAREN, R_PAREN) => (tt::Delimiter::Parenthesis, true), | 214 | (T!['('], T![')']) => (tt::Delimiter::Parenthesis, true), |
215 | (L_CURLY, R_CURLY) => (tt::Delimiter::Brace, true), | 215 | (T!['{'], T!['}']) => (tt::Delimiter::Brace, true), |
216 | (L_BRACK, R_BRACK) => (tt::Delimiter::Bracket, true), | 216 | (T!['['], T![']']) => (tt::Delimiter::Bracket, true), |
217 | _ => (tt::Delimiter::None, false), | 217 | _ => (tt::Delimiter::None, false), |
218 | }; | 218 | }; |
219 | 219 | ||
@@ -248,23 +248,22 @@ fn convert_tt( | |||
248 | 248 | ||
249 | token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into()); | 249 | token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into()); |
250 | } else { | 250 | } else { |
251 | let child: tt::TokenTree = if token.kind() == SyntaxKind::TRUE_KW | 251 | let child: tt::TokenTree = |
252 | || token.kind() == SyntaxKind::FALSE_KW | 252 | if token.kind() == T![true] || token.kind() == T![false] { |
253 | { | 253 | tt::Leaf::from(tt::Literal { text: token.text().clone() }).into() |
254 | tt::Leaf::from(tt::Literal { text: token.text().clone() }).into() | 254 | } else if token.kind().is_keyword() |
255 | } else if token.kind().is_keyword() | 255 | || token.kind() == IDENT |
256 | || token.kind() == IDENT | 256 | || token.kind() == LIFETIME |
257 | || token.kind() == LIFETIME | 257 | { |
258 | { | 258 | let relative_range = token.range() - global_offset; |
259 | let relative_range = token.range() - global_offset; | 259 | let id = token_map.alloc(relative_range); |
260 | let id = token_map.alloc(relative_range); | 260 | let text = token.text().clone(); |
261 | let text = token.text().clone(); | 261 | tt::Leaf::from(tt::Ident { text, id }).into() |
262 | tt::Leaf::from(tt::Ident { text, id }).into() | 262 | } else if token.kind().is_literal() { |
263 | } else if token.kind().is_literal() { | 263 | tt::Leaf::from(tt::Literal { text: token.text().clone() }).into() |
264 | tt::Leaf::from(tt::Literal { text: token.text().clone() }).into() | 264 | } else { |
265 | } else { | 265 | return None; |
266 | return None; | 266 | }; |
267 | }; | ||
268 | token_trees.push(child); | 267 | token_trees.push(child); |
269 | } | 268 | } |
270 | } | 269 | } |
@@ -305,10 +304,8 @@ impl<'a, Q: Querier> TtTreeSink<'a, Q> { | |||
305 | } | 304 | } |
306 | 305 | ||
307 | fn is_delimiter(kind: SyntaxKind) -> bool { | 306 | fn is_delimiter(kind: SyntaxKind) -> bool { |
308 | use SyntaxKind::*; | ||
309 | |||
310 | match kind { | 307 | match kind { |
311 | L_PAREN | L_BRACK | L_CURLY | R_PAREN | R_BRACK | R_CURLY => true, | 308 | T!['('] | T!['['] | T!['{'] | T![')'] | T![']'] | T!['}'] => true, |
312 | _ => false, | 309 | _ => false, |
313 | } | 310 | } |
314 | } | 311 | } |
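The convert_tt change above keys the subtree delimiter off the first and last child tokens, and is_delimiter now spells those kinds with T![...]. A self-contained sketch of the same classification, with local stand-ins for the syntax kinds and tt::Delimiter (the names here are illustrative, not the real types):

    #[allow(dead_code)]
    enum Kind { LParen, RParen, LCurly, RCurly, LBrack, RBrack, Ident }

    #[derive(Debug, PartialEq)]
    enum Delimiter { Parenthesis, Brace, Bracket, None }

    // A recognized (open, close) pair picks the delimiter and tells the caller
    // to skip the delimiter tokens themselves; anything else is undelimited.
    fn classify(first: Kind, last: Kind) -> (Delimiter, bool) {
        match (first, last) {
            (Kind::LParen, Kind::RParen) => (Delimiter::Parenthesis, true),
            (Kind::LCurly, Kind::RCurly) => (Delimiter::Brace, true),
            (Kind::LBrack, Kind::RBrack) => (Delimiter::Bracket, true),
            _ => (Delimiter::None, false),
        }
    }

    fn main() {
        assert_eq!(classify(Kind::LCurly, Kind::RCurly), (Delimiter::Brace, true));
        assert_eq!(classify(Kind::Ident, Kind::Ident), (Delimiter::None, false));
    }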
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs index 004faf77e..e3a5ceecf 100644 --- a/crates/ra_mbe/src/tests.rs +++ b/crates/ra_mbe/src/tests.rs | |||
@@ -575,6 +575,20 @@ fn test_tt_to_stmts() { | |||
575 | ); | 575 | ); |
576 | } | 576 | } |
577 | 577 | ||
578 | #[test] | ||
579 | fn test_match_literal() { | ||
580 | let rules = create_rules( | ||
581 | r#" | ||
582 | macro_rules! foo { | ||
583 | ('(') => { | ||
584 | fn foo() {} | ||
585 | } | ||
586 | } | ||
587 | "#, | ||
588 | ); | ||
589 | assert_expansion(MacroKind::Items, &rules, "foo! ['(']", "fn foo () {}"); | ||
590 | } | ||
591 | |||
578 | // The following tests are ported from intellij-rust directly | 592 | // The following tests are ported from intellij-rust directly |
579 | // https://github.com/intellij-rust/intellij-rust/blob/c4e9feee4ad46e7953b1948c112533360b6087bb/src/test/kotlin/org/rust/lang/core/macros/RsMacroExpansionTest.kt | 593 | // https://github.com/intellij-rust/intellij-rust/blob/c4e9feee4ad46e7953b1948c112533360b6087bb/src/test/kotlin/org/rust/lang/core/macros/RsMacroExpansionTest.kt |
580 | 594 | ||
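The new test_match_literal checks that a matcher consisting of the literal token '(' expands correctly. Written as ordinary Rust, the macro under test behaves like this (the rule and expansion are taken directly from the test):

    macro_rules! foo {
        ('(') => {
            fn foo() {}
        };
    }

    // Invoking the macro with the matching char literal produces `fn foo() {}`.
    foo!['('];

    fn main() {
        foo();
    }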
diff --git a/crates/ra_parser/src/event.rs b/crates/ra_parser/src/event.rs index 87cf4eca0..51beb0866 100644 --- a/crates/ra_parser/src/event.rs +++ b/crates/ra_parser/src/event.rs | |||
@@ -38,7 +38,7 @@ pub(crate) enum Event { | |||
38 | /// The events for it would look like this: | 38 | /// The events for it would look like this: |
39 | /// | 39 | /// |
40 | /// | 40 | /// |
41 | /// START(PATH) IDENT('foo') FINISH START(PATH) COLONCOLON IDENT('bar') FINISH | 41 | /// START(PATH) IDENT('foo') FINISH START(PATH) T![::] IDENT('bar') FINISH |
42 | /// | /\ | 42 | /// | /\ |
43 | /// | | | 43 | /// | | |
44 | /// +------forward-parent------+ | 44 | /// +------forward-parent------+ |
diff --git a/crates/ra_parser/src/grammar.rs b/crates/ra_parser/src/grammar.rs index a538ec081..cf603eba1 100644 --- a/crates/ra_parser/src/grammar.rs +++ b/crates/ra_parser/src/grammar.rs | |||
@@ -59,7 +59,7 @@ pub(crate) fn macro_stmts(p: &mut Parser) { | |||
59 | let m = p.start(); | 59 | let m = p.start(); |
60 | 60 | ||
61 | while !p.at(EOF) { | 61 | while !p.at(EOF) { |
62 | if p.current() == SEMI { | 62 | if p.current() == T![;] { |
63 | p.bump(); | 63 | p.bump(); |
64 | continue; | 64 | continue; |
65 | } | 65 | } |
@@ -103,7 +103,7 @@ pub(crate) fn block(p: &mut Parser) { | |||
103 | pub(crate) fn meta_item(p: &mut Parser) { | 103 | pub(crate) fn meta_item(p: &mut Parser) { |
104 | fn is_delimiter(p: &mut Parser) -> bool { | 104 | fn is_delimiter(p: &mut Parser) -> bool { |
105 | match p.current() { | 105 | match p.current() { |
106 | L_CURLY | L_PAREN | L_BRACK => true, | 106 | T!['{'] | T!['('] | T!['['] => true, |
107 | _ => false, | 107 | _ => false, |
108 | } | 108 | } |
109 | } | 109 | } |
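meta_item above accepts a simple path, optionally followed by `= <literal>` (including the true/false keywords, per the hunk below) or by a delimited token tree. These are the attribute bodies one writes in everyday Rust, for example:

    #![allow(dead_code)]

    #[derive(Debug)]                  // simple path followed by a `(...)` token tree
    #[doc = "set the doc text via `path = literal`"]
    #[cfg_attr(test, allow(unused))]  // nested meta items inside the token tree
    struct S;

    fn main() {}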
@@ -123,12 +123,12 @@ pub(crate) fn meta_item(p: &mut Parser) { | |||
123 | // https://doc.rust-lang.org/reference/paths.html#simple-paths | 123 | // https://doc.rust-lang.org/reference/paths.html#simple-paths |
124 | // The start of a meta item must be a simple path | 124 | // The start of a meta item must be a simple path |
125 | match p.current() { | 125 | match p.current() { |
126 | IDENT | COLONCOLON | SUPER_KW | SELF_KW | CRATE_KW => p.bump(), | 126 | IDENT | T![::] | T![super] | T![self] | T![crate] => p.bump(), |
127 | EQ => { | 127 | T![=] => { |
128 | p.bump(); | 128 | p.bump(); |
129 | match p.current() { | 129 | match p.current() { |
130 | c if c.is_literal() => p.bump(), | 130 | c if c.is_literal() => p.bump(), |
131 | TRUE_KW | FALSE_KW => p.bump(), | 131 | T![true] | T![false] => p.bump(), |
132 | _ => {} | 132 | _ => {} |
133 | } | 133 | } |
134 | break; | 134 | break; |
@@ -158,7 +158,7 @@ pub(crate) fn reparser( | |||
158 | MATCH_ARM_LIST => items::match_arm_list, | 158 | MATCH_ARM_LIST => items::match_arm_list, |
159 | USE_TREE_LIST => items::use_tree_list, | 159 | USE_TREE_LIST => items::use_tree_list, |
160 | EXTERN_ITEM_LIST => items::extern_item_list, | 160 | EXTERN_ITEM_LIST => items::extern_item_list, |
161 | TOKEN_TREE if first_child? == L_CURLY => items::token_tree, | 161 | TOKEN_TREE if first_child? == T!['{'] => items::token_tree, |
162 | ITEM_LIST => match parent? { | 162 | ITEM_LIST => match parent? { |
163 | IMPL_BLOCK => items::impl_item_list, | 163 | IMPL_BLOCK => items::impl_item_list, |
164 | TRAIT_DEF => items::trait_item_list, | 164 | TRAIT_DEF => items::trait_item_list, |
@@ -184,26 +184,26 @@ impl BlockLike { | |||
184 | 184 | ||
185 | pub(crate) fn opt_visibility(p: &mut Parser) -> bool { | 185 | pub(crate) fn opt_visibility(p: &mut Parser) -> bool { |
186 | match p.current() { | 186 | match p.current() { |
187 | PUB_KW => { | 187 | T![pub] => { |
188 | let m = p.start(); | 188 | let m = p.start(); |
189 | p.bump(); | 189 | p.bump(); |
190 | if p.at(L_PAREN) { | 190 | if p.at(T!['(']) { |
191 | match p.nth(1) { | 191 | match p.nth(1) { |
192 | // test crate_visibility | 192 | // test crate_visibility |
193 | // pub(crate) struct S; | 193 | // pub(crate) struct S; |
194 | // pub(self) struct S; | 194 | // pub(self) struct S; |
195 | // pub(self) struct S; | 195 | // pub(self) struct S; |
196 | // pub(self) struct S; | 196 | // pub(self) struct S; |
197 | CRATE_KW | SELF_KW | SUPER_KW => { | 197 | T![crate] | T![self] | T![super] => { |
198 | p.bump(); | 198 | p.bump(); |
199 | p.bump(); | 199 | p.bump(); |
200 | p.expect(R_PAREN); | 200 | p.expect(T![')']); |
201 | } | 201 | } |
202 | IN_KW => { | 202 | T![in] => { |
203 | p.bump(); | 203 | p.bump(); |
204 | p.bump(); | 204 | p.bump(); |
205 | paths::use_path(p); | 205 | paths::use_path(p); |
206 | p.expect(R_PAREN); | 206 | p.expect(T![')']); |
207 | } | 207 | } |
208 | _ => (), | 208 | _ => (), |
209 | } | 209 | } |
@@ -217,7 +217,7 @@ pub(crate) fn opt_visibility(p: &mut Parser) -> bool { | |||
217 | // | 217 | // |
218 | // test crate_keyword_path | 218 | // test crate_keyword_path |
219 | // fn foo() { crate::foo(); } | 219 | // fn foo() { crate::foo(); } |
220 | CRATE_KW if p.nth(1) != COLONCOLON => { | 220 | T![crate] if p.nth(1) != T![::] => { |
221 | let m = p.start(); | 221 | let m = p.start(); |
222 | p.bump(); | 222 | p.bump(); |
223 | m.complete(p, VISIBILITY); | 223 | m.complete(p, VISIBILITY); |
@@ -228,10 +228,10 @@ pub(crate) fn opt_visibility(p: &mut Parser) -> bool { | |||
228 | } | 228 | } |
229 | 229 | ||
230 | fn opt_alias(p: &mut Parser) { | 230 | fn opt_alias(p: &mut Parser) { |
231 | if p.at(AS_KW) { | 231 | if p.at(T![as]) { |
232 | let m = p.start(); | 232 | let m = p.start(); |
233 | p.bump(); | 233 | p.bump(); |
234 | if !p.eat(UNDERSCORE) { | 234 | if !p.eat(T![_]) { |
235 | name(p); | 235 | name(p); |
236 | } | 236 | } |
237 | m.complete(p, ALIAS); | 237 | m.complete(p, ALIAS); |
@@ -239,7 +239,7 @@ fn opt_alias(p: &mut Parser) { | |||
239 | } | 239 | } |
240 | 240 | ||
241 | fn abi(p: &mut Parser) { | 241 | fn abi(p: &mut Parser) { |
242 | assert!(p.at(EXTERN_KW)); | 242 | assert!(p.at(T![extern])); |
243 | let abi = p.start(); | 243 | let abi = p.start(); |
244 | p.bump(); | 244 | p.bump(); |
245 | match p.current() { | 245 | match p.current() { |
@@ -250,7 +250,7 @@ fn abi(p: &mut Parser) { | |||
250 | } | 250 | } |
251 | 251 | ||
252 | fn opt_fn_ret_type(p: &mut Parser) -> bool { | 252 | fn opt_fn_ret_type(p: &mut Parser) -> bool { |
253 | if p.at(THIN_ARROW) { | 253 | if p.at(T![->]) { |
254 | let m = p.start(); | 254 | let m = p.start(); |
255 | p.bump(); | 255 | p.bump(); |
256 | types::type_(p); | 256 | types::type_(p); |
@@ -280,21 +280,21 @@ fn name_ref(p: &mut Parser) { | |||
280 | let m = p.start(); | 280 | let m = p.start(); |
281 | p.bump(); | 281 | p.bump(); |
282 | m.complete(p, NAME_REF); | 282 | m.complete(p, NAME_REF); |
283 | } else if p.at(SELF_KW) { | 283 | } else if p.at(T![self]) { |
284 | let m = p.start(); | 284 | let m = p.start(); |
285 | p.bump(); | 285 | p.bump(); |
286 | m.complete(p, SELF_KW); | 286 | m.complete(p, T![self]); |
287 | } else { | 287 | } else { |
288 | p.err_and_bump("expected identifier"); | 288 | p.err_and_bump("expected identifier"); |
289 | } | 289 | } |
290 | } | 290 | } |
291 | 291 | ||
292 | fn error_block(p: &mut Parser, message: &str) { | 292 | fn error_block(p: &mut Parser, message: &str) { |
293 | assert!(p.at(L_CURLY)); | 293 | assert!(p.at(T!['{'])); |
294 | let m = p.start(); | 294 | let m = p.start(); |
295 | p.error(message); | 295 | p.error(message); |
296 | p.bump(); | 296 | p.bump(); |
297 | expressions::expr_block_contents(p); | 297 | expressions::expr_block_contents(p); |
298 | p.eat(R_CURLY); | 298 | p.eat(T!['}']); |
299 | m.complete(p, ERROR); | 299 | m.complete(p, ERROR); |
300 | } | 300 | } |
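opt_visibility above covers `pub`, the restricted forms `pub(crate)`, `pub(self)`, `pub(super)`, `pub(in <path>)`, and a bare `crate` when it is not the start of a `crate::` path. As plain Rust (the bare `crate` visibility shorthand never reached stable Rust, so it is omitted here):

    #[allow(dead_code)]
    mod outer {
        pub struct A;               // T![pub]
        pub(crate) struct B;        // T![pub] '(' T![crate] ')'
        pub(self) struct C;         // T![pub] '(' T![self] ')'
        pub(super) struct D;        // T![pub] '(' T![super] ')'
        pub(in crate) struct E;     // T![pub] '(' T![in] use_path ')'
    }

    fn main() {}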
diff --git a/crates/ra_parser/src/grammar/attributes.rs b/crates/ra_parser/src/grammar/attributes.rs index cd30e8a45..20d58445f 100644 --- a/crates/ra_parser/src/grammar/attributes.rs +++ b/crates/ra_parser/src/grammar/attributes.rs | |||
@@ -1,28 +1,28 @@ | |||
1 | use super::*; | 1 | use super::*; |
2 | 2 | ||
3 | pub(super) fn inner_attributes(p: &mut Parser) { | 3 | pub(super) fn inner_attributes(p: &mut Parser) { |
4 | while p.current() == POUND && p.nth(1) == EXCL { | 4 | while p.current() == T![#] && p.nth(1) == T![!] { |
5 | attribute(p, true) | 5 | attribute(p, true) |
6 | } | 6 | } |
7 | } | 7 | } |
8 | 8 | ||
9 | pub(super) fn outer_attributes(p: &mut Parser) { | 9 | pub(super) fn outer_attributes(p: &mut Parser) { |
10 | while p.at(POUND) { | 10 | while p.at(T![#]) { |
11 | attribute(p, false) | 11 | attribute(p, false) |
12 | } | 12 | } |
13 | } | 13 | } |
14 | 14 | ||
15 | fn attribute(p: &mut Parser, inner: bool) { | 15 | fn attribute(p: &mut Parser, inner: bool) { |
16 | let attr = p.start(); | 16 | let attr = p.start(); |
17 | assert!(p.at(POUND)); | 17 | assert!(p.at(T![#])); |
18 | p.bump(); | 18 | p.bump(); |
19 | 19 | ||
20 | if inner { | 20 | if inner { |
21 | assert!(p.at(EXCL)); | 21 | assert!(p.at(T![!])); |
22 | p.bump(); | 22 | p.bump(); |
23 | } | 23 | } |
24 | 24 | ||
25 | if p.at(L_BRACK) { | 25 | if p.at(T!['[']) { |
26 | items::token_tree(p); | 26 | items::token_tree(p); |
27 | } else { | 27 | } else { |
28 | p.error("expected `[`"); | 28 | p.error("expected `[`"); |
diff --git a/crates/ra_parser/src/grammar/expressions.rs b/crates/ra_parser/src/grammar/expressions.rs index 9fe529f53..bb6c78b5f 100644 --- a/crates/ra_parser/src/grammar/expressions.rs +++ b/crates/ra_parser/src/grammar/expressions.rs | |||
@@ -36,14 +36,14 @@ fn expr_no_struct(p: &mut Parser) { | |||
36 | // fn c() { 1; 2; } | 36 | // fn c() { 1; 2; } |
37 | // fn d() { 1; 2 } | 37 | // fn d() { 1; 2 } |
38 | pub(crate) fn block(p: &mut Parser) { | 38 | pub(crate) fn block(p: &mut Parser) { |
39 | if !p.at(L_CURLY) { | 39 | if !p.at(T!['{']) { |
40 | p.error("expected a block"); | 40 | p.error("expected a block"); |
41 | return; | 41 | return; |
42 | } | 42 | } |
43 | let m = p.start(); | 43 | let m = p.start(); |
44 | p.bump(); | 44 | p.bump(); |
45 | expr_block_contents(p); | 45 | expr_block_contents(p); |
46 | p.expect(R_CURLY); | 46 | p.expect(T!['}']); |
47 | m.complete(p, BLOCK); | 47 | m.complete(p, BLOCK); |
48 | } | 48 | } |
49 | 49 | ||
@@ -65,10 +65,10 @@ pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) { | |||
65 | // #[C] #[D] {} | 65 | // #[C] #[D] {} |
66 | // #[D] return (); | 66 | // #[D] return (); |
67 | // } | 67 | // } |
68 | let has_attrs = p.at(POUND); | 68 | let has_attrs = p.at(T![#]); |
69 | attributes::outer_attributes(p); | 69 | attributes::outer_attributes(p); |
70 | 70 | ||
71 | if p.at(LET_KW) { | 71 | if p.at(T![let]) { |
72 | let_stmt(p, m, with_semi); | 72 | let_stmt(p, m, with_semi); |
73 | return; | 73 | return; |
74 | } | 74 | } |
@@ -90,7 +90,7 @@ pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) { | |||
90 | p.error(format!("attributes are not allowed on {:?}", kind)); | 90 | p.error(format!("attributes are not allowed on {:?}", kind)); |
91 | } | 91 | } |
92 | 92 | ||
93 | if p.at(R_CURLY) { | 93 | if p.at(T!['}']) { |
94 | // test attr_on_last_expr_in_block | 94 | // test attr_on_last_expr_in_block |
95 | // fn foo() { | 95 | // fn foo() { |
96 | // { #[A] bar!()? } | 96 | // { #[A] bar!()? } |
@@ -121,15 +121,15 @@ pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) { | |||
121 | match with_semi { | 121 | match with_semi { |
122 | StmtWithSemi::Yes => { | 122 | StmtWithSemi::Yes => { |
123 | if blocklike.is_block() { | 123 | if blocklike.is_block() { |
124 | p.eat(SEMI); | 124 | p.eat(T![;]); |
125 | } else { | 125 | } else { |
126 | p.expect(SEMI); | 126 | p.expect(T![;]); |
127 | } | 127 | } |
128 | } | 128 | } |
129 | StmtWithSemi::No => {} | 129 | StmtWithSemi::No => {} |
130 | StmtWithSemi::Optional => { | 130 | StmtWithSemi::Optional => { |
131 | if p.at(SEMI) { | 131 | if p.at(T![;]) { |
132 | p.eat(SEMI); | 132 | p.eat(T![;]); |
133 | } | 133 | } |
134 | } | 134 | } |
135 | } | 135 | } |
@@ -145,24 +145,24 @@ pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) { | |||
145 | // let d: i32 = 92; | 145 | // let d: i32 = 92; |
146 | // } | 146 | // } |
147 | fn let_stmt(p: &mut Parser, m: Marker, with_semi: StmtWithSemi) { | 147 | fn let_stmt(p: &mut Parser, m: Marker, with_semi: StmtWithSemi) { |
148 | assert!(p.at(LET_KW)); | 148 | assert!(p.at(T![let])); |
149 | p.bump(); | 149 | p.bump(); |
150 | patterns::pattern(p); | 150 | patterns::pattern(p); |
151 | if p.at(COLON) { | 151 | if p.at(T![:]) { |
152 | types::ascription(p); | 152 | types::ascription(p); |
153 | } | 153 | } |
154 | if p.eat(EQ) { | 154 | if p.eat(T![=]) { |
155 | expressions::expr(p); | 155 | expressions::expr(p); |
156 | } | 156 | } |
157 | 157 | ||
158 | match with_semi { | 158 | match with_semi { |
159 | StmtWithSemi::Yes => { | 159 | StmtWithSemi::Yes => { |
160 | p.expect(SEMI); | 160 | p.expect(T![;]); |
161 | } | 161 | } |
162 | StmtWithSemi::No => {} | 162 | StmtWithSemi::No => {} |
163 | StmtWithSemi::Optional => { | 163 | StmtWithSemi::Optional => { |
164 | if p.at(SEMI) { | 164 | if p.at(T![;]) { |
165 | p.eat(SEMI); | 165 | p.eat(T![;]); |
166 | } | 166 | } |
167 | } | 167 | } |
168 | } | 168 | } |
@@ -174,12 +174,12 @@ pub(crate) fn expr_block_contents(p: &mut Parser) { | |||
174 | // This is checked by a validator | 174 | // This is checked by a validator |
175 | attributes::inner_attributes(p); | 175 | attributes::inner_attributes(p); |
176 | 176 | ||
177 | while !p.at(EOF) && !p.at(R_CURLY) { | 177 | while !p.at(EOF) && !p.at(T!['}']) { |
178 | // test nocontentexpr | 178 | // test nocontentexpr |
179 | // fn foo(){ | 179 | // fn foo(){ |
180 | // ;;;some_expr();;;;{;;;};;;;Ok(()) | 180 | // ;;;some_expr();;;;{;;;};;;;Ok(()) |
181 | // } | 181 | // } |
182 | if p.current() == SEMI { | 182 | if p.current() == T![;] { |
183 | p.bump(); | 183 | p.bump(); |
184 | continue; | 184 | continue; |
185 | } | 185 | } |
@@ -202,41 +202,41 @@ enum Op { | |||
202 | fn current_op(p: &Parser) -> (u8, Op) { | 202 | fn current_op(p: &Parser) -> (u8, Op) { |
203 | if let Some(t) = p.current3() { | 203 | if let Some(t) = p.current3() { |
204 | match t { | 204 | match t { |
205 | (L_ANGLE, L_ANGLE, EQ) => return (1, Op::Composite(SHLEQ, 3)), | 205 | (T![<], T![<], T![=]) => return (1, Op::Composite(T![<<=], 3)), |
206 | (R_ANGLE, R_ANGLE, EQ) => return (1, Op::Composite(SHREQ, 3)), | 206 | (T![>], T![>], T![=]) => return (1, Op::Composite(T![>>=], 3)), |
207 | _ => (), | 207 | _ => (), |
208 | } | 208 | } |
209 | } | 209 | } |
210 | 210 | ||
211 | if let Some(t) = p.current2() { | 211 | if let Some(t) = p.current2() { |
212 | match t { | 212 | match t { |
213 | (PLUS, EQ) => return (1, Op::Composite(PLUSEQ, 2)), | 213 | (T![+], T![=]) => return (1, Op::Composite(T![+=], 2)), |
214 | (MINUS, EQ) => return (1, Op::Composite(MINUSEQ, 2)), | 214 | (T![-], T![=]) => return (1, Op::Composite(T![-=], 2)), |
215 | (STAR, EQ) => return (1, Op::Composite(STAREQ, 2)), | 215 | (T![*], T![=]) => return (1, Op::Composite(T![*=], 2)), |
216 | (PERCENT, EQ) => return (1, Op::Composite(PERCENTEQ, 2)), | 216 | (T![%], T![=]) => return (1, Op::Composite(T![%=], 2)), |
217 | (SLASH, EQ) => return (1, Op::Composite(SLASHEQ, 2)), | 217 | (T![/], T![=]) => return (1, Op::Composite(T![/=], 2)), |
218 | (PIPE, EQ) => return (1, Op::Composite(PIPEEQ, 2)), | 218 | (T![|], T![=]) => return (1, Op::Composite(T![|=], 2)), |
219 | (AMP, EQ) => return (1, Op::Composite(AMPEQ, 2)), | 219 | (T![&], T![=]) => return (1, Op::Composite(T![&=], 2)), |
220 | (CARET, EQ) => return (1, Op::Composite(CARETEQ, 2)), | 220 | (T![^], T![=]) => return (1, Op::Composite(T![^=], 2)), |
221 | (PIPE, PIPE) => return (3, Op::Composite(PIPEPIPE, 2)), | 221 | (T![|], T![|]) => return (3, Op::Composite(T![||], 2)), |
222 | (AMP, AMP) => return (4, Op::Composite(AMPAMP, 2)), | 222 | (T![&], T![&]) => return (4, Op::Composite(T![&&], 2)), |
223 | (L_ANGLE, EQ) => return (5, Op::Composite(LTEQ, 2)), | 223 | (T![<], T![=]) => return (5, Op::Composite(T![<=], 2)), |
224 | (R_ANGLE, EQ) => return (5, Op::Composite(GTEQ, 2)), | 224 | (T![>], T![=]) => return (5, Op::Composite(T![>=], 2)), |
225 | (L_ANGLE, L_ANGLE) => return (9, Op::Composite(SHL, 2)), | 225 | (T![<], T![<]) => return (9, Op::Composite(T![<<], 2)), |
226 | (R_ANGLE, R_ANGLE) => return (9, Op::Composite(SHR, 2)), | 226 | (T![>], T![>]) => return (9, Op::Composite(T![>>], 2)), |
227 | _ => (), | 227 | _ => (), |
228 | } | 228 | } |
229 | } | 229 | } |
230 | 230 | ||
231 | let bp = match p.current() { | 231 | let bp = match p.current() { |
232 | EQ => 1, | 232 | T![=] => 1, |
233 | DOTDOT | DOTDOTEQ => 2, | 233 | T![..] | T![..=] => 2, |
234 | EQEQ | NEQ | L_ANGLE | R_ANGLE => 5, | 234 | T![==] | T![!=] | T![<] | T![>] => 5, |
235 | PIPE => 6, | 235 | T![|] => 6, |
236 | CARET => 7, | 236 | T![^] => 7, |
237 | AMP => 8, | 237 | T![&] => 8, |
238 | MINUS | PLUS => 10, | 238 | T![-] | T![+] => 10, |
239 | STAR | SLASH | PERCENT => 11, | 239 | T![*] | T![/] | T![%] => 11, |
240 | _ => 0, | 240 | _ => 0, |
241 | }; | 241 | }; |
242 | (bp, Op::Simple) | 242 | (bp, Op::Simple) |
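current_op above first glues split compound operators back together via current2/current3 (for example `<`, `<`, `=` becomes T![<<=]) and then assigns each simple operator a binding power: `=` is weakest at 1, ranges 2, `||` 3, `&&` 4, comparisons 5, then `|`, `^`, `&`, shifts, `+`/`-`, and `*`/`/`/`%` at 11. Higher power binds tighter, which is what produces the usual grouping; a small check of that ordering in plain Rust:

    #[allow(unused_parens)]
    fn main() {
        let (a, b, c, d, e) = (2, 3, 4, 14, false);
        // `*` (11) binds tighter than `+` (10), which beats `==` (5),
        // then `&&` (4), then `||` (3), the weakest operator here.
        let implicit = a + b * c == d && d > 0 || e;
        let explicit = (((a + (b * c)) == d) && (d > 0)) || e;
        assert_eq!(implicit, explicit);
    }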
@@ -284,7 +284,7 @@ fn expr_bp( | |||
284 | newly_dollar_open = false; | 284 | newly_dollar_open = false; |
285 | } | 285 | } |
286 | 286 | ||
287 | let is_range = p.current() == DOTDOT || p.current() == DOTDOTEQ; | 287 | let is_range = p.current() == T![..] || p.current() == T![..=]; |
288 | let (op_bp, op) = current_op(p); | 288 | let (op_bp, op) = current_op(p); |
289 | if op_bp < bp { | 289 | if op_bp < bp { |
290 | break; | 290 | break; |
@@ -318,10 +318,10 @@ fn lhs( | |||
318 | // let _ = &1; | 318 | // let _ = &1; |
319 | // let _ = &mut &f(); | 319 | // let _ = &mut &f(); |
320 | // } | 320 | // } |
321 | AMP => { | 321 | T![&] => { |
322 | m = p.start(); | 322 | m = p.start(); |
323 | p.bump(); | 323 | p.bump(); |
324 | p.eat(MUT_KW); | 324 | p.eat(T![mut]); |
325 | REF_EXPR | 325 | REF_EXPR |
326 | } | 326 | } |
327 | // test unary_expr | 327 | // test unary_expr |
@@ -330,14 +330,14 @@ fn lhs( | |||
330 | // !!true; | 330 | // !!true; |
331 | // --1; | 331 | // --1; |
332 | // } | 332 | // } |
333 | STAR | EXCL | MINUS => { | 333 | T![*] | T![!] | T![-] => { |
334 | m = p.start(); | 334 | m = p.start(); |
335 | p.bump(); | 335 | p.bump(); |
336 | PREFIX_EXPR | 336 | PREFIX_EXPR |
337 | } | 337 | } |
338 | // test full_range_expr | 338 | // test full_range_expr |
339 | // fn foo() { xs[..]; } | 339 | // fn foo() { xs[..]; } |
340 | DOTDOT | DOTDOTEQ => { | 340 | T![..] | T![..=] => { |
341 | m = p.start(); | 341 | m = p.start(); |
342 | p.bump(); | 342 | p.bump(); |
343 | if p.at_ts(EXPR_FIRST) { | 343 | if p.at_ts(EXPR_FIRST) { |
@@ -375,21 +375,21 @@ fn postfix_expr( | |||
375 | // [] => {} | 375 | // [] => {} |
376 | // } | 376 | // } |
377 | // } | 377 | // } |
378 | L_PAREN if allow_calls => call_expr(p, lhs), | 378 | T!['('] if allow_calls => call_expr(p, lhs), |
379 | L_BRACK if allow_calls => index_expr(p, lhs), | 379 | T!['['] if allow_calls => index_expr(p, lhs), |
380 | DOT if p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON) => { | 380 | T![.] if p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth(2) == T![::]) => { |
381 | method_call_expr(p, lhs) | 381 | method_call_expr(p, lhs) |
382 | } | 382 | } |
383 | DOT => field_expr(p, lhs), | 383 | T![.] => field_expr(p, lhs), |
384 | // test postfix_range | 384 | // test postfix_range |
385 | // fn foo() { let x = 1..; } | 385 | // fn foo() { let x = 1..; } |
386 | DOTDOT | DOTDOTEQ if !EXPR_FIRST.contains(p.nth(1)) => { | 386 | T![..] | T![..=] if !EXPR_FIRST.contains(p.nth(1)) => { |
387 | let m = lhs.precede(p); | 387 | let m = lhs.precede(p); |
388 | p.bump(); | 388 | p.bump(); |
389 | m.complete(p, RANGE_EXPR) | 389 | m.complete(p, RANGE_EXPR) |
390 | } | 390 | } |
391 | QUESTION => try_expr(p, lhs), | 391 | T![?] => try_expr(p, lhs), |
392 | AS_KW => cast_expr(p, lhs), | 392 | T![as] => cast_expr(p, lhs), |
393 | _ => break, | 393 | _ => break, |
394 | }; | 394 | }; |
395 | allow_calls = true | 395 | allow_calls = true |
@@ -405,7 +405,7 @@ fn postfix_expr( | |||
405 | // f(<Foo as Trait>::func()); | 405 | // f(<Foo as Trait>::func()); |
406 | // } | 406 | // } |
407 | fn call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | 407 | fn call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { |
408 | assert!(p.at(L_PAREN)); | 408 | assert!(p.at(T!['('])); |
409 | let m = lhs.precede(p); | 409 | let m = lhs.precede(p); |
410 | arg_list(p); | 410 | arg_list(p); |
411 | m.complete(p, CALL_EXPR) | 411 | m.complete(p, CALL_EXPR) |
@@ -416,11 +416,11 @@ fn call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | |||
416 | // x[1][2]; | 416 | // x[1][2]; |
417 | // } | 417 | // } |
418 | fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | 418 | fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { |
419 | assert!(p.at(L_BRACK)); | 419 | assert!(p.at(T!['['])); |
420 | let m = lhs.precede(p); | 420 | let m = lhs.precede(p); |
421 | p.bump(); | 421 | p.bump(); |
422 | expr(p); | 422 | expr(p); |
423 | p.expect(R_BRACK); | 423 | p.expect(T![']']); |
424 | m.complete(p, INDEX_EXPR) | 424 | m.complete(p, INDEX_EXPR) |
425 | } | 425 | } |
426 | 426 | ||
@@ -430,12 +430,12 @@ fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | |||
430 | // y.bar::<T>(1, 2,); | 430 | // y.bar::<T>(1, 2,); |
431 | // } | 431 | // } |
432 | fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | 432 | fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { |
433 | assert!(p.at(DOT) && p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON)); | 433 | assert!(p.at(T![.]) && p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth(2) == T![::])); |
434 | let m = lhs.precede(p); | 434 | let m = lhs.precede(p); |
435 | p.bump(); | 435 | p.bump(); |
436 | name_ref(p); | 436 | name_ref(p); |
437 | type_args::opt_type_arg_list(p, true); | 437 | type_args::opt_type_arg_list(p, true); |
438 | if p.at(L_PAREN) { | 438 | if p.at(T!['(']) { |
439 | arg_list(p); | 439 | arg_list(p); |
440 | } | 440 | } |
441 | m.complete(p, METHOD_CALL_EXPR) | 441 | m.complete(p, METHOD_CALL_EXPR) |
@@ -455,7 +455,7 @@ fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | |||
455 | // x.0x01; | 455 | // x.0x01; |
456 | // } | 456 | // } |
457 | fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | 457 | fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { |
458 | assert!(p.at(DOT)); | 458 | assert!(p.at(T![.])); |
459 | let m = lhs.precede(p); | 459 | let m = lhs.precede(p); |
460 | p.bump(); | 460 | p.bump(); |
461 | if p.at(IDENT) { | 461 | if p.at(IDENT) { |
@@ -463,7 +463,7 @@ fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | |||
463 | } else if p.at(INT_NUMBER) { | 463 | } else if p.at(INT_NUMBER) { |
464 | p.bump(); | 464 | p.bump(); |
465 | } else if p.at(FLOAT_NUMBER) { | 465 | } else if p.at(FLOAT_NUMBER) { |
466 | // FIXME: How to recover and instead parse INT + DOT? | 466 | // FIXME: How to recover and instead parse INT + T![.]? |
467 | p.bump(); | 467 | p.bump(); |
468 | } else { | 468 | } else { |
469 | p.error("expected field name or number") | 469 | p.error("expected field name or number") |
@@ -476,7 +476,7 @@ fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | |||
476 | // x?; | 476 | // x?; |
477 | // } | 477 | // } |
478 | fn try_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | 478 | fn try_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { |
479 | assert!(p.at(QUESTION)); | 479 | assert!(p.at(T![?])); |
480 | let m = lhs.precede(p); | 480 | let m = lhs.precede(p); |
481 | p.bump(); | 481 | p.bump(); |
482 | m.complete(p, TRY_EXPR) | 482 | m.complete(p, TRY_EXPR) |
@@ -490,7 +490,7 @@ fn try_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | |||
490 | // 0x36 as u8 <= 0x37; | 490 | // 0x36 as u8 <= 0x37; |
491 | // } | 491 | // } |
492 | fn cast_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | 492 | fn cast_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { |
493 | assert!(p.at(AS_KW)); | 493 | assert!(p.at(T![as])); |
494 | let m = lhs.precede(p); | 494 | let m = lhs.precede(p); |
495 | p.bump(); | 495 | p.bump(); |
496 | // Use type_no_bounds(), because cast expressions are not | 496 | // Use type_no_bounds(), because cast expressions are not |
@@ -500,20 +500,20 @@ fn cast_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | |||
500 | } | 500 | } |
501 | 501 | ||
502 | fn arg_list(p: &mut Parser) { | 502 | fn arg_list(p: &mut Parser) { |
503 | assert!(p.at(L_PAREN)); | 503 | assert!(p.at(T!['('])); |
504 | let m = p.start(); | 504 | let m = p.start(); |
505 | p.bump(); | 505 | p.bump(); |
506 | while !p.at(R_PAREN) && !p.at(EOF) { | 506 | while !p.at(T![')']) && !p.at(EOF) { |
507 | if !p.at_ts(EXPR_FIRST) { | 507 | if !p.at_ts(EXPR_FIRST) { |
508 | p.error("expected expression"); | 508 | p.error("expected expression"); |
509 | break; | 509 | break; |
510 | } | 510 | } |
511 | expr(p); | 511 | expr(p); |
512 | if !p.at(R_PAREN) && !p.expect(COMMA) { | 512 | if !p.at(T![')']) && !p.expect(T![,]) { |
513 | break; | 513 | break; |
514 | } | 514 | } |
515 | } | 515 | } |
516 | p.eat(R_PAREN); | 516 | p.eat(T![')']); |
517 | m.complete(p, ARG_LIST); | 517 | m.complete(p, ARG_LIST); |
518 | } | 518 | } |
519 | 519 | ||
@@ -525,15 +525,15 @@ fn arg_list(p: &mut Parser) { | |||
525 | // let _ = format!(); | 525 | // let _ = format!(); |
526 | // } | 526 | // } |
527 | fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) { | 527 | fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) { |
528 | assert!(paths::is_path_start(p) || p.at(L_ANGLE)); | 528 | assert!(paths::is_path_start(p) || p.at(T![<])); |
529 | let m = p.start(); | 529 | let m = p.start(); |
530 | paths::expr_path(p); | 530 | paths::expr_path(p); |
531 | match p.current() { | 531 | match p.current() { |
532 | L_CURLY if !r.forbid_structs => { | 532 | T!['{'] if !r.forbid_structs => { |
533 | named_field_list(p); | 533 | named_field_list(p); |
534 | (m.complete(p, STRUCT_LIT), BlockLike::NotBlock) | 534 | (m.complete(p, STRUCT_LIT), BlockLike::NotBlock) |
535 | } | 535 | } |
536 | EXCL => { | 536 | T![!] => { |
537 | let block_like = items::macro_call_after_excl(p); | 537 | let block_like = items::macro_call_after_excl(p); |
538 | (m.complete(p, MACRO_CALL), block_like) | 538 | (m.complete(p, MACRO_CALL), block_like) |
539 | } | 539 | } |
@@ -548,35 +548,35 @@ fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) { | |||
548 | // S { x, y: 32, ..Default::default() }; | 548 | // S { x, y: 32, ..Default::default() }; |
549 | // } | 549 | // } |
550 | pub(crate) fn named_field_list(p: &mut Parser) { | 550 | pub(crate) fn named_field_list(p: &mut Parser) { |
551 | assert!(p.at(L_CURLY)); | 551 | assert!(p.at(T!['{'])); |
552 | let m = p.start(); | 552 | let m = p.start(); |
553 | p.bump(); | 553 | p.bump(); |
554 | while !p.at(EOF) && !p.at(R_CURLY) { | 554 | while !p.at(EOF) && !p.at(T!['}']) { |
555 | match p.current() { | 555 | match p.current() { |
556 | // test struct_literal_field_with_attr | 556 | // test struct_literal_field_with_attr |
557 | // fn main() { | 557 | // fn main() { |
558 | // S { #[cfg(test)] field: 1 } | 558 | // S { #[cfg(test)] field: 1 } |
559 | // } | 559 | // } |
560 | IDENT | POUND => { | 560 | IDENT | T![#] => { |
561 | let m = p.start(); | 561 | let m = p.start(); |
562 | attributes::outer_attributes(p); | 562 | attributes::outer_attributes(p); |
563 | name_ref(p); | 563 | name_ref(p); |
564 | if p.eat(COLON) { | 564 | if p.eat(T![:]) { |
565 | expr(p); | 565 | expr(p); |
566 | } | 566 | } |
567 | m.complete(p, NAMED_FIELD); | 567 | m.complete(p, NAMED_FIELD); |
568 | } | 568 | } |
569 | DOTDOT => { | 569 | T![..] => { |
570 | p.bump(); | 570 | p.bump(); |
571 | expr(p); | 571 | expr(p); |
572 | } | 572 | } |
573 | L_CURLY => error_block(p, "expected a field"), | 573 | T!['{'] => error_block(p, "expected a field"), |
574 | _ => p.err_and_bump("expected identifier"), | 574 | _ => p.err_and_bump("expected identifier"), |
575 | } | 575 | } |
576 | if !p.at(R_CURLY) { | 576 | if !p.at(T!['}']) { |
577 | p.expect(COMMA); | 577 | p.expect(T![,]); |
578 | } | 578 | } |
579 | } | 579 | } |
580 | p.expect(R_CURLY); | 580 | p.expect(T!['}']); |
581 | m.complete(p, NAMED_FIELD_LIST); | 581 | m.complete(p, NAMED_FIELD_LIST); |
582 | } | 582 | } |
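named_field_list above handles shorthand fields, `name: expr` fields, attributes on fields, a `..` spread, and recovers on a stray block. The accepted literal shapes, matching the test comments:

    #[derive(Default)]
    struct S { x: i32, y: i32, z: i32 }

    fn main() {
        let x = 1;
        // shorthand `x`, explicit `y: 32`, and a trailing `..` spread
        let s = S { x, y: 32, ..Default::default() };
        assert_eq!((s.x, s.y, s.z), (1, 32, 0));
    }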
diff --git a/crates/ra_parser/src/grammar/expressions/atom.rs b/crates/ra_parser/src/grammar/expressions/atom.rs index 8dc7e44a9..8b1a1de49 100644 --- a/crates/ra_parser/src/grammar/expressions/atom.rs +++ b/crates/ra_parser/src/grammar/expressions/atom.rs | |||
@@ -60,29 +60,29 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar | |||
60 | if let Some(m) = literal(p) { | 60 | if let Some(m) = literal(p) { |
61 | return Some((m, BlockLike::NotBlock)); | 61 | return Some((m, BlockLike::NotBlock)); |
62 | } | 62 | } |
63 | if paths::is_path_start(p) || p.at(L_ANGLE) { | 63 | if paths::is_path_start(p) || p.at(T![<]) { |
64 | return Some(path_expr(p, r)); | 64 | return Some(path_expr(p, r)); |
65 | } | 65 | } |
66 | let la = p.nth(1); | 66 | let la = p.nth(1); |
67 | let done = match p.current() { | 67 | let done = match p.current() { |
68 | L_PAREN => tuple_expr(p), | 68 | T!['('] => tuple_expr(p), |
69 | L_BRACK => array_expr(p), | 69 | T!['['] => array_expr(p), |
70 | PIPE => lambda_expr(p), | 70 | T![|] => lambda_expr(p), |
71 | MOVE_KW if la == PIPE => lambda_expr(p), | 71 | T![move] if la == T![|] => lambda_expr(p), |
72 | ASYNC_KW if la == PIPE || (la == MOVE_KW && p.nth(2) == PIPE) => lambda_expr(p), | 72 | T![async] if la == T![|] || (la == T![move] && p.nth(2) == T![|]) => lambda_expr(p), |
73 | IF_KW => if_expr(p), | 73 | T![if] => if_expr(p), |
74 | 74 | ||
75 | LOOP_KW => loop_expr(p, None), | 75 | T![loop] => loop_expr(p, None), |
76 | FOR_KW => for_expr(p, None), | 76 | T![for] => for_expr(p, None), |
77 | WHILE_KW => while_expr(p, None), | 77 | T![while] => while_expr(p, None), |
78 | LIFETIME if la == COLON => { | 78 | LIFETIME if la == T![:] => { |
79 | let m = p.start(); | 79 | let m = p.start(); |
80 | label(p); | 80 | label(p); |
81 | match p.current() { | 81 | match p.current() { |
82 | LOOP_KW => loop_expr(p, Some(m)), | 82 | T![loop] => loop_expr(p, Some(m)), |
83 | FOR_KW => for_expr(p, Some(m)), | 83 | T![for] => for_expr(p, Some(m)), |
84 | WHILE_KW => while_expr(p, Some(m)), | 84 | T![while] => while_expr(p, Some(m)), |
85 | L_CURLY => block_expr(p, Some(m)), | 85 | T!['{'] => block_expr(p, Some(m)), |
86 | _ => { | 86 | _ => { |
87 | // test_err misplaced_label_err | 87 | // test_err misplaced_label_err |
88 | // fn main() { | 88 | // fn main() { |
@@ -94,22 +94,22 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar | |||
94 | } | 94 | } |
95 | } | 95 | } |
96 | } | 96 | } |
97 | ASYNC_KW if la == L_CURLY || (la == MOVE_KW && p.nth(2) == L_CURLY) => { | 97 | T![async] if la == T!['{'] || (la == T![move] && p.nth(2) == T!['{']) => { |
98 | let m = p.start(); | 98 | let m = p.start(); |
99 | p.bump(); | 99 | p.bump(); |
100 | p.eat(MOVE_KW); | 100 | p.eat(T![move]); |
101 | block_expr(p, Some(m)) | 101 | block_expr(p, Some(m)) |
102 | } | 102 | } |
103 | MATCH_KW => match_expr(p), | 103 | T![match] => match_expr(p), |
104 | UNSAFE_KW if la == L_CURLY => { | 104 | T![unsafe] if la == T!['{'] => { |
105 | let m = p.start(); | 105 | let m = p.start(); |
106 | p.bump(); | 106 | p.bump(); |
107 | block_expr(p, Some(m)) | 107 | block_expr(p, Some(m)) |
108 | } | 108 | } |
109 | L_CURLY => block_expr(p, None), | 109 | T!['{'] => block_expr(p, None), |
110 | RETURN_KW => return_expr(p), | 110 | T![return] => return_expr(p), |
111 | CONTINUE_KW => continue_expr(p), | 111 | T![continue] => continue_expr(p), |
112 | BREAK_KW => break_expr(p, r), | 112 | T![break] => break_expr(p, r), |
113 | _ => { | 113 | _ => { |
114 | p.err_recover("expected expression", EXPR_RECOVERY_SET); | 114 | p.err_recover("expected expression", EXPR_RECOVERY_SET); |
115 | return None; | 115 | return None; |
@@ -129,25 +129,25 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar | |||
129 | // (1,); | 129 | // (1,); |
130 | // } | 130 | // } |
131 | fn tuple_expr(p: &mut Parser) -> CompletedMarker { | 131 | fn tuple_expr(p: &mut Parser) -> CompletedMarker { |
132 | assert!(p.at(L_PAREN)); | 132 | assert!(p.at(T!['('])); |
133 | let m = p.start(); | 133 | let m = p.start(); |
134 | p.expect(L_PAREN); | 134 | p.expect(T!['(']); |
135 | 135 | ||
136 | let mut saw_comma = false; | 136 | let mut saw_comma = false; |
137 | let mut saw_expr = false; | 137 | let mut saw_expr = false; |
138 | while !p.at(EOF) && !p.at(R_PAREN) { | 138 | while !p.at(EOF) && !p.at(T![')']) { |
139 | saw_expr = true; | 139 | saw_expr = true; |
140 | if !p.at_ts(EXPR_FIRST) { | 140 | if !p.at_ts(EXPR_FIRST) { |
141 | p.error("expected expression"); | 141 | p.error("expected expression"); |
142 | break; | 142 | break; |
143 | } | 143 | } |
144 | expr(p); | 144 | expr(p); |
145 | if !p.at(R_PAREN) { | 145 | if !p.at(T![')']) { |
146 | saw_comma = true; | 146 | saw_comma = true; |
147 | p.expect(COMMA); | 147 | p.expect(T![,]); |
148 | } | 148 | } |
149 | } | 149 | } |
150 | p.expect(R_PAREN); | 150 | p.expect(T![')']); |
151 | m.complete(p, if saw_expr && !saw_comma { PAREN_EXPR } else { TUPLE_EXPR }) | 151 | m.complete(p, if saw_expr && !saw_comma { PAREN_EXPR } else { TUPLE_EXPR }) |
152 | } | 152 | } |
153 | 153 | ||
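tuple_expr above tracks saw_comma and saw_expr to decide between PAREN_EXPR and TUPLE_EXPR: parentheses without a comma are only grouping, while a trailing comma builds a one-element tuple, as the `(1,)` test comment shows:

    #[allow(unused_parens)]
    fn main() {
        let grouped: i32 = (1);    // no comma: PAREN_EXPR, just an i32
        let tuple: (i32,) = (1,);  // trailing comma: TUPLE_EXPR
        assert_eq!(grouped, tuple.0);
    }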
@@ -159,21 +159,21 @@ fn tuple_expr(p: &mut Parser) -> CompletedMarker { | |||
159 | // [1; 2]; | 159 | // [1; 2]; |
160 | // } | 160 | // } |
161 | fn array_expr(p: &mut Parser) -> CompletedMarker { | 161 | fn array_expr(p: &mut Parser) -> CompletedMarker { |
162 | assert!(p.at(L_BRACK)); | 162 | assert!(p.at(T!['['])); |
163 | let m = p.start(); | 163 | let m = p.start(); |
164 | p.bump(); | 164 | p.bump(); |
165 | if p.eat(R_BRACK) { | 165 | if p.eat(T![']']) { |
166 | return m.complete(p, ARRAY_EXPR); | 166 | return m.complete(p, ARRAY_EXPR); |
167 | } | 167 | } |
168 | expr(p); | 168 | expr(p); |
169 | if p.eat(SEMI) { | 169 | if p.eat(T![;]) { |
170 | expr(p); | 170 | expr(p); |
171 | p.expect(R_BRACK); | 171 | p.expect(T![']']); |
172 | return m.complete(p, ARRAY_EXPR); | 172 | return m.complete(p, ARRAY_EXPR); |
173 | } | 173 | } |
174 | while !p.at(EOF) && !p.at(R_BRACK) { | 174 | while !p.at(EOF) && !p.at(T![']']) { |
175 | p.expect(COMMA); | 175 | p.expect(T![,]); |
176 | if p.at(R_BRACK) { | 176 | if p.at(T![']']) { |
177 | break; | 177 | break; |
178 | } | 178 | } |
179 | if !p.at_ts(EXPR_FIRST) { | 179 | if !p.at_ts(EXPR_FIRST) { |
@@ -182,7 +182,7 @@ fn array_expr(p: &mut Parser) -> CompletedMarker { | |||
182 | } | 182 | } |
183 | expr(p); | 183 | expr(p); |
184 | } | 184 | } |
185 | p.expect(R_BRACK); | 185 | p.expect(T![']']); |
186 | m.complete(p, ARRAY_EXPR) | 186 | m.complete(p, ARRAY_EXPR) |
187 | } | 187 | } |
188 | 188 | ||
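array_expr above recognizes two forms: a comma-separated element list, and the repeat form selected by the `;` branch, as in the `[1; 2]` test comment:

    fn main() {
        let list = [1, 2, 3];  // comma-separated elements
        let repeat = [1; 2];   // `element; length` repeat form
        assert_eq!(list.len(), 3);
        assert_eq!(repeat, [1, 1]);
    }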
@@ -198,17 +198,17 @@ fn array_expr(p: &mut Parser) -> CompletedMarker { | |||
198 | // } | 198 | // } |
199 | fn lambda_expr(p: &mut Parser) -> CompletedMarker { | 199 | fn lambda_expr(p: &mut Parser) -> CompletedMarker { |
200 | assert!( | 200 | assert!( |
201 | p.at(PIPE) | 201 | p.at(T![|]) |
202 | || (p.at(MOVE_KW) && p.nth(1) == PIPE) | 202 | || (p.at(T![move]) && p.nth(1) == T![|]) |
203 | || (p.at(ASYNC_KW) && p.nth(1) == PIPE) | 203 | || (p.at(T![async]) && p.nth(1) == T![|]) |
204 | || (p.at(ASYNC_KW) && p.nth(1) == MOVE_KW && p.nth(2) == PIPE) | 204 | || (p.at(T![async]) && p.nth(1) == T![move] && p.nth(2) == T![|]) |
205 | ); | 205 | ); |
206 | let m = p.start(); | 206 | let m = p.start(); |
207 | p.eat(ASYNC_KW); | 207 | p.eat(T![async]); |
208 | p.eat(MOVE_KW); | 208 | p.eat(T![move]); |
209 | params::param_list_opt_types(p); | 209 | params::param_list_opt_types(p); |
210 | if opt_fn_ret_type(p) { | 210 | if opt_fn_ret_type(p) { |
211 | if !p.at(L_CURLY) { | 211 | if !p.at(T!['{']) { |
212 | p.error("expected `{`"); | 212 | p.error("expected `{`"); |
213 | } | 213 | } |
214 | } | 214 | } |
@@ -224,14 +224,14 @@ fn lambda_expr(p: &mut Parser) -> CompletedMarker { | |||
224 | // if S {}; | 224 | // if S {}; |
225 | // } | 225 | // } |
226 | fn if_expr(p: &mut Parser) -> CompletedMarker { | 226 | fn if_expr(p: &mut Parser) -> CompletedMarker { |
227 | assert!(p.at(IF_KW)); | 227 | assert!(p.at(T![if])); |
228 | let m = p.start(); | 228 | let m = p.start(); |
229 | p.bump(); | 229 | p.bump(); |
230 | cond(p); | 230 | cond(p); |
231 | block(p); | 231 | block(p); |
232 | if p.at(ELSE_KW) { | 232 | if p.at(T![else]) { |
233 | p.bump(); | 233 | p.bump(); |
234 | if p.at(IF_KW) { | 234 | if p.at(T![if]) { |
235 | if_expr(p); | 235 | if_expr(p); |
236 | } else { | 236 | } else { |
237 | block(p); | 237 | block(p); |
@@ -247,7 +247,7 @@ fn if_expr(p: &mut Parser) -> CompletedMarker { | |||
247 | // 'c: for x in () {} | 247 | // 'c: for x in () {} |
248 | // } | 248 | // } |
249 | fn label(p: &mut Parser) { | 249 | fn label(p: &mut Parser) { |
250 | assert!(p.at(LIFETIME) && p.nth(1) == COLON); | 250 | assert!(p.at(LIFETIME) && p.nth(1) == T![:]); |
251 | let m = p.start(); | 251 | let m = p.start(); |
252 | p.bump(); | 252 | p.bump(); |
253 | p.bump(); | 253 | p.bump(); |
@@ -259,7 +259,7 @@ fn label(p: &mut Parser) { | |||
259 | // loop {}; | 259 | // loop {}; |
260 | // } | 260 | // } |
261 | fn loop_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | 261 | fn loop_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { |
262 | assert!(p.at(LOOP_KW)); | 262 | assert!(p.at(T![loop])); |
263 | let m = m.unwrap_or_else(|| p.start()); | 263 | let m = m.unwrap_or_else(|| p.start()); |
264 | p.bump(); | 264 | p.bump(); |
265 | block(p); | 265 | block(p); |
@@ -272,7 +272,7 @@ fn loop_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | |||
272 | // while let Some(x) = it.next() {}; | 272 | // while let Some(x) = it.next() {}; |
273 | // } | 273 | // } |
274 | fn while_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | 274 | fn while_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { |
275 | assert!(p.at(WHILE_KW)); | 275 | assert!(p.at(T![while])); |
276 | let m = m.unwrap_or_else(|| p.start()); | 276 | let m = m.unwrap_or_else(|| p.start()); |
277 | p.bump(); | 277 | p.bump(); |
278 | cond(p); | 278 | cond(p); |
@@ -285,11 +285,11 @@ fn while_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | |||
285 | // for x in [] {}; | 285 | // for x in [] {}; |
286 | // } | 286 | // } |
287 | fn for_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | 287 | fn for_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { |
288 | assert!(p.at(FOR_KW)); | 288 | assert!(p.at(T![for])); |
289 | let m = m.unwrap_or_else(|| p.start()); | 289 | let m = m.unwrap_or_else(|| p.start()); |
290 | p.bump(); | 290 | p.bump(); |
291 | patterns::pattern(p); | 291 | patterns::pattern(p); |
292 | p.expect(IN_KW); | 292 | p.expect(T![in]); |
293 | expr_no_struct(p); | 293 | expr_no_struct(p); |
294 | block(p); | 294 | block(p); |
295 | m.complete(p, FOR_EXPR) | 295 | m.complete(p, FOR_EXPR) |
@@ -305,9 +305,9 @@ fn for_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | |||
305 | // } | 305 | // } |
306 | fn cond(p: &mut Parser) { | 306 | fn cond(p: &mut Parser) { |
307 | let m = p.start(); | 307 | let m = p.start(); |
308 | if p.eat(LET_KW) { | 308 | if p.eat(T![let]) { |
309 | patterns::pattern_list(p); | 309 | patterns::pattern_list(p); |
310 | p.expect(EQ); | 310 | p.expect(T![=]); |
311 | } | 311 | } |
312 | expr_no_struct(p); | 312 | expr_no_struct(p); |
313 | m.complete(p, CONDITION); | 313 | m.complete(p, CONDITION); |
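cond above accepts either a plain expression or `let <pattern> = <expr>`, the condition form used by `if let` and `while let`, and label handles the `'label:` prefix on loops. In surface syntax, matching the test comments:

    fn main() {
        let mut it = vec![1, 2, 3].into_iter();
        let mut sum = 0;
        // `let <pattern> = <expr>` condition, as in `while let` / `if let`
        while let Some(x) = it.next() {
            sum += x;
        }
        // plain boolean condition
        if sum > 0 {
            println!("sum = {}", sum);
        }
        // a labeled loop, as in the `'c: for x in () {}` test comment
        'outer: for x in 0..3 {
            if x == 1 {
                break 'outer;
            }
        }
    }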
@@ -319,11 +319,11 @@ fn cond(p: &mut Parser) { | |||
319 | // match S {}; | 319 | // match S {}; |
320 | // } | 320 | // } |
321 | fn match_expr(p: &mut Parser) -> CompletedMarker { | 321 | fn match_expr(p: &mut Parser) -> CompletedMarker { |
322 | assert!(p.at(MATCH_KW)); | 322 | assert!(p.at(T![match])); |
323 | let m = p.start(); | 323 | let m = p.start(); |
324 | p.bump(); | 324 | p.bump(); |
325 | expr_no_struct(p); | 325 | expr_no_struct(p); |
326 | if p.at(L_CURLY) { | 326 | if p.at(T!['{']) { |
327 | match_arm_list(p); | 327 | match_arm_list(p); |
328 | } else { | 328 | } else { |
329 | p.error("expected `{`") | 329 | p.error("expected `{`") |
@@ -332,9 +332,9 @@ fn match_expr(p: &mut Parser) -> CompletedMarker { | |||
332 | } | 332 | } |
333 | 333 | ||
334 | pub(crate) fn match_arm_list(p: &mut Parser) { | 334 | pub(crate) fn match_arm_list(p: &mut Parser) { |
335 | assert!(p.at(L_CURLY)); | 335 | assert!(p.at(T!['{'])); |
336 | let m = p.start(); | 336 | let m = p.start(); |
337 | p.eat(L_CURLY); | 337 | p.eat(T!['{']); |
338 | 338 | ||
339 | // test match_arms_inner_attribute | 339 | // test match_arms_inner_attribute |
340 | // fn foo() { | 340 | // fn foo() { |
@@ -347,8 +347,8 @@ pub(crate) fn match_arm_list(p: &mut Parser) { | |||
347 | // } | 347 | // } |
348 | attributes::inner_attributes(p); | 348 | attributes::inner_attributes(p); |
349 | 349 | ||
350 | while !p.at(EOF) && !p.at(R_CURLY) { | 350 | while !p.at(EOF) && !p.at(T!['}']) { |
351 | if p.at(L_CURLY) { | 351 | if p.at(T!['{']) { |
352 | error_block(p, "expected match arm"); | 352 | error_block(p, "expected match arm"); |
353 | continue; | 353 | continue; |
354 | } | 354 | } |
@@ -362,12 +362,12 @@ pub(crate) fn match_arm_list(p: &mut Parser) { | |||
362 | // } | 362 | // } |
363 | // } | 363 | // } |
364 | if match_arm(p).is_block() { | 364 | if match_arm(p).is_block() { |
365 | p.eat(COMMA); | 365 | p.eat(T![,]); |
366 | } else if !p.at(R_CURLY) { | 366 | } else if !p.at(T!['}']) { |
367 | p.expect(COMMA); | 367 | p.expect(T![,]); |
368 | } | 368 | } |
369 | } | 369 | } |
370 | p.expect(R_CURLY); | 370 | p.expect(T!['}']); |
371 | m.complete(p, MATCH_ARM_LIST); | 371 | m.complete(p, MATCH_ARM_LIST); |
372 | } | 372 | } |
373 | 373 | ||
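match_arm_list above lets a block-bodied arm drop its trailing comma (`p.eat(T![,])`) while an expression-bodied arm requires one unless it is the last arm (`p.expect(T![,])`). For example:

    fn main() {
        let n = 2;
        let s = match n {
            0 => "zero",   // expression arm: comma required before the next arm
            1 => {
                let word = "one";
                word
            } // block-bodied arm: the comma may be omitted
            _ => "many",
        };
        assert_eq!(s, "many");
    }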
@@ -399,10 +399,10 @@ fn match_arm(p: &mut Parser) -> BlockLike { | |||
399 | attributes::outer_attributes(p); | 399 | attributes::outer_attributes(p); |