148 files changed, 5160 insertions, 3440 deletions
diff --git a/Cargo.lock b/Cargo.lock
index f3a512a8a..76a26ea4e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -320,6 +320,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "fc4b29f4b9bb94bf267d57269fd0706d343a160937108e9619fe380645428abb"
 
 [[package]]
+name = "dot"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a74b6c4d4a1cff5f454164363c16b72fa12463ca6b31f4b5f2035a65fa3d5906"
+
+[[package]]
 name = "drop_bomb"
 version = "0.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -431,18 +437,18 @@ dependencies = [
 
 [[package]]
 name = "fsevent-sys"
-version = "3.0.2"
+version = "3.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77a29c77f1ca394c3e73a9a5d24cfcabb734682d9634fc398f2204a63c994120"
+checksum = "ca6f5e6817058771c10f0eb0f05ddf1e35844266f972004fe8e4b21fda295bd5"
 dependencies = [
  "libc",
 ]
 
 [[package]]
 name = "fst"
-version = "0.4.5"
+version = "0.4.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d79238883cf0307100b90aba4a755d8051a3182305dfe7f649a1e9dc0517006f"
+checksum = "e398fae362f4124bbe630d99519fb2d68a03e2e3a23b441028cdcdc4f4895687"
 
 [[package]]
 name = "gimli"
@@ -480,6 +486,7 @@ version = "0.0.0"
 dependencies = [
  "arrayvec",
  "base_db",
+ "cfg",
  "either",
  "hir_def",
  "hir_expand",
@@ -587,6 +594,7 @@ version = "0.0.0"
 dependencies = [
  "cfg",
  "cov-mark",
+ "dot",
  "either",
  "expect-test",
  "hir",
@@ -931,11 +939,10 @@ dependencies = [
 
 [[package]]
 name = "notify"
-version = "5.0.0-pre.7"
+version = "5.0.0-pre.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1ebe7699a0f8c5759450716ee03d231685c22b4fe8f406c42c22e0ad94d40ce7"
+checksum = "46bbbcd078f1f00ddb7a9abe70b96e91229b44b0b3afdec610f8e5137f8f014b"
 dependencies = [
- "anymap",
  "bitflags",
  "crossbeam-channel",
  "filetime",
@@ -1287,9 +1294,9 @@ checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
 
 [[package]]
 name = "rowan"
-version = "0.13.0-pre.5"
+version = "0.13.0-pre.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32a5fc82ed0b7e7fba157331f0d8f64abd73bced6e7ac2a4dfa0c4cf0ab584e8"
+checksum = "82ccc04e145e9a5ab51b9c12a81d77c4a8250d87a407ab02ac650451141ff00d"
 dependencies = [
  "countme",
  "hashbrown",
diff --git a/Cargo.toml b/Cargo.toml
index d34251fc0..498cf7d62 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,7 +1,12 @@
 [workspace]
+resolver = "2"
 members = ["xtask/", "lib/*", "crates/*"]
 
 [profile.dev]
+# We do want incremental builds, but they are broken at the moment :(
+# https://github.com/rust-lang/rust/issues/85003#issuecomment-833796289
+incremental = false
+
 # Disabling debug info speeds up builds a bunch,
 # and we don't rely on it for debugging that much.
 debug = 0
@@ -16,9 +21,14 @@ text-size.opt-level = 3
 miniz_oxide.opt-level = 3
 
 [profile.release]
-incremental = true
+# We do want incremental release builds, but they are broken at the moment :(
+# https://github.com/rust-lang/rust/issues/85003#issuecomment-833796289
+incremental = false
 debug = 0 # Set this to 1 or 2 to get more useful backtraces in debugger.
 
+[profile.test]
+incremental = false
+
 [patch.'crates-io']
 # rowan = { path = "../rowan" }
 
diff --git a/crates/cfg/src/lib.rs b/crates/cfg/src/lib.rs
index d88ecf8b0..59fd38880 100644
--- a/crates/cfg/src/lib.rs
+++ b/crates/cfg/src/lib.rs
@@ -13,7 +13,7 @@ use tt::SmolStr;
 pub use cfg_expr::{CfgAtom, CfgExpr};
 pub use dnf::DnfExpr;
 
-/// Configuration options used for conditional compilition on items with `cfg` attributes.
+/// Configuration options used for conditional compilation on items with `cfg` attributes.
 /// We have two kind of options in different namespaces: atomic options like `unix`, and
 /// key-value options like `target_arch="x86"`.
 ///
diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs
index 1682d8bde..93cf6a3d6 100644
--- a/crates/flycheck/src/lib.rs
+++ b/crates/flycheck/src/lib.rs
@@ -215,6 +215,7 @@ impl FlycheckActor {
 } => {
 let mut cmd = Command::new(toolchain::cargo());
 cmd.arg(command);
+cmd.current_dir(&self.workspace_root);
 cmd.args(&["--workspace", "--message-format=json", "--manifest-path"])
 .arg(self.workspace_root.join("Cargo.toml"));
 
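The hunk above pins the working directory of the spawned cargo process to the workspace root, so cargo's config discovery and any relative paths it reports resolve against the project being checked rather than against wherever the server process happened to start. A minimal standalone sketch of that pattern with std::process::Command; the workspace_root value here is a made-up placeholder, not something taken from this change:

use std::path::Path;
use std::process::Command;

fn main() -> std::io::Result<()> {
    // Placeholder; flycheck derives this from the loaded workspace.
    let workspace_root = Path::new("/path/to/workspace");

    let mut cmd = Command::new("cargo");
    cmd.arg("check")
        // Run from the workspace root so relative paths and config lookups
        // resolve against the project being checked.
        .current_dir(workspace_root)
        .args(&["--workspace", "--message-format=json", "--manifest-path"])
        .arg(workspace_root.join("Cargo.toml"));

    let status = cmd.status()?;
    println!("cargo check exited with: {}", status);
    Ok(())
}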
diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml
index 9e329656f..560b15238 100644
--- a/crates/hir/Cargo.toml
+++ b/crates/hir/Cargo.toml
@@ -25,3 +25,4 @@ hir_expand = { path = "../hir_expand", version = "0.0.0" }
 hir_def = { path = "../hir_def", version = "0.0.0" }
 hir_ty = { path = "../hir_ty", version = "0.0.0" }
 tt = { path = "../tt", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index f876339de..d443b124c 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -52,7 +52,9 @@ use hir_def::{
 };
 use hir_expand::{diagnostics::DiagnosticSink, name::name, MacroDefKind};
 use hir_ty::{
-autoderef, could_unify,
+autoderef,
+consteval::ConstExt,
+could_unify,
 method_resolution::{self, def_crates, TyFingerprint},
 primitive::UintTy,
 subst_prefix,
@@ -89,6 +91,7 @@ pub use crate::{
 // Generally, a refactoring which *removes* a name from this list is a good
 // idea!
 pub use {
+cfg::{CfgAtom, CfgExpr, CfgOptions},
 hir_def::{
 adt::StructKind,
 attr::{Attr, Attrs, AttrsWithOwner, Documentation},
@@ -215,6 +218,10 @@ impl Crate {
 
 doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
 }
+
+pub fn cfg(&self, db: &dyn HirDatabase) -> CfgOptions {
+db.crate_graph()[self.id].cfg_options.clone()
+}
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -868,6 +875,10 @@ impl Function {
 db.function_data(self.id).is_unsafe()
 }
 
+pub fn is_async(self, db: &dyn HirDatabase) -> bool {
+db.function_data(self.id).is_async()
+}
+
 pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) {
 let krate = self.module(db).id.krate();
 hir_def::diagnostics::validate_body(db.upcast(), self.id.into(), sink);
@@ -1905,6 +1916,7 @@ impl Type {
 substs.iter(&Interner).filter_map(|a| a.ty(&Interner)).any(go)
 }
 
+TyKind::Array(_ty, len) if len.is_unknown() => true,
 TyKind::Array(ty, _)
 | TyKind::Slice(ty)
 | TyKind::Raw(_, ty)
@@ -2066,6 +2078,10 @@ impl Type {
 Some(adt.into())
 }
 
+pub fn as_builtin(&self) -> Option<BuiltinType> {
+self.ty.as_builtin().map(|inner| BuiltinType { inner })
+}
+
 pub fn as_dyn_trait(&self) -> Option<Trait> {
 self.ty.dyn_trait().map(Into::into)
 }
diff --git a/crates/hir_def/src/attr.rs b/crates/hir_def/src/attr.rs
index a2479016e..aadd4e44a 100644
--- a/crates/hir_def/src/attr.rs
+++ b/crates/hir_def/src/attr.rs
@@ -9,7 +9,7 @@ use std::{
 use base_db::CrateId;
 use cfg::{CfgExpr, CfgOptions};
 use either::Either;
-use hir_expand::{hygiene::Hygiene, name::AsName, AstId, AttrId, InFile};
+use hir_expand::{hygiene::Hygiene, name::AsName, AstId, InFile};
 use itertools::Itertools;
 use la_arena::ArenaMap;
 use mbe::ast_to_token_tree;
@@ -101,17 +101,13 @@ impl RawAttrs {
 hygiene: &Hygiene,
 ) -> Self {
 let entries = collect_attrs(owner)
-.enumerate()
-.flat_map(|(i, attr)| {
-let index = AttrId(i as u32);
-match attr {
-Either::Left(attr) => Attr::from_src(db, attr, hygiene, index),
-Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
-id: index,
-input: Some(AttrInput::Literal(SmolStr::new(doc))),
-path: Interned::new(ModPath::from(hir_expand::name!(doc))),
-}),
-}
+.flat_map(|(id, attr)| match attr {
+Either::Left(attr) => Attr::from_src(db, attr, hygiene, id),
+Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
+id,
+input: Some(AttrInput::Literal(SmolStr::new(doc))),
+path: Interned::new(ModPath::from(hir_expand::name!(doc))),
+}),
 })
 .collect::<Arc<_>>();
 
@@ -124,6 +120,7 @@ impl RawAttrs {
 }
 
 pub(crate) fn merge(&self, other: Self) -> Self {
+// FIXME: This needs to fixup `AttrId`s
 match (&self.entries, &other.entries) {
 (None, None) => Self::EMPTY,
 (Some(entries), None) | (None, Some(entries)) => {
@@ -375,39 +372,26 @@ impl AttrsWithOwner {
 
 let def_map = module.def_map(db);
 let mod_data = &def_map[module.local_id];
-let attrs = match mod_data.declaration_source(db) {
+match mod_data.declaration_source(db) {
 Some(it) => {
-let mut attrs: Vec<_> = collect_attrs(&it.value as &dyn ast::AttrsOwner)
-.map(|attr| InFile::new(it.file_id, attr))
-.collect();
+let mut map = AttrSourceMap::new(InFile::new(it.file_id, &it.value));
 if let InFile { file_id, value: ModuleSource::SourceFile(file) } =
 mod_data.definition_source(db)
 {
-attrs.extend(
-collect_attrs(&file as &dyn ast::AttrsOwner)
-.map(|attr| InFile::new(file_id, attr)),
-)
+map.merge(AttrSourceMap::new(InFile::new(file_id, &file)));
 }
-attrs
+return map;
 }
 None => {
 let InFile { file_id, value } = mod_data.definition_source(db);
-match &value {
-ModuleSource::SourceFile(file) => {
-collect_attrs(file as &dyn ast::AttrsOwner)
-}
-ModuleSource::Module(module) => {
-collect_attrs(module as &dyn ast::AttrsOwner)
-}
-ModuleSource::BlockExpr(block) => {
-collect_attrs(block as &dyn ast::AttrsOwner)
-}
-}
-.map(|attr| InFile::new(file_id, attr))
-.collect()
+let attrs_owner = match &value {
+ModuleSource::SourceFile(file) => file as &dyn ast::AttrsOwner,
+ModuleSource::Module(module) => module as &dyn ast::AttrsOwner,
+ModuleSource::BlockExpr(block) => block as &dyn ast::AttrsOwner,
+};
+return AttrSourceMap::new(InFile::new(file_id, attrs_owner));
 }
-};
-return AttrSourceMap { attrs };
+}
 }
 AttrDefId::FieldId(id) => {
 let map = db.fields_attrs_source_map(id.parent);
@@ -462,11 +446,7 @@ impl AttrsWithOwner {
 },
 };
 
-AttrSourceMap {
-attrs: collect_attrs(&owner.value)
-.map(|attr| InFile::new(owner.file_id, attr))
-.collect(),
-}
+AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn AttrsOwner))
 }
 
 pub fn docs_with_rangemap(
@@ -518,7 +498,7 @@ impl AttrsWithOwner {
 if buf.is_empty() {
 None
 } else {
-Some((Documentation(buf), DocsRangeMap { mapping, source: self.source_map(db).attrs }))
+Some((Documentation(buf), DocsRangeMap { mapping, source_map: self.source_map(db) }))
 }
 }
 }
@@ -559,27 +539,59 @@ fn inner_attributes(
 }
 
 pub struct AttrSourceMap {
-attrs: Vec<InFile<Either<ast::Attr, ast::Comment>>>,
+attrs: Vec<InFile<ast::Attr>>,
+doc_comments: Vec<InFile<ast::Comment>>,
 }
 
 impl AttrSourceMap {
+fn new(owner: InFile<&dyn ast::AttrsOwner>) -> Self {
+let mut attrs = Vec::new();
+let mut doc_comments = Vec::new();
+for (_, attr) in collect_attrs(owner.value) {
+match attr {
+Either::Left(attr) => attrs.push(owner.with_value(attr)),
+Either::Right(comment) => doc_comments.push(owner.with_value(comment)),
+}
+}
+
+Self { attrs, doc_comments }
+}
+
+fn merge(&mut self, other: Self) {
+self.attrs.extend(other.attrs);
+self.doc_comments.extend(other.doc_comments);
+}
+
 /// Maps the lowered `Attr` back to its original syntax node.
 ///
 /// `attr` must come from the `owner` used for AttrSourceMap
 ///
 /// Note that the returned syntax node might be a `#[cfg_attr]`, or a doc comment, instead of
 /// the attribute represented by `Attr`.
-pub fn source_of(&self, attr: &Attr) -> InFile<&Either<ast::Attr, ast::Comment>> {
-self.attrs
-.get(attr.id.0 as usize)
-.unwrap_or_else(|| panic!("cannot find `Attr` at index {:?}", attr.id))
-.as_ref()
+pub fn source_of(&self, attr: &Attr) -> InFile<Either<ast::Attr, ast::Comment>> {
+self.source_of_id(attr.id)
+}
+
+fn source_of_id(&self, id: AttrId) -> InFile<Either<ast::Attr, ast::Comment>> {
+if id.is_doc_comment {
+self.doc_comments
+.get(id.ast_index as usize)
+.unwrap_or_else(|| panic!("cannot find doc comment at index {:?}", id))
+.clone()
+.map(|attr| Either::Right(attr))
+} else {
+self.attrs
+.get(id.ast_index as usize)
+.unwrap_or_else(|| panic!("cannot find `Attr` at index {:?}", id))
+.clone()
+.map(|attr| Either::Left(attr))
+}
 }
 }
 
 /// A struct to map text ranges from [`Documentation`] back to TextRanges in the syntax tree.
 pub struct DocsRangeMap {
-source: Vec<InFile<Either<ast::Attr, ast::Comment>>>,
+source_map: AttrSourceMap,
 // (docstring-line-range, attr_index, attr-string-range)
 // a mapping from the text range of a line of the [`Documentation`] to the attribute index and
 // the original (untrimmed) syntax doc line
@@ -596,7 +608,7 @@ impl DocsRangeMap {
 
 let relative_range = range - line_docs_range.start();
 
-let &InFile { file_id, value: ref source } = &self.source[idx.0 as usize];
+let &InFile { file_id, value: ref source } = &self.source_map.source_of_id(idx);
 match source {
 Either::Left(_) => None, // FIXME, figure out a nice way to handle doc attributes here
 // as well as for whats done in syntax highlight doc injection
@@ -615,6 +627,12 @@ impl DocsRangeMap {
 }
 }
 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub(crate) struct AttrId {
+is_doc_comment: bool,
+pub(crate) ast_index: u32,
+}
+
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct Attr {
 pub(crate) id: AttrId,
@@ -749,22 +767,32 @@ fn attrs_from_item_tree<N: ItemTreeNode>(id: ItemTreeId<N>, db: &dyn DefDatabase
 
 fn collect_attrs(
 owner: &dyn ast::AttrsOwner,
-) -> impl Iterator<Item = Either<ast::Attr, ast::Comment>> {
+) -> impl Iterator<Item = (AttrId, Either<ast::Attr, ast::Comment>)> {
 let (inner_attrs, inner_docs) = inner_attributes(owner.syntax())
 .map_or((None, None), |(attrs, docs)| (Some(attrs), Some(docs)));
 
 let outer_attrs = owner.attrs().filter(|attr| attr.kind().is_outer());
-let attrs = outer_attrs
-.chain(inner_attrs.into_iter().flatten())
-.map(|attr| (attr.syntax().text_range().start(), Either::Left(attr)));
+let attrs =
+outer_attrs.chain(inner_attrs.into_iter().flatten()).enumerate().map(|(idx, attr)| {
+(
+AttrId { ast_index: idx as u32, is_doc_comment: false },
+attr.syntax().text_range().start(),
+Either::Left(attr),
+)
+});
 
 let outer_docs =
 ast::CommentIter::from_syntax_node(owner.syntax()).filter(ast::Comment::is_outer);
-let docs = outer_docs
-.chain(inner_docs.into_iter().flatten())
-.map(|docs_text| (docs_text.syntax().text_range().start(), Either::Right(docs_text)));
+let docs =
+outer_docs.chain(inner_docs.into_iter().flatten()).enumerate().map(|(idx, docs_text)| {
+(
+AttrId { ast_index: idx as u32, is_doc_comment: true },
+docs_text.syntax().text_range().start(),
+Either::Right(docs_text),
+)
+});
 // sort here by syntax node offset because the source can have doc attributes and doc strings be interleaved
-docs.chain(attrs).sorted_by_key(|&(offset, _)| offset).map(|(_, attr)| attr)
+docs.chain(attrs).sorted_by_key(|&(_, offset, _)| offset).map(|(id, _, attr)| (id, attr))
 }
 
 pub(crate) fn variants_attrs_source_map(
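In the rewritten attr.rs above, AttrId is no longer a flat index into one mixed list: it records whether it refers to a real attribute or a doc comment, plus an index into the corresponding vector of AttrSourceMap. A toy sketch of that two-list lookup scheme, using plain Strings in place of the real InFile<ast::Attr> / InFile<ast::Comment> types:

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct AttrId {
    is_doc_comment: bool,
    ast_index: u32,
}

struct AttrSourceMap {
    attrs: Vec<String>,        // stand-in for InFile<ast::Attr>
    doc_comments: Vec<String>, // stand-in for InFile<ast::Comment>
}

impl AttrSourceMap {
    // The flag selects the vector, the index selects the element, so the two
    // kinds of attribute sources can be collected and merged independently.
    fn source_of_id(&self, id: AttrId) -> &str {
        if id.is_doc_comment {
            self.doc_comments[id.ast_index as usize].as_str()
        } else {
            self.attrs[id.ast_index as usize].as_str()
        }
    }
}

fn main() {
    let map = AttrSourceMap {
        attrs: vec!["#[derive(Debug)]".to_string()],
        doc_comments: vec!["/// docs".to_string()],
    };
    let id = AttrId { is_doc_comment: true, ast_index: 0 };
    assert_eq!(map.source_of_id(id), "/// docs");
}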
diff --git a/crates/hir_def/src/body.rs b/crates/hir_def/src/body.rs
index 8360426f1..98b485b60 100644
--- a/crates/hir_def/src/body.rs
+++ b/crates/hir_def/src/body.rs
@@ -21,8 +21,6 @@ use profile::Count;
 use rustc_hash::FxHashMap;
 use syntax::{ast, AstNode, AstPtr};
 
-pub use lower::LowerCtx;
-
 use crate::{
 attr::{Attrs, RawAttrs},
 db::DefDatabase,
@@ -35,6 +33,8 @@ use crate::{
 UnresolvedMacro,
 };
 
+pub use lower::LowerCtx;
+
 /// A subset of Expander that only deals with cfg attributes. We only need it to
 /// avoid cyclic queries in crate def map during enum processing.
 #[derive(Debug)]
diff --git a/crates/hir_def/src/body/lower.rs b/crates/hir_def/src/body/lower.rs
index 75dc19c11..2a7e0205f 100644
--- a/crates/hir_def/src/body/lower.rs
+++ b/crates/hir_def/src/body/lower.rs
@@ -205,7 +205,7 @@ impl ExprCollector<'_> {
 self.maybe_collect_expr(expr).unwrap_or_else(|| self.missing_expr())
 }
 
-/// Returns `None` if the expression is `#[cfg]`d out.
+/// Returns `None` if and only if the expression is `#[cfg]`d out.
 fn maybe_collect_expr(&mut self, expr: ast::Expr) -> Option<ExprId> {
 let syntax_ptr = AstPtr::new(&expr);
 self.check_cfg(&expr)?;
@@ -668,7 +668,7 @@ impl ExprCollector<'_> {
 if self.check_cfg(&stmt).is_none() {
 return;
 }
-
+let has_semi = stmt.semicolon_token().is_some();
 // Note that macro could be expended to multiple statements
 if let Some(ast::Expr::MacroCall(m)) = stmt.expr() {
 let macro_ptr = AstPtr::new(&m);
@@ -685,18 +685,19 @@ impl ExprCollector<'_> {
 statements.statements().for_each(|stmt| this.collect_stmt(stmt));
 if let Some(expr) = statements.expr() {
 let expr = this.collect_expr(expr);
-this.statements_in_scope.push(Statement::Expr(expr));
+this.statements_in_scope
+.push(Statement::Expr { expr, has_semi });
 }
 }
 None => {
 let expr = this.alloc_expr(Expr::Missing, syntax_ptr.clone());
-this.statements_in_scope.push(Statement::Expr(expr));
+this.statements_in_scope.push(Statement::Expr { expr, has_semi });
 }
 },
 );
 } else {
 let expr = self.collect_expr_opt(stmt.expr());
-self.statements_in_scope.push(Statement::Expr(expr));
+self.statements_in_scope.push(Statement::Expr { expr, has_semi });
 }
 }
 ast::Stmt::Item(item) => {
@@ -725,8 +726,17 @@ impl ExprCollector<'_> {
 let prev_statements = std::mem::take(&mut self.statements_in_scope);
 
 block.statements().for_each(|s| self.collect_stmt(s));
-
-let tail = block.tail_expr().map(|e| self.collect_expr(e));
+block.tail_expr().and_then(|e| {
+let expr = self.maybe_collect_expr(e)?;
+Some(self.statements_in_scope.push(Statement::Expr { expr, has_semi: false }))
+});
+
+let mut tail = None;
+if let Some(Statement::Expr { expr, has_semi: false }) = self.statements_in_scope.last() {
+tail = Some(*expr);
+self.statements_in_scope.pop();
+}
+let tail = tail;
 let statements = std::mem::replace(&mut self.statements_in_scope, prev_statements);
 let syntax_node_ptr = AstPtr::new(&block.into());
 let expr_id = self.alloc_expr(
@@ -996,23 +1006,27 @@ impl From<ast::BinOp> for BinaryOp {
 impl From<ast::LiteralKind> for Literal {
 fn from(ast_lit_kind: ast::LiteralKind) -> Self {
 match ast_lit_kind {
+// FIXME: these should have actual values filled in, but unsure on perf impact
 LiteralKind::IntNumber(lit) => {
 if let builtin @ Some(_) = lit.suffix().and_then(BuiltinFloat::from_suffix) {
 return Literal::Float(Default::default(), builtin);
 } else if let builtin @ Some(_) =
 lit.suffix().and_then(|it| BuiltinInt::from_suffix(&it))
 {
-Literal::Int(Default::default(), builtin)
+Literal::Int(lit.value().unwrap_or(0) as i128, builtin)
 } else {
 let builtin = lit.suffix().and_then(|it| BuiltinUint::from_suffix(&it));
-Literal::Uint(Default::default(), builtin)
+Literal::Uint(lit.value().unwrap_or(0), builtin)
 }
 }
 LiteralKind::FloatNumber(lit) => {
 let ty = lit.suffix().and_then(|it| BuiltinFloat::from_suffix(&it));
 Literal::Float(Default::default(), ty)
 }
-LiteralKind::ByteString(_) => Literal::ByteString(Default::default()),
+LiteralKind::ByteString(bs) => {
+let text = bs.value().map(Vec::from).unwrap_or_else(Default::default);
+Literal::ByteString(text)
+}
 LiteralKind::String(_) => Literal::String(Default::default()),
 LiteralKind::Byte => Literal::Uint(Default::default(), Some(BuiltinUint::U8)),
 LiteralKind::Bool(val) => Literal::Bool(val),
diff --git a/crates/hir_def/src/body/scope.rs b/crates/hir_def/src/body/scope.rs
index bd7005ca6..6764de3a7 100644
--- a/crates/hir_def/src/body/scope.rs
+++ b/crates/hir_def/src/body/scope.rs
@@ -157,7 +157,7 @@ fn compute_block_scopes(
 scope = scopes.new_scope(scope);
 scopes.add_bindings(body, scope, *pat);
 }
-Statement::Expr(expr) => {
+Statement::Expr { expr, .. } => {
 scopes.set_scope(*expr, scope);
 compute_expr_scopes(*expr, body, scopes, scope);
 }
diff --git a/crates/hir_def/src/expr.rs b/crates/hir_def/src/expr.rs
index b4ad984bd..2ba619d23 100644
--- a/crates/hir_def/src/expr.rs
+++ b/crates/hir_def/src/expr.rs
@@ -43,8 +43,8 @@ pub enum Literal {
 ByteString(Vec<u8>),
 Char(char),
 Bool(bool),
-Int(u64, Option<BuiltinInt>),
-Uint(u64, Option<BuiltinUint>),
+Int(i128, Option<BuiltinInt>),
+Uint(u128, Option<BuiltinUint>),
 Float(u64, Option<BuiltinFloat>), // FIXME: f64 is not Eq
 }
 
@@ -242,7 +242,7 @@ pub struct RecordLitField {
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum Statement {
 Let { pat: PatId, type_ref: Option<Interned<TypeRef>>, initializer: Option<ExprId> },
-Expr(ExprId),
+Expr { expr: ExprId, has_semi: bool },
 }
 
 impl Expr {
@@ -265,7 +265,7 @@ impl Expr {
 f(*expr);
 }
 }
-Statement::Expr(e) => f(*e),
+Statement::Expr { expr: expression, .. } => f(*expression),
 }
 }
 if let Some(expr) = tail {
diff --git a/crates/hir_def/src/find_path.rs b/crates/hir_def/src/find_path.rs
index 858e88038..ee52794aa 100644
--- a/crates/hir_def/src/find_path.rs
+++ b/crates/hir_def/src/find_path.rs
@@ -5,10 +5,10 @@ use std::iter;
 use hir_expand::name::{known, AsName, Name};
 use rustc_hash::FxHashSet;
 
-use crate::nameres::DefMap;
 use crate::{
 db::DefDatabase,
 item_scope::ItemInNs,
+nameres::DefMap,
 path::{ModPath, PathKind},
 visibility::Visibility,
 ModuleDefId, ModuleId,
@@ -134,7 +134,16 @@ fn find_path_inner(
 for (name, def_id) in root_def_map.extern_prelude() {
 if item == ItemInNs::Types(*def_id) {
 let name = scope_name.unwrap_or_else(|| name.clone());
-return Some(ModPath::from_segments(PathKind::Plain, vec![name]));
+
+let name_already_occupied_in_type_ns = def_map
+.with_ancestor_maps(db, from.local_id, &mut |def_map, local_id| {
+def_map[local_id].scope.get(&name).take_types().filter(|&id| id != *def_id)
+})
+.is_some();
+return Some(ModPath::from_segments(
+if name_already_occupied_in_type_ns { PathKind::Abs } else { PathKind::Plain },
+vec![name],
+));
 }
 }
 
diff --git a/crates/hir_def/src/item_tree.rs b/crates/hir_def/src/item_tree.rs
index 8d13c7e04..cad8a7479 100644
--- a/crates/hir_def/src/item_tree.rs
+++ b/crates/hir_def/src/item_tree.rs
@@ -18,7 +18,7 @@ use hir_expand::{
 ast_id_map::FileAstId,
 hygiene::Hygiene,
 name::{name, AsName, Name},
-HirFileId, InFile,
+FragmentKind, HirFileId, InFile,
 };
 use la_arena::{Arena, Idx, RawIdx};
 use profile::Count;
@@ -656,6 +656,7 @@ pub struct MacroCall {
 /// Path to the called macro.
 pub path: Interned<ModPath>,
 pub ast_id: FileAstId<ast::MacroCall>,
+pub fragment: FragmentKind,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]
diff --git a/crates/hir_def/src/item_tree/lower.rs b/crates/hir_def/src/item_tree/lower.rs
index 5743b3386..fe348091d 100644
--- a/crates/hir_def/src/item_tree/lower.rs
+++ b/crates/hir_def/src/item_tree/lower.rs
@@ -624,7 +624,8 @@ impl<'a> Ctx<'a> {
 fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<MacroCall>> {
 let path = Interned::new(ModPath::from_src(self.db, m.path()?, &self.hygiene)?);
 let ast_id = self.source_ast_id_map.ast_id(m);
-let res = MacroCall { path, ast_id };
+let fragment = hir_expand::to_fragment_kind(m);
+let res = MacroCall { path, ast_id, fragment };
 Some(id(self.data().macro_calls.alloc(res)))
 }
 
diff --git a/crates/hir_def/src/lib.rs b/crates/hir_def/src/lib.rs
index da46f16f7..a82ea5957 100644
--- a/crates/hir_def/src/lib.rs
+++ b/crates/hir_def/src/lib.rs
@@ -62,13 +62,14 @@ use hir_expand::{
 ast_id_map::FileAstId,
 eager::{expand_eager_macro, ErrorEmitted, ErrorSink},
 hygiene::Hygiene,
-AstId, AttrId, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,
+AstId, FragmentKind, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,
 };
 use la_arena::Idx;
 use nameres::DefMap;
 use path::ModPath;
 use syntax::ast;
 
+use crate::attr::AttrId;
 use crate::builtin_type::BuiltinType;
 use item_tree::{
 Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, ModItem, Static, Struct, Trait,
@@ -652,6 +653,7 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
 resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
 mut error_sink: &mut dyn FnMut(mbe::ExpandError),
 ) -> Result<Result<MacroCallId, ErrorEmitted>, UnresolvedMacro> {
+let fragment = hir_expand::to_fragment_kind(self.value);
 let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
 let h = Hygiene::new(db.upcast(), self.file_id);
 let path = self.value.path().and_then(|path| path::ModPath::from_src(db, path, &h));
@@ -667,6 +669,7 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
 
 macro_call_as_call_id(
 &AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
+fragment,
 db,
 krate,
 resolver,
@@ -695,6 +698,7 @@ pub struct UnresolvedMacro {
 
 fn macro_call_as_call_id(
 call: &AstIdWithPath<ast::MacroCall>,
+fragment: FragmentKind,
 db: &dyn db::DefDatabase,
 krate: CrateId,
 resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
@@ -718,7 +722,11 @@ fn macro_call_as_call_id(
 .map(MacroCallId::from)
 } else {
 Ok(def
-.as_lazy_macro(db.upcast(), krate, MacroCallKind::FnLike { ast_id: call.ast_id })
+.as_lazy_macro(
+db.upcast(),
+krate,
+MacroCallKind::FnLike { ast_id: call.ast_id, fragment },
+)
 .into())
 };
 Ok(res)
@@ -745,7 +753,7 @@ fn derive_macro_as_call_id(
 MacroCallKind::Derive {
 ast_id: item_attr.ast_id,
 derive_name: last_segment.to_string(),
-derive_attr,
+derive_attr_index: derive_attr.ast_index,
 },
 )
 .into();
diff --git a/crates/hir_def/src/nameres.rs b/crates/hir_def/src/nameres.rs
index 1bc72ec1f..249af6fc8 100644
--- a/crates/hir_def/src/nameres.rs
+++ b/crates/hir_def/src/nameres.rs
@@ -629,7 +629,7 @@ mod diagnostics {
 DiagnosticKind::UnresolvedProcMacro { ast } => {
 let mut precise_location = None;
 let (file, ast, name) = match ast {
-MacroCallKind::FnLike { ast_id } => {
+MacroCallKind::FnLike { ast_id, .. } => {
 let node = ast_id.to_node(db.upcast());
 (ast_id.file_id, SyntaxNodePtr::from(AstPtr::new(&node)), None)
 }
diff --git a/crates/hir_def/src/nameres/collector.rs b/crates/hir_def/src/nameres/collector.rs
index 05ceb1efb..19db6cc59 100644
--- a/crates/hir_def/src/nameres/collector.rs
+++ b/crates/hir_def/src/nameres/collector.rs
@@ -13,14 +13,14 @@ use hir_expand::{
 builtin_macro::find_builtin_macro,
 name::{AsName, Name},
 proc_macro::ProcMacroExpander,
-AttrId, HirFileId, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,
+FragmentKind, HirFileId, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,
 };
 use hir_expand::{InFile, MacroCallLoc};
 use rustc_hash::{FxHashMap, FxHashSet};
 use syntax::ast;
 
 use crate::{
-attr::Attrs,
+attr::{AttrId, Attrs},
 db::DefDatabase,
 derive_macro_as_call_id,
 intern::Interned,
@@ -215,7 +215,7 @@ struct MacroDirective {
 
 #[derive(Clone, Debug, Eq, PartialEq)]
 enum MacroDirectiveKind {
-FnLike { ast_id: AstIdWithPath<ast::MacroCall> },
+FnLike { ast_id: AstIdWithPath<ast::MacroCall>, fragment: FragmentKind },
 Derive { ast_id: AstIdWithPath<ast::Item>, derive_attr: AttrId },
 
@@ -481,6 +481,11 @@ impl DefCollector<'_> {
 let res = self.def_map.resolve_name_in_extern_prelude(self.db, &extern_crate.name);
 
 if let Some(ModuleDefId::ModuleId(m)) = res.take_types() {
+if m == self.def_map.module_id(current_module_id) {
+cov_mark::hit!(ignore_macro_use_extern_crate_self);
+return;
+}
+
 cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use);
 self.import_all_macros_exported(current_module_id, m.krate);
 }
 
@@ -807,9 +812,10 @@ impl DefCollector<'_> {
 let mut res = ReachedFixedPoint::Yes;
 macros.retain(|directive| {
 match &directive.kind {
-MacroDirectiveKind::FnLike { ast_id } => {
+MacroDirectiveKind::FnLike { ast_id, fragment } => {
 match macro_call_as_call_id(
 ast_id,
+*fragment,
 self.db,
 self.def_map.krate,
 |path| {
@@ -926,8 +932,9 @@ impl DefCollector<'_> {
 
 for directive in &self.unexpanded_macros {
 match &directive.kind {
-MacroDirectiveKind::FnLike { ast_id, .. } => match macro_call_as_call_id(
+MacroDirectiveKind::FnLike { ast_id, fragment } => match macro_call_as_call_id(
 ast_id,
+*fragment,
 self.db,
 self.def_map.krate,
 |path| {
@@ -1496,6 +1503,7 @@ impl ModCollector<'_, '_> {
 let mut error = None;
 match macro_call_as_call_id(
 &ast_id,
+mac.fragment,
 self.def_collector.db,
 self.def_collector.def_map.krate,
 |path| {
@@ -1524,9 +1532,14 @@ impl ModCollector<'_, '_> {
 }
 Ok(Err(_)) => {
 // Built-in macro failed eager expansion.
+
+// FIXME: don't parse the file here
+let fragment = hir_expand::to_fragment_kind(
+&ast_id.ast_id.to_node(self.def_collector.db.upcast()),
+);
 self.def_collector.def_map.diagnostics.push(DefDiagnostic::macro_error(
 self.module_id,
-MacroCallKind::FnLike { ast_id: ast_id.ast_id },
+MacroCallKind::FnLike { ast_id: ast_id.ast_id, fragment },
 error.unwrap().to_string(),
 ));
 return;
@@ -1543,7 +1556,7 @@ impl ModCollector<'_, '_> {
 self.def_collector.unexpanded_macros.push(MacroDirective {
 module_id: self.module_id,
 depth: self.macro_depth + 1,
-kind: MacroDirectiveKind::FnLike { ast_id },
+kind: MacroDirectiveKind::FnLike { ast_id, fragment: mac.fragment },
 });
 }
 
diff --git a/crates/hir_def/src/nameres/tests.rs b/crates/hir_def/src/nameres/tests.rs
index 4f2e7a2f9..9f652731d 100644
--- a/crates/hir_def/src/nameres/tests.rs
+++ b/crates/hir_def/src/nameres/tests.rs
@@ -411,6 +411,22 @@ struct Arc;
 }
 
 #[test]
+fn macro_use_extern_crate_self() {
+cov_mark::check!(ignore_macro_use_extern_crate_self);
+check(
+r#"
+//- /main.rs crate:main
+#[macro_use]
+extern crate self as bla;
+"#,
+expect![[r#"
+crate
+bla: t
+"#]],
+);
+}
+
+#[test]
 fn reexport_across_crates() {
 check(
 r#"
diff --git a/crates/hir_def/src/nameres/tests/incremental.rs b/crates/hir_def/src/nameres/tests/incremental.rs
index 227ecd162..d884a6eb4 100644
--- a/crates/hir_def/src/nameres/tests/incremental.rs
+++ b/crates/hir_def/src/nameres/tests/incremental.rs
@@ -137,6 +137,9 @@ m!(Z);
 });
 let n_recalculated_item_trees = events.iter().filter(|it| it.contains("item_tree")).count();
 assert_eq!(n_recalculated_item_trees, 6);
+let n_reparsed_macros =
+events.iter().filter(|it| it.contains("parse_macro_expansion")).count();
+assert_eq!(n_reparsed_macros, 3);
 }
 
 let new_text = r#"
@@ -155,5 +158,8 @@ m!(Z);
 });
 let n_recalculated_item_trees = events.iter().filter(|it| it.contains("item_tree")).count();
 assert_eq!(n_recalculated_item_trees, 1);
+let n_reparsed_macros =
+events.iter().filter(|it| it.contains("parse_macro_expansion")).count();
+assert_eq!(n_reparsed_macros, 0);
 }
 }
diff --git a/crates/hir_def/src/type_ref.rs b/crates/hir_def/src/type_ref.rs index ea29da5da..9e44547cb 100644 --- a/crates/hir_def/src/type_ref.rs +++ b/crates/hir_def/src/type_ref.rs | |||
@@ -2,6 +2,7 @@ | |||
2 | //! be directly created from an ast::TypeRef, without further queries. | 2 | //! be directly created from an ast::TypeRef, without further queries. |
3 | 3 | ||
4 | use hir_expand::{name::Name, AstId, InFile}; | 4 | use hir_expand::{name::Name, AstId, InFile}; |
5 | use std::convert::TryInto; | ||
5 | use syntax::ast; | 6 | use syntax::ast; |
6 | 7 | ||
7 | use crate::{body::LowerCtx, path::Path}; | 8 | use crate::{body::LowerCtx, path::Path}; |
@@ -79,7 +80,9 @@ pub enum TypeRef { | |||
79 | Path(Path), | 80 | Path(Path), |
80 | RawPtr(Box<TypeRef>, Mutability), | 81 | RawPtr(Box<TypeRef>, Mutability), |
81 | Reference(Box<TypeRef>, Option<LifetimeRef>, Mutability), | 82 | Reference(Box<TypeRef>, Option<LifetimeRef>, Mutability), |
82 | Array(Box<TypeRef> /*, Expr*/), | 83 | // FIXME: for full const generics, the latter element (length) here is going to have to be an |
84 | // expression that is further lowered later in hir_ty. | ||
85 | Array(Box<TypeRef>, ConstScalar), | ||
83 | Slice(Box<TypeRef>), | 86 | Slice(Box<TypeRef>), |
84 | /// A fn pointer. Last element of the vector is the return type. | 87 | /// A fn pointer. Last element of the vector is the return type. |
85 | Fn(Vec<TypeRef>, bool /*varargs*/), | 88 | Fn(Vec<TypeRef>, bool /*varargs*/), |
@@ -140,7 +143,16 @@ impl TypeRef { | |||
140 | TypeRef::RawPtr(Box::new(inner_ty), mutability) | 143 | TypeRef::RawPtr(Box::new(inner_ty), mutability) |
141 | } | 144 | } |
142 | ast::Type::ArrayType(inner) => { | 145 | ast::Type::ArrayType(inner) => { |
143 | TypeRef::Array(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty()))) | 146 | // FIXME: This is a hack. We should probably reuse the machinery of |
147 | // `hir_def::body::lower` to lower this into an `Expr` and then evaluate it at the | ||
148 | // `hir_ty` level, which would allow knowing the type of: | ||
149 | // let v: [u8; 2 + 2] = [0u8; 4]; | ||
150 | let len = inner | ||
151 | .expr() | ||
152 | .map(ConstScalar::usize_from_literal_expr) | ||
153 | .unwrap_or(ConstScalar::Unknown); | ||
154 | |||
155 | TypeRef::Array(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty())), len) | ||
144 | } | 156 | } |
145 | ast::Type::SliceType(inner) => { | 157 | ast::Type::SliceType(inner) => { |
146 | TypeRef::Slice(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty()))) | 158 | TypeRef::Slice(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty()))) |
@@ -212,7 +224,7 @@ impl TypeRef { | |||
212 | } | 224 | } |
213 | TypeRef::RawPtr(type_ref, _) | 225 | TypeRef::RawPtr(type_ref, _) |
214 | | TypeRef::Reference(type_ref, ..) | 226 | | TypeRef::Reference(type_ref, ..) |
215 | | TypeRef::Array(type_ref) | 227 | | TypeRef::Array(type_ref, _) |
216 | | TypeRef::Slice(type_ref) => go(&type_ref, f), | 228 | | TypeRef::Slice(type_ref) => go(&type_ref, f), |
217 | TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => { | 229 | TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => { |
218 | for bound in bounds { | 230 | for bound in bounds { |
@@ -298,3 +310,58 @@ impl TypeBound { | |||
298 | } | 310 | } |
299 | } | 311 | } |
300 | } | 312 | } |
313 | |||
314 | /// A concrete constant value | ||
315 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | ||
316 | pub enum ConstScalar { | ||
317 | // for now, we only support the trivial case of constant evaluating the length of an array | ||
318 | // Note that this is u64 because the target usize may be bigger than our usize | ||
319 | Usize(u64), | ||
320 | |||
321 | /// Case of an unknown value that rustc might know but we don't | ||
322 | // FIXME: this is a hack to get around chalk not being able to represent unevaluatable | ||
323 | // constants | ||
324 | // https://github.com/rust-analyzer/rust-analyzer/pull/8813#issuecomment-840679177 | ||
325 | // https://rust-lang.zulipchat.com/#narrow/stream/144729-wg-traits/topic/Handling.20non.20evaluatable.20constants'.20equality/near/238386348 | ||
326 | Unknown, | ||
327 | } | ||
328 | |||
329 | impl std::fmt::Display for ConstScalar { | ||
330 | fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { | ||
331 | match self { | ||
332 | ConstScalar::Usize(us) => write!(fmt, "{}", us), | ||
333 | ConstScalar::Unknown => write!(fmt, "_"), | ||
334 | } | ||
335 | } | ||
336 | } | ||
337 | |||
338 | impl ConstScalar { | ||
339 | /// Gets a target usize out of the ConstScalar | ||
340 | pub fn as_usize(&self) -> Option<u64> { | ||
341 | match self { | ||
342 | &ConstScalar::Usize(us) => Some(us), | ||
343 | _ => None, | ||
344 | } | ||
345 | } | ||
346 | |||
347 | // FIXME: as per the comments on `TypeRef::Array`, this evaluation should not happen at this | ||
348 | // parse stage. | ||
349 | fn usize_from_literal_expr(expr: ast::Expr) -> ConstScalar { | ||
350 | match expr { | ||
351 | ast::Expr::Literal(lit) => { | ||
352 | let lkind = lit.kind(); | ||
353 | match lkind { | ||
354 | ast::LiteralKind::IntNumber(num) | ||
355 | if num.suffix() == None || num.suffix() == Some("usize") => | ||
356 | { | ||
357 | num.value().and_then(|v| v.try_into().ok()) | ||
358 | } | ||
359 | _ => None, | ||
360 | } | ||
361 | } | ||
362 | _ => None, | ||
363 | } | ||
364 | .map(ConstScalar::Usize) | ||
365 | .unwrap_or(ConstScalar::Unknown) | ||
366 | } | ||
367 | } | ||
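A minimal standalone sketch of the rule implemented by `usize_from_literal_expr` above (the `Len` enum and helper below are hypothetical, not part of the crate): only a plain integer literal with no suffix or a `usize` suffix yields a known length; anything else stays unknown, matching the FIXME about `2 + 2`.

    #[derive(Debug, PartialEq)]
    enum Len {
        Usize(u64),
        Unknown,
    }

    // Mirrors the suffix check above: accept bare integers or `usize`-suffixed ones.
    fn len_from_literal(text: &str) -> Len {
        let digits = text.strip_suffix("usize").unwrap_or(text);
        match digits.parse::<u64>() {
            Ok(v) => Len::Usize(v),
            Err(_) => Len::Unknown,
        }
    }

    fn main() {
        assert_eq!(len_from_literal("4"), Len::Usize(4));
        assert_eq!(len_from_literal("4usize"), Len::Usize(4));
        assert_eq!(len_from_literal("2 + 2"), Len::Unknown); // not a literal, see the FIXME above
        assert_eq!(len_from_literal("4u32"), Len::Unknown);  // wrong suffix
    }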
diff --git a/crates/hir_expand/src/builtin_derive.rs b/crates/hir_expand/src/builtin_derive.rs index 537c03028..b6a6d602f 100644 --- a/crates/hir_expand/src/builtin_derive.rs +++ b/crates/hir_expand/src/builtin_derive.rs | |||
@@ -269,7 +269,7 @@ mod tests { | |||
269 | use expect_test::{expect, Expect}; | 269 | use expect_test::{expect, Expect}; |
270 | use name::AsName; | 270 | use name::AsName; |
271 | 271 | ||
272 | use crate::{test_db::TestDB, AstId, AttrId, MacroCallId, MacroCallKind, MacroCallLoc}; | 272 | use crate::{test_db::TestDB, AstId, MacroCallId, MacroCallKind, MacroCallLoc}; |
273 | 273 | ||
274 | use super::*; | 274 | use super::*; |
275 | 275 | ||
@@ -320,7 +320,7 @@ $0 | |||
320 | kind: MacroCallKind::Derive { | 320 | kind: MacroCallKind::Derive { |
321 | ast_id, | 321 | ast_id, |
322 | derive_name: name.to_string(), | 322 | derive_name: name.to_string(), |
323 | derive_attr: AttrId(0), | 323 | derive_attr_index: 0, |
324 | }, | 324 | }, |
325 | }; | 325 | }; |
326 | 326 | ||
diff --git a/crates/hir_expand/src/builtin_macro.rs b/crates/hir_expand/src/builtin_macro.rs index 179de61f9..280c25f11 100644 --- a/crates/hir_expand/src/builtin_macro.rs +++ b/crates/hir_expand/src/builtin_macro.rs | |||
@@ -118,6 +118,7 @@ register_builtin! { | |||
118 | EAGER: | 118 | EAGER: |
119 | (compile_error, CompileError) => compile_error_expand, | 119 | (compile_error, CompileError) => compile_error_expand, |
120 | (concat, Concat) => concat_expand, | 120 | (concat, Concat) => concat_expand, |
121 | (concat_idents, ConcatIdents) => concat_idents_expand, | ||
121 | (include, Include) => include_expand, | 122 | (include, Include) => include_expand, |
122 | (include_bytes, IncludeBytes) => include_bytes_expand, | 123 | (include_bytes, IncludeBytes) => include_bytes_expand, |
123 | (include_str, IncludeStr) => include_str_expand, | 124 | (include_str, IncludeStr) => include_str_expand, |
@@ -373,6 +374,28 @@ fn concat_expand( | |||
373 | ExpandResult { value: Some(ExpandedEager::new(quote!(#text), FragmentKind::Expr)), err } | 374 | ExpandResult { value: Some(ExpandedEager::new(quote!(#text), FragmentKind::Expr)), err } |
374 | } | 375 | } |
375 | 376 | ||
377 | fn concat_idents_expand( | ||
378 | _db: &dyn AstDatabase, | ||
379 | _arg_id: EagerMacroId, | ||
380 | tt: &tt::Subtree, | ||
381 | ) -> ExpandResult<Option<ExpandedEager>> { | ||
382 | let mut err = None; | ||
383 | let mut ident = String::new(); | ||
384 | for (i, t) in tt.token_trees.iter().enumerate() { | ||
385 | match t { | ||
386 | tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => { | ||
387 | ident.push_str(id.text.as_str()); | ||
388 | } | ||
389 | tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (), | ||
390 | _ => { | ||
391 | err.get_or_insert(mbe::ExpandError::UnexpectedToken); | ||
392 | } | ||
393 | } | ||
394 | } | ||
395 | let ident = tt::Ident { text: ident.into(), id: tt::TokenId::unspecified() }; | ||
396 | ExpandResult { value: Some(ExpandedEager::new(quote!(#ident), FragmentKind::Expr)), err } | ||
397 | } | ||
398 | |||
376 | fn relative_file( | 399 | fn relative_file( |
377 | db: &dyn AstDatabase, | 400 | db: &dyn AstDatabase, |
378 | call_id: MacroCallId, | 401 | call_id: MacroCallId, |
@@ -578,6 +601,7 @@ mod tests { | |||
578 | krate, | 601 | krate, |
579 | kind: MacroCallKind::FnLike { | 602 | kind: MacroCallKind::FnLike { |
580 | ast_id: AstId::new(file_id.into(), ast_id_map.ast_id(¯o_call)), | 603 | ast_id: AstId::new(file_id.into(), ast_id_map.ast_id(¯o_call)), |
604 | fragment: FragmentKind::Expr, | ||
581 | }, | 605 | }, |
582 | }; | 606 | }; |
583 | 607 | ||
@@ -788,9 +812,21 @@ mod tests { | |||
788 | r##" | 812 | r##" |
789 | #[rustc_builtin_macro] | 813 | #[rustc_builtin_macro] |
790 | macro_rules! concat {} | 814 | macro_rules! concat {} |
791 | concat!("foo", "r", 0, r#"bar"#, false); | 815 | concat!("foo", "r", 0, r#"bar"#, "\n", false); |
816 | "##, | ||
817 | expect![[r#""foor0bar\nfalse""#]], | ||
818 | ); | ||
819 | } | ||
820 | |||
821 | #[test] | ||
822 | fn test_concat_idents_expand() { | ||
823 | check_expansion( | ||
824 | r##" | ||
825 | #[rustc_builtin_macro] | ||
826 | macro_rules! concat_idents {} | ||
827 | concat_idents!(foo, bar); | ||
792 | "##, | 828 | "##, |
793 | expect![[r#""foor0barfalse""#]], | 829 | expect![[r#"foobar"#]], |
794 | ); | 830 | ); |
795 | } | 831 | } |
796 | } | 832 | } |
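As a rough standalone sketch of the joining logic in `concat_idents_expand` (the helper name and string-based tokens are made up for illustration): identifier tokens are glued together in order and the separating commas are skipped, so `concat_idents!(foo, bar)` produces the single identifier `foobar`, as the new test expects.

    // Hypothetical simplification: tokens are either identifiers or "," separators.
    fn concat_idents_sketch(tokens: &[&str]) -> String {
        tokens.iter().filter(|t| **t != ",").copied().collect()
    }

    fn main() {
        assert_eq!(concat_idents_sketch(&["foo", ",", "bar"]), "foobar");
    }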
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs index d61f4b31a..c43d382ad 100644 --- a/crates/hir_expand/src/db.rs +++ b/crates/hir_expand/src/db.rs | |||
@@ -8,15 +8,13 @@ use parser::FragmentKind; | |||
8 | use syntax::{ | 8 | use syntax::{ |
9 | algo::diff, | 9 | algo::diff, |
10 | ast::{self, NameOwner}, | 10 | ast::{self, NameOwner}, |
11 | AstNode, GreenNode, Parse, | 11 | AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken, |
12 | SyntaxKind::*, | ||
13 | SyntaxNode, SyntaxToken, | ||
14 | }; | 12 | }; |
15 | 13 | ||
16 | use crate::{ | 14 | use crate::{ |
17 | ast_id_map::AstIdMap, hygiene::HygieneFrame, BuiltinDeriveExpander, BuiltinFnLikeExpander, | 15 | ast_id_map::AstIdMap, hygiene::HygieneFrame, input::process_macro_input, BuiltinDeriveExpander, |
18 | EagerCallLoc, EagerMacroId, HirFileId, HirFileIdRepr, LazyMacroId, MacroCallId, MacroCallLoc, | 16 | BuiltinFnLikeExpander, EagerCallLoc, EagerMacroId, HirFileId, HirFileIdRepr, LazyMacroId, |
19 | MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander, | 17 | MacroCallId, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander, |
20 | }; | 18 | }; |
21 | 19 | ||
22 | /// Total limit on the number of tokens produced by any macro invocation. | 20 | /// Total limit on the number of tokens produced by any macro invocation. |
@@ -160,7 +158,7 @@ pub fn expand_hypothetical( | |||
160 | 158 | ||
161 | let hypothetical_expansion = macro_def.expand(db, lazy_id, &tt); | 159 | let hypothetical_expansion = macro_def.expand(db, lazy_id, &tt); |
162 | 160 | ||
163 | let fragment_kind = to_fragment_kind(db, actual_macro_call); | 161 | let fragment_kind = macro_fragment_kind(db, actual_macro_call); |
164 | 162 | ||
165 | let (node, tmap_2) = | 163 | let (node, tmap_2) = |
166 | mbe::token_tree_to_syntax_node(&hypothetical_expansion.value, fragment_kind).ok()?; | 164 | mbe::token_tree_to_syntax_node(&hypothetical_expansion.value, fragment_kind).ok()?; |
@@ -226,7 +224,7 @@ fn parse_macro_expansion( | |||
226 | None => return ExpandResult { value: None, err: result.err }, | 224 | None => return ExpandResult { value: None, err: result.err }, |
227 | }; | 225 | }; |
228 | 226 | ||
229 | let fragment_kind = to_fragment_kind(db, macro_file.macro_call_id); | 227 | let fragment_kind = macro_fragment_kind(db, macro_file.macro_call_id); |
230 | 228 | ||
231 | log::debug!("expanded = {}", tt.as_debug_string()); | 229 | log::debug!("expanded = {}", tt.as_debug_string()); |
232 | log::debug!("kind = {:?}", fragment_kind); | 230 | log::debug!("kind = {:?}", fragment_kind); |
@@ -269,7 +267,16 @@ fn parse_macro_expansion( | |||
269 | 267 | ||
270 | fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> { | 268 | fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> { |
271 | let arg = db.macro_arg_text(id)?; | 269 | let arg = db.macro_arg_text(id)?; |
272 | let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg)); | 270 | let (mut tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg)); |
271 | |||
272 | if let MacroCallId::LazyMacro(id) = id { | ||
273 | let loc: MacroCallLoc = db.lookup_intern_macro(id); | ||
274 | if loc.def.is_proc_macro() { | ||
275 | // Proc macros expect their input without the surrounding parentheses; MBEs expect them included | ||
276 | tt.delimiter = None; | ||
277 | } | ||
278 | } | ||
279 | |||
273 | Some(Arc::new((tt, tmap))) | 280 | Some(Arc::new((tt, tmap))) |
274 | } | 281 | } |
275 | 282 | ||
@@ -283,6 +290,7 @@ fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> { | |||
283 | }; | 290 | }; |
284 | let loc = db.lookup_intern_macro(id); | 291 | let loc = db.lookup_intern_macro(id); |
285 | let arg = loc.kind.arg(db)?; | 292 | let arg = loc.kind.arg(db)?; |
293 | let arg = process_macro_input(db, arg, id); | ||
286 | Some(arg.green().into()) | 294 | Some(arg.green().into()) |
287 | } | 295 | } |
288 | 296 | ||
@@ -427,62 +435,15 @@ fn hygiene_frame(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<HygieneFrame> | |||
427 | Arc::new(HygieneFrame::new(db, file_id)) | 435 | Arc::new(HygieneFrame::new(db, file_id)) |
428 | } | 436 | } |
429 | 437 | ||
430 | /// Given a `MacroCallId`, return what `FragmentKind` it belongs to. | 438 | fn macro_fragment_kind(db: &dyn AstDatabase, id: MacroCallId) -> FragmentKind { |
431 | /// FIXME: Not completed | 439 | match id { |
432 | fn to_fragment_kind(db: &dyn AstDatabase, id: MacroCallId) -> FragmentKind { | 440 | MacroCallId::LazyMacro(id) => { |
433 | let lazy_id = match id { | 441 | let loc: MacroCallLoc = db.lookup_intern_macro(id); |
434 | MacroCallId::LazyMacro(id) => id, | 442 | loc.kind.fragment_kind() |
435 | MacroCallId::EagerMacro(id) => { | ||
436 | return db.lookup_intern_eager_expansion(id).fragment; | ||
437 | } | ||
438 | }; | ||
439 | let syn = db.lookup_intern_macro(lazy_id).kind.node(db).value; | ||
440 | |||
441 | let parent = match syn.parent() { | ||
442 | Some(it) => it, | ||
443 | None => return FragmentKind::Statements, | ||
444 | }; | ||
445 | |||
446 | match parent.kind() { | ||
447 | MACRO_ITEMS | SOURCE_FILE => FragmentKind::Items, | ||
448 | MACRO_STMTS => FragmentKind::Statements, | ||
449 | MACRO_PAT => FragmentKind::Pattern, | ||
450 | MACRO_TYPE => FragmentKind::Type, | ||
451 | ITEM_LIST => FragmentKind::Items, | ||
452 | LET_STMT => { | ||
453 | // FIXME: Handle LHS Pattern | ||
454 | FragmentKind::Expr | ||
455 | } | 443 | } |
456 | EXPR_STMT => FragmentKind::Statements, | 444 | MacroCallId::EagerMacro(id) => { |
457 | BLOCK_EXPR => FragmentKind::Statements, | 445 | let loc: EagerCallLoc = db.lookup_intern_eager_expansion(id); |
458 | ARG_LIST => FragmentKind::Expr, | 446 | loc.fragment |
459 | TRY_EXPR => FragmentKind::Expr, | ||
460 | TUPLE_EXPR => FragmentKind::Expr, | ||
461 | PAREN_EXPR => FragmentKind::Expr, | ||
462 | ARRAY_EXPR => FragmentKind::Expr, | ||
463 | FOR_EXPR => FragmentKind::Expr, | ||
464 | PATH_EXPR => FragmentKind::Expr, | ||
465 | CLOSURE_EXPR => FragmentKind::Expr, | ||
466 | CONDITION => FragmentKind::Expr, | ||
467 | BREAK_EXPR => FragmentKind::Expr, | ||
468 | RETURN_EXPR => FragmentKind::Expr, | ||
469 | MATCH_EXPR => FragmentKind::Expr, | ||
470 | MATCH_ARM => FragmentKind::Expr, | ||
471 | MATCH_GUARD => FragmentKind::Expr, | ||
472 | RECORD_EXPR_FIELD => FragmentKind::Expr, | ||
473 | CALL_EXPR => FragmentKind::Expr, | ||
474 | INDEX_EXPR => FragmentKind::Expr, | ||
475 | METHOD_CALL_EXPR => FragmentKind::Expr, | ||
476 | FIELD_EXPR => FragmentKind::Expr, | ||
477 | AWAIT_EXPR => FragmentKind::Expr, | ||
478 | CAST_EXPR => FragmentKind::Expr, | ||
479 | REF_EXPR => FragmentKind::Expr, | ||
480 | PREFIX_EXPR => FragmentKind::Expr, | ||
481 | RANGE_EXPR => FragmentKind::Expr, | ||
482 | BIN_EXPR => FragmentKind::Expr, | ||
483 | _ => { | ||
484 | // Unknown , Just guess it is `Items` | ||
485 | FragmentKind::Items | ||
486 | } | 447 | } |
487 | } | 448 | } |
488 | } | 449 | } |
diff --git a/crates/hir_expand/src/eager.rs b/crates/hir_expand/src/eager.rs index f12132f84..85491fe8b 100644 --- a/crates/hir_expand/src/eager.rs +++ b/crates/hir_expand/src/eager.rs | |||
@@ -175,8 +175,13 @@ fn lazy_expand( | |||
175 | ) -> ExpandResult<Option<InFile<SyntaxNode>>> { | 175 | ) -> ExpandResult<Option<InFile<SyntaxNode>>> { |
176 | let ast_id = db.ast_id_map(macro_call.file_id).ast_id(¯o_call.value); | 176 | let ast_id = db.ast_id_map(macro_call.file_id).ast_id(¯o_call.value); |
177 | 177 | ||
178 | let fragment = crate::to_fragment_kind(¯o_call.value); | ||
178 | let id: MacroCallId = def | 179 | let id: MacroCallId = def |
179 | .as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id) }) | 180 | .as_lazy_macro( |
181 | db, | ||
182 | krate, | ||
183 | MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), fragment }, | ||
184 | ) | ||
180 | .into(); | 185 | .into(); |
181 | 186 | ||
182 | let err = db.macro_expand_error(id); | 187 | let err = db.macro_expand_error(id); |
diff --git a/crates/hir_expand/src/input.rs b/crates/hir_expand/src/input.rs new file mode 100644 index 000000000..112216859 --- /dev/null +++ b/crates/hir_expand/src/input.rs | |||
@@ -0,0 +1,94 @@ | |||
1 | //! Macro input conditioning. | ||
2 | |||
3 | use syntax::{ | ||
4 | ast::{self, AttrsOwner}, | ||
5 | AstNode, SyntaxNode, | ||
6 | }; | ||
7 | |||
8 | use crate::{ | ||
9 | db::AstDatabase, | ||
10 | name::{name, AsName}, | ||
11 | LazyMacroId, MacroCallKind, MacroCallLoc, | ||
12 | }; | ||
13 | |||
14 | pub(crate) fn process_macro_input( | ||
15 | db: &dyn AstDatabase, | ||
16 | node: SyntaxNode, | ||
17 | id: LazyMacroId, | ||
18 | ) -> SyntaxNode { | ||
19 | let loc: MacroCallLoc = db.lookup_intern_macro(id); | ||
20 | |||
21 | match loc.kind { | ||
22 | MacroCallKind::FnLike { .. } => node, | ||
23 | MacroCallKind::Derive { derive_attr_index, .. } => { | ||
24 | let item = match ast::Item::cast(node.clone()) { | ||
25 | Some(item) => item, | ||
26 | None => return node, | ||
27 | }; | ||
28 | |||
29 | remove_derives_up_to(item, derive_attr_index as usize).syntax().clone() | ||
30 | } | ||
31 | } | ||
32 | } | ||
33 | |||
34 | /// Removes `#[derive]` attributes from `item`, up to `attr_index`. | ||
35 | fn remove_derives_up_to(item: ast::Item, attr_index: usize) -> ast::Item { | ||
36 | let item = item.clone_for_update(); | ||
37 | for attr in item.attrs().take(attr_index + 1) { | ||
38 | if let Some(name) = | ||
39 | attr.path().and_then(|path| path.as_single_segment()).and_then(|seg| seg.name_ref()) | ||
40 | { | ||
41 | if name.as_name() == name![derive] { | ||
42 | attr.syntax().detach(); | ||
43 | } | ||
44 | } | ||
45 | } | ||
46 | item | ||
47 | } | ||
48 | |||
49 | #[cfg(test)] | ||
50 | mod tests { | ||
51 | use base_db::fixture::WithFixture; | ||
52 | use base_db::SourceDatabase; | ||
53 | use expect_test::{expect, Expect}; | ||
54 | |||
55 | use crate::test_db::TestDB; | ||
56 | |||
57 | use super::*; | ||
58 | |||
59 | fn test_remove_derives_up_to(attr: usize, ra_fixture: &str, expect: Expect) { | ||
60 | let (db, file_id) = TestDB::with_single_file(&ra_fixture); | ||
61 | let parsed = db.parse(file_id); | ||
62 | |||
63 | let mut items: Vec<_> = | ||
64 | parsed.syntax_node().descendants().filter_map(ast::Item::cast).collect(); | ||
65 | assert_eq!(items.len(), 1); | ||
66 | |||
67 | let item = remove_derives_up_to(items.pop().unwrap(), attr); | ||
68 | expect.assert_eq(&item.to_string()); | ||
69 | } | ||
70 | |||
71 | #[test] | ||
72 | fn remove_derive() { | ||
73 | test_remove_derives_up_to( | ||
74 | 2, | ||
75 | r#" | ||
76 | #[allow(unused)] | ||
77 | #[derive(Copy)] | ||
78 | #[derive(Hello)] | ||
79 | #[derive(Clone)] | ||
80 | struct A { | ||
81 | bar: u32 | ||
82 | } | ||
83 | "#, | ||
84 | expect![[r#" | ||
85 | #[allow(unused)] | ||
86 | |||
87 | |||
88 | #[derive(Clone)] | ||
89 | struct A { | ||
90 | bar: u32 | ||
91 | }"#]], | ||
92 | ); | ||
93 | } | ||
94 | } | ||
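A standalone sketch of the index rule used by `remove_derives_up_to` (the function below is hypothetical and works on attribute strings rather than the syntax tree): among the first `attr_index + 1` attributes, every `#[derive(..)]` is dropped, while later attributes are left alone, matching the expected output of the test above.

    fn remove_derives_up_to_sketch<'a>(attrs: &[&'a str], attr_index: usize) -> Vec<&'a str> {
        attrs
            .iter()
            .enumerate()
            .filter(|&(i, attr)| !(i <= attr_index && attr.starts_with("#[derive")))
            .map(|(_, attr)| *attr)
            .collect()
    }

    fn main() {
        let attrs = ["#[allow(unused)]", "#[derive(Copy)]", "#[derive(Hello)]", "#[derive(Clone)]"];
        // Index 2 points at `#[derive(Hello)]`; it and the earlier derive are removed.
        assert_eq!(
            remove_derives_up_to_sketch(&attrs, 2),
            vec!["#[allow(unused)]", "#[derive(Clone)]"]
        );
    }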
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs index 0402640de..88cb16ca4 100644 --- a/crates/hir_expand/src/lib.rs +++ b/crates/hir_expand/src/lib.rs | |||
@@ -14,9 +14,12 @@ pub mod builtin_macro; | |||
14 | pub mod proc_macro; | 14 | pub mod proc_macro; |
15 | pub mod quote; | 15 | pub mod quote; |
16 | pub mod eager; | 16 | pub mod eager; |
17 | mod input; | ||
17 | 18 | ||
18 | use either::Either; | 19 | use either::Either; |
20 | |||
19 | pub use mbe::{ExpandError, ExpandResult}; | 21 | pub use mbe::{ExpandError, ExpandResult}; |
22 | pub use parser::FragmentKind; | ||
20 | 23 | ||
21 | use std::hash::Hash; | 24 | use std::hash::Hash; |
22 | use std::sync::Arc; | 25 | use std::sync::Arc; |
@@ -269,6 +272,10 @@ impl MacroDefId { | |||
269 | }; | 272 | }; |
270 | Either::Left(*id) | 273 | Either::Left(*id) |
271 | } | 274 | } |
275 | |||
276 | pub fn is_proc_macro(&self) -> bool { | ||
277 | matches!(self.kind, MacroDefKind::ProcMacro(..)) | ||
278 | } | ||
272 | } | 279 | } |
273 | 280 | ||
274 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | 281 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] |
@@ -290,13 +297,21 @@ pub struct MacroCallLoc { | |||
290 | 297 | ||
291 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 298 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
292 | pub enum MacroCallKind { | 299 | pub enum MacroCallKind { |
293 | FnLike { ast_id: AstId<ast::MacroCall> }, | 300 | FnLike { |
294 | Derive { ast_id: AstId<ast::Item>, derive_name: String, derive_attr: AttrId }, | 301 | ast_id: AstId<ast::MacroCall>, |
302 | fragment: FragmentKind, | ||
303 | }, | ||
304 | Derive { | ||
305 | ast_id: AstId<ast::Item>, | ||
306 | derive_name: String, | ||
307 | /// Syntactical index of the invoking `#[derive]` attribute. | ||
308 | /// | ||
309 | /// Outer attributes are counted first, then inner attributes. This does not support | ||
310 | /// out-of-line modules, which may have attributes spread across 2 files! | ||
311 | derive_attr_index: u32, | ||
312 | }, | ||
295 | } | 313 | } |
296 | 314 | ||
297 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | ||
298 | pub struct AttrId(pub u32); | ||
299 | |||
300 | impl MacroCallKind { | 315 | impl MacroCallKind { |
301 | fn file_id(&self) -> HirFileId { | 316 | fn file_id(&self) -> HirFileId { |
302 | match self { | 317 | match self { |
@@ -324,6 +339,13 @@ impl MacroCallKind { | |||
324 | MacroCallKind::Derive { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()), | 339 | MacroCallKind::Derive { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()), |
325 | } | 340 | } |
326 | } | 341 | } |
342 | |||
343 | fn fragment_kind(&self) -> FragmentKind { | ||
344 | match self { | ||
345 | MacroCallKind::FnLike { fragment, .. } => *fragment, | ||
346 | MacroCallKind::Derive { .. } => FragmentKind::Items, | ||
347 | } | ||
348 | } | ||
327 | } | 349 | } |
328 | 350 | ||
329 | impl MacroCallId { | 351 | impl MacroCallId { |
@@ -357,7 +379,6 @@ pub struct ExpansionInfo { | |||
357 | } | 379 | } |
358 | 380 | ||
359 | pub use mbe::Origin; | 381 | pub use mbe::Origin; |
360 | use parser::FragmentKind; | ||
361 | 382 | ||
362 | impl ExpansionInfo { | 383 | impl ExpansionInfo { |
363 | pub fn call_node(&self) -> Option<InFile<SyntaxNode>> { | 384 | pub fn call_node(&self) -> Option<InFile<SyntaxNode>> { |
@@ -562,3 +583,59 @@ impl<N: AstNode> InFile<N> { | |||
562 | self.with_value(self.value.syntax()) | 583 | self.with_value(self.value.syntax()) |
563 | } | 584 | } |
564 | } | 585 | } |
586 | |||
587 | /// Given a `MacroCallId`, return what `FragmentKind` it belongs to. | ||
588 | /// FIXME: Not completed | ||
589 | pub fn to_fragment_kind(call: &ast::MacroCall) -> FragmentKind { | ||
590 | use syntax::SyntaxKind::*; | ||
591 | |||
592 | let syn = call.syntax(); | ||
593 | |||
594 | let parent = match syn.parent() { | ||
595 | Some(it) => it, | ||
596 | None => return FragmentKind::Statements, | ||
597 | }; | ||
598 | |||
599 | match parent.kind() { | ||
600 | MACRO_ITEMS | SOURCE_FILE => FragmentKind::Items, | ||
601 | MACRO_STMTS => FragmentKind::Statements, | ||
602 | MACRO_PAT => FragmentKind::Pattern, | ||
603 | MACRO_TYPE => FragmentKind::Type, | ||
604 | ITEM_LIST => FragmentKind::Items, | ||
605 | LET_STMT => { | ||
606 | // FIXME: Handle LHS Pattern | ||
607 | FragmentKind::Expr | ||
608 | } | ||
609 | EXPR_STMT => FragmentKind::Statements, | ||
610 | BLOCK_EXPR => FragmentKind::Statements, | ||
611 | ARG_LIST => FragmentKind::Expr, | ||
612 | TRY_EXPR => FragmentKind::Expr, | ||
613 | TUPLE_EXPR => FragmentKind::Expr, | ||
614 | PAREN_EXPR => FragmentKind::Expr, | ||
615 | ARRAY_EXPR => FragmentKind::Expr, | ||
616 | FOR_EXPR => FragmentKind::Expr, | ||
617 | PATH_EXPR => FragmentKind::Expr, | ||
618 | CLOSURE_EXPR => FragmentKind::Expr, | ||
619 | CONDITION => FragmentKind::Expr, | ||
620 | BREAK_EXPR => FragmentKind::Expr, | ||
621 | RETURN_EXPR => FragmentKind::Expr, | ||
622 | MATCH_EXPR => FragmentKind::Expr, | ||
623 | MATCH_ARM => FragmentKind::Expr, | ||
624 | MATCH_GUARD => FragmentKind::Expr, | ||
625 | RECORD_EXPR_FIELD => FragmentKind::Expr, | ||
626 | CALL_EXPR => FragmentKind::Expr, | ||
627 | INDEX_EXPR => FragmentKind::Expr, | ||
628 | METHOD_CALL_EXPR => FragmentKind::Expr, | ||
629 | FIELD_EXPR => FragmentKind::Expr, | ||
630 | AWAIT_EXPR => FragmentKind::Expr, | ||
631 | CAST_EXPR => FragmentKind::Expr, | ||
632 | REF_EXPR => FragmentKind::Expr, | ||
633 | PREFIX_EXPR => FragmentKind::Expr, | ||
634 | RANGE_EXPR => FragmentKind::Expr, | ||
635 | BIN_EXPR => FragmentKind::Expr, | ||
636 | _ => { | ||
637 | // Unknown , Just guess it is `Items` | ||
638 | FragmentKind::Items | ||
639 | } | ||
640 | } | ||
641 | } | ||
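The fragment kind is now decided purely from the call site's parent syntax node, before the call is interned. A hypothetical set of invocations and the arm of the match above that each one would hit:

    macro_rules! m { () => { 0 } }

    fn g(_: i32) {}

    fn f() {
        let _x = m!();      // parent LET_STMT   -> FragmentKind::Expr
        g(m!());            // parent ARG_LIST   -> FragmentKind::Expr
        let _y = [m!(); 1]; // parent ARRAY_EXPR -> FragmentKind::Expr
        m!();               // statement position -> FragmentKind::Statements
    }

    fn main() { f(); }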
diff --git a/crates/hir_expand/src/name.rs b/crates/hir_expand/src/name.rs index bcfd3e524..5a5dc9afd 100644 --- a/crates/hir_expand/src/name.rs +++ b/crates/hir_expand/src/name.rs | |||
@@ -212,6 +212,7 @@ pub mod known { | |||
212 | std_panic, | 212 | std_panic, |
213 | stringify, | 213 | stringify, |
214 | concat, | 214 | concat, |
215 | concat_idents, | ||
215 | include, | 216 | include, |
216 | include_bytes, | 217 | include_bytes, |
217 | include_str, | 218 | include_str, |
diff --git a/crates/hir_expand/src/proc_macro.rs b/crates/hir_expand/src/proc_macro.rs index 75e950816..d5643393a 100644 --- a/crates/hir_expand/src/proc_macro.rs +++ b/crates/hir_expand/src/proc_macro.rs | |||
@@ -2,7 +2,6 @@ | |||
2 | 2 | ||
3 | use crate::db::AstDatabase; | 3 | use crate::db::AstDatabase; |
4 | use base_db::{CrateId, ProcMacroId}; | 4 | use base_db::{CrateId, ProcMacroId}; |
5 | use tt::buffer::{Cursor, TokenBuffer}; | ||
6 | 5 | ||
7 | #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] | 6 | #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] |
8 | pub struct ProcMacroExpander { | 7 | pub struct ProcMacroExpander { |
@@ -44,9 +43,6 @@ impl ProcMacroExpander { | |||
44 | .clone() | 43 | .clone() |
45 | .ok_or_else(|| err!("No derive macro found."))?; | 44 | .ok_or_else(|| err!("No derive macro found."))?; |
46 | 45 | ||
47 | let tt = remove_derive_attrs(tt) | ||
48 | .ok_or_else(|| err!("Fail to remove derive for custom derive"))?; | ||
49 | |||
50 | // Proc macros have access to the environment variables of the invoking crate. | 46 | // Proc macros have access to the environment variables of the invoking crate. |
51 | let env = &krate_graph[calling_crate].env; | 47 | let env = &krate_graph[calling_crate].env; |
52 | 48 | ||
@@ -56,101 +52,3 @@ impl ProcMacroExpander { | |||
56 | } | 52 | } |
57 | } | 53 | } |
58 | } | 54 | } |
59 | |||
60 | fn eat_punct(cursor: &mut Cursor, c: char) -> bool { | ||
61 | if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = cursor.token_tree() { | ||
62 | if punct.char == c { | ||
63 | *cursor = cursor.bump(); | ||
64 | return true; | ||
65 | } | ||
66 | } | ||
67 | false | ||
68 | } | ||
69 | |||
70 | fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool { | ||
71 | if let Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) = cursor.token_tree() { | ||
72 | if Some(kind) == subtree.delimiter_kind() { | ||
73 | *cursor = cursor.bump_subtree(); | ||
74 | return true; | ||
75 | } | ||
76 | } | ||
77 | false | ||
78 | } | ||
79 | |||
80 | fn eat_ident(cursor: &mut Cursor, t: &str) -> bool { | ||
81 | if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(ident), _)) = cursor.token_tree() { | ||
82 | if t == ident.text.as_str() { | ||
83 | *cursor = cursor.bump(); | ||
84 | return true; | ||
85 | } | ||
86 | } | ||
87 | false | ||
88 | } | ||
89 | |||
90 | fn remove_derive_attrs(tt: &tt::Subtree) -> Option<tt::Subtree> { | ||
91 | let buffer = TokenBuffer::from_tokens(&tt.token_trees); | ||
92 | let mut p = buffer.begin(); | ||
93 | let mut result = tt::Subtree::default(); | ||
94 | |||
95 | while !p.eof() { | ||
96 | let curr = p; | ||
97 | |||
98 | if eat_punct(&mut p, '#') { | ||
99 | eat_punct(&mut p, '!'); | ||
100 | let parent = p; | ||
101 | if eat_subtree(&mut p, tt::DelimiterKind::Bracket) { | ||
102 | if eat_ident(&mut p, "derive") { | ||
103 | p = parent.bump(); | ||
104 | continue; | ||
105 | } | ||
106 | } | ||
107 | } | ||
108 | |||
109 | result.token_trees.push(curr.token_tree()?.cloned()); | ||
110 | p = curr.bump(); | ||
111 | } | ||
112 | |||
113 | Some(result) | ||
114 | } | ||
115 | |||
116 | #[cfg(test)] | ||
117 | mod tests { | ||
118 | use super::*; | ||
119 | use test_utils::assert_eq_text; | ||
120 | |||
121 | #[test] | ||
122 | fn test_remove_derive_attrs() { | ||
123 | let tt = mbe::parse_to_token_tree( | ||
124 | r#" | ||
125 | #[allow(unused)] | ||
126 | #[derive(Copy)] | ||
127 | #[derive(Hello)] | ||
128 | struct A { | ||
129 | bar: u32 | ||
130 | } | ||
131 | "#, | ||
132 | ) | ||
133 | .unwrap() | ||
134 | .0; | ||
135 | let result = format!("{:#?}", remove_derive_attrs(&tt).unwrap()); | ||
136 | |||
137 | assert_eq_text!( | ||
138 | r#" | ||
139 | SUBTREE $ | ||
140 | PUNCH # [alone] 0 | ||
141 | SUBTREE [] 1 | ||
142 | IDENT allow 2 | ||
143 | SUBTREE () 3 | ||
144 | IDENT unused 4 | ||
145 | IDENT struct 15 | ||
146 | IDENT A 16 | ||
147 | SUBTREE {} 17 | ||
148 | IDENT bar 18 | ||
149 | PUNCH : [alone] 19 | ||
150 | IDENT u32 20 | ||
151 | "# | ||
152 | .trim(), | ||
153 | &result | ||
154 | ); | ||
155 | } | ||
156 | } | ||
diff --git a/crates/hir_expand/src/quote.rs b/crates/hir_expand/src/quote.rs index c82487ef0..230a59964 100644 --- a/crates/hir_expand/src/quote.rs +++ b/crates/hir_expand/src/quote.rs | |||
@@ -196,8 +196,8 @@ impl_to_to_tokentrees! { | |||
196 | tt::Literal => self { self }; | 196 | tt::Literal => self { self }; |
197 | tt::Ident => self { self }; | 197 | tt::Ident => self { self }; |
198 | tt::Punct => self { self }; | 198 | tt::Punct => self { self }; |
199 | &str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}}; | 199 | &str => self { tt::Literal{text: format!("\"{}\"", self.escape_debug()).into(), id: tt::TokenId::unspecified()}}; |
200 | String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}} | 200 | String => self { tt::Literal{text: format!("\"{}\"", self.escape_debug()).into(), id: tt::TokenId::unspecified()}} |
201 | } | 201 | } |
202 | 202 | ||
203 | #[cfg(test)] | 203 | #[cfg(test)] |
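A small self-contained check of why this quoting change matters (standard library only; the values are illustrative): the old form Debug-formatted an already-escaped string, escaping the backslashes a second time, so a "\n" quoted into a token literal came out as a literal backslash-n rather than a newline. The new form escapes once and adds the quotes itself.

    fn main() {
        let s = "\n"; // a string containing a single newline character

        // Old: escape once, then Debug-format the escaped string, which escapes
        // the backslash again.
        let old = format!("{:?}", s.escape_default().to_string());
        // New: escape once and wrap in quotes manually.
        let new = format!("\"{}\"", s.escape_debug());

        assert_eq!(old, r#""\\n""#); // double-escaped: a literal backslash and 'n'
        assert_eq!(new, r#""\n""#);  // round-trips back to a newline when parsed
    }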
diff --git a/crates/hir_ty/src/chalk_ext.rs b/crates/hir_ty/src/chalk_ext.rs index 8c4542956..5232a7d80 100644 --- a/crates/hir_ty/src/chalk_ext.rs +++ b/crates/hir_ty/src/chalk_ext.rs | |||
@@ -1,8 +1,10 @@ | |||
1 | //! Various extensions traits for Chalk types. | 1 | //! Various extensions traits for Chalk types. |
2 | 2 | ||
3 | use chalk_ir::Mutability; | 3 | use chalk_ir::{FloatTy, IntTy, Mutability, Scalar, UintTy}; |
4 | use hir_def::{ | 4 | use hir_def::{ |
5 | type_ref::Rawness, AssocContainerId, FunctionId, GenericDefId, HasModule, Lookup, TraitId, | 5 | builtin_type::{BuiltinFloat, BuiltinInt, BuiltinType, BuiltinUint}, |
6 | type_ref::Rawness, | ||
7 | AssocContainerId, FunctionId, GenericDefId, HasModule, Lookup, TraitId, | ||
6 | }; | 8 | }; |
7 | 9 | ||
8 | use crate::{ | 10 | use crate::{ |
@@ -18,6 +20,7 @@ pub trait TyExt { | |||
18 | fn is_unknown(&self) -> bool; | 20 | fn is_unknown(&self) -> bool; |
19 | 21 | ||
20 | fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)>; | 22 | fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)>; |
23 | fn as_builtin(&self) -> Option<BuiltinType>; | ||
21 | fn as_tuple(&self) -> Option<&Substitution>; | 24 | fn as_tuple(&self) -> Option<&Substitution>; |
22 | fn as_fn_def(&self, db: &dyn HirDatabase) -> Option<FunctionId>; | 25 | fn as_fn_def(&self, db: &dyn HirDatabase) -> Option<FunctionId>; |
23 | fn as_reference(&self) -> Option<(&Ty, Lifetime, Mutability)>; | 26 | fn as_reference(&self) -> Option<(&Ty, Lifetime, Mutability)>; |
@@ -59,6 +62,35 @@ impl TyExt for Ty { | |||
59 | } | 62 | } |
60 | } | 63 | } |
61 | 64 | ||
65 | fn as_builtin(&self) -> Option<BuiltinType> { | ||
66 | match self.kind(&Interner) { | ||
67 | TyKind::Str => Some(BuiltinType::Str), | ||
68 | TyKind::Scalar(Scalar::Bool) => Some(BuiltinType::Bool), | ||
69 | TyKind::Scalar(Scalar::Char) => Some(BuiltinType::Char), | ||
70 | TyKind::Scalar(Scalar::Float(fty)) => Some(BuiltinType::Float(match fty { | ||
71 | FloatTy::F64 => BuiltinFloat::F64, | ||
72 | FloatTy::F32 => BuiltinFloat::F32, | ||
73 | })), | ||
74 | TyKind::Scalar(Scalar::Int(ity)) => Some(BuiltinType::Int(match ity { | ||
75 | IntTy::Isize => BuiltinInt::Isize, | ||
76 | IntTy::I8 => BuiltinInt::I8, | ||
77 | IntTy::I16 => BuiltinInt::I16, | ||
78 | IntTy::I32 => BuiltinInt::I32, | ||
79 | IntTy::I64 => BuiltinInt::I64, | ||
80 | IntTy::I128 => BuiltinInt::I128, | ||
81 | })), | ||
82 | TyKind::Scalar(Scalar::Uint(ity)) => Some(BuiltinType::Uint(match ity { | ||
83 | UintTy::Usize => BuiltinUint::Usize, | ||
84 | UintTy::U8 => BuiltinUint::U8, | ||
85 | UintTy::U16 => BuiltinUint::U16, | ||
86 | UintTy::U32 => BuiltinUint::U32, | ||
87 | UintTy::U64 => BuiltinUint::U64, | ||
88 | UintTy::U128 => BuiltinUint::U128, | ||
89 | })), | ||
90 | _ => None, | ||
91 | } | ||
92 | } | ||
93 | |||
62 | fn as_tuple(&self) -> Option<&Substitution> { | 94 | fn as_tuple(&self) -> Option<&Substitution> { |
63 | match self.kind(&Interner) { | 95 | match self.kind(&Interner) { |
64 | TyKind::Tuple(_, substs) => Some(substs), | 96 | TyKind::Tuple(_, substs) => Some(substs), |
diff --git a/crates/hir_ty/src/consteval.rs b/crates/hir_ty/src/consteval.rs new file mode 100644 index 000000000..e3ceb3d62 --- /dev/null +++ b/crates/hir_ty/src/consteval.rs | |||
@@ -0,0 +1,56 @@ | |||
1 | //! Constant evaluation details | ||
2 | |||
3 | use std::convert::TryInto; | ||
4 | |||
5 | use hir_def::{ | ||
6 | builtin_type::BuiltinUint, | ||
7 | expr::{Expr, Literal}, | ||
8 | type_ref::ConstScalar, | ||
9 | }; | ||
10 | |||
11 | use crate::{Const, ConstData, ConstValue, Interner, TyKind}; | ||
12 | |||
13 | /// Extension trait for [`Const`] | ||
14 | pub trait ConstExt { | ||
15 | /// Is a [`Const`] unknown? | ||
16 | fn is_unknown(&self) -> bool; | ||
17 | } | ||
18 | |||
19 | impl ConstExt for Const { | ||
20 | fn is_unknown(&self) -> bool { | ||
21 | match self.data(&Interner).value { | ||
22 | // interned Unknown | ||
23 | chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst { | ||
24 | interned: ConstScalar::Unknown, | ||
25 | }) => true, | ||
26 | |||
27 | // any other interned concrete value | ||
28 | chalk_ir::ConstValue::Concrete(..) => false, | ||
29 | |||
30 | _ => { | ||
31 | log::error!("is_unknown was called on a non-concrete constant value! {:?}", self); | ||
32 | true | ||
33 | } | ||
34 | } | ||
35 | } | ||
36 | } | ||
37 | |||
38 | // FIXME: support more than just evaluating literals | ||
39 | pub fn eval_usize(expr: &Expr) -> Option<u64> { | ||
40 | match expr { | ||
41 | Expr::Literal(Literal::Uint(v, None)) | ||
42 | | Expr::Literal(Literal::Uint(v, Some(BuiltinUint::Usize))) => (*v).try_into().ok(), | ||
43 | _ => None, | ||
44 | } | ||
45 | } | ||
46 | |||
47 | /// Interns a possibly-unknown target usize | ||
48 | pub fn usize_const(value: Option<u64>) -> Const { | ||
49 | ConstData { | ||
50 | ty: TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize)).intern(&Interner), | ||
51 | value: ConstValue::Concrete(chalk_ir::ConcreteConst { | ||
52 | interned: value.map(|value| ConstScalar::Usize(value)).unwrap_or(ConstScalar::Unknown), | ||
53 | }), | ||
54 | } | ||
55 | .intern(&Interner) | ||
56 | } | ||
diff --git a/crates/hir_ty/src/diagnostics/expr.rs b/crates/hir_ty/src/diagnostics/expr.rs index 79602c3dd..47709c1e8 100644 --- a/crates/hir_ty/src/diagnostics/expr.rs +++ b/crates/hir_ty/src/diagnostics/expr.rs | |||
@@ -83,7 +83,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> { | |||
83 | if let Expr::Block { statements, tail, .. } = body_expr { | 83 | if let Expr::Block { statements, tail, .. } = body_expr { |
84 | if let Some(t) = tail { | 84 | if let Some(t) = tail { |
85 | self.validate_results_in_tail_expr(body.body_expr, *t, db); | 85 | self.validate_results_in_tail_expr(body.body_expr, *t, db); |
86 | } else if let Some(Statement::Expr(id)) = statements.last() { | 86 | } else if let Some(Statement::Expr { expr: id, .. }) = statements.last() { |
87 | self.validate_missing_tail_expr(body.body_expr, *id, db); | 87 | self.validate_missing_tail_expr(body.body_expr, *id, db); |
88 | } | 88 | } |
89 | } | 89 | } |
diff --git a/crates/hir_ty/src/diagnostics/match_check.rs b/crates/hir_ty/src/diagnostics/match_check.rs index e9762622f..6ee0529c6 100644 --- a/crates/hir_ty/src/diagnostics/match_check.rs +++ b/crates/hir_ty/src/diagnostics/match_check.rs | |||
@@ -1119,6 +1119,7 @@ fn main() { | |||
1119 | (true, false, true) => (), | 1119 | (true, false, true) => (), |
1120 | (true) => (), | 1120 | (true) => (), |
1121 | } | 1121 | } |
1122 | match (true, false) { (true,) => {} } | ||
1122 | match (0) { () => () } | 1123 | match (0) { () => () } |
1123 | match Unresolved::Bar { Unresolved::Baz => () } | 1124 | match Unresolved::Bar { Unresolved::Baz => () } |
1124 | } | 1125 | } |
diff --git a/crates/hir_ty/src/display.rs b/crates/hir_ty/src/display.rs index 1f6edf7a2..7bbd1a1f7 100644 --- a/crates/hir_ty/src/display.rs +++ b/crates/hir_ty/src/display.rs | |||
@@ -308,7 +308,7 @@ impl HirDisplay for Const { | |||
308 | let param_data = &generics.params.consts[id.local_id]; | 308 | let param_data = &generics.params.consts[id.local_id]; |
309 | write!(f, "{}", param_data.name) | 309 | write!(f, "{}", param_data.name) |
310 | } | 310 | } |
311 | ConstValue::Concrete(_) => write!(f, "_"), | 311 | ConstValue::Concrete(c) => write!(f, "{}", c.interned), |
312 | } | 312 | } |
313 | } | 313 | } |
314 | } | 314 | } |
@@ -962,11 +962,10 @@ impl HirDisplay for TypeRef { | |||
962 | write!(f, "{}", mutability)?; | 962 | write!(f, "{}", mutability)?; |
963 | inner.hir_fmt(f)?; | 963 | inner.hir_fmt(f)?; |
964 | } | 964 | } |
965 | TypeRef::Array(inner) => { | 965 | TypeRef::Array(inner, len) => { |
966 | write!(f, "[")?; | 966 | write!(f, "[")?; |
967 | inner.hir_fmt(f)?; | 967 | inner.hir_fmt(f)?; |
968 | // FIXME: Array length? | 968 | write!(f, "; {}]", len)?; |
969 | write!(f, "; _]")?; | ||
970 | } | 969 | } |
971 | TypeRef::Slice(inner) => { | 970 | TypeRef::Slice(inner) => { |
972 | write!(f, "[")?; | 971 | write!(f, "[")?; |
diff --git a/crates/hir_ty/src/infer/expr.rs b/crates/hir_ty/src/infer/expr.rs index 50497eecb..b6b5a1b75 100644 --- a/crates/hir_ty/src/infer/expr.rs +++ b/crates/hir_ty/src/infer/expr.rs | |||
@@ -15,7 +15,7 @@ use stdx::always; | |||
15 | use syntax::ast::RangeOp; | 15 | use syntax::ast::RangeOp; |
16 | 16 | ||
17 | use crate::{ | 17 | use crate::{ |
18 | autoderef, dummy_usize_const, | 18 | autoderef, consteval, |
19 | lower::lower_to_chalk_mutability, | 19 | lower::lower_to_chalk_mutability, |
20 | mapping::from_chalk, | 20 | mapping::from_chalk, |
21 | method_resolution, op, | 21 | method_resolution, op, |
@@ -717,11 +717,12 @@ impl<'a> InferenceContext<'a> { | |||
717 | _ => self.table.new_type_var(), | 717 | _ => self.table.new_type_var(), |
718 | }; | 718 | }; |
719 | 719 | ||
720 | match array { | 720 | let len = match array { |
721 | Array::ElementList(items) => { | 721 | Array::ElementList(items) => { |
722 | for expr in items.iter() { | 722 | for expr in items.iter() { |
723 | self.infer_expr_coerce(*expr, &Expectation::has_type(elem_ty.clone())); | 723 | self.infer_expr_coerce(*expr, &Expectation::has_type(elem_ty.clone())); |
724 | } | 724 | } |
725 | Some(items.len() as u64) | ||
725 | } | 726 | } |
726 | Array::Repeat { initializer, repeat } => { | 727 | Array::Repeat { initializer, repeat } => { |
727 | self.infer_expr_coerce( | 728 | self.infer_expr_coerce( |
@@ -734,10 +735,13 @@ impl<'a> InferenceContext<'a> { | |||
734 | TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(&Interner), | 735 | TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(&Interner), |
735 | ), | 736 | ), |
736 | ); | 737 | ); |
738 | |||
739 | let repeat_expr = &self.body.exprs[*repeat]; | ||
740 | consteval::eval_usize(repeat_expr) | ||
737 | } | 741 | } |
738 | } | 742 | }; |
739 | 743 | ||
740 | TyKind::Array(elem_ty, dummy_usize_const()).intern(&Interner) | 744 | TyKind::Array(elem_ty, consteval::usize_const(len)).intern(&Interner) |
741 | } | 745 | } |
742 | Expr::Literal(lit) => match lit { | 746 | Expr::Literal(lit) => match lit { |
743 | Literal::Bool(..) => TyKind::Scalar(Scalar::Bool).intern(&Interner), | 747 | Literal::Bool(..) => TyKind::Scalar(Scalar::Bool).intern(&Interner), |
@@ -745,10 +749,12 @@ impl<'a> InferenceContext<'a> { | |||
745 | TyKind::Ref(Mutability::Not, static_lifetime(), TyKind::Str.intern(&Interner)) | 749 | TyKind::Ref(Mutability::Not, static_lifetime(), TyKind::Str.intern(&Interner)) |
746 | .intern(&Interner) | 750 | .intern(&Interner) |
747 | } | 751 | } |
748 | Literal::ByteString(..) => { | 752 | Literal::ByteString(bs) => { |
749 | let byte_type = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(&Interner); | 753 | let byte_type = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(&Interner); |
750 | let array_type = | 754 | |
751 | TyKind::Array(byte_type, dummy_usize_const()).intern(&Interner); | 755 | let len = consteval::usize_const(Some(bs.len() as u64)); |
756 | |||
757 | let array_type = TyKind::Array(byte_type, len).intern(&Interner); | ||
752 | TyKind::Ref(Mutability::Not, static_lifetime(), array_type).intern(&Interner) | 758 | TyKind::Ref(Mutability::Not, static_lifetime(), array_type).intern(&Interner) |
753 | } | 759 | } |
754 | Literal::Char(..) => TyKind::Scalar(Scalar::Char).intern(&Interner), | 760 | Literal::Char(..) => TyKind::Scalar(Scalar::Char).intern(&Interner), |
@@ -809,7 +815,7 @@ impl<'a> InferenceContext<'a> { | |||
809 | let ty = self.resolve_ty_as_possible(ty); | 815 | let ty = self.resolve_ty_as_possible(ty); |
810 | self.infer_pat(*pat, &ty, BindingMode::default()); | 816 | self.infer_pat(*pat, &ty, BindingMode::default()); |
811 | } | 817 | } |
812 | Statement::Expr(expr) => { | 818 | Statement::Expr { expr, .. } => { |
813 | self.infer_expr(*expr, &Expectation::none()); | 819 | self.infer_expr(*expr, &Expectation::none()); |
814 | } | 820 | } |
815 | } | 821 | } |
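With element-list lengths, literal repeat counts, and byte-string lengths all feeding into `usize_const`, inferred array types gain concrete lengths where possible. A hypothetical snippet with the inference results this change aims for (in line with the updated tests later in this diff; non-literal lengths still fall back to the unknown constant):

    fn demo() {
        let _a = [1, 2, 3];    // [i32; 3]  - length taken from the element list
        let _b = [0u8; 4];     // [u8; 4]   - literal repeat count is const-evaluated
        let _c = b"ab";        // &[u8; 2]  - byte string length is now known
        let _d = [0u8; 2 + 2]; // [u8; _]   - non-literal lengths remain unknown for now
    }

    fn main() { demo(); }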
diff --git a/crates/hir_ty/src/infer/pat.rs b/crates/hir_ty/src/infer/pat.rs index aea354cde..60b94a642 100644 --- a/crates/hir_ty/src/infer/pat.rs +++ b/crates/hir_ty/src/infer/pat.rs | |||
@@ -126,11 +126,12 @@ impl<'a> InferenceContext<'a> { | |||
126 | _ => &[], | 126 | _ => &[], |
127 | }; | 127 | }; |
128 | 128 | ||
129 | let (pre, post) = match ellipsis { | 129 | let ((pre, post), n_uncovered_patterns) = match ellipsis { |
130 | Some(idx) => args.split_at(idx), | 130 | Some(idx) => { |
131 | None => (&args[..], &[][..]), | 131 | (args.split_at(idx), expectations.len().saturating_sub(args.len())) |
132 | } | ||
133 | None => ((&args[..], &[][..]), 0), | ||
132 | }; | 134 | }; |
133 | let n_uncovered_patterns = expectations.len().saturating_sub(args.len()); | ||
134 | let err_ty = self.err_ty(); | 135 | let err_ty = self.err_ty(); |
135 | let mut expectations_iter = | 136 | let mut expectations_iter = |
136 | expectations.iter().map(|a| a.assert_ty_ref(&Interner)).chain(repeat(&err_ty)); | 137 | expectations.iter().map(|a| a.assert_ty_ref(&Interner)).chain(repeat(&err_ty)); |
diff --git a/crates/hir_ty/src/interner.rs b/crates/hir_ty/src/interner.rs index a1656115d..7b4119747 100644 --- a/crates/hir_ty/src/interner.rs +++ b/crates/hir_ty/src/interner.rs | |||
@@ -6,6 +6,7 @@ use base_db::salsa::InternId; | |||
6 | use chalk_ir::{Goal, GoalData}; | 6 | use chalk_ir::{Goal, GoalData}; |
7 | use hir_def::{ | 7 | use hir_def::{ |
8 | intern::{impl_internable, InternStorage, Internable, Interned}, | 8 | intern::{impl_internable, InternStorage, Internable, Interned}, |
9 | type_ref::ConstScalar, | ||
9 | TypeAliasId, | 10 | TypeAliasId, |
10 | }; | 11 | }; |
11 | use smallvec::SmallVec; | 12 | use smallvec::SmallVec; |
@@ -31,6 +32,7 @@ impl_internable!( | |||
31 | InternedWrapper<chalk_ir::TyData<Interner>>, | 32 | InternedWrapper<chalk_ir::TyData<Interner>>, |
32 | InternedWrapper<chalk_ir::LifetimeData<Interner>>, | 33 | InternedWrapper<chalk_ir::LifetimeData<Interner>>, |
33 | InternedWrapper<chalk_ir::ConstData<Interner>>, | 34 | InternedWrapper<chalk_ir::ConstData<Interner>>, |
35 | InternedWrapper<ConstScalar>, | ||
34 | InternedWrapper<Vec<chalk_ir::CanonicalVarKind<Interner>>>, | 36 | InternedWrapper<Vec<chalk_ir::CanonicalVarKind<Interner>>>, |
35 | InternedWrapper<Vec<chalk_ir::ProgramClause<Interner>>>, | 37 | InternedWrapper<Vec<chalk_ir::ProgramClause<Interner>>>, |
36 | InternedWrapper<Vec<chalk_ir::QuantifiedWhereClause<Interner>>>, | 38 | InternedWrapper<Vec<chalk_ir::QuantifiedWhereClause<Interner>>>, |
@@ -41,7 +43,7 @@ impl chalk_ir::interner::Interner for Interner { | |||
41 | type InternedType = Interned<InternedWrapper<chalk_ir::TyData<Interner>>>; | 43 | type InternedType = Interned<InternedWrapper<chalk_ir::TyData<Interner>>>; |
42 | type InternedLifetime = Interned<InternedWrapper<chalk_ir::LifetimeData<Self>>>; | 44 | type InternedLifetime = Interned<InternedWrapper<chalk_ir::LifetimeData<Self>>>; |
43 | type InternedConst = Interned<InternedWrapper<chalk_ir::ConstData<Self>>>; | 45 | type InternedConst = Interned<InternedWrapper<chalk_ir::ConstData<Self>>>; |
44 | type InternedConcreteConst = (); | 46 | type InternedConcreteConst = ConstScalar; |
45 | type InternedGenericArg = chalk_ir::GenericArgData<Self>; | 47 | type InternedGenericArg = chalk_ir::GenericArgData<Self>; |
46 | type InternedGoal = Arc<GoalData<Self>>; | 48 | type InternedGoal = Arc<GoalData<Self>>; |
47 | type InternedGoals = Vec<Goal<Self>>; | 49 | type InternedGoals = Vec<Goal<Self>>; |
@@ -245,10 +247,15 @@ impl chalk_ir::interner::Interner for Interner { | |||
245 | fn const_eq( | 247 | fn const_eq( |
246 | &self, | 248 | &self, |
247 | _ty: &Self::InternedType, | 249 | _ty: &Self::InternedType, |
248 | _c1: &Self::InternedConcreteConst, | 250 | c1: &Self::InternedConcreteConst, |
249 | _c2: &Self::InternedConcreteConst, | 251 | c2: &Self::InternedConcreteConst, |
250 | ) -> bool { | 252 | ) -> bool { |
251 | true | 253 | match (c1, c2) { |
254 | (&ConstScalar::Usize(a), &ConstScalar::Usize(b)) => a == b, | ||
255 | // We were previously assuming this to be true; I'm not sure whether treating unknown | ||
256 | // values as equal or as unequal is safer. | ||
257 | (_, _) => true, | ||
258 | } | ||
252 | } | 259 | } |
253 | 260 | ||
254 | fn intern_generic_arg( | 261 | fn intern_generic_arg( |
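A standalone sketch of the equality rule adopted in `const_eq` above (the `ConstLen` enum and helper are hypothetical): two known lengths compare by value, while any comparison involving an unknown constant is conservatively treated as equal, preserving the previous always-true behaviour for the unknown case.

    #[derive(Clone, Copy)]
    enum ConstLen {
        Usize(u64),
        Unknown,
    }

    fn const_eq_sketch(a: ConstLen, b: ConstLen) -> bool {
        match (a, b) {
            (ConstLen::Usize(a), ConstLen::Usize(b)) => a == b,
            // Unknown on either side: fall back to "equal", as before.
            _ => true,
        }
    }

    fn main() {
        assert!(const_eq_sketch(ConstLen::Usize(2), ConstLen::Usize(2)));
        assert!(!const_eq_sketch(ConstLen::Usize(2), ConstLen::Usize(3)));
        assert!(const_eq_sketch(ConstLen::Unknown, ConstLen::Usize(3)));
    }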
diff --git a/crates/hir_ty/src/lib.rs b/crates/hir_ty/src/lib.rs index 0505fa4ae..15b61bedc 100644 --- a/crates/hir_ty/src/lib.rs +++ b/crates/hir_ty/src/lib.rs | |||
@@ -10,6 +10,7 @@ mod autoderef; | |||
10 | mod builder; | 10 | mod builder; |
11 | mod chalk_db; | 11 | mod chalk_db; |
12 | mod chalk_ext; | 12 | mod chalk_ext; |
13 | pub mod consteval; | ||
13 | mod infer; | 14 | mod infer; |
14 | mod interner; | 15 | mod interner; |
15 | mod lower; | 16 | mod lower; |
@@ -37,7 +38,11 @@ use chalk_ir::{ | |||
37 | interner::HasInterner, | 38 | interner::HasInterner, |
38 | UintTy, | 39 | UintTy, |
39 | }; | 40 | }; |
40 | use hir_def::{expr::ExprId, type_ref::Rawness, TypeParamId}; | 41 | use hir_def::{ |
42 | expr::ExprId, | ||
43 | type_ref::{ConstScalar, Rawness}, | ||
44 | TypeParamId, | ||
45 | }; | ||
41 | 46 | ||
42 | use crate::{db::HirDatabase, display::HirDisplay, utils::generics}; | 47 | use crate::{db::HirDatabase, display::HirDisplay, utils::generics}; |
43 | 48 | ||
@@ -250,7 +255,9 @@ pub fn dummy_usize_const() -> Const { | |||
250 | let usize_ty = chalk_ir::TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(&Interner); | 255 | let usize_ty = chalk_ir::TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(&Interner); |
251 | chalk_ir::ConstData { | 256 | chalk_ir::ConstData { |
252 | ty: usize_ty, | 257 | ty: usize_ty, |
253 | value: chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst { interned: () }), | 258 | value: chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst { |
259 | interned: ConstScalar::Unknown, | ||
260 | }), | ||
254 | } | 261 | } |
255 | .intern(&Interner) | 262 | .intern(&Interner) |
256 | } | 263 | } |
diff --git a/crates/hir_ty/src/lower.rs b/crates/hir_ty/src/lower.rs index c99dd8d0a..bd8bb6028 100644 --- a/crates/hir_ty/src/lower.rs +++ b/crates/hir_ty/src/lower.rs | |||
@@ -29,8 +29,8 @@ use stdx::impl_from; | |||
29 | use syntax::ast; | 29 | use syntax::ast; |
30 | 30 | ||
31 | use crate::{ | 31 | use crate::{ |
32 | consteval, | ||
32 | db::HirDatabase, | 33 | db::HirDatabase, |
33 | dummy_usize_const, | ||
34 | mapping::ToChalk, | 34 | mapping::ToChalk, |
35 | static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx, | 35 | static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx, |
36 | utils::{ | 36 | utils::{ |
@@ -172,9 +172,12 @@ impl<'a> TyLoweringContext<'a> { | |||
172 | let inner_ty = self.lower_ty(inner); | 172 | let inner_ty = self.lower_ty(inner); |
173 | TyKind::Raw(lower_to_chalk_mutability(*mutability), inner_ty).intern(&Interner) | 173 | TyKind::Raw(lower_to_chalk_mutability(*mutability), inner_ty).intern(&Interner) |
174 | } | 174 | } |
175 | TypeRef::Array(inner) => { | 175 | TypeRef::Array(inner, len) => { |
176 | let inner_ty = self.lower_ty(inner); | 176 | let inner_ty = self.lower_ty(inner); |
177 | TyKind::Array(inner_ty, dummy_usize_const()).intern(&Interner) | 177 | |
178 | let const_len = consteval::usize_const(len.as_usize()); | ||
179 | |||
180 | TyKind::Array(inner_ty, const_len).intern(&Interner) | ||
178 | } | 181 | } |
179 | TypeRef::Slice(inner) => { | 182 | TypeRef::Slice(inner) => { |
180 | let inner_ty = self.lower_ty(inner); | 183 | let inner_ty = self.lower_ty(inner); |
diff --git a/crates/hir_ty/src/tests/coercion.rs b/crates/hir_ty/src/tests/coercion.rs index 63d9d4e0b..190471069 100644 --- a/crates/hir_ty/src/tests/coercion.rs +++ b/crates/hir_ty/src/tests/coercion.rs | |||
@@ -55,7 +55,7 @@ fn coerce_places() { | |||
55 | impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {} | 55 | impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {} |
56 | impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {} | 56 | impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {} |
57 | "#, | 57 | "#, |
58 | expect![[r" | 58 | expect![[r#" |
59 | 30..31 '_': &[T] | 59 | 30..31 '_': &[T] |
60 | 44..55 '{ loop {} }': T | 60 | 44..55 '{ loop {} }': T |
61 | 46..53 'loop {}': ! | 61 | 46..53 'loop {}': ! |
@@ -64,43 +64,43 @@ fn coerce_places() { | |||
64 | 81..92 '{ loop {} }': T | 64 | 81..92 '{ loop {} }': T |
65 | 83..90 'loop {}': ! | 65 | 83..90 'loop {}': ! |
66 | 88..90 '{}': () | 66 | 88..90 '{}': () |
67 | 121..132 '{ loop {} }': *mut [T; _] | 67 | 121..132 '{ loop {} }': *mut [T; 2] |
68 | 123..130 'loop {}': ! | 68 | 123..130 'loop {}': ! |
69 | 128..130 '{}': () | 69 | 128..130 '{}': () |
70 | 159..172 '{ gen() }': *mut [U] | 70 | 159..172 '{ gen() }': *mut [U] |
71 | 165..168 'gen': fn gen<U>() -> *mut [U; _] | 71 | 165..168 'gen': fn gen<U>() -> *mut [U; 2] |
72 | 165..170 'gen()': *mut [U; _] | 72 | 165..170 'gen()': *mut [U; 2] |
73 | 185..419 '{ ...rr); }': () | 73 | 185..419 '{ ...rr); }': () |
74 | 195..198 'arr': &[u8; _] | 74 | 195..198 'arr': &[u8; 1] |
75 | 211..215 '&[1]': &[u8; _] | 75 | 211..215 '&[1]': &[u8; 1] |
76 | 212..215 '[1]': [u8; _] | 76 | 212..215 '[1]': [u8; 1] |
77 | 213..214 '1': u8 | 77 | 213..214 '1': u8 |
78 | 226..227 'a': &[u8] | 78 | 226..227 'a': &[u8] |
79 | 236..239 'arr': &[u8; _] | 79 | 236..239 'arr': &[u8; 1] |
80 | 249..250 'b': u8 | 80 | 249..250 'b': u8 |
81 | 253..254 'f': fn f<u8>(&[u8]) -> u8 | 81 | 253..254 'f': fn f<u8>(&[u8]) -> u8 |
82 | 253..259 'f(arr)': u8 | 82 | 253..259 'f(arr)': u8 |
83 | 255..258 'arr': &[u8; _] | 83 | 255..258 'arr': &[u8; 1] |
84 | 269..270 'c': &[u8] | 84 | 269..270 'c': &[u8] |
85 | 279..286 '{ arr }': &[u8] | 85 | 279..286 '{ arr }': &[u8] |
86 | 281..284 'arr': &[u8; _] | 86 | 281..284 'arr': &[u8; 1] |
87 | 296..297 'd': u8 | 87 | 296..297 'd': u8 |
88 | 300..301 'g': fn g<u8>(S<&[u8]>) -> u8 | 88 | 300..301 'g': fn g<u8>(S<&[u8]>) -> u8 |
89 | 300..315 'g(S { a: arr })': u8 | 89 | 300..315 'g(S { a: arr })': u8 |
90 | 302..314 'S { a: arr }': S<&[u8]> | 90 | 302..314 'S { a: arr }': S<&[u8]> |
91 | 309..312 'arr': &[u8; _] | 91 | 309..312 'arr': &[u8; 1] |
92 | 325..326 'e': [&[u8]; _] | 92 | 325..326 'e': [&[u8]; 1] |
93 | 340..345 '[arr]': [&[u8]; _] | 93 | 340..345 '[arr]': [&[u8]; 1] |
94 | 341..344 'arr': &[u8; _] | 94 | 341..344 'arr': &[u8; 1] |
95 | 355..356 'f': [&[u8]; _] | 95 | 355..356 'f': [&[u8]; 2] |
96 | 370..378 '[arr; 2]': [&[u8]; _] | 96 | 370..378 '[arr; 2]': [&[u8]; 2] |
97 | 371..374 'arr': &[u8; _] | 97 | 371..374 'arr': &[u8; 1] |
98 | 376..377 '2': usize | 98 | 376..377 '2': usize |
99 | 388..389 'g': (&[u8], &[u8]) | 99 | 388..389 'g': (&[u8], &[u8]) |
100 | 406..416 '(arr, arr)': (&[u8], &[u8]) | 100 | 406..416 '(arr, arr)': (&[u8], &[u8]) |
101 | 407..410 'arr': &[u8; _] | 101 | 407..410 'arr': &[u8; 1] |
102 | 412..415 'arr': &[u8; _] | 102 | 412..415 'arr': &[u8; 1] |
103 | "]], | 103 | "#]], |
104 | ); | 104 | ); |
105 | } | 105 | } |
106 | 106 | ||
@@ -113,17 +113,17 @@ fn infer_let_stmt_coerce() { | |||
113 | let x: *const [isize] = &[1]; | 113 | let x: *const [isize] = &[1]; |
114 | } | 114 | } |
115 | ", | 115 | ", |
116 | expect![[r" | 116 | expect![[r#" |
117 | 10..75 '{ ...[1]; }': () | 117 | 10..75 '{ ...[1]; }': () |
118 | 20..21 'x': &[isize] | 118 | 20..21 'x': &[isize] |
119 | 34..38 '&[1]': &[isize; _] | 119 | 34..38 '&[1]': &[isize; 1] |
120 | 35..38 '[1]': [isize; _] | 120 | 35..38 '[1]': [isize; 1] |
121 | 36..37 '1': isize | 121 | 36..37 '1': isize |
122 | 48..49 'x': *const [isize] | 122 | 48..49 'x': *const [isize] |
123 | 68..72 '&[1]': &[isize; _] | 123 | 68..72 '&[1]': &[isize; 1] |
124 | 69..72 '[1]': [isize; _] | 124 | 69..72 '[1]': [isize; 1] |
125 | 70..71 '1': isize | 125 | 70..71 '1': isize |
126 | "]], | 126 | "#]], |
127 | ); | 127 | ); |
128 | } | 128 | } |
129 | 129 | ||
@@ -159,7 +159,7 @@ fn infer_custom_coerce_unsized() { | |||
159 | impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {} | 159 | impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {} |
160 | impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {} | 160 | impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {} |
161 | "#, | 161 | "#, |
162 | expect![[r" | 162 | expect![[r#" |
163 | 257..258 'x': A<[T]> | 163 | 257..258 'x': A<[T]> |
164 | 278..283 '{ x }': A<[T]> | 164 | 278..283 '{ x }': A<[T]> |
165 | 280..281 'x': A<[T]> | 165 | 280..281 'x': A<[T]> |
@@ -169,23 +169,23 @@ fn infer_custom_coerce_unsized() { | |||
169 | 333..334 'x': C<[T]> | 169 | 333..334 'x': C<[T]> |
170 | 354..359 '{ x }': C<[T]> | 170 | 354..359 '{ x }': C<[T]> |
171 | 356..357 'x': C<[T]> | 171 | 356..357 'x': C<[T]> |
172 | 369..370 'a': A<[u8; _]> | 172 | 369..370 'a': A<[u8; 2]> |
173 | 384..385 'b': B<[u8; _]> | 173 | 384..385 'b': B<[u8; 2]> |
174 | 399..400 'c': C<[u8; _]> | 174 | 399..400 'c': C<[u8; 2]> |
175 | 414..480 '{ ...(c); }': () | 175 | 414..480 '{ ...(c); }': () |
176 | 424..425 'd': A<[{unknown}]> | 176 | 424..425 'd': A<[{unknown}]> |
177 | 428..432 'foo1': fn foo1<{unknown}>(A<[{unknown}]>) -> A<[{unknown}]> | 177 | 428..432 'foo1': fn foo1<{unknown}>(A<[{unknown}]>) -> A<[{unknown}]> |
178 | 428..435 'foo1(a)': A<[{unknown}]> | 178 | 428..435 'foo1(a)': A<[{unknown}]> |
179 | 433..434 'a': A<[u8; _]> | 179 | 433..434 'a': A<[u8; 2]> |
180 | 445..446 'e': B<[u8]> | 180 | 445..446 'e': B<[u8]> |
181 | 449..453 'foo2': fn foo2<u8>(B<[u8]>) -> B<[u8]> | 181 | 449..453 'foo2': fn foo2<u8>(B<[u8]>) -> B<[u8]> |
182 | 449..456 'foo2(b)': B<[u8]> | 182 | 449..456 'foo2(b)': B<[u8]> |
183 | 454..455 'b': B<[u8; _]> | 183 | 454..455 'b': B<[u8; 2]> |
184 | 466..467 'f': C<[u8]> | 184 | 466..467 'f': C<[u8]> |
185 | 470..474 'foo3': fn foo3<u8>(C<[u8]>) -> C<[u8]> | 185 | 470..474 'foo3': fn foo3<u8>(C<[u8]>) -> C<[u8]> |
186 | 470..477 'foo3(c)': C<[u8]> | 186 | 470..477 'foo3(c)': C<[u8]> |
187 | 475..476 'c': C<[u8; _]> | 187 | 475..476 'c': C<[u8; 2]> |
188 | "]], | 188 | "#]], |
189 | ); | 189 | ); |
190 | } | 190 | } |
191 | 191 | ||
@@ -208,7 +208,7 @@ fn infer_if_coerce() { | |||
208 | #[lang = "unsize"] | 208 | #[lang = "unsize"] |
209 | pub trait Unsize<T: ?Sized> {} | 209 | pub trait Unsize<T: ?Sized> {} |
210 | "#, | 210 | "#, |
211 | expect![[r" | 211 | expect![[r#" |
212 | 10..11 'x': &[T] | 212 | 10..11 'x': &[T] |
213 | 27..38 '{ loop {} }': &[T] | 213 | 27..38 '{ loop {} }': &[T] |
214 | 29..36 'loop {}': ! | 214 | 29..36 'loop {}': ! |
@@ -220,14 +220,14 @@ fn infer_if_coerce() { | |||
220 | 71..96 '{ ... }': &[i32] | 220 | 71..96 '{ ... }': &[i32] |
221 | 81..84 'foo': fn foo<i32>(&[i32]) -> &[i32] | 221 | 81..84 'foo': fn foo<i32>(&[i32]) -> &[i32] |
222 | 81..90 'foo(&[1])': &[i32] | 222 | 81..90 'foo(&[1])': &[i32] |
223 | 85..89 '&[1]': &[i32; _] | 223 | 85..89 '&[1]': &[i32; 1] |
224 | 86..89 '[1]': [i32; _] | 224 | 86..89 '[1]': [i32; 1] |
225 | 87..88 '1': i32 | 225 | 87..88 '1': i32 |
226 | 102..122 '{ ... }': &[i32; _] | 226 | 102..122 '{ ... }': &[i32; 1] |
227 | 112..116 '&[1]': &[i32; _] | 227 | 112..116 '&[1]': &[i32; 1] |
228 | 113..116 '[1]': [i32; _] | 228 | 113..116 '[1]': [i32; 1] |
229 | 114..115 '1': i32 | 229 | 114..115 '1': i32 |
230 | "]], | 230 | "#]], |
231 | ); | 231 | ); |
232 | } | 232 | } |
233 | 233 | ||
@@ -254,7 +254,7 @@ fn infer_if_else_coerce() { | |||
254 | impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {} | 254 | impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {} |
255 | impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {} | 255 | impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {} |
256 | "#, | 256 | "#, |
257 | expect![[r" | 257 | expect![[r#" |
258 | 10..11 'x': &[T] | 258 | 10..11 'x': &[T] |
259 | 27..38 '{ loop {} }': &[T] | 259 | 27..38 '{ loop {} }': &[T] |
260 | 29..36 'loop {}': ! | 260 | 29..36 'loop {}': ! |
@@ -263,17 +263,17 @@ fn infer_if_else_coerce() { | |||
263 | 59..60 'x': &[i32] | 263 | 59..60 'x': &[i32] |
264 | 63..122 'if tru... }': &[i32] | 264 | 63..122 'if tru... }': &[i32] |
265 | 66..70 'true': bool | 265 | 66..70 'true': bool |
266 | 71..91 '{ ... }': &[i32; _] | 266 | 71..91 '{ ... }': &[i32; 1] |
267 | 81..85 '&[1]': &[i32; _] | 267 | 81..85 '&[1]': &[i32; 1] |
268 | 82..85 '[1]': [i32; _] | 268 | 82..85 '[1]': [i32; 1] |
269 | 83..84 '1': i32 | 269 | 83..84 '1': i32 |
270 | 97..122 '{ ... }': &[i32] | 270 | 97..122 '{ ... }': &[i32] |
271 | 107..110 'foo': fn foo<i32>(&[i32]) -> &[i32] | 271 | 107..110 'foo': fn foo<i32>(&[i32]) -> &[i32] |
272 | 107..116 'foo(&[1])': &[i32] | 272 | 107..116 'foo(&[1])': &[i32] |
273 | 111..115 '&[1]': &[i32; _] | 273 | 111..115 '&[1]': &[i32; 1] |
274 | 112..115 '[1]': [i32; _] | 274 | 112..115 '[1]': [i32; 1] |
275 | 113..114 '1': i32 | 275 | 113..114 '1': i32 |
276 | "]], | 276 | "#]], |
277 | ) | 277 | ) |
278 | } | 278 | } |
279 | 279 | ||
@@ -295,7 +295,7 @@ fn infer_match_first_coerce() { | |||
295 | #[lang = "unsize"] | 295 | #[lang = "unsize"] |
296 | pub trait Unsize<T: ?Sized> {} | 296 | pub trait Unsize<T: ?Sized> {} |
297 | "#, | 297 | "#, |
298 | expect![[r" | 298 | expect![[r#" |
299 | 10..11 'x': &[T] | 299 | 10..11 'x': &[T] |
300 | 27..38 '{ loop {} }': &[T] | 300 | 27..38 '{ loop {} }': &[T] |
301 | 29..36 'loop {}': ! | 301 | 29..36 'loop {}': ! |
@@ -309,19 +309,19 @@ fn infer_match_first_coerce() { | |||
309 | 87..88 '2': i32 | 309 | 87..88 '2': i32 |
310 | 92..95 'foo': fn foo<i32>(&[i32]) -> &[i32] | 310 | 92..95 'foo': fn foo<i32>(&[i32]) -> &[i32] |
311 | 92..101 'foo(&[2])': &[i32] | 311 | 92..101 'foo(&[2])': &[i32] |
312 | 96..100 '&[2]': &[i32; _] | 312 | 96..100 '&[2]': &[i32; 1] |
313 | 97..100 '[2]': [i32; _] | 313 | 97..100 '[2]': [i32; 1] |
314 | 98..99 '2': i32 | 314 | 98..99 '2': i32 |
315 | 111..112 '1': i32 | 315 | 111..112 '1': i32 |
316 | 111..112 '1': i32 | 316 | 111..112 '1': i32 |
317 | 116..120 '&[1]': &[i32; _] | 317 | 116..120 '&[1]': &[i32; 1] |
318 | 117..120 '[1]': [i32; _] | 318 | 117..120 '[1]': [i32; 1] |
319 | 118..119 '1': i32 | 319 | 118..119 '1': i32 |
320 | 130..131 '_': i32 | 320 | 130..131 '_': i32 |
321 | 135..139 '&[3]': &[i32; _] | 321 | 135..139 '&[3]': &[i32; 1] |
322 | 136..139 '[3]': [i32; _] | 322 | 136..139 '[3]': [i32; 1] |
323 | 137..138 '3': i32 | 323 | 137..138 '3': i32 |
324 | "]], | 324 | "#]], |
325 | ); | 325 | ); |
326 | } | 326 | } |
327 | 327 | ||
@@ -348,7 +348,7 @@ fn infer_match_second_coerce() { | |||
348 | impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {} | 348 | impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {} |
349 | impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {} | 349 | impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {} |
350 | "#, | 350 | "#, |
351 | expect![[r" | 351 | expect![[r#" |
352 | 10..11 'x': &[T] | 352 | 10..11 'x': &[T] |
353 | 27..38 '{ loop {} }': &[T] | 353 | 27..38 '{ loop {} }': &[T] |
354 | 29..36 'loop {}': ! | 354 | 29..36 'loop {}': ! |
@@ -360,21 +360,21 @@ fn infer_match_second_coerce() { | |||
360 | 75..76 'i': i32 | 360 | 75..76 'i': i32 |
361 | 87..88 '1': i32 | 361 | 87..88 '1': i32 |
362 | 87..88 '1': i32 | 362 | 87..88 '1': i32 |
363 | 92..96 '&[1]': &[i32; _] | 363 | 92..96 '&[1]': &[i32; 1] |
364 | 93..96 '[1]': [i32; _] | 364 | 93..96 '[1]': [i32; 1] |
365 | 94..95 '1': i32 | 365 | 94..95 '1': i32 |
366 | 106..107 '2': i32 | 366 | 106..107 '2': i32 |
367 | 106..107 '2': i32 | 367 | 106..107 '2': i32 |
368 | 111..114 'foo': fn foo<i32>(&[i32]) -> &[i32] | 368 | 111..114 'foo': fn foo<i32>(&[i32]) -> &[i32] |
369 | 111..120 'foo(&[2])': &[i32] | 369 | 111..120 'foo(&[2])': &[i32] |
370 | 115..119 '&[2]': &[i32; _] | 370 | 115..119 '&[2]': &[i32; 1] |
371 | 116..119 '[2]': [i32; _] | 371 | 116..119 '[2]': [i32; 1] |
372 | 117..118 '2': i32 | 372 | 117..118 '2': i32 |
373 | 130..131 '_': i32 | 373 | 130..131 '_': i32 |
374 | 135..139 '&[3]': &[i32; _] | 374 | 135..139 '&[3]': &[i32; 1] |
375 | 136..139 '[3]': [i32; _] | 375 | 136..139 '[3]': [i32; 1] |
376 | 137..138 '3': i32 | 376 | 137..138 '3': i32 |
377 | "]], | 377 | "#]], |
378 | ); | 378 | ); |
379 | } | 379 | } |
380 | 380 | ||
@@ -685,15 +685,15 @@ fn coerce_unsize_array() { | |||
685 | let f: &[usize] = &[1, 2, 3]; | 685 | let f: &[usize] = &[1, 2, 3]; |
686 | } | 686 | } |
687 | "#, | 687 | "#, |
688 | expect![[r" | 688 | expect![[r#" |
689 | 161..198 '{ ... 3]; }': () | 689 | 161..198 '{ ... 3]; }': () |
690 | 171..172 'f': &[usize] | 690 | 171..172 'f': &[usize] |
691 | 185..195 '&[1, 2, 3]': &[usize; _] | 691 | 185..195 '&[1, 2, 3]': &[usize; 3] |
692 | 186..195 '[1, 2, 3]': [usize; _] | 692 | 186..195 '[1, 2, 3]': [usize; 3] |
693 | 187..188 '1': usize | 693 | 187..188 '1': usize |
694 | 190..191 '2': usize | 694 | 190..191 '2': usize |
695 | 193..194 '3': usize | 695 | 193..194 '3': usize |
696 | "]], | 696 | "#]], |
697 | ); | 697 | ); |
698 | } | 698 | } |
699 | 699 | ||
diff --git a/crates/hir_ty/src/tests/patterns.rs b/crates/hir_ty/src/tests/patterns.rs index f514b3efe..787647e9f 100644 --- a/crates/hir_ty/src/tests/patterns.rs +++ b/crates/hir_ty/src/tests/patterns.rs | |||
@@ -243,8 +243,8 @@ fn infer_pattern_match_slice() { | |||
243 | expect![[r#" | 243 | expect![[r#" |
244 | 10..209 '{ ... } }': () | 244 | 10..209 '{ ... } }': () |
245 | 20..25 'slice': &[f64] | 245 | 20..25 'slice': &[f64] |
246 | 36..42 '&[0.0]': &[f64; _] | 246 | 36..42 '&[0.0]': &[f64; 1] |
247 | 37..42 '[0.0]': [f64; _] | 247 | 37..42 '[0.0]': [f64; 1] |
248 | 38..41 '0.0': f64 | 248 | 38..41 '0.0': f64 |
249 | 48..207 'match ... }': () | 249 | 48..207 'match ... }': () |
250 | 54..59 'slice': &[f64] | 250 | 54..59 'slice': &[f64] |
@@ -345,19 +345,19 @@ fn infer_pattern_match_arr() { | |||
345 | "#, | 345 | "#, |
346 | expect![[r#" | 346 | expect![[r#" |
347 | 10..179 '{ ... } }': () | 347 | 10..179 '{ ... } }': () |
348 | 20..23 'arr': [f64; _] | 348 | 20..23 'arr': [f64; 2] |
349 | 36..46 '[0.0, 1.0]': [f64; _] | 349 | 36..46 '[0.0, 1.0]': [f64; 2] |
350 | 37..40 '0.0': f64 | 350 | 37..40 '0.0': f64 |
351 | 42..45 '1.0': f64 | 351 | 42..45 '1.0': f64 |
352 | 52..177 'match ... }': () | 352 | 52..177 'match ... }': () |
353 | 58..61 'arr': [f64; _] | 353 | 58..61 'arr': [f64; 2] |
354 | 72..80 '[1.0, a]': [f64; _] | 354 | 72..80 '[1.0, a]': [f64; 2] |
355 | 73..76 '1.0': f64 | 355 | 73..76 '1.0': f64 |
356 | 73..76 '1.0': f64 | 356 | 73..76 '1.0': f64 |
357 | 78..79 'a': f64 | 357 | 78..79 'a': f64 |
358 | 84..110 '{ ... }': () | 358 | 84..110 '{ ... }': () |
359 | 98..99 'a': f64 | 359 | 98..99 'a': f64 |
360 | 120..126 '[b, c]': [f64; _] | 360 | 120..126 '[b, c]': [f64; 2] |
361 | 121..122 'b': f64 | 361 | 121..122 'b': f64 |
362 | 124..125 'c': f64 | 362 | 124..125 'c': f64 |
363 | 130..171 '{ ... }': () | 363 | 130..171 '{ ... }': () |
@@ -732,7 +732,7 @@ fn foo(tuple: (u8, i16, f32)) { | |||
732 | 111..112 'a': u8 | 732 | 111..112 'a': u8 |
733 | 114..115 'b': i16 | 733 | 114..115 'b': i16 |
734 | 124..126 '{}': () | 734 | 124..126 '{}': () |
735 | 136..142 '(a, b)': (u8, i16, f32) | 735 | 136..142 '(a, b)': (u8, i16) |
736 | 137..138 'a': u8 | 736 | 137..138 'a': u8 |
737 | 140..141 'b': i16 | 737 | 140..141 'b': i16 |
738 | 146..161 '{/*too short*/}': () | 738 | 146..161 '{/*too short*/}': () |
diff --git a/crates/hir_ty/src/tests/regression.rs b/crates/hir_ty/src/tests/regression.rs index d14f5c9bb..431861712 100644 --- a/crates/hir_ty/src/tests/regression.rs +++ b/crates/hir_ty/src/tests/regression.rs | |||
@@ -99,7 +99,7 @@ fn recursive_vars() { | |||
99 | 10..47 '{ ...&y]; }': () | 99 | 10..47 '{ ...&y]; }': () |
100 | 20..21 'y': &{unknown} | 100 | 20..21 'y': &{unknown} |
101 | 24..31 'unknown': &{unknown} | 101 | 24..31 'unknown': &{unknown} |
102 | 37..44 '[y, &y]': [&&{unknown}; _] | 102 | 37..44 '[y, &y]': [&&{unknown}; 2] |
103 | 38..39 'y': &{unknown} | 103 | 38..39 'y': &{unknown} |
104 | 41..43 '&y': &&{unknown} | 104 | 41..43 '&y': &&{unknown} |
105 | 42..43 'y': &{unknown} | 105 | 42..43 'y': &{unknown} |
@@ -123,7 +123,7 @@ fn recursive_vars_2() { | |||
123 | 24..31 'unknown': &&{unknown} | 123 | 24..31 'unknown': &&{unknown} |
124 | 41..42 'y': &&{unknown} | 124 | 41..42 'y': &&{unknown} |
125 | 45..52 'unknown': &&{unknown} | 125 | 45..52 'unknown': &&{unknown} |
126 | 58..76 '[(x, y..., &x)]': [(&&&{unknown}, &&&{unknown}); _] | 126 | 58..76 '[(x, y..., &x)]': [(&&&{unknown}, &&&{unknown}); 2] |
127 | 59..65 '(x, y)': (&&&{unknown}, &&&{unknown}) | 127 | 59..65 '(x, y)': (&&&{unknown}, &&&{unknown}) |
128 | 60..61 'x': &&{unknown} | 128 | 60..61 'x': &&{unknown} |
129 | 63..64 'y': &&{unknown} | 129 | 63..64 'y': &&{unknown} |
@@ -175,8 +175,8 @@ fn infer_std_crash_2() { | |||
175 | "#, | 175 | "#, |
176 | expect![[r#" | 176 | expect![[r#" |
177 | 22..52 '{ ...n']; }': () | 177 | 22..52 '{ ...n']; }': () |
178 | 28..49 '&[0, b...b'\n']': &[u8; _] | 178 | 28..49 '&[0, b...b'\n']': &[u8; 4] |
179 | 29..49 '[0, b'...b'\n']': [u8; _] | 179 | 29..49 '[0, b'...b'\n']': [u8; 4] |
180 | 30..31 '0': u8 | 180 | 30..31 '0': u8 |
181 | 33..38 'b'\n'': u8 | 181 | 33..38 'b'\n'': u8 |
182 | 40..41 '1': u8 | 182 | 40..41 '1': u8 |
@@ -336,8 +336,8 @@ fn infer_array_macro_call() { | |||
336 | expect![[r#" | 336 | expect![[r#" |
337 | !0..4 '0u32': u32 | 337 | !0..4 '0u32': u32 |
338 | 44..69 '{ ...()]; }': () | 338 | 44..69 '{ ...()]; }': () |
339 | 54..55 'a': [u32; _] | 339 | 54..55 'a': [u32; 1] |
340 | 58..66 '[bar!()]': [u32; _] | 340 | 58..66 '[bar!()]': [u32; 1] |
341 | "#]], | 341 | "#]], |
342 | ); | 342 | ); |
343 | } | 343 | } |
@@ -1050,3 +1050,52 @@ fn test() { | |||
1050 | "#]], | 1050 | "#]], |
1051 | ); | 1051 | ); |
1052 | } | 1052 | } |
1053 | |||
1054 | #[test] | ||
1055 | fn cfg_tail() { | ||
1056 | // https://github.com/rust-analyzer/rust-analyzer/issues/8378 | ||
1057 | check_infer( | ||
1058 | r#" | ||
1059 | fn fake_tail(){ | ||
1060 | { "first" } | ||
1061 | #[cfg(never)] 9 | ||
1062 | } | ||
1063 | fn multiple_fake(){ | ||
1064 | { "fake" } | ||
1065 | { "fake" } | ||
1066 | { "second" } | ||
1067 | #[cfg(never)] { 11 } | ||
1068 | #[cfg(never)] 12; | ||
1069 | #[cfg(never)] 13 | ||
1070 | } | ||
1071 | fn no_normal_tail(){ | ||
1072 | { "third" } | ||
1073 | #[cfg(never)] 14; | ||
1074 | #[cfg(never)] 15; | ||
1075 | } | ||
1076 | fn no_actual_tail(){ | ||
1077 | { "fourth" }; | ||
1078 | #[cfg(never)] 14; | ||
1079 | #[cfg(never)] 15 | ||
1080 | } | ||
1081 | "#, | ||
1082 | expect![[r#" | ||
1083 | 14..53 '{ ...)] 9 }': &str | ||
1084 | 20..31 '{ "first" }': &str | ||
1085 | 22..29 '"first"': &str | ||
1086 | 72..190 '{ ...] 13 }': &str | ||
1087 | 78..88 '{ "fake" }': &str | ||
1088 | 80..86 '"fake"': &str | ||
1089 | 93..103 '{ "fake" }': &str | ||
1090 | 95..101 '"fake"': &str | ||
1091 | 108..120 '{ "second" }': &str | ||
1092 | 110..118 '"second"': &str | ||
1093 | 210..273 '{ ... 15; }': &str | ||
1094 | 216..227 '{ "third" }': &str | ||
1095 | 218..225 '"third"': &str | ||
1096 | 293..357 '{ ...] 15 }': () | ||
1097 | 299..311 '{ "fourth" }': &str | ||
1098 | 301..309 '"fourth"': &str | ||
1099 | "#]], | ||
1100 | ) | ||
1101 | } | ||
diff --git a/crates/hir_ty/src/tests/simple.rs b/crates/hir_ty/src/tests/simple.rs index 0eefd70f2..a9cd42186 100644 --- a/crates/hir_ty/src/tests/simple.rs +++ b/crates/hir_ty/src/tests/simple.rs | |||
@@ -11,7 +11,7 @@ fn test() { | |||
11 | let x = box 1; | 11 | let x = box 1; |
12 | let t = (x, box x, box &1, box [1]); | 12 | let t = (x, box x, box &1, box [1]); |
13 | t; | 13 | t; |
14 | } //^ (Box<i32>, Box<Box<i32>>, Box<&i32>, Box<[i32; _]>) | 14 | } //^ (Box<i32>, Box<Box<i32>>, Box<&i32>, Box<[i32; 1]>) |
15 | 15 | ||
16 | //- /std.rs crate:std | 16 | //- /std.rs crate:std |
17 | #[prelude_import] use prelude::*; | 17 | #[prelude_import] use prelude::*; |
@@ -36,7 +36,7 @@ fn test() { | |||
36 | let x = box 1; | 36 | let x = box 1; |
37 | let t = (x, box x, box &1, box [1]); | 37 | let t = (x, box x, box &1, box [1]); |
38 | t; | 38 | t; |
39 | } //^ (Box<i32, {unknown}>, Box<Box<i32, {unknown}>, {unknown}>, Box<&i32, {unknown}>, Box<[i32; _], {unknown}>) | 39 | } //^ (Box<i32, {unknown}>, Box<Box<i32, {unknown}>, {unknown}>, Box<&i32, {unknown}>, Box<[i32; 1], {unknown}>) |
40 | 40 | ||
41 | //- /std.rs crate:std | 41 | //- /std.rs crate:std |
42 | #[prelude_import] use prelude::*; | 42 | #[prelude_import] use prelude::*; |
@@ -488,23 +488,34 @@ fn infer_literals() { | |||
488 | mod foo {} | 488 | mod foo {} |
489 | "#; | 489 | "#; |
490 | br#"yolo"#; | 490 | br#"yolo"#; |
491 | let a = b"a\x20b\ | ||
492 | c"; | ||
493 | let b = br"g\ | ||
494 | h"; | ||
495 | let c = br#"x"\"yb"#; | ||
491 | } | 496 | } |
492 | "##, | 497 | "##, |
493 | expect![[r##" | 498 | expect![[r##" |
494 | 10..216 '{ ...o"#; }': () | 499 | 18..478 '{ ... }': () |
495 | 16..20 '5i32': i32 | 500 | 32..36 '5i32': i32 |
496 | 26..30 '5f32': f32 | 501 | 50..54 '5f32': f32 |
497 | 36..40 '5f64': f64 | 502 | 68..72 '5f64': f64 |
498 | 46..53 '"hello"': &str | 503 | 86..93 '"hello"': &str |
499 | 59..67 'b"bytes"': &[u8; _] | 504 | 107..115 'b"bytes"': &[u8; 5] |
500 | 73..76 ''c'': char | 505 | 129..132 ''c'': char |
501 | 82..86 'b'b'': u8 | 506 | 146..150 'b'b'': u8 |
502 | 92..96 '3.14': f64 | 507 | 164..168 '3.14': f64 |
503 | 102..106 '5000': i32 | 508 | 182..186 '5000': i32 |
504 | 112..117 'false': bool | 509 | 200..205 'false': bool |
505 | 123..127 'true': bool | 510 | 219..223 'true': bool |
506 | 133..197 'r#" ... "#': &str | 511 | 237..333 'r#" ... "#': &str |
507 | 203..213 'br#"yolo"#': &[u8; _] | 512 | 347..357 'br#"yolo"#': &[u8; 4] |
513 | 375..376 'a': &[u8; 4] | ||
514 | 379..403 'b"a\x2... c"': &[u8; 4] | ||
515 | 421..422 'b': &[u8; 4] | ||
516 | 425..433 'br"g\ h"': &[u8; 4] | ||
517 | 451..452 'c': &[u8; 6] | ||
518 | 455..467 'br#"x"\"yb"#': &[u8; 6] | ||
508 | "##]], | 519 | "##]], |
509 | ); | 520 | ); |
510 | } | 521 | } |
@@ -1260,61 +1271,69 @@ fn infer_array() { | |||
1260 | 1271 | ||
1261 | let b = [a, ["b"]]; | 1272 | let b = [a, ["b"]]; |
1262 | let x: [u8; 0] = []; | 1273 | let x: [u8; 0] = []; |
1274 | // FIXME: requires const evaluation/taking type from rhs somehow | ||
1275 | let y: [u8; 2+2] = [1,2,3,4]; | ||
1263 | } | 1276 | } |
1264 | "#, | 1277 | "#, |
1265 | expect![[r#" | 1278 | expect![[r#" |
1266 | 8..9 'x': &str | 1279 | 8..9 'x': &str |
1267 | 17..18 'y': isize | 1280 | 17..18 'y': isize |
1268 | 27..292 '{ ... []; }': () | 1281 | 27..395 '{ ...,4]; }': () |
1269 | 37..38 'a': [&str; _] | 1282 | 37..38 'a': [&str; 1] |
1270 | 41..44 '[x]': [&str; _] | 1283 | 41..44 '[x]': [&str; 1] |
1271 | 42..43 'x': &str | 1284 | 42..43 'x': &str |
1272 | 54..55 'b': [[&str; _]; _] | 1285 | 54..55 'b': [[&str; 1]; 2] |
1273 | 58..64 '[a, a]': [[&str; _]; _] | 1286 | 58..64 '[a, a]': [[&str; 1]; 2] |
1274 | 59..60 'a': [&str; _] | 1287 | 59..60 'a': [&str; 1] |
1275 | 62..63 'a': [&str; _] | 1288 | 62..63 'a': [&str; 1] |
1276 | 74..75 'c': [[[&str; _]; _]; _] | 1289 | 74..75 'c': [[[&str; 1]; 2]; 2] |
1277 | 78..84 '[b, b]': [[[&str; _]; _]; _] | 1290 | 78..84 '[b, b]': [[[&str; 1]; 2]; 2] |
1278 | 79..80 'b': [[&str; _]; _] | 1291 | 79..80 'b': [[&str; 1]; 2] |
1279 | 82..83 'b': [[&str; _]; _] | 1292 | 82..83 'b': [[&str; 1]; 2] |
1280 | 95..96 'd': [isize; _] | 1293 | 95..96 'd': [isize; 4] |
1281 | 99..111 '[y, 1, 2, 3]': [isize; _] | 1294 | 99..111 '[y, 1, 2, 3]': [isize; 4] |
1282 | 100..101 'y': isize | 1295 | 100..101 'y': isize |
1283 | 103..104 '1': isize | 1296 | 103..104 '1': isize |
1284 | 106..107 '2': isize | 1297 | 106..107 '2': isize |
1285 | 109..110 '3': isize | 1298 | 109..110 '3': isize |
1286 | 121..122 'd': [isize; _] | 1299 | 121..122 'd': [isize; 4] |
1287 | 125..137 '[1, y, 2, 3]': [isize; _] | 1300 | 125..137 '[1, y, 2, 3]': [isize; 4] |
1288 | 126..127 '1': isize | 1301 | 126..127 '1': isize |
1289 | 129..130 'y': isize | 1302 | 129..130 'y': isize |
1290 | 132..133 '2': isize | 1303 | 132..133 '2': isize |
1291 | 135..136 '3': isize | 1304 | 135..136 '3': isize |
1292 | 147..148 'e': [isize; _] | 1305 | 147..148 'e': [isize; 1] |
1293 | 151..154 '[y]': [isize; _] | 1306 | 151..154 '[y]': [isize; 1] |
1294 | 152..153 'y': isize | 1307 | 152..153 'y': isize |
1295 | 164..165 'f': [[isize; _]; _] | 1308 | 164..165 'f': [[isize; 4]; 2] |
1296 | 168..174 '[d, d]': [[isize; _]; _] | 1309 | 168..174 '[d, d]': [[isize; 4]; 2] |
1297 | 169..170 'd': [isize; _] | 1310 | 169..170 'd': [isize; 4] |
1298 | 172..173 'd': [isize; _] | 1311 | 172..173 'd': [isize; 4] |
1299 | 184..185 'g': [[isize; _]; _] | 1312 | 184..185 'g': [[isize; 1]; 2] |
1300 | 188..194 '[e, e]': [[isize; _]; _] | 1313 | 188..194 '[e, e]': [[isize; 1]; 2] |
1301 | 189..190 'e': [isize; _] | 1314 | 189..190 'e': [isize; 1] |
1302 | 192..193 'e': [isize; _] | 1315 | 192..193 'e': [isize; 1] |
1303 | 205..206 'h': [i32; _] | 1316 | 205..206 'h': [i32; 2] |
1304 | 209..215 '[1, 2]': [i32; _] | 1317 | 209..215 '[1, 2]': [i32; 2] |
1305 | 210..211 '1': i32 | 1318 | 210..211 '1': i32 |
1306 | 213..214 '2': i32 | 1319 | 213..214 '2': i32 |
1307 | 225..226 'i': [&str; _] | 1320 | 225..226 'i': [&str; 2] |
1308 | 229..239 '["a", "b"]': [&str; _] | 1321 | 229..239 '["a", "b"]': [&str; 2] |
1309 | 230..233 '"a"': &str | 1322 | 230..233 '"a"': &str |
1310 | 235..238 '"b"': &str | 1323 | 235..238 '"b"': &str |
1311 | 250..251 'b': [[&str; _]; _] | 1324 | 250..251 'b': [[&str; 1]; 2] |
1312 | 254..264 '[a, ["b"]]': [[&str; _]; _] | 1325 | 254..264 '[a, ["b"]]': [[&str; 1]; 2] |
1313 | 255..256 'a': [&str; _] | 1326 | 255..256 'a': [&str; 1] |
1314 | 258..263 '["b"]': [&str; _] | 1327 | 258..263 '["b"]': [&str; 1] |
1315 | 259..262 '"b"': &str | 1328 | 259..262 '"b"': &str |
1316 | 274..275 'x': [u8; _] | 1329 | 274..275 'x': [u8; 0] |
1317 | 287..289 '[]': [u8; _] | 1330 | 287..289 '[]': [u8; 0] |
1331 | 368..369 'y': [u8; _] | ||
1332 | 383..392 '[1,2,3,4]': [u8; 4] | ||
1333 | 384..385 '1': u8 | ||
1334 | 386..387 '2': u8 | ||
1335 | 388..389 '3': u8 | ||
1336 | 390..391 '4': u8 | ||
1318 | "#]], | 1337 | "#]], |
1319 | ); | 1338 | ); |
1320 | } | 1339 | } |
@@ -2409,40 +2428,40 @@ fn infer_operator_overload() { | |||
2409 | 320..422 '{ ... }': V2 | 2428 | 320..422 '{ ... }': V2 |
2410 | 334..335 'x': f32 | 2429 | 334..335 'x': f32 |
2411 | 338..342 'self': V2 | 2430 | 338..342 'self': V2 |
2412 | 338..344 'self.0': [f32; _] | 2431 | 338..344 'self.0': [f32; 2] |
2413 | 338..347 'self.0[0]': {unknown} | 2432 | 338..347 'self.0[0]': {unknown} |
2414 | 338..358 'self.0...s.0[0]': f32 | 2433 | 338..358 'self.0...s.0[0]': f32 |
2415 | 345..346 '0': i32 | 2434 | 345..346 '0': i32 |
2416 | 350..353 'rhs': V2 | 2435 | 350..353 'rhs': V2 |
2417 | 350..355 'rhs.0': [f32; _] | 2436 | 350..355 'rhs.0': [f32; 2] |
2418 | 350..358 'rhs.0[0]': {unknown} | 2437 | 350..358 'rhs.0[0]': {unknown} |
2419 | 356..357 '0': i32 | 2438 | 356..357 '0': i32 |
2420 | 372..373 'y': f32 | 2439 | 372..373 'y': f32 |
2421 | 376..380 'self': V2 | 2440 | 376..380 'self': V2 |
2422 | 376..382 'self.0': [f32; _] | 2441 | 376..382 'self.0': [f32; 2] |
2423 | 376..385 'self.0[1]': {unknown} | 2442 | 376..385 'self.0[1]': {unknown} |
2424 | 376..396 'self.0...s.0[1]': f32 | 2443 | 376..396 'self.0...s.0[1]': f32 |
2425 | 383..384 '1': i32 | 2444 | 383..384 '1': i32 |
2426 | 388..391 'rhs': V2 | 2445 | 388..391 'rhs': V2 |
2427 | 388..393 'rhs.0': [f32; _] | 2446 | 388..393 'rhs.0': [f32; 2] |
2428 | 388..396 'rhs.0[1]': {unknown} | 2447 | 388..396 'rhs.0[1]': {unknown} |
2429 | 394..395 '1': i32 | 2448 | 394..395 '1': i32 |
2430 | 406..408 'V2': V2([f32; _]) -> V2 | 2449 | 406..408 'V2': V2([f32; 2]) -> V2 |
2431 | 406..416 'V2([x, y])': V2 | 2450 | 406..416 'V2([x, y])': V2 |
2432 | 409..415 '[x, y]': [f32; _] | 2451 | 409..415 '[x, y]': [f32; 2] |
2433 | 410..411 'x': f32 | 2452 | 410..411 'x': f32 |
2434 | 413..414 'y': f32 | 2453 | 413..414 'y': f32 |
2435 | 436..519 '{ ... vb; }': () | 2454 | 436..519 '{ ... vb; }': () |
2436 | 446..448 'va': V2 | 2455 | 446..448 'va': V2 |
2437 | 451..453 'V2': V2([f32; _]) -> V2 | 2456 | 451..453 'V2': V2([f32; 2]) -> V2 |
2438 | 451..465 'V2([0.0, 1.0])': V2 | 2457 | 451..465 'V2([0.0, 1.0])': V2 |
2439 | 454..464 '[0.0, 1.0]': [f32; _] | 2458 | 454..464 '[0.0, 1.0]': [f32; 2] |
2440 | 455..458 '0.0': f32 | 2459 | 455..458 '0.0': f32 |
2441 | 460..463 '1.0': f32 | 2460 | 460..463 '1.0': f32 |
2442 | 475..477 'vb': V2 | 2461 | 475..477 'vb': V2 |
2443 | 480..482 'V2': V2([f32; _]) -> V2 | 2462 | 480..482 'V2': V2([f32; 2]) -> V2 |
2444 | 480..494 'V2([0.0, 1.0])': V2 | 2463 | 480..494 'V2([0.0, 1.0])': V2 |
2445 | 483..493 '[0.0, 1.0]': [f32; _] | 2464 | 483..493 '[0.0, 1.0]': [f32; 2] |
2446 | 484..487 '0.0': f32 | 2465 | 484..487 '0.0': f32 |