-rw-r--r--  crates/ra_assists/src/handlers/split_import.rs | 52
-rw-r--r--  crates/ra_db/src/fixture.rs                    |  4
-rw-r--r--  crates/ra_hir/src/source_analyzer.rs           |  4
-rw-r--r--  crates/ra_hir_def/src/body.rs                  | 15
-rw-r--r--  crates/ra_hir_def/src/body/lower.rs            | 41
-rw-r--r--  crates/ra_hir_ty/src/expr.rs                   |  4
-rw-r--r--  crates/ra_hir_ty/src/infer.rs                  | 28
-rw-r--r--  crates/ra_hir_ty/src/tests.rs                  | 17
-rw-r--r--  crates/ra_hir_ty/src/tests/simple.rs           | 41
-rw-r--r--  crates/ra_ide/src/diagnostics.rs               | 27
-rw-r--r--  crates/ra_syntax/src/ast/edit.rs               | 22
-rw-r--r--  crates/ra_syntax/src/ast/make.rs               | 38
-rw-r--r--  crates/rust-analyzer/src/cli/analysis_stats.rs |  4
13 files changed, 226 insertions, 71 deletions
diff --git a/crates/ra_assists/src/handlers/split_import.rs b/crates/ra_assists/src/handlers/split_import.rs
index 2c3f07a79..292c39f59 100644
--- a/crates/ra_assists/src/handlers/split_import.rs
+++ b/crates/ra_assists/src/handlers/split_import.rs
@@ -1,6 +1,9 @@
-use std::iter::successors;
+use std::iter::{once, successors};
 
-use ra_syntax::{ast, AstNode, TextUnit, T};
+use ra_syntax::{
+    ast::{self, make},
+    AstNode, T,
+};
 
 use crate::{Assist, AssistCtx, AssistId};
 
@@ -17,39 +20,50 @@ use crate::{Assist, AssistCtx, AssistId};
 // ```
 pub(crate) fn split_import(ctx: AssistCtx) -> Option<Assist> {
     let colon_colon = ctx.find_token_at_offset(T![::])?;
-    let path = ast::Path::cast(colon_colon.parent())?;
-    let top_path = successors(Some(path), |it| it.parent_path()).last()?;
+    let path = ast::Path::cast(colon_colon.parent())?.qualifier()?;
+    let top_path = successors(Some(path.clone()), |it| it.parent_path()).last()?;
 
-    let use_tree = top_path.syntax().ancestors().find_map(ast::UseTree::cast);
-    if use_tree.is_none() {
-        return None;
-    }
+    let use_tree = top_path.syntax().ancestors().find_map(ast::UseTree::cast)?;
 
-    let l_curly = colon_colon.text_range().end();
-    let r_curly = match top_path.syntax().parent().and_then(ast::UseTree::cast) {
-        Some(tree) => tree.syntax().text_range().end(),
-        None => top_path.syntax().text_range().end(),
-    };
+    let new_tree = split_use_tree_prefix(&use_tree, &path)?;
+    let cursor = ctx.frange.range.start();
 
     ctx.add_assist(AssistId("split_import"), "Split import", |edit| {
         edit.target(colon_colon.text_range());
-        edit.insert(l_curly, "{");
-        edit.insert(r_curly, "}");
-        edit.set_cursor(l_curly + TextUnit::of_str("{"));
+        edit.replace_ast(use_tree, new_tree);
+        edit.set_cursor(cursor);
     })
 }
 
+fn split_use_tree_prefix(use_tree: &ast::UseTree, prefix: &ast::Path) -> Option<ast::UseTree> {
+    let suffix = split_path_prefix(&prefix)?;
+    let use_tree = make::use_tree(suffix.clone(), use_tree.use_tree_list(), use_tree.alias());
+    let nested = make::use_tree_list(once(use_tree));
+    let res = make::use_tree(prefix.clone(), Some(nested), None);
+    Some(res)
+}
+
+fn split_path_prefix(prefix: &ast::Path) -> Option<ast::Path> {
+    let parent = prefix.parent_path()?;
+    let mut res = make::path_unqualified(parent.segment()?);
+    for p in successors(parent.parent_path(), |it| it.parent_path()) {
+        res = make::path_qualified(res, p.segment()?);
+    }
+    Some(res)
+}
+
 #[cfg(test)]
 mod tests {
-    use super::*;
     use crate::helpers::{check_assist, check_assist_target};
 
+    use super::*;
+
     #[test]
     fn test_split_import() {
         check_assist(
             split_import,
             "use crate::<|>db::RootDatabase;",
-            "use crate::{<|>db::RootDatabase};",
+            "use crate::<|>{db::RootDatabase};",
         )
     }
 
@@ -58,7 +72,7 @@ mod tests {
         check_assist(
             split_import,
             "use crate:<|>:db::{RootDatabase, FileSymbol}",
-            "use crate::{<|>db::{RootDatabase, FileSymbol}}",
+            "use crate:<|>:{db::{RootDatabase, FileSymbol}}",
         )
     }
 
diff --git a/crates/ra_db/src/fixture.rs b/crates/ra_db/src/fixture.rs
index 17cd138c2..da7af110c 100644
--- a/crates/ra_db/src/fixture.rs
+++ b/crates/ra_db/src/fixture.rs
@@ -21,9 +21,9 @@ pub trait WithFixture: Default + SourceDatabaseExt + 'static {
         (db, file_id)
     }
 
-    fn with_files(fixture: &str) -> Self {
+    fn with_files(ra_fixture: &str) -> Self {
         let mut db = Self::default();
-        let pos = with_files(&mut db, fixture);
+        let pos = with_files(&mut db, ra_fixture);
         assert!(pos.is_none());
         db
     }
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs
index f3f1ed05a..331ecdd9c 100644
--- a/crates/ra_hir/src/source_analyzer.rs
+++ b/crates/ra_hir/src/source_analyzer.rs
@@ -261,7 +261,7 @@ fn scope_for_offset(
         .scope_by_expr()
         .iter()
         .filter_map(|(id, scope)| {
-            let source = source_map.expr_syntax(*id)?;
+            let source = source_map.expr_syntax(*id).ok()?;
             // FIXME: correctly handle macro expansion
             if source.file_id != offset.file_id {
                 return None;
@@ -337,7 +337,7 @@ fn adjust(
         .scope_by_expr()
         .iter()
         .filter_map(|(id, scope)| {
-            let source = source_map.expr_syntax(*id)?;
+            let source = source_map.expr_syntax(*id).ok()?;
             // FIXME: correctly handle macro expansion
             if source.file_id != file_id {
                 return None;
diff --git a/crates/ra_hir_def/src/body.rs b/crates/ra_hir_def/src/body.rs
index 010d35e55..57ba45b45 100644
--- a/crates/ra_hir_def/src/body.rs
+++ b/crates/ra_hir_def/src/body.rs
@@ -149,13 +149,16 @@ pub type PatSource = InFile<PatPtr>;
 #[derive(Default, Debug, Eq, PartialEq)]
 pub struct BodySourceMap {
     expr_map: FxHashMap<ExprSource, ExprId>,
-    expr_map_back: ArenaMap<ExprId, ExprSource>,
+    expr_map_back: ArenaMap<ExprId, Result<ExprSource, SyntheticSyntax>>,
     pat_map: FxHashMap<PatSource, PatId>,
-    pat_map_back: ArenaMap<PatId, PatSource>,
+    pat_map_back: ArenaMap<PatId, Result<PatSource, SyntheticSyntax>>,
     field_map: FxHashMap<(ExprId, usize), AstPtr<ast::RecordField>>,
     expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>,
 }
 
+#[derive(Default, Debug, Eq, PartialEq, Clone, Copy)]
+pub struct SyntheticSyntax;
+
 impl Body {
     pub(crate) fn body_with_source_map_query(
         db: &impl DefDatabase,
@@ -219,8 +222,8 @@ impl Index<PatId> for Body {
 }
 
 impl BodySourceMap {
-    pub fn expr_syntax(&self, expr: ExprId) -> Option<ExprSource> {
-        self.expr_map_back.get(expr).copied()
+    pub fn expr_syntax(&self, expr: ExprId) -> Result<ExprSource, SyntheticSyntax> {
+        self.expr_map_back[expr]
     }
 
     pub fn node_expr(&self, node: InFile<&ast::Expr>) -> Option<ExprId> {
@@ -238,8 +241,8 @@ impl BodySourceMap {
         self.expr_map.get(&src).cloned()
     }
 
-    pub fn pat_syntax(&self, pat: PatId) -> Option<PatSource> {
-        self.pat_map_back.get(pat).copied()
+    pub fn pat_syntax(&self, pat: PatId) -> Result<PatSource, SyntheticSyntax> {
+        self.pat_map_back[pat]
     }
 
     pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<PatId> {
diff --git a/crates/ra_hir_def/src/body/lower.rs b/crates/ra_hir_def/src/body/lower.rs
index c18e6879b..ec1b0c2e7 100644
--- a/crates/ra_hir_def/src/body/lower.rs
+++ b/crates/ra_hir_def/src/body/lower.rs
@@ -14,9 +14,10 @@ use ra_syntax::{
 };
 use test_utils::tested_by;
 
+use super::{ExprSource, PatSource};
 use crate::{
     adt::StructKind,
-    body::{Body, BodySourceMap, Expander, PatPtr},
+    body::{Body, BodySourceMap, Expander, PatPtr, SyntheticSyntax},
     builtin_type::{BuiltinFloat, BuiltinInt},
     db::DefDatabase,
     expr::{
@@ -102,44 +103,48 @@ where
 
     fn alloc_expr(&mut self, expr: Expr, ptr: AstPtr<ast::Expr>) -> ExprId {
         let ptr = Either::Left(ptr);
-        let id = self.body.exprs.alloc(expr);
         let src = self.expander.to_source(ptr);
+        let id = self.make_expr(expr, Ok(src));
         self.source_map.expr_map.insert(src, id);
-        self.source_map.expr_map_back.insert(id, src);
         id
     }
     // desugared exprs don't have ptr, that's wrong and should be fixed
     // somehow.
     fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId {
-        self.body.exprs.alloc(expr)
+        self.make_expr(expr, Err(SyntheticSyntax))
     }
     fn alloc_expr_field_shorthand(&mut self, expr: Expr, ptr: AstPtr<ast::RecordField>) -> ExprId {
         let ptr = Either::Right(ptr);
-        let id = self.body.exprs.alloc(expr);
         let src = self.expander.to_source(ptr);
+        let id = self.make_expr(expr, Ok(src));
         self.source_map.expr_map.insert(src, id);
+        id
+    }
+    fn empty_block(&mut self) -> ExprId {
+        self.alloc_expr_desugared(Expr::Block { statements: Vec::new(), tail: None })
+    }
+    fn missing_expr(&mut self) -> ExprId {
+        self.alloc_expr_desugared(Expr::Missing)
+    }
+    fn make_expr(&mut self, expr: Expr, src: Result<ExprSource, SyntheticSyntax>) -> ExprId {
+        let id = self.body.exprs.alloc(expr);
         self.source_map.expr_map_back.insert(id, src);
         id
     }
+
     fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
-        let id = self.body.pats.alloc(pat);
         let src = self.expander.to_source(ptr);
+        let id = self.make_pat(pat, Ok(src));
         self.source_map.pat_map.insert(src, id);
-        self.source_map.pat_map_back.insert(id, src);
         id
     }
-
-    fn empty_block(&mut self) -> ExprId {
-        let block = Expr::Block { statements: Vec::new(), tail: None };
-        self.body.exprs.alloc(block)
-    }
-
-    fn missing_expr(&mut self) -> ExprId {
-        self.body.exprs.alloc(Expr::Missing)
-    }
-
     fn missing_pat(&mut self) -> PatId {
-        self.body.pats.alloc(Pat::Missing)
+        self.make_pat(Pat::Missing, Err(SyntheticSyntax))
+    }
+    fn make_pat(&mut self, pat: Pat, src: Result<PatSource, SyntheticSyntax>) -> PatId {
+        let id = self.body.pats.alloc(pat);
+        self.source_map.pat_map_back.insert(id, src);
+        id
     }
 
     fn collect_expr(&mut self, expr: ast::Expr) -> ExprId {
diff --git a/crates/ra_hir_ty/src/expr.rs b/crates/ra_hir_ty/src/expr.rs
index 22f24890d..d8cdf5266 100644
--- a/crates/ra_hir_ty/src/expr.rs
+++ b/crates/ra_hir_ty/src/expr.rs
@@ -100,7 +100,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
         }
         let (_, source_map) = db.body_with_source_map(self.func.into());
 
-        if let Some(source_ptr) = source_map.expr_syntax(id) {
+        if let Ok(source_ptr) = source_map.expr_syntax(id) {
             if let Some(expr) = source_ptr.value.left() {
                 let root = source_ptr.file_syntax(db);
                 if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
@@ -145,7 +145,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
         if params.len() == 2 && params[0] == mismatch.actual {
             let (_, source_map) = db.body_with_source_map(self.func.into());
 
-            if let Some(source_ptr) = source_map.expr_syntax(id) {
+            if let Ok(source_ptr) = source_map.expr_syntax(id) {
                 if let Some(expr) = source_ptr.value.left() {
                     self.sink.push(MissingOkInTailExpr { file: source_ptr.file_id, expr });
                 }
diff --git a/crates/ra_hir_ty/src/infer.rs b/crates/ra_hir_ty/src/infer.rs
index 377f44fa7..39f144216 100644
--- a/crates/ra_hir_ty/src/infer.rs
+++ b/crates/ra_hir_ty/src/infer.rs
@@ -425,7 +425,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
         let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
         // FIXME: this should resolve assoc items as well, see this example:
         // https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
-        match resolver.resolve_path_in_type_ns_fully(self.db, path.mod_path()) {
+        return match resolver.resolve_path_in_type_ns_fully(self.db, path.mod_path()) {
             Some(TypeNs::AdtId(AdtId::StructId(strukt))) => {
                 let substs = Ty::substs_from_path(&ctx, path, strukt.into());
                 let ty = self.db.ty(strukt.into());
@@ -438,7 +438,33 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                 let ty = self.insert_type_vars(ty.subst(&substs));
                 (ty, Some(var.into()))
             }
+            Some(TypeNs::SelfType(impl_id)) => {
+                let generics = crate::utils::generics(self.db, impl_id.into());
+                let substs = Substs::type_params_for_generics(&generics);
+                let ty = self.db.impl_self_ty(impl_id).subst(&substs);
+                let variant = ty_variant(&ty);
+                (ty, variant)
+            }
+            Some(TypeNs::TypeAliasId(it)) => {
+                let substs = Substs::build_for_def(self.db, it)
+                    .fill(std::iter::repeat_with(|| self.table.new_type_var()))
+                    .build();
+                let ty = self.db.ty(it.into()).subst(&substs);
+                let variant = ty_variant(&ty);
+                (ty, variant)
+            }
             Some(_) | None => (Ty::Unknown, None),
+        };
+
+        fn ty_variant(ty: &Ty) -> Option<VariantId> {
+            ty.as_adt().and_then(|(adt_id, _)| match adt_id {
+                AdtId::StructId(s) => Some(VariantId::StructId(s)),
+                AdtId::UnionId(u) => Some(VariantId::UnionId(u)),
+                AdtId::EnumId(_) => {
+                    // Error E0071, expected struct, variant or union type, found enum `Foo`
+                    None
+                }
+            })
+        }
         }
     }
 
diff --git a/crates/ra_hir_ty/src/tests.rs b/crates/ra_hir_ty/src/tests.rs
index 087edcc92..7e9547340 100644
--- a/crates/ra_hir_ty/src/tests.rs
+++ b/crates/ra_hir_ty/src/tests.rs
@@ -11,8 +11,13 @@ use std::fmt::Write;
 use std::sync::Arc;
 
 use hir_def::{
-    body::BodySourceMap, child_by_source::ChildBySource, db::DefDatabase, item_scope::ItemScope,
-    keys, nameres::CrateDefMap, AssocItemId, DefWithBodyId, LocalModuleId, Lookup, ModuleDefId,
+    body::{BodySourceMap, SyntheticSyntax},
+    child_by_source::ChildBySource,
+    db::DefDatabase,
+    item_scope::ItemScope,
+    keys,
+    nameres::CrateDefMap,
+    AssocItemId, DefWithBodyId, LocalModuleId, Lookup, ModuleDefId,
 };
 use hir_expand::InFile;
 use insta::assert_snapshot;
@@ -67,20 +72,20 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
 
     for (pat, ty) in inference_result.type_of_pat.iter() {
         let syntax_ptr = match body_source_map.pat_syntax(pat) {
-            Some(sp) => {
+            Ok(sp) => {
                 sp.map(|ast| ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()))
             }
-            None => continue,
+            Err(SyntheticSyntax) => continue,
         };
         types.push((syntax_ptr, ty));
     }
 
     for (expr, ty) in inference_result.type_of_expr.iter() {
         let syntax_ptr = match body_source_map.expr_syntax(expr) {
-            Some(sp) => {
+            Ok(sp) => {
                 sp.map(|ast| ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()))
             }
-            None => continue,
+            Err(SyntheticSyntax) => continue,
        };
         types.push((syntax_ptr, ty));
         if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr) {
diff --git a/crates/ra_hir_ty/src/tests/simple.rs b/crates/ra_hir_ty/src/tests/simple.rs
index 3803f5938..c140bd513 100644
--- a/crates/ra_hir_ty/src/tests/simple.rs
+++ b/crates/ra_hir_ty/src/tests/simple.rs
@@ -51,6 +51,47 @@ fn test() {
 }
 
 #[test]
+fn self_in_struct_lit() {
+    assert_snapshot!(infer(
+        r#"
+//- /main.rs
+struct S<T> { x: T }
+
+impl S<u32> {
+    fn foo() {
+        Self { x: 1 };
+    }
+}
+"#,
+    ), @r###"
+    [63; 93) '{ ... }': ()
+    [73; 86) 'Self { x: 1 }': S<u32>
+    [83; 84) '1': u32
+    "###);
+}
+
+#[test]
+fn type_alias_in_struct_lit() {
+    assert_snapshot!(infer(
+        r#"
+//- /main.rs
+struct S<T> { x: T }
+
+type SS = S<u32>;
+
+fn foo() {
+    SS { x: 1 };
+}
+
+"#,
+    ), @r###"
+    [64; 84) '{ ...1 }; }': ()
+    [70; 81) 'SS { x: 1 }': S<u32>
+    [78; 79) '1': u32
+    "###);
+}
+
+#[test]
 fn infer_ranges() {
     let (db, pos) = TestDB::with_position(
         r#"
diff --git a/crates/ra_ide/src/diagnostics.rs b/crates/ra_ide/src/diagnostics.rs
index a52f7fdd9..a10e642db 100644
--- a/crates/ra_ide/src/diagnostics.rs
+++ b/crates/ra_ide/src/diagnostics.rs
@@ -473,6 +473,33 @@ mod tests {
     }
 
     #[test]
+    fn test_fill_struct_fields_self() {
+        let before = r"
+            struct TestStruct {
+                one: i32,
+            }
+
+            impl TestStruct {
+                fn test_fn() {
+                    let s = Self {};
+                }
+            }
+            ";
+        let after = r"
+            struct TestStruct {
+                one: i32,
+            }
+
+            impl TestStruct {
+                fn test_fn() {
+                    let s = Self { one: ()};
+                }
+            }
+            ";
+        check_apply_diagnostic_fix(before, after);
+    }
+
+    #[test]
     fn test_fill_struct_fields_enum() {
         let before = r"
            enum Expr {
diff --git a/crates/ra_syntax/src/ast/edit.rs b/crates/ra_syntax/src/ast/edit.rs
index 1858e2b6c..40a04b9c5 100644
--- a/crates/ra_syntax/src/ast/edit.rs
+++ b/crates/ra_syntax/src/ast/edit.rs
@@ -139,7 +139,7 @@ impl ast::RecordFieldList {
         let mut to_insert: ArrayVec<[SyntaxElement; 4]> = ArrayVec::new();
         to_insert.push(space.into());
         to_insert.push(field.syntax().clone().into());
-        to_insert.push(tokens::comma().into());
+        to_insert.push(make::token(T![,]).into());
 
         macro_rules! after_l_curly {
             () => {{
@@ -160,7 +160,7 @@ impl ast::RecordFieldList {
                 {
                     InsertPosition::After(comma)
                 } else {
-                    to_insert.insert(0, tokens::comma().into());
+                    to_insert.insert(0, make::token(T![,]).into());
                     InsertPosition::After($anchor.syntax().clone().into())
                 }
             };
@@ -259,6 +259,24 @@ impl ast::UseItem {
     }
 }
 
+impl ast::UseTree {
+    #[must_use]
+    pub fn with_path(&self, path: ast::Path) -> ast::UseTree {
+        if let Some(old) = self.path() {
+            return replace_descendants(self, iter::once((old, path)));
+        }
+        self.clone()
+    }
+
+    #[must_use]
+    pub fn with_use_tree_list(&self, use_tree_list: ast::UseTreeList) -> ast::UseTree {
+        if let Some(old) = self.use_tree_list() {
+            return replace_descendants(self, iter::once((old, use_tree_list)));
+        }
+        self.clone()
+    }
+}
+
 #[must_use]
 pub fn strip_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N {
     N::cast(strip_attrs_and_docs_inner(node.syntax().clone())).unwrap()
diff --git a/crates/ra_syntax/src/ast/make.rs b/crates/ra_syntax/src/ast/make.rs
index 0da24560e..53d6fa562 100644
--- a/crates/ra_syntax/src/ast/make.rs
+++ b/crates/ra_syntax/src/ast/make.rs
@@ -25,6 +25,31 @@ fn path_from_text(text: &str) -> ast::Path {
     ast_from_text(text)
 }
 
+pub fn use_tree(
+    path: ast::Path,
+    use_tree_list: Option<ast::UseTreeList>,
+    alias: Option<ast::Alias>,
+) -> ast::UseTree {
+    let mut buf = "use ".to_string();
+    buf += &path.syntax().to_string();
+    if let Some(use_tree_list) = use_tree_list {
+        buf += &format!("::{}", use_tree_list.syntax());
+    }
+    if let Some(alias) = alias {
+        buf += &format!(" {}", alias.syntax());
+    }
+    ast_from_text(&buf)
+}
+
+pub fn use_tree_list(use_trees: impl IntoIterator<Item = ast::UseTree>) -> ast::UseTreeList {
+    let use_trees = use_trees.into_iter().map(|it| it.syntax().clone()).join(", ");
+    ast_from_text(&format!("use {{{}}};", use_trees))
+}
+
+pub fn use_item(use_tree: ast::UseTree) -> ast::UseItem {
+    ast_from_text(&format!("use {};", use_tree.syntax()))
+}
+
 pub fn record_field(name: ast::NameRef, expr: Option<ast::Expr>) -> ast::RecordField {
     return match expr {
         Some(expr) => from_text(&format!("{}: {}", name.syntax(), expr.syntax())),
@@ -219,22 +244,13 @@ fn unroot(n: SyntaxNode) -> SyntaxNode {
 }
 
 pub mod tokens {
-    use crate::{ast, AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken, T};
     use once_cell::sync::Lazy;
 
+    use crate::{ast, AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken};
+
     pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> =
         Lazy::new(|| SourceFile::parse("const C: <()>::Item = (1 != 1, 2 == 2, !true)\n;"));
 
-    pub fn comma() -> SyntaxToken {
-        SOURCE_FILE
-            .tree()
-            .syntax()
-            .descendants_with_tokens()
-            .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == T![,])
-            .unwrap()
-    }
-
     pub fn single_space() -> SyntaxToken {
         SOURCE_FILE
             .tree()
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 6bf0be565..643c54a9d 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -158,7 +158,7 @@ pub fn analysis_stats(
                 // in super-verbose mode for just one function, we print every single expression
                 let (_, sm) = db.body_with_source_map(f_id.into());
                 let src = sm.expr_syntax(expr_id);
-                if let Some(src) = src {
+                if let Ok(src) = src {
                     let original_file = src.file_id.original_file(db);
                     let line_index = host.analysis().file_line_index(original_file).unwrap();
                     let text_range = src.value.either(
@@ -186,7 +186,7 @@ pub fn analysis_stats(
             if verbosity.is_verbose() {
                 let (_, sm) = db.body_with_source_map(f_id.into());
                 let src = sm.expr_syntax(expr_id);
-                if let Some(src) = src {
+                if let Ok(src) = src {
                     // FIXME: it might be nice to have a function (on Analysis?) that goes from Source<T> -> (LineCol, LineCol) directly
                     // But also, we should just turn the type mismatches into diagnostics and provide these
                     let root = db.parse_or_expand(src.file_id).unwrap();