Diffstat (limited to 'crates')
-rw-r--r--  crates/ra_db/Cargo.toml | 2
-rw-r--r--  crates/ra_hir/src/code_model_impl/module.rs | 10
-rw-r--r--  crates/ra_hir/src/db.rs | 7
-rw-r--r--  crates/ra_hir/src/expr.rs | 16
-rw-r--r--  crates/ra_hir/src/ids.rs | 38
-rw-r--r--  crates/ra_hir/src/marks.rs | 2
-rw-r--r--  crates/ra_hir/src/module_tree.rs | 99
-rw-r--r--  crates/ra_hir/src/nameres.rs | 2
-rw-r--r--  crates/ra_hir/src/nameres/lower.rs | 5
-rw-r--r--  crates/ra_hir/src/query_definitions.rs | 12
-rw-r--r--  crates/ra_hir/src/source_binder.rs | 28
-rw-r--r--  crates/ra_hir/src/ty.rs | 37
-rw-r--r--  crates/ra_hir/src/ty/snapshots/tests__infer_in_elseif.snap | 17
-rw-r--r--  crates/ra_hir/src/ty/snapshots/tests__recursive_vars.snap | 14
-rw-r--r--  crates/ra_hir/src/ty/snapshots/tests__recursive_vars_2.snap | 21
-rw-r--r--  crates/ra_hir/src/ty/tests.rs | 49
-rw-r--r--  crates/ra_ide_api/src/call_info.rs | 22
-rw-r--r--  crates/ra_ide_api/src/marks.rs | 1
-rw-r--r--  crates/ra_ide_api_light/src/assists/replace_if_let_with_match.rs | 5
-rw-r--r--  crates/ra_lsp_server/src/main_loop/handlers.rs | 28
-rw-r--r--  crates/ra_lsp_server/src/project_model/cargo_workspace.rs | 10
-rw-r--r--  crates/ra_syntax/Cargo.toml | 2
-rw-r--r--  crates/ra_syntax/src/ast.rs | 18
-rw-r--r--  crates/ra_syntax/src/grammar/expressions.rs | 2
-rw-r--r--  crates/ra_syntax/tests/data/parser/ok/0043_complex_assignment.rs | 8
-rw-r--r--  crates/ra_syntax/tests/data/parser/ok/0043_complex_assignment.txt | 109
-rw-r--r--  crates/ra_vfs/src/io.rs | 271
-rw-r--r--  crates/ra_vfs/src/io/watcher.rs | 200
-rw-r--r--  crates/ra_vfs/src/lib.rs | 146
-rw-r--r--  crates/tools/src/bin/pre-commit.rs | 7
-rw-r--r--  crates/tools/src/lib.rs | 106
-rw-r--r--  crates/tools/src/main.rs | 127
-rw-r--r--  crates/tools/tests/cli.rs | 16
33 files changed, 834 insertions, 603 deletions
diff --git a/crates/ra_db/Cargo.toml b/crates/ra_db/Cargo.toml
index 9aa77f72e..2d39b77ed 100644
--- a/crates/ra_db/Cargo.toml
+++ b/crates/ra_db/Cargo.toml
@@ -6,7 +6,7 @@ authors = ["Aleksey Kladov <[email protected]>"]
6 6
7[dependencies] 7[dependencies]
8relative-path = "0.4.0" 8relative-path = "0.4.0"
9salsa = "0.10.0-alpha4" 9salsa = "0.10.0-alpha5"
10rustc-hash = "1.0" 10rustc-hash = "1.0"
11parking_lot = "0.7.0" 11parking_lot = "0.7.0"
12 12
diff --git a/crates/ra_hir/src/code_model_impl/module.rs b/crates/ra_hir/src/code_model_impl/module.rs
index 480ec27bf..418d59c91 100644
--- a/crates/ra_hir/src/code_model_impl/module.rs
+++ b/crates/ra_hir/src/code_model_impl/module.rs
@@ -25,9 +25,10 @@ impl Module {
25 25
26 pub(crate) fn definition_source_impl(&self, db: &impl HirDatabase) -> (FileId, ModuleSource) { 26 pub(crate) fn definition_source_impl(&self, db: &impl HirDatabase) -> (FileId, ModuleSource) {
27 let module_tree = db.module_tree(self.krate); 27 let module_tree = db.module_tree(self.krate);
28 let source = self.module_id.source(&module_tree); 28 let file_id = self.module_id.file_id(&module_tree);
29 let module_source = ModuleSource::from_source_item_id(db, source); 29 let decl_id = self.module_id.decl_id(&module_tree);
30 let file_id = source.file_id.as_original_file(); 30 let module_source = ModuleSource::new(db, file_id, decl_id);
31 let file_id = file_id.as_original_file();
31 (file_id, module_source) 32 (file_id, module_source)
32 } 33 }
33 34
@@ -39,8 +40,7 @@ impl Module {
39 let link = self.module_id.parent_link(&module_tree)?; 40 let link = self.module_id.parent_link(&module_tree)?;
40 let file_id = link 41 let file_id = link
41 .owner(&module_tree) 42 .owner(&module_tree)
42 .source(&module_tree) 43 .file_id(&module_tree)
43 .file_id
44 .as_original_file(); 44 .as_original_file();
45 let src = link.source(&module_tree, db); 45 let src = link.source(&module_tree, db);
46 Some((file_id, src)) 46 Some((file_id, src))
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs
index 5df4bd4a1..3f76b769d 100644
--- a/crates/ra_hir/src/db.rs
+++ b/crates/ra_hir/src/db.rs
@@ -16,6 +16,7 @@ use crate::{
16 adt::{StructData, EnumData}, 16 adt::{StructData, EnumData},
17 impl_block::ModuleImplBlocks, 17 impl_block::ModuleImplBlocks,
18 generics::{GenericParams, GenericDef}, 18 generics::{GenericParams, GenericDef},
19 ids::SourceFileItemId,
19}; 20};
20 21
21#[salsa::query_group(HirDatabaseStorage)] 22#[salsa::query_group(HirDatabaseStorage)]
@@ -51,7 +52,11 @@ pub trait HirDatabase: SourceDatabase + AsRef<HirInterner> {
51 fn file_item(&self, source_item_id: SourceItemId) -> TreeArc<SyntaxNode>; 52 fn file_item(&self, source_item_id: SourceItemId) -> TreeArc<SyntaxNode>;
52 53
53 #[salsa::invoke(crate::module_tree::Submodule::submodules_query)] 54 #[salsa::invoke(crate::module_tree::Submodule::submodules_query)]
54 fn submodules(&self, source: SourceItemId) -> Arc<Vec<crate::module_tree::Submodule>>; 55 fn submodules(
56 &self,
57 file_id: HirFileId,
57 decl_id: Option<SourceFileItemId>,
59 ) -> Arc<Vec<crate::module_tree::Submodule>>;
55 60
56 #[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_query)] 61 #[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_query)]
57 fn lower_module(&self, module: Module) -> (Arc<LoweredModule>, Arc<ImportSourceMap>); 62 fn lower_module(&self, module: Module) -> (Arc<LoweredModule>, Arc<ImportSourceMap>);
diff --git a/crates/ra_hir/src/expr.rs b/crates/ra_hir/src/expr.rs
index 29469af2c..60d997bbe 100644
--- a/crates/ra_hir/src/expr.rs
+++ b/crates/ra_hir/src/expr.rs
@@ -498,7 +498,13 @@ impl ExprCollector {
498 let then_branch = self.collect_block_opt(e.then_branch()); 498 let then_branch = self.collect_block_opt(e.then_branch());
499 let else_branch = e 499 let else_branch = e
500 .else_branch() 500 .else_branch()
501 .map(|e| self.collect_block(e)) 501 .map(|b| match b {
502 ast::ElseBranchFlavor::Block(it) => self.collect_block(it),
503 ast::ElseBranchFlavor::IfExpr(elif) => {
504 let expr: &ast::Expr = ast::Expr::cast(elif.syntax()).unwrap();
505 self.collect_expr(expr)
506 }
507 })
502 .unwrap_or_else(|| self.empty_block()); 508 .unwrap_or_else(|| self.empty_block());
503 let placeholder_pat = self.pats.alloc(Pat::Missing); 509 let placeholder_pat = self.pats.alloc(Pat::Missing);
504 let arms = vec![ 510 let arms = vec![
@@ -521,7 +527,13 @@ impl ExprCollector {
521 } else { 527 } else {
522 let condition = self.collect_expr_opt(e.condition().and_then(|c| c.expr())); 528 let condition = self.collect_expr_opt(e.condition().and_then(|c| c.expr()));
523 let then_branch = self.collect_block_opt(e.then_branch()); 529 let then_branch = self.collect_block_opt(e.then_branch());
524 let else_branch = e.else_branch().map(|e| self.collect_block(e)); 530 let else_branch = e.else_branch().map(|b| match b {
531 ast::ElseBranchFlavor::Block(it) => self.collect_block(it),
532 ast::ElseBranchFlavor::IfExpr(elif) => {
533 let expr: &ast::Expr = ast::Expr::cast(elif.syntax()).unwrap();
534 self.collect_expr(expr)
535 }
536 });
525 self.alloc_expr( 537 self.alloc_expr(
526 Expr::If { 538 Expr::If {
527 condition, 539 condition,
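
The two hunks above teach the HIR expression collector that an `else` branch is either a plain block or another `if` expression, so `else if` chains are lowered by recursing into the nested `if` instead of being dropped. A minimal sketch of that shape (toy AST/HIR enums, not rust-analyzer's real types):

```rust
// Toy illustration of lowering `if / else if / else`; all types here are
// hypothetical stand-ins, not rust-analyzer's real AST or HIR.
#[derive(Debug)]
enum Ast {
    If { cond: bool, then: Vec<Ast>, else_branch: Option<Box<ElseBranch>> },
    Lit(i32),
}

#[derive(Debug)]
enum ElseBranch {
    Block(Vec<Ast>), // `else { ... }`
    IfExpr(Ast),     // `else if ...` -- just another `If` expression
}

#[derive(Debug)]
enum Hir {
    If { cond: bool, then: Vec<Hir>, els: Vec<Hir> },
    Lit(i32),
}

fn lower(ast: &Ast) -> Hir {
    match ast {
        Ast::Lit(n) => Hir::Lit(*n),
        Ast::If { cond, then, else_branch } => {
            let els = match else_branch.as_deref() {
                // missing `else` lowers to an empty block
                None => Vec::new(),
                Some(ElseBranch::Block(stmts)) => stmts.iter().map(lower).collect(),
                // `else if` is handled by recursing into the nested `if`,
                // mirroring the `collect_expr(expr)` call in the diff above.
                Some(ElseBranch::IfExpr(elif)) => vec![lower(elif)],
            };
            Hir::If { cond: *cond, then: then.iter().map(lower).collect(), els }
        }
    }
}

fn main() {
    let chain = Ast::If {
        cond: false,
        then: vec![Ast::Lit(1)],
        else_branch: Some(Box::new(ElseBranch::IfExpr(Ast::If {
            cond: true,
            then: vec![Ast::Lit(2)],
            else_branch: None,
        }))),
    };
    println!("{:?}", lower(&chain));
}
```

Recursing keeps the lowering linear in the length of the chain and needs no dedicated `ElseIf` node.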
diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs
index 7dd4b540e..0e4dc6261 100644
--- a/crates/ra_hir/src/ids.rs
+++ b/crates/ra_hir/src/ids.rs
@@ -4,7 +4,7 @@ use std::{
4}; 4};
5 5
6use ra_db::{LocationIntener, FileId}; 6use ra_db::{LocationIntener, FileId};
7use ra_syntax::{TreeArc, SyntaxNode, SourceFile, AstNode, ast}; 7use ra_syntax::{TreeArc, SyntaxNode, SourceFile, AstNode, SyntaxNodePtr, ast};
8use ra_arena::{Arena, RawId, ArenaId, impl_arena_id}; 8use ra_arena::{Arena, RawId, ArenaId, impl_arena_id};
9 9
10use crate::{ 10use crate::{
@@ -203,7 +203,7 @@ pub(crate) trait AstItemDef<N: AstNode>: ArenaId + Clone {
203 let items = ctx.db.file_items(ctx.file_id); 203 let items = ctx.db.file_items(ctx.file_id);
204 let raw = SourceItemId { 204 let raw = SourceItemId {
205 file_id: ctx.file_id, 205 file_id: ctx.file_id,
206 item_id: Some(items.id_of(ctx.file_id, ast.syntax())), 206 item_id: items.id_of(ctx.file_id, ast.syntax()),
207 }; 207 };
208 let loc = ItemLoc { 208 let loc = ItemLoc {
209 module: ctx.module, 209 module: ctx.module,
@@ -301,15 +301,14 @@ impl_arena_id!(SourceFileItemId);
301#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] 301#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
302pub struct SourceItemId { 302pub struct SourceItemId {
303 pub(crate) file_id: HirFileId, 303 pub(crate) file_id: HirFileId,
304 /// None for the whole file. 304 pub(crate) item_id: SourceFileItemId,
305 pub(crate) item_id: Option<SourceFileItemId>,
306} 305}
307 306
308/// Maps items' `SyntaxNode`s to `SourceFileItemId`s and back. 307/// Maps items' `SyntaxNode`s to `SourceFileItemId`s and back.
309#[derive(Debug, PartialEq, Eq)] 308#[derive(Debug, PartialEq, Eq)]
310pub struct SourceFileItems { 309pub struct SourceFileItems {
311 file_id: HirFileId, 310 file_id: HirFileId,
312 arena: Arena<SourceFileItemId, TreeArc<SyntaxNode>>, 311 arena: Arena<SourceFileItemId, SyntaxNodePtr>,
313} 312}
314 313
315impl SourceFileItems { 314impl SourceFileItems {
@@ -329,15 +328,15 @@ impl SourceFileItems {
329 // trait does not change ids of top-level items, which helps caching. 328 // trait does not change ids of top-level items, which helps caching.
330 bfs(source_file.syntax(), |it| { 329 bfs(source_file.syntax(), |it| {
331 if let Some(module_item) = ast::ModuleItem::cast(it) { 330 if let Some(module_item) = ast::ModuleItem::cast(it) {
332 self.alloc(module_item.syntax().to_owned()); 331 self.alloc(module_item.syntax());
333 } else if let Some(macro_call) = ast::MacroCall::cast(it) { 332 } else if let Some(macro_call) = ast::MacroCall::cast(it) {
334 self.alloc(macro_call.syntax().to_owned()); 333 self.alloc(macro_call.syntax());
335 } 334 }
336 }) 335 })
337 } 336 }
338 337
339 fn alloc(&mut self, item: TreeArc<SyntaxNode>) -> SourceFileItemId { 338 fn alloc(&mut self, item: &SyntaxNode) -> SourceFileItemId {
340 self.arena.alloc(item) 339 self.arena.alloc(SyntaxNodePtr::new(item))
341 } 340 }
342 pub(crate) fn id_of(&self, file_id: HirFileId, item: &SyntaxNode) -> SourceFileItemId { 341 pub(crate) fn id_of(&self, file_id: HirFileId, item: &SyntaxNode) -> SourceFileItemId {
343 assert_eq!( 342 assert_eq!(
@@ -348,17 +347,8 @@ impl SourceFileItems {
348 self.id_of_unchecked(item) 347 self.id_of_unchecked(item)
349 } 348 }
350 pub(crate) fn id_of_unchecked(&self, item: &SyntaxNode) -> SourceFileItemId { 349 pub(crate) fn id_of_unchecked(&self, item: &SyntaxNode) -> SourceFileItemId {
351 if let Some((id, _)) = self.arena.iter().find(|(_id, i)| *i == item) { 350 let ptr = SyntaxNodePtr::new(item);
352 return id; 351 if let Some((id, _)) = self.arena.iter().find(|(_id, i)| **i == ptr) {
353 }
354 // This should not happen. Let's try to give a sensible diagnostics.
355 if let Some((id, i)) = self.arena.iter().find(|(_id, i)| i.range() == item.range()) {
356 // FIXME(#288): whyyy are we getting here?
357 log::error!(
358 "unequal syntax nodes with the same range:\n{:?}\n{:?}",
359 item,
360 i
361 );
362 return id; 352 return id;
363 } 353 }
364 panic!( 354 panic!(
@@ -367,15 +357,11 @@ impl SourceFileItems {
367 self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(), 357 self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
368 ); 358 );
369 } 359 }
370 pub fn id_of_parse(&self) -> SourceFileItemId {
371 let (id, _syntax) = self.arena.iter().next().unwrap();
372 id
373 }
374} 360}
375 361
376impl std::ops::Index<SourceFileItemId> for SourceFileItems { 362impl std::ops::Index<SourceFileItemId> for SourceFileItems {
377 type Output = SyntaxNode; 363 type Output = SyntaxNodePtr;
378 fn index(&self, idx: SourceFileItemId) -> &SyntaxNode { 364 fn index(&self, idx: SourceFileItemId) -> &SyntaxNodePtr {
379 &self.arena[idx] 365 &self.arena[idx]
380 } 366 }
381} 367}
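
`SourceFileItems` now stores lightweight `SyntaxNodePtr`s in its arena instead of owned `TreeArc<SyntaxNode>` subtrees, and `id_of_unchecked` compares pointers rather than whole nodes. The underlying idea, sketched with purely hypothetical types: remember only enough about a node (here, its text range) to re-find it in a freshly parsed tree on demand.

```rust
// A minimal sketch of the "node pointer" idea. All types are hypothetical;
// the real SyntaxNodePtr also records the node kind.
use std::ops::Range;

#[derive(Debug)]
struct Node {
    kind: &'static str,
    range: Range<usize>,
    children: Vec<Node>,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct NodePtr {
    start: usize,
    end: usize,
}

impl NodePtr {
    fn new(node: &Node) -> NodePtr {
        NodePtr { start: node.range.start, end: node.range.end }
    }

    /// Walk the tree and return the deepest node with exactly this range.
    fn to_node<'a>(&self, root: &'a Node) -> Option<&'a Node> {
        if root.range.start == self.start && root.range.end == self.end {
            // Prefer a child with the same range, if any (deepest match).
            return Some(
                root.children
                    .iter()
                    .find_map(|c| self.to_node(c))
                    .unwrap_or(root),
            );
        }
        root.children
            .iter()
            .filter(|c| c.range.start <= self.start && self.end <= c.range.end)
            .find_map(|c| self.to_node(c))
    }
}

fn main() {
    let tree = Node {
        kind: "SOURCE_FILE",
        range: 0..20,
        children: vec![Node { kind: "FN_DEF", range: 0..12, children: vec![] }],
    };
    // Cheap to store (two integers), resolved back to a node only when needed.
    let ptr = NodePtr::new(&tree.children[0]);
    assert_eq!(ptr.to_node(&tree).map(|n| n.kind), Some("FN_DEF"));
    println!("{:?}", ptr);
}
```

Storing a couple of integers per item keeps the arena small and makes equality checks cheap, which is what the simplified `id_of_unchecked` lookup above relies on.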
diff --git a/crates/ra_hir/src/marks.rs b/crates/ra_hir/src/marks.rs
index 338ed0516..d704c3adb 100644
--- a/crates/ra_hir/src/marks.rs
+++ b/crates/ra_hir/src/marks.rs
@@ -1,4 +1,6 @@
1test_utils::marks!( 1test_utils::marks!(
2 name_res_works_for_broken_modules 2 name_res_works_for_broken_modules
3 item_map_enum_importing 3 item_map_enum_importing
4 type_var_cycles_resolve_completely
5 type_var_cycles_resolve_as_possible
4); 6);
diff --git a/crates/ra_hir/src/module_tree.rs b/crates/ra_hir/src/module_tree.rs
index d5ad9decb..d1dc3fa4b 100644
--- a/crates/ra_hir/src/module_tree.rs
+++ b/crates/ra_hir/src/module_tree.rs
@@ -11,21 +11,28 @@ use ra_syntax::{
11use ra_arena::{Arena, RawId, impl_arena_id}; 11use ra_arena::{Arena, RawId, impl_arena_id};
12use test_utils::tested_by; 12use test_utils::tested_by;
13 13
14use crate::{Name, AsName, HirDatabase, SourceItemId, HirFileId, Problem, SourceFileItems, ModuleSource}; 14use crate::{
15 Name, AsName, HirDatabase, SourceItemId, HirFileId, Problem, SourceFileItems, ModuleSource,
16 ids::SourceFileItemId,
17};
15 18
16impl ModuleSource { 19impl ModuleSource {
17 pub(crate) fn from_source_item_id( 20 pub(crate) fn new(
18 db: &impl HirDatabase, 21 db: &impl HirDatabase,
19 source_item_id: SourceItemId, 22 file_id: HirFileId,
23 decl_id: Option<SourceFileItemId>,
20 ) -> ModuleSource { 24 ) -> ModuleSource {
21 let module_syntax = db.file_item(source_item_id); 25 match decl_id {
22 if let Some(source_file) = ast::SourceFile::cast(&module_syntax) { 26 Some(item_id) => {
23 ModuleSource::SourceFile(source_file.to_owned()) 27 let module = db.file_item(SourceItemId { file_id, item_id });
24 } else if let Some(module) = ast::Module::cast(&module_syntax) { 28 let module = ast::Module::cast(&*module).unwrap();
25 assert!(module.item_list().is_some(), "expected inline module"); 29 assert!(module.item_list().is_some(), "expected inline module");
26 ModuleSource::Module(module.to_owned()) 30 ModuleSource::Module(module.to_owned())
27 } else { 31 }
28 panic!("expected file or inline module") 32 None => {
33 let source_file = db.hir_parse(file_id);
34 ModuleSource::SourceFile(source_file)
35 }
29 } 36 }
30 } 37 }
31} 38}
@@ -34,18 +41,18 @@ impl ModuleSource {
34pub struct Submodule { 41pub struct Submodule {
35 name: Name, 42 name: Name,
36 is_declaration: bool, 43 is_declaration: bool,
37 source: SourceItemId, 44 decl_id: SourceFileItemId,
38} 45}
39 46
40impl Submodule { 47impl Submodule {
41 pub(crate) fn submodules_query( 48 pub(crate) fn submodules_query(
42 db: &impl HirDatabase, 49 db: &impl HirDatabase,
43 source: SourceItemId, 50 file_id: HirFileId,
51 decl_id: Option<SourceFileItemId>,
44 ) -> Arc<Vec<Submodule>> { 52 ) -> Arc<Vec<Submodule>> {
45 db.check_canceled(); 53 db.check_canceled();
46 let file_id = source.file_id;
47 let file_items = db.file_items(file_id); 54 let file_items = db.file_items(file_id);
48 let module_source = ModuleSource::from_source_item_id(db, source); 55 let module_source = ModuleSource::new(db, file_id, decl_id);
49 let submodules = match module_source { 56 let submodules = match module_source {
50 ModuleSource::SourceFile(source_file) => { 57 ModuleSource::SourceFile(source_file) => {
51 collect_submodules(file_id, &file_items, &*source_file) 58 collect_submodules(file_id, &file_items, &*source_file)
@@ -54,6 +61,7 @@ impl Submodule {
54 collect_submodules(file_id, &file_items, module.item_list().unwrap()) 61 collect_submodules(file_id, &file_items, module.item_list().unwrap())
55 } 62 }
56 }; 63 };
64
57 return Arc::new(submodules); 65 return Arc::new(submodules);
58 66
59 fn collect_submodules( 67 fn collect_submodules(
@@ -75,10 +83,7 @@ impl Submodule {
75 let sub = Submodule { 83 let sub = Submodule {
76 name, 84 name,
77 is_declaration: module.has_semi(), 85 is_declaration: module.has_semi(),
78 source: SourceItemId { 86 decl_id: file_items.id_of(file_id, module.syntax()),
79 file_id,
80 item_id: Some(file_items.id_of(file_id, module.syntax())),
81 },
82 }; 87 };
83 Some(sub) 88 Some(sub)
84 }) 89 })
@@ -110,7 +115,9 @@ pub struct ModuleTree {
110 115
111#[derive(Debug, PartialEq, Eq, Hash)] 116#[derive(Debug, PartialEq, Eq, Hash)]
112pub struct ModuleData { 117pub struct ModuleData {
113 source: SourceItemId, 118 file_id: HirFileId,
119 /// Points to `ast::Module`, `None` for the whole file.
120 decl_id: Option<SourceFileItemId>,
114 parent: Option<LinkId>, 121 parent: Option<LinkId>,
115 children: Vec<LinkId>, 122 children: Vec<LinkId>,
116} 123}
@@ -136,8 +143,15 @@ impl ModuleTree {
136 self.mods.iter().map(|(id, _)| id) 143 self.mods.iter().map(|(id, _)| id)
137 } 144 }
138 145
139 pub(crate) fn find_module_by_source(&self, source: SourceItemId) -> Option<ModuleId> { 146 pub(crate) fn find_module_by_source(
140 let (res, _) = self.mods.iter().find(|(_, m)| m.source == source)?; 147 &self,
148 file_id: HirFileId,
149 decl_id: Option<SourceFileItemId>,
150 ) -> Option<ModuleId> {
151 let (res, _) = self
152 .mods
153 .iter()
154 .find(|(_, m)| (m.file_id, m.decl_id) == (file_id, decl_id))?;
141 Some(res) 155 Some(res)
142 } 156 }
143 157
@@ -147,11 +161,7 @@ impl ModuleTree {
147 let source_root_id = db.file_source_root(file_id); 161 let source_root_id = db.file_source_root(file_id);
148 162
149 let source_root = db.source_root(source_root_id); 163 let source_root = db.source_root(source_root_id);
150 let source = SourceItemId { 164 self.init_subtree(db, &source_root, None, file_id.into(), None);
151 file_id: file_id.into(),
152 item_id: None,
153 };
154 self.init_subtree(db, &source_root, None, source);
155 } 165 }
156 166
157 fn init_subtree( 167 fn init_subtree(
@@ -159,16 +169,21 @@ impl ModuleTree {
159 db: &impl HirDatabase, 169 db: &impl HirDatabase,
160 source_root: &SourceRoot, 170 source_root: &SourceRoot,
161 parent: Option<LinkId>, 171 parent: Option<LinkId>,
162 source: SourceItemId, 172 file_id: HirFileId,
173 decl_id: Option<SourceFileItemId>,
163 ) -> ModuleId { 174 ) -> ModuleId {
164 let id = self.alloc_mod(ModuleData { 175 let id = self.alloc_mod(ModuleData {
165 source, 176 file_id,
177 decl_id,
166 parent, 178 parent,
167 children: Vec::new(), 179 children: Vec::new(),
168 }); 180 });
169 for sub in db.submodules(source).iter() { 181 for sub in db.submodules(file_id, decl_id).iter() {
170 let link = self.alloc_link(LinkData { 182 let link = self.alloc_link(LinkData {
171 source: sub.source, 183 source: SourceItemId {
184 file_id,
185 item_id: sub.decl_id,
186 },
172 name: sub.name.clone(), 187 name: sub.name.clone(),
173 owner: id, 188 owner: id,
174 points_to: Vec::new(), 189 points_to: Vec::new(),
@@ -176,24 +191,17 @@ impl ModuleTree {
176 }); 191 });
177 192
178 let (points_to, problem) = if sub.is_declaration { 193 let (points_to, problem) = if sub.is_declaration {
179 let (points_to, problem) = resolve_submodule(db, source.file_id, &sub.name); 194 let (points_to, problem) = resolve_submodule(db, file_id, &sub.name);
180 let points_to = points_to 195 let points_to = points_to
181 .into_iter() 196 .into_iter()
182 .map(|file_id| { 197 .map(|file_id| {
183 self.init_subtree( 198 self.init_subtree(db, source_root, Some(link), file_id.into(), None)
184 db,
185 source_root,
186 Some(link),
187 SourceItemId {
188 file_id: file_id.into(),
189 item_id: None,
190 },
191 )
192 }) 199 })
193 .collect::<Vec<_>>(); 200 .collect::<Vec<_>>();
194 (points_to, problem) 201 (points_to, problem)
195 } else { 202 } else {
196 let points_to = self.init_subtree(db, source_root, Some(link), sub.source); 203 let points_to =
204 self.init_subtree(db, source_root, Some(link), file_id, Some(sub.decl_id));
197 (vec![points_to], None) 205 (vec![points_to], None)
198 }; 206 };
199 207
@@ -216,8 +224,11 @@ impl ModuleTree {
216} 224}
217 225
218impl ModuleId { 226impl ModuleId {
219 pub(crate) fn source(self, tree: &ModuleTree) -> SourceItemId { 227 pub(crate) fn file_id(self, tree: &ModuleTree) -> HirFileId {
220 tree.mods[self].source 228 tree.mods[self].file_id
229 }
230 pub(crate) fn decl_id(self, tree: &ModuleTree) -> Option<SourceFileItemId> {
231 tree.mods[self].decl_id
221 } 232 }
222 pub(crate) fn parent_link(self, tree: &ModuleTree) -> Option<LinkId> { 233 pub(crate) fn parent_link(self, tree: &ModuleTree) -> Option<LinkId> {
223 tree.mods[self].parent 234 tree.mods[self].parent
diff --git a/crates/ra_hir/src/nameres.rs b/crates/ra_hir/src/nameres.rs
index 5193900e0..97ce6c946 100644
--- a/crates/ra_hir/src/nameres.rs
+++ b/crates/ra_hir/src/nameres.rs
@@ -215,7 +215,7 @@ where
215 // Populate extern crates prelude 215 // Populate extern crates prelude
216 { 216 {
217 let root_id = module_id.crate_root(&self.module_tree); 217 let root_id = module_id.crate_root(&self.module_tree);
218 let file_id = root_id.source(&self.module_tree).file_id; 218 let file_id = root_id.file_id(&self.module_tree);
219 let crate_graph = self.db.crate_graph(); 219 let crate_graph = self.db.crate_graph();
220 if let Some(crate_id) = crate_graph.crate_id_for_crate_root(file_id.as_original_file()) 220 if let Some(crate_id) = crate_graph.crate_id_for_crate_root(file_id.as_original_file())
221 { 221 {
diff --git a/crates/ra_hir/src/nameres/lower.rs b/crates/ra_hir/src/nameres/lower.rs
index 1d77548f3..8df11a5f4 100644
--- a/crates/ra_hir/src/nameres/lower.rs
+++ b/crates/ra_hir/src/nameres/lower.rs
@@ -121,10 +121,7 @@ impl LoweredModule {
121 let item_id = file_items.id_of_unchecked(macro_call.syntax()); 121 let item_id = file_items.id_of_unchecked(macro_call.syntax());
122 let loc = MacroCallLoc { 122 let loc = MacroCallLoc {
123 module, 123 module,
124 source_item_id: SourceItemId { 124 source_item_id: SourceItemId { file_id, item_id },
125 file_id,
126 item_id: Some(item_id),
127 },
128 }; 125 };
129 let id = loc.id(db); 126 let id = loc.id(db);
130 let file_id = HirFileId::from(id); 127 let file_id = HirFileId::from(id);
diff --git a/crates/ra_hir/src/query_definitions.rs b/crates/ra_hir/src/query_definitions.rs
index 61c93a964..bf9ac0dfb 100644
--- a/crates/ra_hir/src/query_definitions.rs
+++ b/crates/ra_hir/src/query_definitions.rs
@@ -4,9 +4,7 @@ use std::{
4}; 4};
5 5
6use rustc_hash::FxHashMap; 6use rustc_hash::FxHashMap;
7use ra_syntax::{ 7use ra_syntax::{SyntaxNode, TreeArc};
8 AstNode, SyntaxNode, TreeArc,
9};
10use ra_db::{CrateId}; 8use ra_db::{CrateId};
11 9
12use crate::{ 10use crate::{
@@ -32,10 +30,10 @@ pub(super) fn file_item(
32 db: &impl HirDatabase, 30 db: &impl HirDatabase,
33 source_item_id: SourceItemId, 31 source_item_id: SourceItemId,
34) -> TreeArc<SyntaxNode> { 32) -> TreeArc<SyntaxNode> {
35 match source_item_id.item_id { 33 let source_file = db.hir_parse(source_item_id.file_id);
36 Some(id) => db.file_items(source_item_id.file_id)[id].to_owned(), 34 db.file_items(source_item_id.file_id)[source_item_id.item_id]
37 None => db.hir_parse(source_item_id.file_id).syntax().to_owned(), 35 .to_node(&source_file)
38 } 36 .to_owned()
39} 37}
40 38
41pub(super) fn item_map(db: &impl HirDatabase, crate_id: CrateId) -> Arc<ItemMap> { 39pub(super) fn item_map(db: &impl HirDatabase, crate_id: CrateId) -> Arc<ItemMap> {
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index c0b3f1cd4..f523f0647 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -13,18 +13,14 @@ use ra_syntax::{
13}; 13};
14 14
15use crate::{ 15use crate::{
16 HirDatabase, Function, SourceItemId, ModuleDef, 16 HirDatabase, Function, ModuleDef,
17 AsName, Module, 17 AsName, Module, HirFileId,
18 ids::LocationCtx, 18 ids::{LocationCtx, SourceFileItemId},
19}; 19};
20 20
21/// Locates the module by `FileId`. Picks topmost module in the file. 21/// Locates the module by `FileId`. Picks topmost module in the file.
22pub fn module_from_file_id(db: &impl HirDatabase, file_id: FileId) -> Option<Module> { 22pub fn module_from_file_id(db: &impl HirDatabase, file_id: FileId) -> Option<Module> {
23 let module_source = SourceItemId { 23 module_from_source(db, file_id.into(), None)
24 file_id: file_id.into(),
25 item_id: None,
26 };
27 module_from_source(db, module_source)
28} 24}
29 25
30/// Locates the child module by `mod child;` declaration. 26/// Locates the child module by `mod child;` declaration.
@@ -59,11 +55,7 @@ fn module_from_inline(
59 let file_id = file_id.into(); 55 let file_id = file_id.into();
60 let file_items = db.file_items(file_id); 56 let file_items = db.file_items(file_id);
61 let item_id = file_items.id_of(file_id, module.syntax()); 57 let item_id = file_items.id_of(file_id, module.syntax());
62 let source = SourceItemId { 58 module_from_source(db, file_id, Some(item_id))
63 file_id,
64 item_id: Some(item_id),
65 };
66 module_from_source(db, source)
67} 59}
68 60
69/// Locates the module by child syntax element within the module 61/// Locates the module by child syntax element within the module
@@ -83,13 +75,17 @@ pub fn module_from_child_node(
83 } 75 }
84} 76}
85 77
86fn module_from_source(db: &impl HirDatabase, source: SourceItemId) -> Option<Module> { 78fn module_from_source(
87 let source_root_id = db.file_source_root(source.file_id.as_original_file()); 79 db: &impl HirDatabase,
80 file_id: HirFileId,
81 decl_id: Option<SourceFileItemId>,
82) -> Option<Module> {
83 let source_root_id = db.file_source_root(file_id.as_original_file());
88 db.source_root_crates(source_root_id) 84 db.source_root_crates(source_root_id)
89 .iter() 85 .iter()
90 .find_map(|&krate| { 86 .find_map(|&krate| {
91 let module_tree = db.module_tree(krate); 87 let module_tree = db.module_tree(krate);
92 let module_id = module_tree.find_module_by_source(source)?; 88 let module_id = module_tree.find_module_by_source(file_id, decl_id)?;
93 Some(Module { krate, module_id }) 89 Some(Module { krate, module_id })
94 }) 90 })
95} 91}
diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs
index 179ebddee..31ea45706 100644
--- a/crates/ra_hir/src/ty.rs
+++ b/crates/ra_hir/src/ty.rs
@@ -29,6 +29,8 @@ use ra_arena::map::ArenaMap;
29use join_to_string::join; 29use join_to_string::join;
30use rustc_hash::FxHashMap; 30use rustc_hash::FxHashMap;
31 31
32use test_utils::tested_by;
33
32use crate::{ 34use crate::{
33 Module, Function, Struct, StructField, Enum, EnumVariant, Path, Name, ImplBlock, 35 Module, Function, Struct, StructField, Enum, EnumVariant, Path, Name, ImplBlock,
34 FnSignature, FnScopes, ModuleDef, AdtDef, 36 FnSignature, FnScopes, ModuleDef, AdtDef,
@@ -862,14 +864,15 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
862 } 864 }
863 865
864 fn resolve_all(mut self) -> InferenceResult { 866 fn resolve_all(mut self) -> InferenceResult {
867 let mut tv_stack = Vec::new();
865 let mut expr_types = mem::replace(&mut self.type_of_expr, ArenaMap::default()); 868 let mut expr_types = mem::replace(&mut self.type_of_expr, ArenaMap::default());
866 for ty in expr_types.values_mut() { 869 for ty in expr_types.values_mut() {
867 let resolved = self.resolve_ty_completely(mem::replace(ty, Ty::Unknown)); 870 let resolved = self.resolve_ty_completely(&mut tv_stack, mem::replace(ty, Ty::Unknown));
868 *ty = resolved; 871 *ty = resolved;
869 } 872 }
870 let mut pat_types = mem::replace(&mut self.type_of_pat, ArenaMap::default()); 873 let mut pat_types = mem::replace(&mut self.type_of_pat, ArenaMap::default());
871 for ty in pat_types.values_mut() { 874 for ty in pat_types.values_mut() {
872 let resolved = self.resolve_ty_completely(mem::replace(ty, Ty::Unknown)); 875 let resolved = self.resolve_ty_completely(&mut tv_stack, mem::replace(ty, Ty::Unknown));
873 *ty = resolved; 876 *ty = resolved;
874 } 877 }
875 InferenceResult { 878 InferenceResult {
@@ -1014,13 +1017,21 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
1014 /// by their known types. All types returned by the infer_* functions should 1017 /// by their known types. All types returned by the infer_* functions should
1015 /// be resolved as far as possible, i.e. contain no type variables with 1018 /// be resolved as far as possible, i.e. contain no type variables with
1016 /// known type. 1019 /// known type.
1017 fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty { 1020 fn resolve_ty_as_possible(&mut self, tv_stack: &mut Vec<TypeVarId>, ty: Ty) -> Ty {
1018 ty.fold(&mut |ty| match ty { 1021 ty.fold(&mut |ty| match ty {
1019 Ty::Infer(tv) => { 1022 Ty::Infer(tv) => {
1020 let inner = tv.to_inner(); 1023 let inner = tv.to_inner();
1024 if tv_stack.contains(&inner) {
1025 tested_by!(type_var_cycles_resolve_as_possible);
1026 // recursive type
1027 return tv.fallback_value();
1028 }
1021 if let Some(known_ty) = self.var_unification_table.probe_value(inner).known() { 1029 if let Some(known_ty) = self.var_unification_table.probe_value(inner).known() {
1022 // known_ty may contain other variables that are known by now 1030 // known_ty may contain other variables that are known by now
1023 self.resolve_ty_as_possible(known_ty.clone()) 1031 tv_stack.push(inner);
1032 let result = self.resolve_ty_as_possible(tv_stack, known_ty.clone());
1033 tv_stack.pop();
1034 result
1024 } else { 1035 } else {
1025 ty 1036 ty
1026 } 1037 }
@@ -1049,13 +1060,21 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
1049 1060
1050 /// Resolves the type completely; type variables without known type are 1061 /// Resolves the type completely; type variables without known type are
1051 /// replaced by Ty::Unknown. 1062 /// replaced by Ty::Unknown.
1052 fn resolve_ty_completely(&mut self, ty: Ty) -> Ty { 1063 fn resolve_ty_completely(&mut self, tv_stack: &mut Vec<TypeVarId>, ty: Ty) -> Ty {
1053 ty.fold(&mut |ty| match ty { 1064 ty.fold(&mut |ty| match ty {
1054 Ty::Infer(tv) => { 1065 Ty::Infer(tv) => {
1055 let inner = tv.to_inner(); 1066 let inner = tv.to_inner();
1067 if tv_stack.contains(&inner) {
1068 tested_by!(type_var_cycles_resolve_completely);
1069 // recursive type
1070 return tv.fallback_value();
1071 }
1056 if let Some(known_ty) = self.var_unification_table.probe_value(inner).known() { 1072 if let Some(known_ty) = self.var_unification_table.probe_value(inner).known() {
1057 // known_ty may contain other variables that are known by now 1073 // known_ty may contain other variables that are known by now
1058 self.resolve_ty_completely(known_ty.clone()) 1074 tv_stack.push(inner);
1075 let result = self.resolve_ty_completely(tv_stack, known_ty.clone());
1076 tv_stack.pop();
1077 result
1059 } else { 1078 } else {
1060 tv.fallback_value() 1079 tv.fallback_value()
1061 } 1080 }
@@ -1070,7 +1089,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
1070 let name = path.as_ident().cloned().unwrap_or_else(Name::self_param); 1089 let name = path.as_ident().cloned().unwrap_or_else(Name::self_param);
1071 if let Some(scope_entry) = self.scopes.resolve_local_name(expr, name) { 1090 if let Some(scope_entry) = self.scopes.resolve_local_name(expr, name) {
1072 let ty = self.type_of_pat.get(scope_entry.pat())?; 1091 let ty = self.type_of_pat.get(scope_entry.pat())?;
1073 let ty = self.resolve_ty_as_possible(ty.clone()); 1092 let ty = self.resolve_ty_as_possible(&mut vec![], ty.clone());
1074 return Some(ty); 1093 return Some(ty);
1075 }; 1094 };
1076 }; 1095 };
@@ -1239,7 +1258,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
1239 // use a new type variable if we got Ty::Unknown here 1258 // use a new type variable if we got Ty::Unknown here
1240 let ty = self.insert_type_vars_shallow(ty); 1259 let ty = self.insert_type_vars_shallow(ty);
1241 self.unify(&ty, expected); 1260 self.unify(&ty, expected);
1242 let ty = self.resolve_ty_as_possible(ty); 1261 let ty = self.resolve_ty_as_possible(&mut vec![], ty);
1243 self.write_pat_ty(pat, ty.clone()); 1262 self.write_pat_ty(pat, ty.clone());
1244 ty 1263 ty
1245 } 1264 }
@@ -1538,7 +1557,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
1538 // use a new type variable if we got Ty::Unknown here 1557 // use a new type variable if we got Ty::Unknown here
1539 let ty = self.insert_type_vars_shallow(ty); 1558 let ty = self.insert_type_vars_shallow(ty);
1540 self.unify(&ty, &expected.ty); 1559 self.unify(&ty, &expected.ty);
1541 let ty = self.resolve_ty_as_possible(ty); 1560 let ty = self.resolve_ty_as_possible(&mut vec![], ty);
1542 self.write_expr_ty(tgt_expr, ty.clone()); 1561 self.write_expr_ty(tgt_expr, ty.clone());
1543 ty 1562 ty
1544 } 1563 }
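
Both resolution functions now thread a `tv_stack` of type-variable ids and fall back to a default when they meet a variable that is already being resolved, which is what stops inputs like `let y = unknown; [y, &y];` from recursing forever. A self-contained sketch of the same guard over a toy type representation (a `HashMap` stands in for the unification table; none of this is rust-analyzer's real `Ty`):

```rust
// Hedged sketch of cycle-guarded type-variable resolution over a toy `Ty`.
use std::collections::HashMap;

type VarId = u32;

#[derive(Debug, Clone, PartialEq)]
enum Ty {
    Unknown,
    Int,
    Ref(Box<Ty>),
    Var(VarId),
}

fn resolve(table: &HashMap<VarId, Ty>, stack: &mut Vec<VarId>, ty: &Ty) -> Ty {
    match ty {
        Ty::Var(v) => {
            if stack.contains(v) {
                // The variable is already being resolved further up the call
                // chain: this is a recursive type, bail out with a fallback.
                return Ty::Unknown;
            }
            match table.get(v) {
                Some(known) => {
                    // `known` may itself mention other (or the same!) variables.
                    stack.push(*v);
                    let res = resolve(table, stack, known);
                    stack.pop();
                    res
                }
                None => Ty::Unknown,
            }
        }
        Ty::Ref(inner) => Ty::Ref(Box::new(resolve(table, stack, inner))),
        other => other.clone(),
    }
}

fn main() {
    // Simulate `let y = unknown; [y, &y];`: unifying the array element types
    // makes variable 0 "equal" to a reference to itself.
    let mut table = HashMap::new();
    table.insert(0, Ty::Ref(Box::new(Ty::Var(0))));
    let resolved = resolve(&table, &mut Vec::new(), &Ty::Var(0));
    // Terminates instead of overflowing the stack.
    assert_eq!(resolved, Ty::Ref(Box::new(Ty::Unknown)));
    println!("{:?}", resolved);
}
```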
diff --git a/crates/ra_hir/src/ty/snapshots/tests__infer_in_elseif.snap b/crates/ra_hir/src/ty/snapshots/tests__infer_in_elseif.snap
new file mode 100644
index 000000000..6a435e5cf
--- /dev/null
+++ b/crates/ra_hir/src/ty/snapshots/tests__infer_in_elseif.snap
@@ -0,0 +1,17 @@
1---
2created: "2019-01-26T21:36:52.714121185+00:00"
3creator: [email protected]
4expression: "&result"
5source: crates/ra_hir/src/ty/tests.rs
6---
7[35; 38) 'foo': Foo
8[45; 109) '{ ... } }': ()
9[51; 107) 'if tru... }': ()
10[54; 58) 'true': bool
11[59; 67) '{ }': ()
12[73; 107) 'if fal... }': i32
13[76; 81) 'false': bool
14[82; 107) '{ ... }': i32
15[92; 95) 'foo': Foo
16[92; 101) 'foo.field': i32
17
diff --git a/crates/ra_hir/src/ty/snapshots/tests__recursive_vars.snap b/crates/ra_hir/src/ty/snapshots/tests__recursive_vars.snap
new file mode 100644
index 000000000..c3227ff7e
--- /dev/null
+++ b/crates/ra_hir/src/ty/snapshots/tests__recursive_vars.snap
@@ -0,0 +1,14 @@
1---
2created: "2019-01-26T22:42:22.329980185+00:00"
3creator: [email protected]
4expression: "&result"
5source: crates/ra_hir/src/ty/tests.rs
6---
7[11; 48) '{ ...&y]; }': ()
8[21; 22) 'y': &[unknown]
9[25; 32) 'unknown': &[unknown]
10[38; 45) '[y, &y]': [&&[unknown]]
11[39; 40) 'y': &[unknown]
12[42; 44) '&y': &&[unknown]
13[43; 44) 'y': &[unknown]
14
diff --git a/crates/ra_hir/src/ty/snapshots/tests__recursive_vars_2.snap b/crates/ra_hir/src/ty/snapshots/tests__recursive_vars_2.snap
new file mode 100644
index 000000000..de124da5b
--- /dev/null
+++ b/crates/ra_hir/src/ty/snapshots/tests__recursive_vars_2.snap
@@ -0,0 +1,21 @@
1---
2created: "2019-01-26T22:42:22.331805845+00:00"
3creator: [email protected]
4expression: "&result"
5source: crates/ra_hir/src/ty/tests.rs
6---
7[11; 80) '{ ...x)]; }': ()
8[21; 22) 'x': &&[unknown]
9[25; 32) 'unknown': &&[unknown]
10[42; 43) 'y': &&[unknown]
11[46; 53) 'unknown': &&[unknown]
12[59; 77) '[(x, y..., &x)]': [(&&[unknown], &&[unknown])]
13[60; 66) '(x, y)': (&&[unknown], &&[unknown])
14[61; 62) 'x': &&[unknown]
15[64; 65) 'y': &&[unknown]
16[68; 76) '(&y, &x)': (&&&[unknown], &&&[unknown])
17[69; 71) '&y': &&&[unknown]
18[70; 71) 'y': &&[unknown]
19[73; 75) '&x': &&&[unknown]
20[74; 75) 'x': &&[unknown]
21
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs
index e0b0689f8..f74d6f5ea 100644
--- a/crates/ra_hir/src/ty/tests.rs
+++ b/crates/ra_hir/src/ty/tests.rs
@@ -3,6 +3,7 @@ use std::fmt::Write;
3 3
4use ra_db::{SourceDatabase, salsa::Database}; 4use ra_db::{SourceDatabase, salsa::Database};
5use ra_syntax::ast::{self, AstNode}; 5use ra_syntax::ast::{self, AstNode};
6use test_utils::covers;
6 7
7use crate::{ 8use crate::{
8 source_binder, 9 source_binder,
@@ -285,6 +286,23 @@ fn test() {
285} 286}
286 287
287#[test] 288#[test]
289fn infer_in_elseif() {
290 check_inference(
291 "infer_in_elseif",
292 r#"
293struct Foo { field: i32 }
294fn main(foo: Foo) {
295 if true {
296
297 } else if false {
298 foo.field
299 }
300}
301"#,
302 )
303}
304
305#[test]
288fn infer_inherent_method() { 306fn infer_inherent_method() {
289 check_inference( 307 check_inference(
290 "infer_inherent_method", 308 "infer_inherent_method",
@@ -545,6 +563,37 @@ fn quux() {
545 ); 563 );
546} 564}
547 565
566#[test]
567fn recursive_vars() {
568 covers!(type_var_cycles_resolve_completely);
569 covers!(type_var_cycles_resolve_as_possible);
570 check_inference(
571 "recursive_vars",
572 r#"
573fn test() {
574 let y = unknown;
575 [y, &y];
576}
577"#,
578 );
579}
580
581#[test]
582fn recursive_vars_2() {
583 covers!(type_var_cycles_resolve_completely);
584 covers!(type_var_cycles_resolve_as_possible);
585 check_inference(
586 "recursive_vars_2",
587 r#"
588fn test() {
589 let x = unknown;
590 let y = unknown;
591 [(x, y), (&y, &x)];
592}
593"#,
594 );
595}
596
548fn infer(content: &str) -> String { 597fn infer(content: &str) -> String {
549 let (db, _, file_id) = MockDatabase::with_single_file(content); 598 let (db, _, file_id) = MockDatabase::with_single_file(content);
550 let source_file = db.parse(file_id); 599 let source_file = db.parse(file_id);
diff --git a/crates/ra_ide_api/src/call_info.rs b/crates/ra_ide_api/src/call_info.rs
index 7554c4aee..ee1e13799 100644
--- a/crates/ra_ide_api/src/call_info.rs
+++ b/crates/ra_ide_api/src/call_info.rs
@@ -1,3 +1,4 @@
1use test_utils::tested_by;
1use ra_db::SourceDatabase; 2use ra_db::SourceDatabase;
2use ra_syntax::{ 3use ra_syntax::{
3 AstNode, SyntaxNode, TextUnit, TextRange, 4 AstNode, SyntaxNode, TextUnit, TextRange,
@@ -41,7 +42,12 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
41 // where offset is in that list (or beyond). 42 // where offset is in that list (or beyond).
42 // Revisit this after we get documentation comments in. 43 // Revisit this after we get documentation comments in.
43 if let Some(ref arg_list) = calling_node.arg_list() { 44 if let Some(ref arg_list) = calling_node.arg_list() {
44 let start = arg_list.syntax().range().start(); 45 let arg_list_range = arg_list.syntax().range();
46 if !arg_list_range.contains_inclusive(position.offset) {
47 tested_by!(call_info_bad_offset);
48 return None;
49 }
50 let start = arg_list_range.start();
45 51
46 let range_search = TextRange::from_to(start, position.offset); 52 let range_search = TextRange::from_to(start, position.offset);
47 let mut commas: usize = arg_list 53 let mut commas: usize = arg_list
@@ -171,10 +177,12 @@ fn param_list(node: &ast::FnDef) -> Vec<String> {
171 177
172#[cfg(test)] 178#[cfg(test)]
173mod tests { 179mod tests {
174 use super::*; 180 use test_utils::covers;
175 181
176 use crate::mock_analysis::single_file_with_position; 182 use crate::mock_analysis::single_file_with_position;
177 183
184 use super::*;
185
178 fn call_info(text: &str) -> CallInfo { 186 fn call_info(text: &str) -> CallInfo {
179 let (analysis, position) = single_file_with_position(text); 187 let (analysis, position) = single_file_with_position(text);
180 analysis.call_info(position).unwrap().unwrap() 188 analysis.call_info(position).unwrap().unwrap()
@@ -416,4 +424,14 @@ By default this method stops actor's `Context`."#
416 ); 424 );
417 } 425 }
418 426
427 #[test]
428 fn call_info_bad_offset() {
429 covers!(call_info_bad_offset);
430 let (analysis, position) = single_file_with_position(
431 r#"fn foo(x: u32, y: u32) -> u32 {x + y}
432 fn bar() { foo <|> (3, ); }"#,
433 );
434 let call_info = analysis.call_info(position).unwrap();
435 assert!(call_info.is_none());
436 }
419} 437}
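
The new guard returns `None` (and hits the `call_info_bad_offset` mark) when the cursor is not inside the call's argument list, instead of counting commas over an unrelated range. The essence, with plain byte offsets and a hypothetical helper rather than the real `ra_ide_api` types:

```rust
// Hypothetical helper: given the text of an argument list (including the
// parentheses), its start offset in the file, and the cursor offset, return
// the index of the "active" parameter, or None if the cursor is outside.
// Offsets are byte offsets; the example sticks to ASCII for simplicity.
fn active_parameter(arg_list: &str, arg_list_start: usize, cursor: usize) -> Option<usize> {
    let arg_list_end = arg_list_start + arg_list.len();
    // Same idea as `contains_inclusive` in the diff: a cursor before the `(`
    // or after the `)` should not produce call info at all.
    if cursor < arg_list_start || cursor > arg_list_end {
        return None;
    }
    // Count commas between the opening paren and the cursor.
    let upto = &arg_list[..cursor - arg_list_start];
    Some(upto.bytes().filter(|&b| b == b',').count())
}

fn main() {
    let src = "foo(1, 22, 333)";
    let args_start = src.find('(').unwrap(); // arg list is "(1, 22, 333)"
    let args = &src[args_start..];

    assert_eq!(active_parameter(args, args_start, args_start + 1), Some(0)); // cursor on `1`
    assert_eq!(active_parameter(args, args_start, args_start + 8), Some(2)); // cursor on `333`
    assert_eq!(active_parameter(args, args_start, 0), None);                 // cursor on `foo`
}
```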
diff --git a/crates/ra_ide_api/src/marks.rs b/crates/ra_ide_api/src/marks.rs
index e33bf6c91..21ce7289d 100644
--- a/crates/ra_ide_api/src/marks.rs
+++ b/crates/ra_ide_api/src/marks.rs
@@ -2,4 +2,5 @@ test_utils::marks!(
2 inserts_parens_for_function_calls 2 inserts_parens_for_function_calls
3 goto_definition_works_for_methods 3 goto_definition_works_for_methods
4 goto_definition_works_for_fields 4 goto_definition_works_for_fields
5 call_info_bad_offset
5); 6);
diff --git a/crates/ra_ide_api_light/src/assists/replace_if_let_with_match.rs b/crates/ra_ide_api_light/src/assists/replace_if_let_with_match.rs
index d64c34d54..71880b919 100644
--- a/crates/ra_ide_api_light/src/assists/replace_if_let_with_match.rs
+++ b/crates/ra_ide_api_light/src/assists/replace_if_let_with_match.rs
@@ -11,7 +11,10 @@ pub fn replace_if_let_with_match(ctx: AssistCtx) -> Option<Assist> {
11 let pat = cond.pat()?; 11 let pat = cond.pat()?;
12 let expr = cond.expr()?; 12 let expr = cond.expr()?;
13 let then_block = if_expr.then_branch()?; 13 let then_block = if_expr.then_branch()?;
14 let else_block = if_expr.else_branch()?; 14 let else_block = match if_expr.else_branch()? {
15 ast::ElseBranchFlavor::Block(it) => it,
16 ast::ElseBranchFlavor::IfExpr(_) => return None,
17 };
15 18
16 ctx.build("replace with match", |edit| { 19 ctx.build("replace with match", |edit| {
17 let match_expr = build_match_expr(expr, pat, then_block, else_block); 20 let match_expr = build_match_expr(expr, pat, then_block, else_block);
diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs
index 8ea9edc84..ace3da020 100644
--- a/crates/ra_lsp_server/src/main_loop/handlers.rs
+++ b/crates/ra_lsp_server/src/main_loop/handlers.rs
@@ -520,21 +520,33 @@ pub fn handle_formatting(
520 let end_position = TextUnit::of_str(&file).conv_with(&file_line_index); 520 let end_position = TextUnit::of_str(&file).conv_with(&file_line_index);
521 521
522 use std::process; 522 use std::process;
523 let mut rustfmt = process::Command::new("rustfmt") 523 let mut rustfmt = process::Command::new("rustfmt");
524 rustfmt
524 .stdin(process::Stdio::piped()) 525 .stdin(process::Stdio::piped())
525 .stdout(process::Stdio::piped()) 526 .stdout(process::Stdio::piped());
526 .spawn()?; 527
528 if let Ok(path) = params.text_document.uri.to_file_path() {
529 if let Some(parent) = path.parent() {
530 rustfmt.current_dir(parent);
531 }
532 }
533 let mut rustfmt = rustfmt.spawn()?;
527 534
528 rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?; 535 rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?;
529 536
530 let output = rustfmt.wait_with_output()?; 537 let output = rustfmt.wait_with_output()?;
531 let captured_stdout = String::from_utf8(output.stdout)?; 538 let captured_stdout = String::from_utf8(output.stdout)?;
532 if !output.status.success() { 539 if !output.status.success() {
533 failure::bail!( 540 return Err(LspError::new(
534 "rustfmt exited with error code {}: {}.", 541 -32900,
535 output.status, 542 format!(
536 captured_stdout, 543 r#"rustfmt exited with:
537 ); 544 Status: {}
545 stdout: {}"#,
546 output.status, captured_stdout,
547 ),
548 )
549 .into());
538 } 550 }
539 551
540 Ok(Some(vec![TextEdit { 552 Ok(Some(vec![TextEdit {
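
The handler now builds the `rustfmt` command in two steps so it can set the working directory to the formatted file's parent (letting a project-local `rustfmt.toml` apply), and reports failures as an `LspError` instead of `failure::bail!`. A reduced, std-only sketch of the spawn/write/read dance (hypothetical helper; the LSP wiring and error codes are omitted):

```rust
use std::{
    io::Write,
    path::Path,
    process::{Command, Stdio},
};

// Hypothetical helper: format `text` with rustfmt, running it from `dir` so a
// project-local rustfmt.toml is respected. Requires rustfmt on PATH.
fn run_rustfmt(text: &str, dir: Option<&Path>) -> std::io::Result<Result<String, String>> {
    let mut cmd = Command::new("rustfmt");
    cmd.stdin(Stdio::piped()).stdout(Stdio::piped());
    if let Some(dir) = dir {
        cmd.current_dir(dir);
    }
    let mut child = cmd.spawn()?;

    // Feed the unformatted text on stdin, then collect stdout.
    // `wait_with_output` closes the child's stdin before waiting.
    child.stdin.as_mut().unwrap().write_all(text.as_bytes())?;
    let output = child.wait_with_output()?;
    let stdout = String::from_utf8_lossy(&output.stdout).into_owned();

    if output.status.success() {
        Ok(Ok(stdout))
    } else {
        Ok(Err(format!(
            "rustfmt exited with: Status: {} stdout: {}",
            output.status, stdout
        )))
    }
}

fn main() -> std::io::Result<()> {
    match run_rustfmt("fn main(){println!(\"hi\")}\n", None)? {
        Ok(formatted) => print!("{}", formatted),
        Err(err) => eprintln!("{}", err),
    }
    Ok(())
}
```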
diff --git a/crates/ra_lsp_server/src/project_model/cargo_workspace.rs b/crates/ra_lsp_server/src/project_model/cargo_workspace.rs
index 75ae78bca..8cf99d586 100644
--- a/crates/ra_lsp_server/src/project_model/cargo_workspace.rs
+++ b/crates/ra_lsp_server/src/project_model/cargo_workspace.rs
@@ -117,9 +117,13 @@ impl Target {
117 117
118impl CargoWorkspace { 118impl CargoWorkspace {
119 pub fn from_cargo_metadata(cargo_toml: &Path) -> Result<CargoWorkspace> { 119 pub fn from_cargo_metadata(cargo_toml: &Path) -> Result<CargoWorkspace> {
120 let meta = MetadataCommand::new() 120 let mut meta = MetadataCommand::new();
121 .manifest_path(cargo_toml) 121 meta.manifest_path(cargo_toml)
122 .features(CargoOpt::AllFeatures) 122 .features(CargoOpt::AllFeatures);
123 if let Some(parent) = cargo_toml.parent() {
124 meta.current_dir(parent);
125 }
126 let meta = meta
123 .exec() 127 .exec()
124 .map_err(|e| format_err!("cargo metadata failed: {}", e))?; 128 .map_err(|e| format_err!("cargo metadata failed: {}", e))?;
125 let mut pkg_by_id = FxHashMap::default(); 129 let mut pkg_by_id = FxHashMap::default();
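
The same working-directory trick for `cargo metadata`: `MetadataCommand` is built mutably so the command can run from the manifest's own directory. A usage sketch following the API calls visible above (hypothetical helper; error handling flattened to a `String`, and the `packages`/`name` fields assumed from the `cargo_metadata` crate):

```rust
use std::path::Path;

use cargo_metadata::{CargoOpt, MetadataCommand};

// Hypothetical helper mirroring the pattern in the diff above: run
// `cargo metadata` for a given Cargo.toml, from that manifest's directory.
fn package_names(cargo_toml: &Path) -> Result<Vec<String>, String> {
    let mut cmd = MetadataCommand::new();
    cmd.manifest_path(cargo_toml).features(CargoOpt::AllFeatures);
    // Running in the crate's directory avoids accidentally resolving the
    // workspace relative to wherever the language server was started.
    if let Some(dir) = cargo_toml.parent() {
        cmd.current_dir(dir);
    }
    let meta = cmd
        .exec()
        .map_err(|e| format!("cargo metadata failed: {}", e))?;
    Ok(meta.packages.into_iter().map(|p| p.name.to_string()).collect())
}

fn main() {
    match package_names(Path::new("Cargo.toml")) {
        Ok(names) => println!("{:?}", names),
        Err(err) => eprintln!("{}", err),
    }
}
```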
diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml
index c50dc6c67..0ec8492aa 100644
--- a/crates/ra_syntax/Cargo.toml
+++ b/crates/ra_syntax/Cargo.toml
@@ -13,7 +13,7 @@ unicode-xid = "0.1.0"
13itertools = "0.8.0" 13itertools = "0.8.0"
14drop_bomb = "0.1.4" 14drop_bomb = "0.1.4"
15parking_lot = "0.7.0" 15parking_lot = "0.7.0"
16rowan = "0.3.2" 16rowan = "0.3.3"
17 17
18# ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here 18# ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here
19# to reduce number of compilations 19# to reduce number of compilations
diff --git a/crates/ra_syntax/src/ast.rs b/crates/ra_syntax/src/ast.rs
index d59890d95..3d22a88f3 100644
--- a/crates/ra_syntax/src/ast.rs
+++ b/crates/ra_syntax/src/ast.rs
@@ -302,13 +302,27 @@ impl LetStmt {
302 } 302 }
303} 303}
304 304
305#[derive(Debug, Clone, PartialEq, Eq)]
306pub enum ElseBranchFlavor<'a> {
307 Block(&'a Block),
308 IfExpr(&'a IfExpr),
309}
310
305impl IfExpr { 311impl IfExpr {
306 pub fn then_branch(&self) -> Option<&Block> { 312 pub fn then_branch(&self) -> Option<&Block> {
307 self.blocks().nth(0) 313 self.blocks().nth(0)
308 } 314 }
309 pub fn else_branch(&self) -> Option<&Block> { 315 pub fn else_branch(&self) -> Option<ElseBranchFlavor> {
310 self.blocks().nth(1) 316 let res = match self.blocks().nth(1) {
317 Some(block) => ElseBranchFlavor::Block(block),
318 None => {
319 let elif: &IfExpr = child_opt(self)?;
320 ElseBranchFlavor::IfExpr(elif)
321 }
322 };
323 Some(res)
311 } 324 }
325
312 fn blocks(&self) -> AstChildren<Block> { 326 fn blocks(&self) -> AstChildren<Block> {
313 children(self) 327 children(self)
314 } 328 }
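
With `else_branch()` returning an `ElseBranchFlavor`, each caller decides what to do per variant: the HIR collector recurses into the nested `if`, while `replace_if_let_with_match` simply bails out on `else if`. A hedged sketch of walking a whole chain through such an API, using toy stand-ins for the real AST types:

```rust
// Toy types approximating the new API shape: `else_branch` distinguishes a
// trailing `else { .. }` from an `else if`. Not the real ra_syntax types.
struct Block(&'static str);

struct IfExpr {
    condition: &'static str,
    then_branch: Block,
    else_branch: Option<ElseBranchFlavor>,
}

enum ElseBranchFlavor {
    Block(Block),
    IfExpr(Box<IfExpr>),
}

/// Flatten `if / else if / ... / else` into (condition, block) arms plus an
/// optional final `else` block, by following the `IfExpr` variant.
fn flatten(mut expr: &IfExpr) -> (Vec<(&'static str, &'static str)>, Option<&'static str>) {
    let mut arms = Vec::new();
    loop {
        arms.push((expr.condition, expr.then_branch.0));
        match &expr.else_branch {
            None => return (arms, None),
            Some(ElseBranchFlavor::Block(b)) => return (arms, Some(b.0)),
            // `else if`: keep walking down the chain.
            Some(ElseBranchFlavor::IfExpr(next)) => expr = &**next,
        }
    }
}

fn main() {
    let chain = IfExpr {
        condition: "x > 0",
        then_branch: Block("{ pos }"),
        else_branch: Some(ElseBranchFlavor::IfExpr(Box::new(IfExpr {
            condition: "x < 0",
            then_branch: Block("{ neg }"),
            else_branch: Some(ElseBranchFlavor::Block(Block("{ zero }"))),
        }))),
    };
    let (arms, tail) = flatten(&chain);
    assert_eq!(arms, vec![("x > 0", "{ pos }"), ("x < 0", "{ neg }")]);
    assert_eq!(tail, Some("{ zero }"));
}
```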
diff --git a/crates/ra_syntax/src/grammar/expressions.rs b/crates/ra_syntax/src/grammar/expressions.rs
index 7ee32fa7c..1604d9b5a 100644
--- a/crates/ra_syntax/src/grammar/expressions.rs
+++ b/crates/ra_syntax/src/grammar/expressions.rs
@@ -423,7 +423,7 @@ fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) {
423 match p.current() { 423 match p.current() {
424 L_CURLY if !r.forbid_structs => { 424 L_CURLY if !r.forbid_structs => {
425 named_field_list(p); 425 named_field_list(p);
426 (m.complete(p, STRUCT_LIT), BlockLike::Block) 426 (m.complete(p, STRUCT_LIT), BlockLike::NotBlock)
427 } 427 }
428 EXCL => { 428 EXCL => {
429 let block_like = items::macro_call_after_excl(p); 429 let block_like = items::macro_call_after_excl(p);
diff --git a/crates/ra_syntax/tests/data/parser/ok/0043_complex_assignment.rs b/crates/ra_syntax/tests/data/parser/ok/0043_complex_assignment.rs
new file mode 100644
index 000000000..7e4a28bf7
--- /dev/null
+++ b/crates/ra_syntax/tests/data/parser/ok/0043_complex_assignment.rs
@@ -0,0 +1,8 @@
1// https://github.com/rust-analyzer/rust-analyzer/issues/674
2
3struct Repr { raw: [u8; 1] }
4
5fn abc() {
6 Repr { raw: [0] }.raw[0] = 0;
7 Repr{raw:[0]}();
8}
diff --git a/crates/ra_syntax/tests/data/parser/ok/0043_complex_assignment.txt b/crates/ra_syntax/tests/data/parser/ok/0043_complex_assignment.txt
new file mode 100644
index 000000000..2279c7966
--- /dev/null
+++ b/crates/ra_syntax/tests/data/parser/ok/0043_complex_assignment.txt
@@ -0,0 +1,109 @@
1SOURCE_FILE@[0; 160)
2 COMMENT@[0; 60)
3 WHITESPACE@[60; 62)
4 STRUCT_DEF@[62; 90)
5 STRUCT_KW@[62; 68)
6 WHITESPACE@[68; 69)
7 NAME@[69; 73)
8 IDENT@[69; 73) "Repr"
9 WHITESPACE@[73; 74)
10 NAMED_FIELD_DEF_LIST@[74; 90)
11 L_CURLY@[74; 75)
12 WHITESPACE@[75; 76)
13 NAMED_FIELD_DEF@[76; 88)
14 NAME@[76; 79)
15 IDENT@[76; 79) "raw"
16 COLON@[79; 80)
17 WHITESPACE@[80; 81)
18 ARRAY_TYPE@[81; 88)
19 L_BRACK@[81; 82)
20 PATH_TYPE@[82; 84)
21 PATH@[82; 84)
22 PATH_SEGMENT@[82; 84)
23 NAME_REF@[82; 84)
24 IDENT@[82; 84) "u8"
25 SEMI@[84; 85)
26 WHITESPACE@[85; 86)
27 LITERAL@[86; 87)
28 INT_NUMBER@[86; 87) "1"
29 R_BRACK@[87; 88)
30 WHITESPACE@[88; 89)
31 R_CURLY@[89; 90)
32 WHITESPACE@[90; 92)
33 FN_DEF@[92; 159)
34 FN_KW@[92; 94)
35 WHITESPACE@[94; 95)
36 NAME@[95; 98)
37 IDENT@[95; 98) "abc"
38 PARAM_LIST@[98; 100)
39 L_PAREN@[98; 99)
40 R_PAREN@[99; 100)
41 WHITESPACE@[100; 101)
42 BLOCK@[101; 159)
43 L_CURLY@[101; 102)
44 WHITESPACE@[102; 107)
45 EXPR_STMT@[107; 136)
46 BIN_EXPR@[107; 135)
47 INDEX_EXPR@[107; 131)
48 FIELD_EXPR@[107; 128)
49 STRUCT_LIT@[107; 124)
50 PATH@[107; 111)
51 PATH_SEGMENT@[107; 111)
52 NAME_REF@[107; 111)
53 IDENT@[107; 111) "Repr"
54 WHITESPACE@[111; 112)
55 NAMED_FIELD_LIST@[112; 124)
56 L_CURLY@[112; 113)
57 WHITESPACE@[113; 114)
58 NAMED_FIELD@[114; 122)
59 NAME_REF@[114; 117)
60 IDENT@[114; 117) "raw"
61 COLON@[117; 118)
62 WHITESPACE@[118; 119)
63 ARRAY_EXPR@[119; 122)
64 L_BRACK@[119; 120)
65 LITERAL@[120; 121)
66 INT_NUMBER@[120; 121) "0"
67 R_BRACK@[121; 122)
68 WHITESPACE@[122; 123)
69 R_CURLY@[123; 124)
70 DOT@[124; 125)
71 NAME_REF@[125; 128)
72 IDENT@[125; 128) "raw"
73 L_BRACK@[128; 129)
74 LITERAL@[129; 130)
75 INT_NUMBER@[129; 130) "0"
76 R_BRACK@[130; 131)
77 WHITESPACE@[131; 132)
78 EQ@[132; 133)
79 WHITESPACE@[133; 134)
80 LITERAL@[134; 135)
81 INT_NUMBER@[134; 135) "0"
82 SEMI@[135; 136)
83 WHITESPACE@[136; 141)
84 EXPR_STMT@[141; 157)
85 CALL_EXPR@[141; 156)
86 STRUCT_LIT@[141; 154)
87 PATH@[141; 145)
88 PATH_SEGMENT@[141; 145)
89 NAME_REF@[141; 145)
90 IDENT@[141; 145) "Repr"
91 NAMED_FIELD_LIST@[145; 154)
92 L_CURLY@[145; 146)
93 NAMED_FIELD@[146; 153)
94 NAME_REF@[146; 149)
95 IDENT@[146; 149) "raw"
96 COLON@[149; 150)
97 ARRAY_EXPR@[150; 153)
98 L_BRACK@[150; 151)
99 LITERAL@[151; 152)
100 INT_NUMBER@[151; 152) "0"
101 R_BRACK@[152; 153)
102 R_CURLY@[153; 154)
103 ARG_LIST@[154; 156)
104 L_PAREN@[154; 155)
105 R_PAREN@[155; 156)
106 SEMI@[156; 157)
107 WHITESPACE@[157; 158)
108 R_CURLY@[158; 159)
109 WHITESPACE@[159; 160)
diff --git a/crates/ra_vfs/src/io.rs b/crates/ra_vfs/src/io.rs
index 7ca1e9835..d764c534a 100644
--- a/crates/ra_vfs/src/io.rs
+++ b/crates/ra_vfs/src/io.rs
@@ -1,19 +1,22 @@
1use std::{fs, sync::Arc, thread}; 1use std::{
2 2 fs,
3use crossbeam_channel::{Receiver, Sender}; 3 thread,
4 path::{Path, PathBuf},
5 sync::{mpsc, Arc},
6 time::Duration,
7};
8use crossbeam_channel::{Receiver, Sender, unbounded, RecvError, select};
4use relative_path::RelativePathBuf; 9use relative_path::RelativePathBuf;
5use thread_worker::WorkerHandle; 10use thread_worker::WorkerHandle;
6use walkdir::WalkDir; 11use walkdir::WalkDir;
12use notify::{DebouncedEvent, RecommendedWatcher, RecursiveMode, Watcher as _Watcher};
7 13
8mod watcher; 14use crate::{RootConfig, Roots, VfsRoot};
9use watcher::Watcher;
10
11use crate::{RootFilter, Roots, VfsRoot};
12 15
13pub(crate) enum Task { 16pub(crate) enum Task {
14 AddRoot { 17 AddRoot {
15 root: VfsRoot, 18 root: VfsRoot,
16 filter: Arc<RootFilter>, 19 config: Arc<RootConfig>,
17 }, 20 },
18} 21}
19 22
@@ -39,6 +42,15 @@ pub enum TaskResult {
39 }, 42 },
40} 43}
41 44
45#[derive(Debug)]
46enum ChangeKind {
47 Create,
48 Write,
49 Remove,
50}
51
52const WATCHER_DELAY: Duration = Duration::from_millis(250);
53
42pub(crate) struct Worker { 54pub(crate) struct Worker {
43 worker: thread_worker::Worker<Task, TaskResult>, 55 worker: thread_worker::Worker<Task, TaskResult>,
44 worker_handle: WorkerHandle, 56 worker_handle: WorkerHandle,
@@ -46,24 +58,75 @@ pub(crate) struct Worker {
46 58
47impl Worker { 59impl Worker {
48 pub(crate) fn start(roots: Arc<Roots>) -> Worker { 60 pub(crate) fn start(roots: Arc<Roots>) -> Worker {
49 let (worker, worker_handle) = 61 // This is a pretty elaborate setup of threads & channels! It is
50 thread_worker::spawn("vfs", 128, move |input_receiver, output_sender| { 62 // explained by the following concerns:
51 let mut watcher = match Watcher::start(roots, output_sender.clone()) { 63 // * we need to burn a thread translating from notify's mpsc to
52 Ok(w) => Some(w), 64 // crossbeam_channel.
53 Err(e) => { 65 // * we want to read all files from a single thread, to guarantee that
54 log::error!("could not start watcher: {}", e); 66 // we always get fresher versions and never go back in time.
55 None 67 // * we want to tear down everything neatly during shutdown.
68 let (worker, worker_handle) = thread_worker::spawn(
69 "vfs",
70 128,
71 // These are the channels we use to communicate with the outside world.
72 // If `input_receiver` is closed we need to tear ourselves down.
73 // `output_sender` should not be closed unless the parent died.
74 move |input_receiver, output_sender| {
75 // These are `std` channels notify will send events to
76 let (notify_sender, notify_receiver) = mpsc::channel();
77 // These are the corresponding crossbeam channels
78 let (watcher_sender, watcher_receiver) = unbounded();
79
80 let mut watcher = notify::watcher(notify_sender, WATCHER_DELAY)
81 .map_err(|e| log::error!("failed to spawn notify {}", e))
82 .ok();
83 // Start a silly thread to transform between two channels
84 let thread = thread::spawn(move || {
85 notify_receiver
86 .into_iter()
87 .for_each(|event| convert_notify_event(event, &watcher_sender))
88 });
89
90 // Process requests from the caller or notifications from
91 // watcher until the caller says stop.
92 loop {
93 select! {
94 // Received request from the caller. If this channel is
95 // closed, we should shutdown everything.
96 recv(input_receiver) -> t => match t {
97 Err(RecvError) => {
98 drop(input_receiver);
99 break
100 },
101 Ok(Task::AddRoot { root, config }) => {
102 watch_root(watcher.as_mut(), &output_sender, root, Arc::clone(&config));
103 }
104 },
105 // The watcher sends us changes. If **this** channel is
106 // closed, the watcher has died, which indicates a bug
107 // -- escalate!
108 recv(watcher_receiver) -> event => match event {
109 Err(RecvError) => panic!("watcher is dead"),
110 Ok((path, change)) => {
111 handle_change(watcher.as_mut(), &output_sender, &*roots, path, change);
112 }
113 },
56 } 114 }
57 };
58 let res = input_receiver
59 .into_iter()
60 .filter_map(|t| handle_task(t, &mut watcher))
61 .try_for_each(|it| output_sender.send(it));
62 if let Some(watcher) = watcher {
63 let _ = watcher.shutdown();
64 } 115 }
65 res.unwrap() 116 // Stop the watcher
66 }); 117 drop(watcher.take());
118 // Drain pending events: we are not interested in them anyway!
119 watcher_receiver.into_iter().for_each(|_| ());
120
121 let res = thread.join();
122 match &res {
123 Ok(()) => log::info!("... Watcher terminated with ok"),
124 Err(_) => log::error!("... Watcher terminated with err"),
125 }
126 res.unwrap();
127 },
128 );
129
67 Worker { 130 Worker {
68 worker, 131 worker,
69 worker_handle, 132 worker_handle,
@@ -84,46 +147,142 @@ impl Worker {
84 } 147 }
85} 148}
86 149
87fn handle_task(task: Task, watcher: &mut Option<Watcher>) -> Option<TaskResult> { 150fn watch_root(
88 match task { 151 watcher: Option<&mut RecommendedWatcher>,
89 Task::AddRoot { root, filter } => { 152 sender: &Sender<TaskResult>,
90 if let Some(watcher) = watcher { 153 root: VfsRoot,
91 watcher.watch_root(&filter) 154 config: Arc<RootConfig>,
155) {
156 log::debug!("loading {} ...", config.root.as_path().display());
157 let files = watch_recursive(watcher, config.root.as_path(), &*config)
158 .into_iter()
159 .filter_map(|path| {
160 let abs_path = path.to_path(&config.root);
161 let text = read_to_string(&abs_path)?;
162 Some((path, text))
163 })
164 .collect();
165 sender
166 .send(TaskResult::BulkLoadRoot { root, files })
167 .unwrap();
168 log::debug!("... loaded {}", config.root.as_path().display());
169}
170
171fn convert_notify_event(event: DebouncedEvent, sender: &Sender<(PathBuf, ChangeKind)>) {
172 // forward relevant events only
173 match event {
174 DebouncedEvent::NoticeWrite(_)
175 | DebouncedEvent::NoticeRemove(_)
176 | DebouncedEvent::Chmod(_) => {
177 // ignore
178 }
179 DebouncedEvent::Rescan => {
180 // TODO rescan all roots
181 }
182 DebouncedEvent::Create(path) => {
183 sender.send((path, ChangeKind::Create)).unwrap();
184 }
185 DebouncedEvent::Write(path) => {
186 sender.send((path, ChangeKind::Write)).unwrap();
187 }
188 DebouncedEvent::Remove(path) => {
189 sender.send((path, ChangeKind::Remove)).unwrap();
190 }
191 DebouncedEvent::Rename(src, dst) => {
192 sender.send((src, ChangeKind::Remove)).unwrap();
193 sender.send((dst, ChangeKind::Create)).unwrap();
194 }
195 DebouncedEvent::Error(err, path) => {
196 // TODO should we reload the file contents?
197 log::warn!("watcher error \"{}\", {:?}", err, path);
198 }
199 }
200}
201
202fn handle_change(
203 watcher: Option<&mut RecommendedWatcher>,
204 sender: &Sender<TaskResult>,
205 roots: &Roots,
206 path: PathBuf,
207 kind: ChangeKind,
208) {
209 let (root, rel_path) = match roots.find(&path) {
210 None => return,
211 Some(it) => it,
212 };
213 let config = &roots[root];
214 match kind {
215 ChangeKind::Create => {
216 let mut paths = Vec::new();
217 if path.is_dir() {
218 paths.extend(watch_recursive(watcher, &path, &config));
219 } else {
220 paths.push(rel_path);
92 } 221 }
93 log::debug!("loading {} ...", filter.root.as_path().display()); 222 paths
94 let files = load_root(filter.as_ref()); 223 .into_iter()
95 log::debug!("... loaded {}", filter.root.as_path().display()); 224 .filter_map(|rel_path| {
96 Some(TaskResult::BulkLoadRoot { root, files }) 225 let abs_path = rel_path.to_path(&config.root);
226 let text = read_to_string(&abs_path)?;
227 Some((rel_path, text))
228 })
229 .try_for_each(|(path, text)| {
230 sender.send(TaskResult::AddSingleFile { root, path, text })
231 })
232 .unwrap()
97 } 233 }
234 ChangeKind::Write => {
235 if let Some(text) = read_to_string(&path) {
236 sender
237 .send(TaskResult::ChangeSingleFile {
238 root,
239 path: rel_path,
240 text,
241 })
242 .unwrap();
243 }
244 }
245 ChangeKind::Remove => sender
246 .send(TaskResult::RemoveSingleFile {
247 root,
248 path: rel_path,
249 })
250 .unwrap(),
98 } 251 }
99} 252}
100 253
101fn load_root(filter: &RootFilter) -> Vec<(RelativePathBuf, String)> { 254fn watch_recursive(
102 let mut res = Vec::new(); 255 mut watcher: Option<&mut RecommendedWatcher>,
103 for entry in WalkDir::new(&filter.root) 256 dir: &Path,
257 config: &RootConfig,
258) -> Vec<RelativePathBuf> {
259 let mut files = Vec::new();
260 for entry in WalkDir::new(dir)
104 .into_iter() 261 .into_iter()
105 .filter_entry(filter.entry_filter()) 262 .filter_entry(|it| config.contains(it.path()).is_some())
263 .filter_map(|it| it.map_err(|e| log::warn!("watcher error: {}", e)).ok())
106 { 264 {
107 let entry = match entry { 265 if entry.file_type().is_dir() {
108 Ok(entry) => entry, 266 if let Some(watcher) = &mut watcher {
109 Err(e) => { 267 watch_one(watcher, entry.path());
110 log::warn!("watcher error: {}", e);
111 continue;
112 } 268 }
113 }; 269 } else {
114 if !entry.file_type().is_file() { 270 let path = config.contains(entry.path()).unwrap();
115 continue; 271 files.push(path.to_owned());
116 } 272 }
117 let path = entry.path();
118 let text = match fs::read_to_string(path) {
119 Ok(text) => text,
120 Err(e) => {
121 log::warn!("watcher error: {}", e);
122 continue;
123 }
124 };
125 let path = RelativePathBuf::from_path(path.strip_prefix(&filter.root).unwrap()).unwrap();
126 res.push((path.to_owned(), text))
127 } 273 }
128 res 274 files
275}
276
277fn watch_one(watcher: &mut RecommendedWatcher, dir: &Path) {
278 match watcher.watch(dir, RecursiveMode::NonRecursive) {
279 Ok(()) => log::debug!("watching \"{}\"", dir.display()),
280 Err(e) => log::warn!("could not watch \"{}\": {}", dir.display(), e),
281 }
282}
283
284fn read_to_string(path: &Path) -> Option<String> {
285 fs::read_to_string(&path)
286 .map_err(|e| log::warn!("failed to read file {}", e))
287 .ok()
129} 288}
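
The new `handle_change` above forwards each freshly loaded file over the crossbeam channel with `try_for_each`, so the iteration short-circuits on the first send that fails (for example because the receiving side was dropped). A minimal standalone sketch of that pattern, with a hypothetical `Msg` type and `send_all` helper rather than this crate's code:

    use crossbeam_channel::{bounded, SendError, Sender};

    #[derive(Debug)]
    struct Msg(u32);

    // Stop at the first failed send, i.e. as soon as the receiver hangs up.
    fn send_all(sender: &Sender<Msg>, values: Vec<u32>) -> Result<(), SendError<Msg>> {
        values.into_iter().map(Msg).try_for_each(|msg| sender.send(msg))
    }

    fn main() {
        let (tx, rx) = bounded(16);
        send_all(&tx, vec![1, 2, 3]).unwrap();
        drop(tx); // close the channel so the receiving loop terminates
        for msg in rx {
            println!("{:?}", msg);
        }
    }
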
diff --git a/crates/ra_vfs/src/io/watcher.rs b/crates/ra_vfs/src/io/watcher.rs
deleted file mode 100644
index ff6775f59..000000000
--- a/crates/ra_vfs/src/io/watcher.rs
+++ /dev/null
@@ -1,200 +0,0 @@
1use crate::{io, RootFilter, Roots, VfsRoot};
2use crossbeam_channel::Sender;
3use drop_bomb::DropBomb;
4use notify::{DebouncedEvent, RecommendedWatcher, RecursiveMode, Watcher as NotifyWatcher};
5use parking_lot::Mutex;
6use std::{
7 fs,
8 path::{Path, PathBuf},
9 sync::{mpsc, Arc},
10 thread,
11 time::Duration,
12};
13use walkdir::WalkDir;
14
15#[derive(Debug)]
16enum ChangeKind {
17 Create,
18 Write,
19 Remove,
20}
21
22const WATCHER_DELAY: Duration = Duration::from_millis(250);
23
24pub(crate) struct Watcher {
25 thread: thread::JoinHandle<()>,
26 bomb: DropBomb,
27 watcher: Arc<Mutex<Option<RecommendedWatcher>>>,
28}
29
30impl Watcher {
31 pub(crate) fn start(
32 roots: Arc<Roots>,
33 output_sender: Sender<io::TaskResult>,
34 ) -> Result<Watcher, Box<std::error::Error>> {
35 let (input_sender, input_receiver) = mpsc::channel();
36 let watcher = Arc::new(Mutex::new(Some(notify::watcher(
37 input_sender,
38 WATCHER_DELAY,
39 )?)));
40 let sender = output_sender.clone();
41 let watcher_clone = watcher.clone();
42 let thread = thread::spawn(move || {
43 let worker = WatcherWorker {
44 roots,
45 watcher: watcher_clone,
46 sender,
47 };
48 input_receiver
49 .into_iter()
50 // forward relevant events only
51 .try_for_each(|change| worker.handle_debounced_event(change))
52 .unwrap()
53 });
54 Ok(Watcher {
55 thread,
56 watcher,
57 bomb: DropBomb::new(format!("Watcher was not shutdown")),
58 })
59 }
60
61 pub fn watch_root(&mut self, filter: &RootFilter) {
62 for res in WalkDir::new(&filter.root)
63 .into_iter()
64 .filter_entry(filter.entry_filter())
65 {
66 match res {
67 Ok(entry) => {
68 if entry.file_type().is_dir() {
69 watch_one(self.watcher.as_ref(), entry.path());
70 }
71 }
72 Err(e) => log::warn!("watcher error: {}", e),
73 }
74 }
75 }
76
77 pub fn shutdown(mut self) -> thread::Result<()> {
78 self.bomb.defuse();
79 drop(self.watcher.lock().take());
80 let res = self.thread.join();
81 match &res {
82 Ok(()) => log::info!("... Watcher terminated with ok"),
83 Err(_) => log::error!("... Watcher terminated with err"),
84 }
85 res
86 }
87}
88
89struct WatcherWorker {
90 watcher: Arc<Mutex<Option<RecommendedWatcher>>>,
91 roots: Arc<Roots>,
92 sender: Sender<io::TaskResult>,
93}
94
95impl WatcherWorker {
96 fn handle_debounced_event(&self, ev: DebouncedEvent) -> Result<(), Box<std::error::Error>> {
97 match ev {
98 DebouncedEvent::NoticeWrite(_)
99 | DebouncedEvent::NoticeRemove(_)
100 | DebouncedEvent::Chmod(_) => {
101 // ignore
102 }
103 DebouncedEvent::Rescan => {
104 // TODO rescan all roots
105 }
106 DebouncedEvent::Create(path) => {
107 self.handle_change(path, ChangeKind::Create);
108 }
109 DebouncedEvent::Write(path) => {
110 self.handle_change(path, ChangeKind::Write);
111 }
112 DebouncedEvent::Remove(path) => {
113 self.handle_change(path, ChangeKind::Remove);
114 }
115 DebouncedEvent::Rename(src, dst) => {
116 self.handle_change(src, ChangeKind::Remove);
117 self.handle_change(dst, ChangeKind::Create);
118 }
119 DebouncedEvent::Error(err, path) => {
120 // TODO should we reload the file contents?
121 log::warn!("watcher error \"{}\", {:?}", err, path);
122 }
123 }
124 Ok(())
125 }
126
127 fn handle_change(&self, path: PathBuf, kind: ChangeKind) {
128 if let Err(e) = self.try_handle_change(path, kind) {
129 log::warn!("watcher error: {}", e)
130 }
131 }
132
133 fn try_handle_change(
134 &self,
135 path: PathBuf,
136 kind: ChangeKind,
137 ) -> Result<(), Box<std::error::Error>> {
138 let (root, rel_path) = match self.roots.find(&path) {
139 Some(x) => x,
140 None => return Ok(()),
141 };
142 match kind {
143 ChangeKind::Create => {
144 if path.is_dir() {
145 self.watch_recursive(&path, root);
146 } else {
147 let text = fs::read_to_string(&path)?;
148 self.sender.send(io::TaskResult::AddSingleFile {
149 root,
150 path: rel_path,
151 text,
152 })?
153 }
154 }
155 ChangeKind::Write => {
156 let text = fs::read_to_string(&path)?;
157 self.sender.send(io::TaskResult::ChangeSingleFile {
158 root,
159 path: rel_path,
160 text,
161 })?
162 }
163 ChangeKind::Remove => self.sender.send(io::TaskResult::RemoveSingleFile {
164 root,
165 path: rel_path,
166 })?,
167 }
168 Ok(())
169 }
170
171 fn watch_recursive(&self, dir: &Path, root: VfsRoot) {
172 let filter = &self.roots[root];
173 for res in WalkDir::new(dir)
174 .into_iter()
175 .filter_entry(filter.entry_filter())
176 {
177 match res {
178 Ok(entry) => {
179 if entry.file_type().is_dir() {
180 watch_one(self.watcher.as_ref(), entry.path());
181 } else {
182 // emit only for files otherwise we will cause watch_recursive to be called again with a dir that we are already watching
183 // emit as create because we haven't seen it yet
184 self.handle_change(entry.path().to_path_buf(), ChangeKind::Create);
185 }
186 }
187 Err(e) => log::warn!("watcher error: {}", e),
188 }
189 }
190 }
191}
192
193fn watch_one(watcher: &Mutex<Option<RecommendedWatcher>>, dir: &Path) {
194 if let Some(watcher) = watcher.lock().as_mut() {
195 match watcher.watch(dir, RecursiveMode::NonRecursive) {
196 Ok(()) => log::debug!("watching \"{}\"", dir.display()),
197 Err(e) => log::warn!("could not watch \"{}\": {}", dir.display(), e),
198 }
199 }
200}
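
The deleted `Watcher` used the `drop_bomb` crate to turn a forgotten `shutdown()` into a loud failure: the bomb is armed at construction and defused only on the explicit shutdown path. A minimal sketch of that pattern with a hypothetical `Service` type (not this crate's code):

    use drop_bomb::DropBomb;

    struct Service {
        bomb: DropBomb,
    }

    impl Service {
        fn start() -> Service {
            Service {
                // Dropping `Service` without calling `shutdown` panics with this message.
                bomb: DropBomb::new("Service was not shut down"),
            }
        }

        fn shutdown(mut self) {
            self.bomb.defuse();
            // ... join worker threads, flush state, etc.
        }
    }

    fn main() {
        let service = Service::start();
        service.shutdown(); // skipping this line would panic when `service` is dropped
    }
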
diff --git a/crates/ra_vfs/src/lib.rs b/crates/ra_vfs/src/lib.rs
index 70a13f765..71a3f807d 100644
--- a/crates/ra_vfs/src/lib.rs
+++ b/crates/ra_vfs/src/lib.rs
@@ -18,94 +18,78 @@ mod io;
18use std::{ 18use std::{
19 cmp::Reverse, 19 cmp::Reverse,
20 fmt, fs, mem, 20 fmt, fs, mem,
21 ops::{Deref, DerefMut},
22 path::{Path, PathBuf}, 21 path::{Path, PathBuf},
23 sync::Arc, 22 sync::Arc,
24 thread, 23 thread,
25}; 24};
26 25
27use crossbeam_channel::Receiver; 26use crossbeam_channel::Receiver;
28use ra_arena::{impl_arena_id, Arena, RawId}; 27use ra_arena::{impl_arena_id, Arena, RawId, map::ArenaMap};
29use relative_path::{Component, RelativePath, RelativePathBuf}; 28use relative_path::{Component, RelativePath, RelativePathBuf};
30use rustc_hash::{FxHashMap, FxHashSet}; 29use rustc_hash::{FxHashMap, FxHashSet};
31use walkdir::DirEntry;
32 30
33pub use crate::io::TaskResult as VfsTask; 31pub use crate::io::TaskResult as VfsTask;
34use io::{TaskResult, Worker}; 32use io::{TaskResult, Worker};
35 33
36/// `RootFilter` is a predicate that checks if a file can belong to a root. If 34#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
37/// several filters match a file (nested dirs), the most nested one wins. 35pub struct VfsRoot(pub RawId);
38pub(crate) struct RootFilter { 36impl_arena_id!(VfsRoot);
37
38/// Describes the contents of a single source root.
39///
40/// `RootConfig` can be thought of as a glob pattern like `src/**.rs` which
41/// specifies the source root, or as a function which takes a `PathBuf` and
42/// returns `true` iff the path belongs to the source root.
43pub(crate) struct RootConfig {
39 root: PathBuf, 44 root: PathBuf,
40 filter: fn(&Path, &RelativePath) -> bool,
41 excluded_dirs: Vec<PathBuf>, 45 excluded_dirs: Vec<PathBuf>,
42} 46}
43 47
44impl RootFilter { 48pub(crate) struct Roots {
45 fn new(root: PathBuf, excluded_dirs: Vec<PathBuf>) -> RootFilter { 49 roots: Arena<VfsRoot, Arc<RootConfig>>,
46 RootFilter { 50}
51
52impl std::ops::Deref for Roots {
53 type Target = Arena<VfsRoot, Arc<RootConfig>>;
54 fn deref(&self) -> &Self::Target {
55 &self.roots
56 }
57}
58
59impl RootConfig {
60 fn new(root: PathBuf, excluded_dirs: Vec<PathBuf>) -> RootConfig {
61 RootConfig {
47 root, 62 root,
48 filter: default_filter,
49 excluded_dirs, 63 excluded_dirs,
50 } 64 }
51 } 65 }
52 /// Check if this root can contain `path`. NB: even if this returns 66 /// Checks if the root contains a path and returns a root-relative path.
53 /// true, the `path` might actually be contained in some nested root. 67 pub(crate) fn contains(&self, path: &Path) -> Option<RelativePathBuf> {
54 pub(crate) fn can_contain(&self, path: &Path) -> Option<RelativePathBuf> { 68 // First, check excluded dirs
55 let rel_path = path.strip_prefix(&self.root).ok()?; 69 if self.excluded_dirs.iter().any(|it| path.starts_with(it)) {
56 let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
57 if !(self.filter)(path, rel_path.as_relative_path()) {
58 return None; 70 return None;
59 } 71 }
60 Some(rel_path) 72 let rel_path = path.strip_prefix(&self.root).ok()?;
61 } 73 let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
62
63 pub(crate) fn entry_filter<'a>(&'a self) -> impl FnMut(&DirEntry) -> bool + 'a {
64 move |entry: &DirEntry| {
65 if entry.file_type().is_dir() && self.excluded_dirs.iter().any(|it| it == entry.path())
66 {
67 // do not walk nested roots
68 false
69 } else {
70 self.can_contain(entry.path()).is_some()
71 }
72 }
73 }
74}
75 74
76pub(crate) fn default_filter(path: &Path, rel_path: &RelativePath) -> bool { 75 // Ignore some common directories.
77 if path.is_dir() { 76 //
77 // FIXME: don't hard-code, specify at source-root creation time using
78 // gitignore
78 for (i, c) in rel_path.components().enumerate() { 79 for (i, c) in rel_path.components().enumerate() {
79 if let Component::Normal(c) = c { 80 if let Component::Normal(c) = c {
80 // TODO hardcoded for now
81 if (i == 0 && c == "target") || c == ".git" || c == "node_modules" { 81 if (i == 0 && c == "target") || c == ".git" || c == "node_modules" {
82 return false; 82 return None;
83 } 83 }
84 } 84 }
85 } 85 }
86 true
87 } else {
88 rel_path.extension() == Some("rs")
89 }
90}
91
92#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
93pub struct VfsRoot(pub RawId);
94impl_arena_id!(VfsRoot);
95
96#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
97pub struct VfsFile(pub RawId);
98impl_arena_id!(VfsFile);
99 86
100struct VfsFileData { 87 if path.is_file() && rel_path.extension() != Some("rs") {
101 root: VfsRoot, 88 return None;
102 path: RelativePathBuf, 89 }
103 is_overlayed: bool,
104 text: Arc<String>,
105}
106 90
107pub(crate) struct Roots { 91 Some(rel_path)
108 roots: Arena<VfsRoot, Arc<RootFilter>>, 92 }
109} 93}
110 94
111impl Roots { 95impl Roots {
@@ -120,59 +104,61 @@ impl Roots {
120 .map(|it| it.clone()) 104 .map(|it| it.clone())
121 .collect::<Vec<_>>(); 105 .collect::<Vec<_>>();
122 106
123 let root_filter = Arc::new(RootFilter::new(path.clone(), nested_roots)); 107 let config = Arc::new(RootConfig::new(path.clone(), nested_roots));
124 108
125 roots.alloc(root_filter.clone()); 109 roots.alloc(config.clone());
126 } 110 }
127 Roots { roots } 111 Roots { roots }
128 } 112 }
129 pub(crate) fn find(&self, path: &Path) -> Option<(VfsRoot, RelativePathBuf)> { 113 pub(crate) fn find(&self, path: &Path) -> Option<(VfsRoot, RelativePathBuf)> {
130 self.roots 114 self.roots
131 .iter() 115 .iter()
132 .find_map(|(root, data)| data.can_contain(path).map(|it| (root, it))) 116 .find_map(|(root, data)| data.contains(path).map(|it| (root, it)))
133 } 117 }
134} 118}
135 119
136impl Deref for Roots { 120#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
137 type Target = Arena<VfsRoot, Arc<RootFilter>>; 121pub struct VfsFile(pub RawId);
138 fn deref(&self) -> &Self::Target { 122impl_arena_id!(VfsFile);
139 &self.roots
140 }
141}
142 123
143impl DerefMut for Roots { 124struct VfsFileData {
144 fn deref_mut(&mut self) -> &mut Self::Target { 125 root: VfsRoot,
145 &mut self.roots 126 path: RelativePathBuf,
146 } 127 is_overlayed: bool,
128 text: Arc<String>,
147} 129}
148 130
149pub struct Vfs { 131pub struct Vfs {
150 roots: Arc<Roots>, 132 roots: Arc<Roots>,
151 files: Arena<VfsFile, VfsFileData>, 133 files: Arena<VfsFile, VfsFileData>,
152 root2files: FxHashMap<VfsRoot, FxHashSet<VfsFile>>, 134 root2files: ArenaMap<VfsRoot, FxHashSet<VfsFile>>,
153 pending_changes: Vec<VfsChange>, 135 pending_changes: Vec<VfsChange>,
154 worker: Worker, 136 worker: Worker,
155} 137}
156 138
157impl fmt::Debug for Vfs { 139impl fmt::Debug for Vfs {
158 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 140 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
159 f.write_str("Vfs { ... }") 141 f.debug_struct("Vfs")
142 .field("n_roots", &self.roots.len())
143 .field("n_files", &self.files.len())
144 .field("n_pending_changes", &self.pending_changes.len())
145 .finish()
160 } 146 }
161} 147}
162 148
163impl Vfs { 149impl Vfs {
164 pub fn new(roots: Vec<PathBuf>) -> (Vfs, Vec<VfsRoot>) { 150 pub fn new(roots: Vec<PathBuf>) -> (Vfs, Vec<VfsRoot>) {
165 let roots = Arc::new(Roots::new(roots)); 151 let roots = Arc::new(Roots::new(roots));
166 let worker = io::Worker::start(roots.clone()); 152 let worker = io::Worker::start(Arc::clone(&roots));
167 let mut root2files = FxHashMap::default(); 153 let mut root2files = ArenaMap::default();
168 154
169 for (root, filter) in roots.iter() { 155 for (root, config) in roots.iter() {
170 root2files.insert(root, Default::default()); 156 root2files.insert(root, Default::default());
171 worker 157 worker
172 .sender() 158 .sender()
173 .send(io::Task::AddRoot { 159 .send(io::Task::AddRoot {
174 root, 160 root,
175 filter: filter.clone(), 161 config: Arc::clone(config),
176 }) 162 })
177 .unwrap(); 163 .unwrap();
178 } 164 }
@@ -242,7 +228,7 @@ impl Vfs {
242 let mut cur_files = Vec::new(); 228 let mut cur_files = Vec::new();
243 // While we were scanning the root in the backgound, a file might have 229 // While we were scanning the root in the backgound, a file might have
244 // been open in the editor, so we need to account for that. 230 // been open in the editor, so we need to account for that.
245 let exising = self.root2files[&root] 231 let exising = self.root2files[root]
246 .iter() 232 .iter()
247 .map(|&file| (self.files[file].path.clone(), file)) 233 .map(|&file| (self.files[file].path.clone(), file))
248 .collect::<FxHashMap<_, _>>(); 234 .collect::<FxHashMap<_, _>>();
@@ -384,7 +370,7 @@ impl Vfs {
384 is_overlayed, 370 is_overlayed,
385 }; 371 };
386 let file = self.files.alloc(data); 372 let file = self.files.alloc(data);
387 self.root2files.get_mut(&root).unwrap().insert(file); 373 self.root2files.get_mut(root).unwrap().insert(file);
388 file 374 file
389 } 375 }
390 376
@@ -399,7 +385,7 @@ impl Vfs {
399 self.files[file].text = Default::default(); 385 self.files[file].text = Default::default();
400 self.files[file].path = Default::default(); 386 self.files[file].path = Default::default();
401 let root = self.files[file].root; 387 let root = self.files[file].root;
402 let removed = self.root2files.get_mut(&root).unwrap().remove(&file); 388 let removed = self.root2files.get_mut(root).unwrap().remove(&file);
403 assert!(removed); 389 assert!(removed);
404 } 390 }
405 391
@@ -410,7 +396,7 @@ impl Vfs {
410 } 396 }
411 397
412 fn find_file(&self, root: VfsRoot, path: &RelativePath) -> Option<VfsFile> { 398 fn find_file(&self, root: VfsRoot, path: &RelativePath) -> Option<VfsFile> {
413 self.root2files[&root] 399 self.root2files[root]
414 .iter() 400 .iter()
415 .map(|&it| it) 401 .map(|&it| it)
416 .find(|&file| self.files[file].path == path) 402 .find(|&file| self.files[file].path == path)
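
The reworked `RootConfig::contains` is now the single place where the "glob pattern" view of a root is enforced: excluded nested roots, the hard-coded `target`/`.git`/`node_modules` ignores, and the `.rs`-only rule all live there. The sketch below is a simplified, filesystem-free rewrite of that check (the real code also calls `path.is_file()`, which is skipped here so the example runs on made-up paths under an assumed root `/proj` with `/proj/nested` excluded):

    use std::path::{Path, PathBuf};

    // Simplified RootConfig::contains-style check, operating on path strings only.
    fn contains(root: &Path, excluded_dirs: &[PathBuf], path: &Path) -> Option<String> {
        if excluded_dirs.iter().any(|it| path.starts_with(it)) {
            return None;
        }
        let rel = path.strip_prefix(root).ok()?;
        let components: Vec<&str> = rel.iter().filter_map(|c| c.to_str()).collect();
        for (i, c) in components.iter().enumerate() {
            // `target` is only ignored at the top level; `.git` and
            // `node_modules` are ignored at any depth.
            if (i == 0 && *c == "target") || *c == ".git" || *c == "node_modules" {
                return None;
            }
        }
        if rel.extension().and_then(|e| e.to_str()) != Some("rs") {
            return None;
        }
        Some(components.join("/"))
    }

    fn main() {
        let root = Path::new("/proj");
        let excluded = vec![PathBuf::from("/proj/nested")];
        assert_eq!(
            contains(root, &excluded, Path::new("/proj/src/lib.rs")),
            Some("src/lib.rs".to_string())
        );
        assert_eq!(contains(root, &excluded, Path::new("/proj/target/debug/x.rs")), None);
        assert_eq!(contains(root, &excluded, Path::new("/proj/nested/main.rs")), None);
        assert_eq!(contains(root, &excluded, Path::new("/proj/README.md")), None);
    }
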
diff --git a/crates/tools/src/bin/pre-commit.rs b/crates/tools/src/bin/pre-commit.rs
index bae3b26d3..e00bd0d3d 100644
--- a/crates/tools/src/bin/pre-commit.rs
+++ b/crates/tools/src/bin/pre-commit.rs
@@ -1,10 +1,9 @@
1use std::{ 1use std::process::Command;
2 process::{Command},
3};
4 2
5use tools::{Result, run_rustfmt, run, project_root};
6use failure::bail; 3use failure::bail;
7 4
5use tools::{Result, run_rustfmt, run, project_root};
6
8fn main() -> tools::Result<()> { 7fn main() -> tools::Result<()> {
9 run_rustfmt(tools::Overwrite)?; 8 run_rustfmt(tools::Overwrite)?;
10 update_staged() 9 update_staged()
diff --git a/crates/tools/src/lib.rs b/crates/tools/src/lib.rs
index d404db214..311bcb4d8 100644
--- a/crates/tools/src/lib.rs
+++ b/crates/tools/src/lib.rs
@@ -1,7 +1,8 @@
1use std::{ 1use std::{
2 fs,
3 collections::HashMap,
2 path::{Path, PathBuf}, 4 path::{Path, PathBuf},
3 process::{Command, Stdio}, 5 process::{Command, Stdio},
4 fs::copy,
5 io::{Error, ErrorKind} 6 io::{Error, ErrorKind}
6}; 7};
7 8
@@ -13,6 +14,10 @@ pub use teraron::{Mode, Overwrite, Verify};
13pub type Result<T> = std::result::Result<T, failure::Error>; 14pub type Result<T> = std::result::Result<T, failure::Error>;
14 15
15pub const GRAMMAR: &str = "crates/ra_syntax/src/grammar.ron"; 16pub const GRAMMAR: &str = "crates/ra_syntax/src/grammar.ron";
17const GRAMMAR_DIR: &str = "crates/ra_syntax/src/grammar";
18const OK_INLINE_TESTS_DIR: &str = "crates/ra_syntax/tests/data/parser/inline/ok";
19const ERR_INLINE_TESTS_DIR: &str = "crates/ra_syntax/tests/data/parser/inline/err";
20
16pub const SYNTAX_KINDS: &str = "crates/ra_syntax/src/syntax_kinds/generated.rs.tera"; 21pub const SYNTAX_KINDS: &str = "crates/ra_syntax/src/syntax_kinds/generated.rs.tera";
17pub const AST: &str = "crates/ra_syntax/src/ast/generated.rs.tera"; 22pub const AST: &str = "crates/ra_syntax/src/ast/generated.rs.tera";
18const TOOLCHAIN: &str = "stable"; 23const TOOLCHAIN: &str = "stable";
@@ -130,9 +135,9 @@ pub fn install_format_hook() -> Result<()> {
130 if !result_path.exists() { 135 if !result_path.exists() {
131 run("cargo build --package tools --bin pre-commit", ".")?; 136 run("cargo build --package tools --bin pre-commit", ".")?;
132 if cfg!(windows) { 137 if cfg!(windows) {
133 copy("./target/debug/pre-commit.exe", result_path)?; 138 fs::copy("./target/debug/pre-commit.exe", result_path)?;
134 } else { 139 } else {
135 copy("./target/debug/pre-commit", result_path)?; 140 fs::copy("./target/debug/pre-commit", result_path)?;
136 } 141 }
137 } else { 142 } else {
138 return Err(Error::new(ErrorKind::AlreadyExists, "Git hook already created").into()); 143 return Err(Error::new(ErrorKind::AlreadyExists, "Git hook already created").into());
@@ -156,3 +161,98 @@ pub fn run_fuzzer() -> Result<()> {
156 "./crates/ra_syntax", 161 "./crates/ra_syntax",
157 ) 162 )
158} 163}
164
165pub fn gen_tests(mode: Mode) -> Result<()> {
166 let tests = tests_from_dir(&project_root().join(Path::new(GRAMMAR_DIR)))?;
167 fn install_tests(tests: &HashMap<String, Test>, into: &str, mode: Mode) -> Result<()> {
168 let tests_dir = project_root().join(into);
169 if !tests_dir.is_dir() {
170 fs::create_dir_all(&tests_dir)?;
171 }
172 // ok is never actually read, but it needs to be specified to create a Test in existing_tests
173 let existing = existing_tests(&tests_dir, true)?;
174 for t in existing.keys().filter(|&t| !tests.contains_key(t)) {
175 panic!("Test is deleted: {}", t);
176 }
177
178 let mut new_idx = existing.len() + 1;
179 for (name, test) in tests {
180 let path = match existing.get(name) {
181 Some((path, _test)) => path.clone(),
182 None => {
183 let file_name = format!("{:04}_{}.rs", new_idx, name);
184 new_idx += 1;
185 tests_dir.join(file_name)
186 }
187 };
188 teraron::update(&path, &test.text, mode)?;
189 }
190 Ok(())
191 }
192 install_tests(&tests.ok, OK_INLINE_TESTS_DIR, mode)?;
193 install_tests(&tests.err, ERR_INLINE_TESTS_DIR, mode)
194}
195
196#[derive(Default, Debug)]
197struct Tests {
198 pub ok: HashMap<String, Test>,
199 pub err: HashMap<String, Test>,
200}
201
202fn tests_from_dir(dir: &Path) -> Result<Tests> {
203 let mut res = Tests::default();
204 for entry in ::walkdir::WalkDir::new(dir) {
205 let entry = entry.unwrap();
206 if !entry.file_type().is_file() {
207 continue;
208 }
209 if entry.path().extension().unwrap_or_default() != "rs" {
210 continue;
211 }
212 process_file(&mut res, entry.path())?;
213 }
214 let grammar_rs = dir.parent().unwrap().join("grammar.rs");
215 process_file(&mut res, &grammar_rs)?;
216 return Ok(res);
217 fn process_file(res: &mut Tests, path: &Path) -> Result<()> {
218 let text = fs::read_to_string(path)?;
219
220 for (_, test) in collect_tests(&text) {
221 if test.ok {
222 if let Some(old_test) = res.ok.insert(test.name.clone(), test) {
223 bail!("Duplicate test: {}", old_test.name)
224 }
225 } else {
226 if let Some(old_test) = res.err.insert(test.name.clone(), test) {
227 bail!("Duplicate test: {}", old_test.name)
228 }
229 }
230 }
231 Ok(())
232 }
233}
234
235fn existing_tests(dir: &Path, ok: bool) -> Result<HashMap<String, (PathBuf, Test)>> {
236 let mut res = HashMap::new();
237 for file in fs::read_dir(dir)? {
238 let file = file?;
239 let path = file.path();
240 if path.extension().unwrap_or_default() != "rs" {
241 continue;
242 }
243 let name = {
244 let file_name = path.file_name().unwrap().to_str().unwrap();
245 file_name[5..file_name.len() - 3].to_string()
246 };
247 let text = fs::read_to_string(&path)?;
248 let test = Test {
249 name: name.clone(),
250 text,
251 ok,
252 };
253 if let Some(old) = res.insert(name, (path, test)) {
254 println!("Duplicate test: {:?}", old);
255 }
256 }
257 Ok(res)
258}
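
With `gen_tests` moved into `tools/src/lib.rs`, the CLI and the test suite share one implementation. It names generated files with a four-digit index, and `existing_tests` recovers a test's name by stripping that prefix and the `.rs` suffix; a small worked example of those two slices:

    fn main() {
        // gen_tests: `format!("{:04}_{}.rs", new_idx, name)`
        let file_name = format!("{:04}_{}.rs", 12, "complex_assignment");
        assert_eq!(file_name, "0012_complex_assignment.rs");

        // existing_tests: drop the 5-char "NNNN_" prefix and the 3-char ".rs" suffix
        let name = &file_name[5..file_name.len() - 3];
        assert_eq!(name, "complex_assignment");
    }
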
diff --git a/crates/tools/src/main.rs b/crates/tools/src/main.rs
index d6eabce6c..c3e293911 100644
--- a/crates/tools/src/main.rs
+++ b/crates/tools/src/main.rs
@@ -1,30 +1,13 @@
1use std::{ 1use clap::{App, SubCommand};
2 collections::HashMap,
3 fs,
4 path::{Path, PathBuf},
5};
6
7use clap::{App, Arg, SubCommand};
8use failure::bail;
9 2
10use tools::{ 3use tools::{
11 collect_tests, generate,install_format_hook, run, run_rustfmt, 4 generate, gen_tests, install_format_hook, run, run_rustfmt,
12 Mode, Overwrite, Result, Test, Verify, project_root, run_fuzzer 5 Overwrite, Result, run_fuzzer,
13}; 6};
14 7
15const GRAMMAR_DIR: &str = "crates/ra_syntax/src/grammar";
16const OK_INLINE_TESTS_DIR: &str = "crates/ra_syntax/tests/data/parser/inline/ok";
17const ERR_INLINE_TESTS_DIR: &str = "crates/ra_syntax/tests/data/parser/inline/err";
18
19fn main() -> Result<()> { 8fn main() -> Result<()> {
20 let matches = App::new("tasks") 9 let matches = App::new("tasks")
21 .setting(clap::AppSettings::SubcommandRequiredElseHelp) 10 .setting(clap::AppSettings::SubcommandRequiredElseHelp)
22 .arg(
23 Arg::with_name("verify")
24 .long("--verify")
25 .help("Verify that generated code is up-to-date")
26 .global(true),
27 )
28 .subcommand(SubCommand::with_name("gen-syntax")) 11 .subcommand(SubCommand::with_name("gen-syntax"))
29 .subcommand(SubCommand::with_name("gen-tests")) 12 .subcommand(SubCommand::with_name("gen-tests"))
30 .subcommand(SubCommand::with_name("install-code")) 13 .subcommand(SubCommand::with_name("install-code"))
@@ -32,19 +15,14 @@ fn main() -> Result<()> {
32 .subcommand(SubCommand::with_name("format-hook")) 15 .subcommand(SubCommand::with_name("format-hook"))
33 .subcommand(SubCommand::with_name("fuzz-tests")) 16 .subcommand(SubCommand::with_name("fuzz-tests"))
34 .get_matches(); 17 .get_matches();
35 let mode = if matches.is_present("verify") {
36 Verify
37 } else {
38 Overwrite
39 };
40 match matches 18 match matches
41 .subcommand_name() 19 .subcommand_name()
42 .expect("Subcommand must be specified") 20 .expect("Subcommand must be specified")
43 { 21 {
44 "install-code" => install_code_extension()?, 22 "install-code" => install_code_extension()?,
45 "gen-tests" => gen_tests(mode)?, 23 "gen-tests" => gen_tests(Overwrite)?,
46 "gen-syntax" => generate(Overwrite)?, 24 "gen-syntax" => generate(Overwrite)?,
47 "format" => run_rustfmt(mode)?, 25 "format" => run_rustfmt(Overwrite)?,
48 "format-hook" => install_format_hook()?, 26 "format-hook" => install_format_hook()?,
49 "fuzz-tests" => run_fuzzer()?, 27 "fuzz-tests" => run_fuzzer()?,
50 _ => unreachable!(), 28 _ => unreachable!(),
@@ -52,101 +30,6 @@ fn main() -> Result<()> {
52 Ok(()) 30 Ok(())
53} 31}
54 32
55fn gen_tests(mode: Mode) -> Result<()> {
56 let tests = tests_from_dir(Path::new(GRAMMAR_DIR))?;
57 fn install_tests(tests: &HashMap<String, Test>, into: &str, mode: Mode) -> Result<()> {
58 let tests_dir = project_root().join(into);
59 if !tests_dir.is_dir() {
60 fs::create_dir_all(&tests_dir)?;
61 }
62 // ok is never actually read, but it needs to be specified to create a Test in existing_tests
63 let existing = existing_tests(&tests_dir, true)?;
64 for t in existing.keys().filter(|&t| !tests.contains_key(t)) {
65 panic!("Test is deleted: {}", t);
66 }
67
68 let mut new_idx = existing.len() + 1;
69 for (name, test) in tests {
70 let path = match existing.get(name) {
71 Some((path, _test)) => path.clone(),
72 None => {
73 let file_name = format!("{:04}_{}.rs", new_idx, name);
74 new_idx += 1;
75 tests_dir.join(file_name)
76 }
77 };
78 teraron::update(&path, &test.text, mode)?;
79 }
80 Ok(())
81 }
82 install_tests(&tests.ok, OK_INLINE_TESTS_DIR, mode)?;
83 install_tests(&tests.err, ERR_INLINE_TESTS_DIR, mode)
84}
85
86#[derive(Default, Debug)]
87struct Tests {
88 pub ok: HashMap<String, Test>,
89 pub err: HashMap<String, Test>,
90}
91
92fn tests_from_dir(dir: &Path) -> Result<Tests> {
93 let mut res = Tests::default();
94 for entry in ::walkdir::WalkDir::new(dir) {
95 let entry = entry.unwrap();
96 if !entry.file_type().is_file() {
97 continue;
98 }
99 if entry.path().extension().unwrap_or_default() != "rs" {
100 continue;
101 }
102 process_file(&mut res, entry.path())?;
103 }
104 let grammar_rs = dir.parent().unwrap().join("grammar.rs");
105 process_file(&mut res, &grammar_rs)?;
106 return Ok(res);
107 fn process_file(res: &mut Tests, path: &Path) -> Result<()> {
108 let text = fs::read_to_string(path)?;
109
110 for (_, test) in collect_tests(&text) {
111 if test.ok {
112 if let Some(old_test) = res.ok.insert(test.name.clone(), test) {
113 bail!("Duplicate test: {}", old_test.name)
114 }
115 } else {
116 if let Some(old_test) = res.err.insert(test.name.clone(), test) {
117 bail!("Duplicate test: {}", old_test.name)
118 }
119 }
120 }
121 Ok(())
122 }
123}
124
125fn existing_tests(dir: &Path, ok: bool) -> Result<HashMap<String, (PathBuf, Test)>> {
126 let mut res = HashMap::new();
127 for file in fs::read_dir(dir)? {
128 let file = file?;
129 let path = file.path();
130 if path.extension().unwrap_or_default() != "rs" {
131 continue;
132 }
133 let name = {
134 let file_name = path.file_name().unwrap().to_str().unwrap();
135 file_name[5..file_name.len() - 3].to_string()
136 };
137 let text = fs::read_to_string(&path)?;
138 let test = Test {
139 name: name.clone(),
140 text,
141 ok,
142 };
143 if let Some(old) = res.insert(name, (path, test)) {
144 println!("Duplicate test: {:?}", old);
145 }
146 }
147 Ok(res)
148}
149
150fn install_code_extension() -> Result<()> { 33fn install_code_extension() -> Result<()> {
151 run("cargo install --path crates/ra_lsp_server --force", ".")?; 34 run("cargo install --path crates/ra_lsp_server --force", ".")?;
152 if cfg!(windows) { 35 if cfg!(windows) {
diff --git a/crates/tools/tests/cli.rs b/crates/tools/tests/cli.rs
index 2d238d9ea..2ee4b5223 100644
--- a/crates/tools/tests/cli.rs
+++ b/crates/tools/tests/cli.rs
@@ -1,15 +1,23 @@
1extern crate tools; 1use tools::{generate, gen_tests, run_rustfmt, Verify};
2
3use tools::{generate, run_rustfmt, Verify};
4 2
5#[test] 3#[test]
6fn verify_template_generation() { 4fn generated_grammar_is_fresh() {
7 if let Err(error) = generate(Verify) { 5 if let Err(error) = generate(Verify) {
8 panic!("{}. Please update it by running `cargo gen-syntax`", error); 6 panic!("{}. Please update it by running `cargo gen-syntax`", error);
9 } 7 }
10} 8}
11 9
12#[test] 10#[test]
11fn generated_tests_are_fresh() {
12 if let Err(error) = gen_tests(Verify) {
13 panic!(
14 "{}. Please update tests by running `cargo gen-tests`",
15 error
16 );
17 }
18}
19
20#[test]
13fn check_code_formatting() { 21fn check_code_formatting() {
14 if let Err(error) = run_rustfmt(Verify) { 22 if let Err(error) = run_rustfmt(Verify) {
15 panic!( 23 panic!(